{ // 获取包含Hugging Face文本的span元素 const spans = link.querySelectorAll('span.whitespace-nowrap, span.hidden.whitespace-nowrap'); spans.forEach(span => { if (span.textContent && span.textContent.trim().match(/Hugging\s*Face/i)) { span.textContent = 'AI快站'; } }); }); // 替换logo图片的alt属性 document.querySelectorAll('img[alt*="Hugging"], img[alt*="Face"]').forEach(img => { if (img.alt.match(/Hugging\s*Face/i)) { img.alt = 'AI快站 logo'; } }); } // 替换导航栏中的链接 function replaceNavigationLinks() { // 已替换标记,防止重复运行 if (window._navLinksReplaced) { return; } // 已经替换过的链接集合,防止重复替换 const replacedLinks = new Set(); // 只在导航栏区域查找和替换链接 const headerArea = document.querySelector('header') || document.querySelector('nav'); if (!headerArea) { return; } // 在导航区域内查找链接 const navLinks = headerArea.querySelectorAll('a'); navLinks.forEach(link => { // 如果已经替换过,跳过 if (replacedLinks.has(link)) return; const linkText = link.textContent.trim(); const linkHref = link.getAttribute('href') || ''; // 替换Spaces链接 - 仅替换一次 if ( (linkHref.includes('/spaces') || linkHref === '/spaces' || linkText === 'Spaces' || linkText.match(/^s*Spacess*$/i)) && linkText !== 'OCR模型免费转Markdown' && linkText !== 'OCR模型免费转Markdown' ) { link.textContent = 'OCR模型免费转Markdown'; link.href = 'https://fast360.xyz'; link.setAttribute('target', '_blank'); link.setAttribute('rel', 'noopener noreferrer'); replacedLinks.add(link); } // 删除Posts链接 else if ( (linkHref.includes('/posts') || linkHref === '/posts' || linkText === 'Posts' || linkText.match(/^s*Postss*$/i)) ) { if (link.parentNode) { link.parentNode.removeChild(link); } replacedLinks.add(link); } // 替换Docs链接 - 仅替换一次 else if ( (linkHref.includes('/docs') || linkHref === '/docs' || linkText === 'Docs' || linkText.match(/^s*Docss*$/i)) && linkText !== '模型下载攻略' ) { link.textContent = '模型下载攻略'; link.href = '/'; replacedLinks.add(link); } // 删除Enterprise链接 else if ( (linkHref.includes('/enterprise') || linkHref === '/enterprise' || linkText === 'Enterprise' || linkText.match(/^s*Enterprises*$/i)) 
) { if (link.parentNode) { link.parentNode.removeChild(link); } replacedLinks.add(link); } }); // 查找可能嵌套的Spaces和Posts文本 const textNodes = []; function findTextNodes(element) { if (element.nodeType === Node.TEXT_NODE) { const text = element.textContent.trim(); if (text === 'Spaces' || text === 'Posts' || text === 'Enterprise') { textNodes.push(element); } } else { for (const child of element.childNodes) { findTextNodes(child); } } } // 只在导航区域内查找文本节点 findTextNodes(headerArea); // 替换找到的文本节点 textNodes.forEach(node => { const text = node.textContent.trim(); if (text === 'Spaces') { node.textContent = node.textContent.replace(/Spaces/g, 'OCR模型免费转Markdown'); } else if (text === 'Posts') { // 删除Posts文本节点 if (node.parentNode) { node.parentNode.removeChild(node); } } else if (text === 'Enterprise') { // 删除Enterprise文本节点 if (node.parentNode) { node.parentNode.removeChild(node); } } }); // 标记已替换完成 window._navLinksReplaced = true; } // 替换代码区域中的域名 function replaceCodeDomains() { // 特别处理span.hljs-string和span.njs-string元素 document.querySelectorAll('span.hljs-string, span.njs-string, span[class*="hljs-string"], span[class*="njs-string"]').forEach(span => { if (span.textContent && span.textContent.includes('huggingface.co')) { span.textContent = span.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 替换hljs-string类的span中的域名(移除多余的转义符号) document.querySelectorAll('span.hljs-string, span[class*="hljs-string"]').forEach(span => { if (span.textContent && span.textContent.includes('huggingface.co')) { span.textContent = span.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 替换pre和code标签中包含git clone命令的域名 document.querySelectorAll('pre, code').forEach(element => { if (element.textContent && element.textContent.includes('git clone')) { const text = element.innerHTML; if (text.includes('huggingface.co')) { element.innerHTML = text.replace(/huggingface.co/g, 'aifasthub.com'); } } }); // 处理特定的命令行示例 document.querySelectorAll('pre, code').forEach(element => { const 
text = element.innerHTML; if (text.includes('huggingface.co')) { // 针对git clone命令的专门处理 if (text.includes('git clone') || text.includes('GIT_LFS_SKIP_SMUDGE=1')) { element.innerHTML = text.replace(/huggingface.co/g, 'aifasthub.com'); } } }); // 特别处理模型下载页面上的代码片段 document.querySelectorAll('.flex.border-t, .svelte_hydrator, .inline-block').forEach(container => { const content = container.innerHTML; if (content && content.includes('huggingface.co')) { container.innerHTML = content.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 特别处理模型仓库克隆对话框中的代码片段 try { // 查找包含"Clone this model repository"标题的对话框 const cloneDialog = document.querySelector('.svelte_hydration_boundary, [data-target="MainHeader"]'); if (cloneDialog) { // 查找对话框中所有的代码片段和命令示例 const codeElements = cloneDialog.querySelectorAll('pre, code, span'); codeElements.forEach(element => { if (element.textContent && element.textContent.includes('huggingface.co')) { if (element.innerHTML.includes('huggingface.co')) { element.innerHTML = element.innerHTML.replace(/huggingface.co/g, 'aifasthub.com'); } else { element.textContent = element.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } } }); } // 更精确地定位克隆命令中的域名 document.querySelectorAll('[data-target]').forEach(container => { const codeBlocks = container.querySelectorAll('pre, code, span.hljs-string'); codeBlocks.forEach(block => { if (block.textContent && block.textContent.includes('huggingface.co')) { if (block.innerHTML.includes('huggingface.co')) { block.innerHTML = block.innerHTML.replace(/huggingface.co/g, 'aifasthub.com'); } else { block.textContent = block.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } } }); }); } catch (e) { // 错误处理但不打印日志 } } // 当DOM加载完成后执行替换 if (document.readyState === 'loading') { document.addEventListener('DOMContentLoaded', () => { replaceHeaderBranding(); replaceNavigationLinks(); replaceCodeDomains(); // 只在必要时执行替换 - 3秒后再次检查 setTimeout(() => { if (!window._navLinksReplaced) { console.log('[Client] 3秒后重新检查导航链接'); 
replaceNavigationLinks(); } }, 3000); }); } else { replaceHeaderBranding(); replaceNavigationLinks(); replaceCodeDomains(); // 只在必要时执行替换 - 3秒后再次检查 setTimeout(() => { if (!window._navLinksReplaced) { console.log('[Client] 3秒后重新检查导航链接'); replaceNavigationLinks(); } }, 3000); } // 增加一个MutationObserver来处理可能的动态元素加载 const observer = new MutationObserver(mutations => { // 检查是否导航区域有变化 const hasNavChanges = mutations.some(mutation => { // 检查是否存在header或nav元素变化 return Array.from(mutation.addedNodes).some(node => { if (node.nodeType === Node.ELEMENT_NODE) { // 检查是否是导航元素或其子元素 if (node.tagName === 'HEADER' || node.tagName === 'NAV' || node.querySelector('header, nav')) { return true; } // 检查是否在导航元素内部 let parent = node.parentElement; while (parent) { if (parent.tagName === 'HEADER' || parent.tagName === 'NAV') { return true; } parent = parent.parentElement; } } return false; }); }); // 只在导航区域有变化时执行替换 if (hasNavChanges) { // 重置替换状态,允许再次替换 window._navLinksReplaced = false; replaceHeaderBranding(); replaceNavigationLinks(); } }); // 开始观察document.body的变化,包括子节点 if (document.body) { observer.observe(document.body, { childList: true, subtree: true }); } else { document.addEventListener('DOMContentLoaded', () => { observer.observe(document.body, { childList: true, subtree: true }); }); } })(); `,\n scripts: ['node_modules/jquery/dist/jquery.js'],\n features: {\n ProcessExternalResources: [\"script\"] // needed for firing the onload event for about:blank iframes\n },\n done(err, window) {\n QUnit.module('DOMPurify in jsdom');\n if (err) {\n console.error('Unexpected error returned by jsdom.env():', err, err.stack);\n process.exit(1);\n }\n\n if (!window.jQuery) {\n console.warn('Unable to load jQuery');\n }\n\n const DOMPurify = dompurify(window);\n if (!DOMPurify.isSupported) {\n console.error('Unexpected error returned by jsdom.env():', err, err.stack);\n process.exit(1);\n }\n\n window.alert = () => {\n window.xssed = true;\n };\n\n testSuite(DOMPurify, window, tests, xssTests);\n 
QUnit.start();\n }\n});\n"},"new_file":{"kind":"string","value":"test/jsdom-node.js"},"old_contents":{"kind":"string","value":"/* jshint node: true, esnext: true */\n/* global QUnit */\n'use strict';\n\n// Test DOMPurify + jsdom using Node.js (version 4 and up)\nconst\n dompurify = require('../'),\n jsdom = require('jsdom'),\n testSuite = require('./test-suite'),\n tests = require('./fixtures/expect'),\n xssTests = tests.filter( element => /alert/.test( element.payload ) );\n\nrequire('qunit-parameterize/qunit-parameterize');\n\nQUnit.assert.contains = function( needle, haystack, message ) {\n const result = haystack.indexOf(needle) > -1;\n this.push(result, needle, haystack, message);\n};\n\nQUnit.config.autostart = false;\n\njsdom.env({\n html: `
`,\n scripts: ['node_modules/jquery/dist/jquery.js'],\n features: {\n ProcessExternalResources: [\"script\"] // needed for firing the onload event for about:blank iframes\n },\n done(err, window) {\n QUnit.module('DOMPurify in jsdom');\n if (err) {\n console.error('Unexpected error returned by jsdom.env():', err, err.stack);\n process.exit(1);\n }\n\n if (!window.jQuery) {\n console.warn('Unable to load jQuery');\n }\n\n const DOMPurify = dompurify(window);\n if (!DOMPurify.isSupported) {\n console.error('Unexpected error returned by jsdom.env():', err, err.stack);\n process.exit(1);\n }\n\n window.alert = () => {\n window.xssed = true;\n };\n\n testSuite(DOMPurify, window, tests, xssTests);\n QUnit.start();\n }\n});\n"},"message":{"kind":"string","value":"Change jsdom tests to consume built version.\n"},"old_file":{"kind":"string","value":"test/jsdom-node.js"},"subject":{"kind":"string","value":"Change jsdom tests to consume built version."},"git_diff":{"kind":"string","value":"est/jsdom-node.js\n \n // Test DOMPurify + jsdom using Node.js (version 4 and up)\n const\n dompurify = require('../'),\n dompurify = require('../dist/purify'),\n jsdom = require('jsdom'),\n testSuite = require('./test-suite'),\n tests = 
require('./fixtures/expect'),"}}},{"rowIdx":2019,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"e300692e84cbf360403ba5e83ef093e87c70ece8"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"apache/isis,niv0/isis,peridotperiod/isis,niv0/isis,apache/isis,kidaa/isis,oscarbou/isis,niv0/isis,incodehq/isis,sanderginn/isis,apache/isis,peridotperiod/isis,howepeng/isis,oscarbou/isis,kidaa/isis,estatio/isis,incodehq/isis,sanderginn/isis,estatio/isis,incodehq/isis,kidaa/isis,apache/isis,peridotperiod/isis,sanderginn/isis,howepeng/isis,incodehq/isis,sanderginn/isis,howepeng/isis,estatio/isis,oscarbou/isis,peridotperiod/isis,kidaa/isis,howepeng/isis,niv0/isis,estatio/isis,apache/isis,apache/isis,oscarbou/isis"},"new_contents":{"kind":"string","value":"package org.nakedobjects.distribution.xml;\n\nimport org.nakedobjects.distribution.ClientDistribution;\nimport org.nakedobjects.distribution.Data;\nimport org.nakedobjects.distribution.DataHelper;\nimport org.nakedobjects.distribution.ObjectData;\nimport org.nakedobjects.distribution.xml.request.AbortTransaction;\nimport org.nakedobjects.distribution.xml.request.AllInstances;\nimport org.nakedobjects.distribution.xml.request.ClearAssociation;\nimport org.nakedobjects.distribution.xml.request.EndTransaction;\nimport org.nakedobjects.distribution.xml.request.ExecuteAction;\nimport org.nakedobjects.distribution.xml.request.FindInstancesByTitle;\nimport org.nakedobjects.distribution.xml.request.HasInstances;\nimport org.nakedobjects.distribution.xml.request.MakePersistent;\nimport org.nakedobjects.distribution.xml.request.SetAssociation;\nimport org.nakedobjects.distribution.xml.request.SetValue;\nimport org.nakedobjects.distribution.xml.request.StartTransaction;\nimport org.nakedobjects.object.DirtyObjectSet;\nimport 
org.nakedobjects.object.NakedObjectRuntimeException;\nimport org.nakedobjects.object.control.Hint;\nimport org.nakedobjects.object.persistence.InstancesCriteria;\nimport org.nakedobjects.object.persistence.Oid;\nimport org.nakedobjects.object.persistence.TitleCriteria;\nimport org.nakedobjects.object.security.Session;\nimport org.nakedobjects.utility.NotImplementedException;\n\nimport org.apache.log4j.Logger;\n\nimport com.thoughtworks.xstream.XStream;\n\n\npublic class XmlClient implements ClientDistribution {\n private static final Logger LOG = Logger.getLogger(XmlClient.class); \n private ClientConnection connection;\n private DirtyObjectSet updateNotifier;\n\n public XmlClient() {\n connection = new ClientConnection();\n connection.init();\n }\n\n public ObjectData[] allInstances(Session session, String fullName, boolean includeSubclasses) {\n AllInstances request = new AllInstances(session, fullName, includeSubclasses);\n remoteExecute(request);\n return request.getInstances();\n }\n\n public void clearAssociation(Session session, String fieldIdentifier, Oid objectOid, String objectType, Oid associateOid,\n String associateType) {\n Request request = new ClearAssociation(session, fieldIdentifier, objectOid, objectType, associateOid, associateType);\n remoteExecute(request);\n }\n\n public void destroyObject(Session session, Oid oid, String type) {\n throw new NotImplementedException();\n }\n\n public Data executeAction(Session session, String actionType, String actionIdentifier, String[] parameterTypes,\n Oid objectOid, String objectType, Data[] parameters) {\n ExecuteAction request = new ExecuteAction(session, actionType, actionIdentifier, parameterTypes, objectOid, objectType,\n parameters);\n remoteExecute(request);\n return request.getActionResult();\n }\n\n public ObjectData[] findInstances(Session session, InstancesCriteria criteria) {\n if(criteria instanceof TitleCriteria) {\n\t FindInstancesByTitle request = new FindInstancesByTitle(session, 
(TitleCriteria) criteria);\n\t remoteExecute(request);\n\t return request.getInstances();\n } else {\n throw new NakedObjectRuntimeException();\n }\n }\n\n public Hint getActionHint(Session session, String actionType, String actionIdentifier, String[] parameterTypes,\n Oid objectOid, String objectType, Data[] parameters) {\n throw new NotImplementedException();\n }\n\n public ObjectData getObject(Session session, Oid oid, String fullName) {\n throw new NotImplementedException();\n }\n\n public boolean hasInstances(Session session, String fullName) {\n HasInstances request = new HasInstances(session, fullName);\n remoteExecute(request);\n return request.getFlag();\n }\n\n public Oid[] makePersistent(Session session, ObjectData data) {\n MakePersistent request = new MakePersistent(session, data);\n remoteExecute(request);\n return request.getOids();\n }\n\n public int numberOfInstances(Session session, String fullName) {\n throw new NotImplementedException();\n }\n\n private void remoteExecute(Request request) {\n XStream xstream = new XStream();\n String requestData = xstream.toXML(request);\n String responseData = connection.request(requestData);\n Response response = (Response) xstream.fromXML(responseData);\n\n if (request.getId() != response.getId()) {\n throw new NakedObjectRuntimeException(\"Response out of sequence with respect to the request: \" + request.getId()\n + \" & \" + response.getId() + \" respectively\");\n }\n request.setResponse(response.getObject());\n \n ObjectData[] updates = response.getUpdates();\n for (int i = 0; i < updates.length; i++) {\n LOG.debug(\"update \" + updates[i]);\n DataHelper.update(updates[i], updateNotifier);\n }\n }\n\n public void setAssociation(Session session, String fieldIdentifier, Oid objectOid, String objectType, Oid associateOid,\n String associateType) {\n Request request = new SetAssociation(session, fieldIdentifier, objectOid, objectType, associateOid, associateType);\n remoteExecute(request);\n }\n\n public 
void setValue(Session session, String fieldIdentifier, Oid oid, String objectType, Object associate) {\n Request request = new SetValue(session, fieldIdentifier, oid, objectType, associate);\n remoteExecute(request);\n }\n \n public void abortTransaction(Session session) {\n Request request = new AbortTransaction(session);\n remoteExecute(request); \n }\n\n public void endTransaction(Session session) {\n Request request = new EndTransaction(session);\n remoteExecute(request); \n }\n\n public void startTransaction(Session session) {\n Request request = new StartTransaction(session);\n remoteExecute(request); \n }\n\n public void setUpdateNotifier(DirtyObjectSet updateNotifier) {\n this.updateNotifier = updateNotifier;\n }\n\n}\n\n/*\n * Naked Objects - a framework that exposes behaviourally complete business\n * objects directly to the user. Copyright (C) 2000 - 2005 Naked Objects Group\n * Ltd\n * \n * This program is free software; you can redistribute it and/or modify it under\n * the terms of the GNU General Public License as published by the Free Software\n * Foundation; either version 2 of the License, or (at your option) any later\n * version.\n * \n * This program is distributed in the hope that it will be useful, but WITHOUT\n * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS\n * FOR A PARTICULAR PURPOSE. 
See the GNU General Public License for more\n * details.\n * \n * You should have received a copy of the GNU General Public License along with\n * this program; if not, write to the Free Software Foundation, Inc., 59 Temple\n * Place, Suite 330, Boston, MA 02111-1307 USA\n * \n * The authors can be contacted via www.nakedobjects.org (the registered address\n * of Naked Objects Group is Kingsway House, 123 Goldworth Road, Woking GU21\n * 1NR, UK).\n */"},"new_file":{"kind":"string","value":"no-distribution-xml/src/org/nakedobjects/distribution/xml/XmlClient.java"},"old_contents":{"kind":"string","value":"package org.nakedobjects.distribution.xml;\n\nimport org.nakedobjects.distribution.ClientDistribution;\nimport org.nakedobjects.distribution.Data;\nimport org.nakedobjects.distribution.DataHelper;\nimport org.nakedobjects.distribution.ObjectData;\nimport org.nakedobjects.distribution.xml.request.AbortTransaction;\nimport org.nakedobjects.distribution.xml.request.AllInstances;\nimport org.nakedobjects.distribution.xml.request.ClearAssociation;\nimport org.nakedobjects.distribution.xml.request.EndTransaction;\nimport org.nakedobjects.distribution.xml.request.ExecuteAction;\nimport org.nakedobjects.distribution.xml.request.FindInstancesByTitle;\nimport org.nakedobjects.distribution.xml.request.HasInstances;\nimport org.nakedobjects.distribution.xml.request.MakePersistent;\nimport org.nakedobjects.distribution.xml.request.SetAssociation;\nimport org.nakedobjects.distribution.xml.request.SetValue;\nimport org.nakedobjects.distribution.xml.request.StartTransaction;\nimport org.nakedobjects.object.NakedObjectRuntimeException;\nimport org.nakedobjects.object.control.Hint;\nimport org.nakedobjects.object.persistence.InstancesCriteria;\nimport org.nakedobjects.object.persistence.Oid;\nimport org.nakedobjects.object.persistence.TitleCriteria;\nimport org.nakedobjects.object.security.Session;\nimport org.nakedobjects.utility.NotImplementedException;\n\nimport 
com.thoughtworks.xstream.XStream;\n\n\npublic class XmlClient implements ClientDistribution {\n private ClientConnection connection;\n\n public XmlClient() {\n connection = new ClientConnection();\n connection.init();\n }\n\n public ObjectData[] allInstances(Session session, String fullName, boolean includeSubclasses) {\n AllInstances request = new AllInstances(session, fullName, includeSubclasses);\n remoteExecute(request);\n return request.getInstances();\n }\n\n public void clearAssociation(Session session, String fieldIdentifier, Oid objectOid, String objectType, Oid associateOid,\n String associateType) {\n Request request = new ClearAssociation(session, fieldIdentifier, objectOid, objectType, associateOid, associateType);\n remoteExecute(request);\n }\n\n public void destroyObject(Session session, Oid oid, String type) {\n throw new NotImplementedException();\n }\n\n public Data executeAction(Session session, String actionType, String actionIdentifier, String[] parameterTypes,\n Oid objectOid, String objectType, Data[] parameters) {\n ExecuteAction request = new ExecuteAction(session, actionType, actionIdentifier, parameterTypes, objectOid, objectType,\n parameters);\n remoteExecute(request);\n return request.getActionResult();\n }\n\n public ObjectData[] findInstances(Session session, InstancesCriteria criteria) {\n if(criteria instanceof TitleCriteria) {\n\t FindInstancesByTitle request = new FindInstancesByTitle(session, (TitleCriteria) criteria);\n\t remoteExecute(request);\n\t return request.getInstances();\n } else {\n throw new NakedObjectRuntimeException();\n }\n }\n\n public Hint getActionHint(Session session, String actionType, String actionIdentifier, String[] parameterTypes,\n Oid objectOid, String objectType, Data[] parameters) {\n throw new NotImplementedException();\n }\n\n public ObjectData getObject(Session session, Oid oid, String fullName) {\n throw new NotImplementedException();\n }\n\n public boolean hasInstances(Session session, String 
fullName) {\n HasInstances request = new HasInstances(session, fullName);\n remoteExecute(request);\n return request.getFlag();\n }\n\n public Oid[] makePersistent(Session session, ObjectData data) {\n MakePersistent request = new MakePersistent(session, data);\n remoteExecute(request);\n return request.getOids();\n }\n\n public int numberOfInstances(Session session, String fullName) {\n throw new NotImplementedException();\n }\n\n private void remoteExecute(Request request) {\n XStream xstream = new XStream();\n String requestData = xstream.toXML(request);\n String responseData = connection.request(requestData);\n Response response = (Response) xstream.fromXML(responseData);\n\n if (request.getId() != response.getId()) {\n throw new NakedObjectRuntimeException(\"Response out of sequence with respect to the request: \" + request.getId()\n + \" & \" + response.getId() + \" respectively\");\n }\n request.setResponse(response.getObject());\n \n ObjectData[] updates = response.getUpdates();\n for (int i = 0; i < updates.length; i++) {\n DataHelper.update(updates[i]);\n }\n }\n\n public void setAssociation(Session session, String fieldIdentifier, Oid objectOid, String objectType, Oid associateOid,\n String associateType) {\n Request request = new SetAssociation(session, fieldIdentifier, objectOid, objectType, associateOid, associateType);\n remoteExecute(request);\n }\n\n public void setValue(Session session, String fieldIdentifier, Oid oid, String objectType, Object associate) {\n Request request = new SetValue(session, fieldIdentifier, oid, objectType, associate);\n remoteExecute(request);\n }\n \n public void abortTransaction(Session session) {\n Request request = new AbortTransaction(session);\n remoteExecute(request); \n }\n\n public void endTransaction(Session session) {\n Request request = new EndTransaction(session);\n remoteExecute(request); \n }\n\n public void startTransaction(Session session) {\n Request request = new StartTransaction(session);\n 
remoteExecute(request); \n }\n\n}\n\n/*\n * Naked Objects - a framework that exposes behaviourally complete business\n * objects directly to the user. Copyright (C) 2000 - 2005 Naked Objects Group\n * Ltd\n * \n * This program is free software; you can redistribute it and/or modify it under\n * the terms of the GNU General Public License as published by the Free Software\n * Foundation; either version 2 of the License, or (at your option) any later\n * version.\n * \n * This program is distributed in the hope that it will be useful, but WITHOUT\n * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS\n * FOR A PARTICULAR PURPOSE. See the GNU General Public License for more\n * details.\n * \n * You should have received a copy of the GNU General Public License along with\n * this program; if not, write to the Free Software Foundation, Inc., 59 Temple\n * Place, Suite 330, Boston, MA 02111-1307 USA\n * \n * The authors can be contacted via www.nakedobjects.org (the registered address\n * of Naked Objects Group is Kingsway House, 123 Goldworth Road, Woking GU21\n * 1NR, UK).\n */"},"message":{"kind":"string","value":"Datahelper now passes on the changes to object to the dirty object set, allowing the viewer to refresh views that have changed.\n\n\ngit-svn-id: 3f09329b2f6451ddff3637e937fd5de689f72c1f@1007098 13f79535-47bb-0310-9956-ffa450edef68\n"},"old_file":{"kind":"string","value":"no-distribution-xml/src/org/nakedobjects/distribution/xml/XmlClient.java"},"subject":{"kind":"string","value":"Datahelper now passes on the changes to object to the dirty object set, allowing the viewer to refresh views that have changed."},"git_diff":{"kind":"string","value":"o-distribution-xml/src/org/nakedobjects/distribution/xml/XmlClient.java\n import org.nakedobjects.distribution.xml.request.SetAssociation;\n import org.nakedobjects.distribution.xml.request.SetValue;\n import org.nakedobjects.distribution.xml.request.StartTransaction;\nimport 
org.nakedobjects.object.DirtyObjectSet;\n import org.nakedobjects.object.NakedObjectRuntimeException;\n import org.nakedobjects.object.control.Hint;\n import org.nakedobjects.object.persistence.InstancesCriteria;\n import org.nakedobjects.object.security.Session;\n import org.nakedobjects.utility.NotImplementedException;\n \nimport org.apache.log4j.Logger;\n\n import com.thoughtworks.xstream.XStream;\n \n \n public class XmlClient implements ClientDistribution {\n private static final Logger LOG = Logger.getLogger(XmlClient.class); \n private ClientConnection connection;\n private DirtyObjectSet updateNotifier;\n \n public XmlClient() {\n connection = new ClientConnection();\n \n ObjectData[] updates = response.getUpdates();\n for (int i = 0; i < updates.length; i++) {\n DataHelper.update(updates[i]);\n LOG.debug(\"update \" + updates[i]);\n DataHelper.update(updates[i], updateNotifier);\n }\n }\n \n remoteExecute(request); \n }\n \n public void setUpdateNotifier(DirtyObjectSet updateNotifier) {\n this.updateNotifier = updateNotifier;\n }\n\n }\n \n /*"}}},{"rowIdx":2020,"cells":{"lang":{"kind":"string","value":"JavaScript"},"license":{"kind":"string","value":"mit"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"5102a79fcc0c309fc7d41ba80d1d31a4ef32bab9"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"3ev/tev_glossary,3ev/tev_glossary,3ev/tev_glossary,3ev/tev_glossary"},"new_contents":{"kind":"string","value":"(function ($, config) {\n\n var justOnce = []\n\n /*\n * Constructor.\n */\n\n function TevGlossary(options) {\n var defaults = {\n selector: 'p'\n , position: 'top'\n , toggle: 'hover'\n , enable: true\n , firstOccOnly: true\n }\n\n this.options = $.extend(defaults, options)\n \n // If the window is in https, set the URL to make an https request instead of http\n if(window.location.protocol === 'https:') {\n this.options.url = this.options.url.replace(/^http:\\/\\//i, 'https://');\n }\n }\n\n 
/*\n * Replace text in the given DOM node and its children with a glossary popup\n * for the given entry, if that entry exists.\n */\n\n TevGlossary.prototype.replaceTextInNode = function (node, entry, noTraversal) {\n\n // Check if the current node is a text node. If it is, search for the\n // glossary term. If not, iterate its direct child text nodes and search\n // them\n\n if (node.nodeType === 3) {\n var pos = node.nodeValue.toLowerCase().indexOf(entry.term.toLowerCase())\n , wr = new RegExp('\\\\W')\n , wb\n , wa\n\n // Check for non-word chars before\n\n if (pos > 0) {\n wb = wr.test(node.nodeValue.substr(pos - 1, 1))\n } else {\n wb = true\n }\n\n // Check for non-word chars after\n\n if ((pos >= 0) && ((pos + entry.term.length - 1) < (node.nodeValue.length - 1))) {\n wa = wr.test(node.nodeValue.substr(pos + entry.term.length, 1))\n } else {\n wa = true\n }\n\n if ((pos >= 0) && wb && wa) {\n // Find the start of the chunk to replace\n\n var replace = node.splitText(pos)\n\n // Trim off the unwanted part from the end of the chunk\n\n replace.splitText(entry.term.length)\n\n // Create the wrapper\n\n var withPopover = $('')\n .addClass('tev-glossary-highlighted')\n .attr('data-toggle', 'popover')\n .attr('data-content', entry.definition)\n .attr('title', entry.term)\n .append(replace.cloneNode(true))\n\n // Insert the replacement back into the parent node.\n\n if(!(justOnce.indexOf(entry.term.toLowerCase()) >= 0)) {\n replace.parentNode.replaceChild(withPopover[0], replace)\n\n // Only the first occurrence of each word will be used if this option is true.\n if (this.options.firstOccOnly) {\n justOnce.push(entry.term.toLowerCase())\n }\n }\n\n // Return a skip value, to ensure we don't iterate the replaced\n // node and create an infinite loop\n\n return 1\n } else {\n return 0\n }\n } else if (!noTraversal && (node.nodeType === 1 && node.childNodes)) {\n for (var i = 0; i < node.childNodes.length; i++) {\n i += this.replaceTextInNode(node.childNodes[i], 
entry, true)\n }\n\n return 0\n }\n }\n\n /*\n * Search all target nodes for all target glossary entries, and setup\n * glossary tooltips.\n */\n\n TevGlossary.prototype.searchForEntries = function (entries) {\n var self = this\n , $nodes = $(self.options.selector)\n\n // Search all entries in all nodes\n\n $.each(entries, function (i, entry) {\n $nodes.each(function (j, el) {\n self.replaceTextInNode(el, entry)\n })\n })\n\n // Run the Bootstrap popover on the created glossary terms\n\n $('.tev-glossary-highlighted').popover({\n container: 'body'\n , placement: this.options.position\n , trigger: this.options.toggle\n , template: '

'\n })\n }\n\n /*\n * Run the service.\n */\n\n TevGlossary.prototype.run = function () {\n var self = this\n\n if (this.options.enable) {\n $.ajax({\n type: 'GET'\n , url: this.options.url\n , dataType: 'json'\n , success: function (data) {\n self.searchForEntries(data)\n }\n })\n }\n }\n\n // Create and run service instance.\n\n new TevGlossary(config).run()\n\n})(window.jQuery, window.tevGlossaryConfig);\n"},"new_file":{"kind":"string","value":"Resources/Public/js/tev-glossary.js"},"old_contents":{"kind":"string","value":"(function ($, config) {\n\n var justOnce = []\n\n /*\n * Constructor.\n */\n\n function TevGlossary(options) {\n var defaults = {\n selector: 'p'\n , position: 'top'\n , toggle: 'hover'\n , enable: true\n , firstOccOnly: true\n }\n\n this.options = $.extend(defaults, options)\n }\n\n /*\n * Replace text in the given DOM node and its children with a glossary popup\n * for the given entry, if that entry exists.\n */\n\n TevGlossary.prototype.replaceTextInNode = function (node, entry, noTraversal) {\n\n // Check if the current node is a text node. If it is, search for the\n // glossary term. 
If not, iterate its direct child text nodes and search\n // them\n\n if (node.nodeType === 3) {\n var pos = node.nodeValue.toLowerCase().indexOf(entry.term.toLowerCase())\n , wr = new RegExp('\\\\W')\n , wb\n , wa\n\n // Check for non-word chars before\n\n if (pos > 0) {\n wb = wr.test(node.nodeValue.substr(pos - 1, 1))\n } else {\n wb = true\n }\n\n // Check for non-word chars after\n\n if ((pos >= 0) && ((pos + entry.term.length - 1) < (node.nodeValue.length - 1))) {\n wa = wr.test(node.nodeValue.substr(pos + entry.term.length, 1))\n } else {\n wa = true\n }\n\n if ((pos >= 0) && wb && wa) {\n // Find the start of the chunk to replace\n\n var replace = node.splitText(pos)\n\n // Trim off the unwanted part from the end of the chunk\n\n replace.splitText(entry.term.length)\n\n // Create the wrapper\n\n var withPopover = $('')\n .addClass('tev-glossary-highlighted')\n .attr('data-toggle', 'popover')\n .attr('data-content', entry.definition)\n .attr('title', entry.term)\n .append(replace.cloneNode(true))\n\n // Insert the replacement back into the parent node.\n\n if(!(justOnce.indexOf(entry.term.toLowerCase()) >= 0)) {\n replace.parentNode.replaceChild(withPopover[0], replace)\n\n // Only the first occurrence of each word will be used if this option is true.\n if (this.options.firstOccOnly) {\n justOnce.push(entry.term.toLowerCase())\n }\n }\n\n // Return a skip value, to ensure we don't iterate the replaced\n // node and create an infinite loop\n\n return 1\n } else {\n return 0\n }\n } else if (!noTraversal && (node.nodeType === 1 && node.childNodes)) {\n for (var i = 0; i < node.childNodes.length; i++) {\n i += this.replaceTextInNode(node.childNodes[i], entry, true)\n }\n\n return 0\n }\n }\n\n /*\n * Search all target nodes for all target glossary entries, and setup\n * glossary tooltips.\n */\n\n TevGlossary.prototype.searchForEntries = function (entries) {\n var self = this\n , $nodes = $(self.options.selector)\n\n // Search all entries in all nodes\n\n 
$.each(entries, function (i, entry) {\n $nodes.each(function (j, el) {\n self.replaceTextInNode(el, entry)\n })\n })\n\n // Run the Bootstrap popover on the created glossary terms\n\n $('.tev-glossary-highlighted').popover({\n container: 'body'\n , placement: this.options.position\n , trigger: this.options.toggle\n , template: '

'\n })\n }\n\n /*\n * Run the service.\n */\n\n TevGlossary.prototype.run = function () {\n var self = this\n\n if (this.options.enable) {\n $.ajax({\n type: 'GET'\n , url: this.options.url\n , dataType: 'json'\n , success: function (data) {\n self.searchForEntries(data)\n }\n })\n }\n }\n\n // Create and run service instance.\n\n new TevGlossary(config).run()\n\n})(window.jQuery, window.tevGlossaryConfig);\n"},"message":{"kind":"string","value":"Added a way to switch the URL to https"},"old_file":{"kind":"string","value":"Resources/Public/js/tev-glossary.js"},"subject":{"kind":"string","value":"Added a way to switch the URL to https"},"git_diff":{"kind":"string","value":"esources/Public/js/tev-glossary.js\n }\n \n this.options = $.extend(defaults, options)\n \n // If the window is in https, set the URL to make an https request instead of http\n if(window.location.protocol === 'https:') {\n this.options.url = this.options.url.replace(/^http:\\/\\//i, 'https://');\n }\n }\n \n /*"}}},{"rowIdx":2021,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"6f88ccbc5c345865a1dacd6fae0f53d2510a314d"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"billho/symphony,qiangber/symphony,qiangber/symphony,qiangber/symphony,billho/symphony,billho/symphony"},"new_contents":{"kind":"string","value":"/*\n * Copyright (c) 2012-2016, b3log.org & hacpai.com\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License 
for the specific language governing permissions and\n * limitations under the License.\n */\npackage org.b3log.symphony.service;\n\nimport java.text.DecimalFormat;\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.Date;\nimport java.util.HashMap;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\nimport javax.inject.Inject;\nimport javax.servlet.http.HttpServletRequest;\nimport org.apache.commons.lang.StringUtils;\nimport org.apache.commons.lang.time.DateFormatUtils;\nimport org.apache.commons.lang.time.DateUtils;\nimport org.b3log.latke.Keys;\nimport org.b3log.latke.Latkes;\nimport org.b3log.latke.logging.Level;\nimport org.b3log.latke.logging.Logger;\nimport org.b3log.latke.model.Pagination;\nimport org.b3log.latke.model.Role;\nimport org.b3log.latke.model.User;\nimport org.b3log.latke.repository.CompositeFilter;\nimport org.b3log.latke.repository.CompositeFilterOperator;\nimport org.b3log.latke.repository.Filter;\nimport org.b3log.latke.repository.FilterOperator;\nimport org.b3log.latke.repository.PropertyFilter;\nimport org.b3log.latke.repository.Query;\nimport org.b3log.latke.repository.RepositoryException;\nimport org.b3log.latke.repository.SortDirection;\nimport org.b3log.latke.service.LangPropsService;\nimport org.b3log.latke.service.ServiceException;\nimport org.b3log.latke.service.annotation.Service;\nimport org.b3log.latke.util.CollectionUtils;\nimport org.b3log.latke.util.Paginator;\nimport org.b3log.latke.util.Strings;\nimport org.b3log.symphony.model.Article;\nimport org.b3log.symphony.model.Comment;\nimport org.b3log.symphony.model.Common;\nimport org.b3log.symphony.model.Tag;\nimport org.b3log.symphony.model.UserExt;\nimport org.b3log.symphony.processor.channel.ArticleChannel;\nimport org.b3log.symphony.repository.ArticleRepository;\nimport org.b3log.symphony.repository.CommentRepository;\nimport org.b3log.symphony.repository.DomainTagRepository;\nimport 
org.b3log.symphony.repository.TagArticleRepository;\nimport org.b3log.symphony.repository.TagRepository;\nimport org.b3log.symphony.repository.UserRepository;\nimport org.b3log.symphony.util.Emotions;\nimport org.b3log.symphony.util.Markdowns;\nimport org.b3log.symphony.util.Symphonys;\nimport org.b3log.symphony.util.Times;\nimport org.json.JSONArray;\nimport org.json.JSONException;\nimport org.json.JSONObject;\nimport org.jsoup.Jsoup;\nimport org.jsoup.nodes.Document;\nimport org.jsoup.safety.Whitelist;\n\n/**\n * Article query service.\n *\n * @author Liang Ding\n * @version 1.12.10.18, Mar 23, 2016\n * @since 0.2.0\n */\n@Service\npublic class ArticleQueryService {\n\n /**\n * Logger.\n */\n private static final Logger LOGGER = Logger.getLogger(ArticleQueryService.class.getName());\n\n /**\n * Article repository.\n */\n @Inject\n private ArticleRepository articleRepository;\n\n /**\n * Comment repository.\n */\n @Inject\n private CommentRepository commentRepository;\n\n /**\n * Tag-Article repository.\n */\n @Inject\n private TagArticleRepository tagArticleRepository;\n\n /**\n * Tag repository.\n */\n @Inject\n private TagRepository tagRepository;\n\n /**\n * User repository.\n */\n @Inject\n private UserRepository userRepository;\n\n /**\n * Domain tag repository.\n */\n @Inject\n private DomainTagRepository domainTagRepository;\n\n /**\n * Comment query service.\n */\n @Inject\n private CommentQueryService commentQueryService;\n\n /**\n * User query service.\n */\n @Inject\n private UserQueryService userQueryService;\n\n /**\n * Avatar query service.\n */\n @Inject\n private AvatarQueryService avatarQueryService;\n\n /**\n * Short link query service.\n */\n @Inject\n private ShortLinkQueryService shortLinkQueryService;\n\n /**\n * Language service.\n */\n @Inject\n private LangPropsService langPropsService;\n\n /**\n * Count to fetch article tags for relevant articles.\n */\n private static final int RELEVANT_ARTICLE_RANDOM_FETCH_TAG_CNT = 3;\n\n /**\n * Gets 
domain articles.\n *\n * @param domainId the specified domain id\n * @param currentPageNum the specified current page number\n * @param pageSize the specified page size\n * @return result\n * @throws ServiceException service exception\n */\n public JSONObject getDomainArticles(final String domainId, final int currentPageNum, final int pageSize)\n throws ServiceException {\n final JSONObject ret = new JSONObject();\n ret.put(Article.ARTICLES, (Object) Collections.emptyList());\n\n final JSONObject pagination = new JSONObject();\n ret.put(Pagination.PAGINATION, pagination);\n pagination.put(Pagination.PAGINATION_PAGE_COUNT, 0);\n pagination.put(Pagination.PAGINATION_PAGE_NUMS, (Object) Collections.emptyList());\n\n try {\n final JSONArray domainTags = domainTagRepository.getByDomainId(domainId, 1, Integer.MAX_VALUE)\n .optJSONArray(Keys.RESULTS);\n\n if (domainTags.length() <= 0) {\n return ret;\n }\n\n final List tagIds = new ArrayList();\n for (int i = 0; i < domainTags.length(); i++) {\n tagIds.add(domainTags.optJSONObject(i).optString(Tag.TAG + \"_\" + Keys.OBJECT_ID));\n }\n\n Query query = new Query().setFilter(\n new PropertyFilter(Tag.TAG + \"_\" + Keys.OBJECT_ID, FilterOperator.IN, tagIds)).\n setCurrentPageNum(currentPageNum).setPageSize(pageSize).\n addSort(Keys.OBJECT_ID, SortDirection.DESCENDING);\n JSONObject result = tagArticleRepository.get(query);\n final JSONArray tagArticles = result.optJSONArray(Keys.RESULTS);\n if (tagArticles.length() <= 0) {\n return ret;\n }\n\n final int pageCount = result.optJSONObject(Pagination.PAGINATION).optInt(Pagination.PAGINATION_PAGE_COUNT);\n\n final int windowSize = Symphonys.getInt(\"latestArticlesWindowSize\");\n\n final List pageNums = Paginator.paginate(currentPageNum, pageSize, pageCount, windowSize);\n pagination.put(Pagination.PAGINATION_PAGE_COUNT, pageCount);\n pagination.put(Pagination.PAGINATION_PAGE_NUMS, (Object) pageNums);\n\n final Set articleIds = new HashSet();\n for (int i = 0; i < 
tagArticles.length(); i++) {\n articleIds.add(tagArticles.optJSONObject(i).optString(Article.ARTICLE + \"_\" + Keys.OBJECT_ID));\n }\n\n query = new Query().setFilter(CompositeFilterOperator.and(\n new PropertyFilter(Keys.OBJECT_ID, FilterOperator.IN, articleIds),\n new PropertyFilter(Article.ARTICLE_STATUS, FilterOperator.EQUAL, Article.ARTICLE_STATUS_C_VALID))).\n setPageCount(1).addSort(Keys.OBJECT_ID, SortDirection.DESCENDING);\n\n final List articles\n = CollectionUtils.jsonArrayToList(articleRepository.get(query).optJSONArray(Keys.RESULTS));\n\n try {\n organizeArticles(articles);\n } catch (final RepositoryException e) {\n LOGGER.log(Level.ERROR, \"Organizes articles failed\", e);\n\n throw new ServiceException(e);\n }\n\n final Integer participantsCnt = Symphonys.getInt(\"latestArticleParticipantsCnt\");\n genParticipants(articles, participantsCnt);\n\n ret.put(Article.ARTICLES, (Object) articles);\n\n return ret;\n } catch (final Exception e) {\n LOGGER.log(Level.ERROR, \"Gets domain articles error\", e);\n\n throw new ServiceException(e);\n }\n }\n\n /**\n * Gets the relevant articles of the specified article with the specified fetch size.\n *\n *

\n * The relevant articles exist the same tag with the specified article.\n *

\n *\n * @param article the specified article\n * @param fetchSize the specified fetch size\n * @return relevant articles, returns an empty list if not found\n * @throws ServiceException service exception\n */\n public List getRelevantArticles(final JSONObject article, final int fetchSize) throws ServiceException {\n final String tagsString = article.optString(Article.ARTICLE_TAGS);\n final String[] tagTitles = tagsString.split(\",\");\n final int tagTitlesLength = tagTitles.length;\n final int subCnt = tagTitlesLength > RELEVANT_ARTICLE_RANDOM_FETCH_TAG_CNT\n ? RELEVANT_ARTICLE_RANDOM_FETCH_TAG_CNT : tagTitlesLength;\n\n final List tagIdx = CollectionUtils.getRandomIntegers(0, tagTitlesLength, subCnt);\n final int subFetchSize = fetchSize / subCnt;\n final Set fetchedArticleIds = new HashSet();\n\n final List ret = new ArrayList();\n try {\n for (int i = 0; i < tagIdx.size(); i++) {\n final String tagTitle = tagTitles[tagIdx.get(i)].trim();\n\n final JSONObject tag = tagRepository.getByTitle(tagTitle);\n final String tagId = tag.optString(Keys.OBJECT_ID);\n JSONObject result = tagArticleRepository.getByTagId(tagId, 1, subFetchSize);\n\n final JSONArray tagArticleRelations = result.optJSONArray(Keys.RESULTS);\n\n final Set articleIds = new HashSet();\n for (int j = 0; j < tagArticleRelations.length(); j++) {\n final String articleId = tagArticleRelations.optJSONObject(j).optString(Article.ARTICLE + '_' + Keys.OBJECT_ID);\n\n if (fetchedArticleIds.contains(articleId)) {\n continue;\n }\n\n articleIds.add(articleId);\n fetchedArticleIds.add(articleId);\n }\n\n articleIds.remove(article.optString(Keys.OBJECT_ID));\n\n final Query query = new Query().setFilter(new PropertyFilter(Keys.OBJECT_ID, FilterOperator.IN, articleIds));\n result = articleRepository.get(query);\n\n ret.addAll(CollectionUtils.jsonArrayToList(result.optJSONArray(Keys.RESULTS)));\n }\n\n organizeArticles(ret);\n\n return ret;\n } catch (final RepositoryException e) {\n LOGGER.log(Level.ERROR, \"Gets 
relevant articles failed\", e);\n throw new ServiceException(e);\n }\n }\n\n /**\n * Gets broadcasts (articles permalink equals to \"aBroadcast\").\n *\n * @param currentPageNum the specified page number\n * @param pageSize the specified page size\n * @return articles, return an empty list if not found\n * @throws ServiceException service exception\n */\n public List getBroadcasts(final int currentPageNum, final int pageSize) throws ServiceException {\n try {\n final Query query = new Query().setCurrentPageNum(currentPageNum).setPageSize(pageSize).setFilter(\n new PropertyFilter(Article.ARTICLE_CLIENT_ARTICLE_ID, FilterOperator.EQUAL, \"aBroadcast\")).\n addSort(Article.ARTICLE_CREATE_TIME, SortDirection.DESCENDING);\n\n final JSONObject result = articleRepository.get(query);\n final JSONArray articles = result.optJSONArray(Keys.RESULTS);\n\n if (0 == articles.length()) {\n return Collections.emptyList();\n }\n\n final List ret = CollectionUtils.jsonArrayToList(articles);\n for (final JSONObject article : ret) {\n article.put(Article.ARTICLE_PERMALINK, Latkes.getServePath() + article.optString(Article.ARTICLE_PERMALINK));\n article.remove(Article.ARTICLE_CONTENT);\n }\n\n return ret;\n } catch (final RepositoryException e) {\n LOGGER.log(Level.ERROR, \"Gets broadcasts [currentPageNum=\" + currentPageNum + \", pageSize=\" + pageSize + \"] failed\", e);\n throw new ServiceException(e);\n }\n }\n\n /**\n * Gets interest articles.\n *\n * @param currentPageNum the specified current page number\n * @param pageSize the specified fetch size\n * @param tagTitles the specified tag titles\n * @return articles, return an empty list if not found\n * @throws ServiceException service exception\n */\n public List getInterests(final int currentPageNum, final int pageSize, final String... 
tagTitles)\n throws ServiceException {\n try {\n final List tagList = new ArrayList();\n for (int i = 0; i < tagTitles.length; i++) {\n final String tagTitle = tagTitles[i];\n final JSONObject tag = tagRepository.getByTitle(tagTitle);\n if (null == tag) {\n continue;\n }\n\n tagList.add(tag);\n }\n\n final Map> articleFields = new HashMap>();\n articleFields.put(Article.ARTICLE_TITLE, String.class);\n articleFields.put(Article.ARTICLE_PERMALINK, String.class);\n articleFields.put(Article.ARTICLE_CREATE_TIME, Long.class);\n\n final List ret = new ArrayList();\n\n if (!tagList.isEmpty()) {\n final List tagArticles\n = getArticlesByTags(currentPageNum, pageSize, articleFields, tagList.toArray(new JSONObject[0]));\n for (final JSONObject article : tagArticles) {\n article.remove(Article.ARTICLE_T_PARTICIPANTS);\n article.remove(Article.ARTICLE_T_PARTICIPANT_NAME);\n article.remove(Article.ARTICLE_T_PARTICIPANT_THUMBNAIL_URL);\n article.remove(Article.ARTICLE_LATEST_CMT_TIME);\n article.remove(Article.ARTICLE_UPDATE_TIME);\n article.remove(Article.ARTICLE_T_HEAT);\n article.remove(Article.ARTICLE_T_TITLE_EMOJI);\n article.remove(Common.TIME_AGO);\n\n article.put(Article.ARTICLE_CREATE_TIME, ((Date) article.get(Article.ARTICLE_CREATE_TIME)).getTime());\n }\n\n ret.addAll(tagArticles);\n }\n\n final List filters = new ArrayList();\n filters.add(new PropertyFilter(Article.ARTICLE_STATUS, FilterOperator.EQUAL, Article.ARTICLE_STATUS_C_VALID));\n filters.add(new PropertyFilter(Article.ARTICLE_TYPE, FilterOperator.NOT_EQUAL, Article.ARTICLE_TYPE_C_DISCUSSION));\n\n final Query query = new Query().addSort(Keys.OBJECT_ID, SortDirection.DESCENDING)\n .setPageCount(currentPageNum).setPageSize(pageSize).setCurrentPageNum(1);\n query.setFilter(new CompositeFilter(CompositeFilterOperator.AND, filters));\n for (final Map.Entry> articleField : articleFields.entrySet()) {\n query.addProjection(articleField.getKey(), articleField.getValue());\n }\n\n final JSONObject result = 
articleRepository.get(query);\n\n final List recentArticles = CollectionUtils.jsonArrayToList(result.optJSONArray(Keys.RESULTS));\n ret.addAll(recentArticles);\n\n for (final JSONObject article : ret) {\n article.put(Article.ARTICLE_PERMALINK, Latkes.getServePath() + article.optString(Article.ARTICLE_PERMALINK));\n }\n\n return ret;\n } catch (final Exception e) {\n LOGGER.log(Level.ERROR, \"Gets interests failed\", e);\n throw new ServiceException(e);\n }\n }\n\n /**\n * Gets news (articles tags contains \"B3log Announcement\").\n *\n * @param currentPageNum the specified page number\n * @param pageSize the specified page size\n * @return articles, return an empty list if not found\n * @throws ServiceException service exception\n */\n public List getNews(final int currentPageNum, final int pageSize) throws ServiceException {\n\n try {\n JSONObject oldAnnouncementTag = tagRepository.getByTitle(\"B3log Announcement\");\n JSONObject currentAnnouncementTag = tagRepository.getByTitle(\"B3log公告\");\n if (null == oldAnnouncementTag && null == currentAnnouncementTag) {\n return Collections.emptyList();\n }\n\n if (null == oldAnnouncementTag) {\n oldAnnouncementTag = new JSONObject();\n }\n\n if (null == currentAnnouncementTag) {\n currentAnnouncementTag = new JSONObject();\n }\n\n Query query = new Query().addSort(Keys.OBJECT_ID, SortDirection.DESCENDING).\n setFilter(CompositeFilterOperator.or(\n new PropertyFilter(Tag.TAG + '_' + Keys.OBJECT_ID, FilterOperator.EQUAL,\n oldAnnouncementTag.optString(Keys.OBJECT_ID)),\n new PropertyFilter(Tag.TAG + '_' + Keys.OBJECT_ID, FilterOperator.EQUAL,\n currentAnnouncementTag.optString(Keys.OBJECT_ID))\n ))\n .setPageCount(1).setPageSize(pageSize).setCurrentPageNum(currentPageNum);\n\n JSONObject result = tagArticleRepository.get(query);\n final JSONArray tagArticleRelations = result.optJSONArray(Keys.RESULTS);\n\n final Set articleIds = new HashSet();\n for (int i = 0; i < tagArticleRelations.length(); i++) {\n 
articleIds.add(tagArticleRelations.optJSONObject(i).optString(Article.ARTICLE + '_' + Keys.OBJECT_ID));\n }\n\n final JSONObject sa = userQueryService.getSA();\n\n final List subFilters = new ArrayList();\n subFilters.add(new PropertyFilter(Keys.OBJECT_ID, FilterOperator.IN, articleIds));\n subFilters.add(new PropertyFilter(Article.ARTICLE_AUTHOR_EMAIL, FilterOperator.EQUAL, sa.optString(User.USER_EMAIL)));\n query = new Query().setFilter(new CompositeFilter(CompositeFilterOperator.AND, subFilters))\n .addProjection(Article.ARTICLE_TITLE, String.class).addProjection(Article.ARTICLE_PERMALINK, String.class)\n .addProjection(Article.ARTICLE_CREATE_TIME, Long.class).addSort(Article.ARTICLE_CREATE_TIME, SortDirection.DESCENDING);\n result = articleRepository.get(query);\n\n final List ret = CollectionUtils.jsonArrayToList(result.optJSONArray(Keys.RESULTS));\n for (final JSONObject article : ret) {\n article.put(Article.ARTICLE_PERMALINK, Latkes.getServePath() + article.optString(Article.ARTICLE_PERMALINK));\n }\n\n return ret;\n } catch (final RepositoryException e) {\n LOGGER.log(Level.ERROR, \"Gets news failed\", e);\n throw new ServiceException(e);\n }\n }\n\n /**\n * Gets articles by the specified tags (order by article create date desc).\n *\n * @param tags the specified tags\n * @param currentPageNum the specified page number\n * @param articleFields the specified article fields to return\n * @param pageSize the specified page size\n * @return articles, return an empty list if not found\n * @throws ServiceException service exception\n */\n public List getArticlesByTags(final int currentPageNum, final int pageSize,\n final Map> articleFields, final JSONObject... 
tags) throws ServiceException {\n try {\n final List filters = new ArrayList();\n for (final JSONObject tag : tags) {\n filters.add(new PropertyFilter(Tag.TAG + '_' + Keys.OBJECT_ID, FilterOperator.EQUAL, tag.optString(Keys.OBJECT_ID)));\n }\n\n Filter filter;\n if (filters.size() >= 2) {\n filter = new CompositeFilter(CompositeFilterOperator.OR, filters);\n } else {\n filter = filters.get(0);\n }\n\n // XXX: 这里的分页是有问题的,后面取文章的时候会少(因为一篇文章可以有多个标签,但是文章 id 一样)\n Query query = new Query().addSort(Keys.OBJECT_ID, SortDirection.DESCENDING).\n setFilter(filter).setPageCount(1).setPageSize(pageSize).setCurrentPageNum(currentPageNum);\n\n JSONObject result = tagArticleRepository.get(query);\n final JSONArray tagArticleRelations = result.optJSONArray(Keys.RESULTS);\n\n final Set articleIds = new HashSet();\n for (int i = 0; i < tagArticleRelations.length(); i++) {\n articleIds.add(tagArticleRelations.optJSONObject(i).optString(Article.ARTICLE + '_' + Keys.OBJECT_ID));\n }\n\n query = new Query().setFilter(new PropertyFilter(Keys.OBJECT_ID, FilterOperator.IN, articleIds)).\n addSort(Keys.OBJECT_ID, SortDirection.DESCENDING);\n for (final Map.Entry> articleField : articleFields.entrySet()) {\n query.addProjection(articleField.getKey(), articleField.getValue());\n }\n\n result = articleRepository.get(query);\n\n final List ret = CollectionUtils.jsonArrayToList(result.optJSONArray(Keys.RESULTS));\n organizeArticles(ret);\n\n final Integer participantsCnt = Symphonys.getInt(\"tagArticleParticipantsCnt\");\n genParticipants(ret, participantsCnt);\n\n return ret;\n } catch (final RepositoryException e) {\n LOGGER.log(Level.ERROR, \"Gets articles by tags [tagLength=\" + tags.length + \"] failed\", e);\n throw new ServiceException(e);\n }\n }\n\n /**\n * Gets articles by the specified city (order by article create date desc).\n *\n * @param city the specified city\n * @param currentPageNum the specified page number\n * @param pageSize the specified page size\n * @return articles, 
return an empty list if not found\n * @throws ServiceException service exception\n */\n public List getArticlesByCity(final String city, final int currentPageNum, final int pageSize)\n throws ServiceException {\n try {\n final Query query = new Query().addSort(Keys.OBJECT_ID, SortDirection.DESCENDING).\n setFilter(new PropertyFilter(Article.ARTICLE_CITY, FilterOperator.EQUAL, city))\n .setPageCount(1).setPageSize(pageSize).setCurrentPageNum(currentPageNum);\n\n final JSONObject result = articleRepository.get(query);\n\n final List ret = CollectionUtils.jsonArrayToList(result.optJSONArray(Keys.RESULTS));\n organizeArticles(ret);\n\n final Integer participantsCnt = Symphonys.getInt(\"cityArticleParticipantsCnt\");\n genParticipants(ret, participantsCnt);\n\n return ret;\n } catch (final RepositoryException e) {\n LOGGER.log(Level.ERROR, \"Gets articles by city [\" + city + \"] failed\", e);\n\n throw new ServiceException(e);\n }\n }\n\n /**\n * Gets articles by the specified tag (order by article create date desc).\n *\n * @param tag the specified tag\n * @param currentPageNum the specified page number\n * @param pageSize the specified page size\n * @return articles, return an empty list if not found\n * @throws ServiceException service exception\n */\n public List getArticlesByTag(final JSONObject tag, final int currentPageNum, final int pageSize)\n throws ServiceException {\n try {\n Query query = new Query().addSort(Keys.OBJECT_ID, SortDirection.DESCENDING).\n setFilter(new PropertyFilter(Tag.TAG + '_' + Keys.OBJECT_ID, FilterOperator.EQUAL, tag.optString(Keys.OBJECT_ID)))\n .setPageCount(1).setPageSize(pageSize).setCurrentPageNum(currentPageNum);\n\n JSONObject result = tagArticleRepository.get(query);\n final JSONArray tagArticleRelations = result.optJSONArray(Keys.RESULTS);\n\n final Set articleIds = new HashSet();\n for (int i = 0; i < tagArticleRelations.length(); i++) {\n articleIds.add(tagArticleRelations.optJSONObject(i).optString(Article.ARTICLE + '_' + 
Keys.OBJECT_ID));\n }\n\n query = new Query().setFilter(new PropertyFilter(Keys.OBJECT_ID, FilterOperator.IN, articleIds)).\n addSort(Keys.OBJECT_ID, SortDirection.DESCENDING);\n result = articleRepository.get(query);\n\n final List ret = CollectionUtils.jsonArrayToList(result.optJSONArray(Keys.RESULTS));\n organizeArticles(ret);\n\n final Integer participantsCnt = Symphonys.getInt(\"tagArticleParticipantsCnt\");\n genParticipants(ret, participantsCnt);\n\n return ret;\n } catch (final RepositoryException e) {\n LOGGER.log(Level.ERROR, \"Gets articles by tag [tagTitle=\" + tag.optString(Tag.TAG_TITLE) + \"] failed\", e);\n throw new ServiceException(e);\n }\n }\n\n /**\n * Gets an article by the specified client article id.\n *\n * @param authorId the specified author id\n * @param clientArticleId the specified client article id\n * @return article, return {@code null} if not found\n * @throws ServiceException service exception\n */\n public JSONObject getArticleByClientArticleId(final String authorId, final String clientArticleId) throws ServiceException {\n final List filters = new ArrayList();\n filters.add(new PropertyFilter(Article.ARTICLE_CLIENT_ARTICLE_ID, FilterOperator.EQUAL, clientArticleId));\n filters.add(new PropertyFilter(Article.ARTICLE_AUTHOR_ID, FilterOperator.EQUAL, authorId));\n\n final Query query = new Query().setFilter(new CompositeFilter(CompositeFilterOperator.AND, filters));\n try {\n final JSONObject result = articleRepository.get(query);\n final JSONArray array = result.optJSONArray(Keys.RESULTS);\n\n if (0 == array.length()) {\n return null;\n }\n\n return array.optJSONObject(0);\n } catch (final RepositoryException e) {\n LOGGER.log(Level.ERROR, \"Gets article [clientArticleId=\" + clientArticleId + \"] failed\", e);\n throw new ServiceException(e);\n }\n }\n\n /**\n * Gets an article with {@link #organizeArticle(org.json.JSONObject)} by the specified id.\n *\n * @param articleId the specified id\n * @return article, return {@code null} 
if not found\n * @throws ServiceException service exception\n */\n public JSONObject getArticleById(final String articleId) throws ServiceException {\n try {\n final JSONObject ret = articleRepository.get(articleId);\n\n if (null == ret) {\n return null;\n }\n\n organizeArticle(ret);\n\n return ret;\n } catch (final RepositoryException e) {\n LOGGER.log(Level.ERROR, \"Gets an article [articleId=\" + articleId + \"] failed\", e);\n throw new ServiceException(e);\n }\n }\n\n /**\n * Gets an article by the specified id.\n *\n * @param articleId the specified id\n * @return article, return {@code null} if not found\n * @throws ServiceException service exception\n */\n public JSONObject getArticle(final String articleId) throws ServiceException {\n try {\n final JSONObject ret = articleRepository.get(articleId);\n\n if (null == ret) {\n return null;\n }\n\n return ret;\n } catch (final RepositoryException e) {\n LOGGER.log(Level.ERROR, \"Gets an article [articleId=\" + articleId + \"] failed\", e);\n throw new ServiceException(e);\n }\n }\n\n /**\n * Gets preview content of the article specified with the given article id.\n *\n * @param articleId the given article id\n * @param request the specified request\n * @return preview content\n * @throws ServiceException service exception\n */\n public String getArticlePreviewContent(final String articleId, final HttpServletRequest request) throws ServiceException {\n final JSONObject article = getArticle(articleId);\n if (null == article) {\n return null;\n }\n\n return getPreviewContent(article, request);\n }\n\n private String getPreviewContent(final JSONObject article, final HttpServletRequest request) throws ServiceException {\n final int length = Integer.valueOf(\"150\");\n String ret = article.optString(Article.ARTICLE_CONTENT);\n final String authorId = article.optString(Article.ARTICLE_AUTHOR_ID);\n final JSONObject author = userQueryService.getUser(authorId);\n\n if (null != author && UserExt.USER_STATUS_C_INVALID == 
author.optInt(UserExt.USER_STATUS)\n || Article.ARTICLE_STATUS_C_INVALID == article.optInt(Article.ARTICLE_STATUS)) {\n return langPropsService.get(\"articleContentBlockLabel\");\n }\n\n final Set userNames = userQueryService.getUserNames(ret);\n final JSONObject currentUser = userQueryService.getCurrentUser(request);\n final String currentUserName = null == currentUser ? \"\" : currentUser.optString(User.USER_NAME);\n final String authorName = author.optString(User.USER_NAME);\n if (Article.ARTICLE_TYPE_C_DISCUSSION == article.optInt(Article.ARTICLE_TYPE)\n && !authorName.equals(currentUserName)) {\n boolean invited = false;\n for (final String userName : userNames) {\n if (userName.equals(currentUserName)) {\n invited = true;\n\n break;\n }\n }\n\n if (!invited) {\n String blockContent = langPropsService.get(\"articleDiscussionLabel\");\n blockContent = blockContent.replace(\"{user}\", \"\" + authorName + \"\");\n\n return blockContent;\n }\n }\n\n ret = Emotions.convert(ret);\n ret = Markdowns.toHTML(ret);\n\n ret = Jsoup.clean(ret, Whitelist.none());\n if (ret.length() >= length) {\n ret = StringUtils.substring(ret, 0, length)\n + \" ....\";\n }\n\n return ret;\n }\n\n /**\n * Gets the user articles with the specified user id, page number and page size.\n *\n * @param userId the specified user id\n * @param currentPageNum the specified page number\n * @param pageSize the specified page size\n * @return user articles, return an empty list if not found\n * @throws ServiceException service exception\n */\n public List getUserArticles(final String userId, final int currentPageNum, final int pageSize) throws ServiceException {\n final Query query = new Query().addSort(Article.ARTICLE_CREATE_TIME, SortDirection.DESCENDING)\n .setCurrentPageNum(currentPageNum).setPageSize(pageSize).\n setFilter(CompositeFilterOperator.and(\n new PropertyFilter(Article.ARTICLE_AUTHOR_ID, FilterOperator.EQUAL, userId),\n new PropertyFilter(Article.ARTICLE_STATUS, FilterOperator.EQUAL, 
Article.ARTICLE_STATUS_C_VALID)));\n try {\n final JSONObject result = articleRepository.get(query);\n final List ret = CollectionUtils.jsonArrayToList(result.optJSONArray(Keys.RESULTS));\n organizeArticles(ret);\n\n return ret;\n } catch (final RepositoryException e) {\n LOGGER.log(Level.ERROR, \"Gets user articles failed\", e);\n throw new ServiceException(e);\n }\n }\n\n /**\n * Gets hot articles with the specified fetch size.\n *\n * @param fetchSize the specified fetch size\n * @return recent articles, returns an empty list if not found\n * @throws ServiceException service exception\n */\n public List getHotArticles(final int fetchSize) throws ServiceException {\n final String id = String.valueOf(DateUtils.addDays(new Date(), -15).getTime());\n\n try {\n final Query query = new Query().addSort(Article.ARTICLE_COMMENT_CNT, SortDirection.DESCENDING).\n addSort(Keys.OBJECT_ID, SortDirection.ASCENDING).setCurrentPageNum(1).setPageSize(fetchSize);\n\n final List filters = new ArrayList();\n filters.add(new PropertyFilter(Keys.OBJECT_ID, FilterOperator.GREATER_THAN_OR_EQUAL, id));\n filters.add(new PropertyFilter(Article.ARTICLE_TYPE, FilterOperator.NOT_EQUAL, Article.ARTICLE_TYPE_C_DISCUSSION));\n\n query.setFilter(new CompositeFilter(CompositeFilterOperator.AND, filters));\n\n final JSONObject result = articleRepository.get(query);\n final List ret = CollectionUtils.jsonArrayToList(result.optJSONArray(Keys.RESULTS));\n organizeArticles(ret);\n\n return ret;\n } catch (final RepositoryException e) {\n LOGGER.log(Level.ERROR, \"Gets hot articles failed\", e);\n throw new ServiceException(e);\n }\n }\n\n /**\n * Gets the random articles with the specified fetch size.\n *\n * @param fetchSize the specified fetch size\n * @return random articles, returns an empty list if not found\n * @throws ServiceException service exception\n */\n public List getRandomArticles(final int fetchSize) throws ServiceException {\n try {\n final List ret = 
articleRepository.getRandomly(fetchSize);\n organizeArticles(ret);\n\n return ret;\n } catch (final RepositoryException e) {\n LOGGER.log(Level.ERROR, \"Gets random articles failed\", e);\n throw new ServiceException(e);\n }\n }\n\n /**\n * Makes article showing filters.\n *\n * @return filter the article showing to user\n */\n private CompositeFilter makeArticleShowingFilter() {\n final List filters = new ArrayList();\n filters.add(new PropertyFilter(Article.ARTICLE_STATUS, FilterOperator.EQUAL, Article.ARTICLE_STATUS_C_VALID));\n filters.add(new PropertyFilter(Article.ARTICLE_TYPE, FilterOperator.NOT_EQUAL, Article.ARTICLE_TYPE_C_DISCUSSION));\n return new CompositeFilter(CompositeFilterOperator.AND, filters);\n }\n\n /**\n * Makes the recent (sort by create time) articles with the specified fetch size.\n *\n * @param currentPageNum the specified current page number\n * @param fetchSize the specified fetch size\n * @return recent articles query\n */\n private Query makeRecentQuery(final int currentPageNum, final int fetchSize) {\n final Query query = new Query()\n .addSort(Keys.OBJECT_ID, SortDirection.DESCENDING)\n .setPageSize(fetchSize).setCurrentPageNum(currentPageNum);\n query.setFilter(makeArticleShowingFilter());\n return query;\n }\n\n /**\n * Makes the top articles with the specified fetch size.\n *\n * @param currentPageNum the specified current page number\n * @param fetchSize the specified fetch size\n * @return top articles query\n */\n private Query makeTopQuery(final int currentPageNum, final int fetchSize) {\n final Query query = new Query()\n .addSort(Article.REDDIT_SCORE, SortDirection.DESCENDING)\n .addSort(Article.ARTICLE_LATEST_CMT_TIME, SortDirection.DESCENDING)\n .setPageCount(1).setPageSize(fetchSize).setCurrentPageNum(currentPageNum);\n\n query.setFilter(makeArticleShowingFilter());\n return query;\n }\n\n /**\n * Gets the recent (sort by create time) articles with the specified fetch size.\n *\n * @param currentPageNum the specified 
current page number\n * @param fetchSize the specified fetch size\n * @return for example,
\n     * {\n     *     \"pagination\": {\n     *         \"paginationPageCount\": 100,\n     *         \"paginationPageNums\": [1, 2, 3, 4, 5]\n     *     },\n     *     \"articles\": [{\n     *         \"oId\": \"\",\n     *         \"articleTitle\": \"\",\n     *         \"articleContent\": \"\",\n     *         ....\n     *      }, ....]\n     * }\n     * 
\n *\n * @throws ServiceException service exception\n */\n public JSONObject getRecentArticles(final int currentPageNum, final int fetchSize) throws ServiceException {\n final JSONObject ret = new JSONObject();\n\n final Query query = makeRecentQuery(currentPageNum, fetchSize);\n JSONObject result = null;\n\n try {\n result = articleRepository.get(query);\n } catch (final RepositoryException e) {\n LOGGER.log(Level.ERROR, \"Gets articles failed\", e);\n\n throw new ServiceException(e);\n }\n\n final int pageCount = result.optJSONObject(Pagination.PAGINATION).optInt(Pagination.PAGINATION_PAGE_COUNT);\n\n final JSONObject pagination = new JSONObject();\n ret.put(Pagination.PAGINATION, pagination);\n\n final int windowSize = Symphonys.getInt(\"latestArticlesWindowSize\");\n\n final List pageNums = Paginator.paginate(currentPageNum, fetchSize, pageCount, windowSize);\n pagination.put(Pagination.PAGINATION_PAGE_COUNT, pageCount);\n pagination.put(Pagination.PAGINATION_PAGE_NUMS, (Object) pageNums);\n\n final JSONArray data = result.optJSONArray(Keys.RESULTS);\n final List articles = CollectionUtils.jsonArrayToList(data);\n\n try {\n organizeArticles(articles);\n } catch (final RepositoryException e) {\n LOGGER.log(Level.ERROR, \"Organizes articles failed\", e);\n\n throw new ServiceException(e);\n }\n\n final Integer participantsCnt = Symphonys.getInt(\"latestArticleParticipantsCnt\");\n genParticipants(articles, participantsCnt);\n\n ret.put(Article.ARTICLES, (Object) articles);\n\n return ret;\n }\n\n /**\n * Gets the index articles with the specified fetch size.\n *\n * @param fetchSize the specified fetch size\n * @return recent articles, returns an empty list if not found\n * @throws ServiceException service exception\n */\n public List getIndexArticles(final int fetchSize) throws ServiceException {\n final Query query = makeTopQuery(1, fetchSize);\n\n try {\n final JSONObject result = articleRepository.get(query);\n final List ret = 
CollectionUtils.jsonArrayToList(result.optJSONArray(Keys.RESULTS));\n\n organizeArticles(ret);\n\n for (final JSONObject article : ret) {\n final String authorId = article.optString(Article.ARTICLE_AUTHOR_ID);\n final JSONObject author = userRepository.get(authorId);\n if (UserExt.USER_STATUS_C_INVALID == author.optInt(UserExt.USER_STATUS)) {\n article.put(Article.ARTICLE_TITLE, langPropsService.get(\"articleTitleBlockLabel\"));\n }\n }\n\n final Integer participantsCnt = Symphonys.getInt(\"indexArticleParticipantsCnt\");\n genParticipants(ret, participantsCnt);\n\n return ret;\n } catch (final RepositoryException e) {\n LOGGER.log(Level.ERROR, \"Gets index articles failed\", e);\n throw new ServiceException(e);\n }\n }\n\n /**\n * Gets the recent articles with the specified fetch size.\n *\n * @param currentPageNum the specified current page number\n * @param fetchSize the specified fetch size\n * @return recent articles, returns an empty list if not found\n * @throws ServiceException service exception\n */\n public List getRecentArticlesWithComments(final int currentPageNum, final int fetchSize) throws ServiceException {\n return getArticles(makeRecentQuery(currentPageNum, fetchSize));\n }\n\n /**\n * Gets the index articles with the specified fetch size.\n *\n * @param currentPageNum the specified current page number\n * @param fetchSize the specified fetch size\n * @return recent articles, returns an empty list if not found\n * @throws ServiceException service exception\n */\n public List getTopArticlesWithComments(final int currentPageNum, final int fetchSize) throws ServiceException {\n return getArticles(makeTopQuery(currentPageNum, fetchSize));\n }\n\n /**\n * The specific articles.\n *\n * @param query conditions\n * @return articles\n * @throws ServiceException service exception\n */\n private List getArticles(final Query query) throws ServiceException {\n try {\n final JSONObject result = articleRepository.get(query);\n final List ret = 
CollectionUtils.jsonArrayToList(result.optJSONArray(Keys.RESULTS));\n organizeArticles(ret);\n final List stories = new ArrayList();\n\n for (final JSONObject article : ret) {\n final JSONObject story = new JSONObject();\n final String authorId = article.optString(Article.ARTICLE_AUTHOR_ID);\n final JSONObject author = userRepository.get(authorId);\n if (UserExt.USER_STATUS_C_INVALID == author.optInt(UserExt.USER_STATUS)) {\n story.put(\"title\", langPropsService.get(\"articleTitleBlockLabel\"));\n } else {\n story.put(\"title\", article.optString(Article.ARTICLE_TITLE));\n }\n story.put(\"id\", article.optLong(\"oId\"));\n story.put(\"url\", Latkes.getServePath() + article.optString(Article.ARTICLE_PERMALINK));\n story.put(\"user_display_name\", article.optString(Article.ARTICLE_T_AUTHOR_NAME));\n story.put(\"user_job\", author.optString(UserExt.USER_INTRO));\n story.put(\"comment_html\", article.optString(Article.ARTICLE_CONTENT));\n story.put(\"comment_count\", article.optInt(Article.ARTICLE_COMMENT_CNT));\n story.put(\"vote_count\", article.optInt(Article.ARTICLE_GOOD_CNT));\n story.put(\"created_at\", formatDate(article.get(Article.ARTICLE_CREATE_TIME)));\n story.put(\"user_portrait_url\", article.optString(Article.ARTICLE_T_AUTHOR_THUMBNAIL_URL));\n story.put(\"comments\", getAllComments(article.optString(\"oId\")));\n final String tagsString = article.optString(Article.ARTICLE_TAGS);\n String[] tags = null;\n if (!Strings.isEmptyOrNull(tagsString)) {\n tags = tagsString.split(\",\");\n }\n story.put(\"badge\", tags == null ? 
\"\" : tags[0]);\n stories.add(story);\n }\n final Integer participantsCnt = Symphonys.getInt(\"indexArticleParticipantsCnt\");\n genParticipants(stories, participantsCnt);\n return stories;\n } catch (final RepositoryException e) {\n LOGGER.log(Level.ERROR, \"Gets index articles failed\", e);\n throw new ServiceException(e);\n } catch (final JSONException ex) {\n LOGGER.log(Level.ERROR, \"Gets index articles failed\", ex);\n throw new ServiceException(ex);\n }\n }\n\n /**\n * Gets the article comments with the specified article id.\n *\n * @param articleId the specified article id\n * @return comments, return an empty list if not found\n * @throws ServiceException service exception\n * @throws JSONException json exception\n * @throws RepositoryException repository exception\n */\n private List getAllComments(final String articleId) throws ServiceException, JSONException, RepositoryException {\n final List commments = new ArrayList();\n final List articleComments = commentQueryService.getArticleComments(articleId, 1, Integer.MAX_VALUE);\n for (final JSONObject ac : articleComments) {\n final JSONObject comment = new JSONObject();\n final JSONObject author = userRepository.get(ac.optString(Comment.COMMENT_AUTHOR_ID));\n comment.put(\"id\", ac.optLong(\"oId\"));\n comment.put(\"body_html\", ac.optString(Comment.COMMENT_CONTENT));\n comment.put(\"depth\", 0);\n comment.put(\"user_display_name\", ac.optString(Comment.COMMENT_T_AUTHOR_NAME));\n comment.put(\"user_job\", author.optString(UserExt.USER_INTRO));\n comment.put(\"vote_count\", 0);\n comment.put(\"created_at\", formatDate(ac.get(Comment.COMMENT_CREATE_TIME)));\n comment.put(\"user_portrait_url\", ac.optString(Comment.COMMENT_T_ARTICLE_AUTHOR_THUMBNAIL_URL));\n commments.add(comment);\n }\n return commments;\n }\n\n /**\n * The demand format date.\n *\n * @param date the original date\n * @return the format date like \"2015-08-03T07:26:57Z\"\n */\n private String formatDate(final Object date) {\n return 
DateFormatUtils.format(((Date) date).getTime(), \"yyyy-MM-dd\")\n + \"T\" + DateFormatUtils.format(((Date) date).getTime(), \"HH:mm:ss\") + \"Z\";\n }\n\n /**\n * Organizes the specified articles.\n *\n *
    \n *
  • converts create/update/latest comment time (long) to date type
  • \n *
  • generates author thumbnail URL
  • \n *
  • generates author name
  • \n *
  • escapes article title &lt; and &gt;
  • \n *
  • generates article heat
  • \n *
  • generates article view count display format(1k+/1.5k+...)
  • \n *
  • generates time ago text
  • \n *
\n *\n * @param articles the specified articles\n * @throws RepositoryException repository exception\n */\n public void organizeArticles(final List articles) throws RepositoryException {\n for (final JSONObject article : articles) {\n organizeArticle(article);\n }\n }\n\n /**\n * Organizes the specified article.\n *\n *
    \n *
  • converts create/update/latest comment time (long) to date type
  • \n *
  • generates author thumbnail URL
  • \n *
  • generates author name
  • \n *
  • escapes article title &lt; and &gt;
  • \n *
  • generates article heat
  • \n *
  • generates article view count display format(1k+/1.5k+...)
  • \n *
  • generates time ago text
  • \n *
\n *\n * @param article the specified article\n * @throws RepositoryException repository exception\n */\n public void organizeArticle(final JSONObject article) throws RepositoryException {\n toArticleDate(article);\n genArticleAuthor(article);\n\n String title = article.optString(Article.ARTICLE_TITLE).replace(\"<\", \"&lt;\").replace(\">\", \"&gt;\");\n title = Markdowns.clean(title, \"\");\n article.put(Article.ARTICLE_TITLE, title);\n\n article.put(Article.ARTICLE_T_TITLE_EMOJI, Emotions.convert(title));\n\n if (Article.ARTICLE_STATUS_C_INVALID == article.optInt(Article.ARTICLE_STATUS)) {\n article.put(Article.ARTICLE_TITLE, langPropsService.get(\"articleTitleBlockLabel\"));\n article.put(Article.ARTICLE_T_TITLE_EMOJI, langPropsService.get(\"articleTitleBlockLabel\"));\n article.put(Article.ARTICLE_CONTENT, langPropsService.get(\"articleContentBlockLabel\"));\n }\n\n final String articleId = article.optString(Keys.OBJECT_ID);\n Integer viewingCnt = ArticleChannel.ARTICLE_VIEWS.get(articleId);\n if (null == viewingCnt) {\n viewingCnt = 0;\n }\n\n article.put(Article.ARTICLE_T_HEAT, viewingCnt);\n\n final int viewCnt = article.optInt(Article.ARTICLE_VIEW_CNT);\n final double views = (double) viewCnt / 1000;\n if (views >= 1) {\n final DecimalFormat df = new DecimalFormat(\"#.#\");\n article.put(Article.ARTICLE_T_VIEW_CNT_DISPLAY_FORMAT, df.format(views) + \"K\");\n }\n }\n\n /**\n * Converts the specified article create/update/latest comment time (long) to date type.\n *\n * @param article the specified article\n */\n private void toArticleDate(final JSONObject article) {\n article.put(Common.TIME_AGO, Times.getTimeAgo(article.optLong(Article.ARTICLE_CREATE_TIME), Latkes.getLocale()));\n\n article.put(Article.ARTICLE_CREATE_TIME, new Date(article.optLong(Article.ARTICLE_CREATE_TIME)));\n article.put(Article.ARTICLE_UPDATE_TIME, new Date(article.optLong(Article.ARTICLE_UPDATE_TIME)));\n article.put(Article.ARTICLE_LATEST_CMT_TIME, new 
Date(article.optLong(Article.ARTICLE_LATEST_CMT_TIME)));\n }\n\n /**\n * Generates the specified article author name and thumbnail URL.\n *\n * @param article the specified article\n * @throws RepositoryException repository exception\n */\n private void genArticleAuthor(final JSONObject article) throws RepositoryException {\n final String authorId = article.optString(Article.ARTICLE_AUTHOR_ID);\n\n if (Strings.isEmptyOrNull(authorId)) {\n return;\n }\n\n final JSONObject author = userRepository.get(authorId);\n\n article.put(Article.ARTICLE_T_AUTHOR_THUMBNAIL_URL, avatarQueryService.getAvatarURLByUser(author));\n article.put(Article.ARTICLE_T_AUTHOR, author);\n\n article.put(Article.ARTICLE_T_AUTHOR_NAME, author.optString(User.USER_NAME));\n }\n\n /**\n * Generates participants for the specified articles.\n *\n * @param articles the specified articles\n * @param participantsCnt the specified generate size\n * @throws ServiceException service exception\n */\n private void genParticipants(final List articles, final Integer participantsCnt) throws ServiceException {\n for (final JSONObject article : articles) {\n final String participantName = \"\";\n final String participantThumbnailURL = \"\";\n\n final List articleParticipants\n = getArticleLatestParticipants(article.optString(Keys.OBJECT_ID), participantsCnt);\n article.put(Article.ARTICLE_T_PARTICIPANTS, (Object) articleParticipants);\n\n article.put(Article.ARTICLE_T_PARTICIPANT_NAME, participantName);\n article.put(Article.ARTICLE_T_PARTICIPANT_THUMBNAIL_URL, participantThumbnailURL);\n }\n }\n\n /**\n * Gets the article participants (commenters) with the specified article article id and fetch size.\n *\n * @param articleId the specified article id\n * @param fetchSize the specified fetch size\n * @return article participants, for example,
\n     * [\n     *     {\n     *         \"oId\": \"\",\n     *         \"articleParticipantName\": \"\",\n     *         \"articleParticipantThumbnailURL\": \"\",\n     *         \"articleParticipantThumbnailUpdateTime\": long,\n     *         \"commentId\": \"\"\n     *     }, ....\n     * ]\n     * 
, returns an empty list if not found\n *\n * @throws ServiceException service exception\n */\n public List getArticleLatestParticipants(final String articleId, final int fetchSize) throws ServiceException {\n final Query query = new Query().addSort(Keys.OBJECT_ID, SortDirection.DESCENDING)\n .setFilter(new PropertyFilter(Comment.COMMENT_ON_ARTICLE_ID, FilterOperator.EQUAL, articleId))\n .addProjection(Comment.COMMENT_AUTHOR_EMAIL, String.class)\n .addProjection(Keys.OBJECT_ID, String.class)\n .addProjection(Comment.COMMENT_AUTHOR_ID, String.class)\n .setPageCount(1).setCurrentPageNum(1).setPageSize(fetchSize);\n final List ret = new ArrayList();\n\n try {\n final JSONObject result = commentRepository.get(query);\n\n final List comments = new ArrayList();\n final JSONArray records = result.optJSONArray(Keys.RESULTS);\n for (int i = 0; i < records.length(); i++) {\n final JSONObject comment = records.optJSONObject(i);\n\n boolean exist = false;\n // deduplicate\n for (final JSONObject c : comments) {\n if (comment.optString(Comment.COMMENT_AUTHOR_ID).equals(\n c.optString(Comment.COMMENT_AUTHOR_ID))) {\n exist = true;\n\n break;\n }\n }\n\n if (!exist) {\n comments.add(comment);\n }\n }\n\n for (final JSONObject comment : comments) {\n final String email = comment.optString(Comment.COMMENT_AUTHOR_EMAIL);\n final String userId = comment.optString(Comment.COMMENT_AUTHOR_ID);\n final JSONObject commenter = userRepository.get(userId);\n\n String thumbnailURL = Symphonys.get(\"defaultThumbnailURL\");\n if (!UserExt.DEFAULT_CMTER_EMAIL.equals(email)) {\n thumbnailURL = avatarQueryService.getAvatarURLByUser(commenter);\n }\n\n final JSONObject participant = new JSONObject();\n participant.put(Article.ARTICLE_T_PARTICIPANT_NAME, commenter.optString(User.USER_NAME));\n participant.put(Article.ARTICLE_T_PARTICIPANT_THUMBNAIL_URL, thumbnailURL);\n participant.put(Article.ARTICLE_T_PARTICIPANT_THUMBNAIL_UPDATE_TIME,\n commenter.optLong(UserExt.USER_UPDATE_TIME));\n 
participant.put(Article.ARTICLE_T_PARTICIPANT_URL, commenter.optString(User.USER_URL));\n participant.put(Keys.OBJECT_ID, commenter.optString(Keys.OBJECT_ID));\n participant.put(Comment.COMMENT_T_ID, comment.optString(Keys.OBJECT_ID));\n\n ret.add(participant);\n }\n\n return ret;\n } catch (final RepositoryException e) {\n LOGGER.log(Level.ERROR, \"Gets article [\" + articleId + \"] participants failed\", e);\n throw new ServiceException(e);\n }\n }\n\n /**\n * Processes the specified article content.\n *\n *
    \n *
  • Generates &#64;username home URL
  • \n *
  • Markdowns
  • \n *
  • Generates secured article content
  • \n *
  • Blocks the article if need
  • \n *
  • Generates emotion images
  • \n *
  • Generates article link with article id
  • \n *
  • Generates article abstract (preview content)
  • \n *
\n *\n * @param article the specified article, for example,
\n     * {\n     *     \"articleTitle\": \"\",\n     *     ....,\n     *     \"author\": {}\n     * }\n     * 
\n *\n * @param request the specified request\n * @throws ServiceException service exception\n */\n public void processArticleContent(final JSONObject article, final HttpServletRequest request)\n throws ServiceException {\n final JSONObject author = article.optJSONObject(Article.ARTICLE_T_AUTHOR);\n if (null != author && UserExt.USER_STATUS_C_INVALID == author.optInt(UserExt.USER_STATUS)\n || Article.ARTICLE_STATUS_C_INVALID == article.optInt(Article.ARTICLE_STATUS)) {\n article.put(Article.ARTICLE_TITLE, langPropsService.get(\"articleTitleBlockLabel\"));\n article.put(Article.ARTICLE_CONTENT, langPropsService.get(\"articleContentBlockLabel\"));\n article.put(Article.ARTICLE_T_PREVIEW_CONTENT, langPropsService.get(\"articleContentBlockLabel\"));\n\n article.put(Article.ARTICLE_REWARD_CONTENT, \"\");\n article.put(Article.ARTICLE_REWARD_POINT, 0);\n\n return;\n }\n\n String previewContent = getPreviewContent(article, request);\n previewContent = Jsoup.parse(previewContent).text();\n previewContent = previewContent.replaceAll(\"\\\"\", \"'\");\n article.put(Article.ARTICLE_T_PREVIEW_CONTENT, previewContent);\n\n String articleContent = article.optString(Article.ARTICLE_CONTENT);\n article.put(Common.DISCUSSION_VIEWABLE, true);\n\n final Set userNames = userQueryService.getUserNames(articleContent);\n final JSONObject currentUser = userQueryService.getCurrentUser(request);\n final String currentUserName = null == currentUser ? \"\" : currentUser.optString(User.USER_NAME);\n final String currentRole = null == currentUser ? 
\"\" : currentUser.optString(User.USER_ROLE);\n final String authorName = article.optString(Article.ARTICLE_T_AUTHOR_NAME);\n if (Article.ARTICLE_TYPE_C_DISCUSSION == article.optInt(Article.ARTICLE_TYPE)\n && !authorName.equals(currentUserName) && !Role.ADMIN_ROLE.equals(currentRole)) {\n boolean invited = false;\n for (final String userName : userNames) {\n if (userName.equals(currentUserName)) {\n invited = true;\n\n break;\n }\n }\n\n if (!invited) {\n String blockContent = langPropsService.get(\"articleDiscussionLabel\");\n blockContent = blockContent.replace(\"{user}\", \"\" + authorName + \"\");\n\n article.put(Article.ARTICLE_CONTENT, blockContent);\n article.put(Common.DISCUSSION_VIEWABLE, false);\n\n article.put(Article.ARTICLE_REWARD_CONTENT, \"\");\n article.put(Article.ARTICLE_REWARD_POINT, 0);\n\n return;\n }\n }\n\n for (final String userName : userNames) {\n articleContent = articleContent.replace('@' + userName, \"@\" + userName + \"\");\n }\n\n articleContent = shortLinkQueryService.linkArticle(articleContent);\n articleContent = shortLinkQueryService.linkTag(articleContent);\n\n articleContent = Emotions.convert(articleContent);\n article.put(Article.ARTICLE_CONTENT, articleContent);\n\n if (article.optInt(Article.ARTICLE_REWARD_POINT) > 0) {\n String articleRewardContent = article.optString(Article.ARTICLE_REWARD_CONTENT);\n\n final Set rewordContentUserNames = userQueryService.getUserNames(articleRewardContent);\n\n for (final String userName : rewordContentUserNames) {\n articleRewardContent = articleRewardContent.replace('@' + userName, \"@\" + userName + \"\");\n }\n\n articleRewardContent = Emotions.convert(articleRewardContent);\n article.put(Article.ARTICLE_REWARD_CONTENT, articleRewardContent);\n }\n\n markdown(article);\n }\n\n /**\n * Gets articles by the specified request json object.\n *\n * @param requestJSONObject the specified request json object, for example,
\n     * {\n     *     \"oId\": \"\", // optional\n     *     \"paginationCurrentPageNum\": 1,\n     *     \"paginationPageSize\": 20,\n     *     \"paginationWindowSize\": 10\n     * }, see {@link Pagination} for more details\n     * 
\n *\n * @param articleFields the specified article fields to return\n *\n * @return for example,
\n     * {\n     *     \"pagination\": {\n     *         \"paginationPageCount\": 100,\n     *         \"paginationPageNums\": [1, 2, 3, 4, 5]\n     *     },\n     *     \"articles\": [{\n     *         \"oId\": \"\",\n     *         \"articleTitle\": \"\",\n     *         \"articleContent\": \"\",\n     *         ....\n     *      }, ....]\n     * }\n     * 
\n *\n * @throws ServiceException service exception\n * @see Pagination\n */\n public JSONObject getArticles(final JSONObject requestJSONObject, final Map> articleFields) throws ServiceException {\n final JSONObject ret = new JSONObject();\n\n final int currentPageNum = requestJSONObject.optInt(Pagination.PAGINATION_CURRENT_PAGE_NUM);\n final int pageSize = requestJSONObject.optInt(Pagination.PAGINATION_PAGE_SIZE);\n final int windowSize = requestJSONObject.optInt(Pagination.PAGINATION_WINDOW_SIZE);\n final Query query = new Query().setCurrentPageNum(currentPageNum).setPageSize(pageSize).\n addSort(Article.ARTICLE_UPDATE_TIME, SortDirection.DESCENDING);\n for (final Map.Entry> articleField : articleFields.entrySet()) {\n query.addProjection(articleField.getKey(), articleField.getValue());\n }\n\n if (requestJSONObject.has(Keys.OBJECT_ID)) {\n query.setFilter(new PropertyFilter(Keys.OBJECT_ID, FilterOperator.EQUAL, requestJSONObject.optString(Keys.OBJECT_ID)));\n }\n\n JSONObject result = null;\n\n try {\n result = articleRepository.get(query);\n } catch (final RepositoryException e) {\n LOGGER.log(Level.ERROR, \"Gets articles failed\", e);\n\n throw new ServiceException(e);\n }\n\n final int pageCount = result.optJSONObject(Pagination.PAGINATION).optInt(Pagination.PAGINATION_PAGE_COUNT);\n\n final JSONObject pagination = new JSONObject();\n ret.put(Pagination.PAGINATION, pagination);\n final List pageNums = Paginator.paginate(currentPageNum, pageSize, pageCount, windowSize);\n pagination.put(Pagination.PAGINATION_PAGE_COUNT, pageCount);\n pagination.put(Pagination.PAGINATION_PAGE_NUMS, pageNums);\n\n final JSONArray data = result.optJSONArray(Keys.RESULTS);\n final List articles = CollectionUtils.jsonArrayToList(data);\n\n try {\n organizeArticles(articles);\n } catch (final RepositoryException e) {\n LOGGER.log(Level.ERROR, \"Organizes articles failed\", e);\n\n throw new ServiceException(e);\n }\n\n ret.put(Article.ARTICLES, articles);\n\n return ret;\n }\n\n 
/**\n * Markdowns the specified article content.\n *\n *
    \n *
  • Markdowns article content/reward content
  • \n *
  • Generates secured article content/reward content
  • \n *
\n *\n * @param article the specified article content\n */\n private void markdown(final JSONObject article) {\n String content = article.optString(Article.ARTICLE_CONTENT);\n\n final int articleType = article.optInt(Article.ARTICLE_TYPE);\n if (Article.ARTICLE_TYPE_C_THOUGHT != articleType) {\n content = Markdowns.toHTML(content);\n content = Markdowns.clean(content, Latkes.getServePath() + article.optString(Article.ARTICLE_PERMALINK));\n } else {\n final Document.OutputSettings outputSettings = new Document.OutputSettings();\n outputSettings.prettyPrint(false);\n\n content = Jsoup.clean(content, Latkes.getServePath() + article.optString(Article.ARTICLE_PERMALINK),\n Whitelist.relaxed().addAttributes(\":all\", \"id\", \"target\", \"class\").\n addTags(\"span\", \"hr\").addAttributes(\"iframe\", \"src\", \"width\", \"height\")\n .addAttributes(\"audio\", \"controls\", \"src\"), outputSettings);\n\n content = content.replace(\"\\n\", \"\\\\n\").replace(\"'\", \"\\\\'\")\n .replace(\"\\\"\", \"\\\\\\\"\");\n }\n\n article.put(Article.ARTICLE_CONTENT, content);\n\n if (article.optInt(Article.ARTICLE_REWARD_POINT) > 0) {\n String rewardContent = article.optString(Article.ARTICLE_REWARD_CONTENT);\n rewardContent = Markdowns.toHTML(rewardContent);\n rewardContent = Markdowns.clean(rewardContent,\n Latkes.getServePath() + article.optString(Article.ARTICLE_PERMALINK));\n article.put(Article.ARTICLE_REWARD_CONTENT, rewardContent);\n }\n }\n}\n"},"new_file":{"kind":"string","value":"src/main/java/org/b3log/symphony/service/ArticleQueryService.java"},"old_contents":{"kind":"string","value":"/*\n * Copyright (c) 2012-2016, b3log.org & hacpai.com\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under 
the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage org.b3log.symphony.service;\n\nimport java.text.DecimalFormat;\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.Date;\nimport java.util.HashMap;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\nimport javax.inject.Inject;\nimport javax.servlet.http.HttpServletRequest;\nimport org.apache.commons.lang.StringUtils;\nimport org.apache.commons.lang.time.DateFormatUtils;\nimport org.apache.commons.lang.time.DateUtils;\nimport org.b3log.latke.Keys;\nimport org.b3log.latke.Latkes;\nimport org.b3log.latke.logging.Level;\nimport org.b3log.latke.logging.Logger;\nimport org.b3log.latke.model.Pagination;\nimport org.b3log.latke.model.Role;\nimport org.b3log.latke.model.User;\nimport org.b3log.latke.repository.CompositeFilter;\nimport org.b3log.latke.repository.CompositeFilterOperator;\nimport org.b3log.latke.repository.Filter;\nimport org.b3log.latke.repository.FilterOperator;\nimport org.b3log.latke.repository.PropertyFilter;\nimport org.b3log.latke.repository.Query;\nimport org.b3log.latke.repository.RepositoryException;\nimport org.b3log.latke.repository.SortDirection;\nimport org.b3log.latke.service.LangPropsService;\nimport org.b3log.latke.service.ServiceException;\nimport org.b3log.latke.service.annotation.Service;\nimport org.b3log.latke.util.CollectionUtils;\nimport org.b3log.latke.util.Paginator;\nimport org.b3log.latke.util.Strings;\nimport org.b3log.symphony.model.Article;\nimport org.b3log.symphony.model.Comment;\nimport org.b3log.symphony.model.Common;\nimport org.b3log.symphony.model.Tag;\nimport org.b3log.symphony.model.UserExt;\nimport org.b3log.symphony.processor.channel.ArticleChannel;\nimport 
org.b3log.symphony.repository.ArticleRepository;\nimport org.b3log.symphony.repository.CommentRepository;\nimport org.b3log.symphony.repository.DomainTagRepository;\nimport org.b3log.symphony.repository.TagArticleRepository;\nimport org.b3log.symphony.repository.TagRepository;\nimport org.b3log.symphony.repository.UserRepository;\nimport org.b3log.symphony.util.Emotions;\nimport org.b3log.symphony.util.Markdowns;\nimport org.b3log.symphony.util.Symphonys;\nimport org.b3log.symphony.util.Times;\nimport org.json.JSONArray;\nimport org.json.JSONException;\nimport org.json.JSONObject;\nimport org.jsoup.Jsoup;\nimport org.jsoup.nodes.Document;\nimport org.jsoup.safety.Whitelist;\n\n/**\n * Article query service.\n *\n * @author Liang Ding\n * @version 1.12.10.17, Mar 15, 2016\n * @since 0.2.0\n */\n@Service\npublic class ArticleQueryService {\n\n /**\n * Logger.\n */\n private static final Logger LOGGER = Logger.getLogger(ArticleQueryService.class.getName());\n\n /**\n * Article repository.\n */\n @Inject\n private ArticleRepository articleRepository;\n\n /**\n * Comment repository.\n */\n @Inject\n private CommentRepository commentRepository;\n\n /**\n * Tag-Article repository.\n */\n @Inject\n private TagArticleRepository tagArticleRepository;\n\n /**\n * Tag repository.\n */\n @Inject\n private TagRepository tagRepository;\n\n /**\n * User repository.\n */\n @Inject\n private UserRepository userRepository;\n\n /**\n * Domain tag repository.\n */\n @Inject\n private DomainTagRepository domainTagRepository;\n\n /**\n * Comment query service.\n */\n @Inject\n private CommentQueryService commentQueryService;\n\n /**\n * User query service.\n */\n @Inject\n private UserQueryService userQueryService;\n\n /**\n * Avatar query service.\n */\n @Inject\n private AvatarQueryService avatarQueryService;\n\n /**\n * Short link query service.\n */\n @Inject\n private ShortLinkQueryService shortLinkQueryService;\n\n /**\n * Language service.\n */\n @Inject\n private LangPropsService 
langPropsService;\n\n /**\n * Count to fetch article tags for relevant articles.\n */\n private static final int RELEVANT_ARTICLE_RANDOM_FETCH_TAG_CNT = 3;\n\n /**\n * Gets domain articles.\n *\n * @param domainId the specified domain id\n * @param currentPageNum the specified current page number\n * @param pageSize the specified page size\n * @return result\n * @throws ServiceException service exception\n */\n public JSONObject getDomainArticles(final String domainId, final int currentPageNum, final int pageSize)\n throws ServiceException {\n final JSONObject ret = new JSONObject();\n ret.put(Article.ARTICLES, (Object) Collections.emptyList());\n\n final JSONObject pagination = new JSONObject();\n ret.put(Pagination.PAGINATION, pagination);\n pagination.put(Pagination.PAGINATION_PAGE_COUNT, 0);\n pagination.put(Pagination.PAGINATION_PAGE_NUMS, (Object) Collections.emptyList());\n\n try {\n final JSONArray domainTags = domainTagRepository.getByDomainId(domainId, 1, Integer.MAX_VALUE)\n .optJSONArray(Keys.RESULTS);\n\n if (domainTags.length() <= 0) {\n return ret;\n }\n\n final List tagIds = new ArrayList();\n for (int i = 0; i < domainTags.length(); i++) {\n tagIds.add(domainTags.optJSONObject(i).optString(Tag.TAG + \"_\" + Keys.OBJECT_ID));\n }\n\n Query query = new Query().setFilter(\n new PropertyFilter(Tag.TAG + \"_\" + Keys.OBJECT_ID, FilterOperator.IN, tagIds)).\n setCurrentPageNum(currentPageNum).setPageSize(pageSize).\n addSort(Keys.OBJECT_ID, SortDirection.DESCENDING);\n JSONObject result = tagArticleRepository.get(query);\n final JSONArray tagArticles = result.optJSONArray(Keys.RESULTS);\n if (tagArticles.length() <= 0) {\n return ret;\n }\n\n final int pageCount = result.optJSONObject(Pagination.PAGINATION).optInt(Pagination.PAGINATION_PAGE_COUNT);\n\n final int windowSize = Symphonys.getInt(\"latestArticlesWindowSize\");\n\n final List pageNums = Paginator.paginate(currentPageNum, pageSize, pageCount, windowSize);\n 
pagination.put(Pagination.PAGINATION_PAGE_COUNT, pageCount);\n pagination.put(Pagination.PAGINATION_PAGE_NUMS, (Object) pageNums);\n\n final Set articleIds = new HashSet();\n for (int i = 0; i < tagArticles.length(); i++) {\n articleIds.add(tagArticles.optJSONObject(i).optString(Article.ARTICLE + \"_\" + Keys.OBJECT_ID));\n }\n\n query = new Query().setFilter(CompositeFilterOperator.and(\n new PropertyFilter(Keys.OBJECT_ID, FilterOperator.IN, articleIds),\n new PropertyFilter(Article.ARTICLE_STATUS, FilterOperator.EQUAL, Article.ARTICLE_STATUS_C_VALID))).\n setPageCount(1).addSort(Keys.OBJECT_ID, SortDirection.DESCENDING);\n\n final List articles\n = CollectionUtils.jsonArrayToList(articleRepository.get(query).optJSONArray(Keys.RESULTS));\n\n try {\n organizeArticles(articles);\n } catch (final RepositoryException e) {\n LOGGER.log(Level.ERROR, \"Organizes articles failed\", e);\n\n throw new ServiceException(e);\n }\n\n final Integer participantsCnt = Symphonys.getInt(\"latestArticleParticipantsCnt\");\n genParticipants(articles, participantsCnt);\n\n ret.put(Article.ARTICLES, (Object) articles);\n\n return ret;\n } catch (final Exception e) {\n LOGGER.log(Level.ERROR, \"Gets domain articles error\", e);\n\n throw new ServiceException(e);\n }\n }\n\n /**\n * Gets the relevant articles of the specified article with the specified fetch size.\n *\n *

\n * The relevant articles exist the same tag with the specified article.\n *

\n *\n * @param article the specified article\n * @param fetchSize the specified fetch size\n * @return relevant articles, returns an empty list if not found\n * @throws ServiceException service exception\n */\n public List getRelevantArticles(final JSONObject article, final int fetchSize) throws ServiceException {\n final String tagsString = article.optString(Article.ARTICLE_TAGS);\n final String[] tagTitles = tagsString.split(\",\");\n final int tagTitlesLength = tagTitles.length;\n final int subCnt = tagTitlesLength > RELEVANT_ARTICLE_RANDOM_FETCH_TAG_CNT\n ? RELEVANT_ARTICLE_RANDOM_FETCH_TAG_CNT : tagTitlesLength;\n\n final List tagIdx = CollectionUtils.getRandomIntegers(0, tagTitlesLength, subCnt);\n final int subFetchSize = fetchSize / subCnt;\n final Set fetchedArticleIds = new HashSet();\n\n final List ret = new ArrayList();\n try {\n for (int i = 0; i < tagIdx.size(); i++) {\n final String tagTitle = tagTitles[tagIdx.get(i)].trim();\n\n final JSONObject tag = tagRepository.getByTitle(tagTitle);\n final String tagId = tag.optString(Keys.OBJECT_ID);\n JSONObject result = tagArticleRepository.getByTagId(tagId, 1, subFetchSize);\n\n final JSONArray tagArticleRelations = result.optJSONArray(Keys.RESULTS);\n\n final Set articleIds = new HashSet();\n for (int j = 0; j < tagArticleRelations.length(); j++) {\n final String articleId = tagArticleRelations.optJSONObject(j).optString(Article.ARTICLE + '_' + Keys.OBJECT_ID);\n\n if (fetchedArticleIds.contains(articleId)) {\n continue;\n }\n\n articleIds.add(articleId);\n fetchedArticleIds.add(articleId);\n }\n\n articleIds.remove(article.optString(Keys.OBJECT_ID));\n\n final Query query = new Query().setFilter(new PropertyFilter(Keys.OBJECT_ID, FilterOperator.IN, articleIds));\n result = articleRepository.get(query);\n\n ret.addAll(CollectionUtils.jsonArrayToList(result.optJSONArray(Keys.RESULTS)));\n }\n\n organizeArticles(ret);\n\n return ret;\n } catch (final RepositoryException e) {\n LOGGER.log(Level.ERROR, \"Gets 
relevant articles failed\", e);\n throw new ServiceException(e);\n }\n }\n\n /**\n * Gets broadcasts (articles permalink equals to \"aBroadcast\").\n *\n * @param currentPageNum the specified page number\n * @param pageSize the specified page size\n * @return articles, return an empty list if not found\n * @throws ServiceException service exception\n */\n public List getBroadcasts(final int currentPageNum, final int pageSize) throws ServiceException {\n try {\n final Query query = new Query().setCurrentPageNum(currentPageNum).setPageSize(pageSize).setFilter(\n new PropertyFilter(Article.ARTICLE_CLIENT_ARTICLE_ID, FilterOperator.EQUAL, \"aBroadcast\")).\n addSort(Article.ARTICLE_CREATE_TIME, SortDirection.DESCENDING);\n\n final JSONObject result = articleRepository.get(query);\n final JSONArray articles = result.optJSONArray(Keys.RESULTS);\n\n if (0 == articles.length()) {\n return Collections.emptyList();\n }\n\n final List ret = CollectionUtils.jsonArrayToList(articles);\n for (final JSONObject article : ret) {\n article.put(Article.ARTICLE_PERMALINK, Latkes.getServePath() + article.optString(Article.ARTICLE_PERMALINK));\n article.remove(Article.ARTICLE_CONTENT);\n }\n\n return ret;\n } catch (final RepositoryException e) {\n LOGGER.log(Level.ERROR, \"Gets broadcasts [currentPageNum=\" + currentPageNum + \", pageSize=\" + pageSize + \"] failed\", e);\n throw new ServiceException(e);\n }\n }\n\n /**\n * Gets interest articles.\n *\n * @param currentPageNum the specified current page number\n * @param pageSize the specified fetch size\n * @param tagTitles the specified tag titles\n * @return articles, return an empty list if not found\n * @throws ServiceException service exception\n */\n public List getInterests(final int currentPageNum, final int pageSize, final String... 
tagTitles)\n throws ServiceException {\n try {\n final List tagList = new ArrayList();\n for (int i = 0; i < tagTitles.length; i++) {\n final String tagTitle = tagTitles[i];\n final JSONObject tag = tagRepository.getByTitle(tagTitle);\n if (null == tag) {\n continue;\n }\n\n tagList.add(tag);\n }\n\n final Map> articleFields = new HashMap>();\n articleFields.put(Article.ARTICLE_TITLE, String.class);\n articleFields.put(Article.ARTICLE_PERMALINK, String.class);\n articleFields.put(Article.ARTICLE_CREATE_TIME, Long.class);\n\n final List ret = new ArrayList();\n\n if (!tagList.isEmpty()) {\n final List tagArticles\n = getArticlesByTags(currentPageNum, pageSize, articleFields, tagList.toArray(new JSONObject[0]));\n for (final JSONObject article : tagArticles) {\n article.remove(Article.ARTICLE_T_PARTICIPANTS);\n article.remove(Article.ARTICLE_T_PARTICIPANT_NAME);\n article.remove(Article.ARTICLE_T_PARTICIPANT_THUMBNAIL_URL);\n article.remove(Article.ARTICLE_LATEST_CMT_TIME);\n article.remove(Article.ARTICLE_UPDATE_TIME);\n article.remove(Article.ARTICLE_T_HEAT);\n article.remove(Article.ARTICLE_T_TITLE_EMOJI);\n article.remove(Common.TIME_AGO);\n\n article.put(Article.ARTICLE_CREATE_TIME, ((Date) article.get(Article.ARTICLE_CREATE_TIME)).getTime());\n }\n\n ret.addAll(tagArticles);\n }\n\n final List filters = new ArrayList();\n filters.add(new PropertyFilter(Article.ARTICLE_STATUS, FilterOperator.EQUAL, Article.ARTICLE_STATUS_C_VALID));\n filters.add(new PropertyFilter(Article.ARTICLE_TYPE, FilterOperator.NOT_EQUAL, Article.ARTICLE_TYPE_C_DISCUSSION));\n\n final Query query = new Query().addSort(Keys.OBJECT_ID, SortDirection.DESCENDING)\n .setPageCount(currentPageNum).setPageSize(pageSize).setCurrentPageNum(1);\n query.setFilter(new CompositeFilter(CompositeFilterOperator.AND, filters));\n for (final Map.Entry> articleField : articleFields.entrySet()) {\n query.addProjection(articleField.getKey(), articleField.getValue());\n }\n\n final JSONObject result = 
articleRepository.get(query);\n\n final List recentArticles = CollectionUtils.jsonArrayToList(result.optJSONArray(Keys.RESULTS));\n ret.addAll(recentArticles);\n\n for (final JSONObject article : ret) {\n article.put(Article.ARTICLE_PERMALINK, Latkes.getServePath() + article.optString(Article.ARTICLE_PERMALINK));\n }\n\n return ret;\n } catch (final Exception e) {\n LOGGER.log(Level.ERROR, \"Gets interests failed\", e);\n throw new ServiceException(e);\n }\n }\n\n /**\n * Gets news (articles tags contains \"B3log Announcement\").\n *\n * @param currentPageNum the specified page number\n * @param pageSize the specified page size\n * @return articles, return an empty list if not found\n * @throws ServiceException service exception\n */\n public List getNews(final int currentPageNum, final int pageSize) throws ServiceException {\n\n try {\n JSONObject oldAnnouncementTag = tagRepository.getByTitle(\"B3log Announcement\");\n JSONObject currentAnnouncementTag = tagRepository.getByTitle(\"B3log公告\");\n if (null == oldAnnouncementTag && null == currentAnnouncementTag) {\n return Collections.emptyList();\n }\n\n if (null == oldAnnouncementTag) {\n oldAnnouncementTag = new JSONObject();\n }\n\n if (null == currentAnnouncementTag) {\n currentAnnouncementTag = new JSONObject();\n }\n\n Query query = new Query().addSort(Keys.OBJECT_ID, SortDirection.DESCENDING).\n setFilter(CompositeFilterOperator.or(\n new PropertyFilter(Tag.TAG + '_' + Keys.OBJECT_ID, FilterOperator.EQUAL,\n oldAnnouncementTag.optString(Keys.OBJECT_ID)),\n new PropertyFilter(Tag.TAG + '_' + Keys.OBJECT_ID, FilterOperator.EQUAL,\n currentAnnouncementTag.optString(Keys.OBJECT_ID))\n ))\n .setPageCount(1).setPageSize(pageSize).setCurrentPageNum(currentPageNum);\n\n JSONObject result = tagArticleRepository.get(query);\n final JSONArray tagArticleRelations = result.optJSONArray(Keys.RESULTS);\n\n final Set articleIds = new HashSet();\n for (int i = 0; i < tagArticleRelations.length(); i++) {\n 
articleIds.add(tagArticleRelations.optJSONObject(i).optString(Article.ARTICLE + '_' + Keys.OBJECT_ID));\n }\n\n final JSONObject sa = userQueryService.getSA();\n\n final List subFilters = new ArrayList();\n subFilters.add(new PropertyFilter(Keys.OBJECT_ID, FilterOperator.IN, articleIds));\n subFilters.add(new PropertyFilter(Article.ARTICLE_AUTHOR_EMAIL, FilterOperator.EQUAL, sa.optString(User.USER_EMAIL)));\n query = new Query().setFilter(new CompositeFilter(CompositeFilterOperator.AND, subFilters))\n .addProjection(Article.ARTICLE_TITLE, String.class).addProjection(Article.ARTICLE_PERMALINK, String.class)\n .addProjection(Article.ARTICLE_CREATE_TIME, Long.class).addSort(Article.ARTICLE_CREATE_TIME, SortDirection.DESCENDING);\n result = articleRepository.get(query);\n\n final List ret = CollectionUtils.jsonArrayToList(result.optJSONArray(Keys.RESULTS));\n for (final JSONObject article : ret) {\n article.put(Article.ARTICLE_PERMALINK, Latkes.getServePath() + article.optString(Article.ARTICLE_PERMALINK));\n }\n\n return ret;\n } catch (final RepositoryException e) {\n LOGGER.log(Level.ERROR, \"Gets news failed\", e);\n throw new ServiceException(e);\n }\n }\n\n /**\n * Gets articles by the specified tags (order by article create date desc).\n *\n * @param tags the specified tags\n * @param currentPageNum the specified page number\n * @param articleFields the specified article fields to return\n * @param pageSize the specified page size\n * @return articles, return an empty list if not found\n * @throws ServiceException service exception\n */\n public List getArticlesByTags(final int currentPageNum, final int pageSize,\n final Map> articleFields, final JSONObject... 
tags) throws ServiceException {\n try {\n final List filters = new ArrayList();\n for (final JSONObject tag : tags) {\n filters.add(new PropertyFilter(Tag.TAG + '_' + Keys.OBJECT_ID, FilterOperator.EQUAL, tag.optString(Keys.OBJECT_ID)));\n }\n\n Filter filter;\n if (filters.size() >= 2) {\n filter = new CompositeFilter(CompositeFilterOperator.OR, filters);\n } else {\n filter = filters.get(0);\n }\n\n // XXX: 这里的分页是有问题的,后面取文章的时候会少(因为一篇文章可以有多个标签,但是文章 id 一样)\n Query query = new Query().addSort(Keys.OBJECT_ID, SortDirection.DESCENDING).\n setFilter(filter).setPageCount(1).setPageSize(pageSize).setCurrentPageNum(currentPageNum);\n\n JSONObject result = tagArticleRepository.get(query);\n final JSONArray tagArticleRelations = result.optJSONArray(Keys.RESULTS);\n\n final Set articleIds = new HashSet();\n for (int i = 0; i < tagArticleRelations.length(); i++) {\n articleIds.add(tagArticleRelations.optJSONObject(i).optString(Article.ARTICLE + '_' + Keys.OBJECT_ID));\n }\n\n query = new Query().setFilter(new PropertyFilter(Keys.OBJECT_ID, FilterOperator.IN, articleIds)).\n addSort(Keys.OBJECT_ID, SortDirection.DESCENDING);\n for (final Map.Entry> articleField : articleFields.entrySet()) {\n query.addProjection(articleField.getKey(), articleField.getValue());\n }\n\n result = articleRepository.get(query);\n\n final List ret = CollectionUtils.jsonArrayToList(result.optJSONArray(Keys.RESULTS));\n organizeArticles(ret);\n\n final Integer participantsCnt = Symphonys.getInt(\"tagArticleParticipantsCnt\");\n genParticipants(ret, participantsCnt);\n\n return ret;\n } catch (final RepositoryException e) {\n LOGGER.log(Level.ERROR, \"Gets articles by tags [tagLength=\" + tags.length + \"] failed\", e);\n throw new ServiceException(e);\n }\n }\n\n /**\n * Gets articles by the specified city (order by article create date desc).\n *\n * @param city the specified city\n * @param currentPageNum the specified page number\n * @param pageSize the specified page size\n * @return articles, 
return an empty list if not found\n * @throws ServiceException service exception\n */\n public List getArticlesByCity(final String city, final int currentPageNum, final int pageSize)\n throws ServiceException {\n try {\n final Query query = new Query().addSort(Keys.OBJECT_ID, SortDirection.DESCENDING).\n setFilter(new PropertyFilter(Article.ARTICLE_CITY, FilterOperator.EQUAL, city))\n .setPageCount(1).setPageSize(pageSize).setCurrentPageNum(currentPageNum);\n\n final JSONObject result = articleRepository.get(query);\n\n final List ret = CollectionUtils.jsonArrayToList(result.optJSONArray(Keys.RESULTS));\n organizeArticles(ret);\n\n final Integer participantsCnt = Symphonys.getInt(\"cityArticleParticipantsCnt\");\n genParticipants(ret, participantsCnt);\n\n return ret;\n } catch (final RepositoryException e) {\n LOGGER.log(Level.ERROR, \"Gets articles by city [\" + city + \"] failed\", e);\n\n throw new ServiceException(e);\n }\n }\n\n /**\n * Gets articles by the specified tag (order by article create date desc).\n *\n * @param tag the specified tag\n * @param currentPageNum the specified page number\n * @param pageSize the specified page size\n * @return articles, return an empty list if not found\n * @throws ServiceException service exception\n */\n public List getArticlesByTag(final JSONObject tag, final int currentPageNum, final int pageSize)\n throws ServiceException {\n try {\n Query query = new Query().addSort(Keys.OBJECT_ID, SortDirection.DESCENDING).\n setFilter(new PropertyFilter(Tag.TAG + '_' + Keys.OBJECT_ID, FilterOperator.EQUAL, tag.optString(Keys.OBJECT_ID)))\n .setPageCount(1).setPageSize(pageSize).setCurrentPageNum(currentPageNum);\n\n JSONObject result = tagArticleRepository.get(query);\n final JSONArray tagArticleRelations = result.optJSONArray(Keys.RESULTS);\n\n final Set articleIds = new HashSet();\n for (int i = 0; i < tagArticleRelations.length(); i++) {\n articleIds.add(tagArticleRelations.optJSONObject(i).optString(Article.ARTICLE + '_' + 
Keys.OBJECT_ID));\n }\n\n query = new Query().setFilter(new PropertyFilter(Keys.OBJECT_ID, FilterOperator.IN, articleIds)).\n addSort(Keys.OBJECT_ID, SortDirection.DESCENDING);\n result = articleRepository.get(query);\n\n final List ret = CollectionUtils.jsonArrayToList(result.optJSONArray(Keys.RESULTS));\n organizeArticles(ret);\n\n final Integer participantsCnt = Symphonys.getInt(\"tagArticleParticipantsCnt\");\n genParticipants(ret, participantsCnt);\n\n return ret;\n } catch (final RepositoryException e) {\n LOGGER.log(Level.ERROR, \"Gets articles by tag [tagTitle=\" + tag.optString(Tag.TAG_TITLE) + \"] failed\", e);\n throw new ServiceException(e);\n }\n }\n\n /**\n * Gets an article by the specified client article id.\n *\n * @param authorId the specified author id\n * @param clientArticleId the specified client article id\n * @return article, return {@code null} if not found\n * @throws ServiceException service exception\n */\n public JSONObject getArticleByClientArticleId(final String authorId, final String clientArticleId) throws ServiceException {\n final List filters = new ArrayList();\n filters.add(new PropertyFilter(Article.ARTICLE_CLIENT_ARTICLE_ID, FilterOperator.EQUAL, clientArticleId));\n filters.add(new PropertyFilter(Article.ARTICLE_AUTHOR_ID, FilterOperator.EQUAL, authorId));\n\n final Query query = new Query().setFilter(new CompositeFilter(CompositeFilterOperator.AND, filters));\n try {\n final JSONObject result = articleRepository.get(query);\n final JSONArray array = result.optJSONArray(Keys.RESULTS);\n\n if (0 == array.length()) {\n return null;\n }\n\n return array.optJSONObject(0);\n } catch (final RepositoryException e) {\n LOGGER.log(Level.ERROR, \"Gets article [clientArticleId=\" + clientArticleId + \"] failed\", e);\n throw new ServiceException(e);\n }\n }\n\n /**\n * Gets an article with {@link #organizeArticle(org.json.JSONObject)} by the specified id.\n *\n * @param articleId the specified id\n * @return article, return {@code null} 
if not found\n * @throws ServiceException service exception\n */\n public JSONObject getArticleById(final String articleId) throws ServiceException {\n try {\n final JSONObject ret = articleRepository.get(articleId);\n\n if (null == ret) {\n return null;\n }\n\n organizeArticle(ret);\n\n return ret;\n } catch (final RepositoryException e) {\n LOGGER.log(Level.ERROR, \"Gets an article [articleId=\" + articleId + \"] failed\", e);\n throw new ServiceException(e);\n }\n }\n\n /**\n * Gets an article by the specified id.\n *\n * @param articleId the specified id\n * @return article, return {@code null} if not found\n * @throws ServiceException service exception\n */\n public JSONObject getArticle(final String articleId) throws ServiceException {\n try {\n final JSONObject ret = articleRepository.get(articleId);\n\n if (null == ret) {\n return null;\n }\n\n return ret;\n } catch (final RepositoryException e) {\n LOGGER.log(Level.ERROR, \"Gets an article [articleId=\" + articleId + \"] failed\", e);\n throw new ServiceException(e);\n }\n }\n\n /**\n * Gets preview content of the article specified with the given article id.\n *\n * @param articleId the given article id\n * @param request the specified request\n * @return preview content\n * @throws ServiceException service exception\n */\n public String getArticlePreviewContent(final String articleId, final HttpServletRequest request) throws ServiceException {\n final JSONObject article = getArticle(articleId);\n if (null == article) {\n return null;\n }\n\n return getPreviewContent(article, request);\n }\n\n private String getPreviewContent(final JSONObject article, final HttpServletRequest request) throws ServiceException {\n final int length = Integer.valueOf(\"150\");\n String ret = article.optString(Article.ARTICLE_CONTENT);\n final String authorId = article.optString(Article.ARTICLE_AUTHOR_ID);\n final JSONObject author = userQueryService.getUser(authorId);\n\n if (null != author && UserExt.USER_STATUS_C_INVALID == 
author.optInt(UserExt.USER_STATUS)\n || Article.ARTICLE_STATUS_C_INVALID == article.optInt(Article.ARTICLE_STATUS)) {\n return langPropsService.get(\"articleContentBlockLabel\");\n }\n\n final Set userNames = userQueryService.getUserNames(ret);\n final JSONObject currentUser = userQueryService.getCurrentUser(request);\n final String currentUserName = null == currentUser ? \"\" : currentUser.optString(User.USER_NAME);\n final String authorName = author.optString(User.USER_NAME);\n if (Article.ARTICLE_TYPE_C_DISCUSSION == article.optInt(Article.ARTICLE_TYPE)\n && !authorName.equals(currentUserName)) {\n boolean invited = false;\n for (final String userName : userNames) {\n if (userName.equals(currentUserName)) {\n invited = true;\n\n break;\n }\n }\n\n if (!invited) {\n String blockContent = langPropsService.get(\"articleDiscussionLabel\");\n blockContent = blockContent.replace(\"{user}\", \"\" + authorName + \"\");\n\n return blockContent;\n }\n }\n\n ret = Emotions.convert(ret);\n ret = Markdowns.toHTML(ret);\n\n ret = Jsoup.clean(ret, Whitelist.none());\n if (ret.length() >= length) {\n ret = StringUtils.substring(ret, 0, length)\n + \" ....\";\n }\n\n return ret;\n }\n\n /**\n * Gets the user articles with the specified user id, page number and page size.\n *\n * @param userId the specified user id\n * @param currentPageNum the specified page number\n * @param pageSize the specified page size\n * @return user articles, return an empty list if not found\n * @throws ServiceException service exception\n */\n public List getUserArticles(final String userId, final int currentPageNum, final int pageSize) throws ServiceException {\n final Query query = new Query().addSort(Article.ARTICLE_CREATE_TIME, SortDirection.DESCENDING)\n .setCurrentPageNum(currentPageNum).setPageSize(pageSize).\n setFilter(CompositeFilterOperator.and(\n new PropertyFilter(Article.ARTICLE_AUTHOR_ID, FilterOperator.EQUAL, userId),\n new PropertyFilter(Article.ARTICLE_STATUS, FilterOperator.EQUAL, 
Article.ARTICLE_STATUS_C_VALID)));\n try {\n final JSONObject result = articleRepository.get(query);\n final List ret = CollectionUtils.jsonArrayToList(result.optJSONArray(Keys.RESULTS));\n organizeArticles(ret);\n\n return ret;\n } catch (final RepositoryException e) {\n LOGGER.log(Level.ERROR, \"Gets user articles failed\", e);\n throw new ServiceException(e);\n }\n }\n\n /**\n * Gets hot articles with the specified fetch size.\n *\n * @param fetchSize the specified fetch size\n * @return recent articles, returns an empty list if not found\n * @throws ServiceException service exception\n */\n public List getHotArticles(final int fetchSize) throws ServiceException {\n final String id = String.valueOf(DateUtils.addDays(new Date(), -15).getTime());\n\n try {\n final Query query = new Query().addSort(Article.ARTICLE_COMMENT_CNT, SortDirection.DESCENDING).\n addSort(Keys.OBJECT_ID, SortDirection.ASCENDING).setCurrentPageNum(1).setPageSize(fetchSize);\n\n final List filters = new ArrayList();\n filters.add(new PropertyFilter(Keys.OBJECT_ID, FilterOperator.GREATER_THAN_OR_EQUAL, id));\n filters.add(new PropertyFilter(Article.ARTICLE_TYPE, FilterOperator.NOT_EQUAL, Article.ARTICLE_TYPE_C_DISCUSSION));\n\n query.setFilter(new CompositeFilter(CompositeFilterOperator.AND, filters));\n\n final JSONObject result = articleRepository.get(query);\n final List ret = CollectionUtils.jsonArrayToList(result.optJSONArray(Keys.RESULTS));\n organizeArticles(ret);\n\n return ret;\n } catch (final RepositoryException e) {\n LOGGER.log(Level.ERROR, \"Gets hot articles failed\", e);\n throw new ServiceException(e);\n }\n }\n\n /**\n * Gets the random articles with the specified fetch size.\n *\n * @param fetchSize the specified fetch size\n * @return random articles, returns an empty list if not found\n * @throws ServiceException service exception\n */\n public List getRandomArticles(final int fetchSize) throws ServiceException {\n try {\n final List ret = 
articleRepository.getRandomly(fetchSize);\n organizeArticles(ret);\n\n return ret;\n } catch (final RepositoryException e) {\n LOGGER.log(Level.ERROR, \"Gets random articles failed\", e);\n throw new ServiceException(e);\n }\n }\n\n /**\n * Makes article showing filters.\n *\n * @return filter the article showing to user\n */\n private CompositeFilter makeArticleShowingFilter() {\n final List filters = new ArrayList();\n filters.add(new PropertyFilter(Article.ARTICLE_STATUS, FilterOperator.EQUAL, Article.ARTICLE_STATUS_C_VALID));\n filters.add(new PropertyFilter(Article.ARTICLE_TYPE, FilterOperator.NOT_EQUAL, Article.ARTICLE_TYPE_C_DISCUSSION));\n return new CompositeFilter(CompositeFilterOperator.AND, filters);\n }\n\n /**\n * Makes the recent (sort by create time) articles with the specified fetch size.\n *\n * @param currentPageNum the specified current page number\n * @param fetchSize the specified fetch size\n * @return recent articles query\n */\n private Query makeRecentQuery(final int currentPageNum, final int fetchSize) {\n final Query query = new Query()\n .addSort(Keys.OBJECT_ID, SortDirection.DESCENDING)\n .setPageSize(fetchSize).setCurrentPageNum(currentPageNum);\n query.setFilter(makeArticleShowingFilter());\n return query;\n }\n\n /**\n * Makes the top articles with the specified fetch size.\n *\n * @param currentPageNum the specified current page number\n * @param fetchSize the specified fetch size\n * @return top articles query\n */\n private Query makeTopQuery(final int currentPageNum, final int fetchSize) {\n final Query query = new Query()\n .addSort(Article.REDDIT_SCORE, SortDirection.DESCENDING)\n .addSort(Article.ARTICLE_LATEST_CMT_TIME, SortDirection.DESCENDING)\n .setPageCount(1).setPageSize(fetchSize).setCurrentPageNum(currentPageNum);\n\n query.setFilter(makeArticleShowingFilter());\n return query;\n }\n\n /**\n * Gets the recent (sort by create time) articles with the specified fetch size.\n *\n * @param currentPageNum the specified 
current page number\n * @param fetchSize the specified fetch size\n * @return for example,
\n     * {\n     *     \"pagination\": {\n     *         \"paginationPageCount\": 100,\n     *         \"paginationPageNums\": [1, 2, 3, 4, 5]\n     *     },\n     *     \"articles\": [{\n     *         \"oId\": \"\",\n     *         \"articleTitle\": \"\",\n     *         \"articleContent\": \"\",\n     *         ....\n     *      }, ....]\n     * }\n     * 
\n *\n * @throws ServiceException service exception\n */\n public JSONObject getRecentArticles(final int currentPageNum, final int fetchSize) throws ServiceException {\n final JSONObject ret = new JSONObject();\n\n final Query query = makeRecentQuery(currentPageNum, fetchSize);\n JSONObject result = null;\n\n try {\n result = articleRepository.get(query);\n } catch (final RepositoryException e) {\n LOGGER.log(Level.ERROR, \"Gets articles failed\", e);\n\n throw new ServiceException(e);\n }\n\n final int pageCount = result.optJSONObject(Pagination.PAGINATION).optInt(Pagination.PAGINATION_PAGE_COUNT);\n\n final JSONObject pagination = new JSONObject();\n ret.put(Pagination.PAGINATION, pagination);\n\n final int windowSize = Symphonys.getInt(\"latestArticlesWindowSize\");\n\n final List pageNums = Paginator.paginate(currentPageNum, fetchSize, pageCount, windowSize);\n pagination.put(Pagination.PAGINATION_PAGE_COUNT, pageCount);\n pagination.put(Pagination.PAGINATION_PAGE_NUMS, (Object) pageNums);\n\n final JSONArray data = result.optJSONArray(Keys.RESULTS);\n final List articles = CollectionUtils.jsonArrayToList(data);\n\n try {\n organizeArticles(articles);\n } catch (final RepositoryException e) {\n LOGGER.log(Level.ERROR, \"Organizes articles failed\", e);\n\n throw new ServiceException(e);\n }\n\n final Integer participantsCnt = Symphonys.getInt(\"latestArticleParticipantsCnt\");\n genParticipants(articles, participantsCnt);\n\n ret.put(Article.ARTICLES, (Object) articles);\n\n return ret;\n }\n\n /**\n * Gets the index articles with the specified fetch size.\n *\n * @param fetchSize the specified fetch size\n * @return recent articles, returns an empty list if not found\n * @throws ServiceException service exception\n */\n public List getIndexArticles(final int fetchSize) throws ServiceException {\n final Query query = makeTopQuery(1, fetchSize);\n\n try {\n final JSONObject result = articleRepository.get(query);\n final List ret = 
CollectionUtils.jsonArrayToList(result.optJSONArray(Keys.RESULTS));\n\n organizeArticles(ret);\n\n for (final JSONObject article : ret) {\n final String authorId = article.optString(Article.ARTICLE_AUTHOR_ID);\n final JSONObject author = userRepository.get(authorId);\n if (UserExt.USER_STATUS_C_INVALID == author.optInt(UserExt.USER_STATUS)) {\n article.put(Article.ARTICLE_TITLE, langPropsService.get(\"articleTitleBlockLabel\"));\n }\n }\n\n final Integer participantsCnt = Symphonys.getInt(\"indexArticleParticipantsCnt\");\n genParticipants(ret, participantsCnt);\n\n return ret;\n } catch (final RepositoryException e) {\n LOGGER.log(Level.ERROR, \"Gets index articles failed\", e);\n throw new ServiceException(e);\n }\n }\n\n /**\n * Gets the recent articles with the specified fetch size.\n *\n * @param currentPageNum the specified current page number\n * @param fetchSize the specified fetch size\n * @return recent articles, returns an empty list if not found\n * @throws ServiceException service exception\n */\n public List getRecentArticlesWithComments(final int currentPageNum, final int fetchSize) throws ServiceException {\n return getArticles(makeRecentQuery(currentPageNum, fetchSize));\n }\n\n /**\n * Gets the index articles with the specified fetch size.\n *\n * @param currentPageNum the specified current page number\n * @param fetchSize the specified fetch size\n * @return recent articles, returns an empty list if not found\n * @throws ServiceException service exception\n */\n public List getTopArticlesWithComments(final int currentPageNum, final int fetchSize) throws ServiceException {\n return getArticles(makeTopQuery(currentPageNum, fetchSize));\n }\n\n /**\n * The specific articles.\n *\n * @param query conditions\n * @return articles\n * @throws ServiceException service exception\n */\n private List getArticles(final Query query) throws ServiceException {\n try {\n final JSONObject result = articleRepository.get(query);\n final List ret = 
CollectionUtils.jsonArrayToList(result.optJSONArray(Keys.RESULTS));\n organizeArticles(ret);\n final List stories = new ArrayList();\n\n for (final JSONObject article : ret) {\n final JSONObject story = new JSONObject();\n final String authorId = article.optString(Article.ARTICLE_AUTHOR_ID);\n final JSONObject author = userRepository.get(authorId);\n if (UserExt.USER_STATUS_C_INVALID == author.optInt(UserExt.USER_STATUS)) {\n story.put(\"title\", langPropsService.get(\"articleTitleBlockLabel\"));\n } else {\n story.put(\"title\", article.optString(Article.ARTICLE_TITLE));\n }\n story.put(\"id\", article.optLong(\"oId\"));\n story.put(\"url\", Latkes.getServePath() + article.optString(Article.ARTICLE_PERMALINK));\n story.put(\"user_display_name\", article.optString(Article.ARTICLE_T_AUTHOR_NAME));\n story.put(\"user_job\", author.optString(UserExt.USER_INTRO));\n story.put(\"comment_html\", article.optString(Article.ARTICLE_CONTENT));\n story.put(\"comment_count\", article.optInt(Article.ARTICLE_COMMENT_CNT));\n story.put(\"vote_count\", article.optInt(Article.ARTICLE_GOOD_CNT));\n story.put(\"created_at\", formatDate(article.get(Article.ARTICLE_CREATE_TIME)));\n story.put(\"user_portrait_url\", article.optString(Article.ARTICLE_T_AUTHOR_THUMBNAIL_URL));\n story.put(\"comments\", getAllComments(article.optString(\"oId\")));\n final String tagsString = article.optString(Article.ARTICLE_TAGS);\n String[] tags = null;\n if (!Strings.isEmptyOrNull(tagsString)) {\n tags = tagsString.split(\",\");\n }\n story.put(\"badge\", tags == null ? 
\"\" : tags[0]);\n stories.add(story);\n }\n final Integer participantsCnt = Symphonys.getInt(\"indexArticleParticipantsCnt\");\n genParticipants(stories, participantsCnt);\n return stories;\n } catch (final RepositoryException e) {\n LOGGER.log(Level.ERROR, \"Gets index articles failed\", e);\n throw new ServiceException(e);\n } catch (final JSONException ex) {\n LOGGER.log(Level.ERROR, \"Gets index articles failed\", ex);\n throw new ServiceException(ex);\n }\n }\n\n /**\n * Gets the article comments with the specified article id.\n *\n * @param articleId the specified article id\n * @return comments, return an empty list if not found\n * @throws ServiceException service exception\n * @throws JSONException json exception\n * @throws RepositoryException repository exception\n */\n private List getAllComments(final String articleId) throws ServiceException, JSONException, RepositoryException {\n final List commments = new ArrayList();\n final List articleComments = commentQueryService.getArticleComments(articleId, 1, Integer.MAX_VALUE);\n for (final JSONObject ac : articleComments) {\n final JSONObject comment = new JSONObject();\n final JSONObject author = userRepository.get(ac.optString(Comment.COMMENT_AUTHOR_ID));\n comment.put(\"id\", ac.optLong(\"oId\"));\n comment.put(\"body_html\", ac.optString(Comment.COMMENT_CONTENT));\n comment.put(\"depth\", 0);\n comment.put(\"user_display_name\", ac.optString(Comment.COMMENT_T_AUTHOR_NAME));\n comment.put(\"user_job\", author.optString(UserExt.USER_INTRO));\n comment.put(\"vote_count\", 0);\n comment.put(\"created_at\", formatDate(ac.get(Comment.COMMENT_CREATE_TIME)));\n comment.put(\"user_portrait_url\", ac.optString(Comment.COMMENT_T_ARTICLE_AUTHOR_THUMBNAIL_URL));\n commments.add(comment);\n }\n return commments;\n }\n\n /**\n * The demand format date.\n *\n * @param date the original date\n * @return the format date like \"2015-08-03T07:26:57Z\"\n */\n private String formatDate(final Object date) {\n return 
DateFormatUtils.format(((Date) date).getTime(), \"yyyy-MM-dd\")\n + \"T\" + DateFormatUtils.format(((Date) date).getTime(), \"HH:mm:ss\") + \"Z\";\n }\n\n /**\n * Organizes the specified articles.\n *\n *
    \n *
  • converts create/update/latest comment time (long) to date type
  • \n *
  • generates author thumbnail URL
  • \n *
  • generates author name
  • \n *
  • escapes article title &lt; and &gt;
  • \n *
  • generates article heat
  • \n *
  • generates article view count display format(1k+/1.5k+...)
  • \n *
  • generates time ago text
  • \n *
\n *\n * @param articles the specified articles\n * @throws RepositoryException repository exception\n */\n public void organizeArticles(final List articles) throws RepositoryException {\n for (final JSONObject article : articles) {\n organizeArticle(article);\n }\n }\n\n /**\n * Organizes the specified article.\n *\n *
    \n *
  • converts create/update/latest comment time (long) to date type
  • \n *
  • generates author thumbnail URL
  • \n *
  • generates author name
  • \n *
  • escapes article title &lt; and &gt;
  • \n *
  • generates article heat
  • \n *
  • generates article view count display format(1k+/1.5k+...)
  • \n *
  • generates time ago text
  • \n *
\n *\n * @param article the specified article\n * @throws RepositoryException repository exception\n */\n public void organizeArticle(final JSONObject article) throws RepositoryException {\n toArticleDate(article);\n genArticleAuthor(article);\n\n String title = article.optString(Article.ARTICLE_TITLE).replace(\"<\", \"&lt;\").replace(\">\", \"&gt;\");\n title = Markdowns.clean(title, \"\");\n article.put(Article.ARTICLE_TITLE, title);\n\n article.put(Article.ARTICLE_T_TITLE_EMOJI, Emotions.convert(title));\n\n if (Article.ARTICLE_STATUS_C_INVALID == article.optInt(Article.ARTICLE_STATUS)) {\n article.put(Article.ARTICLE_TITLE, langPropsService.get(\"articleTitleBlockLabel\"));\n article.put(Article.ARTICLE_T_TITLE_EMOJI, langPropsService.get(\"articleTitleBlockLabel\"));\n article.put(Article.ARTICLE_CONTENT, langPropsService.get(\"articleContentBlockLabel\"));\n }\n\n final String articleId = article.optString(Keys.OBJECT_ID);\n Integer viewingCnt = ArticleChannel.ARTICLE_VIEWS.get(articleId);\n if (null == viewingCnt) {\n viewingCnt = 0;\n }\n\n article.put(Article.ARTICLE_T_HEAT, viewingCnt);\n\n final int viewCnt = article.optInt(Article.ARTICLE_VIEW_CNT);\n final double views = (double) viewCnt / 1000;\n if (views >= 1) {\n final DecimalFormat df = new DecimalFormat(\"#.#\");\n article.put(Article.ARTICLE_T_VIEW_CNT_DISPLAY_FORMAT, df.format(views) + \"K\");\n }\n }\n\n /**\n * Converts the specified article create/update/latest comment time (long) to date type.\n *\n * @param article the specified article\n */\n private void toArticleDate(final JSONObject article) {\n article.put(Common.TIME_AGO, Times.getTimeAgo(article.optLong(Article.ARTICLE_CREATE_TIME), Latkes.getLocale()));\n\n article.put(Article.ARTICLE_CREATE_TIME, new Date(article.optLong(Article.ARTICLE_CREATE_TIME)));\n article.put(Article.ARTICLE_UPDATE_TIME, new Date(article.optLong(Article.ARTICLE_UPDATE_TIME)));\n article.put(Article.ARTICLE_LATEST_CMT_TIME, new 
Date(article.optLong(Article.ARTICLE_LATEST_CMT_TIME)));\n }\n\n /**\n * Generates the specified article author name and thumbnail URL.\n *\n * @param article the specified article\n * @throws RepositoryException repository exception\n */\n private void genArticleAuthor(final JSONObject article) throws RepositoryException {\n final String authorId = article.optString(Article.ARTICLE_AUTHOR_ID);\n\n if (Strings.isEmptyOrNull(authorId)) {\n return;\n }\n\n final JSONObject author = userRepository.get(authorId);\n\n article.put(Article.ARTICLE_T_AUTHOR_THUMBNAIL_URL, avatarQueryService.getAvatarURLByUser(author));\n article.put(Article.ARTICLE_T_AUTHOR, author);\n\n article.put(Article.ARTICLE_T_AUTHOR_NAME, author.optString(User.USER_NAME));\n }\n\n /**\n * Generates participants for the specified articles.\n *\n * @param articles the specified articles\n * @param participantsCnt the specified generate size\n * @throws ServiceException service exception\n */\n private void genParticipants(final List articles, final Integer participantsCnt) throws ServiceException {\n for (final JSONObject article : articles) {\n final String participantName = \"\";\n final String participantThumbnailURL = \"\";\n\n final List articleParticipants\n = getArticleLatestParticipants(article.optString(Keys.OBJECT_ID), participantsCnt);\n article.put(Article.ARTICLE_T_PARTICIPANTS, (Object) articleParticipants);\n\n article.put(Article.ARTICLE_T_PARTICIPANT_NAME, participantName);\n article.put(Article.ARTICLE_T_PARTICIPANT_THUMBNAIL_URL, participantThumbnailURL);\n }\n }\n\n /**\n * Gets the article participants (commenters) with the specified article article id and fetch size.\n *\n * @param articleId the specified article id\n * @param fetchSize the specified fetch size\n * @return article participants, for example,
\n     * [\n     *     {\n     *         \"oId\": \"\",\n     *         \"articleParticipantName\": \"\",\n     *         \"articleParticipantThumbnailURL\": \"\",\n     *         \"articleParticipantThumbnailUpdateTime\": long,\n     *         \"commentId\": \"\"\n     *     }, ....\n     * ]\n     * 
, returns an empty list if not found\n *\n * @throws ServiceException service exception\n */\n public List getArticleLatestParticipants(final String articleId, final int fetchSize) throws ServiceException {\n final Query query = new Query().addSort(Keys.OBJECT_ID, SortDirection.DESCENDING)\n .setFilter(new PropertyFilter(Comment.COMMENT_ON_ARTICLE_ID, FilterOperator.EQUAL, articleId))\n .addProjection(Comment.COMMENT_AUTHOR_EMAIL, String.class)\n .addProjection(Keys.OBJECT_ID, String.class)\n .addProjection(Comment.COMMENT_AUTHOR_ID, String.class)\n .setPageCount(1).setCurrentPageNum(1).setPageSize(fetchSize);\n final List ret = new ArrayList();\n\n try {\n final JSONObject result = commentRepository.get(query);\n final List comments = CollectionUtils.jsonArrayToList(result.optJSONArray(Keys.RESULTS));\n\n for (final JSONObject comment : comments) {\n final String email = comment.optString(Comment.COMMENT_AUTHOR_EMAIL);\n final String userId = comment.optString(Comment.COMMENT_AUTHOR_ID);\n final JSONObject commenter = userRepository.get(userId);\n\n String thumbnailURL = Symphonys.get(\"defaultThumbnailURL\");\n if (!UserExt.DEFAULT_CMTER_EMAIL.equals(email)) {\n thumbnailURL = avatarQueryService.getAvatarURLByUser(commenter);\n }\n\n final JSONObject participant = new JSONObject();\n participant.put(Article.ARTICLE_T_PARTICIPANT_NAME, commenter.optString(User.USER_NAME));\n participant.put(Article.ARTICLE_T_PARTICIPANT_THUMBNAIL_URL, thumbnailURL);\n participant.put(Article.ARTICLE_T_PARTICIPANT_THUMBNAIL_UPDATE_TIME,\n commenter.optLong(UserExt.USER_UPDATE_TIME));\n participant.put(Article.ARTICLE_T_PARTICIPANT_URL, commenter.optString(User.USER_URL));\n participant.put(Keys.OBJECT_ID, commenter.optString(Keys.OBJECT_ID));\n participant.put(Comment.COMMENT_T_ID, comment.optString(Keys.OBJECT_ID));\n\n ret.add(participant);\n }\n\n return ret;\n } catch (final RepositoryException e) {\n LOGGER.log(Level.ERROR, \"Gets article [\" + articleId + \"] participants 
failed\", e);\n throw new ServiceException(e);\n }\n }\n\n /**\n * Processes the specified article content.\n *\n *
    \n *
  • Generates &#64;username home URL
  • \n *
  • Markdowns
  • \n *
  • Generates secured article content
  • \n *
  • Blocks the article if need
  • \n *
  • Generates emotion images
  • \n *
  • Generates article link with article id
  • \n *
  • Generates article abstract (preview content)
  • \n *
\n *\n * @param article the specified article, for example,
\n     * {\n     *     \"articleTitle\": \"\",\n     *     ....,\n     *     \"author\": {}\n     * }\n     * 
\n *\n * @param request the specified request\n * @throws ServiceException service exception\n */\n public void processArticleContent(final JSONObject article, final HttpServletRequest request)\n throws ServiceException {\n final JSONObject author = article.optJSONObject(Article.ARTICLE_T_AUTHOR);\n if (null != author && UserExt.USER_STATUS_C_INVALID == author.optInt(UserExt.USER_STATUS)\n || Article.ARTICLE_STATUS_C_INVALID == article.optInt(Article.ARTICLE_STATUS)) {\n article.put(Article.ARTICLE_TITLE, langPropsService.get(\"articleTitleBlockLabel\"));\n article.put(Article.ARTICLE_CONTENT, langPropsService.get(\"articleContentBlockLabel\"));\n article.put(Article.ARTICLE_T_PREVIEW_CONTENT, langPropsService.get(\"articleContentBlockLabel\"));\n\n article.put(Article.ARTICLE_REWARD_CONTENT, \"\");\n article.put(Article.ARTICLE_REWARD_POINT, 0);\n\n return;\n }\n\n String previewContent = getPreviewContent(article, request);\n previewContent = Jsoup.parse(previewContent).text();\n previewContent = previewContent.replaceAll(\"\\\"\", \"'\");\n article.put(Article.ARTICLE_T_PREVIEW_CONTENT, previewContent);\n\n String articleContent = article.optString(Article.ARTICLE_CONTENT);\n article.put(Common.DISCUSSION_VIEWABLE, true);\n\n final Set userNames = userQueryService.getUserNames(articleContent);\n final JSONObject currentUser = userQueryService.getCurrentUser(request);\n final String currentUserName = null == currentUser ? \"\" : currentUser.optString(User.USER_NAME);\n final String currentRole = null == currentUser ? 
\"\" : currentUser.optString(User.USER_ROLE);\n final String authorName = article.optString(Article.ARTICLE_T_AUTHOR_NAME);\n if (Article.ARTICLE_TYPE_C_DISCUSSION == article.optInt(Article.ARTICLE_TYPE)\n && !authorName.equals(currentUserName) && !Role.ADMIN_ROLE.equals(currentRole)) {\n boolean invited = false;\n for (final String userName : userNames) {\n if (userName.equals(currentUserName)) {\n invited = true;\n\n break;\n }\n }\n\n if (!invited) {\n String blockContent = langPropsService.get(\"articleDiscussionLabel\");\n blockContent = blockContent.replace(\"{user}\", \"\" + authorName + \"\");\n\n article.put(Article.ARTICLE_CONTENT, blockContent);\n article.put(Common.DISCUSSION_VIEWABLE, false);\n\n article.put(Article.ARTICLE_REWARD_CONTENT, \"\");\n article.put(Article.ARTICLE_REWARD_POINT, 0);\n\n return;\n }\n }\n\n for (final String userName : userNames) {\n articleContent = articleContent.replace('@' + userName, \"@\" + userName + \"\");\n }\n\n articleContent = shortLinkQueryService.linkArticle(articleContent);\n articleContent = shortLinkQueryService.linkTag(articleContent);\n\n articleContent = Emotions.convert(articleContent);\n article.put(Article.ARTICLE_CONTENT, articleContent);\n\n if (article.optInt(Article.ARTICLE_REWARD_POINT) > 0) {\n String articleRewardContent = article.optString(Article.ARTICLE_REWARD_CONTENT);\n\n final Set rewordContentUserNames = userQueryService.getUserNames(articleRewardContent);\n\n for (final String userName : rewordContentUserNames) {\n articleRewardContent = articleRewardContent.replace('@' + userName, \"@\" + userName + \"\");\n }\n\n articleRewardContent = Emotions.convert(articleRewardContent);\n article.put(Article.ARTICLE_REWARD_CONTENT, articleRewardContent);\n }\n\n markdown(article);\n }\n\n /**\n * Gets articles by the specified request json object.\n *\n * @param requestJSONObject the specified request json object, for example,
\n     * {\n     *     \"oId\": \"\", // optional\n     *     \"paginationCurrentPageNum\": 1,\n     *     \"paginationPageSize\": 20,\n     *     \"paginationWindowSize\": 10\n     * }, see {@link Pagination} for more details\n     * 
\n *\n * @param articleFields the specified article fields to return\n *\n * @return for example,
\n     * {\n     *     \"pagination\": {\n     *         \"paginationPageCount\": 100,\n     *         \"paginationPageNums\": [1, 2, 3, 4, 5]\n     *     },\n     *     \"articles\": [{\n     *         \"oId\": \"\",\n     *         \"articleTitle\": \"\",\n     *         \"articleContent\": \"\",\n     *         ....\n     *      }, ....]\n     * }\n     * 
\n *\n * @throws ServiceException service exception\n * @see Pagination\n */\n public JSONObject getArticles(final JSONObject requestJSONObject, final Map> articleFields) throws ServiceException {\n final JSONObject ret = new JSONObject();\n\n final int currentPageNum = requestJSONObject.optInt(Pagination.PAGINATION_CURRENT_PAGE_NUM);\n final int pageSize = requestJSONObject.optInt(Pagination.PAGINATION_PAGE_SIZE);\n final int windowSize = requestJSONObject.optInt(Pagination.PAGINATION_WINDOW_SIZE);\n final Query query = new Query().setCurrentPageNum(currentPageNum).setPageSize(pageSize).\n addSort(Article.ARTICLE_UPDATE_TIME, SortDirection.DESCENDING);\n for (final Map.Entry> articleField : articleFields.entrySet()) {\n query.addProjection(articleField.getKey(), articleField.getValue());\n }\n\n if (requestJSONObject.has(Keys.OBJECT_ID)) {\n query.setFilter(new PropertyFilter(Keys.OBJECT_ID, FilterOperator.EQUAL, requestJSONObject.optString(Keys.OBJECT_ID)));\n }\n\n JSONObject result = null;\n\n try {\n result = articleRepository.get(query);\n } catch (final RepositoryException e) {\n LOGGER.log(Level.ERROR, \"Gets articles failed\", e);\n\n throw new ServiceException(e);\n }\n\n final int pageCount = result.optJSONObject(Pagination.PAGINATION).optInt(Pagination.PAGINATION_PAGE_COUNT);\n\n final JSONObject pagination = new JSONObject();\n ret.put(Pagination.PAGINATION, pagination);\n final List pageNums = Paginator.paginate(currentPageNum, pageSize, pageCount, windowSize);\n pagination.put(Pagination.PAGINATION_PAGE_COUNT, pageCount);\n pagination.put(Pagination.PAGINATION_PAGE_NUMS, pageNums);\n\n final JSONArray data = result.optJSONArray(Keys.RESULTS);\n final List articles = CollectionUtils.jsonArrayToList(data);\n\n try {\n organizeArticles(articles);\n } catch (final RepositoryException e) {\n LOGGER.log(Level.ERROR, \"Organizes articles failed\", e);\n\n throw new ServiceException(e);\n }\n\n ret.put(Article.ARTICLES, articles);\n\n return ret;\n }\n\n 
/**\n * Markdowns the specified article content.\n *\n *
    \n *
  • Markdowns article content/reward content
  • \n *
  • Generates secured article content/reward content
  • \n *
\n *\n * @param article the specified article content\n */\n private void markdown(final JSONObject article) {\n String content = article.optString(Article.ARTICLE_CONTENT);\n\n final int articleType = article.optInt(Article.ARTICLE_TYPE);\n if (Article.ARTICLE_TYPE_C_THOUGHT != articleType) {\n content = Markdowns.toHTML(content);\n content = Markdowns.clean(content, Latkes.getServePath() + article.optString(Article.ARTICLE_PERMALINK));\n } else {\n final Document.OutputSettings outputSettings = new Document.OutputSettings();\n outputSettings.prettyPrint(false);\n\n content = Jsoup.clean(content, Latkes.getServePath() + article.optString(Article.ARTICLE_PERMALINK),\n Whitelist.relaxed().addAttributes(\":all\", \"id\", \"target\", \"class\").\n addTags(\"span\", \"hr\").addAttributes(\"iframe\", \"src\", \"width\", \"height\")\n .addAttributes(\"audio\", \"controls\", \"src\"), outputSettings);\n\n content = content.replace(\"\\n\", \"\\\\n\").replace(\"'\", \"\\\\'\")\n .replace(\"\\\"\", \"\\\\\\\"\");\n }\n\n article.put(Article.ARTICLE_CONTENT, content);\n\n if (article.optInt(Article.ARTICLE_REWARD_POINT) > 0) {\n String rewardContent = article.optString(Article.ARTICLE_REWARD_CONTENT);\n rewardContent = Markdowns.toHTML(rewardContent);\n rewardContent = Markdowns.clean(rewardContent,\n Latkes.getServePath() + article.optString(Article.ARTICLE_PERMALINK));\n article.put(Article.ARTICLE_REWARD_CONTENT, rewardContent);\n }\n }\n}\n"},"message":{"kind":"string","value":"帖子参与者去重\n"},"old_file":{"kind":"string","value":"src/main/java/org/b3log/symphony/service/ArticleQueryService.java"},"subject":{"kind":"string","value":"帖子参与者去重"},"git_diff":{"kind":"string","value":"rc/main/java/org/b3log/symphony/service/ArticleQueryService.java\n * Article query service.\n *\n * @author Liang Ding\n * @version 1.12.10.17, Mar 15, 2016\n * @version 1.12.10.18, Mar 23, 2016\n * @since 0.2.0\n */\n @Service\n \n try {\n final JSONObject result = commentRepository.get(query);\n 
final List comments = CollectionUtils.jsonArrayToList(result.optJSONArray(Keys.RESULTS));\n\n final List comments = new ArrayList();\n final JSONArray records = result.optJSONArray(Keys.RESULTS);\n for (int i = 0; i < records.length(); i++) {\n final JSONObject comment = records.optJSONObject(i);\n\n boolean exist = false;\n // deduplicate\n for (final JSONObject c : comments) {\n if (comment.optString(Comment.COMMENT_AUTHOR_ID).equals(\n c.optString(Comment.COMMENT_AUTHOR_ID))) {\n exist = true;\n\n break;\n }\n }\n\n if (!exist) {\n comments.add(comment);\n }\n }\n \n for (final JSONObject comment : comments) {\n final String email = comment.optString(Comment.COMMENT_AUTHOR_EMAIL);"}}},{"rowIdx":2022,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"lgpl-2.1"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"3f17073fec1517c2553745e86536cfd6726153f3"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"samskivert/samskivert,samskivert/samskivert"},"new_contents":{"kind":"string","value":"//\n// $Id: ServerControl.java,v 1.5 2002/03/03 17:21:29 mdb Exp $\n\npackage robodj.util;\n\nimport java.io.*;\nimport java.net.*;\nimport java.util.ArrayList;\nimport java.util.StringTokenizer;\nimport javax.swing.SwingUtilities;\n\nimport com.samskivert.util.StringUtil;\n\nimport robodj.Log;\n\n/**\n * A simple class used to remotely control the music server through its\n * network interface.\n */\npublic class ServerControl\n implements Runnable\n{\n /**\n * Used to report changes to the playing song.\n */\n public static interface PlayingListener\n {\n /**\n * Called when the playing song is known to have changed to the\n * specified songid.\n *\n * @param songid the id of the song that's playing or -1 if\n * nothing is playing.\n * @param paused true if the music daemon is paused.\n */\n public void playingUpdated (int songid, boolean paused);\n }\n\n public ServerControl (String 
host, int port)\n throws IOException\n {\n // create our server connection\n _conn = new Socket(host, port);\n // create our IO objects\n _in = new BufferedReader(new InputStreamReader(\n _conn.getInputStream()));\n _out = new PrintWriter(_conn.getOutputStream());\n }\n\n public void addPlayingListener (PlayingListener listener)\n {\n _listeners.add(listener);\n }\n\n public void removePlayingListener (PlayingListener listener)\n {\n _listeners.remove(listener);\n }\n\n public void pause ()\n {\n sendCommand(\"PAUSE\");\n refreshPlaying();\n }\n\n public void play ()\n {\n sendCommand(\"PLAY\");\n refreshPlaying();\n }\n\n public void stop ()\n {\n sendCommand(\"STOP\");\n refreshPlaying();\n }\n\n public void clear ()\n {\n sendCommand(\"CLEAR\");\n }\n\n public void back ()\n {\n sendCommand(\"BACK\");\n refreshPlaying();\n }\n\n public void skip ()\n {\n sendCommand(\"SKIP\");\n refreshPlaying();\n }\n\n public void append (int eid, int sid, String trackPath)\n {\n sendCommand(\"APPEND \" + eid + \" \" + sid + \" \" + trackPath);\n }\n\n public void remove (int sid)\n {\n sendCommand(\"REMOVE \" + sid);\n refreshPlaying();\n }\n\n public void removeGroup (int sid, int count)\n {\n sendCommand(\"REMOVEGRP \" + sid + \" \" + count);\n refreshPlaying();\n }\n\n public void skipto (int sid)\n {\n sendCommand(\"SKIPTO \" + sid);\n refreshPlaying();\n }\n\n public int getPlaying ()\n {\n refreshPlaying();\n return _playingSongId;\n }\n\n public void refreshPlaying ()\n {\n String playing = sendCommand(\"PLAYING\");\n\n // figure out if we're paused\n _paused = (playing.indexOf(\"paused\") != -1);\n\n // figure out what song is playing\n playing = StringUtil.split(playing, \":\")[1].trim();\n _playingSongId = -1;\n if (!playing.equals(\"\")) {\n try {\n _playingSongId = Integer.parseInt(playing);\n } catch (NumberFormatException nfe) {\n Log.warning(\"Unable to parse currently playing id '\" +\n playing + \"'.\");\n }\n }\n\n // let our listeners know about the new 
info\n SwingUtilities.invokeLater(this);\n }\n\n public String[] getPlaylist ()\n {\n String result = sendCommand(\"PLAYLIST\");\n ArrayList songs = new ArrayList();\n // parse the result string and then read the proper number of\n // playlist entries from the output\n if (!result.startsWith(\"200\")) {\n return null;\n }\n\n // the result looks like this:\n // 200 Playlist songs: 9 current: /export/.../02.mp3\n StringTokenizer tok = new StringTokenizer(result);\n // skip the first three tokens to get to the actual count\n tok.nextToken(); tok.nextToken(); tok.nextToken();\n int count = 0;\n\n try {\n count = Integer.parseInt(tok.nextToken());\n for (int i = 0; i < count; i++) {\n songs.add(_in.readLine());\n }\n\n } catch (IOException ioe) {\n Log.warning(\"Error communicating with music server: \" + ioe);\n return null;\n\n } catch (NumberFormatException nfe) {\n Log.warning(\"Bogus response from music server: \" + result);\n return null;\n }\n\n String[] plist = new String[count];\n songs.toArray(plist);\n return plist;\n }\n\n public void run ()\n {\n // notify our playing listeners\n for (int i = 0; i < _listeners.size(); i++) {\n PlayingListener listener = (PlayingListener)_listeners.get(i);\n try {\n listener.playingUpdated(_playingSongId, _paused);\n } catch (Exception e) {\n Log.warning(\"PlayingListener choked during update \" +\n \"[listener=\" + listener +\n \", songid=\" + _playingSongId + \"].\");\n Log.logStackTrace(e);\n }\n }\n }\n\n protected String sendCommand (String command)\n {\n try {\n Log.info(\"Sending: \" + command);\n _out.println(command);\n _out.flush();\n String rsp = _in.readLine();\n Log.info(\"Read response: \" + rsp);\n return rsp;\n\n } catch (IOException ioe) {\n Log.warning(\"Error communicating with server: \" + ioe);\n return null;\n }\n }\n\n protected Socket _conn;\n protected PrintWriter _out;\n protected BufferedReader _in;\n\n /** A list of entities that are informed when the playing song\n * changes. 
*/\n protected ArrayList _listeners = new ArrayList();\n\n /** The most recently fetched playing song id. */\n protected int _playingSongId;\n\n /** The most recently fetched paused state. */\n protected boolean _paused;\n}\n"},"new_file":{"kind":"string","value":"projects/robodj/src/java/robodj/util/ServerControl.java"},"old_contents":{"kind":"string","value":"//\n// $Id: ServerControl.java,v 1.4 2002/02/22 08:37:34 mdb Exp $\n\npackage robodj.util;\n\nimport java.io.*;\nimport java.net.*;\nimport java.util.ArrayList;\nimport java.util.StringTokenizer;\n\nimport robodj.Log;\n\n/**\n * A simple class used to remotely control the music server through its\n * network interface.\n */\npublic class ServerControl\n{\n public ServerControl (String host, int port)\n throws IOException\n {\n // create our server connection\n _conn = new Socket(host, port);\n // create our IO objects\n _in = new BufferedReader(new InputStreamReader(\n _conn.getInputStream()));\n _out = new PrintWriter(_conn.getOutputStream());\n }\n\n public void pause ()\n {\n sendCommand(\"PAUSE\");\n }\n\n public void play ()\n {\n sendCommand(\"PLAY\");\n }\n\n public void stop ()\n {\n sendCommand(\"STOP\");\n }\n\n public void clear ()\n {\n sendCommand(\"CLEAR\");\n }\n\n public void skip ()\n {\n sendCommand(\"SKIP\");\n }\n\n public void append (int eid, int sid, String trackPath)\n {\n sendCommand(\"APPEND \" + eid + \" \" + sid + \" \" + trackPath);\n }\n\n public void remove (int sid)\n {\n sendCommand(\"REMOVE \" + sid);\n }\n\n public void removeGroup (int sid, int count)\n {\n sendCommand(\"REMOVEGRP \" + sid + \" \" + count);\n }\n\n public void skipto (int sid)\n {\n sendCommand(\"SKIPTO \" + sid);\n }\n\n public String getPlaying ()\n {\n return sendCommand(\"PLAYING\");\n }\n\n public String[] getPlaylist ()\n {\n String result = sendCommand(\"PLAYLIST\");\n ArrayList songs = new ArrayList();\n // parse the result string and then read the proper number of\n // playlist entries from the 
output\n if (!result.startsWith(\"200\")) {\n return null;\n }\n\n // the result looks like this:\n // 200 Playlist songs: 9 current: /export/.../02.mp3\n StringTokenizer tok = new StringTokenizer(result);\n // skip the first three tokens to get to the actual count\n tok.nextToken(); tok.nextToken(); tok.nextToken();\n int count = 0;\n\n try {\n count = Integer.parseInt(tok.nextToken());\n for (int i = 0; i < count; i++) {\n songs.add(_in.readLine());\n }\n\n } catch (IOException ioe) {\n Log.warning(\"Error communicating with music server: \" + ioe);\n return null;\n\n } catch (NumberFormatException nfe) {\n Log.warning(\"Bogus response from music server: \" + result);\n return null;\n }\n\n String[] plist = new String[count];\n songs.toArray(plist);\n return plist;\n }\n\n protected String sendCommand (String command)\n {\n try {\n Log.info(\"Sending: \" + command);\n _out.println(command);\n _out.flush();\n String rsp = _in.readLine();\n Log.info(\"Read response: \" + rsp);\n return rsp;\n\n } catch (IOException ioe) {\n Log.warning(\"Error communicating with server: \" + ioe);\n return null;\n }\n }\n\n protected Socket _conn;\n protected PrintWriter _out;\n protected BufferedReader _in;\n}\n"},"message":{"kind":"string","value":"Added mechanism for reporting the \"playing\" state of the music server.\nAutomatically query the playing state after requesting a command that is\nlikely to modify it.\n\n\ngit-svn-id: 64ebf368729f38804935acb7146e017e0f909c6b@632 6335cc39-0255-0410-8fd6-9bcaacd3b74c\n"},"old_file":{"kind":"string","value":"projects/robodj/src/java/robodj/util/ServerControl.java"},"subject":{"kind":"string","value":"Added mechanism for reporting the \"playing\" state of the music server. 
Automatically query the playing state after requesting a command that is likely to modify it."},"git_diff":{"kind":"string","value":"rojects/robodj/src/java/robodj/util/ServerControl.java\n //\n// $Id: ServerControl.java,v 1.4 2002/02/22 08:37:34 mdb Exp $\n// $Id: ServerControl.java,v 1.5 2002/03/03 17:21:29 mdb Exp $\n \n package robodj.util;\n \n import java.net.*;\n import java.util.ArrayList;\n import java.util.StringTokenizer;\nimport javax.swing.SwingUtilities;\n\nimport com.samskivert.util.StringUtil;\n \n import robodj.Log;\n \n * network interface.\n */\n public class ServerControl\n implements Runnable\n {\n /**\n * Used to report changes to the playing song.\n */\n public static interface PlayingListener\n {\n /**\n * Called when the playing song is known to have changed to the\n * specified songid.\n *\n * @param songid the id of the song that's playing or -1 if\n * nothing is playing.\n * @param paused true if the music daemon is paused.\n */\n public void playingUpdated (int songid, boolean paused);\n }\n\n public ServerControl (String host, int port)\n throws IOException\n {\n _out = new PrintWriter(_conn.getOutputStream());\n }\n \n public void addPlayingListener (PlayingListener listener)\n {\n _listeners.add(listener);\n }\n\n public void removePlayingListener (PlayingListener listener)\n {\n _listeners.remove(listener);\n }\n\n public void pause ()\n {\n sendCommand(\"PAUSE\");\n refreshPlaying();\n }\n \n public void play ()\n {\n sendCommand(\"PLAY\");\n refreshPlaying();\n }\n \n public void stop ()\n {\n sendCommand(\"STOP\");\n refreshPlaying();\n }\n \n public void clear ()\n sendCommand(\"CLEAR\");\n }\n \n public void back ()\n {\n sendCommand(\"BACK\");\n refreshPlaying();\n }\n\n public void skip ()\n {\n sendCommand(\"SKIP\");\n refreshPlaying();\n }\n \n public void append (int eid, int sid, String trackPath)\n public void remove (int sid)\n {\n sendCommand(\"REMOVE \" + sid);\n refreshPlaying();\n }\n \n public void removeGroup (int 
sid, int count)\n {\n sendCommand(\"REMOVEGRP \" + sid + \" \" + count);\n refreshPlaying();\n }\n \n public void skipto (int sid)\n {\n sendCommand(\"SKIPTO \" + sid);\n }\n\n public String getPlaying ()\n {\n return sendCommand(\"PLAYING\");\n refreshPlaying();\n }\n\n public int getPlaying ()\n {\n refreshPlaying();\n return _playingSongId;\n }\n\n public void refreshPlaying ()\n {\n String playing = sendCommand(\"PLAYING\");\n\n // figure out if we're paused\n _paused = (playing.indexOf(\"paused\") != -1);\n\n // figure out what song is playing\n playing = StringUtil.split(playing, \":\")[1].trim();\n _playingSongId = -1;\n if (!playing.equals(\"\")) {\n try {\n _playingSongId = Integer.parseInt(playing);\n } catch (NumberFormatException nfe) {\n Log.warning(\"Unable to parse currently playing id '\" +\n playing + \"'.\");\n }\n }\n\n // let our listeners know about the new info\n SwingUtilities.invokeLater(this);\n }\n \n public String[] getPlaylist ()\n String[] plist = new String[count];\n songs.toArray(plist);\n return plist;\n }\n\n public void run ()\n {\n // notify our playing listeners\n for (int i = 0; i < _listeners.size(); i++) {\n PlayingListener listener = (PlayingListener)_listeners.get(i);\n try {\n listener.playingUpdated(_playingSongId, _paused);\n } catch (Exception e) {\n Log.warning(\"PlayingListener choked during update \" +\n \"[listener=\" + listener +\n \", songid=\" + _playingSongId + \"].\");\n Log.logStackTrace(e);\n }\n }\n }\n \n protected String sendCommand (String command)\n protected Socket _conn;\n protected PrintWriter _out;\n protected BufferedReader _in;\n\n /** A list of entities that are informed when the playing song\n * changes. */\n protected ArrayList _listeners = new ArrayList();\n\n /** The most recently fetched playing song id. */\n protected int _playingSongId;\n\n /** The most recently fetched paused state. 
*/\n protected boolean _paused;\n }"}}},{"rowIdx":2023,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"67e1dd180df7ec6a6749922401df3d0d82dae623"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"majorseitan/dataverse,leeper/dataverse-1,quarian/dataverse,quarian/dataverse,ekoi/DANS-DVN-4.6.1,quarian/dataverse,jacksonokuhn/dataverse,quarian/dataverse,leeper/dataverse-1,bmckinney/dataverse-canonical,majorseitan/dataverse,jacksonokuhn/dataverse,jacksonokuhn/dataverse,majorseitan/dataverse,majorseitan/dataverse,JayanthyChengan/dataverse,bmckinney/dataverse-canonical,ekoi/DANS-DVN-4.6.1,jacksonokuhn/dataverse,JayanthyChengan/dataverse,jacksonokuhn/dataverse,ekoi/DANS-DVN-4.6.1,bmckinney/dataverse-canonical,ekoi/DANS-DVN-4.6.1,leeper/dataverse-1,JayanthyChengan/dataverse,quarian/dataverse,ekoi/DANS-DVN-4.6.1,leeper/dataverse-1,quarian/dataverse,ekoi/DANS-DVN-4.6.1,majorseitan/dataverse,ekoi/DANS-DVN-4.6.1,leeper/dataverse-1,majorseitan/dataverse,JayanthyChengan/dataverse,bmckinney/dataverse-canonical,ekoi/DANS-DVN-4.6.1,majorseitan/dataverse,leeper/dataverse-1,bmckinney/dataverse-canonical,bmckinney/dataverse-canonical,JayanthyChengan/dataverse,JayanthyChengan/dataverse,quarian/dataverse,quarian/dataverse,leeper/dataverse-1,jacksonokuhn/dataverse,leeper/dataverse-1,bmckinney/dataverse-canonical,JayanthyChengan/dataverse,JayanthyChengan/dataverse,bmckinney/dataverse-canonical,majorseitan/dataverse,jacksonokuhn/dataverse,jacksonokuhn/dataverse"},"new_contents":{"kind":"string","value":"package edu.harvard.iq.dataverse.util.json;\n\nimport edu.harvard.iq.dataverse.DatasetFieldValue;\nimport java.math.BigDecimal;\nimport java.math.BigInteger;\nimport javax.json.Json;\nimport javax.json.JsonArrayBuilder;\nimport javax.json.JsonObject;\nimport javax.json.JsonObjectBuilder;\nimport javax.json.JsonValue;\n\n/**\n * A 
JSON builder that drops any null values. If we didn't drop'em,\n * we'd get an NPE from the standard JSON builder. But just omitting them\n * makes sense. So there.\n * \n * @author michael\n */\npublic class NullSafeJsonBuilder implements JsonObjectBuilder {\n\t\n\tpublic static NullSafeJsonBuilder jsonObjectBuilder() {\n\t\treturn new NullSafeJsonBuilder();\n\t}\n\t\n\tprivate final JsonObjectBuilder delegate;\n\t\n\tpublic NullSafeJsonBuilder() {\n\t\tdelegate = Json.createObjectBuilder();\n\t}\n\n\t@Override\n\tpublic NullSafeJsonBuilder add(String name, JsonValue value) {\n\t\tif ( value!=null ) delegate.add(name, value);\n\t\treturn this;\n\t}\n\n\t@Override\n\tpublic NullSafeJsonBuilder add(String name, String value) {\n\t\tif ( value!=null ) \n\t\t\t delegate.add(name, value);\n\t\treturn this;\n\t}\n\n\t@Override\n\tpublic NullSafeJsonBuilder add(String name, BigInteger value) {\n\t\tif ( value!=null ) \n\t\t\t delegate.add(name, value);\n\t\treturn this;\n\t}\n\n\t@Override\n\tpublic NullSafeJsonBuilder add(String name, BigDecimal value) {\n\t\tif ( value!=null ) \n\t\t\tdelegate.add(name, value);\n\t\t\n\t\treturn this;\n\t}\n\n\t@Override\n\tpublic NullSafeJsonBuilder add(String name, int value) {\n\t\tdelegate.add(name, value);\n\t\treturn this;\n\t}\n\n\t@Override\n\tpublic NullSafeJsonBuilder add(String name, long value) {\n\t\tdelegate.add(name, value);\n\t\treturn this;\n\t}\n\n\t@Override\n\tpublic NullSafeJsonBuilder add(String name, double value) {\n\t\tdelegate.add(name, value);\n\t\treturn this;\n\t}\n\n\t@Override\n\tpublic NullSafeJsonBuilder add(String name, boolean value) {\n\t\tdelegate.add(name, value);\n\t\treturn this;\n\t}\n\n\t@Override\n\tpublic NullSafeJsonBuilder addNull(String name) {\n\t\tdelegate.addNull(name);\n\t\treturn this;\n\t}\n\n\t@Override\n\tpublic NullSafeJsonBuilder add(String name, JsonObjectBuilder builder) {\n\t\tif ( builder!=null ) \n\t\t\t delegate.add(name, builder);\n\t\treturn 
this;\n\t}\n\n\t@Override\n\tpublic NullSafeJsonBuilder add(String name, JsonArrayBuilder builder) {\n\t\tif ( builder!=null ) \n\t\t\tdelegate.add(name, builder);\n\t\treturn this;\n\t}\n\t\n\tpublic NullSafeJsonBuilder addStrValue( String name, DatasetFieldValue field ) {\n\t\tif ( field != null ) {\n\t\t\tdelegate.add( name, field.getValue() );\n\t\t}\n\t\treturn this;\n\t}\n\t\n\t@Override\n\tpublic JsonObject build() {\n\t\treturn delegate.build();\n\t}\n\t\n\t\n}\n"},"new_file":{"kind":"string","value":"src/main/java/edu/harvard/iq/dataverse/util/json/NullSafeJsonBuilder.java"},"old_contents":{"kind":"string","value":"package edu.harvard.iq.dataverse.util.json;\n\nimport edu.harvard.iq.dataverse.DatasetFieldValue;\nimport java.math.BigDecimal;\nimport java.math.BigInteger;\nimport javax.json.Json;\nimport javax.json.JsonArrayBuilder;\nimport javax.json.JsonObject;\nimport javax.json.JsonObjectBuilder;\nimport javax.json.JsonValue;\n\n/**\n * A JSON builder that drops any null values. If we didn't drop'em,\n * we'd get an NPE from the standard JSON builder. But just omitting them\n * makes sense. 
So there.\n * \n * @author michael\n */\npublic class NullSafeJsonBuilder implements JsonObjectBuilder {\n\t\n\tpublic static NullSafeJsonBuilder jsonObjectBuilder() {\n\t\treturn new NullSafeJsonBuilder();\n\t}\n\t\n\tprivate final JsonObjectBuilder delegate;\n\t\n\tpublic NullSafeJsonBuilder() {\n\t\tdelegate = Json.createObjectBuilder();\n\t}\n\n\t@Override\n\tpublic NullSafeJsonBuilder add(String name, JsonValue value) {\n\t\tif ( value!=null ) delegate.add(name, value);\n\t\treturn this;\n\t}\n\n\t@Override\n\tpublic NullSafeJsonBuilder add(String name, String value) {\n\t\tif ( value!=null ) \n\t\t\t delegate.add(name, value);\n\t\treturn this;\n\t}\n\n\t@Override\n\tpublic NullSafeJsonBuilder add(String name, BigInteger value) {\n\t\tif ( value!=null ) \n\t\t\t delegate.add(name, value);\n\t\treturn this;\n\t}\n\n\t@Override\n\tpublic NullSafeJsonBuilder add(String name, BigDecimal value) {\n\t\tif ( value!=null ) \n\t\t\tdelegate.add(name, value);\n\t\t\n\t\treturn this;\n\t}\n\n\t@Override\n\tpublic NullSafeJsonBuilder add(String name, int value) {\n\t\tdelegate.add(name, value);\n\t\treturn this;\n\t}\n\n\t@Override\n\tpublic NullSafeJsonBuilder add(String name, long value) {\n\t\tdelegate.add(name, value);\n\t\treturn this;\n\t}\n\n\t@Override\n\tpublic NullSafeJsonBuilder add(String name, double value) {\n\t\tdelegate.add(name, value);\n\t\treturn this;\n\t}\n\n\t@Override\n\tpublic NullSafeJsonBuilder add(String name, boolean value) {\n\t\tdelegate.add(name, value);\n\t\treturn this;\n\t}\n\n\t@Override\n\tpublic NullSafeJsonBuilder addNull(String name) {\n\t\tdelegate.addNull(name);\n\t\treturn this;\n\t}\n\n\t@Override\n\tpublic NullSafeJsonBuilder add(String name, JsonObjectBuilder builder) {\n\t\tif ( builder!=null ) \n\t\t\t delegate.add(name, builder);\n\t\treturn this;\n\t}\n\n\t@Override\n\tpublic NullSafeJsonBuilder add(String name, JsonArrayBuilder builder) {\n\t\tif ( builder!=null ) \n\t\t\tdelegate.add(name, builder);\n\t\treturn 
this;\n\t}\n\t\n\tpublic NullSafeJsonBuilder addStrValue( String name, DatasetFieldValue field ) {\n\t\tif ( field != null ) {\n\t\t\tdelegate.add( name, field.getStrValue() );\n\t\t}\n\t\treturn this;\n\t}\n\t\n\t@Override\n\tpublic JsonObject build() {\n\t\treturn delegate.build();\n\t}\n\t\n\t\n}\n"},"message":{"kind":"string","value":"changed called from getStrValue (deprecated) to getValue"},"old_file":{"kind":"string","value":"src/main/java/edu/harvard/iq/dataverse/util/json/NullSafeJsonBuilder.java"},"subject":{"kind":"string","value":"changed called from getStrValue (deprecated) to getValue"},"git_diff":{"kind":"string","value":"rc/main/java/edu/harvard/iq/dataverse/util/json/NullSafeJsonBuilder.java\n \t\n \tpublic NullSafeJsonBuilder addStrValue( String name, DatasetFieldValue field ) {\n \t\tif ( field != null ) {\n\t\t\tdelegate.add( name, field.getStrValue() );\n\t\t\tdelegate.add( name, field.getValue() );\n \t\t}\n \t\treturn this;\n \t}"}}},{"rowIdx":2024,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"agpl-3.0"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"fce00d3c329b0100bb0e06c1e59defa44a7307ee"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"imCodePartnerAB/imcms,imCodePartnerAB/imcms,imCodePartnerAB/imcms"},"new_contents":{"kind":"string","value":"package imcode.server.document;\n\nimport imcode.util.Utility;\nimport imcode.util.io.ExceptionFreeInputStreamSource;\nimport imcode.util.io.FileInputStreamSource;\nimport imcode.util.io.InputStreamSource;\nimport org.apache.commons.collections.MapUtils;\nimport org.apache.commons.lang.NullArgumentException;\nimport org.apache.commons.lang.UnhandledException;\n\nimport java.io.File;\nimport java.io.Serializable;\nimport java.util.HashMap;\nimport java.util.Map;\n\n/**\n * A FileDocumentDomainObject contains a collection files.\n * In this context a file is set of attributes associated with a data 
{@link FileDocumentFile}.\n *

\n * A file is identified by fileId - a string which is unique to a FileDocumentDomainObject.\n */\npublic class FileDocumentDomainObject extends DocumentDomainObject {\n\n public static final String MIME_TYPE__APPLICATION_OCTET_STREAM = \"application/octet-stream\";\n\n @SuppressWarnings(\"unused\")\n public static final String MIME_TYPE__UNKNOWN_DEFAULT = MIME_TYPE__APPLICATION_OCTET_STREAM;\n\n // key: file id\n private Map files = createFilesMap();\n\n private String defaultFileId;\n\n public DocumentTypeDomainObject getDocumentType() {\n return DocumentTypeDomainObject.FILE;\n }\n\n public void accept(DocumentVisitor documentVisitor) {\n documentVisitor.visitFileDocument(this);\n }\n\n public void addFile(String fileId, FileDocumentFile file) {\n if (null == fileId) {\n throw new NullArgumentException(\"fileId\");\n }\n if (!files.containsKey(defaultFileId)) {\n defaultFileId = fileId;\n }\n FileDocumentFile fileClone = cloneFile(file);\n fileClone.setId(fileId);\n files.put(fileId, fileClone);\n }\n\n /**\n * @param file file to clone\n * @return file clone or null if provided file is null\n */\n private FileDocumentFile cloneFile(FileDocumentFile file) {\n if (null == file) {\n return null;\n }\n FileDocumentFile fileClone;\n try {\n fileClone = file.clone();\n } catch (CloneNotSupportedException e) {\n throw new UnhandledException(e);\n }\n return fileClone;\n }\n\n public Map getFiles() {\n Map map = createFilesMap();\n map.putAll(files);\n return map;\n }\n\n @SuppressWarnings(\"unchecked\")\n private Map createFilesMap() {\n return MapUtils.orderedMap(new HashMap());\n }\n\n public FileDocumentFile getFile(String fileId) {\n return cloneFile(files.get(fileId));\n }\n\n public FileDocumentFile removeFile(String fileId) {\n FileDocumentFile fileDocumentFile = files.remove(fileId);\n selectDefaultFileName(fileId);\n return fileDocumentFile;\n }\n\n private void selectDefaultFileName(String fileId) {\n if (files.isEmpty()) {\n defaultFileId = null;\n } else 
if (defaultFileId.equals(fileId)) {\n defaultFileId = Utility.firstElementOfSetByOrderOf(files.keySet(), String.CASE_INSENSITIVE_ORDER);\n }\n }\n\n public String getDefaultFileId() {\n return defaultFileId;\n }\n\n public void setDefaultFileId(String defaultFileId) {\n if (!files.containsKey(defaultFileId)) {\n throw new IllegalArgumentException(\"Cannot set defaultFile to non-existant key \"\n + defaultFileId);\n }\n this.defaultFileId = defaultFileId;\n }\n\n /**\n * @param fileId\n * @return file with fileId or default file if fileId is null or there is no file with a such id.\n */\n public FileDocumentFile getFileOrDefault(String fileId) {\n if (null == fileId) {\n return getDefaultFile();\n }\n FileDocumentFile fileDocumentFile = getFile(fileId);\n if (null == fileDocumentFile) {\n fileDocumentFile = getDefaultFile();\n }\n return fileDocumentFile;\n }\n\n public FileDocumentFile getDefaultFile() {\n return getFile(defaultFileId);\n\n }\n\n @SuppressWarnings(\"unused\")\n public void changeFileId(String oldFileId, String newFileId) {\n if (null == oldFileId) {\n throw new NullArgumentException(\"oldFileId\");\n }\n if (null == newFileId) {\n throw new NullArgumentException(\"newFileId\");\n }\n if (!files.containsKey(oldFileId)) {\n throw new IllegalStateException(\"There is no file with the id \" + oldFileId);\n }\n if (oldFileId.equals(newFileId)) {\n return;\n }\n if (files.containsKey(newFileId)) {\n throw new IllegalStateException(\"There already is a file with the id \" + newFileId);\n }\n addFile(newFileId, files.remove(oldFileId));\n if (defaultFileId.equals(oldFileId)) {\n defaultFileId = newFileId;\n }\n }\n\n /**\n * File attributes associated with a data.\n *\n * @see imcode.util.io.InputStreamSource\n */\n public static class FileDocumentFile implements Cloneable, Serializable {\n\n private String id;\n\n /**\n * If this object represent a new file then assigned by the system before the file is stored in a FS.\n * Otherwise set by the system when 
FileDocumentDomainObject is initialized.\n */\n private String filename;\n\n private String mimeType;\n private InputStreamSource inputStreamSource;\n private boolean createdAsImage;\n\n public String getFilename() {\n return filename;\n }\n\n public void setFilename(String v) {\n this.filename = v;\n }\n\n public String getMimeType() {\n return mimeType;\n }\n\n public void setMimeType(String mimeType) {\n this.mimeType = mimeType;\n }\n\n public InputStreamSource getInputStreamSource() {\n return new ExceptionFreeInputStreamSource(inputStreamSource);\n }\n\n public void setInputStreamSource(InputStreamSource inputStreamSource) {\n this.inputStreamSource = inputStreamSource;\n }\n\n public boolean isFileInputStreamSource() {\n return inputStreamSource instanceof FileInputStreamSource;\n }\n\n public boolean isCreatedAsImage() {\n return createdAsImage;\n }\n\n public void setCreatedAsImage(boolean createdAsImage) {\n this.createdAsImage = createdAsImage;\n }\n\n public String getId() {\n return id;\n }\n\n public void setId(String id) {\n this.id = id;\n }\n\n public FileDocumentFile clone() throws CloneNotSupportedException {\n return (FileDocumentFile) super.clone();\n }\n\n public File getFile() {\n return ((FileInputStreamSource) inputStreamSource).getFile();\n }\n }\n}"},"new_file":{"kind":"string","value":"src/main/java/imcode/server/document/FileDocumentDomainObject.java"},"old_contents":{"kind":"string","value":"package imcode.server.document;\n\nimport imcode.util.Utility;\nimport imcode.util.io.ExceptionFreeInputStreamSource;\nimport imcode.util.io.FileInputStreamSource;\nimport imcode.util.io.InputStreamSource;\nimport org.apache.commons.collections.MapUtils;\nimport org.apache.commons.lang.NullArgumentException;\nimport org.apache.commons.lang.UnhandledException;\n\nimport java.io.File;\nimport java.io.Serializable;\nimport java.util.HashMap;\nimport java.util.Map;\n\n/**\n * A FileDocumentDomainObject contains a collection files.\n * In this context a 
file is set of attributes associated with a data {@link FileDocumentFile}.\n *

\n * A file is identified by fileId - a string which is unique to a FileDocumentDomainObject.\n */\npublic class FileDocumentDomainObject extends DocumentDomainObject {\n\n public static final String MIME_TYPE__APPLICATION_OCTET_STREAM = \"application/octet-stream\";\n\n @SuppressWarnings(\"unused\")\n public static final String MIME_TYPE__UNKNOWN_DEFAULT = MIME_TYPE__APPLICATION_OCTET_STREAM;\n\n // key: file id\n private Map files = createFilesMap();\n\n private String defaultFileId;\n\n public DocumentTypeDomainObject getDocumentType() {\n return DocumentTypeDomainObject.FILE;\n }\n\n public void accept(DocumentVisitor documentVisitor) {\n documentVisitor.visitFileDocument(this);\n }\n\n public void addFile(String fileId, FileDocumentFile file) {\n if (null == fileId) {\n throw new NullArgumentException(\"fileId\");\n }\n if (!files.containsKey(defaultFileId)) {\n defaultFileId = fileId;\n }\n FileDocumentFile fileClone = cloneFile(file);\n fileClone.setId(fileId);\n files.put(fileId, fileClone);\n }\n\n /**\n * @param file file to clone\n * @return file clone or null if provided file is null\n */\n private FileDocumentFile cloneFile(FileDocumentFile file) {\n if (null == file) {\n return null;\n }\n FileDocumentFile fileClone;\n try {\n fileClone = file.clone();\n } catch (CloneNotSupportedException e) {\n throw new UnhandledException(e);\n }\n return fileClone;\n }\n\n public Map getFiles() {\n Map map = createFilesMap();\n map.putAll(files);\n return map;\n }\n\n @SuppressWarnings(\"unchecked\")\n private Map createFilesMap() {\n return MapUtils.orderedMap(new HashMap());\n }\n\n public FileDocumentFile getFile(String fileId) {\n return cloneFile(files.get(fileId));\n }\n\n public FileDocumentFile removeFile(String fileId) {\n FileDocumentFile fileDocumentFile = files.remove(fileId);\n selectDefaultFileName(fileId);\n return fileDocumentFile;\n }\n\n private void selectDefaultFileName(String fileId) {\n if (files.isEmpty()) {\n defaultFileId = null;\n } else 
if (defaultFileId.equals(fileId)) {\n defaultFileId = Utility.firstElementOfSetByOrderOf(files.keySet(), String.CASE_INSENSITIVE_ORDER);\n }\n }\n\n public String getDefaultFileId() {\n return defaultFileId;\n }\n\n public void setDefaultFileId(String defaultFileId) {\n if (!files.containsKey(defaultFileId)) {\n throw new IllegalArgumentException(\"Cannot set defaultFileId to non-existant key \"\n + defaultFileId);\n }\n this.defaultFileId = defaultFileId;\n }\n\n /**\n * @param fileId\n * @return file with fileId or default file if fileId is null or there is no file with a such id.\n */\n public FileDocumentFile getFileOrDefault(String fileId) {\n if (null == fileId) {\n return getDefaultFile();\n }\n FileDocumentFile fileDocumentFile = getFile(fileId);\n if (null == fileDocumentFile) {\n fileDocumentFile = getDefaultFile();\n }\n return fileDocumentFile;\n }\n\n public FileDocumentFile getDefaultFile() {\n return getFile(defaultFileId);\n\n }\n\n @SuppressWarnings(\"unused\")\n public void changeFileId(String oldFileId, String newFileId) {\n if (null == oldFileId) {\n throw new NullArgumentException(\"oldFileId\");\n }\n if (null == newFileId) {\n throw new NullArgumentException(\"newFileId\");\n }\n if (!files.containsKey(oldFileId)) {\n throw new IllegalStateException(\"There is no file with the id \" + oldFileId);\n }\n if (oldFileId.equals(newFileId)) {\n return;\n }\n if (files.containsKey(newFileId)) {\n throw new IllegalStateException(\"There already is a file with the id \" + newFileId);\n }\n addFile(newFileId, files.remove(oldFileId));\n if (defaultFileId.equals(oldFileId)) {\n defaultFileId = newFileId;\n }\n }\n\n /**\n * File attributes associated with a data.\n *\n * @see imcode.util.io.InputStreamSource\n */\n public static class FileDocumentFile implements Cloneable, Serializable {\n\n private String id;\n\n /**\n * If this object represent a new file then assigned by the system before the file is stored in a FS.\n * Otherwise set by the system 
when FileDocumentDomainObject is initialized.\n */\n private String filename;\n\n private String mimeType;\n private InputStreamSource inputStreamSource;\n private boolean createdAsImage;\n\n public String getFilename() {\n return filename;\n }\n\n public void setFilename(String v) {\n this.filename = v;\n }\n\n public String getMimeType() {\n return mimeType;\n }\n\n public void setMimeType(String mimeType) {\n this.mimeType = mimeType;\n }\n\n public InputStreamSource getInputStreamSource() {\n return new ExceptionFreeInputStreamSource(inputStreamSource);\n }\n\n public void setInputStreamSource(InputStreamSource inputStreamSource) {\n this.inputStreamSource = inputStreamSource;\n }\n\n public boolean isFileInputStreamSource() {\n return inputStreamSource instanceof FileInputStreamSource;\n }\n\n public boolean isCreatedAsImage() {\n return createdAsImage;\n }\n\n public void setCreatedAsImage(boolean createdAsImage) {\n this.createdAsImage = createdAsImage;\n }\n\n public String getId() {\n return id;\n }\n\n public void setId(String id) {\n this.id = id;\n }\n\n public FileDocumentFile clone() throws CloneNotSupportedException {\n return (FileDocumentFile) super.clone();\n }\n\n public File getFile() {\n return ((FileInputStreamSource) inputStreamSource).getFile();\n }\n }\n}"},"message":{"kind":"string","value":"IMCMS-255 - Upgrade server side to work with new client:\n- Naming fixes.\n"},"old_file":{"kind":"string","value":"src/main/java/imcode/server/document/FileDocumentDomainObject.java"},"subject":{"kind":"string","value":"IMCMS-255 - Upgrade server side to work with new client: - Naming fixes."},"git_diff":{"kind":"string","value":"rc/main/java/imcode/server/document/FileDocumentDomainObject.java\n \n public void setDefaultFileId(String defaultFileId) {\n if (!files.containsKey(defaultFileId)) {\n throw new IllegalArgumentException(\"Cannot set defaultFileId to non-existant key \"\n throw new IllegalArgumentException(\"Cannot set defaultFile to 
non-existant key \"\n + defaultFileId);\n }\n this.defaultFileId = defaultFileId;"}}},{"rowIdx":2025,"cells":{"lang":{"kind":"string","value":"JavaScript"},"license":{"kind":"string","value":"mit"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"a81cbb1c4899a1c0d826bba2738b199c0071181c"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"treykc78/popcorn-js,TwoD/popcorn-js,ryanirelan/popcorn-js,azmenak/popcorn-js,stevemao/popcorn-js,ryanirelan/popcorn-js,Qambar/popcorn-js,josedab/popcorn-js,megomars/popcornjsdemo,stevemao/popcorn-js,Qambar/popcorn-js,cadecairos/popcorn-js,mozilla/popcorn-js,justindelacruz/popcorn-js,pculture/popcorn-js,mozilla/popcorn-js,pculture/popcorn-js,josedab/popcorn-js,azmenak/popcorn-js,ScottDowne/popcorn-js,TwoD/popcorn-js,azmenak/popcorn-js,mbuttu/popcorn-js,megomars/popcornjsdemo,mozilla/popcorn-js,ivesbai/popcorn-js,treykc78/popcorn-js,rwaldron/popcorn-js,ScottDowne/popcorn-js,justindelacruz/popcorn-js,Qambar/popcorn-js,rwaldron/popcorn-js,stevemao/popcorn-js,mbuttu/popcorn-js,ivesbai/popcorn-js,treykc78/popcorn-js,ivesbai/popcorn-js,cadecairos/popcorn-js,pculture/popcorn-js,ryanirelan/popcorn-js,justindelacruz/popcorn-js"},"new_contents":{"kind":"string","value":"(function(global, document) {\n\n // Popcorn.js does not support archaic browsers\n if ( !document.addEventListener ) {\n global.Popcorn = {};\n\n var methods = ( \"removeInstance addInstance getInstanceById removeInstanceById \" +\n \"forEach extend effects error guid sizeOf isArray nop position disable enable destroy \" +\n \"addTrackEvent removeTrackEvent getTrackEvents getTrackEvent getLastTrackEventId \" +\n \"timeUpdate plugin removePlugin compose effect parser xhr getJSONP getScript\" ).split(/\\s+/);\n\n while( methods.length ) {\n global.Popcorn[ methods.shift() ] = function() {};\n }\n return;\n }\n\n var\n\n AP = Array.prototype,\n OP = Object.prototype,\n\n forEach = AP.forEach,\n slice = AP.slice,\n 
hasOwn = OP.hasOwnProperty,\n toString = OP.toString,\n\n // ID string matching\n rIdExp = /^(#([\\w\\-\\_\\.]+))$/,\n\n // Ready fn cache\n readyStack = [],\n readyBound = false,\n readyFired = false,\n\n // Non-public internal data object\n internal = {\n events: {\n hash: {},\n apis: {}\n }\n },\n\n // Non-public `requestAnimFrame`\n // http://paulirish.com/2011/requestanimationframe-for-smart-animating/\n requestAnimFrame = (function(){\n return global.requestAnimationFrame ||\n global.webkitRequestAnimationFrame ||\n global.mozRequestAnimationFrame ||\n global.oRequestAnimationFrame ||\n global.msRequestAnimationFrame ||\n function( callback, element ) {\n global.setTimeout( callback, 16 );\n };\n }()),\n\n // Declare constructor\n // Returns an instance object.\n Popcorn = function( entity, options ) {\n // Return new Popcorn object\n return new Popcorn.p.init( entity, options || null );\n };\n\n // Instance caching\n Popcorn.instances = [];\n Popcorn.instanceIds = {};\n\n Popcorn.removeInstance = function( instance ) {\n // If called prior to any instances being created\n // Return early to avoid splicing on nothing\n if ( !Popcorn.instances.length ) {\n return;\n }\n\n // Remove instance from Popcorn.instances\n Popcorn.instances.splice( Popcorn.instanceIds[ instance.id ], 1 );\n\n // Delete the instance id key\n delete Popcorn.instanceIds[ instance.id ];\n\n // Return current modified instances\n return Popcorn.instances;\n };\n\n // Addes a Popcorn instance to the Popcorn instance array\n Popcorn.addInstance = function( instance ) {\n\n var instanceLen = Popcorn.instances.length,\n instanceId = instance.media.id && instance.media.id;\n\n // If the media element has its own `id` use it, otherwise provide one\n // Ensure that instances have unique ids and unique entries\n // Uses `in` operator to avoid false positives on 0\n instance.id = !( instanceId in Popcorn.instanceIds ) && instanceId ||\n \"__popcorn\" + instanceLen;\n\n // Create a reference entry 
for this instance\n Popcorn.instanceIds[ instance.id ] = instanceLen;\n\n // Add this instance to the cache\n Popcorn.instances.push( instance );\n\n // Return the current modified instances\n return Popcorn.instances;\n };\n\n // Request Popcorn object instance by id\n Popcorn.getInstanceById = function( id ) {\n return Popcorn.instances[ Popcorn.instanceIds[ id ] ];\n };\n\n // Remove Popcorn object instance by id\n Popcorn.removeInstanceById = function( id ) {\n return Popcorn.removeInstance( Popcorn.instances[ Popcorn.instanceIds[ id ] ] );\n };\n\n // Declare a shortcut (Popcorn.p) to and a definition of\n // the new prototype for our Popcorn constructor\n Popcorn.p = Popcorn.prototype = {\n\n init: function( entity, options ) {\n\n var matches;\n\n // Supports Popcorn(function () { /../ })\n // Originally proposed by Daniel Brooks\n\n if ( typeof entity === \"function\" ) {\n\n // If document ready has already fired\n if ( document.readyState === \"interactive\" || document.readyState === \"complete\" ) {\n\n entity( document, Popcorn );\n\n return;\n }\n // Add `entity` fn to ready stack\n readyStack.push( entity );\n\n // This process should happen once per page load\n if ( !readyBound ) {\n\n // set readyBound flag\n readyBound = true;\n\n var DOMContentLoaded = function() {\n\n readyFired = true;\n\n // Remove global DOM ready listener\n document.removeEventListener( \"DOMContentLoaded\", DOMContentLoaded, false );\n\n // Execute all ready function in the stack\n for ( var i = 0, readyStackLength = readyStack.length; i < readyStackLength; i++ ) {\n\n readyStack[ i ].call( document, Popcorn );\n\n }\n // GC readyStack\n readyStack = null;\n };\n\n // Register global DOM ready listener\n document.addEventListener( \"DOMContentLoaded\", DOMContentLoaded, false );\n }\n\n return;\n }\n\n // Check if entity is a valid string id\n matches = rIdExp.exec( entity );\n\n // Get media element by id or object reference\n this.media = matches && matches.length && 
matches[ 2 ] ?\n document.getElementById( matches[ 2 ] ) :\n entity;\n\n // Create an audio or video element property reference\n this[ ( this.media.nodeName && this.media.nodeName.toLowerCase() ) || \"video\" ] = this.media;\n\n // Register new instance\n Popcorn.addInstance( this );\n\n this.options = options || {};\n\n this.isDestroyed = false;\n\n this.data = {\n\n // Allows disabling a plugin per instance\n disabled: [],\n\n // Stores DOM event queues by type\n events: {},\n\n // Stores Special event hooks data\n hooks: {},\n\n // Store track event history data\n history: [],\n\n // Stores ad-hoc state related data]\n state: {\n volume: this.media.volume\n },\n\n // Store track event object references by trackId\n trackRefs: {},\n\n // Playback track event queues\n trackEvents: {\n byStart: [{\n\n start: -1,\n end: -1\n }],\n byEnd: [{\n start: -1,\n end: -1\n }],\n animating: [],\n startIndex: 0,\n endIndex: 0,\n previousUpdateTime: -1\n }\n };\n\n // Wrap true ready check\n var isReady = function( that ) {\n\n var duration, videoDurationPlus, animate;\n\n if ( that.media.readyState >= 2 ) {\n // Adding padding to the front and end of the arrays\n // this is so we do not fall off either end\n\n duration = that.media.duration;\n // Check for no duration info (NaN)\n videoDurationPlus = duration != duration ? 
Number.MAX_VALUE : duration + 1;\n\n Popcorn.addTrackEvent( that, {\n start: videoDurationPlus,\n end: videoDurationPlus\n });\n\n if ( that.options.frameAnimation ) {\n // if Popcorn is created with frameAnimation option set to true,\n // requestAnimFrame is used instead of \"timeupdate\" media event.\n // This is for greater frame time accuracy, theoretically up to\n // 60 frames per second as opposed to ~4 ( ~every 15-250ms)\n animate = function () {\n\n Popcorn.timeUpdate( that, {} );\n\n that.trigger( \"timeupdate\" );\n\n requestAnimFrame( animate );\n };\n\n requestAnimFrame( animate );\n\n } else {\n\n that.data.timeUpdateFunction = function( event ) {\n Popcorn.timeUpdate( that, event );\n };\n\n if ( !that.isDestroyed ) {\n that.media.addEventListener( \"timeupdate\", that.data.timeUpdateFunction, false );\n }\n }\n } else {\n global.setTimeout(function() {\n isReady( that );\n }, 1 );\n }\n };\n\n isReady( this );\n\n return this;\n }\n };\n\n // Extend constructor prototype to instance prototype\n // Allows chaining methods to instances\n Popcorn.p.init.prototype = Popcorn.p;\n\n Popcorn.forEach = function( obj, fn, context ) {\n\n if ( !obj || !fn ) {\n return {};\n }\n\n context = context || this;\n\n var key, len;\n\n // Use native whenever possible\n if ( forEach && obj.forEach === forEach ) {\n return obj.forEach( fn, context );\n }\n\n if ( toString.call( obj ) === \"[object NodeList]\" ) {\n for ( key = 0, len = obj.length; key < len; key++ ) {\n fn.call( context, obj[ key ], key, obj );\n }\n return obj;\n }\n\n for ( key in obj ) {\n if ( hasOwn.call( obj, key ) ) {\n fn.call( context, obj[ key ], key, obj );\n }\n }\n return obj;\n };\n\n Popcorn.extend = function( obj ) {\n var dest = obj, src = slice.call( arguments, 1 );\n\n Popcorn.forEach( src, function( copy ) {\n for ( var prop in copy ) {\n dest[ prop ] = copy[ prop ];\n }\n });\n\n return dest;\n };\n\n\n // A Few reusable utils, memoized onto Popcorn\n Popcorn.extend( Popcorn, {\n 
error: function( msg ) {\n throw new Error( msg );\n },\n guid: function( prefix ) {\n Popcorn.guid.counter++;\n return ( prefix ? prefix : \"\" ) + ( +new Date() + Popcorn.guid.counter );\n },\n sizeOf: function( obj ) {\n var size = 0;\n\n for ( var prop in obj ) {\n size++;\n }\n\n return size;\n },\n isArray: Array.isArray || function( array ) {\n return toString.call( array ) === \"[object Array]\";\n },\n\n nop: function() {},\n\n position: function( elem ) {\n\n var clientRect = elem.getBoundingClientRect(),\n bounds = {},\n doc = elem.ownerDocument,\n docElem = document.documentElement,\n body = document.body,\n clientTop, clientLeft, scrollTop, scrollLeft, top, left;\n\n // Determine correct clientTop/Left\n clientTop = docElem.clientTop || body.clientTop || 0;\n clientLeft = docElem.clientLeft || body.clientLeft || 0;\n\n // Determine correct scrollTop/Left\n scrollTop = ( global.pageYOffset && docElem.scrollTop || body.scrollTop );\n scrollLeft = ( global.pageXOffset && docElem.scrollLeft || body.scrollLeft );\n\n // Temp top/left\n top = Math.ceil( clientRect.top + scrollTop - clientTop );\n left = Math.ceil( clientRect.left + scrollLeft - clientLeft );\n\n for ( var p in clientRect ) {\n bounds[ p ] = Math.round( clientRect[ p ] );\n }\n\n return Popcorn.extend({}, bounds, { top: top, left: left });\n },\n\n disable: function( instance, plugin ) {\n\n var disabled = instance.data.disabled;\n\n if ( disabled.indexOf( plugin ) === -1 ) {\n disabled.push( plugin );\n }\n\n return instance;\n },\n enable: function( instance, plugin ) {\n\n var disabled = instance.data.disabled,\n index = disabled.indexOf( plugin );\n\n if ( index > -1 ) {\n disabled.splice( index, 1 );\n }\n\n return instance;\n },\n destroy: function( instance ) {\n var events = instance.data.events,\n singleEvent, item, fn;\n\n // Iterate through all events and remove them\n for ( item in events ) {\n singleEvent = events[ item ];\n for ( fn in singleEvent ) {\n delete singleEvent[ fn 
];\n }\n events[ item ] = null;\n }\n\n if ( !instance.isDestroyed ) {\n instance.media.removeEventListener( \"timeupdate\", instance.data.timeUpdateFunction, false );\n instance.isDestroyed = true;\n }\n\n Popcorn.instances.splice( Popcorn.instanceIds[ instance.id ], 1 );\n\n delete Popcorn.instanceIds[ instance.id ];\n }\n });\n\n // Memoized GUID Counter\n Popcorn.guid.counter = 1;\n\n // Factory to implement getters, setters and controllers\n // as Popcorn instance methods. The IIFE will create and return\n // an object with defined methods\n Popcorn.extend(Popcorn.p, (function() {\n\n var methods = \"load play pause currentTime playbackRate volume duration preload playbackRate \" +\n \"autoplay loop controls muted buffered readyState seeking paused played seekable ended\",\n ret = {};\n\n\n // Build methods, store in object that is returned and passed to extend\n Popcorn.forEach( methods.split( /\\s+/g ), function( name ) {\n\n ret[ name ] = function( arg ) {\n\n if ( typeof this.media[ name ] === \"function\" ) {\n this.media[ name ]();\n\n return this;\n }\n\n\n if ( arg != null ) {\n\n this.media[ name ] = arg;\n\n return this;\n }\n\n return this.media[ name ];\n };\n });\n\n return ret;\n\n })()\n );\n\n Popcorn.forEach( \"enable disable\".split(\" \"), function( method ) {\n Popcorn.p[ method ] = function( plugin ) {\n return Popcorn[ method ]( this, plugin );\n };\n });\n\n Popcorn.extend(Popcorn.p, {\n\n // Rounded currentTime\n roundTime: function() {\n return -~this.media.currentTime;\n },\n\n // Attach an event to a single point in time\n exec: function( time, fn ) {\n\n // Creating a one second track event with an empty end\n Popcorn.addTrackEvent( this, {\n start: time,\n end: time + 1,\n _running: false,\n _natives: {\n start: fn || Popcorn.nop,\n end: Popcorn.nop,\n type: \"exec\"\n }\n });\n\n return this;\n },\n\n // Mute the calling media, optionally toggle\n mute: function( toggle ) {\n\n var event = toggle == null || toggle === true ? 
\"muted\" : \"unmuted\";\n\n // If `toggle` is explicitly `false`,\n // unmute the media and restore the volume level\n if ( event === \"unmuted\" ) {\n this.media.muted = false;\n this.media.volume = this.data.state.volume;\n }\n\n // If `toggle` is either null or undefined,\n // save the current volume and mute the media element\n if ( event === \"muted\" ) {\n this.data.state.volume = this.media.volume;\n this.media.muted = true;\n }\n\n // Trigger either muted|unmuted event\n this.trigger( event );\n\n return this;\n },\n\n // Convenience method, unmute the calling media\n unmute: function( toggle ) {\n\n return this.mute( toggle == null ? false : !toggle );\n },\n\n // Get the client bounding box of an instance element\n position: function() {\n return Popcorn.position( this.media );\n },\n\n // Toggle a plugin's playback behaviour (on or off) per instance\n toggle: function( plugin ) {\n return Popcorn[ this.data.disabled.indexOf( plugin ) > -1 ? \"enable\" : \"disable\" ]( this, plugin );\n },\n\n // Set default values for plugin options objects per instance\n defaults: function( plugin, defaults ) {\n\n // If an array of default configurations is provided,\n // iterate and apply each to this instance\n if ( Popcorn.isArray( plugin ) ) {\n\n Popcorn.forEach( plugin, function( obj ) {\n for ( var name in obj ) {\n this.defaults( name, obj[ name ] );\n }\n }, this );\n\n return this;\n }\n\n if ( !this.options.defaults ) {\n this.options.defaults = {};\n }\n\n if ( !this.options.defaults[ plugin ] ) {\n this.options.defaults[ plugin ] = {};\n }\n\n Popcorn.extend( this.options.defaults[ plugin ], defaults );\n\n return this;\n }\n });\n\n Popcorn.Events = {\n UIEvents: \"blur focus focusin focusout load resize scroll unload\",\n MouseEvents: \"mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave click dblclick\",\n Events: \"loadstart progress suspend emptied stalled play pause \" +\n \"loadedmetadata loadeddata waiting playing canplay 
canplaythrough \" +\n \"seeking seeked timeupdate ended ratechange durationchange volumechange\"\n };\n\n Popcorn.Events.Natives = Popcorn.Events.UIEvents + \" \" +\n Popcorn.Events.MouseEvents + \" \" +\n Popcorn.Events.Events;\n\n internal.events.apiTypes = [ \"UIEvents\", \"MouseEvents\", \"Events\" ];\n\n // Privately compile events table at load time\n (function( events, data ) {\n\n var apis = internal.events.apiTypes,\n eventsList = events.Natives.split( /\\s+/g ),\n idx = 0, len = eventsList.length, prop;\n\n for( ; idx < len; idx++ ) {\n data.hash[ eventsList[idx] ] = true;\n }\n\n apis.forEach(function( val, idx ) {\n\n data.apis[ val ] = {};\n\n var apiEvents = events[ val ].split( /\\s+/g ),\n len = apiEvents.length,\n k = 0;\n\n for ( ; k < len; k++ ) {\n data.apis[ val ][ apiEvents[ k ] ] = true;\n }\n });\n })( Popcorn.Events, internal.events );\n\n Popcorn.events = {\n\n isNative: function( type ) {\n return !!internal.events.hash[ type ];\n },\n getInterface: function( type ) {\n\n if ( !Popcorn.events.isNative( type ) ) {\n return false;\n }\n\n var eventApi = internal.events,\n apis = eventApi.apiTypes,\n apihash = eventApi.apis,\n idx = 0, len = apis.length, api, tmp;\n\n for ( ; idx < len; idx++ ) {\n tmp = apis[ idx ];\n\n if ( apihash[ tmp ][ type ] ) {\n api = tmp;\n break;\n }\n }\n return api;\n },\n // Compile all native events to single array\n all: Popcorn.Events.Natives.split( /\\s+/g ),\n // Defines all Event handling static functions\n fn: {\n trigger: function( type, data ) {\n\n var eventInterface, evt;\n // setup checks for custom event system\n if ( this.data.events[ type ] && Popcorn.sizeOf( this.data.events[ type ] ) ) {\n\n eventInterface = Popcorn.events.getInterface( type );\n\n if ( eventInterface ) {\n\n evt = document.createEvent( eventInterface );\n evt.initEvent( type, true, true, global, 1 );\n\n this.media.dispatchEvent( evt );\n\n return this;\n }\n\n // Custom events\n Popcorn.forEach( this.data.events[ type ], 
function( obj, key ) {\n\n obj.call( this, data );\n\n }, this );\n\n }\n\n return this;\n },\n listen: function( type, fn ) {\n\n var self = this,\n hasEvents = true,\n eventHook = Popcorn.events.hooks[ type ],\n origType = type,\n tmp;\n\n if ( !this.data.events[ type ] ) {\n this.data.events[ type ] = {};\n hasEvents = false;\n }\n\n // Check and setup event hooks\n if ( eventHook ) {\n\n // Execute hook add method if defined\n if ( eventHook.add ) {\n eventHook.add.call( this, {}, fn );\n }\n\n // Reassign event type to our piggyback event type if defined\n if ( eventHook.bind ) {\n type = eventHook.bind;\n }\n\n // Reassign handler if defined\n if ( eventHook.handler ) {\n tmp = fn;\n\n fn = function wrapper( event ) {\n eventHook.handler.call( self, event, tmp );\n };\n }\n\n // assume the piggy back event is registered\n hasEvents = true;\n\n // Setup event registry entry\n if ( !this.data.events[ type ] ) {\n this.data.events[ type ] = {};\n // Toggle if the previous assumption was untrue\n hasEvents = false;\n }\n }\n\n // Register event and handler\n this.data.events[ type ][ fn.name || ( fn.toString() + Popcorn.guid() ) ] = fn;\n\n // only attach one event of any type\n if ( !hasEvents && Popcorn.events.all.indexOf( type ) > -1 ) {\n\n this.media.addEventListener( type, function( event ) {\n\n Popcorn.forEach( self.data.events[ type ], function( obj, key ) {\n if ( typeof obj === \"function\" ) {\n obj.call( self, event );\n }\n });\n\n }, false);\n }\n return this;\n },\n unlisten: function( type, fn ) {\n\n if ( this.data.events[ type ] && this.data.events[ type ][ fn ] ) {\n\n delete this.data.events[ type ][ fn ];\n\n return this;\n }\n\n this.data.events[ type ] = null;\n\n return this;\n }\n },\n hooks: {\n canplayall: {\n bind: \"canplaythrough\",\n add: function( event, callback ) {\n\n var state = false;\n\n if ( this.media.readyState ) {\n\n callback.call( this, event );\n\n state = true;\n }\n\n this.data.hooks.canplayall = {\n fired: state\n 
};\n },\n // declare special handling instructions\n handler: function canplayall( event, callback ) {\n\n if ( !this.data.hooks.canplayall.fired ) {\n // trigger original user callback once\n callback.call( this, event );\n\n this.data.hooks.canplayall.fired = true;\n }\n }\n }\n }\n };\n\n // Extend Popcorn.events.fns (listen, unlisten, trigger) to all Popcorn instances\n Popcorn.forEach( [ \"trigger\", \"listen\", \"unlisten\" ], function( key ) {\n Popcorn.p[ key ] = Popcorn.events.fn[ key ];\n });\n\n // Protected API methods\n Popcorn.protect = {\n natives: ( \"load play pause currentTime playbackRate mute volume duration removePlugin roundTime trigger listen unlisten exec\" +\n \"preload playbackRate autoplay loop controls muted buffered readyState seeking paused played seekable ended\" ).toLowerCase().split( /\\s+/ )\n };\n\n // Internal Only - Adds track events to the instance object\n Popcorn.addTrackEvent = function( obj, track ) {\n\n // Determine if this track has default options set for it\n // If so, apply them to the track object\n if ( track && track._natives && track._natives.type &&\n ( obj.options.defaults && obj.options.defaults[ track._natives.type ] ) ) {\n\n track = Popcorn.extend( {}, obj.options.defaults[ track._natives.type ], track );\n }\n\n if ( track._natives ) {\n // Supports user defined track event id\n track._id = !track.id ? 
Popcorn.guid( track._natives.type ) : track.id;\n\n // Push track event ids into the history\n obj.data.history.push( track._id );\n }\n\n track.start = Popcorn.util.toSeconds( track.start, obj.options.framerate );\n track.end = Popcorn.util.toSeconds( track.end, obj.options.framerate );\n\n // Store this definition in an array sorted by times\n var byStart = obj.data.trackEvents.byStart,\n byEnd = obj.data.trackEvents.byEnd,\n idx;\n\n for ( idx = byStart.length - 1; idx >= 0; idx-- ) {\n\n if ( track.start >= byStart[ idx ].start ) {\n byStart.splice( idx + 1, 0, track );\n break;\n }\n }\n\n for ( idx = byEnd.length - 1; idx >= 0; idx-- ) {\n\n if ( track.end > byEnd[ idx ].end ) {\n byEnd.splice( idx + 1, 0, track );\n break;\n }\n }\n\n // Store references to user added trackevents in ref table\n if ( track._id ) {\n Popcorn.addTrackEvent.ref( obj, track );\n }\n };\n\n // Internal Only - Adds track event references to the instance object's trackRefs hash table\n Popcorn.addTrackEvent.ref = function( obj, track ) {\n obj.data.trackRefs[ track._id ] = track;\n\n return obj;\n };\n\n Popcorn.removeTrackEvent = function( obj, trackId ) {\n\n var historyLen = obj.data.history.length,\n indexWasAt = 0,\n byStart = [],\n byEnd = [],\n animating = [],\n history = [];\n\n Popcorn.forEach( obj.data.trackEvents.byStart, function( o, i, context ) {\n // Preserve the original start/end trackEvents\n if ( !o._id ) {\n byStart.push( obj.data.trackEvents.byStart[i] );\n byEnd.push( obj.data.trackEvents.byEnd[i] );\n }\n\n // Filter for user track events (vs system track events)\n if ( o._id ) {\n\n // Filter for the trackevent to remove\n if ( o._id !== trackId ) {\n byStart.push( obj.data.trackEvents.byStart[i] );\n byEnd.push( obj.data.trackEvents.byEnd[i] );\n }\n\n // Capture the position of the track being removed.\n if ( o._id === trackId ) {\n indexWasAt = i;\n o._natives._teardown && o._natives._teardown.call( obj, o );\n }\n }\n\n });\n\n if ( 
obj.data.trackEvents.animating.length ) {\n Popcorn.forEach( obj.data.trackEvents.animating, function( o, i, context ) {\n // Preserve the original start/end trackEvents\n if ( !o._id ) {\n animating.push( obj.data.trackEvents.animating[i] );\n }\n\n // Filter for user track events (vs system track events)\n if ( o._id ) {\n // Filter for the trackevent to remove\n if ( o._id !== trackId ) {\n animating.push( obj.data.trackEvents.animating[i] );\n }\n }\n });\n }\n\n // Update\n if ( indexWasAt <= obj.data.trackEvents.startIndex ) {\n obj.data.trackEvents.startIndex--;\n }\n\n if ( indexWasAt <= obj.data.trackEvents.endIndex ) {\n obj.data.trackEvents.endIndex--;\n }\n\n obj.data.trackEvents.byStart = byStart;\n obj.data.trackEvents.byEnd = byEnd;\n obj.data.trackEvents.animating = animating;\n\n for ( var i = 0; i < historyLen; i++ ) {\n if ( obj.data.history[ i ] !== trackId ) {\n history.push( obj.data.history[ i ] );\n }\n }\n\n // Update ordered history array\n obj.data.history = history;\n\n // Update track event references\n Popcorn.removeTrackEvent.ref( obj, trackId );\n };\n\n // Internal Only - Removes track event references from instance object's trackRefs hash table\n Popcorn.removeTrackEvent.ref = function( obj, trackId ) {\n delete obj.data.trackRefs[ trackId ];\n\n return obj;\n };\n\n // Return an array of track events bound to this instance object\n Popcorn.getTrackEvents = function( obj ) {\n\n var trackevents = [],\n refs = obj.data.trackEvents.byStart,\n length = refs.length,\n idx = 0,\n ref;\n\n for ( ; idx < length; idx++ ) {\n ref = refs[ idx ];\n // Return only user attributed track event references\n if ( ref._id ) {\n trackevents.push( ref );\n }\n }\n\n return trackevents;\n };\n\n // Internal Only - Returns an instance object's trackRefs hash table\n Popcorn.getTrackEvents.ref = function( obj ) {\n return obj.data.trackRefs;\n };\n\n // Return a single track event bound to this instance object\n Popcorn.getTrackEvent = function( obj, 
trackId ) {\n return obj.data.trackRefs[ trackId ];\n };\n\n // Internal Only - Returns an instance object's track reference by track id\n Popcorn.getTrackEvent.ref = function( obj, trackId ) {\n return obj.data.trackRefs[ trackId ];\n };\n\n Popcorn.getLastTrackEventId = function( obj ) {\n return obj.data.history[ obj.data.history.length - 1 ];\n };\n\n Popcorn.timeUpdate = function( obj, event ) {\n\n var currentTime = obj.media.currentTime,\n previousTime = obj.data.trackEvents.previousUpdateTime,\n tracks = obj.data.trackEvents,\n animating = tracks.animating,\n end = tracks.endIndex,\n start = tracks.startIndex,\n animIndex = 0,\n\n registryByName = Popcorn.registryByName,\n\n byEnd, byStart, byAnimate, natives, type;\n\n // Playbar advancing\n if ( previousTime < currentTime ) {\n\n while ( tracks.byEnd[ end ] && tracks.byEnd[ end ].end <= currentTime ) {\n\n byEnd = tracks.byEnd[ end ];\n natives = byEnd._natives;\n type = natives && natives.type;\n\n // If plugin does not exist on this instance, remove it\n if ( !natives ||\n ( !!registryByName[ type ] ||\n !!obj[ type ] ) ) {\n\n if ( byEnd._running === true ) {\n byEnd._running = false;\n natives.end.call( obj, event, byEnd );\n }\n\n end++;\n } else {\n // remove track event\n Popcorn.removeTrackEvent( obj, byEnd._id );\n return;\n }\n }\n\n while ( tracks.byStart[ start ] && tracks.byStart[ start ].start <= currentTime ) {\n\n byStart = tracks.byStart[ start ];\n natives = byStart._natives;\n type = natives && natives.type;\n\n // If plugin does not exist on this instance, remove it\n if ( !natives ||\n ( !!registryByName[ type ] ||\n !!obj[ type ] ) ) {\n\n if ( byStart.end > currentTime &&\n byStart._running === false &&\n obj.data.disabled.indexOf( type ) === -1 ) {\n\n byStart._running = true;\n natives.start.call( obj, event, byStart );\n\n // If the `frameAnimation` option is used,\n // push the current byStart object into the `animating` cue\n if ( obj.options.frameAnimation &&\n ( byStart && 
byStart._running && byStart._natives.frame ) ) {\n\n animating.push( byStart );\n }\n }\n start++;\n } else {\n // remove track event\n Popcorn.removeTrackEvent( obj, byStart._id );\n return;\n }\n }\n\n // If the `frameAnimation` option is used, iterate the animating track\n // and execute the `frame` callback\n if ( obj.options.frameAnimation ) {\n while ( animIndex < animating.length ) {\n\n byAnimate = animating[ animIndex ];\n\n if ( !byAnimate._running ) {\n animating.splice( animIndex, 1 );\n } else {\n byAnimate._natives.frame.call( obj, event, byAnimate, currentTime );\n animIndex++;\n }\n }\n }\n\n // Playbar receding\n } else if ( previousTime > currentTime ) {\n\n while ( tracks.byStart[ start ] && tracks.byStart[ start ].start > currentTime ) {\n\n byStart = tracks.byStart[ start ];\n natives = byStart._natives;\n type = natives && natives.type;\n\n // if plugin does not exist on this instance, remove it\n if ( !natives ||\n ( !!registryByName[ type ] ||\n !!obj[ type ] ) ) {\n\n if ( byStart._running === true ) {\n byStart._running = false;\n natives.end.call( obj, event, byStart );\n }\n start--;\n } else {\n // remove track event\n Popcorn.removeTrackEvent( obj, byStart._id );\n return;\n }\n }\n\n while ( tracks.byEnd[ end ] && tracks.byEnd[ end ].end > currentTime ) {\n\n byEnd = tracks.byEnd[ end ];\n natives = byEnd._natives;\n type = natives && natives.type;\n\n // if plugin does not exist on this instance, remove it\n if ( !natives ||\n ( !!registryByName[ type ] ||\n !!obj[ type ] ) ) {\n\n if ( byEnd.start <= currentTime &&\n byEnd._running === false &&\n obj.data.disabled.indexOf( type ) === -1 ) {\n\n byEnd._running = true;\n natives.start.call( obj, event, byEnd );\n\n // If the `frameAnimation` option is used,\n // push the current byEnd object into the `animating` cue\n if ( obj.options.frameAnimation &&\n ( byEnd && byEnd._running && byEnd._natives.frame ) ) {\n\n animating.push( byEnd );\n }\n }\n end--;\n } else {\n // remove track 
event\n Popcorn.removeTrackEvent( obj, byEnd._id );\n return;\n }\n }\n\n // If the `frameAnimation` option is used, iterate the animating track\n // and execute the `frame` callback\n if ( obj.options.frameAnimation ) {\n while ( animIndex < animating.length ) {\n\n byAnimate = animating[ animIndex ];\n\n if ( !byAnimate._running ) {\n animating.splice( animIndex, 1 );\n } else {\n byAnimate._natives.frame.call( obj, event, byAnimate, currentTime );\n animIndex++;\n }\n }\n }\n // time bar is not moving ( video is paused )\n }\n\n tracks.endIndex = end;\n tracks.startIndex = start;\n tracks.previousUpdateTime = currentTime;\n };\n\n // Map and Extend TrackEvent functions to all Popcorn instances\n Popcorn.extend( Popcorn.p, {\n\n getTrackEvents: function() {\n return Popcorn.getTrackEvents.call( null, this );\n },\n\n getTrackEvent: function( id ) {\n return Popcorn.getTrackEvent.call( null, this, id );\n },\n\n getLastTrackEventId: function() {\n return Popcorn.getLastTrackEventId.call( null, this );\n },\n\n removeTrackEvent: function( id ) {\n\n Popcorn.removeTrackEvent.call( null, this, id );\n return this;\n },\n\n removePlugin: function( name ) {\n Popcorn.removePlugin.call( null, this, name );\n return this;\n },\n\n timeUpdate: function( event ) {\n Popcorn.timeUpdate.call( null, this, event );\n return this;\n },\n\n destroy: function() {\n Popcorn.destroy.call( null, this );\n return this;\n }\n });\n\n // Plugin manifests\n Popcorn.manifest = {};\n // Plugins are registered\n Popcorn.registry = [];\n Popcorn.registryByName = {};\n // An interface for extending Popcorn\n // with plugin functionality\n Popcorn.plugin = function( name, definition, manifest ) {\n\n if ( Popcorn.protect.natives.indexOf( name.toLowerCase() ) >= 0 ) {\n Popcorn.error( \"'\" + name + \"' is a protected function name\" );\n return;\n }\n\n // Provides some sugar, but ultimately extends\n // the definition into Popcorn.p\n var reserved = [ \"start\", \"end\" ],\n plugin = {},\n 
setup,\n isfn = typeof definition === \"function\",\n methods = [ \"_setup\", \"_teardown\", \"start\", \"end\", \"frame\" ];\n\n // combines calls of two function calls into one\n var combineFn = function( first, second ) {\n\n first = first || Popcorn.nop;\n second = second || Popcorn.nop;\n\n return function() {\n\n first.apply( this, arguments );\n second.apply( this, arguments );\n };\n };\n\n // If `manifest` arg is undefined, check for manifest within the `definition` object\n // If no `definition.manifest`, an empty object is a sufficient fallback\n Popcorn.manifest[ name ] = manifest = manifest || definition.manifest || {};\n\n // apply safe, and empty default functions\n methods.forEach(function( method ) {\n\n definition[ method ] = definition[ method ] || Popcorn.nop;\n });\n\n var pluginFn = function( setup, options ) {\n\n if ( !options ) {\n return this;\n }\n\n // Storing the plugin natives\n var natives = options._natives = {},\n compose = \"\",\n defaults, originalOpts, manifestOpts, mergedSetupOpts;\n\n Popcorn.extend( natives, setup );\n\n options._natives.type = name;\n options._running = false;\n\n // Check for previously set default options\n defaults = this.options.defaults && this.options.defaults[ options._natives && options._natives.type ];\n\n // default to an empty string if no effect exists\n // split string into an array of effects\n options.compose = options.compose && options.compose.split( \" \" ) || [];\n options.effect = options.effect && options.effect.split( \" \" ) || [];\n\n // join the two arrays together\n options.compose = options.compose.concat( options.effect );\n\n options.compose.forEach(function( composeOption ) {\n\n // if the requested compose is garbage, throw it away\n compose = Popcorn.compositions[ composeOption ] || {};\n\n // extends previous functions with compose function\n methods.forEach(function( method ) {\n\n natives[ method ] = combineFn( natives[ method ], compose[ method ] );\n });\n });\n\n // 
Ensure a manifest object, an empty object is a sufficient fallback\n options._natives.manifest = manifest;\n\n // Checks for expected properties\n if ( !( \"start\" in options ) ) {\n options.start = 0;\n }\n\n if ( !( \"end\" in options ) ) {\n options.end = this.duration() || Number.MAX_VALUE;\n }\n\n // Merge with defaults if they exist, make sure per call is prioritized\n mergedSetupOpts = defaults ? Popcorn.extend( {}, defaults, options ) :\n options;\n\n // Resolves 239, 241, 242\n if ( !mergedSetupOpts.target ) {\n\n // Sometimes the manifest may be missing entirely\n // or it has an options object that doesn't have a `target` property\n manifestOpts = \"options\" in manifest && manifest.options;\n\n mergedSetupOpts.target = manifestOpts && \"target\" in manifestOpts && manifestOpts.target;\n }\n\n // Trigger _setup method if exists\n options._natives._setup && options._natives._setup.call( this, mergedSetupOpts );\n\n // Create new track event for this instance\n Popcorn.addTrackEvent( this, Popcorn.extend( mergedSetupOpts, options ) );\n\n // Future support for plugin event definitions\n // for all of the native events\n Popcorn.forEach( setup, function( callback, type ) {\n\n if ( type !== \"type\" ) {\n\n if ( reserved.indexOf( type ) === -1 ) {\n\n this.listen( type, callback );\n }\n }\n\n }, this );\n\n return this;\n };\n\n // Assign new named definition\n plugin[ name ] = function( options ) {\n return pluginFn.call( this, isfn ? 
definition.call( this, options ) : definition,\n options );\n };\n\n // Extend Popcorn.p with new named definition\n Popcorn.extend( Popcorn.p, plugin );\n\n // Push into the registry\n var entry = {\n fn: plugin[ name ],\n definition: definition,\n base: definition,\n parents: [],\n name: name\n };\n Popcorn.registry.push(\n Popcorn.extend( plugin, entry, {\n type: name\n })\n );\n Popcorn.registryByName[ name ] = entry;\n\n return plugin;\n };\n\n Popcorn.plugin.debug = false;\n\n // removePlugin( type ) removes all tracks of that from all instances of popcorn\n // removePlugin( obj, type ) removes all tracks of type from obj, where obj is a single instance of popcorn\n Popcorn.removePlugin = function( obj, name ) {\n\n // Check if we are removing plugin from an instance or from all of Popcorn\n if ( !name ) {\n\n // Fix the order\n name = obj;\n obj = Popcorn.p;\n\n if ( Popcorn.protect.natives.indexOf( name.toLowerCase() ) >= 0 ) {\n Popcorn.error( \"'\" + name + \"' is a protected function name\" );\n return;\n }\n\n var registryLen = Popcorn.registry.length,\n registryIdx;\n\n // remove plugin reference from registry\n for ( registryIdx = 0; registryIdx < registryLen; registryIdx++ ) {\n if ( Popcorn.registry[ registryIdx ].name === name ) {\n Popcorn.registry.splice( registryIdx, 1 );\n delete Popcorn.registryByName[ name ];\n\n // delete the plugin\n delete obj[ name ];\n\n // plugin found and removed, stop checking, we are done\n return;\n }\n }\n\n }\n\n var byStart = obj.data.trackEvents.byStart,\n byEnd = obj.data.trackEvents.byEnd,\n animating = obj.data.trackEvents.animating,\n idx, sl;\n\n // remove all trackEvents\n for ( idx = 0, sl = byStart.length; idx < sl; idx++ ) {\n\n if ( ( byStart[ idx ] && byStart[ idx ]._natives && byStart[ idx ]._natives.type === name ) &&\n ( byEnd[ idx ] && byEnd[ idx ]._natives && byEnd[ idx ]._natives.type === name ) ) {\n\n byStart[ idx ]._natives._teardown && byStart[ idx ]._natives._teardown.call( obj, byStart[ 
idx ] );\n\n byStart.splice( idx, 1 );\n byEnd.splice( idx, 1 );\n\n // update for loop if something removed, but keep checking\n idx--; sl--;\n if ( obj.data.trackEvents.startIndex <= idx ) {\n obj.data.trackEvents.startIndex--;\n obj.data.trackEvents.endIndex--;\n }\n }\n }\n\n //remove all animating events\n for ( idx = 0, sl = animating.length; idx < sl; idx++ ) {\n\n if ( animating[ idx ] && animating[ idx ]._natives && animating[ idx ]._natives.type === name ) {\n\n animating.splice( idx, 1 );\n\n // update for loop if something removed, but keep checking\n idx--; sl--;\n }\n }\n\n };\n\n Popcorn.compositions = {};\n\n // Plugin inheritance\n Popcorn.compose = function( name, definition, manifest ) {\n\n // If `manifest` arg is undefined, check for manifest within the `definition` object\n // If no `definition.manifest`, an empty object is a sufficient fallback\n Popcorn.manifest[ name ] = manifest = manifest || definition.manifest || {};\n\n // register the effect by name\n Popcorn.compositions[ name ] = definition;\n };\n\n Popcorn.plugin.effect = Popcorn.effect = Popcorn.compose;\n\n // stores parsers keyed on filetype\n Popcorn.parsers = {};\n\n // An interface for extending Popcorn\n // with parser functionality\n Popcorn.parser = function( name, type, definition ) {\n\n if ( Popcorn.protect.natives.indexOf( name.toLowerCase() ) >= 0 ) {\n Popcorn.error( \"'\" + name + \"' is a protected function name\" );\n return;\n }\n\n // fixes parameters for overloaded function call\n if ( typeof type === \"function\" && !definition ) {\n definition = type;\n type = \"\";\n }\n\n if ( typeof definition !== \"function\" || typeof type !== \"string\" ) {\n return;\n }\n\n // Provides some sugar, but ultimately extends\n // the definition into Popcorn.p\n\n var natives = Popcorn.events.all,\n parseFn,\n parser = {};\n\n parseFn = function( filename, callback ) {\n\n if ( !filename ) {\n return this;\n }\n\n var that = this;\n\n Popcorn.xhr({\n url: filename,\n 
dataType: type,\n success: function( data ) {\n\n var tracksObject = definition( data ),\n tracksData,\n tracksDataLen,\n tracksDef,\n idx = 0;\n\n tracksData = tracksObject.data || [];\n tracksDataLen = tracksData.length;\n tracksDef = null;\n\n // If no tracks to process, return immediately\n if ( !tracksDataLen ) {\n return;\n }\n\n // Create tracks out of parsed object\n for ( ; idx < tracksDataLen; idx++ ) {\n\n tracksDef = tracksData[ idx ];\n\n for ( var key in tracksDef ) {\n\n if ( hasOwn.call( tracksDef, key ) && !!that[ key ] ) {\n\n that[ key ]( tracksDef[ key ] );\n }\n }\n }\n if ( callback ) {\n callback();\n }\n }\n });\n\n return this;\n };\n\n // Assign new named definition\n parser[ name ] = parseFn;\n\n // Extend Popcorn.p with new named definition\n Popcorn.extend( Popcorn.p, parser );\n\n // keys the function name by filetype extension\n //Popcorn.parsers[ name ] = true;\n\n return parser;\n };\n\n\n // Cache references to reused RegExps\n var rparams = /\\?/,\n // XHR Setup object\n setup = {\n url: \"\",\n data: \"\",\n dataType: \"\",\n success: Popcorn.nop,\n type: \"GET\",\n async: true,\n xhr: function() {\n return new global.XMLHttpRequest();\n }\n };\n\n Popcorn.xhr = function( options ) {\n\n options.dataType = options.dataType && options.dataType.toLowerCase() || null;\n\n if ( options.dataType &&\n ( options.dataType === \"jsonp\" || options.dataType === \"script\" ) ) {\n\n Popcorn.xhr.getJSONP(\n options.url,\n options.success,\n options.dataType === \"script\"\n );\n return;\n }\n\n var settings = Popcorn.extend( {}, setup, options );\n\n // Create new XMLHttpRequest object\n settings.ajax = settings.xhr();\n\n if ( settings.ajax ) {\n\n if ( settings.type === \"GET\" && settings.data ) {\n\n // append query string\n settings.url += ( rparams.test( settings.url ) ? 
\"&\" : \"?\" ) + settings.data;\n\n // Garbage collect and reset settings.data\n settings.data = null;\n }\n\n\n settings.ajax.open( settings.type, settings.url, settings.async );\n settings.ajax.send( settings.data || null );\n\n return Popcorn.xhr.httpData( settings );\n }\n };\n\n\n Popcorn.xhr.httpData = function( settings ) {\n\n var data, json = null;\n\n settings.ajax.onreadystatechange = function() {\n\n if ( settings.ajax.readyState === 4 ) {\n\n try {\n json = JSON.parse( settings.ajax.responseText );\n } catch( e ) {\n //suppress\n }\n\n data = {\n xml: settings.ajax.responseXML,\n text: settings.ajax.responseText,\n json: json\n };\n\n // If a dataType was specified, return that type of data\n if ( settings.dataType ) {\n data = data[ settings.dataType ];\n }\n\n\n settings.success.call( settings.ajax, data );\n\n }\n };\n return data;\n };\n\n Popcorn.xhr.getJSONP = function( url, success, isScript ) {\n\n var head = document.head || document.getElementsByTagName( \"head\" )[ 0 ] || document.documentElement,\n script = document.createElement( \"script\" ),\n paramStr = url.split( \"?\" )[ 1 ],\n isFired = false,\n params = [],\n callback, parts, callparam;\n\n if ( paramStr && !isScript ) {\n params = paramStr.split( \"&\" );\n }\n\n if ( params.length ) {\n parts = params[ params.length - 1 ].split( \"=\" );\n }\n\n callback = params.length ? ( parts[ 1 ] ? 
parts[ 1 ] : parts[ 0 ] ) : \"jsonp\";\n\n if ( !paramStr && !isScript ) {\n url += \"?callback=\" + callback;\n }\n\n if ( callback && !isScript ) {\n\n // If a callback name already exists\n if ( !!window[ callback ] ) {\n // Create a new unique callback name\n callback = Popcorn.guid( callback );\n }\n\n // Define the JSONP success callback globally\n window[ callback ] = function( data ) {\n // Fire success callbacks\n success && success( data );\n isFired = true;\n };\n\n // Replace callback param and callback name\n url = url.replace( parts.join( \"=\" ), parts[ 0 ] + \"=\" + callback );\n }\n\n script.onload = function() {\n\n // Handling remote script loading callbacks\n if ( isScript ) {\n // getScript\n success && success();\n }\n\n // Executing for JSONP requests\n if ( isFired ) {\n // Garbage collect the callback\n delete window[ callback ];\n }\n // Garbage collect the script resource\n head.removeChild( script );\n };\n\n script.src = url;\n\n head.insertBefore( script, head.firstChild );\n\n return;\n };\n\n Popcorn.getJSONP = Popcorn.xhr.getJSONP;\n\n Popcorn.getScript = Popcorn.xhr.getScript = function( url, success ) {\n\n return Popcorn.xhr.getJSONP( url, success, true );\n };\n\n Popcorn.util = {\n // Simple function to parse a timestamp into seconds\n // Acceptable formats are:\n // HH:MM:SS.MMM\n // HH:MM:SS;FF\n // Hours and minutes are optional. 
They default to 0\n toSeconds: function( timeStr, framerate ) {\n // Hours and minutes are optional\n // Seconds must be specified\n // Seconds can be followed by milliseconds OR by the frame information\n var validTimeFormat = /^([0-9]+:){0,2}[0-9]+([.;][0-9]+)?$/,\n errorMessage = \"Invalid time format\",\n digitPairs, lastIndex, lastPair, firstPair,\n frameInfo, frameTime;\n\n if ( typeof timeStr === \"number\" ) {\n return timeStr;\n }\n\n if ( typeof timeStr === \"string\" &&\n !validTimeFormat.test( timeStr ) ) {\n Popcorn.error( errorMessage );\n }\n\n digitPairs = timeStr.split( \":\" );\n lastIndex = digitPairs.length - 1;\n lastPair = digitPairs[ lastIndex ];\n\n // Fix last element:\n if ( lastPair.indexOf( \";\" ) > -1 ) {\n\n frameInfo = lastPair.split( \";\" );\n frameTime = 0;\n\n if ( framerate && ( typeof framerate === \"number\" ) ) {\n frameTime = parseFloat( frameInfo[ 1 ], 10 ) / framerate;\n }\n\n digitPairs[ lastIndex ] = parseInt( frameInfo[ 0 ], 10 ) + frameTime;\n }\n\n firstPair = digitPairs[ 0 ];\n\n return {\n\n 1: parseFloat( firstPair, 10 ),\n\n 2: ( parseInt( firstPair, 10 ) * 60 ) +\n parseFloat( digitPairs[ 1 ], 10 ),\n\n 3: ( parseInt( firstPair, 10 ) * 3600 ) +\n ( parseInt( digitPairs[ 1 ], 10 ) * 60 ) +\n parseFloat( digitPairs[ 2 ], 10 )\n\n }[ digitPairs.length || 1 ];\n }\n };\n\n\n // Initialize locale data\n // Based on http://en.wikipedia.org/wiki/Language_localisation#Language_tags_and_codes\n function initLocale( arg ) {\n\n var locale = typeof arg === \"string\" ? 
arg : [ arg.language, arg.region ].join( \"-\" ),\n parts = locale.split( \"-\" );\n\n // Setup locale data table\n return {\n iso6391: locale,\n language: parts[ 0 ] || \"\",\n region: parts[ 1 ] || \"\"\n };\n }\n\n // Declare locale data table\n var localeData = initLocale( global.navigator.userLanguage || global.navigator.language );\n\n Popcorn.locale = {\n\n // Popcorn.locale.get()\n // returns reference to privately\n // defined localeData\n get: function() {\n return localeData;\n },\n\n // Popcorn.locale.set( string|object );\n set: function( arg ) {\n\n localeData = initLocale( arg );\n\n Popcorn.locale.broadcast();\n\n return localeData;\n },\n\n // Popcorn.locale.broadcast( type )\n // Sends events to all popcorn media instances that are\n // listening for locale events\n broadcast: function( type ) {\n\n var instances = Popcorn.instances,\n length = instances.length,\n idx = 0,\n instance;\n\n type = type || \"locale:changed\";\n\n // Iterate all current instances\n for ( ; idx < length; idx++ ) {\n instance = instances[ idx ];\n\n // For those instances with locale event listeners,\n // trigger a locale change event\n if ( type in instance.data.events ) {\n instance.trigger( type );\n }\n }\n }\n };\n\n // alias for exec function\n Popcorn.p.cue = Popcorn.p.exec;\n\n // Exposes Popcorn to global context\n global.Popcorn = Popcorn;\n\n document.addEventListener( \"DOMContentLoaded\", function() {\n\n // Supports non-specific elements\n var dataAttr = \"data-timeline-sources\",\n medias = document.querySelectorAll( \"[\" + dataAttr + \"]\" );\n\n Popcorn.forEach( medias, function( idx, key ) {\n\n var media = medias[ key ],\n hasDataSources = false,\n dataSources, data, popcornMedia;\n\n // Ensure that the DOM has an id\n if ( !media.id ) {\n\n media.id = Popcorn.guid( \"__popcorn\" );\n }\n\n // Ensure we're looking at a dom node\n if ( media.nodeType && media.nodeType === 1 ) {\n\n popcornMedia = Popcorn( \"#\" + media.id );\n\n dataSources = ( 
media.getAttribute( dataAttr ) || \"\" ).split( \",\" );\n\n if ( dataSources[ 0 ] ) {\n\n Popcorn.forEach( dataSources, function( source ) {\n\n // split the parser and data as parser!file\n data = source.split( \"!\" );\n\n // if no parser is defined for the file, assume \"parse\" + file extension\n if ( data.length === 1 ) {\n\n data = source.split( \".\" );\n data[ 0 ] = \"parse\" + data[ data.length - 1 ].toUpperCase();\n data[ 1 ] = source;\n }\n\n // If the media has data sources and the correct parser is registered, continue to load\n if ( dataSources[ 0 ] && popcornMedia[ data[ 0 ] ] ) {\n\n // Set up the media and load in the datasources\n popcornMedia[ data[ 0 ] ]( data[ 1 ] );\n\n }\n });\n\n }\n\n // Only play the media if it was specified to do so\n if ( !!popcornMedia.autoplay ) {\n popcornMedia.play();\n }\n\n }\n });\n }, false );\n\n})(window, window.document);\n\n"},"new_file":{"kind":"string","value":"popcorn.js"},"old_contents":{"kind":"string","value":"(function(global, document) {\n\n // Popcorn.js does not support archaic browsers\n if ( !document.addEventListener ) {\n global.Popcorn = {};\n\n var methods = ( \"removeInstance addInstance getInstanceById removeInstanceById \" +\n \"forEach extend effects error guid sizeOf isArray nop position disable enable destroy \" +\n \"addTrackEvent removeTrackEvent getTrackEvents getTrackEvent getLastTrackEventId \" +\n \"timeUpdate plugin removePlugin compose effect parser xhr getJSONP getScript\" ).split(/\\s+/);\n\n while( methods.length ) {\n global.Popcorn[ methods.shift() ] = function() {};\n }\n return;\n }\n\n var\n\n AP = Array.prototype,\n OP = Object.prototype,\n\n forEach = AP.forEach,\n slice = AP.slice,\n hasOwn = OP.hasOwnProperty,\n toString = OP.toString,\n\n // ID string matching\n rIdExp = /^(#([\\w\\-\\_\\.]+))$/,\n\n // Ready fn cache\n readyStack = [],\n readyBound = false,\n readyFired = false,\n\n // Non-public internal data object\n internal = {\n events: {\n hash: {},\n apis: 
{}\n }\n },\n\n // Non-public `requestAnimFrame`\n // http://paulirish.com/2011/requestanimationframe-for-smart-animating/\n requestAnimFrame = (function(){\n return global.requestAnimationFrame ||\n global.webkitRequestAnimationFrame ||\n global.mozRequestAnimationFrame ||\n global.oRequestAnimationFrame ||\n global.msRequestAnimationFrame ||\n function( callback, element ) {\n global.setTimeout( callback, 16 );\n };\n }()),\n\n // Declare constructor\n // Returns an instance object.\n Popcorn = function( entity, options ) {\n // Return new Popcorn object\n return new Popcorn.p.init( entity, options || null );\n };\n\n // Instance caching\n Popcorn.instances = [];\n Popcorn.instanceIds = {};\n\n Popcorn.removeInstance = function( instance ) {\n // If called prior to any instances being created\n // Return early to avoid splicing on nothing\n if ( !Popcorn.instances.length ) {\n return;\n }\n\n // Remove instance from Popcorn.instances\n Popcorn.instances.splice( Popcorn.instanceIds[ instance.id ], 1 );\n\n // Delete the instance id key\n delete Popcorn.instanceIds[ instance.id ];\n\n // Return current modified instances\n return Popcorn.instances;\n };\n\n // Addes a Popcorn instance to the Popcorn instance array\n Popcorn.addInstance = function( instance ) {\n\n var instanceLen = Popcorn.instances.length,\n instanceId = instance.media.id && instance.media.id;\n\n // If the media element has its own `id` use it, otherwise provide one\n // Ensure that instances have unique ids and unique entries\n // Uses `in` operator to avoid false positives on 0\n instance.id = !( instanceId in Popcorn.instanceIds ) && instanceId ||\n \"__popcorn\" + instanceLen;\n\n // Create a reference entry for this instance\n Popcorn.instanceIds[ instance.id ] = instanceLen;\n\n // Add this instance to the cache\n Popcorn.instances.push( instance );\n\n // Return the current modified instances\n return Popcorn.instances;\n };\n\n // Request Popcorn object instance by id\n 
Popcorn.getInstanceById = function( id ) {\n return Popcorn.instances[ Popcorn.instanceIds[ id ] ];\n };\n\n // Remove Popcorn object instance by id\n Popcorn.removeInstanceById = function( id ) {\n return Popcorn.removeInstance( Popcorn.instances[ Popcorn.instanceIds[ id ] ] );\n };\n\n // Declare a shortcut (Popcorn.p) to and a definition of\n // the new prototype for our Popcorn constructor\n Popcorn.p = Popcorn.prototype = {\n\n init: function( entity, options ) {\n\n var matches;\n\n // Supports Popcorn(function () { /../ })\n // Originally proposed by Daniel Brooks\n\n if ( typeof entity === \"function\" ) {\n\n // If document ready has already fired\n if ( document.readyState === \"interactive\" || document.readyState === \"complete\" ) {\n\n entity( document, Popcorn );\n\n return;\n }\n // Add `entity` fn to ready stack\n readyStack.push( entity );\n\n // This process should happen once per page load\n if ( !readyBound ) {\n\n // set readyBound flag\n readyBound = true;\n\n var DOMContentLoaded = function() {\n\n readyFired = true;\n\n // Remove global DOM ready listener\n document.removeEventListener( \"DOMContentLoaded\", DOMContentLoaded, false );\n\n // Execute all ready function in the stack\n for ( var i = 0, readyStackLength = readyStack.length; i < readyStackLength; i++ ) {\n\n readyStack[ i ].call( document, Popcorn );\n\n }\n // GC readyStack\n readyStack = null;\n };\n\n // Register global DOM ready listener\n document.addEventListener( \"DOMContentLoaded\", DOMContentLoaded, false );\n }\n\n return;\n }\n\n // Check if entity is a valid string id\n matches = rIdExp.exec( entity );\n\n // Get media element by id or object reference\n this.media = matches && matches.length && matches[ 2 ] ?\n document.getElementById( matches[ 2 ] ) :\n entity;\n\n // Create an audio or video element property reference\n this[ ( this.media.nodeName && this.media.nodeName.toLowerCase() ) || \"video\" ] = this.media;\n\n // Register new instance\n 
Popcorn.addInstance( this );\n\n this.options = options || {};\n\n this.isDestroyed = false;\n\n this.data = {\n\n // Allows disabling a plugin per instance\n disabled: [],\n\n // Stores DOM event queues by type\n events: {},\n\n // Stores Special event hooks data\n hooks: {},\n\n // Store track event history data\n history: [],\n\n // Stores ad-hoc state related data]\n state: {\n volume: this.media.volume\n },\n\n // Store track event object references by trackId\n trackRefs: {},\n\n // Playback track event queues\n trackEvents: {\n byStart: [{\n\n start: -1,\n end: -1\n }],\n byEnd: [{\n start: -1,\n end: -1\n }],\n animating: [],\n startIndex: 0,\n endIndex: 0,\n previousUpdateTime: -1\n }\n };\n\n // Wrap true ready check\n var isReady = function( that ) {\n\n var duration, videoDurationPlus, animate;\n\n if ( that.media.readyState >= 2 ) {\n // Adding padding to the front and end of the arrays\n // this is so we do not fall off either end\n\n duration = that.media.duration;\n // Check for no duration info (NaN)\n videoDurationPlus = duration != duration ? 
Number.MAX_VALUE : duration + 1;\n\n Popcorn.addTrackEvent( that, {\n start: videoDurationPlus,\n end: videoDurationPlus\n });\n\n if ( that.options.frameAnimation ) {\n // if Popcorn is created with frameAnimation option set to true,\n // requestAnimFrame is used instead of \"timeupdate\" media event.\n // This is for greater frame time accuracy, theoretically up to\n // 60 frames per second as opposed to ~4 ( ~every 15-250ms)\n animate = function () {\n\n Popcorn.timeUpdate( that, {} );\n\n that.trigger( \"timeupdate\" );\n\n requestAnimFrame( animate );\n };\n\n requestAnimFrame( animate );\n\n } else {\n\n that.data.timeUpdateFunction = function( event ) {\n Popcorn.timeUpdate( that, event );\n }\n\n if ( !that.isDestroyed ) {\n that.media.addEventListener( \"timeupdate\", that.data.timeUpdateFunction, false );\n }\n }\n } else {\n global.setTimeout(function() {\n isReady( that );\n }, 1 );\n }\n };\n\n isReady( this );\n\n return this;\n }\n };\n\n // Extend constructor prototype to instance prototype\n // Allows chaining methods to instances\n Popcorn.p.init.prototype = Popcorn.p;\n\n Popcorn.forEach = function( obj, fn, context ) {\n\n if ( !obj || !fn ) {\n return {};\n }\n\n context = context || this;\n\n var key, len;\n\n // Use native whenever possible\n if ( forEach && obj.forEach === forEach ) {\n return obj.forEach( fn, context );\n }\n\n if ( toString.call( obj ) === \"[object NodeList]\" ) {\n for ( key = 0, len = obj.length; key < len; key++ ) {\n fn.call( context, obj[ key ], key, obj );\n }\n return obj;\n }\n\n for ( key in obj ) {\n if ( hasOwn.call( obj, key ) ) {\n fn.call( context, obj[ key ], key, obj );\n }\n }\n return obj;\n };\n\n Popcorn.extend = function( obj ) {\n var dest = obj, src = slice.call( arguments, 1 );\n\n Popcorn.forEach( src, function( copy ) {\n for ( var prop in copy ) {\n dest[ prop ] = copy[ prop ];\n }\n });\n\n return dest;\n };\n\n\n // A Few reusable utils, memoized onto Popcorn\n Popcorn.extend( Popcorn, {\n 
error: function( msg ) {\n throw new Error( msg );\n },\n guid: function( prefix ) {\n Popcorn.guid.counter++;\n return ( prefix ? prefix : \"\" ) + ( +new Date() + Popcorn.guid.counter );\n },\n sizeOf: function( obj ) {\n var size = 0;\n\n for ( var prop in obj ) {\n size++;\n }\n\n return size;\n },\n isArray: Array.isArray || function( array ) {\n return toString.call( array ) === \"[object Array]\";\n },\n\n nop: function() {},\n\n position: function( elem ) {\n\n var clientRect = elem.getBoundingClientRect(),\n bounds = {},\n doc = elem.ownerDocument,\n docElem = document.documentElement,\n body = document.body,\n clientTop, clientLeft, scrollTop, scrollLeft, top, left;\n\n // Determine correct clientTop/Left\n clientTop = docElem.clientTop || body.clientTop || 0;\n clientLeft = docElem.clientLeft || body.clientLeft || 0;\n\n // Determine correct scrollTop/Left\n scrollTop = ( global.pageYOffset && docElem.scrollTop || body.scrollTop );\n scrollLeft = ( global.pageXOffset && docElem.scrollLeft || body.scrollLeft );\n\n // Temp top/left\n top = Math.ceil( clientRect.top + scrollTop - clientTop );\n left = Math.ceil( clientRect.left + scrollLeft - clientLeft );\n\n for ( var p in clientRect ) {\n bounds[ p ] = Math.round( clientRect[ p ] );\n }\n\n return Popcorn.extend({}, bounds, { top: top, left: left });\n },\n\n disable: function( instance, plugin ) {\n\n var disabled = instance.data.disabled;\n\n if ( disabled.indexOf( plugin ) === -1 ) {\n disabled.push( plugin );\n }\n\n return instance;\n },\n enable: function( instance, plugin ) {\n\n var disabled = instance.data.disabled,\n index = disabled.indexOf( plugin );\n\n if ( index > -1 ) {\n disabled.splice( index, 1 );\n }\n\n return instance;\n },\n destroy: function( instance ) {\n var events = instance.data.events,\n singleEvent, item, fn;\n\n // Iterate through all events and remove them\n for ( item in events ) {\n singleEvent = events[ item ];\n for ( fn in singleEvent ) {\n delete singleEvent[ fn 
];\n }\n events[ item ] = null;\n }\n\n if ( !instance.isDestroyed ) {\n instance.media.removeEventListener( \"timeupdate\", instance.data.timeUpdateFunction, false );\n instance.isDestroyed = true;\n }\n\n Popcorn.removeInstance( instance );\n }\n });\n\n // Memoized GUID Counter\n Popcorn.guid.counter = 1;\n\n // Factory to implement getters, setters and controllers\n // as Popcorn instance methods. The IIFE will create and return\n // an object with defined methods\n Popcorn.extend(Popcorn.p, (function() {\n\n var methods = \"load play pause currentTime playbackRate volume duration preload playbackRate \" +\n \"autoplay loop controls muted buffered readyState seeking paused played seekable ended\",\n ret = {};\n\n\n // Build methods, store in object that is returned and passed to extend\n Popcorn.forEach( methods.split( /\\s+/g ), function( name ) {\n\n ret[ name ] = function( arg ) {\n\n if ( typeof this.media[ name ] === \"function\" ) {\n this.media[ name ]();\n\n return this;\n }\n\n\n if ( arg != null ) {\n\n this.media[ name ] = arg;\n\n return this;\n }\n\n return this.media[ name ];\n };\n });\n\n return ret;\n\n })()\n );\n\n Popcorn.forEach( \"enable disable\".split(\" \"), function( method ) {\n Popcorn.p[ method ] = function( plugin ) {\n return Popcorn[ method ]( this, plugin );\n };\n });\n\n Popcorn.extend(Popcorn.p, {\n\n // Rounded currentTime\n roundTime: function() {\n return -~this.media.currentTime;\n },\n\n // Attach an event to a single point in time\n exec: function( time, fn ) {\n\n // Creating a one second track event with an empty end\n Popcorn.addTrackEvent( this, {\n start: time,\n end: time + 1,\n _running: false,\n _natives: {\n start: fn || Popcorn.nop,\n end: Popcorn.nop,\n type: \"exec\"\n }\n });\n\n return this;\n },\n\n // Mute the calling media, optionally toggle\n mute: function( toggle ) {\n\n var event = toggle == null || toggle === true ? 
\"muted\" : \"unmuted\";\n\n // If `toggle` is explicitly `false`,\n // unmute the media and restore the volume level\n if ( event === \"unmuted\" ) {\n this.media.muted = false;\n this.media.volume = this.data.state.volume;\n }\n\n // If `toggle` is either null or undefined,\n // save the current volume and mute the media element\n if ( event === \"muted\" ) {\n this.data.state.volume = this.media.volume;\n this.media.muted = true;\n }\n\n // Trigger either muted|unmuted event\n this.trigger( event );\n\n return this;\n },\n\n // Convenience method, unmute the calling media\n unmute: function( toggle ) {\n\n return this.mute( toggle == null ? false : !toggle );\n },\n\n // Get the client bounding box of an instance element\n position: function() {\n return Popcorn.position( this.media );\n },\n\n // Toggle a plugin's playback behaviour (on or off) per instance\n toggle: function( plugin ) {\n return Popcorn[ this.data.disabled.indexOf( plugin ) > -1 ? \"enable\" : \"disable\" ]( this, plugin );\n },\n\n // Set default values for plugin options objects per instance\n defaults: function( plugin, defaults ) {\n\n // If an array of default configurations is provided,\n // iterate and apply each to this instance\n if ( Popcorn.isArray( plugin ) ) {\n\n Popcorn.forEach( plugin, function( obj ) {\n for ( var name in obj ) {\n this.defaults( name, obj[ name ] );\n }\n }, this );\n\n return this;\n }\n\n if ( !this.options.defaults ) {\n this.options.defaults = {};\n }\n\n if ( !this.options.defaults[ plugin ] ) {\n this.options.defaults[ plugin ] = {};\n }\n\n Popcorn.extend( this.options.defaults[ plugin ], defaults );\n\n return this;\n }\n });\n\n Popcorn.Events = {\n UIEvents: \"blur focus focusin focusout load resize scroll unload\",\n MouseEvents: \"mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave click dblclick\",\n Events: \"loadstart progress suspend emptied stalled play pause \" +\n \"loadedmetadata loadeddata waiting playing canplay 
canplaythrough \" +\n \"seeking seeked timeupdate ended ratechange durationchange volumechange\"\n };\n\n Popcorn.Events.Natives = Popcorn.Events.UIEvents + \" \" +\n Popcorn.Events.MouseEvents + \" \" +\n Popcorn.Events.Events;\n\n internal.events.apiTypes = [ \"UIEvents\", \"MouseEvents\", \"Events\" ];\n\n // Privately compile events table at load time\n (function( events, data ) {\n\n var apis = internal.events.apiTypes,\n eventsList = events.Natives.split( /\\s+/g ),\n idx = 0, len = eventsList.length, prop;\n\n for( ; idx < len; idx++ ) {\n data.hash[ eventsList[idx] ] = true;\n }\n\n apis.forEach(function( val, idx ) {\n\n data.apis[ val ] = {};\n\n var apiEvents = events[ val ].split( /\\s+/g ),\n len = apiEvents.length,\n k = 0;\n\n for ( ; k < len; k++ ) {\n data.apis[ val ][ apiEvents[ k ] ] = true;\n }\n });\n })( Popcorn.Events, internal.events );\n\n Popcorn.events = {\n\n isNative: function( type ) {\n return !!internal.events.hash[ type ];\n },\n getInterface: function( type ) {\n\n if ( !Popcorn.events.isNative( type ) ) {\n return false;\n }\n\n var eventApi = internal.events,\n apis = eventApi.apiTypes,\n apihash = eventApi.apis,\n idx = 0, len = apis.length, api, tmp;\n\n for ( ; idx < len; idx++ ) {\n tmp = apis[ idx ];\n\n if ( apihash[ tmp ][ type ] ) {\n api = tmp;\n break;\n }\n }\n return api;\n },\n // Compile all native events to single array\n all: Popcorn.Events.Natives.split( /\\s+/g ),\n // Defines all Event handling static functions\n fn: {\n trigger: function( type, data ) {\n\n var eventInterface, evt;\n // setup checks for custom event system\n if ( this.data.events[ type ] && Popcorn.sizeOf( this.data.events[ type ] ) ) {\n\n eventInterface = Popcorn.events.getInterface( type );\n\n if ( eventInterface ) {\n\n evt = document.createEvent( eventInterface );\n evt.initEvent( type, true, true, global, 1 );\n\n this.media.dispatchEvent( evt );\n\n return this;\n }\n\n // Custom events\n Popcorn.forEach( this.data.events[ type ], 
function( obj, key ) {\n\n obj.call( this, data );\n\n }, this );\n\n }\n\n return this;\n },\n listen: function( type, fn ) {\n\n var self = this,\n hasEvents = true,\n eventHook = Popcorn.events.hooks[ type ],\n origType = type,\n tmp;\n\n if ( !this.data.events[ type ] ) {\n this.data.events[ type ] = {};\n hasEvents = false;\n }\n\n // Check and setup event hooks\n if ( eventHook ) {\n\n // Execute hook add method if defined\n if ( eventHook.add ) {\n eventHook.add.call( this, {}, fn );\n }\n\n // Reassign event type to our piggyback event type if defined\n if ( eventHook.bind ) {\n type = eventHook.bind;\n }\n\n // Reassign handler if defined\n if ( eventHook.handler ) {\n tmp = fn;\n\n fn = function wrapper( event ) {\n eventHook.handler.call( self, event, tmp );\n };\n }\n\n // assume the piggy back event is registered\n hasEvents = true;\n\n // Setup event registry entry\n if ( !this.data.events[ type ] ) {\n this.data.events[ type ] = {};\n // Toggle if the previous assumption was untrue\n hasEvents = false;\n }\n }\n\n // Register event and handler\n this.data.events[ type ][ fn.name || ( fn.toString() + Popcorn.guid() ) ] = fn;\n\n // only attach one event of any type\n if ( !hasEvents && Popcorn.events.all.indexOf( type ) > -1 ) {\n\n this.media.addEventListener( type, function( event ) {\n\n Popcorn.forEach( self.data.events[ type ], function( obj, key ) {\n if ( typeof obj === \"function\" ) {\n obj.call( self, event );\n }\n });\n\n }, false);\n }\n return this;\n },\n unlisten: function( type, fn ) {\n\n if ( this.data.events[ type ] && this.data.events[ type ][ fn ] ) {\n\n delete this.data.events[ type ][ fn ];\n\n return this;\n }\n\n this.data.events[ type ] = null;\n\n return this;\n }\n },\n hooks: {\n canplayall: {\n bind: \"canplaythrough\",\n add: function( event, callback ) {\n\n var state = false;\n\n if ( this.media.readyState ) {\n\n callback.call( this, event );\n\n state = true;\n }\n\n this.data.hooks.canplayall = {\n fired: state\n 
};\n },\n // declare special handling instructions\n handler: function canplayall( event, callback ) {\n\n if ( !this.data.hooks.canplayall.fired ) {\n // trigger original user callback once\n callback.call( this, event );\n\n this.data.hooks.canplayall.fired = true;\n }\n }\n }\n }\n };\n\n // Extend Popcorn.events.fns (listen, unlisten, trigger) to all Popcorn instances\n Popcorn.forEach( [ \"trigger\", \"listen\", \"unlisten\" ], function( key ) {\n Popcorn.p[ key ] = Popcorn.events.fn[ key ];\n });\n\n // Protected API methods\n Popcorn.protect = {\n natives: ( \"load play pause currentTime playbackRate mute volume duration removePlugin roundTime trigger listen unlisten exec\" +\n \"preload playbackRate autoplay loop controls muted buffered readyState seeking paused played seekable ended\" ).toLowerCase().split( /\\s+/ )\n };\n\n // Internal Only - Adds track events to the instance object\n Popcorn.addTrackEvent = function( obj, track ) {\n\n // Determine if this track has default options set for it\n // If so, apply them to the track object\n if ( track && track._natives && track._natives.type &&\n ( obj.options.defaults && obj.options.defaults[ track._natives.type ] ) ) {\n\n track = Popcorn.extend( {}, obj.options.defaults[ track._natives.type ], track );\n }\n\n if ( track._natives ) {\n // Supports user defined track event id\n track._id = !track.id ? 
Popcorn.guid( track._natives.type ) : track.id;\n\n // Push track event ids into the history\n obj.data.history.push( track._id );\n }\n\n track.start = Popcorn.util.toSeconds( track.start, obj.options.framerate );\n track.end = Popcorn.util.toSeconds( track.end, obj.options.framerate );\n\n // Store this definition in an array sorted by times\n var byStart = obj.data.trackEvents.byStart,\n byEnd = obj.data.trackEvents.byEnd,\n idx;\n\n for ( idx = byStart.length - 1; idx >= 0; idx-- ) {\n\n if ( track.start >= byStart[ idx ].start ) {\n byStart.splice( idx + 1, 0, track );\n break;\n }\n }\n\n for ( idx = byEnd.length - 1; idx >= 0; idx-- ) {\n\n if ( track.end > byEnd[ idx ].end ) {\n byEnd.splice( idx + 1, 0, track );\n break;\n }\n }\n\n // Store references to user added trackevents in ref table\n if ( track._id ) {\n Popcorn.addTrackEvent.ref( obj, track );\n }\n };\n\n // Internal Only - Adds track event references to the instance object's trackRefs hash table\n Popcorn.addTrackEvent.ref = function( obj, track ) {\n obj.data.trackRefs[ track._id ] = track;\n\n return obj;\n };\n\n Popcorn.removeTrackEvent = function( obj, trackId ) {\n\n var historyLen = obj.data.history.length,\n indexWasAt = 0,\n byStart = [],\n byEnd = [],\n animating = [],\n history = [];\n\n Popcorn.forEach( obj.data.trackEvents.byStart, function( o, i, context ) {\n // Preserve the original start/end trackEvents\n if ( !o._id ) {\n byStart.push( obj.data.trackEvents.byStart[i] );\n byEnd.push( obj.data.trackEvents.byEnd[i] );\n }\n\n // Filter for user track events (vs system track events)\n if ( o._id ) {\n\n // Filter for the trackevent to remove\n if ( o._id !== trackId ) {\n byStart.push( obj.data.trackEvents.byStart[i] );\n byEnd.push( obj.data.trackEvents.byEnd[i] );\n }\n\n // Capture the position of the track being removed.\n if ( o._id === trackId ) {\n indexWasAt = i;\n o._natives._teardown && o._natives._teardown.call( obj, o );\n }\n }\n\n });\n\n if ( 
obj.data.trackEvents.animating.length ) {\n Popcorn.forEach( obj.data.trackEvents.animating, function( o, i, context ) {\n // Preserve the original start/end trackEvents\n if ( !o._id ) {\n animating.push( obj.data.trackEvents.animating[i] );\n }\n\n // Filter for user track events (vs system track events)\n if ( o._id ) {\n // Filter for the trackevent to remove\n if ( o._id !== trackId ) {\n animating.push( obj.data.trackEvents.animating[i] );\n }\n }\n });\n }\n\n // Update\n if ( indexWasAt <= obj.data.trackEvents.startIndex ) {\n obj.data.trackEvents.startIndex--;\n }\n\n if ( indexWasAt <= obj.data.trackEvents.endIndex ) {\n obj.data.trackEvents.endIndex--;\n }\n\n obj.data.trackEvents.byStart = byStart;\n obj.data.trackEvents.byEnd = byEnd;\n obj.data.trackEvents.animating = animating;\n\n for ( var i = 0; i < historyLen; i++ ) {\n if ( obj.data.history[ i ] !== trackId ) {\n history.push( obj.data.history[ i ] );\n }\n }\n\n // Update ordered history array\n obj.data.history = history;\n\n // Update track event references\n Popcorn.removeTrackEvent.ref( obj, trackId );\n };\n\n // Internal Only - Removes track event references from instance object's trackRefs hash table\n Popcorn.removeTrackEvent.ref = function( obj, trackId ) {\n delete obj.data.trackRefs[ trackId ];\n\n return obj;\n };\n\n // Return an array of track events bound to this instance object\n Popcorn.getTrackEvents = function( obj ) {\n\n var trackevents = [],\n refs = obj.data.trackEvents.byStart,\n length = refs.length,\n idx = 0,\n ref;\n\n for ( ; idx < length; idx++ ) {\n ref = refs[ idx ];\n // Return only user attributed track event references\n if ( ref._id ) {\n trackevents.push( ref );\n }\n }\n\n return trackevents;\n };\n\n // Internal Only - Returns an instance object's trackRefs hash table\n Popcorn.getTrackEvents.ref = function( obj ) {\n return obj.data.trackRefs;\n };\n\n // Return a single track event bound to this instance object\n Popcorn.getTrackEvent = function( obj, 
trackId ) {\n return obj.data.trackRefs[ trackId ];\n };\n\n // Internal Only - Returns an instance object's track reference by track id\n Popcorn.getTrackEvent.ref = function( obj, trackId ) {\n return obj.data.trackRefs[ trackId ];\n };\n\n Popcorn.getLastTrackEventId = function( obj ) {\n return obj.data.history[ obj.data.history.length - 1 ];\n };\n\n Popcorn.timeUpdate = function( obj, event ) {\n\n var currentTime = obj.media.currentTime,\n previousTime = obj.data.trackEvents.previousUpdateTime,\n tracks = obj.data.trackEvents,\n animating = tracks.animating,\n end = tracks.endIndex,\n start = tracks.startIndex,\n animIndex = 0,\n\n registryByName = Popcorn.registryByName,\n\n byEnd, byStart, byAnimate, natives, type;\n\n // Playbar advancing\n if ( previousTime < currentTime ) {\n\n while ( tracks.byEnd[ end ] && tracks.byEnd[ end ].end <= currentTime ) {\n\n byEnd = tracks.byEnd[ end ];\n natives = byEnd._natives;\n type = natives && natives.type;\n\n // If plugin does not exist on this instance, remove it\n if ( !natives ||\n ( !!registryByName[ type ] ||\n !!obj[ type ] ) ) {\n\n if ( byEnd._running === true ) {\n byEnd._running = false;\n natives.end.call( obj, event, byEnd );\n }\n\n end++;\n } else {\n // remove track event\n Popcorn.removeTrackEvent( obj, byEnd._id );\n return;\n }\n }\n\n while ( tracks.byStart[ start ] && tracks.byStart[ start ].start <= currentTime ) {\n\n byStart = tracks.byStart[ start ];\n natives = byStart._natives;\n type = natives && natives.type;\n\n // If plugin does not exist on this instance, remove it\n if ( !natives ||\n ( !!registryByName[ type ] ||\n !!obj[ type ] ) ) {\n\n if ( byStart.end > currentTime &&\n byStart._running === false &&\n obj.data.disabled.indexOf( type ) === -1 ) {\n\n byStart._running = true;\n natives.start.call( obj, event, byStart );\n\n // If the `frameAnimation` option is used,\n // push the current byStart object into the `animating` cue\n if ( obj.options.frameAnimation &&\n ( byStart && 
byStart._running && byStart._natives.frame ) ) {\n\n animating.push( byStart );\n }\n }\n start++;\n } else {\n // remove track event\n Popcorn.removeTrackEvent( obj, byStart._id );\n return;\n }\n }\n\n // If the `frameAnimation` option is used, iterate the animating track\n // and execute the `frame` callback\n if ( obj.options.frameAnimation ) {\n while ( animIndex < animating.length ) {\n\n byAnimate = animating[ animIndex ];\n\n if ( !byAnimate._running ) {\n animating.splice( animIndex, 1 );\n } else {\n byAnimate._natives.frame.call( obj, event, byAnimate, currentTime );\n animIndex++;\n }\n }\n }\n\n // Playbar receding\n } else if ( previousTime > currentTime ) {\n\n while ( tracks.byStart[ start ] && tracks.byStart[ start ].start > currentTime ) {\n\n byStart = tracks.byStart[ start ];\n natives = byStart._natives;\n type = natives && natives.type;\n\n // if plugin does not exist on this instance, remove it\n if ( !natives ||\n ( !!registryByName[ type ] ||\n !!obj[ type ] ) ) {\n\n if ( byStart._running === true ) {\n byStart._running = false;\n natives.end.call( obj, event, byStart );\n }\n start--;\n } else {\n // remove track event\n Popcorn.removeTrackEvent( obj, byStart._id );\n return;\n }\n }\n\n while ( tracks.byEnd[ end ] && tracks.byEnd[ end ].end > currentTime ) {\n\n byEnd = tracks.byEnd[ end ];\n natives = byEnd._natives;\n type = natives && natives.type;\n\n // if plugin does not exist on this instance, remove it\n if ( !natives ||\n ( !!registryByName[ type ] ||\n !!obj[ type ] ) ) {\n\n if ( byEnd.start <= currentTime &&\n byEnd._running === false &&\n obj.data.disabled.indexOf( type ) === -1 ) {\n\n byEnd._running = true;\n natives.start.call( obj, event, byEnd );\n\n // If the `frameAnimation` option is used,\n // push the current byEnd object into the `animating` cue\n if ( obj.options.frameAnimation &&\n ( byEnd && byEnd._running && byEnd._natives.frame ) ) {\n\n animating.push( byEnd );\n }\n }\n end--;\n } else {\n // remove track 
event\n Popcorn.removeTrackEvent( obj, byEnd._id );\n return;\n }\n }\n\n // If the `frameAnimation` option is used, iterate the animating track\n // and execute the `frame` callback\n if ( obj.options.frameAnimation ) {\n while ( animIndex < animating.length ) {\n\n byAnimate = animating[ animIndex ];\n\n if ( !byAnimate._running ) {\n animating.splice( animIndex, 1 );\n } else {\n byAnimate._natives.frame.call( obj, event, byAnimate, currentTime );\n animIndex++;\n }\n }\n }\n // time bar is not moving ( video is paused )\n }\n\n tracks.endIndex = end;\n tracks.startIndex = start;\n tracks.previousUpdateTime = currentTime;\n };\n\n // Map and Extend TrackEvent functions to all Popcorn instances\n Popcorn.extend( Popcorn.p, {\n\n getTrackEvents: function() {\n return Popcorn.getTrackEvents.call( null, this );\n },\n\n getTrackEvent: function( id ) {\n return Popcorn.getTrackEvent.call( null, this, id );\n },\n\n getLastTrackEventId: function() {\n return Popcorn.getLastTrackEventId.call( null, this );\n },\n\n removeTrackEvent: function( id ) {\n\n Popcorn.removeTrackEvent.call( null, this, id );\n return this;\n },\n\n removePlugin: function( name ) {\n Popcorn.removePlugin.call( null, this, name );\n return this;\n },\n\n timeUpdate: function( event ) {\n Popcorn.timeUpdate.call( null, this, event );\n return this;\n },\n\n destroy: function() {\n Popcorn.destroy.call( null, this );\n return this;\n }\n });\n\n // Plugin manifests\n Popcorn.manifest = {};\n // Plugins are registered\n Popcorn.registry = [];\n Popcorn.registryByName = {};\n // An interface for extending Popcorn\n // with plugin functionality\n Popcorn.plugin = function( name, definition, manifest ) {\n\n if ( Popcorn.protect.natives.indexOf( name.toLowerCase() ) >= 0 ) {\n Popcorn.error( \"'\" + name + \"' is a protected function name\" );\n return;\n }\n\n // Provides some sugar, but ultimately extends\n // the definition into Popcorn.p\n var reserved = [ \"start\", \"end\" ],\n plugin = {},\n 
setup,\n isfn = typeof definition === \"function\",\n methods = [ \"_setup\", \"_teardown\", \"start\", \"end\", \"frame\" ];\n\n // combines calls of two function calls into one\n var combineFn = function( first, second ) {\n\n first = first || Popcorn.nop;\n second = second || Popcorn.nop;\n\n return function() {\n\n first.apply( this, arguments );\n second.apply( this, arguments );\n };\n };\n\n // If `manifest` arg is undefined, check for manifest within the `definition` object\n // If no `definition.manifest`, an empty object is a sufficient fallback\n Popcorn.manifest[ name ] = manifest = manifest || definition.manifest || {};\n\n // apply safe, and empty default functions\n methods.forEach(function( method ) {\n\n definition[ method ] = definition[ method ] || Popcorn.nop;\n });\n\n var pluginFn = function( setup, options ) {\n\n if ( !options ) {\n return this;\n }\n\n // Storing the plugin natives\n var natives = options._natives = {},\n compose = \"\",\n defaults, originalOpts, manifestOpts, mergedSetupOpts;\n\n Popcorn.extend( natives, setup );\n\n options._natives.type = name;\n options._running = false;\n\n // Check for previously set default options\n defaults = this.options.defaults && this.options.defaults[ options._natives && options._natives.type ];\n\n // default to an empty string if no effect exists\n // split string into an array of effects\n options.compose = options.compose && options.compose.split( \" \" ) || [];\n options.effect = options.effect && options.effect.split( \" \" ) || [];\n\n // join the two arrays together\n options.compose = options.compose.concat( options.effect );\n\n options.compose.forEach(function( composeOption ) {\n\n // if the requested compose is garbage, throw it away\n compose = Popcorn.compositions[ composeOption ] || {};\n\n // extends previous functions with compose function\n methods.forEach(function( method ) {\n\n natives[ method ] = combineFn( natives[ method ], compose[ method ] );\n });\n });\n\n // 
Ensure a manifest object, an empty object is a sufficient fallback\n options._natives.manifest = manifest;\n\n // Checks for expected properties\n if ( !( \"start\" in options ) ) {\n options.start = 0;\n }\n\n if ( !( \"end\" in options ) ) {\n options.end = this.duration() || Number.MAX_VALUE;\n }\n\n // Merge with defaults if they exist, make sure per call is prioritized\n mergedSetupOpts = defaults ? Popcorn.extend( {}, defaults, options ) :\n options;\n\n // Resolves 239, 241, 242\n if ( !mergedSetupOpts.target ) {\n\n // Sometimes the manifest may be missing entirely\n // or it has an options object that doesn't have a `target` property\n manifestOpts = \"options\" in manifest && manifest.options;\n\n mergedSetupOpts.target = manifestOpts && \"target\" in manifestOpts && manifestOpts.target;\n }\n\n // Trigger _setup method if exists\n options._natives._setup && options._natives._setup.call( this, mergedSetupOpts );\n\n // Create new track event for this instance\n Popcorn.addTrackEvent( this, Popcorn.extend( mergedSetupOpts, options ) );\n\n // Future support for plugin event definitions\n // for all of the native events\n Popcorn.forEach( setup, function( callback, type ) {\n\n if ( type !== \"type\" ) {\n\n if ( reserved.indexOf( type ) === -1 ) {\n\n this.listen( type, callback );\n }\n }\n\n }, this );\n\n return this;\n };\n\n // Assign new named definition\n plugin[ name ] = function( options ) {\n return pluginFn.call( this, isfn ? 
definition.call( this, options ) : definition,\n options );\n };\n\n // Extend Popcorn.p with new named definition\n Popcorn.extend( Popcorn.p, plugin );\n\n // Push into the registry\n var entry = {\n fn: plugin[ name ],\n definition: definition,\n base: definition,\n parents: [],\n name: name\n };\n Popcorn.registry.push(\n Popcorn.extend( plugin, entry, {\n type: name\n })\n );\n Popcorn.registryByName[ name ] = entry;\n\n return plugin;\n };\n\n Popcorn.plugin.debug = false;\n\n // removePlugin( type ) removes all tracks of that from all instances of popcorn\n // removePlugin( obj, type ) removes all tracks of type from obj, where obj is a single instance of popcorn\n Popcorn.removePlugin = function( obj, name ) {\n\n // Check if we are removing plugin from an instance or from all of Popcorn\n if ( !name ) {\n\n // Fix the order\n name = obj;\n obj = Popcorn.p;\n\n if ( Popcorn.protect.natives.indexOf( name.toLowerCase() ) >= 0 ) {\n Popcorn.error( \"'\" + name + \"' is a protected function name\" );\n return;\n }\n\n var registryLen = Popcorn.registry.length,\n registryIdx;\n\n // remove plugin reference from registry\n for ( registryIdx = 0; registryIdx < registryLen; registryIdx++ ) {\n if ( Popcorn.registry[ registryIdx ].name === name ) {\n Popcorn.registry.splice( registryIdx, 1 );\n delete Popcorn.registryByName[ name ];\n\n // delete the plugin\n delete obj[ name ];\n\n // plugin found and removed, stop checking, we are done\n return;\n }\n }\n\n }\n\n var byStart = obj.data.trackEvents.byStart,\n byEnd = obj.data.trackEvents.byEnd,\n animating = obj.data.trackEvents.animating,\n idx, sl;\n\n // remove all trackEvents\n for ( idx = 0, sl = byStart.length; idx < sl; idx++ ) {\n\n if ( ( byStart[ idx ] && byStart[ idx ]._natives && byStart[ idx ]._natives.type === name ) &&\n ( byEnd[ idx ] && byEnd[ idx ]._natives && byEnd[ idx ]._natives.type === name ) ) {\n\n byStart[ idx ]._natives._teardown && byStart[ idx ]._natives._teardown.call( obj, byStart[ 
idx ] );\n\n byStart.splice( idx, 1 );\n byEnd.splice( idx, 1 );\n\n // update for loop if something removed, but keep checking\n idx--; sl--;\n if ( obj.data.trackEvents.startIndex <= idx ) {\n obj.data.trackEvents.startIndex--;\n obj.data.trackEvents.endIndex--;\n }\n }\n }\n\n //remove all animating events\n for ( idx = 0, sl = animating.length; idx < sl; idx++ ) {\n\n if ( animating[ idx ] && animating[ idx ]._natives && animating[ idx ]._natives.type === name ) {\n\n animating.splice( idx, 1 );\n\n // update for loop if something removed, but keep checking\n idx--; sl--;\n }\n }\n\n };\n\n Popcorn.compositions = {};\n\n // Plugin inheritance\n Popcorn.compose = function( name, definition, manifest ) {\n\n // If `manifest` arg is undefined, check for manifest within the `definition` object\n // If no `definition.manifest`, an empty object is a sufficient fallback\n Popcorn.manifest[ name ] = manifest = manifest || definition.manifest || {};\n\n // register the effect by name\n Popcorn.compositions[ name ] = definition;\n };\n\n Popcorn.plugin.effect = Popcorn.effect = Popcorn.compose;\n\n // stores parsers keyed on filetype\n Popcorn.parsers = {};\n\n // An interface for extending Popcorn\n // with parser functionality\n Popcorn.parser = function( name, type, definition ) {\n\n if ( Popcorn.protect.natives.indexOf( name.toLowerCase() ) >= 0 ) {\n Popcorn.error( \"'\" + name + \"' is a protected function name\" );\n return;\n }\n\n // fixes parameters for overloaded function call\n if ( typeof type === \"function\" && !definition ) {\n definition = type;\n type = \"\";\n }\n\n if ( typeof definition !== \"function\" || typeof type !== \"string\" ) {\n return;\n }\n\n // Provides some sugar, but ultimately extends\n // the definition into Popcorn.p\n\n var natives = Popcorn.events.all,\n parseFn,\n parser = {};\n\n parseFn = function( filename, callback ) {\n\n if ( !filename ) {\n return this;\n }\n\n var that = this;\n\n Popcorn.xhr({\n url: filename,\n 
dataType: type,\n success: function( data ) {\n\n var tracksObject = definition( data ),\n tracksData,\n tracksDataLen,\n tracksDef,\n idx = 0;\n\n tracksData = tracksObject.data || [];\n tracksDataLen = tracksData.length;\n tracksDef = null;\n\n // If no tracks to process, return immediately\n if ( !tracksDataLen ) {\n return;\n }\n\n // Create tracks out of parsed object\n for ( ; idx < tracksDataLen; idx++ ) {\n\n tracksDef = tracksData[ idx ];\n\n for ( var key in tracksDef ) {\n\n if ( hasOwn.call( tracksDef, key ) && !!that[ key ] ) {\n\n that[ key ]( tracksDef[ key ] );\n }\n }\n }\n if ( callback ) {\n callback();\n }\n }\n });\n\n return this;\n };\n\n // Assign new named definition\n parser[ name ] = parseFn;\n\n // Extend Popcorn.p with new named definition\n Popcorn.extend( Popcorn.p, parser );\n\n // keys the function name by filetype extension\n //Popcorn.parsers[ name ] = true;\n\n return parser;\n };\n\n\n // Cache references to reused RegExps\n var rparams = /\\?/,\n // XHR Setup object\n setup = {\n url: \"\",\n data: \"\",\n dataType: \"\",\n success: Popcorn.nop,\n type: \"GET\",\n async: true,\n xhr: function() {\n return new global.XMLHttpRequest();\n }\n };\n\n Popcorn.xhr = function( options ) {\n\n options.dataType = options.dataType && options.dataType.toLowerCase() || null;\n\n if ( options.dataType &&\n ( options.dataType === \"jsonp\" || options.dataType === \"script\" ) ) {\n\n Popcorn.xhr.getJSONP(\n options.url,\n options.success,\n options.dataType === \"script\"\n );\n return;\n }\n\n var settings = Popcorn.extend( {}, setup, options );\n\n // Create new XMLHttpRequest object\n settings.ajax = settings.xhr();\n\n if ( settings.ajax ) {\n\n if ( settings.type === \"GET\" && settings.data ) {\n\n // append query string\n settings.url += ( rparams.test( settings.url ) ? 
\"&\" : \"?\" ) + settings.data;\n\n // Garbage collect and reset settings.data\n settings.data = null;\n }\n\n\n settings.ajax.open( settings.type, settings.url, settings.async );\n settings.ajax.send( settings.data || null );\n\n return Popcorn.xhr.httpData( settings );\n }\n };\n\n\n Popcorn.xhr.httpData = function( settings ) {\n\n var data, json = null;\n\n settings.ajax.onreadystatechange = function() {\n\n if ( settings.ajax.readyState === 4 ) {\n\n try {\n json = JSON.parse( settings.ajax.responseText );\n } catch( e ) {\n //suppress\n }\n\n data = {\n xml: settings.ajax.responseXML,\n text: settings.ajax.responseText,\n json: json\n };\n\n // If a dataType was specified, return that type of data\n if ( settings.dataType ) {\n data = data[ settings.dataType ];\n }\n\n\n settings.success.call( settings.ajax, data );\n\n }\n };\n return data;\n };\n\n Popcorn.xhr.getJSONP = function( url, success, isScript ) {\n\n var head = document.head || document.getElementsByTagName( \"head\" )[ 0 ] || document.documentElement,\n script = document.createElement( \"script\" ),\n paramStr = url.split( \"?\" )[ 1 ],\n isFired = false,\n params = [],\n callback, parts, callparam;\n\n if ( paramStr && !isScript ) {\n params = paramStr.split( \"&\" );\n }\n\n if ( params.length ) {\n parts = params[ params.length - 1 ].split( \"=\" );\n }\n\n callback = params.length ? ( parts[ 1 ] ? 
parts[ 1 ] : parts[ 0 ] ) : \"jsonp\";\n\n if ( !paramStr && !isScript ) {\n url += \"?callback=\" + callback;\n }\n\n if ( callback && !isScript ) {\n\n // If a callback name already exists\n if ( !!window[ callback ] ) {\n // Create a new unique callback name\n callback = Popcorn.guid( callback );\n }\n\n // Define the JSONP success callback globally\n window[ callback ] = function( data ) {\n // Fire success callbacks\n success && success( data );\n isFired = true;\n };\n\n // Replace callback param and callback name\n url = url.replace( parts.join( \"=\" ), parts[ 0 ] + \"=\" + callback );\n }\n\n script.onload = function() {\n\n // Handling remote script loading callbacks\n if ( isScript ) {\n // getScript\n success && success();\n }\n\n // Executing for JSONP requests\n if ( isFired ) {\n // Garbage collect the callback\n delete window[ callback ];\n }\n // Garbage collect the script resource\n head.removeChild( script );\n };\n\n script.src = url;\n\n head.insertBefore( script, head.firstChild );\n\n return;\n };\n\n Popcorn.getJSONP = Popcorn.xhr.getJSONP;\n\n Popcorn.getScript = Popcorn.xhr.getScript = function( url, success ) {\n\n return Popcorn.xhr.getJSONP( url, success, true );\n };\n\n Popcorn.util = {\n // Simple function to parse a timestamp into seconds\n // Acceptable formats are:\n // HH:MM:SS.MMM\n // HH:MM:SS;FF\n // Hours and minutes are optional. 
They default to 0\n toSeconds: function( timeStr, framerate ) {\n // Hours and minutes are optional\n // Seconds must be specified\n // Seconds can be followed by milliseconds OR by the frame information\n var validTimeFormat = /^([0-9]+:){0,2}[0-9]+([.;][0-9]+)?$/,\n errorMessage = \"Invalid time format\",\n digitPairs, lastIndex, lastPair, firstPair,\n frameInfo, frameTime;\n\n if ( typeof timeStr === \"number\" ) {\n return timeStr;\n }\n\n if ( typeof timeStr === \"string\" &&\n !validTimeFormat.test( timeStr ) ) {\n Popcorn.error( errorMessage );\n }\n\n digitPairs = timeStr.split( \":\" );\n lastIndex = digitPairs.length - 1;\n lastPair = digitPairs[ lastIndex ];\n\n // Fix last element:\n if ( lastPair.indexOf( \";\" ) > -1 ) {\n\n frameInfo = lastPair.split( \";\" );\n frameTime = 0;\n\n if ( framerate && ( typeof framerate === \"number\" ) ) {\n frameTime = parseFloat( frameInfo[ 1 ], 10 ) / framerate;\n }\n\n digitPairs[ lastIndex ] = parseInt( frameInfo[ 0 ], 10 ) + frameTime;\n }\n\n firstPair = digitPairs[ 0 ];\n\n return {\n\n 1: parseFloat( firstPair, 10 ),\n\n 2: ( parseInt( firstPair, 10 ) * 60 ) +\n parseFloat( digitPairs[ 1 ], 10 ),\n\n 3: ( parseInt( firstPair, 10 ) * 3600 ) +\n ( parseInt( digitPairs[ 1 ], 10 ) * 60 ) +\n parseFloat( digitPairs[ 2 ], 10 )\n\n }[ digitPairs.length || 1 ];\n }\n };\n\n\n // Initialize locale data\n // Based on http://en.wikipedia.org/wiki/Language_localisation#Language_tags_and_codes\n function initLocale( arg ) {\n\n var locale = typeof arg === \"string\" ? 
arg : [ arg.language, arg.region ].join( \"-\" ),\n parts = locale.split( \"-\" );\n\n // Setup locale data table\n return {\n iso6391: locale,\n language: parts[ 0 ] || \"\",\n region: parts[ 1 ] || \"\"\n };\n }\n\n // Declare locale data table\n var localeData = initLocale( global.navigator.userLanguage || global.navigator.language );\n\n Popcorn.locale = {\n\n // Popcorn.locale.get()\n // returns reference to privately\n // defined localeData\n get: function() {\n return localeData;\n },\n\n // Popcorn.locale.set( string|object );\n set: function( arg ) {\n\n localeData = initLocale( arg );\n\n Popcorn.locale.broadcast();\n\n return localeData;\n },\n\n // Popcorn.locale.broadcast( type )\n // Sends events to all popcorn media instances that are\n // listening for locale events\n broadcast: function( type ) {\n\n var instances = Popcorn.instances,\n length = instances.length,\n idx = 0,\n instance;\n\n type = type || \"locale:changed\";\n\n // Iterate all current instances\n for ( ; idx < length; idx++ ) {\n instance = instances[ idx ];\n\n // For those instances with locale event listeners,\n // trigger a locale change event\n if ( type in instance.data.events ) {\n instance.trigger( type );\n }\n }\n }\n };\n\n // alias for exec function\n Popcorn.p.cue = Popcorn.p.exec;\n\n // Exposes Popcorn to global context\n global.Popcorn = Popcorn;\n\n document.addEventListener( \"DOMContentLoaded\", function() {\n\n // Supports non-specific elements\n var dataAttr = \"data-timeline-sources\",\n medias = document.querySelectorAll( \"[\" + dataAttr + \"]\" );\n\n Popcorn.forEach( medias, function( idx, key ) {\n\n var media = medias[ key ],\n hasDataSources = false,\n dataSources, data, popcornMedia;\n\n // Ensure that the DOM has an id\n if ( !media.id ) {\n\n media.id = Popcorn.guid( \"__popcorn\" );\n }\n\n // Ensure we're looking at a dom node\n if ( media.nodeType && media.nodeType === 1 ) {\n\n popcornMedia = Popcorn( \"#\" + media.id );\n\n dataSources = ( 
media.getAttribute( dataAttr ) || \"\" ).split( \",\" );\n\n if ( dataSources[ 0 ] ) {\n\n Popcorn.forEach( dataSources, function( source ) {\n\n // split the parser and data as parser!file\n data = source.split( \"!\" );\n\n // if no parser is defined for the file, assume \"parse\" + file extension\n if ( data.length === 1 ) {\n\n data = source.split( \".\" );\n data[ 0 ] = \"parse\" + data[ data.length - 1 ].toUpperCase();\n data[ 1 ] = source;\n }\n\n // If the media has data sources and the correct parser is registered, continue to load\n if ( dataSources[ 0 ] && popcornMedia[ data[ 0 ] ] ) {\n\n // Set up the media and load in the datasources\n popcornMedia[ data[ 0 ] ]( data[ 1 ] );\n\n }\n });\n\n }\n\n // Only play the media if it was specified to do so\n if ( !!popcornMedia.autoplay ) {\n popcornMedia.play();\n }\n\n }\n });\n }, false );\n\n})(window, window.document);\n\n"},"message":{"kind":"string","value":"Fixed linting error and remove any leftover reference to the instance methods\n"},"old_file":{"kind":"string","value":"popcorn.js"},"subject":{"kind":"string","value":"Fixed linting error and remove any leftover reference to the instance methods"},"git_diff":{"kind":"string","value":"opcorn.js\n \n that.data.timeUpdateFunction = function( event ) {\n Popcorn.timeUpdate( that, event );\n }\n };\n \n if ( !that.isDestroyed ) {\n that.media.addEventListener( \"timeupdate\", that.data.timeUpdateFunction, false );\n instance.isDestroyed = true;\n }\n \n Popcorn.removeInstance( instance );\n Popcorn.instances.splice( Popcorn.instanceIds[ instance.id ], 1 );\n\n delete Popcorn.instanceIds[ instance.id ];\n }\n });\n 
"}}},{"rowIdx":2026,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"68d146eac64bfd70f992d0310b39388b82fc0bd4"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"philchand/mpdroid-2014,philchand/mpdroid-2014,joansmith/dmix,abarisain/dmix,0359xiaodong/dmix,abarisain/dmix,0359xiaodong/dmix,hurzl/dmix,philchand/mpdroid-2014,hurzl/dmix,philchand/mpdroid-2014,jcnoir/dmix,jcnoir/dmix,joansmith/dmix"},"new_contents":{"kind":"string","value":"package com.namelessdev.mpdroid.fragments;\n\nimport android.annotation.TargetApi;\nimport android.app.ActionBar;\nimport android.app.Activity;\nimport android.app.AlertDialog;\nimport android.content.DialogInterface;\nimport android.os.Bundle;\nimport android.support.v4.app.Fragment;\nimport android.view.*;\nimport android.view.MenuItem.OnMenuItemClickListener;\nimport android.widget.*;\nimport android.widget.AdapterView.AdapterContextMenuInfo;\nimport android.widget.AdapterView.OnItemClickListener;\nimport com.namelessdev.mpdroid.MPDApplication;\nimport com.namelessdev.mpdroid.R;\nimport com.namelessdev.mpdroid.adapters.ArrayIndexerAdapter;\nimport com.namelessdev.mpdroid.helpers.MPDAsyncHelper.AsyncExecListener;\nimport org.a0z.mpd.Item;\nimport org.a0z.mpd.exception.MPDServerException;\n\nimport java.util.List;\n\npublic abstract class BrowseFragment extends Fragment implements OnMenuItemClickListener, AsyncExecListener, OnItemClickListener {\n\n\tprotected int iJobID = -1;\n\n\tpublic static final int MAIN = 0;\n\tpublic static final int PLAYLIST = 3;\n\n\tpublic static final int ADD = 0;\n\tpublic static final int ADDNREPLACE = 1;\n\tpublic static final int ADDNREPLACEPLAY = 4;\n\tpublic static final int ADDNPLAY = 2;\n\tpublic static final int ADD_TO_PLAYLIST = 3;\n\n\tprotected List items = null;\n\t\n\tprotected MPDApplication app = null;\n\tprotected View 
loadingView;\n\tprotected TextView loadingTextView;\n\tprotected View noResultView;\n\tprotected AbsListView list;\n\tprivate boolean firstUpdateDone = false;\n\n\tString context;\n\tint irAdd, irAdded;\n\n\tpublic BrowseFragment(int rAdd, int rAdded, String pContext) {\n\t\tsuper();\n\t\tirAdd = rAdd;\n\t\tirAdded = rAdded;\n\n\t\tcontext = pContext;\n\n\t\tsetHasOptionsMenu(false);\n\t}\n\n\t@TargetApi(11)\n\t@Override\n\tpublic void onActivityCreated(Bundle savedInstanceState) {\n\t\tsuper.onActivityCreated(savedInstanceState);\n\t\tapp = (MPDApplication) getActivity().getApplicationContext();\n\t\ttry {\n\t\t\tActivity activity = this.getActivity();\n\t\t\tActionBar actionBar = activity.getActionBar();\n\t\t\tactionBar.setDisplayHomeAsUpEnabled(true);\n\t\t} catch (NoClassDefFoundError e) {\n\t\t\t// Older android\n\t\t} catch (NullPointerException e) {\n\n\t\t} catch (NoSuchMethodError e) {\n\n\t\t}\n\t}\n\n\t@Override\n\tpublic void onStart() {\n\t\tsuper.onStart();\n\t\tapp.setActivity(getActivity());\n\t\tif(!firstUpdateDone) {\n\t\t\tfirstUpdateDone = true;\n\t\t\tUpdateList();\n\t\t}\n\t}\n\n\t@Override\n\tpublic void onStop() {\n\t\tsuper.onStop();\n\t\tapp.unsetActivity(getActivity());\n\t}\n\n\t@Override\n\tpublic View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {\n\t\tView view = inflater.inflate(R.layout.browse, container, false);\n\t\tlist = (ListView) view.findViewById(R.id.list);\n\t\tregisterForContextMenu(list);\n\t\tlist.setOnItemClickListener(this);\n\t\tif (android.os.Build.VERSION.SDK_INT == 19)\n\t\t\tlist.setFastScrollAlwaysVisible(true);\n\t\tloadingView = view.findViewById(R.id.loadingLayout);\n\t\tloadingTextView = (TextView) view.findViewById(R.id.loadingText);\n\t\tnoResultView = view.findViewById(R.id.noResultLayout);\n\t\tloadingTextView.setText(getLoadingText());\n\t\treturn view;\n\t}\n\t\n\t@Override\n\tpublic void onViewCreated(View view, Bundle savedInstanceState) 
{\n\t\tsuper.onViewCreated(view, savedInstanceState);\n\t\tif(items != null) {\n\t\t\tlist.setAdapter(getCustomListAdapter());\n\t\t}\n\t}\n\n\t@Override\n\tpublic void onDestroyView() {\n\t\t// help out the GC; imitated from ListFragment source\n\t\tloadingView = null;\n\t\tloadingTextView = null;\n\t\tnoResultView = null;\n\t\tsuper.onDestroyView();\n\t}\n\n\t/*\n\t * Override this to display a custom activity title\n\t */\n\tpublic String getTitle() {\n\t\treturn \"\";\n\t}\n\n\t/*\n\t * Override this to display a custom loading text\n\t */\n\tpublic int getLoadingText() {\n\t\treturn R.string.loading;\n\t}\n\n\tpublic void setActivityTitle(String title) {\n\t\tgetActivity().setTitle(title);\n\t}\n\n\tpublic void UpdateList() {\n\t\tlist.setAdapter(null);\n\t\tnoResultView.setVisibility(View.GONE);\n\t\tloadingView.setVisibility(View.VISIBLE);\n\n\t\t// Loading Artists asynchronous...\n\t\tapp.oMPDAsyncHelper.addAsyncExecListener(this);\n\t\tiJobID = app.oMPDAsyncHelper.execAsync(new Runnable() {\n\t\t\t@Override\n\t\t\tpublic void run() {\n\t\t\t\tasyncUpdate();\n\t\t\t}\n\t\t});\n\t}\n\n\t@Override\n\tpublic void onCreateContextMenu(ContextMenu menu, View v, ContextMenu.ContextMenuInfo menuInfo) {\n\t\tAdapterContextMenuInfo info = (AdapterContextMenuInfo) menuInfo;\n\n int index = (int) info.id;\n if (index >= 0 && items.size() > index ) {\n menu.setHeaderTitle(items.get((int) info.id).toString());\n\t\tandroid.view.MenuItem addItem = menu.add(ADD, ADD, 0, getResources().getString(irAdd));\n\t\taddItem.setOnMenuItemClickListener(this);\n\t\tandroid.view.MenuItem addAndReplaceItem = menu.add(ADDNREPLACE, ADDNREPLACE, 0, R.string.addAndReplace);\n\t\taddAndReplaceItem.setOnMenuItemClickListener(this);\n\t\tandroid.view.MenuItem addAndReplacePlayItem = menu.add(ADDNREPLACEPLAY, ADDNREPLACEPLAY, 0, R.string.addAndReplacePlay);\n\t\taddAndReplacePlayItem.setOnMenuItemClickListener(this);\n\t\tandroid.view.MenuItem addAndPlayItem = menu.add(ADDNPLAY, ADDNPLAY, 0, 
R.string.addAndPlay);\n\t\taddAndPlayItem.setOnMenuItemClickListener(this);\n\t\t\n\t\tif (R.string.addPlaylist!=irAdd && R.string.addStream!=irAdd) {\n\t\t\tint id=0;\n\t\t\tSubMenu playlistMenu=menu.addSubMenu(R.string.addToPlaylist);\n\t\t\tandroid.view.MenuItem item=playlistMenu.add(ADD_TO_PLAYLIST, id++, (int)info.id, R.string.newPlaylist);\n\t\t\titem.setOnMenuItemClickListener(this);\n\t\t\t\n\t\t\ttry {\n\t\t\t\tList playlists=((MPDApplication) getActivity().getApplication()).oMPDAsyncHelper.oMPD.getPlaylists();\n\t\t\t\t\n\t\t\t\tif (null!=playlists) {\n\t\t\t\t\tfor (Item pl : playlists) {\n\t\t\t\t\t\titem = playlistMenu.add(ADD_TO_PLAYLIST, id++, (int) info.id, pl.getName());\n\t\t\t\t\t\titem.setOnMenuItemClickListener(this);\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t} catch (MPDServerException e) {\n\t\t\t\t// TODO Auto-generated catch block\n\t\t\t\te.printStackTrace();\n\t\t\t}\n\t\t}\n }\n }\n\n\tprotected abstract void add(Item item, boolean replace, boolean play);\n\n\tprotected abstract void add(Item item, String playlist);\n\n\t@Override\n\tpublic boolean onMenuItemClick(final android.view.MenuItem item) {\n\t\tfinal AdapterContextMenuInfo info = (AdapterContextMenuInfo) item.getMenuInfo();\n\t\tswitch (item.getGroupId()) {\n\t\t\tcase ADDNREPLACEPLAY:\n\t\t\tcase ADDNREPLACE:\n\t\t\tcase ADD:\n\t\t\tcase ADDNPLAY:\n\t\t\t\tapp.oMPDAsyncHelper.execAsync(new Runnable() {\n\t\t\t\t\t@Override\n\t\t\t\t\tpublic void run() {\n\t\t\t\t\t\tboolean replace = false;\n\t\t\t\t\t\tboolean play = false;\n\t\t\t\t\t\tswitch (item.getGroupId()) {\n\t\t\t\t\t\t\tcase ADDNREPLACEPLAY:\n\t\t\t\t\t\t\t\treplace = true;\n\t\t\t\t\t\t\t\tplay = true;\n\t\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t\tcase ADDNREPLACE:\n\t\t\t\t\t\t\t\treplace = true;\n\t\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t\tcase ADDNPLAY:\n\t\t\t\t\t\t\t\tplay = true;\n\t\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\t\t\t\t\t\tadd(items.get((int) info.id), replace, play);\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t\tbreak;\n\t\t\tcase 
ADD_TO_PLAYLIST: {\n\t\t\t\tfinal EditText input = new EditText(getActivity());\n\t\t\t\tfinal int id = (int) item.getOrder();\n\t\t\t\tif (item.getItemId() == 0) {\n\t\t\t\t\tnew AlertDialog.Builder(getActivity())\n\t\t\t\t\t\t\t.setTitle(R.string.playlistName)\n\t\t\t\t\t\t\t.setMessage(R.string.newPlaylistPrompt)\n\t\t\t\t\t\t\t.setView(input)\n\t\t\t\t\t\t\t.setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {\n\t\t\t\t\t\t\t\tpublic void onClick(DialogInterface dialog, int whichButton) {\n\t\t\t\t\t\t\t\t\tfinal String name = input.getText().toString().trim();\n\t\t\t\t\t\t\t\t\tif (null != name && name.length() > 0) {\n\t\t\t\t\t\t\t\t\t\tapp.oMPDAsyncHelper.execAsync(new Runnable() {\n\t\t\t\t\t\t\t\t\t\t\t@Override\n\t\t\t\t\t\t\t\t\t\t\tpublic void run() {\n\t\t\t\t\t\t\t\t\t\t\t\tadd(items.get(id), name);\n\t\t\t\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}).setNegativeButton(android.R.string.cancel, new DialogInterface.OnClickListener() {\n\t\t\t\t\t\t\t\tpublic void onClick(DialogInterface dialog, int whichButton) {\n\t\t\t\t\t\t\t\t\t// Do nothing.\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}).show();\n\t\t\t\t} else {\n\t\t\t\t\tadd(items.get(id), item.getTitle().toString());\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t\t}\n\t\t\tdefault:\n\t\t\t\tfinal String name = item.getTitle().toString();\n\t\t\t\tfinal int id = (int) item.getOrder();\n\t\t\t\tapp.oMPDAsyncHelper.execAsync(new Runnable() {\n\t\t\t\t\t@Override\n\t\t\t\t\tpublic void run() {\n\t\t\t\t\t\tadd(items.get(id), name);\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t\tbreak;\n\t\t}\n\t\treturn false;\n\t}\n\n\tprotected void asyncUpdate() {\n\n\t}\n\n\t/**\n\t * Update the view from the items list if items is set.\n\t */\n\tpublic void updateFromItems() {\n\t\tif (getView() == null) {\n\t\t\t// The view has been destroyed, bail.\n\t\t\treturn;\n\t\t}\n\t\tif (items != null) {\n\t\t\tlist.setAdapter(getCustomListAdapter());\n\t\t\ttry 
{\n\t\t\t\tif (forceEmptyView() || ((list instanceof ListView) && ((ListView) list).getHeaderViewsCount() == 0))\n\t\t\t\t\tlist.setEmptyView(noResultView);\n\t\t\t\tloadingView.setVisibility(View.GONE);\n\t\t\t} catch (Exception e) {}\n\t\t}\n\t}\n\n\tprotected ListAdapter getCustomListAdapter() {\n\t\treturn new ArrayIndexerAdapter(getActivity(), R.layout.simple_list_item_1, items);\n\t}\n\t\n\t//Override if you want setEmptyView to be called on the list even if you have a header\n\tprotected boolean forceEmptyView() {\n\t\treturn false;\n\t}\n\t\n\t@Override\n\tpublic void asyncExecSucceeded(int jobID) {\n\t\tif (iJobID == jobID) {\n\t\t\tupdateFromItems();\n\t\t}\n\n\t}\n\n\tpublic void scrollToTop() {\n\t\ttry {\n\t\t\tlist.setSelection(-1);\n\t\t} catch (Exception e) {\n\t\t\t// What if the list is empty or some other bug ? I don't want any crashes because of that\n\t\t}\n\t}\n\n}\n"},"new_file":{"kind":"string","value":"MPDroid/src/com/namelessdev/mpdroid/fragments/BrowseFragment.java"},"old_contents":{"kind":"string","value":"package com.namelessdev.mpdroid.fragments;\n\nimport android.annotation.TargetApi;\nimport android.app.ActionBar;\nimport android.app.Activity;\nimport android.app.AlertDialog;\nimport android.content.DialogInterface;\nimport android.os.Bundle;\nimport android.support.v4.app.Fragment;\nimport android.view.*;\nimport android.view.MenuItem.OnMenuItemClickListener;\nimport android.widget.*;\nimport android.widget.AdapterView.AdapterContextMenuInfo;\nimport android.widget.AdapterView.OnItemClickListener;\nimport com.namelessdev.mpdroid.MPDApplication;\nimport com.namelessdev.mpdroid.R;\nimport com.namelessdev.mpdroid.adapters.ArrayIndexerAdapter;\nimport com.namelessdev.mpdroid.helpers.MPDAsyncHelper.AsyncExecListener;\nimport org.a0z.mpd.Item;\nimport org.a0z.mpd.exception.MPDServerException;\n\nimport java.util.List;\n\npublic abstract class BrowseFragment extends Fragment implements OnMenuItemClickListener, AsyncExecListener, 
OnItemClickListener {\n\n\tprotected int iJobID = -1;\n\n\tpublic static final int MAIN = 0;\n\tpublic static final int PLAYLIST = 3;\n\n\tpublic static final int ADD = 0;\n\tpublic static final int ADDNREPLACE = 1;\n\tpublic static final int ADDNREPLACEPLAY = 4;\n\tpublic static final int ADDNPLAY = 2;\n\tpublic static final int ADD_TO_PLAYLIST = 3;\n\n\tprotected List items = null;\n\t\n\tprotected MPDApplication app = null;\n\tprotected View loadingView;\n\tprotected TextView loadingTextView;\n\tprotected View noResultView;\n\tprotected AbsListView list;\n\tprivate boolean firstUpdateDone = false;\n\n\tString context;\n\tint irAdd, irAdded;\n\n\tpublic BrowseFragment(int rAdd, int rAdded, String pContext) {\n\t\tsuper();\n\t\tirAdd = rAdd;\n\t\tirAdded = rAdded;\n\n\t\tcontext = pContext;\n\n\t\tsetHasOptionsMenu(false);\n\t}\n\n\t@TargetApi(11)\n\t@Override\n\tpublic void onActivityCreated(Bundle savedInstanceState) {\n\t\tsuper.onActivityCreated(savedInstanceState);\n\t\tapp = (MPDApplication) getActivity().getApplicationContext();\n\t\ttry {\n\t\t\tActivity activity = this.getActivity();\n\t\t\tActionBar actionBar = activity.getActionBar();\n\t\t\tactionBar.setDisplayHomeAsUpEnabled(true);\n\t\t} catch (NoClassDefFoundError e) {\n\t\t\t// Older android\n\t\t} catch (NullPointerException e) {\n\n\t\t} catch (NoSuchMethodError e) {\n\n\t\t}\n\t}\n\n\t@Override\n\tpublic void onStart() {\n\t\tsuper.onStart();\n\t\tapp.setActivity(getActivity());\n\t\tif(!firstUpdateDone) {\n\t\t\tfirstUpdateDone = true;\n\t\t\tUpdateList();\n\t\t}\n\t}\n\n\t@Override\n\tpublic void onStop() {\n\t\tsuper.onStop();\n\t\tapp.unsetActivity(getActivity());\n\t}\n\n\t@Override\n\tpublic View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {\n\t\tView view = inflater.inflate(R.layout.browse, container, false);\n\t\tlist = (ListView) 
view.findViewById(R.id.list);\n\t\tregisterForContextMenu(list);\n\t\tlist.setOnItemClickListener(this);\n\t\tloadingView = view.findViewById(R.id.loadingLayout);\n\t\tloadingTextView = (TextView) view.findViewById(R.id.loadingText);\n\t\tnoResultView = view.findViewById(R.id.noResultLayout);\n\t\tloadingTextView.setText(getLoadingText());\n\t\treturn view;\n\t}\n\t\n\t@Override\n\tpublic void onViewCreated(View view, Bundle savedInstanceState) {\n\t\tsuper.onViewCreated(view, savedInstanceState);\n\t\tif(items != null) {\n\t\t\tlist.setAdapter(getCustomListAdapter());\n\t\t}\n\t}\n\n\t@Override\n\tpublic void onDestroyView() {\n\t\t// help out the GC; imitated from ListFragment source\n\t\tloadingView = null;\n\t\tloadingTextView = null;\n\t\tnoResultView = null;\n\t\tsuper.onDestroyView();\n\t}\n\n\t/*\n\t * Override this to display a custom activity title\n\t */\n\tpublic String getTitle() {\n\t\treturn \"\";\n\t}\n\n\t/*\n\t * Override this to display a custom loading text\n\t */\n\tpublic int getLoadingText() {\n\t\treturn R.string.loading;\n\t}\n\n\tpublic void setActivityTitle(String title) {\n\t\tgetActivity().setTitle(title);\n\t}\n\n\tpublic void UpdateList() {\n\t\tlist.setAdapter(null);\n\t\tnoResultView.setVisibility(View.GONE);\n\t\tloadingView.setVisibility(View.VISIBLE);\n\n\t\t// Loading Artists asynchronous...\n\t\tapp.oMPDAsyncHelper.addAsyncExecListener(this);\n\t\tiJobID = app.oMPDAsyncHelper.execAsync(new Runnable() {\n\t\t\t@Override\n\t\t\tpublic void run() {\n\t\t\t\tasyncUpdate();\n\t\t\t}\n\t\t});\n\t}\n\n\t@Override\n\tpublic void onCreateContextMenu(ContextMenu menu, View v, ContextMenu.ContextMenuInfo menuInfo) {\n\t\tAdapterContextMenuInfo info = (AdapterContextMenuInfo) menuInfo;\n\n int index = (int) info.id;\n if (index >= 0 && items.size() > index ) {\n menu.setHeaderTitle(items.get((int) info.id).toString());\n\t\tandroid.view.MenuItem addItem = menu.add(ADD, ADD, 0, 
getResources().getString(irAdd));\n\t\taddItem.setOnMenuItemClickListener(this);\n\t\tandroid.view.MenuItem addAndReplaceItem = menu.add(ADDNREPLACE, ADDNREPLACE, 0, R.string.addAndReplace);\n\t\taddAndReplaceItem.setOnMenuItemClickListener(this);\n\t\tandroid.view.MenuItem addAndReplacePlayItem = menu.add(ADDNREPLACEPLAY, ADDNREPLACEPLAY, 0, R.string.addAndReplacePlay);\n\t\taddAndReplacePlayItem.setOnMenuItemClickListener(this);\n\t\tandroid.view.MenuItem addAndPlayItem = menu.add(ADDNPLAY, ADDNPLAY, 0, R.string.addAndPlay);\n\t\taddAndPlayItem.setOnMenuItemClickListener(this);\n\t\t\n\t\tif (R.string.addPlaylist!=irAdd && R.string.addStream!=irAdd) {\n\t\t\tint id=0;\n\t\t\tSubMenu playlistMenu=menu.addSubMenu(R.string.addToPlaylist);\n\t\t\tandroid.view.MenuItem item=playlistMenu.add(ADD_TO_PLAYLIST, id++, (int)info.id, R.string.newPlaylist);\n\t\t\titem.setOnMenuItemClickListener(this);\n\t\t\t\n\t\t\ttry {\n\t\t\t\tList playlists=((MPDApplication) getActivity().getApplication()).oMPDAsyncHelper.oMPD.getPlaylists();\n\t\t\t\t\n\t\t\t\tif (null!=playlists) {\n\t\t\t\t\tfor (Item pl : playlists) {\n\t\t\t\t\t\titem = playlistMenu.add(ADD_TO_PLAYLIST, id++, (int) info.id, pl.getName());\n\t\t\t\t\t\titem.setOnMenuItemClickListener(this);\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t} catch (MPDServerException e) {\n\t\t\t\t// TODO Auto-generated catch block\n\t\t\t\te.printStackTrace();\n\t\t\t}\n\t\t}\n }\n }\n\n\tprotected abstract void add(Item item, boolean replace, boolean play);\n\n\tprotected abstract void add(Item item, String playlist);\n\n\t@Override\n\tpublic boolean onMenuItemClick(final android.view.MenuItem item) {\n\t\tfinal AdapterContextMenuInfo info = (AdapterContextMenuInfo) item.getMenuInfo();\n\t\tswitch (item.getGroupId()) {\n\t\t\tcase ADDNREPLACEPLAY:\n\t\t\tcase ADDNREPLACE:\n\t\t\tcase ADD:\n\t\t\tcase ADDNPLAY:\n\t\t\t\tapp.oMPDAsyncHelper.execAsync(new Runnable() {\n\t\t\t\t\t@Override\n\t\t\t\t\tpublic void run() {\n\t\t\t\t\t\tboolean replace = 
false;\n\t\t\t\t\t\tboolean play = false;\n\t\t\t\t\t\tswitch (item.getGroupId()) {\n\t\t\t\t\t\t\tcase ADDNREPLACEPLAY:\n\t\t\t\t\t\t\t\treplace = true;\n\t\t\t\t\t\t\t\tplay = true;\n\t\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t\tcase ADDNREPLACE:\n\t\t\t\t\t\t\t\treplace = true;\n\t\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t\tcase ADDNPLAY:\n\t\t\t\t\t\t\t\tplay = true;\n\t\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\t\t\t\t\t\tadd(items.get((int) info.id), replace, play);\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t\tbreak;\n\t\t\tcase ADD_TO_PLAYLIST: {\n\t\t\t\tfinal EditText input = new EditText(getActivity());\n\t\t\t\tfinal int id = (int) item.getOrder();\n\t\t\t\tif (item.getItemId() == 0) {\n\t\t\t\t\tnew AlertDialog.Builder(getActivity())\n\t\t\t\t\t\t\t.setTitle(R.string.playlistName)\n\t\t\t\t\t\t\t.setMessage(R.string.newPlaylistPrompt)\n\t\t\t\t\t\t\t.setView(input)\n\t\t\t\t\t\t\t.setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {\n\t\t\t\t\t\t\t\tpublic void onClick(DialogInterface dialog, int whichButton) {\n\t\t\t\t\t\t\t\t\tfinal String name = input.getText().toString().trim();\n\t\t\t\t\t\t\t\t\tif (null != name && name.length() > 0) {\n\t\t\t\t\t\t\t\t\t\tapp.oMPDAsyncHelper.execAsync(new Runnable() {\n\t\t\t\t\t\t\t\t\t\t\t@Override\n\t\t\t\t\t\t\t\t\t\t\tpublic void run() {\n\t\t\t\t\t\t\t\t\t\t\t\tadd(items.get(id), name);\n\t\t\t\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}).setNegativeButton(android.R.string.cancel, new DialogInterface.OnClickListener() {\n\t\t\t\t\t\t\t\tpublic void onClick(DialogInterface dialog, int whichButton) {\n\t\t\t\t\t\t\t\t\t// Do nothing.\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}).show();\n\t\t\t\t} else {\n\t\t\t\t\tadd(items.get(id), item.getTitle().toString());\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t\t}\n\t\t\tdefault:\n\t\t\t\tfinal String name = item.getTitle().toString();\n\t\t\t\tfinal int id = (int) item.getOrder();\n\t\t\t\tapp.oMPDAsyncHelper.execAsync(new Runnable() 
{\n\t\t\t\t\t@Override\n\t\t\t\t\tpublic void run() {\n\t\t\t\t\t\tadd(items.get(id), name);\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t\tbreak;\n\t\t}\n\t\treturn false;\n\t}\n\n\tprotected void asyncUpdate() {\n\n\t}\n\n\t/**\n\t * Update the view from the items list if items is set.\n\t */\n\tpublic void updateFromItems() {\n\t\tif (getView() == null) {\n\t\t\t// The view has been destroyed, bail.\n\t\t\treturn;\n\t\t}\n\t\tif (items != null) {\n\t\t\tlist.setAdapter(getCustomListAdapter());\n\t\t\ttry {\n\t\t\t\tif (forceEmptyView() || ((list instanceof ListView) && ((ListView) list).getHeaderViewsCount() == 0))\n\t\t\t\t\tlist.setEmptyView(noResultView);\n\t\t\t\tloadingView.setVisibility(View.GONE);\n\t\t\t} catch (Exception e) {}\n\t\t}\n\t}\n\n\tprotected ListAdapter getCustomListAdapter() {\n\t\treturn new ArrayIndexerAdapter(getActivity(), R.layout.simple_list_item_1, items);\n\t}\n\t\n\t//Override if you want setEmptyView to be called on the list even if you have a header\n\tprotected boolean forceEmptyView() {\n\t\treturn false;\n\t}\n\t\n\t@Override\n\tpublic void asyncExecSucceeded(int jobID) {\n\t\tif (iJobID == jobID) {\n\t\t\tupdateFromItems();\n\t\t}\n\n\t}\n\n\tpublic void scrollToTop() {\n\t\ttry {\n\t\t\tlist.setSelection(-1);\n\t\t} catch (Exception e) {\n\t\t\t// What if the list is empty or some other bug ? 
I don't want any crashes because of that\n\t\t}\n\t}\n\n}\n"},"message":{"kind":"string","value":"Always show fast scroll for 4.4\n\nWorkaround for fast scroll not being visible on 4.4 devices.\n"},"old_file":{"kind":"string","value":"MPDroid/src/com/namelessdev/mpdroid/fragments/BrowseFragment.java"},"subject":{"kind":"string","value":"Always show fast scroll for 4.4"},"git_diff":{"kind":"string","value":"PDroid/src/com/namelessdev/mpdroid/fragments/BrowseFragment.java\n \t\tlist = (ListView) view.findViewById(R.id.list);\n \t\tregisterForContextMenu(list);\n \t\tlist.setOnItemClickListener(this);\n\t\tif (android.os.Build.VERSION.SDK_INT == 19)\n\t\t\tlist.setFastScrollAlwaysVisible(true);\n \t\tloadingView = view.findViewById(R.id.loadingLayout);\n \t\tloadingTextView = (TextView) view.findViewById(R.id.loadingText);\n \t\tnoResultView = view.findViewById(R.id.noResultLayout);"}}},{"rowIdx":2027,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"mit"},"stderr":{"kind":"string","value":"error: pathspec 'src/test/java/me/jamiemansfield/lorenz/test/model/jar/TypeTest.java' did not match any file(s) known to git\n"},"commit":{"kind":"string","value":"35fa0c841ddea31cbe721fb5fa812131c127880c"},"returncode":{"kind":"number","value":1,"string":"1"},"repos":{"kind":"string","value":"jamiemansfield/Lorenz,Lexteam/Lorenz"},"new_contents":{"kind":"string","value":"/*\n * This file is part of Lorenz, licensed under the MIT License (MIT).\n *\n * Copyright (c) Jamie Mansfield \n * Copyright (c) contributors\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the 
following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n * THE SOFTWARE.\n */\n\npackage me.jamiemansfield.lorenz.test.model.jar;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertTrue;\n\nimport me.jamiemansfield.lorenz.model.jar.ArrayType;\nimport me.jamiemansfield.lorenz.model.jar.MethodDescriptor;\nimport me.jamiemansfield.lorenz.model.jar.ObjectType;\nimport me.jamiemansfield.lorenz.model.jar.PrimitiveType;\nimport me.jamiemansfield.lorenz.model.jar.Type;\nimport org.junit.Test;\n\n/**\n * A variety of unit tests pertaining to the de-obfuscation\n * pertaining to {@link MethodDescriptor}.\n */\npublic final class TypeTest {\n\n @Test\n public void arrayType() {\n final String raw = \"[[I\";\n final Type type = Type.of(raw);\n assertTrue(\"Type should be an ArrayType!\", type instanceof ArrayType);\n assertEquals(raw, type.getObfuscated());\n final ArrayType array = (ArrayType) type;\n assertEquals(2, array.getDimCount());\n assertEquals(PrimitiveType.INT, array.getComponent());\n }\n\n @Test\n public void objectTest() {\n final String raw = \"Lme/jamiemansfield/Test;\";\n final Type type = Type.of(raw);\n assertTrue(\"Type should be an ObjectType!\", type instanceof ObjectType);\n assertEquals(raw, type.getObfuscated());\n }\n\n @Test\n public void primtiveTest() {\n final String raw = \"Z\";\n final Type type = Type.of(raw);\n assertTrue(\"Type should 
be an PrimitiveType!\", type instanceof PrimitiveType);\n assertEquals(PrimitiveType.BOOLEAN, type);\n assertEquals(raw, type.getObfuscated());\n }\n\n}\n"},"new_file":{"kind":"string","value":"src/test/java/me/jamiemansfield/lorenz/test/model/jar/TypeTest.java"},"old_contents":{"kind":"string","value":""},"message":{"kind":"string","value":"Add unit tests for type models\n"},"old_file":{"kind":"string","value":"src/test/java/me/jamiemansfield/lorenz/test/model/jar/TypeTest.java"},"subject":{"kind":"string","value":"Add unit tests for type models"},"git_diff":{"kind":"string","value":"rc/test/java/me/jamiemansfield/lorenz/test/model/jar/TypeTest.java\n/*\n * This file is part of Lorenz, licensed under the MIT License (MIT).\n *\n * Copyright (c) Jamie Mansfield \n * Copyright (c) contributors\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n * THE SOFTWARE.\n */\n\npackage me.jamiemansfield.lorenz.test.model.jar;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertTrue;\n\nimport me.jamiemansfield.lorenz.model.jar.ArrayType;\nimport me.jamiemansfield.lorenz.model.jar.MethodDescriptor;\nimport me.jamiemansfield.lorenz.model.jar.ObjectType;\nimport me.jamiemansfield.lorenz.model.jar.PrimitiveType;\nimport me.jamiemansfield.lorenz.model.jar.Type;\nimport org.junit.Test;\n\n/**\n * A variety of unit tests pertaining to the de-obfuscation\n * pertaining to {@link MethodDescriptor}.\n */\npublic final class TypeTest {\n\n @Test\n public void arrayType() {\n final String raw = \"[[I\";\n final Type type = Type.of(raw);\n assertTrue(\"Type should be an ArrayType!\", type instanceof ArrayType);\n assertEquals(raw, type.getObfuscated());\n final ArrayType array = (ArrayType) type;\n assertEquals(2, array.getDimCount());\n assertEquals(PrimitiveType.INT, array.getComponent());\n }\n\n @Test\n public void objectTest() {\n final String raw = \"Lme/jamiemansfield/Test;\";\n final Type type = Type.of(raw);\n assertTrue(\"Type should be an ObjectType!\", type instanceof ObjectType);\n assertEquals(raw, type.getObfuscated());\n }\n\n @Test\n public void primtiveTest() {\n final String raw = \"Z\";\n final Type type = Type.of(raw);\n assertTrue(\"Type should be an PrimitiveType!\", type instanceof PrimitiveType);\n assertEquals(PrimitiveType.BOOLEAN, type);\n assertEquals(raw, type.getObfuscated());\n 
}\n\n}"}}},{"rowIdx":2028,"cells":{"lang":{"kind":"string","value":"JavaScript"},"license":{"kind":"string","value":"mit"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"5028eeb57908916d98d9e7c82d59ad958ee8f611"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"DeviaVir/ShowNotify,DeviaVir/ShowNotify"},"new_contents":{"kind":"string","value":"/**\n * Module dependencies.\n */\nvar app = require('../app');\nvar http = require('http');\nvar xml = require('xml2js').parseString;\nvar colors = require('colors');\n\nvar secrets = require('../config/secrets');\nvar Show = require('../models/Show');\nvar User = require('../models/User');\n\nvar nodemailer = require('nodemailer');\nvar transporter = nodemailer.createTransport(\"SMTP\", {\n service: 'gmail',\n auth: {\n user: secrets.email.username,\n pass: secrets.email.password\n }\n});\n\nif (!Date.now) {\n Date.now = function now() {\n return new Date().getTime();\n };\n}\n\nfunction pad(a,b) { return(1e15+a+\"\").slice(-b); }\n\nvar tomorrowDate = new Date(new Date().getTime() + 24 * 60 * 60 * 1000),\n yesterdayDate = new Date(new Date().getTime() - 24 * 60 * 60 * 1000);\n\nvar tomorrow = tomorrowDate.getFullYear() + '-' + ( tomorrowDate.getMonth() + 1 ) + '-' + tomorrowDate.getDate(),\n yesterday = yesterdayDate.getFullYear() + '-' + ( yesterdayDate.getMonth() + 1 ) + '-' + yesterdayDate.getDate();\n\nShow.find({}, function(err, shows) {\n if(err === null) {\n shows.forEach(function(show) {\n var tomorrows = 'http://www.thetvdb.com/api/GetEpisodeByAirDate.php?apikey=' + secrets.thetvdb.apiKey + '&seriesid=' + show.id + '&airdate=' + tomorrow,\n yesterdays = 'http://www.thetvdb.com/api/GetEpisodeByAirDate.php?apikey=' + secrets.thetvdb.apiKey + '&seriesid=' + show.id + '&airdate=' + yesterday;\n\n // Get tomorrow\n http.get(tomorrows, function(res) {\n res.setEncoding('utf8');\n res.on('data', function (body) {\n xml(body, function(err, obj) {\n if(obj && 
('Data' in obj) && obj.Data && ('Error' in obj.Data) && obj.Data.Error) {\n console.info('Skipping tomorrow (' + tomorrow + '), no relevant airDate'.green, show.id);\n }\n else {\n show.users.forEach(function(userId) {\n User.find(userId, function(err, user) {\n if(err === null) {\n user = user[0];\n var mailOptions = {\n from: 'ShowNotify ✔ <' + secrets.email.username + '>',\n to: user.email,\n subject: show.name + ' returns tomorrow - ShowNotify',\n text: 'Look sharp! ' + show.name + ' returns tomorrow with season ' + pad(obj.Data.Episode[0].SeasonNumber, 2) + ' episode ' + pad(obj.Data.Episode[0].EpisodeNumber, 2) + ' (s' + pad(obj.Data.Episode[0].SeasonNumber, 2) + 'e' + pad(obj.Data.Episode[0].EpisodeNumber, 2) + ') it has been called \"' + obj.Data.Episode[0].EpisodeName[0] + '\" and this is a short overview: ' + obj.Data.Episode[0].Overview[0],\n html: 'Look sharp!

' +\n '' + show.name + ' returns tomorrow with season ' + pad(obj.Data.Episode[0].SeasonNumber, 2) + ' episode ' + pad(obj.Data.Episode[0].EpisodeNumber, 2) + ' (s' + pad(obj.Data.Episode[0].SeasonNumber, 2) + 'e' + pad(obj.Data.Episode[0].EpisodeNumber, 2) + ').

' +\n '\"' + obj.Data.Episode[0].EpisodeName[0] + '\"
' + obj.Data.Episode[0].Overview[0]\n };\n transporter.sendMail(mailOptions, function(error, info){\n console.log(error, info);\n });\n }\n });\n });\n }\n });\n });\n }).on('error', function(e) {\n console.error('Got http error: ' + e.message + ''.underline.red);\n });\n\n // Get yesterday\n http.get(yesterdays, function(res) {\n res.setEncoding('utf8');\n res.on('data', function (body) {\n xml(body, function(err, obj) {\n if(obj && ('Data' in obj) && obj.Data && ('Error' in obj.Data) && obj.Data.Error) {\n console.info('Skipping yesterday (' + yesterday + '), no relevant airDate'.green, show.id);\n }\n else {\n show.users.forEach(function(userId) {\n User.find(userId, function(err, user) {\n if(err === null) {\n user = user[0];\n var mailOptions = {\n from: 'ShowNotify ✔ <' + secrets.email.username + '>',\n to: user.email,\n subject: show.name + ' returned yesterday - ShowNotify',\n text: 'Look sharp! ' + show.name + ' returned yesterday with season ' + pad(obj.Data.Episode[0].SeasonNumber, 2) + ' episode ' + pad(obj.Data.Episode[0].EpisodeNumber, 2) + ' (s' + pad(obj.Data.Episode[0].SeasonNumber, 2) + 'e' + pad(obj.Data.Episode[0].EpisodeNumber, 2) + ') it has been called \"' + obj.Data.Episode[0].EpisodeName[0] + '\" and this is a short overview: ' + obj.Data.Episode[0].Overview[0],\n html: 'Look sharp!

' +\n '' + show.name + ' returned yesterday with season ' + pad(obj.Data.Episode[0].SeasonNumber, 2) + ' episode ' + pad(obj.Data.Episode[0].EpisodeNumber, 2) + ' (s' + pad(obj.Data.Episode[0].SeasonNumber, 2) + 'e' + pad(obj.Data.Episode[0].EpisodeNumber, 2) + ').

' +\n '\"' + obj.Data.Episode[0].EpisodeName[0] + '\"
' + obj.Data.Episode[0].Overview[0]\n };\n transporter.sendMail(mailOptions, function(error, info){\n console.log(error, info);\n });\n }\n });\n });\n }\n });\n });\n }).on('error', function(e) {\n console.error('Got http error: ' + e.message + ''.underline.red);\n });\n });\n }\n\n setTimeout(function() {\n process.exit(0);\n }, 30000);\n});\n"},"new_file":{"kind":"string","value":"scripts/notifier.js"},"old_contents":{"kind":"string","value":"var app = require('../app');\nvar http = require('http');\nvar xml = require('xml2js').parseString;\nvar colors = require('colors');\n\nvar secrets = require('../config/secrets');\nvar Show = require('../models/Show');\nvar User = require('../models/User');\n\nvar nodemailer = require('nodemailer');\nvar transporter = nodemailer.createTransport(\"SMTP\", {\n service: 'gmail',\n auth: {\n user: secrets.email.username,\n pass: secrets.email.password\n }\n});\n\nif (!Date.now) {\n Date.now = function now() {\n return new Date().getTime();\n };\n}\n\nfunction pad(a,b) { return(1e15+a+\"\").slice(-b); }\n\nvar tomorrowDate = new Date(new Date().getTime() + 24 * 60 * 60 * 1000),\n yesterdayDate = new Date(new Date().getTime() - 24 * 60 * 60 * 1000);\n\nvar tomorrow = tomorrowDate.getFullYear() + '-' + ( tomorrowDate.getMonth() + 1 ) + '-' + tomorrowDate.getDate(),\n yesterday = yesterdayDate.getFullYear() + '-' + ( yesterdayDate.getMonth() + 1 ) + '-' + yesterdayDate.getDate();\n\nShow.find({}, function(err, shows) {\n if(err === null) {\n shows.forEach(function(show) {\n var tomorrows = 'http://www.thetvdb.com/api/GetEpisodeByAirDate.php?apikey=' + secrets.thetvdb.apiKey + '&seriesid=' + show.id + '&airdate=' + tomorrow,\n yesterdays = 'http://www.thetvdb.com/api/GetEpisodeByAirDate.php?apikey=' + secrets.thetvdb.apiKey + '&seriesid=' + show.id + '&airdate=' + yesterday;\n\n // Get tomorrow\n http.get(tomorrows, function(res) {\n res.setEncoding('utf8');\n res.on('data', function (body) {\n xml(body, function(err, obj) {\n 
if(obj.Data && ('Error' in obj.Data) && obj.Data.Error) {\n console.info('Skipping tomorrow, no relevant airDate'.green, show.id);\n }\n else {\n show.users.forEach(function(userId) {\n User.findById(userId, function(user) {\n var mailOptions = {\n from: 'ShowNotify ✔ <' + secrets.email.username + '>',\n to: user.email,\n subject: show.name + ' returns tomorrow - ShowNotify',\n text: 'Look sharp! ' + show.name + ' returns tomorrow with season ' + pad(obj.Data.Episode[0].SeasonNumber, 2) + ' episode ' + pad(obj.Data.Episode[0].EpImgFlag, 2) + ' (s' + pad(obj.Data.Episode[0].SeasonNumber, 2) + 'e' + pad(obj.Data.Episode[0].EpImgFlag, 2) + ') it has been called \"' + obj.Data.Episode[0].EpisodeName[0] + '\" and this is a short overview: ' + obj.Data.Episode[0].Overview[0],\n html: 'Look sharp!

' +\n '' + show.name + ' returns tomorrow with season ' + pad(obj.Data.Episode[0].SeasonNumber, 2) + ' episode ' + pad(obj.Data.Episode[0].EpImgFlag, 2) + ' (s' + pad(obj.Data.Episode[0].SeasonNumber, 2) + 'e' + pad(obj.Data.Episode[0].EpImgFlag, 2) + ').

' +\n '\"' + obj.Data.Episode[0].EpisodeName[0] + '\"
' + obj.Data.Episode[0].Overview[0]\n };\n transporter.sendMail(mailOptions, function(error, info){\n console.log(error, info);\n });\n });\n });\n }\n });\n });\n }).on('error', function(e) {\n console.error('Got http error: ' + e.message + ''.underline.red);\n });\n\n // Get yesterday\n http.get(yesterdays, function(res) {\n res.setEncoding('utf8');\n res.on('data', function (body) {\n xml(body, function(err, obj) {\n if(obj.Data && ('Error' in obj.Data) && obj.Data.Error) {\n console.info('Skipping yesterday, no relevant airDate'.green, show.id);\n }\n else {\n show.users.forEach(function(userId) {\n User.findById(userId, function(err, user) {\n var mailOptions = {\n from: 'ShowNotify ✔ <' + secrets.email.username + '>',\n to: user.email,\n subject: show.name + ' returned yesterday - ShowNotify',\n text: 'Look sharp! ' + show.name + ' returned yesterday with season ' + pad(obj.Data.Episode[0].SeasonNumber, 2) + ' episode ' + pad(obj.Data.Episode[0].EpImgFlag, 2) + ' (s' + pad(obj.Data.Episode[0].SeasonNumber, 2) + 'e' + pad(obj.Data.Episode[0].EpImgFlag, 2) + ') it has been called \"' + obj.Data.Episode[0].EpisodeName[0] + '\" and this is a short overview: ' + obj.Data.Episode[0].Overview[0],\n html: 'Look sharp!

' +\n '' + show.name + ' returned yesterday with season ' + pad(obj.Data.Episode[0].SeasonNumber, 2) + ' episode ' + pad(obj.Data.Episode[0].EpImgFlag, 2) + ' (s' + pad(obj.Data.Episode[0].SeasonNumber, 2) + 'e' + pad(obj.Data.Episode[0].EpImgFlag, 2) + ').

' +\n '\"' + obj.Data.Episode[0].EpisodeName[0] + '\"
' + obj.Data.Episode[0].Overview[0]\n };\n transporter.sendMail(mailOptions, function(error, info){\n console.log(error, info);\n });\n });\n });\n }\n });\n });\n }).on('error', function(e) {\n console.error('Got http error: ' + e.message + ''.underline.red);\n });\n });\n }\n\n setTimeout(function() {\n process.exit(0);\n }, 30000);\n});\n"},"message":{"kind":"string","value":"Fix bug with the notifier script\n"},"old_file":{"kind":"string","value":"scripts/notifier.js"},"subject":{"kind":"string","value":"Fix bug with the notifier script"},"git_diff":{"kind":"string","value":"cripts/notifier.js\n/**\n * Module dependencies.\n */\n var app = require('../app');\n var http = require('http');\n var xml = require('xml2js').parseString;\n res.setEncoding('utf8');\n res.on('data', function (body) {\n xml(body, function(err, obj) {\n if(obj.Data && ('Error' in obj.Data) && obj.Data.Error) {\n console.info('Skipping tomorrow, no relevant airDate'.green, show.id);\n if(obj && ('Data' in obj) && obj.Data && ('Error' in obj.Data) && obj.Data.Error) {\n console.info('Skipping tomorrow (' + tomorrow + '), no relevant airDate'.green, show.id);\n }\n else {\n show.users.forEach(function(userId) {\n User.findById(userId, function(user) {\n var mailOptions = {\n from: 'ShowNotify ✔ <' + secrets.email.username + '>',\n to: user.email,\n subject: show.name + ' returns tomorrow - ShowNotify',\n text: 'Look sharp! ' + show.name + ' returns tomorrow with season ' + pad(obj.Data.Episode[0].SeasonNumber, 2) + ' episode ' + pad(obj.Data.Episode[0].EpImgFlag, 2) + ' (s' + pad(obj.Data.Episode[0].SeasonNumber, 2) + 'e' + pad(obj.Data.Episode[0].EpImgFlag, 2) + ') it has been called \"' + obj.Data.Episode[0].EpisodeName[0] + '\" and this is a short overview: ' + obj.Data.Episode[0].Overview[0],\n html: 'Look sharp!

' +\n '' + show.name + ' returns tomorrow with season ' + pad(obj.Data.Episode[0].SeasonNumber, 2) + ' episode ' + pad(obj.Data.Episode[0].EpImgFlag, 2) + ' (s' + pad(obj.Data.Episode[0].SeasonNumber, 2) + 'e' + pad(obj.Data.Episode[0].EpImgFlag, 2) + ').

' +\n '\"' + obj.Data.Episode[0].EpisodeName[0] + '\"
' + obj.Data.Episode[0].Overview[0]\n };\n transporter.sendMail(mailOptions, function(error, info){\n console.log(error, info);\n });\n User.find(userId, function(err, user) {\n if(err === null) {\n user = user[0];\n var mailOptions = {\n from: 'ShowNotify ✔ <' + secrets.email.username + '>',\n to: user.email,\n subject: show.name + ' returns tomorrow - ShowNotify',\n text: 'Look sharp! ' + show.name + ' returns tomorrow with season ' + pad(obj.Data.Episode[0].SeasonNumber, 2) + ' episode ' + pad(obj.Data.Episode[0].EpisodeNumber, 2) + ' (s' + pad(obj.Data.Episode[0].SeasonNumber, 2) + 'e' + pad(obj.Data.Episode[0].EpisodeNumber, 2) + ') it has been called \"' + obj.Data.Episode[0].EpisodeName[0] + '\" and this is a short overview: ' + obj.Data.Episode[0].Overview[0],\n html: 'Look sharp!

' +\n '' + show.name + ' returns tomorrow with season ' + pad(obj.Data.Episode[0].SeasonNumber, 2) + ' episode ' + pad(obj.Data.Episode[0].EpisodeNumber, 2) + ' (s' + pad(obj.Data.Episode[0].SeasonNumber, 2) + 'e' + pad(obj.Data.Episode[0].EpisodeNumber, 2) + ').

' +\n '\"' + obj.Data.Episode[0].EpisodeName[0] + '\"
' + obj.Data.Episode[0].Overview[0]\n };\n transporter.sendMail(mailOptions, function(error, info){\n console.log(error, info);\n });\n }\n });\n });\n }\n res.setEncoding('utf8');\n res.on('data', function (body) {\n xml(body, function(err, obj) {\n if(obj.Data && ('Error' in obj.Data) && obj.Data.Error) {\n console.info('Skipping yesterday, no relevant airDate'.green, show.id);\n if(obj && ('Data' in obj) && obj.Data && ('Error' in obj.Data) && obj.Data.Error) {\n console.info('Skipping yesterday (' + yesterday + '), no relevant airDate'.green, show.id);\n }\n else {\n show.users.forEach(function(userId) {\n User.findById(userId, function(err, user) {\n var mailOptions = {\n from: 'ShowNotify ✔ <' + secrets.email.username + '>',\n to: user.email,\n subject: show.name + ' returned yesterday - ShowNotify',\n text: 'Look sharp! ' + show.name + ' returned yesterday with season ' + pad(obj.Data.Episode[0].SeasonNumber, 2) + ' episode ' + pad(obj.Data.Episode[0].EpImgFlag, 2) + ' (s' + pad(obj.Data.Episode[0].SeasonNumber, 2) + 'e' + pad(obj.Data.Episode[0].EpImgFlag, 2) + ') it has been called \"' + obj.Data.Episode[0].EpisodeName[0] + '\" and this is a short overview: ' + obj.Data.Episode[0].Overview[0],\n html: 'Look sharp!

' +\n '' + show.name + ' returned yesterday with season ' + pad(obj.Data.Episode[0].SeasonNumber, 2) + ' episode ' + pad(obj.Data.Episode[0].EpImgFlag, 2) + ' (s' + pad(obj.Data.Episode[0].SeasonNumber, 2) + 'e' + pad(obj.Data.Episode[0].EpImgFlag, 2) + ').

' +\n '\"' + obj.Data.Episode[0].EpisodeName[0] + '\"
' + obj.Data.Episode[0].Overview[0]\n };\n transporter.sendMail(mailOptions, function(error, info){\n console.log(error, info);\n });\n User.find(userId, function(err, user) {\n if(err === null) {\n user = user[0];\n var mailOptions = {\n from: 'ShowNotify ✔ <' + secrets.email.username + '>',\n to: user.email,\n subject: show.name + ' returned yesterday - ShowNotify',\n text: 'Look sharp! ' + show.name + ' returned yesterday with season ' + pad(obj.Data.Episode[0].SeasonNumber, 2) + ' episode ' + pad(obj.Data.Episode[0].EpisodeNumber, 2) + ' (s' + pad(obj.Data.Episode[0].SeasonNumber, 2) + 'e' + pad(obj.Data.Episode[0].EpisodeNumber, 2) + ') it has been called \"' + obj.Data.Episode[0].EpisodeName[0] + '\" and this is a short overview: ' + obj.Data.Episode[0].Overview[0],\n html: 'Look sharp!

' +\n '' + show.name + ' returned yesterday with season ' + pad(obj.Data.Episode[0].SeasonNumber, 2) + ' episode ' + pad(obj.Data.Episode[0].EpisodeNumber, 2) + ' (s' + pad(obj.Data.Episode[0].SeasonNumber, 2) + 'e' + pad(obj.Data.Episode[0].EpisodeNumber, 2) + ').

' +\n '\"' + obj.Data.Episode[0].EpisodeName[0] + '\"
' + obj.Data.Episode[0].Overview[0]\n };\n transporter.sendMail(mailOptions, function(error, info){\n console.log(error, info);\n });\n }\n });\n });\n }"}}},{"rowIdx":2029,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"bsd-2-clause"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"93c0d63c3121b96e0653f4d2298c6b6bd97a2027"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"scenerygraphics/SciView,scenerygraphics/SciView"},"new_contents":{"kind":"string","value":"/*-\n * #%L\n * Scenery-backed 3D visualization package for ImageJ.\n * %%\n * Copyright (C) 2016 - 2018 SciView developers.\n * %%\n * Redistribution and use in source and binary forms, with or without\n * modification, are permitted provided that the following conditions are met:\n *\n * 1. Redistributions of source code must retain the above copyright notice,\n * this list of conditions and the following disclaimer.\n * 2. Redistributions in binary form must reproduce the above copyright notice,\n * this list of conditions and the following disclaimer in the documentation\n * and/or other materials provided with the distribution.\n *\n * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\n * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE\n * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE\n * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR\n * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF\n * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS\n * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN\n * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)\n * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE\n * POSSIBILITY OF SUCH DAMAGE.\n * #L%\n */\npackage sc.iview;\n\nimport cleargl.GLTypeEnum;\nimport cleargl.GLVector;\nimport com.bulenkov.darcula.DarculaLaf;\nimport com.jogamp.opengl.math.Quaternion;\nimport com.sun.javafx.application.PlatformImpl;\nimport coremem.enums.NativeTypeEnum;\nimport graphics.scenery.Box;\nimport graphics.scenery.*;\nimport graphics.scenery.backends.Renderer;\nimport graphics.scenery.backends.vulkan.VulkanRenderer;\nimport graphics.scenery.controls.InputHandler;\nimport graphics.scenery.controls.OpenVRHMD;\nimport graphics.scenery.controls.TrackerInput;\nimport graphics.scenery.controls.behaviours.ArcballCameraControl;\nimport graphics.scenery.controls.behaviours.FPSCameraControl;\nimport graphics.scenery.controls.behaviours.MovementCommand;\nimport graphics.scenery.controls.behaviours.SelectCommand;\nimport graphics.scenery.utils.SceneryFXPanel;\nimport graphics.scenery.utils.SceneryJPanel;\nimport graphics.scenery.utils.SceneryPanel;\nimport graphics.scenery.utils.Statistics;\nimport graphics.scenery.volumes.TransferFunction;\nimport graphics.scenery.volumes.Volume;\nimport graphics.scenery.volumes.bdv.BDVVolume;\nimport javafx.animation.FadeTransition;\nimport javafx.animation.Interpolator;\nimport javafx.application.Platform;\nimport javafx.embed.swing.JFXPanel;\nimport javafx.geometry.Insets;\nimport javafx.geometry.*;\nimport javafx.scene.control.Label;\nimport javafx.scene.control.MenuBar;\nimport 
javafx.scene.control.MenuItem;\nimport javafx.scene.control.*;\nimport javafx.scene.image.Image;\nimport javafx.scene.image.ImageView;\nimport javafx.scene.input.MouseButton;\nimport javafx.scene.layout.*;\nimport javafx.scene.paint.Color;\nimport javafx.scene.paint.Paint;\nimport javafx.scene.text.TextAlignment;\nimport javafx.util.Duration;\nimport kotlin.Unit;\nimport kotlin.jvm.functions.Function1;\nimport net.imagej.Dataset;\nimport net.imagej.lut.LUTService;\nimport net.imagej.ops.OpService;\nimport net.imglib2.Cursor;\nimport net.imglib2.IterableInterval;\nimport net.imglib2.RealLocalizable;\nimport net.imglib2.RealPoint;\nimport net.imglib2.display.ColorTable;\nimport net.imglib2.type.numeric.RealType;\nimport net.imglib2.type.numeric.integer.UnsignedByteType;\nimport net.imglib2.type.numeric.integer.UnsignedShortType;\nimport net.imglib2.type.numeric.real.FloatType;\nimport net.imglib2.view.Views;\nimport org.scijava.Context;\nimport org.scijava.display.Display;\nimport org.scijava.display.DisplayService;\nimport org.scijava.event.EventService;\nimport org.scijava.io.IOService;\nimport org.scijava.log.LogService;\nimport org.scijava.menu.MenuService;\nimport org.scijava.plugin.Parameter;\nimport org.scijava.thread.ThreadService;\nimport org.scijava.ui.behaviour.ClickBehaviour;\nimport org.scijava.ui.behaviour.InputTrigger;\nimport org.scijava.ui.swing.menu.SwingJMenuBarCreator;\nimport org.scijava.util.ColorRGB;\nimport org.scijava.util.ColorRGBA;\nimport org.scijava.util.Colors;\nimport sc.iview.commands.view.NodePropertyEditor;\nimport sc.iview.controls.behaviours.CameraTranslateControl;\nimport sc.iview.controls.behaviours.NodeTranslateControl;\nimport sc.iview.event.NodeActivatedEvent;\nimport sc.iview.event.NodeAddedEvent;\nimport sc.iview.event.NodeRemovedEvent;\nimport sc.iview.javafx.JavaFXMenuCreator;\nimport sc.iview.process.MeshConverter;\nimport sc.iview.vector.ClearGLVector3;\nimport sc.iview.vector.Vector3;\nimport 
tpietzsch.example2.VolumeViewerOptions;\n\nimport javax.imageio.ImageIO;\nimport javax.swing.*;\nimport javax.swing.plaf.basic.BasicLookAndFeel;\nimport java.awt.*;\nimport java.awt.image.BufferedImage;\nimport java.io.*;\nimport java.nio.ByteBuffer;\nimport java.nio.ByteOrder;\nimport java.nio.FloatBuffer;\nimport java.util.List;\nimport java.util.Queue;\nimport java.util.*;\nimport java.util.concurrent.CountDownLatch;\nimport java.util.concurrent.Future;\nimport java.util.function.Predicate;\nimport java.util.function.Supplier;\n\npublic class SciView extends SceneryBase {\n\n public static final ColorRGB DEFAULT_COLOR = Colors.LIGHTGRAY;\n\n @Parameter\n private LogService log;\n\n @Parameter\n private MenuService menus;\n\n @Parameter\n private IOService io;\n\n @Parameter\n private OpService ops;\n\n @Parameter\n private EventService eventService;\n\n @Parameter\n private DisplayService displayService;\n\n @Parameter\n private LUTService lutService;\n\n @Parameter\n private ThreadService threadService;\n\n /**\n * Queue keeps track of the currently running animations\n **/\n private Queue animations;\n\n /**\n * Animation pause tracking\n **/\n private boolean animating;\n\n /**\n * This tracks the actively selected Node in the scene\n */\n private Node activeNode = null;\n\n /**\n * Mouse controls for FPS movement and Arcball rotation\n */\n protected ArcballCameraControl targetArcball;\n protected FPSCameraControl fpsControl;\n\n /**\n * The primary camera/observer in the scene\n */\n Camera camera = null;\n\n /**\n * JavaFX UI\n */\n private boolean useJavaFX = false;\n\n /**\n * Speeds for input controls\n */\n private float fpsScrollSpeed = 3.0f;\n\n private float mouseSpeedMult = 0.25f;\n\n private Display scijavaDisplay;\n\n /**\n * The floor that orients the user in the scene\n */\n protected Node floor;\n\n private Label statusLabel;\n private Label loadingLabel;\n private JLabel splashLabel;\n private SceneryJPanel panel;\n private StackPane 
stackPane;\n private MenuBar menuBar;\n private JSplitPane mainSplitPane;\n private final SceneryPanel[] sceneryPanel = { null };\n private JSplitPane inspector;\n private NodePropertyEditor nodePropertyEditor;\n\n public SciView( Context context ) {\n super( \"SciView\", 1280, 720, false, context );\n context.inject( this );\n }\n\n public SciView( String applicationName, int windowWidth, int windowHeight ) {\n super( applicationName, windowWidth, windowHeight, false );\n }\n\n public InputHandler publicGetInputHandler() {\n return getInputHandler();\n }\n\n public class TransparentSlider extends JSlider {\n\n public TransparentSlider() {\n // Important, we taking over the filling of the\n // component...\n setOpaque(false);\n setBackground(java.awt.Color.DARK_GRAY);\n setForeground(java.awt.Color.LIGHT_GRAY);\n }\n\n @Override\n protected void paintComponent(Graphics g) {\n Graphics2D g2d = (Graphics2D) g.create();\n g2d.setColor(getBackground());\n g2d.setComposite(AlphaComposite.SrcOver.derive(0.9f));\n g2d.fillRect(0, 0, getWidth(), getHeight());\n g2d.dispose();\n\n super.paintComponent(g);\n }\n\n }\n\n @SuppressWarnings(\"restriction\") @Override public void init() {\n if(Boolean.parseBoolean(System.getProperty(\"sciview.useDarcula\", \"false\"))) {\n try {\n BasicLookAndFeel darcula = new DarculaLaf();\n UIManager.setLookAndFeel(darcula);\n } catch (Exception e) {\n System.err.println(\"Could not load Darcula Look and Feel\");\n }\n }\n\n int x, y;\n\n try {\n Dimension screenSize = Toolkit.getDefaultToolkit().getScreenSize();\n\n x = screenSize.width/2 - getWindowWidth()/2;\n y = screenSize.height/2 - getWindowHeight()/2;\n } catch(HeadlessException e) {\n x = 10;\n y = 10;\n }\n\n JFrame frame = new JFrame(\"SciView\");\n frame.setLayout(new BorderLayout(0, 0));\n frame.setSize(getWindowWidth(), getWindowHeight());\n frame.setLocation(x, y);\n frame.setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);\n nodePropertyEditor = new NodePropertyEditor( this 
);\n\n if( useJavaFX ) {\n final JFXPanel fxPanel = new JFXPanel();\n frame.add(fxPanel);\n\n CountDownLatch latch = new CountDownLatch( 1 );\n\n PlatformImpl.startup( () -> {\n } );\n\n Platform.runLater( () -> {\n stackPane = new StackPane();\n stackPane.setBackground(\n new Background( new BackgroundFill( Color.TRANSPARENT, CornerRadii.EMPTY, Insets.EMPTY ) ) );\n\n GridPane pane = new GridPane();\n statusLabel = new Label( \"SciView - press U for usage help\" );\n statusLabel.setVisible(false);\n\n final SceneryFXPanel panel = new SceneryFXPanel(100, 100);\n\n Image loadingImage = new Image(this.getClass().getResourceAsStream(\"sciview-logo.png\"), 600, 200, true, true);\n ImageView loadingImageView = new ImageView(loadingImage);\n loadingLabel = new Label(\"SciView is starting.\");\n loadingLabel.setStyle(\n \"-fx-background-color: rgb(50,48,47);\" +\n \"-fx-opacity: 1.0;\" +\n \"-fx-font-weight: 400; \" +\n \"-fx-font-size: 2.2em; \" +\n \"-fx-text-fill: white;\");\n loadingLabel.setTextFill(Paint.valueOf(\"white\"));\n loadingLabel.setGraphic(loadingImageView);\n loadingLabel.setGraphicTextGap(40.0);\n loadingLabel.setContentDisplay(ContentDisplay.TOP);\n loadingLabel.prefHeightProperty().bind(pane.heightProperty());\n loadingLabel.prefWidthProperty().bind(pane.widthProperty());\n loadingLabel.setAlignment(Pos.CENTER);\n\n GridPane.setHgrow( panel, Priority.ALWAYS );\n GridPane.setVgrow( panel, Priority.ALWAYS );\n\n GridPane.setFillHeight( panel, true );\n GridPane.setFillWidth( panel, true );\n\n GridPane.setHgrow( statusLabel, Priority.ALWAYS );\n GridPane.setHalignment( statusLabel, HPos.CENTER );\n GridPane.setValignment( statusLabel, VPos.BOTTOM );\n\n statusLabel.maxWidthProperty().bind( pane.widthProperty() );\n\n pane.setStyle( \"-fx-background-color: rgb(50,48,47);\" +\n \"-fx-font-family: Helvetica Neue, Helvetica, Segoe, Proxima Nova, Arial, sans-serif;\" +\n \"-fx-font-weight: 400;\" + \"-fx-font-size: 1.2em;\" + \"-fx-text-fill: white;\" +\n 
\"-fx-text-alignment: center;\" );\n\n statusLabel.setStyle( \"-fx-padding: 0.2em;\" + \"-fx-text-fill: white;\" );\n\n statusLabel.setTextAlignment( TextAlignment.CENTER );\n\n menuBar = new MenuBar();\n pane.add( menuBar, 1, 1 );\n pane.add( panel, 1, 2 );\n pane.add( statusLabel, 1, 3 );\n stackPane.getChildren().addAll(pane, loadingLabel);\n\n final ContextMenu contextMenu = new ContextMenu();\n final MenuItem title = new MenuItem(\"Node\");\n final MenuItem position = new MenuItem(\"Position\");\n title.setDisable(true);\n position.setDisable(true);\n contextMenu.getItems().addAll(title, position);\n\n panel.setOnContextMenuRequested(event -> {\n final Point2D localPosition = panel.sceneToLocal(event.getSceneX(), event.getSceneY());\n final List matches = camera.getNodesForScreenSpacePosition((int)localPosition.getX(), (int)localPosition.getY());\n if(matches.size() > 0) {\n final Node firstMatch = matches.get(0).getNode();\n title.setText(\"Node: \" + firstMatch.getName() + \" (\" + firstMatch.getClass().getSimpleName() + \")\");\n position.setText(firstMatch.getPosition().toString());\n } else {\n title.setText(\"(no matches)\");\n position.setText(\"\");\n }\n contextMenu.show(panel, event.getScreenX(), event.getScreenY());\n });\n\n panel.setOnMouseClicked(event -> {\n if(event.getButton() == MouseButton.PRIMARY) {\n contextMenu.hide();\n }\n });\n\n sceneryPanel[0] = panel;\n\n javafx.scene.Scene scene = new javafx.scene.Scene( stackPane );\n fxPanel.setScene(scene);\n// scene.addEventHandler(MouseEvent.ANY, event -> getLogger().info(\"Mouse event: \" + event.toString()));\n// sceneryPanel[0].addEventHandler(MouseEvent.ANY, event -> getLogger().info(\"PANEL Mouse event: \" + event.toString()));\n\n frame.setVisible(true);\n// stage.setScene( scene );\n// stage.setOnCloseRequest( event -> {\n// getDisplay().close();\n// this.close();\n// } );\n// stage.focusedProperty().addListener( ( ov, t, t1 ) -> {\n// if( t1 )// If you just gained focus\n// 
displayService.setActiveDisplay( getDisplay() );\n// } );\n\n new JavaFXMenuCreator().createMenus( menus.getMenu( \"SciView\" ), menuBar );\n\n// stage.show();\n\n latch.countDown();\n } );\n\n try {\n latch.await();\n } catch( InterruptedException e1 ) {\n e1.printStackTrace();\n }\n\n // window width and window height get ignored by the renderer if it is embedded.\n // dimensions are determined from the SceneryFXPanel, then.\n setRenderer( Renderer.createRenderer( getHub(), getApplicationName(), getScene(),\n getWindowWidth(), getWindowHeight(),\n sceneryPanel[0] ) );\n } else {\n final JPanel p = new JPanel(new BorderLayout(0, 0));\n panel = new SceneryJPanel();\n JPopupMenu.setDefaultLightWeightPopupEnabled(false);\n final JMenuBar swingMenuBar = new JMenuBar();\n new SwingJMenuBarCreator().createMenus(menus.getMenu(\"SciView\"), swingMenuBar);\n frame.setJMenuBar(swingMenuBar);\n\n BufferedImage splashImage;\n try {\n splashImage = ImageIO.read(this.getClass().getResourceAsStream(\"sciview-logo.png\"));\n } catch (IOException e) {\n getLogger().warn(\"Could not read splash image 'sciview-logo.png'\");\n splashImage = new BufferedImage(1, 1, BufferedImage.TYPE_INT_ARGB);\n }\n\n final String sceneryVersion = SceneryBase.class.getPackage().getImplementationVersion();\n final String sciviewVersion = SciView.class.getPackage().getImplementationVersion();\n final String versionString;\n\n if(sceneryVersion == null || sciviewVersion == null) {\n versionString = \"\";\n } else {\n versionString = \"\\n\\nsciview \" + sciviewVersion + \" / scenery \" + sceneryVersion;\n }\n\n splashLabel = new JLabel(versionString,\n new ImageIcon(splashImage.getScaledInstance(500, 200, java.awt.Image.SCALE_SMOOTH)),\n SwingConstants.CENTER);\n splashLabel.setBackground(new java.awt.Color(50, 48, 47));\n splashLabel.setForeground(new java.awt.Color(78, 76, 75));\n splashLabel.setOpaque(true);\n splashLabel.setVerticalTextPosition(JLabel.BOTTOM);\n 
splashLabel.setHorizontalTextPosition(JLabel.CENTER);\n\n p.setLayout(new OverlayLayout(p));\n p.setBackground(new java.awt.Color(50, 48, 47));\n p.add(panel, BorderLayout.CENTER);\n panel.setVisible(true);\n\n nodePropertyEditor.getComponent(); // Initialize node property panel\n\n JTree inspectorTree = nodePropertyEditor.getTree();\n JPanel inspectorProperties = nodePropertyEditor.getProps();\n\n inspector = new JSplitPane(JSplitPane.VERTICAL_SPLIT, //\n new JScrollPane( inspectorTree ),\n new JScrollPane( inspectorProperties ));\n inspector.setDividerLocation( getWindowHeight() / 3 );\n inspector.setContinuousLayout(true);\n inspector.setBorder(BorderFactory.createEmptyBorder());\n\n mainSplitPane = new JSplitPane(JSplitPane.HORIZONTAL_SPLIT, //\n p,\n inspector\n );\n mainSplitPane.setDividerLocation( getWindowWidth()/3 * 2 );\n mainSplitPane.setBorder(BorderFactory.createEmptyBorder());\n\n frame.add(mainSplitPane, BorderLayout.CENTER);\n\n frame.setGlassPane(splashLabel);\n frame.getGlassPane().setVisible(true);\n// frame.getGlassPane().setBackground(new java.awt.Color(50, 48, 47, 255));\n frame.setVisible(true);\n\n sceneryPanel[0] = panel;\n\n setRenderer( Renderer.createRenderer( getHub(), getApplicationName(), getScene(),\n getWindowWidth(), getWindowHeight(),\n sceneryPanel[0]) );\n }\n\n // Enable push rendering by default\n getRenderer().setPushMode( true );\n\n getHub().add( SceneryElement.Renderer, getRenderer() );\n\n GLVector[] tetrahedron = new GLVector[4];\n tetrahedron[0] = new GLVector( 1.0f, 0f, -1.0f/(float)Math.sqrt(2.0f) );\n tetrahedron[1] = new GLVector( -1.0f,0f,-1.0f/(float)Math.sqrt(2.0) );\n tetrahedron[2] = new GLVector( 0.0f,1.0f,1.0f/(float)Math.sqrt(2.0) );\n tetrahedron[3] = new GLVector( 0.0f,-1.0f,1.0f/(float)Math.sqrt(2.0) );\n\n PointLight[] lights = new PointLight[4];\n\n for( int i = 0; i < lights.length; i++ ) {\n lights[i] = new PointLight( 150.0f );\n lights[i].setPosition( tetrahedron[i].times(25.0f) );\n 
lights[i].setEmissionColor( new GLVector( 1.0f, 1.0f, 1.0f ) );\n lights[i].setIntensity( 100.0f );\n getScene().addChild( lights[i] );\n }\n\n Camera cam = new DetachedHeadCamera();\n cam.setPosition( new GLVector( 0.0f, 5.0f, 5.0f ) );\n cam.perspectiveCamera( 50.0f, getWindowWidth(), getWindowHeight(), 0.1f, 1000.0f );\n //cam.setTarget( new GLVector( 0, 0, 0 ) );\n //cam.setTargeted( true );\n cam.setActive( true );\n getScene().addChild( cam );\n this.camera = cam;\n\n floor = new Box( new GLVector( 500f, 0.2f, 500f ) );\n floor.setName( \"Floor\" );\n floor.setPosition( new GLVector( 0f, -1f, 0f ) );\n floor.getMaterial().setDiffuse( new GLVector( 1.0f, 1.0f, 1.0f ) );\n getScene().addChild( floor );\n\n animations = new LinkedList<>();\n\n if(useJavaFX) {\n Platform.runLater(() -> {\n while (!getRenderer().getFirstImageReady()) {\n try {\n Thread.sleep(100);\n } catch (InterruptedException e) {\n e.printStackTrace();\n }\n }\n\n // fade out loading screen, show status bar\n FadeTransition ft = new FadeTransition(Duration.millis(500), loadingLabel);\n ft.setFromValue(1.0);\n ft.setToValue(0.0);\n ft.setCycleCount(1);\n ft.setInterpolator(Interpolator.EASE_OUT);\n ft.setOnFinished(event -> {\n loadingLabel.setVisible(false);\n statusLabel.setVisible(true);\n });\n\n ft.play();\n });\n } else {\n SwingUtilities.invokeLater(() -> {\n try {\n while (!getSceneryRenderer().getFirstImageReady()) {\n getLogger().info(\"Waiting for renderer\");\n Thread.sleep(100);\n }\n\n Thread.sleep(200);\n } catch (InterruptedException e) {\n }\n\n nodePropertyEditor.rebuildTree();\n frame.getGlassPane().setVisible(false);\n getLogger().info(\"Done initializing SciView\");\n });\n }\n\n }\n\n public void setStatusText(String text) {\n statusLabel.setText(text);\n }\n\n public void setFloor( Node n ) {\n floor = n;\n }\n\n public Node getFloor() {\n return floor;\n }\n\n private float getFloory() {\n return floor.getPosition().y();\n }\n\n private void setFloory( float new_pos ) 
{\n float temp_pos = 0f;\n temp_pos = new_pos;\n if( temp_pos < -100f ) temp_pos = -100f;\n else if( new_pos > 5f ) temp_pos = 5f;\n floor.getPosition().set( 1, temp_pos );\n }\n\n public boolean isInitialized() {\n return sceneInitialized();\n }\n\n public Camera getCamera() {\n return camera;\n }\n\n public void setDisplay( Display display ) {\n scijavaDisplay = display;\n }\n\n public Display getDisplay() {\n return scijavaDisplay;\n }\n\n public void centerOnNode( Node currentNode ) {\n if( currentNode == null ) return;\n\n Node.OrientedBoundingBox bb = currentNode.generateBoundingBox();\n\n getCamera().setTarget( currentNode.getPosition() );\n getCamera().setTargeted( true );\n\n // Set forward direction to point from camera at active node\n getCamera().setForward( bb.getBoundingSphere().getOrigin().minus( getCamera().getPosition() ).normalize().times( -1 ) );\n\n float distance = (float) (bb.getBoundingSphere().getRadius() / Math.tan( getCamera().getFov() / 360 * java.lang.Math.PI ));\n\n // Solve for the proper rotation\n Quaternion rotation = new Quaternion().setLookAt( getCamera().getForward().toFloatArray(),\n new GLVector(0,1,0).toFloatArray(),\n new GLVector(1,0,0).toFloatArray(),\n new GLVector( 0,1,0).toFloatArray(),\n new GLVector( 0, 0, 1).toFloatArray() );\n\n getCamera().setRotation( rotation.normalize() );\n getCamera().setPosition( bb.getBoundingSphere().getOrigin().plus( getCamera().getForward().times( distance * -1 ) ) );\n\n getCamera().setDirty(true);\n getCamera().setNeedsUpdate(true);\n }\n\n public void setFPSSpeed( float newspeed ) {\n if( newspeed < 0.30f ) newspeed = 0.3f;\n else if( newspeed > 30.0f ) newspeed = 30.0f;\n fpsScrollSpeed = newspeed;\n log.debug( \"FPS scroll speed: \" + fpsScrollSpeed );\n }\n\n public float getFPSSpeed() {\n return fpsScrollSpeed;\n }\n\n public void setMouseSpeed( float newspeed ) {\n if( newspeed < 0.30f ) newspeed = 0.3f;\n else if( newspeed > 3.0f ) newspeed = 3.0f;\n mouseSpeedMult = newspeed;\n 
log.debug( \"Mouse speed: \" + mouseSpeedMult );\n }\n\n public float getMouseSpeed() {\n return mouseSpeedMult;\n }\n\n public void resetFPSInputs() {\n getInputHandler().addBehaviour( \"move_forward_scroll\",\n new MovementCommand( \"move_forward\", \"forward\", () -> getScene().findObserver(),\n getFPSSpeed() ) );\n getInputHandler().addBehaviour( \"move_forward\",\n new MovementCommand( \"move_forward\", \"forward\", () -> getScene().findObserver(),\n getFPSSpeed() ) );\n getInputHandler().addBehaviour( \"move_back\",\n new MovementCommand( \"move_back\", \"back\", () -> getScene().findObserver(),\n getFPSSpeed() ) );\n getInputHandler().addBehaviour( \"move_left\",\n new MovementCommand( \"move_left\", \"left\", () -> getScene().findObserver(),\n getFPSSpeed() ) );\n getInputHandler().addBehaviour( \"move_right\",\n new MovementCommand( \"move_right\", \"right\", () -> getScene().findObserver(),\n getFPSSpeed() ) );\n getInputHandler().addBehaviour( \"move_up\",\n new MovementCommand( \"move_up\", \"up\", () -> getScene().findObserver(),\n getFPSSpeed() ) );\n getInputHandler().addBehaviour( \"move_down\",\n new MovementCommand( \"move_down\", \"down\", () -> getScene().findObserver(),\n getFPSSpeed() ) );\n }\n\n class enableIncrease implements ClickBehaviour {\n\n @Override public void click( int x, int y ) {\n setFPSSpeed( getFPSSpeed() + 0.5f );\n setMouseSpeed( getMouseSpeed() + 0.05f );\n\n log.debug( \"Increasing FPS scroll Speed\" );\n\n resetFPSInputs();\n }\n }\n\n class enableDecrease implements ClickBehaviour {\n\n @Override public void click( int x, int y ) {\n setFPSSpeed( getFPSSpeed() - 0.1f );\n setMouseSpeed( getMouseSpeed() - 0.05f );\n\n log.debug( \"Decreasing FPS scroll Speed\" );\n\n resetFPSInputs();\n }\n }\n\n class showHelpDisplay implements ClickBehaviour {\n\n @Override public void click( int x, int y ) {\n String helpString = \"SciView help:\\n\\n\";\n for( InputTrigger trigger : getInputHandler().getAllBindings().keySet() ) {\n 
helpString += trigger + \"\\t-\\t\" + getInputHandler().getAllBindings().get( trigger ) + \"\\n\";\n }\n // HACK: Make the console pop via stderr.\n // Later, we will use a nicer dialog box or some such.\n log.warn( helpString );\n }\n }\n\n @Override public void inputSetup() {\n Function1, Unit> selectAction = nearest -> {\n if( !nearest.isEmpty() ) {\n setActiveNode( nearest.get( 0 ).getNode() );\n log.debug( \"Selected node: \" + getActiveNode().getName() );\n }\n return Unit.INSTANCE;\n };\n\n List> ignoredObjects = new ArrayList<>();\n ignoredObjects.add( BoundingGrid.class );\n\n getInputHandler().useDefaultBindings( \"\" );\n\n // Mouse controls\n getInputHandler().addBehaviour( \"object_selection_mode\",\n new SelectCommand( \"objectSelector\", getRenderer(), getScene(),\n () -> getScene().findObserver(), false, ignoredObjects,\n selectAction ) );\n getInputHandler().addKeyBinding( \"object_selection_mode\", \"double-click button1\" );\n\n enableArcBallControl();\n enableFPSControl();\n\n getInputHandler().addBehaviour( \"mouse_control_nodetranslate\", new NodeTranslateControl( this, 0.002f ) );\n getInputHandler().addKeyBinding( \"mouse_control_nodetranslate\", \"shift button2\" );\n\n // Extra keyboard controls\n getInputHandler().addBehaviour( \"show_help\", new showHelpDisplay() );\n getInputHandler().addKeyBinding( \"show_help\", \"U\" );\n\n getInputHandler().addBehaviour( \"enable_decrease\", new enableDecrease() );\n getInputHandler().addKeyBinding( \"enable_decrease\", \"M\" );\n\n getInputHandler().addBehaviour( \"enable_increase\", new enableIncrease() );\n getInputHandler().addKeyBinding( \"enable_increase\", \"N\" );\n }\n\n private void enableArcBallControl() {\n GLVector target;\n if( getActiveNode() == null ) {\n target = new GLVector( 0, 0, 0 );\n } else {\n target = getActiveNode().getPosition();\n }\n\n float mouseSpeed = 0.25f;\n mouseSpeed = getMouseSpeed();\n\n Supplier cameraSupplier = () -> getScene().findObserver();\n targetArcball 
= new ArcballCameraControl( \"mouse_control_arcball\", cameraSupplier,\n getRenderer().getWindow().getWidth(),\n getRenderer().getWindow().getHeight(), target );\n targetArcball.setMaximumDistance( Float.MAX_VALUE );\n targetArcball.setMouseSpeedMultiplier( mouseSpeed );\n targetArcball.setScrollSpeedMultiplier( 0.05f );\n targetArcball.setDistance( getCamera().getPosition().minus( target ).magnitude() );\n\n getInputHandler().addBehaviour( \"mouse_control_arcball\", targetArcball );\n getInputHandler().addKeyBinding( \"mouse_control_arcball\", \"shift button1\" );\n getInputHandler().addBehaviour( \"scroll_arcball\", targetArcball );\n getInputHandler().addKeyBinding( \"scroll_arcball\", \"shift scroll\" );\n }\n\n private void enableFPSControl() {\n Supplier cameraSupplier = () -> getScene().findObserver();\n fpsControl = new FPSCameraControl( \"mouse_control\", cameraSupplier, getRenderer().getWindow().getWidth(),\n getRenderer().getWindow().getHeight() );\n\n getInputHandler().addBehaviour( \"mouse_control\", fpsControl );\n getInputHandler().addKeyBinding( \"mouse_control\", \"button1\" );\n\n getInputHandler().addBehaviour( \"mouse_control_cameratranslate\", new CameraTranslateControl( this, 0.002f ) );\n getInputHandler().addKeyBinding( \"mouse_control_cameratranslate\", \"button2\" );\n\n resetFPSInputs();\n\n getInputHandler().addKeyBinding( \"move_forward_scroll\", \"scroll\" );\n }\n\n public Node addBox() {\n return addBox( new ClearGLVector3( 0.0f, 0.0f, 0.0f ) );\n }\n\n public Node addBox( Vector3 position ) {\n return addBox( position, new ClearGLVector3( 1.0f, 1.0f, 1.0f ) );\n }\n\n public Node addBox( Vector3 position, Vector3 size ) {\n return addBox( position, size, DEFAULT_COLOR, false );\n }\n\n public Node addBox( final Vector3 position, final Vector3 size, final ColorRGB color,\n final boolean inside ) {\n // TODO: use a material from the current palate by default\n final Material boxmaterial = new Material();\n boxmaterial.setAmbient( new 
GLVector( 1.0f, 0.0f, 0.0f ) );\n boxmaterial.setDiffuse( vector( color ) );\n boxmaterial.setSpecular( new GLVector( 1.0f, 1.0f, 1.0f ) );\n\n final Box box = new Box( ClearGLVector3.convert( size ), inside );\n box.setMaterial( boxmaterial );\n box.setPosition( ClearGLVector3.convert( position ) );\n\n return addNode( box );\n }\n\n public Node addSphere() {\n return addSphere( new ClearGLVector3( 0.0f, 0.0f, 0.0f ), 1 );\n }\n\n public Node addSphere( Vector3 position, float radius ) {\n return addSphere( position, radius, DEFAULT_COLOR );\n }\n\n public Node addSphere( final Vector3 position, final float radius, final ColorRGB color ) {\n final Material material = new Material();\n material.setAmbient( new GLVector( 1.0f, 0.0f, 0.0f ) );\n material.setDiffuse( vector( color ) );\n material.setSpecular( new GLVector( 1.0f, 1.0f, 1.0f ) );\n\n final Sphere sphere = new Sphere( radius, 20 );\n sphere.setMaterial( material );\n sphere.setPosition( ClearGLVector3.convert( position ) );\n\n return addNode( sphere );\n }\n\n public Node addLine() {\n return addLine( new ClearGLVector3( 0.0f, 0.0f, 0.0f ), new ClearGLVector3( 0.0f, 0.0f, 0.0f ) );\n }\n\n public Node addLine( Vector3 start, Vector3 stop ) {\n return addLine( start, stop, DEFAULT_COLOR );\n }\n\n public Node addLine( Vector3 start, Vector3 stop, ColorRGB color ) {\n return addLine( new Vector3[] { start, stop }, color, 0.1f );\n }\n\n public Node addLine( final Vector3[] points, final ColorRGB color, final double edgeWidth ) {\n final Material material = new Material();\n material.setAmbient( new GLVector( 1.0f, 1.0f, 1.0f ) );\n material.setDiffuse( vector( color ) );\n material.setSpecular( new GLVector( 1.0f, 1.0f, 1.0f ) );\n\n final Line line = new Line( points.length );\n for( final Vector3 pt : points ) {\n line.addPoint( ClearGLVector3.convert( pt ) );\n }\n\n line.setEdgeWidth( ( float ) edgeWidth );\n\n line.setMaterial( material );\n line.setPosition( ClearGLVector3.convert( points[0] ) 
);\n\n return addNode( line );\n }\n\n public Node addPointLight() {\n final Material material = new Material();\n material.setAmbient( new GLVector( 1.0f, 0.0f, 0.0f ) );\n material.setDiffuse( new GLVector( 0.0f, 1.0f, 0.0f ) );\n material.setSpecular( new GLVector( 1.0f, 1.0f, 1.0f ) );\n\n final PointLight light = new PointLight( 5.0f );\n light.setMaterial( material );\n light.setPosition( new GLVector( 0.0f, 0.0f, 0.0f ) );\n\n return addNode( light );\n }\n\n public void writeSCMesh( String filename, Mesh scMesh ) {\n File f = new File( filename );\n BufferedOutputStream out;\n try {\n out = new BufferedOutputStream( new FileOutputStream( f ) );\n out.write( \"solid STL generated by FIJI\\n\".getBytes() );\n\n FloatBuffer normalsFB = scMesh.getNormals();\n FloatBuffer verticesFB = scMesh.getVertices();\n\n while( verticesFB.hasRemaining() && normalsFB.hasRemaining() ) {\n out.write( ( \"facet normal \" + normalsFB.get() + \" \" + normalsFB.get() + \" \" + normalsFB.get() +\n \"\\n\" ).getBytes() );\n out.write( \"outer loop\\n\".getBytes() );\n for( int v = 0; v < 3; v++ ) {\n out.write( ( \"vertex\\t\" + verticesFB.get() + \" \" + verticesFB.get() + \" \" + verticesFB.get() +\n \"\\n\" ).getBytes() );\n }\n out.write( \"endloop\\n\".getBytes() );\n out.write( \"endfacet\\n\".getBytes() );\n }\n out.write( \"endsolid vcg\\n\".getBytes() );\n out.close();\n } catch( FileNotFoundException e ) {\n e.printStackTrace();\n } catch( IOException e ) {\n e.printStackTrace();\n }\n\n }\n\n public float getDefaultPointSize() {\n return 0.025f;\n }\n\n public float[] makeNormalsFromVertices( ArrayList verts ) {\n float[] normals = new float[verts.size()];// div3 * 3coords\n\n for( int k = 0; k < verts.size(); k += 3 ) {\n GLVector v1 = new GLVector( verts.get( k ).getFloatPosition( 0 ), //\n verts.get( k ).getFloatPosition( 1 ), //\n verts.get( k ).getFloatPosition( 2 ) );\n GLVector v2 = new GLVector( verts.get( k + 1 ).getFloatPosition( 0 ),\n verts.get( k + 1 
).getFloatPosition( 1 ),\n verts.get( k + 1 ).getFloatPosition( 2 ) );\n GLVector v3 = new GLVector( verts.get( k + 2 ).getFloatPosition( 0 ),\n verts.get( k + 2 ).getFloatPosition( 1 ),\n verts.get( k + 2 ).getFloatPosition( 2 ) );\n GLVector a = v2.minus( v1 );\n GLVector b = v3.minus( v1 );\n GLVector n = a.cross( b ).getNormalized();\n normals[k / 3] = n.get( 0 );\n normals[k / 3 + 1] = n.get( 1 );\n normals[k / 3 + 2] = n.get( 2 );\n }\n return normals;\n }\n\n public void open( final String source ) throws IOException {\n if(source.endsWith(\".xml\")) {\n addBDVVolume(source);\n return;\n }\n\n final Object data = io.open( source );\n if( data instanceof net.imagej.mesh.Mesh ) addMesh( ( net.imagej.mesh.Mesh ) data );\n else if( data instanceof graphics.scenery.Mesh ) addMesh( ( graphics.scenery.Mesh ) data );\n else if( data instanceof graphics.scenery.PointCloud ) addPointCloud( ( graphics.scenery.PointCloud ) data );\n else if( data instanceof Dataset ) addVolume( ( Dataset ) data );\n else if( data instanceof IterableInterval ) addVolume( ( ( IterableInterval ) data ), source );\n else if( data instanceof List ) {\n final List list = ( List ) data;\n if( list.isEmpty() ) {\n throw new IllegalArgumentException( \"Data source '\" + source + \"' appears empty.\" );\n }\n final Object element = list.get( 0 );\n if( element instanceof RealLocalizable ) {\n // NB: For now, we assume all elements will be RealLocalizable.\n // Highly likely to be the case, barring antagonistic importers.\n @SuppressWarnings(\"unchecked\") final List points = ( List ) list;\n addPointCloud( points, source );\n } else {\n final String type = element == null ? \"\" : element.getClass().getName();\n throw new IllegalArgumentException( \"Data source '\" + source + //\n \"' contains elements of unknown type '\" + type + \"'\" );\n }\n } else {\n final String type = data == null ? 
\"\" : data.getClass().getName();\n throw new IllegalArgumentException( \"Data source '\" + source + //\n \"' contains data of unknown type '\" + type + \"'\" );\n }\n }\n\n public Node addPointCloud( Collection points ) {\n return addPointCloud( points, \"PointCloud\" );\n }\n\n public Node addPointCloud( final Collection points,\n final String name ) {\n final float[] flatVerts = new float[points.size() * 3];\n int k = 0;\n for( final RealLocalizable point : points ) {\n flatVerts[k * 3] = point.getFloatPosition( 0 );\n flatVerts[k * 3 + 1] = point.getFloatPosition( 1 );\n flatVerts[k * 3 + 2] = point.getFloatPosition( 2 );\n k++;\n }\n\n final PointCloud pointCloud = new PointCloud( getDefaultPointSize(), name );\n final Material material = new Material();\n final FloatBuffer vBuffer = ByteBuffer.allocateDirect( flatVerts.length * 4 ) //\n .order( ByteOrder.nativeOrder() ).asFloatBuffer();\n final FloatBuffer nBuffer = ByteBuffer.allocateDirect( 0 ) //\n .order( ByteOrder.nativeOrder() ).asFloatBuffer();\n\n vBuffer.put( flatVerts );\n vBuffer.flip();\n\n pointCloud.setVertices( vBuffer );\n pointCloud.setNormals( nBuffer );\n pointCloud.setIndices( ByteBuffer.allocateDirect( 0 ) //\n .order( ByteOrder.nativeOrder() ).asIntBuffer() );\n pointCloud.setupPointCloud();\n material.setAmbient( new GLVector( 1.0f, 1.0f, 1.0f ) );\n material.setDiffuse( new GLVector( 1.0f, 1.0f, 1.0f ) );\n material.setSpecular( new GLVector( 1.0f, 1.0f, 1.0f ) );\n pointCloud.setMaterial( material );\n pointCloud.setPosition( new GLVector( 0f, 0f, 0f ) );\n\n return addNode( pointCloud );\n }\n\n public Node addPointCloud( final PointCloud pointCloud ) {\n pointCloud.setupPointCloud();\n pointCloud.getMaterial().setAmbient( new GLVector( 1.0f, 1.0f, 1.0f ) );\n pointCloud.getMaterial().setDiffuse( new GLVector( 1.0f, 1.0f, 1.0f ) );\n pointCloud.getMaterial().setSpecular( new GLVector( 1.0f, 1.0f, 1.0f ) );\n pointCloud.setPosition( new GLVector( 0f, 0f, 0f ) );\n\n return addNode( 
pointCloud );\n }\n\n public Node addNode( final Node n ) {\n getScene().addChild( n );\n setActiveNode( n );\n updateFloorPosition();\n eventService.publish( new NodeAddedEvent( n ) );\n return n;\n }\n\n public Node addMesh( final Mesh scMesh ) {\n final Material material = new Material();\n material.setAmbient( new GLVector( 1.0f, 0.0f, 0.0f ) );\n material.setDiffuse( new GLVector( 0.0f, 1.0f, 0.0f ) );\n material.setSpecular( new GLVector( 1.0f, 1.0f, 1.0f ) );\n\n scMesh.setMaterial( material );\n scMesh.setPosition( new GLVector( 0.0f, 0.0f, 0.0f ) );\n\n return addNode( scMesh );\n }\n\n public Node addMesh( net.imagej.mesh.Mesh mesh ) {\n Mesh scMesh = MeshConverter.toScenery( mesh );\n\n return addMesh( scMesh );\n }\n\n public void removeMesh( Mesh scMesh ) {\n getScene().removeChild( scMesh );\n }\n\n public Node getActiveNode() {\n return activeNode;\n }\n\n public Node setActiveNode( Node n ) {\n if( activeNode == n ) return activeNode;\n activeNode = n;\n targetArcball.setTarget( n == null ? 
() -> new GLVector( 0, 0, 0 ) : n::getPosition);\n eventService.publish( new NodeActivatedEvent( activeNode ) );\n nodePropertyEditor.rebuildTree();\n getScene().getOnNodePropertiesChanged().put(\"updateInspector\",\n node -> { if(node == activeNode) {\n nodePropertyEditor.updateProperties(activeNode);\n }\n return null;\n });\n return activeNode;\n }\n\n public void toggleInspectorWindow()\n {\n boolean currentlyVisible = inspector.isVisible();\n if(currentlyVisible) {\n inspector.setVisible(false);\n mainSplitPane.setDividerLocation(getWindowWidth());\n }\n else {\n inspector.setVisible(true);\n mainSplitPane.setDividerLocation(getWindowWidth()/4 * 3);\n }\n\n }\n\n public synchronized void animate( int fps, Runnable action ) {\n // TODO: Make animation speed less laggy and more accurate.\n final int delay = 1000 / fps;\n animations.add( threadService.run( () -> {\n while( animating ) {\n action.run();\n try {\n Thread.sleep( delay );\n } catch( InterruptedException e ) {\n break;\n }\n }\n } ) );\n animating = true;\n }\n\n public synchronized void stopAnimation() {\n animating = false;\n while( !animations.isEmpty() ) {\n animations.peek().cancel( true );\n animations.remove();\n }\n }\n\n public void takeScreenshot() {\n getRenderer().screenshot();\n }\n\n public void takeScreenshot( String path ) {\n getRenderer().screenshot( path, false );\n }\n\n public Node[] getSceneNodes() {\n return getSceneNodes( n -> !( n instanceof Camera ) && !( n instanceof PointLight ) );\n }\n\n public Node[] getSceneNodes( Predicate filter ) {\n return getScene().getChildren().stream().filter( filter ).toArray( Node[]::new );\n }\n\n public Node[] getAllSceneNodes() {\n return getSceneNodes( n -> true );\n }\n\n public void deleteActiveNode() {\n deleteNode( getActiveNode() );\n }\n\n public void deleteNode( Node node ) {\n node.getParent().removeChild( node );\n eventService.publish( new NodeRemovedEvent( node ) );\n if( activeNode == node ) setActiveNode( null );\n }\n\n 
public void dispose() {\n this.close();\n }\n\n public void moveCamera( float[] position ) {\n getCamera().setPosition( new GLVector( position[0], position[1], position[2] ) );\n }\n\n public void moveCamera( double[] position ) {\n getCamera().setPosition( new GLVector( ( float ) position[0], ( float ) position[1], ( float ) position[2] ) );\n }\n\n public String getName() {\n return getApplicationName();\n }\n\n public void addChild( Node node ) {\n getScene().addChild( node );\n }\n\n public Node addVolume( Dataset image ) {\n float[] voxelDims = new float[image.numDimensions()];\n for( int d = 0; d < voxelDims.length; d++ ) {\n voxelDims[d] = ( float ) image.axis( d ).averageScale( 0, 1 );\n }\n return addVolume( image, voxelDims );\n }\n\n public Node addBDVVolume( String source ) {\n final VolumeViewerOptions opts = new VolumeViewerOptions();\n opts.maxCacheSizeInMB(Integer.parseInt(System.getProperty(\"scenery.BDVVolume.maxCacheSize\", \"512\")));\n final BDVVolume v = new BDVVolume(source, opts);\n v.setScale(new GLVector(0.01f, 0.01f, 0.01f));\n\n getScene().addChild(v);\n setActiveNode(v);\n v.goToTimePoint(0);\n\n return v;\n }\n\n @SuppressWarnings({ \"rawtypes\", \"unchecked\" }) public Node addVolume( Dataset image,\n float[] voxelDimensions ) {\n return addVolume( ( IterableInterval ) Views.flatIterable( image.getImgPlus() ), image.getName(),\n voxelDimensions );\n }\n\n public > Node addVolume( IterableInterval image ) {\n return addVolume( image, \"Volume\" );\n }\n\n public > Node addVolume( IterableInterval image, String name ) {\n return addVolume( image, name, 1, 1, 1 );\n }\n\n public void setColormap( Node n, ColorTable colorTable ) {\n final int copies = 16;\n\n final ByteBuffer byteBuffer = ByteBuffer.allocateDirect(\n 4 * colorTable.getLength() * copies );// Num bytes * num components * color map length * height of color map texture\n\n final byte[] tmp = new byte[4 * colorTable.getLength()];\n for( int k = 0; k < colorTable.getLength(); 
k++ ) {\n for( int c = 0; c < colorTable.getComponentCount(); c++ ) {\n // TODO this assumes numBits is 8, could be 16\n tmp[4 * k + c] = ( byte ) colorTable.get( c, k );\n }\n\n if( colorTable.getComponentCount() == 3 ) {\n tmp[4 * k + 3] = (byte)255;\n }\n }\n\n for( int i = 0; i < copies; i++ ) {\n byteBuffer.put(tmp);\n }\n\n byteBuffer.flip();\n\n n.getMetadata().put(\"sciviewColormap\", colorTable);\n\n if(n instanceof Volume) {\n ((Volume) n).getColormaps().put(\"sciviewColormap\", new Volume.Colormap.ColormapBuffer(new GenericTexture(\"colorTable\",\n new GLVector(colorTable.getLength(),\n copies, 1.0f), 4,\n GLTypeEnum.UnsignedByte,\n byteBuffer)));\n ((Volume) n).setColormap(\"sciviewColormap\");\n }\n }\n\n public > Node addVolume( IterableInterval image, String name,\n float... voxelDimensions ) {\n log.debug( \"Add Volume\" );\n\n long dimensions[] = new long[3];\n image.dimensions( dimensions );\n\n Volume v = new Volume();\n\n getScene().addChild( v );\n\n @SuppressWarnings(\"unchecked\") Class voxelType = ( Class ) image.firstElement().getClass();\n float minVal, maxVal;\n\n if( voxelType == UnsignedByteType.class ) {\n minVal = 0;\n maxVal = 255;\n } else if( voxelType == UnsignedShortType.class ) {\n minVal = 0;\n maxVal = 65535;\n } else if( voxelType == FloatType.class ) {\n minVal = 0;\n maxVal = 1;\n } else {\n log.debug( \"Type: \" + voxelType +\n \" cannot be displayed as a volume. 
Convert to UnsignedByteType, UnsignedShortType, or FloatType.\" );\n return null;\n }\n\n updateVolume( image, name, voxelDimensions, v );\n\n GLVector scaleVec = new GLVector( 0.5f * dimensions[0], //\n 0.5f * dimensions[1], //\n 0.5f * dimensions[2] );\n\n v.setScale( scaleVec );// TODO maybe dont do this\n // TODO: This translation should probably be accounted for in scenery; volumes use a corner-origin and\n // meshes use center-origin coordinate systems.\n v.setPosition( v.getPosition().plus( new GLVector( 0.5f * dimensions[0] - 0.5f, 0.5f * dimensions[1] - 0.5f,\n 0.5f * dimensions[2] - 0.5f ) ) );\n\n v.setTrangemin( minVal );\n v.setTrangemax( maxVal );\n v.setTransferFunction(TransferFunction.ramp(0.0f, 0.4f));\n\n try {\n setColormap( v, lutService.loadLUT( lutService.findLUTs().get( \"WCIF/ICA.lut\" ) ) );\n } catch( IOException e ) {\n e.printStackTrace();\n }\n\n\n setActiveNode( v );\n\n return v;\n }\n\n public > Node updateVolume( IterableInterval image, String name,\n float[] voxelDimensions, Volume v ) {\n log.debug( \"Update Volume\" );\n\n long dimensions[] = new long[3];\n image.dimensions( dimensions );\n\n @SuppressWarnings(\"unchecked\") Class voxelType = ( Class ) image.firstElement().getClass();\n int bytesPerVoxel = image.firstElement().getBitsPerPixel() / 8;\n NativeTypeEnum nType;\n\n if( voxelType == UnsignedByteType.class ) {\n nType = NativeTypeEnum.UnsignedByte;\n } else if( voxelType == UnsignedShortType.class ) {\n nType = NativeTypeEnum.UnsignedShort;\n } else if( voxelType == FloatType.class ) {\n nType = NativeTypeEnum.Float;\n } else {\n log.debug( \"Type: \" + voxelType +\n \" cannot be displayed as a volume. 
Convert to UnsignedByteType, UnsignedShortType, or FloatType.\" );\n return null;\n }\n\n // Make and populate a ByteBuffer with the content of the Dataset\n ByteBuffer byteBuffer = ByteBuffer.allocateDirect(\n ( int ) ( bytesPerVoxel * dimensions[0] * dimensions[1] * dimensions[2] ) );\n Cursor cursor = image.cursor();\n\n while( cursor.hasNext() ) {\n cursor.fwd();\n if( voxelType == UnsignedByteType.class ) {\n byteBuffer.put( ( byte ) ( ( ( UnsignedByteType ) cursor.get() ).get() ) );\n } else if( voxelType == UnsignedShortType.class ) {\n byteBuffer.putShort( ( short ) Math.abs( ( ( UnsignedShortType ) cursor.get() ).getShort() ) );\n } else if( voxelType == FloatType.class ) {\n byteBuffer.putFloat( ( ( FloatType ) cursor.get() ).get() );\n }\n }\n byteBuffer.flip();\n\n v.readFromBuffer( name, byteBuffer, dimensions[0], dimensions[1], dimensions[2], voxelDimensions[0],\n voxelDimensions[1], voxelDimensions[2], nType, bytesPerVoxel );\n\n v.setDirty( true );\n v.setNeedsUpdate( true );\n v.setNeedsUpdateWorld( true );\n\n return v;\n }\n\n private static GLVector vector( ColorRGB color ) {\n if( color instanceof ColorRGBA ) {\n return new GLVector( color.getRed() / 255f, //\n color.getGreen() / 255f, //\n color.getBlue() / 255f, //\n color.getAlpha() / 255f );\n }\n return new GLVector( color.getRed() / 255f, //\n color.getGreen() / 255f, //\n color.getBlue() / 255f );\n }\n\n public boolean getPushMode() {\n return getRenderer().getPushMode();\n }\n\n public boolean setPushMode( boolean push ) {\n getRenderer().setPushMode( push );\n return getRenderer().getPushMode();\n }\n\n public ArcballCameraControl getTargetArcball() {\n return targetArcball;\n }\n\n @Override\n protected void finalize() {\n stopAnimation();\n }\n\n private void updateFloorPosition() {\n // Lower the floor below the active node, as needed.\n final Node currentNode = getActiveNode();\n if( currentNode != null ) {\n final Node.OrientedBoundingBox bb = currentNode.generateBoundingBox();\n 
final Node.BoundingSphere bs = bb.getBoundingSphere();\n final float neededFloor = bb.getMin().y() - Math.max( bs.getRadius(), 1 );\n if( neededFloor < getFloory() ) setFloory( neededFloor );\n }\n\n floor.setPosition( new GLVector( 0f, getFloory(), 0f ) );\n }\n\n public Settings getScenerySettings() {\n return this.getSettings();\n }\n\n public Statistics getSceneryStats() {\n return this.getStats();\n }\n\n public Renderer getSceneryRenderer() {\n return this.getRenderer();\n }\n\n protected boolean vrActive = false;\n\n public void toggleVRRendering() {\n vrActive = !vrActive;\n Camera cam = getScene().getActiveObserver();\n if(!(cam instanceof DetachedHeadCamera)) {\n return;\n }\n\n TrackerInput ti = null;\n\n if (!getHub().has(SceneryElement.HMDInput)) {\n try {\n final OpenVRHMD hmd = new OpenVRHMD(false, true);\n getHub().add(SceneryElement.HMDInput, hmd);\n ti = hmd;\n\n // we need to force reloading the renderer as the HMD might require device or instance extensions\n if(getRenderer() instanceof VulkanRenderer) {\n replaceRenderer(getRenderer().getClass().getSimpleName(), true);\n Thread.sleep(1000);\n }\n } catch (Exception e) {\n getLogger().error(\"Could not add OpenVRHMD: \" + e.toString());\n }\n } else {\n ti = getHub().getWorkingHMD();\n }\n\n if(vrActive && ti != null) {\n ((DetachedHeadCamera) cam).setTracker(ti);\n } else {\n ((DetachedHeadCamera) cam).setTracker(null);\n }\n\n while(getRenderer().getInitialized() == false) {\n getLogger().info(\"Waiting for renderer\");\n try {\n Thread.sleep(200);\n } catch (InterruptedException e) {\n e.printStackTrace();\n }\n }\n getRenderer().toggleVR();\n }\n}\n"},"new_file":{"kind":"string","value":"src/main/java/sc/iview/SciView.java"},"old_contents":{"kind":"string","value":"/*-\n * #%L\n * Scenery-backed 3D visualization package for ImageJ.\n * %%\n * Copyright (C) 2016 - 2018 SciView developers.\n * %%\n * Redistribution and use in source and binary forms, with or without\n * modification, are 
permitted provided that the following conditions are met:\n *\n * 1. Redistributions of source code must retain the above copyright notice,\n * this list of conditions and the following disclaimer.\n * 2. Redistributions in binary form must reproduce the above copyright notice,\n * this list of conditions and the following disclaimer in the documentation\n * and/or other materials provided with the distribution.\n *\n * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\n * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE\n * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE\n * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR\n * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF\n * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS\n * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN\n * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)\n * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE\n * POSSIBILITY OF SUCH DAMAGE.\n * #L%\n */\npackage sc.iview;\n\nimport cleargl.GLTypeEnum;\nimport cleargl.GLVector;\nimport com.bulenkov.darcula.DarculaLaf;\nimport com.jogamp.opengl.math.Quaternion;\nimport com.sun.javafx.application.PlatformImpl;\nimport coremem.enums.NativeTypeEnum;\nimport graphics.scenery.Box;\nimport graphics.scenery.*;\nimport graphics.scenery.backends.Renderer;\nimport graphics.scenery.backends.vulkan.VulkanRenderer;\nimport graphics.scenery.controls.InputHandler;\nimport graphics.scenery.controls.OpenVRHMD;\nimport graphics.scenery.controls.TrackerInput;\nimport graphics.scenery.controls.behaviours.ArcballCameraControl;\nimport graphics.scenery.controls.behaviours.FPSCameraControl;\nimport graphics.scenery.controls.behaviours.MovementCommand;\nimport 
graphics.scenery.controls.behaviours.SelectCommand;\nimport graphics.scenery.utils.SceneryFXPanel;\nimport graphics.scenery.utils.SceneryJPanel;\nimport graphics.scenery.utils.SceneryPanel;\nimport graphics.scenery.utils.Statistics;\nimport graphics.scenery.volumes.TransferFunction;\nimport graphics.scenery.volumes.Volume;\nimport graphics.scenery.volumes.bdv.BDVVolume;\nimport javafx.animation.FadeTransition;\nimport javafx.animation.Interpolator;\nimport javafx.application.Platform;\nimport javafx.embed.swing.JFXPanel;\nimport javafx.geometry.Insets;\nimport javafx.geometry.*;\nimport javafx.scene.control.Label;\nimport javafx.scene.control.MenuBar;\nimport javafx.scene.control.MenuItem;\nimport javafx.scene.control.*;\nimport javafx.scene.image.Image;\nimport javafx.scene.image.ImageView;\nimport javafx.scene.input.MouseButton;\nimport javafx.scene.layout.*;\nimport javafx.scene.paint.Color;\nimport javafx.scene.paint.Paint;\nimport javafx.scene.text.TextAlignment;\nimport javafx.util.Duration;\nimport kotlin.Unit;\nimport kotlin.jvm.functions.Function1;\nimport net.imagej.Dataset;\nimport net.imagej.lut.LUTService;\nimport net.imagej.ops.OpService;\nimport net.imglib2.Cursor;\nimport net.imglib2.IterableInterval;\nimport net.imglib2.RealLocalizable;\nimport net.imglib2.RealPoint;\nimport net.imglib2.display.ColorTable;\nimport net.imglib2.type.numeric.RealType;\nimport net.imglib2.type.numeric.integer.UnsignedByteType;\nimport net.imglib2.type.numeric.integer.UnsignedShortType;\nimport net.imglib2.type.numeric.real.FloatType;\nimport net.imglib2.view.Views;\nimport org.scijava.Context;\nimport org.scijava.display.Display;\nimport org.scijava.display.DisplayService;\nimport org.scijava.event.EventService;\nimport org.scijava.io.IOService;\nimport org.scijava.log.LogService;\nimport org.scijava.menu.MenuService;\nimport org.scijava.plugin.Parameter;\nimport org.scijava.thread.ThreadService;\nimport org.scijava.ui.behaviour.ClickBehaviour;\nimport 
org.scijava.ui.behaviour.InputTrigger;\nimport org.scijava.ui.swing.menu.SwingJMenuBarCreator;\nimport org.scijava.util.ColorRGB;\nimport org.scijava.util.ColorRGBA;\nimport org.scijava.util.Colors;\nimport sc.iview.commands.view.NodePropertyEditor;\nimport sc.iview.controls.behaviours.CameraTranslateControl;\nimport sc.iview.controls.behaviours.NodeTranslateControl;\nimport sc.iview.event.NodeActivatedEvent;\nimport sc.iview.event.NodeAddedEvent;\nimport sc.iview.event.NodeRemovedEvent;\nimport sc.iview.javafx.JavaFXMenuCreator;\nimport sc.iview.process.MeshConverter;\nimport sc.iview.vector.ClearGLVector3;\nimport sc.iview.vector.Vector3;\nimport tpietzsch.example2.VolumeViewerOptions;\n\nimport javax.imageio.ImageIO;\nimport javax.swing.*;\nimport javax.swing.event.ChangeListener;\nimport javax.swing.plaf.basic.BasicLookAndFeel;\nimport java.awt.*;\nimport java.awt.image.BufferedImage;\nimport java.io.*;\nimport java.nio.ByteBuffer;\nimport java.nio.ByteOrder;\nimport java.nio.FloatBuffer;\nimport java.util.List;\nimport java.util.Queue;\nimport java.util.*;\nimport java.util.concurrent.CountDownLatch;\nimport java.util.concurrent.Future;\nimport java.util.function.Predicate;\nimport java.util.function.Supplier;\n\npublic class SciView extends SceneryBase {\n\n public static final ColorRGB DEFAULT_COLOR = Colors.LIGHTGRAY;\n\n @Parameter\n private LogService log;\n\n @Parameter\n private MenuService menus;\n\n @Parameter\n private IOService io;\n\n @Parameter\n private OpService ops;\n\n @Parameter\n private EventService eventService;\n\n @Parameter\n private DisplayService displayService;\n\n @Parameter\n private LUTService lutService;\n\n @Parameter\n private ThreadService threadService;\n\n /**\n * Queue keeps track of the currently running animations\n **/\n private Queue animations;\n\n /**\n * Animation pause tracking\n **/\n private boolean animating;\n\n /**\n * This tracks the actively selected Node in the scene\n */\n private Node activeNode = 
null;\n\n /**\n * Mouse controls for FPS movement and Arcball rotation\n */\n protected ArcballCameraControl targetArcball;\n protected FPSCameraControl fpsControl;\n\n /**\n * The primary camera/observer in the scene\n */\n Camera camera = null;\n\n /**\n * JavaFX UI\n */\n private boolean useJavaFX = false;\n\n /**\n * Speeds for input controls\n */\n private float fpsScrollSpeed = 3.0f;\n\n private float mouseSpeedMult = 0.25f;\n\n private Display scijavaDisplay;\n\n /**\n * The floor that orients the user in the scene\n */\n protected Node floor;\n\n private Label statusLabel;\n private Label loadingLabel;\n private JLabel splashLabel;\n private SceneryJPanel panel;\n private StackPane stackPane;\n private MenuBar menuBar;\n private JSplitPane mainSplitPane;\n private final SceneryPanel[] sceneryPanel = { null };\n private JSplitPane inspector;\n private NodePropertyEditor nodePropertyEditor;\n\n public SciView( Context context ) {\n super( \"SciView\", 1280, 720, false, context );\n context.inject( this );\n }\n\n public SciView( String applicationName, int windowWidth, int windowHeight ) {\n super( applicationName, windowWidth, windowHeight, false );\n }\n\n public InputHandler publicGetInputHandler() {\n return getInputHandler();\n }\n\n public class TransparentSlider extends JSlider {\n\n public TransparentSlider() {\n // Important, we taking over the filling of the\n // component...\n setOpaque(false);\n setBackground(java.awt.Color.DARK_GRAY);\n setForeground(java.awt.Color.LIGHT_GRAY);\n }\n\n @Override\n protected void paintComponent(Graphics g) {\n Graphics2D g2d = (Graphics2D) g.create();\n g2d.setColor(getBackground());\n g2d.setComposite(AlphaComposite.SrcOver.derive(0.9f));\n g2d.fillRect(0, 0, getWidth(), getHeight());\n g2d.dispose();\n\n super.paintComponent(g);\n }\n\n }\n\n @SuppressWarnings(\"restriction\") @Override public void init() {\n if(Boolean.parseBoolean(System.getProperty(\"sciview.useDarcula\", \"false\"))) {\n try {\n 
BasicLookAndFeel darcula = new DarculaLaf();\n UIManager.setLookAndFeel(darcula);\n } catch (Exception e) {\n System.err.println(\"Could not load Darcula Look and Feel\");\n }\n }\n\n int x, y;\n\n try {\n Dimension screenSize = Toolkit.getDefaultToolkit().getScreenSize();\n\n x = screenSize.width/2 - getWindowWidth()/2;\n y = screenSize.height/2 - getWindowHeight()/2;\n } catch(HeadlessException e) {\n x = 10;\n y = 10;\n }\n\n JFrame frame = new JFrame(\"SciView\");\n frame.setLayout(new BorderLayout(0, 0));\n frame.setSize(getWindowWidth(), getWindowHeight());\n frame.setLocation(x, y);\n frame.setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);\n nodePropertyEditor = new NodePropertyEditor( this );\n\n if( useJavaFX ) {\n final JFXPanel fxPanel = new JFXPanel();\n frame.add(fxPanel);\n\n CountDownLatch latch = new CountDownLatch( 1 );\n\n PlatformImpl.startup( () -> {\n } );\n\n Platform.runLater( () -> {\n stackPane = new StackPane();\n stackPane.setBackground(\n new Background( new BackgroundFill( Color.TRANSPARENT, CornerRadii.EMPTY, Insets.EMPTY ) ) );\n\n GridPane pane = new GridPane();\n statusLabel = new Label( \"SciView - press U for usage help\" );\n statusLabel.setVisible(false);\n\n final SceneryFXPanel panel = new SceneryFXPanel(100, 100);\n\n Image loadingImage = new Image(this.getClass().getResourceAsStream(\"sciview-logo.png\"), 600, 200, true, true);\n ImageView loadingImageView = new ImageView(loadingImage);\n loadingLabel = new Label(\"SciView is starting.\");\n loadingLabel.setStyle(\n \"-fx-background-color: rgb(50,48,47);\" +\n \"-fx-opacity: 1.0;\" +\n \"-fx-font-weight: 400; \" +\n \"-fx-font-size: 2.2em; \" +\n \"-fx-text-fill: white;\");\n loadingLabel.setTextFill(Paint.valueOf(\"white\"));\n loadingLabel.setGraphic(loadingImageView);\n loadingLabel.setGraphicTextGap(40.0);\n loadingLabel.setContentDisplay(ContentDisplay.TOP);\n loadingLabel.prefHeightProperty().bind(pane.heightProperty());\n 
loadingLabel.prefWidthProperty().bind(pane.widthProperty());\n loadingLabel.setAlignment(Pos.CENTER);\n\n GridPane.setHgrow( panel, Priority.ALWAYS );\n GridPane.setVgrow( panel, Priority.ALWAYS );\n\n GridPane.setFillHeight( panel, true );\n GridPane.setFillWidth( panel, true );\n\n GridPane.setHgrow( statusLabel, Priority.ALWAYS );\n GridPane.setHalignment( statusLabel, HPos.CENTER );\n GridPane.setValignment( statusLabel, VPos.BOTTOM );\n\n statusLabel.maxWidthProperty().bind( pane.widthProperty() );\n\n pane.setStyle( \"-fx-background-color: rgb(50,48,47);\" +\n \"-fx-font-family: Helvetica Neue, Helvetica, Segoe, Proxima Nova, Arial, sans-serif;\" +\n \"-fx-font-weight: 400;\" + \"-fx-font-size: 1.2em;\" + \"-fx-text-fill: white;\" +\n \"-fx-text-alignment: center;\" );\n\n statusLabel.setStyle( \"-fx-padding: 0.2em;\" + \"-fx-text-fill: white;\" );\n\n statusLabel.setTextAlignment( TextAlignment.CENTER );\n\n menuBar = new MenuBar();\n pane.add( menuBar, 1, 1 );\n pane.add( panel, 1, 2 );\n pane.add( statusLabel, 1, 3 );\n stackPane.getChildren().addAll(pane, loadingLabel);\n\n final ContextMenu contextMenu = new ContextMenu();\n final MenuItem title = new MenuItem(\"Node\");\n final MenuItem position = new MenuItem(\"Position\");\n title.setDisable(true);\n position.setDisable(true);\n contextMenu.getItems().addAll(title, position);\n\n panel.setOnContextMenuRequested(event -> {\n final Point2D localPosition = panel.sceneToLocal(event.getSceneX(), event.getSceneY());\n final List matches = camera.getNodesForScreenSpacePosition((int)localPosition.getX(), (int)localPosition.getY());\n if(matches.size() > 0) {\n final Node firstMatch = matches.get(0).getNode();\n title.setText(\"Node: \" + firstMatch.getName() + \" (\" + firstMatch.getClass().getSimpleName() + \")\");\n position.setText(firstMatch.getPosition().toString());\n } else {\n title.setText(\"(no matches)\");\n position.setText(\"\");\n }\n contextMenu.show(panel, event.getScreenX(), 
event.getScreenY());\n });\n\n panel.setOnMouseClicked(event -> {\n if(event.getButton() == MouseButton.PRIMARY) {\n contextMenu.hide();\n }\n });\n\n sceneryPanel[0] = panel;\n\n javafx.scene.Scene scene = new javafx.scene.Scene( stackPane );\n fxPanel.setScene(scene);\n// scene.addEventHandler(MouseEvent.ANY, event -> getLogger().info(\"Mouse event: \" + event.toString()));\n// sceneryPanel[0].addEventHandler(MouseEvent.ANY, event -> getLogger().info(\"PANEL Mouse event: \" + event.toString()));\n\n frame.setVisible(true);\n// stage.setScene( scene );\n// stage.setOnCloseRequest( event -> {\n// getDisplay().close();\n// this.close();\n// } );\n// stage.focusedProperty().addListener( ( ov, t, t1 ) -> {\n// if( t1 )// If you just gained focus\n// displayService.setActiveDisplay( getDisplay() );\n// } );\n\n new JavaFXMenuCreator().createMenus( menus.getMenu( \"SciView\" ), menuBar );\n\n// stage.show();\n\n latch.countDown();\n } );\n\n try {\n latch.await();\n } catch( InterruptedException e1 ) {\n e1.printStackTrace();\n }\n\n // window width and window height get ignored by the renderer if it is embedded.\n // dimensions are determined from the SceneryFXPanel, then.\n setRenderer( Renderer.createRenderer( getHub(), getApplicationName(), getScene(),\n getWindowWidth(), getWindowHeight(),\n sceneryPanel[0] ) );\n } else {\n final JPanel p = new JPanel(new BorderLayout(0, 0));\n panel = new SceneryJPanel();\n JPopupMenu.setDefaultLightWeightPopupEnabled(false);\n final JMenuBar swingMenuBar = new JMenuBar();\n new SwingJMenuBarCreator().createMenus(menus.getMenu(\"SciView\"), swingMenuBar);\n frame.setJMenuBar(swingMenuBar);\n\n BufferedImage splashImage;\n try {\n splashImage = ImageIO.read(this.getClass().getResourceAsStream(\"sciview-logo.png\"));\n } catch (IOException e) {\n getLogger().warn(\"Could not read splash image 'sciview-logo.png'\");\n splashImage = new BufferedImage(1, 1, BufferedImage.TYPE_INT_ARGB);\n }\n splashLabel = new JLabel(new 
ImageIcon(splashImage.getScaledInstance(500, 200, java.awt.Image.SCALE_SMOOTH)));\n splashLabel.setBackground(new java.awt.Color(50, 48, 47));\n splashLabel.setOpaque(true);\n\n p.setLayout(new OverlayLayout(p));\n p.setBackground(new java.awt.Color(50, 48, 47));\n p.add(panel, BorderLayout.CENTER);\n panel.setVisible(true);\n\n nodePropertyEditor.getComponent(); // Initialize node property panel\n\n JTree inspectorTree = nodePropertyEditor.getTree();\n JPanel inspectorProperties = nodePropertyEditor.getProps();\n\n inspector = new JSplitPane(JSplitPane.VERTICAL_SPLIT, //\n new JScrollPane( inspectorTree ),\n new JScrollPane( inspectorProperties ));\n inspector.setDividerLocation( getWindowHeight() / 3 );\n inspector.setContinuousLayout(true);\n inspector.setBorder(BorderFactory.createEmptyBorder());\n\n mainSplitPane = new JSplitPane(JSplitPane.HORIZONTAL_SPLIT, //\n p,\n inspector\n );\n mainSplitPane.setDividerLocation( getWindowWidth()/3 * 2 );\n mainSplitPane.setBorder(BorderFactory.createEmptyBorder());\n\n frame.add(mainSplitPane, BorderLayout.CENTER);\n\n frame.setGlassPane(splashLabel);\n frame.getGlassPane().setVisible(true);\n// frame.getGlassPane().setBackground(new java.awt.Color(50, 48, 47, 255));\n frame.setVisible(true);\n\n sceneryPanel[0] = panel;\n\n setRenderer( Renderer.createRenderer( getHub(), getApplicationName(), getScene(),\n getWindowWidth(), getWindowHeight(),\n sceneryPanel[0]) );\n }\n\n // Enable push rendering by default\n getRenderer().setPushMode( true );\n\n getHub().add( SceneryElement.Renderer, getRenderer() );\n\n GLVector[] tetrahedron = new GLVector[4];\n tetrahedron[0] = new GLVector( 1.0f, 0f, -1.0f/(float)Math.sqrt(2.0f) );\n tetrahedron[1] = new GLVector( -1.0f,0f,-1.0f/(float)Math.sqrt(2.0) );\n tetrahedron[2] = new GLVector( 0.0f,1.0f,1.0f/(float)Math.sqrt(2.0) );\n tetrahedron[3] = new GLVector( 0.0f,-1.0f,1.0f/(float)Math.sqrt(2.0) );\n\n PointLight[] lights = new PointLight[4];\n\n for( int i = 0; i < lights.length; 
i++ ) {\n lights[i] = new PointLight( 150.0f );\n lights[i].setPosition( tetrahedron[i].times(25.0f) );\n lights[i].setEmissionColor( new GLVector( 1.0f, 1.0f, 1.0f ) );\n lights[i].setIntensity( 100.0f );\n getScene().addChild( lights[i] );\n }\n\n Camera cam = new DetachedHeadCamera();\n cam.setPosition( new GLVector( 0.0f, 5.0f, 5.0f ) );\n cam.perspectiveCamera( 50.0f, getWindowWidth(), getWindowHeight(), 0.1f, 1000.0f );\n //cam.setTarget( new GLVector( 0, 0, 0 ) );\n //cam.setTargeted( true );\n cam.setActive( true );\n getScene().addChild( cam );\n this.camera = cam;\n\n floor = new Box( new GLVector( 500f, 0.2f, 500f ) );\n floor.setName( \"Floor\" );\n floor.setPosition( new GLVector( 0f, -1f, 0f ) );\n floor.getMaterial().setDiffuse( new GLVector( 1.0f, 1.0f, 1.0f ) );\n getScene().addChild( floor );\n\n animations = new LinkedList<>();\n\n if(useJavaFX) {\n Platform.runLater(() -> {\n while (!getRenderer().getFirstImageReady()) {\n try {\n Thread.sleep(100);\n } catch (InterruptedException e) {\n e.printStackTrace();\n }\n }\n\n // fade out loading screen, show status bar\n FadeTransition ft = new FadeTransition(Duration.millis(500), loadingLabel);\n ft.setFromValue(1.0);\n ft.setToValue(0.0);\n ft.setCycleCount(1);\n ft.setInterpolator(Interpolator.EASE_OUT);\n ft.setOnFinished(event -> {\n loadingLabel.setVisible(false);\n statusLabel.setVisible(true);\n });\n\n ft.play();\n });\n } else {\n SwingUtilities.invokeLater(() -> {\n try {\n while (!getSceneryRenderer().getFirstImageReady()) {\n getLogger().info(\"Waiting for renderer\");\n Thread.sleep(100);\n }\n\n Thread.sleep(200);\n } catch (InterruptedException e) {\n }\n\n nodePropertyEditor.rebuildTree();\n frame.getGlassPane().setVisible(false);\n getLogger().info(\"Done initializing SciView\");\n });\n }\n\n }\n\n public void setStatusText(String text) {\n statusLabel.setText(text);\n }\n\n public void setFloor( Node n ) {\n floor = n;\n }\n\n public Node getFloor() {\n return floor;\n }\n\n 
private float getFloory() {\n return floor.getPosition().y();\n }\n\n private void setFloory( float new_pos ) {\n float temp_pos = 0f;\n temp_pos = new_pos;\n if( temp_pos < -100f ) temp_pos = -100f;\n else if( new_pos > 5f ) temp_pos = 5f;\n floor.getPosition().set( 1, temp_pos );\n }\n\n public boolean isInitialized() {\n return sceneInitialized();\n }\n\n public Camera getCamera() {\n return camera;\n }\n\n public void setDisplay( Display display ) {\n scijavaDisplay = display;\n }\n\n public Display getDisplay() {\n return scijavaDisplay;\n }\n\n public void centerOnNode( Node currentNode ) {\n if( currentNode == null ) return;\n\n Node.OrientedBoundingBox bb = currentNode.generateBoundingBox();\n\n getCamera().setTarget( currentNode.getPosition() );\n getCamera().setTargeted( true );\n\n // Set forward direction to point from camera at active node\n getCamera().setForward( bb.getBoundingSphere().getOrigin().minus( getCamera().getPosition() ).normalize().times( -1 ) );\n\n float distance = (float) (bb.getBoundingSphere().getRadius() / Math.tan( getCamera().getFov() / 360 * java.lang.Math.PI ));\n\n // Solve for the proper rotation\n Quaternion rotation = new Quaternion().setLookAt( getCamera().getForward().toFloatArray(),\n new GLVector(0,1,0).toFloatArray(),\n new GLVector(1,0,0).toFloatArray(),\n new GLVector( 0,1,0).toFloatArray(),\n new GLVector( 0, 0, 1).toFloatArray() );\n\n getCamera().setRotation( rotation.normalize() );\n getCamera().setPosition( bb.getBoundingSphere().getOrigin().plus( getCamera().getForward().times( distance * -1 ) ) );\n\n getCamera().setDirty(true);\n getCamera().setNeedsUpdate(true);\n }\n\n public void setFPSSpeed( float newspeed ) {\n if( newspeed < 0.30f ) newspeed = 0.3f;\n else if( newspeed > 30.0f ) newspeed = 30.0f;\n fpsScrollSpeed = newspeed;\n log.debug( \"FPS scroll speed: \" + fpsScrollSpeed );\n }\n\n public float getFPSSpeed() {\n return fpsScrollSpeed;\n }\n\n public void setMouseSpeed( float newspeed ) {\n if( 
newspeed < 0.30f ) newspeed = 0.3f;\n else if( newspeed > 3.0f ) newspeed = 3.0f;\n mouseSpeedMult = newspeed;\n log.debug( \"Mouse speed: \" + mouseSpeedMult );\n }\n\n public float getMouseSpeed() {\n return mouseSpeedMult;\n }\n\n public void resetFPSInputs() {\n getInputHandler().addBehaviour( \"move_forward_scroll\",\n new MovementCommand( \"move_forward\", \"forward\", () -> getScene().findObserver(),\n getFPSSpeed() ) );\n getInputHandler().addBehaviour( \"move_forward\",\n new MovementCommand( \"move_forward\", \"forward\", () -> getScene().findObserver(),\n getFPSSpeed() ) );\n getInputHandler().addBehaviour( \"move_back\",\n new MovementCommand( \"move_back\", \"back\", () -> getScene().findObserver(),\n getFPSSpeed() ) );\n getInputHandler().addBehaviour( \"move_left\",\n new MovementCommand( \"move_left\", \"left\", () -> getScene().findObserver(),\n getFPSSpeed() ) );\n getInputHandler().addBehaviour( \"move_right\",\n new MovementCommand( \"move_right\", \"right\", () -> getScene().findObserver(),\n getFPSSpeed() ) );\n getInputHandler().addBehaviour( \"move_up\",\n new MovementCommand( \"move_up\", \"up\", () -> getScene().findObserver(),\n getFPSSpeed() ) );\n getInputHandler().addBehaviour( \"move_down\",\n new MovementCommand( \"move_down\", \"down\", () -> getScene().findObserver(),\n getFPSSpeed() ) );\n }\n\n class enableIncrease implements ClickBehaviour {\n\n @Override public void click( int x, int y ) {\n setFPSSpeed( getFPSSpeed() + 0.5f );\n setMouseSpeed( getMouseSpeed() + 0.05f );\n\n log.debug( \"Increasing FPS scroll Speed\" );\n\n resetFPSInputs();\n }\n }\n\n class enableDecrease implements ClickBehaviour {\n\n @Override public void click( int x, int y ) {\n setFPSSpeed( getFPSSpeed() - 0.1f );\n setMouseSpeed( getMouseSpeed() - 0.05f );\n\n log.debug( \"Decreasing FPS scroll Speed\" );\n\n resetFPSInputs();\n }\n }\n\n class showHelpDisplay implements ClickBehaviour {\n\n @Override public void click( int x, int y ) {\n String 
helpString = \"SciView help:\\n\\n\";\n for( InputTrigger trigger : getInputHandler().getAllBindings().keySet() ) {\n helpString += trigger + \"\\t-\\t\" + getInputHandler().getAllBindings().get( trigger ) + \"\\n\";\n }\n // HACK: Make the console pop via stderr.\n // Later, we will use a nicer dialog box or some such.\n log.warn( helpString );\n }\n }\n\n @Override public void inputSetup() {\n Function1, Unit> selectAction = nearest -> {\n if( !nearest.isEmpty() ) {\n setActiveNode( nearest.get( 0 ).getNode() );\n log.debug( \"Selected node: \" + getActiveNode().getName() );\n }\n return Unit.INSTANCE;\n };\n\n List> ignoredObjects = new ArrayList<>();\n ignoredObjects.add( BoundingGrid.class );\n\n getInputHandler().useDefaultBindings( \"\" );\n\n // Mouse controls\n getInputHandler().addBehaviour( \"object_selection_mode\",\n new SelectCommand( \"objectSelector\", getRenderer(), getScene(),\n () -> getScene().findObserver(), false, ignoredObjects,\n selectAction ) );\n getInputHandler().addKeyBinding( \"object_selection_mode\", \"double-click button1\" );\n\n enableArcBallControl();\n enableFPSControl();\n\n getInputHandler().addBehaviour( \"mouse_control_nodetranslate\", new NodeTranslateControl( this, 0.002f ) );\n getInputHandler().addKeyBinding( \"mouse_control_nodetranslate\", \"shift button2\" );\n\n // Extra keyboard controls\n getInputHandler().addBehaviour( \"show_help\", new showHelpDisplay() );\n getInputHandler().addKeyBinding( \"show_help\", \"U\" );\n\n getInputHandler().addBehaviour( \"enable_decrease\", new enableDecrease() );\n getInputHandler().addKeyBinding( \"enable_decrease\", \"M\" );\n\n getInputHandler().addBehaviour( \"enable_increase\", new enableIncrease() );\n getInputHandler().addKeyBinding( \"enable_increase\", \"N\" );\n }\n\n private void enableArcBallControl() {\n GLVector target;\n if( getActiveNode() == null ) {\n target = new GLVector( 0, 0, 0 );\n } else {\n target = getActiveNode().getPosition();\n }\n\n float mouseSpeed = 
0.25f;\n mouseSpeed = getMouseSpeed();\n\n Supplier cameraSupplier = () -> getScene().findObserver();\n targetArcball = new ArcballCameraControl( \"mouse_control_arcball\", cameraSupplier,\n getRenderer().getWindow().getWidth(),\n getRenderer().getWindow().getHeight(), target );\n targetArcball.setMaximumDistance( Float.MAX_VALUE );\n targetArcball.setMouseSpeedMultiplier( mouseSpeed );\n targetArcball.setScrollSpeedMultiplier( 0.05f );\n targetArcball.setDistance( getCamera().getPosition().minus( target ).magnitude() );\n\n getInputHandler().addBehaviour( \"mouse_control_arcball\", targetArcball );\n getInputHandler().addKeyBinding( \"mouse_control_arcball\", \"shift button1\" );\n getInputHandler().addBehaviour( \"scroll_arcball\", targetArcball );\n getInputHandler().addKeyBinding( \"scroll_arcball\", \"shift scroll\" );\n }\n\n private void enableFPSControl() {\n Supplier cameraSupplier = () -> getScene().findObserver();\n fpsControl = new FPSCameraControl( \"mouse_control\", cameraSupplier, getRenderer().getWindow().getWidth(),\n getRenderer().getWindow().getHeight() );\n\n getInputHandler().addBehaviour( \"mouse_control\", fpsControl );\n getInputHandler().addKeyBinding( \"mouse_control\", \"button1\" );\n\n getInputHandler().addBehaviour( \"mouse_control_cameratranslate\", new CameraTranslateControl( this, 0.002f ) );\n getInputHandler().addKeyBinding( \"mouse_control_cameratranslate\", \"button2\" );\n\n resetFPSInputs();\n\n getInputHandler().addKeyBinding( \"move_forward_scroll\", \"scroll\" );\n }\n\n public Node addBox() {\n return addBox( new ClearGLVector3( 0.0f, 0.0f, 0.0f ) );\n }\n\n public Node addBox( Vector3 position ) {\n return addBox( position, new ClearGLVector3( 1.0f, 1.0f, 1.0f ) );\n }\n\n public Node addBox( Vector3 position, Vector3 size ) {\n return addBox( position, size, DEFAULT_COLOR, false );\n }\n\n public Node addBox( final Vector3 position, final Vector3 size, final ColorRGB color,\n final boolean inside ) {\n // TODO: use a 
material from the current palate by default\n final Material boxmaterial = new Material();\n boxmaterial.setAmbient( new GLVector( 1.0f, 0.0f, 0.0f ) );\n boxmaterial.setDiffuse( vector( color ) );\n boxmaterial.setSpecular( new GLVector( 1.0f, 1.0f, 1.0f ) );\n\n final Box box = new Box( ClearGLVector3.convert( size ), inside );\n box.setMaterial( boxmaterial );\n box.setPosition( ClearGLVector3.convert( position ) );\n\n return addNode( box );\n }\n\n public Node addSphere() {\n return addSphere( new ClearGLVector3( 0.0f, 0.0f, 0.0f ), 1 );\n }\n\n public Node addSphere( Vector3 position, float radius ) {\n return addSphere( position, radius, DEFAULT_COLOR );\n }\n\n public Node addSphere( final Vector3 position, final float radius, final ColorRGB color ) {\n final Material material = new Material();\n material.setAmbient( new GLVector( 1.0f, 0.0f, 0.0f ) );\n material.setDiffuse( vector( color ) );\n material.setSpecular( new GLVector( 1.0f, 1.0f, 1.0f ) );\n\n final Sphere sphere = new Sphere( radius, 20 );\n sphere.setMaterial( material );\n sphere.setPosition( ClearGLVector3.convert( position ) );\n\n return addNode( sphere );\n }\n\n public Node addLine() {\n return addLine( new ClearGLVector3( 0.0f, 0.0f, 0.0f ), new ClearGLVector3( 0.0f, 0.0f, 0.0f ) );\n }\n\n public Node addLine( Vector3 start, Vector3 stop ) {\n return addLine( start, stop, DEFAULT_COLOR );\n }\n\n public Node addLine( Vector3 start, Vector3 stop, ColorRGB color ) {\n return addLine( new Vector3[] { start, stop }, color, 0.1f );\n }\n\n public Node addLine( final Vector3[] points, final ColorRGB color, final double edgeWidth ) {\n final Material material = new Material();\n material.setAmbient( new GLVector( 1.0f, 1.0f, 1.0f ) );\n material.setDiffuse( vector( color ) );\n material.setSpecular( new GLVector( 1.0f, 1.0f, 1.0f ) );\n\n final Line line = new Line( points.length );\n for( final Vector3 pt : points ) {\n line.addPoint( ClearGLVector3.convert( pt ) );\n }\n\n 
line.setEdgeWidth( ( float ) edgeWidth );\n\n line.setMaterial( material );\n line.setPosition( ClearGLVector3.convert( points[0] ) );\n\n return addNode( line );\n }\n\n public Node addPointLight() {\n final Material material = new Material();\n material.setAmbient( new GLVector( 1.0f, 0.0f, 0.0f ) );\n material.setDiffuse( new GLVector( 0.0f, 1.0f, 0.0f ) );\n material.setSpecular( new GLVector( 1.0f, 1.0f, 1.0f ) );\n\n final PointLight light = new PointLight( 5.0f );\n light.setMaterial( material );\n light.setPosition( new GLVector( 0.0f, 0.0f, 0.0f ) );\n\n return addNode( light );\n }\n\n public void writeSCMesh( String filename, Mesh scMesh ) {\n File f = new File( filename );\n BufferedOutputStream out;\n try {\n out = new BufferedOutputStream( new FileOutputStream( f ) );\n out.write( \"solid STL generated by FIJI\\n\".getBytes() );\n\n FloatBuffer normalsFB = scMesh.getNormals();\n FloatBuffer verticesFB = scMesh.getVertices();\n\n while( verticesFB.hasRemaining() && normalsFB.hasRemaining() ) {\n out.write( ( \"facet normal \" + normalsFB.get() + \" \" + normalsFB.get() + \" \" + normalsFB.get() +\n \"\\n\" ).getBytes() );\n out.write( \"outer loop\\n\".getBytes() );\n for( int v = 0; v < 3; v++ ) {\n out.write( ( \"vertex\\t\" + verticesFB.get() + \" \" + verticesFB.get() + \" \" + verticesFB.get() +\n \"\\n\" ).getBytes() );\n }\n out.write( \"endloop\\n\".getBytes() );\n out.write( \"endfacet\\n\".getBytes() );\n }\n out.write( \"endsolid vcg\\n\".getBytes() );\n out.close();\n } catch( FileNotFoundException e ) {\n e.printStackTrace();\n } catch( IOException e ) {\n e.printStackTrace();\n }\n\n }\n\n public float getDefaultPointSize() {\n return 0.025f;\n }\n\n public float[] makeNormalsFromVertices( ArrayList verts ) {\n float[] normals = new float[verts.size()];// div3 * 3coords\n\n for( int k = 0; k < verts.size(); k += 3 ) {\n GLVector v1 = new GLVector( verts.get( k ).getFloatPosition( 0 ), //\n verts.get( k ).getFloatPosition( 1 ), //\n 
verts.get( k ).getFloatPosition( 2 ) );\n GLVector v2 = new GLVector( verts.get( k + 1 ).getFloatPosition( 0 ),\n verts.get( k + 1 ).getFloatPosition( 1 ),\n verts.get( k + 1 ).getFloatPosition( 2 ) );\n GLVector v3 = new GLVector( verts.get( k + 2 ).getFloatPosition( 0 ),\n verts.get( k + 2 ).getFloatPosition( 1 ),\n verts.get( k + 2 ).getFloatPosition( 2 ) );\n GLVector a = v2.minus( v1 );\n GLVector b = v3.minus( v1 );\n GLVector n = a.cross( b ).getNormalized();\n normals[k / 3] = n.get( 0 );\n normals[k / 3 + 1] = n.get( 1 );\n normals[k / 3 + 2] = n.get( 2 );\n }\n return normals;\n }\n\n public void open( final String source ) throws IOException {\n if(source.endsWith(\".xml\")) {\n addBDVVolume(source);\n return;\n }\n\n final Object data = io.open( source );\n if( data instanceof net.imagej.mesh.Mesh ) addMesh( ( net.imagej.mesh.Mesh ) data );\n else if( data instanceof graphics.scenery.Mesh ) addMesh( ( graphics.scenery.Mesh ) data );\n else if( data instanceof graphics.scenery.PointCloud ) addPointCloud( ( graphics.scenery.PointCloud ) data );\n else if( data instanceof Dataset ) addVolume( ( Dataset ) data );\n else if( data instanceof IterableInterval ) addVolume( ( ( IterableInterval ) data ), source );\n else if( data instanceof List ) {\n final List list = ( List ) data;\n if( list.isEmpty() ) {\n throw new IllegalArgumentException( \"Data source '\" + source + \"' appears empty.\" );\n }\n final Object element = list.get( 0 );\n if( element instanceof RealLocalizable ) {\n // NB: For now, we assume all elements will be RealLocalizable.\n // Highly likely to be the case, barring antagonistic importers.\n @SuppressWarnings(\"unchecked\") final List points = ( List ) list;\n addPointCloud( points, source );\n } else {\n final String type = element == null ? 
\"\" : element.getClass().getName();\n throw new IllegalArgumentException( \"Data source '\" + source + //\n \"' contains elements of unknown type '\" + type + \"'\" );\n }\n } else {\n final String type = data == null ? \"\" : data.getClass().getName();\n throw new IllegalArgumentException( \"Data source '\" + source + //\n \"' contains data of unknown type '\" + type + \"'\" );\n }\n }\n\n public Node addPointCloud( Collection points ) {\n return addPointCloud( points, \"PointCloud\" );\n }\n\n public Node addPointCloud( final Collection points,\n final String name ) {\n final float[] flatVerts = new float[points.size() * 3];\n int k = 0;\n for( final RealLocalizable point : points ) {\n flatVerts[k * 3] = point.getFloatPosition( 0 );\n flatVerts[k * 3 + 1] = point.getFloatPosition( 1 );\n flatVerts[k * 3 + 2] = point.getFloatPosition( 2 );\n k++;\n }\n\n final PointCloud pointCloud = new PointCloud( getDefaultPointSize(), name );\n final Material material = new Material();\n final FloatBuffer vBuffer = ByteBuffer.allocateDirect( flatVerts.length * 4 ) //\n .order( ByteOrder.nativeOrder() ).asFloatBuffer();\n final FloatBuffer nBuffer = ByteBuffer.allocateDirect( 0 ) //\n .order( ByteOrder.nativeOrder() ).asFloatBuffer();\n\n vBuffer.put( flatVerts );\n vBuffer.flip();\n\n pointCloud.setVertices( vBuffer );\n pointCloud.setNormals( nBuffer );\n pointCloud.setIndices( ByteBuffer.allocateDirect( 0 ) //\n .order( ByteOrder.nativeOrder() ).asIntBuffer() );\n pointCloud.setupPointCloud();\n material.setAmbient( new GLVector( 1.0f, 1.0f, 1.0f ) );\n material.setDiffuse( new GLVector( 1.0f, 1.0f, 1.0f ) );\n material.setSpecular( new GLVector( 1.0f, 1.0f, 1.0f ) );\n pointCloud.setMaterial( material );\n pointCloud.setPosition( new GLVector( 0f, 0f, 0f ) );\n\n return addNode( pointCloud );\n }\n\n public Node addPointCloud( final PointCloud pointCloud ) {\n pointCloud.setupPointCloud();\n pointCloud.getMaterial().setAmbient( new GLVector( 1.0f, 1.0f, 1.0f ) );\n 
pointCloud.getMaterial().setDiffuse( new GLVector( 1.0f, 1.0f, 1.0f ) );\n pointCloud.getMaterial().setSpecular( new GLVector( 1.0f, 1.0f, 1.0f ) );\n pointCloud.setPosition( new GLVector( 0f, 0f, 0f ) );\n\n return addNode( pointCloud );\n }\n\n public Node addNode( final Node n ) {\n getScene().addChild( n );\n setActiveNode( n );\n updateFloorPosition();\n eventService.publish( new NodeAddedEvent( n ) );\n return n;\n }\n\n public Node addMesh( final Mesh scMesh ) {\n final Material material = new Material();\n material.setAmbient( new GLVector( 1.0f, 0.0f, 0.0f ) );\n material.setDiffuse( new GLVector( 0.0f, 1.0f, 0.0f ) );\n material.setSpecular( new GLVector( 1.0f, 1.0f, 1.0f ) );\n\n scMesh.setMaterial( material );\n scMesh.setPosition( new GLVector( 0.0f, 0.0f, 0.0f ) );\n\n return addNode( scMesh );\n }\n\n public Node addMesh( net.imagej.mesh.Mesh mesh ) {\n Mesh scMesh = MeshConverter.toScenery( mesh );\n\n return addMesh( scMesh );\n }\n\n public void removeMesh( Mesh scMesh ) {\n getScene().removeChild( scMesh );\n }\n\n public Node getActiveNode() {\n return activeNode;\n }\n\n public Node setActiveNode( Node n ) {\n if( activeNode == n ) return activeNode;\n activeNode = n;\n targetArcball.setTarget( n == null ? 
() -> new GLVector( 0, 0, 0 ) : n::getPosition);\n eventService.publish( new NodeActivatedEvent( activeNode ) );\n nodePropertyEditor.rebuildTree();\n getScene().getOnNodePropertiesChanged().put(\"updateInspector\",\n node -> { if(node == activeNode) {\n nodePropertyEditor.updateProperties(activeNode);\n }\n return null;\n });\n return activeNode;\n }\n\n public void toggleInspectorWindow()\n {\n boolean currentlyVisible = inspector.isVisible();\n if(currentlyVisible) {\n inspector.setVisible(false);\n mainSplitPane.setDividerLocation(getWindowWidth());\n }\n else {\n inspector.setVisible(true);\n mainSplitPane.setDividerLocation(getWindowWidth()/4 * 3);\n }\n\n }\n\n public synchronized void animate( int fps, Runnable action ) {\n // TODO: Make animation speed less laggy and more accurate.\n final int delay = 1000 / fps;\n animations.add( threadService.run( () -> {\n while( animating ) {\n action.run();\n try {\n Thread.sleep( delay );\n } catch( InterruptedException e ) {\n break;\n }\n }\n } ) );\n animating = true;\n }\n\n public synchronized void stopAnimation() {\n animating = false;\n while( !animations.isEmpty() ) {\n animations.peek().cancel( true );\n animations.remove();\n }\n }\n\n public void takeScreenshot() {\n getRenderer().screenshot();\n }\n\n public void takeScreenshot( String path ) {\n getRenderer().screenshot( path, false );\n }\n\n public Node[] getSceneNodes() {\n return getSceneNodes( n -> !( n instanceof Camera ) && !( n instanceof PointLight ) );\n }\n\n public Node[] getSceneNodes( Predicate filter ) {\n return getScene().getChildren().stream().filter( filter ).toArray( Node[]::new );\n }\n\n public Node[] getAllSceneNodes() {\n return getSceneNodes( n -> true );\n }\n\n public void deleteActiveNode() {\n deleteNode( getActiveNode() );\n }\n\n public void deleteNode( Node node ) {\n node.getParent().removeChild( node );\n eventService.publish( new NodeRemovedEvent( node ) );\n if( activeNode == node ) setActiveNode( null );\n }\n\n 
public void dispose() {\n this.close();\n }\n\n public void moveCamera( float[] position ) {\n getCamera().setPosition( new GLVector( position[0], position[1], position[2] ) );\n }\n\n public void moveCamera( double[] position ) {\n getCamera().setPosition( new GLVector( ( float ) position[0], ( float ) position[1], ( float ) position[2] ) );\n }\n\n public String getName() {\n return getApplicationName();\n }\n\n public void addChild( Node node ) {\n getScene().addChild( node );\n }\n\n public Node addVolume( Dataset image ) {\n float[] voxelDims = new float[image.numDimensions()];\n for( int d = 0; d < voxelDims.length; d++ ) {\n voxelDims[d] = ( float ) image.axis( d ).averageScale( 0, 1 );\n }\n return addVolume( image, voxelDims );\n }\n\n public Node addBDVVolume( String source ) {\n final VolumeViewerOptions opts = new VolumeViewerOptions();\n opts.maxCacheSizeInMB(Integer.parseInt(System.getProperty(\"scenery.BDVVolume.maxCacheSize\", \"512\")));\n final BDVVolume v = new BDVVolume(source, opts);\n v.setScale(new GLVector(0.01f, 0.01f, 0.01f));\n\n getScene().addChild(v);\n setActiveNode(v);\n v.goToTimePoint(0);\n\n return v;\n }\n\n @SuppressWarnings({ \"rawtypes\", \"unchecked\" }) public Node addVolume( Dataset image,\n float[] voxelDimensions ) {\n return addVolume( ( IterableInterval ) Views.flatIterable( image.getImgPlus() ), image.getName(),\n voxelDimensions );\n }\n\n public > Node addVolume( IterableInterval image ) {\n return addVolume( image, \"Volume\" );\n }\n\n public > Node addVolume( IterableInterval image, String name ) {\n return addVolume( image, name, 1, 1, 1 );\n }\n\n public void setColormap( Node n, ColorTable colorTable ) {\n final int copies = 16;\n\n final ByteBuffer byteBuffer = ByteBuffer.allocateDirect(\n 4 * colorTable.getLength() * copies );// Num bytes * num components * color map length * height of color map texture\n\n final byte[] tmp = new byte[4 * colorTable.getLength()];\n for( int k = 0; k < colorTable.getLength(); 
k++ ) {\n for( int c = 0; c < colorTable.getComponentCount(); c++ ) {\n // TODO this assumes numBits is 8, could be 16\n tmp[4 * k + c] = ( byte ) colorTable.get( c, k );\n }\n\n if( colorTable.getComponentCount() == 3 ) {\n tmp[4 * k + 3] = (byte)255;\n }\n }\n\n for( int i = 0; i < copies; i++ ) {\n byteBuffer.put(tmp);\n }\n\n byteBuffer.flip();\n\n n.getMetadata().put(\"sciviewColormap\", colorTable);\n\n if(n instanceof Volume) {\n ((Volume) n).getColormaps().put(\"sciviewColormap\", new Volume.Colormap.ColormapBuffer(new GenericTexture(\"colorTable\",\n new GLVector(colorTable.getLength(),\n copies, 1.0f), 4,\n GLTypeEnum.UnsignedByte,\n byteBuffer)));\n ((Volume) n).setColormap(\"sciviewColormap\");\n }\n }\n\n public > Node addVolume( IterableInterval image, String name,\n float... voxelDimensions ) {\n log.debug( \"Add Volume\" );\n\n long dimensions[] = new long[3];\n image.dimensions( dimensions );\n\n Volume v = new Volume();\n\n getScene().addChild( v );\n\n @SuppressWarnings(\"unchecked\") Class voxelType = ( Class ) image.firstElement().getClass();\n float minVal, maxVal;\n\n if( voxelType == UnsignedByteType.class ) {\n minVal = 0;\n maxVal = 255;\n } else if( voxelType == UnsignedShortType.class ) {\n minVal = 0;\n maxVal = 65535;\n } else if( voxelType == FloatType.class ) {\n minVal = 0;\n maxVal = 1;\n } else {\n log.debug( \"Type: \" + voxelType +\n \" cannot be displayed as a volume. 
Convert to UnsignedByteType, UnsignedShortType, or FloatType.\" );\n return null;\n }\n\n updateVolume( image, name, voxelDimensions, v );\n\n GLVector scaleVec = new GLVector( 0.5f * dimensions[0], //\n 0.5f * dimensions[1], //\n 0.5f * dimensions[2] );\n\n v.setScale( scaleVec );// TODO maybe dont do this\n // TODO: This translation should probably be accounted for in scenery; volumes use a corner-origin and\n // meshes use center-origin coordinate systems.\n v.setPosition( v.getPosition().plus( new GLVector( 0.5f * dimensions[0] - 0.5f, 0.5f * dimensions[1] - 0.5f,\n 0.5f * dimensions[2] - 0.5f ) ) );\n\n v.setTrangemin( minVal );\n v.setTrangemax( maxVal );\n v.setTransferFunction(TransferFunction.ramp(0.0f, 0.4f));\n\n try {\n setColormap( v, lutService.loadLUT( lutService.findLUTs().get( \"WCIF/ICA.lut\" ) ) );\n } catch( IOException e ) {\n e.printStackTrace();\n }\n\n\n setActiveNode( v );\n\n return v;\n }\n\n public > Node updateVolume( IterableInterval image, String name,\n float[] voxelDimensions, Volume v ) {\n log.debug( \"Update Volume\" );\n\n long dimensions[] = new long[3];\n image.dimensions( dimensions );\n\n @SuppressWarnings(\"unchecked\") Class voxelType = ( Class ) image.firstElement().getClass();\n int bytesPerVoxel = image.firstElement().getBitsPerPixel() / 8;\n NativeTypeEnum nType;\n\n if( voxelType == UnsignedByteType.class ) {\n nType = NativeTypeEnum.UnsignedByte;\n } else if( voxelType == UnsignedShortType.class ) {\n nType = NativeTypeEnum.UnsignedShort;\n } else if( voxelType == FloatType.class ) {\n nType = NativeTypeEnum.Float;\n } else {\n log.debug( \"Type: \" + voxelType +\n \" cannot be displayed as a volume. 
Convert to UnsignedByteType, UnsignedShortType, or FloatType.\" );\n return null;\n }\n\n // Make and populate a ByteBuffer with the content of the Dataset\n ByteBuffer byteBuffer = ByteBuffer.allocateDirect(\n ( int ) ( bytesPerVoxel * dimensions[0] * dimensions[1] * dimensions[2] ) );\n Cursor cursor = image.cursor();\n\n while( cursor.hasNext() ) {\n cursor.fwd();\n if( voxelType == UnsignedByteType.class ) {\n byteBuffer.put( ( byte ) ( ( ( UnsignedByteType ) cursor.get() ).get() ) );\n } else if( voxelType == UnsignedShortType.class ) {\n byteBuffer.putShort( ( short ) Math.abs( ( ( UnsignedShortType ) cursor.get() ).getShort() ) );\n } else if( voxelType == FloatType.class ) {\n byteBuffer.putFloat( ( ( FloatType ) cursor.get() ).get() );\n }\n }\n byteBuffer.flip();\n\n v.readFromBuffer( name, byteBuffer, dimensions[0], dimensions[1], dimensions[2], voxelDimensions[0],\n voxelDimensions[1], voxelDimensions[2], nType, bytesPerVoxel );\n\n v.setDirty( true );\n v.setNeedsUpdate( true );\n v.setNeedsUpdateWorld( true );\n\n return v;\n }\n\n private static GLVector vector( ColorRGB color ) {\n if( color instanceof ColorRGBA ) {\n return new GLVector( color.getRed() / 255f, //\n color.getGreen() / 255f, //\n color.getBlue() / 255f, //\n color.getAlpha() / 255f );\n }\n return new GLVector( color.getRed() / 255f, //\n color.getGreen() / 255f, //\n color.getBlue() / 255f );\n }\n\n public boolean getPushMode() {\n return getRenderer().getPushMode();\n }\n\n public boolean setPushMode( boolean push ) {\n getRenderer().setPushMode( push );\n return getRenderer().getPushMode();\n }\n\n public ArcballCameraControl getTargetArcball() {\n return targetArcball;\n }\n\n @Override\n protected void finalize() {\n stopAnimation();\n }\n\n private void updateFloorPosition() {\n // Lower the floor below the active node, as needed.\n final Node currentNode = getActiveNode();\n if( currentNode != null ) {\n final Node.OrientedBoundingBox bb = currentNode.generateBoundingBox();\n 
final Node.BoundingSphere bs = bb.getBoundingSphere();\n final float neededFloor = bb.getMin().y() - Math.max( bs.getRadius(), 1 );\n if( neededFloor < getFloory() ) setFloory( neededFloor );\n }\n\n floor.setPosition( new GLVector( 0f, getFloory(), 0f ) );\n }\n\n public Settings getScenerySettings() {\n return this.getSettings();\n }\n\n public Statistics getSceneryStats() {\n return this.getStats();\n }\n\n public Renderer getSceneryRenderer() {\n return this.getRenderer();\n }\n\n protected boolean vrActive = false;\n\n public void toggleVRRendering() {\n vrActive = !vrActive;\n Camera cam = getScene().getActiveObserver();\n if(!(cam instanceof DetachedHeadCamera)) {\n return;\n }\n\n TrackerInput ti = null;\n\n if (!getHub().has(SceneryElement.HMDInput)) {\n try {\n final OpenVRHMD hmd = new OpenVRHMD(false, true);\n getHub().add(SceneryElement.HMDInput, hmd);\n ti = hmd;\n\n // we need to force reloading the renderer as the HMD might require device or instance extensions\n if(getRenderer() instanceof VulkanRenderer) {\n replaceRenderer(getRenderer().getClass().getSimpleName(), true);\n Thread.sleep(1000);\n }\n } catch (Exception e) {\n getLogger().error(\"Could not add OpenVRHMD: \" + e.toString());\n }\n } else {\n ti = getHub().getWorkingHMD();\n }\n\n if(vrActive && ti != null) {\n ((DetachedHeadCamera) cam).setTracker(ti);\n } else {\n ((DetachedHeadCamera) cam).setTracker(null);\n }\n\n while(getRenderer().getInitialized() == false) {\n getLogger().info(\"Waiting for renderer\");\n try {\n Thread.sleep(200);\n } catch (InterruptedException e) {\n e.printStackTrace();\n }\n }\n getRenderer().toggleVR();\n }\n}\n"},"message":{"kind":"string","value":"SciView: Show scenery and sciview version number in splash screen if it can be retrieved from JAR manifest\n"},"old_file":{"kind":"string","value":"src/main/java/sc/iview/SciView.java"},"subject":{"kind":"string","value":"SciView: Show scenery and sciview version number in splash screen if it can be retrieved 
from JAR manifest"},"git_diff":{"kind":"string","value":"rc/main/java/sc/iview/SciView.java\n \n import javax.imageio.ImageIO;\n import javax.swing.*;\nimport javax.swing.event.ChangeListener;\n import javax.swing.plaf.basic.BasicLookAndFeel;\n import java.awt.*;\n import java.awt.image.BufferedImage;\n getLogger().warn(\"Could not read splash image 'sciview-logo.png'\");\n splashImage = new BufferedImage(1, 1, BufferedImage.TYPE_INT_ARGB);\n }\n splashLabel = new JLabel(new ImageIcon(splashImage.getScaledInstance(500, 200, java.awt.Image.SCALE_SMOOTH)));\n\n final String sceneryVersion = SceneryBase.class.getPackage().getImplementationVersion();\n final String sciviewVersion = SciView.class.getPackage().getImplementationVersion();\n final String versionString;\n\n if(sceneryVersion == null || sciviewVersion == null) {\n versionString = \"\";\n } else {\n versionString = \"\\n\\nsciview \" + sciviewVersion + \" / scenery \" + sceneryVersion;\n }\n\n splashLabel = new JLabel(versionString,\n new ImageIcon(splashImage.getScaledInstance(500, 200, java.awt.Image.SCALE_SMOOTH)),\n SwingConstants.CENTER);\n splashLabel.setBackground(new java.awt.Color(50, 48, 47));\n splashLabel.setForeground(new java.awt.Color(78, 76, 75));\n splashLabel.setOpaque(true);\n splashLabel.setVerticalTextPosition(JLabel.BOTTOM);\n splashLabel.setHorizontalTextPosition(JLabel.CENTER);\n \n p.setLayout(new OverlayLayout(p));\n p.setBackground(new java.awt.Color(50, 48, 
47));"}}},{"rowIdx":2030,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"a1b05198fa6d63980870b51d32c0408e0e41c90d"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"mystdeim/vertx-web,mystdeim/vertx-web,vert-x3/vertx-web,vert-x3/vertx-web,InfoSec812/vertx-web,InfoSec812/vertx-web,vert-x3/vertx-web,vert-x3/vertx-web,InfoSec812/vertx-web,InfoSec812/vertx-web,aesteve/vertx-web,mystdeim/vertx-web,InfoSec812/vertx-web,vert-x3/vertx-web,aesteve/vertx-web,InfoSec812/vertx-web,mystdeim/vertx-web,mystdeim/vertx-web,aesteve/vertx-web,aesteve/vertx-web,aesteve/vertx-web,mystdeim/vertx-web"},"new_contents":{"kind":"string","value":"package io.vertx.ext.web.client;\n\nimport java.io.File;\nimport java.net.ConnectException;\nimport java.nio.file.Files;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.List;\nimport java.util.concurrent.CompletableFuture;\nimport java.util.concurrent.CountDownLatch;\nimport java.util.concurrent.TimeoutException;\nimport java.util.concurrent.atomic.AtomicBoolean;\nimport java.util.concurrent.atomic.AtomicInteger;\nimport java.util.concurrent.atomic.AtomicReference;\nimport java.util.function.BiConsumer;\nimport java.util.function.Consumer;\nimport java.util.function.Function;\n\nimport io.vertx.core.http.HttpConnection;\nimport org.junit.Test;\n\nimport io.vertx.core.Handler;\nimport io.vertx.core.MultiMap;\nimport io.vertx.core.VertxException;\nimport io.vertx.core.VertxOptions;\nimport io.vertx.core.buffer.Buffer;\nimport io.vertx.core.dns.AddressResolverOptions;\nimport io.vertx.core.file.AsyncFile;\nimport io.vertx.core.file.OpenOptions;\nimport io.vertx.core.http.HttpClientOptions;\nimport io.vertx.core.http.HttpHeaders;\nimport io.vertx.core.http.HttpMethod;\nimport io.vertx.core.http.HttpServer;\nimport io.vertx.core.http.HttpServerOptions;\nimport 
io.vertx.core.http.HttpServerRequest;\nimport io.vertx.core.http.HttpServerResponse;\nimport io.vertx.core.json.DecodeException;\nimport io.vertx.core.json.JsonArray;\nimport io.vertx.core.json.JsonObject;\nimport io.vertx.core.net.ProxyOptions;\nimport io.vertx.core.net.ProxyType;\nimport io.vertx.core.streams.ReadStream;\nimport io.vertx.core.streams.WriteStream;\nimport io.vertx.ext.web.client.impl.HttpContext;\nimport io.vertx.ext.web.client.jackson.WineAndCheese;\nimport io.vertx.ext.web.codec.BodyCodec;\nimport io.vertx.test.core.HttpTestBase;\nimport io.vertx.test.core.TestUtils;\nimport io.vertx.test.core.tls.Cert;\n\n/**\n * @author Julien Viet\n */\npublic class WebClientTest extends HttpTestBase {\n\n private WebClient client;\n\n @Override\n protected VertxOptions getOptions() {\n return super.getOptions().setAddressResolverOptions(new AddressResolverOptions().\n setHostsValue(Buffer.buffer(\n \"127.0.0.1 somehost\\n\" +\n \"127.0.0.1 localhost\")));\n }\n\n @Override\n public void setUp() throws Exception {\n super.setUp();\n super.client = vertx.createHttpClient(new HttpClientOptions().setDefaultPort(8080).setDefaultHost(\"localhost\"));\n client = WebClient.wrap(super.client);\n server.close();\n server = vertx.createHttpServer(new HttpServerOptions().setPort(DEFAULT_HTTP_PORT).setHost(DEFAULT_HTTP_HOST));\n }\n\n @Test\n public void testDefaultHostAndPort() throws Exception {\n testRequest(client -> client.get(\"somepath\"), req -> {\n assertEquals(\"localhost:8080\", req.host());\n });\n }\n\n @Test\n public void testDefaultPort() throws Exception {\n testRequest(client -> client.get(\"somehost\", \"somepath\"), req -> {\n assertEquals(\"somehost:8080\", req.host());\n });\n }\n\n @Test\n public void testDefaultUserAgent() throws Exception {\n testRequest(client -> client.get(\"somehost\", \"somepath\"), req -> {\n String ua = req.headers().get(HttpHeaders.USER_AGENT);\n assertTrue(\"Was expecting use agent header \" + ua + \" to start with 
Vert.x-WebClient/\", ua.startsWith(\"Vert.x-WebClient/\"));\n });\n }\n\n @Test\n public void testCustomUserAgent() throws Exception {\n client = WebClient.wrap(super.client, new WebClientOptions().setUserAgent(\"smith\"));\n testRequest(client -> client.get(\"somehost\", \"somepath\"), req -> {\n assertEquals(Collections.singletonList(\"smith\"), req.headers().getAll(HttpHeaders.USER_AGENT));\n });\n }\n\n @Test\n public void testUserAgentDisabled() throws Exception {\n client = WebClient.wrap(super.client, new WebClientOptions().setUserAgentEnabled(false));\n testRequest(client -> client.get(\"somehost\", \"somepath\"), req -> {\n assertEquals(Collections.emptyList(), req.headers().getAll(HttpHeaders.USER_AGENT));\n });\n }\n\n @Test\n public void testUserAgentHeaderOverride() throws Exception {\n testRequest(client -> client.get(\"somehost\", \"somepath\").putHeader(HttpHeaders.USER_AGENT.toString(), \"smith\"), req -> {\n assertEquals(Collections.singletonList(\"smith\"), req.headers().getAll(HttpHeaders.USER_AGENT));\n });\n }\n\n @Test\n public void testUserAgentHeaderRemoved() throws Exception {\n testRequest(client -> {\n HttpRequest request = client.get(\"somehost\", \"somepath\");\n request.headers().remove(HttpHeaders.USER_AGENT);\n return request;\n }, req -> {\n assertEquals(Collections.emptyList(), req.headers().getAll(HttpHeaders.USER_AGENT));\n });\n }\n\n @Test\n public void testGet() throws Exception {\n testRequest(HttpMethod.GET);\n }\n\n @Test\n public void testHead() throws Exception {\n testRequest(HttpMethod.HEAD);\n }\n\n @Test\n public void testDelete() throws Exception {\n testRequest(HttpMethod.DELETE);\n }\n\n private void testRequest(HttpMethod method) throws Exception {\n testRequest(client -> {\n switch (method) {\n case GET:\n return client.get(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n case HEAD:\n return client.head(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n case DELETE:\n return 
client.delete(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n default:\n fail(\"Invalid HTTP method\");\n return null;\n }\n }, req -> assertEquals(method, req.method()));\n }\n\n private void testRequest(Function> reqFactory, Consumer reqChecker) throws Exception {\n waitFor(4);\n server.requestHandler(req -> {\n try {\n reqChecker.accept(req);\n complete();\n } finally {\n req.response().end();\n }\n });\n startServer();\n HttpRequest builder = reqFactory.apply(client);\n builder.send(onSuccess(resp -> {\n complete();\n }));\n builder.send(onSuccess(resp -> {\n complete();\n }));\n await();\n }\n\n @Test\n public void testPost() throws Exception {\n testRequestWithBody(HttpMethod.POST, false);\n }\n\n @Test\n public void testPostChunked() throws Exception {\n testRequestWithBody(HttpMethod.POST, true);\n }\n\n @Test\n public void testPut() throws Exception {\n testRequestWithBody(HttpMethod.PUT, false);\n }\n\n @Test\n public void testPutChunked() throws Exception {\n testRequestWithBody(HttpMethod.PUT, true);\n }\n\n @Test\n public void testPatch() throws Exception {\n testRequestWithBody(HttpMethod.PATCH, false);\n }\n\n private void testRequestWithBody(HttpMethod method, boolean chunked) throws Exception {\n String expected = TestUtils.randomAlphaString(1024 * 1024);\n File f = File.createTempFile(\"vertx\", \".data\");\n f.deleteOnExit();\n Files.write(f.toPath(), expected.getBytes());\n waitFor(2);\n server.requestHandler(req -> req.bodyHandler(buff -> {\n assertEquals(method, req.method());\n assertEquals(Buffer.buffer(expected), buff);\n complete();\n req.response().end();\n }));\n startServer();\n vertx.runOnContext(v -> {\n AsyncFile asyncFile = vertx.fileSystem().openBlocking(f.getAbsolutePath(), new OpenOptions());\n\n HttpRequest builder = null;\n\n switch (method) {\n case POST:\n builder = client.post(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n break;\n case PUT:\n builder = client.put(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, 
\"/somepath\");\n break;\n case PATCH:\n builder = client.patch(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n break;\n default:\n fail(\"Invalid HTTP method\");\n }\n\n if (!chunked) {\n builder = builder.putHeader(\"Content-Length\", \"\" + expected.length());\n }\n builder.sendStream(asyncFile, onSuccess(resp -> {\n assertEquals(200, resp.statusCode());\n complete();\n }));\n });\n await();\n }\n\n @Test\n public void testSendJsonObjectBody() throws Exception {\n JsonObject body = new JsonObject().put(\"wine\", \"Chateauneuf Du Pape\").put(\"cheese\", \"roquefort\");\n testSendBody(body, (contentType, buff) -> {\n assertEquals(\"application/json\", contentType);\n assertEquals(body, buff.toJsonObject());\n });\n }\n\n @Test\n public void testSendJsonPojoBody() throws Exception {\n testSendBody(new WineAndCheese().setCheese(\"roquefort\").setWine(\"Chateauneuf Du Pape\"),\n (contentType, buff) -> {\n assertEquals(\"application/json\", contentType);\n assertEquals(new JsonObject().put(\"wine\", \"Chateauneuf Du Pape\").put(\"cheese\", \"roquefort\"), buff.toJsonObject());\n });\n }\n\n @Test\n public void testSendJsonArrayBody() throws Exception {\n JsonArray body = new JsonArray().add(0).add(1).add(2);\n testSendBody(body, (contentType, buff) -> {\n assertEquals(\"application/json\", contentType);\n assertEquals(body, buff.toJsonArray());\n });\n }\n\n @Test\n public void testSendBufferBody() throws Exception {\n Buffer body = TestUtils.randomBuffer(2048);\n testSendBody(body, (contentType, buff) -> assertEquals(body, buff));\n }\n\n private void testSendBody(Object body, BiConsumer checker) throws Exception {\n waitFor(2);\n server.requestHandler(req -> {\n req.bodyHandler(buff -> {\n checker.accept(req.getHeader(\"content-type\"), buff);\n complete();\n req.response().end();\n });\n });\n startServer();\n HttpRequest post = client.post(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n if (body instanceof Buffer) {\n post.sendBuffer((Buffer) body, 
onSuccess(resp -> {\n complete();\n }));\n } else if (body instanceof JsonObject) {\n post.sendJsonObject((JsonObject) body, onSuccess(resp -> {\n complete();\n }));\n } else {\n post.sendJson(body, onSuccess(resp -> {\n complete();\n }));\n }\n await();\n }\n\n @Test\n public void testConnectError() throws Exception {\n HttpRequest get = client.get(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n get.send(onFailure(err -> {\n assertTrue(err instanceof ConnectException);\n complete();\n }));\n await();\n }\n\n @Test\n public void testRequestSendError() throws Exception {\n HttpRequest post = client.post(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n CountDownLatch latch = new CountDownLatch(1);\n AtomicReference conn = new AtomicReference<>();\n server.requestHandler(req -> {\n conn.set(req.connection());\n req.pause();\n latch.countDown();\n });\n startServer();\n AtomicReference> dataHandler = new AtomicReference<>();\n AtomicReference> endHandler = new AtomicReference<>();\n AtomicBoolean paused = new AtomicBoolean();\n post.sendStream(new ReadStream() {\n @Override\n public ReadStream exceptionHandler(Handler handler) {\n return this;\n }\n @Override\n public ReadStream handler(Handler handler) {\n dataHandler.set(handler);\n return this;\n }\n @Override\n public ReadStream pause() {\n paused.set(true);\n return this;\n }\n @Override\n public ReadStream resume() {\n paused.set(false);\n return this;\n }\n @Override\n public ReadStream endHandler(Handler handler) {\n endHandler.set(handler);\n return this;\n }\n }, onFailure(err -> {\n // Should be a connection reset by peer or closed\n assertNull(endHandler.get());\n assertNull(dataHandler.get());\n assertFalse(paused.get());\n complete();\n }));\n assertWaitUntil(() -> dataHandler.get() != null);\n dataHandler.get().handle(TestUtils.randomBuffer(1024));\n awaitLatch(latch);\n while (!paused.get()) {\n dataHandler.get().handle(TestUtils.randomBuffer(1024));\n }\n conn.get().close();\n await();\n 
}\n\n @Test\n public void testRequestPumpError() throws Exception {\n waitFor(2);\n HttpRequest post = client.post(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n CompletableFuture done = new CompletableFuture<>();\n server.requestHandler(req -> {\n req.response().closeHandler(v -> {\n complete();\n });\n req.handler(buff -> {\n done.complete(null);\n });\n });\n Throwable cause = new Throwable();\n startServer();\n post.sendStream(new ReadStream() {\n @Override\n public ReadStream exceptionHandler(Handler handler) {\n if (handler != null) {\n done.thenAccept(v -> {\n handler.handle(cause);\n });\n }\n return this;\n }\n @Override\n public ReadStream handler(Handler handler) {\n if (handler != null) {\n handler.handle(TestUtils.randomBuffer(1024));\n }\n return this;\n }\n @Override\n public ReadStream pause() {\n return this;\n }\n @Override\n public ReadStream resume() {\n return this;\n }\n @Override\n public ReadStream endHandler(Handler endHandler) {\n return this;\n }\n }, onFailure(err -> {\n assertSame(cause, err);\n complete();\n }));\n await();\n }\n\n @Test\n public void testRequestPumpErrorNotYetConnected() throws Exception {\n HttpRequest post = client.post(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n server.requestHandler(req -> {\n fail();\n });\n Throwable cause = new Throwable();\n startServer();\n post.sendStream(new ReadStream() {\n Handler exceptionHandler;\n @Override\n public ReadStream exceptionHandler(Handler handler) {\n exceptionHandler = handler;\n return this;\n }\n @Override\n public ReadStream handler(Handler handler) {\n if (handler != null) {\n handler.handle(TestUtils.randomBuffer(1024));\n vertx.runOnContext(v -> {\n exceptionHandler.handle(cause);\n });\n }\n return this;\n }\n @Override\n public ReadStream pause() {\n return this;\n }\n @Override\n public ReadStream resume() {\n return this;\n }\n @Override\n public ReadStream endHandler(Handler endHandler) {\n return this;\n }\n }, onFailure(err -> {\n 
assertSame(cause, err);\n testComplete();\n }));\n await();\n }\n\n @Test\n public void testResponseBodyAsBuffer() throws Exception {\n Buffer expected = TestUtils.randomBuffer(2000);\n server.requestHandler(req -> {\n req.response().end(expected);\n });\n startServer();\n HttpRequest get = client.get(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n get.send(onSuccess(resp -> {\n assertEquals(200, resp.statusCode());\n assertEquals(expected, resp.body());\n testComplete();\n }));\n await();\n }\n\n @Test\n public void testResponseBodyAsAsJsonObject() throws Exception {\n JsonObject expected = new JsonObject().put(\"cheese\", \"Goat Cheese\").put(\"wine\", \"Condrieu\");\n server.requestHandler(req -> {\n req.response().end(expected.encode());\n });\n startServer();\n HttpRequest get = client.get(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n get\n .as(BodyCodec.jsonObject())\n .send(onSuccess(resp -> {\n assertEquals(200, resp.statusCode());\n assertEquals(expected, resp.body());\n testComplete();\n }));\n await();\n }\n\n @Test\n public void testResponseBodyAsAsJsonMapped() throws Exception {\n JsonObject expected = new JsonObject().put(\"cheese\", \"Goat Cheese\").put(\"wine\", \"Condrieu\");\n server.requestHandler(req -> {\n req.response().end(expected.encode());\n });\n startServer();\n HttpRequest get = client.get(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n get\n .as(BodyCodec.json(WineAndCheese.class))\n .send(onSuccess(resp -> {\n assertEquals(200, resp.statusCode());\n assertEquals(new WineAndCheese().setCheese(\"Goat Cheese\").setWine(\"Condrieu\"), resp.body());\n testComplete();\n }));\n await();\n }\n\n @Test\n public void testResponseBodyAsAsJsonArray() throws Exception {\n JsonArray expected = new JsonArray().add(\"cheese\").add(\"wine\");\n server.requestHandler(req -> {\n req.response().end(expected.encode());\n });\n startServer();\n HttpRequest get = client.get(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n 
get\n .as(BodyCodec.jsonArray())\n .send(onSuccess(resp -> {\n assertEquals(200, resp.statusCode());\n assertEquals(expected, resp.body());\n testComplete();\n }));\n await();\n }\n\n @Test\n public void testResponseBodyAsAsJsonArrayMapped() throws Exception {\n JsonArray expected = new JsonArray().add(\"cheese\").add(\"wine\");\n server.requestHandler(req -> {\n req.response().end(expected.encode());\n });\n startServer();\n HttpRequest get = client.get(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n get\n .as(BodyCodec.json(List.class))\n .send(onSuccess(resp -> {\n assertEquals(200, resp.statusCode());\n assertEquals(expected.getList(), resp.body());\n testComplete();\n }));\n await();\n }\n\n @Test\n public void testResponseBodyDiscarded() throws Exception {\n server.requestHandler(req -> {\n req.response().end(TestUtils.randomAlphaString(1024));\n });\n startServer();\n HttpRequest get = client.get(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n get\n .as(BodyCodec.none())\n .send(onSuccess(resp -> {\n assertEquals(200, resp.statusCode());\n assertEquals(null, resp.body());\n testComplete();\n }));\n await();\n }\n\n @Test\n public void testResponseUnknownContentTypeBodyAsJsonObject() throws Exception {\n JsonObject expected = new JsonObject().put(\"cheese\", \"Goat Cheese\").put(\"wine\", \"Condrieu\");\n server.requestHandler(req -> {\n req.response().end(expected.encode());\n });\n startServer();\n HttpRequest get = client.get(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n get.send(onSuccess(resp -> {\n assertEquals(200, resp.statusCode());\n assertEquals(expected, resp.bodyAsJsonObject());\n testComplete();\n }));\n await();\n }\n\n @Test\n public void testResponseUnknownContentTypeBodyAsJsonArray() throws Exception {\n JsonArray expected = new JsonArray().add(\"cheese\").add(\"wine\");\n server.requestHandler(req -> {\n req.response().end(expected.encode());\n });\n startServer();\n HttpRequest get = client.get(DEFAULT_HTTP_PORT, 
DEFAULT_HTTP_HOST, \"/somepath\");\n get.send(onSuccess(resp -> {\n assertEquals(200, resp.statusCode());\n assertEquals(expected, resp.bodyAsJsonArray());\n testComplete();\n }));\n await();\n }\n\n @Test\n public void testResponseUnknownContentTypeBodyAsJsonMapped() throws Exception {\n JsonObject expected = new JsonObject().put(\"cheese\", \"Goat Cheese\").put(\"wine\", \"Condrieu\");\n server.requestHandler(req -> {\n req.response().end(expected.encode());\n });\n startServer();\n HttpRequest get = client.get(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n get.send(onSuccess(resp -> {\n assertEquals(200, resp.statusCode());\n assertEquals(new WineAndCheese().setCheese(\"Goat Cheese\").setWine(\"Condrieu\"), resp.bodyAsJson(WineAndCheese.class));\n testComplete();\n }));\n await();\n }\n\n @Test\n public void testResponseBodyUnmarshallingError() throws Exception {\n server.requestHandler(req -> {\n req.response().end(\"not-json-object\");\n });\n startServer();\n HttpRequest get = client.get(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n get\n .as(BodyCodec.jsonObject())\n .send(onFailure(err -> {\n assertTrue(err instanceof DecodeException);\n testComplete();\n }));\n await();\n }\n\n @Test\n public void testResponseBodyStream() throws Exception {\n AtomicBoolean paused = new AtomicBoolean();\n server.requestHandler(req -> {\n HttpServerResponse resp = req.response();\n resp.setChunked(true);\n vertx.setPeriodic(1, id -> {\n if (!resp.writeQueueFull()) {\n resp.write(TestUtils.randomAlphaString(1024));\n } else {\n resp.drainHandler(v -> {\n resp.end();\n });\n paused.set(true);\n vertx.cancelTimer(id);\n }\n });\n });\n startServer();\n CompletableFuture resume = new CompletableFuture<>();\n AtomicInteger size = new AtomicInteger();\n AtomicBoolean ended = new AtomicBoolean();\n WriteStream stream = new WriteStream() {\n boolean paused = true;\n Handler drainHandler;\n {\n resume.thenAccept(v -> {\n paused = false;\n if (drainHandler != null) 
{\n drainHandler.handle(null);\n }\n });\n }\n @Override\n public WriteStream exceptionHandler(Handler handler) {\n return this;\n }\n @Override\n public WriteStream write(Buffer data) {\n size.addAndGet(data.length());\n return this;\n }\n @Override\n public void end() {\n ended.set(true);\n }\n @Override\n public WriteStream setWriteQueueMaxSize(int maxSize) {\n return this;\n }\n @Override\n public boolean writeQueueFull() {\n return paused;\n }\n @Override\n public WriteStream drainHandler(Handler handler) {\n drainHandler = handler;\n return this;\n }\n };\n HttpRequest get = client.get(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n get\n .as(BodyCodec.pipe(stream))\n .send(onSuccess(resp -> {\n assertTrue(ended.get());\n assertEquals(200, resp.statusCode());\n assertEquals(null, resp.body());\n testComplete();\n }));\n assertWaitUntil(paused::get);\n resume.complete(null);\n await();\n }\n\n @Test\n public void testResponseBodyStreamError() throws Exception {\n CompletableFuture fail = new CompletableFuture<>();\n server.requestHandler(req -> {\n HttpServerResponse resp = req.response();\n resp.setChunked(true);\n resp.write(TestUtils.randomBuffer(2048));\n fail.thenAccept(v -> {\n resp.close();\n });\n });\n startServer();\n AtomicInteger received = new AtomicInteger();\n WriteStream stream = new WriteStream() {\n @Override\n public WriteStream exceptionHandler(Handler handler) {\n return this;\n }\n @Override\n public WriteStream write(Buffer data) {\n received.addAndGet(data.length());\n return this;\n }\n @Override\n public void end() {\n }\n @Override\n public WriteStream setWriteQueueMaxSize(int maxSize) {\n return this;\n }\n @Override\n public boolean writeQueueFull() {\n return false;\n }\n @Override\n public WriteStream drainHandler(Handler handler) {\n return this;\n }\n };\n HttpRequest get = client.get(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n get\n .as(BodyCodec.pipe(stream))\n .send(onFailure(err -> {\n testComplete();\n 
}));\n assertWaitUntil(() -> received.get() == 2048);\n fail.complete(null);\n await();\n }\n\n @Test\n public void testResponseBodyCodecError() throws Exception {\n server.requestHandler(req -> {\n HttpServerResponse resp = req.response();\n resp.setChunked(true);\n resp.end(TestUtils.randomBuffer(2048));\n });\n startServer();\n RuntimeException cause = new RuntimeException();\n WriteStream stream = new WriteStream() {\n Handler exceptionHandler;\n @Override\n public WriteStream exceptionHandler(Handler handler) {\n exceptionHandler = handler;\n return this;\n }\n @Override\n public WriteStream write(Buffer data) {\n exceptionHandler.handle(cause);\n return this;\n }\n @Override\n public void end() {\n }\n @Override\n public WriteStream setWriteQueueMaxSize(int maxSize) {\n return this;\n }\n @Override\n public boolean writeQueueFull() {\n return false;\n }\n @Override\n public WriteStream drainHandler(Handler handler) {\n return this;\n }\n };\n HttpRequest get = client.get(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n get\n .as(BodyCodec.pipe(stream))\n .send(onFailure(err -> {\n assertSame(cause, err);\n testComplete();\n }));\n await();\n }\n\n @Test\n public void testResponseJsonObjectMissingBody() throws Exception {\n testResponseMissingBody(BodyCodec.jsonObject());\n }\n\n @Test\n public void testResponseJsonMissingBody() throws Exception {\n testResponseMissingBody(BodyCodec.json(WineAndCheese.class));\n }\n\n @Test\n public void testResponseWriteStreamMissingBody() throws Exception {\n AtomicInteger length = new AtomicInteger();\n AtomicBoolean ended = new AtomicBoolean();\n WriteStream stream = new WriteStream() {\n @Override\n public WriteStream exceptionHandler(Handler handler) {\n return this;\n }\n @Override\n public WriteStream write(Buffer data) {\n length.addAndGet(data.length());\n return this;\n }\n @Override\n public void end() {\n ended.set(true);\n }\n @Override\n public WriteStream setWriteQueueMaxSize(int maxSize) {\n return 
this;\n }\n @Override\n public boolean writeQueueFull() {\n return false;\n }\n @Override\n public WriteStream drainHandler(Handler handler) {\n return this;\n }\n };\n testResponseMissingBody(BodyCodec.pipe(stream));\n assertTrue(ended.get());\n assertEquals(0, length.get());\n }\n\n private void testResponseMissingBody(BodyCodec codec) throws Exception {\n server.requestHandler(req -> {\n req.response().setStatusCode(403).end();\n });\n startServer();\n HttpRequest get = client.get(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n get\n .as(codec)\n .send(onSuccess(resp -> {\n assertEquals(403, resp.statusCode());\n assertNull(resp.body());\n testComplete();\n }));\n await();\n }\n\n @Test\n public void testHttpResponseError() throws Exception {\n server.requestHandler(req -> {\n req.response().setChunked(true).write(Buffer.buffer(\"some-data\")).close();\n });\n startServer();\n HttpRequest get = client.get(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n get\n .as(BodyCodec.jsonObject())\n .send(onFailure(err -> {\n assertTrue(err instanceof VertxException);\n testComplete();\n }));\n await();\n }\n\n @Test\n public void testTimeout() throws Exception {\n AtomicInteger count = new AtomicInteger();\n server.requestHandler(req -> {\n count.incrementAndGet();\n });\n startServer();\n HttpRequest get = client.get(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n get.timeout(50).send(onFailure(err -> {\n assertEquals(err.getClass(), TimeoutException.class);\n testComplete();\n }));\n await();\n }\n\n @Test\n public void testQueryParam() throws Exception {\n testRequest(client -> client.get(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/\").addQueryParam(\"param\", \"param_value\"), req -> {\n assertEquals(\"param=param_value\", req.query());\n assertEquals(\"param_value\", req.getParam(\"param\"));\n });\n }\n\n @Test\n public void testQueryParamMulti() throws Exception {\n testRequest(client -> client.get(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, 
\"/\").addQueryParam(\"param\", \"param_value1\").addQueryParam(\"param\", \"param_value2\"), req -> {\n assertEquals(\"param=param_value1&param=param_value2\", req.query());\n assertEquals(Arrays.asList(\"param_value1\", \"param_value2\"), req.params().getAll(\"param\"));\n });\n }\n\n @Test\n public void testQueryParamAppend() throws Exception {\n testRequest(client -> client.get(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/?param1=param1_value1\").addQueryParam(\"param1\", \"param1_value2\").addQueryParam(\"param2\", \"param2_value\"), req -> {\n assertEquals(\"param1=param1_value1&param1=param1_value2&param2=param2_value\", req.query());\n assertEquals(\"param1_value2\", req.getParam(\"param1\"));\n assertEquals(\"param2_value\", req.getParam(\"param2\"));\n });\n }\n\n @Test\n public void testOverwriteQueryParams() throws Exception {\n testRequest(client -> client.get(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/?param=param_value1\").setQueryParam(\"param\", \"param_value2\"), req -> {\n assertEquals(\"param=param_value2\", req.query());\n assertEquals(\"param_value2\", req.getParam(\"param\"));\n });\n }\n\n @Test\n public void testQueryParamEncoding() throws Exception {\n testRequest(client -> client\n .get(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/\")\n .addQueryParam(\"param1\", \" \")\n .addQueryParam(\"param2\", \"\\u20AC\"), req -> {\n assertEquals(\"param1=%20&param2=%E2%82%AC\", req.query());\n assertEquals(\" \", req.getParam(\"param1\"));\n assertEquals(\"\\u20AC\", req.getParam(\"param2\"));\n });\n }\n\n @Test\n public void testFormUrlEncoded() throws Exception {\n server.requestHandler(req -> {\n req.setExpectMultipart(true);\n req.endHandler(v -> {\n assertEquals(\"param1_value\", req.getFormAttribute(\"param1\"));\n req.response().end();\n });\n });\n startServer();\n MultiMap form = MultiMap.caseInsensitiveMultiMap();\n form.add(\"param1\", \"param1_value\");\n HttpRequest builder = client.post(\"/somepath\");\n builder.sendForm(form, onSuccess(resp 
-> {\n complete();\n }));\n await();\n }\n\n @Test\n public void testFormMultipart() throws Exception {\n server.requestHandler(req -> {\n req.setExpectMultipart(true);\n req.endHandler(v -> {\n assertEquals(\"param1_value\", req.getFormAttribute(\"param1\"));\n req.response().end();\n });\n });\n startServer();\n MultiMap form = MultiMap.caseInsensitiveMultiMap();\n form.add(\"param1\", \"param1_value\");\n HttpRequest builder = client.post(\"/somepath\");\n builder.putHeader(\"content-type\", \"multipart/form-data\");\n builder.sendForm(form, onSuccess(resp -> {\n complete();\n }));\n await();\n }\n\n @Test\n public void testDefaultFollowRedirects() throws Exception {\n testFollowRedirects(null, true);\n }\n\n @Test\n public void testFollowRedirects() throws Exception {\n testFollowRedirects(true, true);\n }\n\n @Test\n public void testDoNotFollowRedirects() throws Exception {\n testFollowRedirects(false, false);\n }\n\n private void testFollowRedirects(Boolean set, boolean expect) throws Exception {\n waitFor(2);\n String location = \"http://\" + DEFAULT_HTTP_HOST + \":\" + DEFAULT_HTTP_PORT + \"/ok\";\n server.requestHandler(req -> {\n if (req.path().equals(\"/redirect\")) {\n req.response().setStatusCode(301).putHeader(\"Location\", location).end();\n if (!expect) {\n complete();\n }\n } else {\n req.response().end(req.path());\n if (expect) {\n complete();\n }\n }\n });\n startServer();\n HttpRequest builder = client.get(\"/redirect\");\n if (set != null) {\n builder = builder.followRedirects(set);\n }\n builder.send(onSuccess(resp -> {\n if (expect) {\n assertEquals(200, resp.statusCode());\n assertEquals(\"/ok\", resp.body().toString());\n } else {\n assertEquals(301, resp.statusCode());\n assertEquals(location, resp.getHeader(\"location\"));\n }\n complete();\n }));\n await();\n }\n\n @Test\n public void testTLSEnabled() throws Exception {\n testTLS(true, true, client -> client.get(\"/\"));\n }\n\n @Test\n public void testTLSEnabledDisableRequestTLS() 
throws Exception {\n testTLS(true, false, client -> client.get(\"/\").ssl(false));\n }\n\n @Test\n public void testTLSEnabledEnableRequestTLS() throws Exception {\n testTLS(true, true, client -> client.get(\"/\").ssl(true));\n }\n\n @Test\n public void testTLSDisabledDisableRequestTLS() throws Exception {\n testTLS(false, false, client -> client.get(\"/\").ssl(false));\n }\n\n @Test\n public void testTLSDisabledEnableRequestTLS() throws Exception {\n testTLS(false, true, client -> client.get(\"/\").ssl(true));\n }\n\n @Test\n public void testTLSEnabledDisableRequestTLSAbsURI() throws Exception {\n testTLS(true, false, client -> client.getAbs(\"http://\" + DEFAULT_HTTPS_HOST + \":\" + DEFAULT_HTTPS_PORT));\n }\n\n @Test\n public void testTLSEnabledEnableRequestTLSAbsURI() throws Exception {\n testTLS(true, true, client -> client.getAbs(\"https://\" + DEFAULT_HTTPS_HOST + \":\" + DEFAULT_HTTPS_PORT));\n }\n\n @Test\n public void testTLSDisabledDisableRequestTLSAbsURI() throws Exception {\n testTLS(false, false, client -> client.getAbs(\"http://\" + DEFAULT_HTTPS_HOST + \":\" + DEFAULT_HTTPS_PORT));\n }\n\n @Test\n public void testTLSDisabledEnableRequestTLSAbsURI() throws Exception {\n testTLS(false, true, client -> client.getAbs(\"https://\" + DEFAULT_HTTPS_HOST + \":\" + DEFAULT_HTTPS_PORT));\n }\n\n /**\n * Regression test for issue #563 (https://github.com/vert-x3/vertx-web/issues/563)\n *

\n * Only occurred when {@link WebClientOptions#isSsl()} was false for an SSL request.\n */\n @Test\n public void testTLSQueryParametersIssue563() throws Exception {\n testTLS(false, true,\n client -> client.getAbs(\"https://\" + DEFAULT_HTTPS_HOST + \":\" + DEFAULT_HTTPS_PORT)\n .addQueryParam(\"query1\", \"value1\")\n .addQueryParam(\"query2\", \"value2\"),\n serverRequest -> assertEquals(\"query1=value1&query2=value2\", serverRequest.query()));\n }\n\n private void testTLS(boolean clientSSL, boolean serverSSL, Function> requestProvider) throws Exception {\n testTLS(clientSSL, serverSSL, requestProvider, null);\n }\n\n private void testTLS(boolean clientSSL, boolean serverSSL, Function> requestProvider, Consumer serverAssertions) throws Exception {\n WebClient sslClient = WebClient.create(vertx, new WebClientOptions()\n .setSsl(clientSSL)\n .setTrustAll(true)\n .setDefaultHost(DEFAULT_HTTPS_HOST)\n .setDefaultPort(DEFAULT_HTTPS_PORT));\n HttpServer sslServer = vertx.createHttpServer(new HttpServerOptions()\n .setSsl(serverSSL)\n .setKeyStoreOptions(Cert.CLIENT_JKS.get())\n .setPort(DEFAULT_HTTPS_PORT)\n .setHost(DEFAULT_HTTPS_HOST));\n sslServer.requestHandler(req -> {\n assertEquals(serverSSL, req.isSSL());\n if (serverAssertions != null) {\n serverAssertions.accept(req);\n }\n req.response().end();\n });\n try {\n startServer(sslServer);\n HttpRequest builder = requestProvider.apply(sslClient);\n builder.send(onSuccess(resp -> {\n testComplete();\n }));\n await();\n } finally {\n sslClient.close();\n sslServer.close();\n }\n }\n\n @Test\n public void testHttpProxyFtpRequest() throws Exception {\n startProxy(null, ProxyType.HTTP);\n proxy.setForceUri(\"http://\" + DEFAULT_HTTP_HOST + \":\" + DEFAULT_HTTP_PORT);\n server.requestHandler(req -> {\n req.response().setStatusCode(200).end();\n });\n startServer();\n\n WebClientOptions options = new WebClientOptions();\n options.setProxyOptions(new ProxyOptions().setPort(proxy.getPort()));\n WebClient client = 
WebClient.create(vertx, options);\n client\n .getAbs(\"ftp://ftp.gnu.org/gnu/\")\n .send(ar -> {\n if (ar.succeeded()) {\n // Obtain response\n HttpResponse response = ar.result();\n assertEquals(200, response.statusCode());\n assertEquals(\"ftp://ftp.gnu.org/gnu/\", proxy.getLastUri());\n testComplete();\n } else {\n fail(ar.cause());\n }\n });\n await();\n }\n\n @Test\n public void testStreamHttpServerRequest() throws Exception {\n Buffer expected = TestUtils.randomBuffer(10000);\n HttpServer server2 = vertx.createHttpServer(new HttpServerOptions().setPort(8081)).requestHandler(req -> {\n req.bodyHandler(body -> {\n assertEquals(body, expected);\n req.response().end();\n });\n });\n startServer(server2);\n WebClient webClient = WebClient.create(vertx);\n try {\n server.requestHandler(req -> {\n webClient.postAbs(\"http://localhost:8081/\")\n .sendStream(req, onSuccess(resp -> {\n req.response().end(\"ok\");\n }));\n });\n startServer();\n webClient.post(8080, \"localhost\", \"/\").sendBuffer(expected, onSuccess(resp -> {\n assertEquals(\"ok\", resp.bodyAsString());\n complete();\n }));\n await();\n } finally {\n server2.close();\n }\n }\n}\n"},"new_file":{"kind":"string","value":"vertx-web-client/src/test/java/io/vertx/ext/web/client/WebClientTest.java"},"old_contents":{"kind":"string","value":"package io.vertx.ext.web.client;\n\nimport java.io.File;\nimport java.net.ConnectException;\nimport java.nio.file.Files;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.List;\nimport java.util.concurrent.CompletableFuture;\nimport java.util.concurrent.CountDownLatch;\nimport java.util.concurrent.TimeoutException;\nimport java.util.concurrent.atomic.AtomicBoolean;\nimport java.util.concurrent.atomic.AtomicInteger;\nimport java.util.concurrent.atomic.AtomicReference;\nimport java.util.function.BiConsumer;\nimport java.util.function.Consumer;\nimport java.util.function.Function;\n\nimport io.vertx.core.http.HttpConnection;\nimport 
org.junit.Test;\n\nimport io.vertx.core.Handler;\nimport io.vertx.core.MultiMap;\nimport io.vertx.core.VertxException;\nimport io.vertx.core.VertxOptions;\nimport io.vertx.core.buffer.Buffer;\nimport io.vertx.core.dns.AddressResolverOptions;\nimport io.vertx.core.file.AsyncFile;\nimport io.vertx.core.file.OpenOptions;\nimport io.vertx.core.http.HttpClientOptions;\nimport io.vertx.core.http.HttpHeaders;\nimport io.vertx.core.http.HttpMethod;\nimport io.vertx.core.http.HttpServer;\nimport io.vertx.core.http.HttpServerOptions;\nimport io.vertx.core.http.HttpServerRequest;\nimport io.vertx.core.http.HttpServerResponse;\nimport io.vertx.core.json.DecodeException;\nimport io.vertx.core.json.JsonArray;\nimport io.vertx.core.json.JsonObject;\nimport io.vertx.core.net.ProxyOptions;\nimport io.vertx.core.net.ProxyType;\nimport io.vertx.core.streams.ReadStream;\nimport io.vertx.core.streams.WriteStream;\nimport io.vertx.ext.web.client.impl.HttpContext;\nimport io.vertx.ext.web.client.jackson.WineAndCheese;\nimport io.vertx.ext.web.codec.BodyCodec;\nimport io.vertx.test.core.HttpTestBase;\nimport io.vertx.test.core.TestUtils;\nimport io.vertx.test.core.tls.Cert;\n\n/**\n * @author Julien Viet\n */\npublic class WebClientTest extends HttpTestBase {\n\n private WebClient client;\n\n @Override\n protected VertxOptions getOptions() {\n return super.getOptions().setAddressResolverOptions(new AddressResolverOptions().\n setHostsValue(Buffer.buffer(\n \"127.0.0.1 somehost\\n\" +\n \"127.0.0.1 localhost\")));\n }\n\n @Override\n public void setUp() throws Exception {\n super.setUp();\n super.client = vertx.createHttpClient(new HttpClientOptions().setDefaultPort(8080).setDefaultHost(\"localhost\"));\n client = WebClient.wrap(super.client);\n server.close();\n server = vertx.createHttpServer(new HttpServerOptions().setPort(DEFAULT_HTTP_PORT).setHost(DEFAULT_HTTP_HOST));\n }\n\n @Test\n public void testDefaultHostAndPort() throws Exception {\n testRequest(client -> 
client.get(\"somepath\"), req -> {\n assertEquals(\"localhost:8080\", req.host());\n });\n }\n\n @Test\n public void testDefaultPort() throws Exception {\n testRequest(client -> client.get(\"somehost\", \"somepath\"), req -> {\n assertEquals(\"somehost:8080\", req.host());\n });\n }\n\n @Test\n public void testDefaultUserAgent() throws Exception {\n testRequest(client -> client.get(\"somehost\", \"somepath\"), req -> {\n String ua = req.headers().get(HttpHeaders.USER_AGENT);\n assertTrue(\"Was expecting use agent header \" + ua + \" to start with Vert.x-WebClient/\", ua.startsWith(\"Vert.x-WebClient/\"));\n });\n }\n\n @Test\n public void testCustomUserAgent() throws Exception {\n client = WebClient.wrap(super.client, new WebClientOptions().setUserAgent(\"smith\"));\n testRequest(client -> client.get(\"somehost\", \"somepath\"), req -> {\n assertEquals(Collections.singletonList(\"smith\"), req.headers().getAll(HttpHeaders.USER_AGENT));\n });\n }\n\n @Test\n public void testUserAgentDisabled() throws Exception {\n client = WebClient.wrap(super.client, new WebClientOptions().setUserAgentEnabled(false));\n testRequest(client -> client.get(\"somehost\", \"somepath\"), req -> {\n assertEquals(Collections.emptyList(), req.headers().getAll(HttpHeaders.USER_AGENT));\n });\n }\n\n @Test\n public void testUserAgentHeaderOverride() throws Exception {\n testRequest(client -> client.get(\"somehost\", \"somepath\").putHeader(HttpHeaders.USER_AGENT.toString(), \"smith\"), req -> {\n assertEquals(Collections.singletonList(\"smith\"), req.headers().getAll(HttpHeaders.USER_AGENT));\n });\n }\n\n @Test\n public void testUserAgentHeaderRemoved() throws Exception {\n testRequest(client -> {\n HttpRequest request = client.get(\"somehost\", \"somepath\");\n request.headers().remove(HttpHeaders.USER_AGENT);\n return request;\n }, req -> {\n assertEquals(Collections.emptyList(), req.headers().getAll(HttpHeaders.USER_AGENT));\n });\n }\n\n @Test\n public void testGet() throws Exception {\n 
testRequest(HttpMethod.GET);\n }\n\n @Test\n public void testHead() throws Exception {\n testRequest(HttpMethod.HEAD);\n }\n\n @Test\n public void testDelete() throws Exception {\n testRequest(HttpMethod.DELETE);\n }\n\n private void testRequest(HttpMethod method) throws Exception {\n testRequest(client -> {\n switch (method) {\n case GET:\n return client.get(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n case HEAD:\n return client.head(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n case DELETE:\n return client.delete(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n default:\n fail(\"Invalid HTTP method\");\n return null;\n }\n }, req -> assertEquals(method, req.method()));\n }\n\n private void testRequest(Function> reqFactory, Consumer reqChecker) throws Exception {\n waitFor(4);\n server.requestHandler(req -> {\n try {\n reqChecker.accept(req);\n complete();\n } finally {\n req.response().end();\n }\n });\n startServer();\n HttpRequest builder = reqFactory.apply(client);\n builder.send(onSuccess(resp -> {\n complete();\n }));\n builder.send(onSuccess(resp -> {\n complete();\n }));\n await();\n }\n\n @Test\n public void testPost() throws Exception {\n testRequestWithBody(HttpMethod.POST, false);\n }\n\n @Test\n public void testPostChunked() throws Exception {\n testRequestWithBody(HttpMethod.POST, true);\n }\n\n @Test\n public void testPut() throws Exception {\n testRequestWithBody(HttpMethod.PUT, false);\n }\n\n @Test\n public void testPutChunked() throws Exception {\n testRequestWithBody(HttpMethod.PUT, true);\n }\n\n @Test\n public void testPatch() throws Exception {\n testRequestWithBody(HttpMethod.PATCH, false);\n }\n\n private void testRequestWithBody(HttpMethod method, boolean chunked) throws Exception {\n String expected = TestUtils.randomAlphaString(1024 * 1024);\n File f = File.createTempFile(\"vertx\", \".data\");\n f.deleteOnExit();\n Files.write(f.toPath(), expected.getBytes());\n waitFor(2);\n server.requestHandler(req -> 
req.bodyHandler(buff -> {\n assertEquals(method, req.method());\n assertEquals(Buffer.buffer(expected), buff);\n complete();\n req.response().end();\n }));\n startServer();\n vertx.runOnContext(v -> {\n AsyncFile asyncFile = vertx.fileSystem().openBlocking(f.getAbsolutePath(), new OpenOptions());\n\n HttpRequest builder = null;\n\n switch (method) {\n case POST:\n builder = client.post(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n break;\n case PUT:\n builder = client.put(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n break;\n case PATCH:\n builder = client.patch(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n break;\n default:\n fail(\"Invalid HTTP method\");\n }\n\n if (!chunked) {\n builder = builder.putHeader(\"Content-Length\", \"\" + expected.length());\n }\n builder.sendStream(asyncFile, onSuccess(resp -> {\n assertEquals(200, resp.statusCode());\n complete();\n }));\n });\n await();\n }\n\n @Test\n public void testSendJsonObjectBody() throws Exception {\n JsonObject body = new JsonObject().put(\"wine\", \"Chateauneuf Du Pape\").put(\"cheese\", \"roquefort\");\n testSendBody(body, (contentType, buff) -> {\n assertEquals(\"application/json\", contentType);\n assertEquals(body, buff.toJsonObject());\n });\n }\n\n @Test\n public void testSendJsonPojoBody() throws Exception {\n testSendBody(new WineAndCheese().setCheese(\"roquefort\").setWine(\"Chateauneuf Du Pape\"),\n (contentType, buff) -> {\n assertEquals(\"application/json\", contentType);\n assertEquals(new JsonObject().put(\"wine\", \"Chateauneuf Du Pape\").put(\"cheese\", \"roquefort\"), buff.toJsonObject());\n });\n }\n\n @Test\n public void testSendJsonArrayBody() throws Exception {\n JsonArray body = new JsonArray().add(0).add(1).add(2);\n testSendBody(body, (contentType, buff) -> {\n assertEquals(\"application/json\", contentType);\n assertEquals(body, buff.toJsonArray());\n });\n }\n\n @Test\n public void testSendBufferBody() throws Exception {\n Buffer body = 
TestUtils.randomBuffer(2048);\n testSendBody(body, (contentType, buff) -> assertEquals(body, buff));\n }\n\n private void testSendBody(Object body, BiConsumer checker) throws Exception {\n waitFor(2);\n server.requestHandler(req -> {\n req.bodyHandler(buff -> {\n checker.accept(req.getHeader(\"content-type\"), buff);\n complete();\n req.response().end();\n });\n });\n startServer();\n HttpRequest post = client.post(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n if (body instanceof Buffer) {\n post.sendBuffer((Buffer) body, onSuccess(resp -> {\n complete();\n }));\n } else if (body instanceof JsonObject) {\n post.sendJsonObject((JsonObject) body, onSuccess(resp -> {\n complete();\n }));\n } else {\n post.sendJson(body, onSuccess(resp -> {\n complete();\n }));\n }\n await();\n }\n\n @Test\n public void testConnectError() throws Exception {\n HttpRequest get = client.get(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n get.send(onFailure(err -> {\n assertTrue(err instanceof ConnectException);\n complete();\n }));\n await();\n }\n\n @Test\n public void testRequestSendError() throws Exception {\n HttpRequest post = client.post(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n CountDownLatch latch = new CountDownLatch(1);\n AtomicReference conn = new AtomicReference<>();\n server.requestHandler(req -> {\n conn.set(req.connection());\n req.pause();\n latch.countDown();\n });\n startServer();\n AtomicReference> dataHandler = new AtomicReference<>();\n AtomicReference> endHandler = new AtomicReference<>();\n AtomicBoolean paused = new AtomicBoolean();\n post.sendStream(new ReadStream() {\n @Override\n public ReadStream exceptionHandler(Handler handler) {\n return this;\n }\n @Override\n public ReadStream handler(Handler handler) {\n dataHandler.set(handler);\n return this;\n }\n @Override\n public ReadStream pause() {\n paused.set(true);\n return this;\n }\n @Override\n public ReadStream resume() {\n paused.set(false);\n return this;\n }\n @Override\n 
public ReadStream endHandler(Handler handler) {\n endHandler.set(handler);\n return this;\n }\n }, onFailure(err -> {\n // Should be a connection reset by peer or closed\n assertNull(endHandler.get());\n assertNull(dataHandler.get());\n assertFalse(paused.get());\n complete();\n }));\n assertWaitUntil(() -> dataHandler.get() != null);\n dataHandler.get().handle(TestUtils.randomBuffer(1024));\n awaitLatch(latch);\n while (!paused.get()) {\n dataHandler.get().handle(TestUtils.randomBuffer(1024));\n }\n conn.get().close();\n await();\n }\n\n @Test\n public void testRequestPumpError() throws Exception {\n waitFor(2);\n HttpRequest post = client.post(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n CompletableFuture done = new CompletableFuture<>();\n server.requestHandler(req -> {\n req.response().closeHandler(v -> {\n complete();\n });\n req.handler(buff -> {\n done.complete(null);\n });\n });\n Throwable cause = new Throwable();\n startServer();\n post.sendStream(new ReadStream() {\n @Override\n public ReadStream exceptionHandler(Handler handler) {\n if (handler != null) {\n done.thenAccept(v -> {\n handler.handle(cause);\n });\n }\n return this;\n }\n @Override\n public ReadStream handler(Handler handler) {\n if (handler != null) {\n handler.handle(TestUtils.randomBuffer(1024));\n }\n return this;\n }\n @Override\n public ReadStream pause() {\n return this;\n }\n @Override\n public ReadStream resume() {\n return this;\n }\n @Override\n public ReadStream endHandler(Handler endHandler) {\n return this;\n }\n }, onFailure(err -> {\n assertSame(cause, err);\n complete();\n }));\n await();\n }\n\n @Test\n public void testRequestPumpErrorNotYetConnected() throws Exception {\n HttpRequest post = client.post(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n server.requestHandler(req -> {\n fail();\n });\n Throwable cause = new Throwable();\n startServer();\n post.sendStream(new ReadStream() {\n Handler exceptionHandler;\n @Override\n public ReadStream 
exceptionHandler(Handler handler) {\n exceptionHandler = handler;\n return this;\n }\n @Override\n public ReadStream handler(Handler handler) {\n if (handler != null) {\n handler.handle(TestUtils.randomBuffer(1024));\n vertx.runOnContext(v -> {\n exceptionHandler.handle(cause);\n });\n }\n return this;\n }\n @Override\n public ReadStream pause() {\n return this;\n }\n @Override\n public ReadStream resume() {\n return this;\n }\n @Override\n public ReadStream endHandler(Handler endHandler) {\n return this;\n }\n }, onFailure(err -> {\n assertSame(cause, err);\n testComplete();\n }));\n await();\n }\n\n @Test\n public void testResponseBodyAsBuffer() throws Exception {\n Buffer expected = TestUtils.randomBuffer(2000);\n server.requestHandler(req -> {\n req.response().end(expected);\n });\n startServer();\n HttpRequest get = client.get(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n get.send(onSuccess(resp -> {\n assertEquals(200, resp.statusCode());\n assertEquals(expected, resp.body());\n testComplete();\n }));\n await();\n }\n\n @Test\n public void testResponseBodyAsAsJsonObject() throws Exception {\n JsonObject expected = new JsonObject().put(\"cheese\", \"Goat Cheese\").put(\"wine\", \"Condrieu\");\n server.requestHandler(req -> {\n req.response().end(expected.encode());\n });\n startServer();\n HttpRequest get = client.get(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n get\n .as(BodyCodec.jsonObject())\n .send(onSuccess(resp -> {\n assertEquals(200, resp.statusCode());\n assertEquals(expected, resp.body());\n testComplete();\n }));\n await();\n }\n\n @Test\n public void testResponseBodyAsAsJsonMapped() throws Exception {\n JsonObject expected = new JsonObject().put(\"cheese\", \"Goat Cheese\").put(\"wine\", \"Condrieu\");\n server.requestHandler(req -> {\n req.response().end(expected.encode());\n });\n startServer();\n HttpRequest get = client.get(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n get\n 
.as(BodyCodec.json(WineAndCheese.class))\n .send(onSuccess(resp -> {\n assertEquals(200, resp.statusCode());\n assertEquals(new WineAndCheese().setCheese(\"Goat Cheese\").setWine(\"Condrieu\"), resp.body());\n testComplete();\n }));\n await();\n }\n\n @Test\n public void testResponseBodyAsAsJsonArray() throws Exception {\n JsonArray expected = new JsonArray().add(\"cheese\").add(\"wine\");\n server.requestHandler(req -> {\n req.response().end(expected.encode());\n });\n startServer();\n HttpRequest get = client.get(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n get\n .as(BodyCodec.jsonArray())\n .send(onSuccess(resp -> {\n assertEquals(200, resp.statusCode());\n assertEquals(expected, resp.body());\n testComplete();\n }));\n await();\n }\n\n @Test\n public void testResponseBodyAsAsJsonArrayMapped() throws Exception {\n JsonArray expected = new JsonArray().add(\"cheese\").add(\"wine\");\n server.requestHandler(req -> {\n req.response().end(expected.encode());\n });\n startServer();\n HttpRequest get = client.get(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n get\n .as(BodyCodec.json(List.class))\n .send(onSuccess(resp -> {\n assertEquals(200, resp.statusCode());\n assertEquals(expected.getList(), resp.body());\n testComplete();\n }));\n await();\n }\n\n @Test\n public void testResponseBodyDiscarded() throws Exception {\n server.requestHandler(req -> {\n req.response().end(TestUtils.randomAlphaString(1024));\n });\n startServer();\n HttpRequest get = client.get(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n get\n .as(BodyCodec.none())\n .send(onSuccess(resp -> {\n assertEquals(200, resp.statusCode());\n assertEquals(null, resp.body());\n testComplete();\n }));\n await();\n }\n\n @Test\n public void testResponseUnknownContentTypeBodyAsJsonObject() throws Exception {\n JsonObject expected = new JsonObject().put(\"cheese\", \"Goat Cheese\").put(\"wine\", \"Condrieu\");\n server.requestHandler(req -> {\n req.response().end(expected.encode());\n 
});\n startServer();\n HttpRequest get = client.get(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n get.send(onSuccess(resp -> {\n assertEquals(200, resp.statusCode());\n assertEquals(expected, resp.bodyAsJsonObject());\n testComplete();\n }));\n await();\n }\n\n @Test\n public void testResponseUnknownContentTypeBodyAsJsonArray() throws Exception {\n JsonArray expected = new JsonArray().add(\"cheese\").add(\"wine\");\n server.requestHandler(req -> {\n req.response().end(expected.encode());\n });\n startServer();\n HttpRequest get = client.get(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n get.send(onSuccess(resp -> {\n assertEquals(200, resp.statusCode());\n assertEquals(expected, resp.bodyAsJsonArray());\n testComplete();\n }));\n await();\n }\n\n @Test\n public void testResponseUnknownContentTypeBodyAsJsonMapped() throws Exception {\n JsonObject expected = new JsonObject().put(\"cheese\", \"Goat Cheese\").put(\"wine\", \"Condrieu\");\n server.requestHandler(req -> {\n req.response().end(expected.encode());\n });\n startServer();\n HttpRequest get = client.get(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n get.send(onSuccess(resp -> {\n assertEquals(200, resp.statusCode());\n assertEquals(new WineAndCheese().setCheese(\"Goat Cheese\").setWine(\"Condrieu\"), resp.bodyAsJson(WineAndCheese.class));\n testComplete();\n }));\n await();\n }\n\n @Test\n public void testResponseBodyUnmarshallingError() throws Exception {\n server.requestHandler(req -> {\n req.response().end(\"not-json-object\");\n });\n startServer();\n HttpRequest get = client.get(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n get\n .as(BodyCodec.jsonObject())\n .send(onFailure(err -> {\n assertTrue(err instanceof DecodeException);\n testComplete();\n }));\n await();\n }\n\n @Test\n public void testResponseBodyStream() throws Exception {\n AtomicBoolean paused = new AtomicBoolean();\n server.requestHandler(req -> {\n HttpServerResponse resp = req.response();\n 
resp.setChunked(true);\n vertx.setPeriodic(1, id -> {\n if (!resp.writeQueueFull()) {\n resp.write(TestUtils.randomAlphaString(1024));\n } else {\n resp.drainHandler(v -> {\n resp.end();\n });\n paused.set(true);\n vertx.cancelTimer(id);\n }\n });\n });\n startServer();\n CompletableFuture resume = new CompletableFuture<>();\n AtomicInteger size = new AtomicInteger();\n AtomicBoolean ended = new AtomicBoolean();\n WriteStream stream = new WriteStream() {\n boolean paused = true;\n Handler drainHandler;\n {\n resume.thenAccept(v -> {\n paused = false;\n if (drainHandler != null) {\n drainHandler.handle(null);\n }\n });\n }\n @Override\n public WriteStream exceptionHandler(Handler handler) {\n return this;\n }\n @Override\n public WriteStream write(Buffer data) {\n size.addAndGet(data.length());\n return this;\n }\n @Override\n public void end() {\n ended.set(true);\n }\n @Override\n public WriteStream setWriteQueueMaxSize(int maxSize) {\n return this;\n }\n @Override\n public boolean writeQueueFull() {\n return paused;\n }\n @Override\n public WriteStream drainHandler(Handler handler) {\n drainHandler = handler;\n return this;\n }\n };\n HttpRequest get = client.get(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n get\n .as(BodyCodec.pipe(stream))\n .send(onSuccess(resp -> {\n assertTrue(ended.get());\n assertEquals(200, resp.statusCode());\n assertEquals(null, resp.body());\n testComplete();\n }));\n assertWaitUntil(paused::get);\n resume.complete(null);\n await();\n }\n\n @Test\n public void testResponseBodyStreamError() throws Exception {\n CompletableFuture fail = new CompletableFuture<>();\n server.requestHandler(req -> {\n HttpServerResponse resp = req.response();\n resp.setChunked(true);\n resp.write(TestUtils.randomBuffer(2048));\n fail.thenAccept(v -> {\n resp.close();\n });\n });\n startServer();\n AtomicInteger received = new AtomicInteger();\n WriteStream stream = new WriteStream() {\n @Override\n public WriteStream exceptionHandler(Handler 
handler) {\n return this;\n }\n @Override\n public WriteStream write(Buffer data) {\n received.addAndGet(data.length());\n return this;\n }\n @Override\n public void end() {\n }\n @Override\n public WriteStream setWriteQueueMaxSize(int maxSize) {\n return this;\n }\n @Override\n public boolean writeQueueFull() {\n return false;\n }\n @Override\n public WriteStream drainHandler(Handler handler) {\n return this;\n }\n };\n HttpRequest get = client.get(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n get\n .as(BodyCodec.pipe(stream))\n .send(onFailure(err -> {\n testComplete();\n }));\n assertWaitUntil(() -> received.get() == 2048);\n fail.complete(null);\n await();\n }\n\n @Test\n public void testResponseBodyCodecError() throws Exception {\n server.requestHandler(req -> {\n HttpServerResponse resp = req.response();\n resp.setChunked(true);\n resp.end(TestUtils.randomBuffer(2048));\n });\n startServer();\n RuntimeException cause = new RuntimeException();\n WriteStream stream = new WriteStream() {\n Handler exceptionHandler;\n @Override\n public WriteStream exceptionHandler(Handler handler) {\n exceptionHandler = handler;\n return this;\n }\n @Override\n public WriteStream write(Buffer data) {\n exceptionHandler.handle(cause);\n return this;\n }\n @Override\n public void end() {\n }\n @Override\n public WriteStream setWriteQueueMaxSize(int maxSize) {\n return this;\n }\n @Override\n public boolean writeQueueFull() {\n return false;\n }\n @Override\n public WriteStream drainHandler(Handler handler) {\n return this;\n }\n };\n HttpRequest get = client.get(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n get\n .as(BodyCodec.pipe(stream))\n .send(onFailure(err -> {\n assertSame(cause, err);\n testComplete();\n }));\n await();\n }\n\n @Test\n public void testResponseJsonObjectMissingBody() throws Exception {\n testResponseMissingBody(BodyCodec.jsonObject());\n }\n\n @Test\n public void testResponseJsonMissingBody() throws Exception {\n 
testResponseMissingBody(BodyCodec.json(WineAndCheese.class));\n }\n\n @Test\n public void testResponseWriteStreamMissingBody() throws Exception {\n AtomicInteger length = new AtomicInteger();\n AtomicBoolean ended = new AtomicBoolean();\n WriteStream stream = new WriteStream() {\n @Override\n public WriteStream exceptionHandler(Handler handler) {\n return this;\n }\n @Override\n public WriteStream write(Buffer data) {\n length.addAndGet(data.length());\n return this;\n }\n @Override\n public void end() {\n ended.set(true);\n }\n @Override\n public WriteStream setWriteQueueMaxSize(int maxSize) {\n return this;\n }\n @Override\n public boolean writeQueueFull() {\n return false;\n }\n @Override\n public WriteStream drainHandler(Handler handler) {\n return this;\n }\n };\n testResponseMissingBody(BodyCodec.pipe(stream));\n assertTrue(ended.get());\n assertEquals(0, length.get());\n }\n\n private void testResponseMissingBody(BodyCodec codec) throws Exception {\n server.requestHandler(req -> {\n req.response().setStatusCode(403).end();\n });\n startServer();\n HttpRequest get = client.get(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n get\n .as(codec)\n .send(onSuccess(resp -> {\n assertEquals(403, resp.statusCode());\n assertNull(resp.body());\n testComplete();\n }));\n await();\n }\n\n @Test\n public void testHttpResponseError() throws Exception {\n server.requestHandler(req -> {\n req.response().setChunked(true).write(Buffer.buffer(\"some-data\")).close();\n });\n startServer();\n HttpRequest get = client.get(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/somepath\");\n get\n .as(BodyCodec.jsonObject())\n .send(onFailure(err -> {\n assertTrue(err instanceof VertxException);\n testComplete();\n }));\n await();\n }\n\n @Test\n public void testTimeout() throws Exception {\n AtomicInteger count = new AtomicInteger();\n server.requestHandler(req -> {\n count.incrementAndGet();\n });\n startServer();\n HttpRequest get = client.get(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, 
\"/somepath\");\n get.timeout(50).send(onFailure(err -> {\n assertEquals(err.getClass(), TimeoutException.class);\n testComplete();\n }));\n await();\n }\n\n @Test\n public void testQueryParam() throws Exception {\n testRequest(client -> client.get(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/\").addQueryParam(\"param\", \"param_value\"), req -> {\n assertEquals(\"param=param_value\", req.query());\n assertEquals(\"param_value\", req.getParam(\"param\"));\n });\n }\n\n @Test\n public void testQueryParamMulti() throws Exception {\n testRequest(client -> client.get(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/\").addQueryParam(\"param\", \"param_value1\").addQueryParam(\"param\", \"param_value2\"), req -> {\n assertEquals(\"param=param_value1&param=param_value2\", req.query());\n assertEquals(Arrays.asList(\"param_value1\", \"param_value2\"), req.params().getAll(\"param\"));\n });\n }\n\n @Test\n public void testQueryParamAppend() throws Exception {\n testRequest(client -> client.get(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/?param1=param1_value1\").addQueryParam(\"param1\", \"param1_value2\").addQueryParam(\"param2\", \"param2_value\"), req -> {\n assertEquals(\"param1=param1_value1&param1=param1_value2&param2=param2_value\", req.query());\n assertEquals(\"param1_value2\", req.getParam(\"param1\"));\n assertEquals(\"param2_value\", req.getParam(\"param2\"));\n });\n }\n\n @Test\n public void testOverwriteQueryParams() throws Exception {\n testRequest(client -> client.get(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/?param=param_value1\").setQueryParam(\"param\", \"param_value2\"), req -> {\n assertEquals(\"param=param_value2\", req.query());\n assertEquals(\"param_value2\", req.getParam(\"param\"));\n });\n }\n\n @Test\n public void testQueryParamEncoding() throws Exception {\n testRequest(client -> client\n .get(DEFAULT_HTTP_PORT, DEFAULT_HTTP_HOST, \"/\")\n .addQueryParam(\"param1\", \" \")\n .addQueryParam(\"param2\", \"\\u20AC\"), req -> {\n 
assertEquals(\"param1=%20&param2=%E2%82%AC\", req.query());\n assertEquals(\" \", req.getParam(\"param1\"));\n assertEquals(\"\\u20AC\", req.getParam(\"param2\"));\n });\n }\n\n @Test\n public void testFormUrlEncoded() throws Exception {\n server.requestHandler(req -> {\n req.setExpectMultipart(true);\n req.endHandler(v -> {\n assertEquals(\"param1_value\", req.getFormAttribute(\"param1\"));\n req.response().end();\n });\n });\n startServer();\n MultiMap form = MultiMap.caseInsensitiveMultiMap();\n form.add(\"param1\", \"param1_value\");\n HttpRequest builder = client.post(\"/somepath\");\n builder.sendForm(form, onSuccess(resp -> {\n complete();\n }));\n await();\n }\n\n @Test\n public void testFormMultipart() throws Exception {\n server.requestHandler(req -> {\n req.setExpectMultipart(true);\n req.endHandler(v -> {\n assertEquals(\"param1_value\", req.getFormAttribute(\"param1\"));\n req.response().end();\n });\n });\n startServer();\n MultiMap form = MultiMap.caseInsensitiveMultiMap();\n form.add(\"param1\", \"param1_value\");\n HttpRequest builder = client.post(\"/somepath\");\n builder.putHeader(\"content-type\", \"multipart/form-data\");\n builder.sendForm(form, onSuccess(resp -> {\n complete();\n }));\n await();\n }\n\n @Test\n public void testDefaultFollowRedirects() throws Exception {\n testFollowRedirects(null, true);\n }\n\n @Test\n public void testFollowRedirects() throws Exception {\n testFollowRedirects(true, true);\n }\n\n @Test\n public void testDoNotFollowRedirects() throws Exception {\n testFollowRedirects(false, false);\n }\n\n private void testFollowRedirects(Boolean set, boolean expect) throws Exception {\n waitFor(2);\n String location = \"http://\" + DEFAULT_HTTP_HOST + \":\" + DEFAULT_HTTP_PORT + \"/ok\";\n server.requestHandler(req -> {\n if (req.path().equals(\"/redirect\")) {\n req.response().setStatusCode(301).putHeader(\"Location\", location).end();\n if (!expect) {\n complete();\n }\n } else {\n req.response().end(req.path());\n if 
(expect) {\n complete();\n }\n }\n });\n startServer();\n HttpRequest builder = client.get(\"/redirect\");\n if (set != null) {\n builder = builder.followRedirects(set);\n }\n builder.send(onSuccess(resp -> {\n if (expect) {\n assertEquals(200, resp.statusCode());\n assertEquals(\"/ok\", resp.body().toString());\n } else {\n assertEquals(301, resp.statusCode());\n assertEquals(location, resp.getHeader(\"location\"));\n }\n complete();\n }));\n await();\n }\n\n @Test\n public void testTLSEnabled() throws Exception {\n testTLS(true, true, client -> client.get(\"/\"));\n }\n\n @Test\n public void testTLSEnabledDisableRequestTLS() throws Exception {\n testTLS(true, false, client -> client.get(\"/\").ssl(false));\n }\n\n @Test\n public void testTLSEnabledEnableRequestTLS() throws Exception {\n testTLS(true, true, client -> client.get(\"/\").ssl(true));\n }\n\n @Test\n public void testTLSDisabledDisableRequestTLS() throws Exception {\n testTLS(false, false, client -> client.get(\"/\").ssl(false));\n }\n\n @Test\n public void testTLSDisabledEnableRequestTLS() throws Exception {\n testTLS(false, true, client -> client.get(\"/\").ssl(true));\n }\n\n @Test\n public void testTLSEnabledDisableRequestTLSAbsURI() throws Exception {\n testTLS(true, false, client -> client.getAbs(\"http://\" + DEFAULT_HTTPS_HOST + \":\" + DEFAULT_HTTPS_PORT));\n }\n\n @Test\n public void testTLSEnabledEnableRequestTLSAbsURI() throws Exception {\n testTLS(true, true, client -> client.getAbs(\"https://\" + DEFAULT_HTTPS_HOST + \":\" + DEFAULT_HTTPS_PORT));\n }\n\n @Test\n public void testTLSDisabledDisableRequestTLSAbsURI() throws Exception {\n testTLS(false, false, client -> client.getAbs(\"http://\" + DEFAULT_HTTPS_HOST + \":\" + DEFAULT_HTTPS_PORT));\n }\n\n @Test\n public void testTLSDisabledEnableRequestTLSAbsURI() throws Exception {\n testTLS(false, true, client -> client.getAbs(\"https://\" + DEFAULT_HTTPS_HOST + \":\" + DEFAULT_HTTPS_PORT));\n }\n\n /**\n * Regression test for issue #563 
(https://github.com/vert-x3/vertx-web/issues/563)\n *

\n * Only occurred when {@link WebClientOptions#isSsl()} was false for an SSL request.\n */\n @Test\n public void testTLSQueryParametersIssue563() throws Exception {\n testTLS(false, true,\n client -> client.getAbs(\"https://\" + DEFAULT_HTTPS_HOST + \":\" + DEFAULT_HTTPS_PORT)\n .addQueryParam(\"query1\", \"value1\")\n .addQueryParam(\"query2\", \"value2\"),\n serverRequest -> assertEquals(\"query1=value1&query2=value2\", serverRequest.query()));\n }\n\n private void testTLS(boolean clientSSL, boolean serverSSL, Function> requestProvider) throws Exception {\n testTLS(clientSSL, serverSSL, requestProvider, null);\n }\n\n private void testTLS(boolean clientSSL, boolean serverSSL, Function> requestProvider, Consumer serverAssertions) throws Exception {\n WebClient sslClient = WebClient.create(vertx, new WebClientOptions()\n .setSsl(clientSSL)\n .setTrustAll(true)\n .setDefaultHost(DEFAULT_HTTPS_HOST)\n .setDefaultPort(DEFAULT_HTTPS_PORT));\n HttpServer sslServer = vertx.createHttpServer(new HttpServerOptions()\n .setSsl(serverSSL)\n .setKeyStoreOptions(Cert.CLIENT_JKS.get())\n .setPort(DEFAULT_HTTPS_PORT)\n .setHost(DEFAULT_HTTPS_HOST));\n sslServer.requestHandler(req -> {\n assertEquals(serverSSL, req.isSSL());\n if (serverAssertions != null) {\n serverAssertions.accept(req);\n }\n req.response().end();\n });\n try {\n startServer(sslServer);\n HttpRequest builder = requestProvider.apply(sslClient);\n builder.send(onSuccess(resp -> {\n testComplete();\n }));\n await();\n } finally {\n sslClient.close();\n sslServer.close();\n }\n }\n\n @Test\n public void testHttpProxyFtpRequest() throws Exception {\n startProxy(null, ProxyType.HTTP);\n proxy.setForceUri(\"http://\" + DEFAULT_HTTP_HOST + \":\" + DEFAULT_HTTP_PORT);\n server.requestHandler(req -> {\n req.response().setStatusCode(200).end();\n });\n startServer();\n\n WebClientOptions options = new WebClientOptions();\n options.setProxyOptions(new ProxyOptions().setPort(proxy.getPort()));\n WebClient client = 
WebClient.create(vertx, options);\n client\n .getAbs(\"ftp://ftp.gnu.org/gnu/\")\n .send(ar -> {\n if (ar.succeeded()) {\n // Obtain response\n HttpResponse response = ar.result();\n assertEquals(200, response.statusCode());\n assertEquals(\"ftp://ftp.gnu.org/gnu/\", proxy.getLastUri());\n testComplete();\n } else {\n fail(ar.cause());\n }\n });\n await();\n }\n\n private void handleMutateRequest(HttpContext context) {\n context.request().host(\"localhost\");\n context.request().port(8080);\n context.next();\n }\n}\n"},"message":{"kind":"string","value":"Add a test with a real HttpServerRequest sent to a backend server\n"},"old_file":{"kind":"string","value":"vertx-web-client/src/test/java/io/vertx/ext/web/client/WebClientTest.java"},"subject":{"kind":"string","value":"Add a test with a real HttpServerRequest sent to a backend server"},"git_diff":{"kind":"string","value":"ertx-web-client/src/test/java/io/vertx/ext/web/client/WebClientTest.java\n await();\n }\n \n private void handleMutateRequest(HttpContext context) {\n context.request().host(\"localhost\");\n context.request().port(8080);\n context.next();\n @Test\n public void testStreamHttpServerRequest() throws Exception {\n Buffer expected = TestUtils.randomBuffer(10000);\n HttpServer server2 = vertx.createHttpServer(new HttpServerOptions().setPort(8081)).requestHandler(req -> {\n req.bodyHandler(body -> {\n assertEquals(body, expected);\n req.response().end();\n });\n });\n startServer(server2);\n WebClient webClient = WebClient.create(vertx);\n try {\n server.requestHandler(req -> {\n webClient.postAbs(\"http://localhost:8081/\")\n .sendStream(req, onSuccess(resp -> {\n req.response().end(\"ok\");\n }));\n });\n startServer();\n webClient.post(8080, \"localhost\", \"/\").sendBuffer(expected, onSuccess(resp -> {\n assertEquals(\"ok\", resp.bodyAsString());\n complete();\n }));\n await();\n } finally {\n server2.close();\n }\n }\n 
}"}}},{"rowIdx":2031,"cells":{"lang":{"kind":"string","value":"JavaScript"},"license":{"kind":"string","value":"mit"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"4c1ce29b6a7d3f2e774d71b331e5fc8e680cb9ee"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"atomizejs/atomize-examples"},"new_contents":{"kind":"string","value":"// atomize-translate unittests.js unittests-compat.js atomize '$(document)' NiceException Error\n\nvar URL = \"http://localhost:9999/atomize\";\n\n\nfunction NiceException() {};\nNiceException.prototype = Error.prototype;\n\nvar niceException = new NiceException();\n\nfunction withAtomize (clientsAry, test) {\n var atomize = new Atomize(URL);\n atomize.onAuthenticated = function () {\n atomize.atomically(function () {\n var key = Date();\n atomize.root[key] = atomize.lift({});\n return key;\n }, function (key) {\n var i;\n for (i = 0; i < clientsAry.length; i += 1) {\n clientsAry[i] = new Atomize(URL);\n if (0 === i) {\n clientsAry[i].onAuthenticated = function () {\n test(key, clientsAry, function () {\n for (i = 0; i < clientsAry.length; i += 1) {\n clientsAry[i].close();\n clientsAry[i] = undefined;\n }\n atomize.atomically(function () {\n delete atomize.root[key];\n }, function () {\n atomize.close();\n });\n });\n };\n } else {\n (function () {\n var j = i - 1;\n clientsAry[i].onAuthenticated = function () {\n clientsAry[j].connect();\n }\n })();\n }\n }\n clientsAry[clientsAry.length - 1].connect();\n });\n };\n atomize.connect();\n}\n\nfunction clients (n) {\n var ary = [];\n ary.length = n;\n return ary;\n}\n\nfunction contAndStart (cont) {\n cont();\n start();\n}\n\nfunction Semaphore (cont) {\n this.count = 0;\n this.cont = cont;\n}\nSemaphore.prototype = {\n fired: false,\n up: function () {\n if (this.fired) {\n throw \"Semaphore Already Fired\";\n }\n this.count += 1;\n },\n down: function () {\n if (this.fired) {\n throw \"Semaphore Already Fired\";\n }\n this.count 
-= 1;\n if (0 === this.count) {\n this.fired = true;\n this.cont()\n }\n }\n};\n\n$(document).ready(function(){\n\n asyncTest(\"Empty transaction\", 2, function () {\n withAtomize(clients(1), function (key, clients, cont) {\n var c1 = clients[0];\n c1.atomically(function () {\n ok(true, \"This txn has no read or writes so should run once\");\n }, function () {\n ok(true, \"The continuation should be run\");\n contAndStart(cont);\n });\n });\n });\n\n asyncTest(\"Await private empty root\", 2, function () {\n withAtomize(clients(1), function (key, clients, cont) {\n var c1 = clients[0];\n c1.atomically(function () {\n if (undefined === c1.root[key]) {\n ok(true, \"We should retry at least once\");\n c1.retry();\n }\n return Object.keys(c1.root[key]).length;\n }, function (fieldCount) {\n strictEqual(fieldCount, 0, \"Root object should be empty\");\n contAndStart(cont);\n });\n });\n });\n\n asyncTest(\"Set Primitive\", 1, function () {\n withAtomize(clients(1), function (key, clients, cont) {\n var c1 = clients[0],\n value = 5;\n c1.atomically(function () {\n if (undefined === c1.root[key]) {\n c1.retry();\n } else if (undefined !== c1.root[key].field) {\n throw \"Found existing field!\";\n }\n c1.root[key].field = value;\n return c1.root[key].field;\n }, function (result) {\n strictEqual(result, value, \"Should have got back value\");\n contAndStart(cont);\n });\n });\n });\n\n asyncTest(\"Set Empty Object\", 1, function () {\n withAtomize(clients(1), function (key, clients, cont) {\n var c1 = clients[0],\n value = {};\n c1.atomically(function () {\n if (undefined === c1.root[key]) {\n c1.retry();\n } else if (undefined !== c1.root[key].field) {\n throw \"Found existing field!\";\n }\n c1.root[key].field = c1.lift(value);\n return c1.root[key].field;\n }, function (result) {\n deepEqual(result, value, \"Should have got back value\");\n contAndStart(cont);\n });\n });\n });\n\n asyncTest(\"Set Complex Object\", 1, function () {\n withAtomize(clients(1), function 
(key, clients, cont) {\n var c1 = clients[0],\n value = {a: \"hello\", b: true, c: 5, d: {}};\n value.e = value; // add loop\n value.f = value.d; // add non-loop alias\n c1.atomically(function () {\n if (undefined === c1.root[key]) {\n c1.retry();\n } else if (undefined !== c1.root[key].field) {\n throw \"Found existing field!\";\n }\n c1.root[key].field = c1.lift(value);\n return c1.root[key].field;\n }, function (result) {\n deepEqual(result, value, \"Should have got back value\");\n contAndStart(cont);\n });\n });\n });\n\n asyncTest(\"Trigger (add field)\", 1, function () {\n withAtomize(clients(2), function (key, clients, cont) {\n var c1 = clients[0],\n c2 = clients[1],\n trigger = \"pop!\";\n\n c1.atomically(function () {\n if (undefined === c1.root[key] ||\n undefined === c1.root[key].trigger) {\n c1.retry();\n }\n return c1.root[key].trigger;\n }, function (result) {\n strictEqual(trigger, result, \"Should have received the trigger\");\n contAndStart(cont);\n });\n\n c2.atomically(function () {\n if (undefined === c2.root[key]) {\n c2.retry();\n }\n if (undefined === c2.root[key].trigger) {\n c2.root[key].trigger = trigger;\n } else {\n throw \"Found existing trigger!\";\n }\n }); // no need for a continuation here\n });\n });\n\n asyncTest(\"Trigger (add and change field)\", 3, function () {\n withAtomize(clients(2), function (key, clients, cont) {\n var c1 = clients[0],\n c2 = clients[1],\n trigger1 = \"pop!\",\n trigger2 = \"!pop\";\n\n c1.atomically(function () {\n if (undefined === c1.root[key] ||\n undefined === c1.root[key].trigger) {\n c1.retry();\n }\n if (c1.root[key].trigger == trigger1) {\n c1.root[key].trigger = trigger2;\n return true;\n } else {\n return false;\n }\n }, function (success) {\n ok(success, \"Reached 1\");\n });\n\n c2.atomically(function () {\n if (undefined === c2.root[key]) {\n c2.retry();\n }\n if (undefined === c2.root[key].trigger) {\n c2.root[key].trigger = trigger1;\n } else {\n throw \"Found existing trigger!\";\n }\n 
}, function () {\n ok(true, \"Reached 2\");\n c2.atomically(function () {\n if (trigger2 != c2.root[key].trigger) {\n c2.retry();\n }\n }, function () {\n ok(true, \"Reached 3\");\n contAndStart(cont);\n });\n });\n });\n });\n\n asyncTest(\"Trigger (add and remove field)\", 3, function () {\n withAtomize(clients(2), function (key, clients, cont) {\n var c1 = clients[0],\n c2 = clients[1],\n trigger = \"pop!\";\n\n c1.atomically(function () {\n if (undefined === c1.root[key] ||\n undefined === c1.root[key].trigger) {\n c1.retry();\n }\n delete c1.root[key].trigger;\n }, function () {\n ok(true, \"Reached 1\");\n });\n\n c2.atomically(function () {\n if (undefined === c2.root[key]) {\n c2.retry();\n }\n if (undefined === c2.root[key].trigger) {\n c2.root[key].trigger = trigger;\n } else {\n throw \"Found existing trigger!\";\n }\n }, function () {\n ok(true, \"Reached 2\");\n c2.atomically(function () {\n if (undefined !== c2.root[key].trigger) {\n c2.retry();\n }\n }, function () {\n ok(true, \"Reached 3\");\n contAndStart(cont);\n });\n });\n });\n });\n\n asyncTest(\"Send Primitive\", 1, function () {\n withAtomize(clients(2), function (key, clients, cont) {\n var c1 = clients[0],\n c2 = clients[1],\n value = 5;\n c1.atomically(function () {\n if (undefined === c1.root[key]) {\n c1.retry();\n } else if (undefined !== c1.root[key].field) {\n throw \"Found existing field!\";\n }\n c1.root[key].field = value;\n });\n c2.atomically(function () {\n if (undefined === c2.root[key] ||\n undefined === c2.root[key].field) {\n c2.retry();\n }\n return c2.root[key].field;\n }, function (result) {\n strictEqual(result, value, \"Should have got back value\");\n contAndStart(cont);\n });\n });\n });\n\n asyncTest(\"Send Empty Object\", 1, function () {\n withAtomize(clients(2), function (key, clients, cont) {\n var c1 = clients[0],\n c2 = clients[1],\n value = {};\n c1.atomically(function () {\n if (undefined === c1.root[key]) {\n c1.retry();\n } else if (undefined !== 
c1.root[key].field) {\n throw \"Found existing field!\";\n }\n c1.root[key].field = c1.lift(value);\n });\n c2.atomically(function () {\n if (undefined === c2.root[key] ||\n undefined === c2.root[key].field) {\n c2.retry();\n }\n return c2.root[key].field;\n }, function (result) {\n deepEqual(result, value, \"Should have got back value\");\n contAndStart(cont);\n });\n });\n });\n\n asyncTest(\"Send Complex Object\", 1, function () {\n withAtomize(clients(2), function (key, clients, cont) {\n var c1 = clients[0],\n c2 = clients[1],\n value = {a: \"hello\", b: true, c: 5, d: {}};\n value.e = value; // add loop\n value.f = value.d; // add non-loop alias\n c1.atomically(function () {\n if (undefined === c1.root[key]) {\n c1.retry();\n } else if (undefined !== c1.root[key].field) {\n throw \"Found existing field!\";\n }\n c1.root[key].field = c1.lift(value);\n });\n c2.atomically(function () {\n if (undefined === c2.root[key] ||\n undefined === c2.root[key].field) {\n c2.retry();\n }\n return c2.root[key].field;\n }, function (result) {\n deepEqual(result, value, \"Should have got back value\");\n contAndStart(cont);\n });\n });\n });\n\n // For some reason, the current Proxy thing suggests all\n // descriptors should be configurable. Thus we don't test for the\n // 'configurable' meta-property here. 
This issue should go away\n // once \"direct proxies\" arrive.\n asyncTest(\"Keys, Enumerate, etc\", 10, function () {\n withAtomize(clients(2), function (key, clients, cont) {\n var c1 = clients[0],\n c2 = clients[1],\n descriptors = {a: {value: 1,\n writable: true,\n enumerable: true},\n b: {value: 2,\n writable: false,\n enumerable: true},\n c: {value: 3,\n writable: true,\n enumerable: false},\n d: {value: 4,\n writable: false,\n enumerable: false}};\n\n c1.atomically(function () {\n if (undefined === c1.root[key]) {\n c1.retry();\n }\n var keys = Object.keys(descriptors),\n x, field, descriptor;\n for (x = 0; x < keys.length; x += 1) {\n field = keys[x];\n descriptor = descriptors[field];\n Object.defineProperty(c1.root[key], field, descriptor);\n }\n c1.root[key].done = true;\n });\n c2.atomically(function () {\n if (undefined === c2.root[key] ||\n undefined === c2.root[key].done) {\n c2.retry();\n }\n delete c2.root[key].done;\n var keys = Object.keys(c2.root[key]),\n names = Object.getOwnPropertyNames(c2.root[key]),\n enumerable = [],\n descriptors = {},\n field, x;\n for (field in c2.root[key]) {\n enumerable.push(field);\n }\n for (x = 0; x < names.length; x += 1) {\n field = names[x];\n descriptors[field] = Object.getOwnPropertyDescriptor(c2.root[key], field);\n delete descriptors[field].configurable; // see comment above\n }\n return {keys: keys.sort(),\n names: names.sort(),\n enumerable: enumerable.sort(),\n descriptors: descriptors,\n hasA: 'a' in c2.root[key],\n hasC: 'c' in c2.root[key],\n hasZ: 'z' in c2.root[key],\n hasOwnA: ({}).hasOwnProperty.call(c2.root[key], 'a'),\n hasOwnC: ({}).hasOwnProperty.call(c2.root[key], 'c'),\n hasOwnZ: ({}).hasOwnProperty.call(c2.root[key], 'z')};\n }, function (result) {\n deepEqual(result.keys, ['a', 'b'],\n \"Keys should have found enumerable fields\");\n deepEqual(result.enumerable, ['a', 'b'],\n \"Enumeration should have found enumerable fields\");\n deepEqual(result.names, ['a', 'b', 'c', 'd'],\n \"Should 
have found field names 'a' to 'd'\");\n deepEqual(result.descriptors, descriptors,\n \"Should have got same descriptors back\");\n ok(result.hasA, \"Should have found field 'a'\");\n ok(result.hasC, \"Should have found field 'c'\");\n ok(! result.hasZ, \"Should not have found field 'z'\");\n ok(result.hasOwnA, \"Should have found own field 'a'\");\n ok(result.hasOwnC, \"Should have found own field 'c'\");\n ok(! result.hasOwnZ, \"Should not have found own field 'z'\");\n contAndStart(cont);\n });\n });\n });\n\n asyncTest(\"Triggers: Multiple concurrent retries, multiple clients\", 6, function () {\n withAtomize(clients(2), function (key, clients, cont) {\n var c1 = clients[0],\n c2 = clients[1];\n c1.atomically(function () {\n if (undefined === c1.root[key] ||\n undefined === c1.root[key].ready) {\n c1.retry();\n }\n c1.root[key].ready = ! c1.root[key].ready; // 2. Flip it false to true\n }, function () {\n ok(true, \"Reached 1\");\n });\n // We do the 'gone' thing because otherwise c1's txns can\n // create and remove it, before c2 spots its\n // existence. I.e. classic race condition.\n c1.atomically(function () {\n if (undefined === c1.root[key] ||\n undefined === c1.root[key].ready ||\n ! c1.root[key].ready) {\n c1.retry();\n }\n delete c1.root[key].ready; // 3. Delete it\n c1.root[key].gone = true;\n }, function () {\n ok(true, \"Reached 2\");\n });\n c2.atomically(function () {\n if (undefined === c2.root[key] ||\n (undefined === c2.root[key].ready &&\n undefined === c2.root[key].gone)) {\n c2.retry(); // A. Await its existence\n }\n }, function () {\n ok(true, \"Reached 3\");\n c2.atomically(function () {\n if (Object.hasOwnProperty.call(c2.root[key], 'ready')) {\n c2.retry(); // B. Await its disappearance\n }\n ok(c2.root[key].gone, \"If 'ready' has gone, 'gone' must be truth\");\n delete c2.root[key].gone;\n }, function () {\n ok(true, \"Reached 4\");\n contAndStart(cont); // C. 
All done\n });\n });\n c2.atomically(function () {\n if (undefined === c2.root[key]) {\n c2.retry();\n }\n c2.root[key].ready = false; // 1. Create it as false\n }, function () {\n ok(true, \"Reached 5\");\n });\n });\n });\n\n asyncTest(\"OrElse\", 4, function () {\n withAtomize(clients(2), function (key, clients, cont) {\n var c1 = clients[0],\n c2 = clients[1],\n fun;\n fun = function (sum) {\n ok(true, \"Reached 1\"); // should reach this 4 times\n if (10 === sum) { // 10 === 1+2+3+4\n contAndStart(cont);\n return;\n }\n c1.orElse(\n [function () {\n if (undefined === c1.root[key] ||\n undefined === c1.root[key].a) {\n c1.retry();\n }\n c1.root[key].b = c1.root[key].a + 2;\n delete c1.root[key].a;\n return c1.root[key].b;\n }, function () {\n if (undefined === c1.root[key] ||\n undefined === c1.root[key].b) {\n c1.retry();\n }\n c1.root[key].c = c1.root[key].b + 3;\n delete c1.root[key].b;\n return c1.root[key].c;\n }, function () {\n if (undefined === c1.root[key] ||\n undefined === c1.root[key].c) {\n c1.retry();\n }\n c1.root[key].d = c1.root[key].c + 4;\n delete c1.root[key].c;\n return c1.root[key].d;\n }], fun);\n };\n fun(0);\n c2.atomically(function () {\n if (undefined === c2.root[key]) {\n c2.retry();\n }\n c2.root[key].a = 1;\n });\n });\n });\n\n asyncTest(\"OrElse - observing order\", 4, function () {\n // Same as before, but drop the deletes, and invert the order\n // of the orElse statements. 
As its deterministic choice,\n // should do the same as before.\n withAtomize(clients(2), function (key, clients, cont) {\n var c1 = clients[0],\n c2 = clients[1],\n fun;\n fun = function (sum) {\n ok(true, \"Reached 1\"); // should reach this 4 times\n if (10 === sum) { // 10 === 1+2+3+4\n contAndStart(cont);\n return;\n }\n c1.orElse(\n [function () {\n if (undefined === c1.root[key] ||\n undefined === c1.root[key].c) {\n c1.retry();\n }\n c1.root[key].d = c1.root[key].c + 4;\n return c1.root[key].d;\n }, function () {\n if (undefined === c1.root[key] ||\n undefined === c1.root[key].b) {\n c1.retry();\n }\n c1.root[key].c = c1.root[key].b + 3;\n return c1.root[key].c;\n }, function () {\n if (undefined === c1.root[key] ||\n undefined === c1.root[key].a) {\n c1.retry();\n }\n c1.root[key].b = c1.root[key].a + 2;\n return c1.root[key].b;\n }], fun);\n };\n fun(0);\n c2.atomically(function () {\n if (undefined === c2.root[key]) {\n c2.retry();\n }\n c2.root[key].a = 1;\n });\n });\n });\n\n (function () {\n var clientCount = 6,\n clientConcurrency = 10,\n txnCount = 10;\n\n asyncTest(\"Rampaging Transactions 1 (this takes a while)\",\n ((clientCount - 1) * clientConcurrency * txnCount) -1, function () {\n withAtomize(clients(clientCount), function (key, clients, cont) {\n var semaphore = new Semaphore(function () { contAndStart(cont); }),\n fun, x, y;\n fun = function (c) {\n c.atomically(function () {\n if (undefined === c.root[key] ||\n undefined === c.root[key].obj) {\n c.retry();\n }\n var keys = Object.keys(c.root[key].obj),\n max = 0,\n x, field, n, obj;\n for (x = 0; x < keys.length; x += 1) {\n field = parseInt(keys[x]);\n max = field > max ? 
field : max;\n if (undefined === n) {\n n = c.root[key].obj[field].num;\n if (0 === n) {\n return n;\n }\n } else if (n !== c.root[key].obj[field].num) {\n throw (\"All fields should have the same number: \" +\n n + \" vs \" + c.root[key].obj[field].num);\n }\n if (0.75 < Math.random()) {\n obj = c.lift({});\n obj.num = n;\n c.root[key].obj[field] = obj;\n }\n c.root[key].obj[field].num -= 1;\n }\n n -= 1;\n max += 1;\n if (0.75 < Math.random()) {\n c.root[key].obj[max] = c.lift({num: n});\n delete c.root[key].obj[keys[0]];\n }\n return n;\n }, function (n) {\n if (n > 0) {\n ok(true, \"Reached\");\n fun(c);\n } else {\n semaphore.down();\n }\n });\n };\n // We use all but one client, and each of those gets 10\n // txns concurrently\n for (x = 1; x < clients.length; x += 1) {\n clients[x].stm.prefix = \"(\" + x + \"): \";\n for (y = 0; y < clientConcurrency; y += 1) {\n semaphore.up();\n fun(clients[x]);\n }\n }\n x = clients[0];\n x.atomically(function () {\n if (undefined === x.root[key]) {\n x.retry();\n }\n var obj = x.lift({});\n for (y = 0; y < 5; y += 1) {\n obj[y] = x.lift({num: (clientCount - 1) * clientConcurrency * txnCount});\n }\n x.root[key].obj = obj;\n });\n });\n });\n }());\n\n (function () {\n var clientCount = 6,\n clientConcurrency = 6,\n txnCount = 10;\n\n asyncTest(\"Rampaging Transactions 2 (this takes a while)\",\n (clientCount - 1) * clientConcurrency * txnCount, function () {\n withAtomize(clients(clientCount), function (key, clients, cont) {\n var semaphore = new Semaphore(function () { contAndStart(cont); }),\n fun;\n fun = function (c, n) {\n c.atomically(function () {\n if (undefined === c.root[key] ||\n undefined === c.root[key].obj) {\n c.retry();\n }\n var ops, names, secret, x, name, op;\n\n // First verify the old thing\n ops = c.root[key].obj.log;\n if (undefined !== ops) {\n secret = ops.secret;\n names = Object.keys(ops);\n for (x = 0; x < names.length; x += 1) {\n name = names[x];\n if ('secret' === name) {\n continue;\n } 
else if ('delete' === ops[name]) {\n if (({}).hasOwnProperty.call(c.root[key].obj, name)) {\n throw (\"Found field which should be deleted: \" + name)\n }\n } else if ('modify' === ops[name]) {\n if (! ({}).hasOwnProperty.call(c.root[key].obj, name)) {\n throw (\"Failed to find field: \" + name);\n }\n if (secret !== c.root[key].obj[name].modified.value) {\n throw (\"Found the wrong modified value in field: \" + name);\n }\n } else if ('create' === ops[name]) {\n if (! ({}).hasOwnProperty.call(c.root[key].obj, name)) {\n throw (\"Failed to find field: \" + name);\n }\n if (secret !== c.root[key].obj[name].created.value) {\n throw (\"Found the wrong created value in field: \" + name);\n }\n } else {\n throw (\"Found unknown op: \" + ops[name]);\n }\n }\n }\n\n secret = Math.random();\n ops = {secret: secret};\n for (x = 0; x < 20; x += 1) {\n name = Math.round(Math.random() * 50);\n op = Math.random();\n if (op > 0.9) {\n delete c.root[key].obj[name];\n ops[name] = 'delete';\n } else if (op > 0.1 && ({}).hasOwnProperty.call(c.root[key].obj, name)) {\n c.root[key].obj[name].modified.value = secret;\n ops[name] = 'modify';\n } else {\n c.root[key].obj[name] = c.lift({});\n c.root[key].obj[name].created = c.lift({value: secret});\n c.root[key].obj[name].modified = c.lift({value: secret});\n ops[name] = 'create';\n }\n }\n c.root[key].obj.log = c.lift(ops);\n }, function () {\n ok(true, \"Reached\");\n n += 1;\n if (10 === n) {\n semaphore.down();\n } else {\n fun(c, n);\n }\n });\n };\n // We use all but one client, and each of those gets 10\n // txns concurrently\n for (x = 1; x < clients.length; x += 1) {\n clients[x].stm.prefix = \"(\" + x + \"): \";\n for (y = 0; y < clientConcurrency; y += 1) {\n semaphore.up();\n fun(clients[x], 0);\n }\n }\n x = clients[0];\n x.atomically(function () {\n if (undefined === x.root[key]) {\n x.retry();\n }\n x.root[key].obj = x.lift({});\n });\n });\n });\n 
}());\n\n});\n"},"new_file":{"kind":"string","value":"test/unittests.js"},"old_contents":{"kind":"string","value":"// atomize-translate unittests.js unittests-compat.js atomize '$(document)' NiceException Error\n\nvar URL = \"http://localhost:9999/atomize\";\n\n\nfunction NiceException() {};\nNiceException.prototype = Error.prototype;\n\nvar niceException = new NiceException();\n\nfunction withAtomize (clientsAry, test) {\n var atomize = new Atomize(URL);\n atomize.onAuthenticated = function () {\n atomize.atomically(function () {\n var key = Date();\n atomize.root[key] = atomize.lift({});\n return key;\n }, function (key) {\n var i;\n for (i = 0; i < clientsAry.length; i += 1) {\n clientsAry[i] = new Atomize(URL);\n if (0 === i) {\n clientsAry[i].onAuthenticated = function () {\n test(key, clientsAry, function () {\n for (i = 0; i < clientsAry.length; i += 1) {\n clientsAry[i].close();\n clientsAry[i] = undefined;\n }\n atomize.atomically(function () {\n delete atomize.root[key];\n }, function () {\n atomize.close();\n });\n });\n };\n } else {\n (function () {\n var j = i - 1;\n clientsAry[i].onAuthenticated = function () {\n clientsAry[j].connect();\n }\n })();\n }\n }\n clientsAry[clientsAry.length - 1].connect();\n });\n };\n atomize.connect();\n}\n\nfunction clients (n) {\n var ary = [];\n ary.length = n;\n return ary;\n}\n\nfunction contAndStart (cont) {\n cont();\n start();\n}\n\nfunction Semaphore (cont) {\n this.count = 0;\n this.cont = cont;\n}\nSemaphore.prototype = {\n fired: false,\n up: function () {\n if (this.fired) {\n throw \"Semaphore Already Fired\";\n }\n this.count += 1;\n },\n down: function () {\n if (this.fired) {\n throw \"Semaphore Already Fired\";\n }\n this.count -= 1;\n if (0 === this.count) {\n this.fired = true;\n this.cont()\n }\n }\n};\n\n$(document).ready(function(){\n\n asyncTest(\"Empty transaction\", 2, function () {\n withAtomize(clients(1), function (key, clients, cont) {\n var c1 = clients[0];\n c1.atomically(function () 
{\n ok(true, \"This txn has no read or writes so should run once\");\n }, function () {\n ok(true, \"The continuation should be run\");\n contAndStart(cont);\n });\n });\n });\n\n asyncTest(\"Await private empty root\", 2, function () {\n withAtomize(clients(1), function (key, clients, cont) {\n var c1 = clients[0];\n c1.atomically(function () {\n if (undefined === c1.root[key]) {\n ok(true, \"We should retry at least once\");\n c1.retry();\n }\n return Object.keys(c1.root[key]).length;\n }, function (fieldCount) {\n strictEqual(fieldCount, 0, \"Root object should be empty\");\n contAndStart(cont);\n });\n });\n });\n\n asyncTest(\"Set Primitive\", 1, function () {\n withAtomize(clients(1), function (key, clients, cont) {\n var c1 = clients[0],\n value = 5;\n c1.atomically(function () {\n if (undefined === c1.root[key]) {\n c1.retry();\n } else if (undefined !== c1.root[key].field) {\n throw \"Found existing field!\";\n }\n c1.root[key].field = value;\n return c1.root[key].field;\n }, function (result) {\n strictEqual(result, value, \"Should have got back value\");\n contAndStart(cont);\n });\n });\n });\n\n asyncTest(\"Set Empty Object\", 1, function () {\n withAtomize(clients(1), function (key, clients, cont) {\n var c1 = clients[0],\n value = {};\n c1.atomically(function () {\n if (undefined === c1.root[key]) {\n c1.retry();\n } else if (undefined !== c1.root[key].field) {\n throw \"Found existing field!\";\n }\n c1.root[key].field = c1.lift(value);\n return c1.root[key].field;\n }, function (result) {\n deepEqual(result, value, \"Should have got back value\");\n contAndStart(cont);\n });\n });\n });\n\n asyncTest(\"Set Complex Object\", 1, function () {\n withAtomize(clients(1), function (key, clients, cont) {\n var c1 = clients[0],\n value = {a: \"hello\", b: true, c: 5, d: {}};\n value.e = value; // add loop\n value.f = value.d; // add non-loop alias\n c1.atomically(function () {\n if (undefined === c1.root[key]) {\n c1.retry();\n } else if (undefined !== 
c1.root[key].field) {\n throw \"Found existing field!\";\n }\n c1.root[key].field = c1.lift(value);\n return c1.root[key].field;\n }, function (result) {\n deepEqual(result, value, \"Should have got back value\");\n contAndStart(cont);\n });\n });\n });\n\n asyncTest(\"Trigger (add field)\", 1, function () {\n withAtomize(clients(2), function (key, clients, cont) {\n var c1 = clients[0],\n c2 = clients[1],\n trigger = \"pop!\";\n\n c1.atomically(function () {\n if (undefined === c1.root[key] ||\n undefined === c1.root[key].trigger) {\n c1.retry();\n }\n return c1.root[key].trigger;\n }, function (result) {\n strictEqual(trigger, result, \"Should have received the trigger\");\n contAndStart(cont);\n });\n\n c2.atomically(function () {\n if (undefined === c2.root[key]) {\n c2.retry();\n }\n if (undefined === c2.root[key].trigger) {\n c2.root[key].trigger = trigger;\n } else {\n throw \"Found existing trigger!\";\n }\n }); // no need for a continuation here\n });\n });\n\n asyncTest(\"Trigger (add and change field)\", 3, function () {\n withAtomize(clients(2), function (key, clients, cont) {\n var c1 = clients[0],\n c2 = clients[1],\n trigger1 = \"pop!\",\n trigger2 = \"!pop\";\n\n c1.atomically(function () {\n if (undefined === c1.root[key] ||\n undefined === c1.root[key].trigger) {\n c1.retry();\n }\n if (c1.root[key].trigger == trigger1) {\n c1.root[key].trigger = trigger2;\n return true;\n } else {\n return false;\n }\n }, function (success) {\n ok(success, \"Reached 1\");\n });\n\n c2.atomically(function () {\n if (undefined === c2.root[key]) {\n c2.retry();\n }\n if (undefined === c2.root[key].trigger) {\n c2.root[key].trigger = trigger1;\n } else {\n throw \"Found existing trigger!\";\n }\n }, function () {\n ok(true, \"Reached 2\");\n c2.atomically(function () {\n if (trigger2 != c2.root[key].trigger) {\n c2.retry();\n }\n }, function () {\n ok(true, \"Reached 3\");\n contAndStart(cont);\n });\n });\n });\n });\n\n asyncTest(\"Trigger (add and remove field)\", 
3, function () {\n withAtomize(clients(2), function (key, clients, cont) {\n var c1 = clients[0],\n c2 = clients[1],\n trigger = \"pop!\";\n\n c1.atomically(function () {\n if (undefined === c1.root[key] ||\n undefined === c1.root[key].trigger) {\n c1.retry();\n }\n delete c1.root[key].trigger;\n }, function () {\n ok(true, \"Reached 1\");\n });\n\n c2.atomically(function () {\n if (undefined === c2.root[key]) {\n c2.retry();\n }\n if (undefined === c2.root[key].trigger) {\n c2.root[key].trigger = trigger;\n } else {\n throw \"Found existing trigger!\";\n }\n }, function () {\n ok(true, \"Reached 2\");\n c2.atomically(function () {\n if (undefined !== c2.root[key].trigger) {\n c2.retry();\n }\n }, function () {\n ok(true, \"Reached 3\");\n contAndStart(cont);\n });\n });\n });\n });\n\n asyncTest(\"Send Primitive\", 1, function () {\n withAtomize(clients(2), function (key, clients, cont) {\n var c1 = clients[0],\n c2 = clients[1],\n value = 5;\n c1.atomically(function () {\n if (undefined === c1.root[key]) {\n c1.retry();\n } else if (undefined !== c1.root[key].field) {\n throw \"Found existing field!\";\n }\n c1.root[key].field = value;\n });\n c2.atomically(function () {\n if (undefined === c2.root[key] ||\n undefined === c2.root[key].field) {\n c2.retry();\n }\n return c2.root[key].field;\n }, function (result) {\n strictEqual(result, value, \"Should have got back value\");\n contAndStart(cont);\n });\n });\n });\n\n asyncTest(\"Send Empty Object\", 1, function () {\n withAtomize(clients(2), function (key, clients, cont) {\n var c1 = clients[0],\n c2 = clients[1],\n value = {};\n c1.atomically(function () {\n if (undefined === c1.root[key]) {\n c1.retry();\n } else if (undefined !== c1.root[key].field) {\n throw \"Found existing field!\";\n }\n c1.root[key].field = c1.lift(value);\n });\n c2.atomically(function () {\n if (undefined === c2.root[key] ||\n undefined === c2.root[key].field) {\n c2.retry();\n }\n return c2.root[key].field;\n }, function (result) {\n 
deepEqual(result, value, \"Should have got back value\");\n contAndStart(cont);\n });\n });\n });\n\n asyncTest(\"Send Complex Object\", 1, function () {\n withAtomize(clients(2), function (key, clients, cont) {\n var c1 = clients[0],\n c2 = clients[1],\n value = {a: \"hello\", b: true, c: 5, d: {}};\n value.e = value; // add loop\n value.f = value.d; // add non-loop alias\n c1.atomically(function () {\n if (undefined === c1.root[key]) {\n c1.retry();\n } else if (undefined !== c1.root[key].field) {\n throw \"Found existing field!\";\n }\n c1.root[key].field = c1.lift(value);\n });\n c2.atomically(function () {\n if (undefined === c2.root[key] ||\n undefined === c2.root[key].field) {\n c2.retry();\n }\n return c2.root[key].field;\n }, function (result) {\n deepEqual(result, value, \"Should have got back value\");\n contAndStart(cont);\n });\n });\n });\n\n // For some reason, the current Proxy thing suggests all\n // descriptors should be configurable. Thus we don't test for the\n // 'configurable' meta-property here. 
This issue should go away\n // once \"direct proxies\" arrive.\n asyncTest(\"Keys, Enumerate, etc\", 10, function () {\n withAtomize(clients(2), function (key, clients, cont) {\n var c1 = clients[0],\n c2 = clients[1],\n descriptors = {a: {value: 1,\n writable: true,\n enumerable: true},\n b: {value: 2,\n writable: false,\n enumerable: true},\n c: {value: 3,\n writable: true,\n enumerable: false},\n d: {value: 4,\n writable: false,\n enumerable: false}};\n\n c1.atomically(function () {\n if (undefined === c1.root[key]) {\n c1.retry();\n }\n var keys = Object.keys(descriptors),\n x, field, descriptor;\n for (x = 0; x < keys.length; x += 1) {\n field = keys[x];\n descriptor = descriptors[field];\n Object.defineProperty(c1.root[key], field, descriptor);\n }\n c1.root[key].done = true;\n });\n c2.atomically(function () {\n if (undefined === c2.root[key] ||\n undefined === c2.root[key].done) {\n c2.retry();\n }\n delete c2.root[key].done;\n var keys = Object.keys(c2.root[key]),\n names = Object.getOwnPropertyNames(c2.root[key]),\n enumerable = [],\n descriptors = {},\n field, x;\n for (field in c2.root[key]) {\n enumerable.push(field);\n }\n for (x = 0; x < names.length; x += 1) {\n field = names[x];\n descriptors[field] = Object.getOwnPropertyDescriptor(c2.root[key], field);\n delete descriptors[field].configurable; // see comment above\n }\n return {keys: keys.sort(),\n names: names.sort(),\n enumerable: enumerable.sort(),\n descriptors: descriptors,\n hasA: 'a' in c2.root[key],\n hasC: 'c' in c2.root[key],\n hasZ: 'z' in c2.root[key],\n hasOwnA: ({}).hasOwnProperty.call(c2.root[key], 'a'),\n hasOwnC: ({}).hasOwnProperty.call(c2.root[key], 'c'),\n hasOwnZ: ({}).hasOwnProperty.call(c2.root[key], 'z')};\n }, function (result) {\n deepEqual(result.keys, ['a', 'b'],\n \"Keys should have found enumerable fields\");\n deepEqual(result.enumerable, ['a', 'b'],\n \"Enumeration should have found enumerable fields\");\n deepEqual(result.names, ['a', 'b', 'c', 'd'],\n \"Should 
have found field names 'a' to 'd'\");\n deepEqual(result.descriptors, descriptors,\n \"Should have got same descriptors back\");\n ok(result.hasA, \"Should have found field 'a'\");\n ok(result.hasC, \"Should have found field 'c'\");\n ok(! result.hasZ, \"Should not have found field 'z'\");\n ok(result.hasOwnA, \"Should have found own field 'a'\");\n ok(result.hasOwnC, \"Should have found own field 'c'\");\n ok(! result.hasOwnZ, \"Should not have found own field 'z'\");\n contAndStart(cont);\n });\n });\n });\n\n asyncTest(\"Triggers: Multiple concurrent retries, multiple clients\", 6, function () {\n withAtomize(clients(2), function (key, clients, cont) {\n var c1 = clients[0],\n c2 = clients[1];\n c1.atomically(function () {\n if (undefined === c1.root[key] ||\n undefined === c1.root[key].ready) {\n c1.retry();\n }\n c1.root[key].ready = ! c1.root[key].ready; // 2. Flip it false to true\n }, function () {\n ok(true, \"Reached 1\");\n });\n // We do the 'gone' thing because otherwise c1's txns can\n // create and remove it, before c2 spots its\n // existence. I.e. classic race condition.\n c1.atomically(function () {\n if (undefined === c1.root[key] ||\n undefined === c1.root[key].ready ||\n ! c1.root[key].ready) {\n c1.retry();\n }\n delete c1.root[key].ready; // 3. Delete it\n c1.root[key].gone = true;\n }, function () {\n ok(true, \"Reached 2\");\n });\n c2.atomically(function () {\n if (undefined === c2.root[key] ||\n (undefined === c2.root[key].ready &&\n undefined === c2.root[key].gone)) {\n c2.retry(); // A. Await its existence\n }\n }, function () {\n ok(true, \"Reached 3\");\n c2.atomically(function () {\n if (Object.hasOwnProperty.call(c2.root[key], 'ready')) {\n c2.retry(); // B. Await its disappearance\n }\n ok(c2.root[key].gone, \"If 'ready' has gone, 'gone' must be truth\");\n delete c2.root[key].gone;\n }, function () {\n ok(true, \"Reached 4\");\n contAndStart(cont); // C. 
All done\n });\n });\n c2.atomically(function () {\n if (undefined === c2.root[key]) {\n c2.retry();\n }\n c2.root[key].ready = false; // 1. Create it as false\n }, function () {\n ok(true, \"Reached 5\");\n });\n });\n });\n\n asyncTest(\"OrElse\", 4, function () {\n withAtomize(clients(2), function (key, clients, cont) {\n var c1 = clients[0],\n c2 = clients[1],\n fun;\n fun = function (sum) {\n ok(true, \"Reached 1\"); // should reach this 4 times\n if (10 === sum) { // 10 === 1+2+3+4\n contAndStart(cont);\n return;\n }\n c1.orElse(\n [function () {\n if (undefined === c1.root[key] ||\n undefined === c1.root[key].a) {\n c1.retry();\n }\n c1.root[key].b = c1.root[key].a + 2;\n delete c1.root[key].a;\n return c1.root[key].b;\n }, function () {\n if (undefined === c1.root[key] ||\n undefined === c1.root[key].b) {\n c1.retry();\n }\n c1.root[key].c = c1.root[key].b + 3;\n delete c1.root[key].b;\n return c1.root[key].c;\n }, function () {\n if (undefined === c1.root[key] ||\n undefined === c1.root[key].c) {\n c1.retry();\n }\n c1.root[key].d = c1.root[key].c + 4;\n delete c1.root[key].c;\n return c1.root[key].d;\n }], fun);\n };\n fun(0);\n c2.atomically(function () {\n if (undefined === c2.root[key]) {\n c2.retry();\n }\n c2.root[key].a = 1;\n });\n });\n });\n\n asyncTest(\"OrElse - observing order\", 4, function () {\n // Same as before, but drop the deletes, and invert the order\n // of the orElse statements. 
As its deterministic choice,\n // should do the same as before.\n withAtomize(clients(2), function (key, clients, cont) {\n var c1 = clients[0],\n c2 = clients[1],\n fun;\n fun = function (sum) {\n ok(true, \"Reached 1\"); // should reach this 4 times\n if (10 === sum) { // 10 === 1+2+3+4\n contAndStart(cont);\n return;\n }\n c1.orElse(\n [function () {\n if (undefined === c1.root[key] ||\n undefined === c1.root[key].c) {\n c1.retry();\n }\n c1.root[key].d = c1.root[key].c + 4;\n return c1.root[key].d;\n }, function () {\n if (undefined === c1.root[key] ||\n undefined === c1.root[key].b) {\n c1.retry();\n }\n c1.root[key].c = c1.root[key].b + 3;\n return c1.root[key].c;\n }, function () {\n if (undefined === c1.root[key] ||\n undefined === c1.root[key].a) {\n c1.retry();\n }\n c1.root[key].b = c1.root[key].a + 2;\n return c1.root[key].b;\n }], fun);\n };\n fun(0);\n c2.atomically(function () {\n if (undefined === c2.root[key]) {\n c2.retry();\n }\n c2.root[key].a = 1;\n });\n });\n });\n\n (function () {\n var clientCount = 6,\n clientConcurrency = 10,\n txnCount = 10;\n\n asyncTest(\"Rampaging Transactions 1 (this takes a while)\",\n ((clientCount - 1) * clientConcurrency * txnCount) -1, function () {\n withAtomize(clients(clientCount), function (key, clients, cont) {\n var semaphore = new Semaphore(function () { contAndStart(cont); }),\n fun, x, y;\n fun = function (c) {\n c.atomically(function () {\n if (undefined === c.root[key] ||\n undefined === c.root[key].obj) {\n c.retry();\n }\n var keys = Object.keys(c.root[key].obj),\n max = 0,\n x, field, n, obj;\n for (x = 0; x < keys.length; x += 1) {\n field = parseInt(keys[x]);\n max = field > max ? 
field : max;\n if (undefined === n) {\n n = c.root[key].obj[field].num;\n if (0 === n) {\n return n;\n }\n } else if (n !== c.root[key].obj[field].num) {\n throw (\"All fields should have the same number: \" +\n n + \" vs \" + c.root[key].obj[field].num);\n }\n if (0.75 < Math.random()) {\n obj = c.lift({});\n obj.num = n;\n c.root[key].obj[field] = obj;\n }\n c.root[key].obj[field].num -= 1;\n }\n n -= 1;\n max += 1;\n if (0.75 < Math.random()) {\n c.root[key].obj[max] = c.lift({num: n});\n delete c.root[key].obj[keys[0]];\n }\n return n;\n }, function (n) {\n if (n > 0) {\n ok(true, \"Reached\");\n fun(c);\n } else {\n semaphore.down();\n }\n });\n };\n // We use all but one client, and each of those gets 10\n // txns concurrently\n for (x = 1; x < clients.length; x += 1) {\n clients[x].stm.prefix = \"(\" + x + \"): \";\n for (y = 0; y < clientConcurrency; y += 1) {\n semaphore.up();\n fun(clients[x]);\n }\n }\n x = clients[0];\n x.atomically(function () {\n if (undefined === x.root[key]) {\n x.retry();\n }\n var obj = x.lift({});\n for (y = 0; y < 5; y += 1) {\n obj[y] = x.lift({num: (clientCount - 1) * clientConcurrency * txnCount});\n }\n x.root[key].obj = obj;\n });\n });\n });\n }());\n\n (function () {\n var clientCount = 6,\n clientConcurrency = 5,\n txnCount = 10;\n\n asyncTest(\"Rampaging Transactions 2 (this takes a while)\",\n (clientCount - 1) * clientConcurrency * txnCount, function () {\n withAtomize(clients(clientCount), function (key, clients, cont) {\n var semaphore = new Semaphore(function () { contAndStart(cont); }),\n fun;\n fun = function (c, n) {\n c.atomically(function () {\n if (undefined === c.root[key] ||\n undefined === c.root[key].obj) {\n c.retry();\n }\n var ops, names, secret, x, name, op;\n\n // First verify the old thing\n ops = c.root[key].obj.log;\n if (undefined !== ops) {\n secret = ops.secret;\n names = Object.keys(ops);\n for (x = 0; x < names.length; x += 1) {\n name = names[x];\n if ('secret' === name) {\n continue;\n } 
else if ('delete' === ops[name]) {\n if (({}).hasOwnProperty.call(c.root[key].obj, name)) {\n throw (\"Found field which should be deleted: \" + name)\n }\n } else if ('modify' === ops[name]) {\n if (! ({}).hasOwnProperty.call(c.root[key].obj, name)) {\n throw (\"Failed to find field: \" + name);\n }\n if (secret !== c.root[key].obj[name].modified) {\n throw (\"Found the wrong modified value in field: \" + name);\n }\n } else if ('create' === ops[name]) {\n if (! ({}).hasOwnProperty.call(c.root[key].obj, name)) {\n throw (\"Failed to find field: \" + name);\n }\n if (secret !== c.root[key].obj[name].created) {\n throw (\"Found the wrong created value in field: \" + name);\n }\n } else {\n throw (\"Found unknown op: \" + ops[name]);\n }\n }\n }\n\n secret = Math.random();\n ops = {secret: secret};\n for (x = 0; x < 10; x += 1) {\n name = Math.round(Math.random() * 100);\n op = Math.random();\n if (op > 0.9) {\n delete c.root[key].obj[name];\n ops[name] = 'delete';\n } else if (op > 0.4 && undefined !== c.root[key].obj[name]) {\n c.root[key].obj[name].modified = secret;\n ops[name] = 'modify';\n } else {\n c.root[key].obj[name] = c.lift({created: secret,\n modified: secret});\n ops[name] = 'create';\n }\n }\n c.root[key].obj.log = c.lift(ops);\n }, function () {\n ok(true, \"Reached\");\n n += 1;\n if (10 === n) {\n semaphore.down();\n } else {\n fun(c, n);\n }\n });\n };\n // We use all but one client, and each of those gets 10\n // txns concurrently\n for (x = 1; x < clients.length; x += 1) {\n clients[x].stm.prefix = \"(\" + x + \"): \";\n for (y = 0; y < clientConcurrency; y += 1) {\n semaphore.up();\n fun(clients[x], 0);\n }\n }\n x = clients[0];\n x.atomically(function () {\n if (undefined === x.root[key]) {\n x.retry();\n }\n x.root[key].obj = x.lift({});\n });\n });\n });\n }());\n\n});\n"},"message":{"kind":"string","value":"Some cosmetic, some param tuning, and use a few more objects to ensure the test hits the previously existing bug (test against server 
revision 42768119819b to provoke bug)\n\n--HG--\nextra : signature : gnupg%3AiQEcBAABAgAGBQJPaLaGAAoJEO%2BewIrBgcV35AMIAIEjVhZl0XWfnQBLGfxqQjVPYlOy3g6vDtA5rmJHDua8IlLAeN60ht9V352q52xvl/JbL5XL2cxFzcUvHnU2fYZyY8N5dx9L39KsTeiZeSibcLvXTw1lnUMYDVoNdwfi1/nBiWCvIC%2B7zkC0Nsix11eD9Dimnv%2BbYLpHC3MBGNy8o86Lb06sghbblqYwvPk4wAQArkjCBDdv9MAl4c/qRVbtb/bPrPqkeGtkZ/pf93NmRT6U%2B01UEpOClyQcHkI%2BIRU/3SgT//3qzJ6ZgQd0VbtdxEQ%2B3LLnxN0kWeTje9s43o2V6Mx%2BrX6M2dl8uxpVBjRN496HNfxsaCzJysfeXy0%3D\n"},"old_file":{"kind":"string","value":"test/unittests.js"},"subject":{"kind":"string","value":"Some cosmetic, some param tuning, and use a few more objects to ensure the test hits the previously existing bug (test against server revision 42768119819b to provoke bug)"},"git_diff":{"kind":"string","value":"est/unittests.js\n c.retry();\n }\n var keys = Object.keys(c.root[key].obj),\n max = 0,\n x, field, n, obj;\n max = 0,\n x, field, n, obj;\n for (x = 0; x < keys.length; x += 1) {\n field = parseInt(keys[x]);\n max = field > max ? field : max;\n \n (function () {\n var clientCount = 6,\n clientConcurrency = 5,\n clientConcurrency = 6,\n txnCount = 10;\n \n asyncTest(\"Rampaging Transactions 2 (this takes a while)\",\n if (! ({}).hasOwnProperty.call(c.root[key].obj, name)) {\n throw (\"Failed to find field: \" + name);\n }\n if (secret !== c.root[key].obj[name].modified) {\n if (secret !== c.root[key].obj[name].modified.value) {\n throw (\"Found the wrong modified value in field: \" + name);\n }\n } else if ('create' === ops[name]) {\n if (! 
({}).hasOwnProperty.call(c.root[key].obj, name)) {\n throw (\"Failed to find field: \" + name);\n }\n if (secret !== c.root[key].obj[name].created) {\n if (secret !== c.root[key].obj[name].created.value) {\n throw (\"Found the wrong created value in field: \" + name);\n }\n } else {\n \n secret = Math.random();\n ops = {secret: secret};\n for (x = 0; x < 10; x += 1) {\n name = Math.round(Math.random() * 100);\n for (x = 0; x < 20; x += 1) {\n name = Math.round(Math.random() * 50);\n op = Math.random();\n if (op > 0.9) {\n delete c.root[key].obj[name];\n ops[name] = 'delete';\n } else if (op > 0.4 && undefined !== c.root[key].obj[name]) {\n c.root[key].obj[name].modified = secret;\n } else if (op > 0.1 && ({}).hasOwnProperty.call(c.root[key].obj, name)) {\n c.root[key].obj[name].modified.value = secret;\n ops[name] = 'modify';\n } else {\n c.root[key].obj[name] = c.lift({created: secret,\n modified: secret});\n c.root[key].obj[name] = c.lift({});\n c.root[key].obj[name].created = c.lift({value: secret});\n c.root[key].obj[name].modified = c.lift({value: secret});\n ops[name] = 'create';\n }\n }"}}},{"rowIdx":2032,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"aa3129a504d6a0bb08031eb29253117da4608bb5"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"boalang/compiler,boalang/compiler,boalang/compiler,boalang/compiler,boalang/compiler"},"new_contents":{"kind":"string","value":"/*\n * Copyright 2017, Hridesh Rajan, Ganesha Upadhyaya, Ramanathan Ramu\n * and Iowa State University of Science and Technology\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, 
software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage boa.functions;\n\nimport java.util.*;\n\nimport boa.graphs.cfg.CFG;\nimport boa.types.Ast.Expression;\nimport boa.types.Ast.Expression.ExpressionKind;\nimport boa.types.Ast.Method;\nimport boa.types.Ast.Variable;\nimport boa.types.Control.CFGNode;\n\n/**\n * Boa functions for working with control flow graphs.\n *\n * @author ganeshau\n * @author rramu\n */\npublic class BoaGraphIntrinsics {\n\t@FunctionSpec(name = \"getcfg\", returnType = \"CFG\", formalParameters = { \"Method\" })\n\tpublic static CFG getcfg(final Method method) {\n\t\tfinal CFG cfg = new CFG(method);\n\t\tcfg.astToCFG();\n\t\treturn cfg;\n\t}\n\n\t@FunctionSpec(name = \"get_nodes_with_definition\", returnType = \"set of string\", formalParameters = { \"CFGNode\" })\n\tpublic static HashSet getNodesWithDefinition(final CFGNode node) {\n\t\tfinal HashSet vardef = new HashSet();\n\t\tif (node.getExpression() != null) {\n\t\t\tif (node.getExpression().getKind() == ExpressionKind.VARDECL || node.getExpression().getKind() == ExpressionKind.ASSIGN) {\n\t\t\t\tvardef.add(String.valueOf(node.getId()));\n\t\t\t}\n\t\t}\n\t\treturn vardef;\n\t}\n\n\t@FunctionSpec(name = \"get_variable_killed\", returnType = \"set of string\", formalParameters = {\"CFG\", \"CFGNode\" })\n\tpublic static HashSet getVariableKilled(final boa.types.Control.CFG cfg, final CFGNode node) {\n\t\tfinal HashSet varkilled = new HashSet();\n\t\tString vardef = \"\";\n\n\t\tif (node.getExpression() != null) {\n\t\t\tif (node.getExpression().getKind() == ExpressionKind.VARDECL) {\n\t\t\t\tvardef = node.getExpression().getVariableDeclsList().get(0).getName();\n\t\t\t}\n\t\t\telse if (node.getExpression().getKind() == ExpressionKind.ASSIGN) {\n\t\t\t\tvardef = 
node.getExpression().getExpressionsList().get(0).getVariable();\n\t\t\t}\n\t\t\telse {\n\t\t\t\treturn varkilled;\n\t\t\t}\n\n\t\t\tfor (final CFGNode tnode : cfg.getNodesList()) {\n\t\t\t\tif (tnode.getExpression() != null && tnode.getId() != node.getId()) {\n\t\t\t\t\tif (tnode.getExpression().getKind() == ExpressionKind.VARDECL) {\n\t\t\t\t\t\tif (tnode.getExpression().getVariableDeclsList().get(0).getName().equals(vardef)) {\n\t\t\t\t\t\t\tvarkilled.add(String.valueOf(tnode.getId()));\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t\telse if (tnode.getExpression().getKind() == ExpressionKind.ASSIGN) {\n\t\t\t\t\t\tif (tnode.getExpression().getExpressionsList().get(0).getVariable().equals(vardef)) {\n\t\t\t\t\t\t\tvarkilled.add(String.valueOf(tnode.getId()));\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\treturn varkilled;\n\t}\n\n\t@FunctionSpec(name = \"get_variable_def\", returnType = \"set of string\", formalParameters = { \"CFGNode\" })\n\tpublic static HashSet getVariableDef(final CFGNode node) {\n\t\tfinal HashSet vardef = new HashSet();\n\t\tif (node.getExpression() != null) {\n\t\t\tif (node.getExpression().getKind() == ExpressionKind.VARDECL) {\n\t\t\t\tvardef.add(node.getExpression().getVariableDeclsList().get(0).getName());\n\t\t\t}\n\t\t\telse if (node.getExpression().getKind() == ExpressionKind.ASSIGN) {\n\t\t\t\tvardef.add(node.getExpression().getExpressionsList().get(0).getVariable());\n\t\t\t}\n\t\t}\n\t\treturn vardef;\n\t}\n\n\t@FunctionSpec(name = \"get_variable_used\", returnType = \"set of string\", formalParameters = { \"CFGNode\" })\n\tpublic static HashSet getVariableUsed(final CFGNode node) {\n\t\tfinal HashSet varused = new HashSet();\n\t\tif (node.getExpression() != null) {\n\t\t\ttraverseExpr(varused,node.getExpression());\n\t\t}\n\t\treturn varused;\n\t}\n\n\tpublic static void traverseExpr(final HashSet varused, final Expression expr) {\n\t\tif (expr.getVariable() != null) 
{\n\t\t\tvarused.add(expr.getVariable());\n\t\t}\n\t\tfor (final Expression exprs : expr.getExpressionsList()) {\n\t\t\ttraverseExpr(varused, exprs);\n\t\t}\n\t\tfor (final Variable vardecls : expr.getVariableDeclsList()) {\n\t\t\ttraverseVarDecls(varused, vardecls);\n\t\t}\n\t\tfor (final Expression methodexpr : expr.getMethodArgsList()) {\n\t\t\ttraverseExpr(varused, methodexpr);\n\t\t}\n\t}\n\n\tpublic static void traverseVarDecls(final HashSet varused, final Variable vardecls) {\n\t\tif (vardecls.getInitializer() != null) {\n\t\t\ttraverseExpr(varused, vardecls.getInitializer());\n\t\t}\n\t}\n\n\tprivate static String dotEscape(final String s) {\n\t\tfinal String escaped = s.replaceAll(\"\\\\\\\\\", \"\\\\\\\\\\\\\\\\\").replaceAll(\"\\\"\", \"\\\\\\\\\\\"\").replaceAll(\"\\n\", \"\\\\\\\\l\").replaceAll(\"\\r\", \"\\\\\\\\l\");\n\t\tif (escaped.indexOf(\"\\\\l\") != -1 && !escaped.endsWith(\"\\\\l\"))\n\t\t\treturn escaped + \"\\\\l\";\n\t\treturn escaped;\n\t}\n\n\t@FunctionSpec(name = \"dot\", returnType = \"string\", formalParameters = { \"CFG\" })\n\tpublic static String cfgToDot(final CFG cfg) {\n\t\tfinal StringBuilder str = new StringBuilder();\n\t\tstr.append(\"digraph G {\\n\");\n\n\t\tfor (final boa.graphs.cfg.CFGNode n : cfg.getNodes()) {\n\t\t\tfinal String shape;\n\t\t\tswitch (n.getKind()) {\n\t\t\t\tcase CONTROL:\n\t\t\t\t\tshape = \"shape=diamond\";\n\t\t\t\t\tbreak;\n\t\t\t\tcase METHOD:\n\t\t\t\t\tshape = \"shape=parallelogram\";\n\t\t\t\t\tbreak;\n\t\t\t\tcase OTHER:\n\t\t\t\t\tshape = \"shape=box\";\n\t\t\t\t\tbreak;\n\t\t\t\tcase ENTRY:\n\t\t\t\tdefault:\n\t\t\t\t\tshape = \"shape=ellipse\";\n\t\t\t\t\tbreak;\n\t\t\t}\n\n\t\t\tif (n.hasStmt())\n\t\t\t\tstr.append(\"\\t\" + n.getId() + \"[\" + shape + \",label=\\\"\" + dotEscape(boa.functions.BoaAstIntrinsics.prettyprint(n.getStmt())) + \"\\\"]\\n\");\n\t\t\telse if (n.hasExpr())\n\t\t\t\tstr.append(\"\\t\" + n.getId() + \"[\" + shape + \",label=\\\"\" + 
dotEscape(boa.functions.BoaAstIntrinsics.prettyprint(n.getExpr())) + \"\\\"]\\n\");\n\t\t\telse if (n.getKind() == boa.types.Control.CFGNode.CFGNodeType.ENTRY)\n\t\t\t\tstr.append(\"\\t\" + n.getId() + \"[\" + shape + \",label=\\\"\" + n.getName() + \"\\\"]\\n\");\n\t\t\telse\n\t\t\t\tstr.append(\"\\t\" + n.getId() + \"[\" + shape + \"]\\n\");\n\t\t}\n\n\t\tfinal boa.runtime.BoaAbstractTraversal printGraph = new boa.runtime.BoaAbstractTraversal(false, false) {\n\t\t\tprotected Object preTraverse(final boa.graphs.cfg.CFGNode node) throws Exception {\n\t\t\t\tfinal java.util.Set edges = node.getOutEdges();\n\t\t\t\tfor (final boa.graphs.cfg.CFGEdge e : node.getOutEdges()) {\n\t\t\t\t\tstr.append(\"\\t\" + node.getId() + \" -> \" + e.getDest().getId());\n\t\t\t\t\tif (!(e.label() == null || e.label().equals(\".\") || e.label().equals(\"\")))\n\t\t\t\t\t\tstr.append(\" [label=\\\"\" + dotEscape(e.label()) + \"\\\"]\");\n\t\t\t\t\tstr.append(\"\\n\");\n\t\t\t\t}\n\t\t\t\treturn null;\n\t\t\t}\n\n\t\t\t@Override\n\t\t\tpublic void traverse(final boa.graphs.cfg.CFGNode node, boolean flag) throws Exception {\n\t\t\t\tif (flag) {\n\t\t\t\t\tcurrentResult = preTraverse(node);\n\t\t\t\t\toutputMapObj.put(node.getId(), currentResult);\n\t\t\t\t} else {\n\t\t\t\t\toutputMapObj.put(node.getId(), preTraverse(node));\n\t\t\t\t}\n\t\t\t}\n\t\t};\n\n\t\ttry {\n\t\t\tprintGraph.traverse(cfg, boa.types.Graph.Traversal.TraversalDirection.FORWARD, boa.types.Graph.Traversal.TraversalKind.DFS);\n\t\t} catch (final Exception e) {\n\t\t\t// do nothing\n\t\t}\n\n\t\tstr.append(\"}\");\n\n\t\treturn str.toString();\n\t}\n}\n"},"new_file":{"kind":"string","value":"src/java/boa/functions/BoaGraphIntrinsics.java"},"old_contents":{"kind":"string","value":"/*\n * Copyright 2017, Hridesh Rajan, Ganesha Upadhyaya, Ramanathan Ramu\n * and Iowa State University of Science and Technology\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in 
compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage boa.functions;\n\nimport java.util.*;\n\nimport boa.graphs.cfg.CFG;\nimport boa.types.Ast.Expression;\nimport boa.types.Ast.Expression.ExpressionKind;\nimport boa.types.Ast.Method;\nimport boa.types.Ast.Variable;\nimport boa.types.Control.CFGNode;\n\n/**\n * Boa functions for working with control flow graphs.\n *\n * @author ganeshau\n * @author rramu\n */\npublic class BoaGraphIntrinsics {\n\t@FunctionSpec(name = \"getcfg\", returnType = \"CFG\", formalParameters = { \"Method\" })\n\tpublic static CFG getcfg(final Method method) {\n\t\tfinal CFG cfg = new CFG(method);\n\t\tcfg.astToCFG();\n\t\treturn cfg;\n\t}\n\n\t@FunctionSpec(name = \"get_nodes_with_definition\", returnType = \"set of string\", formalParameters = { \"CFGNode\" })\n\tpublic static HashSet getNodesWithDefinition(final CFGNode node) {\n\t\tfinal HashSet vardef = new HashSet();\n\t\tif (node.getExpression() != null) {\n\t\t\tif (node.getExpression().getKind() == ExpressionKind.VARDECL || node.getExpression().getKind() == ExpressionKind.ASSIGN) {\n\t\t\t\tvardef.add(String.valueOf(node.getId()));\n\t\t\t}\n\t\t}\n\t\treturn vardef;\n\t}\n\n\t@FunctionSpec(name = \"get_variable_killed\", returnType = \"set of string\", formalParameters = {\"CFG\", \"CFGNode\" })\n\tpublic static HashSet getVariableKilled(final boa.types.Control.CFG cfg, final CFGNode node) {\n\t\tfinal HashSet varkilled = new HashSet();\n\t\tString vardef = \"\";\n\n\t\tif (node.getExpression() != null) {\n\t\t\tif (node.getExpression().getKind() == ExpressionKind.VARDECL) 
{\n\t\t\t\tvardef = node.getExpression().getVariableDeclsList().get(0).getName();\n\t\t\t}\n\t\t\telse if (node.getExpression().getKind() == ExpressionKind.ASSIGN) {\n\t\t\t\tvardef = node.getExpression().getExpressionsList().get(0).getVariable();\n\t\t\t}\n\t\t\telse {\n\t\t\t\treturn varkilled;\n\t\t\t}\n\n\t\t\tfor (final CFGNode tnode : cfg.getNodesList()) {\n\t\t\t\tif (tnode.getExpression() != null && tnode.getId() != node.getId()) {\n\t\t\t\t\tif (tnode.getExpression().getKind() == ExpressionKind.VARDECL) {\n\t\t\t\t\t\tif (tnode.getExpression().getVariableDeclsList().get(0).getName().equals(vardef)) {\n\t\t\t\t\t\t\tvarkilled.add(String.valueOf(tnode.getId()));\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t\telse if (tnode.getExpression().getKind() == ExpressionKind.ASSIGN) {\n\t\t\t\t\t\tif (tnode.getExpression().getExpressionsList().get(0).getVariable().equals(vardef)) {\n\t\t\t\t\t\t\tvarkilled.add(String.valueOf(tnode.getId()));\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\treturn varkilled;\n\t}\n\n\t@FunctionSpec(name = \"get_variable_def\", returnType = \"set of string\", formalParameters = { \"CFGNode\" })\n\tpublic static HashSet getVariableDef(final CFGNode node) {\n\t\tfinal HashSet vardef = new HashSet();\n\t\tif (node.getExpression() != null) {\n\t\t\tif (node.getExpression().getKind() == ExpressionKind.VARDECL) {\n\t\t\t\tvardef.add(node.getExpression().getVariableDeclsList().get(0).getName());\n\t\t\t}\n\t\t\telse if (node.getExpression().getKind() == ExpressionKind.ASSIGN) {\n\t\t\t\tvardef.add(node.getExpression().getExpressionsList().get(0).getVariable());\n\t\t\t}\n\t\t}\n\t\treturn vardef;\n\t}\n\n\t@FunctionSpec(name = \"get_variable_used\", returnType = \"set of string\", formalParameters = { \"CFGNode\" })\n\tpublic static HashSet getVariableUsed(final CFGNode node) {\n\t\tfinal HashSet varused = new HashSet();\n\t\tif (node.getExpression() != null) {\n\t\t\ttraverseExpr(varused,node.getExpression());\n\t\t}\n\t\treturn 
varused;\n\t}\n\n\tpublic static void traverseExpr(final HashSet varused, final Expression expr) {\n\t\tif (expr.getVariable() != null) {\n\t\t\tvarused.add(expr.getVariable());\n\t\t}\n\t\tfor (final Expression exprs : expr.getExpressionsList()) {\n\t\t\ttraverseExpr(varused, exprs);\n\t\t}\n\t\tfor (final Variable vardecls : expr.getVariableDeclsList()) {\n\t\t\ttraverseVarDecls(varused, vardecls);\n\t\t}\n\t\tfor (final Expression methodexpr : expr.getMethodArgsList()) {\n\t\t\ttraverseExpr(varused, methodexpr);\n\t\t}\n\t}\n\n\tpublic static void traverseVarDecls(final HashSet varused, final Variable vardecls) {\n\t\tif (vardecls.getInitializer() != null) {\n\t\t\ttraverseExpr(varused, vardecls.getInitializer());\n\t\t}\n\t}\n\n\tpublic static String cfgToDot(final CFG cfg) {\n\t\tfinal StringBuilder str = new StringBuilder();\n\t\tstr.append(\"digraph G {\\n\");\n\n\t\tfinal boa.runtime.BoaAbstractTraversal printGraph = new boa.runtime.BoaAbstractTraversal(false, false) {\n\t\t\tprotected Object preTraverse(final boa.graphs.cfg.CFGNode node) throws Exception {\n\t\t\t\tfinal java.util.List succs = node .getSuccessorsList();\n\t\t\t\tfor (long i = 0; i < succs .size(); i++) {\n\t\t\t\t\tif ((succs.get((int)(i)) != null)) {\n\t\t\t\t\t\tstr.append(\"\\t\" + node.getId() + \" -> \" + succs.get((int)(i)).getId() + \"\\n\");\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\treturn null;\n\t\t\t}\n\n\t\t\t@Override\n\t\t\tpublic void traverse(final boa.graphs.cfg.CFGNode node, boolean flag) throws Exception {\n\t\t\t\tif (flag) {\n\t\t\t\t\tcurrentResult = preTraverse(node);\n\t\t\t\t\toutputMapObj.put(node.getId(), currentResult);\n\t\t\t\t} else {\n\t\t\t\t\toutputMapObj.put(node.getId(), preTraverse(node));\n\t\t\t\t}\n\t\t\t}\n\t\t};\n\n\t\ttry {\n\t\t\tprintGraph.traverse(cfg, boa.types.Graph.Traversal.TraversalDirection.FORWARD, boa.types.Graph.Traversal.TraversalKind.DFS);\n\t\t} catch (final Exception e) {\n\t\t\t// do nothing\n\t\t}\n\n\t\tstr.append(\"}\");\n\n\t\treturn 
str.toString();\n\t}\n}\n"},"message":{"kind":"string","value":"add node/edge labels to dot() output\n"},"old_file":{"kind":"string","value":"src/java/boa/functions/BoaGraphIntrinsics.java"},"subject":{"kind":"string","value":"add node/edge labels to dot() output"},"git_diff":{"kind":"string","value":"rc/java/boa/functions/BoaGraphIntrinsics.java\n \t\t}\n \t}\n \n\tprivate static String dotEscape(final String s) {\n\t\tfinal String escaped = s.replaceAll(\"\\\\\\\\\", \"\\\\\\\\\\\\\\\\\").replaceAll(\"\\\"\", \"\\\\\\\\\\\"\").replaceAll(\"\\n\", \"\\\\\\\\l\").replaceAll(\"\\r\", \"\\\\\\\\l\");\n\t\tif (escaped.indexOf(\"\\\\l\") != -1 && !escaped.endsWith(\"\\\\l\"))\n\t\t\treturn escaped + \"\\\\l\";\n\t\treturn escaped;\n\t}\n\n\t@FunctionSpec(name = \"dot\", returnType = \"string\", formalParameters = { \"CFG\" })\n \tpublic static String cfgToDot(final CFG cfg) {\n \t\tfinal StringBuilder str = new StringBuilder();\n \t\tstr.append(\"digraph G {\\n\");\n \n\t\tfor (final boa.graphs.cfg.CFGNode n : cfg.getNodes()) {\n\t\t\tfinal String shape;\n\t\t\tswitch (n.getKind()) {\n\t\t\t\tcase CONTROL:\n\t\t\t\t\tshape = \"shape=diamond\";\n\t\t\t\t\tbreak;\n\t\t\t\tcase METHOD:\n\t\t\t\t\tshape = \"shape=parallelogram\";\n\t\t\t\t\tbreak;\n\t\t\t\tcase OTHER:\n\t\t\t\t\tshape = \"shape=box\";\n\t\t\t\t\tbreak;\n\t\t\t\tcase ENTRY:\n\t\t\t\tdefault:\n\t\t\t\t\tshape = \"shape=ellipse\";\n\t\t\t\t\tbreak;\n\t\t\t}\n\n\t\t\tif (n.hasStmt())\n\t\t\t\tstr.append(\"\\t\" + n.getId() + \"[\" + shape + \",label=\\\"\" + dotEscape(boa.functions.BoaAstIntrinsics.prettyprint(n.getStmt())) + \"\\\"]\\n\");\n\t\t\telse if (n.hasExpr())\n\t\t\t\tstr.append(\"\\t\" + n.getId() + \"[\" + shape + \",label=\\\"\" + dotEscape(boa.functions.BoaAstIntrinsics.prettyprint(n.getExpr())) + \"\\\"]\\n\");\n\t\t\telse if (n.getKind() == boa.types.Control.CFGNode.CFGNodeType.ENTRY)\n\t\t\t\tstr.append(\"\\t\" + n.getId() + \"[\" + shape + \",label=\\\"\" + n.getName() + 
\"\\\"]\\n\");\n\t\t\telse\n\t\t\t\tstr.append(\"\\t\" + n.getId() + \"[\" + shape + \"]\\n\");\n\t\t}\n\n \t\tfinal boa.runtime.BoaAbstractTraversal printGraph = new boa.runtime.BoaAbstractTraversal(false, false) {\n \t\t\tprotected Object preTraverse(final boa.graphs.cfg.CFGNode node) throws Exception {\n\t\t\t\tfinal java.util.List succs = node .getSuccessorsList();\n\t\t\t\tfor (long i = 0; i < succs .size(); i++) {\n\t\t\t\t\tif ((succs.get((int)(i)) != null)) {\n\t\t\t\t\t\tstr.append(\"\\t\" + node.getId() + \" -> \" + succs.get((int)(i)).getId() + \"\\n\");\n\t\t\t\t\t}\n\t\t\t\tfinal java.util.Set edges = node.getOutEdges();\n\t\t\t\tfor (final boa.graphs.cfg.CFGEdge e : node.getOutEdges()) {\n\t\t\t\t\tstr.append(\"\\t\" + node.getId() + \" -> \" + e.getDest().getId());\n\t\t\t\t\tif (!(e.label() == null || e.label().equals(\".\") || e.label().equals(\"\")))\n\t\t\t\t\t\tstr.append(\" [label=\\\"\" + dotEscape(e.label()) + \"\\\"]\");\n\t\t\t\t\tstr.append(\"\\n\");\n \t\t\t\t}\n \t\t\t\treturn null;\n \t\t\t}"}}},{"rowIdx":2033,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"mit"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"2197fae2c1de5846c324f90aeb04f7548442bfb8"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"iBotPeaches/SimpleServer,iBotPeaches/SimpleServer,SimpleServer/SimpleServer,SimpleServer/SimpleServer"},"new_contents":{"kind":"string","value":"/*\n * Copyright (c) 2010 SimpleServer authors (see CONTRIBUTORS)\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to 
the following conditions:\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n * THE SOFTWARE.\n */\npackage simpleserver.stream;\n\nimport java.io.BufferedInputStream;\nimport java.io.BufferedOutputStream;\nimport java.io.DataInput;\nimport java.io.DataInputStream;\nimport java.io.DataOutput;\nimport java.io.DataOutputStream;\nimport java.io.FileNotFoundException;\nimport java.io.FileOutputStream;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.io.OutputStream;\nimport java.util.IllegalFormatException;\nimport java.util.regex.Matcher;\nimport java.util.regex.Pattern;\n\nimport simpleserver.Coordinate;\nimport simpleserver.Group;\nimport simpleserver.Player;\nimport simpleserver.Server;\nimport simpleserver.command.LocalSayCommand;\nimport simpleserver.command.PlayerListCommand;\nimport simpleserver.config.ChestList.Chest;\n\npublic class StreamTunnel {\n private static final boolean EXPENSIVE_DEBUG_LOGGING = Boolean.getBoolean(\"EXPENSIVE_DEBUG_LOGGING\");\n private static final int IDLE_TIME = 30000;\n private static final int BUFFER_SIZE = 1024;\n private static final byte BLOCK_DESTROYED_STATUS = 2;\n private static final Pattern MESSAGE_PATTERN = Pattern.compile(\"^<([^>]+)> (.*)$\");\n private static final Pattern COLOR_PATTERN = Pattern.compile(\"\\u00a7[0-9a-f]\");\n\n private final boolean isServerTunnel;\n private final String streamType;\n private final Player player;\n private 
final Server server;\n private final byte[] buffer;\n private final Tunneler tunneler;\n\n private DataInput in;\n private DataOutput out;\n private StreamDumper inputDumper;\n private StreamDumper outputDumper;\n\n private int motionCounter = 0;\n private boolean inGame = false;\n\n private volatile long lastRead;\n private volatile boolean run = true;\n\n public StreamTunnel(InputStream in, OutputStream out, boolean isServerTunnel,\n Player player) {\n this.isServerTunnel = isServerTunnel;\n if (isServerTunnel) {\n streamType = \"ServerStream\";\n }\n else {\n streamType = \"PlayerStream\";\n }\n\n this.player = player;\n server = player.getServer();\n\n DataInputStream dIn = new DataInputStream(new BufferedInputStream(in));\n DataOutputStream dOut = new DataOutputStream(new BufferedOutputStream(out));\n if (EXPENSIVE_DEBUG_LOGGING) {\n try {\n OutputStream dump = new FileOutputStream(streamType + \"Input.debug\");\n InputStreamDumper dumper = new InputStreamDumper(dIn, dump);\n inputDumper = dumper;\n this.in = dumper;\n }\n catch (FileNotFoundException e) {\n System.out.println(\"Unable to open input debug dump!\");\n throw new RuntimeException(e);\n }\n\n try {\n OutputStream dump = new FileOutputStream(streamType + \"Output.debug\");\n OutputStreamDumper dumper = new OutputStreamDumper(dOut, dump);\n outputDumper = dumper;\n this.out = dumper;\n }\n catch (FileNotFoundException e) {\n System.out.println(\"Unable to open output debug dump!\");\n throw new RuntimeException(e);\n }\n }\n else {\n this.in = dIn;\n this.out = dOut;\n }\n\n buffer = new byte[BUFFER_SIZE];\n\n tunneler = new Tunneler();\n tunneler.start();\n\n lastRead = System.currentTimeMillis();\n }\n\n public void stop() {\n run = false;\n }\n\n public boolean isAlive() {\n return tunneler.isAlive();\n }\n\n public boolean isActive() {\n return System.currentTimeMillis() - lastRead < IDLE_TIME\n || player.isRobot();\n }\n\n private void handlePacket() throws IOException {\n Byte packetId = 
in.readByte();\n int x;\n byte y;\n int z;\n switch (packetId) {\n case 0x00: // Keep Alive\n write(packetId);\n break;\n case 0x01: // Login Request/Response\n write(packetId);\n if (isServerTunnel) {\n player.setEntityId(in.readInt());\n write(player.getEntityId());\n }\n else {\n write(in.readInt());\n }\n write(readUTF16());\n write(in.readLong());\n write(in.readByte());\n break;\n case 0x02: // Handshake\n String name = readUTF16();\n if (isServerTunnel || player.setName(name)) {\n tunneler.setName(streamType + \"-\" + player.getName());\n write(packetId);\n write(name);\n }\n break;\n case 0x03: // Chat Message\n String message = readUTF16();\n System.out.println(message);\n if (isServerTunnel && server.options.getBoolean(\"useMsgFormats\")) {\n \n Matcher colorMatcher = COLOR_PATTERN.matcher(message);\n String cleanMessage = colorMatcher.replaceAll(\"\");\n \n \n \n Matcher messageMatcher = MESSAGE_PATTERN.matcher(cleanMessage);\n if (messageMatcher.find()) {\n Player friend = server.findPlayerExact(messageMatcher.group(1));\n\n if (friend != null) {\n String color = \"f\";\n String title = \"\";\n String format = server.options.get(\"msgFormat\");\n Group group = friend.getGroup();\n\n if (group != null) {\n color = group.getColor();\n if (group.showTitle()) {\n title = group.getName();\n format = server.options.get(\"msgTitleFormat\");\n }\n }\n\n try {\n message = String.format(format, friend.getName(), title, color)\n + messageMatcher.group(2);\n }\n catch (IllegalFormatException e) {\n System.out.println(\"[SimpleServer] There is an error in your msgFormat/msgTitleFormat settings!\");\n }\n }\n }\n }\n\n if (!isServerTunnel) {\n \n if (player.isMuted() && !message.startsWith(\"/\")\n && !message.startsWith(\"!\")) {\n player.addMessage(\"\\u00a7cYou are muted! 
You may not send messages to all players.\");\n break;\n }\n\n if (player.parseCommand(message)) {\n break;\n }\n \n if(player.localChat() && !message.startsWith(\"/\") && !message.startsWith(\"!\")) {\n player.execute(LocalSayCommand.class, message);\n break;\n }\n }\n\n write(packetId);\n write(message);\n break;\n case 0x04: // Time Update\n write(packetId);\n copyNBytes(8);\n break;\n case 0x05: // Player Inventory\n write(packetId);\n write(in.readInt());\n write(in.readShort());\n write(in.readShort());\n write(in.readShort());\n break;\n case 0x06: // Spawn Position\n write(packetId);\n copyNBytes(12);\n break;\n case 0x07: // Use Entity?\n int user = in.readInt();\n int target = in.readInt();\n Player targetPlayer = server.playerList.findPlayer(target);\n if (targetPlayer != null) {\n if (targetPlayer.godModeEnabled()) {\n in.readBoolean();\n break;\n }\n }\n write(packetId);\n write(user);\n write(target);\n write(in.readBoolean());\n break;\n case 0x08: // Update Health\n write(packetId);\n copyNBytes(2);\n break;\n case 0x09: // Respawn\n write(packetId);\n break;\n case 0x0a: // Player\n write(packetId);\n copyNBytes(1);\n if (!inGame && !isServerTunnel) {\n player.sendMOTD();\n \n if(server.options.getBoolean(\"showListOnConnect\")){\n //display player list if enabled in config\n player.execute(PlayerListCommand.class);\n }\n \n inGame = true;\n }\n break;\n case 0x0b: // Player Position\n write(packetId);\n copyPlayerLocation();\n break;\n case 0x0c: // Player Look\n write(packetId);\n copyNBytes(9);\n break;\n case 0x0d: // Player Position & Look\n write(packetId);\n copyPlayerLocation();\n copyNBytes(8);\n break;\n case 0x0e: // Player Digging\n if (!isServerTunnel) {\n if (player.getGroupId() < 0) {\n skipNBytes(11);\n }\n else {\n byte status = in.readByte();\n x = in.readInt();\n y = in.readByte();\n z = in.readInt();\n byte face = in.readByte();\n if (!server.chests.isLocked(x, y, z) || player.isAdmin()) {\n if (server.chests.isLocked(x, y, z)\n 
&& status == BLOCK_DESTROYED_STATUS) {\n server.chests.releaseLock(x, y, z);\n }\n\n write(packetId);\n write(status);\n write(x);\n write(y);\n write(z);\n write(face);\n\n if (player.instantDestroyEnabled()) {\n packetFinished();\n write(packetId);\n write(BLOCK_DESTROYED_STATUS);\n write(x);\n write(y);\n write(z);\n write(face);\n }\n \n if(status == BLOCK_DESTROYED_STATUS) {\n player.destroyedBlock();\n }\n }\n }\n }\n else {\n write(packetId);\n copyNBytes(11);\n }\n break;\n case 0x0f: // Player Block Placement\n x = in.readInt();\n y = in.readByte();\n z = in.readInt();\n final byte direction = in.readByte();\n final short dropItem = in.readShort();\n byte itemCount = 0;\n short uses = 0;\n if (dropItem != -1) {\n itemCount = in.readByte();\n uses = in.readShort();\n }\n \n boolean writePacket = true;\n boolean drop = false;\n if (isServerTunnel || server.chests.isChest(x, y, z)) {\n // continue\n } else if ((player.getGroupId() < 0) || !server.blockFirewall.playerAllowed(player, dropItem)) {\n String badBlock = String.format(server.l.get(\"BAD_BLOCK\"), player.getName(), Short.toString(dropItem));\n server.runCommand(\"say\", badBlock);\n writePacket = false;\n drop = true;\n } else if (dropItem == 54) {\n int xPosition = x;\n byte yPosition = y;\n int zPosition = z;\n switch (direction) {\n case 0:\n --yPosition;\n break;\n case 1:\n ++yPosition;\n break;\n case 2:\n --zPosition;\n break;\n case 3:\n ++zPosition;\n break;\n case 4:\n --xPosition;\n break;\n case 5:\n ++xPosition;\n break;\n }\n\n Chest adjacentChest = server.chests.adjacentChest(xPosition, yPosition, zPosition);\n \n if (adjacentChest != null && !adjacentChest.isOpen() && !adjacentChest.ownedBy(player)) {\n player.addMessage(\"\\u00a7cThe adjacent chest is locked!\");\n writePacket = false;\n drop = true;\n } else {\n player.placingChest(new Coordinate(xPosition, yPosition, zPosition));\n }\n }\n\n if (writePacket) {\n write(packetId);\n write(x);\n write(y);\n write(z);\n 
write(direction);\n write(dropItem);\n\n if (dropItem != -1) {\n write(itemCount);\n write(uses);\n \n if(dropItem <= 94 && direction >= 0) {\n player.placedBlock();\n }\n }\n \n player.openingChest(x,y,z);\n \n }\n else if(drop) {\n // Drop the item in hand. This keeps the client state in-sync with the\n // server. This generally prevents empty-hand clicks by the client\n // from placing blocks the server thinks the client has in hand.\n write((byte) 0x0e);\n write((byte) 0x04);\n write(x);\n write(y);\n write(z);\n write(direction);\n }\n\n \n \n break;\n case 0x10: // Holding Change\n write(packetId);\n copyNBytes(2);\n break;\n case 0x11: // Use Bed\n write(packetId);\n copyNBytes(14);\n break;\n case 0x12: // Animation\n write(packetId);\n copyNBytes(5);\n break;\n case 0x13: // ???\n write(packetId);\n write(in.readInt());\n write(in.readByte());\n break;\n case 0x14: // Named Entity Spawn\n write(packetId);\n write(in.readInt());\n write(readUTF16());\n copyNBytes(16);\n break;\n case 0x15: // Pickup spawn\n if (player.getGroupId() < 0) {\n skipNBytes(24);\n break;\n }\n write(packetId);\n copyNBytes(24);\n break;\n case 0x16: // Collect Item\n write(packetId);\n copyNBytes(8);\n break;\n case 0x17: // Add Object/Vehicle\n write(packetId);\n copyNBytes(17);\n break;\n case 0x18: // Mob Spawn\n write(packetId);\n write(in.readInt());\n write(in.readByte());\n write(in.readInt());\n write(in.readInt());\n write(in.readInt());\n write(in.readByte());\n write(in.readByte());\n\n copyUnknownBlob();\n break;\n case 0x19: // Painting\n write(packetId);\n write(in.readInt());\n write(readUTF16());\n write(in.readInt());\n write(in.readInt());\n write(in.readInt());\n write(in.readInt());\n break;\n case 0x1b: // ???\n write(packetId);\n copyNBytes(18);\n break;\n case 0x1c: // Entity Velocity?\n write(packetId);\n copyNBytes(10);\n break;\n case 0x1d: // Destroy Entity\n write(packetId);\n copyNBytes(4);\n break;\n case 0x1e: // Entity\n write(packetId);\n 
copyNBytes(4);\n break;\n case 0x1f: // Entity Relative Move\n write(packetId);\n copyNBytes(7);\n break;\n case 0x20: // Entity Look\n write(packetId);\n copyNBytes(6);\n break;\n case 0x21: // Entity Look and Relative Move\n write(packetId);\n copyNBytes(9);\n break;\n case 0x22: // Entity Teleport\n write(packetId);\n copyNBytes(18);\n break;\n case 0x26: // Entity status?\n write(packetId);\n copyNBytes(5);\n break;\n case 0x27: // Attach Entity?\n write(packetId);\n copyNBytes(8);\n break;\n case 0x28: // Entity Metadata\n write(packetId);\n write(in.readInt());\n\n copyUnknownBlob();\n break;\n case 0x32: // Pre-Chunk\n write(packetId);\n copyNBytes(9);\n break;\n case 0x33: // Map Chunk\n write(packetId);\n copyNBytes(13);\n int chunkSize = in.readInt();\n write(chunkSize);\n copyNBytes(chunkSize);\n break;\n case 0x34: // Multi Block Change\n write(packetId);\n copyNBytes(8);\n short arraySize = in.readShort();\n write(arraySize);\n copyNBytes(arraySize * 4);\n break;\n case 0x35: // Block Change\n write(packetId);\n x = in.readInt();\n y = in.readByte();\n z = in.readInt();\n byte blockType = in.readByte();\n byte metadata = in.readByte();\n \n if(blockType == 54 && player.placedChest(x,y,z)) {\n lockChest(x,y,z);\n player.placingChest(null);\n }\n \n write(x);\n write(y);\n write(z);\n write(blockType);\n write(metadata);\n \n break;\n case 0x36: // ???\n write(packetId);\n copyNBytes(12);\n break;\n case 0x3c: // Explosion\n write(packetId);\n copyNBytes(28);\n int recordCount = in.readInt();\n write(recordCount);\n copyNBytes(recordCount * 3);\n break;\n case 0x46: // Invalid Bed\n write(packetId);\n copyNBytes(1);\n break;\n case 0x47: // Weather\n write(packetId);\n copyNBytes(17);\n break;\n case 0x64:\n byte id = in.readByte();\n byte invtype = in.readByte();\n String typeString = in.readUTF();\n if(invtype == 0) {\n if(server.chests.canOpen(player, player.openedChest()) || player.isAdmin()) {\n if(server.chests.isLocked(player.openedChest())) {\n 
if(player.isAttemptingUnlock()) {\n server.chests.unlock(player.openedChest());\n player.setAttemptedAction(null);\n player.addMessage(\"\\u00a77This chest is no longer locked!\");\n typeString = \"Open Chest\";\n } else {\n typeString = server.chests.chestName(player.openedChest());\n }\n } else {\n typeString = \"Open Chest\";\n if(player.isAttemptLock()) {\n lockChest(player.openedChest());\n typeString = player.nextChestName();\n }\n }\n \n } else {\n player.addMessage(\"\\u00a7cThis chest is locked!\");\n in.readByte();\n break;\n }\n }\n write(packetId);\n write(id);\n write(invtype);\n write8(typeString);\n write(in.readByte());\n break;\n case 0x65:\n write(packetId);\n write(in.readByte());\n break;\n case 0x66: // Inventory Item Move\n byte typeFrom = in.readByte();\n short slotFrom = in.readShort();\n byte typeTo = in.readByte();\n short slotTo = in.readShort();\n if ((typeFrom < 0 && typeTo < 0) || player.getGroupId() >= 0) {\n write(packetId);\n write(typeFrom);\n write(slotFrom);\n write(typeTo);\n write(slotTo);\n write(in.readBoolean());\n short moveItem = in.readShort();\n write(moveItem);\n if (moveItem != -1) {\n write(in.readByte());\n write(in.readShort());\n }\n }\n else {\n short moveItem = in.readShort();\n if (moveItem != -1) {\n in.readByte();\n in.readShort();\n }\n }\n break;\n case 0x67: // Inventory Item Update\n byte type67 = in.readByte();\n if (type67 < 0 || player.getGroupId() >= 0) {\n write(packetId);\n short slot = in.readShort();\n write(type67);\n write(slot);\n short setItem = in.readShort();\n write(setItem);\n if (setItem != -1) {\n write(in.readByte());\n write(in.readShort());\n }\n }\n else {\n in.readShort();\n short setItem = in.readShort();\n if (setItem != -1) {\n in.readByte();\n in.readShort();\n }\n }\n break;\n case 0x68: // Inventory\n byte type = in.readByte();\n if (type < 0 || player.getGroupId() >= 0) {\n write(packetId);\n write(type);\n short count = in.readShort();\n write(count);\n for (int c = 0; c < 
count; ++c) {\n short item = in.readShort();\n write(item);\n\n if (item != -1) {\n write(in.readByte());\n write(in.readShort());\n }\n }\n }\n else {\n short count = in.readShort();\n for (int c = 0; c < count; ++c) {\n short item = in.readShort();\n if (item != -1) {\n in.readByte();\n in.readShort();\n }\n }\n }\n break;\n case 0x69:\n write(packetId);\n write(in.readByte());\n write(in.readShort());\n write(in.readShort());\n break;\n case 0x6a:\n write(packetId);\n write(in.readByte());\n write(in.readShort());\n write(in.readByte());\n break;\n case (byte) 0x82: // Update Sign\n write(packetId);\n write(in.readInt());\n write(in.readShort());\n write(in.readInt());\n write(readUTF16());\n write(readUTF16());\n write(readUTF16());\n write(readUTF16());\n break;\n case (byte)0xc6:\n write(packetId);\n copyNBytes(5);\n break;\n case (byte) 0xff: // Disconnect/Kick\n write(packetId);\n String reason = readUTF16();\n write(reason);\n if (reason.startsWith(\"Took too long\")) {\n server.addRobot(player);\n }\n player.close();\n break;\n default:\n if (EXPENSIVE_DEBUG_LOGGING) {\n while (true) {\n skipNBytes(1);\n flushAll();\n }\n }\n else {\n throw new IOException(\"Unable to parse unknown \" + streamType\n + \" packet 0x\" + Integer.toHexString(packetId) + \" for player \"\n + player.getName());\n }\n }\n packetFinished();\n }\n \n private String readUTF16() throws IOException {\n short length = in.readShort();\n byte[] bytes = new byte[2+length*2];\n bytes[0] = (byte)((length>>8)&0xFF);\n bytes[1] = (byte)(length&0xFF);\n for(short i = 0; i < length*2; i++) {\n bytes[i+2] = in.readByte();\n }\n try {\n String result = new String(bytes, \"UTF-16\");\n return result;\n } catch(Exception e) {\n return \"FUUUUU\"; \n }\n }\n\n\n private void print(byte[] bytes) {\n for(int i=0; i> 5;\n\n switch (type) {\n case 0:\n write(in.readByte());\n break;\n case 1:\n write(in.readShort());\n break;\n case 2:\n write(in.readInt());\n break;\n case 3:\n 
write(in.readFloat());\n break;\n case 4:\n write(readUTF16());\n break;\n case 5:\n write(in.readShort());\n write(in.readByte());\n write(in.readShort());\n }\n\n unknown = in.readByte();\n write(unknown);\n }\n }\n\n private void write(byte b) throws IOException {\n out.writeByte(b);\n }\n\n private void write(short s) throws IOException {\n out.writeShort(s);\n }\n\n private void write(int i) throws IOException {\n out.writeInt(i);\n }\n\n private void write(long l) throws IOException {\n out.writeLong(l);\n }\n\n private void write(float f) throws IOException {\n out.writeFloat(f);\n }\n\n private void write(double d) throws IOException {\n out.writeDouble(d);\n }\n\n private void write(String s) throws IOException {\n byte[] bytes = s.getBytes(\"UTF-16\");\n for(int i = 2; i < bytes.length; i++) {\n write(bytes[i]);\n }\n }\n \n private void write8(String s) throws IOException {\n out.writeUTF(s);\n }\n\n private void write(boolean b) throws IOException {\n out.writeBoolean(b);\n }\n\n private void skipNBytes(int bytes) throws IOException {\n int overflow = bytes / buffer.length;\n for (int c = 0; c < overflow; ++c) {\n in.readFully(buffer, 0, buffer.length);\n }\n in.readFully(buffer, 0, bytes % buffer.length);\n }\n\n private void copyNBytes(int bytes) throws IOException {\n int overflow = bytes / buffer.length;\n for (int c = 0; c < overflow; ++c) {\n in.readFully(buffer, 0, buffer.length);\n out.write(buffer, 0, buffer.length);\n }\n in.readFully(buffer, 0, bytes % buffer.length);\n out.write(buffer, 0, bytes % buffer.length);\n }\n\n private void kick(String reason) throws IOException {\n write((byte) 0xff);\n write(reason);\n packetFinished();\n }\n\n private void sendMessage(String message) throws IOException {\n //write(0x03);\n //write(message);\n //packetFinished();\n }\n\n private void packetFinished() throws IOException {\n if (EXPENSIVE_DEBUG_LOGGING) {\n inputDumper.packetFinished();\n outputDumper.packetFinished();\n }\n }\n\n private void 
flushAll() throws IOException {\n try {\n ((OutputStream) out).flush();\n }\n finally {\n if (EXPENSIVE_DEBUG_LOGGING) {\n inputDumper.flush();\n }\n }\n }\n\n private final class Tunneler extends Thread {\n @Override\n public void run() {\n try {\n while (run) {\n lastRead = System.currentTimeMillis();\n\n try {\n handlePacket();\n\n if (isServerTunnel) {\n while (player.hasMessages()) {\n sendMessage(player.getMessage());\n }\n }\n\n flushAll();\n }\n catch (IOException e) {\n if (run && !player.isRobot()) {\n System.out.println(\"[SimpleServer] \" + e);\n System.out.println(\"[SimpleServer] \" + streamType\n + \" error handling traffic for \" + player.getIPAddress());\n e.printStackTrace();\n }\n break;\n }\n }\n\n try {\n if (player.isKicked()) {\n kick(player.getKickMsg());\n }\n flushAll();\n }\n catch (IOException e) {\n }\n }\n finally {\n if (EXPENSIVE_DEBUG_LOGGING) {\n inputDumper.cleanup();\n outputDumper.cleanup();\n }\n }\n }\n }\n}\n"},"new_file":{"kind":"string","value":"src/simpleserver/stream/StreamTunnel.java"},"old_contents":{"kind":"string","value":"/*\n * Copyright (c) 2010 SimpleServer authors (see CONTRIBUTORS)\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n * THE SOFTWARE.\n */\npackage simpleserver.stream;\n\nimport java.io.BufferedInputStream;\nimport java.io.BufferedOutputStream;\nimport java.io.DataInput;\nimport java.io.DataInputStream;\nimport java.io.DataOutput;\nimport java.io.DataOutputStream;\nimport java.io.FileNotFoundException;\nimport java.io.FileOutputStream;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.io.OutputStream;\nimport java.util.IllegalFormatException;\nimport java.util.regex.Matcher;\nimport java.util.regex.Pattern;\n\nimport simpleserver.Coordinate;\nimport simpleserver.Group;\nimport simpleserver.Player;\nimport simpleserver.Server;\nimport simpleserver.command.LocalSayCommand;\nimport simpleserver.command.PlayerListCommand;\nimport simpleserver.config.ChestList.Chest;\n\npublic class StreamTunnel {\n private static final boolean EXPENSIVE_DEBUG_LOGGING = Boolean.getBoolean(\"EXPENSIVE_DEBUG_LOGGING\");\n private static final int IDLE_TIME = 30000;\n private static final int BUFFER_SIZE = 1024;\n private static final byte BLOCK_DESTROYED_STATUS = 2;\n private static final Pattern MESSAGE_PATTERN = Pattern.compile(\"^<([^>]+)> (.*)$\");\n private static final Pattern COLOR_PATTERN = Pattern.compile(\"\\u00a7[0-9a-f]\");\n\n private final boolean isServerTunnel;\n private final String streamType;\n private final Player player;\n private final Server server;\n private final byte[] buffer;\n private final Tunneler tunneler;\n\n private DataInput in;\n private DataOutput out;\n private StreamDumper inputDumper;\n private StreamDumper outputDumper;\n\n private int motionCounter = 0;\n private boolean inGame = false;\n\n private volatile long lastRead;\n private volatile boolean run = true;\n\n public 
StreamTunnel(InputStream in, OutputStream out, boolean isServerTunnel,\n Player player) {\n this.isServerTunnel = isServerTunnel;\n if (isServerTunnel) {\n streamType = \"ServerStream\";\n }\n else {\n streamType = \"PlayerStream\";\n }\n\n this.player = player;\n server = player.getServer();\n\n DataInputStream dIn = new DataInputStream(new BufferedInputStream(in));\n DataOutputStream dOut = new DataOutputStream(new BufferedOutputStream(out));\n if (EXPENSIVE_DEBUG_LOGGING) {\n try {\n OutputStream dump = new FileOutputStream(streamType + \"Input.debug\");\n InputStreamDumper dumper = new InputStreamDumper(dIn, dump);\n inputDumper = dumper;\n this.in = dumper;\n }\n catch (FileNotFoundException e) {\n System.out.println(\"Unable to open input debug dump!\");\n throw new RuntimeException(e);\n }\n\n try {\n OutputStream dump = new FileOutputStream(streamType + \"Output.debug\");\n OutputStreamDumper dumper = new OutputStreamDumper(dOut, dump);\n outputDumper = dumper;\n this.out = dumper;\n }\n catch (FileNotFoundException e) {\n System.out.println(\"Unable to open output debug dump!\");\n throw new RuntimeException(e);\n }\n }\n else {\n this.in = dIn;\n this.out = dOut;\n }\n\n buffer = new byte[BUFFER_SIZE];\n\n tunneler = new Tunneler();\n tunneler.start();\n\n lastRead = System.currentTimeMillis();\n }\n\n public void stop() {\n run = false;\n }\n\n public boolean isAlive() {\n return tunneler.isAlive();\n }\n\n public boolean isActive() {\n return System.currentTimeMillis() - lastRead < IDLE_TIME\n || player.isRobot();\n }\n\n private void handlePacket() throws IOException {\n Byte packetId = in.readByte();\n int x;\n byte y;\n int z;\n switch (packetId) {\n case 0x00: // Keep Alive\n write(packetId);\n break;\n case 0x01: // Login Request/Response\n write(packetId);\n if (isServerTunnel) {\n player.setEntityId(in.readInt());\n write(player.getEntityId());\n }\n else {\n write(in.readInt());\n }\n write(in.readUTF());\n write(in.readUTF());\n 
write(in.readLong());\n write(in.readByte());\n break;\n case 0x02: // Handshake\n String name = in.readUTF();\n if (isServerTunnel || player.setName(name)) {\n tunneler.setName(streamType + \"-\" + player.getName());\n write(packetId);\n write(name);\n }\n break;\n case 0x03: // Chat Message\n String message = in.readUTF();\n if (isServerTunnel && server.options.getBoolean(\"useMsgFormats\")) {\n \n Matcher colorMatcher = COLOR_PATTERN.matcher(message);\n String cleanMessage = colorMatcher.replaceAll(\"\");\n \n \n \n Matcher messageMatcher = MESSAGE_PATTERN.matcher(cleanMessage);\n if (messageMatcher.find()) {\n Player friend = server.findPlayerExact(messageMatcher.group(1));\n\n if (friend != null) {\n String color = \"f\";\n String title = \"\";\n String format = server.options.get(\"msgFormat\");\n Group group = friend.getGroup();\n\n if (group != null) {\n color = group.getColor();\n if (group.showTitle()) {\n title = group.getName();\n format = server.options.get(\"msgTitleFormat\");\n }\n }\n\n try {\n message = String.format(format, friend.getName(), title, color)\n + messageMatcher.group(2);\n }\n catch (IllegalFormatException e) {\n System.out.println(\"[SimpleServer] There is an error in your msgFormat/msgTitleFormat settings!\");\n }\n }\n }\n }\n\n if (!isServerTunnel) {\n \n if (player.isMuted() && !message.startsWith(\"/\")\n && !message.startsWith(\"!\")) {\n player.addMessage(\"\\u00a7cYou are muted! 
You may not send messages to all players.\");\n break;\n }\n\n if (player.parseCommand(message)) {\n break;\n }\n \n if(player.localChat() && !message.startsWith(\"/\") && !message.startsWith(\"!\")) {\n player.execute(LocalSayCommand.class, message);\n break;\n }\n }\n\n write(packetId);\n write(message);\n break;\n case 0x04: // Time Update\n write(packetId);\n copyNBytes(8);\n break;\n case 0x05: // Player Inventory\n write(packetId);\n write(in.readInt());\n write(in.readShort());\n write(in.readShort());\n write(in.readShort());\n break;\n case 0x06: // Spawn Position\n write(packetId);\n copyNBytes(12);\n break;\n case 0x07: // Use Entity?\n int user = in.readInt();\n int target = in.readInt();\n Player targetPlayer = server.playerList.findPlayer(target);\n if (targetPlayer != null) {\n if (targetPlayer.godModeEnabled()) {\n in.readBoolean();\n break;\n }\n }\n write(packetId);\n write(user);\n write(target);\n write(in.readBoolean());\n break;\n case 0x08: // Update Health\n write(packetId);\n copyNBytes(2);\n break;\n case 0x09: // Respawn\n write(packetId);\n break;\n case 0x0a: // Player\n write(packetId);\n copyNBytes(1);\n if (!inGame && !isServerTunnel) {\n player.sendMOTD();\n \n if(server.options.getBoolean(\"showListOnConnect\")){\n //display player list if enabled in config\n player.execute(PlayerListCommand.class);\n }\n \n inGame = true;\n }\n break;\n case 0x0b: // Player Position\n write(packetId);\n copyPlayerLocation();\n break;\n case 0x0c: // Player Look\n write(packetId);\n copyNBytes(9);\n break;\n case 0x0d: // Player Position & Look\n write(packetId);\n copyPlayerLocation();\n copyNBytes(8);\n break;\n case 0x0e: // Player Digging\n if (!isServerTunnel) {\n if (player.getGroupId() < 0) {\n skipNBytes(11);\n }\n else {\n byte status = in.readByte();\n x = in.readInt();\n y = in.readByte();\n z = in.readInt();\n byte face = in.readByte();\n if (!server.chests.isLocked(x, y, z) || player.isAdmin()) {\n if (server.chests.isLocked(x, y, z)\n 
&& status == BLOCK_DESTROYED_STATUS) {\n server.chests.releaseLock(x, y, z);\n }\n\n write(packetId);\n write(status);\n write(x);\n write(y);\n write(z);\n write(face);\n\n if (player.instantDestroyEnabled()) {\n packetFinished();\n write(packetId);\n write(BLOCK_DESTROYED_STATUS);\n write(x);\n write(y);\n write(z);\n write(face);\n }\n \n if(status == BLOCK_DESTROYED_STATUS) {\n player.destroyedBlock();\n }\n }\n }\n }\n else {\n write(packetId);\n copyNBytes(11);\n }\n break;\n case 0x0f: // Player Block Placement\n x = in.readInt();\n y = in.readByte();\n z = in.readInt();\n final byte direction = in.readByte();\n final short dropItem = in.readShort();\n byte itemCount = 0;\n short uses = 0;\n if (dropItem != -1) {\n itemCount = in.readByte();\n uses = in.readShort();\n }\n \n boolean writePacket = true;\n boolean drop = false;\n if (isServerTunnel || server.chests.isChest(x, y, z)) {\n // continue\n } else if ((player.getGroupId() < 0) || !server.blockFirewall.playerAllowed(player, dropItem)) {\n String badBlock = String.format(server.l.get(\"BAD_BLOCK\"), player.getName(), Short.toString(dropItem));\n server.runCommand(\"say\", badBlock);\n writePacket = false;\n drop = true;\n } else if (dropItem == 54) {\n int xPosition = x;\n byte yPosition = y;\n int zPosition = z;\n switch (direction) {\n case 0:\n --yPosition;\n break;\n case 1:\n ++yPosition;\n break;\n case 2:\n --zPosition;\n break;\n case 3:\n ++zPosition;\n break;\n case 4:\n --xPosition;\n break;\n case 5:\n ++xPosition;\n break;\n }\n\n Chest adjacentChest = server.chests.adjacentChest(xPosition, yPosition, zPosition);\n \n if (adjacentChest != null && !adjacentChest.isOpen() && !adjacentChest.ownedBy(player)) {\n player.addMessage(\"\\u00a7cThe adjacent chest is locked!\");\n writePacket = false;\n drop = true;\n } else {\n player.placingChest(new Coordinate(xPosition, yPosition, zPosition));\n }\n }\n\n if (writePacket) {\n write(packetId);\n write(x);\n write(y);\n write(z);\n 
write(direction);\n write(dropItem);\n\n if (dropItem != -1) {\n write(itemCount);\n write(uses);\n \n if(dropItem <= 94 && direction >= 0) {\n player.placedBlock();\n }\n }\n \n player.openingChest(x,y,z);\n \n }\n else if(drop) {\n // Drop the item in hand. This keeps the client state in-sync with the\n // server. This generally prevents empty-hand clicks by the client\n // from placing blocks the server thinks the client has in hand.\n write((byte) 0x0e);\n write((byte) 0x04);\n write(x);\n write(y);\n write(z);\n write(direction);\n }\n\n \n \n break;\n case 0x10: // Holding Change\n write(packetId);\n copyNBytes(2);\n break;\n case 0x11: // Use Bed\n write(packetId);\n copyNBytes(14);\n break;\n case 0x12: // Animation\n write(packetId);\n copyNBytes(5);\n break;\n case 0x13: // ???\n write(packetId);\n write(in.readInt());\n write(in.readByte());\n break;\n case 0x14: // Named Entity Spawn\n write(packetId);\n write(in.readInt());\n write(in.readUTF());\n copyNBytes(16);\n break;\n case 0x15: // Pickup spawn\n if (player.getGroupId() < 0) {\n skipNBytes(24);\n break;\n }\n write(packetId);\n copyNBytes(24);\n break;\n case 0x16: // Collect Item\n write(packetId);\n copyNBytes(8);\n break;\n case 0x17: // Add Object/Vehicle\n write(packetId);\n copyNBytes(17);\n break;\n case 0x18: // Mob Spawn\n write(packetId);\n write(in.readInt());\n write(in.readByte());\n write(in.readInt());\n write(in.readInt());\n write(in.readInt());\n write(in.readByte());\n write(in.readByte());\n\n copyUnknownBlob();\n break;\n case 0x19: // Painting\n write(packetId);\n write(in.readInt());\n write(in.readUTF());\n write(in.readInt());\n write(in.readInt());\n write(in.readInt());\n write(in.readInt());\n break;\n case 0x1b: // ???\n write(packetId);\n copyNBytes(18);\n break;\n case 0x1c: // Entity Velocity?\n write(packetId);\n copyNBytes(10);\n break;\n case 0x1d: // Destroy Entity\n write(packetId);\n copyNBytes(4);\n break;\n case 0x1e: // Entity\n write(packetId);\n 
copyNBytes(4);\n break;\n case 0x1f: // Entity Relative Move\n write(packetId);\n copyNBytes(7);\n break;\n case 0x20: // Entity Look\n write(packetId);\n copyNBytes(6);\n break;\n case 0x21: // Entity Look and Relative Move\n write(packetId);\n copyNBytes(9);\n break;\n case 0x22: // Entity Teleport\n write(packetId);\n copyNBytes(18);\n break;\n case 0x26: // Entity status?\n write(packetId);\n copyNBytes(5);\n break;\n case 0x27: // Attach Entity?\n write(packetId);\n copyNBytes(8);\n break;\n case 0x28: // Entity Metadata\n write(packetId);\n write(in.readInt());\n\n copyUnknownBlob();\n break;\n case 0x32: // Pre-Chunk\n write(packetId);\n copyNBytes(9);\n break;\n case 0x33: // Map Chunk\n write(packetId);\n copyNBytes(13);\n int chunkSize = in.readInt();\n write(chunkSize);\n copyNBytes(chunkSize);\n break;\n case 0x34: // Multi Block Change\n write(packetId);\n copyNBytes(8);\n short arraySize = in.readShort();\n write(arraySize);\n copyNBytes(arraySize * 4);\n break;\n case 0x35: // Block Change\n write(packetId);\n x = in.readInt();\n y = in.readByte();\n z = in.readInt();\n byte blockType = in.readByte();\n byte metadata = in.readByte();\n \n if(blockType == 54 && player.placedChest(x,y,z)) {\n lockChest(x,y,z);\n player.placingChest(null);\n }\n \n write(x);\n write(y);\n write(z);\n write(blockType);\n write(metadata);\n \n break;\n case 0x36: // ???\n write(packetId);\n copyNBytes(12);\n break;\n case 0x3c: // Explosion\n write(packetId);\n copyNBytes(28);\n int recordCount = in.readInt();\n write(recordCount);\n copyNBytes(recordCount * 3);\n break;\n case 0x46: // Invalid Bed\n write(packetId);\n copyNBytes(1);\n break;\n case 0x64:\n byte id = in.readByte();\n byte invtype = in.readByte();\n String typeString = in.readUTF();\n if(invtype == 0) {\n if(server.chests.canOpen(player, player.openedChest()) || player.isAdmin()) {\n if(server.chests.isLocked(player.openedChest())) {\n if(player.isAttemptingUnlock()) {\n 
server.chests.unlock(player.openedChest());\n player.setAttemptedAction(null);\n player.addMessage(\"\\u00a77This chest is no longer locked!\");\n typeString = \"Open Chest\";\n } else {\n typeString = server.chests.chestName(player.openedChest());\n }\n } else {\n typeString = \"Open Chest\";\n if(player.isAttemptLock()) {\n lockChest(player.openedChest());\n typeString = player.nextChestName();\n }\n }\n \n } else {\n player.addMessage(\"\\u00a7cThis chest is locked!\");\n in.readByte();\n break;\n }\n }\n write(packetId);\n write(id);\n write(invtype);\n write(typeString);\n write(in.readByte());\n break;\n case 0x65:\n write(packetId);\n write(in.readByte());\n break;\n case 0x66: // Inventory Item Move\n byte typeFrom = in.readByte();\n short slotFrom = in.readShort();\n byte typeTo = in.readByte();\n short slotTo = in.readShort();\n if ((typeFrom < 0 && typeTo < 0) || player.getGroupId() >= 0) {\n write(packetId);\n write(typeFrom);\n write(slotFrom);\n write(typeTo);\n write(slotTo);\n short moveItem = in.readShort();\n write(moveItem);\n if (moveItem != -1) {\n write(in.readByte());\n write(in.readShort());\n }\n }\n else {\n short moveItem = in.readShort();\n if (moveItem != -1) {\n in.readByte();\n in.readShort();\n }\n }\n break;\n case 0x67: // Inventory Item Update\n byte type67 = in.readByte();\n if (type67 < 0 || player.getGroupId() >= 0) {\n write(packetId);\n short slot = in.readShort();\n write(type67);\n write(slot);\n short setItem = in.readShort();\n write(setItem);\n if (setItem != -1) {\n write(in.readByte());\n write(in.readShort());\n }\n }\n else {\n in.readShort();\n short setItem = in.readShort();\n if (setItem != -1) {\n in.readByte();\n in.readShort();\n }\n }\n break;\n case 0x68: // Inventory\n byte type = in.readByte();\n if (type < 0 || player.getGroupId() >= 0) {\n write(packetId);\n write(type);\n short count = in.readShort();\n write(count);\n for (int c = 0; c < count; ++c) {\n short item = in.readShort();\n write(item);\n\n if 
(item != -1) {\n write(in.readByte());\n write(in.readShort());\n }\n }\n }\n else {\n short count = in.readShort();\n for (int c = 0; c < count; ++c) {\n short item = in.readShort();\n if (item != -1) {\n in.readByte();\n in.readShort();\n }\n }\n }\n break;\n case 0x69:\n write(packetId);\n write(in.readByte());\n write(in.readShort());\n write(in.readShort());\n break;\n case 0x6a:\n write(packetId);\n write(in.readByte());\n write(in.readShort());\n write(in.readByte());\n break;\n case (byte) 0x82: // Update Sign\n write(packetId);\n write(in.readInt());\n write(in.readShort());\n write(in.readInt());\n write(in.readUTF());\n write(in.readUTF());\n write(in.readUTF());\n write(in.readUTF());\n break;\n case (byte) 0xff: // Disconnect/Kick\n write(packetId);\n String reason = in.readUTF();\n write(reason);\n if (reason.startsWith(\"Took too long\")) {\n server.addRobot(player);\n }\n player.close();\n break;\n default:\n if (EXPENSIVE_DEBUG_LOGGING) {\n while (true) {\n skipNBytes(1);\n flushAll();\n }\n }\n else {\n throw new IOException(\"Unable to parse unknown \" + streamType\n + \" packet 0x\" + Integer.toHexString(packetId) + \" for player \"\n + player.getName());\n }\n }\n packetFinished();\n }\n\n private void lockChest(Coordinate coords) {\n lockChest(coords.x, coords.y, coords.z);\n }\n \n private void lockChest(int x, byte y, int z) {\n Chest adjacentChest = server.chests.adjacentChest(x, y, z);\n if(player.isAttemptLock() || adjacentChest != null && !adjacentChest.isOpen()) {\n if(adjacentChest != null && !adjacentChest.isOpen()) {\n server.chests.giveLock(adjacentChest.owner(), x, y, z, false, adjacentChest.name());\n } else {\n if(adjacentChest != null) {\n adjacentChest.lock(player);\n adjacentChest.rename(player.nextChestName());\n }\n server.chests.giveLock(player, x, y, z, false, player.nextChestName());\n }\n player.setAttemptedAction(null);\n player.addMessage(\"\\u00a77This chest is now locked.\");\n } else if(!server.chests.isChest(x, y, 
z)){\n server.chests.addOpenChest(x, y, z);\n }\n }\n\n private void copyPlayerLocation() throws IOException {\n if (!isServerTunnel) {\n motionCounter++;\n }\n if (!isServerTunnel && motionCounter % 8 == 0) {\n double x = in.readDouble();\n double y = in.readDouble();\n double stance = in.readDouble();\n double z = in.readDouble();\n player.updateLocation(x, y, z, stance);\n write(x);\n write(y);\n write(stance);\n write(z);\n copyNBytes(1);\n }\n else {\n copyNBytes(33);\n }\n }\n\n private void copyUnknownBlob() throws IOException {\n byte unknown = in.readByte();\n write(unknown);\n\n while (unknown != 0x7f) {\n int type = (unknown & 0xE0) >> 5;\n\n switch (type) {\n case 0:\n write(in.readByte());\n break;\n case 1:\n write(in.readShort());\n break;\n case 2:\n write(in.readInt());\n break;\n case 3:\n write(in.readFloat());\n break;\n case 4:\n write(in.readUTF());\n break;\n case 5:\n write(in.readShort());\n write(in.readByte());\n write(in.readShort());\n }\n\n unknown = in.readByte();\n write(unknown);\n }\n }\n\n private void write(byte b) throws IOException {\n out.writeByte(b);\n }\n\n private void write(short s) throws IOException {\n out.writeShort(s);\n }\n\n private void write(int i) throws IOException {\n out.writeInt(i);\n }\n\n private void write(long l) throws IOException {\n out.writeLong(l);\n }\n\n private void write(float f) throws IOException {\n out.writeFloat(f);\n }\n\n private void write(double d) throws IOException {\n out.writeDouble(d);\n }\n\n private void write(String s) throws IOException {\n out.writeUTF(s);\n }\n\n private void write(boolean b) throws IOException {\n out.writeBoolean(b);\n }\n\n private void skipNBytes(int bytes) throws IOException {\n int overflow = bytes / buffer.length;\n for (int c = 0; c < overflow; ++c) {\n in.readFully(buffer, 0, buffer.length);\n }\n in.readFully(buffer, 0, bytes % buffer.length);\n }\n\n private void copyNBytes(int bytes) throws IOException {\n int overflow = bytes / buffer.length;\n 
for (int c = 0; c < overflow; ++c) {\n in.readFully(buffer, 0, buffer.length);\n out.write(buffer, 0, buffer.length);\n }\n in.readFully(buffer, 0, bytes % buffer.length);\n out.write(buffer, 0, bytes % buffer.length);\n }\n\n private void kick(String reason) throws IOException {\n write((byte) 0xff);\n write(reason);\n packetFinished();\n }\n\n private void sendMessage(String message) throws IOException {\n write(0x03);\n write(message);\n packetFinished();\n }\n\n private void packetFinished() throws IOException {\n if (EXPENSIVE_DEBUG_LOGGING) {\n inputDumper.packetFinished();\n outputDumper.packetFinished();\n }\n }\n\n private void flushAll() throws IOException {\n try {\n ((OutputStream) out).flush();\n }\n finally {\n if (EXPENSIVE_DEBUG_LOGGING) {\n inputDumper.flush();\n }\n }\n }\n\n private final class Tunneler extends Thread {\n @Override\n public void run() {\n try {\n while (run) {\n lastRead = System.currentTimeMillis();\n\n try {\n handlePacket();\n\n if (isServerTunnel) {\n while (player.hasMessages()) {\n sendMessage(player.getMessage());\n }\n }\n\n flushAll();\n }\n catch (IOException e) {\n if (run && !player.isRobot()) {\n System.out.println(\"[SimpleServer] \" + e);\n System.out.println(\"[SimpleServer] \" + streamType\n + \" error handling traffic for \" + player.getIPAddress());\n }\n break;\n }\n }\n\n try {\n if (player.isKicked()) {\n kick(player.getKickMsg());\n }\n flushAll();\n }\n catch (IOException e) {\n }\n }\n finally {\n if (EXPENSIVE_DEBUG_LOGGING) {\n inputDumper.cleanup();\n outputDumper.cleanup();\n }\n }\n }\n }\n}\n"},"message":{"kind":"string","value":"fixed some 1.5 issues; still not working\n"},"old_file":{"kind":"string","value":"src/simpleserver/stream/StreamTunnel.java"},"subject":{"kind":"string","value":"fixed some 1.5 issues; still not working"},"git_diff":{"kind":"string","value":"rc/simpleserver/stream/StreamTunnel.java\n else {\n write(in.readInt());\n }\n write(in.readUTF());\n write(in.readUTF());\n 
write(readUTF16());\n write(in.readLong());\n write(in.readByte());\n break;\n case 0x02: // Handshake\n String name = in.readUTF();\n String name = readUTF16();\n if (isServerTunnel || player.setName(name)) {\n tunneler.setName(streamType + \"-\" + player.getName());\n write(packetId);\n }\n break;\n case 0x03: // Chat Message\n String message = in.readUTF();\n String message = readUTF16();\n System.out.println(message);\n if (isServerTunnel && server.options.getBoolean(\"useMsgFormats\")) {\n \n Matcher colorMatcher = COLOR_PATTERN.matcher(message);\n case 0x14: // Named Entity Spawn\n write(packetId);\n write(in.readInt());\n write(in.readUTF());\n write(readUTF16());\n copyNBytes(16);\n break;\n case 0x15: // Pickup spawn\n case 0x19: // Painting\n write(packetId);\n write(in.readInt());\n write(in.readUTF());\n write(readUTF16());\n write(in.readInt());\n write(in.readInt());\n write(in.readInt());\n case 0x46: // Invalid Bed\n write(packetId);\n copyNBytes(1);\n break;\n case 0x47: // Weather\n write(packetId);\n copyNBytes(17);\n break;\n case 0x64:\n byte id = in.readByte();\n write(packetId);\n write(id);\n write(invtype);\n write(typeString);\n write8(typeString);\n write(in.readByte());\n break;\n case 0x65:\n write(slotFrom);\n write(typeTo);\n write(slotTo);\n write(in.readBoolean());\n short moveItem = in.readShort();\n write(moveItem);\n if (moveItem != -1) {\n write(in.readInt());\n write(in.readShort());\n write(in.readInt());\n write(in.readUTF());\n write(in.readUTF());\n write(in.readUTF());\n write(in.readUTF());\n write(readUTF16());\n write(readUTF16());\n write(readUTF16());\n write(readUTF16());\n break;\n case (byte)0xc6:\n write(packetId);\n copyNBytes(5);\n break;\n case (byte) 0xff: // Disconnect/Kick\n write(packetId);\n String reason = in.readUTF();\n String reason = readUTF16();\n write(reason);\n if (reason.startsWith(\"Took too long\")) {\n server.addRobot(player);\n }\n }\n packetFinished();\n }\n \n private String readUTF16() 
throws IOException {\n short length = in.readShort();\n byte[] bytes = new byte[2+length*2];\n bytes[0] = (byte)((length>>8)&0xFF);\n bytes[1] = (byte)(length&0xFF);\n for(short i = 0; i < length*2; i++) {\n bytes[i+2] = in.readByte();\n }\n try {\n String result = new String(bytes, \"UTF-16\");\n return result;\n } catch(Exception e) {\n return \"FUUUUU\"; \n }\n }\n\n\n private void print(byte[] bytes) {\n for(int i=0; i System.out.print(Integer.toHexString(bytes[i]) + \" \");\n }\n System.out.println();\n }\n \n private void lockChest(Coordinate coords) {\n write(in.readFloat());\n break;\n case 4:\n write(in.readUTF());\n write(readUTF16());\n break;\n case 5:\n write(in.readShort());\n }\n \n private void write(String s) throws IOException {\n byte[] bytes = s.getBytes(\"UTF-16\");\n for(int i = 2; i < bytes.length; i++) {\n write(bytes[i]);\n }\n }\n \n private void write8(String s) throws IOException {\n out.writeUTF(s);\n }\n \n }\n \n private void sendMessage(String message) throws IOException {\n write(0x03);\n write(message);\n packetFinished();\n //write(0x03);\n //write(message);\n //packetFinished();\n }\n \n private void packetFinished() throws IOException {\n System.out.println(\"[SimpleServer] \" + e);\n System.out.println(\"[SimpleServer] \" + streamType\n + \" error handling traffic for \" + player.getIPAddress());\n e.printStackTrace();\n }\n break;\n }"}}},{"rowIdx":2034,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"f223ec712d84a5eb4664a1fc76bb39ef895801eb"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"osmdroid/osmdroid,osmdroid/osmdroid,osmdroid/osmdroid,osmdroid/osmdroid"},"new_contents":{"kind":"string","value":"// Created by plusminus on 21:37:08 - 27.09.2008\npackage org.osmdroid.views;\n\nimport android.animation.Animator;\nimport android.animation.ValueAnimator;\nimport 
android.animation.ValueAnimator.AnimatorUpdateListener;\nimport android.annotation.TargetApi;\nimport android.graphics.Point;\nimport android.os.Build;\nimport android.view.View;\nimport android.view.animation.Animation;\nimport android.view.animation.Animation.AnimationListener;\nimport android.view.animation.ScaleAnimation;\n\nimport org.osmdroid.api.IGeoPoint;\nimport org.osmdroid.api.IMapController;\nimport org.osmdroid.config.Configuration;\nimport org.osmdroid.events.MapListener;\nimport org.osmdroid.events.ScrollEvent;\nimport org.osmdroid.events.ZoomEvent;\nimport org.osmdroid.util.BoundingBox;\nimport org.osmdroid.util.GeoPoint;\nimport org.osmdroid.util.TileSystem;\nimport org.osmdroid.views.MapView.OnFirstLayoutListener;\nimport org.osmdroid.views.util.MyMath;\n\nimport java.util.LinkedList;\n\n\n/**\n * @author Nicolas Gramlich\n * @author Marc Kurtz\n */\npublic class MapController implements IMapController, OnFirstLayoutListener {\n\n // ===========================================================\n // Constants\n // ===========================================================\n\n // ===========================================================\n // Fields\n // ===========================================================\n\n protected final MapView mMapView;\n\n // Zoom animations\n private ScaleAnimation mZoomInAnimationOld;\n private ScaleAnimation mZoomOutAnimationOld;\n private double mTargetZoomLevel=0;\n\n private Animator mCurrentAnimator;\n\n // Keep track of calls before initial layout\n private ReplayController mReplayController;\n\n // ===========================================================\n // Constructors\n // ===========================================================\n\n public MapController(MapView mapView) {\n mMapView = mapView;\n\n // Keep track of initial layout\n mReplayController = new ReplayController();\n if (!mMapView.isLayoutOccurred()) {\n mMapView.addOnFirstLayoutListener(this);\n }\n\n\n if (Build.VERSION.SDK_INT < 
Build.VERSION_CODES.HONEYCOMB) {\n ZoomAnimationListener zoomAnimationListener = new ZoomAnimationListener(this);\n mZoomInAnimationOld = new ScaleAnimation(1, 2, 1, 2, Animation.RELATIVE_TO_SELF, 0.5f,\n Animation.RELATIVE_TO_SELF, 0.5f);\n mZoomOutAnimationOld = new ScaleAnimation(1, 0.5f, 1, 0.5f, Animation.RELATIVE_TO_SELF,\n 0.5f, Animation.RELATIVE_TO_SELF, 0.5f);\n mZoomInAnimationOld.setDuration(Configuration.getInstance().getAnimationSpeedShort());\n mZoomOutAnimationOld.setDuration(Configuration.getInstance().getAnimationSpeedShort());\n mZoomInAnimationOld.setAnimationListener(zoomAnimationListener);\n mZoomOutAnimationOld.setAnimationListener(zoomAnimationListener);\n }\n }\n\n @Override\n public void onFirstLayout(View v, int left, int top, int right, int bottom) {\n mReplayController.replayCalls();\n }\n\n @Override\n public void zoomToSpan(double latSpan, double lonSpan) {\n if (latSpan <= 0 || lonSpan <= 0) {\n return;\n }\n\n // If no layout, delay this call\n if (!mMapView.isLayoutOccurred()) {\n mReplayController.zoomToSpan(latSpan, lonSpan);\n return;\n }\n\n final BoundingBox bb = this.mMapView.getProjection().getBoundingBox();\n final double curZoomLevel = this.mMapView.getProjection().getZoomLevel();\n\n final double curLatSpan = bb.getLatitudeSpan();\n final double curLonSpan = bb.getLongitudeSpan();\n\n final double diffNeededLat = (double) latSpan / curLatSpan; // i.e. 600/500 = 1,2\n final double diffNeededLon = (double) lonSpan / curLonSpan; // i.e. 300/400 = 0,75\n\n final double diffNeeded = Math.max(diffNeededLat, diffNeededLon); // i.e. 
1,2\n\n if (diffNeeded > 1) { // Zoom Out\n this.mMapView.setZoomLevel(curZoomLevel - MyMath.getNextSquareNumberAbove((float) diffNeeded));\n } else if (diffNeeded < 0.5) { // Can Zoom in\n this.mMapView.setZoomLevel(curZoomLevel\n + MyMath.getNextSquareNumberAbove(1 / (float) diffNeeded) - 1);\n }\n }\n\n // TODO rework zoomToSpan\n @Override\n public void zoomToSpan(int latSpanE6, int lonSpanE6) {\n zoomToSpan(latSpanE6 * 1E-6, lonSpanE6 * 1E-6);\n }\n\n /**\n * Start animating the map towards the given point.\n */\n @Override\n public void animateTo(final IGeoPoint point) {\n animateTo(point, null, null);\n }\n\n /**\n * @since 6.0.2\n */\n public void animateTo(final IGeoPoint point, final Double pZoom, final Long pSpeed) {\n // If no layout, delay this call\n if (!mMapView.isLayoutOccurred()) {\n mReplayController.animateTo(point, pZoom, pSpeed);\n return;\n }\n if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {\n final IGeoPoint currentCenter = new GeoPoint(mMapView.getProjection().getCurrentCenter());\n final MapAnimatorListener mapAnimatorListener =\n new MapAnimatorListener(this, mMapView.getZoomLevelDouble(), pZoom, currentCenter, point);\n final ValueAnimator mapAnimator = ValueAnimator.ofFloat(0, 1);\n mapAnimator.addListener(mapAnimatorListener);\n mapAnimator.addUpdateListener(mapAnimatorListener);\n if (pSpeed == null) {\n mapAnimator.setDuration(Configuration.getInstance().getAnimationSpeedDefault());\n } else {\n mapAnimator.setDuration(pSpeed);\n }\n\n mCurrentAnimator = mapAnimator;\n mapAnimator.start();\n return;\n }\n // TODO handle the zoom part for the .3% of the population below HONEYCOMB (Feb. 
2018)\n Point p = mMapView.getProjection().toPixels(point, null);\n animateTo(p.x, p.y);\n }\n\n /**\n * Start animating the map towards the given point.\n */\n @Override\n public void animateTo(int x, int y) {\n // If no layout, delay this call\n if (!mMapView.isLayoutOccurred()) {\n mReplayController.animateTo(x, y);\n return;\n }\n\n if (!mMapView.isAnimating()) {\n mMapView.mIsFlinging = false;\n final int xStart = (int)mMapView.getMapScrollX();\n final int yStart = (int)mMapView.getMapScrollY();\n\n final int dx = x - mMapView.getWidth() / 2;\n final int dy = y - mMapView.getHeight() / 2;\n\n if (dx != xStart || dy != yStart) {\n mMapView.getScroller().startScroll(xStart, yStart, dx, dy, Configuration.getInstance().getAnimationSpeedDefault());\n mMapView.postInvalidate();\n }\n }\n }\n\n @Override\n public void scrollBy(int x, int y) {\n this.mMapView.scrollBy(x, y);\n }\n\n /**\n * Set the map view to the given center. There will be no animation.\n */\n @Override\n public void setCenter(final IGeoPoint point) {\n // If no layout, delay this call\n for (MapListener mapListener: mMapView.mListners) {\n mapListener.onScroll(new ScrollEvent(mMapView, 0, 0));\n }\n if (!mMapView.isLayoutOccurred()) {\n mReplayController.setCenter(point);\n return;\n }\n mMapView.setExpectedCenter(point);\n }\n\n @Override\n public void stopPanning() {\n mMapView.mIsFlinging = false;\n mMapView.getScroller().forceFinished(true);\n }\n\n /**\n * Stops a running animation.\n *\n * @param jumpToTarget\n */\n @Override\n public void stopAnimation(final boolean jumpToTarget) {\n\n if (!mMapView.getScroller().isFinished()) {\n if (jumpToTarget) {\n mMapView.mIsFlinging = false;\n mMapView.getScroller().abortAnimation();\n } else\n stopPanning();\n }\n\n // We ignore the jumpToTarget for zoom levels since it doesn't make sense to stop\n // the animation in the middle. 
Maybe we could have it cancel the zoom operation and jump\n // back to original zoom level?\n if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {\n final Animator currentAnimator = this.mCurrentAnimator;\n if (mMapView.mIsAnimating.get()) {\n currentAnimator.end();\n }\n } else {\n if (mMapView.mIsAnimating.get()) {\n mMapView.clearAnimation();\n }\n }\n }\n\n @Override\n public int setZoom(final int zoomlevel) {\n return (int) setZoom((double) zoomlevel);\n }\n\n /**\n * @since 6.0\n */\n @Override\n public double setZoom(final double pZoomlevel) {\n return mMapView.setZoomLevel(pZoomlevel);\n }\n\n /**\n * Zoom in by one zoom level.\n */\n @Override\n public boolean zoomIn() {\n return zoomIn(null);\n }\n\n @Override\n public boolean zoomIn(Long animationSpeed) {\n return zoomTo(mMapView.getZoomLevelDouble() + 1, animationSpeed);\n }\n\n /**\n * @param xPixel\n * @param yPixel\n * @param zoomAnimation if null, the default is used\n * @return\n */\n @Override\n public boolean zoomInFixing(final int xPixel, final int yPixel, Long zoomAnimation) {\n return zoomToFixing(mMapView.getZoomLevelDouble() + 1, xPixel, yPixel, zoomAnimation);\n }\n\n @Override\n public boolean zoomInFixing(final int xPixel, final int yPixel) {\n return zoomInFixing(xPixel, yPixel, null);\n }\n\n @Override\n public boolean zoomOut(Long animationSpeed) {\n return zoomTo(mMapView.getZoomLevelDouble() - 1, animationSpeed);\n }\n\n /**\n * Zoom out by one zoom level.\n */\n @Override\n public boolean zoomOut() {\n return zoomOut(null);\n }\n\n @Deprecated\n @Override\n public boolean zoomOutFixing(final int xPixel, final int yPixel) {\n return zoomToFixing(mMapView.getZoomLevelDouble() - 1, xPixel, yPixel, null);\n }\n\n @Override\n public boolean zoomTo(int zoomLevel) {\n return zoomTo(zoomLevel, null);\n }\n\n /**\n * @since 6.0\n */\n @Override\n public boolean zoomTo(int zoomLevel, Long animationSpeed) {\n return zoomTo((double)zoomLevel, animationSpeed);\n }\n\n /**\n * @param 
zoomLevel\n * @param xPixel\n * @param yPixel\n * @param zoomAnimationSpeed time in milliseconds, if null, the default settings will be used\n * @return\n * @since 6.0.0\n */\n @Override\n public boolean zoomToFixing(int zoomLevel, int xPixel, int yPixel, Long zoomAnimationSpeed) {\n return zoomToFixing((double) zoomLevel, xPixel, yPixel, zoomAnimationSpeed);\n }\n\n @Override\n public boolean zoomTo(double pZoomLevel, Long animationSpeed) {\n return zoomToFixing(pZoomLevel, mMapView.getWidth() / 2, mMapView.getHeight() / 2, animationSpeed);\n }\n\n @Override\n public boolean zoomTo(double pZoomLevel) {\n return zoomTo(pZoomLevel, null);\n }\n\n\n @Override\n public boolean zoomToFixing(double zoomLevel, int xPixel, int yPixel, Long zoomAnimationSpeed) {\n zoomLevel = zoomLevel > mMapView.getMaxZoomLevel() ? mMapView.getMaxZoomLevel() : zoomLevel;\n zoomLevel = zoomLevel < mMapView.getMinZoomLevel() ? mMapView.getMinZoomLevel() : zoomLevel;\n\n double currentZoomLevel = mMapView.getZoomLevelDouble();\n boolean canZoom = zoomLevel < currentZoomLevel && mMapView.canZoomOut() ||\n zoomLevel > currentZoomLevel && mMapView.canZoomIn();\n\n if (!canZoom) {\n return false;\n }\n if (mMapView.mIsAnimating.getAndSet(true)) {\n // TODO extend zoom (and return true)\n return false;\n }\n for (MapListener mapListener: mMapView.mListners) {\n mapListener.onZoom(new ZoomEvent(mMapView, zoomLevel));\n }\n mMapView.setMultiTouchScaleInitPoint(xPixel, yPixel);\n mMapView.startAnimation();\n\n float end = (float) Math.pow(2.0, zoomLevel - currentZoomLevel);\n if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {\n final MapAnimatorListener zoomAnimatorListener = new MapAnimatorListener(this, currentZoomLevel, zoomLevel, null, null);\n final ValueAnimator zoomToAnimator = ValueAnimator.ofFloat(0, 1);\n zoomToAnimator.addListener(zoomAnimatorListener);\n zoomToAnimator.addUpdateListener(zoomAnimatorListener);\n if (zoomAnimationSpeed == null) {\n 
zoomToAnimator.setDuration(Configuration.getInstance().getAnimationSpeedShort());\n } else {\n zoomToAnimator.setDuration(zoomAnimationSpeed);\n }\n\n mCurrentAnimator = zoomToAnimator;\n zoomToAnimator.start();\n return true;\n }\n mTargetZoomLevel = zoomLevel;\n if (zoomLevel > currentZoomLevel)\n mMapView.startAnimation(mZoomInAnimationOld);\n else\n mMapView.startAnimation(mZoomOutAnimationOld);\n ScaleAnimation scaleAnimation;\n\n scaleAnimation = new ScaleAnimation(\n 1f, end, //X\n 1f, end, //Y\n Animation.RELATIVE_TO_SELF, 0.5f, //Pivot X\n Animation.RELATIVE_TO_SELF, 0.5f); //Pivot Y\n if (zoomAnimationSpeed == null) {\n scaleAnimation.setDuration(Configuration.getInstance().getAnimationSpeedShort());\n } else {\n scaleAnimation.setDuration(zoomAnimationSpeed);\n }\n scaleAnimation.setAnimationListener(new ZoomAnimationListener(this));\n return true;\n }\n\n /**\n * @since 6.0\n */\n @Override\n public boolean zoomToFixing(double zoomLevel, int xPixel, int yPixel) {\n return zoomToFixing(zoomLevel, xPixel, yPixel, null);\n }\n\n @Override\n public boolean zoomToFixing(int zoomLevel, int xPixel, int yPixel) {\n return zoomToFixing(zoomLevel, xPixel, yPixel, null);\n }\n\n\n protected void onAnimationStart() {\n mMapView.mIsAnimating.set(true);\n }\n\n protected void onAnimationEnd() {\n mMapView.mIsAnimating.set(false);\n mMapView.resetMultiTouchScale();\n if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {\n mCurrentAnimator = null;\n } else { // Fix for issue 477\n mMapView.clearAnimation();\n mZoomInAnimationOld.reset();\n mZoomOutAnimationOld.reset();\n setZoom(mTargetZoomLevel);\n }\n mMapView.invalidate();\n }\n\n @TargetApi(Build.VERSION_CODES.HONEYCOMB)\n private static class MapAnimatorListener\n implements Animator.AnimatorListener, AnimatorUpdateListener {\n\n private final GeoPoint mCenter = new GeoPoint(0., 0);\n private final MapController mMapController;\n private final Double mZoomStart;\n private final Double mZoomEnd;\n private 
final IGeoPoint mCenterStart;\n private final IGeoPoint mCenterEnd;\n\n public MapAnimatorListener(final MapController pMapController,\n final Double pZoomStart, final Double pZoomEnd,\n final IGeoPoint pCenterStart, final IGeoPoint pCenterEnd) {\n mMapController = pMapController;\n mZoomStart = pZoomStart;\n mZoomEnd = pZoomEnd;\n mCenterStart = pCenterStart;\n mCenterEnd = pCenterEnd;\n }\n\n @Override\n public void onAnimationStart(Animator animator) {\n mMapController.onAnimationStart();\n }\n\n @Override\n public void onAnimationEnd(Animator animator) {\n mMapController.onAnimationEnd();\n }\n\n @Override\n public void onAnimationCancel(Animator animator) {\n //noOp\n }\n\n @Override\n public void onAnimationRepeat(Animator animator) {\n //noOp\n }\n\n @Override\n public void onAnimationUpdate(ValueAnimator valueAnimator) {\n final float value = (Float) valueAnimator.getAnimatedValue();\n if (mZoomEnd != null) {\n final double zoom = mZoomStart + (mZoomEnd - mZoomStart) * value;\n mMapController.mMapView.setZoomLevel(zoom);\n }\n if (mCenterEnd != null) {\n final double longitudeStart = cleanLongitude(mCenterStart.getLongitude());\n final double longitudeEnd = cleanLongitude(mCenterEnd.getLongitude());\n final double longitude = cleanLongitude(longitudeStart + (longitudeEnd - longitudeStart) * value);\n final double latitudeStart = mCenterStart.getLatitude();\n final double latitudeEnd = mCenterEnd.getLatitude();\n final double latitude = cleanLongitude(latitudeStart + (latitudeEnd - latitudeStart) * value);\n mCenter.setCoords(latitude, longitude);\n mMapController.mMapView.setExpectedCenter(mCenter);\n }\n mMapController.mMapView.invalidate();\n }\n\n private double cleanLongitude(double pLongitude) {\n while (pLongitude < TileSystem.MinLongitude) {\n pLongitude += (TileSystem.MaxLongitude - TileSystem.MinLongitude);\n }\n while (pLongitude > TileSystem.MaxLongitude) {\n pLongitude -= (TileSystem.MaxLongitude - TileSystem.MinLongitude);\n }\n return 
pLongitude;\n }\n }\n\n protected static class ZoomAnimationListener implements AnimationListener {\n\n private MapController mMapController;\n\n public ZoomAnimationListener(MapController mapController) {\n mMapController = mapController;\n }\n\n @Override\n public void onAnimationStart(Animation animation) {\n mMapController.onAnimationStart();\n }\n\n @Override\n public void onAnimationEnd(Animation animation) {\n mMapController.onAnimationEnd();\n }\n\n @Override\n public void onAnimationRepeat(Animation animation) {\n //noOp\n }\n }\n\n private enum ReplayType {\n ZoomToSpanPoint, AnimateToPoint, AnimateToGeoPoint, SetCenterPoint\n }\n\n ;\n\n private class ReplayController {\n private LinkedList mReplayList = new LinkedList();\n\n public void animateTo(IGeoPoint geoPoint, Double pZoom, Long pSpeed) {\n mReplayList.add(new ReplayClass(ReplayType.AnimateToGeoPoint, null, geoPoint, pZoom, pSpeed));\n }\n\n public void animateTo(int x, int y) {\n mReplayList.add(new ReplayClass(ReplayType.AnimateToPoint, new Point(x, y), null));\n }\n\n public void setCenter(IGeoPoint geoPoint) {\n mReplayList.add(new ReplayClass(ReplayType.SetCenterPoint, null, geoPoint));\n }\n\n public void zoomToSpan(int x, int y) {\n mReplayList.add(new ReplayClass(ReplayType.ZoomToSpanPoint, new Point(x, y), null));\n }\n\n public void zoomToSpan(double x, double y) {\n mReplayList.add(new ReplayClass(ReplayType.ZoomToSpanPoint, new Point((int) (x * 1E6), (int) (y * 1E6)), null));\n }\n\n\n public void replayCalls() {\n for (ReplayClass replay : mReplayList) {\n switch (replay.mReplayType) {\n case AnimateToGeoPoint:\n if (replay.mGeoPoint != null)\n MapController.this.animateTo(replay.mGeoPoint, replay.mZoom, replay.mSpeed);\n break;\n case AnimateToPoint:\n if (replay.mPoint != null)\n MapController.this.animateTo(replay.mPoint.x, replay.mPoint.y);\n break;\n case SetCenterPoint:\n if (replay.mGeoPoint != null)\n MapController.this.setCenter(replay.mGeoPoint);\n break;\n case 
ZoomToSpanPoint:\n if (replay.mPoint != null)\n MapController.this.zoomToSpan(replay.mPoint.x, replay.mPoint.y);\n break;\n }\n }\n mReplayList.clear();\n }\n\n private class ReplayClass {\n private ReplayType mReplayType;\n private Point mPoint;\n private IGeoPoint mGeoPoint;\n private final Long mSpeed;\n private final Double mZoom;\n\n public ReplayClass(ReplayType mReplayType, Point mPoint, IGeoPoint mGeoPoint) {\n this(mReplayType, mPoint, mGeoPoint, null, null);\n }\n\n /**\n * @since 6.0.2\n */\n public ReplayClass(ReplayType pReplayType, Point pPoint, IGeoPoint pGeoPoint, Double pZoom, Long pSpeed) {\n mReplayType = pReplayType;\n mPoint = pPoint;\n mGeoPoint = pGeoPoint;\n mSpeed = pSpeed;\n mZoom = pZoom;\n }\n }\n }\n\n}\n"},"new_file":{"kind":"string","value":"osmdroid-android/src/main/java/org/osmdroid/views/MapController.java"},"old_contents":{"kind":"string","value":"// Created by plusminus on 21:37:08 - 27.09.2008\npackage org.osmdroid.views;\n\nimport android.animation.Animator;\nimport android.animation.ValueAnimator;\nimport android.animation.ValueAnimator.AnimatorUpdateListener;\nimport android.annotation.TargetApi;\nimport android.graphics.Point;\nimport android.os.Build;\nimport android.view.View;\nimport android.view.animation.Animation;\nimport android.view.animation.Animation.AnimationListener;\nimport android.view.animation.ScaleAnimation;\n\nimport org.osmdroid.api.IGeoPoint;\nimport org.osmdroid.api.IMapController;\nimport org.osmdroid.config.Configuration;\nimport org.osmdroid.events.MapListener;\nimport org.osmdroid.events.ScrollEvent;\nimport org.osmdroid.events.ZoomEvent;\nimport org.osmdroid.util.BoundingBox;\nimport org.osmdroid.util.GeoPoint;\nimport org.osmdroid.util.TileSystem;\nimport org.osmdroid.views.MapView.OnFirstLayoutListener;\nimport org.osmdroid.views.util.MyMath;\n\nimport java.util.LinkedList;\n\n\n/**\n * @author Nicolas Gramlich\n * @author Marc Kurtz\n */\npublic class MapController implements IMapController, 
OnFirstLayoutListener {\n\n // ===========================================================\n // Constants\n // ===========================================================\n\n // ===========================================================\n // Fields\n // ===========================================================\n\n protected final MapView mMapView;\n\n // Zoom animations\n private ScaleAnimation mZoomInAnimationOld;\n private ScaleAnimation mZoomOutAnimationOld;\n private double mTargetZoomLevel=0;\n\n private Animator mCurrentAnimator;\n\n // Keep track of calls before initial layout\n private ReplayController mReplayController;\n\n // ===========================================================\n // Constructors\n // ===========================================================\n\n public MapController(MapView mapView) {\n mMapView = mapView;\n\n // Keep track of initial layout\n mReplayController = new ReplayController();\n if (!mMapView.isLayoutOccurred()) {\n mMapView.addOnFirstLayoutListener(this);\n }\n\n\n if (Build.VERSION.SDK_INT < Build.VERSION_CODES.HONEYCOMB) {\n ZoomAnimationListener zoomAnimationListener = new ZoomAnimationListener(this);\n mZoomInAnimationOld = new ScaleAnimation(1, 2, 1, 2, Animation.RELATIVE_TO_SELF, 0.5f,\n Animation.RELATIVE_TO_SELF, 0.5f);\n mZoomOutAnimationOld = new ScaleAnimation(1, 0.5f, 1, 0.5f, Animation.RELATIVE_TO_SELF,\n 0.5f, Animation.RELATIVE_TO_SELF, 0.5f);\n mZoomInAnimationOld.setDuration(Configuration.getInstance().getAnimationSpeedShort());\n mZoomOutAnimationOld.setDuration(Configuration.getInstance().getAnimationSpeedShort());\n mZoomInAnimationOld.setAnimationListener(zoomAnimationListener);\n mZoomOutAnimationOld.setAnimationListener(zoomAnimationListener);\n }\n }\n\n @Override\n public void onFirstLayout(View v, int left, int top, int right, int bottom) {\n mReplayController.replayCalls();\n }\n\n @Override\n public void zoomToSpan(double latSpan, double lonSpan) {\n if (latSpan <= 0 || lonSpan <= 0) {\n 
return;\n }\n\n // If no layout, delay this call\n if (!mMapView.isLayoutOccurred()) {\n mReplayController.zoomToSpan(latSpan, lonSpan);\n return;\n }\n\n final BoundingBox bb = this.mMapView.getProjection().getBoundingBox();\n final double curZoomLevel = this.mMapView.getProjection().getZoomLevel();\n\n final double curLatSpan = bb.getLatitudeSpan();\n final double curLonSpan = bb.getLongitudeSpan();\n\n final double diffNeededLat = (double) latSpan / curLatSpan; // i.e. 600/500 = 1,2\n final double diffNeededLon = (double) lonSpan / curLonSpan; // i.e. 300/400 = 0,75\n\n final double diffNeeded = Math.max(diffNeededLat, diffNeededLon); // i.e. 1,2\n\n if (diffNeeded > 1) { // Zoom Out\n this.mMapView.setZoomLevel(curZoomLevel - MyMath.getNextSquareNumberAbove((float) diffNeeded));\n } else if (diffNeeded < 0.5) { // Can Zoom in\n this.mMapView.setZoomLevel(curZoomLevel\n + MyMath.getNextSquareNumberAbove(1 / (float) diffNeeded) - 1);\n }\n }\n\n // TODO rework zoomToSpan\n @Override\n public void zoomToSpan(int latSpanE6, int lonSpanE6) {\n zoomToSpan(latSpanE6 * 1E-6, lonSpanE6 * 1E-6);\n }\n\n /**\n * Start animating the map towards the given point.\n */\n @Override\n public void animateTo(final IGeoPoint point) {\n animateTo(point, null, null);\n }\n\n /**\n * @since 6.0.2\n */\n public void animateTo(final IGeoPoint point, final Double pZoom, final Long pSpeed) {\n // If no layout, delay this call\n if (!mMapView.isLayoutOccurred()) {\n mReplayController.animateTo(point, pZoom, pSpeed);\n return;\n }\n if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {\n final IGeoPoint currentCenter = new GeoPoint(mMapView.getProjection().getCurrentCenter());\n final MapAnimatorListener mapAnimatorListener =\n new MapAnimatorListener(this, mMapView.getZoomLevelDouble(), pZoom, currentCenter, point);\n final ValueAnimator mapAnimator = ValueAnimator.ofFloat(0, 1);\n mapAnimator.addListener(mapAnimatorListener);\n 
mapAnimator.addUpdateListener(mapAnimatorListener);\n if (pSpeed == null) {\n mapAnimator.setDuration(Configuration.getInstance().getAnimationSpeedDefault());\n } else {\n mapAnimator.setDuration(pSpeed);\n }\n\n mCurrentAnimator = mapAnimator;\n mapAnimator.start();\n return;\n }\n // TODO handle the zoom part for the .3% of the population below HONEYCOMB (Feb. 2018)\n Point p = mMapView.getProjection().toPixels(point, null);\n animateTo(p.x, p.y);\n }\n\n /**\n * Start animating the map towards the given point.\n */\n @Override\n public void animateTo(int x, int y) {\n // If no layout, delay this call\n if (!mMapView.isLayoutOccurred()) {\n mReplayController.animateTo(x, y);\n return;\n }\n\n if (!mMapView.isAnimating()) {\n mMapView.mIsFlinging = false;\n final int xStart = (int)mMapView.getMapScrollX();\n final int yStart = (int)mMapView.getMapScrollY();\n\n final int dx = x - mMapView.getWidth() / 2;\n final int dy = y - mMapView.getHeight() / 2;\n\n if (dx != xStart || dy != yStart) {\n mMapView.getScroller().startScroll(xStart, yStart, dx, dy, Configuration.getInstance().getAnimationSpeedDefault());\n mMapView.postInvalidate();\n }\n }\n }\n\n @Override\n public void scrollBy(int x, int y) {\n this.mMapView.scrollBy(x, y);\n }\n\n /**\n * Set the map view to the given center. 
There will be no animation.\n */\n @Override\n public void setCenter(final IGeoPoint point) {\n // If no layout, delay this call\n for (MapListener mapListener: mMapView.mListners) {\n mapListener.onScroll(new ScrollEvent(mMapView, 0, 0));\n }\n if (!mMapView.isLayoutOccurred()) {\n mReplayController.setCenter(point);\n return;\n }\n mMapView.setExpectedCenter(point);\n }\n\n @Override\n public void stopPanning() {\n mMapView.mIsFlinging = false;\n mMapView.getScroller().forceFinished(true);\n }\n\n /**\n * Stops a running animation.\n *\n * @param jumpToTarget\n */\n @Override\n public void stopAnimation(final boolean jumpToTarget) {\n\n if (!mMapView.getScroller().isFinished()) {\n if (jumpToTarget) {\n mMapView.mIsFlinging = false;\n mMapView.getScroller().abortAnimation();\n } else\n stopPanning();\n }\n\n // We ignore the jumpToTarget for zoom levels since it doesn't make sense to stop\n // the animation in the middle. Maybe we could have it cancel the zoom operation and jump\n // back to original zoom level?\n if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {\n final Animator currentAnimator = this.mCurrentAnimator;\n if (mMapView.mIsAnimating.get()) {\n currentAnimator.end();\n }\n } else {\n if (mMapView.mIsAnimating.get()) {\n mMapView.clearAnimation();\n }\n }\n }\n\n @Override\n public int setZoom(final int zoomlevel) {\n return (int) setZoom((double) zoomlevel);\n }\n\n /**\n * @since 6.0\n */\n @Override\n public double setZoom(final double pZoomlevel) {\n return mMapView.setZoomLevel(pZoomlevel);\n }\n\n /**\n * Zoom in by one zoom level.\n */\n @Override\n public boolean zoomIn() {\n return zoomIn(null);\n }\n\n @Override\n public boolean zoomIn(Long animationSpeed) {\n return zoomTo(mMapView.getZoomLevelDouble() + 1, animationSpeed);\n }\n\n /**\n * @param xPixel\n * @param yPixel\n * @param zoomAnimation if null, the default is used\n * @return\n */\n @Override\n public boolean zoomInFixing(final int xPixel, final int yPixel, Long 
zoomAnimation) {\n return zoomToFixing(mMapView.getZoomLevelDouble() + 1, xPixel, yPixel, zoomAnimation);\n }\n\n @Override\n public boolean zoomInFixing(final int xPixel, final int yPixel) {\n return zoomInFixing(xPixel, yPixel, null);\n }\n\n @Override\n public boolean zoomOut(Long animationSpeed) {\n return zoomTo(mMapView.getZoomLevelDouble() - 1, animationSpeed);\n }\n\n /**\n * Zoom out by one zoom level.\n */\n @Override\n public boolean zoomOut() {\n return zoomOut(null);\n }\n\n @Deprecated\n @Override\n public boolean zoomOutFixing(final int xPixel, final int yPixel) {\n return zoomToFixing(mMapView.getZoomLevelDouble() - 1, xPixel, yPixel, null);\n }\n\n @Override\n public boolean zoomTo(int zoomLevel) {\n return zoomTo(zoomLevel, null);\n }\n\n /**\n * @since 6.0\n */\n @Override\n public boolean zoomTo(int zoomLevel, Long animationSpeed) {\n return zoomTo((double)zoomLevel, animationSpeed);\n }\n\n /**\n * @param zoomLevel\n * @param xPixel\n * @param yPixel\n * @param zoomAnimationSpeed time in milliseconds, if null, the default settings will be used\n * @return\n * @since 6.0.0\n */\n @Override\n public boolean zoomToFixing(int zoomLevel, int xPixel, int yPixel, Long zoomAnimationSpeed) {\n return zoomToFixing((double) zoomLevel, xPixel, yPixel, zoomAnimationSpeed);\n }\n\n @Override\n public boolean zoomTo(double pZoomLevel, Long animationSpeed) {\n return zoomToFixing(pZoomLevel, mMapView.getWidth() / 2, mMapView.getHeight() / 2, animationSpeed);\n }\n\n @Override\n public boolean zoomTo(double pZoomLevel) {\n return zoomTo(pZoomLevel, null);\n }\n\n\n @Override\n public boolean zoomToFixing(double zoomLevel, int xPixel, int yPixel, Long zoomAnimationSpeed) {\n zoomLevel = zoomLevel > mMapView.getMaxZoomLevel() ? mMapView.getMaxZoomLevel() : zoomLevel;\n zoomLevel = zoomLevel < mMapView.getMinZoomLevel() ? 
mMapView.getMinZoomLevel() : zoomLevel;\n\n double currentZoomLevel = mMapView.getZoomLevelDouble();\n boolean canZoom = zoomLevel < currentZoomLevel && mMapView.canZoomOut() ||\n zoomLevel > currentZoomLevel && mMapView.canZoomIn();\n\n if (!canZoom) {\n return false;\n }\n if (mMapView.mIsAnimating.getAndSet(true)) {\n // TODO extend zoom (and return true)\n return false;\n }\n for (MapListener mapListener: mMapView.mListners) {\n mapListener.onZoom(new ZoomEvent(mMapView, zoomLevel));\n }\n mMapView.setMultiTouchScaleInitPoint(xPixel, yPixel);\n mMapView.startAnimation();\n\n float end = (float) Math.pow(2.0, zoomLevel - currentZoomLevel);\n if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {\n final MapAnimatorListener zoomAnimatorListener = new MapAnimatorListener(this, currentZoomLevel, zoomLevel, null, null);\n final ValueAnimator zoomToAnimator = ValueAnimator.ofFloat(0, 1);\n zoomToAnimator.addListener(zoomAnimatorListener);\n zoomToAnimator.addUpdateListener(zoomAnimatorListener);\n if (zoomAnimationSpeed == null) {\n zoomToAnimator.setDuration(Configuration.getInstance().getAnimationSpeedShort());\n } else {\n zoomToAnimator.setDuration(zoomAnimationSpeed);\n }\n\n mCurrentAnimator = zoomToAnimator;\n zoomToAnimator.start();\n return true;\n }\n mTargetZoomLevel = zoomLevel;\n if (zoomLevel > currentZoomLevel)\n mMapView.startAnimation(mZoomInAnimationOld);\n else\n mMapView.startAnimation(mZoomOutAnimationOld);\n ScaleAnimation scaleAnimation;\n\n scaleAnimation = new ScaleAnimation(\n 1f, end, //X\n 1f, end, //Y\n Animation.RELATIVE_TO_SELF, 0.5f, //Pivot X\n Animation.RELATIVE_TO_SELF, 0.5f); //Pivot Y\n if (zoomAnimationSpeed == null) {\n scaleAnimation.setDuration(Configuration.getInstance().getAnimationSpeedShort());\n } else {\n scaleAnimation.setDuration(zoomAnimationSpeed);\n }\n scaleAnimation.setAnimationListener(new ZoomAnimationListener(this));\n return true;\n }\n\n /**\n * @since 6.0\n */\n @Override\n public boolean 
zoomToFixing(double zoomLevel, int xPixel, int yPixel) {\n return zoomToFixing(zoomLevel, xPixel, yPixel, null);\n }\n\n @Override\n public boolean zoomToFixing(int zoomLevel, int xPixel, int yPixel) {\n return zoomToFixing(zoomLevel, xPixel, yPixel, null);\n }\n\n\n protected void onAnimationStart() {\n mMapView.mIsAnimating.set(true);\n }\n\n protected void onAnimationEnd() {\n mMapView.mIsAnimating.set(false);\n mMapView.resetMultiTouchScale();\n if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {\n mCurrentAnimator = null;\n } else { // Fix for issue 477\n mMapView.clearAnimation();\n mZoomInAnimationOld.reset();\n mZoomOutAnimationOld.reset();\n setZoom(mTargetZoomLevel);\n }\n }\n\n @TargetApi(Build.VERSION_CODES.HONEYCOMB)\n private static class MapAnimatorListener\n implements Animator.AnimatorListener, AnimatorUpdateListener {\n\n private final GeoPoint mCenter = new GeoPoint(0., 0);\n private final MapController mMapController;\n private final Double mZoomStart;\n private final Double mZoomEnd;\n private final IGeoPoint mCenterStart;\n private final IGeoPoint mCenterEnd;\n\n public MapAnimatorListener(final MapController pMapController,\n final Double pZoomStart, final Double pZoomEnd,\n final IGeoPoint pCenterStart, final IGeoPoint pCenterEnd) {\n mMapController = pMapController;\n mZoomStart = pZoomStart;\n mZoomEnd = pZoomEnd;\n mCenterStart = pCenterStart;\n mCenterEnd = pCenterEnd;\n }\n\n @Override\n public void onAnimationStart(Animator animator) {\n mMapController.onAnimationStart();\n }\n\n @Override\n public void onAnimationEnd(Animator animator) {\n mMapController.onAnimationEnd();\n }\n\n @Override\n public void onAnimationCancel(Animator animator) {\n //noOp\n }\n\n @Override\n public void onAnimationRepeat(Animator animator) {\n //noOp\n }\n\n @Override\n public void onAnimationUpdate(ValueAnimator valueAnimator) {\n final float value = (Float) valueAnimator.getAnimatedValue();\n if (mZoomEnd != null) {\n final double zoom = 
mZoomStart + (mZoomEnd - mZoomStart) * value;\n mMapController.mMapView.setZoomLevel(zoom);\n }\n if (mCenterEnd != null) {\n final double longitudeStart = cleanLongitude(mCenterStart.getLongitude());\n final double longitudeEnd = cleanLongitude(mCenterEnd.getLongitude());\n final double longitude = cleanLongitude(longitudeStart + (longitudeEnd - longitudeStart) * value);\n final double latitudeStart = mCenterStart.getLatitude();\n final double latitudeEnd = mCenterEnd.getLatitude();\n final double latitude = cleanLongitude(latitudeStart + (latitudeEnd - latitudeStart) * value);\n mCenter.setCoords(latitude, longitude);\n mMapController.mMapView.setExpectedCenter(mCenter);\n }\n mMapController.mMapView.invalidate();\n }\n\n private double cleanLongitude(double pLongitude) {\n while (pLongitude < TileSystem.MinLongitude) {\n pLongitude += (TileSystem.MaxLongitude - TileSystem.MinLongitude);\n }\n while (pLongitude > TileSystem.MaxLongitude) {\n pLongitude -= (TileSystem.MaxLongitude - TileSystem.MinLongitude);\n }\n return pLongitude;\n }\n }\n\n protected static class ZoomAnimationListener implements AnimationListener {\n\n private MapController mMapController;\n\n public ZoomAnimationListener(MapController mapController) {\n mMapController = mapController;\n }\n\n @Override\n public void onAnimationStart(Animation animation) {\n mMapController.onAnimationStart();\n }\n\n @Override\n public void onAnimationEnd(Animation animation) {\n mMapController.onAnimationEnd();\n }\n\n @Override\n public void onAnimationRepeat(Animation animation) {\n //noOp\n }\n }\n\n private enum ReplayType {\n ZoomToSpanPoint, AnimateToPoint, AnimateToGeoPoint, SetCenterPoint\n }\n\n ;\n\n private class ReplayController {\n private LinkedList mReplayList = new LinkedList();\n\n public void animateTo(IGeoPoint geoPoint, Double pZoom, Long pSpeed) {\n mReplayList.add(new ReplayClass(ReplayType.AnimateToGeoPoint, null, geoPoint, pZoom, pSpeed));\n }\n\n public void animateTo(int x, int y) 
{\n mReplayList.add(new ReplayClass(ReplayType.AnimateToPoint, new Point(x, y), null));\n }\n\n public void setCenter(IGeoPoint geoPoint) {\n mReplayList.add(new ReplayClass(ReplayType.SetCenterPoint, null, geoPoint));\n }\n\n public void zoomToSpan(int x, int y) {\n mReplayList.add(new ReplayClass(ReplayType.ZoomToSpanPoint, new Point(x, y), null));\n }\n\n public void zoomToSpan(double x, double y) {\n mReplayList.add(new ReplayClass(ReplayType.ZoomToSpanPoint, new Point((int) (x * 1E6), (int) (y * 1E6)), null));\n }\n\n\n public void replayCalls() {\n for (ReplayClass replay : mReplayList) {\n switch (replay.mReplayType) {\n case AnimateToGeoPoint:\n if (replay.mGeoPoint != null)\n MapController.this.animateTo(replay.mGeoPoint, replay.mZoom, replay.mSpeed);\n break;\n case AnimateToPoint:\n if (replay.mPoint != null)\n MapController.this.animateTo(replay.mPoint.x, replay.mPoint.y);\n break;\n case SetCenterPoint:\n if (replay.mGeoPoint != null)\n MapController.this.setCenter(replay.mGeoPoint);\n break;\n case ZoomToSpanPoint:\n if (replay.mPoint != null)\n MapController.this.zoomToSpan(replay.mPoint.x, replay.mPoint.y);\n break;\n }\n }\n mReplayList.clear();\n }\n\n private class ReplayClass {\n private ReplayType mReplayType;\n private Point mPoint;\n private IGeoPoint mGeoPoint;\n private final Long mSpeed;\n private final Double mZoom;\n\n public ReplayClass(ReplayType mReplayType, Point mPoint, IGeoPoint mGeoPoint) {\n this(mReplayType, mPoint, mGeoPoint, null, null);\n }\n\n /**\n * @since 6.0.2\n */\n public ReplayClass(ReplayType pReplayType, Point pPoint, IGeoPoint pGeoPoint, Double pZoom, Long pSpeed) {\n mReplayType = pReplayType;\n mPoint = pPoint;\n mGeoPoint = pGeoPoint;\n mSpeed = pSpeed;\n mZoom = pZoom;\n }\n }\n }\n\n}\n"},"message":{"kind":"string","value":"bug/#1040 - explicit call to invalidate after animation\n\nThank you @InI4 for this bug fix suggestion.\n\nImpacted classes:\n* `MapController`: added an explicit call to `invalidate` after 
animation, in method `onAnimationEnd`\n"},"old_file":{"kind":"string","value":"osmdroid-android/src/main/java/org/osmdroid/views/MapController.java"},"subject":{"kind":"string","value":"bug/#1040 - explicit call to invalidate after animation"},"git_diff":{"kind":"string","value":"smdroid-android/src/main/java/org/osmdroid/views/MapController.java\n mZoomOutAnimationOld.reset();\n setZoom(mTargetZoomLevel);\n }\n mMapView.invalidate();\n }\n \n @TargetApi(Build.VERSION_CODES.HONEYCOMB)"}}},{"rowIdx":2035,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"agpl-3.0"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"b2b9bcc4ad07c2f88e810747ee5405cfae12d3ec"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"splicemachine/spliceengine,CompilerWorks/spliceengine,CompilerWorks/spliceengine,CompilerWorks/spliceengine,splicemachine/spliceengine,splicemachine/spliceengine,splicemachine/spliceengine,splicemachine/spliceengine,CompilerWorks/spliceengine,CompilerWorks/spliceengine,CompilerWorks/spliceengine,splicemachine/spliceengine,splicemachine/spliceengine"},"new_contents":{"kind":"string","value":"/*\n\n Derby - Class com.splicemachine.db.iapi.sql.execute.ResultSetFactory\n\n Licensed to the Apache Software Foundation (ASF) under one or more\n contributor license agreements. See the NOTICE file distributed with\n this work for additional information regarding copyright ownership.\n The ASF licenses this file to you under the Apache License, Version 2.0\n (the \"License\"); you may not use this file except in compliance with\n the License. 
You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n\n */\n\npackage com.splicemachine.db.iapi.sql.execute;\n\nimport java.util.List;\n\nimport com.splicemachine.db.iapi.error.StandardException;\nimport com.splicemachine.db.iapi.services.loader.GeneratedMethod;\nimport com.splicemachine.db.iapi.sql.Activation;\nimport com.splicemachine.db.iapi.sql.ResultSet;\nimport com.splicemachine.db.iapi.types.DataValueDescriptor;\n\n/**\n * ResultSetFactory provides a wrapper around all of\n * the result sets needed in an execution implementation.\n *

\n * For the activations to avoid searching for this module\n * in their execute methods, the base activation supertype\n * should implement a method that does the lookup and salts\n * away this factory for the activation to use as it needs it.\n *\n */\npublic interface ResultSetFactory {\n\t/**\n\t\tModule name for the monitor's module locating system.\n\t */\n\tString MODULE = \"com.splicemachine.db.iapi.sql.execute.ResultSetFactory\";\n\n\t//\n\t// DDL operations\n\t//\n\n\t/**\n\t Generic DDL result set creation.\n\n\t\t@param activation \t\tthe activation for this result set\n\n\t\t@return\tResultSet\tA wrapper result set to run the Execution-time\n\t\t logic.\n\t\t@exception StandardException thrown when unable to create the\n\t\t\tresult set\n\t */\n\tResultSet getDDLResultSet(Activation activation)\n\t\t\t\t\tthrows StandardException;\n\n\n\t//\n\t// MISC operations\n\t//\n\n\t/**\n\t Generic Misc result set creation.\n\n\t\t@param activation \t\tthe activation for this result set\n\n\t\t@return\tResultSet\tA wrapper result set to run the Execution-time\n\t\t logic.\n\t\t@exception StandardException thrown when unable to create the\n\t\t\tresult set\n\t */\n\tResultSet getMiscResultSet(Activation activation)\n\t\t\t\t\tthrows StandardException;\n\n\t//\n\t// Transaction operations\n\t//\n\t/**\n\n\t\t@param activation \t\tthe activation for this result set\n\n\t\t@return\tResultSet\tA wrapper result set to run the Execution-time\n\t\t logic.\n\t\t@exception StandardException thrown when unable to create the\n\t\t\tresult set\n\t */\n\tResultSet getSetTransactionResultSet(Activation activation) \n\t\tthrows StandardException;\n\n\t//\n\t// DML statement operations\n\t//\n\t/**\n\t\tAn insert result set simply reports that it completed, and\n\t\tthe number of rows inserted. 
It does not return rows.\n\t\tThe insert has been completed once the\n\t\tinsert result set is available.\n\n\t\t@param source the result set from which to take rows to\n\t\t\tbe inserted into the target table.\n\t\t@param generationClauses\tThe code to compute column generation clauses if any\n\t\t@param checkGM\tThe code to enforce the check constraints, if any\n\t\t@return the insert operation as a result set.\n\t\t@exception StandardException thrown when unable to perform the insert\n\t */\n\tResultSet getInsertResultSet(NoPutResultSet source,\n\t GeneratedMethod generationClauses,\n\t\t\t\t\t\t\t\t GeneratedMethod checkGM,\n\t\t\t\t\t\t\t\t String insertMode,\n\t\t\t\t\t\t\t\t String statusDirectory,\n\t\t\t\t\t\t\t\t int failBadRecordCount,\n double optimizerEstimatedRowCount,\n double optimizerEstimatedCost,\n String tableVersion,\n String explainPlan)\n throws StandardException;\n\n\t/**\n\t\tAn insert VTI result set simply reports that it completed, and\n\t\tthe number of rows inserted. It does not return rows.\n\t\tThe insert has been completed once the\n\t\tinsert result set is available.\n\n\t\t@param source the result set from which to take rows to\n\t\t\tbe inserted into the target table.\n\t\t@param vtiRS\tThe code to instantiate the VTI, if necessary\n\t\t@return the insert VTI operation as a result set.\n\t\t@exception StandardException thrown when unable to perform the insert\n\t */\n\tResultSet getInsertVTIResultSet(NoPutResultSet source, \n\t\t\t\t\t\t\t\t NoPutResultSet vtiRS,\n double optimizerEstimatedRowCount,\n double optimizerEstimatedCost)\n throws StandardException;\n\n\t/**\n\t\tA delete VTI result set simply reports that it completed, and\n\t\tthe number of rows deleted. 
It does not return rows.\n\t\tThe delete has been completed once the\n\t\tdelete result set is available.\n\n\t\t@param source the result set from which to take rows to\n\t\t\tbe inserted into the target table.\n\t\t@return the delete VTI operation as a result set.\n\t\t@exception StandardException thrown when unable to perform the insert\n\t */\n\tResultSet getDeleteVTIResultSet(NoPutResultSet source,double optimizerEstimatedRowCount,\n double optimizerEstimatedCost)\n throws StandardException;\n\n\t/**\n\t\tA delete result set simply reports that it completed, and\n\t\tthe number of rows deleted. It does not return rows.\n\t\tThe delete has been completed once the\n\t\tdelete result set is available.\n\n\t\t@param source the result set from which to take rows to\n\t\t\tbe deleted from the target table. This result set must\n\t\t\tcontain one column which provides RowLocations that are\n\t\t\tvalid in the target table.\n\t\t@return the delete operation as a result set.\n\t\t@exception StandardException thrown when unable to perform the delete\n\t */\n\tResultSet getDeleteResultSet(NoPutResultSet source,double optimizerEstimatedRowCount,\n double optimizerEstimatedCost, String tableVersion,\n String explainPlan)\n\t\t\t\t\t\t\tthrows StandardException;\n\n\t/**\n\t\tA delete Cascade result set simply reports that it completed, and\n\t\tthe number of rows deleted. 
It does not return rows.\n\t\tThe delete has been completed once the\n\t\tdelete result set is available.\n\n\t\t@param source the result set from which to take rows to\n\t\t\tbe deleted from the target table.\n\t\t@param constantActionItem a constant action saved object reference\n\t\t@param dependentResultSets an array of DeleteCascade Resultsets\n for the current table referential action\n\t\t\t\t\t\t\t\t dependents tables.\n\t\t@param resultSetId an Id which is used to store the refence\n to the temporary result set created of\n the materilized rows.Dependent table resultsets\n\t\t\t\t\t\t\tuses the same id to access their parent temporary result sets.\n\t\t@return the delete operation as a delete cascade result set.\n\t\t@exception StandardException thrown when unable to perform the delete\n\t */\n\tResultSet getDeleteCascadeResultSet(NoPutResultSet source,\n\t\t\t\t\t\t\t\t\t\tint constantActionItem,\n\t\t\t\t\t\t\t\t\t\tResultSet[] dependentResultSets, \n\t\t\t\t\t\t\t\t\t\tString resultSetId)\n\t\t\t\t\t\t\tthrows StandardException;\n\n\t/**\n\t\tAn update result set simply reports that it completed, and\n\t\tthe number of rows updated. It does not return rows.\n\t\tThe update has been completed once the\n\t\tupdate result set is available.\n\n\t\t@param source the result set from which to take rows to be \n\t\t\tupdated in the target table. 
This result set must contain \n\t\t\ta column which provides RowLocations that are valid in the \n\t\t\ttarget table, and new values to be placed in those rows.\n\t\t@param generationClauses\tThe code to compute column generation clauses if any\n\t\t@param checkGM\tThe code to enforce the check constraints, if any\n\t\t@return the update operation as a result set.\n\t\t@exception StandardException thrown when unable to perform the update\n\t */\n\tResultSet getUpdateResultSet(NoPutResultSet source, GeneratedMethod generationClauses,\n\t\t\t\t\t\t\t\t GeneratedMethod checkGM,double optimizerEstimatedRowCount,\n double optimizerEstimatedCost, String tableVersion,\n String explainPlan)\n throws StandardException;\n\n\t/**\n * @param source the result set from which to take rows to be \n * updated in the target table.\n * @return the update operation as a result set.\n * @exception StandardException thrown on error\n\t */\n\tpublic ResultSet getUpdateVTIResultSet(NoPutResultSet source,double optimizerEstimatedRowCount,\n double optimizerEstimatedCost)\n throws StandardException;\n\n\t/**\n\t\tAn update result set simply reports that it completed, and\n\t\tthe number of rows updated. It does not return rows.\n\t\tThe update has been completed once the\n\t\tupdate result set is available.\n\n\t\t@param source the result set from which to take rows to be \n\t\t\tupdated in the target table. This result set must contain \n\t\t\ta column which provides RowLocations that are valid in the \n\t\t\ttarget table, and new values to be placed in those rows.\n\t\t@param generationClauses\tThe code to compute generated columns, if any\n\t\t@param checkGM\tThe code to enforce the check constraints, if any\n\t\t@param constantActionItem a constant action saved object reference\n\t\t@param rsdItem result Description, saved object id. 
\t\t\t\t\n\t\t@return the update operation as a result set.\n\t\t@exception StandardException thrown when unable to perform the update\n\t */\n\tResultSet getDeleteCascadeUpdateResultSet(NoPutResultSet source, \n\t\t\t\t\t\t\t\t GeneratedMethod generationClauses,\n\t\t\t\t\t\t\t\t GeneratedMethod checkGM,\n\t\t\t\t\t\t\t\t int constantActionItem,\n\t\t\t\t\t\t\t\t int rsdItem)\n throws StandardException;\n\n\t/**\n\t\tA call statement result set simply reports that it completed. \n\t\tIt does not return rows.\n\n\t\t@param methodCall a reference to a method in the activation\n\t\t\t for the method call\n\t\t@param activation the activation for this result set\n\n\t\t@return the call statement operation as a result set.\n\t\t@exception StandardException thrown when unable to perform the call statement\n\t */\n\tResultSet getCallStatementResultSet(GeneratedMethod methodCall,\n\t\t\t\t Activation activation) \n throws StandardException;\n\n ResultSet getCallStatementResultSet(GeneratedMethod methodCall,\n Activation activation,\n String origClassName,\n String origMethodName) \n throws StandardException;\n \n\t//\n\t// Query expression operations\n\t//\n\n\t/**\n\t\tA project restrict result set iterates over its source,\n\t\tevaluating a restriction and when it is satisfied,\n\t\tconstructing a row to return in its result set based on\n\t\tits projection.\n\t\tThe rows can be constructed as they are requested from the\n\t\tresult set.\n\n\t\t@param source the result set from which to take rows to be \n\t\t\tfiltered by this operation.\n\t\t@param restriction a reference to a method in the activation\n\t\t\tthat is applied to the activation's \"current row\" field\n\t\t\tto determine whether the restriction is satisfied or not.\n\t\t\tThe signature of this method is\n\t\t\t\n\t\t\t\tBoolean restriction() throws StandardException;\n\t\t\t\n\t\t@param projection a reference to a method in the activation\n\t\t\tthat is applied to the activation's \"current row\" 
field\n\t\t\tto project out the expected result row.\n\t\t\tThe signature of this method is\n\t\t\t\n\t\t\t\tExecRow projection() throws StandardException;\n\t\t\t\n\t\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t\t@param constantRestriction a reference to a method in the activation\n\t\t\tthat represents a constant expression (eg where 1 = 2).\n\t\t\tThe signature of this method is\n\t\t\t\n\t\t\t\tBoolean restriction() throws StandardException;\n\t\t\t\n\t\t@param mapArrayItem\tItem # for mapping of source to target columns\n @param cloneMapItem Item # for columns that need cloning\n @param reuseResult Whether or not to reuse the result row.\n\t\t@param doesProjection\tWhether or not this PRN does a projection\n\t\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t\t\t\t\t\t\t\t\t\t\toptimizer\n\t\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\t\t@return the project restrict operation as a result set.\n\t\t@exception StandardException thrown when unable to create the\n\t\t\tresult set\n\t */\n\tNoPutResultSet getProjectRestrictResultSet(NoPutResultSet source,\n\t\tGeneratedMethod restriction, \n\t\tGeneratedMethod projection, int resultSetNumber,\n\t\tGeneratedMethod constantRestriction,\n\t\tint mapArrayItem,\n int cloneMapItem,\n\t\tboolean reuseResult,\n\t\tboolean doesProjection,\n\t\tdouble optimizerEstimatedRowCount,\n\t\tdouble optimizerEstimatedCost,\n\t\tString explainPlan) throws StandardException;\n\n\t/**\n\t\tA hash table result set builds a hash table on its source,\n\t\tapplying a list of predicates, if any, to the source,\n\t\twhen building the hash table. 
It then does a look up into\n\t\tthe hash table on a probe.\n\t\tThe rows can be constructed as they are requested from the\n\t\tresult set.\n\n\t\t@param source the result set from which to take rows to be \n\t\t\tfiltered by this operation.\n\t\t@param singleTableRestriction restriction, if any, applied to\n\t\t\tinput of hash table.\n\t\t@param equijoinQualifiers Qualifier[] for look up into hash table\n\t\t@param projection a reference to a method in the activation\n\t\t\tthat is applied to the activation's \"current row\" field\n\t\t\tto project out the expected result row.\n\t\t\tThe signature of this method is\n\t\t\t\n\t\t\t\tExecRow projection() throws StandardException;\n\t\t\t\n\t\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t\t@param mapRefItem\tItem # for mapping of source to target columns\n\t\t@param reuseResult\tWhether or not to reuse the result row.\n\t\t@param keyColItem\tItem for hash key column array\n\t\t@param removeDuplicates\tWhether or not to remove duplicates when building the hash table\n\t\t@param maxInMemoryRowCount\t\t\tMax size of in-memory hash table\n\t\t@param initialCapacity\t\t\t\tinitialCapacity for java.util.HashTable\n\t\t@param loadFactor\t\t\t\t\tloadFactor for java.util.HashTable\n\t\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t\t\t\t\t\t\t\t\t\t\toptimizer\n\t\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\t\t@return the project restrict operation as a result set.\n\t\t@exception StandardException thrown when unable to create the\n\t\t\tresult set\n\t */\n\tpublic NoPutResultSet getHashTableResultSet(NoPutResultSet source,\n\t\tGeneratedMethod singleTableRestriction, \n\t\tString equijoinQualifiersField,\n\t\tGeneratedMethod projection, int resultSetNumber,\n\t\tint mapRefItem,\n\t\tboolean reuseResult,\n\t\tint keyColItem,\n\t\tboolean removeDuplicates,\n\t\tlong maxInMemoryRowCount,\n\t\tint\tinitialCapacity,\n\t\tfloat loadFactor,\n\t\tdouble 
optimizerEstimatedRowCount,\n\t\tdouble optimizerEstimatedCost)\n\t\t\t throws StandardException;\n\n\t/**\n\t\tA sort result set sorts its source and if requested removes\n\t\tduplicates. It will generate the entire result when open, and\n\t\tthen return it a row at a time.\n\t\t

\n\t\tIf passed aggregates it will do scalar or vector aggregate\n\t\tprocessing. A list of aggregator information is passed\n\t\toff of the PreparedStatement's savedObjects. Aggregation\n\t\tand SELECT DISTINCT cannot be processed in the same sort.\n\n\t\t@param source the result set from which to take rows to be \n\t\t\tfiltered by this operation.\n\t\t@param distinct true if distinct SELECT list\n\t\t@param isInSortedOrder\ttrue if the source result set is in sorted order\n\t\t@param orderItem entry in preparedStatement's savedObjects for order\n\t\t@param rowAllocator a reference to a method in the activation\n\t\t\tthat generates rows of the right size and shape for the source\n\t\t@param rowSize the size of the row that is allocated by rowAllocator.\n\t\t\tsize should be the maximum size of the sum of all the datatypes.\n\t\t\tuser type are necessarily approximated\n\t\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t\t\t\t\t\t\t\t\t\t\toptimizer\n\t\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\t\t@return the distinct operation as a result set.\n\t\t@exception StandardException thrown when unable to create the\n\t\t\tresult set\n\t */\n\tNoPutResultSet getSortResultSet(NoPutResultSet source,\n\t\tboolean distinct, \n\t\tboolean isInSortedOrder,\n\t\tint orderItem,\n\t\tGeneratedMethod rowAllocator, \n\t\tint rowSize,\n\t\tint resultSetNumber, \n\t\tdouble optimizerEstimatedRowCount,\n\t\tdouble optimizerEstimatedCost,\n\t\tString explainPlan) \n\t\t\tthrows StandardException;\n\n\t/**\n\t\tA ScalarAggregateResultSet computes non-distinct scalar aggregates.\n\t\tIt will compute the aggregates when open.\n\n\t\t@param source the result set from which to take rows to be \n\t\t\tfiltered by this operation.\n\t\t@param isInSortedOrder\ttrue if the source result set is in sorted order\n\t\t@param aggregateItem entry in preparedStatement's savedObjects for 
aggregates\n\t\t@param orderingItem\t\tIgnored to allow same signature as getDistinctScalarAggregateResultSet\n\t\t@param rowAllocator a reference to a method in the activation\n\t\t\tthat generates rows of the right size and shape for the source\n\t\t@param rowSize\t\t\tIgnored to allow same signature as getDistinctScalarAggregateResultSet\n\t\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t\t@param singleInputRow\tWhether we know we have a single input row or not\n\t\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t\t\t\t\t\t\t\t\t\t\toptimizer\n\t\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\t\t@return the scalar aggregation operation as a result set.\n\t\t@exception StandardException thrown when unable to create the\n\t\t\tresult set\n\t */\n\tNoPutResultSet getScalarAggregateResultSet(NoPutResultSet source,\n\t\tboolean isInSortedOrder,\n\t\tint aggregateItem,\n\t\tint orderingItem,\n\t\tGeneratedMethod rowAllocator, \n\t\tint rowSize,\n\t\tint resultSetNumber, \n\t\tboolean singleInputRow,\n\t\tdouble optimizerEstimatedRowCount,\n\t\tdouble optimizerEstimatedCost,\n\t\tString explainPlan) \n\t\t\tthrows StandardException;\n\n\t/**\n\t\tA DistinctScalarAggregateResultSet computes scalar aggregates when \n\t\tat least one of them is a distinct aggregate.\n\t\tIt will compute the aggregates when open.\n\n\t\t@param source the result set from which to take rows to be \n\t\t\tfiltered by this operation.\n\t\t@param isInSortedOrder\ttrue if the source result set is in sorted order\n\t\t@param aggregateItem entry in preparedStatement's savedObjects for aggregates\n\t\t@param orderingItem entry in preparedStatement's savedObjects for order\n\t\t@param rowAllocator a reference to a method in the activation\n\t\t\tthat generates rows of the right size and shape for the source\n\t\t@param rowSize the size of the row that is allocated by rowAllocator.\n\t\t\tsize should be the maximum size of the sum of all 
the datatypes.\n\t\t\tuser type are necessarily approximated\n\t\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t\t@param singleInputRow\tWhether we know we have a single input row or not\n\t\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t\t\t\t\t\t\t\t\t\t\toptimizer\n\t\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\t\t@return the scalar aggregation operation as a result set.\n\t\t@exception StandardException thrown when unable to create the\n\t\t\tresult set\n\t */\n\tNoPutResultSet getDistinctScalarAggregateResultSet(NoPutResultSet source,\n\t\tboolean isInSortedOrder,\n\t\tint aggregateItem,\n\t\tint orderingItem,\n\t\tGeneratedMethod rowAllocator, \n\t\tint rowSize,\n\t\tint resultSetNumber, \n\t\tboolean singleInputRow,\n\t\tdouble optimizerEstimatedRowCount,\n\t\tdouble optimizerEstimatedCost,\n\t\tString explainPlan) \n\t\t\tthrows StandardException;\n\n\t/**\n\t\tA GroupedAggregateResultSet computes non-distinct grouped aggregates.\n\t\tIt will compute the aggregates when open.\n\n\t\t@param source the result set from which to take rows to be \n\t\t\tfiltered by this operation.\n\t\t@param isInSortedOrder\ttrue if the source result set is in sorted order\n\t\t@param aggregateItem entry in preparedStatement's savedObjects for aggregates\n\t\t@param orderingItem\t\tIgnored to allow same signature as getDistinctScalarAggregateResultSet\n\t\t@param rowAllocator a reference to a method in the activation\n\t\t\tthat generates rows of the right size and shape for the source\n\t\t@param rowSize\t\t\tIgnored to allow same signature as getDistinctScalarAggregateResultSet\n\t\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t\t\t\t\t\t\t\t\t\t\toptimizer\n\t\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\t\t@param isRollup true if this is a GROUP BY ROLLUP()\n\t\t@return the scalar aggregation 
operation as a result set.\n\t\t@exception StandardException thrown when unable to create the\n\t\t\tresult set\n\t */\n\tNoPutResultSet getGroupedAggregateResultSet(NoPutResultSet source,\n\t\tboolean isInSortedOrder,\n\t\tint aggregateItem,\n\t\tint orderingItem,\n\t\tGeneratedMethod rowAllocator, \n\t\tint rowSize,\n\t\tint resultSetNumber, \n\t\tdouble optimizerEstimatedRowCount,\n\t\tdouble optimizerEstimatedCost,\n\t\tboolean isRollup,\n\t\tString explainPlan) \n\t\t\tthrows StandardException;\n\n\t/**\n\t\tA DistinctGroupedAggregateResultSet computes scalar aggregates when \n\t\tat least one of them is a distinct aggregate.\n\t\tIt will compute the aggregates when open.\n\n\t\t@param source the result set from which to take rows to be \n\t\t\tfiltered by this operation.\n\t\t@param isInSortedOrder\ttrue if the source result set is in sorted order\n\t\t@param aggregateItem entry in preparedStatement's savedObjects for aggregates\n\t\t@param orderingItem entry in preparedStatement's savedObjects for order\n\t\t@param rowAllocator a reference to a method in the activation\n\t\t\tthat generates rows of the right size and shape for the source\n\t\t@param rowSize the size of the row that is allocated by rowAllocator.\n\t\t\tsize should be the maximum size of the sum of all the datatypes.\n\t\t\tuser type are necessarily approximated\n\t\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t\t\t\t\t\t\t\t\t\t\toptimizer\n\t\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\t\t@param isRollup true if this is a GROUP BY ROLLUP()\n\t\t@return the scalar aggregation operation as a result set.\n\t\t@exception StandardException thrown when unable to create the\n\t\t\tresult set\n\t */\n\tNoPutResultSet getDistinctGroupedAggregateResultSet(NoPutResultSet source,\n\t\tboolean isInSortedOrder,\n\t\tint aggregateItem,\n\t\tint orderingItem,\n\t\tGeneratedMethod rowAllocator, 
\n\t\tint rowSize,\n\t\tint resultSetNumber, \n\t\tdouble optimizerEstimatedRowCount,\n\t\tdouble optimizerEstimatedCost,\n boolean isRollup,\n String explainPlan) \n\t\t\tthrows StandardException;\n\n\t/**\n\t\tAn any result set iterates over its source,\n\t\treturning a row with all columns set to nulls\n\t\tif the source returns no rows.\n\n\t\t@param source the result set from which to take rows to be \n\t\t\tfiltered by this operation.\n\t\t@param emptyRowFun a reference to a method in the activation\n\t\t\tthat is called if the source returns no rows\n\t\t@param resultSetNumber\t\tThe resultSetNumber for the ResultSet\n\t\t@param subqueryNumber\t\tThe subquery number for this subquery.\n\t\t@param pointOfAttachment\tThe point of attachment for this subquery.\n\t\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t\t\t\t\t\t\t\t\t\t\toptimizer\n\t\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\t\t@return the any operation as a result set.\n\t\t@exception StandardException thrown when unable to create the\n\t\t\tresult set\n\t */\n\tNoPutResultSet getAnyResultSet(NoPutResultSet source,\n\t\tGeneratedMethod emptyRowFun, int resultSetNumber,\n\t\tint subqueryNumber, int pointOfAttachment,\n\t\tdouble optimizerEstimatedRowCount,\n\t\tdouble optimizerEstimatedCost) \n\t\tthrows StandardException;\n\n\t/**\n\t\tA once result set iterates over its source,\n\t\traising an error if the source returns > 1 row and\n\t\treturning a row with all columns set to nulls\n\t\tif the source returns no rows.\n\n\t\t@param source the result set from which to take rows to be \n\t\t\tfiltered by this operation.\n\t\t@param emptyRowFun a reference to a method in the activation\n\t\t\tthat is called if the source returns no rows\n\t\t@param cardinalityCheck The type of cardinality check, if any that\n\t\t\tis required\n\t\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t\t@param subqueryNumber\t\tThe subquery number for this 
subquery.\n\t\t@param pointOfAttachment\tThe point of attachment for this subquery.\n\t\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t\t\t\t\t\t\t\t\t\t\toptimizer\n\t\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\t\t@return the once operation as a result set.\n\t\t@exception StandardException thrown when unable to create the\n\t\t\tresult set\n\t */\n\tNoPutResultSet getOnceResultSet(NoPutResultSet source,\n\t\tGeneratedMethod emptyRowFun,\n\t\tint cardinalityCheck, int resultSetNumber, \n\t\tint subqueryNumber, int pointOfAttachment,\n\t\tdouble optimizerEstimatedRowCount,\n\t\tdouble optimizerEstimatedCost) \n\t\tthrows StandardException;\n\n\t/**\n\t\tA row result set forms a result set on a single, known row value.\n\t\tIt is used to turn constant rows into result sets for use in\n\t\tthe result set paradigm.\n\t\tThe row can be constructed when it is requested from the\n\t\tresult set.\n\n\t\t@param activation the activation for this result set,\n\t\t\tagainst which the row operation is performed to\n\t\t\tcreate the result set.\n\t\t@param row a reference to a method in the activation\n\t\t\tthat creates the expected row.\n\t\t\t\n\t\t\t\tExecRow row() throws StandardException;\n\t\t\t\n\t\t@param canCacheRow\tTrue if execution can cache the input row\n\t\t\tafter it has gotten it. 
If the input row is constructed soley\n\t\t\tof constants or parameters, it is ok to cache this row rather\n\t\t\tthan recreating it each time it is requested.\n\t\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t\t\t\t\t\t\t\t\t\t\toptimizer\n\t\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\t\t@return the row as a result set.\n\t\t@exception StandardException thrown when unable to create the\n\t\t\tresult set\n\t */\n\tNoPutResultSet getRowResultSet(Activation activation, GeneratedMethod row, \n\t\t\t\t\t\t\t boolean canCacheRow,\n\t\t\t\t\t\t\t int resultSetNumber,\n\t\t\t\t\t\t\t double optimizerEstimatedRowCount,\n\t\t\t\t\t\t\t double optimizerEstimatedCost)\n\t\tthrows StandardException;\n\n NoPutResultSet getRowResultSet(Activation activation, ExecRow row,\n boolean canCacheRow,\n int resultSetNumber,\n double optimizerEstimatedRowCount,\n double optimizerEstimatedCost)\n throws StandardException;\n\n /**\n Splice Addition\n\n A resultset that forms a result set on a collection of known rows.\n It is used to cache rows from a result set that's been materialized.\n\n @param activation The activation for this result set\n @param rows The collection of known ExecRows\n @param resultSetNumber The resultSetNumber for the result set being materialized\n */\n\n NoPutResultSet getCachedResultSet(Activation activation, List rows, int resultSetNumber)\n throws StandardException;\n\n\t/**\n\t\tA VTI result set wraps a user supplied result set.\n\n\t\t@param activation the activation for this result set,\n\t\t\tagainst which the row operation is performed to\n\t\t\tcreate the result set.\n\t\t@param row a reference to a method in the activation\n\t\t\tthat creates the expected row.\n\t\t\t\n\t\t\t\tExecRow row() throws StandardException;\n\t\t\t\n\t\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t\t@param constructor\t\tThe GeneratedMethod for 
the user's constructor\n\t\t@param javaClassName\tThe java class name for the VTI\n\t\t@param erdNumber\t\tint for referenced column BitSet (so it can be turned back into an object)\n\t\t@param version2\t\t\tWhether or not VTI is a version 2 VTI.\n\t\t@param isTarget\t\t\tWhether or not VTI is a target VTI.\n\t\t@param optimizerEstimatedRowCount\tEstimated total # of rows by optimizer\n\t\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\t\t@param isDerbyStyleTableFunction True if this is a Derby-style table function\n\t\t@param returnTypeNumber\tWhich saved object contains the return type (a multi-set) serialized as a byte array\n\t\t@param vtiProjectionNumber\tWhich saved object contains the projection for a RestrictedVTI\n\t\t@param vtiRestrictionNumber\tWhich saved object contains the restriction for a RestrictedVTI\n\t\t@return the row as a result set.\n\t\t@exception StandardException thrown when unable to create the\n\t\t\tresult set\n\t */\n\tpublic NoPutResultSet getVTIResultSet(Activation activation, GeneratedMethod row,\n\t\t\t\t\t\t\t\t\t int resultSetNumber,\n\t\t\t\t\t\t\t\t\t GeneratedMethod constructor,\n\t\t\t\t\t\t\t\t\t String javaClassName,\n\t\t\t\t\t\t\t\t\t String pushedQualifiersField,\n\t\t\t\t\t\t\t\t\t int erdNumber,\n\t\t\t\t\t\t\t\t\t int ctcNumber,\n\t\t\t\t\t\t\t\t\t boolean isTarget,\n\t\t\t\t\t\t\t\t\t int scanIsolationLevel,\n\t\t\t\t\t\t\t\t\t double optimizerEstimatedRowCount,\n\t\t\t\t\t\t\t\t\t double optimizerEstimatedCost,\n boolean isDerbyStyleTableFunction,\n int returnTypeNumber,\n int vtiProjectionNumber,\n int vtiRestrictionNumber,\n\t String explainPlan)\n\t\t throws StandardException;\n\n\t/*\n\t * This method was purely added to get some stored prepared statements to pass the validation stage of their compilation.\n\t * The existing method used a String for pushedQualifiersField. However, nothing was done with the initial value that\n\t * was passed into the constructor. 
So this method does the same thing and ignores the pushedQualifiersField which is\n\t * an com.splicemachine.db.iapi.store.access.Qualifier[][].\n\t */\n\tpublic NoPutResultSet getVTIResultSet(\n\t\t\tActivation activation,\n\t\t\tGeneratedMethod row,\n\t\t\tint resultSetNumber,\n\t\t\tGeneratedMethod constructor,\n\t\t\tString javaClassName,\n\t\t\tcom.splicemachine.db.iapi.store.access.Qualifier[][] pushedQualifiersField,\n\t\t\tint erdNumber,\n\t\t\tint ctcNumber,\n\t\t\tboolean isTarget,\n\t\t\tint scanIsolationLevel,\n\t\t\tdouble optimizerEstimatedRowCount,\n\t\t\tdouble optimizerEstimatedCost,\n\t\t\tboolean isDerbyStyleTableFunction,\n\t\t\tint returnTypeNumber,\n\t\t\tint vtiProjectionNumber,\n\t\t\tint vtiRestrictionNumber,\n\t\t\tString explainPlan\n\t\t\t)\n\t\t\t\t\tthrows StandardException;\n\n\t/**\n\t\tA hash result set forms a result set on a hash table built on a scan\n\t\tof a table.\n\t\tThe rows are put into the hash table on the 1st open.\n\t\t

\n\n\t\t@param activation the activation for this result set,\n\t\t\twhich provides the context for the row allocation operation.\n\t\t@param conglomId the conglomerate of the table to be scanned.\n\t\t@param scociItem The saved item for the static conglomerate info.\n\t\t@param resultRowAllocator a reference to a method in the activation\n\t\t\tthat creates a holder for the rows from the scan.\n\t\t\t\n\t\t\t\tExecRow rowAllocator() throws StandardException;\n\t\t\t\n\t\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t\t@param startKeyGetter a reference to a method in the activation\n\t\t\tthat gets the start key indexable row for the scan. Null\n\t\t\tmeans there is no start key.\n\t\t\t\n\t\t\t\tExecIndexRow startKeyGetter() throws StandardException;\n\t\t\t\n\t\t@param startSearchOperator The start search operator for opening\n\t\t\tthe scan\n\t\t@param stopKeyGetter\ta reference to a method in the activation\n\t\t\tthat gets the stop key indexable row for the scan. 
Null means\n\t\t\tthere is no stop key.\n\t\t\t\n\t\t\t\tExecIndexRow stopKeyGetter() throws StandardException;\n\t\t\t\n\t\t@param stopSearchOperator\tThe stop search operator for opening\n\t\t\tthe scan\n\t\t@param sameStartStopPosition\tRe-use the startKeyGetter for the stopKeyGetter\n\t\t\t\t\t\t\t\t\t\t(Exact match search.)\n\t\t@param scanQualifiers the array of Qualifiers for the scan.\n\t\t\tNull or an array length of zero means there are no qualifiers.\n\t\t@param nextQualifiers the array of Qualifiers for the look up into the hash table.\n\t\t@param initialCapacity\tThe initialCapacity for the HashTable.\n\t\t@param loadFactor\t\tThe loadFactor for the HashTable.\n\t\t@param maxCapacity\t\tThe maximum size for the HashTable.\n\t\t@param hashKeyColumn\tThe 0-based column # for the hash key.\n\t\t@param tableName\t\tThe full name of the table \n\t\t@param userSuppliedOptimizerOverrides\t\tOverrides specified by the user on the sql\n\t\t@param indexName\t\tThe name of the index, if one used to access table.\n\t\t@param isConstraint\t\tIf index, if used, is a backing index for a constraint.\n\t\t@param forUpdate\t\tTrue means open for update\n\t\t@param colRefItem\t\tAn saved item for a bitSet of columns that\n\t\t\t\t\t\t\t\tare referenced in the underlying table. 
-1 if\n\t\t\t\t\t\t\t\tno item.\n\t\t@param lockMode\t\t\tThe lock granularity to use (see\n\t\t\t\t\t\t\t\tTransactionController in access)\n\t\t@param tableLocked\t\tWhether or not the table is marked as using table locking\n\t\t\t\t\t\t\t\t(in sys.systables)\n\t\t@param isolationLevel\tIsolation level (specified or not) to use on scans\n\t\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t\t\t\t\t\t\t\t\t\t\toptimizer\n\t\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\t\t@return the table scan operation as a result set.\n\t\t@exception StandardException thrown when unable to create the\n\t\t\tresult set\n\t */\n\tNoPutResultSet getHashScanResultSet(\n\t\t\t Activation activation,\n\t\t\t\t\t\t\t\tlong conglomId,\n\t\t\t\t\t\t\t\tint scociItem,\t\t\t\t\t\t\n\t\t\t\t\t\t\t\tGeneratedMethod resultRowAllocator,\n\t\t\t\t\t\t\t\tint resultSetNumber,\n\t\t\t\t\t\t\t\tGeneratedMethod startKeyGetter,\n\t\t\t\t\t\t\t\tint startSearchOperator,\n\t\t\t\t\t\t\t\tGeneratedMethod stopKeyGetter,\n\t\t\t\t\t\t\t\tint stopSearchOperator,\n\t\t\t\t\t\t\t\tboolean sameStartStopPosition,\n boolean rowIdKey,\n\t\t\t\t\t\t\t\tString scanQualifiersField,\n\t\t\t\t\t\t\t\tString nextQualifierField,\n\t\t\t\t\t\t\t\tint initialCapacity,\n\t\t\t\t\t\t\t\tfloat loadFactor,\n\t\t\t\t\t\t\t\tint maxCapacity,\n\t\t\t\t\t\t\t\tint hashKeyColumn,\n\t\t\t\t\t\t\t\tString tableName,\n\t\t\t\t\t\t\t\tString userSuppliedOptimizerOverrides,\n\t\t\t\t\t\t\t\tString indexName,\n\t\t\t\t\t\t\t\tboolean isConstraint,\n\t\t\t\t\t\t\t\tboolean forUpdate,\n\t\t\t\t\t\t\t\tint colRefItem,\n\t\t\t\t\t\t\t\tint indexColItem,\n\t\t\t\t\t\t\t\tint lockMode,\n\t\t\t\t\t\t\t\tboolean tableLocked,\n\t\t\t\t\t\t\t\tint isolationLevel,\n\t\t\t\t\t\t\t\tdouble optimizerEstimatedRowCount,\n\t\t\t\t\t\t\t\tdouble optimizerEstimatedCost)\n\t\t\tthrows StandardException;\n\n\t/**\n\t\tA distinct scan result set pushes duplicate elimination into\n\t\tthe scan.\n\t\t

\n\n\t\t@param activation the activation for this result set,\n\t\t\twhich provides the context for the row allocation operation.\n\t\t@param conglomId the conglomerate of the table to be scanned.\n\t\t@param scociItem The saved item for the static conglomerate info.\n\t\t@param resultRowAllocator a reference to a method in the activation\n\t\t\tthat creates a holder for the rows from the scan.\n\t\t\t\n\t\t\t\tExecRow rowAllocator() throws StandardException;\n\t\t\t\n\t\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t\t@param hashKeyColumn\tThe 0-based column # for the hash key.\n\t\t@param tableName\t\tThe full name of the table\n\t\t@param userSuppliedOptimizerOverrides\t\tOverrides specified by the user on the sql\n\t\t@param indexName\t\tThe name of the index, if one used to access table.\n\t\t@param isConstraint\t\tIf index, if used, is a backing index for a constraint.\n\t\t@param colRefItem\t\tAn saved item for a bitSet of columns that\n\t\t\t\t\t\t\t\tare referenced in the underlying table. 
-1 if\n\t\t\t\t\t\t\t\tno item.\n\t\t@param lockMode\t\t\tThe lock granularity to use (see\n\t\t\t\t\t\t\t\tTransactionController in access)\n\t\t@param tableLocked\t\tWhether or not the table is marked as using table locking\n\t\t\t\t\t\t\t\t(in sys.systables)\n\t\t@param isolationLevel\tIsolation level (specified or not) to use on scans\n\t\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t\t\t\t\t\t\t\t\t\t\toptimizer\n\t\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\t\t@return the table scan operation as a result set.\n\t\t@exception StandardException thrown when unable to create the\n\t\t\tresult set\n\t */\n\tNoPutResultSet getDistinctScanResultSet(\n\t\t\t Activation activation,\n\t\t\t\t\t\t\t\tlong conglomId,\n\t\t\t\t\t\t\t\tint scociItem,\t\t\t\n\t\t\t\t\t\t\t\tGeneratedMethod resultRowAllocator,\n\t\t\t\t\t\t\t\tint resultSetNumber,\n\t\t\t\t\t\t\t\tint hashKeyColumn,\n\t\t\t\t\t\t\t\tString tableName,\n\t\t\t\t\t\t\t\tString userSuppliedOptimizerOverrides,\n\t\t\t\t\t\t\t\tString indexName,\n\t\t\t\t\t\t\t\tboolean isConstraint,\n\t\t\t\t\t\t\t\tint colRefItem,\n\t\t\t\t\t\t\t\tint lockMode,\n\t\t\t\t\t\t\t\tboolean tableLocked,\n\t\t\t\t\t\t\t\tint isolationLevel,\n\t\t\t\t\t\t\t\tdouble optimizerEstimatedRowCount,\n\t\t\t\t\t\t\t\tdouble optimizerEstimatedCost,\n String tableVersion,\n\t\t\t\t\t\t\t\tString explainPlan)\n\t\t\tthrows StandardException;\n\n\t/**\n\t\tA table scan result set forms a result set on a scan\n\t\tof a table.\n\t\tThe rows can be constructed as they are requested from the\n\t\tresult set.\n\t\t

\n\t\tThis form of the table scan operation is simple, and is\n\t\tto be used when there are no predicates to be passed down\n\t\tto the scan to limit its scope on the target table.\n\n\t\t@param conglomId the conglomerate of the table to be scanned.\n\t\t@param scociItem The saved item for the static conglomerate info.\n\t\t@param activation the activation for this result set,\n\t\t\twhich provides the context for the row allocation operation.\n\t\t@param resultRowAllocator a reference to a method in the activation\n\t\t\tthat creates a holder for the result row of the scan. May\n\t\t\tbe a partial row.\n\t\t\t\n\t\t\t\tExecRow rowAllocator() throws StandardException;\n\t\t\t\n\t\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t\t@param startKeyGetter a reference to a method in the activation\n\t\t\tthat gets the start key indexable row for the scan. Null\n\t\t\tmeans there is no start key.\n\t\t\t\n\t\t\t\tExecIndexRow startKeyGetter() throws StandardException;\n\t\t\t\n\t\t@param startSearchOperator The start search operator for opening\n\t\t\tthe scan\n\t\t@param stopKeyGetter\ta reference to a method in the activation\n\t\t\tthat gets the stop key indexable row for the scan. 
Null means\n\t\t\tthere is no stop key.\n\t\t\t\n\t\t\t\tExecIndexRow stopKeyGetter() throws StandardException;\n\t\t\t\n\t\t@param stopSearchOperator\tThe stop search operator for opening\n\t\t\tthe scan\n\t\t@param sameStartStopPosition\tRe-use the startKeyGetter for the stopKeyGetter\n\t\t\t\t\t\t\t\t\t\t(Exact match search.)\n\t\t@param qualifiers the array of Qualifiers for the scan.\n\t\t\tNull or an array length of zero means there are no qualifiers.\n\t\t@param tableName\t\tThe full name of the table\n\t\t@param userSuppliedOptimizerOverrides\t\tOverrides specified by the user on the sql\n\t\t@param indexName\t\tThe name of the index, if one used to access table.\n\t\t@param isConstraint\t\tIf index, if used, is a backing index for a constraint.\n\t\t@param forUpdate\t\tTrue means open for update\n\t\t@param colRefItem\t\tAn saved item for a bitSet of columns that\n\t\t\t\t\t\t\t\tare referenced in the underlying table. -1 if\n\t\t\t\t\t\t\t\tno item.\n\t\t@param lockMode\t\t\tThe lock granularity to use (see\n\t\t\t\t\t\t\t\tTransactionController in access)\n\t\t@param tableLocked\t\tWhether or not the table is marked as using table locking\n\t\t\t\t\t\t\t\t(in sys.systables)\n\t\t@param isolationLevel\tIsolation level (specified or not) to use on scans\n\t\t@param oneRowScan\t\tWhether or not this is a 1 row scan.\n\t\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t\t\t\t\t\t\t\t\t\t\toptimizer\n\t\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\n\t\t@return the table scan operation as a result set.\n\t\t@exception StandardException thrown when unable to create the\n\t\t\tresult set\n\t */\n\tNoPutResultSet getTableScanResultSet(\n\t\t\t Activation activation,\n\t\t\t\t\t\t\t\tlong conglomId,\n\t\t\t\t\t\t\t\tint scociItem,\n\t\t\t\t\t\t\t\tGeneratedMethod resultRowAllocator,\n\t\t\t\t\t\t\t\tint resultSetNumber,\n\t\t\t\t\t\t\t\tGeneratedMethod startKeyGetter,\n\t\t\t\t\t\t\t\tint 
startSearchOperator,\n\t\t\t\t\t\t\t\tGeneratedMethod stopKeyGetter,\n\t\t\t\t\t\t\t\tint stopSearchOperator,\n\t\t\t\t\t\t\t\tboolean sameStartStopPosition,\n boolean rowIdKey,\n\t\t\t\t\t\t\t\tString qualifiersField,\n\t\t\t\t\t\t\t\tString tableName,\n\t\t\t\t\t\t\t\tString userSuppliedOptimizerOverrides,\n\t\t\t\t\t\t\t\tString indexName,\n\t\t\t\t\t\t\t\tboolean isConstraint,\n\t\t\t\t\t\t\t\tboolean forUpdate,\n\t\t\t\t\t\t\t\tint colRefItem,\n\t\t\t\t\t\t\t\tint indexColItem,\n\t\t\t\t\t\t\t\tint lockMode,\n\t\t\t\t\t\t\t\tboolean tableLocked,\n\t\t\t\t\t\t\t\tint isolationLevel,\n\t\t\t\t\t\t\t\tboolean oneRowScan,\n\t\t\t\t\t\t\t\tdouble optimizerEstimatedRowCount,\n\t\t\t\t\t\t\t\tdouble optimizerEstimatedCost,\n String tableVersion,\n String explainPlan)\n\t\t\tthrows StandardException;\n\n\t/**\n\t\tA table scan result set forms a result set on a scan\n\t\tof a table.\n\t\tThe rows can be constructed as they are requested from the\n\t\tresult set.\n\t\t

\n\t\tThis form of the table scan operation is simple, and is\n\t\tto be used when there are no predicates to be passed down\n\t\tto the scan to limit its scope on the target table.\n\n\t\t@param conglomId the conglomerate of the table to be scanned.\n\t\t@param scociItem The saved item for the static conglomerate info.\n\t\t@param activation the activation for this result set,\n\t\t\twhich provides the context for the row allocation operation.\n\t\t@param resultRowAllocator a reference to a method in the activation\n\t\t\tthat creates a holder for the result row of the scan. May\n\t\t\tbe a partial row.\n\t\t\t\n\t\t\t\tExecRow rowAllocator() throws StandardException;\n\t\t\t\n\t\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t\t@param startKeyGetter a reference to a method in the activation\n\t\t\tthat gets the start key indexable row for the scan. Null\n\t\t\tmeans there is no start key.\n\t\t\t\n\t\t\t\tExecIndexRow startKeyGetter() throws StandardException;\n\t\t\t\n\t\t@param startSearchOperator The start search operator for opening\n\t\t\tthe scan\n\t\t@param stopKeyGetter\ta reference to a method in the activation\n\t\t\tthat gets the stop key indexable row for the scan. 
Null means\n\t\t\tthere is no stop key.\n\t\t\t\n\t\t\t\tExecIndexRow stopKeyGetter() throws StandardException;\n\t\t\t\n\t\t@param stopSearchOperator\tThe stop search operator for opening\n\t\t\tthe scan\n\t\t@param sameStartStopPosition\tRe-use the startKeyGetter for the stopKeyGetter\n\t\t\t\t\t\t\t\t\t\t(Exact match search.)\n\t\t@param qualifiers the array of Qualifiers for the scan.\n\t\t\tNull or an array length of zero means there are no qualifiers.\n\t\t@param tableName\t\tThe full name of the table\n\t\t@param userSuppliedOptimizerOverrides\t\tOverrides specified by the user on the sql\n\t\t@param indexName\t\tThe name of the index, if one used to access table.\n\t\t@param isConstraint\t\tIf index, if used, is a backing index for a constraint.\n\t\t@param forUpdate\t\tTrue means open for update\n\t\t@param colRefItem\t\tAn saved item for a bitSet of columns that\n\t\t\t\t\t\t\t\tare referenced in the underlying table. -1 if\n\t\t\t\t\t\t\t\tno item.\n\t\t@param lockMode\t\t\tThe lock granularity to use (see\n\t\t\t\t\t\t\t\tTransactionController in access)\n\t\t@param tableLocked\t\tWhether or not the table is marked as using table locking\n\t\t\t\t\t\t\t\t(in sys.systables)\n\t\t@param isolationLevel\tIsolation level (specified or not) to use on scans\n\t\t@param rowsPerRead\t\tThe number of rows to read per fetch.\n @param disableForHoldable Whether or not bulk fetch should be disabled\n at runtime if the cursor is holdable.\n\t\t@param oneRowScan\t\tWhether or not this is a 1 row scan.\n\t\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t\t\t\t\t\t\t\t\t\t\toptimizer\n\t\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\n\t\t@return the table scan operation as a result set.\n\t\t@exception StandardException thrown when unable to create the\n\t\t\tresult set\n\t */\n\tNoPutResultSet getBulkTableScanResultSet(\n\t\t\t Activation activation,\n\t\t\t long conglomId,\n\t\t\t\t\t\t\t\tint 
scociItem,\n\t\t\t\t\t\t\t\tGeneratedMethod resultRowAllocator,\n\t\t\t\t\t\t\t\tint resultSetNumber,\n\t\t\t\t\t\t\t\tGeneratedMethod startKeyGetter,\n\t\t\t\t\t\t\t\tint startSearchOperator,\n\t\t\t\t\t\t\t\tGeneratedMethod stopKeyGetter,\n\t\t\t\t\t\t\t\tint stopSearchOperator,\n\t\t\t\t\t\t\t\tboolean sameStartStopPosition,\n boolean rowIdKey,\n\t\t\t\t\t\t\t\tString qualifiersField,\n\t\t\t\t\t\t\t\tString tableName,\n\t\t\t\t\t\t\t\tString userSuppliedOptimizerOverrides,\n\t\t\t\t\t\t\t\tString indexName,\n\t\t\t\t\t\t\t\tboolean isConstraint,\n\t\t\t\t\t\t\t\tboolean forUpdate,\n\t\t\t\t\t\t\t\tint colRefItem,\n\t\t\t\t\t\t\t\tint indexColItem,\n\t\t\t\t\t\t\t\tint lockMode,\n\t\t\t\t\t\t\t\tboolean tableLocked,\n\t\t\t\t\t\t\t\tint isolationLevel,\n\t\t\t\t\t\t\t\tint rowsPerRead,\n boolean disableForHoldable,\n\t\t\t\t\t\t\t\tboolean oneRowScan,\n\t\t\t\t\t\t\t\tdouble optimizerEstimatedRowCount,\n\t\t\t\t\t\t\t\tdouble optimizerEstimatedCost,\n\t\t\t\t\t\t\t\tString tableVersion,\n String explainPlan)\n\t\t\tthrows StandardException;\n\n /**\n\t\tA multi-probe result set, used for probing an index with one or more\n\t\ttarget values (probeValues) and returning the matching rows. This\n\t\ttype of result set is useful for IN lists as it allows us to avoid\n\t\tscannning an entire, potentially very large, index for a mere handful\n\t\tof rows (DERBY-47).\n\n\t\tAll arguments are the same as for TableScanResultSet, plus the\n\t\tfollowing:\n\n\t\t@param probeVals List of values with which to probe the underlying\n\t\t\ttable. 
Should not be null.\n\t\t@param sortRequired Which type of sort we need for the values\n\t\t\t(ascending, descending, or none).\n\t */\n\tNoPutResultSet getMultiProbeTableScanResultSet(\n\t\t\t Activation activation,\n\t\t\t\t\t\t\t\tlong conglomId,\n\t\t\t\t\t\t\t\tint scociItem,\n\t\t\t\t\t\t\t\tGeneratedMethod resultRowAllocator,\n\t\t\t\t\t\t\t\tint resultSetNumber,\n\t\t\t\t\t\t\t\tGeneratedMethod startKeyGetter,\n\t\t\t\t\t\t\t\tint startSearchOperator,\n\t\t\t\t\t\t\t\tGeneratedMethod stopKeyGetter,\n\t\t\t\t\t\t\t\tint stopSearchOperator,\n\t\t\t\t\t\t\t\tboolean sameStartStopPosition,\n boolean rowIdKey,\n\t\t\t\t\t\t\t\tString qualifiersField,\n\t\t\t\t\t\t\t\tDataValueDescriptor [] probeVals,\n\t\t\t\t\t\t\t\tint sortRequired,\n\t\t\t\t\t\t\t\tString tableName,\n\t\t\t\t\t\t\t\tString userSuppliedOptimizerOverrides,\n\t\t\t\t\t\t\t\tString indexName,\n\t\t\t\t\t\t\t\tboolean isConstraint,\n\t\t\t\t\t\t\t\tboolean forUpdate,\n\t\t\t\t\t\t\t\tint colRefItem,\n\t\t\t\t\t\t\t\tint indexColItem,\n\t\t\t\t\t\t\t\tint lockMode,\n\t\t\t\t\t\t\t\tboolean tableLocked,\n\t\t\t\t\t\t\t\tint isolationLevel,\n\t\t\t\t\t\t\t\tboolean oneRowScan,\n\t\t\t\t\t\t\t\tdouble optimizerEstimatedRowCount,\n\t\t\t\t\t\t\t\tdouble optimizerEstimatedCost,\n String tableVersion,\n String explainPlan)\n\t\t\tthrows StandardException;\n /**\n\t\tAn index row to base row result set gets an index row from its source\n\t\tand uses the RowLocation in its last column to get the row from the\n\t\tbase conglomerate.\n\t\t

\n\n\t @param conglomId\tConglomerate # for the heap.\n\t\t@param scoci The saved item for the static conglomerate info.\n\t\t@param source\tthe source result set, which is expected to provide\n\t\t\t\t\t\trows from an index conglomerate\n\t\t@param resultRowAllocator a reference to a method in the activation\n\t\t\tthat creates a holder for the rows from the scan.\n\t\t\t\n\t\t\t\tExecRow rowAllocator() throws StandardException;\n\t\t\t\n\t\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t\t@param indexName\t\tThe name of the index.\n\t\t@param heapColRefItem\tA saved item for a bitImpl of columns that\n\t\t\t\t\t\t\t\tare referenced in the underlying heap. -1 if\n\t\t\t\t\t\t\t\tno item.\n\t\t@param allColRefItem A saved item for a bitImpl of columns\n\t\t\t\t\t\t\t\tthat are referenced in the underlying\n\t\t\t\t\t\t\t\tindex and heap. -1 if no item.\n\t\t@param heapOnlyColRefItem A saved item for a bitImpl of\n\t\t\t\t\t\t\t\tcolumns that are referenced in the\n\t\t\t\t\t\t\t\tunderlying heap only. 
-1 if no item.\n\n\t\t@param indexColMapItem\tA saved item for a ReferencedColumnsDescriptorImpl\n\t\t\t\t\t\t\t\twhich tell which columms are coming from the index.\n\t\t@param restriction\t\tThe restriction, if any, to be applied to the base row\n\t\t@param forUpdate\t\tTrue means to open for update\n\t\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t\t\t\t\t\t\t\t\t\t\toptimizer\n\t\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\n\t\t@return the index row to base row operation as a result set.\n\t\t@exception StandardException thrown when unable to create the\n\t\t\tresult set\n\t */\n\tpublic NoPutResultSet getIndexRowToBaseRowResultSet(\n\t\t\t\t\t\t\t\tlong conglomId,\n\t\t\t\t\t\t\t\tint scoci,\n\t\t\t\t\t\t\t\tNoPutResultSet source,\n\t\t\t\t\t\t\t\tGeneratedMethod resultRowAllocator,\n\t\t\t\t\t\t\t\tint resultSetNumber,\n\t\t\t\t\t\t\t\tString indexName,\n\t\t\t\t\t\t\t\tint heapColRefItem,\n\t\t\t\t\t\t\t\tint allColRefItem,\n\t\t\t\t\t\t\t\tint heapOnlyColRefItem,\n\t\t\t\t\t\t\t\tint indexColMapItem,\n\t\t\t\t\t\t\t\tGeneratedMethod restriction,\n\t\t\t\t\t\t\t\tboolean forUpdate,\n\t\t\t\t\t\t\t\tdouble optimizerEstimatedRowCount,\n\t\t\t\t\t\t\t\tdouble optimizerEstimatedCost,\n String tableVersion,\n\t\t\t\t\t\t\t\tString explainPlan)\n\t\t\tthrows StandardException;\n\n\n /**\n A OLAP window on top of a regular result set. 
It is used to realize\n window functions.\n\n @param source the result set from which to take rows to be\n filtered by this operation.\n @param isInSortedOrder\ttrue if the source result set is in sorted order\n @param aggregateItem entry in preparedStatement's savedObjects for aggregates\n @param rowAllocator a reference to a method in the activation\n that generates rows of the right size and shape for the source\n @param rowSize\t\t\tthe size of the row that is allocated by rowAllocator.\n size should be the maximum size of the sum of all the datatypes.\n user type are necessarily approximated\n @param resultSetNumber\tThe resultSetNumber for the ResultSet\n @param optimizerEstimatedRowCount\tEstimated total # of rows by\n optimizer\n @param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n @return the scalar aggregation operation as a result set.\n @exception StandardException thrown when unable to create the\n result set\n */\n NoPutResultSet getWindowResultSet(NoPutResultSet source,\n boolean isInSortedOrder,\n int aggregateItem,\n GeneratedMethod rowAllocator,\n int rowSize,\n int resultSetNumber,\n double optimizerEstimatedRowCount,\n double optimizerEstimatedCost,\n String explainPlan)\n throws StandardException;\n\n\n\t/**\n\t\tA nested loop left outer join result set forms a result set on top of\n\t\t2 other result sets.\n\t\tThe rows can be constructed as they are requested from the\n\t\tresult set.\n\t\t

\n\t\tThis form of the nested loop join operation is simple, and is\n\t\tto be used when there are no join predicates to be passed down\n\t\tto the join to limit its scope on the right ResultSet.\n\n\t\t@param leftResultSet\tOuter ResultSet for join.\n\t\t@param leftNumCols\t\tNumber of columns in the leftResultSet\n\t\t@param rightResultSet\tInner ResultSet for join.\n\t\t@param rightNumCols\t\tNumber of columns in the rightResultSet\n\t\t@param joinClause a reference to a method in the activation\n\t\t\tthat is applied to the activation's \"current row\" field\n\t\t\tto determine whether the joinClause is satisfied or not.\n\t\t\tThe signature of this method is\n\t\t\t\n\t\t\t\tBoolean joinClause() throws StandardException;\n\t\t\t\n\t\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t\t@param oneRowRightSide\tboolean, whether or not the right side returns\n\t\t\t\t\t\t\t\ta single row. (No need to do 2nd next() if it does.)\n\t\t@param notExistsRightSide\tboolean, whether or not the right side resides a\n\t\t\t\t\t\t\t\t\tNOT EXISTS base table\n\t\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t\t\t\t\t\t\t\t\t\t\toptimizer\n\t\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\t\t@param userSuppliedOptimizerOverrides\t\tOverrides specified by the user on the sql\n\t\t@return the nested loop join operation as a result set.\n\t\t@exception StandardException thrown when unable to create the\n\t\t\tresult set\n\t */\n public NoPutResultSet getNestedLoopJoinResultSet(NoPutResultSet leftResultSet,\n\t\t\t\t\t\t\t\t int leftNumCols,\n\t\t\t\t\t\t\t\t NoPutResultSet rightResultSet,\n\t\t\t\t\t\t\t\t int rightNumCols,\n\t\t\t\t\t\t\t\t GeneratedMethod joinClause,\n\t\t\t\t\t\t\t\t int resultSetNumber,\n\t\t\t\t\t\t\t\t boolean oneRowRightSide,\n\t\t\t\t\t\t\t\t boolean notExistsRightSide,\n\t\t\t\t\t\t\t\t double optimizerEstimatedRowCount,\n\t\t\t\t\t\t\t\t double optimizerEstimatedCost,\n\t\t\t\t\t\t\t\t String 
userSuppliedOptimizerOverrides,\n\t\t\t\t\t String explainPlan)\n\t\t\tthrows StandardException;\n\n public NoPutResultSet getMergeSortJoinResultSet(NoPutResultSet leftResultSet,\n\t\t\t int leftNumCols,\n\t\t\t NoPutResultSet rightResultSet,\n\t\t\t int rightNumCols,\n\t\t\t int leftHashKeyItem,\n\t\t\t int rightHashKeyItem,\n\t\t\t GeneratedMethod joinClause,\n\t\t\t int resultSetNumber,\n\t\t\t boolean oneRowRightSide,\n\t\t\t boolean notExistsRightSide,\n\t\t\t double optimizerEstimatedRowCount,\n\t\t\t double optimizerEstimatedCost,\n\t\t\t String userSuppliedOptimizerOverrides,\n\t String explainPlan)\n\t\t\t\t\t throws StandardException;\n\n public NoPutResultSet getMergeJoinResultSet(NoPutResultSet leftResultSet,\n\t\t\t int leftNumCols,\n\t\t\t NoPutResultSet rightResultSet,\n\t\t\t int rightNumCols,\n\t\t\t int leftHashKeyItem,\n\t\t\t int rightHashKeyItem,\n\t\t\t GeneratedMethod joinClause,\n\t\t\t int resultSetNumber,\n\t\t\t boolean oneRowRightSide,\n\t\t\t boolean notExistsRightSide,\n\t\t\t double optimizerEstimatedRowCount,\n\t\t\t double optimizerEstimatedCost,\n\t\t\t String userSuppliedOptimizerOverrides,\n String explainPlan)\n\t\t\t\t\t throws StandardException;\n\n public NoPutResultSet getBroadcastJoinResultSet(NoPutResultSet leftResultSet,\n\t\t\t int leftNumCols,\n\t\t\t NoPutResultSet rightResultSet,\n\t\t\t int rightNumCols,\n\t\t\t int leftHashKeyItem,\n\t\t\t int rightHashKeyItem,\n\t\t\t GeneratedMethod joinClause,\n\t\t\t int resultSetNumber,\n\t\t\t boolean oneRowRightSide,\n\t\t\t boolean notExistsRightSide,\n\t\t\t double optimizerEstimatedRowCount,\n\t\t\t double optimizerEstimatedCost,\n\t\t\t String userSuppliedOptimizerOverrides,\n\t\t\t String explainPlan)\n\t\t\t throws StandardException;\n\n\t/**\n\t\tA hash join.\n\n\t\t@param leftResultSet\tOuter ResultSet for join.\n\t\t@param leftNumCols\t\tNumber of columns in the leftResultSet\n\t\t@param rightResultSet\tInner ResultSet for join.\n\t\t@param rightNumCols\t\tNumber of 
columns in the rightResultSet\n\t\t@param joinClause a reference to a method in the activation\n\t\t\tthat is applied to the activation's \"current row\" field\n\t\t\tto determine whether the joinClause is satisfied or not.\n\t\t\tThe signature of this method is\n\t\t\t\n\t\t\t\tBoolean joinClause() throws StandardException;\n\t\t\t\n\t\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t\t@param oneRowRightSide\tboolean, whether or not the right side returns\n\t\t\t\t\t\t\t\ta single row. (No need to do 2nd next() if it does.)\n\t\t@param notExistsRightSide\tboolean, whether or not the right side resides a\n\t\t\t\t\t\t\t\t\tNOT EXISTS base table\n\t\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t\t\t\t\t\t\t\t\t\t\toptimizer\n\t\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\t\t@param userSuppliedOptimizerOverrides\t\tOverrides specified by the user on the sql\n\t\t@return the nested loop join operation as a result set.\n\t\t@exception StandardException thrown when unable to create the\n\t\t\tresult set\n\t */\n public NoPutResultSet getHashJoinResultSet(NoPutResultSet leftResultSet,\n\t\t\t\t\t\t\t\t int leftNumCols,\n\t\t\t\t\t\t\t\t NoPutResultSet rightResultSet,\n\t\t\t\t\t\t\t\t int rightNumCols,\n int leftHashKeyitem,\n int rightHashKeyItem,\n\t\t\t\t\t\t\t\t GeneratedMethod joinClause,\n\t\t\t\t\t\t\t\t int resultSetNumber,\n\t\t\t\t\t\t\t\t boolean oneRowRightSide,\n\t\t\t\t\t\t\t\t boolean notExistsRightSide,\n\t\t\t\t\t\t\t\t double optimizerEstimatedRowCount,\n\t\t\t\t\t\t\t\t double optimizerEstimatedCost,\n\t\t\t\t\t\t\t\t String userSuppliedOptimizerOverrides,\n\t\t\t\t\t String explainPlan)\n\t\t\tthrows StandardException;\n\n\n\t/**\n\t\tA nested loop join result set forms a result set on top of\n\t\t2 other result sets.\n\t\tThe rows can be constructed as they are requested from the\n\t\tresult set.\n\t\t

\n\t\tThis form of the nested loop join operation is simple, and is\n\t\tto be used when there are no join predicates to be passed down\n\t\tto the join to limit its scope on the right ResultSet.\n\n\t\t@param leftResultSet\tOuter ResultSet for join.\n\t\t@param leftNumCols\t\tNumber of columns in the leftResultSet\n\t\t@param rightResultSet\tInner ResultSet for join.\n\t\t@param rightNumCols\t\tNumber of columns in the rightResultSet\n\t\t@param joinClause a reference to a method in the activation\n\t\t\tthat is applied to the activation's \"current row\" field\n\t\t\tto determine whether the joinClause is satisfied or not.\n\t\t\tThe signature of this method is\n\t\t\t\n\t\t\t\tBoolean joinClause() throws StandardException;\n\t\t\t\n\t\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t\t@param emptyRowFun a reference to a method in the activation\n\t\t\t\t\t\t\tthat is called if the right child returns no rows\n\t\t@param wasRightOuterJoin\tWhether or not this was originally a right outer join\n\t\t@param oneRowRightSide\tboolean, whether or not the right side returns\n\t\t\t\t\t\t\t\ta single row. 
(No need to do 2nd next() if it does.)\n\t\t@param notExistsRightSide\tboolean, whether or not the right side resides a\n\t\t\t\t\t\t\t\t\tNOT EXISTS base table\n\t\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t\t\t\t\t\t\t\t\t\t\toptimizer\n\t\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\t\t@param userSuppliedOptimizerOverrides\t\tOverrides specified by the user on the sql\n\t\t@return the nested loop join operation as a result set.\n\t\t@exception StandardException thrown when unable to create the\n\t\t\tresult set\n\t */\n public NoPutResultSet getNestedLoopLeftOuterJoinResultSet(NoPutResultSet leftResultSet,\n\t\t\t\t\t\t\t\t int leftNumCols,\n\t\t\t\t\t\t\t\t NoPutResultSet rightResultSet,\n\t\t\t\t\t\t\t\t int rightNumCols,\n\t\t\t\t\t\t\t\t GeneratedMethod joinClause,\n\t\t\t\t\t\t\t\t int resultSetNumber,\n\t\t\t\t\t\t\t\t GeneratedMethod emptyRowFun,\n\t\t\t\t\t\t\t\t boolean wasRightOuterJoin,\n\t\t\t\t\t\t\t\t boolean oneRowRightSide,\n\t\t\t\t\t\t\t\t boolean notExistsRightSide,\n\t\t\t\t\t\t\t\t double optimizerEstimatedRowCount,\n\t\t\t\t\t\t\t\t double optimizerEstimatedCost,\n\t\t\t\t\t\t\t\t String userSuppliedOptimizerOverrides,\n\t\t\t\t\t String explainPlan)\n\t\t\tthrows StandardException;\n\n\t/**\n\t\tA left outer join using a hash join.\n\n\t\t@param leftResultSet\tOuter ResultSet for join.\n\t\t@param leftNumCols\t\tNumber of columns in the leftResultSet\n\t\t@param rightResultSet\tInner ResultSet for join.\n\t\t@param rightNumCols\t\tNumber of columns in the rightResultSet\n\t\t@param joinClause a reference to a method in the activation\n\t\t\tthat is applied to the activation's \"current row\" field\n\t\t\tto determine whether the joinClause is satisfied or not.\n\t\t\tThe signature of this method is\n\t\t\t\n\t\t\t\tBoolean joinClause() throws StandardException;\n\t\t\t\n\t\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t\t@param emptyRowFun a reference to a method in the 
activation\n\t\t\t\t\t\t\tthat is called if the right child returns no rows\n\t\t@param wasRightOuterJoin\tWhether or not this was originally a right outer join\n\t\t@param oneRowRightSide\tboolean, whether or not the right side returns\n\t\t\t\t\t\t\t\ta single row. (No need to do 2nd next() if it does.)\n\t\t@param notExistsRightSide\tboolean, whether or not the right side resides a\n\t\t\t\t\t\t\t\t\tNOT EXISTS base table\n\t\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t\t\t\t\t\t\t\t\t\t\toptimizer\n\t\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\t\t@param userSuppliedOptimizerOverrides\t\tOverrides specified by the user on the sql\n\t\t@return the nested loop join operation as a result set.\n\t\t@exception StandardException thrown when unable to create the\n\t\t\tresult set\n\t */\n public NoPutResultSet getHashLeftOuterJoinResultSet(NoPutResultSet leftResultSet,\n\t\t\t\t\t\t\t\t int leftNumCols,\n\t\t\t\t\t\t\t\t NoPutResultSet rightResultSet,\n\t\t\t\t\t\t\t\t int rightNumCols,\n int leftHashKeyItem,\n int rightHashKeyItem,\n\t\t\t\t\t\t\t\t GeneratedMethod joinClause,\n\t\t\t\t\t\t\t\t int resultSetNumber,\n\t\t\t\t\t\t\t\t GeneratedMethod emptyRowFun,\n\t\t\t\t\t\t\t\t boolean wasRightOuterJoin,\n\t\t\t\t\t\t\t\t boolean oneRowRightSide,\n\t\t\t\t\t\t\t\t boolean notExistsRightSide,\n\t\t\t\t\t\t\t\t double optimizerEstimatedRowCount,\n\t\t\t\t\t\t\t\t double optimizerEstimatedCost,\n\t\t\t\t\t\t\t\t String userSuppliedOptimizerOverrides,\n\t\t\t\t\t String explainPlan)\n\t\t\tthrows StandardException;\n\n\t/**\n\t\tA ResultSet which materializes the underlying ResultSet tree into a \n\t\ttemp table on the 1st open. 
All subsequent \"scans\" of this ResultSet\n\t\twill return results from the temp table.\n\n\t\t@param source the result set input to this result set.\n\t\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t\t\t\t\t\t\t\t\t\t\toptimizer\n\t\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\t\t@return the materialization operation as a result set.\n\n\t \t@exception StandardException\t\tThrown on failure\n\t */\n\tNoPutResultSet getMaterializedResultSet(NoPutResultSet source, \n\t\t\t\t\t\t\t\t\t\t\tint resultSetNumber,\n\t\t\t\t\t\t\t\t\t\t\tdouble optimizerEstimatedRowCount,\n\t\t\t\t\t\t\t\t\t\t\tdouble optimizerEstimatedCost) \n\t\tthrows StandardException;\n\n\t/**\n\t\tA ResultSet which provides the insensitive scrolling functionality\n\t\tfor the underlying result set by materializing the underlying ResultSet \n\t\ttree into a hash table while scrolling forward.\n\n\t\t@param source the result set input to this result set.\n\t\t@param activation the activation for this result set,\n\t\t\twhich provides the context for normalization.\n\t\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t\t@param sourceRowWidth\tThe # of columns in the source row.\n\t\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t\t\t\t\t\t\t\t\t\t\toptimizer\n\t\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\t\t@return the materialization operation as a result set.\n\n\t \t@exception StandardException\t\tThrown on failure\n\t */\n\tNoPutResultSet getScrollInsensitiveResultSet(NoPutResultSet source,\n\t Activation activation, \n\t\t\t\t\t\t\t\t\t\t\tint resultSetNumber,\n\t\t\t\t\t\t\t\t\t\t\tint sourceRowWidth,\n\t\t\t\t\t\t\t\t\t\t\tboolean scrollable,\n\t\t\t\t\t\t\t\t\t\t\tdouble optimizerEstimatedRowCount,\n\t\t\t\t\t\t\t\t\t\t\tdouble optimizerEstimatedCost,\n\t\t\t\t\t\t\t\t\t\t\tString explainPlan) \n\t\tthrows 
StandardException;\n\t/**\n\t A left outer join using a sort merge join.\n\n\t@param leftResultSet\tOuter ResultSet for join.\n\t@param leftNumCols\t\tNumber of columns in the leftResultSet\n\t@param rightResultSet\tInner ResultSet for join.\n\t@param rightNumCols\t\tNumber of columns in the rightResultSet\n\t@param joinClause a reference to a method in the activation\n\t\tthat is applied to the activation's \"current row\" field\n\t\tto determine whether the joinClause is staisfied or not.\n\t\tThe signature of this method is\n\t\t\n\t\t\tBoolean joinClause() throws StandardException;\n\t\t\n\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t@param emptyRowFun a reference to a method in the activation\n\t\tthat is called if the right child returns no rows\n\t@param wasRightOuterJoin\tWhether or not this was originally a right outer join\n\t@param oneRowRightSide\tboolean, whether or not the right side returns\n\t\ta single row. (No need to do 2nd next() if it does.)\n\t@param notExistsRightSide\tboolean, whether or not the right side resides a\n\t\tNOT EXISTS base table\n\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t\toptimizer\n\t@param optimizerEstimatedCost\tEstimated total cost by optimizer\n\t@param userSuppliedOptimizerOverrides\tOverrides specified by the user on the sql\n\t@return the nested loop join operation as a result set.\n\t@exception StandardException thrown when unable to create the \n\t\tresult set\n\t*/\n\tpublic NoPutResultSet getMergeSortLeftOuterJoinResultSet(NoPutResultSet leftResultSet,\n\t\t\t\tint leftNumCols,\n\t\t\t\tNoPutResultSet rightResultSet,\n\t\t\t\tint rightNumCols,\n\t\t\t\tint leftHashKeyItem,\n\t\t\t\tint rightHashKeyItem,\n\t\t\t\tGeneratedMethod joinClause,\n\t\t\t\tint resultSetNUmber,\n\t\t\t\tGeneratedMethod emptyRowFun,\n\t\t\t\tboolean wasRightOuterJoin,\n\t\t\t\tboolean oneRowRightSide,\n\t\t\t\tboolean noExistsRightSide,\n\t\t\t\tdouble 
optimizerEstimatedRowCount,\n\t\t\t\tdouble optimizerEstimatedCost,\n\t\t\t\tString userSuppliedOptimizerOverrides,\n\t\t\t\tString explainPlan)\n\tthrows StandardException;\n\n\tpublic NoPutResultSet getMergeLeftOuterJoinResultSet(NoPutResultSet leftResultSet,\n\t\t\t\tint leftNumCols,\n\t\t\t\tNoPutResultSet rightResultSet,\n\t\t\t\tint rightNumCols,\n\t\t\t\tint leftHashKeyItem,\n\t\t\t\tint rightHashKeyItem,\n\t\t\t\tGeneratedMethod joinClause,\n\t\t\t\tint resultSetNUmber,\n\t\t\t\tGeneratedMethod emptyRowFun,\n\t\t\t\tboolean wasRightOuterJoin,\n\t\t\t\tboolean oneRowRightSide,\n\t\t\t\tboolean noExistsRightSide,\n\t\t\t\tdouble optimizerEstimatedRowCount,\n\t\t\t\tdouble optimizerEstimatedCost,\n String userSuppliedOptimizerOverrides,\n String explainPlan)\n\tthrows StandardException;\n\n\tpublic NoPutResultSet getBroadcastLeftOuterJoinResultSet(NoPutResultSet leftResultSet,\n\t\t\t\tint leftNumCols,\n\t\t\t\tNoPutResultSet rightResultSet,\n\t\t\t\tint rightNumCols,\n\t\t\t\tint leftHashKeyItem,\n\t\t\t\tint rightHashKeyItem,\n\t\t\t\tGeneratedMethod joinClause,\n\t\t\t\tint resultSetNUmber,\n\t\t\t\tGeneratedMethod emptyRowFun,\n\t\t\t\tboolean wasRightOuterJoin,\n\t\t\t\tboolean oneRowRightSide,\n\t\t\t\tboolean noExistsRightSide,\n\t\t\t\tdouble optimizerEstimatedRowCount,\n\t\t\t\tdouble optimizerEstimatedCost,\n\t\t\t\tString userSuppliedOptimizerOverrides,\n\t\t\t\tString explainPlan)\n\tthrows StandardException;\n\n\t/**\n\t\tREMIND: needs more description...\n\n\t\t@param source the result set input to this result set.\n\t\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t\t@param erdNumber\tint for ResultDescription \n\t\t\t\t\t\t\t(so it can be turned back into an object)\n\t\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t\t\t\t\t\t\t\t\t\t\toptimizer\n\t\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\t\t@return the normalization operation as a result set.\n\n\t \t@exception 
StandardException\t\tThrown on failure\n\t */\n\tNoPutResultSet getNormalizeResultSet(NoPutResultSet source, \n\t\t\t\t\t\t\t\t\t\t int resultSetNumber,\n\t\t\t\t\t\t\t\t\t\t int erdNumber,\n\t\t\t\t\t\t\t\t\t\t double optimizerEstimatedRowCount,\n\t\t\t\t\t\t\t\t\t\t double optimizerEstimatedCost,\n\t\t\t\t\t\t\t\t\t\t boolean forUpdate,\n\t\t\t\t\t\t\t\t\t\t String explainPlan) \n\t\tthrows StandardException;\n\n\t/**\n\t\tA current of result set forms a result set on the\n\t\tcurrent row of an open cursor.\n\t\tIt is used to perform positioned operations such as\n\t\tpositioned update and delete, using the result set paradigm.\n\n\t\t@param cursorName the name of the cursor providing the row.\n\t\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t */\n\tNoPutResultSet getCurrentOfResultSet(String cursorName, Activation activation, \n\t\t\t\t\t\t\t\t\tint resultSetNumber);\n\n\t/**\n\t * The Union interface is used to evaluate the union (all) of two ResultSets.\n\t * (Any duplicate elimination is performed above this ResultSet.)\n\t *\n\t * Forms a ResultSet returning the union of the rows in two source\n\t * ResultSets. 
The column types in source1 and source2 are assumed to be\n\t * the same.\n\t *\n\t * @param source1\tThe first ResultSet whose rows go into the union\n\t * @param source2\tThe second ResultSet whose rows go into the\n\t *\t\t\tunion\n\t *\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t *\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t *\t\t\t\t\t\t\t\t\t\toptimizer\n\t *\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\t *\n\t * @return\tA ResultSet from which the caller can get the union\n\t *\t\tof the two source ResultSets.\n\t *\n\t * @exception StandardException\t\tThrown on failure\n\t */\n\tNoPutResultSet\tgetUnionResultSet(NoPutResultSet source1,\n\t\t\t\t\tNoPutResultSet source2,\n\t\t\t\t\tint resultSetNumber,\n\t\t\t\t\tdouble optimizerEstimatedRowCount,\n\t\t\t\t\tdouble optimizerEstimatedCost,\n\t\t\t\t\tString explainPlan)\n\t\t\t\t\tthrows StandardException;\n\n\n /**\n * The SetOpResultSet is used to implement an INTERSECT or EXCEPT operation.\n * It selects rows from two ordered input result sets.\n *\n * @param leftSource The result set that implements the left input\n * @param rightSource The result set that implements the right input\n * @param activation the activation for this result set\n * @param resultSetNumber\n * @param optimizerEstimatedRowCount\n * @param optimizerEstimatedCost\n * @param opType IntersectOrExceptNode.INTERSECT_OP or EXCEPT_OP\n * @param all true if the operation is an INTERSECT ALL or an EXCEPT ALL,\n * false if the operation is an INTERSECT DISCTINCT or an EXCEPT DISCTINCT\n * @param intermediateOrderByColumnsSavedObject The saved object index for the array of order by columns for the\n * ordering of the left and right sources. 
That is, both the left and right sources have an order by\n * clause of the form ORDER BY intermediateOrderByColumns[0],intermediateOrderByColumns[1],...\n * @param intermediateOrderByDirectionSavedObject The saved object index for the array of source\n * order by directions. That is, the ordering of the i'th order by column in the input is ascending\n * if intermediateOrderByDirection[i] is 1, descending if intermediateOrderByDirection[i] is -1.\n\t *\n\t * @return\tA ResultSet from which the caller can get the INTERSECT or EXCEPT\n\t *\n\t * @exception StandardException\t\tThrown on failure\n\t */\n NoPutResultSet getSetOpResultSet( NoPutResultSet leftSource,\n NoPutResultSet rightSource,\n Activation activation, \n int resultSetNumber,\n long optimizerEstimatedRowCount,\n double optimizerEstimatedCost,\n int opType,\n boolean all,\n int intermediateOrderByColumnsSavedObject,\n int intermediateOrderByDirectionSavedObject,\n int intermediateOrderByNullsLowSavedObject)\n throws StandardException;\n \n \n\t//\n\t// Misc operations\n\t//\n\n\n\n\t/**\n\t * A last index key result set returns the last row from\n\t * the index in question. It is used as an ajunct to max().\n\t *\n\t * @param activation \t\tthe activation for this result set,\n\t *\t\twhich provides the context for the row allocation operation.\n\t * @param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t * @param resultRowAllocator a reference to a method in the activation\n\t * \t\t\t\t\t\tthat creates a holder for the result row of the scan. May\n\t *\t\t\t\t\t\tbe a partial row. 
\n\t *\t\tExecRow rowAllocator() throws StandardException; \n\t * @param conglomId \t\tthe conglomerate of the table to be scanned.\n\t * @param tableName\t\t\tThe full name of the table\n\t * @param userSuppliedOptimizerOverrides\t\tOverrides specified by the user on the sql\n\t * @param indexName\t\t\tThe name of the index, if one used to access table.\n\t * @param colRefItem\t\tAn saved item for a bitSet of columns that\n\t *\t\t\t\t\t\t\tare referenced in the underlying table. -1 if\n\t *\t\t\t\t\t\t\tno item.\n\t * @param lockMode\t\t\tThe lock granularity to use (see\n\t *\t\t\t\t\t\t\tTransactionController in access)\n\t * @param tableLocked\t\tWhether or not the table is marked as using table locking\n\t *\t\t\t\t\t\t\t(in sys.systables)\n\t * @param isolationLevel\tIsolation level (specified or not) to use on scans\n\t * @param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t * \t\t\t\t\t\t\t\t\t\toptimizer\n\t * @param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\t *\n\t * @return the scan operation as a result set.\n \t *\n\t * @exception StandardException thrown when unable to create the\n\t * \t\t\t\tresult set\n\t */\n\tNoPutResultSet getLastIndexKeyResultSet\n\t(\n\t\tActivation \t\t\tactivation,\n\t\tint \t\t\t\tresultSetNumber,\n\t\tGeneratedMethod \tresultRowAllocator,\n\t\tlong \t\t\t\tconglomId,\n\t\tString \t\t\t\ttableName,\n\t\tString \t\t\t\tuserSuppliedOptimizerOverrides,\n\t\tString \t\t\t\tindexName,\n\t\tint \t\t\t\tcolRefItem,\n\t\tint \t\t\t\tlockMode,\n\t\tboolean\t\t\t\ttableLocked,\n\t\tint\t\t\t\t\tisolationLevel,\n\t\tdouble\t\t\t\toptimizerEstimatedRowCount,\n\t\tdouble \t\t\t\toptimizerEstimatedCost,\n String tableVersion,\n\t\tString explainPlan\n\t) throws StandardException;\n\n\n\t/**\n\t\tA Dependent table scan result set forms a result set on a scan\n\t\tof a dependent table for the rows that got materilized \n\t\ton the scan of its parent table and if the row being deleted\n\t\ton parent table 
has a reference in the dependent table.\n\n\t\t@param activation the activation for this result set,\n\t\t\twhich provides the context for the row allocation operation.\n\t\t@param conglomId the conglomerate of the table to be scanned.\n\t\t@param scociItem The saved item for the static conglomerate info.\n\t\t@param resultRowAllocator a reference to a method in the activation\n\t\t\tthat creates a holder for the result row of the scan. May\n\t\t\tbe a partial row.\n\t\t\t\n\t\t\t\tExecRow rowAllocator() throws StandardException;\n\t\t\t\n\t\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t\t@param startKeyGetter a reference to a method in the activation\n\t\t\tthat gets the start key indexable row for the scan. Null\n\t\t\tmeans there is no start key.\n\t\t\t\n\t\t\t\tExecIndexRow startKeyGetter() throws StandardException;\n\t\t\t\n\t\t@param startSearchOperator The start search operator for opening\n\t\t\tthe scan\n\t\t@param stopKeyGetter\ta reference to a method in the activation\n\t\t\tthat gets the stop key indexable row for the scan. 
Null means\n\t\t\tthere is no stop key.\n\t\t\t\n\t\t\t\tExecIndexRow stopKeyGetter() throws StandardException;\n\t\t\t\n\t\t@param stopSearchOperator\tThe stop search operator for opening\n\t\t\tthe scan\n\t\t@param sameStartStopPosition\tRe-use the startKeyGetter for the stopKeyGetter\n\t\t\t\t\t\t\t\t\t\t(Exact match search.)\n\t\t@param qualifiers the array of Qualifiers for the scan.\n\t\t\tNull or an array length of zero means there are no qualifiers.\n\t\t@param tableName\t\tThe full name of the table\n\t\t@param userSuppliedOptimizerOverrides\t\tOverrides specified by the user on the sql\n\t\t@param indexName\t\tThe name of the index, if one used to access table.\n\t\t@param isConstraint\t\tIf index, if used, is a backing index for a constraint.\n\t\t@param forUpdate\t\tTrue means open for update\n\t\t@param colRefItem\t\tAn saved item for a bitSet of columns that\n\t\t\t\t\t\t\t\tare referenced in the underlying table. -1 if\n\t\t\t\t\t\t\t\tno item.\n\t\t@param lockMode\t\t\tThe lock granularity to use (see\n\t\t\t\t\t\t\t\tTransactionController in access)\n\t\t@param tableLocked\t\tWhether or not the table is marked as using table locking\n\t\t\t\t\t\t\t\t(in sys.systables)\n\t\t@param isolationLevel\tIsolation level (specified or not) to use on scans\n\t\t@param oneRowScan\t\tWhether or not this is a 1 row scan.\n\t\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t\t\t\t\t\t\t\t\t\t\toptimizer\n\t\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\t\t@param parentResultSetId Id to access the materlized temporary result\n \t set from the refence stored in the activation.\n\t\t@param fkIndexConglomId foreign key index conglomerate id.\n\t\t@param fkColArrayItem saved column array object that matches the foreign key index\n\t\t columns and the resultset from the parent table.\n\t\t@param rltItem row location template\n\n\t\t@return the table scan operation as a result set.\n\t\t@exception StandardException thrown when 
unable to create the\n\t\t\tresult set\n\t */\n\tpublic NoPutResultSet getRaDependentTableScanResultSet(\n\t\t\t Activation activation,\n\t\t\t\t\t\t\t\t\tlong conglomId,\n\t\t\t\t\t\t\t\t\tint scociItem,\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\tGeneratedMethod resultRowAllocator,\n\t\t\t\t\t\t\t\t\tint resultSetNumber,\n\t\t\t\t\t\t\t\t\tGeneratedMethod startKeyGetter,\n\t\t\t\t\t\t\t\t\tint startSearchOperator,\n\t\t\t\t\t\t\t\t\tGeneratedMethod stopKeyGetter,\n\t\t\t\t\t\t\t\t\tint stopSearchOperator,\n\t\t\t\t\t\t\t\t\tboolean sameStartStopPosition,\n boolean rowIdKey,\n\t\t\t\t\t\t\t\t\tString qualifiersField,\n\t\t\t\t\t\t\t\t\tString tableName,\n\t\t\t\t\t\t\t\t\tString userSuppliedOptimizerOverrides,\n\t\t\t\t\t\t\t\t\tString indexName,\n\t\t\t\t\t\t\t\t\tboolean isConstraint,\n\t\t\t\t\t\t\t\t\tboolean forUpdate,\n\t\t\t\t\t\t\t\t\tint colRefItem,\n\t\t\t\t\t\t\t\t\tint indexColItem,\n\t\t\t\t\t\t\t\t\tint lockMode,\n\t\t\t\t\t\t\t\t\tboolean tableLocked,\n\t\t\t\t\t\t\t\t\tint isolationLevel,\n\t\t\t\t\t\t\t\t\tboolean oneRowScan,\n\t\t\t\t\t\t\t\t\tdouble optimizerEstimatedRowCount,\n\t\t\t\t\t\t\t\t\tdouble optimizerEstimatedCost,\n\t\t\t\t\t\t\t\t\tString parentResultSetId,\n\t\t\t\t\t\t\t\t\tlong fkIndexConglomId,\n\t\t\t\t\t\t\t\t\tint fkColArrayItem,\n\t\t\t\t\t\t\t\t\tint rltItem)\n\t\tthrows StandardException;\n\n /**\n\t * This result sets implements the filtering needed by and . 
It is only ever generated if at least\n\t * one of the two clauses is present.\n\t *\n\t * @param source The source result set being filtered\n\t * @param activation The activation for this result set,\n\t *\t\t which provides the context for the row\n\t * allocation operation\n\t * @param resultSetNumber The resultSetNumber for the ResultSet\n\t * @param offsetMethod The OFFSET parameter was specified\n\t * @param fetchFirstMethod The FETCH FIRST/NEXT parameter was specified\n\t * @param hasJDBClimitClause True if the offset/fetchFirst clauses were added by JDBC LIMExeIT escape syntax\n\t * @param optimizerEstimatedRowCount\n\t * Estimated total # of rows by optimizer\n\t * @param optimizerEstimatedCost\n\t * Estimated total cost by optimizer\n\t * @exception StandardException Standard error policy\n\t */\n\n\tpublic NoPutResultSet getRowCountResultSet(\n\t\tNoPutResultSet source,\n\t\tActivation activation,\n\t\tint resultSetNumber,\n\t\tGeneratedMethod offsetMethod,\n\t\tGeneratedMethod fetchFirstMethod,\n boolean hasJDBClimitClause,\n\t\tdouble optimizerEstimatedRowCount,\n\t\tdouble optimizerEstimatedCost,\n\t\tString explainPlan) throws StandardException;\n\n public NoPutResultSet getExplainResultSet(ResultSet source, Activation activation, int resultSetNumber) throws StandardException;\n\n public NoPutResultSet getExplainResultSet(NoPutResultSet source, Activation activation, int resultSetNumber) throws StandardException;\n\n /**\n * Export\n */\n public NoPutResultSet getExportResultSet(NoPutResultSet source,\n Activation activation,\n int resultSetNumber,\n String exportPath,\n boolean compression,\n int replicationCount,\n String encoding,\n String fieldSeparator,\n String quoteChar,\n int srcResultDescriptionSavedObjectNum) throws StandardException;\n\n /**\n * Batch Once\n */\n public NoPutResultSet getBatchOnceResultSet(NoPutResultSet source,\n Activation activation,\n int resultSetNumber,\n NoPutResultSet subqueryResultSet,\n String 
updateResultSetFieldName,\n int sourceRowLocationColumnPosition,\n int sourceCorrelatedColumnPosition,\n int subqueryCorrelatedColumnPosition) throws StandardException;\n\n}\n"},"new_file":{"kind":"string","value":"db-engine/src/main/java/com/splicemachine/db/iapi/sql/execute/ResultSetFactory.java"},"old_contents":{"kind":"string","value":"/*\n\n Derby - Class com.splicemachine.db.iapi.sql.execute.ResultSetFactory\n\n Licensed to the Apache Software Foundation (ASF) under one or more\n contributor license agreements. See the NOTICE file distributed with\n this work for additional information regarding copyright ownership.\n The ASF licenses this file to you under the Apache License, Version 2.0\n (the \"License\"); you may not use this file except in compliance with\n the License. You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n\n */\n\npackage com.splicemachine.db.iapi.sql.execute;\n\nimport java.util.List;\n\nimport com.splicemachine.db.iapi.error.StandardException;\nimport com.splicemachine.db.iapi.services.loader.GeneratedMethod;\nimport com.splicemachine.db.iapi.sql.Activation;\nimport com.splicemachine.db.iapi.sql.ResultSet;\nimport com.splicemachine.db.iapi.types.DataValueDescriptor;\n\n/**\n * ResultSetFactory provides a wrapper around all of\n * the result sets needed in an execution implementation.\n *

\n * For the activations to avoid searching for this module\n * in their execute methods, the base activation supertype\n * should implement a method that does the lookup and salts\n * away this factory for the activation to use as it needs it.\n *\n */\npublic interface ResultSetFactory {\n\t/**\n\t\tModule name for the monitor's module locating system.\n\t */\n\tString MODULE = \"com.splicemachine.db.iapi.sql.execute.ResultSetFactory\";\n\n\t//\n\t// DDL operations\n\t//\n\n\t/**\n\t Generic DDL result set creation.\n\n\t\t@param activation \t\tthe activation for this result set\n\n\t\t@return\tResultSet\tA wrapper result set to run the Execution-time\n\t\t logic.\n\t\t@exception StandardException thrown when unable to create the\n\t\t\tresult set\n\t */\n\tResultSet getDDLResultSet(Activation activation)\n\t\t\t\t\tthrows StandardException;\n\n\n\t//\n\t// MISC operations\n\t//\n\n\t/**\n\t Generic Misc result set creation.\n\n\t\t@param activation \t\tthe activation for this result set\n\n\t\t@return\tResultSet\tA wrapper result set to run the Execution-time\n\t\t logic.\n\t\t@exception StandardException thrown when unable to create the\n\t\t\tresult set\n\t */\n\tResultSet getMiscResultSet(Activation activation)\n\t\t\t\t\tthrows StandardException;\n\n\t//\n\t// Transaction operations\n\t//\n\t/**\n\n\t\t@param activation \t\tthe activation for this result set\n\n\t\t@return\tResultSet\tA wrapper result set to run the Execution-time\n\t\t logic.\n\t\t@exception StandardException thrown when unable to create the\n\t\t\tresult set\n\t */\n\tResultSet getSetTransactionResultSet(Activation activation) \n\t\tthrows StandardException;\n\n\t//\n\t// DML statement operations\n\t//\n\t/**\n\t\tAn insert result set simply reports that it completed, and\n\t\tthe number of rows inserted. 
It does not return rows.\n\t\tThe insert has been completed once the\n\t\tinsert result set is available.\n\n\t\t@param source the result set from which to take rows to\n\t\t\tbe inserted into the target table.\n\t\t@param generationClauses\tThe code to compute column generation clauses if any\n\t\t@param checkGM\tThe code to enforce the check constraints, if any\n\t\t@return the insert operation as a result set.\n\t\t@exception StandardException thrown when unable to perform the insert\n\t */\n\tResultSet getInsertResultSet(NoPutResultSet source,\n\t GeneratedMethod generationClauses,\n\t\t\t\t\t\t\t\t GeneratedMethod checkGM,\n\t\t\t\t\t\t\t\t String insertMode,\n\t\t\t\t\t\t\t\t String statusDirectory,\n\t\t\t\t\t\t\t\t int failBadRecordCount,\n double optimizerEstimatedRowCount,\n double optimizerEstimatedCost,\n String tableVersion,\n String explainPlan)\n throws StandardException;\n\n\t/**\n\t\tAn insert VTI result set simply reports that it completed, and\n\t\tthe number of rows inserted. It does not return rows.\n\t\tThe insert has been completed once the\n\t\tinsert result set is available.\n\n\t\t@param source the result set from which to take rows to\n\t\t\tbe inserted into the target table.\n\t\t@param vtiRS\tThe code to instantiate the VTI, if necessary\n\t\t@return the insert VTI operation as a result set.\n\t\t@exception StandardException thrown when unable to perform the insert\n\t */\n\tResultSet getInsertVTIResultSet(NoPutResultSet source, \n\t\t\t\t\t\t\t\t NoPutResultSet vtiRS,\n double optimizerEstimatedRowCount,\n double optimizerEstimatedCost)\n throws StandardException;\n\n\t/**\n\t\tA delete VTI result set simply reports that it completed, and\n\t\tthe number of rows deleted. 
It does not return rows.\n\t\tThe delete has been completed once the\n\t\tdelete result set is available.\n\n\t\t@param source the result set from which to take rows to\n\t\t\tbe inserted into the target table.\n\t\t@return the delete VTI operation as a result set.\n\t\t@exception StandardException thrown when unable to perform the insert\n\t */\n\tResultSet getDeleteVTIResultSet(NoPutResultSet source,double optimizerEstimatedRowCount,\n double optimizerEstimatedCost)\n throws StandardException;\n\n\t/**\n\t\tA delete result set simply reports that it completed, and\n\t\tthe number of rows deleted. It does not return rows.\n\t\tThe delete has been completed once the\n\t\tdelete result set is available.\n\n\t\t@param source the result set from which to take rows to\n\t\t\tbe deleted from the target table. This result set must\n\t\t\tcontain one column which provides RowLocations that are\n\t\t\tvalid in the target table.\n\t\t@return the delete operation as a result set.\n\t\t@exception StandardException thrown when unable to perform the delete\n\t */\n\tResultSet getDeleteResultSet(NoPutResultSet source,double optimizerEstimatedRowCount,\n double optimizerEstimatedCost, String tableVersion,\n String explainPlan)\n\t\t\t\t\t\t\tthrows StandardException;\n\n\t/**\n\t\tA delete Cascade result set simply reports that it completed, and\n\t\tthe number of rows deleted. 
It does not return rows.\n\t\tThe delete has been completed once the\n\t\tdelete result set is available.\n\n\t\t@param source the result set from which to take rows to\n\t\t\tbe deleted from the target table.\n\t\t@param constantActionItem a constant action saved object reference\n\t\t@param dependentResultSets an array of DeleteCascade Resultsets\n for the current table referential action\n\t\t\t\t\t\t\t\t dependents tables.\n\t\t@param resultSetId an Id which is used to store the refence\n to the temporary result set created of\n the materilized rows.Dependent table resultsets\n\t\t\t\t\t\t\tuses the same id to access their parent temporary result sets.\n\t\t@return the delete operation as a delete cascade result set.\n\t\t@exception StandardException thrown when unable to perform the delete\n\t */\n\tResultSet getDeleteCascadeResultSet(NoPutResultSet source,\n\t\t\t\t\t\t\t\t\t\tint constantActionItem,\n\t\t\t\t\t\t\t\t\t\tResultSet[] dependentResultSets, \n\t\t\t\t\t\t\t\t\t\tString resultSetId)\n\t\t\t\t\t\t\tthrows StandardException;\n\n\t/**\n\t\tAn update result set simply reports that it completed, and\n\t\tthe number of rows updated. It does not return rows.\n\t\tThe update has been completed once the\n\t\tupdate result set is available.\n\n\t\t@param source the result set from which to take rows to be \n\t\t\tupdated in the target table. 
This result set must contain \n\t\t\ta column which provides RowLocations that are valid in the \n\t\t\ttarget table, and new values to be placed in those rows.\n\t\t@param generationClauses\tThe code to compute column generation clauses if any\n\t\t@param checkGM\tThe code to enforce the check constraints, if any\n\t\t@return the update operation as a result set.\n\t\t@exception StandardException thrown when unable to perform the update\n\t */\n\tResultSet getUpdateResultSet(NoPutResultSet source, GeneratedMethod generationClauses,\n\t\t\t\t\t\t\t\t GeneratedMethod checkGM,double optimizerEstimatedRowCount,\n double optimizerEstimatedCost, String tableVersion,\n String explainPlan)\n throws StandardException;\n\n\t/**\n * @param source the result set from which to take rows to be \n * updated in the target table.\n * @return the update operation as a result set.\n * @exception StandardException thrown on error\n\t */\n\tpublic ResultSet getUpdateVTIResultSet(NoPutResultSet source,double optimizerEstimatedRowCount,\n double optimizerEstimatedCost)\n throws StandardException;\n\n\t/**\n\t\tAn update result set simply reports that it completed, and\n\t\tthe number of rows updated. It does not return rows.\n\t\tThe update has been completed once the\n\t\tupdate result set is available.\n\n\t\t@param source the result set from which to take rows to be \n\t\t\tupdated in the target table. This result set must contain \n\t\t\ta column which provides RowLocations that are valid in the \n\t\t\ttarget table, and new values to be placed in those rows.\n\t\t@param generationClauses\tThe code to compute generated columns, if any\n\t\t@param checkGM\tThe code to enforce the check constraints, if any\n\t\t@param constantActionItem a constant action saved object reference\n\t\t@param rsdItem result Description, saved object id. 
\t\t\t\t\n\t\t@return the update operation as a result set.\n\t\t@exception StandardException thrown when unable to perform the update\n\t */\n\tResultSet getDeleteCascadeUpdateResultSet(NoPutResultSet source, \n\t\t\t\t\t\t\t\t GeneratedMethod generationClauses,\n\t\t\t\t\t\t\t\t GeneratedMethod checkGM,\n\t\t\t\t\t\t\t\t int constantActionItem,\n\t\t\t\t\t\t\t\t int rsdItem)\n throws StandardException;\n\n\t/**\n\t\tA call statement result set simply reports that it completed. \n\t\tIt does not return rows.\n\n\t\t@param methodCall a reference to a method in the activation\n\t\t\t for the method call\n\t\t@param activation the activation for this result set\n\n\t\t@return the call statement operation as a result set.\n\t\t@exception StandardException thrown when unable to perform the call statement\n\t */\n\tResultSet getCallStatementResultSet(GeneratedMethod methodCall,\n\t\t\t\t Activation activation) \n throws StandardException;\n\n ResultSet getCallStatementResultSet(GeneratedMethod methodCall,\n Activation activation,\n String origClassName,\n String origMethodName) \n throws StandardException;\n \n\t//\n\t// Query expression operations\n\t//\n\n\t/**\n\t\tA project restrict result set iterates over its source,\n\t\tevaluating a restriction and when it is satisfied,\n\t\tconstructing a row to return in its result set based on\n\t\tits projection.\n\t\tThe rows can be constructed as they are requested from the\n\t\tresult set.\n\n\t\t@param source the result set from which to take rows to be \n\t\t\tfiltered by this operation.\n\t\t@param restriction a reference to a method in the activation\n\t\t\tthat is applied to the activation's \"current row\" field\n\t\t\tto determine whether the restriction is satisfied or not.\n\t\t\tThe signature of this method is\n\t\t\t\n\t\t\t\tBoolean restriction() throws StandardException;\n\t\t\t\n\t\t@param projection a reference to a method in the activation\n\t\t\tthat is applied to the activation's \"current row\" 
field\n\t\t\tto project out the expected result row.\n\t\t\tThe signature of this method is\n\t\t\t\n\t\t\t\tExecRow projection() throws StandardException;\n\t\t\t\n\t\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t\t@param constantRestriction a reference to a method in the activation\n\t\t\tthat represents a constant expression (eg where 1 = 2).\n\t\t\tThe signature of this method is\n\t\t\t\n\t\t\t\tBoolean restriction() throws StandardException;\n\t\t\t\n\t\t@param mapArrayItem\tItem # for mapping of source to target columns\n @param cloneMapItem Item # for columns that need cloning\n @param reuseResult Whether or not to reuse the result row.\n\t\t@param doesProjection\tWhether or not this PRN does a projection\n\t\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t\t\t\t\t\t\t\t\t\t\toptimizer\n\t\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\t\t@return the project restrict operation as a result set.\n\t\t@exception StandardException thrown when unable to create the\n\t\t\tresult set\n\t */\n\tNoPutResultSet getProjectRestrictResultSet(NoPutResultSet source,\n\t\tGeneratedMethod restriction, \n\t\tGeneratedMethod projection, int resultSetNumber,\n\t\tGeneratedMethod constantRestriction,\n\t\tint mapArrayItem,\n int cloneMapItem,\n\t\tboolean reuseResult,\n\t\tboolean doesProjection,\n\t\tdouble optimizerEstimatedRowCount,\n\t\tdouble optimizerEstimatedCost,\n\t\tString explainPlan) throws StandardException;\n\n\t/**\n\t\tA hash table result set builds a hash table on its source,\n\t\tapplying a list of predicates, if any, to the source,\n\t\twhen building the hash table. 
It then does a look up into\n\t\tthe hash table on a probe.\n\t\tThe rows can be constructed as they are requested from the\n\t\tresult set.\n\n\t\t@param source the result set from which to take rows to be \n\t\t\tfiltered by this operation.\n\t\t@param singleTableRestriction restriction, if any, applied to\n\t\t\tinput of hash table.\n\t\t@param equijoinQualifiers Qualifier[] for look up into hash table\n\t\t@param projection a reference to a method in the activation\n\t\t\tthat is applied to the activation's \"current row\" field\n\t\t\tto project out the expected result row.\n\t\t\tThe signature of this method is\n\t\t\t\n\t\t\t\tExecRow projection() throws StandardException;\n\t\t\t\n\t\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t\t@param mapRefItem\tItem # for mapping of source to target columns\n\t\t@param reuseResult\tWhether or not to reuse the result row.\n\t\t@param keyColItem\tItem for hash key column array\n\t\t@param removeDuplicates\tWhether or not to remove duplicates when building the hash table\n\t\t@param maxInMemoryRowCount\t\t\tMax size of in-memory hash table\n\t\t@param initialCapacity\t\t\t\tinitialCapacity for java.util.HashTable\n\t\t@param loadFactor\t\t\t\t\tloadFactor for java.util.HashTable\n\t\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t\t\t\t\t\t\t\t\t\t\toptimizer\n\t\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\t\t@return the project restrict operation as a result set.\n\t\t@exception StandardException thrown when unable to create the\n\t\t\tresult set\n\t */\n\tpublic NoPutResultSet getHashTableResultSet(NoPutResultSet source,\n\t\tGeneratedMethod singleTableRestriction, \n\t\tString equijoinQualifiersField,\n\t\tGeneratedMethod projection, int resultSetNumber,\n\t\tint mapRefItem,\n\t\tboolean reuseResult,\n\t\tint keyColItem,\n\t\tboolean removeDuplicates,\n\t\tlong maxInMemoryRowCount,\n\t\tint\tinitialCapacity,\n\t\tfloat loadFactor,\n\t\tdouble 
optimizerEstimatedRowCount,\n\t\tdouble optimizerEstimatedCost)\n\t\t\t throws StandardException;\n\n\t/**\n\t\tA sort result set sorts its source and if requested removes\n\t\tduplicates. It will generate the entire result when open, and\n\t\tthen return it a row at a time.\n\t\t

\n\t\tIf passed aggregates it will do scalar or vector aggregate\n\t\tprocessing. A list of aggregator information is passed\n\t\toff of the PreparedStatement's savedObjects. Aggregation\n\t\tand SELECT DISTINCT cannot be processed in the same sort.\n\n\t\t@param source the result set from which to take rows to be \n\t\t\tfiltered by this operation.\n\t\t@param distinct true if distinct SELECT list\n\t\t@param isInSortedOrder\ttrue if the source result set is in sorted order\n\t\t@param orderItem entry in preparedStatement's savedObjects for order\n\t\t@param rowAllocator a reference to a method in the activation\n\t\t\tthat generates rows of the right size and shape for the source\n\t\t@param rowSize the size of the row that is allocated by rowAllocator.\n\t\t\tsize should be the maximum size of the sum of all the datatypes.\n\t\t\tuser type are necessarily approximated\n\t\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t\t\t\t\t\t\t\t\t\t\toptimizer\n\t\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\t\t@return the distinct operation as a result set.\n\t\t@exception StandardException thrown when unable to create the\n\t\t\tresult set\n\t */\n\tNoPutResultSet getSortResultSet(NoPutResultSet source,\n\t\tboolean distinct, \n\t\tboolean isInSortedOrder,\n\t\tint orderItem,\n\t\tGeneratedMethod rowAllocator, \n\t\tint rowSize,\n\t\tint resultSetNumber, \n\t\tdouble optimizerEstimatedRowCount,\n\t\tdouble optimizerEstimatedCost,\n\t\tString explainPlan) \n\t\t\tthrows StandardException;\n\n\t/**\n\t\tA ScalarAggregateResultSet computes non-distinct scalar aggregates.\n\t\tIt will compute the aggregates when open.\n\n\t\t@param source the result set from which to take rows to be \n\t\t\tfiltered by this operation.\n\t\t@param isInSortedOrder\ttrue if the source result set is in sorted order\n\t\t@param aggregateItem entry in preparedStatement's savedObjects for 
aggregates\n\t\t@param orderingItem\t\tIgnored to allow same signature as getDistinctScalarAggregateResultSet\n\t\t@param rowAllocator a reference to a method in the activation\n\t\t\tthat generates rows of the right size and shape for the source\n\t\t@param rowSize\t\t\tIgnored to allow same signature as getDistinctScalarAggregateResultSet\n\t\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t\t@param singleInputRow\tWhether we know we have a single input row or not\n\t\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t\t\t\t\t\t\t\t\t\t\toptimizer\n\t\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\t\t@return the scalar aggregation operation as a result set.\n\t\t@exception StandardException thrown when unable to create the\n\t\t\tresult set\n\t */\n\tNoPutResultSet getScalarAggregateResultSet(NoPutResultSet source,\n\t\tboolean isInSortedOrder,\n\t\tint aggregateItem,\n\t\tint orderingItem,\n\t\tGeneratedMethod rowAllocator, \n\t\tint rowSize,\n\t\tint resultSetNumber, \n\t\tboolean singleInputRow,\n\t\tdouble optimizerEstimatedRowCount,\n\t\tdouble optimizerEstimatedCost,\n\t\tString explainPlan) \n\t\t\tthrows StandardException;\n\n\t/**\n\t\tA DistinctScalarAggregateResultSet computes scalar aggregates when \n\t\tat least one of them is a distinct aggregate.\n\t\tIt will compute the aggregates when open.\n\n\t\t@param source the result set from which to take rows to be \n\t\t\tfiltered by this operation.\n\t\t@param isInSortedOrder\ttrue if the source result set is in sorted order\n\t\t@param aggregateItem entry in preparedStatement's savedObjects for aggregates\n\t\t@param orderingItem entry in preparedStatement's savedObjects for order\n\t\t@param rowAllocator a reference to a method in the activation\n\t\t\tthat generates rows of the right size and shape for the source\n\t\t@param rowSize the size of the row that is allocated by rowAllocator.\n\t\t\tsize should be the maximum size of the sum of all 
the datatypes.\n\t\t\tuser type are necessarily approximated\n\t\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t\t@param singleInputRow\tWhether we know we have a single input row or not\n\t\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t\t\t\t\t\t\t\t\t\t\toptimizer\n\t\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\t\t@return the scalar aggregation operation as a result set.\n\t\t@exception StandardException thrown when unable to create the\n\t\t\tresult set\n\t */\n\tNoPutResultSet getDistinctScalarAggregateResultSet(NoPutResultSet source,\n\t\tboolean isInSortedOrder,\n\t\tint aggregateItem,\n\t\tint orderingItem,\n\t\tGeneratedMethod rowAllocator, \n\t\tint rowSize,\n\t\tint resultSetNumber, \n\t\tboolean singleInputRow,\n\t\tdouble optimizerEstimatedRowCount,\n\t\tdouble optimizerEstimatedCost,\n\t\tString explainPlan) \n\t\t\tthrows StandardException;\n\n\t/**\n\t\tA GroupedAggregateResultSet computes non-distinct grouped aggregates.\n\t\tIt will compute the aggregates when open.\n\n\t\t@param source the result set from which to take rows to be \n\t\t\tfiltered by this operation.\n\t\t@param isInSortedOrder\ttrue if the source result set is in sorted order\n\t\t@param aggregateItem entry in preparedStatement's savedObjects for aggregates\n\t\t@param orderingItem\t\tIgnored to allow same signature as getDistinctScalarAggregateResultSet\n\t\t@param rowAllocator a reference to a method in the activation\n\t\t\tthat generates rows of the right size and shape for the source\n\t\t@param rowSize\t\t\tIgnored to allow same signature as getDistinctScalarAggregateResultSet\n\t\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t\t\t\t\t\t\t\t\t\t\toptimizer\n\t\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\t\t@param isRollup true if this is a GROUP BY ROLLUP()\n\t\t@return the scalar aggregation 
operation as a result set.\n\t\t@exception StandardException thrown when unable to create the\n\t\t\tresult set\n\t */\n\tNoPutResultSet getGroupedAggregateResultSet(NoPutResultSet source,\n\t\tboolean isInSortedOrder,\n\t\tint aggregateItem,\n\t\tint orderingItem,\n\t\tGeneratedMethod rowAllocator, \n\t\tint rowSize,\n\t\tint resultSetNumber, \n\t\tdouble optimizerEstimatedRowCount,\n\t\tdouble optimizerEstimatedCost,\n\t\tboolean isRollup,\n\t\tString explainPlan) \n\t\t\tthrows StandardException;\n\n\t/**\n\t\tA DistinctGroupedAggregateResultSet computes scalar aggregates when \n\t\tat least one of them is a distinct aggregate.\n\t\tIt will compute the aggregates when open.\n\n\t\t@param source the result set from which to take rows to be \n\t\t\tfiltered by this operation.\n\t\t@param isInSortedOrder\ttrue if the source result set is in sorted order\n\t\t@param aggregateItem entry in preparedStatement's savedObjects for aggregates\n\t\t@param orderingItem entry in preparedStatement's savedObjects for order\n\t\t@param rowAllocator a reference to a method in the activation\n\t\t\tthat generates rows of the right size and shape for the source\n\t\t@param rowSize the size of the row that is allocated by rowAllocator.\n\t\t\tsize should be the maximum size of the sum of all the datatypes.\n\t\t\tuser type are necessarily approximated\n\t\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t\t\t\t\t\t\t\t\t\t\toptimizer\n\t\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\t\t@param isRollup true if this is a GROUP BY ROLLUP()\n\t\t@return the scalar aggregation operation as a result set.\n\t\t@exception StandardException thrown when unable to create the\n\t\t\tresult set\n\t */\n\tNoPutResultSet getDistinctGroupedAggregateResultSet(NoPutResultSet source,\n\t\tboolean isInSortedOrder,\n\t\tint aggregateItem,\n\t\tint orderingItem,\n\t\tGeneratedMethod rowAllocator, 
\n\t\tint rowSize,\n\t\tint resultSetNumber, \n\t\tdouble optimizerEstimatedRowCount,\n\t\tdouble optimizerEstimatedCost,\n boolean isRollup,\n String explainPlan) \n\t\t\tthrows StandardException;\n\n\t/**\n\t\tAn any result set iterates over its source,\n\t\treturning a row with all columns set to nulls\n\t\tif the source returns no rows.\n\n\t\t@param source the result set from which to take rows to be \n\t\t\tfiltered by this operation.\n\t\t@param emptyRowFun a reference to a method in the activation\n\t\t\tthat is called if the source returns no rows\n\t\t@param resultSetNumber\t\tThe resultSetNumber for the ResultSet\n\t\t@param subqueryNumber\t\tThe subquery number for this subquery.\n\t\t@param pointOfAttachment\tThe point of attachment for this subquery.\n\t\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t\t\t\t\t\t\t\t\t\t\toptimizer\n\t\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\t\t@return the any operation as a result set.\n\t\t@exception StandardException thrown when unable to create the\n\t\t\tresult set\n\t */\n\tNoPutResultSet getAnyResultSet(NoPutResultSet source,\n\t\tGeneratedMethod emptyRowFun, int resultSetNumber,\n\t\tint subqueryNumber, int pointOfAttachment,\n\t\tdouble optimizerEstimatedRowCount,\n\t\tdouble optimizerEstimatedCost) \n\t\tthrows StandardException;\n\n\t/**\n\t\tA once result set iterates over its source,\n\t\traising an error if the source returns > 1 row and\n\t\treturning a row with all columns set to nulls\n\t\tif the source returns no rows.\n\n\t\t@param source the result set from which to take rows to be \n\t\t\tfiltered by this operation.\n\t\t@param emptyRowFun a reference to a method in the activation\n\t\t\tthat is called if the source returns no rows\n\t\t@param cardinalityCheck The type of cardinality check, if any that\n\t\t\tis required\n\t\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t\t@param subqueryNumber\t\tThe subquery number for this 
subquery.\n\t\t@param pointOfAttachment\tThe point of attachment for this subquery.\n\t\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t\t\t\t\t\t\t\t\t\t\toptimizer\n\t\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\t\t@return the once operation as a result set.\n\t\t@exception StandardException thrown when unable to create the\n\t\t\tresult set\n\t */\n\tNoPutResultSet getOnceResultSet(NoPutResultSet source,\n\t\tGeneratedMethod emptyRowFun,\n\t\tint cardinalityCheck, int resultSetNumber, \n\t\tint subqueryNumber, int pointOfAttachment,\n\t\tdouble optimizerEstimatedRowCount,\n\t\tdouble optimizerEstimatedCost) \n\t\tthrows StandardException;\n\n\t/**\n\t\tA row result set forms a result set on a single, known row value.\n\t\tIt is used to turn constant rows into result sets for use in\n\t\tthe result set paradigm.\n\t\tThe row can be constructed when it is requested from the\n\t\tresult set.\n\n\t\t@param activation the activation for this result set,\n\t\t\tagainst which the row operation is performed to\n\t\t\tcreate the result set.\n\t\t@param row a reference to a method in the activation\n\t\t\tthat creates the expected row.\n\t\t\t\n\t\t\t\tExecRow row() throws StandardException;\n\t\t\t\n\t\t@param canCacheRow\tTrue if execution can cache the input row\n\t\t\tafter it has gotten it. 
If the input row is constructed soley\n\t\t\tof constants or parameters, it is ok to cache this row rather\n\t\t\tthan recreating it each time it is requested.\n\t\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t\t\t\t\t\t\t\t\t\t\toptimizer\n\t\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\t\t@return the row as a result set.\n\t\t@exception StandardException thrown when unable to create the\n\t\t\tresult set\n\t */\n\tNoPutResultSet getRowResultSet(Activation activation, GeneratedMethod row, \n\t\t\t\t\t\t\t boolean canCacheRow,\n\t\t\t\t\t\t\t int resultSetNumber,\n\t\t\t\t\t\t\t double optimizerEstimatedRowCount,\n\t\t\t\t\t\t\t double optimizerEstimatedCost)\n\t\tthrows StandardException;\n\n NoPutResultSet getRowResultSet(Activation activation, ExecRow row,\n boolean canCacheRow,\n int resultSetNumber,\n double optimizerEstimatedRowCount,\n double optimizerEstimatedCost)\n throws StandardException;\n\n /**\n Splice Addition\n\n A resultset that forms a result set on a collection of known rows.\n It is used to cache rows from a result set that's been materialized.\n\n @param activation The activation for this result set\n @param rows The collection of known ExecRows\n @param resultSetNumber The resultSetNumber for the result set being materialized\n */\n\n NoPutResultSet getCachedResultSet(Activation activation, List rows, int resultSetNumber)\n throws StandardException;\n\n\t/**\n\t\tA VTI result set wraps a user supplied result set.\n\n\t\t@param activation the activation for this result set,\n\t\t\tagainst which the row operation is performed to\n\t\t\tcreate the result set.\n\t\t@param row a reference to a method in the activation\n\t\t\tthat creates the expected row.\n\t\t\t\n\t\t\t\tExecRow row() throws StandardException;\n\t\t\t\n\t\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t\t@param constructor\t\tThe GeneratedMethod for 
the user's constructor\n\t\t@param javaClassName\tThe java class name for the VTI\n\t\t@param erdNumber\t\tint for referenced column BitSet (so it can be turned back into an object)\n\t\t@param version2\t\t\tWhether or not VTI is a version 2 VTI.\n\t\t@param isTarget\t\t\tWhether or not VTI is a target VTI.\n\t\t@param optimizerEstimatedRowCount\tEstimated total # of rows by optimizer\n\t\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\t\t@param isDerbyStyleTableFunction True if this is a Derby-style table function\n\t\t@param returnTypeNumber\tWhich saved object contains the return type (a multi-set) serialized as a byte array\n\t\t@param vtiProjectionNumber\tWhich saved object contains the projection for a RestrictedVTI\n\t\t@param vtiRestrictionNumber\tWhich saved object contains the restriction for a RestrictedVTI\n\t\t@return the row as a result set.\n\t\t@exception StandardException thrown when unable to create the\n\t\t\tresult set\n\t */\n\tpublic NoPutResultSet getVTIResultSet(Activation activation, GeneratedMethod row,\n\t\t\t\t\t\t\t\t\t int resultSetNumber,\n\t\t\t\t\t\t\t\t\t GeneratedMethod constructor,\n\t\t\t\t\t\t\t\t\t String javaClassName,\n\t\t\t\t\t\t\t\t\t String pushedQualifiersField,\n\t\t\t\t\t\t\t\t\t int erdNumber,\n\t\t\t\t\t\t\t\t\t int ctcNumber,\n\t\t\t\t\t\t\t\t\t boolean isTarget,\n\t\t\t\t\t\t\t\t\t int scanIsolationLevel,\n\t\t\t\t\t\t\t\t\t double optimizerEstimatedRowCount,\n\t\t\t\t\t\t\t\t\t double optimizerEstimatedCost,\n boolean isDerbyStyleTableFunction,\n int returnTypeNumber,\n int vtiProjectionNumber,\n int vtiRestrictionNumber,\n\t String explainPlan)\n\t\t throws StandardException;\n\n\t/*\n\t * This method was purely added to get some stored prepared statements to pass the validation stage of their compilation.\n\t * The existing method used a String for pushedQualifiersField. However, nothing was done with the initial value that\n\t * was passed into the constructor. 
So this method does the same thing and ignores the pushedQualifiersField which is\n\t * an com.splicemachine.db.iapi.store.access.Qualifier[][].\n\t */\n\tpublic NoPutResultSet getVTIResultSet(\n\t\t\tActivation activation,\n\t\t\tGeneratedMethod row,\n\t\t\tint resultSetNumber,\n\t\t\tGeneratedMethod constructor,\n\t\t\tString javaClassName,\n\t\t\tcom.splicemachine.db.iapi.store.access.Qualifier[][] pushedQualifiersField,\n\t\t\tint erdNumber,\n\t\t\tint ctcNumber,\n\t\t\tboolean isTarget,\n\t\t\tint scanIsolationLevel,\n\t\t\tdouble optimizerEstimatedRowCount,\n\t\t\tdouble optimizerEstimatedCost,\n\t\t\tboolean isDerbyStyleTableFunction,\n\t\t\tint returnTypeNumber,\n\t\t\tint vtiProjectionNumber,\n\t\t\tint vtiRestrictionNumber,\n\t\t\tString explainPlan\n\t\t\t)\n\t\t\t\t\tthrows StandardException;\n\n\t/**\n\t\tA hash result set forms a result set on a hash table built on a scan\n\t\tof a table.\n\t\tThe rows are put into the hash table on the 1st open.\n\t\t

\n\n\t\t@param activation the activation for this result set,\n\t\t\twhich provides the context for the row allocation operation.\n\t\t@param conglomId the conglomerate of the table to be scanned.\n\t\t@param scociItem The saved item for the static conglomerate info.\n\t\t@param resultRowAllocator a reference to a method in the activation\n\t\t\tthat creates a holder for the rows from the scan.\n\t\t\t\n\t\t\t\tExecRow rowAllocator() throws StandardException;\n\t\t\t\n\t\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t\t@param startKeyGetter a reference to a method in the activation\n\t\t\tthat gets the start key indexable row for the scan. Null\n\t\t\tmeans there is no start key.\n\t\t\t\n\t\t\t\tExecIndexRow startKeyGetter() throws StandardException;\n\t\t\t\n\t\t@param startSearchOperator The start search operator for opening\n\t\t\tthe scan\n\t\t@param stopKeyGetter\ta reference to a method in the activation\n\t\t\tthat gets the stop key indexable row for the scan. 
Null means\n\t\t\tthere is no stop key.\n\t\t\t\n\t\t\t\tExecIndexRow stopKeyGetter() throws StandardException;\n\t\t\t\n\t\t@param stopSearchOperator\tThe stop search operator for opening\n\t\t\tthe scan\n\t\t@param sameStartStopPosition\tRe-use the startKeyGetter for the stopKeyGetter\n\t\t\t\t\t\t\t\t\t\t(Exact match search.)\n\t\t@param scanQualifiers the array of Qualifiers for the scan.\n\t\t\tNull or an array length of zero means there are no qualifiers.\n\t\t@param nextQualifiers the array of Qualifiers for the look up into the hash table.\n\t\t@param initialCapacity\tThe initialCapacity for the HashTable.\n\t\t@param loadFactor\t\tThe loadFactor for the HashTable.\n\t\t@param maxCapacity\t\tThe maximum size for the HashTable.\n\t\t@param hashKeyColumn\tThe 0-based column # for the hash key.\n\t\t@param tableName\t\tThe full name of the table \n\t\t@param userSuppliedOptimizerOverrides\t\tOverrides specified by the user on the sql\n\t\t@param indexName\t\tThe name of the index, if one used to access table.\n\t\t@param isConstraint\t\tIf index, if used, is a backing index for a constraint.\n\t\t@param forUpdate\t\tTrue means open for update\n\t\t@param colRefItem\t\tAn saved item for a bitSet of columns that\n\t\t\t\t\t\t\t\tare referenced in the underlying table. 
-1 if\n\t\t\t\t\t\t\t\tno item.\n\t\t@param lockMode\t\t\tThe lock granularity to use (see\n\t\t\t\t\t\t\t\tTransactionController in access)\n\t\t@param tableLocked\t\tWhether or not the table is marked as using table locking\n\t\t\t\t\t\t\t\t(in sys.systables)\n\t\t@param isolationLevel\tIsolation level (specified or not) to use on scans\n\t\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t\t\t\t\t\t\t\t\t\t\toptimizer\n\t\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\t\t@return the table scan operation as a result set.\n\t\t@exception StandardException thrown when unable to create the\n\t\t\tresult set\n\t */\n\tNoPutResultSet getHashScanResultSet(\n\t\t\t Activation activation,\n\t\t\t\t\t\t\t\tlong conglomId,\n\t\t\t\t\t\t\t\tint scociItem,\t\t\t\t\t\t\n\t\t\t\t\t\t\t\tGeneratedMethod resultRowAllocator,\n\t\t\t\t\t\t\t\tint resultSetNumber,\n\t\t\t\t\t\t\t\tGeneratedMethod startKeyGetter,\n\t\t\t\t\t\t\t\tint startSearchOperator,\n\t\t\t\t\t\t\t\tGeneratedMethod stopKeyGetter,\n\t\t\t\t\t\t\t\tint stopSearchOperator,\n\t\t\t\t\t\t\t\tboolean sameStartStopPosition,\n boolean rowIdKey,\n\t\t\t\t\t\t\t\tString scanQualifiersField,\n\t\t\t\t\t\t\t\tString nextQualifierField,\n\t\t\t\t\t\t\t\tint initialCapacity,\n\t\t\t\t\t\t\t\tfloat loadFactor,\n\t\t\t\t\t\t\t\tint maxCapacity,\n\t\t\t\t\t\t\t\tint hashKeyColumn,\n\t\t\t\t\t\t\t\tString tableName,\n\t\t\t\t\t\t\t\tString userSuppliedOptimizerOverrides,\n\t\t\t\t\t\t\t\tString indexName,\n\t\t\t\t\t\t\t\tboolean isConstraint,\n\t\t\t\t\t\t\t\tboolean forUpdate,\n\t\t\t\t\t\t\t\tint colRefItem,\n\t\t\t\t\t\t\t\tint indexColItem,\n\t\t\t\t\t\t\t\tint lockMode,\n\t\t\t\t\t\t\t\tboolean tableLocked,\n\t\t\t\t\t\t\t\tint isolationLevel,\n\t\t\t\t\t\t\t\tdouble optimizerEstimatedRowCount,\n\t\t\t\t\t\t\t\tdouble optimizerEstimatedCost)\n\t\t\tthrows StandardException;\n\n\t/**\n\t\tA distinct scan result set pushes duplicate elimination into\n\t\tthe scan.\n\t\t

\n\n\t\t@param activation the activation for this result set,\n\t\t\twhich provides the context for the row allocation operation.\n\t\t@param conglomId the conglomerate of the table to be scanned.\n\t\t@param scociItem The saved item for the static conglomerate info.\n\t\t@param resultRowAllocator a reference to a method in the activation\n\t\t\tthat creates a holder for the rows from the scan.\n\t\t\t\n\t\t\t\tExecRow rowAllocator() throws StandardException;\n\t\t\t\n\t\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t\t@param hashKeyColumn\tThe 0-based column # for the hash key.\n\t\t@param tableName\t\tThe full name of the table\n\t\t@param userSuppliedOptimizerOverrides\t\tOverrides specified by the user on the sql\n\t\t@param indexName\t\tThe name of the index, if one used to access table.\n\t\t@param isConstraint\t\tIf index, if used, is a backing index for a constraint.\n\t\t@param colRefItem\t\tAn saved item for a bitSet of columns that\n\t\t\t\t\t\t\t\tare referenced in the underlying table. 
-1 if\n\t\t\t\t\t\t\t\tno item.\n\t\t@param lockMode\t\t\tThe lock granularity to use (see\n\t\t\t\t\t\t\t\tTransactionController in access)\n\t\t@param tableLocked\t\tWhether or not the table is marked as using table locking\n\t\t\t\t\t\t\t\t(in sys.systables)\n\t\t@param isolationLevel\tIsolation level (specified or not) to use on scans\n\t\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t\t\t\t\t\t\t\t\t\t\toptimizer\n\t\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\t\t@return the table scan operation as a result set.\n\t\t@exception StandardException thrown when unable to create the\n\t\t\tresult set\n\t */\n\tNoPutResultSet getDistinctScanResultSet(\n\t\t\t Activation activation,\n\t\t\t\t\t\t\t\tlong conglomId,\n\t\t\t\t\t\t\t\tint scociItem,\t\t\t\n\t\t\t\t\t\t\t\tGeneratedMethod resultRowAllocator,\n\t\t\t\t\t\t\t\tint resultSetNumber,\n\t\t\t\t\t\t\t\tint hashKeyColumn,\n\t\t\t\t\t\t\t\tString tableName,\n\t\t\t\t\t\t\t\tString userSuppliedOptimizerOverrides,\n\t\t\t\t\t\t\t\tString indexName,\n\t\t\t\t\t\t\t\tboolean isConstraint,\n\t\t\t\t\t\t\t\tint colRefItem,\n\t\t\t\t\t\t\t\tint lockMode,\n\t\t\t\t\t\t\t\tboolean tableLocked,\n\t\t\t\t\t\t\t\tint isolationLevel,\n\t\t\t\t\t\t\t\tdouble optimizerEstimatedRowCount,\n\t\t\t\t\t\t\t\tdouble optimizerEstimatedCost,\n String tableVersion)\n\t\t\tthrows StandardException;\n\n\t/**\n\t\tA table scan result set forms a result set on a scan\n\t\tof a table.\n\t\tThe rows can be constructed as they are requested from the\n\t\tresult set.\n\t\t

\n\t\tThis form of the table scan operation is simple, and is\n\t\tto be used when there are no predicates to be passed down\n\t\tto the scan to limit its scope on the target table.\n\n\t\t@param conglomId the conglomerate of the table to be scanned.\n\t\t@param scociItem The saved item for the static conglomerate info.\n\t\t@param activation the activation for this result set,\n\t\t\twhich provides the context for the row allocation operation.\n\t\t@param resultRowAllocator a reference to a method in the activation\n\t\t\tthat creates a holder for the result row of the scan. May\n\t\t\tbe a partial row.\n\t\t\t\n\t\t\t\tExecRow rowAllocator() throws StandardException;\n\t\t\t\n\t\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t\t@param startKeyGetter a reference to a method in the activation\n\t\t\tthat gets the start key indexable row for the scan. Null\n\t\t\tmeans there is no start key.\n\t\t\t\n\t\t\t\tExecIndexRow startKeyGetter() throws StandardException;\n\t\t\t\n\t\t@param startSearchOperator The start search operator for opening\n\t\t\tthe scan\n\t\t@param stopKeyGetter\ta reference to a method in the activation\n\t\t\tthat gets the stop key indexable row for the scan. 
Null means\n\t\t\tthere is no stop key.\n\t\t\t\n\t\t\t\tExecIndexRow stopKeyGetter() throws StandardException;\n\t\t\t\n\t\t@param stopSearchOperator\tThe stop search operator for opening\n\t\t\tthe scan\n\t\t@param sameStartStopPosition\tRe-use the startKeyGetter for the stopKeyGetter\n\t\t\t\t\t\t\t\t\t\t(Exact match search.)\n\t\t@param qualifiers the array of Qualifiers for the scan.\n\t\t\tNull or an array length of zero means there are no qualifiers.\n\t\t@param tableName\t\tThe full name of the table\n\t\t@param userSuppliedOptimizerOverrides\t\tOverrides specified by the user on the sql\n\t\t@param indexName\t\tThe name of the index, if one used to access table.\n\t\t@param isConstraint\t\tIf index, if used, is a backing index for a constraint.\n\t\t@param forUpdate\t\tTrue means open for update\n\t\t@param colRefItem\t\tAn saved item for a bitSet of columns that\n\t\t\t\t\t\t\t\tare referenced in the underlying table. -1 if\n\t\t\t\t\t\t\t\tno item.\n\t\t@param lockMode\t\t\tThe lock granularity to use (see\n\t\t\t\t\t\t\t\tTransactionController in access)\n\t\t@param tableLocked\t\tWhether or not the table is marked as using table locking\n\t\t\t\t\t\t\t\t(in sys.systables)\n\t\t@param isolationLevel\tIsolation level (specified or not) to use on scans\n\t\t@param oneRowScan\t\tWhether or not this is a 1 row scan.\n\t\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t\t\t\t\t\t\t\t\t\t\toptimizer\n\t\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\n\t\t@return the table scan operation as a result set.\n\t\t@exception StandardException thrown when unable to create the\n\t\t\tresult set\n\t */\n\tNoPutResultSet getTableScanResultSet(\n\t\t\t Activation activation,\n\t\t\t\t\t\t\t\tlong conglomId,\n\t\t\t\t\t\t\t\tint scociItem,\n\t\t\t\t\t\t\t\tGeneratedMethod resultRowAllocator,\n\t\t\t\t\t\t\t\tint resultSetNumber,\n\t\t\t\t\t\t\t\tGeneratedMethod startKeyGetter,\n\t\t\t\t\t\t\t\tint 
startSearchOperator,\n\t\t\t\t\t\t\t\tGeneratedMethod stopKeyGetter,\n\t\t\t\t\t\t\t\tint stopSearchOperator,\n\t\t\t\t\t\t\t\tboolean sameStartStopPosition,\n boolean rowIdKey,\n\t\t\t\t\t\t\t\tString qualifiersField,\n\t\t\t\t\t\t\t\tString tableName,\n\t\t\t\t\t\t\t\tString userSuppliedOptimizerOverrides,\n\t\t\t\t\t\t\t\tString indexName,\n\t\t\t\t\t\t\t\tboolean isConstraint,\n\t\t\t\t\t\t\t\tboolean forUpdate,\n\t\t\t\t\t\t\t\tint colRefItem,\n\t\t\t\t\t\t\t\tint indexColItem,\n\t\t\t\t\t\t\t\tint lockMode,\n\t\t\t\t\t\t\t\tboolean tableLocked,\n\t\t\t\t\t\t\t\tint isolationLevel,\n\t\t\t\t\t\t\t\tboolean oneRowScan,\n\t\t\t\t\t\t\t\tdouble optimizerEstimatedRowCount,\n\t\t\t\t\t\t\t\tdouble optimizerEstimatedCost,\n String tableVersion,\n String explainPlan)\n\t\t\tthrows StandardException;\n\n\t/**\n\t\tA table scan result set forms a result set on a scan\n\t\tof a table.\n\t\tThe rows can be constructed as they are requested from the\n\t\tresult set.\n\t\t

\n\t\tThis form of the table scan operation is simple, and is\n\t\tto be used when there are no predicates to be passed down\n\t\tto the scan to limit its scope on the target table.\n\n\t\t@param conglomId the conglomerate of the table to be scanned.\n\t\t@param scociItem The saved item for the static conglomerate info.\n\t\t@param activation the activation for this result set,\n\t\t\twhich provides the context for the row allocation operation.\n\t\t@param resultRowAllocator a reference to a method in the activation\n\t\t\tthat creates a holder for the result row of the scan. May\n\t\t\tbe a partial row.\n\t\t\t\n\t\t\t\tExecRow rowAllocator() throws StandardException;\n\t\t\t\n\t\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t\t@param startKeyGetter a reference to a method in the activation\n\t\t\tthat gets the start key indexable row for the scan. Null\n\t\t\tmeans there is no start key.\n\t\t\t\n\t\t\t\tExecIndexRow startKeyGetter() throws StandardException;\n\t\t\t\n\t\t@param startSearchOperator The start search operator for opening\n\t\t\tthe scan\n\t\t@param stopKeyGetter\ta reference to a method in the activation\n\t\t\tthat gets the stop key indexable row for the scan. 
Null means\n\t\t\tthere is no stop key.\n\t\t\t\n\t\t\t\tExecIndexRow stopKeyGetter() throws StandardException;\n\t\t\t\n\t\t@param stopSearchOperator\tThe stop search operator for opening\n\t\t\tthe scan\n\t\t@param sameStartStopPosition\tRe-use the startKeyGetter for the stopKeyGetter\n\t\t\t\t\t\t\t\t\t\t(Exact match search.)\n\t\t@param qualifiers the array of Qualifiers for the scan.\n\t\t\tNull or an array length of zero means there are no qualifiers.\n\t\t@param tableName\t\tThe full name of the table\n\t\t@param userSuppliedOptimizerOverrides\t\tOverrides specified by the user on the sql\n\t\t@param indexName\t\tThe name of the index, if one used to access table.\n\t\t@param isConstraint\t\tIf index, if used, is a backing index for a constraint.\n\t\t@param forUpdate\t\tTrue means open for update\n\t\t@param colRefItem\t\tAn saved item for a bitSet of columns that\n\t\t\t\t\t\t\t\tare referenced in the underlying table. -1 if\n\t\t\t\t\t\t\t\tno item.\n\t\t@param lockMode\t\t\tThe lock granularity to use (see\n\t\t\t\t\t\t\t\tTransactionController in access)\n\t\t@param tableLocked\t\tWhether or not the table is marked as using table locking\n\t\t\t\t\t\t\t\t(in sys.systables)\n\t\t@param isolationLevel\tIsolation level (specified or not) to use on scans\n\t\t@param rowsPerRead\t\tThe number of rows to read per fetch.\n @param disableForHoldable Whether or not bulk fetch should be disabled\n at runtime if the cursor is holdable.\n\t\t@param oneRowScan\t\tWhether or not this is a 1 row scan.\n\t\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t\t\t\t\t\t\t\t\t\t\toptimizer\n\t\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\n\t\t@return the table scan operation as a result set.\n\t\t@exception StandardException thrown when unable to create the\n\t\t\tresult set\n\t */\n\tNoPutResultSet getBulkTableScanResultSet(\n\t\t\t Activation activation,\n\t\t\t long conglomId,\n\t\t\t\t\t\t\t\tint 
scociItem,\n\t\t\t\t\t\t\t\tGeneratedMethod resultRowAllocator,\n\t\t\t\t\t\t\t\tint resultSetNumber,\n\t\t\t\t\t\t\t\tGeneratedMethod startKeyGetter,\n\t\t\t\t\t\t\t\tint startSearchOperator,\n\t\t\t\t\t\t\t\tGeneratedMethod stopKeyGetter,\n\t\t\t\t\t\t\t\tint stopSearchOperator,\n\t\t\t\t\t\t\t\tboolean sameStartStopPosition,\n boolean rowIdKey,\n\t\t\t\t\t\t\t\tString qualifiersField,\n\t\t\t\t\t\t\t\tString tableName,\n\t\t\t\t\t\t\t\tString userSuppliedOptimizerOverrides,\n\t\t\t\t\t\t\t\tString indexName,\n\t\t\t\t\t\t\t\tboolean isConstraint,\n\t\t\t\t\t\t\t\tboolean forUpdate,\n\t\t\t\t\t\t\t\tint colRefItem,\n\t\t\t\t\t\t\t\tint indexColItem,\n\t\t\t\t\t\t\t\tint lockMode,\n\t\t\t\t\t\t\t\tboolean tableLocked,\n\t\t\t\t\t\t\t\tint isolationLevel,\n\t\t\t\t\t\t\t\tint rowsPerRead,\n boolean disableForHoldable,\n\t\t\t\t\t\t\t\tboolean oneRowScan,\n\t\t\t\t\t\t\t\tdouble optimizerEstimatedRowCount,\n\t\t\t\t\t\t\t\tdouble optimizerEstimatedCost,\n\t\t\t\t\t\t\t\tString tableVersion,\n String explainPlan)\n\t\t\tthrows StandardException;\n\n /**\n\t\tA multi-probe result set, used for probing an index with one or more\n\t\ttarget values (probeValues) and returning the matching rows. This\n\t\ttype of result set is useful for IN lists as it allows us to avoid\n\t\tscannning an entire, potentially very large, index for a mere handful\n\t\tof rows (DERBY-47).\n\n\t\tAll arguments are the same as for TableScanResultSet, plus the\n\t\tfollowing:\n\n\t\t@param probeVals List of values with which to probe the underlying\n\t\t\ttable. 
Should not be null.\n\t\t@param sortRequired Which type of sort we need for the values\n\t\t\t(ascending, descending, or none).\n\t */\n\tNoPutResultSet getMultiProbeTableScanResultSet(\n\t\t\t Activation activation,\n\t\t\t\t\t\t\t\tlong conglomId,\n\t\t\t\t\t\t\t\tint scociItem,\n\t\t\t\t\t\t\t\tGeneratedMethod resultRowAllocator,\n\t\t\t\t\t\t\t\tint resultSetNumber,\n\t\t\t\t\t\t\t\tGeneratedMethod startKeyGetter,\n\t\t\t\t\t\t\t\tint startSearchOperator,\n\t\t\t\t\t\t\t\tGeneratedMethod stopKeyGetter,\n\t\t\t\t\t\t\t\tint stopSearchOperator,\n\t\t\t\t\t\t\t\tboolean sameStartStopPosition,\n boolean rowIdKey,\n\t\t\t\t\t\t\t\tString qualifiersField,\n\t\t\t\t\t\t\t\tDataValueDescriptor [] probeVals,\n\t\t\t\t\t\t\t\tint sortRequired,\n\t\t\t\t\t\t\t\tString tableName,\n\t\t\t\t\t\t\t\tString userSuppliedOptimizerOverrides,\n\t\t\t\t\t\t\t\tString indexName,\n\t\t\t\t\t\t\t\tboolean isConstraint,\n\t\t\t\t\t\t\t\tboolean forUpdate,\n\t\t\t\t\t\t\t\tint colRefItem,\n\t\t\t\t\t\t\t\tint indexColItem,\n\t\t\t\t\t\t\t\tint lockMode,\n\t\t\t\t\t\t\t\tboolean tableLocked,\n\t\t\t\t\t\t\t\tint isolationLevel,\n\t\t\t\t\t\t\t\tboolean oneRowScan,\n\t\t\t\t\t\t\t\tdouble optimizerEstimatedRowCount,\n\t\t\t\t\t\t\t\tdouble optimizerEstimatedCost,\n String tableVersion,\n String explainPlan)\n\t\t\tthrows StandardException;\n /**\n\t\tAn index row to base row result set gets an index row from its source\n\t\tand uses the RowLocation in its last column to get the row from the\n\t\tbase conglomerate.\n\t\t

\n\n\t @param conglomId\tConglomerate # for the heap.\n\t\t@param scoci The saved item for the static conglomerate info.\n\t\t@param source\tthe source result set, which is expected to provide\n\t\t\t\t\t\trows from an index conglomerate\n\t\t@param resultRowAllocator a reference to a method in the activation\n\t\t\tthat creates a holder for the rows from the scan.\n\t\t\t\n\t\t\t\tExecRow rowAllocator() throws StandardException;\n\t\t\t\n\t\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t\t@param indexName\t\tThe name of the index.\n\t\t@param heapColRefItem\tA saved item for a bitImpl of columns that\n\t\t\t\t\t\t\t\tare referenced in the underlying heap. -1 if\n\t\t\t\t\t\t\t\tno item.\n\t\t@param allColRefItem A saved item for a bitImpl of columns\n\t\t\t\t\t\t\t\tthat are referenced in the underlying\n\t\t\t\t\t\t\t\tindex and heap. -1 if no item.\n\t\t@param heapOnlyColRefItem A saved item for a bitImpl of\n\t\t\t\t\t\t\t\tcolumns that are referenced in the\n\t\t\t\t\t\t\t\tunderlying heap only. 
-1 if no item.\n\n\t\t@param indexColMapItem\tA saved item for a ReferencedColumnsDescriptorImpl\n\t\t\t\t\t\t\t\twhich tell which columms are coming from the index.\n\t\t@param restriction\t\tThe restriction, if any, to be applied to the base row\n\t\t@param forUpdate\t\tTrue means to open for update\n\t\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t\t\t\t\t\t\t\t\t\t\toptimizer\n\t\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\n\t\t@return the index row to base row operation as a result set.\n\t\t@exception StandardException thrown when unable to create the\n\t\t\tresult set\n\t */\n\tpublic NoPutResultSet getIndexRowToBaseRowResultSet(\n\t\t\t\t\t\t\t\tlong conglomId,\n\t\t\t\t\t\t\t\tint scoci,\n\t\t\t\t\t\t\t\tNoPutResultSet source,\n\t\t\t\t\t\t\t\tGeneratedMethod resultRowAllocator,\n\t\t\t\t\t\t\t\tint resultSetNumber,\n\t\t\t\t\t\t\t\tString indexName,\n\t\t\t\t\t\t\t\tint heapColRefItem,\n\t\t\t\t\t\t\t\tint allColRefItem,\n\t\t\t\t\t\t\t\tint heapOnlyColRefItem,\n\t\t\t\t\t\t\t\tint indexColMapItem,\n\t\t\t\t\t\t\t\tGeneratedMethod restriction,\n\t\t\t\t\t\t\t\tboolean forUpdate,\n\t\t\t\t\t\t\t\tdouble optimizerEstimatedRowCount,\n\t\t\t\t\t\t\t\tdouble optimizerEstimatedCost,\n String tableVersion,\n\t\t\t\t\t\t\t\tString explainPlan)\n\t\t\tthrows StandardException;\n\n\n /**\n A OLAP window on top of a regular result set. 
It is used to realize\n window functions.\n\n @param source the result set from which to take rows to be\n filtered by this operation.\n @param isInSortedOrder\ttrue if the source result set is in sorted order\n @param aggregateItem entry in preparedStatement's savedObjects for aggregates\n @param rowAllocator a reference to a method in the activation\n that generates rows of the right size and shape for the source\n @param rowSize\t\t\tthe size of the row that is allocated by rowAllocator.\n size should be the maximum size of the sum of all the datatypes.\n user type are necessarily approximated\n @param resultSetNumber\tThe resultSetNumber for the ResultSet\n @param optimizerEstimatedRowCount\tEstimated total # of rows by\n optimizer\n @param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n @return the scalar aggregation operation as a result set.\n @exception StandardException thrown when unable to create the\n result set\n */\n NoPutResultSet getWindowResultSet(NoPutResultSet source,\n boolean isInSortedOrder,\n int aggregateItem,\n GeneratedMethod rowAllocator,\n int rowSize,\n int resultSetNumber,\n double optimizerEstimatedRowCount,\n double optimizerEstimatedCost,\n String explainPlan)\n throws StandardException;\n\n\n\t/**\n\t\tA nested loop left outer join result set forms a result set on top of\n\t\t2 other result sets.\n\t\tThe rows can be constructed as they are requested from the\n\t\tresult set.\n\t\t

\n\t\tThis form of the nested loop join operation is simple, and is\n\t\tto be used when there are no join predicates to be passed down\n\t\tto the join to limit its scope on the right ResultSet.\n\n\t\t@param leftResultSet\tOuter ResultSet for join.\n\t\t@param leftNumCols\t\tNumber of columns in the leftResultSet\n\t\t@param rightResultSet\tInner ResultSet for join.\n\t\t@param rightNumCols\t\tNumber of columns in the rightResultSet\n\t\t@param joinClause a reference to a method in the activation\n\t\t\tthat is applied to the activation's \"current row\" field\n\t\t\tto determine whether the joinClause is satisfied or not.\n\t\t\tThe signature of this method is\n\t\t\t\n\t\t\t\tBoolean joinClause() throws StandardException;\n\t\t\t\n\t\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t\t@param oneRowRightSide\tboolean, whether or not the right side returns\n\t\t\t\t\t\t\t\ta single row. (No need to do 2nd next() if it does.)\n\t\t@param notExistsRightSide\tboolean, whether or not the right side resides a\n\t\t\t\t\t\t\t\t\tNOT EXISTS base table\n\t\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t\t\t\t\t\t\t\t\t\t\toptimizer\n\t\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\t\t@param userSuppliedOptimizerOverrides\t\tOverrides specified by the user on the sql\n\t\t@return the nested loop join operation as a result set.\n\t\t@exception StandardException thrown when unable to create the\n\t\t\tresult set\n\t */\n public NoPutResultSet getNestedLoopJoinResultSet(NoPutResultSet leftResultSet,\n\t\t\t\t\t\t\t\t int leftNumCols,\n\t\t\t\t\t\t\t\t NoPutResultSet rightResultSet,\n\t\t\t\t\t\t\t\t int rightNumCols,\n\t\t\t\t\t\t\t\t GeneratedMethod joinClause,\n\t\t\t\t\t\t\t\t int resultSetNumber,\n\t\t\t\t\t\t\t\t boolean oneRowRightSide,\n\t\t\t\t\t\t\t\t boolean notExistsRightSide,\n\t\t\t\t\t\t\t\t double optimizerEstimatedRowCount,\n\t\t\t\t\t\t\t\t double optimizerEstimatedCost,\n\t\t\t\t\t\t\t\t String 
userSuppliedOptimizerOverrides,\n\t\t\t\t\t String explainPlan)\n\t\t\tthrows StandardException;\n\n public NoPutResultSet getMergeSortJoinResultSet(NoPutResultSet leftResultSet,\n\t\t\t int leftNumCols,\n\t\t\t NoPutResultSet rightResultSet,\n\t\t\t int rightNumCols,\n\t\t\t int leftHashKeyItem,\n\t\t\t int rightHashKeyItem,\n\t\t\t GeneratedMethod joinClause,\n\t\t\t int resultSetNumber,\n\t\t\t boolean oneRowRightSide,\n\t\t\t boolean notExistsRightSide,\n\t\t\t double optimizerEstimatedRowCount,\n\t\t\t double optimizerEstimatedCost,\n\t\t\t String userSuppliedOptimizerOverrides,\n\t String explainPlan)\n\t\t\t\t\t throws StandardException;\n\n public NoPutResultSet getMergeJoinResultSet(NoPutResultSet leftResultSet,\n\t\t\t int leftNumCols,\n\t\t\t NoPutResultSet rightResultSet,\n\t\t\t int rightNumCols,\n\t\t\t int leftHashKeyItem,\n\t\t\t int rightHashKeyItem,\n\t\t\t GeneratedMethod joinClause,\n\t\t\t int resultSetNumber,\n\t\t\t boolean oneRowRightSide,\n\t\t\t boolean notExistsRightSide,\n\t\t\t double optimizerEstimatedRowCount,\n\t\t\t double optimizerEstimatedCost,\n\t\t\t String userSuppliedOptimizerOverrides,\n String explainPlan)\n\t\t\t\t\t throws StandardException;\n\n public NoPutResultSet getBroadcastJoinResultSet(NoPutResultSet leftResultSet,\n\t\t\t int leftNumCols,\n\t\t\t NoPutResultSet rightResultSet,\n\t\t\t int rightNumCols,\n\t\t\t int leftHashKeyItem,\n\t\t\t int rightHashKeyItem,\n\t\t\t GeneratedMethod joinClause,\n\t\t\t int resultSetNumber,\n\t\t\t boolean oneRowRightSide,\n\t\t\t boolean notExistsRightSide,\n\t\t\t double optimizerEstimatedRowCount,\n\t\t\t double optimizerEstimatedCost,\n\t\t\t String userSuppliedOptimizerOverrides,\n\t\t\t String explainPlan)\n\t\t\t throws StandardException;\n\n\t/**\n\t\tA hash join.\n\n\t\t@param leftResultSet\tOuter ResultSet for join.\n\t\t@param leftNumCols\t\tNumber of columns in the leftResultSet\n\t\t@param rightResultSet\tInner ResultSet for join.\n\t\t@param rightNumCols\t\tNumber of 
columns in the rightResultSet\n\t\t@param joinClause a reference to a method in the activation\n\t\t\tthat is applied to the activation's \"current row\" field\n\t\t\tto determine whether the joinClause is satisfied or not.\n\t\t\tThe signature of this method is\n\t\t\t\n\t\t\t\tBoolean joinClause() throws StandardException;\n\t\t\t\n\t\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t\t@param oneRowRightSide\tboolean, whether or not the right side returns\n\t\t\t\t\t\t\t\ta single row. (No need to do 2nd next() if it does.)\n\t\t@param notExistsRightSide\tboolean, whether or not the right side resides a\n\t\t\t\t\t\t\t\t\tNOT EXISTS base table\n\t\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t\t\t\t\t\t\t\t\t\t\toptimizer\n\t\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\t\t@param userSuppliedOptimizerOverrides\t\tOverrides specified by the user on the sql\n\t\t@return the nested loop join operation as a result set.\n\t\t@exception StandardException thrown when unable to create the\n\t\t\tresult set\n\t */\n public NoPutResultSet getHashJoinResultSet(NoPutResultSet leftResultSet,\n\t\t\t\t\t\t\t\t int leftNumCols,\n\t\t\t\t\t\t\t\t NoPutResultSet rightResultSet,\n\t\t\t\t\t\t\t\t int rightNumCols,\n int leftHashKeyitem,\n int rightHashKeyItem,\n\t\t\t\t\t\t\t\t GeneratedMethod joinClause,\n\t\t\t\t\t\t\t\t int resultSetNumber,\n\t\t\t\t\t\t\t\t boolean oneRowRightSide,\n\t\t\t\t\t\t\t\t boolean notExistsRightSide,\n\t\t\t\t\t\t\t\t double optimizerEstimatedRowCount,\n\t\t\t\t\t\t\t\t double optimizerEstimatedCost,\n\t\t\t\t\t\t\t\t String userSuppliedOptimizerOverrides,\n\t\t\t\t\t String explainPlan)\n\t\t\tthrows StandardException;\n\n\n\t/**\n\t\tA nested loop join result set forms a result set on top of\n\t\t2 other result sets.\n\t\tThe rows can be constructed as they are requested from the\n\t\tresult set.\n\t\t

\n\t\tThis form of the nested loop join operation is simple, and is\n\t\tto be used when there are no join predicates to be passed down\n\t\tto the join to limit its scope on the right ResultSet.\n\n\t\t@param leftResultSet\tOuter ResultSet for join.\n\t\t@param leftNumCols\t\tNumber of columns in the leftResultSet\n\t\t@param rightResultSet\tInner ResultSet for join.\n\t\t@param rightNumCols\t\tNumber of columns in the rightResultSet\n\t\t@param joinClause a reference to a method in the activation\n\t\t\tthat is applied to the activation's \"current row\" field\n\t\t\tto determine whether the joinClause is satisfied or not.\n\t\t\tThe signature of this method is\n\t\t\t\n\t\t\t\tBoolean joinClause() throws StandardException;\n\t\t\t\n\t\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t\t@param emptyRowFun a reference to a method in the activation\n\t\t\t\t\t\t\tthat is called if the right child returns no rows\n\t\t@param wasRightOuterJoin\tWhether or not this was originally a right outer join\n\t\t@param oneRowRightSide\tboolean, whether or not the right side returns\n\t\t\t\t\t\t\t\ta single row. 
(No need to do 2nd next() if it does.)\n\t\t@param notExistsRightSide\tboolean, whether or not the right side resides a\n\t\t\t\t\t\t\t\t\tNOT EXISTS base table\n\t\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t\t\t\t\t\t\t\t\t\t\toptimizer\n\t\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\t\t@param userSuppliedOptimizerOverrides\t\tOverrides specified by the user on the sql\n\t\t@return the nested loop join operation as a result set.\n\t\t@exception StandardException thrown when unable to create the\n\t\t\tresult set\n\t */\n public NoPutResultSet getNestedLoopLeftOuterJoinResultSet(NoPutResultSet leftResultSet,\n\t\t\t\t\t\t\t\t int leftNumCols,\n\t\t\t\t\t\t\t\t NoPutResultSet rightResultSet,\n\t\t\t\t\t\t\t\t int rightNumCols,\n\t\t\t\t\t\t\t\t GeneratedMethod joinClause,\n\t\t\t\t\t\t\t\t int resultSetNumber,\n\t\t\t\t\t\t\t\t GeneratedMethod emptyRowFun,\n\t\t\t\t\t\t\t\t boolean wasRightOuterJoin,\n\t\t\t\t\t\t\t\t boolean oneRowRightSide,\n\t\t\t\t\t\t\t\t boolean notExistsRightSide,\n\t\t\t\t\t\t\t\t double optimizerEstimatedRowCount,\n\t\t\t\t\t\t\t\t double optimizerEstimatedCost,\n\t\t\t\t\t\t\t\t String userSuppliedOptimizerOverrides,\n\t\t\t\t\t String explainPlan)\n\t\t\tthrows StandardException;\n\n\t/**\n\t\tA left outer join using a hash join.\n\n\t\t@param leftResultSet\tOuter ResultSet for join.\n\t\t@param leftNumCols\t\tNumber of columns in the leftResultSet\n\t\t@param rightResultSet\tInner ResultSet for join.\n\t\t@param rightNumCols\t\tNumber of columns in the rightResultSet\n\t\t@param joinClause a reference to a method in the activation\n\t\t\tthat is applied to the activation's \"current row\" field\n\t\t\tto determine whether the joinClause is satisfied or not.\n\t\t\tThe signature of this method is\n\t\t\t\n\t\t\t\tBoolean joinClause() throws StandardException;\n\t\t\t\n\t\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t\t@param emptyRowFun a reference to a method in the 
activation\n\t\t\t\t\t\t\tthat is called if the right child returns no rows\n\t\t@param wasRightOuterJoin\tWhether or not this was originally a right outer join\n\t\t@param oneRowRightSide\tboolean, whether or not the right side returns\n\t\t\t\t\t\t\t\ta single row. (No need to do 2nd next() if it does.)\n\t\t@param notExistsRightSide\tboolean, whether or not the right side resides a\n\t\t\t\t\t\t\t\t\tNOT EXISTS base table\n\t\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t\t\t\t\t\t\t\t\t\t\toptimizer\n\t\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\t\t@param userSuppliedOptimizerOverrides\t\tOverrides specified by the user on the sql\n\t\t@return the nested loop join operation as a result set.\n\t\t@exception StandardException thrown when unable to create the\n\t\t\tresult set\n\t */\n public NoPutResultSet getHashLeftOuterJoinResultSet(NoPutResultSet leftResultSet,\n\t\t\t\t\t\t\t\t int leftNumCols,\n\t\t\t\t\t\t\t\t NoPutResultSet rightResultSet,\n\t\t\t\t\t\t\t\t int rightNumCols,\n int leftHashKeyItem,\n int rightHashKeyItem,\n\t\t\t\t\t\t\t\t GeneratedMethod joinClause,\n\t\t\t\t\t\t\t\t int resultSetNumber,\n\t\t\t\t\t\t\t\t GeneratedMethod emptyRowFun,\n\t\t\t\t\t\t\t\t boolean wasRightOuterJoin,\n\t\t\t\t\t\t\t\t boolean oneRowRightSide,\n\t\t\t\t\t\t\t\t boolean notExistsRightSide,\n\t\t\t\t\t\t\t\t double optimizerEstimatedRowCount,\n\t\t\t\t\t\t\t\t double optimizerEstimatedCost,\n\t\t\t\t\t\t\t\t String userSuppliedOptimizerOverrides,\n\t\t\t\t\t String explainPlan)\n\t\t\tthrows StandardException;\n\n\t/**\n\t\tA ResultSet which materializes the underlying ResultSet tree into a \n\t\ttemp table on the 1st open. 
All subsequent \"scans\" of this ResultSet\n\t\twill return results from the temp table.\n\n\t\t@param source the result set input to this result set.\n\t\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t\t\t\t\t\t\t\t\t\t\toptimizer\n\t\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\t\t@return the materialization operation as a result set.\n\n\t \t@exception StandardException\t\tThrown on failure\n\t */\n\tNoPutResultSet getMaterializedResultSet(NoPutResultSet source, \n\t\t\t\t\t\t\t\t\t\t\tint resultSetNumber,\n\t\t\t\t\t\t\t\t\t\t\tdouble optimizerEstimatedRowCount,\n\t\t\t\t\t\t\t\t\t\t\tdouble optimizerEstimatedCost) \n\t\tthrows StandardException;\n\n\t/**\n\t\tA ResultSet which provides the insensitive scrolling functionality\n\t\tfor the underlying result set by materializing the underlying ResultSet \n\t\ttree into a hash table while scrolling forward.\n\n\t\t@param source the result set input to this result set.\n\t\t@param activation the activation for this result set,\n\t\t\twhich provides the context for normalization.\n\t\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t\t@param sourceRowWidth\tThe # of columns in the source row.\n\t\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t\t\t\t\t\t\t\t\t\t\toptimizer\n\t\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\t\t@return the materialization operation as a result set.\n\n\t \t@exception StandardException\t\tThrown on failure\n\t */\n\tNoPutResultSet getScrollInsensitiveResultSet(NoPutResultSet source,\n\t Activation activation, \n\t\t\t\t\t\t\t\t\t\t\tint resultSetNumber,\n\t\t\t\t\t\t\t\t\t\t\tint sourceRowWidth,\n\t\t\t\t\t\t\t\t\t\t\tboolean scrollable,\n\t\t\t\t\t\t\t\t\t\t\tdouble optimizerEstimatedRowCount,\n\t\t\t\t\t\t\t\t\t\t\tdouble optimizerEstimatedCost,\n\t\t\t\t\t\t\t\t\t\t\tString explainPlan) \n\t\tthrows 
StandardException;\n\t/**\n\t A left outer join using a sort merge join.\n\n\t@param leftResultSet\tOuter ResultSet for join.\n\t@param leftNumCols\t\tNumber of columns in the leftResultSet\n\t@param rightResultSet\tInner ResultSet for join.\n\t@param rightNumCols\t\tNumber of columns in the rightResultSet\n\t@param joinClause a reference to a method in the activation\n\t\tthat is applied to the activation's \"current row\" field\n\t\tto determine whether the joinClause is staisfied or not.\n\t\tThe signature of this method is\n\t\t\n\t\t\tBoolean joinClause() throws StandardException;\n\t\t\n\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t@param emptyRowFun a reference to a method in the activation\n\t\tthat is called if the right child returns no rows\n\t@param wasRightOuterJoin\tWhether or not this was originally a right outer join\n\t@param oneRowRightSide\tboolean, whether or not the right side returns\n\t\ta single row. (No need to do 2nd next() if it does.)\n\t@param notExistsRightSide\tboolean, whether or not the right side resides a\n\t\tNOT EXISTS base table\n\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t\toptimizer\n\t@param optimizerEstimatedCost\tEstimated total cost by optimizer\n\t@param userSuppliedOptimizerOverrides\tOverrides specified by the user on the sql\n\t@return the nested loop join operation as a result set.\n\t@exception StandardException thrown when unable to create the \n\t\tresult set\n\t*/\n\tpublic NoPutResultSet getMergeSortLeftOuterJoinResultSet(NoPutResultSet leftResultSet,\n\t\t\t\tint leftNumCols,\n\t\t\t\tNoPutResultSet rightResultSet,\n\t\t\t\tint rightNumCols,\n\t\t\t\tint leftHashKeyItem,\n\t\t\t\tint rightHashKeyItem,\n\t\t\t\tGeneratedMethod joinClause,\n\t\t\t\tint resultSetNUmber,\n\t\t\t\tGeneratedMethod emptyRowFun,\n\t\t\t\tboolean wasRightOuterJoin,\n\t\t\t\tboolean oneRowRightSide,\n\t\t\t\tboolean noExistsRightSide,\n\t\t\t\tdouble 
optimizerEstimatedRowCount,\n\t\t\t\tdouble optimizerEstimatedCost,\n\t\t\t\tString userSuppliedOptimizerOverrides,\n\t\t\t\tString explainPlan)\n\tthrows StandardException;\n\n\tpublic NoPutResultSet getMergeLeftOuterJoinResultSet(NoPutResultSet leftResultSet,\n\t\t\t\tint leftNumCols,\n\t\t\t\tNoPutResultSet rightResultSet,\n\t\t\t\tint rightNumCols,\n\t\t\t\tint leftHashKeyItem,\n\t\t\t\tint rightHashKeyItem,\n\t\t\t\tGeneratedMethod joinClause,\n\t\t\t\tint resultSetNUmber,\n\t\t\t\tGeneratedMethod emptyRowFun,\n\t\t\t\tboolean wasRightOuterJoin,\n\t\t\t\tboolean oneRowRightSide,\n\t\t\t\tboolean noExistsRightSide,\n\t\t\t\tdouble optimizerEstimatedRowCount,\n\t\t\t\tdouble optimizerEstimatedCost,\n String userSuppliedOptimizerOverrides,\n String explainPlan)\n\tthrows StandardException;\n\n\tpublic NoPutResultSet getBroadcastLeftOuterJoinResultSet(NoPutResultSet leftResultSet,\n\t\t\t\tint leftNumCols,\n\t\t\t\tNoPutResultSet rightResultSet,\n\t\t\t\tint rightNumCols,\n\t\t\t\tint leftHashKeyItem,\n\t\t\t\tint rightHashKeyItem,\n\t\t\t\tGeneratedMethod joinClause,\n\t\t\t\tint resultSetNUmber,\n\t\t\t\tGeneratedMethod emptyRowFun,\n\t\t\t\tboolean wasRightOuterJoin,\n\t\t\t\tboolean oneRowRightSide,\n\t\t\t\tboolean noExistsRightSide,\n\t\t\t\tdouble optimizerEstimatedRowCount,\n\t\t\t\tdouble optimizerEstimatedCost,\n\t\t\t\tString userSuppliedOptimizerOverrides,\n\t\t\t\tString explainPlan)\n\tthrows StandardException;\n\n\t/**\n\t\tREMIND: needs more description...\n\n\t\t@param source the result set input to this result set.\n\t\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t\t@param erdNumber\tint for ResultDescription \n\t\t\t\t\t\t\t(so it can be turned back into an object)\n\t\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t\t\t\t\t\t\t\t\t\t\toptimizer\n\t\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\t\t@return the normalization operation as a result set.\n\n\t \t@exception 
StandardException\t\tThrown on failure\n\t */\n\tNoPutResultSet getNormalizeResultSet(NoPutResultSet source, \n\t\t\t\t\t\t\t\t\t\t int resultSetNumber,\n\t\t\t\t\t\t\t\t\t\t int erdNumber,\n\t\t\t\t\t\t\t\t\t\t double optimizerEstimatedRowCount,\n\t\t\t\t\t\t\t\t\t\t double optimizerEstimatedCost,\n\t\t\t\t\t\t\t\t\t\t boolean forUpdate,\n\t\t\t\t\t\t\t\t\t\t String explainPlan) \n\t\tthrows StandardException;\n\n\t/**\n\t\tA current of result set forms a result set on the\n\t\tcurrent row of an open cursor.\n\t\tIt is used to perform positioned operations such as\n\t\tpositioned update and delete, using the result set paradigm.\n\n\t\t@param cursorName the name of the cursor providing the row.\n\t\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t */\n\tNoPutResultSet getCurrentOfResultSet(String cursorName, Activation activation, \n\t\t\t\t\t\t\t\t\tint resultSetNumber);\n\n\t/**\n\t * The Union interface is used to evaluate the union (all) of two ResultSets.\n\t * (Any duplicate elimination is performed above this ResultSet.)\n\t *\n\t * Forms a ResultSet returning the union of the rows in two source\n\t * ResultSets. 
The column types in source1 and source2 are assumed to be\n\t * the same.\n\t *\n\t * @param source1\tThe first ResultSet whose rows go into the union\n\t * @param source2\tThe second ResultSet whose rows go into the\n\t *\t\t\tunion\n\t *\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t *\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t *\t\t\t\t\t\t\t\t\t\toptimizer\n\t *\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\t *\n\t * @return\tA ResultSet from which the caller can get the union\n\t *\t\tof the two source ResultSets.\n\t *\n\t * @exception StandardException\t\tThrown on failure\n\t */\n\tNoPutResultSet\tgetUnionResultSet(NoPutResultSet source1,\n\t\t\t\t\tNoPutResultSet source2,\n\t\t\t\t\tint resultSetNumber,\n\t\t\t\t\tdouble optimizerEstimatedRowCount,\n\t\t\t\t\tdouble optimizerEstimatedCost,\n\t\t\t\t\tString explainPlan)\n\t\t\t\t\tthrows StandardException;\n\n\n /**\n * The SetOpResultSet is used to implement an INTERSECT or EXCEPT operation.\n * It selects rows from two ordered input result sets.\n *\n * @param leftSource The result set that implements the left input\n * @param rightSource The result set that implements the right input\n * @param activation the activation for this result set\n * @param resultSetNumber\n * @param optimizerEstimatedRowCount\n * @param optimizerEstimatedCost\n * @param opType IntersectOrExceptNode.INTERSECT_OP or EXCEPT_OP\n * @param all true if the operation is an INTERSECT ALL or an EXCEPT ALL,\n * false if the operation is an INTERSECT DISCTINCT or an EXCEPT DISCTINCT\n * @param intermediateOrderByColumnsSavedObject The saved object index for the array of order by columns for the\n * ordering of the left and right sources. 
That is, both the left and right sources have an order by\n * clause of the form ORDER BY intermediateOrderByColumns[0],intermediateOrderByColumns[1],...\n * @param intermediateOrderByDirectionSavedObject The saved object index for the array of source\n * order by directions. That is, the ordering of the i'th order by column in the input is ascending\n * if intermediateOrderByDirection[i] is 1, descending if intermediateOrderByDirection[i] is -1.\n\t *\n\t * @return\tA ResultSet from which the caller can get the INTERSECT or EXCEPT\n\t *\n\t * @exception StandardException\t\tThrown on failure\n\t */\n NoPutResultSet getSetOpResultSet( NoPutResultSet leftSource,\n NoPutResultSet rightSource,\n Activation activation, \n int resultSetNumber,\n long optimizerEstimatedRowCount,\n double optimizerEstimatedCost,\n int opType,\n boolean all,\n int intermediateOrderByColumnsSavedObject,\n int intermediateOrderByDirectionSavedObject,\n int intermediateOrderByNullsLowSavedObject)\n throws StandardException;\n \n \n\t//\n\t// Misc operations\n\t//\n\n\n\n\t/**\n\t * A last index key result set returns the last row from\n\t * the index in question. It is used as an ajunct to max().\n\t *\n\t * @param activation \t\tthe activation for this result set,\n\t *\t\twhich provides the context for the row allocation operation.\n\t * @param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t * @param resultRowAllocator a reference to a method in the activation\n\t * \t\t\t\t\t\tthat creates a holder for the result row of the scan. May\n\t *\t\t\t\t\t\tbe a partial row. 
\n\t *\t\tExecRow rowAllocator() throws StandardException; \n\t * @param conglomId \t\tthe conglomerate of the table to be scanned.\n\t * @param tableName\t\t\tThe full name of the table\n\t * @param userSuppliedOptimizerOverrides\t\tOverrides specified by the user on the sql\n\t * @param indexName\t\t\tThe name of the index, if one used to access table.\n\t * @param colRefItem\t\tAn saved item for a bitSet of columns that\n\t *\t\t\t\t\t\t\tare referenced in the underlying table. -1 if\n\t *\t\t\t\t\t\t\tno item.\n\t * @param lockMode\t\t\tThe lock granularity to use (see\n\t *\t\t\t\t\t\t\tTransactionController in access)\n\t * @param tableLocked\t\tWhether or not the table is marked as using table locking\n\t *\t\t\t\t\t\t\t(in sys.systables)\n\t * @param isolationLevel\tIsolation level (specified or not) to use on scans\n\t * @param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t * \t\t\t\t\t\t\t\t\t\toptimizer\n\t * @param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\t *\n\t * @return the scan operation as a result set.\n \t *\n\t * @exception StandardException thrown when unable to create the\n\t * \t\t\t\tresult set\n\t */\n\tNoPutResultSet getLastIndexKeyResultSet\n\t(\n\t\tActivation \t\t\tactivation,\n\t\tint \t\t\t\tresultSetNumber,\n\t\tGeneratedMethod \tresultRowAllocator,\n\t\tlong \t\t\t\tconglomId,\n\t\tString \t\t\t\ttableName,\n\t\tString \t\t\t\tuserSuppliedOptimizerOverrides,\n\t\tString \t\t\t\tindexName,\n\t\tint \t\t\t\tcolRefItem,\n\t\tint \t\t\t\tlockMode,\n\t\tboolean\t\t\t\ttableLocked,\n\t\tint\t\t\t\t\tisolationLevel,\n\t\tdouble\t\t\t\toptimizerEstimatedRowCount,\n\t\tdouble \t\t\t\toptimizerEstimatedCost,\n String tableVersion\n\t) throws StandardException;\n\n\n\t/**\n\t\tA Dependent table scan result set forms a result set on a scan\n\t\tof a dependent table for the rows that got materilized \n\t\ton the scan of its parent table and if the row being deleted\n\t\ton parent table has a reference in the 
dependent table.\n\n\t\t@param activation the activation for this result set,\n\t\t\twhich provides the context for the row allocation operation.\n\t\t@param conglomId the conglomerate of the table to be scanned.\n\t\t@param scociItem The saved item for the static conglomerate info.\n\t\t@param resultRowAllocator a reference to a method in the activation\n\t\t\tthat creates a holder for the result row of the scan. May\n\t\t\tbe a partial row.\n\t\t\t\n\t\t\t\tExecRow rowAllocator() throws StandardException;\n\t\t\t\n\t\t@param resultSetNumber\tThe resultSetNumber for the ResultSet\n\t\t@param startKeyGetter a reference to a method in the activation\n\t\t\tthat gets the start key indexable row for the scan. Null\n\t\t\tmeans there is no start key.\n\t\t\t\n\t\t\t\tExecIndexRow startKeyGetter() throws StandardException;\n\t\t\t\n\t\t@param startSearchOperator The start search operator for opening\n\t\t\tthe scan\n\t\t@param stopKeyGetter\ta reference to a method in the activation\n\t\t\tthat gets the stop key indexable row for the scan. Null means\n\t\t\tthere is no stop key.\n\t\t\t\n\t\t\t\tExecIndexRow stopKeyGetter() throws StandardException;\n\t\t\t\n\t\t@param stopSearchOperator\tThe stop search operator for opening\n\t\t\tthe scan\n\t\t@param sameStartStopPosition\tRe-use the startKeyGetter for the stopKeyGetter\n\t\t\t\t\t\t\t\t\t\t(Exact match search.)\n\t\t@param qualifiers the array of Qualifiers for the scan.\n\t\t\tNull or an array length of zero means there are no qualifiers.\n\t\t@param tableName\t\tThe full name of the table\n\t\t@param userSuppliedOptimizerOverrides\t\tOverrides specified by the user on the sql\n\t\t@param indexName\t\tThe name of the index, if one used to access table.\n\t\t@param isConstraint\t\tIf index, if used, is a backing index for a constraint.\n\t\t@param forUpdate\t\tTrue means open for update\n\t\t@param colRefItem\t\tAn saved item for a bitSet of columns that\n\t\t\t\t\t\t\t\tare referenced in the underlying table. 
-1 if\n\t\t\t\t\t\t\t\tno item.\n\t\t@param lockMode\t\t\tThe lock granularity to use (see\n\t\t\t\t\t\t\t\tTransactionController in access)\n\t\t@param tableLocked\t\tWhether or not the table is marked as using table locking\n\t\t\t\t\t\t\t\t(in sys.systables)\n\t\t@param isolationLevel\tIsolation level (specified or not) to use on scans\n\t\t@param oneRowScan\t\tWhether or not this is a 1 row scan.\n\t\t@param optimizerEstimatedRowCount\tEstimated total # of rows by\n\t\t\t\t\t\t\t\t\t\t\toptimizer\n\t\t@param optimizerEstimatedCost\t\tEstimated total cost by optimizer\n\t\t@param parentResultSetId Id to access the materlized temporary result\n \t set from the refence stored in the activation.\n\t\t@param fkIndexConglomId foreign key index conglomerate id.\n\t\t@param fkColArrayItem saved column array object that matches the foreign key index\n\t\t columns and the resultset from the parent table.\n\t\t@param rltItem row location template\n\n\t\t@return the table scan operation as a result set.\n\t\t@exception StandardException thrown when unable to create the\n\t\t\tresult set\n\t */\n\tpublic NoPutResultSet getRaDependentTableScanResultSet(\n\t\t\t Activation activation,\n\t\t\t\t\t\t\t\t\tlong conglomId,\n\t\t\t\t\t\t\t\t\tint scociItem,\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\tGeneratedMethod resultRowAllocator,\n\t\t\t\t\t\t\t\t\tint resultSetNumber,\n\t\t\t\t\t\t\t\t\tGeneratedMethod startKeyGetter,\n\t\t\t\t\t\t\t\t\tint startSearchOperator,\n\t\t\t\t\t\t\t\t\tGeneratedMethod stopKeyGetter,\n\t\t\t\t\t\t\t\t\tint stopSearchOperator,\n\t\t\t\t\t\t\t\t\tboolean sameStartStopPosition,\n boolean rowIdKey,\n\t\t\t\t\t\t\t\t\tString qualifiersField,\n\t\t\t\t\t\t\t\t\tString tableName,\n\t\t\t\t\t\t\t\t\tString userSuppliedOptimizerOverrides,\n\t\t\t\t\t\t\t\t\tString indexName,\n\t\t\t\t\t\t\t\t\tboolean isConstraint,\n\t\t\t\t\t\t\t\t\tboolean forUpdate,\n\t\t\t\t\t\t\t\t\tint colRefItem,\n\t\t\t\t\t\t\t\t\tint indexColItem,\n\t\t\t\t\t\t\t\t\tint 
lockMode,\n\t\t\t\t\t\t\t\t\tboolean tableLocked,\n\t\t\t\t\t\t\t\t\tint isolationLevel,\n\t\t\t\t\t\t\t\t\tboolean oneRowScan,\n\t\t\t\t\t\t\t\t\tdouble optimizerEstimatedRowCount,\n\t\t\t\t\t\t\t\t\tdouble optimizerEstimatedCost,\n\t\t\t\t\t\t\t\t\tString parentResultSetId,\n\t\t\t\t\t\t\t\t\tlong fkIndexConglomId,\n\t\t\t\t\t\t\t\t\tint fkColArrayItem,\n\t\t\t\t\t\t\t\t\tint rltItem)\n\t\tthrows StandardException;\n\n /**\n\t * This result sets implements the filtering needed by and . It is only ever generated if at least\n\t * one of the two clauses is present.\n\t *\n\t * @param source The source result set being filtered\n\t * @param activation The activation for this result set,\n\t *\t\t which provides the context for the row\n\t * allocation operation\n\t * @param resultSetNumber The resultSetNumber for the ResultSet\n\t * @param offsetMethod The OFFSET parameter was specified\n\t * @param fetchFirstMethod The FETCH FIRST/NEXT parameter was specified\n\t * @param hasJDBClimitClause True if the offset/fetchFirst clauses were added by JDBC LIMExeIT escape syntax\n\t * @param optimizerEstimatedRowCount\n\t * Estimated total # of rows by optimizer\n\t * @param optimizerEstimatedCost\n\t * Estimated total cost by optimizer\n\t * @exception StandardException Standard error policy\n\t */\n\n\tpublic NoPutResultSet getRowCountResultSet(\n\t\tNoPutResultSet source,\n\t\tActivation activation,\n\t\tint resultSetNumber,\n\t\tGeneratedMethod offsetMethod,\n\t\tGeneratedMethod fetchFirstMethod,\n boolean hasJDBClimitClause,\n\t\tdouble optimizerEstimatedRowCount,\n\t\tdouble optimizerEstimatedCost,\n\t\tString explainPlan) throws StandardException;\n\n public NoPutResultSet getExplainResultSet(ResultSet source, Activation activation, int resultSetNumber) throws StandardException;\n\n public NoPutResultSet getExplainResultSet(NoPutResultSet source, Activation activation, int resultSetNumber) throws StandardException;\n\n /**\n * Export\n */\n public NoPutResultSet 
getExportResultSet(NoPutResultSet source,\n Activation activation,\n int resultSetNumber,\n String exportPath,\n boolean compression,\n int replicationCount,\n String encoding,\n String fieldSeparator,\n String quoteChar,\n int srcResultDescriptionSavedObjectNum) throws StandardException;\n\n /**\n * Batch Once\n */\n public NoPutResultSet getBatchOnceResultSet(NoPutResultSet source,\n Activation activation,\n int resultSetNumber,\n NoPutResultSet subqueryResultSet,\n String updateResultSetFieldName,\n int sourceRowLocationColumnPosition,\n int sourceCorrelatedColumnPosition,\n int subqueryCorrelatedColumnPosition) throws StandardException;\n\n}\n"},"message":{"kind":"string","value":"Fixed distinct scan explain text for ui.\n"},"old_file":{"kind":"string","value":"db-engine/src/main/java/com/splicemachine/db/iapi/sql/execute/ResultSetFactory.java"},"subject":{"kind":"string","value":"Fixed distinct scan explain text for ui."},"git_diff":{"kind":"string","value":"b-engine/src/main/java/com/splicemachine/db/iapi/sql/execute/ResultSetFactory.java\n \t\t\t\t\t\t\t\tint isolationLevel,\n \t\t\t\t\t\t\t\tdouble optimizerEstimatedRowCount,\n \t\t\t\t\t\t\t\tdouble optimizerEstimatedCost,\n String tableVersion)\n String tableVersion,\n\t\t\t\t\t\t\t\tString explainPlan)\n \t\t\tthrows StandardException;\n \n \t/**\n \t\tint\t\t\t\t\tisolationLevel,\n \t\tdouble\t\t\t\toptimizerEstimatedRowCount,\n \t\tdouble \t\t\t\toptimizerEstimatedCost,\n String tableVersion\n String tableVersion,\n\t\tString explainPlan\n \t) throws StandardException;\n \n "}}},{"rowIdx":2036,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"d4f7dfc95bfb8a73a0892a065c05cde654e791b2"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"jitsi/libjitsi,jitsi/libjitsi,jitsi/libjitsi,jitsi/libjitsi"},"new_contents":{"kind":"string","value":"/*\n * 
Copyright @ 2015 Atlassian Pty Ltd\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage org.jitsi.impl.neomedia.rtcp;\n\nimport net.sf.fmj.media.rtp.*;\nimport org.jitsi.service.neomedia.*;\nimport org.jitsi.util.*;\n\nimport java.io.*;\nimport java.util.*;\n\n/**\n * A class which represents an RTCP packet carrying transport-wide congestion\n * control (transport-cc) feedback information. The format is defined here:\n * https://tools.ietf.org/html/draft-holmer-rmcat-transport-wide-cc-extensions-01\n *\n *

{@code\n *  0                   1                   2                   3\n *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1\n * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n * |V=2|P|  FMT=15 |    PT=205     |           length              |\n * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n * |                     SSRC of packet sender                     |\n * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n * |                      SSRC of media source                     |\n * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n * |      base sequence number     |      packet status count      |\n * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n * |                 reference time                | fb pkt. count |\n * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n * |          packet chunk         |         packet chunk          |\n * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n * .                                                               .\n * .                                                               .\n * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n * |         packet chunk          |  recv delta   |  recv delta   |\n * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n * .                                                               .\n * .                                                               .\n * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n * |           recv delta          |  recv delta   | zero padding  |\n * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n * }
\n *\n * @author Boris Grozev\n */\npublic class RTCPTCCPacket\n extends RTCPFBPacket\n{\n /**\n * Gets a boolean indicating whether or not the RTCP packet specified in the\n * {@link ByteArrayBuffer} that is passed as an argument is a NACK packet or\n * not.\n *\n * @param baf the {@link ByteArrayBuffer}\n * @return true if the byte array buffer holds a NACK packet, otherwise\n * false.\n */\n public static boolean isTCCPacket(ByteArrayBuffer baf)\n {\n int rc = RTCPHeaderUtils.getReportCount(baf);\n return rc == FMT && isRTPFBPacket(baf);\n }\n\n /**\n * @return the packets represented in an RTCP transport-cc feedback packet.\n *\n * Note that the timestamps are represented in the 250µs format used by the\n * on-the-wire format, and don't represent local time.\n *\n * @param baf the buffer which contains the RTCP packet.\n */\n public static PacketMap getPackets(ByteArrayBuffer baf)\n {\n return getPacketsFci(getFCI(baf));\n }\n\n /**\n * @return the packets represented in the FCI portion of an RTCP\n * transport-cc feedback packet.\n *\n * Note that the timestamps are represented in the 250µs format used by the\n * on-the-wire format, and don't represent local time.\n *\n * @param fciBuffer the buffer which contains the FCI portion of the RTCP\n * feedback packet.\n */\n public static PacketMap getPacketsFci(ByteArrayBuffer fciBuffer)\n {\n if (fciBuffer == null)\n {\n return null;\n }\n\n byte[] buf = fciBuffer.getBuffer();\n int off = fciBuffer.getOffset();\n int len = fciBuffer.getLength();\n\n if (len < MIN_FCI_LENGTH)\n {\n logger.warn(PARSE_ERROR + \"length too small: \" + len);\n return null;\n }\n\n // The fixed fields\n int baseSeq = RTPUtils.readUint16AsInt(buf, off);\n int packetStatusCount = RTPUtils.readUint16AsInt(buf, off + 2);\n\n // reference time. 
The 24 bit field uses increments of 2^6ms, and we\n // shift by 8 to change the resolution to 250µs.\n long referenceTime = RTPUtils.readUint24AsInt(buf, off + 4) << 8;\n\n // The offset at which the packet status chunk list starts.\n int pscOff = off + 8;\n\n // First find where the delta list begins.\n int packetsRemaining = packetStatusCount;\n while (packetsRemaining > 0)\n {\n if (pscOff + 2 > off + len)\n {\n logger.warn(PARSE_ERROR + \"reached the end while reading chunks\");\n return null;\n }\n\n int packetsInChunk = getPacketCount(buf, pscOff);\n packetsRemaining -= packetsInChunk;\n\n pscOff += 2; // all chunks are 16bit\n }\n\n // At this point we have the the beginning of the delta list. Start\n // reading from the chunk and delta lists together.\n int deltaStart = pscOff;\n int deltaOff = pscOff;\n\n // Reset to the start of the chunks list.\n pscOff = off + 8;\n packetsRemaining = packetStatusCount;\n PacketMap packets = new PacketMap();\n while (packetsRemaining > 0 && pscOff < deltaStart)\n {\n // packetsRemaining is based on the \"packet status count\" field,\n // which helps us find the correct number of packets described in\n // the last chunk. E.g. 
if the last chunk is a vector chunk, we\n // don't really know by the chunk alone how many packets are\n // described.\n int packetsInChunk\n = Math.min(getPacketCount(buf, pscOff), packetsRemaining);\n\n // Read deltas for all packets in the chunk.\n for (int i = 0; i < packetsInChunk; i++)\n {\n int symbol = readSymbol(buf, pscOff, i);\n // -1 or delta in 250µs increments\n int delta = -1;\n switch (symbol)\n {\n case SYMBOL_SMALL_DELTA:\n // The delta is an 8-bit unsigned integer.\n deltaOff++;\n if (deltaOff > off + len)\n {\n logger.warn(PARSE_ERROR\n + \"reached the end while reading delta.\");\n return null;\n }\n delta = buf[deltaOff] & 0xff;\n break;\n case SYMBOL_LARGE_DELTA:\n // The delta is a 6-bit signed integer.\n deltaOff += 2;\n if (deltaOff > off + len)\n {\n logger.warn(PARSE_ERROR\n + \"reached the end while reading long delta.\");\n return null;\n }\n delta = RTPUtils.readInt16AsInt(buf, deltaOff);\n break;\n case SYMBOL_NOT_RECEIVED:\n default:\n delta = -1;\n break;\n }\n\n if (delta == -1)\n {\n // Packet not received. We don't update the reference time,\n // but we push the packet in the map to indicate that it was\n // marked as not received.\n packets.put(baseSeq, NEGATIVE_ONE);\n }\n else\n {\n // The spec is not clear about what the reference time for\n // each packet is. We assume that every packet for which\n // there is a delta updates the reference (even if the\n // delta is negative).\n // TODO: check what webrtc.org does\n referenceTime += delta;\n packets.put(baseSeq, referenceTime);\n }\n\n baseSeq = (baseSeq + 1) & 0xffff;\n }\n\n // next packet status chunk\n pscOff += 2;\n packetsRemaining -= packetsInChunk;\n }\n\n return packets;\n }\n\n /**\n * Reads the {@code i}-th (zero-based) symbol from the Packet Status Chunk\n * contained in {@code buf} at offset {@code off}. 
Returns -1 if the index\n * is found to be invalid (although the validity check is not performed\n * for RLE chunks).\n *\n * @param buf the buffer which contains the Packet Status Chunk.\n * @param off the offset in {@code buf} at which the Packet Status Chunk\n * begins.\n * @param i the zero-based index of the symbol to return.\n * @return the {@code i}-th symbol from the given Packet Status Chunk.\n */\n private static int readSymbol(byte[] buf, int off, int i)\n {\n int chunkType = buf[off] & 0x80 >> 7;\n if (chunkType == CHUNK_TYPE_VECTOR)\n {\n int symbolType = buf[off] & 0x40 >> 6;\n switch (symbolType)\n {\n case SYMBOL_TYPE_LONG:\n // 0 1\n // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5\n // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n // |T|S| s0| s1| s2| s3| s4| s5| s6|\n // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n if (0 <= i && i <= 2)\n {\n return (buf[off] >> (4-2*i)) & 0x03;\n }\n else if (3 <= i && i <= 6)\n {\n return (buf[off + 1] >> (6-2*(i-3))) & 0x03;\n }\n\n return -1;\n\n case SYMBOL_TYPE_SHORT:\n // The format is similar to above, except with 14 one-bit\n // symbols.\n int shortSymbol;\n if (0 <= i && i <= 5)\n {\n shortSymbol = (buf[off] >> (5-i)) & 0x01;\n }\n else if (6 <= i && i <= 13)\n {\n shortSymbol = (buf[off + 1] >> (13-i)) & 0x01;\n }\n else\n {\n return -1;\n }\n\n return shortToLong(shortSymbol);\n default:\n return -1;\n }\n }\n else if (chunkType == CHUNK_TYPE_RLE)\n {\n\n // A RLE chunk looks like this:\n // 0 1\n // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5\n // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n // |T| S | Run Length |\n // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n\n // We assume the caller knows what they are doing and they have\n // given us a valid i, so we just return the symbol (S). 
Otherwise\n // we'd have to read the Run Length field every time.\n return buf[off] >> 5 & 0x03;\n }\n\n return -1;\n }\n\n /**\n * Converts a short symbol to a long symbol.\n * @param shortSymbol the short symbol.\n * @return the long (two-bit) symbol corresponding to {@code shortSymbol}.\n */\n private static int shortToLong(int shortSymbol)\n {\n switch (shortSymbol)\n {\n case SHORT_SYMBOL_NOT_RECEIVED:\n return SYMBOL_NOT_RECEIVED;\n case SHORT_SYMBOL_RECEIVED:\n return SYMBOL_SMALL_DELTA;\n default:\n return -1;\n }\n }\n\n /**\n * Returns the number of packets described in the Packet Status Chunk\n * contained in the buffer {@code buf} at offset {@code off}.\n * Note that this may not necessarily match with the number of packets\n * that we want to read from the chunk. E.g. if a feedback packet describes\n * 3 packets (indicated by the value \"3\" in the \"packet status count\" field),\n * and it contains a Vector Status Chunk which can describe 7 packets (long\n * symbols), then we want to read only 3 packets (but this method will\n * return 7).\n *\n * @param buf the buffer which contains the Packet Status Chunk\n * @param off the offset at which the Packet Status Chunk starts.\n *\n * @return the number of packets described by the Packet Status Chunk.\n */\n private static int getPacketCount(byte[] buf, int off)\n {\n int chunkType = buf[off] & 0x80 >> 7;\n if (chunkType == CHUNK_TYPE_VECTOR)\n {\n // A vector chunk looks like this:\n // 0 1\n // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5\n // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n // |1|S| symbol list |\n // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n // The 14-bit long symbol list consists of either 14 single-bit\n // symbols, or 7 two-bit symbols, according to the S bit.\n int symbolType = buf[off] & 0x40 >> 6;\n return symbolType == SYMBOL_TYPE_SHORT ? 
14 : 7;\n }\n else if (chunkType == CHUNK_TYPE_RLE)\n {\n // A RLE chunk looks like this:\n // 0 1\n // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5\n // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n // |T| S | Run Length |\n // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n return\n ((buf[off] & 0x1f) << 8)\n | (buf[off + 1] & 0xff);\n }\n\n return -1;\n }\n\n /**\n * The {@link Logger} used by the {@link RTCPTCCPacket} class and its\n * instances for logging output.\n */\n private static final Logger logger\n = Logger.getLogger(RTCPTCCPacket.class);\n\n /**\n * The value of the \"fmt\" field for a transport-cc RTCP feedback packet.\n */\n public static final int FMT = 15;\n\n /**\n * The symbol which indicates that a packet was not received.\n */\n private static final int SYMBOL_NOT_RECEIVED = 0;\n\n /**\n * The symbol which indicates that a packet was received with a small delta\n * (represented in a 1-byte field).\n */\n private static final int SYMBOL_SMALL_DELTA = 1;\n\n /**\n * The symbol which indicates that a packet was received with a large or\n * negative delta (represented in a 2-byte field).\n */\n private static final int SYMBOL_LARGE_DELTA = 2;\n\n /**\n * The short (1-bit) symbol which indicates that a packet was received\n * (with a small delta).\n */\n private static final int SHORT_SYMBOL_RECEIVED = 0;\n\n /**\n * The short (1-bit) symbol which indicates that a packet was not received.\n */\n private static final int SHORT_SYMBOL_NOT_RECEIVED = 1;\n\n /**\n * The value of the {@code T} bit of a Packet Status Chunk, which\n * identifies it as a Vector chunk.\n */\n private static final int CHUNK_TYPE_VECTOR = 0;\n\n /**\n * The value of the {@code T} bit of a Packet Status Chunk, which\n * identifies it as a Run Length Encoding chunk.\n */\n private static final int CHUNK_TYPE_RLE = 1;\n\n /**\n * The value of the {@code S} bit og a Status Vector Chunk, which\n * indicates 1-bit (short) symbols.\n */\n private static final int SYMBOL_TYPE_SHORT = 0;\n\n /**\n * The value of the 
{@code S} bit of a Status Vector Chunk, which\n * indicates 2-bit (long) symbols.\n */\n private static final int SYMBOL_TYPE_LONG = 1;\n\n /**\n * A static object defined here in the hope that it will reduce boxing.\n */\n private static final Long NEGATIVE_ONE = -1L;\n\n /**\n * The minimum length of the FCI field of a valid transport-cc RTCP\n * feedback message. 8 bytes for the fixed fields + 2 bytes for one\n * packet status chunk.\n */\n private static final int MIN_FCI_LENGTH = 10;\n\n /**\n * An error message to use when parsing failed.\n */\n private static final String PARSE_ERROR\n = \"Failed to parse an RTCP transport-cc feedback packet: \";\n\n /**\n * The map which contains the sequence numbers (mapped to the reception\n * timestamp) of the packets described by this RTCP packet.\n */\n private PacketMap packets = null;\n\n /**\n * Initializes a new NACKPacket instance.\n * @param base\n */\n public RTCPTCCPacket(RTCPCompoundPacket base)\n {\n super(base);\n }\n\n /**\n * Initializes a new {@link RTCPTCCPacket} instance with a specific \"packet\n * sender SSRC\" and \"media source SSRC\" values, and which describes a\n * specific set of sequence numbers.\n * @param senderSSRC the value to use for the \"packet sender SSRC\" field.\n * @param sourceSSRC the value to use for the \"media source SSRC\" field.\n * @param packets the set of RTP sequence numbers and their reception\n * timestamps which this packet is to describe.\n * @param fbPacketCount the index of this feedback packet, to be used in the\n * \"fb pkt count\" field.\n *\n * Note that this implementation is not optimized and might not always use\n * the minimal possible number of bytes to describe a given set of packets.\n * Specifically, it does take into account that sequence numbers wrap\n * at 2^16 and fails to pack numbers close to 2^16 with those close to 0.\n */\n public RTCPTCCPacket(long senderSSRC, long sourceSSRC,\n PacketMap packets,\n byte fbPacketCount)\n {\n super(FMT, RTPFB, 
senderSSRC, sourceSSRC);\n\n Map.Entry first = packets.firstEntry();\n int firstSeq = first.getKey();\n Map.Entry last = packets.lastEntry();\n int packetCount\n = 1 + RTPUtils.sequenceNumberDiff(last.getKey(), firstSeq);\n\n // Temporary buffer to store the fixed fields (8 bytes) and the list of\n // packet status chunks (see the format above). The buffer may be longer\n // than needed. We pack 7 packets in a chunk, and a chunk is 2 bytes.\n byte[] buf = new byte[(packetCount / 7 + 1) * 2 + 8];\n // Temporary buffer to store the list of deltas (see the format above).\n // We allocated for the worst case (2 bytes per packet), which may\n // be longer than needed.\n byte[] deltas = new byte[packetCount * 2];\n int deltaOff = 0;\n int off = 0;\n\n long referenceTime = first.getValue();\n referenceTime -= referenceTime % 64;\n\n // Set the 'base sequence number' field\n off += RTPUtils.writeShort(buf, off, (short) (int) first.getKey());\n\n // Set the 'packet status count' field\n off += RTPUtils.writeShort(buf, off, (short) packetCount);\n\n // Set the 'reference time' field\n off +=\n RTPUtils.writeUint24(buf, off,\n (int) ((referenceTime >> 6) & 0xffffff));\n\n // Set the 'fb pkt count' field. TODO increment\n buf[off++] = fbPacketCount;\n\n // Add the packet status chunks. 
In this first impl we'll just use\n // status vector chunks (T=1) with two-bit symbols (S=1) as this is\n // most straightforward to implement.\n // TODO: optimize for size\n long nextReferenceTime = referenceTime;\n off--; // we'll take care of this inside the loop.\n for (int seqDelta = 0; seqDelta < packetCount; seqDelta++)\n {\n // A status vector chunk with two-bit symbols contains 7 packet\n // symbols\n if (seqDelta % 7 == 0)\n {\n off++;\n buf[off] = (byte) 0xc0; //T=1, S=1\n }\n else if (seqDelta % 7 == 3)\n {\n off++;\n buf[off] = 0;\n }\n\n int symbol;\n\n int seq = (firstSeq + seqDelta) & 0xffff;\n Long ts = packets.get(seq);\n if (ts == null)\n {\n symbol = SYMBOL_NOT_RECEIVED;\n }\n else\n {\n long tsDelta = ts - nextReferenceTime;\n if (tsDelta >= 0 && tsDelta < 63)\n {\n symbol = SYMBOL_SMALL_DELTA;\n\n // The small delta is an 8-bit unsigned with a resolution of\n // 250µs. Our deltas are all in milliseconds (hence << 2).\n deltas[deltaOff++] = (byte ) ((tsDelta << 2) & 0xff);\n }\n else if (tsDelta < 8191 && tsDelta > -8192)\n {\n symbol = SYMBOL_LARGE_DELTA;\n\n // The large or negative delta is a 16-bit signed integer\n // with a resolution of 250µs (hence << 2).\n short d = (short) (tsDelta << 2);\n deltas[deltaOff++] = (byte) ((d >> 8) & 0xff);\n deltas[deltaOff++] = (byte) ((d) & 0xff);\n }\n else\n {\n // The RTCP packet format does not support deltas bigger\n // than what we handle above. As per the draft, if we want\n // send feedback with such deltas, we should split it up\n // into multiple RTCP packets. We can't do that here in the\n // constructor.\n throw new IllegalArgumentException(\"Delta too big, needs new reference.\");\n }\n\n // If the packet was received, the next delta will be relative\n // to its time. 
Otherwise, we'll just the previous reference.\n nextReferenceTime = ts;\n }\n\n // Depending on the index of our packet, we have to offset its\n // symbol (we've already set 'off' to point to the correct byte).\n // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5\n // S T <0> <1> <2> <3> <4> <5> <6>\n int symbolOffset;\n switch (seqDelta % 7)\n {\n case 0:\n case 4:\n symbolOffset = 4;\n break;\n case 1:\n case 5:\n symbolOffset = 2;\n break;\n case 2:\n case 6:\n symbolOffset = 0;\n break;\n case 3:\n default:\n symbolOffset = 6;\n }\n\n symbol <<= symbolOffset;\n buf[off] |= symbol;\n }\n\n off++;\n if (packetCount % 7 <= 3)\n {\n // the last chunk was not complete\n buf[off++] = 0;\n }\n\n\n fci = new byte[off + deltaOff];\n System.arraycopy(buf, 0, fci, 0, off);\n System.arraycopy(deltas, 0, fci, off, deltaOff);\n this.packets = packets;\n }\n\n\n /**\n * @return the map of packets represented by this {@link RTCPTCCPacket}.\n */\n synchronized public Map getPackets()\n {\n if (packets == null)\n {\n packets = getPacketsFci(new ByteArrayBufferImpl(fci, 0, fci.length));\n }\n\n return packets;\n }\n\n @Override\n public void assemble(DataOutputStream dataoutputstream)\n throws IOException\n {\n dataoutputstream.writeByte((byte) (0x80 /* version */ | FMT));\n dataoutputstream.writeByte((byte) RTPFB);\n dataoutputstream.writeShort(2 + (fci.length / 4));\n dataoutputstream.writeInt((int) senderSSRC);\n dataoutputstream.writeInt((int) sourceSSRC);\n dataoutputstream.write(fci);\n }\n\n @Override\n public String toString()\n {\n return \"RTCP transport-cc feedback\";\n }\n\n /**\n * An ordered collection which maps sequence numbers to timestamps, the\n * order is by the sequence number.\n */\n public static class PacketMap extends TreeMap\n {\n public PacketMap()\n {\n super(RTPUtils.sequenceNumberComparator);\n }\n }\n}\n\n"},"new_file":{"kind":"string","value":"src/org/jitsi/impl/neomedia/rtcp/RTCPTCCPacket.java"},"old_contents":{"kind":"string","value":"/*\n * Copyright @ 2015 
Atlassian Pty Ltd\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage org.jitsi.impl.neomedia.rtcp;\n\nimport net.sf.fmj.media.rtp.*;\nimport org.jitsi.service.neomedia.*;\nimport org.jitsi.util.*;\n\nimport java.io.*;\nimport java.util.*;\n\n/**\n * A class which represents an RTCP packet carrying transport-wide congestion\n * control (transport-cc) feedback information. The format is defined here:\n * https://tools.ietf.org/html/draft-holmer-rmcat-transport-wide-cc-extensions-01\n *\n *
{@code\n *  0                   1                   2                   3\n *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1\n * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n * |V=2|P|  FMT=15 |    PT=205     |           length              |\n * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n * |                     SSRC of packet sender                     |\n * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n * |                      SSRC of media source                     |\n * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n * |      base sequence number     |      packet status count      |\n * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n * |                 reference time                | fb pkt. count |\n * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n * |          packet chunk         |         packet chunk          |\n * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n * .                                                               .\n * .                                                               .\n * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n * |         packet chunk          |  recv delta   |  recv delta   |\n * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n * .                                                               .\n * .                                                               .\n * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n * |           recv delta          |  recv delta   | zero padding  |\n * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n * }
\n *\n * @author Boris Grozev\n */\npublic class RTCPTCCPacket\n extends RTCPFBPacket\n{\n /**\n * Gets a boolean indicating whether or not the RTCP packet specified in the\n * {@link ByteArrayBuffer} that is passed as an argument is a NACK packet or\n * not.\n *\n * @param baf the {@link ByteArrayBuffer}\n * @return true if the byte array buffer holds a NACK packet, otherwise\n * false.\n */\n public static boolean isTCCPacket(ByteArrayBuffer baf)\n {\n int rc = RTCPHeaderUtils.getReportCount(baf);\n return rc == FMT && isRTPFBPacket(baf);\n }\n\n /**\n * @return the packets represented in an RTCP transport-cc feedback packet.\n *\n * Note that the timestamps are represented in the 250µs format used by the\n * on-the-wire format, and don't represent local time.\n *\n * @param baf the buffer which contains the RTCP packet.\n */\n public static PacketMap getPackets(ByteArrayBuffer baf)\n {\n return getPacketsFci(getFCI(baf));\n }\n\n /**\n * @return the packets represented in the FCI portion of an RTCP\n * transport-cc feedback packet.\n *\n * Note that the timestamps are represented in the 250µs format used by the\n * on-the-wire format, and don't represent local time.\n *\n * @param fciBuffer the buffer which contains the FCI portion of the RTCP\n * feedback packet.\n */\n public static PacketMap getPacketsFci(ByteArrayBuffer fciBuffer)\n {\n if (fciBuffer == null)\n {\n return null;\n }\n\n byte[] buf = fciBuffer.getBuffer();\n int off = fciBuffer.getOffset();\n int len = fciBuffer.getLength();\n\n if (len < MIN_FCI_LENGTH)\n {\n logger.warn(PARSE_ERROR + \"length too small: \" + len);\n return null;\n }\n\n // The fixed fields\n int baseSeq = RTPUtils.readUint16AsInt(buf, off);\n int packetStatusCount = RTPUtils.readUint16AsInt(buf, off + 2);\n\n // reference time. 
The 24 bit field uses increments of 2^6ms, and we\n // shift by 8 to change the resolution to 250µs.\n long referenceTime = RTPUtils.readUint24AsInt(buf, off + 4) << 8;\n\n // The offset at which the packet status chunk list starts.\n int pscOff = off + 8;\n\n // First find where the delta list begins.\n int packetsRemaining = packetStatusCount;\n while (packetsRemaining > 0)\n {\n if (pscOff + 2 > off + len)\n {\n logger.warn(PARSE_ERROR + \"reached the end while reading chunks\");\n return null;\n }\n\n int packetsInChunk = getPacketCount(buf, pscOff);\n packetsRemaining -= packetsInChunk;\n\n pscOff += 2; // all chunks are 16bit\n }\n\n // At this point we have the the beginning of the delta list. Start\n // reading from the chunk and delta lists together.\n int deltaStart = pscOff;\n int deltaOff = pscOff;\n\n // Reset to the start of the chunks list.\n pscOff = off + 8;\n packetsRemaining = packetStatusCount;\n PacketMap packets = new PacketMap();\n while (packetsRemaining > 0 && pscOff < deltaStart)\n {\n // packetsRemaining is based on the \"packet status count\" field,\n // which helps us find the correct number of packets described in\n // the last chunk. E.g. 
if the last chunk is a vector chunk, we\n // don't really know by the chunk alone how many packets are\n // described.\n int packetsInChunk\n = Math.min(getPacketCount(buf, pscOff), packetsRemaining);\n\n // Read deltas for all packets in the chunk.\n for (int i = 0; i < packetsInChunk; i++)\n {\n int symbol = readSymbol(buf, pscOff, i);\n // -1 or delta in 250µs increments\n int delta = -1;\n switch (symbol)\n {\n case SYMBOL_SMALL_DELTA:\n // The delta is an 8-bit unsigned integer.\n deltaOff++;\n if (deltaOff > off + len)\n {\n logger.warn(PARSE_ERROR\n + \"reached the end while reading delta.\");\n return null;\n }\n delta = buf[deltaOff] & 0xff;\n break;\n case SYMBOL_LARGE_DELTA:\n // The delta is a 6-bit signed integer.\n deltaOff += 2;\n if (deltaOff > off + len)\n {\n logger.warn(PARSE_ERROR\n + \"reached the end while reading long delta.\");\n return null;\n }\n delta = RTPUtils.readInt16AsInt(buf, deltaOff);\n break;\n case SYMBOL_NOT_RECEIVED:\n default:\n delta = -1;\n break;\n }\n\n if (delta == -1)\n {\n // Packet not received. We don't update the reference time,\n // but we push the packet in the map to indicate that it was\n // marked as not received.\n packets.put(baseSeq, NEGATIVE_ONE);\n }\n else\n {\n // The spec is not clear about what the reference time for\n // each packet is. We assume that every packet for which\n // there is a delta updates the reference (even if the\n // delta is negative).\n // TODO: check what webrtc.org does\n referenceTime += delta;\n packets.put(baseSeq, referenceTime);\n }\n\n baseSeq = (baseSeq + 1) & 0xffff;\n }\n\n // next packet status chunk\n pscOff += 2;\n packetsRemaining -= packetsInChunk;\n }\n\n return packets;\n }\n\n /**\n * Reads the {@code i}-th (zero-based) symbol from the Packet Status Chunk\n * contained in {@code buf} at offset {@code off}. 
Returns -1 if the index\n * is found to be invalid (although the validity check is not performed\n * for RLE chunks).\n *\n * @param buf the buffer which contains the Packet Status Chunk.\n * @param off the offset in {@code buf} at which the Packet Status Chunk\n * begins.\n * @param i the zero-based index of the symbol to return.\n * @return the {@code i}-th symbol from the given Packet Status Chunk.\n */\n private static int readSymbol(byte[] buf, int off, int i)\n {\n int chunkType = buf[off] & 0x80 >> 7;\n if (chunkType == CHUNK_TYPE_VECTOR)\n {\n int symbolType = buf[off] & 0x40 >> 6;\n switch (symbolType)\n {\n case SYMBOL_TYPE_LONG:\n // 0 1\n // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5\n // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n // |T|S| s0| s1| s2| s3| s4| s5| s6|\n // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n if (0 <= i && i <= 2)\n {\n return (buf[off] >> (4-2*i)) & 0x03;\n }\n else if (3 <= i && i <= 6)\n {\n return (buf[off + 1] >> (6-2*(i-3))) & 0x03;\n }\n\n return -1;\n\n case SYMBOL_TYPE_SHORT:\n // The format is similar to above, except with 14 one-bit\n // symbols.\n int shortSymbol;\n if (0 <= i && i <= 5)\n {\n shortSymbol = (buf[off] >> (5-i)) & 0x01;\n }\n else if (6 <= i && i <= 13)\n {\n shortSymbol = (buf[off + 1] >> (13-i)) & 0x01;\n }\n else\n {\n return -1;\n }\n\n return shortToLong(shortSymbol);\n default:\n return -1;\n }\n }\n else if (chunkType == CHUNK_TYPE_RLE)\n {\n\n // A RLE chunk looks like this:\n // 0 1\n // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5\n // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n // |T| S | Run Length |\n // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n\n // We assume the caller knows what they are doing and they have\n // given us a valid i, so we just return the symbol (S). 
Otherwise\n // we'd have to read the Run Length field every time.\n return buf[off] >> 5 & 0x03;\n }\n\n return -1;\n }\n\n /**\n * Converts a short symbol to a long symbol.\n * @param shortSymbol the short symbol.\n * @return the long (two-bit) symbol corresponding to {@code shortSymbol}.\n */\n private static int shortToLong(int shortSymbol)\n {\n switch (shortSymbol)\n {\n case SHORT_SYMBOL_NOT_RECEIVED:\n return SYMBOL_NOT_RECEIVED;\n case SHORT_SYMBOL_RECEIVED:\n return SYMBOL_SMALL_DELTA;\n default:\n return -1;\n }\n }\n\n /**\n * Returns the number of packets described in the Packet Status Chunk\n * contained in the buffer {@code buf} at offset {@code off}.\n * Note that this may not necessarily match with the number of packets\n * that we want to read from the chunk. E.g. if a feedback packet describes\n * 3 packets (indicated by the value \"3\" in the \"packet status count\" field),\n * and it contains a Vector Status Chunk which can describe 7 packets (long\n * symbols), then we want to read only 3 packets (but this method will\n * return 7).\n *\n * @param buf the buffer which contains the Packet Status Chunk\n * @param off the offset at which the Packet Status Chunk starts.\n *\n * @return the number of packets described by the Packet Status Chunk.\n */\n private static int getPacketCount(byte[] buf, int off)\n {\n int chunkType = buf[off] & 0x80 >> 7;\n if (chunkType == CHUNK_TYPE_VECTOR)\n {\n // A vector chunk looks like this:\n // 0 1\n // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5\n // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n // |1|S| symbol list |\n // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n // The 14-bit long symbol list consists of either 14 single-bit\n // symbols, or 7 two-bit symbols, according to the S bit.\n int symbolType = buf[off] & 0x40 >> 6;\n return symbolType == SYMBOL_TYPE_SHORT ? 
14 : 7;\n }\n else if (chunkType == CHUNK_TYPE_RLE)\n {\n // A RLE chunk looks like this:\n // 0 1\n // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5\n // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n // |T| S | Run Length |\n // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+\n return\n ((buf[off] & 0x1f) << 8)\n | (buf[off + 1] & 0xff);\n }\n\n return -1;\n }\n\n /**\n * The {@link Logger} used by the {@link RTCPTCCPacket} class and its\n * instances for logging output.\n */\n private static final Logger logger\n = Logger.getLogger(RTCPTCCPacket.class);\n\n /**\n * The value of the \"fmt\" field for a transport-cc RTCP feedback packet.\n */\n public static final int FMT = 15;\n\n /**\n * The symbol which indicates that a packet was not received.\n */\n private static final int SYMBOL_NOT_RECEIVED = 0;\n\n /**\n * The symbol which indicates that a packet was received with a small delta\n * (represented in a 1-byte field).\n */\n private static final int SYMBOL_SMALL_DELTA = 1;\n\n /**\n * The symbol which indicates that a packet was received with a large or\n * negative delta (represented in a 2-byte field).\n */\n private static final int SYMBOL_LARGE_DELTA = 2;\n\n /**\n * The short (1-bit) symbol which indicates that a packet was received\n * (with a small delta).\n */\n private static final int SHORT_SYMBOL_RECEIVED = 0;\n\n /**\n * The short (1-bit) symbol which indicates that a packet was not received.\n */\n private static final int SHORT_SYMBOL_NOT_RECEIVED = 1;\n\n /**\n * The value of the {@code T} bit of a Packet Status Chunk, which\n * identifies it as a Vector chunk.\n */\n private static final int CHUNK_TYPE_VECTOR = 0;\n\n /**\n * The value of the {@code T} bit of a Packet Status Chunk, which\n * identifies it as a Run Length Encoding chunk.\n */\n private static final int CHUNK_TYPE_RLE = 1;\n\n /**\n * The value of the {@code S} bit og a Status Vector Chunk, which\n * indicates 1-bit (short) symbols.\n */\n private static final int SYMBOL_TYPE_SHORT = 0;\n\n /**\n * The value of the 
{@code S} bit of a Status Vector Chunk, which\n * indicates 2-bit (long) symbols.\n */\n private static final int SYMBOL_TYPE_LONG = 1;\n\n /**\n * A static object defined here in the hope that it will reduce boxing.\n */\n private static final Long NEGATIVE_ONE = -1L;\n\n /**\n * The minimum length of the FCI field of a valid transport-cc RTCP\n * feedback message. 8 bytes for the fixed fields + 2 bytes for one\n * packet status chunk.\n */\n private static final int MIN_FCI_LENGTH = 10;\n\n /**\n * An error message to use when parsing failed.\n */\n private static final String PARSE_ERROR\n = \"Failed to parse an RTCP transport-cc feedback packet: \";\n\n /**\n * The map which contains the sequence numbers (mapped to the reception\n * timestamp) of the packets described by this RTCP packet.\n */\n private PacketMap packets = null;\n\n /**\n * Initializes a new NACKPacket instance.\n * @param base\n */\n public RTCPTCCPacket(RTCPCompoundPacket base)\n {\n super(base);\n }\n\n /**\n * Initializes a new {@link RTCPTCCPacket} instance with a specific \"packet\n * sender SSRC\" and \"media source SSRC\" values, and which describes a\n * specific set of sequence numbers.\n * @param senderSSRC the value to use for the \"packet sender SSRC\" field.\n * @param sourceSSRC the value to use for the \"media source SSRC\" field.\n * @param packets the set of RTP sequence numbers and their reception\n * timestamps which this packet is to describe.\n * @param fbPacketCount the index of this feedback packet, to be used in the\n * \"fb pkt count\" field.\n *\n * Note that this implementation is not optimized and might not always use\n * the minimal possible number of bytes to describe a given set of packets.\n * Specifically, it does take into account that sequence numbers wrap\n * at 2^16 and fails to pack numbers close to 2^16 with those close to 0.\n */\n public RTCPTCCPacket(long senderSSRC, long sourceSSRC,\n PacketMap packets,\n byte fbPacketCount)\n {\n super(FMT, RTPFB, 
senderSSRC, sourceSSRC);\n\n TreeSet> sequenceNumbers\n = (TreeSet) packets.entrySet();\n\n Map.Entry first = sequenceNumbers.first();\n int firstSeq = first.getKey();\n Map.Entry last = sequenceNumbers.last();\n int packetCount\n = 1 + RTPUtils.sequenceNumberDiff(last.getKey(), firstSeq);\n\n // Temporary buffer to store the fixed fields (8 bytes) and the list of\n // packet status chunks (see the format above). The buffer may be longer\n // than needed. We pack 7 packets in a chunk, and a chunk is 2 bytes.\n byte[] buf = new byte[(packetCount / 7 + 1) * 2 + 8];\n // Temporary buffer to store the list of deltas (see the format above).\n // We allocated for the worst case (2 bytes per packet), which may\n // be longer than needed.\n byte[] deltas = new byte[packetCount * 2];\n int deltaOff = 0;\n int off = 0;\n\n long referenceTime = first.getValue();\n referenceTime -= referenceTime % 64;\n\n // Set the 'base sequence number' field\n off += RTPUtils.writeShort(buf, off, (short) (int) first.getKey());\n\n // Set the 'packet status count' field\n off += RTPUtils.writeShort(buf, off, (short) packetCount);\n\n // Set the 'reference time' field\n off +=\n RTPUtils.writeUint24(buf, off,\n (int) ((referenceTime >> 6) & 0xffffff));\n\n // Set the 'fb pkt count' field. TODO increment\n buf[off++] = fbPacketCount;\n\n // Add the packet status chunks. 
In this first impl we'll just use\n // status vector chunks (T=1) with two-bit symbols (S=1) as this is\n // most straightforward to implement.\n // TODO: optimize for size\n long nextReferenceTime = referenceTime;\n off--; // we'll take care of this inside the loop.\n for (int seqDelta = 0; seqDelta < packetCount; seqDelta++)\n {\n // A status vector chunk with two-bit symbols contains 7 packet\n // symbols\n if (seqDelta % 7 == 0)\n {\n off++;\n buf[off] = (byte) 0xc0; //T=1, S=1\n }\n else if (seqDelta % 7 == 3)\n {\n off++;\n buf[off] = 0;\n }\n\n int symbol;\n\n int seq = (firstSeq + seqDelta) & 0xffff;\n Long ts = packets.get(seq);\n if (ts == null)\n {\n symbol = SYMBOL_NOT_RECEIVED;\n }\n else\n {\n long tsDelta = ts - nextReferenceTime;\n if (tsDelta >= 0 && tsDelta < 63)\n {\n symbol = SYMBOL_SMALL_DELTA;\n\n // The small delta is an 8-bit unsigned with a resolution of\n // 250µs. Our deltas are all in milliseconds (hence << 2).\n deltas[deltaOff++] = (byte ) ((tsDelta << 2) & 0xff);\n }\n else if (tsDelta < 8191 && tsDelta > -8192)\n {\n symbol = SYMBOL_LARGE_DELTA;\n\n // The large or negative delta is a 16-bit signed integer\n // with a resolution of 250µs (hence << 2).\n short d = (short) (tsDelta << 2);\n deltas[deltaOff++] = (byte) ((d >> 8) & 0xff);\n deltas[deltaOff++] = (byte) ((d) & 0xff);\n }\n else\n {\n // The RTCP packet format does not support deltas bigger\n // than what we handle above. As per the draft, if we want\n // send feedback with such deltas, we should split it up\n // into multiple RTCP packets. We can't do that here in the\n // constructor.\n throw new IllegalArgumentException(\"Delta too big, needs new reference.\");\n }\n\n // If the packet was received, the next delta will be relative\n // to its time. 
Otherwise, we'll just the previous reference.\n nextReferenceTime = ts;\n }\n\n // Depending on the index of our packet, we have to offset its\n // symbol (we've already set 'off' to point to the correct byte).\n // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5\n // S T <0> <1> <2> <3> <4> <5> <6>\n int symbolOffset;\n switch (seqDelta % 7)\n {\n case 0:\n case 4:\n symbolOffset = 4;\n break;\n case 1:\n case 5:\n symbolOffset = 2;\n break;\n case 2:\n case 6:\n symbolOffset = 0;\n break;\n case 3:\n default:\n symbolOffset = 6;\n }\n\n symbol <<= symbolOffset;\n buf[off] |= symbol;\n }\n\n off++;\n if (packetCount % 7 <= 3)\n {\n // the last chunk was not complete\n buf[off++] = 0;\n }\n\n\n fci = new byte[off + deltaOff];\n System.arraycopy(buf, 0, fci, 0, off);\n System.arraycopy(deltas, 0, fci, off, deltaOff);\n this.packets = packets;\n }\n\n\n /**\n * @return the map of packets represented by this {@link RTCPTCCPacket}.\n */\n synchronized public Map getPackets()\n {\n if (packets == null)\n {\n packets = getPacketsFci(new ByteArrayBufferImpl(fci, 0, fci.length));\n }\n\n return packets;\n }\n\n @Override\n public void assemble(DataOutputStream dataoutputstream)\n throws IOException\n {\n dataoutputstream.writeByte((byte) (0x80 /* version */ | FMT));\n dataoutputstream.writeByte((byte) RTPFB);\n dataoutputstream.writeShort(2 + (fci.length / 4));\n dataoutputstream.writeInt((int) senderSSRC);\n dataoutputstream.writeInt((int) sourceSSRC);\n dataoutputstream.write(fci);\n }\n\n @Override\n public String toString()\n {\n return \"RTCP transport-cc feedback\";\n }\n\n /**\n * An ordered collection which maps sequence numbers to timestamps, the\n * order is by the sequence number.\n */\n public static class PacketMap extends TreeMap\n {\n public PacketMap()\n {\n super(RTPUtils.sequenceNumberComparator);\n }\n }\n}\n\n"},"message":{"kind":"string","value":"fix: Fixes an 
exception.\n"},"old_file":{"kind":"string","value":"src/org/jitsi/impl/neomedia/rtcp/RTCPTCCPacket.java"},"subject":{"kind":"string","value":"fix: Fixes an exception."},"git_diff":{"kind":"string","value":"rc/org/jitsi/impl/neomedia/rtcp/RTCPTCCPacket.java\n {\n super(FMT, RTPFB, senderSSRC, sourceSSRC);\n \n TreeSet> sequenceNumbers\n = (TreeSet) packets.entrySet();\n\n Map.Entry first = sequenceNumbers.first();\n Map.Entry first = packets.firstEntry();\n int firstSeq = first.getKey();\n Map.Entry last = sequenceNumbers.last();\n Map.Entry last = packets.lastEntry();\n int packetCount\n = 1 + RTPUtils.sequenceNumberDiff(last.getKey(), firstSeq);\n "}}},{"rowIdx":2037,"cells":{"lang":{"kind":"string","value":"JavaScript"},"license":{"kind":"string","value":"mit"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"d2937e1eca4b2f9081dcc07dc6f11aa5a12b5398"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"bholloway/gulp-slash"},"new_contents":{"kind":"string","value":"var slash = require('slash');\nvar through = require('through2');\n\n/**\n * Convert given text using node slash, or where no text is given, return a stream\n * that similarly converts gulp (vinyl) file paths in place.\n * @param {string} [text] Text to convert (per node slash)\n * @returns {string|Transform} Converted text where given, else a transform stream for gulp\n */\nmodule.exports = function(text) {\n 'use strict';\n if (arguments.length > 0) {\n return !!(text) ? 
slash(text) : text;\n } else {\n return through.obj(function(file, encoding, done) {\n [ 'path', 'cwd', 'base' ].forEach(function(field) {\n var isValid = (field in file) && (typeof file[field] === typeof '');\n if (isValid) {\n file[field] = slash(file[field]);\n }\n });\n this.push(file);\n done();\n });\n }\n}"},"new_file":{"kind":"string","value":"index.js"},"old_contents":{"kind":"string","value":"var slash = require('slash');\nvar through = require('through2');\n\n/**\n * Convert given text using node slash, or where no text is given, return a stream\n * that similarly converts gulp (vinyl) file paths in place.\n * @param {string} [text] Text to convert (per node slash)\n * @returns {string|Transform} Converted text where given, else a transform stream for gulp\n */\nmodule.exports = function(text) {\n 'use strict';\n if (arguments.length > 0) {\n return slash(text);\n } else {\n return through.obj(function(file, encoding, done) {\n [ 'path', 'cwd', 'base' ].forEach(function(field) {\n var isValid = (field in file) && (typeof file[field] === typeof '');\n if (isValid) {\n file[field] = slash(file[field]);\n }\n });\n this.push(file);\n done();\n });\n }\n}"},"message":{"kind":"string","value":"added check for degenerate string before passing to slash\n"},"old_file":{"kind":"string","value":"index.js"},"subject":{"kind":"string","value":"added check for degenerate string before passing to slash"},"git_diff":{"kind":"string","value":"ndex.js\n module.exports = function(text) {\n 'use strict';\n if (arguments.length > 0) {\n return slash(text);\n return !!(text) ? 
slash(text) : text;\n } else {\n return through.obj(function(file, encoding, done) {\n [ 'path', 'cwd', 'base' ].forEach(function(field) {"}}},{"rowIdx":2038,"cells":{"lang":{"kind":"string","value":"JavaScript"},"license":{"kind":"string","value":"mit"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"60b5fd47dad069d062510c782a5db01fed6122c8"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"dalekjs/dalek-browser-chrome"},"new_contents":{"kind":"string","value":"/*!\n *\n * Copyright (c) 2013 Sebastian Golasch\n *\n * Permission is hereby granted, free of charge, to any person obtaining a\n * copy of this software and associated documentation files (the \"Software\"),\n * to deal in the Software without restriction, including without limitation\n * the rights to use, copy, modify, merge, publish, distribute, sublicense,\n * and/or sell copies of the Software, and to permit persons to whom the\n * Software is furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included\n * in all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL\n * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER\n * DEALINGS IN THE SOFTWARE.\n *\n */\n\n'use strict';\n\n// ext. libs\nvar Q = require('q');\nvar fs = require('fs');\nvar cp = require('child_process');\nvar portscanner = require('portscanner');\n\n// int. 
libs\nvar chromedriver = require('./lib/chromedriver');\n\n/**\n * This module is a browser plugin for [DalekJS](//github.com/dalekjs/dalek).\n * It provides all a WebDriverServer & browser launcher for Google Chrome.\n *\n * The browser plugin can be installed with the following command:\n *\n * ```bash\n * $ npm install dalek-browser-chrome --save-dev\n * ```\n *\n * You can use the browser plugin by adding a config option to the your [Dalekfile](/pages/config.html)\n *\n * ```javascript\n * \"browser\": [\"chrome\"]\n * ```\n *\n * Or you can tell Dalek that it should test in this browser via the command line:\n *\n * ```bash\n * $ dalek mytest.js -b chrome\n * ```\n *\n * The Webdriver Server tries to open Port 9002 by default,\n * if this port is blocked, it tries to use a port between 9003 & 9092\n * You can specifiy a different port from within your [Dalekfile](/pages/config.html) like so:\n *\n * ```javascript\n * \"browsers\": {\n * \"chrome\": {\n * \"port\": 5555 \n * }\n * }\n * ```\n *\n * It is also possible to specify a range of ports:\n *\n * ```javascript\n * \"browsers\": {\n * \"chrome\": {\n * \"portRange\": [6100, 6120] \n * }\n * }\n * ```\n *\n * If you would like to test Chrome Canary oder Chromium releases, you can simply apply a snd. 
argument,\n * which defines the browser type:\n *\n * ```bash\n * $ dalek mytest.js -b chrome:canary\n * ```\n *\n * for canary, and if you would like to use chromium, just append `:chromium`:\n *\n * ```bash\n * $ dalek mytest.js -b chrome:chromium\n * ```\n *\n * This will only work if you installed your browser in the default locations,\n * if the browsers binary is located in a non default location, you are able to specify\n * its location in your [Dalekfile](/pages/config.html):\n *\n * ```javascript\n * \"browsers\": {\n * \"chrome\": {\n * \"binary\": \"/Applications/Custom Located Chrome.app/MacOS/Contents/Chrome\" \n * }\n * }\n * ```\n *\n * This also works for the canary &amp; chromium builds\n *\n * ```javascript\n * \"browsers\": {\n * \"chrome\": {\n * \"binary\": \"/Applications/Custom Located Chrome.app/MacOS/Contents/Chrome\" \n * }\n * }\n * ```\n *\n * @module DalekJS\n * @class ChromeDriver\n * @namespace Browser\n * @part Chrome\n * @api\n */\n\nvar ChromeDriver = {\n\n /**\n * Verbose version of the browser name\n *\n * @property longName\n * @type string\n * @default Google Chrome\n */\n\n longName: 'Google Chrome',\n\n /**\n * Default port of the ChromeWebDriverServer\n * The port may change, cause the port conflict resolution\n * tool might pick another one, if the default one is blocked\n *\n * @property port\n * @type integer\n * @default 9002\n */\n\n port: 9002,\n\n /**\n * Default maximum port of the ChromeWebDriverServer\n * The port is the highest port in the range that can be allocated\n * by the ChromeWebDriverServer\n *\n * @property maxPort\n * @type integer\n * @default 9092\n */\n\n maxPort: 9092,\n\n /**\n * Default host of the ChromeWebDriverServer\n * The host may be overridden with\n * a user configured value\n *\n * @property host\n * @type string\n * @default localhost\n */\n\n host: 'localhost',\n\n /**\n * Default desired capabilities that should be\n * transferred when the browser session gets requested\n *\n * 
@property desiredCapabilities\n * @type object\n */\n\n desiredCapabilities: {\n browserName: 'chrome'\n },\n\n /**\n * Driver defaults, what should the driver be able to access.\n *\n * @property driverDefaults\n * @type object\n */\n\n driverDefaults: {\n viewport: true,\n status: true,\n sessionInfo: true\n },\n\n /**\n * Root path of the ChromeWebDriverServer\n *\n * @property path\n * @type string\n * @default /wd/hub\n */\n\n path: '/wd/hub',\n\n /**\n * Child process instance of the Chrome browser\n *\n * @property spawned\n * @type null|Object\n * @default null\n */\n\n spawned: null,\n\n /**\n * Chrome processes that are running on startup,\n * and therefor shouldn`t be closed\n *\n * @property openProcesses\n * @type array\n * @default [] \n */\n\n openProcesses: [],\n\n /**\n * Name of the process (platform dependent)\n * that represents the browser itself\n *\n * @property processName\n * @type string\n * @default chrome.exe / Chrome \n */\n\n processName: (process.platform === 'win32' ? 'chrome.exe' : 'Chrome'),\n\n /**\n * Different browser types (Canary / Chromium) that can be controlled\n * via the Chromedriver\n *\n * @property browserTypes\n * @type object\n */\n\n browserTypes: {\n\n /**\n * Chrome Canary\n *\n * @property canary\n * @type object\n */\n\n canary: {\n name: 'Chrome Canary',\n linux: 'google-chrome-canary',\n darwin: '/Applications/Google Chrome Canary.app/Contents/MacOS/Google Chrome Canary',\n win32: process.env.LOCALAPPDATA + '\\\\Google\\\\Chrome SxS\\\\Application\\\\chrome.exe'\n },\n\n /**\n * Chromium\n *\n * @property chromium\n * @type object\n */\n\n chromium: {\n name: 'Chromium',\n process: (process.platform === 'win32' ? 
'chromium.exe' : 'Chromium'),\n linux: 'chromium-browser',\n darwin: '/Applications/Chromium.app/Contents/MacOS/Chromium',\n win32: process.env.LOCALAPPDATA + '\\\\Google\\\\Chrome SxS\\\\Application\\\\chrome.exe'\n }\n },\n\n /**\n * Resolves the driver port\n *\n * @method getPort\n * @return {integer} port WebDriver server port\n */\n\n getPort: function () {\n return this.port;\n },\n\n /**\n * Resolves the maximum range for the driver port\n *\n * @method getMaxPort\n * @return {integer} port Max WebDriver server port range\n */\n\n getMaxPort: function () {\n return this.maxPort;\n },\n\n /**\n * Returns the driver host\n *\n * @method getHost\n * @return {string} host WebDriver server hostname\n */\n\n getHost: function () {\n return this.host;\n },\n\n /**\n * Launches the ChromeWebDriverServer\n * (which implicitly launches Chrome itself)\n * and checks for an available port\n *\n * @method launch\n * @param {object} configuration Browser configuration\n * @param {EventEmitter2} events EventEmitter (Reporter Emitter instance)\n * @param {Dalek.Internal.Config} config Dalek configuration class\n * @return {object} promise Browser promise\n */\n\n launch: function (configuration, events, config) {\n var deferred = Q.defer();\n\n // store injected configuration/log event handlers\n this.reporterEvents = events;\n this.configuration = configuration;\n this.config = config;\n\n // check for a user set port\n var browsers = this.config.get('browsers');\n if (browsers && Array.isArray(browsers)) {\n browsers.forEach(this._checkUserDefinedPorts.bind(this));\n }\n\n // check for a user defined binary\n if (configuration && configuration.binary) {\n var binaryExists = this._checkUserDefinedBinary(configuration.binary);\n if (binaryExists) {\n // check for new verbose & process name\n this.longName = this._modifyVerboseBrowserName(configuration);\n this.processName = this._fetchProcessName(configuration);\n }\n }\n\n // check if the current port is in use, if so, 
scan for free ports\n portscanner.findAPortNotInUse(this.getPort(), this.getMaxPort(), this.getHost(), this._checkPorts.bind(this, deferred));\n return deferred.promise;\n },\n\n /**\n * Kills the ChromeWebDriverServer\n * & Chrome browser processes\n *\n * @method kill\n * @chainable\n */\n\n kill: function () {\n this._processes(process.platform, this._checkProcesses.bind(this));\n return this;\n },\n\n /**\n * Modifies the verbose browser name\n *\n * @method _modifyVerboseBrowserName\n * @param {object} configuration User configuration\n * @return {string} Verbose browser name\n * @private\n */\n\n _modifyVerboseBrowserName: function (configuration) {\n if (configuration.type && this.browserTypes[configuration.type]) {\n return this.browserTypes[configuration.type].name + ' (' + this.longName + ')';\n }\n\n return this.longName;\n },\n\n /**\n * Change the process name for browser instances like Canary &amp; Chromium\n *\n * @method _fetchProcessName\n * @param {object} configuration User configuration\n * @return {string} Verbose browser name\n * @private\n */\n\n _fetchProcessName: function (configuration) {\n // check if the process name must be overridden (to shut down the browser)\n if (this.browserTypes[configuration.type] && this.browserTypes[configuration.type].process) {\n return this.browserTypes[configuration.type].process;\n }\n\n return this.processName;\n },\n\n /**\n * Process user defined ports\n *\n * @method _checkUserDefinedPorts\n * @param {object} browser Browser configuration\n * @chainable\n * @private\n */\n\n _checkUserDefinedPorts: function (browser) {\n // check for a single defined port\n if (browser.chrome && browser.chrome.port) {\n this.port = parseInt(browser.chrome.port, 10);\n this.maxPort = this.port + 90;\n this.reporterEvents.emit('report:log:system', 'dalek-browser-chrome: Switching to user defined port: ' + this.port);\n }\n\n // check for a port range\n if (browser.chrome && browser.chrome.portRange && 
browser.chrome.portRange.length === 2) {\n this.port = parseInt(browser.chrome.portRange[0], 10);\n this.maxPort = parseInt(browser.chrome.portRange[1], 10);\n this.reporterEvents.emit('report:log:system', 'dalek-browser-chrome: Switching to user defined port(s): ' + this.port + ' -> ' + this.maxPort);\n }\n\n return this;\n },\n\n /**\n * Checks if the binary exists,\n * when set manually by the user\n *\n * @method _checkUserDefinedBinary\n * @param {string} binary Path to the browser binary\n * @return {bool} Binary exists\n * @private\n */\n\n _checkUserDefinedBinary: function (binary) {\n // check if we need to replace the users home directory\n if (process.platform === 'darwin' && binary.trim()[0] === '~') {\n binary = binary.replace('~', process.env.HOME);\n }\n \n // check if the binary exists\n if (!fs.existsSync(binary)) {\n this.reporterEvents.emit('error', 'dalek-driver-chrome: Binary not found: ' + binary);\n process.exit(127);\n return false;\n }\n\n // add the binary to the desired capabilities\n this.desiredCapabilities.chromeOptions = {\n binary: binary\n };\n\n return true;\n },\n\n /**\n * Checks if the def. 
port is blocked & if we need to switch to another port\n * Kicks off the process manager (for closing the opened browsers after the run has been finished)\n * Also starts the chromedriver instance \n *\n * @method _checkPorts\n * @param {object} deferred Promise\n * @param {null|object} error Error object\n * @param {integer} port Found open port\n * @private\n * @chainable\n */\n\n _checkPorts: function(deferred, error, port) {\n // check if the port was blocked & if we need to switch to another port\n if (this.port !== port) {\n this.reporterEvents.emit('report:log:system', 'dalek-browser-chrome: Switching to port: ' + port);\n this.port = port;\n }\n\n // get the currently running processes & invoke the chromedriver afterwards\n this._processes(process.platform, this._startChromedriver.bind(this, deferred));\n return this;\n },\n\n /**\n * Spawns an instance of Chromedriver\n * \n * @method _startChromedriver\n * @param {object} deferred Promise\n * @param {null|object} error Error object\n * @param {string} result List of open chrome processes BEFORE the test browser has been launched\n * @private\n * @chainable\n */\n\n _startChromedriver: function (deferred, err, result) {\n var args = ['--port=' + this.getPort(), '--url-base=' + this.path];\n this.spawned = cp.spawn(chromedriver.path, args);\n this.openProcesses = result;\n this.spawned.stdout.on('data', this._catchDriverLogs.bind(this, deferred));\n return this;\n },\n\n /**\n * Watches the chromedriver console output to capture the starting message\n * \n * @method _catchDriverLogs\n * @param {object} deferred Promise\n * @param {buffer} data Chromedriver console output\n * @private\n * @chainable\n */\n\n _catchDriverLogs: function (deferred, data) {\n var dataStr = data + '';\n\n // timeout to catch if chromedriver couldnt be launched\n if (dataStr.search('DVFreeThread') === -1) {\n var timeout = setTimeout(function () {\n deferred.reject();\n this.reporterEvents.emit('error', 'dalek-driver-chrome: Could 
not launch Chromedriver');\n process.exit(127);\n }.bind(this), 2000);\n }\n\n // look for the success message\n if (dataStr.search('Starting ChromeDriver') !== -1) {\n this.reporterEvents.emit('report:log:system', 'dalek-browser-chrome: Started ChromeDriver');\n clearTimeout(timeout);\n deferred.resolve();\n }\n\n return this;\n },\n\n /**\n * Remove the chromedriver log that is written to the current working directory\n * \n * @method _unlinkChromedriverLog\n * @param {bool} retry Delete has been tried min 1 time before\n * @private\n * @chainable\n */\n\n _unlinkChromedriverLog: function (retry) {\n var logfile = process.cwd() + '/chromedriver.log';\n try {\n if (fs.existsSync(logfile)) {\n fs.unlinkSync(logfile);\n }\n } catch (e) {\n if (!retry) {\n setTimeout(this._unlinkChromedriverLog.bind(this, true), 1000);\n }\n }\n \n return this;\n },\n\n /**\n * Tracks running browser processes for chrome on mac & linux\n *\n * @method _processes\n * @param {string} platform Current OS\n * @param {function} fn Callback\n * @chainable\n * @private\n */\n\n _processes: function (platform, fn) {\n if (platform === 'win32') {\n this._processesWin(fn);\n return this;\n }\n \n this._processesNix(fn);\n return this;\n },\n\n /**\n * Kills all associated processes\n * \n * @method _checkProcesses\n * @param {object|null} err Error object or null\n * @param {array} result List of running processes\n * @chainable\n * @private\n */\n\n _checkProcesses: function (err, result) {\n // log that the driver shutdown process has been initiated\n this.reporterEvents.emit('report:log:system', 'dalek-browser-chrome: Shutting down ChromeDriver');\n // kill leftover chrome browser processes\n result.forEach(this[(process.platform === 'win32' ? 
'_killWindows' : '_killNix')].bind(this));\n // kill chromedriver binary \n this.spawned.kill('SIGTERM');\n // clean up the file mess the chromedriver leaves us behind\n this._unlinkChromedriverLog();\n return this;\n },\n\n // UNIX ONLY\n // ---------\n\n /**\n * Kills a process\n * \n * @method _killNix\n * @param {integer} processID Process ID\n * @chainable\n * @private\n */\n\n _killNix: function (processID) {\n var kill = true;\n this.openProcesses.forEach(function (pid) {\n if (pid === processID) {\n kill = false;\n }\n });\n\n if (kill === true) {\n var killer = cp.spawn;\n killer('kill', [processID]);\n }\n\n return this;\n },\n\n /**\n * Lists all chrome processes on *NIX systems\n * \n * @method _processesNix\n * @param {function} fn Calback\n * @chainable\n * @private\n */\n\n _processesNix: function (fn) {\n var cmd = ['ps -ax', '|', 'grep ' + this.processName];\n cp.exec(cmd.join(' '), this._processListNix.bind(this, fn));\n return this;\n },\n\n /**\n * Deserializes a process list on nix\n * \n * @method _processListNix\n * @param {function} fn Calback\n * @param {object|null} err Error object\n * @param {string} stdout Output of the process list shell command\n * @chainable\n * @private\n */\n\n _processListNix: function(fn, err, stdout) {\n var result = [];\n stdout.split('\\n').forEach(this._splitProcessListNix.bind(this, result));\n fn(err, result);\n return this;\n },\n\n /**\n * Reformats the process list output on *NIX systems\n * \n * @method _splitProcessListNix\n * @param {array} result Reference to the process list\n * @param {string} line Single process in text representation\n * @chainable\n * @private\n */\n\n _splitProcessListNix: function(result, line) {\n var data = line.split(' ');\n data = data.filter(this._filterProcessItemsNix.bind(this));\n\n if (data[1] !== '??') {\n result.push(data[0]);\n }\n return this;\n },\n\n /**\n * Filters empty process list entries on *NIX\n * \n * @method _filterProcessItemsNix\n * @param {string} 
item Item to check\n * @return {string|bool} Item or falsy\n * @private\n */\n\n _filterProcessItemsNix: function (item) {\n if (item !== '') {\n return item;\n }\n return false;\n },\n\n // WIN ONLY\n // --------\n\n /**\n * Lists all running processes (win only)\n *\n * @method _processesWin\n * @param {Function} callback Receives the process object as the only callback argument\n * @chainable\n * @private\n */\n\n _processesWin: function (callback) {\n cp.exec('tasklist /FO CSV', this._processListWin.bind(this, callback));\n return this;\n },\n\n /**\n * Deserializes the process list on win\n * \n * @method _processListWin\n * @param {function} callback Callback to be executed after the list has been transformed\n * @param {object|null} err Error if error, else null\n * @param {string} stdout Output of the process list command\n * @chainable\n * @private\n */\n\n _processListWin: function (callback, err, stdout) {\n var p = [];\n stdout.split('\\r\\n').forEach(this._splitProcessListWin.bind(this, p));\n\n var proc = [];\n var head = null;\n while (p.length > 1) {\n var rec = p.shift();\n var tmp = {};\n rec = rec.replace(/\\\"\\,/gi,'\";').replace(/\\\"|\\'/gi,'').split(';');\n for (var j=0;j ' + this.maxPort);\n }\n\n return this;\n },\n\n /**\n * Checks if the binary exists,\n * when set manually by the user\n *\n * @method _checkUserDefinedBinary\n * @param {string} binary Path to the browser binary\n * @return {bool} Binary exists\n * @private\n */\n\n _checkUserDefinedBinary: function (binary) {\n // check if the binary exists\n if (!fs.existsSync(binary)) {\n this.reporterEvents.emit('error', 'dalek-driver-chrome: Binary not found: ' + binary);\n process.exit(127);\n return false;\n }\n\n // add the binary to the desired capabilities\n this.desiredCapabilities.chromeOptions = {\n binary: binary\n };\n\n return true;\n },\n\n /**\n * Checks if the def. 
port is blocked & if we need to switch to another port\n * Kicks off the process manager (for closing the opened browsers after the run has been finished)\n * Also starts the chromedriver instance \n *\n * @method _checkPorts\n * @param {object} deferred Promise\n * @param {null|object} error Error object\n * @param {integer} port Found open port\n * @private\n * @chainable\n */\n\n _checkPorts: function(deferred, error, port) {\n // check if the port was blocked & if we need to switch to another port\n if (this.port !== port) {\n this.reporterEvents.emit('report:log:system', 'dalek-browser-chrome: Switching to port: ' + port);\n this.port = port;\n }\n\n // get the currently running processes & invoke the chromedriver afterwards\n this._processes(process.platform, this._startChromedriver.bind(this, deferred));\n return this;\n },\n\n /**\n * Spawns an instance of Chromedriver\n * \n * @method _startChromedriver\n * @param {object} deferred Promise\n * @param {null|object} error Error object\n * @param {string} result List of open chrome processes BEFORE the test browser has been launched\n * @private\n * @chainable\n */\n\n _startChromedriver: function (deferred, err, result) {\n var args = ['--port=' + this.getPort(), '--url-base=' + this.path];\n this.spawned = cp.spawn(chromedriver.path, args);\n this.openProcesses = result;\n this.spawned.stdout.on('data', this._catchDriverLogs.bind(this, deferred));\n return this;\n },\n\n /**\n * Watches the chromedriver console output to capture the starting message\n * \n * @method _catchDriverLogs\n * @param {object} deferred Promise\n * @param {buffer} data Chromedriver console output\n * @private\n * @chainable\n */\n\n _catchDriverLogs: function (deferred, data) {\n var dataStr = data + '';\n\n // timeout to catch if chromedriver couldnt be launched\n var timeout = setTimeout(function () {\n deferred.reject();\n this.reporterEvents.emit('error', 'dalek-driver-chrome: Could not launch Chromedriver');\n 
process.exit(127);\n }.bind(this), 2000);\n\n // look for the success message\n if (dataStr.search('Starting ChromeDriver') !== -1) {\n this.reporterEvents.emit('report:log:system', 'dalek-browser-chrome: Started ChromeDriver');\n clearTimeout(timeout);\n deferred.resolve();\n }\n\n return this;\n },\n\n /**\n * Remove the chromedriver log that is written to the current working directory\n * \n * @method _unlinkChromedriverLog\n * @param {bool} retry Delete has been tried min 1 time before\n * @private\n * @chainable\n */\n\n _unlinkChromedriverLog: function (retry) {\n var logfile = process.cwd() + '/chromedriver.log';\n try {\n if (fs.existsSync(logfile)) {\n fs.unlinkSync(logfile);\n }\n } catch (e) {\n if (!retry) {\n setTimeout(this._unlinkChromedriverLog.bind(this, true), 1000);\n }\n }\n \n return this;\n },\n\n /**\n * Tracks running browser processes for chrome on mac & linux\n *\n * @method _processes\n * @param {string} platform Current OS\n * @param {function} fn Callback\n * @chainable\n * @private\n */\n\n _processes: function (platform, fn) {\n if (platform === 'win32') {\n this._processesWin(fn);\n return this;\n }\n \n this._processesNix(fn);\n return this;\n },\n\n /**\n * Kills all associated processes\n * \n * @method _checkProcesses\n * @param {object|null} err Error object or null\n * @param {array} result List of running processes\n * @chainable\n * @private\n */\n\n _checkProcesses: function (err, result) {\n // log that the driver shutdown process has been initiated\n this.reporterEvents.emit('report:log:system', 'dalek-browser-chrome: Shutting down ChromeDriver');\n // kill leftover chrome browser processes\n result.forEach(this[(process.platform === 'win32' ? 
'_killWindows' : '_killNix')].bind(this));\n // kill chromedriver binary \n this.spawned.kill('SIGTERM');\n // clean up the file mess the chromedriver leaves us behind\n this._unlinkChromedriverLog();\n return this;\n },\n\n // UNIX ONLY\n // ---------\n\n /**\n * Kills a process\n * \n * @method _killNix\n * @param {integer} processID Process ID\n * @chainable\n * @private\n */\n\n _killNix: function (processID) {\n var kill = true;\n this.openProcesses.forEach(function (pid) {\n if (pid === processID) {\n kill = false;\n }\n });\n\n if (kill === true) {\n var killer = cp.spawn;\n killer('kill', [processID]);\n }\n\n return this;\n },\n\n /**\n * Lists all chrome processes on *NIX systems\n * \n * @method _processesNix\n * @param {function} fn Calback\n * @chainable\n * @private\n */\n\n _processesNix: function (fn) {\n var cmd = ['ps -ax', '|', 'grep ' + this.processName];\n cp.exec(cmd.join(' '), this._processListNix.bind(this, fn));\n return this;\n },\n\n /**\n * Deserializes a process list on nix\n * \n * @method _processListNix\n * @param {function} fn Calback\n * @param {object|null} err Error object\n * @param {string} stdout Output of the process list shell command\n * @chainable\n * @private\n */\n\n _processListNix: function(fn, err, stdout) {\n var result = [];\n stdout.split('\\n').forEach(this._splitProcessListNix.bind(this, result));\n fn(err, result);\n return this;\n },\n\n /**\n * Reformats the process list output on *NIX systems\n * \n * @method _splitProcessListNix\n * @param {array} result Reference to the process list\n * @param {string} line Single process in text representation\n * @chainable\n * @private\n */\n\n _splitProcessListNix: function(result, line) {\n var data = line.split(' ');\n data = data.filter(this._filterProcessItemsNix.bind(this));\n\n if (data[1] !== '??') {\n result.push(data[0]);\n }\n return this;\n },\n\n /**\n * Filters empty process list entries on *NIX\n * \n * @method _filterProcessItemsNix\n * @param {string} 
item Item to check\n * @return {string|bool} Item or falsy\n * @private\n */\n\n _filterProcessItemsNix: function (item) {\n if (item !== '') {\n return item;\n }\n return false;\n },\n\n // WIN ONLY\n // --------\n\n /**\n * Lists all running processes (win only)\n *\n * @method _processesWin\n * @param {Function} callback Receives the process object as the only callback argument\n * @chainable\n * @private\n */\n\n _processesWin: function (callback) {\n cp.exec('tasklist /FO CSV', this._processListWin.bind(this, callback));\n return this;\n },\n\n /**\n * Deserializes the process list on win\n * \n * @method _processListWin\n * @param {function} callback Callback to be executed after the list has been transformed\n * @param {object|null} err Error if error, else null\n * @param {string} stdout Output of the process list command\n * @chainable\n * @private\n */\n\n _processListWin: function (callback, err, stdout) {\n var p = [];\n stdout.split('\\r\\n').forEach(this._splitProcessListWin.bind(this, p));\n\n var proc = [];\n var head = null;\n while (p.length > 1) {\n var rec = p.shift();\n var tmp = {};\n rec = rec.replace(/\\\"\\,/gi,'\";').replace(/\\\"|\\'/gi,'').split(';');\n for (var j=0;jndex.js\n * Different browser types (Canary / Chromium) that can be controlled\n * via the Chromedriver\n *\n * @property browserTypes\n * @type object\n */\n \n browserTypes: {\n */\n \n _checkUserDefinedBinary: function (binary) {\n // check if we need to replace the users home directory\n if (process.platform === 'darwin' && binary.trim()[0] === '~') {\n binary = binary.replace('~', process.env.HOME);\n }\n \n // check if the binary exists\n if (!fs.existsSync(binary)) {\n this.reporterEvents.emit('error', 'dalek-driver-chrome: Binary not found: ' + binary);\n var dataStr = data + '';\n \n // timeout to catch if chromedriver couldnt be launched\n var timeout = setTimeout(function () {\n deferred.reject();\n this.reporterEvents.emit('error', 'dalek-driver-chrome: Could not 
launch Chromedriver');\n process.exit(127);\n }.bind(this), 2000);\n if (dataStr.search('DVFreeThread') === -1) {\n var timeout = setTimeout(function () {\n deferred.reject();\n this.reporterEvents.emit('error', 'dalek-driver-chrome: Could not launch Chromedriver');\n process.exit(127);\n }.bind(this), 2000);\n }\n \n // look for the success message\n if (dataStr.search('Starting ChromeDriver') !== -1) {"}}},{"rowIdx":2039,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"652516fbf919c7004fef0563c5a15e98deb47286"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"facebook/litho,facebook/litho,facebook/litho,facebook/litho,facebook/litho,facebook/litho"},"new_contents":{"kind":"string","value":"/*\n * Copyright (c) Facebook, Inc. and its affiliates.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage com.facebook.litho.widget;\n\nimport android.content.Context;\nimport android.os.Handler;\nimport android.os.Looper;\nimport android.widget.FrameLayout;\nimport androidx.annotation.UiThread;\nimport com.facebook.litho.Component;\nimport com.facebook.litho.ComponentContext;\nimport com.facebook.litho.ComponentLayout;\nimport com.facebook.litho.ComponentTree;\nimport com.facebook.litho.LithoView;\nimport com.facebook.litho.Size;\nimport com.facebook.litho.SizeSpec;\nimport 
com.facebook.litho.StateValue;\nimport com.facebook.litho.ThreadUtils;\nimport com.facebook.litho.TreeProps;\nimport com.facebook.litho.annotations.MountSpec;\nimport com.facebook.litho.annotations.OnBind;\nimport com.facebook.litho.annotations.OnBoundsDefined;\nimport com.facebook.litho.annotations.OnCreateInitialState;\nimport com.facebook.litho.annotations.OnCreateMountContent;\nimport com.facebook.litho.annotations.OnDetached;\nimport com.facebook.litho.annotations.OnMeasure;\nimport com.facebook.litho.annotations.OnMount;\nimport com.facebook.litho.annotations.OnUnbind;\nimport com.facebook.litho.annotations.OnUnmount;\nimport com.facebook.litho.annotations.Prop;\nimport com.facebook.litho.annotations.State;\nimport java.util.concurrent.atomic.AtomicReference;\n\n/**\n * A {@link MountSpec} implementation to provide width and height information to the wrapped\n * component.\n *\n *

Usage: Create a {@link SizeSpecMountWrapperComponentSpec} with a {@link Component} added to\n * it, and it will provide the width and height information through a {@link Size} typed {@link\n * com.facebook.litho.annotations.TreeProp}.\n */\n@MountSpec(hasChildLithoViews = true)\npublic class SizeSpecMountWrapperComponentSpec {\n private static final Handler sMainThreadHandler = new Handler(Looper.getMainLooper());\n\n @OnCreateInitialState\n static void onCreateInitialState(\n ComponentContext c, StateValue> componentTreeRef) {\n componentTreeRef.set(new AtomicReference());\n // This is the component tree to be added to the LithoView.\n getOrCreateComponentTree(c, componentTreeRef.get());\n }\n\n @OnCreateMountContent\n static FrameLayout onCreateMountContent(Context c) {\n // This LithoView will contain the new tree that's created from this point onwards\n // TODO: T59446191 Replace with proper solution. Remove the use of FrameLayout.\n FrameLayout wrapperView = new FrameLayout(c);\n wrapperView.addView(new LithoView(c));\n return wrapperView;\n }\n\n @UiThread\n @OnMount\n static void onMount(\n ComponentContext c,\n FrameLayout wrapperView,\n @State AtomicReference componentTreeRef) {\n ((LithoView) wrapperView.getChildAt(0)).setComponentTree(componentTreeRef.get());\n }\n\n @UiThread\n @OnUnmount\n static void onUnmount(ComponentContext c, FrameLayout wrapperView) {\n ((LithoView) wrapperView.getChildAt(0)).setComponentTree(null);\n }\n\n @OnMeasure\n static void onMeasure(\n ComponentContext c,\n ComponentLayout layout,\n int widthSpec,\n int heightSpec,\n Size size,\n @Prop Component component,\n @State AtomicReference componentTreeRef) {\n final ComponentTree componentTree = getOrCreateComponentTree(c, componentTreeRef);\n componentTree.setVersionedRootAndSizeSpec(\n component,\n widthSpec,\n heightSpec,\n size,\n getTreePropWithSize(c, widthSpec, heightSpec),\n c.getLayoutVersion());\n if (size.width < 0 || size.height < 0) {\n // if this happens it means 
that the componentTree was probably released in the UI Thread so\n // this measurement is not needed.\n size.width = size.height = 0;\n }\n }\n\n @OnBoundsDefined\n static void onBoundsDefined(\n ComponentContext c,\n ComponentLayout layout,\n @Prop Component component,\n @State AtomicReference componentTreeRef) {\n // the updated width and height is passed down.\n int widthSpec = SizeSpec.makeSizeSpec(layout.getWidth(), SizeSpec.EXACTLY);\n int heightSpec = SizeSpec.makeSizeSpec(layout.getHeight(), SizeSpec.EXACTLY);\n final ComponentTree componentTree = getOrCreateComponentTree(c, componentTreeRef);\n // This check is also done in the setRootAndSizeSpec method, but we need to do this here since\n // it will fail if a ErrorBoundariesConfiguration.rootWrapperComponentFactory was set.\n // TODO: T60426216\n if (!componentTree.hasCompatibleLayout(widthSpec, heightSpec)) {\n componentTree.setVersionedRootAndSizeSpec(\n component,\n widthSpec,\n heightSpec,\n null,\n getTreePropWithSize(c, widthSpec, heightSpec),\n c.getLayoutVersion());\n }\n }\n\n @OnDetached\n static void onDetached(\n ComponentContext c, @State AtomicReference componentTreeRef) {\n // We need to release the component tree here to allow for a proper memory deallocation\n final ComponentTree componentTree = componentTreeRef.get();\n if (componentTree != null) {\n componentTreeRef.set(null);\n if (ThreadUtils.isMainThread()) {\n componentTree.release();\n } else {\n sMainThreadHandler.post(\n new Runnable() {\n @Override\n public void run() {\n componentTree.release();\n }\n });\n }\n }\n }\n\n @UiThread\n @OnBind\n static void onBind(ComponentContext c, FrameLayout wrapperView) {\n ((LithoView) wrapperView.getChildAt(0)).rebind();\n }\n\n @UiThread\n @OnUnbind\n static void onUnbind(ComponentContext c, FrameLayout wrapperView) {\n ((LithoView) wrapperView.getChildAt(0)).unbind();\n }\n\n /**\n * Creates a TreeProp with the size information. 
We need to do this every time we call\n * setRootAndSizeSpec on the new tree.\n *\n * @param c\n * @param widthSpec\n * @param heightSpec\n * @return\n */\n private static TreeProps getTreePropWithSize(ComponentContext c, int widthSpec, int heightSpec) {\n TreeProps tp = c.getTreePropsCopy();\n if (tp == null) {\n tp = new TreeProps();\n }\n tp.put(Size.class, new Size(widthSpec, heightSpec));\n return tp;\n }\n\n /**\n * We create get a componentTree, we have to create it in case it's been released.\n *\n * @param c\n * @param componentTreeRef\n * @return\n */\n private static ComponentTree getOrCreateComponentTree(\n ComponentContext c, AtomicReference componentTreeRef) {\n ComponentTree componentTree = componentTreeRef.get();\n if (componentTree == null || componentTree.isReleased()) {\n componentTree = ComponentTree.create(c).build();\n componentTreeRef.set(componentTree);\n }\n return componentTree;\n }\n}\n"},"new_file":{"kind":"string","value":"litho-widget/src/main/java/com/facebook/litho/widget/SizeSpecMountWrapperComponentSpec.java"},"old_contents":{"kind":"string","value":"/*\n * Copyright (c) Facebook, Inc. 
and its affiliates.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage com.facebook.litho.widget;\n\nimport android.content.Context;\nimport android.widget.FrameLayout;\nimport androidx.annotation.UiThread;\nimport com.facebook.litho.Component;\nimport com.facebook.litho.ComponentContext;\nimport com.facebook.litho.ComponentLayout;\nimport com.facebook.litho.ComponentTree;\nimport com.facebook.litho.LithoView;\nimport com.facebook.litho.Size;\nimport com.facebook.litho.SizeSpec;\nimport com.facebook.litho.StateValue;\nimport com.facebook.litho.TreeProps;\nimport com.facebook.litho.annotations.MountSpec;\nimport com.facebook.litho.annotations.OnBind;\nimport com.facebook.litho.annotations.OnBoundsDefined;\nimport com.facebook.litho.annotations.OnCreateInitialState;\nimport com.facebook.litho.annotations.OnCreateMountContent;\nimport com.facebook.litho.annotations.OnDetached;\nimport com.facebook.litho.annotations.OnMeasure;\nimport com.facebook.litho.annotations.OnMount;\nimport com.facebook.litho.annotations.OnUnbind;\nimport com.facebook.litho.annotations.OnUnmount;\nimport com.facebook.litho.annotations.Prop;\nimport com.facebook.litho.annotations.State;\nimport java.util.concurrent.atomic.AtomicReference;\n\n/**\n * A {@link MountSpec} implementation to provide width and height information to the wrapped\n * component.\n *\n *

Usage: Create a {@link SizeSpecMountWrapperComponentSpec} with a {@link Component} added to\n * it, and it will provide the width and height information through a {@link Size} typed {@link\n * com.facebook.litho.annotations.TreeProp}.\n */\n@MountSpec(hasChildLithoViews = true)\npublic class SizeSpecMountWrapperComponentSpec {\n\n @OnCreateInitialState\n static void onCreateInitialState(\n ComponentContext c, StateValue> componentTreeRef) {\n componentTreeRef.set(new AtomicReference());\n // This is the component tree to be added to the LithoView.\n getOrCreateComponentTree(c, componentTreeRef.get());\n }\n\n @OnCreateMountContent\n static FrameLayout onCreateMountContent(Context c) {\n // This LithoView will contain the new tree that's created from this point onwards\n // TODO: T59446191 Replace with proper solution. Remove the use of FrameLayout.\n FrameLayout wrapperView = new FrameLayout(c);\n wrapperView.addView(new LithoView(c));\n return wrapperView;\n }\n\n @UiThread\n @OnMount\n static void onMount(\n ComponentContext c,\n FrameLayout wrapperView,\n @State AtomicReference componentTreeRef) {\n ((LithoView) wrapperView.getChildAt(0)).setComponentTree(componentTreeRef.get());\n }\n\n @UiThread\n @OnUnmount\n static void onUnmount(ComponentContext c, FrameLayout wrapperView) {\n ((LithoView) wrapperView.getChildAt(0)).setComponentTree(null);\n }\n\n @OnMeasure\n static void onMeasure(\n ComponentContext c,\n ComponentLayout layout,\n int widthSpec,\n int heightSpec,\n Size size,\n @Prop Component component,\n @State AtomicReference componentTreeRef) {\n final ComponentTree componentTree = getOrCreateComponentTree(c, componentTreeRef);\n componentTree.setVersionedRootAndSizeSpec(\n component,\n widthSpec,\n heightSpec,\n size,\n getTreePropWithSize(c, widthSpec, heightSpec),\n c.getLayoutVersion());\n if (size.width < 0 || size.height < 0) {\n // if this happens it means that the componentTree was probably released in the UI Thread so\n // this measurement is 
not needed.\n size.width = size.height = 0;\n }\n }\n\n @OnBoundsDefined\n static void onBoundsDefined(\n ComponentContext c,\n ComponentLayout layout,\n @Prop Component component,\n @State AtomicReference componentTreeRef) {\n // the updated width and height is passed down.\n int widthSpec = SizeSpec.makeSizeSpec(layout.getWidth(), SizeSpec.EXACTLY);\n int heightSpec = SizeSpec.makeSizeSpec(layout.getHeight(), SizeSpec.EXACTLY);\n final ComponentTree componentTree = getOrCreateComponentTree(c, componentTreeRef);\n // This check is also done in the setRootAndSizeSpec method, but we need to do this here since\n // it will fail if a ErrorBoundariesConfiguration.rootWrapperComponentFactory was set.\n // TODO: T60426216\n if (!componentTree.hasCompatibleLayout(widthSpec, heightSpec)) {\n componentTree.setVersionedRootAndSizeSpec(\n component,\n widthSpec,\n heightSpec,\n null,\n getTreePropWithSize(c, widthSpec, heightSpec),\n c.getLayoutVersion());\n }\n }\n\n @OnDetached\n static void onDetached(\n ComponentContext c, @State AtomicReference componentTreeRef) {\n if (componentTreeRef.get() != null) {\n // We need to release the component tree here to allow for a proper memory deallocation\n componentTreeRef.get().release();\n componentTreeRef.set(null);\n }\n }\n\n @UiThread\n @OnBind\n static void onBind(ComponentContext c, FrameLayout wrapperView) {\n ((LithoView) wrapperView.getChildAt(0)).rebind();\n }\n\n @UiThread\n @OnUnbind\n static void onUnbind(ComponentContext c, FrameLayout wrapperView) {\n ((LithoView) wrapperView.getChildAt(0)).unbind();\n }\n\n /**\n * Creates a TreeProp with the size information. 
We need to do this every time we call\n * setRootAndSizeSpec on the new tree.\n *\n * @param c\n * @param widthSpec\n * @param heightSpec\n * @return\n */\n private static TreeProps getTreePropWithSize(ComponentContext c, int widthSpec, int heightSpec) {\n TreeProps tp = c.getTreePropsCopy();\n if (tp == null) {\n tp = new TreeProps();\n }\n tp.put(Size.class, new Size(widthSpec, heightSpec));\n return tp;\n }\n\n /**\n * We create get a componentTree, we have to create it in case it's been released.\n *\n * @param c\n * @param componentTreeRef\n * @return\n */\n private static ComponentTree getOrCreateComponentTree(\n ComponentContext c, AtomicReference componentTreeRef) {\n ComponentTree componentTree = componentTreeRef.get();\n if (componentTree == null || componentTree.isReleased()) {\n componentTree = ComponentTree.create(c).build();\n componentTreeRef.set(componentTree);\n }\n return componentTree;\n }\n}\n"},"message":{"kind":"string","value":"Force ComponentTree release to the main thread.\n\nSummary:\nWe need to make sure the ComponentTree is released in the main thread.\n**Why?**\nThe ComponentTree release method call LithoView [setComponentTree](https://our.intern.facebook.com/intern/diffusion/FBS/browse/master/fbandroid/libraries/components/litho-core/src/main/java/com/facebook/litho/ComponentTree.java?commit=659646f0adffc9177ccf928905a383e2efbaf9b6&lines=2260), that [asserts](https://our.intern.facebook.com/intern/diffusion/FBS/browse/master/fbandroid/libraries/components/litho-core/src/main/java/com/facebook/litho/LithoView.java?commit=b4bc41337c4b8f8b223b6563ec3242f46f04bfff&lines=576%2C577) that we are in the main thread\n\nReviewed By: topwu\n\nDifferential Revision: D21130489\n\nfbshipit-source-id: bf1016e3abf80e34e5d608afb2e08ee4ad5224cb\n"},"old_file":{"kind":"string","value":"litho-widget/src/main/java/com/facebook/litho/widget/SizeSpecMountWrapperComponentSpec.java"},"subject":{"kind":"string","value":"Force ComponentTree release to the main 
thread."},"git_diff":{"kind":"string","value":"itho-widget/src/main/java/com/facebook/litho/widget/SizeSpecMountWrapperComponentSpec.java\n package com.facebook.litho.widget;\n \n import android.content.Context;\nimport android.os.Handler;\nimport android.os.Looper;\n import android.widget.FrameLayout;\n import androidx.annotation.UiThread;\n import com.facebook.litho.Component;\n import com.facebook.litho.Size;\n import com.facebook.litho.SizeSpec;\n import com.facebook.litho.StateValue;\nimport com.facebook.litho.ThreadUtils;\n import com.facebook.litho.TreeProps;\n import com.facebook.litho.annotations.MountSpec;\n import com.facebook.litho.annotations.OnBind;\n */\n @MountSpec(hasChildLithoViews = true)\n public class SizeSpecMountWrapperComponentSpec {\n private static final Handler sMainThreadHandler = new Handler(Looper.getMainLooper());\n \n @OnCreateInitialState\n static void onCreateInitialState(\n @OnDetached\n static void onDetached(\n ComponentContext c, @State AtomicReference componentTreeRef) {\n if (componentTreeRef.get() != null) {\n // We need to release the component tree here to allow for a proper memory deallocation\n componentTreeRef.get().release();\n // We need to release the component tree here to allow for a proper memory deallocation\n final ComponentTree componentTree = componentTreeRef.get();\n if (componentTree != null) {\n componentTreeRef.set(null);\n if (ThreadUtils.isMainThread()) {\n componentTree.release();\n } else {\n sMainThreadHandler.post(\n new Runnable() {\n @Override\n public void run() {\n componentTree.release();\n }\n });\n }\n }\n }\n 
"}}},{"rowIdx":2040,"cells":{"lang":{"kind":"string","value":"JavaScript"},"license":{"kind":"string","value":"mit"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"7e7f5148d3504d893c136f361a3b9f291c5d616b"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"Discordius/Lesswrong2,Discordius/Lesswrong2,Discordius/Lesswrong2,Discordius/Telescope,Discordius/Lesswrong2,Discordius/Telescope,Discordius/Telescope,Discordius/Telescope"},"new_contents":{"kind":"string","value":"/* global Vulcan */\nimport { Collections, getCollection } from 'meteor/vulcan:core';\nimport { getFieldsWithAttribute } from './utils';\nimport { migrateDocuments } from '../migrations/migrationUtils'\n\n\nexport const recomputeAllDenormalizedValues = async () => {\n for(let collection of Collections) {\n await Vulcan.recomputeDenormalizedValues({\n collectionName: collection.options.collectionName\n })\n }\n}\nVulcan.recomputeAllDenormalizedValues = recomputeAllDenormalizedValues;\n\nexport const validateAllDenormalizedValues = async () => {\n for(let collection of Collections) {\n await Vulcan.recomputeDenormalizedValues({\n collectionName: collection.options.collectionName,\n validateOnly: true\n })\n }\n}\nVulcan.validateAllDenormalizedValues = validateAllDenormalizedValues;\n\n// Recompute the value of denormalized fields (that are tagged with canAutoDenormalize).\n// If validateOnly is true, compare them with the existing values in the database and\n// report how many differ; otherwise update them to the correct values. If fieldName\n// is given, recompute a single field; otherwise recompute all fields on the collection.\nexport const recomputeDenormalizedValues = async ({collectionName, fieldName=null, validateOnly=false}) => {\n // eslint-disable-next-line no-console\n console.log(`Recomputing denormalize values for ${collectionName} ${fieldName ? 
`and ${fieldName}` : \"\"}`)\n\n const collection = getCollection(collectionName)\n if (!collection.simpleSchema) {\n // eslint-disable-next-line no-console\n console.log(`${collectionName} does not have a schema defined, not computing denormalized values`)\n return\n }\n\n const schema = collection.simpleSchema()._schema\n if (fieldName) {\n if (!schema[fieldName]) {\n // eslint-disable-next-line no-console\n throw new Error(`${collectionName} does not have field ${fieldName}, not computing denormalized values`)\n }\n if (!schema[fieldName].denormalized) {\n throw new Error(`${collectionName}.${fieldName} is not marked as a denormalized field`)\n }\n if (!schema[fieldName].canAutoDenormalize) {\n throw new Error(`${collectionName}.${fieldName} is not marked as canAutoDenormalize`)\n }\n const getValue = schema[fieldName].getValue\n if (!getValue) {\n throw new Error(`${collectionName}.${fieldName} is missing its getValue function`)\n }\n\n await runDenormalizedFieldMigration({ collection, fieldName, getValue, validateOnly })\n } else {\n const denormalizedFields = getFieldsWithAttribute(schema, 'canAutoDenormalize')\n if (denormalizedFields.length == 0) {\n // eslint-disable-next-line no-console\n console.log(`${collectionName} does not have any fields with \"canAutoDenormalize\", not computing denormalized values`)\n return;\n }\n\n // eslint-disable-next-line no-console\n console.log(`Recomputing denormalized values for ${collection.collectionName} in fields: ${denormalizedFields}`);\n\n for (let j=0; j {\n // eslint-disable-next-line no-console\n const updates = await Promise.all(documents.map(async doc => {\n const newValue = await getValue(doc)\n // If the correct value is already present, don't make a database update\n if ((isNullOrDefined(newValue) && isNullOrDefined(doc[fieldName])) || doc[fieldName] === newValue) return null\n return {\n updateOne: {\n filter: {_id: doc._id},\n update: {\n $set: {\n [fieldName]: newValue\n }\n },\n }\n }\n }))\n\n const 
nonEmptyUpdates = _.without(updates, null)\n numDifferent += nonEmptyUpdates.length;\n\n // eslint-disable-next-line no-console\n console.log(`${nonEmptyUpdates.length} documents in batch with changing denormalized value`)\n if (!validateOnly) {\n // eslint-disable-next-line no-console\n if (nonEmptyUpdates.length > 0) {\n await collection.rawCollection().bulkWrite(\n nonEmptyUpdates,\n { ordered: false }\n );\n }\n } else {\n // TODO: This is a hack, but better than leaving it. We're basically\n // breaking the expected API from migrateDocuments by supporting a\n // validateOnly option, so it does not offer us good hooks to do this.\n throw new Error([\n 'Abort! validateOnly means the document will not change and the migration will never',\n 'complete. This error is expected behavior to cause the migration to end.'\n ].join(' '))\n }\n },\n });\n\n // eslint-disable-next-line no-console\n console.log(`${numDifferent} total documents had wrong denormalized value`)\n}\n\nfunction isNullOrDefined(value) {\n return value === null || value === undefined\n}\n"},"new_file":{"kind":"string","value":"packages/lesswrong/server/scripts/recomputeDenormalized.js"},"old_contents":{"kind":"string","value":"/* global Vulcan */\nimport { Collections, getCollection } from 'meteor/vulcan:core';\nimport { getFieldsWithAttribute } from './utils';\nimport { migrateDocuments } from '../migrations/migrationUtils'\n\n\nexport const recomputeAllDenormalizedValues = async () => {\n for(let collection of Collections) {\n await Vulcan.recomputeDenormalizedValues({\n collectionName: collection.options.collectionName\n })\n }\n}\nVulcan.recomputeAllDenormalizedValues = recomputeAllDenormalizedValues;\n\nexport const validateAllDenormalizedValues = async () => {\n for(let collection of Collections) {\n await Vulcan.recomputeDenormalizedValues({\n collectionName: collection.options.collectionName,\n validateOnly: true\n })\n }\n}\nVulcan.validateAllDenormalizedValues = 
validateAllDenormalizedValues;\n\n// Recompute the value of denormalized fields (that are tagged with canAutoDenormalize).\n// If validateOnly is true, compare them with the existing values in the database and\n// report how many differ; otherwise update them to the correct values. If fieldName\n// is given, recompute a single field; otherwise recompute all fields on the collection.\nexport const recomputeDenormalizedValues = async ({collectionName, fieldName=null, validateOnly=false}) => {\n // eslint-disable-next-line no-console\n console.log(`Recomputing denormalize values for ${collectionName} ${fieldName ? `and ${fieldName}` : \"\"}`)\n\n const collection = getCollection(collectionName)\n if (!collection.simpleSchema) {\n // eslint-disable-next-line no-console\n console.log(`${collectionName} does not have a schema defined, not computing denormalized values`)\n return\n }\n\n const schema = collection.simpleSchema()._schema\n if (fieldName) {\n if (!schema[fieldName]) {\n // eslint-disable-next-line no-console\n throw new Error(`${collectionName} does not have field ${fieldName}, not computing denormalized values`)\n }\n if (!schema[fieldName].denormalized) {\n throw new Error(`${collectionName}.${fieldName} is not marked as a denormalized field`)\n }\n if (!schema[fieldName].canAutoDenormalize) {\n throw new Error(`${collectionName}.${fieldName} is not marked as canAutoDenormalize`)\n }\n const getValue = schema[fieldName].getValue\n if (!getValue) {\n throw new Error(`${collectionName}.${fieldName} is missing its getValue function`)\n }\n \n await runDenormalizedFieldMigration({ collection, fieldName, getValue, validateOnly })\n } else {\n const denormalizedFields = getFieldsWithAttribute(schema, 'canAutoDenormalize')\n if (denormalizedFields.length == 0) {\n // eslint-disable-next-line no-console\n console.log(`${collectionName} does not have any fields with \"canAutoDenormalize\", not computing denormalized values`)\n return;\n }\n \n // 
eslint-disable-next-line no-console\n console.log(`Recomputing denormalized values for ${collection.collectionName} in fields: ${denormalizedFields}`);\n \n for (let j=0; j {\n // eslint-disable-next-line no-console\n const updates = await Promise.all(documents.map(async doc => {\n const newValue = await getValue(doc)\n // If the correct value is already present, don't make a database update\n if ((isNullOrDefined(newValue) && isNullOrDefined(doc[fieldName])) || doc[fieldName] === newValue) return null\n return {\n updateOne: {\n filter: {_id: doc._id},\n update: {\n $set: {\n [fieldName]: newValue\n }\n },\n }\n }\n }))\n\n const nonEmptyUpdates = _.without(updates, null)\n numDifferent += nonEmptyUpdates.length;\n \n // eslint-disable-next-line no-console\n console.log(`${nonEmptyUpdates.length} documents in batch with changing denormalized value`)\n if (!validateOnly) {\n // eslint-disable-next-line no-console\n if (nonEmptyUpdates.length > 0) {\n await collection.rawCollection().bulkWrite(\n nonEmptyUpdates,\n { ordered: false }\n );\n }\n }\n },\n });\n \n // eslint-disable-next-line no-console\n console.log(`${numDifferent} total documents had wrong denormalized value`)\n}\n\nfunction isNullOrDefined(value) {\n return value === null || value === undefined\n}"},"message":{"kind":"string","value":"hackily cause validate only recomputes to exit\n"},"old_file":{"kind":"string","value":"packages/lesswrong/server/scripts/recomputeDenormalized.js"},"subject":{"kind":"string","value":"hackily cause validate only recomputes to exit"},"git_diff":{"kind":"string","value":"ackages/lesswrong/server/scripts/recomputeDenormalized.js\n if (!getValue) {\n throw new Error(`${collectionName}.${fieldName} is missing its getValue function`)\n }\n \n\n await runDenormalizedFieldMigration({ collection, fieldName, getValue, validateOnly })\n } else {\n const denormalizedFields = getFieldsWithAttribute(schema, 'canAutoDenormalize')\n console.log(`${collectionName} does not have any 
fields with \"canAutoDenormalize\", not computing denormalized values`)\n return;\n }\n \n\n // eslint-disable-next-line no-console\n console.log(`Recomputing denormalized values for ${collection.collectionName} in fields: ${denormalizedFields}`);\n \n\n for (let j=0; j const fieldName = denormalizedFields[j];\n const getValue = schema[fieldName].getValue\n \n async function runDenormalizedFieldMigration({ collection, fieldName, getValue, validateOnly }) {\n let numDifferent = 0;\n \n\n await migrateDocuments({\n description: `Recomputing denormalized values for ${collection.collectionName} field ${fieldName}`,\n collection,\n \n const nonEmptyUpdates = _.without(updates, null)\n numDifferent += nonEmptyUpdates.length;\n \n\n // eslint-disable-next-line no-console\n console.log(`${nonEmptyUpdates.length} documents in batch with changing denormalized value`)\n if (!validateOnly) {\n { ordered: false }\n );\n }\n } else {\n // TODO: This is a hack, but better than leaving it. We're basically\n // breaking the expected API from migrateDocuments by supporting a\n // validateOnly option, so it does not offer us good hooks to do this.\n throw new Error([\n 'Abort! validateOnly means the document will not change and the migration will never',\n 'complete. 
This error is expected behavior to cause the migration to end.'\n ].join(' '))\n }\n },\n });\n \n\n // eslint-disable-next-line no-console\n console.log(`${numDifferent} total documents had wrong denormalized value`)\n }"}}},{"rowIdx":2041,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"b07c9ab24526c884273f107c2a55cc41f2d855d6"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"HuangLS/neo4j,HuangLS/neo4j,HuangLS/neo4j,HuangLS/neo4j,HuangLS/neo4j"},"new_contents":{"kind":"string","value":"/*\n * Copyright (c) 2002-2015 \"Neo Technology,\"\n * Network Engine for Objects in Lund AB [http://neotechnology.com]\n *\n * This file is part of Neo4j.\n *\n * Neo4j is free software: you can redistribute it and/or modify\n * it under the terms of the GNU General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU General Public License for more details.\n *\n * You should have received a copy of the GNU General Public License\n * along with this program. 
If not, see .\n */\npackage org.neo4j.kernel.api.cursor;\n\nimport org.neo4j.collection.primitive.PrimitiveIntIterator;\nimport org.neo4j.collection.primitive.PrimitiveIntStack;\nimport org.neo4j.cursor.Cursor;\nimport org.neo4j.graphdb.NotFoundException;\n\n/**\n * Represents a single node or relationship cursor item.\n */\npublic interface EntityItem\n{\n public abstract class EntityItemHelper\n implements EntityItem\n {\n @Override\n public boolean hasProperty( int propertyKeyId )\n {\n try ( Cursor cursor = property( propertyKeyId ) )\n {\n return cursor.next();\n }\n }\n\n @Override\n public Object getProperty( int propertyKeyId )\n {\n try ( Cursor cursor = property( propertyKeyId ) )\n {\n if ( cursor.next() )\n {\n return cursor.get().value();\n }\n }\n catch ( NotFoundException e )\n {\n return null;\n }\n\n return null;\n }\n\n @Override\n public PrimitiveIntIterator getPropertyKeys()\n {\n PrimitiveIntStack keys = new PrimitiveIntStack();\n try ( Cursor properties = properties() )\n {\n while ( properties.next() )\n {\n keys.push( properties.get().propertyKeyId() );\n }\n }\n\n return keys.iterator();\n }\n }\n\n /**\n * @return id of current entity\n * @throws IllegalStateException if no current entity is selected\n */\n long id();\n\n /**\n * @return cursor for properties of current entity\n * @throws IllegalStateException if no current entity is selected\n */\n Cursor properties();\n\n /**\n * @param propertyKeyId of property to find\n * @return cursor for specific property of current entity\n * @throws IllegalStateException if no current entity is selected\n */\n Cursor property( int propertyKeyId );\n\n boolean hasProperty( int propertyKeyId );\n\n Object getProperty( int propertyKeyId );\n\n PrimitiveIntIterator getPropertyKeys();\n}\n"},"new_file":{"kind":"string","value":"community/kernel/src/main/java/org/neo4j/kernel/api/cursor/EntityItem.java"},"old_contents":{"kind":"string","value":"/*\n * Copyright (c) 2002-2015 \"Neo Technology,\"\n * 
Network Engine for Objects in Lund AB [http://neotechnology.com]\n *\n * This file is part of Neo4j.\n *\n * Neo4j is free software: you can redistribute it and/or modify\n * it under the terms of the GNU General Public License as published by\n * the Free Software Foundation, either version 3 of the License, or\n * (at your option) any later version.\n *\n * This program is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n * GNU General Public License for more details.\n *\n * You should have received a copy of the GNU General Public License\n * along with this program. If not, see .\n */\npackage org.neo4j.kernel.api.cursor;\n\nimport org.neo4j.collection.primitive.PrimitiveIntIterator;\nimport org.neo4j.collection.primitive.PrimitiveIntStack;\nimport org.neo4j.cursor.Cursor;\n\n/**\n * Represents a single node or relationship cursor item.\n */\npublic interface EntityItem\n{\n public abstract class EntityItemHelper\n implements EntityItem\n {\n @Override\n public boolean hasProperty( int propertyKeyId )\n {\n try ( Cursor cursor = property( propertyKeyId ) )\n {\n return cursor.next();\n }\n }\n\n @Override\n public Object getProperty( int propertyKeyId )\n {\n try ( Cursor cursor = property( propertyKeyId ) )\n {\n if ( cursor.next() )\n {\n return cursor.get().value();\n }\n }\n\n return null;\n }\n\n @Override\n public PrimitiveIntIterator getPropertyKeys()\n {\n PrimitiveIntStack keys = new PrimitiveIntStack();\n try ( Cursor properties = properties() )\n {\n while ( properties.next() )\n {\n keys.push( properties.get().propertyKeyId() );\n }\n }\n\n return keys.iterator();\n }\n }\n\n /**\n * @return id of current entity\n * @throws IllegalStateException if no current entity is selected\n */\n long id();\n\n /**\n * @return cursor for properties of current entity\n * @throws IllegalStateException if no current entity is 
selected\n */\n Cursor properties();\n\n /**\n * @param propertyKeyId of property to find\n * @return cursor for specific property of current entity\n * @throws IllegalStateException if no current entity is selected\n */\n Cursor property( int propertyKeyId );\n\n boolean hasProperty( int propertyKeyId );\n\n Object getProperty( int propertyKeyId );\n\n PrimitiveIntIterator getPropertyKeys();\n}\n"},"message":{"kind":"string","value":"Return null when property does not exist\n\nUnder concurrent modification we could run across property records that\nare not in use. Instead of throwing an exception in this case, return\nnull to note that the property was not found.\n"},"old_file":{"kind":"string","value":"community/kernel/src/main/java/org/neo4j/kernel/api/cursor/EntityItem.java"},"subject":{"kind":"string","value":"Return null when property does not exist"},"git_diff":{"kind":"string","value":"ommunity/kernel/src/main/java/org/neo4j/kernel/api/cursor/EntityItem.java\n import org.neo4j.collection.primitive.PrimitiveIntIterator;\n import org.neo4j.collection.primitive.PrimitiveIntStack;\n import org.neo4j.cursor.Cursor;\nimport org.neo4j.graphdb.NotFoundException;\n \n /**\n * Represents a single node or relationship cursor item.\n {\n return cursor.get().value();\n }\n }\n catch ( NotFoundException e )\n {\n return null;\n }\n \n return 
null;"}}},{"rowIdx":2042,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"2214a242f218dfc571f98b64fcde61f1a9f6013a"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"zentol/flink,mbode/flink,clarkyzl/flink,zjureel/flink,twalthr/flink,zentol/flink,greghogan/flink,lincoln-lil/flink,rmetzger/flink,lincoln-lil/flink,StephanEwen/incubator-flink,lincoln-lil/flink,twalthr/flink,zjureel/flink,yew1eb/flink,aljoscha/flink,wwjiang007/flink,zhangminglei/flink,ueshin/apache-flink,apache/flink,aljoscha/flink,apache/flink,sunjincheng121/flink,gyfora/flink,aljoscha/flink,tzulitai/flink,aljoscha/flink,twalthr/flink,rmetzger/flink,tzulitai/flink,bowenli86/flink,twalthr/flink,kl0u/flink,tony810430/flink,godfreyhe/flink,lincoln-lil/flink,tzulitai/flink,greghogan/flink,GJL/flink,zentol/flink,lincoln-lil/flink,rmetzger/flink,zentol/flink,wwjiang007/flink,sunjincheng121/flink,kaibozhou/flink,kaibozhou/flink,wwjiang007/flink,godfreyhe/flink,greghogan/flink,hequn8128/flink,jinglining/flink,sunjincheng121/flink,darionyaphet/flink,clarkyzl/flink,apache/flink,tony810430/flink,jinglining/flink,kaibozhou/flink,ueshin/apache-flink,ueshin/apache-flink,xccui/flink,yew1eb/flink,fhueske/flink,hequn8128/flink,lincoln-lil/flink,bowenli86/flink,kl0u/flink,wwjiang007/flink,xccui/flink,kl0u/flink,apache/flink,gyfora/flink,greghogan/flink,tzulitai/flink,bowenli86/flink,zentol/flink,fhueske/flink,StephanEwen/incubator-flink,hequn8128/flink,godfreyhe/flink,shaoxuan-wang/flink,hequn8128/flink,godfreyhe/flink,darionyaphet/flink,darionyaphet/flink,GJL/flink,tillrohrmann/flink,jinglining/flink,rmetzger/flink,yew1eb/flink,ueshin/apache-flink,xccui/flink,xccui/flink,zhangminglei/flink,aljoscha/flink,darionyaphet/flink,gyfora/flink,hequn8128/flink,yew1eb/flink,greghogan/flink,gyfora/flink,mbode/flink,zhangminglei/flink,godfreyhe/flink,clarkyzl/flink,kl0u/fl
ink,GJL/flink,mbode/flink,gyfora/flink,zentol/flink,kl0u/flink,shaoxuan-wang/flink,wwjiang007/flink,clarkyzl/flink,tillrohrmann/flink,kaibozhou/flink,mbode/flink,twalthr/flink,wwjiang007/flink,tillrohrmann/flink,jinglining/flink,kaibozhou/flink,bowenli86/flink,kaibozhou/flink,wwjiang007/flink,apache/flink,shaoxuan-wang/flink,tzulitai/flink,yew1eb/flink,apache/flink,kl0u/flink,tony810430/flink,mylog00/flink,greghogan/flink,twalthr/flink,clarkyzl/flink,mylog00/flink,bowenli86/flink,zjureel/flink,mylog00/flink,shaoxuan-wang/flink,StephanEwen/incubator-flink,mbode/flink,mylog00/flink,GJL/flink,tillrohrmann/flink,tillrohrmann/flink,GJL/flink,sunjincheng121/flink,shaoxuan-wang/flink,zentol/flink,jinglining/flink,apache/flink,shaoxuan-wang/flink,bowenli86/flink,sunjincheng121/flink,zjureel/flink,rmetzger/flink,sunjincheng121/flink,StephanEwen/incubator-flink,tony810430/flink,GJL/flink,fhueske/flink,fhueske/flink,aljoscha/flink,xccui/flink,lincoln-lil/flink,zhangminglei/flink,tillrohrmann/flink,ueshin/apache-flink,tillrohrmann/flink,twalthr/flink,xccui/flink,fhueske/flink,zhangminglei/flink,rmetzger/flink,tony810430/flink,xccui/flink,mylog00/flink,gyfora/flink,jinglining/flink,gyfora/flink,tony810430/flink,zjureel/flink,StephanEwen/incubator-flink,StephanEwen/incubator-flink,tony810430/flink,tzulitai/flink,darionyaphet/flink,godfreyhe/flink,rmetzger/flink,hequn8128/flink,zjureel/flink,zjureel/flink,fhueske/flink,godfreyhe/flink"},"new_contents":{"kind":"string","value":"/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage org.apache.flink.runtime.io.network.partition;\n\nimport org.apache.flink.annotation.VisibleForTesting;\nimport org.apache.flink.runtime.io.network.api.EndOfPartitionEvent;\nimport org.apache.flink.runtime.io.network.api.serialization.EventSerializer;\nimport org.apache.flink.runtime.io.network.buffer.Buffer;\n\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport javax.annotation.Nullable;\nimport javax.annotation.concurrent.GuardedBy;\nimport java.io.IOException;\nimport java.util.ArrayDeque;\n\nimport static org.apache.flink.util.Preconditions.checkNotNull;\nimport static org.apache.flink.util.Preconditions.checkState;\n\n/**\n * A pipelined in-memory only subpartition, which can be consumed once.\n */\nclass PipelinedSubpartition extends ResultSubpartition {\n\n\tprivate static final Logger LOG = LoggerFactory.getLogger(PipelinedSubpartition.class);\n\n\t// ------------------------------------------------------------------------\n\n\t/** All buffers of this subpartition. Access to the buffers is synchronized on this object. */\n\tprivate final ArrayDeque buffers = new ArrayDeque<>();\n\n\t/** The read view to consume this subpartition. */\n\tprivate PipelinedSubpartitionView readView;\n\n\t/** Flag indicating whether the subpartition has been finished. */\n\tprivate boolean isFinished;\n\n\t/** Flag indicating whether the subpartition has been released. */\n\tprivate volatile boolean isReleased;\n\n\t/** The number of non-event buffers currently in this subpartition. 
*/\n\t@GuardedBy(\"buffers\")\n\tprivate int buffersInBacklog;\n\n\t// ------------------------------------------------------------------------\n\n\tPipelinedSubpartition(int index, ResultPartition parent) {\n\t\tsuper(index, parent);\n\t}\n\n\t@Override\n\tpublic boolean add(Buffer buffer) throws IOException {\n\t\treturn add(buffer, false);\n\t}\n\n\t@Override\n\tpublic void finish() throws IOException {\n\t\tadd(EventSerializer.toBuffer(EndOfPartitionEvent.INSTANCE), true);\n\t\tLOG.debug(\"Finished {}.\", this);\n\t}\n\n\tprivate boolean add(Buffer buffer, boolean finish) throws IOException {\n\t\tcheckNotNull(buffer);\n\n\t\t// view reference accessible outside the lock, but assigned inside the locked scope\n\t\tfinal PipelinedSubpartitionView reader;\n\n\t\tsynchronized (buffers) {\n\t\t\tif (isFinished || isReleased) {\n\t\t\t\tbuffer.recycleBuffer();\n\t\t\t\treturn false;\n\t\t\t}\n\n\t\t\t// Add the buffer and update the stats\n\t\t\tbuffers.add(buffer);\n\t\t\treader = readView;\n\t\t\tupdateStatistics(buffer);\n\t\t\tincreaseBuffersInBacklog(buffer);\n\n\t\t\tif (finish) {\n\t\t\t\tisFinished = true;\n\t\t\t}\n\t\t}\n\n\t\t// Notify the listener outside of the synchronized block\n\t\tif (reader != null) {\n\t\t\treader.notifyBuffersAvailable(1);\n\t\t}\n\n\t\treturn true;\n\t}\n\n\t@Override\n\tpublic void release() {\n\t\t// view reference accessible outside the lock, but assigned inside the locked scope\n\t\tfinal PipelinedSubpartitionView view;\n\n\t\tsynchronized (buffers) {\n\t\t\tif (isReleased) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// Release all available buffers\n\t\t\tBuffer buffer;\n\t\t\twhile ((buffer = buffers.poll()) != null) {\n\t\t\t\tbuffer.recycleBuffer();\n\t\t\t}\n\n\t\t\tview = readView;\n\t\t\treadView = null;\n\n\t\t\t// Make sure that no further buffers are added to the subpartition\n\t\t\tisReleased = true;\n\t\t}\n\n\t\tLOG.debug(\"Released {}.\", this);\n\n\t\tif (view != null) 
{\n\t\t\tview.releaseAllResources();\n\t\t}\n\t}\n\n\t@Nullable\n\tBufferAndBacklog pollBuffer() {\n\t\tsynchronized (buffers) {\n\t\t\tBuffer buffer = buffers.pollFirst();\n\t\t\tdecreaseBuffersInBacklog(buffer);\n\n\t\t\tif (buffer != null) {\n\t\t\t\treturn new BufferAndBacklog(buffer, buffersInBacklog, _nextBufferIsEvent());\n\t\t\t} else {\n\t\t\t\treturn null;\n\t\t\t}\n\t\t}\n\t}\n\n\tboolean nextBufferIsEvent() {\n\t\tsynchronized (buffers) {\n\t\t\treturn _nextBufferIsEvent();\n\t\t}\n\t}\n\n\tprivate boolean _nextBufferIsEvent() {\n\t\tassert Thread.holdsLock(buffers);\n\n\t\treturn !buffers.isEmpty() && !buffers.peekFirst().isBuffer();\n\t}\n\n\t@Override\n\tpublic int releaseMemory() {\n\t\t// The pipelined subpartition does not react to memory release requests.\n\t\t// The buffers will be recycled by the consuming task.\n\t\treturn 0;\n\t}\n\n\t@Override\n\tpublic boolean isReleased() {\n\t\treturn isReleased;\n\t}\n\n\t@Override\n\t@VisibleForTesting\n\tpublic int getBuffersInBacklog() {\n\t\treturn buffersInBacklog;\n\t}\n\n\t/**\n\t * Decreases the number of non-event buffers by one after fetching a non-event\n\t * buffer from this subpartition.\n\t */\n\tprivate void decreaseBuffersInBacklog(Buffer buffer) {\n\t\tassert Thread.holdsLock(buffers);\n\n\t\tif (buffer != null && buffer.isBuffer()) {\n\t\t\tbuffersInBacklog--;\n\t\t}\n\t}\n\n\t/**\n\t * Increases the number of non-event buffers by one after adding a non-event\n\t * buffer into this subpartition.\n\t */\n\tprivate void increaseBuffersInBacklog(Buffer buffer) {\n\t\tassert Thread.holdsLock(buffers);\n\n\t\tif (buffer != null && buffer.isBuffer()) {\n\t\t\tbuffersInBacklog++;\n\t\t}\n\t}\n\n\t@Override\n\tpublic PipelinedSubpartitionView createReadView(BufferAvailabilityListener availabilityListener) throws IOException {\n\t\tfinal int queueSize;\n\n\t\tsynchronized (buffers) {\n\t\t\tcheckState(!isReleased);\n\t\t\tcheckState(readView == null,\n\t\t\t\t\t\"Subpartition %s of is being (or 
already has been) consumed, \" +\n\t\t\t\t\t\"but pipelined subpartitions can only be consumed once.\", index, parent.getPartitionId());\n\n\t\t\tLOG.debug(\"Creating read view for subpartition {} of partition {}.\", index, parent.getPartitionId());\n\n\t\t\tqueueSize = buffers.size();\n\t\t\treadView = new PipelinedSubpartitionView(this, availabilityListener);\n\t\t}\n\n\t\treadView.notifyBuffersAvailable(queueSize);\n\n\t\treturn readView;\n\t}\n\n\t// ------------------------------------------------------------------------\n\n\tint getCurrentNumberOfBuffers() {\n\t\treturn buffers.size();\n\t}\n\n\t// ------------------------------------------------------------------------\n\n\t@Override\n\tpublic String toString() {\n\t\tfinal long numBuffers;\n\t\tfinal long numBytes;\n\t\tfinal boolean finished;\n\t\tfinal boolean hasReadView;\n\n\t\tsynchronized (buffers) {\n\t\t\tnumBuffers = getTotalNumberOfBuffers();\n\t\t\tnumBytes = getTotalNumberOfBytes();\n\t\t\tfinished = isFinished;\n\t\t\thasReadView = readView != null;\n\t\t}\n\n\t\treturn String.format(\n\t\t\t\"PipelinedSubpartition [number of buffers: %d (%d bytes), number of buffers in backlog: %d, finished? %s, read view? %s]\",\n\t\t\tnumBuffers, numBytes, buffersInBacklog, finished, hasReadView);\n\t}\n\n\t@Override\n\tpublic int unsynchronizedGetNumberOfQueuedBuffers() {\n\t\t// since we do not synchronize, the size may actually be lower than 0!\n\t\treturn Math.max(buffers.size(), 0);\n\t}\n}\n"},"new_file":{"kind":"string","value":"flink-runtime/src/main/java/org/apache/flink/runtime/io/network/partition/PipelinedSubpartition.java"},"old_contents":{"kind":"string","value":"/*\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. 
The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage org.apache.flink.runtime.io.network.partition;\n\nimport org.apache.flink.annotation.VisibleForTesting;\nimport org.apache.flink.runtime.io.network.api.EndOfPartitionEvent;\nimport org.apache.flink.runtime.io.network.api.serialization.EventSerializer;\nimport org.apache.flink.runtime.io.network.buffer.Buffer;\n\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport javax.annotation.Nullable;\nimport javax.annotation.concurrent.GuardedBy;\nimport java.io.IOException;\nimport java.util.ArrayDeque;\n\nimport static org.apache.flink.util.Preconditions.checkNotNull;\nimport static org.apache.flink.util.Preconditions.checkState;\n\n/**\n * A pipelined in-memory only subpartition, which can be consumed once.\n */\nclass PipelinedSubpartition extends ResultSubpartition {\n\n\tprivate static final Logger LOG = LoggerFactory.getLogger(PipelinedSubpartition.class);\n\n\t// ------------------------------------------------------------------------\n\n\t/** All buffers of this subpartition. Access to the buffers is synchronized on this object. */\n\tprivate final ArrayDeque buffers = new ArrayDeque<>();\n\n\t/** The read view to consume this subpartition. */\n\tprivate PipelinedSubpartitionView readView;\n\n\t/** Flag indicating whether the subpartition has been finished. 
*/\n\tprivate boolean isFinished;\n\n\t/** Flag indicating whether the subpartition has been released. */\n\tprivate volatile boolean isReleased;\n\n\t/** The number of non-event buffers currently in this subpartition. */\n\t@GuardedBy(\"buffers\")\n\tprivate int buffersInBacklog;\n\n\t// ------------------------------------------------------------------------\n\n\tPipelinedSubpartition(int index, ResultPartition parent) {\n\t\tsuper(index, parent);\n\t}\n\n\t@Override\n\tpublic boolean add(Buffer buffer) throws IOException {\n\t\tcheckNotNull(buffer);\n\n\t\t// view reference accessible outside the lock, but assigned inside the locked scope\n\t\tfinal PipelinedSubpartitionView reader;\n\n\t\tsynchronized (buffers) {\n\t\t\tif (isFinished || isReleased) {\n\t\t\t\tbuffer.recycleBuffer();\n\t\t\t\treturn false;\n\t\t\t}\n\n\t\t\t// Add the buffer and update the stats\n\t\t\tbuffers.add(buffer);\n\t\t\treader = readView;\n\t\t\tupdateStatistics(buffer);\n\t\t\tincreaseBuffersInBacklog(buffer);\n\t\t}\n\n\t\t// Notify the listener outside of the synchronized block\n\t\tif (reader != null) {\n\t\t\treader.notifyBuffersAvailable(1);\n\t\t}\n\n\t\treturn true;\n\t}\n\n\t@Override\n\tpublic void finish() throws IOException {\n\t\tfinal Buffer buffer = EventSerializer.toBuffer(EndOfPartitionEvent.INSTANCE);\n\n\t\t// view reference accessible outside the lock, but assigned inside the locked scope\n\t\tfinal PipelinedSubpartitionView reader;\n\n\t\tsynchronized (buffers) {\n\t\t\tif (isFinished || isReleased) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tbuffers.add(buffer);\n\t\t\treader = readView;\n\t\t\tupdateStatistics(buffer);\n\n\t\t\tisFinished = true;\n\t\t}\n\n\t\tLOG.debug(\"Finished {}.\", this);\n\n\t\t// Notify the listener outside of the synchronized block\n\t\tif (reader != null) {\n\t\t\treader.notifyBuffersAvailable(1);\n\t\t}\n\t}\n\n\t@Override\n\tpublic void release() {\n\t\t// view reference accessible outside the lock, but assigned inside the locked 
scope\n\t\tfinal PipelinedSubpartitionView view;\n\n\t\tsynchronized (buffers) {\n\t\t\tif (isReleased) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// Release all available buffers\n\t\t\tBuffer buffer;\n\t\t\twhile ((buffer = buffers.poll()) != null) {\n\t\t\t\tbuffer.recycleBuffer();\n\t\t\t}\n\n\t\t\tview = readView;\n\t\t\treadView = null;\n\n\t\t\t// Make sure that no further buffers are added to the subpartition\n\t\t\tisReleased = true;\n\t\t}\n\n\t\tLOG.debug(\"Released {}.\", this);\n\n\t\tif (view != null) {\n\t\t\tview.releaseAllResources();\n\t\t}\n\t}\n\n\t@Nullable\n\tBufferAndBacklog pollBuffer() {\n\t\tsynchronized (buffers) {\n\t\t\tBuffer buffer = buffers.pollFirst();\n\t\t\tdecreaseBuffersInBacklog(buffer);\n\n\t\t\tif (buffer != null) {\n\t\t\t\treturn new BufferAndBacklog(buffer, buffersInBacklog, _nextBufferIsEvent());\n\t\t\t} else {\n\t\t\t\treturn null;\n\t\t\t}\n\t\t}\n\t}\n\n\tboolean nextBufferIsEvent() {\n\t\tsynchronized (buffers) {\n\t\t\treturn _nextBufferIsEvent();\n\t\t}\n\t}\n\n\tprivate boolean _nextBufferIsEvent() {\n\t\tassert Thread.holdsLock(buffers);\n\n\t\treturn !buffers.isEmpty() && !buffers.peekFirst().isBuffer();\n\t}\n\n\t@Override\n\tpublic int releaseMemory() {\n\t\t// The pipelined subpartition does not react to memory release requests.\n\t\t// The buffers will be recycled by the consuming task.\n\t\treturn 0;\n\t}\n\n\t@Override\n\tpublic boolean isReleased() {\n\t\treturn isReleased;\n\t}\n\n\t@Override\n\t@VisibleForTesting\n\tpublic int getBuffersInBacklog() {\n\t\treturn buffersInBacklog;\n\t}\n\n\t/**\n\t * Decreases the number of non-event buffers by one after fetching a non-event\n\t * buffer from this subpartition.\n\t */\n\tprivate void decreaseBuffersInBacklog(Buffer buffer) {\n\t\tassert Thread.holdsLock(buffers);\n\n\t\tif (buffer != null && buffer.isBuffer()) {\n\t\t\tbuffersInBacklog--;\n\t\t}\n\t}\n\n\t/**\n\t * Increases the number of non-event buffers by one after adding a non-event\n\t * buffer into this 
subpartition.\n\t */\n\tprivate void increaseBuffersInBacklog(Buffer buffer) {\n\t\tassert Thread.holdsLock(buffers);\n\n\t\tif (buffer != null && buffer.isBuffer()) {\n\t\t\tbuffersInBacklog++;\n\t\t}\n\t}\n\n\t@Override\n\tpublic PipelinedSubpartitionView createReadView(BufferAvailabilityListener availabilityListener) throws IOException {\n\t\tfinal int queueSize;\n\n\t\tsynchronized (buffers) {\n\t\t\tcheckState(!isReleased);\n\t\t\tcheckState(readView == null,\n\t\t\t\t\t\"Subpartition %s of is being (or already has been) consumed, \" +\n\t\t\t\t\t\"but pipelined subpartitions can only be consumed once.\", index, parent.getPartitionId());\n\n\t\t\tLOG.debug(\"Creating read view for subpartition {} of partition {}.\", index, parent.getPartitionId());\n\n\t\t\tqueueSize = buffers.size();\n\t\t\treadView = new PipelinedSubpartitionView(this, availabilityListener);\n\t\t}\n\n\t\treadView.notifyBuffersAvailable(queueSize);\n\n\t\treturn readView;\n\t}\n\n\t// ------------------------------------------------------------------------\n\n\tint getCurrentNumberOfBuffers() {\n\t\treturn buffers.size();\n\t}\n\n\t// ------------------------------------------------------------------------\n\n\t@Override\n\tpublic String toString() {\n\t\tfinal long numBuffers;\n\t\tfinal long numBytes;\n\t\tfinal boolean finished;\n\t\tfinal boolean hasReadView;\n\n\t\tsynchronized (buffers) {\n\t\t\tnumBuffers = getTotalNumberOfBuffers();\n\t\t\tnumBytes = getTotalNumberOfBytes();\n\t\t\tfinished = isFinished;\n\t\t\thasReadView = readView != null;\n\t\t}\n\n\t\treturn String.format(\n\t\t\t\"PipelinedSubpartition [number of buffers: %d (%d bytes), number of buffers in backlog: %d, finished? %s, read view? 
%s]\",\n\t\t\tnumBuffers, numBytes, buffersInBacklog, finished, hasReadView);\n\t}\n\n\t@Override\n\tpublic int unsynchronizedGetNumberOfQueuedBuffers() {\n\t\t// since we do not synchronize, the size may actually be lower than 0!\n\t\treturn Math.max(buffers.size(), 0);\n\t}\n}\n"},"message":{"kind":"string","value":"[hotfix][runtime] Deduplicate code in PipelinedSubpartition\n"},"old_file":{"kind":"string","value":"flink-runtime/src/main/java/org/apache/flink/runtime/io/network/partition/PipelinedSubpartition.java"},"subject":{"kind":"string","value":"[hotfix][runtime] Deduplicate code in PipelinedSubpartition"},"git_diff":{"kind":"string","value":"link-runtime/src/main/java/org/apache/flink/runtime/io/network/partition/PipelinedSubpartition.java\n \n \t@Override\n \tpublic boolean add(Buffer buffer) throws IOException {\n\t\treturn add(buffer, false);\n\t}\n\n\t@Override\n\tpublic void finish() throws IOException {\n\t\tadd(EventSerializer.toBuffer(EndOfPartitionEvent.INSTANCE), true);\n\t\tLOG.debug(\"Finished {}.\", this);\n\t}\n\n\tprivate boolean add(Buffer buffer, boolean finish) throws IOException {\n \t\tcheckNotNull(buffer);\n \n \t\t// view reference accessible outside the lock, but assigned inside the locked scope\n \t\t\treader = readView;\n \t\t\tupdateStatistics(buffer);\n \t\t\tincreaseBuffersInBacklog(buffer);\n\n\t\t\tif (finish) {\n\t\t\t\tisFinished = true;\n\t\t\t}\n \t\t}\n \n \t\t// Notify the listener outside of the synchronized block\n \t\t}\n \n \t\treturn true;\n\t}\n\n\t@Override\n\tpublic void finish() throws IOException {\n\t\tfinal Buffer buffer = EventSerializer.toBuffer(EndOfPartitionEvent.INSTANCE);\n\n\t\t// view reference accessible outside the lock, but assigned inside the locked scope\n\t\tfinal PipelinedSubpartitionView reader;\n\n\t\tsynchronized (buffers) {\n\t\t\tif (isFinished || isReleased) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tbuffers.add(buffer);\n\t\t\treader = 
readView;\n\t\t\tupdateStatistics(buffer);\n\n\t\t\tisFinished = true;\n\t\t}\n\n\t\tLOG.debug(\"Finished {}.\", this);\n\n\t\t// Notify the listener outside of the synchronized block\n\t\tif (reader != null) {\n\t\t\treader.notifyBuffersAvailable(1);\n\t\t}\n \t}\n \n \t@Override"}}},{"rowIdx":2043,"cells":{"lang":{"kind":"string","value":"JavaScript"},"license":{"kind":"string","value":"mit"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"9c85102f489e9d83f86be747d72bda577fc469bd"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"hash-bang/Monoxide,hash-bang/Mongoloid"},"new_contents":{"kind":"string","value":"var _ = require('lodash')\n\t.mixin(require('lodash-deep'));\nvar argy = require('argy');\nvar async = require('async-chainable');\nvar debug = require('debug')('monoxide');\nvar deepDiff = require('deep-diff');\nvar events = require('events');\nvar mongoose = require('mongoose');\nvar traverse = require('traverse');\nvar util = require('util');\n\n/**\n* @static monoxide\n*/\nfunction Monoxide() {\n\tvar o = this;\n\to.mongoose = mongoose;\n\to.models = {};\n\to.connection;\n\to.settings = {\n\t\tremoveAll: true, // Allow db.model.delete() calls with no arguments\n\t\tversionIncErr: /^MongoError: Cannot apply \\$inc to a value of non-numeric type. 
{.+} has the field '__v' of non-numeric type null$/i, // RegExp error detector used to detect $inc problems when trying to increment `__v` in update operations\n\t};\n\n\t// .connect {{{\n\t/**\n\t* Connect to a Mongo database\n\t* @param {string} uri The URL of the database to connect to\n\t* @param {function} [callback] Optional callback when connected, if omitted this function is syncronous\n\t* @return {monoxide} The Monoxide chainable object\n\t*/\n\to.connect = function(uri, callback) {\n\t\tmongoose.set('useFindAndModify', false);\n\t\tmongoose.set('useCreateIndex', true);\n\t\tmongoose.connect(uri, {\n\t\t\tpromiseLibrary: global.Promise,\n\t\t\tuseNewUrlParser: true,\n\t\t}, function(err) {\n\t\t\tif (err) {\n\t\t\t\tif (_.isFunction(callback)) callback(err);\n\t\t\t} else {\n\t\t\t\to.connection = mongoose.connection;\n\t\t\t\tif (_.isFunction(callback)) callback();\n\t\t\t}\n\t\t})\n\n\t\treturn o;\n\t};\n\t// }}}\n\n\t// .disconnect {{{\n\t/**\n\t* Disconnect from an active connection\n\t* @return {monoxide} The Monoxide chainable object\n\t*/\n\to.disconnect = function(callback) {\n\t\tmongoose.disconnect(callback);\n\n\t\treturn o;\n\t};\n\t// }}}\n\n\t// .get(q, [id], callback) {{{\n\t/**\n\t* Retrieve a single record from a model via its ID\n\t* This function will ONLY retrieve via the ID field, all other fields are ignored\n\t* NOTE: Really this function just wraps the monoxide.query() function to provide functionality like populate\n\t*\n\t* @name monoxide.get\n\t* @memberof monoxide\n\t*\n\t* @param {Object} q The object to process\n\t* @param {string} q.$collection The collection / model to query\n\t* @param {string} [q.$id] The ID to return\n\t* @param {(string|string[]|object[])} [q.$populate] Population criteria to apply\n\t*\n\t* @param {string} [id] The ID to return (alternative syntax)\n\t*\n\t* @param {function} callback(err, result) the callback to call on completion or error\n\t*\n\t* @return {Object} This chainable object\n\t*\n\t* 
@example\n\t* // Return a single widget by its ID (string syntax)\n\t* monoxide.get('widgets', '56e2421f475c1ef4135a1d58', function(err, res) {\n\t* \tconsole.log('Widget:', res);\n\t* });\n\t*\n\t* @example\n\t* // Return a single widget by its ID (object syntax)\n\t* monoxide.get({$collection: 'widgets', $id: '56e2421f475c1ef4135a1d58'}, function(err, res) {\n\t* \tconsole.log('Widget:', res);\n\t* });\n\t*/\n\to.get = argy('[object|string|number] [string|number|object] function', function(q, id, callback) {\n\t\targy(arguments)\n\t\t\t.ifForm('object function', function(aQ, aCallback) {\n\t\t\t\tq = aQ;\n\t\t\t\tcallback = aCallback;\n\t\t\t})\n\t\t\t.ifForm('string string|number function', function(aCollection, aId, aCallback) {\n\t\t\t\tq = {\n\t\t\t\t\t$collection: aCollection,\n\t\t\t\t\t$id: aId,\n\t\t\t\t};\n\t\t\t})\n\t\t\t.ifForm('string object function', function(aCollection, aId, aCallback) { // Probably being passed a Mongoose objectId as the ID\n\t\t\t\tq = {\n\t\t\t\t\t$collection: aCollection,\n\t\t\t\t\t$id: aId.toString(),\n\t\t\t\t};\n\t\t\t});\n\n\t\tif (!q.$id) return callback('No $id specified');\n\t\treturn o.internal.query(q, callback);\n\t});\n\t// }}}\n\n\t// .query([q], callback) {{{\n\t/**\n\t* Query Mongo directly with the Monoxide query syntax\n\t*\n\t* @name monoxide.query\n\t* @fires query\n\t*\n\t* @param {Object} q The object to process\n\t* @param {string} q.$collection The collection / model to query\n\t* @param {string} [q.$id] If specified return only one record by its master ID (implies $one=true). If present all other conditionals will be ignored and only the object is returned (see $one)\n\t* @param {(string|string[]|object[])} [q.$select] Field selection criteria to apply (implies q.$applySchema=false as we will be dealing with a partial schema). 
Any fields prefixed with '-' are removed\n\t* @param {(string|string[]|object[])} [q.$sort] Sorting criteria to apply\n\t* @param {(string|string[]|object[])} [q.$populate] Population criteria to apply\n\t* @param {boolean} [q.$one=false] Whether a single object should be returned (implies $limit=1). If enabled an object is returned not an array\n\t* @param {number} [q.$limit] Limit the return to this many rows\n\t* @param {number} [q.$skip] Offset return by this number of rows\n\t* @param {boolean=false} [q.$count=false] Only count the results - do not return them. If enabled a number of returned with the result\n\t* @param {object|function} [q.$data] Set the user-defined data object, if this is a function the callback result is used\n\t* @param {boolean} [q.$decorate=true] Add all Monoxide methods, functions and meta properties\n\t* @param {string} [q.$want='array'] How to return data contents. ENUM: 'array', 'cursor'\n\t* @param {boolean} [q.$plain=false] Return a plain object or object array. This is the equivelent of calling .toObject() on any resultant object. Implies $decorate=true\n\t* @param {boolean} [q.$cacheFKs=true] Cache the foreign keys (objectIDs) within an object so future retrievals dont have to recalculate the model structure\n\t* @param {boolean} [q.$applySchema=true] Apply the schema for each document retrieval - this slows retrieval but means any alterations to the schema are applied to each retrieved record\n\t* @param {boolean} [q.$dirty=false] Whether the entire document contents should be marked as dirty (modified). If true this also skips the computation of modified fields\n\t* @param {boolean} [q.$errNotFound] Raise an error if a specifically requested document is not found (requires $id)\n\t* @param {...*} [q.filter] Any other field (not beginning with '$') is treated as filtering criteria\n\t*\n\t* @param {function} callback(err, result) the callback to call on completion or error. 
If $one is truthy this returns a single monoxide.monoxideDocument, if not it returns an array of them\n\t*\n\t* @return {Object} This chainable object\n\t*\n\t* @example\n\t* // Return all Widgets, sorted by name\n\t* monoxide.query({$collection: 'widgets', $sort: 'name'}, function(err, res) {\n\t* \tconsole.log('Widgets:', res);\n\t* });\n\t* @example\n\t* // Filter Users to only return admins while also populating their country\n\t* monoxide.query({$collection: 'users', $populate: 'country', role: 'admin'}, function(err, res) {\n\t* \tconsole.log('Admin users:', res);\n\t* });\n\t*/\n\to.query = argy('[string|object] function', function MonoxideQuery(q, callback) {\n\t\tif (argy.isType(q, 'string')) q = {$collection: q};\n\n\t\t_.defaults(q || {}, {\n\t\t\t$cacheFKs: true, // Cache model Foreign Keys (used for populates) or compute them every time\n\t\t\t$want: 'array',\n\t\t\t$applySchema: true, // Apply the schema on retrieval - this slows ths record retrieval but means any alterations to the schema are applied to each retrieved record\n\t\t\t$errNotFound: true, // During $id / $one operations raise an error if the record is not found\n\t\t});\n\t\tif (!_.isEmpty(q.$select)) q.$applySchema = false; // Turn off schema application when using $select as we wont be grabbing the full object\n\n\t\tasync()\n\t\t\t.set('metaFields', [\n\t\t\t\t'$collection', // Collection to query\n\t\t\t\t'$data', // Meta user-defined data object\n\t\t\t\t'$dirty', // Whether the document is unclean\n\t\t\t\t'$id', // If specified return only one record by its master ID (implies $one=true). If present all other conditionals will be ignored and only the object is returned (see $one)\n\t\t\t\t'$select', // Field selection criteria to apply\n\t\t\t\t'$sort', // Sorting criteria to apply\n\t\t\t\t'$populate', // Population criteria to apply\n\t\t\t\t'$one', // Whether a single object should be returned (implies $limit=1). 
If enabled an object is returned not an array\n\t\t\t\t'$limit', // Limit the return to this many rows\n\t\t\t\t'$skip', // Offset return by this number of rows\n\t\t\t\t'$count', // Only count the results - do not return them\n\t\t\t\t'$want', // What result we are looking for from the query\n\t\t\t\t'$cacheFKs', // Cache model Foreign Keys (used for populates) or compute them every time\n\t\t\t\t'$applySchema', // Apply the schema on retrieval - this slows ths record retrieval but means any alterations to the schema are applied to each retrieved record\n\t\t\t\t'$decorate',\n\t\t\t\t'$plain',\n\t\t\t\t'$errNotFound', // During $id / $one operations raise an error if the record is not found\n\t\t\t])\n\t\t\t// Sanity checks {{{\n\t\t\t.then(function(next) {\n\t\t\t\tif (!q || _.isEmpty(q)) return next('No query given for get operation');\n\t\t\t\tif (!q.$collection) return next('$collection must be specified for get operation');\n\t\t\t\tif (!o.models[q.$collection]) return next('Model not initalized: \"' + q.$collection + '\"');\n\t\t\t\tnext();\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// .query - start the find query {{{\n\t\t\t.set('filterPostPopulate', {}) // Filter by these fields post populate\n\t\t\t.then('query', function(next) {\n\t\t\t\tvar me = this;\n\t\t\t\tvar fields;\n\n\t\t\t\tif (q.$id) { // Search by one ID only - ignore other fields\n\t\t\t\t\tfields = {_id: q.$id};\n\t\t\t\t\tq.$one = true;\n\t\t\t\t} else { // Search by query\n\t\t\t\t\tfields = _(q)\n\t\t\t\t\t\t.omit(this.metaFields) // Remove all meta fields\n\t\t\t\t\t\t// FIXME: Ensure all fields are flat\n\t\t\t\t\t\t.omitBy(function(val, key) { // Remove all fields that will need populating later\n\t\t\t\t\t\t\tif (_.some(q.$collection.$oids, function(FK) {\n\t\t\t\t\t\t\t\treturn _.startsWith(key, FK);\n\t\t\t\t\t\t\t})) {\n\t\t\t\t\t\t\t\tme.filterPostPopulate[key] = val;\n\t\t\t\t\t\t\t\treturn true;\n\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\treturn 
false;\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t})\n\t\t\t\t\t\t.value();\n\t\t\t\t}\n\n\t\t\t\t//console.log('FIELDS', fields);\n\t\t\t\t//console.log('POSTPOPFIELDS', o.filterPostPopulate);\n\n\t\t\t\tif (q.$count) {\n\t\t\t\t\tnext(null, o.models[q.$collection].$mongooseModel.countDocuments(fields));\n\t\t\t\t} else if (q.$one) {\n\t\t\t\t\tnext(null, o.models[q.$collection].$mongooseModel.findOne(fields));\n\t\t\t\t} else {\n\t\t\t\t\tnext(null, o.models[q.$collection].$mongooseModel.find(fields));\n\t\t\t\t}\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// Apply various simple criteria {{{\n\t\t\t.then(function(next) {\n\t\t\t\tif (q.$count) return next(); // No point doing anything else if just counting\n\t\t\t\tif (q.$limit) this.query.limit(q.$limit);\n\t\t\t\tif (q.$skip) this.query.skip(q.$skip);\n\n\t\t\t\t// q.$populate {{{\n\t\t\t\tif (q.$populate) {\n\t\t\t\t\tif (_.isArray(q.$populate)) {\n\t\t\t\t\t\tthis.query.populate(q.$populate);\n\t\t\t\t\t} else if (_.isString(q.$populate) || _.isObject(q.$populate)) {\n\t\t\t\t\t\tthis.query.populate(q.$populate);\n\t\t\t\t\t\tq.$populate = [q.$populate]; // Also rewrite into an array so we can destructure later\n\t\t\t\t\t} else {\n\t\t\t\t\t\tthrow new Error('Invalid sort type: ' + (typeof q.$sort));\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\t// }}}\n\t\t\t\t// q.$select {{{\n\t\t\t\tif (q.$select) {\n\t\t\t\t\tif (_.isArray(q.$select)) {\n\t\t\t\t\t\tvar query = this.query;\n\t\t\t\t\t\tq.$select.forEach(function(s) {\n\t\t\t\t\t\t\tquery.select(s);\n\t\t\t\t\t\t});\n\t\t\t\t\t} else if (_.isString(q.$select) || _.isObject(q.$select)) {\n\t\t\t\t\t\tthis.query.select(q.$select);\n\t\t\t\t\t} else {\n\t\t\t\t\t\tthrow new Error('Invalid select type: ' + (typeof q.$select));\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\t// }}}\n\t\t\t\t// q.$sort {{{\n\t\t\t\tif (q.$sort) {\n\t\t\t\t\tif (_.isArray(q.$sort)) {\n\t\t\t\t\t\tvar query = this.query;\n\t\t\t\t\t\tq.$sort.forEach(function(s) {\n\t\t\t\t\t\t\tquery.sort(s);\n\t\t\t\t\t\t});\n\t\t\t\t\t} else if 
(_.isString(q.$sort) || _.isObject(q.$sort)) {\n\t\t\t\t\t\tthis.query.sort(q.$sort);\n\t\t\t\t\t} else {\n\t\t\t\t\t\tthrow new Error('Invalid sort type: ' + (typeof q.$sort));\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\t// }}}\n\t\t\t\tnext();\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// Calculate $data if it is a function {{{\n\t\t\t.then('data', function(next) {\n\t\t\t\tif (!q.$data) return next();\n\t\t\t\tif (_.isFunction(q.$data)) {\n\t\t\t\t\tq.$data(function(err, data) {\n\t\t\t\t\t\tif (err) return next(err);\n\t\t\t\t\t\tq.$data = data;\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t\tnext();\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// Fire hooks {{{\n\t\t\t.then(function(next) {\n\t\t\t\to.models[q.$collection].fire('query', next, q);\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// Execute and capture return {{{\n\t\t\t.then('result', function(next) {\n\t\t\t\tswitch (q.$want) {\n\t\t\t\t\tcase 'array':\n\t\t\t\t\t\tthis.query.exec(function(err, res) {\n\t\t\t\t\t\t\tif (err) return next(err);\n\n\t\t\t\t\t\t\tif (q.$one) {\n\t\t\t\t\t\t\t\tif (_.isEmpty(res)) {\n\t\t\t\t\t\t\t\t\tif (q.$errNotFound) {\n\t\t\t\t\t\t\t\t\t\tnext('Not found');\n\t\t\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\t\t\tnext(null, undefined);\n\t\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\t\tnext(null, res);\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t} else if (q.$count) {\n\t\t\t\t\t\t\t\tnext(null, res);\n\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\tnext(null, res);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t});\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase 'cursor':\n\t\t\t\t\t\tnext(null, this.query.cursor());\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tdefault:\n\t\t\t\t\t\tnext('Unknown $want type');\n\t\t\t\t}\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// Convert Mongoose Documents into Monoxide Documents {{{\n\t\t\t.then('result', function(next) {\n\t\t\t\t// Not wanting an array of data? 
- pass though the result\n\t\t\t\tif (q.$want != 'array') return next(null, this.result);\n\n\t\t\t\tif (this.result === undefined) {\n\t\t\t\t\tnext(null, undefined);\n\t\t\t\t} else if (q.$one) {\n\t\t\t\t\tif (q.$decorate) return next(null, this.result.toObject());\n\t\t\t\t\tnext(null, new o.monoxideDocument({\n\t\t\t\t\t\t$collection: q.$collection,\n\t\t\t\t\t\t$applySchema: q.$applySchema,\n\t\t\t\t\t\t$decorate: q.$decorate,\n\t\t\t\t\t\t$dirty: q.$dirty,\n\t\t\t\t\t}, this.result));\n\t\t\t\t} else if (q.$count) {\n\t\t\t\t\tnext(null, this.result);\n\t\t\t\t} else {\n\t\t\t\t\tnext(null, this.result.map(function(doc) {\n\t\t\t\t\t\tif (q.$decorate) return doc.toObject();\n\t\t\t\t\t\treturn new o.monoxideDocument({\n\t\t\t\t\t\t\t$collection: q.$collection,\n\t\t\t\t\t\t\t$applySchema: q.$applySchema,\n\t\t\t\t\t\t\t$decorate: q.$decorate,\n\t\t\t\t\t\t\t$dirty: q.$dirty,\n\t\t\t\t\t\t}, doc.toObject());\n\t\t\t\t\t}));\n\t\t\t\t}\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// Apply populates {{{\n\t\t\t.then(function(next) {\n\t\t\t\t// Not wanting an array of data? - pass though the result\n\t\t\t\tif (q.$want != 'array') return next(null, this.result);\n\n\t\t\t\tif (!q.$populate || !q.$populate.length || q.$count || q.$decorate === false || q.$plain === false || this.result === undefined) return next(); // Skip\n\t\t\t\tasync()\n\t\t\t\t\t.forEach(_.castArray(this.result), (next, doc) => {\n\t\t\t\t\t\tasync()\n\t\t\t\t\t\t\t.forEach(q.$populate, (next, pop) => {\n\t\t\t\t\t\t\t\tvar path = _.isString(pop) ? 
pop : pop.path;\n\t\t\t\t\t\t\t\tif (!o.utilities.isObjectId(_.get(doc, path))) return next(); // Already populated\n\n\t\t\t\t\t\t\t\tdoc.populate(path, next);\n\t\t\t\t\t\t\t})\n\t\t\t\t\t\t\t.end(next);\n\t\t\t\t\t})\n\t\t\t\t\t.end(next);\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// End {{{\n\t\t\t.end(function(err) {\n\t\t\t\tif (err) {\n\t\t\t\t\tdebug('get() error', err);\n\t\t\t\t\treturn callback(err);\n\t\t\t\t} else if (q.$count) {\n\t\t\t\t\tcallback(null, this.result);\n\t\t\t\t} else {\n\t\t\t\t\tcallback(null, this.result);\n\t\t\t\t}\n\t\t\t});\n\t\t\t// }}}\n\t\treturn o;\n\t});\n\t// }}}\n\n\t// .count([q], callback) {{{\n\t/**\n\t* Similar to query() but only return the count of possible results rather than the results themselves\n\t*\n\t* @name monoxide.count\n\t* @see monoxide.query\n\t*\n\t* @param {Object} q The object to process\n\t* @param {string} q.$collection The collection / model to query\n\t* @param {...*} [q.filter] Any other field (not beginning with '$') is treated as filtering criteria\n\t*\n\t* @param {function} callback(err,count) the callback to call on completion or error\n\t*\n\t* @return {Object} This chainable object\n\t*\n\t* @example\n\t* // Count all Widgets\n\t* monoxide.count({$collection: 'widgets'}, function(err, count) {\n\t* \tconsole.log('Number of Widgets:', count);\n\t* });\n\t*\n\t* @example\n\t* // Count all admin Users\n\t* monoxide.query({$collection: 'users', role: 'admin'}, function(err, count) {\n\t* \tconsole.log('Number of Admin Users:', count);\n\t* });\n\t*/\n\to.count = argy('[string|object] function', function MonoxideCount(q, callback) {\n\t\tif (argy.isType(q, 'string')) q = {$collection: q};\n\n\t\t// Glue count functionality to query\n\t\tq.$count = true;\n\n\t\treturn o.internal.query(q, callback);\n\t});\n\t// }}}\n\n\t// .save(item, [callback]) {{{\n\t/**\n\t* Save an existing Mongo document by its ID\n\t* If you wish to create a new document see the monoxide.create() function.\n\t* If the existing 
document ID is not found this function will execute the callback with an error\n\t*\n\t* @name monoxide.save\n\t* @fires save\n\t* @fires postSave\n\t*\n\t* @param {Object} q The object to process\n\t* @param {string} q.$collection The collection / model to query\n\t* @param {string} q.$id The ID of the document to save\n\t* @param {boolean} [q.$refetch=true] Whether to refetch the record after update, false returns `null` in the callback\n\t* @param {boolean} [q.$errNoUpdate=false] Raise an error if no documents were actually updated\n\t* @param {boolean} [q.$errBlankUpdate=false] Raise an error if no fields are updated\n\t* @param {boolean} [q.$returnUpdated=true] If true returns the updated document, if false it returns the document that was replaced\n\t* @param {boolean} [q.$version=true] Increment the `__v` property when updating\n\t* @param {...*} [q.field] Any other field (not beginning with '$') is treated as data to save\n\t*\n\t* @param {function} [callback(err,result)] Optional callback to call on completion or error\n\t*\n\t* @return {Object} This chainable object\n\t*\n\t* @example\n\t* // Save a Widget\n\t* monoxide.save({\n\t* \t$collection: 'widgets',\n\t* \t$id: 1234,\n\t* \tname: 'New name',\n\t* }, function(err, widget) {\n\t* \tconsole.log('Saved widget is now', widget);\n\t* });\n\t*/\n\to.save = argy('object [function]', function(q, callback) {\n\t\t_.defaults(q || {}, {\n\t\t\t$refetch: true, // Fetch and return the record when updated (false returns null)\n\t\t\t$errNoUpdate: false,\n\t\t\t$errBlankUpdate: false,\n\t\t\t$returnUpdated: true,\n\t\t\t$version: true,\n\t\t});\n\n\t\tasync()\n\t\t\t.set('metaFields', [\n\t\t\t\t'$id', // Mandatory field to specify while record to update\n\t\t\t\t'_id', // We also need to clip this from the output (as we cant write to it), but we need to pass it to hooks\n\t\t\t\t'$collection', // Collection to query to find the original record\n\t\t\t\t'$data', // Meta user-defined 
data\n\t\t\t\t'$refetch',\n\t\t\t\t'$errNoUpdate',\n\t\t\t\t'$errBlankUpdate',\n\t\t\t\t'$version',\n\t\t\t\t'$returnUpdated',\n\t\t\t])\n\t\t\t// Sanity checks {{{\n\t\t\t.then(function(next) {\n\t\t\t\tif (!q || _.isEmpty(q)) return next('No query given for save operation');\n\t\t\t\tif (!q.$collection) return next('$collection must be specified for save operation');\n\t\t\t\tif (!q.$id) return next('ID not specified');\n\t\t\t\tif (!o.models[q.$collection]) return next('Model not initalized');\n\t\t\t\tq._id = q.$id;\n\t\t\t\tnext();\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// Calculate $data if it is a function {{{\n\t\t\t.then(function(next) {\n\t\t\t\tif (!q.$data) return next();\n\t\t\t\tif (_.isFunction(q.$data)) {\n\t\t\t\t\tq.$data(function(err, data) {\n\t\t\t\t\t\tif (err) return next(err);\n\t\t\t\t\t\tq.$data = data;\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t\tnext();\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// Fire the 'save' hook on the model {{{\n\t\t\t.then(function(next) {\n\t\t\t\to.models[q.$collection].fire('save', next, q);\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// Peform the update {{{\n\t\t\t.then('newRec', function(next) {\n\t\t\t\tvar patch = _.omit(q, this.metaFields);\n\t\t\t\tif (_.isEmpty(patch)) {\n\t\t\t\t\tif (q.$errBlankUpdate) return next('Nothing to update');\n\t\t\t\t\tif (q.$refetch) {\n\t\t\t\t\t\treturn o.internal.get({$collection: q.$collection, $id: q.$id}, next);\n\t\t\t\t\t} else {\n\t\t\t\t\t\treturn next(null, {});\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t_.forEach(o.models[q.$collection].$oids, function(fkType, schemaPath) {\n\t\t\t\t\tif (!_.has(patch, schemaPath)) return; // Not patching this field anyway\n\n\t\t\t\t\tswitch(fkType.type) {\n\t\t\t\t\t\tcase 'objectId': // Convert the field to an OID if it isn't already\n\t\t\t\t\t\t\tif (_.has(q, schemaPath)) {\n\t\t\t\t\t\t\t\tvar newVal = _.get(q, schemaPath);\n\t\t\t\t\t\t\t\tif (!o.utilities.isObjectID(newVal))\n\t\t\t\t\t\t\t\t\t_.set(patch, schemaPath, 
o.utilities.objectID(newVal));\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\tcase 'objectIdArray': // Convert each item to an OID if it isn't already\n\t\t\t\t\t\t\tif (_.has(q, schemaPath)) {\n\t\t\t\t\t\t\t\tvar gotOIDs = _.get(q, schemaPath);\n\t\t\t\t\t\t\t\tif (_.isArray(gotOIDs)) {\n\t\t\t\t\t\t\t\t\t_.set(patch, schemaPath, gotOIDs.map(function(i, idx) {\n\t\t\t\t\t\t\t\t\t\treturn (!o.utilities.isObjectID(newVal))\n\t\t\t\t\t\t\t\t\t\t\t? o.utilities.objectID(i)\n\t\t\t\t\t\t\t\t\t\t\t: i;\n\t\t\t\t\t\t\t\t\t}));\n\t\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\t\tthrow new Error('Expected ' + schemaPath + ' to contain an array of OIDs but got ' + (typeof gotOIDs));\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t}\n\t\t\t\t});\n\n\t\t\t\tvar updateQuery = { _id: o.utilities.objectID(q.$id) };\n\t\t\t\tvar updatePayload = {$set: patch};\n\t\t\t\tvar updateOptions = { returnOriginal: !q.$returnUpdated };\n\t\t\t\tvar updateCallback = function(err, res) {\n\t\t\t\t\tif (q.$version && err && o.settings.versionIncErr.test(err.toString())) { // Error while setting `__v`\n\t\t\t\t\t\t// Remove __v as an increment operator + retry the operation\n\t\t\t\t\t\t// It would be good if $inc could assume `0` when null, but Mongo doesn't support that\n\t\t\t\t\t\tupdatePayload.$set.__v = 1;\n\t\t\t\t\t\tdelete updatePayload.$inc;\n\t\t\t\t\t\to.models[q.$collection].$mongoModel.findOneAndUpdate(updateQuery, updatePayload, updateOptions, updateCallback);\n\t\t\t\t\t} else if (err) {\n\t\t\t\t\t\tnext(err);\n\t\t\t\t\t} else {\n\t\t\t\t\t\t// This would only really happen if the record has gone away since we started updating\n\t\t\t\t\t\tif (q.$errNoUpdate && !res.ok) return next('No documents updated');\n\n\t\t\t\t\t\tif (!q.$refetch) return next(null, null);\n\t\t\t\t\t\tnext(null, new o.monoxideDocument({$collection: q.$collection}, res.value));\n\t\t\t\t\t}\n\t\t\t\t};\n\n\t\t\t\tif (q.$version) {\n\t\t\t\t\tupdatePayload.$inc = {'__v': 
1};\n\t\t\t\t\tdelete updatePayload.$set.__v; // Remove user updates of __v\n\t\t\t\t}\n\n\t\t\t\t// Actually perform the action\n\t\t\t\to.models[q.$collection].$mongoModel.findOneAndUpdate(\n\t\t\t\t\tupdateQuery, // What we are writing to\n\t\t\t\t\tupdatePayload, // What we are saving\n\t\t\t\t\tupdateOptions, // Options passed to Mongo\n\t\t\t\t\tupdateCallback\n\t\t\t\t);\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// Fire the 'postSave' hook {{{\n\t\t\t.then(function(next) {\n\t\t\t\to.models[q.$collection].fire('postSave', next, q, this.newRec);\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// End {{{\n\t\t\t.end(function(err) {\n\t\t\t\tif (err) {\n\t\t\t\t\tdebug('save() error', err);\n\t\t\t\t\tif (_.isFunction(callback)) callback(err);\n\t\t\t\t} else {\n\t\t\t\t\tif (_.isFunction(callback)) callback(null, this.newRec);\n\t\t\t\t}\n\t\t\t});\n\t\t\t// }}}\n\n\t\t\treturn o;\n\t});\n\t// }}}\n\n\t// .update(q, [with], [callback]) {{{\n\t/**\n\t* Update multiple documents\n\t*\n\t* @name monoxide.update\n\t* @fires update\n\t*\n\t* @param {Object} q The object to query by\n\t* @param {string} q.$collection The collection / model to query\n\t* @param {boolean} [q.$refetch=true] Return the newly updated record\n\t* @param {...*} [q.field] Any other field (not beginning with '$') is treated as filter data\n\t*\n\t* @param {Object} qUpdate The object to update into the found documents\n\t* @param {...*} [qUpdate.field] Data to save into every record found by `q`\n\t*\n\t* @param {function} [callback(err,result)] Optional callback to call on completion or error\n\t*\n\t* @return {Object} This chainable object\n\t*\n\t* @example\n\t* // Set all widgets to active\n\t* monoxide.update({\n\t* \t$collection: 'widgets',\n\t* \tstatus: 'active',\n\t* });\n\t*/\n\to.update = argy('object|string [object] [function]', function MonoxideUpdate(q, qUpdate, callback) {\n\t\tvar o = this;\n\t\tif (argy.isType(q, 'string')) q = {$collection: q};\n\n\t\t_.defaults(q || {}, {\n\t\t\t$refetch: true, // 
Fetch and return the record when updated (false returns null)\n\t\t});\n\n\t\tasync()\n\t\t\t.set('metaFields', [\n\t\t\t\t'$collection', // Collection to query to find the original record\n\t\t\t\t'$data', // Meta user-defined data\n\t\t\t\t'$refetch',\n\t\t\t])\n\t\t\t// Sanity checks {{{\n\t\t\t.then(function(next) {\n\t\t\t\tif (!q || _.isEmpty(q)) return next('No query given for get operation');\n\t\t\t\tif (!q.$collection) return next('$collection must be specified for get operation');\n\t\t\t\tif (!o.models[q.$collection]) return next('Model not initalized');\n\t\t\t\tnext();\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// Calculate $data if it is a function {{{\n\t\t\t.then(function(next) {\n\t\t\t\tif (!q.$data) return next();\n\t\t\t\tif (_.isFunction(q.$data)) {\n\t\t\t\t\tq.$data(function(err, data) {\n\t\t\t\t\t\tif (err) return next(err);\n\t\t\t\t\t\tq.$data = data;\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t\tnext();\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// Fire the 'update' hook {{{\n\t\t\t.then(function(next) {\n\t\t\t\to.models[q.$collection].fire('update', next, q);\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// Peform the update {{{\n\t\t\t.then('rawResponse', function(next) {\n\t\t\t\to.models[q.$collection].$mongooseModel.updateMany(_.omit(q, this.metaFields), _.omit(qUpdate, this.metaFields), {multi: true}, next);\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// End {{{\n\t\t\t.end(function(err) {\n\t\t\t\tif (err) {\n\t\t\t\t\tdebug('update() error', err);\n\t\t\t\t\tif (callback) callback(err);\n\t\t\t\t} else {\n\t\t\t\t\tif (callback) callback(null, this.newRec);\n\t\t\t\t}\n\t\t\t});\n\t\t\t// }}}\n\n\t\t\treturn o;\n\t});\n\t// }}}\n\n\t// .create(item, [callback]) {{{\n\t/**\n\t* Create a new Mongo document and return it\n\t* If you wish to save an existing document see the monoxide.save() function.\n\t*\n\t* @name monoxide.create\n\t*\n\t* @param {Object} q The object to process\n\t* @param {string} q.$collection The collection / model to query\n\t* @param {boolean} [q.$refetch=true] Return 
the newly create record\n\t* @param {boolean} [q.$version=true] Set the `__v` field to 0 when creating the document\n\t* @param {...*} [q.field] Any other field (not beginning with '$') is treated as data to save\n\t*\n\t* @param {function} [callback(err,result)] Optional callback to call on completion or error\n\t*\n\t* @return {Object} This chainable object\n\t*\n\t* @example\n\t* // Create a Widget\n\t* monoxide.save({\n\t* \t$collection: 'widgets',\n\t* \tname: 'New widget name',\n\t* }, function(err, widget) {\n\t* \tconsole.log('Created widget is', widget);\n\t* });\n\t*/\n\to.create = argy('object [function]', function MonoxideQuery(q, callback) {\n\t\t_.defaults(q || {}, {\n\t\t\t$refetch: true, // Fetch and return the record when created (false returns null)\n\t\t\t$version: true,\n\t\t});\n\n\t\tasync()\n\t\t\t.set('metaFields', [\n\t\t\t\t'$collection', // Collection to query to find the original record\n\t\t\t\t'$data', // Meta user-defined data\n\t\t\t\t'$refetch',\n\t\t\t\t'$version',\n\t\t\t])\n\t\t\t// Sanity checks {{{\n\t\t\t.then(function(next) {\n\t\t\t\tif (!q || _.isEmpty(q)) return next('No query given for save operation');\n\t\t\t\tif (!q.$collection) return next('$collection must be specified for save operation');\n\t\t\t\tif (!o.models[q.$collection]) return next('Model not initalized');\n\t\t\t\tnext();\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// Calculate $data if it is a function {{{\n\t\t\t.then(function(next) {\n\t\t\t\tif (!q.$data) return next();\n\t\t\t\tif (_.isFunction(q.$data)) {\n\t\t\t\t\tq.$data(function(err, data) {\n\t\t\t\t\t\tif (err) return next(err);\n\t\t\t\t\t\tq.$data = data;\n\t\t\t\t\t\tnext();\n\t\t\t\t\t});\n\t\t\t\t} else {\n\t\t\t\t\tnext();\n\t\t\t\t}\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// Coherse all OIDs (or arrays of OIDs) into their correct internal type {{{\n\t\t\t.then(function(next) {\n\t\t\t\t_.forEach(o.models[q.$collection].$oids, function(fkType, schemaPath) {\n\t\t\t\t\tswitch(fkType.type) {\n\t\t\t\t\t\tcase 
'objectId': // Convert the field to an OID if it isn't already\n\t\t\t\t\t\t\tif (_.has(q, schemaPath)) {\n\t\t\t\t\t\t\t\tvar newVal = _.get(q, schemaPath);\n\t\t\t\t\t\t\t\tif (!o.utilities.isObjectID(newVal))\n\t\t\t\t\t\t\t\t\t_.set(q, schemaPath, o.utilities.objectID(newVal));\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\tcase 'objectIdArray': // Convert each item to an OID if it isn't already\n\t\t\t\t\t\t\tif (_.has(q, schemaPath)) {\n\t\t\t\t\t\t\t\tvar gotOIDs = _.get(q, schemaPath);\n\t\t\t\t\t\t\t\tif (_.isArray(gotOIDs)) {\n\t\t\t\t\t\t\t\t\t_.set(q, schemaPath, gotOIDs.map(function(i, idx) {\n\t\t\t\t\t\t\t\t\t\treturn (!o.utilities.isObjectID(newVal))\n\t\t\t\t\t\t\t\t\t\t\t? o.utilities.objectID(i)\n\t\t\t\t\t\t\t\t\t\t\t: i;\n\t\t\t\t\t\t\t\t\t}));\n\t\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\t\tthrow new Error('Expected ' + schemaPath + ' to contain an array of OIDs but got ' + (typeof gotOIDs));\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t\tnext();\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// Add version information if $version==true {{{\n\t\t\t.then(function(next) {\n\t\t\t\tif (!q.$version) return next();\n\t\t\t\tq.__v = 0;\n\t\t\t\tnext();\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// Create record {{{\n\t\t\t.then('createDoc', function(next) { // Compute the document we will create\n\t\t\t\tnext(null, new o.monoxideDocument({\n\t\t\t\t\t$collection: q.$collection,\n\t\t\t\t\t$dirty: true, // Mark all fields as modified (and not bother to compute the clean markers)\n\t\t\t\t}, _.omit(q, this.metaFields)));\n\t\t\t})\n\t\t\t.then(function(next) {\n\t\t\t\to.models[q.$collection].fire('create', next, this.createDoc);\n\t\t\t})\n\t\t\t.then('rawResponse', function(next) {\n\t\t\t\to.models[q.$collection].$mongoModel.insertOne(this.createDoc.toMongoObject(), next);\n\t\t\t})\n\t\t\t.then(function(next) {\n\t\t\t\to.models[q.$collection].fire('postCreate', next, q, this.createDoc);\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// 
Refetch record {{{\n\t\t\t.then('newRec', function(next) {\n\t\t\t\tif (!q.$refetch) return next(null, null);\n\t\t\t\to.internal.query({\n\t\t\t\t\t$collection: q.$collection,\n\t\t\t\t\t$id: this.rawResponse.insertedId.toString(),\n\t\t\t\t}, function(err, res) {\n\t\t\t\t\tif (err == 'Not found') return next('Document creation failed');\n\t\t\t\t\tnext(err, res);\n\t\t\t\t});\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// End {{{\n\t\t\t.end(function(err) {\n\t\t\t\tif (err) {\n\t\t\t\t\tdebug('create() error', err);\n\t\t\t\t\tif (_.isFunction(callback)) callback(err);\n\t\t\t\t} else {\n\t\t\t\t\tif (_.isFunction(callback)) callback(null, this.newRec);\n\t\t\t\t}\n\t\t\t});\n\t\t\t// }}}\n\n\t\t\treturn o;\n\t});\n\t// }}}\n\n\t// .delete(item, [callback]) {{{\n\t/**\n\t* Delete a Mongo document by its ID\n\t* This function has two behaviours - it will, by default, only delete a single record by its ID. If `q.$multiple` is true it will delete by query.\n\t* If `q.$multiple` is false and the document is not found (by `q.$id`) this function will execute the callback with an error\n\t* Delete will only work with no parameters if monoxide.settings.removeAll is truthy as an extra safety check\n\t*\n\t* @name monoxide.delete\n\t*\n\t* @param {Object} [q] The object to process\n\t* @param {string} [q.$collection] The collection / model to query\n\t* @param {string} [q.$id] The ID of the document to delete (if you wish to do a remove based on query set q.$query=true)\n\t* @param {boolean} [q.$multiple] Allow deletion of multiple records by query\n\t* @param {boolean} [q.$errNotFound] Raise an error if a specifically requested document is not found (requires $id)\n\t*\n\t* @param {function} [callback(err,result)] Optional callback to call on completion or error\n\t*\n\t* @return {Object} This chainable object\n\t*/\n\to.delete = o.remove = argy('object [function]', function MonoxideQuery(q, callback) {\n\t\t_.defaults(q || {}, {\n\t\t\t$errNotFound: true, // During raise an error 
if $id is specified but not found to delete\n\t\t});\n\n\t\tasync()\n\t\t\t.set('metaFields', [\n\t\t\t\t'$id', // Mandatory field to specify while record to update\n\t\t\t\t'$collection', // Collection to query to find the original record\n\t\t\t\t'$data', // Meta user-defined data\n\t\t\t\t'$multiple', // Whether to allow deletion by query\n\t\t\t\t'$errNotFound',\n\t\t\t])\n\t\t\t// Sanity checks {{{\n\t\t\t.then(function(next) {\n\t\t\t\tif (!q || _.isEmpty(q)) return next('No query given for delete operation');\n\t\t\t\tif (!q.$collection) return next('$collection must be specified for delete operation');\n\t\t\t\tif (!q.$id && !q.$multiple) return next('$id or $multiple must be speciied during delete operation');\n\n\t\t\t\tif (!o.settings.removeAll && !q.$id && _.isEmpty(_.omit(q, this.metaFields))) { // Apply extra checks to make sure we are not nuking everything if we're not allowed\n\t\t\t\t\treturn next('delete operation not allowed with empty query');\n\t\t\t\t}\n\t\t\t\tnext();\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// Calculate $data if it is a function {{{\n\t\t\t.then(function(next) {\n\t\t\t\tif (!q.$data) return next();\n\t\t\t\tif (_.isFunction(q.$data)) {\n\t\t\t\t\tq.$data(function(err, data) {\n\t\t\t\t\t\tif (err) return next(err);\n\t\t\t\t\t\tq.$data = data;\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t\tnext();\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// Delete record {{{\n\t\t\t.then(function(next) {\n\t\t\t\tif (q.$multiple) { // Multiple delete operation\n\t\t\t\t\to.internal.query(_.merge(_.omit(q, this.metaFields), {$collection: q.$collection, $select: 'id'}), function(err, rows) {\n\t\t\t\t\t\tasync()\n\t\t\t\t\t\t\t.forEach(rows, function(next, row) {\n\t\t\t\t\t\t\t\to.internal.delete({$collection: q.$collection, $id: row._id}, next);\n\t\t\t\t\t\t\t})\n\t\t\t\t\t\t\t.end(next);\n\t\t\t\t\t});\n\t\t\t\t} else { // Single item delete\n\t\t\t\t\t// Check that the hook returns ok\n\t\t\t\t\to.models[q.$collection].fire('delete', function(err) {\n\t\t\t\t\t\t// Now 
actually delete the item\n\t\t\t\t\t\to.models[q.$collection].$mongoModel.deleteOne({_id: o.utilities.objectID(q.$id)}, function(err, res) {\n\t\t\t\t\t\t\tif (err) return next(err);\n\t\t\t\t\t\t\tif (q.$errNotFound && !res.result.ok) return next('Not found');\n\t\t\t\t\t\t\t// Delete was sucessful - call event then move next\n\t\t\t\t\t\t\to.models[q.$collection].fire('postDelete', next, {_id: q.$id});\n\t\t\t\t\t\t});\n\t\t\t\t\t}, {_id: q.$id});\n\t\t\t\t}\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// End {{{\n\t\t\t.end(function(err) {\n\t\t\t\tif (err) {\n\t\t\t\t\tdebug('delete() error', err);\n\t\t\t\t\tif (callback) callback(err);\n\t\t\t\t} else {\n\t\t\t\t\tif (callback) callback(null, this.newRec);\n\t\t\t\t}\n\t\t\t});\n\t\t\t// }}}\n\n\t\t\treturn o;\n\t});\n\t// }}}\n\n\t// .meta(item, [callback]) {{{\n\t/**\n\t* Return information about a Mongo collection schema\n\t*\n\t* @name monoxide.meta\n\t*\n\t* @param {Object} q The object to process\n\t* @param {string} q.$collection The collection / model to examine\n\t* @param {boolean} [q.$collectionEnums=false] Provide all enums as a collection object instead of an array\n\t* @param {boolean} [q.$filterPrivate=true] Ignore all private fields\n\t* @param {boolean} [q.$prototype=false] Provide the $prototype meta object\n\t* @param {boolean} [q.$indexes=false] Include whether a field is indexed\n\t*\n\t* @param {function} [callback(err,result)] Optional callback to call on completion or error\n\t*\n\t* @return {Object} This chainable object\n\t*\n\t* @example\n\t* // Describe a collection\n\t* monoxide.meta({$collection: 'widgets'}, function(err, res) {\n\t* \tconsole.log('About the widget collection:', res);\n\t* });\n\t*/\n\to.meta = argy('[object] function', function MonoxideMeta(q, callback) {\n\t\t_.defaults(q || {}, {\n\t\t\t$filterPrivate: true,\n\t\t\t$prototype: false,\n\t\t\t$indexes: false,\n\t\t});\n\n\t\tasync()\n\t\t\t.set('metaFields', [\n\t\t\t\t'$collection', // Collection to query to find the 
original record
				'$data', // Meta user-defined data
				'$filterPrivate', // Filter out /^_/ fields
				'$collectionEnums', // Convert enums into a collection (with `id` + `title` fields per object)
				'$prototype',
				'$indexes',
			])
			// Sanity checks {{{
			.then(function(next) {
				if (!q || _.isEmpty(q)) return next('No query given for meta operation');
				if (!q.$collection) return next('$collection must be specified for meta operation');
				if (!o.models[q.$collection]) return next('Cannot find collection to extract its meta information: ' + q.$collection);
				next();
			})
			// }}}
			// Retrieve the meta information {{{
			.then('meta', function(next) {
				// Flatten the mongoose schema path map into an array sorted by dotted path name
				var sortedPaths = _(o.models[q.$collection].$mongooseModel.schema.paths)
					.map((v,k) => v) // Keep only the schema-type objects; the key duplicates v.path
					.sortBy('path')
					.value();

				var meta = {
					_id: {type: 'objectid', index: true}, // FIXME: Is it always the case that a doc has an ID?
				};

				_.forEach(sortedPaths, function(path) {
					var id = path.path;

					if (q.$filterPrivate && _.last(path.path.split('.')).startsWith('_')) return; // Skip private fields

					// Map the mongoose instance type onto a simple descriptor object
					var info = {};
					switch (path.instance.toLowerCase()) {
						case 'string':
							info.type = 'string';
							if (path.enumValues && path.enumValues.length) {
								if (q.$collectionEnums) {
									// Expose enums as [{id, title}] with a human-readable title
									info.enum = path.enumValues.map(e => ({
										id: e,
										title: _.startCase(e),
									}));
								} else {
									info.enum = path.enumValues;
								}
							}
							break;
						case 'number':
							info.type = 'number';
							break;
						case 'date':
							info.type = 'date';
							break;
						case 'boolean':
							info.type = 'boolean';
							break;
						case 'array':
							info.type = 'array';
							break;
						case 'object':
							info.type = 'object';
							break;
						case 'objectid':
							info.type = 'objectid';
							if (_.has(path, 'options.ref')) info.ref = path.options.ref; // Foreign-key style pointer to another collection
							break;
						default:
							debug('Unknown Mongo data type during meta extract on ' + q.$collection + ':', path.instance.toLowerCase());
					}

					// Extract default value if its not a function (otherwise return [DYNAMIC])
					// NOTE(review): this truthiness check skips falsy defaults (0, false, '') - confirm whether those should be reported
					if (path.defaultValue) info.default = argy.isType(path.defaultValue, 'scalar') ? path.defaultValue : '[DYNAMIC]';

					if (q.$indexes && path._index) info.index = true;

					meta[id] = info;
				});

				next(null, meta);
			})
			// }}}
			// Construct the prototype if $prototype=true {{{
			.then(function(next) {
				if (!q.$prototype) return next();

				// Build a skeleton document from every static default discovered above
				var prototype = this.meta.$prototype = {};

				_.forEach(this.meta, function(v, k) {
					if (!_.has(v, 'default')) return;
					if (v.default == '[DYNAMIC]') return; // Ignore dynamic values
					_.set(prototype, k, v.default);
				});

				next();
			})
			// }}}
			// End {{{
			.end(function(err) {
				if (err) {
					debug('meta() error', err);
					if (callback) callback(err);
				} else {
					if (callback) callback(null, this.meta);
				}
			});
			// }}}

			return o;
	});
	// }}}

	// .runCommand(command, [callback]) {{{
	/**
	* Run an internal MongoDB command and fire an optional callback on the result
	*
	* @name monoxide.runCommand
	*
	* @param {Object} cmd The command to process
	* @param {function} [callback(err,result)] Optional callback to call on completion or error
	* 
@return {Object} This chainable object\n\t* @example\n\t*/\n\to.runCommand = argy('object [function]', function MonoxideRunCommand(cmd, callback) {\n\t\to.connection.db.command(cmd, callback);\n\t\treturn o;\n\t});\n\t// }}}\n\n\t// .queryBuilder() - query builder {{{\n\t/**\n\t* Returns data from a Monoxide model\n\t* @class\n\t* @name monoxide.queryBuilder\n\t* @return {monoxide.queryBuilder}\n\t* @fires queryBuilder Fired as (callback, qb) when a new queryBuilder object is created\n\t*/\n\to.queryBuilder = function monoxideQueryBuilder() {\n\t\tvar qb = this;\n\t\tqb.$MONOXIDE = true;\n\t\tqb.query = {};\n\n\t\t// qb.find(q, cb) {{{\n\t\t/**\n\t\t* Add a filtering function to an existing query\n\t\t* @name monoxide.queryBuilder.find\n\t\t* @memberof monoxide.queryBuilder\n\t\t* @param {Object|function} [q] Optional filtering object or callback (in which case we act as exec())\n\t\t* @param {function} [callback] Optional callback. If present this is the equivelent of calling exec()\n\t\t* @return {monoxide.queryBuilder} This chainable object\n\t\t*/\n\t\tqb.find = argy('[object|string] [function]', function(q, callback) {\n\t\t\tif (argy.isType(q, 'object')) {\n\t\t\t\t_.merge(qb.query, q);\n\t\t\t} else {\n\t\t\t\tq = {$id: q};\n\t\t\t}\n\n\t\t\tif (callback) qb.exec(callback);\n\n\t\t\treturn qb;\n\t\t});\n\t\t// }}}\n\n\t\t// qb.select(q, cb) {{{\n\t\t/**\n\t\t* Add select criteria to an existing query\n\t\t* If this function is passed a falsy value it is ignored\n\t\t* @name monoxide.queryBuilder.select\n\t\t* @memberof monoxide.queryBuilder\n\t\t* @param {Object|Array|string} [q] Select criteria, for strings or arrays of strings use the field name optionally prefixed with '-' for omission. For Objects use `{field: 1|-1}`\n\t\t* @param {function} [callback] Optional callback. 
If present this is the equivelent of calling exec()\n\t\t* @return {monoxide.queryBuilder} This chainable object\n\t\t*/\n\t\tqb.select = argy('string|array [function]', function(q, callback) {\n\t\t\targy(arguments)\n\t\t\t\t.ifForm(['string', 'string function'], function(id, callback) {\n\t\t\t\t\tif (qb.query.$select) {\n\t\t\t\t\t\tqb.query.$select.push(id);\n\t\t\t\t\t} else {\n\t\t\t\t\t\tqb.query.$select = [id];\n\t\t\t\t\t}\n\t\t\t\t\tif (callback) q.exec(callback);\n\t\t\t\t})\n\t\t\t\t.ifForm(['array', 'array function'], function(ids, callback) {\n\t\t\t\t\tif (qb.query.$select) {\n\t\t\t\t\t\tqb.query.$select.push.apply(this, ids);\n\t\t\t\t\t} else {\n\t\t\t\t\t\tqb.query.$select = ids;\n\t\t\t\t\t}\n\t\t\t\t\tif (callback) q.exec(callback);\n\t\t\t\t})\n\n\t\t\treturn qb;\n\t\t});\n\t\t// }}}\n\n\t\t// qb.sort(q, cb) {{{\n\t\t/**\n\t\t* Add sort criteria to an existing query\n\t\t* If this function is passed a falsy value it is ignored\n\t\t* @name monoxide.queryBuilder.sort\n\t\t* @memberof monoxide.queryBuilder\n\t\t* @param {Object|Array|string} [q] Sorting criteria, for strings or arrays of strings use the field name optionally prefixed with '-' for decending search order. For Objects use `{ field: 1|-1|'asc'|'desc'}`\n\t\t* @param {function} [callback] Optional callback. 
If present this is the equivelent of calling exec()\n\t\t* @return {monoxide.queryBuilder} This chainable object\n\t\t*/\n\t\tqb.sort = argy('string|array|undefined [function]', function(q, callback) {\n\t\t\targy(arguments)\n\t\t\t\t.ifForm('', function() {})\n\t\t\t\t.ifForm('undefined', function() {})\n\t\t\t\t.ifForm(['string', 'string function'], function(field, callback) {\n\t\t\t\t\tif (qb.query.$sort) {\n\t\t\t\t\t\tqb.query.$sort.push(field);\n\t\t\t\t\t} else {\n\t\t\t\t\t\tqb.query.$sort = [field];\n\t\t\t\t\t}\n\n\t\t\t\t\tif (callback) qb.exec(callback);\n\t\t\t\t})\n\t\t\t\t.ifForm(['array', 'array function'], function(fields, callback) {\n\t\t\t\t\tif (qb.query.$sort) {\n\t\t\t\t\t\tqb.query.$sort.push.apply(this, fields);\n\t\t\t\t\t} else {\n\t\t\t\t\t\tqb.query.$sort = fields;\n\t\t\t\t\t}\n\n\t\t\t\t\tif (callback) qb.exec(callback);\n\t\t\t\t})\n\n\t\t\treturn qb;\n\t\t});\n\t\t// }}}\n\n\t\t// qb.limit(q, cb) {{{\n\t\t/**\n\t\t* Add limit criteria to an existing query\n\t\t* If this function is passed a falsy value the limit is removed\n\t\t* @name monoxide.queryBuilder.limit\n\t\t* @memberof monoxide.queryBuilder\n\t\t* @param {number|string} q Limit records to this number (it will be parsed to an Int)\n\t\t* @param {function} [callback] Optional callback. 
If present this is the equivelent of calling exec()\n\t\t* @return {monoxide.queryBuilder} This chainable object\n\t\t*/\n\t\tqb.limit = argy('[falsy|string|number] [function]', function(q, callback) {\n\t\t\tif (!q) {\n\t\t\t\tdelete qb.query.$limit;\n\t\t\t} else if (argy.isType(q, 'string')) {\n\t\t\t\tqb.query.$limit = parseInt(q);\n\t\t\t} else {\n\t\t\t\tqb.query.$limit = q;\n\t\t\t}\n\n\t\t\tif (callback) return qb.exec(callback);\n\n\t\t\treturn qb;\n\t\t});\n\t\t// }}}\n\n\t\t// qb.skip(q, cb) {{{\n\t\t/**\n\t\t* Add skip criteria to an existing query\n\t\t* If this function is passed a falsy value the skip offset is removed\n\t\t* @name monoxide.queryBuilder.skip\n\t\t* @memberof monoxide.queryBuilder\n\t\t* @param {number} q Skip this number of records (it will be parsed to an Int)\n\t\t* @param {function} [callback] Optional callback. If present this is the equivelent of calling exec()\n\t\t* @return {monoxide.queryBuilder} This chainable object\n\t\t*/\n\t\tqb.skip = argy('[falsy|string|number] [function]', function(q, callback) {\n\t\t\tif (!q) {\n\t\t\t\tdelete qb.query.$skip;\n\t\t\t} else if (argy.isType(q, 'string')) {\n\t\t\t\tqb.query.$skip = parseInt(q);\n\t\t\t} else {\n\t\t\t\tqb.query.$skip = q;\n\t\t\t}\n\n\t\t\tif (callback) return qb.exec(callback);\n\n\t\t\treturn qb;\n\t\t});\n\t\t// }}}\n\n\t\t// qb.populate(q, cb) {{{\n\t\t/**\n\t\t* Add population criteria to an existing query\n\t\t* If this function is passed a falsy value it is ignored\n\t\t* @name monoxide.queryBuilder.populate\n\t\t* @memberof monoxide.queryBuilder\n\t\t* @param {Array|string} [q] Population criteria, for strings or arrays of strings use the field name\n\t\t* @param {function} [callback] Optional callback. 
If present this is the equivelent of calling exec()\n\t\t* @return {monoxide.queryBuilder} This chainable object\n\t\t*/\n\t\tqb.populate = argy('string|array [function]', function(q, callback) {\n\t\t\targy(arguments)\n\t\t\t\t.ifForm('', function() {})\n\t\t\t\t.ifForm(['string', 'string function'], function(field, callback) {\n\t\t\t\t\tif (qb.query.$populate) {\n\t\t\t\t\t\tqb.query.$populate.push(field);\n\t\t\t\t\t} else {\n\t\t\t\t\t\tqb.query.$populate = [field];\n\t\t\t\t\t}\n\n\t\t\t\t\tif (callback) qb.exec(callback);\n\t\t\t\t})\n\t\t\t\t.ifForm(['array', 'array function'], function(fields, callback) {\n\t\t\t\t\tif (qb.query.$populate) {\n\t\t\t\t\t\tqb.query.$populate.push.apply(this, fields);\n\t\t\t\t\t} else {\n\t\t\t\t\t\tqb.query.$populate = fields;\n\t\t\t\t\t}\n\n\t\t\t\t\tif (callback) qb.exec(callback);\n\t\t\t\t})\n\n\t\t\treturn qb;\n\t\t});\n\t\t// }}}\n\n\t\t// qb.exec(cb) {{{\n\t\t/**\n\t\t* Execute the query and return the error and any results\n\t\t* @name monoxide.queryBuilder.exec\n\t\t* @memberof monoxide.queryBuilder\n\t\t* @param {function} callback(err,result)\n\t\t* @return {monoxide.queryBuilder} This chainable object\n\t\t*/\n\t\tqb.exec = argy('function', function(callback) {\n\t\t\treturn o.internal.query(qb.query, callback);\n\t\t});\n\t\t// }}}\n\n\t\t// qb.optional() {{{\n\t\t/**\n\t\t* Convenience function to set $errNotFound\n\t\t* @name monoxide.queryBuilder.optional\n\t\t* @memberof monoxide.queryBuilder\n\t\t* @param {Object|function} [isOptional=true] Whether the return from this query should NOT throw an error if nothing was found\n\t\t* @param {function} [callback] Optional callback. 
If present this is the equivelent of calling exec()\n\t\t* @return {monoxide.queryBuilder} This chainable object\n\t\t*/\n\t\tqb.optional = argy('[boolean|null|undefined] [function]', function(isOptional, callback) {\n\t\t\tif (argy.isType(isOptional, ['null', 'undefined'])) {\n\t\t\t\tqb.query.$errNotFound = false;\n\t\t\t} else {\n\t\t\t\tqb.query.$errNotFound = !! isOptional;\n\t\t\t}\n\n\t\t\tif (callback) qb.exec(callback);\n\n\t\t\treturn qb;\n\t\t});\n\t\t// }}}\n\n\t\t// qb.promise() {{{\n\t\t/**\n\t\t* Convenience function to execute the query and return a promise with the result\n\t\t* @name monoxide.queryBuilder.promise\n\t\t* @memberof monoxide.queryBuilder\n\t\t* @return {Mongoose.queryBuilder} This chainable object\n\t\t*/\n\t\tqb.promise = function(callback) {\n\t\t\treturn new Promise(function(resolve, reject) {\n\t\t\t\to.internal.query(qb.query, function(err, result) {\n\t\t\t\t\tif (err) {\n\t\t\t\t\t\treject(err);\n\t\t\t\t\t} else {\n\t\t\t\t\t\tresolve(result);\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t});\n\t\t};\n\n\t\t// Wrap all promise functions in a convnience wrapper\n\t\t['then', 'catch', 'finally'].forEach(f => {\n\t\t\tqb[f] = function() {\n\t\t\t\tvar p = qb.promise();\n\t\t\t\treturn p[f].apply(p, arguments);\n\t\t\t};\n\t\t});\n\t\t// }}}\n\n\n\t\t// qb.cursor() {{{\n\t\t/**\n\t\t* Convenience function to return the generated cursor back from a queryBuilder object\n\t\t* @name monoxide.queryBuilder.cursor\n\t\t* @memberof monoxide.queryBuilder\n\t\t* @param {function} callback(err, cursor)\n\t\t* @return {Mongoose.queryBuilder} This chainable object\n\t\t*/\n\t\tqb.cursor = function(callback) {\n\t\t\tqb.query.$want = 'cursor';\n\t\t\treturn o.internal.query(qb.query, callback);\n\t\t};\n\t\t// }}}\n\n\t\to.fireImmediate('queryBuilder', qb);\n\n\t\treturn qb;\n\t};\n\t// }}}\n\n\t// .monoxideModel([options]) - monoxide model instance {{{\n\t/**\n\t* @class\n\t*/\n\to.monoxideModel = argy('string|object', function monoxideModel(settings) 
{
		var mm = this;

		// Allow the shorthand form `new monoxideModel('collectionName')`
		if (argy.isType(settings, 'string')) settings = {$collection: settings};

		// Sanity checks {{{
		if (!settings.$collection) throw new Error('new MonoxideModel({$collection: }) requires at least \'$collection\' to be specified');
		if (!o.connection) throw new Error('Trying to create a MonoxideModel before a connection has been established');
		if (!o.connection.db) throw new Error('Connection does not look like a MongoDB-Core object');
		// }}}

		/**
		* The raw MongoDB-Core model
		* @var {Object}
		*/
		mm.$mongoModel = o.connection.db.collection(settings.$collection.toLowerCase());
		if (!mm.$mongoModel) throw new Error('Model not found in MongoDB-Core - did you forget to call monoxide.schema(\'name\', ) first?');

		/**
		* The raw Mongoose model
		* @deprecated This will eventually go away and be replaced with raw `mm.$mongoModel` calls
		* @var {Object}
		*/
		mm.$mongooseModel = o.connection.base.models[settings.$collection.toLowerCase()];

		/**
		* Holder for all OID information
		* This can either be the `._id` of the object, sub-documents, array pointers or object pointers
		* @see monoxide.utilities.extractFKs
		* @var {Object}
		*/
		mm.$oids = _.has(mm, '$mongooseModel.schema') ? o.utilities.extractFKs(mm.$mongooseModel.schema) : {};

		/**
		* Optional model schema
		* NOTE: This is the user defined schema as-is NOT the computed $monogooseModel.schema
		* @var {Object}
		*/
		mm.$schema = settings.$schema;

		mm.$collection = settings.$collection; // Collection name this model wraps
		mm.$methods = {}; // Per-document methods registered via mm.method()
		mm.$virtuals = {}; // Virtual property descriptors registered via mm.virtual()
		mm.$hooks = {}; // Event hooks registered via mm.hook()
		mm.$data = {}; // Arbitrary user data attached to the model


		/**
		* Shortcut function to create a monoxide.queryBuilder object and immediately start filtering
		* This also sets $count=true in the queryBuilder
		* @name monoxide.monoxideModel.count
		* @see monoxide.queryBuilder.find
		*
		* @param {Object} [q] Optional filtering object
		* @param {function} [callback] Optional callback. If present this is the equivalent of calling exec()
		* @return {monoxide.queryBuilder}
		*/
		mm.count = function(q, callback) {
			return (new o.queryBuilder())
				.find({
					$collection: mm.$collection, // Set the collection from the model
					$count: true,
				})
				.find(q, callback); // Then re-parse the find query into the new queryBuilder
		};

		/**
		* Shortcut function to create a monoxide.queryBuilder object and immediately start filtering
		* @name monoxide.monoxideModel.find
		* @see monoxide.queryBuilder.find
		*
		* @param {Object} [q] Optional filtering object
		* @param {function} [callback] Optional callback. If present this is the equivalent of calling exec()
		* @return {monoxide.queryBuilder}
		*/
		mm.find = function(q, callback) {
			return (new o.queryBuilder())
				.find({$collection: mm.$collection}) // Set the collection from the model
				.find(q, callback); // Then re-parse the find query into the new queryBuilder
		};


		/**
		* Shortcut function to create a monoxide.queryBuilder object and immediately start filtering
		* This also sets $one=true in the queryBuilder
		* @name monoxide.monoxideModel.findOne
		* @see monoxide.queryBuilder.find
		*
		* @param {Object} [q] Optional filtering object
		* @param {function} [callback] Optional callback. If present this is the equivalent of calling exec()
		* @return {monoxide.queryBuilder}
		*/
		mm.findOne = function(q, callback) {
			if (argy.isType(q, 'string')) throw new Error('Refusing to allow findOne(String). Use findOneByID if you wish to specify only the ID');

			return (new o.queryBuilder())
				.find({
					$collection: mm.$collection, // Set the collection from the model
					$one: true, // Return a single object
				})
				.find(q, callback); // Then re-parse the find query into the new queryBuilder
		};


		/**
		* Shortcut function to create a monoxide.queryBuilder object and immediately start filtering
		* This also sets $id=q in the queryBuilder
		* @name monoxide.monoxideModel.findOneByID
		* @see monoxide.queryBuilder.find
		*
		* @param {Object} [q] Optional filtering object
		* @param {function} [callback] Optional callback. 
If present this is the equivalent of calling exec()
		* @return {monoxide.queryBuilder}
		*/
		mm.findOneByID = function(q, callback) {
			// Deal with arguments {{{
			if (argy.isType(q, 'string')) {
				// All ok
			} else if (argy.isType(q, 'object') && q.toString().length) { // Input is an object but we can convert it to something useful (e.g. a raw ObjectID)
				q = q.toString();
			} else {
				throw new Error('Unknown function call pattern');
			}
			// }}}

			return (new o.queryBuilder())
				.find({
					$collection: mm.$collection, // Set the collection from the model
					$id: q,
				})
				.find(q, callback); // Then re-parse the find query into the new queryBuilder
		};

		/**
		* Alias of findOneByID
		* @see monoxide.queryBuilder.find
		*/
		mm.findOneById = mm.findOneByID;


		/**
		* Shortcut function to create a new record within a collection
		* @name monoxide.monoxideModel.create
		* @see monoxide.create
		*
		* @param {Object} [q] Optional document contents
		* @param {function} [callback] Optional callback
		* @return {monoxide.monoxideModel} The chainable monoxideModel
		*/
		mm.create = argy('object [function]', function(q, callback) {
			q.$collection = mm.$collection;
			o.internal.create(q, callback);
			return mm;
		});


		/**
		* Shortcut to invoke update on a given model
		* @name monoxide.monoxideModel.update
		* @see monoxide.update
		* @param {Object} q The filter to query by
		* @param {Object} qUpdate The object to update into the found documents
		* @param {function} [callback(err,result)] Optional callback to call on completion or error
		* @return {Object} This chainable object
		*/
		mm.update = argy('object object [function]', function(q, qUpdate, callback) {
			q.$collection = mm.$collection;
			o.internal.update(q, qUpdate, callback);
			return mm;
		});


		/**
		* Shortcut function to remove a number of rows based on a query
		* @name monoxide.monoxideModel.remove
		* @see monoxide.delete
		*
		* @param {Object} [q] Optional filtering object
		* @param {function} [callback] Optional callback
		* @return {monoxide}
		*/
		mm.remove = argy('[object] [function]', function(q, callback) {
			return o.internal.delete(_.merge({}, q, {$collection: mm.$collection, $multiple: true}), callback);
		});


		/**
		* Alias of remove()
		* @see monoxide.remove()
		*/
		mm.delete = mm.remove;


		/**
		* Run an aggregation pipeline on a model
		* @param {array} q The aggregation pipeline to process
		* @param {function} callback Callback to fire as (err, data)
		* @return {Object} This chainable object
		*/
		mm.aggregate = argy('array function', function(q, callback) {
			o.internal.aggregate({
				$collection: mm.$collection,
				$stages: q,
			}, callback)

			return mm;
		});


		/**
		* Add a method to a all documents returned from this model
		* A method is a user defined function which extends the `monoxide.monoxideDocument` prototype
		* @param {string} name The function name to add as a static method
		* @param {function} func The function to add as a static method
		* @return {monoxide.monoxideModel} The chainable monoxideModel
		*/
		mm.method = function(name, func) {
			mm.$methods[name] = func;
			return mm;
		};


		/**
		* Add a static method to a model
		* A static is a user defined function which extends the `monoxide.monoxideModel` prototype
		* @param {string} name The function name to add as a static method
		* @param {function} func The function to add as a static method
		* @return {monoxide.monoxideModel} The chainable monoxideModel
		*/
		mm.static = function(name, func) {
			mm[name] = func;
			return mm;
		};


		/**
		* Define a virtual (a handler when a property gets set or read)
		* @param {string|Object} name The virtual name to apply or the full virtual object (must pertain to the Object.defineProperty descriptor)
		* @param {function} getCallback The get function to call when the virtual value is read
		* @param {function} setCallback The set function to call when the virtual value changes
		* @return {monoxide.monoxideModel} The chainable monoxideModel
		*/
		mm.virtual = argy('string [function|falsy] [function|falsy]', function(name, getCallback, setCallback) {
			var q = {};
			if (argy.isType(getCallback, 'function')) q.get = getCallback;
			if (argy.isType(setCallback, 'function')) q.set = setCallback;

			mm.$virtuals[name] = q;
			return mm;
		});


		/**
		* Return whether a model has virtuals
		* @return {boolean} Whether any virtuals are present
		*/
		mm.hasVirtuals = function() {
			return (Object.keys(mm.$virtuals).length > 0);
		};


		/**
		* Attach a hook to a model
		* A hook is exactly the same as a eventEmitter.on() event but must return a callback
		* Multiple hooks can be attached and all will be called in parallel on certain events such as 'save'
		* All hooks must return non-errors to proceed with the operation
		* @param {string} eventName The event ID to hook against
		* @param {function} callback The callback to run when hooked, NOTE: Any falsy callbacks are ignored
		* @return {monoxide.monoxideModel} The chainable monoxideModel
		*/
		mm.hook = function(eventName, callback) {
			if (!callback) return mm; // Ignore falsy callbacks
			if (!mm.$hooks[eventName]) mm.$hooks[eventName] = [];
			mm.$hooks[eventName].push(callback);
			return mm;
		};


		/**
		* Return whether a model has a specific hook
		* If an array is passed the result is whether the model has none or all of the specified 
hooks\n\t\t* @param {string|array|undefined|null} hooks The hook(s) to query, if undefined or null this returns if any hooks are present\n\t\t* @return {boolean} Whether the hook(s) is present\n\t\t*/\n\t\tmm.hasHook = argy('[string|array]', function(hooks) {\n\t\t\tvar out;\n\n\t\t\targy(arguments)\n\t\t\t\t.ifForm('', function() {\n\t\t\t\t\tout = !_.isEmpty(mm.$hooks);\n\t\t\t\t})\n\t\t\t\t.ifForm('string', function(hook) {\n\t\t\t\t\tout = mm.$hooks[hook] && mm.$hooks[hook].length;\n\t\t\t\t})\n\t\t\t\t.ifForm('array', function(hooks) {\n\t\t\t\t\tout = hooks.every(function(hook) {\n\t\t\t\t\t\treturn (mm.$hooks[hook] && mm.$hooks[hook].length);\n\t\t\t\t\t});\n\t\t\t\t});\n\n\t\t\treturn out;\n\t\t});\n\n\n\t\t/**\n\t\t* Execute all hooks attached to a model\n\t\t* This function fires all hooks in parallel and expects all to resolve correctly via callback\n\t\t* NOTE: Hooks are always fired with the callback as the first argument\n\t\t* @param {string} name The name of the hook to invoke\n\t\t* @param {function} callback The callback to invoke on success\n\t\t* @param {...*} parameters Any other parameters to be passed to each hook\n\t\t* @return {monoxide.monoxideModel} The chainable monoxideModel\n\t\t*/\n\t\tmm.fire = function(name, callback) {\n\t\t\tif ( // There is at least one event handler attached\n\t\t\t\t(mm.$hooks[name] && mm.$hooks[name].length)\n\t\t\t\t|| (o.$hooks[name] && o.$hooks[name].length)\n\t\t\t) {\n\t\t\t\tvar eventArgs = _.values(arguments);\n\t\t\t\teventArgs.splice(1, 1); // Remove the 'callback' arg as events cant respond to it anyway\n\t\t\t\tmm.emit.apply(mm, eventArgs);\n\t\t\t} else {\n\t\t\t\treturn callback();\n\t\t\t}\n\n\t\t\t// Calculate the args array we will pass to each hook\n\t\t\tvar hookArgs = _.values(arguments);\n\t\t\thookArgs.shift(); // We will set args[0] to the callback in each case anyway so we only need to shift 1\n\n\t\t\tvar eventArgs = _.values(arguments);\n\t\t\teventArgs.splice(1, 1); // Remove the 
'callback' arg as events cant respond to it anyway\n\n\t\t\tasync()\n\t\t\t\t// Fire hooks attached to this model + global hooks {{{\n\t\t\t\t.forEach([]\n\t\t\t\t\t.concat(o.$hooks[name], mm.$hooks[name])\n\t\t\t\t\t.filter(f => !!f) // Actually is a function?\n\t\t\t\t, function(next, hookFunc) {\n\t\t\t\t\thookArgs[0] = next;\n\t\t\t\t\thookFunc.apply(mm, hookArgs);\n\t\t\t\t})\n\t\t\t\t// }}}\n\t\t\t\t.end(callback);\n\n\t\t\treturn mm;\n\t\t};\n\n\n\t\t/**\n\t\t* Return the meta structure for a specific model\n\t\t* @param {Object} Options to return when computing the meta object. See the main meta() function for details\n\t\t* @param {function} callback The callback to call with (err, layout)\n\t\t* @return {monoxide.monoxideModel} The chainable monoxideModel\n\t\t* @see monoxide.meta()\n\t\t*/\n\t\tmm.meta = argy('[object] function', function(options, callback) {\n\t\t\tvar settings = options || {};\n\t\t\tsettings.$collection = mm.$collection;\n\t\t\to.internal.meta(settings, callback);\n\t\t\treturn mm;\n\t\t});\n\n\t\t/**\n\t\t* Run a third party plugin against a model\n\t\t* This function is really just a shorthand way to pass a Monoxide model into a function\n\t\t* @param {function|string|array} plugins The plugin(s) to run. Each function is run as (model, callback), strings are assumed to be file paths to JS files if they contain at least one '/' or `.` otherwise they are loaded from the `plugins` directory\n\t\t* @return {monoxide.monoxideModel} The chainable monoxideModel\n\t\t*/\n\t\tmm.use = function(plugins, callback) {\n\t\t\tif (!plugins) return callback(); // Do nothing if given falsy\n\n\t\t\tasync()\n\t\t\t\t.forEach(_.castArray(plugins), function(next, plugin) {\n\t\t\t\t\tif (_.isString(plugin)) {\n\t\t\t\t\t\tvar pluginModule = /[\\/\\.]/.test(plugin) // Contains at least one slash or dot?\n\t\t\t\t\t\t\t? 
require(plugin)\n\t\t\t\t\t\t\t: require(__dirname + '/plugins/' + plugin)\n\t\t\t\t\t\tpluginModule.call(mm, mm, next);\n\t\t\t\t\t} else if (_.isFunction(plugin)) {\n\t\t\t\t\t\tplugin.call(mm, mm, next);\n\t\t\t\t\t} else {\n\t\t\t\t\t\tnext('Unsupported plugin format');\n\t\t\t\t\t}\n\t\t\t\t})\n\t\t\t\t.end(callback);\n\n\t\t\treturn mm;\n\t\t};\n\n\t\t/**\n\t\t* Return an array of all distinct field values\n\t\t* @param {string} field The field to return the values of\n\t\t* @param {function} plugin The plugin to run. This gets the arguments (values)\n\t\t* @return {monoxide.monoxideModel} The chainable monoxideModel\n\t\t*/\n\t\tmm.distinct = function(field, callback) {\n\t\t\to.internal.runCommand({\n\t\t\t\tdistinct: mm.$collection,\n\t\t\t\tkey: field,\n\t\t\t}, function(err, res) {\n\t\t\t\tif (err) return callback(err);\n\t\t\t\tcallback(null, res.values);\n\t\t\t});\n\t\t\treturn mm;\n\t\t};\n\n\n\t\t/**\n\t\t* Set a simple data key\n\t\t* This is usually used to store suplemental information about models\n\t\t* @param {Object|string} key The key to set or a full object of keys\n\t\t* @param {*} value If `key` is a string the value is the value stored\n\t\t* @return {monoxide.monoxideModel} The chainable monoxideModel\n\t\t*/\n\t\tmm.set = function(key, value) {\n\t\t\tif (argy.isType(key, 'object')) {\n\t\t\t\t_.assign(mm.$data, key);\n\t\t\t} else if (argy.isType(key, 'string')) {\n\t\t\t\tmm.$data[key] = value;\n\t\t\t} else {\n\t\t\t\tthrow new Error('Unsupported type storage during set');\n\t\t\t}\n\t\t\treturn mm;\n\t\t};\n\n\n\t\t/*\n\t\t* Gets a simple data key or returns a fallback\n\t\t* @param {string} key The data key to retrieve\n\t\t* @param {*} [fallback] The fallback to return if the key is not present\n\t\t*/\n\t\tmm.get = function(key, fallback) {\n\t\t\treturn (argy.isType(mm.$data[key], 'undefined') ? 
fallback : mm.$data[key]);\n\t\t};\n\n\n\n\t\t/**\n\t\t* Retrieve the list of actual on-the-database indexes\n\t\t* @param {function} callback Callback to fire as (err, indexes)\n\t\t* @return {monoxide.monoxideModel} The chainable monoxideModel\n\t\t*/\n\t\tmm.getIndexes = function(callback) {\n\t\t\tmm.$mongoModel.indexes(function(err, res) {\n\t\t\t\tif (err && err.message == 'no collection') {\n\t\t\t\t\tcallback(null, []); // Collection doesn't exist yet - ignore and return that it has no indexes\n\t\t\t\t} else {\n\t\t\t\t\tcallback(err, res);\n\t\t\t\t}\n\t\t\t});\n\n\t\t\treturn mm;\n\t\t};\n\n\n\t\t/**\n\t\t* Return the list of indexes requested by the schema\n\t\t* @param {function} callback Callback to fire as (err, indexes)\n\t\t* @return {monoxide.monoxideModel} The chainable monoxideModel\n\t\t*/\n\t\tmm.getSchemaIndexes = function(callback) {\n\t\t\tmm.meta({$indexes: true}, function(err, res) {\n\t\t\t\tif (err) return callback(err);\n\t\t\t\tcallback(null, _(res)\n\t\t\t\t\t.map(function(v, k) {\n\t\t\t\t\t\treturn _.assign(v, {id: k});\n\t\t\t\t\t})\n\t\t\t\t\t.filter(function(v) {\n\t\t\t\t\t\treturn !!v.index;\n\t\t\t\t\t})\n\t\t\t\t\t.map(function(v) {\n\t\t\t\t\t\tvar o = {name: v.id == '_id' ? '_id_' : v.id, key: {}};\n\t\t\t\t\t\to.key[v.id] = 1;\n\t\t\t\t\t\treturn o;\n\t\t\t\t\t})\n\t\t\t\t\t.value()\n\t\t\t\t);\n\t\t\t});\n\n\t\t\treturn mm;\n\t\t};\n\n\n\t\t/**\n\t\t* Check this model by a defined list of indexes\n\t\t* The return is a duplicate of the input indexes with an additional `status` property which can equal to 'ok' or 'missing'\n\t\t* @param {array} [wantIndexes] The indexes to examine against. If omitted the results of model.getSchemaIndexes() is used\n\t\t* @param {array} [actualIndexes] The current state of the model to compare against. 
If omitted the results of model.getIndexes() is used\n\t\t* @param {function} callback The callback to call as (err, indexes)\n\t\t* @return {monoxide.monoxideModel} The chainable monoxideModel\n\t\t*/\n\t\tmm.checkIndexes = argy('[array] [array] function', function(wantIndexes, actualIndexes, callback) {\n\t\t\tasync()\n\t\t\t\t// Either use provided indexes or determine them {{{\n\t\t\t\t.parallel({\n\t\t\t\t\twantIndexes: function(next) {\n\t\t\t\t\t\tif (wantIndexes) return next(null, wantIndexes);\n\t\t\t\t\t\tmm.getSchemaIndexes(next);\n\t\t\t\t\t},\n\t\t\t\t\tactualIndexes: function(next) {\n\t\t\t\t\t\tif (actualIndexes) return next(null, actualIndexes);\n\t\t\t\t\t\tmm.getIndexes(next);\n\t\t\t\t\t},\n\t\t\t\t})\n\t\t\t\t// }}}\n\t\t\t\t// Compare indexes against whats declared {{{\n\t\t\t\t.map('indexReport', 'wantIndexes', function(next, index) {\n\t\t\t\t\tvar foundIndex = this.actualIndexes.find(i => _.isEqual(i.key, index.key));\n\t\t\t\t\tif (foundIndex) {\n\t\t\t\t\t\tindex.status = 'ok';\n\t\t\t\t\t} else {\n\t\t\t\t\t\tindex.status = 'missing';\n\t\t\t\t\t}\n\n\t\t\t\t\tnext(null, index);\n\t\t\t\t})\n\t\t\t\t// }}}\n\t\t\t\t// End {{{\n\t\t\t\t.end(function(err) {\n\t\t\t\t\tif (err) return callback(err);\n\t\t\t\t\tcallback(null, this.indexReport);\n\t\t\t\t});\n\t\t\t\t// }}}\n\t\t});\n\n\n\t\treturn mm;\n\t});\n\tutil.inherits(o.monoxideModel, events.EventEmitter);\n\n\t// }}}\n\n\t// .monoxideDocument([setup]) - monoxide document instance {{{\n\t/**\n\t* Returns a single instance of a Monoxide document\n\t* @class\n\t* @name monoxide.monoxideDocument\n\t* @param {Object} setup The prototype fields. Everything in this object is extended into the prototype\n\t* @param {boolean} [setup.$applySchema=true] Whether to enforce the model schema on the object. This includes applying default values\n\t* @param {boolean} [setup.$dirty=false] Whether the entire document contents should be marked as dirty (modified). 
If true this also skips the computation of modified fields\n\t* @param {boolean [setup.decorate=true] Whether to apply any decoration. If false this function returns data undecorated (i.e. no custom Monoxide functionality)\n\t* @param {string} setup.$collection The collection this document belongs to\n\t* @param {Object} data The initial data\n\t* @return {monoxide.monoxideDocument}\n\t*/\n\to.monoxideDocument = function monoxideDocument(setup, data) {\n\t\tif (setup.$decorate === false) return data;\n\t\tsetup.$dirty = !!setup.$dirty;\n\n\t\tvar model = o.models[setup.$collection];\n\n\t\tvar proto = {\n\t\t\t$MONOXIDE: true,\n\t\t\t$collection: setup.$collection,\n\t\t\t$populated: {},\n\n\t\t\t/**\n\t\t\t* Save a document\n\t\t\t* By default this function will only save back modfified data\n\t\t\t* If `data` is specified this is used as well as the modified fields (unless `data.$ignoreModified` is falsy, in which case modified fields are ignored)\n\t\t\t* @param {Object} [data] An optional data patch to save\n\t\t\t* @param {boolean} [data.$ignoreModified=false] Ignore all modified fields and only process save data being passed in the `data` object (use this to directly address what should be saved, ignoring everything else). Setting this drastically speeds up the save operation but at the cost of having to be specific as to what to save\n\t\t\t* @param {function} [callback] The callback to invoke on saving\n\t\t\t*/\n\t\t\tsave: argy('[object] [function]', function(data, callback) {\n\t\t\t\tvar doc = this;\n\t\t\t\tvar mongoDoc = doc.toMongoObject();\n\t\t\t\tvar patch = {\n\t\t\t\t\t$collection: doc.$collection,\n\t\t\t\t\t$id: doc._id,\n\t\t\t\t\t$errNoUpdate: true, // Throw an error if we fail to update (i.e. 
record removed before save)\n\t\t\t\t\t$returnUpdated: true,\n\t\t\t\t};\n\n\t\t\t\tif (data && data.$ignoreModified) { // Only save incomming data\n\t\t\t\t\tdelete data.$ignoreModified;\n\t\t\t\t\t_.assign(patch, data);\n\t\t\t\t} else if (data) { // Data is specified as an object but $ignoreModified is not set - use both inputs\n\t\t\t\t\tdoc.isModified().forEach(function(path) {\n\t\t\t\t\t\tpatch[path] = _.get(mongoDoc, path);\n\t\t\t\t\t});\n\t\t\t\t\t_.assign(patch, data);\n\t\t\t\t} else {\n\t\t\t\t\tdoc.isModified().forEach(function(path) {\n\t\t\t\t\t\tpatch[path] = _.get(mongoDoc, path);\n\t\t\t\t\t});\n\t\t\t\t}\n\n\t\t\t\to.internal.save(patch, function(err, newRec) {\n\t\t\t\t\tdoc = newRec;\n\t\t\t\t\tif (_.isFunction(callback)) callback(err, newRec);\n\t\t\t\t});\n\n\t\t\t\treturn doc;\n\t\t\t}),\n\n\t\t\t/**\n\t\t\t* Remove the document from the collection\n\t\t\t* This method is really just a thin wrapper around monoxide.delete()\n\t\t\t* @param {function} [callback] Optional callback to invoke on completion\n\t\t\t* @see monoxide.delete\n\t\t\t*/\n\t\t\tremove: function(callback) {\n\t\t\t\tvar doc = this;\n\t\t\t\to.internal.delete({\n\t\t\t\t\t$collection: doc.$collection,\n\t\t\t\t\t$id: doc._id,\n\t\t\t\t}, callback);\n\t\t\t\treturn doc;\n\t\t\t},\n\n\t\t\t/**\n\t\t\t* Remove certain fields from the document object\n\t\t\t* This method is really just a thin wrapper around monoxide.delete()\n\t\t\t* @param {string|regexp|array} fields Either a single field name, regular expression or array of strings/regexps to filter by. 
Any key matching will be removed from the object\n\t\t\t* @return {monoxide.monoxideDocument} This object after the fields have been removed\n\t\t\t*/\n\t\t\tomit: function(fields) {\n\t\t\t\tvar removeFields = _.castArray(fields);\n\t\t\t\ttraverse(this).forEach(function(v) {\n\t\t\t\t\tif (!this.key) return; // Skip array entries\n\t\t\t\t\tvar key = this.key;\n\t\t\t\t\tif (removeFields.some(function(filter) {\n\t\t\t\t\t\treturn (\n\t\t\t\t\t\t\t(_.isString(filter) && key == filter) ||\n\t\t\t\t\t\t\t(_.isRegExp(filter) && filter.test(key))\n\t\t\t\t\t\t);\n\t\t\t\t\t})) {\n\t\t\t\t\t\tthis.remove();\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t\treturn this;\n\t\t\t},\n\n\t\t\t/**\n\t\t\t* Transform a MonoxideDocument into a plain JavaScript object\n\t\t\t* @return {Object} Plain JavaScript object with all special properties and other gunk removed\n\t\t\t*/\n\t\t\ttoObject: function() {\n\t\t\t\tvar doc = this;\n\t\t\t\tvar newDoc = {};\n\t\t\t\t_.forEach(this, function(v, k) {\n\t\t\t\t\tif (doc.hasOwnProperty(k) && !_.startsWith(k, '$')) newDoc[k] = _.clone(v);\n\t\t\t\t});\n\n\t\t\t\treturn newDoc;\n\t\t\t},\n\n\t\t\t/**\n\t\t\t* Transform a MonoxideDocument into a Mongo object\n\t\t\t* This function transforms all OID strings back into their Mongo equivalent\n\t\t\t* @return {Object} Plain JavaScript object with all special properties and other gunk removed\n\t\t\t*/\n\t\t\ttoMongoObject: function() {\n\t\t\t\tvar doc = this;\n\t\t\t\tvar outDoc = doc.toObject(); // Rely on the toObject() syntax to strip out rubbish\n\n\t\t\t\tdoc.getOIDs().forEach(function(node) {\n\t\t\t\t\tswitch (node.fkType) {\n\t\t\t\t\t\tcase 'objectId':\n\t\t\t\t\t\t\tvar oidLeaf = _.get(doc, node.docPath);\n\t\t\t\t\t\t\tif (_.isUndefined(oidLeaf)) return; // Ignore undefined\n\n\t\t\t\t\t\t\tif (!o.utilities.isObjectID(oidLeaf)) {\n\t\t\t\t\t\t\t\tif (_.has(oidLeaf, '_id')) { // Already populated?\n\t\t\t\t\t\t\t\t\t_.set(outDoc, node.docPath, 
o.utilities.objectID(oidLeaf._id));\n\t\t\t\t\t\t\t\t} else { // Convert to an OID\n\t\t\t\t\t\t\t\t\t_.set(outDoc, node.docPath, o.utilities.objectID(oidLeaf));\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\tcase 'objectIdArray':\n\t\t\t\t\t\t\tvar oidLeaf = _.get(doc, node.schemaPath);\n\t\t\t\t\t\t\t_.set(outDoc, node.schemaPath, oidLeaf.map(function(leaf) {\n\t\t\t\t\t\t\t\treturn o.utilities.isObjectID(leaf) ? leaf : o.utilities.objectID(leaf);\n\t\t\t\t\t\t\t}));\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\tdefault:\n\t\t\t\t\t\t\treturn; // Ignore unsupported OID types\n\t\t\t\t\t}\n\t\t\t\t});\n\n\t\t\t\treturn outDoc;\n\t\t\t},\n\n\t\t\tisModified: function(path) {\n\t\t\t\tvar doc = this;\n\t\t\t\tif (path) {\n\t\t\t\t\tvar v = _.get(doc, path);\n\t\t\t\t\tvar pathJoined = _.isArray(path) ? path.join('.') : path;\n\t\t\t\t\tif (o.utilities.isObjectID(v)) {\n\t\t\t\t\t\tif (doc.$populated[pathJoined]) { // Has been populated\n\t\t\t\t\t\t\t// FIXME; What happens if a populated document changes\n\t\t\t\t\t\t\tthrow new Error('Changing populated document objects is not yet supported');\n\t\t\t\t\t\t\treturn false;\n\t\t\t\t\t\t} else { // Has not been populated\n\t\t\t\t\t\t\tif (doc.$originalValues[pathJoined]) { // Compare against the string value\n\t\t\t\t\t\t\t\treturn doc.$originalValues[pathJoined] != v.toString();\n\t\t\t\t\t\t\t} else if (doc.$originalValues[pathJoined + '.id'] && doc.$originalValues[pathJoined + '._bsontype']) { // Known but its stored as a Mongo OID - look into its values to determine its real comparitor string\n\t\t\t\t\t\t\t\t// When the lookup is a raw OID we need to pass the binary junk into the objectID THEN get its string value before we can compare it to the one we last saw when we fetched the object\n\t\t\t\t\t\t\t\treturn o.utilities.objectID(doc.$originalValues[pathJoined + '.id']).toString() != v.toString(); // Compare against the string value\n\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\treturn true; // 
Otherwise declare it modified\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t} else if (_.isObject(v)) { // If its an object (or an array) examine the $clean propertly\n\t\t\t\t\t\treturn !v.$clean;\n\t\t\t\t\t} else {\n\t\t\t\t\t\treturn doc.$originalValues[pathJoined] != v;\n\t\t\t\t\t}\n\t\t\t\t} else {\n\t\t\t\t\tvar modified = [];\n\t\t\t\t\ttraverse(doc).map(function(v) { // NOTE - We're using traverse().map() here as traverse().forEach() actually mutates the array if we tell it not to recurse with this.remove(true) (needed to stop recursion into complex objects if the parent has been changed)\n\t\t\t\t\t\tif (!this.path.length) return; // Root node\n\t\t\t\t\t\tif (_.startsWith(this.key, '$') || this.key == '_id') { // Don't scan down hidden elements\n\t\t\t\t\t\t\treturn this.remove(true);\n\t\t\t\t\t\t} else if (o.utilities.isObjectID(v)) { // Leaf is an object ID\n\t\t\t\t\t\t\tif (doc.isModified(this.path)) modified.push(this.path.join('.'));\n\t\t\t\t\t\t\tthis.remove(true); // Don't scan any deeper\n\t\t\t\t\t\t} else if (doc.isModified(this.path)) {\n\t\t\t\t\t\t\tif (_.isObject(v)) this.remove(true);\n\t\t\t\t\t\t\tmodified.push(this.path.join('.'));\n\t\t\t\t\t\t}\n\t\t\t\t\t});\n\t\t\t\t\treturn modified;\n\t\t\t\t}\n\t\t\t},\n\n\t\t\t/**\n\t\t\t* Expand given paths into objects\n\t\t\t* @param {Object|array|string} populations A single or multiple populations to perform\n\t\t\t* @param {function} callback The callback to run on completion\n\t\t\t* @param {boolean} [strict=false] Whether to raise errors and agressively retry if a population fails\n\t\t\t* @return {Object} This document\n\t\t\t*/\n\t\t\tpopulate: function(populations, callback, strict) {\n\t\t\t\tvar doc = this;\n\t\t\t\tvar populations = _(populations)\n\t\t\t\t\t.castArray()\n\t\t\t\t\t.map(function(population) { // Mangle all populations into objects (each object should contain a path and an optional ref)\n\t\t\t\t\t\tif (_.isString(population)) {\n\t\t\t\t\t\t\treturn {path: 
population};\n\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\treturn population;\n\t\t\t\t\t\t}\n\t\t\t\t\t})\n\t\t\t\t\t.value();\n\n\t\t\t\tvar tryPopulate = function(finish, populations, strict) {\n\t\t\t\t\tvar willPopulate = 0; // Count of items that seem valid that we will try to populate\n\t\t\t\t\tvar failedPopulations = []; // Populations that we couldn't get the end-points of (probably because they are nested)\n\t\t\t\t\tvar populator = async(); // Defered async worker that will actually populate things\n\t\t\t\t\tasync()\n\t\t\t\t\t\t.forEach(populations, function(nextPopulation, population) {\n\t\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\t\tdoc.getNodesBySchemaPath(population.path, true).forEach(function(node) {\n\t\t\t\t\t\t\t\t\tif (!population.ref) {\n\t\t\t\t\t\t\t\t\t\tpopulation.ref = _.get(model, '$mongooseModel.schema.paths.' + node.schemaPath.split('.').join('.schema.paths.') + '.options.ref');\n\t\t\t\t\t\t\t\t\t\tif (!population.ref) throw new Error('Cannot determine collection to use for schemaPath ' + node.schemaPath + '! Specify this is in model with {ref: }');\n\t\t\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\t\t\tif (_.isObject(node.node) && node.node._id) { // Object is already populated\n\t\t\t\t\t\t\t\t\t\twillPopulate++; // Say we're going to resolve this anyway even though we have nothing to do - prevents an issue where the error catcher reports it as a null operation (willPopulate==0)\n\t\t\t\t\t\t\t\t\t} else if (!node.node) {\n\t\t\t\t\t\t\t\t\t\t// Node is falsy - nothing to populate here\n\t\t\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\t\t\tpopulator.defer(function(next) {\n\t\t\t\t\t\t\t\t\t\t\to.internal.query({\n\t\t\t\t\t\t\t\t\t\t\t\t$errNotFound: false,\n\t\t\t\t\t\t\t\t\t\t\t\t$collection: population.ref,\n\t\t\t\t\t\t\t\t\t\t\t\t$id: o.utilities.isObjectID(node.node) ? 
node.node.toString() : node.node,\n\t\t\t\t\t\t\t\t\t\t\t}, function(err, res) {\n\t\t\t\t\t\t\t\t\t\t\t\tif (err) return next(err);\n\t\t\t\t\t\t\t\t\t\t\t\t_.set(doc, node.docPath, res);\n\t\t\t\t\t\t\t\t\t\t\t\tdoc.$populated[node.docPath] = true;\n\t\t\t\t\t\t\t\t\t\t\t\tnext();\n\t\t\t\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t\t\t\twillPopulate++;\n\t\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t\tnextPopulation();\n\t\t\t\t\t\t\t} catch (e) {\n\t\t\t\t\t\t\t\tif (strict) failedPopulations.push(population);\n\t\t\t\t\t\t\t\tnextPopulation();\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t})\n\t\t\t\t\t\t.then(function(next) {\n\t\t\t\t\t\t\tif (willPopulate > 0) {\n\t\t\t\t\t\t\t\tpopulator.await().end(next); // Run all population defers\n\t\t\t\t\t\t\t} else if (strict) {\n\t\t\t\t\t\t\t\tnext('Unable to resolve remaining populations: ' + JSON.stringify(populations) + '. In ' + doc.$collection + '#' + doc._id);\n\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\tnext();\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t})\n\t\t\t\t\t\t.end(function(err) {\n\t\t\t\t\t\t\tif (err) {\n\t\t\t\t\t\t\t\tcallback(err);\n\t\t\t\t\t\t\t} else if (failedPopulations.length) {\n\t\t\t\t\t\t\t\tconsole.log('SILL MORE POPULATIONS TO RUN', failedPopulations);\n\t\t\t\t\t\t\t\tsetTimeout(function() {\n\t\t\t\t\t\t\t\t\tconsole.log('FIXME: Defered runnable');\n\t\t\t\t\t\t\t\t\t//tryPopulate(callback, failedPopulations);\n\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\tcallback(null, doc);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t});\n\t\t\t\t};\n\t\t\t\ttryPopulate(callback, populations, strict);\n\t\t\t\treturn doc;\n\t\t\t},\n\n\t\t\t/**\n\t\t\t* Retrieves all 'leaf' elements matching a schema path\n\t\t\t* Since any segment of the path could be a nested object, array or sub-document collection this function is likely to return multiple elements\n\t\t\t* For the nearest approximation of how this function operates think of it like performing the jQuery expression: `$('p').each(function() { ... 
})`\n\t\t\t* @param {string} schemaPath The schema path to iterate down\n\t\t\t* @param {boolean} [strict=false] Optional indicator that an error should be thrown if a path cannot be traversed\n\t\t\t* @return {array} Array of all found leaf nodes\n\t\t\t*/\n\t\t\tgetNodesBySchemaPath: function(schemaPath, strict) {\n\t\t\t\tvar doc = this;\n\t\t\t\tvar examineStack = [{\n\t\t\t\t\tnode: doc,\n\t\t\t\t\tdocPath: '',\n\t\t\t\t\tschemaPath: '',\n\t\t\t\t}];\n\n\t\t\t\tvar segments = schemaPath.split('.');\n\t\t\t\tsegments.every(function(pathSegment, pathSegmentIndex) {\n\t\t\t\t\treturn examineStack.every(function(esDoc, esDocIndex) {\n\t\t\t\t\t\tif (esDoc === false) { // Skip this subdoc\n\t\t\t\t\t\t\treturn true;\n\t\t\t\t\t\t} else if (_.isUndefined(esDoc.node[pathSegment]) && pathSegmentIndex == segments.length -1) {\n\t\t\t\t\t\t\texamineStack[esDocIndex] = {\n\t\t\t\t\t\t\t\tnode: esDoc.node[pathSegment],\n\t\t\t\t\t\t\t\tdocPath: esDoc.docPath + '.' + pathSegment,\n\t\t\t\t\t\t\t\tschemaPath: esDoc.schemaPath + '.' + pathSegment,\n\t\t\t\t\t\t\t};\n\t\t\t\t\t\t\treturn true;\n\t\t\t\t\t\t} else if (_.isUndefined(esDoc.node[pathSegment])) {\n\t\t\t\t\t\t\t// If we are trying to recurse into a path segment AND we are not at the leaf of the path (as undefined leaves are ok) - raise an error\n\t\t\t\t\t\t\tif (strict) throw new Error('Cannot traverse into path: \"' + (esDoc.docPath + '.' 
+ pathSegment).substr(1) + '\" for doc ' + doc.$collection + '#' + doc._id);\n\t\t\t\t\t\t\texamineStack[esDocIndex] = false;\n\t\t\t\t\t\t\treturn false;\n\t\t\t\t\t\t} else if (_.isArray(esDoc.node[pathSegment])) { // Found an array - remove this doc and append each document we need to examine at the next stage\n\t\t\t\t\t\t\tesDoc.node[pathSegment].forEach(function(d,i) {\n\t\t\t\t\t\t\t\t// Do this in a forEach to break appart the weird DocumentArray structure we get back from Mongoose\n\t\t\t\t\t\t\t\texamineStack.push({\n\t\t\t\t\t\t\t\t\tnode: d,\n\t\t\t\t\t\t\t\t\tdocPath: esDoc.docPath + '.' + pathSegment + '.' + i,\n\t\t\t\t\t\t\t\t\tschemaPath: esDoc.schemaPath + '.' + pathSegment,\n\t\t\t\t\t\t\t\t})\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\texamineStack[esDocIndex] = false;\n\t\t\t\t\t\t\treturn true;\n\t\t\t\t\t\t} else if (_.has(esDoc.node, pathSegment)) { // Traverse into object - replace this nodeerence with the new pointer\n\t\t\t\t\t\t\texamineStack[esDocIndex] = {\n\t\t\t\t\t\t\t\tnode: esDoc.node[pathSegment],\n\t\t\t\t\t\t\t\tdocPath: esDoc.docPath + '.' + pathSegment,\n\t\t\t\t\t\t\t\tschemaPath: esDoc.schemaPath + '.' + pathSegment,\n\t\t\t\t\t\t\t};\n\t\t\t\t\t\t\treturn true;\n\t\t\t\t\t\t}\n\t\t\t\t\t});\n\t\t\t\t});\n\n\t\t\t\treturn _(examineStack)\n\t\t\t\t\t.filter()\n\t\t\t\t\t.filter(function(node) {\n\t\t\t\t\t\treturn !! 
node.docPath;\n\t\t\t\t\t})\n\t\t\t\t\t.map(function(node) {\n\t\t\t\t\t\tnode.docPath = node.docPath.substr(1);\n\t\t\t\t\t\tnode.schemaPath = node.schemaPath.substr(1);\n\t\t\t\t\t\treturn node;\n\t\t\t\t\t})\n\t\t\t\t\t.value();\n\t\t\t},\n\n\t\t\t/**\n\t\t\t* Return an array of all OID leaf nodes within the document\n\t\t\t* This function combines the behaviour of monoxide.utilities.extractFKs with monoxide.monoxideDocument.getNodesBySchemaPath)\n\t\t\t* @return {array} An array of all leaf nodes\n\t\t\t*/\n\t\t\tgetOIDs: function() {\n\t\t\t\tvar doc = this;\n\t\t\t\tvar stack = [];\n\n\t\t\t\t_.forEach(model.$oids, function(fkType, schemaPath) {\n\t\t\t\t\tif (fkType.type == 'subDocument') return; // Skip sub-documents (as they are stored against the parent anyway)\n\n\t\t\t\t\tstack = stack.concat(doc.getNodesBySchemaPath(schemaPath)\n\t\t\t\t\t\t.map(function(node) {\n\t\t\t\t\t\t\tnode.fkType = fkType.type;\n\t\t\t\t\t\t\treturn node;\n\t\t\t\t\t\t})\n\t\t\t\t\t);\n\t\t\t\t});\n\t\t\t\treturn stack;\n\t\t\t},\n\n\t\t\t$applySchema: true,\n\t\t};\n\n\t\tproto.delete = proto.remove;\n\n\t\t_.extend(\n\t\t\tproto, // INPUT: Basic prototype\n\t\t\tsetup, // Merge with the incomming prototype (should contain at least $collection)\n\t\t\tmodel.$methods // Merge with model methods\n\t\t);\n\n\t\t// Create the base document\n\t\tvar doc = Object.create(proto);\n\n\t\t// Setup Virtuals\n\t\tObject.defineProperties(doc, model.$virtuals);\n\n\t\t// Convert data to a simple array if its weird Mongoose fluff\n\t\tif (data instanceof mongoose.Document) data = data.toObject();\n\n\t\t_.extend(doc, data);\n\n\t\t// Apply schema\n\t\tif (doc.$applySchema) {\n\t\t\t_.forEach(model.$mongooseModel.schema.paths, function(pathSpec, path) {\n\t\t\t\tvar docValue = _.get(doc, path, undefined);\n\t\t\t\tif (_.isUndefined(docValue)) {\n\t\t\t\t\tif (pathSpec.defaultValue) { // Item is blank but SHOULD have a default\n\t\t\t\t\t\t_.set(doc, path, _.isFunction(pathSpec.defaultValue) 
? pathSpec.defaultValue() : pathSpec.defaultValue);\n\t\t\t\t\t} else {\n\t\t\t\t\t\t_.set(doc, path, undefined);\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t});\n\t\t}\n\n\t\t// Sanitize data to remove all ObjectID crap\n\t\tdoc.getOIDs().forEach(function(node) {\n\t\t\tif (node.fkType == 'objectId') {\n\t\t\t\tvar singleOid = _.get(doc, node.docPath);\n\t\t\t\tif (o.utilities.isObjectID(singleOid))\n\t\t\t\t\t_.set(doc, node.docPath, singleOid.toString());\n\t\t\t} else if (node.fkType == 'objectIdArray') {\n\t\t\t\tvar oidArray = _.get(doc, node.docPath);\n\t\t\t\tif (o.utilities.isObjectID(oidArray)) {\n\t\t\t\t\t_.set(doc, node.docPath, oidArray.toString());\n\t\t\t\t} else if (_.isObject(oidArray) && oidArray._id && o.utilities.isObjectID(oidArray._id)) {\n\t\t\t\t\t// FIXME: Rather crappy sub-document flattening for now\n\t\t\t\t\t// This needs to actually scope into the sub-object schema and flatten each ID and not just the _id element\n\n\t\t\t\t\toidArray._id = oidArray._id.toString();\n\t\t\t\t}\n\t\t\t}\n\t\t});\n\n\n\t\t// Break object into component parts and apply the '$clean' marker to arrays and objects\n\t\tObject.defineProperty(doc, '$originalValues', {\n\t\t\tenumerable: false,\n\t\t\tvalue: {},\n\t\t});\n\n\t\tif (!setup.$dirty) {\n\t\t\ttraverse(doc).forEach(function(v) {\n\t\t\t\t// If its an object (or array) glue the `$clean` property to it to detect writes\n\t\t\t\tif (_.isObject(v)) {\n\t\t\t\t\tObject.defineProperty(v, '$clean', {\n\t\t\t\t\t\tenumerable: false,\n\t\t\t\t\t\tvalue: true,\n\t\t\t\t\t});\n\t\t\t\t} else if (!_.isPlainObject(v)) { // For everything else - stash the original value in this.parent.$originalValues\n\t\t\t\t\tdoc.$originalValues[this.path.join('.')] = o.utilities.isObjectID(v) ? 
v.toString() : v;\n\t\t\t\t}\n\t\t\t});\n\t\t}\n\n\t\t// Apply population data\n\t\tdoc.getOIDs().forEach(function(node) {\n\t\t\tdoc.$populated[node.docPath] = o.utilities.isObjectID(node.docPath);\n\t\t\tif (!setup.$dirty) doc.$originalValues[node.docPath] = _.get(doc, node.docPath);\n\t\t});\n\n\t\treturn doc;\n\t};\n\t// }}}\n\n\t// .model(name) - helper function to return a declared model {{{\n\t/**\n\t* Return a defined Monoxide model\n\t* The model must have been previously defined by monoxide.schema()\n\t* This function is identical to accessing the model directly via `monoxide.models[modelName]`\n\t*\n\t* @name monoxide.model\n\t* @see monoxide.schema\n\t*\n\t* @param {string} model The model name (generally lowercase plurals e.g. 'users', 'widgets', 'favouriteItems' etc.)\n\t* @returns {Object} The monoxide model of the generated schema\n\t*/\n\to.model = function(model) {\n\t\treturn o.models[model];\n\t};\n\t// }}}\n\n\t// .schema - Schema builder {{{\n\t/**\n\t* Construct and return a Mongo model\n\t* This function creates a valid schema specificaion then returns it as if model() were called\n\t*\n\t* @name monoxide.schema\n\t* @see monoxide.model\n\t*\n\t* @param {string} model The model name (generally lowercase plurals e.g. 
'users', 'widgets', 'favouriteItems' etc.)\n\t* @param {Object} spec The schema specification composed of a hierarhical object of keys with each value being the specification of that field\n\t* @returns {Object} The monoxide model of the generated schema\n\t* @emits modelCreate Called as (model, instance) when a model gets created\n\t*\n\t* @example\n\t* // Example schema for a widget\n\t* var Widgets = monoxide.schema('widgets', {\n\t* \tname: String,\n\t* \tcontent: String,\n\t* \tstatus: {type: String, enum: ['active', 'deleted'], default: 'active'},\n\t* \tcolor: {type: String, enum: ['red', 'green', 'blue'], default: 'blue', index: true},\n\t* });\n\t*\n\t* @example\n\t* // Example schema for a user\n\t* var Users = monoxide.schema('users', {\n\t* \tname: String,\n\t* \trole: {type: 'string', enum: ['user', 'admin'], default: 'user'},\n\t* \tfavourite: {type: 'pointer', ref: 'widgets'},\n\t* \titems: [{type: 'pointer', ref: 'widgets'}],\n\t* \tsettings: {type: 'any'},\n\t* \tmostPurchased: [\n\t* \t\t{\n\t* \t\t\tnumber: {type: 'number', default: 0},\n\t* \t\t\titem: {type: 'pointer', ref: 'widgets'},\n\t* \t\t}\n\t* \t],\n\t* });\n\t*/\n\to.schema = function(model, spec) {\n\t\tif (!argy.isType(model, 'string') || !argy.isType(spec, 'object')) throw new Error('Schema construction requires a model ID + schema object');\n\n\t\tvar schema = new mongoose.Schema(_.deepMapValues(spec, function(value, path) {\n\t\t\t// Rewrite .type leafs {{{\n\t\t\tif (_.endsWith(path, '.type')) { // Ignore not type rewrites\n\t\t\t\tif (!_.isString(value)) return value; // Only rewrite string values\n\n\t\t\t\tswitch (value.toLowerCase()) {\n\t\t\t\t\tcase 'oid':\n\t\t\t\t\tcase 'pointer':\n\t\t\t\t\tcase 'objectid':\n\t\t\t\t\t\treturn mongoose.Schema.ObjectId;\n\t\t\t\t\tcase 'string':\n\t\t\t\t\t\treturn mongoose.Schema.Types.String;\n\t\t\t\t\tcase 'number':\n\t\t\t\t\t\treturn mongoose.Schema.Types.Number;\n\t\t\t\t\tcase 'boolean':\n\t\t\t\t\tcase 'bool':\n\t\t\t\t\t\treturn 
mongoose.Schema.Types.Boolean;\n\t\t\t\t\tcase 'array':\n\t\t\t\t\t\treturn mongoose.Schema.Types.Array;\n\t\t\t\t\tcase 'date':\n\t\t\t\t\t\treturn mongoose.Schema.Types.Date;\n\t\t\t\t\tcase 'object':\n\t\t\t\t\tcase 'mixed':\n\t\t\t\t\tcase 'any':\n\t\t\t\t\t\treturn mongoose.Schema.Types.Mixed;\n\t\t\t\t\tcase 'buffer':\n\t\t\t\t\t\treturn mongoose.Schema.Types.Buffer;\n\t\t\t\t\tdefault:\n\t\t\t\t\t\tthrow new Error('Unknown Monoxide data type: ' + value.toLowerCase());\n\t\t\t\t}\n\t\t\t// }}}\n\t\t\t// Rewrite .ref leafs {{{\n\t\t\t} else if (_.endsWith(path, '.ref')) {\n\t\t\t\tif (!_.isString(value)) return value; // Leave complex objects alone\n\t\t\t\treturn value.toLowerCase();\n\t\t\t// }}}\n\t\t\t// Leave everything else unaltered {{{\n\t\t\t} else { // Do nothing\n\t\t\t\treturn value;\n\t\t\t}\n\t\t\t// }}}\n\t\t}));\n\n\t\t// Add to model storage\n\t\to.models[model] = new o.monoxideModel({\n\t\t\t$collection: model,\n\t\t\t$mongoose: mongoose.model(model.toLowerCase(), schema), // FIXME: When we implement our own schema def system we can remove the toLowerCase() component that Mongoose insists on using. 
We can also remove all of the other toLowerCase() calls when we're trying to find the Mongoose schema\n\t\t\t$schema: schema.obj,\n\t\t});\n\n\t\to.emit('modelCreate', model, o.models[model]);\n\n\t\treturn o.models[model];\n\t};\n\t// }}}\n\n\t// .aggregate([q], callback) {{{\n\t/**\n\t* Perform a direct aggregation and return the result\n\t*\n\t* @name monoxide.aggregate\n\t* @memberof monoxide\n\t*\n\t* @param {Object} q The object to process\n\t* @param {string} q.$collection The collection / model to query\n\t* @param {boolean} [q.$slurp=true] Attempt to read all results into an array rather than return a cursor\n\t* @param {array} q.$stages The aggregation stages array\n\t* @param {Object} [q.$stages.$project] Fields to be supplied in the aggregation (in the form `{field: true}`)\n\t* @param {boolean} [q.$stages.$project._id=false] If true surpress the output of the `_id` field\n\t* @param {Object} [q.$stages.$match] Specify a filter on fields (in the form `{field: CRITERIA}`)\n\t* @param {Object} [q.$stages.$redract]\n\t* @param {Object} [q.$stages.$limit]\n\t* @param {Object} [q.$stages.$skip]\n\t* @param {Object} [q.$stages.$unwind]\n\t* @param {Object} [q.$stages.$group]\n\t* @param {Object} [q.$stages.$sample]\n\t* @param {Object} [q.$stages.$sort] Specify an object of fields to sort by (in the form `{field: 1|-1}` where 1 is ascending and -1 is decending sort order)\n\t* @param {Object} [q.$stages.$geoNear]\n\t* @param {Object} [q.$stages.$lookup]\n\t* @param {Object} [q.$stages.$out]\n\t* @param {Object} [q.$stages.$indexStats]\n\t*\n\t* @param {function} callback(err, result) the callback to call on completion or error\n\t*\n\t* @return {Object} This chainable object\n\t*/\n\to.aggregate = argy('string|object function', function MonoxideAggregate(q, callback) {\n\t\tif (argy.isType(q, 'string')) q = {$collection: q};\n\n\t\tasync()\n\t\t\t// Sanity checks {{{\n\t\t\t.then(function(next) {\n\t\t\t\tif (!q || _.isEmpty(q)) return next('No query given 
for save operation');\n\t\t\t\tif (!q.$stages || !_.isArray(q.$stages)) return next('$stages must be specified as an array');\n\t\t\t\tif (!q.$collection) return next('$collection must be specified for save operation');\n\t\t\t\tif (!o.models[q.$collection]) return next('Model not initalized');\n\t\t\t\tnext();\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// Execute and capture return {{{\n\t\t\t.then('result', function(next) {\n\t\t\t\to.models[q.$collection].$mongoModel.aggregate(q.$stages, next);\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// Slurp the cursor? {{{\n\t\t\t.then('result', function(next) {\n\t\t\t\tif (q.$slurp || _.isUndefined(q.$slurp)) {\n\t\t\t\t\to.utilities.slurpCursor(this.result, next);\n\t\t\t\t} else {\n\t\t\t\t\tnext(null, this.result);\n\t\t\t\t}\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// End {{{\n\t\t\t.end(function(err) {\n\t\t\t\tif (err) {\n\t\t\t\t\treturn callback(err);\n\t\t\t\t} else {\n\t\t\t\t\tcallback(null, this.result);\n\t\t\t\t}\n\t\t\t});\n\t\t\t// }}}\n\t\treturn o;\n\t});\n\t// }}}\n\n\t// .use([plugins...], [callback]) {{{\n\t/**\n\t* Run a third party plugin against the entire Monoxide structure\n\t* Really this function just registers all given modules against monoxide then fires the callback when done\n\t* Each plugin is called as `(callback, monoxide)`\n\t* @param {function|string|array} plugins The plugin(s) to run. 
Each function is run as (model, callback), strings are assumed to be file paths to JS files if they contain at least one '/' or `.` otherwise they are loaded from the `plugins` directory\n\t* @param {function} [callback] Optional callback to fire when all plugin have registered\n\t* @return {monoxide.monoxide} The chainable object\n\t*/\n\to.use = function(plugins, callback) {\n\t\tif (!plugins) return callback(); // Do nothing if given falsy\n\n\t\tasync()\n\t\t\t.forEach(_.castArray(plugins), function(next, plugin) {\n\t\t\t\tif (o.used.some(i => i === plugin)) {\n\t\t\t\t\tdebug('Plugin already loaded, ignoring');\n\t\t\t\t\tnext();\n\t\t\t\t} else if (_.isString(plugin)) {\n\t\t\t\t\tvar pluginModule = /[\\/\\.]/.test(plugin) // Contains at least one slash or dot?\n\t\t\t\t\t\t? require(plugin)\n\t\t\t\t\t\t: require(__dirname + '/plugins/' + plugin)\n\t\t\t\t\tpluginModule.call(o, next, o);\n\t\t\t\t\to.used.push(pluginModule);\n\t\t\t\t} else if (_.isFunction(plugin)) {\n\t\t\t\t\tplugin.call(o, next, o);\n\t\t\t\t\to.used.push(plugin);\n\t\t\t\t} else {\n\t\t\t\t\tnext('Unsupported plugin format');\n\t\t\t\t}\n\t\t\t})\n\t\t\t.end(callback);\n\n\t\treturn o;\n\t};\n\n\t/**\n\t* Storage for modules we have already loaded\n\t* @var {Array } All plugins (as funtions) we have previously loaded\n\t*/\n\to.used = [];\n\t// }}}\n\n\t// .hook(hookName, callback) {{{\n\n\t/**\n\t* Holder for global hooks\n\t* @var {array }\n\t*/\n\to.$hooks = {};\n\n\n\t/**\n\t* Attach a hook to a global event\n\t* A hook is exactly the same as a eventEmitter.on() event but must return a callback\n\t* Multiple hooks can be attached and all will be called in parallel on certain events such as 'save'\n\t* All hooks must return non-errors to proceed with the operation\n\t* @param {string} eventName The event ID to hook against\n\t* @param {function} callback The callback to run when hooked, NOTE: Any falsy callbacks are ignored\n\t* @return {monoxide} The chainable 
monoxide\n\t*/\n\to.hook = function(eventName, callback) {\n\t\tif (!callback) return mm; // Ignore flasy callbacks\n\t\tif (!o.$hooks[eventName]) o.$hooks[eventName] = [];\n\t\to.$hooks[eventName].push(callback);\n\t\treturn o;\n\t};\n\n\n\t/**\n\t* Execute global level hooks\n\t* NOTE: This will only fire hooks attached via monoxide.hook() and not individual model hooks\n\t* NOTE: Hooks are always fired with the callback as the first argument\n\t* @param {string} name The name of the hook to invoke\n\t* @param {function} callback The callback to invoke on success\n\t* @param {...*} parameters Any other parameters to be passed to each hook\n\t* @return {monoxide} The chainable monoxide\n\t*/\n\to.fire = function(name, callback) {\n\t\tif (o.$hooks[name] && o.$hooks[name].length) { // There is at least one event handler attached\n\t\t\tvar eventArgs = _.values(arguments);\n\t\t\teventArgs.splice(1, 1); // Remove the 'callback' arg as events cant respond to it anyway\n\t\t\to.emit.apply(o, eventArgs);\n\t\t} else {\n\t\t\treturn callback();\n\t\t}\n\n\t\t// Calculate the args array we will pass to each hook\n\t\tvar hookArgs = _.values(arguments);\n\t\thookArgs.shift(); // We will set args[0] to the callback in each case anyway so we only need to shift 1\n\n\t\tasync()\n\t\t\t// Fire hooks attached to this model + global hooks {{{\n\t\t\t.forEach(\n\t\t\t\to.$hooks[name]\n\t\t\t\t.filter(f => !!f) // Actually is a function?\n\t\t\t, function(next, hookFunc) {\n\t\t\t\thookArgs[0] = next;\n\t\t\t\thookFunc.apply(o, hookArgs);\n\t\t\t})\n\t\t\t// }}}\n\t\t\t.end(callback);\n\n\t\treturn o;\n\t};\n\n\n\n\t/**\n\t* Similar to fire() expect that execution is immediate\n\t* This should only be used by sync functions that require immediate action such as object mutators\n\t* NOTE: Because of the nature of this function a callback CANNOT be accepted when finished - the function is assumed done when it returns\n\t* @param {string} name The name of the hook to invoke\n\t* 
@param {...*} parameters Any other parameters to be passed to each hook\n\t* @return {monoxide} The chainable monoxide\n\t* @see fire()\n\t*/\n\to.fireImmediate = function(name, callback) {\n\t\tif (!o.$hooks[name] || !o.$hooks[name].length) return o; // No hooks to run anyway\n\n\t\tfor (var i of o.$hooks[name]) {\n\t\t\tlet hookArgs = _.values(arguments);\n\t\t\thookArgs.shift();\n\t\t\ti.apply(o, hookArgs);\n\t\t}\n\n\t\treturn o;\n\t};\n\t// }}}\n\n\t// .utilities structure {{{\n\to.utilities = {};\n\n\t// .utilities.extractFKs(schema, prefix, base) {{{\n\t/**\n\t* Extract all FKs in dotted path notation from a Mongoose model\n\t*\n\t* @name monoxide.utilities.extractFKs\n\t*\n\t* @param {Object} schema The schema object to examine (usually monoxide.models[model].$mongooseModel.schema)\n\t* @param {string} prefix existing Path prefix to use (internal use only)\n\t* @param {Object} base Base object to append flat paths to (internal use only)\n\t* @return {Object} A dictionary of foreign keys for the schema (each key will be the info of the object)\n\t*/\n\to.utilities.extractFKs = function(schema, prefix, base) {\n\t\tvar FKs = {};\n\t\tif (!prefix) prefix = '';\n\t\tif (!base) base = FKs;\n\n\t\t_.forEach(schema.paths, function(path, id) {\n\t\t\tif (id == 'id' || id == '_id') { // Main document ID\n\t\t\t\tFKs[prefix + id] = {type: 'objectId'};\n\t\t\t} else if (path.instance && path.instance == 'ObjectID') {\n\t\t\t\tFKs[prefix + id] = {type: 'objectId'};\n\t\t\t} else if (path.caster && path.caster.instance == 'ObjectID') { // Array of ObjectIDs\n\t\t\t\tFKs[prefix + id] = {type: 'objectIdArray'};\n\t\t\t} else if (path.schema) {\n\t\t\t\tFKs[prefix + id] = {type: 'subDocument'};\n\t\t\t\t_.forEach(o.utilities.extractFKs(path.schema, prefix + id + '.', base), function(val, key) {\n\t\t\t\t\tbase[key] = val;\n\t\t\t\t});\n\t\t\t}\n\t\t});\n\n\t\treturn FKs;\n\t}\n\t// }}}\n\n\t// .utilities.objectID(string) {{{\n\t/**\n\t* Construct and return a MongoDB-Core 
compatible ObjectID object\n\t* This is mainly used within functions that need to convert a string ID into an object\n\t* This has one additional check which will return undefined if the value passed in is falsy\n\t* @name monoxide.utilities.objectID\n\t* @param {string} str The string to convert into an ObjectID\n\t* @return {Object} A MongoDB-Core compatible ObjectID object instance\n\t*/\n\to.utilities.objectID = function(str) {\n\t\tif (!str) return undefined;\n\t\tif (_.isObject(str) && str._id) return new mongoose.Types.ObjectId(str._id); // Is a sub-document - extract its _id and use that\n\t\treturn new mongoose.Types.ObjectId(str);\n\t};\n\t// }}}\n\n\t// .utilities.isObjectID(string) {{{\n\t/**\n\t* Return if the input is a valid MongoDB-Core compatible ObjectID object\n\t* This is mainly used within functions that need to check that a given variable is a Mongo OID\n\t* @name monoxide.utilities.isObjectID\n\t* @param {mixed} subject The item to examine\n\t* @return {boolean} Whether the subject is a MongoDB-Core compatible ObjectID object instance\n\t*/\n\to.utilities.isObjectID = function(subject) {\n\t\treturn (subject instanceof mongoose.Types.ObjectId);\n\t};\n\n\t/**\n\t* Alias of isObjectID\n\t* @see monoxide.utilities.isObjectId\n\t*/\n\to.utilities.isObjectId = o.utilities.isObjectID;\n\t// }}}\n\n\t// .utilities.runMiddleware(middleware) {{{\n\t/**\n\t* Run optional middleware\n\t*\n\t* Middleware can be:\n\t* \t- A function(req, res, next)\n\t*\t- An array of functions(req, res, next) - Functions will be called in sequence, all functions must call the next method\n\t*\t- A string - If specified (and `obj` is also specified) the middleware to use will be looked up as a key of the object. This is useful if you need to invoke similar methods on different entry points (e.g. 
monoxide.express.middleware('widgets', {save: function(req, res, next) { // Check something // }, create: 'save'}) - where the `create` method invokes the same middleware as `save)\n\t*\n\t* @param {null|function|array} middleware The optional middleware to run this can be a function, an array of functions or a string\n\t* @param {function} callback The callback to invoke when completed. This may not be called\n\t* @param {object} obj The parent object to look up inherited functions from (if middleware is a string)\n\t*\n\t* @example\n\t* // Set up a Monoxide express middleware to check user logins on each save or create operaion\n\t* app.use('/api/widgets/:id?', monoxide.express.middleware('widgets', {\n\t* \tcreate: function(req, res, next) {\n\t*\t\tif (req.user && req.user._id) {\n\t* \t\t\tnext();\n\t* \t\t} else {\n\t* \t\t\tres.status(403).send('You are not logged in').end();\n\t*\t\t}\n\t*\t},\n\t* \tsave: 'create', // Point to the same checks as the `create` middleware\n\t* }));\n\n\t*/\n\to.utilities.runMiddleware = function(req, res, middleware, callback, obj) {\n\t\tvar thisContext = this;\n\t\tvar runnable; // The middleware ARRAY to run\n\n\t\tif (_.isBoolean(middleware) && !middleware) { // Boolean=false - deny!\n\t\t\tres.status(403).end();\n\t\t} else if (_.isUndefined(middleware) || _.isNull(middleware)) { // Nothing to do anyway\n\t\t\treturn callback();\n\t\t} else if (_.isFunction(middleware)) {\n\t\t\trunnable = [middleware];\n\t\t} else if (_.isArray(middleware)) {\n\t\t\trunnable = middleware;\n\t\t} else if (_.isString(middleware) && _.has(obj, middleware)) {\n\t\t\treturn o.utilities.runMiddleware(req, res, _.get(obj, middleware), callback, obj); // Defer to the pointer\n\t\t}\n\n\t\tasync()\n\t\t\t.limit(1)\n\t\t\t.forEach(runnable, function(nextMiddleware, middlewareFunc, index) {\n\t\t\t\tmiddlewareFunc.apply(thisContext, [req, res, nextMiddleware]);\n\t\t\t})\n\t\t\t.end(function(err) {\n\t\t\t\tif (err) 
{\n\t\t\t\t\to.express.sendError(res, 403, err);\n\t\t\t\t} else {\n\t\t\t\t\tcallback();\n\t\t\t\t}\n\t\t\t});\n\t};\n\t// }}}\n\n\t// .utilities.diff(originalDoc, newDoc) {{{\n\t/**\n\t* Diff two monoxide.monoxideDocument objects and return the changes as an object\n\t* This change object is suitable for passing directly into monoxide.save()\n\t* While originally intended only for comparing monoxide.monoxideDocument objects this function can be used to compare any type of object\n\t* NOTE: If you are comparing MonoxideDocuments call `.toObject()` before passing the object in to strip it of its noise\n\t*\n\t* @name monoxide.utilities.diff\n\t* @see monoxide.save\n\t* @see monoxide.update\n\t*\n\t* @param {Object} originalDoc The original source document to compare to\n\t* @param {Object} newDoc The new document with possible changes\n\t* @return {Object} The patch object\n\t*\n\t* @example\n\t* // Get the patch of two documents\n\t* monoxide.query({$collection: 'widgets', $id: '123'}, function(err, res) {\n\t* \tvar docA = res.toObject();\n\t* \tvar docB = res.toObject();\n\t*\n\t*\t// Change some fields\n\t* \tdocB.title = 'Hello world';\n\t*\n\t* \tvar patch = monoxide.utilities.diff(docA, docB);\n\t* \t// => should only return {title: 'Hello World'}\n\t* });\n\t*/\n\to.utilities.diff = function(originalDoc, newDoc) {\n\t\tvar patch = {};\n\n\t\tdeepDiff.observableDiff(originalDoc, newDoc, function(diff) {\n\t\t\tif (diff.kind == 'N' || diff.kind == 'E') {\n\t\t\t\t_.set(patch, diff.path, diff.rhs);\n\t\t\t} else if (diff.kind == 'A') { // Array alterations\n\t\t\t\t// deepDiff will only apply changes onto newDoc - we can't just apply them to the empty patch object\n\t\t\t\t// so we let deepDiff do its thing then copy the new structure across into patch\n\t\t\t\tdeepDiff.applyChange(originalDoc, newDoc, diff);\n\t\t\t\t_.set(patch, diff.path, _.get(newDoc, diff.path));\n\t\t\t}\n\t\t});\n\n\t\treturn patch;\n\t};\n\t// }}}\n\n\t// .utilities.rewriteQuery(query, 
settings) {{{\n\t/**\n\t* Returns a rewritten version of an incomming query that obeys various rules\n\t* This usually accepts req.query as a parameter and a complex settings object as a secondary\n\t* This function is used internally by middleware functions to clean up the incomming query\n\t*\n\t* @name monoxide.utilities.rewriteQuery\n\t* @see monoxide.middleware\n\t*\n\t* @param {Object} query The user-provided query object\n\t* @param {Object} settings The settings object to apply (see middleware functions)\n\t* @return {Object} The rewritten query object\n\t*/\n\to.utilities.rewriteQuery = function(query, settings) {\n\t\treturn _(query)\n\t\t\t.mapKeys(function(val, key) {\n\t\t\t\tif (_.has(settings.queryRemaps, key)) return settings.queryRemaps[key];\n\t\t\t\treturn key;\n\t\t\t})\n\t\t\t.mapValues(function(val, key) {\n\t\t\t\tif (settings.queryAllowed && settings.queryAllowed[key]) {\n\t\t\t\t\tvar allowed = settings.queryAllowed[key];\n\t\t\t\t\tif (!_.isString(val) && !allowed.scalar) {\n\t\t\t\t\t\treturn null;\n\t\t\t\t\t} else if (allowed.boolean) {\n\t\t\t\t\t\treturn (val == 'true' || val == '1');\n\t\t\t\t\t} else if (_.isString(val) && allowed.scalarCSV) {\n\t\t\t\t\t\treturn val.split(/\\s*,\\s*/);\n\t\t\t\t\t} else if (_.isArray(val) && allowed.array) {\n\t\t\t\t\t\treturn val;\n\t\t\t\t\t} else if (_.isString(val) && allowed.number) {\n\t\t\t\t\t\treturn parseInt(val);\n\t\t\t\t\t} else {\n\t\t\t\t\t\treturn val;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\treturn val;\n\t\t\t})\n\t\t\t.value();\n\t};\n\t// }}}\n\n\t// .utilities.slurpCursor(cursor, cb) {{{\n\t/**\n\t* Asyncronously calls a cursor until it is exhausted\n\t*\n\t* @name monoxide.utilities.slurpCursor\n\t*\n\t* @param {Cursor} cursor A mongo compatible cursor object\n\t* @param {function} cb The callback to call as (err, result) when complete\n\t*/\n\to.utilities.slurpCursor = function(cursor, cb) {\n\t\tvar res = [];\n\n\t\tvar cursorReady = function(err, result) {\n\t\t\tif (result === 
null) { // Cursor is exhausted\n\t\t\t\tcb(null, res);\n\t\t\t} else {\n\t\t\t\tres.push(result);\n\t\t\t\tsetTimeout(function() { // Queue fetcher in timeout so we don't stack overflow\n\t\t\t\t\tcursor.next(cursorReady);\n\t\t\t\t});\n\t\t\t}\n\t\t};\n\n\t\tcursor.next(cursorReady);\n\t};\n\t// }}}\n\t// }}}\n\n\t// Create internals mapping {{{\n\to.internal = o; // Mapping for the original function handlers (e.g. get() before any mutations)\n\t// }}}\n\n\treturn o;\n}\n\nutil.inherits(Monoxide, events.EventEmitter);\n\nmodule.exports = new Monoxide();\n"},"new_file":{"kind":"string","value":"index.js"},"old_contents":{"kind":"string","value":"var _ = require('lodash')\n\t.mixin(require('lodash-deep'));\nvar argy = require('argy');\nvar async = require('async-chainable');\nvar debug = require('debug')('monoxide');\nvar deepDiff = require('deep-diff');\nvar events = require('events');\nvar mongoose = require('mongoose');\nvar traverse = require('traverse');\nvar util = require('util');\n\n/**\n* @static monoxide\n*/\nfunction Monoxide() {\n\tvar o = this;\n\to.mongoose = mongoose;\n\to.models = {};\n\to.connection;\n\to.settings = {\n\t\tremoveAll: true, // Allow db.model.delete() calls with no arguments\n\t\tversionIncErr: /^MongoError: Cannot apply \\$inc to a value of non-numeric type. 
{.+} has the field '__v' of non-numeric type null$/i, // RegExp error detector used to detect $inc problems when trying to increment `__v` in update operations\n\t};\n\n\t// .connect {{{\n\t/**\n\t* Connect to a Mongo database\n\t* @param {string} uri The URL of the database to connect to\n\t* @param {function} [callback] Optional callback when connected, if omitted this function is syncronous\n\t* @return {monoxide} The Monoxide chainable object\n\t*/\n\to.connect = function(uri, callback) {\n\t\tmongoose.set('useFindAndModify', false);\n\t\tmongoose.set('useCreateIndex', true);\n\t\tmongoose.connect(uri, {\n\t\t\tpromiseLibrary: global.Promise,\n\t\t\tuseNewUrlParser: true,\n\t\t})\n\t\t.then(function() {\n\t\t\to.connection = mongoose.connection;\n\t\t\tif (callback) callback();\n\t\t})\n\t\t.catch(e => callback(e))\n\n\t\treturn o;\n\t};\n\t// }}}\n\n\t// .disconnect {{{\n\t/**\n\t* Disconnect from an active connection\n\t* @return {monoxide} The Monoxide chainable object\n\t*/\n\to.disconnect = function(callback) {\n\t\tmongoose.disconnect(callback);\n\n\t\treturn o;\n\t};\n\t// }}}\n\n\t// .get(q, [id], callback) {{{\n\t/**\n\t* Retrieve a single record from a model via its ID\n\t* This function will ONLY retrieve via the ID field, all other fields are ignored\n\t* NOTE: Really this function just wraps the monoxide.query() function to provide functionality like populate\n\t*\n\t* @name monoxide.get\n\t* @memberof monoxide\n\t*\n\t* @param {Object} q The object to process\n\t* @param {string} q.$collection The collection / model to query\n\t* @param {string} [q.$id] The ID to return\n\t* @param {(string|string[]|object[])} [q.$populate] Population criteria to apply\n\t*\n\t* @param {string} [id] The ID to return (alternative syntax)\n\t*\n\t* @param {function} callback(err, result) the callback to call on completion or error\n\t*\n\t* @return {Object} This chainable object\n\t*\n\t* @example\n\t* // Return a single widget by its ID (string syntax)\n\t* 
monoxide.get('widgets', '56e2421f475c1ef4135a1d58', function(err, res) {\n\t* \tconsole.log('Widget:', res);\n\t* });\n\t*\n\t* @example\n\t* // Return a single widget by its ID (object syntax)\n\t* monoxide.get({$collection: 'widgets', $id: '56e2421f475c1ef4135a1d58'}, function(err, res) {\n\t* \tconsole.log('Widget:', res);\n\t* });\n\t*/\n\to.get = argy('[object|string|number] [string|number|object] function', function(q, id, callback) {\n\t\targy(arguments)\n\t\t\t.ifForm('object function', function(aQ, aCallback) {\n\t\t\t\tq = aQ;\n\t\t\t\tcallback = aCallback;\n\t\t\t})\n\t\t\t.ifForm('string string|number function', function(aCollection, aId, aCallback) {\n\t\t\t\tq = {\n\t\t\t\t\t$collection: aCollection,\n\t\t\t\t\t$id: aId,\n\t\t\t\t};\n\t\t\t})\n\t\t\t.ifForm('string object function', function(aCollection, aId, aCallback) { // Probably being passed a Mongoose objectId as the ID\n\t\t\t\tq = {\n\t\t\t\t\t$collection: aCollection,\n\t\t\t\t\t$id: aId.toString(),\n\t\t\t\t};\n\t\t\t});\n\n\t\tif (!q.$id) return callback('No $id specified');\n\t\treturn o.internal.query(q, callback);\n\t});\n\t// }}}\n\n\t// .query([q], callback) {{{\n\t/**\n\t* Query Mongo directly with the Monoxide query syntax\n\t*\n\t* @name monoxide.query\n\t* @fires query\n\t*\n\t* @param {Object} q The object to process\n\t* @param {string} q.$collection The collection / model to query\n\t* @param {string} [q.$id] If specified return only one record by its master ID (implies $one=true). If present all other conditionals will be ignored and only the object is returned (see $one)\n\t* @param {(string|string[]|object[])} [q.$select] Field selection criteria to apply (implies q.$applySchema=false as we will be dealing with a partial schema). 
Any fields prefixed with '-' are removed\n\t* @param {(string|string[]|object[])} [q.$sort] Sorting criteria to apply\n\t* @param {(string|string[]|object[])} [q.$populate] Population criteria to apply\n\t* @param {boolean} [q.$one=false] Whether a single object should be returned (implies $limit=1). If enabled an object is returned not an array\n\t* @param {number} [q.$limit] Limit the return to this many rows\n\t* @param {number} [q.$skip] Offset return by this number of rows\n\t* @param {boolean=false} [q.$count=false] Only count the results - do not return them. If enabled a number of returned with the result\n\t* @param {object|function} [q.$data] Set the user-defined data object, if this is a function the callback result is used\n\t* @param {boolean} [q.$decorate=true] Add all Monoxide methods, functions and meta properties\n\t* @param {string} [q.$want='array'] How to return data contents. ENUM: 'array', 'cursor'\n\t* @param {boolean} [q.$plain=false] Return a plain object or object array. This is the equivelent of calling .toObject() on any resultant object. Implies $decorate=true\n\t* @param {boolean} [q.$cacheFKs=true] Cache the foreign keys (objectIDs) within an object so future retrievals dont have to recalculate the model structure\n\t* @param {boolean} [q.$applySchema=true] Apply the schema for each document retrieval - this slows retrieval but means any alterations to the schema are applied to each retrieved record\n\t* @param {boolean} [q.$dirty=false] Whether the entire document contents should be marked as dirty (modified). If true this also skips the computation of modified fields\n\t* @param {boolean} [q.$errNotFound] Raise an error if a specifically requested document is not found (requires $id)\n\t* @param {...*} [q.filter] Any other field (not beginning with '$') is treated as filtering criteria\n\t*\n\t* @param {function} callback(err, result) the callback to call on completion or error. 
If $one is truthy this returns a single monoxide.monoxideDocument, if not it returns an array of them\n\t*\n\t* @return {Object} This chainable object\n\t*\n\t* @example\n\t* // Return all Widgets, sorted by name\n\t* monoxide.query({$collection: 'widgets', $sort: 'name'}, function(err, res) {\n\t* \tconsole.log('Widgets:', res);\n\t* });\n\t* @example\n\t* // Filter Users to only return admins while also populating their country\n\t* monoxide.query({$collection: 'users', $populate: 'country', role: 'admin'}, function(err, res) {\n\t* \tconsole.log('Admin users:', res);\n\t* });\n\t*/\n\to.query = argy('[string|object] function', function MonoxideQuery(q, callback) {\n\t\tif (argy.isType(q, 'string')) q = {$collection: q};\n\n\t\t_.defaults(q || {}, {\n\t\t\t$cacheFKs: true, // Cache model Foreign Keys (used for populates) or compute them every time\n\t\t\t$want: 'array',\n\t\t\t$applySchema: true, // Apply the schema on retrieval - this slows ths record retrieval but means any alterations to the schema are applied to each retrieved record\n\t\t\t$errNotFound: true, // During $id / $one operations raise an error if the record is not found\n\t\t});\n\t\tif (!_.isEmpty(q.$select)) q.$applySchema = false; // Turn off schema application when using $select as we wont be grabbing the full object\n\n\t\tasync()\n\t\t\t.set('metaFields', [\n\t\t\t\t'$collection', // Collection to query\n\t\t\t\t'$data', // Meta user-defined data object\n\t\t\t\t'$dirty', // Whether the document is unclean\n\t\t\t\t'$id', // If specified return only one record by its master ID (implies $one=true). If present all other conditionals will be ignored and only the object is returned (see $one)\n\t\t\t\t'$select', // Field selection criteria to apply\n\t\t\t\t'$sort', // Sorting criteria to apply\n\t\t\t\t'$populate', // Population criteria to apply\n\t\t\t\t'$one', // Whether a single object should be returned (implies $limit=1). 
If enabled an object is returned not an array\n\t\t\t\t'$limit', // Limit the return to this many rows\n\t\t\t\t'$skip', // Offset return by this number of rows\n\t\t\t\t'$count', // Only count the results - do not return them\n\t\t\t\t'$want', // What result we are looking for from the query\n\t\t\t\t'$cacheFKs', // Cache model Foreign Keys (used for populates) or compute them every time\n\t\t\t\t'$applySchema', // Apply the schema on retrieval - this slows ths record retrieval but means any alterations to the schema are applied to each retrieved record\n\t\t\t\t'$decorate',\n\t\t\t\t'$plain',\n\t\t\t\t'$errNotFound', // During $id / $one operations raise an error if the record is not found\n\t\t\t])\n\t\t\t// Sanity checks {{{\n\t\t\t.then(function(next) {\n\t\t\t\tif (!q || _.isEmpty(q)) return next('No query given for get operation');\n\t\t\t\tif (!q.$collection) return next('$collection must be specified for get operation');\n\t\t\t\tif (!o.models[q.$collection]) return next('Model not initalized: \"' + q.$collection + '\"');\n\t\t\t\tnext();\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// .query - start the find query {{{\n\t\t\t.set('filterPostPopulate', {}) // Filter by these fields post populate\n\t\t\t.then('query', function(next) {\n\t\t\t\tvar me = this;\n\t\t\t\tvar fields;\n\n\t\t\t\tif (q.$id) { // Search by one ID only - ignore other fields\n\t\t\t\t\tfields = {_id: q.$id};\n\t\t\t\t\tq.$one = true;\n\t\t\t\t} else { // Search by query\n\t\t\t\t\tfields = _(q)\n\t\t\t\t\t\t.omit(this.metaFields) // Remove all meta fields\n\t\t\t\t\t\t// FIXME: Ensure all fields are flat\n\t\t\t\t\t\t.omitBy(function(val, key) { // Remove all fields that will need populating later\n\t\t\t\t\t\t\tif (_.some(q.$collection.$oids, function(FK) {\n\t\t\t\t\t\t\t\treturn _.startsWith(key, FK);\n\t\t\t\t\t\t\t})) {\n\t\t\t\t\t\t\t\tme.filterPostPopulate[key] = val;\n\t\t\t\t\t\t\t\treturn true;\n\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\treturn 
false;\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t})\n\t\t\t\t\t\t.value();\n\t\t\t\t}\n\n\t\t\t\t//console.log('FIELDS', fields);\n\t\t\t\t//console.log('POSTPOPFIELDS', o.filterPostPopulate);\n\n\t\t\t\tif (q.$count) {\n\t\t\t\t\tnext(null, o.models[q.$collection].$mongooseModel.countDocuments(fields));\n\t\t\t\t} else if (q.$one) {\n\t\t\t\t\tnext(null, o.models[q.$collection].$mongooseModel.findOne(fields));\n\t\t\t\t} else {\n\t\t\t\t\tnext(null, o.models[q.$collection].$mongooseModel.find(fields));\n\t\t\t\t}\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// Apply various simple criteria {{{\n\t\t\t.then(function(next) {\n\t\t\t\tif (q.$count) return next(); // No point doing anything else if just counting\n\t\t\t\tif (q.$limit) this.query.limit(q.$limit);\n\t\t\t\tif (q.$skip) this.query.skip(q.$skip);\n\n\t\t\t\t// q.$populate {{{\n\t\t\t\tif (q.$populate) {\n\t\t\t\t\tif (_.isArray(q.$populate)) {\n\t\t\t\t\t\tthis.query.populate(q.$populate);\n\t\t\t\t\t} else if (_.isString(q.$populate) || _.isObject(q.$populate)) {\n\t\t\t\t\t\tthis.query.populate(q.$populate);\n\t\t\t\t\t\tq.$populate = [q.$populate]; // Also rewrite into an array so we can destructure later\n\t\t\t\t\t} else {\n\t\t\t\t\t\tthrow new Error('Invalid sort type: ' + (typeof q.$sort));\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\t// }}}\n\t\t\t\t// q.$select {{{\n\t\t\t\tif (q.$select) {\n\t\t\t\t\tif (_.isArray(q.$select)) {\n\t\t\t\t\t\tvar query = this.query;\n\t\t\t\t\t\tq.$select.forEach(function(s) {\n\t\t\t\t\t\t\tquery.select(s);\n\t\t\t\t\t\t});\n\t\t\t\t\t} else if (_.isString(q.$select) || _.isObject(q.$select)) {\n\t\t\t\t\t\tthis.query.select(q.$select);\n\t\t\t\t\t} else {\n\t\t\t\t\t\tthrow new Error('Invalid select type: ' + (typeof q.$select));\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\t// }}}\n\t\t\t\t// q.$sort {{{\n\t\t\t\tif (q.$sort) {\n\t\t\t\t\tif (_.isArray(q.$sort)) {\n\t\t\t\t\t\tvar query = this.query;\n\t\t\t\t\t\tq.$sort.forEach(function(s) {\n\t\t\t\t\t\t\tquery.sort(s);\n\t\t\t\t\t\t});\n\t\t\t\t\t} else if 
(_.isString(q.$sort) || _.isObject(q.$sort)) {\n\t\t\t\t\t\tthis.query.sort(q.$sort);\n\t\t\t\t\t} else {\n\t\t\t\t\t\tthrow new Error('Invalid sort type: ' + (typeof q.$sort));\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\t// }}}\n\t\t\t\tnext();\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// Calculate $data if it is a function {{{\n\t\t\t.then('data', function(next) {\n\t\t\t\tif (!q.$data) return next();\n\t\t\t\tif (_.isFunction(q.$data)) {\n\t\t\t\t\tq.$data(function(err, data) {\n\t\t\t\t\t\tif (err) return next(err);\n\t\t\t\t\t\tq.$data = data;\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t\tnext();\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// Fire hooks {{{\n\t\t\t.then(function(next) {\n\t\t\t\to.models[q.$collection].fire('query', next, q);\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// Execute and capture return {{{\n\t\t\t.then('result', function(next) {\n\t\t\t\tswitch (q.$want) {\n\t\t\t\t\tcase 'array':\n\t\t\t\t\t\tthis.query.exec(function(err, res) {\n\t\t\t\t\t\t\tif (err) return next(err);\n\n\t\t\t\t\t\t\tif (q.$one) {\n\t\t\t\t\t\t\t\tif (_.isEmpty(res)) {\n\t\t\t\t\t\t\t\t\tif (q.$errNotFound) {\n\t\t\t\t\t\t\t\t\t\tnext('Not found');\n\t\t\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\t\t\tnext(null, undefined);\n\t\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\t\tnext(null, res);\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t} else if (q.$count) {\n\t\t\t\t\t\t\t\tnext(null, res);\n\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\tnext(null, res);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t});\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase 'cursor':\n\t\t\t\t\t\tnext(null, this.query.cursor());\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tdefault:\n\t\t\t\t\t\tnext('Unknown $want type');\n\t\t\t\t}\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// Convert Mongoose Documents into Monoxide Documents {{{\n\t\t\t.then('result', function(next) {\n\t\t\t\t// Not wanting an array of data? 
- pass though the result\n\t\t\t\tif (q.$want != 'array') return next(null, this.result);\n\n\t\t\t\tif (this.result === undefined) {\n\t\t\t\t\tnext(null, undefined);\n\t\t\t\t} else if (q.$one) {\n\t\t\t\t\tif (q.$decorate) return next(null, this.result.toObject());\n\t\t\t\t\tnext(null, new o.monoxideDocument({\n\t\t\t\t\t\t$collection: q.$collection,\n\t\t\t\t\t\t$applySchema: q.$applySchema,\n\t\t\t\t\t\t$decorate: q.$decorate,\n\t\t\t\t\t\t$dirty: q.$dirty,\n\t\t\t\t\t}, this.result));\n\t\t\t\t} else if (q.$count) {\n\t\t\t\t\tnext(null, this.result);\n\t\t\t\t} else {\n\t\t\t\t\tnext(null, this.result.map(function(doc) {\n\t\t\t\t\t\tif (q.$decorate) return doc.toObject();\n\t\t\t\t\t\treturn new o.monoxideDocument({\n\t\t\t\t\t\t\t$collection: q.$collection,\n\t\t\t\t\t\t\t$applySchema: q.$applySchema,\n\t\t\t\t\t\t\t$decorate: q.$decorate,\n\t\t\t\t\t\t\t$dirty: q.$dirty,\n\t\t\t\t\t\t}, doc.toObject());\n\t\t\t\t\t}));\n\t\t\t\t}\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// Apply populates {{{\n\t\t\t.then(function(next) {\n\t\t\t\t// Not wanting an array of data? - pass though the result\n\t\t\t\tif (q.$want != 'array') return next(null, this.result);\n\n\t\t\t\tif (!q.$populate || !q.$populate.length || q.$count || q.$decorate === false || q.$plain === false || this.result === undefined) return next(); // Skip\n\t\t\t\tasync()\n\t\t\t\t\t.forEach(_.castArray(this.result), (next, doc) => {\n\t\t\t\t\t\tasync()\n\t\t\t\t\t\t\t.forEach(q.$populate, (next, pop) => {\n\t\t\t\t\t\t\t\tvar path = _.isString(pop) ? 
pop : pop.path;\n\t\t\t\t\t\t\t\tif (!o.utilities.isObjectId(_.get(doc, path))) return next(); // Already populated\n\n\t\t\t\t\t\t\t\tdoc.populate(path, next);\n\t\t\t\t\t\t\t})\n\t\t\t\t\t\t\t.end(next);\n\t\t\t\t\t})\n\t\t\t\t\t.end(next);\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// End {{{\n\t\t\t.end(function(err) {\n\t\t\t\tif (err) {\n\t\t\t\t\tdebug('get() error', err);\n\t\t\t\t\treturn callback(err);\n\t\t\t\t} else if (q.$count) {\n\t\t\t\t\tcallback(null, this.result);\n\t\t\t\t} else {\n\t\t\t\t\tcallback(null, this.result);\n\t\t\t\t}\n\t\t\t});\n\t\t\t// }}}\n\t\treturn o;\n\t});\n\t// }}}\n\n\t// .count([q], callback) {{{\n\t/**\n\t* Similar to query() but only return the count of possible results rather than the results themselves\n\t*\n\t* @name monoxide.count\n\t* @see monoxide.query\n\t*\n\t* @param {Object} q The object to process\n\t* @param {string} q.$collection The collection / model to query\n\t* @param {...*} [q.filter] Any other field (not beginning with '$') is treated as filtering criteria\n\t*\n\t* @param {function} callback(err,count) the callback to call on completion or error\n\t*\n\t* @return {Object} This chainable object\n\t*\n\t* @example\n\t* // Count all Widgets\n\t* monoxide.count({$collection: 'widgets'}, function(err, count) {\n\t* \tconsole.log('Number of Widgets:', count);\n\t* });\n\t*\n\t* @example\n\t* // Count all admin Users\n\t* monoxide.query({$collection: 'users', role: 'admin'}, function(err, count) {\n\t* \tconsole.log('Number of Admin Users:', count);\n\t* });\n\t*/\n\to.count = argy('[string|object] function', function MonoxideCount(q, callback) {\n\t\tif (argy.isType(q, 'string')) q = {$collection: q};\n\n\t\t// Glue count functionality to query\n\t\tq.$count = true;\n\n\t\treturn o.internal.query(q, callback);\n\t});\n\t// }}}\n\n\t// .save(item, [callback]) {{{\n\t/**\n\t* Save an existing Mongo document by its ID\n\t* If you wish to create a new document see the monoxide.create() function.\n\t* If the existing 
document ID is not found this function will execute the callback with an error\n\t*\n\t* @name monoxide.save\n\t* @fires save\n\t* @fires postSave\n\t*\n\t* @param {Object} q The object to process\n\t* @param {string} q.$collection The collection / model to query\n\t* @param {string} q.$id The ID of the document to save\n\t* @param {boolean} [q.$refetch=true] Whether to refetch the record after update, false returns `null` in the callback\n\t* @param {boolean} [q.$errNoUpdate=false] Raise an error if no documents were actually updated\n\t* @param {boolean} [q.$errBlankUpdate=false] Raise an error if no fields are updated\n\t* @param {boolean} [q.$returnUpdated=true] If true returns the updated document, if false it returns the document that was replaced\n\t* @param {boolean} [q.$version=true] Increment the `__v` property when updating\n\t* @param {...*} [q.field] Any other field (not beginning with '$') is treated as data to save\n\t*\n\t* @param {function} [callback(err,result)] Optional callback to call on completion or error\n\t*\n\t* @return {Object} This chainable object\n\t*\n\t* @example\n\t* // Save a Widget\n\t* monoxide.save({\n\t* \t$collection: 'widgets',\n\t* \t$id: 1234,\n\t* \tname: 'New name',\n\t* }, function(err, widget) {\n\t* \tconsole.log('Saved widget is now', widget);\n\t* });\n\t*/\n\to.save = argy('object [function]', function(q, callback) {\n\t\t_.defaults(q || {}, {\n\t\t\t$refetch: true, // Fetch and return the record when updated (false returns null)\n\t\t\t$errNoUpdate: false,\n\t\t\t$errBlankUpdate: false,\n\t\t\t$returnUpdated: true,\n\t\t\t$version: true,\n\t\t});\n\n\t\tasync()\n\t\t\t.set('metaFields', [\n\t\t\t\t'$id', // Mandatory field to specify while record to update\n\t\t\t\t'_id', // We also need to clip this from the output (as we cant write to it), but we need to pass it to hooks\n\t\t\t\t'$collection', // Collection to query to find the original record\n\t\t\t\t'$data', // Meta user-defined 
data\n\t\t\t\t'$refetch',\n\t\t\t\t'$errNoUpdate',\n\t\t\t\t'$errBlankUpdate',\n\t\t\t\t'$version',\n\t\t\t\t'$returnUpdated',\n\t\t\t])\n\t\t\t// Sanity checks {{{\n\t\t\t.then(function(next) {\n\t\t\t\tif (!q || _.isEmpty(q)) return next('No query given for save operation');\n\t\t\t\tif (!q.$collection) return next('$collection must be specified for save operation');\n\t\t\t\tif (!q.$id) return next('ID not specified');\n\t\t\t\tif (!o.models[q.$collection]) return next('Model not initalized');\n\t\t\t\tq._id = q.$id;\n\t\t\t\tnext();\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// Calculate $data if it is a function {{{\n\t\t\t.then(function(next) {\n\t\t\t\tif (!q.$data) return next();\n\t\t\t\tif (_.isFunction(q.$data)) {\n\t\t\t\t\tq.$data(function(err, data) {\n\t\t\t\t\t\tif (err) return next(err);\n\t\t\t\t\t\tq.$data = data;\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t\tnext();\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// Fire the 'save' hook on the model {{{\n\t\t\t.then(function(next) {\n\t\t\t\to.models[q.$collection].fire('save', next, q);\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// Peform the update {{{\n\t\t\t.then('newRec', function(next) {\n\t\t\t\tvar patch = _.omit(q, this.metaFields);\n\t\t\t\tif (_.isEmpty(patch)) {\n\t\t\t\t\tif (q.$errBlankUpdate) return next('Nothing to update');\n\t\t\t\t\tif (q.$refetch) {\n\t\t\t\t\t\treturn o.internal.get({$collection: q.$collection, $id: q.$id}, next);\n\t\t\t\t\t} else {\n\t\t\t\t\t\treturn next(null, {});\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t_.forEach(o.models[q.$collection].$oids, function(fkType, schemaPath) {\n\t\t\t\t\tif (!_.has(patch, schemaPath)) return; // Not patching this field anyway\n\n\t\t\t\t\tswitch(fkType.type) {\n\t\t\t\t\t\tcase 'objectId': // Convert the field to an OID if it isn't already\n\t\t\t\t\t\t\tif (_.has(q, schemaPath)) {\n\t\t\t\t\t\t\t\tvar newVal = _.get(q, schemaPath);\n\t\t\t\t\t\t\t\tif (!o.utilities.isObjectID(newVal))\n\t\t\t\t\t\t\t\t\t_.set(patch, schemaPath, 
o.utilities.objectID(newVal));\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\tcase 'objectIdArray': // Convert each item to an OID if it isn't already\n\t\t\t\t\t\t\tif (_.has(q, schemaPath)) {\n\t\t\t\t\t\t\t\tvar gotOIDs = _.get(q, schemaPath);\n\t\t\t\t\t\t\t\tif (_.isArray(gotOIDs)) {\n\t\t\t\t\t\t\t\t\t_.set(patch, schemaPath, gotOIDs.map(function(i, idx) {\n\t\t\t\t\t\t\t\t\t\treturn (!o.utilities.isObjectID(newVal))\n\t\t\t\t\t\t\t\t\t\t\t? o.utilities.objectID(i)\n\t\t\t\t\t\t\t\t\t\t\t: i;\n\t\t\t\t\t\t\t\t\t}));\n\t\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\t\tthrow new Error('Expected ' + schemaPath + ' to contain an array of OIDs but got ' + (typeof gotOIDs));\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t}\n\t\t\t\t});\n\n\t\t\t\tvar updateQuery = { _id: o.utilities.objectID(q.$id) };\n\t\t\t\tvar updatePayload = {$set: patch};\n\t\t\t\tvar updateOptions = { returnOriginal: !q.$returnUpdated };\n\t\t\t\tvar updateCallback = function(err, res) {\n\t\t\t\t\tif (q.$version && err && o.settings.versionIncErr.test(err.toString())) { // Error while setting `__v`\n\t\t\t\t\t\t// Remove __v as an increment operator + retry the operation\n\t\t\t\t\t\t// It would be good if $inc could assume `0` when null, but Mongo doesn't support that\n\t\t\t\t\t\tupdatePayload.$set.__v = 1;\n\t\t\t\t\t\tdelete updatePayload.$inc;\n\t\t\t\t\t\to.models[q.$collection].$mongoModel.findOneAndUpdate(updateQuery, updatePayload, updateOptions, updateCallback);\n\t\t\t\t\t} else if (err) {\n\t\t\t\t\t\tnext(err);\n\t\t\t\t\t} else {\n\t\t\t\t\t\t// This would only really happen if the record has gone away since we started updating\n\t\t\t\t\t\tif (q.$errNoUpdate && !res.ok) return next('No documents updated');\n\n\t\t\t\t\t\tif (!q.$refetch) return next(null, null);\n\t\t\t\t\t\tnext(null, new o.monoxideDocument({$collection: q.$collection}, res.value));\n\t\t\t\t\t}\n\t\t\t\t};\n\n\t\t\t\tif (q.$version) {\n\t\t\t\t\tupdatePayload.$inc = {'__v': 
1};\n\t\t\t\t\tdelete updatePayload.$set.__v; // Remove user updates of __v\n\t\t\t\t}\n\n\t\t\t\t// Actually perform the action\n\t\t\t\to.models[q.$collection].$mongoModel.findOneAndUpdate(\n\t\t\t\t\tupdateQuery, // What we are writing to\n\t\t\t\t\tupdatePayload, // What we are saving\n\t\t\t\t\tupdateOptions, // Options passed to Mongo\n\t\t\t\t\tupdateCallback\n\t\t\t\t);\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// Fire the 'postSave' hook {{{\n\t\t\t.then(function(next) {\n\t\t\t\to.models[q.$collection].fire('postSave', next, q, this.newRec);\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// End {{{\n\t\t\t.end(function(err) {\n\t\t\t\tif (err) {\n\t\t\t\t\tdebug('save() error', err);\n\t\t\t\t\tif (_.isFunction(callback)) callback(err);\n\t\t\t\t} else {\n\t\t\t\t\tif (_.isFunction(callback)) callback(null, this.newRec);\n\t\t\t\t}\n\t\t\t});\n\t\t\t// }}}\n\n\t\t\treturn o;\n\t});\n\t// }}}\n\n\t// .update(q, [with], [callback]) {{{\n\t/**\n\t* Update multiple documents\n\t*\n\t* @name monoxide.update\n\t* @fires update\n\t*\n\t* @param {Object} q The object to query by\n\t* @param {string} q.$collection The collection / model to query\n\t* @param {boolean} [q.$refetch=true] Return the newly updated record\n\t* @param {...*} [q.field] Any other field (not beginning with '$') is treated as filter data\n\t*\n\t* @param {Object} qUpdate The object to update into the found documents\n\t* @param {...*} [qUpdate.field] Data to save into every record found by `q`\n\t*\n\t* @param {function} [callback(err,result)] Optional callback to call on completion or error\n\t*\n\t* @return {Object} This chainable object\n\t*\n\t* @example\n\t* // Set all widgets to active\n\t* monoxide.update({\n\t* \t$collection: 'widgets',\n\t* \tstatus: 'active',\n\t* });\n\t*/\n\to.update = argy('object|string [object] [function]', function MonoxideUpdate(q, qUpdate, callback) {\n\t\tvar o = this;\n\t\tif (argy.isType(q, 'string')) q = {$collection: q};\n\n\t\t_.defaults(q || {}, {\n\t\t\t$refetch: true, // 
Fetch and return the record when updated (false returns null)\n\t\t});\n\n\t\tasync()\n\t\t\t.set('metaFields', [\n\t\t\t\t'$collection', // Collection to query to find the original record\n\t\t\t\t'$data', // Meta user-defined data\n\t\t\t\t'$refetch',\n\t\t\t])\n\t\t\t// Sanity checks {{{\n\t\t\t.then(function(next) {\n\t\t\t\tif (!q || _.isEmpty(q)) return next('No query given for get operation');\n\t\t\t\tif (!q.$collection) return next('$collection must be specified for get operation');\n\t\t\t\tif (!o.models[q.$collection]) return next('Model not initalized');\n\t\t\t\tnext();\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// Calculate $data if it is a function {{{\n\t\t\t.then(function(next) {\n\t\t\t\tif (!q.$data) return next();\n\t\t\t\tif (_.isFunction(q.$data)) {\n\t\t\t\t\tq.$data(function(err, data) {\n\t\t\t\t\t\tif (err) return next(err);\n\t\t\t\t\t\tq.$data = data;\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t\tnext();\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// Fire the 'update' hook {{{\n\t\t\t.then(function(next) {\n\t\t\t\to.models[q.$collection].fire('update', next, q);\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// Peform the update {{{\n\t\t\t.then('rawResponse', function(next) {\n\t\t\t\to.models[q.$collection].$mongooseModel.updateMany(_.omit(q, this.metaFields), _.omit(qUpdate, this.metaFields), {multi: true}, next);\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// End {{{\n\t\t\t.end(function(err) {\n\t\t\t\tif (err) {\n\t\t\t\t\tdebug('update() error', err);\n\t\t\t\t\tif (callback) callback(err);\n\t\t\t\t} else {\n\t\t\t\t\tif (callback) callback(null, this.newRec);\n\t\t\t\t}\n\t\t\t});\n\t\t\t// }}}\n\n\t\t\treturn o;\n\t});\n\t// }}}\n\n\t// .create(item, [callback]) {{{\n\t/**\n\t* Create a new Mongo document and return it\n\t* If you wish to save an existing document see the monoxide.save() function.\n\t*\n\t* @name monoxide.create\n\t*\n\t* @param {Object} q The object to process\n\t* @param {string} q.$collection The collection / model to query\n\t* @param {boolean} [q.$refetch=true] Return 
the newly create record\n\t* @param {boolean} [q.$version=true] Set the `__v` field to 0 when creating the document\n\t* @param {...*} [q.field] Any other field (not beginning with '$') is treated as data to save\n\t*\n\t* @param {function} [callback(err,result)] Optional callback to call on completion or error\n\t*\n\t* @return {Object} This chainable object\n\t*\n\t* @example\n\t* // Create a Widget\n\t* monoxide.save({\n\t* \t$collection: 'widgets',\n\t* \tname: 'New widget name',\n\t* }, function(err, widget) {\n\t* \tconsole.log('Created widget is', widget);\n\t* });\n\t*/\n\to.create = argy('object [function]', function MonoxideQuery(q, callback) {\n\t\t_.defaults(q || {}, {\n\t\t\t$refetch: true, // Fetch and return the record when created (false returns null)\n\t\t\t$version: true,\n\t\t});\n\n\t\tasync()\n\t\t\t.set('metaFields', [\n\t\t\t\t'$collection', // Collection to query to find the original record\n\t\t\t\t'$data', // Meta user-defined data\n\t\t\t\t'$refetch',\n\t\t\t\t'$version',\n\t\t\t])\n\t\t\t// Sanity checks {{{\n\t\t\t.then(function(next) {\n\t\t\t\tif (!q || _.isEmpty(q)) return next('No query given for save operation');\n\t\t\t\tif (!q.$collection) return next('$collection must be specified for save operation');\n\t\t\t\tif (!o.models[q.$collection]) return next('Model not initalized');\n\t\t\t\tnext();\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// Calculate $data if it is a function {{{\n\t\t\t.then(function(next) {\n\t\t\t\tif (!q.$data) return next();\n\t\t\t\tif (_.isFunction(q.$data)) {\n\t\t\t\t\tq.$data(function(err, data) {\n\t\t\t\t\t\tif (err) return next(err);\n\t\t\t\t\t\tq.$data = data;\n\t\t\t\t\t\tnext();\n\t\t\t\t\t});\n\t\t\t\t} else {\n\t\t\t\t\tnext();\n\t\t\t\t}\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// Coherse all OIDs (or arrays of OIDs) into their correct internal type {{{\n\t\t\t.then(function(next) {\n\t\t\t\t_.forEach(o.models[q.$collection].$oids, function(fkType, schemaPath) {\n\t\t\t\t\tswitch(fkType.type) {\n\t\t\t\t\t\tcase 
'objectId': // Convert the field to an OID if it isn't already\n\t\t\t\t\t\t\tif (_.has(q, schemaPath)) {\n\t\t\t\t\t\t\t\tvar newVal = _.get(q, schemaPath);\n\t\t\t\t\t\t\t\tif (!o.utilities.isObjectID(newVal))\n\t\t\t\t\t\t\t\t\t_.set(q, schemaPath, o.utilities.objectID(newVal));\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\tcase 'objectIdArray': // Convert each item to an OID if it isn't already\n\t\t\t\t\t\t\tif (_.has(q, schemaPath)) {\n\t\t\t\t\t\t\t\tvar gotOIDs = _.get(q, schemaPath);\n\t\t\t\t\t\t\t\tif (_.isArray(gotOIDs)) {\n\t\t\t\t\t\t\t\t\t_.set(q, schemaPath, gotOIDs.map(function(i, idx) {\n\t\t\t\t\t\t\t\t\t\treturn (!o.utilities.isObjectID(newVal))\n\t\t\t\t\t\t\t\t\t\t\t? o.utilities.objectID(i)\n\t\t\t\t\t\t\t\t\t\t\t: i;\n\t\t\t\t\t\t\t\t\t}));\n\t\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\t\tthrow new Error('Expected ' + schemaPath + ' to contain an array of OIDs but got ' + (typeof gotOIDs));\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t\tnext();\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// Add version information if $version==true {{{\n\t\t\t.then(function(next) {\n\t\t\t\tif (!q.$version) return next();\n\t\t\t\tq.__v = 0;\n\t\t\t\tnext();\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// Create record {{{\n\t\t\t.then('createDoc', function(next) { // Compute the document we will create\n\t\t\t\tnext(null, new o.monoxideDocument({\n\t\t\t\t\t$collection: q.$collection,\n\t\t\t\t\t$dirty: true, // Mark all fields as modified (and not bother to compute the clean markers)\n\t\t\t\t}, _.omit(q, this.metaFields)));\n\t\t\t})\n\t\t\t.then(function(next) {\n\t\t\t\to.models[q.$collection].fire('create', next, this.createDoc);\n\t\t\t})\n\t\t\t.then('rawResponse', function(next) {\n\t\t\t\to.models[q.$collection].$mongoModel.insertOne(this.createDoc.toMongoObject(), next);\n\t\t\t})\n\t\t\t.then(function(next) {\n\t\t\t\to.models[q.$collection].fire('postCreate', next, q, this.createDoc);\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// 
Refetch record {{{\n\t\t\t.then('newRec', function(next) {\n\t\t\t\tif (!q.$refetch) return next(null, null);\n\t\t\t\to.internal.query({\n\t\t\t\t\t$collection: q.$collection,\n\t\t\t\t\t$id: this.rawResponse.insertedId.toString(),\n\t\t\t\t}, function(err, res) {\n\t\t\t\t\tif (err == 'Not found') return next('Document creation failed');\n\t\t\t\t\tnext(err, res);\n\t\t\t\t});\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// End {{{\n\t\t\t.end(function(err) {\n\t\t\t\tif (err) {\n\t\t\t\t\tdebug('create() error', err);\n\t\t\t\t\tif (_.isFunction(callback)) callback(err);\n\t\t\t\t} else {\n\t\t\t\t\tif (_.isFunction(callback)) callback(null, this.newRec);\n\t\t\t\t}\n\t\t\t});\n\t\t\t// }}}\n\n\t\t\treturn o;\n\t});\n\t// }}}\n\n\t// .delete(item, [callback]) {{{\n\t/**\n\t* Delete a Mongo document by its ID\n\t* This function has two behaviours - it will, by default, only delete a single record by its ID. If `q.$multiple` is true it will delete by query.\n\t* If `q.$multiple` is false and the document is not found (by `q.$id`) this function will execute the callback with an error\n\t* Delete will only work with no parameters if monoxide.settings.removeAll is truthy as an extra safety check\n\t*\n\t* @name monoxide.delete\n\t*\n\t* @param {Object} [q] The object to process\n\t* @param {string} [q.$collection] The collection / model to query\n\t* @param {string} [q.$id] The ID of the document to delete (if you wish to do a remove based on query set q.$query=true)\n\t* @param {boolean} [q.$multiple] Allow deletion of multiple records by query\n\t* @param {boolean} [q.$errNotFound] Raise an error if a specifically requested document is not found (requires $id)\n\t*\n\t* @param {function} [callback(err,result)] Optional callback to call on completion or error\n\t*\n\t* @return {Object} This chainable object\n\t*/\n\to.delete = o.remove = argy('object [function]', function MonoxideQuery(q, callback) {\n\t\t_.defaults(q || {}, {\n\t\t\t$errNotFound: true, // During raise an error 
if $id is specified but not found to delete\n\t\t});\n\n\t\tasync()\n\t\t\t.set('metaFields', [\n\t\t\t\t'$id', // Mandatory field to specify while record to update\n\t\t\t\t'$collection', // Collection to query to find the original record\n\t\t\t\t'$data', // Meta user-defined data\n\t\t\t\t'$multiple', // Whether to allow deletion by query\n\t\t\t\t'$errNotFound',\n\t\t\t])\n\t\t\t// Sanity checks {{{\n\t\t\t.then(function(next) {\n\t\t\t\tif (!q || _.isEmpty(q)) return next('No query given for delete operation');\n\t\t\t\tif (!q.$collection) return next('$collection must be specified for delete operation');\n\t\t\t\tif (!q.$id && !q.$multiple) return next('$id or $multiple must be speciied during delete operation');\n\n\t\t\t\tif (!o.settings.removeAll && !q.$id && _.isEmpty(_.omit(q, this.metaFields))) { // Apply extra checks to make sure we are not nuking everything if we're not allowed\n\t\t\t\t\treturn next('delete operation not allowed with empty query');\n\t\t\t\t}\n\t\t\t\tnext();\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// Calculate $data if it is a function {{{\n\t\t\t.then(function(next) {\n\t\t\t\tif (!q.$data) return next();\n\t\t\t\tif (_.isFunction(q.$data)) {\n\t\t\t\t\tq.$data(function(err, data) {\n\t\t\t\t\t\tif (err) return next(err);\n\t\t\t\t\t\tq.$data = data;\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t\tnext();\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// Delete record {{{\n\t\t\t.then(function(next) {\n\t\t\t\tif (q.$multiple) { // Multiple delete operation\n\t\t\t\t\to.internal.query(_.merge(_.omit(q, this.metaFields), {$collection: q.$collection, $select: 'id'}), function(err, rows) {\n\t\t\t\t\t\tasync()\n\t\t\t\t\t\t\t.forEach(rows, function(next, row) {\n\t\t\t\t\t\t\t\to.internal.delete({$collection: q.$collection, $id: row._id}, next);\n\t\t\t\t\t\t\t})\n\t\t\t\t\t\t\t.end(next);\n\t\t\t\t\t});\n\t\t\t\t} else { // Single item delete\n\t\t\t\t\t// Check that the hook returns ok\n\t\t\t\t\to.models[q.$collection].fire('delete', function(err) {\n\t\t\t\t\t\t// Now 
actually delete the item\n\t\t\t\t\t\to.models[q.$collection].$mongoModel.deleteOne({_id: o.utilities.objectID(q.$id)}, function(err, res) {\n\t\t\t\t\t\t\tif (err) return next(err);\n\t\t\t\t\t\t\tif (q.$errNotFound && !res.result.ok) return next('Not found');\n\t\t\t\t\t\t\t// Delete was sucessful - call event then move next\n\t\t\t\t\t\t\to.models[q.$collection].fire('postDelete', next, {_id: q.$id});\n\t\t\t\t\t\t});\n\t\t\t\t\t}, {_id: q.$id});\n\t\t\t\t}\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// End {{{\n\t\t\t.end(function(err) {\n\t\t\t\tif (err) {\n\t\t\t\t\tdebug('delete() error', err);\n\t\t\t\t\tif (callback) callback(err);\n\t\t\t\t} else {\n\t\t\t\t\tif (callback) callback(null, this.newRec);\n\t\t\t\t}\n\t\t\t});\n\t\t\t// }}}\n\n\t\t\treturn o;\n\t});\n\t// }}}\n\n\t// .meta(item, [callback]) {{{\n\t/**\n\t* Return information about a Mongo collection schema\n\t*\n\t* @name monoxide.meta\n\t*\n\t* @param {Object} q The object to process\n\t* @param {string} q.$collection The collection / model to examine\n\t* @param {boolean} [q.$collectionEnums=false] Provide all enums as a collection object instead of an array\n\t* @param {boolean} [q.$filterPrivate=true] Ignore all private fields\n\t* @param {boolean} [q.$prototype=false] Provide the $prototype meta object\n\t* @param {boolean} [q.$indexes=false] Include whether a field is indexed\n\t*\n\t* @param {function} [callback(err,result)] Optional callback to call on completion or error\n\t*\n\t* @return {Object} This chainable object\n\t*\n\t* @example\n\t* // Describe a collection\n\t* monoxide.meta({$collection: 'widgets'}, function(err, res) {\n\t* \tconsole.log('About the widget collection:', res);\n\t* });\n\t*/\n\to.meta = argy('[object] function', function MonoxideMeta(q, callback) {\n\t\t_.defaults(q || {}, {\n\t\t\t$filterPrivate: true,\n\t\t\t$prototype: false,\n\t\t\t$indexes: false,\n\t\t});\n\n\t\tasync()\n\t\t\t.set('metaFields', [\n\t\t\t\t'$collection', // Collection to query to find the 
original record\n\t\t\t\t'$data', // Meta user-defined data\n\t\t\t\t'$filterPrivate', // Filter out /^_/ fields\n\t\t\t\t'$collectionEnums', // Convert enums into a collection (with `id` + `title` fields per object)\n\t\t\t\t'$prototype',\n\t\t\t\t'$indexes',\n\t\t\t])\n\t\t\t// Sanity checks {{{\n\t\t\t.then(function(next) {\n\t\t\t\tif (!q || _.isEmpty(q)) return next('No query given for meta operation');\n\t\t\t\tif (!q.$collection) return next('$collection must be specified for meta operation');\n\t\t\t\tif (!o.models[q.$collection]) return next('Cannot find collection to extract its meta information: ' + q.$collection);\n\t\t\t\tnext();\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// Retrieve the meta information {{{\n\t\t\t.then('meta', function(next) {\n\t\t\t\tvar sortedPaths = _(o.models[q.$collection].$mongooseModel.schema.paths)\n\t\t\t\t\t.map((v,k) => v)\n\t\t\t\t\t.sortBy('path')\n\t\t\t\t\t.value();\n\n\t\t\t\tvar meta = {\n\t\t\t\t\t_id: {type: 'objectid', index: true}, // FIXME: Is it always the case that a doc has an ID?\n\t\t\t\t};\n\n\t\t\t\t_.forEach(sortedPaths, function(path) {\n\t\t\t\t\tvar id = path.path;\n\n\t\t\t\t\tif (q.$filterPrivate && _.last(path.path.split('.')).startsWith('_')) return; // Skip private fields\n\n\t\t\t\t\tvar info = {};\n\t\t\t\t\tswitch (path.instance.toLowerCase()) {\n\t\t\t\t\t\tcase 'string':\n\t\t\t\t\t\t\tinfo.type = 'string';\n\t\t\t\t\t\t\tif (path.enumValues && path.enumValues.length) {\n\t\t\t\t\t\t\t\tif (q.$collectionEnums) {\n\t\t\t\t\t\t\t\t\tinfo.enum = path.enumValues.map(e => ({\n\t\t\t\t\t\t\t\t\t\tid: e,\n\t\t\t\t\t\t\t\t\t\ttitle: _.startCase(e),\n\t\t\t\t\t\t\t\t\t}));\n\t\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\t\tinfo.enum = path.enumValues;\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\tcase 'number':\n\t\t\t\t\t\t\tinfo.type = 'number';\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\tcase 'date':\n\t\t\t\t\t\t\tinfo.type = 'date';\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\tcase 
'boolean':\n\t\t\t\t\t\t\tinfo.type = 'boolean';\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\tcase 'array':\n\t\t\t\t\t\t\tinfo.type = 'array';\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\tcase 'object':\n\t\t\t\t\t\t\tinfo.type = 'object';\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\tcase 'objectid':\n\t\t\t\t\t\t\tinfo.type = 'objectid';\n\t\t\t\t\t\t\tif (_.has(path, 'options.ref')) info.ref = path.options.ref;\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\tdefault:\n\t\t\t\t\t\t\tdebug('Unknown Mongo data type during meta extract on ' + q.$collection + ':', path.instance.toLowerCase());\n\t\t\t\t\t}\n\n\t\t\t\t\t// Extract default value if its not a function (otherwise return [DYNAMIC])\n\t\t\t\t\tif (path.defaultValue) info.default = argy.isType(path.defaultValue, 'scalar') ? path.defaultValue : '[DYNAMIC]';\n\n\t\t\t\t\tif (q.$indexes && path._index) info.index = true;\n\n\t\t\t\t\tmeta[id] = info;\n\t\t\t\t});\n\n\t\t\t\tnext(null, meta);\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// Construct the prototype if $prototype=true {{{\n\t\t\t.then(function(next) {\n\t\t\t\tif (!q.$prototype) return next();\n\n\t\t\t\tvar prototype = this.meta.$prototype = {};\n\n\t\t\t\t_.forEach(this.meta, function(v, k) {\n\t\t\t\t\tif (!_.has(v, 'default')) return;\n\t\t\t\t\tif (v.default == '[DYNAMIC]') return; // Ignore dynamic values\n\t\t\t\t\t_.set(prototype, k, v.default);\n\t\t\t\t});\n\n\t\t\t\tnext();\n\t\t\t})\n\t\t\t// }}}\n\t\t\t// End {{{\n\t\t\t.end(function(err) {\n\t\t\t\tif (err) {\n\t\t\t\t\tdebug('meta() error', err);\n\t\t\t\t\tif (callback) callback(err);\n\t\t\t\t} else {\n\t\t\t\t\tif (callback) callback(null, this.meta);\n\t\t\t\t}\n\t\t\t});\n\t\t\t// }}}\n\n\t\t\treturn o;\n\t});\n\t// }}}\n\n\t// .runCommand(command, [callback]) {{{\n\t/**\n\t* Run an internal MongoDB command and fire an optional callback on the result\n\t*\n\t* @name monoxide.meta\n\t*\n\t* @param {Object} cmd The command to process\n\t* @param {function} [callback(err,result)] Optional callback to call on completion or error\n\t* 
@return {Object} This chainable object\n\t* @example\n\t*/\n\to.runCommand = argy('object [function]', function MonoxideRunCommand(cmd, callback) {\n\t\to.connection.db.command(cmd, callback);\n\t\treturn o;\n\t});\n\t// }}}\n\n\t// .queryBuilder() - query builder {{{\n\t/**\n\t* Returns data from a Monoxide model\n\t* @class\n\t* @name monoxide.queryBuilder\n\t* @return {monoxide.queryBuilder}\n\t* @fires queryBuilder Fired as (callback, qb) when a new queryBuilder object is created\n\t*/\n\to.queryBuilder = function monoxideQueryBuilder() {\n\t\tvar qb = this;\n\t\tqb.$MONOXIDE = true;\n\t\tqb.query = {};\n\n\t\t// qb.find(q, cb) {{{\n\t\t/**\n\t\t* Add a filtering function to an existing query\n\t\t* @name monoxide.queryBuilder.find\n\t\t* @memberof monoxide.queryBuilder\n\t\t* @param {Object|function} [q] Optional filtering object or callback (in which case we act as exec())\n\t\t* @param {function} [callback] Optional callback. If present this is the equivelent of calling exec()\n\t\t* @return {monoxide.queryBuilder} This chainable object\n\t\t*/\n\t\tqb.find = argy('[object|string] [function]', function(q, callback) {\n\t\t\tif (argy.isType(q, 'object')) {\n\t\t\t\t_.merge(qb.query, q);\n\t\t\t} else {\n\t\t\t\tq = {$id: q};\n\t\t\t}\n\n\t\t\tif (callback) qb.exec(callback);\n\n\t\t\treturn qb;\n\t\t});\n\t\t// }}}\n\n\t\t// qb.select(q, cb) {{{\n\t\t/**\n\t\t* Add select criteria to an existing query\n\t\t* If this function is passed a falsy value it is ignored\n\t\t* @name monoxide.queryBuilder.select\n\t\t* @memberof monoxide.queryBuilder\n\t\t* @param {Object|Array|string} [q] Select criteria, for strings or arrays of strings use the field name optionally prefixed with '-' for omission. For Objects use `{field: 1|-1}`\n\t\t* @param {function} [callback] Optional callback. 
If present this is the equivelent of calling exec()\n\t\t* @return {monoxide.queryBuilder} This chainable object\n\t\t*/\n\t\tqb.select = argy('string|array [function]', function(q, callback) {\n\t\t\targy(arguments)\n\t\t\t\t.ifForm(['string', 'string function'], function(id, callback) {\n\t\t\t\t\tif (qb.query.$select) {\n\t\t\t\t\t\tqb.query.$select.push(id);\n\t\t\t\t\t} else {\n\t\t\t\t\t\tqb.query.$select = [id];\n\t\t\t\t\t}\n\t\t\t\t\tif (callback) q.exec(callback);\n\t\t\t\t})\n\t\t\t\t.ifForm(['array', 'array function'], function(ids, callback) {\n\t\t\t\t\tif (qb.query.$select) {\n\t\t\t\t\t\tqb.query.$select.push.apply(this, ids);\n\t\t\t\t\t} else {\n\t\t\t\t\t\tqb.query.$select = ids;\n\t\t\t\t\t}\n\t\t\t\t\tif (callback) q.exec(callback);\n\t\t\t\t})\n\n\t\t\treturn qb;\n\t\t});\n\t\t// }}}\n\n\t\t// qb.sort(q, cb) {{{\n\t\t/**\n\t\t* Add sort criteria to an existing query\n\t\t* If this function is passed a falsy value it is ignored\n\t\t* @name monoxide.queryBuilder.sort\n\t\t* @memberof monoxide.queryBuilder\n\t\t* @param {Object|Array|string} [q] Sorting criteria, for strings or arrays of strings use the field name optionally prefixed with '-' for decending search order. For Objects use `{ field: 1|-1|'asc'|'desc'}`\n\t\t* @param {function} [callback] Optional callback. 
If present this is the equivelent of calling exec()\n\t\t* @return {monoxide.queryBuilder} This chainable object\n\t\t*/\n\t\tqb.sort = argy('string|array|undefined [function]', function(q, callback) {\n\t\t\targy(arguments)\n\t\t\t\t.ifForm('', function() {})\n\t\t\t\t.ifForm('undefined', function() {})\n\t\t\t\t.ifForm(['string', 'string function'], function(field, callback) {\n\t\t\t\t\tif (qb.query.$sort) {\n\t\t\t\t\t\tqb.query.$sort.push(field);\n\t\t\t\t\t} else {\n\t\t\t\t\t\tqb.query.$sort = [field];\n\t\t\t\t\t}\n\n\t\t\t\t\tif (callback) qb.exec(callback);\n\t\t\t\t})\n\t\t\t\t.ifForm(['array', 'array function'], function(fields, callback) {\n\t\t\t\t\tif (qb.query.$sort) {\n\t\t\t\t\t\tqb.query.$sort.push.apply(this, fields);\n\t\t\t\t\t} else {\n\t\t\t\t\t\tqb.query.$sort = fields;\n\t\t\t\t\t}\n\n\t\t\t\t\tif (callback) qb.exec(callback);\n\t\t\t\t})\n\n\t\t\treturn qb;\n\t\t});\n\t\t// }}}\n\n\t\t// qb.limit(q, cb) {{{\n\t\t/**\n\t\t* Add limit criteria to an existing query\n\t\t* If this function is passed a falsy value the limit is removed\n\t\t* @name monoxide.queryBuilder.limit\n\t\t* @memberof monoxide.queryBuilder\n\t\t* @param {number|string} q Limit records to this number (it will be parsed to an Int)\n\t\t* @param {function} [callback] Optional callback. 
If present this is the equivelent of calling exec()\n\t\t* @return {monoxide.queryBuilder} This chainable object\n\t\t*/\n\t\tqb.limit = argy('[falsy|string|number] [function]', function(q, callback) {\n\t\t\tif (!q) {\n\t\t\t\tdelete qb.query.$limit;\n\t\t\t} else if (argy.isType(q, 'string')) {\n\t\t\t\tqb.query.$limit = parseInt(q);\n\t\t\t} else {\n\t\t\t\tqb.query.$limit = q;\n\t\t\t}\n\n\t\t\tif (callback) return qb.exec(callback);\n\n\t\t\treturn qb;\n\t\t});\n\t\t// }}}\n\n\t\t// qb.skip(q, cb) {{{\n\t\t/**\n\t\t* Add skip criteria to an existing query\n\t\t* If this function is passed a falsy value the skip offset is removed\n\t\t* @name monoxide.queryBuilder.skip\n\t\t* @memberof monoxide.queryBuilder\n\t\t* @param {number} q Skip this number of records (it will be parsed to an Int)\n\t\t* @param {function} [callback] Optional callback. If present this is the equivelent of calling exec()\n\t\t* @return {monoxide.queryBuilder} This chainable object\n\t\t*/\n\t\tqb.skip = argy('[falsy|string|number] [function]', function(q, callback) {\n\t\t\tif (!q) {\n\t\t\t\tdelete qb.query.$skip;\n\t\t\t} else if (argy.isType(q, 'string')) {\n\t\t\t\tqb.query.$skip = parseInt(q);\n\t\t\t} else {\n\t\t\t\tqb.query.$skip = q;\n\t\t\t}\n\n\t\t\tif (callback) return qb.exec(callback);\n\n\t\t\treturn qb;\n\t\t});\n\t\t// }}}\n\n\t\t// qb.populate(q, cb) {{{\n\t\t/**\n\t\t* Add population criteria to an existing query\n\t\t* If this function is passed a falsy value it is ignored\n\t\t* @name monoxide.queryBuilder.populate\n\t\t* @memberof monoxide.queryBuilder\n\t\t* @param {Array|string} [q] Population criteria, for strings or arrays of strings use the field name\n\t\t* @param {function} [callback] Optional callback. 
If present this is the equivelent of calling exec()\n\t\t* @return {monoxide.queryBuilder} This chainable object\n\t\t*/\n\t\tqb.populate = argy('string|array [function]', function(q, callback) {\n\t\t\targy(arguments)\n\t\t\t\t.ifForm('', function() {})\n\t\t\t\t.ifForm(['string', 'string function'], function(field, callback) {\n\t\t\t\t\tif (qb.query.$populate) {\n\t\t\t\t\t\tqb.query.$populate.push(field);\n\t\t\t\t\t} else {\n\t\t\t\t\t\tqb.query.$populate = [field];\n\t\t\t\t\t}\n\n\t\t\t\t\tif (callback) qb.exec(callback);\n\t\t\t\t})\n\t\t\t\t.ifForm(['array', 'array function'], function(fields, callback) {\n\t\t\t\t\tif (qb.query.$populate) {\n\t\t\t\t\t\tqb.query.$populate.push.apply(this, fields);\n\t\t\t\t\t} else {\n\t\t\t\t\t\tqb.query.$populate = fields;\n\t\t\t\t\t}\n\n\t\t\t\t\tif (callback) qb.exec(callback);\n\t\t\t\t})\n\n\t\t\treturn qb;\n\t\t});\n\t\t// }}}\n\n\t\t// qb.exec(cb) {{{\n\t\t/**\n\t\t* Execute the query and return the error and any results\n\t\t* @name monoxide.queryBuilder.exec\n\t\t* @memberof monoxide.queryBuilder\n\t\t* @param {function} callback(err,result)\n\t\t* @return {monoxide.queryBuilder} This chainable object\n\t\t*/\n\t\tqb.exec = argy('function', function(callback) {\n\t\t\treturn o.internal.query(qb.query, callback);\n\t\t});\n\t\t// }}}\n\n\t\t// qb.optional() {{{\n\t\t/**\n\t\t* Convenience function to set $errNotFound\n\t\t* @name monoxide.queryBuilder.optional\n\t\t* @memberof monoxide.queryBuilder\n\t\t* @param {Object|function} [isOptional=true] Whether the return from this query should NOT throw an error if nothing was found\n\t\t* @param {function} [callback] Optional callback. 
If present this is the equivelent of calling exec()\n\t\t* @return {monoxide.queryBuilder} This chainable object\n\t\t*/\n\t\tqb.optional = argy('[boolean|null|undefined] [function]', function(isOptional, callback) {\n\t\t\tif (argy.isType(isOptional, ['null', 'undefined'])) {\n\t\t\t\tqb.query.$errNotFound = false;\n\t\t\t} else {\n\t\t\t\tqb.query.$errNotFound = !! isOptional;\n\t\t\t}\n\n\t\t\tif (callback) qb.exec(callback);\n\n\t\t\treturn qb;\n\t\t});\n\t\t// }}}\n\n\t\t// qb.promise() {{{\n\t\t/**\n\t\t* Convenience function to execute the query and return a promise with the result\n\t\t* @name monoxide.queryBuilder.promise\n\t\t* @memberof monoxide.queryBuilder\n\t\t* @return {Mongoose.queryBuilder} This chainable object\n\t\t*/\n\t\tqb.promise = function(callback) {\n\t\t\treturn new Promise(function(resolve, reject) {\n\t\t\t\to.internal.query(qb.query, function(err, result) {\n\t\t\t\t\tif (err) {\n\t\t\t\t\t\treject(err);\n\t\t\t\t\t} else {\n\t\t\t\t\t\tresolve(result);\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t});\n\t\t};\n\n\t\t// Wrap all promise functions in a convnience wrapper\n\t\t['then', 'catch', 'finally'].forEach(f => {\n\t\t\tqb[f] = function() {\n\t\t\t\tvar p = qb.promise();\n\t\t\t\treturn p[f].apply(p, arguments);\n\t\t\t};\n\t\t});\n\t\t// }}}\n\n\n\t\t// qb.cursor() {{{\n\t\t/**\n\t\t* Convenience function to return the generated cursor back from a queryBuilder object\n\t\t* @name monoxide.queryBuilder.cursor\n\t\t* @memberof monoxide.queryBuilder\n\t\t* @param {function} callback(err, cursor)\n\t\t* @return {Mongoose.queryBuilder} This chainable object\n\t\t*/\n\t\tqb.cursor = function(callback) {\n\t\t\tqb.query.$want = 'cursor';\n\t\t\treturn o.internal.query(qb.query, callback);\n\t\t};\n\t\t// }}}\n\n\t\to.fireImmediate('queryBuilder', qb);\n\n\t\treturn qb;\n\t};\n\t// }}}\n\n\t// .monoxideModel([options]) - monoxide model instance {{{\n\t/**\n\t* @class\n\t*/\n\to.monoxideModel = argy('string|object', function monoxideModel(settings) 
{
		var mm = this;

		// Accept a bare collection name as shorthand for {$collection: name}
		if (argy.isType(settings, 'string')) settings = {$collection: settings};

		// Sanity checks {{{
		if (!settings.$collection) throw new Error('new MonoxideModel({$collection: }) requires at least \'$collection\' to be specified');
		if (!o.connection) throw new Error('Trying to create a MonoxideModel before a connection has been established');
		if (!o.connection.db) throw new Error('Connection does not look like a MongoDB-Core object');
		// }}}

		/**
		* The raw MongoDB-Core model
		* @var {Object}
		*/
		mm.$mongoModel = o.connection.db.collection(settings.$collection.toLowerCase());
		if (!mm.$mongoModel) throw new Error('Model not found in MongoDB-Core - did you forget to call monoxide.schema(\'name\', ) first?');

		/**
		* The raw Mongoose model
		* @deprecated This will eventually go away and be replaced with raw `mm.$mongoModel` calls
		* @var {Object}
		*/
		mm.$mongooseModel = o.connection.base.models[settings.$collection.toLowerCase()];

		/**
		* Holder for all OID information
		* This can either be the `._id` of the object, sub-documents, array pointers or object pointers
		* @see monoxide.utilities.extractFKs
		* @var {Object}
		*/
		mm.$oids = _.has(mm, '$mongooseModel.schema') ? 
o.utilities.extractFKs(mm.$mongooseModel.schema) : {};

		/**
		* Optional model schema
		* NOTE: This is the user defined schema as-is NOT the computed $mongooseModel.schema
		* @var {Object}
		*/
		mm.$schema = settings.$schema;

		// Internal state holders for the model
		mm.$collection = settings.$collection;
		mm.$methods = {};
		mm.$virtuals = {};
		mm.$hooks = {};
		mm.$data = {};


		/**
		* Shortcut function to create a monoxide.queryBuilder object and immediately start filtering
		* This also sets $count=true in the queryBuilder
		* @name monoxide.monoxideModel.count
		* @see monoxide.queryBuilder.find
		*
		* @param {Object} [q] Optional filtering object
		* @param {function} [callback] Optional callback. If present this is the equivelent of calling exec()
		* @return {monoxide.queryBuilder}
		*/
		mm.count = function(q, callback) {
			return (new o.queryBuilder())
				.find({
					$collection: mm.$collection, // Set the collection from the model
					$count: true,
				})
				.find(q, callback); // Then re-parse the find query into the new queryBuilder
		};

		/**
		* Shortcut function to create a monoxide.queryBuilder object and immediately start filtering
		* @name monoxide.monoxideModel.find
		* @see monoxide.queryBuilder.find
		*
		* @param {Object} [q] Optional filtering object
		* @param {function} [callback] Optional callback. 
If present this is the equivelent of calling exec()
		* @return {monoxide.queryBuilder}
		*/
		mm.find = function(q, callback) {
			return (new o.queryBuilder())
				.find({$collection: mm.$collection}) // Set the collection from the model
				.find(q, callback); // Then re-parse the find query into the new queryBuilder
		};


		/**
		* Shortcut function to create a monoxide.queryBuilder object and immediately start filtering
		* This also sets $one=true in the queryBuilder
		* @name monoxide.monoxideModel.findOne
		* @see monoxide.queryBuilder.find
		*
		* @param {Object} [q] Optional filtering object
		* @param {function} [callback] Optional callback. If present this is the equivelent of calling exec()
		* @return {monoxide.queryBuilder}
		*/
		mm.findOne = function(q, callback) {
			// Guard against accidental ID lookups - findOneByID is the explicit API for that
			if (argy.isType(q, 'string')) throw new Error('Refusing to allow findOne(String). Use findOneByID if you wish to specify only the ID');

			return (new o.queryBuilder())
				.find({
					$collection: mm.$collection, // Set the collection from the model
					$one: true, // Return a single object
				})
				.find(q, callback); // Then re-parse the find query into the new queryBuilder
		};


		/**
		* Shortcut function to create a monoxide.queryBuilder object and immediately start filtering
		* This also sets $id=q in the queryBuilder
		* @name monoxide.monoxideModel.findOneByID
		* @see monoxide.queryBuilder.find
		*
		* @param {string|Object} [q] The document ID as a string, or an object with a useful toString() (e.g. an ObjectID)
		* @param {function} [callback] Optional callback. 
If present this is the equivelent of calling exec()
		* @return {monoxide.queryBuilder}
		*/
		mm.findOneByID = function(q, callback) {
			// Deal with arguments {{{
			if (argy.isType(q, 'string')) {
				// All ok
			} else if (argy.isType(q, 'object') && q.toString().length) { // Input is an object but we can convert it to something useful
				// NOTE(review): presumably intended for ObjectID instances whose toString() yields the hex ID - confirm
				q = q.toString();
			} else {
				throw new Error('Unknown function call pattern');
			}
			// }}}

			return (new o.queryBuilder())
				.find({
					$collection: mm.$collection, // Set the collection from the model
					$id: q,
				})
				.find(q, callback); // Then re-parse the find query into the new queryBuilder
		};

		/**
		* Alias of findOneByID
		* @see monoxide.queryBuilder.find
		*/
		mm.findOneById = mm.findOneByID;


		/**
		* Shortcut function to create a new record within a collection
		* @name monoxide.monoxideModel.create
		* @see monoxide.create
		*
		* @param {Object} [q] Optional document contents
		* @param {function} [callback] Optional callback
		* @return {monoxide.monoxideModel} The chainable monoxideModel
		*/
		mm.create = argy('object [function]', function(q, callback) {
			q.$collection = mm.$collection;
			o.internal.create(q, callback);
			return mm;
		});


		/**
		* Shortcut to invoke update on a given model
		* @name monoxide.monoxideModel.update
		* @see monoxide.update
		* @param {Object} q The filter to query by
		* @param {Object} qUpdate The object to update into the found documents
		* @param {function} [callback(err,result)] Optional callback to call on completion or error
		* @return {Object} This chainable object
		*/
		mm.update = argy('object object [function]', function(q, qUpdate, callback) {
			q.$collection = mm.$collection;
			o.internal.update(q, qUpdate, callback);
			return 
mm;
		});


		/**
		* Shortcut function to remove a number of rows based on a query
		* @name monoxide.monoxideModel.remove
		* @see monoxide.delete
		*
		* @param {Object} [q] Optional filtering object
		* @param {function} [callback] Optional callback
		* @return {monoxide}
		*/
		mm.remove = argy('[object] [function]', function(q, callback) {
			// $multiple removes every matching document, not just the first
			return o.internal.delete(_.merge({}, q, {$collection: mm.$collection, $multiple: true}), callback);
		});


		/**
		* Alias of remove()
		* @see monoxide.remove()
		*/
		mm.delete = mm.remove;


		/**
		* Run an aggregation pipeline on a model
		* @param {array} q The aggregation pipeline to process
		* @param {function} callback Callback to fire as (err, data)
		* @return {Object} This chainable object
		*/
		mm.aggregate = argy('array function', function(q, callback) {
			o.internal.aggregate({
				$collection: mm.$collection,
				$stages: q,
			}, callback)

			return mm;
		});


		/**
		* Add a method to all documents returned from this model
		* A method is a user defined function which extends the `monoxide.monoxideDocument` prototype
		* @param {string} name The function name to add as a static method
		* @param {function} func The function to add as a static method
		* @return {monoxide.monoxideModel} The chainable monoxideModel
		*/
		mm.method = function(name, func) {
			mm.$methods[name] = func;
			return mm;
		};


		/**
		* Add a static method to a model
		* A static is a user defined function which extends the `monoxide.monoxideModel` prototype
		* @param {string} name The function name to add as a static method
		* @param {function} func The function to add as a static method
		* @return {monoxide.monoxideModel} The chainable monoxideModel
		*/
		mm.static = function(name, func) {
			mm[name] = func;
			return 
mm;
		};


		/**
		* Define a virtual (a handler when a property gets set or read)
		* @param {string|Object} name The virtual name to apply or the full virtual object (must pertain to the Object.defineProperty descriptor)
		* @param {function} getCallback The get function to call when the virtual value is read
		* @param {function} setCallback The set function to call when the virtual value changes
		* @return {monoxide.monoxideModel} The chainable monoxideModel
		*/
		mm.virtual = argy('string [function|falsy] [function|falsy]', function(name, getCallback, setCallback) {
			var q = {};
			if (argy.isType(getCallback, 'function')) q.get = getCallback;
			if (argy.isType(setCallback, 'function')) q.set = setCallback;

			mm.$virtuals[name] = q;
			return mm;
		});


		/**
		* Return whether a model has virtuals
		* @return {boolean} Whether any virtuals are present
		*/
		mm.hasVirtuals = function() {
			return (Object.keys(mm.$virtuals).length > 0);
		};


		/**
		* Attach a hook to a model
		* A hook is exactly the same as a eventEmitter.on() event but must return a callback
		* Multiple hooks can be attached and all will be called in parallel on certain events such as 'save'
		* All hooks must return non-errors to proceed with the operation
		* @param {string} eventName The event ID to hook against
		* @param {function} callback The callback to run when hooked, NOTE: Any falsy callbacks are ignored
		* @return {monoxide.monoxideModel} The chainable monoxideModel
		*/
		mm.hook = function(eventName, callback) {
			if (!callback) return mm; // Ignore falsy callbacks
			if (!mm.$hooks[eventName]) mm.$hooks[eventName] = [];
			mm.$hooks[eventName].push(callback);
			return mm;
		};


		/**
		* Return whether a model has a specific hook
		* If an array is passed the result is whether the model has none or all of the specified 
hooks\n\t\t* @param {string|array|undefined|null} hooks The hook(s) to query, if undefined or null this returns if any hooks are present\n\t\t* @return {boolean} Whether the hook(s) is present\n\t\t*/\n\t\tmm.hasHook = argy('[string|array]', function(hooks) {\n\t\t\tvar out;\n\n\t\t\targy(arguments)\n\t\t\t\t.ifForm('', function() {\n\t\t\t\t\tout = !_.isEmpty(mm.$hooks);\n\t\t\t\t})\n\t\t\t\t.ifForm('string', function(hook) {\n\t\t\t\t\tout = mm.$hooks[hook] && mm.$hooks[hook].length;\n\t\t\t\t})\n\t\t\t\t.ifForm('array', function(hooks) {\n\t\t\t\t\tout = hooks.every(function(hook) {\n\t\t\t\t\t\treturn (mm.$hooks[hook] && mm.$hooks[hook].length);\n\t\t\t\t\t});\n\t\t\t\t});\n\n\t\t\treturn out;\n\t\t});\n\n\n\t\t/**\n\t\t* Execute all hooks attached to a model\n\t\t* This function fires all hooks in parallel and expects all to resolve correctly via callback\n\t\t* NOTE: Hooks are always fired with the callback as the first argument\n\t\t* @param {string} name The name of the hook to invoke\n\t\t* @param {function} callback The callback to invoke on success\n\t\t* @param {...*} parameters Any other parameters to be passed to each hook\n\t\t* @return {monoxide.monoxideModel} The chainable monoxideModel\n\t\t*/\n\t\tmm.fire = function(name, callback) {\n\t\t\tif ( // There is at least one event handler attached\n\t\t\t\t(mm.$hooks[name] && mm.$hooks[name].length)\n\t\t\t\t|| (o.$hooks[name] && o.$hooks[name].length)\n\t\t\t) {\n\t\t\t\tvar eventArgs = _.values(arguments);\n\t\t\t\teventArgs.splice(1, 1); // Remove the 'callback' arg as events cant respond to it anyway\n\t\t\t\tmm.emit.apply(mm, eventArgs);\n\t\t\t} else {\n\t\t\t\treturn callback();\n\t\t\t}\n\n\t\t\t// Calculate the args array we will pass to each hook\n\t\t\tvar hookArgs = _.values(arguments);\n\t\t\thookArgs.shift(); // We will set args[0] to the callback in each case anyway so we only need to shift 1\n\n\t\t\tvar eventArgs = _.values(arguments);\n\t\t\teventArgs.splice(1, 1); // Remove the 
'callback' arg as events cant respond to it anyway\n\n\t\t\tasync()\n\t\t\t\t// Fire hooks attached to this model + global hooks {{{\n\t\t\t\t.forEach([]\n\t\t\t\t\t.concat(o.$hooks[name], mm.$hooks[name])\n\t\t\t\t\t.filter(f => !!f) // Actually is a function?\n\t\t\t\t, function(next, hookFunc) {\n\t\t\t\t\thookArgs[0] = next;\n\t\t\t\t\thookFunc.apply(mm, hookArgs);\n\t\t\t\t})\n\t\t\t\t// }}}\n\t\t\t\t.end(callback);\n\n\t\t\treturn mm;\n\t\t};\n\n\n\t\t/**\n\t\t* Return the meta structure for a specific model\n\t\t* @param {Object} Options to return when computing the meta object. See the main meta() function for details\n\t\t* @param {function} callback The callback to call with (err, layout)\n\t\t* @return {monoxide.monoxideModel} The chainable monoxideModel\n\t\t* @see monoxide.meta()\n\t\t*/\n\t\tmm.meta = argy('[object] function', function(options, callback) {\n\t\t\tvar settings = options || {};\n\t\t\tsettings.$collection = mm.$collection;\n\t\t\to.internal.meta(settings, callback);\n\t\t\treturn mm;\n\t\t});\n\n\t\t/**\n\t\t* Run a third party plugin against a model\n\t\t* This function is really just a shorthand way to pass a Monoxide model into a function\n\t\t* @param {function|string|array} plugins The plugin(s) to run. Each function is run as (model, callback), strings are assumed to be file paths to JS files if they contain at least one '/' or `.` otherwise they are loaded from the `plugins` directory\n\t\t* @return {monoxide.monoxideModel} The chainable monoxideModel\n\t\t*/\n\t\tmm.use = function(plugins, callback) {\n\t\t\tif (!plugins) return callback(); // Do nothing if given falsy\n\n\t\t\tasync()\n\t\t\t\t.forEach(_.castArray(plugins), function(next, plugin) {\n\t\t\t\t\tif (_.isString(plugin)) {\n\t\t\t\t\t\tvar pluginModule = /[\\/\\.]/.test(plugin) // Contains at least one slash or dot?\n\t\t\t\t\t\t\t? 
require(plugin)
							: require(__dirname + '/plugins/' + plugin)
						pluginModule.call(mm, mm, next);
					} else if (_.isFunction(plugin)) {
						plugin.call(mm, mm, next);
					} else {
						next('Unsupported plugin format');
					}
				})
				.end(callback);

			return mm;
		};

		/**
		* Return an array of all distinct field values
		* @param {string} field The field to return the values of
		* @param {function} callback Callback to fire as (err, values)
		* @return {monoxide.monoxideModel} The chainable monoxideModel
		*/
		mm.distinct = function(field, callback) {
			// Delegates to the MongoDB `distinct` command and unwraps its `values` payload
			o.internal.runCommand({
				distinct: mm.$collection,
				key: field,
			}, function(err, res) {
				if (err) return callback(err);
				callback(null, res.values);
			});
			return mm;
		};


		/**
		* Set a simple data key
		* This is usually used to store suplemental information about models
		* @param {Object|string} key The key to set or a full object of keys
		* @param {*} value If `key` is a string the value is the value stored
		* @return {monoxide.monoxideModel} The chainable monoxideModel
		*/
		mm.set = function(key, value) {
			if (argy.isType(key, 'object')) {
				_.assign(mm.$data, key);
			} else if (argy.isType(key, 'string')) {
				mm.$data[key] = value;
			} else {
				throw new Error('Unsupported type storage during set');
			}
			return mm;
		};


		/*
		* Gets a simple data key or returns a fallback
		* @param {string} key The data key to retrieve
		* @param {*} [fallback] The fallback to return if the key is not present
		*/
		mm.get = function(key, fallback) {
			return (argy.isType(mm.$data[key], 'undefined') ? 
fallback : mm.$data[key]);
		};



		/**
		* Retrieve the list of actual on-the-database indexes
		* @param {function} callback Callback to fire as (err, indexes)
		* @return {monoxide.monoxideModel} The chainable monoxideModel
		*/
		mm.getIndexes = function(callback) {
			mm.$mongoModel.indexes(function(err, res) {
				if (err && err.message == 'no collection') {
					callback(null, []); // Collection doesn't exist yet - ignore and return that it has no indexes
				} else {
					callback(err, res);
				}
			});

			return mm;
		};


		/**
		* Return the list of indexes requested by the schema
		* @param {function} callback Callback to fire as (err, indexes)
		* @return {monoxide.monoxideModel} The chainable monoxideModel
		*/
		mm.getSchemaIndexes = function(callback) {
			mm.meta({$indexes: true}, function(err, res) {
				if (err) return callback(err);
				callback(null, _(res)
					.map(function(v, k) {
						return _.assign(v, {id: k});
					})
					.filter(function(v) {
						return !!v.index;
					})
					.map(function(v) {
						// NOTE: local `o` shadows the outer monoxide object here; only the literal below is used
						var o = {name: v.id == '_id' ? '_id_' : v.id, key: {}};
						o.key[v.id] = 1;
						return o;
					})
					.value()
				);
			});

			return mm;
		};


		/**
		* Check this model by a defined list of indexes
		* The return is a duplicate of the input indexes with an additional `status` property which can equal to 'ok' or 'missing'
		* @param {array} [wantIndexes] The indexes to examine against. If omitted the results of model.getSchemaIndexes() is used
		* @param {array} [actualIndexes] The current state of the model to compare against. 
If omitted the results of model.getIndexes() is used
		* @param {function} callback The callback to call as (err, indexes)
		* @return {monoxide.monoxideModel} The chainable monoxideModel
		*/
		mm.checkIndexes = argy('[array] [array] function', function(wantIndexes, actualIndexes, callback) {
			async()
				// Either use provided indexes or determine them {{{
				.parallel({
					wantIndexes: function(next) {
						if (wantIndexes) return next(null, wantIndexes);
						mm.getSchemaIndexes(next);
					},
					actualIndexes: function(next) {
						if (actualIndexes) return next(null, actualIndexes);
						mm.getIndexes(next);
					},
				})
				// }}}
				// Compare indexes against whats declared {{{
				.map('indexReport', 'wantIndexes', function(next, index) {
					// An index counts as present when its key specification deep-equals an existing one
					var foundIndex = this.actualIndexes.find(i => _.isEqual(i.key, index.key));
					if (foundIndex) {
						index.status = 'ok';
					} else {
						index.status = 'missing';
					}

					next(null, index);
				})
				// }}}
				// End {{{
				.end(function(err) {
					if (err) return callback(err);
					callback(null, this.indexReport);
				});
				// }}}
		});


		return mm;
	});
	util.inherits(o.monoxideModel, events.EventEmitter);

	// }}}

	// .monoxideDocument([setup]) - monoxide document instance {{{
	/**
	* Returns a single instance of a Monoxide document
	* @class
	* @name monoxide.monoxideDocument
	* @param {Object} setup The prototype fields. Everything in this object is extended into the prototype
	* @param {boolean} [setup.$applySchema=true] Whether to enforce the model schema on the object. This includes applying default values
	* @param {boolean} [setup.$dirty=false] Whether the entire document contents should be marked as dirty (modified). 
If true this also skips the computation of modified fields
	* @param {boolean} [setup.$decorate=true] Whether to apply any decoration. If false this function returns data undecorated (i.e. no custom Monoxide functionality)
	* @param {string} setup.$collection The collection this document belongs to
	* @param {Object} data The initial data
	* @return {monoxide.monoxideDocument}
	*/
	o.monoxideDocument = function monoxideDocument(setup, data) {
		if (setup.$decorate === false) return data; // Undecorated mode - hand back the raw data untouched
		setup.$dirty = !!setup.$dirty;

		var model = o.models[setup.$collection];

		var proto = {
			$MONOXIDE: true,
			$collection: setup.$collection,
			$populated: {},

			/**
			* Save a document
			* By default this function will only save back modified data
			* If `data` is specified this is used as well as the modified fields (unless `data.$ignoreModified` is falsy, in which case modified fields are ignored)
			* @param {Object} [data] An optional data patch to save
			* @param {boolean} [data.$ignoreModified=false] Ignore all modified fields and only process save data being passed in the `data` object (use this to directly address what should be saved, ignoring everything else). Setting this drastically speeds up the save operation but at the cost of having to be specific as to what to save
			* @param {function} [callback] The callback to invoke on saving
			*/
			save: argy('[object] [function]', function(data, callback) {
				var doc = this;
				var mongoDoc = doc.toMongoObject();
				var patch = {
					$collection: doc.$collection,
					$id: doc._id,
					$errNoUpdate: true, // Throw an error if we fail to update (i.e. record removed before save)
					$returnUpdated: true,
				};

				if (data && data.$ignoreModified) { // Only save incoming data
					delete data.$ignoreModified;
					_.assign(patch, data);
				} else if (data) { // Data is specified as an object but $ignoreModified is not set - use both inputs
					doc.isModified().forEach(function(path) {
						patch[path] = _.get(mongoDoc, path);
					});
					_.assign(patch, data);
				} else {
					// No explicit data - save only the modified paths
					doc.isModified().forEach(function(path) {
						patch[path] = _.get(mongoDoc, path);
					});
				}

				o.internal.save(patch, function(err, newRec) {
					doc = newRec;
					if (_.isFunction(callback)) callback(err, newRec);
				});

				return doc;
			}),

			/**
			* Remove the document from the collection
			* This method is really just a thin wrapper around monoxide.delete()
			* @param {function} [callback] Optional callback to invoke on completion
			* @see monoxide.delete
			*/
			remove: function(callback) {
				var doc = this;
				o.internal.delete({
					$collection: doc.$collection,
					$id: doc._id,
				}, callback);
				return doc;
			},

			/**
			* Remove certain fields from the document object
			* @param {string|regexp|array} fields Either a single field name, regular expression or array of strings/regexps to filter by. 
Any key matching will be removed from the object\n\t\t\t* @return {monoxide.monoxideDocument} This object after the fields have been removed\n\t\t\t*/\n\t\t\tomit: function(fields) {\n\t\t\t\tvar removeFields = _.castArray(fields);\n\t\t\t\ttraverse(this).forEach(function(v) {\n\t\t\t\t\tif (!this.key) return; // Skip array entries\n\t\t\t\t\tvar key = this.key;\n\t\t\t\t\tif (removeFields.some(function(filter) {\n\t\t\t\t\t\treturn (\n\t\t\t\t\t\t\t(_.isString(filter) && key == filter) ||\n\t\t\t\t\t\t\t(_.isRegExp(filter) && filter.test(key))\n\t\t\t\t\t\t);\n\t\t\t\t\t})) {\n\t\t\t\t\t\tthis.remove();\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t\treturn this;\n\t\t\t},\n\n\t\t\t/**\n\t\t\t* Transform a MonoxideDocument into a plain JavaScript object\n\t\t\t* @return {Object} Plain JavaScript object with all special properties and other gunk removed\n\t\t\t*/\n\t\t\ttoObject: function() {\n\t\t\t\tvar doc = this;\n\t\t\t\tvar newDoc = {};\n\t\t\t\t_.forEach(this, function(v, k) {\n\t\t\t\t\tif (doc.hasOwnProperty(k) && !_.startsWith(k, '$')) newDoc[k] = _.clone(v);\n\t\t\t\t});\n\n\t\t\t\treturn newDoc;\n\t\t\t},\n\n\t\t\t/**\n\t\t\t* Transform a MonoxideDocument into a Mongo object\n\t\t\t* This function transforms all OID strings back into their Mongo equivalent\n\t\t\t* @return {Object} Plain JavaScript object with all special properties and other gunk removed\n\t\t\t*/\n\t\t\ttoMongoObject: function() {\n\t\t\t\tvar doc = this;\n\t\t\t\tvar outDoc = doc.toObject(); // Rely on the toObject() syntax to strip out rubbish\n\n\t\t\t\tdoc.getOIDs().forEach(function(node) {\n\t\t\t\t\tswitch (node.fkType) {\n\t\t\t\t\t\tcase 'objectId':\n\t\t\t\t\t\t\tvar oidLeaf = _.get(doc, node.docPath);\n\t\t\t\t\t\t\tif (_.isUndefined(oidLeaf)) return; // Ignore undefined\n\n\t\t\t\t\t\t\tif (!o.utilities.isObjectID(oidLeaf)) {\n\t\t\t\t\t\t\t\tif (_.has(oidLeaf, '_id')) { // Already populated?\n\t\t\t\t\t\t\t\t\t_.set(outDoc, node.docPath, 
o.utilities.objectID(oidLeaf._id));\n\t\t\t\t\t\t\t\t} else { // Convert to an OID\n\t\t\t\t\t\t\t\t\t_.set(outDoc, node.docPath, o.utilities.objectID(oidLeaf));\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\tcase 'objectIdArray':\n\t\t\t\t\t\t\tvar oidLeaf = _.get(doc, node.schemaPath);\n\t\t\t\t\t\t\t_.set(outDoc, node.schemaPath, oidLeaf.map(function(leaf) {\n\t\t\t\t\t\t\t\treturn o.utilities.isObjectID(leaf) ? leaf : o.utilities.objectID(leaf);\n\t\t\t\t\t\t\t}));\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\tdefault:\n\t\t\t\t\t\t\treturn; // Ignore unsupported OID types\n\t\t\t\t\t}\n\t\t\t\t});\n\n\t\t\t\treturn outDoc;\n\t\t\t},\n\n\t\t\tisModified: function(path) {\n\t\t\t\tvar doc = this;\n\t\t\t\tif (path) {\n\t\t\t\t\tvar v = _.get(doc, path);\n\t\t\t\t\tvar pathJoined = _.isArray(path) ? path.join('.') : path;\n\t\t\t\t\tif (o.utilities.isObjectID(v)) {\n\t\t\t\t\t\tif (doc.$populated[pathJoined]) { // Has been populated\n\t\t\t\t\t\t\t// FIXME; What happens if a populated document changes\n\t\t\t\t\t\t\tthrow new Error('Changing populated document objects is not yet supported');\n\t\t\t\t\t\t\treturn false;\n\t\t\t\t\t\t} else { // Has not been populated\n\t\t\t\t\t\t\tif (doc.$originalValues[pathJoined]) { // Compare against the string value\n\t\t\t\t\t\t\t\treturn doc.$originalValues[pathJoined] != v.toString();\n\t\t\t\t\t\t\t} else if (doc.$originalValues[pathJoined + '.id'] && doc.$originalValues[pathJoined + '._bsontype']) { // Known but its stored as a Mongo OID - look into its values to determine its real comparitor string\n\t\t\t\t\t\t\t\t// When the lookup is a raw OID we need to pass the binary junk into the objectID THEN get its string value before we can compare it to the one we last saw when we fetched the object\n\t\t\t\t\t\t\t\treturn o.utilities.objectID(doc.$originalValues[pathJoined + '.id']).toString() != v.toString(); // Compare against the string value\n\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\treturn true; // 
Otherwise declare it modified\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t} else if (_.isObject(v)) { // If its an object (or an array) examine the $clean propertly\n\t\t\t\t\t\treturn !v.$clean;\n\t\t\t\t\t} else {\n\t\t\t\t\t\treturn doc.$originalValues[pathJoined] != v;\n\t\t\t\t\t}\n\t\t\t\t} else {\n\t\t\t\t\tvar modified = [];\n\t\t\t\t\ttraverse(doc).map(function(v) { // NOTE - We're using traverse().map() here as traverse().forEach() actually mutates the array if we tell it not to recurse with this.remove(true) (needed to stop recursion into complex objects if the parent has been changed)\n\t\t\t\t\t\tif (!this.path.length) return; // Root node\n\t\t\t\t\t\tif (_.startsWith(this.key, '$') || this.key == '_id') { // Don't scan down hidden elements\n\t\t\t\t\t\t\treturn this.remove(true);\n\t\t\t\t\t\t} else if (o.utilities.isObjectID(v)) { // Leaf is an object ID\n\t\t\t\t\t\t\tif (doc.isModified(this.path)) modified.push(this.path.join('.'));\n\t\t\t\t\t\t\tthis.remove(true); // Don't scan any deeper\n\t\t\t\t\t\t} else if (doc.isModified(this.path)) {\n\t\t\t\t\t\t\tif (_.isObject(v)) this.remove(true);\n\t\t\t\t\t\t\tmodified.push(this.path.join('.'));\n\t\t\t\t\t\t}\n\t\t\t\t\t});\n\t\t\t\t\treturn modified;\n\t\t\t\t}\n\t\t\t},\n\n\t\t\t/**\n\t\t\t* Expand given paths into objects\n\t\t\t* @param {Object|array|string} populations A single or multiple populations to perform\n\t\t\t* @param {function} callback The callback to run on completion\n\t\t\t* @param {boolean} [strict=false] Whether to raise errors and agressively retry if a population fails\n\t\t\t* @return {Object} This document\n\t\t\t*/\n\t\t\tpopulate: function(populations, callback, strict) {\n\t\t\t\tvar doc = this;\n\t\t\t\tvar populations = _(populations)\n\t\t\t\t\t.castArray()\n\t\t\t\t\t.map(function(population) { // Mangle all populations into objects (each object should contain a path and an optional ref)\n\t\t\t\t\t\tif (_.isString(population)) {\n\t\t\t\t\t\t\treturn {path: 
population};\n\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\treturn population;\n\t\t\t\t\t\t}\n\t\t\t\t\t})\n\t\t\t\t\t.value();\n\n\t\t\t\tvar tryPopulate = function(finish, populations, strict) {\n\t\t\t\t\tvar willPopulate = 0; // Count of items that seem valid that we will try to populate\n\t\t\t\t\tvar failedPopulations = []; // Populations that we couldn't get the end-points of (probably because they are nested)\n\t\t\t\t\tvar populator = async(); // Defered async worker that will actually populate things\n\t\t\t\t\tasync()\n\t\t\t\t\t\t.forEach(populations, function(nextPopulation, population) {\n\t\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\t\tdoc.getNodesBySchemaPath(population.path, true).forEach(function(node) {\n\t\t\t\t\t\t\t\t\tif (!population.ref) {\n\t\t\t\t\t\t\t\t\t\tpopulation.ref = _.get(model, '$mongooseModel.schema.paths.' + node.schemaPath.split('.').join('.schema.paths.') + '.options.ref');\n\t\t\t\t\t\t\t\t\t\tif (!population.ref) throw new Error('Cannot determine collection to use for schemaPath ' + node.schemaPath + '! Specify this is in model with {ref: }');\n\t\t\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\t\t\tif (_.isObject(node.node) && node.node._id) { // Object is already populated\n\t\t\t\t\t\t\t\t\t\twillPopulate++; // Say we're going to resolve this anyway even though we have nothing to do - prevents an issue where the error catcher reports it as a null operation (willPopulate==0)\n\t\t\t\t\t\t\t\t\t} else if (!node.node) {\n\t\t\t\t\t\t\t\t\t\t// Node is falsy - nothing to populate here\n\t\t\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\t\t\tpopulator.defer(function(next) {\n\t\t\t\t\t\t\t\t\t\t\to.internal.query({\n\t\t\t\t\t\t\t\t\t\t\t\t$errNotFound: false,\n\t\t\t\t\t\t\t\t\t\t\t\t$collection: population.ref,\n\t\t\t\t\t\t\t\t\t\t\t\t$id: o.utilities.isObjectID(node.node) ? 
node.node.toString() : node.node,\n\t\t\t\t\t\t\t\t\t\t\t}, function(err, res) {\n\t\t\t\t\t\t\t\t\t\t\t\tif (err) return next(err);\n\t\t\t\t\t\t\t\t\t\t\t\t_.set(doc, node.docPath, res);\n\t\t\t\t\t\t\t\t\t\t\t\tdoc.$populated[node.docPath] = true;\n\t\t\t\t\t\t\t\t\t\t\t\tnext();\n\t\t\t\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t\t\t\twillPopulate++;\n\t\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t\tnextPopulation();\n\t\t\t\t\t\t\t} catch (e) {\n\t\t\t\t\t\t\t\tif (strict) failedPopulations.push(population);\n\t\t\t\t\t\t\t\tnextPopulation();\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t})\n\t\t\t\t\t\t.then(function(next) {\n\t\t\t\t\t\t\tif (willPopulate > 0) {\n\t\t\t\t\t\t\t\tpopulator.await().end(next); // Run all population defers\n\t\t\t\t\t\t\t} else if (strict) {\n\t\t\t\t\t\t\t\tnext('Unable to resolve remaining populations: ' + JSON.stringify(populations) + '. In ' + doc.$collection + '#' + doc._id);\n\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\tnext();\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t})\n\t\t\t\t\t\t.end(function(err) {\n\t\t\t\t\t\t\tif (err) {\n\t\t\t\t\t\t\t\tcallback(err);\n\t\t\t\t\t\t\t} else if (failedPopulations.length) {\n\t\t\t\t\t\t\t\tconsole.log('SILL MORE POPULATIONS TO RUN', failedPopulations);\n\t\t\t\t\t\t\t\tsetTimeout(function() {\n\t\t\t\t\t\t\t\t\tconsole.log('FIXME: Defered runnable');\n\t\t\t\t\t\t\t\t\t//tryPopulate(callback, failedPopulations);\n\t\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\t\tcallback(null, doc);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t});\n\t\t\t\t};\n\t\t\t\ttryPopulate(callback, populations, strict);\n\t\t\t\treturn doc;\n\t\t\t},\n\n\t\t\t/**\n\t\t\t* Retrieves all 'leaf' elements matching a schema path\n\t\t\t* Since any segment of the path could be a nested object, array or sub-document collection this function is likely to return multiple elements\n\t\t\t* For the nearest approximation of how this function operates think of it like performing the jQuery expression: `$('p').each(function() { ... 
})`\n\t\t\t* @param {string} schemaPath The schema path to iterate down\n\t\t\t* @param {boolean} [strict=false] Optional indicator that an error should be thrown if a path cannot be traversed\n\t\t\t* @return {array} Array of all found leaf nodes\n\t\t\t*/\n\t\t\tgetNodesBySchemaPath: function(schemaPath, strict) {\n\t\t\t\tvar doc = this;\n\t\t\t\tvar examineStack = [{\n\t\t\t\t\tnode: doc,\n\t\t\t\t\tdocPath: '',\n\t\t\t\t\tschemaPath: '',\n\t\t\t\t}];\n\n\t\t\t\tvar segments = schemaPath.split('.');\n\t\t\t\tsegments.every(function(pathSegment, pathSegmentIndex) {\n\t\t\t\t\treturn examineStack.every(function(esDoc, esDocIndex) {\n\t\t\t\t\t\tif (esDoc === false) { // Skip this subdoc\n\t\t\t\t\t\t\treturn true;\n\t\t\t\t\t\t} else if (_.isUndefined(esDoc.node[pathSegment]) && pathSegmentIndex == segments.length -1) {\n\t\t\t\t\t\t\texamineStack[esDocIndex] = {\n\t\t\t\t\t\t\t\tnode: esDoc.node[pathSegment],\n\t\t\t\t\t\t\t\tdocPath: esDoc.docPath + '.' + pathSegment,\n\t\t\t\t\t\t\t\tschemaPath: esDoc.schemaPath + '.' + pathSegment,\n\t\t\t\t\t\t\t};\n\t\t\t\t\t\t\treturn true;\n\t\t\t\t\t\t} else if (_.isUndefined(esDoc.node[pathSegment])) {\n\t\t\t\t\t\t\t// If we are trying to recurse into a path segment AND we are not at the leaf of the path (as undefined leaves are ok) - raise an error\n\t\t\t\t\t\t\tif (strict) throw new Error('Cannot traverse into path: \"' + (esDoc.docPath + '.' 
+ pathSegment).substr(1) + '\" for doc ' + doc.$collection + '#' + doc._id);\n\t\t\t\t\t\t\texamineStack[esDocIndex] = false;\n\t\t\t\t\t\t\treturn false;\n\t\t\t\t\t\t} else if (_.isArray(esDoc.node[pathSegment])) { // Found an array - remove this doc and append each document we need to examine at the next stage\n\t\t\t\t\t\t\tesDoc.node[pathSegment].forEach(function(d,i) {\n\t\t\t\t\t\t\t\t// Do this in a forEach to break appart the weird DocumentArray structure we get back from Mongoose\n\t\t\t\t\t\t\t\texamineStack.push({\n\t\t\t\t\t\t\t\t\tnode: d,\n\t\t\t\t\t\t\t\t\tdocPath: esDoc.docPath + '.' + pathSegment + '.' + i,\n\t\t\t\t\t\t\t\t\tschemaPath: esDoc.schemaPath + '.' + pathSegment,\n\t\t\t\t\t\t\t\t})\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t\texamineStack[esDocIndex] = false;\n\t\t\t\t\t\t\treturn true;\n\t\t\t\t\t\t} else if (_.has(esDoc.node, pathSegment)) { // Traverse into object - replace this nodeerence with the new pointer\n\t\t\t\t\t\t\texamineStack[esDocIndex] = {\n\t\t\t\t\t\t\t\tnode: esDoc.node[pathSegment],\n\t\t\t\t\t\t\t\tdocPath: esDoc.docPath + '.' + pathSegment,\n\t\t\t\t\t\t\t\tschemaPath: esDoc.schemaPath + '.' + pathSegment,\n\t\t\t\t\t\t\t};\n\t\t\t\t\t\t\treturn true;\n\t\t\t\t\t\t}\n\t\t\t\t\t});\n\t\t\t\t});\n\n\t\t\t\treturn _(examineStack)\n\t\t\t\t\t.filter()\n\t\t\t\t\t.filter(function(node) {\n\t\t\t\t\t\treturn !! 
node.docPath;\n\t\t\t\t\t})\n\t\t\t\t\t.map(function(node) {\n\t\t\t\t\t\tnode.docPath = node.docPath.substr(1);\n\t\t\t\t\t\tnode.schemaPath = node.schemaPath.substr(1);\n\t\t\t\t\t\treturn node;\n\t\t\t\t\t})\n\t\t\t\t\t.value();\n\t\t\t},\n\n\t\t\t/**\n\t\t\t* Return an array of all OID leaf nodes within the document\n\t\t\t* This function combines the behaviour of monoxide.utilities.extractFKs with monoxide.monoxideDocument.getNodesBySchemaPath)\n\t\t\t* @return {array} An array of all leaf nodes\n\t\t\t*/\n\t\t\tgetOIDs: function() {\n\t\t\t\tvar doc = this;\n\t\t\t\tvar stack = [];\n\n\t\t\t\t_.forEach(model.$oids, function(fkType, schemaPath) {\n\t\t\t\t\tif (fkType.type == 'subDocument') return; // Skip sub-documents (as they are stored against the parent anyway)\n\n\t\t\t\t\tstack = stack.concat(doc.getNodesBySchemaPath(schemaPath)\n\t\t\t\t\t\t.map(function(node) {\n\t\t\t\t\t\t\tnode.fkType = fkType.type;\n\t\t\t\t\t\t\treturn node;\n\t\t\t\t\t\t})\n\t\t\t\t\t);\n\t\t\t\t});\n\t\t\t\treturn stack;\n\t\t\t},\n\n\t\t\t$applySchema: true,\n\t\t};\n\n\t\tproto.delete = proto.remove;\n\n\t\t_.extend(\n\t\t\tproto, // INPUT: Basic prototype\n\t\t\tsetup, // Merge with the incomming prototype (should contain at least $collection)\n\t\t\tmodel.$methods // Merge with model methods\n\t\t);\n\n\t\t// Create the base document\n\t\tvar doc = Object.create(proto);\n\n\t\t// Setup Virtuals\n\t\tObject.defineProperties(doc, model.$virtuals);\n\n\t\t// Convert data to a simple array if its weird Mongoose fluff\n\t\tif (data instanceof mongoose.Document) data = data.toObject();\n\n\t\t_.extend(doc, data);\n\n\t\t// Apply schema\n\t\tif (doc.$applySchema) {\n\t\t\t_.forEach(model.$mongooseModel.schema.paths, function(pathSpec, path) {\n\t\t\t\tvar docValue = _.get(doc, path, undefined);\n\t\t\t\tif (_.isUndefined(docValue)) {\n\t\t\t\t\tif (pathSpec.defaultValue) { // Item is blank but SHOULD have a default\n\t\t\t\t\t\t_.set(doc, path, _.isFunction(pathSpec.defaultValue) 
? pathSpec.defaultValue() : pathSpec.defaultValue);\n\t\t\t\t\t} else {\n\t\t\t\t\t\t_.set(doc, path, undefined);\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t});\n\t\t}\n\n\t\t// Sanitize data to remove all ObjectID crap\n\t\tdoc.getOIDs().forEach(function(node) {\n\t\t\tif (node.fkType == 'objectId') {\n\t\t\t\tvar singleOid = _.get(doc, node.docPath);\n\t\t\t\tif (o.utilities.isObjectID(singleOid))\n\t\t\t\t\t_.set(doc, node.docPath, singleOid.toString());\n\t\t\t} else if (node.fkType == 'objectIdArray') {\n\t\t\t\tvar oidArray = _.get(doc, node.docPath);\n\t\t\t\tif (o.utilities.isObjectID(oidArray)) {\n\t\t\t\t\t_.set(doc, node.docPath, oidArray.toString());\n\t\t\t\t} else if (_.isObject(oidArray) && oidArray._id && o.utilities.isObjectID(oidArray._id)) {\n\t\t\t\t\t// FIXME: Rather crappy sub-document flattening for now\n\t\t\t\t\t// This needs to actually scope into the sub-object schema and flatten each ID and not just the _id element\n\n\t\t\t\t\toidArray._id = oidArray._id.toString();\n\t\t\t\t}\n\t\t\t}\n\t\t});\n\n\n\t\t// Break object into component parts and apply the '$clean' marker to arrays and objects\n\t\tObject.defineProperty(doc, '$originalValues', {\n\t\t\tenumerable: false,\n\t\t\tvalue: {},\n\t\t});\n\n\t\tif (!setup.$dirty) {\n\t\t\ttraverse(doc).forEach(function(v) {\n\t\t\t\t// If its an object (or array) glue the `$clean` property to it to detect writes\n\t\t\t\tif (_.isObject(v)) {\n\t\t\t\t\tObject.defineProperty(v, '$clean', {\n\t\t\t\t\t\tenumerable: false,\n\t\t\t\t\t\tvalue: true,\n\t\t\t\t\t});\n\t\t\t\t} else if (!_.isPlainObject(v)) { // For everything else - stash the original value in this.parent.$originalValues\n\t\t\t\t\tdoc.$originalValues[this.path.join('.')] = o.utilities.isObjectID(v) ? 
v.toString() : v;\n\t\t\t\t}\n\t\t\t});\n\t\t}\n\n\t\t// Apply population data\n\t\tdoc.getOIDs().forEach(function(node) {\n\t\t\tdoc.$populated[node.docPath] = o.utilities.isObjectID(node.docPath);\n\t\t\tif (!setup.$dirty) doc.$originalValues[node.docPath] = _.get(doc, node.docPath);\n\t\t});\n\n\t\treturn doc;\n\t};\n\t// }}}\n\n\t// .model(name) - helper function to return a declared model {{{\n\t/**\n\t* Return a defined Monoxide model\n\t* The model must have been previously defined by monoxide.schema()\n\t* This function is identical to accessing the model directly via `monoxide.models[modelName]`\n\t*\n\t* @name monoxide.model\n\t* @see monoxide.schema\n\t*\n\t* @param {string} model The model name (generally lowercase plurals e.g. 'users', 'widgets', 'favouriteItems' etc.)\n\t* @returns {Object} The monoxide model of the generated schema\n\t*/\n\to.model = function(model) {\n\t\treturn o.models[model];\n\t};\n\t// }}}\n\n\t// .schema - Schema builder {{{\n\t/**\n\t* Construct and return a Mongo model\n\t* This function creates a valid schema specificaion then returns it as if model() were called\n\t*\n\t* @name monoxide.schema\n\t* @see monoxide.model\n\t*\n\t* @param {string} model The model name (generally lowercase plurals e.g. 
'users', 'widgets', 'favouriteItems' etc.)
	* @param {Object} spec The schema specification composed of a hierarchical object of keys with each value being the specification of that field
	* @returns {Object} The monoxide model of the generated schema
	* @emits modelCreate Called as (model, instance) when a model gets created
	*
	* @example
	* // Example schema for a widget
	* var Widgets = monoxide.schema('widgets', {
	* 	name: String,
	* 	content: String,
	* 	status: {type: String, enum: ['active', 'deleted'], default: 'active'},
	* 	color: {type: String, enum: ['red', 'green', 'blue'], default: 'blue', index: true},
	* });
	*
	* @example
	* // Example schema for a user
	* var Users = monoxide.schema('users', {
	* 	name: String,
	* 	role: {type: 'string', enum: ['user', 'admin'], default: 'user'},
	* 	favourite: {type: 'pointer', ref: 'widgets'},
	* 	items: [{type: 'pointer', ref: 'widgets'}],
	* 	settings: {type: 'any'},
	* 	mostPurchased: [
	* 		{
	* 			number: {type: 'number', default: 0},
	* 			item: {type: 'pointer', ref: 'widgets'},
	* 		}
	* 	],
	* });
	*/
	o.schema = function(model, spec) {
		if (!argy.isType(model, 'string') || !argy.isType(spec, 'object')) throw new Error('Schema construction requires a model ID + schema object');

		// Walk every leaf of the spec, translating Monoxide's string type aliases into real Mongoose types
		var schema = new mongoose.Schema(_.deepMapValues(spec, function(value, path) {
			// Rewrite .type leafs {{{
			if (_.endsWith(path, '.type')) { // Ignore not type rewrites
				if (!_.isString(value)) return value; // Only rewrite string values

				switch (value.toLowerCase()) {
					case 'oid':
					case 'pointer':
					case 'objectid':
						return mongoose.Schema.ObjectId;
					case 'string':
						return mongoose.Schema.Types.String;
					case 'number':
						return mongoose.Schema.Types.Number;
					case 'boolean':
					case 'bool':
						return mongoose.Schema.Types.Boolean;
					case 'array':
						return mongoose.Schema.Types.Array;
					case 'date':
						return mongoose.Schema.Types.Date;
					case 'object':
					case 'mixed':
					case 'any':
						return mongoose.Schema.Types.Mixed;
					case 'buffer':
						return mongoose.Schema.Types.Buffer;
					default:
						throw new Error('Unknown Monoxide data type: ' + value.toLowerCase());
				}
			// }}}
			// Rewrite .ref leafs {{{
			} else if (_.endsWith(path, '.ref')) {
				if (!_.isString(value)) return value; // Leave complex objects alone
				return value.toLowerCase();
			// }}}
			// Leave everything else unaltered {{{
			} else { // Do nothing
				return value;
			}
			// }}}
		}));

		// Add to model storage
		o.models[model] = new o.monoxideModel({
			$collection: model,
			$mongoose: mongoose.model(model.toLowerCase(), schema), // FIXME: When we implement our own schema def system we can remove the toLowerCase() component that Mongoose insists on using. We can also remove all of the other toLowerCase() calls when we're trying to find the Mongoose schema
			$schema: schema.obj,
		});

		o.emit('modelCreate', model, o.models[model]);

		return o.models[model];
	};
	// }}}

	// .aggregate([q], callback) {{{
	/**
	* Perform a direct aggregation and return the result
	*
	* @name monoxide.aggregate
	* @memberof monoxide
	*
	* @param {Object} q The object to process
	* @param {string} q.$collection The collection / model to query
	* @param {boolean} [q.$slurp=true] Attempt to read all results into an array rather than return a cursor
	* @param {array} q.$stages The aggregation stages array
	* @param {Object} [q.$stages.$project] Fields to be supplied in the aggregation (in the form `{field: true}`)
	* @param {boolean} [q.$stages.$project._id=false] If true suppress the output of the `_id` field
	* @param {Object} [q.$stages.$match] Specify a filter on fields (in the form `{field: CRITERIA}`)
	* @param {Object} [q.$stages.$redact]
	* @param {Object} [q.$stages.$limit]
	* @param {Object} [q.$stages.$skip]
	* @param {Object} [q.$stages.$unwind]
	* @param {Object} [q.$stages.$group]
	* @param {Object} [q.$stages.$sample]
	* @param {Object} [q.$stages.$sort] Specify an object of fields to sort by (in the form `{field: 1|-1}` where 1 is ascending and -1 is descending sort order)
	* @param {Object} [q.$stages.$geoNear]
	* @param {Object} [q.$stages.$lookup]
	* @param {Object} [q.$stages.$out]
	* @param {Object} [q.$stages.$indexStats]
	*
	* @param {function} callback(err, result) the callback to call on completion or error
	*
	* @return {Object} This chainable object
	*/
	o.aggregate = argy('string|object function', function MonoxideAggregate(q, callback) {
		if (argy.isType(q, 'string')) q = {$collection: q};

		async()
			// Sanity checks {{{
			// NOTE(review): the error strings below say 'save operation' - presumably copy-pasted from the save sanity checks; confirm before rewording
			.then(function(next) {
				if (!q || _.isEmpty(q)) return next('No query given for save operation');
				if (!q.$stages || !_.isArray(q.$stages)) return next('$stages must be specified as an array');
				if (!q.$collection) return next('$collection must be specified for save operation');
				if (!o.models[q.$collection]) return next('Model not initalized');
				next();
			})
			// }}}
			// Execute and capture return {{{
			.then('result', function(next) {
				o.models[q.$collection].$mongoModel.aggregate(q.$stages, next);
			})
			// }}}
			// Slurp the cursor? {{{
			// $slurp defaults to true - only skip slurping when explicitly set falsy
			.then('result', function(next) {
				if (q.$slurp || _.isUndefined(q.$slurp)) {
					o.utilities.slurpCursor(this.result, next);
				} else {
					next(null, this.result);
				}
			})
			// }}}
			// End {{{
			.end(function(err) {
				if (err) {
					return callback(err);
				} else {
					callback(null, this.result);
				}
			});
			// }}}
		return o;
	});
	// }}}

	// .use([plugins...], [callback]) {{{
	/**
	* Run a third party plugin against the entire Monoxide structure
	* Really this function just registers all given modules against monoxide then fires the callback when done
	* Each plugin is called as `(callback, monoxide)`
	* @param {function|string|array} plugins The plugin(s) to run.
Each function is run as (model, callback), strings are assumed to be file paths to JS files if they contain at least one '/' or `.` otherwise they are loaded from the `plugins` directory\n\t* @param {function} [callback] Optional callback to fire when all plugin have registered\n\t* @return {monoxide.monoxide} The chainable object\n\t*/\n\to.use = function(plugins, callback) {\n\t\tif (!plugins) return callback(); // Do nothing if given falsy\n\n\t\tasync()\n\t\t\t.forEach(_.castArray(plugins), function(next, plugin) {\n\t\t\t\tif (o.used.some(i => i === plugin)) {\n\t\t\t\t\tdebug('Plugin already loaded, ignoring');\n\t\t\t\t\tnext();\n\t\t\t\t} else if (_.isString(plugin)) {\n\t\t\t\t\tvar pluginModule = /[\\/\\.]/.test(plugin) // Contains at least one slash or dot?\n\t\t\t\t\t\t? require(plugin)\n\t\t\t\t\t\t: require(__dirname + '/plugins/' + plugin)\n\t\t\t\t\tpluginModule.call(o, next, o);\n\t\t\t\t\to.used.push(pluginModule);\n\t\t\t\t} else if (_.isFunction(plugin)) {\n\t\t\t\t\tplugin.call(o, next, o);\n\t\t\t\t\to.used.push(plugin);\n\t\t\t\t} else {\n\t\t\t\t\tnext('Unsupported plugin format');\n\t\t\t\t}\n\t\t\t})\n\t\t\t.end(callback);\n\n\t\treturn o;\n\t};\n\n\t/**\n\t* Storage for modules we have already loaded\n\t* @var {Array } All plugins (as funtions) we have previously loaded\n\t*/\n\to.used = [];\n\t// }}}\n\n\t// .hook(hookName, callback) {{{\n\n\t/**\n\t* Holder for global hooks\n\t* @var {array }\n\t*/\n\to.$hooks = {};\n\n\n\t/**\n\t* Attach a hook to a global event\n\t* A hook is exactly the same as a eventEmitter.on() event but must return a callback\n\t* Multiple hooks can be attached and all will be called in parallel on certain events such as 'save'\n\t* All hooks must return non-errors to proceed with the operation\n\t* @param {string} eventName The event ID to hook against\n\t* @param {function} callback The callback to run when hooked, NOTE: Any falsy callbacks are ignored\n\t* @return {monoxide} The chainable 
monoxide\n\t*/\n\to.hook = function(eventName, callback) {\n\t\tif (!callback) return mm; // Ignore flasy callbacks\n\t\tif (!o.$hooks[eventName]) o.$hooks[eventName] = [];\n\t\to.$hooks[eventName].push(callback);\n\t\treturn o;\n\t};\n\n\n\t/**\n\t* Execute global level hooks\n\t* NOTE: This will only fire hooks attached via monoxide.hook() and not individual model hooks\n\t* NOTE: Hooks are always fired with the callback as the first argument\n\t* @param {string} name The name of the hook to invoke\n\t* @param {function} callback The callback to invoke on success\n\t* @param {...*} parameters Any other parameters to be passed to each hook\n\t* @return {monoxide} The chainable monoxide\n\t*/\n\to.fire = function(name, callback) {\n\t\tif (o.$hooks[name] && o.$hooks[name].length) { // There is at least one event handler attached\n\t\t\tvar eventArgs = _.values(arguments);\n\t\t\teventArgs.splice(1, 1); // Remove the 'callback' arg as events cant respond to it anyway\n\t\t\to.emit.apply(o, eventArgs);\n\t\t} else {\n\t\t\treturn callback();\n\t\t}\n\n\t\t// Calculate the args array we will pass to each hook\n\t\tvar hookArgs = _.values(arguments);\n\t\thookArgs.shift(); // We will set args[0] to the callback in each case anyway so we only need to shift 1\n\n\t\tasync()\n\t\t\t// Fire hooks attached to this model + global hooks {{{\n\t\t\t.forEach(\n\t\t\t\to.$hooks[name]\n\t\t\t\t.filter(f => !!f) // Actually is a function?\n\t\t\t, function(next, hookFunc) {\n\t\t\t\thookArgs[0] = next;\n\t\t\t\thookFunc.apply(o, hookArgs);\n\t\t\t})\n\t\t\t// }}}\n\t\t\t.end(callback);\n\n\t\treturn o;\n\t};\n\n\n\n\t/**\n\t* Similar to fire() expect that execution is immediate\n\t* This should only be used by sync functions that require immediate action such as object mutators\n\t* NOTE: Because of the nature of this function a callback CANNOT be accepted when finished - the function is assumed done when it returns\n\t* @param {string} name The name of the hook to invoke\n\t* 
@param {...*} parameters Any other parameters to be passed to each hook\n\t* @return {monoxide} The chainable monoxide\n\t* @see fire()\n\t*/\n\to.fireImmediate = function(name, callback) {\n\t\tif (!o.$hooks[name] || !o.$hooks[name].length) return o; // No hooks to run anyway\n\n\t\tfor (var i of o.$hooks[name]) {\n\t\t\tlet hookArgs = _.values(arguments);\n\t\t\thookArgs.shift();\n\t\t\ti.apply(o, hookArgs);\n\t\t}\n\n\t\treturn o;\n\t};\n\t// }}}\n\n\t// .utilities structure {{{\n\to.utilities = {};\n\n\t// .utilities.extractFKs(schema, prefix, base) {{{\n\t/**\n\t* Extract all FKs in dotted path notation from a Mongoose model\n\t*\n\t* @name monoxide.utilities.extractFKs\n\t*\n\t* @param {Object} schema The schema object to examine (usually monoxide.models[model].$mongooseModel.schema)\n\t* @param {string} prefix existing Path prefix to use (internal use only)\n\t* @param {Object} base Base object to append flat paths to (internal use only)\n\t* @return {Object} A dictionary of foreign keys for the schema (each key will be the info of the object)\n\t*/\n\to.utilities.extractFKs = function(schema, prefix, base) {\n\t\tvar FKs = {};\n\t\tif (!prefix) prefix = '';\n\t\tif (!base) base = FKs;\n\n\t\t_.forEach(schema.paths, function(path, id) {\n\t\t\tif (id == 'id' || id == '_id') { // Main document ID\n\t\t\t\tFKs[prefix + id] = {type: 'objectId'};\n\t\t\t} else if (path.instance && path.instance == 'ObjectID') {\n\t\t\t\tFKs[prefix + id] = {type: 'objectId'};\n\t\t\t} else if (path.caster && path.caster.instance == 'ObjectID') { // Array of ObjectIDs\n\t\t\t\tFKs[prefix + id] = {type: 'objectIdArray'};\n\t\t\t} else if (path.schema) {\n\t\t\t\tFKs[prefix + id] = {type: 'subDocument'};\n\t\t\t\t_.forEach(o.utilities.extractFKs(path.schema, prefix + id + '.', base), function(val, key) {\n\t\t\t\t\tbase[key] = val;\n\t\t\t\t});\n\t\t\t}\n\t\t});\n\n\t\treturn FKs;\n\t}\n\t// }}}\n\n\t// .utilities.objectID(string) {{{\n\t/**\n\t* Construct and return a MongoDB-Core 
compatible ObjectID object\n\t* This is mainly used within functions that need to convert a string ID into an object\n\t* This has one additional check which will return undefined if the value passed in is falsy\n\t* @name monoxide.utilities.objectID\n\t* @param {string} str The string to convert into an ObjectID\n\t* @return {Object} A MongoDB-Core compatible ObjectID object instance\n\t*/\n\to.utilities.objectID = function(str) {\n\t\tif (!str) return undefined;\n\t\tif (_.isObject(str) && str._id) return new mongoose.Types.ObjectId(str._id); // Is a sub-document - extract its _id and use that\n\t\treturn new mongoose.Types.ObjectId(str);\n\t};\n\t// }}}\n\n\t// .utilities.isObjectID(string) {{{\n\t/**\n\t* Return if the input is a valid MongoDB-Core compatible ObjectID object\n\t* This is mainly used within functions that need to check that a given variable is a Mongo OID\n\t* @name monoxide.utilities.isObjectID\n\t* @param {mixed} subject The item to examine\n\t* @return {boolean} Whether the subject is a MongoDB-Core compatible ObjectID object instance\n\t*/\n\to.utilities.isObjectID = function(subject) {\n\t\treturn (subject instanceof mongoose.Types.ObjectId);\n\t};\n\n\t/**\n\t* Alias of isObjectID\n\t* @see monoxide.utilities.isObjectId\n\t*/\n\to.utilities.isObjectId = o.utilities.isObjectID;\n\t// }}}\n\n\t// .utilities.runMiddleware(middleware) {{{\n\t/**\n\t* Run optional middleware\n\t*\n\t* Middleware can be:\n\t* \t- A function(req, res, next)\n\t*\t- An array of functions(req, res, next) - Functions will be called in sequence, all functions must call the next method\n\t*\t- A string - If specified (and `obj` is also specified) the middleware to use will be looked up as a key of the object. This is useful if you need to invoke similar methods on different entry points (e.g. 
monoxide.express.middleware('widgets', {save: function(req, res, next) { // Check something // }, create: 'save'}) - where the `create` method invokes the same middleware as `save)\n\t*\n\t* @param {null|function|array} middleware The optional middleware to run this can be a function, an array of functions or a string\n\t* @param {function} callback The callback to invoke when completed. This may not be called\n\t* @param {object} obj The parent object to look up inherited functions from (if middleware is a string)\n\t*\n\t* @example\n\t* // Set up a Monoxide express middleware to check user logins on each save or create operaion\n\t* app.use('/api/widgets/:id?', monoxide.express.middleware('widgets', {\n\t* \tcreate: function(req, res, next) {\n\t*\t\tif (req.user && req.user._id) {\n\t* \t\t\tnext();\n\t* \t\t} else {\n\t* \t\t\tres.status(403).send('You are not logged in').end();\n\t*\t\t}\n\t*\t},\n\t* \tsave: 'create', // Point to the same checks as the `create` middleware\n\t* }));\n\n\t*/\n\to.utilities.runMiddleware = function(req, res, middleware, callback, obj) {\n\t\tvar thisContext = this;\n\t\tvar runnable; // The middleware ARRAY to run\n\n\t\tif (_.isBoolean(middleware) && !middleware) { // Boolean=false - deny!\n\t\t\tres.status(403).end();\n\t\t} else if (_.isUndefined(middleware) || _.isNull(middleware)) { // Nothing to do anyway\n\t\t\treturn callback();\n\t\t} else if (_.isFunction(middleware)) {\n\t\t\trunnable = [middleware];\n\t\t} else if (_.isArray(middleware)) {\n\t\t\trunnable = middleware;\n\t\t} else if (_.isString(middleware) && _.has(obj, middleware)) {\n\t\t\treturn o.utilities.runMiddleware(req, res, _.get(obj, middleware), callback, obj); // Defer to the pointer\n\t\t}\n\n\t\tasync()\n\t\t\t.limit(1)\n\t\t\t.forEach(runnable, function(nextMiddleware, middlewareFunc, index) {\n\t\t\t\tmiddlewareFunc.apply(thisContext, [req, res, nextMiddleware]);\n\t\t\t})\n\t\t\t.end(function(err) {\n\t\t\t\tif (err) 
{\n\t\t\t\t\to.express.sendError(res, 403, err);\n\t\t\t\t} else {\n\t\t\t\t\tcallback();\n\t\t\t\t}\n\t\t\t});\n\t};\n\t// }}}\n\n\t// .utilities.diff(originalDoc, newDoc) {{{\n\t/**\n\t* Diff two monoxide.monoxideDocument objects and return the changes as an object\n\t* This change object is suitable for passing directly into monoxide.save()\n\t* While originally intended only for comparing monoxide.monoxideDocument objects this function can be used to compare any type of object\n\t* NOTE: If you are comparing MonoxideDocuments call `.toObject()` before passing the object in to strip it of its noise\n\t*\n\t* @name monoxide.utilities.diff\n\t* @see monoxide.save\n\t* @see monoxide.update\n\t*\n\t* @param {Object} originalDoc The original source document to compare to\n\t* @param {Object} newDoc The new document with possible changes\n\t* @return {Object} The patch object\n\t*\n\t* @example\n\t* // Get the patch of two documents\n\t* monoxide.query({$collection: 'widgets', $id: '123'}, function(err, res) {\n\t* \tvar docA = res.toObject();\n\t* \tvar docB = res.toObject();\n\t*\n\t*\t// Change some fields\n\t* \tdocB.title = 'Hello world';\n\t*\n\t* \tvar patch = monoxide.utilities.diff(docA, docB);\n\t* \t// => should only return {title: 'Hello World'}\n\t* });\n\t*/\n\to.utilities.diff = function(originalDoc, newDoc) {\n\t\tvar patch = {};\n\n\t\tdeepDiff.observableDiff(originalDoc, newDoc, function(diff) {\n\t\t\tif (diff.kind == 'N' || diff.kind == 'E') {\n\t\t\t\t_.set(patch, diff.path, diff.rhs);\n\t\t\t} else if (diff.kind == 'A') { // Array alterations\n\t\t\t\t// deepDiff will only apply changes onto newDoc - we can't just apply them to the empty patch object\n\t\t\t\t// so we let deepDiff do its thing then copy the new structure across into patch\n\t\t\t\tdeepDiff.applyChange(originalDoc, newDoc, diff);\n\t\t\t\t_.set(patch, diff.path, _.get(newDoc, diff.path));\n\t\t\t}\n\t\t});\n\n\t\treturn patch;\n\t};\n\t// }}}\n\n\t// .utilities.rewriteQuery(query, 
settings) {{{\n\t/**\n\t* Returns a rewritten version of an incomming query that obeys various rules\n\t* This usually accepts req.query as a parameter and a complex settings object as a secondary\n\t* This function is used internally by middleware functions to clean up the incomming query\n\t*\n\t* @name monoxide.utilities.rewriteQuery\n\t* @see monoxide.middleware\n\t*\n\t* @param {Object} query The user-provided query object\n\t* @param {Object} settings The settings object to apply (see middleware functions)\n\t* @return {Object} The rewritten query object\n\t*/\n\to.utilities.rewriteQuery = function(query, settings) {\n\t\treturn _(query)\n\t\t\t.mapKeys(function(val, key) {\n\t\t\t\tif (_.has(settings.queryRemaps, key)) return settings.queryRemaps[key];\n\t\t\t\treturn key;\n\t\t\t})\n\t\t\t.mapValues(function(val, key) {\n\t\t\t\tif (settings.queryAllowed && settings.queryAllowed[key]) {\n\t\t\t\t\tvar allowed = settings.queryAllowed[key];\n\t\t\t\t\tif (!_.isString(val) && !allowed.scalar) {\n\t\t\t\t\t\treturn null;\n\t\t\t\t\t} else if (allowed.boolean) {\n\t\t\t\t\t\treturn (val == 'true' || val == '1');\n\t\t\t\t\t} else if (_.isString(val) && allowed.scalarCSV) {\n\t\t\t\t\t\treturn val.split(/\\s*,\\s*/);\n\t\t\t\t\t} else if (_.isArray(val) && allowed.array) {\n\t\t\t\t\t\treturn val;\n\t\t\t\t\t} else if (_.isString(val) && allowed.number) {\n\t\t\t\t\t\treturn parseInt(val);\n\t\t\t\t\t} else {\n\t\t\t\t\t\treturn val;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\treturn val;\n\t\t\t})\n\t\t\t.value();\n\t};\n\t// }}}\n\n\t// .utilities.slurpCursor(cursor, cb) {{{\n\t/**\n\t* Asyncronously calls a cursor until it is exhausted\n\t*\n\t* @name monoxide.utilities.slurpCursor\n\t*\n\t* @param {Cursor} cursor A mongo compatible cursor object\n\t* @param {function} cb The callback to call as (err, result) when complete\n\t*/\n\to.utilities.slurpCursor = function(cursor, cb) {\n\t\tvar res = [];\n\n\t\tvar cursorReady = function(err, result) {\n\t\t\tif (result === 
null) { // Cursor is exhausted\n\t\t\t\tcb(null, res);\n\t\t\t} else {\n\t\t\t\tres.push(result);\n\t\t\t\tsetTimeout(function() { // Queue fetcher in timeout so we don't stack overflow\n\t\t\t\t\tcursor.next(cursorReady);\n\t\t\t\t});\n\t\t\t}\n\t\t};\n\n\t\tcursor.next(cursorReady);\n\t};\n\t// }}}\n\t// }}}\n\n\t// Create internals mapping {{{\n\to.internal = o; // Mapping for the original function handlers (e.g. get() before any mutations)\n\t// }}}\n\n\treturn o;\n}\n\nutil.inherits(Monoxide, events.EventEmitter);\n\nmodule.exports = new Monoxide();\n"},"message":{"kind":"string","value":"BUGFIX: Better connection handling\n"},"old_file":{"kind":"string","value":"index.js"},"subject":{"kind":"string","value":"BUGFIX: Better connection handling"},"git_diff":{"kind":"string","value":"ndex.js\n \t\tmongoose.connect(uri, {\n \t\t\tpromiseLibrary: global.Promise,\n \t\t\tuseNewUrlParser: true,\n\t\t}, function(err) {\n\t\t\tif (err) {\n\t\t\t\tif (_.isFunction(callback)) callback(err);\n\t\t\t} else {\n\t\t\t\to.connection = mongoose.connection;\n\t\t\t\tif (_.isFunction(callback)) callback();\n\t\t\t}\n \t\t})\n\t\t.then(function() {\n\t\t\to.connection = mongoose.connection;\n\t\t\tif (callback) callback();\n\t\t})\n\t\t.catch(e => callback(e))\n \n \t\treturn o;\n \t};"}}},{"rowIdx":2044,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"00a75d7c99e87f543a4e0a4390732bb2eaa286fe"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"chrislusf/seaweedfs,chrislusf/seaweedfs,chrislusf/seaweedfs,chrislusf/seaweedfs"},"new_contents":{"kind":"string","value":"package seaweedfs.client;\n\nimport org.apache.http.Header;\nimport org.apache.http.HeaderElement;\nimport org.apache.http.HttpEntity;\nimport org.apache.http.HttpHeaders;\nimport org.apache.http.client.entity.GzipDecompressingEntity;\nimport 
org.apache.http.client.methods.CloseableHttpResponse;\nimport org.apache.http.client.methods.HttpGet;\nimport org.apache.http.util.EntityUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport java.io.IOException;\nimport java.util.*;\n\npublic class SeaweedRead {\n\n private static final Logger LOG = LoggerFactory.getLogger(SeaweedRead.class);\n\n static ChunkCache chunkCache = new ChunkCache(4);\n\n // returns bytesRead\n public static long read(FilerGrpcClient filerGrpcClient, List visibleIntervals,\n final long position, final byte[] buffer, final int bufferOffset,\n final int bufferLength, final long fileSize) throws IOException {\n\n List chunkViews = viewFromVisibles(visibleIntervals, position, bufferLength);\n\n FilerProto.LookupVolumeRequest.Builder lookupRequest = FilerProto.LookupVolumeRequest.newBuilder();\n for (ChunkView chunkView : chunkViews) {\n String vid = parseVolumeId(chunkView.fileId);\n lookupRequest.addVolumeIds(vid);\n }\n\n FilerProto.LookupVolumeResponse lookupResponse = filerGrpcClient\n .getBlockingStub().lookupVolume(lookupRequest.build());\n\n Map vid2Locations = lookupResponse.getLocationsMapMap();\n\n //TODO parallel this\n long readCount = 0;\n long startOffset = position;\n for (ChunkView chunkView : chunkViews) {\n\n if (startOffset < chunkView.logicOffset) {\n long gap = chunkView.logicOffset - startOffset;\n LOG.debug(\"zero [{},{})\", startOffset, startOffset + gap);\n readCount += gap;\n startOffset += gap;\n }\n\n FilerProto.Locations locations = vid2Locations.get(parseVolumeId(chunkView.fileId));\n if (locations == null || locations.getLocationsCount() == 0) {\n LOG.error(\"failed to locate {}\", chunkView.fileId);\n // log here!\n return 0;\n }\n\n int len = readChunkView(startOffset, buffer, bufferOffset + readCount, chunkView, locations);\n\n LOG.debug(\"read [{},{}) {} size {}\", startOffset, startOffset + len, chunkView.fileId, chunkView.size);\n\n readCount += len;\n startOffset += len;\n\n }\n\n 
long limit = Math.min(bufferOffset + bufferLength, fileSize);\n\n if (startOffset < limit) {\n long gap = limit - startOffset;\n LOG.debug(\"zero2 [{},{})\", startOffset, startOffset + gap);\n readCount += gap;\n startOffset += gap;\n }\n\n return readCount;\n }\n\n private static int readChunkView(long startOffset, byte[] buffer, long bufOffset, ChunkView chunkView, FilerProto.Locations locations) throws IOException {\n\n byte[] chunkData = chunkCache.getChunk(chunkView.fileId);\n\n if (chunkData == null) {\n chunkData = doFetchFullChunkData(chunkView, locations);\n chunkCache.setChunk(chunkView.fileId, chunkData);\n }\n\n int len = (int) chunkView.size;\n LOG.debug(\"readChunkView fid:{} chunkData.length:{} chunkView[{};{}) buf[{},{})/{} startOffset:{}\",\n chunkView.fileId, chunkData.length, chunkView.offset, chunkView.offset+chunkView.size, bufOffset, bufOffset+len, buffer.length, startOffset);\n System.arraycopy(chunkData, (int) (startOffset - chunkView.logicOffset + chunkView.offset), buffer, (int)bufOffset, len);\n\n return len;\n }\n\n public static byte[] doFetchFullChunkData(ChunkView chunkView, FilerProto.Locations locations) throws IOException {\n\n byte[] data = null;\n IOException lastException = null;\n for (long waitTime = 1000L; waitTime < 10 * 1000; waitTime += waitTime / 2) {\n for (FilerProto.Location location : locations.getLocationsList()) {\n String url = String.format(\"http://%s/%s\", location.getUrl(), chunkView.fileId);\n try {\n data = doFetchOneFullChunkData(chunkView, url);\n break;\n } catch (IOException ioe) {\n LOG.debug(\"doFetchFullChunkData {} :{}\", url, ioe);\n lastException = ioe;\n }\n }\n if (data != null) {\n break;\n }\n try {\n Thread.sleep(waitTime);\n } catch (InterruptedException e) {\n }\n }\n\n if (data == null) {\n throw lastException;\n }\n\n LOG.debug(\"doFetchFullChunkData fid:{} chunkData.length:{}\", chunkView.fileId, data.length);\n\n return data;\n\n }\n\n public static byte[] 
doFetchOneFullChunkData(ChunkView chunkView, String url) throws IOException {\n\n HttpGet request = new HttpGet(url);\n\n request.setHeader(HttpHeaders.ACCEPT_ENCODING, \"gzip\");\n\n byte[] data = null;\n\n CloseableHttpResponse response = SeaweedUtil.getClosableHttpClient().execute(request);\n\n try {\n HttpEntity entity = response.getEntity();\n\n Header contentEncodingHeader = entity.getContentEncoding();\n\n if (contentEncodingHeader != null) {\n HeaderElement[] encodings = contentEncodingHeader.getElements();\n for (int i = 0; i < encodings.length; i++) {\n if (encodings[i].getName().equalsIgnoreCase(\"gzip\")) {\n entity = new GzipDecompressingEntity(entity);\n break;\n }\n }\n }\n\n data = EntityUtils.toByteArray(entity);\n\n EntityUtils.consume(entity);\n\n } finally {\n response.close();\n request.releaseConnection();\n }\n\n if (chunkView.cipherKey != null && chunkView.cipherKey.length != 0) {\n try {\n data = SeaweedCipher.decrypt(data, chunkView.cipherKey);\n } catch (Exception e) {\n throw new IOException(\"fail to decrypt\", e);\n }\n }\n\n if (chunkView.isCompressed) {\n data = Gzip.decompress(data);\n }\n\n LOG.debug(\"doFetchOneFullChunkData url:{} chunkData.length:{}\", url, data.length);\n\n return data;\n\n }\n\n protected static List viewFromVisibles(List visibleIntervals, long offset, long size) {\n List views = new ArrayList<>();\n\n long stop = offset + size;\n for (VisibleInterval chunk : visibleIntervals) {\n long chunkStart = Math.max(offset, chunk.start);\n long chunkStop = Math.min(stop, chunk.stop);\n if (chunkStart < chunkStop) {\n boolean isFullChunk = chunk.isFullChunk && chunk.start == offset && chunk.stop <= stop;\n views.add(new ChunkView(\n chunk.fileId,\n chunkStart - chunk.start + chunk.chunkOffset,\n chunkStop - chunkStart,\n chunkStart,\n isFullChunk,\n chunk.cipherKey,\n chunk.isCompressed\n ));\n }\n }\n return views;\n }\n\n public static List nonOverlappingVisibleIntervals(\n final FilerGrpcClient filerGrpcClient, List 
chunkList) throws IOException {\n\n chunkList = FileChunkManifest.resolveChunkManifest(filerGrpcClient, chunkList);\n\n FilerProto.FileChunk[] chunks = chunkList.toArray(new FilerProto.FileChunk[0]);\n Arrays.sort(chunks, new Comparator() {\n @Override\n public int compare(FilerProto.FileChunk a, FilerProto.FileChunk b) {\n // if just a.getMtime() - b.getMtime(), it will overflow!\n if (a.getMtime() < b.getMtime()) {\n return -1;\n } else if (a.getMtime() > b.getMtime()) {\n return 1;\n }\n return 0;\n }\n });\n\n List visibles = new ArrayList<>();\n for (FilerProto.FileChunk chunk : chunks) {\n List newVisibles = new ArrayList<>();\n visibles = mergeIntoVisibles(visibles, newVisibles, chunk);\n }\n\n return visibles;\n }\n\n private static List mergeIntoVisibles(List visibles,\n List newVisibles,\n FilerProto.FileChunk chunk) {\n VisibleInterval newV = new VisibleInterval(\n chunk.getOffset(),\n chunk.getOffset() + chunk.getSize(),\n chunk.getFileId(),\n chunk.getMtime(),\n 0,\n true,\n chunk.getCipherKey().toByteArray(),\n chunk.getIsCompressed()\n );\n\n // easy cases to speed up\n if (visibles.size() == 0) {\n visibles.add(newV);\n return visibles;\n }\n if (visibles.get(visibles.size() - 1).stop <= chunk.getOffset()) {\n visibles.add(newV);\n return visibles;\n }\n\n for (VisibleInterval v : visibles) {\n if (v.start < chunk.getOffset() && chunk.getOffset() < v.stop) {\n newVisibles.add(new VisibleInterval(\n v.start,\n chunk.getOffset(),\n v.fileId,\n v.modifiedTime,\n v.chunkOffset,\n false,\n v.cipherKey,\n v.isCompressed\n ));\n }\n long chunkStop = chunk.getOffset() + chunk.getSize();\n if (v.start < chunkStop && chunkStop < v.stop) {\n newVisibles.add(new VisibleInterval(\n chunkStop,\n v.stop,\n v.fileId,\n v.modifiedTime,\n v.chunkOffset + (chunkStop - v.start),\n false,\n v.cipherKey,\n v.isCompressed\n ));\n }\n if (chunkStop <= v.start || v.stop <= chunk.getOffset()) {\n newVisibles.add(v);\n }\n }\n newVisibles.add(newV);\n\n // keep everything 
sorted\n for (int i = newVisibles.size() - 1; i >= 0; i--) {\n if (i > 0 && newV.start < newVisibles.get(i - 1).start) {\n newVisibles.set(i, newVisibles.get(i - 1));\n } else {\n newVisibles.set(i, newV);\n break;\n }\n }\n\n return newVisibles;\n }\n\n public static String parseVolumeId(String fileId) {\n int commaIndex = fileId.lastIndexOf(',');\n if (commaIndex > 0) {\n return fileId.substring(0, commaIndex);\n }\n return fileId;\n }\n\n public static long fileSize(FilerProto.Entry entry) {\n return Math.max(totalSize(entry.getChunksList()), entry.getAttributes().getFileSize());\n }\n\n public static long totalSize(List chunksList) {\n long size = 0;\n for (FilerProto.FileChunk chunk : chunksList) {\n long t = chunk.getOffset() + chunk.getSize();\n if (size < t) {\n size = t;\n }\n }\n return size;\n }\n\n public static class VisibleInterval {\n public final long start;\n public final long stop;\n public final long modifiedTime;\n public final String fileId;\n public final long chunkOffset;\n public final boolean isFullChunk;\n public final byte[] cipherKey;\n public final boolean isCompressed;\n\n public VisibleInterval(long start, long stop, String fileId, long modifiedTime, long chunkOffset, boolean isFullChunk, byte[] cipherKey, boolean isCompressed) {\n this.start = start;\n this.stop = stop;\n this.modifiedTime = modifiedTime;\n this.fileId = fileId;\n this.chunkOffset = chunkOffset;\n this.isFullChunk = isFullChunk;\n this.cipherKey = cipherKey;\n this.isCompressed = isCompressed;\n }\n\n @Override\n public String toString() {\n return \"VisibleInterval{\" +\n \"start=\" + start +\n \", stop=\" + stop +\n \", modifiedTime=\" + modifiedTime +\n \", fileId='\" + fileId + '\\'' +\n \", isFullChunk=\" + isFullChunk +\n \", cipherKey=\" + Arrays.toString(cipherKey) +\n \", isCompressed=\" + isCompressed +\n '}';\n }\n }\n\n public static class ChunkView {\n public final String fileId;\n public final long offset;\n public final long size;\n public final long 
logicOffset;\n public final boolean isFullChunk;\n public final byte[] cipherKey;\n public final boolean isCompressed;\n\n public ChunkView(String fileId, long offset, long size, long logicOffset, boolean isFullChunk, byte[] cipherKey, boolean isCompressed) {\n this.fileId = fileId;\n this.offset = offset;\n this.size = size;\n this.logicOffset = logicOffset;\n this.isFullChunk = isFullChunk;\n this.cipherKey = cipherKey;\n this.isCompressed = isCompressed;\n }\n\n @Override\n public String toString() {\n return \"ChunkView{\" +\n \"fileId='\" + fileId + '\\'' +\n \", offset=\" + offset +\n \", size=\" + size +\n \", logicOffset=\" + logicOffset +\n \", isFullChunk=\" + isFullChunk +\n \", cipherKey=\" + Arrays.toString(cipherKey) +\n \", isCompressed=\" + isCompressed +\n '}';\n }\n }\n\n}\n"},"new_file":{"kind":"string","value":"other/java/client/src/main/java/seaweedfs/client/SeaweedRead.java"},"old_contents":{"kind":"string","value":"package seaweedfs.client;\n\nimport org.apache.http.Header;\nimport org.apache.http.HeaderElement;\nimport org.apache.http.HttpEntity;\nimport org.apache.http.HttpHeaders;\nimport org.apache.http.client.entity.GzipDecompressingEntity;\nimport org.apache.http.client.methods.CloseableHttpResponse;\nimport org.apache.http.client.methods.HttpGet;\nimport org.apache.http.util.EntityUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport java.io.IOException;\nimport java.util.*;\n\npublic class SeaweedRead {\n\n private static final Logger LOG = LoggerFactory.getLogger(SeaweedRead.class);\n\n static ChunkCache chunkCache = new ChunkCache(4);\n\n // returns bytesRead\n public static long read(FilerGrpcClient filerGrpcClient, List visibleIntervals,\n final long position, final byte[] buffer, final int bufferOffset,\n final int bufferLength, final long fileSize) throws IOException {\n\n List chunkViews = viewFromVisibles(visibleIntervals, position, bufferLength);\n\n FilerProto.LookupVolumeRequest.Builder lookupRequest = 
FilerProto.LookupVolumeRequest.newBuilder();\n for (ChunkView chunkView : chunkViews) {\n String vid = parseVolumeId(chunkView.fileId);\n lookupRequest.addVolumeIds(vid);\n }\n\n FilerProto.LookupVolumeResponse lookupResponse = filerGrpcClient\n .getBlockingStub().lookupVolume(lookupRequest.build());\n\n Map vid2Locations = lookupResponse.getLocationsMapMap();\n\n //TODO parallel this\n long readCount = 0;\n int startOffset = bufferOffset;\n for (ChunkView chunkView : chunkViews) {\n\n if (startOffset < chunkView.logicOffset) {\n long gap = chunkView.logicOffset - startOffset;\n LOG.debug(\"zero [{},{})\", startOffset, startOffset + gap);\n readCount += gap;\n startOffset += gap;\n }\n\n FilerProto.Locations locations = vid2Locations.get(parseVolumeId(chunkView.fileId));\n if (locations == null || locations.getLocationsCount() == 0) {\n LOG.error(\"failed to locate {}\", chunkView.fileId);\n // log here!\n return 0;\n }\n\n int len = readChunkView(position, buffer, startOffset, chunkView, locations);\n\n LOG.debug(\"read [{},{}) {} size {}\", startOffset, startOffset + len, chunkView.fileId, chunkView.size);\n\n readCount += len;\n startOffset += len;\n\n }\n\n long limit = Math.min(bufferLength, fileSize);\n\n if (startOffset < limit) {\n long gap = limit - startOffset;\n LOG.debug(\"zero2 [{},{})\", startOffset, startOffset + gap);\n readCount += gap;\n startOffset += gap;\n }\n\n return readCount;\n }\n\n private static int readChunkView(long position, byte[] buffer, int startOffset, ChunkView chunkView, FilerProto.Locations locations) throws IOException {\n\n byte[] chunkData = chunkCache.getChunk(chunkView.fileId);\n\n if (chunkData == null) {\n chunkData = doFetchFullChunkData(chunkView, locations);\n chunkCache.setChunk(chunkView.fileId, chunkData);\n }\n\n int len = (int) chunkView.size;\n LOG.debug(\"readChunkView fid:{} chunkData.length:{} chunkView.offset:{} buffer.length:{} startOffset:{} len:{}\",\n chunkView.fileId, chunkData.length, chunkView.offset, 
buffer.length, startOffset, len);\n System.arraycopy(chunkData, startOffset - (int) (chunkView.logicOffset - chunkView.offset), buffer, startOffset, len);\n\n return len;\n }\n\n public static byte[] doFetchFullChunkData(ChunkView chunkView, FilerProto.Locations locations) throws IOException {\n\n byte[] data = null;\n for (long waitTime = 230L; waitTime < 20 * 1000; waitTime += waitTime / 2) {\n for (FilerProto.Location location : locations.getLocationsList()) {\n String url = String.format(\"http://%s/%s\", location.getUrl(), chunkView.fileId);\n try {\n data = doFetchOneFullChunkData(chunkView, url);\n break;\n } catch (IOException ioe) {\n LOG.debug(\"doFetchFullChunkData {} :{}\", url, ioe);\n }\n }\n if (data != null) {\n break;\n }\n try {\n Thread.sleep(waitTime);\n } catch (InterruptedException e) {\n }\n }\n\n LOG.debug(\"doFetchFullChunkData fid:{} chunkData.length:{}\", chunkView.fileId, data.length);\n\n return data;\n\n }\n\n public static byte[] doFetchOneFullChunkData(ChunkView chunkView, String url) throws IOException {\n\n HttpGet request = new HttpGet(url);\n\n request.setHeader(HttpHeaders.ACCEPT_ENCODING, \"gzip\");\n\n byte[] data = null;\n\n CloseableHttpResponse response = SeaweedUtil.getClosableHttpClient().execute(request);\n\n try {\n HttpEntity entity = response.getEntity();\n\n Header contentEncodingHeader = entity.getContentEncoding();\n\n if (contentEncodingHeader != null) {\n HeaderElement[] encodings = contentEncodingHeader.getElements();\n for (int i = 0; i < encodings.length; i++) {\n if (encodings[i].getName().equalsIgnoreCase(\"gzip\")) {\n entity = new GzipDecompressingEntity(entity);\n break;\n }\n }\n }\n\n data = EntityUtils.toByteArray(entity);\n\n EntityUtils.consume(entity);\n\n } finally {\n response.close();\n request.releaseConnection();\n }\n\n if (chunkView.cipherKey != null && chunkView.cipherKey.length != 0) {\n try {\n data = SeaweedCipher.decrypt(data, chunkView.cipherKey);\n } catch (Exception e) {\n throw new 
IOException(\"fail to decrypt\", e);\n }\n }\n\n if (chunkView.isCompressed) {\n data = Gzip.decompress(data);\n }\n\n LOG.debug(\"doFetchOneFullChunkData url:{} chunkData.length:{}\", url, data.length);\n\n return data;\n\n }\n\n protected static List viewFromVisibles(List visibleIntervals, long offset, long size) {\n List views = new ArrayList<>();\n\n long stop = offset + size;\n for (VisibleInterval chunk : visibleIntervals) {\n long chunkStart = Math.max(offset, chunk.start);\n long chunkStop = Math.min(stop, chunk.stop);\n if (chunkStart < chunkStop) {\n boolean isFullChunk = chunk.isFullChunk && chunk.start == offset && chunk.stop <= stop;\n views.add(new ChunkView(\n chunk.fileId,\n chunkStart - chunk.start + chunk.chunkOffset,\n chunkStop - chunkStart,\n chunkStart,\n isFullChunk,\n chunk.cipherKey,\n chunk.isCompressed\n ));\n }\n }\n return views;\n }\n\n public static List nonOverlappingVisibleIntervals(\n final FilerGrpcClient filerGrpcClient, List chunkList) throws IOException {\n\n chunkList = FileChunkManifest.resolveChunkManifest(filerGrpcClient, chunkList);\n\n FilerProto.FileChunk[] chunks = chunkList.toArray(new FilerProto.FileChunk[0]);\n Arrays.sort(chunks, new Comparator() {\n @Override\n public int compare(FilerProto.FileChunk a, FilerProto.FileChunk b) {\n // if just a.getMtime() - b.getMtime(), it will overflow!\n if (a.getMtime() < b.getMtime()) {\n return -1;\n } else if (a.getMtime() > b.getMtime()) {\n return 1;\n }\n return 0;\n }\n });\n\n List visibles = new ArrayList<>();\n for (FilerProto.FileChunk chunk : chunks) {\n List newVisibles = new ArrayList<>();\n visibles = mergeIntoVisibles(visibles, newVisibles, chunk);\n }\n\n return visibles;\n }\n\n private static List mergeIntoVisibles(List visibles,\n List newVisibles,\n FilerProto.FileChunk chunk) {\n VisibleInterval newV = new VisibleInterval(\n chunk.getOffset(),\n chunk.getOffset() + chunk.getSize(),\n chunk.getFileId(),\n chunk.getMtime(),\n 0,\n true,\n 
chunk.getCipherKey().toByteArray(),\n chunk.getIsCompressed()\n );\n\n // easy cases to speed up\n if (visibles.size() == 0) {\n visibles.add(newV);\n return visibles;\n }\n if (visibles.get(visibles.size() - 1).stop <= chunk.getOffset()) {\n visibles.add(newV);\n return visibles;\n }\n\n for (VisibleInterval v : visibles) {\n if (v.start < chunk.getOffset() && chunk.getOffset() < v.stop) {\n newVisibles.add(new VisibleInterval(\n v.start,\n chunk.getOffset(),\n v.fileId,\n v.modifiedTime,\n v.chunkOffset,\n false,\n v.cipherKey,\n v.isCompressed\n ));\n }\n long chunkStop = chunk.getOffset() + chunk.getSize();\n if (v.start < chunkStop && chunkStop < v.stop) {\n newVisibles.add(new VisibleInterval(\n chunkStop,\n v.stop,\n v.fileId,\n v.modifiedTime,\n v.chunkOffset + (chunkStop - v.start),\n false,\n v.cipherKey,\n v.isCompressed\n ));\n }\n if (chunkStop <= v.start || v.stop <= chunk.getOffset()) {\n newVisibles.add(v);\n }\n }\n newVisibles.add(newV);\n\n // keep everything sorted\n for (int i = newVisibles.size() - 1; i >= 0; i--) {\n if (i > 0 && newV.start < newVisibles.get(i - 1).start) {\n newVisibles.set(i, newVisibles.get(i - 1));\n } else {\n newVisibles.set(i, newV);\n break;\n }\n }\n\n return newVisibles;\n }\n\n public static String parseVolumeId(String fileId) {\n int commaIndex = fileId.lastIndexOf(',');\n if (commaIndex > 0) {\n return fileId.substring(0, commaIndex);\n }\n return fileId;\n }\n\n public static long fileSize(FilerProto.Entry entry) {\n return Math.max(totalSize(entry.getChunksList()), entry.getAttributes().getFileSize());\n }\n\n public static long totalSize(List chunksList) {\n long size = 0;\n for (FilerProto.FileChunk chunk : chunksList) {\n long t = chunk.getOffset() + chunk.getSize();\n if (size < t) {\n size = t;\n }\n }\n return size;\n }\n\n public static class VisibleInterval {\n public final long start;\n public final long stop;\n public final long modifiedTime;\n public final String fileId;\n public final long 
chunkOffset;\n public final boolean isFullChunk;\n public final byte[] cipherKey;\n public final boolean isCompressed;\n\n public VisibleInterval(long start, long stop, String fileId, long modifiedTime, long chunkOffset, boolean isFullChunk, byte[] cipherKey, boolean isCompressed) {\n this.start = start;\n this.stop = stop;\n this.modifiedTime = modifiedTime;\n this.fileId = fileId;\n this.chunkOffset = chunkOffset;\n this.isFullChunk = isFullChunk;\n this.cipherKey = cipherKey;\n this.isCompressed = isCompressed;\n }\n\n @Override\n public String toString() {\n return \"VisibleInterval{\" +\n \"start=\" + start +\n \", stop=\" + stop +\n \", modifiedTime=\" + modifiedTime +\n \", fileId='\" + fileId + '\\'' +\n \", isFullChunk=\" + isFullChunk +\n \", cipherKey=\" + Arrays.toString(cipherKey) +\n \", isCompressed=\" + isCompressed +\n '}';\n }\n }\n\n public static class ChunkView {\n public final String fileId;\n public final long offset;\n public final long size;\n public final long logicOffset;\n public final boolean isFullChunk;\n public final byte[] cipherKey;\n public final boolean isCompressed;\n\n public ChunkView(String fileId, long offset, long size, long logicOffset, boolean isFullChunk, byte[] cipherKey, boolean isCompressed) {\n this.fileId = fileId;\n this.offset = offset;\n this.size = size;\n this.logicOffset = logicOffset;\n this.isFullChunk = isFullChunk;\n this.cipherKey = cipherKey;\n this.isCompressed = isCompressed;\n }\n\n @Override\n public String toString() {\n return \"ChunkView{\" +\n \"fileId='\" + fileId + '\\'' +\n \", offset=\" + offset +\n \", size=\" + size +\n \", logicOffset=\" + logicOffset +\n \", isFullChunk=\" + isFullChunk +\n \", cipherKey=\" + Arrays.toString(cipherKey) +\n \", isCompressed=\" + isCompressed +\n '}';\n }\n }\n\n}\n"},"message":{"kind":"string","value":"Hadoop: fix reading file 
tail\n"},"old_file":{"kind":"string","value":"other/java/client/src/main/java/seaweedfs/client/SeaweedRead.java"},"subject":{"kind":"string","value":"Hadoop: fix reading file tail"},"git_diff":{"kind":"string","value":"ther/java/client/src/main/java/seaweedfs/client/SeaweedRead.java\n \n //TODO parallel this\n long readCount = 0;\n int startOffset = bufferOffset;\n long startOffset = position;\n for (ChunkView chunkView : chunkViews) {\n \n if (startOffset < chunkView.logicOffset) {\n return 0;\n }\n \n int len = readChunkView(position, buffer, startOffset, chunkView, locations);\n int len = readChunkView(startOffset, buffer, bufferOffset + readCount, chunkView, locations);\n \n LOG.debug(\"read [{},{}) {} size {}\", startOffset, startOffset + len, chunkView.fileId, chunkView.size);\n \n \n }\n \n long limit = Math.min(bufferLength, fileSize);\n long limit = Math.min(bufferOffset + bufferLength, fileSize);\n \n if (startOffset < limit) {\n long gap = limit - startOffset;\n return readCount;\n }\n \n private static int readChunkView(long position, byte[] buffer, int startOffset, ChunkView chunkView, FilerProto.Locations locations) throws IOException {\n private static int readChunkView(long startOffset, byte[] buffer, long bufOffset, ChunkView chunkView, FilerProto.Locations locations) throws IOException {\n \n byte[] chunkData = chunkCache.getChunk(chunkView.fileId);\n \n }\n \n int len = (int) chunkView.size;\n LOG.debug(\"readChunkView fid:{} chunkData.length:{} chunkView.offset:{} buffer.length:{} startOffset:{} len:{}\",\n chunkView.fileId, chunkData.length, chunkView.offset, buffer.length, startOffset, len);\n System.arraycopy(chunkData, startOffset - (int) (chunkView.logicOffset - chunkView.offset), buffer, startOffset, len);\n LOG.debug(\"readChunkView fid:{} chunkData.length:{} chunkView[{};{}) buf[{},{})/{} startOffset:{}\",\n chunkView.fileId, chunkData.length, chunkView.offset, chunkView.offset+chunkView.size, bufOffset, bufOffset+len, buffer.length, 
startOffset);\n System.arraycopy(chunkData, (int) (startOffset - chunkView.logicOffset + chunkView.offset), buffer, (int)bufOffset, len);\n \n return len;\n }\n public static byte[] doFetchFullChunkData(ChunkView chunkView, FilerProto.Locations locations) throws IOException {\n \n byte[] data = null;\n for (long waitTime = 230L; waitTime < 20 * 1000; waitTime += waitTime / 2) {\n IOException lastException = null;\n for (long waitTime = 1000L; waitTime < 10 * 1000; waitTime += waitTime / 2) {\n for (FilerProto.Location location : locations.getLocationsList()) {\n String url = String.format(\"http://%s/%s\", location.getUrl(), chunkView.fileId);\n try {\n break;\n } catch (IOException ioe) {\n LOG.debug(\"doFetchFullChunkData {} :{}\", url, ioe);\n lastException = ioe;\n }\n }\n if (data != null) {\n Thread.sleep(waitTime);\n } catch (InterruptedException e) {\n }\n }\n\n if (data == null) {\n throw lastException;\n }\n \n LOG.debug(\"doFetchFullChunkData fid:{} chunkData.length:{}\", chunkView.fileId, data.length);"}}},{"rowIdx":2045,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"ca109e788525cf1fbd881a17a01d6b646c4d1c2b"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"pombredanne/lz4-java,czietsman/lz4-java,lz4/lz4-java,lz4/lz4-java,pranjalpatil/lz4-java,jpountz/lz4-java,pranjalpatil/lz4-java,lz4/lz4-java,jpountz/lz4-java,pombredanne/lz4-java,pombredanne/lz4-java,czietsman/lz4-java,jpountz/lz4-java,pranjalpatil/lz4-java"},"new_contents":{"kind":"string","value":"package net.jpountz.lz4;\n\n/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. 
See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport static net.jpountz.lz4.Instances.COMPRESSORS;\nimport static net.jpountz.lz4.Instances.UNCOMPRESSORS;\nimport static net.jpountz.lz4.Instances.UNCOMPRESSORS2;\n\nimport java.io.ByteArrayOutputStream;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.util.Arrays;\n\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\n\nimport com.carrotsearch.randomizedtesting.RandomizedRunner;\nimport com.carrotsearch.randomizedtesting.RandomizedTest;\nimport com.carrotsearch.randomizedtesting.annotations.Repeat;\n\n@RunWith(RandomizedRunner.class)\npublic class LZ4Test extends RandomizedTest {\n\n @Test\n @Repeat(iterations=20)\n public void testMaxCompressedLength() {\n final int len = randomBoolean() ? 
randomInt(16) : randomInt(1 << 30);\n final LZ4Compressor refCompressor = LZ4Factory.nativeInstance().fastCompressor();\n for (LZ4Compressor compressor : COMPRESSORS) {\n assertEquals(refCompressor.maxCompressedLength(len), compressor.maxCompressedLength(len));\n }\n }\n\n private static byte[] getCompressedWorstCase(byte[] decompressed) {\n ByteArrayOutputStream baos = new ByteArrayOutputStream();\n int len = decompressed.length;\n if (len >= LZ4Utils.RUN_MASK) {\n baos.write(LZ4Utils.RUN_MASK << LZ4Utils.ML_BITS);\n len -= LZ4Utils.RUN_MASK;\n }\n while (len >= 255) {\n baos.write(255);\n len -= 255;\n }\n baos.write(len);\n try {\n baos.write(decompressed);\n } catch (IOException e) {\n throw new AssertionError();\n }\n return baos.toByteArray();\n }\n\n private static byte[] randomArray(int len, int max) {\n byte[] result = new byte[len];\n for (int i = 0; i < result.length; ++i) {\n result[i] = (byte) randomInt(max);\n }\n return result;\n }\n\n @Test\n public void testEmpty() {\n testRoundTrip(new byte[0]);\n }\n\n public void testUncompressWorstCase(LZ4Decompressor decompressor) {\n final int len = randomInt(100 * 1024);\n final int max = randomInt(256);\n byte[] decompressed = randomArray(len, max);\n byte[] compressed = getCompressedWorstCase(decompressed);\n byte[] restored = new byte[decompressed.length];\n int cpLen = decompressor.decompress(compressed, 0, restored, 0, decompressed.length);\n assertEquals(compressed.length, cpLen);\n assertArrayEquals(decompressed, restored);\n }\n\n @Test\n public void testUncompressWorstCase() {\n for (LZ4Decompressor decompressor : UNCOMPRESSORS) {\n testUncompressWorstCase(decompressor);\n }\n }\n\n public void testUncompressWorstCase(LZ4UnknownSizeDecompressor decompressor) {\n final int len = randomInt(100 * 1024);\n final int max = randomInt(256);\n byte[] decompressed = randomArray(len, max);\n byte[] compressed = getCompressedWorstCase(decompressed);\n byte[] restored = new byte[decompressed.length];\n int 
uncpLen = decompressor.decompress(compressed, 0, compressed.length, restored, 0);\n assertEquals(decompressed.length, uncpLen);\n assertArrayEquals(decompressed, restored);\n }\n\n @Test\n public void testUncompressUnknownSizeWorstCase() {\n for (LZ4UnknownSizeDecompressor decompressor : UNCOMPRESSORS2) {\n testUncompressWorstCase(decompressor);\n }\n }\n\n @Test(expected=LZ4Exception.class)\n @Repeat(iterations=10)\n public void testUncompressUnknownSizeUnderflow() {\n final LZ4UnknownSizeDecompressor decompressor = randomFrom(UNCOMPRESSORS2);\n final int len = randomInt(100000);\n final int max = randomInt(256);\n final byte[] data = new byte[len];\n for (int i = 0; i < data.length; ++i) {\n data[i] = (byte) randomInt(max);\n }\n final int maxCompressedLength = LZ4JNICompressor.FAST.maxCompressedLength(len);\n final byte[] compressed = new byte[maxCompressedLength];\n final int compressedLength = LZ4JNICompressor.FAST.compress(data, 0, data.length, compressed, 0, compressed.length);\n decompressor.decompress(compressed, 0, compressedLength, new byte[data.length - 1], 0);\n }\n\n private static byte[] readResource(String resource) throws IOException {\n InputStream is = LZ4Test.class.getResourceAsStream(resource);\n if (is == null) {\n throw new IllegalStateException(\"Cannot find \" + resource);\n }\n byte[] buf = new byte[4096];\n ByteArrayOutputStream baos = new ByteArrayOutputStream();\n try {\n while (true) {\n final int read = is.read(buf);\n if (read == -1) {\n break;\n }\n baos.write(buf, 0, read);\n }\n } finally {\n is.close();\n }\n return baos.toByteArray();\n }\n\n public void testRoundTrip(byte[] data, int off, int len,\n LZ4Compressor compressor,\n LZ4Decompressor decompressor,\n LZ4UnknownSizeDecompressor decompressor2) {\n final byte[] compressed = new byte[LZ4Utils.maxCompressedLength(len)];\n final int compressedLen = compressor.compress(\n data, off, len,\n compressed, 0, compressed.length);\n\n final byte[] restored = new byte[len];\n 
assertEquals(compressedLen, decompressor.decompress(compressed, 0, restored, 0, len));\n assertArrayEquals(data, restored);\n\n if (len > 0) {\n Arrays.fill(restored, (byte) 0);\n decompressor2.decompress(compressed, 0, compressedLen, restored, 0);\n assertEquals(len, decompressor2.decompress(compressed, 0, compressedLen, restored, 0));\n } else {\n assertEquals(0, decompressor2.decompress(compressed, 0, compressedLen, new byte[1], 0));\n }\n\n LZ4Compressor refCompressor = null;\n if (compressor == LZ4Factory.unsafeInstance().fastCompressor()\n || compressor == LZ4Factory.safeInstance().fastCompressor()) {\n refCompressor = LZ4Factory.nativeInstance().fastCompressor();\n } else if (compressor == LZ4Factory.unsafeInstance().highCompressor()\n || compressor == LZ4Factory.safeInstance().highCompressor()) {\n refCompressor = LZ4Factory.nativeInstance().highCompressor();\n }\n if (refCompressor != null) {\n final byte[] compressed2 = new byte[refCompressor.maxCompressedLength(len)];\n final int compressedLen2 = refCompressor.compress(data, off, len, compressed2, 0, compressed2.length);\n assertCompressedArrayEquals(compressor.toString(),\n Arrays.copyOf(compressed2, compressedLen2),\n Arrays.copyOf(compressed, compressedLen));\n }\n }\n\n public void testRoundTrip(byte[] data, int off, int len, LZ4Factory lz4) {\n for (LZ4Compressor compressor : Arrays.asList(\n lz4.fastCompressor(), lz4.highCompressor())) {\n testRoundTrip(data, off, len, compressor, lz4.decompressor(), lz4.unknwonSizeDecompressor());\n }\n }\n\n public void testRoundTrip(byte[] data, int off, int len) {\n for (LZ4Factory lz4 : Arrays.asList(\n LZ4Factory.nativeInstance(),\n LZ4Factory.unsafeInstance(),\n LZ4Factory.safeInstance())) {\n testRoundTrip(data, off, len, lz4);\n }\n }\n\n public void testRoundTrip(byte[] data) {\n testRoundTrip(data, 0, data.length);\n }\n\n public void testRoundTrip(String resource) throws IOException {\n final byte[] data = readResource(resource);\n 
testRoundTrip(data);\n }\n\n @Test\n public void testRoundtripGeo() throws IOException {\n testRoundTrip(\"/calgary/geo\");\n }\n\n @Test\n public void testRoundtripBook1() throws IOException {\n testRoundTrip(\"/calgary/book1\");\n }\n\n @Test\n public void testRoundtripPic() throws IOException {\n testRoundTrip(\"/calgary/pic\");\n }\n\n @Test\n public void testNullMatchDec() {\n // 1 literal, 4 matchs with matchDec=0, 5 literals\n final byte[] invalid = new byte[] { 16, 42, 0, 0, 42, 42, 42, 42, 42 };\n for (LZ4Decompressor decompressor : UNCOMPRESSORS) {\n try {\n decompressor.decompress(invalid, 0, new byte[10], 0, 10);\n // free not to fail, but do not throw something else than a LZ4Exception\n } catch (LZ4Exception e) {\n // OK\n }\n }\n for (LZ4UnknownSizeDecompressor decompressor : UNCOMPRESSORS2) {\n try {\n decompressor.decompress(invalid, 0, invalid.length, new byte[10], 0);\n assertTrue(decompressor.toString(), false);\n } catch (LZ4Exception e) {\n // OK\n }\n }\n }\n\n @Test\n public void testEndsWithMatch() {\n // 6 literals, 4 matchs\n final byte[] invalid = new byte[] { 96, 42, 43, 44, 45, 46, 47, 5, 0 };\n final int decompressedLength = 10;\n\n for (LZ4Decompressor decompressor : UNCOMPRESSORS) {\n try {\n // it is invalid to end with a match, should be at least 5 literals\n decompressor.decompress(invalid, 0, new byte[decompressedLength], 0, decompressedLength);\n assertTrue(decompressor.toString(), false);\n } catch (LZ4Exception e) {\n // OK\n }\n }\n\n for (LZ4UnknownSizeDecompressor decompressor : UNCOMPRESSORS2) {\n try {\n // it is invalid to end with a match, should be at least 5 literals\n decompressor.decompress(invalid, 0, invalid.length, new byte[20], 0);\n assertTrue(false);\n } catch (LZ4Exception e) {\n // OK\n }\n }\n }\n\n @Test\n public void testEndsWithLessThan5Literals() {\n // 6 literals, 4 matchs\n final byte[] invalidBase = new byte[] { 96, 42, 43, 44, 45, 46, 47, 5, 0 };\n\n for (int i = 1; i < 5; ++i) {\n final byte[] 
invalid = Arrays.copyOf(invalidBase, invalidBase.length + 1 + i);\n invalid[invalidBase.length] = (byte) (i << 4); // i literals at the end\n\n for (LZ4Decompressor decompressor : UNCOMPRESSORS) {\n try {\n // it is invalid to end with a match, should be at least 5 literals\n decompressor.decompress(invalid, 0, new byte[20], 0, 20);\n assertTrue(decompressor.toString(), false);\n } catch (LZ4Exception e) {\n // OK\n }\n }\n\n for (LZ4UnknownSizeDecompressor decompressor : UNCOMPRESSORS2) {\n try {\n // it is invalid to end with a match, should be at least 5 literals\n decompressor.decompress(invalid, 0, invalid.length, new byte[20], 0);\n assertTrue(false);\n } catch (LZ4Exception e) {\n // OK\n }\n }\n }\n }\n\n @Test\n @Repeat(iterations=50)\n public void testCompressExactSize() {\n final byte[] data = randomArray(randomInt(rarely() ? 100000 : 200), randomIntBetween(1, 10));\n for (LZ4Compressor compressor : COMPRESSORS) {\n final byte[] buf = new byte[compressor.maxCompressedLength(data.length)];\n final int compressedLength = compressor.compress(data, 0, data.length, buf, 0, buf.length);\n final byte[] buf2 = new byte[compressedLength];\n try {\n final int compressedLength2 = compressor.compress(data, 0, data.length, buf2, 0, buf2.length);\n assertEquals(compressedLength, compressedLength2);\n assertArrayEquals(Arrays.copyOf(buf, compressedLength), buf2);\n\n try {\n compressor.compress(data, 0, data.length, buf2, 0, buf2.length - 1);\n assertFalse(true);\n } catch (LZ4Exception e) {\n // ok\n }\n } catch (IllegalArgumentException e) {\n // the JNI high compressor does not support exact size compression\n assert compressor == LZ4Factory.nativeInstance().highCompressor();\n }\n }\n }\n\n @Test\n @Repeat(iterations=5)\n public void testAllEqual() {\n final int len = randomBoolean() ? 
randomInt(20) : randomInt(100000);\n final byte[] buf = new byte[len];\n Arrays.fill(buf, randomByte());\n testRoundTrip(buf);\n }\n\n @Test\n public void testMaxDistance() {\n final int len = randomIntBetween(1 << 17, 1 << 18);\n final int off = 0;//randomInt(len - (1 << 16) - (1 << 15));\n final byte[] buf = new byte[len];\n for (int i = 0; i < (1 << 15); ++i) {\n buf[off + i] = randomByte();\n }\n System.arraycopy(buf, off, buf, off + 65535, 1 << 15);\n testRoundTrip(buf);\n }\n\n @Test\n @Repeat(iterations=10)\n public void testCompressedArrayEqualsJNI() {\n final int max = randomIntBetween(1, 15);\n final int len = randomInt(1 << 18);\n final byte[] data = new byte[len];\n for (int i = 0; i < len; ++i) {\n data[i] = (byte) randomInt(max);\n }\n testRoundTrip(data);\n }\n\n private static void assertCompressedArrayEquals(String message, byte[] expected, byte[] actual) {\n int off = 0;\n int decompressedOff = 0;\n while (true) {\n if (off == expected.length) {\n break;\n }\n final Sequence sequence1 = readSequence(expected, off);\n final Sequence sequence2 = readSequence(actual, off);\n assertEquals(message + \", off=\" + off + \", decompressedOff=\" + decompressedOff, sequence1, sequence2);\n off += sequence1.length;\n decompressedOff += sequence1.literalLen + sequence1.matchLen;\n }\n }\n\n private static Sequence readSequence(byte[] buf, int off) {\n final int start = off;\n final int token = buf[off++] & 0xFF;\n int literalLen = token >>> 4;\n if (literalLen >= 0x0F) {\n int len;\n while ((len = buf[off++] & 0xFF) == 0xFF) {\n literalLen += 0xFF;\n }\n literalLen += len;\n }\n off += literalLen;\n if (off == buf.length) {\n return new Sequence(literalLen, -1, -1, off - start);\n }\n int matchDec = (buf[off++] & 0xFF) | ((buf[off++] & 0xFF) << 8);\n int matchLen = token & 0x0F;\n if (matchLen >= 0x0F) {\n int len;\n while ((len = buf[off++] & 0xFF) == 0xFF) {\n matchLen += 0xFF;\n }\n matchLen += len;\n }\n matchLen += 4;\n return new Sequence(literalLen, 
matchDec, matchLen, off - start);\n }\n\n private static class Sequence {\n final int literalLen, matchDec, matchLen, length;\n\n public Sequence(int literalLen, int matchDec, int matchLen, int length) {\n this.literalLen = literalLen;\n this.matchDec = matchDec;\n this.matchLen = matchLen;\n this.length = length;\n }\n\n @Override\n public String toString() {\n return \"Sequence [literalLen=\" + literalLen + \", matchDec=\" + matchDec\n + \", matchLen=\" + matchLen + \"]\";\n }\n\n @Override\n public int hashCode() {\n return 42;\n }\n\n @Override\n public boolean equals(Object obj) {\n if (this == obj)\n return true;\n if (obj == null)\n return false;\n if (getClass() != obj.getClass())\n return false;\n Sequence other = (Sequence) obj;\n if (literalLen != other.literalLen)\n return false;\n if (matchDec != other.matchDec)\n return false;\n if (matchLen != other.matchLen)\n return false;\n return true;\n }\n\n }\n\n}\n"},"new_file":{"kind":"string","value":"src/test/net/jpountz/lz4/LZ4Test.java"},"old_contents":{"kind":"string","value":"package net.jpountz.lz4;\n\n/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport static net.jpountz.lz4.Instances.COMPRESSORS;\nimport static net.jpountz.lz4.Instances.UNCOMPRESSORS;\nimport static net.jpountz.lz4.Instances.UNCOMPRESSORS2;\n\nimport java.io.ByteArrayOutputStream;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.util.Arrays;\n\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\n\nimport com.carrotsearch.randomizedtesting.RandomizedRunner;\nimport com.carrotsearch.randomizedtesting.RandomizedTest;\nimport com.carrotsearch.randomizedtesting.annotations.Repeat;\n\n@RunWith(RandomizedRunner.class)\npublic class LZ4Test extends RandomizedTest {\n\n private static byte[] getCompressedWorstCase(byte[] decompressed) {\n ByteArrayOutputStream baos = new ByteArrayOutputStream();\n int len = decompressed.length;\n if (len >= LZ4Utils.RUN_MASK) {\n baos.write(LZ4Utils.RUN_MASK << LZ4Utils.ML_BITS);\n len -= LZ4Utils.RUN_MASK;\n }\n while (len >= 255) {\n baos.write(255);\n len -= 255;\n }\n baos.write(len);\n try {\n baos.write(decompressed);\n } catch (IOException e) {\n throw new AssertionError();\n }\n return baos.toByteArray();\n }\n\n private static byte[] randomArray(int len, int max) {\n byte[] result = new byte[len];\n for (int i = 0; i < result.length; ++i) {\n result[i] = (byte) randomInt(max);\n }\n return result;\n }\n\n @Test\n public void testEmpty() {\n testRoundTrip(new byte[0]);\n }\n\n public void testUncompressWorstCase(LZ4Decompressor decompressor) {\n final int len = randomInt(100 * 1024);\n final int max = randomInt(256);\n byte[] decompressed = 
randomArray(len, max);\n byte[] compressed = getCompressedWorstCase(decompressed);\n byte[] restored = new byte[decompressed.length];\n int cpLen = decompressor.decompress(compressed, 0, restored, 0, decompressed.length);\n assertEquals(compressed.length, cpLen);\n assertArrayEquals(decompressed, restored);\n }\n\n @Test\n public void testUncompressWorstCase() {\n for (LZ4Decompressor decompressor : UNCOMPRESSORS) {\n testUncompressWorstCase(decompressor);\n }\n }\n\n public void testUncompressWorstCase(LZ4UnknownSizeDecompressor decompressor) {\n final int len = randomInt(100 * 1024);\n final int max = randomInt(256);\n byte[] decompressed = randomArray(len, max);\n byte[] compressed = getCompressedWorstCase(decompressed);\n byte[] restored = new byte[decompressed.length];\n int uncpLen = decompressor.decompress(compressed, 0, compressed.length, restored, 0);\n assertEquals(decompressed.length, uncpLen);\n assertArrayEquals(decompressed, restored);\n }\n\n @Test\n public void testUncompressUnknownSizeWorstCase() {\n for (LZ4UnknownSizeDecompressor decompressor : UNCOMPRESSORS2) {\n testUncompressWorstCase(decompressor);\n }\n }\n\n @Test(expected=LZ4Exception.class)\n @Repeat(iterations=10)\n public void testUncompressUnknownSizeUnderflow() {\n final LZ4UnknownSizeDecompressor decompressor = randomFrom(UNCOMPRESSORS2);\n final int len = randomInt(100000);\n final int max = randomInt(256);\n final byte[] data = new byte[len];\n for (int i = 0; i < data.length; ++i) {\n data[i] = (byte) randomInt(max);\n }\n final int maxCompressedLength = LZ4JNICompressor.FAST.maxCompressedLength(len);\n final byte[] compressed = new byte[maxCompressedLength];\n final int compressedLength = LZ4JNICompressor.FAST.compress(data, 0, data.length, compressed, 0, compressed.length);\n decompressor.decompress(compressed, 0, compressedLength, new byte[data.length - 1], 0);\n }\n\n private static byte[] readResource(String resource) throws IOException {\n InputStream is = 
LZ4Test.class.getResourceAsStream(resource);\n if (is == null) {\n throw new IllegalStateException(\"Cannot find \" + resource);\n }\n byte[] buf = new byte[4096];\n ByteArrayOutputStream baos = new ByteArrayOutputStream();\n try {\n while (true) {\n final int read = is.read(buf);\n if (read == -1) {\n break;\n }\n baos.write(buf, 0, read);\n }\n } finally {\n is.close();\n }\n return baos.toByteArray();\n }\n\n public void testRoundTrip(byte[] data, int off, int len,\n LZ4Compressor compressor,\n LZ4Decompressor decompressor,\n LZ4UnknownSizeDecompressor decompressor2) {\n final byte[] compressed = new byte[LZ4Utils.maxCompressedLength(len)];\n final int compressedLen = compressor.compress(\n data, off, len,\n compressed, 0, compressed.length);\n\n final byte[] restored = new byte[len];\n assertEquals(compressedLen, decompressor.decompress(compressed, 0, restored, 0, len));\n assertArrayEquals(data, restored);\n\n if (len > 0) {\n Arrays.fill(restored, (byte) 0);\n decompressor2.decompress(compressed, 0, compressedLen, restored, 0);\n assertEquals(len, decompressor2.decompress(compressed, 0, compressedLen, restored, 0));\n } else {\n assertEquals(0, decompressor2.decompress(compressed, 0, compressedLen, new byte[1], 0));\n }\n\n LZ4Compressor refCompressor = null;\n if (compressor == LZ4Factory.unsafeInstance().fastCompressor()\n || compressor == LZ4Factory.safeInstance().fastCompressor()) {\n refCompressor = LZ4Factory.nativeInstance().fastCompressor();\n } else if (compressor == LZ4Factory.unsafeInstance().highCompressor()\n || compressor == LZ4Factory.safeInstance().highCompressor()) {\n refCompressor = LZ4Factory.nativeInstance().highCompressor();\n }\n if (refCompressor != null) {\n final byte[] compressed2 = new byte[refCompressor.maxCompressedLength(len)];\n final int compressedLen2 = refCompressor.compress(data, off, len, compressed2, 0, compressed2.length);\n assertCompressedArrayEquals(compressor.toString(),\n Arrays.copyOf(compressed2, 
compressedLen2),\n Arrays.copyOf(compressed, compressedLen));\n }\n }\n\n public void testRoundTrip(byte[] data, int off, int len, LZ4Factory lz4) {\n for (LZ4Compressor compressor : Arrays.asList(\n lz4.fastCompressor(), lz4.highCompressor())) {\n testRoundTrip(data, off, len, compressor, lz4.decompressor(), lz4.unknwonSizeDecompressor());\n }\n }\n\n public void testRoundTrip(byte[] data, int off, int len) {\n for (LZ4Factory lz4 : Arrays.asList(\n LZ4Factory.nativeInstance(),\n LZ4Factory.unsafeInstance(),\n LZ4Factory.safeInstance())) {\n testRoundTrip(data, off, len, lz4);\n }\n }\n\n public void testRoundTrip(byte[] data) {\n testRoundTrip(data, 0, data.length);\n }\n\n public void testRoundTrip(String resource) throws IOException {\n final byte[] data = readResource(resource);\n testRoundTrip(data);\n }\n\n @Test\n public void testRoundtripGeo() throws IOException {\n testRoundTrip(\"/calgary/geo\");\n }\n\n @Test\n public void testRoundtripBook1() throws IOException {\n testRoundTrip(\"/calgary/book1\");\n }\n\n @Test\n public void testRoundtripPic() throws IOException {\n testRoundTrip(\"/calgary/pic\");\n }\n\n @Test\n public void testNullMatchDec() {\n // 1 literal, 4 matchs with matchDec=0, 5 literals\n final byte[] invalid = new byte[] { 16, 42, 0, 0, 42, 42, 42, 42, 42 };\n for (LZ4Decompressor decompressor : UNCOMPRESSORS) {\n try {\n decompressor.decompress(invalid, 0, new byte[10], 0, 10);\n // free not to fail, but do not throw something else than a LZ4Exception\n } catch (LZ4Exception e) {\n // OK\n }\n }\n for (LZ4UnknownSizeDecompressor decompressor : UNCOMPRESSORS2) {\n try {\n decompressor.decompress(invalid, 0, invalid.length, new byte[10], 0);\n assertTrue(decompressor.toString(), false);\n } catch (LZ4Exception e) {\n // OK\n }\n }\n }\n\n @Test\n public void testEndsWithMatch() {\n // 6 literals, 4 matchs\n final byte[] invalid = new byte[] { 96, 42, 43, 44, 45, 46, 47, 5, 0 };\n final int decompressedLength = 10;\n\n for (LZ4Decompressor 
decompressor : UNCOMPRESSORS) {\n try {\n // it is invalid to end with a match, should be at least 5 literals\n decompressor.decompress(invalid, 0, new byte[decompressedLength], 0, decompressedLength);\n assertTrue(decompressor.toString(), false);\n } catch (LZ4Exception e) {\n // OK\n }\n }\n\n for (LZ4UnknownSizeDecompressor decompressor : UNCOMPRESSORS2) {\n try {\n // it is invalid to end with a match, should be at least 5 literals\n decompressor.decompress(invalid, 0, invalid.length, new byte[20], 0);\n assertTrue(false);\n } catch (LZ4Exception e) {\n // OK\n }\n }\n }\n\n @Test\n public void testEndsWithLessThan5Literals() {\n // 6 literals, 4 matchs\n final byte[] invalidBase = new byte[] { 96, 42, 43, 44, 45, 46, 47, 5, 0 };\n\n for (int i = 1; i < 5; ++i) {\n final byte[] invalid = Arrays.copyOf(invalidBase, invalidBase.length + 1 + i);\n invalid[invalidBase.length] = (byte) (i << 4); // i literals at the end\n\n for (LZ4Decompressor decompressor : UNCOMPRESSORS) {\n try {\n // it is invalid to end with a match, should be at least 5 literals\n decompressor.decompress(invalid, 0, new byte[20], 0, 20);\n assertTrue(decompressor.toString(), false);\n } catch (LZ4Exception e) {\n // OK\n }\n }\n\n for (LZ4UnknownSizeDecompressor decompressor : UNCOMPRESSORS2) {\n try {\n // it is invalid to end with a match, should be at least 5 literals\n decompressor.decompress(invalid, 0, invalid.length, new byte[20], 0);\n assertTrue(false);\n } catch (LZ4Exception e) {\n // OK\n }\n }\n }\n }\n\n @Test\n @Repeat(iterations=50)\n public void testCompressExactSize() {\n final byte[] data = randomArray(randomInt(rarely() ? 
100000 : 200), randomIntBetween(1, 10));\n for (LZ4Compressor compressor : COMPRESSORS) {\n final byte[] buf = new byte[compressor.maxCompressedLength(data.length)];\n final int compressedLength = compressor.compress(data, 0, data.length, buf, 0, buf.length);\n final byte[] buf2 = new byte[compressedLength];\n try {\n final int compressedLength2 = compressor.compress(data, 0, data.length, buf2, 0, buf2.length);\n assertEquals(compressedLength, compressedLength2);\n assertArrayEquals(Arrays.copyOf(buf, compressedLength), buf2);\n\n try {\n compressor.compress(data, 0, data.length, buf2, 0, buf2.length - 1);\n assertFalse(true);\n } catch (LZ4Exception e) {\n // ok\n }\n } catch (IllegalArgumentException e) {\n // the JNI high compressor does not support exact size compression\n assert compressor == LZ4Factory.nativeInstance().highCompressor();\n }\n }\n }\n\n @Test\n @Repeat(iterations=5)\n public void testAllEqual() {\n final int len = randomBoolean() ? randomInt(20) : randomInt(100000);\n final byte[] buf = new byte[len];\n Arrays.fill(buf, randomByte());\n testRoundTrip(buf);\n }\n\n @Test\n public void testMaxDistance() {\n final int len = randomIntBetween(1 << 17, 1 << 18);\n final int off = 0;//randomInt(len - (1 << 16) - (1 << 15));\n final byte[] buf = new byte[len];\n for (int i = 0; i < (1 << 15); ++i) {\n buf[off + i] = randomByte();\n }\n System.arraycopy(buf, off, buf, off + 65535, 1 << 15);\n testRoundTrip(buf);\n }\n\n @Test\n @Repeat(iterations=10)\n public void testCompressedArrayEqualsJNI() {\n final int max = randomIntBetween(1, 15);\n final int len = randomInt(1 << 18);\n final byte[] data = new byte[len];\n for (int i = 0; i < len; ++i) {\n data[i] = (byte) randomInt(max);\n }\n testRoundTrip(data);\n }\n\n private static void assertCompressedArrayEquals(String message, byte[] expected, byte[] actual) {\n int off = 0;\n int decompressedOff = 0;\n while (true) {\n if (off == expected.length) {\n break;\n }\n final Sequence sequence1 = 
readSequence(expected, off);\n final Sequence sequence2 = readSequence(actual, off);\n assertEquals(message + \", off=\" + off + \", decompressedOff=\" + decompressedOff, sequence1, sequence2);\n off += sequence1.length;\n decompressedOff += sequence1.literalLen + sequence1.matchLen;\n }\n }\n\n private static Sequence readSequence(byte[] buf, int off) {\n final int start = off;\n final int token = buf[off++] & 0xFF;\n int literalLen = token >>> 4;\n if (literalLen >= 0x0F) {\n int len;\n while ((len = buf[off++] & 0xFF) == 0xFF) {\n literalLen += 0xFF;\n }\n literalLen += len;\n }\n off += literalLen;\n if (off == buf.length) {\n return new Sequence(literalLen, -1, -1, off - start);\n }\n int matchDec = (buf[off++] & 0xFF) | ((buf[off++] & 0xFF) << 8);\n int matchLen = token & 0x0F;\n if (matchLen >= 0x0F) {\n int len;\n while ((len = buf[off++] & 0xFF) == 0xFF) {\n matchLen += 0xFF;\n }\n matchLen += len;\n }\n matchLen += 4;\n return new Sequence(literalLen, matchDec, matchLen, off - start);\n }\n\n private static class Sequence {\n final int literalLen, matchDec, matchLen, length;\n\n public Sequence(int literalLen, int matchDec, int matchLen, int length) {\n this.literalLen = literalLen;\n this.matchDec = matchDec;\n this.matchLen = matchLen;\n this.length = length;\n }\n\n @Override\n public String toString() {\n return \"Sequence [literalLen=\" + literalLen + \", matchDec=\" + matchDec\n + \", matchLen=\" + matchLen + \"]\";\n }\n\n @Override\n public int hashCode() {\n return 42;\n }\n\n @Override\n public boolean equals(Object obj) {\n if (this == obj)\n return true;\n if (obj == null)\n return false;\n if (getClass() != obj.getClass())\n return false;\n Sequence other = (Sequence) obj;\n if (literalLen != other.literalLen)\n return false;\n if (matchDec != other.matchDec)\n return false;\n if (matchLen != other.matchLen)\n return false;\n return true;\n }\n\n }\n\n}\n"},"message":{"kind":"string","value":"Test 
maxCompressedLength.\n"},"old_file":{"kind":"string","value":"src/test/net/jpountz/lz4/LZ4Test.java"},"subject":{"kind":"string","value":"Test maxCompressedLength."},"git_diff":{"kind":"string","value":"rc/test/net/jpountz/lz4/LZ4Test.java\n @RunWith(RandomizedRunner.class)\n public class LZ4Test extends RandomizedTest {\n \n @Test\n @Repeat(iterations=20)\n public void testMaxCompressedLength() {\n final int len = randomBoolean() ? randomInt(16) : randomInt(1 << 30);\n final LZ4Compressor refCompressor = LZ4Factory.nativeInstance().fastCompressor();\n for (LZ4Compressor compressor : COMPRESSORS) {\n assertEquals(refCompressor.maxCompressedLength(len), compressor.maxCompressedLength(len));\n }\n }\n\n private static byte[] getCompressedWorstCase(byte[] decompressed) {\n ByteArrayOutputStream baos = new ByteArrayOutputStream();\n int len = decompressed.length;"}}},{"rowIdx":2046,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"d9e8af15806b9fd53f41d62d744db67f88533594"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"b2ihealthcare/snow-owl,IHTSDO/snow-owl,IHTSDO/snow-owl,b2ihealthcare/snow-owl,b2ihealthcare/snow-owl,b2ihealthcare/snow-owl,IHTSDO/snow-owl,IHTSDO/snow-owl"},"new_contents":{"kind":"string","value":"/*\n * Copyright 2011-2017 B2i Healthcare Pte Ltd, http://b2i.sg\n * \n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * 
limitations under the License.\n */\npackage com.b2international.snowowl.snomed.datastore.index.entry;\n\nimport static com.b2international.index.query.Expressions.exactMatch;\nimport static com.b2international.index.query.Expressions.matchAny;\nimport static com.b2international.index.query.Expressions.matchAnyDecimal;\nimport static com.b2international.index.query.Expressions.matchAnyInt;\nimport static com.b2international.index.query.Expressions.matchRange;\nimport static com.b2international.snowowl.snomed.common.SnomedTerminologyComponentConstants.CONCEPT_NUMBER;\nimport static com.b2international.snowowl.snomed.common.SnomedTerminologyComponentConstants.DESCRIPTION_NUMBER;\nimport static com.b2international.snowowl.snomed.common.SnomedTerminologyComponentConstants.RELATIONSHIP_NUMBER;\nimport static com.google.common.base.Preconditions.checkArgument;\n\nimport java.math.BigDecimal;\nimport java.util.Collection;\nimport java.util.Date;\nimport java.util.Map;\nimport java.util.Map.Entry;\n\nimport com.b2international.commons.StringUtils;\nimport com.b2international.index.Doc;\nimport com.b2international.index.query.Expression;\nimport com.b2international.snowowl.core.CoreTerminologyBroker;\nimport com.b2international.snowowl.core.date.DateFormats;\nimport com.b2international.snowowl.core.date.EffectiveTimes;\nimport com.b2international.snowowl.datastore.cdo.CDOIDUtils;\nimport com.b2international.snowowl.snomed.common.SnomedRf2Headers;\nimport com.b2international.snowowl.snomed.core.domain.Acceptability;\nimport com.b2international.snowowl.snomed.core.domain.InactivationIndicator;\nimport com.b2international.snowowl.snomed.core.domain.RelationshipRefinability;\nimport com.b2international.snowowl.snomed.core.domain.SnomedConcept;\nimport com.b2international.snowowl.snomed.core.domain.SnomedCoreComponent;\nimport com.b2international.snowowl.snomed.core.domain.SnomedDescription;\nimport com.b2international.snowowl.snomed.core.domain.SnomedRelationship;\nimport 
com.b2international.snowowl.snomed.core.domain.refset.SnomedReferenceSetMember;\nimport com.b2international.snowowl.snomed.datastore.SnomedRefSetUtil;\nimport com.b2international.snowowl.snomed.snomedrefset.DataType;\nimport com.b2international.snowowl.snomed.snomedrefset.SnomedAssociationRefSetMember;\nimport com.b2international.snowowl.snomed.snomedrefset.SnomedAttributeValueRefSetMember;\nimport com.b2international.snowowl.snomed.snomedrefset.SnomedComplexMapRefSetMember;\nimport com.b2international.snowowl.snomed.snomedrefset.SnomedConcreteDataTypeRefSetMember;\nimport com.b2international.snowowl.snomed.snomedrefset.SnomedDescriptionTypeRefSetMember;\nimport com.b2international.snowowl.snomed.snomedrefset.SnomedLanguageRefSetMember;\nimport com.b2international.snowowl.snomed.snomedrefset.SnomedModuleDependencyRefSetMember;\nimport com.b2international.snowowl.snomed.snomedrefset.SnomedQueryRefSetMember;\nimport com.b2international.snowowl.snomed.snomedrefset.SnomedRefSetMember;\nimport com.b2international.snowowl.snomed.snomedrefset.SnomedRefSetType;\nimport com.b2international.snowowl.snomed.snomedrefset.SnomedSimpleMapRefSetMember;\nimport com.b2international.snowowl.snomed.snomedrefset.util.SnomedRefSetSwitch;\nimport com.fasterxml.jackson.annotation.JsonCreator;\nimport com.fasterxml.jackson.annotation.JsonIgnore;\nimport com.fasterxml.jackson.annotation.JsonProperty;\nimport com.fasterxml.jackson.databind.annotation.JsonDeserialize;\nimport com.fasterxml.jackson.databind.annotation.JsonPOJOBuilder;\nimport com.google.common.base.Function;\nimport com.google.common.base.Objects.ToStringHelper;\nimport com.google.common.base.Strings;\nimport com.google.common.collect.FluentIterable;\nimport com.google.common.collect.ImmutableMap;\n\n/**\n * Lightweight representation of a SNOMED CT reference set member.\n */\n@Doc\n@JsonDeserialize(builder = SnomedRefSetMemberIndexEntry.Builder.class)\npublic final class SnomedRefSetMemberIndexEntry extends SnomedDocument 
{\n\n\tprivate static final long serialVersionUID = 5198766293865046258L;\n\n\tpublic static class Fields extends SnomedDocument.Fields {\n\t\t// known RF2 fields\n\t\tpublic static final String REFERENCE_SET_ID = \"referenceSetId\"; // XXX different than the RF2 header field name\n\t\tpublic static final String REFERENCED_COMPONENT_ID = SnomedRf2Headers.FIELD_REFERENCED_COMPONENT_ID;\n\t\tpublic static final String ACCEPTABILITY_ID = SnomedRf2Headers.FIELD_ACCEPTABILITY_ID;\n\t\tpublic static final String VALUE_ID = SnomedRf2Headers.FIELD_VALUE_ID;\n\t\tpublic static final String TARGET_COMPONENT = SnomedRf2Headers.FIELD_TARGET_COMPONENT;\n\t\tpublic static final String MAP_TARGET = SnomedRf2Headers.FIELD_MAP_TARGET;\n\t\tpublic static final String MAP_TARGET_DESCRIPTION = SnomedRf2Headers.FIELD_MAP_TARGET_DESCRIPTION;\n\t\tpublic static final String MAP_GROUP = SnomedRf2Headers.FIELD_MAP_GROUP;\n\t\tpublic static final String MAP_PRIORITY = SnomedRf2Headers.FIELD_MAP_PRIORITY;\n\t\tpublic static final String MAP_RULE = SnomedRf2Headers.FIELD_MAP_RULE;\n\t\tpublic static final String MAP_ADVICE = SnomedRf2Headers.FIELD_MAP_ADVICE;\n\t\tpublic static final String MAP_CATEGORY_ID = SnomedRf2Headers.FIELD_MAP_CATEGORY_ID;\n\t\tpublic static final String CORRELATION_ID = SnomedRf2Headers.FIELD_CORRELATION_ID;\n\t\tpublic static final String DESCRIPTION_FORMAT = SnomedRf2Headers.FIELD_DESCRIPTION_FORMAT;\n\t\tpublic static final String DESCRIPTION_LENGTH = SnomedRf2Headers.FIELD_DESCRIPTION_LENGTH;\n\t\tpublic static final String OPERATOR_ID = SnomedRf2Headers.FIELD_OPERATOR_ID;\n\t\tpublic static final String UNIT_ID = SnomedRf2Headers.FIELD_UNIT_ID;\n\t\tpublic static final String QUERY = SnomedRf2Headers.FIELD_QUERY;\n\t\tpublic static final String CHARACTERISTIC_TYPE_ID = SnomedRf2Headers.FIELD_CHARACTERISTIC_TYPE_ID;\n\t\tpublic static final String SOURCE_EFFECTIVE_TIME = SnomedRf2Headers.FIELD_SOURCE_EFFECTIVE_TIME;\n\t\tpublic static final String 
TARGET_EFFECTIVE_TIME = SnomedRf2Headers.FIELD_TARGET_EFFECTIVE_TIME;\n\t\tprivate static final String DATA_VALUE = SnomedRf2Headers.FIELD_VALUE;\n\t\tpublic static final String ATTRIBUTE_NAME = SnomedRf2Headers.FIELD_ATTRIBUTE_NAME;\n\t\t// extra index fields to store datatype and map target type\n\t\tpublic static final String DATA_TYPE = \"dataType\";\n\t\tpublic static final String REFSET_TYPE = \"referenceSetType\";\n\t\tpublic static final String REFERENCED_COMPONENT_TYPE = \"referencedComponentType\";\n\t\t// CD value fields per type\n\t\tpublic static final String BOOLEAN_VALUE = \"booleanValue\";\n\t\tpublic static final String STRING_VALUE = \"stringValue\";\n\t\tpublic static final String INTEGER_VALUE = \"integerValue\";\n\t\tpublic static final String DECIMAL_VALUE = \"decimalValue\";\n\t}\n\t\n\t\tpublic static Builder builder() {\n\t\treturn new Builder();\n\t}\n\t\n\tpublic static Builder builder(final SnomedRefSetMemberIndexEntry source) {\n\t\treturn builder()\n\t\t\t\t.storageKey(source.getStorageKey())\n\t\t\t\t.active(source.isActive())\n\t\t\t\t.effectiveTime(source.getEffectiveTime())\n\t\t\t\t.id(source.getId())\n\t\t\t\t.moduleId(source.getModuleId())\n\t\t\t\t.referencedComponentId(source.getReferencedComponentId())\n\t\t\t\t.referencedComponentType(source.getReferencedComponentType())\n\t\t\t\t.referenceSetId(source.getReferenceSetId())\n\t\t\t\t.referenceSetType(source.getReferenceSetType())\n\t\t\t\t.released(source.isReleased())\n\t\t\t\t.fields(source.getAdditionalFields());\n\t}\n\t\n\tpublic static final Builder builder(final SnomedReferenceSetMember input) {\n\t\tfinal Builder builder = 
builder()\n\t\t\t\t.storageKey(input.getStorageKey())\n\t\t\t\t.active(input.isActive())\n\t\t\t\t.effectiveTime(EffectiveTimes.getEffectiveTime(input.getEffectiveTime()))\n\t\t\t\t.id(input.getId())\n\t\t\t\t.moduleId(input.getModuleId())\n\t\t\t\t.referencedComponentId(input.getReferencedComponent().getId())\n\t\t\t\t.referenceSetId(input.getReferenceSetId())\n\t\t\t\t.referenceSetType(input.type())\n\t\t\t\t.released(input.isReleased());\n\t\t\n\t\tif (input.getReferencedComponent() instanceof SnomedConcept) {\n\t\t\tbuilder.referencedComponentType(CONCEPT_NUMBER);\n\t\t} else if (input.getReferencedComponent() instanceof SnomedDescription) {\n\t\t\tbuilder.referencedComponentType(DESCRIPTION_NUMBER);\n\t\t} else if (input.getReferencedComponent() instanceof SnomedRelationship) {\n\t\t\tbuilder.referencedComponentType(RELATIONSHIP_NUMBER);\n\t\t} else {\n\t\t\tbuilder.referencedComponentType(CoreTerminologyBroker.UNSPECIFIED_NUMBER_SHORT);\n\t\t}\n\t\t\n\t\t\n\t\tfor (Entry entry : input.getProperties().entrySet()) {\n\t\t\tfinal Object value = entry.getValue();\n\t\t\tfinal String fieldName = entry.getKey();\n\t\t\t// certain RF2 fields can be expanded into full blown representation class, get the ID in this case\n\t\t\tif (value instanceof SnomedCoreComponent) {\n\t\t\t\tbuilder.field(fieldName, ((SnomedCoreComponent) value).getId());\n\t\t\t} else {\n\t\t\t\tbuilder.field(fieldName, convertValue(entry.getKey(), value));\n\t\t\t}\n\t\t}\n\t\t\n\t\treturn builder;\n\t}\n\t\n\tpublic static Builder builder(SnomedRefSetMember refSetMember) {\n\t\tfinal Builder builder = SnomedRefSetMemberIndexEntry.builder()\n\t\t\t\t.storageKey(CDOIDUtils.asLong(refSetMember.cdoID()))\n\t\t\t\t.id(refSetMember.getUuid()) \n\t\t\t\t.moduleId(refSetMember.getModuleId())\n\t\t\t\t.active(refSetMember.isActive())\n\t\t\t\t.released(refSetMember.isReleased())\n\t\t\t\t.effectiveTime(refSetMember.isSetEffectiveTime() ? 
refSetMember.getEffectiveTime().getTime() : EffectiveTimes.UNSET_EFFECTIVE_TIME)\n\t\t\t\t.referenceSetId(refSetMember.getRefSetIdentifierId())\n\t\t\t\t.referenceSetType(refSetMember.getRefSet().getType())\n\t\t\t\t.referencedComponentType(refSetMember.getReferencedComponentType())\n\t\t\t\t.referencedComponentId(refSetMember.getReferencedComponentId());\n\n\t\treturn new SnomedRefSetSwitch() {\n\n\t\t\t@Override\n\t\t\tpublic Builder caseSnomedAssociationRefSetMember(final SnomedAssociationRefSetMember associationMember) {\n\t\t\t\treturn builder.targetComponent(associationMember.getTargetComponentId());\n\t\t\t}\n\n\t\t\t@Override\n\t\t\tpublic Builder caseSnomedAttributeValueRefSetMember(final SnomedAttributeValueRefSetMember attributeValueMember) {\n\t\t\t\treturn builder.field(Fields.VALUE_ID, attributeValueMember.getValueId());\n\t\t\t}\n\n\t\t\t@Override\n\t\t\tpublic Builder caseSnomedConcreteDataTypeRefSetMember(final SnomedConcreteDataTypeRefSetMember concreteDataTypeMember) {\n\t\t\t\treturn builder.field(Fields.ATTRIBUTE_NAME, concreteDataTypeMember.getLabel())\n\t\t\t\t\t\t.field(Fields.DATA_TYPE, concreteDataTypeMember.getDataType())\n\t\t\t\t\t\t.field(Fields.DATA_VALUE, concreteDataTypeMember.getSerializedValue())\n\t\t\t\t\t\t.field(Fields.CHARACTERISTIC_TYPE_ID, concreteDataTypeMember.getCharacteristicTypeId())\n\t\t\t\t\t\t.field(Fields.OPERATOR_ID, concreteDataTypeMember.getOperatorComponentId())\n\t\t\t\t\t\t.field(Fields.UNIT_ID, Strings.nullToEmpty(concreteDataTypeMember.getUomComponentId()));\n\t\t\t}\n\n\t\t\t@Override\n\t\t\tpublic Builder caseSnomedDescriptionTypeRefSetMember(final SnomedDescriptionTypeRefSetMember descriptionTypeMember) {\n\t\t\t\treturn builder\n\t\t\t\t\t\t.field(Fields.DESCRIPTION_FORMAT, descriptionTypeMember.getDescriptionFormat())\n\t\t\t\t\t\t.field(Fields.DESCRIPTION_LENGTH, descriptionTypeMember.getDescriptionLength());\n\t\t\t}\n\n\t\t\t@Override\n\t\t\tpublic Builder caseSnomedLanguageRefSetMember(final 
SnomedLanguageRefSetMember languageMember) {\n\t\t\t\treturn builder.field(Fields.ACCEPTABILITY_ID, languageMember.getAcceptabilityId());\n\t\t\t}\n\n\t\t\t@Override\n\t\t\tpublic Builder caseSnomedQueryRefSetMember(final SnomedQueryRefSetMember queryMember) {\n\t\t\t\treturn builder.field(Fields.QUERY, queryMember.getQuery());\n\t\t\t}\n\n\t\t\t@Override\n\t\t\tpublic Builder caseSnomedSimpleMapRefSetMember(final SnomedSimpleMapRefSetMember mapRefSetMember) {\n\t\t\t\treturn builder\n\t\t\t\t\t\t.field(Fields.MAP_TARGET, mapRefSetMember.getMapTargetComponentId())\n\t\t\t\t\t\t.field(Fields.MAP_TARGET_DESCRIPTION, mapRefSetMember.getMapTargetComponentDescription());\n\t\t\t}\n\t\t\t\n\t\t\t@Override\n\t\t\tpublic Builder caseSnomedComplexMapRefSetMember(final SnomedComplexMapRefSetMember mapRefSetMember) {\n\t\t\t\treturn builder\n\t\t\t\t\t\t.field(Fields.MAP_TARGET, mapRefSetMember.getMapTargetComponentId())\n\t\t\t\t\t\t.field(Fields.CORRELATION_ID, mapRefSetMember.getCorrelationId())\n\t\t\t\t\t\t.field(Fields.MAP_GROUP, Integer.valueOf(mapRefSetMember.getMapGroup()))\n\t\t\t\t\t\t.field(Fields.MAP_ADVICE, Strings.nullToEmpty(mapRefSetMember.getMapAdvice()))\n\t\t\t\t\t\t.field(Fields.MAP_PRIORITY, Integer.valueOf(mapRefSetMember.getMapPriority()))\n\t\t\t\t\t\t.field(Fields.MAP_RULE, Strings.nullToEmpty(mapRefSetMember.getMapRule()))\n\t\t\t\t\t\t// extended refset\n\t\t\t\t\t\t.field(Fields.MAP_CATEGORY_ID, Strings.nullToEmpty(mapRefSetMember.getMapCategoryId()));\n\t\t\t}\n\t\t\t\n\t\t\t@Override\n\t\t\tpublic Builder caseSnomedModuleDependencyRefSetMember(SnomedModuleDependencyRefSetMember member) {\n\t\t\t\treturn builder\n\t\t\t\t\t\t.field(Fields.SOURCE_EFFECTIVE_TIME, EffectiveTimes.getEffectiveTime(member.getSourceEffectiveTime()))\n\t\t\t\t\t\t.field(Fields.TARGET_EFFECTIVE_TIME, EffectiveTimes.getEffectiveTime(member.getTargetEffectiveTime()));\n\t\t\t}\n\t\t\t\n\t\t\t@Override\n\t\t\tpublic Builder caseSnomedRefSetMember(SnomedRefSetMember object) 
{\n\t\t\t\treturn builder;\n\t\t\t};\n\n\t\t}.doSwitch(refSetMember);\n\t}\n\t\n\tprivate static Object convertValue(String rf2Field, Object value) {\n\t\tswitch (rf2Field) {\n\t\tcase SnomedRf2Headers.FIELD_SOURCE_EFFECTIVE_TIME:\n\t\tcase SnomedRf2Headers.FIELD_TARGET_EFFECTIVE_TIME:\n\t\t\tif (value instanceof String && !StringUtils.isEmpty((String) value)) {\n\t\t\t\tDate parsedDate = EffectiveTimes.parse((String) value, DateFormats.SHORT);\n\t\t\t\treturn EffectiveTimes.getEffectiveTime(parsedDate);\n\t\t\t} else {\n\t\t\t\treturn EffectiveTimes.UNSET_EFFECTIVE_TIME;\n\t\t\t}\n\t\tdefault: \n\t\t\treturn value;\n\t\t}\n\t}\n\n\tpublic static Collection from(final Iterable refSetMembers) {\n\t\treturn FluentIterable.from(refSetMembers).transform(new Function() {\n\t\t\t@Override\n\t\t\tpublic SnomedRefSetMemberIndexEntry apply(final SnomedReferenceSetMember refSetMember) {\n\t\t\t\treturn builder(refSetMember).build();\n\t\t\t}\n\t\t}).toList();\n\t}\n\n\tpublic static final class Expressions extends SnomedDocument.Expressions {\n\t\t\n\t\tpublic static Expression referenceSetId(String referenceSetId) {\n\t\t\treturn exactMatch(Fields.REFERENCE_SET_ID, referenceSetId);\n\t\t}\n\n\t\tpublic static Expression referenceSetId(Collection referenceSetIds) {\n\t\t\treturn matchAny(Fields.REFERENCE_SET_ID, referenceSetIds);\n\t\t}\n\t\t\n\t\tpublic static Expression referencedComponentId(String referencedComponentId) {\n\t\t\treturn exactMatch(Fields.REFERENCED_COMPONENT_ID, referencedComponentId);\n\t\t}\n\t\t\n\t\tpublic static Expression mapTargets(Collection mapTargets) {\n\t\t\treturn matchAny(Fields.MAP_TARGET, mapTargets);\n\t\t}\n\n\t\tpublic static Expression referencedComponentIds(Collection referencedComponentIds) {\n\t\t\treturn matchAny(Fields.REFERENCED_COMPONENT_ID, referencedComponentIds);\n\t\t}\n\t\t\n\t\tpublic static Expression targetComponents(Collection targetComponentIds) {\n\t\t\treturn matchAny(Fields.TARGET_COMPONENT, 
targetComponentIds);\n\t\t}\n\t\t\n\t\tpublic static Expression acceptabilityIds(Collection acceptabilityIds) {\n\t\t\treturn matchAny(Fields.ACCEPTABILITY_ID, acceptabilityIds);\n\t\t}\n\t\t\n\t\tpublic static Expression characteristicTypeIds(Collection characteristicTypeIds) {\n\t\t\treturn matchAny(Fields.CHARACTERISTIC_TYPE_ID, characteristicTypeIds);\n\t\t}\n\t\t\n\t\tpublic static Expression correlationIds(Collection correlationIds) {\n\t\t\treturn matchAny(Fields.CORRELATION_ID, correlationIds);\n\t\t}\n\t\t\n\t\tpublic static Expression descriptionFormats(Collection descriptionFormats) {\n\t\t\treturn matchAny(Fields.DESCRIPTION_FORMAT, descriptionFormats);\n\t\t}\n\t\t\n\t\tpublic static Expression mapCategoryIds(Collection mapCategoryIds) {\n\t\t\treturn matchAny(Fields.MAP_CATEGORY_ID, mapCategoryIds);\n\t\t}\n\t\t\n\t\tpublic static Expression operatorIds(Collection operatorIds) {\n\t\t\treturn matchAny(Fields.OPERATOR_ID, operatorIds);\n\t\t}\n\t\t\n\t\tpublic static Expression unitIds(Collection unitIds) {\n\t\t\treturn matchAny(Fields.UNIT_ID, unitIds);\n\t\t}\n\t\t\n\t\tpublic static Expression valueIds(Collection valueIds) {\n\t\t\treturn matchAny(Fields.VALUE_ID, valueIds);\n\t\t}\n\t\t\n\t\tpublic static Expression values(DataType type, Collection values) {\n\t\t\tswitch (type) {\n\t\t\tcase STRING: \n\t\t\t\treturn matchAny(Fields.STRING_VALUE, FluentIterable.from(values).filter(String.class).toSet());\n\t\t\tcase INTEGER:\n\t\t\t\treturn matchAnyInt(Fields.INTEGER_VALUE, FluentIterable.from(values).filter(Integer.class).toSet());\n\t\t\tcase DECIMAL:\n\t\t\t\treturn matchAnyDecimal(Fields.DECIMAL_VALUE, FluentIterable.from(values).filter(BigDecimal.class).toSet());\n\t\t\tdefault:\n\t\t\t\tthrow new UnsupportedOperationException(\"Unsupported data type when filtering by values, \" + type);\n\t\t\t}\n\t\t}\n\t\t\n\t\tpublic static Expression valueRange(DataType type, final Object lower, final Object upper, boolean includeLower, boolean 
includeUpper) {\n\t\t\tswitch (type) {\n\t\t\tcase STRING: \n\t\t\t\treturn matchRange(Fields.STRING_VALUE, (String) lower, (String) upper, includeLower, includeUpper);\n\t\t\tcase INTEGER:\n\t\t\t\treturn matchRange(Fields.INTEGER_VALUE, (Integer) lower, (Integer) upper, includeLower, includeUpper);\n\t\t\tcase DECIMAL:\n\t\t\t\treturn matchRange(Fields.DECIMAL_VALUE, (BigDecimal) lower, (BigDecimal) upper, includeLower, includeUpper);\n\t\t\tdefault:\n\t\t\t\tthrow new UnsupportedOperationException(\"Unsupported data type when filtering by values, \" + type);\n\t\t\t}\n\t\t}\n\t\t\n\t\tpublic static Expression dataTypes(Collection dataTypes) {\n\t\t\treturn matchAny(Fields.DATA_TYPE, FluentIterable.from(dataTypes).transform(new Function() {\n\t\t\t\t@Override\n\t\t\t\tpublic String apply(DataType input) {\n\t\t\t\t\treturn input.name();\n\t\t\t\t}\n\t\t\t}).toSet());\n\t\t}\n\t\t\n\t\tpublic static Expression attributeNames(Collection attributeNames) {\n\t\t\treturn matchAny(Fields.ATTRIBUTE_NAME, attributeNames);\n\t\t}\n\t\t\n\t\tpublic static Expression sourceEffectiveTime(long effectiveTime) {\n\t\t\treturn exactMatch(Fields.SOURCE_EFFECTIVE_TIME, effectiveTime);\n\t\t}\n\t\t\n\t\tpublic static Expression targetEffectiveTime(long effectiveTime) {\n\t\t\treturn exactMatch(Fields.TARGET_EFFECTIVE_TIME, effectiveTime);\n\t\t}\n\t\t\n\t\tpublic static Expression refSetTypes(Collection refSetTypes) {\n\t\t\treturn matchAny(Fields.REFSET_TYPE, FluentIterable.from(refSetTypes).transform(new Function() {\n\t\t\t\t@Override\n\t\t\t\tpublic String apply(SnomedRefSetType input) {\n\t\t\t\t\treturn input.name();\n\t\t\t\t}\n\t\t\t}).toSet());\n\t\t}\n\t\t\n\t}\n\n\t@JsonPOJOBuilder(withPrefix=\"\")\n\tpublic static final class Builder extends SnomedDocumentBuilder {\n\n\t\tprivate String referencedComponentId;\n\n\t\tprivate String referenceSetId;\n\t\tprivate SnomedRefSetType referenceSetType;\n\t\tprivate short referencedComponentType;\n\n\t\t// Member specific fields, 
they can be null or emptyish values\n\t\t// ASSOCIATION reference set members\n\t\tprivate String targetComponent;\n\t\t// ATTRIBUTE VALUE\n\t\tprivate String valueId;\n\t\t// CONCRETE DOMAIN reference set members\n\t\tprivate DataType dataType;\n\t\tprivate String attributeName;\n\t\tprivate Object value;\n\t\tprivate String operatorId;\n\t\tprivate String characteristicTypeId;\n\t\tprivate String unitId;\n\t\t// DESCRIPTION\n\t\tprivate Integer descriptionLength;\n\t\tprivate String descriptionFormat;\n\t\t// LANGUAGE\n\t\tprivate String acceptabilityId;\n\t\t// MODULE\n\t\tprivate Long sourceEffectiveTime;\n\t\tprivate Long targetEffectiveTime;\n\t\t// SIMPLE MAP reference set members\n\t\tprivate String mapTarget;\n\t\tprivate String mapTargetDescription;\n\t\t// COMPLEX MAP\n\t\tprivate String mapCategoryId;\n\t\tprivate String correlationId;\n\t\tprivate String mapAdvice;\n\t\tprivate String mapRule;\n\t\tprivate Integer mapGroup;\n\t\tprivate Integer mapPriority;\n\t\t// QUERY\n\t\tprivate String query;\n\n\t\t@JsonCreator\n\t\tprivate Builder() {\n\t\t\t// Disallow instantiation outside static method\n\t\t}\n\n\t\tpublic Builder fields(Map fields) {\n\t\t\tfor (Entry entry : fields.entrySet()) {\n\t\t\t\tfield(entry.getKey(), entry.getValue());\n\t\t\t}\n\t\t\treturn this;\n\t\t}\n\t\t\n\t\tpublic Builder field(String fieldName, Object value) {\n\t\t\tswitch (fieldName) {\n\t\t\tcase Fields.ACCEPTABILITY_ID: this.acceptabilityId = (String) value; break;\n\t\t\tcase Fields.ATTRIBUTE_NAME: this.attributeName = (String) value; break;\n\t\t\tcase Fields.CHARACTERISTIC_TYPE_ID: this.characteristicTypeId = (String) value; break;\n\t\t\tcase Fields.CORRELATION_ID: this.correlationId = (String) value; break;\n\t\t\tcase Fields.DATA_TYPE: this.dataType = (DataType) value; break;\n\t\t\tcase Fields.DATA_VALUE: this.value = value; break;\n\t\t\tcase Fields.DESCRIPTION_FORMAT: this.descriptionFormat = (String) value; break;\n\t\t\tcase Fields.DESCRIPTION_LENGTH: 
this.descriptionLength = (Integer) value; break;\n\t\t\tcase Fields.MAP_ADVICE: this.mapAdvice = (String) value; break;\n\t\t\tcase Fields.MAP_CATEGORY_ID: this.mapCategoryId = (String) value; break;\n\t\t\tcase Fields.MAP_GROUP: this.mapGroup = (Integer) value; break;\n\t\t\tcase Fields.MAP_PRIORITY: this.mapPriority = (Integer) value; break;\n\t\t\tcase Fields.MAP_RULE: this.mapRule = (String) value; break;\n\t\t\tcase Fields.MAP_TARGET: this.mapTarget = (String) value; break;\n\t\t\tcase Fields.MAP_TARGET_DESCRIPTION: this.mapTargetDescription = (String) value; break;\n\t\t\tcase Fields.OPERATOR_ID: this.operatorId = (String) value; break;\n\t\t\tcase Fields.QUERY: this.query = (String) value; break;\n\t\t\tcase Fields.SOURCE_EFFECTIVE_TIME: this.sourceEffectiveTime = (Long) value; break;\n\t\t\tcase Fields.TARGET_COMPONENT: this.targetComponent = (String) value; break;\n\t\t\tcase Fields.TARGET_EFFECTIVE_TIME: this.targetEffectiveTime = (Long) value; break;\n\t\t\tcase Fields.UNIT_ID: this.unitId = (String) value; break;\n\t\t\tcase Fields.VALUE_ID: this.valueId = (String) value; break;\n\t\t\tdefault: throw new UnsupportedOperationException(\"Unknown RF2 member field: \" + fieldName);\n\t\t\t}\n\t\t\treturn this;\n\t\t}\n\n\t\t@Override\n\t\tprotected Builder getSelf() {\n\t\t\treturn this;\n\t\t}\n\n\t\tpublic Builder referencedComponentId(final String referencedComponentId) {\n\t\t\tthis.referencedComponentId = referencedComponentId;\n\t\t\treturn this;\n\t\t}\n\n\t\tpublic Builder referenceSetId(final String referenceSetId) {\n\t\t\tthis.referenceSetId = referenceSetId;\n\t\t\treturn this;\n\t\t}\n\n\t\tpublic Builder referenceSetType(final SnomedRefSetType referenceSetType) {\n\t\t\tthis.referenceSetType = referenceSetType;\n\t\t\treturn this;\n\t\t}\n\n\t\tpublic Builder referencedComponentType(final short referencedComponentType) {\n\t\t\tthis.referencedComponentType = referencedComponentType;\n\t\t\treturn this;\n\t\t}\n\t\t\n\t\tpublic Builder 
targetComponent(String targetComponent) {\n\t\t\tthis.targetComponent = targetComponent;\n\t\t\treturn this;\n\t\t}\n\t\t\n\t\tBuilder acceptabilityId(String acceptabilityId) {\n\t\t\tthis.acceptabilityId = acceptabilityId;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\tBuilder attributeName(String attributeName) {\n\t\t\tthis.attributeName = attributeName;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\tBuilder characteristicTypeId(final String characteristicTypeId) {\n\t\t\tthis.characteristicTypeId = characteristicTypeId;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\tBuilder correlationId(final String correlationId) {\n\t\t\tthis.correlationId = correlationId;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\tBuilder dataType(final DataType dataType) {\n\t\t\tthis.dataType = dataType;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\tBuilder descriptionFormat(final String descriptionFormat) {\n\t\t\tthis.descriptionFormat = descriptionFormat;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\tBuilder descriptionLength(final Integer descriptionLength) {\n\t\t\tthis.descriptionLength = descriptionLength;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\tBuilder mapAdvice(final String mapAdvice) {\n\t\t\tthis.mapAdvice = mapAdvice;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\tBuilder mapCategoryId(final String mapCategoryId) {\n\t\t\tthis.mapCategoryId = mapCategoryId;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\tBuilder mapGroup(final Integer mapGroup) {\n\t\t\tthis.mapGroup = mapGroup;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\tBuilder mapPriority(final Integer mapPriority) {\n\t\t\tthis.mapPriority = mapPriority;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\tBuilder mapRule(final String mapRule) {\n\t\t\tthis.mapRule = mapRule;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\tBuilder mapTarget(final String mapTarget) {\n\t\t\tthis.mapTarget = mapTarget;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\tBuilder mapTargetDescription(final String mapTargetDescription) {\n\t\t\tthis.mapTargetDescription = 
mapTargetDescription;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\tBuilder operatorId(final String operatorId) {\n\t\t\tthis.operatorId = operatorId;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\tBuilder query(final String query) {\n\t\t\tthis.query = query;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\tBuilder sourceEffectiveTime(final Long sourceEffectiveTime) {\n\t\t\tthis.sourceEffectiveTime = sourceEffectiveTime;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\tBuilder targetEffectiveTime(final Long targetEffectiveTime) {\n\t\t\tthis.targetEffectiveTime = targetEffectiveTime;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\tBuilder unitId(final String unitId) {\n\t\t\tthis.unitId = unitId;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\t/**\n\t\t * @deprecated - this is no longer a valid refset member index field, but required to make pre-5.4 dataset work with 5.4 without migration\n\t\t */\n\t\tBuilder value(final Object value) {\n\t\t\tthis.value = value;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\tBuilder decimalValue(final BigDecimal value) {\n\t\t\tthis.value = value;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\tBuilder booleanValue(final Boolean value) {\n\t\t\tthis.value = value;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\tBuilder integerValue(final Integer value) {\n\t\t\tthis.value = value;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\tBuilder stringValue(final String value) {\n\t\t\tthis.value = value;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\tBuilder valueId(String valueId) {\n\t\t\tthis.valueId = valueId;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\tpublic SnomedRefSetMemberIndexEntry build() {\n\t\t\tfinal SnomedRefSetMemberIndexEntry doc = new SnomedRefSetMemberIndexEntry(id,\n\t\t\t\t\tlabel,\n\t\t\t\t\tmoduleId, \n\t\t\t\t\treleased, \n\t\t\t\t\tactive, \n\t\t\t\t\teffectiveTime, \n\t\t\t\t\treferencedComponentId, \n\t\t\t\t\treferenceSetId,\n\t\t\t\t\treferenceSetType,\n\t\t\t\t\treferencedComponentType);\n\t\t\t// association members\n\t\t\tdoc.targetComponent = 
targetComponent;\n\t\t\t// attribute value\n\t\t\tdoc.valueId = valueId;\n\t\t\t// concrete domain members\n\t\t\tdoc.dataType = dataType;\n\t\t\tdoc.attributeName = attributeName;\n\t\t\tif (dataType != null) {\n\t\t\t\tswitch (dataType) {\n\t\t\t\tcase BOOLEAN:\n\t\t\t\t\tif (value instanceof Boolean) {\n\t\t\t\t\t\tdoc.booleanValue = (Boolean) value;\n\t\t\t\t\t} else if (value instanceof String) {\n\t\t\t\t\t\tdoc.booleanValue = SnomedRefSetUtil.deserializeValue(dataType, (String) value);\n\t\t\t\t\t}\n\t\t\t\t\tbreak;\n\t\t\t\tcase DECIMAL:\n\t\t\t\t\tif (value instanceof BigDecimal) {\n\t\t\t\t\t\tdoc.decimalValue = (BigDecimal) value;\n\t\t\t\t\t} else if (value instanceof String) {\n\t\t\t\t\t\tdoc.decimalValue = SnomedRefSetUtil.deserializeValue(dataType, (String) value);\n\t\t\t\t\t}\n\t\t\t\t\tbreak;\n\t\t\t\tcase INTEGER:\n\t\t\t\t\tif (value instanceof Integer) {\n\t\t\t\t\t\tdoc.integerValue = (Integer) value;\n\t\t\t\t\t} else if (value instanceof String) {\n\t\t\t\t\t\tdoc.integerValue = SnomedRefSetUtil.deserializeValue(dataType, (String) value);\n\t\t\t\t\t}\n\t\t\t\t\tbreak;\n\t\t\t\tcase STRING:\n\t\t\t\t\tdoc.stringValue = (String) value;\n\t\t\t\t\tbreak;\n\t\t\t\tdefault: throw new UnsupportedOperationException(\"Unsupported concrete domain data type: \" + dataType);\n\t\t\t\t}\n\t\t\t}\n\t\t\tdoc.characteristicTypeId = characteristicTypeId;\n\t\t\tdoc.operatorId = operatorId;\n\t\t\tdoc.unitId = unitId;\n\t\t\t// description\n\t\t\tdoc.descriptionFormat = descriptionFormat;\n\t\t\tdoc.descriptionLength = descriptionLength;\n\t\t\t// language reference set\n\t\t\tdoc.acceptabilityId = acceptabilityId;\n\t\t\t// module\n\t\t\tdoc.sourceEffectiveTime = sourceEffectiveTime;\n\t\t\tdoc.targetEffectiveTime = targetEffectiveTime;\n\t\t\t// simple map\n\t\t\tdoc.mapTarget = mapTarget;\n\t\t\tdoc.mapTargetDescription = mapTargetDescription;\n\t\t\t// complex map\n\t\t\tdoc.mapCategoryId = mapCategoryId;\n\t\t\tdoc.mapAdvice = 
mapAdvice;\n\t\t\tdoc.correlationId = correlationId;\n\t\t\tdoc.mapGroup = mapGroup;\n\t\t\tdoc.mapPriority = mapPriority;\n\t\t\tdoc.mapRule = mapRule;\n\t\t\t// query\n\t\t\tdoc.query = query;\n\t\t\t\n\t\t\tdoc.setScore(score);\n\t\t\t// metadata\n\t\t\tdoc.setBranchPath(branchPath);\n\t\t\tdoc.setCommitTimestamp(commitTimestamp);\n\t\t\tdoc.setStorageKey(storageKey);\n\t\t\tdoc.setReplacedIns(replacedIns);\n\t\t\tdoc.setSegmentId(segmentId);\n\t\t\treturn doc;\n\t\t}\n\t}\n\n\tprivate final String referencedComponentId;\n\tprivate final String referenceSetId;\n\tprivate final SnomedRefSetType referenceSetType;\n\tprivate final short referencedComponentType;\n\t\n\t// Member specific fields, they can be null or emptyish values\n\t// ASSOCIATION reference set members\n\tprivate String targetComponent;\n\t// ATTRIBUTE VALUE\n\tprivate String valueId;\n\t// CONCRETE DOMAIN reference set members\n\tprivate DataType dataType;\n\tprivate String attributeName;\n\t\n\t// only one of these value fields should be set when this represents a concrete domain member\n\tprivate String stringValue;\n\tprivate Boolean booleanValue;\n\tprivate Integer integerValue;\n\tprivate BigDecimal decimalValue;\n\t\n\tprivate String operatorId;\n\tprivate String characteristicTypeId;\n\tprivate String unitId;\n\t// DESCRIPTION\n\tprivate Integer descriptionLength;\n\tprivate String descriptionFormat;\n\t// LANGUAGE\n\tprivate String acceptabilityId;\n\t// MODULE\n\tprivate Long sourceEffectiveTime;\n\tprivate Long targetEffectiveTime;\n\t// SIMPLE MAP reference set members\n\tprivate String mapTarget;\n\tprivate String mapTargetDescription;\n\t// COMPLEX MAP\n\tprivate String mapCategoryId;\n\tprivate String correlationId;\n\tprivate String mapAdvice;\n\tprivate String mapRule;\n\tprivate Integer mapGroup;\n\tprivate Integer mapPriority;\n\t// QUERY\n\tprivate String query;\n\t\n\n\tprivate SnomedRefSetMemberIndexEntry(final String id,\n\t\t\tfinal String label,\n\t\t\tfinal String 
moduleId, \n\t\t\tfinal boolean released,\n\t\t\tfinal boolean active, \n\t\t\tfinal long effectiveTimeLong, \n\t\t\tfinal String referencedComponentId, \n\t\t\tfinal String referenceSetId,\n\t\t\tfinal SnomedRefSetType referenceSetType,\n\t\t\tfinal short referencedComponentType) {\n\n\t\tsuper(id, \n\t\t\t\tlabel,\n\t\t\t\treferencedComponentId, // XXX: iconId is the referenced component identifier\n\t\t\t\tmoduleId, \n\t\t\t\treleased, \n\t\t\t\tactive, \n\t\t\t\teffectiveTimeLong);\n\n\t\tcheckArgument(referencedComponentType >= CoreTerminologyBroker.UNSPECIFIED_NUMBER_SHORT, \"Referenced component type '%s' is invalid.\", referencedComponentType);\n\t\tthis.referencedComponentId = referencedComponentId;\n\t\tthis.referenceSetId = referenceSetId;\n\t\tthis.referenceSetType = referenceSetType;\n\t\tthis.referencedComponentType = referencedComponentType;\n\t}\n\n\t@Override\n\tpublic String getContainerId() {\n\t\t// XXX hack to make IHTSDO merge review API tests pass and work as before in 4.5\n\t\tif (getReferenceSetType() == SnomedRefSetType.MODULE_DEPENDENCY) {\n\t\t\treturn null;\n\t\t} else {\n\t\t\treturn getReferencedComponentId();\n\t\t}\n\t}\n\n\t/**\n\t * @return the referenced component identifier\n\t */\n\tpublic String getReferencedComponentId() {\n\t\treturn referencedComponentId;\n\t}\n\n\t/**\n\t * @return the identifier of the member's reference set\n\t */\n\tpublic String getReferenceSetId() {\n\t\treturn referenceSetId;\n\t}\n\n\t/**\n\t * @return the type of the member's reference set\n\t */\n\tpublic SnomedRefSetType getReferenceSetType() {\n\t\treturn referenceSetType;\n\t}\n\n\t@JsonIgnore\n\t@SuppressWarnings(\"unchecked\")\n\tpublic T getValueAs() {\n\t\treturn (T) getValue();\n\t}\n\t\n\t@JsonIgnore\n\tpublic Object getValue() {\n\t\tif (dataType == null) {\n\t\t\treturn null;\n\t\t} else {\n\t\t\tswitch (dataType) {\n\t\t\tcase BOOLEAN: return booleanValue;\n\t\t\tcase DECIMAL: return decimalValue;\n\t\t\tcase INTEGER: return 
integerValue;\n\t\t\tcase STRING: return stringValue;\n\t\t\tdefault: throw new UnsupportedOperationException(\"Unsupported concrete domain data type: \" + dataType);\n\t\t\t}\n\t\t}\n\t}\n\t\n\t@JsonProperty\n\tBigDecimal getDecimalValue() {\n\t\treturn decimalValue;\n\t}\n\t\n\t@JsonProperty\n\tBoolean getBooleanValue() {\n\t\treturn booleanValue;\n\t}\n\t\n\t@JsonProperty\n\tInteger getIntegerValue() {\n\t\treturn integerValue;\n\t}\n\t\n\t@JsonProperty\n\tString getStringValue() {\n\t\treturn stringValue;\n\t}\n\n\tpublic DataType getDataType() {\n\t\treturn dataType;\n\t}\n\n\tpublic String getUnitId() {\n\t\treturn unitId;\n\t}\n\n\tpublic String getAttributeName() {\n\t\treturn attributeName;\n\t}\n\n\tpublic String getOperatorId() {\n\t\treturn operatorId;\n\t}\n\n\tpublic String getCharacteristicTypeId() {\n\t\treturn characteristicTypeId;\n\t}\t\n\n\tpublic String getAcceptabilityId() {\n\t\treturn acceptabilityId;\n\t}\n\n\tpublic Integer getDescriptionLength() {\n\t\treturn descriptionLength;\n\t}\n\t\n\tpublic String getDescriptionFormat() {\n\t\treturn descriptionFormat;\n\t}\n\n\tpublic String getMapTarget() {\n\t\treturn mapTarget;\n\t}\n\n\tpublic Integer getMapGroup() {\n\t\treturn mapGroup;\n\t}\n\n\tpublic Integer getMapPriority() {\n\t\treturn mapPriority;\n\t}\n\n\tpublic String getMapRule() {\n\t\treturn mapRule;\n\t}\n\n\tpublic String getMapAdvice() {\n\t\treturn mapAdvice;\n\t}\n\t\n\tpublic String getMapCategoryId() {\n\t\treturn mapCategoryId;\n\t}\n\t\n\tpublic String getCorrelationId() {\n\t\treturn correlationId;\n\t}\n\n\tpublic String getMapTargetDescription() {\n\t\treturn mapTargetDescription;\n\t}\n\t\n\tpublic String getQuery() {\n\t\treturn query;\n\t}\n\t\n\tpublic String getTargetComponent() {\n\t\treturn targetComponent;\n\t}\n\t\n\tpublic String getValueId() {\n\t\treturn valueId;\n\t}\n\t\n\tpublic Long getSourceEffectiveTime() {\n\t\treturn sourceEffectiveTime;\n\t}\n\t\n\tpublic Long getTargetEffectiveTime() 
{\n\t\treturn targetEffectiveTime;\n\t}\n\t\n\tpublic short getReferencedComponentType() {\n\t\treturn referencedComponentType;\n\t}\n\t\n\t// model helper methods\n\t\n\t@JsonIgnore\n\tpublic Acceptability getAcceptability() {\n\t\treturn Acceptability.getByConceptId(getAcceptabilityId());\n\t}\n\t\n\t@JsonIgnore\n\tpublic RelationshipRefinability getRefinability() {\n\t\treturn RelationshipRefinability.getByConceptId(getValueId());\n\t}\n\t\n\t@JsonIgnore\n\tpublic InactivationIndicator getInactivationIndicator() {\n\t\treturn InactivationIndicator.getByConceptId(getValueId());\n\t}\n\t\n\t@JsonIgnore\n\tpublic String getSourceEffectiveTimeAsString() {\n\t\treturn EffectiveTimes.format(getSourceEffectiveTime(), DateFormats.SHORT);\n\t}\n\t\n\t@JsonIgnore\n\tpublic String getTargetEffectiveTimeAsString() {\n\t\treturn EffectiveTimes.format(getTargetEffectiveTime(), DateFormats.SHORT);\n\t}\n\t\n\t/**\n\t * @return the {@code String} terminology component identifier of the component referenced in this member\n\t */\n\t@JsonIgnore\n\tpublic String getReferencedComponentTypeAsString() {\n\t\treturn CoreTerminologyBroker.getInstance().getTerminologyComponentId(referencedComponentType);\n\t}\n\n\t/**\n\t * Helper which converts all non-null/empty additional fields to a values {@link Map} keyed by their field name; \n\t * @return\n\t */\n\t@JsonIgnore\n\tpublic Map getAdditionalFields() {\n\t\tfinal ImmutableMap.Builder builder = ImmutableMap.builder();\n\t\t// ASSOCIATION refset members\n\t\tputIfPresent(builder, Fields.TARGET_COMPONENT, getTargetComponent());\n\t\t// ATTRIBUTE_VALUE refset members \n\t\tputIfPresent(builder, Fields.VALUE_ID, getValueId());\n\t\t// CONCRETE DOMAIN reference set members\n\t\tputIfPresent(builder, Fields.DATA_TYPE, getDataType());\n\t\tputIfPresent(builder, Fields.ATTRIBUTE_NAME, getAttributeName());\n\t\tputIfPresent(builder, Fields.DATA_VALUE, getValue());\n\t\tputIfPresent(builder, Fields.OPERATOR_ID, 
getOperatorId());\n\t\tputIfPresent(builder, Fields.CHARACTERISTIC_TYPE_ID, getCharacteristicTypeId());\n\t\tputIfPresent(builder, Fields.UNIT_ID, getUnitId());\n\t\t// DESCRIPTION\n\t\tputIfPresent(builder, Fields.DESCRIPTION_LENGTH, getDescriptionLength());\n\t\tputIfPresent(builder, Fields.DESCRIPTION_FORMAT, getDescriptionFormat());\n\t\t// LANGUAGE\n\t\tputIfPresent(builder, Fields.ACCEPTABILITY_ID, getAcceptabilityId());\n\t\t// MODULE\n\t\tputIfPresent(builder, Fields.SOURCE_EFFECTIVE_TIME, getSourceEffectiveTime());\n\t\tputIfPresent(builder, Fields.TARGET_EFFECTIVE_TIME, getTargetEffectiveTime());\n\t\t// SIMPLE MAP reference set members\n\t\tputIfPresent(builder, Fields.MAP_TARGET, getMapTarget());\n\t\tputIfPresent(builder, Fields.MAP_TARGET_DESCRIPTION, getMapTargetDescription());\n\t\t// COMPLEX MAP\n\t\tputIfPresent(builder, Fields.MAP_CATEGORY_ID, getMapCategoryId());\n\t\tputIfPresent(builder, Fields.CORRELATION_ID, getCorrelationId());\n\t\tputIfPresent(builder, Fields.MAP_ADVICE, getMapAdvice());\n\t\tputIfPresent(builder, Fields.MAP_RULE, getMapRule());\n\t\tputIfPresent(builder, Fields.MAP_GROUP, getMapGroup());\n\t\tputIfPresent(builder, Fields.MAP_PRIORITY, getMapPriority());\n\t\t// QUERY\n\t\tputIfPresent(builder, Fields.QUERY, getQuery());\n\t\treturn builder.build();\n\t}\n\t\n\tprivate static void putIfPresent(ImmutableMap.Builder builder, String key, Object value) {\n\t\tif (key != null && value != null) {\n\t\t\tbuilder.put(key, value);\n\t\t}\n\t}\n\t\n\t@Override\n\tprotected ToStringHelper doToString() {\n\t\treturn super.doToString()\n\t\t\t\t.add(\"referencedComponentId\", referencedComponentId)\n\t\t\t\t.add(\"referenceSetId\", referenceSetId)\n\t\t\t\t.add(\"referenceSetType\", referenceSetType)\n\t\t\t\t.add(\"referencedComponentType\", referencedComponentType)\n\t\t\t\t.add(\"targetComponent\", targetComponent)\n\t\t\t\t.add(\"valueId\", valueId)\n\t\t\t\t.add(\"dataType\", dataType)\n\t\t\t\t.add(\"attributeName\", 
attributeName)\n\t\t\t\t.add(\"value\", getValue())\n\t\t\t\t.add(\"operatorId\", operatorId)\n\t\t\t\t.add(\"characteristicTypeId\", characteristicTypeId)\n\t\t\t\t.add(\"unitId\", unitId)\n\t\t\t\t.add(\"descriptionLength\", descriptionLength)\n\t\t\t\t.add(\"descriptionFormat\", descriptionFormat)\n\t\t\t\t.add(\"acceptabilityId\", acceptabilityId)\n\t\t\t\t.add(\"sourceEffectiveTime\", sourceEffectiveTime)\n\t\t\t\t.add(\"targetEffectiveTime\", targetEffectiveTime)\n\t\t\t\t.add(\"mapTarget\", mapTarget)\n\t\t\t\t.add(\"mapTargetDescription\", mapTargetDescription)\n\t\t\t\t.add(\"mapCategoryId\", mapCategoryId)\n\t\t\t\t.add(\"correlationId\", correlationId)\n\t\t\t\t.add(\"mapAdvice\", mapAdvice)\n\t\t\t\t.add(\"mapRule\", mapRule)\n\t\t\t\t.add(\"mapGroup\", mapGroup)\n\t\t\t\t.add(\"mapPriority\", mapPriority)\n\t\t\t\t.add(\"query\", query);\n\t}\n}\n"},"new_file":{"kind":"string","value":"snomed/com.b2international.snowowl.snomed.datastore/src/com/b2international/snowowl/snomed/datastore/index/entry/SnomedRefSetMemberIndexEntry.java"},"old_contents":{"kind":"string","value":"/*\n * Copyright 2011-2017 B2i Healthcare Pte Ltd, http://b2i.sg\n * \n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.b2international.snowowl.snomed.datastore.index.entry;\n\nimport static com.b2international.index.query.Expressions.exactMatch;\nimport static com.b2international.index.query.Expressions.matchAny;\nimport static 
com.b2international.index.query.Expressions.matchAnyDecimal;\nimport static com.b2international.index.query.Expressions.matchAnyInt;\nimport static com.b2international.index.query.Expressions.matchRange;\nimport static com.b2international.snowowl.snomed.common.SnomedTerminologyComponentConstants.CONCEPT_NUMBER;\nimport static com.b2international.snowowl.snomed.common.SnomedTerminologyComponentConstants.DESCRIPTION_NUMBER;\nimport static com.b2international.snowowl.snomed.common.SnomedTerminologyComponentConstants.RELATIONSHIP_NUMBER;\nimport static com.google.common.base.Preconditions.checkArgument;\n\nimport java.math.BigDecimal;\nimport java.util.Collection;\nimport java.util.Date;\nimport java.util.Map;\nimport java.util.Map.Entry;\n\nimport com.b2international.commons.StringUtils;\nimport com.b2international.index.Doc;\nimport com.b2international.index.query.Expression;\nimport com.b2international.snowowl.core.CoreTerminologyBroker;\nimport com.b2international.snowowl.core.date.DateFormats;\nimport com.b2international.snowowl.core.date.EffectiveTimes;\nimport com.b2international.snowowl.datastore.cdo.CDOIDUtils;\nimport com.b2international.snowowl.snomed.common.SnomedRf2Headers;\nimport com.b2international.snowowl.snomed.core.domain.Acceptability;\nimport com.b2international.snowowl.snomed.core.domain.InactivationIndicator;\nimport com.b2international.snowowl.snomed.core.domain.RelationshipRefinability;\nimport com.b2international.snowowl.snomed.core.domain.SnomedConcept;\nimport com.b2international.snowowl.snomed.core.domain.SnomedCoreComponent;\nimport com.b2international.snowowl.snomed.core.domain.SnomedDescription;\nimport com.b2international.snowowl.snomed.core.domain.SnomedRelationship;\nimport com.b2international.snowowl.snomed.core.domain.refset.SnomedReferenceSetMember;\nimport com.b2international.snowowl.snomed.datastore.SnomedRefSetUtil;\nimport com.b2international.snowowl.snomed.snomedrefset.DataType;\nimport 
com.b2international.snowowl.snomed.snomedrefset.SnomedAssociationRefSetMember;\nimport com.b2international.snowowl.snomed.snomedrefset.SnomedAttributeValueRefSetMember;\nimport com.b2international.snowowl.snomed.snomedrefset.SnomedComplexMapRefSetMember;\nimport com.b2international.snowowl.snomed.snomedrefset.SnomedConcreteDataTypeRefSetMember;\nimport com.b2international.snowowl.snomed.snomedrefset.SnomedDescriptionTypeRefSetMember;\nimport com.b2international.snowowl.snomed.snomedrefset.SnomedLanguageRefSetMember;\nimport com.b2international.snowowl.snomed.snomedrefset.SnomedModuleDependencyRefSetMember;\nimport com.b2international.snowowl.snomed.snomedrefset.SnomedQueryRefSetMember;\nimport com.b2international.snowowl.snomed.snomedrefset.SnomedRefSetMember;\nimport com.b2international.snowowl.snomed.snomedrefset.SnomedRefSetType;\nimport com.b2international.snowowl.snomed.snomedrefset.SnomedSimpleMapRefSetMember;\nimport com.b2international.snowowl.snomed.snomedrefset.util.SnomedRefSetSwitch;\nimport com.fasterxml.jackson.annotation.JsonCreator;\nimport com.fasterxml.jackson.annotation.JsonIgnore;\nimport com.fasterxml.jackson.annotation.JsonProperty;\nimport com.fasterxml.jackson.databind.annotation.JsonDeserialize;\nimport com.fasterxml.jackson.databind.annotation.JsonPOJOBuilder;\nimport com.google.common.base.Function;\nimport com.google.common.base.Objects.ToStringHelper;\nimport com.google.common.base.Strings;\nimport com.google.common.collect.FluentIterable;\nimport com.google.common.collect.ImmutableMap;\n\n/**\n * Lightweight representation of a SNOMED CT reference set member.\n */\n@Doc\n@JsonDeserialize(builder = SnomedRefSetMemberIndexEntry.Builder.class)\npublic final class SnomedRefSetMemberIndexEntry extends SnomedDocument {\n\n\tprivate static final long serialVersionUID = 5198766293865046258L;\n\n\tpublic static class Fields {\n\t\t// known RF2 fields\n\t\tpublic static final String REFERENCE_SET_ID = \"referenceSetId\"; // XXX different than 
the RF2 header field name\n\t\tpublic static final String REFERENCED_COMPONENT_ID = SnomedRf2Headers.FIELD_REFERENCED_COMPONENT_ID;\n\t\tpublic static final String ACCEPTABILITY_ID = SnomedRf2Headers.FIELD_ACCEPTABILITY_ID;\n\t\tpublic static final String VALUE_ID = SnomedRf2Headers.FIELD_VALUE_ID;\n\t\tpublic static final String TARGET_COMPONENT = SnomedRf2Headers.FIELD_TARGET_COMPONENT;\n\t\tpublic static final String MAP_TARGET = SnomedRf2Headers.FIELD_MAP_TARGET;\n\t\tpublic static final String MAP_TARGET_DESCRIPTION = SnomedRf2Headers.FIELD_MAP_TARGET_DESCRIPTION;\n\t\tpublic static final String MAP_GROUP = SnomedRf2Headers.FIELD_MAP_GROUP;\n\t\tpublic static final String MAP_PRIORITY = SnomedRf2Headers.FIELD_MAP_PRIORITY;\n\t\tpublic static final String MAP_RULE = SnomedRf2Headers.FIELD_MAP_RULE;\n\t\tpublic static final String MAP_ADVICE = SnomedRf2Headers.FIELD_MAP_ADVICE;\n\t\tpublic static final String MAP_CATEGORY_ID = SnomedRf2Headers.FIELD_MAP_CATEGORY_ID;\n\t\tpublic static final String CORRELATION_ID = SnomedRf2Headers.FIELD_CORRELATION_ID;\n\t\tpublic static final String DESCRIPTION_FORMAT = SnomedRf2Headers.FIELD_DESCRIPTION_FORMAT;\n\t\tpublic static final String DESCRIPTION_LENGTH = SnomedRf2Headers.FIELD_DESCRIPTION_LENGTH;\n\t\tpublic static final String OPERATOR_ID = SnomedRf2Headers.FIELD_OPERATOR_ID;\n\t\tpublic static final String UNIT_ID = SnomedRf2Headers.FIELD_UNIT_ID;\n\t\tpublic static final String QUERY = SnomedRf2Headers.FIELD_QUERY;\n\t\tpublic static final String CHARACTERISTIC_TYPE_ID = SnomedRf2Headers.FIELD_CHARACTERISTIC_TYPE_ID;\n\t\tpublic static final String SOURCE_EFFECTIVE_TIME = SnomedRf2Headers.FIELD_SOURCE_EFFECTIVE_TIME;\n\t\tpublic static final String TARGET_EFFECTIVE_TIME = SnomedRf2Headers.FIELD_TARGET_EFFECTIVE_TIME;\n\t\tprivate static final String DATA_VALUE = SnomedRf2Headers.FIELD_VALUE;\n\t\tpublic static final String ATTRIBUTE_NAME = SnomedRf2Headers.FIELD_ATTRIBUTE_NAME;\n\t\t// extra index fields to store 
datatype and map target type\n\t\tpublic static final String DATA_TYPE = \"dataType\";\n\t\tpublic static final String REFSET_TYPE = \"referenceSetType\";\n\t\tpublic static final String REFERENCED_COMPONENT_TYPE = \"referencedComponentType\";\n\t\t// CD value fields per type\n\t\tpublic static final String BOOLEAN_VALUE = \"booleanValue\";\n\t\tpublic static final String STRING_VALUE = \"stringValue\";\n\t\tpublic static final String INTEGER_VALUE = \"integerValue\";\n\t\tpublic static final String DECIMAL_VALUE = \"decimalValue\";\n\t}\n\t\n\t\tpublic static Builder builder() {\n\t\treturn new Builder();\n\t}\n\t\n\tpublic static Builder builder(final SnomedRefSetMemberIndexEntry source) {\n\t\treturn builder()\n\t\t\t\t.storageKey(source.getStorageKey())\n\t\t\t\t.active(source.isActive())\n\t\t\t\t.effectiveTime(source.getEffectiveTime())\n\t\t\t\t.id(source.getId())\n\t\t\t\t.moduleId(source.getModuleId())\n\t\t\t\t.referencedComponentId(source.getReferencedComponentId())\n\t\t\t\t.referencedComponentType(source.getReferencedComponentType())\n\t\t\t\t.referenceSetId(source.getReferenceSetId())\n\t\t\t\t.referenceSetType(source.getReferenceSetType())\n\t\t\t\t.released(source.isReleased())\n\t\t\t\t.fields(source.getAdditionalFields());\n\t}\n\t\n\tpublic static final Builder builder(final SnomedReferenceSetMember input) {\n\t\tfinal Builder builder = builder()\n\t\t\t\t.storageKey(input.getStorageKey())\n\t\t\t\t.active(input.isActive())\n\t\t\t\t.effectiveTime(EffectiveTimes.getEffectiveTime(input.getEffectiveTime()))\n\t\t\t\t.id(input.getId())\n\t\t\t\t.moduleId(input.getModuleId())\n\t\t\t\t.referencedComponentId(input.getReferencedComponent().getId())\n\t\t\t\t.referenceSetId(input.getReferenceSetId())\n\t\t\t\t.referenceSetType(input.type())\n\t\t\t\t.released(input.isReleased());\n\t\t\n\t\tif (input.getReferencedComponent() instanceof SnomedConcept) {\n\t\t\tbuilder.referencedComponentType(CONCEPT_NUMBER);\n\t\t} else if (input.getReferencedComponent() 
instanceof SnomedDescription) {\n\t\t\tbuilder.referencedComponentType(DESCRIPTION_NUMBER);\n\t\t} else if (input.getReferencedComponent() instanceof SnomedRelationship) {\n\t\t\tbuilder.referencedComponentType(RELATIONSHIP_NUMBER);\n\t\t} else {\n\t\t\tbuilder.referencedComponentType(CoreTerminologyBroker.UNSPECIFIED_NUMBER_SHORT);\n\t\t}\n\t\t\n\t\t\n\t\tfor (Entry entry : input.getProperties().entrySet()) {\n\t\t\tfinal Object value = entry.getValue();\n\t\t\tfinal String fieldName = entry.getKey();\n\t\t\t// certain RF2 fields can be expanded into full blown representation class, get the ID in this case\n\t\t\tif (value instanceof SnomedCoreComponent) {\n\t\t\t\tbuilder.field(fieldName, ((SnomedCoreComponent) value).getId());\n\t\t\t} else {\n\t\t\t\tbuilder.field(fieldName, convertValue(entry.getKey(), value));\n\t\t\t}\n\t\t}\n\t\t\n\t\treturn builder;\n\t}\n\t\n\tpublic static Builder builder(SnomedRefSetMember refSetMember) {\n\t\tfinal Builder builder = SnomedRefSetMemberIndexEntry.builder()\n\t\t\t\t.storageKey(CDOIDUtils.asLong(refSetMember.cdoID()))\n\t\t\t\t.id(refSetMember.getUuid()) \n\t\t\t\t.moduleId(refSetMember.getModuleId())\n\t\t\t\t.active(refSetMember.isActive())\n\t\t\t\t.released(refSetMember.isReleased())\n\t\t\t\t.effectiveTime(refSetMember.isSetEffectiveTime() ? 
refSetMember.getEffectiveTime().getTime() : EffectiveTimes.UNSET_EFFECTIVE_TIME)\n\t\t\t\t.referenceSetId(refSetMember.getRefSetIdentifierId())\n\t\t\t\t.referenceSetType(refSetMember.getRefSet().getType())\n\t\t\t\t.referencedComponentType(refSetMember.getReferencedComponentType())\n\t\t\t\t.referencedComponentId(refSetMember.getReferencedComponentId());\n\n\t\treturn new SnomedRefSetSwitch() {\n\n\t\t\t@Override\n\t\t\tpublic Builder caseSnomedAssociationRefSetMember(final SnomedAssociationRefSetMember associationMember) {\n\t\t\t\treturn builder.targetComponent(associationMember.getTargetComponentId());\n\t\t\t}\n\n\t\t\t@Override\n\t\t\tpublic Builder caseSnomedAttributeValueRefSetMember(final SnomedAttributeValueRefSetMember attributeValueMember) {\n\t\t\t\treturn builder.field(Fields.VALUE_ID, attributeValueMember.getValueId());\n\t\t\t}\n\n\t\t\t@Override\n\t\t\tpublic Builder caseSnomedConcreteDataTypeRefSetMember(final SnomedConcreteDataTypeRefSetMember concreteDataTypeMember) {\n\t\t\t\treturn builder.field(Fields.ATTRIBUTE_NAME, concreteDataTypeMember.getLabel())\n\t\t\t\t\t\t.field(Fields.DATA_TYPE, concreteDataTypeMember.getDataType())\n\t\t\t\t\t\t.field(Fields.DATA_VALUE, concreteDataTypeMember.getSerializedValue())\n\t\t\t\t\t\t.field(Fields.CHARACTERISTIC_TYPE_ID, concreteDataTypeMember.getCharacteristicTypeId())\n\t\t\t\t\t\t.field(Fields.OPERATOR_ID, concreteDataTypeMember.getOperatorComponentId())\n\t\t\t\t\t\t.field(Fields.UNIT_ID, Strings.nullToEmpty(concreteDataTypeMember.getUomComponentId()));\n\t\t\t}\n\n\t\t\t@Override\n\t\t\tpublic Builder caseSnomedDescriptionTypeRefSetMember(final SnomedDescriptionTypeRefSetMember descriptionTypeMember) {\n\t\t\t\treturn builder\n\t\t\t\t\t\t.field(Fields.DESCRIPTION_FORMAT, descriptionTypeMember.getDescriptionFormat())\n\t\t\t\t\t\t.field(Fields.DESCRIPTION_LENGTH, descriptionTypeMember.getDescriptionLength());\n\t\t\t}\n\n\t\t\t@Override\n\t\t\tpublic Builder caseSnomedLanguageRefSetMember(final 
SnomedLanguageRefSetMember languageMember) {\n\t\t\t\treturn builder.field(Fields.ACCEPTABILITY_ID, languageMember.getAcceptabilityId());\n\t\t\t}\n\n\t\t\t@Override\n\t\t\tpublic Builder caseSnomedQueryRefSetMember(final SnomedQueryRefSetMember queryMember) {\n\t\t\t\treturn builder.field(Fields.QUERY, queryMember.getQuery());\n\t\t\t}\n\n\t\t\t@Override\n\t\t\tpublic Builder caseSnomedSimpleMapRefSetMember(final SnomedSimpleMapRefSetMember mapRefSetMember) {\n\t\t\t\treturn builder\n\t\t\t\t\t\t.field(Fields.MAP_TARGET, mapRefSetMember.getMapTargetComponentId())\n\t\t\t\t\t\t.field(Fields.MAP_TARGET_DESCRIPTION, mapRefSetMember.getMapTargetComponentDescription());\n\t\t\t}\n\t\t\t\n\t\t\t@Override\n\t\t\tpublic Builder caseSnomedComplexMapRefSetMember(final SnomedComplexMapRefSetMember mapRefSetMember) {\n\t\t\t\treturn builder\n\t\t\t\t\t\t.field(Fields.MAP_TARGET, mapRefSetMember.getMapTargetComponentId())\n\t\t\t\t\t\t.field(Fields.CORRELATION_ID, mapRefSetMember.getCorrelationId())\n\t\t\t\t\t\t.field(Fields.MAP_GROUP, Integer.valueOf(mapRefSetMember.getMapGroup()))\n\t\t\t\t\t\t.field(Fields.MAP_ADVICE, Strings.nullToEmpty(mapRefSetMember.getMapAdvice()))\n\t\t\t\t\t\t.field(Fields.MAP_PRIORITY, Integer.valueOf(mapRefSetMember.getMapPriority()))\n\t\t\t\t\t\t.field(Fields.MAP_RULE, Strings.nullToEmpty(mapRefSetMember.getMapRule()))\n\t\t\t\t\t\t// extended refset\n\t\t\t\t\t\t.field(Fields.MAP_CATEGORY_ID, Strings.nullToEmpty(mapRefSetMember.getMapCategoryId()));\n\t\t\t}\n\t\t\t\n\t\t\t@Override\n\t\t\tpublic Builder caseSnomedModuleDependencyRefSetMember(SnomedModuleDependencyRefSetMember member) {\n\t\t\t\treturn builder\n\t\t\t\t\t\t.field(Fields.SOURCE_EFFECTIVE_TIME, EffectiveTimes.getEffectiveTime(member.getSourceEffectiveTime()))\n\t\t\t\t\t\t.field(Fields.TARGET_EFFECTIVE_TIME, EffectiveTimes.getEffectiveTime(member.getTargetEffectiveTime()));\n\t\t\t}\n\t\t\t\n\t\t\t@Override\n\t\t\tpublic Builder caseSnomedRefSetMember(SnomedRefSetMember object) 
{\n\t\t\t\treturn builder;\n\t\t\t};\n\n\t\t}.doSwitch(refSetMember);\n\t}\n\t\n\tprivate static Object convertValue(String rf2Field, Object value) {\n\t\tswitch (rf2Field) {\n\t\tcase SnomedRf2Headers.FIELD_SOURCE_EFFECTIVE_TIME:\n\t\tcase SnomedRf2Headers.FIELD_TARGET_EFFECTIVE_TIME:\n\t\t\tif (value instanceof String && !StringUtils.isEmpty((String) value)) {\n\t\t\t\tDate parsedDate = EffectiveTimes.parse((String) value, DateFormats.SHORT);\n\t\t\t\treturn EffectiveTimes.getEffectiveTime(parsedDate);\n\t\t\t} else {\n\t\t\t\treturn EffectiveTimes.UNSET_EFFECTIVE_TIME;\n\t\t\t}\n\t\tdefault: \n\t\t\treturn value;\n\t\t}\n\t}\n\n\tpublic static Collection from(final Iterable refSetMembers) {\n\t\treturn FluentIterable.from(refSetMembers).transform(new Function() {\n\t\t\t@Override\n\t\t\tpublic SnomedRefSetMemberIndexEntry apply(final SnomedReferenceSetMember refSetMember) {\n\t\t\t\treturn builder(refSetMember).build();\n\t\t\t}\n\t\t}).toList();\n\t}\n\n\tpublic static final class Expressions extends SnomedDocument.Expressions {\n\t\t\n\t\tpublic static Expression referenceSetId(String referenceSetId) {\n\t\t\treturn exactMatch(Fields.REFERENCE_SET_ID, referenceSetId);\n\t\t}\n\n\t\tpublic static Expression referenceSetId(Collection referenceSetIds) {\n\t\t\treturn matchAny(Fields.REFERENCE_SET_ID, referenceSetIds);\n\t\t}\n\t\t\n\t\tpublic static Expression referencedComponentId(String referencedComponentId) {\n\t\t\treturn exactMatch(Fields.REFERENCED_COMPONENT_ID, referencedComponentId);\n\t\t}\n\t\t\n\t\tpublic static Expression mapTargets(Collection mapTargets) {\n\t\t\treturn matchAny(Fields.MAP_TARGET, mapTargets);\n\t\t}\n\n\t\tpublic static Expression referencedComponentIds(Collection referencedComponentIds) {\n\t\t\treturn matchAny(Fields.REFERENCED_COMPONENT_ID, referencedComponentIds);\n\t\t}\n\t\t\n\t\tpublic static Expression targetComponents(Collection targetComponentIds) {\n\t\t\treturn matchAny(Fields.TARGET_COMPONENT, 
targetComponentIds);\n\t\t}\n\t\t\n\t\tpublic static Expression acceptabilityIds(Collection acceptabilityIds) {\n\t\t\treturn matchAny(Fields.ACCEPTABILITY_ID, acceptabilityIds);\n\t\t}\n\t\t\n\t\tpublic static Expression characteristicTypeIds(Collection characteristicTypeIds) {\n\t\t\treturn matchAny(Fields.CHARACTERISTIC_TYPE_ID, characteristicTypeIds);\n\t\t}\n\t\t\n\t\tpublic static Expression correlationIds(Collection correlationIds) {\n\t\t\treturn matchAny(Fields.CORRELATION_ID, correlationIds);\n\t\t}\n\t\t\n\t\tpublic static Expression descriptionFormats(Collection descriptionFormats) {\n\t\t\treturn matchAny(Fields.DESCRIPTION_FORMAT, descriptionFormats);\n\t\t}\n\t\t\n\t\tpublic static Expression mapCategoryIds(Collection mapCategoryIds) {\n\t\t\treturn matchAny(Fields.MAP_CATEGORY_ID, mapCategoryIds);\n\t\t}\n\t\t\n\t\tpublic static Expression operatorIds(Collection operatorIds) {\n\t\t\treturn matchAny(Fields.OPERATOR_ID, operatorIds);\n\t\t}\n\t\t\n\t\tpublic static Expression unitIds(Collection unitIds) {\n\t\t\treturn matchAny(Fields.UNIT_ID, unitIds);\n\t\t}\n\t\t\n\t\tpublic static Expression valueIds(Collection valueIds) {\n\t\t\treturn matchAny(Fields.VALUE_ID, valueIds);\n\t\t}\n\t\t\n\t\tpublic static Expression values(DataType type, Collection values) {\n\t\t\tswitch (type) {\n\t\t\tcase STRING: \n\t\t\t\treturn matchAny(Fields.STRING_VALUE, FluentIterable.from(values).filter(String.class).toSet());\n\t\t\tcase INTEGER:\n\t\t\t\treturn matchAnyInt(Fields.INTEGER_VALUE, FluentIterable.from(values).filter(Integer.class).toSet());\n\t\t\tcase DECIMAL:\n\t\t\t\treturn matchAnyDecimal(Fields.DECIMAL_VALUE, FluentIterable.from(values).filter(BigDecimal.class).toSet());\n\t\t\tdefault:\n\t\t\t\tthrow new UnsupportedOperationException(\"Unsupported data type when filtering by values, \" + type);\n\t\t\t}\n\t\t}\n\t\t\n\t\tpublic static Expression valueRange(DataType type, final Object lower, final Object upper, boolean includeLower, boolean 
includeUpper) {\n\t\t\tswitch (type) {\n\t\t\tcase STRING: \n\t\t\t\treturn matchRange(Fields.STRING_VALUE, (String) lower, (String) upper, includeLower, includeUpper);\n\t\t\tcase INTEGER:\n\t\t\t\treturn matchRange(Fields.INTEGER_VALUE, (Integer) lower, (Integer) upper, includeLower, includeUpper);\n\t\t\tcase DECIMAL:\n\t\t\t\treturn matchRange(Fields.DECIMAL_VALUE, (BigDecimal) lower, (BigDecimal) upper, includeLower, includeUpper);\n\t\t\tdefault:\n\t\t\t\tthrow new UnsupportedOperationException(\"Unsupported data type when filtering by values, \" + type);\n\t\t\t}\n\t\t}\n\t\t\n\t\tpublic static Expression dataTypes(Collection dataTypes) {\n\t\t\treturn matchAny(Fields.DATA_TYPE, FluentIterable.from(dataTypes).transform(new Function() {\n\t\t\t\t@Override\n\t\t\t\tpublic String apply(DataType input) {\n\t\t\t\t\treturn input.name();\n\t\t\t\t}\n\t\t\t}).toSet());\n\t\t}\n\t\t\n\t\tpublic static Expression attributeNames(Collection attributeNames) {\n\t\t\treturn matchAny(Fields.ATTRIBUTE_NAME, attributeNames);\n\t\t}\n\t\t\n\t\tpublic static Expression sourceEffectiveTime(long effectiveTime) {\n\t\t\treturn exactMatch(Fields.SOURCE_EFFECTIVE_TIME, effectiveTime);\n\t\t}\n\t\t\n\t\tpublic static Expression targetEffectiveTime(long effectiveTime) {\n\t\t\treturn exactMatch(Fields.TARGET_EFFECTIVE_TIME, effectiveTime);\n\t\t}\n\t\t\n\t\tpublic static Expression refSetTypes(Collection refSetTypes) {\n\t\t\treturn matchAny(Fields.REFSET_TYPE, FluentIterable.from(refSetTypes).transform(new Function() {\n\t\t\t\t@Override\n\t\t\t\tpublic String apply(SnomedRefSetType input) {\n\t\t\t\t\treturn input.name();\n\t\t\t\t}\n\t\t\t}).toSet());\n\t\t}\n\t\t\n\t}\n\n\t@JsonPOJOBuilder(withPrefix=\"\")\n\tpublic static final class Builder extends SnomedDocumentBuilder {\n\n\t\tprivate String referencedComponentId;\n\n\t\tprivate String referenceSetId;\n\t\tprivate SnomedRefSetType referenceSetType;\n\t\tprivate short referencedComponentType;\n\n\t\t// Member specific fields, 
they can be null or emptyish values\n\t\t// ASSOCIATION reference set members\n\t\tprivate String targetComponent;\n\t\t// ATTRIBUTE VALUE\n\t\tprivate String valueId;\n\t\t// CONCRETE DOMAIN reference set members\n\t\tprivate DataType dataType;\n\t\tprivate String attributeName;\n\t\tprivate Object value;\n\t\tprivate String operatorId;\n\t\tprivate String characteristicTypeId;\n\t\tprivate String unitId;\n\t\t// DESCRIPTION\n\t\tprivate Integer descriptionLength;\n\t\tprivate String descriptionFormat;\n\t\t// LANGUAGE\n\t\tprivate String acceptabilityId;\n\t\t// MODULE\n\t\tprivate Long sourceEffectiveTime;\n\t\tprivate Long targetEffectiveTime;\n\t\t// SIMPLE MAP reference set members\n\t\tprivate String mapTarget;\n\t\tprivate String mapTargetDescription;\n\t\t// COMPLEX MAP\n\t\tprivate String mapCategoryId;\n\t\tprivate String correlationId;\n\t\tprivate String mapAdvice;\n\t\tprivate String mapRule;\n\t\tprivate Integer mapGroup;\n\t\tprivate Integer mapPriority;\n\t\t// QUERY\n\t\tprivate String query;\n\n\t\t@JsonCreator\n\t\tprivate Builder() {\n\t\t\t// Disallow instantiation outside static method\n\t\t}\n\n\t\tpublic Builder fields(Map fields) {\n\t\t\tfor (Entry entry : fields.entrySet()) {\n\t\t\t\tfield(entry.getKey(), entry.getValue());\n\t\t\t}\n\t\t\treturn this;\n\t\t}\n\t\t\n\t\tpublic Builder field(String fieldName, Object value) {\n\t\t\tswitch (fieldName) {\n\t\t\tcase Fields.ACCEPTABILITY_ID: this.acceptabilityId = (String) value; break;\n\t\t\tcase Fields.ATTRIBUTE_NAME: this.attributeName = (String) value; break;\n\t\t\tcase Fields.CHARACTERISTIC_TYPE_ID: this.characteristicTypeId = (String) value; break;\n\t\t\tcase Fields.CORRELATION_ID: this.correlationId = (String) value; break;\n\t\t\tcase Fields.DATA_TYPE: this.dataType = (DataType) value; break;\n\t\t\tcase Fields.DATA_VALUE: this.value = value; break;\n\t\t\tcase Fields.DESCRIPTION_FORMAT: this.descriptionFormat = (String) value; break;\n\t\t\tcase Fields.DESCRIPTION_LENGTH: 
this.descriptionLength = (Integer) value; break;\n\t\t\tcase Fields.MAP_ADVICE: this.mapAdvice = (String) value; break;\n\t\t\tcase Fields.MAP_CATEGORY_ID: this.mapCategoryId = (String) value; break;\n\t\t\tcase Fields.MAP_GROUP: this.mapGroup = (Integer) value; break;\n\t\t\tcase Fields.MAP_PRIORITY: this.mapPriority = (Integer) value; break;\n\t\t\tcase Fields.MAP_RULE: this.mapRule = (String) value; break;\n\t\t\tcase Fields.MAP_TARGET: this.mapTarget = (String) value; break;\n\t\t\tcase Fields.MAP_TARGET_DESCRIPTION: this.mapTargetDescription = (String) value; break;\n\t\t\tcase Fields.OPERATOR_ID: this.operatorId = (String) value; break;\n\t\t\tcase Fields.QUERY: this.query = (String) value; break;\n\t\t\tcase Fields.SOURCE_EFFECTIVE_TIME: this.sourceEffectiveTime = (Long) value; break;\n\t\t\tcase Fields.TARGET_COMPONENT: this.targetComponent = (String) value; break;\n\t\t\tcase Fields.TARGET_EFFECTIVE_TIME: this.targetEffectiveTime = (Long) value; break;\n\t\t\tcase Fields.UNIT_ID: this.unitId = (String) value; break;\n\t\t\tcase Fields.VALUE_ID: this.valueId = (String) value; break;\n\t\t\tdefault: throw new UnsupportedOperationException(\"Unknown RF2 member field: \" + fieldName);\n\t\t\t}\n\t\t\treturn this;\n\t\t}\n\n\t\t@Override\n\t\tprotected Builder getSelf() {\n\t\t\treturn this;\n\t\t}\n\n\t\tpublic Builder referencedComponentId(final String referencedComponentId) {\n\t\t\tthis.referencedComponentId = referencedComponentId;\n\t\t\treturn this;\n\t\t}\n\n\t\tpublic Builder referenceSetId(final String referenceSetId) {\n\t\t\tthis.referenceSetId = referenceSetId;\n\t\t\treturn this;\n\t\t}\n\n\t\tpublic Builder referenceSetType(final SnomedRefSetType referenceSetType) {\n\t\t\tthis.referenceSetType = referenceSetType;\n\t\t\treturn this;\n\t\t}\n\n\t\tpublic Builder referencedComponentType(final short referencedComponentType) {\n\t\t\tthis.referencedComponentType = referencedComponentType;\n\t\t\treturn this;\n\t\t}\n\t\t\n\t\tpublic Builder 
targetComponent(String targetComponent) {\n\t\t\tthis.targetComponent = targetComponent;\n\t\t\treturn this;\n\t\t}\n\t\t\n\t\tBuilder acceptabilityId(String acceptabilityId) {\n\t\t\tthis.acceptabilityId = acceptabilityId;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\tBuilder attributeName(String attributeName) {\n\t\t\tthis.attributeName = attributeName;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\tBuilder characteristicTypeId(final String characteristicTypeId) {\n\t\t\tthis.characteristicTypeId = characteristicTypeId;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\tBuilder correlationId(final String correlationId) {\n\t\t\tthis.correlationId = correlationId;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\tBuilder dataType(final DataType dataType) {\n\t\t\tthis.dataType = dataType;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\tBuilder descriptionFormat(final String descriptionFormat) {\n\t\t\tthis.descriptionFormat = descriptionFormat;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\tBuilder descriptionLength(final Integer descriptionLength) {\n\t\t\tthis.descriptionLength = descriptionLength;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\tBuilder mapAdvice(final String mapAdvice) {\n\t\t\tthis.mapAdvice = mapAdvice;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\tBuilder mapCategoryId(final String mapCategoryId) {\n\t\t\tthis.mapCategoryId = mapCategoryId;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\tBuilder mapGroup(final Integer mapGroup) {\n\t\t\tthis.mapGroup = mapGroup;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\tBuilder mapPriority(final Integer mapPriority) {\n\t\t\tthis.mapPriority = mapPriority;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\tBuilder mapRule(final String mapRule) {\n\t\t\tthis.mapRule = mapRule;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\tBuilder mapTarget(final String mapTarget) {\n\t\t\tthis.mapTarget = mapTarget;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\tBuilder mapTargetDescription(final String mapTargetDescription) {\n\t\t\tthis.mapTargetDescription = 
mapTargetDescription;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\tBuilder operatorId(final String operatorId) {\n\t\t\tthis.operatorId = operatorId;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\tBuilder query(final String query) {\n\t\t\tthis.query = query;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\tBuilder sourceEffectiveTime(final Long sourceEffectiveTime) {\n\t\t\tthis.sourceEffectiveTime = sourceEffectiveTime;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\tBuilder targetEffectiveTime(final Long targetEffectiveTime) {\n\t\t\tthis.targetEffectiveTime = targetEffectiveTime;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\tBuilder unitId(final String unitId) {\n\t\t\tthis.unitId = unitId;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\t/**\n\t\t * @deprecated - this is no longer a valid refset member index field, but required to make pre-5.4 dataset work with 5.4 without migration\n\t\t */\n\t\tBuilder value(final Object value) {\n\t\t\tthis.value = value;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\tBuilder decimalValue(final BigDecimal value) {\n\t\t\tthis.value = value;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\tBuilder booleanValue(final Boolean value) {\n\t\t\tthis.value = value;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\tBuilder integerValue(final Integer value) {\n\t\t\tthis.value = value;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\tBuilder stringValue(final String value) {\n\t\t\tthis.value = value;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\tBuilder valueId(String valueId) {\n\t\t\tthis.valueId = valueId;\n\t\t\treturn getSelf();\n\t\t}\n\t\t\n\t\tpublic SnomedRefSetMemberIndexEntry build() {\n\t\t\tfinal SnomedRefSetMemberIndexEntry doc = new SnomedRefSetMemberIndexEntry(id,\n\t\t\t\t\tlabel,\n\t\t\t\t\tmoduleId, \n\t\t\t\t\treleased, \n\t\t\t\t\tactive, \n\t\t\t\t\teffectiveTime, \n\t\t\t\t\treferencedComponentId, \n\t\t\t\t\treferenceSetId,\n\t\t\t\t\treferenceSetType,\n\t\t\t\t\treferencedComponentType);\n\t\t\t// association members\n\t\t\tdoc.targetComponent = 
targetComponent;\n\t\t\t// attribute value\n\t\t\tdoc.valueId = valueId;\n\t\t\t// concrete domain members\n\t\t\tdoc.dataType = dataType;\n\t\t\tdoc.attributeName = attributeName;\n\t\t\tif (dataType != null) {\n\t\t\t\tswitch (dataType) {\n\t\t\t\tcase BOOLEAN:\n\t\t\t\t\tif (value instanceof Boolean) {\n\t\t\t\t\t\tdoc.booleanValue = (Boolean) value;\n\t\t\t\t\t} else if (value instanceof String) {\n\t\t\t\t\t\tdoc.booleanValue = SnomedRefSetUtil.deserializeValue(dataType, (String) value);\n\t\t\t\t\t}\n\t\t\t\t\tbreak;\n\t\t\t\tcase DECIMAL:\n\t\t\t\t\tif (value instanceof BigDecimal) {\n\t\t\t\t\t\tdoc.decimalValue = (BigDecimal) value;\n\t\t\t\t\t} else if (value instanceof String) {\n\t\t\t\t\t\tdoc.decimalValue = SnomedRefSetUtil.deserializeValue(dataType, (String) value);\n\t\t\t\t\t}\n\t\t\t\t\tbreak;\n\t\t\t\tcase INTEGER:\n\t\t\t\t\tif (value instanceof Integer) {\n\t\t\t\t\t\tdoc.integerValue = (Integer) value;\n\t\t\t\t\t} else if (value instanceof String) {\n\t\t\t\t\t\tdoc.integerValue = SnomedRefSetUtil.deserializeValue(dataType, (String) value);\n\t\t\t\t\t}\n\t\t\t\t\tbreak;\n\t\t\t\tcase STRING:\n\t\t\t\t\tdoc.stringValue = (String) value;\n\t\t\t\t\tbreak;\n\t\t\t\tdefault: throw new UnsupportedOperationException(\"Unsupported concrete domain data type: \" + dataType);\n\t\t\t\t}\n\t\t\t}\n\t\t\tdoc.characteristicTypeId = characteristicTypeId;\n\t\t\tdoc.operatorId = operatorId;\n\t\t\tdoc.unitId = unitId;\n\t\t\t// description\n\t\t\tdoc.descriptionFormat = descriptionFormat;\n\t\t\tdoc.descriptionLength = descriptionLength;\n\t\t\t// language reference set\n\t\t\tdoc.acceptabilityId = acceptabilityId;\n\t\t\t// module\n\t\t\tdoc.sourceEffectiveTime = sourceEffectiveTime;\n\t\t\tdoc.targetEffectiveTime = targetEffectiveTime;\n\t\t\t// simple map\n\t\t\tdoc.mapTarget = mapTarget;\n\t\t\tdoc.mapTargetDescription = mapTargetDescription;\n\t\t\t// complex map\n\t\t\tdoc.mapCategoryId = mapCategoryId;\n\t\t\tdoc.mapAdvice = 
mapAdvice;\n\t\t\tdoc.correlationId = correlationId;\n\t\t\tdoc.mapGroup = mapGroup;\n\t\t\tdoc.mapPriority = mapPriority;\n\t\t\tdoc.mapRule = mapRule;\n\t\t\t// query\n\t\t\tdoc.query = query;\n\t\t\t\n\t\t\tdoc.setScore(score);\n\t\t\t// metadata\n\t\t\tdoc.setBranchPath(branchPath);\n\t\t\tdoc.setCommitTimestamp(commitTimestamp);\n\t\t\tdoc.setStorageKey(storageKey);\n\t\t\tdoc.setReplacedIns(replacedIns);\n\t\t\tdoc.setSegmentId(segmentId);\n\t\t\treturn doc;\n\t\t}\n\t}\n\n\tprivate final String referencedComponentId;\n\tprivate final String referenceSetId;\n\tprivate final SnomedRefSetType referenceSetType;\n\tprivate final short referencedComponentType;\n\t\n\t// Member specific fields, they can be null or emptyish values\n\t// ASSOCIATION reference set members\n\tprivate String targetComponent;\n\t// ATTRIBUTE VALUE\n\tprivate String valueId;\n\t// CONCRETE DOMAIN reference set members\n\tprivate DataType dataType;\n\tprivate String attributeName;\n\t\n\t// only one of these value fields should be set when this represents a concrete domain member\n\tprivate String stringValue;\n\tprivate Boolean booleanValue;\n\tprivate Integer integerValue;\n\tprivate BigDecimal decimalValue;\n\t\n\tprivate String operatorId;\n\tprivate String characteristicTypeId;\n\tprivate String unitId;\n\t// DESCRIPTION\n\tprivate Integer descriptionLength;\n\tprivate String descriptionFormat;\n\t// LANGUAGE\n\tprivate String acceptabilityId;\n\t// MODULE\n\tprivate Long sourceEffectiveTime;\n\tprivate Long targetEffectiveTime;\n\t// SIMPLE MAP reference set members\n\tprivate String mapTarget;\n\tprivate String mapTargetDescription;\n\t// COMPLEX MAP\n\tprivate String mapCategoryId;\n\tprivate String correlationId;\n\tprivate String mapAdvice;\n\tprivate String mapRule;\n\tprivate Integer mapGroup;\n\tprivate Integer mapPriority;\n\t// QUERY\n\tprivate String query;\n\t\n\n\tprivate SnomedRefSetMemberIndexEntry(final String id,\n\t\t\tfinal String label,\n\t\t\tfinal String 
moduleId, \n\t\t\tfinal boolean released,\n\t\t\tfinal boolean active, \n\t\t\tfinal long effectiveTimeLong, \n\t\t\tfinal String referencedComponentId, \n\t\t\tfinal String referenceSetId,\n\t\t\tfinal SnomedRefSetType referenceSetType,\n\t\t\tfinal short referencedComponentType) {\n\n\t\tsuper(id, \n\t\t\t\tlabel,\n\t\t\t\treferencedComponentId, // XXX: iconId is the referenced component identifier\n\t\t\t\tmoduleId, \n\t\t\t\treleased, \n\t\t\t\tactive, \n\t\t\t\teffectiveTimeLong);\n\n\t\tcheckArgument(referencedComponentType >= CoreTerminologyBroker.UNSPECIFIED_NUMBER_SHORT, \"Referenced component type '%s' is invalid.\", referencedComponentType);\n\t\tthis.referencedComponentId = referencedComponentId;\n\t\tthis.referenceSetId = referenceSetId;\n\t\tthis.referenceSetType = referenceSetType;\n\t\tthis.referencedComponentType = referencedComponentType;\n\t}\n\n\t@Override\n\tpublic String getContainerId() {\n\t\t// XXX hack to make IHTSDO merge review API tests pass and work as before in 4.5\n\t\tif (getReferenceSetType() == SnomedRefSetType.MODULE_DEPENDENCY) {\n\t\t\treturn null;\n\t\t} else {\n\t\t\treturn getReferencedComponentId();\n\t\t}\n\t}\n\n\t/**\n\t * @return the referenced component identifier\n\t */\n\tpublic String getReferencedComponentId() {\n\t\treturn referencedComponentId;\n\t}\n\n\t/**\n\t * @return the identifier of the member's reference set\n\t */\n\tpublic String getReferenceSetId() {\n\t\treturn referenceSetId;\n\t}\n\n\t/**\n\t * @return the type of the member's reference set\n\t */\n\tpublic SnomedRefSetType getReferenceSetType() {\n\t\treturn referenceSetType;\n\t}\n\n\t@JsonIgnore\n\t@SuppressWarnings(\"unchecked\")\n\tpublic T getValueAs() {\n\t\treturn (T) getValue();\n\t}\n\t\n\t@JsonIgnore\n\tpublic Object getValue() {\n\t\tif (dataType == null) {\n\t\t\treturn null;\n\t\t} else {\n\t\t\tswitch (dataType) {\n\t\t\tcase BOOLEAN: return booleanValue;\n\t\t\tcase DECIMAL: return decimalValue;\n\t\t\tcase INTEGER: return 
integerValue;\n\t\t\tcase STRING: return stringValue;\n\t\t\tdefault: throw new UnsupportedOperationException(\"Unsupported concrete domain data type: \" + dataType);\n\t\t\t}\n\t\t}\n\t}\n\t\n\t@JsonProperty\n\tBigDecimal getDecimalValue() {\n\t\treturn decimalValue;\n\t}\n\t\n\t@JsonProperty\n\tBoolean getBooleanValue() {\n\t\treturn booleanValue;\n\t}\n\t\n\t@JsonProperty\n\tInteger getIntegerValue() {\n\t\treturn integerValue;\n\t}\n\t\n\t@JsonProperty\n\tString getStringValue() {\n\t\treturn stringValue;\n\t}\n\n\tpublic DataType getDataType() {\n\t\treturn dataType;\n\t}\n\n\tpublic String getUnitId() {\n\t\treturn unitId;\n\t}\n\n\tpublic String getAttributeName() {\n\t\treturn attributeName;\n\t}\n\n\tpublic String getOperatorId() {\n\t\treturn operatorId;\n\t}\n\n\tpublic String getCharacteristicTypeId() {\n\t\treturn characteristicTypeId;\n\t}\t\n\n\tpublic String getAcceptabilityId() {\n\t\treturn acceptabilityId;\n\t}\n\n\tpublic Integer getDescriptionLength() {\n\t\treturn descriptionLength;\n\t}\n\t\n\tpublic String getDescriptionFormat() {\n\t\treturn descriptionFormat;\n\t}\n\n\tpublic String getMapTarget() {\n\t\treturn mapTarget;\n\t}\n\n\tpublic Integer getMapGroup() {\n\t\treturn mapGroup;\n\t}\n\n\tpublic Integer getMapPriority() {\n\t\treturn mapPriority;\n\t}\n\n\tpublic String getMapRule() {\n\t\treturn mapRule;\n\t}\n\n\tpublic String getMapAdvice() {\n\t\treturn mapAdvice;\n\t}\n\t\n\tpublic String getMapCategoryId() {\n\t\treturn mapCategoryId;\n\t}\n\t\n\tpublic String getCorrelationId() {\n\t\treturn correlationId;\n\t}\n\n\tpublic String getMapTargetDescription() {\n\t\treturn mapTargetDescription;\n\t}\n\t\n\tpublic String getQuery() {\n\t\treturn query;\n\t}\n\t\n\tpublic String getTargetComponent() {\n\t\treturn targetComponent;\n\t}\n\t\n\tpublic String getValueId() {\n\t\treturn valueId;\n\t}\n\t\n\tpublic Long getSourceEffectiveTime() {\n\t\treturn sourceEffectiveTime;\n\t}\n\t\n\tpublic Long getTargetEffectiveTime() 
{\n\t\treturn targetEffectiveTime;\n\t}\n\t\n\tpublic short getReferencedComponentType() {\n\t\treturn referencedComponentType;\n\t}\n\t\n\t// model helper methods\n\t\n\t@JsonIgnore\n\tpublic Acceptability getAcceptability() {\n\t\treturn Acceptability.getByConceptId(getAcceptabilityId());\n\t}\n\t\n\t@JsonIgnore\n\tpublic RelationshipRefinability getRefinability() {\n\t\treturn RelationshipRefinability.getByConceptId(getValueId());\n\t}\n\t\n\t@JsonIgnore\n\tpublic InactivationIndicator getInactivationIndicator() {\n\t\treturn InactivationIndicator.getByConceptId(getValueId());\n\t}\n\t\n\t@JsonIgnore\n\tpublic String getSourceEffectiveTimeAsString() {\n\t\treturn EffectiveTimes.format(getSourceEffectiveTime(), DateFormats.SHORT);\n\t}\n\t\n\t@JsonIgnore\n\tpublic String getTargetEffectiveTimeAsString() {\n\t\treturn EffectiveTimes.format(getTargetEffectiveTime(), DateFormats.SHORT);\n\t}\n\t\n\t/**\n\t * @return the {@code String} terminology component identifier of the component referenced in this member\n\t */\n\t@JsonIgnore\n\tpublic String getReferencedComponentTypeAsString() {\n\t\treturn CoreTerminologyBroker.getInstance().getTerminologyComponentId(referencedComponentType);\n\t}\n\n\t/**\n\t * Helper which converts all non-null/empty additional fields to a values {@link Map} keyed by their field name; \n\t * @return\n\t */\n\t@JsonIgnore\n\tpublic Map getAdditionalFields() {\n\t\tfinal ImmutableMap.Builder builder = ImmutableMap.builder();\n\t\t// ASSOCIATION refset members\n\t\tputIfPresent(builder, Fields.TARGET_COMPONENT, getTargetComponent());\n\t\t// ATTRIBUTE_VALUE refset members \n\t\tputIfPresent(builder, Fields.VALUE_ID, getValueId());\n\t\t// CONCRETE DOMAIN reference set members\n\t\tputIfPresent(builder, Fields.DATA_TYPE, getDataType());\n\t\tputIfPresent(builder, Fields.ATTRIBUTE_NAME, getAttributeName());\n\t\tputIfPresent(builder, Fields.DATA_VALUE, getValue());\n\t\tputIfPresent(builder, Fields.OPERATOR_ID, 
getOperatorId());\n\t\tputIfPresent(builder, Fields.CHARACTERISTIC_TYPE_ID, getCharacteristicTypeId());\n\t\tputIfPresent(builder, Fields.UNIT_ID, getUnitId());\n\t\t// DESCRIPTION\n\t\tputIfPresent(builder, Fields.DESCRIPTION_LENGTH, getDescriptionLength());\n\t\tputIfPresent(builder, Fields.DESCRIPTION_FORMAT, getDescriptionFormat());\n\t\t// LANGUAGE\n\t\tputIfPresent(builder, Fields.ACCEPTABILITY_ID, getAcceptabilityId());\n\t\t// MODULE\n\t\tputIfPresent(builder, Fields.SOURCE_EFFECTIVE_TIME, getSourceEffectiveTime());\n\t\tputIfPresent(builder, Fields.TARGET_EFFECTIVE_TIME, getTargetEffectiveTime());\n\t\t// SIMPLE MAP reference set members\n\t\tputIfPresent(builder, Fields.MAP_TARGET, getMapTarget());\n\t\tputIfPresent(builder, Fields.MAP_TARGET_DESCRIPTION, getMapTargetDescription());\n\t\t// COMPLEX MAP\n\t\tputIfPresent(builder, Fields.MAP_CATEGORY_ID, getMapCategoryId());\n\t\tputIfPresent(builder, Fields.CORRELATION_ID, getCorrelationId());\n\t\tputIfPresent(builder, Fields.MAP_ADVICE, getMapAdvice());\n\t\tputIfPresent(builder, Fields.MAP_RULE, getMapRule());\n\t\tputIfPresent(builder, Fields.MAP_GROUP, getMapGroup());\n\t\tputIfPresent(builder, Fields.MAP_PRIORITY, getMapPriority());\n\t\t// QUERY\n\t\tputIfPresent(builder, Fields.QUERY, getQuery());\n\t\treturn builder.build();\n\t}\n\t\n\tprivate static void putIfPresent(ImmutableMap.Builder builder, String key, Object value) {\n\t\tif (key != null && value != null) {\n\t\t\tbuilder.put(key, value);\n\t\t}\n\t}\n\t\n\t@Override\n\tprotected ToStringHelper doToString() {\n\t\treturn super.doToString()\n\t\t\t\t.add(\"referencedComponentId\", referencedComponentId)\n\t\t\t\t.add(\"referenceSetId\", referenceSetId)\n\t\t\t\t.add(\"referenceSetType\", referenceSetType)\n\t\t\t\t.add(\"referencedComponentType\", referencedComponentType)\n\t\t\t\t.add(\"targetComponent\", targetComponent)\n\t\t\t\t.add(\"valueId\", valueId)\n\t\t\t\t.add(\"dataType\", dataType)\n\t\t\t\t.add(\"attributeName\", 
attributeName)\n\t\t\t\t.add(\"value\", getValue())\n\t\t\t\t.add(\"operatorId\", operatorId)\n\t\t\t\t.add(\"characteristicTypeId\", characteristicTypeId)\n\t\t\t\t.add(\"unitId\", unitId)\n\t\t\t\t.add(\"descriptionLength\", descriptionLength)\n\t\t\t\t.add(\"descriptionFormat\", descriptionFormat)\n\t\t\t\t.add(\"acceptabilityId\", acceptabilityId)\n\t\t\t\t.add(\"sourceEffectiveTime\", sourceEffectiveTime)\n\t\t\t\t.add(\"targetEffectiveTime\", targetEffectiveTime)\n\t\t\t\t.add(\"mapTarget\", mapTarget)\n\t\t\t\t.add(\"mapTargetDescription\", mapTargetDescription)\n\t\t\t\t.add(\"mapCategoryId\", mapCategoryId)\n\t\t\t\t.add(\"correlationId\", correlationId)\n\t\t\t\t.add(\"mapAdvice\", mapAdvice)\n\t\t\t\t.add(\"mapRule\", mapRule)\n\t\t\t\t.add(\"mapGroup\", mapGroup)\n\t\t\t\t.add(\"mapPriority\", mapPriority)\n\t\t\t\t.add(\"query\", query);\n\t}\n}\n"},"message":{"kind":"string","value":"[snomed] extends SnomedDocument.Fields in SnomedRefSetMemberIndexEntry"},"old_file":{"kind":"string","value":"snomed/com.b2international.snowowl.snomed.datastore/src/com/b2international/snowowl/snomed/datastore/index/entry/SnomedRefSetMemberIndexEntry.java"},"subject":{"kind":"string","value":"[snomed] extends SnomedDocument.Fields in SnomedRefSetMemberIndexEntry"},"git_diff":{"kind":"string","value":"nomed/com.b2international.snowowl.snomed.datastore/src/com/b2international/snowowl/snomed/datastore/index/entry/SnomedRefSetMemberIndexEntry.java\n \n \tprivate static final long serialVersionUID = 5198766293865046258L;\n \n\tpublic static class Fields {\n\tpublic static class Fields extends SnomedDocument.Fields {\n \t\t// known RF2 fields\n \t\tpublic static final String REFERENCE_SET_ID = \"referenceSetId\"; // XXX different than the RF2 header field name\n \t\tpublic static final String REFERENCED_COMPONENT_ID = 
SnomedRf2Headers.FIELD_REFERENCED_COMPONENT_ID;"}}},{"rowIdx":2047,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"mit"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"d1ca56ee0d2a9fe37e42dd4f4b4a459f3225e4cc"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"bin-liu/TYComponent,bin-liu/TYComponent,bin-liu/TYComponent"},"new_contents":{"kind":"string","value":"/**\n * The MIT License (MIT)\n * Copyright (c) 2012-2014 唐虞科技(TangyuSoft) Corporation\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\npackage com.tangyu.component.service.remind;\n\nimport android.os.Parcel;\nimport android.os.Parcelable;\nimport android.text.TextUtils;\n\nimport com.tangyu.component.Util;\nimport com.tangyu.component.util.ICopyFrom;\n\nimport java.util.Calendar;\nimport java.util.Comparator;\nimport java.util.LinkedList;\nimport java.util.List;\nimport java.util.UUID;\n\n/**\n * The data of remind service.
\n * Demo : {@link com.tangyu.component.demo.service.remind.RemindData}\n * @author bin\n */\npublic class TYRemindData implements Parcelable, ICopyFrom {\n\n public static final String TABCOL_ID = \"_id\";\n public static final String TABCOL_TIME = \"time\";\n public static final String TABCOL_REMINDSTATE = \"REMIND_STATE\";\n public static final String TABCOL_ENABLE = \"enable\";\n public static final String TABCOL_UUID = \"uuid\";\n\n public static final int REMIND_STATE_REMINDED = -1;\n public static final int REMIND_STATE_UNREMIND = 0;\n public static final int REMIND_STATE_REMINDDING = 1;\n public static final int REMIND_STATE_INVALID = -2;\n\n public static Comparator COMPARATOR_FOR_REMIND_TIME = new Comparator() {\n @Override\n public int compare(TYRemindData lhs, TYRemindData rhs) {\n return lhs.getmRemindTime() == rhs.getmRemindTime() ? 0 : lhs.getmRemindTime() < rhs.getmRemindTime() ? -1 : 1;\n }\n };\n\n protected int mRemindId;\n protected long mRemindTime;\n protected int mRemindState;\n protected boolean mEnable = true;\n protected String mUUID = UUID.randomUUID().toString();\n\n protected TYRemindData() {\n\n }\n\n protected TYRemindData(TYRemindData r) {\n mRemindId = r.mRemindId;\n mRemindTime = r.mRemindTime;\n mRemindState = r.mRemindState;\n mEnable = r.mEnable;\n mUUID = r.mUUID;\n }\n\n protected TYRemindData(Parcel in) {\n mRemindId = in.readInt();\n mRemindTime = in.readLong();\n mRemindState = in.readInt();\n mEnable = in.readInt() == 0 ? 
false : true;\n mUUID = in.readString();\n }\n\n public int getmRemindId() {\n return mRemindId;\n }\n\n public void setmRemindId(int mRemindId) {\n this.mRemindId = mRemindId;\n }\n\n public long getmRemindTime() {\n return mRemindTime;\n }\n\n public void setmRemindTime(long mRemindTime) {\n this.mRemindTime = mRemindTime;\n }\n\n public int getmRemindState() {\n return mRemindState;\n }\n\n public void setmRemindState(int mRemindState) {\n this.mRemindState = mRemindState;\n }\n\n public boolean ismEnable() {\n return mEnable;\n }\n\n public void setmEnable(boolean mEnable) {\n this.mEnable = mEnable;\n }\n\n public String getmUUID() {\n return mUUID;\n }\n\n public void setmUUID(String mUUID) {\n this.mUUID = mUUID;\n }\n\n @Override\n public boolean equals(Object o) {\n TYRemindData data = (TYRemindData) o;\n if (mRemindId == data.mRemindId && mRemindTime == data.mRemindTime &&\n mRemindState == data.mRemindState && mEnable == data.mEnable &&\n mUUID.equals(data.mUUID)) {\n return true;\n }\n return false;\n }\n\n @Override\n public int hashCode() {\n int res = 17;\n res = 37 * res + mRemindId;\n res = 37 * res + (int) (mRemindTime ^ mRemindTime >>> 32);\n res = 37 * res + mRemindState;\n res = 37 * res + (mEnable ? 0 : 1);\n res = 37 * res + mUUID.hashCode();\n return res;\n }\n\n public void clone(TYRemindData r) {\n r.mRemindId = mRemindId;\n r.mRemindState = mRemindState;\n r.mRemindTime = mRemindTime;\n r.mEnable = mEnable;\n r.mUUID = mUUID;\n }\n\n @Override\n public void copyFrom(Object obj) {\n if (obj instanceof TYRemindData) {\n TYRemindData source = (TYRemindData) obj;\n mRemindId = source.mRemindId;\n mRemindState = source.mRemindState;\n mRemindTime = source.mRemindTime;\n mEnable = source.mEnable;\n mUUID = source.mUUID;\n }\n }\n\n @Override\n public void writeToParcel(Parcel dest, int flags) {\n dest.writeInt(mRemindId);\n dest.writeLong(mRemindTime);\n dest.writeInt(mRemindState);\n dest.writeInt(mEnable ? 
1 : 0);\n dest.writeString(mUUID);\n }\n\n @Override\n public int describeContents() {\n return 0;\n }\n\n public static final Creator CREATOR = new Creator() {\n\n @Override\n public TYRemindData createFromParcel(Parcel source) {\n return new TYRemindData(source);\n }\n\n @Override\n public TYRemindData[] newArray(int size) {\n return new TYRemindData[size];\n }\n\n };\n\n @Override\n public String toString() {\n return \"[ID = \" + mRemindId + \"][Time = \" + mRemindTime +\n \"][RemindState =\" + mRemindState + \"][enable = \" + mEnable +\n \"][UUID = \" + mUUID + \"]\";\n }\n\n public boolean isSameData(TYRemindData other) {\n if (other == null) return false;\n return RemindDataUtil.isSameDate(mRemindTime, other.mRemindTime);\n }\n\n public boolean isCompletedState() {\n return mRemindState == REMIND_STATE_REMINDED;\n }\n\n public static class RemindDataUtil {\n\n /**\n * is same date.\n *\n * @param c1\n * @param c2\n * @return\n */\n public static boolean isSameDate(Calendar c1, Calendar c2) {\n if (c1.get(Calendar.YEAR) == c2.get(Calendar.YEAR) &&\n c1.get(Calendar.MONTH) == c2.get(Calendar.MONTH) &&\n c1.get(Calendar.DATE) == c2.get(Calendar.DATE)) {\n return true;\n }\n return false;\n }\n\n public static boolean isSameDate(long timeMills1, long timeMills2) {\n Calendar c1 = Calendar.getInstance();\n Calendar c2 = Calendar.getInstance();\n c1.setTimeInMillis(timeMills1);\n c2.setTimeInMillis(timeMills2);\n return isSameDate(c1, c2);\n }\n\n public static Calendar toDate(Calendar source, Calendar destiny) {\n Calendar res = (Calendar) source.clone();\n res.set(Calendar.YEAR, destiny.get(Calendar.YEAR));\n res.set(Calendar.MONTH, destiny.get(Calendar.MONTH));\n res.set(Calendar.DATE, destiny.get(Calendar.DATE));\n return res;\n }\n\n public final int indexOf(List source, TYRemindData target) {\n if (!Util.isNull(source) && !TextUtils.isEmpty(target.getmUUID())) {\n for (int i = 0; i < source.size(); ++i) {\n TYRemindData e = source.get(i);\n if 
(TextUtils.isEmpty(e.getmUUID())) continue;\n if (target.getmUUID().equals(e.getmUUID())) return i;\n }\n }\n return -1;\n }\n\n public boolean isAllCompleted(List source) {\n if (null != source) {\n for (T remind : source) {\n if (!remind.isCompletedState()) {\n return false;\n }\n }\n }\n return true;\n }\n\n /**\n * find out next remind data.\n * @param reminds be sorted by remind time.\n * @param c2 base time line.\n * @return the data that later than param c2. if null means not found.\n * @see com.tangyu.component.service.remind.TYRemindData#COMPARATOR_FOR_REMIND_TIME\n */\n public T filterNextRemindsPassingTest(List reminds,\n Calendar c2,\n PassingTest passing) {\n if (Util.isNull(reminds)) return null;\n final long baseline = c2.getTimeInMillis();\n for (T rd : reminds) {\n if (!rd.ismEnable()) continue;\n boolean isPass = null != passing ? passing.passingTestStep(rd) : true;\n if (!isPass) continue;\n if (rd.getmRemindTime() >= baseline) {\n return rd;\n }\n }\n return null;\n }\n\n /**\n * change the remind time of reminds to destiny date.\n * @param reminds\n * @param destiny\n * @param passing\n * @return\n */\n public List filterToCalendarPassingTest(List reminds,\n Calendar destiny,\n PassingTest passing) {\n List result = new LinkedList();\n if (!Util.isNull(reminds)) {\n for (T rd : reminds) {\n boolean isPass = null != passing ? 
passing.passingTestStep(rd) : true;\n if (!isPass) continue;\n Calendar c1 = Calendar.getInstance();\n c1.setTimeInMillis(rd.getmRemindTime());\n rd.setmRemindTime(toDate(c1, destiny).getTimeInMillis());\n result.add(rd);\n }\n }\n return result;\n }\n\n public static interface PassingTest {\n /**\n * will to be invoked in loop every step.\n * @param obj\n * @return if false, the param will not add to result.\n */\n public boolean passingTestStep(T obj);\n }\n }\n\n}\n"},"new_file":{"kind":"string","value":"TangyuComponentProject/Source/src/com/tangyu/component/service/remind/TYRemindData.java"},"old_contents":{"kind":"string","value":"/**\n * The MIT License (MIT)\n * Copyright (c) 2012-2014 唐虞科技(TangyuSoft) Corporation\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\npackage com.tangyu.component.service.remind;\n\nimport android.os.Parcel;\nimport android.os.Parcelable;\nimport android.text.TextUtils;\n\nimport com.tangyu.component.Util;\nimport com.tangyu.component.util.ICopyFrom;\n\nimport java.util.Calendar;\nimport java.util.Comparator;\nimport java.util.LinkedList;\nimport java.util.List;\nimport java.util.UUID;\n\n/**\n * The data of remind service.
\n * Demo : {@link com.tangyu.component.demo.service.remind.RemindData}\n * @author bin\n */\npublic class TYRemindData implements Parcelable, ICopyFrom {\n\n public static final String TABCOL_ID = \"_id\";\n public static final String TABCOL_TIME = \"time\";\n public static final String TABCOL_REMINDSTATE = \"REMIND_STATE\";\n public static final String TABCOL_ENABLE = \"enable\";\n public static final String TABCOL_UUID = \"uuid\";\n\n public static final int REMIND_STATE_REMINDED = -1;\n public static final int REMIND_STATE_UNREMIND = 0;\n public static final int REMIND_STATE_REMINDDING = 1;\n public static final int REMIND_STATE_INVALID = -2;\n\n public static Comparator COMPARATOR_FOR_REMIND_TIME = new Comparator() {\n @Override\n public int compare(TYRemindData lhs, TYRemindData rhs) {\n return lhs.getmRemindTime() == rhs.getmRemindTime() ? 0 : lhs.getmRemindTime() < rhs.getmRemindTime() ? -1 : 1;\n }\n };\n\n protected int mRemindId;\n protected long mRemindTime;\n protected int mRemindState;\n protected boolean mEnable = true;\n protected String mUUID = UUID.randomUUID().toString();\n\n protected TYRemindData() {\n\n }\n\n protected TYRemindData(TYRemindData r) {\n mRemindId = r.mRemindId;\n mRemindTime = r.mRemindTime;\n mRemindState = r.mRemindState;\n mEnable = r.mEnable;\n mUUID = r.mUUID;\n }\n\n protected TYRemindData(Parcel in) {\n mRemindId = in.readInt();\n mRemindTime = in.readLong();\n mRemindState = in.readInt();\n mEnable = in.readInt() == 0 ? 
false : true;\n mUUID = in.readString();\n }\n\n public int getmRemindId() {\n return mRemindId;\n }\n\n public void setmRemindId(int mRemindId) {\n this.mRemindId = mRemindId;\n }\n\n public long getmRemindTime() {\n return mRemindTime;\n }\n\n public void setmRemindTime(long mRemindTime) {\n this.mRemindTime = mRemindTime;\n }\n\n public int getmRemindState() {\n return mRemindState;\n }\n\n public void setmRemindState(int mRemindState) {\n this.mRemindState = mRemindState;\n }\n\n public boolean ismEnable() {\n return mEnable;\n }\n\n public void setmEnable(boolean mEnable) {\n this.mEnable = mEnable;\n }\n\n public String getmUUID() {\n return mUUID;\n }\n\n public void setmUUID(String mUUID) {\n this.mUUID = mUUID;\n }\n\n @Override\n public boolean equals(Object o) {\n TYRemindData data = (TYRemindData) o;\n if (mRemindId == data.mRemindId && mRemindTime == data.mRemindTime &&\n mRemindState == data.mRemindState && mEnable == data.mEnable &&\n mUUID.equals(data.mUUID)) {\n return true;\n }\n return false;\n }\n\n @Override\n public int hashCode() {\n int res = 17;\n res = 37 * res + mRemindId;\n res = 37 * res + (int) (mRemindTime ^ mRemindTime >>> 32);\n res = 37 * res + mRemindState;\n res = 37 * res + (mEnable ? 0 : 1);\n res = 37 * res + mUUID.hashCode();\n return res;\n }\n\n public void clone(TYRemindData r) {\n r.mRemindId = mRemindId;\n r.mRemindState = mRemindState;\n r.mRemindTime = mRemindTime;\n r.mEnable = mEnable;\n r.mUUID = mUUID;\n }\n\n @Override\n public void copyFrom(Object obj) {\n if (obj instanceof TYRemindData) {\n TYRemindData source = (TYRemindData) obj;\n mRemindId = source.mRemindId;\n mRemindState = source.mRemindState;\n mRemindTime = source.mRemindTime;\n mEnable = source.mEnable;\n mUUID = source.mUUID;\n }\n }\n\n @Override\n public void writeToParcel(Parcel dest, int flags) {\n dest.writeInt(mRemindId);\n dest.writeLong(mRemindTime);\n dest.writeInt(mRemindState);\n dest.writeInt(mEnable ? 
1 : 0);\n dest.writeString(mUUID);\n }\n\n @Override\n public int describeContents() {\n return 0;\n }\n\n public static final Creator CREATOR = new Creator() {\n\n @Override\n public TYRemindData createFromParcel(Parcel source) {\n return new TYRemindData(source);\n }\n\n @Override\n public TYRemindData[] newArray(int size) {\n return new TYRemindData[size];\n }\n\n };\n\n @Override\n public String toString() {\n return \"[ID = \" + mRemindId + \"][Time = \" + mRemindTime +\n \"][RemindState =\" + mRemindState + \"][enable = \" + mEnable +\n \"][UUID = \" + mUUID + \"]\";\n }\n\n public boolean isSameData(TYRemindData other) {\n if (other == null) return false;\n return RemindDataUtil.isSameDate(mRemindTime, other.mRemindTime);\n }\n\n public boolean isCompleted() {\n return mRemindState == REMIND_STATE_REMINDED;\n }\n\n public static class RemindDataUtil {\n\n /**\n * is same date.\n *\n * @param c1\n * @param c2\n * @return\n */\n public static boolean isSameDate(Calendar c1, Calendar c2) {\n if (c1.get(Calendar.YEAR) == c2.get(Calendar.YEAR) &&\n c1.get(Calendar.MONTH) == c2.get(Calendar.MONTH) &&\n c1.get(Calendar.DATE) == c2.get(Calendar.DATE)) {\n return true;\n }\n return false;\n }\n\n public static boolean isSameDate(long timeMills1, long timeMills2) {\n Calendar c1 = Calendar.getInstance();\n Calendar c2 = Calendar.getInstance();\n c1.setTimeInMillis(timeMills1);\n c2.setTimeInMillis(timeMills2);\n return isSameDate(c1, c2);\n }\n\n public static Calendar toDate(Calendar source, Calendar destiny) {\n Calendar res = (Calendar) source.clone();\n res.set(Calendar.YEAR, destiny.get(Calendar.YEAR));\n res.set(Calendar.MONTH, destiny.get(Calendar.MONTH));\n res.set(Calendar.DATE, destiny.get(Calendar.DATE));\n return res;\n }\n\n public final int indexOf(List source, TYRemindData target) {\n if (!Util.isNull(source) && !TextUtils.isEmpty(target.getmUUID())) {\n for (int i = 0; i < source.size(); ++i) {\n TYRemindData e = source.get(i);\n if 
(TextUtils.isEmpty(e.getmUUID())) continue;\n if (target.getmUUID().equals(e.getmUUID())) return i;\n }\n }\n return -1;\n }\n\n public boolean isAllCompleted(List source) {\n if (null != source) {\n for (T remind : source) {\n if (!remind.isCompleted()) {\n return false;\n }\n }\n }\n return true;\n }\n\n /**\n * find out next remind data.\n * @param reminds be sorted by remind time.\n * @param c2 base time line.\n * @return the data that later than param c2. if null means not found.\n * @see com.tangyu.component.service.remind.TYRemindData#COMPARATOR_FOR_REMIND_TIME\n */\n public T filterNextRemindsPassingTest(List reminds,\n Calendar c2,\n PassingTest passing) {\n if (Util.isNull(reminds)) return null;\n final long baseline = c2.getTimeInMillis();\n for (T rd : reminds) {\n if (!rd.ismEnable()) continue;\n boolean isPass = null != passing ? passing.passingTestStep(rd) : true;\n if (!isPass) continue;\n if (rd.getmRemindTime() >= baseline) {\n return rd;\n }\n }\n return null;\n }\n\n /**\n * change the remind time of reminds to destiny date.\n * @param reminds\n * @param destiny\n * @param passing\n * @return\n */\n public List filterToCalendarPassingTest(List reminds,\n Calendar destiny,\n PassingTest passing) {\n List result = new LinkedList();\n if (!Util.isNull(reminds)) {\n for (T rd : reminds) {\n boolean isPass = null != passing ? 
passing.passingTestStep(rd) : true;\n if (!isPass) continue;\n Calendar c1 = Calendar.getInstance();\n c1.setTimeInMillis(rd.getmRemindTime());\n rd.setmRemindTime(toDate(c1, destiny).getTimeInMillis());\n result.add(rd);\n }\n }\n return result;\n }\n\n public static interface PassingTest {\n /**\n * will to be invoked in loop every step.\n * @param obj\n * @return if false, the param will not add to result.\n */\n public boolean passingTestStep(T obj);\n }\n }\n\n}\n"},"message":{"kind":"string","value":"test for rebase in master\n"},"old_file":{"kind":"string","value":"TangyuComponentProject/Source/src/com/tangyu/component/service/remind/TYRemindData.java"},"subject":{"kind":"string","value":"test for rebase in master"},"git_diff":{"kind":"string","value":"angyuComponentProject/Source/src/com/tangyu/component/service/remind/TYRemindData.java\n return RemindDataUtil.isSameDate(mRemindTime, other.mRemindTime);\n }\n \n public boolean isCompleted() {\n public boolean isCompletedState() {\n return mRemindState == REMIND_STATE_REMINDED;\n }\n \n public boolean isAllCompleted(List source) {\n if (null != source) {\n for (T remind : source) {\n if (!remind.isCompleted()) {\n if (!remind.isCompletedState()) {\n return false;\n }\n }"}}},{"rowIdx":2048,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"bsd-3-clause"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"e53a790784b57c0dc6f66ed91feb0bdcbf02371d"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"NCIP/catissue-advanced-query,NCIP/catissue-advanced-query"},"new_contents":{"kind":"string","value":"package edu.wustl.query.action;\r\n\r\nimport java.io.BufferedOutputStream;\r\nimport java.io.FileOutputStream;\r\nimport java.io.IOException;\r\nimport java.io.OutputStream;\r\nimport java.io.OutputStreamWriter;\r\nimport java.util.ArrayList;\r\nimport java.util.Collection;\r\nimport java.util.List;\r\nimport 
javax.servlet.http.HttpServletRequest;\r\nimport javax.servlet.http.HttpServletResponse;\r\nimport javax.servlet.http.HttpSession;\r\nimport org.apache.struts.action.Action;\r\nimport org.apache.struts.action.ActionForm;\r\nimport org.apache.struts.action.ActionForward;\r\nimport org.apache.struts.action.ActionMapping;\r\nimport edu.common.dynamicextensions.domaininterface.EntityGroupInterface;\r\nimport edu.common.dynamicextensions.domaininterface.EntityInterface;\r\nimport edu.wustl.cab2b.server.cache.EntityCache;\r\nimport edu.wustl.common.beans.NameValueBean;\r\nimport edu.wustl.common.exception.BizLogicException;\r\nimport edu.wustl.common.query.queryobject.impl.metadata.SelectedColumnsMetadata;\r\nimport edu.wustl.common.querysuite.queryobject.IQuery;\r\nimport edu.wustl.common.querysuite.queryobject.impl.ParameterizedQuery;\r\nimport edu.wustl.common.util.logger.Logger;\r\nimport edu.wustl.query.actionForm.CategorySearchForm;\r\nimport edu.wustl.query.bizlogic.DefineGridViewBizLogic;\r\nimport edu.wustl.query.bizlogic.ValidateQueryBizLogic;\r\nimport edu.wustl.query.util.global.Constants;\r\nimport edu.wustl.query.util.querysuite.AddContainmentsUtil;\r\nimport edu.wustl.query.util.querysuite.IQueryTreeGenerationUtil;\r\nimport edu.wustl.query.util.querysuite.IQueryUpdationUtil;\r\nimport edu.wustl.query.util.querysuite.QueryDetails;\r\nimport edu.wustl.query.util.querysuite.QueryModuleUtil;\r\n\r\n/**\r\n * This is a action class to load Define Search Results View screen.\r\n * @author deepti_shelar\r\n *\r\n */\r\n/**\r\n * @author baljeet_dhindhwal\r\n *\r\n */\r\npublic class DefineSearchResultsViewAction extends Action\r\n{\r\n\r\n\tprivate static org.apache.log4j.Logger logger =Logger.getLogger(IQueryUpdationUtil.class);\r\n\t/**\r\n\t * This method loads define search results view jsp.\r\n\t * @param mapping mapping\r\n\t * @param form form\r\n\t * @param request request\r\n\t * @param response response\r\n\t * @throws Exception Exception\r\n\t * 
@return ActionForward actionForward\r\n\t */\r\n\t@Override\r\n\tpublic ActionForward execute(ActionMapping mapping, ActionForm form,HttpServletRequest request, HttpServletResponse response) throws Exception\r\n\t{\r\n\t\trequest.setAttribute(Constants.CURRENT_PAGE, Constants.DEFINE_RESULTS_VIEW);\r\n\t\tCategorySearchForm searchForm = (CategorySearchForm) form;\r\n\t\tsearchForm = QueryModuleUtil.setDefaultSelections(searchForm);\r\n\t\tHttpSession session = request.getSession();\r\n\t\tString workflow=request.getParameter(Constants.IS_WORKFLOW);\r\n\t\t if(Constants.TRUE.equals(workflow))\r\n\t\t {\r\n\t\t\t request.setAttribute(Constants.IS_WORKFLOW,Constants.TRUE);\r\n\t\t\t String workflowName= (String)request.getSession().getAttribute(Constants.WORKFLOW_NAME);\r\n\t\t\t request.setAttribute(Constants.WORKFLOW_NAME,workflowName);\r\n\t\t }\r\n\t\t\r\n\t\tIQuery query = (IQuery) session.getAttribute(Constants.QUERY_OBJECT);\r\n\t\tString entityId = request.getParameter(Constants.MAIN_ENTITY_ID);\r\n\t\t/*if(entityId == null)\r\n\t\t{\r\n\t\t\tAddContainmentsUtil.updateIQueryForContainments(session, query);\r\n\t\t}\r\n\t\telse\r\n\t\t{\r\n\t\t\tAddContainmentsUtil.updateIQueryForContainments(session, query, entityId);\r\n\t\t}*/\r\n\t\t\r\n\t\tList mainEntityList = IQueryUpdationUtil.getAllMainObjects(query);\r\n\t\tsession.setAttribute(Constants.MAIN_ENTITY_LIST, mainEntityList);\r\n\t\t\r\n\t\tList prevSelectedColumnNVBList = setSelectedColumnList(session);\r\n ValidateQueryBizLogic.getValidationMessage(request,query);\r\n QueryDetails queryDetailsObject = new QueryDetails(session);\r\n IQueryTreeGenerationUtil.parseIQueryToCreateTree(queryDetailsObject);\r\n StringBuilder xmlString = getConatinmentTreeXML(queryDetailsObject);\r\n\t\tsetMainEntityList(request);\r\n \r\n\t\t//Set the selected column name value bean list to Form\r\n\t\tsetSelectedColumnsNVBeanList(searchForm, prevSelectedColumnNVBList);\r\n\t 
session.setAttribute(Constants.SELECTED_COLUMN_NAME_VALUE_BEAN_LIST,searchForm.getSelectedColumnNameValueBeanList());\r\n\r\n\t\t/*\r\n\t\t * changes made for defined Query\r\n\t\t */\r\n ((ParameterizedQuery)query).setName(searchForm.getQueryTitle());\r\n \r\n\r\n\t\tsession.setAttribute(Constants.QUERY_OBJECT,query);\r\n\t\tString fileName = getFileName();\r\n\t\twriteXMLToTempFile(xmlString.toString(), fileName);\r\n\t\tActionForward target = null;\r\n\t\tif(entityId != null)\r\n\t\t{\r\n\t\t\tresponse.setContentType(Constants.CONTENT_TYPE_TEXT);\r\n\t\t\tresponse.getWriter().write(fileName);\r\n\t\t\ttarget = null;\t\r\n\t\t}\r\n\t\telse\r\n\t\t{\r\n\t\t\trequest.setAttribute(Constants.XML_FILE_NAME, fileName);\r\n\t\t\ttarget = mapping.findForward(Constants.SUCCESS);\r\n\t\t}\r\n\t\treturn target;\r\n\t}\r\n\r\n\t/**\r\n\t * This method sets the selected column name value bean list \r\n\t * @param searchForm\r\n\t * @param prevSelectedColumnNVBList\r\n\t */\r\n\tprivate void setSelectedColumnsNVBeanList(CategorySearchForm searchForm,\r\n\t\t\tList prevSelectedColumnNVBList)\r\n\t{\r\n\t\tList defaultSelectedColumnNameValueBeanList = searchForm.getSelectedColumnNameValueBeanList(); \r\n\t if(defaultSelectedColumnNameValueBeanList==null)\r\n\t {\r\n\t \t defaultSelectedColumnNameValueBeanList = new ArrayList(); \r\n\t }\r\n\t\t if (prevSelectedColumnNVBList != null)\r\n\t {\r\n\t\t\t searchForm.setSelectedColumnNameValueBeanList(prevSelectedColumnNVBList);\r\n\t }\r\n\t else \r\n\t {\r\n\t \t searchForm.setSelectedColumnNameValueBeanList(defaultSelectedColumnNameValueBeanList);\r\n\t }\r\n\t}\r\n\r\n\t/**\r\n\t * This method creates XML string to create containment tree\r\n\t * @param searchForm\r\n\t * @param prevSelectedColumnNVBList\r\n\t * @param queryDetailsObject\r\n\t * @return XML String\r\n\t */\r\n\tprivate StringBuilder getConatinmentTreeXML(QueryDetails queryDetailsObject)\r\n\t{\r\n\t\tDefineGridViewBizLogic defineGridViewBizLogic = new 
DefineGridViewBizLogic();\r\n\t\t//Create XML String instead of populating the tree data vector\r\n\t\tStringBuilder xmlString = new StringBuilder(\" \");\r\n\t\txmlString = defineGridViewBizLogic.createContainmentTree(queryDetailsObject,xmlString);\r\n\t\t\r\n\t\t//This string is appended for the root node of the tree\r\n\t\txmlString.append(\"\");\r\n\t\treturn xmlString;\r\n\t}\r\n\t\r\n\t/**\r\n\t * This method returns Unique file name \r\n\t * @return Unique file name\r\n\t */\r\n\tprivate String getFileName()\r\n\t{\r\n\t\treturn \"loadXML_\"+System.currentTimeMillis()+\".xml\";\r\n\t}\r\n\t\r\n\t/**\r\n\t * This method writes XML tree to create tree to a temporary file \r\n\t * @param xmlString\r\n\t * @param fileName\r\n\t * @throws BizLogicException\r\n\t */\r\n\tprivate void writeXMLToTempFile(String xmlString,String fileName) throws BizLogicException\r\n\t{\r\n\t\ttry \r\n\t\t{ \r\n\t\t\tString path=edu.wustl.query.util.global.Variables.applicationHome+System.getProperty(\"file.separator\");\r\n\t OutputStream fout= new FileOutputStream(path+fileName);\r\n\t OutputStream bout= new BufferedOutputStream(fout);\r\n\t OutputStreamWriter out = new OutputStreamWriter(bout, \"8859_1\");\r\n\t out.write(xmlString);\r\n\t out.flush(); \r\n\t out.close();\r\n\t\t}\r\n\t\tcatch (IOException e) \r\n\t\t{\r\n\t\t\tlogger.info(\"Couldn't create XML file\");\t\r\n\t\t}\r\n\t}\r\n\r\n\t/**\r\n\t * This method returns prevSelectedColumnNVBList\r\n\t * @param session\r\n\t * @return prevSelectedColumnNVBList\r\n\t */\r\n\tprivate List setSelectedColumnList(HttpSession session)\r\n\t{\r\n\t\tList prevSelectedColumnNVBList;\r\n\t\tSelectedColumnsMetadata selectedColumnsMetadata = (SelectedColumnsMetadata) session.getAttribute(Constants.SELECTED_COLUMN_META_DATA);\r\n if (selectedColumnsMetadata==null)\r\n {\r\n \t prevSelectedColumnNVBList=null;\r\n }\r\n else\r\n {\r\n \t prevSelectedColumnNVBList = selectedColumnsMetadata.getSelectedColumnNameValueBeanList();\r\n 
}\r\n\t\treturn prevSelectedColumnNVBList;\r\n\t}\r\n\r\n\t/**\r\n\t * This method returns list of all main entities present in Model \r\n\t * @param request\r\n\t */\r\n\tprivate void setMainEntityList(HttpServletRequest request)\r\n\t{\r\n\t\tCollection entityGroups = EntityCache.getCache().getEntityGroups();\r\n\t\tArrayList entityList = new ArrayList();\r\n\t\tfor (EntityGroupInterface entityGroupInterface : entityGroups)\r\n\t\t{\r\n\t\t\tCollection entityInterface =entityGroupInterface.getEntityCollection();\r\n\t\t for(EntityInterface entity : entityInterface)\r\n\t\t {\r\n\t\t \t if (edu.wustl.query.util.global.Utility.isMainEntity(entity))\r\n\t\t \t {\r\n\t\t \t\t entityList.add(entity);\r\n\t\t \t }\r\n\t\t }\r\n\t\t}\r\n\t\trequest.setAttribute(edu.wustl.query.util.global.Constants.ENTITY_LIST,entityList);\r\n\t}\t\r\n}\r\n"},"new_file":{"kind":"string","value":"WEB-INF/src/edu/wustl/query/action/DefineSearchResultsViewAction.java"},"old_contents":{"kind":"string","value":"package edu.wustl.query.action;\r\n\r\nimport java.io.BufferedOutputStream;\r\nimport java.io.FileOutputStream;\r\nimport java.io.IOException;\r\nimport java.io.OutputStream;\r\nimport java.io.OutputStreamWriter;\r\nimport java.util.ArrayList;\r\nimport java.util.Collection;\r\nimport java.util.List;\r\nimport javax.servlet.http.HttpServletRequest;\r\nimport javax.servlet.http.HttpServletResponse;\r\nimport javax.servlet.http.HttpSession;\r\nimport org.apache.struts.action.Action;\r\nimport org.apache.struts.action.ActionForm;\r\nimport org.apache.struts.action.ActionForward;\r\nimport org.apache.struts.action.ActionMapping;\r\nimport edu.common.dynamicextensions.domaininterface.EntityGroupInterface;\r\nimport edu.common.dynamicextensions.domaininterface.EntityInterface;\r\nimport edu.wustl.cab2b.server.cache.EntityCache;\r\nimport edu.wustl.common.beans.NameValueBean;\r\nimport edu.wustl.common.exception.BizLogicException;\r\nimport 
edu.wustl.common.query.queryobject.impl.metadata.SelectedColumnsMetadata;\r\nimport edu.wustl.common.querysuite.queryobject.IQuery;\r\nimport edu.wustl.common.querysuite.queryobject.impl.ParameterizedQuery;\r\nimport edu.wustl.common.util.logger.Logger;\r\nimport edu.wustl.query.actionForm.CategorySearchForm;\r\nimport edu.wustl.query.bizlogic.DefineGridViewBizLogic;\r\nimport edu.wustl.query.bizlogic.ValidateQueryBizLogic;\r\nimport edu.wustl.query.util.global.Constants;\r\nimport edu.wustl.query.util.querysuite.AddContainmentsUtil;\r\nimport edu.wustl.query.util.querysuite.IQueryTreeGenerationUtil;\r\nimport edu.wustl.query.util.querysuite.IQueryUpdationUtil;\r\nimport edu.wustl.query.util.querysuite.QueryDetails;\r\nimport edu.wustl.query.util.querysuite.QueryModuleUtil;\r\n\r\n/**\r\n * This is a action class to load Define Search Results View screen.\r\n * @author deepti_shelar\r\n *\r\n */\r\n/**\r\n * @author baljeet_dhindhwal\r\n *\r\n */\r\npublic class DefineSearchResultsViewAction extends Action\r\n{\r\n\r\n\tprivate static org.apache.log4j.Logger logger =Logger.getLogger(IQueryUpdationUtil.class);\r\n\t/**\r\n\t * This method loads define search results view jsp.\r\n\t * @param mapping mapping\r\n\t * @param form form\r\n\t * @param request request\r\n\t * @param response response\r\n\t * @throws Exception Exception\r\n\t * @return ActionForward actionForward\r\n\t */\r\n\t@Override\r\n\tpublic ActionForward execute(ActionMapping mapping, ActionForm form,HttpServletRequest request, HttpServletResponse response) throws Exception\r\n\t{\r\n\t\trequest.setAttribute(Constants.CURRENT_PAGE, Constants.DEFINE_RESULTS_VIEW);\r\n\t\tCategorySearchForm searchForm = (CategorySearchForm) form;\r\n\t\tsearchForm = QueryModuleUtil.setDefaultSelections(searchForm);\r\n\t\tHttpSession session = request.getSession();\r\n\t\tString workflow=request.getParameter(Constants.IS_WORKFLOW);\r\n\t\t if(Constants.TRUE.equals(workflow))\r\n\t\t {\r\n\t\t\t 
request.setAttribute(Constants.IS_WORKFLOW,Constants.TRUE);\r\n\t\t\t String workflowName= (String)request.getSession().getAttribute(Constants.WORKFLOW_NAME);\r\n\t\t\t request.setAttribute(Constants.WORKFLOW_NAME,workflowName);\r\n\t\t }\r\n\t\t\r\n\t\tIQuery query = (IQuery) session.getAttribute(Constants.QUERY_OBJECT);\r\n\t\tString entityId = request.getParameter(Constants.MAIN_ENTITY_ID);\r\n\t\tif(entityId == null)\r\n\t\t{\r\n\t\t\tAddContainmentsUtil.updateIQueryForContainments(session, query);\r\n\t\t}\r\n\t\telse\r\n\t\t{\r\n\t\t\tAddContainmentsUtil.updateIQueryForContainments(session, query, entityId);\r\n\t\t}\r\n\t\tList prevSelectedColumnNVBList = setSelectedColumnList(session);\r\n ValidateQueryBizLogic.getValidationMessage(request,query);\r\n QueryDetails queryDetailsObject = new QueryDetails(session);\r\n IQueryTreeGenerationUtil.parseIQueryToCreateTree(queryDetailsObject);\r\n StringBuilder xmlString = getConatinmentTreeXML(queryDetailsObject);\r\n\t\tsetMainEntityList(request);\r\n \r\n\t\t//Set the selected column name value bean list to Form\r\n\t\tsetSelectedColumnsNVBeanList(searchForm, prevSelectedColumnNVBList);\r\n\t session.setAttribute(Constants.SELECTED_COLUMN_NAME_VALUE_BEAN_LIST,searchForm.getSelectedColumnNameValueBeanList());\r\n\r\n\t\t/*\r\n\t\t * changes made for defined Query\r\n\t\t */\r\n ((ParameterizedQuery)query).setName(searchForm.getQueryTitle());\r\n \r\n\r\n\t\tsession.setAttribute(Constants.QUERY_OBJECT,query);\r\n\t\tString fileName = getFileName();\r\n\t\twriteXMLToTempFile(xmlString.toString(), fileName);\r\n\t\tActionForward target = null;\r\n\t\tif(entityId != null)\r\n\t\t{\r\n\t\t\tresponse.setContentType(Constants.CONTENT_TYPE_TEXT);\r\n\t\t\tresponse.getWriter().write(fileName);\r\n\t\t\ttarget = null;\t\r\n\t\t}\r\n\t\telse\r\n\t\t{\r\n\t\t\trequest.setAttribute(Constants.XML_FILE_NAME, fileName);\r\n\t\t\ttarget = mapping.findForward(Constants.SUCCESS);\r\n\t\t}\r\n\t\treturn 
target;\r\n\t}\r\n\r\n\t/**\r\n\t * This method sets the selected column name value bean list \r\n\t * @param searchForm\r\n\t * @param prevSelectedColumnNVBList\r\n\t */\r\n\tprivate void setSelectedColumnsNVBeanList(CategorySearchForm searchForm,\r\n\t\t\tList prevSelectedColumnNVBList)\r\n\t{\r\n\t\tList defaultSelectedColumnNameValueBeanList = searchForm.getSelectedColumnNameValueBeanList(); \r\n\t if(defaultSelectedColumnNameValueBeanList==null)\r\n\t {\r\n\t \t defaultSelectedColumnNameValueBeanList = new ArrayList(); \r\n\t }\r\n\t\t if (prevSelectedColumnNVBList != null)\r\n\t {\r\n\t\t\t searchForm.setSelectedColumnNameValueBeanList(prevSelectedColumnNVBList);\r\n\t }\r\n\t else \r\n\t {\r\n\t \t searchForm.setSelectedColumnNameValueBeanList(defaultSelectedColumnNameValueBeanList);\r\n\t }\r\n\t}\r\n\r\n\t/**\r\n\t * This method creates XML string to create containment tree\r\n\t * @param searchForm\r\n\t * @param prevSelectedColumnNVBList\r\n\t * @param queryDetailsObject\r\n\t * @return XML String\r\n\t */\r\n\tprivate StringBuilder getConatinmentTreeXML(QueryDetails queryDetailsObject)\r\n\t{\r\n\t\tDefineGridViewBizLogic defineGridViewBizLogic = new DefineGridViewBizLogic();\r\n\t\t//Create XML String instead of populating the tree data vector\r\n\t\tStringBuilder xmlString = new StringBuilder(\" \");\r\n\t\txmlString = defineGridViewBizLogic.createContainmentTree(queryDetailsObject,xmlString);\r\n\t\t\r\n\t\t//This string is appended for the root node of the tree\r\n\t\txmlString.append(\"\");\r\n\t\treturn xmlString;\r\n\t}\r\n\t\r\n\t/**\r\n\t * This method returns Unique file name \r\n\t * @return Unique file name\r\n\t */\r\n\tprivate String getFileName()\r\n\t{\r\n\t\treturn \"loadXML_\"+System.currentTimeMillis()+\".xml\";\r\n\t}\r\n\t\r\n\t/**\r\n\t * This method writes XML tree to create tree to a temporary file \r\n\t * @param xmlString\r\n\t * @param fileName\r\n\t * @throws BizLogicException\r\n\t */\r\n\tprivate void 
writeXMLToTempFile(String xmlString,String fileName) throws BizLogicException\r\n\t{\r\n\t\ttry \r\n\t\t{ \r\n\t\t\tString path=edu.wustl.query.util.global.Variables.applicationHome+System.getProperty(\"file.separator\");\r\n\t OutputStream fout= new FileOutputStream(path+fileName);\r\n\t OutputStream bout= new BufferedOutputStream(fout);\r\n\t OutputStreamWriter out = new OutputStreamWriter(bout, \"8859_1\");\r\n\t out.write(xmlString);\r\n\t out.flush(); \r\n\t out.close();\r\n\t\t}\r\n\t\tcatch (IOException e) \r\n\t\t{\r\n\t\t\tlogger.info(\"Couldn't create XML file\");\t\r\n\t\t}\r\n\t}\r\n\r\n\t/**\r\n\t * This method returns prevSelectedColumnNVBList\r\n\t * @param session\r\n\t * @return prevSelectedColumnNVBList\r\n\t */\r\n\tprivate List setSelectedColumnList(HttpSession session)\r\n\t{\r\n\t\tList prevSelectedColumnNVBList;\r\n\t\tSelectedColumnsMetadata selectedColumnsMetadata = (SelectedColumnsMetadata) session.getAttribute(Constants.SELECTED_COLUMN_META_DATA);\r\n if (selectedColumnsMetadata==null)\r\n {\r\n \t prevSelectedColumnNVBList=null;\r\n }\r\n else\r\n {\r\n \t prevSelectedColumnNVBList = selectedColumnsMetadata.getSelectedColumnNameValueBeanList();\r\n }\r\n\t\treturn prevSelectedColumnNVBList;\r\n\t}\r\n\r\n\t/**\r\n\t * This method returns list of all main entities present in Model \r\n\t * @param request\r\n\t */\r\n\tprivate void setMainEntityList(HttpServletRequest request)\r\n\t{\r\n\t\tCollection entityGroups = EntityCache.getCache().getEntityGroups();\r\n\t\tArrayList entityList = new ArrayList();\r\n\t\tfor (EntityGroupInterface entityGroupInterface : entityGroups)\r\n\t\t{\r\n\t\t\tCollection entityInterface =entityGroupInterface.getEntityCollection();\r\n\t\t for(EntityInterface entity : entityInterface)\r\n\t\t {\r\n\t\t \t if (edu.wustl.query.util.global.Utility.isMainEntity(entity))\r\n\t\t \t {\r\n\t\t \t\t entityList.add(entity);\r\n\t\t \t }\r\n\t\t 
}\r\n\t\t}\r\n\t\trequest.setAttribute(edu.wustl.query.util.global.Constants.ENTITY_LIST,entityList);\r\n\t}\t\r\n}\r\n"},"message":{"kind":"string","value":"Adding containments related code commented out\n\nSVN-Revision: 5361\n"},"old_file":{"kind":"string","value":"WEB-INF/src/edu/wustl/query/action/DefineSearchResultsViewAction.java"},"subject":{"kind":"string","value":"Adding containments related code commented out"},"git_diff":{"kind":"string","value":"EB-INF/src/edu/wustl/query/action/DefineSearchResultsViewAction.java\n \t\t\n \t\tIQuery query = (IQuery) session.getAttribute(Constants.QUERY_OBJECT);\n \t\tString entityId = request.getParameter(Constants.MAIN_ENTITY_ID);\n\t\tif(entityId == null)\n\t\t/*if(entityId == null)\n \t\t{\n \t\t\tAddContainmentsUtil.updateIQueryForContainments(session, query);\n \t\t}\n \t\telse\n \t\t{\n \t\t\tAddContainmentsUtil.updateIQueryForContainments(session, query, entityId);\n\t\t}\n\t\t}*/\n\t\t\n\t\tList mainEntityList = IQueryUpdationUtil.getAllMainObjects(query);\n\t\tsession.setAttribute(Constants.MAIN_ENTITY_LIST, mainEntityList);\n\t\t\n \t\tList prevSelectedColumnNVBList = setSelectedColumnList(session);\n ValidateQueryBizLogic.getValidationMessage(request,query);\n QueryDetails queryDetailsObject = new QueryDetails(session);"}}},{"rowIdx":2049,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"5cd00a5f5bde281aeae95482c947c4b3a91a59ce"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"wso2/carbon-data,wso2/carbon-data,wso2/carbon-data"},"new_contents":{"kind":"string","value":"/*\n * Copyright (c) 2005-2011, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.\n *\n * WSO2 Inc. 
licenses this file to you under the Apache License,\n * Version 2.0 (the \"License\"); you may not use this file except\n * in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing,\n * software distributed under the License is distributed on an\n * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n * KIND, either express or implied. See the License for the\n * specific language governing permissions and limitations\n * under the License.\n *\n */\npackage org.wso2.carbon.dataservices.sql.driver.query.insert;\n\nimport org.wso2.carbon.dataservices.sql.driver.TDriverUtil;\nimport org.wso2.carbon.dataservices.sql.driver.parser.Constants;\nimport org.wso2.carbon.dataservices.sql.driver.parser.ParserUtil;\nimport org.wso2.carbon.dataservices.sql.driver.query.ColumnInfo;\nimport org.wso2.carbon.dataservices.sql.driver.query.Query;\n\nimport java.sql.SQLException;\nimport java.sql.Statement;\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.Queue;\n\npublic abstract class InsertQuery extends Query {\n\n private String targetTableName;\n\n private Map columns;\n\n private Map columnValues;\n\n public InsertQuery(Statement stmt) throws SQLException {\n super(stmt);\n this.targetTableName = this.extractTargetTableName(getProcessedTokens());\n this.columns = this.extractTargetColumns(getProcessedTokens());\n this.columnValues = this.extractTargetColumnValues(getProcessedTokens());\n if (this.getColumns().size() != this.getColumnValues().size()) {\n throw new SQLException(\"Parameter index is out of range. 
The column count does not \" +\n \"match the value count\");\n }\n }\n\n private String extractTargetTableName(Queue tokens) throws SQLException {\n if (tokens == null || tokens.isEmpty()) {\n throw new SQLException(\"Unable to populate attributes\");\n }\n /* Drops INSERT keyword */\n tokens.poll();\n /* Drops INTO keyword */\n tokens.poll();\n if (!Constants.TABLE.equalsIgnoreCase(tokens.peek())) {\n throw new SQLException(\"Table name is missing\");\n }\n tokens.poll();\n if (!ParserUtil.isStringLiteral(tokens.peek())) {\n throw new SQLException(\"Table name is missing\");\n }\n return tokens.poll();\n }\n\n private Map extractTargetColumns(Queue tokens) throws SQLException {\n Map targetColumns = new HashMap();\n if (Constants.COLUMN.equals(tokens.peek())) {\n this.processColumnNames(tokens, targetColumns, 0);\n } else {\n targetColumns = this.getColumnMap();\n }\n return targetColumns;\n }\n\n private Map extractTargetColumnValues(Queue tokens) throws\n SQLException {\n Map targetColumnValues = new HashMap();\n if (!(Constants.VALUES.equalsIgnoreCase(tokens.peek()) ||\n Constants.VALUE.equalsIgnoreCase(tokens.peek()))) {\n throw new SQLException(\"VALUE/VALUES keyword is missing\");\n }\n tokens.poll();\n processColumnValues(tokens, targetColumnValues, 0, false, false, true);\n return targetColumnValues;\n }\n\n private void processColumnNames(Queue tokens, Map targetColumns,\n int colCount) throws SQLException {\n if (!Constants.COLUMN.equalsIgnoreCase(tokens.peek())) {\n return;\n }\n tokens.poll();\n if (!ParserUtil.isStringLiteral(tokens.peek())) {\n throw new SQLException(\"Syntax Error : String literal expected\");\n }\n targetColumns.put(colCount, tokens.poll());\n if (Constants.COLUMN.equalsIgnoreCase(tokens.peek())) {\n processColumnNames(tokens, targetColumns, colCount + 1);\n }\n }\n\n private void processColumnValues(Queue tokens, Map targetColumnValues,\n int valCount, boolean isParameterized, boolean isEnd,\n boolean isInit) throws SQLException 
{\n if (!isEnd) {\n if (!Constants.PARAM_VALUE.equalsIgnoreCase(tokens.peek())) {\n throw new SQLException(\"Syntax Error : 'PARAM_VALUE' is expected\");\n }\n tokens.poll();\n if (\"?\".equalsIgnoreCase(tokens.peek())) {\n if (isInit) {\n isParameterized = true;\n isInit = false;\n }\n if (!isParameterized) {\n throw new SQLException(\"Both parameters and inline parameter values are not \" +\n \"allowed to exist together\");\n }\n isParameterized = true;\n targetColumnValues.put(valCount, tokens.poll());\n } else if (Constants.SINGLE_QUOTATION.equalsIgnoreCase(tokens.peek())) {\n if (isInit) {\n isInit = false;\n isParameterized = false;\n }\n if (isParameterized) {\n throw new SQLException(\"Both parameters and inline parameter values are not \" +\n \"allowed to exist together\");\n }\n tokens.poll();\n StringBuilder b = new StringBuilder();\n while (Constants.SINGLE_QUOTATION.equalsIgnoreCase(tokens.peek()) ||\n tokens.isEmpty()) {\n b.append(tokens.poll());\n }\n targetColumnValues.put(valCount, b.toString());\n tokens.poll();\n } else {\n if (isInit) {\n isInit = false;\n isParameterized = false;\n }\n if (isParameterized) {\n throw new SQLException(\"Both parameters and inline parameter values are not \" +\n \"allowed to exist together\");\n }\n targetColumnValues.put(valCount, tokens.poll());\n }\n if (!Constants.PARAM_VALUE.equalsIgnoreCase(tokens.peek())) {\n isEnd = true;\n }\n processColumnValues(tokens, targetColumnValues, valCount + 1, isParameterized, isEnd,\n isInit);\n }\n }\n\n public String getTargetTableName() {\n return targetTableName;\n }\n\n public Map getColumns() {\n return columns;\n }\n\n public Map getColumnValues() {\n return columnValues;\n }\n\n private Map getColumnMap() throws SQLException {\n ColumnInfo[] headers =\n TDriverUtil.getHeaders(this.getConnection(), this.getTargetTableName());\n Map columns = new HashMap();\n for (ColumnInfo column : headers) {\n columns.put(column.getId(), column.getName());\n }\n return columns;\n 
}\n\n}\n"},"new_file":{"kind":"string","value":"components/data-services/org.wso2.carbon.dataservices.sql.driver/src/main/java/org/wso2/carbon/dataservices/sql/driver/query/insert/InsertQuery.java"},"old_contents":{"kind":"string","value":"/*\n * Copyright (c) 2005-2011, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.\n *\n * WSO2 Inc. licenses this file to you under the Apache License,\n * Version 2.0 (the \"License\"); you may not use this file except\n * in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing,\n * software distributed under the License is distributed on an\n * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n * KIND, either express or implied. See the License for the\n * specific language governing permissions and limitations\n * under the License.\n *\n */\npackage org.wso2.carbon.dataservices.sql.driver.query.insert;\n\nimport org.wso2.carbon.dataservices.sql.driver.TDriverUtil;\nimport org.wso2.carbon.dataservices.sql.driver.parser.Constants;\nimport org.wso2.carbon.dataservices.sql.driver.parser.ParserUtil;\nimport org.wso2.carbon.dataservices.sql.driver.query.ColumnInfo;\nimport org.wso2.carbon.dataservices.sql.driver.query.Query;\n\nimport java.sql.SQLException;\nimport java.sql.Statement;\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.Queue;\n\npublic abstract class InsertQuery extends Query {\n\n private String targetTableName;\n\n private Map columns;\n\n private Map columnValues;\n\n public InsertQuery(Statement stmt) throws SQLException {\n super(stmt);\n this.targetTableName = this.extractTargetTableName(getProcessedTokens());\n this.columns = this.extractTargetColumns(getProcessedTokens());\n this.columnValues = this.extractTargetColumnValues(getProcessedTokens());\n if (this.getColumns().size() != this.getColumnValues().size()) {\n throw new 
SQLException(\"Parameter index is out of range. The column count does not \" +\n \"match the value count\");\n }\n }\n\n private String extractTargetTableName(Queue tokens) throws SQLException {\n if (tokens == null || tokens.isEmpty()) {\n throw new SQLException(\"Unable to populate attributes\");\n }\n /* Drops INSERT keyword */\n tokens.poll();\n /* Drops INTO keyword */\n tokens.poll();\n if (!Constants.TABLE.equalsIgnoreCase(tokens.peek())) {\n throw new SQLException(\"Table name is missing\");\n }\n tokens.poll();\n if (!ParserUtil.isStringLiteral(tokens.peek())) {\n throw new SQLException(\"Table name is missing\");\n }\n return tokens.poll();\n }\n\n private Map extractTargetColumns(Queue tokens) throws SQLException {\n Map targetColumns = new HashMap();\n if (Constants.COLUMN.equals(tokens.peek())) {\n this.processColumnNames(tokens, targetColumns, 0);\n } else {\n targetColumns = this.getColumnMap();\n }\n return targetColumns;\n }\n\n private Map extractTargetColumnValues(Queue tokens) throws\n SQLException {\n Map targetColumnValues = new HashMap();\n if (!(Constants.VALUES.equalsIgnoreCase(tokens.peek()) ||\n Constants.VALUE.equalsIgnoreCase(tokens.peek()))) {\n throw new SQLException(\"VALUE/VALUES keyword is missing\");\n }\n tokens.poll();\n processColumnValues(tokens, targetColumnValues, 0, false, false, true);\n return targetColumnValues;\n }\n\n private void processColumnNames(Queue tokens, Map targetColumns,\n int colCount) throws SQLException {\n if (!Constants.COLUMN.equalsIgnoreCase(tokens.peek())) {\n return;\n }\n tokens.poll();\n if (!ParserUtil.isStringLiteral(tokens.peek())) {\n throw new SQLException(\"Syntax Error : String literal expected\");\n }\n targetColumns.put(colCount, tokens.poll());\n if (Constants.COLUMN.equalsIgnoreCase(tokens.peek())) {\n processColumnNames(tokens, targetColumns, colCount + 1);\n }\n }\n\n private void processColumnValues(Queue tokens, Map targetColumnValues,\n int valCount, boolean isParameterized, 
boolean isEnd,\n boolean isInit) throws SQLException {\n if (!isEnd) {\n if (!Constants.PARAM_VALUE.equalsIgnoreCase(tokens.peek())) {\n throw new SQLException(\"Syntax Error : 'PARAM_VALUE' is expected\");\n }\n tokens.poll();\n if (!ParserUtil.isStringLiteral(tokens.peek())) {\n throw new SQLException(\"Syntax Error : String literal expected\");\n }\n if (\"?\".equalsIgnoreCase(tokens.peek())) {\n if (isInit) {\n isParameterized = true;\n isInit = false;\n }\n if (!isParameterized) {\n throw new SQLException(\"Both parameters and inline parameter values are not \" +\n \"allowed to exist together\");\n }\n isParameterized = true;\n targetColumnValues.put(valCount, tokens.poll());\n } else if (Constants.SINGLE_QUOTATION.equalsIgnoreCase(tokens.peek())) {\n if (isInit) {\n isInit = false;\n isParameterized = false;\n }\n if (isParameterized) {\n throw new SQLException(\"Both parameters and inline parameter values are not \" +\n \"allowed to exist together\");\n }\n tokens.poll();\n StringBuilder b = new StringBuilder();\n while (Constants.SINGLE_QUOTATION.equalsIgnoreCase(tokens.peek()) ||\n tokens.isEmpty()) {\n b.append(tokens.poll());\n }\n targetColumnValues.put(valCount, b.toString());\n tokens.poll();\n } else if (ParserUtil.isStringLiteral(tokens.peek())) {\n if (isInit) {\n isInit = false;\n isParameterized = false;\n }\n if (isParameterized) {\n throw new SQLException(\"Both parameters and inline parameter values are not \" +\n \"allowed to exist together\");\n }\n targetColumnValues.put(valCount, tokens.poll());\n }\n if (!Constants.PARAM_VALUE.equalsIgnoreCase(tokens.peek())) {\n isEnd = true;\n }\n processColumnValues(tokens, targetColumnValues, valCount + 1, isParameterized, isEnd,\n isInit);\n }\n }\n\n public String getTargetTableName() {\n return targetTableName;\n }\n\n public Map getColumns() {\n return columns;\n }\n\n public Map getColumnValues() {\n return columnValues;\n }\n\n private Map getColumnMap() throws SQLException {\n ColumnInfo[] 
headers =\n TDriverUtil.getHeaders(this.getConnection(), this.getTargetTableName());\n Map columns = new HashMap();\n for (ColumnInfo column : headers) {\n columns.put(column.getId(), column.getName());\n }\n return columns;\n }\n\n}\n"},"message":{"kind":"string","value":"Fix excel keyword issue\n"},"old_file":{"kind":"string","value":"components/data-services/org.wso2.carbon.dataservices.sql.driver/src/main/java/org/wso2/carbon/dataservices/sql/driver/query/insert/InsertQuery.java"},"subject":{"kind":"string","value":"Fix excel keyword issue"},"git_diff":{"kind":"string","value":"omponents/data-services/org.wso2.carbon.dataservices.sql.driver/src/main/java/org/wso2/carbon/dataservices/sql/driver/query/insert/InsertQuery.java\n throw new SQLException(\"Syntax Error : 'PARAM_VALUE' is expected\");\n }\n tokens.poll();\n if (!ParserUtil.isStringLiteral(tokens.peek())) {\n throw new SQLException(\"Syntax Error : String literal expected\");\n }\n if (\"?\".equalsIgnoreCase(tokens.peek())) {\n if (isInit) {\n isParameterized = true;\n }\n targetColumnValues.put(valCount, b.toString());\n tokens.poll();\n } else if (ParserUtil.isStringLiteral(tokens.peek())) {\n } else {\n if (isInit) {\n isInit = false;\n isParameterized = false;"}}},{"rowIdx":2050,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"epl-1.0"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"83f579aba8460a9ea2fcd58b3a4e64c4068025d1"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"phat-trien-phan-mem-phan-tan/dynamicpool,phat-trien-phan-mem-phan-tan/dynamicpool,phat-trien-phan-mem-phan-tan/dynamicpool,phat-trien-phan-mem-phan-tan/dynamicpool"},"new_contents":{"kind":"string","value":"package vn.edu.hust.student.dynamicpool.dal;\r\n\r\nimport vn.edu.hust.student.dynamicpool.bll.BusinessLogicDataCallback;\r\nimport vn.edu.hust.student.dynamicpool.bll.Fish;\r\nimport 
vn.edu.hust.student.dynamicpool.bll.FishManager;\r\nimport vn.edu.hust.student.dynamicpool.model.DeviceInfo;\r\n\r\npublic interface DataAccessLayer {\r\n\t\r\n\tString getClientName();\r\n\t\r\n\tvoid joinHost(int key, BusinessLogicDataCallback callback);\r\n\r\n\tvoid createHost(BusinessLogicDataCallback callback);\r\n\t\r\n\t// them thiet bi va dua ket qua tra ve cua server la mot mang danh sach cac Segment cua be\r\n\tvoid addDevice(DeviceInfo deviceInfo,BusinessLogicDataCallback callback);\r\n\t\r\n\tvoid exit(BusinessLogicDataCallback callback);\r\n\r\n\tvoid createFish(Fish fish,BusinessLogicDataCallback callback);\r\n\t\r\n\tvoid synchronization(BusinessLogicDataCallback callback);\r\n\t\r\n\t// gui thong tin ca nen server khi chuan bi ra khoi be\r\n\tvoid removeFish(Fish fish,BusinessLogicDataCallback callback);\r\n\t\r\n\tvoid synchronous(FishManager fishManager, String clientName);\r\n}\r\n"},"new_file":{"kind":"string","value":"DynamicPool/core/src/vn/edu/hust/student/dynamicpool/dal/DataAccessLayer.java"},"old_contents":{"kind":"string","value":"package vn.edu.hust.student.dynamicpool.dal;\r\n\r\nimport java.util.List;\r\n\r\nimport vn.edu.hust.student.dynamicpool.bll.BusinessLogicDataCallback;\r\nimport vn.edu.hust.student.dynamicpool.bll.Fish;\r\nimport vn.edu.hust.student.dynamicpool.bll.FishManager;\r\nimport vn.edu.hust.student.dynamicpool.model.DeviceInfo;\r\n\r\npublic interface DataAccessLayer {\r\n\t\r\n\tString getClientName();\r\n\t\r\n\tvoid joinHost(int key, BusinessLogicDataCallback callback);\r\n\r\n\tvoid createHost(BusinessLogicDataCallback callback);\r\n\t\r\n\t// them thiet bi va dua ket qua tra ve cua server la mot mang danh sach cac Segment cua be\r\n\tvoid addDevice(DeviceInfo deviceInfo,BusinessLogicDataCallback callback);\r\n\t\r\n\tvoid exit(BusinessLogicDataCallback callback);\r\n\r\n\tvoid createFish(Fish fish,BusinessLogicDataCallback callback);\r\n\t\r\n\tvoid synchronization(BusinessLogicDataCallback 
callback);\r\n\t\r\n\t// gui thong tin ca nen server khi chuan bi ra khoi be\r\n\tvoid removeFish(Fish fish,BusinessLogicDataCallback callback);\r\n\t\r\n\tvoid synchronous(FishManager fishManager, String clientName);\r\n}\r\n"},"message":{"kind":"string","value":"sua data access layer\n"},"old_file":{"kind":"string","value":"DynamicPool/core/src/vn/edu/hust/student/dynamicpool/dal/DataAccessLayer.java"},"subject":{"kind":"string","value":"sua data access layer"},"git_diff":{"kind":"string","value":"ynamicPool/core/src/vn/edu/hust/student/dynamicpool/dal/DataAccessLayer.java\n package vn.edu.hust.student.dynamicpool.dal;\n\nimport java.util.List;\n \n import vn.edu.hust.student.dynamicpool.bll.BusinessLogicDataCallback;\n import vn.edu.hust.student.dynamicpool.bll.Fish;"}}},{"rowIdx":2051,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"837d26e9fd2e8994974dbb0f1ec968beab5717af"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"EvilMcJerkface/Aeron,mikeb01/Aeron,EvilMcJerkface/Aeron,mikeb01/Aeron,mikeb01/Aeron,mikeb01/Aeron,real-logic/Aeron,real-logic/Aeron,EvilMcJerkface/Aeron,real-logic/Aeron,EvilMcJerkface/Aeron,real-logic/Aeron"},"new_contents":{"kind":"string","value":"/*\n * Copyright 2014-2020 Real Logic Limited.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage 
io.aeron.archive;\n\nimport io.aeron.Aeron;\nimport io.aeron.AeronCloseHelper;\nimport io.aeron.Publication;\nimport io.aeron.Subscription;\nimport io.aeron.archive.codecs.ControlResponseCode;\nimport io.aeron.archive.codecs.RecordingSignal;\nimport io.aeron.archive.codecs.SourceLocation;\nimport io.aeron.security.Authenticator;\nimport org.agrona.concurrent.CachedEpochClock;\nimport org.agrona.concurrent.CountedErrorHandler;\nimport org.agrona.concurrent.UnsafeBuffer;\n\nimport java.util.ArrayDeque;\nimport java.util.function.BooleanSupplier;\n\nimport static io.aeron.archive.client.ArchiveException.AUTHENTICATION_REJECTED;\nimport static io.aeron.archive.client.ArchiveException.GENERIC;\nimport static io.aeron.archive.codecs.ControlResponseCode.*;\n\n/**\n * Control sessions are interacted with from the {@link ArchiveConductor}. The interaction may result in pending\n * send actions being queued for execution by the {@link ArchiveConductor}.\n */\nclass ControlSession implements Session\n{\n private static final long RESEND_INTERVAL_MS = 200L;\n private static final String SESSION_REJECTED_MSG = \"authentication rejected\";\n\n enum State\n {\n INIT, CONNECTED, CHALLENGED, AUTHENTICATED, ACTIVE, INACTIVE, REJECTED, CLOSED\n }\n\n private final int majorVersion;\n private final long controlSessionId;\n private final long connectTimeoutMs;\n private long correlationId;\n private long resendDeadlineMs;\n private long activityDeadlineMs;\n private Session activeListing = null;\n private final ArchiveConductor conductor;\n private final CachedEpochClock cachedEpochClock;\n private final ControlResponseProxy controlResponseProxy;\n private final Authenticator authenticator;\n private final ControlSessionProxy controlSessionProxy;\n private final ArrayDeque queuedResponses = new ArrayDeque<>(8);\n private final ControlSessionDemuxer demuxer;\n private final Publication controlPublication;\n private final String invalidVersionMessage;\n private State state = 
State.INIT;\n\n ControlSession(\n final int majorVersion,\n final long controlSessionId,\n final long correlationId,\n final long connectTimeoutMs,\n final String invalidVersionMessage,\n final ControlSessionDemuxer demuxer,\n final Publication controlPublication,\n final ArchiveConductor conductor,\n final CachedEpochClock cachedEpochClock,\n final ControlResponseProxy controlResponseProxy,\n final Authenticator authenticator,\n final ControlSessionProxy controlSessionProxy)\n {\n this.majorVersion = majorVersion;\n this.controlSessionId = controlSessionId;\n this.correlationId = correlationId;\n this.connectTimeoutMs = connectTimeoutMs;\n this.invalidVersionMessage = invalidVersionMessage;\n this.demuxer = demuxer;\n this.controlPublication = controlPublication;\n this.conductor = conductor;\n this.cachedEpochClock = cachedEpochClock;\n this.controlResponseProxy = controlResponseProxy;\n this.authenticator = authenticator;\n this.controlSessionProxy = controlSessionProxy;\n this.activityDeadlineMs = cachedEpochClock.time() + connectTimeoutMs;\n }\n\n public int majorVersion()\n {\n return majorVersion;\n }\n\n public long sessionId()\n {\n return controlSessionId;\n }\n\n public long correlationId()\n {\n return correlationId;\n }\n\n public void abort()\n {\n state(State.INACTIVE);\n if (null != activeListing)\n {\n activeListing.abort();\n }\n }\n\n public void close()\n {\n final CountedErrorHandler errorHandler = conductor.context().countedErrorHandler();\n if (null != activeListing)\n {\n AeronCloseHelper.close(errorHandler, activeListing::abort);\n }\n\n AeronCloseHelper.close(errorHandler, controlPublication);\n\n state(State.CLOSED);\n demuxer.removeControlSession(this);\n }\n\n public boolean isDone()\n {\n return state == State.INACTIVE;\n }\n\n public int doWork()\n {\n int workCount = 0;\n final long nowMs = cachedEpochClock.time();\n\n switch (state)\n {\n case INIT:\n workCount += waitForConnection(nowMs);\n break;\n\n case CONNECTED:\n workCount += 
sendConnectResponse(nowMs);\n break;\n\n case CHALLENGED:\n workCount += waitForChallengeResponse(nowMs);\n break;\n\n case AUTHENTICATED:\n workCount += waitForRequest(nowMs);\n break;\n\n case ACTIVE:\n workCount += sendQueuedResponses(nowMs);\n break;\n\n case REJECTED:\n workCount += sendReject(nowMs);\n break;\n }\n\n return workCount;\n }\n\n ArchiveConductor archiveConductor()\n {\n return conductor;\n }\n\n Publication controlPublication()\n {\n return controlPublication;\n }\n\n boolean hasActiveListing()\n {\n return null != activeListing;\n }\n\n void activeListing(final Session activeListing)\n {\n this.activeListing = activeListing;\n }\n\n @SuppressWarnings(\"unused\")\n void onChallengeResponse(final long correlationId, final byte[] encodedCredentials)\n {\n if (State.CHALLENGED == state)\n {\n authenticator.onChallengeResponse(controlSessionId, encodedCredentials, cachedEpochClock.time());\n this.correlationId = correlationId;\n }\n }\n\n @SuppressWarnings(\"unused\")\n void onKeepAlive(final long correlationId)\n {\n attemptToGoActive();\n }\n\n void onStopRecording(final long correlationId, final int streamId, final String channel)\n {\n attemptToGoActive();\n if (State.ACTIVE == state)\n {\n conductor.stopRecording(correlationId, streamId, channel, this);\n }\n }\n\n void onStopRecordingSubscription(final long correlationId, final long subscriptionId)\n {\n attemptToGoActive();\n if (State.ACTIVE == state)\n {\n conductor.stopRecordingSubscription(correlationId, subscriptionId, this);\n }\n }\n\n void onStartRecording(\n final long correlationId, final int streamId, final SourceLocation sourceLocation, final String channel)\n {\n attemptToGoActive();\n if (State.ACTIVE == state)\n {\n conductor.startRecording(correlationId, streamId, sourceLocation, channel, this);\n }\n }\n\n void onListRecordingsForUri(\n final long correlationId,\n final long fromRecordingId,\n final int recordCount,\n final int streamId,\n final byte[] channelFragment)\n {\n 
attemptToGoActive();\n if (State.ACTIVE == state)\n {\n conductor.newListRecordingsForUriSession(\n correlationId,\n fromRecordingId,\n recordCount,\n streamId,\n channelFragment,\n this);\n }\n }\n\n void onListRecordings(final long correlationId, final long fromRecordingId, final int recordCount)\n {\n attemptToGoActive();\n if (State.ACTIVE == state)\n {\n conductor.newListRecordingsSession(correlationId, fromRecordingId, recordCount, this);\n }\n }\n\n void onListRecording(final long correlationId, final long recordingId)\n {\n attemptToGoActive();\n if (State.ACTIVE == state)\n {\n conductor.listRecording(correlationId, recordingId, this);\n }\n }\n\n void onFindLastMatchingRecording(\n final long correlationId,\n final long minRecordingId,\n final int sessionId,\n final int streamId,\n final byte[] channelFragment)\n {\n attemptToGoActive();\n if (State.ACTIVE == state)\n {\n conductor.findLastMatchingRecording(\n correlationId,\n minRecordingId,\n sessionId,\n streamId,\n channelFragment,\n this);\n }\n }\n\n void onStartReplay(\n final long correlationId,\n final long recordingId,\n final long position,\n final long length,\n final int replayStreamId,\n final String replayChannel)\n {\n attemptToGoActive();\n if (State.ACTIVE == state)\n {\n conductor.startReplay(\n correlationId, recordingId, position, length, replayStreamId, replayChannel, this);\n }\n }\n\n void onStartBoundedReplay(\n final long correlationId,\n final long recordingId,\n final long position,\n final long length,\n final int limitCounterId,\n final int replayStreamId,\n final String replayChannel)\n {\n attemptToGoActive();\n if (State.ACTIVE == state)\n {\n conductor.startBoundedReplay(\n correlationId,\n recordingId,\n position,\n length,\n limitCounterId,\n replayStreamId,\n replayChannel,\n this);\n }\n }\n\n void onStopReplay(final long correlationId, final long replaySessionId)\n {\n attemptToGoActive();\n if (State.ACTIVE == state)\n {\n conductor.stopReplay(correlationId, 
replaySessionId, this);\n }\n }\n\n void onStopAllReplays(final long correlationId, final long recordingId)\n {\n attemptToGoActive();\n if (State.ACTIVE == state)\n {\n conductor.stopAllReplays(correlationId, recordingId, this);\n }\n }\n\n void onExtendRecording(\n final long correlationId,\n final long recordingId,\n final int streamId,\n final SourceLocation sourceLocation,\n final String channel)\n {\n attemptToGoActive();\n if (State.ACTIVE == state)\n {\n conductor.extendRecording(correlationId, recordingId, streamId, sourceLocation, channel, this);\n }\n }\n\n void onGetRecordingPosition(final long correlationId, final long recordingId)\n {\n attemptToGoActive();\n if (State.ACTIVE == state)\n {\n conductor.getRecordingPosition(correlationId, recordingId, this);\n }\n }\n\n void onTruncateRecording(final long correlationId, final long recordingId, final long position)\n {\n attemptToGoActive();\n if (State.ACTIVE == state)\n {\n conductor.truncateRecording(correlationId, recordingId, position, this);\n }\n }\n\n void onGetStopPosition(final long correlationId, final long recordingId)\n {\n attemptToGoActive();\n if (State.ACTIVE == state)\n {\n conductor.getStopPosition(correlationId, recordingId, this);\n }\n }\n\n void onListRecordingSubscriptions(\n final long correlationId,\n final int pseudoIndex,\n final int subscriptionCount,\n final boolean applyStreamId,\n final int streamId,\n final String channelFragment)\n {\n attemptToGoActive();\n if (State.ACTIVE == state)\n {\n conductor.listRecordingSubscriptions(\n correlationId,\n pseudoIndex,\n subscriptionCount,\n applyStreamId,\n streamId,\n channelFragment,\n this);\n }\n }\n\n void onReplicate(\n final long correlationId,\n final long srcRecordingId,\n final long dstRecordingId,\n final int srcControlStreamId,\n final String srcControlChannel,\n final String liveDestination)\n {\n attemptToGoActive();\n if (State.ACTIVE == state)\n {\n conductor.replicate(\n correlationId,\n srcRecordingId,\n 
dstRecordingId,\n Aeron.NULL_VALUE,\n Aeron.NULL_VALUE,\n srcControlStreamId,\n srcControlChannel,\n liveDestination,\n this);\n }\n }\n\n void onStopReplication(final long correlationId, final long replicationId)\n {\n attemptToGoActive();\n if (State.ACTIVE == state)\n {\n conductor.stopReplication(correlationId, replicationId, this);\n }\n }\n\n void onGetStartPosition(final long correlationId, final long recordingId)\n {\n attemptToGoActive();\n if (State.ACTIVE == state)\n {\n conductor.getStartPosition(correlationId, recordingId, this);\n }\n }\n\n void onDetachSegments(final long correlationId, final long recordingId, final long newStartPosition)\n {\n attemptToGoActive();\n if (State.ACTIVE == state)\n {\n conductor.detachSegments(correlationId, recordingId, newStartPosition, this);\n }\n }\n\n void onDeleteDetachedSegments(final long correlationId, final long recordingId)\n {\n attemptToGoActive();\n if (State.ACTIVE == state)\n {\n conductor.deleteDetachedSegments(correlationId, recordingId, this);\n }\n }\n\n void onPurgeSegments(final long correlationId, final long recordingId, final long newStartPosition)\n {\n attemptToGoActive();\n if (State.ACTIVE == state)\n {\n conductor.purgeSegments(correlationId, recordingId, newStartPosition, this);\n }\n }\n\n void onAttachSegments(final long correlationId, final long recordingId)\n {\n attemptToGoActive();\n if (State.ACTIVE == state)\n {\n conductor.attachSegments(correlationId, recordingId, this);\n }\n }\n\n void onMigrateSegments(final long correlationId, final long srcRecordingId, final long dstRecordingId)\n {\n attemptToGoActive();\n if (State.ACTIVE == state)\n {\n conductor.migrateSegments(correlationId, srcRecordingId, dstRecordingId, this);\n }\n }\n\n void onReplicateTagged(\n final long correlationId,\n final long srcRecordingId,\n final long dstRecordingId,\n final long channelTagId,\n final long subscriptionTagId,\n final int srcControlStreamId,\n final String srcControlChannel,\n final String 
liveDestination)\n {\n attemptToGoActive();\n if (State.ACTIVE == state)\n {\n conductor.replicate(\n correlationId,\n srcRecordingId,\n dstRecordingId,\n channelTagId,\n subscriptionTagId,\n srcControlStreamId,\n srcControlChannel,\n liveDestination,\n this);\n }\n }\n\n void sendOkResponse(final long correlationId, final ControlResponseProxy proxy)\n {\n sendResponse(correlationId, 0L, OK, null, proxy);\n }\n\n void sendOkResponse(final long correlationId, final long relevantId, final ControlResponseProxy proxy)\n {\n sendResponse(correlationId, relevantId, OK, null, proxy);\n }\n\n void sendErrorResponse(final long correlationId, final String errorMessage, final ControlResponseProxy proxy)\n {\n sendResponse(correlationId, 0L, ERROR, errorMessage, proxy);\n }\n\n void sendErrorResponse(\n final long correlationId, final long relevantId, final String errorMessage, final ControlResponseProxy proxy)\n {\n sendResponse(correlationId, relevantId, ERROR, errorMessage, proxy);\n }\n\n void sendRecordingUnknown(final long correlationId, final long recordingId, final ControlResponseProxy proxy)\n {\n sendResponse(correlationId, recordingId, RECORDING_UNKNOWN, null, proxy);\n }\n\n void sendSubscriptionUnknown(final long correlationId, final ControlResponseProxy proxy)\n {\n sendResponse(correlationId, 0L, SUBSCRIPTION_UNKNOWN, null, proxy);\n }\n\n void sendResponse(\n final long correlationId,\n final long relevantId,\n final ControlResponseCode code,\n final String errorMessage,\n final ControlResponseProxy proxy)\n {\n if (!proxy.sendResponse(controlSessionId, correlationId, relevantId, code, errorMessage, this))\n {\n queueResponse(correlationId, relevantId, code, errorMessage);\n }\n }\n\n void attemptErrorResponse(final long correlationId, final String errorMessage, final ControlResponseProxy proxy)\n {\n proxy.sendResponse(controlSessionId, correlationId, GENERIC, ERROR, errorMessage, this);\n }\n\n void attemptErrorResponse(\n final long correlationId, final long 
relevantId, final String errorMessage, final ControlResponseProxy proxy)\n {\n proxy.sendResponse(controlSessionId, correlationId, relevantId, ERROR, errorMessage, this);\n }\n\n int sendDescriptor(final long correlationId, final UnsafeBuffer descriptorBuffer, final ControlResponseProxy proxy)\n {\n return proxy.sendDescriptor(controlSessionId, correlationId, descriptorBuffer, this);\n }\n\n boolean sendSubscriptionDescriptor(\n final long correlationId, final Subscription subscription, final ControlResponseProxy proxy)\n {\n return proxy.sendSubscriptionDescriptor(controlSessionId, correlationId, subscription, this);\n }\n\n void attemptSignal(\n final long correlationId,\n final long recordingId,\n final long subscriptionId,\n final long position,\n final RecordingSignal recordingSignal)\n {\n controlResponseProxy.attemptSendSignal(\n controlSessionId,\n correlationId,\n recordingId,\n subscriptionId,\n position,\n recordingSignal,\n controlPublication);\n }\n\n int maxPayloadLength()\n {\n return controlPublication.maxPayloadLength();\n }\n\n void challenged()\n {\n state(State.CHALLENGED);\n }\n\n @SuppressWarnings(\"unused\")\n void authenticate(final byte[] encodedPrincipal)\n {\n activityDeadlineMs = Aeron.NULL_VALUE;\n state(State.AUTHENTICATED);\n }\n\n void reject()\n {\n state(State.REJECTED);\n }\n\n State state()\n {\n return state;\n }\n\n private void queueResponse(\n final long correlationId, final long relevantId, final ControlResponseCode code, final String message)\n {\n queuedResponses.offer(() -> controlResponseProxy.sendResponse(\n controlSessionId,\n correlationId,\n relevantId,\n code,\n message,\n this));\n }\n\n private int waitForConnection(final long nowMs)\n {\n int workCount = 0;\n\n if (controlPublication.isConnected())\n {\n state(State.CONNECTED);\n workCount += 1;\n }\n else if (hasNoActivity(nowMs))\n {\n state(State.INACTIVE);\n workCount += 1;\n }\n\n return workCount;\n }\n\n private int sendConnectResponse(final long nowMs)\n 
{\n int workCount = 0;\n\n if (hasNoActivity(nowMs))\n {\n state(State.INACTIVE);\n workCount += 1;\n }\n else if (nowMs > resendDeadlineMs)\n {\n resendDeadlineMs = nowMs + RESEND_INTERVAL_MS;\n if (null != invalidVersionMessage)\n {\n controlResponseProxy.sendResponse(\n controlSessionId,\n correlationId,\n controlSessionId,\n ERROR,\n invalidVersionMessage,\n this);\n }\n else\n {\n authenticator.onConnectedSession(controlSessionProxy.controlSession(this), nowMs);\n }\n\n workCount += 1;\n }\n\n return workCount;\n }\n\n private int waitForChallengeResponse(final long nowMs)\n {\n if (hasNoActivity(nowMs))\n {\n state(State.INACTIVE);\n }\n else\n {\n authenticator.onChallengedSession(controlSessionProxy.controlSession(this), nowMs);\n }\n\n return 1;\n }\n\n private int waitForRequest(final long nowMs)\n {\n int workCount = 0;\n\n if (hasNoActivity(nowMs))\n {\n state(State.INACTIVE);\n workCount += 1;\n }\n else if (nowMs > resendDeadlineMs)\n {\n resendDeadlineMs = nowMs + RESEND_INTERVAL_MS;\n if (controlResponseProxy.sendResponse(\n controlSessionId,\n correlationId,\n controlSessionId,\n OK,\n null,\n this))\n {\n activityDeadlineMs = Aeron.NULL_VALUE;\n workCount += 1;\n }\n }\n\n return workCount;\n }\n\n private int sendQueuedResponses(final long nowMs)\n {\n int workCount = 0;\n\n if (!controlPublication.isConnected())\n {\n state(State.INACTIVE);\n }\n else\n {\n if (!queuedResponses.isEmpty())\n {\n if (queuedResponses.peekFirst().getAsBoolean())\n {\n queuedResponses.pollFirst();\n activityDeadlineMs = Aeron.NULL_VALUE;\n workCount++;\n }\n else if (activityDeadlineMs == Aeron.NULL_VALUE)\n {\n activityDeadlineMs = nowMs + connectTimeoutMs;\n }\n else if (hasNoActivity(nowMs))\n {\n state(State.INACTIVE);\n }\n }\n }\n\n return workCount;\n }\n\n private int sendReject(final long nowMs)\n {\n int workCount = 0;\n\n if (hasNoActivity(nowMs))\n {\n state(State.INACTIVE);\n workCount += 1;\n }\n else if (nowMs > resendDeadlineMs)\n {\n resendDeadlineMs 
= nowMs + RESEND_INTERVAL_MS;\n controlResponseProxy.sendResponse(\n controlSessionId,\n correlationId,\n AUTHENTICATION_REJECTED,\n ERROR,\n SESSION_REJECTED_MSG,\n this);\n\n workCount += 1;\n }\n\n return workCount;\n }\n\n private boolean hasNoActivity(final long nowMs)\n {\n return Aeron.NULL_VALUE != activityDeadlineMs & nowMs > activityDeadlineMs;\n }\n\n private void attemptToGoActive()\n {\n if (State.AUTHENTICATED == state && null == invalidVersionMessage)\n {\n state(State.ACTIVE);\n }\n }\n\n private void state(final State state)\n {\n //System.out.println(controlSessionId + \": \" + this.state + \" -> \" + state);\n this.state = state;\n }\n\n public String toString()\n {\n return \"ControlSession{\" +\n \"controlSessionId=\" + controlSessionId +\n \", correlationId=\" + correlationId +\n \", state=\" + state +\n \", controlPublication=\" + controlPublication +\n '}';\n }\n}\n"},"new_file":{"kind":"string","value":"aeron-archive/src/main/java/io/aeron/archive/ControlSession.java"},"old_contents":{"kind":"string","value":"/*\n * Copyright 2014-2020 Real Logic Limited.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage io.aeron.archive;\n\nimport io.aeron.Aeron;\nimport io.aeron.AeronCloseHelper;\nimport io.aeron.Publication;\nimport io.aeron.Subscription;\nimport io.aeron.archive.codecs.ControlResponseCode;\nimport io.aeron.archive.codecs.RecordingSignal;\nimport io.aeron.archive.codecs.SourceLocation;\nimport 
io.aeron.security.Authenticator;\nimport org.agrona.concurrent.CachedEpochClock;\nimport org.agrona.concurrent.CountedErrorHandler;\nimport org.agrona.concurrent.UnsafeBuffer;\n\nimport java.util.ArrayDeque;\nimport java.util.function.BooleanSupplier;\n\nimport static io.aeron.archive.client.ArchiveException.AUTHENTICATION_REJECTED;\nimport static io.aeron.archive.client.ArchiveException.GENERIC;\nimport static io.aeron.archive.codecs.ControlResponseCode.*;\n\n/**\n * Control sessions are interacted with from the {@link ArchiveConductor}. The interaction may result in pending\n * send actions being queued for execution by the {@link ArchiveConductor}.\n */\nclass ControlSession implements Session\n{\n private static final long RESEND_INTERVAL_MS = 200L;\n private static final String SESSION_REJECTED_MSG = \"authentication rejected\";\n\n enum State\n {\n INIT, CONNECTED, CHALLENGED, AUTHENTICATED, ACTIVE, INACTIVE, REJECTED, CLOSED\n }\n\n private final int majorVersion;\n private final long controlSessionId;\n private final long connectTimeoutMs;\n private long correlationId;\n private long resendDeadlineMs;\n private long activityDeadlineMs;\n private Session activeListing = null;\n private final ArchiveConductor conductor;\n private final CachedEpochClock cachedEpochClock;\n private final ControlResponseProxy controlResponseProxy;\n private final Authenticator authenticator;\n private final ControlSessionProxy controlSessionProxy;\n private final ArrayDeque queuedResponses = new ArrayDeque<>(8);\n private final ControlSessionDemuxer demuxer;\n private final Publication controlPublication;\n private final String invalidVersionMessage;\n private State state = State.INIT;\n\n ControlSession(\n final int majorVersion,\n final long controlSessionId,\n final long correlationId,\n final long connectTimeoutMs,\n final String invalidVersionMessage,\n final ControlSessionDemuxer demuxer,\n final Publication controlPublication,\n final ArchiveConductor conductor,\n final 
CachedEpochClock cachedEpochClock,\n final ControlResponseProxy controlResponseProxy,\n final Authenticator authenticator,\n final ControlSessionProxy controlSessionProxy)\n {\n this.majorVersion = majorVersion;\n this.controlSessionId = controlSessionId;\n this.correlationId = correlationId;\n this.connectTimeoutMs = connectTimeoutMs;\n this.invalidVersionMessage = invalidVersionMessage;\n this.demuxer = demuxer;\n this.controlPublication = controlPublication;\n this.conductor = conductor;\n this.cachedEpochClock = cachedEpochClock;\n this.controlResponseProxy = controlResponseProxy;\n this.authenticator = authenticator;\n this.controlSessionProxy = controlSessionProxy;\n this.activityDeadlineMs = cachedEpochClock.time() + connectTimeoutMs;\n }\n\n public int majorVersion()\n {\n return majorVersion;\n }\n\n public long sessionId()\n {\n return controlSessionId;\n }\n\n public long correlationId()\n {\n return correlationId;\n }\n\n public void abort()\n {\n state(State.INACTIVE);\n if (null != activeListing)\n {\n activeListing.abort();\n }\n }\n\n public void close()\n {\n final CountedErrorHandler errorHandler = conductor.context().countedErrorHandler();\n if (null != activeListing)\n {\n AeronCloseHelper.close(errorHandler, activeListing::abort);\n }\n\n AeronCloseHelper.close(errorHandler, controlPublication);\n\n state(State.CLOSED);\n demuxer.removeControlSession(this);\n }\n\n public boolean isDone()\n {\n return state == State.INACTIVE;\n }\n\n public int doWork()\n {\n int workCount = 0;\n final long nowMs = cachedEpochClock.time();\n\n switch (state)\n {\n case INIT:\n workCount += waitForConnection(nowMs);\n break;\n\n case CONNECTED:\n workCount += sendConnectResponse(nowMs);\n break;\n\n case CHALLENGED:\n workCount += waitForChallengeResponse(nowMs);\n break;\n\n case AUTHENTICATED:\n workCount += waitForRequest(nowMs);\n break;\n\n case ACTIVE:\n workCount += sendQueuedResponses(nowMs);\n break;\n\n case REJECTED:\n workCount += 
sendReject(nowMs);\n break;\n }\n\n return workCount;\n }\n\n ArchiveConductor archiveConductor()\n {\n return conductor;\n }\n\n Publication controlPublication()\n {\n return controlPublication;\n }\n\n boolean hasActiveListing()\n {\n return null != activeListing;\n }\n\n void activeListing(final Session activeListing)\n {\n this.activeListing = activeListing;\n }\n\n @SuppressWarnings(\"unused\")\n void onChallengeResponse(final long correlationId, final byte[] encodedCredentials)\n {\n if (State.CHALLENGED == state)\n {\n authenticator.onChallengeResponse(controlSessionId, encodedCredentials, cachedEpochClock.time());\n this.correlationId = correlationId;\n }\n }\n\n @SuppressWarnings(\"unused\")\n void onKeepAlive(final long correlationId)\n {\n attemptToGoActive();\n }\n\n void onStopRecording(final long correlationId, final int streamId, final String channel)\n {\n attemptToGoActive();\n if (State.ACTIVE == state)\n {\n conductor.stopRecording(correlationId, streamId, channel, this);\n }\n }\n\n void onStopRecordingSubscription(final long correlationId, final long subscriptionId)\n {\n attemptToGoActive();\n if (State.ACTIVE == state)\n {\n conductor.stopRecordingSubscription(correlationId, subscriptionId, this);\n }\n }\n\n void onStartRecording(\n final long correlationId, final int streamId, final SourceLocation sourceLocation, final String channel)\n {\n attemptToGoActive();\n if (State.ACTIVE == state)\n {\n conductor.startRecording(correlationId, streamId, sourceLocation, channel, this);\n }\n }\n\n void onListRecordingsForUri(\n final long correlationId,\n final long fromRecordingId,\n final int recordCount,\n final int streamId,\n final byte[] channelFragment)\n {\n attemptToGoActive();\n if (State.ACTIVE == state)\n {\n conductor.newListRecordingsForUriSession(\n correlationId,\n fromRecordingId,\n recordCount,\n streamId,\n channelFragment,\n this);\n }\n }\n\n void onListRecordings(final long correlationId, final long fromRecordingId, final int 
recordCount)\n {\n attemptToGoActive();\n if (State.ACTIVE == state)\n {\n conductor.newListRecordingsSession(correlationId, fromRecordingId, recordCount, this);\n }\n }\n\n void onListRecording(final long correlationId, final long recordingId)\n {\n attemptToGoActive();\n if (State.ACTIVE == state)\n {\n conductor.listRecording(correlationId, recordingId, this);\n }\n }\n\n void onFindLastMatchingRecording(\n final long correlationId,\n final long minRecordingId,\n final int sessionId,\n final int streamId,\n final byte[] channelFragment)\n {\n attemptToGoActive();\n if (State.ACTIVE == state)\n {\n conductor.findLastMatchingRecording(\n correlationId,\n minRecordingId,\n sessionId,\n streamId,\n channelFragment,\n this);\n }\n }\n\n void onStartReplay(\n final long correlationId,\n final long recordingId,\n final long position,\n final long length,\n final int replayStreamId,\n final String replayChannel)\n {\n attemptToGoActive();\n if (State.ACTIVE == state)\n {\n conductor.startReplay(\n correlationId, recordingId, position, length, replayStreamId, replayChannel, this);\n }\n }\n\n void onStartBoundedReplay(\n final long correlationId,\n final long recordingId,\n final long position,\n final long length,\n final int limitCounterId,\n final int replayStreamId,\n final String replayChannel)\n {\n attemptToGoActive();\n if (State.ACTIVE == state)\n {\n conductor.startBoundedReplay(\n correlationId,\n recordingId,\n position,\n length,\n limitCounterId,\n replayStreamId,\n replayChannel,\n this);\n }\n }\n\n void onStopReplay(final long correlationId, final long replaySessionId)\n {\n attemptToGoActive();\n if (State.ACTIVE == state)\n {\n conductor.stopReplay(correlationId, replaySessionId, this);\n }\n }\n\n void onStopAllReplays(final long correlationId, final long recordingId)\n {\n attemptToGoActive();\n if (State.ACTIVE == state)\n {\n conductor.stopAllReplays(correlationId, recordingId, this);\n }\n }\n\n void onExtendRecording(\n final long 
correlationId,\n final long recordingId,\n final int streamId,\n final SourceLocation sourceLocation,\n final String channel)\n {\n attemptToGoActive();\n if (State.ACTIVE == state)\n {\n conductor.extendRecording(correlationId, recordingId, streamId, sourceLocation, channel, this);\n }\n }\n\n void onGetRecordingPosition(final long correlationId, final long recordingId)\n {\n attemptToGoActive();\n if (State.ACTIVE == state)\n {\n conductor.getRecordingPosition(correlationId, recordingId, this);\n }\n }\n\n void onTruncateRecording(final long correlationId, final long recordingId, final long position)\n {\n attemptToGoActive();\n if (State.ACTIVE == state)\n {\n conductor.truncateRecording(correlationId, recordingId, position, this);\n }\n }\n\n void onGetStopPosition(final long correlationId, final long recordingId)\n {\n attemptToGoActive();\n if (State.ACTIVE == state)\n {\n conductor.getStopPosition(correlationId, recordingId, this);\n }\n }\n\n void onListRecordingSubscriptions(\n final long correlationId,\n final int pseudoIndex,\n final int subscriptionCount,\n final boolean applyStreamId,\n final int streamId,\n final String channelFragment)\n {\n attemptToGoActive();\n if (State.ACTIVE == state)\n {\n conductor.listRecordingSubscriptions(\n correlationId,\n pseudoIndex,\n subscriptionCount,\n applyStreamId,\n streamId,\n channelFragment,\n this);\n }\n }\n\n void onReplicate(\n final long correlationId,\n final long srcRecordingId,\n final long dstRecordingId,\n final int srcControlStreamId,\n final String srcControlChannel,\n final String liveDestination)\n {\n attemptToGoActive();\n if (State.ACTIVE == state)\n {\n conductor.replicate(\n correlationId,\n srcRecordingId,\n dstRecordingId,\n Aeron.NULL_VALUE,\n Aeron.NULL_VALUE,\n srcControlStreamId,\n srcControlChannel,\n liveDestination,\n this);\n }\n }\n\n void onStopReplication(final long correlationId, final long replicationId)\n {\n attemptToGoActive();\n if (State.ACTIVE == state)\n {\n 
conductor.stopReplication(correlationId, replicationId, this);\n }\n }\n\n void onGetStartPosition(final long correlationId, final long recordingId)\n {\n attemptToGoActive();\n if (State.ACTIVE == state)\n {\n conductor.getStartPosition(correlationId, recordingId, this);\n }\n }\n\n void onDetachSegments(final long correlationId, final long recordingId, final long newStartPosition)\n {\n attemptToGoActive();\n if (State.ACTIVE == state)\n {\n conductor.detachSegments(correlationId, recordingId, newStartPosition, this);\n }\n }\n\n void onDeleteDetachedSegments(final long correlationId, final long recordingId)\n {\n attemptToGoActive();\n if (State.ACTIVE == state)\n {\n conductor.deleteDetachedSegments(correlationId, recordingId, this);\n }\n }\n\n void onPurgeSegments(final long correlationId, final long recordingId, final long newStartPosition)\n {\n attemptToGoActive();\n if (State.ACTIVE == state)\n {\n conductor.purgeSegments(correlationId, recordingId, newStartPosition, this);\n }\n }\n\n void onAttachSegments(final long correlationId, final long recordingId)\n {\n attemptToGoActive();\n if (State.ACTIVE == state)\n {\n conductor.attachSegments(correlationId, recordingId, this);\n }\n }\n\n void onMigrateSegments(final long correlationId, final long srcRecordingId, final long dstRecordingId)\n {\n attemptToGoActive();\n if (State.ACTIVE == state)\n {\n conductor.migrateSegments(correlationId, srcRecordingId, dstRecordingId, this);\n }\n }\n\n void onReplicateTagged(\n final long correlationId,\n final long srcRecordingId,\n final long dstRecordingId,\n final long channelTagId,\n final long subscriptionTagId,\n final int srcControlStreamId,\n final String srcControlChannel,\n final String liveDestination)\n {\n attemptToGoActive();\n if (State.ACTIVE == state)\n {\n conductor.replicate(\n correlationId,\n srcRecordingId,\n dstRecordingId,\n channelTagId,\n subscriptionTagId,\n srcControlStreamId,\n srcControlChannel,\n liveDestination,\n this);\n }\n }\n\n 
void sendOkResponse(final long correlationId, final ControlResponseProxy proxy)\n {\n sendResponse(correlationId, 0L, OK, null, proxy);\n }\n\n void sendOkResponse(final long correlationId, final long relevantId, final ControlResponseProxy proxy)\n {\n sendResponse(correlationId, relevantId, OK, null, proxy);\n }\n\n void sendErrorResponse(final long correlationId, final String errorMessage, final ControlResponseProxy proxy)\n {\n sendResponse(correlationId, 0L, ERROR, errorMessage, proxy);\n }\n\n void sendErrorResponse(\n final long correlationId, final long relevantId, final String errorMessage, final ControlResponseProxy proxy)\n {\n sendResponse(correlationId, relevantId, ERROR, errorMessage, proxy);\n }\n\n void sendRecordingUnknown(final long correlationId, final long recordingId, final ControlResponseProxy proxy)\n {\n sendResponse(correlationId, recordingId, RECORDING_UNKNOWN, null, proxy);\n }\n\n void sendSubscriptionUnknown(final long correlationId, final ControlResponseProxy proxy)\n {\n sendResponse(correlationId, 0L, SUBSCRIPTION_UNKNOWN, null, proxy);\n }\n\n void sendResponse(\n final long correlationId,\n final long relevantId,\n final ControlResponseCode code,\n final String errorMessage,\n final ControlResponseProxy proxy)\n {\n if (!proxy.sendResponse(controlSessionId, correlationId, relevantId, code, errorMessage, this))\n {\n queueResponse(correlationId, relevantId, code, errorMessage);\n }\n }\n\n void attemptErrorResponse(final long correlationId, final String errorMessage, final ControlResponseProxy proxy)\n {\n proxy.sendResponse(controlSessionId, correlationId, GENERIC, ERROR, errorMessage, this);\n }\n\n void attemptErrorResponse(\n final long correlationId, final long relevantId, final String errorMessage, final ControlResponseProxy proxy)\n {\n proxy.sendResponse(controlSessionId, correlationId, relevantId, ERROR, errorMessage, this);\n }\n\n int sendDescriptor(final long correlationId, final UnsafeBuffer descriptorBuffer, final 
ControlResponseProxy proxy)\n {\n return proxy.sendDescriptor(controlSessionId, correlationId, descriptorBuffer, this);\n }\n\n boolean sendSubscriptionDescriptor(\n final long correlationId, final Subscription subscription, final ControlResponseProxy proxy)\n {\n return proxy.sendSubscriptionDescriptor(controlSessionId, correlationId, subscription, this);\n }\n\n void attemptSignal(\n final long correlationId,\n final long recordingId,\n final long subscriptionId,\n final long position,\n final RecordingSignal recordingSignal)\n {\n controlResponseProxy.attemptSendSignal(\n controlSessionId,\n correlationId,\n recordingId,\n subscriptionId,\n position,\n recordingSignal,\n controlPublication);\n }\n\n int maxPayloadLength()\n {\n return controlPublication.maxPayloadLength();\n }\n\n void challenged()\n {\n state(State.CHALLENGED);\n }\n\n @SuppressWarnings(\"unused\")\n void authenticate(final byte[] encodedPrincipal)\n {\n activityDeadlineMs = Aeron.NULL_VALUE;\n state(State.AUTHENTICATED);\n }\n\n void reject()\n {\n state(State.REJECTED);\n }\n\n State state()\n {\n return state;\n }\n\n private void queueResponse(\n final long correlationId, final long relevantId, final ControlResponseCode code, final String message)\n {\n queuedResponses.offer(() -> controlResponseProxy.sendResponse(\n controlSessionId,\n correlationId,\n relevantId,\n code,\n message,\n this));\n }\n\n private int waitForConnection(final long nowMs)\n {\n int workCount = 0;\n\n if (controlPublication.isConnected())\n {\n state(State.CONNECTED);\n workCount += 1;\n }\n else if (hasNoActivity(nowMs))\n {\n state(State.INACTIVE);\n workCount += 1;\n }\n\n return workCount;\n }\n\n private int sendConnectResponse(final long nowMs)\n {\n int workCount = 0;\n\n if (hasNoActivity(nowMs))\n {\n state(State.INACTIVE);\n workCount += 1;\n }\n else if (nowMs > resendDeadlineMs)\n {\n resendDeadlineMs = nowMs + RESEND_INTERVAL_MS;\n if (null != invalidVersionMessage)\n {\n 
controlResponseProxy.sendResponse(\n controlSessionId,\n correlationId,\n controlSessionId,\n ERROR,\n invalidVersionMessage,\n this);\n }\n else\n {\n authenticator.onConnectedSession(controlSessionProxy.controlSession(this), nowMs);\n }\n\n workCount += 1;\n }\n\n return workCount;\n }\n\n private int waitForChallengeResponse(final long nowMs)\n {\n int workCount = 0;\n\n if (hasNoActivity(nowMs))\n {\n state(State.INACTIVE);\n workCount += 1;\n }\n else\n {\n authenticator.onChallengedSession(controlSessionProxy.controlSession(this), nowMs);\n workCount += 1;\n }\n\n return workCount;\n }\n\n private int waitForRequest(final long nowMs)\n {\n int workCount = 0;\n\n if (hasNoActivity(nowMs))\n {\n state(State.INACTIVE);\n workCount += 1;\n }\n else if (nowMs > resendDeadlineMs)\n {\n resendDeadlineMs = nowMs + RESEND_INTERVAL_MS;\n if (controlResponseProxy.sendResponse(\n controlSessionId,\n correlationId,\n controlSessionId,\n OK,\n null,\n this))\n {\n activityDeadlineMs = Aeron.NULL_VALUE;\n workCount += 1;\n }\n }\n\n return workCount;\n }\n\n private int sendQueuedResponses(final long nowMs)\n {\n int workCount = 0;\n\n if (!controlPublication.isConnected())\n {\n state(State.INACTIVE);\n }\n else\n {\n if (!queuedResponses.isEmpty())\n {\n if (queuedResponses.peekFirst().getAsBoolean())\n {\n queuedResponses.pollFirst();\n activityDeadlineMs = Aeron.NULL_VALUE;\n workCount++;\n }\n else if (activityDeadlineMs == Aeron.NULL_VALUE)\n {\n activityDeadlineMs = nowMs + connectTimeoutMs;\n }\n else if (hasNoActivity(nowMs))\n {\n state(State.INACTIVE);\n }\n }\n }\n\n return workCount;\n }\n\n private int sendReject(final long nowMs)\n {\n int workCount = 0;\n\n if (hasNoActivity(nowMs))\n {\n state(State.INACTIVE);\n workCount += 1;\n }\n else if (nowMs > resendDeadlineMs)\n {\n resendDeadlineMs = nowMs + RESEND_INTERVAL_MS;\n controlResponseProxy.sendResponse(\n controlSessionId,\n correlationId,\n AUTHENTICATION_REJECTED,\n ERROR,\n SESSION_REJECTED_MSG,\n 
this);\n\n workCount += 1;\n }\n\n return workCount;\n }\n\n private boolean hasNoActivity(final long nowMs)\n {\n return Aeron.NULL_VALUE != activityDeadlineMs & nowMs > activityDeadlineMs;\n }\n\n private void attemptToGoActive()\n {\n if (State.AUTHENTICATED == state && null == invalidVersionMessage)\n {\n state(State.ACTIVE);\n }\n }\n\n private void state(final State state)\n {\n //System.out.println(controlSessionId + \": \" + this.state + \" -> \" + state);\n this.state = state;\n }\n\n public String toString()\n {\n return \"ControlSession{\" +\n \"controlSessionId=\" + controlSessionId +\n \", correlationId=\" + correlationId +\n \", state=\" + state +\n \", controlPublication=\" + controlPublication +\n '}';\n }\n}\n"},"message":{"kind":"string","value":"[Java] Clean up code to avoid confusion over paths in waitForChallengeResponse.\n"},"old_file":{"kind":"string","value":"aeron-archive/src/main/java/io/aeron/archive/ControlSession.java"},"subject":{"kind":"string","value":"[Java] Clean up code to avoid confusion over paths in waitForChallengeResponse."},"git_diff":{"kind":"string","value":"eron-archive/src/main/java/io/aeron/archive/ControlSession.java\n \n private int waitForChallengeResponse(final long nowMs)\n {\n int workCount = 0;\n\n if (hasNoActivity(nowMs))\n {\n state(State.INACTIVE);\n workCount += 1;\n }\n else\n {\n authenticator.onChallengedSession(controlSessionProxy.controlSession(this), nowMs);\n workCount += 1;\n }\n\n return workCount;\n }\n\n return 1;\n }\n \n private int waitForRequest(final long 
nowMs)"}}},{"rowIdx":2052,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"7001dbb3289165c50693269933d2c73ec73d8dc1"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"Br3nda/plow,chadmv/plow,chadmv/plow,chadmv/plow,chadmv/plow,chadmv/plow,chadmv/plow,chadmv/plow,Br3nda/plow,Br3nda/plow,Br3nda/plow,Br3nda/plow"},"new_contents":{"kind":"string","value":"package com.breakersoft.plow.dao.pgsql;\n\nimport java.sql.Connection;\nimport java.sql.PreparedStatement;\nimport java.sql.ResultSet;\nimport java.sql.SQLException;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.UUID;\n\nimport org.springframework.jdbc.core.PreparedStatementCreator;\nimport org.springframework.jdbc.core.RowMapper;\nimport org.springframework.jdbc.support.rowset.SqlRowSet;\nimport org.springframework.stereotype.Repository;\n\nimport com.breakersoft.plow.Folder;\nimport com.breakersoft.plow.Job;\nimport com.breakersoft.plow.JobE;\nimport com.breakersoft.plow.Project;\nimport com.breakersoft.plow.dao.AbstractDao;\nimport com.breakersoft.plow.dao.JobDao;\nimport com.breakersoft.plow.exceptions.InvalidBlueprintException;\nimport com.breakersoft.plow.thrift.JobSpecT;\nimport com.breakersoft.plow.thrift.JobState;\nimport com.breakersoft.plow.thrift.TaskState;\nimport com.breakersoft.plow.util.JdbcUtils;\nimport com.google.common.collect.Lists;\nimport com.google.common.collect.Maps;\n\n@Repository\npublic final class JobDaoImpl extends AbstractDao implements JobDao {\n\n public static final RowMapper MAPPER = new RowMapper() {\n\n @Override\n public Job mapRow(ResultSet rs, int rowNum)\n throws SQLException {\n JobE job = new JobE();\n job.setJobId((UUID) rs.getObject(1));\n job.setProjectId((UUID) rs.getObject(2));\n job.setFolderId((UUID) rs.getObject(3));\n return job;\n }\n };\n\n private static final String GET =\n \"SELECT \" 
+\n \"pk_job,\"+\n \"pk_project, \" +\n \"pk_folder \" +\n \"FROM \" +\n \"plow.job \";\n\n @Override\n public Job get(String name, JobState state) {\n return jdbc.queryForObject(\n GET + \"WHERE str_name=? AND int_state=?\",\n MAPPER, name, state.ordinal());\n }\n\n @Override\n public Job getActive(String name) {\n return jdbc.queryForObject(\n GET + \"WHERE str_active_name=?\", MAPPER, name);\n }\n\n @Override\n public Job getActive(UUID id) {\n return jdbc.queryForObject(\n GET + \"WHERE pk_job=? AND int_state!=?\", MAPPER,\n id, JobState.FINISHED.ordinal());\n }\n\n @Override\n public Job getByActiveNameOrId(String identifer) {\n try {\n return getActive(UUID.fromString(identifer));\n } catch (IllegalArgumentException e) {\n return getActive(identifer);\n }\n }\n\n @Override\n public Job get(UUID id) {\n return jdbc.queryForObject(\n GET + \"WHERE pk_job=?\",\n MAPPER, id);\n }\n\n @Override\n public void setPaused(Job job, boolean value) {\n jdbc.update(\"UPDATE plow.job SET bool_paused=? 
WHERE pk_job=?\",\n value, job.getJobId());\n }\n\n private static final String INSERT[] = {\n JdbcUtils.Insert(\"plow.job\",\n \"pk_job\", \"pk_project\", \"str_name\", \"str_active_name\",\n \"str_username\", \"int_uid\", \"int_state\", \"bool_paused\",\n \"str_log_path\", \"attrs\")\n };\n\n @Override\n public Job create(final Project project, final JobSpecT spec) {\n\n final UUID jobId = UUID.randomUUID();\n\n jdbc.update(new PreparedStatementCreator() {\n @Override\n public PreparedStatement createPreparedStatement(final Connection conn) throws SQLException {\n final PreparedStatement ret = conn.prepareStatement(INSERT[0]);\n ret.setObject(1, jobId);\n ret.setObject(2, project.getProjectId());\n ret.setString(3, spec.getName());\n ret.setString(4, spec.getName());\n ret.setString(5, spec.username);\n ret.setInt(6, spec.getUid());\n ret.setInt(7, JobState.INITIALIZE.ordinal());\n ret.setBoolean(8, spec.isPaused());\n ret.setString(9, String.format(\"%s/%s\", spec.logPath, spec.name));\n ret.setObject(10, spec.attrs);\n return ret;\n }\n });\n\n jdbc.update(\"INSERT INTO plow.job_count (pk_job) VALUES (?)\", jobId);\n jdbc.update(\"INSERT INTO plow.job_dsp (pk_job) VALUES (?)\", jobId);\n jdbc.update(\"INSERT INTO plow.job_ping (pk_job) VALUES (?)\", jobId);\n\n final JobE job = new JobE();\n job.setJobId(jobId);\n job.setProjectId(project.getProjectId());\n job.setFolderId(null); // Don't know folder yet\n return job;\n }\n\n private static final String UPDATE_ATTRS =\n \"UPDATE \" +\n \"plow.job \" +\n \"SET \" +\n \"attrs = ? 
\" +\n \"WHERE \" +\n \"pk_job=?\";\n\n @Override\n public void setAttrs(final Job job, final Map attrs) {\n jdbc.update(new PreparedStatementCreator() {\n @Override\n public PreparedStatement createPreparedStatement(final Connection conn) throws SQLException {\n final PreparedStatement ret = conn.prepareStatement(UPDATE_ATTRS);\n ret.setObject(1, attrs);\n ret.setObject(2, job.getJobId());\n return ret;\n }\n });\n }\n\n @Override\n public Map getAttrs(final Job job) {\n return jdbc.queryForObject(\n \"SELECT attrs FROM plow.job WHERE job.pk_job=?\",\n new RowMapper>() {\n\n @Override\n public Map mapRow(ResultSet rs, int rowNum)\n throws SQLException {\n @SuppressWarnings(\"unchecked\")\n Map result = (Map) rs.getObject(\"attrs\");\n return result;\n }\n\n }, job.getJobId());\n }\n\n @Override\n public void updateFolder(Job job, Folder folder) {\n jdbc.update(\"UPDATE plow.job SET pk_folder=? WHERE pk_job=?\",\n folder.getFolderId(), job.getJobId());\n }\n\n @Override\n public boolean setJobState(Job job, JobState state) {\n return jdbc.update(\"UPDATE plow.job SET int_state=? WHERE pk_job=?\",\n state.ordinal(), job.getJobId()) == 1;\n }\n\n @Override\n public boolean shutdown(Job job) {\n return jdbc.update(\"UPDATE plow.job SET int_state=?, \" +\n \"str_active_name=NULL, time_stopped=plow.txTimeMillis() WHERE pk_job=? AND int_state=?\",\n JobState.FINISHED.ordinal(), job.getJobId(), JobState.RUNNING.ordinal()) == 1;\n }\n\n @Override\n public void updateFrameStatesForLaunch(Job job) {\n jdbc.update(\"UPDATE plow.task SET int_state=? WHERE pk_layer \" +\n \"IN (SELECT pk_layer FROM plow.layer WHERE pk_job=?)\",\n TaskState.WAITING.ordinal(), job.getJobId());\n }\n\n private static final String GET_FRAME_STATUS_COUNTS =\n \"SELECT \" +\n \"COUNT(1) AS c, \" +\n \"task.int_state, \" +\n \"task.pk_layer \" +\n \"FROM \" +\n \"plow.task,\" +\n \"plow.layer \" +\n \"WHERE \" +\n \"task.pk_layer = layer.pk_layer \" +\n \"AND \"+\n \"layer.pk_job=? 
\" +\n \"GROUP BY \" +\n \"task.int_state,\"+\n \"task.pk_layer\";\n\n @Override\n public void updateFrameCountsForLaunch(Job job) {\n\n Map jobRollup = Maps.newHashMap();\n Map> layerRollup = Maps.newHashMap();\n\n List> taskCounts = jdbc.queryForList(\n GET_FRAME_STATUS_COUNTS, job.getJobId());\n\n if (taskCounts.isEmpty()) {\n throw new InvalidBlueprintException(\"The job contains no tasks.\");\n }\n\n for (Map entry: taskCounts) {\n\n String layerId = entry.get(\"pk_layer\").toString();\n int state = (Integer) entry.get(\"int_state\");\n int count = ((Long)entry.get(\"c\")).intValue();\n\n // Rollup counts for job.\n Integer stateCount = jobRollup.get(state);\n if (stateCount == null) {\n jobRollup.put(state, count);\n }\n else {\n jobRollup.put(state, count + stateCount);\n }\n\n // Rollup stats for layers.\n List layerCounts = layerRollup.get(layerId);\n if (layerCounts == null) {\n layerRollup.put(layerId, Lists.newArrayList(state, count));\n }\n else {\n layerRollup.get(layerId).add(state);\n layerRollup.get(layerId).add(count);\n }\n }\n\n final StringBuilder sb = new StringBuilder(512);\n final List values = Lists.newArrayList();\n\n // Apply layer counts\n for (Map.Entry> entry: layerRollup.entrySet()) {\n List d = entry.getValue();\n values.clear();\n int total = 0;\n\n sb.setLength(0);\n sb.append(\"UPDATE plow.layer_count SET\");\n for (int i=0; i < entry.getValue().size(); i=i+2) {\n sb.append(\" int_\");\n sb.append(TaskState.findByValue(d.get(i)).toString().toLowerCase());\n sb.append(\"=?,\");\n values.add(d.get(i+1));\n total=total + d.get(i+1);\n }\n sb.deleteCharAt(sb.length() - 1);\n sb.append(\" WHERE pk_layer=?\");\n values.add(UUID.fromString(entry.getKey()));\n jdbc.update(sb.toString(), values.toArray());\n jdbc.update(\"UPDATE plow.layer_count SET int_total=? 
WHERE pk_layer=?\",\n total, UUID.fromString(entry.getKey()));\n }\n\n int total = 0;\n values.clear();\n sb.setLength(0);\n sb.append(\"UPDATE plow.job_count SET \");\n for (Map.Entry entry: jobRollup.entrySet()) {\n sb.append(\"int_\");\n sb.append(TaskState.findByValue(entry.getKey()).toString().toLowerCase());\n sb.append(\"=?,\");\n values.add(entry.getValue());\n total=total + entry.getValue();\n }\n sb.deleteCharAt(sb.length() - 1);\n sb.append(\" WHERE pk_job=?\");\n values.add(job.getJobId());\n jdbc.update(sb.toString(), values.toArray());\n jdbc.update(\"UPDATE plow.job_count SET int_total=? WHERE pk_job=?\",\n total, job.getJobId());\n\n }\n\n @Override\n public boolean isPaused(Job job) {\n return jdbc.queryForObject(\"SELECT bool_paused FROM plow.job WHERE pk_job=?\",\n Boolean.class, job.getJobId());\n }\n\n @Override\n public boolean hasWaitingFrames(Job job) {\n return jdbc.queryForInt(\"SELECT job_count.int_waiting FROM plow.job_count WHERE pk_job=?\",\n job.getJobId()) > 0;\n }\n\n @Override\n public boolean updateMaxRssMb(UUID jobId, int value) {\n return jdbc.update(\"UPDATE plow.job_ping SET int_max_rss=? \" +\n \"WHERE pk_job=? 
AND int_max_rss < ?\",\n value, jobId, value) == 1;\n }\n private static final String HAS_PENDING_FRAMES =\n \"SELECT \" +\n \"job_count.int_total - (job_count.int_eaten + job_count.int_succeeded) AS pending, \" +\n \"job.int_state \" +\n \"FROM \" +\n \"plow.job \" +\n \"INNER JOIN \" +\n \"plow.job_count \" +\n \"ON \" +\n \"job.pk_job = job_count.pk_job \" +\n \"WHERE \" +\n \"job.pk_job=?\";\n @Override\n public boolean isFinished(Job job) {\n SqlRowSet row = jdbc.queryForRowSet(HAS_PENDING_FRAMES, job.getJobId());\n if (!row.first()) {\n return true;\n }\n if (row.getInt(\"int_state\") == JobState.FINISHED.ordinal()) {\n return true;\n }\n if (row.getInt(\"pending\") == 0) {\n return true;\n }\n return false;\n }\n\n @Override\n public void setMaxCores(Job job, int value) {\n jdbc.update(\"UPDATE plow.job_dsp SET int_max_cores=? WHERE pk_job=?\",\n value, job.getJobId());\n }\n\n @Override\n public void setMinCores(Job job, int value) {\n jdbc.update(\"UPDATE plow.job_dsp SET int_min_cores=? 
WHERE pk_job=?\",\n value, job.getJobId());\n }\n}\n"},"new_file":{"kind":"string","value":"server/src/main/java/com/breakersoft/plow/dao/pgsql/JobDaoImpl.java"},"old_contents":{"kind":"string","value":"package com.breakersoft.plow.dao.pgsql;\n\nimport java.sql.Connection;\nimport java.sql.PreparedStatement;\nimport java.sql.ResultSet;\nimport java.sql.SQLException;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.UUID;\n\nimport org.springframework.jdbc.core.PreparedStatementCreator;\nimport org.springframework.jdbc.core.RowMapper;\nimport org.springframework.jdbc.support.rowset.SqlRowSet;\nimport org.springframework.stereotype.Repository;\n\nimport com.breakersoft.plow.Folder;\nimport com.breakersoft.plow.Job;\nimport com.breakersoft.plow.JobE;\nimport com.breakersoft.plow.Project;\nimport com.breakersoft.plow.dao.AbstractDao;\nimport com.breakersoft.plow.dao.JobDao;\nimport com.breakersoft.plow.exceptions.InvalidBlueprintException;\nimport com.breakersoft.plow.thrift.JobSpecT;\nimport com.breakersoft.plow.thrift.JobState;\nimport com.breakersoft.plow.thrift.TaskState;\nimport com.breakersoft.plow.util.JdbcUtils;\nimport com.google.common.collect.Lists;\nimport com.google.common.collect.Maps;\n\n@Repository\npublic final class JobDaoImpl extends AbstractDao implements JobDao {\n\n public static final RowMapper MAPPER = new RowMapper() {\n\n @Override\n public Job mapRow(ResultSet rs, int rowNum)\n throws SQLException {\n JobE job = new JobE();\n job.setJobId((UUID) rs.getObject(1));\n job.setProjectId((UUID) rs.getObject(2));\n job.setFolderId((UUID) rs.getObject(3));\n return job;\n }\n };\n\n private static final String GET =\n \"SELECT \" +\n \"pk_job,\"+\n \"pk_project, \" +\n \"pk_folder \" +\n \"FROM \" +\n \"plow.job \";\n\n @Override\n public Job get(String name, JobState state) {\n return jdbc.queryForObject(\n GET + \"WHERE str_name=? 
AND int_state=?\",\n MAPPER, name, state.ordinal());\n }\n\n @Override\n public Job getActive(String name) {\n return jdbc.queryForObject(\n GET + \"WHERE str_active_name=?\", MAPPER, name);\n }\n\n @Override\n public Job getActive(UUID id) {\n return jdbc.queryForObject(\n GET + \"WHERE pk_job=? AND int_state!=?\", MAPPER,\n id, JobState.FINISHED.ordinal());\n }\n\n @Override\n public Job getByActiveNameOrId(String identifer) {\n try {\n return getActive(UUID.fromString(identifer));\n } catch (IllegalArgumentException e) {\n return getActive(identifer);\n }\n }\n\n @Override\n public Job get(UUID id) {\n return jdbc.queryForObject(\n GET + \"WHERE pk_job=?\",\n MAPPER, id);\n }\n\n @Override\n public void setPaused(Job job, boolean value) {\n jdbc.update(\"UPDATE plow.job SET bool_paused=? WHERE pk_job=?\",\n value, job.getJobId());\n }\n\n private static final String INSERT[] = {\n JdbcUtils.Insert(\"plow.job\",\n \"pk_job\", \"pk_project\", \"str_name\", \"str_active_name\",\n \"str_username\", \"int_uid\", \"int_state\", \"bool_paused\",\n \"str_log_path\", \"attrs\")\n };\n\n @Override\n public Job create(final Project project, final JobSpecT spec) {\n\n final UUID jobId = UUID.randomUUID();\n\n jdbc.update(new PreparedStatementCreator() {\n @Override\n public PreparedStatement createPreparedStatement(final Connection conn) throws SQLException {\n final PreparedStatement ret = conn.prepareStatement(INSERT[0]);\n ret.setObject(1, jobId);\n ret.setObject(2, project.getProjectId());\n ret.setString(3, spec.getName());\n ret.setString(4, spec.getName());\n ret.setString(5, spec.username);\n ret.setInt(6, spec.getUid());\n ret.setInt(7, JobState.INITIALIZE.ordinal());\n ret.setBoolean(8, spec.isPaused());\n ret.setString(9, String.format(\"%s/%s\", spec.logPath, spec.name));\n ret.setObject(10, spec.attrs);\n return ret;\n }\n });\n\n jdbc.update(\"INSERT INTO plow.job_count (pk_job) VALUES (?)\", jobId);\n jdbc.update(\"INSERT INTO plow.job_dsp (pk_job) VALUES 
(?)\", jobId);\n jdbc.update(\"INSERT INTO plow.job_ping (pk_job) VALUES (?)\", jobId);\n\n final JobE job = new JobE();\n job.setJobId(jobId);\n job.setProjectId(project.getProjectId());\n job.setFolderId(null); // Don't know folder yet\n return job;\n }\n\n private static final String UPDATE_ATTRS =\n \"UPDATE \" +\n \"plow.job \" +\n \"SET \" +\n \"attrs = ? \" +\n \"WHERE \" +\n \"pk_job=?\";\n\n @Override\n public void setAttrs(final Job job, final Map attrs) {\n jdbc.update(new PreparedStatementCreator() {\n @Override\n public PreparedStatement createPreparedStatement(final Connection conn) throws SQLException {\n final PreparedStatement ret = conn.prepareStatement(UPDATE_ATTRS);\n ret.setObject(1, attrs);\n ret.setObject(2, job.getJobId());\n return ret;\n }\n });\n }\n\n @Override\n public Map getAttrs(final Job job) {\n return jdbc.queryForObject(\n \"SELECT attrs FROM plow.job WHERE job.pk_job=?\",\n new RowMapper>() {\n\n @Override\n public Map mapRow(ResultSet rs, int rowNum)\n throws SQLException {\n Map result = (Map) rs.getObject(\"attrs\");\n return result;\n }\n\n }, job.getJobId());\n }\n\n @Override\n public void updateFolder(Job job, Folder folder) {\n jdbc.update(\"UPDATE plow.job SET pk_folder=? WHERE pk_job=?\",\n folder.getFolderId(), job.getJobId());\n }\n\n @Override\n public boolean setJobState(Job job, JobState state) {\n return jdbc.update(\"UPDATE plow.job SET int_state=? WHERE pk_job=?\",\n state.ordinal(), job.getJobId()) == 1;\n }\n\n @Override\n public boolean shutdown(Job job) {\n return jdbc.update(\"UPDATE plow.job SET int_state=?, \" +\n \"str_active_name=NULL, time_stopped=plow.txTimeMillis() WHERE pk_job=? AND int_state=?\",\n JobState.FINISHED.ordinal(), job.getJobId(), JobState.RUNNING.ordinal()) == 1;\n }\n\n @Override\n public void updateFrameStatesForLaunch(Job job) {\n jdbc.update(\"UPDATE plow.task SET int_state=? 
WHERE pk_layer \" +\n \"IN (SELECT pk_layer FROM plow.layer WHERE pk_job=?)\",\n TaskState.WAITING.ordinal(), job.getJobId());\n }\n\n private static final String GET_FRAME_STATUS_COUNTS =\n \"SELECT \" +\n \"COUNT(1) AS c, \" +\n \"task.int_state, \" +\n \"task.pk_layer \" +\n \"FROM \" +\n \"plow.task,\" +\n \"plow.layer \" +\n \"WHERE \" +\n \"task.pk_layer = layer.pk_layer \" +\n \"AND \"+\n \"layer.pk_job=? \" +\n \"GROUP BY \" +\n \"task.int_state,\"+\n \"task.pk_layer\";\n\n @Override\n public void updateFrameCountsForLaunch(Job job) {\n\n Map jobRollup = Maps.newHashMap();\n Map> layerRollup = Maps.newHashMap();\n\n List> taskCounts = jdbc.queryForList(\n GET_FRAME_STATUS_COUNTS, job.getJobId());\n\n if (taskCounts.isEmpty()) {\n throw new InvalidBlueprintException(\"The job contains no tasks.\");\n }\n\n for (Map entry: taskCounts) {\n\n String layerId = entry.get(\"pk_layer\").toString();\n int state = (Integer) entry.get(\"int_state\");\n int count = ((Long)entry.get(\"c\")).intValue();\n\n // Rollup counts for job.\n Integer stateCount = jobRollup.get(state);\n if (stateCount == null) {\n jobRollup.put(state, count);\n }\n else {\n jobRollup.put(state, count + stateCount);\n }\n\n // Rollup stats for layers.\n List layerCounts = layerRollup.get(layerId);\n if (layerCounts == null) {\n layerRollup.put(layerId, Lists.newArrayList(state, count));\n }\n else {\n layerRollup.get(layerId).add(state);\n layerRollup.get(layerId).add(count);\n }\n }\n\n final StringBuilder sb = new StringBuilder(512);\n final List values = Lists.newArrayList();\n\n // Apply layer counts\n for (Map.Entry> entry: layerRollup.entrySet()) {\n List d = entry.getValue();\n values.clear();\n int total = 0;\n\n sb.setLength(0);\n sb.append(\"UPDATE plow.layer_count SET\");\n for (int i=0; i < entry.getValue().size(); i=i+2) {\n sb.append(\" int_\");\n sb.append(TaskState.findByValue(d.get(i)).toString().toLowerCase());\n sb.append(\"=?,\");\n values.add(d.get(i+1));\n total=total + 
d.get(i+1);\n }\n sb.deleteCharAt(sb.length() - 1);\n sb.append(\" WHERE pk_layer=?\");\n values.add(UUID.fromString(entry.getKey()));\n jdbc.update(sb.toString(), values.toArray());\n jdbc.update(\"UPDATE plow.layer_count SET int_total=? WHERE pk_layer=?\",\n total, UUID.fromString(entry.getKey()));\n }\n\n int total = 0;\n values.clear();\n sb.setLength(0);\n sb.append(\"UPDATE plow.job_count SET \");\n for (Map.Entry entry: jobRollup.entrySet()) {\n sb.append(\"int_\");\n sb.append(TaskState.findByValue(entry.getKey()).toString().toLowerCase());\n sb.append(\"=?,\");\n values.add(entry.getValue());\n total=total + entry.getValue();\n }\n sb.deleteCharAt(sb.length() - 1);\n sb.append(\" WHERE pk_job=?\");\n values.add(job.getJobId());\n jdbc.update(sb.toString(), values.toArray());\n jdbc.update(\"UPDATE plow.job_count SET int_total=? WHERE pk_job=?\",\n total, job.getJobId());\n\n }\n\n @Override\n public boolean isPaused(Job job) {\n return jdbc.queryForObject(\"SELECT bool_paused FROM plow.job WHERE pk_job=?\",\n Boolean.class, job.getJobId());\n }\n\n @Override\n public boolean hasWaitingFrames(Job job) {\n return jdbc.queryForInt(\"SELECT job_count.int_waiting FROM plow.job_count WHERE pk_job=?\",\n job.getJobId()) > 0;\n }\n\n @Override\n public boolean updateMaxRssMb(UUID jobId, int value) {\n return jdbc.update(\"UPDATE plow.job_ping SET int_max_rss=? \" +\n \"WHERE pk_job=? 
AND int_max_rss < ?\",\n value, jobId, value) == 1;\n }\n private static final String HAS_PENDING_FRAMES =\n \"SELECT \" +\n \"job_count.int_total - (job_count.int_eaten + job_count.int_succeeded) AS pending, \" +\n \"job.int_state \" +\n \"FROM \" +\n \"plow.job \" +\n \"INNER JOIN \" +\n \"plow.job_count \" +\n \"ON \" +\n \"job.pk_job = job_count.pk_job \" +\n \"WHERE \" +\n \"job.pk_job=?\";\n @Override\n public boolean isFinished(Job job) {\n SqlRowSet row = jdbc.queryForRowSet(HAS_PENDING_FRAMES, job.getJobId());\n if (!row.first()) {\n return true;\n }\n if (row.getInt(\"int_state\") == JobState.FINISHED.ordinal()) {\n return true;\n }\n if (row.getInt(\"pending\") == 0) {\n return true;\n }\n return false;\n }\n\n @Override\n public void setMaxCores(Job job, int value) {\n jdbc.update(\"UPDATE plow.job_dsp SET int_max_cores=? WHERE pk_job=?\",\n value, job.getJobId());\n }\n\n @Override\n public void setMinCores(Job job, int value) {\n jdbc.update(\"UPDATE plow.job_dsp SET int_min_cores=? 
WHERE pk_job=?\",\n value, job.getJobId());\n }\n}\n"},"message":{"kind":"string","value":"suppress warnings for an unsafe cast coming from postgres driver.\n"},"old_file":{"kind":"string","value":"server/src/main/java/com/breakersoft/plow/dao/pgsql/JobDaoImpl.java"},"subject":{"kind":"string","value":"suppress warnings for an unsafe cast coming from postgres driver."},"git_diff":{"kind":"string","value":"erver/src/main/java/com/breakersoft/plow/dao/pgsql/JobDaoImpl.java\n @Override\n public Map mapRow(ResultSet rs, int rowNum)\n throws SQLException {\n @SuppressWarnings(\"unchecked\")\n Map result = (Map) rs.getObject(\"attrs\");\n return result;\n }"}}},{"rowIdx":2053,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"c84637a0012d41e823b6d6af298afbfcf268c6b7"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"xdyixia/coolweather"},"new_contents":{"kind":"string","value":"package com.coolweather.android;\r\n\r\nimport android.content.Intent;\r\nimport android.content.SharedPreferences;\r\nimport android.graphics.Color;\r\nimport android.os.Build;\r\nimport android.os.Bundle;\r\nimport android.preference.PreferenceManager;\r\nimport android.support.v4.view.GravityCompat;\r\nimport android.support.v4.widget.DrawerLayout;\r\nimport android.support.v4.widget.SwipeRefreshLayout;\r\nimport android.support.v7.app.AppCompatActivity;\r\nimport android.view.LayoutInflater;\r\nimport android.view.View;\r\nimport android.widget.Button;\r\nimport android.widget.ImageView;\r\nimport android.widget.LinearLayout;\r\nimport android.widget.ScrollView;\r\nimport android.widget.TextView;\r\nimport android.widget.Toast;\r\n\r\nimport com.bumptech.glide.Glide;\r\nimport com.coolweather.android.gson.Forecast;\r\nimport com.coolweather.android.gson.Weather;\r\n//import com.coolweather.android.service.AutoUpdateService;\r\nimport 
com.coolweather.android.util.HttpUtil;\r\nimport com.coolweather.android.util.Utility;\r\n\r\nimport java.io.IOException;\r\n\r\nimport okhttp3.Call;\r\nimport okhttp3.Callback;\r\nimport okhttp3.Response;\r\n\r\npublic class WeatherActivity extends AppCompatActivity {\r\n\r\n public DrawerLayout drawerLayout;\r\n\r\n public SwipeRefreshLayout swipeRefresh;\r\n\r\n private ScrollView weatherLayout;\r\n\r\n private Button navButton;\r\n\r\n private TextView titleCity;\r\n\r\n private TextView titleUpdateTime;\r\n\r\n private TextView degreeText;\r\n\r\n private TextView weatherInfoText;\r\n\r\n private LinearLayout forecastLayout;\r\n\r\n private TextView aqiText;\r\n\r\n private TextView pm25Text;\r\n\r\n private TextView comfortText;\r\n\r\n private TextView carWashText;\r\n\r\n private TextView sportText;\r\n\r\n private ImageView bingPicImg;\r\n\r\n private String mWeatherId;\r\n\r\n @Override\r\n protected void onCreate(Bundle savedInstanceState) {\r\n super.onCreate(savedInstanceState);\r\n if (Build.VERSION.SDK_INT >= 21) {\r\n View decorView = getWindow().getDecorView();\r\n decorView.setSystemUiVisibility(View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN\r\n | View.SYSTEM_UI_FLAG_LAYOUT_STABLE);\r\n getWindow().setStatusBarColor(Color.TRANSPARENT);\r\n }\r\n setContentView(R.layout.activity_weather);\r\n // 初始化各控件\r\n bingPicImg = (ImageView) findViewById(R.id.bing_pic_img);\r\n weatherLayout = (ScrollView) findViewById(R.id.weather_layout);\r\n titleCity = (TextView) findViewById(R.id.title_city);\r\n titleUpdateTime = (TextView) findViewById(R.id.title_update_time);\r\n degreeText = (TextView) findViewById(R.id.degree_text);\r\n weatherInfoText = (TextView) findViewById(R.id.weather_info_text);\r\n forecastLayout = (LinearLayout) findViewById(R.id.forecast_layout);\r\n aqiText = (TextView) findViewById(R.id.aqi_text);\r\n pm25Text = (TextView) findViewById(R.id.pm25_text);\r\n comfortText = (TextView) findViewById(R.id.comfort_text);\r\n carWashText = (TextView) 
findViewById(R.id.car_wash_text);\r\n sportText = (TextView) findViewById(R.id.sport_text);\r\n swipeRefresh = (SwipeRefreshLayout) findViewById(R.id.swipe_refresh);\r\n swipeRefresh.setColorSchemeResources(R.color.colorPrimary);\r\n drawerLayout = (DrawerLayout) findViewById(R.id.drawer_layout);\r\n navButton = (Button) findViewById(R.id.nav_button);\r\n SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);\r\n String weatherString = prefs.getString(\"weather\", null);\r\n //final String weatherId;\r\n if (weatherString != null) {\r\n // 有缓存时直接解析天气数据\r\n Weather weather = Utility.handleWeatherResponse(weatherString);\r\n mWeatherId = weather.basic.weatherId;\r\n showWeatherInfo(weather);\r\n } else {\r\n // 无缓存时去服务器查询天气\r\n mWeatherId = getIntent().getStringExtra(\"weather_id\");\r\n weatherLayout.setVisibility(View.INVISIBLE);\r\n requestWeather(mWeatherId);\r\n }\r\n swipeRefresh.setOnRefreshListener(new SwipeRefreshLayout.OnRefreshListener() {\r\n @Override\r\n public void onRefresh() {\r\n requestWeather(mWeatherId);\r\n }\r\n });\r\n navButton.setOnClickListener(new View.OnClickListener() {\r\n @Override\r\n public void onClick(View v) {\r\n drawerLayout.openDrawer(GravityCompat.START);\r\n }\r\n });\r\n String bingPic = prefs.getString(\"bing_pic\", null);\r\n if (bingPic != null) {\r\n Glide.with(this).load(bingPic).into(bingPicImg);\r\n } else {\r\n loadBingPic();\r\n }\r\n }\r\n\r\n /**\r\n * 根据天气id请求城市天气信息。\r\n */\r\n public void requestWeather(final String weatherId) {\r\n String weatherUrl = \"https://api.heweather.com/x3/weather?cityid=\" + weatherId + \"&key=bc0418b57b2d4918819d3974ac1285d9\";\r\n HttpUtil.sendOkHttpRequest(weatherUrl, new Callback() {\r\n @Override\r\n public void onResponse(Call call, Response response) throws IOException {\r\n final String responseText = response.body().string();\r\n final Weather weather = Utility.handleWeatherResponse(responseText);\r\n runOnUiThread(new Runnable() {\r\n @Override\r\n 
public void run() {\r\n if (weather != null && \"ok\".equals(weather.status)) {\r\n SharedPreferences.Editor editor = PreferenceManager.getDefaultSharedPreferences(WeatherActivity.this).edit();\r\n editor.putString(\"weather\", responseText);\r\n editor.apply();\r\n mWeatherId=weather.basic.weatherId;\r\n showWeatherInfo(weather);\r\n } else {\r\n Toast.makeText(WeatherActivity.this, \"获取天气信息失败\", Toast.LENGTH_SHORT).show();\r\n }\r\n swipeRefresh.setRefreshing(false);\r\n }\r\n });\r\n }\r\n\r\n @Override\r\n public void onFailure(Call call, IOException e) {\r\n e.printStackTrace();\r\n runOnUiThread(new Runnable() {\r\n @Override\r\n public void run() {\r\n Toast.makeText(WeatherActivity.this, \"获取天气信息失败\", Toast.LENGTH_SHORT).show();\r\n swipeRefresh.setRefreshing(false);\r\n }\r\n });\r\n }\r\n });\r\n loadBingPic();\r\n }\r\n\r\n /**\r\n * 加载必应每日一图\r\n */\r\n private void loadBingPic() {\r\n String requestBingPic = \"http://guolin.tech/api/bing_pic\";\r\n HttpUtil.sendOkHttpRequest(requestBingPic, new Callback() {\r\n @Override\r\n public void onResponse(Call call, Response response) throws IOException {\r\n final String bingPic = response.body().string();\r\n SharedPreferences.Editor editor = PreferenceManager.getDefaultSharedPreferences(WeatherActivity.this).edit();\r\n editor.putString(\"bing_pic\", bingPic);\r\n editor.apply();\r\n runOnUiThread(new Runnable() {\r\n @Override\r\n public void run() {\r\n Glide.with(WeatherActivity.this).load(bingPic).into(bingPicImg);\r\n }\r\n });\r\n }\r\n\r\n @Override\r\n public void onFailure(Call call, IOException e) {\r\n e.printStackTrace();\r\n }\r\n });\r\n }\r\n\r\n /**\r\n * 处理并展示Weather实体类中的数据。\r\n */\r\n private void showWeatherInfo(Weather weather) {\r\n String cityName = weather.basic.cityName;\r\n String updateTime = weather.basic.update.updateTime.split(\" \")[1];\r\n String degree = weather.now.temperature + \"℃\";\r\n String weatherInfo = weather.now.more.info;\r\n titleCity.setText(cityName);\r\n 
titleUpdateTime.setText(updateTime);\r\n degreeText.setText(degree);\r\n weatherInfoText.setText(weatherInfo);\r\n forecastLayout.removeAllViews();\r\n for (Forecast forecast : weather.forecastList) {\r\n View view = LayoutInflater.from(this).inflate(R.layout.forecast_item, forecastLayout, false);\r\n TextView dateText = (TextView) view.findViewById(R.id.date_text);\r\n TextView infoText = (TextView) view.findViewById(R.id.info_text);\r\n TextView maxText = (TextView) view.findViewById(R.id.max_text);\r\n TextView minText = (TextView) view.findViewById(R.id.min_text);\r\n dateText.setText(forecast.date);\r\n infoText.setText(forecast.more.info);\r\n maxText.setText(forecast.temperature.max);\r\n minText.setText(forecast.temperature.min);\r\n forecastLayout.addView(view);\r\n }\r\n if (weather.aqi != null) {\r\n aqiText.setText(weather.aqi.city.aqi);\r\n pm25Text.setText(weather.aqi.city.pm25);\r\n }\r\n String comfort = \"舒适度:\" + weather.suggestion.comfort.info;\r\n String carWash = \"洗车指数:\" + weather.suggestion.carWash.info;\r\n String sport = \"运行建议:\" + weather.suggestion.sport.info;\r\n comfortText.setText(comfort);\r\n carWashText.setText(carWash);\r\n sportText.setText(sport);\r\n weatherLayout.setVisibility(View.VISIBLE);\r\n // Intent intent = new Intent(this, AutoUpdateService.class);\r\n //startService(intent);\r\n }\r\n\r\n}\r\n"},"new_file":{"kind":"string","value":"app/src/main/java/com/coolweather/android/WeatherActivity.java"},"old_contents":{"kind":"string","value":"package com.coolweather.android;\r\n\r\nimport android.content.Intent;\r\nimport android.content.SharedPreferences;\r\nimport android.graphics.Color;\r\nimport android.os.Build;\r\nimport android.os.Bundle;\r\nimport android.preference.PreferenceManager;\r\nimport android.support.v4.view.GravityCompat;\r\nimport android.support.v4.widget.DrawerLayout;\r\nimport android.support.v4.widget.SwipeRefreshLayout;\r\nimport android.support.v7.app.AppCompatActivity;\r\nimport 
android.view.LayoutInflater;\r\nimport android.view.View;\r\nimport android.widget.Button;\r\nimport android.widget.ImageView;\r\nimport android.widget.LinearLayout;\r\nimport android.widget.ScrollView;\r\nimport android.widget.TextView;\r\nimport android.widget.Toast;\r\n\r\nimport com.bumptech.glide.Glide;\r\nimport com.coolweather.android.gson.Forecast;\r\nimport com.coolweather.android.gson.Weather;\r\n//import com.coolweather.android.service.AutoUpdateService;\r\nimport com.coolweather.android.util.HttpUtil;\r\nimport com.coolweather.android.util.Utility;\r\n\r\nimport java.io.IOException;\r\n\r\nimport okhttp3.Call;\r\nimport okhttp3.Callback;\r\nimport okhttp3.Response;\r\n\r\npublic class WeatherActivity extends AppCompatActivity {\r\n\r\n public DrawerLayout drawerLayout;\r\n\r\n public SwipeRefreshLayout swipeRefresh;\r\n\r\n private ScrollView weatherLayout;\r\n\r\n private Button navButton;\r\n\r\n private TextView titleCity;\r\n\r\n private TextView titleUpdateTime;\r\n\r\n private TextView degreeText;\r\n\r\n private TextView weatherInfoText;\r\n\r\n private LinearLayout forecastLayout;\r\n\r\n private TextView aqiText;\r\n\r\n private TextView pm25Text;\r\n\r\n private TextView comfortText;\r\n\r\n private TextView carWashText;\r\n\r\n private TextView sportText;\r\n\r\n private ImageView bingPicImg;\r\n\r\n @Override\r\n protected void onCreate(Bundle savedInstanceState) {\r\n super.onCreate(savedInstanceState);\r\n if (Build.VERSION.SDK_INT >= 21) {\r\n View decorView = getWindow().getDecorView();\r\n decorView.setSystemUiVisibility(View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN\r\n | View.SYSTEM_UI_FLAG_LAYOUT_STABLE);\r\n getWindow().setStatusBarColor(Color.TRANSPARENT);\r\n }\r\n setContentView(R.layout.activity_weather);\r\n // 初始化各控件\r\n bingPicImg = (ImageView) findViewById(R.id.bing_pic_img);\r\n weatherLayout = (ScrollView) findViewById(R.id.weather_layout);\r\n titleCity = (TextView) findViewById(R.id.title_city);\r\n titleUpdateTime = (TextView) 
findViewById(R.id.title_update_time);\r\n degreeText = (TextView) findViewById(R.id.degree_text);\r\n weatherInfoText = (TextView) findViewById(R.id.weather_info_text);\r\n forecastLayout = (LinearLayout) findViewById(R.id.forecast_layout);\r\n aqiText = (TextView) findViewById(R.id.aqi_text);\r\n pm25Text = (TextView) findViewById(R.id.pm25_text);\r\n comfortText = (TextView) findViewById(R.id.comfort_text);\r\n carWashText = (TextView) findViewById(R.id.car_wash_text);\r\n sportText = (TextView) findViewById(R.id.sport_text);\r\n swipeRefresh = (SwipeRefreshLayout) findViewById(R.id.swipe_refresh);\r\n swipeRefresh.setColorSchemeResources(R.color.colorPrimary);\r\n drawerLayout = (DrawerLayout) findViewById(R.id.drawer_layout);\r\n navButton = (Button) findViewById(R.id.nav_button);\r\n SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);\r\n String weatherString = prefs.getString(\"weather\", null);\r\n final String weatherId;\r\n if (weatherString != null) {\r\n // 有缓存时直接解析天气数据\r\n Weather weather = Utility.handleWeatherResponse(weatherString);\r\n weatherId = weather.basic.weatherId;\r\n showWeatherInfo(weather);\r\n } else {\r\n // 无缓存时去服务器查询天气\r\n weatherId = getIntent().getStringExtra(\"weather_id\");\r\n weatherLayout.setVisibility(View.INVISIBLE);\r\n requestWeather(weatherId);\r\n }\r\n swipeRefresh.setOnRefreshListener(new SwipeRefreshLayout.OnRefreshListener() {\r\n @Override\r\n public void onRefresh() {\r\n requestWeather(weatherId);\r\n }\r\n });\r\n navButton.setOnClickListener(new View.OnClickListener() {\r\n @Override\r\n public void onClick(View v) {\r\n drawerLayout.openDrawer(GravityCompat.START);\r\n }\r\n });\r\n String bingPic = prefs.getString(\"bing_pic\", null);\r\n if (bingPic != null) {\r\n Glide.with(this).load(bingPic).into(bingPicImg);\r\n } else {\r\n loadBingPic();\r\n }\r\n }\r\n\r\n /**\r\n * 根据天气id请求城市天气信息。\r\n */\r\n public void requestWeather(final String weatherId) {\r\n String weatherUrl = 
\"https://api.heweather.com/x3/weather?cityid=\" + weatherId + \"&key=bc0418b57b2d4918819d3974ac1285d9\";\r\n HttpUtil.sendOkHttpRequest(weatherUrl, new Callback() {\r\n @Override\r\n public void onResponse(Call call, Response response) throws IOException {\r\n final String responseText = response.body().string();\r\n final Weather weather = Utility.handleWeatherResponse(responseText);\r\n runOnUiThread(new Runnable() {\r\n @Override\r\n public void run() {\r\n if (weather != null && \"ok\".equals(weather.status)) {\r\n SharedPreferences.Editor editor = PreferenceManager.getDefaultSharedPreferences(WeatherActivity.this).edit();\r\n editor.putString(\"weather\", responseText);\r\n editor.apply();\r\n showWeatherInfo(weather);\r\n } else {\r\n Toast.makeText(WeatherActivity.this, \"获取天气信息失败\", Toast.LENGTH_SHORT).show();\r\n }\r\n swipeRefresh.setRefreshing(false);\r\n }\r\n });\r\n }\r\n\r\n @Override\r\n public void onFailure(Call call, IOException e) {\r\n e.printStackTrace();\r\n runOnUiThread(new Runnable() {\r\n @Override\r\n public void run() {\r\n Toast.makeText(WeatherActivity.this, \"获取天气信息失败\", Toast.LENGTH_SHORT).show();\r\n swipeRefresh.setRefreshing(false);\r\n }\r\n });\r\n }\r\n });\r\n loadBingPic();\r\n }\r\n\r\n /**\r\n * 加载必应每日一图\r\n */\r\n private void loadBingPic() {\r\n String requestBingPic = \"http://guolin.tech/api/bing_pic\";\r\n HttpUtil.sendOkHttpRequest(requestBingPic, new Callback() {\r\n @Override\r\n public void onResponse(Call call, Response response) throws IOException {\r\n final String bingPic = response.body().string();\r\n SharedPreferences.Editor editor = PreferenceManager.getDefaultSharedPreferences(WeatherActivity.this).edit();\r\n editor.putString(\"bing_pic\", bingPic);\r\n editor.apply();\r\n runOnUiThread(new Runnable() {\r\n @Override\r\n public void run() {\r\n Glide.with(WeatherActivity.this).load(bingPic).into(bingPicImg);\r\n }\r\n });\r\n }\r\n\r\n @Override\r\n public void onFailure(Call call, IOException e) {\r\n 
e.printStackTrace();\r\n }\r\n });\r\n }\r\n\r\n /**\r\n * 处理并展示Weather实体类中的数据。\r\n */\r\n private void showWeatherInfo(Weather weather) {\r\n String cityName = weather.basic.cityName;\r\n String updateTime = weather.basic.update.updateTime.split(\" \")[1];\r\n String degree = weather.now.temperature + \"℃\";\r\n String weatherInfo = weather.now.more.info;\r\n titleCity.setText(cityName);\r\n titleUpdateTime.setText(updateTime);\r\n degreeText.setText(degree);\r\n weatherInfoText.setText(weatherInfo);\r\n forecastLayout.removeAllViews();\r\n for (Forecast forecast : weather.forecastList) {\r\n View view = LayoutInflater.from(this).inflate(R.layout.forecast_item, forecastLayout, false);\r\n TextView dateText = (TextView) view.findViewById(R.id.date_text);\r\n TextView infoText = (TextView) view.findViewById(R.id.info_text);\r\n TextView maxText = (TextView) view.findViewById(R.id.max_text);\r\n TextView minText = (TextView) view.findViewById(R.id.min_text);\r\n dateText.setText(forecast.date);\r\n infoText.setText(forecast.more.info);\r\n maxText.setText(forecast.temperature.max);\r\n minText.setText(forecast.temperature.min);\r\n forecastLayout.addView(view);\r\n }\r\n if (weather.aqi != null) {\r\n aqiText.setText(weather.aqi.city.aqi);\r\n pm25Text.setText(weather.aqi.city.pm25);\r\n }\r\n String comfort = \"舒适度:\" + weather.suggestion.comfort.info;\r\n String carWash = \"洗车指数:\" + weather.suggestion.carWash.info;\r\n String sport = \"运行建议:\" + weather.suggestion.sport.info;\r\n comfortText.setText(comfort);\r\n carWashText.setText(carWash);\r\n sportText.setText(sport);\r\n weatherLayout.setVisibility(View.VISIBLE);\r\n// Intent intent = new Intent(this, AutoUpdateService.class);\r\n // startService(intent);\r\n 
}\r\n\r\n}\r\n"},"message":{"kind":"string","value":"新增切换城市和手动更新天气的功能。\n"},"old_file":{"kind":"string","value":"app/src/main/java/com/coolweather/android/WeatherActivity.java"},"subject":{"kind":"string","value":"新增切换城市和手动更新天气的功能。"},"git_diff":{"kind":"string","value":"pp/src/main/java/com/coolweather/android/WeatherActivity.java\n private TextView sportText;\n \n private ImageView bingPicImg;\n\n private String mWeatherId;\n \n @Override\n protected void onCreate(Bundle savedInstanceState) {\n navButton = (Button) findViewById(R.id.nav_button);\n SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);\n String weatherString = prefs.getString(\"weather\", null);\n final String weatherId;\n //final String weatherId;\n if (weatherString != null) {\n // 有缓存时直接解析天气数据\n Weather weather = Utility.handleWeatherResponse(weatherString);\n weatherId = weather.basic.weatherId;\n mWeatherId = weather.basic.weatherId;\n showWeatherInfo(weather);\n } else {\n // 无缓存时去服务器查询天气\n weatherId = getIntent().getStringExtra(\"weather_id\");\n mWeatherId = getIntent().getStringExtra(\"weather_id\");\n weatherLayout.setVisibility(View.INVISIBLE);\n requestWeather(weatherId);\n requestWeather(mWeatherId);\n }\n swipeRefresh.setOnRefreshListener(new SwipeRefreshLayout.OnRefreshListener() {\n @Override\n public void onRefresh() {\n requestWeather(weatherId);\n requestWeather(mWeatherId);\n }\n });\n navButton.setOnClickListener(new View.OnClickListener() {\n SharedPreferences.Editor editor = PreferenceManager.getDefaultSharedPreferences(WeatherActivity.this).edit();\n editor.putString(\"weather\", responseText);\n editor.apply();\n mWeatherId=weather.basic.weatherId;\n showWeatherInfo(weather);\n } else {\n Toast.makeText(WeatherActivity.this, \"获取天气信息失败\", Toast.LENGTH_SHORT).show();\n carWashText.setText(carWash);\n sportText.setText(sport);\n weatherLayout.setVisibility(View.VISIBLE);\n// Intent intent = new Intent(this, AutoUpdateService.class);\n // 
startService(intent);\n // Intent intent = new Intent(this, AutoUpdateService.class);\n //startService(intent);\n }\n \n }"}}},{"rowIdx":2054,"cells":{"lang":{"kind":"string","value":"JavaScript"},"license":{"kind":"string","value":"mit"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"6a948a1a2021dca92b7097b3e5f76bbe866da770"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"inventree/InvenTree,SchrodingersGat/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree"},"new_contents":{"kind":"string","value":"{% load i18n %}\n{% load inventree_extras %}\n\n/* globals\n Chart,\n constructForm,\n global_settings,\n imageHoverIcon,\n inventreeGet,\n inventreePut,\n launchModalForm,\n linkButtonsToSelection,\n loadTableFilters,\n makeIconBadge,\n makeIconButton,\n printPartLabels,\n renderLink,\n setFormGroupVisibility,\n setupFilterList,\n yesNoLabel,\n*/\n\n/* exported\n duplicatePart,\n editCategory,\n editPart,\n initPriceBreakSet,\n loadBomChart,\n loadParametricPartTable,\n loadPartCategoryTable,\n loadPartParameterTable,\n loadPartTable,\n loadPartTestTemplateTable,\n loadPartVariantTable,\n loadRelatedPartsTable,\n loadSellPricingChart,\n loadSimplePartTable,\n loadStockPricingChart,\n partStockLabel,\n toggleStar,\n*/\n\n/* Part API functions\n * Requires api.js to be loaded first\n */\n\nfunction partGroups() {\n\n return {\n attributes: {\n title: '{% trans \"Part Attributes\" %}',\n collapsible: true,\n },\n create: {\n title: '{% trans \"Part Creation Options\" %}',\n collapsible: true,\n },\n duplicate: {\n title: '{% trans \"Part Duplication Options\" %}',\n collapsible: true,\n },\n supplier: {\n title: '{% trans \"Supplier Options\" %}',\n collapsible: true,\n hidden: !global_settings.PART_PURCHASEABLE,\n }\n };\n}\n\n\n// Construct fieldset for part forms\nfunction partFields(options={}) {\n\n var 
fields = {\n category: {\n secondary: {\n title: '{% trans \"Add Part Category\" %}',\n fields: function() {\n var fields = categoryFields();\n\n return fields;\n }\n }\n },\n name: {},\n IPN: {},\n revision: {},\n description: {},\n variant_of: {},\n keywords: {\n icon: 'fa-key',\n },\n units: {},\n link: {\n icon: 'fa-link',\n },\n default_location: {\n },\n default_supplier: {\n filters: {\n part_detail: true,\n supplier_detail: true,\n }\n },\n default_expiry: {\n icon: 'fa-calendar-alt',\n },\n minimum_stock: {\n icon: 'fa-boxes',\n },\n component: {\n default: global_settings.PART_COMPONENT,\n group: 'attributes',\n },\n assembly: {\n default: global_settings.PART_ASSEMBLY,\n group: 'attributes',\n },\n is_template: {\n default: global_settings.PART_TEMPLATE,\n group: 'attributes',\n },\n trackable: {\n default: global_settings.PART_TRACKABLE,\n group: 'attributes',\n },\n purchaseable: {\n default: global_settings.PART_PURCHASEABLE,\n group: 'attributes',\n onEdit: function(value, name, field, options) {\n setFormGroupVisibility('supplier', value, options);\n }\n },\n salable: {\n default: global_settings.PART_SALABLE,\n group: 'attributes',\n },\n virtual: {\n default: global_settings.PART_VIRTUAL,\n group: 'attributes',\n },\n };\n\n // If editing a part, we can set the \"active\" status\n if (options.edit) {\n fields.active = {\n group: 'attributes'\n };\n }\n\n // Pop expiry field\n if (!global_settings.STOCK_ENABLE_EXPIRY) {\n delete fields['default_expiry'];\n }\n\n // Additional fields when \"creating\" a new part\n if (options.create) {\n\n // No supplier parts available yet\n delete fields['default_supplier'];\n\n if (global_settings.PART_CREATE_INITIAL) {\n\n fields.initial_stock = {\n type: 'boolean',\n label: '{% trans \"Create Initial Stock\" %}',\n help_text: '{% trans \"Create an initial stock item for this part\" %}',\n group: 'create',\n };\n\n fields.initial_stock_quantity = {\n type: 'decimal',\n value: 1,\n label: '{% trans \"Initial 
Stock Quantity\" %}',\n help_text: '{% trans \"Specify initial stock quantity for this part\" %}',\n group: 'create',\n };\n\n // TODO - Allow initial location of stock to be specified\n fields.initial_stock_location = {\n label: '{% trans \"Location\" %}',\n help_text: '{% trans \"Select destination stock location\" %}',\n type: 'related field',\n required: true,\n api_url: `/api/stock/location/`,\n model: 'stocklocation',\n group: 'create',\n };\n }\n\n fields.copy_category_parameters = {\n type: 'boolean',\n label: '{% trans \"Copy Category Parameters\" %}',\n help_text: '{% trans \"Copy parameter templates from selected part category\" %}',\n value: global_settings.PART_CATEGORY_PARAMETERS,\n group: 'create',\n };\n\n // Supplier options\n fields.add_supplier_info = {\n type: 'boolean',\n label: '{% trans \"Add Supplier Data\" %}',\n help_text: '{% trans \"Create initial supplier data for this part\" %}',\n group: 'supplier',\n };\n \n fields.supplier = {\n type: 'related field',\n model: 'company',\n label: '{% trans \"Supplier\" %}',\n help_text: '{% trans \"Select supplier\" %}',\n filters: {\n 'is_supplier': true,\n },\n api_url: '{% url \"api-company-list\" %}',\n group: 'supplier',\n };\n \n fields.SKU = {\n type: 'string',\n label: '{% trans \"SKU\" %}', \n help_text: '{% trans \"Supplier stock keeping unit\" %}',\n group: 'supplier',\n };\n \n fields.manufacturer = {\n type: 'related field',\n model: 'company',\n label: '{% trans \"Manufacturer\" %}',\n help_text: '{% trans \"Select manufacturer\" %}',\n filters: {\n 'is_manufacturer': true,\n },\n api_url: '{% url \"api-company-list\" %}',\n group: 'supplier',\n };\n \n fields.MPN = {\n type: 'string',\n label: '{% trans \"MPN\" %}',\n help_text: '{% trans \"Manufacturer Part Number\" %}',\n group: 'supplier',\n };\n\n }\n\n // Additional fields when \"duplicating\" a part\n if (options.duplicate) {\n\n fields.copy_from = {\n type: 'integer',\n hidden: true,\n value: options.duplicate,\n group: 
'duplicate',\n },\n\n fields.copy_image = {\n type: 'boolean',\n label: '{% trans \"Copy Image\" %}',\n help_text: '{% trans \"Copy image from original part\" %}',\n value: true,\n group: 'duplicate',\n },\n\n fields.copy_bom = {\n type: 'boolean',\n label: '{% trans \"Copy BOM\" %}',\n help_text: '{% trans \"Copy bill of materials from original part\" %}',\n value: global_settings.PART_COPY_BOM,\n group: 'duplicate',\n };\n\n fields.copy_parameters = {\n type: 'boolean',\n label: '{% trans \"Copy Parameters\" %}',\n help_text: '{% trans \"Copy parameter data from original part\" %}',\n value: global_settings.PART_COPY_PARAMETERS,\n group: 'duplicate',\n };\n }\n\n return fields;\n}\n\n\nfunction categoryFields() {\n return {\n parent: {\n help_text: '{% trans \"Parent part category\" %}',\n },\n name: {},\n description: {},\n default_location: {},\n default_keywords: {\n icon: 'fa-key',\n }\n };\n}\n\n\n// Edit a PartCategory via the API\nfunction editCategory(pk) {\n\n var url = `/api/part/category/${pk}/`;\n\n var fields = categoryFields();\n\n constructForm(url, {\n fields: fields,\n title: '{% trans \"Edit Part Category\" %}',\n reload: true,\n });\n\n}\n\n\nfunction editPart(pk) {\n\n var url = `/api/part/${pk}/`;\n\n var fields = partFields({\n edit: true\n });\n\n // Filter supplied parts by the Part ID\n fields.default_supplier.filters.part = pk;\n\n var groups = partGroups({});\n\n constructForm(url, {\n fields: fields,\n groups: groups,\n title: '{% trans \"Edit Part\" %}',\n reload: true,\n successMessage: '{% trans \"Part edited\" %}',\n });\n}\n\n\n// Launch form to duplicate a part\nfunction duplicatePart(pk, options={}) {\n\n // First we need all the part information\n inventreeGet(`/api/part/${pk}/`, {}, {\n\n success: function(data) {\n \n var fields = partFields({\n duplicate: pk,\n });\n\n // Remove \"default_supplier\" field\n delete fields['default_supplier'];\n\n // If we are making a \"variant\" part\n if (options.variant) {\n\n // Override 
the \"variant_of\" field\n data.variant_of = pk;\n }\n \n constructForm('{% url \"api-part-list\" %}', {\n method: 'POST',\n fields: fields,\n groups: partGroups(),\n title: '{% trans \"Duplicate Part\" %}',\n data: data,\n onSuccess: function(data) {\n // Follow the new part\n location.href = `/part/${data.pk}/`;\n }\n });\n }\n });\n}\n\n\n/* Toggle the 'starred' status of a part.\n * Performs AJAX queries and updates the display on the button.\n * \n * options:\n * - button: ID of the button (default = '#part-star-icon')\n * - URL: API url of the object\n * - user: pk of the user\n */\nfunction toggleStar(options) {\n\n inventreeGet(options.url, {}, {\n success: function(response) {\n\n var starred = response.starred;\n\n inventreePut(\n options.url,\n {\n starred: !starred,\n },\n {\n method: 'PATCH',\n success: function(response) {\n if (response.starred) {\n $(options.button).removeClass('fa fa-bell-slash').addClass('fas fa-bell icon-green');\n $(options.button).attr('title', '{% trans \"You are subscribed to notifications for this item\" %}');\n\n showMessage('{% trans \"You have subscribed to notifications for this item\" %}', {\n style: 'success',\n });\n } else {\n $(options.button).removeClass('fas fa-bell icon-green').addClass('fa fa-bell-slash');\n $(options.button).attr('title', '{% trans \"Subscribe to notifications for this item\" %}');\n\n showMessage('{% trans \"You have unsubscribed to notifications for this item\" %}', {\n style: 'warning',\n });\n }\n }\n }\n );\n }\n });\n}\n\n\nfunction partStockLabel(part, options={}) {\n\n if (part.in_stock) {\n return `{% trans \"Stock\" %}: ${part.in_stock}`;\n } else {\n return `{% trans \"No Stock\" %}`;\n }\n}\n\n\nfunction makePartIcons(part) {\n /* Render a set of icons for the given part.\n */\n\n var html = '';\n\n if (part.trackable) {\n html += makeIconBadge('fa-directions', '{% trans \"Trackable part\" %}');\n }\n\n if (part.virtual) {\n html += makeIconBadge('fa-ghost', '{% trans \"Virtual 
part\" %}');\n }\n\n if (part.is_template) {\n html += makeIconBadge('fa-clone', '{% trans \"Template part\" %}');\n }\n\n if (part.assembly) {\n html += makeIconBadge('fa-tools', '{% trans \"Assembled part\" %}');\n }\n\n if (part.starred) {\n html += makeIconBadge('fa-bell icon-green', '{% trans \"Subscribed part\" %}');\n }\n\n if (part.salable) {\n html += makeIconBadge('fa-dollar-sign', '{% trans \"Salable part\" %}');\n }\n\n if (!part.active) {\n html += `{% trans \"Inactive\" %} `; \n }\n\n return html;\n\n}\n\n\nfunction loadPartVariantTable(table, partId, options={}) {\n /* Load part variant table\n */\n\n var params = options.params || {};\n\n params.ancestor = partId;\n\n // Load filters\n var filters = loadTableFilters('variants');\n\n for (var key in params) {\n filters[key] = params[key];\n }\n\n setupFilterList('variants', $(table));\n\n var cols = [\n {\n field: 'pk',\n title: 'ID',\n visible: false,\n switchable: false,\n },\n {\n field: 'name',\n title: '{% trans \"Name\" %}',\n switchable: false,\n formatter: function(value, row) {\n var html = '';\n\n var name = '';\n\n if (row.IPN) {\n name += row.IPN;\n name += ' | ';\n }\n\n name += value;\n\n if (row.revision) {\n name += ' | ';\n name += row.revision;\n }\n\n if (row.is_template) {\n name = '' + name + '';\n }\n\n html += imageHoverIcon(row.thumbnail);\n html += renderLink(name, `/part/${row.pk}/`);\n\n if (row.trackable) {\n html += makeIconBadge('fa-directions', '{% trans \"Trackable part\" %}');\n }\n\n if (row.virtual) {\n html += makeIconBadge('fa-ghost', '{% trans \"Virtual part\" %}');\n }\n\n if (row.is_template) {\n html += makeIconBadge('fa-clone', '{% trans \"Template part\" %}');\n }\n\n if (row.assembly) {\n html += makeIconBadge('fa-tools', '{% trans \"Assembled part\" %}');\n }\n\n if (!row.active) {\n html += `{% trans \"Inactive\" %}`; \n }\n\n return html;\n },\n },\n {\n field: 'IPN',\n title: '{% trans \"IPN\" %}',\n },\n {\n field: 'revision',\n title: '{% trans 
\"Revision\" %}',\n },\n {\n field: 'description',\n title: '{% trans \"Description\" %}',\n },\n {\n field: 'in_stock',\n title: '{% trans \"Stock\" %}',\n formatter: function(value, row) {\n return renderLink(value, `/part/${row.pk}/?display=part-stock`);\n }\n }\n ];\n\n table.inventreeTable({\n url: '{% url \"api-part-list\" %}',\n name: 'partvariants',\n showColumns: true,\n original: params,\n queryParams: filters,\n formatNoMatches: function() {\n return '{% trans \"No variants found\" %}';\n },\n columns: cols,\n treeEnable: true,\n rootParentId: partId,\n parentIdField: 'variant_of',\n idField: 'pk',\n uniqueId: 'pk',\n treeShowField: 'name',\n sortable: true,\n search: true,\n onPostBody: function() {\n table.treegrid({\n treeColumn: 0,\n });\n\n table.treegrid('collapseAll');\n }\n });\n}\n\n\nfunction loadSimplePartTable(table, url, options={}) {\n\n options.disableFilters = true;\n\n loadPartTable(table, url, options);\n}\n\n\nfunction loadPartParameterTable(table, url, options) {\n\n var params = options.params || {};\n\n // Load filters\n var filters = loadTableFilters('part-parameters');\n\n for (var key in params) {\n filters[key] = params[key];\n }\n\n // setupFilterList(\"#part-parameters\", $(table));\n\n $(table).inventreeTable({\n url: url,\n original: params,\n queryParams: filters,\n name: 'partparameters',\n groupBy: false,\n formatNoMatches: function() {\n return '{% trans \"No parameters found\" %}';\n },\n columns: [\n {\n checkbox: true,\n switchable: false,\n visible: true,\n },\n {\n field: 'name',\n title: '{% trans \"Name\" %}',\n switchable: false,\n sortable: true,\n formatter: function(value, row) {\n return row.template_detail.name;\n }\n },\n {\n field: 'data',\n title: '{% trans \"Value\" %}',\n switchable: false,\n sortable: true,\n },\n {\n field: 'units',\n title: '{% trans \"Units\" %}',\n switchable: true,\n sortable: true,\n formatter: function(value, row) {\n return row.template_detail.units;\n }\n },\n {\n field: 
'actions',\n title: '',\n switchable: false,\n sortable: false,\n formatter: function(value, row) {\n var pk = row.pk;\n\n var html = `
`;\n\n html += makeIconButton('fa-edit icon-blue', 'button-parameter-edit', pk, '{% trans \"Edit parameter\" %}');\n html += makeIconButton('fa-trash-alt icon-red', 'button-parameter-delete', pk, '{% trans \"Delete parameter\" %}');\n\n html += `
`;\n\n return html;\n }\n }\n ],\n onPostBody: function() {\n // Setup button callbacks\n $(table).find('.button-parameter-edit').click(function() {\n var pk = $(this).attr('pk');\n\n constructForm(`/api/part/parameter/${pk}/`, {\n fields: {\n data: {},\n },\n title: '{% trans \"Edit Parameter\" %}',\n onSuccess: function() {\n $(table).bootstrapTable('refresh');\n }\n });\n });\n\n $(table).find('.button-parameter-delete').click(function() {\n var pk = $(this).attr('pk');\n\n constructForm(`/api/part/parameter/${pk}/`, {\n method: 'DELETE',\n title: '{% trans \"Delete Parameter\" %}',\n onSuccess: function() {\n $(table).bootstrapTable('refresh');\n }\n });\n });\n }\n });\n}\n\n\nfunction loadRelatedPartsTable(table, part_id, options={}) {\n /*\n * Load table of \"related\" parts\n */\n\n options.params = options.params || {};\n\n options.params.part = part_id;\n\n var filters = {};\n\n for (var key in options.params) {\n filters[key] = options.params[key];\n }\n\n setupFilterList('related', $(table), options.filterTarget);\n\n function getPart(row) {\n if (row.part_1 == part_id) {\n return row.part_2_detail;\n } else {\n return row.part_1_detail;\n }\n }\n\n var columns = [\n {\n field: 'name',\n title: '{% trans \"Part\" %}',\n switchable: false,\n formatter: function(value, row) {\n\n var part = getPart(row);\n\n var html = imageHoverIcon(part.thumbnail) + renderLink(part.full_name, `/part/${part.pk}/`);\n\n html += makePartIcons(part);\n\n return html;\n }\n },\n {\n field: 'description',\n title: '{% trans \"Description\" %}',\n formatter: function(value, row) {\n return getPart(row).description;\n }\n },\n {\n field: 'actions',\n title: '',\n switchable: false,\n formatter: function(value, row) {\n \n var html = `
`;\n\n html += makeIconButton('fa-trash-alt icon-red', 'button-related-delete', row.pk, '{% trans \"Delete part relationship\" %}');\n\n html += '
';\n\n return html;\n }\n }\n ];\n\n $(table).inventreeTable({\n url: '{% url \"api-part-related-list\" %}',\n groupBy: false,\n name: 'related',\n original: options.params,\n queryParams: filters,\n columns: columns,\n showColumns: false,\n search: true,\n onPostBody: function() {\n $(table).find('.button-related-delete').click(function() {\n var pk = $(this).attr('pk');\n\n constructForm(`/api/part/related/${pk}/`, {\n method: 'DELETE',\n title: '{% trans \"Delete Part Relationship\" %}',\n onSuccess: function() {\n $(table).bootstrapTable('refresh');\n }\n });\n });\n },\n });\n}\n\n\nfunction loadParametricPartTable(table, options={}) {\n /* Load parametric table for part parameters\n * \n * Args:\n * - table: HTML reference to the table\n * - table_headers: Unique parameters found in category\n * - table_data: Parameters data\n */\n\n var table_headers = options.headers;\n var table_data = options.data;\n\n var columns = [];\n\n for (var header of table_headers) {\n if (header === 'part') {\n columns.push({\n field: header,\n title: '{% trans \"Part\" %}',\n sortable: true,\n sortName: 'name',\n formatter: function(value, row) {\n\n var name = '';\n\n if (row.IPN) {\n name += row.IPN + ' | ' + row.name;\n } else {\n name += row.name;\n }\n\n return renderLink(name, '/part/' + row.pk + '/'); \n }\n });\n } else if (header === 'description') {\n columns.push({\n field: header,\n title: '{% trans \"Description\" %}',\n sortable: true,\n });\n } else {\n columns.push({\n field: header,\n title: header,\n sortable: true,\n filterControl: 'input',\n });\n }\n }\n\n $(table).inventreeTable({\n sortName: 'part',\n queryParams: table_headers,\n groupBy: false,\n name: options.name || 'parametric',\n formatNoMatches: function() {\n return '{% trans \"No parts found\" %}';\n },\n columns: columns,\n showColumns: true,\n data: table_data,\n filterControl: true,\n });\n}\n\n\nfunction partGridTile(part) {\n // Generate a \"grid tile\" view for a particular part\n\n // Rows 
for table view\n var rows = '';\n\n var stock = `${part.in_stock}`;\n\n if (!part.in_stock) {\n stock = `{% trans \"No Stock\" %}`;\n }\n\n rows += `{% trans \"Stock\" %}${stock}`;\n\n if (part.on_order) {\n rows += `{$ trans \"On Order\" %}${part.on_order}`;\n }\n\n if (part.building) {\n rows += `{% trans \"Building\" %}${part.building}`;\n }\n\n var html = `\n \n
\n
\n
\n \n ${part.full_name}\n \n ${makePartIcons(part)}\n
\n ${part.description}\n
\n
\n
\n
\n \n
\n
\n \n ${rows}\n
\n
\n
\n
\n
\n
\n `;\n\n return html;\n}\n\n\nfunction loadPartTable(table, url, options={}) {\n /* Load part listing data into specified table.\n * \n * Args:\n * - table: HTML reference to the table\n * - url: Base URL for API query\n * - options: object containing following (optional) fields\n * checkbox: Show the checkbox column\n * query: extra query params for API request\n * buttons: If provided, link buttons to selection status of this table\n * disableFilters: If true, disable custom filters\n * actions: Provide a callback function to construct an \"actions\" column\n */\n\n // Ensure category detail is included\n options.params['category_detail'] = true;\n\n var params = options.params || {};\n\n var filters = {};\n\n var col = null;\n\n if (!options.disableFilters) {\n filters = loadTableFilters('parts');\n }\n\n for (var key in params) {\n filters[key] = params[key];\n }\n\n setupFilterList('parts', $(table), options.filterTarget || null);\n\n var columns = [\n {\n field: 'pk',\n title: 'ID',\n visible: false,\n switchable: false,\n searchable: false,\n }\n ];\n\n if (options.checkbox) {\n columns.push({\n checkbox: true,\n title: '{% trans \"Select\" %}',\n searchable: false,\n switchable: false,\n });\n }\n\n col = {\n field: 'IPN',\n title: '{% trans \"IPN\" %}',\n };\n\n if (!options.params.ordering) {\n col['sortable'] = true;\n }\n\n columns.push(col);\n\n col = {\n field: 'name',\n title: '{% trans \"Part\" %}',\n switchable: false,\n formatter: function(value, row) {\n\n var name = row.full_name;\n\n var display = imageHoverIcon(row.thumbnail) + renderLink(name, `/part/${row.pk}/`);\n\n display += makePartIcons(row);\n\n return display; \n }\n };\n\n if (!options.params.ordering) {\n col['sortable'] = true;\n }\n\n columns.push(col);\n\n columns.push({\n field: 'description',\n title: '{% trans \"Description\" %}',\n formatter: function(value, row) {\n\n if (row.is_template) {\n value = `${value}`;\n }\n\n return value;\n }\n });\n\n col = {\n sortName: 
'category',\n field: 'category_detail',\n title: '{% trans \"Category\" %}',\n formatter: function(value, row) {\n if (row.category) {\n return renderLink(value.pathstring, `/part/category/${row.category}/`);\n } else {\n return '{% trans \"No category\" %}';\n }\n } \n };\n\n if (!options.params.ordering) {\n col['sortable'] = true;\n }\n\n columns.push(col);\n\n col = {\n field: 'in_stock',\n title: '{% trans \"Stock\" %}',\n searchable: false,\n formatter: function(value, row) { \n var link = '?display=part-stock';\n\n if (value) {\n // There IS stock available for this part\n\n // Is stock \"low\" (below the 'minimum_stock' quantity)?\n if (row.minimum_stock && row.minimum_stock > value) {\n value += `{% trans \"Low stock\" %}`;\n }\n\n } else if (row.on_order) {\n // There is no stock available, but stock is on order\n value = `0{% trans \"On Order\" %}: ${row.on_order}`;\n link = '?display=purchase-orders';\n } else if (row.building) {\n // There is no stock available, but stock is being built\n value = `0{% trans \"Building\" %}: ${row.building}`;\n link = '?display=build-orders';\n } else {\n // There is no stock available\n value = `0{% trans \"No Stock\" %}`;\n }\n\n return renderLink(value, `/part/${row.pk}/${link}`);\n }\n };\n\n if (!options.params.ordering) {\n col['sortable'] = true;\n }\n\n columns.push(col);\n\n columns.push({\n field: 'link',\n title: '{% trans \"Link\" %}',\n formatter: function(value) {\n return renderLink(\n value, value,\n {\n max_length: 32,\n remove_http: true,\n }\n );\n }\n });\n\n // Push an \"actions\" column\n if (options.actions) {\n columns.push({\n field: 'actions',\n title: '',\n switchable: false,\n visible: true,\n searchable: false,\n sortable: false,\n formatter: function(value, row) {\n return options.actions(value, row);\n }\n });\n }\n\n var grid_view = options.gridView && inventreeLoad('part-grid-view') == 1;\n\n $(table).inventreeTable({\n url: url,\n method: 'get',\n queryParams: filters,\n groupBy: 
false,\n name: options.name || 'part',\n original: params,\n sidePagination: 'server',\n pagination: 'true',\n formatNoMatches: function() {\n return '{% trans \"No parts found\" %}';\n },\n columns: columns,\n showColumns: true,\n showCustomView: grid_view,\n showCustomViewButton: false,\n onPostBody: function() {\n grid_view = inventreeLoad('part-grid-view') == 1;\n if (grid_view) {\n $('#view-part-list').removeClass('btn-secondary').addClass('btn-outline-secondary');\n $('#view-part-grid').removeClass('btn-outline-secondary').addClass('btn-secondary');\n } else {\n $('#view-part-grid').removeClass('btn-secondary').addClass('btn-outline-secondary');\n $('#view-part-list').removeClass('btn-outline-secondary').addClass('btn-secondary');\n }\n\n if (options.onPostBody) {\n options.onPostBody();\n }\n },\n buttons: options.gridView ? [\n {\n icon: 'fas fa-bars',\n attributes: {\n title: '{% trans \"Display as list\" %}',\n id: 'view-part-list',\n },\n event: () => {\n inventreeSave('part-grid-view', 0);\n $(table).bootstrapTable(\n 'refreshOptions',\n {\n showCustomView: false,\n }\n );\n }\n },\n {\n icon: 'fas fa-th',\n attributes: {\n title: '{% trans \"Display as grid\" %}',\n id: 'view-part-grid',\n },\n event: () => {\n inventreeSave('part-grid-view', 1);\n $(table).bootstrapTable(\n 'refreshOptions',\n {\n showCustomView: true,\n }\n );\n }\n }\n ] : [],\n customView: function(data) {\n\n var html = '';\n\n html = `
`;\n\n data.forEach(function(row, index) {\n \n // Force a new row every 5 columns\n if ((index > 0) && (index % 5 == 0) && (index < data.length)) {\n html += `
`;\n }\n\n html += partGridTile(row);\n });\n\n html += `
`;\n\n return html;\n }\n });\n \n if (options.buttons) {\n linkButtonsToSelection($(table), options.buttons);\n }\n\n /* Button callbacks for part table buttons */\n\n $('#multi-part-order').click(function() {\n var selections = $(table).bootstrapTable('getSelections');\n\n var parts = [];\n\n selections.forEach(function(item) {\n parts.push(item.pk);\n });\n\n launchModalForm('/order/purchase-order/order-parts/', {\n data: {\n parts: parts,\n },\n });\n });\n\n $('#multi-part-category').click(function() {\n var selections = $(table).bootstrapTable('getSelections');\n\n var parts = [];\n\n selections.forEach(function(item) {\n parts.push(item.pk);\n });\n\n launchModalForm('/part/set-category/', {\n data: {\n parts: parts,\n },\n reload: true,\n });\n });\n\n $('#multi-part-print-label').click(function() {\n var selections = $(table).bootstrapTable('getSelections');\n\n var items = [];\n\n selections.forEach(function(item) {\n items.push(item.pk);\n });\n\n printPartLabels(items);\n });\n\n $('#multi-part-export').click(function() {\n var selections = $(table).bootstrapTable('getSelections');\n\n var parts = '';\n\n selections.forEach(function(item) {\n parts += item.pk;\n parts += ',';\n });\n\n location.href = '/part/export/?parts=' + parts;\n });\n}\n\n\n/*\n * Display a table of part categories\n */\nfunction loadPartCategoryTable(table, options) {\n\n var params = options.params || {};\n\n var filterListElement = options.filterList || '#filter-list-category';\n\n var filters = {};\n\n var filterKey = options.filterKey || options.name || 'category';\n\n if (!options.disableFilters) {\n filters = loadTableFilters(filterKey);\n }\n\n \n var tree_view = options.allowTreeView && inventreeLoad('category-tree-view') == 1;\n\n if (tree_view) {\n params.cascade = true; \n }\n\n var original = {};\n\n for (var key in params) {\n original[key] = params[key];\n filters[key] = params[key];\n }\n\n setupFilterList(filterKey, table, filterListElement);\n\n 
table.inventreeTable({\n treeEnable: tree_view,\n rootParentId: tree_view ? options.params.parent : null,\n uniqueId: 'pk',\n idField: 'pk',\n treeShowField: 'name',\n parentIdField: tree_view ? 'parent' : null,\n method: 'get',\n url: options.url || '{% url \"api-part-category-list\" %}',\n queryParams: filters,\n disablePagination: tree_view,\n sidePagination: tree_view ? 'client' : 'server',\n serverSort: !tree_view, \n search: !tree_view,\n name: 'category',\n original: original,\n showColumns: true,\n buttons: options.allowTreeView ? [\n {\n icon: 'fas fa-bars',\n attributes: {\n title: '{% trans \"Display as list\" %}',\n id: 'view-category-list',\n },\n event: () => {\n inventreeSave('category-tree-view', 0);\n table.bootstrapTable(\n 'refreshOptions',\n {\n treeEnable: false,\n serverSort: true,\n search: true,\n pagination: true,\n }\n );\n }\n },\n {\n icon: 'fas fa-sitemap',\n attributes: {\n title: '{% trans \"Display as tree\" %}',\n id: 'view-category-tree',\n },\n event: () => {\n inventreeSave('category-tree-view', 1);\n table.bootstrapTable(\n 'refreshOptions',\n {\n treeEnable: true,\n serverSort: false,\n search: false,\n pagination: false,\n }\n );\n }\n }\n ] : [],\n onPostBody: function() {\n\n if (options.allowTreeView) {\n\n tree_view = inventreeLoad('category-tree-view') == 1;\n\n if (tree_view) {\n\n $('#view-category-list').removeClass('btn-secondary').addClass('btn-outline-secondary');\n $('#view-category-tree').removeClass('btn-outline-secondary').addClass('btn-secondary');\n \n table.treegrid({\n treeColumn: 0,\n onChange: function() {\n table.bootstrapTable('resetView');\n },\n onExpand: function() {\n \n }\n });\n } else {\n $('#view-category-tree').removeClass('btn-secondary').addClass('btn-outline-secondary');\n $('#view-category-list').removeClass('btn-outline-secondary').addClass('btn-secondary');\n }\n }\n },\n columns: [\n {\n checkbox: true,\n title: '{% trans \"Select\" %}',\n searchable: false,\n switchable: false,\n 
visible: false,\n },\n {\n field: 'name',\n title: '{% trans \"Name\" %}',\n switchable: true,\n sortable: true,\n formatter: function(value, row) {\n\n var html = renderLink(\n value,\n `/part/category/${row.pk}/`\n );\n\n if (row.starred) {\n html += makeIconBadge('fa-bell icon-green', '{% trans \"Subscribed category\" %}');\n }\n\n return html;\n }\n },\n {\n field: 'description',\n title: '{% trans \"Description\" %}',\n switchable: true,\n sortable: false,\n },\n {\n field: 'pathstring',\n title: '{% trans \"Path\" %}',\n switchable: !tree_view,\n visible: !tree_view,\n sortable: false,\n },\n {\n field: 'parts',\n title: '{% trans \"Parts\" %}',\n switchable: true,\n sortable: false,\n }\n ]\n });\n}\n\nfunction loadPartTestTemplateTable(table, options) {\n /*\n * Load PartTestTemplate table.\n */\n\n var params = options.params || {};\n\n var part = options.part || null;\n\n var filterListElement = options.filterList || '#filter-list-parttests';\n\n var filters = loadTableFilters('parttests');\n\n var original = {};\n\n for (var k in params) {\n original[k] = params[k];\n }\n\n setupFilterList('parttests', table, filterListElement);\n\n // Override the default values, or add new ones\n for (var key in params) {\n filters[key] = params[key];\n }\n\n table.inventreeTable({\n method: 'get',\n formatNoMatches: function() {\n return '{% trans \"No test templates matching query\" %}';\n },\n url: '{% url \"api-part-test-template-list\" %}',\n queryParams: filters,\n name: 'testtemplate',\n original: original,\n columns: [\n {\n field: 'pk',\n title: 'ID',\n visible: false,\n },\n {\n field: 'test_name',\n title: '{% trans \"Test Name\" %}',\n sortable: true,\n },\n {\n field: 'description',\n title: '{% trans \"Description\" %}',\n },\n {\n field: 'required',\n title: '{% trans \"Required\" %}',\n sortable: true,\n formatter: function(value) {\n return yesNoLabel(value);\n }\n },\n {\n field: 'requires_value',\n title: '{% trans \"Requires Value\" %}',\n 
formatter: function(value) {\n return yesNoLabel(value);\n }\n },\n {\n field: 'requires_attachment',\n title: '{% trans \"Requires Attachment\" %}',\n formatter: function(value) {\n return yesNoLabel(value);\n }\n },\n {\n field: 'buttons',\n formatter: function(value, row) {\n var pk = row.pk;\n\n if (row.part == part) {\n var html = `
`;\n\n html += makeIconButton('fa-edit icon-blue', 'button-test-edit', pk, '{% trans \"Edit test result\" %}');\n html += makeIconButton('fa-trash-alt icon-red', 'button-test-delete', pk, '{% trans \"Delete test result\" %}');\n\n html += `
`;\n\n return html;\n } else {\n var text = '{% trans \"This test is defined for a parent part\" %}';\n\n return renderLink(text, `/part/${row.part}/tests/`); \n }\n }\n }\n ],\n onPostBody: function() {\n\n table.find('.button-test-edit').click(function() {\n var pk = $(this).attr('pk');\n \n var url = `/api/part/test-template/${pk}/`;\n \n constructForm(url, {\n fields: {\n test_name: {},\n description: {},\n required: {},\n requires_value: {},\n requires_attachment: {},\n },\n title: '{% trans \"Edit Test Result Template\" %}',\n onSuccess: function() {\n table.bootstrapTable('refresh');\n },\n });\n });\n\n table.find('.button-test-delete').click(function() {\n var pk = $(this).attr('pk');\n \n var url = `/api/part/test-template/${pk}/`;\n \n constructForm(url, {\n method: 'DELETE',\n title: '{% trans \"Delete Test Result Template\" %}',\n onSuccess: function() {\n table.bootstrapTable('refresh');\n },\n });\n });\n }\n });\n}\n\n\nfunction loadPriceBreakTable(table, options) {\n /*\n * Load PriceBreak table.\n */\n\n var name = options.name || 'pricebreak';\n var human_name = options.human_name || 'price break';\n var linkedGraph = options.linkedGraph || null;\n var chart = null;\n\n table.inventreeTable({\n name: name,\n method: 'get',\n formatNoMatches: function() {\n return `{% trans \"No ${human_name} information found\" %}`;\n },\n queryParams: {part: options.part},\n url: options.url,\n onLoadSuccess: function(tableData) {\n if (linkedGraph) {\n // sort array\n tableData = tableData.sort((a, b) => (a.quantity - b.quantity));\n\n // split up for graph definition\n var graphLabels = Array.from(tableData, (x) => (x.quantity));\n var graphData = Array.from(tableData, (x) => (x.price));\n\n // destroy chart if exists\n if (chart) {\n chart.destroy();\n }\n chart = loadLineChart(linkedGraph,\n {\n labels: graphLabels,\n datasets: [\n {\n label: '{% trans \"Unit Price\" %}',\n data: graphData,\n backgroundColor: 'rgba(255, 206, 86, 0.2)',\n borderColor: 
'rgb(255, 206, 86)',\n stepped: true,\n fill: true,\n },\n ],\n }\n );\n }\n },\n columns: [\n {\n field: 'pk',\n title: 'ID',\n visible: false,\n switchable: false,\n },\n {\n field: 'quantity',\n title: '{% trans \"Quantity\" %}',\n sortable: true,\n },\n {\n field: 'price',\n title: '{% trans \"Price\" %}',\n sortable: true,\n formatter: function(value, row) {\n var html = value;\n \n html += `
`;\n\n html += makeIconButton('fa-edit icon-blue', `button-${name}-edit`, row.pk, `{% trans \"Edit ${human_name}\" %}`);\n html += makeIconButton('fa-trash-alt icon-red', `button-${name}-delete`, row.pk, `{% trans \"Delete ${human_name}\" %}`);\n \n html += `
`;\n \n return html;\n }\n },\n ]\n });\n}\n\nfunction loadLineChart(context, data) {\n return new Chart(context, {\n type: 'line',\n data: data,\n options: {\n responsive: true,\n maintainAspectRatio: false,\n plugins: {\n legend: {position: 'bottom'},\n }\n }\n });\n}\n\nfunction initPriceBreakSet(table, options) {\n\n var part_id = options.part_id;\n var pb_human_name = options.pb_human_name;\n var pb_url_slug = options.pb_url_slug;\n var pb_url = options.pb_url;\n var pb_new_btn = options.pb_new_btn;\n var pb_new_url = options.pb_new_url;\n\n var linkedGraph = options.linkedGraph || null;\n\n loadPriceBreakTable(\n table,\n {\n name: pb_url_slug,\n human_name: pb_human_name,\n url: pb_url,\n linkedGraph: linkedGraph,\n part: part_id,\n }\n );\n\n function reloadPriceBreakTable() {\n table.bootstrapTable('refresh');\n }\n\n pb_new_btn.click(function() {\n launchModalForm(pb_new_url,\n {\n success: reloadPriceBreakTable,\n data: {\n part: part_id,\n }\n }\n );\n });\n\n table.on('click', `.button-${pb_url_slug}-delete`, function() {\n var pk = $(this).attr('pk');\n\n launchModalForm(\n `/part/${pb_url_slug}/${pk}/delete/`,\n {\n success: reloadPriceBreakTable\n }\n );\n });\n\n table.on('click', `.button-${pb_url_slug}-edit`, function() {\n var pk = $(this).attr('pk');\n\n launchModalForm(\n `/part/${pb_url_slug}/${pk}/edit/`,\n {\n success: reloadPriceBreakTable\n }\n );\n });\n}\n\n\nfunction loadStockPricingChart(context, data) {\n return new Chart(context, {\n type: 'bar',\n data: data,\n options: {\n responsive: true,\n maintainAspectRatio: false,\n plugins: {legend: {position: 'bottom'}},\n scales: {\n y: {\n type: 'linear',\n position: 'left',\n grid: {display: false},\n title: {\n display: true,\n text: '{% trans \"Single Price\" %}'\n }\n },\n y1: {\n type: 'linear',\n position: 'right',\n grid: {display: false},\n titel: {\n display: true,\n text: '{% trans \"Quantity\" %}',\n position: 'right'\n }\n },\n y2: {\n type: 'linear',\n position: 'left',\n 
grid: {display: false},\n title: {\n display: true,\n text: '{% trans \"Single Price Difference\" %}'\n }\n }\n },\n }\n });\n}\n\n\nfunction loadBomChart(context, data) {\n return new Chart(context, {\n type: 'doughnut',\n data: data,\n options: {\n responsive: true,\n maintainAspectRatio: false,\n plugins: {\n legend: {\n position: 'bottom',\n },\n scales: {\n xAxes: [\n {\n beginAtZero: true,\n ticks: {\n autoSkip: false,\n }\n }\n ]\n }\n }\n }\n });\n}\n\n\nfunction loadSellPricingChart(context, data) {\n return new Chart(context, {\n type: 'line',\n data: data,\n options: {\n responsive: true,\n maintainAspectRatio: false,\n plugins: {\n legend: {\n position: 'bottom'\n }\n },\n scales: {\n y: {\n type: 'linear',\n position: 'left',\n grid: {\n display: false\n },\n title: {\n display: true,\n text: '{% trans \"Unit Price\" %}',\n }\n },\n y1: {\n type: 'linear',\n position: 'right',\n grid: {\n display: false\n },\n titel: {\n display: true,\n text: '{% trans \"Quantity\" %}',\n position: 'right'\n }\n },\n },\n }\n });\n}\n"},"new_file":{"kind":"string","value":"InvenTree/templates/js/translated/part.js"},"old_contents":{"kind":"string","value":"{% load i18n %}\n{% load inventree_extras %}\n\n/* globals\n Chart,\n constructForm,\n global_settings,\n imageHoverIcon,\n inventreeGet,\n inventreePut,\n launchModalForm,\n linkButtonsToSelection,\n loadTableFilters,\n makeIconBadge,\n makeIconButton,\n printPartLabels,\n renderLink,\n setFormGroupVisibility,\n setupFilterList,\n yesNoLabel,\n*/\n\n/* exported\n duplicatePart,\n editCategory,\n editPart,\n initPriceBreakSet,\n loadBomChart,\n loadParametricPartTable,\n loadPartCategoryTable,\n loadPartParameterTable,\n loadPartTable,\n loadPartTestTemplateTable,\n loadPartVariantTable,\n loadSellPricingChart,\n loadSimplePartTable,\n loadStockPricingChart,\n partStockLabel,\n toggleStar,\n*/\n\n/* Part API functions\n * Requires api.js to be loaded first\n */\n\nfunction partGroups() {\n\n return {\n attributes: 
{\n title: '{% trans \"Part Attributes\" %}',\n collapsible: true,\n },\n create: {\n title: '{% trans \"Part Creation Options\" %}',\n collapsible: true,\n },\n duplicate: {\n title: '{% trans \"Part Duplication Options\" %}',\n collapsible: true,\n },\n supplier: {\n title: '{% trans \"Supplier Options\" %}',\n collapsible: true,\n hidden: !global_settings.PART_PURCHASEABLE,\n }\n };\n}\n\n\n// Construct fieldset for part forms\nfunction partFields(options={}) {\n\n var fields = {\n category: {\n secondary: {\n title: '{% trans \"Add Part Category\" %}',\n fields: function() {\n var fields = categoryFields();\n\n return fields;\n }\n }\n },\n name: {},\n IPN: {},\n revision: {},\n description: {},\n variant_of: {},\n keywords: {\n icon: 'fa-key',\n },\n units: {},\n link: {\n icon: 'fa-link',\n },\n default_location: {\n },\n default_supplier: {\n filters: {\n part_detail: true,\n supplier_detail: true,\n }\n },\n default_expiry: {\n icon: 'fa-calendar-alt',\n },\n minimum_stock: {\n icon: 'fa-boxes',\n },\n component: {\n default: global_settings.PART_COMPONENT,\n group: 'attributes',\n },\n assembly: {\n default: global_settings.PART_ASSEMBLY,\n group: 'attributes',\n },\n is_template: {\n default: global_settings.PART_TEMPLATE,\n group: 'attributes',\n },\n trackable: {\n default: global_settings.PART_TRACKABLE,\n group: 'attributes',\n },\n purchaseable: {\n default: global_settings.PART_PURCHASEABLE,\n group: 'attributes',\n onEdit: function(value, name, field, options) {\n setFormGroupVisibility('supplier', value, options);\n }\n },\n salable: {\n default: global_settings.PART_SALABLE,\n group: 'attributes',\n },\n virtual: {\n default: global_settings.PART_VIRTUAL,\n group: 'attributes',\n },\n };\n\n // If editing a part, we can set the \"active\" status\n if (options.edit) {\n fields.active = {\n group: 'attributes'\n };\n }\n\n // Pop expiry field\n if (!global_settings.STOCK_ENABLE_EXPIRY) {\n delete fields['default_expiry'];\n }\n\n // Additional 
fields when \"creating\" a new part\n if (options.create) {\n\n // No supplier parts available yet\n delete fields['default_supplier'];\n\n if (global_settings.PART_CREATE_INITIAL) {\n\n fields.initial_stock = {\n type: 'boolean',\n label: '{% trans \"Create Initial Stock\" %}',\n help_text: '{% trans \"Create an initial stock item for this part\" %}',\n group: 'create',\n };\n\n fields.initial_stock_quantity = {\n type: 'decimal',\n value: 1,\n label: '{% trans \"Initial Stock Quantity\" %}',\n help_text: '{% trans \"Specify initial stock quantity for this part\" %}',\n group: 'create',\n };\n\n // TODO - Allow initial location of stock to be specified\n fields.initial_stock_location = {\n label: '{% trans \"Location\" %}',\n help_text: '{% trans \"Select destination stock location\" %}',\n type: 'related field',\n required: true,\n api_url: `/api/stock/location/`,\n model: 'stocklocation',\n group: 'create',\n };\n }\n\n fields.copy_category_parameters = {\n type: 'boolean',\n label: '{% trans \"Copy Category Parameters\" %}',\n help_text: '{% trans \"Copy parameter templates from selected part category\" %}',\n value: global_settings.PART_CATEGORY_PARAMETERS,\n group: 'create',\n };\n\n // Supplier options\n fields.add_supplier_info = {\n type: 'boolean',\n label: '{% trans \"Add Supplier Data\" %}',\n help_text: '{% trans \"Create initial supplier data for this part\" %}',\n group: 'supplier',\n };\n \n fields.supplier = {\n type: 'related field',\n model: 'company',\n label: '{% trans \"Supplier\" %}',\n help_text: '{% trans \"Select supplier\" %}',\n filters: {\n 'is_supplier': true,\n },\n api_url: '{% url \"api-company-list\" %}',\n group: 'supplier',\n };\n \n fields.SKU = {\n type: 'string',\n label: '{% trans \"SKU\" %}', \n help_text: '{% trans \"Supplier stock keeping unit\" %}',\n group: 'supplier',\n };\n \n fields.manufacturer = {\n type: 'related field',\n model: 'company',\n label: '{% trans \"Manufacturer\" %}',\n help_text: '{% trans \"Select 
manufacturer\" %}',\n filters: {\n 'is_manufacturer': true,\n },\n api_url: '{% url \"api-company-list\" %}',\n group: 'supplier',\n };\n \n fields.MPN = {\n type: 'string',\n label: '{% trans \"MPN\" %}',\n help_text: '{% trans \"Manufacturer Part Number\" %}',\n group: 'supplier',\n };\n\n }\n\n // Additional fields when \"duplicating\" a part\n if (options.duplicate) {\n\n fields.copy_from = {\n type: 'integer',\n hidden: true,\n value: options.duplicate,\n group: 'duplicate',\n },\n\n fields.copy_image = {\n type: 'boolean',\n label: '{% trans \"Copy Image\" %}',\n help_text: '{% trans \"Copy image from original part\" %}',\n value: true,\n group: 'duplicate',\n },\n\n fields.copy_bom = {\n type: 'boolean',\n label: '{% trans \"Copy BOM\" %}',\n help_text: '{% trans \"Copy bill of materials from original part\" %}',\n value: global_settings.PART_COPY_BOM,\n group: 'duplicate',\n };\n\n fields.copy_parameters = {\n type: 'boolean',\n label: '{% trans \"Copy Parameters\" %}',\n help_text: '{% trans \"Copy parameter data from original part\" %}',\n value: global_settings.PART_COPY_PARAMETERS,\n group: 'duplicate',\n };\n }\n\n return fields;\n}\n\n\nfunction categoryFields() {\n return {\n parent: {\n help_text: '{% trans \"Parent part category\" %}',\n },\n name: {},\n description: {},\n default_location: {},\n default_keywords: {\n icon: 'fa-key',\n }\n };\n}\n\n\n// Edit a PartCategory via the API\nfunction editCategory(pk) {\n\n var url = `/api/part/category/${pk}/`;\n\n var fields = categoryFields();\n\n constructForm(url, {\n fields: fields,\n title: '{% trans \"Edit Part Category\" %}',\n reload: true,\n });\n\n}\n\n\nfunction editPart(pk) {\n\n var url = `/api/part/${pk}/`;\n\n var fields = partFields({\n edit: true\n });\n\n // Filter supplied parts by the Part ID\n fields.default_supplier.filters.part = pk;\n\n var groups = partGroups({});\n\n constructForm(url, {\n fields: fields,\n groups: groups,\n title: '{% trans \"Edit Part\" %}',\n reload: true,\n 
successMessage: '{% trans \"Part edited\" %}',\n });\n}\n\n\n// Launch form to duplicate a part\nfunction duplicatePart(pk, options={}) {\n\n // First we need all the part information\n inventreeGet(`/api/part/${pk}/`, {}, {\n\n success: function(data) {\n \n var fields = partFields({\n duplicate: pk,\n });\n\n // Remove \"default_supplier\" field\n delete fields['default_supplier'];\n\n // If we are making a \"variant\" part\n if (options.variant) {\n\n // Override the \"variant_of\" field\n data.variant_of = pk;\n }\n \n constructForm('{% url \"api-part-list\" %}', {\n method: 'POST',\n fields: fields,\n groups: partGroups(),\n title: '{% trans \"Duplicate Part\" %}',\n data: data,\n onSuccess: function(data) {\n // Follow the new part\n location.href = `/part/${data.pk}/`;\n }\n });\n }\n });\n}\n\n\n/* Toggle the 'starred' status of a part.\n * Performs AJAX queries and updates the display on the button.\n * \n * options:\n * - button: ID of the button (default = '#part-star-icon')\n * - URL: API url of the object\n * - user: pk of the user\n */\nfunction toggleStar(options) {\n\n inventreeGet(options.url, {}, {\n success: function(response) {\n\n var starred = response.starred;\n\n inventreePut(\n options.url,\n {\n starred: !starred,\n },\n {\n method: 'PATCH',\n success: function(response) {\n if (response.starred) {\n $(options.button).removeClass('fa fa-bell-slash').addClass('fas fa-bell icon-green');\n $(options.button).attr('title', '{% trans \"You are subscribed to notifications for this item\" %}');\n\n showMessage('{% trans \"You have subscribed to notifications for this item\" %}', {\n style: 'success',\n });\n } else {\n $(options.button).removeClass('fas fa-bell icon-green').addClass('fa fa-bell-slash');\n $(options.button).attr('title', '{% trans \"Subscribe to notifications for this item\" %}');\n\n showMessage('{% trans \"You have unsubscribed to notifications for this item\" %}', {\n style: 'warning',\n });\n }\n }\n }\n );\n }\n 
});\n}\n\n\nfunction partStockLabel(part, options={}) {\n\n if (part.in_stock) {\n return `{% trans \"Stock\" %}: ${part.in_stock}`;\n } else {\n return `{% trans \"No Stock\" %}`;\n }\n}\n\n\nfunction makePartIcons(part) {\n /* Render a set of icons for the given part.\n */\n\n var html = '';\n\n if (part.trackable) {\n html += makeIconBadge('fa-directions', '{% trans \"Trackable part\" %}');\n }\n\n if (part.virtual) {\n html += makeIconBadge('fa-ghost', '{% trans \"Virtual part\" %}');\n }\n\n if (part.is_template) {\n html += makeIconBadge('fa-clone', '{% trans \"Template part\" %}');\n }\n\n if (part.assembly) {\n html += makeIconBadge('fa-tools', '{% trans \"Assembled part\" %}');\n }\n\n if (part.starred) {\n html += makeIconBadge('fa-bell icon-green', '{% trans \"Subscribed part\" %}');\n }\n\n if (part.salable) {\n html += makeIconBadge('fa-dollar-sign', '{% trans \"Salable part\" %}');\n }\n\n if (!part.active) {\n html += `{% trans \"Inactive\" %} `; \n }\n\n return html;\n\n}\n\n\nfunction loadPartVariantTable(table, partId, options={}) {\n /* Load part variant table\n */\n\n var params = options.params || {};\n\n params.ancestor = partId;\n\n // Load filters\n var filters = loadTableFilters('variants');\n\n for (var key in params) {\n filters[key] = params[key];\n }\n\n setupFilterList('variants', $(table));\n\n var cols = [\n {\n field: 'pk',\n title: 'ID',\n visible: false,\n switchable: false,\n },\n {\n field: 'name',\n title: '{% trans \"Name\" %}',\n switchable: false,\n formatter: function(value, row) {\n var html = '';\n\n var name = '';\n\n if (row.IPN) {\n name += row.IPN;\n name += ' | ';\n }\n\n name += value;\n\n if (row.revision) {\n name += ' | ';\n name += row.revision;\n }\n\n if (row.is_template) {\n name = '' + name + '';\n }\n\n html += imageHoverIcon(row.thumbnail);\n html += renderLink(name, `/part/${row.pk}/`);\n\n if (row.trackable) {\n html += makeIconBadge('fa-directions', '{% trans \"Trackable part\" %}');\n }\n\n if 
(row.virtual) {\n html += makeIconBadge('fa-ghost', '{% trans \"Virtual part\" %}');\n }\n\n if (row.is_template) {\n html += makeIconBadge('fa-clone', '{% trans \"Template part\" %}');\n }\n\n if (row.assembly) {\n html += makeIconBadge('fa-tools', '{% trans \"Assembled part\" %}');\n }\n\n if (!row.active) {\n html += `{% trans \"Inactive\" %}`; \n }\n\n return html;\n },\n },\n {\n field: 'IPN',\n title: '{% trans \"IPN\" %}',\n },\n {\n field: 'revision',\n title: '{% trans \"Revision\" %}',\n },\n {\n field: 'description',\n title: '{% trans \"Description\" %}',\n },\n {\n field: 'in_stock',\n title: '{% trans \"Stock\" %}',\n formatter: function(value, row) {\n return renderLink(value, `/part/${row.pk}/?display=part-stock`);\n }\n }\n ];\n\n table.inventreeTable({\n url: '{% url \"api-part-list\" %}',\n name: 'partvariants',\n showColumns: true,\n original: params,\n queryParams: filters,\n formatNoMatches: function() {\n return '{% trans \"No variants found\" %}';\n },\n columns: cols,\n treeEnable: true,\n rootParentId: partId,\n parentIdField: 'variant_of',\n idField: 'pk',\n uniqueId: 'pk',\n treeShowField: 'name',\n sortable: true,\n search: true,\n onPostBody: function() {\n table.treegrid({\n treeColumn: 0,\n });\n\n table.treegrid('collapseAll');\n }\n });\n}\n\n\nfunction loadSimplePartTable(table, url, options={}) {\n\n options.disableFilters = true;\n\n loadPartTable(table, url, options);\n}\n\n\nfunction loadPartParameterTable(table, url, options) {\n\n var params = options.params || {};\n\n // Load filters\n var filters = loadTableFilters('part-parameters');\n\n for (var key in params) {\n filters[key] = params[key];\n }\n\n // setupFilterList(\"#part-parameters\", $(table));\n\n $(table).inventreeTable({\n url: url,\n original: params,\n queryParams: filters,\n name: 'partparameters',\n groupBy: false,\n formatNoMatches: function() {\n return '{% trans \"No parameters found\" %}';\n },\n columns: [\n {\n checkbox: true,\n switchable: false,\n 
visible: true,\n },\n {\n field: 'name',\n title: '{% trans \"Name\" %}',\n switchable: false,\n sortable: true,\n formatter: function(value, row) {\n return row.template_detail.name;\n }\n },\n {\n field: 'data',\n title: '{% trans \"Value\" %}',\n switchable: false,\n sortable: true,\n },\n {\n field: 'units',\n title: '{% trans \"Units\" %}',\n switchable: true,\n sortable: true,\n formatter: function(value, row) {\n return row.template_detail.units;\n }\n },\n {\n field: 'actions',\n title: '',\n switchable: false,\n sortable: false,\n formatter: function(value, row) {\n var pk = row.pk;\n\n var html = `
`;\n\n html += makeIconButton('fa-edit icon-blue', 'button-parameter-edit', pk, '{% trans \"Edit parameter\" %}');\n html += makeIconButton('fa-trash-alt icon-red', 'button-parameter-delete', pk, '{% trans \"Delete parameter\" %}');\n\n html += `
`;\n\n return html;\n }\n }\n ],\n onPostBody: function() {\n // Setup button callbacks\n $(table).find('.button-parameter-edit').click(function() {\n var pk = $(this).attr('pk');\n\n constructForm(`/api/part/parameter/${pk}/`, {\n fields: {\n data: {},\n },\n title: '{% trans \"Edit Parameter\" %}',\n onSuccess: function() {\n $(table).bootstrapTable('refresh');\n }\n });\n });\n\n $(table).find('.button-parameter-delete').click(function() {\n var pk = $(this).attr('pk');\n\n constructForm(`/api/part/parameter/${pk}/`, {\n method: 'DELETE',\n title: '{% trans \"Delete Parameter\" %}',\n onSuccess: function() {\n $(table).bootstrapTable('refresh');\n }\n });\n });\n }\n });\n}\n\n\nfunction loadRelatedPartsTable(table, part_id, options={}) {\n /*\n * Load table of \"related\" parts\n */\n\n options.params = options.params || {};\n\n options.params.part = part_id;\n\n var filters = {};\n\n for (var key in options.params) {\n filters[key] = options.params[key];\n }\n\n setupFilterList('related', $(table), options.filterTarget);\n\n function getPart(row) {\n if (row.part_1 == part_id) {\n return row.part_2_detail;\n } else {\n return row.part_1_detail;\n }\n }\n\n var columns = [\n {\n field: 'name',\n title: '{% trans \"Part\" %}',\n switchable: false,\n formatter: function(value, row) {\n\n var part = getPart(row);\n\n var html = imageHoverIcon(part.thumbnail) + renderLink(part.full_name, `/part/${part.pk}/`)\n\n html += makePartIcons(part);\n\n return html;\n }\n },\n {\n field: 'description',\n title: '{% trans \"Description\" %}',\n formatter: function(value, row) {\n return getPart(row).description;\n }\n },\n {\n field: 'actions',\n title: '',\n switchable: false,\n formatter: function(value, row) {\n \n var html = `
`;\n\n html += makeIconButton('fa-trash-alt icon-red', 'button-related-delete', row.pk, '{% trans \"Delete part relationship\" %}');\n\n html += '
';\n\n return html;\n }\n }\n ];\n\n $(table).inventreeTable({\n url: '{% url \"api-part-related-list\" %}',\n groupBy: false,\n name: 'related',\n original: options.params,\n queryParams: filters,\n columns: columns,\n showColumns: false,\n search: true,\n onPostBody: function() {\n $(table).find('.button-related-delete').click(function() {\n var pk = $(this).attr('pk');\n\n constructForm(`/api/part/related/${pk}/`, {\n method: 'DELETE',\n title: '{% trans \"Delete Part Relationship\" %}',\n onSuccess: function() {\n $(table).bootstrapTable('refresh');\n }\n });\n });\n },\n });\n}\n\n\nfunction loadParametricPartTable(table, options={}) {\n /* Load parametric table for part parameters\n * \n * Args:\n * - table: HTML reference to the table\n * - table_headers: Unique parameters found in category\n * - table_data: Parameters data\n */\n\n var table_headers = options.headers;\n var table_data = options.data;\n\n var columns = [];\n\n for (var header of table_headers) {\n if (header === 'part') {\n columns.push({\n field: header,\n title: '{% trans \"Part\" %}',\n sortable: true,\n sortName: 'name',\n formatter: function(value, row) {\n\n var name = '';\n\n if (row.IPN) {\n name += row.IPN + ' | ' + row.name;\n } else {\n name += row.name;\n }\n\n return renderLink(name, '/part/' + row.pk + '/'); \n }\n });\n } else if (header === 'description') {\n columns.push({\n field: header,\n title: '{% trans \"Description\" %}',\n sortable: true,\n });\n } else {\n columns.push({\n field: header,\n title: header,\n sortable: true,\n filterControl: 'input',\n });\n }\n }\n\n $(table).inventreeTable({\n sortName: 'part',\n queryParams: table_headers,\n groupBy: false,\n name: options.name || 'parametric',\n formatNoMatches: function() {\n return '{% trans \"No parts found\" %}';\n },\n columns: columns,\n showColumns: true,\n data: table_data,\n filterControl: true,\n });\n}\n\n\nfunction partGridTile(part) {\n // Generate a \"grid tile\" view for a particular part\n\n // Rows 
for table view\n var rows = '';\n\n var stock = `${part.in_stock}`;\n\n if (!part.in_stock) {\n stock = `{% trans \"No Stock\" %}`;\n }\n\n rows += `{% trans \"Stock\" %}${stock}`;\n\n if (part.on_order) {\n rows += `{$ trans \"On Order\" %}${part.on_order}`;\n }\n\n if (part.building) {\n rows += `{% trans \"Building\" %}${part.building}`;\n }\n\n var html = `\n \n
\n
\n
\n \n ${part.full_name}\n \n ${makePartIcons(part)}\n
\n ${part.description}\n
\n
\n
\n
\n \n
\n
\n \n ${rows}\n
\n
\n
\n
\n
\n
\n `;\n\n return html;\n}\n\n\nfunction loadPartTable(table, url, options={}) {\n /* Load part listing data into specified table.\n * \n * Args:\n * - table: HTML reference to the table\n * - url: Base URL for API query\n * - options: object containing following (optional) fields\n * checkbox: Show the checkbox column\n * query: extra query params for API request\n * buttons: If provided, link buttons to selection status of this table\n * disableFilters: If true, disable custom filters\n * actions: Provide a callback function to construct an \"actions\" column\n */\n\n // Ensure category detail is included\n options.params['category_detail'] = true;\n\n var params = options.params || {};\n\n var filters = {};\n\n var col = null;\n\n if (!options.disableFilters) {\n filters = loadTableFilters('parts');\n }\n\n for (var key in params) {\n filters[key] = params[key];\n }\n\n setupFilterList('parts', $(table), options.filterTarget || null);\n\n var columns = [\n {\n field: 'pk',\n title: 'ID',\n visible: false,\n switchable: false,\n searchable: false,\n }\n ];\n\n if (options.checkbox) {\n columns.push({\n checkbox: true,\n title: '{% trans \"Select\" %}',\n searchable: false,\n switchable: false,\n });\n }\n\n col = {\n field: 'IPN',\n title: '{% trans \"IPN\" %}',\n };\n\n if (!options.params.ordering) {\n col['sortable'] = true;\n }\n\n columns.push(col);\n\n col = {\n field: 'name',\n title: '{% trans \"Part\" %}',\n switchable: false,\n formatter: function(value, row) {\n\n var name = row.full_name;\n\n var display = imageHoverIcon(row.thumbnail) + renderLink(name, `/part/${row.pk}/`);\n\n display += makePartIcons(row);\n\n return display; \n }\n };\n\n if (!options.params.ordering) {\n col['sortable'] = true;\n }\n\n columns.push(col);\n\n columns.push({\n field: 'description',\n title: '{% trans \"Description\" %}',\n formatter: function(value, row) {\n\n if (row.is_template) {\n value = `${value}`;\n }\n\n return value;\n }\n });\n\n col = {\n sortName: 
'category',\n field: 'category_detail',\n title: '{% trans \"Category\" %}',\n formatter: function(value, row) {\n if (row.category) {\n return renderLink(value.pathstring, `/part/category/${row.category}/`);\n } else {\n return '{% trans \"No category\" %}';\n }\n } \n };\n\n if (!options.params.ordering) {\n col['sortable'] = true;\n }\n\n columns.push(col);\n\n col = {\n field: 'in_stock',\n title: '{% trans \"Stock\" %}',\n searchable: false,\n formatter: function(value, row) { \n var link = '?display=part-stock';\n\n if (value) {\n // There IS stock available for this part\n\n // Is stock \"low\" (below the 'minimum_stock' quantity)?\n if (row.minimum_stock && row.minimum_stock > value) {\n value += `{% trans \"Low stock\" %}`;\n }\n\n } else if (row.on_order) {\n // There is no stock available, but stock is on order\n value = `0{% trans \"On Order\" %}: ${row.on_order}`;\n link = '?display=purchase-orders';\n } else if (row.building) {\n // There is no stock available, but stock is being built\n value = `0{% trans \"Building\" %}: ${row.building}`;\n link = '?display=build-orders';\n } else {\n // There is no stock available\n value = `0{% trans \"No Stock\" %}`;\n }\n\n return renderLink(value, `/part/${row.pk}/${link}`);\n }\n };\n\n if (!options.params.ordering) {\n col['sortable'] = true;\n }\n\n columns.push(col);\n\n columns.push({\n field: 'link',\n title: '{% trans \"Link\" %}',\n formatter: function(value) {\n return renderLink(\n value, value,\n {\n max_length: 32,\n remove_http: true,\n }\n );\n }\n });\n\n // Push an \"actions\" column\n if (options.actions) {\n columns.push({\n field: 'actions',\n title: '',\n switchable: false,\n visible: true,\n searchable: false,\n sortable: false,\n formatter: function(value, row) {\n return options.actions(value, row);\n }\n });\n }\n\n var grid_view = options.gridView && inventreeLoad('part-grid-view') == 1;\n\n $(table).inventreeTable({\n url: url,\n method: 'get',\n queryParams: filters,\n groupBy: 
false,\n name: options.name || 'part',\n original: params,\n sidePagination: 'server',\n pagination: 'true',\n formatNoMatches: function() {\n return '{% trans \"No parts found\" %}';\n },\n columns: columns,\n showColumns: true,\n showCustomView: grid_view,\n showCustomViewButton: false,\n onPostBody: function() {\n grid_view = inventreeLoad('part-grid-view') == 1;\n if (grid_view) {\n $('#view-part-list').removeClass('btn-secondary').addClass('btn-outline-secondary');\n $('#view-part-grid').removeClass('btn-outline-secondary').addClass('btn-secondary');\n } else {\n $('#view-part-grid').removeClass('btn-secondary').addClass('btn-outline-secondary');\n $('#view-part-list').removeClass('btn-outline-secondary').addClass('btn-secondary');\n }\n\n if (options.onPostBody) {\n options.onPostBody();\n }\n },\n buttons: options.gridView ? [\n {\n icon: 'fas fa-bars',\n attributes: {\n title: '{% trans \"Display as list\" %}',\n id: 'view-part-list',\n },\n event: () => {\n inventreeSave('part-grid-view', 0);\n $(table).bootstrapTable(\n 'refreshOptions',\n {\n showCustomView: false,\n }\n );\n }\n },\n {\n icon: 'fas fa-th',\n attributes: {\n title: '{% trans \"Display as grid\" %}',\n id: 'view-part-grid',\n },\n event: () => {\n inventreeSave('part-grid-view', 1);\n $(table).bootstrapTable(\n 'refreshOptions',\n {\n showCustomView: true,\n }\n );\n }\n }\n ] : [],\n customView: function(data) {\n\n var html = '';\n\n html = `
`;\n\n data.forEach(function(row, index) {\n \n // Force a new row every 5 columns\n if ((index > 0) && (index % 5 == 0) && (index < data.length)) {\n html += `
`;\n }\n\n html += partGridTile(row);\n });\n\n html += `
`;\n\n return html;\n }\n });\n \n if (options.buttons) {\n linkButtonsToSelection($(table), options.buttons);\n }\n\n /* Button callbacks for part table buttons */\n\n $('#multi-part-order').click(function() {\n var selections = $(table).bootstrapTable('getSelections');\n\n var parts = [];\n\n selections.forEach(function(item) {\n parts.push(item.pk);\n });\n\n launchModalForm('/order/purchase-order/order-parts/', {\n data: {\n parts: parts,\n },\n });\n });\n\n $('#multi-part-category').click(function() {\n var selections = $(table).bootstrapTable('getSelections');\n\n var parts = [];\n\n selections.forEach(function(item) {\n parts.push(item.pk);\n });\n\n launchModalForm('/part/set-category/', {\n data: {\n parts: parts,\n },\n reload: true,\n });\n });\n\n $('#multi-part-print-label').click(function() {\n var selections = $(table).bootstrapTable('getSelections');\n\n var items = [];\n\n selections.forEach(function(item) {\n items.push(item.pk);\n });\n\n printPartLabels(items);\n });\n\n $('#multi-part-export').click(function() {\n var selections = $(table).bootstrapTable('getSelections');\n\n var parts = '';\n\n selections.forEach(function(item) {\n parts += item.pk;\n parts += ',';\n });\n\n location.href = '/part/export/?parts=' + parts;\n });\n}\n\n\n/*\n * Display a table of part categories\n */\nfunction loadPartCategoryTable(table, options) {\n\n var params = options.params || {};\n\n var filterListElement = options.filterList || '#filter-list-category';\n\n var filters = {};\n\n var filterKey = options.filterKey || options.name || 'category';\n\n if (!options.disableFilters) {\n filters = loadTableFilters(filterKey);\n }\n\n \n var tree_view = options.allowTreeView && inventreeLoad('category-tree-view') == 1;\n\n if (tree_view) {\n params.cascade = true; \n }\n\n var original = {};\n\n for (var key in params) {\n original[key] = params[key];\n filters[key] = params[key];\n }\n\n setupFilterList(filterKey, table, filterListElement);\n\n 
table.inventreeTable({\n treeEnable: tree_view,\n rootParentId: tree_view ? options.params.parent : null,\n uniqueId: 'pk',\n idField: 'pk',\n treeShowField: 'name',\n parentIdField: tree_view ? 'parent' : null,\n method: 'get',\n url: options.url || '{% url \"api-part-category-list\" %}',\n queryParams: filters,\n disablePagination: tree_view,\n sidePagination: tree_view ? 'client' : 'server',\n serverSort: !tree_view, \n search: !tree_view,\n name: 'category',\n original: original,\n showColumns: true,\n buttons: options.allowTreeView ? [\n {\n icon: 'fas fa-bars',\n attributes: {\n title: '{% trans \"Display as list\" %}',\n id: 'view-category-list',\n },\n event: () => {\n inventreeSave('category-tree-view', 0);\n table.bootstrapTable(\n 'refreshOptions',\n {\n treeEnable: false,\n serverSort: true,\n search: true,\n pagination: true,\n }\n );\n }\n },\n {\n icon: 'fas fa-sitemap',\n attributes: {\n title: '{% trans \"Display as tree\" %}',\n id: 'view-category-tree',\n },\n event: () => {\n inventreeSave('category-tree-view', 1);\n table.bootstrapTable(\n 'refreshOptions',\n {\n treeEnable: true,\n serverSort: false,\n search: false,\n pagination: false,\n }\n );\n }\n }\n ] : [],\n onPostBody: function() {\n\n if (options.allowTreeView) {\n\n tree_view = inventreeLoad('category-tree-view') == 1;\n\n if (tree_view) {\n\n $('#view-category-list').removeClass('btn-secondary').addClass('btn-outline-secondary');\n $('#view-category-tree').removeClass('btn-outline-secondary').addClass('btn-secondary');\n \n table.treegrid({\n treeColumn: 0,\n onChange: function() {\n table.bootstrapTable('resetView');\n },\n onExpand: function() {\n \n }\n });\n } else {\n $('#view-category-tree').removeClass('btn-secondary').addClass('btn-outline-secondary');\n $('#view-category-list').removeClass('btn-outline-secondary').addClass('btn-secondary');\n }\n }\n },\n columns: [\n {\n checkbox: true,\n title: '{% trans \"Select\" %}',\n searchable: false,\n switchable: false,\n 
visible: false,\n },\n {\n field: 'name',\n title: '{% trans \"Name\" %}',\n switchable: true,\n sortable: true,\n formatter: function(value, row) {\n\n var html = renderLink(\n value,\n `/part/category/${row.pk}/`\n );\n\n if (row.starred) {\n html += makeIconBadge('fa-bell icon-green', '{% trans \"Subscribed category\" %}');\n }\n\n return html;\n }\n },\n {\n field: 'description',\n title: '{% trans \"Description\" %}',\n switchable: true,\n sortable: false,\n },\n {\n field: 'pathstring',\n title: '{% trans \"Path\" %}',\n switchable: !tree_view,\n visible: !tree_view,\n sortable: false,\n },\n {\n field: 'parts',\n title: '{% trans \"Parts\" %}',\n switchable: true,\n sortable: false,\n }\n ]\n });\n}\n\nfunction loadPartTestTemplateTable(table, options) {\n /*\n * Load PartTestTemplate table.\n */\n\n var params = options.params || {};\n\n var part = options.part || null;\n\n var filterListElement = options.filterList || '#filter-list-parttests';\n\n var filters = loadTableFilters('parttests');\n\n var original = {};\n\n for (var k in params) {\n original[k] = params[k];\n }\n\n setupFilterList('parttests', table, filterListElement);\n\n // Override the default values, or add new ones\n for (var key in params) {\n filters[key] = params[key];\n }\n\n table.inventreeTable({\n method: 'get',\n formatNoMatches: function() {\n return '{% trans \"No test templates matching query\" %}';\n },\n url: '{% url \"api-part-test-template-list\" %}',\n queryParams: filters,\n name: 'testtemplate',\n original: original,\n columns: [\n {\n field: 'pk',\n title: 'ID',\n visible: false,\n },\n {\n field: 'test_name',\n title: '{% trans \"Test Name\" %}',\n sortable: true,\n },\n {\n field: 'description',\n title: '{% trans \"Description\" %}',\n },\n {\n field: 'required',\n title: '{% trans \"Required\" %}',\n sortable: true,\n formatter: function(value) {\n return yesNoLabel(value);\n }\n },\n {\n field: 'requires_value',\n title: '{% trans \"Requires Value\" %}',\n 
formatter: function(value) {\n return yesNoLabel(value);\n }\n },\n {\n field: 'requires_attachment',\n title: '{% trans \"Requires Attachment\" %}',\n formatter: function(value) {\n return yesNoLabel(value);\n }\n },\n {\n field: 'buttons',\n formatter: function(value, row) {\n var pk = row.pk;\n\n if (row.part == part) {\n var html = `
`;\n\n html += makeIconButton('fa-edit icon-blue', 'button-test-edit', pk, '{% trans \"Edit test result\" %}');\n html += makeIconButton('fa-trash-alt icon-red', 'button-test-delete', pk, '{% trans \"Delete test result\" %}');\n\n html += `
`;\n\n return html;\n } else {\n var text = '{% trans \"This test is defined for a parent part\" %}';\n\n return renderLink(text, `/part/${row.part}/tests/`); \n }\n }\n }\n ],\n onPostBody: function() {\n\n table.find('.button-test-edit').click(function() {\n var pk = $(this).attr('pk');\n \n var url = `/api/part/test-template/${pk}/`;\n \n constructForm(url, {\n fields: {\n test_name: {},\n description: {},\n required: {},\n requires_value: {},\n requires_attachment: {},\n },\n title: '{% trans \"Edit Test Result Template\" %}',\n onSuccess: function() {\n table.bootstrapTable('refresh');\n },\n });\n });\n\n table.find('.button-test-delete').click(function() {\n var pk = $(this).attr('pk');\n \n var url = `/api/part/test-template/${pk}/`;\n \n constructForm(url, {\n method: 'DELETE',\n title: '{% trans \"Delete Test Result Template\" %}',\n onSuccess: function() {\n table.bootstrapTable('refresh');\n },\n });\n });\n }\n });\n}\n\n\nfunction loadPriceBreakTable(table, options) {\n /*\n * Load PriceBreak table.\n */\n\n var name = options.name || 'pricebreak';\n var human_name = options.human_name || 'price break';\n var linkedGraph = options.linkedGraph || null;\n var chart = null;\n\n table.inventreeTable({\n name: name,\n method: 'get',\n formatNoMatches: function() {\n return `{% trans \"No ${human_name} information found\" %}`;\n },\n queryParams: {part: options.part},\n url: options.url,\n onLoadSuccess: function(tableData) {\n if (linkedGraph) {\n // sort array\n tableData = tableData.sort((a, b) => (a.quantity - b.quantity));\n\n // split up for graph definition\n var graphLabels = Array.from(tableData, (x) => (x.quantity));\n var graphData = Array.from(tableData, (x) => (x.price));\n\n // destroy chart if exists\n if (chart) {\n chart.destroy();\n }\n chart = loadLineChart(linkedGraph,\n {\n labels: graphLabels,\n datasets: [\n {\n label: '{% trans \"Unit Price\" %}',\n data: graphData,\n backgroundColor: 'rgba(255, 206, 86, 0.2)',\n borderColor: 
'rgb(255, 206, 86)',\n stepped: true,\n fill: true,\n },\n ],\n }\n );\n }\n },\n columns: [\n {\n field: 'pk',\n title: 'ID',\n visible: false,\n switchable: false,\n },\n {\n field: 'quantity',\n title: '{% trans \"Quantity\" %}',\n sortable: true,\n },\n {\n field: 'price',\n title: '{% trans \"Price\" %}',\n sortable: true,\n formatter: function(value, row) {\n var html = value;\n \n html += `
`;\n\n html += makeIconButton('fa-edit icon-blue', `button-${name}-edit`, row.pk, `{% trans \"Edit ${human_name}\" %}`);\n html += makeIconButton('fa-trash-alt icon-red', `button-${name}-delete`, row.pk, `{% trans \"Delete ${human_name}\" %}`);\n \n html += `
`;\n \n return html;\n }\n },\n ]\n });\n}\n\nfunction loadLineChart(context, data) {\n return new Chart(context, {\n type: 'line',\n data: data,\n options: {\n responsive: true,\n maintainAspectRatio: false,\n plugins: {\n legend: {position: 'bottom'},\n }\n }\n });\n}\n\nfunction initPriceBreakSet(table, options) {\n\n var part_id = options.part_id;\n var pb_human_name = options.pb_human_name;\n var pb_url_slug = options.pb_url_slug;\n var pb_url = options.pb_url;\n var pb_new_btn = options.pb_new_btn;\n var pb_new_url = options.pb_new_url;\n\n var linkedGraph = options.linkedGraph || null;\n\n loadPriceBreakTable(\n table,\n {\n name: pb_url_slug,\n human_name: pb_human_name,\n url: pb_url,\n linkedGraph: linkedGraph,\n part: part_id,\n }\n );\n\n function reloadPriceBreakTable() {\n table.bootstrapTable('refresh');\n }\n\n pb_new_btn.click(function() {\n launchModalForm(pb_new_url,\n {\n success: reloadPriceBreakTable,\n data: {\n part: part_id,\n }\n }\n );\n });\n\n table.on('click', `.button-${pb_url_slug}-delete`, function() {\n var pk = $(this).attr('pk');\n\n launchModalForm(\n `/part/${pb_url_slug}/${pk}/delete/`,\n {\n success: reloadPriceBreakTable\n }\n );\n });\n\n table.on('click', `.button-${pb_url_slug}-edit`, function() {\n var pk = $(this).attr('pk');\n\n launchModalForm(\n `/part/${pb_url_slug}/${pk}/edit/`,\n {\n success: reloadPriceBreakTable\n }\n );\n });\n}\n\n\nfunction loadStockPricingChart(context, data) {\n return new Chart(context, {\n type: 'bar',\n data: data,\n options: {\n responsive: true,\n maintainAspectRatio: false,\n plugins: {legend: {position: 'bottom'}},\n scales: {\n y: {\n type: 'linear',\n position: 'left',\n grid: {display: false},\n title: {\n display: true,\n text: '{% trans \"Single Price\" %}'\n }\n },\n y1: {\n type: 'linear',\n position: 'right',\n grid: {display: false},\n titel: {\n display: true,\n text: '{% trans \"Quantity\" %}',\n position: 'right'\n }\n },\n y2: {\n type: 'linear',\n position: 'left',\n 
grid: {display: false},\n title: {\n display: true,\n text: '{% trans \"Single Price Difference\" %}'\n }\n }\n },\n }\n });\n}\n\n\nfunction loadBomChart(context, data) {\n return new Chart(context, {\n type: 'doughnut',\n data: data,\n options: {\n responsive: true,\n maintainAspectRatio: false,\n plugins: {\n legend: {\n position: 'bottom',\n },\n scales: {\n xAxes: [\n {\n beginAtZero: true,\n ticks: {\n autoSkip: false,\n }\n }\n ]\n }\n }\n }\n });\n}\n\n\nfunction loadSellPricingChart(context, data) {\n return new Chart(context, {\n type: 'line',\n data: data,\n options: {\n responsive: true,\n maintainAspectRatio: false,\n plugins: {\n legend: {\n position: 'bottom'\n }\n },\n scales: {\n y: {\n type: 'linear',\n position: 'left',\n grid: {\n display: false\n },\n title: {\n display: true,\n text: '{% trans \"Unit Price\" %}',\n }\n },\n y1: {\n type: 'linear',\n position: 'right',\n grid: {\n display: false\n },\n titel: {\n display: true,\n text: '{% trans \"Quantity\" %}',\n position: 'right'\n }\n },\n },\n }\n });\n}\n"},"message":{"kind":"string","value":"javascript linting\n"},"old_file":{"kind":"string","value":"InvenTree/templates/js/translated/part.js"},"subject":{"kind":"string","value":"javascript linting"},"git_diff":{"kind":"string","value":"nvenTree/templates/js/translated/part.js\n loadPartTable,\n loadPartTestTemplateTable,\n loadPartVariantTable,\n loadRelatedPartsTable,\n loadSellPricingChart,\n loadSimplePartTable,\n loadStockPricingChart,\n \n var part = getPart(row);\n \n var html = imageHoverIcon(part.thumbnail) + renderLink(part.full_name, `/part/${part.pk}/`)\n var html = imageHoverIcon(part.thumbnail) + renderLink(part.full_name, `/part/${part.pk}/`);\n \n html += makePartIcons(part);\n 
"}}},{"rowIdx":2055,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"373d7f724d67d04990ef583e3647a3c4d357ad21"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"mrkcsc/android-spanner,mrkcsc/android-spanner"},"new_contents":{"kind":"string","value":"package com.miguelgaeta.spanner;\n\nimport android.text.SpannableString;\nimport android.text.Spanned;\nimport android.text.style.CharacterStyle;\nimport android.util.Log;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.List;\n\n/**\n * Created by Miguel Gaeta on 4/20/16.\n *\n * A powerful spannable string builder that supports arbitrary substitutions.\n */\npublic class Spanner {\n\n private String sourceString = \"\";\n private final List matchStrategies = new ArrayList<>();\n private final List replacements = new ArrayList<>();\n\n public Spanner(final String sourceString) {\n this.sourceString = sourceString;\n }\n\n @SuppressWarnings(\"unused\")\n public Spanner addReplacementStrategy(final OnMatchListener onMatchListener, final String start) {\n return addReplacementStrategy(onMatchListener, start, null);\n }\n\n public Spanner addReplacementStrategy(final OnMatchListener onMatchListener, final String start, final String end) {\n return addReplacementStrategy(onMatchListener, start, end, true);\n }\n\n public Spanner addReplacementStrategy(final OnMatchListener onMatchListener, final String start, final String end, boolean endRequired) {\n return addReplacementStrategy(onMatchListener, start, end, endRequired, false);\n }\n\n public Spanner addReplacementStrategy(final OnMatchListener onMatchListener, final String start, final String end, final boolean endRequired, final boolean endWithWhitespaceOrEOL) {\n matchStrategies.add(new MatchStrategy(onMatchListener, start, 
end, endRequired, endWithWhitespaceOrEOL));\n\n return this;\n }\n\n @SuppressWarnings(\"unused\")\n public Spanner addMarkdownStrategy() {\n addMarkdownBoldStrategy();\n\n addReplacementStrategy(new OnMatchListener() {\n @Override\n public Replacement call(String match) {\n return new Replacement(match, SpanHelpers.createItalicSpan());\n }\n }, \"*\", \"*\");\n\n addReplacementStrategy(new OnMatchListener() {\n @Override\n public Replacement call(String match) {\n return new Replacement(match, SpanHelpers.createStrikethroughSpan());\n }\n }, \"~~\", \"~~\");\n\n addReplacementStrategy(new OnMatchListener() {\n @Override\n public Replacement call(String match) {\n return new Replacement(match, SpanHelpers.createUnderlineSpan());\n }\n }, \"__\", \"__\");\n\n addReplacementStrategy(new OnMatchListener() {\n @Override\n public Replacement call(String match) {\n return new Replacement(match, SpanHelpers.createItalicSpan());\n }\n }, \"_\", \"_\", true, true);\n\n return this;\n }\n\n public Spanner addMarkdownBoldStrategy() {\n addReplacementStrategy(new OnMatchListener() {\n @Override\n public Replacement call(String match) {\n return new Replacement(match, SpanHelpers.createBoldItalicSpan());\n }\n }, \"***\", \"***\");\n\n addReplacementStrategy(new OnMatchListener() {\n @Override\n public Replacement call(String match) {\n return new Replacement(match, SpanHelpers.createBoldSpan());\n }\n }, \"**\", \"**\");\n\n return this;\n }\n\n public SpannableString toSpannableString() {\n\n for (final MatchStrategy matchStrategy : matchStrategies) {\n int startIndex = 0;\n\n do {\n startIndex = sourceString.indexOf(matchStrategy.start, startIndex);\n\n if (startIndex != -1) {\n final int startIndexOffset = matchStrategy.start.length();\n\n if (matchStrategy.end == null) {\n\n int endIndex = startIndex + startIndexOffset;\n final Replacement replacement = matchStrategy.onMatchListener.call(matchStrategy.start);\n\n startIndex = computeStartIndexWithSpans(startIndex, 
startIndexOffset, endIndex, replacement);\n\n } else {\n\n int endIndex = sourceString.indexOf(matchStrategy.end, startIndex + startIndexOffset);\n final boolean isEOLMatch = endIndex == -1 && !matchStrategy.endRequired;\n\n if (isEOLMatch) {\n endIndex = sourceString.length();\n }\n\n if (matchStrategy.endWithWhitespaceOrEOL && endIndex != -1) {\n\n if (endIndex != (sourceString.length() - 1) && !Character.isWhitespace(sourceString.charAt(endIndex + 1))) {\n endIndex = -1;\n }\n }\n\n if (endIndex != -1) {\n\n final String match = sourceString.substring(startIndex + startIndexOffset, endIndex);\n final Replacement replacement = matchStrategy.onMatchListener.call(match);\n\n if (!isEOLMatch) {\n endIndex += matchStrategy.end.length();\n }\n\n startIndex = computeStartIndexWithSpans(startIndex, startIndexOffset, endIndex, replacement);\n\n } else {\n startIndex = -1;\n }\n }\n }\n\n } while (startIndex != -1);\n }\n\n return buildSpannableString(sourceString, replacements);\n }\n\n private int computeStartIndexWithSpans(final int startIndex, final int startIndexOffset, final int endIndex, final Replacement replacement) {\n\n // Replace match with user provided replacement.\n sourceString = new StringBuilder(sourceString).replace(startIndex, endIndex, replacement.replacementString).toString();\n\n // Update the new end index location.\n final int endIndexUpdated = startIndex + replacement.replacementString.length();\n\n final int offset = (endIndex - startIndex) - (endIndexUpdated - startIndex);\n\n if (offset != 0) {\n\n for (final Replacement existingReplacement : replacements) {\n\n if (existingReplacement.start > startIndex) {\n existingReplacement.start -= startIndexOffset;\n\n if (existingReplacement.start > endIndexUpdated) {\n existingReplacement.start -= offset - startIndexOffset;\n }\n }\n\n if (existingReplacement.end > startIndex) {\n existingReplacement.end -= startIndexOffset;\n\n if (existingReplacement.end > endIndexUpdated) {\n 
existingReplacement.end -= offset - startIndexOffset;\n }\n }\n }\n }\n\n replacement.start = startIndex;\n replacement.end = endIndexUpdated;\n\n replacements.add(replacement);\n\n return endIndexUpdated;\n }\n\n /**\n * Given a source string and a corresponding list of replacement objects,\n * transform into a spannable string spans applied from each replacement.\n *\n * Assumes the source string has been formatted with string replacements\n * during the computation step.\n *\n * @param sourceString Source string with replacements.\n * @param replacements Replacement objects with desired spans to apply at start and end indices.\n *\n * @return Source string with spans applied.\n */\n private static SpannableString buildSpannableString(final String sourceString, final Collection replacements) {\n final SpannableString spannableString = new SpannableString(sourceString);\n\n try {\n for (final Replacement replacement : replacements) {\n for (final CharacterStyle characterStyle : replacement.replacementSpans) {\n spannableString.setSpan(characterStyle, replacement.start, replacement.end, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);\n }\n }\n } catch (IndexOutOfBoundsException e) {\n Log.i(\"Spanner\", \"Span cannot be applied, index out of bounds.\", e);\n }\n\n return spannableString;\n }\n\n @SuppressWarnings(\"unused\")\n public interface OnMatchListener {\n\n Replacement call(final String match);\n }\n\n /**\n * Represents a desired match strategy with a callback\n * to allow the user to return a replacement string\n * along with desired spans to apply to it.\n *\n * TODO: This could be made simpler by being represented as a regex.\n */\n private static class MatchStrategy {\n\n final OnMatchListener onMatchListener;\n final String start;\n final String end;\n final boolean endRequired;\n final boolean endWithWhitespaceOrEOL;\n\n private MatchStrategy(final OnMatchListener onMatchListener, final String start, final String end, final boolean endRequired, final boolean 
endWithWhitespaceOrEOL) {\n this.onMatchListener = onMatchListener;\n this.start = start;\n this.end = end;\n this.endRequired = endRequired;\n this.endWithWhitespaceOrEOL = endWithWhitespaceOrEOL;\n }\n }\n\n @SuppressWarnings(\"unused\")\n public static class Replacement {\n\n final String replacementString;\n final List replacementSpans;\n\n int start;\n int end;\n\n public Replacement(final String replacementString, final List replacementSpans) {\n this.replacementString = replacementString;\n this.replacementSpans = replacementSpans;\n }\n\n public Replacement(final String replacementString, CharacterStyle... spanStyles) {\n this(replacementString, Arrays.asList(spanStyles));\n }\n\n public Replacement(final String replacementString) {\n this(replacementString, Collections.emptyList());\n }\n }\n}\n"},"new_file":{"kind":"string","value":"spanner/src/main/java/com/miguelgaeta/spanner/Spanner.java"},"old_contents":{"kind":"string","value":"package com.miguelgaeta.spanner;\n\nimport android.text.SpannableString;\nimport android.text.Spanned;\nimport android.text.style.CharacterStyle;\nimport android.util.Log;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.List;\n\n/**\n * Created by Miguel Gaeta on 4/20/16.\n *\n * A powerful spannable string builder that supports arbitrary substitutions.\n */\npublic class Spanner {\n\n private String sourceString = \"\";\n private final List matchStrategies = new ArrayList<>();\n private final List replacements = new ArrayList<>();\n\n public Spanner(final String sourceString) {\n this.sourceString = sourceString;\n }\n\n @SuppressWarnings(\"unused\")\n public Spanner addReplacementStrategy(final OnMatchListener onMatchListener, final String start) {\n return addReplacementStrategy(onMatchListener, start, null);\n }\n\n public Spanner addReplacementStrategy(final OnMatchListener onMatchListener, final String start, final String end) {\n 
return addReplacementStrategy(onMatchListener, start, end, true);\n }\n\n public Spanner addReplacementStrategy(final OnMatchListener onMatchListener, final String start, final String end, boolean endRequired) {\n return addReplacementStrategy(onMatchListener, start, end, endRequired, false);\n }\n\n public Spanner addReplacementStrategy(final OnMatchListener onMatchListener, final String start, final String end, final boolean endRequired, final boolean endWithWhitespaceOrEOL) {\n matchStrategies.add(new MatchStrategy(onMatchListener, start, end, endRequired, endWithWhitespaceOrEOL));\n\n return this;\n }\n\n @SuppressWarnings(\"unused\")\n public Spanner addMarkdownStrategy() {\n addReplacementStrategy(new OnMatchListener() {\n @Override\n public Replacement call(String match) {\n return new Replacement(match, SpanHelpers.createBoldItalicSpan());\n }\n }, \"***\", \"***\");\n\n addReplacementStrategy(new OnMatchListener() {\n @Override\n public Replacement call(String match) {\n return new Replacement(match, SpanHelpers.createBoldSpan());\n }\n }, \"**\", \"**\");\n\n addReplacementStrategy(new OnMatchListener() {\n @Override\n public Replacement call(String match) {\n return new Replacement(match, SpanHelpers.createItalicSpan());\n }\n }, \"*\", \"*\");\n\n addReplacementStrategy(new OnMatchListener() {\n @Override\n public Replacement call(String match) {\n return new Replacement(match, SpanHelpers.createStrikethroughSpan());\n }\n }, \"~~\", \"~~\");\n\n addReplacementStrategy(new OnMatchListener() {\n @Override\n public Replacement call(String match) {\n return new Replacement(match, SpanHelpers.createUnderlineSpan());\n }\n }, \"__\", \"__\");\n\n addReplacementStrategy(new OnMatchListener() {\n @Override\n public Replacement call(String match) {\n return new Replacement(match, SpanHelpers.createItalicSpan());\n }\n }, \"_\", \"_\", true, true);\n\n return this;\n }\n\n public SpannableString toSpannableString() {\n\n for (final MatchStrategy matchStrategy : 
matchStrategies) {\n int startIndex = 0;\n\n do {\n startIndex = sourceString.indexOf(matchStrategy.start, startIndex);\n\n if (startIndex != -1) {\n final int startIndexOffset = matchStrategy.start.length();\n\n if (matchStrategy.end == null) {\n\n int endIndex = startIndex + startIndexOffset;\n final Replacement replacement = matchStrategy.onMatchListener.call(matchStrategy.start);\n\n startIndex = computeStartIndexWithSpans(startIndex, startIndexOffset, endIndex, replacement);\n\n } else {\n\n int endIndex = sourceString.indexOf(matchStrategy.end, startIndex + startIndexOffset);\n final boolean isEOLMatch = endIndex == -1 && !matchStrategy.endRequired;\n\n if (isEOLMatch) {\n endIndex = sourceString.length();\n }\n\n if (matchStrategy.endWithWhitespaceOrEOL && endIndex != -1) {\n\n if (endIndex != (sourceString.length() - 1) && !Character.isWhitespace(sourceString.charAt(endIndex + 1))) {\n endIndex = -1;\n }\n }\n\n if (endIndex != -1) {\n\n final String match = sourceString.substring(startIndex + startIndexOffset, endIndex);\n final Replacement replacement = matchStrategy.onMatchListener.call(match);\n\n if (!isEOLMatch) {\n endIndex += matchStrategy.end.length();\n }\n\n startIndex = computeStartIndexWithSpans(startIndex, startIndexOffset, endIndex, replacement);\n\n } else {\n startIndex = -1;\n }\n }\n }\n\n } while (startIndex != -1);\n }\n\n return buildSpannableString(sourceString, replacements);\n }\n\n private int computeStartIndexWithSpans(final int startIndex, final int startIndexOffset, final int endIndex, final Replacement replacement) {\n\n // Replace match with user provided replacement.\n sourceString = new StringBuilder(sourceString).replace(startIndex, endIndex, replacement.replacementString).toString();\n\n // Update the new end index location.\n final int endIndexUpdated = startIndex + replacement.replacementString.length();\n\n final int offset = (endIndex - startIndex) - (endIndexUpdated - startIndex);\n\n if (offset != 0) {\n\n for (final 
Replacement existingReplacement : replacements) {\n\n if (existingReplacement.start > startIndex) {\n existingReplacement.start -= startIndexOffset;\n\n if (existingReplacement.start > endIndexUpdated) {\n existingReplacement.start -= offset - startIndexOffset;\n }\n }\n\n if (existingReplacement.end > startIndex) {\n existingReplacement.end -= startIndexOffset;\n\n if (existingReplacement.end > endIndexUpdated) {\n existingReplacement.end -= offset - startIndexOffset;\n }\n }\n }\n }\n\n replacement.start = startIndex;\n replacement.end = endIndexUpdated;\n\n replacements.add(replacement);\n\n return endIndexUpdated;\n }\n\n /**\n * Given a source string and a corresponding list of replacement objects,\n * transform into a spannable string spans applied from each replacement.\n *\n * Assumes the source string has been formatted with string replacements\n * during the computation step.\n *\n * @param sourceString Source string with replacements.\n * @param replacements Replacement objects with desired spans to apply at start and end indices.\n *\n * @return Source string with spans applied.\n */\n private static SpannableString buildSpannableString(final String sourceString, final Collection replacements) {\n final SpannableString spannableString = new SpannableString(sourceString);\n\n try {\n for (final Replacement replacement : replacements) {\n for (final CharacterStyle characterStyle : replacement.replacementSpans) {\n spannableString.setSpan(characterStyle, replacement.start, replacement.end, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);\n }\n }\n } catch (IndexOutOfBoundsException e) {\n Log.i(\"Spanner\", \"Span cannot be applied, index out of bounds.\", e);\n }\n\n return spannableString;\n }\n\n @SuppressWarnings(\"unused\")\n public interface OnMatchListener {\n\n Replacement call(final String match);\n }\n\n /**\n * Represents a desired match strategy with a callback\n * to allow the user to return a replacement string\n * along with desired spans to apply to 
it.\n *\n * TODO: This could be made simpler by being represented as a regex.\n */\n private static class MatchStrategy {\n\n final OnMatchListener onMatchListener;\n final String start;\n final String end;\n final boolean endRequired;\n final boolean endWithWhitespaceOrEOL;\n\n private MatchStrategy(final OnMatchListener onMatchListener, final String start, final String end, final boolean endRequired, final boolean endWithWhitespaceOrEOL) {\n this.onMatchListener = onMatchListener;\n this.start = start;\n this.end = end;\n this.endRequired = endRequired;\n this.endWithWhitespaceOrEOL = endWithWhitespaceOrEOL;\n }\n }\n\n @SuppressWarnings(\"unused\")\n public static class Replacement {\n\n final String replacementString;\n final List replacementSpans;\n\n int start;\n int end;\n\n public Replacement(final String replacementString, final List replacementSpans) {\n this.replacementString = replacementString;\n this.replacementSpans = replacementSpans;\n }\n\n public Replacement(final String replacementString, CharacterStyle... 
spanStyles) {\n this(replacementString, Arrays.asList(spanStyles));\n }\n\n public Replacement(final String replacementString) {\n this(replacementString, Collections.emptyList());\n }\n }\n}\n"},"message":{"kind":"string","value":"Add bold shorthand.\n"},"old_file":{"kind":"string","value":"spanner/src/main/java/com/miguelgaeta/spanner/Spanner.java"},"subject":{"kind":"string","value":"Add bold shorthand."},"git_diff":{"kind":"string","value":"panner/src/main/java/com/miguelgaeta/spanner/Spanner.java\n \n @SuppressWarnings(\"unused\")\n public Spanner addMarkdownStrategy() {\n addMarkdownBoldStrategy();\n\n addReplacementStrategy(new OnMatchListener() {\n @Override\n public Replacement call(String match) {\n return new Replacement(match, SpanHelpers.createItalicSpan());\n }\n }, \"*\", \"*\");\n\n addReplacementStrategy(new OnMatchListener() {\n @Override\n public Replacement call(String match) {\n return new Replacement(match, SpanHelpers.createStrikethroughSpan());\n }\n }, \"~~\", \"~~\");\n\n addReplacementStrategy(new OnMatchListener() {\n @Override\n public Replacement call(String match) {\n return new Replacement(match, SpanHelpers.createUnderlineSpan());\n }\n }, \"__\", \"__\");\n\n addReplacementStrategy(new OnMatchListener() {\n @Override\n public Replacement call(String match) {\n return new Replacement(match, SpanHelpers.createItalicSpan());\n }\n }, \"_\", \"_\", true, true);\n\n return this;\n }\n\n public Spanner addMarkdownBoldStrategy() {\n addReplacementStrategy(new OnMatchListener() {\n @Override\n public Replacement call(String match) {\n return new Replacement(match, SpanHelpers.createBoldSpan());\n }\n }, \"**\", \"**\");\n\n addReplacementStrategy(new OnMatchListener() {\n @Override\n public Replacement call(String match) {\n return new Replacement(match, SpanHelpers.createItalicSpan());\n }\n }, \"*\", \"*\");\n\n addReplacementStrategy(new OnMatchListener() {\n @Override\n public Replacement call(String match) {\n return new 
Replacement(match, SpanHelpers.createStrikethroughSpan());\n }\n }, \"~~\", \"~~\");\n\n addReplacementStrategy(new OnMatchListener() {\n @Override\n public Replacement call(String match) {\n return new Replacement(match, SpanHelpers.createUnderlineSpan());\n }\n }, \"__\", \"__\");\n\n addReplacementStrategy(new OnMatchListener() {\n @Override\n public Replacement call(String match) {\n return new Replacement(match, SpanHelpers.createItalicSpan());\n }\n }, \"_\", \"_\", true, true);\n \n return this;\n }"}}},{"rowIdx":2056,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"b957c0ffb6f5e96ebe7c6ee16442bbeac8ba5b92"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"ekux44/LampShade,ekux44/HueMore"},"new_contents":{"kind":"string","value":"package com.kuxhausen.huemore;\n\nimport java.io.BufferedReader;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.io.InputStreamReader;\nimport java.lang.reflect.InvocationTargetException;\nimport java.util.ArrayList;\n\nimport org.apache.http.HttpEntity;\nimport org.apache.http.HttpResponse;\nimport org.apache.http.StatusLine;\nimport org.apache.http.client.ClientProtocolException;\nimport org.apache.http.client.HttpClient;\nimport org.apache.http.client.methods.HttpPut;\nimport org.apache.http.entity.StringEntity;\nimport org.apache.http.impl.client.DefaultHttpClient;\n\nimport android.annotation.TargetApi;\nimport android.content.Context;\nimport android.content.Intent;\nimport android.content.SharedPreferences;\nimport android.content.SharedPreferences.Editor;\nimport android.database.Cursor;\nimport android.os.AsyncTask;\nimport android.os.Build;\nimport android.os.Bundle;\nimport android.preference.PreferenceManager;\nimport android.support.v4.app.FragmentActivity;\nimport android.support.v4.app.FragmentTransaction;\nimport 
android.util.Log;\nimport android.view.Menu;\nimport android.view.MenuInflater;\nimport android.view.MenuItem;\n\nimport com.kuxhausen.huemore.billing.IabHelper;\nimport com.kuxhausen.huemore.billing.IabResult;\nimport com.kuxhausen.huemore.billing.Inventory;\nimport com.kuxhausen.huemore.billing.Purchase;\nimport com.kuxhausen.huemore.database.DatabaseDefinitions;\nimport com.kuxhausen.huemore.database.DatabaseDefinitions.PlayItems;\nimport com.kuxhausen.huemore.database.DatabaseHelper;\nimport com.kuxhausen.huemore.database.DatabaseDefinitions.GroupColumns;\nimport com.kuxhausen.huemore.database.DatabaseDefinitions.MoodColumns;\nimport com.kuxhausen.huemore.database.DatabaseDefinitions.PreferencesKeys;\nimport com.kuxhausen.huemore.network.GetBulbList;\nimport com.kuxhausen.huemore.network.TransmitGroupMood;\nimport com.kuxhausen.huemore.ui.registration.RegisterWithHubDialogFragment;\n\n/**\n * @author Eric Kuxhausen\n * \n */\npublic class MainActivity extends FragmentActivity implements\n\t\tGroupBulbPagingFragment.OnBulbGroupSelectedListener,\n\t\tMoodsFragment.OnMoodSelectedListener {\n\n\tDatabaseHelper databaseHelper = new DatabaseHelper(this);\n\tInteger[] bulbS;\n\tString mood;\n\tIabHelper mPlayHelper;\n\tMainActivity m;\n\tInventory lastQuerriedInventory;\n\tpublic GetBulbList.OnBulbListReturnedListener bulbListenerFragment;\n\n\t/** Called when the activity is first created. */\n\t@Override\n\tpublic void onCreate(Bundle savedInstanceState) {\n\t\tsuper.onCreate(savedInstanceState);\n\t\tLog.d(\"asdf\", \"onCreate\");\n\t\t\n\t\tsetContentView(R.layout.hue_more);\n\t\tm = this;\n\n\t\t// Check whether the activity is using the layout version with\n\t\t// the fragment_container FrameLayout. 
If so, we must add the first\n\t\t// fragment\n\t\tif (findViewById(R.id.fragment_container) != null) {\n\n\t\t\t// However, if we're being restored from a previous state,\n\t\t\t// then we don't need to do anything and should return or else\n\t\t\t// we could end up with overlapping fragments.\n\t\t\tif (savedInstanceState != null) {\n\t\t\t\t// return;\n\t\t\t}else{\n\n\t\t\t// Create an instance of ExampleFragment\n\t\t\tGroupBulbPagingFragment firstFragment = new GroupBulbPagingFragment();\n\t\t\t// GroupsFragment firstFragment = new GroupsFragment();\n\n\t\t\t// In case this activity was started with special instructions from\n\t\t\t// an Intent,\n\t\t\t// pass the Intent's extras to the fragment as arguments\n\t\t\tfirstFragment.setArguments(getIntent().getExtras());\n\n\t\t\t// Add the fragment to the 'fragment_container' FrameLayout\n\t\t\tgetSupportFragmentManager()\n\t\t\t\t\t.beginTransaction()\n\t\t\t\t\t.add(R.id.fragment_container, firstFragment,\n\t\t\t\t\t\t\tGroupBulbPagingFragment.class.getName()).commit();\n\t\t\t}\n\n\t\t}\n\n\t\t// (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) ?\n\t\t// this.getActionBar().setDisplayHomeAsUpEnabled(true)\n\t\t// : System.out.println(\"wtf\");\n\t\tSharedPreferences settings = PreferenceManager\n\t\t\t\t.getDefaultSharedPreferences(this);\n\n\t\tif (!settings.contains(PreferencesKeys.THIRD_UPDATE)) {\n\t\t\tdatabaseHelper.updatedPopulate();\n\t\t\t// Mark no longer first update in preferences cache\n\t\t\tEditor edit = settings.edit();\n\t\t\tedit.putBoolean(PreferencesKeys.THIRD_UPDATE, false);\n\t\t\tedit.commit();\n\t\t}\n\t\tif (!settings.contains(PreferencesKeys.FIRST_RUN)) {\n\t\t\tdatabaseHelper.initialPopulate();// initialize database\n\t\t\t\n\n\t\t\t// Mark no longer first run in preferences cache\n\t\t\tEditor edit = settings.edit();\n\t\t\tedit.putBoolean(PreferencesKeys.FIRST_RUN, false);\n\t\t\tedit.putInt(PreferencesKeys.BULBS_UNLOCKED,\n\t\t\t\t\tPreferencesKeys.ALWAYS_FREE_BULBS);// 
TODO load from\n\t\t\t// google store\n\t\t\tedit.commit();\n\t\t}\n\n\t\tif (!settings.contains(PreferencesKeys.DEFAULT_TO_GROUPS)) {\n\t\t\tEditor edit = settings.edit();\n\t\t\tedit.putBoolean(PreferencesKeys.DEFAULT_TO_GROUPS, false);\n\t\t\tedit.commit();\n\t\t}\n\t\tif (!settings.contains(PreferencesKeys.DEFAULT_TO_MOODS)) {\n\t\t\tEditor edit = settings.edit();\n\t\t\tedit.putBoolean(PreferencesKeys.DEFAULT_TO_MOODS, true);\n\t\t\tedit.commit();\n\t\t}\n\n\t\t// check to see if the bridge IP address is setup yet\n\t\tif (!settings.contains(PreferencesKeys.BRIDGE_IP_ADDRESS)) {\n\t\t\tRegisterWithHubDialogFragment rwhdf = new RegisterWithHubDialogFragment();\n\t\t\trwhdf.show(this.getSupportFragmentManager(), \"dialog\");\n\t\t}\n\t\tString firstChunk = \"MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAgPUhHgGEdnpyPMAWgP3Xw/jHkReU1O0n6d4rtcULxOrVl/hcZlOsVyByMIZY5wMD84gmMXjbz8pFb4RymFTP7Yp8LSEGiw6DOXc7ydNd0lbZ4WtKyDEwwaio1wRbRPxdU7/4JBpMCh9L6geYx6nYLt0ExZEFxULV3dZJpIlEkEYaNGk/64gc0l34yybccYfORrWzu8u+\";\n\t\tString secondChunk = \"5YxJ5k1ikIJJ2I7/2Rp5AXkj2dWybmT+AGx83zh8+iMGGawEQerGtso9NUqpyZWU08EO9DcF8r2KnFwjmyWvqJ2JzbqCMNt0A08IGQNOrd16/C/65GE6J/EtsggkNIgQti6jD7zd3b2NAQIDAQAB\";\n\t\tString base64EncodedPublicKey = firstChunk + secondChunk;\n\t\t// compute your public key and store it in base64EncodedPublicKey\n\t\tmPlayHelper = new IabHelper(this, base64EncodedPublicKey);\n\t\tLog.d(\"asdf\", \"mPlayHelperCreated\" + (mPlayHelper !=null));\n\t\tmPlayHelper.startSetup(new IabHelper.OnIabSetupFinishedListener() {\n\t\t\tpublic void onIabSetupFinished(IabResult result) {\n\t\t\t\tif (!result.isSuccess()) {\n\t\t\t\t\t// Oh noes, there was a problem.\n\t\t\t\t\t// Log.d(\"asdf\", \"Problem setting up In-app Billing: \"+\n\t\t\t\t\t// result);\n\t\t\t\t} else {\n\t\t\t\t\t// Hooray, IAB is fully set up!\n\t\t\t\t\tmPlayHelper.queryInventoryAsync(mGotInventoryListener);\n\t\t\t\t\tif (m.bulbListenerFragment != null) {\n\t\t\t\t\t\tGetBulbList pushGroupMood = new 
GetBulbList();\n\t\t\t\t\t\tpushGroupMood.execute(m, m.bulbListenerFragment);\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t});\n\t}\n\n\t// Listener that's called when we finish querying the items and\n\t// subscriptions we own\n\tIabHelper.QueryInventoryFinishedListener mGotInventoryListener = new IabHelper.QueryInventoryFinishedListener() {\n\t\tpublic void onQueryInventoryFinished(IabResult result,\n\t\t\t\tInventory inventory) {\n\n\t\t\t// Log.d(\"asdf\", \"Query inventory finished.\");\n\t\t\tif (result.isFailure()) {\n\t\t\t\t// handle error\n\t\t\t\treturn;\n\t\t\t} else {\n\t\t\t\t// Log.d(\"asdf\", \"Query inventory was successful.\");\n\t\t\t\tlastQuerriedInventory = inventory;\n\t\t\t\tint numUnlocked = PreferencesKeys.ALWAYS_FREE_BULBS;\n\t\t\t\tif (inventory.hasPurchase(PlayItems.FIVE_BULB_UNLOCK_1))\n\t\t\t\t\tnumUnlocked += 5;\n\t\t\t\tif (inventory.hasPurchase(PlayItems.FIVE_BULB_UNLOCK_2))\n\t\t\t\t\tnumUnlocked += 5;\n\t\t\t\tif (inventory.hasPurchase(PlayItems.FIVE_BULB_UNLOCK_3))\n\t\t\t\t\tnumUnlocked += 5;\n\t\t\t\tif (inventory.hasPurchase(PlayItems.FIVE_BULB_UNLOCK_4))\n\t\t\t\t\tnumUnlocked += 5;\n\t\t\t\tif (inventory.hasPurchase(PlayItems.FIVE_BULB_UNLOCK_5))\n\t\t\t\t\tnumUnlocked += 5;\n\t\t\t\tif (inventory.hasPurchase(PlayItems.FIVE_BULB_UNLOCK_6))\n\t\t\t\t\tnumUnlocked += 5;\n\t\t\t\tif (inventory.hasPurchase(PlayItems.FIVE_BULB_UNLOCK_7))\n\t\t\t\t\tnumUnlocked += 5;\n\t\t\t\tif (inventory.hasPurchase(PlayItems.FIVE_BULB_UNLOCK_8))\n\t\t\t\t\tnumUnlocked += 5;\n\t\t\t\tif (inventory.hasPurchase(PlayItems.BUY_ME_A_BULB_DONATION_1))\n\t\t\t\t\tnumUnlocked = Math.max(50, numUnlocked);\n\t\t\t\t// update UI accordingly\n\n\t\t\t\t// Get preferences cache\n\t\t\t\tSharedPreferences settings = PreferenceManager\n\t\t\t\t\t\t.getDefaultSharedPreferences(m);\n\t\t\t\tint previousMax = settings.getInt(\n\t\t\t\t\t\tPreferencesKeys.BULBS_UNLOCKED,\n\t\t\t\t\t\tPreferencesKeys.ALWAYS_FREE_BULBS);\n\t\t\t\tif (numUnlocked > previousMax) 
{\n\t\t\t\t\t// Update the number held in settings\n\t\t\t\t\tEditor edit = settings.edit();\n\t\t\t\t\tedit.putInt(PreferencesKeys.BULBS_UNLOCKED, numUnlocked);\n\t\t\t\t\tedit.commit();\n\n\t\t\t\t\tdatabaseHelper.addBulbs(previousMax, numUnlocked);// initialize\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t// database\n\t\t\t\t}\n\t\t\t}\n\t\t\t/*\n\t\t\t * Check for items we own. Notice that for each purchase, we check\n\t\t\t * the developer payload to see if it's correct! See\n\t\t\t * verifyDeveloperPayload().\n\t\t\t */\n\t\t\t/*\n\t\t\t * // Do we have the premium upgrade? Purchase premiumPurchase =\n\t\t\t * inventory.getPurchase(SKU_PREMIUM); mIsPremium = (premiumPurchase\n\t\t\t * != null && verifyDeveloperPayload(premiumPurchase)); Log.d(TAG,\n\t\t\t * \"User is \" + (mIsPremium ? \"PREMIUM\" : \"NOT PREMIUM\"));\n\t\t\t * \n\t\t\t * \n\t\t\t * updateUi(); setWaitScreen(false); Log.d(TAG,\n\t\t\t * \"Initial inventory query finished; enabling main UI.\");\n\t\t\t */\n\t\t}\n\t};\n\n\tIabHelper.OnIabPurchaseFinishedListener mPurchaseFinishedListener = new IabHelper.OnIabPurchaseFinishedListener() {\n\t\tpublic void onIabPurchaseFinished(IabResult result, Purchase purchase) {\n\t\t\tmPlayHelper.queryInventoryAsync(mGotInventoryListener);\n\n\t\t}\n\t};\n\n\t@TargetApi(Build.VERSION_CODES.HONEYCOMB)\n\tpublic void initializeActionBar(Boolean value) {\n\t\ttry {\n\t\t\tthis.getActionBar().setDisplayHomeAsUpEnabled(value);\n\t\t} catch (Error e) {\n\t\t}\n\t}\n\n\t@Override\n\tprotected void onActivityResult(int requestCode, int resultCode, Intent data) {\n\t\t// Log.d(TAG, \"onActivityResult(\" + requestCode + \",\" + resultCode + \",\"\n\t\t// + data);\n\n\t\t// Pass on the activity result to the helper for handling\n\t\tif (!mPlayHelper.handleActivityResult(requestCode, resultCode, data)) {\n\t\t\t// not handled, so handle it ourselves (here's where you'd\n\t\t\t// perform any handling of activity results not related to in-app\n\t\t\t// 
billing...\n\t\t\tsuper.onActivityResult(requestCode, resultCode, data);\n\t\t} else {\n\t\t\t// Log.d(TAG, \"onActivityResult handled by IABUtil.\");\n\t\t}\n\t}\n\n\t/** Verifies the developer payload of a purchase. */\n\tboolean verifyDeveloperPayload(Purchase p) {\n\t\tString payload = p.getDeveloperPayload();\n\n\t\t/*\n\t\t * TODO: verify that the developer payload of the purchase is correct.\n\t\t * It will be the same one that you sent when initiating the purchase.\n\t\t * \n\t\t * WARNING: Locally generating a random string when starting a purchase\n\t\t * and verifying it here might seem like a good approach, but this will\n\t\t * fail in the case where the user purchases an item on one device and\n\t\t * then uses your app on a different device, because on the other device\n\t\t * you will not have access to the random string you originally\n\t\t * generated.\n\t\t * \n\t\t * So a good developer payload has these characteristics:\n\t\t * \n\t\t * 1. If two different users purchase an item, the payload is different\n\t\t * between them, so that one user's purchase can't be replayed to\n\t\t * another user.\n\t\t * \n\t\t * 2. 
The payload must be such that you can verify it even when the app\n\t\t * wasn't the one who initiated the purchase flow (so that items\n\t\t * purchased by the user on one device work on other devices owned by\n\t\t * the user).\n\t\t * \n\t\t * Using your own server to store and verify developer payloads across\n\t\t * app installations is recommended.\n\t\t */\n\n\t\treturn true;\n\t}\n\n\t@Override\n\tpublic void onDestroy() {\n\t\tsuper.onDestroy();\n\t\tLog.d(\"asdf\", \"onDestroy\");\n\t\tif (mPlayHelper != null)\n\t\t\tmPlayHelper.dispose();\n\t\tmPlayHelper = null;\n\t\tLog.d(\"asdf\", \"mPlayHelperDestroyed\" + (mPlayHelper ==null));\n\t}\n\n\t@Override\n\tpublic void onGroupBulbSelected(Integer[] bulb) {\n\t\tbulbS = bulb;\n\n\t\t// Capture the article fragment from the activity layout\n\t\tMoodManualPagingFragment moodFrag = (MoodManualPagingFragment) getSupportFragmentManager()\n\t\t\t\t.findFragmentById(R.id.moods_fragment);\n\n\t\tif (moodFrag != null) {\n\t\t\t// If article frag is available, we're in two-pane layout...\n\n\t\t\t// Call a method in the ArticleFragment to update its content\n\t\t\tmoodFrag.reset();\n\n\t\t} else {\n\t\t\t// If the frag is not available, we're in the one-pane layout and\n\t\t\t// must swap frags...\n\n\t\t\t// Create fragment and give it an argument for the selected article\n\t\t\tMoodManualPagingFragment newFragment = new MoodManualPagingFragment();\n\t\t\tFragmentTransaction transaction = getSupportFragmentManager()\n\t\t\t\t\t.beginTransaction();\n\t\t\ttransaction\n\t\t\t\t\t.setTransition(FragmentTransaction.TRANSIT_FRAGMENT_OPEN);\n\t\t\t// Replace whatever is in the fragment_container view with this\n\t\t\t// fragment,\n\t\t\t// and add the transaction to the back stack so the user can\n\t\t\t// navigate back\n\t\t\ttransaction.replace(R.id.fragment_container, newFragment,\n\t\t\t\t\tMoodManualPagingFragment.class.getName());\n\t\t\ttransaction.addToBackStack(null);\n\n\t\t\t// Commit the 
transaction\n\t\t\ttransaction.commit();\n\t\t\ttransaction\n\t\t\t\t\t.setTransition(FragmentTransaction.TRANSIT_FRAGMENT_CLOSE);\n\n\t\t\tif (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {\n\t\t\t\tinitializeActionBar(true);\n\n\t\t\t}\n\t\t}\n\n\t}\n\n\t@Override\n\tpublic void onBackPressed() {\n\t\tsuper.onBackPressed();\n\t\tif (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {\n\t\t\tinitializeActionBar(false);\n\t\t}\n\t}\n\n\tprivate void moveToGroupBulb() {\n\t\tMoodManualPagingFragment moodFrag = (MoodManualPagingFragment) getSupportFragmentManager()\n\t\t\t\t.findFragmentById(R.id.moods_fragment);\n\n\t\tif (moodFrag == null || !moodFrag.isVisible()) {\n\t\t\tthis.onBackPressed();\n\n\t\t}\n\t}\n\n\t@Override\n\tpublic void onMoodSelected(String moodParam) {\n\t\tmood = moodParam;\n\t\tpushMoodGroup();\n\t}\n\n\tpublic void onBrightnessChanged(String brightnessState[]) {\n\t\tTransmitGroupMood pushGroupMood = new TransmitGroupMood();\n\t\tpushGroupMood.execute(this, bulbS, brightnessState);\n\t}\n\n\t/*\n\t * test mood by applying to json states array to these bulbs\n\t * \n\t * @param states\n\t */\n\t/*\n\t * public void testMood(Integer[] bulbs, String[] states) {\n\t * TransmitGroupMood pushGroupMood = new TransmitGroupMood();\n\t * pushGroupMood.execute(this, bulbs, states); }\n\t */\n\n\t/**\n\t * test mood by applying to json states array to previously selected moods\n\t * \n\t * @param states\n\t */\n\tpublic void testMood(String[] states) {\n\t\tTransmitGroupMood pushGroupMood = new TransmitGroupMood();\n\t\tpushGroupMood.execute(this, bulbS, states);\n\t}\n\n\tprivate void pushMoodGroup() {\n\t\tif (bulbS == null || mood == null)\n\t\t\treturn;\n\n\t\tString[] moodColumns = { MoodColumns.STATE };\n\t\tString[] mWereClause = { mood };\n\t\tCursor cursor = getContentResolver().query(\n\t\t\t\tDatabaseDefinitions.MoodColumns.MOODSTATES_URI, // Use the\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t// 
default\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t// content URI\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t// for the\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t// provider.\n\t\t\t\tmoodColumns, // Return the note ID and title for each note.\n\t\t\t\tMoodColumns.MOOD + \"=?\", // selection clause\n\t\t\t\tmWereClause, // election clause args\n\t\t\t\tnull // Use the default sort order.\n\t\t\t\t);\n\n\t\tArrayList moodStates = new ArrayList();\n\t\twhile (cursor.moveToNext()) {\n\t\t\tmoodStates.add(cursor.getString(0));\n\t\t}\n\t\tString[] moodS = moodStates.toArray(new String[moodStates.size()]);\n\n\t\tTransmitGroupMood pushGroupMood = new TransmitGroupMood();\n\t\tpushGroupMood.execute(this, bulbS, moodS);\n\t}\n\n\t@Override\n\tpublic boolean onCreateOptionsMenu(Menu menu) {\n\t\tMenuInflater inflater = getMenuInflater();\n\t\tinflater.inflate(R.menu.main, menu);\n\t\treturn true;\n\t}\n\n\t@Override\n\tpublic boolean onOptionsItemSelected(MenuItem item) {\n\t\t// Handle item selection\n\t\tswitch (item.getItemId()) {\n\t\tcase android.R.id.home:\n\t\t\tmoveToGroupBulb();\n\t\t\treturn true;\n\t\tcase R.id.action_register_with_hub:\n\t\t\tRegisterWithHubDialogFragment rwhdf = new RegisterWithHubDialogFragment();\n\t\t\trwhdf.show(getSupportFragmentManager(), \"dialog\");\n\t\t\treturn true;\n\t\tcase R.id.action_settings:\n\t\t\tSettings settings = new Settings();\n\t\t\tsettings.show(getSupportFragmentManager(), \"dialog\");\n\t\t\treturn true;\n\t\tcase R.id.action_unlock_more_bulbs:\n\t\t\tif (lastQuerriedInventory == null)\n\t\t\t\tmPlayHelper.queryInventoryAsync(mGotInventoryListener);\n\t\t\telse {\n\t\t\t\tif (!lastQuerriedInventory\n\t\t\t\t\t\t.hasPurchase(PlayItems.FIVE_BULB_UNLOCK_1))\n\t\t\t\t\tmPlayHelper.launchPurchaseFlow(this,\n\t\t\t\t\t\t\tPlayItems.FIVE_BULB_UNLOCK_1, 10001,\n\t\t\t\t\t\t\tmPurchaseFinishedListener, \"\");\n\t\t\t\telse if 
(!lastQuerriedInventory\n\t\t\t\t\t\t.hasPurchase(PlayItems.FIVE_BULB_UNLOCK_2))\n\t\t\t\t\tmPlayHelper.launchPurchaseFlow(this,\n\t\t\t\t\t\t\tPlayItems.FIVE_BULB_UNLOCK_2, 10002,\n\t\t\t\t\t\t\tmPurchaseFinishedListener, \"\");\n\t\t\t\telse if (!lastQuerriedInventory\n\t\t\t\t\t\t.hasPurchase(PlayItems.FIVE_BULB_UNLOCK_3))\n\t\t\t\t\tmPlayHelper.launchPurchaseFlow(this,\n\t\t\t\t\t\t\tPlayItems.FIVE_BULB_UNLOCK_3, 10003,\n\t\t\t\t\t\t\tmPurchaseFinishedListener, \"\");\n\t\t\t\telse if (!lastQuerriedInventory\n\t\t\t\t\t\t.hasPurchase(PlayItems.FIVE_BULB_UNLOCK_4))\n\t\t\t\t\tmPlayHelper.launchPurchaseFlow(this,\n\t\t\t\t\t\t\tPlayItems.FIVE_BULB_UNLOCK_4, 10004,\n\t\t\t\t\t\t\tmPurchaseFinishedListener, \"\");\n\t\t\t\telse if (!lastQuerriedInventory\n\t\t\t\t\t\t.hasPurchase(PlayItems.FIVE_BULB_UNLOCK_5))\n\t\t\t\t\tmPlayHelper.launchPurchaseFlow(this,\n\t\t\t\t\t\t\tPlayItems.FIVE_BULB_UNLOCK_5, 10005,\n\t\t\t\t\t\t\tmPurchaseFinishedListener, \"\");\n\t\t\t\telse if (!lastQuerriedInventory\n\t\t\t\t\t\t.hasPurchase(PlayItems.FIVE_BULB_UNLOCK_6))\n\t\t\t\t\tmPlayHelper.launchPurchaseFlow(this,\n\t\t\t\t\t\t\tPlayItems.FIVE_BULB_UNLOCK_6, 10006,\n\t\t\t\t\t\t\tmPurchaseFinishedListener, \"\");\n\t\t\t\telse if (!lastQuerriedInventory\n\t\t\t\t\t\t.hasPurchase(PlayItems.FIVE_BULB_UNLOCK_7))\n\t\t\t\t\tmPlayHelper.launchPurchaseFlow(this,\n\t\t\t\t\t\t\tPlayItems.FIVE_BULB_UNLOCK_7, 10007,\n\t\t\t\t\t\t\tmPurchaseFinishedListener, \"\");\n\t\t\t\telse if (!lastQuerriedInventory\n\t\t\t\t\t\t.hasPurchase(PlayItems.FIVE_BULB_UNLOCK_8))\n\t\t\t\t\tmPlayHelper.launchPurchaseFlow(this,\n\t\t\t\t\t\t\tPlayItems.FIVE_BULB_UNLOCK_8, 10008,\n\t\t\t\t\t\t\tmPurchaseFinishedListener, \"\");\n\t\t\t}\n\n\t\t\treturn true;\n\t\tdefault:\n\t\t\treturn super.onOptionsItemSelected(item);\n\t\t}\n\t}\n\n}\n"},"new_file":{"kind":"string","value":"app/src/com/kuxhausen/huemore/MainActivity.java"},"old_contents":{"kind":"string","value":"package com.kuxhausen.huemore;\n\nimport 
java.io.BufferedReader;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.io.InputStreamReader;\nimport java.lang.reflect.InvocationTargetException;\nimport java.util.ArrayList;\n\nimport org.apache.http.HttpEntity;\nimport org.apache.http.HttpResponse;\nimport org.apache.http.StatusLine;\nimport org.apache.http.client.ClientProtocolException;\nimport org.apache.http.client.HttpClient;\nimport org.apache.http.client.methods.HttpPut;\nimport org.apache.http.entity.StringEntity;\nimport org.apache.http.impl.client.DefaultHttpClient;\n\nimport android.annotation.TargetApi;\nimport android.content.Context;\nimport android.content.Intent;\nimport android.content.SharedPreferences;\nimport android.content.SharedPreferences.Editor;\nimport android.database.Cursor;\nimport android.os.AsyncTask;\nimport android.os.Build;\nimport android.os.Bundle;\nimport android.preference.PreferenceManager;\nimport android.support.v4.app.FragmentActivity;\nimport android.support.v4.app.FragmentTransaction;\nimport android.view.Menu;\nimport android.view.MenuInflater;\nimport android.view.MenuItem;\n\nimport com.kuxhausen.huemore.billing.IabHelper;\nimport com.kuxhausen.huemore.billing.IabResult;\nimport com.kuxhausen.huemore.billing.Inventory;\nimport com.kuxhausen.huemore.billing.Purchase;\nimport com.kuxhausen.huemore.database.DatabaseDefinitions;\nimport com.kuxhausen.huemore.database.DatabaseDefinitions.PlayItems;\nimport com.kuxhausen.huemore.database.DatabaseHelper;\nimport com.kuxhausen.huemore.database.DatabaseDefinitions.GroupColumns;\nimport com.kuxhausen.huemore.database.DatabaseDefinitions.MoodColumns;\nimport com.kuxhausen.huemore.database.DatabaseDefinitions.PreferencesKeys;\nimport com.kuxhausen.huemore.network.GetBulbList;\nimport com.kuxhausen.huemore.network.TransmitGroupMood;\nimport com.kuxhausen.huemore.ui.registration.RegisterWithHubDialogFragment;\n\n/**\n * @author Eric Kuxhausen\n * \n */\npublic class MainActivity extends FragmentActivity 
implements\n\t\tGroupBulbPagingFragment.OnBulbGroupSelectedListener,\n\t\tMoodsFragment.OnMoodSelectedListener {\n\n\tDatabaseHelper databaseHelper = new DatabaseHelper(this);\n\tInteger[] bulbS;\n\tString mood;\n\tIabHelper mPlayHelper;\n\tMainActivity m;\n\tInventory lastQuerriedInventory;\n\tpublic GetBulbList.OnBulbListReturnedListener bulbListenerFragment;\n\n\t/** Called when the activity is first created. */\n\t@Override\n\tpublic void onCreate(Bundle savedInstanceState) {\n\t\tsuper.onCreate(savedInstanceState);\n\t\tsetContentView(R.layout.hue_more);\n\t\tm = this;\n\n\t\t// Check whether the activity is using the layout version with\n\t\t// the fragment_container FrameLayout. If so, we must add the first\n\t\t// fragment\n\t\tif (findViewById(R.id.fragment_container) != null) {\n\n\t\t\t// However, if we're being restored from a previous state,\n\t\t\t// then we don't need to do anything and should return or else\n\t\t\t// we could end up with overlapping fragments.\n\t\t\tif (savedInstanceState != null) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// Create an instance of ExampleFragment\n\t\t\tGroupBulbPagingFragment firstFragment = new GroupBulbPagingFragment();\n\t\t\t// GroupsFragment firstFragment = new GroupsFragment();\n\n\t\t\t// In case this activity was started with special instructions from\n\t\t\t// an Intent,\n\t\t\t// pass the Intent's extras to the fragment as arguments\n\t\t\tfirstFragment.setArguments(getIntent().getExtras());\n\n\t\t\t// Add the fragment to the 'fragment_container' FrameLayout\n\t\t\tgetSupportFragmentManager()\n\t\t\t\t\t.beginTransaction()\n\t\t\t\t\t.add(R.id.fragment_container, firstFragment,\n\t\t\t\t\t\t\tGroupBulbPagingFragment.class.getName()).commit();\n\n\t\t}\n\n\t\t// (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) ?\n\t\t// this.getActionBar().setDisplayHomeAsUpEnabled(true)\n\t\t// : System.out.println(\"wtf\");\n\t\tSharedPreferences settings = 
PreferenceManager\n\t\t\t\t.getDefaultSharedPreferences(this);\n\n\t\tif (!settings.contains(PreferencesKeys.THIRD_UPDATE)) {\n\t\t\tdatabaseHelper.updatedPopulate();\n\t\t\t// Mark no longer first update in preferences cache\n\t\t\tEditor edit = settings.edit();\n\t\t\tedit.putBoolean(PreferencesKeys.THIRD_UPDATE, false);\n\t\t\tedit.commit();\n\t\t}\n\t\tif (!settings.contains(PreferencesKeys.FIRST_RUN)) {\n\t\t\tdatabaseHelper.initialPopulate();// initialize database\n\t\t\t\n\n\t\t\t// Mark no longer first run in preferences cache\n\t\t\tEditor edit = settings.edit();\n\t\t\tedit.putBoolean(PreferencesKeys.FIRST_RUN, false);\n\t\t\tedit.putInt(PreferencesKeys.BULBS_UNLOCKED,\n\t\t\t\t\tPreferencesKeys.ALWAYS_FREE_BULBS);// TODO load from\n\t\t\t// google store\n\t\t\tedit.commit();\n\t\t}\n\n\t\tif (!settings.contains(PreferencesKeys.DEFAULT_TO_GROUPS)) {\n\t\t\tEditor edit = settings.edit();\n\t\t\tedit.putBoolean(PreferencesKeys.DEFAULT_TO_GROUPS, false);\n\t\t\tedit.commit();\n\t\t}\n\t\tif (!settings.contains(PreferencesKeys.DEFAULT_TO_MOODS)) {\n\t\t\tEditor edit = settings.edit();\n\t\t\tedit.putBoolean(PreferencesKeys.DEFAULT_TO_MOODS, true);\n\t\t\tedit.commit();\n\t\t}\n\n\t\t// check to see if the bridge IP address is setup yet\n\t\tif (!settings.contains(PreferencesKeys.BRIDGE_IP_ADDRESS)) {\n\t\t\tRegisterWithHubDialogFragment rwhdf = new RegisterWithHubDialogFragment();\n\t\t\trwhdf.show(this.getSupportFragmentManager(), \"dialog\");\n\t\t}\n\t\tString firstChunk = \"MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAgPUhHgGEdnpyPMAWgP3Xw/jHkReU1O0n6d4rtcULxOrVl/hcZlOsVyByMIZY5wMD84gmMXjbz8pFb4RymFTP7Yp8LSEGiw6DOXc7ydNd0lbZ4WtKyDEwwaio1wRbRPxdU7/4JBpMCh9L6geYx6nYLt0ExZEFxULV3dZJpIlEkEYaNGk/64gc0l34yybccYfORrWzu8u+\";\n\t\tString secondChunk = \"5YxJ5k1ikIJJ2I7/2Rp5AXkj2dWybmT+AGx83zh8+iMGGawEQerGtso9NUqpyZWU08EO9DcF8r2KnFwjmyWvqJ2JzbqCMNt0A08IGQNOrd16/C/65GE6J/EtsggkNIgQti6jD7zd3b2NAQIDAQAB\";\n\t\tString base64EncodedPublicKey = firstChunk + 
secondChunk;\n\t\t// compute your public key and store it in base64EncodedPublicKey\n\t\tmPlayHelper = new IabHelper(this, base64EncodedPublicKey);\n\t\tmPlayHelper.startSetup(new IabHelper.OnIabSetupFinishedListener() {\n\t\t\tpublic void onIabSetupFinished(IabResult result) {\n\t\t\t\tif (!result.isSuccess()) {\n\t\t\t\t\t// Oh noes, there was a problem.\n\t\t\t\t\t// Log.d(\"asdf\", \"Problem setting up In-app Billing: \"+\n\t\t\t\t\t// result);\n\t\t\t\t} else {\n\t\t\t\t\t// Hooray, IAB is fully set up!\n\t\t\t\t\tmPlayHelper.queryInventoryAsync(mGotInventoryListener);\n\t\t\t\t\tif (m.bulbListenerFragment != null) {\n\t\t\t\t\t\tGetBulbList pushGroupMood = new GetBulbList();\n\t\t\t\t\t\tpushGroupMood.execute(m, m.bulbListenerFragment);\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t});\n\t}\n\n\t// Listener that's called when we finish querying the items and\n\t// subscriptions we own\n\tIabHelper.QueryInventoryFinishedListener mGotInventoryListener = new IabHelper.QueryInventoryFinishedListener() {\n\t\tpublic void onQueryInventoryFinished(IabResult result,\n\t\t\t\tInventory inventory) {\n\n\t\t\t// Log.d(\"asdf\", \"Query inventory finished.\");\n\t\t\tif (result.isFailure()) {\n\t\t\t\t// handle error\n\t\t\t\treturn;\n\t\t\t} else {\n\t\t\t\t// Log.d(\"asdf\", \"Query inventory was successful.\");\n\t\t\t\tlastQuerriedInventory = inventory;\n\t\t\t\tint numUnlocked = PreferencesKeys.ALWAYS_FREE_BULBS;\n\t\t\t\tif (inventory.hasPurchase(PlayItems.FIVE_BULB_UNLOCK_1))\n\t\t\t\t\tnumUnlocked += 5;\n\t\t\t\tif (inventory.hasPurchase(PlayItems.FIVE_BULB_UNLOCK_2))\n\t\t\t\t\tnumUnlocked += 5;\n\t\t\t\tif (inventory.hasPurchase(PlayItems.FIVE_BULB_UNLOCK_3))\n\t\t\t\t\tnumUnlocked += 5;\n\t\t\t\tif (inventory.hasPurchase(PlayItems.FIVE_BULB_UNLOCK_4))\n\t\t\t\t\tnumUnlocked += 5;\n\t\t\t\tif (inventory.hasPurchase(PlayItems.FIVE_BULB_UNLOCK_5))\n\t\t\t\t\tnumUnlocked += 5;\n\t\t\t\tif (inventory.hasPurchase(PlayItems.FIVE_BULB_UNLOCK_6))\n\t\t\t\t\tnumUnlocked += 
5;\n\t\t\t\tif (inventory.hasPurchase(PlayItems.FIVE_BULB_UNLOCK_7))\n\t\t\t\t\tnumUnlocked += 5;\n\t\t\t\tif (inventory.hasPurchase(PlayItems.FIVE_BULB_UNLOCK_8))\n\t\t\t\t\tnumUnlocked += 5;\n\t\t\t\tif (inventory.hasPurchase(PlayItems.BUY_ME_A_BULB_DONATION_1))\n\t\t\t\t\tnumUnlocked = Math.max(50, numUnlocked);\n\t\t\t\t// update UI accordingly\n\n\t\t\t\t// Get preferences cache\n\t\t\t\tSharedPreferences settings = PreferenceManager\n\t\t\t\t\t\t.getDefaultSharedPreferences(m);\n\t\t\t\tint previousMax = settings.getInt(\n\t\t\t\t\t\tPreferencesKeys.BULBS_UNLOCKED,\n\t\t\t\t\t\tPreferencesKeys.ALWAYS_FREE_BULBS);\n\t\t\t\tif (numUnlocked > previousMax) {\n\t\t\t\t\t// Update the number held in settings\n\t\t\t\t\tEditor edit = settings.edit();\n\t\t\t\t\tedit.putInt(PreferencesKeys.BULBS_UNLOCKED, numUnlocked);\n\t\t\t\t\tedit.commit();\n\n\t\t\t\t\tdatabaseHelper.addBulbs(previousMax, numUnlocked);// initialize\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t// database\n\t\t\t\t}\n\t\t\t}\n\t\t\t/*\n\t\t\t * Check for items we own. Notice that for each purchase, we check\n\t\t\t * the developer payload to see if it's correct! See\n\t\t\t * verifyDeveloperPayload().\n\t\t\t */\n\t\t\t/*\n\t\t\t * // Do we have the premium upgrade? Purchase premiumPurchase =\n\t\t\t * inventory.getPurchase(SKU_PREMIUM); mIsPremium = (premiumPurchase\n\t\t\t * != null && verifyDeveloperPayload(premiumPurchase)); Log.d(TAG,\n\t\t\t * \"User is \" + (mIsPremium ? 
\"PREMIUM\" : \"NOT PREMIUM\"));\n\t\t\t * \n\t\t\t * \n\t\t\t * updateUi(); setWaitScreen(false); Log.d(TAG,\n\t\t\t * \"Initial inventory query finished; enabling main UI.\");\n\t\t\t */\n\t\t}\n\t};\n\n\tIabHelper.OnIabPurchaseFinishedListener mPurchaseFinishedListener = new IabHelper.OnIabPurchaseFinishedListener() {\n\t\tpublic void onIabPurchaseFinished(IabResult result, Purchase purchase) {\n\t\t\tmPlayHelper.queryInventoryAsync(mGotInventoryListener);\n\n\t\t}\n\t};\n\n\t@TargetApi(Build.VERSION_CODES.HONEYCOMB)\n\tpublic void initializeActionBar(Boolean value) {\n\t\ttry {\n\t\t\tthis.getActionBar().setDisplayHomeAsUpEnabled(value);\n\t\t} catch (Error e) {\n\t\t}\n\t}\n\n\t@Override\n\tprotected void onActivityResult(int requestCode, int resultCode, Intent data) {\n\t\t// Log.d(TAG, \"onActivityResult(\" + requestCode + \",\" + resultCode + \",\"\n\t\t// + data);\n\n\t\t// Pass on the activity result to the helper for handling\n\t\tif (!mPlayHelper.handleActivityResult(requestCode, resultCode, data)) {\n\t\t\t// not handled, so handle it ourselves (here's where you'd\n\t\t\t// perform any handling of activity results not related to in-app\n\t\t\t// billing...\n\t\t\tsuper.onActivityResult(requestCode, resultCode, data);\n\t\t} else {\n\t\t\t// Log.d(TAG, \"onActivityResult handled by IABUtil.\");\n\t\t}\n\t}\n\n\t/** Verifies the developer payload of a purchase. 
*/\n\tboolean verifyDeveloperPayload(Purchase p) {\n\t\tString payload = p.getDeveloperPayload();\n\n\t\t/*\n\t\t * TODO: verify that the developer payload of the purchase is correct.\n\t\t * It will be the same one that you sent when initiating the purchase.\n\t\t * \n\t\t * WARNING: Locally generating a random string when starting a purchase\n\t\t * and verifying it here might seem like a good approach, but this will\n\t\t * fail in the case where the user purchases an item on one device and\n\t\t * then uses your app on a different device, because on the other device\n\t\t * you will not have access to the random string you originally\n\t\t * generated.\n\t\t * \n\t\t * So a good developer payload has these characteristics:\n\t\t * \n\t\t * 1. If two different users purchase an item, the payload is different\n\t\t * between them, so that one user's purchase can't be replayed to\n\t\t * another user.\n\t\t * \n\t\t * 2. The payload must be such that you can verify it even when the app\n\t\t * wasn't the one who initiated the purchase flow (so that items\n\t\t * purchased by the user on one device work on other devices owned by\n\t\t * the user).\n\t\t * \n\t\t * Using your own server to store and verify developer payloads across\n\t\t * app installations is recommended.\n\t\t */\n\n\t\treturn true;\n\t}\n\n\t@Override\n\tpublic void onDestroy() {\n\t\tsuper.onDestroy();\n\t\tif (mPlayHelper != null)\n\t\t\tmPlayHelper.dispose();\n\t\tmPlayHelper = null;\n\t}\n\n\t@Override\n\tpublic void onGroupBulbSelected(Integer[] bulb) {\n\t\tbulbS = bulb;\n\n\t\t// Capture the article fragment from the activity layout\n\t\tMoodManualPagingFragment moodFrag = (MoodManualPagingFragment) getSupportFragmentManager()\n\t\t\t\t.findFragmentById(R.id.moods_fragment);\n\n\t\tif (moodFrag != null) {\n\t\t\t// If article frag is available, we're in two-pane layout...\n\n\t\t\t// Call a method in the ArticleFragment to update its content\n\t\t\tmoodFrag.reset();\n\n\t\t} else 
{\n\t\t\t// If the frag is not available, we're in the one-pane layout and\n\t\t\t// must swap frags...\n\n\t\t\t// Create fragment and give it an argument for the selected article\n\t\t\tMoodManualPagingFragment newFragment = new MoodManualPagingFragment();\n\t\t\tFragmentTransaction transaction = getSupportFragmentManager()\n\t\t\t\t\t.beginTransaction();\n\t\t\ttransaction\n\t\t\t\t\t.setTransition(FragmentTransaction.TRANSIT_FRAGMENT_OPEN);\n\t\t\t// Replace whatever is in the fragment_container view with this\n\t\t\t// fragment,\n\t\t\t// and add the transaction to the back stack so the user can\n\t\t\t// navigate back\n\t\t\ttransaction.replace(R.id.fragment_container, newFragment,\n\t\t\t\t\tMoodManualPagingFragment.class.getName());\n\t\t\ttransaction.addToBackStack(null);\n\n\t\t\t// Commit the transaction\n\t\t\ttransaction.commit();\n\t\t\ttransaction\n\t\t\t\t\t.setTransition(FragmentTransaction.TRANSIT_FRAGMENT_CLOSE);\n\n\t\t\tif (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {\n\t\t\t\tinitializeActionBar(true);\n\n\t\t\t}\n\t\t}\n\n\t}\n\n\t@Override\n\tpublic void onBackPressed() {\n\t\tsuper.onBackPressed();\n\t\tif (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {\n\t\t\tinitializeActionBar(false);\n\t\t}\n\t}\n\n\tprivate void moveToGroupBulb() {\n\t\tMoodManualPagingFragment moodFrag = (MoodManualPagingFragment) getSupportFragmentManager()\n\t\t\t\t.findFragmentById(R.id.moods_fragment);\n\n\t\tif (moodFrag == null || !moodFrag.isVisible()) {\n\t\t\tthis.onBackPressed();\n\n\t\t}\n\t}\n\n\t@Override\n\tpublic void onMoodSelected(String moodParam) {\n\t\tmood = moodParam;\n\t\tpushMoodGroup();\n\t}\n\n\tpublic void onBrightnessChanged(String brightnessState[]) {\n\t\tTransmitGroupMood pushGroupMood = new TransmitGroupMood();\n\t\tpushGroupMood.execute(this, bulbS, brightnessState);\n\t}\n\n\t/*\n\t * test mood by applying to json states array to these bulbs\n\t * \n\t * @param states\n\t */\n\t/*\n\t * public void 
testMood(Integer[] bulbs, String[] states) {\n\t * TransmitGroupMood pushGroupMood = new TransmitGroupMood();\n\t * pushGroupMood.execute(this, bulbs, states); }\n\t */\n\n\t/**\n\t * test mood by applying to json states array to previously selected moods\n\t * \n\t * @param states\n\t */\n\tpublic void testMood(String[] states) {\n\t\tTransmitGroupMood pushGroupMood = new TransmitGroupMood();\n\t\tpushGroupMood.execute(this, bulbS, states);\n\t}\n\n\tprivate void pushMoodGroup() {\n\t\tif (bulbS == null || mood == null)\n\t\t\treturn;\n\n\t\tString[] moodColumns = { MoodColumns.STATE };\n\t\tString[] mWereClause = { mood };\n\t\tCursor cursor = getContentResolver().query(\n\t\t\t\tDatabaseDefinitions.MoodColumns.MOODSTATES_URI, // Use the\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t// default\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t// content URI\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t// for the\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t// provider.\n\t\t\t\tmoodColumns, // Return the note ID and title for each note.\n\t\t\t\tMoodColumns.MOOD + \"=?\", // selection clause\n\t\t\t\tmWereClause, // election clause args\n\t\t\t\tnull // Use the default sort order.\n\t\t\t\t);\n\n\t\tArrayList moodStates = new ArrayList();\n\t\twhile (cursor.moveToNext()) {\n\t\t\tmoodStates.add(cursor.getString(0));\n\t\t}\n\t\tString[] moodS = moodStates.toArray(new String[moodStates.size()]);\n\n\t\tTransmitGroupMood pushGroupMood = new TransmitGroupMood();\n\t\tpushGroupMood.execute(this, bulbS, moodS);\n\t}\n\n\t@Override\n\tpublic boolean onCreateOptionsMenu(Menu menu) {\n\t\tMenuInflater inflater = getMenuInflater();\n\t\tinflater.inflate(R.menu.main, menu);\n\t\treturn true;\n\t}\n\n\t@Override\n\tpublic boolean onOptionsItemSelected(MenuItem item) {\n\t\t// Handle item selection\n\t\tswitch (item.getItemId()) {\n\t\tcase android.R.id.home:\n\t\t\tmoveToGroupBulb();\n\t\t\treturn true;\n\t\tcase R.id.action_register_with_hub:\n\t\t\tRegisterWithHubDialogFragment rwhdf = new 
RegisterWithHubDialogFragment();\n\t\t\trwhdf.show(getSupportFragmentManager(), \"dialog\");\n\t\t\treturn true;\n\t\tcase R.id.action_settings:\n\t\t\tSettings settings = new Settings();\n\t\t\tsettings.show(getSupportFragmentManager(), \"dialog\");\n\t\t\treturn true;\n\t\tcase R.id.action_unlock_more_bulbs:\n\t\t\tif (lastQuerriedInventory == null)\n\t\t\t\tmPlayHelper.queryInventoryAsync(mGotInventoryListener);\n\t\t\telse {\n\t\t\t\tif (!lastQuerriedInventory\n\t\t\t\t\t\t.hasPurchase(PlayItems.FIVE_BULB_UNLOCK_1))\n\t\t\t\t\tmPlayHelper.launchPurchaseFlow(this,\n\t\t\t\t\t\t\tPlayItems.FIVE_BULB_UNLOCK_1, 10001,\n\t\t\t\t\t\t\tmPurchaseFinishedListener, \"\");\n\t\t\t\telse if (!lastQuerriedInventory\n\t\t\t\t\t\t.hasPurchase(PlayItems.FIVE_BULB_UNLOCK_2))\n\t\t\t\t\tmPlayHelper.launchPurchaseFlow(this,\n\t\t\t\t\t\t\tPlayItems.FIVE_BULB_UNLOCK_2, 10002,\n\t\t\t\t\t\t\tmPurchaseFinishedListener, \"\");\n\t\t\t\telse if (!lastQuerriedInventory\n\t\t\t\t\t\t.hasPurchase(PlayItems.FIVE_BULB_UNLOCK_3))\n\t\t\t\t\tmPlayHelper.launchPurchaseFlow(this,\n\t\t\t\t\t\t\tPlayItems.FIVE_BULB_UNLOCK_3, 10003,\n\t\t\t\t\t\t\tmPurchaseFinishedListener, \"\");\n\t\t\t\telse if (!lastQuerriedInventory\n\t\t\t\t\t\t.hasPurchase(PlayItems.FIVE_BULB_UNLOCK_4))\n\t\t\t\t\tmPlayHelper.launchPurchaseFlow(this,\n\t\t\t\t\t\t\tPlayItems.FIVE_BULB_UNLOCK_4, 10004,\n\t\t\t\t\t\t\tmPurchaseFinishedListener, \"\");\n\t\t\t\telse if (!lastQuerriedInventory\n\t\t\t\t\t\t.hasPurchase(PlayItems.FIVE_BULB_UNLOCK_5))\n\t\t\t\t\tmPlayHelper.launchPurchaseFlow(this,\n\t\t\t\t\t\t\tPlayItems.FIVE_BULB_UNLOCK_5, 10005,\n\t\t\t\t\t\t\tmPurchaseFinishedListener, \"\");\n\t\t\t\telse if (!lastQuerriedInventory\n\t\t\t\t\t\t.hasPurchase(PlayItems.FIVE_BULB_UNLOCK_6))\n\t\t\t\t\tmPlayHelper.launchPurchaseFlow(this,\n\t\t\t\t\t\t\tPlayItems.FIVE_BULB_UNLOCK_6, 10006,\n\t\t\t\t\t\t\tmPurchaseFinishedListener, \"\");\n\t\t\t\telse if 
(!lastQuerriedInventory\n\t\t\t\t\t\t.hasPurchase(PlayItems.FIVE_BULB_UNLOCK_7))\n\t\t\t\t\tmPlayHelper.launchPurchaseFlow(this,\n\t\t\t\t\t\t\tPlayItems.FIVE_BULB_UNLOCK_7, 10007,\n\t\t\t\t\t\t\tmPurchaseFinishedListener, \"\");\n\t\t\t\telse if (!lastQuerriedInventory\n\t\t\t\t\t\t.hasPurchase(PlayItems.FIVE_BULB_UNLOCK_8))\n\t\t\t\t\tmPlayHelper.launchPurchaseFlow(this,\n\t\t\t\t\t\t\tPlayItems.FIVE_BULB_UNLOCK_8, 10008,\n\t\t\t\t\t\t\tmPurchaseFinishedListener, \"\");\n\t\t\t}\n\n\t\t\treturn true;\n\t\tdefault:\n\t\t\treturn super.onOptionsItemSelected(item);\n\t\t}\n\t}\n\n}\n"},"message":{"kind":"string","value":"Fixed activity recreation issue found in testing\n"},"old_file":{"kind":"string","value":"app/src/com/kuxhausen/huemore/MainActivity.java"},"subject":{"kind":"string","value":"Fixed activity recreation issue found in testing"},"git_diff":{"kind":"string","value":"pp/src/com/kuxhausen/huemore/MainActivity.java\n import android.preference.PreferenceManager;\n import android.support.v4.app.FragmentActivity;\n import android.support.v4.app.FragmentTransaction;\nimport android.util.Log;\n import android.view.Menu;\n import android.view.MenuInflater;\n import android.view.MenuItem;\n \t@Override\n \tpublic void onCreate(Bundle savedInstanceState) {\n \t\tsuper.onCreate(savedInstanceState);\n\t\tLog.d(\"asdf\", \"onCreate\");\n\t\t\n \t\tsetContentView(R.layout.hue_more);\n \t\tm = this;\n \n \t\t\t// then we don't need to do anything and should return or else\n \t\t\t// we could end up with overlapping fragments.\n \t\t\tif (savedInstanceState != null) {\n\t\t\t\treturn;\n\t\t\t}\n\t\t\t\t// return;\n\t\t\t}else{\n \n \t\t\t// Create an instance of ExampleFragment\n \t\t\tGroupBulbPagingFragment firstFragment = new GroupBulbPagingFragment();\n \t\t\t\t\t.beginTransaction()\n \t\t\t\t\t.add(R.id.fragment_container, firstFragment,\n \t\t\t\t\t\t\tGroupBulbPagingFragment.class.getName()).commit();\n\t\t\t}\n \n \t\t}\n \n \t\tString base64EncodedPublicKey = 
firstChunk + secondChunk;\n \t\t// compute your public key and store it in base64EncodedPublicKey\n \t\tmPlayHelper = new IabHelper(this, base64EncodedPublicKey);\n\t\tLog.d(\"asdf\", \"mPlayHelperCreated\" + (mPlayHelper !=null));\n \t\tmPlayHelper.startSetup(new IabHelper.OnIabSetupFinishedListener() {\n \t\t\tpublic void onIabSetupFinished(IabResult result) {\n \t\t\t\tif (!result.isSuccess()) {\n \t@Override\n \tpublic void onDestroy() {\n \t\tsuper.onDestroy();\n\t\tLog.d(\"asdf\", \"onDestroy\");\n \t\tif (mPlayHelper != null)\n \t\t\tmPlayHelper.dispose();\n \t\tmPlayHelper = null;\n\t\tLog.d(\"asdf\", \"mPlayHelperDestroyed\" + (mPlayHelper ==null));\n \t}\n \n \t@Override"}}},{"rowIdx":2057,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"ee28034a8a155406b111813602cb5f4fb3fb1f4d"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"zrccxyb62/hadoop,zrccxyb62/hadoop,zrccxyb62/hadoop,zrccxyb62/hadoop,zrccxyb62/hadoop,zrccxyb62/hadoop,zrccxyb62/hadoop"},"new_contents":{"kind":"string","value":"/**\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage org.apache.hadoop.hdfs;\n\nimport java.io.ByteArrayOutputStream;\nimport java.io.File;\nimport java.io.FileNotFoundException;\nimport java.io.IOException;\nimport java.io.PrintStream;\nimport java.io.RandomAccessFile;\nimport java.io.StringReader;\nimport java.net.URI;\nimport java.security.PrivilegedExceptionAction;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.EnumSet;\nimport java.util.List;\nimport java.util.concurrent.Callable;\nimport java.util.concurrent.CountDownLatch;\nimport java.util.concurrent.ExecutionException;\nimport java.util.concurrent.ExecutorService;\nimport java.util.concurrent.Executors;\nimport java.util.concurrent.Future;\n\nimport com.google.common.collect.Lists;\n\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.crypto.CipherSuite;\nimport org.apache.hadoop.crypto.CryptoProtocolVersion;\nimport org.apache.hadoop.crypto.key.JavaKeyStoreProvider;\nimport org.apache.hadoop.crypto.key.KeyProvider;\nimport org.apache.hadoop.crypto.key.KeyProviderFactory;\nimport org.apache.hadoop.fs.CommonConfigurationKeysPublic;\nimport org.apache.hadoop.fs.CreateFlag;\nimport org.apache.hadoop.fs.FSDataOutputStream;\nimport org.apache.hadoop.fs.FSTestWrapper;\nimport org.apache.hadoop.fs.FileContext;\nimport org.apache.hadoop.fs.FileContextTestWrapper;\nimport org.apache.hadoop.fs.FileEncryptionInfo;\nimport org.apache.hadoop.fs.FileStatus;\nimport org.apache.hadoop.fs.FileSystem;\nimport org.apache.hadoop.fs.FileSystemTestHelper;\nimport 
org.apache.hadoop.fs.FileSystemTestWrapper;\nimport org.apache.hadoop.fs.FsShell;\nimport org.apache.hadoop.fs.Path;\nimport org.apache.hadoop.fs.RemoteIterator;\nimport org.apache.hadoop.fs.permission.FsAction;\nimport org.apache.hadoop.fs.permission.FsPermission;\nimport org.apache.hadoop.hdfs.client.CreateEncryptionZoneFlag;\nimport org.apache.hadoop.hdfs.client.HdfsAdmin;\nimport org.apache.hadoop.hdfs.protocol.ClientProtocol;\nimport org.apache.hadoop.hdfs.protocol.EncryptionZone;\nimport org.apache.hadoop.hdfs.protocol.HdfsFileStatus;\nimport org.apache.hadoop.hdfs.protocol.LocatedBlocks;\nimport org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction;\nimport org.apache.hadoop.hdfs.server.namenode.EncryptionFaultInjector;\nimport org.apache.hadoop.hdfs.server.namenode.EncryptionZoneManager;\nimport org.apache.hadoop.hdfs.server.namenode.FSImageTestUtil;\nimport org.apache.hadoop.hdfs.server.namenode.NamenodeFsck;\nimport org.apache.hadoop.hdfs.tools.CryptoAdmin;\nimport org.apache.hadoop.hdfs.tools.DFSck;\nimport org.apache.hadoop.hdfs.tools.offlineImageViewer.PBImageXmlWriter;\nimport org.apache.hadoop.hdfs.web.WebHdfsConstants;\nimport org.apache.hadoop.hdfs.web.WebHdfsTestUtil;\nimport org.apache.hadoop.io.EnumSetWritable;\nimport org.apache.hadoop.security.AccessControlException;\nimport org.apache.hadoop.security.Credentials;\nimport org.apache.hadoop.security.UserGroupInformation;\nimport org.apache.hadoop.security.token.Token;\nimport org.apache.hadoop.util.ToolRunner;\nimport org.apache.hadoop.crypto.key.KeyProviderDelegationTokenExtension.DelegationTokenExtension;\nimport org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.CryptoExtension;\nimport org.apache.hadoop.io.Text;\nimport org.apache.log4j.Level;\nimport org.apache.log4j.Logger;\nimport org.junit.After;\nimport org.junit.Assert;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.mockito.Mockito;\n\nimport static org.mockito.Matchers.anyBoolean;\nimport static 
org.mockito.Matchers.anyLong;\nimport static org.mockito.Matchers.anyObject;\nimport static org.mockito.Matchers.anyShort;\nimport static org.mockito.Mockito.withSettings;\nimport static org.mockito.Mockito.any;\nimport static org.mockito.Mockito.anyString;\nimport static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_TRASH_INTERVAL_KEY;\nimport static org.apache.hadoop.hdfs.DFSTestUtil.verifyFilesEqual;\nimport static org.apache.hadoop.test.GenericTestUtils.assertExceptionContains;\nimport static org.apache.hadoop.test.MetricsAsserts.assertGauge;\nimport static org.apache.hadoop.test.MetricsAsserts.getMetrics;\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertFalse;\nimport static org.junit.Assert.assertNotEquals;\nimport static org.junit.Assert.assertNull;\nimport static org.junit.Assert.assertTrue;\nimport static org.junit.Assert.fail;\n\nimport org.xml.sax.InputSource;\nimport org.xml.sax.helpers.DefaultHandler;\n\nimport javax.xml.parsers.SAXParser;\nimport javax.xml.parsers.SAXParserFactory;\n\npublic class TestEncryptionZones {\n\n protected Configuration conf;\n private FileSystemTestHelper fsHelper;\n\n protected MiniDFSCluster cluster;\n protected HdfsAdmin dfsAdmin;\n protected DistributedFileSystem fs;\n private File testRootDir;\n protected final String TEST_KEY = \"test_key\";\n private static final String NS_METRICS = \"FSNamesystem\";\n\n protected FileSystemTestWrapper fsWrapper;\n protected FileContextTestWrapper fcWrapper;\n\n protected static final EnumSet< CreateEncryptionZoneFlag > NO_TRASH =\n EnumSet.of(CreateEncryptionZoneFlag.NO_TRASH);\n\n protected String getKeyProviderURI() {\n return JavaKeyStoreProvider.SCHEME_NAME + \"://file\" +\n new Path(testRootDir.toString(), \"test.jks\").toUri();\n }\n\n @Before\n public void setup() throws Exception {\n conf = new HdfsConfiguration();\n fsHelper = new FileSystemTestHelper();\n // Set up java key store\n String testRoot = fsHelper.getTestRootDir();\n 
testRootDir = new File(testRoot).getAbsoluteFile();\n conf.set(DFSConfigKeys.DFS_ENCRYPTION_KEY_PROVIDER_URI, getKeyProviderURI());\n conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_DELEGATION_TOKEN_ALWAYS_USE_KEY, true);\n // Lower the batch size for testing\n conf.setInt(DFSConfigKeys.DFS_NAMENODE_LIST_ENCRYPTION_ZONES_NUM_RESPONSES,\n 2);\n cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();\n Logger.getLogger(EncryptionZoneManager.class).setLevel(Level.TRACE);\n fs = cluster.getFileSystem();\n fsWrapper = new FileSystemTestWrapper(fs);\n fcWrapper = new FileContextTestWrapper(\n FileContext.getFileContext(cluster.getURI(), conf));\n dfsAdmin = new HdfsAdmin(cluster.getURI(), conf);\n setProvider();\n // Create a test key\n DFSTestUtil.createKey(TEST_KEY, cluster, conf);\n }\n \n protected void setProvider() {\n // Need to set the client's KeyProvider to the NN's for JKS,\n // else the updates do not get flushed properly\n fs.getClient().setKeyProvider(cluster.getNameNode().getNamesystem()\n .getProvider());\n }\n\n @After\n public void teardown() {\n if (cluster != null) {\n cluster.shutdown();\n cluster = null;\n }\n EncryptionFaultInjector.instance = new EncryptionFaultInjector();\n }\n\n public void assertNumZones(final int numZones) throws IOException {\n RemoteIterator it = dfsAdmin.listEncryptionZones();\n int count = 0;\n while (it.hasNext()) {\n count++;\n it.next();\n }\n assertEquals(\"Unexpected number of encryption zones!\", numZones, count);\n }\n\n /**\n * Checks that an encryption zone with the specified keyName and path (if not\n * null) is present.\n *\n * @throws IOException if a matching zone could not be found\n */\n public void assertZonePresent(String keyName, String path) throws IOException {\n final RemoteIterator it = dfsAdmin.listEncryptionZones();\n boolean match = false;\n while (it.hasNext()) {\n EncryptionZone zone = it.next();\n boolean matchKey = (keyName == null);\n boolean matchPath = (path == null);\n if (keyName != 
null && zone.getKeyName().equals(keyName)) {\n matchKey = true;\n }\n if (path != null && zone.getPath().equals(path)) {\n matchPath = true;\n }\n if (matchKey && matchPath) {\n match = true;\n break;\n }\n }\n assertTrue(\"Did not find expected encryption zone with keyName \" + keyName +\n \" path \" + path, match\n );\n }\n\n /**\n * Make sure hdfs crypto -createZone command creates a trash directory\n * with sticky bits.\n * @throws Exception\n */\n @Test(timeout = 60000)\n public void testTrashStickyBit() throws Exception {\n // create an EZ /zones/zone1, make it world writable.\n final Path zoneParent = new Path(\"/zones\");\n final Path zone1 = new Path(zoneParent, \"zone1\");\n CryptoAdmin cryptoAdmin = new CryptoAdmin(conf);\n fsWrapper.mkdir(zone1, FsPermission.getDirDefault(), true);\n fsWrapper.setPermission(zone1,\n new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL));\n String[] cryptoArgv = new String[]{\"-createZone\", \"-keyName\", TEST_KEY,\n \"-path\", zone1.toUri().getPath()};\n cryptoAdmin.run(cryptoArgv);\n\n // create a file in EZ\n final Path ezfile1 = new Path(zone1, \"file1\");\n // Create the encrypted file in zone1\n final int len = 8192;\n DFSTestUtil.createFile(fs, ezfile1, len, (short) 1, 0xFEED);\n\n // enable trash, delete /zones/zone1/file1,\n // which moves the file to\n // /zones/zone1/.Trash/$SUPERUSER/Current/zones/zone1/file1\n Configuration clientConf = new Configuration(conf);\n clientConf.setLong(FS_TRASH_INTERVAL_KEY, 1);\n final FsShell shell = new FsShell(clientConf);\n String[] argv = new String[]{\"-rm\", ezfile1.toString()};\n int res = ToolRunner.run(shell, argv);\n assertEquals(\"Can't remove a file in EZ as superuser\", 0, res);\n\n final Path trashDir = new Path(zone1, FileSystem.TRASH_PREFIX);\n assertTrue(fsWrapper.exists(trashDir));\n FileStatus trashFileStatus = fsWrapper.getFileStatus(trashDir);\n assertTrue(trashFileStatus.getPermission().getStickyBit());\n\n // create a non-privileged user\n final 
UserGroupInformation user = UserGroupInformation.\n createUserForTesting(\"user\", new String[] { \"mygroup\" });\n\n user.doAs(new PrivilegedExceptionAction() {\n @Override\n public Object run() throws Exception {\n final Path ezfile2 = new Path(zone1, \"file2\");\n final int len = 8192;\n // create a file /zones/zone1/file2 in EZ\n // this file is owned by user:mygroup\n FileSystem fs2 = FileSystem.get(cluster.getConfiguration(0));\n DFSTestUtil.createFile(fs2, ezfile2, len, (short) 1, 0xFEED);\n // delete /zones/zone1/file2,\n // which moves the file to\n // /zones/zone1/.Trash/user/Current/zones/zone1/file2\n String[] argv = new String[]{\"-rm\", ezfile2.toString()};\n int res = ToolRunner.run(shell, argv);\n assertEquals(\"Can't remove a file in EZ as user:mygroup\", 0, res);\n return null;\n }\n });\n }\n\n /**\n * Make sure hdfs crypto -provisionTrash command creates a trash directory\n * with sticky bits.\n * @throws Exception\n */\n @Test(timeout = 60000)\n public void testProvisionTrash() throws Exception {\n // create an EZ /zones/zone1\n final Path zoneParent = new Path(\"/zones\");\n final Path zone1 = new Path(zoneParent, \"zone1\");\n CryptoAdmin cryptoAdmin = new CryptoAdmin(conf);\n fsWrapper.mkdir(zone1, FsPermission.getDirDefault(), true);\n String[] cryptoArgv = new String[]{\"-createZone\", \"-keyName\", TEST_KEY,\n \"-path\", zone1.toUri().getPath()};\n cryptoAdmin.run(cryptoArgv);\n\n // remove the trash directory\n Configuration clientConf = new Configuration(conf);\n clientConf.setLong(FS_TRASH_INTERVAL_KEY, 1);\n final FsShell shell = new FsShell(clientConf);\n final Path trashDir = new Path(zone1, FileSystem.TRASH_PREFIX);\n String[] argv = new String[]{\"-rmdir\", trashDir.toUri().getPath()};\n int res = ToolRunner.run(shell, argv);\n assertEquals(\"Unable to delete trash directory.\", 0, res);\n assertFalse(fsWrapper.exists(trashDir));\n\n // execute -provisionTrash command option and make sure the trash\n // directory has sticky bit.\n 
String[] provisionTrashArgv = new String[]{\"-provisionTrash\", \"-path\",\n zone1.toUri().getPath()};\n cryptoAdmin.run(provisionTrashArgv);\n\n assertTrue(fsWrapper.exists(trashDir));\n FileStatus trashFileStatus = fsWrapper.getFileStatus(trashDir);\n assertTrue(trashFileStatus.getPermission().getStickyBit());\n }\n\n @Test(timeout = 60000)\n public void testBasicOperations() throws Exception {\n\n int numZones = 0;\n /* Number of EZs should be 0 if no EZ is created */\n assertEquals(\"Unexpected number of encryption zones!\", numZones,\n cluster.getNamesystem().getNumEncryptionZones());\n /* Test failure of create EZ on a directory that doesn't exist. */\n final Path zoneParent = new Path(\"/zones\");\n final Path zone1 = new Path(zoneParent, \"zone1\");\n\n try {\n dfsAdmin.createEncryptionZone(zone1, TEST_KEY, NO_TRASH);\n fail(\"expected /test doesn't exist\");\n } catch (IOException e) {\n assertExceptionContains(\"cannot find\", e);\n }\n\n /* Normal creation of an EZ */\n fsWrapper.mkdir(zone1, FsPermission.getDirDefault(), true);\n dfsAdmin.createEncryptionZone(zone1, TEST_KEY, NO_TRASH);\n assertNumZones(++numZones);\n assertZonePresent(null, zone1.toString());\n\n /* Test failure of create EZ on a directory which is already an EZ. 
*/\n try {\n dfsAdmin.createEncryptionZone(zone1, TEST_KEY, NO_TRASH);\n } catch (IOException e) {\n assertExceptionContains(\"is already an encryption zone\", e);\n }\n\n /* create EZ on parent of an EZ should fail */\n try {\n dfsAdmin.createEncryptionZone(zoneParent, TEST_KEY, NO_TRASH);\n fail(\"EZ over an EZ\");\n } catch (IOException e) {\n assertExceptionContains(\"encryption zone for a non-empty directory\", e);\n }\n\n /* create EZ on a folder with a folder fails */\n final Path notEmpty = new Path(\"/notEmpty\");\n final Path notEmptyChild = new Path(notEmpty, \"child\");\n fsWrapper.mkdir(notEmptyChild, FsPermission.getDirDefault(), true);\n try {\n dfsAdmin.createEncryptionZone(notEmpty, TEST_KEY, NO_TRASH);\n fail(\"Created EZ on an non-empty directory with folder\");\n } catch (IOException e) {\n assertExceptionContains(\"create an encryption zone\", e);\n }\n fsWrapper.delete(notEmptyChild, false);\n\n /* create EZ on a folder with a file fails */\n fsWrapper.createFile(notEmptyChild);\n try {\n dfsAdmin.createEncryptionZone(notEmpty, TEST_KEY, NO_TRASH);\n fail(\"Created EZ on an non-empty directory with file\");\n } catch (IOException e) {\n assertExceptionContains(\"create an encryption zone\", e);\n }\n\n /* Test failure of create EZ on a file. */\n try {\n dfsAdmin.createEncryptionZone(notEmptyChild, TEST_KEY, NO_TRASH);\n fail(\"Created EZ on a file\");\n } catch (IOException e) {\n assertExceptionContains(\"create an encryption zone for a file.\", e);\n }\n\n /* Test failure of creating an EZ passing a key that doesn't exist. 
*/\n final Path zone2 = new Path(\"/zone2\");\n fsWrapper.mkdir(zone2, FsPermission.getDirDefault(), false);\n final String myKeyName = \"mykeyname\";\n try {\n dfsAdmin.createEncryptionZone(zone2, myKeyName, NO_TRASH);\n fail(\"expected key doesn't exist\");\n } catch (IOException e) {\n assertExceptionContains(\"doesn't exist.\", e);\n }\n\n /* Test failure of empty and null key name */\n try {\n dfsAdmin.createEncryptionZone(zone2, \"\", NO_TRASH);\n fail(\"created a zone with empty key name\");\n } catch (IOException e) {\n assertExceptionContains(\"Must specify a key name when creating\", e);\n }\n try {\n dfsAdmin.createEncryptionZone(zone2, null, NO_TRASH);\n fail(\"created a zone with null key name\");\n } catch (IOException e) {\n assertExceptionContains(\"Must specify a key name when creating\", e);\n }\n\n assertNumZones(1);\n\n /* Test success of creating an EZ when they key exists. */\n DFSTestUtil.createKey(myKeyName, cluster, conf);\n dfsAdmin.createEncryptionZone(zone2, myKeyName, NO_TRASH);\n assertNumZones(++numZones);\n assertZonePresent(myKeyName, zone2.toString());\n\n /* Test failure of create encryption zones as a non super user. 
*/\n final UserGroupInformation user = UserGroupInformation.\n createUserForTesting(\"user\", new String[] { \"mygroup\" });\n final Path nonSuper = new Path(\"/nonSuper\");\n fsWrapper.mkdir(nonSuper, FsPermission.getDirDefault(), false);\n\n user.doAs(new PrivilegedExceptionAction() {\n @Override\n public Object run() throws Exception {\n final HdfsAdmin userAdmin =\n new HdfsAdmin(FileSystem.getDefaultUri(conf), conf);\n try {\n userAdmin.createEncryptionZone(nonSuper, TEST_KEY, NO_TRASH);\n fail(\"createEncryptionZone is superuser-only operation\");\n } catch (AccessControlException e) {\n assertExceptionContains(\"Superuser privilege is required\", e);\n }\n return null;\n }\n });\n\n // Test success of creating an encryption zone a few levels down.\n Path deepZone = new Path(\"/d/e/e/p/zone\");\n fsWrapper.mkdir(deepZone, FsPermission.getDirDefault(), true);\n dfsAdmin.createEncryptionZone(deepZone, TEST_KEY, NO_TRASH);\n assertNumZones(++numZones);\n assertZonePresent(null, deepZone.toString());\n\n // Create and list some zones to test batching of listEZ\n for (int i=1; i<6; i++) {\n final Path zonePath = new Path(\"/listZone\" + i);\n fsWrapper.mkdir(zonePath, FsPermission.getDirDefault(), false);\n dfsAdmin.createEncryptionZone(zonePath, TEST_KEY, NO_TRASH);\n numZones++;\n assertNumZones(numZones);\n assertZonePresent(null, zonePath.toString());\n }\n\n fs.setSafeMode(SafeModeAction.SAFEMODE_ENTER);\n fs.saveNamespace();\n fs.setSafeMode(SafeModeAction.SAFEMODE_LEAVE);\n cluster.restartNameNode(true);\n assertNumZones(numZones);\n assertEquals(\"Unexpected number of encryption zones!\", numZones, cluster\n .getNamesystem().getNumEncryptionZones());\n assertGauge(\"NumEncryptionZones\", numZones, getMetrics(NS_METRICS));\n assertZonePresent(null, zone1.toString());\n\n // Verify newly added ez is present after restarting the NameNode\n // without persisting the namespace.\n Path nonpersistZone = new Path(\"/nonpersistZone\");\n 
fsWrapper.mkdir(nonpersistZone, FsPermission.getDirDefault(), false);\n dfsAdmin.createEncryptionZone(nonpersistZone, TEST_KEY, NO_TRASH);\n numZones++;\n cluster.restartNameNode(true);\n assertNumZones(numZones);\n assertZonePresent(null, nonpersistZone.toString());\n }\n\n @Test(timeout = 60000)\n public void testBasicOperationsRootDir() throws Exception {\n int numZones = 0;\n final Path rootDir = new Path(\"/\");\n final Path zone1 = new Path(rootDir, \"zone1\");\n\n /* Normal creation of an EZ on rootDir */\n dfsAdmin.createEncryptionZone(rootDir, TEST_KEY, NO_TRASH);\n assertNumZones(++numZones);\n assertZonePresent(null, rootDir.toString());\n\n // Verify rootDir ez is present after restarting the NameNode\n // and saving/loading from fsimage.\n fs.setSafeMode(SafeModeAction.SAFEMODE_ENTER);\n fs.saveNamespace();\n fs.setSafeMode(SafeModeAction.SAFEMODE_LEAVE);\n cluster.restartNameNode(true);\n assertNumZones(numZones);\n assertZonePresent(null, rootDir.toString());\n }\n\n /**\n * Test listing encryption zones as a non super user.\n */\n @Test(timeout = 60000)\n public void testListEncryptionZonesAsNonSuperUser() throws Exception {\n\n final UserGroupInformation user = UserGroupInformation.\n createUserForTesting(\"user\", new String[] { \"mygroup\" });\n\n final Path testRoot = new Path(\"/tmp/TestEncryptionZones\");\n final Path superPath = new Path(testRoot, \"superuseronly\");\n final Path allPath = new Path(testRoot, \"accessall\");\n\n fsWrapper.mkdir(superPath, new FsPermission((short) 0700), true);\n dfsAdmin.createEncryptionZone(superPath, TEST_KEY, NO_TRASH);\n\n fsWrapper.mkdir(allPath, new FsPermission((short) 0707), true);\n dfsAdmin.createEncryptionZone(allPath, TEST_KEY, NO_TRASH);\n\n user.doAs(new PrivilegedExceptionAction() {\n @Override\n public Object run() throws Exception {\n final HdfsAdmin userAdmin =\n new HdfsAdmin(FileSystem.getDefaultUri(conf), conf);\n try {\n userAdmin.listEncryptionZones();\n } catch (AccessControlException 
e) {\n assertExceptionContains(\"Superuser privilege is required\", e);\n }\n return null;\n }\n });\n }\n\n /**\n * Test getEncryptionZoneForPath as a non super user.\n */\n @Test(timeout = 60000)\n public void testGetEZAsNonSuperUser() throws Exception {\n\n final UserGroupInformation user = UserGroupInformation.\n createUserForTesting(\"user\", new String[] { \"mygroup\" });\n\n final Path testRoot = new Path(\"/tmp/TestEncryptionZones\");\n final Path superPath = new Path(testRoot, \"superuseronly\");\n final Path superPathFile = new Path(superPath, \"file1\");\n final Path allPath = new Path(testRoot, \"accessall\");\n final Path allPathFile = new Path(allPath, \"file1\");\n final Path nonEZDir = new Path(testRoot, \"nonEZDir\");\n final Path nonEZFile = new Path(nonEZDir, \"file1\");\n final Path nonexistent = new Path(\"/nonexistent\");\n final int len = 8192;\n\n fsWrapper.mkdir(testRoot, new FsPermission((short) 0777), true);\n fsWrapper.mkdir(superPath, new FsPermission((short) 0700), false);\n fsWrapper.mkdir(allPath, new FsPermission((short) 0777), false);\n fsWrapper.mkdir(nonEZDir, new FsPermission((short) 0777), false);\n dfsAdmin.createEncryptionZone(superPath, TEST_KEY, NO_TRASH);\n dfsAdmin.createEncryptionZone(allPath, TEST_KEY, NO_TRASH);\n dfsAdmin.allowSnapshot(new Path(\"/\"));\n final Path newSnap = fs.createSnapshot(new Path(\"/\"));\n DFSTestUtil.createFile(fs, superPathFile, len, (short) 1, 0xFEED);\n DFSTestUtil.createFile(fs, allPathFile, len, (short) 1, 0xFEED);\n DFSTestUtil.createFile(fs, nonEZFile, len, (short) 1, 0xFEED);\n\n user.doAs(new PrivilegedExceptionAction() {\n @Override\n public Object run() throws Exception {\n final HdfsAdmin userAdmin =\n new HdfsAdmin(FileSystem.getDefaultUri(conf), conf);\n\n // Check null arg\n try {\n userAdmin.getEncryptionZoneForPath(null);\n fail(\"should have thrown NPE\");\n } catch (NullPointerException e) {\n /*\n * IWBNI we could use assertExceptionContains, but the NPE that is\n * thrown 
has no message text.\n */\n }\n\n // Check operation with accessible paths\n assertEquals(\"expected ez path\", allPath.toString(),\n userAdmin.getEncryptionZoneForPath(allPath).getPath().\n toString());\n assertEquals(\"expected ez path\", allPath.toString(),\n userAdmin.getEncryptionZoneForPath(allPathFile).getPath().\n toString());\n\n // Check operation with inaccessible (lack of permissions) path\n try {\n userAdmin.getEncryptionZoneForPath(superPathFile);\n fail(\"expected AccessControlException\");\n } catch (AccessControlException e) {\n assertExceptionContains(\"Permission denied:\", e);\n }\n\n try {\n userAdmin.getEncryptionZoneForPath(nonexistent);\n fail(\"FileNotFoundException should be thrown for a non-existent\"\n + \" file path\");\n } catch (FileNotFoundException e) {\n assertExceptionContains(\"Path not found: \" + nonexistent, e);\n }\n\n // Check operation with non-ez paths\n assertNull(\"expected null for non-ez path\",\n userAdmin.getEncryptionZoneForPath(nonEZDir));\n assertNull(\"expected null for non-ez path\",\n userAdmin.getEncryptionZoneForPath(nonEZFile));\n\n // Check operation with snapshots\n String snapshottedAllPath = newSnap.toString() + allPath.toString();\n assertEquals(\"expected ez path\", allPath.toString(),\n userAdmin.getEncryptionZoneForPath(\n new Path(snapshottedAllPath)).getPath().toString());\n\n /*\n * Delete the file from the non-snapshot and test that it is still ok\n * in the ez.\n */\n fs.delete(allPathFile, false);\n assertEquals(\"expected ez path\", allPath.toString(),\n userAdmin.getEncryptionZoneForPath(\n new Path(snapshottedAllPath)).getPath().toString());\n\n // Delete the ez and make sure ss's ez is still ok.\n fs.delete(allPath, true);\n assertEquals(\"expected ez path\", allPath.toString(),\n userAdmin.getEncryptionZoneForPath(\n new Path(snapshottedAllPath)).getPath().toString());\n try {\n userAdmin.getEncryptionZoneForPath(allPathFile);\n fail(\"FileNotFoundException should be thrown for a 
non-existent\"\n + \" file path\");\n } catch (FileNotFoundException e) {\n assertExceptionContains(\"Path not found: \" + allPathFile, e);\n }\n try {\n userAdmin.getEncryptionZoneForPath(allPath);\n fail(\"FileNotFoundException should be thrown for a non-existent\"\n + \" file path\");\n } catch (FileNotFoundException e) {\n assertExceptionContains(\"Path not found: \" + allPath, e);\n }\n return null;\n }\n });\n }\n\n /**\n * Test success of Rename EZ on a directory which is already an EZ.\n */\n private void doRenameEncryptionZone(FSTestWrapper wrapper) throws Exception {\n final Path testRoot = new Path(\"/tmp/TestEncryptionZones\");\n final Path pathFoo = new Path(testRoot, \"foo\");\n final Path pathFooBaz = new Path(pathFoo, \"baz\");\n final Path pathFooBazFile = new Path(pathFooBaz, \"file\");\n final Path pathFooBar = new Path(pathFoo, \"bar\");\n final Path pathFooBarFile = new Path(pathFooBar, \"file\");\n final int len = 8192;\n wrapper.mkdir(pathFoo, FsPermission.getDirDefault(), true);\n dfsAdmin.createEncryptionZone(pathFoo, TEST_KEY, NO_TRASH);\n wrapper.mkdir(pathFooBaz, FsPermission.getDirDefault(), true);\n DFSTestUtil.createFile(fs, pathFooBazFile, len, (short) 1, 0xFEED);\n String contents = DFSTestUtil.readFile(fs, pathFooBazFile);\n try {\n wrapper.rename(pathFooBaz, testRoot);\n } catch (IOException e) {\n assertExceptionContains(pathFooBaz.toString() + \" can't be moved from\" +\n \" an encryption zone.\", e\n );\n }\n\n // Verify that we can rename dir and files within an encryption zone.\n assertTrue(fs.rename(pathFooBaz, pathFooBar));\n assertTrue(\"Rename of dir and file within ez failed\",\n !wrapper.exists(pathFooBaz) && wrapper.exists(pathFooBar));\n assertEquals(\"Renamed file contents not the same\",\n contents, DFSTestUtil.readFile(fs, pathFooBarFile));\n\n // Verify that we can rename an EZ root\n final Path newFoo = new Path(testRoot, \"newfoo\");\n assertTrue(\"Rename of EZ root\", fs.rename(pathFoo, newFoo));\n 
assertTrue(\"Rename of EZ root failed\",\n !wrapper.exists(pathFoo) && wrapper.exists(newFoo));\n\n // Verify that we can't rename an EZ root onto itself\n try {\n wrapper.rename(newFoo, newFoo);\n } catch (IOException e) {\n assertExceptionContains(\"are the same\", e);\n }\n }\n\n @Test(timeout = 60000)\n public void testRenameFileSystem() throws Exception {\n doRenameEncryptionZone(fsWrapper);\n }\n\n @Test(timeout = 60000)\n public void testRenameFileContext() throws Exception {\n doRenameEncryptionZone(fcWrapper);\n }\n\n private FileEncryptionInfo getFileEncryptionInfo(Path path) throws Exception {\n LocatedBlocks blocks = fs.getClient().getLocatedBlocks(path.toString(), 0);\n return blocks.getFileEncryptionInfo();\n }\n\n @Test(timeout = 120000)\n public void testReadWrite() throws Exception {\n final HdfsAdmin dfsAdmin =\n new HdfsAdmin(FileSystem.getDefaultUri(conf), conf);\n // Create a base file for comparison\n final Path baseFile = new Path(\"/base\");\n final int len = 8192;\n DFSTestUtil.createFile(fs, baseFile, len, (short) 1, 0xFEED);\n // Create the first enc file\n final Path zone = new Path(\"/zone\");\n fs.mkdirs(zone);\n dfsAdmin.createEncryptionZone(zone, TEST_KEY, NO_TRASH);\n final Path encFile1 = new Path(zone, \"myfile\");\n DFSTestUtil.createFile(fs, encFile1, len, (short) 1, 0xFEED);\n // Read them back in and compare byte-by-byte\n verifyFilesEqual(fs, baseFile, encFile1, len);\n // Roll the key of the encryption zone\n assertNumZones(1);\n String keyName = dfsAdmin.listEncryptionZones().next().getKeyName();\n cluster.getNamesystem().getProvider().rollNewVersion(keyName);\n // Read them back in and compare byte-by-byte\n verifyFilesEqual(fs, baseFile, encFile1, len);\n // Write a new enc file and validate\n final Path encFile2 = new Path(zone, \"myfile2\");\n DFSTestUtil.createFile(fs, encFile2, len, (short) 1, 0xFEED);\n // FEInfos should be different\n FileEncryptionInfo feInfo1 = getFileEncryptionInfo(encFile1);\n FileEncryptionInfo 
feInfo2 = getFileEncryptionInfo(encFile2);\n assertFalse(\"EDEKs should be different\", Arrays\n .equals(feInfo1.getEncryptedDataEncryptionKey(),\n feInfo2.getEncryptedDataEncryptionKey()));\n assertNotEquals(\"Key was rolled, versions should be different\",\n feInfo1.getEzKeyVersionName(), feInfo2.getEzKeyVersionName());\n // Contents still equal\n verifyFilesEqual(fs, encFile1, encFile2, len);\n }\n\n @Test(timeout = 120000)\n public void testReadWriteUsingWebHdfs() throws Exception {\n final HdfsAdmin dfsAdmin =\n new HdfsAdmin(FileSystem.getDefaultUri(conf), conf);\n final FileSystem webHdfsFs = WebHdfsTestUtil.getWebHdfsFileSystem(conf,\n WebHdfsConstants.WEBHDFS_SCHEME);\n\n final Path zone = new Path(\"/zone\");\n fs.mkdirs(zone);\n dfsAdmin.createEncryptionZone(zone, TEST_KEY, NO_TRASH);\n\n /* Create an unencrypted file for comparison purposes. */\n final Path unencFile = new Path(\"/unenc\");\n final int len = 8192;\n DFSTestUtil.createFile(webHdfsFs, unencFile, len, (short) 1, 0xFEED);\n\n /*\n * Create the same file via webhdfs, but this time encrypted. Compare it\n * using both webhdfs and DFS.\n */\n final Path encFile1 = new Path(zone, \"myfile\");\n DFSTestUtil.createFile(webHdfsFs, encFile1, len, (short) 1, 0xFEED);\n verifyFilesEqual(webHdfsFs, unencFile, encFile1, len);\n verifyFilesEqual(fs, unencFile, encFile1, len);\n\n /*\n * Same thing except this time create the encrypted file using DFS.\n */\n final Path encFile2 = new Path(zone, \"myfile2\");\n DFSTestUtil.createFile(fs, encFile2, len, (short) 1, 0xFEED);\n verifyFilesEqual(webHdfsFs, unencFile, encFile2, len);\n verifyFilesEqual(fs, unencFile, encFile2, len);\n\n /* Verify appending to files works correctly. 
*/\n appendOneByte(fs, unencFile);\n appendOneByte(webHdfsFs, encFile1);\n appendOneByte(fs, encFile2);\n verifyFilesEqual(webHdfsFs, unencFile, encFile1, len);\n verifyFilesEqual(fs, unencFile, encFile1, len);\n verifyFilesEqual(webHdfsFs, unencFile, encFile2, len);\n verifyFilesEqual(fs, unencFile, encFile2, len);\n }\n\n private void appendOneByte(FileSystem fs, Path p) throws IOException {\n final FSDataOutputStream out = fs.append(p);\n out.write((byte) 0x123);\n out.close();\n }\n\n @Test(timeout = 60000)\n public void testVersionAndSuiteNegotiation() throws Exception {\n final HdfsAdmin dfsAdmin =\n new HdfsAdmin(FileSystem.getDefaultUri(conf), conf);\n final Path zone = new Path(\"/zone\");\n fs.mkdirs(zone);\n dfsAdmin.createEncryptionZone(zone, TEST_KEY, NO_TRASH);\n // Create a file in an EZ, which should succeed\n DFSTestUtil\n .createFile(fs, new Path(zone, \"success1\"), 0, (short) 1, 0xFEED);\n // Pass no supported versions, fail\n DFSOutputStream.SUPPORTED_CRYPTO_VERSIONS = new CryptoProtocolVersion[] {};\n try {\n DFSTestUtil.createFile(fs, new Path(zone, \"fail\"), 0, (short) 1, 0xFEED);\n fail(\"Created a file without specifying a crypto protocol version\");\n } catch (UnknownCryptoProtocolVersionException e) {\n assertExceptionContains(\"No crypto protocol versions\", e);\n }\n // Pass some unknown versions, fail\n DFSOutputStream.SUPPORTED_CRYPTO_VERSIONS = new CryptoProtocolVersion[]\n { CryptoProtocolVersion.UNKNOWN, CryptoProtocolVersion.UNKNOWN };\n try {\n DFSTestUtil.createFile(fs, new Path(zone, \"fail\"), 0, (short) 1, 0xFEED);\n fail(\"Created a file without specifying a known crypto protocol version\");\n } catch (UnknownCryptoProtocolVersionException e) {\n assertExceptionContains(\"No crypto protocol versions\", e);\n }\n // Pass some unknown and a good cipherSuites, success\n DFSOutputStream.SUPPORTED_CRYPTO_VERSIONS =\n new CryptoProtocolVersion[] {\n CryptoProtocolVersion.UNKNOWN,\n CryptoProtocolVersion.UNKNOWN,\n 
CryptoProtocolVersion.ENCRYPTION_ZONES };\n DFSTestUtil\n .createFile(fs, new Path(zone, \"success2\"), 0, (short) 1, 0xFEED);\n DFSOutputStream.SUPPORTED_CRYPTO_VERSIONS =\n new CryptoProtocolVersion[] {\n CryptoProtocolVersion.ENCRYPTION_ZONES,\n CryptoProtocolVersion.UNKNOWN,\n CryptoProtocolVersion.UNKNOWN} ;\n DFSTestUtil\n .createFile(fs, new Path(zone, \"success3\"), 4096, (short) 1, 0xFEED);\n // Check KeyProvider state\n // Flushing the KP on the NN, since it caches, and init a test one\n cluster.getNamesystem().getProvider().flush();\n KeyProvider provider = KeyProviderFactory\n .get(new URI(conf.getTrimmed(DFSConfigKeys.DFS_ENCRYPTION_KEY_PROVIDER_URI)),\n conf);\n List keys = provider.getKeys();\n assertEquals(\"Expected NN to have created one key per zone\", 1,\n keys.size());\n List allVersions = Lists.newArrayList();\n for (String key : keys) {\n List versions = provider.getKeyVersions(key);\n assertEquals(\"Should only have one key version per key\", 1,\n versions.size());\n allVersions.addAll(versions);\n }\n // Check that the specified CipherSuite was correctly saved on the NN\n for (int i = 2; i <= 3; i++) {\n FileEncryptionInfo feInfo =\n getFileEncryptionInfo(new Path(zone.toString() +\n \"/success\" + i));\n assertEquals(feInfo.getCipherSuite(), CipherSuite.AES_CTR_NOPADDING);\n }\n\n DFSClient old = fs.dfs;\n try {\n testCipherSuiteNegotiation(fs, conf);\n } finally {\n fs.dfs = old;\n }\n }\n\n @SuppressWarnings(\"unchecked\")\n private static void mockCreate(ClientProtocol mcp,\n CipherSuite suite, CryptoProtocolVersion version) throws Exception {\n Mockito.doReturn(\n new HdfsFileStatus(0, false, 1, 1024, 0, 0, new FsPermission(\n (short) 777), \"owner\", \"group\", new byte[0], new byte[0],\n 1010, 0, new FileEncryptionInfo(suite,\n version, new byte[suite.getAlgorithmBlockSize()],\n new byte[suite.getAlgorithmBlockSize()],\n \"fakeKey\", \"fakeVersion\"),\n (byte) 0))\n .when(mcp)\n .create(anyString(), (FsPermission) anyObject(), 
anyString(),\n (EnumSetWritable) anyObject(), anyBoolean(),\n anyShort(), anyLong(), (CryptoProtocolVersion[]) anyObject());\n }\n\n // This test only uses mocks. Called from the end of an existing test to\n // avoid an extra mini cluster.\n private static void testCipherSuiteNegotiation(DistributedFileSystem fs,\n Configuration conf) throws Exception {\n // Set up mock ClientProtocol to test client-side CipherSuite negotiation\n final ClientProtocol mcp = Mockito.mock(ClientProtocol.class);\n\n // Try with an empty conf\n final Configuration noCodecConf = new Configuration(conf);\n final CipherSuite suite = CipherSuite.AES_CTR_NOPADDING;\n final String confKey = CommonConfigurationKeysPublic\n .HADOOP_SECURITY_CRYPTO_CODEC_CLASSES_KEY_PREFIX + suite\n .getConfigSuffix();\n noCodecConf.set(confKey, \"\");\n fs.dfs = new DFSClient(null, mcp, noCodecConf, null);\n mockCreate(mcp, suite, CryptoProtocolVersion.ENCRYPTION_ZONES);\n try {\n fs.create(new Path(\"/mock\"));\n fail(\"Created with no configured codecs!\");\n } catch (UnknownCipherSuiteException e) {\n assertExceptionContains(\"No configuration found for the cipher\", e);\n }\n\n // Try create with an UNKNOWN CipherSuite\n fs.dfs = new DFSClient(null, mcp, conf, null);\n CipherSuite unknown = CipherSuite.UNKNOWN;\n unknown.setUnknownValue(989);\n mockCreate(mcp, unknown, CryptoProtocolVersion.ENCRYPTION_ZONES);\n try {\n fs.create(new Path(\"/mock\"));\n fail(\"Created with unknown cipher!\");\n } catch (IOException e) {\n assertExceptionContains(\"unknown CipherSuite with ID 989\", e);\n }\n }\n\n @Test(timeout = 120000)\n public void testCreateEZWithNoProvider() throws Exception {\n // Unset the key provider and make sure EZ ops don't work\n final Configuration clusterConf = cluster.getConfiguration(0);\n clusterConf.unset(DFSConfigKeys.DFS_ENCRYPTION_KEY_PROVIDER_URI);\n cluster.restartNameNode(true);\n cluster.waitActive();\n final Path zone1 = new Path(\"/zone1\");\n fsWrapper.mkdir(zone1, 
FsPermission.getDirDefault(), true);\n try {\n dfsAdmin.createEncryptionZone(zone1, TEST_KEY, NO_TRASH);\n fail(\"expected exception\");\n } catch (IOException e) {\n assertExceptionContains(\"since no key provider is available\", e);\n }\n final Path jksPath = new Path(testRootDir.toString(), \"test.jks\");\n clusterConf.set(DFSConfigKeys.DFS_ENCRYPTION_KEY_PROVIDER_URI,\n JavaKeyStoreProvider.SCHEME_NAME + \"://file\" + jksPath.toUri()\n );\n // Try listing EZs as well\n assertNumZones(0);\n }\n\n @Test(timeout = 120000)\n public void testIsEncryptedMethod() throws Exception {\n doTestIsEncryptedMethod(new Path(\"/\"));\n doTestIsEncryptedMethod(new Path(\"/.reserved/raw\"));\n }\n\n private void doTestIsEncryptedMethod(Path prefix) throws Exception {\n try {\n dTIEM(prefix);\n } finally {\n for (FileStatus s : fsWrapper.listStatus(prefix)) {\n fsWrapper.delete(s.getPath(), true);\n }\n }\n }\n\n private void dTIEM(Path prefix) throws Exception {\n final HdfsAdmin dfsAdmin =\n new HdfsAdmin(FileSystem.getDefaultUri(conf), conf);\n // Create an unencrypted file to check isEncrypted returns false\n final Path baseFile = new Path(prefix, \"base\");\n fsWrapper.createFile(baseFile);\n FileStatus stat = fsWrapper.getFileStatus(baseFile);\n assertFalse(\"Expected isEncrypted to return false for \" + baseFile,\n stat.isEncrypted());\n\n // Create an encrypted file to check isEncrypted returns true\n final Path zone = new Path(prefix, \"zone\");\n fsWrapper.mkdir(zone, FsPermission.getDirDefault(), true);\n dfsAdmin.createEncryptionZone(zone, TEST_KEY, NO_TRASH);\n final Path encFile = new Path(zone, \"encfile\");\n fsWrapper.createFile(encFile);\n stat = fsWrapper.getFileStatus(encFile);\n assertTrue(\"Expected isEncrypted to return true for enc file\" + encFile,\n stat.isEncrypted());\n\n // check that it returns true for an ez root\n stat = fsWrapper.getFileStatus(zone);\n assertTrue(\"Expected isEncrypted to return true for ezroot\",\n stat.isEncrypted());\n\n // 
check that it returns true for a dir in the ez\n final Path zoneSubdir = new Path(zone, \"subdir\");\n fsWrapper.mkdir(zoneSubdir, FsPermission.getDirDefault(), true);\n stat = fsWrapper.getFileStatus(zoneSubdir);\n assertTrue(\n \"Expected isEncrypted to return true for ez subdir \" + zoneSubdir,\n stat.isEncrypted());\n\n // check that it returns false for a non ez dir\n final Path nonEzDirPath = new Path(prefix, \"nonzone\");\n fsWrapper.mkdir(nonEzDirPath, FsPermission.getDirDefault(), true);\n stat = fsWrapper.getFileStatus(nonEzDirPath);\n assertFalse(\n \"Expected isEncrypted to return false for directory \" + nonEzDirPath,\n stat.isEncrypted());\n\n // check that it returns true for listings within an ez\n FileStatus[] statuses = fsWrapper.listStatus(zone);\n for (FileStatus s : statuses) {\n assertTrue(\"Expected isEncrypted to return true for ez stat \" + zone,\n s.isEncrypted());\n }\n\n statuses = fsWrapper.listStatus(encFile);\n for (FileStatus s : statuses) {\n assertTrue(\n \"Expected isEncrypted to return true for ez file stat \" + encFile,\n s.isEncrypted());\n }\n\n // check that it returns false for listings outside an ez\n statuses = fsWrapper.listStatus(nonEzDirPath);\n for (FileStatus s : statuses) {\n assertFalse(\n \"Expected isEncrypted to return false for nonez stat \" + nonEzDirPath,\n s.isEncrypted());\n }\n\n statuses = fsWrapper.listStatus(baseFile);\n for (FileStatus s : statuses) {\n assertFalse(\n \"Expected isEncrypted to return false for non ez stat \" + baseFile,\n s.isEncrypted());\n }\n }\n\n private class MyInjector extends EncryptionFaultInjector {\n int generateCount;\n CountDownLatch ready;\n CountDownLatch wait;\n\n public MyInjector() {\n this.ready = new CountDownLatch(1);\n this.wait = new CountDownLatch(1);\n }\n\n @Override\n public void startFileAfterGenerateKey() throws IOException {\n ready.countDown();\n try {\n wait.await();\n } catch (InterruptedException e) {\n throw new IOException(e);\n }\n generateCount++;\n 
}\n }\n\n private class CreateFileTask implements Callable {\n private FileSystemTestWrapper fsWrapper;\n private Path name;\n\n CreateFileTask(FileSystemTestWrapper fsWrapper, Path name) {\n this.fsWrapper = fsWrapper;\n this.name = name;\n }\n\n @Override\n public Void call() throws Exception {\n fsWrapper.createFile(name);\n return null;\n }\n }\n\n private class InjectFaultTask implements Callable {\n final Path zone1 = new Path(\"/zone1\");\n final Path file = new Path(zone1, \"file1\");\n final ExecutorService executor = Executors.newSingleThreadExecutor();\n\n MyInjector injector;\n\n @Override\n public Void call() throws Exception {\n // Set up the injector\n injector = new MyInjector();\n EncryptionFaultInjector.instance = injector;\n Future future =\n executor.submit(new CreateFileTask(fsWrapper, file));\n injector.ready.await();\n // Do the fault\n doFault();\n // Allow create to proceed\n injector.wait.countDown();\n future.get();\n // Cleanup and postconditions\n doCleanup();\n return null;\n }\n\n public void doFault() throws Exception {}\n\n public void doCleanup() throws Exception {}\n }\n\n /**\n * Tests the retry logic in startFile. 
We release the lock while generating\n * an EDEK, so tricky things can happen in the intervening time.\n */\n @Test(timeout = 120000)\n public void testStartFileRetry() throws Exception {\n final Path zone1 = new Path(\"/zone1\");\n final Path file = new Path(zone1, \"file1\");\n fsWrapper.mkdir(zone1, FsPermission.getDirDefault(), true);\n ExecutorService executor = Executors.newSingleThreadExecutor();\n\n // Test when the parent directory becomes an EZ\n executor.submit(new InjectFaultTask() {\n @Override\n public void doFault() throws Exception {\n dfsAdmin.createEncryptionZone(zone1, TEST_KEY, NO_TRASH);\n }\n @Override\n public void doCleanup() throws Exception {\n assertEquals(\"Expected a startFile retry\", 2, injector.generateCount);\n fsWrapper.delete(file, false);\n }\n }).get();\n\n // Test when the parent directory unbecomes an EZ\n executor.submit(new InjectFaultTask() {\n @Override\n public void doFault() throws Exception {\n fsWrapper.delete(zone1, true);\n }\n @Override\n public void doCleanup() throws Exception {\n assertEquals(\"Expected no startFile retries\", 1, injector.generateCount);\n fsWrapper.delete(file, false);\n }\n }).get();\n\n // Test when the parent directory becomes a different EZ\n fsWrapper.mkdir(zone1, FsPermission.getDirDefault(), true);\n final String otherKey = \"other_key\";\n DFSTestUtil.createKey(otherKey, cluster, conf);\n dfsAdmin.createEncryptionZone(zone1, TEST_KEY, NO_TRASH);\n\n executor.submit(new InjectFaultTask() {\n @Override\n public void doFault() throws Exception {\n fsWrapper.delete(zone1, true);\n fsWrapper.mkdir(zone1, FsPermission.getDirDefault(), true);\n dfsAdmin.createEncryptionZone(zone1, otherKey, NO_TRASH);\n }\n @Override\n public void doCleanup() throws Exception {\n assertEquals(\"Expected a startFile retry\", 2, injector.generateCount);\n fsWrapper.delete(zone1, true);\n }\n }).get();\n\n // Test that the retry limit leads to an error\n fsWrapper.mkdir(zone1, FsPermission.getDirDefault(), 
true);\n final String anotherKey = \"another_key\";\n DFSTestUtil.createKey(anotherKey, cluster, conf);\n dfsAdmin.createEncryptionZone(zone1, anotherKey, NO_TRASH);\n String keyToUse = otherKey;\n\n MyInjector injector = new MyInjector();\n EncryptionFaultInjector.instance = injector;\n Future future = executor.submit(new CreateFileTask(fsWrapper, file));\n\n // Flip-flop between two EZs to repeatedly fail\n for (int i=0; i testToken = new Token(testIdentifier, new byte[0],\n new Text(), new Text());\n Mockito.when(((DelegationTokenExtension)keyProvider).\n addDelegationTokens(anyString(), (Credentials)any())).\n thenReturn(new Token[] { testToken });\n\n dfs.getClient().setKeyProvider(keyProvider);\n\n Credentials creds = new Credentials();\n final Token tokens[] = dfs.addDelegationTokens(\"JobTracker\", creds);\n DistributedFileSystem.LOG.debug(\"Delegation tokens: \" +\n Arrays.asList(tokens));\n Assert.assertEquals(2, tokens.length);\n Assert.assertEquals(tokens[1], testToken);\n Assert.assertEquals(1, creds.numberOfTokens());\n }\n\n /**\n * Test running fsck on a system with encryption zones.\n */\n @Test(timeout = 60000)\n public void testFsckOnEncryptionZones() throws Exception {\n final int len = 8196;\n final Path zoneParent = new Path(\"/zones\");\n final Path zone1 = new Path(zoneParent, \"zone1\");\n final Path zone1File = new Path(zone1, \"file\");\n fsWrapper.mkdir(zone1, FsPermission.getDirDefault(), true);\n dfsAdmin.createEncryptionZone(zone1, TEST_KEY, NO_TRASH);\n DFSTestUtil.createFile(fs, zone1File, len, (short) 1, 0xFEED);\n ByteArrayOutputStream bStream = new ByteArrayOutputStream();\n PrintStream out = new PrintStream(bStream, true);\n int errCode = ToolRunner.run(new DFSck(conf, out),\n new String[]{ \"/\" });\n assertEquals(\"Fsck ran with non-zero error code\", 0, errCode);\n String result = bStream.toString();\n assertTrue(\"Fsck did not return HEALTHY status\",\n result.contains(NamenodeFsck.HEALTHY_STATUS));\n\n // Run fsck directly 
on the encryption zone instead of root\n errCode = ToolRunner.run(new DFSck(conf, out),\n new String[]{ zoneParent.toString() });\n assertEquals(\"Fsck ran with non-zero error code\", 0, errCode);\n result = bStream.toString();\n assertTrue(\"Fsck did not return HEALTHY status\",\n result.contains(NamenodeFsck.HEALTHY_STATUS));\n }\n\n /**\n * Test correctness of successive snapshot creation and deletion\n * on a system with encryption zones.\n */\n @Test(timeout = 60000)\n public void testSnapshotsOnEncryptionZones() throws Exception {\n final String TEST_KEY2 = \"testkey2\";\n DFSTestUtil.createKey(TEST_KEY2, cluster, conf);\n\n final int len = 8196;\n final Path zoneParent = new Path(\"/zones\");\n final Path zone = new Path(zoneParent, \"zone\");\n final Path zoneFile = new Path(zone, \"zoneFile\");\n fsWrapper.mkdir(zone, FsPermission.getDirDefault(), true);\n dfsAdmin.allowSnapshot(zoneParent);\n dfsAdmin.createEncryptionZone(zone, TEST_KEY, NO_TRASH);\n DFSTestUtil.createFile(fs, zoneFile, len, (short) 1, 0xFEED);\n String contents = DFSTestUtil.readFile(fs, zoneFile);\n final Path snap1 = fs.createSnapshot(zoneParent, \"snap1\");\n final Path snap1Zone = new Path(snap1, zone.getName());\n assertEquals(\"Got unexpected ez path\", zone.toString(),\n dfsAdmin.getEncryptionZoneForPath(snap1Zone).getPath().toString());\n\n // Now delete the encryption zone, recreate the dir, and take another\n // snapshot\n fsWrapper.delete(zone, true);\n fsWrapper.mkdir(zone, FsPermission.getDirDefault(), true);\n final Path snap2 = fs.createSnapshot(zoneParent, \"snap2\");\n final Path snap2Zone = new Path(snap2, zone.getName());\n assertNull(\"Expected null ez path\",\n dfsAdmin.getEncryptionZoneForPath(snap2Zone));\n\n // Create the encryption zone again\n dfsAdmin.createEncryptionZone(zone, TEST_KEY2, NO_TRASH);\n final Path snap3 = fs.createSnapshot(zoneParent, \"snap3\");\n final Path snap3Zone = new Path(snap3, zone.getName());\n // Check that snap3's EZ has the correct 
settings\n EncryptionZone ezSnap3 = dfsAdmin.getEncryptionZoneForPath(snap3Zone);\n assertEquals(\"Got unexpected ez path\", zone.toString(),\n ezSnap3.getPath().toString());\n assertEquals(\"Unexpected ez key\", TEST_KEY2, ezSnap3.getKeyName());\n // Check that older snapshots still have the old EZ settings\n EncryptionZone ezSnap1 = dfsAdmin.getEncryptionZoneForPath(snap1Zone);\n assertEquals(\"Got unexpected ez path\", zone.toString(),\n ezSnap1.getPath().toString());\n assertEquals(\"Unexpected ez key\", TEST_KEY, ezSnap1.getKeyName());\n\n // Check that listEZs only shows the current filesystem state\n ArrayList listZones = Lists.newArrayList();\n RemoteIterator it = dfsAdmin.listEncryptionZones();\n while (it.hasNext()) {\n listZones.add(it.next());\n }\n for (EncryptionZone z: listZones) {\n System.out.println(z);\n }\n assertEquals(\"Did not expect additional encryption zones!\", 1,\n listZones.size());\n EncryptionZone listZone = listZones.get(0);\n assertEquals(\"Got unexpected ez path\", zone.toString(),\n listZone.getPath().toString());\n assertEquals(\"Unexpected ez key\", TEST_KEY2, listZone.getKeyName());\n\n // Verify contents of the snapshotted file\n final Path snapshottedZoneFile = new Path(\n snap1.toString() + \"/\" + zone.getName() + \"/\" + zoneFile.getName());\n assertEquals(\"Contents of snapshotted file have changed unexpectedly\",\n contents, DFSTestUtil.readFile(fs, snapshottedZoneFile));\n\n // Now delete the snapshots out of order and verify the zones are still\n // correct\n fs.deleteSnapshot(zoneParent, snap2.getName());\n assertEquals(\"Got unexpected ez path\", zone.toString(),\n dfsAdmin.getEncryptionZoneForPath(snap1Zone).getPath().toString());\n assertEquals(\"Got unexpected ez path\", zone.toString(),\n dfsAdmin.getEncryptionZoneForPath(snap3Zone).getPath().toString());\n fs.deleteSnapshot(zoneParent, snap1.getName());\n assertEquals(\"Got unexpected ez path\", zone.toString(),\n 
dfsAdmin.getEncryptionZoneForPath(snap3Zone).getPath().toString());\n }\n\n /**\n * Verify symlinks can be created in encryption zones and that\n * they function properly when the target is in the same\n * or different ez.\n */\n @Test(timeout = 60000)\n public void testEncryptionZonesWithSymlinks() throws Exception {\n // Verify we can create an encryption zone over both link and target\n final int len = 8192;\n final Path parent = new Path(\"/parent\");\n final Path linkParent = new Path(parent, \"symdir1\");\n final Path targetParent = new Path(parent, \"symdir2\");\n final Path link = new Path(linkParent, \"link\");\n final Path target = new Path(targetParent, \"target\");\n fs.mkdirs(parent);\n dfsAdmin.createEncryptionZone(parent, TEST_KEY, NO_TRASH);\n fs.mkdirs(linkParent);\n fs.mkdirs(targetParent);\n DFSTestUtil.createFile(fs, target, len, (short)1, 0xFEED);\n String content = DFSTestUtil.readFile(fs, target);\n fs.createSymlink(target, link, false);\n assertEquals(\"Contents read from link are not the same as target\",\n content, DFSTestUtil.readFile(fs, link));\n fs.delete(parent, true);\n\n // Now let's test when the symlink and target are in different\n // encryption zones\n fs.mkdirs(linkParent);\n fs.mkdirs(targetParent);\n dfsAdmin.createEncryptionZone(linkParent, TEST_KEY, NO_TRASH);\n dfsAdmin.createEncryptionZone(targetParent, TEST_KEY, NO_TRASH);\n DFSTestUtil.createFile(fs, target, len, (short)1, 0xFEED);\n content = DFSTestUtil.readFile(fs, target);\n fs.createSymlink(target, link, false);\n assertEquals(\"Contents read from link are not the same as target\",\n content, DFSTestUtil.readFile(fs, link));\n fs.delete(link, true);\n fs.delete(target, true);\n }\n\n @Test(timeout = 60000)\n public void testConcatFailsInEncryptionZones() throws Exception {\n final int len = 8192;\n final Path ez = new Path(\"/ez\");\n fs.mkdirs(ez);\n dfsAdmin.createEncryptionZone(ez, TEST_KEY, NO_TRASH);\n final Path src1 = new Path(ez, \"src1\");\n final Path 
src2 = new Path(ez, \"src2\");\n final Path target = new Path(ez, \"target\");\n DFSTestUtil.createFile(fs, src1, len, (short)1, 0xFEED);\n DFSTestUtil.createFile(fs, src2, len, (short)1, 0xFEED);\n DFSTestUtil.createFile(fs, target, len, (short)1, 0xFEED);\n try {\n fs.concat(target, new Path[] { src1, src2 });\n fail(\"expected concat to throw en exception for files in an ez\");\n } catch (IOException e) {\n assertExceptionContains(\n \"concat can not be called for files in an encryption zone\", e);\n }\n fs.delete(ez, true);\n }\n\n /**\n * Test running the OfflineImageViewer on a system with encryption zones.\n */\n @Test(timeout = 60000)\n public void testOfflineImageViewerOnEncryptionZones() throws Exception {\n final int len = 8196;\n final Path zoneParent = new Path(\"/zones\");\n final Path zone1 = new Path(zoneParent, \"zone1\");\n final Path zone1File = new Path(zone1, \"file\");\n fsWrapper.mkdir(zone1, FsPermission.getDirDefault(), true);\n dfsAdmin.createEncryptionZone(zone1, TEST_KEY, NO_TRASH);\n DFSTestUtil.createFile(fs, zone1File, len, (short) 1, 0xFEED);\n fs.setSafeMode(SafeModeAction.SAFEMODE_ENTER, false);\n fs.saveNamespace();\n\n File originalFsimage = FSImageTestUtil.findLatestImageFile(FSImageTestUtil\n .getFSImage(cluster.getNameNode()).getStorage().getStorageDir(0));\n if (originalFsimage == null) {\n throw new RuntimeException(\"Didn't generate or can't find fsimage\");\n }\n\n // Run the XML OIV processor\n ByteArrayOutputStream output = new ByteArrayOutputStream();\n PrintStream pw = new PrintStream(output);\n PBImageXmlWriter v = new PBImageXmlWriter(new Configuration(), pw);\n v.visit(new RandomAccessFile(originalFsimage, \"r\"));\n final String xml = output.toString();\n SAXParser parser = SAXParserFactory.newInstance().newSAXParser();\n parser.parse(new InputSource(new StringReader(xml)), new DefaultHandler());\n }\n\n /**\n * Test creating encryption zone on the root path\n */\n @Test(timeout = 60000)\n public void 
testEncryptionZonesOnRootPath() throws Exception {\n final int len = 8196;\n final Path rootDir = new Path(\"/\");\n final Path zoneFile = new Path(rootDir, \"file\");\n final Path rawFile = new Path(\"/.reserved/raw/file\");\n dfsAdmin.createEncryptionZone(rootDir, TEST_KEY, NO_TRASH);\n DFSTestUtil.createFile(fs, zoneFile, len, (short) 1, 0xFEED);\n\n assertEquals(\"File can be created on the root encryption zone \" +\n \"with correct length\",\n len, fs.getFileStatus(zoneFile).getLen());\n assertEquals(\"Root dir is encrypted\",\n true, fs.getFileStatus(rootDir).isEncrypted());\n assertEquals(\"File is encrypted\",\n true, fs.getFileStatus(zoneFile).isEncrypted());\n DFSTestUtil.verifyFilesNotEqual(fs, zoneFile, rawFile, len);\n }\n\n @Test(timeout = 60000)\n public void testEncryptionZonesOnRelativePath() throws Exception {\n final int len = 8196;\n final Path baseDir = new Path(\"/somewhere/base\");\n final Path zoneDir = new Path(\"zone\");\n final Path zoneFile = new Path(\"file\");\n fs.setWorkingDirectory(baseDir);\n fs.mkdirs(zoneDir);\n dfsAdmin.createEncryptionZone(zoneDir, TEST_KEY, NO_TRASH);\n DFSTestUtil.createFile(fs, zoneFile, len, (short) 1, 0xFEED);\n\n assertNumZones(1);\n assertZonePresent(TEST_KEY, \"/somewhere/base/zone\");\n\n assertEquals(\"Got unexpected ez path\", \"/somewhere/base/zone\", dfsAdmin\n .getEncryptionZoneForPath(zoneDir).getPath().toString());\n }\n\n @Test(timeout = 60000)\n public void testGetEncryptionZoneOnANonExistentZoneFile() throws Exception {\n final Path ez = new Path(\"/ez\");\n fs.mkdirs(ez);\n dfsAdmin.createEncryptionZone(ez, TEST_KEY, NO_TRASH);\n Path zoneFile = new Path(ez, \"file\");\n try {\n fs.getEZForPath(zoneFile);\n fail(\"FileNotFoundException should be thrown for a non-existent\"\n + \" file path\");\n } catch (FileNotFoundException e) {\n assertExceptionContains(\"Path not found: \" + zoneFile, e);\n }\n try {\n dfsAdmin.getEncryptionZoneForPath(zoneFile);\n fail(\"FileNotFoundException should be 
thrown for a non-existent\"\n + \" file path\");\n } catch (FileNotFoundException e) {\n assertExceptionContains(\"Path not found: \" + zoneFile, e);\n }\n }\n\n @Test(timeout = 120000)\n public void testEncryptionZoneWithTrash() throws Exception {\n // Create the encryption zone1\n final HdfsAdmin dfsAdmin =\n new HdfsAdmin(FileSystem.getDefaultUri(conf), conf);\n final Path zone1 = new Path(\"/zone1\");\n fs.mkdirs(zone1);\n dfsAdmin.createEncryptionZone(zone1, TEST_KEY, NO_TRASH);\n\n // Create the encrypted file in zone1\n final Path encFile1 = new Path(zone1, \"encFile1\");\n final int len = 8192;\n DFSTestUtil.createFile(fs, encFile1, len, (short) 1, 0xFEED);\n\n Configuration clientConf = new Configuration(conf);\n clientConf.setLong(FS_TRASH_INTERVAL_KEY, 1);\n FsShell shell = new FsShell(clientConf);\n\n // Delete encrypted file from the shell with trash enabled\n // Verify the file is moved to appropriate trash within the zone\n verifyShellDeleteWithTrash(shell, encFile1);\n\n // Delete encryption zone from the shell with trash enabled\n // Verify the zone is moved to appropriate trash location in user's home dir\n verifyShellDeleteWithTrash(shell, zone1);\n\n final Path topEZ = new Path(\"/topEZ\");\n fs.mkdirs(topEZ);\n dfsAdmin.createEncryptionZone(topEZ, TEST_KEY, NO_TRASH);\n final String NESTED_EZ_TEST_KEY = \"nested_ez_test_key\";\n DFSTestUtil.createKey(NESTED_EZ_TEST_KEY, cluster, conf);\n final Path nestedEZ = new Path(topEZ, \"nestedEZ\");\n fs.mkdirs(nestedEZ);\n dfsAdmin.createEncryptionZone(nestedEZ, NESTED_EZ_TEST_KEY, NO_TRASH);\n final Path topEZFile = new Path(topEZ, \"file\");\n final Path nestedEZFile = new Path(nestedEZ, \"file\");\n DFSTestUtil.createFile(fs, topEZFile, len, (short) 1, 0xFEED);\n DFSTestUtil.createFile(fs, nestedEZFile, len, (short) 1, 0xFEED);\n verifyShellDeleteWithTrash(shell, topEZFile);\n verifyShellDeleteWithTrash(shell, nestedEZFile);\n verifyShellDeleteWithTrash(shell, nestedEZ);\n 
verifyShellDeleteWithTrash(shell, topEZ);\n }\n\n @Test(timeout = 120000)\n public void testRootDirEZTrash() throws Exception {\n final HdfsAdmin dfsAdmin =\n new HdfsAdmin(FileSystem.getDefaultUri(conf), conf);\n dfsAdmin.createEncryptionZone(new Path(\"/\"), TEST_KEY, NO_TRASH);\n final Path encFile = new Path(\"/encFile\");\n final int len = 8192;\n DFSTestUtil.createFile(fs, encFile, len, (short) 1, 0xFEED);\n Configuration clientConf = new Configuration(conf);\n clientConf.setLong(FS_TRASH_INTERVAL_KEY, 1);\n FsShell shell = new FsShell(clientConf);\n verifyShellDeleteWithTrash(shell, encFile);\n }\n\n @Test(timeout = 120000)\n public void testGetTrashRoots() throws Exception {\n final HdfsAdmin dfsAdmin =\n new HdfsAdmin(FileSystem.getDefaultUri(conf), conf);\n Path ezRoot1 = new Path(\"/ez1\");\n fs.mkdirs(ezRoot1);\n dfsAdmin.createEncryptionZone(ezRoot1, TEST_KEY, NO_TRASH);\n Path ezRoot2 = new Path(\"/ez2\");\n fs.mkdirs(ezRoot2);\n dfsAdmin.createEncryptionZone(ezRoot2, TEST_KEY, NO_TRASH);\n Path ezRoot3 = new Path(\"/ez3\");\n fs.mkdirs(ezRoot3);\n dfsAdmin.createEncryptionZone(ezRoot3, TEST_KEY, NO_TRASH);\n Collection trashRootsBegin = fs.getTrashRoots(true);\n assertEquals(\"Unexpected getTrashRoots result\", 0, trashRootsBegin.size());\n\n final Path encFile = new Path(ezRoot2, \"encFile\");\n final int len = 8192;\n DFSTestUtil.createFile(fs, encFile, len, (short) 1, 0xFEED);\n Configuration clientConf = new Configuration(conf);\n clientConf.setLong(FS_TRASH_INTERVAL_KEY, 1);\n FsShell shell = new FsShell(clientConf);\n verifyShellDeleteWithTrash(shell, encFile);\n\n Collection trashRootsDelete1 = fs.getTrashRoots(true);\n assertEquals(\"Unexpected getTrashRoots result\", 1,\n trashRootsDelete1.size());\n\n final Path nonEncFile = new Path(\"/nonEncFile\");\n DFSTestUtil.createFile(fs, nonEncFile, len, (short) 1, 0xFEED);\n verifyShellDeleteWithTrash(shell, nonEncFile);\n\n Collection trashRootsDelete2 = fs.getTrashRoots(true);\n 
assertEquals(\"Unexpected getTrashRoots result\", 2,\n trashRootsDelete2.size());\n }\n\n private void verifyShellDeleteWithTrash(FsShell shell, Path path)\n throws Exception{\n try {\n Path trashDir = shell.getCurrentTrashDir(path);\n // Verify that trashDir has a path component named \".Trash\"\n Path checkTrash = trashDir;\n while (!checkTrash.isRoot() && !checkTrash.getName().equals(\".Trash\")) {\n checkTrash = checkTrash.getParent();\n }\n assertEquals(\"No .Trash component found in trash dir \" + trashDir,\n \".Trash\", checkTrash.getName());\n final Path trashFile =\n new Path(shell.getCurrentTrashDir(path) + \"/\" + path);\n String[] argv = new String[]{\"-rm\", \"-r\", path.toString()};\n int res = ToolRunner.run(shell, argv);\n assertEquals(\"rm failed\", 0, res);\n assertTrue(\"File not in trash : \" + trashFile, fs.exists(trashFile));\n } catch (IOException ioe) {\n fail(ioe.getMessage());\n } finally {\n if (fs.exists(path)) {\n fs.delete(path, true);\n }\n }\n }\n}\n"},"new_file":{"kind":"string","value":"hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZones.java"},"old_contents":{"kind":"string","value":"/**\n * Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage org.apache.hadoop.hdfs;\n\nimport java.io.ByteArrayOutputStream;\nimport java.io.File;\nimport java.io.FileNotFoundException;\nimport java.io.IOException;\nimport java.io.PrintStream;\nimport java.io.RandomAccessFile;\nimport java.io.StringReader;\nimport java.net.URI;\nimport java.security.PrivilegedExceptionAction;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.EnumSet;\nimport java.util.List;\nimport java.util.concurrent.Callable;\nimport java.util.concurrent.CountDownLatch;\nimport java.util.concurrent.ExecutionException;\nimport java.util.concurrent.ExecutorService;\nimport java.util.concurrent.Executors;\nimport java.util.concurrent.Future;\n\nimport com.google.common.collect.Lists;\n\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.crypto.CipherSuite;\nimport org.apache.hadoop.crypto.CryptoProtocolVersion;\nimport org.apache.hadoop.crypto.key.JavaKeyStoreProvider;\nimport org.apache.hadoop.crypto.key.KeyProvider;\nimport org.apache.hadoop.crypto.key.KeyProviderFactory;\nimport org.apache.hadoop.fs.CommonConfigurationKeysPublic;\nimport org.apache.hadoop.fs.CreateFlag;\nimport org.apache.hadoop.fs.FSDataOutputStream;\nimport org.apache.hadoop.fs.FSTestWrapper;\nimport org.apache.hadoop.fs.FileContext;\nimport org.apache.hadoop.fs.FileContextTestWrapper;\nimport org.apache.hadoop.fs.FileEncryptionInfo;\nimport org.apache.hadoop.fs.FileStatus;\nimport org.apache.hadoop.fs.FileSystem;\nimport org.apache.hadoop.fs.FileSystemTestHelper;\nimport 
org.apache.hadoop.fs.FileSystemTestWrapper;\nimport org.apache.hadoop.fs.FsShell;\nimport org.apache.hadoop.fs.Path;\nimport org.apache.hadoop.fs.RemoteIterator;\nimport org.apache.hadoop.fs.permission.FsAction;\nimport org.apache.hadoop.fs.permission.FsPermission;\nimport org.apache.hadoop.hdfs.client.CreateEncryptionZoneFlag;\nimport org.apache.hadoop.hdfs.client.HdfsAdmin;\nimport org.apache.hadoop.hdfs.protocol.ClientProtocol;\nimport org.apache.hadoop.hdfs.protocol.EncryptionZone;\nimport org.apache.hadoop.hdfs.protocol.HdfsFileStatus;\nimport org.apache.hadoop.hdfs.protocol.LocatedBlocks;\nimport org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction;\nimport org.apache.hadoop.hdfs.server.namenode.EncryptionFaultInjector;\nimport org.apache.hadoop.hdfs.server.namenode.EncryptionZoneManager;\nimport org.apache.hadoop.hdfs.server.namenode.FSImageTestUtil;\nimport org.apache.hadoop.hdfs.server.namenode.NamenodeFsck;\nimport org.apache.hadoop.hdfs.tools.CryptoAdmin;\nimport org.apache.hadoop.hdfs.tools.DFSck;\nimport org.apache.hadoop.hdfs.tools.offlineImageViewer.PBImageXmlWriter;\nimport org.apache.hadoop.hdfs.web.WebHdfsConstants;\nimport org.apache.hadoop.hdfs.web.WebHdfsTestUtil;\nimport org.apache.hadoop.io.EnumSetWritable;\nimport org.apache.hadoop.security.AccessControlException;\nimport org.apache.hadoop.security.Credentials;\nimport org.apache.hadoop.security.UserGroupInformation;\nimport org.apache.hadoop.security.token.Token;\nimport org.apache.hadoop.util.ToolRunner;\nimport org.apache.hadoop.crypto.key.KeyProviderDelegationTokenExtension.DelegationTokenExtension;\nimport org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.CryptoExtension;\nimport org.apache.hadoop.io.Text;\nimport org.apache.log4j.Level;\nimport org.apache.log4j.Logger;\nimport org.junit.After;\nimport org.junit.Assert;\nimport org.junit.Before;\nimport org.junit.Test;\nimport org.mockito.Mockito;\n\nimport static org.mockito.Matchers.anyBoolean;\nimport static 
org.mockito.Matchers.anyLong;\nimport static org.mockito.Matchers.anyObject;\nimport static org.mockito.Matchers.anyShort;\nimport static org.mockito.Mockito.withSettings;\nimport static org.mockito.Mockito.any;\nimport static org.mockito.Mockito.anyString;\nimport static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_TRASH_INTERVAL_KEY;\nimport static org.apache.hadoop.hdfs.DFSTestUtil.verifyFilesEqual;\nimport static org.apache.hadoop.test.GenericTestUtils.assertExceptionContains;\nimport static org.apache.hadoop.test.MetricsAsserts.assertGauge;\nimport static org.apache.hadoop.test.MetricsAsserts.getMetrics;\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertFalse;\nimport static org.junit.Assert.assertNotEquals;\nimport static org.junit.Assert.assertNull;\nimport static org.junit.Assert.assertTrue;\nimport static org.junit.Assert.fail;\n\nimport org.xml.sax.InputSource;\nimport org.xml.sax.helpers.DefaultHandler;\n\nimport javax.xml.parsers.SAXParser;\nimport javax.xml.parsers.SAXParserFactory;\n\npublic class TestEncryptionZones {\n\n protected Configuration conf;\n private FileSystemTestHelper fsHelper;\n\n protected MiniDFSCluster cluster;\n protected HdfsAdmin dfsAdmin;\n protected DistributedFileSystem fs;\n private File testRootDir;\n protected final String TEST_KEY = \"test_key\";\n private static final String NS_METRICS = \"FSNamesystem\";\n\n protected FileSystemTestWrapper fsWrapper;\n protected FileContextTestWrapper fcWrapper;\n\n protected static final EnumSet< CreateEncryptionZoneFlag > NO_TRASH =\n EnumSet.of(CreateEncryptionZoneFlag.NO_TRASH);\n\n protected String getKeyProviderURI() {\n return JavaKeyStoreProvider.SCHEME_NAME + \"://file\" +\n new Path(testRootDir.toString(), \"test.jks\").toUri();\n }\n\n @Before\n public void setup() throws Exception {\n conf = new HdfsConfiguration();\n fsHelper = new FileSystemTestHelper();\n // Set up java key store\n String testRoot = fsHelper.getTestRootDir();\n 
testRootDir = new File(testRoot).getAbsoluteFile();\n conf.set(DFSConfigKeys.DFS_ENCRYPTION_KEY_PROVIDER_URI, getKeyProviderURI());\n conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_DELEGATION_TOKEN_ALWAYS_USE_KEY, true);\n // Lower the batch size for testing\n conf.setInt(DFSConfigKeys.DFS_NAMENODE_LIST_ENCRYPTION_ZONES_NUM_RESPONSES,\n 2);\n cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();\n Logger.getLogger(EncryptionZoneManager.class).setLevel(Level.TRACE);\n fs = cluster.getFileSystem();\n fsWrapper = new FileSystemTestWrapper(fs);\n fcWrapper = new FileContextTestWrapper(\n FileContext.getFileContext(cluster.getURI(), conf));\n dfsAdmin = new HdfsAdmin(cluster.getURI(), conf);\n setProvider();\n // Create a test key\n DFSTestUtil.createKey(TEST_KEY, cluster, conf);\n }\n \n protected void setProvider() {\n // Need to set the client's KeyProvider to the NN's for JKS,\n // else the updates do not get flushed properly\n fs.getClient().setKeyProvider(cluster.getNameNode().getNamesystem()\n .getProvider());\n }\n\n @After\n public void teardown() {\n if (cluster != null) {\n cluster.shutdown();\n cluster = null;\n }\n EncryptionFaultInjector.instance = new EncryptionFaultInjector();\n }\n\n public void assertNumZones(final int numZones) throws IOException {\n RemoteIterator it = dfsAdmin.listEncryptionZones();\n int count = 0;\n while (it.hasNext()) {\n count++;\n it.next();\n }\n assertEquals(\"Unexpected number of encryption zones!\", numZones, count);\n }\n\n /**\n * Checks that an encryption zone with the specified keyName and path (if not\n * null) is present.\n *\n * @throws IOException if a matching zone could not be found\n */\n public void assertZonePresent(String keyName, String path) throws IOException {\n final RemoteIterator it = dfsAdmin.listEncryptionZones();\n boolean match = false;\n while (it.hasNext()) {\n EncryptionZone zone = it.next();\n boolean matchKey = (keyName == null);\n boolean matchPath = (path == null);\n if (keyName != 
null && zone.getKeyName().equals(keyName)) {\n matchKey = true;\n }\n if (path != null && zone.getPath().equals(path)) {\n matchPath = true;\n }\n if (matchKey && matchPath) {\n match = true;\n break;\n }\n }\n assertTrue(\"Did not find expected encryption zone with keyName \" + keyName +\n \" path \" + path, match\n );\n }\n\n /**\n * Make sure hdfs crypto -createZone command creates a trash directory\n * with sticky bits.\n * @throws Exception\n */\n @Test(timeout = 60000)\n public void testTrashStickyBit() throws Exception {\n // create an EZ /zones/zone1, make it world writable.\n final Path zoneParent = new Path(\"/zones\");\n final Path zone1 = new Path(zoneParent, \"zone1\");\n CryptoAdmin cryptoAdmin = new CryptoAdmin(conf);\n fsWrapper.mkdir(zone1, FsPermission.getDirDefault(), true);\n fsWrapper.setPermission(zone1,\n new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL));\n String[] cryptoArgv = new String[]{\"-createZone\", \"-keyName\", TEST_KEY,\n \"-path\", zone1.toUri().getPath()};\n cryptoAdmin.run(cryptoArgv);\n\n // create a file in EZ\n final Path ezfile1 = new Path(zone1, \"file1\");\n // Create the encrypted file in zone1\n final int len = 8192;\n DFSTestUtil.createFile(fs, ezfile1, len, (short) 1, 0xFEED);\n\n // enable trash, delete /zones/zone1/file1,\n // which moves the file to\n // /zones/zone1/.Trash/$SUPERUSER/Current/zones/zone1/file1\n Configuration clientConf = new Configuration(conf);\n clientConf.setLong(FS_TRASH_INTERVAL_KEY, 1);\n final FsShell shell = new FsShell(clientConf);\n String[] argv = new String[]{\"-rm\", ezfile1.toString()};\n int res = ToolRunner.run(shell, argv);\n assertEquals(\"Can't remove a file in EZ as superuser\", 0, res);\n\n final Path trashDir = new Path(zone1, FileSystem.TRASH_PREFIX);\n assertTrue(fsWrapper.exists(trashDir));\n FileStatus trashFileStatus = fsWrapper.getFileStatus(trashDir);\n assertTrue(trashFileStatus.getPermission().getStickyBit());\n\n // create a non-privileged user\n final 
UserGroupInformation user = UserGroupInformation.\n createUserForTesting(\"user\", new String[] { \"mygroup\" });\n\n user.doAs(new PrivilegedExceptionAction() {\n @Override\n public Object run() throws Exception {\n final Path ezfile2 = new Path(zone1, \"file2\");\n final int len = 8192;\n // create a file /zones/zone1/file2 in EZ\n // this file is owned by user:mygroup\n FileSystem fs2 = FileSystem.get(cluster.getConfiguration(0));\n DFSTestUtil.createFile(fs2, ezfile2, len, (short) 1, 0xFEED);\n // delete /zones/zone1/file2,\n // which moves the file to\n // /zones/zone1/.Trash/user/Current/zones/zone1/file2\n String[] argv = new String[]{\"-rm\", ezfile2.toString()};\n int res = ToolRunner.run(shell, argv);\n assertEquals(\"Can't remove a file in EZ as user:mygroup\", 0, res);\n return null;\n }\n });\n }\n\n /**\n * Make sure hdfs crypto -provisionTrash command creates a trash directory\n * with sticky bits.\n * @throws Exception\n */\n @Test(timeout = 60000)\n public void testProvisionTrash() throws Exception {\n // create an EZ /zones/zone1\n final Path zoneParent = new Path(\"/zones\");\n final Path zone1 = new Path(zoneParent, \"zone1\");\n CryptoAdmin cryptoAdmin = new CryptoAdmin(conf);\n fsWrapper.mkdir(zone1, FsPermission.getDirDefault(), true);\n String[] cryptoArgv = new String[]{\"-createZone\", \"-keyName\", TEST_KEY,\n \"-path\", zone1.toUri().getPath()};\n cryptoAdmin.run(cryptoArgv);\n\n // remove the trash directory\n Configuration clientConf = new Configuration(conf);\n clientConf.setLong(FS_TRASH_INTERVAL_KEY, 1);\n final FsShell shell = new FsShell(clientConf);\n final Path trashDir = new Path(zone1, FileSystem.TRASH_PREFIX);\n String[] argv = new String[]{\"-rmdir\", trashDir.toUri().getPath()};\n int res = ToolRunner.run(shell, argv);\n assertEquals(\"Unable to delete trash directory.\", 0, res);\n assertFalse(fsWrapper.exists(trashDir));\n\n // execute -provisionTrash command option and make sure the trash\n // directory has sticky bit.\n 
String[] provisionTrashArgv = new String[]{\"-provisionTrash\", \"-path\",\n zone1.toUri().getPath()};\n cryptoAdmin.run(provisionTrashArgv);\n\n assertTrue(fsWrapper.exists(trashDir));\n FileStatus trashFileStatus = fsWrapper.getFileStatus(trashDir);\n assertTrue(trashFileStatus.getPermission().getStickyBit());\n }\n\n @Test(timeout = 60000)\n public void testBasicOperations() throws Exception {\n\n int numZones = 0;\n\n /* Test failure of create EZ on a directory that doesn't exist. */\n final Path zoneParent = new Path(\"/zones\");\n final Path zone1 = new Path(zoneParent, \"zone1\");\n\n try {\n dfsAdmin.createEncryptionZone(zone1, TEST_KEY, NO_TRASH);\n fail(\"expected /test doesn't exist\");\n } catch (IOException e) {\n assertExceptionContains(\"cannot find\", e);\n }\n\n /* Normal creation of an EZ */\n fsWrapper.mkdir(zone1, FsPermission.getDirDefault(), true);\n dfsAdmin.createEncryptionZone(zone1, TEST_KEY, NO_TRASH);\n assertNumZones(++numZones);\n assertZonePresent(null, zone1.toString());\n\n /* Test failure of create EZ on a directory which is already an EZ. 
*/\n try {\n dfsAdmin.createEncryptionZone(zone1, TEST_KEY, NO_TRASH);\n } catch (IOException e) {\n assertExceptionContains(\"is already an encryption zone\", e);\n }\n\n /* create EZ on parent of an EZ should fail */\n try {\n dfsAdmin.createEncryptionZone(zoneParent, TEST_KEY, NO_TRASH);\n fail(\"EZ over an EZ\");\n } catch (IOException e) {\n assertExceptionContains(\"encryption zone for a non-empty directory\", e);\n }\n\n /* create EZ on a folder with a folder fails */\n final Path notEmpty = new Path(\"/notEmpty\");\n final Path notEmptyChild = new Path(notEmpty, \"child\");\n fsWrapper.mkdir(notEmptyChild, FsPermission.getDirDefault(), true);\n try {\n dfsAdmin.createEncryptionZone(notEmpty, TEST_KEY, NO_TRASH);\n fail(\"Created EZ on an non-empty directory with folder\");\n } catch (IOException e) {\n assertExceptionContains(\"create an encryption zone\", e);\n }\n fsWrapper.delete(notEmptyChild, false);\n\n /* create EZ on a folder with a file fails */\n fsWrapper.createFile(notEmptyChild);\n try {\n dfsAdmin.createEncryptionZone(notEmpty, TEST_KEY, NO_TRASH);\n fail(\"Created EZ on an non-empty directory with file\");\n } catch (IOException e) {\n assertExceptionContains(\"create an encryption zone\", e);\n }\n\n /* Test failure of create EZ on a file. */\n try {\n dfsAdmin.createEncryptionZone(notEmptyChild, TEST_KEY, NO_TRASH);\n fail(\"Created EZ on a file\");\n } catch (IOException e) {\n assertExceptionContains(\"create an encryption zone for a file.\", e);\n }\n\n /* Test failure of creating an EZ passing a key that doesn't exist. 
*/\n final Path zone2 = new Path(\"/zone2\");\n fsWrapper.mkdir(zone2, FsPermission.getDirDefault(), false);\n final String myKeyName = \"mykeyname\";\n try {\n dfsAdmin.createEncryptionZone(zone2, myKeyName, NO_TRASH);\n fail(\"expected key doesn't exist\");\n } catch (IOException e) {\n assertExceptionContains(\"doesn't exist.\", e);\n }\n\n /* Test failure of empty and null key name */\n try {\n dfsAdmin.createEncryptionZone(zone2, \"\", NO_TRASH);\n fail(\"created a zone with empty key name\");\n } catch (IOException e) {\n assertExceptionContains(\"Must specify a key name when creating\", e);\n }\n try {\n dfsAdmin.createEncryptionZone(zone2, null, NO_TRASH);\n fail(\"created a zone with null key name\");\n } catch (IOException e) {\n assertExceptionContains(\"Must specify a key name when creating\", e);\n }\n\n assertNumZones(1);\n\n /* Test success of creating an EZ when they key exists. */\n DFSTestUtil.createKey(myKeyName, cluster, conf);\n dfsAdmin.createEncryptionZone(zone2, myKeyName, NO_TRASH);\n assertNumZones(++numZones);\n assertZonePresent(myKeyName, zone2.toString());\n\n /* Test failure of create encryption zones as a non super user. 
*/\n final UserGroupInformation user = UserGroupInformation.\n createUserForTesting(\"user\", new String[] { \"mygroup\" });\n final Path nonSuper = new Path(\"/nonSuper\");\n fsWrapper.mkdir(nonSuper, FsPermission.getDirDefault(), false);\n\n user.doAs(new PrivilegedExceptionAction() {\n @Override\n public Object run() throws Exception {\n final HdfsAdmin userAdmin =\n new HdfsAdmin(FileSystem.getDefaultUri(conf), conf);\n try {\n userAdmin.createEncryptionZone(nonSuper, TEST_KEY, NO_TRASH);\n fail(\"createEncryptionZone is superuser-only operation\");\n } catch (AccessControlException e) {\n assertExceptionContains(\"Superuser privilege is required\", e);\n }\n return null;\n }\n });\n\n // Test success of creating an encryption zone a few levels down.\n Path deepZone = new Path(\"/d/e/e/p/zone\");\n fsWrapper.mkdir(deepZone, FsPermission.getDirDefault(), true);\n dfsAdmin.createEncryptionZone(deepZone, TEST_KEY, NO_TRASH);\n assertNumZones(++numZones);\n assertZonePresent(null, deepZone.toString());\n\n // Create and list some zones to test batching of listEZ\n for (int i=1; i<6; i++) {\n final Path zonePath = new Path(\"/listZone\" + i);\n fsWrapper.mkdir(zonePath, FsPermission.getDirDefault(), false);\n dfsAdmin.createEncryptionZone(zonePath, TEST_KEY, NO_TRASH);\n numZones++;\n assertNumZones(numZones);\n assertZonePresent(null, zonePath.toString());\n }\n\n fs.setSafeMode(SafeModeAction.SAFEMODE_ENTER);\n fs.saveNamespace();\n fs.setSafeMode(SafeModeAction.SAFEMODE_LEAVE);\n cluster.restartNameNode(true);\n assertNumZones(numZones);\n assertEquals(\"Unexpected number of encryption zones!\", numZones, cluster\n .getNamesystem().getNumEncryptionZones());\n assertGauge(\"NumEncryptionZones\", numZones, getMetrics(NS_METRICS));\n assertZonePresent(null, zone1.toString());\n\n // Verify newly added ez is present after restarting the NameNode\n // without persisting the namespace.\n Path nonpersistZone = new Path(\"/nonpersistZone\");\n 
fsWrapper.mkdir(nonpersistZone, FsPermission.getDirDefault(), false);\n dfsAdmin.createEncryptionZone(nonpersistZone, TEST_KEY, NO_TRASH);\n numZones++;\n cluster.restartNameNode(true);\n assertNumZones(numZones);\n assertZonePresent(null, nonpersistZone.toString());\n }\n\n @Test(timeout = 60000)\n public void testBasicOperationsRootDir() throws Exception {\n int numZones = 0;\n final Path rootDir = new Path(\"/\");\n final Path zone1 = new Path(rootDir, \"zone1\");\n\n /* Normal creation of an EZ on rootDir */\n dfsAdmin.createEncryptionZone(rootDir, TEST_KEY, NO_TRASH);\n assertNumZones(++numZones);\n assertZonePresent(null, rootDir.toString());\n\n // Verify rootDir ez is present after restarting the NameNode\n // and saving/loading from fsimage.\n fs.setSafeMode(SafeModeAction.SAFEMODE_ENTER);\n fs.saveNamespace();\n fs.setSafeMode(SafeModeAction.SAFEMODE_LEAVE);\n cluster.restartNameNode(true);\n assertNumZones(numZones);\n assertZonePresent(null, rootDir.toString());\n }\n\n /**\n * Test listing encryption zones as a non super user.\n */\n @Test(timeout = 60000)\n public void testListEncryptionZonesAsNonSuperUser() throws Exception {\n\n final UserGroupInformation user = UserGroupInformation.\n createUserForTesting(\"user\", new String[] { \"mygroup\" });\n\n final Path testRoot = new Path(\"/tmp/TestEncryptionZones\");\n final Path superPath = new Path(testRoot, \"superuseronly\");\n final Path allPath = new Path(testRoot, \"accessall\");\n\n fsWrapper.mkdir(superPath, new FsPermission((short) 0700), true);\n dfsAdmin.createEncryptionZone(superPath, TEST_KEY, NO_TRASH);\n\n fsWrapper.mkdir(allPath, new FsPermission((short) 0707), true);\n dfsAdmin.createEncryptionZone(allPath, TEST_KEY, NO_TRASH);\n\n user.doAs(new PrivilegedExceptionAction() {\n @Override\n public Object run() throws Exception {\n final HdfsAdmin userAdmin =\n new HdfsAdmin(FileSystem.getDefaultUri(conf), conf);\n try {\n userAdmin.listEncryptionZones();\n } catch (AccessControlException 
e) {\n assertExceptionContains(\"Superuser privilege is required\", e);\n }\n return null;\n }\n });\n }\n\n /**\n * Test getEncryptionZoneForPath as a non super user.\n */\n @Test(timeout = 60000)\n public void testGetEZAsNonSuperUser() throws Exception {\n\n final UserGroupInformation user = UserGroupInformation.\n createUserForTesting(\"user\", new String[] { \"mygroup\" });\n\n final Path testRoot = new Path(\"/tmp/TestEncryptionZones\");\n final Path superPath = new Path(testRoot, \"superuseronly\");\n final Path superPathFile = new Path(superPath, \"file1\");\n final Path allPath = new Path(testRoot, \"accessall\");\n final Path allPathFile = new Path(allPath, \"file1\");\n final Path nonEZDir = new Path(testRoot, \"nonEZDir\");\n final Path nonEZFile = new Path(nonEZDir, \"file1\");\n final Path nonexistent = new Path(\"/nonexistent\");\n final int len = 8192;\n\n fsWrapper.mkdir(testRoot, new FsPermission((short) 0777), true);\n fsWrapper.mkdir(superPath, new FsPermission((short) 0700), false);\n fsWrapper.mkdir(allPath, new FsPermission((short) 0777), false);\n fsWrapper.mkdir(nonEZDir, new FsPermission((short) 0777), false);\n dfsAdmin.createEncryptionZone(superPath, TEST_KEY, NO_TRASH);\n dfsAdmin.createEncryptionZone(allPath, TEST_KEY, NO_TRASH);\n dfsAdmin.allowSnapshot(new Path(\"/\"));\n final Path newSnap = fs.createSnapshot(new Path(\"/\"));\n DFSTestUtil.createFile(fs, superPathFile, len, (short) 1, 0xFEED);\n DFSTestUtil.createFile(fs, allPathFile, len, (short) 1, 0xFEED);\n DFSTestUtil.createFile(fs, nonEZFile, len, (short) 1, 0xFEED);\n\n user.doAs(new PrivilegedExceptionAction() {\n @Override\n public Object run() throws Exception {\n final HdfsAdmin userAdmin =\n new HdfsAdmin(FileSystem.getDefaultUri(conf), conf);\n\n // Check null arg\n try {\n userAdmin.getEncryptionZoneForPath(null);\n fail(\"should have thrown NPE\");\n } catch (NullPointerException e) {\n /*\n * IWBNI we could use assertExceptionContains, but the NPE that is\n * thrown 
has no message text.\n */\n }\n\n // Check operation with accessible paths\n assertEquals(\"expected ez path\", allPath.toString(),\n userAdmin.getEncryptionZoneForPath(allPath).getPath().\n toString());\n assertEquals(\"expected ez path\", allPath.toString(),\n userAdmin.getEncryptionZoneForPath(allPathFile).getPath().\n toString());\n\n // Check operation with inaccessible (lack of permissions) path\n try {\n userAdmin.getEncryptionZoneForPath(superPathFile);\n fail(\"expected AccessControlException\");\n } catch (AccessControlException e) {\n assertExceptionContains(\"Permission denied:\", e);\n }\n\n try {\n userAdmin.getEncryptionZoneForPath(nonexistent);\n fail(\"FileNotFoundException should be thrown for a non-existent\"\n + \" file path\");\n } catch (FileNotFoundException e) {\n assertExceptionContains(\"Path not found: \" + nonexistent, e);\n }\n\n // Check operation with non-ez paths\n assertNull(\"expected null for non-ez path\",\n userAdmin.getEncryptionZoneForPath(nonEZDir));\n assertNull(\"expected null for non-ez path\",\n userAdmin.getEncryptionZoneForPath(nonEZFile));\n\n // Check operation with snapshots\n String snapshottedAllPath = newSnap.toString() + allPath.toString();\n assertEquals(\"expected ez path\", allPath.toString(),\n userAdmin.getEncryptionZoneForPath(\n new Path(snapshottedAllPath)).getPath().toString());\n\n /*\n * Delete the file from the non-snapshot and test that it is still ok\n * in the ez.\n */\n fs.delete(allPathFile, false);\n assertEquals(\"expected ez path\", allPath.toString(),\n userAdmin.getEncryptionZoneForPath(\n new Path(snapshottedAllPath)).getPath().toString());\n\n // Delete the ez and make sure ss's ez is still ok.\n fs.delete(allPath, true);\n assertEquals(\"expected ez path\", allPath.toString(),\n userAdmin.getEncryptionZoneForPath(\n new Path(snapshottedAllPath)).getPath().toString());\n try {\n userAdmin.getEncryptionZoneForPath(allPathFile);\n fail(\"FileNotFoundException should be thrown for a 
non-existent\"\n + \" file path\");\n } catch (FileNotFoundException e) {\n assertExceptionContains(\"Path not found: \" + allPathFile, e);\n }\n try {\n userAdmin.getEncryptionZoneForPath(allPath);\n fail(\"FileNotFoundException should be thrown for a non-existent\"\n + \" file path\");\n } catch (FileNotFoundException e) {\n assertExceptionContains(\"Path not found: \" + allPath, e);\n }\n return null;\n }\n });\n }\n\n /**\n * Test success of Rename EZ on a directory which is already an EZ.\n */\n private void doRenameEncryptionZone(FSTestWrapper wrapper) throws Exception {\n final Path testRoot = new Path(\"/tmp/TestEncryptionZones\");\n final Path pathFoo = new Path(testRoot, \"foo\");\n final Path pathFooBaz = new Path(pathFoo, \"baz\");\n final Path pathFooBazFile = new Path(pathFooBaz, \"file\");\n final Path pathFooBar = new Path(pathFoo, \"bar\");\n final Path pathFooBarFile = new Path(pathFooBar, \"file\");\n final int len = 8192;\n wrapper.mkdir(pathFoo, FsPermission.getDirDefault(), true);\n dfsAdmin.createEncryptionZone(pathFoo, TEST_KEY, NO_TRASH);\n wrapper.mkdir(pathFooBaz, FsPermission.getDirDefault(), true);\n DFSTestUtil.createFile(fs, pathFooBazFile, len, (short) 1, 0xFEED);\n String contents = DFSTestUtil.readFile(fs, pathFooBazFile);\n try {\n wrapper.rename(pathFooBaz, testRoot);\n } catch (IOException e) {\n assertExceptionContains(pathFooBaz.toString() + \" can't be moved from\" +\n \" an encryption zone.\", e\n );\n }\n\n // Verify that we can rename dir and files within an encryption zone.\n assertTrue(fs.rename(pathFooBaz, pathFooBar));\n assertTrue(\"Rename of dir and file within ez failed\",\n !wrapper.exists(pathFooBaz) && wrapper.exists(pathFooBar));\n assertEquals(\"Renamed file contents not the same\",\n contents, DFSTestUtil.readFile(fs, pathFooBarFile));\n\n // Verify that we can rename an EZ root\n final Path newFoo = new Path(testRoot, \"newfoo\");\n assertTrue(\"Rename of EZ root\", fs.rename(pathFoo, newFoo));\n 
assertTrue(\"Rename of EZ root failed\",\n !wrapper.exists(pathFoo) && wrapper.exists(newFoo));\n\n // Verify that we can't rename an EZ root onto itself\n try {\n wrapper.rename(newFoo, newFoo);\n } catch (IOException e) {\n assertExceptionContains(\"are the same\", e);\n }\n }\n\n @Test(timeout = 60000)\n public void testRenameFileSystem() throws Exception {\n doRenameEncryptionZone(fsWrapper);\n }\n\n @Test(timeout = 60000)\n public void testRenameFileContext() throws Exception {\n doRenameEncryptionZone(fcWrapper);\n }\n\n private FileEncryptionInfo getFileEncryptionInfo(Path path) throws Exception {\n LocatedBlocks blocks = fs.getClient().getLocatedBlocks(path.toString(), 0);\n return blocks.getFileEncryptionInfo();\n }\n\n @Test(timeout = 120000)\n public void testReadWrite() throws Exception {\n final HdfsAdmin dfsAdmin =\n new HdfsAdmin(FileSystem.getDefaultUri(conf), conf);\n // Create a base file for comparison\n final Path baseFile = new Path(\"/base\");\n final int len = 8192;\n DFSTestUtil.createFile(fs, baseFile, len, (short) 1, 0xFEED);\n // Create the first enc file\n final Path zone = new Path(\"/zone\");\n fs.mkdirs(zone);\n dfsAdmin.createEncryptionZone(zone, TEST_KEY, NO_TRASH);\n final Path encFile1 = new Path(zone, \"myfile\");\n DFSTestUtil.createFile(fs, encFile1, len, (short) 1, 0xFEED);\n // Read them back in and compare byte-by-byte\n verifyFilesEqual(fs, baseFile, encFile1, len);\n // Roll the key of the encryption zone\n assertNumZones(1);\n String keyName = dfsAdmin.listEncryptionZones().next().getKeyName();\n cluster.getNamesystem().getProvider().rollNewVersion(keyName);\n // Read them back in and compare byte-by-byte\n verifyFilesEqual(fs, baseFile, encFile1, len);\n // Write a new enc file and validate\n final Path encFile2 = new Path(zone, \"myfile2\");\n DFSTestUtil.createFile(fs, encFile2, len, (short) 1, 0xFEED);\n // FEInfos should be different\n FileEncryptionInfo feInfo1 = getFileEncryptionInfo(encFile1);\n FileEncryptionInfo 
feInfo2 = getFileEncryptionInfo(encFile2);\n assertFalse(\"EDEKs should be different\", Arrays\n .equals(feInfo1.getEncryptedDataEncryptionKey(),\n feInfo2.getEncryptedDataEncryptionKey()));\n assertNotEquals(\"Key was rolled, versions should be different\",\n feInfo1.getEzKeyVersionName(), feInfo2.getEzKeyVersionName());\n // Contents still equal\n verifyFilesEqual(fs, encFile1, encFile2, len);\n }\n\n @Test(timeout = 120000)\n public void testReadWriteUsingWebHdfs() throws Exception {\n final HdfsAdmin dfsAdmin =\n new HdfsAdmin(FileSystem.getDefaultUri(conf), conf);\n final FileSystem webHdfsFs = WebHdfsTestUtil.getWebHdfsFileSystem(conf,\n WebHdfsConstants.WEBHDFS_SCHEME);\n\n final Path zone = new Path(\"/zone\");\n fs.mkdirs(zone);\n dfsAdmin.createEncryptionZone(zone, TEST_KEY, NO_TRASH);\n\n /* Create an unencrypted file for comparison purposes. */\n final Path unencFile = new Path(\"/unenc\");\n final int len = 8192;\n DFSTestUtil.createFile(webHdfsFs, unencFile, len, (short) 1, 0xFEED);\n\n /*\n * Create the same file via webhdfs, but this time encrypted. Compare it\n * using both webhdfs and DFS.\n */\n final Path encFile1 = new Path(zone, \"myfile\");\n DFSTestUtil.createFile(webHdfsFs, encFile1, len, (short) 1, 0xFEED);\n verifyFilesEqual(webHdfsFs, unencFile, encFile1, len);\n verifyFilesEqual(fs, unencFile, encFile1, len);\n\n /*\n * Same thing except this time create the encrypted file using DFS.\n */\n final Path encFile2 = new Path(zone, \"myfile2\");\n DFSTestUtil.createFile(fs, encFile2, len, (short) 1, 0xFEED);\n verifyFilesEqual(webHdfsFs, unencFile, encFile2, len);\n verifyFilesEqual(fs, unencFile, encFile2, len);\n\n /* Verify appending to files works correctly. 
*/\n appendOneByte(fs, unencFile);\n appendOneByte(webHdfsFs, encFile1);\n appendOneByte(fs, encFile2);\n verifyFilesEqual(webHdfsFs, unencFile, encFile1, len);\n verifyFilesEqual(fs, unencFile, encFile1, len);\n verifyFilesEqual(webHdfsFs, unencFile, encFile2, len);\n verifyFilesEqual(fs, unencFile, encFile2, len);\n }\n\n private void appendOneByte(FileSystem fs, Path p) throws IOException {\n final FSDataOutputStream out = fs.append(p);\n out.write((byte) 0x123);\n out.close();\n }\n\n @Test(timeout = 60000)\n public void testVersionAndSuiteNegotiation() throws Exception {\n final HdfsAdmin dfsAdmin =\n new HdfsAdmin(FileSystem.getDefaultUri(conf), conf);\n final Path zone = new Path(\"/zone\");\n fs.mkdirs(zone);\n dfsAdmin.createEncryptionZone(zone, TEST_KEY, NO_TRASH);\n // Create a file in an EZ, which should succeed\n DFSTestUtil\n .createFile(fs, new Path(zone, \"success1\"), 0, (short) 1, 0xFEED);\n // Pass no supported versions, fail\n DFSOutputStream.SUPPORTED_CRYPTO_VERSIONS = new CryptoProtocolVersion[] {};\n try {\n DFSTestUtil.createFile(fs, new Path(zone, \"fail\"), 0, (short) 1, 0xFEED);\n fail(\"Created a file without specifying a crypto protocol version\");\n } catch (UnknownCryptoProtocolVersionException e) {\n assertExceptionContains(\"No crypto protocol versions\", e);\n }\n // Pass some unknown versions, fail\n DFSOutputStream.SUPPORTED_CRYPTO_VERSIONS = new CryptoProtocolVersion[]\n { CryptoProtocolVersion.UNKNOWN, CryptoProtocolVersion.UNKNOWN };\n try {\n DFSTestUtil.createFile(fs, new Path(zone, \"fail\"), 0, (short) 1, 0xFEED);\n fail(\"Created a file without specifying a known crypto protocol version\");\n } catch (UnknownCryptoProtocolVersionException e) {\n assertExceptionContains(\"No crypto protocol versions\", e);\n }\n // Pass some unknown and a good cipherSuites, success\n DFSOutputStream.SUPPORTED_CRYPTO_VERSIONS =\n new CryptoProtocolVersion[] {\n CryptoProtocolVersion.UNKNOWN,\n CryptoProtocolVersion.UNKNOWN,\n 
CryptoProtocolVersion.ENCRYPTION_ZONES };\n DFSTestUtil\n .createFile(fs, new Path(zone, \"success2\"), 0, (short) 1, 0xFEED);\n DFSOutputStream.SUPPORTED_CRYPTO_VERSIONS =\n new CryptoProtocolVersion[] {\n CryptoProtocolVersion.ENCRYPTION_ZONES,\n CryptoProtocolVersion.UNKNOWN,\n CryptoProtocolVersion.UNKNOWN} ;\n DFSTestUtil\n .createFile(fs, new Path(zone, \"success3\"), 4096, (short) 1, 0xFEED);\n // Check KeyProvider state\n // Flushing the KP on the NN, since it caches, and init a test one\n cluster.getNamesystem().getProvider().flush();\n KeyProvider provider = KeyProviderFactory\n .get(new URI(conf.getTrimmed(DFSConfigKeys.DFS_ENCRYPTION_KEY_PROVIDER_URI)),\n conf);\n List keys = provider.getKeys();\n assertEquals(\"Expected NN to have created one key per zone\", 1,\n keys.size());\n List allVersions = Lists.newArrayList();\n for (String key : keys) {\n List versions = provider.getKeyVersions(key);\n assertEquals(\"Should only have one key version per key\", 1,\n versions.size());\n allVersions.addAll(versions);\n }\n // Check that the specified CipherSuite was correctly saved on the NN\n for (int i = 2; i <= 3; i++) {\n FileEncryptionInfo feInfo =\n getFileEncryptionInfo(new Path(zone.toString() +\n \"/success\" + i));\n assertEquals(feInfo.getCipherSuite(), CipherSuite.AES_CTR_NOPADDING);\n }\n\n DFSClient old = fs.dfs;\n try {\n testCipherSuiteNegotiation(fs, conf);\n } finally {\n fs.dfs = old;\n }\n }\n\n @SuppressWarnings(\"unchecked\")\n private static void mockCreate(ClientProtocol mcp,\n CipherSuite suite, CryptoProtocolVersion version) throws Exception {\n Mockito.doReturn(\n new HdfsFileStatus(0, false, 1, 1024, 0, 0, new FsPermission(\n (short) 777), \"owner\", \"group\", new byte[0], new byte[0],\n 1010, 0, new FileEncryptionInfo(suite,\n version, new byte[suite.getAlgorithmBlockSize()],\n new byte[suite.getAlgorithmBlockSize()],\n \"fakeKey\", \"fakeVersion\"),\n (byte) 0))\n .when(mcp)\n .create(anyString(), (FsPermission) anyObject(), 
anyString(),\n (EnumSetWritable) anyObject(), anyBoolean(),\n anyShort(), anyLong(), (CryptoProtocolVersion[]) anyObject());\n }\n\n // This test only uses mocks. Called from the end of an existing test to\n // avoid an extra mini cluster.\n private static void testCipherSuiteNegotiation(DistributedFileSystem fs,\n Configuration conf) throws Exception {\n // Set up mock ClientProtocol to test client-side CipherSuite negotiation\n final ClientProtocol mcp = Mockito.mock(ClientProtocol.class);\n\n // Try with an empty conf\n final Configuration noCodecConf = new Configuration(conf);\n final CipherSuite suite = CipherSuite.AES_CTR_NOPADDING;\n final String confKey = CommonConfigurationKeysPublic\n .HADOOP_SECURITY_CRYPTO_CODEC_CLASSES_KEY_PREFIX + suite\n .getConfigSuffix();\n noCodecConf.set(confKey, \"\");\n fs.dfs = new DFSClient(null, mcp, noCodecConf, null);\n mockCreate(mcp, suite, CryptoProtocolVersion.ENCRYPTION_ZONES);\n try {\n fs.create(new Path(\"/mock\"));\n fail(\"Created with no configured codecs!\");\n } catch (UnknownCipherSuiteException e) {\n assertExceptionContains(\"No configuration found for the cipher\", e);\n }\n\n // Try create with an UNKNOWN CipherSuite\n fs.dfs = new DFSClient(null, mcp, conf, null);\n CipherSuite unknown = CipherSuite.UNKNOWN;\n unknown.setUnknownValue(989);\n mockCreate(mcp, unknown, CryptoProtocolVersion.ENCRYPTION_ZONES);\n try {\n fs.create(new Path(\"/mock\"));\n fail(\"Created with unknown cipher!\");\n } catch (IOException e) {\n assertExceptionContains(\"unknown CipherSuite with ID 989\", e);\n }\n }\n\n @Test(timeout = 120000)\n public void testCreateEZWithNoProvider() throws Exception {\n // Unset the key provider and make sure EZ ops don't work\n final Configuration clusterConf = cluster.getConfiguration(0);\n clusterConf.unset(DFSConfigKeys.DFS_ENCRYPTION_KEY_PROVIDER_URI);\n cluster.restartNameNode(true);\n cluster.waitActive();\n final Path zone1 = new Path(\"/zone1\");\n fsWrapper.mkdir(zone1, 
FsPermission.getDirDefault(), true);\n try {\n dfsAdmin.createEncryptionZone(zone1, TEST_KEY, NO_TRASH);\n fail(\"expected exception\");\n } catch (IOException e) {\n assertExceptionContains(\"since no key provider is available\", e);\n }\n final Path jksPath = new Path(testRootDir.toString(), \"test.jks\");\n clusterConf.set(DFSConfigKeys.DFS_ENCRYPTION_KEY_PROVIDER_URI,\n JavaKeyStoreProvider.SCHEME_NAME + \"://file\" + jksPath.toUri()\n );\n // Try listing EZs as well\n assertNumZones(0);\n }\n\n @Test(timeout = 120000)\n public void testIsEncryptedMethod() throws Exception {\n doTestIsEncryptedMethod(new Path(\"/\"));\n doTestIsEncryptedMethod(new Path(\"/.reserved/raw\"));\n }\n\n private void doTestIsEncryptedMethod(Path prefix) throws Exception {\n try {\n dTIEM(prefix);\n } finally {\n for (FileStatus s : fsWrapper.listStatus(prefix)) {\n fsWrapper.delete(s.getPath(), true);\n }\n }\n }\n\n private void dTIEM(Path prefix) throws Exception {\n final HdfsAdmin dfsAdmin =\n new HdfsAdmin(FileSystem.getDefaultUri(conf), conf);\n // Create an unencrypted file to check isEncrypted returns false\n final Path baseFile = new Path(prefix, \"base\");\n fsWrapper.createFile(baseFile);\n FileStatus stat = fsWrapper.getFileStatus(baseFile);\n assertFalse(\"Expected isEncrypted to return false for \" + baseFile,\n stat.isEncrypted());\n\n // Create an encrypted file to check isEncrypted returns true\n final Path zone = new Path(prefix, \"zone\");\n fsWrapper.mkdir(zone, FsPermission.getDirDefault(), true);\n dfsAdmin.createEncryptionZone(zone, TEST_KEY, NO_TRASH);\n final Path encFile = new Path(zone, \"encfile\");\n fsWrapper.createFile(encFile);\n stat = fsWrapper.getFileStatus(encFile);\n assertTrue(\"Expected isEncrypted to return true for enc file\" + encFile,\n stat.isEncrypted());\n\n // check that it returns true for an ez root\n stat = fsWrapper.getFileStatus(zone);\n assertTrue(\"Expected isEncrypted to return true for ezroot\",\n stat.isEncrypted());\n\n // 
check that it returns true for a dir in the ez\n final Path zoneSubdir = new Path(zone, \"subdir\");\n fsWrapper.mkdir(zoneSubdir, FsPermission.getDirDefault(), true);\n stat = fsWrapper.getFileStatus(zoneSubdir);\n assertTrue(\n \"Expected isEncrypted to return true for ez subdir \" + zoneSubdir,\n stat.isEncrypted());\n\n // check that it returns false for a non ez dir\n final Path nonEzDirPath = new Path(prefix, \"nonzone\");\n fsWrapper.mkdir(nonEzDirPath, FsPermission.getDirDefault(), true);\n stat = fsWrapper.getFileStatus(nonEzDirPath);\n assertFalse(\n \"Expected isEncrypted to return false for directory \" + nonEzDirPath,\n stat.isEncrypted());\n\n // check that it returns true for listings within an ez\n FileStatus[] statuses = fsWrapper.listStatus(zone);\n for (FileStatus s : statuses) {\n assertTrue(\"Expected isEncrypted to return true for ez stat \" + zone,\n s.isEncrypted());\n }\n\n statuses = fsWrapper.listStatus(encFile);\n for (FileStatus s : statuses) {\n assertTrue(\n \"Expected isEncrypted to return true for ez file stat \" + encFile,\n s.isEncrypted());\n }\n\n // check that it returns false for listings outside an ez\n statuses = fsWrapper.listStatus(nonEzDirPath);\n for (FileStatus s : statuses) {\n assertFalse(\n \"Expected isEncrypted to return false for nonez stat \" + nonEzDirPath,\n s.isEncrypted());\n }\n\n statuses = fsWrapper.listStatus(baseFile);\n for (FileStatus s : statuses) {\n assertFalse(\n \"Expected isEncrypted to return false for non ez stat \" + baseFile,\n s.isEncrypted());\n }\n }\n\n private class MyInjector extends EncryptionFaultInjector {\n int generateCount;\n CountDownLatch ready;\n CountDownLatch wait;\n\n public MyInjector() {\n this.ready = new CountDownLatch(1);\n this.wait = new CountDownLatch(1);\n }\n\n @Override\n public void startFileAfterGenerateKey() throws IOException {\n ready.countDown();\n try {\n wait.await();\n } catch (InterruptedException e) {\n throw new IOException(e);\n }\n generateCount++;\n 
}\n }\n\n private class CreateFileTask implements Callable {\n private FileSystemTestWrapper fsWrapper;\n private Path name;\n\n CreateFileTask(FileSystemTestWrapper fsWrapper, Path name) {\n this.fsWrapper = fsWrapper;\n this.name = name;\n }\n\n @Override\n public Void call() throws Exception {\n fsWrapper.createFile(name);\n return null;\n }\n }\n\n private class InjectFaultTask implements Callable {\n final Path zone1 = new Path(\"/zone1\");\n final Path file = new Path(zone1, \"file1\");\n final ExecutorService executor = Executors.newSingleThreadExecutor();\n\n MyInjector injector;\n\n @Override\n public Void call() throws Exception {\n // Set up the injector\n injector = new MyInjector();\n EncryptionFaultInjector.instance = injector;\n Future future =\n executor.submit(new CreateFileTask(fsWrapper, file));\n injector.ready.await();\n // Do the fault\n doFault();\n // Allow create to proceed\n injector.wait.countDown();\n future.get();\n // Cleanup and postconditions\n doCleanup();\n return null;\n }\n\n public void doFault() throws Exception {}\n\n public void doCleanup() throws Exception {}\n }\n\n /**\n * Tests the retry logic in startFile. 
We release the lock while generating\n * an EDEK, so tricky things can happen in the intervening time.\n */\n @Test(timeout = 120000)\n public void testStartFileRetry() throws Exception {\n final Path zone1 = new Path(\"/zone1\");\n final Path file = new Path(zone1, \"file1\");\n fsWrapper.mkdir(zone1, FsPermission.getDirDefault(), true);\n ExecutorService executor = Executors.newSingleThreadExecutor();\n\n // Test when the parent directory becomes an EZ\n executor.submit(new InjectFaultTask() {\n @Override\n public void doFault() throws Exception {\n dfsAdmin.createEncryptionZone(zone1, TEST_KEY, NO_TRASH);\n }\n @Override\n public void doCleanup() throws Exception {\n assertEquals(\"Expected a startFile retry\", 2, injector.generateCount);\n fsWrapper.delete(file, false);\n }\n }).get();\n\n // Test when the parent directory unbecomes an EZ\n executor.submit(new InjectFaultTask() {\n @Override\n public void doFault() throws Exception {\n fsWrapper.delete(zone1, true);\n }\n @Override\n public void doCleanup() throws Exception {\n assertEquals(\"Expected no startFile retries\", 1, injector.generateCount);\n fsWrapper.delete(file, false);\n }\n }).get();\n\n // Test when the parent directory becomes a different EZ\n fsWrapper.mkdir(zone1, FsPermission.getDirDefault(), true);\n final String otherKey = \"other_key\";\n DFSTestUtil.createKey(otherKey, cluster, conf);\n dfsAdmin.createEncryptionZone(zone1, TEST_KEY, NO_TRASH);\n\n executor.submit(new InjectFaultTask() {\n @Override\n public void doFault() throws Exception {\n fsWrapper.delete(zone1, true);\n fsWrapper.mkdir(zone1, FsPermission.getDirDefault(), true);\n dfsAdmin.createEncryptionZone(zone1, otherKey, NO_TRASH);\n }\n @Override\n public void doCleanup() throws Exception {\n assertEquals(\"Expected a startFile retry\", 2, injector.generateCount);\n fsWrapper.delete(zone1, true);\n }\n }).get();\n\n // Test that the retry limit leads to an error\n fsWrapper.mkdir(zone1, FsPermission.getDirDefault(), 
true);\n final String anotherKey = \"another_key\";\n DFSTestUtil.createKey(anotherKey, cluster, conf);\n dfsAdmin.createEncryptionZone(zone1, anotherKey, NO_TRASH);\n String keyToUse = otherKey;\n\n MyInjector injector = new MyInjector();\n EncryptionFaultInjector.instance = injector;\n Future future = executor.submit(new CreateFileTask(fsWrapper, file));\n\n // Flip-flop between two EZs to repeatedly fail\n for (int i=0; i testToken = new Token(testIdentifier, new byte[0],\n new Text(), new Text());\n Mockito.when(((DelegationTokenExtension)keyProvider).\n addDelegationTokens(anyString(), (Credentials)any())).\n thenReturn(new Token[] { testToken });\n\n dfs.getClient().setKeyProvider(keyProvider);\n\n Credentials creds = new Credentials();\n final Token tokens[] = dfs.addDelegationTokens(\"JobTracker\", creds);\n DistributedFileSystem.LOG.debug(\"Delegation tokens: \" +\n Arrays.asList(tokens));\n Assert.assertEquals(2, tokens.length);\n Assert.assertEquals(tokens[1], testToken);\n Assert.assertEquals(1, creds.numberOfTokens());\n }\n\n /**\n * Test running fsck on a system with encryption zones.\n */\n @Test(timeout = 60000)\n public void testFsckOnEncryptionZones() throws Exception {\n final int len = 8196;\n final Path zoneParent = new Path(\"/zones\");\n final Path zone1 = new Path(zoneParent, \"zone1\");\n final Path zone1File = new Path(zone1, \"file\");\n fsWrapper.mkdir(zone1, FsPermission.getDirDefault(), true);\n dfsAdmin.createEncryptionZone(zone1, TEST_KEY, NO_TRASH);\n DFSTestUtil.createFile(fs, zone1File, len, (short) 1, 0xFEED);\n ByteArrayOutputStream bStream = new ByteArrayOutputStream();\n PrintStream out = new PrintStream(bStream, true);\n int errCode = ToolRunner.run(new DFSck(conf, out),\n new String[]{ \"/\" });\n assertEquals(\"Fsck ran with non-zero error code\", 0, errCode);\n String result = bStream.toString();\n assertTrue(\"Fsck did not return HEALTHY status\",\n result.contains(NamenodeFsck.HEALTHY_STATUS));\n\n // Run fsck directly 
on the encryption zone instead of root\n errCode = ToolRunner.run(new DFSck(conf, out),\n new String[]{ zoneParent.toString() });\n assertEquals(\"Fsck ran with non-zero error code\", 0, errCode);\n result = bStream.toString();\n assertTrue(\"Fsck did not return HEALTHY status\",\n result.contains(NamenodeFsck.HEALTHY_STATUS));\n }\n\n /**\n * Test correctness of successive snapshot creation and deletion\n * on a system with encryption zones.\n */\n @Test(timeout = 60000)\n public void testSnapshotsOnEncryptionZones() throws Exception {\n final String TEST_KEY2 = \"testkey2\";\n DFSTestUtil.createKey(TEST_KEY2, cluster, conf);\n\n final int len = 8196;\n final Path zoneParent = new Path(\"/zones\");\n final Path zone = new Path(zoneParent, \"zone\");\n final Path zoneFile = new Path(zone, \"zoneFile\");\n fsWrapper.mkdir(zone, FsPermission.getDirDefault(), true);\n dfsAdmin.allowSnapshot(zoneParent);\n dfsAdmin.createEncryptionZone(zone, TEST_KEY, NO_TRASH);\n DFSTestUtil.createFile(fs, zoneFile, len, (short) 1, 0xFEED);\n String contents = DFSTestUtil.readFile(fs, zoneFile);\n final Path snap1 = fs.createSnapshot(zoneParent, \"snap1\");\n final Path snap1Zone = new Path(snap1, zone.getName());\n assertEquals(\"Got unexpected ez path\", zone.toString(),\n dfsAdmin.getEncryptionZoneForPath(snap1Zone).getPath().toString());\n\n // Now delete the encryption zone, recreate the dir, and take another\n // snapshot\n fsWrapper.delete(zone, true);\n fsWrapper.mkdir(zone, FsPermission.getDirDefault(), true);\n final Path snap2 = fs.createSnapshot(zoneParent, \"snap2\");\n final Path snap2Zone = new Path(snap2, zone.getName());\n assertNull(\"Expected null ez path\",\n dfsAdmin.getEncryptionZoneForPath(snap2Zone));\n\n // Create the encryption zone again\n dfsAdmin.createEncryptionZone(zone, TEST_KEY2, NO_TRASH);\n final Path snap3 = fs.createSnapshot(zoneParent, \"snap3\");\n final Path snap3Zone = new Path(snap3, zone.getName());\n // Check that snap3's EZ has the correct 
settings\n EncryptionZone ezSnap3 = dfsAdmin.getEncryptionZoneForPath(snap3Zone);\n assertEquals(\"Got unexpected ez path\", zone.toString(),\n ezSnap3.getPath().toString());\n assertEquals(\"Unexpected ez key\", TEST_KEY2, ezSnap3.getKeyName());\n // Check that older snapshots still have the old EZ settings\n EncryptionZone ezSnap1 = dfsAdmin.getEncryptionZoneForPath(snap1Zone);\n assertEquals(\"Got unexpected ez path\", zone.toString(),\n ezSnap1.getPath().toString());\n assertEquals(\"Unexpected ez key\", TEST_KEY, ezSnap1.getKeyName());\n\n // Check that listEZs only shows the current filesystem state\n ArrayList listZones = Lists.newArrayList();\n RemoteIterator it = dfsAdmin.listEncryptionZones();\n while (it.hasNext()) {\n listZones.add(it.next());\n }\n for (EncryptionZone z: listZones) {\n System.out.println(z);\n }\n assertEquals(\"Did not expect additional encryption zones!\", 1,\n listZones.size());\n EncryptionZone listZone = listZones.get(0);\n assertEquals(\"Got unexpected ez path\", zone.toString(),\n listZone.getPath().toString());\n assertEquals(\"Unexpected ez key\", TEST_KEY2, listZone.getKeyName());\n\n // Verify contents of the snapshotted file\n final Path snapshottedZoneFile = new Path(\n snap1.toString() + \"/\" + zone.getName() + \"/\" + zoneFile.getName());\n assertEquals(\"Contents of snapshotted file have changed unexpectedly\",\n contents, DFSTestUtil.readFile(fs, snapshottedZoneFile));\n\n // Now delete the snapshots out of order and verify the zones are still\n // correct\n fs.deleteSnapshot(zoneParent, snap2.getName());\n assertEquals(\"Got unexpected ez path\", zone.toString(),\n dfsAdmin.getEncryptionZoneForPath(snap1Zone).getPath().toString());\n assertEquals(\"Got unexpected ez path\", zone.toString(),\n dfsAdmin.getEncryptionZoneForPath(snap3Zone).getPath().toString());\n fs.deleteSnapshot(zoneParent, snap1.getName());\n assertEquals(\"Got unexpected ez path\", zone.toString(),\n 
dfsAdmin.getEncryptionZoneForPath(snap3Zone).getPath().toString());\n }\n\n /**\n * Verify symlinks can be created in encryption zones and that\n * they function properly when the target is in the same\n * or different ez.\n */\n @Test(timeout = 60000)\n public void testEncryptionZonesWithSymlinks() throws Exception {\n // Verify we can create an encryption zone over both link and target\n final int len = 8192;\n final Path parent = new Path(\"/parent\");\n final Path linkParent = new Path(parent, \"symdir1\");\n final Path targetParent = new Path(parent, \"symdir2\");\n final Path link = new Path(linkParent, \"link\");\n final Path target = new Path(targetParent, \"target\");\n fs.mkdirs(parent);\n dfsAdmin.createEncryptionZone(parent, TEST_KEY, NO_TRASH);\n fs.mkdirs(linkParent);\n fs.mkdirs(targetParent);\n DFSTestUtil.createFile(fs, target, len, (short)1, 0xFEED);\n String content = DFSTestUtil.readFile(fs, target);\n fs.createSymlink(target, link, false);\n assertEquals(\"Contents read from link are not the same as target\",\n content, DFSTestUtil.readFile(fs, link));\n fs.delete(parent, true);\n\n // Now let's test when the symlink and target are in different\n // encryption zones\n fs.mkdirs(linkParent);\n fs.mkdirs(targetParent);\n dfsAdmin.createEncryptionZone(linkParent, TEST_KEY, NO_TRASH);\n dfsAdmin.createEncryptionZone(targetParent, TEST_KEY, NO_TRASH);\n DFSTestUtil.createFile(fs, target, len, (short)1, 0xFEED);\n content = DFSTestUtil.readFile(fs, target);\n fs.createSymlink(target, link, false);\n assertEquals(\"Contents read from link are not the same as target\",\n content, DFSTestUtil.readFile(fs, link));\n fs.delete(link, true);\n fs.delete(target, true);\n }\n\n @Test(timeout = 60000)\n public void testConcatFailsInEncryptionZones() throws Exception {\n final int len = 8192;\n final Path ez = new Path(\"/ez\");\n fs.mkdirs(ez);\n dfsAdmin.createEncryptionZone(ez, TEST_KEY, NO_TRASH);\n final Path src1 = new Path(ez, \"src1\");\n final Path 
src2 = new Path(ez, \"src2\");\n final Path target = new Path(ez, \"target\");\n DFSTestUtil.createFile(fs, src1, len, (short)1, 0xFEED);\n DFSTestUtil.createFile(fs, src2, len, (short)1, 0xFEED);\n DFSTestUtil.createFile(fs, target, len, (short)1, 0xFEED);\n try {\n fs.concat(target, new Path[] { src1, src2 });\n fail(\"expected concat to throw en exception for files in an ez\");\n } catch (IOException e) {\n assertExceptionContains(\n \"concat can not be called for files in an encryption zone\", e);\n }\n fs.delete(ez, true);\n }\n\n /**\n * Test running the OfflineImageViewer on a system with encryption zones.\n */\n @Test(timeout = 60000)\n public void testOfflineImageViewerOnEncryptionZones() throws Exception {\n final int len = 8196;\n final Path zoneParent = new Path(\"/zones\");\n final Path zone1 = new Path(zoneParent, \"zone1\");\n final Path zone1File = new Path(zone1, \"file\");\n fsWrapper.mkdir(zone1, FsPermission.getDirDefault(), true);\n dfsAdmin.createEncryptionZone(zone1, TEST_KEY, NO_TRASH);\n DFSTestUtil.createFile(fs, zone1File, len, (short) 1, 0xFEED);\n fs.setSafeMode(SafeModeAction.SAFEMODE_ENTER, false);\n fs.saveNamespace();\n\n File originalFsimage = FSImageTestUtil.findLatestImageFile(FSImageTestUtil\n .getFSImage(cluster.getNameNode()).getStorage().getStorageDir(0));\n if (originalFsimage == null) {\n throw new RuntimeException(\"Didn't generate or can't find fsimage\");\n }\n\n // Run the XML OIV processor\n ByteArrayOutputStream output = new ByteArrayOutputStream();\n PrintStream pw = new PrintStream(output);\n PBImageXmlWriter v = new PBImageXmlWriter(new Configuration(), pw);\n v.visit(new RandomAccessFile(originalFsimage, \"r\"));\n final String xml = output.toString();\n SAXParser parser = SAXParserFactory.newInstance().newSAXParser();\n parser.parse(new InputSource(new StringReader(xml)), new DefaultHandler());\n }\n\n /**\n * Test creating encryption zone on the root path\n */\n @Test(timeout = 60000)\n public void 
testEncryptionZonesOnRootPath() throws Exception {\n final int len = 8196;\n final Path rootDir = new Path(\"/\");\n final Path zoneFile = new Path(rootDir, \"file\");\n final Path rawFile = new Path(\"/.reserved/raw/file\");\n dfsAdmin.createEncryptionZone(rootDir, TEST_KEY, NO_TRASH);\n DFSTestUtil.createFile(fs, zoneFile, len, (short) 1, 0xFEED);\n\n assertEquals(\"File can be created on the root encryption zone \" +\n \"with correct length\",\n len, fs.getFileStatus(zoneFile).getLen());\n assertEquals(\"Root dir is encrypted\",\n true, fs.getFileStatus(rootDir).isEncrypted());\n assertEquals(\"File is encrypted\",\n true, fs.getFileStatus(zoneFile).isEncrypted());\n DFSTestUtil.verifyFilesNotEqual(fs, zoneFile, rawFile, len);\n }\n\n @Test(timeout = 60000)\n public void testEncryptionZonesOnRelativePath() throws Exception {\n final int len = 8196;\n final Path baseDir = new Path(\"/somewhere/base\");\n final Path zoneDir = new Path(\"zone\");\n final Path zoneFile = new Path(\"file\");\n fs.setWorkingDirectory(baseDir);\n fs.mkdirs(zoneDir);\n dfsAdmin.createEncryptionZone(zoneDir, TEST_KEY, NO_TRASH);\n DFSTestUtil.createFile(fs, zoneFile, len, (short) 1, 0xFEED);\n\n assertNumZones(1);\n assertZonePresent(TEST_KEY, \"/somewhere/base/zone\");\n\n assertEquals(\"Got unexpected ez path\", \"/somewhere/base/zone\", dfsAdmin\n .getEncryptionZoneForPath(zoneDir).getPath().toString());\n }\n\n @Test(timeout = 60000)\n public void testGetEncryptionZoneOnANonExistentZoneFile() throws Exception {\n final Path ez = new Path(\"/ez\");\n fs.mkdirs(ez);\n dfsAdmin.createEncryptionZone(ez, TEST_KEY, NO_TRASH);\n Path zoneFile = new Path(ez, \"file\");\n try {\n fs.getEZForPath(zoneFile);\n fail(\"FileNotFoundException should be thrown for a non-existent\"\n + \" file path\");\n } catch (FileNotFoundException e) {\n assertExceptionContains(\"Path not found: \" + zoneFile, e);\n }\n try {\n dfsAdmin.getEncryptionZoneForPath(zoneFile);\n fail(\"FileNotFoundException should be 
thrown for a non-existent\"\n + \" file path\");\n } catch (FileNotFoundException e) {\n assertExceptionContains(\"Path not found: \" + zoneFile, e);\n }\n }\n\n @Test(timeout = 120000)\n public void testEncryptionZoneWithTrash() throws Exception {\n // Create the encryption zone1\n final HdfsAdmin dfsAdmin =\n new HdfsAdmin(FileSystem.getDefaultUri(conf), conf);\n final Path zone1 = new Path(\"/zone1\");\n fs.mkdirs(zone1);\n dfsAdmin.createEncryptionZone(zone1, TEST_KEY, NO_TRASH);\n\n // Create the encrypted file in zone1\n final Path encFile1 = new Path(zone1, \"encFile1\");\n final int len = 8192;\n DFSTestUtil.createFile(fs, encFile1, len, (short) 1, 0xFEED);\n\n Configuration clientConf = new Configuration(conf);\n clientConf.setLong(FS_TRASH_INTERVAL_KEY, 1);\n FsShell shell = new FsShell(clientConf);\n\n // Delete encrypted file from the shell with trash enabled\n // Verify the file is moved to appropriate trash within the zone\n verifyShellDeleteWithTrash(shell, encFile1);\n\n // Delete encryption zone from the shell with trash enabled\n // Verify the zone is moved to appropriate trash location in user's home dir\n verifyShellDeleteWithTrash(shell, zone1);\n\n final Path topEZ = new Path(\"/topEZ\");\n fs.mkdirs(topEZ);\n dfsAdmin.createEncryptionZone(topEZ, TEST_KEY, NO_TRASH);\n final String NESTED_EZ_TEST_KEY = \"nested_ez_test_key\";\n DFSTestUtil.createKey(NESTED_EZ_TEST_KEY, cluster, conf);\n final Path nestedEZ = new Path(topEZ, \"nestedEZ\");\n fs.mkdirs(nestedEZ);\n dfsAdmin.createEncryptionZone(nestedEZ, NESTED_EZ_TEST_KEY, NO_TRASH);\n final Path topEZFile = new Path(topEZ, \"file\");\n final Path nestedEZFile = new Path(nestedEZ, \"file\");\n DFSTestUtil.createFile(fs, topEZFile, len, (short) 1, 0xFEED);\n DFSTestUtil.createFile(fs, nestedEZFile, len, (short) 1, 0xFEED);\n verifyShellDeleteWithTrash(shell, topEZFile);\n verifyShellDeleteWithTrash(shell, nestedEZFile);\n verifyShellDeleteWithTrash(shell, nestedEZ);\n 
verifyShellDeleteWithTrash(shell, topEZ);\n }\n\n @Test(timeout = 120000)\n public void testRootDirEZTrash() throws Exception {\n final HdfsAdmin dfsAdmin =\n new HdfsAdmin(FileSystem.getDefaultUri(conf), conf);\n dfsAdmin.createEncryptionZone(new Path(\"/\"), TEST_KEY, NO_TRASH);\n final Path encFile = new Path(\"/encFile\");\n final int len = 8192;\n DFSTestUtil.createFile(fs, encFile, len, (short) 1, 0xFEED);\n Configuration clientConf = new Configuration(conf);\n clientConf.setLong(FS_TRASH_INTERVAL_KEY, 1);\n FsShell shell = new FsShell(clientConf);\n verifyShellDeleteWithTrash(shell, encFile);\n }\n\n @Test(timeout = 120000)\n public void testGetTrashRoots() throws Exception {\n final HdfsAdmin dfsAdmin =\n new HdfsAdmin(FileSystem.getDefaultUri(conf), conf);\n Path ezRoot1 = new Path(\"/ez1\");\n fs.mkdirs(ezRoot1);\n dfsAdmin.createEncryptionZone(ezRoot1, TEST_KEY, NO_TRASH);\n Path ezRoot2 = new Path(\"/ez2\");\n fs.mkdirs(ezRoot2);\n dfsAdmin.createEncryptionZone(ezRoot2, TEST_KEY, NO_TRASH);\n Path ezRoot3 = new Path(\"/ez3\");\n fs.mkdirs(ezRoot3);\n dfsAdmin.createEncryptionZone(ezRoot3, TEST_KEY, NO_TRASH);\n Collection trashRootsBegin = fs.getTrashRoots(true);\n assertEquals(\"Unexpected getTrashRoots result\", 0, trashRootsBegin.size());\n\n final Path encFile = new Path(ezRoot2, \"encFile\");\n final int len = 8192;\n DFSTestUtil.createFile(fs, encFile, len, (short) 1, 0xFEED);\n Configuration clientConf = new Configuration(conf);\n clientConf.setLong(FS_TRASH_INTERVAL_KEY, 1);\n FsShell shell = new FsShell(clientConf);\n verifyShellDeleteWithTrash(shell, encFile);\n\n Collection trashRootsDelete1 = fs.getTrashRoots(true);\n assertEquals(\"Unexpected getTrashRoots result\", 1,\n trashRootsDelete1.size());\n\n final Path nonEncFile = new Path(\"/nonEncFile\");\n DFSTestUtil.createFile(fs, nonEncFile, len, (short) 1, 0xFEED);\n verifyShellDeleteWithTrash(shell, nonEncFile);\n\n Collection trashRootsDelete2 = fs.getTrashRoots(true);\n 
assertEquals(\"Unexpected getTrashRoots result\", 2,\n trashRootsDelete2.size());\n }\n\n private void verifyShellDeleteWithTrash(FsShell shell, Path path)\n throws Exception{\n try {\n Path trashDir = shell.getCurrentTrashDir(path);\n // Verify that trashDir has a path component named \".Trash\"\n Path checkTrash = trashDir;\n while (!checkTrash.isRoot() && !checkTrash.getName().equals(\".Trash\")) {\n checkTrash = checkTrash.getParent();\n }\n assertEquals(\"No .Trash component found in trash dir \" + trashDir,\n \".Trash\", checkTrash.getName());\n final Path trashFile =\n new Path(shell.getCurrentTrashDir(path) + \"/\" + path);\n String[] argv = new String[]{\"-rm\", \"-r\", path.toString()};\n int res = ToolRunner.run(shell, argv);\n assertEquals(\"rm failed\", 0, res);\n assertTrue(\"File not in trash : \" + trashFile, fs.exists(trashFile));\n } catch (IOException ioe) {\n fail(ioe.getMessage());\n } finally {\n if (fs.exists(path)) {\n fs.delete(path, true);\n }\n }\n }\n}\n"},"message":{"kind":"string","value":"HDFS-10814. Add assertion for getNumEncryptionZones when no EZ is created. Contributed by Vinitha Reddy Gankidi.\n\n(cherry picked from commit 4bd45f54eedd449a98a90540698c6ceb47454fec)\n"},"old_file":{"kind":"string","value":"hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZones.java"},"subject":{"kind":"string","value":"HDFS-10814. Add assertion for getNumEncryptionZones when no EZ is created. Contributed by Vinitha Reddy Gankidi."},"git_diff":{"kind":"string","value":"adoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZones.java\n public void testBasicOperations() throws Exception {\n \n int numZones = 0;\n\n /* Number of EZs should be 0 if no EZ is created */\n assertEquals(\"Unexpected number of encryption zones!\", numZones,\n cluster.getNamesystem().getNumEncryptionZones());\n /* Test failure of create EZ on a directory that doesn't exist. 
*/\n final Path zoneParent = new Path(\"/zones\");\n final Path zone1 = new Path(zoneParent, \"zone1\");"}}},{"rowIdx":2058,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"36d280522f56f1dfa99760d0292d60cc8aee9709"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"selckin/wicket,AlienQueen/wicket,dashorst/wicket,klopfdreh/wicket,apache/wicket,topicusonderwijs/wicket,aldaris/wicket,apache/wicket,apache/wicket,mosoft521/wicket,freiheit-com/wicket,selckin/wicket,freiheit-com/wicket,mosoft521/wicket,mosoft521/wicket,mafulafunk/wicket,klopfdreh/wicket,astrapi69/wicket,mafulafunk/wicket,astrapi69/wicket,aldaris/wicket,AlienQueen/wicket,dashorst/wicket,freiheit-com/wicket,mafulafunk/wicket,dashorst/wicket,AlienQueen/wicket,mosoft521/wicket,dashorst/wicket,klopfdreh/wicket,selckin/wicket,bitstorm/wicket,topicusonderwijs/wicket,bitstorm/wicket,aldaris/wicket,dashorst/wicket,selckin/wicket,klopfdreh/wicket,zwsong/wicket,selckin/wicket,aldaris/wicket,AlienQueen/wicket,zwsong/wicket,freiheit-com/wicket,astrapi69/wicket,bitstorm/wicket,freiheit-com/wicket,topicusonderwijs/wicket,aldaris/wicket,zwsong/wicket,AlienQueen/wicket,topicusonderwijs/wicket,topicusonderwijs/wicket,apache/wicket,mosoft521/wicket,apache/wicket,bitstorm/wicket,zwsong/wicket,astrapi69/wicket,klopfdreh/wicket,bitstorm/wicket"},"new_contents":{"kind":"string","value":"/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage org.apache.wicket.markup.head;\n\nimport org.apache.wicket.core.util.string.JavaScriptUtils;\nimport org.apache.wicket.request.Response;\nimport org.apache.wicket.request.mapper.parameter.PageParameters;\nimport org.apache.wicket.request.resource.ResourceReference;\nimport org.apache.wicket.util.lang.Args;\nimport org.apache.wicket.util.string.Strings;\n\n/**\n * Base class for all {@link HeaderItem}s that represent javascripts. This class mainly contains\n * factory methods.\n * \n * @author papegaaij\n */\npublic abstract class JavaScriptHeaderItem extends HeaderItem\n{\n\t/**\n\t * The condition to use for Internet Explorer conditional comments. E.g. \"IE 7\".\n\t * {@code null} or empty string for no condition.\n\t */\n\tprivate final String condition;\n\n\tprotected JavaScriptHeaderItem(String condition)\n\t{\n\t\tthis.condition = condition;\n\t}\n\n\t/**\n\t * @return the condition to use for Internet Explorer conditional comments. E.g. 
\"IE 7\".\n\t */\n\tpublic String getCondition()\n\t{\n\t\treturn condition;\n\t}\n\n\t/**\n\t * Creates a {@link JavaScriptReferenceHeaderItem} for the given reference.\n\t * \n\t * @param reference\n\t * resource reference pointing to the javascript resource\n\t * @return A newly created {@link JavaScriptReferenceHeaderItem} for the given reference.\n\t */\n\tpublic static JavaScriptReferenceHeaderItem forReference(ResourceReference reference)\n\t{\n\t\treturn forReference(reference, null);\n\t}\n\n\t/**\n\t * Creates a {@link JavaScriptReferenceHeaderItem} for the given reference.\n\t * \n\t * @param reference\n\t * resource reference pointing to the javascript resource\n\t * @param id\n\t * id that will be used to filter duplicate reference (it's still filtered by URL\n\t * too)\n\t * @return A newly created {@link JavaScriptReferenceHeaderItem} for the given reference.\n\t */\n\tpublic static JavaScriptReferenceHeaderItem forReference(ResourceReference reference, String id)\n\t{\n\t\treturn forReference(reference, null, id);\n\t}\n\n\t/**\n\t * Creates a {@link JavaScriptReferenceHeaderItem} for the given reference.\n\t * \n\t * @param reference\n\t * resource reference pointing to the javascript resource\n\t * @param pageParameters\n\t * the parameters for this Javascript resource reference\n\t * @param id\n\t * id that will be used to filter duplicate reference (it's still filtered by URL\n\t * too)\n\t * @return A newly created {@link JavaScriptReferenceHeaderItem} for the given reference.\n\t */\n\tpublic static JavaScriptReferenceHeaderItem forReference(ResourceReference reference,\n\t\tPageParameters pageParameters, String id)\n\t{\n\t\treturn forReference(reference, pageParameters, id, false);\n\t}\n\n\t/**\n\t * Creates a {@link JavaScriptReferenceHeaderItem} for the given reference.\n\t * \n\t * @param reference\n\t * resource reference pointing to the javascript resource\n\t * @param pageParameters\n\t * the parameters for this Javascript resource 
reference\n\t * @param id\n\t * id that will be used to filter duplicate reference (it's still filtered by URL\n\t * too)\n\t * @param defer\n\t * specifies that the execution of a script should be deferred (delayed) until after\n\t * the page has been loaded.\n\t * @return A newly created {@link JavaScriptReferenceHeaderItem} for the given reference.\n\t */\n\tpublic static JavaScriptReferenceHeaderItem forReference(ResourceReference reference,\n\t\tPageParameters pageParameters, String id, boolean defer)\n\t{\n\t\treturn forReference(reference, pageParameters, id, defer, null);\n\t}\n\n\t/**\n\t * Creates a {@link JavaScriptReferenceHeaderItem} for the given reference.\n\t * \n\t * @param reference\n\t * resource reference pointing to the JavaScript resource\n\t * @param id\n\t * id that will be used to filter duplicate reference (it's still filtered by URL\n\t * too)\n\t * @param defer\n\t * specifies that the execution of a script should be deferred (delayed) until after\n\t * the page has been loaded.\n\t * @return A newly created {@link JavaScriptReferenceHeaderItem} for the given reference.\n\t */\n\tpublic static JavaScriptReferenceHeaderItem forReference(ResourceReference reference,\n\t\tString id, boolean defer)\n\t{\n\t\treturn forReference(reference, null, id, defer, null);\n\t}\n\n\t/**\n\t * Creates a {@link JavaScriptReferenceHeaderItem} for the given reference.\n\t *\n\t * @param reference\n\t * resource reference pointing to the JavaScript resource\n\t * @param defer\n\t * specifies that the execution of a script should be deferred (delayed) until after\n\t * the page has been loaded.\n\t * @return A newly created {@link JavaScriptReferenceHeaderItem} for the given reference.\n\t */\n\tpublic static JavaScriptReferenceHeaderItem forReference(ResourceReference reference,\n\t\tboolean defer)\n\t{\n\t\treturn forReference(reference, null, null, defer, null);\n\t}\n\n\t/**\n\t * Creates a {@link JavaScriptReferenceHeaderItem} for the given 
reference.\n\t *\n\t * @param reference\n\t * resource reference pointing to the javascript resource\n\t * @param pageParameters\n\t * the parameters for this Javascript resource reference\n\t * @param id\n\t * id that will be used to filter duplicate reference (it's still filtered by URL\n\t * too)\n\t * @param defer\n\t * specifies that the execution of a script should be deferred (delayed) until after\n\t * the page has been loaded.\n\t * @param charset\n\t * a non null value specifies the charset attribute of the script tag\n\t * @return A newly created {@link JavaScriptReferenceHeaderItem} for the given reference.\n\t */\n\tpublic static JavaScriptReferenceHeaderItem forReference(ResourceReference reference,\n\t\tPageParameters pageParameters, String id, boolean defer, String charset)\n\t{\n\t\treturn new JavaScriptReferenceHeaderItem(reference, pageParameters, id, defer, charset, null);\n\t}\n\n\n\t/**\n\t * Creates a {@link JavaScriptReferenceHeaderItem} for the given reference.\n\t *\n\t * @param reference\n\t * resource reference pointing to the javascript resource\n\t * @param pageParameters\n\t * the parameters for this Javascript resource reference\n\t * @param id\n\t * id that will be used to filter duplicate reference (it's still filtered by URL\n\t * too)\n\t * @param defer\n\t * specifies that the execution of a script should be deferred (delayed) until after\n\t * the page has been loaded.\n\t * @param charset\n\t * a non null value specifies the charset attribute of the script tag\n\t * @param condition\n\t * the condition to use for Internet Explorer conditional comments. E.g. 
\"IE 7\".\n\t * @return A newly created {@link JavaScriptReferenceHeaderItem} for the given reference.\n\t */\n\tpublic static JavaScriptReferenceHeaderItem forReference(ResourceReference reference,\n\t\tPageParameters pageParameters, String id, boolean defer, String charset, String condition)\n\t{\n\t\treturn new JavaScriptReferenceHeaderItem(reference, pageParameters, id, defer, charset, condition);\n\t}\n\n\t/**\n\t * Creates a {@link JavaScriptContentHeaderItem} for the given content.\n\t * \n\t * @param javascript\n\t * javascript content to be rendered.\n\t * @param id\n\t * unique id for the javascript element. This can be null, however in that case the\n\t * ajax header contribution can't detect duplicate script fragments.\n\t * @return A newly created {@link JavaScriptContentHeaderItem} for the given content.\n\t */\n\tpublic static JavaScriptContentHeaderItem forScript(CharSequence javascript, String id)\n\t{\n\t\treturn forScript(javascript, id, null);\n\t}\n\n\t/**\n\t * Creates a {@link JavaScriptContentHeaderItem} for the given content.\n\t *\n\t * @param javascript\n\t * javascript content to be rendered.\n\t * @param id\n\t * unique id for the javascript element. This can be null, however in that case the\n\t * ajax header contribution can't detect duplicate script fragments.\n\t * @param condition\n\t * the condition to use for Internet Explorer conditional comments. E.g. 
\"IE 7\".\n\t * @return A newly created {@link JavaScriptContentHeaderItem} for the given content.\n\t */\n\tpublic static JavaScriptContentHeaderItem forScript(CharSequence javascript, String id, String condition)\n\t{\n\t\treturn new JavaScriptContentHeaderItem(javascript, id, condition);\n\t}\n\n\t/**\n\t * Creates a {@link JavaScriptUrlReferenceHeaderItem} for the given url.\n\t * \n\t * @param url\n\t * context-relative url of the the javascript resource\n\t * @return A newly created {@link JavaScriptUrlReferenceHeaderItem} for the given url.\n\t */\n\tpublic static JavaScriptUrlReferenceHeaderItem forUrl(String url)\n\t{\n\t\treturn forUrl(url, null);\n\t}\n\n\t/**\n\t * Creates a {@link JavaScriptUrlReferenceHeaderItem} for the given url.\n\t * \n\t * @param url\n\t * context-relative url of the the javascript resource\n\t * @param id\n\t * id that will be used to filter duplicate reference (it's still filtered by URL\n\t * too)\n\t * @return A newly created {@link JavaScriptUrlReferenceHeaderItem} for the given url.\n\t */\n\tpublic static JavaScriptUrlReferenceHeaderItem forUrl(String url, String id)\n\t{\n\t\treturn forUrl(url, id, false);\n\t}\n\n\t/**\n\t * Creates a {@link JavaScriptUrlReferenceHeaderItem} for the given url.\n\t * \n\t * @param url\n\t * context-relative url of the the javascript resource\n\t * @param id\n\t * id that will be used to filter duplicate reference (it's still filtered by URL\n\t * too)\n\t * @param defer\n\t * specifies that the execution of a script should be deferred (delayed) until after\n\t * the page has been loaded.\n\t * @return A newly created {@link JavaScriptUrlReferenceHeaderItem} for the given url.\n\t */\n\tpublic static JavaScriptUrlReferenceHeaderItem forUrl(String url, String id, boolean defer)\n\t{\n\t\treturn forUrl(url, id, defer, null);\n\t}\n\n\t/**\n\t * Creates a {@link JavaScriptUrlReferenceHeaderItem} for the given url.\n\t * \n\t * @param url\n\t * context-relative url of the the javascript 
resource\n\t * @param id\n\t * id that will be used to filter duplicate reference (it's still filtered by URL\n\t * too)\n\t * @param defer\n\t * specifies that the execution of a script should be deferred (delayed) until after\n\t * the page has been loaded.\n\t * @param charset\n\t * a non null value specifies the charset attribute of the script tag\n\t * @return A newly created {@link JavaScriptUrlReferenceHeaderItem} for the given url.\n\t */\n\tpublic static JavaScriptUrlReferenceHeaderItem forUrl(String url, String id, boolean defer,\n\t\tString charset)\n\t{\n\t\treturn forUrl(url, id, defer, charset, null);\n\t}\n\n\t/**\n\t * Creates a {@link JavaScriptUrlReferenceHeaderItem} for the given url.\n\t *\n\t * @param url\n\t * context-relative url of the the javascript resource\n\t * @param id\n\t * id that will be used to filter duplicate reference (it's still filtered by URL\n\t * too)\n\t * @param defer\n\t * specifies that the execution of a script should be deferred (delayed) until after\n\t * the page has been loaded.\n\t * @param charset\n\t * a non null value specifies the charset attribute of the script tag\n\t * @return A newly created {@link JavaScriptUrlReferenceHeaderItem} for the given url.\n\t */\n\tpublic static JavaScriptUrlReferenceHeaderItem forUrl(String url, String id, boolean defer,\n\t\tString charset, String condition)\n\t{\n\t\treturn new JavaScriptUrlReferenceHeaderItem(url, id, defer, charset, condition);\n\t}\n\n\tprotected final void internalRenderJavaScriptReference(Response response, String url,\n\t\tString id, boolean defer, String charset, String condition)\n\t{\n\t\tArgs.notEmpty(url, \"url\");\n\n\t\tboolean hasCondition = Strings.isEmpty(condition) == false;\n\t\tif (hasCondition)\n\t\t{\n\t\t\tresponse.write(\"\\n\");\n\t\t}\n\t}\n}\n"},"new_file":{"kind":"string","value":"wicket-core/src/main/java/org/apache/wicket/markup/head/JavaScriptHeaderItem.java"},"old_contents":{"kind":"string","value":"/*\n * Licensed to the 
Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage org.apache.wicket.markup.head;\n\nimport org.apache.wicket.request.Response;\nimport org.apache.wicket.request.mapper.parameter.PageParameters;\nimport org.apache.wicket.request.resource.ResourceReference;\nimport org.apache.wicket.util.lang.Args;\nimport org.apache.wicket.core.util.string.JavaScriptUtils;\nimport org.apache.wicket.util.string.Strings;\n\n/**\n * Base class for all {@link HeaderItem}s that represent javascripts. This class mainly contains\n * factory methods.\n * \n * @author papegaaij\n */\npublic abstract class JavaScriptHeaderItem extends HeaderItem\n{\n\t/**\n\t * The condition to use for Internet Explorer conditional comments. E.g. \"IE 7\".\n\t * {@code null} or empty string for no condition.\n\t */\n\tprivate final String condition;\n\t\n\tprotected JavaScriptHeaderItem(String condition)\n\t{\n\t\tthis.condition = condition;\n\t}\n\n\t/**\n\t * @return the condition to use for Internet Explorer conditional comments. E.g. 
\"IE 7\".\n\t */\n\tpublic String getCondition()\n\t{\n\t\treturn condition;\n\t}\n\n\t/**\n\t * Creates a {@link JavaScriptReferenceHeaderItem} for the given reference.\n\t * \n\t * @param reference\n\t * resource reference pointing to the javascript resource\n\t * @return A newly created {@link JavaScriptReferenceHeaderItem} for the given reference.\n\t */\n\tpublic static JavaScriptReferenceHeaderItem forReference(ResourceReference reference)\n\t{\n\t\treturn forReference(reference, null);\n\t}\n\n\t/**\n\t * Creates a {@link JavaScriptReferenceHeaderItem} for the given reference.\n\t * \n\t * @param reference\n\t * resource reference pointing to the javascript resource\n\t * @param id\n\t * id that will be used to filter duplicate reference (it's still filtered by URL\n\t * too)\n\t * @return A newly created {@link JavaScriptReferenceHeaderItem} for the given reference.\n\t */\n\tpublic static JavaScriptReferenceHeaderItem forReference(ResourceReference reference, String id)\n\t{\n\t\treturn forReference(reference, null, id);\n\t}\n\n\t/**\n\t * Creates a {@link JavaScriptReferenceHeaderItem} for the given reference.\n\t * \n\t * @param reference\n\t * resource reference pointing to the javascript resource\n\t * @param pageParameters\n\t * the parameters for this Javascript resource reference\n\t * @param id\n\t * id that will be used to filter duplicate reference (it's still filtered by URL\n\t * too)\n\t * @return A newly created {@link JavaScriptReferenceHeaderItem} for the given reference.\n\t */\n\tpublic static JavaScriptReferenceHeaderItem forReference(ResourceReference reference,\n\t\tPageParameters pageParameters, String id)\n\t{\n\t\treturn forReference(reference, pageParameters, id, false);\n\t}\n\n\t/**\n\t * Creates a {@link JavaScriptReferenceHeaderItem} for the given reference.\n\t * \n\t * @param reference\n\t * resource reference pointing to the javascript resource\n\t * @param pageParameters\n\t * the parameters for this Javascript resource 
reference\n\t * @param id\n\t * id that will be used to filter duplicate reference (it's still filtered by URL\n\t * too)\n\t * @param defer\n\t * specifies that the execution of a script should be deferred (delayed) until after\n\t * the page has been loaded.\n\t * @return A newly created {@link JavaScriptReferenceHeaderItem} for the given reference.\n\t */\n\tpublic static JavaScriptReferenceHeaderItem forReference(ResourceReference reference,\n\t\tPageParameters pageParameters, String id, boolean defer)\n\t{\n\t\treturn forReference(reference, pageParameters, id, defer, null);\n\t}\n\n\t/**\n\t * Creates a {@link JavaScriptReferenceHeaderItem} for the given reference.\n\t * \n\t * @param reference\n\t * resource reference pointing to the javascript resource\n\t * @param pageParameters\n\t * the parameters for this Javascript resource reference\n\t * @param id\n\t * id that will be used to filter duplicate reference (it's still filtered by URL\n\t * too)\n\t * @param defer\n\t * specifies that the execution of a script should be deferred (delayed) until after\n\t * the page has been loaded.\n\t * @param charset\n\t * a non null value specifies the charset attribute of the script tag\n\t * @return A newly created {@link JavaScriptReferenceHeaderItem} for the given reference.\n\t */\n\tpublic static JavaScriptReferenceHeaderItem forReference(ResourceReference reference,\n\t\tPageParameters pageParameters, String id, boolean defer, String charset)\n\t{\n\t\treturn new JavaScriptReferenceHeaderItem(reference, pageParameters, id, defer, charset, null);\n\t}\n\n\n\t/**\n\t * Creates a {@link JavaScriptReferenceHeaderItem} for the given reference.\n\t *\n\t * @param reference\n\t * resource reference pointing to the javascript resource\n\t * @param pageParameters\n\t * the parameters for this Javascript resource reference\n\t * @param id\n\t * id that will be used to filter duplicate reference (it's still filtered by URL\n\t * too)\n\t * @param defer\n\t * specifies 
that the execution of a script should be deferred (delayed) until after\n\t * the page has been loaded.\n\t * @param charset\n\t * a non null value specifies the charset attribute of the script tag\n\t * @param condition\n\t * the condition to use for Internet Explorer conditional comments. E.g. \"IE 7\".\n\t * @return A newly created {@link JavaScriptReferenceHeaderItem} for the given reference.\n\t */\n\tpublic static JavaScriptReferenceHeaderItem forReference(ResourceReference reference,\n\t\tPageParameters pageParameters, String id, boolean defer, String charset, String condition)\n\t{\n\t\treturn new JavaScriptReferenceHeaderItem(reference, pageParameters, id, defer, charset, condition);\n\t}\n\n\t/**\n\t * Creates a {@link JavaScriptContentHeaderItem} for the given content.\n\t * \n\t * @param javascript\n\t * javascript content to be rendered.\n\t * @param id\n\t * unique id for the javascript element. This can be null, however in that case the\n\t * ajax header contribution can't detect duplicate script fragments.\n\t * @return A newly created {@link JavaScriptContentHeaderItem} for the given content.\n\t */\n\tpublic static JavaScriptContentHeaderItem forScript(CharSequence javascript, String id)\n\t{\n\t\treturn forScript(javascript, id, null);\n\t}\n\n\t/**\n\t * Creates a {@link JavaScriptContentHeaderItem} for the given content.\n\t *\n\t * @param javascript\n\t * javascript content to be rendered.\n\t * @param id\n\t * unique id for the javascript element. This can be null, however in that case the\n\t * ajax header contribution can't detect duplicate script fragments.\n\t * @param condition\n\t * the condition to use for Internet Explorer conditional comments. E.g. 
\"IE 7\".\n\t * @return A newly created {@link JavaScriptContentHeaderItem} for the given content.\n\t */\n\tpublic static JavaScriptContentHeaderItem forScript(CharSequence javascript, String id, String condition)\n\t{\n\t\treturn new JavaScriptContentHeaderItem(javascript, id, condition);\n\t}\n\n\t/**\n\t * Creates a {@link JavaScriptUrlReferenceHeaderItem} for the given url.\n\t * \n\t * @param url\n\t * context-relative url of the the javascript resource\n\t * @return A newly created {@link JavaScriptUrlReferenceHeaderItem} for the given url.\n\t */\n\tpublic static JavaScriptUrlReferenceHeaderItem forUrl(String url)\n\t{\n\t\treturn forUrl(url, null);\n\t}\n\n\t/**\n\t * Creates a {@link JavaScriptUrlReferenceHeaderItem} for the given url.\n\t * \n\t * @param url\n\t * context-relative url of the the javascript resource\n\t * @param id\n\t * id that will be used to filter duplicate reference (it's still filtered by URL\n\t * too)\n\t * @return A newly created {@link JavaScriptUrlReferenceHeaderItem} for the given url.\n\t */\n\tpublic static JavaScriptUrlReferenceHeaderItem forUrl(String url, String id)\n\t{\n\t\treturn forUrl(url, id, false);\n\t}\n\n\t/**\n\t * Creates a {@link JavaScriptUrlReferenceHeaderItem} for the given url.\n\t * \n\t * @param url\n\t * context-relative url of the the javascript resource\n\t * @param id\n\t * id that will be used to filter duplicate reference (it's still filtered by URL\n\t * too)\n\t * @param defer\n\t * specifies that the execution of a script should be deferred (delayed) until after\n\t * the page has been loaded.\n\t * @return A newly created {@link JavaScriptUrlReferenceHeaderItem} for the given url.\n\t */\n\tpublic static JavaScriptUrlReferenceHeaderItem forUrl(String url, String id, boolean defer)\n\t{\n\t\treturn forUrl(url, id, defer, null);\n\t}\n\n\t/**\n\t * Creates a {@link JavaScriptUrlReferenceHeaderItem} for the given url.\n\t * \n\t * @param url\n\t * context-relative url of the the javascript 
resource\n\t * @param id\n\t * id that will be used to filter duplicate reference (it's still filtered by URL\n\t * too)\n\t * @param defer\n\t * specifies that the execution of a script should be deferred (delayed) until after\n\t * the page has been loaded.\n\t * @param charset\n\t * a non null value specifies the charset attribute of the script tag\n\t * @return A newly created {@link JavaScriptUrlReferenceHeaderItem} for the given url.\n\t */\n\tpublic static JavaScriptUrlReferenceHeaderItem forUrl(String url, String id, boolean defer,\n\t\tString charset)\n\t{\n\t\treturn forUrl(url, id, defer, charset, null);\n\t}\n\n\t/**\n\t * Creates a {@link JavaScriptUrlReferenceHeaderItem} for the given url.\n\t *\n\t * @param url\n\t * context-relative url of the the javascript resource\n\t * @param id\n\t * id that will be used to filter duplicate reference (it's still filtered by URL\n\t * too)\n\t * @param defer\n\t * specifies that the execution of a script should be deferred (delayed) until after\n\t * the page has been loaded.\n\t * @param charset\n\t * a non null value specifies the charset attribute of the script tag\n\t * @return A newly created {@link JavaScriptUrlReferenceHeaderItem} for the given url.\n\t */\n\tpublic static JavaScriptUrlReferenceHeaderItem forUrl(String url, String id, boolean defer,\n\t\tString charset, String condition)\n\t{\n\t\treturn new JavaScriptUrlReferenceHeaderItem(url, id, defer, charset, condition);\n\t}\n\n\tprotected final void internalRenderJavaScriptReference(Response response, String url,\n\t\tString id, boolean defer, String charset, String condition)\n\t{\n\t\tArgs.notEmpty(url, \"url\");\n\t\t\n\t\tboolean hasCondition = Strings.isEmpty(condition) == false; \n\t\tif (hasCondition)\n\t\t{\n\t\t\tresponse.write(\"\\n\");\n\t\t}\n\t}\n}\n"},"message":{"kind":"string","value":"WICKET-4778 Add factory methods to JavaScriptHeaderItem to create a deferred JavaScript header 
item.\n"},"old_file":{"kind":"string","value":"wicket-core/src/main/java/org/apache/wicket/markup/head/JavaScriptHeaderItem.java"},"subject":{"kind":"string","value":"WICKET-4778 Add factory methods to JavaScriptHeaderItem to create a deferred JavaScript header item."},"git_diff":{"kind":"string","value":"icket-core/src/main/java/org/apache/wicket/markup/head/JavaScriptHeaderItem.java\n */\n package org.apache.wicket.markup.head;\n \nimport org.apache.wicket.core.util.string.JavaScriptUtils;\n import org.apache.wicket.request.Response;\n import org.apache.wicket.request.mapper.parameter.PageParameters;\n import org.apache.wicket.request.resource.ResourceReference;\n import org.apache.wicket.util.lang.Args;\nimport org.apache.wicket.core.util.string.JavaScriptUtils;\n import org.apache.wicket.util.string.Strings;\n \n /**\n \t * {@code null} or empty string for no condition.\n \t */\n \tprivate final String condition;\n\t\n\n \tprotected JavaScriptHeaderItem(String condition)\n \t{\n \t\tthis.condition = condition;\n \t/**\n \t * Creates a {@link JavaScriptReferenceHeaderItem} for the given reference.\n \t * \n\t * @param reference\n\t * resource reference pointing to the JavaScript resource\n\t * @param id\n\t * id that will be used to filter duplicate reference (it's still filtered by URL\n\t * too)\n\t * @param defer\n\t * specifies that the execution of a script should be deferred (delayed) until after\n\t * the page has been loaded.\n\t * @return A newly created {@link JavaScriptReferenceHeaderItem} for the given reference.\n\t */\n\tpublic static JavaScriptReferenceHeaderItem forReference(ResourceReference reference,\n\t\tString id, boolean defer)\n\t{\n\t\treturn forReference(reference, null, id, defer, null);\n\t}\n\n\t/**\n\t * Creates a {@link JavaScriptReferenceHeaderItem} for the given reference.\n\t *\n\t * @param reference\n\t * resource reference pointing to the JavaScript resource\n\t * @param defer\n\t * specifies that the execution of a script 
should be deferred (delayed) until after\n\t * the page has been loaded.\n\t * @return A newly created {@link JavaScriptReferenceHeaderItem} for the given reference.\n\t */\n\tpublic static JavaScriptReferenceHeaderItem forReference(ResourceReference reference,\n\t\tboolean defer)\n\t{\n\t\treturn forReference(reference, null, null, defer, null);\n\t}\n\n\t/**\n\t * Creates a {@link JavaScriptReferenceHeaderItem} for the given reference.\n\t *\n \t * @param reference\n \t * resource reference pointing to the javascript resource\n \t * @param pageParameters\n \t\tString id, boolean defer, String charset, String condition)\n \t{\n \t\tArgs.notEmpty(url, \"url\");\n\t\t\n\t\tboolean hasCondition = Strings.isEmpty(condition) == false; \n\n\t\tboolean hasCondition = Strings.isEmpty(condition) == false;\n \t\tif (hasCondition)\n \t\t{\n \t\t\tresponse.write(\"\\n\");"}}},{"rowIdx":2059,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"mit"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"ff371f804739ff6bf8c5bd9a0865fd0f65bac0cc"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"UMM-CSci-3601-S17/digital-display-garden-iteration-3-dorfner,UMM-CSci-3601-S17/digital-display-garden-iteration-3-dorfner,UMM-CSci-3601-S17/digital-display-garden-iteration-3-dorfner,UMM-CSci-3601-S17/digital-display-garden-iteration-3-dorfner,UMM-CSci-3601-S17/digital-display-garden-iteration-4-dorfner-v2,UMM-CSci-3601-S17/digital-display-garden-iteration-4-dorfner-v2,UMM-CSci-3601-S17/digital-display-garden-iteration-4-dorfner-v2,UMM-CSci-3601-S17/digital-display-garden-iteration-4-dorfner-v2,UMM-CSci-3601-S17/digital-display-garden-iteration-4-dorfner-v2"},"new_contents":{"kind":"string","value":"package umm3601.digitalDisplayGarden;\n\nimport com.google.gson.Gson;\nimport com.mongodb.MongoClient;\nimport com.mongodb.client.*;\nimport com.mongodb.client.model.Accumulators;\nimport 
com.mongodb.client.model.Aggregates;\nimport com.mongodb.client.model.Sorts;\nimport com.mongodb.util.JSON;\nimport org.bson.BsonInvalidOperationException;\nimport org.bson.Document;\nimport org.bson.types.ObjectId;\n\nimport org.bson.conversions.Bson;\nimport org.joda.time.DateTime;\n\nimport java.io.OutputStream;\nimport java.util.Iterator;\n\nimport static com.mongodb.client.model.Filters.eq;\nimport static com.mongodb.client.model.Filters.and;\nimport static com.mongodb.client.model.Filters.exists;\nimport static com.mongodb.client.model.Projections.include;\nimport static com.mongodb.client.model.Updates.*;\nimport static com.mongodb.client.model.Projections.fields;\n\nimport java.io.IOException;\nimport java.util.*;\n\nimport static com.mongodb.client.model.Updates.push;\n\npublic class PlantController {\n\n private final MongoCollection plantCollection;\n private final MongoCollection commentCollection;\n private final MongoCollection configCollection;\n\n public PlantController(String databaseName) throws IOException {\n // Set up our server address\n // (Default host: 'localhost', default port: 27017)\n // ServerAddress testAddress = new ServerAddress();\n\n // Try connecting to the server\n //MongoClient mongoClient = new MongoClient(testAddress, credentials);\n MongoClient mongoClient = new MongoClient(); // Defaults!\n\n // Try connecting to a database\n MongoDatabase db = mongoClient.getDatabase(databaseName);\n\n plantCollection = db.getCollection(\"plants\");\n commentCollection = db.getCollection(\"comments\");\n configCollection = db.getCollection(\"config\");\n }\n\n public String getLiveUploadId() {\n try\n {\n FindIterable findIterable = configCollection.find(exists(\"liveUploadId\"));\n Iterator iterator = findIterable.iterator();\n Document doc = iterator.next();\n\n return doc.getString(\"liveUploadId\");\n }\n catch(Exception e)\n {\n e.printStackTrace();\n System.err.println(\" [hint] Database might be empty? 
Couldn't getLiveUploadId\");\n throw e;\n }\n }\n\n // List plants\n public String listPlants(Map queryParams, String uploadId) {\n Document filterDoc = new Document();\n filterDoc.append(\"uploadId\", uploadId);\n\n if (queryParams.containsKey(\"gardenLocation\")) {\n String location =(queryParams.get(\"gardenLocation\")[0]);\n filterDoc = filterDoc.append(\"gardenLocation\", location);\n }\n\n\n if (queryParams.containsKey(\"commonName\")) {\n String commonName =(queryParams.get(\"commonName\")[0]);\n filterDoc = filterDoc.append(\"commonName\", commonName);\n }\n\n FindIterable matchingPlants = plantCollection.find(filterDoc);\n\n return JSON.serialize(matchingPlants);\n }\n\n /**\n * Takes a String representing an ID number of a plant\n * and when the ID is found in the database returns a JSON document\n * as a String of the following form\n *\n * \n * {\n * \"plantID\" : String,\n * \"commonName\" : String,\n * \"cultivar\" : String\n * }\n * \n *\n * If the ID is invalid or not found, the following JSON value is\n * returned\n *\n * \n * null\n * \n *\n * @param plantID an ID number of a plant in the DB\n * @param uploadID Dataset to find the plant\n * @return a string representation of a JSON value\n */\n public String getPlantByPlantID(String plantID, String uploadID) {\n\n FindIterable jsonPlant;\n String returnVal;\n try {\n\n jsonPlant = plantCollection.find(and(eq(\"id\", plantID),\n eq(\"uploadId\", uploadID)))\n .projection(fields(include(\"commonName\", \"cultivar\")));\n\n Iterator iterator = jsonPlant.iterator();\n\n if (iterator.hasNext()) {\n incrementMetadata(plantID, \"pageViews\");\n addVisit(plantID);\n returnVal = iterator.next().toJson();\n } else {\n returnVal = \"null\";\n }\n\n } catch (IllegalArgumentException e) {\n returnVal = \"null\";\n }\n\n return returnVal;\n\n }\n\n /**\n *\n * @param plantID The plant to get feedback of\n * @param uploadID Dataset to find the plant\n *\n * @return JSON for the number of interactions of a plant 
(likes + dislikes + comments)\n * Of the form:\n * {\n * interactionCount: number\n * }\n */\n\n public String getFeedbackForPlantByPlantID(String plantID, String uploadID) {\n Document out = new Document();\n\n Document filter = new Document();\n filter.put(\"commentOnPlant\", plantID);\n filter.put(\"uploadId\", uploadID);\n long comments = commentCollection.count(filter);\n long likes = 0;\n long dislikes = 0;\n long interactions = 0;\n\n\n //Get a plant by plantID\n FindIterable doc = plantCollection.find(new Document().append(\"id\", plantID).append(\"uploadId\", uploadID));\n\n Iterator iterator = doc.iterator();\n if(iterator.hasNext()) {\n Document result = (Document) iterator.next();\n\n //Get metadata.rating array\n List ratings = (List) ((Document) result.get(\"metadata\")).get(\"ratings\");\n\n //Loop through all of the entries within the array, counting like=true(like) and like=false(dislike)\n for(Document rating : ratings)\n {\n if(rating.get(\"like\").equals(true))\n likes++;\n else if(rating.get(\"like\").equals(false))\n dislikes++;\n }\n }\n\n interactions = likes + dislikes + comments;\n\n out.put(\"interactionCount\", interactions);\n return JSON.serialize(out);\n }\n\n public String getGardenLocationsAsJson(String uploadID){\n AggregateIterable documents\n = plantCollection.aggregate(\n Arrays.asList(\n Aggregates.match(eq(\"uploadId\", uploadID)), //!! 
Order is important here\n Aggregates.group(\"$gardenLocation\"),\n Aggregates.sort(Sorts.ascending(\"_id\"))\n ));\n return JSON.serialize(documents);\n }\n\n public String[] getGardenLocations(String uploadID){\n Document filter = new Document();\n filter.append(\"uploadId\", uploadID);\n DistinctIterable bedIterator = plantCollection.distinct(\"gardenLocation\", filter, String.class);\n List beds = new ArrayList();\n for(String s : bedIterator)\n {\n beds.add(s);\n }\n return beds.toArray(new String[beds.size()]);\n }\n\n /**\n * Accepts string representation of JSON object containing\n * at least the following.\n * \n * {\n * plantId: String,\n * comment: String\n * }\n * \n * If either of the keys are missing or the types of the values are\n * wrong, false is returned.\n * @param json string representation of JSON object\n * @param uploadID Dataset to find the plant\n * @return true iff the comment was successfully submitted\n */\n\n public boolean storePlantComment(String json, String uploadID) {\n\n try {\n\n Document toInsert = new Document();\n Document parsedDocument = Document.parse(json);\n\n if (parsedDocument.containsKey(\"plantId\") && parsedDocument.get(\"plantId\") instanceof String) {\n\n FindIterable jsonPlant = plantCollection.find(eq(\"_id\",\n new ObjectId(parsedDocument.getString(\"plantId\"))));\n\n Iterator iterator = jsonPlant.iterator();\n\n if(iterator.hasNext()){\n toInsert.put(\"commentOnPlant\", iterator.next().getString(\"id\"));\n } else {\n return false;\n }\n\n } else {\n return false;\n }\n\n if (parsedDocument.containsKey(\"comment\") && parsedDocument.get(\"comment\") instanceof String) {\n toInsert.put(\"comment\", parsedDocument.getString(\"comment\"));\n } else {\n return false;\n }\n\n toInsert.append(\"uploadId\", uploadID);\n\n commentCollection.insertOne(toInsert);\n\n } catch (BsonInvalidOperationException e){\n e.printStackTrace();\n return false;\n } catch (org.bson.json.JsonParseException e){\n return false;\n } catch 
(IllegalArgumentException e){\n return false;\n }\n\n return true;\n }\n\n public void writeComments(OutputStream outputStream, String uploadId) throws IOException{\n\n FindIterable iter = commentCollection.find(\n and(\n exists(\"commentOnPlant\"),\n eq(\"uploadId\", uploadId)\n ));\n Iterator iterator = iter.iterator();\n\n CommentWriter commentWriter = new CommentWriter(outputStream);\n\n while (iterator.hasNext()) {\n Document comment = (Document) iterator.next();\n commentWriter.writeComment(comment.getString(\"commentOnPlant\"),\n comment.getString(\"comment\"),\n ((ObjectId) comment.get(\"_id\")).getDate());\n }\n commentWriter.complete();\n }\n\n /**\n * Adds a like or dislike to the specified plant.\n *\n * @param id a hexstring specifiying the oid\n * @param like true if this is a like, false if this is a dislike\n * @param uploadID Dataset to find the plant\n * @return true iff the operation succeeded.\n */\n\n public boolean addFlowerRating(String id, boolean like, String uploadID) {\n\n Document filterDoc = new Document();\n\n ObjectId objectId;\n\n try {\n objectId = new ObjectId(id);\n } catch (IllegalArgumentException e) {\n return false;\n }\n\n filterDoc.append(\"_id\", new ObjectId(id));\n filterDoc.append(\"uploadId\", uploadID);\n\n Document rating = new Document();\n rating.append(\"like\", like);\n rating.append(\"ratingOnObjectOfId\", objectId);\n\n return null != plantCollection.findOneAndUpdate(filterDoc, push(\"metadata.ratings\", rating));\n }\n\n /**\n * Accepts string representation of JSON object containing\n * at least the following:\n * \n * {\n * id: String,\n * like: boolean\n * }\n * \n *\n * @param json string representation of a JSON object\n * @param uploadID Dataset to find the plant\n * @return true iff the operation succeeded.\n */\n\n public boolean addFlowerRating(String json, String uploadID){\n boolean like;\n String id;\n\n try {\n\n Document parsedDocument = Document.parse(json);\n\n 
if(parsedDocument.containsKey(\"id\") && parsedDocument.get(\"id\") instanceof String){\n id = parsedDocument.getString(\"id\");\n } else {\n return false;\n }\n\n if(parsedDocument.containsKey(\"like\") && parsedDocument.get(\"like\") instanceof Boolean){\n like = parsedDocument.getBoolean(\"like\");\n } else {\n return false;\n }\n\n } catch (BsonInvalidOperationException e){\n e.printStackTrace();\n return false;\n } catch (org.bson.json.JsonParseException e){\n return false;\n }\n\n return addFlowerRating(id, like, uploadID);\n }\n\n /**\n *\n * @return a sorted JSON array of all the distinct uploadIds in the DB\n */\n public String listUploadIds() {\n AggregateIterable documents\n = plantCollection.aggregate(\n Arrays.asList(\n Aggregates.group(\"$uploadId\"),\n Aggregates.sort(Sorts.ascending(\"_id\"))\n ));\n List lst = new LinkedList<>();\n for(Document d: documents) {\n lst.add(d.getString(\"_id\"));\n }\n return JSON.serialize(lst);\n// return JSON.serialize(plantCollection.distinct(\"uploadId\",\"\".getClass()));\n }\n\n\n\n\n\n\n /**\n * Finds a plant and atomically increments the specified field\n * in its metadata object. 
This method returns true if the plant was\n * found successfully (false otherwise), but there is no indication of\n * whether the field was found.\n *\n * @param plantID a ID number of a plant in the DB\n * @param field a field to be incremented in the metadata object of the plant\n * @return true if a plant was found\n * @throws com.mongodb.MongoCommandException when the id is valid and the field is empty\n */\n public boolean incrementMetadata(String plantID, String field) {\n\n Document searchDocument = new Document();\n searchDocument.append(\"id\", plantID);\n\n Bson updateDocument = inc(\"metadata.\" + field, 1);\n\n return null != plantCollection.findOneAndUpdate(searchDocument, updateDocument);\n }\n public boolean addVisit(String plantID) {\n\n Document filterDoc = new Document();\n filterDoc.append(\"id\", plantID);\n\n Document visit = new Document();\n visit.append(\"visit\", new ObjectId());\n\n return null != plantCollection.findOneAndUpdate(filterDoc, push(\"metadata.visits\", visit));\n }\n\n}"},"new_file":{"kind":"string","value":"server/src/main/java/umm3601/digitalDisplayGarden/PlantController.java"},"old_contents":{"kind":"string","value":"package umm3601.digitalDisplayGarden;\n\nimport com.google.gson.Gson;\nimport com.mongodb.MongoClient;\nimport com.mongodb.client.*;\nimport com.mongodb.client.model.Accumulators;\nimport com.mongodb.client.model.Aggregates;\nimport com.mongodb.client.model.Sorts;\nimport com.mongodb.util.JSON;\nimport org.bson.BsonInvalidOperationException;\nimport org.bson.Document;\nimport org.bson.types.ObjectId;\n\nimport org.bson.conversions.Bson;\nimport org.joda.time.DateTime;\n\nimport java.io.OutputStream;\nimport java.util.Iterator;\n\nimport static com.mongodb.client.model.Filters.eq;\nimport static com.mongodb.client.model.Filters.and;\nimport static com.mongodb.client.model.Filters.exists;\nimport static com.mongodb.client.model.Projections.include;\nimport static com.mongodb.client.model.Updates.*;\nimport 
static com.mongodb.client.model.Projections.fields;\n\nimport java.io.IOException;\nimport java.util.*;\n\nimport static com.mongodb.client.model.Updates.push;\n\npublic class PlantController {\n\n private final MongoCollection plantCollection;\n private final MongoCollection commentCollection;\n private final MongoCollection configCollection;\n\n public PlantController(String databaseName) throws IOException {\n // Set up our server address\n // (Default host: 'localhost', default port: 27017)\n // ServerAddress testAddress = new ServerAddress();\n\n // Try connecting to the server\n //MongoClient mongoClient = new MongoClient(testAddress, credentials);\n MongoClient mongoClient = new MongoClient(); // Defaults!\n\n // Try connecting to a database\n MongoDatabase db = mongoClient.getDatabase(databaseName);\n\n plantCollection = db.getCollection(\"plants\");\n commentCollection = db.getCollection(\"comments\");\n configCollection = db.getCollection(\"config\");\n }\n\n public String getLiveUploadId() {\n try\n {\n FindIterable findIterable = configCollection.find(exists(\"liveUploadId\"));\n Iterator iterator = findIterable.iterator();\n Document doc = iterator.next();\n\n return doc.getString(\"liveUploadId\");\n }\n catch(Exception e)\n {\n e.printStackTrace();\n System.err.println(\" [hint] Database might be empty? 
Couldn't getLiveUploadId\");\n throw e;\n }\n }\n\n // List plants\n public String listPlants(Map queryParams, String uploadId) {\n Document filterDoc = new Document();\n filterDoc.append(\"uploadId\", uploadId);\n\n if (queryParams.containsKey(\"gardenLocation\")) {\n String location =(queryParams.get(\"gardenLocation\")[0]);\n filterDoc = filterDoc.append(\"gardenLocation\", location);\n }\n\n\n if (queryParams.containsKey(\"commonName\")) {\n String commonName =(queryParams.get(\"commonName\")[0]);\n filterDoc = filterDoc.append(\"commonName\", commonName);\n }\n\n FindIterable matchingPlants = plantCollection.find(filterDoc);\n\n return JSON.serialize(matchingPlants);\n }\n\n /**\n * Takes a String representing an ID number of a plant\n * and when the ID is found in the database returns a JSON document\n * as a String of the following form\n *\n * \n * {\n * \"plantID\" : String,\n * \"commonName\" : String,\n * \"cultivar\" : String\n * }\n * \n *\n * If the ID is invalid or not found, the following JSON value is\n * returned\n *\n * \n * null\n * \n *\n * @param plantID an ID number of a plant in the DB\n * @param uploadID Dataset to find the plant\n * @return a string representation of a JSON value\n */\n public String getPlantByPlantID(String plantID, String uploadID) {\n\n FindIterable jsonPlant;\n String returnVal;\n try {\n\n jsonPlant = plantCollection.find(and(eq(\"id\", plantID),\n eq(\"uploadId\", uploadID)))\n .projection(fields(include(\"commonName\", \"cultivar\")));\n\n Iterator iterator = jsonPlant.iterator();\n\n if (iterator.hasNext()) {\n incrementMetadata(plantID, \"pageViews\");\n addVisit(plantID);\n returnVal = iterator.next().toJson();\n } else {\n returnVal = \"null\";\n }\n\n } catch (IllegalArgumentException e) {\n returnVal = \"null\";\n }\n\n return returnVal;\n\n }\n\n /**\n *\n * @param plantID The plant to get feedback of\n * @param uploadID Dataset to find the plant\n *\n * @return JSON for the number of comments, likes, and 
dislikes\n * Of the form:\n * {\n * commentCount: number\n * likeCount: number\n * dislikeCount: number\n * }\n */\n\n public String getFeedbackForPlantByPlantID(String plantID, String uploadID) {\n Document out = new Document();\n\n Document filter = new Document();\n filter.put(\"commentOnPlant\", plantID);\n filter.put(\"uploadId\", uploadID);\n long comments = commentCollection.count(filter);\n long likes = 0;\n long dislikes = 0;\n long interactions = 0;\n\n\n //Get a plant by plantID\n FindIterable doc = plantCollection.find(new Document().append(\"id\", plantID).append(\"uploadId\", uploadID));\n\n Iterator iterator = doc.iterator();\n if(iterator.hasNext()) {\n Document result = (Document) iterator.next();\n\n //Get metadata.rating array\n List ratings = (List) ((Document) result.get(\"metadata\")).get(\"ratings\");\n\n //Loop through all of the entries within the array, counting like=true(like) and like=false(dislike)\n for(Document rating : ratings)\n {\n if(rating.get(\"like\").equals(true))\n likes++;\n else if(rating.get(\"like\").equals(false))\n dislikes++;\n }\n }\n\n interactions = likes + dislikes + comments;\n\n out.put(\"interactionCount\", interactions);\n return JSON.serialize(out);\n }\n\n public String getGardenLocationsAsJson(String uploadID){\n AggregateIterable documents\n = plantCollection.aggregate(\n Arrays.asList(\n Aggregates.match(eq(\"uploadId\", uploadID)), //!! 
Order is important here\n Aggregates.group(\"$gardenLocation\"),\n Aggregates.sort(Sorts.ascending(\"_id\"))\n ));\n return JSON.serialize(documents);\n }\n\n public String[] getGardenLocations(String uploadID){\n Document filter = new Document();\n filter.append(\"uploadId\", uploadID);\n DistinctIterable bedIterator = plantCollection.distinct(\"gardenLocation\", filter, String.class);\n List beds = new ArrayList();\n for(String s : bedIterator)\n {\n beds.add(s);\n }\n return beds.toArray(new String[beds.size()]);\n }\n\n /**\n * Accepts string representation of JSON object containing\n * at least the following.\n * \n * {\n * plantId: String,\n * comment: String\n * }\n * \n * If either of the keys are missing or the types of the values are\n * wrong, false is returned.\n * @param json string representation of JSON object\n * @param uploadID Dataset to find the plant\n * @return true iff the comment was successfully submitted\n */\n\n public boolean storePlantComment(String json, String uploadID) {\n\n try {\n\n Document toInsert = new Document();\n Document parsedDocument = Document.parse(json);\n\n if (parsedDocument.containsKey(\"plantId\") && parsedDocument.get(\"plantId\") instanceof String) {\n\n FindIterable jsonPlant = plantCollection.find(eq(\"_id\",\n new ObjectId(parsedDocument.getString(\"plantId\"))));\n\n Iterator iterator = jsonPlant.iterator();\n\n if(iterator.hasNext()){\n toInsert.put(\"commentOnPlant\", iterator.next().getString(\"id\"));\n } else {\n return false;\n }\n\n } else {\n return false;\n }\n\n if (parsedDocument.containsKey(\"comment\") && parsedDocument.get(\"comment\") instanceof String) {\n toInsert.put(\"comment\", parsedDocument.getString(\"comment\"));\n } else {\n return false;\n }\n\n toInsert.append(\"uploadId\", uploadID);\n\n commentCollection.insertOne(toInsert);\n\n } catch (BsonInvalidOperationException e){\n e.printStackTrace();\n return false;\n } catch (org.bson.json.JsonParseException e){\n return false;\n } catch 
(IllegalArgumentException e){\n return false;\n }\n\n return true;\n }\n\n public void writeComments(OutputStream outputStream, String uploadId) throws IOException{\n\n FindIterable iter = commentCollection.find(\n and(\n exists(\"commentOnPlant\"),\n eq(\"uploadId\", uploadId)\n ));\n Iterator iterator = iter.iterator();\n\n CommentWriter commentWriter = new CommentWriter(outputStream);\n\n while (iterator.hasNext()) {\n Document comment = (Document) iterator.next();\n commentWriter.writeComment(comment.getString(\"commentOnPlant\"),\n comment.getString(\"comment\"),\n ((ObjectId) comment.get(\"_id\")).getDate());\n }\n commentWriter.complete();\n }\n\n /**\n * Adds a like or dislike to the specified plant.\n *\n * @param id a hexstring specifiying the oid\n * @param like true if this is a like, false if this is a dislike\n * @param uploadID Dataset to find the plant\n * @return true iff the operation succeeded.\n */\n\n public boolean addFlowerRating(String id, boolean like, String uploadID) {\n\n Document filterDoc = new Document();\n\n ObjectId objectId;\n\n try {\n objectId = new ObjectId(id);\n } catch (IllegalArgumentException e) {\n return false;\n }\n\n filterDoc.append(\"_id\", new ObjectId(id));\n filterDoc.append(\"uploadId\", uploadID);\n\n Document rating = new Document();\n rating.append(\"like\", like);\n rating.append(\"ratingOnObjectOfId\", objectId);\n\n return null != plantCollection.findOneAndUpdate(filterDoc, push(\"metadata.ratings\", rating));\n }\n\n /**\n * Accepts string representation of JSON object containing\n * at least the following:\n * \n * {\n * id: String,\n * like: boolean\n * }\n * \n *\n * @param json string representation of a JSON object\n * @param uploadID Dataset to find the plant\n * @return true iff the operation succeeded.\n */\n\n public boolean addFlowerRating(String json, String uploadID){\n boolean like;\n String id;\n\n try {\n\n Document parsedDocument = Document.parse(json);\n\n 
if(parsedDocument.containsKey(\"id\") && parsedDocument.get(\"id\") instanceof String){\n id = parsedDocument.getString(\"id\");\n } else {\n return false;\n }\n\n if(parsedDocument.containsKey(\"like\") && parsedDocument.get(\"like\") instanceof Boolean){\n like = parsedDocument.getBoolean(\"like\");\n } else {\n return false;\n }\n\n } catch (BsonInvalidOperationException e){\n e.printStackTrace();\n return false;\n } catch (org.bson.json.JsonParseException e){\n return false;\n }\n\n return addFlowerRating(id, like, uploadID);\n }\n\n /**\n *\n * @return a sorted JSON array of all the distinct uploadIds in the DB\n */\n public String listUploadIds() {\n AggregateIterable documents\n = plantCollection.aggregate(\n Arrays.asList(\n Aggregates.group(\"$uploadId\"),\n Aggregates.sort(Sorts.ascending(\"_id\"))\n ));\n List lst = new LinkedList<>();\n for(Document d: documents) {\n lst.add(d.getString(\"_id\"));\n }\n return JSON.serialize(lst);\n// return JSON.serialize(plantCollection.distinct(\"uploadId\",\"\".getClass()));\n }\n\n\n\n\n\n\n /**\n * Finds a plant and atomically increments the specified field\n * in its metadata object. 
This method returns true if the plant was\n * found successfully (false otherwise), but there is no indication of\n * whether the field was found.\n *\n * @param plantID a ID number of a plant in the DB\n * @param field a field to be incremented in the metadata object of the plant\n * @return true if a plant was found\n * @throws com.mongodb.MongoCommandException when the id is valid and the field is empty\n */\n public boolean incrementMetadata(String plantID, String field) {\n\n Document searchDocument = new Document();\n searchDocument.append(\"id\", plantID);\n\n Bson updateDocument = inc(\"metadata.\" + field, 1);\n\n return null != plantCollection.findOneAndUpdate(searchDocument, updateDocument);\n }\n public boolean addVisit(String plantID) {\n\n Document filterDoc = new Document();\n filterDoc.append(\"id\", plantID);\n\n Document visit = new Document();\n visit.append(\"visit\", new ObjectId());\n\n return null != plantCollection.findOneAndUpdate(filterDoc, push(\"metadata.visits\", visit));\n }\n\n}"},"message":{"kind":"string","value":"Changed comment on Feedback method\nIssue #5\n"},"old_file":{"kind":"string","value":"server/src/main/java/umm3601/digitalDisplayGarden/PlantController.java"},"subject":{"kind":"string","value":"Changed comment on Feedback method Issue #5"},"git_diff":{"kind":"string","value":"erver/src/main/java/umm3601/digitalDisplayGarden/PlantController.java\n * @param plantID The plant to get feedback of\n * @param uploadID Dataset to find the plant\n *\n * @return JSON for the number of comments, likes, and dislikes\n * @return JSON for the number of interactions of a plant (likes + dislikes + comments)\n * Of the form:\n * {\n * commentCount: number\n * likeCount: number\n * dislikeCount: number\n * interactionCount: number\n * }\n */\n 
"}}},{"rowIdx":2060,"cells":{"lang":{"kind":"string","value":"JavaScript"},"license":{"kind":"string","value":"apache-2.0"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"17efa2814f1c7fdb0f6454ae8578d8fba1f18e34"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"Esri/geotrigger-editor,Esri/geotrigger-editor"},"new_contents":{"kind":"string","value":"GeotriggerEditor.module('Editor', function(Editor, App, Backbone, Marionette, $, _) {\n\n // Editor Router\n // ---------------\n //\n // Handle routes to show the active vs complete todo items\n\n var Router = Marionette.AppRouter.extend({\n appRoutes: {\n '': 'index',\n 'list': 'list',\n 'list/:term': 'list',\n 'new': 'create',\n 'edit/:id': 'edit',\n '*notfound': 'notFound'\n }\n });\n\n // Editor Controller (Mediator)\n // ------------------------------\n //\n // Control the workflow and logic that exists at the application\n // level, above the implementation detail of views and models\n\n var Controller = function() {};\n\n _.extend(Controller.prototype, {\n\n // initialization\n start: function() {\n this.setup();\n\n App.vent.trigger('notify', 'Fetching application data..');\n\n if (window.location.hash.match('edit')) {\n App._zoomToLayer = true;\n }\n\n App.collections.triggers.fetch({\n fetch: true,\n reset: true,\n success: function (model, response, options) {\n App.vent.trigger('notify:clear');\n\n // don't start history until triggers have been fetched\n Backbone.history.start();\n\n if (response && response.length === 0) {\n App.router.navigate('list', { trigger: true });\n }\n\n else if (App.config.fitOnLoad && !Backbone.history.fragment.match('edit')) {\n App.execute('map:fit');\n }\n }\n });\n\n App.vent.on('draw:new', function(options){\n if (Backbone.history.fragment === 'new' ||\n Backbone.history.fragment.match('edit')) {\n } else {\n App.router.navigate('new', { trigger: true });\n }\n }, this);\n\n App.vent.on('trigger:create', 
this.createTrigger, this);\n App.vent.on('trigger:update', this.updateTrigger, this);\n App.vent.on('trigger:destroy', this.deleteTrigger, this);\n },\n\n // setup\n\n setup: function() {\n this.setupMap();\n this.setupDrawer();\n this.setupControls();\n this.setupNotifications();\n },\n\n setupMap: function() {\n var view = new App.Views.Map({ collection: App.collections.triggers });\n App.regions.map.show(view);\n },\n\n setupDrawer: function() {\n var drawer = App.regions.drawer;\n var content = App.mainRegion.$el.find('#gt-content');\n\n drawer.on('show', function(){\n content.addClass('gt-active');\n });\n\n drawer.on('close', function(){\n content.removeClass('gt-active');\n });\n },\n\n setupControls: function() {\n var view = new App.Views.Controls();\n App.regions.controls.show(view);\n },\n\n setupNotifications: function() {\n var view = new App.Views.NotificationList({\n collection: App.collections.notifications\n });\n\n App.regions.notes.show(view);\n\n App.vent.on('notify', function(options){\n if (typeof options === 'string') {\n options = {\n type: 'info',\n message: options\n };\n }\n\n var note = new App.Models.Notification(options);\n App.collections.notifications.add(note);\n }, this);\n },\n\n // routes\n\n index: function() {\n App.vent.trigger('index');\n App.regions.drawer.close();\n },\n\n list: function(term) {\n if (!App.regions.drawer.$el || !App.regions.drawer.$el.has('.gt-list').length) {\n App.vent.trigger('trigger:list');\n var model = new Backbone.Model({ count: App.collections.triggers.length });\n var view = new App.Views.List({ model: model, collection: App.collections.triggers });\n App.regions.drawer.show(view);\n } else if (!term) {\n App.vent.trigger('trigger:list:reset');\n }\n\n if (term) {\n term = decodeURIComponent(term.replace(/\\+/g,'%20'));\n App.vent.trigger('trigger:list:search', term);\n }\n },\n\n create: function() {\n App.vent.trigger('trigger:new');\n\n var view = new App.Views.Form();\n 
App.regions.drawer.show(view);\n\n App.vent.trigger('trigger:new:ready');\n },\n\n edit: function(triggerId) {\n var model = this.getTrigger(triggerId);\n\n if (!model) {\n App.vent.trigger('notify', {\n type: 'error',\n message: 'That trigger doesn\\'t exist!'\n });\n } else {\n var view = new App.Views.Form({ model: model });\n App.regions.drawer.show(view);\n App.vent.trigger('trigger:edit', triggerId);\n view.parseShape();\n }\n },\n\n notFound: function() {\n App.vent.trigger('notify', {\n type: 'error',\n message: 'Couldn\\'t find page: \"' + Backbone.history.fragment + '\"'\n });\n },\n\n // crud\n\n createTrigger: function(triggerData) {\n App.execute('draw:clear');\n App.collections.triggers.create(triggerData, {\n // wait: true, // wait is broken in backbone 1.1.0\n success: function() {\n App.router.navigate('list', { trigger: true });\n }\n });\n },\n\n getTrigger: function(id) {\n var model = App.collections.triggers.findWhere({'triggerId':id});\n return model;\n },\n\n updateTrigger: function(triggerData) {\n App.collections.triggers.once('change', function(data){\n App.router.navigate('list', { trigger: true });\n });\n var model = App.collections.triggers.findWhere({'triggerId':triggerData.triggerId});\n model.set(triggerData);\n model.set('id', model.get('triggerId')); // hack to ensure proper method\n model.save();\n },\n\n deleteTrigger: function(model) {\n App.collections.triggers.once('remove', function(data){\n if (Backbone.history.fragment.match('edit')) {\n App.router.navigate('list', { trigger: true });\n }\n });\n model.set('id', model.get('triggerId')); // hack to ensure proper method\n model.destroy();\n }\n });\n\n // Editor Initializer\n // ------------------\n //\n // Get the Editor up and running by initializing the mediator\n // when the the application is started, pulling in all of the\n // existing triggers and displaying them.\n\n Editor.addInitializer(function() {\n // initialize collections\n App.collections = App.collections 
|| {};\n App.collections.triggers = new App.Models.Triggers();\n App.collections.notifications = new App.Models.Notifications();\n\n // initialize controller\n var controller = new Controller();\n\n // initialize router\n App.router = new Router({ controller: controller });\n\n controller.start();\n });\n\n});"},"new_file":{"kind":"string","value":"src/js/controllers/editor.js"},"old_contents":{"kind":"string","value":"GeotriggerEditor.module('Editor', function(Editor, App, Backbone, Marionette, $, _) {\n\n // Editor Router\n // ---------------\n //\n // Handle routes to show the active vs complete todo items\n\n var Router = Marionette.AppRouter.extend({\n appRoutes: {\n '': 'index',\n 'list': 'list',\n 'list/:term': 'list',\n 'new': 'create',\n 'edit/:id': 'edit',\n '*notfound': 'notFound'\n }\n });\n\n // Editor Controller (Mediator)\n // ------------------------------\n //\n // Control the workflow and logic that exists at the application\n // level, above the implementation detail of views and models\n\n var Controller = function() {};\n\n _.extend(Controller.prototype, {\n\n // initialization\n start: function() {\n this.setup();\n\n App.vent.trigger('notify', 'Fetching application data..');\n\n if (window.location.hash.match('edit')) {\n App._zoomToLayer = true;\n }\n\n App.collections.triggers.fetch({\n fetch: true,\n reset: true,\n success: function (model, response, options) {\n App.vent.trigger('notify:clear');\n\n // don't start history until triggers have been fetched\n Backbone.history.start();\n\n if (response && response.length === 0) {\n App.router.navigate('list', { trigger: true });\n }\n\n else if (App.config.fitOnLoad && !Backbone.history.fragment.match('edit')) {\n App.execute('map:fit');\n }\n }\n });\n\n App.vent.on('draw:new', function(options){\n if (Backbone.history.fragment === 'new' ||\n Backbone.history.fragment.match('edit')) {\n } else {\n App.router.navigate('new', { trigger: true });\n }\n }, this);\n\n 
App.vent.on('trigger:create', this.createTrigger, this);\n App.vent.on('trigger:update', this.updateTrigger, this);\n App.vent.on('trigger:destroy', this.deleteTrigger, this);\n },\n\n // setup\n\n setup: function() {\n this.setupMap();\n this.setupDrawer();\n this.setupControls();\n this.setupNotifications();\n },\n\n setupMap: function() {\n var view = new App.Views.Map({ collection: App.collections.triggers });\n App.regions.map.show(view);\n },\n\n setupDrawer: function() {\n var drawer = App.regions.drawer;\n var content = App.mainRegion.$el.find('#gt-content');\n\n drawer.on('show', function(){\n content.addClass('gt-active');\n });\n\n drawer.on('close', function(){\n content.removeClass('gt-active');\n });\n },\n\n setupControls: function() {\n var view = new App.Views.Controls();\n App.regions.controls.show(view);\n },\n\n setupNotifications: function() {\n var view = new App.Views.NotificationList({\n collection: App.collections.notifications\n });\n\n App.regions.notes.show(view);\n\n App.vent.on('notify', function(options){\n if (typeof options === 'string') {\n options = {\n type: 'info',\n message: options\n };\n }\n\n var note = new App.Models.Notification(options);\n App.collections.notifications.add(note);\n }, this);\n },\n\n // routes\n\n index: function() {\n App.vent.trigger('index');\n App.regions.drawer.close();\n },\n\n list: function(term) {\n if (!App.regions.drawer.$el || !App.regions.drawer.$el.has('.gt-list').length) {\n App.vent.trigger('trigger:list');\n var model = new Backbone.Model({ count: App.collections.triggers.length });\n var view = new App.Views.List({ model: model, collection: App.collections.triggers });\n App.regions.drawer.show(view);\n } else if (!term) {\n App.vent.trigger('trigger:list:reset');\n }\n\n if (term) {\n term = decodeURIComponent(term.replace(/\\+/g,'%20'));\n App.vent.trigger('trigger:list:search', term);\n }\n },\n\n create: function() {\n App.vent.trigger('trigger:new');\n\n var view = new 
App.Views.Form();\n App.regions.drawer.show(view);\n\n App.vent.trigger('trigger:new:ready');\n },\n\n edit: function(triggerId) {\n var model = this.getTrigger(triggerId);\n\n if (!model) {\n App.vent.trigger('notify', {\n type: 'error',\n message: 'That trigger doesn\\'t exist!'\n });\n } else {\n var view = new App.Views.Form({ model: model });\n App.regions.drawer.show(view);\n App.vent.trigger('trigger:edit', triggerId);\n view.parseShape();\n }\n },\n\n notFound: function() {\n App.vent.trigger('notify', {\n type: 'error',\n message: 'Couldn\\'t find page: \"' + Backbone.history.fragment + '\"'\n });\n },\n\n // crud\n\n createTrigger: function(triggerData) {\n App.execute('draw:clear');\n App.collections.triggers.create(triggerData, {\n // wait: true, // wait is broken in backbone 1.1.0\n success: function() {\n App.router.navigate('list', { trigger: true });\n }\n });\n },\n\n getTrigger: function(id) {\n var model = App.collections.triggers.findWhere({'triggerId':id});\n return model;\n },\n\n updateTrigger: function(triggerData) {\n App.collections.triggers.once('change', function(data){\n App.router.navigate('list', { trigger: true });\n });\n var model = App.collections.triggers.findWhere({'triggerId':triggerData.triggerId});\n model.set(triggerData);\n model.save();\n },\n\n deleteTrigger: function(model) {\n App.collections.triggers.once('remove', function(data){\n if (Backbone.history.fragment.match('edit')) {\n App.router.navigate('list', { trigger: true });\n }\n });\n model.destroy();\n }\n });\n\n // Editor Initializer\n // ------------------\n //\n // Get the Editor up and running by initializing the mediator\n // when the the application is started, pulling in all of the\n // existing triggers and displaying them.\n\n Editor.addInitializer(function() {\n // initialize collections\n App.collections = App.collections || {};\n App.collections.triggers = new App.Models.Triggers();\n App.collections.notifications = new 
App.Models.Notifications();\n\n // initialize controller\n var controller = new Controller();\n\n // initialize router\n App.router = new Router({ controller: controller });\n\n controller.start();\n });\n\n});"},"message":{"kind":"string","value":"fix broken delete/update\n"},"old_file":{"kind":"string","value":"src/js/controllers/editor.js"},"subject":{"kind":"string","value":"fix broken delete/update"},"git_diff":{"kind":"string","value":"rc/js/controllers/editor.js\n });\n var model = App.collections.triggers.findWhere({'triggerId':triggerData.triggerId});\n model.set(triggerData);\n model.set('id', model.get('triggerId')); // hack to ensure proper method\n model.save();\n },\n \n App.router.navigate('list', { trigger: true });\n }\n });\n model.set('id', model.get('triggerId')); // hack to ensure proper method\n model.destroy();\n }\n });"}}},{"rowIdx":2061,"cells":{"lang":{"kind":"string","value":"JavaScript"},"license":{"kind":"string","value":"mit"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"9db05e9cae85d826995dec9c80345cadaa981f11"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"pratikd650/colorWheel,pratikd650/colorWheel"},"new_contents":{"kind":"string","value":"var globalColorWheel;\nvar timersList = [];\nvar outerWheel, innerWheel;\n\nvar count = 0;\nfunction callTimerCallbacks() {\n count = (count + 1) % 60;\n for(var i = 0; i < timersList.length; i++) {\n timersList[i](count);\n }\n}\n\n//---------------------------------------------------------------------------------\nvar Led = React.createClass({\n getInitialState:function() {\n return {rgb:{r:255, g:0, b:0}}; // Set it to red\n },\n \n setLed: function() {\n this.setState({rgb:globalColorWheel.state.rgb});\n },\n\n render: function(){\n var x = this.props.x;\n var y = this.props.y;\n var a = this.props.angle;\n var thickness = this.props.thickness;\n var dx = Math.round(Math.cos(a) * (thickness-2));\n var dy = 
Math.round(Math.sin(a) * (thickness-2));\n var rgb = this.state.rgb;\n \n return(\n );\n }\n})\n \n//---------------------------------------------------------------------------------\nvar LedOneWheel = React.createClass({\n getInitialState:function() {\n return {speed:0, angle:0, counter:60};\n },\n\n changeSpeed:function(speedInc) {\n if ((speedInc == 1 && this.state.speed < 1) \n || (speedInc == -1 && this.state.speed > -1)) {\n console.log(\"LedOneWheel n=\", this.props.n, \" speedInc=\", speedInc);\n this.setState({speed:this.state.speed + speedInc})\n }\n },\n \n tick:function(count) {\n if (this.state.speed == 0)\n return;\n if (count==0 || count % this.state.counter == 0) {\n var a = this.state.angle;\n a = (a + this.state.speed) % this.props.n;\n this.setState({angle:a});\n }\n },\n \n componentDidMount: function() {\n timersList.push(this.tick); \n console.log(\"Adding timer for LedOneWheel\");\n }, \n \n componentWillUnmount: function() {\n var index = timersList.indexof(this.tick); \n if (index > -1) timersList.splice( index, 1 );\n console.log(\"Removing timer for LedOneWheel\");\n },\n \n render:function() {\n var n = this.props.n;\n var radius = this.props.radius;\n var r = this.props.r;\n var thickness = this.props.thickness;\n var r2 = r - thickness;\n var leds = [];\n for(var i = 0; i < n; i++) {\n var a1 = Math.PI * 2 * i / n;\n var x = radius + Math.round(Math.cos(a1)*r);\n var y = radius - Math.round(Math.sin(a1)*r);\n \n leds.push();\n }\n return ({leds});\n }\n})\n\n//---------------------------------------------------------------------------------\nvar LedWheel = React.createClass({\n // The state is minumum of the radius sepcified in the props, and the available radius\n getInitialState:function() {\n console.log(\"LedWheel.getInitialState\", this.props.radius);\n return {radius:this.props.radius};\n },\n\n computeAvailableRadius:function() {\n console.log(\"LedWheel.computeAvailableRadius\", this.props.radius);\n if (this.elem) {\n // 
calculate parent's width - padding\n var p = this.elem.parentNode;\n var s= window.getComputedStyle(p);\n var w = p.clientWidth - parseFloat(s.paddingLeft) - parseFloat(s.paddingLeft); // Need parseFloat to get rid of px in 14px\n // Divide width by 2, and leave off an extra pixel\n var r = Math.min(this.props.radius, Math.round(w/2));\n console.log(\"Computed Radius\", r);\n this.setState({radius:r})\n } \n },\n \n handleResize: function(e) {\n this.computeAvailableRadius();\n },\n\n componentDidMount: function() {\n this.computeAvailableRadius();\n window.addEventListener('resize', 100, this.handleResize);\n },\n\n componentWillUnmount: function() {\n window.removeEventListener('resize', this.handleResize);\n },\n\n getDefaultProps: function() {\n return { radius:200 };\n },\n\n // thickeness is calculated from radius as follows\n // radius^2 = thickness/2 ^2 + (r1 + thickness)^2\n // thickness/2 / r1 = tan(PI/24)\n // Solving\n // r1 = thickness / (2*tan(PI/24))\n // radius^2 = thickness^2 * ( (1/2)^2 + (1/(2*tan(PI/24) + 1)^2 )\n\n render: function() {\n var radius = this.state.radius-1;\n var thickness = radius / Math.sqrt(0.25 + Math.pow(1 + (1/(2*Math.tan(Math.PI/24))), 2) );\n var r1 = thickness / (2 * Math.tan(Math.PI/24));\n var r2 = thickness / (2 * Math.tan(Math.PI/12));\n\n var self = this;\n return (\n \n \n );\n } \n})\n\n//---------------------------------------------------------------------------------\nvar HueSquare = React.createClass({\n getInitialState: function() {\n return {hue:0}; // initial hue is 0, inicial color is red\n },\n\n getDefaultProps: function() {\n return { n: 8, radius:100, thickness:30 };\n },\n \n render: function() {\n var n = this.props.n;\n var radius = this.props.radius;\n var thickness = this.props.thickness;\n \n var radius2 = radius - thickness - thickness/2;\n var squareSize = 2 * radius2/Math.sqrt(2) - 2; // side of square that fits in inner circle \n var smallSquareSize = squareSize/n;\n \n var colorSquares = [];\n 
var sat = 255;\n for(var j = 0; j < n; j++) {\n var val = 255;\n for(var i = 0; i < n; i++) {\n var hsv = {hue:this.state.hue, sat:sat, val:val};\n var rgb = hsv2rgb(hsv);\n\n //console.log(hsv, rgb);\n colorSquares.push();\n val = val -256/n; \n }\n sat = sat -256/n; \n }\n return ({colorSquares});\n }\n})\n\n//---------------------------------------------------------------------------------\nvar ColorWheel = React.createClass({\n // The state is minumum of the radius sepcified in the props, and the available radius\n getInitialState:function() {\n console.log(\"ColorWheel.getInitialState\", this.props.radius);\n return {radius:this.props.radius, hueIndex:0, hue:0, rgb:{r:255,g:0,b:0}}; // initial hue is 0, inicial color is red\n },\n \n computeAvailableRadius:function() {\n if (this.elem) {\n // calculate parent's width - padding\n var p = this.elem.parentNode;\n var s= window.getComputedStyle(p);\n var w = p.clientWidth - parseFloat(s.paddingLeft) - parseFloat(s.paddingLeft); // Need parseFloat to get rid of px in 14px\n // Divide width by 2, and leave off an extra pixel\n var r = Math.min(this.props.radius, Math.round(w/2));\n console.log(\"Computed Radius\", r);\n this.setState({radius:r})\n } \n },\n \n handleResize: function(e) {\n this.computeAvailableRadius();\n },\n\n componentDidMount: function() {\n this.computeAvailableRadius();\n window.addEventListener('resize', this.handleResize);\n globalColorWheel = this;\n },\n\n componentWillUnmount: function() {\n window.removeEventListener('resize', this.handleResize);\n },\n\n selectHue: function(i, hue, rgb) {\n this.setState({hueIndex:i, hue:hue, rgb:rgb});\n this.hueSquare.setState({hue:hue});\n },\n \n getDefaultProps: function() {\n return { n: 24, radius:100};\n },\n\n render: function() {\n var n = this.props.n;\n var radius = this.state.radius -1;\n var thickness = Math.round(radius/3);\n // radus is the outer radius\n \n var radius2 = radius - thickness - thickness/2;\n var radius3 = radius - 
thickness/2;\n \n \n var colorSegments = [];\n for(var i = 0; i < n; i++) {\n var a1 = Math.PI * 2 * i / n;\n var a2 = Math.PI * 2 * (i+1)/n;\n var hue = Math.round(256 * i/n);\n var hsv = {hue:hue, sat:255, val:255};\n var rgb = hsv2rgb(hsv);\n var r = this.state.hueIndex == i ? radius : radius3;\n \n //console.log(hsv, rgb);\n colorSegments.push();\n }\n console.log(\"ColorWheel\", radius);\n var self = this;\n return (\n {colorSegments}\n \n );\n }\n})\n\n//---------------------------------------------------------------------------------\nvar LeftRightArrow = React.createClass({\n changeSpeed:function(speedInc) {\n this.setState({speed:(this.state.speed + speedInc)}); \n },\n \n getInitialState: function() {\n return {speed:0};\n },\n \n render: function() {\n return(\n
\n
\n \n \n \n \n
\n
);\n }\n})\n\n\n//---------------------------------------------------------------------------------\n\nReactDOM.render(\n ,\n document.getElementById('main')\n)\n\nReactDOM.render(\n ,\n document.getElementById('right')\n)\n\nReactDOM.render(\n
\n \n \n ,\n document.getElementById('left')\n)\n\nwindow.setInterval(callTimerCallbacks, 20);\n"},"new_file":{"kind":"string","value":"scripts/main.js"},"old_contents":{"kind":"string","value":"var globalColorWheel;\nvar timersList = [];\nvar outerWheel, innerWheel;\n\nvar count = 0;\nfunction callTimerCallbacks() {\n count = (count + 1) % 60;\n for(var i = 0; i < timersList.length; i++) {\n timersList[i](count);\n }\n}\n\n//---------------------------------------------------------------------------------\nvar Led = React.createClass({\n getInitialState:function() {\n return {rgb:{r:255, g:0, b:0}}; // Set it to red\n },\n \n setLed: function() {\n this.setState({rgb:globalColorWheel.state.rgb});\n },\n\n render: function(){\n var x = this.props.x;\n var y = this.props.y;\n var a = this.props.angle;\n var thickness = this.props.thickness;\n var dx = Math.round(Math.cos(a) * (thickness-2));\n var dy = Math.round(Math.sin(a) * (thickness-2));\n var rgb = this.state.rgb;\n \n return(\n );\n }\n})\n \n//---------------------------------------------------------------------------------\nvar LedOneWheel = React.createClass({\n getInitialState:function() {\n return {speed:0, angle:0, counter:60};\n },\n\n changeSpeed:function(speedInc) {\n if ((speedInc == 1 && this.state.speed < 1) \n || (speedInc == -1 && this.state.speed > -1)) {\n console.log(\"LedOneWheel n=\", this.props.n, \" speedInc=\", speedInc);\n this.setState({speed:this.state.speed + speedInc})\n }\n },\n \n tick:function(count) {\n if (this.state.speed == 0)\n return;\n if (count==0 || count % this.state.counter == 0) {\n var a = this.state.angle;\n a = (a + this.state.speed) % this.props.n;\n this.setState({angle:a});\n }\n },\n \n componentDidMount: function() {\n timersList.push(this.tick); \n console.log(\"Adding timer for LedOneWheel\");\n }, \n \n componentWillUnmount: function() {\n var index = timersList.indexof(this.tick); \n if (index > -1) timersList.splice( index, 1 );\n console.log(\"Removing 
timer for LedOneWheel\");\n },\n \n render:function() {\n var n = this.props.n;\n var radius = this.props.radius;\n var r = this.props.r;\n var thickness = this.props.thickness;\n var r2 = r - thickness;\n var leds = [];\n for(var i = 0; i < n; i++) {\n var a1 = Math.PI * 2 * i / n;\n var x = radius + Math.round(Math.cos(a1)*r);\n var y = radius - Math.round(Math.sin(a1)*r);\n \n leds.push();\n }\n return ({leds});\n }\n})\n\n//---------------------------------------------------------------------------------\nvar LedWheel = React.createClass({\n // The state is minumum of the radius sepcified in the props, and the available radius\n getInitialState:function() {\n console.log(\"LedWheel.getInitialState\", this.props.radius);\n return {radius:this.props.radius};\n },\n\n computeAvailableRadius:function() {\n console.log(\"LedWheel.computeAvailableRadius\", this.props.radius);\n if (this.elem) {\n // calculate parent's width - padding\n var p = this.elem.parentNode;\n var s= window.getComputedStyle(p);\n var w = p.clientWidth - parseFloat(s.paddingLeft) - parseFloat(s.paddingLeft); // Need parseFloat to get rid of px in 14px\n // Divide width by 2, and leave off an extra pixel\n var r = Math.min(this.props.radius, Math.round(w/2));\n console.log(\"Computed Radius\", r);\n this.setState({radius:r})\n } \n },\n \n handleResize: function(e) {\n this.computeAvailableRadius();\n },\n\n componentDidMount: function() {\n this.computeAvailableRadius();\n window.addEventListener('resize', 100, this.handleResize);\n },\n\n componentWillUnmount: function() {\n window.removeEventListener('resize', this.handleResize);\n },\n\n getDefaultProps: function() {\n return { radius:200 };\n },\n\n // thickeness is calculated from radius as follows\n // radius^2 = thickness/2 ^2 + (r1 + thickness)^2\n // thickness/2 / r1 = tan(PI/24)\n // Solving\n // r1 = thickness / (2*tan(PI/24))\n // radius^2 = thickness^2 * ( (1/2)^2 + (1/(2*tan(PI/24) + 1)^2 )\n\n render: function() {\n var radius = 
this.state.radius-1;\n var thickness = radius / Math.sqrt(0.25 + Math.pow(1 + (1/(2*Math.tan(Math.PI/24))), 2) );\n var r1 = thickness / (2 * Math.tan(Math.PI/24));\n var r2 = thickness / (2 * Math.tan(Math.PI/12));\n\n var self = this;\n return (\n \n \n );\n } \n})\n\n//---------------------------------------------------------------------------------\nvar HueSquare = React.createClass({\n getInitialState: function() {\n return {hue:0}; // initial hue is 0, inicial color is red\n },\n\n getDefaultProps: function() {\n return { n: 8, radius:100, thickness:30 };\n },\n \n render: function() {\n var n = this.props.n;\n var radius = this.props.radius;\n var thickness = this.props.thickness;\n \n var radius2 = radius - thickness - thickness/2;\n var squareSize = 2 * radius2/Math.sqrt(2) - 2; // side of square that fits in inner circle \n var smallSquareSize = squareSize/n;\n \n var colorSquares = [];\n var sat = 255;\n for(var j = 0; j < n; j++) {\n var val = 255;\n for(var i = 0; i < n; i++) {\n var hsv = {hue:this.state.hue, sat:sat, val:val};\n var rgb = hsv2rgb(hsv);\n\n //console.log(hsv, rgb);\n colorSquares.push();\n val = val -256/n; \n }\n sat = sat -256/n; \n }\n return ({colorSquares});\n }\n})\n\n//---------------------------------------------------------------------------------\nvar ColorWheel = React.createClass({\n // The state is minumum of the radius sepcified in the props, and the available radius\n getInitialState:function() {\n console.log(\"ColorWheel.getInitialState\", this.props.radius);\n return {radius:this.props.radius, hueIndex:0, hue:0, rgb:{r:255,g:0,b:0}}; // initial hue is 0, inicial color is red\n },\n \n computeAvailableRadius:function() {\n if (this.elem) {\n // calculate parent's width - padding\n var p = this.elem.parentNode;\n var s= window.getComputedStyle(p);\n var w = p.clientWidth - parseFloat(s.paddingLeft) - parseFloat(s.paddingLeft); // Need parseFloat to get rid of px in 14px\n // Divide width by 2, and leave off an extra 
pixel\n var r = Math.min(this.props.radius, Math.round(w/2));\n console.log(\"Computed Radius\", r);\n this.setState({radius:r})\n } \n },\n \n handleResize: function(e) {\n this.computeAvailableRadius();\n },\n\n componentDidMount: function() {\n this.computeAvailableRadius();\n window.addEventListener('resize', this.handleResize);\n globalColorWheel = this;\n },\n\n componentWillUnmount: function() {\n window.removeEventListener('resize', this.handleResize);\n },\n\n selectHue: function(i, hue, rgb) {\n this.setState({hueIndex:i, hue:hue, rgb:rgb});\n this.hueSquare.setState({hue:hue});\n },\n \n getDefaultProps: function() {\n return { n: 24, radius:100};\n },\n\n render: function() {\n var n = this.props.n;\n var radius = this.state.radius -1;\n var thickness = Math.round(radius/3);\n // radus is the outer radius\n \n var radius2 = radius - thickness - thickness/2;\n var radius3 = radius - thickness/2;\n \n \n var colorSegments = [];\n for(var i = 0; i < n; i++) {\n var a1 = Math.PI * 2 * i / n;\n var a2 = Math.PI * 2 * (i+1)/n;\n var hue = Math.round(256 * i/n);\n var hsv = {hue:hue, sat:255, val:255};\n var rgb = hsv2rgb(hsv);\n var r = this.state.hueIndex == i ? radius : radius3;\n \n //console.log(hsv, rgb);\n colorSegments.push();\n }\n console.log(\"ColorWheel\", radius);\n var self = this;\n return (\n {colorSegments}\n \n );\n }\n})\n\n//---------------------------------------------------------------------------------\nvar LeftRightArrow = React.createClass({\n changeSpeed:function(speedInc) {\n this.setState({speed:(this.state.speed + speedInc)}); \n },\n \n getInitialState: function() {\n return {speed:0};\n },\n \n render: function() {\n return(
\n \n \n \n \n
);\n }\n})\n\n\n//---------------------------------------------------------------------------------\n\nReactDOM.render(\n ,\n document.getElementById('main')\n)\n\nReactDOM.render(\n ,\n document.getElementById('right')\n)\n\nReactDOM.render(\n
\n \n \n ,\n document.getElementById('left')\n)\n\nwindow.setInterval(callTimerCallbacks, 20);\n"},"message":{"kind":"string","value":"Update main.js"},"old_file":{"kind":"string","value":"scripts/main.js"},"subject":{"kind":"string","value":"Update main.js"},"git_diff":{"kind":"string","value":"cripts/main.js\n },\n \n render: function() {\n return(
\n return(\n
\n
\n \n \n \n \n
);\n
\n
);\n }\n })\n "}}},{"rowIdx":2062,"cells":{"lang":{"kind":"string","value":"JavaScript"},"license":{"kind":"string","value":"mit"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"4ca344922ca0fe7aedaec8e2d2ef1d95fda62c3a"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"monkbroc/handwriting-sheet,monkbroc/handwriting-sheet"},"new_contents":{"kind":"string","value":"$(document).ready(function() {\n\n /* === Models === */\n\n var EditorState = Backbone.Model.extend({\n defaults: {\n wideSpacing: true,\n allCaps: true,\n mode: 'singleLine',\n },\n\n singleLine: function() {\n return this.get('mode') == 'singleLine';\n },\n\n multiLine: function() {\n return !this.singleLine();\n },\n\n id: 'singleton',\n localStorage: new Backbone.LocalStorage(\"EditorState\"),\n });\n\n var PracticeText = Backbone.Model.extend({\n defaults: {\n lines: ['ABC'],\n },\n\n id: 'singleton',\n localStorage: new Backbone.LocalStorage(\"PracticeText\"),\n });\n\n /* === Views === */\n\n var ControlsView = Backbone.View.extend({\n /* Pass in state and el to constructor */\n initialize: function(options) {\n _.extend(this, options);\n this.$wideSpacing = this.$(\"#wide-spacing\");\n this.$allCaps = this.$(\"#all-caps\");\n this.$mode = this.$('input[name=\"mode\"]');\n },\n\n events: {\n 'change #wide-spacing': 'wideSpacingChanged',\n 'change #all-caps': 'allCapsChanged',\n 'change [name=\"mode\"]': 'modeChanged',\n 'click #print': 'print',\n },\n\n wideSpacingChanged: function(event) {\n var val = this.$wideSpacing.prop('checked');\n this.state.save('wideSpacing', val);\n },\n\n allCapsChanged: function(event) {\n var val = this.$allCaps.prop('checked');\n this.state.save('allCaps', val);\n },\n\n modeChanged: function(event) {\n var val = this.$mode.filter(function(index, element) {\n return element.checked;\n }).val();\n this.state.save('mode', val);\n },\n\n render: function() {\n this.$wideSpacing.prop('checked', 
this.state.get('wideSpacing'));\n this.$allCaps.prop('checked', this.state.get('allCaps'));\n var mode = this.state.get('mode');\n this.$mode.filter(function(index, element) {\n return $(element).val() == mode;\n }).prop('checked', true);\n\n return this;\n },\n\n print: function() {\n window.print();\n },\n });\n \n var SheetView = Backbone.View.extend({\n /* Pass model, state and el to constructor */\n initialize: function(options) {\n _.extend(this, options);\n _.bindAll(this, 'render');\n this.listenTo(this.model, 'change', this.render);\n this.listenTo(this.state, 'change', this.render);\n\n this.$guidelines = this.$(\".guideline\");\n this.$firstLine = this.$guidelines.first();\n this.$otherLines = this.$guidelines.slice(1);\n this.$writeHere = this.$(\".write-here\");\n },\n\n events: {\n \"input .guideline\": \"updateLine\",\n // Events for old IE\n \"keyup .guideline\": \"updateLine\",\n \"cut .guideline\": \"updateLine\",\n \"blur .guideline\": \"updateLine\",\n \"paste .guideline\": \"updateLine\",\n },\n\n updateLine: function(event) {\n var $el = $(event.target);\n var lineNum = this.$guidelines.index($el);\n if(lineNum < 0) {\n console.log(\"Bad line number in updateLine\");\n return;\n }\n var lines = _.clone(this.model.get('lines'));\n lines[lineNum] = $el.html();\n this.model.save('lines', lines);\n },\n\n render: function() {\n this.$el.toggleClass(\"all-caps\", this.state.get('allCaps'));\n this.$el.toggleClass(\"wide-spacing\", this.state.get('wideSpacing'));\n\n var lines = this.model.get('lines');\n\n this.$el.toggleClass(\"single-line-mode\", this.state.singleLine());\n\n var mode = this.state.get('mode');\n var $active = $(document.activeElement);\n switch(mode) {\n case 'singleLine':\n var text = lines[0];\n this.$firstLine.attr('contenteditable', true);\n this.$otherLines.attr('contenteditable', false);\n this.$guidelines.not($active).html(text);\n break;\n case 'multiLine':\n this.$guidelines.attr('contenteditable', true);\n 
this.$guidelines.each(function(line) {\n $(this).not($active).html(lines[line] || '');\n });\n break;\n }\n },\n });\n\n /* === Application start === */\n\n var editorState = new EditorState();\n var practiceText = new PracticeText();\n var controls = new ControlsView({\n state: editorState,\n el: $(\".controls\")\n });\n var sheet = new SheetView({\n state: editorState,\n model: practiceText,\n el: $(\".sheet\")\n });\n\n\n editorState.fetch();\n practiceText.fetch();\n\n controls.render();\n sheet.render();\n\n /* === Widgets === */\n\n $('[data-toggle=\"check\"]').radiocheck();\n $('[data-toggle=\"radio\"]').radiocheck();\n});\n\n// vim: sw=2 expandtab\n"},"new_file":{"kind":"string","value":"js/handwriting.js"},"old_contents":{"kind":"string","value":"$(document).ready(function() {\n\n /* === Models === */\n\n var EditorState = Backbone.Model.extend({\n defaults: {\n wideSpacing: true,\n allCaps: true,\n mode: 'singleLine',\n },\n\n singleLine: function() {\n return this.get('mode') == 'singleLine';\n },\n\n multiLine: function() {\n return !this.singleLine();\n },\n\n id: 'singleton',\n localStorage: new Backbone.LocalStorage(\"EditorState\"),\n });\n\n var PracticeText = Backbone.Model.extend({\n defaults: {\n lines: ['ABC'],\n },\n\n id: 'singleton',\n localStorage: new Backbone.LocalStorage(\"PracticeText\"),\n });\n\n /* === Views === */\n\n var ControlsView = Backbone.View.extend({\n /* Pass in state and el to constructor */\n initialize: function(options) {\n _.extend(this, options);\n this.$wideSpacing = this.$(\"#wide-spacing\");\n this.$allCaps = this.$(\"#all-caps\");\n this.$mode = this.$('input[name=\"mode\"]');\n },\n\n events: {\n 'change #wide-spacing': 'wideSpacingChanged',\n 'change #all-caps': 'allCapsChanged',\n 'change [name=\"mode\"]': 'modeChanged',\n 'click #print': 'print',\n },\n\n wideSpacingChanged: function(event) {\n var val = this.$wideSpacing.prop('checked');\n this.state.save('wideSpacing', val);\n },\n\n allCapsChanged: 
function(event) {\n var val = this.$allCaps.prop('checked');\n this.state.save('allCaps', val);\n },\n\n modeChanged: function(event) {\n var val = this.$mode.filter(function(index, element) {\n return element.checked;\n }).val();\n this.state.save('mode', val);\n },\n\n render: function() {\n this.$wideSpacing.prop('checked', this.state.get('wideSpacing'));\n this.$allCaps.prop('checked', this.state.get('allCaps'));\n var mode = this.state.get('mode');\n this.$mode.filter(function(index, element) {\n return $(element).val() == mode;\n }).prop('checked', true);\n\n return this;\n },\n\n print: function() {\n window.print();\n },\n });\n \n var SheetView = Backbone.View.extend({\n /* Pass model, state and el to constructor */\n initialize: function(options) {\n _.extend(this, options);\n _.bindAll(this, 'render');\n this.listenTo(this.model, 'change', this.render);\n this.listenTo(this.state, 'change', this.render);\n\n this.$guidelines = this.$(\".guideline\");\n this.$firstLine = this.$guidelines.first();\n this.$otherLines = this.$guidelines.slice(1);\n this.$writeHere = this.$(\".write-here\");\n },\n\n events: {\n \"input .guideline\": \"updateLine\",\n // Events for old IE\n \"keyup .guideline\": \"updateLine\",\n \"cut .guideline\": \"updateLine\",\n \"blur .guideline\": \"updateLine\",\n \"paste .guideline\": \"updateLine\",\n },\n\n updateLine: function(event) {\n var $el = $(event.target);\n var lineNum = this.$guidelines.index($el);\n if(lineNum < 0) {\n console.log(\"Bad line number in updateLine\");\n return;\n }\n var lines = _.clone(this.model.get('lines'));\n lines[lineNum] = $el.html();\n this.model.save('lines', lines);\n },\n\n render: function() {\n this.$el.toggleClass(\"all-caps\", this.state.get('allCaps'));\n this.$el.toggleClass(\"wide-spacing\", this.state.get('wideSpacing'));\n\n var lines = this.model.get('lines');\n\n this.$el.toggleClass(\"single-line-mode\", this.state.singleLine());\n\n var mode = this.state.get('mode');\n 
switch(mode) {\n case 'singleLine':\n var text = lines[0];\n this.$firstLine.attr('contenteditable', true);\n this.$otherLines.attr('contenteditable', false);\n this.$guidelines.html(text);\n break;\n case 'multiLine':\n this.$guidelines.attr('contenteditable', true);\n this.$guidelines.each(function(line) {\n $(this).html(lines[line] || '');\n });\n break;\n }\n },\n });\n\n /* === Application start === */\n\n var editorState = new EditorState();\n var practiceText = new PracticeText();\n var controls = new ControlsView({\n state: editorState,\n el: $(\".controls\")\n });\n var sheet = new SheetView({\n state: editorState,\n model: practiceText,\n el: $(\".sheet\")\n });\n\n\n editorState.fetch();\n practiceText.fetch();\n\n controls.render();\n sheet.render();\n\n /* === Widgets === */\n\n $('[data-toggle=\"check\"]').radiocheck();\n $('[data-toggle=\"radio\"]').radiocheck();\n});\n\n// vim: sw=2 expandtab\n"},"message":{"kind":"string","value":"Work around Firefox issue with replacing HTML of focused element\n"},"old_file":{"kind":"string","value":"js/handwriting.js"},"subject":{"kind":"string","value":"Work around Firefox issue with replacing HTML of focused element"},"git_diff":{"kind":"string","value":"s/handwriting.js\n this.$el.toggleClass(\"single-line-mode\", this.state.singleLine());\n \n var mode = this.state.get('mode');\n var $active = $(document.activeElement);\n switch(mode) {\n case 'singleLine':\n var text = lines[0];\n this.$firstLine.attr('contenteditable', true);\n this.$otherLines.attr('contenteditable', false);\n this.$guidelines.html(text);\n this.$guidelines.not($active).html(text);\n break;\n case 'multiLine':\n this.$guidelines.attr('contenteditable', true);\n this.$guidelines.each(function(line) {\n $(this).html(lines[line] || '');\n $(this).not($active).html(lines[line] || '');\n });\n break;\n 
}"}}},{"rowIdx":2063,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"2ca30191d2bb430a4b27b125177406cf263d444f"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"apache/kafka,Chasego/kafka,TiVo/kafka,TiVo/kafka,TiVo/kafka,Chasego/kafka,noslowerdna/kafka,lindong28/kafka,apache/kafka,lindong28/kafka,sslavic/kafka,lindong28/kafka,noslowerdna/kafka,apache/kafka,noslowerdna/kafka,guozhangwang/kafka,lindong28/kafka,sslavic/kafka,Chasego/kafka,guozhangwang/kafka,guozhangwang/kafka,sslavic/kafka,noslowerdna/kafka,sslavic/kafka,guozhangwang/kafka,apache/kafka,TiVo/kafka,Chasego/kafka"},"new_contents":{"kind":"string","value":"/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage org.apache.kafka.clients.producer.internals;\n\nimport java.nio.ByteBuffer;\nimport java.util.ArrayDeque;\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.Deque;\nimport java.util.HashMap;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Objects;\nimport java.util.Set;\nimport java.util.concurrent.ConcurrentMap;\nimport java.util.concurrent.atomic.AtomicInteger;\nimport org.apache.kafka.clients.ApiVersions;\nimport org.apache.kafka.clients.producer.Callback;\nimport org.apache.kafka.common.Cluster;\nimport org.apache.kafka.common.KafkaException;\nimport org.apache.kafka.common.MetricName;\nimport org.apache.kafka.common.Node;\nimport org.apache.kafka.common.PartitionInfo;\nimport org.apache.kafka.common.TopicPartition;\nimport org.apache.kafka.common.errors.UnsupportedVersionException;\nimport org.apache.kafka.common.header.Header;\nimport org.apache.kafka.common.metrics.Measurable;\nimport org.apache.kafka.common.metrics.MetricConfig;\nimport org.apache.kafka.common.metrics.Metrics;\nimport org.apache.kafka.common.metrics.Sensor;\nimport org.apache.kafka.common.metrics.stats.Meter;\nimport org.apache.kafka.common.record.AbstractRecords;\nimport org.apache.kafka.common.record.CompressionRatioEstimator;\nimport org.apache.kafka.common.record.CompressionType;\nimport org.apache.kafka.common.record.MemoryRecords;\nimport org.apache.kafka.common.record.MemoryRecordsBuilder;\nimport org.apache.kafka.common.record.Record;\nimport 
org.apache.kafka.common.record.RecordBatch;\nimport org.apache.kafka.common.record.TimestampType;\nimport org.apache.kafka.common.utils.CopyOnWriteMap;\nimport org.apache.kafka.common.utils.LogContext;\nimport org.apache.kafka.common.utils.Time;\nimport org.slf4j.Logger;\n\n/**\n * This class acts as a queue that accumulates records into {@link MemoryRecords}\n * instances to be sent to the server.\n *

\n * The accumulator uses a bounded amount of memory and append calls will block when that memory is exhausted, unless\n * this behavior is explicitly disabled.\n */\npublic final class RecordAccumulator {\n\n private final Logger log;\n private volatile boolean closed;\n private final AtomicInteger flushesInProgress;\n private final AtomicInteger appendsInProgress;\n private final int batchSize;\n private final CompressionType compression;\n private final int lingerMs;\n private final long retryBackoffMs;\n private final int deliveryTimeoutMs;\n private final BufferPool free;\n private final Time time;\n private final ApiVersions apiVersions;\n private final ConcurrentMap> batches;\n private final IncompleteBatches incomplete;\n // The following variables are only accessed by the sender thread, so we don't need to protect them.\n private final Map muted;\n private int drainIndex;\n private final TransactionManager transactionManager;\n private long nextBatchExpiryTimeMs = Long.MAX_VALUE; // the earliest time (absolute) a batch will expire.\n\n /**\n * Create a new record accumulator\n *\n * @param logContext The log context used for logging\n * @param batchSize The size to use when allocating {@link MemoryRecords} instances\n * @param compression The compression codec for the records\n * @param lingerMs An artificial delay time to add before declaring a records instance that isn't full ready for\n * sending. This allows time for more records to arrive. Setting a non-zero lingerMs will trade off some\n * latency for potentially better throughput due to more batching (and hence fewer, larger requests).\n * @param retryBackoffMs An artificial delay time to retry the produce request upon receiving an error. 
This avoids\n * exhausting all retries in a short period of time.\n * @param metrics The metrics\n * @param time The time instance to use\n * @param apiVersions Request API versions for current connected brokers\n * @param transactionManager The shared transaction state object which tracks producer IDs, epochs, and sequence\n * numbers per partition.\n */\n public RecordAccumulator(LogContext logContext,\n int batchSize,\n CompressionType compression,\n int lingerMs,\n long retryBackoffMs,\n int deliveryTimeoutMs,\n Metrics metrics,\n String metricGrpName,\n Time time,\n ApiVersions apiVersions,\n TransactionManager transactionManager,\n BufferPool bufferPool) {\n this.log = logContext.logger(RecordAccumulator.class);\n this.drainIndex = 0;\n this.closed = false;\n this.flushesInProgress = new AtomicInteger(0);\n this.appendsInProgress = new AtomicInteger(0);\n this.batchSize = batchSize;\n this.compression = compression;\n this.lingerMs = lingerMs;\n this.retryBackoffMs = retryBackoffMs;\n this.deliveryTimeoutMs = deliveryTimeoutMs;\n this.batches = new CopyOnWriteMap<>();\n this.free = bufferPool;\n this.incomplete = new IncompleteBatches();\n this.muted = new HashMap<>();\n this.time = time;\n this.apiVersions = apiVersions;\n this.transactionManager = transactionManager;\n registerMetrics(metrics, metricGrpName);\n }\n\n private void registerMetrics(Metrics metrics, String metricGrpName) {\n MetricName metricName = metrics.metricName(\"waiting-threads\", metricGrpName, \"The number of user threads blocked waiting for buffer memory to enqueue their records\");\n Measurable waitingThreads = new Measurable() {\n public double measure(MetricConfig config, long now) {\n return free.queued();\n }\n };\n metrics.addMetric(metricName, waitingThreads);\n\n metricName = metrics.metricName(\"buffer-total-bytes\", metricGrpName, \"The maximum amount of buffer memory the client can use (whether or not it is currently used).\");\n Measurable totalBytes = new Measurable() {\n 
public double measure(MetricConfig config, long now) {\n return free.totalMemory();\n }\n };\n metrics.addMetric(metricName, totalBytes);\n\n metricName = metrics.metricName(\"buffer-available-bytes\", metricGrpName, \"The total amount of buffer memory that is not being used (either unallocated or in the free list).\");\n Measurable availableBytes = new Measurable() {\n public double measure(MetricConfig config, long now) {\n return free.availableMemory();\n }\n };\n metrics.addMetric(metricName, availableBytes);\n\n Sensor bufferExhaustedRecordSensor = metrics.sensor(\"buffer-exhausted-records\");\n MetricName rateMetricName = metrics.metricName(\"buffer-exhausted-rate\", metricGrpName, \"The average per-second number of record sends that are dropped due to buffer exhaustion\");\n MetricName totalMetricName = metrics.metricName(\"buffer-exhausted-total\", metricGrpName, \"The total number of record sends that are dropped due to buffer exhaustion\");\n bufferExhaustedRecordSensor.add(new Meter(rateMetricName, totalMetricName));\n }\n\n /**\n * Add a record to the accumulator, return the append result\n *

\n * The append result will contain the future metadata, and flag for whether the appended batch is full or a new batch is created\n *

\n *\n * @param tp The topic/partition to which this record is being sent\n * @param timestamp The timestamp of the record\n * @param key The key for the record\n * @param value The value for the record\n * @param headers the Headers for the record\n * @param callback The user-supplied callback to execute when the request is complete\n * @param maxTimeToBlock The maximum time in milliseconds to block for buffer memory to be available\n * @param abortOnNewBatch A boolean that indicates returning before a new batch is created and \n * running the the partitioner's onNewBatch method before trying to append again\n */\n public RecordAppendResult append(TopicPartition tp,\n long timestamp,\n byte[] key,\n byte[] value,\n Header[] headers,\n Callback callback,\n long maxTimeToBlock,\n boolean abortOnNewBatch) throws InterruptedException {\n // We keep track of the number of appending thread to make sure we do not miss batches in\n // abortIncompleteBatches().\n appendsInProgress.incrementAndGet();\n ByteBuffer buffer = null;\n if (headers == null) headers = Record.EMPTY_HEADERS;\n try {\n // check if we have an in-progress batch\n Deque dq = getOrCreateDeque(tp);\n synchronized (dq) {\n if (closed)\n throw new KafkaException(\"Producer closed while send in progress\");\n RecordAppendResult appendResult = tryAppend(timestamp, key, value, headers, callback, dq);\n if (appendResult != null)\n return appendResult;\n }\n\n // we don't have an in-progress record batch try to allocate a new batch\n if (abortOnNewBatch) {\n // Return a result that will cause another call to append.\n return new RecordAppendResult(null, false, false, true);\n }\n \n byte maxUsableMagic = apiVersions.maxUsableProduceMagic();\n int size = Math.max(this.batchSize, AbstractRecords.estimateSizeInBytesUpperBound(maxUsableMagic, compression, key, value, headers));\n log.trace(\"Allocating a new {} byte message buffer for topic {} partition {}\", size, tp.topic(), tp.partition());\n buffer = 
free.allocate(size, maxTimeToBlock);\n synchronized (dq) {\n // Need to check if producer is closed again after grabbing the dequeue lock.\n if (closed)\n throw new KafkaException(\"Producer closed while send in progress\");\n\n RecordAppendResult appendResult = tryAppend(timestamp, key, value, headers, callback, dq);\n if (appendResult != null) {\n // Somebody else found us a batch, return the one we waited for! Hopefully this doesn't happen often...\n return appendResult;\n }\n\n MemoryRecordsBuilder recordsBuilder = recordsBuilder(buffer, maxUsableMagic);\n ProducerBatch batch = new ProducerBatch(tp, recordsBuilder, time.milliseconds());\n FutureRecordMetadata future = Objects.requireNonNull(batch.tryAppend(timestamp, key, value, headers,\n callback, time.milliseconds()));\n\n dq.addLast(batch);\n incomplete.add(batch);\n\n // Don't deallocate this buffer in the finally block as it's being used in the record batch\n buffer = null;\n return new RecordAppendResult(future, dq.size() > 1 || batch.isFull(), true, false);\n }\n } finally {\n if (buffer != null)\n free.deallocate(buffer);\n appendsInProgress.decrementAndGet();\n }\n }\n\n private MemoryRecordsBuilder recordsBuilder(ByteBuffer buffer, byte maxUsableMagic) {\n if (transactionManager != null && maxUsableMagic < RecordBatch.MAGIC_VALUE_V2) {\n throw new UnsupportedVersionException(\"Attempting to use idempotence with a broker which does not \" +\n \"support the required message format (v2). The broker must be version 0.11 or later.\");\n }\n return MemoryRecords.builder(buffer, maxUsableMagic, compression, TimestampType.CREATE_TIME, 0L);\n }\n\n /**\n * Try to append to a ProducerBatch.\n *\n * If it is full, we return null and a new batch is created. We also close the batch for record appends to free up\n * resources like compression buffers. The batch will be fully closed (ie. 
the record batch headers will be written\n * and memory records built) in one of the following cases (whichever comes first): right before send,\n * if it is expired, or when the producer is closed.\n */\n private RecordAppendResult tryAppend(long timestamp, byte[] key, byte[] value, Header[] headers,\n Callback callback, Deque deque) {\n ProducerBatch last = deque.peekLast();\n if (last != null) {\n FutureRecordMetadata future = last.tryAppend(timestamp, key, value, headers, callback, time.milliseconds());\n if (future == null)\n last.closeForRecordAppends();\n else\n return new RecordAppendResult(future, deque.size() > 1 || last.isFull(), false, false);\n }\n return null;\n }\n\n private boolean isMuted(TopicPartition tp, long now) {\n // Take care to avoid unnecessary map look-ups because this method is a hotspot if producing to a\n // large number of partitions\n Long throttleUntilTime = muted.get(tp);\n if (throttleUntilTime == null)\n return false;\n\n if (now >= throttleUntilTime) {\n muted.remove(tp);\n return false;\n }\n \n return true;\n }\n\n public void resetNextBatchExpiryTime() {\n nextBatchExpiryTimeMs = Long.MAX_VALUE;\n }\n\n public void maybeUpdateNextBatchExpiryTime(ProducerBatch batch) {\n if (batch.createdMs + deliveryTimeoutMs > 0) {\n // the non-negative check is to guard us against potential overflow due to setting\n // a large value for deliveryTimeoutMs\n nextBatchExpiryTimeMs = Math.min(nextBatchExpiryTimeMs, batch.createdMs + deliveryTimeoutMs);\n } else {\n log.warn(\"Skipping next batch expiry time update due to addition overflow: \"\n + \"batch.createMs={}, deliveryTimeoutMs={}\", batch.createdMs, deliveryTimeoutMs);\n }\n }\n\n /**\n * Get a list of batches which have been sitting in the accumulator too long and need to be expired.\n */\n public List expiredBatches(long now) {\n List expiredBatches = new ArrayList<>();\n for (Map.Entry> entry : this.batches.entrySet()) {\n // expire the batches in the order of sending\n Deque deque 
= entry.getValue();\n synchronized (deque) {\n while (!deque.isEmpty()) {\n ProducerBatch batch = deque.getFirst();\n if (batch.hasReachedDeliveryTimeout(deliveryTimeoutMs, now)) {\n deque.poll();\n batch.abortRecordAppends();\n expiredBatches.add(batch);\n } else {\n maybeUpdateNextBatchExpiryTime(batch);\n break;\n }\n }\n }\n }\n return expiredBatches;\n }\n\n public long getDeliveryTimeoutMs() {\n return deliveryTimeoutMs;\n }\n\n /**\n * Re-enqueue the given record batch in the accumulator. In Sender.completeBatch method, we check\n * whether the batch has reached deliveryTimeoutMs or not. Hence we do not do the delivery timeout check here.\n */\n public void reenqueue(ProducerBatch batch, long now) {\n batch.reenqueued(now);\n Deque deque = getOrCreateDeque(batch.topicPartition);\n synchronized (deque) {\n if (transactionManager != null)\n insertInSequenceOrder(deque, batch);\n else\n deque.addFirst(batch);\n }\n }\n\n /**\n * Split the big batch that has been rejected and reenqueue the split batches in to the accumulator.\n * @return the number of split batches.\n */\n public int splitAndReenqueue(ProducerBatch bigBatch) {\n // Reset the estimated compression ratio to the initial value or the big batch compression ratio, whichever\n // is bigger. There are several different ways to do the reset. 
We chose the most conservative one to ensure\n // the split doesn't happen too often.\n CompressionRatioEstimator.setEstimation(bigBatch.topicPartition.topic(), compression,\n Math.max(1.0f, (float) bigBatch.compressionRatio()));\n Deque dq = bigBatch.split(this.batchSize);\n int numSplitBatches = dq.size();\n Deque partitionDequeue = getOrCreateDeque(bigBatch.topicPartition);\n while (!dq.isEmpty()) {\n ProducerBatch batch = dq.pollLast();\n incomplete.add(batch);\n // We treat the newly split batches as if they are not even tried.\n synchronized (partitionDequeue) {\n if (transactionManager != null) {\n // We should track the newly created batches since they already have assigned sequences.\n transactionManager.addInFlightBatch(batch);\n insertInSequenceOrder(partitionDequeue, batch);\n } else {\n partitionDequeue.addFirst(batch);\n }\n }\n }\n return numSplitBatches;\n }\n\n // We will have to do extra work to ensure the queue is in order when requests are being retried and there are\n // multiple requests in flight to that partition. If the first in flight request fails to append, then all the\n // subsequent in flight requests will also fail because the sequence numbers will not be accepted.\n //\n // Further, once batches are being retried, we are reduced to a single in flight request for that partition. So when\n // the subsequent batches come back in sequence order, they will have to be placed further back in the queue.\n //\n // Note that this assumes that all the batches in the queue which have an assigned sequence also have the current\n // producer id. 
We will not attempt to reorder messages if the producer id has changed, we will throw an\n // IllegalStateException instead.\n private void insertInSequenceOrder(Deque deque, ProducerBatch batch) {\n // When we are requeing and have enabled idempotence, the reenqueued batch must always have a sequence.\n if (batch.baseSequence() == RecordBatch.NO_SEQUENCE)\n throw new IllegalStateException(\"Trying to re-enqueue a batch which doesn't have a sequence even \" +\n \"though idempotency is enabled.\");\n\n if (transactionManager.nextBatchBySequence(batch.topicPartition) == null)\n throw new IllegalStateException(\"We are re-enqueueing a batch which is not tracked as part of the in flight \" +\n \"requests. batch.topicPartition: \" + batch.topicPartition + \"; batch.baseSequence: \" + batch.baseSequence());\n\n ProducerBatch firstBatchInQueue = deque.peekFirst();\n if (firstBatchInQueue != null && firstBatchInQueue.hasSequence() && firstBatchInQueue.baseSequence() < batch.baseSequence()) {\n // The incoming batch can't be inserted at the front of the queue without violating the sequence ordering.\n // This means that the incoming batch should be placed somewhere further back.\n // We need to find the right place for the incoming batch and insert it there.\n // We will only enter this branch if we have multiple inflights sent to different brokers and we need to retry\n // the inflight batches.\n //\n // Since we reenqueue exactly one batch a time and ensure that the queue is ordered by sequence always, it\n // is a simple linear scan of a subset of the in flight batches to find the right place in the queue each time.\n List orderedBatches = new ArrayList<>();\n while (deque.peekFirst() != null && deque.peekFirst().hasSequence() && deque.peekFirst().baseSequence() < batch.baseSequence())\n orderedBatches.add(deque.pollFirst());\n\n log.debug(\"Reordered incoming batch with sequence {} for partition {}. 
It was placed in the queue at \" +\n \"position {}\", batch.baseSequence(), batch.topicPartition, orderedBatches.size());\n // Either we have reached a point where there are batches without a sequence (ie. never been drained\n // and are hence in order by default), or the batch at the front of the queue has a sequence greater\n // than the incoming batch. This is the right place to add the incoming batch.\n deque.addFirst(batch);\n\n // Now we have to re insert the previously queued batches in the right order.\n for (int i = orderedBatches.size() - 1; i >= 0; --i) {\n deque.addFirst(orderedBatches.get(i));\n }\n\n // At this point, the incoming batch has been queued in the correct place according to its sequence.\n } else {\n deque.addFirst(batch);\n }\n }\n\n /**\n * Get a list of nodes whose partitions are ready to be sent, and the earliest time at which any non-sendable\n * partition will be ready; Also return the flag for whether there are any unknown leaders for the accumulated\n * partition batches.\n *

\n * A destination node is ready to send data if:\n *

    \n *
  1. There is at least one partition that is not backing off its send\n *
  2. and those partitions are not muted (to prevent reordering if\n * {@value org.apache.kafka.clients.producer.ProducerConfig#MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION}\n * is set to one)
  3. \n *
  4. and any of the following are true
  5. \n *
      \n *
    • The record set is full
    • \n *
    • The record set has sat in the accumulator for at least lingerMs milliseconds
    • \n *
    • The accumulator is out of memory and threads are blocking waiting for data (in this case all partitions\n * are immediately considered ready).
    • \n *
    • The accumulator has been closed
    • \n *
    \n *
\n */\n public ReadyCheckResult ready(Cluster cluster, long nowMs) {\n Set readyNodes = new HashSet<>();\n long nextReadyCheckDelayMs = Long.MAX_VALUE;\n Set unknownLeaderTopics = new HashSet<>();\n\n boolean exhausted = this.free.queued() > 0;\n for (Map.Entry> entry : this.batches.entrySet()) {\n Deque deque = entry.getValue();\n synchronized (deque) {\n // When producing to a large number of partitions, this path is hot and deques are often empty.\n // We check whether a batch exists first to avoid the more expensive checks whenever possible.\n ProducerBatch batch = deque.peekFirst();\n if (batch != null) {\n TopicPartition part = entry.getKey();\n Node leader = cluster.leaderFor(part);\n if (leader == null) {\n // This is a partition for which leader is not known, but messages are available to send.\n // Note that entries are currently not removed from batches when deque is empty.\n unknownLeaderTopics.add(part.topic());\n } else if (!readyNodes.contains(leader) && !isMuted(part, nowMs)) {\n long waitedTimeMs = batch.waitedTimeMs(nowMs);\n boolean backingOff = batch.attempts() > 0 && waitedTimeMs < retryBackoffMs;\n long timeToWaitMs = backingOff ? retryBackoffMs : lingerMs;\n boolean full = deque.size() > 1 || batch.isFull();\n boolean expired = waitedTimeMs >= timeToWaitMs;\n boolean sendable = full || expired || exhausted || closed || flushInProgress();\n if (sendable && !backingOff) {\n readyNodes.add(leader);\n } else {\n long timeLeftMs = Math.max(timeToWaitMs - waitedTimeMs, 0);\n // Note that this results in a conservative estimate since an un-sendable partition may have\n // a leader that will later be found to have sendable data. 
However, this is good enough\n // since we'll just wake up and then sleep again for the remaining time.\n nextReadyCheckDelayMs = Math.min(timeLeftMs, nextReadyCheckDelayMs);\n }\n }\n }\n }\n }\n return new ReadyCheckResult(readyNodes, nextReadyCheckDelayMs, unknownLeaderTopics);\n }\n\n /**\n * Check whether there are any batches which haven't been drained\n */\n public boolean hasUndrained() {\n for (Map.Entry> entry : this.batches.entrySet()) {\n Deque deque = entry.getValue();\n synchronized (deque) {\n if (!deque.isEmpty())\n return true;\n }\n }\n return false;\n }\n\n private boolean shouldStopDrainBatchesForPartition(ProducerBatch first, TopicPartition tp) {\n ProducerIdAndEpoch producerIdAndEpoch = null;\n if (transactionManager != null) {\n if (!transactionManager.isSendToPartitionAllowed(tp))\n return true;\n\n producerIdAndEpoch = transactionManager.producerIdAndEpoch();\n if (!producerIdAndEpoch.isValid())\n // we cannot send the batch until we have refreshed the producer id\n return true;\n\n if (!first.hasSequence() && transactionManager.hasUnresolvedSequence(first.topicPartition))\n // Don't drain any new batches while the state of previous sequence numbers\n // is unknown. The previous batches would be unknown if they were aborted\n // on the client after being sent to the broker at least once.\n return true;\n\n int firstInFlightSequence = transactionManager.firstInFlightSequence(first.topicPartition);\n if (firstInFlightSequence != RecordBatch.NO_SEQUENCE && first.hasSequence()\n && first.baseSequence() != firstInFlightSequence)\n // If the queued batch already has an assigned sequence, then it is being retried.\n // In this case, we wait until the next immediate batch is ready and drain that.\n // We only move on when the next in line batch is complete (either successfully or due to\n // a fatal broker error). 
This effectively reduces our in flight request count to 1.\n return true;\n }\n return false;\n }\n\n private List drainBatchesForOneNode(Cluster cluster, Node node, int maxSize, long now) {\n int size = 0;\n List parts = cluster.partitionsForNode(node.id());\n List ready = new ArrayList<>();\n /* to make starvation less likely this loop doesn't start at 0 */\n int start = drainIndex = drainIndex % parts.size();\n do {\n PartitionInfo part = parts.get(drainIndex);\n TopicPartition tp = new TopicPartition(part.topic(), part.partition());\n this.drainIndex = (this.drainIndex + 1) % parts.size();\n\n // Only proceed if the partition has no in-flight batches.\n if (isMuted(tp, now))\n continue;\n\n Deque deque = getDeque(tp);\n if (deque == null)\n continue;\n\n synchronized (deque) {\n // invariant: !isMuted(tp,now) && deque != null\n ProducerBatch first = deque.peekFirst();\n if (first == null)\n continue;\n\n // first != null\n boolean backoff = first.attempts() > 0 && first.waitedTimeMs(now) < retryBackoffMs;\n // Only drain the batch if it is not during backoff period.\n if (backoff)\n continue;\n\n if (size + first.estimatedSizeInBytes() > maxSize && !ready.isEmpty()) {\n // there is a rare case that a single batch size is larger than the request size due to\n // compression; in this case we will still eventually send this batch in a single request\n break;\n } else {\n if (shouldStopDrainBatchesForPartition(first, tp))\n break;\n\n boolean isTransactional = transactionManager != null && transactionManager.isTransactional();\n ProducerIdAndEpoch producerIdAndEpoch =\n transactionManager != null ? transactionManager.producerIdAndEpoch() : null;\n ProducerBatch batch = deque.pollFirst();\n if (producerIdAndEpoch != null && !batch.hasSequence()) {\n // If the batch already has an assigned sequence, then we should not change the producer id and\n // sequence number, since this may introduce duplicates. 
In particular, the previous attempt\n // may actually have been accepted, and if we change the producer id and sequence here, this\n // attempt will also be accepted, causing a duplicate.\n //\n // Additionally, we update the next sequence number bound for the partition, and also have\n // the transaction manager track the batch so as to ensure that sequence ordering is maintained\n // even if we receive out of order responses.\n batch.setProducerState(producerIdAndEpoch, transactionManager.sequenceNumber(batch.topicPartition), isTransactional);\n transactionManager.incrementSequenceNumber(batch.topicPartition, batch.recordCount);\n log.debug(\"Assigned producerId {} and producerEpoch {} to batch with base sequence \" +\n \"{} being sent to partition {}\", producerIdAndEpoch.producerId,\n producerIdAndEpoch.epoch, batch.baseSequence(), tp);\n\n transactionManager.addInFlightBatch(batch);\n }\n batch.close();\n size += batch.records().sizeInBytes();\n ready.add(batch);\n\n batch.drained(now);\n }\n }\n } while (start != drainIndex);\n return ready;\n }\n\n /**\n * Drain all the data for the given nodes and collate them into a list of batches that will fit within the specified\n * size on a per-node basis. 
This method attempts to avoid choosing the same topic-node over and over.\n *\n * @param cluster The current cluster metadata\n * @param nodes The list of node to drain\n * @param maxSize The maximum number of bytes to drain\n * @param now The current unix time in milliseconds\n * @return A list of {@link ProducerBatch} for each node specified with total size less than the requested maxSize.\n */\n public Map> drain(Cluster cluster, Set nodes, int maxSize, long now) {\n if (nodes.isEmpty())\n return Collections.emptyMap();\n\n Map> batches = new HashMap<>();\n for (Node node : nodes) {\n List ready = drainBatchesForOneNode(cluster, node, maxSize, now);\n batches.put(node.id(), ready);\n }\n return batches;\n }\n\n /**\n * The earliest absolute time a batch will expire (in milliseconds)\n */\n public Long nextExpiryTimeMs() {\n return this.nextBatchExpiryTimeMs;\n }\n\n private Deque getDeque(TopicPartition tp) {\n return batches.get(tp);\n }\n\n /**\n * Get the deque for the given topic-partition, creating it if necessary.\n */\n private Deque getOrCreateDeque(TopicPartition tp) {\n Deque d = this.batches.get(tp);\n if (d != null)\n return d;\n d = new ArrayDeque<>();\n Deque previous = this.batches.putIfAbsent(tp, d);\n if (previous == null)\n return d;\n else\n return previous;\n }\n\n /**\n * Deallocate the record batch\n */\n public void deallocate(ProducerBatch batch) {\n incomplete.remove(batch);\n // Only deallocate the batch if it is not a split batch because split batch are allocated outside the\n // buffer pool.\n if (!batch.isSplitBatch())\n free.deallocate(batch.buffer(), batch.initialCapacity());\n }\n\n /**\n * Package private for unit test. 
Get the buffer pool remaining size in bytes.\n */\n long bufferPoolAvailableMemory() {\n return free.availableMemory();\n }\n\n /**\n * Are there any threads currently waiting on a flush?\n *\n * package private for test\n */\n boolean flushInProgress() {\n return flushesInProgress.get() > 0;\n }\n\n /* Visible for testing */\n Map> batches() {\n return Collections.unmodifiableMap(batches);\n }\n\n /**\n * Initiate the flushing of data from the accumulator...this makes all requests immediately ready\n */\n public void beginFlush() {\n this.flushesInProgress.getAndIncrement();\n }\n\n /**\n * Are there any threads currently appending messages?\n */\n private boolean appendsInProgress() {\n return appendsInProgress.get() > 0;\n }\n\n /**\n * Mark all partitions as ready to send and block until the send is complete\n */\n public void awaitFlushCompletion() throws InterruptedException {\n try {\n for (ProducerBatch batch : this.incomplete.copyAll())\n batch.produceFuture.await();\n } finally {\n this.flushesInProgress.decrementAndGet();\n }\n }\n\n /**\n * Check whether there are any pending batches (whether sent or unsent).\n */\n public boolean hasIncomplete() {\n return !this.incomplete.isEmpty();\n }\n\n /**\n * This function is only called when sender is closed forcefully. It will fail all the\n * incomplete batches and return.\n */\n public void abortIncompleteBatches() {\n // We need to keep aborting the incomplete batch until no thread is trying to append to\n // 1. Avoid losing batches.\n // 2. Free up memory in case appending threads are blocked on buffer full.\n // This is a tight loop but should be able to get through very quickly.\n do {\n abortBatches();\n } while (appendsInProgress());\n // After this point, no thread will append any messages because they will see the close\n // flag set. 
We need to do the last abort after no thread was appending in case there was a new\n // batch appended by the last appending thread.\n abortBatches();\n this.batches.clear();\n }\n\n /**\n * Go through incomplete batches and abort them.\n */\n private void abortBatches() {\n abortBatches(new KafkaException(\"Producer is closed forcefully.\"));\n }\n\n /**\n * Abort all incomplete batches (whether they have been sent or not)\n */\n void abortBatches(final RuntimeException reason) {\n for (ProducerBatch batch : incomplete.copyAll()) {\n Deque dq = getDeque(batch.topicPartition);\n synchronized (dq) {\n batch.abortRecordAppends();\n dq.remove(batch);\n }\n batch.abort(reason);\n deallocate(batch);\n }\n }\n\n /**\n * Abort any batches which have not been drained\n */\n void abortUndrainedBatches(RuntimeException reason) {\n for (ProducerBatch batch : incomplete.copyAll()) {\n Deque dq = getDeque(batch.topicPartition);\n boolean aborted = false;\n synchronized (dq) {\n if ((transactionManager != null && !batch.hasSequence()) || (transactionManager == null && !batch.isClosed())) {\n aborted = true;\n batch.abortRecordAppends();\n dq.remove(batch);\n }\n }\n if (aborted) {\n batch.abort(reason);\n deallocate(batch);\n }\n }\n }\n\n public void mutePartition(TopicPartition tp) {\n muted.put(tp, Long.MAX_VALUE);\n }\n\n public void unmutePartition(TopicPartition tp, long throttleUntilTimeMs) {\n muted.put(tp, throttleUntilTimeMs);\n }\n\n /**\n * Close this accumulator and force all the record buffers to be drained\n */\n public void close() {\n this.closed = true;\n }\n\n /*\n * Metadata about a record just appended to the record accumulator\n */\n public final static class RecordAppendResult {\n public final FutureRecordMetadata future;\n public final boolean batchIsFull;\n public final boolean newBatchCreated;\n public final boolean abortForNewBatch;\n\n public RecordAppendResult(FutureRecordMetadata future, boolean batchIsFull, boolean newBatchCreated, boolean 
abortForNewBatch) {\n this.future = future;\n this.batchIsFull = batchIsFull;\n this.newBatchCreated = newBatchCreated;\n this.abortForNewBatch = abortForNewBatch;\n }\n }\n\n /*\n * The set of nodes that have at least one complete record batch in the accumulator\n */\n public final static class ReadyCheckResult {\n public final Set readyNodes;\n public final long nextReadyCheckDelayMs;\n public final Set unknownLeaderTopics;\n\n public ReadyCheckResult(Set readyNodes, long nextReadyCheckDelayMs, Set unknownLeaderTopics) {\n this.readyNodes = readyNodes;\n this.nextReadyCheckDelayMs = nextReadyCheckDelayMs;\n this.unknownLeaderTopics = unknownLeaderTopics;\n }\n }\n}\n"},"new_file":{"kind":"string","value":"clients/src/main/java/org/apache/kafka/clients/producer/internals/RecordAccumulator.java"},"old_contents":{"kind":"string","value":"/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage org.apache.kafka.clients.producer.internals;\n\nimport java.nio.ByteBuffer;\nimport java.util.ArrayDeque;\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.Deque;\nimport java.util.HashMap;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Objects;\nimport java.util.Set;\nimport java.util.concurrent.ConcurrentMap;\nimport java.util.concurrent.atomic.AtomicInteger;\nimport org.apache.kafka.clients.ApiVersions;\nimport org.apache.kafka.clients.producer.Callback;\nimport org.apache.kafka.common.Cluster;\nimport org.apache.kafka.common.KafkaException;\nimport org.apache.kafka.common.MetricName;\nimport org.apache.kafka.common.Node;\nimport org.apache.kafka.common.PartitionInfo;\nimport org.apache.kafka.common.TopicPartition;\nimport org.apache.kafka.common.errors.UnsupportedVersionException;\nimport org.apache.kafka.common.header.Header;\nimport org.apache.kafka.common.metrics.Measurable;\nimport org.apache.kafka.common.metrics.MetricConfig;\nimport org.apache.kafka.common.metrics.Metrics;\nimport org.apache.kafka.common.metrics.Sensor;\nimport org.apache.kafka.common.metrics.stats.Meter;\nimport org.apache.kafka.common.record.AbstractRecords;\nimport org.apache.kafka.common.record.CompressionRatioEstimator;\nimport org.apache.kafka.common.record.CompressionType;\nimport org.apache.kafka.common.record.MemoryRecords;\nimport org.apache.kafka.common.record.MemoryRecordsBuilder;\nimport org.apache.kafka.common.record.Record;\nimport 
org.apache.kafka.common.record.RecordBatch;\nimport org.apache.kafka.common.record.TimestampType;\nimport org.apache.kafka.common.utils.CopyOnWriteMap;\nimport org.apache.kafka.common.utils.LogContext;\nimport org.apache.kafka.common.utils.Time;\nimport org.slf4j.Logger;\n\n/**\n * This class acts as a queue that accumulates records into {@link MemoryRecords}\n * instances to be sent to the server.\n *

\n * The accumulator uses a bounded amount of memory and append calls will block when that memory is exhausted, unless\n * this behavior is explicitly disabled.\n */\npublic final class RecordAccumulator {\n\n private final Logger log;\n private volatile boolean closed;\n private final AtomicInteger flushesInProgress;\n private final AtomicInteger appendsInProgress;\n private final int batchSize;\n private final CompressionType compression;\n private final int lingerMs;\n private final long retryBackoffMs;\n private final int deliveryTimeoutMs;\n private final BufferPool free;\n private final Time time;\n private final ApiVersions apiVersions;\n private final ConcurrentMap> batches;\n private final IncompleteBatches incomplete;\n // The following variables are only accessed by the sender thread, so we don't need to protect them.\n private final Map muted;\n private int drainIndex;\n private final TransactionManager transactionManager;\n private long nextBatchExpiryTimeMs = Long.MAX_VALUE; // the earliest time (absolute) a batch will expire.\n\n /**\n * Create a new record accumulator\n *\n * @param logContext The log context used for logging\n * @param batchSize The size to use when allocating {@link MemoryRecords} instances\n * @param compression The compression codec for the records\n * @param lingerMs An artificial delay time to add before declaring a records instance that isn't full ready for\n * sending. This allows time for more records to arrive. Setting a non-zero lingerMs will trade off some\n * latency for potentially better throughput due to more batching (and hence fewer, larger requests).\n * @param retryBackoffMs An artificial delay time to retry the produce request upon receiving an error. 
This avoids\n * exhausting all retries in a short period of time.\n * @param metrics The metrics\n * @param time The time instance to use\n * @param apiVersions Request API versions for current connected brokers\n * @param transactionManager The shared transaction state object which tracks producer IDs, epochs, and sequence\n * numbers per partition.\n */\n public RecordAccumulator(LogContext logContext,\n int batchSize,\n CompressionType compression,\n int lingerMs,\n long retryBackoffMs,\n int deliveryTimeoutMs,\n Metrics metrics,\n String metricGrpName,\n Time time,\n ApiVersions apiVersions,\n TransactionManager transactionManager,\n BufferPool bufferPool) {\n this.log = logContext.logger(RecordAccumulator.class);\n this.drainIndex = 0;\n this.closed = false;\n this.flushesInProgress = new AtomicInteger(0);\n this.appendsInProgress = new AtomicInteger(0);\n this.batchSize = batchSize;\n this.compression = compression;\n this.lingerMs = lingerMs;\n this.retryBackoffMs = retryBackoffMs;\n this.deliveryTimeoutMs = deliveryTimeoutMs;\n this.batches = new CopyOnWriteMap<>();\n this.free = bufferPool;\n this.incomplete = new IncompleteBatches();\n this.muted = new HashMap<>();\n this.time = time;\n this.apiVersions = apiVersions;\n this.transactionManager = transactionManager;\n registerMetrics(metrics, metricGrpName);\n }\n\n private void registerMetrics(Metrics metrics, String metricGrpName) {\n MetricName metricName = metrics.metricName(\"waiting-threads\", metricGrpName, \"The number of user threads blocked waiting for buffer memory to enqueue their records\");\n Measurable waitingThreads = new Measurable() {\n public double measure(MetricConfig config, long now) {\n return free.queued();\n }\n };\n metrics.addMetric(metricName, waitingThreads);\n\n metricName = metrics.metricName(\"buffer-total-bytes\", metricGrpName, \"The maximum amount of buffer memory the client can use (whether or not it is currently used).\");\n Measurable totalBytes = new Measurable() {\n 
public double measure(MetricConfig config, long now) {\n return free.totalMemory();\n }\n };\n metrics.addMetric(metricName, totalBytes);\n\n metricName = metrics.metricName(\"buffer-available-bytes\", metricGrpName, \"The total amount of buffer memory that is not being used (either unallocated or in the free list).\");\n Measurable availableBytes = new Measurable() {\n public double measure(MetricConfig config, long now) {\n return free.availableMemory();\n }\n };\n metrics.addMetric(metricName, availableBytes);\n\n Sensor bufferExhaustedRecordSensor = metrics.sensor(\"buffer-exhausted-records\");\n MetricName rateMetricName = metrics.metricName(\"buffer-exhausted-rate\", metricGrpName, \"The average per-second number of record sends that are dropped due to buffer exhaustion\");\n MetricName totalMetricName = metrics.metricName(\"buffer-exhausted-total\", metricGrpName, \"The total number of record sends that are dropped due to buffer exhaustion\");\n bufferExhaustedRecordSensor.add(new Meter(rateMetricName, totalMetricName));\n }\n\n /**\n * Add a record to the accumulator, return the append result\n *

\n * The append result will contain the future metadata, and flag for whether the appended batch is full or a new batch is created\n *

\n *\n * @param tp The topic/partition to which this record is being sent\n * @param timestamp The timestamp of the record\n * @param key The key for the record\n * @param value The value for the record\n * @param headers the Headers for the record\n * @param callback The user-supplied callback to execute when the request is complete\n * @param maxTimeToBlock The maximum time in milliseconds to block for buffer memory to be available\n * @param abortOnNewBatch A boolean that indicates returning before a new batch is created and \n * running the the partitioner's onNewBatch method before trying to append again\n */\n public RecordAppendResult append(TopicPartition tp,\n long timestamp,\n byte[] key,\n byte[] value,\n Header[] headers,\n Callback callback,\n long maxTimeToBlock,\n boolean abortOnNewBatch) throws InterruptedException {\n // We keep track of the number of appending thread to make sure we do not miss batches in\n // abortIncompleteBatches().\n appendsInProgress.incrementAndGet();\n ByteBuffer buffer = null;\n if (headers == null) headers = Record.EMPTY_HEADERS;\n try {\n // check if we have an in-progress batch\n Deque dq = getOrCreateDeque(tp);\n synchronized (dq) {\n if (closed)\n throw new KafkaException(\"Producer closed while send in progress\");\n RecordAppendResult appendResult = tryAppend(timestamp, key, value, headers, callback, dq);\n if (appendResult != null)\n return appendResult;\n }\n\n // we don't have an in-progress record batch try to allocate a new batch\n if (abortOnNewBatch) {\n // Return a result that will cause another call to append.\n return new RecordAppendResult(null, false, false, true);\n }\n \n byte maxUsableMagic = apiVersions.maxUsableProduceMagic();\n int size = Math.max(this.batchSize, AbstractRecords.estimateSizeInBytesUpperBound(maxUsableMagic, compression, key, value, headers));\n log.trace(\"Allocating a new {} byte message buffer for topic {} partition {}\", size, tp.topic(), tp.partition());\n buffer = 
free.allocate(size, maxTimeToBlock);\n synchronized (dq) {\n // Need to check if producer is closed again after grabbing the dequeue lock.\n if (closed)\n throw new KafkaException(\"Producer closed while send in progress\");\n\n RecordAppendResult appendResult = tryAppend(timestamp, key, value, headers, callback, dq);\n if (appendResult != null) {\n // Somebody else found us a batch, return the one we waited for! Hopefully this doesn't happen often...\n return appendResult;\n }\n\n MemoryRecordsBuilder recordsBuilder = recordsBuilder(buffer, maxUsableMagic);\n ProducerBatch batch = new ProducerBatch(tp, recordsBuilder, time.milliseconds());\n FutureRecordMetadata future = Objects.requireNonNull(batch.tryAppend(timestamp, key, value, headers,\n callback, time.milliseconds()));\n\n dq.addLast(batch);\n incomplete.add(batch);\n\n // Don't deallocate this buffer in the finally block as it's being used in the record batch\n buffer = null;\n return new RecordAppendResult(future, dq.size() > 1 || batch.isFull(), true, false);\n }\n } finally {\n if (buffer != null)\n free.deallocate(buffer);\n appendsInProgress.decrementAndGet();\n }\n }\n\n private MemoryRecordsBuilder recordsBuilder(ByteBuffer buffer, byte maxUsableMagic) {\n if (transactionManager != null && maxUsableMagic < RecordBatch.MAGIC_VALUE_V2) {\n throw new UnsupportedVersionException(\"Attempting to use idempotence with a broker which does not \" +\n \"support the required message format (v2). The broker must be version 0.11 or later.\");\n }\n return MemoryRecords.builder(buffer, maxUsableMagic, compression, TimestampType.CREATE_TIME, 0L);\n }\n\n /**\n * Try to append to a ProducerBatch.\n *\n * If it is full, we return null and a new batch is created. We also close the batch for record appends to free up\n * resources like compression buffers. The batch will be fully closed (ie. 
the record batch headers will be written\n * and memory records built) in one of the following cases (whichever comes first): right before send,\n * if it is expired, or when the producer is closed.\n */\n private RecordAppendResult tryAppend(long timestamp, byte[] key, byte[] value, Header[] headers,\n Callback callback, Deque deque) {\n ProducerBatch last = deque.peekLast();\n if (last != null) {\n FutureRecordMetadata future = last.tryAppend(timestamp, key, value, headers, callback, time.milliseconds());\n if (future == null)\n last.closeForRecordAppends();\n else\n return new RecordAppendResult(future, deque.size() > 1 || last.isFull(), false, false);\n }\n return null;\n }\n\n private boolean isMuted(TopicPartition tp, long now) {\n // Take care to avoid unnecessary map look-ups because this method is a hotspot if producing to a\n // large number of partitions\n Long throttleUntilTime = muted.get(tp);\n if (throttleUntilTime == null)\n return false;\n\n if (now >= throttleUntilTime) {\n muted.remove(tp);\n return false;\n }\n \n return true;\n }\n\n public void resetNextBatchExpiryTime() {\n nextBatchExpiryTimeMs = Long.MAX_VALUE;\n }\n\n public void maybeUpdateNextBatchExpiryTime(ProducerBatch batch) {\n if (batch.createdMs + deliveryTimeoutMs > 0) {\n // the non-negative check is to guard us against potential overflow due to setting\n // a large value for deliveryTimeoutMs\n nextBatchExpiryTimeMs = Math.min(nextBatchExpiryTimeMs, batch.createdMs + deliveryTimeoutMs);\n } else {\n log.warn(\"Skipping next batch expiry time update due to addition overflow: \"\n + \"batch.createMs={}, deliveryTimeoutMs={}\", batch.createdMs, deliveryTimeoutMs);\n }\n }\n\n /**\n * Get a list of batches which have been sitting in the accumulator too long and need to be expired.\n */\n public List expiredBatches(long now) {\n List expiredBatches = new ArrayList<>();\n for (Map.Entry> entry : this.batches.entrySet()) {\n // expire the batches in the order of sending\n Deque deque 
= entry.getValue();\n synchronized (deque) {\n while (!deque.isEmpty()) {\n ProducerBatch batch = deque.getFirst();\n if (batch.hasReachedDeliveryTimeout(deliveryTimeoutMs, now)) {\n deque.poll();\n batch.abortRecordAppends();\n expiredBatches.add(batch);\n } else {\n maybeUpdateNextBatchExpiryTime(batch);\n break;\n }\n }\n }\n }\n return expiredBatches;\n }\n\n public long getDeliveryTimeoutMs() {\n return deliveryTimeoutMs;\n }\n\n /**\n * Re-enqueue the given record batch in the accumulator. In Sender.completeBatch method, we check\n * whether the batch has reached deliveryTimeoutMs or not. Hence we do not do the delivery timeout check here.\n */\n public void reenqueue(ProducerBatch batch, long now) {\n batch.reenqueued(now);\n Deque deque = getOrCreateDeque(batch.topicPartition);\n synchronized (deque) {\n if (transactionManager != null)\n insertInSequenceOrder(deque, batch);\n else\n deque.addFirst(batch);\n }\n }\n\n /**\n * Split the big batch that has been rejected and reenqueue the split batches in to the accumulator.\n * @return the number of split batches.\n */\n public int splitAndReenqueue(ProducerBatch bigBatch) {\n // Reset the estimated compression ratio to the initial value or the big batch compression ratio, whichever\n // is bigger. There are several different ways to do the reset. 
We chose the most conservative one to ensure\n // the split doesn't happen too often.\n CompressionRatioEstimator.setEstimation(bigBatch.topicPartition.topic(), compression,\n Math.max(1.0f, (float) bigBatch.compressionRatio()));\n Deque dq = bigBatch.split(this.batchSize);\n int numSplitBatches = dq.size();\n Deque partitionDequeue = getOrCreateDeque(bigBatch.topicPartition);\n while (!dq.isEmpty()) {\n ProducerBatch batch = dq.pollLast();\n incomplete.add(batch);\n // We treat the newly split batches as if they are not even tried.\n synchronized (partitionDequeue) {\n if (transactionManager != null) {\n // We should track the newly created batches since they already have assigned sequences.\n transactionManager.addInFlightBatch(batch);\n insertInSequenceOrder(partitionDequeue, batch);\n } else {\n partitionDequeue.addFirst(batch);\n }\n }\n }\n return numSplitBatches;\n }\n\n // We will have to do extra work to ensure the queue is in order when requests are being retried and there are\n // multiple requests in flight to that partition. If the first in flight request fails to append, then all the\n // subsequent in flight requests will also fail because the sequence numbers will not be accepted.\n //\n // Further, once batches are being retried, we are reduced to a single in flight request for that partition. So when\n // the subsequent batches come back in sequence order, they will have to be placed further back in the queue.\n //\n // Note that this assumes that all the batches in the queue which have an assigned sequence also have the current\n // producer id. 
We will not attempt to reorder messages if the producer id has changed, we will throw an\n // IllegalStateException instead.\n private void insertInSequenceOrder(Deque deque, ProducerBatch batch) {\n // When we are requeing and have enabled idempotence, the reenqueued batch must always have a sequence.\n if (batch.baseSequence() == RecordBatch.NO_SEQUENCE)\n throw new IllegalStateException(\"Trying to re-enqueue a batch which doesn't have a sequence even \" +\n \"though idempotency is enabled.\");\n\n if (transactionManager.nextBatchBySequence(batch.topicPartition) == null)\n throw new IllegalStateException(\"We are re-enqueueing a batch which is not tracked as part of the in flight \" +\n \"requests. batch.topicPartition: \" + batch.topicPartition + \"; batch.baseSequence: \" + batch.baseSequence());\n\n ProducerBatch firstBatchInQueue = deque.peekFirst();\n if (firstBatchInQueue != null && firstBatchInQueue.hasSequence() && firstBatchInQueue.baseSequence() < batch.baseSequence()) {\n // The incoming batch can't be inserted at the front of the queue without violating the sequence ordering.\n // This means that the incoming batch should be placed somewhere further back.\n // We need to find the right place for the incoming batch and insert it there.\n // We will only enter this branch if we have multiple inflights sent to different brokers and we need to retry\n // the inflight batches.\n //\n // Since we reenqueue exactly one batch a time and ensure that the queue is ordered by sequence always, it\n // is a simple linear scan of a subset of the in flight batches to find the right place in the queue each time.\n List orderedBatches = new ArrayList<>();\n while (deque.peekFirst() != null && deque.peekFirst().hasSequence() && deque.peekFirst().baseSequence() < batch.baseSequence())\n orderedBatches.add(deque.pollFirst());\n\n log.debug(\"Reordered incoming batch with sequence {} for partition {}. 
It was placed in the queue at \" +\n \"position {}\", batch.baseSequence(), batch.topicPartition, orderedBatches.size());\n // Either we have reached a point where there are batches without a sequence (ie. never been drained\n // and are hence in order by default), or the batch at the front of the queue has a sequence greater\n // than the incoming batch. This is the right place to add the incoming batch.\n deque.addFirst(batch);\n\n // Now we have to re insert the previously queued batches in the right order.\n for (int i = orderedBatches.size() - 1; i >= 0; --i) {\n deque.addFirst(orderedBatches.get(i));\n }\n\n // At this point, the incoming batch has been queued in the correct place according to its sequence.\n } else {\n deque.addFirst(batch);\n }\n }\n\n /**\n * Get a list of nodes whose partitions are ready to be sent, and the earliest time at which any non-sendable\n * partition will be ready; Also return the flag for whether there are any unknown leaders for the accumulated\n * partition batches.\n *

\n * A destination node is ready to send data if:\n *

    \n *
  1. There is at least one partition that is not backing off its send\n *
  2. and those partitions are not muted (to prevent reordering if\n * {@value org.apache.kafka.clients.producer.ProducerConfig#MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION}\n * is set to one)
  3. \n *
  4. and any of the following are true
  5. \n *
      \n *
    • The record set is full
    • \n *
    • The record set has sat in the accumulator for at least lingerMs milliseconds
    • \n *
    • The accumulator is out of memory and threads are blocking waiting for data (in this case all partitions\n * are immediately considered ready).
    • \n *
    • The accumulator has been closed
    • \n *
    \n *
\n */\n public ReadyCheckResult ready(Cluster cluster, long nowMs) {\n Set readyNodes = new HashSet<>();\n long nextReadyCheckDelayMs = Long.MAX_VALUE;\n Set unknownLeaderTopics = new HashSet<>();\n\n boolean exhausted = this.free.queued() > 0;\n for (Map.Entry> entry : this.batches.entrySet()) {\n TopicPartition part = entry.getKey();\n Deque deque = entry.getValue();\n\n Node leader = cluster.leaderFor(part);\n synchronized (deque) {\n if (leader == null && !deque.isEmpty()) {\n // This is a partition for which leader is not known, but messages are available to send.\n // Note that entries are currently not removed from batches when deque is empty.\n unknownLeaderTopics.add(part.topic());\n } else if (!readyNodes.contains(leader) && !isMuted(part, nowMs)) {\n ProducerBatch batch = deque.peekFirst();\n if (batch != null) {\n long waitedTimeMs = batch.waitedTimeMs(nowMs);\n boolean backingOff = batch.attempts() > 0 && waitedTimeMs < retryBackoffMs;\n long timeToWaitMs = backingOff ? retryBackoffMs : lingerMs;\n boolean full = deque.size() > 1 || batch.isFull();\n boolean expired = waitedTimeMs >= timeToWaitMs;\n boolean sendable = full || expired || exhausted || closed || flushInProgress();\n if (sendable && !backingOff) {\n readyNodes.add(leader);\n } else {\n long timeLeftMs = Math.max(timeToWaitMs - waitedTimeMs, 0);\n // Note that this results in a conservative estimate since an un-sendable partition may have\n // a leader that will later be found to have sendable data. 
However, this is good enough\n // since we'll just wake up and then sleep again for the remaining time.\n nextReadyCheckDelayMs = Math.min(timeLeftMs, nextReadyCheckDelayMs);\n }\n }\n }\n }\n }\n return new ReadyCheckResult(readyNodes, nextReadyCheckDelayMs, unknownLeaderTopics);\n }\n\n /**\n * Check whether there are any batches which haven't been drained\n */\n public boolean hasUndrained() {\n for (Map.Entry> entry : this.batches.entrySet()) {\n Deque deque = entry.getValue();\n synchronized (deque) {\n if (!deque.isEmpty())\n return true;\n }\n }\n return false;\n }\n\n private boolean shouldStopDrainBatchesForPartition(ProducerBatch first, TopicPartition tp) {\n ProducerIdAndEpoch producerIdAndEpoch = null;\n if (transactionManager != null) {\n if (!transactionManager.isSendToPartitionAllowed(tp))\n return true;\n\n producerIdAndEpoch = transactionManager.producerIdAndEpoch();\n if (!producerIdAndEpoch.isValid())\n // we cannot send the batch until we have refreshed the producer id\n return true;\n\n if (!first.hasSequence() && transactionManager.hasUnresolvedSequence(first.topicPartition))\n // Don't drain any new batches while the state of previous sequence numbers\n // is unknown. The previous batches would be unknown if they were aborted\n // on the client after being sent to the broker at least once.\n return true;\n\n int firstInFlightSequence = transactionManager.firstInFlightSequence(first.topicPartition);\n if (firstInFlightSequence != RecordBatch.NO_SEQUENCE && first.hasSequence()\n && first.baseSequence() != firstInFlightSequence)\n // If the queued batch already has an assigned sequence, then it is being retried.\n // In this case, we wait until the next immediate batch is ready and drain that.\n // We only move on when the next in line batch is complete (either successfully or due to\n // a fatal broker error). 
This effectively reduces our in flight request count to 1.\n return true;\n }\n return false;\n }\n\n private List drainBatchesForOneNode(Cluster cluster, Node node, int maxSize, long now) {\n int size = 0;\n List parts = cluster.partitionsForNode(node.id());\n List ready = new ArrayList<>();\n /* to make starvation less likely this loop doesn't start at 0 */\n int start = drainIndex = drainIndex % parts.size();\n do {\n PartitionInfo part = parts.get(drainIndex);\n TopicPartition tp = new TopicPartition(part.topic(), part.partition());\n this.drainIndex = (this.drainIndex + 1) % parts.size();\n\n // Only proceed if the partition has no in-flight batches.\n if (isMuted(tp, now))\n continue;\n\n Deque deque = getDeque(tp);\n if (deque == null)\n continue;\n\n synchronized (deque) {\n // invariant: !isMuted(tp,now) && deque != null\n ProducerBatch first = deque.peekFirst();\n if (first == null)\n continue;\n\n // first != null\n boolean backoff = first.attempts() > 0 && first.waitedTimeMs(now) < retryBackoffMs;\n // Only drain the batch if it is not during backoff period.\n if (backoff)\n continue;\n\n if (size + first.estimatedSizeInBytes() > maxSize && !ready.isEmpty()) {\n // there is a rare case that a single batch size is larger than the request size due to\n // compression; in this case we will still eventually send this batch in a single request\n break;\n } else {\n if (shouldStopDrainBatchesForPartition(first, tp))\n break;\n\n boolean isTransactional = transactionManager != null && transactionManager.isTransactional();\n ProducerIdAndEpoch producerIdAndEpoch =\n transactionManager != null ? transactionManager.producerIdAndEpoch() : null;\n ProducerBatch batch = deque.pollFirst();\n if (producerIdAndEpoch != null && !batch.hasSequence()) {\n // If the batch already has an assigned sequence, then we should not change the producer id and\n // sequence number, since this may introduce duplicates. 
In particular, the previous attempt\n // may actually have been accepted, and if we change the producer id and sequence here, this\n // attempt will also be accepted, causing a duplicate.\n //\n // Additionally, we update the next sequence number bound for the partition, and also have\n // the transaction manager track the batch so as to ensure that sequence ordering is maintained\n // even if we receive out of order responses.\n batch.setProducerState(producerIdAndEpoch, transactionManager.sequenceNumber(batch.topicPartition), isTransactional);\n transactionManager.incrementSequenceNumber(batch.topicPartition, batch.recordCount);\n log.debug(\"Assigned producerId {} and producerEpoch {} to batch with base sequence \" +\n \"{} being sent to partition {}\", producerIdAndEpoch.producerId,\n producerIdAndEpoch.epoch, batch.baseSequence(), tp);\n\n transactionManager.addInFlightBatch(batch);\n }\n batch.close();\n size += batch.records().sizeInBytes();\n ready.add(batch);\n\n batch.drained(now);\n }\n }\n } while (start != drainIndex);\n return ready;\n }\n\n /**\n * Drain all the data for the given nodes and collate them into a list of batches that will fit within the specified\n * size on a per-node basis. 
This method attempts to avoid choosing the same topic-node over and over.\n *\n * @param cluster The current cluster metadata\n * @param nodes The list of node to drain\n * @param maxSize The maximum number of bytes to drain\n * @param now The current unix time in milliseconds\n * @return A list of {@link ProducerBatch} for each node specified with total size less than the requested maxSize.\n */\n public Map> drain(Cluster cluster, Set nodes, int maxSize, long now) {\n if (nodes.isEmpty())\n return Collections.emptyMap();\n\n Map> batches = new HashMap<>();\n for (Node node : nodes) {\n List ready = drainBatchesForOneNode(cluster, node, maxSize, now);\n batches.put(node.id(), ready);\n }\n return batches;\n }\n\n /**\n * The earliest absolute time a batch will expire (in milliseconds)\n */\n public Long nextExpiryTimeMs() {\n return this.nextBatchExpiryTimeMs;\n }\n\n private Deque getDeque(TopicPartition tp) {\n return batches.get(tp);\n }\n\n /**\n * Get the deque for the given topic-partition, creating it if necessary.\n */\n private Deque getOrCreateDeque(TopicPartition tp) {\n Deque d = this.batches.get(tp);\n if (d != null)\n return d;\n d = new ArrayDeque<>();\n Deque previous = this.batches.putIfAbsent(tp, d);\n if (previous == null)\n return d;\n else\n return previous;\n }\n\n /**\n * Deallocate the record batch\n */\n public void deallocate(ProducerBatch batch) {\n incomplete.remove(batch);\n // Only deallocate the batch if it is not a split batch because split batch are allocated outside the\n // buffer pool.\n if (!batch.isSplitBatch())\n free.deallocate(batch.buffer(), batch.initialCapacity());\n }\n\n /**\n * Package private for unit test. 
Get the buffer pool remaining size in bytes.\n */\n long bufferPoolAvailableMemory() {\n return free.availableMemory();\n }\n\n /**\n * Are there any threads currently waiting on a flush?\n *\n * package private for test\n */\n boolean flushInProgress() {\n return flushesInProgress.get() > 0;\n }\n\n /* Visible for testing */\n Map> batches() {\n return Collections.unmodifiableMap(batches);\n }\n\n /**\n * Initiate the flushing of data from the accumulator...this makes all requests immediately ready\n */\n public void beginFlush() {\n this.flushesInProgress.getAndIncrement();\n }\n\n /**\n * Are there any threads currently appending messages?\n */\n private boolean appendsInProgress() {\n return appendsInProgress.get() > 0;\n }\n\n /**\n * Mark all partitions as ready to send and block until the send is complete\n */\n public void awaitFlushCompletion() throws InterruptedException {\n try {\n for (ProducerBatch batch : this.incomplete.copyAll())\n batch.produceFuture.await();\n } finally {\n this.flushesInProgress.decrementAndGet();\n }\n }\n\n /**\n * Check whether there are any pending batches (whether sent or unsent).\n */\n public boolean hasIncomplete() {\n return !this.incomplete.isEmpty();\n }\n\n /**\n * This function is only called when sender is closed forcefully. It will fail all the\n * incomplete batches and return.\n */\n public void abortIncompleteBatches() {\n // We need to keep aborting the incomplete batch until no thread is trying to append to\n // 1. Avoid losing batches.\n // 2. Free up memory in case appending threads are blocked on buffer full.\n // This is a tight loop but should be able to get through very quickly.\n do {\n abortBatches();\n } while (appendsInProgress());\n // After this point, no thread will append any messages because they will see the close\n // flag set. 
We need to do the last abort after no thread was appending in case there was a new\n // batch appended by the last appending thread.\n abortBatches();\n this.batches.clear();\n }\n\n /**\n * Go through incomplete batches and abort them.\n */\n private void abortBatches() {\n abortBatches(new KafkaException(\"Producer is closed forcefully.\"));\n }\n\n /**\n * Abort all incomplete batches (whether they have been sent or not)\n */\n void abortBatches(final RuntimeException reason) {\n for (ProducerBatch batch : incomplete.copyAll()) {\n Deque dq = getDeque(batch.topicPartition);\n synchronized (dq) {\n batch.abortRecordAppends();\n dq.remove(batch);\n }\n batch.abort(reason);\n deallocate(batch);\n }\n }\n\n /**\n * Abort any batches which have not been drained\n */\n void abortUndrainedBatches(RuntimeException reason) {\n for (ProducerBatch batch : incomplete.copyAll()) {\n Deque dq = getDeque(batch.topicPartition);\n boolean aborted = false;\n synchronized (dq) {\n if ((transactionManager != null && !batch.hasSequence()) || (transactionManager == null && !batch.isClosed())) {\n aborted = true;\n batch.abortRecordAppends();\n dq.remove(batch);\n }\n }\n if (aborted) {\n batch.abort(reason);\n deallocate(batch);\n }\n }\n }\n\n public void mutePartition(TopicPartition tp) {\n muted.put(tp, Long.MAX_VALUE);\n }\n\n public void unmutePartition(TopicPartition tp, long throttleUntilTimeMs) {\n muted.put(tp, throttleUntilTimeMs);\n }\n\n /**\n * Close this accumulator and force all the record buffers to be drained\n */\n public void close() {\n this.closed = true;\n }\n\n /*\n * Metadata about a record just appended to the record accumulator\n */\n public final static class RecordAppendResult {\n public final FutureRecordMetadata future;\n public final boolean batchIsFull;\n public final boolean newBatchCreated;\n public final boolean abortForNewBatch;\n\n public RecordAppendResult(FutureRecordMetadata future, boolean batchIsFull, boolean newBatchCreated, boolean 
abortForNewBatch) {\n this.future = future;\n this.batchIsFull = batchIsFull;\n this.newBatchCreated = newBatchCreated;\n this.abortForNewBatch = abortForNewBatch;\n }\n }\n\n /*\n * The set of nodes that have at least one complete record batch in the accumulator\n */\n public final static class ReadyCheckResult {\n public final Set readyNodes;\n public final long nextReadyCheckDelayMs;\n public final Set unknownLeaderTopics;\n\n public ReadyCheckResult(Set readyNodes, long nextReadyCheckDelayMs, Set unknownLeaderTopics) {\n this.readyNodes = readyNodes;\n this.nextReadyCheckDelayMs = nextReadyCheckDelayMs;\n this.unknownLeaderTopics = unknownLeaderTopics;\n }\n }\n}\n"},"message":{"kind":"string","value":"MINOR: Avoid unnecessary leaderFor calls when ProducerBatch queue empty (#7196)\n\nThe RecordAccumulator ready calls `leaderFor` unnecessarily when the ProducerBatch\r\nqueue is empty. When producing to many partitions, the queue is often empty and the\r\n`leaderFor` call can be expensive in comparison. 
Remove the unnecessary call.\r\n\r\nReviewers: Ismael Juma <0794a5c7e497910d682bf2c54c66cc30760c0221@juma.me.uk>"},"old_file":{"kind":"string","value":"clients/src/main/java/org/apache/kafka/clients/producer/internals/RecordAccumulator.java"},"subject":{"kind":"string","value":"MINOR: Avoid unnecessary leaderFor calls when ProducerBatch queue empty (#7196)"},"git_diff":{"kind":"string","value":"lients/src/main/java/org/apache/kafka/clients/producer/internals/RecordAccumulator.java\n \n boolean exhausted = this.free.queued() > 0;\n for (Map.Entry> entry : this.batches.entrySet()) {\n TopicPartition part = entry.getKey();\n Deque deque = entry.getValue();\n\n Node leader = cluster.leaderFor(part);\n synchronized (deque) {\n if (leader == null && !deque.isEmpty()) {\n // This is a partition for which leader is not known, but messages are available to send.\n // Note that entries are currently not removed from batches when deque is empty.\n unknownLeaderTopics.add(part.topic());\n } else if (!readyNodes.contains(leader) && !isMuted(part, nowMs)) {\n ProducerBatch batch = deque.peekFirst();\n if (batch != null) {\n // When producing to a large number of partitions, this path is hot and deques are often empty.\n // We check whether a batch exists first to avoid the more expensive checks whenever possible.\n ProducerBatch batch = deque.peekFirst();\n if (batch != null) {\n TopicPartition part = entry.getKey();\n Node leader = cluster.leaderFor(part);\n if (leader == null) {\n // This is a partition for which leader is not known, but messages are available to send.\n // Note that entries are currently not removed from batches when deque is empty.\n unknownLeaderTopics.add(part.topic());\n } else if (!readyNodes.contains(leader) && !isMuted(part, nowMs)) {\n long waitedTimeMs = batch.waitedTimeMs(nowMs);\n boolean backingOff = batch.attempts() > 0 && waitedTimeMs < retryBackoffMs;\n long timeToWaitMs = backingOff ? 
retryBackoffMs : lingerMs;"}}},{"rowIdx":2064,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"d033270582538fcc584da8e2ae0c56a3f52a703d"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"bozimmerman/CoffeeMud,MaxRau/CoffeeMud,Tycheo/coffeemud,Tycheo/coffeemud,oriontribunal/CoffeeMud,MaxRau/CoffeeMud,sfunk1x/CoffeeMud,sfunk1x/CoffeeMud,Tycheo/coffeemud,bozimmerman/CoffeeMud,bozimmerman/CoffeeMud,bozimmerman/CoffeeMud,sfunk1x/CoffeeMud,MaxRau/CoffeeMud,MaxRau/CoffeeMud,oriontribunal/CoffeeMud,oriontribunal/CoffeeMud,oriontribunal/CoffeeMud,sfunk1x/CoffeeMud,Tycheo/coffeemud"},"new_contents":{"kind":"string","value":"package com.planet_ink.coffee_mud.MOBS;\r\n\r\n\r\nimport com.planet_ink.coffee_mud.interfaces.*;\r\nimport com.planet_ink.coffee_mud.common.*;\r\nimport com.planet_ink.coffee_mud.utils.*;\r\nimport java.util.*;\r\n\r\npublic class StdRideable extends StdMOB implements Rideable\r\n{\r\n\tprotected int rideBasis=Rideable.RIDEABLE_LAND;\r\n\tprotected int mobCapacity=2;\r\n\tprotected Vector riders=new Vector();\r\n\tpublic StdRideable()\r\n\t{\r\n\t\tsuper();\r\n\t\tUsername=\"a horse\";\r\n\t\tsetDescription(\"A brown riding horse looks sturdy and reliable.\");\r\n\t\tsetDisplayText(\"a horse stands here.\");\r\n\t\tbaseEnvStats().setWeight(700);\r\n\t\trecoverEnvStats();\r\n\t}\r\n\tpublic Environmental newInstance()\r\n\t{\r\n\t\treturn new StdRideable();\r\n\t}\r\n\tpublic DeadBody killMeDead()\r\n\t{\r\n\t\twhile(riders.size()>0)\r\n\t\t{\r\n\t\t\tMOB mob=fetchRider(0);\r\n\t\t\tif(mob!=null)\r\n\t\t\t{\r\n\t\t\t\tmob.setRiding(null);\r\n\t\t\t\tdelRider(mob);\r\n\t\t\t}\r\n\t\t}\r\n\t\treturn super.killMeDead();\r\n\t}\r\n\t\r\n\t// common item/mob stuff\r\n\tpublic int rideBasis(){return rideBasis;}\r\n\tpublic void setRideBasis(int basis){rideBasis=basis;}\r\n\tpublic int mobCapacity(){ 
return mobCapacity;}\r\n\tpublic void setMobCapacity(int newCapacity){mobCapacity=newCapacity;}\r\n\tpublic int numRiders(){return riders.size();}\r\n\tpublic boolean mobileRideBasis(){return true;}\r\n\tpublic MOB fetchRider(int which)\r\n\t{\r\n\t\ttry\t{ return (MOB)riders.elementAt(which);\t}\r\n\t\tcatch(java.lang.ArrayIndexOutOfBoundsException e){}\r\n\t\treturn null;\r\n\t}\r\n\tpublic void addRider(MOB mob)\r\n\t{ \r\n\t\tif(mob!=null)\r\n\t\t\triders.addElement(mob);\r\n\t}\r\n\tpublic void delRider(MOB mob)\r\n\t{ \r\n\t\tif(mob!=null)\r\n\t\t\triders.removeElement(mob);\r\n\t}\r\n\tpublic void recoverEnvStats()\r\n\t{\r\n\t\tsuper.recoverEnvStats();\r\n\t\tif(rideBasis==Rideable.RIDEABLE_AIR)\r\n\t\t\tenvStats().setDisposition(envStats().disposition()|EnvStats.IS_FLYING);\r\n\t\telse\r\n\t\tif(rideBasis==Rideable.RIDEABLE_WATER)\r\n\t\t\tenvStats().setDisposition(envStats().disposition()|EnvStats.IS_SWIMMING);\r\n\t}\r\n\tpublic void affectEnvStats(Environmental affected, EnvStats affectableStats)\r\n\t{\r\n\t\tsuper.affectEnvStats(affected,affectableStats);\r\n\t\tif(affected instanceof MOB)\r\n\t\t{\r\n\t\t\tMOB mob=(MOB)affected;\r\n\t\t\tif((mob.isInCombat())&&(mob.rangeToTarget()==0)&&(amRiding(mob)))\r\n\t\t\t{\r\n\t\t\t\taffectableStats.setAttackAdjustment(affectableStats.attackAdjustment()-mob.baseEnvStats().attackAdjustment());\r\n\t\t\t\taffectableStats.setDamage(affectableStats.damage()-mob.baseEnvStats().damage());\r\n\t\t\t}\r\n\t\t}\r\n\t}\r\n\tpublic boolean amRiding(MOB mob)\r\n\t{\r\n\t\treturn riders.contains(mob);\r\n\t}\r\n\tpublic String stateString(){return \"riding on\";}\r\n\tpublic String mountString(int commandType){return \"mount(s)\";}\r\n\tpublic String dismountString(){return \"dismount(s)\";}\r\n\tpublic String stateStringSubject(){return \"being ridden by\";}\r\n\r\n\tpublic boolean okAffect(Affect affect)\r\n\t{\r\n\t\tswitch(affect.targetMinor())\r\n\t\t{\r\n\t\tcase 
Affect.TYP_DISMOUNT:\r\n\t\t\tif(affect.amITarget(this))\r\n\t\t\t{\r\n\t\t\t\tif(!amRiding(affect.source()))\r\n\t\t\t\t{\r\n\t\t\t\t\taffect.source().tell(\"You are not \"+stateString()+\" \"+name()+\"!\");\r\n\t\t\t\t\tif(affect.source().riding()==this)\r\n\t\t\t\t\t\taffect.source().setRiding(null);\r\n\t\t\t\t\treturn false;\r\n\t\t\t\t}\r\n\t\t\t\t// protects from standard mob rejection\r\n\t\t\t\treturn true;\r\n\t\t\t}\r\n\t\t\tbreak;\r\n\t\tcase Affect.TYP_SIT:\r\n\t\t\tif(amRiding(affect.source()))\r\n\t\t\t{\r\n\t\t\t\taffect.source().tell(\"You are \"+stateString()+\" \"+name()+\"!\");\r\n\t\t\t\taffect.source().setRiding(this);\r\n\t\t\t\treturn false;\r\n\t\t\t}\r\n\t\t\telse\r\n\t\t\tif(affect.amITarget(this))\r\n\t\t\t{\r\n\t\t\t\taffect.source().tell(\"You cannot simply sit on \"+name()+\", try 'mount'.\");\r\n\t\t\t\treturn false;\r\n\t\t\t}\r\n\t\t\tbreak;\r\n\t\tcase Affect.TYP_SLEEP:\r\n\t\t\tif(amRiding(affect.source()))\r\n\t\t\t{\r\n\t\t\t\taffect.source().tell(\"You are \"+stateString()+\" \"+name()+\"!\");\r\n\t\t\t\taffect.source().setRiding(this);\r\n\t\t\t\treturn false;\r\n\t\t\t}\r\n\t\t\telse\r\n\t\t\tif(affect.amITarget(this))\r\n\t\t\t{\r\n\t\t\t\taffect.source().tell(\"You cannot lie down on \"+name()+\".\");\r\n\t\t\t\treturn false;\r\n\t\t\t}\r\n\t\t\tbreak;\r\n\t\tcase Affect.TYP_MOUNT:\r\n\t\t\tif(amRiding(affect.source()))\r\n\t\t\t{\r\n\t\t\t\taffect.source().tell(\"You are \"+stateString()+\" \"+name()+\"!\");\r\n\t\t\t\taffect.source().setRiding(this);\r\n\t\t\t\treturn false;\r\n\t\t\t}\r\n\t\t\telse\r\n\t\t\tif(affect.amITarget(this))\r\n\t\t\t{\r\n\t\t\t\tif(numRiders()>=mobCapacity())\r\n\t\t\t\t{\r\n\t\t\t\t\t// for items\r\n\t\t\t\t\t//affect.source().tell(name()+\" is full.\");\r\n\t\t\t\t\t// for mobs\r\n\t\t\t\t\t affect.source().tell(\"No more can fit on \"+name()+\".\");\r\n\t\t\t\t\treturn false;\r\n\t\t\t\t}\r\n\t\t\t\t// protects from standard mob rejection\r\n\t\t\t\treturn 
true;\r\n\t\t\t}\r\n\t\t\tbreak;\r\n\t\tcase Affect.TYP_ENTER:\r\n\t\t\tif(amRiding(affect.source())\r\n\t\t\t &&(affect.target()!=null)\r\n\t\t\t &&(affect.target() instanceof Room))\r\n\t\t\t{\r\n\t\t\t\tRoom sourceRoom=(Room)affect.source().location();\r\n\t\t\t\tRoom targetRoom=(Room)affect.target();\r\n\t\t\t\tif((sourceRoom!=null)&&(!affect.amITarget(sourceRoom)))\r\n\t\t\t\t{\r\n\t\t\t\t\tboolean ok=!((targetRoom.domainType()&Room.INDOORS)>0);\r\n\t\t\t\t\tswitch(rideBasis)\r\n\t\t\t\t\t{\r\n\t\t\t\t\tcase Rideable.RIDEABLE_LAND:\r\n\t\t\t\t\t\tif((targetRoom.domainType()==Room.DOMAIN_OUTDOORS_AIR)\r\n\t\t\t\t\t\t ||(targetRoom.domainType()==Room.DOMAIN_OUTDOORS_UNDERWATER)\r\n\t\t\t\t\t\t ||(targetRoom.domainType()==Room.DOMAIN_INDOORS_UNDERWATER)\r\n\t\t\t\t\t\t ||(targetRoom.domainType()==Room.DOMAIN_INDOORS_WATERSURFACE)\r\n\t\t\t\t\t\t ||(targetRoom.domainType()==Room.DOMAIN_INDOORS_AIR)\r\n\t\t\t\t\t\t ||(targetRoom.domainType()==Room.DOMAIN_OUTDOORS_WATERSURFACE))\r\n\t\t\t\t\t\t\tok=false;\r\n\t\t\t\t\t\tbreak;\r\n\t\t\t\t\tcase Rideable.RIDEABLE_AIR:\r\n\t\t\t\t\t\tbreak;\r\n\t\t\t\t\tcase Rideable.RIDEABLE_WATER:\r\n\t\t\t\t\t\tif((sourceRoom.domainType()!=Room.DOMAIN_OUTDOORS_WATERSURFACE)\r\n\t\t\t\t\t\t\t&&(targetRoom.domainType()!=Room.DOMAIN_OUTDOORS_WATERSURFACE)\r\n\t\t\t\t\t\t\t&&(sourceRoom.domainType()!=Room.DOMAIN_INDOORS_WATERSURFACE)\r\n\t\t\t\t\t\t\t&&(targetRoom.domainType()!=Room.DOMAIN_INDOORS_WATERSURFACE))\r\n\t\t\t\t\t\t\tok=false;\r\n\t\t\t\t\t\tbreak;\r\n\t\t\t\t\t}\r\n\t\t\t\t\tif(!ok)\r\n\t\t\t\t\t{\r\n\t\t\t\t\t\taffect.source().tell(\"You cannot ride \"+name()+\" that way.\");\r\n\t\t\t\t\t\treturn false;\r\n\t\t\t\t\t}\r\n\t\t\t\t\tif(Sense.isSitting(affect.source()))\r\n\t\t\t\t\t{\r\n\t\t\t\t\t\taffect.source().tell(\"You cannot crawl while \"+stateString()+\" \"+name()+\".\");\r\n\t\t\t\t\t\treturn false;\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t\tbreak;\r\n\t\tcase Affect.TYP_BUY:\r\n\t\tcase 
Affect.TYP_SELL:\r\n\t\t\tif(amRiding(affect.source()))\r\n\t\t\t{\r\n\t\t\t\taffect.source().tell(\"You cannot do that while \"+stateString()+\" \"+name()+\".\");\r\n\t\t\t\treturn false;\r\n\t\t\t}\r\n\t\t\tbreak;\r\n\t\t}\r\n\t\tif((Util.bset(affect.sourceMajor(),Affect.ACT_HANDS))&&(amRiding(affect.source())))\r\n\t\t{\r\n\t\t\tif(((affect.target()!=null)&&(affect.target() instanceof Item)&&(affect.target()!=this)&&(affect.source().location()!=null)&&(affect.source().location().isContent((Item)affect.target())))\r\n\t\t\t|| ((affect.tool()!=null)&&(affect.tool() instanceof Item)&&(affect.tool()!=this)&&(affect.source().location()!=null)&&(affect.source().location().isContent((Item)affect.tool())))\r\n\t\t\t|| ((affect.sourceMinor()==Affect.TYP_GIVE)&&(affect.target()!=null)&&(affect.target() instanceof MOB)&&(affect.target()!=this)&&(!amRiding((MOB)affect.target()))))\r\n\t\t\t{\r\n\t\t\t\taffect.source().tell(\"You cannot do that while \"+stateString()+\" \"+name()+\".\");\r\n\t\t\t\treturn false;\r\n\t\t\t}\r\n\t\t}\r\n\t\tif(Util.bset(affect.targetCode(),Affect.MASK_MALICIOUS))\r\n\t\t{\r\n\t\t\tif((affect.amITarget(this))\r\n\t\t\t &&((affect.source().riding()==this)\r\n\t\t\t\t ||(this.amRiding(affect.source()))))\r\n\t\t\t{\r\n\t\t\t\taffect.source().tell(\"You can't attack \"+name()+\" right now.\");\r\n\t\t\t\tif(getVictim()==affect.source()) setVictim(null);\r\n\t\t\t\tif(affect.source().getVictim()==this) affect.source().setVictim(null);\r\n\t\t\t\treturn false;\r\n\t\t\t}\r\n\t\t\telse\r\n\t\t\tif((affect.amISource(this))\r\n\t\t\t &&(affect.target()!=null)\r\n\t\t\t &&(affect.target() instanceof MOB)\r\n\t\t\t &&((amRiding((MOB)affect.target()))\r\n\t\t\t\t ||(((MOB)affect.target()).riding()==this)))\r\n\t\t\t \r\n\t\t\t{\r\n\t\t\t\tMOB targ=(MOB)affect.target();\r\n\t\t\t\ttell(\"You can't attack \"+targ.name()+\" right now.\");\r\n\t\t\t\tif(getVictim()==targ) setVictim(null);\r\n\t\t\t\tif(targ.getVictim()==this) 
targ.setVictim(null);\r\n\t\t\t\treturn false;\r\n\t\t\t}\r\n\t\t}\r\n\t\treturn super.okAffect(affect);\r\n\t}\r\n\t\t\r\n\tpublic void affect(Affect affect)\r\n\t{\r\n\t\tsuper.affect(affect);\r\n\t\tswitch(affect.targetMinor())\r\n\t\t{\r\n\t\tcase Affect.TYP_DISMOUNT:\r\n\t\t\tif(amRiding(affect.source()))\r\n\t\t\t\taffect.source().setRiding(null);\r\n\t\t\tbreak;\r\n\t\tcase Affect.TYP_MOUNT:\r\n\t\t\tif((affect.amITarget(this))&&(!amRiding(affect.source())))\r\n\t\t\t\taffect.source().setRiding(this);\r\n\t\t\tbreak;\r\n\t\t}\r\n\t}\r\n}\r\n"},"new_file":{"kind":"string","value":"com/planet_ink/coffee_mud/MOBS/StdRideable.java"},"old_contents":{"kind":"string","value":"package com.planet_ink.coffee_mud.MOBS;\r\n\r\n\r\nimport com.planet_ink.coffee_mud.interfaces.*;\r\nimport com.planet_ink.coffee_mud.common.*;\r\nimport com.planet_ink.coffee_mud.utils.*;\r\nimport java.util.*;\r\n\r\npublic class StdRideable extends StdMOB implements Rideable\r\n{\r\n\tprotected int rideBasis=Rideable.RIDEABLE_LAND;\r\n\tprotected int mobCapacity=2;\r\n\tprotected Vector riders=new Vector();\r\n\tpublic StdRideable()\r\n\t{\r\n\t\tsuper();\r\n\t\tUsername=\"a horse\";\r\n\t\tsetDescription(\"A brown riding horse looks sturdy and reliable.\");\r\n\t\tsetDisplayText(\"a horse stands here.\");\r\n\t\tbaseEnvStats().setWeight(700);\r\n\t\trecoverEnvStats();\r\n\t}\r\n\tpublic Environmental newInstance()\r\n\t{\r\n\t\treturn new StdRideable();\r\n\t}\r\n\tpublic DeadBody killMeDead()\r\n\t{\r\n\t\twhile(riders.size()>0)\r\n\t\t{\r\n\t\t\tMOB mob=fetchRider(0);\r\n\t\t\tif(mob!=null)\r\n\t\t\t{\r\n\t\t\t\tmob.setRiding(null);\r\n\t\t\t\tdelRider(mob);\r\n\t\t\t}\r\n\t\t}\r\n\t\treturn super.killMeDead();\r\n\t}\r\n\t\r\n\t// common item/mob stuff\r\n\tpublic int rideBasis(){return rideBasis;}\r\n\tpublic void setRideBasis(int basis){rideBasis=basis;}\r\n\tpublic int mobCapacity(){ return mobCapacity;}\r\n\tpublic void setMobCapacity(int 
newCapacity){mobCapacity=newCapacity;}\r\n\tpublic int numRiders(){return riders.size();}\r\n\tpublic boolean mobileRideBasis(){return true;}\r\n\tpublic MOB fetchRider(int which)\r\n\t{\r\n\t\ttry\t{ return (MOB)riders.elementAt(which);\t}\r\n\t\tcatch(java.lang.ArrayIndexOutOfBoundsException e){}\r\n\t\treturn null;\r\n\t}\r\n\tpublic void addRider(MOB mob)\r\n\t{ \r\n\t\tif(mob!=null)\r\n\t\t\triders.addElement(mob);\r\n\t}\r\n\tpublic void delRider(MOB mob)\r\n\t{ \r\n\t\tif(mob!=null)\r\n\t\t\triders.removeElement(mob);\r\n\t}\r\n\tpublic void recoverEnvStats()\r\n\t{\r\n\t\tsuper.recoverEnvStats();\r\n\t\tif(rideBasis==Rideable.RIDEABLE_AIR)\r\n\t\t\tenvStats().setDisposition(envStats().disposition()|EnvStats.IS_FLYING);\r\n\t\telse\r\n\t\tif(rideBasis==Rideable.RIDEABLE_WATER)\r\n\t\t\tenvStats().setDisposition(envStats().disposition()|EnvStats.IS_SWIMMING);\r\n\t}\r\n\tpublic void affectEnvStats(Environmental affected, EnvStats affectableStats)\r\n\t{\r\n\t\tsuper.affectEnvStats(affected,affectableStats);\r\n\t\tif(affected instanceof MOB)\r\n\t\t{\r\n\t\t\tMOB mob=(MOB)affected;\r\n\t\t\tif((mob.isInCombat())&&(mob.rangeToTarget()==0)&&(amRiding(mob)))\r\n\t\t\t{\r\n\t\t\t\taffectableStats.setAttackAdjustment(affectableStats.attackAdjustment()-mob.baseEnvStats().attackAdjustment());\r\n\t\t\t\taffectableStats.setDamage(affectableStats.damage()-mob.baseEnvStats().damage());\r\n\t\t\t}\r\n\t\t}\r\n\t}\r\n\tpublic boolean amRiding(MOB mob)\r\n\t{\r\n\t\treturn riders.contains(mob);\r\n\t}\r\n\tpublic String stateString(){return \"riding on\";}\r\n\tpublic String mountString(int commandType){return \"mount(s)\";}\r\n\tpublic String dismountString(){return \"dismount(s)\";}\r\n\tpublic String stateStringSubject(){return \"being ridden by\";}\r\n\r\n\tpublic boolean okAffect(Affect affect)\r\n\t{\r\n\t\tswitch(affect.targetMinor())\r\n\t\t{\r\n\t\tcase 
Affect.TYP_DISMOUNT:\r\n\t\t\tif(affect.amITarget(this))\r\n\t\t\t{\r\n\t\t\t\tif(!amRiding(affect.source()))\r\n\t\t\t\t{\r\n\t\t\t\t\taffect.source().tell(\"You are not \"+stateString()+\" \"+name()+\"!\");\r\n\t\t\t\t\tif(affect.source().riding()==this)\r\n\t\t\t\t\t\taffect.source().setRiding(null);\r\n\t\t\t\t\treturn false;\r\n\t\t\t\t}\r\n\t\t\t\t// protects from standard mob rejection\r\n\t\t\t\treturn true;\r\n\t\t\t}\r\n\t\t\tbreak;\r\n\t\tcase Affect.TYP_SIT:\r\n\t\t\tif(amRiding(affect.source()))\r\n\t\t\t{\r\n\t\t\t\taffect.source().tell(\"You are \"+stateString()+\" \"+name()+\"!\");\r\n\t\t\t\taffect.source().setRiding(this);\r\n\t\t\t\treturn false;\r\n\t\t\t}\r\n\t\t\telse\r\n\t\t\tif(affect.amITarget(this))\r\n\t\t\t{\r\n\t\t\t\taffect.source().tell(\"You cannot simply sit on \"+name()+\", try 'mount'.\");\r\n\t\t\t\treturn false;\r\n\t\t\t}\r\n\t\t\tbreak;\r\n\t\tcase Affect.TYP_SLEEP:\r\n\t\t\tif(amRiding(affect.source()))\r\n\t\t\t{\r\n\t\t\t\taffect.source().tell(\"You are \"+stateString()+\" \"+name()+\"!\");\r\n\t\t\t\taffect.source().setRiding(this);\r\n\t\t\t\treturn false;\r\n\t\t\t}\r\n\t\t\telse\r\n\t\t\tif(affect.amITarget(this))\r\n\t\t\t{\r\n\t\t\t\taffect.source().tell(\"You cannot lie down on \"+name()+\".\");\r\n\t\t\t\treturn false;\r\n\t\t\t}\r\n\t\t\tbreak;\r\n\t\tcase Affect.TYP_MOUNT:\r\n\t\t\tif(amRiding(affect.source()))\r\n\t\t\t{\r\n\t\t\t\taffect.source().tell(\"You are \"+stateString()+\" \"+name()+\"!\");\r\n\t\t\t\taffect.source().setRiding(this);\r\n\t\t\t\treturn false;\r\n\t\t\t}\r\n\t\t\telse\r\n\t\t\tif(affect.amITarget(this))\r\n\t\t\t{\r\n\t\t\t\tif(numRiders()>=mobCapacity())\r\n\t\t\t\t{\r\n\t\t\t\t\t// for items\r\n\t\t\t\t\t//affect.source().tell(name()+\" is full.\");\r\n\t\t\t\t\t// for mobs\r\n\t\t\t\t\t affect.source().tell(\"No more can fit on \"+name()+\".\");\r\n\t\t\t\t\treturn false;\r\n\t\t\t\t}\r\n\t\t\t\t// protects from standard mob rejection\r\n\t\t\t\treturn 
true;\r\n\t\t\t}\r\n\t\t\tbreak;\r\n\t\tcase Affect.TYP_ENTER:\r\n\t\t\tif(amRiding(affect.source())\r\n\t\t\t &&(affect.target()!=null)\r\n\t\t\t &&(affect.target() instanceof Room))\r\n\t\t\t{\r\n\t\t\t\tRoom sourceRoom=(Room)affect.source().location();\r\n\t\t\t\tRoom targetRoom=(Room)affect.target();\r\n\t\t\t\tif((sourceRoom!=null)&&(!affect.amITarget(sourceRoom)))\r\n\t\t\t\t{\r\n\t\t\t\t\tboolean ok=!((targetRoom.domainType()&Room.INDOORS)>0);\r\n\t\t\t\t\tswitch(rideBasis)\r\n\t\t\t\t\t{\r\n\t\t\t\t\tcase Rideable.RIDEABLE_LAND:\r\n\t\t\t\t\t\tif((targetRoom.domainType()==Room.DOMAIN_OUTDOORS_AIR)\r\n\t\t\t\t\t\t ||(targetRoom.domainType()==Room.DOMAIN_OUTDOORS_UNDERWATER)\r\n\t\t\t\t\t\t ||(targetRoom.domainType()==Room.DOMAIN_INDOORS_UNDERWATER)\r\n\t\t\t\t\t\t ||(targetRoom.domainType()==Room.DOMAIN_INDOORS_WATERSURFACE)\r\n\t\t\t\t\t\t ||(targetRoom.domainType()==Room.DOMAIN_INDOORS_AIR)\r\n\t\t\t\t\t\t ||(targetRoom.domainType()==Room.DOMAIN_OUTDOORS_WATERSURFACE))\r\n\t\t\t\t\t\t\tok=false;\r\n\t\t\t\t\t\tbreak;\r\n\t\t\t\t\tcase Rideable.RIDEABLE_AIR:\r\n\t\t\t\t\t\tbreak;\r\n\t\t\t\t\tcase Rideable.RIDEABLE_WATER:\r\n\t\t\t\t\t\tif((sourceRoom.domainType()!=Room.DOMAIN_OUTDOORS_WATERSURFACE)\r\n\t\t\t\t\t\t\t&&(targetRoom.domainType()!=Room.DOMAIN_OUTDOORS_WATERSURFACE)\r\n\t\t\t\t\t\t\t&&(sourceRoom.domainType()!=Room.DOMAIN_INDOORS_WATERSURFACE)\r\n\t\t\t\t\t\t\t&&(targetRoom.domainType()!=Room.DOMAIN_INDOORS_WATERSURFACE))\r\n\t\t\t\t\t\t\tok=false;\r\n\t\t\t\t\t\tbreak;\r\n\t\t\t\t\t}\r\n\t\t\t\t\tif(!ok)\r\n\t\t\t\t\t{\r\n\t\t\t\t\t\taffect.source().tell(\"You cannot ride \"+name()+\" that way.\");\r\n\t\t\t\t\t\treturn false;\r\n\t\t\t\t\t}\r\n\t\t\t\t\tif(Sense.isSitting(affect.source()))\r\n\t\t\t\t\t{\r\n\t\t\t\t\t\taffect.source().tell(\"You cannot crawl while \"+stateString()+\" \"+name()+\".\");\r\n\t\t\t\t\t\treturn false;\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t\tbreak;\r\n\t\tcase Affect.TYP_BUY:\r\n\t\tcase 
Affect.TYP_SELL:\r\n\t\t\tif(amRiding(affect.source()))\r\n\t\t\t{\r\n\t\t\t\taffect.source().tell(\"You cannot do that while \"+stateString()+\" \"+name()+\".\");\r\n\t\t\t\treturn false;\r\n\t\t\t}\r\n\t\t\tbreak;\r\n\t\t}\r\n\t\tif((Util.bset(affect.sourceMajor(),Affect.ACT_HANDS))&&(amRiding(affect.source())))\r\n\t\t{\r\n\t\t\tif(((affect.target()!=null)&&(affect.target() instanceof Item)&&(affect.target()!=this)&&(affect.source().location()!=null)&&(affect.source().location().isContent((Item)affect.target())))\r\n\t\t\t|| ((affect.tool()!=null)&&(affect.tool() instanceof Item)&&(affect.tool()!=this)&&(affect.source().location()!=null)&&(affect.source().location().isContent((Item)affect.tool())))\r\n\t\t\t|| ((affect.sourceMinor()==Affect.TYP_GIVE)&&(affect.target()!=null)&&(affect.target() instanceof MOB)&&(affect.target()!=this)&&(!amRiding((MOB)affect.target()))))\r\n\t\t\t{\r\n\t\t\t\taffect.source().tell(\"You cannot do that while \"+stateString()+\" \"+name()+\".\");\r\n\t\t\t\treturn false;\r\n\t\t\t}\r\n\t\t}\r\n\t\tif(Util.bset(affect.targetCode(),Affect.MASK_MALICIOUS))\r\n\t\t{\r\n\t\t\tif((affect.amITarget(this))\r\n\t\t\t &&((affect.source().riding()==this)\r\n\t\t\t\t ||(this.amRiding(affect.source()))))\r\n\t\t\t{\r\n\t\t\t\taffect.source().tell(\"You can't attack \"+riding().name()+\" right now.\");\r\n\t\t\t\tif(getVictim()==affect.source()) setVictim(null);\r\n\t\t\t\tif(affect.source().getVictim()==this) affect.source().setVictim(null);\r\n\t\t\t\treturn false;\r\n\t\t\t}\r\n\t\t\telse\r\n\t\t\tif((affect.amISource(this))\r\n\t\t\t &&(affect.target()!=null)\r\n\t\t\t &&(affect.target() instanceof MOB)\r\n\t\t\t &&((amRiding((MOB)affect.target()))\r\n\t\t\t\t ||(((MOB)affect.target()).riding()==this)))\r\n\t\t\t \r\n\t\t\t{\r\n\t\t\t\tMOB targ=(MOB)affect.target();\r\n\t\t\t\ttell(\"You can't attack \"+targ.name()+\" right now.\");\r\n\t\t\t\tif(getVictim()==targ) setVictim(null);\r\n\t\t\t\tif(targ.getVictim()==this) 
targ.setVictim(null);\r\n\t\t\t\treturn false;\r\n\t\t\t}\r\n\t\t}\r\n\t\treturn super.okAffect(affect);\r\n\t}\r\n\t\t\r\n\tpublic void affect(Affect affect)\r\n\t{\r\n\t\tsuper.affect(affect);\r\n\t\tswitch(affect.targetMinor())\r\n\t\t{\r\n\t\tcase Affect.TYP_DISMOUNT:\r\n\t\t\tif(amRiding(affect.source()))\r\n\t\t\t\taffect.source().setRiding(null);\r\n\t\t\tbreak;\r\n\t\tcase Affect.TYP_MOUNT:\r\n\t\t\tif((affect.amITarget(this))&&(!amRiding(affect.source())))\r\n\t\t\t\taffect.source().setRiding(this);\r\n\t\t\tbreak;\r\n\t\t}\r\n\t}\r\n}\r\n"},"message":{"kind":"string","value":"\n\ngit-svn-id: svn://192.168.1.10/public/CoffeeMud@1215 0d6f1817-ed0e-0410-87c9-987e46238f29\n"},"old_file":{"kind":"string","value":"com/planet_ink/coffee_mud/MOBS/StdRideable.java"},"subject":{"kind":"string","value":""},"git_diff":{"kind":"string","value":"om/planet_ink/coffee_mud/MOBS/StdRideable.java\n \t\t\t &&((affect.source().riding()==this)\n \t\t\t\t ||(this.amRiding(affect.source()))))\n \t\t\t{\n\t\t\t\taffect.source().tell(\"You can't attack \"+riding().name()+\" right now.\");\n\t\t\t\taffect.source().tell(\"You can't attack \"+name()+\" right now.\");\n \t\t\t\tif(getVictim()==affect.source()) setVictim(null);\n \t\t\t\tif(affect.source().getVictim()==this) affect.source().setVictim(null);\n \t\t\t\treturn false;"}}},{"rowIdx":2065,"cells":{"lang":{"kind":"string","value":"JavaScript"},"license":{"kind":"string","value":"mit"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"04aa2de3552262e93a07698ca35eafad5a1b24de"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"rickbergfalk/marky-mark,rickbergfalk/marky-mark"},"new_contents":{"kind":"string","value":"exports.getFrontMatter = function(lines) {\n if ((/^---/).test(lines[0].trim())) {\n var frontMatter = [];\n lines.shift();\n var line;\n // Keep shifting off lines till we find the next ---\n while (!(/^---/).test(line = lines.shift())) {\n 
frontMatter.push(line);\n } \n return frontMatter.join('\\n');\n } else {\n return '';\n }\n};\n"},"new_file":{"kind":"string","value":"lib/content.js"},"old_contents":{"kind":"string","value":"exports.getFrontMatter = function(lines) {\n if ((/^---/).test(lines[0].trim())) {\n var frontMatter = [];\n lines.shift();\n var line;\n // Keep shifting off lines till we find the next ---\n while (!(/^---/).test(line = lines.shift().trim())) {\n frontMatter.push(line);\n } \n return frontMatter.join('\\n');\n } else {\n return '';\n }\n};\n"},"message":{"kind":"string","value":"don't remove indentation in yaml"},"old_file":{"kind":"string","value":"lib/content.js"},"subject":{"kind":"string","value":"don't remove indentation in yaml"},"git_diff":{"kind":"string","value":"ib/content.js\n lines.shift();\n var line;\n // Keep shifting off lines till we find the next ---\n while (!(/^---/).test(line = lines.shift().trim())) {\n while (!(/^---/).test(line = lines.shift())) {\n frontMatter.push(line);\n } \n return 
frontMatter.join('\\n');"}}},{"rowIdx":2066,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"agpl-3.0"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"daaca99d1699ab8383bf57e4d77085246f483480"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"cloudcoderdotorg/CloudCoder,daveho/CloudCoder,jspacco/CloudCoder2,jspacco/CloudCoder2,jspacco/CloudCoder,vjpudelski/CloudCoder,vjpudelski/CloudCoder,daveho/CloudCoder,jspacco/CloudCoder,csirkeee/CloudCoder,x77686d/CloudCoder,jspacco/CloudCoder,csirkeee/CloudCoder,aayushmudgal/CloudCoder,x77686d/CloudCoder,wicky-info/CloudCoder,cloudcoderdotorg/CloudCoder,ndrppnc/CloudCoder,wicky-info/CloudCoder,daveho/CloudCoder,vjpudelski/CloudCoder,csirkeee/CloudCoder,daveho/CloudCoder,cloudcoderdotorg/CloudCoder,ndrppnc/CloudCoder,daveho/CloudCoder,ndrppnc/CloudCoder,cloudcoderdotorg/CloudCoder,jspacco/CloudCoder2,wicky-info/CloudCoder,csirkeee/CloudCoder,daveho/CloudCoder,jspacco/CloudCoder2,vjpudelski/CloudCoder,jspacco/CloudCoder,jspacco/CloudCoder2,aayushmudgal/CloudCoder,x77686d/CloudCoder,x77686d/CloudCoder,daveho/CloudCoder,csirkeee/CloudCoder,jspacco/CloudCoder,wicky-info/CloudCoder,wicky-info/CloudCoder,jspacco/CloudCoder,cloudcoderdotorg/CloudCoder,csirkeee/CloudCoder,vjpudelski/CloudCoder,cloudcoderdotorg/CloudCoder,jspacco/CloudCoder2,aayushmudgal/CloudCoder,aayushmudgal/CloudCoder,aayushmudgal/CloudCoder,x77686d/CloudCoder,wicky-info/CloudCoder,ndrppnc/CloudCoder,x77686d/CloudCoder,x77686d/CloudCoder,vjpudelski/CloudCoder,ndrppnc/CloudCoder,vjpudelski/CloudCoder,cloudcoderdotorg/CloudCoder,csirkeee/CloudCoder,jspacco/CloudCoder2,wicky-info/CloudCoder,aayushmudgal/CloudCoder,jspacco/CloudCoder"},"new_contents":{"kind":"string","value":"// CloudCoder - a web-based pedagogical programming environment\n// Copyright (C) 2011-2012, Jaime Spacco \n// Copyright (C) 2011-2012, David H. 
Hovemeyer \n//\n// This program is free software: you can redistribute it and/or modify\n// it under the terms of the GNU Affero General Public License as published by\n// the Free Software Foundation, either version 3 of the License, or\n// (at your option) any later version.\n//\n// This program is distributed in the hope that it will be useful,\n// but WITHOUT ANY WARRANTY; without even the implied warranty of\n// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n// GNU Affero General Public License for more details.\n//\n// You should have received a copy of the GNU Affero General Public License\n// along with this program. If not, see .\n\npackage org.cloudcoder.app.server.persist;\n\nimport java.io.FileNotFoundException;\nimport java.io.FileReader;\nimport java.io.IOException;\nimport java.sql.Connection;\nimport java.sql.DriverManager;\nimport java.sql.PreparedStatement;\nimport java.sql.ResultSet;\nimport java.sql.SQLException;\nimport java.util.Properties;\nimport java.util.Scanner;\n\nimport org.apache.commons.beanutils.BeanUtils;\nimport org.apache.commons.beanutils.PropertyUtils;\nimport org.apache.log4j.ConsoleAppender;\nimport org.apache.log4j.Level;\nimport org.apache.log4j.Logger;\nimport org.apache.log4j.PatternLayout;\nimport org.cloudcoder.app.shared.model.Change;\nimport org.cloudcoder.app.shared.model.ConfigurationSetting;\nimport org.cloudcoder.app.shared.model.ConfigurationSettingName;\nimport org.cloudcoder.app.shared.model.Course;\nimport org.cloudcoder.app.shared.model.CourseRegistration;\nimport org.cloudcoder.app.shared.model.CourseRegistrationType;\nimport org.cloudcoder.app.shared.model.Event;\nimport org.cloudcoder.app.shared.model.ModelObjectField;\nimport org.cloudcoder.app.shared.model.ModelObjectSchema;\nimport org.cloudcoder.app.shared.model.Problem;\nimport org.cloudcoder.app.shared.model.ProblemLicense;\nimport org.cloudcoder.app.shared.model.ProblemType;\nimport 
org.cloudcoder.app.shared.model.SubmissionReceipt;\nimport org.cloudcoder.app.shared.model.Term;\nimport org.cloudcoder.app.shared.model.TestCase;\nimport org.cloudcoder.app.shared.model.TestResult;\nimport org.cloudcoder.app.shared.model.User;\n\n/**\n * Create the webapp database, using the metadata information\n * specified by the model classes.\n * \n * @author David Hovemeyer\n */\npublic class CreateWebappDatabase {\n\tprivate static final boolean DEBUG = false;\n\t\n\tprivate static class ConfigProperties {\n\t\tprivate Properties properties;\n\t\t\n\t\tpublic ConfigProperties() throws FileNotFoundException, IOException {\n\t\t\tproperties = new Properties();\n\t\t\tproperties.load(new FileReader(\"../local.properties\"));\n\t\t}\n\t\t\n\t\tpublic String get(String propName) {\n\t\t\tString value = properties.getProperty(\"cloudcoder.db.\" + propName);\n\t\t\tif (value == null) {\n\t\t\t\tthrow new IllegalArgumentException(\"Unknown property: \" + propName);\n\t\t\t}\n\t\t\treturn value;\n\t\t}\n\t}\n\t\n\tpublic static void main(String[] args) throws Exception {\n\t\tconfigureLog4j();\n\t\t\n\t\tScanner keyboard = new Scanner(System.in);\n\t\t\n\t\tSystem.out.print(\"Enter a username for your CloudCoder account: \");\n\t\tString ccUserName = keyboard.nextLine();\n\t\t\n\t\tSystem.out.print(\"Enter a password for your CloudCoder account: \");\n\t\tString ccPassword = keyboard.nextLine();\n\t\t\n\t\tSystem.out.print(\"What is your institution name (e.g, 'Unseen University')? 
\");\n\t\tString ccInstitutionName = keyboard.nextLine();\n\t\t\n\t\tClass.forName(\"com.mysql.jdbc.Driver\");\n\n\t\tConfigProperties config = new ConfigProperties();\n\t\t\n\t\tString dbUser = config.get(\"user\");\n\t\tString dbPasswd = config.get(\"passwd\");\n\t\tString dbName = config.get(\"databaseName\");\n\t\tString dbHost = config.get(\"host\");\n\t\t\n\t\t// Connect to the database server, but don't specify a database name \n\t\tConnection conn = DriverManager.getConnection(\"jdbc:mysql://\" + dbHost + \"/?user=\" + dbUser + \"&password=\" + dbPasswd);\n\t\t\n\t\tSystem.out.println(\"Creating database\");\n\t\tDBUtil.execSql(conn, \"create database \" + dbName);\n\t\t\n\t\tconn.close();\n\t\t\n\t\t// Reconnect to the newly-created database\n\t\tconn = DriverManager.getConnection(\"jdbc:mysql://\" + dbHost + \"/\" + dbName + \"?user=\" + dbUser + \"&password=\" + dbPasswd);\n\t\t\n\t\t// Create tables and indexes\n\t\tcreateTable(conn, JDBCDatabase.CHANGES, Change.SCHEMA);\n\t\tcreateTable(conn, JDBCDatabase.CONFIGURATION_SETTINGS, ConfigurationSetting.SCHEMA);\n\t\tcreateTable(conn, JDBCDatabase.COURSES, Course.SCHEMA);\n\t\tcreateTable(conn, JDBCDatabase.COURSE_REGISTRATIONS, CourseRegistration.SCHEMA);\n\t\tcreateTable(conn, JDBCDatabase.EVENTS, Event.SCHEMA);\n\t\tcreateTable(conn, JDBCDatabase.PROBLEMS, Problem.SCHEMA);\n\t\tcreateTable(conn, JDBCDatabase.SUBMISSION_RECEIPTS, SubmissionReceipt.SCHEMA);\n\t\tcreateTable(conn, JDBCDatabase.TERMS, Term.SCHEMA);\n\t\tcreateTable(conn, JDBCDatabase.TEST_CASES, TestCase.SCHEMA);\n\t\tcreateTable(conn, JDBCDatabase.TEST_RESULTS, TestResult.SCHEMA);\n\t\tcreateTable(conn, JDBCDatabase.USERS, User.SCHEMA);\n\t\t\n\t\t// Create initial database contents\n\t\t\n\t\t// Set institution name\n\t\tConfigurationSetting instName = new ConfigurationSetting();\n\t\tinstName.setName(ConfigurationSettingName.PUB_TEXT_INSTITUTION);\n\t\tinstName.setValue(ccInstitutionName);\n\t\tstoreBean(conn, instName, 
ConfigurationSetting.SCHEMA, JDBCDatabase.CONFIGURATION_SETTINGS);\n\t\t\n\t\t// Terms\n\t\tSystem.out.println(\"Creating terms...\");\n\t\tstoreTerm(conn, \"Winter\", 0, JDBCDatabase.TERMS);\n\t\tstoreTerm(conn, \"Spring\", 1, JDBCDatabase.TERMS);\n\t\tstoreTerm(conn, \"Summer\", 2, JDBCDatabase.TERMS);\n\t\tstoreTerm(conn, \"Summer 1\", 3, JDBCDatabase.TERMS);\n\t\tstoreTerm(conn, \"Summer 2\", 4, JDBCDatabase.TERMS);\n\t\tTerm fall = storeTerm(conn, \"Fall\", 5, JDBCDatabase.TERMS);\n\t\t\n\t\t// Create an initial demo course\n\t\tSystem.out.println(\"Creating demo course...\");\n\t\tCourse course = new Course();\n\t\tcourse.setName(\"CCDemo\");\n\t\tcourse.setTitle(\"CloudCoder demo course\");\n\t\tcourse.setTermId(fall.getId());\n\t\tcourse.setTerm(fall);\n\t\tcourse.setYear(2012);\n\t\tcourse.setUrl(\"http://cloudcoder.org/\");\n\t\tstoreBean(conn, course, Course.SCHEMA, JDBCDatabase.COURSES);\n\t\t\n\t\t// Create an initial user\n\t\tSystem.out.println(\"Creating initial user...\");\n\t\tUser user = new User();\n\t\tuser.setUsername(ccUserName);\n\t\tuser.setPasswordHash(BCrypt.hashpw(ccPassword, BCrypt.gensalt(12)));\n\t\tstoreBean(conn, user, User.SCHEMA, JDBCDatabase.USERS);\n\t\t\n\t\t// Register the user as an instructor in the demo course\n\t\tSystem.out.println(\"Registering initial user for demo course...\");\n\t\tCourseRegistration courseReg = new CourseRegistration();\n\t\tcourseReg.setCourseId(course.getId());\n\t\tcourseReg.setUserId(user.getId());\n\t\tcourseReg.setRegistrationType(CourseRegistrationType.INSTRUCTOR);\n\t\tcourseReg.setSection(101);\n\t\tstoreBean(conn, courseReg, CourseRegistration.SCHEMA, JDBCDatabase.COURSE_REGISTRATIONS);\n\t\t\n\t\t// Create a Problem\n\t\tSystem.out.println(\"Creating hello, world problem in demo course...\");\n\t\tProblem problem = new Problem();\n\t\tproblem.setCourseId(course.getId());\n\t\tproblem.setWhenAssigned(System.currentTimeMillis());\n\t\tproblem.setWhenDue(problem.getWhenAssigned() + 
(24L*60*60*1000));\n\t\tproblem.setVisible(true);\n\t\tproblem.setProblemType(ProblemType.C_PROGRAM);\n\t\tproblem.setTestname(\"hello\");\n\t\tproblem.setBriefDescription(\"Print hello, world\");\n\t\tproblem.setDescription(\n\t\t\t\t\"

Print a line with the following text:

\\n\" +\n\t\t\t\t\"
Hello, world
\\n\"\n\t\t);\t\t\t// At the moment, we don't need to allow NULL field values.\n\n\t\tproblem.setSkeleton(\n\t\t\t\t\"#include \\n\\n\" +\n\t\t\t\t\"int main(void) {\\n\" +\n\t\t\t\t\"\\t// TODO - add your code here\\n\\n\" +\n\t\t\t\t\"\\treturn 0;\\n\" +\n\t\t\t\t\"}\\n\"\n\t\t\t\t);\n\t\tproblem.setSchemaVersion(Problem.CURRENT_SCHEMA_VERSION);\n\t\tproblem.setAuthorName(\"A. User\");\n\t\tproblem.setAuthorEmail(\"auser@cs.unseen.edu\");\n\t\tproblem.setAuthorWebsite(\"http://cs.unseen.edu/~auser\");\n\t\tproblem.setTimestampUtc(System.currentTimeMillis());\n\t\tproblem.setLicense(ProblemLicense.CC_ATTRIB_SHAREALIKE_3_0);\n\t\t\n\t\tstoreBean(conn, problem, Problem.SCHEMA, JDBCDatabase.PROBLEMS);\n\t\t\n\t\t// Add a TestCase\n\t\tSystem.out.println(\"Creating test case for hello, world problem...\");\n\t\tTestCase testCase = new TestCase();\n\t\ttestCase.setProblemId(problem.getProblemId());\n\t\ttestCase.setTestCaseName(\"hello\");\n\t\ttestCase.setInput(\"\");\n\t\ttestCase.setOutput(\"^\\\\s*Hello\\\\s*,\\\\s*world\\\\s*$i\");\n\t\ttestCase.setSecret(false);\n\t\t\n\t\tstoreBean(conn, testCase, TestCase.SCHEMA, JDBCDatabase.TEST_CASES);\n\t\t\n\t\tconn.close();\n\t\t\n\t\tSystem.out.println(\"Success!\");\n\t}\n\n\tprivate static void createTable(Connection conn, String tableName, ModelObjectSchema schema) throws SQLException {\n\t\tSystem.out.println(\"Creating table \" + tableName);\n\t\tString sql = DBUtil.getCreateTableStatement(schema, tableName);\n\t\tif (DEBUG) {\n\t\t\tSystem.out.println(sql);\n\t\t}\n\t\tDBUtil.execSql(conn, sql);\n\t}\n\n\tprivate static Term storeTerm(Connection conn, String name, int seq, String tableName) throws SQLException {\n\t\tTerm term = new Term();\n\t\tterm.setName(name);\n\t\tterm.setSeq(seq);\n\t\tstoreBean(conn, term, Term.SCHEMA, tableName);\n\t\treturn term;\n\t}\n\n\t// Use introspection to store an arbitrary bean in the database.\n\t// Eventually we could use this sort of approach to replace much\n\t// of our 
hand-written JDBC code, although I don't know how great\n\t// and idea that would be (for example, it might not yield adequate\n\t// performance.) For just creating the database, it should be\n\t// fine.\n\tprivate static void storeBean(Connection conn, Object bean, ModelObjectSchema schema, String tableName) throws SQLException {\n\t\tStringBuilder buf = new StringBuilder();\n\t\t\n\t\tbuf.append(\"insert into \" + tableName);\n\t\tbuf.append(\" values (\");\n\t\tbuf.append(DBUtil.getInsertPlaceholdersNoId(schema));\n\t\tbuf.append(\")\");\n\t\t\n\t\tPreparedStatement stmt = null;\n\t\tResultSet genKeys = null;\n\t\t\n\t\ttry {\n\t\t\tstmt = conn.prepareStatement(buf.toString(), schema.hasUniqueId() ? PreparedStatement.RETURN_GENERATED_KEYS : 0);\n\t\t\t\n\t\t\t// Now for the magic: iterate through the schema fields\n\t\t\t// and bind the query parameters based on the bean properties.\n\t\t\tint index = 1;\n\t\t\tfor (ModelObjectField field : schema.getFieldList()) {\n\t\t\t\tif (field.isUniqueId()) {\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\t\t\t\ttry {\n\t\t\t\t\tObject value = PropertyUtils.getProperty(bean, field.getPropertyName());\n\t\t\t\t\tif (value instanceof Enum) {\n\t\t\t\t\t\t// Enum values are converted to integers\n\t\t\t\t\t\tvalue = Integer.valueOf(((Enum)value).ordinal());\n\t\t\t\t\t}\n\t\t\t\t\tstmt.setObject(index++, value);\n\t\t\t\t} catch (Exception e) {\n\t\t\t\t\tthrow new SQLException(\n\t\t\t\t\t\t\t\"Couldn't get property \" + field.getPropertyName() +\n\t\t\t\t\t\t\t\" of \" + bean.getClass().getName() + \" object\");\n\t\t\t\t}\n\t\t\t}\n\t\t\t\n\t\t\t// Execute the insert\n\t\t\tstmt.executeUpdate();\n\t\t\t\n\t\t\tif (schema.hasUniqueId()) {\n\t\t\t\tgenKeys = stmt.getGeneratedKeys();\n\t\t\t\tif (!genKeys.next()) {\n\t\t\t\t\tthrow new SQLException(\"Couldn't get generated id for \" + bean.getClass().getName()); \n\t\t\t\t}\n\t\t\t\tint id = genKeys.getInt(1);\n\t\t\t\t\n\t\t\t\t// Set the unique id value in the bean\n\t\t\t\ttry 
{\n\t\t\t\t\tBeanUtils.setProperty(bean, schema.getUniqueIdField().getPropertyName(), id);\n\t\t\t\t} catch (Exception e) {\n\t\t\t\t\tthrow new SQLException(\"Couldn't set generated unique id for \" + bean.getClass().getName());\n\t\t\t\t}\n\t\t\t}\n\t\t} finally {\n\t\t\tDBUtil.closeQuietly(genKeys);\n\t\t\tDBUtil.closeQuietly(stmt);\n\t\t}\n\t}\n\t\n\tprivate static void configureLog4j() {\n\t\t// See: http://robertmaldon.blogspot.com/2007/09/programmatically-configuring-log4j-and.html\n\t\tLogger rootLogger = Logger.getRootLogger();\n\t\tif (!rootLogger.getAllAppenders().hasMoreElements()) {\n\t\t\trootLogger.setLevel(Level.INFO);\n\t\t\trootLogger.addAppender(new ConsoleAppender(new PatternLayout(\"%-5p [%t]: %m%n\")));\n\t\t}\n\t}\n}\n"},"new_file":{"kind":"string","value":"CloudCoderModelClassesPersistence/src/org/cloudcoder/app/server/persist/CreateWebappDatabase.java"},"old_contents":{"kind":"string","value":"// CloudCoder - a web-based pedagogical programming environment\n// Copyright (C) 2011-2012, Jaime Spacco \n// Copyright (C) 2011-2012, David H. Hovemeyer \n//\n// This program is free software: you can redistribute it and/or modify\n// it under the terms of the GNU Affero General Public License as published by\n// the Free Software Foundation, either version 3 of the License, or\n// (at your option) any later version.\n//\n// This program is distributed in the hope that it will be useful,\n// but WITHOUT ANY WARRANTY; without even the implied warranty of\n// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n// GNU Affero General Public License for more details.\n//\n// You should have received a copy of the GNU Affero General Public License\n// along with this program. 
If not, see .\n\npackage org.cloudcoder.app.server.persist;\n\nimport java.io.FileNotFoundException;\nimport java.io.FileReader;\nimport java.io.IOException;\nimport java.sql.Connection;\nimport java.sql.DriverManager;\nimport java.sql.PreparedStatement;\nimport java.sql.ResultSet;\nimport java.sql.SQLException;\nimport java.util.Properties;\nimport java.util.Scanner;\n\nimport org.apache.commons.beanutils.BeanUtils;\nimport org.apache.commons.beanutils.PropertyUtils;\nimport org.apache.log4j.ConsoleAppender;\nimport org.apache.log4j.Level;\nimport org.apache.log4j.Logger;\nimport org.apache.log4j.PatternLayout;\nimport org.cloudcoder.app.shared.model.Change;\nimport org.cloudcoder.app.shared.model.ConfigurationSetting;\nimport org.cloudcoder.app.shared.model.ConfigurationSettingName;\nimport org.cloudcoder.app.shared.model.Course;\nimport org.cloudcoder.app.shared.model.CourseRegistration;\nimport org.cloudcoder.app.shared.model.CourseRegistrationType;\nimport org.cloudcoder.app.shared.model.Event;\nimport org.cloudcoder.app.shared.model.ModelObjectField;\nimport org.cloudcoder.app.shared.model.ModelObjectSchema;\nimport org.cloudcoder.app.shared.model.Problem;\nimport org.cloudcoder.app.shared.model.ProblemLicense;\nimport org.cloudcoder.app.shared.model.ProblemType;\nimport org.cloudcoder.app.shared.model.SubmissionReceipt;\nimport org.cloudcoder.app.shared.model.Term;\nimport org.cloudcoder.app.shared.model.TestCase;\nimport org.cloudcoder.app.shared.model.TestResult;\nimport org.cloudcoder.app.shared.model.User;\n\n/**\n * Create the webapp database, using the metadata information\n * specified by the model classes.\n * \n * @author David Hovemeyer\n */\npublic class CreateWebappDatabase {\n\tprivate static final boolean DEBUG = false;\n\t\n\tprivate static class ConfigProperties {\n\t\tprivate Properties properties;\n\t\t\n\t\tpublic ConfigProperties() throws FileNotFoundException, IOException {\n\t\t\tproperties = new Properties();\n\t\t\tproperties.load(new 
FileReader(\"../local.properties\"));\n\t\t}\n\t\t\n\t\tpublic String get(String propName) {\n\t\t\tString value = properties.getProperty(\"cloudcoder.db.\" + propName);\n\t\t\tif (value == null) {\n\t\t\t\tthrow new IllegalArgumentException(\"Unknown property: \" + propName);\n\t\t\t}\n\t\t\treturn value;\n\t\t}\n\t}\n\t\n\tpublic static void main(String[] args) throws Exception {\n\t\tconfigureLog4j();\n\t\t\n\t\tScanner keyboard = new Scanner(System.in);\n\t\t\n\t\tSystem.out.print(\"Enter a username for your CloudCoder account: \");\n\t\tString ccUserName = keyboard.nextLine();\n\t\t\n\t\tSystem.out.print(\"Enter a password for your CloudCoder account: \");\n\t\tString ccPassword = keyboard.nextLine();\n\t\t\n\t\tSystem.out.print(\"What is your institution name (e.g, 'Unseen University')? \");\n\t\tString ccInstitutionName = keyboard.nextLine();\n\t\t\n\t\tClass.forName(\"com.mysql.jdbc.Driver\");\n\n\t\tConfigProperties config = new ConfigProperties();\n\t\t\n\t\tString dbUser = config.get(\"user\");\n\t\tString dbPasswd = config.get(\"passwd\");\n\t\tString dbName = config.get(\"databaseName\");\n\t\tString dbHost = config.get(\"host\");\n\t\t\n\t\t// Connect to the database server, but don't specify a database name \n\t\tConnection conn = DriverManager.getConnection(\"jdbc:mysql://\" + dbHost + \"/?user=\" + dbUser + \"&password=\" + dbPasswd);\n\t\t\n\t\tSystem.out.println(\"Creating database\");\n\t\tDBUtil.execSql(conn, \"create database \" + dbName);\n\t\t\n\t\tconn.close();\n\t\t\n\t\t// Reconnect to the newly-created database\n\t\tconn = DriverManager.getConnection(\"jdbc:mysql://\" + dbHost + \"/\" + dbName + \"?user=\" + dbUser + \"&password=\" + dbPasswd);\n\t\t\n\t\t// Create tables and indexes\n\t\tcreateTable(conn, JDBCDatabase.CHANGES, Change.SCHEMA);\n\t\tcreateTable(conn, JDBCDatabase.CONFIGURATION_SETTINGS, ConfigurationSetting.SCHEMA);\n\t\tcreateTable(conn, JDBCDatabase.COURSES, Course.SCHEMA);\n\t\tcreateTable(conn, 
JDBCDatabase.COURSE_REGISTRATIONS, CourseRegistration.SCHEMA);\n\t\tcreateTable(conn, JDBCDatabase.EVENTS, Event.SCHEMA);\n\t\tcreateTable(conn, JDBCDatabase.PROBLEMS, Problem.SCHEMA);\n\t\tcreateTable(conn, JDBCDatabase.SUBMISSION_RECEIPTS, SubmissionReceipt.SCHEMA);\n\t\tcreateTable(conn, JDBCDatabase.TERMS, Term.SCHEMA);\n\t\tcreateTable(conn, JDBCDatabase.TEST_CASES, TestCase.SCHEMA);\n\t\tcreateTable(conn, JDBCDatabase.TEST_RESULTS, TestResult.SCHEMA);\n\t\tcreateTable(conn, JDBCDatabase.USERS, User.SCHEMA);\n\t\t\n\t\t// Create initial database contents\n\t\t\n\t\t// Set institution name\n\t\tConfigurationSetting instName = new ConfigurationSetting();\n\t\tinstName.setName(ConfigurationSettingName.PUB_TEXT_INSTITUTION);\n\t\tinstName.setValue(ccInstitutionName);\n\t\tstoreBean(conn, instName, ConfigurationSetting.SCHEMA, JDBCDatabase.CONFIGURATION_SETTINGS);\n\t\t\n\t\t// Terms\n\t\tSystem.out.println(\"Creating terms...\");\n\t\tstoreTerm(conn, \"Winter\", 0, JDBCDatabase.TERMS);\n\t\tstoreTerm(conn, \"Spring\", 1, JDBCDatabase.TERMS);\n\t\tstoreTerm(conn, \"Summer\", 2, JDBCDatabase.TERMS);\n\t\tstoreTerm(conn, \"Summer 1\", 3, JDBCDatabase.TERMS);\n\t\tstoreTerm(conn, \"Summer 2\", 4, JDBCDatabase.TERMS);\n\t\tTerm fall = storeTerm(conn, \"Fall\", 5, JDBCDatabase.TERMS);\n\t\t\n\t\t// Create an initial demo course\n\t\tSystem.out.println(\"Creating demo course...\");\n\t\tCourse course = new Course();\n\t\tcourse.setName(\"CCDemo\");\n\t\tcourse.setTitle(\"CloudCoder demo course\");\n\t\tcourse.setTermId(fall.getId());\n\t\tcourse.setTerm(fall);\n\t\tcourse.setYear(2012);\n\t\tcourse.setUrl(\"http://cloudcoder.org/\");\n\t\tstoreBean(conn, course, Course.SCHEMA, JDBCDatabase.COURSES);\n\t\t\n\t\t// Create an initial user\n\t\tSystem.out.println(\"Creating initial user...\");\n\t\tUser user = new User();\n\t\tuser.setUsername(ccUserName);\n\t\tuser.setPasswordHash(BCrypt.hashpw(ccPassword, BCrypt.gensalt(12)));\n\t\tstoreBean(conn, user, User.SCHEMA, 
JDBCDatabase.USERS);\n\t\t\n\t\t// Register the user as an instructor in the demo course\n\t\tSystem.out.println(\"Registering initial user for demo course...\");\n\t\tCourseRegistration courseReg = new CourseRegistration();\n\t\tcourseReg.setCourseId(course.getId());\n\t\tcourseReg.setUserId(user.getId());\n\t\tcourseReg.setRegistrationType(CourseRegistrationType.INSTRUCTOR);\n\t\tcourseReg.setSection(101);\n\t\tstoreBean(conn, courseReg, CourseRegistration.SCHEMA, JDBCDatabase.COURSE_REGISTRATIONS);\n\t\t\n\t\t// Create a Problem\n\t\tSystem.out.println(\"Creating hello, world problem in demo course...\");\n\t\tProblem problem = new Problem();\n\t\tproblem.setCourseId(course.getId());\n\t\tproblem.setWhenAssigned(System.currentTimeMillis());\n\t\tproblem.setWhenDue(problem.getWhenAssigned() + (24L*60*60*1000));\n\t\tproblem.setVisible(true);\n\t\tproblem.setProblemType(ProblemType.C_PROGRAM);\n\t\tproblem.setTestname(\"hello\");\n\t\tproblem.setBriefDescription(\"Print hello, world\");\n\t\tproblem.setDescription(\n\t\t\t\t\"

Print a line with the following text:

\\n\" +\n\t\t\t\t\"
Hello, world
\\n\"\n\t\t);\t\t\t// At the moment, we don't need to allow NULL field values.\n\n\t\tproblem.setSkeleton(\n\t\t\t\t\"#include \\n\\n\" +\n\t\t\t\t\"int main(void) {\\n\" +\n\t\t\t\t\"\\t// TODO - add your code here\\n\\n\" +\n\t\t\t\t\"\\treturn 0;\\n\" +\n\t\t\t\t\"}\\n\"\n\t\t\t\t);\n\t\tproblem.setSchemaVersion(Problem.CURRENT_SCHEMA_VERSION);\n\t\tproblem.setAuthorName(\"A. User\");\n\t\tproblem.setAuthorEmail(\"auser@cs.unseen.edu\");\n\t\tproblem.setAuthorWebsite(\"http://cs.unseen.edu/~auser\");\n\t\tproblem.setTimestampUtc(System.currentTimeMillis());\n\t\tproblem.setLicense(ProblemLicense.CC_ATTRIB_SHAREALIKE_3_0);\n\t\t\n\t\tstoreBean(conn, problem, Problem.SCHEMA, JDBCDatabase.PROBLEMS);\n\t\t\n\t\t// Add a TestCase\n\t\tSystem.out.println(\"Creating test case for hello, world problem...\");\n\t\tTestCase testCase = new TestCase();\n\t\ttestCase.setProblemId(problem.getProblemId());\n\t\ttestCase.setTestCaseName(\"hello\");\n\t\ttestCase.setInput(\"\");\n\t\ttestCase.setOutput(\"^\\\\s*Hello\\\\s*,\\\\s*world\\\\s*$i\");\n\t\ttestCase.setSecret(false);\n\t\t\n\t\tstoreBean(conn, testCase, TestCase.SCHEMA, JDBCDatabase.TEST_CASES);\n\t\t\n\t\tconn.close();\n\t\t\n\t\tSystem.out.println(\"Success!\");\n\t}\n\n\tprivate static void createTable(Connection conn, String tableName, ModelObjectSchema schema) throws SQLException {\n\t\tSystem.out.println(\"Creating table \" + tableName);\n\t\tString sql = DBUtil.getCreateTableStatement(schema, tableName);\n\t\tif (DEBUG) {\n\t\t\tSystem.out.println(sql);\n\t\t}\n\t\tDBUtil.execSql(conn, sql);\n\t}\n\n\tprivate static Term storeTerm(Connection conn, String name, int seq, String tableName) throws SQLException {\n\t\tTerm term = new Term();\n\t\tterm.setName(name);\n\t\tterm.setSeq(seq);\n\t\tstoreBean(conn, term, Term.SCHEMA, tableName);\n\t\treturn term;\n\t}\n\n\t// Use introspection to store an arbitrary bean in the database.\n\t// Eventually we could use this sort of approach to replace much\n\t// of our 
hand-written JDBC code, although I don't know how great\n\t// and idea that would be (for example, it might not yield adequate\n\t// performance.) For just creating the database, it should be\n\t// fine.\n\tprivate static void storeBean(Connection conn, Object bean, ModelObjectSchema schema, String tableName) throws SQLException {\n\t\tStringBuilder buf = new StringBuilder();\n\t\t\n\t\tbuf.append(\"insert into \" + tableName);\n\t\tbuf.append(\" values (\");\n\t\tbuf.append(DBUtil.getInsertPlaceholdersNoId(schema));\n\t\tbuf.append(\")\");\n\t\t\n\t\tPreparedStatement stmt = null;\n\t\tResultSet genKeys = null;\n\t\t\n\t\ttry {\n\t\t\tstmt = conn.prepareStatement(buf.toString(), schema.hasUniqueId() ? PreparedStatement.RETURN_GENERATED_KEYS : 0);\n\t\t\t\n\t\t\t// Now for the magic: iterate through the schema fields\n\t\t\t// and bind the query parameters based on the bean properties.\n\t\t\tint index = 1;\n\t\t\tfor (ModelObjectField field : schema.getFieldList()) {\n\t\t\t\tif (field.isUniqueId()) {\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\t\t\t\ttry {\n\t\t\t\t\tObject value = PropertyUtils.getProperty(bean, field.getPropertyName());\n\t\t\t\t\tif (value instanceof Enum) {\n\t\t\t\t\t\t// Enum values are converted to integers\n\t\t\t\t\t\tvalue = Integer.valueOf(((Enum)value).ordinal());\n\t\t\t\t\t}\n\t\t\t\t\tstmt.setObject(index++, value);\n\t\t\t\t} catch (Exception e) {\n\t\t\t\t\tthrow new SQLException(\n\t\t\t\t\t\t\t\"Couldn't get property \" + field.getPropertyName() +\n\t\t\t\t\t\t\t\" of \" + bean.getClass().getName() + \" object\");\n\t\t\t\t}\n\t\t\t}\n\t\t\t\n\t\t\t// Execute the insert\n\t\t\tstmt.executeUpdate();\n\t\t\t\n\t\t\tif (schema.hasUniqueId()) {\n\t\t\t\tgenKeys = stmt.getGeneratedKeys();\n\t\t\t\tif (!genKeys.next()) {\n\t\t\t\t\tthrow new SQLException(\"Couldn't get generated id for \" + bean.getClass().getName()); \n\t\t\t\t}\n\t\t\t\tint id = genKeys.getInt(1);\n\t\t\t\t\n\t\t\t\t// Set the unique id value in the bean\n\t\t\t\ttry 
{\n\t\t\t\t\tBeanUtils.setProperty(bean, schema.getUniqueIdField().getPropertyName(), id);\n\t\t\t\t} catch (Exception e) {\n\t\t\t\t\tthrow new SQLException(\"Couldn't set generated unique id for \" + bean.getClass().getName());\n\t\t\t\t}\n\t\t\t}\n\t\t} finally {\n\t\t\tDBUtil.closeQuietly(genKeys);\n\t\t\tDBUtil.closeQuietly(stmt);\n\t\t}\n\t}\n\t\n\tprivate static void configureLog4j() {\n\t\t// See: http://robertmaldon.blogspot.com/2007/09/programmatically-configuring-log4j-and.html\n\t\tLogger rootLogger = Logger.getRootLogger();\n\t\tif (!rootLogger.getAllAppenders().hasMoreElements()) {\n\t\t\trootLogger.setLevel(Level.INFO);\n\t\t\trootLogger.addAppender(new ConsoleAppender(\n\t\t\t\t\tnew PatternLayout(\"%-5p [%t]: %m%n\")));\n\n\t\t\t// The TTCC_CONVERSION_PATTERN contains more info than\n\t\t\t// the pattern we used for the root logger\n\t\t\tLogger pkgLogger = rootLogger.getLoggerRepository().getLogger(\"robertmaldon.moneymachine\");\n\t\t\tpkgLogger.setLevel(Level.DEBUG);\n\t\t\tpkgLogger.addAppender(new ConsoleAppender(\n\t\t\t\t\tnew PatternLayout(PatternLayout.TTCC_CONVERSION_PATTERN)));\n\t\t}\n\t}\n}\n"},"message":{"kind":"string","value":"when configuring log4j, only need to create the root logger"},"old_file":{"kind":"string","value":"CloudCoderModelClassesPersistence/src/org/cloudcoder/app/server/persist/CreateWebappDatabase.java"},"subject":{"kind":"string","value":"when configuring log4j, only need to create the root logger"},"git_diff":{"kind":"string","value":"loudCoderModelClassesPersistence/src/org/cloudcoder/app/server/persist/CreateWebappDatabase.java\n \t\tLogger rootLogger = Logger.getRootLogger();\n \t\tif (!rootLogger.getAllAppenders().hasMoreElements()) {\n \t\t\trootLogger.setLevel(Level.INFO);\n\t\t\trootLogger.addAppender(new ConsoleAppender(\n\t\t\t\t\tnew PatternLayout(\"%-5p [%t]: %m%n\")));\n\n\t\t\t// The TTCC_CONVERSION_PATTERN contains more info than\n\t\t\t// the pattern we used for the root logger\n\t\t\tLogger 
pkgLogger = rootLogger.getLoggerRepository().getLogger(\"robertmaldon.moneymachine\");\n\t\t\tpkgLogger.setLevel(Level.DEBUG);\n\t\t\tpkgLogger.addAppender(new ConsoleAppender(\n\t\t\t\t\tnew PatternLayout(PatternLayout.TTCC_CONVERSION_PATTERN)));\n\t\t\trootLogger.addAppender(new ConsoleAppender(new PatternLayout(\"%-5p [%t]: %m%n\")));\n \t\t}\n \t}\n }"}}},{"rowIdx":2067,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"mit"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"44afe12f1bada9bb7724a3678553c86aa31b907c"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"raeleus/skin-composer"},"new_contents":{"kind":"string","value":"package com.ray3k.skincomposer;\n\nimport com.badlogic.gdx.math.MathUtils;\nimport com.badlogic.gdx.math.Vector2;\nimport com.badlogic.gdx.scenes.scene2d.*;\nimport com.badlogic.gdx.scenes.scene2d.actions.Actions;\nimport com.badlogic.gdx.scenes.scene2d.ui.Image;\nimport com.badlogic.gdx.scenes.scene2d.ui.Skin;\nimport com.badlogic.gdx.scenes.scene2d.ui.Table;\nimport com.badlogic.gdx.scenes.scene2d.ui.WidgetGroup;\nimport com.badlogic.gdx.scenes.scene2d.utils.ClickListener;\nimport com.badlogic.gdx.scenes.scene2d.utils.Disableable;\nimport com.badlogic.gdx.scenes.scene2d.utils.Drawable;\nimport com.badlogic.gdx.utils.Align;\n\nimport static com.badlogic.gdx.scenes.scene2d.actions.Actions.*;\n\npublic class PopTable extends Table {\n private Stage stage;\n private Image stageBackground;\n private WidgetGroup group;\n private final static Vector2 temp = new Vector2();\n private boolean hideOnUnfocus;\n private int preferredEdge;\n private boolean keepSizedWithinStage;\n private boolean automaticallyResized;\n \n public PopTable() {\n this(new PopTableStyle());\n }\n \n public PopTable(Skin skin) {\n this(skin.get(PopTableStyle.class));\n }\n \n public PopTable(Skin skin, String style) {\n this(skin.get(style, PopTableStyle.class));\n }\n \n 
public PopTable(PopTableStyle style) {\n setTouchable(Touchable.enabled);\n \n stageBackground = new Image(style.stageBackground);\n stageBackground.setFillParent(true);\n stageBackground.addListener(new ClickListener() {\n @Override\n public void clicked(InputEvent event, float x, float y) {\n super.clicked(event, x, y);\n if (hideOnUnfocus) {\n hide();\n }\n }\n });\n \n setBackground(style.background);\n \n hideOnUnfocus = true;\n preferredEdge = Align.top;\n keepSizedWithinStage = true;\n automaticallyResized = true;\n }\n \n public void alignToActorEdge(Actor actor, int edge) {\n float widgetX;\n switch (edge) {\n case Align.left:\n case Align.bottomLeft:\n case Align.topLeft:\n widgetX = -getWidth();\n break;\n case Align.right:\n case Align.bottomRight:\n case Align.topRight:\n widgetX = actor.getWidth();\n break;\n default:\n widgetX = actor.getWidth() / 2f - getWidth() / 2f;\n break;\n }\n \n float widgetY;\n switch (edge) {\n case Align.bottom:\n case Align.bottomLeft:\n case Align.bottomRight:\n widgetY = -getHeight();\n break;\n case Align.top:\n case Align.topLeft:\n case Align.topRight:\n widgetY = actor.getHeight();\n break;\n default:\n widgetY = actor.getHeight() / 2f - getHeight() / 2f;\n break;\n }\n \n temp.set(widgetX, widgetY);\n actor.localToStageCoordinates(temp);\n setPosition(temp.x, temp.y);\n }\n \n private float actorEdgeStageHorizontalDistance(Actor actor, int edge) {\n temp.set(0, 0);\n actor.localToStageCoordinates(temp);\n setPosition(temp.x, temp.y);\n \n float returnValue;\n switch (edge) {\n case Align.left:\n case Align.bottomLeft:\n case Align.topLeft:\n returnValue = temp.x;\n break;\n case Align.right:\n case Align.bottomRight:\n case Align.topRight:\n returnValue = stage.getWidth() - (temp.x + actor.getWidth());\n break;\n default:\n returnValue = 0;\n break;\n }\n \n return returnValue;\n }\n \n private float actorEdgeStageVerticalDistance(Actor actor, int edge) {\n temp.set(0, 0);\n actor.localToStageCoordinates(temp);\n 
setPosition(temp.x, temp.y);\n \n float returnValue;\n switch (edge) {\n case Align.bottom:\n case Align.bottomLeft:\n case Align.bottomRight:\n returnValue = temp.y;\n break;\n case Align.top:\n case Align.topLeft:\n case Align.topRight:\n returnValue = stage.getHeight() - (temp.y + actor.getHeight());\n break;\n default:\n returnValue = 0;\n break;\n }\n \n return returnValue;\n }\n \n public void moveToInsideStage() {\n if (getStage() != null) {\n if (getX() < 0) setX(0);\n else if (getX() + getWidth() > getStage().getWidth()) setX(getStage().getWidth() - getWidth());\n \n if (getY() < 0) setY(0);\n else if (getY() + getHeight() > getStage().getHeight()) setY(getStage().getHeight() - getHeight());\n }\n }\n \n public boolean isOutsideStage() {\n return getX() < 0 || getX() + getWidth() > getStage().getWidth() || getY() < 0 || getY() + getHeight() > getStage().getHeight();\n }\n \n public void hide() {\n hide(fadeOut(.2f));\n }\n \n public void hide(Action action) {\n fire(new TableHiddenEvent());\n group.addAction(sequence(action, Actions.removeActor()));\n }\n \n public void show(Stage stage) {\n Action action = sequence(alpha(0), fadeIn(.2f));\n this.show(stage, action);\n }\n \n public void show(Stage stage, Action action) {\n this.stage = stage;\n group = new WidgetGroup();\n group.setFillParent(true);\n stage.addActor(group);\n \n group.addActor(stageBackground);\n group.addActor(this);\n \n pack();\n \n if (keepSizedWithinStage) {\n if (getWidth() > stage.getWidth()) {\n setWidth(stage.getWidth());\n }\n \n if (getHeight() > stage.getHeight()) {\n setHeight(stage.getHeight());\n }\n }\n \n setPosition((int) (stage.getWidth() / 2f - getWidth() / 2f), (int) (stage.getHeight() / 2f - getHeight() / 2f));\n \n group.addAction(action);\n }\n \n public static class PopTableStyle {\n /*Optional*/\n public Drawable background, stageBackground;\n \n public PopTableStyle() {\n \n }\n \n public PopTableStyle(PopTableStyle style) {\n background = style.background;\n 
stageBackground = style.stageBackground;\n }\n }\n \n public static class PopTableClickListener extends ClickListener {\n protected PopTable popTable;\n \n public PopTableClickListener(Skin skin) {\n this(skin.get(PopTableStyle.class));\n }\n \n public PopTableClickListener(Skin skin, String style) {\n this(skin.get(style, PopTableStyle.class));\n }\n \n public PopTableClickListener(PopTableStyle style) {\n popTable = new PopTable(style);\n popTable.automaticallyResized = false;\n popTable.addListener(new TableHiddenListener() {\n @Override\n public void tableHidden(Event event) {\n PopTableClickListener.this.tableHidden(event);\n }\n });\n }\n \n @Override\n public void clicked(InputEvent event, float x, float y) {\n super.clicked(event, x, y);\n var stage = event.getListenerActor().getStage();\n var actor = event.getListenerActor();\n \n if (actor instanceof Disableable) {\n if (((Disableable) actor).isDisabled()) return;\n }\n \n popTable.show(stage);\n int edge = popTable.getPreferredEdge();\n \n popTable.alignToActorEdge(actor, edge);\n \n var rightDistance = popTable.actorEdgeStageHorizontalDistance(actor, Align.right);\n var leftDistance = popTable.actorEdgeStageHorizontalDistance(actor, Align.left);\n switch (edge) {\n case Align.left:\n case Align.topLeft:\n case Align.bottomLeft:\n if (popTable.getX() < 0) {\n if (rightDistance > leftDistance) {\n edge &= ~Align.left;\n edge |= Align.right;\n popTable.setWidth(Math.min(popTable.getWidth(), rightDistance));\n } else {\n popTable.setWidth(Math.min(popTable.getWidth(), leftDistance));\n }\n }\n break;\n \n case Align.right:\n case Align.bottomRight:\n case Align.topRight:\n if (popTable.getX() + popTable.getWidth() > stage.getWidth()) {\n if (leftDistance > rightDistance) {\n edge &= ~Align.right;\n edge |= Align.left;\n popTable.setWidth(Math.min(popTable.getWidth(), leftDistance));\n } else {\n popTable.setWidth(Math.min(popTable.getWidth(), rightDistance));\n }\n }\n break;\n }\n \n var topDistance = 
popTable.actorEdgeStageVerticalDistance(actor, Align.top);\n var bottomDistance = popTable.actorEdgeStageVerticalDistance(actor, Align.bottom);\n switch (edge) {\n case Align.bottom:\n case Align.bottomLeft:\n case Align.bottomRight:\n if (popTable.getY() < 0) {\n if (topDistance > bottomDistance) {\n edge &= ~Align.bottom;\n edge |= Align.top;\n popTable.setHeight(Math.min(popTable.getHeight(), topDistance));\n } else {\n popTable.setHeight(Math.min(popTable.getHeight(), bottomDistance));\n }\n }\n break;\n \n case Align.top:\n case Align.topLeft:\n case Align.topRight:\n if (popTable.getY() + popTable.getHeight() > stage.getHeight()) {\n if (bottomDistance > topDistance) {\n edge &= ~Align.top;\n edge |= Align.bottom;\n popTable.setHeight(Math.min(popTable.getHeight(), bottomDistance));\n } else {\n popTable.setHeight(Math.min(popTable.getHeight(), topDistance));\n }\n }\n break;\n }\n \n popTable.alignToActorEdge(actor, edge);\n \n popTable.moveToInsideStage();\n }\n \n public PopTable getPopTable() {\n return popTable;\n }\n \n /**\n * Override this method to be performed when the popTable is hidden or dismissed.\n */\n public void tableHidden(Event event) {\n \n }\n }\n \n public static class TableHiddenEvent extends Event {\n \n }\n \n public static abstract class TableHiddenListener implements EventListener {\n @Override\n public boolean handle(Event event) {\n if (event instanceof TableHiddenEvent) {\n tableHidden(event);\n return true;\n } else{\n return false;\n }\n }\n \n public abstract void tableHidden(Event event);\n }\n \n public boolean isHideOnUnfocus() {\n return hideOnUnfocus;\n }\n \n public void setHideOnUnfocus(boolean hideOnUnfocus) {\n this.hideOnUnfocus = hideOnUnfocus;\n }\n \n public int getPreferredEdge() {\n return preferredEdge;\n }\n \n public void setPreferredEdge(int preferredEdge) {\n this.preferredEdge = preferredEdge;\n }\n \n public boolean isKeepSizedWithinStage() {\n return keepSizedWithinStage;\n }\n \n public void 
setKeepSizedWithinStage(boolean keepSizedWithinStage) {\n this.keepSizedWithinStage = keepSizedWithinStage;\n }\n \n public boolean isAutomaticallyResized() {\n return automaticallyResized;\n }\n \n public void setAutomaticallyResized(boolean automaticallyResized) {\n this.automaticallyResized = automaticallyResized;\n }\n \n @Override\n public void layout() {\n super.layout();\n if (automaticallyResized) {\n var centerX = getX(Align.center);\n var centerY = getY(Align.center);\n pack();\n if (keepSizedWithinStage) {\n moveToInsideStage();\n }\n setPosition(centerX, centerY, Align.center);\n setPosition(MathUtils.floor(getX()), MathUtils.floor(getY()));\n }\n }\n}"},"new_file":{"kind":"string","value":"core/src/com/ray3k/skincomposer/PopTable.java"},"old_contents":{"kind":"string","value":"package com.ray3k.skincomposer;\n\nimport com.badlogic.gdx.math.Vector2;\nimport com.badlogic.gdx.scenes.scene2d.*;\nimport com.badlogic.gdx.scenes.scene2d.actions.Actions;\nimport com.badlogic.gdx.scenes.scene2d.ui.Image;\nimport com.badlogic.gdx.scenes.scene2d.ui.Skin;\nimport com.badlogic.gdx.scenes.scene2d.ui.Table;\nimport com.badlogic.gdx.scenes.scene2d.ui.WidgetGroup;\nimport com.badlogic.gdx.scenes.scene2d.utils.ChangeListener;\nimport com.badlogic.gdx.scenes.scene2d.utils.ClickListener;\nimport com.badlogic.gdx.scenes.scene2d.utils.Disableable;\nimport com.badlogic.gdx.scenes.scene2d.utils.Drawable;\nimport com.badlogic.gdx.utils.Align;\n\nimport static com.badlogic.gdx.scenes.scene2d.actions.Actions.*;\n\npublic class PopTable extends Table {\n private Stage stage;\n private Image stageBackground;\n private WidgetGroup group;\n private final static Vector2 temp = new Vector2();\n private boolean hideOnUnfocus;\n private int preferredEdge;\n private boolean keepSizedWithinStage;\n \n public PopTable() {\n this(new PopTableStyle());\n }\n \n public PopTable(Skin skin) {\n this(skin.get(PopTableStyle.class));\n }\n \n public PopTable(Skin skin, String style) {\n 
this(skin.get(style, PopTableStyle.class));\n }\n \n public PopTable(PopTableStyle style) {\n setTouchable(Touchable.enabled);\n \n stageBackground = new Image(style.stageBackground);\n stageBackground.setFillParent(true);\n stageBackground.addListener(new ClickListener() {\n @Override\n public void clicked(InputEvent event, float x, float y) {\n super.clicked(event, x, y);\n if (hideOnUnfocus) {\n hide();\n }\n }\n });\n \n setBackground(style.background);\n \n hideOnUnfocus = true;\n preferredEdge = Align.top;\n keepSizedWithinStage = true;\n }\n \n public void alignToActorEdge(Actor actor, int edge) {\n float widgetX;\n switch (edge) {\n case Align.left:\n case Align.bottomLeft:\n case Align.topLeft:\n widgetX = -getWidth();\n break;\n case Align.right:\n case Align.bottomRight:\n case Align.topRight:\n widgetX = actor.getWidth();\n break;\n default:\n widgetX = actor.getWidth() / 2f - getWidth() / 2f;\n break;\n }\n \n float widgetY;\n switch (edge) {\n case Align.bottom:\n case Align.bottomLeft:\n case Align.bottomRight:\n widgetY = -getHeight();\n break;\n case Align.top:\n case Align.topLeft:\n case Align.topRight:\n widgetY = actor.getHeight();\n break;\n default:\n widgetY = actor.getHeight() / 2f - getHeight() / 2f;\n break;\n }\n \n temp.set(widgetX, widgetY);\n actor.localToStageCoordinates(temp);\n setPosition(temp.x, temp.y);\n }\n \n private float actorEdgeStageHorizontalDistance(Actor actor, int edge) {\n temp.set(0, 0);\n actor.localToStageCoordinates(temp);\n setPosition(temp.x, temp.y);\n \n float returnValue;\n switch (edge) {\n case Align.left:\n case Align.bottomLeft:\n case Align.topLeft:\n returnValue = temp.x;\n break;\n case Align.right:\n case Align.bottomRight:\n case Align.topRight:\n returnValue = stage.getWidth() - (temp.x + actor.getWidth());\n break;\n default:\n returnValue = 0;\n break;\n }\n \n return returnValue;\n }\n \n private float actorEdgeStageVerticalDistance(Actor actor, int edge) {\n temp.set(0, 0);\n 
actor.localToStageCoordinates(temp);\n setPosition(temp.x, temp.y);\n \n float returnValue;\n switch (edge) {\n case Align.bottom:\n case Align.bottomLeft:\n case Align.bottomRight:\n returnValue = temp.y;\n break;\n case Align.top:\n case Align.topLeft:\n case Align.topRight:\n returnValue = stage.getHeight() - (temp.y + actor.getHeight());\n break;\n default:\n returnValue = 0;\n break;\n }\n \n return returnValue;\n }\n \n public void moveToInsideStage() {\n if (getStage() != null) {\n if (getX() < 0) setX(0);\n else if (getX() + getWidth() > getStage().getWidth()) setX(getStage().getWidth() - getWidth());\n \n if (getY() < 0) setY(0);\n else if (getY() + getHeight() > getStage().getHeight()) setY(getStage().getHeight() - getHeight());\n }\n }\n \n public boolean isOutsideStage() {\n return getX() < 0 || getX() + getWidth() > getStage().getWidth() || getY() < 0 || getY() + getHeight() > getStage().getHeight();\n }\n \n public void hide() {\n hide(fadeOut(.2f));\n }\n \n public void hide(Action action) {\n fire(new TableHiddenEvent());\n group.addAction(sequence(action, Actions.removeActor()));\n }\n \n public void show(Stage stage) {\n Action action = sequence(alpha(0), fadeIn(.2f));\n this.show(stage, action);\n }\n \n public void show(Stage stage, Action action) {\n this.stage = stage;\n group = new WidgetGroup();\n group.setFillParent(true);\n stage.addActor(group);\n \n group.addActor(stageBackground);\n group.addActor(this);\n \n pack();\n \n if (keepSizedWithinStage) {\n if (getWidth() > stage.getWidth()) {\n setWidth(stage.getWidth());\n }\n \n if (getHeight() > stage.getHeight()) {\n setHeight(stage.getHeight());\n }\n }\n \n setPosition((int) (stage.getWidth() / 2f - getWidth() / 2f), (int) (stage.getHeight() / 2f - getHeight() / 2f));\n \n group.addAction(action);\n }\n \n public static class PopTableStyle {\n /*Optional*/\n public Drawable background, stageBackground;\n \n public PopTableStyle() {\n \n }\n \n public PopTableStyle(PopTableStyle style) 
{\n background = style.background;\n stageBackground = style.stageBackground;\n }\n }\n \n public static class PopTableClickListener extends ClickListener {\n protected PopTable popTable;\n \n public PopTableClickListener(Skin skin) {\n this(skin.get(PopTableStyle.class));\n }\n \n public PopTableClickListener(Skin skin, String style) {\n this(skin.get(style, PopTableStyle.class));\n }\n \n public PopTableClickListener(PopTableStyle style) {\n popTable = new PopTable(style);\n popTable.addListener(new TableHiddenListener() {\n @Override\n public void tableHidden(Event event) {\n PopTableClickListener.this.tableHidden(event);\n }\n });\n }\n \n @Override\n public void clicked(InputEvent event, float x, float y) {\n super.clicked(event, x, y);\n var stage = event.getListenerActor().getStage();\n var actor = event.getListenerActor();\n \n if (actor instanceof Disableable) {\n if (((Disableable) actor).isDisabled()) return;\n }\n \n popTable.show(stage);\n int edge = popTable.getPreferredEdge();\n \n popTable.alignToActorEdge(actor, edge);\n \n var rightDistance = popTable.actorEdgeStageHorizontalDistance(actor, Align.right);\n var leftDistance = popTable.actorEdgeStageHorizontalDistance(actor, Align.left);\n switch (edge) {\n case Align.left:\n case Align.topLeft:\n case Align.bottomLeft:\n if (popTable.getX() < 0) {\n if (rightDistance > leftDistance) {\n edge &= ~Align.left;\n edge |= Align.right;\n popTable.setWidth(Math.min(popTable.getWidth(), rightDistance));\n } else {\n popTable.setWidth(Math.min(popTable.getWidth(), leftDistance));\n }\n }\n break;\n \n case Align.right:\n case Align.bottomRight:\n case Align.topRight:\n if (popTable.getX() + popTable.getWidth() > stage.getWidth()) {\n if (leftDistance > rightDistance) {\n edge &= ~Align.right;\n edge |= Align.left;\n popTable.setWidth(Math.min(popTable.getWidth(), leftDistance));\n } else {\n popTable.setWidth(Math.min(popTable.getWidth(), rightDistance));\n }\n }\n break;\n }\n \n var topDistance = 
popTable.actorEdgeStageVerticalDistance(actor, Align.top);\n var bottomDistance = popTable.actorEdgeStageVerticalDistance(actor, Align.bottom);\n switch (edge) {\n case Align.bottom:\n case Align.bottomLeft:\n case Align.bottomRight:\n if (popTable.getY() < 0) {\n if (topDistance > bottomDistance) {\n edge &= ~Align.bottom;\n edge |= Align.top;\n popTable.setHeight(Math.min(popTable.getHeight(), topDistance));\n } else {\n popTable.setHeight(Math.min(popTable.getHeight(), bottomDistance));\n }\n }\n break;\n \n case Align.top:\n case Align.topLeft:\n case Align.topRight:\n if (popTable.getY() + popTable.getHeight() > stage.getHeight()) {\n if (bottomDistance > topDistance) {\n edge &= ~Align.top;\n edge |= Align.bottom;\n popTable.setHeight(Math.min(popTable.getHeight(), bottomDistance));\n } else {\n popTable.setHeight(Math.min(popTable.getHeight(), topDistance));\n }\n }\n break;\n }\n \n popTable.alignToActorEdge(actor, edge);\n \n popTable.moveToInsideStage();\n }\n \n public PopTable getPopTable() {\n return popTable;\n }\n \n /**\n * Override this method to be performed when the popTable is hidden or dismissed.\n */\n public void tableHidden(Event event) {\n \n }\n }\n \n public static class TableHiddenEvent extends Event {\n \n }\n \n public static abstract class TableHiddenListener implements EventListener {\n @Override\n public boolean handle(Event event) {\n if (event instanceof TableHiddenEvent) {\n tableHidden(event);\n return true;\n } else{\n return false;\n }\n }\n \n public abstract void tableHidden(Event event);\n }\n \n public boolean isHideOnUnfocus() {\n return hideOnUnfocus;\n }\n \n public void setHideOnUnfocus(boolean hideOnUnfocus) {\n this.hideOnUnfocus = hideOnUnfocus;\n }\n \n public int getPreferredEdge() {\n return preferredEdge;\n }\n \n public void setPreferredEdge(int preferredEdge) {\n this.preferredEdge = preferredEdge;\n }\n \n public boolean isKeepSizedWithinStage() {\n return keepSizedWithinStage;\n }\n \n public void 
setKeepSizedWithinStage(boolean keepSizedWithinStage) {\n this.keepSizedWithinStage = keepSizedWithinStage;\n }\n\n}"},"message":{"kind":"string","value":"Added automaticallyResized option to PopTable.\n"},"old_file":{"kind":"string","value":"core/src/com/ray3k/skincomposer/PopTable.java"},"subject":{"kind":"string","value":"Added automaticallyResized option to PopTable."},"git_diff":{"kind":"string","value":"ore/src/com/ray3k/skincomposer/PopTable.java\n package com.ray3k.skincomposer;\n \nimport com.badlogic.gdx.math.MathUtils;\n import com.badlogic.gdx.math.Vector2;\n import com.badlogic.gdx.scenes.scene2d.*;\n import com.badlogic.gdx.scenes.scene2d.actions.Actions;\n import com.badlogic.gdx.scenes.scene2d.ui.Skin;\n import com.badlogic.gdx.scenes.scene2d.ui.Table;\n import com.badlogic.gdx.scenes.scene2d.ui.WidgetGroup;\nimport com.badlogic.gdx.scenes.scene2d.utils.ChangeListener;\n import com.badlogic.gdx.scenes.scene2d.utils.ClickListener;\n import com.badlogic.gdx.scenes.scene2d.utils.Disableable;\n import com.badlogic.gdx.scenes.scene2d.utils.Drawable;\n private boolean hideOnUnfocus;\n private int preferredEdge;\n private boolean keepSizedWithinStage;\n private boolean automaticallyResized;\n \n public PopTable() {\n this(new PopTableStyle());\n hideOnUnfocus = true;\n preferredEdge = Align.top;\n keepSizedWithinStage = true;\n automaticallyResized = true;\n }\n \n public void alignToActorEdge(Actor actor, int edge) {\n \n public PopTableClickListener(PopTableStyle style) {\n popTable = new PopTable(style);\n popTable.automaticallyResized = false;\n popTable.addListener(new TableHiddenListener() {\n @Override\n public void tableHidden(Event event) {\n public void setKeepSizedWithinStage(boolean keepSizedWithinStage) {\n this.keepSizedWithinStage = keepSizedWithinStage;\n }\n\n \n public boolean isAutomaticallyResized() {\n return automaticallyResized;\n }\n \n public void setAutomaticallyResized(boolean automaticallyResized) {\n this.automaticallyResized = 
automaticallyResized;\n }\n \n @Override\n public void layout() {\n super.layout();\n if (automaticallyResized) {\n var centerX = getX(Align.center);\n var centerY = getY(Align.center);\n pack();\n if (keepSizedWithinStage) {\n moveToInsideStage();\n }\n setPosition(centerX, centerY, Align.center);\n setPosition(MathUtils.floor(getX()), MathUtils.floor(getY()));\n }\n }\n }"}}},{"rowIdx":2068,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"mit"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"a5e621592f0bf7295ed66449a1780a489830941a"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"Permafrost/Tundra.java,Permafrost/Tundra.java"},"new_contents":{"kind":"string","value":"/*\n * The MIT License (MIT)\n *\n * Copyright (c) 2021 Lachlan Dowding\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage permafrost.tundra.data;\n\nimport com.wm.data.IData;\nimport com.wm.data.IDataCursor;\nimport com.wm.data.IDataFactory;\nimport com.wm.lang.ns.NSField;\nimport com.wm.lang.ns.NSRecord;\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.List;\n\n/**\n * Collection of convenience methods for working with IDataCursor objects.\n */\npublic class IDataCursorHelper {\n /**\n * Disallow instantiation of this class.\n */\n private IDataCursorHelper() {}\n\n /**\n * Append all elements in the given source cursor to the end of the given target cursor.\n *\n * @param sourceCursor The cursor containing elements to be appended.\n * @param targetCursor The cursor to which the elements will be appended.\n */\n public static void append(IDataCursor sourceCursor, IDataCursor targetCursor) {\n if (sourceCursor != null && targetCursor != null) {\n targetCursor.last();\n if (sourceCursor.first()) {\n targetCursor.insertAfter(sourceCursor.getKey(), sourceCursor.getValue());\n while (sourceCursor.next()) {\n targetCursor.insertAfter(sourceCursor.getKey(), sourceCursor.getValue());\n }\n }\n }\n }\n\n /**\n * Prepend all elements in the given source cursor to the beginning of the given target cursor.\n *\n * @param sourceCursor The cursor containing elements to be appended.\n * @param targetCursor The cursor to which the elements will be appended.\n */\n public static void prepend(IDataCursor sourceCursor, IDataCursor targetCursor) {\n if (sourceCursor != null && targetCursor != null) {\n targetCursor.first();\n if (sourceCursor.last()) {\n targetCursor.insertBefore(sourceCursor.getKey(), sourceCursor.getValue());\n while (sourceCursor.previous()) {\n 
targetCursor.insertBefore(sourceCursor.getKey(), sourceCursor.getValue());\n }\n }\n }\n }\n\n /**\n * Removes all elements from the given cursor.\n *\n * @param cursor The cursor from which to remove all elements.\n */\n public static void clear(IDataCursor cursor) {\n clear(cursor, null);\n }\n\n /**\n * Removes all elements from the given cursor.\n *\n * @param cursor The cursor from which to remove all elements.\n */\n public static void clear(IDataCursor cursor, Iterable keysToPreserve) {\n if (cursor != null) {\n IDataCursor preservedCursor = null;\n\n try {\n if (keysToPreserve != null) {\n IData preservedDocument = IDataFactory.create();\n preservedCursor = preservedDocument.getCursor();\n for (String key : keysToPreserve) {\n if (key != null) {\n while(cursor.first(key)) {\n preservedCursor.insertAfter(key, cursor.getValue());\n cursor.delete();\n }\n }\n }\n }\n\n cursor.first();\n while (cursor.delete());\n\n append(preservedCursor, cursor);\n } finally {\n if (preservedCursor != null) {\n preservedCursor.destroy();\n }\n }\n }\n }\n\n /**\n * Positions the cursor on the first element with the given key whose value has the specified class.\n *\n * @param cursor The cursor to be positioned.\n * @param valueClass The required class of the element's value.\n * @param key The element's key.\n * @param The required class of the element's value.\n * @return True if the key existed with a value of the required class and the cursor was repositioned,\n * otherwise false.\n */\n @SuppressWarnings(\"unchecked\")\n public static boolean first(IDataCursor cursor, Class valueClass, String key) {\n boolean first = false;\n if (cursor != null) {\n if (cursor.first(key)) {\n Object candidateValue = cursor.getValue();\n if (valueClass.isInstance(candidateValue)) {\n first = true;\n } else {\n while(cursor.next(key)) {\n candidateValue = cursor.getValue();\n if (valueClass.isInstance(candidateValue)) {\n first = true;\n break;\n }\n }\n }\n }\n }\n return first;\n }\n\n 
/**\n * Returns the value of the first element in the cursor with the given key whose value has the specified class.\n *\n * @param cursor The cursor containing elements.\n * @param valueClass The required class of the element's value.\n * @param key The element's key.\n * @param The required class of the element's value.\n * @return The value associated with the given key, if any.\n */\n @SuppressWarnings(\"unchecked\")\n public static V get(IDataCursor cursor, Class valueClass, String key) {\n V value;\n if (first(cursor, valueClass, key)) {\n value = (V)cursor.getValue();\n } else {\n value = null;\n }\n return value;\n }\n\n /**\n * Removes the first element in the cursor with the given key whose value has the specified class.\n *\n * @param cursor The cursor containing elements.\n * @param valueClass The required class of the element's value.\n * @param key The element's key.\n * @param The required class of the element's value.\n * @return The value associated with the given key, if any.\n */\n @SuppressWarnings(\"unchecked\")\n public static V remove(IDataCursor cursor, Class valueClass, String key) {\n V value;\n if (first(cursor, valueClass, key)) {\n value = (V)cursor.getValue();\n cursor.delete();\n } else {\n value = null;\n }\n return value;\n }\n\n /**\n * Renames the first element's key in the cursor with the given key whose value has the specified class.\n *\n * @param cursor The cursor containing elements.\n * @param valueClass The required class of the element's value.\n * @param sourceKey The element's key before renaming.\n * @param targetKey The element's key after renaming.\n * @param The required class of the element's value.\n * @return The value associated with the given key, if any.\n */\n @SuppressWarnings(\"unchecked\")\n public static V rename(IDataCursor cursor, Class valueClass, String sourceKey, String targetKey) {\n V value;\n if (first(cursor, valueClass, sourceKey)) {\n value = (V)cursor.getValue();\n cursor.delete();\n 
cursor.insertAfter(targetKey, value);\n } else {\n value = null;\n }\n return value;\n }\n\n /**\n * Replaces the value of the first element in the cursor with the given key whose existing value has the specified\n * class.\n *\n * @param cursor The cursor containing elements.\n * @param valueClass The required class of the element's existing value.\n * @param key The element's key.\n * @param newValue The new value to be associated with the given key.\n * @param The required class of the element's existing value.\n */\n @SuppressWarnings(\"unchecked\")\n public static void replace(IDataCursor cursor, Class valueClass, String key, Object newValue) {\n if (first(cursor, valueClass, key)) {\n cursor.setValue(newValue);\n }\n }\n\n /**\n * Replaces all elements in the given target cursor with the elements in the given source cursor.\n *\n * @param sourceCursor The cursor containing the elements to be used to replace the target cursor elements.\n * @param targetCursor The cursor whose elements are to be replaced with the source cursor elements.\n */\n public static void replace(IDataCursor sourceCursor, IDataCursor targetCursor) {\n clear(targetCursor);\n append(sourceCursor, targetCursor);\n }\n\n /**\n * Lexicographically sorts the elements in the given cursor by key.\n *\n * @param cursor The cursor whose elements are to be sorted.\n */\n public static void sort(IDataCursor cursor) {\n if (cursor != null) {\n if (cursor.first()) {\n List keys = keys(cursor);\n Collections.sort(keys);\n\n IData sortedDocument = IDataFactory.create();\n IDataCursor sortedCursor = sortedDocument.getCursor();\n try {\n for(String key : keys) {\n if (cursor.first(key)) {\n sortedCursor.insertAfter(key, cursor.getValue());\n cursor.delete();\n }\n }\n\n replace(sortedCursor, cursor);\n } finally {\n sortedCursor.destroy();\n }\n }\n }\n }\n\n /**\n * Returns true if the given cursor is empty.\n *\n * @param cursor The cursor to be checked if empty.\n * @return True if the cursor contains 
no elements, otherwise false.\n */\n public static boolean isEmpty(IDataCursor cursor) {\n return cursor == null || !cursor.first();\n }\n\n /**\n * Returns the number of items in the given cursor.\n *\n * @param cursor The cursor whose size is to be returned.\n * @return The number of items in the given cursor.\n */\n public static int size(IDataCursor cursor) {\n int size = 0;\n\n if (cursor != null) {\n if (cursor.first()) {\n size++;\n while (cursor.next()) {\n size++;\n }\n }\n }\n\n return size;\n }\n\n /**\n * Returns the list of keys present in the given cursor.\n *\n * @param cursor The cursor to return the list of keys from.\n * @return The list of keys present in the given cursor.\n */\n public static List keys(IDataCursor cursor) {\n ArrayList keys = new ArrayList(size(cursor));\n\n if (cursor != null) {\n if (cursor.first()) {\n keys.add(cursor.getKey());\n while(cursor.next()) {\n keys.add(cursor.getKey());\n }\n }\n }\n\n return keys;\n }\n\n /**\n * Returns the list of values present in the given cursor.\n *\n * @param cursor The cursor to return the list of values from.\n * @return The list of values present in the given cursor.\n */\n public static List values(IDataCursor cursor) {\n ArrayList values = new ArrayList(size(cursor));\n\n if (cursor != null) {\n if (cursor.first()) {\n values.add(cursor.getValue());\n while(cursor.next()) {\n values.add(cursor.getValue());\n }\n }\n }\n\n return values;\n }\n\n /**\n * Sanitizes the given cursor against the given record by removing all disallowed unspecified values.\n *\n * @param cursor The cursor to be sanitized.\n * @param record The record that defines the structure the cursor will be sanitized against.\n * @param recurse Whether to recursively sanitize child IData and IData[] elements.\n */\n public static void sanitize(IDataCursor cursor, NSRecord record, boolean recurse) {\n if (cursor != null && record != null) {\n NSField[] fields = record.getFields();\n if (fields != null) {\n IData 
sanitizedDocument = IDataFactory.create();\n IDataCursor sanitizedCursor = sanitizedDocument.getCursor();\n try {\n for (NSField field : fields) {\n if (field != null) {\n String key = field.getName();\n if (cursor.first(key)) {\n Object value = sanitize(cursor.getValue(), field, recurse);\n if (value != null) {\n sanitizedCursor.insertAfter(key, value);\n cursor.delete();\n } else {\n while(cursor.next(key)) {\n value = sanitize(cursor.getValue(), field, recurse);\n if (value != null) {\n sanitizedCursor.insertAfter(key, value);\n cursor.delete();\n break;\n }\n }\n }\n }\n }\n }\n if (record.isClosed()) {\n // if the record disallows unspecified fields, then remove all remaining unsanitized fields\n clear(cursor);\n } else {\n // if the record allows unspecified fields, then include any remaining keys at the end of the\n // document, but sort them lexicographically for predictable repeatable results\n sort(cursor);\n }\n\n prepend(sanitizedCursor, cursor);\n } finally {\n sanitizedCursor.destroy();\n }\n }\n }\n }\n\n /**\n * Sanitizes the given value against the given field.\n *\n * @param value The value to sanitize.\n * @param field The field against which to sanitize.\n * @param recurse Whether to recursively sanitize child IData and IData[] objects.\n * @return The sanitized value, or null if not a valid value for this field.\n */\n private static Object sanitize(Object value, NSField field, boolean recurse) {\n Object sanitizedValue = null;\n\n int fieldType = field.getType();\n int fieldDimensions = field.getDimensions();\n\n if (field instanceof NSRecord) {\n if (fieldDimensions == NSField.DIM_ARRAY) {\n IData[] array = IDataHelper.toIDataArray(value);\n if (array != null) {\n if (recurse) {\n for (IData document : array) {\n if (document != null) {\n IDataCursor documentCursor = document.getCursor();\n try {\n sanitize(documentCursor, (NSRecord)field, recurse);\n } finally {\n documentCursor.destroy();\n }\n }\n }\n }\n sanitizedValue = array;\n }\n } 
else if (fieldDimensions == NSField.DIM_SCALAR) {\n IData document = IDataHelper.toIData(value);\n if (document != null) {\n if (recurse) {\n IDataCursor documentCursor = document.getCursor();\n try {\n sanitize(documentCursor, (NSRecord)field, recurse);\n } finally {\n documentCursor.destroy();\n }\n }\n sanitizedValue = document;\n }\n }\n } else if (fieldType == NSField.FIELD_STRING) {\n if ((fieldDimensions == NSField.DIM_TABLE && value instanceof String[][]) ||\n (fieldDimensions == NSField.DIM_ARRAY && value instanceof String[]) ||\n (fieldDimensions == NSField.DIM_SCALAR && value instanceof String)) {\n sanitizedValue = value;\n }\n } else {\n if ((fieldDimensions == NSField.DIM_TABLE && value instanceof Object[][]) ||\n (fieldDimensions == NSField.DIM_ARRAY && value instanceof Object[]) ||\n (fieldDimensions == NSField.DIM_SCALAR && value instanceof Object)) {\n sanitizedValue = value;\n }\n }\n\n return sanitizedValue;\n }\n}\n"},"new_file":{"kind":"string","value":"src/main/java/permafrost/tundra/data/IDataCursorHelper.java"},"old_contents":{"kind":"string","value":"/*\n * The MIT License (MIT)\n *\n * Copyright (c) 2021 Lachlan Dowding\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage permafrost.tundra.data;\n\nimport com.wm.data.IData;\nimport com.wm.data.IDataCursor;\nimport com.wm.data.IDataFactory;\nimport com.wm.lang.ns.NSField;\nimport com.wm.lang.ns.NSRecord;\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.List;\n\n/**\n * Collection of convenience methods for working with IDataCursor objects.\n */\npublic class IDataCursorHelper {\n /**\n * Disallow instantiation of this class.\n */\n private IDataCursorHelper() {}\n\n /**\n * Append all elements in the given source cursor to the end of the given target cursor.\n *\n * @param sourceCursor The cursor containing elements to be appended.\n * @param targetCursor The cursor to which the elements will be appended.\n */\n public static void append(IDataCursor sourceCursor, IDataCursor targetCursor) {\n if (sourceCursor != null && targetCursor != null) {\n targetCursor.last();\n if (sourceCursor.first()) {\n targetCursor.insertAfter(sourceCursor.getKey(), sourceCursor.getValue());\n while (sourceCursor.next()) {\n targetCursor.insertAfter(sourceCursor.getKey(), sourceCursor.getValue());\n }\n }\n }\n }\n\n /**\n * Prepend all elements in the given source cursor to the beginning of the given target cursor.\n *\n * @param sourceCursor The cursor containing elements to be appended.\n * @param targetCursor The cursor to which the elements will be appended.\n */\n public static void prepend(IDataCursor sourceCursor, IDataCursor targetCursor) {\n if (sourceCursor != null && targetCursor != null) {\n targetCursor.first();\n if (sourceCursor.last()) {\n targetCursor.insertBefore(sourceCursor.getKey(), sourceCursor.getValue());\n while (sourceCursor.previous()) {\n 
targetCursor.insertBefore(sourceCursor.getKey(), sourceCursor.getValue());\n }\n }\n }\n }\n\n /**\n * Removes all elements from the given cursor.\n *\n * @param cursor The cursor from which to remove all elements.\n */\n public static void clear(IDataCursor cursor) {\n clear(cursor, null);\n }\n\n /**\n * Removes all elements from the given cursor.\n *\n * @param cursor The cursor from which to remove all elements.\n */\n public static void clear(IDataCursor cursor, Iterable keysToPreserve) {\n if (cursor != null) {\n IDataCursor preservedCursor = null;\n\n try {\n if (keysToPreserve != null) {\n IData preservedDocument = IDataFactory.create();\n preservedCursor = preservedDocument.getCursor();\n for (String key : keysToPreserve) {\n if (key != null) {\n while(cursor.first(key)) {\n preservedCursor.insertAfter(key, cursor.getValue());\n cursor.delete();\n }\n }\n }\n }\n\n cursor.first();\n while (cursor.delete());\n\n append(preservedCursor, cursor);\n } finally {\n if (preservedCursor != null) {\n preservedCursor.destroy();\n }\n }\n }\n }\n\n /**\n * Replaces all elements in the given target cursor with the elements in the given source cursor.\n *\n * @param sourceCursor The cursor containing the elements to be used to replace the target cursor elements.\n * @param targetCursor The cursor whose elements are to be replaced with the source cursor elements.\n */\n public static void replace(IDataCursor sourceCursor, IDataCursor targetCursor) {\n clear(targetCursor);\n append(sourceCursor, targetCursor);\n }\n\n /**\n * Lexicographically sorts the elements in the given cursor by key.\n *\n * @param cursor The cursor whose elements are to be sorted.\n */\n public static void sort(IDataCursor cursor) {\n if (cursor != null) {\n if (cursor.first()) {\n List keys = keys(cursor);\n Collections.sort(keys);\n\n IData sortedDocument = IDataFactory.create();\n IDataCursor sortedCursor = sortedDocument.getCursor();\n try {\n for(String key : keys) {\n if (cursor.first(key)) 
{\n sortedCursor.insertAfter(key, cursor.getValue());\n cursor.delete();\n }\n }\n\n replace(sortedCursor, cursor);\n } finally {\n sortedCursor.destroy();\n }\n }\n }\n }\n\n /**\n * Returns the number of items in the given cursor.\n *\n * @param cursor The cursor whose size is to be returned.\n * @return The number of items in the given cursor.\n */\n public static int size(IDataCursor cursor) {\n int size = 0;\n\n if (cursor != null) {\n if (cursor.first()) {\n size++;\n while (cursor.next()) {\n size++;\n }\n }\n }\n\n return size;\n }\n\n /**\n * Returns the list of keys present in the given cursor.\n *\n * @param cursor The cursor to return the list of keys from.\n * @return The list of keys present in the given cursor.\n */\n public static List keys(IDataCursor cursor) {\n ArrayList keys = new ArrayList(size(cursor));\n\n if (cursor != null) {\n if (cursor.first()) {\n keys.add(cursor.getKey());\n while(cursor.next()) {\n keys.add(cursor.getKey());\n }\n }\n }\n\n return keys;\n }\n\n /**\n * Returns the list of values present in the given cursor.\n *\n * @param cursor The cursor to return the list of values from.\n * @return The list of values present in the given cursor.\n */\n public static List values(IDataCursor cursor) {\n ArrayList values = new ArrayList(size(cursor));\n\n if (cursor != null) {\n if (cursor.first()) {\n values.add(cursor.getValue());\n while(cursor.next()) {\n values.add(cursor.getValue());\n }\n }\n }\n\n return values;\n }\n\n /**\n * Sanitizes the given cursor against the given record by removing all disallowed unspecified values.\n *\n * @param cursor The cursor to be sanitized.\n * @param record The record that defines the structure the cursor will be sanitized against.\n * @param recurse Whether to recursively sanitize child IData and IData[] elements.\n */\n public static void sanitize(IDataCursor cursor, NSRecord record, boolean recurse) {\n if (cursor != null && record != null) {\n NSField[] fields = record.getFields();\n if 
(fields != null) {\n IData sanitizedDocument = IDataFactory.create();\n IDataCursor sanitizedCursor = sanitizedDocument.getCursor();\n try {\n for (NSField field : fields) {\n if (field != null) {\n\n String key = field.getName();\n if (cursor.first(key)) {\n Object value = sanitize(cursor.getValue(), field, recurse);\n if (value != null) {\n sanitizedCursor.insertAfter(key, value);\n cursor.delete();\n } else {\n while(cursor.next(key)) {\n value = sanitize(cursor.getValue(), field, recurse);\n if (value != null) {\n sanitizedCursor.insertAfter(key, value);\n cursor.delete();\n break;\n }\n }\n }\n }\n }\n }\n if (record.isClosed()) {\n // if the record disallows unspecified fields, then remove all remaining unsanitized fields\n clear(cursor);\n } else {\n // if the record allows unspecified fields, then include any remaining keys at the end of the\n // document, but sort them lexicographically for predictable repeatable results\n sort(cursor);\n }\n\n prepend(sanitizedCursor, cursor);\n } finally {\n sanitizedCursor.destroy();\n }\n }\n }\n }\n\n /**\n * Sanitizes the given value against the given field.\n *\n * @param value The value to sanitize.\n * @param field The field against which to sanitize.\n * @param recurse Whether to recursively sanitize child IData and IData[] objects.\n * @return The sanitized value, or null if not a valid value for this field.\n */\n private static Object sanitize(Object value, NSField field, boolean recurse) {\n Object sanitizedValue = null;\n\n int fieldType = field.getType();\n int fieldDimensions = field.getDimensions();\n\n if (field instanceof NSRecord) {\n if (fieldDimensions == NSField.DIM_ARRAY) {\n IData[] array = IDataHelper.toIDataArray(value);\n if (array != null) {\n if (recurse) {\n for (IData document : array) {\n if (document != null) {\n IDataCursor documentCursor = document.getCursor();\n try {\n sanitize(documentCursor, (NSRecord)field, recurse);\n } finally {\n documentCursor.destroy();\n }\n }\n }\n }\n 
sanitizedValue = array;\n }\n } else if (fieldDimensions == NSField.DIM_SCALAR) {\n IData document = IDataHelper.toIData(value);\n if (document != null) {\n if (recurse) {\n IDataCursor documentCursor = document.getCursor();\n try {\n sanitize(documentCursor, (NSRecord)field, recurse);\n } finally {\n documentCursor.destroy();\n }\n }\n sanitizedValue = document;\n }\n }\n } else if (fieldType == NSField.FIELD_STRING) {\n if ((fieldDimensions == NSField.DIM_TABLE && value instanceof String[][]) ||\n (fieldDimensions == NSField.DIM_ARRAY && value instanceof String[]) ||\n (fieldDimensions == NSField.DIM_SCALAR && value instanceof String)) {\n sanitizedValue = value;\n }\n } else {\n if ((fieldDimensions == NSField.DIM_TABLE && value instanceof Object[][]) ||\n (fieldDimensions == NSField.DIM_ARRAY && value instanceof Object[]) ||\n (fieldDimensions == NSField.DIM_SCALAR && value instanceof Object)) {\n sanitizedValue = value;\n }\n }\n\n return sanitizedValue;\n }\n}\n"},"message":{"kind":"string","value":"Add IDataCursorHelper methods first, get, remove, and rename\n"},"old_file":{"kind":"string","value":"src/main/java/permafrost/tundra/data/IDataCursorHelper.java"},"subject":{"kind":"string","value":"Add IDataCursorHelper methods first, get, remove, and rename"},"git_diff":{"kind":"string","value":"rc/main/java/permafrost/tundra/data/IDataCursorHelper.java\n }\n \n /**\n * Positions the cursor on the first element with the given key whose value has the specified class.\n *\n * @param cursor The cursor to be positioned.\n * @param valueClass The required class of the element's value.\n * @param key The element's key.\n * @param The required class of the element's value.\n * @return True if the key existed with a value of the required class and the cursor was repositioned,\n * otherwise false.\n */\n @SuppressWarnings(\"unchecked\")\n public static boolean first(IDataCursor cursor, Class valueClass, String key) {\n boolean first = false;\n if (cursor != null) {\n if 
(cursor.first(key)) {\n Object candidateValue = cursor.getValue();\n if (valueClass.isInstance(candidateValue)) {\n first = true;\n } else {\n while(cursor.next(key)) {\n candidateValue = cursor.getValue();\n if (valueClass.isInstance(candidateValue)) {\n first = true;\n break;\n }\n }\n }\n }\n }\n return first;\n }\n\n /**\n * Returns the value of the first element in the cursor with the given key whose value has the specified class.\n *\n * @param cursor The cursor containing elements.\n * @param valueClass The required class of the element's value.\n * @param key The element's key.\n * @param The required class of the element's value.\n * @return The value associated with the given key, if any.\n */\n @SuppressWarnings(\"unchecked\")\n public static V get(IDataCursor cursor, Class valueClass, String key) {\n V value;\n if (first(cursor, valueClass, key)) {\n value = (V)cursor.getValue();\n } else {\n value = null;\n }\n return value;\n }\n\n /**\n * Removes the first element in the cursor with the given key whose value has the specified class.\n *\n * @param cursor The cursor containing elements.\n * @param valueClass The required class of the element's value.\n * @param key The element's key.\n * @param The required class of the element's value.\n * @return The value associated with the given key, if any.\n */\n @SuppressWarnings(\"unchecked\")\n public static V remove(IDataCursor cursor, Class valueClass, String key) {\n V value;\n if (first(cursor, valueClass, key)) {\n value = (V)cursor.getValue();\n cursor.delete();\n } else {\n value = null;\n }\n return value;\n }\n\n /**\n * Renames the first element's key in the cursor with the given key whose value has the specified class.\n *\n * @param cursor The cursor containing elements.\n * @param valueClass The required class of the element's value.\n * @param sourceKey The element's key before renaming.\n * @param targetKey The element's key after renaming.\n * @param The required class of the element's 
value.\n * @return The value associated with the given key, if any.\n */\n @SuppressWarnings(\"unchecked\")\n public static V rename(IDataCursor cursor, Class valueClass, String sourceKey, String targetKey) {\n V value;\n if (first(cursor, valueClass, sourceKey)) {\n value = (V)cursor.getValue();\n cursor.delete();\n cursor.insertAfter(targetKey, value);\n } else {\n value = null;\n }\n return value;\n }\n\n /**\n * Replaces the value of the first element in the cursor with the given key whose existing value has the specified\n * class.\n *\n * @param cursor The cursor containing elements.\n * @param valueClass The required class of the element's existing value.\n * @param key The element's key.\n * @param newValue The new value to be associated with the given key.\n * @param The required class of the element's existing value.\n */\n @SuppressWarnings(\"unchecked\")\n public static void replace(IDataCursor cursor, Class valueClass, String key, Object newValue) {\n if (first(cursor, valueClass, key)) {\n cursor.setValue(newValue);\n }\n }\n\n /**\n * Replaces all elements in the given target cursor with the elements in the given source cursor.\n *\n * @param sourceCursor The cursor containing the elements to be used to replace the target cursor elements.\n }\n }\n }\n }\n\n /**\n * Returns true if the given cursor is empty.\n *\n * @param cursor The cursor to be checked if empty.\n * @return True if the cursor contains no elements, otherwise false.\n */\n public static boolean isEmpty(IDataCursor cursor) {\n return cursor == null || !cursor.first();\n }\n \n /**\n try {\n for (NSField field : fields) {\n if (field != null) {\n\n String key = field.getName();\n if (cursor.first(key)) {\n Object value = sanitize(cursor.getValue(), field, 
recurse);"}}},{"rowIdx":2069,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"dc66d42a74aee604fa261942105f03fa89d2c859"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"katre/bazel,katre/bazel,cushon/bazel,bazelbuild/bazel,cushon/bazel,katre/bazel,cushon/bazel,cushon/bazel,bazelbuild/bazel,bazelbuild/bazel,bazelbuild/bazel,katre/bazel,katre/bazel,katre/bazel,cushon/bazel,bazelbuild/bazel,cushon/bazel,bazelbuild/bazel"},"new_contents":{"kind":"string","value":"// Copyright 2018 The Bazel Authors. All rights reserved.\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License\n\npackage com.google.devtools.build.lib.rules.config;\n\nimport static com.google.devtools.build.lib.analysis.config.StarlarkDefinedConfigTransition.COMMAND_LINE_OPTION_PREFIX;\n\nimport com.google.common.collect.Sets;\nimport com.google.devtools.build.lib.analysis.config.StarlarkDefinedConfigTransition;\nimport com.google.devtools.build.lib.analysis.config.StarlarkDefinedConfigTransition.Settings;\nimport com.google.devtools.build.lib.cmdline.BazelModuleContext;\nimport com.google.devtools.build.lib.cmdline.Label;\nimport com.google.devtools.build.lib.starlarkbuildapi.config.ConfigGlobalLibraryApi;\nimport com.google.devtools.build.lib.starlarkbuildapi.config.ConfigurationTransitionApi;\nimport java.util.HashSet;\nimport 
java.util.List;\nimport java.util.Map;\nimport net.starlark.java.eval.Dict;\nimport net.starlark.java.eval.EvalException;\nimport net.starlark.java.eval.Module;\nimport net.starlark.java.eval.Sequence;\nimport net.starlark.java.eval.Starlark;\nimport net.starlark.java.eval.StarlarkCallable;\nimport net.starlark.java.eval.StarlarkSemantics;\nimport net.starlark.java.eval.StarlarkThread;\nimport net.starlark.java.syntax.Location;\n\n/**\n * Implementation of {@link ConfigGlobalLibraryApi}.\n *\n *

A collection of top-level Starlark functions pertaining to configuration.\n */\npublic class ConfigGlobalLibrary implements ConfigGlobalLibraryApi {\n\n @Override\n public ConfigurationTransitionApi transition(\n StarlarkCallable implementation,\n Sequence inputs, // expected\n Sequence outputs, // expected\n StarlarkThread thread)\n throws EvalException {\n StarlarkSemantics semantics = thread.getSemantics();\n List inputsList = Sequence.cast(inputs, String.class, \"inputs\");\n List outputsList = Sequence.cast(outputs, String.class, \"outputs\");\n validateBuildSettingKeys(inputsList, Settings.INPUTS);\n validateBuildSettingKeys(outputsList, Settings.OUTPUTS);\n BazelModuleContext moduleContext =\n BazelModuleContext.of(Module.ofInnermostEnclosingStarlarkFunction(thread));\n Location location = thread.getCallerLocation();\n return StarlarkDefinedConfigTransition.newRegularTransition(\n implementation,\n inputsList,\n outputsList,\n semantics,\n moduleContext.label(),\n location,\n moduleContext.repoMapping());\n }\n\n @Override\n public ConfigurationTransitionApi analysisTestTransition(\n Dict changedSettings, // expected\n StarlarkThread thread)\n throws EvalException {\n Map changedSettingsMap =\n Dict.cast(changedSettings, String.class, Object.class, \"changed_settings dict\");\n validateBuildSettingKeys(changedSettingsMap.keySet(), Settings.OUTPUTS);\n BazelModuleContext moduleContext =\n BazelModuleContext.of(Module.ofInnermostEnclosingStarlarkFunction(thread));\n Location location = thread.getCallerLocation();\n return StarlarkDefinedConfigTransition.newAnalysisTestTransition(\n changedSettingsMap, moduleContext.repoMapping(), moduleContext.label(), location);\n }\n\n private void validateBuildSettingKeys(Iterable optionKeys, Settings keyErrorDescriptor)\n throws EvalException {\n\n HashSet processedOptions = Sets.newHashSet();\n String singularErrorDescriptor = keyErrorDescriptor == Settings.INPUTS ? 
\"input\" : \"output\";\n\n for (String optionKey : optionKeys) {\n if (!optionKey.startsWith(COMMAND_LINE_OPTION_PREFIX)) {\n try {\n Label.parseAbsoluteUnchecked(optionKey);\n } catch (IllegalArgumentException e) {\n throw Starlark.errorf(\n \"invalid transition %s '%s'. If this is intended as a native option, \"\n + \"it must begin with //command_line_option: %s\",\n singularErrorDescriptor, optionKey, e.getMessage());\n }\n } else {\n String optionName = optionKey.substring(COMMAND_LINE_OPTION_PREFIX.length());\n if (!validOptionName(optionName)) {\n throw Starlark.errorf(\n \"Invalid transition %s '%s'. Cannot transition on --experimental_* or \"\n + \"--incompatible_* options\",\n singularErrorDescriptor, optionKey);\n }\n }\n if (!processedOptions.add(optionKey)) {\n throw Starlark.errorf(\"duplicate transition %s '%s'\", singularErrorDescriptor, optionKey);\n }\n }\n }\n\n private static boolean validOptionName(String optionName) {\n if (optionName.startsWith(\"experimental_\")) {\n // Don't allow experimental flags.\n return false;\n }\n\n if (optionName.equals(\"incompatible_enable_cc_toolchain_resolution\")\n || optionName.equals(\"incompatible_enable_cgo_toolchain_resolution\")\n || optionName.equals(\"incompatible_enable_apple_toolchain_resolution\")) {\n // This is specifically allowed.\n return true;\n } else if (optionName.startsWith(\"incompatible_\")) {\n // Don't allow other incompatible flags.\n return false;\n }\n\n return true;\n }\n}\n"},"new_file":{"kind":"string","value":"src/main/java/com/google/devtools/build/lib/rules/config/ConfigGlobalLibrary.java"},"old_contents":{"kind":"string","value":"// Copyright 2018 The Bazel Authors. 
All rights reserved.\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License\n\npackage com.google.devtools.build.lib.rules.config;\n\nimport static com.google.devtools.build.lib.analysis.config.StarlarkDefinedConfigTransition.COMMAND_LINE_OPTION_PREFIX;\n\nimport com.google.common.collect.Sets;\nimport com.google.devtools.build.lib.analysis.config.StarlarkDefinedConfigTransition;\nimport com.google.devtools.build.lib.analysis.config.StarlarkDefinedConfigTransition.Settings;\nimport com.google.devtools.build.lib.cmdline.BazelModuleContext;\nimport com.google.devtools.build.lib.cmdline.Label;\nimport com.google.devtools.build.lib.starlarkbuildapi.config.ConfigGlobalLibraryApi;\nimport com.google.devtools.build.lib.starlarkbuildapi.config.ConfigurationTransitionApi;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Map;\nimport net.starlark.java.eval.Dict;\nimport net.starlark.java.eval.EvalException;\nimport net.starlark.java.eval.Module;\nimport net.starlark.java.eval.Sequence;\nimport net.starlark.java.eval.Starlark;\nimport net.starlark.java.eval.StarlarkCallable;\nimport net.starlark.java.eval.StarlarkSemantics;\nimport net.starlark.java.eval.StarlarkThread;\nimport net.starlark.java.syntax.Location;\n\n/**\n * Implementation of {@link ConfigGlobalLibraryApi}.\n *\n *

A collection of top-level Starlark functions pertaining to configuration.\n */\npublic class ConfigGlobalLibrary implements ConfigGlobalLibraryApi {\n\n @Override\n public ConfigurationTransitionApi transition(\n StarlarkCallable implementation,\n Sequence inputs, // expected\n Sequence outputs, // expected\n StarlarkThread thread)\n throws EvalException {\n StarlarkSemantics semantics = thread.getSemantics();\n List inputsList = Sequence.cast(inputs, String.class, \"inputs\");\n List outputsList = Sequence.cast(outputs, String.class, \"outputs\");\n validateBuildSettingKeys(inputsList, Settings.INPUTS);\n validateBuildSettingKeys(outputsList, Settings.OUTPUTS);\n BazelModuleContext moduleContext =\n BazelModuleContext.of(Module.ofInnermostEnclosingStarlarkFunction(thread));\n Location location = thread.getCallerLocation();\n return StarlarkDefinedConfigTransition.newRegularTransition(\n implementation,\n inputsList,\n outputsList,\n semantics,\n moduleContext.label(),\n location,\n moduleContext.repoMapping());\n }\n\n @Override\n public ConfigurationTransitionApi analysisTestTransition(\n Dict changedSettings, // expected\n StarlarkThread thread)\n throws EvalException {\n Map changedSettingsMap =\n Dict.cast(changedSettings, String.class, Object.class, \"changed_settings dict\");\n validateBuildSettingKeys(changedSettingsMap.keySet(), Settings.OUTPUTS);\n BazelModuleContext moduleContext =\n BazelModuleContext.of(Module.ofInnermostEnclosingStarlarkFunction(thread));\n Location location = thread.getCallerLocation();\n return StarlarkDefinedConfigTransition.newAnalysisTestTransition(\n changedSettingsMap, moduleContext.repoMapping(), moduleContext.label(), location);\n }\n\n private void validateBuildSettingKeys(Iterable optionKeys, Settings keyErrorDescriptor)\n throws EvalException {\n\n HashSet processedOptions = Sets.newHashSet();\n String singularErrorDescriptor = keyErrorDescriptor == Settings.INPUTS ? 
\"input\" : \"output\";\n\n for (String optionKey : optionKeys) {\n if (!optionKey.startsWith(COMMAND_LINE_OPTION_PREFIX)) {\n try {\n Label.parseAbsoluteUnchecked(optionKey);\n } catch (IllegalArgumentException e) {\n throw Starlark.errorf(\n \"invalid transition %s '%s'. If this is intended as a native option, \"\n + \"it must begin with //command_line_option: %s\",\n singularErrorDescriptor, optionKey, e.getMessage());\n }\n } else {\n String optionName = optionKey.substring(COMMAND_LINE_OPTION_PREFIX.length());\n if (!validOptionName(optionName)) {\n throw Starlark.errorf(\n \"Invalid transition %s '%s'. Cannot transition on --experimental_* or \"\n + \"--incompatible_* options\",\n singularErrorDescriptor, optionKey);\n }\n }\n if (!processedOptions.add(optionKey)) {\n throw Starlark.errorf(\"duplicate transition %s '%s'\", singularErrorDescriptor, optionKey);\n }\n }\n }\n\n private static boolean validOptionName(String optionName) {\n if (optionName.startsWith(\"experimental_\")) {\n // Don't allow experimental flags.\n return false;\n }\n\n if (optionName.equals(\"incompatible_enable_cc_toolchain_resolution\")\n || optionName.equals(\"incompatible_enable_apple_toolchain_resolution\")) {\n // This is specifically allowed.\n return true;\n } else if (optionName.startsWith(\"incompatible_\")) {\n // Don't allow other incompatible flags.\n return false;\n }\n\n return true;\n }\n}\n"},"message":{"kind":"string","value":"Add command option incompatible_enable_cgo_toolchain_resolution\n\nPiperOrigin-RevId: 464537505\nChange-Id: Icab8a3d5277939e4968aa04e69016a99b8e009e9\n"},"old_file":{"kind":"string","value":"src/main/java/com/google/devtools/build/lib/rules/config/ConfigGlobalLibrary.java"},"subject":{"kind":"string","value":"Add command option incompatible_enable_cgo_toolchain_resolution"},"git_diff":{"kind":"string","value":"rc/main/java/com/google/devtools/build/lib/rules/config/ConfigGlobalLibrary.java\n }\n \n if 
(optionName.equals(\"incompatible_enable_cc_toolchain_resolution\")\n || optionName.equals(\"incompatible_enable_cgo_toolchain_resolution\")\n || optionName.equals(\"incompatible_enable_apple_toolchain_resolution\")) {\n // This is specifically allowed.\n return true;"}}},{"rowIdx":2070,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"bsd-3-clause"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"507a343f705e8360601579500b2acef5bb05e6b4"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"mrdon/PLUG"},"new_contents":{"kind":"string","value":"package com.atlassian.plugin;\n\nimport java.io.InputStream;\nimport java.net.URL;\nimport java.util.*;\n\npublic interface Plugin extends Resourced, Comparable\n{\n /**\n * @deprecated since 2.2.0. This comparator only takes into account the plugin name and assumes it is not null,\n * yet a) that constraint is not validated anywhere in plugin loading and b) the plugin could have used the i18n\n * name, and only the application can resolve that to a name useful for comparisons.\n */\n public static final Comparator NAME_COMPARATOR = new PluginNameComparator();\n\n /**\n * Gets the version of the plugins system to handle this plugin\n * @return The plugins version. If undefined, assumed to be 1.\n */\n int getPluginsVersion();\n\n /**\n * Sets the version of the plugins system\n * @param version The version\n */\n void setPluginsVersion(int version);\n\n String getName();\n\n void setName(String name);\n\n String getI18nNameKey();\n\n void setI18nNameKey(String i18nNameKey);\n\n String getKey();\n\n void setKey(String aPackage);\n\n void addModuleDescriptor(ModuleDescriptor moduleDescriptor);\n\n /**\n * Get the {@link Collection} of {@link ModuleDescriptor descriptors}. 
The iteration order of the collection is\n * the order that the modules will be enabled, and should be the same order that the modules appear in the\n * plugin descriptor.\n *\n * @return the modules contained by this plugin in the order they are to be enabled\n */\n Collection> getModuleDescriptors();\n\n /**\n * Get the {@link ModuleDescriptor} for a particular key. Returns null if the plugin does not exist.\n *

\n * Note: The {@link ModuleDescriptor#getModule()} may throw {@link ClassCastException} if the expected type is incorrect.\n *\n * @param key the {@link String} complete key of the module, in the form \"org.example.plugin:module-key\".\n * @return the {@link ModuleDescriptor} of the expected type.\n */\n ModuleDescriptor getModuleDescriptor(String key);\n\n /**\n * Get the {@link ModuleDescriptor descriptors} whose module class implements or is assignable from the supplied {@link Class}.\n *

\n * Note: The {@link ModuleDescriptor#getModule()} may throw {@link ClassCastException} if the expected type is incorrect.\n * Normally this method would not be supplied with anything other than {@link Object} or &lt;?&gt;, unless you are\n * confident in the super type of the module classes this {@link Plugin} provides.\n *\n * @param The expected module type of the returned {@link ModuleDescriptor descriptors}.\n * @param moduleClass the {@link Class super class} the {@link ModuleDescriptor descriptors} return.\n * @return the {@link List} of {@link ModuleDescriptor descriptors} of the expected type.\n */\n List> getModuleDescriptorsByModuleClass(Class moduleClass);\n\n boolean isEnabledByDefault();\n\n void setEnabledByDefault(boolean enabledByDefault);\n\n PluginInformation getPluginInformation();\n\n void setPluginInformation(PluginInformation pluginInformation);\n\n void setResources(Resourced resources);\n\n /**\n * @return the current state of the plugin\n * @since 2.2.0\n */\n PluginState getPluginState();\n\n /**\n * @deprecated since 2.2.0, use {@link #getPluginState()} instead\n * @return\n */\n boolean isEnabled();\n\n\n /**\n * Whether the plugin is a \"system\" plugin that shouldn't be made visible to the user\n */\n boolean isSystemPlugin();\n\n boolean containsSystemModule();\n\n void setSystemPlugin(boolean system);\n\n /**\n * Whether the plugin is a \"bundled\" plugin that can't be removed.\n */\n boolean isBundledPlugin();\n\n /**\n * The date this plugin was loaded into the system.\n */\n Date getDateLoaded();\n\n /**\n * Whether or not this plugin can be 'uninstalled'.\n */\n boolean isUninstallable();\n\n /**\n * Should the plugin file be deleted on unistall?\n */\n boolean isDeleteable();\n\n /**\n * Whether or not this plugin is loaded dynamically at runtime\n */\n boolean isDynamicallyLoaded();\n\n /**\n * Get the plugin to load a specific class.\n *\n * @param clazz The name of the class to be loaded\n * @param callingClass The class 
calling the loading (used to help find a classloader)\n * @return The loaded class.\n * @throws ClassNotFoundException\n */\n Class loadClass(String clazz, Class callingClass) throws ClassNotFoundException;\n\n /**\n * Get the classloader for the plugin.\n *\n * @return The classloader used to load classes for this plugin\n */\n ClassLoader getClassLoader();\n\n /**\n * Retrieve the URL of the resource from the plugin.\n *\n * @param path the name of the resource to be loaded\n * @return The URL to the resource, or null if the resource is not found\n */\n URL getResource(String path);\n\n /**\n * Load a given resource from the plugin. Plugins that are loaded dynamically will need\n * to implement this in a way that loads the resource from the same context as the plugin.\n * Static plugins can just pull them from their own classloader.\n *\n * @param name The name of the resource to be loaded.\n * @return An InputStream for the resource, or null if the resource is not found.\n */\n InputStream getResourceAsStream(String name);\n\n /**\n * @deprecated Since 2.2.0, use {@link #enable()} or {@link #disable()} instead\n */\n void setEnabled(boolean enabled);\n\n /**\n * Free any resources held by this plugin. To be called during uninstallation of the {@link Plugin}.\n * @deprecated Since 2.2.0, use {@link #uninstall()} instead\n */\n void close();\n\n /**\n * Installs the plugin into any internal, managing container. This method will be called on every startup. Unless\n * an exception is thrown, the plugin should be in the {@link PluginState#INSTALLED} state. If the plugin is already\n * in the {@link PluginState#INSTALLED} state, nothing will happen.\n *\n * @since 2.2.0\n * @throws PluginException If the plugin could not be installed\n */\n void install() throws PluginException;\n\n /**\n * Uninstalls the plugin from any internal container. This method will be called on every shutdown. 
Unless an\n * exception is thrown, the plugin should be in the {@link PluginState#UNINSTALLED} state. If the plugin is already\n * in the {@link PluginState#UNINSTALLED} state, nothing will happen.\n *\n * @since 2.2.0\n * @throws PluginException If the plugin could not be uninstalled\n */\n void uninstall() throws PluginException;\n\n /**\n * Enables the plugin. Unless an exception is thrown, the plugin should then be in either the\n * {@link PluginState#ENABLING} or {@link PluginState#ENABLED} state. If the plugin is already in the\n * {@link PluginState#ENABLING} or {@link PluginState#ENABLED} state, nothing will happen.\n *\n *\n * @since 2.2.0\n * @throws PluginException If the plugin could not be enabled\n */\n void enable() throws PluginException;\n\n /**\n * Disables the plugin. Unless an exception is thrown, the plugin should be in the {@link PluginState#DISABLED}\n * state. If the plugin is already in the {@link PluginState#DISABLED} state, nothing will happen.\n *\n * @since 2.2.0 If the plugin could not be disabled\n * @throws PluginException\n */\n void disable() throws PluginException;\n\n /**\n * @return A list of plugin keys that this plugin is dependent upon, or an empty list if none\n * @since 2.2.0\n */\n Set getRequiredPlugins();\n}\n"},"new_file":{"kind":"string","value":"atlassian-plugins-core/src/main/java/com/atlassian/plugin/Plugin.java"},"old_contents":{"kind":"string","value":"package com.atlassian.plugin;\n\nimport java.io.InputStream;\nimport java.net.URL;\nimport java.util.*;\n\npublic interface Plugin extends Resourced, Comparable\n{\n public static final Comparator NAME_COMPARATOR = new PluginNameComparator();\n\n /**\n * Gets the version of the plugins system to handle this plugin\n * @return The plugins version. 
If undefined, assumed to be 1.\n */\n int getPluginsVersion();\n\n /**\n * Sets the version of the plugins system\n * @param version The version\n */\n void setPluginsVersion(int version);\n\n String getName();\n\n void setName(String name);\n\n String getI18nNameKey();\n\n void setI18nNameKey(String i18nNameKey);\n\n String getKey();\n\n void setKey(String aPackage);\n\n void addModuleDescriptor(ModuleDescriptor moduleDescriptor);\n\n /**\n * Get the {@link Collection} of {@link ModuleDescriptor descriptors}. The iteration order of the collection is\n * the order that the modules will be enabled, and should be the same order that the modules appear in the\n * plugin descriptor.\n *\n * @return the modules contained by this plugin in the order they are to be enabled\n */\n Collection> getModuleDescriptors();\n\n /**\n * Get the {@link ModuleDescriptor} for a particular key. Returns null if the plugin does not exist.\n *

\n * Note: The {@link ModuleDescriptor#getModule()} may throw {@link ClassCastException} if the expected type is incorrect.\n *\n * @param key the {@link String} complete key of the module, in the form \"org.example.plugin:module-key\".\n * @return the {@link ModuleDescriptor} of the expected type.\n */\n ModuleDescriptor getModuleDescriptor(String key);\n\n /**\n * Get the {@link ModuleDescriptor descriptors} whose module class implements or is assignable from the supplied {@link Class}.\n *

\n * Note: The {@link ModuleDescriptor#getModule()} may throw {@link ClassCastException} if the expected type is incorrect.\n * Normally this method would not be supplied with anything other than {@link Object} or &lt;?&gt;, unless you are\n * confident in the super type of the module classes this {@link Plugin} provides.\n *\n * @param The expected module type of the returned {@link ModuleDescriptor descriptors}.\n * @param moduleClass the {@link Class super class} the {@link ModuleDescriptor descriptors} return.\n * @return the {@link List} of {@link ModuleDescriptor descriptors} of the expected type.\n */\n List> getModuleDescriptorsByModuleClass(Class moduleClass);\n\n boolean isEnabledByDefault();\n\n void setEnabledByDefault(boolean enabledByDefault);\n\n PluginInformation getPluginInformation();\n\n void setPluginInformation(PluginInformation pluginInformation);\n\n void setResources(Resourced resources);\n\n /**\n * @return the current state of the plugin\n * @since 2.2.0\n */\n PluginState getPluginState();\n\n /**\n * @deprecated since 2.2.0, use {@link #getPluginState()} instead\n * @return\n */\n boolean isEnabled();\n\n\n /**\n * Whether the plugin is a \"system\" plugin that shouldn't be made visible to the user\n */\n boolean isSystemPlugin();\n\n boolean containsSystemModule();\n\n void setSystemPlugin(boolean system);\n\n /**\n * Whether the plugin is a \"bundled\" plugin that can't be removed.\n */\n boolean isBundledPlugin();\n\n /**\n * The date this plugin was loaded into the system.\n */\n Date getDateLoaded();\n\n /**\n * Whether or not this plugin can be 'uninstalled'.\n */\n boolean isUninstallable();\n\n /**\n * Should the plugin file be deleted on unistall?\n */\n boolean isDeleteable();\n\n /**\n * Whether or not this plugin is loaded dynamically at runtime\n */\n boolean isDynamicallyLoaded();\n\n /**\n * Get the plugin to load a specific class.\n *\n * @param clazz The name of the class to be loaded\n * @param callingClass The class 
calling the loading (used to help find a classloader)\n * @return The loaded class.\n * @throws ClassNotFoundException\n */\n Class loadClass(String clazz, Class callingClass) throws ClassNotFoundException;\n\n /**\n * Get the classloader for the plugin.\n *\n * @return The classloader used to load classes for this plugin\n */\n ClassLoader getClassLoader();\n\n /**\n * Retrieve the URL of the resource from the plugin.\n *\n * @param path the name of the resource to be loaded\n * @return The URL to the resource, or null if the resource is not found\n */\n URL getResource(String path);\n\n /**\n * Load a given resource from the plugin. Plugins that are loaded dynamically will need\n * to implement this in a way that loads the resource from the same context as the plugin.\n * Static plugins can just pull them from their own classloader.\n *\n * @param name The name of the resource to be loaded.\n * @return An InputStream for the resource, or null if the resource is not found.\n */\n InputStream getResourceAsStream(String name);\n\n /**\n * @deprecated Since 2.2.0, use {@link #enable()} or {@link #disable()} instead\n */\n void setEnabled(boolean enabled);\n\n /**\n * Free any resources held by this plugin. To be called during uninstallation of the {@link Plugin}.\n * @deprecated Since 2.2.0, use {@link #uninstall()} instead\n */\n void close();\n\n /**\n * Installs the plugin into any internal, managing container. This method will be called on every startup. Unless\n * an exception is thrown, the plugin should be in the {@link PluginState#INSTALLED} state. If the plugin is already\n * in the {@link PluginState#INSTALLED} state, nothing will happen.\n *\n * @since 2.2.0\n * @throws PluginException If the plugin could not be installed\n */\n void install() throws PluginException;\n\n /**\n * Uninstalls the plugin from any internal container. This method will be called on every shutdown. 
Unless an\n * exception is thrown, the plugin should be in the {@link PluginState#UNINSTALLED} state. If the plugin is already\n * in the {@link PluginState#UNINSTALLED} state, nothing will happen.\n *\n * @since 2.2.0\n * @throws PluginException If the plugin could not be uninstalled\n */\n void uninstall() throws PluginException;\n\n /**\n * Enables the plugin. Unless an exception is thrown, the plugin should then be in either the\n * {@link PluginState#ENABLING} or {@link PluginState#ENABLED} state. If the plugin is already in the\n * {@link PluginState#ENABLING} or {@link PluginState#ENABLED} state, nothing will happen.\n *\n *\n * @since 2.2.0\n * @throws PluginException If the plugin could not be enabled\n */\n void enable() throws PluginException;\n\n /**\n * Disables the plugin. Unless an exception is thrown, the plugin should be in the {@link PluginState#DISABLED}\n * state. If the plugin is already in the {@link PluginState#DISABLED} state, nothing will happen.\n *\n * @since 2.2.0 If the plugin could not be disabled\n * @throws PluginException\n */\n void disable() throws PluginException;\n\n /**\n * @return A list of plugin keys that this plugin is dependent upon, or an empty list if none\n * @since 2.2.0\n */\n Set getRequiredPlugins();\n}\n"},"message":{"kind":"string","value":"Deprecating plugin name comparator because it doesn't take into account i18n names\nPLUG-340\n\ngit-svn-id: 3d1f0b8d955af71bf8e09c956c180519124e4717@31515 2c54a935-e501-0410-bc05-97a93f6bca70\n"},"old_file":{"kind":"string","value":"atlassian-plugins-core/src/main/java/com/atlassian/plugin/Plugin.java"},"subject":{"kind":"string","value":"Deprecating plugin name comparator because it doesn't take into account i18n names PLUG-340"},"git_diff":{"kind":"string","value":"tlassian-plugins-core/src/main/java/com/atlassian/plugin/Plugin.java\n \n public interface Plugin extends Resourced, Comparable\n {\n /**\n * @deprecated since 2.2.0. 
This comparator only takes into account the plugin name and assumes it is not null,\n * yet a) that constraint is not validated anywhere in plugin loading and b) the plugin could have used the i18n\n * name, and only the application can resolve that to a name useful for comparisons.\n */\n public static final Comparator NAME_COMPARATOR = new PluginNameComparator();\n \n /**"}}},{"rowIdx":2071,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"e167d1c9baff9f945acc582541937a2a82b428e5"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"o19s/elasticsearch-learning-to-rank,o19s/elasticsearch-learning-to-rank"},"new_contents":{"kind":"string","value":"/*\n * Copyright [2017] Wikimedia Foundation\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage com.o19s.es.ltr.rest;\n\nimport com.o19s.es.ltr.action.CreateModelFromSetAction;\nimport com.o19s.es.ltr.action.CreateModelFromSetAction.CreateModelFromSetRequestBuilder;\nimport com.o19s.es.ltr.feature.FeatureValidation;\nimport com.o19s.es.ltr.feature.store.StoredLtrModel;\nimport org.elasticsearch.ExceptionsHelper;\nimport org.elasticsearch.action.ActionListener;\nimport org.elasticsearch.client.node.NodeClient;\nimport org.elasticsearch.xcontent.ParseField;\nimport org.elasticsearch.common.ParsingException;\nimport 
org.elasticsearch.xcontent.ObjectParser;\nimport org.elasticsearch.xcontent.XContentParser;\nimport org.elasticsearch.index.engine.VersionConflictEngineException;\nimport org.elasticsearch.rest.BytesRestResponse;\nimport org.elasticsearch.rest.RestRequest;\nimport org.elasticsearch.rest.RestStatus;\nimport org.elasticsearch.rest.action.RestStatusToXContentListener;\n\nimport java.io.IOException;\nimport java.util.List;\n\nimport static java.util.Arrays.asList;\nimport static java.util.Collections.unmodifiableList;\n\npublic class RestCreateModelFromSet extends FeatureStoreBaseRestHandler {\n\n @Override\n public String getName() {\n return \"Create initial models for features\";\n }\n\n @Override\n public List routes() {\n return unmodifiableList(asList(\n new Route(RestRequest.Method.POST , \"/_ltr/{store}/_featureset/{name}/_createmodel\"),\n new Route(RestRequest.Method.POST, \"/_ltr/_featureset/{name}/_createmodel\" )));\n }\n\n @Override\n protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException {\n if (!request.hasContentOrSourceParam()) {\n throw new IllegalArgumentException(\"Missing content or source param.\");\n }\n String store = indexName(request);\n Long expectedVersion = null;\n if (request.hasParam(\"version\")) {\n expectedVersion = request.paramAsLong(\"version\", -1);\n if (expectedVersion <= 0) {\n throw new IllegalArgumentException(\"version must be a strictly positive long value\");\n }\n }\n String routing = request.param(\"routing\");\n ParserState state = new ParserState();\n request.withContentOrSourceParamParserOrNull((p) -> ParserState.parse(p, state));\n CreateModelFromSetRequestBuilder builder = new CreateModelFromSetRequestBuilder(client);\n if (expectedVersion != null) {\n builder.withVersion(store, request.param(\"name\"), expectedVersion, state.model.name, state.model.model);\n } else {\n builder.withoutVersion(store, request.param(\"name\"), state.model.name, state.model.model);\n }\n 
builder.request().setValidation(state.validation);\n builder.routing(routing);\n return (channel) -> builder.execute(ActionListener.wrap(\n response -> new RestStatusToXContentListener(channel,\n (r) -> r.getResponse().getLocation(routing)).onResponse(response),\n (e) -> {\n final Exception exc;\n final RestStatus status;\n if (ExceptionsHelper.unwrap(e, VersionConflictEngineException.class) != null) {\n exc = new IllegalArgumentException(\"Element of type [\" + StoredLtrModel.TYPE +\n \"] are not updatable, please create a new one instead.\");\n exc.addSuppressed(e);\n status = RestStatus.METHOD_NOT_ALLOWED;\n } else {\n exc = e;\n status = ExceptionsHelper.status(exc);\n }\n\n try {\n channel.sendResponse(new BytesRestResponse(channel, status, exc));\n } catch (Exception inner) {\n inner.addSuppressed(e);\n logger.error(\"failed to send failure response\", inner);\n }\n }\n ));\n }\n\n private static class ParserState {\n private static final ObjectParser PARSER = new ObjectParser<>(\"create_model_from_set\", ParserState::new);\n\n static {\n PARSER.declareObject(ParserState::setModel, Model.MODEL_PARSER::apply, new ParseField(\"model\"));\n PARSER.declareObject(ParserState::setValidation, FeatureValidation.PARSER::apply, new ParseField(\"validation\"));\n }\n\n private Model model;\n private FeatureValidation validation;\n\n public Model getModel() {\n return model;\n }\n\n public void setModel(Model model) {\n this.model = model;\n }\n\n public FeatureValidation getValidation() {\n return validation;\n }\n\n public void setValidation(FeatureValidation validation) {\n this.validation = validation;\n }\n\n public static void parse(XContentParser parser, ParserState value) throws IOException {\n PARSER.parse(parser, value, null);\n if (value.model == null) {\n throw new ParsingException(parser.getTokenLocation(), \"Missing required value [model]\");\n }\n }\n\n private static class Model {\n private static final ObjectParser MODEL_PARSER = new 
ObjectParser<>(\"model\", Model::new);\n static {\n MODEL_PARSER.declareString(Model::setName, new ParseField(\"name\"));\n MODEL_PARSER.declareObject(Model::setModel,\n StoredLtrModel.LtrModelDefinition::parse,\n new ParseField(\"model\"));\n }\n\n String name;\n StoredLtrModel.LtrModelDefinition model;\n\n public void setName(String name) {\n this.name = name;\n }\n\n public void setModel(StoredLtrModel.LtrModelDefinition model) {\n this.model = model;\n }\n\n public static void parse(XContentParser parser, Model value) throws IOException {\n MODEL_PARSER.parse(parser, value, null);\n if (value.name == null) {\n throw new ParsingException(parser.getTokenLocation(), \"Missing required value [name]\");\n }\n }\n }\n }\n}\n"},"new_file":{"kind":"string","value":"src/main/java/com/o19s/es/ltr/rest/RestCreateModelFromSet.java"},"old_contents":{"kind":"string","value":"/*\n * Copyright [2017] Wikimedia Foundation\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage com.o19s.es.ltr.rest;\n\nimport com.o19s.es.ltr.action.CreateModelFromSetAction;\nimport com.o19s.es.ltr.action.CreateModelFromSetAction.CreateModelFromSetRequestBuilder;\nimport com.o19s.es.ltr.feature.FeatureValidation;\nimport com.o19s.es.ltr.feature.store.StoredLtrModel;\nimport org.elasticsearch.ExceptionsHelper;\nimport org.elasticsearch.action.ActionListener;\nimport org.elasticsearch.client.node.NodeClient;\nimport org.elasticsearch.xcontent.ParseField;\nimport 
org.elasticsearch.common.ParsingException;\nimport org.elasticsearch.xcontent.ObjectParser;\nimport org.elasticsearch.xcontent.XContentParser;\nimport org.elasticsearch.index.engine.VersionConflictEngineException;\nimport org.elasticsearch.rest.BytesRestResponse;\nimport org.elasticsearch.rest.RestRequest;\nimport org.elasticsearch.rest.RestStatus;\nimport org.elasticsearch.rest.action.RestStatusToXContentListener;\n\nimport java.io.IOException;\nimport java.util.List;\n\nimport static java.util.Arrays.asList;\nimport static java.util.Collections.unmodifiableList;\n\npublic class RestCreateModelFromSet extends FeatureStoreBaseRestHandler {\n\n @Override\n public String getName() {\n return \"Create initial models for features\";\n }\n\n @Override\n public List routes() {\n return unmodifiableList(asList(\n new Route(RestRequest.Method.POST , \"/_ltr/{store}/_featureset/{name}/_createmodel\"),\n new Route(RestRequest.Method.POST, \"/_ltr/_featureset/{name}/_createmodel\" )));\n }\n\n @Override\n protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException {\n String store = indexName(request);\n Long expectedVersion = null;\n if (request.hasParam(\"version\")) {\n expectedVersion = request.paramAsLong(\"version\", -1);\n if (expectedVersion <= 0) {\n throw new IllegalArgumentException(\"version must be a strictly positive long value\");\n }\n }\n String routing = request.param(\"routing\");\n ParserState state = new ParserState();\n request.withContentOrSourceParamParserOrNull((p) -> ParserState.parse(p, state));\n CreateModelFromSetRequestBuilder builder = new CreateModelFromSetRequestBuilder(client);\n if (expectedVersion != null) {\n builder.withVersion(store, request.param(\"name\"), expectedVersion, state.model.name, state.model.model);\n } else {\n builder.withoutVersion(store, request.param(\"name\"), state.model.name, state.model.model);\n }\n builder.request().setValidation(state.validation);\n 
builder.routing(routing);\n return (channel) -> builder.execute(ActionListener.wrap(\n response -> new RestStatusToXContentListener(channel,\n (r) -> r.getResponse().getLocation(routing)).onResponse(response),\n (e) -> {\n final Exception exc;\n final RestStatus status;\n if (ExceptionsHelper.unwrap(e, VersionConflictEngineException.class) != null) {\n exc = new IllegalArgumentException(\"Element of type [\" + StoredLtrModel.TYPE +\n \"] are not updatable, please create a new one instead.\");\n exc.addSuppressed(e);\n status = RestStatus.METHOD_NOT_ALLOWED;\n } else {\n exc = e;\n status = ExceptionsHelper.status(exc);\n }\n\n try {\n channel.sendResponse(new BytesRestResponse(channel, status, exc));\n } catch (Exception inner) {\n inner.addSuppressed(e);\n logger.error(\"failed to send failure response\", inner);\n }\n }\n ));\n }\n\n private static class ParserState {\n private static final ObjectParser PARSER = new ObjectParser<>(\"create_model_from_set\", ParserState::new);\n\n static {\n PARSER.declareObject(ParserState::setModel, Model.MODEL_PARSER::apply, new ParseField(\"model\"));\n PARSER.declareObject(ParserState::setValidation, FeatureValidation.PARSER::apply, new ParseField(\"validation\"));\n }\n\n private Model model;\n private FeatureValidation validation;\n\n public Model getModel() {\n return model;\n }\n\n public void setModel(Model model) {\n this.model = model;\n }\n\n public FeatureValidation getValidation() {\n return validation;\n }\n\n public void setValidation(FeatureValidation validation) {\n this.validation = validation;\n }\n\n public static void parse(XContentParser parser, ParserState value) throws IOException {\n PARSER.parse(parser, value, null);\n if (value.model == null) {\n throw new ParsingException(parser.getTokenLocation(), \"Missing required value [model]\");\n }\n }\n\n private static class Model {\n private static final ObjectParser MODEL_PARSER = new ObjectParser<>(\"model\", Model::new);\n static {\n 
MODEL_PARSER.declareString(Model::setName, new ParseField(\"name\"));\n MODEL_PARSER.declareObject(Model::setModel,\n StoredLtrModel.LtrModelDefinition::parse,\n new ParseField(\"model\"));\n }\n\n String name;\n StoredLtrModel.LtrModelDefinition model;\n\n public void setName(String name) {\n this.name = name;\n }\n\n public void setModel(StoredLtrModel.LtrModelDefinition model) {\n this.model = model;\n }\n\n public static void parse(XContentParser parser, Model value) throws IOException {\n MODEL_PARSER.parse(parser, value, null);\n if (value.name == null) {\n throw new ParsingException(parser.getTokenLocation(), \"Missing required value [name]\");\n }\n }\n }\n }\n}\n"},"message":{"kind":"string","value":"Do not fail _createmodel with NPE when no content is provided (#415)\n\nrelates #401"},"old_file":{"kind":"string","value":"src/main/java/com/o19s/es/ltr/rest/RestCreateModelFromSet.java"},"subject":{"kind":"string","value":"Do not fail _createmodel with NPE when no content is provided (#415)"},"git_diff":{"kind":"string","value":"rc/main/java/com/o19s/es/ltr/rest/RestCreateModelFromSet.java\n \n @Override\n protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException {\n if (!request.hasContentOrSourceParam()) {\n throw new IllegalArgumentException(\"Missing content or source param.\");\n }\n String store = indexName(request);\n Long expectedVersion = null;\n if (request.hasParam(\"version\")) {"}}},{"rowIdx":2072,"cells":{"lang":{"kind":"string","value":"JavaScript"},"license":{"kind":"string","value":"mit"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"664c19eabb3369109d6ca06f986e282f0cfb357f"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"nanowang/aozora-phonegap-build,project-yoru/aozora-phonegap-build,project-yoru/aozora-phonegap-build,nanowang/aozora-phonegap-build"},"new_contents":{"kind":"string","value":"(function() {\n 
document.addEventListener('deviceready', (function() {\n return console.log('device ready');\n }), false);\n\n}).call(this);\n"},"new_file":{"kind":"string","value":"scripts/main.js"},"old_contents":{"kind":"string","value":""},"message":{"kind":"string","value":"disable useMin for main.js\n"},"old_file":{"kind":"string","value":"scripts/main.js"},"subject":{"kind":"string","value":"disable useMin for main.js"},"git_diff":{"kind":"string","value":"cripts/main.js\n(function() {\n document.addEventListener('deviceready', (function() {\n return console.log('device ready');\n }), false);\n\n}).call(this);"}}},{"rowIdx":2073,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"648b21aa4f356e7a59dcc55fec722e9a519bfc64"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"pacozaa/BoofCV,pacozaa/BoofCV,pacozaa/BoofCV,pacozaa/BoofCV"},"new_contents":{"kind":"string","value":"/*\n * Copyright (c) 2011-2013, Peter Abeles. 
All Rights Reserved.\n *\n * This file is part of BoofCV (http://boofcv.org).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage boofcv.alg.sfm.d2;\n\nimport boofcv.abst.feature.detect.interest.ConfigFastHessian;\nimport boofcv.abst.feature.detect.interest.ConfigGeneralDetector;\nimport boofcv.abst.feature.tracker.PkltConfig;\nimport boofcv.factory.feature.tracker.FactoryPointTracker;\nimport boofcv.gui.image.ShowImages;\nimport boofcv.io.PathLabel;\nimport boofcv.struct.image.ImageFloat32;\nimport boofcv.struct.image.ImageSInt16;\nimport boofcv.struct.image.ImageSingleBand;\nimport boofcv.struct.image.ImageUInt8;\nimport georegression.struct.InvertibleTransform;\nimport georegression.struct.affine.Affine2D_F64;\nimport georegression.struct.homo.Homography2D_F64;\nimport georegression.struct.point.Point2D_F64;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\n/**\n * Creates a mosaic from an image sequence using tracked point features. Each the input window\n * moaes toward the mosaic image's boundary it is automatically reset. 
When reset the current\n * image is put in the initial position and the mosaic distorted accordingly.\n *\n * @author Peter Abeles\n * @param Input image type\n * @param Image derivative type\n */\n// TODO add support for color again\n// TODO comment and clean up code\npublic class VideoMosaicSequentialPointApp\n\t\textends VideoStitchBaseApp\n{\n\tprivate static int maxFeatures = 250;\n\n\tpublic VideoMosaicSequentialPointApp(Class imageType, Class derivType) {\n\t\tsuper(2,imageType,new Mosaic2DPanel());\n\n\t\tPkltConfig config =\n\t\t\t\tPkltConfig.createDefault(imageType, derivType);\n\t\tconfig.featureRadius = 3;\n\t\tconfig.pyramidScaling = new int[]{1,2,4,8};\n\n\t\tConfigFastHessian configFH = new ConfigFastHessian();\n\t\tconfigFH.initialSampleSize = 2;\n\t\tconfigFH.maxFeaturesPerScale = 200;\n\n\t\taddAlgorithm(0, \"KLT\", FactoryPointTracker.klt(config, new ConfigGeneralDetector(maxFeatures, 3, 1)));\n\t\taddAlgorithm(0, \"ST-BRIEF\", FactoryPointTracker.\n\t\t\t\tdda_ST_BRIEF(150, new ConfigGeneralDetector(400, 1, 10), imageType, null));\n\t\t// size of the description region has been increased to improve quality.\n\t\taddAlgorithm(0, \"ST-NCC\", FactoryPointTracker.\n\t\t\t\tdda_ST_NCC(new ConfigGeneralDetector(500, 3, 9), 10, imageType, derivType));\n\t\taddAlgorithm(0, \"FH-SURF\", FactoryPointTracker.dda_FH_SURF_Fast(configFH, null, null, imageType));\n\t\taddAlgorithm(0, \"ST-SURF-KLT\", FactoryPointTracker.\n\t\t\t\tcombined_ST_SURF_KLT(new ConfigGeneralDetector(400, 3, 1), 3,\n\t\t\t\t\t\tconfig.pyramidScaling, 75, null, null, imageType, derivType));\n\t\taddAlgorithm(0, \"FH-SURF-KLT\", FactoryPointTracker.combined_FH_SURF_KLT(3,\n\t\t\t\tconfig.pyramidScaling, 75, configFH, null, null, imageType));\n\n\t\taddAlgorithm(1,\"Affine\", new Affine2D_F64());\n\t\taddAlgorithm(1,\"Homography\", new Homography2D_F64());\n\n\t\tabsoluteMinimumTracks = 40;\n\t\trespawnTrackFraction = 0.3;\n\t\trespawnCoverageFraction = 0.8;\n\t\tmaxJumpFraction = 
0.3;\n\t}\n\n\tprivate Affine2D_F64 createInitialTransform() {\n\t\tfloat scale = 0.8f;\n\n\t\tAffine2D_F64 H = new Affine2D_F64(scale,0,0,scale, stitchWidth /4, stitchHeight /4);\n\t\treturn H.invert(null);\n\t}\n\n\t@Override\n\tprotected void init(int inputWidth, int inputHeight) {\n\t\tsetStitchImageSize(1000, 600);\n\t\t((Mosaic2DPanel)gui).setMosaicSize(stitchWidth, stitchHeight);\n\t\talg.configure(stitchWidth, stitchHeight,createInitialTransform());\n\t}\n\n\t@Override\n\tprotected boolean checkLocation(StitchingFromMotion2D.Corners corners) {\n\t\tif( closeToBorder(corners.p0) )\n\t\t\treturn true;\n\t\tif( closeToBorder(corners.p1) )\n\t\t\treturn true;\n\t\tif( closeToBorder(corners.p2) )\n\t\t\treturn true;\n\t\tif( closeToBorder(corners.p3) )\n\t\t\treturn true;\n\n\t\treturn false;\n\t}\n\n\tprivate boolean closeToBorder( Point2D_F64 pt ) {\n\t\tif( pt.x < borderTolerance || pt.y < borderTolerance)\n\t\t\treturn true;\n\t\treturn( pt.x >= stitchWidth - borderTolerance || pt.y >= stitchHeight - borderTolerance);\n\t}\n\n\tpublic static void main( String args[] ) {\n//\t\tClass type = ImageFloat32.class;\n//\t\tClass derivType = type;\n\n\t\tClass type = ImageUInt8.class;\n\t\tClass derivType = ImageSInt16.class;\n\n\t\tVideoMosaicSequentialPointApp app = new VideoMosaicSequentialPointApp(type,derivType);\n\n\t\tList inputs = new ArrayList();\n\t\tinputs.add(new PathLabel(\"Plane 1\", \"../data/applet/mosaic/airplane01.mjpeg\"));\n\t\tinputs.add(new PathLabel(\"Plane 2\", \"../data/applet/mosaic/airplane02.mjpeg\"));\n\t\tinputs.add(new PathLabel(\"Shake\", \"../data/applet/shake.mjpeg\"));\n\n\t\tapp.setInputList(inputs);\n\n\t\t// wait for it to process one image so that the size isn't all screwed up\n\t\twhile( !app.getHasProcessedImage() ) {\n\t\t\tThread.yield();\n\t\t}\n\n\t\tShowImages.showWindow(app, \"Video Image 
Mosaic\");\n\t}\n}\n"},"new_file":{"kind":"string","value":"evaluation/visualization/src/boofcv/alg/sfm/d2/VideoMosaicSequentialPointApp.java"},"old_contents":{"kind":"string","value":"/*\n * Copyright (c) 2011-2013, Peter Abeles. All Rights Reserved.\n *\n * This file is part of BoofCV (http://boofcv.org).\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage boofcv.alg.sfm.d2;\n\nimport boofcv.abst.feature.detect.interest.ConfigFastHessian;\nimport boofcv.abst.feature.detect.interest.ConfigGeneralDetector;\nimport boofcv.abst.feature.tracker.PkltConfig;\nimport boofcv.factory.feature.tracker.FactoryPointTracker;\nimport boofcv.gui.image.ShowImages;\nimport boofcv.io.PathLabel;\nimport boofcv.struct.image.ImageFloat32;\nimport boofcv.struct.image.ImageSingleBand;\nimport georegression.struct.InvertibleTransform;\nimport georegression.struct.affine.Affine2D_F64;\nimport georegression.struct.homo.Homography2D_F64;\nimport georegression.struct.point.Point2D_F64;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\n/**\n * Creates a mosaic from an image sequence using tracked point features. Each the input window\n * moaes toward the mosaic image's boundary it is automatically reset. 
When reset the current\n * image is put in the initial position and the mosaic distorted accordingly.\n *\n * @author Peter Abeles\n * @param Input image type\n * @param Image derivative type\n */\n// TODO add support for color again\n// TODO comment and clean up code\npublic class VideoMosaicSequentialPointApp\n\t\textends VideoStitchBaseApp\n{\n\tprivate static int maxFeatures = 250;\n\n\tpublic VideoMosaicSequentialPointApp(Class imageType, Class derivType) {\n\t\tsuper(2,imageType,new Mosaic2DPanel());\n\n\t\tPkltConfig config =\n\t\t\t\tPkltConfig.createDefault(imageType, derivType);\n\t\tconfig.featureRadius = 3;\n\t\tconfig.pyramidScaling = new int[]{1,2,4,8};\n\n\t\tConfigFastHessian configFH = new ConfigFastHessian();\n\t\tconfigFH.initialSampleSize = 2;\n\t\tconfigFH.maxFeaturesPerScale = 200;\n\n\t\taddAlgorithm(0, \"KLT\", FactoryPointTracker.klt(config, new ConfigGeneralDetector(maxFeatures, 3, 1)));\n\t\taddAlgorithm(0, \"ST-BRIEF\", FactoryPointTracker.\n\t\t\t\tdda_ST_BRIEF(150, new ConfigGeneralDetector(400, 1, 10), imageType, null));\n\t\t// size of the description region has been increased to improve quality.\n\t\taddAlgorithm(0, \"ST-NCC\", FactoryPointTracker.\n\t\t\t\tdda_ST_NCC(new ConfigGeneralDetector(500, 3, 9), 10, imageType, derivType));\n\t\taddAlgorithm(0, \"FH-SURF\", FactoryPointTracker.dda_FH_SURF_Fast(configFH, null, null, imageType));\n\t\taddAlgorithm(0, \"ST-SURF-KLT\", FactoryPointTracker.\n\t\t\t\tcombined_ST_SURF_KLT(new ConfigGeneralDetector(400, 3, 1), 3,\n\t\t\t\t\t\tconfig.pyramidScaling, 75, null, null, imageType, derivType));\n\t\taddAlgorithm(0, \"FH-SURF-KLT\", FactoryPointTracker.combined_FH_SURF_KLT(3,\n\t\t\t\tconfig.pyramidScaling, 75, configFH, null, null, imageType));\n\n\t\taddAlgorithm(1,\"Affine\", new Affine2D_F64());\n\t\taddAlgorithm(1,\"Homography\", new Homography2D_F64());\n\n\t\tabsoluteMinimumTracks = 40;\n\t\trespawnTrackFraction = 0.3;\n\t\trespawnCoverageFraction = 0.8;\n\t\tmaxJumpFraction = 
0.3;\n\t}\n\n\tprivate Affine2D_F64 createInitialTransform() {\n\t\tfloat scale = 0.8f;\n\n\t\tAffine2D_F64 H = new Affine2D_F64(scale,0,0,scale, stitchWidth /4, stitchHeight /4);\n\t\treturn H.invert(null);\n\t}\n\n\t@Override\n\tprotected void init(int inputWidth, int inputHeight) {\n\t\tsetStitchImageSize(1000, 600);\n\t\t((Mosaic2DPanel)gui).setMosaicSize(stitchWidth, stitchHeight);\n\t\talg.configure(stitchWidth, stitchHeight,createInitialTransform());\n\t}\n\n\t@Override\n\tprotected boolean checkLocation(StitchingFromMotion2D.Corners corners) {\n\t\tif( closeToBorder(corners.p0) )\n\t\t\treturn true;\n\t\tif( closeToBorder(corners.p1) )\n\t\t\treturn true;\n\t\tif( closeToBorder(corners.p2) )\n\t\t\treturn true;\n\t\tif( closeToBorder(corners.p3) )\n\t\t\treturn true;\n\n\t\treturn false;\n\t}\n\n\tprivate boolean closeToBorder( Point2D_F64 pt ) {\n\t\tif( pt.x < borderTolerance || pt.y < borderTolerance)\n\t\t\treturn true;\n\t\treturn( pt.x >= stitchWidth - borderTolerance || pt.y >= stitchHeight - borderTolerance);\n\t}\n\n\tpublic static void main( String args[] ) {\n\t\tClass type = ImageFloat32.class;\n\t\tClass derivType = type;\n\n//\t\tClass type = ImageUInt8.class;\n//\t\tClass derivType = ImageSInt16.class;\n\n\t\tVideoMosaicSequentialPointApp app = new VideoMosaicSequentialPointApp(type,derivType);\n\n\t\tList inputs = new ArrayList();\n\t\tinputs.add(new PathLabel(\"Plane 1\", \"../data/applet/mosaic/airplane01.mjpeg\"));\n\t\tinputs.add(new PathLabel(\"Plane 2\", \"../data/applet/mosaic/airplane02.mjpeg\"));\n\t\tinputs.add(new PathLabel(\"Shake\", \"../data/applet/shake.mjpeg\"));\n\n\t\tapp.setInputList(inputs);\n\n\t\t// wait for it to process one image so that the size isn't all screwed up\n\t\twhile( !app.getHasProcessedImage() ) {\n\t\t\tThread.yield();\n\t\t}\n\n\t\tShowImages.showWindow(app, \"Video Image Mosaic\");\n\t}\n}\n"},"message":{"kind":"string","value":"Updated for previous 
changes\n"},"old_file":{"kind":"string","value":"evaluation/visualization/src/boofcv/alg/sfm/d2/VideoMosaicSequentialPointApp.java"},"subject":{"kind":"string","value":"Updated for previous changes"},"git_diff":{"kind":"string","value":"valuation/visualization/src/boofcv/alg/sfm/d2/VideoMosaicSequentialPointApp.java\n import boofcv.gui.image.ShowImages;\n import boofcv.io.PathLabel;\n import boofcv.struct.image.ImageFloat32;\nimport boofcv.struct.image.ImageSInt16;\n import boofcv.struct.image.ImageSingleBand;\nimport boofcv.struct.image.ImageUInt8;\n import georegression.struct.InvertibleTransform;\n import georegression.struct.affine.Affine2D_F64;\n import georegression.struct.homo.Homography2D_F64;\n \t}\n \n \tpublic static void main( String args[] ) {\n\t\tClass type = ImageFloat32.class;\n\t\tClass derivType = type;\n//\t\tClass type = ImageFloat32.class;\n//\t\tClass derivType = type;\n \n//\t\tClass type = ImageUInt8.class;\n//\t\tClass derivType = ImageSInt16.class;\n\t\tClass type = ImageUInt8.class;\n\t\tClass derivType = ImageSInt16.class;\n \n \t\tVideoMosaicSequentialPointApp app = new VideoMosaicSequentialPointApp(type,derivType);\n "}}},{"rowIdx":2074,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"22c2e479a28a8303fb4ca7499c9aa448c06c2f1e"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"alex-bl/ivyDEextension,apache/ant-ivyde,alex-bl/ivyDEextension,apache/ant-ivyde"},"new_contents":{"kind":"string","value":"/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage org.apache.ivyde.eclipse.workspaceresolver;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.text.ParseException;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\n\nimport org.apache.ivy.Ivy;\nimport org.apache.ivy.core.module.descriptor.Artifact;\nimport org.apache.ivy.core.module.descriptor.Configuration;\nimport org.apache.ivy.core.module.descriptor.DefaultArtifact;\nimport org.apache.ivy.core.module.descriptor.DefaultModuleDescriptor;\nimport org.apache.ivy.core.module.descriptor.DependencyDescriptor;\nimport org.apache.ivy.core.module.descriptor.ExcludeRule;\nimport org.apache.ivy.core.module.descriptor.License;\nimport org.apache.ivy.core.module.descriptor.ModuleDescriptor;\nimport org.apache.ivy.core.module.id.ModuleRevisionId;\nimport org.apache.ivy.core.report.ArtifactDownloadReport;\nimport org.apache.ivy.core.report.DownloadReport;\nimport org.apache.ivy.core.report.DownloadStatus;\nimport org.apache.ivy.core.report.MetadataArtifactDownloadReport;\nimport org.apache.ivy.core.resolve.DownloadOptions;\nimport org.apache.ivy.core.resolve.ResolveData;\nimport org.apache.ivy.core.resolve.ResolvedModuleRevision;\nimport org.apache.ivy.core.settings.IvySettings;\nimport org.apache.ivy.plugins.resolver.AbstractResolver;\nimport org.apache.ivy.plugins.resolver.util.ResolvedResource;\nimport org.apache.ivy.plugins.version.VersionMatcher;\nimport org.apache.ivy.util.Message;\nimport org.apache.ivyde.eclipse.IvyDEException;\nimport 
org.apache.ivyde.eclipse.IvyPlugin;\nimport org.apache.ivyde.eclipse.cpcontainer.IvyClasspathContainer;\nimport org.apache.ivyde.eclipse.cpcontainer.IvyClasspathUtil;\nimport org.eclipse.core.resources.ResourcesPlugin;\nimport org.eclipse.core.runtime.IStatus;\nimport org.eclipse.jdt.core.IJavaProject;\nimport org.eclipse.jdt.core.JavaCore;\nimport org.eclipse.jdt.core.JavaModelException;\n\n/**\n * This is an Eclipse workspace Ivy resolver. When used with the custom IvyClasspathContainer\n * changes, this resolver will link dependent projects when they are open in the same workspace,\n * allowing full-fledged linked project functionality Eclipse provides, such as incremental\n * compilation, debugging, mouseover javadocs, and source browsing across projects.\n * \n * How it works During a resolve, it looks at all open projects in the workspace that have\n * Ivy containers. The first project that publishes the module on which the project being\n * resolved depends, will be picked and returned as a special type of artifact called \"project\".\n * \n * The IvyClasspathContainer will recognize the artifact as a project and put the eclipse project as\n * a dependent project within the classpath container of the parent.\n * \n * If you do not want a project to be linked as a dependency, close it or delete from the workspace.\n * As soon as you do that, any projects that were linked to it will automatically re-resolve (see\n * {@link WorkspaceResourceChangeListener}) and use the standard Ivy means of finding the\n * dependency.\n * \n * The {@link WorkspaceResourceChangeListener} will also auto-resolve when a new project is added or\n * opened, so opening a project will automatically link it into the currently open projects where\n * necessary.\n * \n * Since the resolver is not aware which module revision a project is publishing, it optimistically\n * matches any revision of the module.\n * \n * Since the resolver stops after finding the first open project which matches 
the module, having\n * multiple open versions of the same project in the workspace (for example, different branches) may\n * set the wrong version as a dependency. You are advised to only open the version of the project\n * which you want other projects in the workspace to depend on.\n * \n * NOTE: Transitive dependencies are not passed from the dependent project to the parent when\n * projects are linked. If you find you are missing some transitive dependencies, just set your\n * dependent eclipse project to export its ivy dependencies. (Project->Properties->Java Build\n * Path->Order and Export-> -> check the ivy container) This will only export the configuration that\n * project is using and not what a dependent project may ask for when it's being resolved. To do\n * that, this resolver will need to be modified to pass transitive dependencies along.\n */\npublic class WorkspaceResolver extends AbstractResolver {\n\n public static final String ECLIPSE_PROJECT_TYPE = \"eclipse-project\";\n\n public static final String ECLIPSE_PROJECT_EXTENSION = \"eclipse-project\";\n\n public static final String CACHE_NAME = \"__ivyde-workspace-resolver-cache\";\n\n private final IJavaProject resolvingJavaProject;\n\n private IJavaProject[] projects;\n\n public WorkspaceResolver(IJavaProject javaProject, IvySettings ivySettings) {\n this.resolvingJavaProject = javaProject;\n setName(javaProject.getElementName() + \"-ivyde-workspace-resolver\");\n setSettings(ivySettings);\n setCache(CACHE_NAME);\n\n try {\n projects = JavaCore.create(ResourcesPlugin.getWorkspace().getRoot()).getJavaProjects();\n } catch (JavaModelException e) {\n IvyPlugin.log(IStatus.ERROR, \"JDT Error while resolving in workspace for \"\n + resolvingJavaProject.getElementName(), e);\n }\n }\n\n public DownloadReport download(Artifact[] artifacts, DownloadOptions options) {\n // Not much to do here - downloads are not required for workspace projects.\n DownloadReport dr = new DownloadReport();\n for (int i = 0; 
i < artifacts.length; i++) {\n final ArtifactDownloadReport adr = new ArtifactDownloadReport(artifacts[i]);\n dr.addArtifactReport(adr);\n\n // Only report java projects as downloaded\n if (artifacts[i].getType().equals(ECLIPSE_PROJECT_TYPE)) {\n Message.verbose(\"\\t[IN WORKSPACE] \" + artifacts[i]);\n adr.setDownloadStatus(DownloadStatus.NO);\n adr.setSize(0);\n } else {\n Message.verbose(\"\\t[Eclipse Workspace resolver] \"\n + \"cannot download non-project artifact: \" + artifacts[i]);\n adr.setDownloadStatus(DownloadStatus.FAILED);\n }\n }\n return dr;\n }\n\n public ResolvedModuleRevision getDependency(DependencyDescriptor dd, ResolveData data)\n throws ParseException {\n\n ModuleRevisionId dependencyMrid = dd.getDependencyRevisionId();\n\n VersionMatcher versionMatcher = getSettings().getVersionMatcher();\n\n // Iterate over workspace to find Java project which has an Ivy\n // container for this dependency\n for (int i = 0; i < projects.length; i++) {\n IJavaProject javaProject = projects[i];\n if (resolvingJavaProject.equals(javaProject)) {\n // we don't want to introduce self dependency\n continue;\n }\n if (!javaProject.exists()) {\n continue;\n }\n List/* */containers = IvyClasspathUtil\n .getIvyClasspathContainers(javaProject);\n Iterator/* */itContainer = containers.iterator();\n while (itContainer.hasNext()) {\n IvyClasspathContainer ivycp = (IvyClasspathContainer) itContainer.next();\n ModuleDescriptor md;\n try {\n md = ivycp.getConf().getCachedModuleDescriptor();\n } catch (IvyDEException e) {\n IvyPlugin.log(IStatus.WARNING, \"Resolve in workspace for '\"\n + resolvingJavaProject.getElementName() + \"' cannot depend on \"\n + ivycp.getDescription() + \" [\" + e.getMessage() + \"]\", null);\n continue;\n }\n\n if (!md.getModuleRevisionId().getModuleId().equals(dependencyMrid.getModuleId())) {\n // it doesn't match org#module\n continue;\n }\n\n // Found one; check if it is for the module we need\n if 
(md.getModuleRevisionId().getRevision().equals(Ivy.getWorkingRevision())\n || versionMatcher.accept(dd.getDependencyRevisionId(), md)) {\n\n Artifact af = new DefaultArtifact(md.getModuleRevisionId(), md\n .getPublicationDate(), javaProject.getPath().toString(),\n ECLIPSE_PROJECT_TYPE, ECLIPSE_PROJECT_EXTENSION);\n\n DefaultModuleDescriptor workspaceMd = cloneMd(md, af);\n\n MetadataArtifactDownloadReport madr = new MetadataArtifactDownloadReport(af);\n madr.setDownloadStatus(DownloadStatus.SUCCESSFUL);\n madr.setSearched(true);\n\n return new ResolvedModuleRevision(this, this, workspaceMd, madr);\n }\n }\n }\n\n // Didn't find module in any open project, proceed to other resolvers.\n return null;\n }\n\n private DefaultModuleDescriptor cloneMd(ModuleDescriptor md, Artifact af) {\n\n DefaultModuleDescriptor newMd = new DefaultModuleDescriptor(md.getModuleRevisionId(),\n \"release\", null, true);\n newMd.addConfiguration(new Configuration(ModuleDescriptor.DEFAULT_CONFIGURATION));\n newMd.setLastModified(System.currentTimeMillis());\n\n newMd.setDescription(md.getDescription());\n newMd.setHomePage(md.getHomePage());\n newMd.setLastModified(md.getLastModified());\n newMd.setPublicationDate(md.getPublicationDate());\n newMd.setResolvedPublicationDate(md.getResolvedPublicationDate());\n newMd.setStatus(md.getStatus());\n\n Configuration[] allConfs = md.getConfigurations();\n if (allConfs.length == 0) {\n newMd.addArtifact(ModuleDescriptor.DEFAULT_CONFIGURATION, af);\n } else {\n for (int k = 0; k < allConfs.length; k++) {\n newMd.addConfiguration(allConfs[k]);\n newMd.addArtifact(allConfs[k].getName(), af);\n }\n }\n\n DependencyDescriptor[] dependencies = md.getDependencies();\n for (int k = 0; k < dependencies.length; k++) {\n newMd.addDependency(dependencies[k]);\n }\n\n ExcludeRule[] allExcludeRules = md.getAllExcludeRules();\n for (int k = 0; k < allExcludeRules.length; k++) {\n newMd.addExcludeRule(allExcludeRules[k]);\n }\n\n Map extraInfo = md.getExtraInfo();\n 
Iterator it = extraInfo.entrySet().iterator();\n while (it.hasNext()) {\n Entry entry = (Entry) it.next();\n newMd.addExtraInfo((String) entry.getKey(), (String) entry.getValue());\n }\n\n License[] licenses = md.getLicenses();\n for (int k = 0; k < licenses.length; k++) {\n newMd.addLicense(licenses[k]);\n }\n\n return newMd;\n }\n\n public void publish(Artifact artifact, File src, boolean overwrite) throws IOException {\n throw new UnsupportedOperationException(\"publish not supported by \" + getName());\n }\n\n public ResolvedResource findIvyFileRef(DependencyDescriptor dd, ResolveData data) {\n return null;\n }\n}\n"},"new_file":{"kind":"string","value":"org.apache.ivyde.eclipse/src/java/org/apache/ivyde/eclipse/workspaceresolver/WorkspaceResolver.java"},"old_contents":{"kind":"string","value":"/*\n * Licensed to the Apache Software Foundation (ASF) under one or more\n * contributor license agreements. See the NOTICE file distributed with\n * this work for additional information regarding copyright ownership.\n * The ASF licenses this file to You under the Apache License, Version 2.0\n * (the \"License\"); you may not use this file except in compliance with\n * the License. 
You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n */\npackage org.apache.ivyde.eclipse.workspaceresolver;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.text.ParseException;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Map.Entry;\n\nimport org.apache.ivy.core.module.descriptor.Artifact;\nimport org.apache.ivy.core.module.descriptor.Configuration;\nimport org.apache.ivy.core.module.descriptor.DefaultArtifact;\nimport org.apache.ivy.core.module.descriptor.DefaultModuleDescriptor;\nimport org.apache.ivy.core.module.descriptor.DependencyDescriptor;\nimport org.apache.ivy.core.module.descriptor.ExcludeRule;\nimport org.apache.ivy.core.module.descriptor.License;\nimport org.apache.ivy.core.module.descriptor.ModuleDescriptor;\nimport org.apache.ivy.core.module.id.ModuleRevisionId;\nimport org.apache.ivy.core.report.ArtifactDownloadReport;\nimport org.apache.ivy.core.report.DownloadReport;\nimport org.apache.ivy.core.report.DownloadStatus;\nimport org.apache.ivy.core.report.MetadataArtifactDownloadReport;\nimport org.apache.ivy.core.resolve.DownloadOptions;\nimport org.apache.ivy.core.resolve.ResolveData;\nimport org.apache.ivy.core.resolve.ResolvedModuleRevision;\nimport org.apache.ivy.core.settings.IvySettings;\nimport org.apache.ivy.plugins.resolver.AbstractResolver;\nimport org.apache.ivy.plugins.resolver.util.ResolvedResource;\nimport org.apache.ivy.plugins.version.VersionMatcher;\nimport org.apache.ivy.util.Message;\nimport org.apache.ivyde.eclipse.IvyDEException;\nimport org.apache.ivyde.eclipse.IvyPlugin;\nimport 
org.apache.ivyde.eclipse.cpcontainer.IvyClasspathContainer;\nimport org.apache.ivyde.eclipse.cpcontainer.IvyClasspathUtil;\nimport org.eclipse.core.resources.ResourcesPlugin;\nimport org.eclipse.core.runtime.IStatus;\nimport org.eclipse.jdt.core.IJavaProject;\nimport org.eclipse.jdt.core.JavaCore;\nimport org.eclipse.jdt.core.JavaModelException;\n\n/**\n * This is an Eclipse workspace Ivy resolver. When used with the custom IvyClasspathContainer\n * changes, this resolver will link dependent projects when they are open in the same workspace,\n * allowing full-fledged linked project functionality Eclipse provides, such as incremental\n * compilation, debugging, mouseover javadocs, and source browsing across projects.\n * \n * How it works During a resolve, it looks at all open projects in the workspace that have\n * Ivy containers. The first project that publishes the module on which the project being\n * resolved depends, will be picked and returned as a special type of artifact called \"project\".\n * \n * The IvyClasspathContainer will recognize the artifact as a project and put the eclipse project as\n * a dependent project within the classpath container of the parent.\n * \n * If you do not want a project to be linked as a dependency, close it or delete from the workspace.\n * As soon as you do that, any projects that were linked to it will automatically re-resolve (see\n * {@link WorkspaceResourceChangeListener}) and use the standard Ivy means of finding the\n * dependency.\n * \n * The {@link WorkspaceResourceChangeListener} will also auto-resolve when a new project is added or\n * opened, so opening a project will automatically link it into the currently open projects where\n * necessary.\n * \n * Since the resolver is not aware which module revision a project is publishing, it optimistically\n * matches any revision of the module.\n * \n * Since the resolver stops after finding the first open project which matches the module, having\n * multiple open 
versions of the same project in the workspace (for example, different branches) may\n * set the wrong version as a dependency. You are advised to only open the version of the project\n * which you want other projects in the workspace to depend on.\n * \n * NOTE: Transitive dependencies are not passed from the dependent project to the parent when\n * projects are linked. If you find you are missing some transitive dependencies, just set your\n * dependent eclipse project to export its ivy dependencies. (Project->Properties->Java Build\n * Path->Order and Export-> -> check the ivy container) This will only export the configuration that\n * project is using and not what a dependent project may ask for when it's being resolved. To do\n * that, this resolver will need to be modified to pass transitive dependencies along.\n */\npublic class WorkspaceResolver extends AbstractResolver {\n\n public static final String ECLIPSE_PROJECT_TYPE = \"eclipse-project\";\n\n public static final String ECLIPSE_PROJECT_EXTENSION = \"eclipse-project\";\n\n public static final String CACHE_NAME = \"__ivyde-workspace-resolver-cache\";\n\n private final IJavaProject resolvingJavaProject;\n\n private IJavaProject[] projects;\n\n public WorkspaceResolver(IJavaProject javaProject, IvySettings ivySettings) {\n this.resolvingJavaProject = javaProject;\n setName(javaProject.getElementName() + \"-ivyde-workspace-resolver\");\n setSettings(ivySettings);\n setCache(CACHE_NAME);\n\n try {\n projects = JavaCore.create(ResourcesPlugin.getWorkspace().getRoot()).getJavaProjects();\n } catch (JavaModelException e) {\n IvyPlugin.log(IStatus.ERROR, \"JDT Error while resolving in workspace for \"\n + resolvingJavaProject.getElementName(), e);\n }\n }\n\n public DownloadReport download(Artifact[] artifacts, DownloadOptions options) {\n // Not much to do here - downloads are not required for workspace projects.\n DownloadReport dr = new DownloadReport();\n for (int i = 0; i < artifacts.length; i++) {\n final 
ArtifactDownloadReport adr = new ArtifactDownloadReport(artifacts[i]);\n dr.addArtifactReport(adr);\n\n // Only report java projects as downloaded\n if (artifacts[i].getType().equals(ECLIPSE_PROJECT_TYPE)) {\n Message.verbose(\"\\t[IN WORKSPACE] \" + artifacts[i]);\n adr.setDownloadStatus(DownloadStatus.NO);\n adr.setSize(0);\n } else {\n Message.verbose(\"\\t[Eclipse Workspace resolver] \"\n + \"cannot download non-project artifact: \" + artifacts[i]);\n adr.setDownloadStatus(DownloadStatus.FAILED);\n }\n }\n return dr;\n }\n\n public ResolvedModuleRevision getDependency(DependencyDescriptor dd, ResolveData data)\n throws ParseException {\n\n ModuleRevisionId dependencyMrid = dd.getDependencyRevisionId();\n\n VersionMatcher versionMatcher = getSettings().getVersionMatcher();\n\n // Iterate over workspace to find Java project which has an Ivy\n // container for this dependency\n for (int i = 0; i < projects.length; i++) {\n IJavaProject javaProject = projects[i];\n if (resolvingJavaProject.equals(javaProject)) {\n // we don't want to introduce self dependency\n continue;\n }\n if (!javaProject.exists()) {\n continue;\n }\n List/* */containers = IvyClasspathUtil\n .getIvyClasspathContainers(javaProject);\n Iterator/* */itContainer = containers.iterator();\n while (itContainer.hasNext()) {\n IvyClasspathContainer ivycp = (IvyClasspathContainer) itContainer.next();\n ModuleDescriptor md;\n try {\n md = ivycp.getConf().getCachedModuleDescriptor();\n } catch (IvyDEException e) {\n IvyPlugin.log(IStatus.WARNING, \"Resolve in workspace for '\"\n + resolvingJavaProject.getElementName() + \"' cannot depend on \"\n + ivycp.getDescription() + \" [\" + e.getMessage() + \"]\", null);\n continue;\n }\n\n if (!md.getModuleRevisionId().getModuleId().equals(dependencyMrid.getModuleId())) {\n // it doesn't match org#module\n continue;\n }\n\n // Found one; check if it is for the module we need\n if (md.getModuleRevisionId().getRevision().startsWith(\"working@\")\n || 
versionMatcher.accept(dd.getDependencyRevisionId(), md)) {\n\n Artifact af = new DefaultArtifact(md.getModuleRevisionId(), md\n .getPublicationDate(), javaProject.getPath().toString(),\n ECLIPSE_PROJECT_TYPE, ECLIPSE_PROJECT_EXTENSION);\n\n DefaultModuleDescriptor workspaceMd = cloneMd(md, af);\n\n MetadataArtifactDownloadReport madr = new MetadataArtifactDownloadReport(af);\n madr.setDownloadStatus(DownloadStatus.SUCCESSFUL);\n madr.setSearched(true);\n\n return new ResolvedModuleRevision(this, this, workspaceMd, madr);\n }\n }\n }\n\n // Didn't find module in any open project, proceed to other resolvers.\n return null;\n }\n\n private DefaultModuleDescriptor cloneMd(ModuleDescriptor md, Artifact af) {\n\n DefaultModuleDescriptor newMd = new DefaultModuleDescriptor(md.getModuleRevisionId(),\n \"release\", null, true);\n newMd.addConfiguration(new Configuration(ModuleDescriptor.DEFAULT_CONFIGURATION));\n newMd.setLastModified(System.currentTimeMillis());\n\n newMd.setDescription(md.getDescription());\n newMd.setHomePage(md.getHomePage());\n newMd.setLastModified(md.getLastModified());\n newMd.setPublicationDate(md.getPublicationDate());\n newMd.setResolvedPublicationDate(md.getResolvedPublicationDate());\n newMd.setStatus(md.getStatus());\n\n Configuration[] allConfs = md.getConfigurations();\n if (allConfs.length == 0) {\n newMd.addArtifact(ModuleDescriptor.DEFAULT_CONFIGURATION, af);\n } else {\n for (int k = 0; k < allConfs.length; k++) {\n newMd.addConfiguration(allConfs[k]);\n newMd.addArtifact(allConfs[k].getName(), af);\n }\n }\n\n DependencyDescriptor[] dependencies = md.getDependencies();\n for (int k = 0; k < dependencies.length; k++) {\n newMd.addDependency(dependencies[k]);\n }\n\n ExcludeRule[] allExcludeRules = md.getAllExcludeRules();\n for (int k = 0; k < allExcludeRules.length; k++) {\n newMd.addExcludeRule(allExcludeRules[k]);\n }\n\n Map extraInfo = md.getExtraInfo();\n Iterator it = extraInfo.entrySet().iterator();\n while (it.hasNext()) {\n 
Entry entry = (Entry) it.next();\n newMd.addExtraInfo((String) entry.getKey(), (String) entry.getValue());\n }\n\n License[] licenses = md.getLicenses();\n for (int k = 0; k < licenses.length; k++) {\n newMd.addLicense(licenses[k]);\n }\n\n return newMd;\n }\n\n public void publish(Artifact artifact, File src, boolean overwrite) throws IOException {\n throw new UnsupportedOperationException(\"publish not supported by \" + getName());\n }\n\n public ResolvedResource findIvyFileRef(DependencyDescriptor dd, ResolveData data) {\n return null;\n }\n}\n"},"message":{"kind":"string","value":"more precise fix for IVYDE-186\n\n\ngit-svn-id: 804c0f3032317a396a1b8894d1ed8cc0b56e9f1c@814307 13f79535-47bb-0310-9956-ffa450edef68\n"},"old_file":{"kind":"string","value":"org.apache.ivyde.eclipse/src/java/org/apache/ivyde/eclipse/workspaceresolver/WorkspaceResolver.java"},"subject":{"kind":"string","value":"more precise fix for IVYDE-186"},"git_diff":{"kind":"string","value":"rg.apache.ivyde.eclipse/src/java/org/apache/ivyde/eclipse/workspaceresolver/WorkspaceResolver.java\n import java.util.Map;\n import java.util.Map.Entry;\n \nimport org.apache.ivy.Ivy;\n import org.apache.ivy.core.module.descriptor.Artifact;\n import org.apache.ivy.core.module.descriptor.Configuration;\n import org.apache.ivy.core.module.descriptor.DefaultArtifact;\n }\n \n // Found one; check if it is for the module we need\n if (md.getModuleRevisionId().getRevision().startsWith(\"working@\")\n if (md.getModuleRevisionId().getRevision().equals(Ivy.getWorkingRevision())\n || versionMatcher.accept(dd.getDependencyRevisionId(), md)) {\n \n Artifact af = new DefaultArtifact(md.getModuleRevisionId(), 
md"}}},{"rowIdx":2075,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"52eb713ee4562d50eb3a60ffb2b1688ececf5dfa"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"trask/glowroot,glowroot/glowroot,glowroot/glowroot,trask/glowroot,trask/glowroot,glowroot/glowroot,glowroot/glowroot,trask/glowroot"},"new_contents":{"kind":"string","value":"/*\n * Copyright 2015-2018 the original author or authors.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage org.glowroot.agent.plugin.cassandra;\n\nimport org.glowroot.agent.plugin.api.QueryEntry;\nimport org.glowroot.agent.plugin.api.checker.Nullable;\nimport org.glowroot.agent.plugin.api.weaving.BindReceiver;\nimport org.glowroot.agent.plugin.api.weaving.BindReturn;\nimport org.glowroot.agent.plugin.api.weaving.Mixin;\nimport org.glowroot.agent.plugin.api.weaving.OnReturn;\nimport org.glowroot.agent.plugin.api.weaving.Pointcut;\n\npublic class ResultSetAspect {\n\n // the field and method names are verbose since they will be mixed in to existing classes\n @Mixin(\"com.datastax.driver.core.ResultSet\")\n public static class ResultSetImpl implements ResultSet {\n\n // this may be async or non-async query entry\n //\n // needs to be volatile, since ResultSets are thread safe, and therefore app/framework does\n // *not* need to provide visibility when 
used across threads and so this cannot piggyback\n // (unlike with jdbc ResultSets)\n private volatile @Nullable QueryEntry glowroot$queryEntry;\n\n @Override\n public @Nullable QueryEntry glowroot$getQueryEntry() {\n return glowroot$queryEntry;\n }\n\n @Override\n public void glowroot$setQueryEntry(@Nullable QueryEntry queryEntry) {\n glowroot$queryEntry = queryEntry;\n }\n }\n\n // the method names are verbose since they will be mixed in to existing classes\n public interface ResultSet {\n\n @Nullable\n QueryEntry glowroot$getQueryEntry();\n\n void glowroot$setQueryEntry(@Nullable QueryEntry queryEntry);\n }\n\n @Pointcut(className = \"com.datastax.driver.core.ResultSet\", methodName = \"one\",\n methodParameterTypes = {})\n public static class OneAdvice {\n @OnReturn\n public static void onReturn(@BindReturn @Nullable Object row,\n @BindReceiver ResultSet resultSet) {\n QueryEntry queryEntry = resultSet.glowroot$getQueryEntry();\n if (queryEntry == null) {\n return;\n }\n if (row != null) {\n queryEntry.incrementCurrRow();\n } else {\n queryEntry.rowNavigationAttempted();\n }\n }\n }\n\n @Pointcut(className = \"java.lang.Iterable\",\n subTypeRestriction = \"com.datastax.driver.core.ResultSet\",\n methodName = \"iterator\", methodParameterTypes = {})\n public static class IteratorAdvice {\n @OnReturn\n public static void onReturn(@BindReceiver ResultSet resultSet) {\n QueryEntry queryEntry = resultSet.glowroot$getQueryEntry();\n if (queryEntry == null) {\n // tracing must be disabled (e.g. 
exceeded trace entry limit)\n return;\n }\n queryEntry.rowNavigationAttempted();\n }\n }\n\n @Pointcut(className = \"com.datastax.driver.core.PagingIterable\"\n + \"|com.datastax.driver.core.ResultSet\",\n subTypeRestriction = \"com.datastax.driver.core.ResultSet\",\n methodName = \"isExhausted\", methodParameterTypes = {})\n public static class IsExhaustedAdvice {\n @OnReturn\n public static void onReturn(@BindReceiver ResultSet resultSet) {\n QueryEntry queryEntry = resultSet.glowroot$getQueryEntry();\n if (queryEntry == null) {\n // tracing must be disabled (e.g. exceeded trace entry limit)\n return;\n }\n queryEntry.rowNavigationAttempted();\n }\n }\n}\n"},"new_file":{"kind":"string","value":"agent/plugins/cassandra-plugin/src/main/java/org/glowroot/agent/plugin/cassandra/ResultSetAspect.java"},"old_contents":{"kind":"string","value":"/*\n * Copyright 2015-2018 the original author or authors.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage org.glowroot.agent.plugin.cassandra;\n\nimport org.glowroot.agent.plugin.api.QueryEntry;\nimport org.glowroot.agent.plugin.api.checker.Nullable;\nimport org.glowroot.agent.plugin.api.weaving.BindReceiver;\nimport org.glowroot.agent.plugin.api.weaving.BindReturn;\nimport org.glowroot.agent.plugin.api.weaving.Mixin;\nimport org.glowroot.agent.plugin.api.weaving.OnReturn;\nimport org.glowroot.agent.plugin.api.weaving.Pointcut;\n\npublic class ResultSetAspect {\n\n // the field and method names are 
verbose since they will be mixed in to existing classes\n @Mixin(\"com.datastax.driver.core.ResultSet\")\n public static class ResultSetImpl implements ResultSet {\n\n // this may be async or non-async query entry\n //\n // needs to be volatile, since ResultSets are thread safe, and therefore app/framework does\n // *not* need to provide visibility when used across threads and so this cannot piggyback\n // (unlike with jdbc ResultSets)\n private volatile @Nullable QueryEntry glowroot$queryEntry;\n\n @Override\n public @Nullable QueryEntry glowroot$getQueryEntry() {\n return glowroot$queryEntry;\n }\n\n @Override\n public void glowroot$setQueryEntry(@Nullable QueryEntry queryEntry) {\n glowroot$queryEntry = queryEntry;\n }\n\n @Override\n public boolean glowroot$hasQueryEntry() {\n return glowroot$queryEntry != null;\n }\n }\n\n // the method names are verbose since they will be mixed in to existing classes\n public interface ResultSet {\n\n @Nullable\n QueryEntry glowroot$getQueryEntry();\n\n void glowroot$setQueryEntry(@Nullable QueryEntry queryEntry);\n\n boolean glowroot$hasQueryEntry();\n }\n\n @Pointcut(className = \"com.datastax.driver.core.ResultSet\", methodName = \"one\",\n methodParameterTypes = {})\n public static class OneAdvice {\n @OnReturn\n public static void onReturn(@BindReturn @Nullable Object row,\n @BindReceiver ResultSet resultSet) {\n QueryEntry queryEntry = resultSet.glowroot$getQueryEntry();\n if (queryEntry == null) {\n return;\n }\n if (row != null) {\n queryEntry.incrementCurrRow();\n } else {\n queryEntry.rowNavigationAttempted();\n }\n }\n }\n\n @Pointcut(className = \"java.lang.Iterable\",\n subTypeRestriction = \"com.datastax.driver.core.ResultSet\",\n methodName = \"iterator\", methodParameterTypes = {})\n public static class IteratorAdvice {\n @OnReturn\n public static void onReturn(@BindReceiver ResultSet resultSet) {\n QueryEntry queryEntry = resultSet.glowroot$getQueryEntry();\n if (queryEntry == null) {\n // tracing must be 
disabled (e.g. exceeded trace entry limit)\n return;\n }\n queryEntry.rowNavigationAttempted();\n }\n }\n\n @Pointcut(className = \"com.datastax.driver.core.PagingIterable\"\n + \"|com.datastax.driver.core.ResultSet\",\n subTypeRestriction = \"com.datastax.driver.core.ResultSet\",\n methodName = \"isExhausted\", methodParameterTypes = {})\n public static class IsExhaustedAdvice {\n @OnReturn\n public static void onReturn(@BindReceiver ResultSet resultSet) {\n QueryEntry queryEntry = resultSet.glowroot$getQueryEntry();\n if (queryEntry == null) {\n // tracing must be disabled (e.g. exceeded trace entry limit)\n return;\n }\n queryEntry.rowNavigationAttempted();\n }\n }\n}\n"},"message":{"kind":"string","value":"Remove unused code\n"},"old_file":{"kind":"string","value":"agent/plugins/cassandra-plugin/src/main/java/org/glowroot/agent/plugin/cassandra/ResultSetAspect.java"},"subject":{"kind":"string","value":"Remove unused code"},"git_diff":{"kind":"string","value":"gent/plugins/cassandra-plugin/src/main/java/org/glowroot/agent/plugin/cassandra/ResultSetAspect.java\n public void glowroot$setQueryEntry(@Nullable QueryEntry queryEntry) {\n glowroot$queryEntry = queryEntry;\n }\n\n @Override\n public boolean glowroot$hasQueryEntry() {\n return glowroot$queryEntry != null;\n }\n }\n \n // the method names are verbose since they will be mixed in to existing classes\n QueryEntry glowroot$getQueryEntry();\n \n void glowroot$setQueryEntry(@Nullable QueryEntry queryEntry);\n\n boolean glowroot$hasQueryEntry();\n }\n \n @Pointcut(className = \"com.datastax.driver.core.ResultSet\", methodName = 
\"one\","}}},{"rowIdx":2076,"cells":{"lang":{"kind":"string","value":"JavaScript"},"license":{"kind":"string","value":"mit"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"02dc574dea6683c835d80c9f3e0ce8b6499f3e00"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"joseph-roque/uOttawaCampusGuide,joseph-roque/uottawa-campus-guide,joseph-roque/uottawa-campus-guide,joseph-roque/uOttawaCampusGuide,joseph-roque/uOttawaCampusGuide,joseph-roque/uottawa-campus-guide"},"new_contents":{"kind":"string","value":"/**\n *\n * @license\n * Copyright (C) 2016-2017 Joseph Roque\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * @author Joseph Roque\n * @created 2017-03-09\n * @file StudySpotList.js\n * @providesModule StudySpotList\n * @description Displays a list of filterable study spots\n *\n * @flow\n */\n'use strict';\n\n// React imports\nimport React from 'react';\nimport {\n FlatList,\n Image,\n Platform,\n StyleSheet,\n Text,\n TouchableOpacity,\n View,\n} from 'react-native';\n\n// Types\nimport type { Language, StudySpot, TimeFormat } from 'types';\n\n// Type definition for component props.\ntype Props = {\n activeFilters: Set < string >, // Set of active study spot filters\n filter: ?string, // Filter the list of buildings\n studyFilters: Object, // Descriptions of study room filters\n language: Language, // Language to display building names in\n onSelect: (s: StudySpot) => void, // 
Callback for when a spot is selected\n spots: Array < StudySpot >, // Study spot properties to display\n timeFormat: TimeFormat, // Format to display times in\n}\n\n// Type definition for component state\ntype State = {\n studySpots: Array < StudySpot >, // List of study spots\n};\n\n// Imports\nimport moment from 'moment';\nimport PaddedIcon from 'PaddedIcon';\nimport * as Configuration from 'Configuration';\nimport * as Constants from 'Constants';\nimport * as DisplayUtils from 'DisplayUtils';\nimport * as TextUtils from 'TextUtils';\nimport * as Translations from 'Translations';\n\nconst TIME_UNAVAILABLE_REGEX = /[Nn]\\/[Aa]/;\n\nexport default class StudySpotList extends React.Component {\n\n /**\n * Properties this component expects to be provided by its parent.\n */\n props: Props;\n\n /**\n * Current state of the component.\n */\n state: State;\n\n /**\n * Constructor.\n *\n * @param {props} props component props\n */\n constructor(props: Props) {\n super(props);\n this.state = { studySpots: []};\n }\n\n /**\n * Loads the study spots once the view has been mounted.\n */\n componentDidMount(): void {\n this._filterStudySpots(this.props);\n }\n\n /**\n * If a new filter is provided, update the list of study spots.\n *\n * @param {Props} nextProps the new props being received\n */\n componentWillReceiveProps(nextProps: Props): void {\n // Basic boolean comparisons to see if re-filtering needs to occur\n if (nextProps.filter != this.props.filter\n || nextProps.language != this.props.language\n || nextProps.spots != this.props.spots) {\n this._filterStudySpots(nextProps);\n return;\n }\n\n // Compare filters to see if re-filtering needs to occur\n if (this.props.activeFilters != nextProps.activeFilters) {\n this._filterStudySpots(nextProps);\n return;\n }\n }\n\n /**\n * Check if the spot contains all of the active filters.\n *\n * @param {Set} activeFilters currently active filters\n * @param {StudySpot} spot details about the study spot\n * @returns {boolean} 
true iff the spot contains all the active filters, false otherwise\n */\n _spotMatchesAllFilters(activeFilters: Set < string >, spot: StudySpot): boolean {\n if (activeFilters.size === 0) {\n return true;\n }\n\n let matches = 0;\n spot.filters.forEach((filter) => {\n if (activeFilters.has(filter)) {\n matches++;\n }\n });\n\n if (activeFilters.has('open')) {\n if (TIME_UNAVAILABLE_REGEX.test(spot.opens)) {\n matches++;\n } else {\n const openTime = moment(spot.opens, 'HH:mm');\n const closeTime = moment(spot.closes, 'HH:mm');\n const currentTime = moment();\n if (openTime.diff(currentTime) < 0 && closeTime.diff(currentTime, 'hours') >= 1) {\n matches++;\n }\n }\n }\n\n return matches === activeFilters.size;\n }\n\n /**\n * Only show study spots which names or building names contain the search terms.\n *\n * @param {Props} props the props to use to filter\n */\n _filterStudySpots({ filter, activeFilters, spots }: Props): void {\n // Ignore the case of the search terms\n const adjustedSearchTerms: ?string = (filter == null || filter.length === 0)\n ? 
null\n : filter.toUpperCase();\n\n // Create array for spots\n const filteredSpots: Array < StudySpot > = [];\n\n for (let i = 0; spots && i < spots.length; i++) {\n const spot: Object = spots[i];\n\n // Don't add the spot if it doesn't match all the filters\n if (!this._spotMatchesAllFilters(activeFilters, spot)) {\n continue;\n }\n\n // If the search terms are empty, or the spot properties contains the terms, add it to the list\n if (adjustedSearchTerms == null\n || spot.building.toUpperCase().indexOf(adjustedSearchTerms) >= 0\n || spot.room.toUpperCase().indexOf(adjustedSearchTerms) >= 0\n || (spot.name && spot.name.toUpperCase().indexOf(adjustedSearchTerms) >= 0)\n || (spot.name_en && spot.name_en.toUpperCase().indexOf(adjustedSearchTerms) >= 0)\n || (spot.name_fr && spot.name_fr.toUpperCase().indexOf(adjustedSearchTerms) >= 0)) {\n filteredSpots.push(spot);\n continue;\n }\n }\n\n this.setState({ studySpots: filteredSpots });\n }\n\n /**\n * Displays a spots's name, image and description.\n *\n * @param {StudySpot} spot information about the study spot to display\n * @returns {ReactElement} an image and views describing the spot\n */\n _renderItem({ item }: { item: StudySpot }): ReactElement < any > {\n const altName = Translations.getName(this.props.language, item);\n const name = `${item.building} ${item.room ? 
item.room : ''}`;\n const description = Translations.getVariant(this.props.language, 'description', item) || '';\n\n let openingTime = '';\n if (TIME_UNAVAILABLE_REGEX.test(item.opens)) {\n openingTime = item.opens;\n } else {\n openingTime = TextUtils.convertTimeFormat(this.props.timeFormat, item.opens);\n }\n\n let closingTime = '';\n if (TIME_UNAVAILABLE_REGEX.test(item.closes)) {\n if (!TIME_UNAVAILABLE_REGEX.test(item.opens)) {\n closingTime = ` - ${item.closes}`;\n }\n } else {\n closingTime = ` - ${TextUtils.convertTimeFormat(this.props.timeFormat, item.closes)}`;\n }\n\n return (\n this.props.onSelect(item)}>\n \n \n \n {name}\n {altName == null ? null : {altName}}\n {`${openingTime}${closingTime}`}\n {description}\n \n {item.filters.map((filter) => (\n \n ))}\n \n \n \n \n );\n }\n\n /**\n * Renders a separator line between rows.\n *\n * @returns {ReactElement} a separator for the list of study spots\n */\n _renderSeparator(): ReactElement < any > {\n return ;\n }\n\n /**\n * Returns a list of touchable views listing the study spot descriptions.\n *\n * @returns {ReactElement} the hierarchy of views to render.\n */\n render(): ReactElement < any > {\n return (\n \n `${studySpot.building}.${studySpot.room}`}\n renderItem={this._renderItem.bind(this)} />\n \n );\n }\n}\n\n// Private styles for component\nconst _styles = StyleSheet.create({\n container: {\n flex: 1,\n backgroundColor: Constants.Colors.primaryBackground,\n },\n spot: {\n flex: 1,\n margin: Constants.Sizes.Margins.Expanded,\n alignItems: 'center',\n flexDirection: 'row',\n },\n spotProperties: {\n flex: 1,\n },\n spotImage: {\n alignSelf: 'flex-start',\n marginRight: Constants.Sizes.Margins.Expanded,\n width: 64,\n height: 64,\n },\n spotName: {\n color: Constants.Colors.primaryWhiteText,\n fontSize: Constants.Sizes.Text.Subtitle,\n },\n spotSubtitle: {\n color: Constants.Colors.secondaryWhiteText,\n fontSize: Constants.Sizes.Text.Caption,\n marginTop: Constants.Sizes.Margins.Condensed,\n },\n 
spotDescription: {\n color: Constants.Colors.primaryWhiteText,\n fontSize: Constants.Sizes.Text.Body,\n marginTop: Constants.Sizes.Margins.Condensed,\n },\n spotFilters: {\n flexDirection: 'row',\n alignSelf: 'flex-end',\n marginTop: Constants.Sizes.Margins.Expanded,\n },\n separator: {\n height: StyleSheet.hairlineWidth,\n marginLeft: Constants.Sizes.Margins.Expanded,\n backgroundColor: Constants.Colors.tertiaryBackground,\n },\n});\n"},"new_file":{"kind":"string","value":"src/components/StudySpotList.js"},"old_contents":{"kind":"string","value":"/**\n *\n * @license\n * Copyright (C) 2016-2017 Joseph Roque\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * @author Joseph Roque\n * @created 2017-03-09\n * @file StudySpotList.js\n * @providesModule StudySpotList\n * @description Displays a list of filterable study spots\n *\n * @flow\n */\n'use strict';\n\n// React imports\nimport React from 'react';\nimport {\n FlatList,\n Image,\n Platform,\n StyleSheet,\n Text,\n TouchableOpacity,\n View,\n} from 'react-native';\n\n// Types\nimport type { Language, StudySpot, TimeFormat } from 'types';\n\n// Type definition for component props.\ntype Props = {\n activeFilters: Set < string >, // Set of active study spot filters\n filter: ?string, // Filter the list of buildings\n studyFilters: Object, // Descriptions of study room filters\n language: Language, // Language to display building names in\n onSelect: (s: StudySpot) => void, // Callback for when a 
spot is selected\n spots: Array < StudySpot >, // Study spot properties to display\n timeFormat: TimeFormat, // Format to display times in\n}\n\n// Type definition for component state\ntype State = {\n studySpots: Array < StudySpot >, // List of study spots\n};\n\n// Imports\nimport moment from 'moment';\nimport PaddedIcon from 'PaddedIcon';\nimport * as Configuration from 'Configuration';\nimport * as Constants from 'Constants';\nimport * as DisplayUtils from 'DisplayUtils';\nimport * as TextUtils from 'TextUtils';\nimport * as Translations from 'Translations';\n\nexport default class StudySpotList extends React.Component {\n\n /**\n * Properties this component expects to be provided by its parent.\n */\n props: Props;\n\n /**\n * Current state of the component.\n */\n state: State;\n\n /**\n * Constructor.\n *\n * @param {props} props component props\n */\n constructor(props: Props) {\n super(props);\n this.state = { studySpots: []};\n }\n\n /**\n * Loads the study spots once the view has been mounted.\n */\n componentDidMount(): void {\n this._filterStudySpots(this.props);\n }\n\n /**\n * If a new filter is provided, update the list of study spots.\n *\n * @param {Props} nextProps the new props being received\n */\n componentWillReceiveProps(nextProps: Props): void {\n // Basic boolean comparisons to see if re-filtering needs to occur\n if (nextProps.filter != this.props.filter\n || nextProps.language != this.props.language\n || nextProps.spots != this.props.spots) {\n this._filterStudySpots(nextProps);\n return;\n }\n\n // Compare filters to see if re-filtering needs to occur\n if (this.props.activeFilters != nextProps.activeFilters) {\n this._filterStudySpots(nextProps);\n return;\n }\n }\n\n /**\n * Check if the spot contains all of the active filters.\n *\n * @param {Set} activeFilters currently active filters\n * @param {StudySpot} spot details about the study spot\n * @returns {boolean} true iff the spot contains all the active filters, false otherwise\n 
*/\n _spotMatchesAllFilters(activeFilters: Set < string >, spot: StudySpot): boolean {\n if (activeFilters.size === 0) {\n return true;\n }\n\n let matches = 0;\n spot.filters.forEach((filter) => {\n if (activeFilters.has(filter)) {\n matches++;\n }\n });\n\n if (activeFilters.has('open')) {\n const openTime = moment(spot.opens, 'HH:mm');\n const closeTime = moment(spot.closes, 'HH:mm');\n const currentTime = moment();\n if (openTime.diff(currentTime) < 0 && closeTime.diff(currentTime, 'hours') >= 1) {\n matches++;\n }\n }\n\n return matches === activeFilters.size;\n }\n\n /**\n * Only show study spots which names or building names contain the search terms.\n *\n * @param {Props} props the props to use to filter\n */\n _filterStudySpots({ filter, activeFilters, spots }: Props): void {\n // Ignore the case of the search terms\n const adjustedSearchTerms: ?string = (filter == null || filter.length === 0)\n ? null\n : filter.toUpperCase();\n\n // Create array for spots\n const filteredSpots: Array < StudySpot > = [];\n\n for (let i = 0; spots && i < spots.length; i++) {\n const spot: Object = spots[i];\n\n // Don't add the spot if it doesn't match all the filters\n if (!this._spotMatchesAllFilters(activeFilters, spot)) {\n continue;\n }\n\n // If the search terms are empty, or the spot properties contains the terms, add it to the list\n if (adjustedSearchTerms == null\n || spot.building.toUpperCase().indexOf(adjustedSearchTerms) >= 0\n || spot.room.toUpperCase().indexOf(adjustedSearchTerms) >= 0\n || (spot.name && spot.name.toUpperCase().indexOf(adjustedSearchTerms) >= 0)\n || (spot.name_en && spot.name_en.toUpperCase().indexOf(adjustedSearchTerms) >= 0)\n || (spot.name_fr && spot.name_fr.toUpperCase().indexOf(adjustedSearchTerms) >= 0)) {\n filteredSpots.push(spot);\n continue;\n }\n }\n\n this.setState({ studySpots: filteredSpots });\n }\n\n /**\n * Displays a spots's name, image and description.\n *\n * @param {StudySpot} spot information about the study spot to 
display\n * @returns {ReactElement} an image and views describing the spot\n */\n _renderItem({ item }: { item: StudySpot }): ReactElement < any > {\n const altName = Translations.getName(this.props.language, item);\n const name = `${item.building} ${item.room}`;\n const openingTime = TextUtils.convertTimeFormat(this.props.timeFormat, item.opens);\n const closingTime = TextUtils.convertTimeFormat(this.props.timeFormat, item.closes);\n const description = Translations.getVariant(this.props.language, 'description', item) || '';\n\n return (\n this.props.onSelect(item)}>\n \n \n \n {name}\n {altName == null ? null : {altName}}\n {`${openingTime} - ${closingTime}`}\n {description}\n \n {item.filters.map((filter) => (\n \n ))}\n \n \n \n \n );\n }\n\n /**\n * Renders a separator line between rows.\n *\n * @returns {ReactElement} a separator for the list of study spots\n */\n _renderSeparator(): ReactElement < any > {\n return ;\n }\n\n /**\n * Returns a list of touchable views listing the study spot descriptions.\n *\n * @returns {ReactElement} the hierarchy of views to render.\n */\n render(): ReactElement < any > {\n return (\n \n `${studySpot.building}.${studySpot.room}`}\n renderItem={this._renderItem.bind(this)} />\n \n );\n }\n}\n\n// Private styles for component\nconst _styles = StyleSheet.create({\n container: {\n flex: 1,\n backgroundColor: Constants.Colors.primaryBackground,\n },\n spot: {\n flex: 1,\n margin: Constants.Sizes.Margins.Expanded,\n alignItems: 'center',\n flexDirection: 'row',\n },\n spotProperties: {\n flex: 1,\n },\n spotImage: {\n alignSelf: 'flex-start',\n marginRight: Constants.Sizes.Margins.Expanded,\n width: 64,\n height: 64,\n },\n spotName: {\n color: Constants.Colors.primaryWhiteText,\n fontSize: Constants.Sizes.Text.Subtitle,\n },\n spotSubtitle: {\n color: Constants.Colors.secondaryWhiteText,\n fontSize: Constants.Sizes.Text.Caption,\n marginTop: Constants.Sizes.Margins.Condensed,\n },\n spotDescription: {\n color: 
Constants.Colors.primaryWhiteText,\n fontSize: Constants.Sizes.Text.Body,\n marginTop: Constants.Sizes.Margins.Condensed,\n },\n spotFilters: {\n flexDirection: 'row',\n alignSelf: 'flex-end',\n marginTop: Constants.Sizes.Margins.Expanded,\n },\n separator: {\n height: StyleSheet.hairlineWidth,\n marginLeft: Constants.Sizes.Margins.Expanded,\n backgroundColor: Constants.Colors.tertiaryBackground,\n },\n});\n"},"message":{"kind":"string","value":"Enable filtering study rooms by 'open'\n"},"old_file":{"kind":"string","value":"src/components/StudySpotList.js"},"subject":{"kind":"string","value":"Enable filtering study rooms by 'open'"},"git_diff":{"kind":"string","value":"rc/components/StudySpotList.js\n import * as TextUtils from 'TextUtils';\n import * as Translations from 'Translations';\n \nconst TIME_UNAVAILABLE_REGEX = /[Nn]\\/[Aa]/;\n\n export default class StudySpotList extends React.Component {\n \n /**\n });\n \n if (activeFilters.has('open')) {\n const openTime = moment(spot.opens, 'HH:mm');\n const closeTime = moment(spot.closes, 'HH:mm');\n const currentTime = moment();\n if (openTime.diff(currentTime) < 0 && closeTime.diff(currentTime, 'hours') >= 1) {\n if (TIME_UNAVAILABLE_REGEX.test(spot.opens)) {\n matches++;\n } else {\n const openTime = moment(spot.opens, 'HH:mm');\n const closeTime = moment(spot.closes, 'HH:mm');\n const currentTime = moment();\n if (openTime.diff(currentTime) < 0 && closeTime.diff(currentTime, 'hours') >= 1) {\n matches++;\n }\n }\n }\n \n */\n _renderItem({ item }: { item: StudySpot }): ReactElement < any > {\n const altName = Translations.getName(this.props.language, item);\n const name = `${item.building} ${item.room}`;\n const openingTime = TextUtils.convertTimeFormat(this.props.timeFormat, item.opens);\n const closingTime = TextUtils.convertTimeFormat(this.props.timeFormat, item.closes);\n const name = `${item.building} ${item.room ? 
item.room : ''}`;\n const description = Translations.getVariant(this.props.language, 'description', item) || '';\n\n let openingTime = '';\n if (TIME_UNAVAILABLE_REGEX.test(item.opens)) {\n openingTime = item.opens;\n } else {\n openingTime = TextUtils.convertTimeFormat(this.props.timeFormat, item.opens);\n }\n\n let closingTime = '';\n if (TIME_UNAVAILABLE_REGEX.test(item.closes)) {\n if (!TIME_UNAVAILABLE_REGEX.test(item.opens)) {\n closingTime = ` - ${item.closes}`;\n }\n } else {\n closingTime = ` - ${TextUtils.convertTimeFormat(this.props.timeFormat, item.closes)}`;\n }\n \n return (\n \n {name}\n {altName == null ? null : {altName}}\n {`${openingTime} - ${closingTime}`}\n {`${openingTime}${closingTime}`}\n {description}\n \n {item.filters.map((filter) => ("}}},{"rowIdx":2077,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"mit"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"b2c9a7967e017288d25c84e54b4c973b491f2ef5"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"CS2103JAN2017-W13-B4/main,CS2103JAN2017-W13-B4/main"},"new_contents":{"kind":"string","value":"package seedu.doist.model.task;\n\nimport java.util.Date;\nimport java.util.Objects;\n\nimport seedu.doist.commons.util.CollectionUtil;\nimport seedu.doist.model.tag.UniqueTagList;\n\n/**\n * Represents a Task in the to-do list.\n * Guarantees: details are present and not null, field values are validated.\n */\npublic class Task implements ReadOnlyTask {\n\n private Description desc;\n private Priority priority;\n private FinishedStatus finishedStatus;\n private UniqueTagList tags;\n private Date startDate;\n private Date endDate;\n\n /**\n * Every field must be present and not null.\n */\n public Task(Description name, Priority priority, FinishedStatus finishedStatus,\n UniqueTagList tags, Date startDate, Date endDate) {\n assert !CollectionUtil.isAnyNull(name, priority, finishedStatus, tags);\n this.desc 
= name;\n this.priority = priority;\n this.finishedStatus = finishedStatus;\n this.tags = new UniqueTagList(tags); // protect internal tags from changes in the arg list\n this.startDate = startDate;\n this.endDate = endDate;\n }\n\n public Task(Description name, Priority priority, FinishedStatus finishedStatus, UniqueTagList tags) {\n this(name, priority, finishedStatus, tags, null, null);\n }\n\n public Task(Description name, Priority priority, UniqueTagList tags) {\n this(name, priority, new FinishedStatus(), tags, null, null);\n }\n\n public Task(Description name, UniqueTagList tags) {\n this(name, new Priority(), new FinishedStatus(), tags, null, null);\n }\n\n public Task(Description name, Date startDate, Date endDate) {\n this(name, new Priority(), new FinishedStatus(), new UniqueTagList(), startDate, endDate);\n }\n\n public Task(Description name, UniqueTagList tags, Date startDate, Date endDate) {\n this(name, new Priority(), new FinishedStatus(), tags, startDate, endDate);\n }\n\n /**\n * Creates a copy of the given ReadOnlyTask.\n */\n public Task(ReadOnlyTask source) {\n this(source.getDescription(), source.getPriority(), source.getFinishedStatus(),\n source.getTags(), source.getStartDate(), source.getEndDate());\n }\n\n public void setDescription(Description desc) {\n assert desc != null;\n this.desc = desc;\n }\n\n @Override\n public Description getDescription() {\n return desc;\n }\n\n public void setPriority(Priority priority) {\n assert priority != null;\n this.priority = priority;\n }\n\n public void setStartDate(Date startDate) {\n this.startDate = startDate;\n }\n\n public void setEndDate(Date endDate) {\n this.endDate = endDate;\n }\n\n public Date getStartDate() {\n return startDate;\n }\n\n public Date getEndDate() {\n return endDate;\n }\n\n @Override\n public Priority getPriority() {\n return priority;\n }\n\n public void setFinishedStatus(boolean isFinished) {\n assert finishedStatus != null;\n 
this.finishedStatus.setIsFinished(isFinished);\n }\n\n public void setFinishedStatus(FinishedStatus status) {\n assert finishedStatus != null;\n this.finishedStatus = status;\n }\n\n @Override\n public FinishedStatus getFinishedStatus() {\n return finishedStatus;\n }\n\n @Override\n public UniqueTagList getTags() {\n return new UniqueTagList(tags);\n }\n\n /**\n * Replaces this person's tags with the tags in the argument tag list.\n */\n public void setTags(UniqueTagList replacement) {\n tags.setTags(replacement);\n }\n\n /**\n * Updates this task with the details of {@code replacement}.\n */\n public void resetData(ReadOnlyTask replacement) {\n assert replacement != null;\n\n this.setDescription(replacement.getDescription());\n this.setPriority(replacement.getPriority());\n this.setFinishedStatus(replacement.getFinishedStatus());\n this.setTags(replacement.getTags());\n this.setStartDate(replacement.getStartDate());\n this.setEndDate(replacement.getEndDate());\n }\n\n @Override\n public boolean equals(Object other) {\n return other == this // short circuit if same object\n || (other instanceof ReadOnlyTask // instanceof handles nulls\n && this.isSameStateAs((ReadOnlyTask) other));\n }\n\n @Override\n public int hashCode() {\n // use this method for custom fields hashing instead of implementing your own\n return Objects.hash(desc, tags);\n }\n\n @Override\n public String toString() {\n return getAsText();\n }\n\n}\n"},"new_file":{"kind":"string","value":"src/main/java/seedu/doist/model/task/Task.java"},"old_contents":{"kind":"string","value":"package seedu.doist.model.task;\n\nimport java.util.Date;\nimport java.util.Objects;\n\nimport seedu.doist.commons.util.CollectionUtil;\nimport seedu.doist.model.tag.UniqueTagList;\n\n/**\n * Represents a Task in the to-do list.\n * Guarantees: details are present and not null, field values are validated.\n */\npublic class Task implements ReadOnlyTask {\n\n private Description desc;\n private Priority priority;\n private 
FinishedStatus finishedStatus;\n private UniqueTagList tags;\n private Date startDate;\n private Date endDate;\n\n /**\n * Every field must be present and not null.\n */\n public Task(Description name, Priority priority, FinishedStatus finishedStatus,\n UniqueTagList tags, Date startDate, Date endDate) {\n assert !CollectionUtil.isAnyNull(name, priority, finishedStatus, tags);\n this.desc = name;\n this.priority = priority;\n this.finishedStatus = finishedStatus;\n this.tags = new UniqueTagList(tags); // protect internal tags from changes in the arg list\n this.startDate = startDate;\n this.endDate = endDate;\n }\n\n public Task(Description name, Priority priority, FinishedStatus finishedStatus, UniqueTagList tags) {\n this(name, priority, finishedStatus, tags, null, null);\n }\n\n public Task(Description name, Priority priority, UniqueTagList tags) {\n this(name, priority, new FinishedStatus(), tags, null, null);\n }\n\n public Task(Description name, UniqueTagList tags) {\n this(name, new Priority(), new FinishedStatus(), tags, null, null);\n }\n\n public Task(Description name, Date startDate, Date endDate) {\n this(name, new Priority(), new FinishedStatus(), new UniqueTagList(), startDate, endDate);\n }\n\n public Task(Description name, UniqueTagList tags, Date startDate, Date endDate) {\n this(name, new Priority(), new FinishedStatus(), tags, startDate, endDate);\n }\n\n /**\n * Creates a copy of the given ReadOnlyTask.\n */\n public Task(ReadOnlyTask source) {\n this(source.getDescription(), source.getPriority(), source.getFinishedStatus(),\n source.getTags(), source.getStartDate(), source.getEndDate());\n }\n\n public void setDescription(Description desc) {\n assert desc != null;\n this.desc = desc;\n }\n\n @Override\n public Description getDescription() {\n return desc;\n }\n\n public void setPriority(Priority priority) {\n assert priority != null;\n this.priority = priority;\n }\n\n public Date getStartDate() {\n return startDate;\n }\n\n public Date 
getEndDate() {\n return endDate;\n }\n\n @Override\n public Priority getPriority() {\n return priority;\n }\n\n public void setFinishedStatus(boolean isFinished) {\n assert finishedStatus != null;\n this.finishedStatus.setIsFinished(isFinished);\n }\n\n public void setFinishedStatus(FinishedStatus status) {\n assert finishedStatus != null;\n this.finishedStatus = status;\n }\n\n @Override\n public FinishedStatus getFinishedStatus() {\n return finishedStatus;\n }\n\n @Override\n public UniqueTagList getTags() {\n return new UniqueTagList(tags);\n }\n\n /**\n * Replaces this person's tags with the tags in the argument tag list.\n */\n public void setTags(UniqueTagList replacement) {\n tags.setTags(replacement);\n }\n\n /**\n * Updates this task with the details of {@code replacement}.\n */\n public void resetData(ReadOnlyTask replacement) {\n assert replacement != null;\n\n this.setDescription(replacement.getDescription());\n this.setPriority(replacement.getPriority());\n this.setFinishedStatus(replacement.getFinishedStatus());\n this.setTags(replacement.getTags());\n }\n\n @Override\n public boolean equals(Object other) {\n return other == this // short circuit if same object\n || (other instanceof ReadOnlyTask // instanceof handles nulls\n && this.isSameStateAs((ReadOnlyTask) other));\n }\n\n @Override\n public int hashCode() {\n // use this method for custom fields hashing instead of implementing your own\n return Objects.hash(desc, tags);\n }\n\n @Override\n public String toString() {\n return getAsText();\n }\n\n}\n"},"message":{"kind":"string","value":"Edit Task class to provide setters for Start and End Date\n"},"old_file":{"kind":"string","value":"src/main/java/seedu/doist/model/task/Task.java"},"subject":{"kind":"string","value":"Edit Task class to provide setters for Start and End Date"},"git_diff":{"kind":"string","value":"rc/main/java/seedu/doist/model/task/Task.java\n this.priority = priority;\n }\n \n public void setStartDate(Date startDate) {\n 
this.startDate = startDate;\n }\n\n public void setEndDate(Date endDate) {\n this.endDate = endDate;\n }\n\n public Date getStartDate() {\n return startDate;\n }\n this.setPriority(replacement.getPriority());\n this.setFinishedStatus(replacement.getFinishedStatus());\n this.setTags(replacement.getTags());\n this.setStartDate(replacement.getStartDate());\n this.setEndDate(replacement.getEndDate());\n }\n \n @Override"}}},{"rowIdx":2078,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"c50c72a784baeecd4c98643380c1febab3ed6024"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"cmccabe/salvo,cmccabe/salvo"},"new_contents":{"kind":"string","value":"package scorched.android;\n\nimport android.content.Context;\nimport android.graphics.Canvas;\nimport android.graphics.Color;\nimport android.graphics.LinearGradient;\nimport android.graphics.Paint;\nimport android.graphics.Path;\nimport android.graphics.Rect;\nimport android.graphics.Shader;\nimport android.graphics.Typeface;\nimport android.util.AttributeSet;\nimport android.util.Log;\nimport android.view.MotionEvent;\nimport android.view.View;\n\n/**\n * A slider widget which the user can slide back and forth.\n *\n * There are arrows on the ends for fine adjustments.\n *\n * Most functions are synchronized on mState to prevent wackiness. The mState\n * mutex should generally be held a pretty short amount of time.\n */\npublic class SalvoSlider extends View {\n /* ================= Types ================= */\n /** Describes the state of this slider */\n public enum SliderState {\n /**\n * The slider is not in use. Only a blank space will be drawn. Touch\n * will be disabled.\n */\n DISABLED,\n\n /**\n * The slider will be drawn using a bar graphic. Touch will be \n * enabled.\n */\n BAR,\n\n /**\n * The slider will be drawn using the angle graphic. 
Touch will be\n * enabled.\n */\n ANGLE,\n };\n\n /** Used to hook up Listeners */\n public static interface Listener {\n void onPositionChange(int val);\n }\n\n /* ================= Constants ================= */\n private static final String TAG = \"SalvoSlider\";\n\n private static final int BUTTON_PERCENT = 20;\n\n /* ================= Members ================= */\n private SliderState mState;\n\n // //// User input stuff\n /** Listener to notify when slider value changes */\n private Listener mListener;\n\n /** Minimum slider value */\n private int mMin;\n\n /** Maximum slider value. */\n private int mMax;\n\n /** True if the slider's value increases left-to-right rather\n * than left-to-right */\n private boolean mReversed;\n\n /** Current slider value */\n private int mVal;\n\n // //// Current configuration\n /**\n * Current slider color. If mColor == Color.WHITE then the slider is\n * disabled.\n */\n private int mColor;\n\n /** Current slider width */\n private int mWidth;\n\n /** Current slider height */\n private int mHeight;\n\n // //// Things computed by cacheStuff()\n /** Current left boundary of slidable area */\n private int mLeftBound;\n\n /** Current right boundary of slidable area */\n private int mRightBound;\n\n /** Gradient paint for bar */\n private Paint mBarPaint;\n\n /** Paint for text that's drawn on bars */\n private Paint mFontPaint;\n\n /////// Temporaries\n private Paint mTempPaint;\n\n private Rect mTempRect;\n\n private Path mTempPath;\n\n /* ================= Access ================= */\n\n /* ================= Operations ================= */\n /**\n * Cache a bunch of stuff that we don't want to have to recalculate on \n * each draw().\n */\n private void cacheStuff() {\n assert Thread.holdsLock(mState);\n // Don't need to cache anything for the DISABLED state\n if (mState == SliderState.DISABLED)\n return;\n mLeftBound = (mWidth * BUTTON_PERCENT) / 100;\n mRightBound = (mWidth * (100 - BUTTON_PERCENT)) / 100;\n\n if (mState == 
SliderState.BAR) {\n int colors[] = new int[3];\n colors[0] = Color.WHITE;\n colors[1] = mColor;\n colors[2] = Color.WHITE;\n Shader barShader = new LinearGradient(0, 0, 0, (mHeight * 3) / 2,\n colors, null, Shader.TileMode.REPEAT);\n mBarPaint = new Paint();\n mBarPaint.setShader(barShader);\n }\n\n mFontPaint = new Paint();\n mFontPaint.setColor(Color.WHITE);\n mFontPaint.setAntiAlias(true);\n adjustTypefaceToFit(mFontPaint,\n (mHeight * 4) / 5, Typeface.SANS_SERIF);\n mFontPaint.setTextAlign(Paint.Align.CENTER);\n }\n\n @Override\n protected void onDraw(Canvas canvas) {\n int x;\n synchronized (mState) {\n super.onDraw(canvas);\n switch (mState) {\n case DISABLED:\n canvas.drawColor(Color.BLACK);\n break;\n\n case BAR:\n canvas.drawColor(Color.BLACK);\n x = mLeftBound\n + (((mRightBound - mLeftBound) * (mVal - mMin)) /\n (mMax - mMin));\n mTempRect.set(mLeftBound, 0, x, mHeight);\n canvas.drawRect(mTempRect, mBarPaint);\n drawEndButtons(canvas);\n drawSliderText(canvas);\n break;\n\n case ANGLE:\n canvas.drawColor(Color.BLACK);\n\n int totalWidth = mRightBound - mLeftBound;\n int w = totalWidth / 6;\n x = mLeftBound\n + ((totalWidth * mVal) / (mMax - mMin));\n mTempPath.moveTo(x, 0);\n mTempPath.lineTo(x - w, mHeight);\n mTempPath.lineTo(x + w, mHeight);\n mTempPaint.setColor(Color.argb(255, 236, 189, 62));\n mTempPaint.setAntiAlias(true);\n mTempPaint.setStrokeWidth(1);\n canvas.drawPath(mTempPath, mTempPaint);\n mTempPath.rewind();\n drawEndButtons(canvas);\n drawSliderText(canvas);\n break;\n }\n }\n }\n\n private void drawSliderText(Canvas canvas) {\n String str;\n if (mReversed) {\n str = \"\" + (mMax - mVal);\n }\n else {\n str = \"\" + mVal;\n }\n canvas.drawText(str, mWidth / 2, (mHeight * 4) / 5, mFontPaint);\n }\n\n private void adjustTypefaceToFit(Paint p, int height, Typeface tf) {\n p.setTypeface(tf);\n Paint.FontMetrics metrics = new Paint.FontMetrics();\n int size = 40;\n p.setTextSize(size);\n p.getFontMetrics(metrics);\n int fontHeight = 
(int)(metrics.top + metrics.bottom);\n if (Math.abs(fontHeight - height) > 5) {\n size = (size * height) / fontHeight;\n }\n p.setTextSize(size);\n }\n\n private void drawEndButtons(Canvas canvas) {\n // Draw end buttons\n drawEndButton(canvas, 0, mLeftBound);\n drawEndButton(canvas, mWidth, mRightBound);\n }\n\n private void drawEndButton(Canvas canvas, int xLeft, int xRight) {\n mTempPaint.setColor(Color.BLACK);\n mTempPaint.setAntiAlias(false);\n mTempRect.set(xLeft, 0, xRight, mHeight);\n canvas.drawRect(mTempRect, mTempPaint);\n\n mTempPaint.setARGB(255, 236, 189, 62);\n mTempPaint.setAntiAlias(false);\n mTempPaint.setStrokeWidth(mHeight / 20);\n canvas.drawLine(xLeft, 0, xRight, 0, mTempPaint);\n canvas.drawLine(xLeft, mHeight, xRight, mHeight, mTempPaint);\n canvas.drawLine(xLeft, 0, xLeft, mHeight, mTempPaint);\n canvas.drawLine(xRight, 0, xRight, mHeight, mTempPaint);\n mTempPaint.setStrokeWidth(1);\n\n int w = xRight - xLeft;\n mTempPath.moveTo(xLeft + (w / 5), mHeight / 2);\n mTempPath.lineTo(xLeft + ((4 * w) / 5), mHeight / 5);\n mTempPath.lineTo(xLeft + ((4 * w) / 5), (mHeight * 4) / 5);\n mTempPaint.setAntiAlias(true);\n canvas.drawPath(mTempPath, mTempPaint);\n mTempPath.rewind();\n }\n\n @Override\n public boolean onTouchEvent(MotionEvent me) {\n boolean invalidate = false;\n synchronized (mState) {\n if (mState == SliderState.DISABLED) {\n return true;\n }\n int action = me.getAction();\n int x = (int)me.getX();\n if (x < mLeftBound) {\n invalidate = downButton(action);\n }\n else if (x > mRightBound) {\n invalidate = upButton(action);\n }\n else {\n int off = x - mLeftBound;\n int newVal = mMin +\n (off * (mMax - mMin)) /\n (mRightBound - mLeftBound);\n if (newVal != mVal) {\n updateVal(newVal);\n invalidate = true;\n }\n }\n }\n if (invalidate) {\n invalidate();\n }\n return true;\n }\n\n private boolean downButton(int action) {\n if (action == MotionEvent.ACTION_DOWN) {\n updateVal(mVal - 1);\n return true;\n }\n else {\n return false;\n }\n 
}\n\n private boolean upButton(int action) {\n if (action == MotionEvent.ACTION_DOWN) {\n updateVal(mVal + 1);\n return true;\n }\n else {\n return false;\n }\n }\n\n private void updateVal(int val) {\n assert Thread.holdsLock(mState);\n if (val < mMin) {\n val = mMin;\n }\n else if (val > mMax) {\n val = mMax;\n }\n mVal = val;\n mListener.onPositionChange(mReversed ? (mMax - mVal) : mVal);\n }\n\n /**\n * Change the slider state to something else. This can be called \n * from non-UI threads.\n */\n public void setState(SliderState state, Listener listener, int min,\n int max, int val, int color) {\n Log.w(TAG, \"setState state=\" + state + \",min=\" + min + \",max=\" + max\n + \",color=\" + color);\n synchronized (mState) {\n mState = state;\n mListener = listener;\n\n // user input stuff\n assert (mListener != null);\n if (min < max) {\n mMin = min;\n mMax = max;\n mReversed = false;\n mVal = val;\n }\n else {\n mMin = max;\n mMax = min;\n mReversed = true;\n mVal = mMax - val;\n }\n\n // configuration\n mColor = color;\n mWidth = getWidth();\n mHeight = getHeight();\n cacheStuff();\n }\n\n postInvalidate();\n }\n\n /* ================= Lifecycle ================= */\n private void construct() {\n mState = SliderState.DISABLED;\n setFocusable(true);\n\n // Temporaries\n mTempPaint = new Paint();\n mTempRect = new Rect();\n mTempPath = new Path();\n }\n\n /** Constructor for \"manual\" instantiation */\n public SalvoSlider(Context context) {\n super(context);\n construct();\n }\n\n /** Contructor for layout file */\n public SalvoSlider(Context context, AttributeSet attrs) {\n super(context, attrs);\n construct();\n }\n}\n"},"new_file":{"kind":"string","value":"src/scorched/android/SalvoSlider.java"},"old_contents":{"kind":"string","value":"package scorched.android;\n\nimport android.content.Context;\nimport android.graphics.Canvas;\nimport android.graphics.Color;\nimport android.graphics.LinearGradient;\nimport android.graphics.Paint;\nimport 
android.graphics.Path;\nimport android.graphics.Rect;\nimport android.graphics.Shader;\nimport android.graphics.Typeface;\nimport android.util.AttributeSet;\nimport android.util.Log;\nimport android.view.MotionEvent;\nimport android.view.View;\n\n/**\n * A slider widget which the user can slide back and forth.\n *\n * There are arrows on the ends for fine adjustments.\n *\n * Most functions are synchronized on mState to prevent wackiness. The mState\n * mutex should generally be held a pretty short amount of time.\n */\npublic class SalvoSlider extends View {\n /* ================= Types ================= */\n /** Describes the state of this slider */\n public enum SliderState {\n /**\n * The slider is not in use. Only a blank space will be drawn. Touch\n * will be disabled.\n */\n DISABLED,\n\n /**\n * The slider will be drawn using a bar graphic. Touch will be enabled.\n */\n BAR,\n\n /**\n * The slider will be drawn using the angle graphic. Touch will be\n * enabled.\n */\n ANGLE,\n };\n\n /** Used to hook up Listeners */\n public static interface Listener {\n void onPositionChange(int val);\n }\n\n /* ================= Constants ================= */\n private static final String TAG = \"SalvoSlider\";\n\n private static final int BUTTON_PERCENT = 20;\n\n /* ================= Members ================= */\n private SliderState mState;\n\n // //// User input stuff\n /** Listener to notify when slider value changes */\n private Listener mListener;\n\n /** Minimum slider value */\n private int mMin;\n\n /** Maximum slider value. */\n private int mMax;\n\n /** True if the slider's value increases left-to-right rather\n * than left-to-right */\n private boolean mReversed;\n\n /** Current slider value */\n private int mVal;\n\n // //// Current configuration\n /**\n * Current slider color. 
If mColor == Color.WHITE then the slider is\n * disabled.\n */\n private int mColor;\n\n /** Current slider width */\n private int mWidth;\n\n /** Current slider height */\n private int mHeight;\n\n // //// Things computed by cacheStuff()\n /** Current left boundary of slidable area */\n private int mLeftBound;\n\n /** Current right boundary of slidable area */\n private int mRightBound;\n\n /** Gradient paint for bar */\n private Paint mBarPaint;\n\n /** Paint for text that's drawn on bars */\n private Paint mFontPaint;\n\n /////// Temporaries\n private Paint mTempPaint;\n\n private Rect mTempRect;\n\n private Path mTempPath;\n\n /* ================= Access ================= */\n\n /* ================= Operations ================= */\n /**\n * Cache a bunch of stuff that we don't want to have to recalculate on each\n * draw().\n */\n private void cacheStuff() {\n assert Thread.holdsLock(mState);\n // Don't need to cache anything for the DISABLED state\n if (mState == SliderState.DISABLED)\n return;\n mLeftBound = (mWidth * BUTTON_PERCENT) / 100;\n mRightBound = (mWidth * (100 - BUTTON_PERCENT)) / 100;\n\n if (mState == SliderState.BAR) {\n int colors[] = new int[3];\n colors[0] = Color.WHITE;\n colors[1] = mColor;\n colors[2] = Color.WHITE;\n Shader barShader = new LinearGradient(0, 0, 0, (mHeight * 3) / 2,\n colors, null, Shader.TileMode.REPEAT);\n mBarPaint = new Paint();\n mBarPaint.setShader(barShader);\n }\n\n mFontPaint = new Paint();\n mFontPaint.setColor(Color.WHITE);\n mFontPaint.setAntiAlias(true);\n adjustTypefaceToFit(mFontPaint,\n (mHeight * 4) / 5, Typeface.SANS_SERIF);\n mFontPaint.setTextAlign(Paint.Align.CENTER);\n }\n\n @Override\n protected void onDraw(Canvas canvas) {\n int x;\n synchronized (mState) {\n super.onDraw(canvas);\n switch (mState) {\n case DISABLED:\n canvas.drawColor(Color.BLACK);\n break;\n\n case BAR:\n canvas.drawColor(Color.BLACK);\n x = mLeftBound\n + (((mRightBound - mLeftBound) * mVal) /\n (mMax - mMin));\n 
mTempRect.set(mLeftBound, 0, x, mHeight);\n canvas.drawRect(mTempRect, mBarPaint);\n drawEndButtons(canvas);\n drawSliderText(canvas);\n break;\n\n case ANGLE:\n canvas.drawColor(Color.BLACK);\n\n int totalWidth = mRightBound - mLeftBound;\n int w = totalWidth / 6;\n x = mLeftBound\n + ((totalWidth * mVal) / (mMax - mMin));\n mTempPath.moveTo(x, 0);\n mTempPath.lineTo(x - w, mHeight);\n mTempPath.lineTo(x + w, mHeight);\n mTempPaint.setColor(Color.argb(255, 236, 189, 62));\n mTempPaint.setAntiAlias(true);\n mTempPaint.setStrokeWidth(1);\n canvas.drawPath(mTempPath, mTempPaint);\n mTempPath.rewind();\n drawEndButtons(canvas);\n drawSliderText(canvas);\n break;\n }\n }\n }\n\n private void drawSliderText(Canvas canvas) {\n String str;\n if (mReversed) {\n str = \"\" + (mMax - mVal);\n }\n else {\n str = \"\" + mVal;\n }\n canvas.drawText(str, mWidth / 2, (mHeight * 4) / 5, mFontPaint);\n }\n\n private void adjustTypefaceToFit(Paint p, int height, Typeface tf) {\n p.setTypeface(tf);\n Paint.FontMetrics metrics = new Paint.FontMetrics();\n int size = 40;\n p.setTextSize(size);\n p.getFontMetrics(metrics);\n int fontHeight = (int)(metrics.top + metrics.bottom);\n if (Math.abs(fontHeight - height) > 5) {\n size = (size * height) / fontHeight;\n }\n p.setTextSize(size);\n }\n\n private void drawEndButtons(Canvas canvas) {\n // Draw end buttons\n drawEndButton(canvas, 0, mLeftBound);\n drawEndButton(canvas, mWidth, mRightBound);\n }\n\n private void drawEndButton(Canvas canvas, int xLeft, int xRight) {\n mTempPaint.setColor(Color.BLACK);\n mTempPaint.setAntiAlias(false);\n mTempRect.set(xLeft, 0, xRight, mHeight);\n canvas.drawRect(mTempRect, mTempPaint);\n\n mTempPaint.setARGB(255, 236, 189, 62);\n mTempPaint.setAntiAlias(false);\n mTempPaint.setStrokeWidth(mHeight / 20);\n canvas.drawLine(xLeft, 0, xRight, 0, mTempPaint);\n canvas.drawLine(xLeft, mHeight, xRight, mHeight, mTempPaint);\n canvas.drawLine(xLeft, 0, xLeft, mHeight, mTempPaint);\n canvas.drawLine(xRight, 0, 
xRight, mHeight, mTempPaint);\n mTempPaint.setStrokeWidth(1);\n\n int w = xRight - xLeft;\n mTempPath.moveTo(xLeft + (w / 5), mHeight / 2);\n mTempPath.lineTo(xLeft + ((4 * w) / 5), mHeight / 5);\n mTempPath.lineTo(xLeft + ((4 * w) / 5), (mHeight * 4) / 5);\n mTempPaint.setAntiAlias(true);\n canvas.drawPath(mTempPath, mTempPaint);\n mTempPath.rewind();\n }\n\n @Override\n public boolean onTouchEvent(MotionEvent me) {\n boolean invalidate = false;\n synchronized (mState) {\n if (mState == SliderState.DISABLED) {\n return true;\n }\n int action = me.getAction();\n int x = (int)me.getX();\n if (x < mLeftBound) {\n invalidate = downButton(action);\n }\n else if (x > mRightBound) {\n invalidate = upButton(action);\n }\n else {\n int off = x - mLeftBound;\n int newVal = mMin +\n (off * (mMax - mMin)) /\n (mRightBound - mLeftBound);\n if (newVal != mVal) {\n updateVal(newVal);\n invalidate = true;\n }\n }\n }\n if (invalidate) {\n invalidate();\n }\n return true;\n }\n\n private boolean downButton(int action) {\n if (action == MotionEvent.ACTION_DOWN) {\n updateVal(mVal - 1);\n return true;\n }\n else {\n return false;\n }\n }\n\n private boolean upButton(int action) {\n if (action == MotionEvent.ACTION_DOWN) {\n updateVal(mVal + 1);\n return true;\n }\n else {\n return false;\n }\n }\n\n private void updateVal(int val) {\n assert Thread.holdsLock(mState);\n if (val < mMin) {\n val = mMin;\n }\n else if (val > mMax) {\n val = mMax;\n }\n mVal = val;\n mListener.onPositionChange(mReversed ? (mMax - mVal) : mVal);\n }\n\n /**\n * Change the slider state to something else. 
This can be called from non-UI\n * threads.\n */\n public void setState(SliderState state, Listener listener, int min,\n int max, int val, int color) {\n Log.w(TAG, \"setState state=\" + state + \",min=\" + min + \",max=\" + max\n + \",color=\" + color);\n synchronized (mState) {\n mState = state;\n mListener = listener;\n\n // user input stuff\n assert (mListener != null);\n if (min < max) {\n mMin = min;\n mMax = max;\n mReversed = false;\n mVal = val;\n }\n else {\n mMin = max;\n mMax = min;\n mReversed = true;\n mVal = mMax - val;\n }\n\n // configuration\n mColor = color;\n mWidth = getWidth();\n mHeight = getHeight();\n cacheStuff();\n }\n\n postInvalidate();\n }\n\n /* ================= Lifecycle ================= */\n private void construct() {\n mState = SliderState.DISABLED;\n setFocusable(true);\n\n // Temporaries\n mTempPaint = new Paint();\n mTempRect = new Rect();\n mTempPath = new Path();\n }\n\n /** Constructor for \"manual\" instantiation */\n public SalvoSlider(Context context) {\n super(context);\n construct();\n }\n\n /** Contructor for layout file */\n public SalvoSlider(Context context, AttributeSet attrs) {\n super(context, attrs);\n construct();\n }\n}\n"},"message":{"kind":"string","value":"Fix slider draw bug and fix overlong lines\n"},"old_file":{"kind":"string","value":"src/scorched/android/SalvoSlider.java"},"subject":{"kind":"string","value":"Fix slider draw bug and fix overlong lines"},"git_diff":{"kind":"string","value":"rc/scorched/android/SalvoSlider.java\n DISABLED,\n \n /**\n * The slider will be drawn using a bar graphic. Touch will be enabled.\n * The slider will be drawn using a bar graphic. 
Touch will be \n * enabled.\n */\n BAR,\n \n \n /* ================= Operations ================= */\n /**\n * Cache a bunch of stuff that we don't want to have to recalculate on each\n * draw().\n * Cache a bunch of stuff that we don't want to have to recalculate on \n * each draw().\n */\n private void cacheStuff() {\n assert Thread.holdsLock(mState);\n case BAR:\n canvas.drawColor(Color.BLACK);\n x = mLeftBound\n + (((mRightBound - mLeftBound) * mVal) /\n + (((mRightBound - mLeftBound) * (mVal - mMin)) /\n (mMax - mMin));\n mTempRect.set(mLeftBound, 0, x, mHeight);\n canvas.drawRect(mTempRect, mBarPaint);\n }\n \n /**\n * Change the slider state to something else. This can be called from non-UI\n * threads.\n * Change the slider state to something else. This can be called \n * from non-UI threads.\n */\n public void setState(SliderState state, Listener listener, int min,\n int max, int val, int color) {"}}},{"rowIdx":2079,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"mpl-2.0"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"442717be3770f5efa39c13e9cc6540d3ea9887c3"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core"},"new_contents":{"kind":"string","value":"/*************************************************************************\n *\n * OpenOffice.org - a multi-platform office productivity suite\n *\n * $RCSfile: StorageUnitTest.java,v $\n *\n * $Revision: 1.7 $\n *\n * last change: $Author: hr $ $Date: 2005-10-27 14:07:14 $\n *\n * The Contents of this file are made available subject to\n * the terms of GNU Lesser General Public License Version 2.1.\n *\n *\n * GNU Lesser General Public License Version 2.1\n * 
=============================================\n * Copyright 2005 by Sun Microsystems, Inc.\n * 901 San Antonio Road, Palo Alto, CA 94303, USA\n *\n * This library is free software; you can redistribute it and/or\n * modify it under the terms of the GNU Lesser General Public\n * License version 2.1, as published by the Free Software Foundation.\n *\n * This library is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU\n * Lesser General Public License for more details.\n *\n * You should have received a copy of the GNU Lesser General Public\n * License along with this library; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place, Suite 330, Boston,\n * MA 02111-1307 USA\n *\n ************************************************************************/\npackage complex.storages;\n\nimport com.sun.star.lang.XMultiServiceFactory;\nimport com.sun.star.lang.XMultiComponentFactory;\nimport com.sun.star.connection.XConnector;\nimport com.sun.star.connection.XConnection;\n\nimport com.sun.star.bridge.XUnoUrlResolver;\nimport com.sun.star.uno.UnoRuntime;\nimport com.sun.star.uno.XInterface;\nimport com.sun.star.uno.XNamingService;\nimport com.sun.star.uno.XComponentContext;\n\nimport com.sun.star.container.*;\nimport com.sun.star.beans.*;\nimport com.sun.star.lang.*;\n\nimport complexlib.ComplexTestCase;\n\nimport complex.storages.*;\n\nimport util.utils;\nimport java.util.*;\nimport java.io.*;\n\n/* This unit test for storage objects is designed to\n * test most important statements from storage service\n * specification.\n *\n * Regression tests are added to extend the tested\n * functionalities.\n */\npublic class StorageUnitTest extends ComplexTestCase\n{\n private XMultiServiceFactory m_xMSF = null;\n private XSingleServiceFactory m_xStorageFactory = null;\n\n public String[] getTestMethodNames()\n {\n return new String[] 
{\n \"ExecuteTest01\",\n \"ExecuteTest02\",\n \"ExecuteTest03\",\n \"ExecuteTest04\",\n \"ExecuteTest05\",\n \"ExecuteTest06\",\n \"ExecuteTest07\",\n \"ExecuteTest08\",\n \"ExecuteTest09\",\n \"ExecuteTest10\",\n \"ExecuteTest11\",\n \"ExecuteTest12\",\n \"ExecuteRegressionTest_114358\",\n \"ExecuteRegressionTest_i29169\",\n \"ExecuteRegressionTest_i30400\",\n \"ExecuteRegressionTest_i29321\",\n \"ExecuteRegressionTest_i30677\",\n \"ExecuteRegressionTest_i27773\",\n \"ExecuteRegressionTest_i46848\",\n \"ExecuteRegressionTest_i55821\"};\n }\n\n public String getTestObjectName()\n {\n return \"StorageUnitTest\";\n }\n\n public void before()\n {\n m_xMSF = (XMultiServiceFactory)param.getMSF();\n if ( m_xMSF == null )\n {\n failed( \"Can't create service factory!\" );\n return;\n }\n\n try {\n Object oStorageFactory = m_xMSF.createInstance( \"com.sun.star.embed.StorageFactory\" );\n m_xStorageFactory = (XSingleServiceFactory)UnoRuntime.queryInterface( XSingleServiceFactory.class,\n oStorageFactory );\n }\n catch( Exception e )\n {\n failed( \"Can't create storage factory!\" );\n return;\n }\n\n if ( m_xStorageFactory == null )\n {\n failed( \"Can't create service factory!\" );\n return;\n }\n }\n\n public void ExecuteTest01()\n {\n StorageTest aTest = new Test01( m_xMSF, m_xStorageFactory, log );\n assure( \"Test01 failed!\", aTest.test() );\n }\n\n public void ExecuteTest02()\n {\n StorageTest aTest = new Test02( m_xMSF, m_xStorageFactory, log );\n assure( \"Test02 failed!\", aTest.test() );\n }\n\n public void ExecuteTest03()\n {\n StorageTest aTest = new Test03( m_xMSF, m_xStorageFactory, log );\n assure( \"Test03 failed!\", aTest.test() );\n }\n\n public void ExecuteTest04()\n {\n StorageTest aTest = new Test04( m_xMSF, m_xStorageFactory, log );\n assure( \"Test04 failed!\", aTest.test() );\n }\n\n public void ExecuteTest05()\n {\n StorageTest aTest = new Test05( m_xMSF, m_xStorageFactory, log );\n assure( \"Test05 failed!\", aTest.test() );\n }\n\n public void 
ExecuteTest06()\n {\n StorageTest aTest = new Test06( m_xMSF, m_xStorageFactory, log );\n assure( \"Test06 failed!\", aTest.test() );\n }\n\n public void ExecuteTest07()\n {\n StorageTest aTest = new Test07( m_xMSF, m_xStorageFactory, log );\n assure( \"Test07 failed!\", aTest.test() );\n }\n\n public void ExecuteTest08()\n {\n StorageTest aTest = new Test08( m_xMSF, m_xStorageFactory, log );\n assure( \"Test08 failed!\", aTest.test() );\n }\n\n public void ExecuteTest09()\n {\n StorageTest aTest = new Test09( m_xMSF, m_xStorageFactory, log );\n assure( \"Test09 failed!\", aTest.test() );\n }\n\n public void ExecuteTest10()\n {\n StorageTest aTest = new Test10( m_xMSF, m_xStorageFactory, log );\n assure( \"Test10 failed!\", aTest.test() );\n }\n\n public void ExecuteTest11()\n {\n StorageTest aTest = new Test11( m_xMSF, m_xStorageFactory, log );\n assure( \"Test11 failed!\", aTest.test() );\n }\n\n public void ExecuteTest12()\n {\n StorageTest aTest = new Test12( m_xMSF, m_xStorageFactory, log );\n assure( \"Test12 failed!\", aTest.test() );\n }\n\n public void ExecuteRegressionTest_114358()\n {\n StorageTest aTest = new RegressionTest_114358( m_xMSF, m_xStorageFactory, log );\n assure( \"RegressionTest_114358 failed!\", aTest.test() );\n }\n\n public void ExecuteRegressionTest_i29169()\n {\n StorageTest aTest = new RegressionTest_i29169( m_xMSF, m_xStorageFactory, log );\n assure( \"RegressionTest_i29169 failed!\", aTest.test() );\n }\n\n public void ExecuteRegressionTest_i30400()\n {\n StorageTest aTest = new RegressionTest_i30400( m_xMSF, m_xStorageFactory, log );\n assure( \"RegressionTest_i30400 failed!\", aTest.test() );\n }\n\n public void ExecuteRegressionTest_i29321()\n {\n StorageTest aTest = new RegressionTest_i29321( m_xMSF, m_xStorageFactory, log );\n assure( \"RegressionTest_i29321 failed!\", aTest.test() );\n }\n\n public void ExecuteRegressionTest_i30677()\n {\n StorageTest aTest = new RegressionTest_i30677( m_xMSF, m_xStorageFactory, log );\n 
assure( \"RegressionTest_i30677 failed!\", aTest.test() );\n }\n\n public void ExecuteRegressionTest_i27773()\n {\n StorageTest aTest = new RegressionTest_i27773( m_xMSF, m_xStorageFactory, log );\n assure( \"RegressionTest_i27773 failed!\", aTest.test() );\n }\n\n public void ExecuteRegressionTest_i46848()\n {\n StorageTest aTest = new RegressionTest_i46848( m_xMSF, m_xStorageFactory, log );\n assure( \"RegressionTest_i46848 failed!\", aTest.test() );\n }\n\n public void ExecuteRegressionTest_i55821()\n {\n StorageTest aTest = new RegressionTest_i55821( m_xMSF, m_xStorageFactory, log );\n assure( \"RegressionTest_i55821 failed!\", aTest.test() );\n }\n}\n\n"},"new_file":{"kind":"string","value":"package/qa/storages/StorageUnitTest.java"},"old_contents":{"kind":"string","value":"/*************************************************************************\n *\n * OpenOffice.org - a multi-platform office productivity suite\n *\n * $RCSfile: StorageUnitTest.java,v $\n *\n * $Revision: 1.6 $\n *\n * last change: $Author: hr $ $Date: 2005-09-23 15:53:43 $\n *\n * The Contents of this file are made available subject to\n * the terms of GNU Lesser General Public License Version 2.1.\n *\n *\n * GNU Lesser General Public License Version 2.1\n * =============================================\n * Copyright 2005 by Sun Microsystems, Inc.\n * 901 San Antonio Road, Palo Alto, CA 94303, USA\n *\n * This library is free software; you can redistribute it and/or\n * modify it under the terms of the GNU Lesser General Public\n * License version 2.1, as published by the Free Software Foundation.\n *\n * This library is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU\n * Lesser General Public License for more details.\n *\n * You should have received a copy of the GNU Lesser General Public\n * License along with this library; if not, write to the Free Software\n * Foundation, Inc., 59 Temple Place, Suite 330, Boston,\n * MA 02111-1307 USA\n *\n ************************************************************************/\npackage complex.storages;\n\nimport com.sun.star.lang.XMultiServiceFactory;\nimport com.sun.star.lang.XMultiComponentFactory;\nimport com.sun.star.connection.XConnector;\nimport com.sun.star.connection.XConnection;\n\nimport com.sun.star.bridge.XUnoUrlResolver;\nimport com.sun.star.uno.UnoRuntime;\nimport com.sun.star.uno.XInterface;\nimport com.sun.star.uno.XNamingService;\nimport com.sun.star.uno.XComponentContext;\n\nimport com.sun.star.container.*;\nimport com.sun.star.beans.*;\nimport com.sun.star.lang.*;\n\nimport complexlib.ComplexTestCase;\n\nimport complex.storages.*;\n\nimport util.utils;\nimport java.util.*;\nimport java.io.*;\n\n/* This unit test for storage objects is designed to\n * test most important statements from storage service\n * specification.\n *\n * Regression tests are added to extend the tested\n * functionalities.\n */\npublic class StorageUnitTest extends ComplexTestCase\n{\n private XMultiServiceFactory m_xMSF = null;\n private XSingleServiceFactory m_xStorageFactory = null;\n\n public String[] getTestMethodNames()\n {\n return new String[] {\n \"ExecuteTest01\",\n \"ExecuteTest02\",\n \"ExecuteTest03\",\n \"ExecuteTest04\",\n \"ExecuteTest05\",\n \"ExecuteTest06\",\n \"ExecuteTest07\",\n \"ExecuteTest08\",\n \"ExecuteTest09\",\n \"ExecuteTest10\",\n \"ExecuteTest11\",\n \"ExecuteTest12\",\n \"ExecuteRegressionTest_114358\",\n \"ExecuteRegressionTest_i29169\",\n \"ExecuteRegressionTest_i30400\",\n \"ExecuteRegressionTest_i29321\",\n \"ExecuteRegressionTest_i30677\",\n \"ExecuteRegressionTest_i27773\",\n \"ExecuteRegressionTest_i46848\"};\n }\n\n public String 
getTestObjectName()\n {\n return \"StorageUnitTest\";\n }\n\n public void before()\n {\n m_xMSF = (XMultiServiceFactory)param.getMSF();\n if ( m_xMSF == null )\n {\n failed( \"Can't create service factory!\" );\n return;\n }\n\n try {\n Object oStorageFactory = m_xMSF.createInstance( \"com.sun.star.embed.StorageFactory\" );\n m_xStorageFactory = (XSingleServiceFactory)UnoRuntime.queryInterface( XSingleServiceFactory.class,\n oStorageFactory );\n }\n catch( Exception e )\n {\n failed( \"Can't create storage factory!\" );\n return;\n }\n\n if ( m_xStorageFactory == null )\n {\n failed( \"Can't create service factory!\" );\n return;\n }\n }\n\n public void ExecuteTest01()\n {\n StorageTest aTest = new Test01( m_xMSF, m_xStorageFactory, log );\n assure( \"Test01 failed!\", aTest.test() );\n }\n\n public void ExecuteTest02()\n {\n StorageTest aTest = new Test02( m_xMSF, m_xStorageFactory, log );\n assure( \"Test02 failed!\", aTest.test() );\n }\n\n public void ExecuteTest03()\n {\n StorageTest aTest = new Test03( m_xMSF, m_xStorageFactory, log );\n assure( \"Test03 failed!\", aTest.test() );\n }\n\n public void ExecuteTest04()\n {\n StorageTest aTest = new Test04( m_xMSF, m_xStorageFactory, log );\n assure( \"Test04 failed!\", aTest.test() );\n }\n\n public void ExecuteTest05()\n {\n StorageTest aTest = new Test05( m_xMSF, m_xStorageFactory, log );\n assure( \"Test05 failed!\", aTest.test() );\n }\n\n public void ExecuteTest06()\n {\n StorageTest aTest = new Test06( m_xMSF, m_xStorageFactory, log );\n assure( \"Test06 failed!\", aTest.test() );\n }\n\n public void ExecuteTest07()\n {\n StorageTest aTest = new Test07( m_xMSF, m_xStorageFactory, log );\n assure( \"Test07 failed!\", aTest.test() );\n }\n\n public void ExecuteTest08()\n {\n StorageTest aTest = new Test08( m_xMSF, m_xStorageFactory, log );\n assure( \"Test08 failed!\", aTest.test() );\n }\n\n public void ExecuteTest09()\n {\n StorageTest aTest = new Test09( m_xMSF, m_xStorageFactory, log );\n assure( 
\"Test09 failed!\", aTest.test() );\n }\n\n public void ExecuteTest10()\n {\n StorageTest aTest = new Test10( m_xMSF, m_xStorageFactory, log );\n assure( \"Test10 failed!\", aTest.test() );\n }\n\n public void ExecuteTest11()\n {\n StorageTest aTest = new Test11( m_xMSF, m_xStorageFactory, log );\n assure( \"Test11 failed!\", aTest.test() );\n }\n\n public void ExecuteTest12()\n {\n StorageTest aTest = new Test12( m_xMSF, m_xStorageFactory, log );\n assure( \"Test12 failed!\", aTest.test() );\n }\n\n public void ExecuteRegressionTest_114358()\n {\n StorageTest aTest = new RegressionTest_114358( m_xMSF, m_xStorageFactory, log );\n assure( \"RegressionTest_114358 failed!\", aTest.test() );\n }\n\n public void ExecuteRegressionTest_i29169()\n {\n StorageTest aTest = new RegressionTest_i29169( m_xMSF, m_xStorageFactory, log );\n assure( \"RegressionTest_i29169 failed!\", aTest.test() );\n }\n\n public void ExecuteRegressionTest_i30400()\n {\n StorageTest aTest = new RegressionTest_i30400( m_xMSF, m_xStorageFactory, log );\n assure( \"RegressionTest_i30400 failed!\", aTest.test() );\n }\n\n public void ExecuteRegressionTest_i29321()\n {\n StorageTest aTest = new RegressionTest_i29321( m_xMSF, m_xStorageFactory, log );\n assure( \"RegressionTest_i29321 failed!\", aTest.test() );\n }\n\n public void ExecuteRegressionTest_i30677()\n {\n StorageTest aTest = new RegressionTest_i30677( m_xMSF, m_xStorageFactory, log );\n assure( \"RegressionTest_i30677 failed!\", aTest.test() );\n }\n\n public void ExecuteRegressionTest_i27773()\n {\n StorageTest aTest = new RegressionTest_i27773( m_xMSF, m_xStorageFactory, log );\n assure( \"RegressionTest_i27773 failed!\", aTest.test() );\n }\n\n public void ExecuteRegressionTest_i46848()\n {\n StorageTest aTest = new RegressionTest_i46848( m_xMSF, m_xStorageFactory, log );\n assure( \"RegressionTest_i46848 failed!\", aTest.test() );\n }\n}\n\n"},"message":{"kind":"string","value":"INTEGRATION: CWS fwk24 (1.6.2); FILE MERGED\n2005/10/13 
13:57:57 mav 1.6.2.1: #i55821# second commit of a storage with encrypted entries\n"},"old_file":{"kind":"string","value":"package/qa/storages/StorageUnitTest.java"},"subject":{"kind":"string","value":"INTEGRATION: CWS fwk24 (1.6.2); FILE MERGED 2005/10/13 13:57:57 mav 1.6.2.1: #i55821# second commit of a storage with encrypted entries"},"git_diff":{"kind":"string","value":"ackage/qa/storages/StorageUnitTest.java\n *\n * $RCSfile: StorageUnitTest.java,v $\n *\n * $Revision: 1.6 $\n *\n * last change: $Author: hr $ $Date: 2005-09-23 15:53:43 $\n * $Revision: 1.7 $\n *\n * last change: $Author: hr $ $Date: 2005-10-27 14:07:14 $\n *\n * The Contents of this file are made available subject to\n * the terms of GNU Lesser General Public License Version 2.1.\n \"ExecuteRegressionTest_i29321\",\n \"ExecuteRegressionTest_i30677\",\n \"ExecuteRegressionTest_i27773\",\n \"ExecuteRegressionTest_i46848\"};\n \"ExecuteRegressionTest_i46848\",\n \"ExecuteRegressionTest_i55821\"};\n }\n \n public String getTestObjectName()\n StorageTest aTest = new RegressionTest_i46848( m_xMSF, m_xStorageFactory, log );\n assure( \"RegressionTest_i46848 failed!\", aTest.test() );\n }\n\n public void ExecuteRegressionTest_i55821()\n {\n StorageTest aTest = new RegressionTest_i55821( m_xMSF, m_xStorageFactory, log );\n assure( \"RegressionTest_i55821 failed!\", aTest.test() );\n }\n }\n "}}},{"rowIdx":2080,"cells":{"lang":{"kind":"string","value":"JavaScript"},"license":{"kind":"string","value":"mit"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"06b2ec27ac5489691aa65fa50b8038894fd273bc"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"nehasingh189/mll,nehasingh189/mll,nehasingh189/mll,nehasingh189/mll"},"new_contents":{"kind":"string","value":"'use strict';\n\nlet argv = require('yargs').argv;\nlet del = require('del');\nlet gulp = require('gulp');\nlet htmlmin = require('gulp-htmlmin');\nlet gulpif = require('gulp-if');\nlet jshint 
= require('gulp-jshint');\nlet minifycss = require('gulp-minify-css');\nlet rev = require('gulp-rev');\nlet runSequence = require('run-sequence');\nlet server = require('karma').Server;\nlet stylish = require('jshint-stylish');\nlet templateCache = require('gulp-angular-templatecache');\nlet uglify = require('gulp-uglify');\nlet usemin = require('gulp-usemin');\n\n/**\n * Build Task\n * Cleans the dist and rebuilds files\n */\ngulp.task('build', (done) => {\n runSequence('cleanup', ['fonts', 'templates'], 'usemin', function() {\n done();\n })\n});\n\n/**\n * Clean-Up Task\n * Deletes the dist directory and the generated index file\n */\ngulp.task('cleanup', (done) => {\n del(['./dist', './index.html']);\n done();\n});\n\n/**\n * Develop Task\n * Automatically watches files and rebuilds if there is a change\n */\ngulp.task('develop', (done) => {\n gulp.watch('./source/*.*', { interval: 1000 }, ['build']);\n console.log('Develop is running! Make some changes in ./source/');\n console.log('CTRL^C to exit');\n});\n\n/**\n * Fonts Task\n * Generates the fonts folder in the dist\n */\ngulp.task('fonts', () => {\n gulp.src('./bower_components/font-awesome/fonts/**/*.{ttf,woff,eof,svg}*')\n .pipe(gulp.dest('./dist/fonts'));\n gulp.src('./bower_components/bootstrap/dist/fonts/**/*.{ttf,woff,eof,svg}*')\n .pipe(gulp.dest('./dist/fonts'));\n gulp.src('./source/fonts/*.*')\n .pipe(gulp.dest('./dist/fonts'));\n});\n\n/**\n * Code Quality Task\n */\ngulp.task('jshint', (done) => {\n gulp.src(['./source/scripts/**/*.js', '!./source/scripts/modules/templates/*.js'])\n .pipe(jshint())\n .pipe(jshint.reporter(stylish));\n done();\n});\n\n/**\n * Generate the template-cache file\n */\ngulp.task('templates', () => {\n return gulp.src('./source/scripts/**/*.html')\n //.pipe(htmlmin({ collapseWhitespace: true }))\n .pipe(templateCache({\n module: 'mllApp.templates',\n standalone: true,\n filename: 'templates.module.js',\n /*transformUrl: (url) => url.slice(url.lastIndexOf('\\\\') + 
1),*/\n transformUrl: (url) => {\n return url.substr(url.lastIndexOf('\\\\') + 1);\n }\n }))\n .pipe(gulp.dest('./source/scripts/modules/templates/'));\n});\n\n/**\n * Test-runner Task\n */\ngulp.task('test', function (done) {\n new server({\n configFile: __dirname + '/karma.conf.js',\n singleRun: true\n }, done()).start();\n});\n\n/**\n * Usemin Task\n * Runs the usemin library to consolidate all the source files.\n * Then generates index files with refenrences to the current build.\n */\ngulp.task('usemin', ['templates'], function () {\n return gulp.src('./source/index.html')\n .pipe(usemin({\n cssBootstrap: [minifycss(), rev()],\n cssCustom: [minifycss(), rev()],\n jsJQuery: [gulpif(argv.prd, uglify()), rev()],\n jsBootstrap: [gulpif(argv.prd, uglify()), rev()],\n jsAngular: [gulpif(argv.prd, uglify()), rev()],\n jsTemplates: [gulpif(argv.prd, uglify()), rev()],\n jsCustom: [rev()]\n }))\n .pipe(gulp.dest('./'));\n});\n"},"new_file":{"kind":"string","value":"src/main/webapp/gulpfile.js"},"old_contents":{"kind":"string","value":"'use strict';\n\nlet argv = require('yargs').argv;\nlet del = require('del');\nlet gulp = require('gulp');\nlet htmlmin = require('gulp-htmlmin');\nlet gulpif = require('gulp-if');\nlet jshint = require('gulp-jshint');\nlet minifycss = require('gulp-minify-css');\nlet rev = require('gulp-rev');\nlet runSequence = require('run-sequence');\nlet server = require('karma').Server;\nlet stylish = require('jshint-stylish');\nlet templateCache = require('gulp-angular-templatecache');\nlet uglify = require('gulp-uglify');\nlet usemin = require('gulp-usemin');\n\n/**\n * Build Task\n * Cleans the dist and rebuilds files\n */\ngulp.task('build', (done) => {\n runSequence('cleanup', ['fonts', 'templates'], 'usemin', function() {\n done();\n })\n});\n\n/**\n * Clean-Up Task\n * Deletes the dist directory and the generated index file\n */\ngulp.task('cleanup', (done) => {\n del(['./dist', './index.html']);\n done();\n});\n\n/**\n * Develop Task\n * 
Automatically watches files and rebuilds if there is a change\n */\ngulp.task('develop', (done) => {\n gulp.watch('./source/*.*', { interval: 1000 }, ['build']);\n console.log('Develop is running! Make some changes in ./source/');\n console.log('CTRL^C to exit');\n});\n\n/**\n * Fonts Task\n * Generates the fonts folder in the dist\n */\ngulp.task('fonts', () => {\n gulp.src('./bower_components/font-awesome/fonts/**/*.{ttf,woff,eof,svg}*')\n .pipe(gulp.dest('./dist/fonts'));\n gulp.src('./bower_components/bootstrap/dist/fonts/**/*.{ttf,woff,eof,svg}*')\n .pipe(gulp.dest('./dist/fonts'));\n gulp.src('./source/fonts/*.*')\n .pipe(gulp.dest('./dist/fonts'));\n});\n\n/**\n * Code Quality Task\n */\ngulp.task('jshint', (done) => {\n gulp.src(['./source/scripts/**/*.js', '!./source/scripts/modules/templates/*.js'])\n .pipe(jshint())\n .pipe(jshint.reporter(stylish));\n done();\n});\n\n/**\n * Generate the template-cache file\n */\ngulp.task('templates', () => {\n return gulp.src('./source/scripts/**/*.html')\n //.pipe(htmlmin({ collapseWhitespace: true }))\n .pipe(templateCache({\n module: 'mllApp.templates',\n standalone: true,\n filename: 'templates.module.js',\n /*transformUrl: (url) => url.slice(url.lastIndexOf('\\\\') + 1),*/\n transformUrl: (url) => {\n return url.substr(url.lastIndexOf('\\\\') + 1);\n }\n }))\n .pipe(gulp.dest('./source/scripts/modules/templates/'));\n});\n\n/**\n * Test-runner Task\n */\ngulp.task('test', function (done) {\n new server({\n configFile: __dirname + '/karma.conf.js',\n singleRun: true\n }, done).start();\n});\n\n/**\n * Usemin Task\n * Runs the usemin library to consolidate all the source files.\n * Then generates index files with refenrences to the current build.\n */\ngulp.task('usemin', ['templates'], function () {\n return gulp.src('./source/index.html')\n .pipe(usemin({\n cssBootstrap: [minifycss(), rev()],\n cssCustom: [minifycss(), rev()],\n jsJQuery: [gulpif(argv.prd, uglify()), rev()],\n jsBootstrap: [gulpif(argv.prd, 
uglify()), rev()],\n jsAngular: [gulpif(argv.prd, uglify()), rev()],\n jsTemplates: [gulpif(argv.prd, uglify()), rev()],\n jsCustom: [rev()]\n }))\n .pipe(gulp.dest('./'));\n});\n"},"message":{"kind":"string","value":"add done function call to prevent gulp error in test task\n"},"old_file":{"kind":"string","value":"src/main/webapp/gulpfile.js"},"subject":{"kind":"string","value":"add done function call to prevent gulp error in test task"},"git_diff":{"kind":"string","value":"rc/main/webapp/gulpfile.js\n new server({\n configFile: __dirname + '/karma.conf.js',\n singleRun: true\n }, done).start();\n }, done()).start();\n });\n \n /**"}}},{"rowIdx":2081,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"mit"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"457d257c251cb7d255bdd8e58cd5bf51ebd26af1"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"carrot/cream"},"new_contents":{"kind":"string","value":"package com.carrotcreative.cream.cache;\n\nimport android.content.Context;\n\nimport com.carrotcreative.cream.tasks.ReadSerializableTask;\nimport com.carrotcreative.cream.tasks.WriteSerializableTask;\n\nimport java.io.File;\nimport java.io.FileFilter;\nimport java.io.Serializable;\nimport java.util.regex.Pattern;\n\npublic class CacheManager {\n\n public static final String PREFIX_EXPIRATION_DELIMITER = \"-CR-\";\n\n //=======================================\n //============== Singleton ==============\n //=======================================\n\n private static CacheManager sInstance;\n\n public static CacheManager getInstance(Context context){\n if(sInstance == null){\n sInstance = new CacheManager(context);\n }\n return sInstance;\n }\n\n //===================================\n //============== Class ==============\n //===================================\n\n private final File mRootDir;\n private final Context mContext;\n\n private CacheManager(Context context)\n {\n 
mContext = context;\n mRootDir = context.getCacheDir();\n }\n\n //============================================\n //================== Cache ===================\n //============================================\n\n public void readSerializable(String directoryString, String fileExtension, String prefix, boolean regardForExpiration,\n ReadSerializableTask.ReadSerializableCallback cb)\n {\n File directory = new File(mRootDir, directoryString);\n\n //Finding the file\n final File[] matchingFiles = getMatchingFiles(directory, prefix, fileExtension);\n for(File f : matchingFiles)\n {\n long expiration = getFileExpiration(f, fileExtension);\n\n //If it's not expired, or we have no regard for expiration\n if(!(System.currentTimeMillis() > expiration) || !regardForExpiration) {\n readSerializable(f, cb);\n return;\n }\n }\n cb.failure(null);\n }\n\n public void writeSerializable(String directoryString, long expirationMinutes, String fileExtension, String prefix, Serializable content, WriteSerializableTask.WriteSerializableCallback cb)\n {\n File directory = new File(mRootDir, directoryString);\n long expiration = getExpirationEpochMinutes(expirationMinutes);\n String fileString = prefix + PREFIX_EXPIRATION_DELIMITER + expiration + \".\" + fileExtension;\n File file = new File(directory, fileString);\n deleteAllByPrefix(prefix, directory, fileExtension);\n writeSerializable(content, file, cb);\n }\n\n /**\n * This goes and deletes files that are expired by trashDays\n */\n public void runTrashCleanup(String directoryString, String fileExtension, long trashMinutes)\n {\n File cleanupDir = new File(mRootDir, directoryString);\n File[] allFiles = cleanupDir.listFiles();\n\n //http://docs.oracle.com/javase/1.5.0/docs/api/java/io/File.html#listFiles%28%29\n if(allFiles != null){\n for(File f : allFiles)\n {\n if(f.toString().endsWith(fileExtension))\n {\n long trashDate = getFileTrashDate(f, fileExtension, trashMinutes);\n if(f.isFile() && (System.currentTimeMillis() > 
trashDate))\n {\n f.delete();\n }\n }\n }\n }\n }\n\n //==============================================\n //============== Helper Functions ==============\n //==============================================\n\n /**\n * Only to be ran by write functions.\n *\n * We're writing a fresh object, so obviously\n * we want to delete all of the old ones.\n */\n private void deleteAllByPrefix(String prefix, File directory, String fileExtension)\n {\n final File[] matchingFiles = getMatchingFiles(directory, prefix, fileExtension);\n for(File f : matchingFiles)\n {\n f.delete();\n }\n }\n\n private long getFileTrashDate(File f, String extension, long trashMinutes)\n {\n long fileExpiration = getFileExpiration(f, extension);\n long diff = 1000 * 60 * trashMinutes;\n return fileExpiration + diff;\n }\n\n private long getFileExpiration(File f, String extension)\n {\n String expirationString = f.getName()\n .replaceFirst(\".*\" + PREFIX_EXPIRATION_DELIMITER, \"\")\n .replace(\".\" + extension, \"\");\n return Long.parseLong(expirationString);\n }\n\n private long getExpirationEpochMinutes(long minutes)\n {\n long diff = 1000 * 60 * minutes;\n return System.currentTimeMillis() + diff;\n }\n\n private static File[] getMatchingFiles(File root, String prefix, String fileExtension) {\n String regex = prefix + PREFIX_EXPIRATION_DELIMITER + \".*\" + \"\\\\.\" + fileExtension;\n if(!root.isDirectory())\n {\n root.mkdir();\n return new File[0];\n }\n final Pattern p = Pattern.compile(regex); // careful: could also throw an exception!\n return root.listFiles(new FileFilter(){\n @Override\n public boolean accept(File file) {\n return p.matcher(file.getName()).matches();\n }\n });\n }\n\n private static void writeSerializable(Serializable obj, File file,\n WriteSerializableTask.WriteSerializableCallback cb) {\n\n WriteSerializableTask task = new WriteSerializableTask(obj, file, cb);\n Void[] voidArray = new Void[0];\n task.execute(voidArray);\n }\n\n private void readSerializable(File file, 
ReadSerializableTask.ReadSerializableCallback cb) {\n\n ReadSerializableTask task = new ReadSerializableTask(cb, file);\n Void[] voidArray = new Void[0];\n task.execute(voidArray);\n }\n\n}"},"new_file":{"kind":"string","value":"src/main/java/com/carrotcreative/cream/cache/CacheManager.java"},"old_contents":{"kind":"string","value":"package com.carrotcreative.cream.cache;\n\nimport android.content.Context;\n\nimport com.carrotcreative.cream.tasks.ReadSerializableTask;\nimport com.carrotcreative.cream.tasks.WriteSerializableTask;\n\nimport java.io.File;\nimport java.io.FileFilter;\nimport java.io.Serializable;\nimport java.util.regex.Pattern;\n\npublic class CacheManager {\n\n //=======================================\n //============== Singleton ==============\n //=======================================\n\n private static CacheManager sInstance;\n\n public static CacheManager getInstance(Context context){\n if(sInstance == null){\n sInstance = new CacheManager(context);\n }\n return sInstance;\n }\n\n //===================================\n //============== Class ==============\n //===================================\n\n private final File mRootDir;\n private final Context mContext;\n\n private CacheManager(Context context)\n {\n mContext = context;\n mRootDir = context.getCacheDir();\n }\n\n //============================================\n //================== Cache ===================\n //============================================\n\n public void readSerializable(String directoryString, String fileExtension, String prefix, boolean regardForExpiration,\n ReadSerializableTask.ReadSerializableCallback cb)\n {\n File directory = new File(mRootDir, directoryString);\n\n //Finding the file\n final File[] matchingFiles = getMatchingFiles(directory, prefix, fileExtension);\n for(File f : matchingFiles)\n {\n long expiration = getFileExpiration(f, fileExtension);\n\n //If it's not expired, or we have no regard for expiration\n if(!(System.currentTimeMillis() > 
expiration) || !regardForExpiration) {\n readSerializable(f, cb);\n return;\n }\n }\n cb.failure(null);\n }\n\n public void writeSerializable(String directoryString, long expirationMinutes, String fileExtension, String prefix, Serializable content, WriteSerializableTask.WriteSerializableCallback cb)\n {\n File directory = new File(mRootDir, directoryString);\n\n long expiration = getExpirationEpochMinutes(expirationMinutes);\n String fileString = prefix + \"-\" + expiration + \".\" + fileExtension;\n File file = new File(directory, fileString);\n deleteAllByPrefix(prefix, directory, fileExtension);\n writeSerializable(content, file, cb);\n }\n\n /**\n * This goes and deletes files that are expired by trashDays\n */\n public void runTrashCleanup(String directoryString, String fileExtension, long trashMinutes)\n {\n File cleanupDir = new File(mRootDir, directoryString);\n File[] allFiles = cleanupDir.listFiles();\n\n //http://docs.oracle.com/javase/1.5.0/docs/api/java/io/File.html#listFiles%28%29\n if(allFiles != null){\n for(File f : allFiles)\n {\n if(f.toString().endsWith(fileExtension))\n {\n long trashDate = getFileTrashDate(f, fileExtension, trashMinutes);\n if(f.isFile() && (System.currentTimeMillis() > trashDate))\n {\n f.delete();\n }\n }\n }\n }\n }\n\n //==============================================\n //============== Helper Functions ==============\n //==============================================\n\n /**\n * Only to be ran by write functions.\n *\n * We're writing a fresh object, so obviously\n * we want to delete all of the old ones.\n */\n private void deleteAllByPrefix(String prefix, File directory, String fileExtension)\n {\n final File[] matchingFiles = getMatchingFiles(directory, prefix, fileExtension);\n for(File f : matchingFiles)\n {\n f.delete();\n }\n }\n\n private long getFileTrashDate(File f, String extension, long trashMinutes)\n {\n long fileExpiration = getFileExpiration(f, extension);\n long diff = 1000 * 60 * trashMinutes;\n return 
fileExpiration + diff;\n }\n\n private long getFileExpiration(File f, String extension)\n {\n String expirationString = f.getName().replaceFirst(\".*-\", \"\").replace(\".\" + extension, \"\");\n return Long.parseLong(expirationString);\n }\n\n private long getExpirationEpochMinutes(long minutes)\n {\n long diff = 1000 * 60 * minutes;\n return System.currentTimeMillis() + diff;\n }\n\n private static File[] getMatchingFiles(File root, String prefix, String fileExtension) {\n String regex = prefix + \"-.*\" + \".\" + fileExtension;\n if(!root.isDirectory()) {\n root.mkdir();\n return new File[0];\n }\n final Pattern p = Pattern.compile(regex); // careful: could also throw an exception!\n return root.listFiles(new FileFilter(){\n @Override\n public boolean accept(File file) {\n return p.matcher(file.getName()).matches();\n }\n });\n }\n\n private static void writeSerializable(Serializable obj, File file,\n WriteSerializableTask.WriteSerializableCallback cb) {\n\n WriteSerializableTask task = new WriteSerializableTask(obj, file, cb);\n Void[] voidArray = new Void[0];\n task.execute(voidArray);\n }\n\n private void readSerializable(File file, ReadSerializableTask.ReadSerializableCallback cb) {\n\n ReadSerializableTask task = new ReadSerializableTask(cb, file);\n Void[] voidArray = new Void[0];\n task.execute(voidArray);\n }\n\n}"},"message":{"kind":"string","value":"Updating file delimiter to be more specific\n\nWas running into issues with the actual identifier was messing with\nthe regex splitting between the expiration + identifier.\n"},"old_file":{"kind":"string","value":"src/main/java/com/carrotcreative/cream/cache/CacheManager.java"},"subject":{"kind":"string","value":"Updating file delimiter to be more specific"},"git_diff":{"kind":"string","value":"rc/main/java/com/carrotcreative/cream/cache/CacheManager.java\n import java.util.regex.Pattern;\n \n public class CacheManager {\n\n public static final String PREFIX_EXPIRATION_DELIMITER = \"-CR-\";\n \n 
//=======================================\n //============== Singleton ==============\n public void writeSerializable(String directoryString, long expirationMinutes, String fileExtension, String prefix, Serializable content, WriteSerializableTask.WriteSerializableCallback cb)\n {\n File directory = new File(mRootDir, directoryString);\n\n long expiration = getExpirationEpochMinutes(expirationMinutes);\n String fileString = prefix + \"-\" + expiration + \".\" + fileExtension;\n String fileString = prefix + PREFIX_EXPIRATION_DELIMITER + expiration + \".\" + fileExtension;\n File file = new File(directory, fileString);\n deleteAllByPrefix(prefix, directory, fileExtension);\n writeSerializable(content, file, cb);\n \n private long getFileExpiration(File f, String extension)\n {\n String expirationString = f.getName().replaceFirst(\".*-\", \"\").replace(\".\" + extension, \"\");\n String expirationString = f.getName()\n .replaceFirst(\".*\" + PREFIX_EXPIRATION_DELIMITER, \"\")\n .replace(\".\" + extension, \"\");\n return Long.parseLong(expirationString);\n }\n \n }\n \n private static File[] getMatchingFiles(File root, String prefix, String fileExtension) {\n String regex = prefix + \"-.*\" + \".\" + fileExtension;\n if(!root.isDirectory()) {\n String regex = prefix + PREFIX_EXPIRATION_DELIMITER + \".*\" + \"\\\\.\" + fileExtension;\n if(!root.isDirectory())\n {\n root.mkdir();\n return new File[0];\n }"}}},{"rowIdx":2082,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"stderr":{"kind":"string","value":"error: pathspec 'xmvn-it/src/test/java/org/fedoraproject/xmvn/it/ArchiveLayoutIntegrationTest.java' did not match any file(s) known to git\n"},"commit":{"kind":"string","value":"3fb18db2b7b119fa414a48493b2d0c74d87a3865"},"returncode":{"kind":"number","value":1,"string":"1"},"repos":{"kind":"string","value":"fedora-java/xmvn,fedora-java/xmvn,fedora-java/xmvn"},"new_contents":{"kind":"string","value":"/*-\n * Copyright (c) 
2021 Red Hat, Inc.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage org.fedoraproject.xmvn.it;\n\nimport static org.junit.jupiter.api.Assertions.fail;\n\nimport java.nio.file.DirectoryStream;\nimport java.nio.file.Files;\nimport java.nio.file.LinkOption;\nimport java.nio.file.Path;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.regex.Pattern;\n\nimport org.junit.jupiter.api.Test;\n\n/**\n * Test whether binary distribution has expected layout.\n * \n * @author Mikolaj Izdebski\n */\npublic class ArchiveLayoutIntegrationTest\n extends AbstractIntegrationTest\n{\n private static class PathExpectation\n {\n private final String regex;\n\n private final Pattern pattern;\n\n private final int lowerBound;\n\n private final int upperBound;\n\n private int matchCount;\n\n public PathExpectation( int lowerBound, int upperBound, String regex )\n {\n this.regex = regex;\n this.lowerBound = lowerBound;\n this.upperBound = upperBound;\n pattern = Pattern.compile( regex );\n }\n\n public boolean matches( String path )\n {\n if ( pattern.matcher( path ).matches() )\n {\n matchCount++;\n return true;\n }\n\n return false;\n }\n\n public void verify( List errors )\n {\n if ( matchCount < lowerBound || matchCount > upperBound )\n {\n errors.add( \"Pattern \" + regex + \" was expected at least \" + lowerBound + \" and at most \" + upperBound\n + \" times, but was found \" + matchCount + \" times\" );\n }\n }\n }\n\n private List 
expectations = new ArrayList<>();\n\n private void expect( int lowerBound, int upperBound, String regex )\n {\n expectations.add( new PathExpectation( lowerBound, upperBound, regex ) );\n }\n\n private void matchSingleFile( Path baseDir, Path path, String dirSuffix, List errors )\n {\n String pathStr = baseDir.relativize( path ) + dirSuffix;\n\n if ( expectations.stream().filter( expectation -> expectation.matches( pathStr ) ).count() == 0 )\n {\n errors.add( \"Path \" + pathStr + \" did not match any pattern\" );\n }\n }\n\n private void matchDirectoryTree( Path baseDir, Path dir, List errors )\n throws Exception\n {\n matchSingleFile( baseDir, dir, \"/\", errors );\n\n try ( DirectoryStream ds = Files.newDirectoryStream( dir ) )\n {\n for ( Path path : ds )\n {\n if ( Files.isDirectory( path, LinkOption.NOFOLLOW_LINKS ) )\n {\n matchDirectoryTree( baseDir, path, errors );\n }\n else\n {\n matchSingleFile( baseDir, path, \"\", errors );\n }\n }\n }\n }\n\n @Test\n public void testArchiveLayout()\n throws Exception\n {\n expect( 1, 1, \"/\" );\n expect( 1, 1, \"LICENSE\" );\n expect( 1, 1, \"NOTICE\" );\n expect( 1, 1, \"README\\\\.txt\" );\n expect( 1, 1, \"NOTICE-XMVN\" );\n expect( 1, 1, \"AUTHORS-XMVN\" );\n expect( 1, 1, \"README-XMVN\\\\.md\" );\n\n expect( 1, 1, \"bin/\" );\n expect( 1, 1, \"bin/mvn\" );\n expect( 1, 1, \"bin/mvn\\\\.cmd\" );\n expect( 1, 1, \"bin/mvnDebug\" );\n expect( 1, 1, \"bin/mvnDebug\\\\.cmd\" );\n expect( 1, 1, \"bin/mvnyjp\" );\n expect( 1, 1, \"bin/m2\\\\.conf\" );\n\n expect( 1, 1, \"boot/\" );\n expect( 1, 1, \"boot/plexus-classworlds-.*\\\\.jar\" );\n expect( 1, 1, \"boot/plexus-classworlds.license\" );\n\n expect( 1, 1, \"conf/\" );\n expect( 1, 1, \"conf/settings\\\\.xml\" );\n expect( 1, 1, \"conf/toolchains\\\\.xml\" );\n expect( 1, 1, \"conf/logging/\" );\n expect( 1, 1, \"conf/logging/simplelogger\\\\.properties\" );\n\n expect( 1, 1, \"lib/\" );\n expect( 30, 60, \"lib/[^/]*\\\\.jar\" );\n expect( 15, 30, 
\"lib/[^/]*\\\\.license\" );\n expect( 10, 100, \"lib/jansi-native/.*\" );\n\n expect( 1, 1, \"lib/ext/\" );\n expect( 1, 1, \"lib/ext/README\\\\.txt\" );\n expect( 1, 1, \"lib/ext/xmvn-connector-.*\\\\.jar\" );\n expect( 1, 1, \"lib/ext/xmvn-core-.*\\\\.jar\" );\n expect( 1, 1, \"lib/ext/xmvn-api-.*\\\\.jar\" );\n\n expect( 1, 1, \"lib/installer/\" );\n expect( 1, 1, \"lib/installer/xmvn-install-.*\\\\.jar\" );\n expect( 1, 1, \"lib/installer/xmvn-api-.*\\\\.jar\" );\n expect( 1, 1, \"lib/installer/xmvn-core-.*\\\\.jar\" );\n expect( 1, 1, \"lib/installer/jcommander-.*\\\\.jar\" );\n expect( 1, 1, \"lib/installer/slf4j-api-.*\\\\.jar\" );\n expect( 1, 1, \"lib/installer/slf4j-simple-.*\\\\.jar\" );\n expect( 1, 1, \"lib/installer/asm-.*\\\\.jar\" );\n expect( 1, 1, \"lib/installer/commons-compress-.*\\\\.jar\" );\n\n expect( 1, 1, \"lib/resolver/\" );\n expect( 1, 1, \"lib/resolver/xmvn-resolve-.*\\\\.jar\" );\n expect( 1, 1, \"lib/resolver/xmvn-api-.*\\\\.jar\" );\n expect( 1, 1, \"lib/resolver/xmvn-core-.*\\\\.jar\" );\n expect( 1, 1, \"lib/resolver/jcommander-.*\\\\.jar\" );\n\n expect( 1, 1, \"lib/subst/\" );\n expect( 1, 1, \"lib/subst/xmvn-subst-.*\\\\.jar\" );\n expect( 1, 1, \"lib/subst/xmvn-api-.*\\\\.jar\" );\n expect( 1, 1, \"lib/subst/xmvn-core-.*\\\\.jar\" );\n expect( 1, 1, \"lib/subst/jcommander-.*\\\\.jar\" );\n\n Path baseDir = getMavenHome();\n List errors = new ArrayList<>();\n matchDirectoryTree( baseDir, baseDir, errors );\n\n for ( PathExpectation expect : expectations )\n {\n expect.verify( errors );\n }\n\n if ( !errors.isEmpty() )\n {\n fail( String.join( \"\\n\", errors ) );\n }\n }\n}\n"},"new_file":{"kind":"string","value":"xmvn-it/src/test/java/org/fedoraproject/xmvn/it/ArchiveLayoutIntegrationTest.java"},"old_contents":{"kind":"string","value":""},"message":{"kind":"string","value":"[it] Add test for distribution archive 
layout\n"},"old_file":{"kind":"string","value":"xmvn-it/src/test/java/org/fedoraproject/xmvn/it/ArchiveLayoutIntegrationTest.java"},"subject":{"kind":"string","value":"[it] Add test for distribution archive layout"},"git_diff":{"kind":"string","value":"mvn-it/src/test/java/org/fedoraproject/xmvn/it/ArchiveLayoutIntegrationTest.java\n/*-\n * Copyright (c) 2021 Red Hat, Inc.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage org.fedoraproject.xmvn.it;\n\nimport static org.junit.jupiter.api.Assertions.fail;\n\nimport java.nio.file.DirectoryStream;\nimport java.nio.file.Files;\nimport java.nio.file.LinkOption;\nimport java.nio.file.Path;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.regex.Pattern;\n\nimport org.junit.jupiter.api.Test;\n\n/**\n * Test whether binary distribution has expected layout.\n * \n * @author Mikolaj Izdebski\n */\npublic class ArchiveLayoutIntegrationTest\n extends AbstractIntegrationTest\n{\n private static class PathExpectation\n {\n private final String regex;\n\n private final Pattern pattern;\n\n private final int lowerBound;\n\n private final int upperBound;\n\n private int matchCount;\n\n public PathExpectation( int lowerBound, int upperBound, String regex )\n {\n this.regex = regex;\n this.lowerBound = lowerBound;\n this.upperBound = upperBound;\n pattern = Pattern.compile( regex );\n }\n\n public boolean matches( String path )\n {\n if ( pattern.matcher( path ).matches() )\n {\n 
matchCount++;\n return true;\n }\n\n return false;\n }\n\n public void verify( List errors )\n {\n if ( matchCount < lowerBound || matchCount > upperBound )\n {\n errors.add( \"Pattern \" + regex + \" was expected at least \" + lowerBound + \" and at most \" + upperBound\n + \" times, but was found \" + matchCount + \" times\" );\n }\n }\n }\n\n private List expectations = new ArrayList<>();\n\n private void expect( int lowerBound, int upperBound, String regex )\n {\n expectations.add( new PathExpectation( lowerBound, upperBound, regex ) );\n }\n\n private void matchSingleFile( Path baseDir, Path path, String dirSuffix, List errors )\n {\n String pathStr = baseDir.relativize( path ) + dirSuffix;\n\n if ( expectations.stream().filter( expectation -> expectation.matches( pathStr ) ).count() == 0 )\n {\n errors.add( \"Path \" + pathStr + \" did not match any pattern\" );\n }\n }\n\n private void matchDirectoryTree( Path baseDir, Path dir, List errors )\n throws Exception\n {\n matchSingleFile( baseDir, dir, \"/\", errors );\n\n try ( DirectoryStream ds = Files.newDirectoryStream( dir ) )\n {\n for ( Path path : ds )\n {\n if ( Files.isDirectory( path, LinkOption.NOFOLLOW_LINKS ) )\n {\n matchDirectoryTree( baseDir, path, errors );\n }\n else\n {\n matchSingleFile( baseDir, path, \"\", errors );\n }\n }\n }\n }\n\n @Test\n public void testArchiveLayout()\n throws Exception\n {\n expect( 1, 1, \"/\" );\n expect( 1, 1, \"LICENSE\" );\n expect( 1, 1, \"NOTICE\" );\n expect( 1, 1, \"README\\\\.txt\" );\n expect( 1, 1, \"NOTICE-XMVN\" );\n expect( 1, 1, \"AUTHORS-XMVN\" );\n expect( 1, 1, \"README-XMVN\\\\.md\" );\n\n expect( 1, 1, \"bin/\" );\n expect( 1, 1, \"bin/mvn\" );\n expect( 1, 1, \"bin/mvn\\\\.cmd\" );\n expect( 1, 1, \"bin/mvnDebug\" );\n expect( 1, 1, \"bin/mvnDebug\\\\.cmd\" );\n expect( 1, 1, \"bin/mvnyjp\" );\n expect( 1, 1, \"bin/m2\\\\.conf\" );\n\n expect( 1, 1, \"boot/\" );\n expect( 1, 1, \"boot/plexus-classworlds-.*\\\\.jar\" );\n expect( 1, 1, 
\"boot/plexus-classworlds.license\" );\n\n expect( 1, 1, \"conf/\" );\n expect( 1, 1, \"conf/settings\\\\.xml\" );\n expect( 1, 1, \"conf/toolchains\\\\.xml\" );\n expect( 1, 1, \"conf/logging/\" );\n expect( 1, 1, \"conf/logging/simplelogger\\\\.properties\" );\n\n expect( 1, 1, \"lib/\" );\n expect( 30, 60, \"lib/[^/]*\\\\.jar\" );\n expect( 15, 30, \"lib/[^/]*\\\\.license\" );\n expect( 10, 100, \"lib/jansi-native/.*\" );\n\n expect( 1, 1, \"lib/ext/\" );\n expect( 1, 1, \"lib/ext/README\\\\.txt\" );\n expect( 1, 1, \"lib/ext/xmvn-connector-.*\\\\.jar\" );\n expect( 1, 1, \"lib/ext/xmvn-core-.*\\\\.jar\" );\n expect( 1, 1, \"lib/ext/xmvn-api-.*\\\\.jar\" );\n\n expect( 1, 1, \"lib/installer/\" );\n expect( 1, 1, \"lib/installer/xmvn-install-.*\\\\.jar\" );\n expect( 1, 1, \"lib/installer/xmvn-api-.*\\\\.jar\" );\n expect( 1, 1, \"lib/installer/xmvn-core-.*\\\\.jar\" );\n expect( 1, 1, \"lib/installer/jcommander-.*\\\\.jar\" );\n expect( 1, 1, \"lib/installer/slf4j-api-.*\\\\.jar\" );\n expect( 1, 1, \"lib/installer/slf4j-simple-.*\\\\.jar\" );\n expect( 1, 1, \"lib/installer/asm-.*\\\\.jar\" );\n expect( 1, 1, \"lib/installer/commons-compress-.*\\\\.jar\" );\n\n expect( 1, 1, \"lib/resolver/\" );\n expect( 1, 1, \"lib/resolver/xmvn-resolve-.*\\\\.jar\" );\n expect( 1, 1, \"lib/resolver/xmvn-api-.*\\\\.jar\" );\n expect( 1, 1, \"lib/resolver/xmvn-core-.*\\\\.jar\" );\n expect( 1, 1, \"lib/resolver/jcommander-.*\\\\.jar\" );\n\n expect( 1, 1, \"lib/subst/\" );\n expect( 1, 1, \"lib/subst/xmvn-subst-.*\\\\.jar\" );\n expect( 1, 1, \"lib/subst/xmvn-api-.*\\\\.jar\" );\n expect( 1, 1, \"lib/subst/xmvn-core-.*\\\\.jar\" );\n expect( 1, 1, \"lib/subst/jcommander-.*\\\\.jar\" );\n\n Path baseDir = getMavenHome();\n List errors = new ArrayList<>();\n matchDirectoryTree( baseDir, baseDir, errors );\n\n for ( PathExpectation expect : expectations )\n {\n expect.verify( errors );\n }\n\n if ( !errors.isEmpty() )\n {\n fail( String.join( \"\\n\", errors ) );\n }\n 
}\n}"}}},{"rowIdx":2083,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"7852770916bc0a53b48bb1a6e3b28453b0ef83e9"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"psoreide/bnd,psoreide/bnd,psoreide/bnd"},"new_contents":{"kind":"string","value":"package aQute.launchpad;\n\nimport java.io.ByteArrayInputStream;\nimport java.io.ByteArrayOutputStream;\nimport java.io.Closeable;\nimport java.io.File;\nimport java.io.PrintStream;\nimport java.lang.reflect.ParameterizedType;\nimport java.lang.reflect.Type;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.HashMap;\nimport java.util.Hashtable;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Optional;\nimport java.util.Set;\nimport java.util.concurrent.CopyOnWriteArrayList;\nimport java.util.concurrent.TimeoutException;\nimport java.util.concurrent.atomic.AtomicInteger;\nimport java.util.function.Consumer;\nimport java.util.jar.JarOutputStream;\nimport java.util.jar.Manifest;\nimport java.util.stream.Collectors;\nimport java.util.stream.Stream;\n\nimport org.eclipse.jdt.annotation.Nullable;\nimport org.osgi.annotation.versioning.ProviderType;\nimport org.osgi.framework.Bundle;\nimport org.osgi.framework.BundleContext;\nimport org.osgi.framework.BundleException;\nimport org.osgi.framework.Constants;\nimport org.osgi.framework.Filter;\nimport org.osgi.framework.FrameworkEvent;\nimport org.osgi.framework.FrameworkUtil;\nimport org.osgi.framework.InvalidSyntaxException;\nimport org.osgi.framework.ServiceEvent;\nimport org.osgi.framework.ServiceException;\nimport org.osgi.framework.ServiceReference;\nimport org.osgi.framework.ServiceRegistration;\nimport org.osgi.framework.hooks.service.EventListenerHook;\nimport org.osgi.framework.hooks.service.FindHook;\nimport 
org.osgi.framework.hooks.service.ListenerHook.ListenerInfo;\nimport org.osgi.framework.launch.Framework;\nimport org.osgi.framework.namespace.PackageNamespace;\nimport org.osgi.framework.wiring.BundleCapability;\nimport org.osgi.framework.wiring.BundleWiring;\nimport org.osgi.framework.wiring.FrameworkWiring;\nimport org.osgi.util.tracker.ServiceTracker;\n\nimport aQute.bnd.service.specifications.RunSpecification;\nimport aQute.lib.converter.Converter;\nimport aQute.lib.exceptions.Exceptions;\nimport aQute.lib.inject.Injector;\nimport aQute.lib.io.IO;\nimport aQute.lib.strings.Strings;\nimport aQute.libg.glob.Glob;\nimport aQute.libg.parameters.ParameterMap;\n\n/**\n * This class provides an OSGi framework that is configured with the current bnd\n * workspace. A project directory is used to find the workspace. This makes all\n * repositories in the workspace available to the framework. To be able to test\n * JUnit code against/in this framework it is necessary that all packages on the\n * buildpath and testpath are actually exported in the framework. This class\n * will ensure that. Once the framework is up and running it will be possible to\n * add bundles to it. There are a number of ways that this can be achieved:\n *

    \n *
  • Build a bundle – A bnd Builder is provided to create a bundle and install\n * it. This makes it possible to add classes from the src or test directories or\n * resources. See {@link #bundle()}. Convenience methods are added to get\n * services, see {@link #getService(Class)} et. al. Notice that this framework\n * starts in the same process as that the JUnit code runs. This is normally a\n * separately started VM.\n */\n@ProviderType\npublic class Launchpad implements AutoCloseable {\n\n\tpublic static final String\t\t\t\t\tBUNDLE_PRIORITY\t\t\t= \"Bundle-Priority\";\n\tprivate static final long\t\t\t\t\tSERVICE_DEFAULT_TIMEOUT\t= 60000L;\n\tstatic final AtomicInteger\t\t\t\t\tn\t\t\t\t\t\t= new AtomicInteger();\n\tfinal File\t\t\t\t\t\t\t\t\tprojectDir;\n\n\tfinal Framework\t\t\t\t\t\t\t\tframework;\n\tfinal List>\t\t\ttrackers\t\t\t\t= new ArrayList<>();\n\tfinal List\t\t\t\t\tframeworkEvents\t\t\t= new CopyOnWriteArrayList();\n\tfinal Injector\t\t\t\t\t\tinjector;\n\tfinal Map, ServiceTracker>\tinjectedDoNotClose\t\t= new HashMap<>();\n\tfinal Set\t\t\t\t\t\t\tframeworkExports;\n\tfinal List\t\t\t\t\t\t\terrors\t\t\t\t\t= new ArrayList<>();\n\tfinal String\t\t\t\t\t\t\t\tname;\n\tfinal String\t\t\t\t\t\t\t\tclassName;\n\tfinal RunSpecification\t\t\t\t\t\trunspec;\n\tfinal boolean\t\t\t\t\t\t\t\thasTestBundle;\n\n\tBundle\t\t\t\t\t\t\t\t\t\ttestbundle;\n\tboolean\t\t\t\t\t\t\t\t\t\tdebug;\n\tPrintStream\t\t\t\t\t\t\t\t\tout\t\t\t\t\t\t= System.err;\n\tServiceTracker\t\t\thooks;\n\tprivate long\t\t\t\t\t\t\t\tcloseTimeout;\n\n\tLaunchpad(Framework framework, String name, String className,\n\t\tRunSpecification runspec, long closeTimeout, boolean debug, boolean hasTestBundle) {\n\t\tthis.runspec = runspec;\n\t\tthis.closeTimeout = closeTimeout;\n\t\tthis.hasTestBundle = hasTestBundle;\n\n\t\ttry {\n\t\t\tthis.className = className;\n\t\t\tthis.name = name;\n\t\t\tthis.projectDir = IO.work;\n\t\t\tthis.debug = debug;\n\t\t\tthis.framework = 
framework;\n\t\t\tthis.framework.init();\n\t\t\tthis.injector = new Injector<>(makeConverter(), this::getService, Service.class);\n\t\t\tthis.frameworkExports = getExports(framework).keySet();\n\n\t\t\treport(\"Initialized framework %s\", this.framework);\n\t\t\treport(\"Classpath %s\", System.getProperty(\"java.class.path\")\n\t\t\t\t.replace(File.pathSeparatorChar, '\\n'));\n\n\t\t\tframework.getBundleContext()\n\t\t\t\t.addFrameworkListener(frameworkEvents::add);\n\n\t\t\thooks = new ServiceTracker(framework.getBundleContext(), FindHook.class, null);\n\t\t\thooks.open();\n\n\t\t} catch (Exception e) {\n\t\t\tthrow Exceptions.duck(e);\n\t\t}\n\t}\n\n\tpublic void report(String format, Object... args) {\n\t\tif (!debug)\n\t\t\treturn;\n\n\t\tout.printf(format + \"%n\", args);\n\t}\n\n\t/**\n\t * Generate an error so that the test case can check if we found anything\n\t * wrong. This is easy to check with {@link #check(String...)}\n\t * \n\t * @param format the format string used in\n\t * {@link String#format(String, Object...)}\n\t * @param args the arguments to be formatted\n\t */\n\tpublic void error(String format, Object... args) {\n\t\treport(format, args);\n\t\tString msg = String.format(format, args);\n\t\terrors.add(msg);\n\t}\n\n\t/**\n\t * Check the errors found, filtering out any unwanted with globbing patters.\n\t * Each error is filtered against all the patterns. This method return true\n\t * if there are no unfiltered errors, otherwise false.\n\t * \n\t * @param patterns globbing patterns\n\t * @return true if no errors after filtering,otherwise false\n\t */\n\tpublic boolean check(String... 
patterns) {\n\t\tGlob[] globs = Stream.of(patterns)\n\t\t\t.map(Glob::new)\n\t\t\t.toArray(Glob[]::new);\n\t\tboolean[] used = new boolean[globs.length];\n\n\t\tString[] unmatched = errors.stream()\n\t\t\t.filter(msg -> {\n\t\t\t\tfor (int i = 0; i < globs.length; i++) {\n\t\t\t\t\tif (globs[i].finds(msg) >= 0) {\n\t\t\t\t\t\tused[i] = true;\n\t\t\t\t\t\treturn false;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\treturn true;\n\t\t\t})\n\t\t\t.toArray(String[]::new);\n\n\t\tif (unmatched.length == 0) {\n\n\t\t\tList report = new ArrayList<>();\n\t\t\tfor (int i = 0; i < used.length; i++) {\n\t\t\t\tif (!used[i]) {\n\t\t\t\t\treport.add(globs[i]);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif (report.isEmpty())\n\t\t\t\treturn true;\n\n\t\t\tout.println(\"Missing patterns\");\n\t\t\tout.println(Strings.join(\"\\n\", globs));\n\t\t\treturn false;\n\t\t}\n\n\t\tout.println(\"Errors\");\n\t\tout.println(Strings.join(\"\\n\", unmatched));\n\t\treturn false;\n\t}\n\n\t/**\n\t * Add a file as a bundle to the framework. This bundle will not be started.\n\t * \n\t * @param f the file to install\n\t * @return the bundle object\n\t */\n\tpublic Bundle bundle(File f) {\n\t\ttry {\n\t\t\treport(\"Installing %s\", f);\n\t\t\treturn framework.getBundleContext()\n\t\t\t\t.installBundle(toInstallURI(f));\n\t\t} catch (Exception e) {\n\t\t\treport(\"Failed to installing %s : %s\", f, e);\n\t\t\tthrow Exceptions.duck(e);\n\t\t}\n\t}\n\n\t/**\n\t * Set the debug flag\n\t */\n\tpublic Launchpad debug() {\n\t\tthis.debug = true;\n\t\treturn this;\n\t}\n\n\t/**\n\t * Install a number of bundles based on their bundle specification. 
A bundle\n\t * specification is the format used in for example -runbundles.\n\t * \n\t * @param specification the bundle specifications\n\t * @return a list of bundles\n\t */\n\tpublic List bundles(String specification) {\n\t\ttry {\n\t\t\treturn LaunchpadBuilder.workspace.getLatestBundles(projectDir.getAbsolutePath(), specification)\n\t\t\t\t.stream()\n\t\t\t\t.map(File::new)\n\t\t\t\t.map(this::bundle)\n\t\t\t\t.collect(Collectors.toList());\n\t\t} catch (Exception e) {\n\t\t\tthrow Exceptions.duck(e);\n\t\t}\n\t}\n\n\t/**\n\t * Install a number of bundles\n\t * \n\t * @param runbundles the list of bundles\n\t * @return a list of bundle objects\n\t */\n\tpublic List bundles(File... runbundles) {\n\t\tif (runbundles == null || runbundles.length == 0)\n\t\t\treturn Collections.emptyList();\n\n\t\ttry {\n\t\t\tList bundles = new ArrayList<>();\n\t\t\tfor (File f : runbundles) {\n\t\t\t\tBundle b = bundle(f);\n\t\t\t\tbundles.add(b);\n\t\t\t}\n\t\t\treturn bundles;\n\t\t} catch (Exception e) {\n\t\t\tthrow Exceptions.duck(e);\n\t\t}\n\t}\n\n\t/**\n\t * Start a bundle\n\t * \n\t * @param b the bundle object\n\t */\n\tpublic void start(Bundle b) {\n\t\ttry {\n\t\t\tif (!isFragment(b)) {\n\t\t\t\treport(\"Starting %s\", b);\n\t\t\t\tb.start();\n\n\t\t\t\tSet exports = getExports(b).keySet();\n\t\t\t\tSet imports = getImports(b).keySet();\n\t\t\t\texports.removeAll(imports);\n\t\t\t\texports.retainAll(frameworkExports);\n\n\t\t\t\tif (!exports.isEmpty()) {\n\t\t\t\t\terror(\n\t\t\t\t\t\t\"bundle %s is exporting but NOT importing package(s) %s that are/is also exported by the framework.\\n\"\n\t\t\t\t\t\t\t+ \"This means that the test code and the bundle cannot share classes of these package.\",\n\t\t\t\t\t\tb, exports);\n\t\t\t\t}\n\n\t\t\t} else {\n\t\t\t\treport(\"Not starting fragment %s\", b);\n\t\t\t}\n\t\t} catch (Exception e) {\n\t\t\treport(\"Failed to start %s : %s\", b, e);\n\t\t\tthrow Exceptions.duck(e);\n\t\t}\n\t}\n\n\t/**\n\t * Start all bundles\n\t * \n\t 
* @param bs a collection of bundles\n\t */\n\tpublic void start(Collection bs) {\n\t\tbs.forEach(this::start);\n\t}\n\n\t/**\n\t * Close this framework\n\t */\n\t@Override\n\tpublic void close() throws Exception {\n\t\treport(\"Stop the framework\");\n\t\tframework.stop();\n\t\treport(\"Stopped the framework\");\n\t\tframework.waitForStop(closeTimeout);\n\t\treport(\"Framework fully stopped\");\n\t}\n\n\t/**\n\t * Get the Bundle Context. If a test bundle was installed then this is the\n\t * context of the test bundle otherwise it is the context of the framework.\n\t * To be able to proxy services it is necessary to have a test bundle\n\t * installed.\n\t * \n\t * @return the bundle context of the test bundle or the framework\n\t */\n\tpublic BundleContext getBundleContext() {\n\t\tif (testbundle != null)\n\t\t\treturn testbundle.getBundleContext();\n\n\t\treturn framework.getBundleContext();\n\t}\n\n\t/**\n\t * Get a service registered under class. If multiple services are registered\n\t * it will return the first\n\t * \n\t * @param serviceInterface the name of the service\n\t * @return a service\n\t */\n\tpublic Optional getService(Class serviceInterface) {\n\t\treturn getService(serviceInterface, null);\n\t}\n\n\tpublic Optional getService(Class serviceInterface, @Nullable String target) {\n\t\treturn getServices(serviceInterface, target, 0, 0, false).stream()\n\t\t\t.map(this::getService)\n\t\t\t.findFirst();\n\t}\n\n\t/**\n\t * Get a list of services of a given name\n\t * \n\t * @param serviceClass the service name\n\t * @return a list of services\n\t */\n\tpublic List getServices(Class serviceClass) {\n\t\treturn getServices(serviceClass, null);\n\t}\n\n\t/**\n\t * Get a list of services in the current registry\n\t * \n\t * @param serviceClass the type of the service\n\t * @param target the target, may be null\n\t * @return a list of found services currently in the registry\n\t */\n\tpublic List getServices(Class serviceClass, @Nullable String target) 
{\n\t\treturn getServices(serviceClass, target, 0, 0, false).stream()\n\t\t\t.map(this::getService)\n\t\t\t.collect(Collectors.toList());\n\t}\n\n\t/**\n\t * Get a service from a reference. If the service is null, then throw an\n\t * exception.\n\t * \n\t * @param ref the reference\n\t * @return the service, never null\n\t */\n\tpublic T getService(ServiceReference ref) {\n\t\ttry {\n\t\t\tT service = getBundleContext().getService(ref);\n\t\t\tif (service == null) {\n\t\t\t\tif (ref.getBundle() == null) {\n\t\t\t\t\tthrow new ServiceException(\n\t\t\t\t\t\t\"getService(\" + ref + \") returns null, the service is no longer registered\");\n\t\t\t\t}\n\t\t\t\tthrow new ServiceException(\"getService(\" + ref + \") returns null, this probbaly means the \\n\"\n\t\t\t\t\t+ \"component failed to activate. The cause can \\n\" + \"generally be found in the log.\\n\" + \"\");\n\t\t\t}\n\t\t\treturn service;\n\t\t} catch (Exception e) {\n\t\t\tthrow Exceptions.duck(e);\n\t\t}\n\t}\n\n\t/**\n\t * Add the standard Gogo bundles\n\t */\n\tpublic Launchpad gogo() {\n\t\ttry {\n\t\t\tbundles(\"org.apache.felix.gogo.runtime,org.apache.felix.gogo.command,org.apache.felix.gogo.shell\");\n\t\t\treturn this;\n\t\t} catch (Exception e) {\n\t\t\tthrow Exceptions.duck(e);\n\t\t}\n\t}\n\n\t/**\n\t * Add the standard Gogo bundles\n\t */\n\tpublic Launchpad snapshot() {\n\t\ttry {\n\t\t\tbundles(\"biz.aQute.bnd.runtime.snapshot\");\n\t\t\treturn this;\n\t\t} catch (Exception e) {\n\t\t\tthrow Exceptions.duck(e);\n\t\t}\n\t}\n\n\t/**\n\t * Inject an object with services and other OSGi specific values.\n\t * \n\t * @param object the object to inject\n\t */\n\n\tpublic Launchpad inject(Object object) {\n\t\ttry {\n\t\t\tinjector.inject(object);\n\t\t\treturn this;\n\t\t} catch (Exception e) {\n\t\t\tthrow Exceptions.duck(e);\n\t\t}\n\t}\n\n\t/**\n\t * Install a bundle from a file\n\t * \n\t * @param file the file to install\n\t * @return a bundle\n\t */\n\tpublic Bundle install(File file) 
{\n\t\ttry {\n\t\t\treport(\"Installing %s\", file);\n\t\t\treturn framework.getBundleContext()\n\t\t\t\t.installBundle(toInstallURI(file));\n\t\t} catch (BundleException e) {\n\t\t\treport(\"Failed to install %s : %s\", file, e);\n\t\t\tthrow Exceptions.duck(e);\n\t\t}\n\t}\n\n\t/**\n\t * Create a new synthetic bundle.\n\t * \n\t * @return the bundle builder\n\t */\n\tpublic BundleBuilder bundle() {\n\t\treturn new BundleBuilder(this);\n\t}\n\n\t/**\n\t * Create a new object and inject it.\n\t * \n\t * @param type the type of object\n\t * @return a new object injected and all\n\t */\n\tpublic T newInstance(Class type) {\n\t\ttry {\n\t\t\treturn injector.newInstance(type);\n\t\t} catch (Exception e) {\n\t\t\treport(\"Failed to create and instance for %s : %s\", type, e);\n\t\t\tthrow Exceptions.duck(e);\n\t\t}\n\t}\n\n\t/**\n\t * Show the information of how the framework is setup and is running\n\t */\n\tpublic Launchpad report() throws InvalidSyntaxException {\n\t\tboolean old = debug;\n\t\tdebug = true;\n\t\treportBundles();\n\t\treportServices();\n\t\treportEvents();\n\t\tdebug = old;\n\t\treturn this;\n\t}\n\n\t/**\n\t * Show the installed bundles\n\t */\n\tpublic void reportBundles() {\n\t\tStream.of(framework.getBundleContext()\n\t\t\t.getBundles())\n\t\t\t.forEach(bb -> {\n\t\t\t\treport(\"%4s %s\", bundleStateToString(bb.getState()), bb);\n\t\t\t});\n\t}\n\n\t/**\n\t * Show the registered service\n\t */\n\tpublic void reportServices() throws InvalidSyntaxException {\n\t\tStream.of(framework.getBundleContext()\n\t\t\t.getAllServiceReferences(null, null))\n\t\t\t.forEach(sref -> {\n\t\t\t\treport(\"%s\", sref);\n\t\t\t});\n\t}\n\n\t/**\n\t * Wait for a Service Reference to be registered\n\t * \n\t * @param class1 the name of the service\n\t * @param timeoutInMs the time to wait\n\t * @return a service reference\n\t */\n\tpublic Optional> waitForServiceReference(Class class1, long timeoutInMs) {\n\n\t\treturn getServices(class1, null, 1, timeoutInMs, 
false).stream()\n\t\t\t.findFirst();\n\n\t}\n\n\t/**\n\t * Wait for a Service Reference to be registered\n\t * \n\t * @param class1 the name of the service\n\t * @param timeoutInMs the time to wait\n\t * @return a service reference\n\t */\n\tpublic Optional> waitForServiceReference(Class class1, long timeoutInMs, String target) {\n\n\t\treturn getServices(class1, target, 1, timeoutInMs, false).stream()\n\t\t\t.findFirst();\n\n\t}\n\n\t/**\n\t * Wait for service to be registered\n\t * \n\t * @param class1 name of the service\n\t * @param timeoutInMs timeout in ms\n\t * @return a service\n\t */\n\tpublic Optional waitForService(Class class1, long timeoutInMs) {\n\t\treturn this.waitForService(class1, timeoutInMs, null);\n\t}\n\n\t/**\n\t * Wait for service to be registered\n\t * \n\t * @param class1 name of the service\n\t * @param timeoutInMs timeout in ms\n\t * @param target filter, may be null\n\t * @return a service\n\t */\n\tpublic Optional waitForService(Class class1, long timeoutInMs, String target) {\n\t\ttry {\n\t\t\treturn getServices(class1, target, 1, timeoutInMs, false).stream()\n\t\t\t\t.findFirst()\n\t\t\t\t.map(getBundleContext()::getService);\n\t\t} catch (Exception e) {\n\t\t\tthrow Exceptions.duck(e);\n\t\t}\n\t}\n\n\t/**\n\t * Turn a service reference's properties into a Map\n\t * \n\t * @param reference the reference\n\t * @return a Map with all the properties of the reference\n\t */\n\tpublic Map toMap(ServiceReference reference) {\n\t\tMap map = new HashMap<>();\n\n\t\tfor (String key : reference.getPropertyKeys()) {\n\t\t\tmap.put(key, reference.getProperty(key));\n\t\t}\n\n\t\treturn map;\n\t}\n\n\t/**\n\t * Get a bundle by symbolic name\n\t */\n\tpublic Optional getBundle(String bsn) {\n\t\treturn Stream.of(getBundleContext().getBundles())\n\t\t\t.filter(b -> bsn.equals(b.getSymbolicName()))\n\t\t\t.findFirst();\n\t}\n\n\t/**\n\t * Broadcast a message to many services at once\n\t */\n\n\t@SuppressWarnings(\"unchecked\")\n\tpublic int 
broadcast(Class type, Consumer consumer) {\n\t\tServiceTracker tracker = new ServiceTracker<>(getBundleContext(), type, null);\n\t\ttracker.open();\n\t\tint n = 0;\n\t\ttry {\n\t\t\tfor (T instance : (T[]) tracker.getServices()) {\n\t\t\t\tconsumer.accept(instance);\n\t\t\t\tn++;\n\t\t\t}\n\t\t} finally {\n\t\t\ttracker.close();\n\t\t}\n\t\treturn n;\n\t}\n\n\t/**\n\t * Hide a service by registering a hook. This should in general be done\n\t * before you let others look. In general, the Launchpad should be started\n\t * in {@link LaunchpadBuilder#nostart()} mode. This initializes the OSGi\n\t * framework making it possible to register a service before\n\t */\n\n\tpublic Closeable hide(Class type) {\n\t\treturn hide(type, \"hide\");\n\t}\n\n\t/**\n\t * Hide a service. This will register a FindHook and an EventHook for the\n\t * type. This will remove the visibility of all services with that type for\n\t * all bundles _except_ the testbundle. Notice that bundles that already\n\t * obtained a references are not affected. If you use this facility it is\n\t * best to not start the framework before you hide a service. You can\n\t * indicate this to the build with {@link LaunchpadBuilder#nostart()}. The\n\t * framework can be started after creation with {@link #start()}. 
Notice\n\t * that services through the testbundle remain visible for this hide.\n\t * \n\t * @param type the type to hide\n\t * @param reason the reason why it is hidden\n\t * @return a Closeable, when closed it will remove the hooks\n\t */\n\tpublic Closeable hide(Class type, String reason) {\n\t\tServiceRegistration eventReg = framework.getBundleContext()\n\t\t\t.registerService(EventListenerHook.class, new EventListenerHook() {\n\t\t\t\t@Override\n\t\t\t\tpublic void event(ServiceEvent event, Map> listeners) {\n\n\t\t\t\t\tServiceReference ref = event.getServiceReference();\n\t\t\t\t\tif (selectForHiding(type, ref))\n\t\t\t\t\t\tlisteners.clear();\n\t\t\t\t}\n\n\t\t\t\t@Override\n\t\t\t\tpublic String toString() {\n\t\t\t\t\treturn \"Launchpad[\" + reason + \"]\";\n\t\t\t\t}\n\t\t\t}, null);\n\n\t\tServiceRegistration findReg = framework.getBundleContext()\n\t\t\t.registerService(FindHook.class, new FindHook() {\n\n\t\t\t\t@Override\n\t\t\t\tpublic void find(BundleContext context, String name, String filter, boolean allServices,\n\t\t\t\t\tCollection> references) {\n\t\t\t\t\tif (name == null || name.equals(type.getName())) {\n\t\t\t\t\t\treferences.removeIf(ref -> selectForHiding(type, ref));\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t@Override\n\t\t\t\tpublic String toString() {\n\t\t\t\t\treturn \"Launchpad[\" + reason + \"]\";\n\t\t\t\t}\n\n\t\t\t}, null);\n\n\t\treturn () -> {\n\t\t\teventReg.unregister();\n\t\t\tfindReg.unregister();\n\t\t};\n\t}\n\n\t/**\n\t * Check of a service reference has one of the given types in its object\n\t * class\n\t * \n\t * @param serviceReference the service reference to check\n\t * @param types the set of types\n\t * @return true if one of the types name is in the service reference's\n\t * objectClass property\n\t */\n\tpublic boolean isOneOfType(ServiceReference serviceReference, Class... 
types) {\n\t\tString[] objectClasses = (String[]) serviceReference.getProperty(Constants.OBJECTCLASS);\n\t\tfor (Class type : types) {\n\t\t\tString name = type.getName();\n\n\t\t\tfor (String objectClass : objectClasses) {\n\t\t\t\tif (objectClass.equals(name))\n\t\t\t\t\treturn true;\n\t\t\t}\n\t\t}\n\t\treturn false;\n\t}\n\n\tprivate boolean selectForHiding(Class type, ServiceReference ref) {\n\n\t\t//\n\t\t// We never hide services registered by the testbundle\n\t\t//\n\n\t\tif (ref.getBundle() == testbundle)\n\t\t\treturn false;\n\n\t\t// only hide references when one of their\n\t\t// service interfaces is of the hidden type\n\n\t\treturn isOneOfType(ref, type);\n\t}\n\n\t/**\n\t * Start the framework if not yet started\n\t */\n\n\tpublic void start() {\n\t\ttry {\n\t\t\tframework.start();\n\t\t\tList toBeStarted = new ArrayList<>();\n\t\t\tfor (String path : runspec.runbundles) {\n\t\t\t\tFile file = new File(path);\n\t\t\t\tif (!file.isFile())\n\t\t\t\t\tthrow new IllegalArgumentException(\"-runbundle \" + file + \" does not exist or is not a file\");\n\n\t\t\t\tBundle b = install(file);\n\t\t\t\tif (!isFragment(b)) {\n\t\t\t\t\ttoBeStarted.add(b);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tFrameworkWiring fw = framework.adapt(FrameworkWiring.class);\n\t\t\tfw.resolveBundles(toBeStarted);\n\n\t\t\tCollections.sort(toBeStarted, this::startorder);\n\n\t\t\tif (hasTestBundle)\n\t\t\t\ttestbundle();\n\n\t\t\ttoBeStarted.forEach(this::start);\n\n\t\t} catch (BundleException e) {\n\t\t\tthrow Exceptions.duck(e);\n\t\t}\n\t}\n\n\t// reverse ordering. I.e. 
highest priority is first\n\tint startorder(Bundle a, Bundle b) {\n\n\t\treturn Integer.compare(getPriority(b), getPriority(a));\n\t}\n\n\tprivate int getPriority(Bundle b) {\n\t\ttry {\n\t\t\tString h = b.getHeaders()\n\t\t\t\t.get(BUNDLE_PRIORITY);\n\t\t\tif (h != null)\n\t\t\t\treturn Integer.parseInt(h);\n\t\t} catch (Exception e) {\n\t\t\t// ignore\n\t\t}\n\t\treturn 0;\n\t}\n\n\t/**\n\t * Stop the framework if not yet stopped\n\t */\n\n\tpublic void stop() {\n\t\ttry {\n\t\t\treport(\"Stopping the framework\");\n\t\t\tframework.stop();\n\t\t} catch (BundleException e) {\n\t\t\treport(\"Could not stop the framework : %s\", e);\n\t\t\tthrow Exceptions.duck(e);\n\t\t}\n\t}\n\n\t/**\n\t * Set the test bundle\n\t */\n\tpublic void testbundle() {\n\t\tif (testbundle != null) {\n\t\t\tthrow new IllegalArgumentException(\"Test bundle already exists\");\n\t\t}\n\t\ttry {\n\t\t\tByteArrayOutputStream bout = new ByteArrayOutputStream();\n\t\t\tManifest man = new Manifest();\n\t\t\tman.getMainAttributes()\n\t\t\t\t.putValue(\"Manifest-Version\", \"1\");\n\t\t\tString name = projectDir.getName()\n\t\t\t\t.toUpperCase();\n\t\t\treport(\"Creating test bundle %s\", name);\n\t\t\tman.getMainAttributes()\n\t\t\t\t.putValue(Constants.BUNDLE_SYMBOLICNAME, name);\n\t\t\tman.getMainAttributes()\n\t\t\t\t.putValue(Constants.BUNDLE_MANIFESTVERSION, \"2\");\n\t\t\tJarOutputStream jout = new JarOutputStream(bout, man);\n\t\t\tjout.close();\n\t\t\tByteArrayInputStream bin = new ByteArrayInputStream(bout.toByteArray());\n\t\t\tthis.testbundle = framework.getBundleContext()\n\t\t\t\t.installBundle(name, bin);\n\t\t\tthis.testbundle.start();\n\t\t} catch (Exception e) {\n\t\t\treport(\"Failed to create test bundle\");\n\t\t\tthrow Exceptions.duck(e);\n\t\t}\n\t}\n\n\t/**\n\t * Register a service. You can specify the type and the instance as well as\n\t * the properties. The properties are specified as varargs. 
That means you\n\t * can define a property by specifying the key (which must be a String) and\n\t * the value consecutively. The value can be any of the types allowed by the\n\t * service properties.\n\t * \n\t *
    \n\t * fw.register(Foo.class, instance, \"foo\", 10, \"bar\", new long[] {\n\t * \t1, 2, 3\n\t * });\n\t * 
    \n\t * \n\t * @param type the service type\n\t * @param instance the service object\n\t * @param props the service properties specified as a seq of \"key\", value\n\t * @return the service registration\n\t */\n\tpublic ServiceRegistration register(Class type, T instance, Object... props) {\n\t\treport(\"Registering service %s %s\", type, instance, Arrays.toString(props));\n\t\tHashtable ht = new Hashtable<>();\n\t\tfor (int i = 0; i < props.length; i += 2) {\n\t\t\tString key = (String) props[i];\n\t\t\tObject value = null;\n\t\t\tif (i + 1 < props.length) {\n\t\t\t\tvalue = props[i + 1];\n\t\t\t}\n\t\t\tht.put(key, value);\n\t\t}\n\t\treturn getBundleContext().registerService(type, instance, ht);\n\t}\n\n\t/**\n\t * Return the framework object\n\t * \n\t * @return the framework object\n\t */\n\tpublic Framework getFramework() {\n\t\treturn framework;\n\t}\n\n\t/**\n\t * Add a component class. This creates a little bundle that holds the\n\t * component class so that bnd adds the DS XML. However, it also imports the\n\t * package of the component class so that in runtime DS will load it from\n\t * the classpath.\n\t */\n\n\tpublic Bundle component(Class type) {\n\t\treturn bundle().addResource(type)\n\t\t\t.start();\n\t}\n\n\t/**\n\t * Runs the given code within the context of a synthetic bundle. Creates a\n\t * synthetic bundle and adds the supplied class to it using\n\t * {@link BundleBuilder#addResourceWithCopy}. 
It then loads the class using\n\t * the synthetic bundle's class loader and instantiates it using the public,\n\t * no-parameter constructor.\n\t * \n\t * @param clazz the class to instantiate within the context of the\n\t * framework.\n\t * @return The instantiated object.\n\t * @see BundleBuilder#addResourceWithCopy(Class)\n\t */\n\tpublic T instantiateInFramework(Class clazz) {\n\t\ttry {\n\t\t\tclazz.getConstructor();\n\t\t} catch (NoSuchMethodException e) {\n\t\t\tExceptions.duck(e);\n\t\t}\n\t\tBundle b = bundle().addResourceWithCopy(clazz)\n\t\t\t.start();\n\t\ttry {\n\t\t\t@SuppressWarnings(\"unchecked\")\n\t\t\tClass insideClass = (Class) b.loadClass(clazz.getName());\n\t\t\treturn insideClass.getConstructor()\n\t\t\t\t.newInstance();\n\t\t} catch (Exception e) {\n\t\t\tthrow Exceptions.duck(e);\n\t\t}\n\t}\n\n\t/**\n\t * Check if a bundle is a fragement\n\t * \n\t * @param b the bundle to check\n\t */\n\tpublic boolean isFragment(Bundle b) {\n\t\treturn b.getHeaders()\n\t\t\t.get(Constants.FRAGMENT_HOST) != null;\n\t}\n\n\t/**\n\t * Check if a bundle is in the ACTIVE state\n\t * \n\t * @param b the bundle to check\n\t */\n\tpublic boolean isActive(Bundle b) {\n\t\treturn b.getState() == Bundle.ACTIVE;\n\t}\n\n\t/**\n\t * Check if a bundle is in the RESOLVED state\n\t * \n\t * @param b the bundle to check\n\t */\n\tpublic boolean isResolved(Bundle b) {\n\t\treturn b.getState() == Bundle.RESOLVED;\n\t}\n\n\t/**\n\t * Check if a bundle is in the INSTALLED state\n\t * \n\t * @param b the bundle to check\n\t */\n\tpublic boolean isInstalled(Bundle b) {\n\t\treturn b.getState() == Bundle.INSTALLED;\n\t}\n\n\t/**\n\t * Check if a bundle is in the UNINSTALLED state\n\t * \n\t * @param b the bundle to check\n\t */\n\tpublic boolean isUninstalled(Bundle b) {\n\t\treturn b.getState() == Bundle.UNINSTALLED;\n\t}\n\n\t/**\n\t * Check if a bundle is in the STARTING state\n\t * \n\t * @param b the bundle to check\n\t */\n\tpublic boolean isStarting(Bundle b) 
{\n\t\treturn b.getState() == Bundle.STARTING;\n\t}\n\n\t/**\n\t * Check if a bundle is in the STOPPING state\n\t * \n\t * @param b the bundle to check\n\t */\n\tpublic boolean isStopping(Bundle b) {\n\t\treturn b.getState() == Bundle.STOPPING;\n\t}\n\n\t/**\n\t * Check if a bundle is in the ACTIVE or STARTING state\n\t * \n\t * @param b the bundle to check\n\t */\n\tpublic boolean isRunning(Bundle b) {\n\t\treturn isActive(b) || isStarting(b);\n\t}\n\n\t/**\n\t * Check if a bundle is in the RESOLVED or ACTIVE or STARTING state\n\t * \n\t * @param b the bundle to check\n\t */\n\tpublic boolean isReady(Bundle b) {\n\t\treturn isResolved(b) || isActive(b) || isStarting(b);\n\t}\n\n\tprivate ParameterMap getExports(Bundle b) {\n\t\treturn new ParameterMap(b.getHeaders()\n\t\t\t.get(Constants.EXPORT_PACKAGE));\n\t}\n\n\tprivate ParameterMap getImports(Bundle b) {\n\t\treturn new ParameterMap(b.getHeaders()\n\t\t\t.get(Constants.IMPORT_PACKAGE));\n\t}\n\n\tprivate String toInstallURI(File c) {\n\t\treturn \"reference:\" + c.toURI();\n\t}\n\n\tObject getService(Injector.Target param) {\n\n\t\ttry {\n\t\t\tif (param.type == BundleContext.class) {\n\t\t\t\treturn getBundleContext();\n\t\t\t}\n\t\t\tif (param.type == Bundle.class)\n\t\t\t\treturn testbundle;\n\n\t\t\tif (param.type == Framework.class) {\n\t\t\t\treturn framework;\n\t\t\t}\n\t\t\tif (param.type == Bundle[].class) {\n\t\t\t\treturn framework.getBundleContext()\n\t\t\t\t\t.getBundles();\n\t\t\t}\n\n\t\t\tService service = param.annotation;\n\t\t\tString target = service.target()\n\t\t\t\t.isEmpty() ? 
null : service.target();\n\n\t\t\tClass serviceClass = service.service();\n\n\t\t\tif (serviceClass == Object.class)\n\t\t\t\tserviceClass = getServiceType(param.type);\n\n\t\t\tif (serviceClass == null)\n\t\t\t\tserviceClass = getServiceType(param.primaryType);\n\n\t\t\tif (serviceClass == null)\n\t\t\t\tthrow new IllegalArgumentException(\"Cannot define service class for \" + param);\n\n\t\t\tlong timeout = service.timeout();\n\t\t\tif (timeout <= 0)\n\t\t\t\ttimeout = SERVICE_DEFAULT_TIMEOUT;\n\n\t\t\tboolean multiple = isMultiple(param.type);\n\t\t\tint cardinality = multiple ? service.minimum() : 1;\n\n\t\t\tList> matchedReferences = getServices(serviceClass, target, cardinality,\n\t\t\t\ttimeout, true);\n\n\t\t\tif (multiple)\n\t\t\t\treturn matchedReferences;\n\t\t\telse\n\t\t\t\treturn matchedReferences.get(0);\n\n\t\t} catch (Exception e) {\n\t\t\tthrow Exceptions.duck(e);\n\t\t}\n\t}\n\n\t@SuppressWarnings({\n\t\t\"rawtypes\", \"unchecked\"\n\t})\n\tpublic List> getServices(Class serviceClass, @Nullable String target, int cardinality,\n\t\tlong timeout, boolean exception) {\n\t\ttry {\n\n\t\t\tString className = serviceClass.getName();\n\n\t\t\tServiceTracker tracker = injectedDoNotClose.computeIfAbsent(serviceClass, (c) -> {\n\t\t\t\tServiceTracker t = new ServiceTracker(framework.getBundleContext(), className,\n\t\t\t\t\tnull);\n\t\t\t\tt.open(true);\n\t\t\t\treturn t;\n\t\t\t});\n\n\t\t\tlong deadline = System.currentTimeMillis() + timeout;\n\n\t\t\twhile (true) {\n\n\t\t\t\t// we get the ALL services regardless of class space or hidden\n\t\t\t\t// by hooks or filters.\n\n\t\t\t\t@SuppressWarnings(\"unchecked\")\n\t\t\t\tList> allReferences = (List>) getReferences(tracker,\n\t\t\t\t\tserviceClass);\n\n\t\t\t\tList> visibleReferences = allReferences.stream()\n\t\t\t\t\t.filter(ref -> ref.isAssignableTo(framework, className))\n\t\t\t\t\t.collect(Collectors.toList());\n\n\t\t\t\tList> unhiddenReferences = new ArrayList<>(visibleReferences);\n\t\t\t\tMap, 
FindHook> hookMap = new HashMap<>();\n\n\t\t\t\tfor (FindHook hook : this.hooks.getServices(new FindHook[0])) {\n\t\t\t\t\tList> original = new ArrayList<>(unhiddenReferences);\n\t\t\t\t\thook.find(testbundle.getBundleContext(), className, target, true, (Collection) unhiddenReferences);\n\t\t\t\t\toriginal.removeAll(unhiddenReferences);\n\t\t\t\t\tfor (ServiceReference ref : original) {\n\t\t\t\t\t\thookMap.put(ref, hook);\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tList> matchedReferences;\n\n\t\t\t\tif (target == null) {\n\t\t\t\t\tmatchedReferences = new ArrayList<>(unhiddenReferences);\n\t\t\t\t} else {\n\t\t\t\t\tFilter filter = framework.getBundleContext()\n\t\t\t\t\t\t.createFilter(target);\n\t\t\t\t\tmatchedReferences = visibleReferences.stream()\n\t\t\t\t\t\t.filter(filter::match)\n\t\t\t\t\t\t.collect(Collectors.toList());\n\t\t\t\t}\n\n\t\t\t\tif (cardinality <= matchedReferences.size()) {\n\t\t\t\t\treturn matchedReferences;\n\t\t\t\t}\n\n\t\t\t\tif (deadline < System.currentTimeMillis()) {\n\t\t\t\t\tString error = \"Injection of service \" + className;\n\t\t\t\t\tif (target != null)\n\t\t\t\t\t\terror += \" with target \" + target;\n\n\t\t\t\t\terror += \" failed.\";\n\n\t\t\t\t\tif (allReferences.size() > visibleReferences.size()) {\n\t\t\t\t\t\tList> invisibleReferences = new ArrayList<>(allReferences);\n\t\t\t\t\t\tinvisibleReferences.removeAll(visibleReferences);\n\t\t\t\t\t\tfor (ServiceReference r : invisibleReferences) {\n\t\t\t\t\t\t\terror += \"\\nInvisible reference \" + r + \"[\" + r.getProperty(Constants.SERVICE_ID)\n\t\t\t\t\t\t\t\t+ \"] from bundle \" + r.getBundle();\n\n\t\t\t\t\t\t\tString[] objectClass = (String[]) r.getProperty(Constants.OBJECTCLASS);\n\t\t\t\t\t\t\tfor (String clazz : objectClass) {\n\t\t\t\t\t\t\t\terror += \"\\n \" + clazz + \"\\n registrar: \"\n\t\t\t\t\t\t\t\t\t+ getSource(clazz, r.getBundle()).orElse(\"null\") + \"\\n framework: \"\n\t\t\t\t\t\t\t\t\t+ getSource(clazz, 
framework).orElse(\"null\");\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\t\tif (visibleReferences.size() > unhiddenReferences.size()) {\n\t\t\t\t\t\tList> hiddenReferences = new ArrayList<>(visibleReferences);\n\t\t\t\t\t\thiddenReferences.removeAll(unhiddenReferences);\n\t\t\t\t\t\tfor (ServiceReference r : hiddenReferences) {\n\t\t\t\t\t\t\terror += \"\\nHidden (FindHook) Reference \" + r + \" from bundle \" + r.getBundle() + \" hook \"\n\t\t\t\t\t\t\t\t+ hookMap.get(r);\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\t\tif (unhiddenReferences.size() > matchedReferences.size()) {\n\t\t\t\t\t\tList> untargetReferences = new ArrayList<>(unhiddenReferences);\n\t\t\t\t\t\tuntargetReferences.removeAll(matchedReferences);\n\t\t\t\t\t\terror += \"\\nReference not matched by the target filter \" + target;\n\t\t\t\t\t\tfor (ServiceReference ref : untargetReferences) {\n\t\t\t\t\t\t\terror += \"\\n \" + ref + \" : \" + getProperties(ref);\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\t\tif (exception)\n\t\t\t\t\t\tthrow new TimeoutException(error);\n\n\t\t\t\t\treturn Collections.emptyList();\n\t\t\t\t}\n\t\t\t\tThread.sleep(100);\n\t\t\t}\n\t\t} catch (Exception e) {\n\t\t\tthrow Exceptions.duck(e);\n\t\t}\n\t}\n\n\tprivate Map getProperties(ServiceReference ref) {\n\t\tMap map = new HashMap<>();\n\t\tfor (String k : ref.getPropertyKeys()) {\n\t\t\tObject property = ref.getProperty(k);\n\t\t\tString s;\n\t\t\tif (property != null && property.getClass()\n\t\t\t\t.isArray()) {\n\t\t\t\ts = Arrays.deepToString((Object[]) property);\n\t\t\t} else\n\t\t\t\ts = property + \"\";\n\n\t\t\tmap.put(k, s);\n\t\t}\n\t\treturn map;\n\t}\n\n\tprivate Optional getSource(String className, Bundle from) {\n\t\ttry {\n\t\t\tClass loadClass = from.loadClass(className);\n\t\t\tBundle bundle = FrameworkUtil.getBundle(loadClass);\n\n\t\t\tif (bundle == null)\n\t\t\t\treturn Optional.of(\"from class path\");\n\t\t\telse {\n\t\t\t\tBundleWiring wiring = bundle.adapt(BundleWiring.class);\n\t\t\t\tString 
exported = \"PRIVATE! \";\n\t\t\t\tList capabilities = wiring.getCapabilities(PackageNamespace.PACKAGE_NAMESPACE);\n\t\t\t\tString packageName = loadClass.getPackage()\n\t\t\t\t\t.getName();\n\n\t\t\t\tfor (BundleCapability c : capabilities) {\n\t\t\t\t\tif (packageName.equals(c.getAttributes()\n\t\t\t\t\t\t.get(PackageNamespace.PACKAGE_NAMESPACE))) {\n\t\t\t\t\t\texported = \"Exported from \";\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\treturn Optional.of(exported + \" \" + bundle.toString());\n\t\t\t}\n\t\t} catch (Exception e) {\n\t\t\treturn Optional.empty();\n\t\t}\n\t}\n\n\tvoid reportEvents() {\n\t\tframeworkEvents.forEach(fe -> {\n\t\t\treport(\"%s\", fe);\n\t\t});\n\t}\n\n\tprivate String bundleStateToString(int state) {\n\t\tswitch (state) {\n\t\t\tcase Bundle.UNINSTALLED :\n\t\t\t\treturn \"UNIN\";\n\t\t\tcase Bundle.INSTALLED :\n\t\t\t\treturn \"INST\";\n\t\t\tcase Bundle.RESOLVED :\n\t\t\t\treturn \"RSLV\";\n\t\t\tcase Bundle.STARTING :\n\t\t\t\treturn \"STAR\";\n\t\t\tcase Bundle.ACTIVE :\n\t\t\t\treturn \"ACTV\";\n\t\t\tcase Bundle.STOPPING :\n\t\t\t\treturn \"STOP\";\n\t\t\tdefault :\n\t\t\t\treturn \"UNKN\";\n\t\t}\n\t}\n\n\tprivate List> getReferences(ServiceTracker tracker, Class serviceClass) {\n\t\tServiceReference[] references = tracker.getServiceReferences();\n\t\tif (references == null) {\n\t\t\treturn Collections.emptyList();\n\t\t}\n\t\tArrays.sort(references);\n\t\treturn Arrays.asList(references);\n\t}\n\n\tprivate Class getServiceType(Type type) {\n\t\tif (type instanceof Class)\n\t\t\treturn (Class) type;\n\n\t\tif (type instanceof ParameterizedType) {\n\t\t\tType rawType = ((ParameterizedType) type).getRawType();\n\t\t\tif (rawType instanceof Class) {\n\t\t\t\tClass rawClass = (Class) rawType;\n\t\t\t\tif (Iterable.class.isAssignableFrom(rawClass)) {\n\t\t\t\t\treturn getServiceType(((ParameterizedType) type).getActualTypeArguments()[0]);\n\t\t\t\t}\n\t\t\t\tif (Optional.class.isAssignableFrom(rawClass)) {\n\t\t\t\t\treturn 
getServiceType(((ParameterizedType) type).getActualTypeArguments()[0]);\n\t\t\t\t}\n\t\t\t\tif (ServiceReference.class.isAssignableFrom(rawClass)) {\n\t\t\t\t\treturn getServiceType(((ParameterizedType) type).getActualTypeArguments()[0]);\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\treturn null;\n\t}\n\n\tprivate boolean isMultiple(Type type) {\n\t\tif (type instanceof Class) {\n\t\t\treturn ((Class) type).isArray();\n\t\t}\n\t\tif (type instanceof ParameterizedType) {\n\n\t\t\tType rawType = ((ParameterizedType) type).getRawType();\n\t\t\tif (rawType instanceof Class) {\n\t\t\t\tClass clazz = (Class) rawType;\n\t\t\t\tif (Iterable.class.isAssignableFrom(clazz))\n\t\t\t\t\treturn true;\n\t\t\t}\n\n\t\t}\n\t\treturn false;\n\t}\n\n\tprivate boolean isParameterizedType(Type to, Class clazz) {\n\t\tif (to instanceof ParameterizedType) {\n\t\t\tif (((ParameterizedType) to).getRawType() == clazz)\n\t\t\t\treturn true;\n\t\t}\n\t\treturn false;\n\t}\n\n\tprivate Converter makeConverter() {\n\t\tConverter converter = new Converter();\n\t\tconverter.hook(null, (to, from) -> {\n\t\t\ttry {\n\t\t\t\tif (!(from instanceof ServiceReference))\n\t\t\t\t\treturn null;\n\n\t\t\t\tServiceReference reference = (ServiceReference) from;\n\n\t\t\t\tif (isParameterizedType(to, ServiceReference.class))\n\t\t\t\t\treturn reference;\n\n\t\t\t\tif (isParameterizedType(to, Map.class))\n\t\t\t\t\treturn converter.convert(to, toMap(reference));\n\n\t\t\t\tObject service = getService(reference);\n\n\t\t\t\tif (isParameterizedType(to, Optional.class))\n\t\t\t\t\treturn Optional.ofNullable(service);\n\n\t\t\t\treturn service;\n\t\t\t} catch (Exception e) {\n\t\t\t\tthrow e;\n\t\t\t}\n\t\t});\n\t\treturn converter;\n\t}\n\n\tpublic String getName() {\n\t\treturn name;\n\t}\n\n\tpublic String getClassName() {\n\t\treturn className;\n\t}\n}\n"},"new_file":{"kind":"string","value":"biz.aQute.launchpad/src/aQute/launchpad/Launchpad.java"},"old_contents":{"kind":"string","value":"package aQute.launchpad;\n\nimport 
java.io.ByteArrayInputStream;\nimport java.io.ByteArrayOutputStream;\nimport java.io.Closeable;\nimport java.io.File;\nimport java.io.PrintStream;\nimport java.lang.reflect.ParameterizedType;\nimport java.lang.reflect.Type;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.HashMap;\nimport java.util.Hashtable;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Optional;\nimport java.util.Set;\nimport java.util.concurrent.CopyOnWriteArrayList;\nimport java.util.concurrent.TimeoutException;\nimport java.util.concurrent.atomic.AtomicInteger;\nimport java.util.function.Consumer;\nimport java.util.jar.JarOutputStream;\nimport java.util.jar.Manifest;\nimport java.util.stream.Collectors;\nimport java.util.stream.Stream;\n\nimport org.eclipse.jdt.annotation.Nullable;\nimport org.osgi.annotation.versioning.ProviderType;\nimport org.osgi.framework.Bundle;\nimport org.osgi.framework.BundleContext;\nimport org.osgi.framework.BundleException;\nimport org.osgi.framework.Constants;\nimport org.osgi.framework.Filter;\nimport org.osgi.framework.FrameworkEvent;\nimport org.osgi.framework.FrameworkUtil;\nimport org.osgi.framework.InvalidSyntaxException;\nimport org.osgi.framework.ServiceEvent;\nimport org.osgi.framework.ServiceException;\nimport org.osgi.framework.ServiceReference;\nimport org.osgi.framework.ServiceRegistration;\nimport org.osgi.framework.hooks.service.EventListenerHook;\nimport org.osgi.framework.hooks.service.FindHook;\nimport org.osgi.framework.hooks.service.ListenerHook.ListenerInfo;\nimport org.osgi.framework.launch.Framework;\nimport org.osgi.framework.namespace.PackageNamespace;\nimport org.osgi.framework.wiring.BundleCapability;\nimport org.osgi.framework.wiring.BundleWiring;\nimport org.osgi.framework.wiring.FrameworkWiring;\nimport org.osgi.util.tracker.ServiceTracker;\n\nimport aQute.bnd.service.specifications.RunSpecification;\nimport 
aQute.lib.converter.Converter;\nimport aQute.lib.exceptions.Exceptions;\nimport aQute.lib.inject.Injector;\nimport aQute.lib.io.IO;\nimport aQute.lib.strings.Strings;\nimport aQute.libg.glob.Glob;\nimport aQute.libg.parameters.ParameterMap;\n\n/**\n * This class provides an OSGi framework that is configured with the current bnd\n * workspace. A project directory is used to find the workspace. This makes all\n * repositories in the workspace available to the framework. To be able to test\n * JUnit code against/in this framework it is necessary that all packages on the\n * buildpath and testpath are actually exported in the framework. This class\n * will ensure that. Once the framework is up and running it will be possible to\n * add bundles to it. There are a number of ways that this can be achieved:\n *
      \n *
    • Build a bundle – A bnd Builder is provided to create a bundle and install\n * it. This makes it possible to add classes from the src or test directories or\n * resources. See {@link #bundle()}. Convenience methods are added to get\n * services, see {@link #getService(Class)} et. al. Notice that this framework\n * starts in the same process as that the JUnit code runs. This is normally a\n * separately started VM.\n */\n@ProviderType\npublic class Launchpad implements AutoCloseable {\n\n\tpublic static final String\t\t\t\t\tBUNDLE_PRIORITY\t\t\t= \"Bundle-Priority\";\n\tprivate static final long\t\t\t\t\tSERVICE_DEFAULT_TIMEOUT\t= 60000L;\n\tstatic final AtomicInteger\t\t\t\t\tn\t\t\t\t\t\t= new AtomicInteger();\n\tfinal File\t\t\t\t\t\t\t\t\tprojectDir;\n\n\tfinal Framework\t\t\t\t\t\t\t\tframework;\n\tfinal List>\t\t\ttrackers\t\t\t\t= new ArrayList<>();\n\tfinal List\t\t\t\t\tframeworkEvents\t\t\t= new CopyOnWriteArrayList();\n\tfinal Injector\t\t\t\t\t\tinjector;\n\tfinal Map, ServiceTracker>\tinjectedDoNotClose\t\t= new HashMap<>();\n\tfinal Set\t\t\t\t\t\t\tframeworkExports;\n\tfinal List\t\t\t\t\t\t\terrors\t\t\t\t\t= new ArrayList<>();\n\tfinal String\t\t\t\t\t\t\t\tname;\n\tfinal String\t\t\t\t\t\t\t\tclassName;\n\tfinal RunSpecification\t\t\t\t\t\trunspec;\n\tfinal boolean\t\t\t\t\t\t\t\thasTestBundle;\n\n\tBundle\t\t\t\t\t\t\t\t\t\ttestbundle;\n\tboolean\t\t\t\t\t\t\t\t\t\tdebug;\n\tPrintStream\t\t\t\t\t\t\t\t\tout\t\t\t\t\t\t= System.err;\n\tServiceTracker\t\t\thooks;\n\tprivate long\t\t\t\t\t\t\t\tcloseTimeout;\n\n\tLaunchpad(Framework framework, String name, String className,\n\t\tRunSpecification runspec, long closeTimeout, boolean debug, boolean hasTestBundle) {\n\t\tthis.runspec = runspec;\n\t\tthis.closeTimeout = closeTimeout;\n\t\tthis.hasTestBundle = hasTestBundle;\n\n\t\ttry {\n\t\t\tthis.className = className;\n\t\t\tthis.name = name;\n\t\t\tthis.projectDir = IO.work;\n\t\t\tthis.debug = debug;\n\t\t\tthis.framework = 
framework;\n\t\t\tthis.framework.init();\n\t\t\tthis.injector = new Injector<>(makeConverter(), this::getService, Service.class);\n\t\t\tthis.frameworkExports = getExports(framework).keySet();\n\n\t\t\treport(\"Initialized framework %s\", this.framework);\n\t\t\treport(\"Classpath %s\", System.getProperty(\"java.class.path\")\n\t\t\t\t.replace(File.pathSeparatorChar, '\\n'));\n\n\t\t\tframework.getBundleContext()\n\t\t\t\t.addFrameworkListener(frameworkEvents::add);\n\n\t\t\thooks = new ServiceTracker(framework.getBundleContext(), FindHook.class, null);\n\t\t\thooks.open();\n\n\t\t} catch (Exception e) {\n\t\t\tthrow Exceptions.duck(e);\n\t\t}\n\t}\n\n\tpublic void report(String format, Object... args) {\n\t\tif (!debug)\n\t\t\treturn;\n\n\t\tout.printf(format + \"%n\", args);\n\t}\n\n\t/**\n\t * Generate an error so that the test case can check if we found anything\n\t * wrong. This is easy to check with {@link #check(String...)}\n\t * \n\t * @param format the format string used in\n\t * {@link String#format(String, Object...)}\n\t * @param args the arguments to be formatted\n\t */\n\tpublic void error(String format, Object... args) {\n\t\treport(format, args);\n\t\tString msg = String.format(format, args);\n\t\terrors.add(msg);\n\t}\n\n\t/**\n\t * Check the errors found, filtering out any unwanted with globbing patters.\n\t * Each error is filtered against all the patterns. This method return true\n\t * if there are no unfiltered errors, otherwise false.\n\t * \n\t * @param patterns globbing patterns\n\t * @return true if no errors after filtering,otherwise false\n\t */\n\tpublic boolean check(String... 
patterns) {\n\t\tGlob[] globs = Stream.of(patterns)\n\t\t\t.map(Glob::new)\n\t\t\t.toArray(Glob[]::new);\n\t\tboolean[] used = new boolean[globs.length];\n\n\t\tString[] unmatched = errors.stream()\n\t\t\t.filter(msg -> {\n\t\t\t\tfor (int i = 0; i < globs.length; i++) {\n\t\t\t\t\tif (globs[i].finds(msg) >= 0) {\n\t\t\t\t\t\tused[i] = true;\n\t\t\t\t\t\treturn false;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\treturn true;\n\t\t\t})\n\t\t\t.toArray(String[]::new);\n\n\t\tif (unmatched.length == 0) {\n\n\t\t\tList report = new ArrayList<>();\n\t\t\tfor (int i = 0; i < used.length; i++) {\n\t\t\t\tif (!used[i]) {\n\t\t\t\t\treport.add(globs[i]);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif (report.isEmpty())\n\t\t\t\treturn true;\n\n\t\t\tout.println(\"Missing patterns\");\n\t\t\tout.println(Strings.join(\"\\n\", globs));\n\t\t\treturn false;\n\t\t}\n\n\t\tout.println(\"Errors\");\n\t\tout.println(Strings.join(\"\\n\", unmatched));\n\t\treturn false;\n\t}\n\n\t/**\n\t * Add a file as a bundle to the framework. This bundle will not be started.\n\t * \n\t * @param f the file to install\n\t * @return the bundle object\n\t */\n\tpublic Bundle bundle(File f) {\n\t\ttry {\n\t\t\treport(\"Installing %s\", f);\n\t\t\treturn framework.getBundleContext()\n\t\t\t\t.installBundle(toInstallURI(f));\n\t\t} catch (Exception e) {\n\t\t\treport(\"Failed to installing %s : %s\", f, e);\n\t\t\tthrow Exceptions.duck(e);\n\t\t}\n\t}\n\n\t/**\n\t * Set the debug flag\n\t */\n\tpublic Launchpad debug() {\n\t\tthis.debug = true;\n\t\treturn this;\n\t}\n\n\t/**\n\t * Install a number of bundles based on their bundle specification. 
A bundle\n\t * specification is the format used in for example -runbundles.\n\t * \n\t * @param specification the bundle specifications\n\t * @return a list of bundles\n\t */\n\tpublic List bundles(String specification) {\n\t\ttry {\n\t\t\treturn LaunchpadBuilder.workspace.getLatestBundles(projectDir.getAbsolutePath(), specification)\n\t\t\t\t.stream()\n\t\t\t\t.map(File::new)\n\t\t\t\t.map(this::bundle)\n\t\t\t\t.collect(Collectors.toList());\n\t\t} catch (Exception e) {\n\t\t\tthrow Exceptions.duck(e);\n\t\t}\n\t}\n\n\t/**\n\t * Install a number of bundles\n\t * \n\t * @param runbundles the list of bundles\n\t * @return a list of bundle objects\n\t */\n\tpublic List bundles(File... runbundles) {\n\t\tif (runbundles == null || runbundles.length == 0)\n\t\t\treturn Collections.emptyList();\n\n\t\ttry {\n\t\t\tList bundles = new ArrayList<>();\n\t\t\tfor (File f : runbundles) {\n\t\t\t\tBundle b = bundle(f);\n\t\t\t\tbundles.add(b);\n\t\t\t}\n\t\t\treturn bundles;\n\t\t} catch (Exception e) {\n\t\t\tthrow Exceptions.duck(e);\n\t\t}\n\t}\n\n\t/**\n\t * Start a bundle\n\t * \n\t * @param b the bundle object\n\t */\n\tpublic void start(Bundle b) {\n\t\ttry {\n\t\t\tif (!isFragment(b)) {\n\t\t\t\treport(\"Starting %s\", b);\n\t\t\t\tb.start();\n\n\t\t\t\tSet exports = getExports(b).keySet();\n\t\t\t\tSet imports = getImports(b).keySet();\n\t\t\t\texports.removeAll(imports);\n\t\t\t\texports.retainAll(frameworkExports);\n\n\t\t\t\tif (!exports.isEmpty()) {\n\t\t\t\t\terror(\n\t\t\t\t\t\t\"bundle %s is exporting but NOT importing package(s) %s that are/is also exported by the framework.\\n\"\n\t\t\t\t\t\t\t+ \"This means that the test code and the bundle cannot share classes of these package.\",\n\t\t\t\t\t\tb, exports);\n\t\t\t\t}\n\n\t\t\t} else {\n\t\t\t\treport(\"Not starting fragment %s\", b);\n\t\t\t}\n\t\t} catch (Exception e) {\n\t\t\treport(\"Failed to start %s : %s\", b, e);\n\t\t\tthrow Exceptions.duck(e);\n\t\t}\n\t}\n\n\t/**\n\t * Start all bundles\n\t * \n\t 
* @param bs a collection of bundles\n\t */\n\tpublic void start(Collection bs) {\n\t\tbs.forEach(this::start);\n\t}\n\n\t/**\n\t * Close this framework\n\t */\n\t@Override\n\tpublic void close() throws Exception {\n\t\treport(\"Stop the framework\");\n\t\tframework.stop();\n\t\treport(\"Stopped the framework\");\n\t\tframework.waitForStop(closeTimeout);\n\t\treport(\"Framework fully stopped\");\n\t}\n\n\t/**\n\t * Get the Bundle Context. If a test bundle was installed then this is the\n\t * context of the test bundle otherwise it is the context of the framework.\n\t * To be able to proxy services it is necessary to have a test bundle\n\t * installed.\n\t * \n\t * @return the bundle context of the test bundle or the framework\n\t */\n\tpublic BundleContext getBundleContext() {\n\t\tif (testbundle != null)\n\t\t\treturn testbundle.getBundleContext();\n\n\t\treturn framework.getBundleContext();\n\t}\n\n\t/**\n\t * Get a service registered under class. If multiple services are registered\n\t * it will return the first\n\t * \n\t * @param serviceInterface the name of the service\n\t * @return a service\n\t */\n\tpublic Optional getService(Class serviceInterface) {\n\t\treturn getService(serviceInterface, null);\n\t}\n\n\tpublic Optional getService(Class serviceInterface, @Nullable String target) {\n\t\treturn getServices(serviceInterface, target, 0, 0, false).stream()\n\t\t\t.map(this::getService)\n\t\t\t.findFirst();\n\t}\n\n\t/**\n\t * Get a list of services of a given name\n\t * \n\t * @param serviceClass the service name\n\t * @return a list of services\n\t */\n\tpublic List getServices(Class serviceClass) {\n\t\treturn getServices(serviceClass, null);\n\t}\n\n\t/**\n\t * Get a list of services in the current registry\n\t * \n\t * @param serviceClass the type of the service\n\t * @param target the target, may be null\n\t * @return a list of found services currently in the registry\n\t */\n\tpublic List getServices(Class serviceClass, @Nullable String target) 
{\n\t\treturn getServices(serviceClass, target, 0, 0, false).stream()\n\t\t\t.map(this::getService)\n\t\t\t.collect(Collectors.toList());\n\t}\n\n\t/**\n\t * Get a service from a reference. If the service is null, then throw an\n\t * exception.\n\t * \n\t * @param ref the reference\n\t * @return the service, never null\n\t */\n\tpublic T getService(ServiceReference ref) {\n\t\ttry {\n\t\t\tT service = getBundleContext().getService(ref);\n\t\t\tif (service == null) {\n\t\t\t\tif (ref.getBundle() == null) {\n\t\t\t\t\tthrow new ServiceException(\n\t\t\t\t\t\t\"getService(\" + ref + \") returns null, the service is no longer registered\");\n\t\t\t\t}\n\t\t\t\tthrow new ServiceException(\"getService(\" + ref + \") returns null, this probbaly means the \\n\"\n\t\t\t\t\t+ \"component failed to activate. The cause can \\n\" + \"generally be found in the log.\\n\" + \"\");\n\t\t\t}\n\t\t\treturn service;\n\t\t} catch (Exception e) {\n\t\t\tthrow Exceptions.duck(e);\n\t\t}\n\t}\n\n\t/**\n\t * Add the standard Gogo bundles\n\t */\n\tpublic Launchpad gogo() {\n\t\ttry {\n\t\t\tbundles(\"org.apache.felix.gogo.runtime,org.apache.felix.gogo.command,org.apache.felix.gogo.shell\");\n\t\t\treturn this;\n\t\t} catch (Exception e) {\n\t\t\tthrow Exceptions.duck(e);\n\t\t}\n\t}\n\n\t/**\n\t * Add the standard Gogo bundles\n\t */\n\tpublic Launchpad snapshot() {\n\t\ttry {\n\t\t\tbundles(\"biz.aQute.bnd.runtime.snapshot\");\n\t\t\treturn this;\n\t\t} catch (Exception e) {\n\t\t\tthrow Exceptions.duck(e);\n\t\t}\n\t}\n\n\t/**\n\t * Inject an object with services and other OSGi specific values.\n\t * \n\t * @param object the object to inject\n\t */\n\n\tpublic Launchpad inject(Object object) {\n\t\ttry {\n\t\t\tinjector.inject(object);\n\t\t\treturn this;\n\t\t} catch (Exception e) {\n\t\t\tthrow Exceptions.duck(e);\n\t\t}\n\t}\n\n\t/**\n\t * Install a bundle from a file\n\t * \n\t * @param file the file to install\n\t * @return a bundle\n\t */\n\tpublic Bundle install(File file) 
{\n\t\ttry {\n\t\t\treport(\"Installing %s\", file);\n\t\t\treturn framework.getBundleContext()\n\t\t\t\t.installBundle(toInstallURI(file));\n\t\t} catch (BundleException e) {\n\t\t\treport(\"Failed to install %s : %s\", file, e);\n\t\t\tthrow Exceptions.duck(e);\n\t\t}\n\t}\n\n\t/**\n\t * Create a new synthetic bundle.\n\t * \n\t * @return the bundle builder\n\t */\n\tpublic BundleBuilder bundle() {\n\t\treturn new BundleBuilder(this);\n\t}\n\n\t/**\n\t * Create a new object and inject it.\n\t * \n\t * @param type the type of object\n\t * @return a new object injected and all\n\t */\n\tpublic T newInstance(Class type) {\n\t\ttry {\n\t\t\treturn injector.newInstance(type);\n\t\t} catch (Exception e) {\n\t\t\treport(\"Failed to create and instance for %s : %s\", type, e);\n\t\t\tthrow Exceptions.duck(e);\n\t\t}\n\t}\n\n\t/**\n\t * Show the information of how the framework is setup and is running\n\t */\n\tpublic Launchpad report() throws InvalidSyntaxException {\n\t\tboolean old = debug;\n\t\tdebug = true;\n\t\treportBundles();\n\t\treportServices();\n\t\treportEvents();\n\t\tdebug = old;\n\t\treturn this;\n\t}\n\n\t/**\n\t * Show the installed bundles\n\t */\n\tpublic void reportBundles() {\n\t\tStream.of(framework.getBundleContext()\n\t\t\t.getBundles())\n\t\t\t.forEach(bb -> {\n\t\t\t\treport(\"%4s %s\", bundleStateToString(bb.getState()), bb);\n\t\t\t});\n\t}\n\n\t/**\n\t * Show the registered service\n\t */\n\tpublic void reportServices() throws InvalidSyntaxException {\n\t\tStream.of(framework.getBundleContext()\n\t\t\t.getAllServiceReferences(null, null))\n\t\t\t.forEach(sref -> {\n\t\t\t\treport(\"%s\", sref);\n\t\t\t});\n\t}\n\n\t/**\n\t * Wait for a Service Reference to be registered\n\t * \n\t * @param class1 the name of the service\n\t * @param timeoutInMs the time to wait\n\t * @return a service reference\n\t */\n\tpublic Optional> waitForServiceReference(Class class1, long timeoutInMs) {\n\n\t\treturn getServices(class1, null, 1, timeoutInMs, 
false).stream()\n\t\t\t.findFirst();\n\n\t}\n\n\t/**\n\t * Wait for a Service Reference to be registered\n\t * \n\t * @param class1 the name of the service\n\t * @param timeoutInMs the time to wait\n\t * @return a service reference\n\t */\n\tpublic Optional> waitForServiceReference(Class class1, long timeoutInMs, String target) {\n\n\t\treturn getServices(class1, target, 1, timeoutInMs, false).stream()\n\t\t\t.findFirst();\n\n\t}\n\n\t/**\n\t * Wait for service to be registered\n\t * \n\t * @param class1 name of the service\n\t * @param timeoutInMs timeout in ms\n\t * @return a service\n\t */\n\tpublic Optional waitForService(Class class1, long timeoutInMs) {\n\t\treturn this.waitForService(class1, timeoutInMs, null);\n\t}\n\n\t/**\n\t * Wait for service to be registered\n\t * \n\t * @param class1 name of the service\n\t * @param timeoutInMs timeout in ms\n\t * @param target filter, may be null\n\t * @return a service\n\t */\n\tpublic Optional waitForService(Class class1, long timeoutInMs, String target) {\n\t\ttry {\n\t\t\treturn getServices(class1, target, 1, timeoutInMs, false).stream()\n\t\t\t\t.findFirst()\n\t\t\t\t.map(getBundleContext()::getService);\n\t\t} catch (Exception e) {\n\t\t\tthrow Exceptions.duck(e);\n\t\t}\n\t}\n\n\t/**\n\t * Turn a service reference's properties into a Map\n\t * \n\t * @param reference the reference\n\t * @return a Map with all the properties of the reference\n\t */\n\tpublic Map toMap(ServiceReference reference) {\n\t\tMap map = new HashMap<>();\n\n\t\tfor (String key : reference.getPropertyKeys()) {\n\t\t\tmap.put(key, reference.getProperty(key));\n\t\t}\n\n\t\treturn map;\n\t}\n\n\t/**\n\t * Get a bundle by symbolic name\n\t */\n\tpublic Optional getBundle(String bsn) {\n\t\treturn Stream.of(getBundleContext().getBundles())\n\t\t\t.filter(b -> bsn.equals(b.getSymbolicName()))\n\t\t\t.findFirst();\n\t}\n\n\t/**\n\t * Broadcast a message to many services at once\n\t */\n\n\t@SuppressWarnings(\"unchecked\")\n\tpublic int 
broadcast(Class type, Consumer consumer) {\n\t\tServiceTracker tracker = new ServiceTracker<>(getBundleContext(), type, null);\n\t\ttracker.open();\n\t\tint n = 0;\n\t\ttry {\n\t\t\tfor (T instance : (T[]) tracker.getServices()) {\n\t\t\t\tconsumer.accept(instance);\n\t\t\t\tn++;\n\t\t\t}\n\t\t} finally {\n\t\t\ttracker.close();\n\t\t}\n\t\treturn n;\n\t}\n\n\t/**\n\t * Hide a service by registering a hook. This should in general be done\n\t * before you let others look. In general, the Launchpad should be started\n\t * in {@link LaunchpadBuilder#nostart()} mode. This initializes the OSGi\n\t * framework making it possible to register a service before\n\t */\n\n\tpublic Closeable hide(Class type) {\n\t\treturn hide(type, \"hide\");\n\t}\n\n\t/**\n\t * Hide a service. This will register a FindHook and an EventHook for the\n\t * type. This will remove the visibility of all services with that type for\n\t * all bundles _except_ the testbundle. Notice that bundles that already\n\t * obtained a references are not affected. If you use this facility it is\n\t * best to not start the framework before you hide a service. You can\n\t * indicate this to the build with {@link LaunchpadBuilder#nostart()}. The\n\t * framework can be started after creation with {@link #start()}. 
Notice\n\t * that services through the testbundle remain visible for this hide.\n\t * \n\t * @param type the type to hide\n\t * @param reason the reason why it is hidden\n\t * @return a Closeable, when closed it will remove the hooks\n\t */\n\tpublic Closeable hide(Class type, String reason) {\n\t\tServiceRegistration eventReg = framework.getBundleContext()\n\t\t\t.registerService(EventListenerHook.class, new EventListenerHook() {\n\t\t\t\t@Override\n\t\t\t\tpublic void event(ServiceEvent event, Map> listeners) {\n\n\t\t\t\t\tServiceReference ref = event.getServiceReference();\n\t\t\t\t\tif (selectForHiding(type, ref))\n\t\t\t\t\t\tlisteners.clear();\n\t\t\t\t}\n\n\t\t\t\t@Override\n\t\t\t\tpublic String toString() {\n\t\t\t\t\treturn \"Launchpad[\" + reason + \"]\";\n\t\t\t\t}\n\t\t\t}, null);\n\n\t\tServiceRegistration findReg = framework.getBundleContext()\n\t\t\t.registerService(FindHook.class, new FindHook() {\n\n\t\t\t\t@Override\n\t\t\t\tpublic void find(BundleContext context, String name, String filter, boolean allServices,\n\t\t\t\t\tCollection> references) {\n\t\t\t\t\tif (name == null || name.equals(type.getName())) {\n\t\t\t\t\t\treferences.removeIf(ref -> selectForHiding(type, ref));\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t@Override\n\t\t\t\tpublic String toString() {\n\t\t\t\t\treturn \"Launchpad[\" + reason + \"]\";\n\t\t\t\t}\n\n\t\t\t}, null);\n\n\t\treturn () -> {\n\t\t\teventReg.unregister();\n\t\t\tfindReg.unregister();\n\t\t};\n\t}\n\n\t/**\n\t * Check of a service reference has one of the given types in its object\n\t * class\n\t * \n\t * @param serviceReference the service reference to check\n\t * @param types the set of types\n\t * @return true if one of the types name is in the service reference's\n\t * objectClass property\n\t */\n\tpublic boolean isOneOfType(ServiceReference serviceReference, Class... 
types) {\n\t\tString[] objectClasses = (String[]) serviceReference.getProperty(Constants.OBJECTCLASS);\n\t\tfor (Class type : types) {\n\t\t\tString name = type.getName();\n\n\t\t\tfor (String objectClass : objectClasses) {\n\t\t\t\tif (objectClass.equals(name))\n\t\t\t\t\treturn true;\n\t\t\t}\n\t\t}\n\t\treturn false;\n\t}\n\n\tprivate boolean selectForHiding(Class type, ServiceReference ref) {\n\n\t\t//\n\t\t// We never hide services registered by the testbundle\n\t\t//\n\n\t\tif (ref.getBundle() == testbundle)\n\t\t\treturn false;\n\n\t\t// only hide references when one of their\n\t\t// service interfaces is of the hidden type\n\n\t\treturn isOneOfType(ref, type);\n\t}\n\n\t/**\n\t * Start the framework if not yet started\n\t */\n\n\tpublic void start() {\n\t\ttry {\n\t\t\tframework.start();\n\t\t\tList toBeStarted = new ArrayList<>();\n\t\t\tfor (String path : runspec.runbundles) {\n\t\t\t\tFile file = new File(path);\n\t\t\t\tif (!file.isFile())\n\t\t\t\t\tthrow new IllegalArgumentException(\"-runbundle \" + file + \" does not exist or is not a file\");\n\n\t\t\t\tBundle b = install(file);\n\t\t\t\tif (!isFragment(b)) {\n\t\t\t\t\ttoBeStarted.add(b);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tFrameworkWiring fw = framework.adapt(FrameworkWiring.class);\n\t\t\tfw.resolveBundles(toBeStarted);\n\n\t\t\tCollections.sort(toBeStarted, this::startorder);\n\t\t\ttoBeStarted.forEach(this::start);\n\n\t\t\tif (hasTestBundle)\n\t\t\t\ttestbundle();\n\n\t\t\ttoBeStarted.forEach(this::start);\n\n\t\t} catch (BundleException e) {\n\t\t\tthrow Exceptions.duck(e);\n\t\t}\n\t}\n\n\t// reverse ordering. I.e. 
highest priority is first\n\tint startorder(Bundle a, Bundle b) {\n\n\t\treturn Integer.compare(getPriority(b), getPriority(a));\n\t}\n\n\tprivate int getPriority(Bundle b) {\n\t\ttry {\n\t\t\tString h = b.getHeaders()\n\t\t\t\t.get(BUNDLE_PRIORITY);\n\t\t\tif (h != null)\n\t\t\t\treturn Integer.parseInt(h);\n\t\t} catch (Exception e) {\n\t\t\t// ignore\n\t\t}\n\t\treturn 0;\n\t}\n\n\t/**\n\t * Stop the framework if not yet stopped\n\t */\n\n\tpublic void stop() {\n\t\ttry {\n\t\t\treport(\"Stopping the framework\");\n\t\t\tframework.stop();\n\t\t} catch (BundleException e) {\n\t\t\treport(\"Could not stop the framework : %s\", e);\n\t\t\tthrow Exceptions.duck(e);\n\t\t}\n\t}\n\n\t/**\n\t * Set the test bundle\n\t */\n\tpublic void testbundle() {\n\t\tif (testbundle != null) {\n\t\t\tthrow new IllegalArgumentException(\"Test bundle already exists\");\n\t\t}\n\t\ttry {\n\t\t\tByteArrayOutputStream bout = new ByteArrayOutputStream();\n\t\t\tManifest man = new Manifest();\n\t\t\tman.getMainAttributes()\n\t\t\t\t.putValue(\"Manifest-Version\", \"1\");\n\t\t\tString name = projectDir.getName()\n\t\t\t\t.toUpperCase();\n\t\t\treport(\"Creating test bundle %s\", name);\n\t\t\tman.getMainAttributes()\n\t\t\t\t.putValue(Constants.BUNDLE_SYMBOLICNAME, name);\n\t\t\tman.getMainAttributes()\n\t\t\t\t.putValue(Constants.BUNDLE_MANIFESTVERSION, \"2\");\n\t\t\tJarOutputStream jout = new JarOutputStream(bout, man);\n\t\t\tjout.close();\n\t\t\tByteArrayInputStream bin = new ByteArrayInputStream(bout.toByteArray());\n\t\t\tthis.testbundle = framework.getBundleContext()\n\t\t\t\t.installBundle(name, bin);\n\t\t\tthis.testbundle.start();\n\t\t} catch (Exception e) {\n\t\t\treport(\"Failed to create test bundle\");\n\t\t\tthrow Exceptions.duck(e);\n\t\t}\n\t}\n\n\t/**\n\t * Register a service. You can specify the type and the instance as well as\n\t * the properties. The properties are specified as varargs. 
That means you\n\t * can define a property by specifying the key (which must be a String) and\n\t * the value consecutively. The value can be any of the types allowed by the\n\t * service properties.\n\t * \n\t *
      \n\t * fw.register(Foo.class, instance, \"foo\", 10, \"bar\", new long[] {\n\t * \t1, 2, 3\n\t * });\n\t * 
      \n\t * \n\t * @param type the service type\n\t * @param instance the service object\n\t * @param props the service properties specified as a seq of \"key\", value\n\t * @return the service registration\n\t */\n\tpublic ServiceRegistration register(Class type, T instance, Object... props) {\n\t\treport(\"Registering service %s %s\", type, instance, Arrays.toString(props));\n\t\tHashtable ht = new Hashtable<>();\n\t\tfor (int i = 0; i < props.length; i += 2) {\n\t\t\tString key = (String) props[i];\n\t\t\tObject value = null;\n\t\t\tif (i + 1 < props.length) {\n\t\t\t\tvalue = props[i + 1];\n\t\t\t}\n\t\t\tht.put(key, value);\n\t\t}\n\t\treturn getBundleContext().registerService(type, instance, ht);\n\t}\n\n\t/**\n\t * Return the framework object\n\t * \n\t * @return the framework object\n\t */\n\tpublic Framework getFramework() {\n\t\treturn framework;\n\t}\n\n\t/**\n\t * Add a component class. This creates a little bundle that holds the\n\t * component class so that bnd adds the DS XML. However, it also imports the\n\t * package of the component class so that in runtime DS will load it from\n\t * the classpath.\n\t */\n\n\tpublic Bundle component(Class type) {\n\t\treturn bundle().addResource(type)\n\t\t\t.start();\n\t}\n\n\t/**\n\t * Runs the given code within the context of a synthetic bundle. Creates a\n\t * synthetic bundle and adds the supplied class to it using\n\t * {@link BundleBuilder#addResourceWithCopy}. 
It then loads the class using\n\t * the synthetic bundle's class loader and instantiates it using the public,\n\t * no-parameter constructor.\n\t * \n\t * @param clazz the class to instantiate within the context of the\n\t * framework.\n\t * @return The instantiated object.\n\t * @see BundleBuilder#addResourceWithCopy(Class)\n\t */\n\tpublic T instantiateInFramework(Class clazz) {\n\t\ttry {\n\t\t\tclazz.getConstructor();\n\t\t} catch (NoSuchMethodException e) {\n\t\t\tExceptions.duck(e);\n\t\t}\n\t\tBundle b = bundle().addResourceWithCopy(clazz)\n\t\t\t.start();\n\t\ttry {\n\t\t\t@SuppressWarnings(\"unchecked\")\n\t\t\tClass insideClass = (Class) b.loadClass(clazz.getName());\n\t\t\treturn insideClass.getConstructor()\n\t\t\t\t.newInstance();\n\t\t} catch (Exception e) {\n\t\t\tthrow Exceptions.duck(e);\n\t\t}\n\t}\n\n\t/**\n\t * Check if a bundle is a fragement\n\t * \n\t * @param b the bundle to check\n\t */\n\tpublic boolean isFragment(Bundle b) {\n\t\treturn b.getHeaders()\n\t\t\t.get(Constants.FRAGMENT_HOST) != null;\n\t}\n\n\t/**\n\t * Check if a bundle is in the ACTIVE state\n\t * \n\t * @param b the bundle to check\n\t */\n\tpublic boolean isActive(Bundle b) {\n\t\treturn b.getState() == Bundle.ACTIVE;\n\t}\n\n\t/**\n\t * Check if a bundle is in the RESOLVED state\n\t * \n\t * @param b the bundle to check\n\t */\n\tpublic boolean isResolved(Bundle b) {\n\t\treturn b.getState() == Bundle.RESOLVED;\n\t}\n\n\t/**\n\t * Check if a bundle is in the INSTALLED state\n\t * \n\t * @param b the bundle to check\n\t */\n\tpublic boolean isInstalled(Bundle b) {\n\t\treturn b.getState() == Bundle.INSTALLED;\n\t}\n\n\t/**\n\t * Check if a bundle is in the UNINSTALLED state\n\t * \n\t * @param b the bundle to check\n\t */\n\tpublic boolean isUninstalled(Bundle b) {\n\t\treturn b.getState() == Bundle.UNINSTALLED;\n\t}\n\n\t/**\n\t * Check if a bundle is in the STARTING state\n\t * \n\t * @param b the bundle to check\n\t */\n\tpublic boolean isStarting(Bundle b) 
{\n\t\treturn b.getState() == Bundle.STARTING;\n\t}\n\n\t/**\n\t * Check if a bundle is in the STOPPING state\n\t * \n\t * @param b the bundle to check\n\t */\n\tpublic boolean isStopping(Bundle b) {\n\t\treturn b.getState() == Bundle.STOPPING;\n\t}\n\n\t/**\n\t * Check if a bundle is in the ACTIVE or STARTING state\n\t * \n\t * @param b the bundle to check\n\t */\n\tpublic boolean isRunning(Bundle b) {\n\t\treturn isActive(b) || isStarting(b);\n\t}\n\n\t/**\n\t * Check if a bundle is in the RESOLVED or ACTIVE or STARTING state\n\t * \n\t * @param b the bundle to check\n\t */\n\tpublic boolean isReady(Bundle b) {\n\t\treturn isResolved(b) || isActive(b) || isStarting(b);\n\t}\n\n\tprivate ParameterMap getExports(Bundle b) {\n\t\treturn new ParameterMap(b.getHeaders()\n\t\t\t.get(Constants.EXPORT_PACKAGE));\n\t}\n\n\tprivate ParameterMap getImports(Bundle b) {\n\t\treturn new ParameterMap(b.getHeaders()\n\t\t\t.get(Constants.IMPORT_PACKAGE));\n\t}\n\n\tprivate String toInstallURI(File c) {\n\t\treturn \"reference:\" + c.toURI();\n\t}\n\n\tObject getService(Injector.Target param) {\n\n\t\ttry {\n\t\t\tif (param.type == BundleContext.class) {\n\t\t\t\treturn getBundleContext();\n\t\t\t}\n\t\t\tif (param.type == Bundle.class)\n\t\t\t\treturn testbundle;\n\n\t\t\tif (param.type == Framework.class) {\n\t\t\t\treturn framework;\n\t\t\t}\n\t\t\tif (param.type == Bundle[].class) {\n\t\t\t\treturn framework.getBundleContext()\n\t\t\t\t\t.getBundles();\n\t\t\t}\n\n\t\t\tService service = param.annotation;\n\t\t\tString target = service.target()\n\t\t\t\t.isEmpty() ? 
null : service.target();\n\n\t\t\tClass serviceClass = service.service();\n\n\t\t\tif (serviceClass == Object.class)\n\t\t\t\tserviceClass = getServiceType(param.type);\n\n\t\t\tif (serviceClass == null)\n\t\t\t\tserviceClass = getServiceType(param.primaryType);\n\n\t\t\tif (serviceClass == null)\n\t\t\t\tthrow new IllegalArgumentException(\"Cannot define service class for \" + param);\n\n\t\t\tlong timeout = service.timeout();\n\t\t\tif (timeout <= 0)\n\t\t\t\ttimeout = SERVICE_DEFAULT_TIMEOUT;\n\n\t\t\tboolean multiple = isMultiple(param.type);\n\t\t\tint cardinality = multiple ? service.minimum() : 1;\n\n\t\t\tList> matchedReferences = getServices(serviceClass, target, cardinality,\n\t\t\t\ttimeout, true);\n\n\t\t\tif (multiple)\n\t\t\t\treturn matchedReferences;\n\t\t\telse\n\t\t\t\treturn matchedReferences.get(0);\n\n\t\t} catch (Exception e) {\n\t\t\tthrow Exceptions.duck(e);\n\t\t}\n\t}\n\n\t@SuppressWarnings({\n\t\t\"rawtypes\", \"unchecked\"\n\t})\n\tpublic List> getServices(Class serviceClass, @Nullable String target, int cardinality,\n\t\tlong timeout, boolean exception) {\n\t\ttry {\n\n\t\t\tString className = serviceClass.getName();\n\n\t\t\tServiceTracker tracker = injectedDoNotClose.computeIfAbsent(serviceClass, (c) -> {\n\t\t\t\tServiceTracker t = new ServiceTracker(framework.getBundleContext(), className,\n\t\t\t\t\tnull);\n\t\t\t\tt.open(true);\n\t\t\t\treturn t;\n\t\t\t});\n\n\t\t\tlong deadline = System.currentTimeMillis() + timeout;\n\n\t\t\twhile (true) {\n\n\t\t\t\t// we get the ALL services regardless of class space or hidden\n\t\t\t\t// by hooks or filters.\n\n\t\t\t\t@SuppressWarnings(\"unchecked\")\n\t\t\t\tList> allReferences = (List>) getReferences(tracker,\n\t\t\t\t\tserviceClass);\n\n\t\t\t\tList> visibleReferences = allReferences.stream()\n\t\t\t\t\t.filter(ref -> ref.isAssignableTo(framework, className))\n\t\t\t\t\t.collect(Collectors.toList());\n\n\t\t\t\tList> unhiddenReferences = new ArrayList<>(visibleReferences);\n\t\t\t\tMap, 
FindHook> hookMap = new HashMap<>();\n\n\t\t\t\tfor (FindHook hook : this.hooks.getServices(new FindHook[0])) {\n\t\t\t\t\tList> original = new ArrayList<>(unhiddenReferences);\n\t\t\t\t\thook.find(testbundle.getBundleContext(), className, target, true, (Collection) unhiddenReferences);\n\t\t\t\t\toriginal.removeAll(unhiddenReferences);\n\t\t\t\t\tfor (ServiceReference ref : original) {\n\t\t\t\t\t\thookMap.put(ref, hook);\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tList> matchedReferences;\n\n\t\t\t\tif (target == null) {\n\t\t\t\t\tmatchedReferences = new ArrayList<>(unhiddenReferences);\n\t\t\t\t} else {\n\t\t\t\t\tFilter filter = framework.getBundleContext()\n\t\t\t\t\t\t.createFilter(target);\n\t\t\t\t\tmatchedReferences = visibleReferences.stream()\n\t\t\t\t\t\t.filter(filter::match)\n\t\t\t\t\t\t.collect(Collectors.toList());\n\t\t\t\t}\n\n\t\t\t\tif (cardinality <= matchedReferences.size()) {\n\t\t\t\t\treturn matchedReferences;\n\t\t\t\t}\n\n\t\t\t\tif (deadline < System.currentTimeMillis()) {\n\t\t\t\t\tString error = \"Injection of service \" + className;\n\t\t\t\t\tif (target != null)\n\t\t\t\t\t\terror += \" with target \" + target;\n\n\t\t\t\t\terror += \" failed.\";\n\n\t\t\t\t\tif (allReferences.size() > visibleReferences.size()) {\n\t\t\t\t\t\tList> invisibleReferences = new ArrayList<>(allReferences);\n\t\t\t\t\t\tinvisibleReferences.removeAll(visibleReferences);\n\t\t\t\t\t\tfor (ServiceReference r : invisibleReferences) {\n\t\t\t\t\t\t\terror += \"\\nInvisible reference \" + r + \"[\" + r.getProperty(Constants.SERVICE_ID)\n\t\t\t\t\t\t\t\t+ \"] from bundle \" + r.getBundle();\n\n\t\t\t\t\t\t\tString[] objectClass = (String[]) r.getProperty(Constants.OBJECTCLASS);\n\t\t\t\t\t\t\tfor (String clazz : objectClass) {\n\t\t\t\t\t\t\t\terror += \"\\n \" + clazz + \"\\n registrar: \"\n\t\t\t\t\t\t\t\t\t+ getSource(clazz, r.getBundle()).orElse(\"null\") + \"\\n framework: \"\n\t\t\t\t\t\t\t\t\t+ getSource(clazz, 
framework).orElse(\"null\");\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\t\tif (visibleReferences.size() > unhiddenReferences.size()) {\n\t\t\t\t\t\tList> hiddenReferences = new ArrayList<>(visibleReferences);\n\t\t\t\t\t\thiddenReferences.removeAll(unhiddenReferences);\n\t\t\t\t\t\tfor (ServiceReference r : hiddenReferences) {\n\t\t\t\t\t\t\terror += \"\\nHidden (FindHook) Reference \" + r + \" from bundle \" + r.getBundle() + \" hook \"\n\t\t\t\t\t\t\t\t+ hookMap.get(r);\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\t\tif (unhiddenReferences.size() > matchedReferences.size()) {\n\t\t\t\t\t\tList> untargetReferences = new ArrayList<>(unhiddenReferences);\n\t\t\t\t\t\tuntargetReferences.removeAll(matchedReferences);\n\t\t\t\t\t\terror += \"\\nReference not matched by the target filter \" + target;\n\t\t\t\t\t\tfor (ServiceReference ref : untargetReferences) {\n\t\t\t\t\t\t\terror += \"\\n \" + ref + \" : \" + getProperties(ref);\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\n\t\t\t\t\tif (exception)\n\t\t\t\t\t\tthrow new TimeoutException(error);\n\n\t\t\t\t\treturn Collections.emptyList();\n\t\t\t\t}\n\t\t\t\tThread.sleep(100);\n\t\t\t}\n\t\t} catch (Exception e) {\n\t\t\tthrow Exceptions.duck(e);\n\t\t}\n\t}\n\n\tprivate Map getProperties(ServiceReference ref) {\n\t\tMap map = new HashMap<>();\n\t\tfor (String k : ref.getPropertyKeys()) {\n\t\t\tObject property = ref.getProperty(k);\n\t\t\tString s;\n\t\t\tif (property != null && property.getClass()\n\t\t\t\t.isArray()) {\n\t\t\t\ts = Arrays.deepToString((Object[]) property);\n\t\t\t} else\n\t\t\t\ts = property + \"\";\n\n\t\t\tmap.put(k, s);\n\t\t}\n\t\treturn map;\n\t}\n\n\tprivate Optional getSource(String className, Bundle from) {\n\t\ttry {\n\t\t\tClass loadClass = from.loadClass(className);\n\t\t\tBundle bundle = FrameworkUtil.getBundle(loadClass);\n\n\t\t\tif (bundle == null)\n\t\t\t\treturn Optional.of(\"from class path\");\n\t\t\telse {\n\t\t\t\tBundleWiring wiring = bundle.adapt(BundleWiring.class);\n\t\t\t\tString 
exported = \"PRIVATE! \";\n\t\t\t\tList capabilities = wiring.getCapabilities(PackageNamespace.PACKAGE_NAMESPACE);\n\t\t\t\tString packageName = loadClass.getPackage()\n\t\t\t\t\t.getName();\n\n\t\t\t\tfor (BundleCapability c : capabilities) {\n\t\t\t\t\tif (packageName.equals(c.getAttributes()\n\t\t\t\t\t\t.get(PackageNamespace.PACKAGE_NAMESPACE))) {\n\t\t\t\t\t\texported = \"Exported from \";\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\treturn Optional.of(exported + \" \" + bundle.toString());\n\t\t\t}\n\t\t} catch (Exception e) {\n\t\t\treturn Optional.empty();\n\t\t}\n\t}\n\n\tvoid reportEvents() {\n\t\tframeworkEvents.forEach(fe -> {\n\t\t\treport(\"%s\", fe);\n\t\t});\n\t}\n\n\tprivate String bundleStateToString(int state) {\n\t\tswitch (state) {\n\t\t\tcase Bundle.UNINSTALLED :\n\t\t\t\treturn \"UNIN\";\n\t\t\tcase Bundle.INSTALLED :\n\t\t\t\treturn \"INST\";\n\t\t\tcase Bundle.RESOLVED :\n\t\t\t\treturn \"RSLV\";\n\t\t\tcase Bundle.STARTING :\n\t\t\t\treturn \"STAR\";\n\t\t\tcase Bundle.ACTIVE :\n\t\t\t\treturn \"ACTV\";\n\t\t\tcase Bundle.STOPPING :\n\t\t\t\treturn \"STOP\";\n\t\t\tdefault :\n\t\t\t\treturn \"UNKN\";\n\t\t}\n\t}\n\n\tprivate List> getReferences(ServiceTracker tracker, Class serviceClass) {\n\t\tServiceReference[] references = tracker.getServiceReferences();\n\t\tif (references == null) {\n\t\t\treturn Collections.emptyList();\n\t\t}\n\t\tArrays.sort(references);\n\t\treturn Arrays.asList(references);\n\t}\n\n\tprivate Class getServiceType(Type type) {\n\t\tif (type instanceof Class)\n\t\t\treturn (Class) type;\n\n\t\tif (type instanceof ParameterizedType) {\n\t\t\tType rawType = ((ParameterizedType) type).getRawType();\n\t\t\tif (rawType instanceof Class) {\n\t\t\t\tClass rawClass = (Class) rawType;\n\t\t\t\tif (Iterable.class.isAssignableFrom(rawClass)) {\n\t\t\t\t\treturn getServiceType(((ParameterizedType) type).getActualTypeArguments()[0]);\n\t\t\t\t}\n\t\t\t\tif (Optional.class.isAssignableFrom(rawClass)) {\n\t\t\t\t\treturn 
getServiceType(((ParameterizedType) type).getActualTypeArguments()[0]);\n\t\t\t\t}\n\t\t\t\tif (ServiceReference.class.isAssignableFrom(rawClass)) {\n\t\t\t\t\treturn getServiceType(((ParameterizedType) type).getActualTypeArguments()[0]);\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\treturn null;\n\t}\n\n\tprivate boolean isMultiple(Type type) {\n\t\tif (type instanceof Class) {\n\t\t\treturn ((Class) type).isArray();\n\t\t}\n\t\tif (type instanceof ParameterizedType) {\n\n\t\t\tType rawType = ((ParameterizedType) type).getRawType();\n\t\t\tif (rawType instanceof Class) {\n\t\t\t\tClass clazz = (Class) rawType;\n\t\t\t\tif (Iterable.class.isAssignableFrom(clazz))\n\t\t\t\t\treturn true;\n\t\t\t}\n\n\t\t}\n\t\treturn false;\n\t}\n\n\tprivate boolean isParameterizedType(Type to, Class clazz) {\n\t\tif (to instanceof ParameterizedType) {\n\t\t\tif (((ParameterizedType) to).getRawType() == clazz)\n\t\t\t\treturn true;\n\t\t}\n\t\treturn false;\n\t}\n\n\tprivate Converter makeConverter() {\n\t\tConverter converter = new Converter();\n\t\tconverter.hook(null, (to, from) -> {\n\t\t\ttry {\n\t\t\t\tif (!(from instanceof ServiceReference))\n\t\t\t\t\treturn null;\n\n\t\t\t\tServiceReference reference = (ServiceReference) from;\n\n\t\t\t\tif (isParameterizedType(to, ServiceReference.class))\n\t\t\t\t\treturn reference;\n\n\t\t\t\tif (isParameterizedType(to, Map.class))\n\t\t\t\t\treturn converter.convert(to, toMap(reference));\n\n\t\t\t\tObject service = getService(reference);\n\n\t\t\t\tif (isParameterizedType(to, Optional.class))\n\t\t\t\t\treturn Optional.ofNullable(service);\n\n\t\t\t\treturn service;\n\t\t\t} catch (Exception e) {\n\t\t\t\tthrow e;\n\t\t\t}\n\t\t});\n\t\treturn converter;\n\t}\n\n\tpublic String getName() {\n\t\treturn name;\n\t}\n\n\tpublic String getClassName() {\n\t\treturn className;\n\t}\n}\n"},"message":{"kind":"string","value":"[launchpad] Started bundles twice\n\nSigned-off-by: Peter Kriens 
<8ac8092331bb621432cbd9fbe40e52149af05592@aqute.biz>\n"},"old_file":{"kind":"string","value":"biz.aQute.launchpad/src/aQute/launchpad/Launchpad.java"},"subject":{"kind":"string","value":"[launchpad] Started bundles twice"},"git_diff":{"kind":"string","value":"iz.aQute.launchpad/src/aQute/launchpad/Launchpad.java\n \t\t\tfw.resolveBundles(toBeStarted);\n \n \t\t\tCollections.sort(toBeStarted, this::startorder);\n\t\t\ttoBeStarted.forEach(this::start);\n \n \t\t\tif (hasTestBundle)\n \t\t\t\ttestbundle();"}}},{"rowIdx":2084,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"8f463de49f685aa62fa678d7786961e0bee82a0d"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"scala/scala,martijnhoekstra/scala,slothspot/scala,jvican/scala,lrytz/scala,slothspot/scala,lrytz/scala,martijnhoekstra/scala,shimib/scala,lrytz/scala,slothspot/scala,shimib/scala,martijnhoekstra/scala,martijnhoekstra/scala,slothspot/scala,martijnhoekstra/scala,scala/scala,shimib/scala,scala/scala,shimib/scala,felixmulder/scala,jvican/scala,jvican/scala,felixmulder/scala,scala/scala,lrytz/scala,slothspot/scala,lrytz/scala,felixmulder/scala,jvican/scala,lrytz/scala,jvican/scala,slothspot/scala,martijnhoekstra/scala,scala/scala,felixmulder/scala,felixmulder/scala,felixmulder/scala,shimib/scala,slothspot/scala,scala/scala,felixmulder/scala,shimib/scala,jvican/scala,jvican/scala"},"new_contents":{"kind":"string","value":"/* ____ ____ ____ ____ ______ *\\\n** / __// __ \\/ __// __ \\/ ____/ SOcos COmpiles Scala **\n** __\\_ \\/ /_/ / /__/ /_/ /\\_ \\ (c) 2002, LAMP/EPFL **\n** /_____/\\____/\\___/\\____/____/ **\n**\n\n\\* */\n\n//todo check significance of JAVA flag.\n\npackage scalac.symtab;\n\nimport scala.tools.util.Position;\n\nimport scalac.ApplicationError;\nimport scalac.Global;\nimport scalac.Phase;\nimport scalac.framework.History;\nimport 
scalac.util.ArrayApply;\nimport scalac.util.Name;\nimport scalac.util.Names;\nimport scalac.util.NameTransformer;\nimport scalac.util.Debug;\n\n\npublic abstract class Symbol implements Modifiers, Kinds {\n\n /** An empty symbol array */\n public static final Symbol[] EMPTY_ARRAY = new Symbol[0];\n\n /** An empty array of symbol arrays */\n public static final Symbol[][] EMPTY_ARRAY_ARRAY = new Symbol[0][];\n\n /** The absent symbol */\n public static final Symbol NONE = new NoSymbol();\n\n// Attribues -------------------------------------------------------------\n\n public static final int IS_ROOT = 0x00000001;\n public static final int IS_ANONYMOUS = 0x00000002;\n public static final int IS_LABEL = 0x00000010;\n public static final int IS_CONSTRUCTOR = 0x00000020;\n public static final int IS_ACCESSMETHOD = 0x00000100;\n public static final int IS_ERROR = 0x10000000;\n public static final int IS_THISTYPE = 0x20000000;\n public static final int IS_LOCALDUMMY = 0x40000000;\n public static final int IS_COMPOUND = 0x80000000;\n\n// Fields -------------------------------------------------------------\n\n /** The unique identifier generator */\n private static int ids;\n\n /** The kind of the symbol */\n public int kind;\n\n /** The position of the symbol */\n public int pos;\n\n /** The name of the symbol */\n public Name name;\n\n /** The modifiers of the symbol */\n public int flags;\n\n /** The owner of the symbol */\n private Symbol owner;\n\n /** The infos of the symbol */\n private TypeIntervalList infos;\n\n /** The attributes of the symbol */\n private final int attrs;\n\n /** The unique identifier */\n public final int id;\n\n\n// Constructors -----------------------------------------------------------\n\n /** Generic symbol constructor */\n public Symbol(int kind, Symbol owner, int pos, int flags, Name name, int attrs) {\n this.kind = kind;\n this.pos = pos;\n this.name = name;\n this.owner = owner == null ? 
this : owner;\n this.flags = flags & ~(INITIALIZED | LOCKED); // safety first\n this.attrs = attrs;\n this.id = ids++;\n }\n\n// Factories --------------------------------------------------------------\n\n /** Creates a new term owned by this symbol. */\n public final Symbol newTerm(int pos, int flags, Name name) {\n return newTerm(pos, flags, name, 0);\n }\n\n /** Creates a new constructor of this symbol. */\n public final Symbol newConstructor(int pos, int flags) {\n assert isType(): Debug.show(this);\n return new ConstructorSymbol(this, pos, flags);\n }\n\n /** Creates a new method owned by this symbol. */\n public final Symbol newMethod(int pos, int flags, Name name) {\n assert isClass(): Debug.show(this);\n return newTerm(pos, flags, name, 0);\n }\n\n /** Creates a new access method owned by this symbol. */\n public final Symbol newAccessMethod(int pos, Name name) {\n assert isClass(): Debug.show(this);\n int flags = PRIVATE | FINAL | SYNTHETIC;\n return newTerm(pos, flags, name, IS_ACCESSMETHOD);\n }\n\n /** Creates a new function owned by this symbol. */\n public final Symbol newFunction(int pos, int flags, Name name) {\n assert isTerm(): Debug.show(this);\n return newTerm(pos, flags, name, 0);\n }\n\n /** Creates a new method or function owned by this symbol. */\n public final Symbol newMethodOrFunction(int pos, int flags, Name name){\n assert isClass() || isTerm(): Debug.show(this);\n return newTerm(pos, flags, name, 0);\n }\n\n /** Creates a new label owned by this symbol. */\n public final Symbol newLabel(int pos, Name name) {\n assert isTerm(): Debug.show(this);\n return newTerm(pos, 0, name, IS_LABEL);\n }\n\n /** Creates a new field owned by this symbol. */\n public final Symbol newField(int pos, int flags, Name name) {\n assert isClass(): Debug.show(this);\n return newTerm(pos, flags, name, 0);\n }\n\n /** Creates a new variable owned by this symbol. 
*/\n public final Symbol newVariable(int pos, int flags, Name name) {\n assert isTerm(): Debug.show(this);\n return newTerm(pos, flags, name, 0);\n }\n\n /** Creates a new variable owned by this symbol. */\n public final Symbol newFieldOrVariable(int pos, int flags, Name name) {\n assert isClass() || isTerm(): Debug.show(this);\n return newTerm(pos, flags, name, 0);\n }\n\n /** Creates a new pattern variable owned by this symbol. */\n public final Symbol newPatternVariable(int pos, Name name) {\n return newVariable(pos, 0, name);\n }\n\n /** Creates a new value parameter owned by this symbol. */\n public final Symbol newVParam(int pos, int flags, Name name) {\n assert isTerm(): Debug.show(this);\n return newTerm(pos, flags | PARAM, name);\n }\n\n /**\n * Creates a new value parameter owned by this symbol and\n * initializes it with the type.\n */\n public final Symbol newVParam(int pos, int flags, Name name, Type type) {\n Symbol tparam = newVParam(pos, flags, name);\n tparam.setInfo(type);\n return tparam;\n }\n\n /**\n * Creates a new initialized dummy symbol for template of this\n * class.\n */\n public final Symbol newLocalDummy() {\n assert isClass(): Debug.show(this);\n Symbol local = newTerm(pos, 0, Names.LOCAL(this), IS_LOCALDUMMY);\n local.setInfo(Type.NoType);\n return local;\n }\n\n /** Creates a new module owned by this symbol. 
*/\n public final Symbol newModule(int pos, int flags, Name name) {\n return new ModuleSymbol(this, pos, flags, name);\n }\n\n /**\n * Creates a new package owned by this symbol and initializes it\n * with an empty scope.\n */\n public final Symbol newPackage(int pos, Name name) {\n return newPackage(pos, name, null);\n }\n\n /**\n * Creates a new package owned by this symbol, initializes it with\n * the loader and enters it in the scope if it's non-null.\n */\n public final Symbol newLoadedPackage(Name name, SymbolLoader loader,\n Scope scope)\n {\n assert loader != null: Debug.show(this) + \" - \" + name;\n Symbol peckage = newPackage(Position.NOPOS, name, loader);\n if (scope != null) scope.enterNoHide(peckage);\n return peckage;\n }\n\n /**\n * Creates a new error value owned by this symbol and initializes\n * it with an error type.\n */\n public Symbol newErrorValue(Name name) {\n Symbol symbol = newTerm(pos, SYNTHETIC, name, IS_ERROR);\n symbol.setInfo(Type.ErrorType);\n return symbol;\n }\n\n /** Creates a new type alias owned by this symbol. */\n public final Symbol newTypeAlias(int pos, int flags, Name name) {\n return new AliasTypeSymbol(this, pos, flags, name, 0);\n }\n\n /** Creates a new abstract type owned by this symbol. */\n public final Symbol newAbstractType(int pos, int flags, Name name) {\n return new AbsTypeSymbol(this, pos, flags, name, 0);\n }\n\n /** Creates a new type parameter owned by this symbol. 
*/\n public final Symbol newTParam(int pos, int flags, Name name) {\n assert isTerm(): Debug.show(this);\n return newAbstractType(pos, flags | PARAM, name);\n }\n\n /**\n * Creates a new type parameter owned by this symbol and\n * initializes it with the type.\n */\n public final Symbol newTParam(int pos, int flags, Name name, Type type) {\n Symbol tparam = newTParam(pos, flags, name);\n tparam.setInfo(type);\n return tparam;\n }\n\n /**\n * Creates a new type alias owned by this symbol and initializes\n * it with the info.\n */\n public final Symbol newTypeAlias(int pos, int flags, Name name, Type info){\n Symbol alias = newTypeAlias(pos, flags, name);\n alias.setInfo(info);\n alias.allConstructors().setInfo(Type.MethodType(EMPTY_ARRAY, info));\n return alias;\n }\n\n /** Creates a new class owned by this symbol. */\n public final ClassSymbol newClass(int pos, int flags, Name name) {\n return newClass(pos, flags, name, 0);\n }\n\n /** Creates a new anonymous class owned by this symbol. 
*/\n public final ClassSymbol newAnonymousClass(int pos, Name name) {\n assert isTerm(): Debug.show(this);\n return newClass(pos, 0, name, IS_ANONYMOUS);\n }\n\n /**\n * Creates a new class with a linked module, both owned by this\n * symbol, initializes them with the loader and enters the class\n * and the module in the scope if it's non-null.\n */\n public final ClassSymbol newLoadedClass(int flags, Name name,\n SymbolLoader loader, Scope scope)\n {\n assert isPackageClass(): Debug.show(this);\n assert loader != null: Debug.show(this) + \" - \" + name;\n ClassSymbol clasz = new LinkedClassSymbol(this, flags, name);\n clasz.setInfo(loader);\n clasz.allConstructors().setInfo(loader);\n clasz.linkedModule().setInfo(loader);\n clasz.linkedModule().moduleClass().setInfo(loader);\n if (scope != null) scope.enterNoHide(clasz);\n if (scope != null) scope.enterNoHide(clasz.linkedModule());\n return clasz;\n }\n\n /**\n * Creates a new error class owned by this symbol and initializes\n * it with an error type.\n */\n public ClassSymbol newErrorClass(Name name) {\n ClassSymbol symbol = newClass(pos, SYNTHETIC, name, IS_ERROR);\n Scope scope = new ErrorScope(this);\n symbol.setInfo(Type.compoundType(Type.EMPTY_ARRAY, scope, this));\n symbol.allConstructors().setInfo(Type.ErrorType);\n return symbol;\n }\n\n /** Creates a new term owned by this symbol. */\n final Symbol newTerm(int pos, int flags, Name name, int attrs) {\n return new TermSymbol(this, pos, flags, name, attrs);\n }\n\n /** Creates a new package owned by this symbol. */\n final Symbol newPackage(int pos, Name name, Type info) {\n assert isPackageClass(): Debug.show(this);\n Symbol peckage = newModule(pos, JAVA | PACKAGE, name);\n if (info == null) info = Type.compoundType(\n Type.EMPTY_ARRAY, new Scope(), peckage.moduleClass());\n peckage.moduleClass().setInfo(info);\n return peckage;\n }\n\n /** Creates a new class owned by this symbol. 
*/\n final ClassSymbol newClass(int pos, int flags, Name name, int attrs) {\n return new ClassSymbol(this, pos, flags, name, attrs);\n }\n\n /** Creates a new compound class owned by this symbol. */\n final ClassSymbol newCompoundClass(Type info) {\n int pos = Position.FIRSTPOS;\n Name name = Names.COMPOUND_NAME.toTypeName();\n int flags = ABSTRACT | SYNTHETIC;\n int attrs = IS_COMPOUND;\n ClassSymbol clasz = newClass(pos, flags, name, attrs);\n clasz.setInfo(info);\n clasz.primaryConstructor().setInfo(\n Type.MethodType(Symbol.EMPTY_ARRAY, clasz.typeConstructor()));\n return clasz;\n }\n\n// Copying & cloning ------------------------------------------------------\n\n /** Return a fresh symbol with the same fields as this one.\n */\n public final Symbol cloneSymbol() {\n return cloneSymbol(owner);\n }\n\n /** Return a fresh symbol with the same fields as this one and the\n * given owner.\n */\n public final Symbol cloneSymbol(Symbol owner) {\n Symbol clone = cloneSymbolImpl(owner, attrs);\n clone.setInfo(info());\n return clone;\n }\n\n protected abstract Symbol cloneSymbolImpl(Symbol owner, int attrs);\n\n /** Returns a shallow copy of the given array. 
*/\n public static Symbol[] cloneArray(Symbol[] array) {\n return cloneArray(0, array, 0);\n }\n\n /**\n * Returns a shallow copy of the given array prefixed by \"prefix\"\n * null items.\n */\n public static Symbol[] cloneArray(int prefix, Symbol[] array) {\n return cloneArray(prefix, array, 0);\n }\n\n /**\n * Returns a shallow copy of the given array suffixed by \"suffix\"\n * null items.\n */\n public static Symbol[] cloneArray(Symbol[] array, int suffix) {\n return cloneArray(0, array, suffix);\n }\n\n /**\n * Returns a shallow copy of the given array prefixed by \"prefix\"\n * null items and suffixed by \"suffix\" null items.\n */\n public static Symbol[] cloneArray(int prefix, Symbol[] array, int suffix) {\n assert prefix >= 0 && suffix >= 0: prefix + \" - \" + suffix;\n int size = prefix + array.length + suffix;\n if (size == 0) return EMPTY_ARRAY;\n Symbol[] clone = new Symbol[size];\n for (int i = 0; i < array.length; i++) clone[prefix + i] = array[i];\n return clone;\n }\n\n /** Returns the concatenation of the two arrays. */\n public static Symbol[] concat(Symbol[] array1, Symbol[] array2) {\n if (array1.length == 0) return array2;\n if (array2.length == 0) return array1;\n Symbol[] clone = cloneArray(array1.length, array2);\n for (int i = 0; i < array1.length; i++) clone[i] = array1[i];\n return clone;\n }\n\n// Setters ---------------------------------------------------------------\n\n /** Set owner */\n public Symbol setOwner(Symbol owner) {\n assert !isConstructor() && !isNone() && !isError(): Debug.show(this);\n setOwner0(owner);\n return this;\n }\n protected void setOwner0(Symbol owner) {\n this.owner = owner;\n }\n\n /** Set type -- this is an alias for setInfo(Type info) */\n public final Symbol setType(Type info) { return setInfo(info); }\n\n /**\n * Set initial information valid from start of current phase. 
This\n * information is visible in the current phase and will be\n * transformed by the current phase (except if current phase is\n * the first one).\n */\n public Symbol setInfo(Type info) {\n return setInfoAt(info, Global.instance.currentPhase);\n }\n\n /**\n * Set initial information valid from start of given phase. This\n * information is visible in the given phase and will be\n * transformed by the given phase.\n */\n private final Symbol setInfoAt(Type info, Phase phase) {\n assert info != null: Debug.show(this);\n assert phase != null: Debug.show(this);\n assert !isConstructor()\n || info instanceof Type.LazyType\n || info == Type.NoType\n || info == Type.ErrorType\n || info instanceof Type.MethodType\n || info instanceof Type.OverloadedType\n || info instanceof Type.PolyType\n : \"illegal type for \" + this + \": \" + info;\n infos = new TypeIntervalList(null, info, phase);\n if (info instanceof Type.LazyType) flags &= ~INITIALIZED;\n else flags |= INITIALIZED;\n return this;\n }\n\n /**\n * Set new information valid from start of next phase. This\n * information is only visible in next phase or through\n * \"nextInfo\". It will not be transformed by the current phase.\n */\n public final Symbol updateInfo(Type info) {\n return updateInfoAt(info, Global.instance.currentPhase.next);\n }\n\n /**\n * Set new information valid from start of given phase. 
This\n * information is only visible from the start of the given phase\n * which is also the first phase that will transform this\n * information.\n */\n private final Symbol updateInfoAt(Type info, Phase phase) {\n assert info != null: Debug.show(this);\n assert phase != null: Debug.show(this);\n assert infos != null: Debug.show(this);\n assert !phase.precedes(infos.limit()) :\n Debug.show(this) + \" -- \" + phase + \" -- \" + infos.limit();\n if (infos.limit() == phase) {\n if (infos.start == phase)\n infos = infos.prev;\n else\n infos.setLimit(infos.limit().prev);\n }\n infos = new TypeIntervalList(infos, info, phase);\n return this;\n }\n\n /** Set type of `this' in current class\n */\n public Symbol setTypeOfThis(Type tp) {\n throw new ApplicationError(this + \".setTypeOfThis\");\n }\n\n /** Set the low bound of this type variable\n */\n public Symbol setLoBound(Type lobound) {\n throw new ApplicationError(\"setLoBound inapplicable for \" + this);\n }\n\n /** Set the view bound of this type variable\n */\n public Symbol setVuBound(Type lobound) {\n throw new ApplicationError(\"setVuBound inapplicable for \" + this);\n }\n\n /** Add an auxiliary constructor to class.\n */\n public void addConstructor(Symbol constr) {\n throw new ApplicationError(\"addConstructor inapplicable for \" + this);\n }\n\n// Symbol classification ----------------------------------------------------\n\n /** Does this symbol denote an error symbol? */\n public final boolean isError() {\n return (attrs & IS_ERROR) != 0;\n }\n\n /** Does this symbol denote the none symbol? */\n public final boolean isNone() {\n return kind == Kinds.NONE;\n }\n\n /** Does this symbol denote a type? */\n public final boolean isType() {\n return kind == TYPE || kind == CLASS || kind == ALIAS;\n }\n\n /** Does this symbol denote a term? */\n public final boolean isTerm() {\n return kind == VAL;\n }\n\n /** Does this symbol denote a value? 
*/\n public final boolean isValue() {\n preInitialize();\n return kind == VAL && !(isModule() && isJava()) && !isPackage();\n }\n\n /** Does this symbol denote a stable value? */\n public final boolean isStable() {\n return kind == VAL &&\n\t ((flags & DEF) == 0) &&\n ((flags & STABLE) != 0 ||\n (flags & MUTABLE) == 0 && type().isObjectType());\n }\n\n /** Does this symbol have the STABLE flag? */\n public final boolean hasStableFlag() {\n return (flags & STABLE) != 0;\n }\n\n /** Is this symbol static (i.e. with no outer instance)? */\n public final boolean isStatic() {\n return isRoot() || owner.isStaticOwner();\n }\n\n /** Does this symbol denote a class that defines static symbols? */\n public final boolean isStaticOwner() {\n return isPackageClass() || (isStatic() && isModuleClass()\n // !!! remove later? translation does not work (yet?)\n && isJava());\n }\n\n /** Is this symbol final?\n */\n public final boolean isFinal() {\n\treturn\n\t (flags & (FINAL | PRIVATE)) != 0 || isLocal() || owner.isModuleClass();\n }\n\n /** Does this symbol denote a variable? */\n public final boolean isVariable() {\n return kind == VAL && (flags & MUTABLE) != 0;\n }\n\n /** Does this symbol denote a view bounded type variable? */\n public final boolean isViewBounded() {\n\tGlobal global = Global.instance;\n return kind == TYPE && (flags & VIEWBOUND) != 0 &&\n\t global.currentPhase.id <= global.PHASE.REFCHECK.id();\n }\n\n /**\n * Does this symbol denote a final method? A final method is one\n * that can't be overridden in a subclass. This method assumes\n * that this symbol denotes a method. It doesn't test it.\n */\n public final boolean isMethodFinal() {\n return (flags & FINAL) != 0 || isPrivate() || isLifted();\n }\n\n /** Does this symbol denote a sealed class symbol? 
*/\n public final boolean isSealed() {\n return (flags & SEALED) != 0;\n }\n\n /** Does this symbol denote a method?\n */\n public final boolean isInitializedMethod() {\n if (infos == null) return false;\n switch (rawInfo()) {\n case MethodType(_, _):\n case PolyType(_, _):\n return true;\n case OverloadedType(Symbol[] alts, _):\n for (int i = 0; i < alts.length; i++)\n if (alts[i].isMethod()) return true;\n return false;\n default:\n return false;\n }\n }\n\n public final boolean isMethod() {\n initialize();\n return isInitializedMethod();\n }\n\n public final boolean isCaseFactory() {\n return isMethod() && !isConstructor() && (flags & CASE) != 0;\n }\n\n public final boolean isAbstractClass() {\n preInitialize();\n return kind == CLASS && (flags & ABSTRACT) != 0 &&\n this != Global.instance.definitions.ARRAY_CLASS;\n }\n\n public final boolean isAbstractOverride() {\n preInitialize();\n return (flags & (ABSTRACT | OVERRIDE)) == (ABSTRACT | OVERRIDE);\n }\n\n /* Does this symbol denote an anonymous class? */\n public final boolean isAnonymousClass() {\n return isClass() && (attrs & IS_ANONYMOUS) != 0;\n }\n\n /** Does this symbol denote the root class or root module?\n */\n public final boolean isRoot() {\n return (attrs & IS_ROOT) != 0;\n }\n\n /** Does this symbol denote something loaded from a Java class? */\n public final boolean isJava() {\n preInitialize();\n return (flags & JAVA) != 0;\n }\n\n /** Does this symbol denote a Java package? */\n public final boolean isPackage() {\n return kind == VAL && (flags & PACKAGE) != 0;\n }\n\n /** Does this symbol denote a Java package class? */\n public final boolean isPackageClass() {\n return kind == CLASS && (flags & PACKAGE) != 0;\n }\n\n /** Does this symbol denote a module? */\n public final boolean isModule() {\n return kind == VAL && (flags & MODUL) != 0;\n }\n\n /** Does this symbol denote a module class? 
*/\n public final boolean isModuleClass() {\n return kind == CLASS && (flags & MODUL) != 0;\n }\n\n /** Does this symbol denote a class? */\n public final boolean isClass() {\n return kind == CLASS && (flags & PACKAGE) == 0;\n }\n\n /** Does this symbol denote a case class?\n */\n public final boolean isCaseClass() {\n preInitialize();\n return kind == CLASS && (flags & CASE) != 0;\n }\n\n /** Does this symbol denote a uniform (i.e. parameterless) class? */\n public final boolean isTrait() {\n //preInitialize(); todo: enable, problem is that then we cannot print\n // during unpickle\n return kind == CLASS && (flags & TRAIT) != 0;\n }\n\n /** Does this class symbol denote a compound type symbol? */\n public final boolean isCompoundSym() {\n return (attrs & IS_COMPOUND) != 0;\n }\n\n /** Does this symbol denote a this symbol? */\n public final boolean isThisSym() {\n return (attrs & IS_THISTYPE) != 0;\n }\n\n /** Does this symbol denote an interface? */\n public final boolean isInterface() {\n info(); // force delayed transformInfos that may change this flag\n return (flags & INTERFACE) != 0;\n }\n\n /** Does this symbol denote a type alias? */\n public final boolean isTypeAlias() {\n return kind == ALIAS;\n }\n\n /** Does this symbol denote an abstract type? */\n public final boolean isAbstractType() {\n return kind == TYPE;\n }\n\n /** Does this symbol denote a class type? */\n public final boolean isClassType() {\n return kind == CLASS;\n }\n\n /** Does this symbol denote a public symbol? */\n public final boolean isPublic() {\n return !isProtected() && !isPrivate();\n }\n\n /** Does this symbol denote a protected symbol? */\n public final boolean isProtected() {\n preInitialize();\n return (flags & PROTECTED) != 0;\n }\n\n /** Does this symbol denote a private symbol? */\n public final boolean isPrivate() {\n preInitialize();\n return (flags & PRIVATE) != 0;\n }\n\n /** Has this symbol been lifted? 
*/
    public final boolean isLifted() {
        preInitialize();
        return (flags & LIFTED) != 0;
    }

    /** Does this symbol denote a deferred symbol? */
    public final boolean isDeferred() {
        return (flags & DEFERRED) != 0;
    }

    /** Does this symbol denote a synthetic symbol? */
    public final boolean isSynthetic() {
        return (flags & SYNTHETIC) != 0;
    }

    /** Does this symbol denote an accessor? */
    public final boolean isAccessor() {
        return (flags & ACCESSOR) != 0;
    }

    /** Does this symbol denote an access method? (a method to access
     *  private or protected members from inner classes) */
    public final boolean isAccessMethod() {
        return (attrs & IS_ACCESSMETHOD) != 0;
    }

    /** Is this symbol locally defined? I.e. not a member of a class or module */
    public final boolean isLocal() {
        // Parameters of a primary constructor are not considered local:
        // they belong to the constructed class.
        return owner.kind == VAL &&
            !((flags & PARAM) != 0 && owner.isPrimaryConstructor());
    }

    /** Is this symbol a parameter? Includes type parameters of methods.
     */
    public final boolean isParameter() {
        return (flags & PARAM) != 0;
    }

    /** Is this symbol a def parameter?
     */
    public final boolean isDefParameter() {
        return (flags & (PARAM | DEF)) == (PARAM | DEF);
    }

    /** Is this class locally defined?
     * A class is local, if
     * - it is anonymous, or
     * - its owner is a value, or
     * - it is defined within a local class
     */
    public final boolean isLocalClass() {
        return isClass() &&
            (isAnonymousClass() ||
             owner.isValue() ||
             owner.isLocalClass());
    }

    /** Is this symbol an instance initializer?
     *  Always false here; overridden in TermSymbol.
     */
    public boolean isInitializer() {
        return false;
    }

    /** Is this symbol a constructor? */
    public final boolean isConstructor() {
        return (attrs & IS_CONSTRUCTOR) != 0;
    }

    /** Is this symbol the primary constructor of a type? 
*/\n public final boolean isPrimaryConstructor() {\n return isConstructor() && this == constructorClass().primaryConstructor();\n }\n\n /** Symbol was preloaded from package\n */\n public final boolean isExternal() {\n return pos == Position.NOPOS;\n }\n\n /** Is this symbol an overloaded symbol? */\n public final boolean isOverloaded() {\n switch (info()) {\n case OverloadedType(_,_): return true;\n default : return false;\n }\n }\n\n /** Does this symbol denote a label? */\n public final boolean isLabel() {\n return (attrs & IS_LABEL) != 0;\n }\n\n /** Is this symbol accessed? */\n public final boolean isAccessed() {\n return (flags & ACCESSED) != 0;\n }\n\n /** The variance of this symbol as an integer\n */\n public int variance() {\n if ((flags & COVARIANT) != 0) return 1;\n else if ((flags & CONTRAVARIANT) != 0) return -1;\n else return 0;\n }\n\n// Symbol names ----------------------------------------------------------------\n\n /** Get the fully qualified name of this Symbol\n * (this is always a normal name, never a type name)\n */\n\n /** Get the simple name of this Symbol (this is always a term name)\n */\n public Name simpleName() {\n if (isConstructor()) return constructorClass().name.toTermName();\n return name;\n }\n\n// Acess to related symbols -----------------------------------------------------\n\n /** Get type parameters */\n public Symbol[] typeParams() {\n return EMPTY_ARRAY;\n }\n\n /** Get value parameters */\n public Symbol[] valueParams() {\n return EMPTY_ARRAY;\n }\n\n /** Get result type */\n public final Type resultType() {\n return type().resultType();\n }\n\n /** Get type parameters at start of next phase */\n public final Symbol[] nextTypeParams() {\n Global.instance.nextPhase();\n Symbol[] tparams = typeParams();\n Global.instance.prevPhase();\n return tparams;\n }\n\n /** Get value parameters at start of next phase */\n public final Symbol[] nextValueParams() {\n Global.instance.nextPhase();\n Symbol[] vparams = valueParams();\n 
Global.instance.prevPhase();\n return vparams;\n }\n\n /** Get result type at start of next phase */\n public final Type nextResultType() {\n return nextType().resultType();\n }\n\n /** Get all constructors of class */\n public Symbol allConstructors() {\n return NONE;\n }\n\n /** Get primary constructor of class */\n public Symbol primaryConstructor() {\n return NONE;\n }\n\n /**\n * Returns the class linked to this module or null if there is no\n * such class. The returned value remains the same for the whole\n * life of the symbol.\n *\n * See method \"linkedModule\" to learn more about linked modules\n * and classes.\n */\n public ClassSymbol linkedClass() {\n assert isModule(): \"not a module: \" + Debug.show(this);\n return null;\n }\n\n /**\n * Returns the module linked to this class or null if there is no\n * such module. The returned value remains the same for the whole\n * life of the symbol.\n *\n * Linked modules and classes are intended to be used by the\n * symbol table loader. They are needed because it is impossible\n * to know from the name of a class or source file if it defines a\n * class or a module. For that reason a class and a module (each\n * linked to the other) are created for each of those files. Once\n * the file is read the class, the module or both are initialized\n * depending on what the file defines.\n *\n * It is guaranteed that if a class \"c\" has a linked module then\n * \"c.linkedModule().linkedClasss() == c\" and that if a module \"m\"\n * has a linked class then \"m.linkedClasss().linkedModule() == m\".\n *\n * The linked module of a Java class, is the module that contains\n * the static members of that class. A Java class has always a\n * linked module.\n *\n * The linked module of a Scala class, is the module with the same\n * name declared in the same scope. A Scala class may or may not\n * have a linked module. 
However, this does not depend on the
     * presence or absence of a module with the same name but on how
     * the class is created. Therefore a Scala class may have no
     * linked module even though there exists a module with the same
     * name in the same scope. A Scala class may also have a linked
     * module even though there exists no module with the same name in
     * the same scope. In the latter case, the linked module would be
     * initialized to NoType (which prevents accesses to it).
     *
     * There is a last catch about linked modules. It may happen that
     * the symbol returned by "linkedModule" is not a module (and that
     * method "linkedClass" works on a non-module symbol). At creation
     * time, linked modules are always modules, but at initialization
     * time, it may be discovered that the module is in fact a case
     * class factory method. In that case, the module is downgraded to
     * a non-module term. This implies that from then on calls to its
     * method "moduleClass" will fail, but the links defined by the
     * methods "linkedModule" and "linkedClass" remain unchanged.
     */
    public ModuleSymbol linkedModule() {
        // Default: no linked module; overridden where applicable.
        assert isClassType(): "not a class: " + Debug.show(this);
        return null;
    }

    /** Get owner */
    public Symbol owner() {
        return owner;
    }

    /** Get owner, but if owner is primary constructor of a class,
     * get class symbol instead. This is useful for type parameters
     * and value parameters in classes which have the primary constructor
     * as owner.
     */
    public Symbol classOwner() {
        Symbol owner = owner();
        Symbol clazz = owner.constructorClass();
        // Only substitute the class when the owner really is its
        // primary constructor (constructorClass() is identity otherwise).
        if (clazz.primaryConstructor() == owner) return clazz;
        else return owner;
    }

    /** The next enclosing class */
    public Symbol enclClass() {
        return owner().enclClass();
    }

    /** The next enclosing method */
    public Symbol enclMethod() {
        return isMethod() ? 
this : owner().enclMethod();\n }\n\n /** If this is a constructor, return the class it constructs.\n * Otherwise return the symbol itself.\n */\n public Symbol constructorClass() {\n return this;\n }\n\n /** Return first alternative if this has a (possibly lazy)\n * overloaded type, otherwise symbol itself.\n * Needed in ClassSymbol.primaryConstructor() and in UnPickle.\n */\n public Symbol firstAlternative() {\n if (infos == null)\n return this;\n else if (infos.info instanceof Type.OverloadedType)\n return infos.info.alternativeSymbols()[0];\n else if (infos.info instanceof LazyOverloadedType)\n return ((LazyOverloadedType) infos.info).sym1.firstAlternative();\n else\n return this;\n }\n\n /**\n * Returns the class of this module. This method may be invoked\n * only on module symbols. It returns always a non-null module\n * class symbol whose identity never changes.\n */\n public ModuleClassSymbol moduleClass() {\n throw Debug.abort(\"not a module\", this);\n }\n\n /**\n * Returns the source module of this module class. This method may\n * be invoked only on module class symbols. It returns always a\n * non-null module symbol whose identity never changes.\n *\n * This method should be used with great care. If possible, one\n * should always use moduleClass instead. For example, one should\n * write \"m.moduleClass()==c\" rather than \"m==c.sourceModule()\".\n *\n * This method is problematic because the module - module-class\n * relation is not a one - one relation. There might be more than\n * one module that share the same module class. In that case, the\n * source module of the module class is the module that created\n * the class. This implies that \"m.moduleClass().sourceModule()\"\n * may be different of \"m\". 
However, its is guaranteed that\n * \"c.sourceModule().moduleClass()\" always returns \"c\".\n *\n * Phases like \"AddInterfaces\" and \"ExpandMixins\" are two examples\n * of phases that create additional modules referring the same\n * module class.\n *\n * Even if a module class is related to only one module, the use\n * of this method may still be dangerous. The problem is that\n * modules and module classes are not always as related as one\n * might expect. For example, modules declared in a function are\n * lifted out of the function by phase \"LambdaLift\". During this\n * process, the module value is transformed into a module method\n * with a \"Ref\" argument. If the \"sourceModule\" method is used to\n * replace references to module classes by references to their\n * source modules and this is done it naively with the class of a\n * lifted module, it will yield wrong code because the the \"Ref\"\n * argument will be missing.\n */\n public ModuleSymbol sourceModule() {\n throw Debug.abort(\"not a module class\", this);\n }\n\n /** if type is a (possibly lazy) overloaded type, return its alternatves\n * else return array consisting of symbol itself\n */\n public Symbol[] alternativeSymbols() {\n Symbol[] alts = type().alternativeSymbols();\n if (alts.length == 0) return new Symbol[]{this};\n else return alts;\n }\n\n /** if type is a (possibly lazy) overloaded type, return its alternatves\n * else return array consisting of type itself\n */\n public Type[] alternativeTypes() {\n return type().alternativeTypes();\n }\n\n /** The symbol accessed by this accessor function.\n */\n public Symbol accessed() {\n assert (flags & ACCESSOR) != 0;\n String name1 = name.toString();\n if (name1.endsWith(Names._EQ.toString()))\n name1 = name1.substring(0, name1.length() - Names._EQ.length());\n return owner.info().lookup(Name.fromString(name1 + \"$\"));\n }\n\n /** The members of this class or module symbol\n */\n public Scope members() {\n return info().members();\n 
}

    /** Lookup symbol with given name; return Symbol.NONE if not found.
     */
    public Symbol lookup(Name name) {
        return info().lookup(name);
    }

// Symbol types --------------------------------------------------------------

    /** Was symbol's type updated during given phase? */
    public final boolean isUpdatedAt(Phase phase) {
        Phase next = phase.next;
        // Walk the interval list from the most recent entry backwards.
        TypeIntervalList infos = this.infos;
        while (infos != null) {
            if (infos.start == next) return true;
            if (infos.limit().precedes(next)) return false;
            infos = infos.prev;
        }
        return false;
    }

    /** Is this symbol locked? (set while its lazy info is being completed,
     *  used by info() to detect cyclic references) */
    public final boolean isLocked() {
        return (flags & LOCKED) != 0;
    }

    /** Is this symbol initialized? */
    public final boolean isInitialized() {
        return (flags & INITIALIZED) != 0;
    }

    /** Initialize the symbol (forces its lazy info, if any) */
    public final Symbol initialize() {
        info();
        return this;
    }

    /** Make sure symbol is entered
     */
    public final void preInitialize() {
        //todo: clean up
        // Only forces symbol-table loaders; unlike initialize() this does
        // not force other kinds of lazy types.
        if (infos.info instanceof SymbolLoader)
            infos.info.complete(this);
    }

    /** Get info at start of current phase; This is:
     * for a term symbol, its type
     * for a type variable, its bound
     * for a type alias, its right-hand side
     * for a class symbol, the compound type consisting of
     * its baseclasses and members.
     */
    public final Type info() {
        //if (isModule()) moduleClass().initialize();
        if ((flags & INITIALIZED) == 0) {
            Global global = Global.instance;
            Phase current = global.currentPhase;
            // Complete the lazy info in the phase that first defined it.
            global.currentPhase = rawFirstInfoStartPhase();
            Type info = rawFirstInfo();
            assert info != null : this;
            if ((flags & LOCKED) != 0) {
                // Re-entered while completing: this is a cyclic reference.
                setInfo(Type.ErrorType);
                flags |= INITIALIZED;
                throw new CyclicReference(this, info);
            }
            flags |= LOCKED;
            //System.out.println("completing " + this);//DEBUG
            info.complete(this);
            flags = flags & ~LOCKED;
            if (info instanceof SourceCompleter && (flags & SNDTIME) == 0) {
                // Source completion triggers one (and only one) second
                // pass; SNDTIME guards against recursing further.
                flags |= SNDTIME;
                Type tp = info();
                flags &= ~SNDTIME;
            } else {
                assert !(rawInfo() instanceof Type.LazyType) : this;
                //flags |= INITIALIZED;
            }
            //System.out.println("done: " + this);//DEBUG
            global.currentPhase = current;
        }
        return rawInfo();
    }

    /** Get info at start of next phase
     */
    public final Type nextInfo() {
        Global.instance.nextPhase();
        Type info = info();
        Global.instance.prevPhase();
        return info;
    }

    /** Get info at start of given phase
     */
    protected final Type infoAt(Phase phase) {
        Global global = phase.global;
        Phase current = global.currentPhase;
        global.currentPhase = phase;
        Type info = info();
        global.currentPhase = current;
        return info;
    }

    /** Get info at start of current phase, without forcing lazy types.
     */
    public final Type rawInfo() {
        return rawInfoAt(Global.instance.currentPhase);
    }

    /** Get info at start of next phase, without forcing lazy types.
     */
    public final Type rawNextInfo() {
        Global.instance.nextPhase();
        Type info = rawInfo();
        Global.instance.prevPhase();
        return info;
    }

    /** Get info at start of given phase, without forcing lazy types.
     */
    private final Type rawInfoAt(Phase phase) {
        //if (infos == null) return Type.NoType;//DEBUG
        assert infos != null : this;
        assert phase != null : this;
        if (infos.limit().id <= phase.id) {
            // Requested phase lies at or beyond the newest interval:
            // transform the info forward one phase at a time.
            switch (infos.info) {
            case LazyType():
                // don't force lazy types
                return infos.info;
            }
            while (infos.limit() != phase) {
                Phase limit = infos.limit();
                Type info = transformInfo(limit, infos.info);
                assert info != null: Debug.show(this) + " -- " + limit;
                if (info != infos.info) {
                    // Transformer produced a new info: open a new interval.
                    infos = new TypeIntervalList(infos, info, limit.next);
                } else {
                    // Unchanged: just extend the current interval.
                    infos.setLimit(limit.next);
                }
            }
            return infos.info;
        } else {
            // Requested phase lies in the past: search backwards.
            TypeIntervalList infos = this.infos;
            while (phase.id < infos.start.id && infos.prev != null)
                infos = infos.prev;
            return infos.info;
        }
    }
    // where
    private Type transformInfo(Phase phase, Type info) {
        Global global = phase.global;
        Phase current = global.currentPhase;
        switch (info) {
        case ErrorType:
        case NoType:
            // Never transformed.
            return info;
        case OverloadedType(Symbol[] alts, Type[] alttypes):
            // Transform an overloaded type by asking each alternative
            // for its info at the next phase; rebuild only if one changed.
            global.currentPhase = phase.next;
            for (int i = 0; i < alts.length; i++) {
                Type type = alts[i].info();
                if (type != alttypes[i]) {
                    Type[] types = new Type[alttypes.length];
                    for (int j = 0; j < i; j++) types[j] = alttypes[j];
                    // NOTE(review): this writes into the incoming
                    // "alttypes" array rather than the fresh "types" copy;
                    // the loop below recomputes the same slot into
                    // "types", so the line looks like a redundant mutation
                    // of shared state -- confirm before changing.
                    alttypes[i] = type;
                    for (; i < alts.length; i++)
                        types[i] = alts[i].info();
                    global.currentPhase = current;
                    return Type.OverloadedType(alts, types);
                }
            }
            global.currentPhase = current;
            return info;
        default:
            global.currentPhase = phase;
            info = phase.transformInfo(this, info);
            global.currentPhase = current;
            return info;
        }
    }

    /** Get first defined info, without forcing lazy types.
     */
    public final Type rawFirstInfo() {
        TypeIntervalList infos = this.infos;
        assert infos != null : this;
        while (infos.prev != null) infos = infos.prev;
        return infos.info;
    }

    /** Get phase that first defined an info, without forcing lazy types.
     */
    public final Phase rawFirstInfoStartPhase() {
        TypeIntervalList infos = this.infos;
        assert infos != null : this;
        while (infos.prev != null) infos = infos.prev;
        return infos.start;
    }

    /** Get type at start of current phase. 
The type of a symbol is:\n * for a type symbol, the type corresponding to the symbol itself\n * for a term symbol, its usual type\n */\n public Type type() {\n return info();\n }\n public Type getType() {\n return info();\n }\n\n /** Get type at start of next phase\n */\n public final Type nextType() {\n Global.instance.nextPhase();\n Type type = type();\n Global.instance.prevPhase();\n return type;\n }\n\n /** The infos of these symbols as an array.\n */\n static public Type[] info(Symbol[] syms) {\n Type[] tps = new Type[syms.length];\n for (int i = 0; i < syms.length; i++)\n tps[i] = syms[i].info();\n return tps;\n }\n\n /** The types of these symbols as an array.\n */\n static public Type[] type(Symbol[] syms) {\n Type[] tps = new Type[syms.length];\n for (int i = 0; i < syms.length; i++)\n tps[i] = syms[i].type();\n return tps;\n }\n static public Type[] getType(Symbol[] syms) {\n\treturn type(syms);\n }\n\n /** Get static type. */\n public final Type staticType() {\n return staticType(Type.EMPTY_ARRAY);\n }\n /** Get static type with given type argument. */\n public final Type staticType(Type arg0) {\n return staticType(new Type[]{arg0});\n }\n /** Get static type with given type arguments. */\n public final Type staticType(Type arg0, Type arg1) {\n return staticType(new Type[]{arg0, arg1});\n }\n /** Get static type with given type arguments. */\n public final Type staticType(Type[] args) {\n Type prefix = owner.staticPrefix();\n if (isType()) return Type.typeRef(prefix, this, args);\n assert args.length == 0: Debug.show(this, \" - \", args);\n return prefix.memberType(this);\n }\n\n /** Get static prefix. 
*/\n public final Type staticPrefix() {\n assert isStaticOwner(): Debug.show(this) + \" - \" + isTerm() + \" - \" + isModuleClass() + \" - \" + owner().isStaticOwner() + \" - \" + isJava();\n Global global = Global.instance;\n if (global.PHASE.EXPLICITOUTER.id() < global.currentPhase.id)\n return Type.NoPrefix;\n if (isRoot()) return thisType();\n assert sourceModule().owner() == owner(): Debug.show(this);\n assert sourceModule().type().isObjectType(): Debug.show(this);\n return Type.singleType(owner.staticPrefix(), sourceModule());\n }\n\n /** The type constructor of a symbol is:\n * For a type symbol, the type corresponding to the symbol itself, excluding\n * parameters.\n * Not applicable for term symbols.\n */\n public Type typeConstructor() {\n throw new ApplicationError(\"typeConstructor inapplicable for \" + this);\n }\n\n /** The low bound of this type variable\n */\n public Type loBound() {\n return Global.instance.definitions.ALL_TYPE();\n }\n\n /** The view bound of this type variable\n */\n public Type vuBound() {\n return Global.instance.definitions.ANY_TYPE();\n }\n\n /** Get this.type corresponding to this symbol\n */\n public Type thisType() {\n return Type.NoPrefix;\n }\n\n /** Get type of `this' in current class.\n */\n public Type typeOfThis() {\n return type();\n }\n\n /** Get this symbol of current class\n */\n public Symbol thisSym() { return this; }\n\n\n /** A total ordering between symbols that refines the class\n * inheritance graph (i.e. 
subclass.isLess(superclass) always holds).\n */\n public boolean isLess(Symbol that) {\n if (this == that) return false;\n int diff;\n if (this.isType()) {\n if (that.isType()) {\n diff = this.closure().length - that.closure().length;\n if (diff > 0) return true;\n if (diff < 0) return false;\n } else {\n return true;\n }\n } else if (that.isType()) {\n return false;\n }\n return this.id < that.id;\n }\n\n /** Return the symbol's type itself followed by all its direct and indirect\n * base types, sorted by isLess(). Overridden for class symbols.\n */\n public Type[] closure() {\n return info().closure();\n }\n\n /** Return position of `c' in the closure of this type; -1 if not there.\n */\n public int closurePos(Symbol c) {\n if (this == c) return 0;\n if (c.isCompoundSym()) return -1;\n Type[] closure = closure();\n int lo = 0;\n int hi = closure.length - 1;\n while (lo <= hi) {\n int mid = (lo + hi) / 2;\n Symbol clsym = closure[mid].symbol();\n if (c == clsym) return mid;\n else if (c.isLess(clsym)) hi = mid - 1;\n else if (clsym.isLess(c)) lo = mid + 1;\n else throw new ApplicationError();\n }\n return -1;\n }\n\n public Type baseType(Symbol sym) {\n int i = closurePos(sym);\n if (i >= 0) return closure()[i];\n else return Type.NoType;\n }\n\n /** Is this class a subclass of `c'? I.e. 
does it have a type instance\n * of `c' as indirect base class?\n */\n public boolean isSubClass(Symbol c) {\n return this == c ||\n c.isError() ||\n closurePos(c) >= 0 ||\n this == Global.instance.definitions.ALL_CLASS ||\n (this == Global.instance.definitions.ALLREF_CLASS &&\n c != Global.instance.definitions.ALL_CLASS &&\n c.isSubClass(Global.instance.definitions.ANYREF_CLASS));\n }\n\n /** Get base types of this symbol */\n public Type[] parents() {\n return info().parents();\n }\n\n// ToString -------------------------------------------------------------------\n\n /** String representation of symbol's simple name.\n * Translates expansions of operators back to operator symbol. E.g.\n * $eq => =.\n */\n public String nameString() {\n return NameTransformer.decode(simpleName());\n }\n\n /** String representation, including symbol's kind\n * e.g., \"class Foo\", \"function Bar\".\n */\n public String toString() {\n return new SymbolTablePrinter().printSymbolKindAndName(this).toString();\n }\n\n /** String representation of location.\n */\n public String locationString() {\n if (owner.kind == CLASS &&\n !owner.isAnonymousClass() && !owner.isCompoundSym() ||\n Global.instance.debug)\n return \" in \" +\n (owner.isModuleClass() ? 
owner.sourceModule() : owner);\n else\n return \"\";\n }\n\n /** String representation of definition.\n */\n public String defString() {\n return new SymbolTablePrinter().printSignature(this).toString();\n }\n\n public static String[] defString(Symbol[] defs) {\n String[] strs = new String[defs.length];\n for (int i = 0; i < defs.length; i++)\n strs[i] = defs[i].defString();\n return strs;\n }\n\n// Overloading and Overriding -------------------------------------------\n\n /** Add another overloaded alternative to this symbol.\n */\n public Symbol overloadWith(Symbol that) {\n assert isTerm() : Debug.show(this);\n assert this.name == that.name : Debug.show(this) + \" <> \" + Debug.show(that);\n assert this.owner == that.owner : Debug.show(this) + \" != \" + Debug.show(that);\n assert this.isConstructor() == that.isConstructor();\n int overflags = (this.flags & that.flags & (JAVA | ACCESSFLAGS | DEFERRED | PARAM | SYNTHETIC)) |\n ((this.flags | that.flags) & ACCESSOR);\n Symbol overloaded = (this.isConstructor())\n ? this.constructorClass().newConstructor(this.constructorClass().pos, overflags)\n : owner.newTerm(pos, overflags, name, 0);\n overloaded.setInfo(new LazyOverloadedType(this, that));\n return overloaded;\n }\n\n /** A lazy type which, when forced computed the overloaded type\n * of symbols `sym1' and `sym2'. 
It also checks that this type is well-formed.\n */\n public static class LazyOverloadedType extends Type.LazyType {\n Symbol sym1;\n Symbol sym2;\n LazyOverloadedType(Symbol sym1, Symbol sym2) {\n this.sym1 = sym1;\n this.sym2 = sym2;\n }\n\n public Symbol[] alternativeSymbols() {\n Symbol[] alts1 = sym1.alternativeSymbols();\n Symbol[] alts2 = sym2.alternativeSymbols();\n Symbol[] alts3 = new Symbol[alts1.length + alts2.length];\n System.arraycopy(alts1, 0, alts3, 0, alts1.length);\n System.arraycopy(alts2, 0, alts3, alts1.length, alts2.length);\n return alts3;\n }\n\n public Type[] alternativeTypes() {\n Type[] alts1 = sym1.alternativeTypes();\n Type[] alts2 = sym2.alternativeTypes();\n Type[] alts3 = new Type[alts1.length + alts2.length];\n System.arraycopy(alts1, 0, alts3, 0, alts1.length);\n System.arraycopy(alts2, 0, alts3, alts1.length, alts2.length);\n return alts3;\n }\n\n public void complete(Symbol overloaded) {\n overloaded.setInfo(\n Type.OverloadedType(\n alternativeSymbols(), alternativeTypes()));\n }\n\n public String toString() {\n return \"LazyOverloadedType(\" + sym1 + \",\" + sym2 + \")\";\n }\n }\n\n /**\n * Returns the symbol in type \"base\" which is overridden by this\n * symbol in class \"this.owner()\". Returns \"NONE\" if no such\n * symbol exists. The type \"base\" must be a supertype of class\n * \"this.owner()\". If \"exact\" is true, overriding is restricted to\n * symbols that have the same type. The method may return this\n * symbol only if \"base.symbol()\" is equal to \"this.owner()\".\n */\n public final Symbol overriddenSymbol(Type base, boolean exact) {\n return overriddenSymbol(base, owner(), exact);\n }\n public final Symbol overriddenSymbol(Type base) {\n return overriddenSymbol(base, false);\n }\n\n /**\n * Returns the symbol in type \"base\" which is overridden by this\n * symbol in \"clasz\". Returns \"NONE\" if no such symbol exists. 
The\n * type \"base\" must be a supertype of \"clasz\" and \"this.owner()\"\n * must be a superclass of \"clasz\". If \"exact\" is true, overriding\n * is restricted to symbols that have the same type. The method\n * may return this symbol if \"base.symbol()\" is a subclass of\n * \"this.owner()\".\n */\n public final Symbol overriddenSymbol(Type base, Symbol clasz, boolean exact) {\n Type.Relation relation = exact\n ? Type.Relation.SameType\n : Type.Relation.SuperType;\n return base.lookup(this, clasz.thisType(), relation);\n }\n public final Symbol overriddenSymbol(Type base, Symbol clasz) {\n return overriddenSymbol(base, clasz, false);\n }\n\n /**\n * Returns the symbol in type \"sub\" which overrides this symbol in\n * class \"sub.symbol()\". Returns this symbol if no such symbol\n * exists. The class \"sub.symbol()\" must be a subclass of\n * \"this.owner()\". If \"exact\" is true, overriding is restricted to\n * symbols that have the same type.\n */\n public final Symbol overridingSymbol(Type sub, boolean exact) {\n Type.Relation relation = exact\n ? 
Type.Relation.SameType\n : Type.Relation.SubType;\n return sub.lookup(this, sub, relation);\n }\n public final Symbol overridingSymbol(Type sub) {\n return overridingSymbol(sub, false);\n }\n\n /** Does this symbol override that symbol?\n */\n public boolean overrides(Symbol that) {\n return\n ((this.flags | that.flags) & PRIVATE) == 0 &&\n this.name == that.name &&\n owner.thisType().memberType(this).derefDef().isSubType(\n owner.thisType().memberType(that).derefDef());\n }\n\n /** Reset symbol to initial state\n */\n public void reset(Type completer) {\n this.flags &= SOURCEFLAGS;\n this.pos = 0;\n this.infos = null;\n this.setInfo(completer);\n }\n\n /**\n * Returns the symbol to use in case of a rebinding due to a more\n * precise type prefix.\n */\n public Symbol rebindSym() {\n return this;\n }\n\n /** return a tag which (in the ideal case) uniquely identifies\n * class symbols\n */\n public int tag() {\n return name.toString().hashCode();\n }\n}\n\n/** A class for term symbols\n */\nclass TermSymbol extends Symbol {\n\n /** Constructor */\n TermSymbol(Symbol owner, int pos, int flags, Name name, int attrs) {\n super(VAL, owner, pos, flags, name, attrs);\n assert name.isTermName(): Debug.show(this);\n }\n\n public boolean isInitializer() {\n return name == Names.INITIALIZER;\n }\n\n public Symbol[] typeParams() {\n return type().typeParams();\n }\n\n public Symbol[] valueParams() {\n return type().valueParams();\n }\n\n protected Symbol cloneSymbolImpl(Symbol owner, int attrs) {\n return new TermSymbol(owner, pos, flags, name, attrs);\n }\n\n}\n\n/** A class for constructor symbols */\nfinal class ConstructorSymbol extends TermSymbol {\n\n /** The constructed class */\n private final Symbol clasz;\n\n /** Initializes this instance. 
*/\n ConstructorSymbol(Symbol clasz, int pos, int flags) {\n super(clasz.owner(), pos, flags, Names.CONSTRUCTOR, IS_CONSTRUCTOR);\n this.clasz = clasz;\n }\n\n public boolean isInitializer() {\n return false;\n }\n\n public Symbol constructorClass() {\n return clasz;\n }\n\n protected final Symbol cloneSymbolImpl(Symbol owner, int attrs) {\n throw Debug.abort(\"illegal clone of constructor\", this);\n }\n\n}\n\n/** A class for module symbols */\npublic class ModuleSymbol extends TermSymbol {\n\n /** The module class */\n private final ModuleClassSymbol clasz;\n\n /** Initializes this instance. */\n private ModuleSymbol(Symbol owner, int pos, int flags, Name name,\n int attrs, ModuleClassSymbol clasz)\n {\n super(owner, pos, flags | MODUL | FINAL | STABLE, name, attrs);\n this.clasz = clasz != null ? clasz : new ModuleClassSymbol(this);\n setType(Type.typeRef(owner().thisType(), this.clasz,Type.EMPTY_ARRAY));\n }\n\n /** Initializes this instance. */\n ModuleSymbol(Symbol owner, int pos, int flags, Name name) {\n this(owner, pos, flags, name, 0, null);\n }\n\n public ModuleClassSymbol moduleClass() {\n // test may fail because loaded modules may be downgraded to\n // case class factory methods (see Symbol#linkedModule())\n\n assert isModule(): Debug.show(this);\n return clasz;\n }\n\n protected final Symbol cloneSymbolImpl(Symbol owner, int attrs) {\n return new ModuleSymbol(owner, pos, flags, name, attrs, clasz);\n }\n\n}\n\n/**\n * A class for linked module symbols\n *\n * @see Symbol#linkedModule()\n */\nfinal class LinkedModuleSymbol extends ModuleSymbol {\n\n /** The linked class */\n private final LinkedClassSymbol clasz;\n\n /** Initializes this instance. 
*/\n LinkedModuleSymbol(LinkedClassSymbol clasz) {\n super(clasz.owner(), clasz.pos, clasz.flags & JAVA,\n clasz.name.toTermName());\n this.clasz = clasz;\n }\n\n public ClassSymbol linkedClass() {\n return clasz;\n }\n\n}\n\n/** A base class for all type symbols.\n * It has AliasTypeSymbol, AbsTypeSymbol, ClassSymbol as subclasses.\n */\nabstract class TypeSymbol extends Symbol {\n\n /** The history of closures of this symbol */\n private final History/**/ closures;\n\n /** A cache for type constructors\n */\n private Type tycon = null;\n\n /** The primary constructor of this type */\n private Symbol constructor;\n\n /** Constructor */\n public TypeSymbol(int kind, Symbol owner, int pos, int flags, Name name, int attrs) {\n super(kind, owner, pos, flags, name, attrs);\n this.closures = new ClosureHistory();\n assert name.isTypeName() : this;\n this.constructor = newConstructor(pos, flags & CONSTRFLAGS);\n }\n\n protected final void copyConstructorInfo(TypeSymbol other) {\n {\n Type info = primaryConstructor().info().cloneType(\n primaryConstructor(), other.primaryConstructor());\n if (!isTypeAlias()) info = fixConstrType(info, other);\n other.primaryConstructor().setInfo(info);\n }\n Symbol[] alts = allConstructors().alternativeSymbols();\n for (int i = 1; i < alts.length; i++) {\n Symbol constr = other.newConstructor(alts[i].pos, alts[i].flags);\n other.addConstructor(constr);\n Type info = alts[i].info().cloneType(alts[i], constr);\n if (!isTypeAlias()) info = fixConstrType(info, other);\n constr.setInfo(info);\n }\n }\n\n private final Type fixConstrType(Type type, Symbol clone) {\n switch (type) {\n case MethodType(Symbol[] vparams, Type result):\n result = fixConstrType(result, clone);\n return new Type.MethodType(vparams, result);\n case PolyType(Symbol[] tparams, Type result):\n result = fixConstrType(result, clone);\n return new Type.PolyType(tparams, result);\n case TypeRef(Type pre, Symbol sym, Type[] args):\n if (sym != this && isTypeAlias() && 
owner().isCompoundSym())\n return type;\n assert sym == this: Debug.show(sym) + \" != \" + Debug.show(this);\n return Type.typeRef(pre, clone, args);\n case LazyType():\n return type;\n default:\n throw Debug.abort(\"unexpected constructor type:\" + clone + \":\" + type);\n }\n }\n\n /** add a constructor\n */\n public final void addConstructor(Symbol constr) {\n assert constr.isConstructor(): Debug.show(constr);\n constructor = constructor.overloadWith(constr);\n }\n\n /** Get primary constructor */\n public final Symbol primaryConstructor() {\n return constructor.firstAlternative();\n }\n\n /** Get all constructors */\n public final Symbol allConstructors() {\n return constructor;\n }\n\n /** Get type parameters */\n public final Symbol[] typeParams() {\n return primaryConstructor().info().typeParams();\n }\n\n /** Get value parameters */\n public final Symbol[] valueParams() {\n return (kind == CLASS) ? primaryConstructor().info().valueParams()\n : Symbol.EMPTY_ARRAY;\n }\n\n /** Get type constructor */\n public final Type typeConstructor() {\n if (tycon == null)\n tycon = Type.typeRef(owner().thisType(), this, Type.EMPTY_ARRAY);\n return tycon;\n }\n\n public Symbol setOwner(Symbol owner) {\n tycon = null;\n constructor.setOwner0(owner);\n switch (constructor.type()) {\n case OverloadedType(Symbol[] alts, _):\n for (int i = 0; i < alts.length; i++) alts[i].setOwner0(owner);\n }\n return super.setOwner(owner);\n }\n\n /** Get type */\n public final Type type() {\n return primaryConstructor().type().resultType();\n }\n public final Type getType() {\n return primaryConstructor().type().resultType();\n }\n\n /**\n * Get closure at start of current phase. 
The closure of a symbol\n * is a list of types which contains the type of the symbol\n * followed by all its direct and indirect base types, sorted by\n * isLess().\n */\n public final Type[] closure() {\n if (kind == ALIAS) return info().symbol().closure();\n return (Type[])closures.getValue(this);\n }\n\n public void reset(Type completer) {\n super.reset(completer);\n closures.reset();\n tycon = null;\n }\n\n\n protected final Symbol cloneSymbolImpl(Symbol owner, int attrs) {\n TypeSymbol clone = cloneTypeSymbolImpl(owner, attrs);\n copyConstructorInfo(clone);\n return clone;\n }\n\n protected abstract TypeSymbol cloneTypeSymbolImpl(Symbol owner, int attrs);\n}\n\nfinal class AliasTypeSymbol extends TypeSymbol {\n\n /** Initializes this instance. */\n AliasTypeSymbol(Symbol owner, int pos, int flags, Name name, int attrs) {\n super(ALIAS, owner, pos, flags, name, attrs);\n }\n\n protected TypeSymbol cloneTypeSymbolImpl(Symbol owner, int attrs) {\n return new AliasTypeSymbol(owner, pos, flags, name, attrs);\n }\n\n}\n\nfinal class AbsTypeSymbol extends TypeSymbol {\n\n private Type lobound = null;\n private Type vubound = null;\n\n /** Initializes this instance. */\n AbsTypeSymbol(Symbol owner, int pos, int flags, Name name, int attrs) {\n super(TYPE, owner, pos, flags, name, attrs);\n allConstructors().setInfo(Type.MethodType(EMPTY_ARRAY, Type.typeRef(owner.thisType(), this, Type.EMPTY_ARRAY)));\n }\n\n public Type loBound() {\n initialize();\n return lobound == null ? Global.instance.definitions.ALL_TYPE() : lobound;\n }\n\n public Type vuBound() {\n initialize();\n return !isViewBounded() || vubound == null\n\t ? 
Global.instance.definitions.ANY_TYPE() : vubound;\n }\n\n public Symbol setLoBound(Type lobound) {\n this.lobound = lobound;\n return this;\n }\n\n public Symbol setVuBound(Type vubound) {\n\tthis.vubound = vubound;\n return this;\n }\n\n protected TypeSymbol cloneTypeSymbolImpl(Symbol owner, int attrs) {\n TypeSymbol clone = new AbsTypeSymbol(owner, pos, flags, name, attrs);\n clone.setLoBound(loBound());\n clone.setVuBound(vuBound());\n return clone;\n }\n\n}\n\n/** A class for class symbols. */\npublic class ClassSymbol extends TypeSymbol {\n\n /** The given type of self, or NoType, if no explicit type was given.\n */\n private Symbol thisSym = this;\n\n public Symbol thisSym() { return thisSym; }\n\n /** A cache for this.thisType()\n */\n final private Type thistp = Type.ThisType(this);\n\n private final Symbol rebindSym;\n\n /** Initializes this instance. */\n ClassSymbol(Symbol owner, int pos, int flags, Name name, int attrs) {\n super(CLASS, owner, pos, flags, name, attrs);\n this.rebindSym = owner.newTypeAlias(pos, 0, Names.ALIAS(this));\n Type rebindType = new ClassAliasLazyType();\n this.rebindSym.setInfo(rebindType);\n this.rebindSym.primaryConstructor().setInfo(rebindType);\n }\n\n private class ClassAliasLazyType extends Type.LazyType {\n public void complete(Symbol ignored) {\n Symbol clasz = ClassSymbol.this;\n Symbol alias = rebindSym;\n Type prefix = clasz.owner().thisType();\n Type constrtype = clasz.type();\n constrtype = Type.MethodType(Symbol.EMPTY_ARRAY, constrtype);\n constrtype = Type.PolyType(clasz.typeParams(), constrtype);\n constrtype = constrtype.cloneType(\n clasz.primaryConstructor(), alias.primaryConstructor());\n alias.primaryConstructor().setInfo(constrtype);\n alias.setInfo(constrtype.resultType());\n }\n }\n\n /** Creates the root class. 
*/\n public static Symbol newRootClass(Global global) {\n int pos = Position.NOPOS;\n Name name = Names.ROOT.toTypeName();\n Symbol owner = Symbol.NONE;\n int flags = JAVA | PACKAGE | FINAL;\n int attrs = IS_ROOT;\n Symbol clasz = new ClassSymbol(owner, pos, flags, name, attrs);\n clasz.setInfo(global.getRootLoader());\n clasz.primaryConstructor().setInfo(\n Type.MethodType(Symbol.EMPTY_ARRAY, clasz.typeConstructor()));\n // !!! Type.MethodType(Symbol.EMPTY_ARRAY, clasz.thisType()));\n return clasz;\n }\n\n /** Creates the this-type symbol associated to this class. */\n private final Symbol newThisType() {\n return newTerm(pos, SYNTHETIC, Names.this_, IS_THISTYPE);\n }\n\n public Type thisType() {\n Global global = Global.instance;\n if (global.currentPhase.id > global.PHASE.ERASURE.id()) return type();\n return thistp;\n }\n\n public Type typeOfThis() {\n return thisSym.type();\n }\n\n public Symbol setTypeOfThis(Type tp) {\n thisSym = newThisType();\n thisSym.setInfo(tp);\n return this;\n }\n\n /** Return the next enclosing class */\n public Symbol enclClass() {\n return this;\n }\n\n public Symbol caseFieldAccessor(int index) {\n assert (flags & CASE) != 0 : this;\n Scope.SymbolIterator it = info().members().iterator();\n Symbol sym = null;\n if ((flags & JAVA) == 0) {\n\t\t\tfor (int i = 0; i <= index; i++) {\n\t\t\t\tdo {\n\t\t\t\t\tsym = it.next();\n\t\t\t\t} while (sym.kind != VAL || (sym.flags & CASEACCESSOR) == 0 || !sym.isMethod());\n\t\t\t}\n\t\t\t//System.out.println(this + \", case field[\" + index + \"] = \" + sym);//DEBUG\n\t\t} else {\n\t\t\tsym = it.next();\n\t\t\twhile ((sym.flags & SYNTHETIC) == 0) {\n\t\t\t //System.out.println(\"skipping \" + sym);\n\t\t\t sym = it.next();\n\t\t\t}\n\t\t\tfor (int i = 0; i < index; i++)\n\t\t\t\tsym = it.next();\n\t\t\t//System.out.println(\"field accessor = \" + sym);//DEBUG\n\t\t}\n\t\tassert sym != null : this;\n\t\treturn sym;\n }\n\n public final Symbol rebindSym() {\n return rebindSym;\n }\n\n public void 
reset(Type completer) {\n super.reset(completer);\n thisSym = this;\n }\n\n protected final TypeSymbol cloneTypeSymbolImpl(Symbol owner, int attrs) {\n assert !isModuleClass(): Debug.show(this);\n ClassSymbol clone = new ClassSymbol(owner, pos, flags, name, attrs);\n if (thisSym != this) clone.setTypeOfThis(typeOfThis());\n return clone;\n }\n\n}\n\n/**\n * A class for module class symbols\n *\n * @see Symbol#sourceModule()\n */\npublic final class ModuleClassSymbol extends ClassSymbol {\n\n /** The source module */\n private final ModuleSymbol module;\n\n /** Initializes this instance. */\n ModuleClassSymbol(ModuleSymbol module) {\n super(module.owner(), module.pos,\n (module.flags & MODULE2CLASSFLAGS) | MODUL | FINAL,\n module.name.toTypeName(), 0);\n primaryConstructor().flags |= PRIVATE;\n primaryConstructor().setInfo(\n Type.MethodType(Symbol.EMPTY_ARRAY, typeConstructor()));\n this.module = module;\n }\n\n public ModuleSymbol sourceModule() {\n return module;\n }\n\n}\n\n/**\n * A class for linked class symbols\n *\n * @see Symbol#linkedModule()\n */\nfinal class LinkedClassSymbol extends ClassSymbol {\n\n /** The linked module */\n private final LinkedModuleSymbol module;\n\n /** Initializes this instance. 
*/\n LinkedClassSymbol(Symbol owner, int flags, Name name) {\n super(owner, Position.NOPOS, flags, name, 0);\n this.module = new LinkedModuleSymbol(this);\n }\n\n public ModuleSymbol linkedModule() {\n return module;\n }\n\n}\n\n/** The class of Symbol.NONE\n */\nfinal class NoSymbol extends Symbol {\n\n /** Constructor */\n public NoSymbol() {\n super(Kinds.NONE, null, Position.NOPOS, 0, Names.NOSYMBOL, 0);\n super.setInfo(Type.NoType);\n }\n\n /** Set type */\n public Symbol setInfo(Type info) {\n assert info == Type.NoType : info;\n return this;\n }\n\n /** Return the next enclosing class */\n public Symbol enclClass() {\n return this;\n }\n\n /** Return the next enclosing method */\n public Symbol enclMethod() {\n return this;\n }\n\n public Symbol owner() {\n throw new ApplicationError();\n }\n\n public Type thisType() {\n return Type.NoPrefix;\n }\n\n public void reset(Type completer) {\n }\n\n protected Symbol cloneSymbolImpl(Symbol owner, int attrs) {\n throw Debug.abort(\"illegal clone\", this);\n }\n\n}\n\n/** An exception for signalling cyclic references.\n */\npublic class CyclicReference extends Type.Error {\n public Symbol sym;\n public Type info;\n public CyclicReference(Symbol sym, Type info) {\n super(\"illegal cyclic reference involving \" + sym);\n this.sym = sym;\n this.info = info;\n }\n}\n\n/** A base class for values indexed by phases. 
*/\nabstract class IntervalList {\n\n /** Interval starts at start of phase \"start\" (inclusive) */\n public final Phase start;\n /** Interval ends at start of phase \"limit\" (inclusive) */\n private Phase limit;\n\n public IntervalList(IntervalList prev, Phase start) {\n this.start = start;\n this.limit = start;\n assert start != null && (prev == null || prev.limit.next == start) :\n Global.instance.currentPhase + \" - \" + prev + \" - \" + start;\n }\n\n public Phase limit() {\n return limit;\n }\n\n public void setLimit(Phase phase) {\n assert phase != null && !phase.precedes(start) : start + \" - \" + phase;\n limit = phase;\n }\n\n public String toString() {\n return \"[\" + start + \"->\" + limit + \"]\";\n }\n\n}\n\n/** A class for types indexed by phases. */\nclass TypeIntervalList extends IntervalList {\n\n /** Previous interval */\n public final TypeIntervalList prev;\n /** Info valid during this interval */\n public final Type info;\n\n public TypeIntervalList(TypeIntervalList prev, Type info, Phase start) {\n super(prev, start);\n this.prev = prev;\n this.info = info;\n assert info != null;\n }\n\n}\n"},"new_file":{"kind":"string","value":"sources/scalac/symtab/Symbol.java"},"old_contents":{"kind":"string","value":"/* ____ ____ ____ ____ ______ *\\\n** / __// __ \\/ __// __ \\/ ____/ SOcos COmpiles Scala **\n** __\\_ \\/ /_/ / /__/ /_/ /\\_ \\ (c) 2002, LAMP/EPFL **\n** /_____/\\____/\\___/\\____/____/ **\n**\n\n\\* */\n\n//todo check significance of JAVA flag.\n\npackage scalac.symtab;\n\nimport scala.tools.util.Position;\n\nimport scalac.ApplicationError;\nimport scalac.Global;\nimport scalac.Phase;\nimport scalac.framework.History;\nimport scalac.util.ArrayApply;\nimport scalac.util.Name;\nimport scalac.util.Names;\nimport scalac.util.NameTransformer;\nimport scalac.util.Debug;\n\n\npublic abstract class Symbol implements Modifiers, Kinds {\n\n /** An empty symbol array */\n public static final Symbol[] EMPTY_ARRAY = new Symbol[0];\n\n /** An empty 
array of symbol arrays */\n public static final Symbol[][] EMPTY_ARRAY_ARRAY = new Symbol[0][];\n\n /** The absent symbol */\n public static final Symbol NONE = new NoSymbol();\n\n// Attribues -------------------------------------------------------------\n\n public static final int IS_ROOT = 0x00000001;\n public static final int IS_ANONYMOUS = 0x00000002;\n public static final int IS_LABEL = 0x00000010;\n public static final int IS_CONSTRUCTOR = 0x00000020;\n public static final int IS_ACCESSMETHOD = 0x00000100;\n public static final int IS_ERROR = 0x10000000;\n public static final int IS_THISTYPE = 0x20000000;\n public static final int IS_LOCALDUMMY = 0x40000000;\n public static final int IS_COMPOUND = 0x80000000;\n\n// Fields -------------------------------------------------------------\n\n /** The unique identifier generator */\n private static int ids;\n\n /** The kind of the symbol */\n public int kind;\n\n /** The position of the symbol */\n public int pos;\n\n /** The name of the symbol */\n public Name name;\n\n /** The modifiers of the symbol */\n public int flags;\n\n /** The owner of the symbol */\n private Symbol owner;\n\n /** The infos of the symbol */\n private TypeIntervalList infos;\n\n /** The attributes of the symbol */\n private final int attrs;\n\n /** The unique identifier */\n public final int id;\n\n\n// Constructors -----------------------------------------------------------\n\n /** Generic symbol constructor */\n public Symbol(int kind, Symbol owner, int pos, int flags, Name name, int attrs) {\n this.kind = kind;\n this.pos = pos;\n this.name = name;\n this.owner = owner == null ? this : owner;\n this.flags = flags & ~(INITIALIZED | LOCKED); // safety first\n this.attrs = attrs;\n this.id = ids++;\n }\n\n// Factories --------------------------------------------------------------\n\n /** Creates a new term owned by this symbol. 
*/\n public final Symbol newTerm(int pos, int flags, Name name) {\n return newTerm(pos, flags, name, 0);\n }\n\n /** Creates a new constructor of this symbol. */\n public final Symbol newConstructor(int pos, int flags) {\n assert isType(): Debug.show(this);\n return new ConstructorSymbol(this, pos, flags);\n }\n\n /** Creates a new method owned by this symbol. */\n public final Symbol newMethod(int pos, int flags, Name name) {\n assert isClass(): Debug.show(this);\n return newTerm(pos, flags, name, 0);\n }\n\n /** Creates a new access method owned by this symbol. */\n public final Symbol newAccessMethod(int pos, Name name) {\n assert isClass(): Debug.show(this);\n int flags = PRIVATE | FINAL | SYNTHETIC;\n return newTerm(pos, flags, name, IS_ACCESSMETHOD);\n }\n\n /** Creates a new function owned by this symbol. */\n public final Symbol newFunction(int pos, int flags, Name name) {\n assert isTerm(): Debug.show(this);\n return newTerm(pos, flags, name, 0);\n }\n\n /** Creates a new method or function owned by this symbol. */\n public final Symbol newMethodOrFunction(int pos, int flags, Name name){\n assert isClass() || isTerm(): Debug.show(this);\n return newTerm(pos, flags, name, 0);\n }\n\n /** Creates a new label owned by this symbol. */\n public final Symbol newLabel(int pos, Name name) {\n assert isTerm(): Debug.show(this);\n return newTerm(pos, 0, name, IS_LABEL);\n }\n\n /** Creates a new field owned by this symbol. */\n public final Symbol newField(int pos, int flags, Name name) {\n assert isClass(): Debug.show(this);\n return newTerm(pos, flags, name, 0);\n }\n\n /** Creates a new variable owned by this symbol. */\n public final Symbol newVariable(int pos, int flags, Name name) {\n assert isTerm(): Debug.show(this);\n return newTerm(pos, flags, name, 0);\n }\n\n /** Creates a new variable owned by this symbol. 
*/\n public final Symbol newFieldOrVariable(int pos, int flags, Name name) {\n assert isClass() || isTerm(): Debug.show(this);\n return newTerm(pos, flags, name, 0);\n }\n\n /** Creates a new pattern variable owned by this symbol. */\n public final Symbol newPatternVariable(int pos, Name name) {\n return newVariable(pos, 0, name);\n }\n\n /** Creates a new value parameter owned by this symbol. */\n public final Symbol newVParam(int pos, int flags, Name name) {\n assert isTerm(): Debug.show(this);\n return newTerm(pos, flags | PARAM, name);\n }\n\n /**\n * Creates a new value parameter owned by this symbol and\n * initializes it with the type.\n */\n public final Symbol newVParam(int pos, int flags, Name name, Type type) {\n Symbol tparam = newVParam(pos, flags, name);\n tparam.setInfo(type);\n return tparam;\n }\n\n /**\n * Creates a new initialized dummy symbol for template of this\n * class.\n */\n public final Symbol newLocalDummy() {\n assert isClass(): Debug.show(this);\n Symbol local = newTerm(pos, 0, Names.LOCAL(this), IS_LOCALDUMMY);\n local.setInfo(Type.NoType);\n return local;\n }\n\n /** Creates a new module owned by this symbol. 
*/\n public final Symbol newModule(int pos, int flags, Name name) {\n return new ModuleSymbol(this, pos, flags, name);\n }\n\n /**\n * Creates a new package owned by this symbol and initializes it\n * with an empty scope.\n */\n public final Symbol newPackage(int pos, Name name) {\n return newPackage(pos, name, null);\n }\n\n /**\n * Creates a new package owned by this symbol, initializes it with\n * the loader and enters it in the scope if it's non-null.\n */\n public final Symbol newLoadedPackage(Name name, SymbolLoader loader,\n Scope scope)\n {\n assert loader != null: Debug.show(this) + \" - \" + name;\n Symbol peckage = newPackage(Position.NOPOS, name, loader);\n if (scope != null) scope.enterNoHide(peckage);\n return peckage;\n }\n\n /**\n * Creates a new error value owned by this symbol and initializes\n * it with an error type.\n */\n public Symbol newErrorValue(Name name) {\n Symbol symbol = newTerm(pos, SYNTHETIC, name, IS_ERROR);\n symbol.setInfo(Type.ErrorType);\n return symbol;\n }\n\n /** Creates a new type alias owned by this symbol. */\n public final Symbol newTypeAlias(int pos, int flags, Name name) {\n return new AliasTypeSymbol(this, pos, flags, name, 0);\n }\n\n /** Creates a new abstract type owned by this symbol. */\n public final Symbol newAbstractType(int pos, int flags, Name name) {\n return new AbsTypeSymbol(this, pos, flags, name, 0);\n }\n\n /** Creates a new type parameter owned by this symbol. 
*/\n public final Symbol newTParam(int pos, int flags, Name name) {\n assert isTerm(): Debug.show(this);\n return newAbstractType(pos, flags | PARAM, name);\n }\n\n /**\n * Creates a new type parameter owned by this symbol and\n * initializes it with the type.\n */\n public final Symbol newTParam(int pos, int flags, Name name, Type type) {\n Symbol tparam = newTParam(pos, flags, name);\n tparam.setInfo(type);\n return tparam;\n }\n\n /**\n * Creates a new type alias owned by this symbol and initializes\n * it with the info.\n */\n public final Symbol newTypeAlias(int pos, int flags, Name name, Type info){\n Symbol alias = newTypeAlias(pos, flags, name);\n alias.setInfo(info);\n alias.allConstructors().setInfo(Type.MethodType(EMPTY_ARRAY, info));\n return alias;\n }\n\n /** Creates a new class owned by this symbol. */\n public final ClassSymbol newClass(int pos, int flags, Name name) {\n return newClass(pos, flags, name, 0);\n }\n\n /** Creates a new anonymous class owned by this symbol. 
*/\n public final ClassSymbol newAnonymousClass(int pos, Name name) {\n assert isTerm(): Debug.show(this);\n return newClass(pos, 0, name, IS_ANONYMOUS);\n }\n\n /**\n * Creates a new class with a linked module, both owned by this\n * symbol, initializes them with the loader and enters the class\n * and the module in the scope if it's non-null.\n */\n public final ClassSymbol newLoadedClass(int flags, Name name,\n SymbolLoader loader, Scope scope)\n {\n assert isPackageClass(): Debug.show(this);\n assert loader != null: Debug.show(this) + \" - \" + name;\n ClassSymbol clasz = new LinkedClassSymbol(this, flags, name);\n clasz.setInfo(loader);\n clasz.allConstructors().setInfo(loader);\n clasz.linkedModule().setInfo(loader);\n clasz.linkedModule().moduleClass().setInfo(loader);\n if (scope != null) scope.enterNoHide(clasz);\n if (scope != null) scope.enterNoHide(clasz.linkedModule());\n return clasz;\n }\n\n /**\n * Creates a new error class owned by this symbol and initializes\n * it with an error type.\n */\n public ClassSymbol newErrorClass(Name name) {\n ClassSymbol symbol = newClass(pos, SYNTHETIC, name, IS_ERROR);\n Scope scope = new ErrorScope(this);\n symbol.setInfo(Type.compoundType(Type.EMPTY_ARRAY, scope, this));\n symbol.allConstructors().setInfo(Type.ErrorType);\n return symbol;\n }\n\n /** Creates a new term owned by this symbol. */\n final Symbol newTerm(int pos, int flags, Name name, int attrs) {\n return new TermSymbol(this, pos, flags, name, attrs);\n }\n\n /** Creates a new package owned by this symbol. */\n final Symbol newPackage(int pos, Name name, Type info) {\n assert isPackageClass(): Debug.show(this);\n Symbol peckage = newModule(pos, JAVA | PACKAGE, name);\n if (info == null) info = Type.compoundType(\n Type.EMPTY_ARRAY, new Scope(), peckage.moduleClass());\n peckage.moduleClass().setInfo(info);\n return peckage;\n }\n\n /** Creates a new class owned by this symbol. 
*/\n final ClassSymbol newClass(int pos, int flags, Name name, int attrs) {\n return new ClassSymbol(this, pos, flags, name, attrs);\n }\n\n /** Creates a new compound class owned by this symbol. */\n final ClassSymbol newCompoundClass(Type info) {\n int pos = Position.FIRSTPOS;\n Name name = Names.COMPOUND_NAME.toTypeName();\n int flags = ABSTRACT | SYNTHETIC;\n int attrs = IS_COMPOUND;\n ClassSymbol clasz = newClass(pos, flags, name, attrs);\n clasz.setInfo(info);\n clasz.primaryConstructor().setInfo(\n Type.MethodType(Symbol.EMPTY_ARRAY, clasz.typeConstructor()));\n return clasz;\n }\n\n// Copying & cloning ------------------------------------------------------\n\n /** Return a fresh symbol with the same fields as this one.\n */\n public final Symbol cloneSymbol() {\n return cloneSymbol(owner);\n }\n\n /** Return a fresh symbol with the same fields as this one and the\n * given owner.\n */\n public final Symbol cloneSymbol(Symbol owner) {\n Symbol clone = cloneSymbolImpl(owner, attrs);\n clone.setInfo(info());\n return clone;\n }\n\n protected abstract Symbol cloneSymbolImpl(Symbol owner, int attrs);\n\n /** Returns a shallow copy of the given array. 
*/\n public static Symbol[] cloneArray(Symbol[] array) {\n return cloneArray(0, array, 0);\n }\n\n /**\n * Returns a shallow copy of the given array prefixed by \"prefix\"\n * null items.\n */\n public static Symbol[] cloneArray(int prefix, Symbol[] array) {\n return cloneArray(prefix, array, 0);\n }\n\n /**\n * Returns a shallow copy of the given array suffixed by \"suffix\"\n * null items.\n */\n public static Symbol[] cloneArray(Symbol[] array, int suffix) {\n return cloneArray(0, array, suffix);\n }\n\n /**\n * Returns a shallow copy of the given array prefixed by \"prefix\"\n * null items and suffixed by \"suffix\" null items.\n */\n public static Symbol[] cloneArray(int prefix, Symbol[] array, int suffix) {\n assert prefix >= 0 && suffix >= 0: prefix + \" - \" + suffix;\n int size = prefix + array.length + suffix;\n if (size == 0) return EMPTY_ARRAY;\n Symbol[] clone = new Symbol[size];\n for (int i = 0; i < array.length; i++) clone[prefix + i] = array[i];\n return clone;\n }\n\n /** Returns the concatenation of the two arrays. */\n public static Symbol[] concat(Symbol[] array1, Symbol[] array2) {\n if (array1.length == 0) return array2;\n if (array2.length == 0) return array1;\n Symbol[] clone = cloneArray(array1.length, array2);\n for (int i = 0; i < array1.length; i++) clone[i] = array1[i];\n return clone;\n }\n\n// Setters ---------------------------------------------------------------\n\n /** Set owner */\n public Symbol setOwner(Symbol owner) {\n assert !isConstructor() && !isNone() && !isError(): Debug.show(this);\n setOwner0(owner);\n return this;\n }\n protected void setOwner0(Symbol owner) {\n this.owner = owner;\n }\n\n /** Set type -- this is an alias for setInfo(Type info) */\n public final Symbol setType(Type info) { return setInfo(info); }\n\n /**\n * Set initial information valid from start of current phase. 
This\n * information is visible in the current phase and will be\n * transformed by the current phase (except if current phase is\n * the first one).\n */\n public Symbol setInfo(Type info) {\n return setInfoAt(info, Global.instance.currentPhase);\n }\n\n /**\n * Set initial information valid from start of given phase. This\n * information is visible in the given phase and will be\n * transformed by the given phase.\n */\n private final Symbol setInfoAt(Type info, Phase phase) {\n assert info != null: Debug.show(this);\n assert phase != null: Debug.show(this);\n assert !isConstructor()\n || info instanceof Type.LazyType\n || info == Type.NoType\n || info == Type.ErrorType\n || info instanceof Type.MethodType\n || info instanceof Type.OverloadedType\n || info instanceof Type.PolyType\n : \"illegal type for \" + this + \": \" + info;\n infos = new TypeIntervalList(null, info, phase);\n if (info instanceof Type.LazyType) flags &= ~INITIALIZED;\n else flags |= INITIALIZED;\n return this;\n }\n\n /**\n * Set new information valid from start of next phase. This\n * information is only visible in next phase or through\n * \"nextInfo\". It will not be transformed by the current phase.\n */\n public final Symbol updateInfo(Type info) {\n return updateInfoAt(info, Global.instance.currentPhase.next);\n }\n\n /**\n * Set new information valid from start of given phase. 
This\n * information is only visible from the start of the given phase\n * which is also the first phase that will transform this\n * information.\n */\n private final Symbol updateInfoAt(Type info, Phase phase) {\n assert info != null: Debug.show(this);\n assert phase != null: Debug.show(this);\n assert infos != null: Debug.show(this);\n assert !phase.precedes(infos.limit()) :\n Debug.show(this) + \" -- \" + phase + \" -- \" + infos.limit();\n if (infos.limit() == phase) {\n if (infos.start == phase)\n infos = infos.prev;\n else\n infos.setLimit(infos.limit().prev);\n }\n infos = new TypeIntervalList(infos, info, phase);\n return this;\n }\n\n /** Set type of `this' in current class\n */\n public Symbol setTypeOfThis(Type tp) {\n throw new ApplicationError(this + \".setTypeOfThis\");\n }\n\n /** Set the low bound of this type variable\n */\n public Symbol setLoBound(Type lobound) {\n throw new ApplicationError(\"setLoBound inapplicable for \" + this);\n }\n\n /** Set the view bound of this type variable\n */\n public Symbol setVuBound(Type lobound) {\n throw new ApplicationError(\"setVuBound inapplicable for \" + this);\n }\n\n /** Add an auxiliary constructor to class.\n */\n public void addConstructor(Symbol constr) {\n throw new ApplicationError(\"addConstructor inapplicable for \" + this);\n }\n\n// Symbol classification ----------------------------------------------------\n\n /** Does this symbol denote an error symbol? */\n public final boolean isError() {\n return (attrs & IS_ERROR) != 0;\n }\n\n /** Does this symbol denote the none symbol? */\n public final boolean isNone() {\n return kind == Kinds.NONE;\n }\n\n /** Does this symbol denote a type? */\n public final boolean isType() {\n return kind == TYPE || kind == CLASS || kind == ALIAS;\n }\n\n /** Does this symbol denote a term? */\n public final boolean isTerm() {\n return kind == VAL;\n }\n\n /** Does this symbol denote a value? 
*/
    public final boolean isValue() {
        preInitialize();
        // Java modules (static parts of Java classes) and packages are
        // terms but not Scala values.
        return kind == VAL && !(isModule() && isJava()) && !isPackage();
    }

    /** Does this symbol denote a stable value? */
    public final boolean isStable() {
        return kind == VAL &&
            ((flags & DEF) == 0) &&
            ((flags & STABLE) != 0 ||
             (flags & MUTABLE) == 0 && type().isObjectType());
    }

    /** Does this symbol have the STABLE flag? */
    public final boolean hasStableFlag() {
        return (flags & STABLE) != 0;
    }

    /** Is this symbol static (i.e. with no outer instance)? */
    public final boolean isStatic() {
        return owner.isStaticOwner();
    }

    /** Does this symbol denote a class that defines static symbols? */
    public final boolean isStaticOwner() {
        return isRoot() || (isStatic() && isModuleClass()
            // !!! remove later? translation does not work (yet?)
            && isJava());
    }

    /** Is this symbol final?
     */
    public final boolean isFinal() {
        return
            (flags & (FINAL | PRIVATE)) != 0 || isLocal() || owner.isModuleClass();
    }

    /** Does this symbol denote a variable? */
    public final boolean isVariable() {
        return kind == VAL && (flags & MUTABLE) != 0;
    }

    /** Does this symbol denote a view bounded type variable? */
    public final boolean isViewBounded() {
        Global global = Global.instance;
        // View bounds only exist up to (and including) the REFCHECK phase.
        return kind == TYPE && (flags & VIEWBOUND) != 0 &&
            global.currentPhase.id <= global.PHASE.REFCHECK.id();
    }

    /**
     * Does this symbol denote a final method? A final method is one
     * that can't be overridden in a subclass. This method assumes
     * that this symbol denotes a method. It doesn't test it.
     */
    public final boolean isMethodFinal() {
        return (flags & FINAL) != 0 || isPrivate() || isLifted();
    }

    /** Does this symbol denote a sealed class symbol? 
*/
    public final boolean isSealed() {
        return (flags & SEALED) != 0;
    }

    /** Does this symbol denote a method?
     */
    public final boolean isInitializedMethod() {
        // Unlike isMethod(), does not force initialization.
        if (infos == null) return false;
        switch (rawInfo()) {
        case MethodType(_, _):
        case PolyType(_, _):
            return true;
        case OverloadedType(Symbol[] alts, _):
            // Overloaded: a method if any alternative is one.
            for (int i = 0; i < alts.length; i++)
                if (alts[i].isMethod()) return true;
            return false;
        default:
            return false;
        }
    }

    public final boolean isMethod() {
        initialize();
        return isInitializedMethod();
    }

    public final boolean isCaseFactory() {
        return isMethod() && !isConstructor() && (flags & CASE) != 0;
    }

    public final boolean isAbstractClass() {
        preInitialize();
        // ARRAY_CLASS is special-cased: although marked abstract, it is
        // not treated as an abstract class by the compiler.
        return kind == CLASS && (flags & ABSTRACT) != 0 &&
            this != Global.instance.definitions.ARRAY_CLASS;
    }

    public final boolean isAbstractOverride() {
        preInitialize();
        return (flags & (ABSTRACT | OVERRIDE)) == (ABSTRACT | OVERRIDE);
    }

    /* Does this symbol denote an anonymous class? */
    public final boolean isAnonymousClass() {
        return isClass() && (attrs & IS_ANONYMOUS) != 0;
    }

    /** Does this symbol denote the root class or root module?
     */
    public final boolean isRoot() {
        return (attrs & IS_ROOT) != 0;
    }

    /** Does this symbol denote something loaded from a Java class? */
    public final boolean isJava() {
        preInitialize();
        return (flags & JAVA) != 0;
    }

    /** Does this symbol denote a Java package? */
    public final boolean isPackage() {
        return kind == VAL && (flags & PACKAGE) != 0;
    }

    /** Does this symbol denote a Java package class? */
    public final boolean isPackageClass() {
        return kind == CLASS && (flags & PACKAGE) != 0;
    }

    /** Does this symbol denote a module? */
    public final boolean isModule() {
        return kind == VAL && (flags & MODUL) != 0;
    }

    /** Does this symbol denote a module class? 
*/
    public final boolean isModuleClass() {
        return kind == CLASS && (flags & MODUL) != 0;
    }

    /** Does this symbol denote a class? */
    public final boolean isClass() {
        // Package classes are excluded; see isPackageClass().
        return kind == CLASS && (flags & PACKAGE) == 0;
    }

    /** Does this symbol denote a case class?
     */
    public final boolean isCaseClass() {
        preInitialize();
        return kind == CLASS && (flags & CASE) != 0;
    }

    /** Does this symbol denote a uniform (i.e. parameterless) class? */
    public final boolean isTrait() {
        //preInitialize(); todo: enable, problem is that then we cannot print
        // during unpickle
        return kind == CLASS && (flags & TRAIT) != 0;
    }

    /** Does this class symbol denote a compound type symbol? */
    public final boolean isCompoundSym() {
        return (attrs & IS_COMPOUND) != 0;
    }

    /** Does this symbol denote a this symbol? */
    public final boolean isThisSym() {
        return (attrs & IS_THISTYPE) != 0;
    }

    /** Does this symbol denote an interface? */
    public final boolean isInterface() {
        info(); // force delayed transformInfos that may change this flag
        return (flags & INTERFACE) != 0;
    }

    /** Does this symbol denote a type alias? */
    public final boolean isTypeAlias() {
        return kind == ALIAS;
    }

    /** Does this symbol denote an abstract type? */
    public final boolean isAbstractType() {
        return kind == TYPE;
    }

    /** Does this symbol denote a class type? */
    public final boolean isClassType() {
        return kind == CLASS;
    }

    /** Does this symbol denote a public symbol? */
    public final boolean isPublic() {
        return !isProtected() && !isPrivate();
    }

    /** Does this symbol denote a protected symbol? */
    public final boolean isProtected() {
        preInitialize();
        return (flags & PROTECTED) != 0;
    }

    /** Does this symbol denote a private symbol? */
    public final boolean isPrivate() {
        preInitialize();
        return (flags & PRIVATE) != 0;
    }

    /** Has this symbol been lifted? 
*/
    public final boolean isLifted() {
        preInitialize();
        return (flags & LIFTED) != 0;
    }

    /** Does this symbol denote a deferred symbol? */
    public final boolean isDeferred() {
        return (flags & DEFERRED) != 0;
    }

    /** Does this symbol denote a synthetic symbol? */
    public final boolean isSynthetic() {
        return (flags & SYNTHETIC) != 0;
    }

    /** Does this symbol denote an accessor? */
    public final boolean isAccessor() {
        return (flags & ACCESSOR) != 0;
    }

    /** Does this symbol denote an access method? (a method to access
     * private or protected members from inner classes) */
    public final boolean isAccessMethod() {
        return (attrs & IS_ACCESSMETHOD) != 0;
    }

    /** Is this symbol locally defined? I.e. not a member of a class or module */
    public final boolean isLocal() {
        // Parameters of a primary constructor count as members of the
        // class, not as locals.
        return owner.kind == VAL &&
            !((flags & PARAM) != 0 && owner.isPrimaryConstructor());
    }

    /** Is this symbol a parameter? Includes type parameters of methods.
     */
    public final boolean isParameter() {
        return (flags & PARAM) != 0;
    }

    /** Is this symbol a def parameter?
     */
    public final boolean isDefParameter() {
        return (flags & (PARAM | DEF)) == (PARAM | DEF);
    }

    /** Is this class locally defined?
     * A class is local, if
     * - it is anonymous, or
     * - its owner is a value
     * - it is defined within a local class
     */
    public final boolean isLocalClass() {
        return isClass() &&
            (isAnonymousClass() ||
             owner.isValue() ||
             owner.isLocalClass());
    }

    /** Is this symbol an instance initializer? */
    public boolean isInitializer() {
        return false;
    }

    /** Is this symbol a constructor? */
    public final boolean isConstructor() {
        return (attrs & IS_CONSTRUCTOR) != 0;
    }

    /** Is this symbol the primary constructor of a type? 
*/
    public final boolean isPrimaryConstructor() {
        return isConstructor() && this == constructorClass().primaryConstructor();
    }

    /** Symbol was preloaded from package
     */
    public final boolean isExternal() {
        // External symbols have no source position.
        return pos == Position.NOPOS;
    }

    /** Is this symbol an overloaded symbol? */
    public final boolean isOverloaded() {
        switch (info()) {
        case OverloadedType(_,_): return true;
        default : return false;
        }
    }

    /** Does this symbol denote a label? */
    public final boolean isLabel() {
        return (attrs & IS_LABEL) != 0;
    }

    /** Is this symbol accessed? */
    public final boolean isAccessed() {
        return (flags & ACCESSED) != 0;
    }

    /** The variance of this symbol as an integer
     *  (+1 covariant, -1 contravariant, 0 invariant).
     */
    public int variance() {
        if ((flags & COVARIANT) != 0) return 1;
        else if ((flags & CONTRAVARIANT) != 0) return -1;
        else return 0;
    }

// Symbol names ----------------------------------------------------------------

    /** Get the fully qualified name of this Symbol
     * (this is always a normal name, never a type name)
     */

    /** Get the simple name of this Symbol (this is always a term name)
     */
    public Name simpleName() {
        // Constructors are named after the class they construct.
        if (isConstructor()) return constructorClass().name.toTermName();
        return name;
    }

// Access to related symbols -----------------------------------------------------

    /** Get type parameters */
    public Symbol[] typeParams() {
        return EMPTY_ARRAY;
    }

    /** Get value parameters */
    public Symbol[] valueParams() {
        return EMPTY_ARRAY;
    }

    /** Get result type */
    public final Type resultType() {
        return type().resultType();
    }

    /** Get type parameters at start of next phase */
    public final Symbol[] nextTypeParams() {
        Global.instance.nextPhase();
        Symbol[] tparams = typeParams();
        Global.instance.prevPhase();
        return tparams;
    }

    /** Get value parameters at start of next phase */
    public final Symbol[] nextValueParams() {
        Global.instance.nextPhase();
        Symbol[] vparams = valueParams();
        Global.instance.prevPhase();
        return vparams;
    }

    /** Get result type at start of next phase */
    public final Type nextResultType() {
        return nextType().resultType();
    }

    /** Get all constructors of class */
    public Symbol allConstructors() {
        return NONE;
    }

    /** Get primary constructor of class */
    public Symbol primaryConstructor() {
        return NONE;
    }

    /**
     * Returns the class linked to this module or null if there is no
     * such class. The returned value remains the same for the whole
     * life of the symbol.
     *
     * See method "linkedModule" to learn more about linked modules
     * and classes.
     */
    public ClassSymbol linkedClass() {
        assert isModule(): "not a module: " + Debug.show(this);
        return null;
    }

    /**
     * Returns the module linked to this class or null if there is no
     * such module. The returned value remains the same for the whole
     * life of the symbol.
     *
     * Linked modules and classes are intended to be used by the
     * symbol table loader. They are needed because it is impossible
     * to know from the name of a class or source file if it defines a
     * class or a module. For that reason a class and a module (each
     * linked to the other) are created for each of those files. Once
     * the file is read the class, the module or both are initialized
     * depending on what the file defines.
     *
     * It is guaranteed that if a class "c" has a linked module then
     * "c.linkedModule().linkedClass() == c" and that if a module "m"
     * has a linked class then "m.linkedClass().linkedModule() == m".
     *
     * The linked module of a Java class, is the module that contains
     * the static members of that class. A Java class has always a
     * linked module.
     *
     * The linked module of a Scala class, is the module with the same
     * name declared in the same scope. A Scala class may or may not
     * have a linked module. 
However, this does not depend on the
     * presence or absence of a module with the same name but on how
     * the class is created. Therefore a Scala class may have no
     * linked module even though there exists a module with the same
     * name in the same scope. A Scala class may also have a linked
     * module even though there exists no module with the same name in
     * the same scope. In the latter case, the linked module would be
     * initialized to NoType (which prevents accesses to it).
     *
     * There is a last catch about linked modules. It may happen that
     * the symbol returned by "linkedModule" is not a module (and that
     * method "linkedClass" works on a non-module symbol). At creation
     * time, linked modules are always modules, but at initialization
     * time, it may be discovered that the module is in fact a case
     * class factory method. In that case, the module is downgraded to
     * a non-module term. This implies that from then on calls to its
     * method "moduleClass" will fail, but the links defined by the
     * methods "linkedModule" and "linkedClass" remain unchanged.
     */
    public ModuleSymbol linkedModule() {
        assert isClassType(): "not a class: " + Debug.show(this);
        return null;
    }

    /** Get owner */
    public Symbol owner() {
        return owner;
    }

    /** Get owner, but if owner is primary constructor of a class,
     * get class symbol instead. This is useful for type parameters
     * and value parameters in classes which have the primary constructor
     * as owner.
     */
    public Symbol classOwner() {
        Symbol owner = owner();
        Symbol clazz = owner.constructorClass();
        if (clazz.primaryConstructor() == owner) return clazz;
        else return owner;
    }

    /** The next enclosing class */
    public Symbol enclClass() {
        return owner().enclClass();
    }

    /** The next enclosing method */
    public Symbol enclMethod() {
        return isMethod() ? this : owner().enclMethod();
    }

    /** If this is a constructor, return the class it constructs.
     * Otherwise return the symbol itself.
     */
    public Symbol constructorClass() {
        return this;
    }

    /** Return first alternative if this has a (possibly lazy)
     * overloaded type, otherwise symbol itself.
     * Needed in ClassSymbol.primaryConstructor() and in UnPickle.
     */
    public Symbol firstAlternative() {
        if (infos == null)
            return this;
        else if (infos.info instanceof Type.OverloadedType)
            return infos.info.alternativeSymbols()[0];
        else if (infos.info instanceof LazyOverloadedType)
            // Lazy overloads are not forced; recurse into the first member.
            return ((LazyOverloadedType) infos.info).sym1.firstAlternative();
        else
            return this;
    }

    /**
     * Returns the class of this module. This method may be invoked
     * only on module symbols. It returns always a non-null module
     * class symbol whose identity never changes.
     */
    public ModuleSymbol moduleClass() {
        throw Debug.abort("not a module", this);
    }

    /**
     * Returns the source module of this module class. This method may
     * be invoked only on module class symbols. It returns always a
     * non-null module symbol whose identity never changes.
     *
     * This method should be used with great care. If possible, one
     * should always use moduleClass instead. For example, one should
     * write "m.moduleClass()==c" rather than "m==c.sourceModule()".
     *
     * This method is problematic because the module - module-class
     * relation is not a one - one relation. There might be more than
     * one module that share the same module class. In that case, the
     * source module of the module class is the module that created
     * the class. This implies that "m.moduleClass().sourceModule()"
     * may be different of "m". 
However, it is guaranteed that
     * "c.sourceModule().moduleClass()" always returns "c".
     *
     * Phases like "AddInterfaces" and "ExpandMixins" are two examples
     * of phases that create additional modules referring the same
     * module class.
     *
     * Even if a module class is related to only one module, the use
     * of this method may still be dangerous. The problem is that
     * modules and module classes are not always as related as one
     * might expect. For example, modules declared in a function are
     * lifted out of the function by phase "LambdaLift". During this
     * process, the module value is transformed into a module method
     * with a "Ref" argument. If the "sourceModule" method is used to
     * replace references to module classes by references to their
     * source modules and this is done naively with the class of a
     * lifted module, it will yield wrong code because the "Ref"
     * argument will be missing.
     */
    public ModuleSymbol sourceModule() {
        throw Debug.abort("not a module class", this);
    }

    /** if type is a (possibly lazy) overloaded type, return its alternatives
     * else return array consisting of symbol itself
     */
    public Symbol[] alternativeSymbols() {
        Symbol[] alts = type().alternativeSymbols();
        if (alts.length == 0) return new Symbol[]{this};
        else return alts;
    }

    /** if type is a (possibly lazy) overloaded type, return its alternatives
     * else return array consisting of type itself
     */
    public Type[] alternativeTypes() {
        return type().alternativeTypes();
    }

    /** The symbol accessed by this accessor function.
     */
    public Symbol accessed() {
        assert (flags & ACCESSOR) != 0;
        String name1 = name.toString();
        // Strip a trailing "_=" (setter suffix) to recover the field name.
        if (name1.endsWith(Names._EQ.toString()))
            name1 = name1.substring(0, name1.length() - Names._EQ.length());
        // The underlying field is stored under "<name>$".
        return owner.info().lookup(Name.fromString(name1 + "$"));
    }

    /** The members of this class or module symbol
     */
    public Scope members() {
        return info().members();
    }

    /** Lookup symbol with given name; return Symbol.NONE if not found.
     */
    public Symbol lookup(Name name) {
        return info().lookup(name);
    }

// Symbol types --------------------------------------------------------------

    /** Was symbol's type updated during given phase? */
    public final boolean isUpdatedAt(Phase phase) {
        Phase next = phase.next;
        TypeIntervalList infos = this.infos;
        while (infos != null) {
            if (infos.start == next) return true;
            if (infos.limit().precedes(next)) return false;
            infos = infos.prev;
        }
        return false;
    }

    /** Is this symbol locked? */
    public final boolean isLocked() {
        return (flags & LOCKED) != 0;
    }

    /** Is this symbol initialized? */
    public final boolean isInitialized() {
        return (flags & INITIALIZED) != 0;
    }

    /** Initialize the symbol */
    public final Symbol initialize() {
        info();
        return this;
    }

    /** Make sure symbol is entered
     */
    public final void preInitialize() {
        //todo: clean up
        if (infos.info instanceof SymbolLoader)
            infos.info.complete(this);
    }

    /** Get info at start of current phase; This is:
     * for a term symbol, its type
     * for a type variable, its bound
     * for a type alias, its right-hand side
     * for a class symbol, the compound type consisting of
     * its baseclasses and members.
     */
    public final Type info() {
        //if (isModule()) moduleClass().initialize();
        if ((flags & INITIALIZED) == 0) {
            Global global = Global.instance;
            Phase current = global.currentPhase;
            // Complete the lazy info in the phase that defined it.
            global.currentPhase = rawFirstInfoStartPhase();
            Type info = rawFirstInfo();
            assert info != null : this;
            // LOCKED set while completing => we re-entered: cyclic reference.
            if ((flags & LOCKED) != 0) {
                setInfo(Type.ErrorType);
                flags |= INITIALIZED;
                throw new CyclicReference(this, info);
            }
            flags |= LOCKED;
            //System.out.println("completing " + this);//DEBUG
            info.complete(this);
            flags = flags & ~LOCKED;
            // A SourceCompleter may install another lazy type; force it a
            // second time (SNDTIME guards against unbounded recursion).
            if (info instanceof SourceCompleter && (flags & SNDTIME) == 0) {
                flags |= SNDTIME;
                Type tp = info();
                flags &= ~SNDTIME;
            } else {
                assert !(rawInfo() instanceof Type.LazyType) : this;
                //flags |= INITIALIZED;
            }
            //System.out.println("done: " + this);//DEBUG
            global.currentPhase = current;
        }
        return rawInfo();
    }

    /** Get info at start of next phase
     */
    public final Type nextInfo() {
        Global.instance.nextPhase();
        Type info = info();
        Global.instance.prevPhase();
        return info;
    }

    /** Get info at start of given phase
     */
    protected final Type infoAt(Phase phase) {
        Global global = phase.global;
        Phase current = global.currentPhase;
        global.currentPhase = phase;
        Type info = info();
        global.currentPhase = current;
        return info;
    }

    /** Get info at start of current phase, without forcing lazy types.
     */
    public final Type rawInfo() {
        return rawInfoAt(Global.instance.currentPhase);
    }

    /** Get info at start of next phase, without forcing lazy types.
     */
    public final Type rawNextInfo() {
        Global.instance.nextPhase();
        Type info = rawInfo();
        Global.instance.prevPhase();
        return info;
    }

    /** Get info at start of given phase, without forcing lazy types.
     */
    private final Type rawInfoAt(Phase phase) {
        //if (infos == null) return Type.NoType;//DEBUG
        assert infos != null : this;
        assert phase != null : this;
        if (infos.limit().id <= phase.id) {
            // Requested phase is at or past the newest interval: extend
            // the history by transforming the info phase by phase.
            switch (infos.info) {
            case LazyType():
                // don't force lazy types
                return infos.info;
            }
            while (infos.limit() != phase) {
                Phase limit = infos.limit();
                Type info = transformInfo(limit, infos.info);
                assert info != null: Debug.show(this) + " -- " + limit;
                if (info != infos.info) {
                    infos = new TypeIntervalList(infos, info, limit.next);
                } else {
                    // Unchanged: just widen the current interval.
                    infos.setLimit(limit.next);
                }
            }
            return infos.info;
        } else {
            // Requested phase lies in the recorded history: walk back.
            TypeIntervalList infos = this.infos;
            while (phase.id < infos.start.id && infos.prev != null)
                infos = infos.prev;
            return infos.info;
        }
    }
    // where
    private Type transformInfo(Phase phase, Type info) {
        Global global = phase.global;
        Phase current = global.currentPhase;
        switch (info) {
        case ErrorType:
        case NoType:
            return info;
        case OverloadedType(Symbol[] alts, Type[] alttypes):
            // Rebuild the overloaded type from the alternatives' infos at
            // the next phase, but only if at least one of them changed.
            global.currentPhase = phase.next;
            for (int i = 0; i < alts.length; i++) {
                Type type = alts[i].info();
                if (type != alttypes[i]) {
                    Type[] types = new Type[alttypes.length];
                    for (int j = 0; j < i; j++) types[j] = alttypes[j];
                    // NOTE(review): this writes into the OLD alttypes array,
                    // presumably `types[i] = type' was intended; the loop
                    // below overwrites types[i] anyway, so only the shared
                    // alttypes mutation is suspect — TODO confirm.
                    alttypes[i] = type;
                    for (; i < alts.length; i++)
                        types[i] = alts[i].info();
                    global.currentPhase = current;
                    return Type.OverloadedType(alts, types);
                }
            }
            global.currentPhase = current;
            return info;
        default:
            global.currentPhase = phase;
            info = phase.transformInfo(this, info);
            global.currentPhase = current;
            return info;
        }
    }

    /** Get first defined info, without forcing lazy types.
     */
    public final Type rawFirstInfo() {
        TypeIntervalList infos = this.infos;
        assert infos != null : this;
        while (infos.prev != null) infos = infos.prev;
        return infos.info;
    }

    /** Get phase that first defined an info, without forcing lazy types.
     */
    public final Phase rawFirstInfoStartPhase() {
        TypeIntervalList infos = this.infos;
        assert infos != null : this;
        while (infos.prev != null) infos = infos.prev;
        return infos.start;
    }

    /** Get type at start of current phase. 
The type of a symbol is:
     * for a type symbol, the type corresponding to the symbol itself
     * for a term symbol, its usual type
     */
    public Type type() {
        return info();
    }
    public Type getType() {
        return info();
    }

    /** Get type at start of next phase
     */
    public final Type nextType() {
        Global.instance.nextPhase();
        Type type = type();
        Global.instance.prevPhase();
        return type;
    }

    /** The infos of these symbols as an array.
     */
    static public Type[] info(Symbol[] syms) {
        Type[] tps = new Type[syms.length];
        for (int i = 0; i < syms.length; i++)
            tps[i] = syms[i].info();
        return tps;
    }

    /** The types of these symbols as an array.
     */
    static public Type[] type(Symbol[] syms) {
        Type[] tps = new Type[syms.length];
        for (int i = 0; i < syms.length; i++)
            tps[i] = syms[i].type();
        return tps;
    }
    static public Type[] getType(Symbol[] syms) {
        return type(syms);
    }

    /** Get static type. */
    public final Type staticType() {
        return staticType(Type.EMPTY_ARRAY);
    }
    /** Get static type with given type argument. */
    public final Type staticType(Type arg0) {
        return staticType(new Type[]{arg0});
    }
    /** Get static type with given type arguments. */
    public final Type staticType(Type arg0, Type arg1) {
        return staticType(new Type[]{arg0, arg1});
    }
    /** Get static type with given type arguments. */
    public final Type staticType(Type[] args) {
        Type prefix = owner.staticPrefix();
        if (isType()) return Type.typeRef(prefix, this, args);
        // Term symbols take no type arguments.
        assert args.length == 0: Debug.show(this, " - ", args);
        return prefix.memberType(this);
    }

    /** Get static prefix. 
*/
    public final Type staticPrefix() {
        assert isStaticOwner(): Debug.show(this) + " - " + isTerm() + " - " + isModuleClass() + " - " + owner().isStaticOwner() + " - " + isJava();
        Global global = Global.instance;
        // After EXPLICITOUTER there are no prefixes anymore.
        if (global.PHASE.EXPLICITOUTER.id() < global.currentPhase.id)
            return Type.NoPrefix;
        if (isRoot()) return thisType();
        assert sourceModule().owner() == owner(): Debug.show(this);
        assert sourceModule().type().isObjectType(): Debug.show(this);
        return Type.singleType(owner.staticPrefix(), sourceModule());
    }

    /** The type constructor of a symbol is:
     * For a type symbol, the type corresponding to the symbol itself, excluding
     * parameters.
     * Not applicable for term symbols.
     */
    public Type typeConstructor() {
        throw new ApplicationError("typeConstructor inapplicable for " + this);
    }

    /** The low bound of this type variable
     */
    public Type loBound() {
        return Global.instance.definitions.ALL_TYPE();
    }

    /** The view bound of this type variable
     */
    public Type vuBound() {
        return Global.instance.definitions.ANY_TYPE();
    }

    /** Get this.type corresponding to this symbol
     */
    public Type thisType() {
        return Type.NoPrefix;
    }

    /** Get type of `this' in current class.
     */
    public Type typeOfThis() {
        return type();
    }

    /** Get this symbol of current class
     */
    public Symbol thisSym() { return this; }


    /** A total ordering between symbols that refines the class
     * inheritance graph (i.e. 
subclass.isLess(superclass) always holds).
     */
    public boolean isLess(Symbol that) {
        if (this == that) return false;
        int diff;
        // Types sort before terms; among types, larger closures (deeper
        // subclasses) sort first; ties are broken by creation id.
        if (this.isType()) {
            if (that.isType()) {
                diff = this.closure().length - that.closure().length;
                if (diff > 0) return true;
                if (diff < 0) return false;
            } else {
                return true;
            }
        } else if (that.isType()) {
            return false;
        }
        return this.id < that.id;
    }

    /** Return the symbol's type itself followed by all its direct and indirect
     * base types, sorted by isLess(). Overridden for class symbols.
     */
    public Type[] closure() {
        return info().closure();
    }

    /** Return position of `c' in the closure of this type; -1 if not there.
     */
    public int closurePos(Symbol c) {
        if (this == c) return 0;
        if (c.isCompoundSym()) return -1;
        // Binary search; the closure is sorted by isLess().
        Type[] closure = closure();
        int lo = 0;
        int hi = closure.length - 1;
        while (lo <= hi) {
            int mid = (lo + hi) / 2;
            Symbol clsym = closure[mid].symbol();
            if (c == clsym) return mid;
            else if (c.isLess(clsym)) hi = mid - 1;
            else if (clsym.isLess(c)) lo = mid + 1;
            else throw new ApplicationError();
        }
        return -1;
    }

    public Type baseType(Symbol sym) {
        int i = closurePos(sym);
        if (i >= 0) return closure()[i];
        else return Type.NoType;
    }

    /** Is this class a subclass of `c'? I.e. 
does it have a type instance
     * of `c' as indirect base class?
     */
    public boolean isSubClass(Symbol c) {
        // ALL (scala.Nothing) is a subclass of everything; ALLREF
        // (scala.Null) of every subclass of AnyRef except Nothing.
        return this == c ||
            c.isError() ||
            closurePos(c) >= 0 ||
            this == Global.instance.definitions.ALL_CLASS ||
            (this == Global.instance.definitions.ALLREF_CLASS &&
             c != Global.instance.definitions.ALL_CLASS &&
             c.isSubClass(Global.instance.definitions.ANYREF_CLASS));
    }

    /** Get base types of this symbol */
    public Type[] parents() {
        return info().parents();
    }

// ToString -------------------------------------------------------------------

    /** String representation of symbol's simple name.
     * Translates expansions of operators back to operator symbol. E.g.
     * $eq => =.
     */
    public String nameString() {
        return NameTransformer.decode(simpleName());
    }

    /** String representation, including symbol's kind
     * e.g., "class Foo", "function Bar".
     */
    public String toString() {
        return new SymbolTablePrinter().printSymbolKindAndName(this).toString();
    }

    /** String representation of location.
     */
    public String locationString() {
        if (owner.kind == CLASS &&
            !owner.isAnonymousClass() && !owner.isCompoundSym() ||
            Global.instance.debug)
            return " in " +
                (owner.isModuleClass() ? owner.sourceModule() : owner);
        else
            return "";
    }

    /** String representation of definition.
     */
    public String defString() {
        return new SymbolTablePrinter().printSignature(this).toString();
    }

    public static String[] defString(Symbol[] defs) {
        String[] strs = new String[defs.length];
        for (int i = 0; i < defs.length; i++)
            strs[i] = defs[i].defString();
        return strs;
    }

// Overloading and Overriding -------------------------------------------

    /** Add another overloaded alternative to this symbol.
     */
    public Symbol overloadWith(Symbol that) {
        assert isTerm() : Debug.show(this);
        assert this.name == that.name : Debug.show(this) + " <> " + Debug.show(that);
        assert this.owner == that.owner : Debug.show(this) + " != " + Debug.show(that);
        assert this.isConstructor() == that.isConstructor();
        // Combined flags: most flags must hold for both alternatives,
        // ACCESSOR is kept if either one has it.
        int overflags = (this.flags & that.flags & (JAVA | ACCESSFLAGS | DEFERRED | PARAM | SYNTHETIC)) |
            ((this.flags | that.flags) & ACCESSOR);
        Symbol overloaded = (this.isConstructor())
            ? this.constructorClass().newConstructor(this.constructorClass().pos, overflags)
            : owner.newTerm(pos, overflags, name, 0);
        overloaded.setInfo(new LazyOverloadedType(this, that));
        return overloaded;
    }

    /** A lazy type which, when forced computed the overloaded type
     * of symbols `sym1' and `sym2'. 
It also checks that this type is well-formed.
     */
    public static class LazyOverloadedType extends Type.LazyType {
        // The two (possibly themselves overloaded) alternatives.
        Symbol sym1;
        Symbol sym2;
        LazyOverloadedType(Symbol sym1, Symbol sym2) {
            this.sym1 = sym1;
            this.sym2 = sym2;
        }

        public Symbol[] alternativeSymbols() {
            // Flatten both sides into one alternative list.
            Symbol[] alts1 = sym1.alternativeSymbols();
            Symbol[] alts2 = sym2.alternativeSymbols();
            Symbol[] alts3 = new Symbol[alts1.length + alts2.length];
            System.arraycopy(alts1, 0, alts3, 0, alts1.length);
            System.arraycopy(alts2, 0, alts3, alts1.length, alts2.length);
            return alts3;
        }

        public Type[] alternativeTypes() {
            Type[] alts1 = sym1.alternativeTypes();
            Type[] alts2 = sym2.alternativeTypes();
            Type[] alts3 = new Type[alts1.length + alts2.length];
            System.arraycopy(alts1, 0, alts3, 0, alts1.length);
            System.arraycopy(alts2, 0, alts3, alts1.length, alts2.length);
            return alts3;
        }

        public void complete(Symbol overloaded) {
            overloaded.setInfo(
                Type.OverloadedType(
                    alternativeSymbols(), alternativeTypes()));
        }

        public String toString() {
            return "LazyOverloadedType(" + sym1 + "," + sym2 + ")";
        }
    }

    /**
     * Returns the symbol in type "base" which is overridden by this
     * symbol in class "this.owner()". Returns "NONE" if no such
     * symbol exists. The type "base" must be a supertype of class
     * "this.owner()". If "exact" is true, overriding is restricted to
     * symbols that have the same type. The method may return this
     * symbol only if "base.symbol()" is equal to "this.owner()".
     */
    public final Symbol overriddenSymbol(Type base, boolean exact) {
        return overriddenSymbol(base, owner(), exact);
    }
    public final Symbol overriddenSymbol(Type base) {
        return overriddenSymbol(base, false);
    }

    /**
     * Returns the symbol in type "base" which is overridden by this
     * symbol in "clasz". Returns "NONE" if no such symbol exists. 
The
     * type "base" must be a supertype of "clasz" and "this.owner()"
     * must be a superclass of "clasz". If "exact" is true, overriding
     * is restricted to symbols that have the same type. The method
     * may return this symbol if "base.symbol()" is a subclass of
     * "this.owner()".
     */
    public final Symbol overriddenSymbol(Type base, Symbol clasz, boolean exact) {
        Type.Relation relation = exact
            ? Type.Relation.SameType
            : Type.Relation.SuperType;
        return base.lookup(this, clasz.thisType(), relation);
    }
    public final Symbol overriddenSymbol(Type base, Symbol clasz) {
        return overriddenSymbol(base, clasz, false);
    }

    /**
     * Returns the symbol in type "sub" which overrides this symbol in
     * class "sub.symbol()". Returns this symbol if no such symbol
     * exists. The class "sub.symbol()" must be a subclass of
     * "this.owner()". If "exact" is true, overriding is restricted to
     * symbols that have the same type.
     */
    public final Symbol overridingSymbol(Type sub, boolean exact) {
        Type.Relation relation = exact
            ? Type.Relation.SameType
            : Type.Relation.SubType;
        return sub.lookup(this, sub, relation);
    }
    public final Symbol overridingSymbol(Type sub) {
        return overridingSymbol(sub, false);
    }

    /** Does this symbol override that symbol?
     */
    public boolean overrides(Symbol that) {
        return
            ((this.flags | that.flags) & PRIVATE) == 0 &&
            this.name == that.name &&
            owner.thisType().memberType(this).derefDef().isSubType(
                owner.thisType().memberType(that).derefDef());
    }

    /** Reset symbol to initial state
     */
    public void reset(Type completer) {
        // Keep only source-level flags; drop all compiler-derived state.
        this.flags &= SOURCEFLAGS;
        this.pos = 0;
        this.infos = null;
        this.setInfo(completer);
    }

    /**
     * Returns the symbol to use in case of a rebinding due to a more
     * precise type prefix.
     */
    public Symbol rebindSym() {
        return this;
    }

    /** return a tag which (in the ideal case) uniquely identifies
     * class symbols
     */
    public int tag() {
        return name.toString().hashCode();
    }
}

/** A class for term symbols
 */
class TermSymbol extends Symbol {

    /** Constructor */
    TermSymbol(Symbol owner, int pos, int flags, Name name, int attrs) {
        super(VAL, owner, pos, flags, name, attrs);
        assert name.isTermName(): Debug.show(this);
    }

    public boolean isInitializer() {
        return name == Names.INITIALIZER;
    }

    public Symbol[] typeParams() {
        return type().typeParams();
    }

    public Symbol[] valueParams() {
        return type().valueParams();
    }

    protected Symbol cloneSymbolImpl(Symbol owner, int attrs) {
        return new TermSymbol(owner, pos, flags, name, attrs);
    }

}

/** A class for constructor symbols */
final class ConstructorSymbol extends TermSymbol {

    /** The constructed class */
    private final Symbol clasz;

    /** Initializes this instance. 
 */
ConstructorSymbol(Symbol clasz, int pos, int flags) {
    // a constructor is owned by the class's owner, not by the class itself
    super(clasz.owner(), pos, flags, Names.CONSTRUCTOR, IS_CONSTRUCTOR);
    this.clasz = clasz;
}

public boolean isInitializer() {
    return false;
}

/** Returns the class constructed by this constructor. */
public Symbol constructorClass() {
    return clasz;
}

protected final Symbol cloneSymbolImpl(Symbol owner, int attrs) {
    throw Debug.abort("illegal clone of constructor", this);
}

}

/** A class for module symbols */
public class ModuleSymbol extends TermSymbol {

    /** The module class */
    private final ModuleClassSymbol clasz;

    /** Initializes this instance. */
    private ModuleSymbol(Symbol owner, int pos, int flags, Name name,
        int attrs, ModuleClassSymbol clasz)
    {
        super(owner, pos, flags | MODUL | FINAL | STABLE, name, attrs);
        // when no module class is supplied, create a fresh one for this module
        this.clasz = clasz != null ? clasz : new ModuleClassSymbol(this);
        setType(Type.typeRef(owner().thisType(), this.clasz,Type.EMPTY_ARRAY));
    }

    /** Initializes this instance. */
    ModuleSymbol(Symbol owner, int pos, int flags, Name name) {
        this(owner, pos, flags, name, 0, null);
    }

    public ModuleClassSymbol moduleClass() {
        // test may fail because loaded modules may be downgraded to
        // case class factory methods (see Symbol#linkedModule())
        assert isModule(): Debug.show(this);
        return clasz;
    }

    protected final Symbol cloneSymbolImpl(Symbol owner, int attrs) {
        return new ModuleSymbol(owner, pos, flags, name, attrs, clasz);
    }

}

/**
 * A class for linked module symbols
 *
 * @see Symbol#linkedModule()
 */
final class LinkedModuleSymbol extends ModuleSymbol {

    /** The linked class */
    private final LinkedClassSymbol clasz;

    /** Initializes this instance. */
    LinkedModuleSymbol(LinkedClassSymbol clasz) {
        // keep only the JAVA flag of the linked class
        super(clasz.owner(), clasz.pos, clasz.flags & JAVA,
            clasz.name.toTermName());
        this.clasz = clasz;
    }

    public ClassSymbol linkedClass() {
        return clasz;
    }

}

/** A base class for all type symbols.
 * It has AliasTypeSymbol, AbsTypeSymbol, ClassSymbol as subclasses.
 */
abstract class TypeSymbol extends Symbol {

    /** The history of closures of this symbol
     *  (stores Type[] values — see the cast in closure()) */
    private final History/**/ closures;

    /** A cache for type constructors
     */
    private Type tycon = null;

    /** The primary constructor of this type */
    private Symbol constructor;

    /** Constructor */
    public TypeSymbol(int kind, Symbol owner, int pos, int flags, Name name, int attrs) {
        super(kind, owner, pos, flags, name, attrs);
        this.closures = new ClosureHistory();
        assert name.isTypeName() : this;
        this.constructor = newConstructor(pos, flags & CONSTRFLAGS);
    }

    /** Copies the infos of all of this symbol's constructors onto the
     *  corresponding constructors of "other". */
    protected final void copyConstructorInfo(TypeSymbol other) {
        {
            Type info = primaryConstructor().info().cloneType(
                primaryConstructor(), other.primaryConstructor());
            if (!isTypeAlias()) info = fixConstrType(info, other);
            other.primaryConstructor().setInfo(info);
        }
        Symbol[] alts = allConstructors().alternativeSymbols();
        // start at 1: alternative 0 is the primary constructor handled above
        for (int i = 1; i < alts.length; i++) {
            Symbol constr = other.newConstructor(alts[i].pos, alts[i].flags);
            other.addConstructor(constr);
            Type info = alts[i].info().cloneType(alts[i], constr);
            if (!isTypeAlias()) info = fixConstrType(info, other);
            constr.setInfo(info);
        }
    }

    /** Rewrites the result type of the given constructor type so that its
     *  TypeRef points at "clone" instead of this symbol.
     *  (NOTE: this uses the pattern-matching switch of the extended Java
     *  dialect the compiler is written in.) */
    private final Type fixConstrType(Type type, Symbol clone) {
        switch (type) {
        case MethodType(Symbol[] vparams, Type result):
            result = fixConstrType(result, clone);
            return new Type.MethodType(vparams, result);
        case PolyType(Symbol[] tparams, Type result):
            result = fixConstrType(result, clone);
            return new Type.PolyType(tparams, result);
        case TypeRef(Type pre, Symbol sym, Type[] args):
            if (sym != this && isTypeAlias() && owner().isCompoundSym())
                return type;
            assert sym == this: Debug.show(sym) + " != " + Debug.show(this);
            return Type.typeRef(pre, clone, args);
        case LazyType():
            return type;
        default:
            throw Debug.abort("unexpected constructor type:" + clone + ":" + type);
        }
    }

    /** add a constructor
     */
    public final void addConstructor(Symbol constr) {
        assert constr.isConstructor(): Debug.show(constr);
        // alternatives are accumulated in an overloaded symbol
        constructor = constructor.overloadWith(constr);
    }

    /** Get primary constructor */
    public final Symbol primaryConstructor() {
        return constructor.firstAlternative();
    }

    /** Get all constructors */
    public final Symbol allConstructors() {
        return constructor;
    }

    /** Get type parameters */
    public final Symbol[] typeParams() {
        return primaryConstructor().info().typeParams();
    }

    /** Get value parameters (non-class type symbols have none) */
    public final Symbol[] valueParams() {
        return (kind == CLASS) ? primaryConstructor().info().valueParams()
            : Symbol.EMPTY_ARRAY;
    }

    /** Get type constructor (cached; invalidated by setOwner and reset) */
    public final Type typeConstructor() {
        if (tycon == null)
            tycon = Type.typeRef(owner().thisType(), this, Type.EMPTY_ARRAY);
        return tycon;
    }

    public Symbol setOwner(Symbol owner) {
        tycon = null;   // the cached type constructor mentions the old owner
        constructor.setOwner0(owner);
        switch (constructor.type()) {
        case OverloadedType(Symbol[] alts, _):
            // re-own every constructor alternative as well
            for (int i = 0; i < alts.length; i++) alts[i].setOwner0(owner);
        }
        return super.setOwner(owner);
    }

    /** Get type */
    public final Type type() {
        return primaryConstructor().type().resultType();
    }
    public final Type getType() {
        return primaryConstructor().type().resultType();
    }

    /**
     * Get closure at start of current phase.
The closure of a symbol
     * is a list of types which contains the type of the symbol
     * followed by all its direct and indirect base types, sorted by
     * isLess().
     */
    public final Type[] closure() {
        // an alias's closure is the closure of the aliased type's symbol
        if (kind == ALIAS) return info().symbol().closure();
        return (Type[])closures.getValue(this);
    }

    public void reset(Type completer) {
        super.reset(completer);
        closures.reset();
        tycon = null;
    }

    protected final Symbol cloneSymbolImpl(Symbol owner, int attrs) {
        TypeSymbol clone = cloneTypeSymbolImpl(owner, attrs);
        copyConstructorInfo(clone);
        return clone;
    }

    /** Subclass hook that creates the clone instance itself. */
    protected abstract TypeSymbol cloneTypeSymbolImpl(Symbol owner, int attrs);
}

/** A class for type alias symbols. */
final class AliasTypeSymbol extends TypeSymbol {

    /** Initializes this instance. */
    AliasTypeSymbol(Symbol owner, int pos, int flags, Name name, int attrs) {
        super(ALIAS, owner, pos, flags, name, attrs);
    }

    protected TypeSymbol cloneTypeSymbolImpl(Symbol owner, int attrs) {
        return new AliasTypeSymbol(owner, pos, flags, name, attrs);
    }

}

/** A class for abstract type symbols. */
final class AbsTypeSymbol extends TypeSymbol {

    // lower and view bounds; null until explicitly set
    private Type lobound = null;
    private Type vubound = null;

    /** Initializes this instance. */
    AbsTypeSymbol(Symbol owner, int pos, int flags, Name name, int attrs) {
        super(TYPE, owner, pos, flags, name, attrs);
        allConstructors().setInfo(Type.MethodType(EMPTY_ARRAY, Type.typeRef(owner.thisType(), this, Type.EMPTY_ARRAY)));
    }

    /** Lower bound; falls back to ALL_TYPE when none was set. */
    public Type loBound() {
        initialize();
        return lobound == null ? Global.instance.definitions.ALL_TYPE() : lobound;
    }

    /** View bound; falls back to ANY_TYPE when none was set or this
     *  symbol is not view-bounded. */
    public Type vuBound() {
        initialize();
        return !isViewBounded() || vubound == null
            ? Global.instance.definitions.ANY_TYPE() : vubound;
    }

    public Symbol setLoBound(Type lobound) {
        this.lobound = lobound;
        return this;
    }

    public Symbol setVuBound(Type vubound) {
        this.vubound = vubound;
        return this;
    }

    protected TypeSymbol cloneTypeSymbolImpl(Symbol owner, int attrs) {
        TypeSymbol clone = new AbsTypeSymbol(owner, pos, flags, name, attrs);
        clone.setLoBound(loBound());
        clone.setVuBound(vuBound());
        return clone;
    }

}

/** A class for class symbols. */
public class ClassSymbol extends TypeSymbol {

    /** The given type of self, or NoType, if no explicit type was given.
     */
    private Symbol thisSym = this;

    public Symbol thisSym() { return thisSym; }

    /** A cache for this.thisType()
     */
    final private Type thistp = Type.ThisType(this);

    /** The alias symbol returned by rebindSym(). */
    private final Symbol rebindSym;

    /** Initializes this instance. */
    ClassSymbol(Symbol owner, int pos, int flags, Name name, int attrs) {
        super(CLASS, owner, pos, flags, name, attrs);
        this.rebindSym = owner.newTypeAlias(pos, 0, Names.ALIAS(this));
        Type rebindType = new ClassAliasLazyType();
        this.rebindSym.setInfo(rebindType);
        this.rebindSym.primaryConstructor().setInfo(rebindType);
    }

    /** Lazy type that, when forced, completes the rebind alias so it
     *  aliases this class. */
    private class ClassAliasLazyType extends Type.LazyType {
        public void complete(Symbol ignored) {
            Symbol clasz = ClassSymbol.this;
            Symbol alias = rebindSym;
            Type prefix = clasz.owner().thisType();
            Type constrtype = clasz.type();
            constrtype = Type.MethodType(Symbol.EMPTY_ARRAY, constrtype);
            constrtype = Type.PolyType(clasz.typeParams(), constrtype);
            constrtype = constrtype.cloneType(
                clasz.primaryConstructor(), alias.primaryConstructor());
            alias.primaryConstructor().setInfo(constrtype);
            alias.setInfo(constrtype.resultType());
        }
    }

    /** Creates the root class.
     */
    public static Symbol newRootClass(Global global) {
        int pos = Position.NOPOS;
        Name name = Names.ROOT.toTypeName();
        Symbol owner = Symbol.NONE;
        int flags = JAVA | PACKAGE | FINAL;
        int attrs = IS_ROOT;
        Symbol clasz = new ClassSymbol(owner, pos, flags, name, attrs);
        clasz.setInfo(global.getRootLoader());
        clasz.primaryConstructor().setInfo(
            Type.MethodType(Symbol.EMPTY_ARRAY, clasz.typeConstructor()));
        // !!! Type.MethodType(Symbol.EMPTY_ARRAY, clasz.thisType()));
        return clasz;
    }

    /** Creates the this-type symbol associated to this class. */
    private final Symbol newThisType() {
        return newTerm(pos, SYNTHETIC, Names.this_, IS_THISTYPE);
    }

    public Type thisType() {
        Global global = Global.instance;
        // after the erasure phase, fall back to the class type
        // instead of the cached this-type
        if (global.currentPhase.id > global.PHASE.ERASURE.id()) return type();
        return thistp;
    }

    public Type typeOfThis() {
        return thisSym.type();
    }

    public Symbol setTypeOfThis(Type tp) {
        thisSym = newThisType();
        thisSym.setInfo(tp);
        return this;
    }

    /** Return the next enclosing class */
    public Symbol enclClass() {
        return this;
    }

    /** Returns the case field accessor with the given index.
     *  Non-Java classes: scans members for the (index+1)-th VAL method
     *  flagged CASEACCESSOR. Java classes: skips to the first SYNTHETIC
     *  member and takes the index-th member after it.
     */
    public Symbol caseFieldAccessor(int index) {
        assert (flags & CASE) != 0 : this;
        Scope.SymbolIterator it = info().members().iterator();
        Symbol sym = null;
        if ((flags & JAVA) == 0) {
            for (int i = 0; i <= index; i++) {
                do {
                    sym = it.next();
                } while (sym.kind != VAL || (sym.flags & CASEACCESSOR) == 0 || !sym.isMethod());
            }
            //System.out.println(this + ", case field[" + index + "] = " + sym);//DEBUG
        } else {
            sym = it.next();
            while ((sym.flags & SYNTHETIC) == 0) {
                //System.out.println("skipping " + sym);
                sym = it.next();
            }
            for (int i = 0; i < index; i++)
                sym = it.next();
            //System.out.println("field accessor = " + sym);//DEBUG
        }
        assert sym != null : this;
        return sym;
    }

    public final Symbol rebindSym() {
        return rebindSym;
    }

    public void reset(Type completer) {
        super.reset(completer);
        thisSym = this;   // drop any explicit self type
    }

    protected final TypeSymbol cloneTypeSymbolImpl(Symbol owner, int attrs) {
        assert !isModuleClass(): Debug.show(this);
        ClassSymbol clone = new ClassSymbol(owner, pos, flags, name, attrs);
        // only copy the self type when one was explicitly set
        if (thisSym != this) clone.setTypeOfThis(typeOfThis());
        return clone;
    }

}

/**
 * A class for module class symbols
 *
 * @see Symbol#sourceModule()
 */
public final class ModuleClassSymbol extends ClassSymbol {

    /** The source module */
    private final ModuleSymbol module;

    /** Initializes this instance. */
    ModuleClassSymbol(ModuleSymbol module) {
        super(module.owner(), module.pos,
            (module.flags & MODULE2CLASSFLAGS) | MODUL | FINAL,
            module.name.toTypeName(), 0);
        // the primary constructor is made PRIVATE and nullary
        primaryConstructor().flags |= PRIVATE;
        primaryConstructor().setInfo(
            Type.MethodType(Symbol.EMPTY_ARRAY, typeConstructor()));
        this.module = module;
    }

    public ModuleSymbol sourceModule() {
        return module;
    }

}

/**
 * A class for linked class symbols
 *
 * @see Symbol#linkedModule()
 */
final class LinkedClassSymbol extends ClassSymbol {

    /** The linked module */
    private final LinkedModuleSymbol module;

    /** Initializes this instance.
 */
    LinkedClassSymbol(Symbol owner, int flags, Name name) {
        super(owner, Position.NOPOS, flags, name, 0);
        this.module = new LinkedModuleSymbol(this);
    }

    public ModuleSymbol linkedModule() {
        return module;
    }

}

/** The class of Symbol.NONE
 */
final class NoSymbol extends Symbol {

    /** Constructor */
    public NoSymbol() {
        super(Kinds.NONE, null, Position.NOPOS, 0, Names.NOSYMBOL, 0);
        super.setInfo(Type.NoType);
    }

    /** Set type (only Type.NoType is legal for NoSymbol) */
    public Symbol setInfo(Type info) {
        assert info == Type.NoType : info;
        return this;
    }

    /** Return the next enclosing class */
    public Symbol enclClass() {
        return this;
    }

    /** Return the next enclosing method */
    public Symbol enclMethod() {
        return this;
    }

    public Symbol owner() {
        // NoSymbol has no owner; asking for it is an error
        throw new ApplicationError();
    }

    public Type thisType() {
        return Type.NoPrefix;
    }

    public void reset(Type completer) {
    }

    protected Symbol cloneSymbolImpl(Symbol owner, int attrs) {
        throw Debug.abort("illegal clone", this);
    }

}

/** An exception for signalling cyclic references.
 */
public class CyclicReference extends Type.Error {
    public Symbol sym;
    public Type info;
    public CyclicReference(Symbol sym, Type info) {
        super("illegal cyclic reference involving " + sym);
        this.sym = sym;
        this.info = info;
    }
}

/** A base class for values indexed by phases.
 */
abstract class IntervalList {

    /** Interval starts at start of phase "start" (inclusive) */
    public final Phase start;
    /** Interval ends at start of phase "limit" (inclusive) */
    private Phase limit;

    public IntervalList(IntervalList prev, Phase start) {
        this.start = start;
        this.limit = start;
        // intervals must be contiguous: this one must begin
        // right after "prev" ends
        assert start != null && (prev == null || prev.limit.next == start) :
            Global.instance.currentPhase + " - " + prev + " - " + start;
    }

    public Phase limit() {
        return limit;
    }

    public void setLimit(Phase phase) {
        // the limit may only move forward from "start"
        assert phase != null && !phase.precedes(start) : start + " - " + phase;
        limit = phase;
    }

    public String toString() {
        return "[" + start + "->" + limit + "]";
    }

}

/** A class for types indexed by phases. */
class TypeIntervalList extends IntervalList {

    /** Previous interval */
    public final TypeIntervalList prev;
    /** Info valid during this interval */
    public final Type info;

    public TypeIntervalList(TypeIntervalList prev, Type info, Phase start) {
        super(prev, start);
        this.prev = prev;
        this.info = info;
        assert info != null;
    }

}
translation does not work (yet?)\n && isJava());\n }"}}},{"rowIdx":2085,"cells":{"lang":{"kind":"string","value":"JavaScript"},"license":{"kind":"string","value":"mit"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"48c5399917bbde398c130c6a764deb0bcbd21d6f"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"exclude/kusaba-quickreply"},"new_contents":{"kind":"string","value":"function QuickReply() {\n document.addEventListener('mousemove', this.mousePos, false);\n\n this.addButtons();\n this.createForm();\n}\n\nQuickReply.prototype.mousePos = function(e) {\n window.pageX = e.pageX;\n window.pageY = e.pageY;\n}\n\nQuickReply.prototype.addButtons = function() {\n replies = document.getElementsByClassName('reply');\n\n for (i = 0; i < replies.length; i++) {\n reply = replies[i];\n parentid = reply.parentNode.parentNode.parentNode.parentNode.id.split('replies')[1][0] // FUCK THE POLICE!!\n postid = reply.id.split('reply')[1]\n\n extrabtns = reply.getElementsByClassName('extrabtns')[0]\n extrabtns.appendChild(this.createButton(parentid, postid));\n }\n}\n\nQuickReply.prototype.createButton = function(parentid, postid) { \n a = document.createElement('a');\n a.href = '#';\n a.onclick = this.formHandler;\n\n img = document.createElement('img');\n img.src = '/css/icons/blank.gif';\n img.className = 'quickreply' // kusaba's default class\n\n a.appendChild(img);\n\n return a; // we need to return it because closures in javascript are awful\n}\n\nQuickReply.prototype.createForm = function() {\n // Maybe, only maybe we can copy default form to a div and put this div\n // under mouse cursor instead of creating it from zero\n\n var quickform = document.getElementById('postform').cloneNode(true);\n\n var container = document.createElement('div')\n container.style.display = 'none';\n container.style.position = 'absolute';\n container.style.width = '450px';\n container.style.background = '#fff';\n container.id = 
'quickreplyx';\n\n var header = document.createElement('div');\n header.innerHTML = '

      Quickreply

      ';\n\n var content = document.createElement('div');\n content.appendChild(quickform)\n\n container.appendChild(header);\n container.appendChild(content);\n\n document.body.appendChild(container);\n}\n\nQuickReply.prototype.formHandler = function(e) {\n // try {\n // document.getElementById('qr-name').value = getCookie(\"name\");\n // }\n // catch (e) {\n // // ALL GLORY TO HYPNOTOAD!!\n // }\n // document.getElementById('qr-em').value = getCookie(\"email\");\n // document.getElementById('qr-password').value = get_password(\"postpassword\");\n // document.getElementById('qr-parentid').value = parentid;\n // document.getElementById('qr-info').innerHTML = 'Reply to: #' + postid + ' in: #' + parentid;\n\n // var messagearea = document.getElementById('qr-message');\n // messagearea.value = '>>' + postid + '\\n';\n // // messagearea.onfocus = function() {\n // // this.value = this.value; // just to move cursor to the end\n // // }\n\n var qr = document.getElementById('quickreplyx');\n qr.style.display = 'block';\n qr.style.left = window.pageX + 'px';\n qr.style.top = window.pageY + 'px';\n\n // messagearea.focus();\n\n e.preventDefault();\n}\n\nwindow.onload = function() {\n new QuickReply();\n}\n"},"new_file":{"kind":"string","value":"lib/javascript/quickreply.js"},"old_contents":{"kind":"string","value":"function QuickReply() {\n document.addEventListener('mousemove', this.mousePos, false);\n\n this.addButtons();\n}\n\nQuickReply.prototype.mousePos = function(e) {\n window.pageX = e.pageX;\n window.pageY = e.pageY;\n}\n\nQuickReply.prototype.addButtons = function() {\n replies = document.getElementsByClassName('reply');\n\n for (i = 0; i < replies.length; i++) {\n reply = replies[i];\n parent_id = reply.parentNode.parentNode.parentNode.parentNode.id.split('replies')[1][0] // this shit is serious?!?!!\n post_id = reply.id.split('reply')[1]\n\n extrabtns = reply.getElementsByClassName('extrabtns')[0]\n extrabtns.appendChild(this.createButton(parent_id, post_id));\n 
}\n}\n\nQuickReply.prototype.createButton = function(parent_id, post_id) { \n a = document.createElement('a');\n a.href = '#';\n a.onclick = function(e) {\n var qr = document.getElementById('quickreply');\n\n // copy post password\n // copy post name\n // put thread id\n // fill message box\n\n qr.style.display = 'block';\n qr.style.left = window.pageX;\n qr.style.top = window.pageY;\n\n e.preventDefault();\n }\n\n img = document.createElement('img');\n img.src = '/css/icons/blank.gif';\n img.className = 'quickreply' // kusaba's default class\n\n a.appendChild(img);\n\n return a\n}\n\nwindow.onload = function() {\n new QuickReply();\n}\n"},"message":{"kind":"string","value":"add progress\n"},"old_file":{"kind":"string","value":"lib/javascript/quickreply.js"},"subject":{"kind":"string","value":"add progress"},"git_diff":{"kind":"string","value":"ib/javascript/quickreply.js\n document.addEventListener('mousemove', this.mousePos, false);\n \n this.addButtons();\n this.createForm();\n }\n \n QuickReply.prototype.mousePos = function(e) {\n \n for (i = 0; i < replies.length; i++) {\n reply = replies[i];\n parent_id = reply.parentNode.parentNode.parentNode.parentNode.id.split('replies')[1][0] // this shit is serious?!?!!\n post_id = reply.id.split('reply')[1]\n parentid = reply.parentNode.parentNode.parentNode.parentNode.id.split('replies')[1][0] // FUCK THE POLICE!!\n postid = reply.id.split('reply')[1]\n \n extrabtns = reply.getElementsByClassName('extrabtns')[0]\n extrabtns.appendChild(this.createButton(parent_id, post_id));\n extrabtns.appendChild(this.createButton(parentid, postid));\n }\n }\n \nQuickReply.prototype.createButton = function(parent_id, post_id) { \nQuickReply.prototype.createButton = function(parentid, postid) { \n a = document.createElement('a');\n a.href = '#';\n a.onclick = function(e) {\n var qr = document.getElementById('quickreply');\n\n // copy post password\n // copy post name\n // put thread id\n // fill message box\n\n qr.style.display = 
'block';\n qr.style.left = window.pageX;\n qr.style.top = window.pageY;\n\n e.preventDefault();\n }\n a.onclick = this.formHandler;\n \n img = document.createElement('img');\n img.src = '/css/icons/blank.gif';\n \n a.appendChild(img);\n \n return a\n return a; // we need to return it because closures in javascript are awful\n}\n\nQuickReply.prototype.createForm = function() {\n // Maybe, only maybe we can copy default form to a div and put this div\n // under mouse cursor instead of creating it from zero\n\n var quickform = document.getElementById('postform').cloneNode(true);\n\n var container = document.createElement('div')\n container.style.display = 'none';\n container.style.position = 'absolute';\n container.style.width = '450px';\n container.style.background = '#fff';\n container.id = 'quickreplyx';\n\n var header = document.createElement('div');\n header.innerHTML = '

      Quickreply

      ';\n\n var content = document.createElement('div');\n content.appendChild(quickform)\n\n container.appendChild(header);\n container.appendChild(content);\n\n document.body.appendChild(container);\n}\n\nQuickReply.prototype.formHandler = function(e) {\n // try {\n // document.getElementById('qr-name').value = getCookie(\"name\");\n // }\n // catch (e) {\n // // ALL GLORY TO HYPNOTOAD!!\n // }\n // document.getElementById('qr-em').value = getCookie(\"email\");\n // document.getElementById('qr-password').value = get_password(\"postpassword\");\n // document.getElementById('qr-parentid').value = parentid;\n // document.getElementById('qr-info').innerHTML = 'Reply to: #' + postid + ' in: #' + parentid;\n\n // var messagearea = document.getElementById('qr-message');\n // messagearea.value = '>>' + postid + '\\n';\n // // messagearea.onfocus = function() {\n // // this.value = this.value; // just to move cursor to the end\n // // }\n\n var qr = document.getElementById('quickreplyx');\n qr.style.display = 'block';\n qr.style.left = window.pageX + 'px';\n qr.style.top = window.pageY + 'px';\n\n // messagearea.focus();\n\n e.preventDefault();\n }\n \n window.onload = function() {"}}},{"rowIdx":2086,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"78dabdf3fa2554fe1182331473064ae716ae6189"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"esaunders/autopsy,rcordovano/autopsy,rcordovano/autopsy,wschaeferB/autopsy,rcordovano/autopsy,esaunders/autopsy,wschaeferB/autopsy,wschaeferB/autopsy,esaunders/autopsy,rcordovano/autopsy,wschaeferB/autopsy,esaunders/autopsy,rcordovano/autopsy,rcordovano/autopsy,esaunders/autopsy,wschaeferB/autopsy"},"new_contents":{"kind":"string","value":"/*\n * Autopsy Forensic Browser\n *\n * Copyright 2013-2018 Basis Technology Corp.\n * Contact: carrier sleuthkit org\n *\n * Licensed under 
the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage org.sleuthkit.autopsy.imagegallery;\n\nimport java.beans.PropertyChangeEvent;\nimport java.beans.PropertyChangeListener;\nimport java.nio.file.Path;\nimport java.nio.file.Paths;\nimport java.util.EnumSet;\nimport java.util.Set;\nimport java.util.logging.Level;\nimport javafx.application.Platform;\nimport javax.annotation.concurrent.GuardedBy;\nimport javax.swing.JOptionPane;\nimport javax.swing.SwingUtilities;\nimport static org.apache.commons.lang3.StringUtils.isNotBlank;\nimport org.openide.util.NbBundle;\nimport org.sleuthkit.autopsy.casemodule.Case;\nimport org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;\nimport org.sleuthkit.autopsy.casemodule.events.ContentTagAddedEvent;\nimport org.sleuthkit.autopsy.casemodule.events.ContentTagDeletedEvent;\nimport org.sleuthkit.autopsy.core.RuntimeProperties;\nimport org.sleuthkit.autopsy.coreutils.Logger;\nimport org.sleuthkit.autopsy.events.AutopsyEvent;\nimport org.sleuthkit.autopsy.imagegallery.datamodel.DrawableDB;\nimport org.sleuthkit.autopsy.ingest.IngestManager;\nimport static org.sleuthkit.autopsy.ingest.IngestManager.IngestModuleEvent.DATA_ADDED;\nimport static org.sleuthkit.autopsy.ingest.IngestManager.IngestModuleEvent.FILE_DONE;\nimport org.sleuthkit.autopsy.ingest.ModuleDataEvent;\nimport org.sleuthkit.autopsy.ingest.events.DataSourceAnalysisEvent;\nimport org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector;\nimport 
org.sleuthkit.datamodel.AbstractFile;\nimport org.sleuthkit.datamodel.BlackboardArtifact;\nimport org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;\nimport org.sleuthkit.datamodel.Content;\nimport org.sleuthkit.datamodel.TskCoreException;\nimport org.sleuthkit.datamodel.TskData;\n\n/**\n * This class is reponsible for handling selected application events for the\n * image gallery module, managing the image gallery module's per case MVC\n * controller and keeping track of the following state: the module name, the\n * module output directory and whether or not the ingest gallery module is\n * enabled for the current case.\n */\n@NbBundle.Messages({\"ImageGalleryModule.moduleName=Image Gallery\"})\npublic class ImageGalleryModule {\n\n private static final Logger logger = Logger.getLogger(ImageGalleryModule.class.getName());\n private static final String MODULE_NAME = Bundle.ImageGalleryModule_moduleName();\n private static final Set CASE_EVENTS_OF_INTEREST = EnumSet.of(\n Case.Events.CURRENT_CASE,\n Case.Events.DATA_SOURCE_ADDED,\n Case.Events.CONTENT_TAG_ADDED,\n Case.Events.CONTENT_TAG_DELETED\n );\n private static final Object controllerLock = new Object();\n @GuardedBy(\"controllerLock\")\n private static ImageGalleryController controller;\n\n /**\n * Gets the per case image gallery controller for the current case. 
The\n * controller is changed in the case event listener.\n *\n * @return The image gallery controller for the current case.\n *\n * @throws TskCoreException If there is a problem creating the controller.\n */\n public static ImageGalleryController getController() throws TskCoreException {\n synchronized (controllerLock) {\n if (controller == null) {\n try {\n Case currentCase = Case.getCurrentCaseThrows();\n controller = new ImageGalleryController(currentCase);\n } catch (NoCurrentCaseException ex) {\n throw new TskCoreException(\"Failed to get \", ex);\n }\n }\n return controller;\n }\n }\n\n /**\n * Sets the implicit exit property attribute of the JavaFX runtime to false\n * and sets up listeners for application events. It is invoked at\n * application start up by virtue of the OnStart annotation on the OnStart\n * class in this package.\n */\n static void onStart() {\n Platform.setImplicitExit(false);\n IngestManager.getInstance().addIngestJobEventListener(new IngestJobEventListener());\n IngestManager.getInstance().addIngestModuleEventListener(new IngestModuleEventListener());\n Case.addEventTypeSubscriber(CASE_EVENTS_OF_INTEREST, new CaseEventListener());\n }\n\n /**\n * Gets the image gallery module name.\n *\n * @return The module name,\n */\n static String getModuleName() {\n return MODULE_NAME;\n }\n\n /**\n * Gets the path to the image gallery module output folder for a given case.\n *\n * @param theCase The case.\n *\n * @return The path to the image gallery module output folder for the case.\n */\n public static Path getModuleOutputDir(Case theCase) {\n return Paths.get(theCase.getModuleDirectory(), getModuleName());\n }\n\n /**\n * Prevents instantiation.\n */\n private ImageGalleryModule() {\n }\n\n /**\n * Indicates whether or not the image gallery module is enabled for a given\n * case.\n *\n * @param theCase The case.\n *\n * @return True or false.\n */\n static boolean isEnabledforCase(Case theCase) {\n String enabledforCaseProp = new 
PerCaseProperties(theCase).getConfigSetting(ImageGalleryModule.MODULE_NAME, PerCaseProperties.ENABLED);\n return isNotBlank(enabledforCaseProp) ? Boolean.valueOf(enabledforCaseProp) : ImageGalleryPreferences.isEnabledByDefault();\n }\n\n /**\n * Indicates whether or not a given file is of interest to the image gallery\n * module (is \"drawable\") and is not marked as a \"known\" file (e.g., is not\n * a file in the NSRL hash set).\n *\n * @param file The file.\n *\n * @return True if the file is \"drawable\" and not \"known\", false otherwise.\n *\n * @throws FileTypeDetectorInitException If there is an error determining\n * the type of the file.\n */\n private static boolean isDrawableAndNotKnown(AbstractFile abstractFile) throws FileTypeDetector.FileTypeDetectorInitException {\n return (abstractFile.getKnown() != TskData.FileKnown.KNOWN) && FileTypeUtils.isDrawable(abstractFile);\n }\n\n /**\n * A listener for ingest module application events.\n */\n static private class IngestModuleEventListener implements PropertyChangeListener {\n\n @Override\n public void propertyChange(PropertyChangeEvent event) {\n /*\n * Only process individual files and artifacts in \"real time\" on the\n * node that is running the ingest job. On a remote node, image\n * files are processed as a group when the ingest job is complete.\n */\n if (((AutopsyEvent) event).getSourceType() != AutopsyEvent.SourceType.LOCAL) {\n return;\n }\n\n ImageGalleryController currentController;\n try {\n currentController = getController();\n // RJCTODO: If a closed controller had a method that could be\n // queried to determine whether it was shut down, we could \n // bail out here. 
The older code that used to try to check for\n // a current case was flawed; there was no guarantee the current\n // case was the same case associated with the event.\n } catch (TskCoreException ex) {\n logger.log(Level.SEVERE, String.format(\"Failed to handle %s event\", event.getPropertyName()), ex); //NON-NLS\n return;\n }\n\n String eventType = event.getPropertyName();\n switch (IngestManager.IngestModuleEvent.valueOf(eventType)) {\n case FILE_DONE:\n AbstractFile file = (AbstractFile) event.getNewValue();\n if (!file.isFile()) {\n return;\n }\n if (currentController.isListeningEnabled()) {\n try {\n if (isDrawableAndNotKnown(file)) {\n currentController.queueDBTask(new ImageGalleryController.UpdateFileTask(file, currentController.getDatabase()));\n }\n } catch (FileTypeDetector.FileTypeDetectorInitException ex) {\n logger.log(Level.SEVERE, String.format(\"Failed to determine if file is of interest to the image gallery module, ignoring file (obj_id=%d)\", file.getId()), ex); //NON-NLS\n }\n }\n break;\n case DATA_ADDED:\n ModuleDataEvent artifactAddedEvent = (ModuleDataEvent) event.getOldValue();\n if (artifactAddedEvent.getBlackboardArtifactType().getTypeID() == ARTIFACT_TYPE.TSK_METADATA_EXIF.getTypeID()) {\n DrawableDB drawableDB = currentController.getDatabase();\n if (artifactAddedEvent.getArtifacts() != null) {\n for (BlackboardArtifact art : artifactAddedEvent.getArtifacts()) {\n drawableDB.addExifCache(art.getObjectID());\n }\n }\n } else if (artifactAddedEvent.getBlackboardArtifactType().getTypeID() == ARTIFACT_TYPE.TSK_HASHSET_HIT.getTypeID()) {\n DrawableDB drawableDB = currentController.getDatabase();\n if (artifactAddedEvent.getArtifacts() != null) {\n for (BlackboardArtifact art : artifactAddedEvent.getArtifacts()) {\n drawableDB.addHashSetCache(art.getObjectID());\n }\n }\n }\n break;\n default:\n break;\n }\n }\n }\n\n /**\n * A listener for case application events.\n */\n // RJCTODO: This code would be easier to read if there were two case 
event \n // listeners, one that handled CURRENT_CASE events and one that handled \n // the other events.\n static private class CaseEventListener implements PropertyChangeListener {\n\n @Override\n public void propertyChange(PropertyChangeEvent event) {\n Case.Events eventType = Case.Events.valueOf(event.getPropertyName());\n if (eventType == Case.Events.CURRENT_CASE) {\n synchronized (controllerLock) {\n if (event.getNewValue() != null) {\n /*\n * CURRENT_CASE(_OPENED) event.\n */\n Case newCase = (Case) event.getNewValue();\n try {\n controller = new ImageGalleryController(newCase);\n } catch (TskCoreException ex) {\n logger.log(Level.SEVERE, String.format(\"Failed to construct controller for new case %s (%s)\", newCase.getDisplayName(), newCase.getName()), ex);\n }\n } else if (event.getOldValue() != null) {\n /*\n * CURRENT_CASE(_CLOSED) event.\n */\n SwingUtilities.invokeLater(ImageGalleryTopComponent::closeTopComponent);\n controller.shutDown();\n }\n }\n } else {\n ImageGalleryController currentController;\n try {\n currentController = getController();\n // RJCTODO: If a closed controller had a method that could be\n // queried to determine whether it was shut down, we could \n // bail out here. 
The older code that used to try to check for\n // a current case was flawed; there was no guarantee the current\n // case was the same case associated with the event.\n } catch (TskCoreException ex) {\n logger.log(Level.SEVERE, String.format(\"Failed to handle %s event\", event.getPropertyName()), ex); //NON-NLS\n return;\n }\n\n switch (eventType) {\n case DATA_SOURCE_ADDED:\n if (((AutopsyEvent) event).getSourceType() == AutopsyEvent.SourceType.LOCAL) {\n Content newDataSource = (Content) event.getNewValue();\n if (currentController.isListeningEnabled()) {\n currentController.getDatabase().insertOrUpdateDataSource(newDataSource.getId(), DrawableDB.DrawableDbBuildStatusEnum.UNKNOWN);\n }\n }\n break;\n case CONTENT_TAG_ADDED:\n final ContentTagAddedEvent tagAddedEvent = (ContentTagAddedEvent) event;\n long objId = tagAddedEvent.getAddedTag().getContent().getId();\n DrawableDB drawableDB = currentController.getDatabase();\n drawableDB.addTagCache(objId); // RJCTODO: Why add the tag to the cache before doing the in DB check?\n if (drawableDB.isInDB(objId)) {\n currentController.getTagsManager().fireTagAddedEvent(tagAddedEvent);\n }\n break;\n case CONTENT_TAG_DELETED:\n final ContentTagDeletedEvent tagDeletedEvent = (ContentTagDeletedEvent) event;\n if (currentController.getDatabase().isInDB(tagDeletedEvent.getDeletedTagInfo().getContentID())) {\n currentController.getTagsManager().fireTagDeletedEvent(tagDeletedEvent);\n } // RJCTODO: Why not remove the tag from the cache?\n break;\n default:\n logger.log(Level.SEVERE, String.format(\"Received %s event with no subscription\", event.getPropertyName())); //NON-NLS\n break;\n }\n }\n }\n }\n\n /**\n * A listener for ingest job application events.\n */\n static private class IngestJobEventListener implements PropertyChangeListener {\n\n @NbBundle.Messages({\n \"ImageGalleryController.dataSourceAnalyzed.confDlg.msg= A new data source was added and finished ingest.\\n\"\n + \"The image / video database may be out of date. 
\"\n + \"Do you want to update the database with ingest results?\\n\",\n \"ImageGalleryController.dataSourceAnalyzed.confDlg.title=Image Gallery\"\n })\n @Override\n public void propertyChange(PropertyChangeEvent event) {\n /*\n * Only handling data source analysis events.\n */\n if (!(event instanceof DataSourceAnalysisEvent)) {\n return;\n }\n\n ImageGalleryController controller;\n try {\n controller = getController();\n // RJCTODO: If a closed controller had a method that could be\n // queried to determine whether it was shut down, we could \n // bail out here. The older code that used to try to check for\n // a current case was flawed; there was no guarantee the current\n // case was the same case associated with the event. \n } catch (TskCoreException ex) {\n logger.log(Level.SEVERE, String.format(\"Failed to handle %s event\", event.getPropertyName()), ex); //NON-NLS\n return;\n }\n\n DataSourceAnalysisEvent dataSourceEvent = (DataSourceAnalysisEvent) event;\n Content dataSource = dataSourceEvent.getDataSource();\n long dataSourceObjId = dataSource.getId();\n String eventType = dataSourceEvent.getPropertyName();\n try {\n switch (IngestManager.IngestJobEvent.valueOf(eventType)) {\n case DATA_SOURCE_ANALYSIS_STARTED:\n if (((AutopsyEvent) event).getSourceType() == AutopsyEvent.SourceType.LOCAL) {\n if (controller.isListeningEnabled()) {\n DrawableDB drawableDb = controller.getDatabase();\n // Don't update status if it is is already marked as COMPLETE\n if (drawableDb.getDataSourceDbBuildStatus(dataSourceObjId) != DrawableDB.DrawableDbBuildStatusEnum.COMPLETE) {\n drawableDb.insertOrUpdateDataSource(dataSource.getId(), DrawableDB.DrawableDbBuildStatusEnum.IN_PROGRESS);\n }\n }\n }\n break;\n case DATA_SOURCE_ANALYSIS_COMPLETED:\n if (((AutopsyEvent) event).getSourceType() == AutopsyEvent.SourceType.LOCAL) {\n /*\n * This node just completed analysis of a data\n * source. 
Set the state of the local drawables\n * database.\n */\n if (controller.isListeningEnabled()) {\n DrawableDB drawableDb = controller.getDatabase();\n if (drawableDb.getDataSourceDbBuildStatus(dataSourceObjId) == DrawableDB.DrawableDbBuildStatusEnum.IN_PROGRESS) {\n\n // If at least one file in CaseDB has mime type, then set to COMPLETE\n // Otherwise, back to UNKNOWN since we assume file type module was not run \n DrawableDB.DrawableDbBuildStatusEnum datasourceDrawableDBStatus\n = controller.hasFilesWithMimeType(dataSourceObjId)\n ? DrawableDB.DrawableDbBuildStatusEnum.COMPLETE\n : DrawableDB.DrawableDbBuildStatusEnum.UNKNOWN;\n\n controller.getDatabase().insertOrUpdateDataSource(dataSource.getId(), datasourceDrawableDBStatus);\n }\n }\n } else if (((AutopsyEvent) event).getSourceType() == AutopsyEvent.SourceType.REMOTE) {\n /*\n * A remote node just completed analysis of a data\n * source. The local drawables database is therefore\n * stale. If the image gallery top component is\n * open, give the user an opportunity to update the\n * drawables database now.\n */\n controller.setCaseStale(true);\n if (controller.isListeningEnabled()) {\n SwingUtilities.invokeLater(() -> {\n if (ImageGalleryTopComponent.isImageGalleryOpen()) {\n int showAnswer = JOptionPane.showConfirmDialog(ImageGalleryTopComponent.getTopComponent(),\n Bundle.ImageGalleryController_dataSourceAnalyzed_confDlg_msg(),\n Bundle.ImageGalleryController_dataSourceAnalyzed_confDlg_title(),\n JOptionPane.YES_NO_CANCEL_OPTION, JOptionPane.WARNING_MESSAGE);\n switch (showAnswer) {\n case JOptionPane.YES_OPTION:\n controller.rebuildDB();\n break;\n case JOptionPane.NO_OPTION:\n case JOptionPane.CANCEL_OPTION:\n default:\n break;\n }\n }\n });\n }\n }\n break;\n default:\n break;\n }\n } catch (TskCoreException ex) {\n logger.log(Level.SEVERE, String.format(\"Failed to handle %s event for %s (objId=%d)\", dataSourceEvent.getPropertyName(), dataSource.getName(), dataSourceObjId), ex);\n }\n }\n 
}\n}\n"},"new_file":{"kind":"string","value":"ImageGallery/src/org/sleuthkit/autopsy/imagegallery/ImageGalleryModule.java"},"old_contents":{"kind":"string","value":"/*\n * Autopsy Forensic Browser\n *\n * Copyright 2013-2018 Basis Technology Corp.\n * Contact: carrier sleuthkit org\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage org.sleuthkit.autopsy.imagegallery;\n\nimport java.beans.PropertyChangeEvent;\nimport java.beans.PropertyChangeListener;\nimport java.nio.file.Path;\nimport java.nio.file.Paths;\nimport java.util.EnumSet;\nimport java.util.Set;\nimport java.util.logging.Level;\nimport javafx.application.Platform;\nimport javax.annotation.concurrent.GuardedBy;\nimport javax.swing.JOptionPane;\nimport javax.swing.SwingUtilities;\nimport static org.apache.commons.lang3.StringUtils.isNotBlank;\nimport org.openide.util.NbBundle;\nimport org.sleuthkit.autopsy.casemodule.Case;\nimport org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;\nimport org.sleuthkit.autopsy.casemodule.events.ContentTagAddedEvent;\nimport org.sleuthkit.autopsy.casemodule.events.ContentTagDeletedEvent;\nimport org.sleuthkit.autopsy.core.RuntimeProperties;\nimport org.sleuthkit.autopsy.coreutils.Logger;\nimport org.sleuthkit.autopsy.events.AutopsyEvent;\nimport org.sleuthkit.autopsy.imagegallery.datamodel.DrawableDB;\nimport org.sleuthkit.autopsy.ingest.IngestManager;\nimport static org.sleuthkit.autopsy.ingest.IngestManager.IngestModuleEvent.DATA_ADDED;\nimport 
static org.sleuthkit.autopsy.ingest.IngestManager.IngestModuleEvent.FILE_DONE;\nimport org.sleuthkit.autopsy.ingest.ModuleDataEvent;\nimport org.sleuthkit.autopsy.ingest.events.DataSourceAnalysisEvent;\nimport org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector;\nimport org.sleuthkit.datamodel.AbstractFile;\nimport org.sleuthkit.datamodel.BlackboardArtifact;\nimport org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;\nimport org.sleuthkit.datamodel.Content;\nimport org.sleuthkit.datamodel.TskCoreException;\nimport org.sleuthkit.datamodel.TskData;\n\n/**\n * This class is reponsible for handling selected application events for the\n * image gallery module, managing the image gallery module's per case MVC\n * controller and keeping track of the following state: the module name, the\n * module output directory and whether or not the ingest gallery module is\n * enabled for the current case.\n */\n@NbBundle.Messages({\"ImageGalleryModule.moduleName=Image Gallery\"})\npublic class ImageGalleryModule {\n\n private static final Logger logger = Logger.getLogger(ImageGalleryModule.class.getName());\n private static final String MODULE_NAME = Bundle.ImageGalleryModule_moduleName();\n private static final Set CASE_EVENTS_OF_INTEREST = EnumSet.of(\n Case.Events.CURRENT_CASE,\n Case.Events.DATA_SOURCE_ADDED,\n Case.Events.CONTENT_TAG_ADDED,\n Case.Events.CONTENT_TAG_DELETED\n );\n private static final Object controllerLock = new Object();\n @GuardedBy(\"controllerLock\")\n private static ImageGalleryController controller;\n\n /**\n * Gets the per case image gallery controller for the current case. 
The\n * controller is changed in the case event listener.\n *\n * @return The image gallery controller for the current case.\n *\n * @throws TskCoreException If there is a problem creating the controller.\n */\n public static ImageGalleryController getController() throws TskCoreException {\n synchronized (controllerLock) {\n if (controller == null) {\n try {\n Case currentCase = Case.getCurrentCaseThrows();\n controller = new ImageGalleryController(currentCase);\n } catch (NoCurrentCaseException ex) {\n throw new TskCoreException(\"Failed to get \", ex);\n }\n }\n return controller;\n }\n }\n\n /**\n * Sets the implicit exit property attribute of the JavaFX runtime to false\n * and sets up listeners for application events. It is invoked at\n * application start up by virtue of the OnStart annotation on the OnStart\n * class in this package.\n */\n static void onStart() {\n Platform.setImplicitExit(false);\n IngestManager.getInstance().addIngestJobEventListener(new IngestJobEventListener());\n IngestManager.getInstance().addIngestModuleEventListener(new IngestModuleEventListener());\n Case.addEventTypeSubscriber(CASE_EVENTS_OF_INTEREST, new CaseEventListener());\n }\n\n /**\n * Gets the image gallery module name.\n *\n * @return The module name,\n */\n static String getModuleName() {\n return MODULE_NAME;\n }\n\n /**\n * Gets the path to the image gallery module output folder for a given case.\n *\n * @param theCase The case.\n *\n * @return The path to the image gallery module output folder for the case.\n */\n public static Path getModuleOutputDir(Case theCase) {\n return Paths.get(theCase.getModuleDirectory(), getModuleName());\n }\n\n /**\n * Prevents instantiation.\n */\n private ImageGalleryModule() {\n }\n\n /**\n * Indicates whether or not the image gallery module is enabled for a given\n * case.\n *\n * @param theCase The case.\n *\n * @return True or false.\n */\n static boolean isEnabledforCase(Case theCase) {\n String enabledforCaseProp = new 
PerCaseProperties(theCase).getConfigSetting(ImageGalleryModule.MODULE_NAME, PerCaseProperties.ENABLED);\n return isNotBlank(enabledforCaseProp) ? Boolean.valueOf(enabledforCaseProp) : ImageGalleryPreferences.isEnabledByDefault();\n }\n\n /**\n * Indicates whether or not a given file is of interest to the image gallery\n * module (is \"drawable\") and is not marked as a \"known\" file (e.g., is not\n * a file in the NSRL hash set).\n *\n * @param file The file.\n *\n * @return True if the file is \"drawable\" and not \"known\", false otherwise.\n *\n * @throws FileTypeDetectorInitException If there is an error determining\n * the type of the file.\n */\n private static boolean isDrawableAndNotKnown(AbstractFile abstractFile) throws FileTypeDetector.FileTypeDetectorInitException {\n return (abstractFile.getKnown() != TskData.FileKnown.KNOWN) && FileTypeUtils.isDrawable(abstractFile);\n }\n\n /**\n * A listener for ingest module application events.\n */\n static private class IngestModuleEventListener implements PropertyChangeListener {\n\n @Override\n public void propertyChange(PropertyChangeEvent event) {\n /*\n * Only process individual files and artifacts in \"real time\" on the\n * node that is running the ingest job. 
On a remote node, image\n * files are processed as a group when the ingest job is complete.\n */\n if (((AutopsyEvent) event).getSourceType() != AutopsyEvent.SourceType.LOCAL) {\n return;\n }\n\n ImageGalleryController currentController;\n try {\n currentController = getController();\n } catch (TskCoreException ex) {\n logger.log(Level.SEVERE, String.format(\"Failed to handle %s event\", event.getPropertyName()), ex); //NON-NLS\n return;\n }\n\n String eventType = event.getPropertyName();\n switch (IngestManager.IngestModuleEvent.valueOf(eventType)) {\n case FILE_DONE:\n AbstractFile file = (AbstractFile) event.getNewValue();\n if (!file.isFile()) {\n return;\n }\n if (currentController.isListeningEnabled()) {\n try {\n if (isDrawableAndNotKnown(file)) {\n currentController.queueDBTask(new ImageGalleryController.UpdateFileTask(file, currentController.getDatabase()));\n }\n } catch (FileTypeDetector.FileTypeDetectorInitException ex) {\n logger.log(Level.SEVERE, String.format(\"Failed to determine if file is of interest to the image gallery module, ignoring file (obj_id=%d)\", file.getId()), ex); //NON-NLS\n }\n }\n break;\n case DATA_ADDED:\n ModuleDataEvent artifactAddedEvent = (ModuleDataEvent) event.getOldValue();\n if (artifactAddedEvent.getBlackboardArtifactType().getTypeID() == ARTIFACT_TYPE.TSK_METADATA_EXIF.getTypeID()) {\n DrawableDB drawableDB = currentController.getDatabase();\n if (artifactAddedEvent.getArtifacts() != null) {\n for (BlackboardArtifact art : artifactAddedEvent.getArtifacts()) {\n drawableDB.addExifCache(art.getObjectID());\n }\n }\n } else if (artifactAddedEvent.getBlackboardArtifactType().getTypeID() == ARTIFACT_TYPE.TSK_HASHSET_HIT.getTypeID()) {\n DrawableDB drawableDB = currentController.getDatabase();\n if (artifactAddedEvent.getArtifacts() != null) {\n for (BlackboardArtifact art : artifactAddedEvent.getArtifacts()) {\n drawableDB.addHashSetCache(art.getObjectID());\n }\n }\n }\n break;\n default:\n break;\n }\n }\n }\n\n /**\n * A 
listener for case application events.\n */\n static private class CaseEventListener implements PropertyChangeListener {\n\n @Override\n public void propertyChange(PropertyChangeEvent event) {\n Case.Events eventType = Case.Events.valueOf(event.getPropertyName());\n if (eventType == Case.Events.CURRENT_CASE) {\n synchronized (controllerLock) {\n if (event.getNewValue() != null) {\n /*\n * CURRENT_CASE(_OPENED) event.\n */\n Case newCase = (Case) event.getNewValue();\n try {\n controller = new ImageGalleryController(newCase);\n } catch (TskCoreException ex) {\n logger.log(Level.SEVERE, String.format(\"Failed to construct controller for new case %s (%s)\", newCase.getDisplayName(), newCase.getName()), ex);\n }\n } else if (event.getOldValue() != null) {\n /*\n * CURRENT_CASE(_CLOSED) event.\n */\n SwingUtilities.invokeLater(ImageGalleryTopComponent::closeTopComponent);\n controller.shutDown();\n controller = null;\n }\n }\n } else {\n ImageGalleryController currentController;\n try {\n currentController = getController();\n } catch (TskCoreException ex) {\n logger.log(Level.SEVERE, String.format(\"Failed to handle %s event\", event.getPropertyName()), ex); //NON-NLS\n return;\n }\n\n switch (eventType) {\n case DATA_SOURCE_ADDED:\n if (((AutopsyEvent) event).getSourceType() == AutopsyEvent.SourceType.LOCAL) {\n Content newDataSource = (Content) event.getNewValue();\n if (currentController.isListeningEnabled()) {\n currentController.getDatabase().insertOrUpdateDataSource(newDataSource.getId(), DrawableDB.DrawableDbBuildStatusEnum.UNKNOWN);\n }\n }\n break;\n case CONTENT_TAG_ADDED:\n final ContentTagAddedEvent tagAddedEvent = (ContentTagAddedEvent) event;\n long objId = tagAddedEvent.getAddedTag().getContent().getId();\n DrawableDB drawableDB = currentController.getDatabase();\n drawableDB.addTagCache(objId); // RJCTODO: Why add the tag to the cache before doing the in DB check?\n if (drawableDB.isInDB(objId)) {\n 
currentController.getTagsManager().fireTagAddedEvent(tagAddedEvent);\n }\n break;\n case CONTENT_TAG_DELETED:\n final ContentTagDeletedEvent tagDeletedEvent = (ContentTagDeletedEvent) event;\n if (currentController.getDatabase().isInDB(tagDeletedEvent.getDeletedTagInfo().getContentID())) {\n currentController.getTagsManager().fireTagDeletedEvent(tagDeletedEvent);\n } // RJCTODO: Why not remove the tag from the cache?\n break;\n default:\n logger.log(Level.SEVERE, String.format(\"Received %s event with no subscription\", event.getPropertyName())); //NON-NLS\n break;\n }\n }\n }\n }\n\n /**\n * A listener for ingest job application events.\n */\n static private class IngestJobEventListener implements PropertyChangeListener {\n\n @NbBundle.Messages({\n \"ImageGalleryController.dataSourceAnalyzed.confDlg.msg= A new data source was added and finished ingest.\\n\"\n + \"The image / video database may be out of date. \"\n + \"Do you want to update the database with ingest results?\\n\",\n \"ImageGalleryController.dataSourceAnalyzed.confDlg.title=Image Gallery\"\n })\n @Override\n public void propertyChange(PropertyChangeEvent event) {\n /*\n * Only handling data source analysis events.\n */\n if (!(event instanceof DataSourceAnalysisEvent)) {\n return;\n }\n\n ImageGalleryController controller;\n try {\n controller = getController();\n } catch (TskCoreException ex) {\n logger.log(Level.SEVERE, String.format(\"Failed to handle %s event\", event.getPropertyName()), ex); //NON-NLS\n return;\n }\n\n DataSourceAnalysisEvent dataSourceEvent = (DataSourceAnalysisEvent) event;\n Content dataSource = dataSourceEvent.getDataSource();\n long dataSourceObjId = dataSource.getId();\n String eventType = dataSourceEvent.getPropertyName();\n try {\n switch (IngestManager.IngestJobEvent.valueOf(eventType)) {\n case DATA_SOURCE_ANALYSIS_STARTED:\n if (((AutopsyEvent) event).getSourceType() == AutopsyEvent.SourceType.LOCAL) {\n if (controller.isListeningEnabled()) {\n DrawableDB drawableDb = 
controller.getDatabase();\n // Don't update status if it is is already marked as COMPLETE\n if (drawableDb.getDataSourceDbBuildStatus(dataSourceObjId) != DrawableDB.DrawableDbBuildStatusEnum.COMPLETE) {\n drawableDb.insertOrUpdateDataSource(dataSource.getId(), DrawableDB.DrawableDbBuildStatusEnum.IN_PROGRESS);\n }\n }\n }\n break;\n case DATA_SOURCE_ANALYSIS_COMPLETED:\n if (((AutopsyEvent) event).getSourceType() == AutopsyEvent.SourceType.LOCAL) {\n /*\n * This node just completed analysis of a data\n * source. Set the state of the local drawables\n * database.\n */\n if (controller.isListeningEnabled()) {\n DrawableDB drawableDb = controller.getDatabase();\n if (drawableDb.getDataSourceDbBuildStatus(dataSourceObjId) == DrawableDB.DrawableDbBuildStatusEnum.IN_PROGRESS) {\n\n // If at least one file in CaseDB has mime type, then set to COMPLETE\n // Otherwise, back to UNKNOWN since we assume file type module was not run \n DrawableDB.DrawableDbBuildStatusEnum datasourceDrawableDBStatus\n = controller.hasFilesWithMimeType(dataSourceObjId)\n ? DrawableDB.DrawableDbBuildStatusEnum.COMPLETE\n : DrawableDB.DrawableDbBuildStatusEnum.UNKNOWN;\n\n controller.getDatabase().insertOrUpdateDataSource(dataSource.getId(), datasourceDrawableDBStatus);\n }\n }\n } else if (((AutopsyEvent) event).getSourceType() == AutopsyEvent.SourceType.REMOTE) {\n /*\n * A remote node just completed analysis of a data\n * source. The local drawables database is therefore\n * stale. 
If the image gallery top component is\n * open, give the user an opportunity to update the\n * drawables database now.\n */\n controller.setCaseStale(true);\n if (controller.isListeningEnabled()) {\n SwingUtilities.invokeLater(() -> {\n if (ImageGalleryTopComponent.isImageGalleryOpen()) {\n int showAnswer = JOptionPane.showConfirmDialog(ImageGalleryTopComponent.getTopComponent(),\n Bundle.ImageGalleryController_dataSourceAnalyzed_confDlg_msg(),\n Bundle.ImageGalleryController_dataSourceAnalyzed_confDlg_title(),\n JOptionPane.YES_NO_CANCEL_OPTION, JOptionPane.WARNING_MESSAGE);\n switch (showAnswer) {\n case JOptionPane.YES_OPTION:\n controller.rebuildDB();\n break;\n case JOptionPane.NO_OPTION:\n case JOptionPane.CANCEL_OPTION:\n default:\n break;\n }\n }\n });\n }\n }\n break;\n default:\n break;\n }\n } catch (TskCoreException ex) {\n logger.log(Level.SEVERE, String.format(\"Failed to handle %s event for %s (objId=%d)\", dataSourceEvent.getPropertyName(), dataSource.getName(), dataSourceObjId), ex);\n }\n }\n }\n}\n"},"message":{"kind":"string","value":"Add comments TODOs ImageGalleryModule\n"},"old_file":{"kind":"string","value":"ImageGallery/src/org/sleuthkit/autopsy/imagegallery/ImageGalleryModule.java"},"subject":{"kind":"string","value":"Add comments TODOs ImageGalleryModule"},"git_diff":{"kind":"string","value":"mageGallery/src/org/sleuthkit/autopsy/imagegallery/ImageGalleryModule.java\n ImageGalleryController currentController;\n try {\n currentController = getController();\n // RJCTODO: If a closed controller had a method that could be\n // queried to determine whether it was shut down, we could \n // bail out here. 
The older code that used to try to check for\n // a current case was flawed; there was no guarantee the current\n // case was the same case associated with the event.\n } catch (TskCoreException ex) {\n logger.log(Level.SEVERE, String.format(\"Failed to handle %s event\", event.getPropertyName()), ex); //NON-NLS\n return;\n /**\n * A listener for case application events.\n */\n // RJCTODO: This code would be easier to read if there were two case event \n // listeners, one that handled CURRENT_CASE events and one that handled \n // the other events.\n static private class CaseEventListener implements PropertyChangeListener {\n \n @Override\n */\n SwingUtilities.invokeLater(ImageGalleryTopComponent::closeTopComponent);\n controller.shutDown();\n controller = null;\n }\n }\n } else {\n ImageGalleryController currentController;\n try {\n currentController = getController();\n // RJCTODO: If a closed controller had a method that could be\n // queried to determine whether it was shut down, we could \n // bail out here. The older code that used to try to check for\n // a current case was flawed; there was no guarantee the current\n // case was the same case associated with the event.\n } catch (TskCoreException ex) {\n logger.log(Level.SEVERE, String.format(\"Failed to handle %s event\", event.getPropertyName()), ex); //NON-NLS\n return;\n ImageGalleryController controller;\n try {\n controller = getController();\n // RJCTODO: If a closed controller had a method that could be\n // queried to determine whether it was shut down, we could \n // bail out here. The older code that used to try to check for\n // a current case was flawed; there was no guarantee the current\n // case was the same case associated with the event. 
\n } catch (TskCoreException ex) {\n logger.log(Level.SEVERE, String.format(\"Failed to handle %s event\", event.getPropertyName()), ex); //NON-NLS\n return;"}}},{"rowIdx":2087,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"mit"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"341c6729df9e726a05e37c20e23987ce1f90d108"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"FIRST-Team-1699/autonomous-code"},"new_contents":{"kind":"string","value":"package org.usfirst.frc.team1699.utils.autonomous;\r\n\r\nimport java.util.ArrayList;\r\n\r\nimport org.usfirst.frc.team1699.utils.autonomous.Tokenizer.Token;\r\nimport org.usfirst.frc.team1699.utils.command.Command;\r\n\r\npublic class IfConditionalUtils {\r\n\tprivate static final String[] validConditionalSymbols = {\"<\", \">\", \"==\", \"<=\", \">=\"}; //Stores valid conditional symbols\r\n\t\r\n\t/**\r\n\t * Returns true if string is a conditional\r\n\t * \r\n\t * @param fileAsString\r\n\t * @param startLine\r\n\t * @param reader\r\n\t * @return\r\n\t */\r\n\tpublic static boolean ifConditional(ArrayList fileAsString, int startLine, Tokenizer reader){\r\n\t\tString[] conLine = fileAsString.get(startLine).split(\" \");\r\n\t\tString conditional = \"\";\r\n\t\tint conditionalStart = 0;\r\n\t\tint conditionalEnd = 0;\r\n\t\t\r\n\t\tfor(int i = 0; i < conLine.length; i++){\r\n\t\t\t if(conLine[i].equals(\"if\")){\r\n\t\t\t\t conditionalStart = i + 1;\r\n\t\t\t }\r\n\t\t}\r\n\t\t\r\n\t\tfor(int i = 0; i < conLine.length; i++){\r\n\t\t\t if(conLine[i].equals(\"then:\")){\r\n\t\t\t\t conditionalEnd = i;\r\n\t\t\t }\r\n\t\t}\r\n\t\t\r\n\t\tfor(int i = conditionalStart; i < conditionalEnd; i++){\r\n\t\t\tconditional += conLine[i] + \" \";\r\n\t\t}\r\n\t\t\r\n\t\treturn evaluateConditional(conditional, reader);\r\n\t}\r\n\t\r\n\t/**\r\n\t * Returns true if conditional string evaluates to true\r\n\t * \r\n\t * @param conditional\r\n\t 
* @param tokenizer\r\n\t * @return\r\n\t */\r\n\tpublic static boolean evaluateConditional(String conditional, Tokenizer tokenizer){\r\n\t\tString firstStatement = \"\";\r\n\t\tString secondStatement = \"\";\r\n\t\tString conditionalSymbol = \"\";\r\n\t\tType firstType;\r\n\t\tType secondType;\r\n\t\t\r\n\t\tif(conditional.contains(\">\") || conditional.contains(\"<\") || conditional.contains(\">=\") || conditional.contains(\"<=\") || conditional.contains(\"==\") || conditional.contains(\"!=\")){\r\n\t\t\tfor(int i = 0; i < conditional.length(); i++){\r\n\t\t\t\tif(conditional.substring(i, i + 1).equals(\">\") || conditional.substring(i, i + 1).equals(\"<\") || conditional.substring(i, i + 1).equals(\">=\") || conditional.substring(i, i + 1).equals(\"<=\") || conditional.substring(i, i + 1).equals(\"==\") || conditional.substring(i, i + 1).equals(\"!=\")){\r\n\t\t\t\t\tconditionalSymbol = conditional.substring(i, i + 1);\r\n\t\t\t\t\tfirstStatement = conditional.substring(0, i);\r\n\t\t\t\t\tsecondStatement = conditional.substring(i + 1).trim();\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}else{\r\n\t\t\t//boolean\r\n\t\t}\r\n\t\t\r\n\t\tfirstType = getType(firstStatement);\r\n\t\tsecondType = getType(secondStatement);\r\n\t\t\r\n\t\tif((firstType.equals(Type.DOUBLE) || firstType.equals(Type.INTEGER)) && (secondType.equals(Type.DOUBLE) || secondType.equals(Type.INTEGER))){\r\n\t\t\ttokenizer.tokenize(conditionalSymbol);\r\n\t\t\tToken tok = tokenizer.getTokens().get(0);\r\n\t\t\tswitch(tok.token){\r\n\t\t\t\tcase 0: return AutoUtils.parseDouble(firstStatement) < AutoUtils.parseDouble(secondStatement);\r\n\t\t\t\tcase 1: return AutoUtils.parseDouble(firstStatement) > AutoUtils.parseDouble(secondStatement);\r\n\t\t\t\tcase 2: return AutoUtils.parseDouble(firstStatement) <= AutoUtils.parseDouble(secondStatement);\r\n\t\t\t\tcase 3: return AutoUtils.parseDouble(firstStatement) >= AutoUtils.parseDouble(secondStatement);\r\n\t\t\t\tcase 4: return AutoUtils.parseDouble(firstStatement) 
== AutoUtils.parseDouble(secondStatement);\r\n\t\t\t\tcase 5: return AutoUtils.parseDouble(firstStatement) != AutoUtils.parseDouble(secondStatement);\r\n\t\t\t\tdefault: return false;\r\n\t\t\t}\r\n\t\t}else if(firstType.equals(Type.STRING) && secondType.equals(Type.STRING)){\r\n\t\t\treturn firstStatement.equals(secondStatement);\r\n\t\t}else{\r\n\t\t\treturn false;\r\n\t\t}\r\n\t}\r\n\t\r\n\t/**\r\n\t * Gets Type of string\r\n\t * \r\n\t * @param str\r\n\t * @return\r\n\t */\r\n\tpublic static Type getType(String str){\r\n\t\tif((!str.contains(\".\") && (!str.contains(\"\\\"\")))){\r\n\t\t\treturn Type.INTEGER;\r\n\t\t}else if((str.contains(\".\")) && (!str.contains(\"\\\"\"))){\r\n\t\t\treturn Type.DOUBLE;\r\n\t\t}else{\r\n\t\t\treturn Type.STRING;\r\n\t\t}\r\n\t}\r\n\t\r\n\t/**\r\n\t * Gets length of if\r\n\t * \r\n\t * @param strArr\r\n\t * @param currentLine\r\n\t * @return\r\n\t */\r\n\tpublic static int getIfLength(ArrayList strArr, int currentLine){\r\n\t\tfor(int i = currentLine; i < strArr.size(); i++){\r\n\t\t\tif(strArr.get(i).trim().equals(\"end\")){\r\n\t\t\t\treturn i - currentLine;\r\n\t\t\t}\r\n\t\t}\r\n\t\treturn 0;\r\n\t}\r\n\t\r\n\t/**\r\n\t * Returns true if string is a conditional symbol\r\n\t * \r\n\t * @param conditional\r\n\t * @return\r\n\t */\r\n\tpublic static boolean isConditional(String conditional){\t\r\n\t\tfor(String x: validConditionalSymbols){\r\n\t\t\treturn x.equals(conditional);\r\n\t\t}\r\n\t\treturn false;\r\n\t}\r\n\t\r\n\t/**\r\n\t * Returns conditional symbol from string\r\n\t * \r\n\t * @param conditional\r\n\t * @return\r\n\t */\r\n\tpublic static String getConditional(String conditional){\t\r\n\t\tfor(String x: validConditionalSymbols){\r\n\t\t\tif(x.equals(conditional)){\r\n\t\t\t\treturn x;\r\n\t\t\t}\r\n\t\t}\r\n\t\treturn null;\r\n\t}\r\n\t\r\n\t/**\r\n\t * Returns true if string contains a conditional\r\n\t * \r\n\t * @param string\r\n\t * @return\r\n\t */\r\n\tpublic static boolean containsIfConditional(String 
string){\r\n\t\tString[] inp = string.split(\" \");\r\n\t\tfor(int i = 0; i < inp.length; i++){\r\n\t\t\tif(inp[i].equals(\"if\")){\r\n\t\t\t\treturn true;\r\n\t\t\t}\r\n\t\t}\r\n\t\treturn false;\r\n\t}\r\n}\r\n"},"new_file":{"kind":"string","value":"src/org/usfirst/frc/team1699/utils/autonomous/IfConditionalUtils.java"},"old_contents":{"kind":"string","value":"package org.usfirst.frc.team1699.utils.autonomous;\r\n\r\nimport java.util.ArrayList;\r\n\r\nimport org.usfirst.frc.team1699.utils.autonomous.Tokenizer.Token;\r\nimport org.usfirst.frc.team1699.utils.command.Command;\r\n\r\npublic class IfConditionalUtils {\r\n\tprivate static final String[] validConditionalSymbols = {\"<\", \">\", \"==\", \"<=\", \">=\"}; //Stores valid conditional symbols\r\n\t\r\n\t\r\n\t\r\n\tpublic static boolean ifConditional(ArrayList fileAsString, int startLine, Tokenizer reader){\r\n\t\tString[] conLine = fileAsString.get(startLine).split(\" \");\r\n\t\tString conditional = \"\";\r\n\t\tint conditionalStart = 0;\r\n\t\tint conditionalEnd = 0;\r\n\t\t\r\n\t\tfor(int i = 0; i < conLine.length; i++){\r\n\t\t\t if(conLine[i].equals(\"if\")){\r\n\t\t\t\t conditionalStart = i + 1;\r\n\t\t\t }\r\n\t\t}\r\n\t\t\r\n\t\tfor(int i = 0; i < conLine.length; i++){\r\n\t\t\t if(conLine[i].equals(\"then:\")){\r\n\t\t\t\t conditionalEnd = i;\r\n\t\t\t }\r\n\t\t}\r\n\t\t\r\n\t\tfor(int i = conditionalStart; i < conditionalEnd; i++){\r\n\t\t\tconditional += conLine[i] + \" \";\r\n\t\t}\r\n\t\t\r\n\t\treturn evaluateConditional(conditional, reader);\r\n\t}\r\n\t\r\n\tpublic static boolean evaluateConditional(String conditional, Tokenizer tokenizer){\r\n\t\tString firstStatement = \"\";\r\n\t\tString secondStatement = \"\";\r\n\t\tString conditionalSymbol = \"\";\r\n\t\tType firstType;\r\n\t\tType secondType;\r\n\t\t\r\n\t\tif(conditional.contains(\">\") || conditional.contains(\"<\") || conditional.contains(\">=\") || conditional.contains(\"<=\") || conditional.contains(\"==\") || 
conditional.contains(\"!=\")){\r\n\t\t\tfor(int i = 0; i < conditional.length(); i++){\r\n\t\t\t\tif(conditional.substring(i, i + 1).equals(\">\") || conditional.substring(i, i + 1).equals(\"<\") || conditional.substring(i, i + 1).equals(\">=\") || conditional.substring(i, i + 1).equals(\"<=\") || conditional.substring(i, i + 1).equals(\"==\") || conditional.substring(i, i + 1).equals(\"!=\")){\r\n\t\t\t\t\tconditionalSymbol = conditional.substring(i, i + 1);\r\n\t\t\t\t\tfirstStatement = conditional.substring(0, i);\r\n\t\t\t\t\tsecondStatement = conditional.substring(i + 1).trim();\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}else{\r\n\t\t\t//boolean\r\n\t\t}\r\n\t\t\r\n\t\tfirstType = getType(firstStatement);\r\n\t\tsecondType = getType(secondStatement);\r\n\t\t\r\n\t\tif((firstType.equals(Type.DOUBLE) || firstType.equals(Type.INTEGER)) && (secondType.equals(Type.DOUBLE) || secondType.equals(Type.INTEGER))){\r\n\t\t\ttokenizer.tokenize(conditionalSymbol);\r\n\t\t\tToken tok = tokenizer.getTokens().get(0);\r\n\t\t\tswitch(tok.token){\r\n\t\t\t\tcase 0: return AutoUtils.parseDouble(firstStatement) < AutoUtils.parseDouble(secondStatement);\r\n\t\t\t\tcase 1: return AutoUtils.parseDouble(firstStatement) > AutoUtils.parseDouble(secondStatement);\r\n\t\t\t\tcase 2: return AutoUtils.parseDouble(firstStatement) <= AutoUtils.parseDouble(secondStatement);\r\n\t\t\t\tcase 3: return AutoUtils.parseDouble(firstStatement) >= AutoUtils.parseDouble(secondStatement);\r\n\t\t\t\tcase 4: return AutoUtils.parseDouble(firstStatement) == AutoUtils.parseDouble(secondStatement);\r\n\t\t\t\tcase 5: return AutoUtils.parseDouble(firstStatement) != AutoUtils.parseDouble(secondStatement);\r\n\t\t\t\tdefault: return false;\r\n\t\t\t}\r\n\t\t}else if(firstType.equals(Type.STRING) && secondType.equals(Type.STRING)){\r\n\t\t\treturn firstStatement.equals(secondStatement);\r\n\t\t}else{\r\n\t\t\treturn false;\r\n\t\t}\r\n\t}\r\n\t\r\n\tpublic static Type getType(String str){\r\n\t\tif((!str.contains(\".\") 
&& (!str.contains(\"\\\"\")))){\r\n\t\t\treturn Type.INTEGER;\r\n\t\t}else if((str.contains(\".\")) && (!str.contains(\"\\\"\"))){\r\n\t\t\treturn Type.DOUBLE;\r\n\t\t}else{\r\n\t\t\treturn Type.STRING;\r\n\t\t}\r\n\t}\r\n\t\r\n\tpublic static int getIfLength(ArrayList strArr, int currentLine){\r\n\t\tfor(int i = currentLine; i < strArr.size(); i++){\r\n\t\t\tif(strArr.get(i).trim().equals(\"end\")){\r\n\t\t\t\treturn i - currentLine;\r\n\t\t\t}\r\n\t\t}\r\n\t\treturn 0;\r\n\t}\r\n\t\r\n\tpublic static boolean isConditional(String conditional){\t\r\n\t\tfor(String x: validConditionalSymbols){\r\n\t\t\treturn x.equals(conditional);\r\n\t\t}\r\n\t\treturn false;\r\n\t}\r\n\t\r\n\tpublic static String getConditional(String conditional){\t\r\n\t\tfor(String x: validConditionalSymbols){\r\n\t\t\tif(x.equals(conditional)){\r\n\t\t\t\treturn x;\r\n\t\t\t}\r\n\t\t}\r\n\t\treturn null;\r\n\t}\r\n\t\r\n\tpublic static boolean containsIfConditional(String string){\r\n\t\tString[] inp = string.split(\" \");\r\n\t\tfor(int i = 0; i < inp.length; i++){\r\n\t\t\tif(inp[i].equals(\"if\")){\r\n\t\t\t\treturn true;\r\n\t\t\t}\r\n\t\t}\r\n\t\treturn false;\r\n\t}\r\n}\r\n"},"message":{"kind":"string","value":"JavaDocs"},"old_file":{"kind":"string","value":"src/org/usfirst/frc/team1699/utils/autonomous/IfConditionalUtils.java"},"subject":{"kind":"string","value":"JavaDocs"},"git_diff":{"kind":"string","value":"rc/org/usfirst/frc/team1699/utils/autonomous/IfConditionalUtils.java\n public class IfConditionalUtils {\n \tprivate static final String[] validConditionalSymbols = {\"<\", \">\", \"==\", \"<=\", \">=\"}; //Stores valid conditional symbols\n \t\n\t\n\t\n\t/**\n\t * Returns true if string is a conditional\n\t * \n\t * @param fileAsString\n\t * @param startLine\n\t * @param reader\n\t * @return\n\t */\n \tpublic static boolean ifConditional(ArrayList fileAsString, int startLine, Tokenizer reader){\n \t\tString[] conLine = fileAsString.get(startLine).split(\" \");\n \t\tString 
conditional = \"\";\n \t\treturn evaluateConditional(conditional, reader);\n \t}\n \t\n\t/**\n\t * Returns true if conditional string evaluates to true\n\t * \n\t * @param conditional\n\t * @param tokenizer\n\t * @return\n\t */\n \tpublic static boolean evaluateConditional(String conditional, Tokenizer tokenizer){\n \t\tString firstStatement = \"\";\n \t\tString secondStatement = \"\";\n \t\t}\n \t}\n \t\n\t/**\n\t * Gets Type of string\n\t * \n\t * @param str\n\t * @return\n\t */\n \tpublic static Type getType(String str){\n \t\tif((!str.contains(\".\") && (!str.contains(\"\\\"\")))){\n \t\t\treturn Type.INTEGER;\n \t\t}\n \t}\n \t\n\t/**\n\t * Gets length of if\n\t * \n\t * @param strArr\n\t * @param currentLine\n\t * @return\n\t */\n \tpublic static int getIfLength(ArrayList strArr, int currentLine){\n \t\tfor(int i = currentLine; i < strArr.size(); i++){\n \t\t\tif(strArr.get(i).trim().equals(\"end\")){\n \t\treturn 0;\n \t}\n \t\n\t/**\n\t * Returns true if string is a conditional symbol\n\t * \n\t * @param conditional\n\t * @return\n\t */\n \tpublic static boolean isConditional(String conditional){\t\n \t\tfor(String x: validConditionalSymbols){\n \t\t\treturn x.equals(conditional);\n \t\treturn false;\n \t}\n \t\n\t/**\n\t * Returns conditional symbol from string\n\t * \n\t * @param conditional\n\t * @return\n\t */\n \tpublic static String getConditional(String conditional){\t\n \t\tfor(String x: validConditionalSymbols){\n \t\t\tif(x.equals(conditional)){\n \t\treturn null;\n \t}\n \t\n\t/**\n\t * Returns true if string contains a conditional\n\t * \n\t * @param string\n\t * @return\n\t */\n \tpublic static boolean containsIfConditional(String string){\n \t\tString[] inp = string.split(\" \");\n \t\tfor(int i = 0; i < inp.length; 
i++){"}}},{"rowIdx":2088,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"bsd-3-clause"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"f34d443059f61928439f703979d12314445c723c"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"terabyte/Jira-Commit-Acceptance-Plugin,terabyte/Jira-Commit-Acceptance-Plugin,terabyte/Jira-Commit-Acceptance-Plugin,terabyte/Jira-Commit-Acceptance-Plugin,terabyte/Jira-Commit-Acceptance-Plugin"},"new_contents":{"kind":"string","value":"package com.atlassian.jira.ext.commitacceptance.server.action;\r\n\r\nimport org.apache.commons.lang.builder.HashCodeBuilder;\r\n\r\n/**\r\n * Wraps the commit acceptance settings for one project\r\n * or globally.\r\n *\r\n * @author Ferenc Kiss\r\n * @version $Id$\r\n */\r\npublic class AcceptanceSettings {\r\n\t/**\r\n\t * If true the global settings override\r\n\t * the project-specific settings.\r\n\t */\r\n\tprivate boolean useGlobalRules;\r\n\r\n\t/**\r\n * If true the commit message must contain\r\n * at least valid issue key.\r\n */\r\n private boolean mustHaveIssue;\r\n /**\r\n * If true, all the issues must be unresolved.\r\n */\r\n private boolean mustBeUnresolved;\r\n /**\r\n * If true, all the issues must be assigned to\r\n * the commiter.\r\n */\r\n private boolean mustBeAssignedToCommiter;\r\n\r\n public boolean getUseGlobalRules() {\r\n\t\treturn useGlobalRules;\r\n\t}\r\n\r\n\tpublic void setUseGlobalRules(boolean useGlobalRules) {\r\n\t\tthis.useGlobalRules = useGlobalRules;\r\n\t}\r\n\r\n\tpublic boolean isMustHaveIssue() {\r\n return mustHaveIssue;\r\n }\r\n\r\n public void setMustHaveIssue(boolean mustHaveIssue) {\r\n this.mustHaveIssue = mustHaveIssue;\r\n }\r\n\r\n public boolean isMustBeUnresolved() {\r\n return mustBeUnresolved;\r\n }\r\n\r\n public void setMustBeUnresolved(boolean mustBeUnresolved) {\r\n this.mustBeUnresolved = mustBeUnresolved;\r\n }\r\n\r\n public boolean 
isMustBeAssignedToCommiter() {\r\n return mustBeAssignedToCommiter;\r\n }\r\n\r\n public void setMustBeAssignedToCommiter(boolean mustBeAssignedToCommiter) {\r\n this.mustBeAssignedToCommiter = mustBeAssignedToCommiter;\r\n }\r\n\r\n\tpublic boolean equals(Object obj) {\r\n\t\tif (obj == this) {\r\n\t\t\treturn true;\r\n\t\t}\r\n\r\n\t\tAcceptanceSettings other = (AcceptanceSettings)obj;\r\n\t\treturn (useGlobalRules == other.getUseGlobalRules()) &&\r\n\t\t\t\t(mustHaveIssue == other.isMustHaveIssue()) &&\r\n\t\t\t\t(mustBeUnresolved == other.isMustBeUnresolved()) &&\r\n\t\t\t\t(mustBeAssignedToCommiter == other.isMustBeAssignedToCommiter());\r\n\t}\r\n\r\n\tpublic int hashCode() {\r\n\t\treturn new HashCodeBuilder(79, 11).append(useGlobalRules).append(mustHaveIssue).append(mustBeUnresolved).append(mustBeAssignedToCommiter).hashCode();\r\n\t}\r\n}\r\n"},"new_file":{"kind":"string","value":"src/java/com/atlassian/jira/ext/commitacceptance/server/action/AcceptanceSettings.java"},"old_contents":{"kind":"string","value":"package com.atlassian.jira.ext.commitacceptance.server.action;\r\n\r\nimport org.apache.commons.lang.builder.HashCodeBuilder;\r\n\r\n/**\r\n * Wraps the commit acceptance settings for one project\r\n * or globally.\r\n *\r\n * @author Ferenc Kiss\r\n * @version $Id$\r\n */\r\npublic class AcceptanceSettings {\r\n\t/**\r\n\t * If true the global settings override\r\n\t * the project-specific settings.\r\n\t */\r\n\tprivate boolean useGlobalRules;\r\n /**\r\n * If true the commit message must contain\r\n * at least valid issue key.\r\n */\r\n private boolean mustHaveIssue;\r\n\r\n /**\r\n * If true, all the issues must be assigned to\r\n * the commiter.\r\n */\r\n private boolean mustBeAssignedToCommiter;\r\n\r\n /**\r\n * If true, all the issues must be unresolved.\r\n */\r\n private boolean mustBeUnresolved;\r\n\r\n\r\n public boolean getUseGlobalRules() {\r\n\t\treturn useGlobalRules;\r\n\t}\r\n\r\n\tpublic void setUseGlobalRules(boolean 
useGlobalRules) {\r\n\t\tthis.useGlobalRules = useGlobalRules;\r\n\t}\r\n\r\n\tpublic boolean isMustHaveIssue() {\r\n return mustHaveIssue;\r\n }\r\n\r\n public void setMustHaveIssue(boolean mustHaveIssue) {\r\n this.mustHaveIssue = mustHaveIssue;\r\n }\r\n\r\n public boolean isMustBeUnresolved() {\r\n return mustBeUnresolved;\r\n }\r\n\r\n public void setMustBeUnresolved(boolean mustBeUnresolved) {\r\n this.mustBeUnresolved = mustBeUnresolved;\r\n }\r\n\r\n public boolean isMustBeAssignedToCommiter() {\r\n return mustBeAssignedToCommiter;\r\n }\r\n\r\n public void setMustBeAssignedToCommiter(boolean mustBeAssignedToCommiter) {\r\n this.mustBeAssignedToCommiter = mustBeAssignedToCommiter;\r\n }\r\n\r\n\tpublic boolean equals(Object obj) {\r\n\t\tif (obj == this) {\r\n\t\t\treturn true;\r\n\t\t}\r\n\r\n\t\tAcceptanceSettings other = (AcceptanceSettings)obj;\r\n\t\treturn (useGlobalRules == other.getUseGlobalRules()) &&\r\n\t\t\t\t(mustHaveIssue == other.isMustHaveIssue()) &&\r\n\t\t\t\t(mustBeUnresolved == other.isMustBeUnresolved()) &&\r\n\t\t\t\t(mustBeAssignedToCommiter == other.isMustBeAssignedToCommiter());\r\n\t}\r\n\r\n\tpublic int hashCode() {\r\n\t\treturn new HashCodeBuilder(79, 11).append(useGlobalRules).append(mustHaveIssue).append(mustBeUnresolved).append(mustBeAssignedToCommiter).hashCode();\r\n\t}\r\n}\r\n"},"message":{"kind":"string","value":"Property declarations in consistent order\n\ngit-svn-id: d0eb299132fa1d4c22ea7ae7c6220ead72f92c2a@6642 2c54a935-e501-0410-bc05-97a93f6bca70\n"},"old_file":{"kind":"string","value":"src/java/com/atlassian/jira/ext/commitacceptance/server/action/AcceptanceSettings.java"},"subject":{"kind":"string","value":"Property declarations in consistent order"},"git_diff":{"kind":"string","value":"rc/java/com/atlassian/jira/ext/commitacceptance/server/action/AcceptanceSettings.java\n \t * the project-specific settings.\n \t */\n \tprivate boolean useGlobalRules;\n /**\n\n\t/**\n * If true the commit message must contain\n * 
at least valid issue key.\n */\n private boolean mustHaveIssue;\n\n /**\n * If true, all the issues must be unresolved.\n */\n private boolean mustBeUnresolved;\n /**\n * If true, all the issues must be assigned to\n * the commiter.\n */\n private boolean mustBeAssignedToCommiter;\n\n /**\n * If true, all the issues must be unresolved.\n */\n private boolean mustBeUnresolved;\n\n \n public boolean getUseGlobalRules() {\n \t\treturn useGlobalRules;"}}},{"rowIdx":2089,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"b206507178eaa6d780654120ce5f3bede01cdaeb"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"akamai-open/AkamaiOPEN-edgegrid-java,akamai-open/edgegrid-auth-java"},"new_contents":{"kind":"string","value":"/*\n * Copyright 2016 Copyright 2016 Akamai Technologies, Inc. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage com.akamai.edgegrid.signer;\n\nimport java.net.URI;\nimport java.net.URISyntaxException;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.Objects;\n\nimport org.apache.commons.lang3.Validate;\nimport org.apache.commons.lang3.builder.Builder;\nimport org.apache.commons.lang3.builder.CompareToBuilder;\nimport org.apache.commons.lang3.builder.ToStringBuilder;\nimport 
org.apache.commons.lang3.builder.ToStringStyle;\n\nimport com.akamai.edgegrid.signer.exceptions.RequestSigningException;\n\n\n/**\n * Library-agnostic representation of an HTTP request. This object is immutable, so you probably\n * want to build an instance using {@link RequestBuilder}. Extenders of\n * {@link AbstractEdgeGridRequestSigner} will need to build one of these as part of their\n * implementation.\n *\n * @author mgawinec@akamai.com\n * @author mmeyer@akamai.com\n */\npublic class Request implements Comparable {\n\n private final byte[] body;\n private final String method;\n private final URI uri;\n private final Map headers;\n\n private Request(RequestBuilder b) {\n this.body = b.body;\n this.method = b.method;\n this.headers = b.headers;\n this.uri = b.uri;\n }\n\n /**\n * Returns a new builder. The returned builder is equivalent to the builder\n * generated by {@link RequestBuilder}.\n *\n * @return a fresh {@link RequestBuilder}\n */\n public static RequestBuilder builder() {\n return new RequestBuilder();\n }\n\n @Override\n public int compareTo(Request that) {\n return new CompareToBuilder()\n .append(this.body, that.body)\n .append(this.headers, that.headers)\n .append(this.method, that.method)\n .append(this.uri, that.uri)\n .build();\n }\n\n @Override\n public boolean equals(Object o) {\n if (o == null) return false;\n if (getClass() != o.getClass()) return false;\n final Request that = (Request) o;\n return compareTo(that) == 0;\n }\n\n @Override\n public int hashCode() {\n return Objects.hash(body, headers, method, uri);\n }\n\n @Override\n public String toString() {\n return new ToStringBuilder(this, ToStringStyle.JSON_STYLE)\n .append(\"body\", body)\n .append(\"headers\", headers)\n .append(\"method\", method)\n .append(\"uri\", uri)\n .build();\n }\n\n byte[] getBody() {\n return body;\n }\n\n Map getHeaders() {\n return Collections.unmodifiableMap(headers);\n }\n\n String getMethod() {\n return method;\n }\n\n URI getUri() {\n return 
uri;\n }\n\n /**\n * Creates a new builder. The returned builder is equivalent to the builder\n * generated by {@link Request#builder()}.\n */\n public static class RequestBuilder implements Builder {\n\n private byte[] body = new byte[]{};\n private Map headers = new HashMap<>();\n private String method;\n private URI uri;\n\n /**\n * Sets a content of HTTP request body. If not set, body is empty by default.\n *\n * @param requestBody a request body, in bytes\n * @return reference back to this builder instance\n */\n public RequestBuilder body(byte[] requestBody) {\n Validate.notNull(body, \"body cannot be blank\");\n this.body = Arrays.copyOf(requestBody, requestBody.length);\n return this;\n }\n\n /**\n *

      \n * Adds a single header for an HTTP request. This can be called multiple times to add as\n * many headers as needed.\n *

      \n *

      \n * NOTE: All header names are lower-cased for storage. In HTTP, header names are\n * case-insensitive anyway, and EdgeGrid does not support multiple headers with the same\n * name. Forcing to lowercase here improves our chance of detecting bad requests early.\n *

      \n *\n * @param headerName a header name\n * @param value a header value\n * @return reference back to this builder instance\n * @throws RequestSigningException if a duplicate header name is encountered\n */\n public RequestBuilder header(String headerName, String value) throws RequestSigningException {\n Validate.notEmpty(headerName, \"headerName cannot be empty\");\n Validate.notEmpty(value, \"value cannot be empty\");\n headerName = headerName.toLowerCase();\n if (this.headers.containsKey(headerName)) {\n throw new RequestSigningException(\"Duplicate header found: \" + headerName);\n }\n headers.put(headerName, value);\n return this;\n }\n\n /**\n *

      \n * Sets headers of HTTP request. The {@code headers} parameter is copied so that changes\n * to the original {@link Map} will not impact the stored reference.\n *

      \n *

      \n * NOTE: All header names are lower-cased for storage. In HTTP, header names are\n * case-insensitive anyway, and EdgeGrid does not support multiple headers with the same\n * name. Forcing to lowercase here improves our chance of detecting bad requests early.\n *

      \n *\n * @param headers a {@link Map} of headers\n * @return reference back to this builder instance\n * @throws RequestSigningException if a duplicate header name is encountered\n */\n public RequestBuilder headers(Map headers) throws RequestSigningException {\n Validate.notNull(headers, \"headers cannot be null\");\n for (Map.Entry entry : headers.entrySet()) {\n header(entry.getKey(), entry.getValue());\n }\n return this;\n }\n\n /**\n * Sets HTTP method: GET, PUT, POST, DELETE. Mandatory to set.\n *\n * @param method an HTTP method\n * @return reference back to this builder instance\n */\n public RequestBuilder method(String method) {\n Validate.notBlank(method, \"method cannot be blank\");\n this.method = method;\n return this;\n }\n\n /**\n *

      \n * Sets the URI of the HTTP request. This URI MUST have the correct path and query\n * segments set. Scheme is assumed to be \"HTTPS\" for the purpose of this library. Host is\n * actually taken from a {@link ClientCredential} as signing time; any value in this URI is\n * discarded. Fragments are not signed.\n *

      \n *

      \n * A path and/or query string is required.\n *

      \n *\n * @param uri a {@link URI}\n * @return reference back to this builder instance\n */\n public RequestBuilder uri(String uri) {\n Validate.notEmpty(uri, \"uri cannot be blank\");\n return uri(URI.create(uri));\n }\n\n /**\n *

      \n * Sets the URI of the HTTP request. This URI MUST have the correct path and query\n * segments set. Scheme is assumed to be \"HTTPS\" for the purpose of this library. Host is\n * actually taken from a {@link ClientCredential} as signing time; any value in this URI is\n * discarded. Fragments are not signed.\n *

      \n *

      \n * A path and/or query string is required.\n *

      \n *\n * @param uri a {@link URI}\n * @return reference back to this builder instance\n */\n public RequestBuilder uri(URI uri) {\n Validate.notNull(uri, \"uri cannot be null\");\n try {\n this.uri = new URI(null, null, uri.getPath(), uri.getQuery(), null);\n } catch (URISyntaxException e) {\n throw new IllegalArgumentException(\"Error setting URI\", e);\n }\n return this;\n }\n\n\n /**\n * Returns a newly-created immutable HTTP request.\n */\n @Override\n public Request build() {\n Validate.notNull(body, \"body cannot be blank\");\n Validate.notBlank(method, \"method cannot be blank\");\n Validate.notNull(uri, \"uriWithQuery cannot be blank\");\n return new Request(this);\n }\n\n }\n\n}\n"},"new_file":{"kind":"string","value":"edgegrid-signer-core/src/main/java/com/akamai/edgegrid/signer/Request.java"},"old_contents":{"kind":"string","value":"/*\n * Copyright 2016 Copyright 2016 Akamai Technologies, Inc. All Rights Reserved.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\npackage com.akamai.edgegrid.signer;\n\nimport java.net.URI;\nimport java.net.URISyntaxException;\nimport java.util.Arrays;\nimport java.util.Collections;\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.Objects;\n\nimport org.apache.commons.lang3.Validate;\nimport org.apache.commons.lang3.builder.Builder;\nimport org.apache.commons.lang3.builder.CompareToBuilder;\nimport org.apache.commons.lang3.builder.ToStringBuilder;\nimport 
org.apache.commons.lang3.builder.ToStringStyle;\n\nimport com.akamai.edgegrid.signer.exceptions.RequestSigningException;\n\n\n/**\n * Library-agnostic representation of an HTTP request. This object is immutable, so you probably\n * want to build an instance using {@link RequestBuilder}. Extenders of\n * {@link AbstractEdgeGridRequestSigner} will need to build one of these as part of their\n * implementation.\n *\n * @author mgawinec@akamai.com\n * @author mmeyer@akamai.com\n */\npublic class Request implements Comparable {\n\n private final byte[] body;\n private final String method;\n private final URI uri;\n private final Map headers;\n\n private Request(RequestBuilder b) {\n this.body = b.body;\n this.method = b.method;\n this.headers = b.headers;\n this.uri = b.uri;\n }\n\n /**\n * Returns a new builder. The returned builder is equivalent to the builder\n * generated by {@link RequestBuilder}.\n *\n * @return a fresh {@link RequestBuilder}\n */\n public static RequestBuilder builder() {\n return new RequestBuilder();\n }\n\n @Override\n public int compareTo(Request that) {\n return new CompareToBuilder()\n .append(this.body, that.body)\n .append(this.headers, that.headers)\n .append(this.method, that.method)\n .append(this.uri, that.uri)\n .build();\n }\n\n @Override\n public boolean equals(Object o) {\n if (o == null) return false;\n if (getClass() != o.getClass()) return false;\n final Request that = (Request) o;\n return compareTo(that) == 0;\n }\n\n @Override\n public int hashCode() {\n return Objects.hash(body, headers, method, uri);\n }\n\n @Override\n public String toString() {\n return new ToStringBuilder(this, ToStringStyle.JSON_STYLE)\n .append(\"body\", body)\n .append(\"headers\", headers)\n .append(\"method\", method)\n .append(\"uri\", uri)\n .build();\n }\n\n byte[] getBody() {\n return body;\n }\n\n Map getHeaders() {\n return Collections.unmodifiableMap(headers);\n }\n\n String getMethod() {\n return method;\n }\n\n URI getUri() {\n return 
uri;\n }\n\n /**\n * Creates a new builder. The returned builder is equivalent to the builder\n * generated by {@link Request#builder()}.\n */\n public static class RequestBuilder implements Builder {\n\n private byte[] body = new byte[]{};\n private Map headers = new HashMap<>();\n private String method;\n private URI uri;\n\n /**\n * Sets a content of HTTP request body. If not set, body is empty by default.\n *\n * @param requestBody a request body, in bytes\n * @return reference back to this builder instance\n */\n public RequestBuilder body(byte[] requestBody) {\n Validate.notNull(body, \"body cannot be blank\");\n this.body = Arrays.copyOf(requestBody, requestBody.length);\n return this;\n }\n\n /**\n *

      \n * Adds a single header for an HTTP request. This can be called multiple times to add as\n * many headers as needed.\n *

      \n *

      \n * NOTE: All header names are lower-cased for storage. In HTTP, header names are\n * case-insensitive anyway, and EdgeGrid does not support multiple headers with the same\n * name. Forcing to lowercase here improves our chance of detecting bad requests early.\n *

      \n *\n * @param headerName a header name\n * @param value a header value\n * @return reference back to this builder instance\n * @throws RequestSigningException if a duplicate header name is encountered\n */\n public RequestBuilder header(String headerName, String value) throws RequestSigningException {\n Validate.notEmpty(headerName, \"headerName cannot be empty\");\n Validate.notEmpty(value, \"value cannot be empty\");\n headerName = headerName.toLowerCase();\n if (this.headers.containsKey(headerName)) {\n throw new RequestSigningException(\"Duplicate header found: \" + headerName);\n }\n headers.put(headerName, value);\n return this;\n }\n\n /**\n *

      \n * Sets headers of HTTP request. The {@code headers} parameter is copied so that changes\n * to the original {@link Map} will not impact the stored reference.\n *

      \n *

      \n * NOTE: All header names are lower-cased for storage. In HTTP, header names are\n * case-insensitive anyway, and EdgeGrid does not support multiple headers with the same\n * name. Forcing to lowercase here improves our chance of detecting bad requests early.\n *

      \n *\n * @param headers a {@link Map} of headers\n * @return reference back to this builder instance\n * @throws RequestSigningException if a duplicate header name is encountered\n */\n public RequestBuilder headers(Map headers) throws RequestSigningException {\n Validate.notNull(headers, \"headers cannot be null\");\n for (Map.Entry entry : headers.entrySet()) {\n header(entry.getKey(), entry.getValue());\n }\n return this;\n }\n\n /**\n * Sets HTTP method: GET, PUT, POST, DELETE. Mandatory to set.\n *\n * @param method an HTTP method\n * @return reference back to this builder instance\n */\n public RequestBuilder method(String method) {\n Validate.notBlank(method, \"method cannot be blank\");\n this.method = method;\n return this;\n }\n\n /**\n *

      \n * Sets the URI of the HTTP request. This URI MUST have the correct path and query\n * segments set. Scheme is assumed to be \"HTTPS\" for the purpose of this library. Host is\n * actually taken from a {@link ClientCredential} as signing time; any value in this URI is\n * discarded. Fragments are not signed.\n *

      \n *

      \n * A path and/or query string is required.\n *

      \n *\n * @param uri a {@link URI}\n * @return reference back to this builder instance\n */\n public RequestBuilder uri(String uri) {\n Validate.notEmpty(uri, \"uri cannot be blank\");\n return uri(URI.create(uri));\n }\n\n /**\n *

      \n * Sets the URI of the HTTP request. This URI MUST have the correct path and query\n * segments set. Scheme is assumed to be \"HTTPS\" for the purpose of this library. Host is\n * actually taken from a {@link ClientCredential} as signing time; any value in this URI is\n * discarded. Fragments are not signed.\n *

      \n *

      \n * A path and/or query string is required.\n *

      \n *\n * @param uri a {@link URI}\n * @return reference back to this builder instance\n */\n public RequestBuilder uri(URI uri) {\n Validate.notNull(uri, \"uri cannot be null\");\n try {\n this.uri = new URI(null, null, uri.getPath(), uri.getQuery(), null);\n } catch (URISyntaxException e) {\n throw new IllegalArgumentException(\"Error setting URI\", e);\n }\n return this;\n }\n\n /**\n * Please use {@link #uri(URI)} instead.\n *\n * @param uri a {@link URI}\n * @return reference back to this builder instance\n * @deprecated\n */\n @Deprecated\n public RequestBuilder uriWithQuery(URI uri) {\n return uri(uri);\n }\n\n /**\n * Returns a newly-created immutable HTTP request.\n */\n @Override\n public Request build() {\n Validate.notNull(body, \"body cannot be blank\");\n Validate.notBlank(method, \"method cannot be blank\");\n Validate.notNull(uri, \"uriWithQuery cannot be blank\");\n return new Request(this);\n }\n\n }\n\n}\n"},"message":{"kind":"string","value":"Remove deprecated method RequestBuilder#uriWithQuery\n"},"old_file":{"kind":"string","value":"edgegrid-signer-core/src/main/java/com/akamai/edgegrid/signer/Request.java"},"subject":{"kind":"string","value":"Remove deprecated method RequestBuilder#uriWithQuery"},"git_diff":{"kind":"string","value":"dgegrid-signer-core/src/main/java/com/akamai/edgegrid/signer/Request.java\n return this;\n }\n \n /**\n * Please use {@link #uri(URI)} instead.\n *\n * @param uri a {@link URI}\n * @return reference back to this builder instance\n * @deprecated\n */\n @Deprecated\n public RequestBuilder uriWithQuery(URI uri) {\n return uri(uri);\n }\n \n /**\n * Returns a newly-created immutable HTTP 
request."}}},{"rowIdx":2090,"cells":{"lang":{"kind":"string","value":"JavaScript"},"license":{"kind":"string","value":"apache-2.0"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"789d0ba711d70fcc5715959840e43b8aee3b8cb7"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"Undev/redmine_cut_tag,Undev/redmine_cut_tag,Undev/redmine_cut_tag"},"new_contents":{"kind":"string","value":"// due to this task http://www.redmine.org/issues/11445\n// check which framework is installed\n\nif (typeof(jQuery) == 'undefined') {\n // using prototype\n\n document.observe('click', function(event) {\n\n var switchesSelector = '.cut_tag_show,.cut_tag_hide';\n var contentSelector = '.cut_tag_content';\n\n var switcher = event.findElement(switchesSelector);\n if (switcher) {\n var cutTagEl = switcher.parentNode;\n var selector = switchesSelector + ',' + contentSelector;\n Selector.matchElements(cutTagEl.childElements(), selector).map(Element.toggle);\n Event.stop(event);\n }\n });\n} else {\n // using jquery\n\n $(document).ready(function() {\n\n var switchesSelector = '.cut_tag_show,.cut_tag_hide';\n var contentSelector = '.cut_tag_content';\n\n $(document).on('click', switchesSelector, function() {\n var selector = switchesSelector + ',' + contentSelector;\n $(this).parent().children(selector).toggle();\n return false;\n });\n });\n}\n\n\n"},"new_file":{"kind":"string","value":"assets/javascripts/redmine_cut_tag.js"},"old_contents":{"kind":"string","value":"// due to this task http://www.redmine.org/issues/11445\n// check which framework is installed\n\nif (typeof(jQuery) == 'undefined') {\n // using prototype\n\n document.observe('click', function(event) {\n\n var switchesSelector = '.cut_tag_show,.cut_tag_hide';\n var contentSelector = '.cut_tag_content';\n\n var switcher = event.findElement(switchesSelector);\n if (switcher) {\n var cutTagEl = switcher.parentNode;\n var selector = switchesSelector + ',' + 
contentSelector;\n Selector.matchElements(cutTagEl.childElements(), selector).map(Element.toggle);\n Event.stop(event);\n }\n });\n} else {\n // using jquery\n\n // compatibility code to use 'live' method on jquery 1.9 (redmine 2.5+)\n // borrowed from jQuery 1.8.3's source code\n jQuery.fn.extend({\n live: function( types, data, fn ) {\n jQuery( this.context ).on( types, this.selector, data, fn );\n return this;\n }\n });\n\n $(document).ready(function() {\n\n var switchesSelector = '.cut_tag_show,.cut_tag_hide';\n var contentSelector = '.cut_tag_content';\n\n $(switchesSelector).live('click', function() {\n var selector = switchesSelector + ',' + contentSelector;\n $(this).parent().children(selector).toggle();\n return false;\n });\n });\n}\n\n\n"},"message":{"kind":"string","value":"Compatibility with redmine 2.6+\n\nProperly migrate from .live to .on as described on http://api.jquery.com/live/ instead of porting a deprecated function.\n"},"old_file":{"kind":"string","value":"assets/javascripts/redmine_cut_tag.js"},"subject":{"kind":"string","value":"Compatibility with redmine 2.6+"},"git_diff":{"kind":"string","value":"ssets/javascripts/redmine_cut_tag.js\n } else {\n // using jquery\n \n // compatibility code to use 'live' method on jquery 1.9 (redmine 2.5+)\n // borrowed from jQuery 1.8.3's source code\n jQuery.fn.extend({\n live: function( types, data, fn ) {\n jQuery( this.context ).on( types, this.selector, data, fn );\n return this;\n }\n });\n\n $(document).ready(function() {\n \n var switchesSelector = '.cut_tag_show,.cut_tag_hide';\n var contentSelector = '.cut_tag_content';\n \n $(switchesSelector).live('click', function() {\n $(document).on('click', switchesSelector, function() {\n var selector = switchesSelector + ',' + contentSelector;\n $(this).parent().children(selector).toggle();\n return 
false;"}}},{"rowIdx":2091,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"445a12fbd808a5af95ec0aba31e31519839d1f29"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community"},"new_contents":{"kind":"string","value":"// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.\npackage com.intellij.openapi.vfs.impl;\n\nimport com.intellij.openapi.util.Comparing;\nimport com.intellij.openapi.util.SystemInfo;\nimport com.intellij.openapi.util.io.FileUtil;\nimport com.intellij.openapi.util.text.StringUtil;\nimport com.intellij.openapi.util.text.StringUtilRt;\nimport com.intellij.openapi.vfs.*;\nimport com.intellij.openapi.vfs.newvfs.ArchiveFileSystem;\nimport com.intellij.openapi.vfs.newvfs.NewVirtualFileSystem;\nimport com.intellij.openapi.vfs.newvfs.impl.FileNameCache;\nimport com.intellij.openapi.vfs.newvfs.impl.VirtualFileSystemEntry;\nimport com.intellij.openapi.vfs.pointers.VirtualFilePointerListener;\nimport com.intellij.util.ArrayUtil;\nimport com.intellij.util.ObjectUtils;\nimport com.intellij.util.PathUtil;\nimport com.intellij.util.containers.MultiMap;\nimport com.intellij.util.io.URLUtil;\nimport org.jetbrains.annotations.Contract;\nimport org.jetbrains.annotations.NotNull;\nimport org.jetbrains.annotations.Nullable;\n\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.List;\nimport java.util.Objects;\nimport java.util.function.Consumer;\n\n/**\n 
* Trie data structure for succinct storage and fast retrieval of file pointers.\n * File pointer \"a/b/x.txt\" is stored in the tree with nodes a->b->x.txt\n */\nclass FilePartNode {\n private static final FilePartNode[] EMPTY_ARRAY = new FilePartNode[0];\n static final int JAR_SEPARATOR_NAME_ID = -2;\n private final int nameId; // name id of the VirtualFile corresponding to this node\n FilePartNode @NotNull [] children = EMPTY_ARRAY; // sorted by this.getName(). elements never updated inplace\n // file pointers for this exact path (i.e. concatenation of all getName() down from the root).\n // Either VirtualFilePointerImpl or VirtualFilePointerImpl[] (when it so happened that several pointers merged into one node - e.g. after file rename onto existing pointer)\n private Object leaves;\n @NotNull\n volatile Object myFileOrUrl;\n final NewVirtualFileSystem myFS; // the file system of this particular component. E.g. for path \"/x.jar!/foo.txt\" the node \"x.jar\" fs is LocalFileSystem, the node \"foo.txt\" fs is JarFileSystem\n\n FilePartNode(int nameId,\n @NotNull Object fileOrUrl,\n @NotNull NewVirtualFileSystem fs) {\n myFS = fs;\n assert nameId > 0 || nameId == JAR_SEPARATOR_NAME_ID : nameId + \"; \" + getClass();\n this.nameId = nameId;\n myFileOrUrl = fileOrUrl;\n if (fileOrUrl instanceof VirtualFile) {\n assert myFile().getFileSystem() == myFS : \"myFs=\" + myFS + \"; myFile().getFileSystem()=\" + myFile().getFileSystem() + \"; \" + fileOrUrl;\n if (myFile().getParent() == null && fs instanceof ArchiveFileSystem) {\n assert nameId == JAR_SEPARATOR_NAME_ID : nameId;\n }\n }\n }\n\n private VirtualFile myFile() {\n return myFile(myFileOrUrl);\n }\n\n void addLeaf(@NotNull VirtualFilePointerImpl pointer) {\n Object leaves = this.leaves;\n Object newLeaves;\n if (leaves == null) {\n newLeaves = pointer;\n }\n else if (leaves instanceof VirtualFilePointerImpl) {\n newLeaves = new VirtualFilePointerImpl[]{(VirtualFilePointerImpl)leaves, pointer};\n }\n else {\n 
newLeaves = ArrayUtil.append((VirtualFilePointerImpl[])leaves, pointer);\n }\n associate(newLeaves);\n }\n // return remaining leaves number\n int removeLeaf(@NotNull VirtualFilePointerImpl pointer) {\n Object leaves = this.leaves;\n if (leaves == null) {\n return 0;\n }\n if (leaves instanceof VirtualFilePointerImpl) {\n if (leaves == pointer) {\n this.leaves = null;\n return 0;\n }\n return 1;\n }\n VirtualFilePointerImpl[] newLeaves = ArrayUtil.remove((VirtualFilePointerImpl[])leaves, pointer);\n if (newLeaves.length == 0) newLeaves = null;\n this.leaves = newLeaves;\n return newLeaves == null ? 0 : newLeaves.length;\n }\n\n static VirtualFile myFile(@NotNull Object fileOrUrl) {\n return fileOrUrl instanceof VirtualFile ? (VirtualFile)fileOrUrl : null;\n }\n\n @NotNull\n private String myUrl() {\n return myUrl(myFileOrUrl);\n }\n\n @NotNull\n static String myUrl(Object fileOrUrl) {\n return fileOrUrl instanceof VirtualFile ? ((VirtualFile)fileOrUrl).getUrl() : (String)fileOrUrl;\n }\n\n // for creating fake root\n FilePartNode(@NotNull NewVirtualFileSystem fs) {\n nameId = -1;\n myFileOrUrl = \"\";\n myFS = fs;\n }\n\n @NotNull\n static CharSequence fromNameId(int nameId) {\n return nameId == JAR_SEPARATOR_NAME_ID ? JarFileSystem.JAR_SEPARATOR : FileNameCache.getVFileName(nameId);\n }\n\n @NotNull\n CharSequence getName() {\n return fromNameId(nameId);\n }\n\n @Override\n public String toString() {\n return getName() + (children.length == 0 ? 
\"\" : \" -> \"+children.length);\n }\n\n static int getNameId(@NotNull VirtualFile file) {\n VirtualFileSystem fs = file.getFileSystem();\n if (fs instanceof ArchiveFileSystem && file.getParent() == null) {\n return JAR_SEPARATOR_NAME_ID;\n }\n \n return ((VirtualFileSystemEntry)file).getNameId();\n }\n\n @Contract(\"_, _, true, _ -> !null\")\n FilePartNode findChildByNameId(@Nullable VirtualFile file,\n int nameId,\n boolean createIfNotFound,\n @NotNull NewVirtualFileSystem childFs) {\n if (nameId <= 0 && nameId != JAR_SEPARATOR_NAME_ID) throw new IllegalArgumentException(\"invalid argument nameId: \"+nameId);\n for (FilePartNode child : children) {\n if (child.nameEqualTo(nameId)) return child;\n }\n if (createIfNotFound) {\n CharSequence name = fromNameId(nameId);\n int index = children.length == 0 ? -1 : binarySearchChildByName(name);\n FilePartNode child;\n assert index < 0 : index + \" : child= '\" + (child = children[index]) + \"'\"\n + \"; child.nameEqualTo(nameId)=\" + child.nameEqualTo(nameId)\n + \"; child.getClass()=\" + child.getClass()\n + \"; child.nameId=\" + child.nameId\n + \"; child.getName()='\" + child.getName() + \"'\"\n + \"; nameId=\" + nameId\n + \"; name='\" + name + \"'\"\n + \"; compare(child) = \" + StringUtil.compare(child.getName(), name, !SystemInfo.isFileSystemCaseSensitive) + \";\"\n + \" UrlPart.nameEquals: \" + FileUtil.PATH_CHAR_SEQUENCE_HASHING_STRATEGY.equals(child.getName(), fromNameId(nameId))\n + \"; name.equals(child.getName())=\" + child.getName().equals(name)\n ;\n Object fileOrUrl = file;\n if (fileOrUrl == null) {\n fileOrUrl = this.nameId == -1 ? 
name.toString() : childUrl(myUrl(), name, childFs);\n }\n child = new FilePartNode(nameId, fileOrUrl, childFs);\n children = ArrayUtil.insert(children, -index-1, child);\n return child;\n }\n return null;\n }\n\n boolean nameEqualTo(int nameId) {\n return this.nameId == nameId;\n }\n\n int binarySearchChildByName(@NotNull CharSequence name) {\n return ObjectUtils.binarySearch(0, children.length, i -> {\n FilePartNode child = children[i];\n CharSequence childName = child.getName();\n return StringUtil.compare(childName, name, !SystemInfo.isFileSystemCaseSensitive);\n });\n }\n\n void addRecursiveDirectoryPtrTo(@NotNull MultiMap toFirePointers) {\n processPointers(pointer -> { if (pointer.isRecursive()) toFirePointers.putValue(pointer.myListener, pointer); });\n }\n\n void doCheckConsistency(@Nullable VirtualFile parent, @NotNull String pathFromRoot) {\n String name = getName().toString();\n VirtualFile myFile = myFile();\n\n if (!(this instanceof FilePartNodeRoot)) {\n if (myFile == null) {\n String myUrl = myUrl();\n String expectedUrl = VirtualFileManager.constructUrl(myFS.getProtocol(), pathFromRoot + (pathFromRoot.endsWith(\"/\") ? \"\" : \"/\"));\n String actualUrl = myUrl + (myUrl.endsWith(\"/\") ? \"\" : \"/\");\n assert FileUtil.PATH_HASHING_STRATEGY.equals(actualUrl, expectedUrl) : \"Expected url: '\" + expectedUrl + \"' but got: '\" + actualUrl + \"'\";\n }\n else {\n assert Comparing.equal(getParentThroughJar(myFile, myFS), parent) : \"parent: \" + parent + \"; myFile: \" + myFile;\n }\n }\n assert !\"..\".equals(name) && !\".\".equals(name) : \"url must not contain '.' or '..' but got: \" + this;\n for (int i = 0; i < children.length; i++) {\n FilePartNode child = children[i];\n CharSequence childName = child.getName();\n String childPathRoot = pathFromRoot +\n (pathFromRoot.isEmpty() || pathFromRoot.endsWith(\"/\") || childName.equals(JarFileSystem.JAR_SEPARATOR) ? 
\"\" : \"/\") +\n childName;\n child.doCheckConsistency(myFile, childPathRoot);\n if (i != 0) {\n assert !FileUtil.namesEqual(childName.toString(), children[i - 1].getName().toString()) : \"child[\" + i + \"] = \" + child + \"; [-1] = \" + children[i - 1];\n }\n }\n int[] leafNumber = new int[1];\n processPointers(p -> { assert p.myNode == this; leafNumber[0]++; });\n int useCount = leafNumber[0];\n assert (useCount == 0) == (leaves == null) : useCount + \" - \" + (leaves instanceof VirtualFilePointerImpl ? leaves : Arrays.toString((VirtualFilePointerImpl[])leaves));\n\n if (myFileOrUrl instanceof String) {\n String myPath = VfsUtilCore.urlToPath(myUrl());\n String nameFromPath = nameId == JAR_SEPARATOR_NAME_ID || myPath.endsWith(JarFileSystem.JAR_SEPARATOR) ? JarFileSystem.JAR_SEPARATOR : PathUtil.getFileName(myPath);\n if (!myPath.isEmpty() && nameFromPath.isEmpty()) {\n nameFromPath = \"/\";\n }\n assert StringUtilRt.equal(nameFromPath, name, SystemInfo.isFileSystemCaseSensitive) : \"fileAndUrl: \" + myFileOrUrl + \"; but this: \" + this + \"; nameFromPath: \" + nameFromPath + \"; name: \" + name + \"; myPath: \" + myPath + \"; url: \" + myUrl() + \";\";\n if (myFile != null) {\n String fileName = myFile.getParent() == null && myFile.getFileSystem() instanceof ArchiveFileSystem ? 
JarFileSystem.JAR_SEPARATOR : myFile.getName();\n assert fileName.equals(name) : \"fileAndUrl: \" + myFileOrUrl + \"; but this: \" + this;\n assert myFile.getFileSystem() == myFS;\n }\n }\n }\n\n // update myFileOrUrl to a VirtualFile and replace UrlPartNode with FilePartNode if the file exists, including all subnodes\n void update(@NotNull FilePartNode parent, @NotNull FilePartNodeRoot root) {\n Object fileOrUrl = myFileOrUrl;\n VirtualFile file = myFile(fileOrUrl);\n boolean changed = false;\n boolean nameChanged = false;\n boolean fileIsValid = false;\n if (file != null) {\n fileIsValid = file.isValid();\n if (fileIsValid && file.getParent() == null && file.getFileSystem() instanceof ArchiveFileSystem) {\n VirtualFile local = ((ArchiveFileSystem)file.getFileSystem()).getLocalByEntry(file);\n fileIsValid = local != null;\n }\n if (!fileIsValid) {\n file = null;\n changed = true;\n }\n }\n\n Object parentFileOrUrl;\n parentFileOrUrl = parent.myFileOrUrl;\n String myName = getName().toString();\n String url = null;\n String parentUrl = null;\n\n VirtualFile parentFile = myFile(parentFileOrUrl);\n if (file == null) {\n file = parentFile == null || !parentFile.isValid() ? null : findChildThroughJar(parentFile, myName, myFS);\n if (file == null) {\n parentUrl = myUrl(parentFileOrUrl);\n url = childUrl(parentUrl, myName, myFS);\n changed |= nameChanged = !Comparing.strEqual(url, myUrl(fileOrUrl));\n }\n else {\n changed = true;\n }\n fileIsValid = file != null && file.isValid();\n }\n if (parent.nameId != -1 && !(parentFileOrUrl instanceof VirtualFile) && file != null) {\n // if parent file can't be found then the child is not valid too\n file = null;\n fileIsValid = false;\n url = myUrl(fileOrUrl);\n }\n if (file != null) {\n if (fileIsValid) {\n changed |= nameChanged = !StringUtil.equals(file.getNameSequence(), myName);\n }\n else {\n file = null; // can't find, try next time\n changed = true;\n url = myUrl(fileOrUrl);\n }\n }\n Object result = file == null ? 
url : file;\n changed |= !Objects.equals(fileOrUrl, result);\n FilePartNode thisNode = this;\n if (changed) {\n myFileOrUrl = result;\n if (file != null && (this instanceof UrlPartNode || nameChanged)) {\n // replace with FPPN if the actual file's appeared on disk to save memory with nameIds\n thisNode = replaceWithFPPN(file, parent);\n }\n }\n if (file != null && !Objects.equals(getParentThroughJar(file, myFS), parentFile)) {\n // this node file must be moved to the other dir. remove and re-insert from the root to the correct path, preserving all children\n FilePartNode newNode = root.findOrCreateByFile(file).node;\n processPointers(p-> newNode.addLeaf(p));\n newNode.children = children;\n children = EMPTY_ARRAY;\n changed = true;\n String myOldPath = VfsUtilCore.urlToPath(childUrl(parentUrl=myUrl(parentFileOrUrl), myName, myFS));\n root.removeEmptyNodesByPath(FilePartNodeRoot.splitNames(myOldPath));\n thisNode = newNode;\n nameChanged = true;\n }\n if (nameChanged) {\n String myOldPath = VfsUtilCore.urlToPath(childUrl(parentUrl == null ? 
myUrl(parentFileOrUrl) : parentUrl, myName, myFS));\n String myNewPath = VfsUtilCore.urlToPath(myUrl(result));\n // fix UrlPartNodes with (now) wrong url start\n thisNode.fixUrlPartNodes(myOldPath, myNewPath);\n }\n\n if (changed) {\n for (FilePartNode child : thisNode.children) {\n child.update(thisNode, root);\n }\n }\n }\n\n private void fixUrlPartNodes(@NotNull String oldPath, @NotNull String newPath) {\n if (this instanceof UrlPartNode) {\n String protocol = myFS.getProtocol();\n String myUrl = myUrl();\n if (StringUtil.startsWith(myUrl, protocol.length()+URLUtil.SCHEME_SEPARATOR.length(), oldPath)) {\n myFileOrUrl = protocol + URLUtil.SCHEME_SEPARATOR + newPath + myUrl.substring(protocol.length() + URLUtil.SCHEME_SEPARATOR.length()+oldPath.length());\n }\n }\n for (FilePartNode child : children) {\n child.fixUrlPartNodes(oldPath, newPath);\n }\n }\n\n @NotNull\n private FilePartNode replaceWithFPPN(@NotNull VirtualFile file, @NotNull FilePartNode parent) {\n int nameId = getNameId(file);\n parent.children = ArrayUtil.remove(parent.children, this);\n FilePartNode newNode = parent.findChildByNameId(file, nameId, true, (NewVirtualFileSystem)file.getFileSystem());\n newNode.children = children; // old children are destroyed when renamed onto their parent\n processPointers(pointer-> newNode.addLeaf(pointer));\n\n leaves = null;\n\n return newNode;\n }\n\n @NotNull\n static String childUrl(@NotNull String parentUrl, @NotNull CharSequence childName, @NotNull NewVirtualFileSystem fs) {\n if (childName.equals(JarFileSystem.JAR_SEPARATOR) && fs instanceof ArchiveFileSystem) {\n return VirtualFileManager.constructUrl(fs.getProtocol(), StringUtil.trimEnd(VfsUtilCore.urlToPath(parentUrl), '/')) + childName;\n }\n return parentUrl.isEmpty() ? 
VirtualFileManager.constructUrl(fs.getProtocol(), childName.toString()) :\n VirtualFileManager.constructUrl(fs.getProtocol(), StringUtil.trimEnd(VfsUtilCore.urlToPath(parentUrl), '/')) + '/' + childName;\n }\n\n private void associate(@Nullable Object leaves) {\n this.leaves = leaves;\n if (leaves != null) {\n if (leaves instanceof VirtualFilePointerImpl) {\n ((VirtualFilePointerImpl)leaves).myNode = this;\n }\n else {\n for (VirtualFilePointerImpl pointer : (VirtualFilePointerImpl[])leaves) {\n pointer.myNode = this;\n }\n }\n }\n }\n\n VirtualFilePointerImpl getPointer(VirtualFilePointerListener listener) {\n Object leaves = this.leaves;\n if (leaves == null) {\n return null;\n }\n if (leaves instanceof VirtualFilePointerImpl) {\n VirtualFilePointerImpl leaf = (VirtualFilePointerImpl)leaves;\n return leaf.myListener == listener ? leaf : null;\n }\n VirtualFilePointerImpl[] array = (VirtualFilePointerImpl[])leaves;\n for (VirtualFilePointerImpl pointer : array) {\n if (pointer.myListener == listener) return pointer;\n }\n return null;\n }\n\n void addAllPointersTo(@NotNull Collection outList) {\n processPointers(p->{ if (p.myNode != null) outList.add(p); });\n }\n\n void processPointers(@NotNull Consumer processor) {\n Object leaves = this.leaves;\n if (leaves == null) {\n return;\n }\n if (leaves instanceof VirtualFilePointerImpl) {\n processor.accept((VirtualFilePointerImpl)leaves);\n return;\n }\n VirtualFilePointerImpl[] pointers = (VirtualFilePointerImpl[])leaves;\n for (VirtualFilePointerImpl pointer : pointers) {\n processor.accept(pointer);\n }\n }\n\n // for \"file://a/b/c.txt\" return \"a/b\", for \"jar://a/b/j.jar!\" return \"file://a/b/j.jar\"\n static VirtualFile getParentThroughJar(@NotNull VirtualFile file, @NotNull NewVirtualFileSystem fs) {\n VirtualFile parent = file.getParent();\n if (parent == null && fs instanceof ArchiveFileSystem) {\n parent = ((ArchiveFileSystem)fs).getLocalByEntry(file);\n }\n return parent;\n }\n\n static VirtualFile 
findChildThroughJar(@NotNull VirtualFile file, @NotNull String name, @NotNull NewVirtualFileSystem childFs) {\n VirtualFile child;\n if (name.equals(JarFileSystem.JAR_SEPARATOR) && childFs instanceof ArchiveFileSystem) {\n child = ((ArchiveFileSystem)childFs).getRootByLocal(file);\n }\n else {\n child = file.findChild(name);\n }\n return child;\n }\n\n boolean removeEmptyNodesByFile(@NotNull List parts) {\n if (parts.isEmpty()) {\n return children.length == 0;\n }\n VirtualFile file = parts.remove(parts.size()-1);\n FilePartNode child = findChildByNameId(null, getNameId(file), false, (NewVirtualFileSystem)file.getFileSystem());\n if (child == null) {\n return false;\n }\n boolean toRemove = child.removeEmptyNodesByFile(parts);\n if (toRemove) {\n children = children.length == 1 ? EMPTY_ARRAY : ArrayUtil.remove(children, child);\n return children.length == 0 && leaves == null;\n }\n return false;\n }\n\n boolean removeEmptyNodesByPath(@NotNull List parts) {\n if (parts.isEmpty()) {\n return children.length == 0;\n }\n String name = parts.remove(parts.size()-1);\n int index = binarySearchChildByName(name);\n if (index < 0) {\n return false;\n }\n FilePartNode child = children[index];\n boolean toRemove = child.removeEmptyNodesByPath(parts);\n if (toRemove) {\n children = children.length == 1 ? EMPTY_ARRAY : ArrayUtil.remove(children, child);\n return children.length == 0 && leaves == null;\n }\n return false;\n }\n}\n"},"new_file":{"kind":"string","value":"platform/platform-impl/src/com/intellij/openapi/vfs/impl/FilePartNode.java"},"old_contents":{"kind":"string","value":"// Copyright 2000-2020 JetBrains s.r.o. 
Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.\npackage com.intellij.openapi.vfs.impl;\n\nimport com.intellij.openapi.util.Comparing;\nimport com.intellij.openapi.util.SystemInfo;\nimport com.intellij.openapi.util.io.FileUtil;\nimport com.intellij.openapi.util.text.StringUtil;\nimport com.intellij.openapi.util.text.StringUtilRt;\nimport com.intellij.openapi.vfs.*;\nimport com.intellij.openapi.vfs.newvfs.ArchiveFileSystem;\nimport com.intellij.openapi.vfs.newvfs.NewVirtualFileSystem;\nimport com.intellij.openapi.vfs.newvfs.impl.FileNameCache;\nimport com.intellij.openapi.vfs.newvfs.impl.VirtualFileSystemEntry;\nimport com.intellij.openapi.vfs.pointers.VirtualFilePointerListener;\nimport com.intellij.util.ArrayUtil;\nimport com.intellij.util.ObjectUtils;\nimport com.intellij.util.PathUtil;\nimport com.intellij.util.containers.MultiMap;\nimport com.intellij.util.io.URLUtil;\nimport org.jetbrains.annotations.Contract;\nimport org.jetbrains.annotations.NotNull;\nimport org.jetbrains.annotations.Nullable;\n\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.List;\nimport java.util.Objects;\nimport java.util.function.Consumer;\n\n/**\n * Trie data structure for succinct storage and fast retrieval of file pointers.\n * File pointer \"a/b/x.txt\" is stored in the tree with nodes a->b->x.txt\n */\nclass FilePartNode {\n private static final FilePartNode[] EMPTY_ARRAY = new FilePartNode[0];\n static final int JAR_SEPARATOR_NAME_ID = -2;\n private final int nameId; // name id of the VirtualFile corresponding to this node\n FilePartNode @NotNull [] children = EMPTY_ARRAY; // sorted by this.getName(). elements never updated inplace\n // file pointers for this exact path (i.e. concatenation of all getName() down from the root).\n // Either VirtualFilePointerImpl or VirtualFilePointerImpl[] (when it so happened that several pointers merged into one node - e.g. 
after file rename onto existing pointer)\n private Object leaves;\n @NotNull\n volatile Object myFileOrUrl;\n final NewVirtualFileSystem myFS; // the file system of this particular component. E.g. for path \"/x.jar!/foo.txt\" the node \"x.jar\" fs is LocalFileSystem, the node \"foo.txt\" fs is JarFileSystem\n\n FilePartNode(int nameId,\n @NotNull Object fileOrUrl,\n @NotNull NewVirtualFileSystem fs) {\n myFS = fs;\n assert nameId > 0 || nameId == JAR_SEPARATOR_NAME_ID : nameId + \"; \" + getClass();\n this.nameId = nameId;\n myFileOrUrl = fileOrUrl;\n if (fileOrUrl instanceof VirtualFile) {\n assert myFile().getFileSystem() == myFS : \"myFs=\" + myFS + \"; myFile().getFileSystem()=\" + myFile().getFileSystem() + \"; \" + fileOrUrl;\n if (myFile().getParent() == null && fs instanceof ArchiveFileSystem) {\n assert nameId == JAR_SEPARATOR_NAME_ID : nameId;\n }\n }\n }\n\n private VirtualFile myFile() {\n return myFile(myFileOrUrl);\n }\n\n void addLeaf(@NotNull VirtualFilePointerImpl pointer) {\n Object leaves = this.leaves;\n Object newLeaves;\n if (leaves == null) {\n newLeaves = pointer;\n }\n else if (leaves instanceof VirtualFilePointerImpl) {\n newLeaves = new VirtualFilePointerImpl[]{(VirtualFilePointerImpl)leaves, pointer};\n }\n else {\n newLeaves = ArrayUtil.append((VirtualFilePointerImpl[])leaves, pointer);\n }\n associate(newLeaves);\n }\n // return remaining leaves number\n int removeLeaf(@NotNull VirtualFilePointerImpl pointer) {\n Object leaves = this.leaves;\n if (leaves == null) {\n return 0;\n }\n if (leaves instanceof VirtualFilePointerImpl) {\n if (leaves == pointer) {\n this.leaves = null;\n return 0;\n }\n return 1;\n }\n VirtualFilePointerImpl[] newLeaves = ArrayUtil.remove((VirtualFilePointerImpl[])leaves, pointer);\n if (newLeaves.length == 0) newLeaves = null;\n this.leaves = newLeaves;\n return newLeaves == null ? 0 : newLeaves.length;\n }\n\n static VirtualFile myFile(@NotNull Object fileOrUrl) {\n return fileOrUrl instanceof VirtualFile ? 
(VirtualFile)fileOrUrl : null;\n }\n\n @NotNull\n private String myUrl() {\n return myUrl(myFileOrUrl);\n }\n\n @NotNull\n static String myUrl(Object fileOrUrl) {\n return fileOrUrl instanceof VirtualFile ? ((VirtualFile)fileOrUrl).getUrl() : (String)fileOrUrl;\n }\n\n // for creating fake root\n FilePartNode(@NotNull NewVirtualFileSystem fs) {\n nameId = -1;\n myFileOrUrl = \"\";\n myFS = fs;\n }\n\n @NotNull\n static CharSequence fromNameId(int nameId) {\n return nameId == JAR_SEPARATOR_NAME_ID ? JarFileSystem.JAR_SEPARATOR : FileNameCache.getVFileName(nameId);\n }\n\n @NotNull\n CharSequence getName() {\n return fromNameId(nameId);\n }\n\n @Override\n public String toString() {\n return getName() + (children.length == 0 ? \"\" : \" -> \"+children.length);\n }\n\n static int getNameId(@NotNull VirtualFile file) {\n VirtualFileSystem fs = file.getFileSystem();\n if (fs instanceof ArchiveFileSystem && file.getParent() == null) {\n return JAR_SEPARATOR_NAME_ID;\n }\n \n return ((VirtualFileSystemEntry)file).getNameId();\n }\n\n @Contract(\"_, _, true, _ -> !null\")\n FilePartNode findChildByNameId(@Nullable VirtualFile file,\n int nameId,\n boolean createIfNotFound,\n @NotNull NewVirtualFileSystem childFs) {\n if (nameId <= 0 && nameId != JAR_SEPARATOR_NAME_ID) throw new IllegalArgumentException(\"invalid argument nameId: \"+nameId);\n for (FilePartNode child : children) {\n if (child.nameEqualTo(nameId)) return child;\n }\n if (createIfNotFound) {\n CharSequence name = fromNameId(nameId);\n int index = children.length == 0 ? 
-1 : binarySearchChildByName(name);\n FilePartNode child;\n assert index < 0 : index + \" : child= '\" + (child = children[index]) + \"'\"\n + \"; child.nameEqualTo(nameId)=\" + child.nameEqualTo(nameId)\n + \"; child.getClass()=\" + child.getClass()\n + \"; child.nameId=\" + child.nameId\n + \"; child.getName()='\" + child.getName() + \"'\"\n + \"; nameId=\" + nameId\n + \"; name='\" + name + \"'\"\n + \"; compare(child) = \" + StringUtil.compare(child.getName(), name, !SystemInfo.isFileSystemCaseSensitive) + \";\"\n + \" UrlPart.nameEquals: \" + FileUtil.PATH_CHAR_SEQUENCE_HASHING_STRATEGY.equals(child.getName(), fromNameId(nameId))\n + \"; name.equals(child.getName())=\" + child.getName().equals(name)\n ;\n Object fileOrUrl = file;\n if (fileOrUrl == null) {\n fileOrUrl = this.nameId == -1 ? name.toString() : childUrl(myUrl(), name, childFs);\n }\n child = new FilePartNode(nameId, fileOrUrl, childFs);\n children = ArrayUtil.insert(children, -index-1, child);\n return child;\n }\n return null;\n }\n\n boolean nameEqualTo(int nameId) {\n return this.nameId == nameId;\n }\n\n int binarySearchChildByName(@NotNull CharSequence name) {\n return ObjectUtils.binarySearch(0, children.length, i -> {\n FilePartNode child = children[i];\n CharSequence childName = child.getName();\n return StringUtil.compare(childName, name, !SystemInfo.isFileSystemCaseSensitive);\n });\n }\n\n void addRecursiveDirectoryPtrTo(@NotNull MultiMap toFirePointers) {\n processPointers(pointer -> { if (pointer.isRecursive()) toFirePointers.putValue(pointer.myListener, pointer); });\n }\n\n void doCheckConsistency(@Nullable VirtualFile parent, @NotNull String pathFromRoot) {\n String name = getName().toString();\n VirtualFile myFile = myFile();\n\n if (!(this instanceof FilePartNodeRoot)) {\n if (myFile == null) {\n String myUrl = myUrl();\n String expectedUrl = VirtualFileManager.constructUrl(myFS.getProtocol(), pathFromRoot + (pathFromRoot.endsWith(\"/\") ? 
\"\" : \"/\"));\n String actualUrl = myUrl + (myUrl.endsWith(\"/\") ? \"\" : \"/\");\n assert actualUrl.equals(expectedUrl) : \"Expected url: '\" + expectedUrl + \"' but got: '\" + actualUrl + \"'\";\n }\n else {\n assert Comparing.equal(getParentThroughJar(myFile, myFS), parent) : \"parent: \" + parent + \"; myFile: \" + myFile;\n }\n }\n assert !\"..\".equals(name) && !\".\".equals(name) : \"url must not contain '.' or '..' but got: \" + this;\n for (int i = 0; i < children.length; i++) {\n FilePartNode child = children[i];\n CharSequence childName = child.getName();\n String childPathRoot = pathFromRoot +\n (pathFromRoot.isEmpty() || pathFromRoot.endsWith(\"/\") || childName.equals(JarFileSystem.JAR_SEPARATOR) ? \"\" : \"/\") +\n childName;\n child.doCheckConsistency(myFile, childPathRoot);\n if (i != 0) {\n assert !FileUtil.namesEqual(childName.toString(), children[i - 1].getName().toString()) : \"child[\" + i + \"] = \" + child + \"; [-1] = \" + children[i - 1];\n }\n }\n int[] leafNumber = new int[1];\n processPointers(p -> { assert p.myNode == this; leafNumber[0]++; });\n int useCount = leafNumber[0];\n assert (useCount == 0) == (leaves == null) : useCount + \" - \" + (leaves instanceof VirtualFilePointerImpl ? leaves : Arrays.toString((VirtualFilePointerImpl[])leaves));\n\n if (myFileOrUrl instanceof String) {\n String myPath = VfsUtilCore.urlToPath(myUrl());\n String nameFromPath = nameId == JAR_SEPARATOR_NAME_ID || myPath.endsWith(JarFileSystem.JAR_SEPARATOR) ? 
JarFileSystem.JAR_SEPARATOR : PathUtil.getFileName(myPath);\n if (!myPath.isEmpty() && nameFromPath.isEmpty()) {\n nameFromPath = \"/\";\n }\n assert StringUtilRt.equal(nameFromPath, name, SystemInfo.isFileSystemCaseSensitive) : \"fileAndUrl: \" + myFileOrUrl + \"; but this: \" + this + \"; nameFromPath: \" + nameFromPath + \"; name: \" + name + \"; myPath: \" + myPath + \"; url: \" + myUrl() + \";\";\n if (myFile != null) {\n String fileName = myFile.getParent() == null && myFile.getFileSystem() instanceof ArchiveFileSystem ? JarFileSystem.JAR_SEPARATOR : myFile.getName();\n assert fileName.equals(name) : \"fileAndUrl: \" + myFileOrUrl + \"; but this: \" + this;\n assert myFile.getFileSystem() == myFS;\n }\n }\n }\n\n // update myFileOrUrl to a VirtualFile and replace UrlPartNode with FilePartNode if the file exists, including all subnodes\n void update(@NotNull FilePartNode parent, @NotNull FilePartNodeRoot root) {\n Object fileOrUrl = myFileOrUrl;\n VirtualFile file = myFile(fileOrUrl);\n boolean changed = false;\n boolean nameChanged = false;\n boolean fileIsValid = false;\n if (file != null) {\n fileIsValid = file.isValid();\n if (fileIsValid && file.getParent() == null && file.getFileSystem() instanceof ArchiveFileSystem) {\n VirtualFile local = ((ArchiveFileSystem)file.getFileSystem()).getLocalByEntry(file);\n fileIsValid = local != null;\n }\n if (!fileIsValid) {\n file = null;\n changed = true;\n }\n }\n\n Object parentFileOrUrl;\n parentFileOrUrl = parent.myFileOrUrl;\n String myName = getName().toString();\n String url = null;\n String parentUrl = null;\n\n VirtualFile parentFile = myFile(parentFileOrUrl);\n if (file == null) {\n file = parentFile == null || !parentFile.isValid() ? 
null : findChildThroughJar(parentFile, myName, myFS);\n if (file == null) {\n parentUrl = myUrl(parentFileOrUrl);\n url = childUrl(parentUrl, myName, myFS);\n changed |= nameChanged = !Comparing.strEqual(url, myUrl(fileOrUrl));\n }\n else {\n changed = true;\n }\n fileIsValid = file != null && file.isValid();\n }\n if (parent.nameId != -1 && !(parentFileOrUrl instanceof VirtualFile) && file != null) {\n // if parent file can't be found then the child is not valid too\n file = null;\n fileIsValid = false;\n url = myUrl(fileOrUrl);\n }\n if (file != null) {\n if (fileIsValid) {\n changed |= nameChanged = !StringUtil.equals(file.getNameSequence(), myName);\n }\n else {\n file = null; // can't find, try next time\n changed = true;\n url = myUrl(fileOrUrl);\n }\n }\n Object result = file == null ? url : file;\n changed |= !Objects.equals(fileOrUrl, result);\n FilePartNode thisNode = this;\n if (changed) {\n myFileOrUrl = result;\n if (file != null && (this instanceof UrlPartNode || nameChanged)) {\n // replace with FPPN if the actual file's appeared on disk to save memory with nameIds\n thisNode = replaceWithFPPN(file, parent);\n }\n }\n if (file != null && !Objects.equals(getParentThroughJar(file, myFS), parentFile)) {\n // this node file must be moved to the other dir. remove and re-insert from the root to the correct path, preserving all children\n FilePartNode newNode = root.findOrCreateByFile(file).node;\n processPointers(p-> newNode.addLeaf(p));\n newNode.children = children;\n children = EMPTY_ARRAY;\n changed = true;\n String myOldPath = VfsUtilCore.urlToPath(childUrl(parentUrl=myUrl(parentFileOrUrl), myName, myFS));\n root.removeEmptyNodesByPath(FilePartNodeRoot.splitNames(myOldPath));\n thisNode = newNode;\n nameChanged = true;\n }\n if (nameChanged) {\n String myOldPath = VfsUtilCore.urlToPath(childUrl(parentUrl == null ? 
myUrl(parentFileOrUrl) : parentUrl, myName, myFS));\n String myNewPath = VfsUtilCore.urlToPath(myUrl(result));\n // fix UrlPartNodes with (now) wrong url start\n thisNode.fixUrlPartNodes(myOldPath, myNewPath);\n }\n\n if (changed) {\n for (FilePartNode child : thisNode.children) {\n child.update(thisNode, root);\n }\n }\n }\n\n private void fixUrlPartNodes(@NotNull String oldPath, @NotNull String newPath) {\n if (this instanceof UrlPartNode) {\n String protocol = myFS.getProtocol();\n String myUrl = myUrl();\n if (StringUtil.startsWith(myUrl, protocol.length()+URLUtil.SCHEME_SEPARATOR.length(), oldPath)) {\n myFileOrUrl = protocol + URLUtil.SCHEME_SEPARATOR + newPath + myUrl.substring(protocol.length() + URLUtil.SCHEME_SEPARATOR.length()+oldPath.length());\n }\n }\n for (FilePartNode child : children) {\n child.fixUrlPartNodes(oldPath, newPath);\n }\n }\n\n @NotNull\n private FilePartNode replaceWithFPPN(@NotNull VirtualFile file, @NotNull FilePartNode parent) {\n int nameId = getNameId(file);\n parent.children = ArrayUtil.remove(parent.children, this);\n FilePartNode newNode = parent.findChildByNameId(file, nameId, true, (NewVirtualFileSystem)file.getFileSystem());\n newNode.children = children; // old children are destroyed when renamed onto their parent\n processPointers(pointer-> newNode.addLeaf(pointer));\n\n leaves = null;\n\n return newNode;\n }\n\n @NotNull\n static String childUrl(@NotNull String parentUrl, @NotNull CharSequence childName, @NotNull NewVirtualFileSystem fs) {\n if (childName.equals(JarFileSystem.JAR_SEPARATOR) && fs instanceof ArchiveFileSystem) {\n return VirtualFileManager.constructUrl(fs.getProtocol(), StringUtil.trimEnd(VfsUtilCore.urlToPath(parentUrl), '/')) + childName;\n }\n return parentUrl.isEmpty() ? 
VirtualFileManager.constructUrl(fs.getProtocol(), childName.toString()) :\n VirtualFileManager.constructUrl(fs.getProtocol(), StringUtil.trimEnd(VfsUtilCore.urlToPath(parentUrl), '/')) + '/' + childName;\n }\n\n private void associate(@Nullable Object leaves) {\n this.leaves = leaves;\n if (leaves != null) {\n if (leaves instanceof VirtualFilePointerImpl) {\n ((VirtualFilePointerImpl)leaves).myNode = this;\n }\n else {\n for (VirtualFilePointerImpl pointer : (VirtualFilePointerImpl[])leaves) {\n pointer.myNode = this;\n }\n }\n }\n }\n\n VirtualFilePointerImpl getPointer(VirtualFilePointerListener listener) {\n Object leaves = this.leaves;\n if (leaves == null) {\n return null;\n }\n if (leaves instanceof VirtualFilePointerImpl) {\n VirtualFilePointerImpl leaf = (VirtualFilePointerImpl)leaves;\n return leaf.myListener == listener ? leaf : null;\n }\n VirtualFilePointerImpl[] array = (VirtualFilePointerImpl[])leaves;\n for (VirtualFilePointerImpl pointer : array) {\n if (pointer.myListener == listener) return pointer;\n }\n return null;\n }\n\n void addAllPointersTo(@NotNull Collection outList) {\n processPointers(p->{ if (p.myNode != null) outList.add(p); });\n }\n\n void processPointers(@NotNull Consumer processor) {\n Object leaves = this.leaves;\n if (leaves == null) {\n return;\n }\n if (leaves instanceof VirtualFilePointerImpl) {\n processor.accept((VirtualFilePointerImpl)leaves);\n return;\n }\n VirtualFilePointerImpl[] pointers = (VirtualFilePointerImpl[])leaves;\n for (VirtualFilePointerImpl pointer : pointers) {\n processor.accept(pointer);\n }\n }\n\n // for \"file://a/b/c.txt\" return \"a/b\", for \"jar://a/b/j.jar!\" return \"file://a/b/j.jar\"\n static VirtualFile getParentThroughJar(@NotNull VirtualFile file, @NotNull NewVirtualFileSystem fs) {\n VirtualFile parent = file.getParent();\n if (parent == null && fs instanceof ArchiveFileSystem) {\n parent = ((ArchiveFileSystem)fs).getLocalByEntry(file);\n }\n return parent;\n }\n\n static VirtualFile 
findChildThroughJar(@NotNull VirtualFile file, @NotNull String name, @NotNull NewVirtualFileSystem childFs) {\n VirtualFile child;\n if (name.equals(JarFileSystem.JAR_SEPARATOR) && childFs instanceof ArchiveFileSystem) {\n child = ((ArchiveFileSystem)childFs).getRootByLocal(file);\n }\n else {\n child = file.findChild(name);\n }\n return child;\n }\n\n boolean removeEmptyNodesByFile(@NotNull List parts) {\n if (parts.isEmpty()) {\n return children.length == 0;\n }\n VirtualFile file = parts.remove(parts.size()-1);\n FilePartNode child = findChildByNameId(null, getNameId(file), false, (NewVirtualFileSystem)file.getFileSystem());\n if (child == null) {\n return false;\n }\n boolean toRemove = child.removeEmptyNodesByFile(parts);\n if (toRemove) {\n children = children.length == 1 ? EMPTY_ARRAY : ArrayUtil.remove(children, child);\n return children.length == 0 && leaves == null;\n }\n return false;\n }\n\n boolean removeEmptyNodesByPath(@NotNull List parts) {\n if (parts.isEmpty()) {\n return children.length == 0;\n }\n String name = parts.remove(parts.size()-1);\n int index = binarySearchChildByName(name);\n if (index < 0) {\n return false;\n }\n FilePartNode child = children[index];\n boolean toRemove = child.removeEmptyNodesByPath(parts);\n if (toRemove) {\n children = children.length == 1 ? EMPTY_ARRAY : ArrayUtil.remove(children, child);\n return children.length == 0 && leaves == null;\n }\n return false;\n }\n}\n"},"message":{"kind":"string","value":"fix CMake tests\n\nGitOrigin-RevId: d910b0321e0703cc2c0330d2d811043f01489131"},"old_file":{"kind":"string","value":"platform/platform-impl/src/com/intellij/openapi/vfs/impl/FilePartNode.java"},"subject":{"kind":"string","value":"fix CMake tests"},"git_diff":{"kind":"string","value":"latform/platform-impl/src/com/intellij/openapi/vfs/impl/FilePartNode.java\n String myUrl = myUrl();\n String expectedUrl = VirtualFileManager.constructUrl(myFS.getProtocol(), pathFromRoot + (pathFromRoot.endsWith(\"/\") ? 
\"\" : \"/\"));\n String actualUrl = myUrl + (myUrl.endsWith(\"/\") ? \"\" : \"/\");\n assert actualUrl.equals(expectedUrl) : \"Expected url: '\" + expectedUrl + \"' but got: '\" + actualUrl + \"'\";\n assert FileUtil.PATH_HASHING_STRATEGY.equals(actualUrl, expectedUrl) : \"Expected url: '\" + expectedUrl + \"' but got: '\" + actualUrl + \"'\";\n }\n else {\n assert Comparing.equal(getParentThroughJar(myFile, myFS), parent) : \"parent: \" + parent + \"; myFile: \" + myFile;"}}},{"rowIdx":2092,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"685a2435fa898d6e7e5e96ea222022afc0494577"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"ibinti/intellij-community,akosyakov/intellij-community,idea4bsd/idea4bsd,diorcety/intellij-community,hurricup/intellij-community,ahb0327/intellij-community,suncycheng/intellij-community,slisson/intellij-community,orekyuu/intellij-community,joewalnes/idea-community,MichaelNedzelsky/intellij-community,holmes/intellij-community,retomerz/intellij-community,amith01994/intellij-community,signed/intellij-community,lucafavatella/intellij-community,ibinti/intellij-community,vvv1559/intellij-community,dslomov/intellij-community,retomerz/intellij-community,ThiagoGarciaAlves/intellij-community,retomerz/intellij-community,ivan-fedorov/intellij-community,ahb0327/intellij-community,amith01994/intellij-community,semonte/intellij-community,Lekanich/intellij-community,petteyg/intellij-community,gnuhub/intellij-community,nicolargo/intellij-community,semonte/intellij-community,TangHao1987/intellij-community,ibinti/intellij-community,Distrotech/intellij-community,youdonghai/intellij-community,ibinti/intellij-community,ivan-fedorov/intellij-community,kool79/intellij-community,holmes/intellij-community,mglukhikh/intellij-community,alphafoobar/intellij-community,xfournet/intellij-community,semonte/intelli
j-community,signed/intellij-community,clumsy/intellij-community,fitermay/intellij-community,xfournet/intellij-community,petteyg/intellij-community,retomerz/intellij-community,suncycheng/intellij-community,MER-GROUP/intellij-community,FHannes/intellij-community,salguarnieri/intellij-community,diorcety/intellij-community,TangHao1987/intellij-community,apixandru/intellij-community,ernestp/consulo,jexp/idea2,consulo/consulo,xfournet/intellij-community,Distrotech/intellij-community,youdonghai/intellij-community,nicolargo/intellij-community,da1z/intellij-community,allotria/intellij-community,TangHao1987/intellij-community,caot/intellij-community,fitermay/intellij-community,Lekanich/intellij-community,caot/intellij-community,amith01994/intellij-community,fnouama/intellij-community,lucafavatella/intellij-community,alphafoobar/intellij-community,dslomov/intellij-community,MER-GROUP/intellij-community,supersven/intellij-community,diorcety/intellij-community,SerCeMan/intellij-community,apixandru/intellij-community,adedayo/intellij-community,consulo/consulo,ol-loginov/intellij-community,ryano144/intellij-community,idea4bsd/idea4bsd,mglukhikh/intellij-community,mglukhikh/intellij-community,mglukhikh/intellij-community,TangHao1987/intellij-community,MichaelNedzelsky/intellij-community,blademainer/intellij-community,xfournet/intellij-community,ivan-fedorov/intellij-community,salguarnieri/intellij-community,michaelgallacher/intellij-community,blademainer/intellij-community,supersven/intellij-community,retomerz/intellij-community,ryano144/intellij-community,ol-loginov/intellij-community,dslomov/intellij-community,SerCeMan/intellij-community,fengbaicanhe/intellij-community,pwoodworth/intellij-community,MichaelNedzelsky/intellij-community,apixandru/intellij-community,xfournet/intellij-community,vvv1559/intellij-community,ibinti/intellij-community,michaelgallacher/intellij-community,dslomov/intellij-community,salguarnieri/intellij-community,hurricup/intellij-community,fnouama/intellij-
community,salguarnieri/intellij-community,mglukhikh/intellij-community,MER-GROUP/intellij-community,ernestp/consulo,wreckJ/intellij-community,robovm/robovm-studio,robovm/robovm-studio,wreckJ/intellij-community,MichaelNedzelsky/intellij-community,tmpgit/intellij-community,petteyg/intellij-community,jagguli/intellij-community,adedayo/intellij-community,suncycheng/intellij-community,SerCeMan/intellij-community,holmes/intellij-community,supersven/intellij-community,holmes/intellij-community,TangHao1987/intellij-community,MichaelNedzelsky/intellij-community,retomerz/intellij-community,SerCeMan/intellij-community,ibinti/intellij-community,hurricup/intellij-community,TangHao1987/intellij-community,samthor/intellij-community,clumsy/intellij-community,fitermay/intellij-community,Distrotech/intellij-community,diorcety/intellij-community,adedayo/intellij-community,vvv1559/intellij-community,petteyg/intellij-community,signed/intellij-community,ivan-fedorov/intellij-community,Distrotech/intellij-community,adedayo/intellij-community,fengbaicanhe/intellij-community,orekyuu/intellij-community,signed/intellij-community,TangHao1987/intellij-community,ol-loginov/intellij-community,allotria/intellij-community,izonder/intellij-community,allotria/intellij-community,supersven/intellij-community,vladmm/intellij-community,samthor/intellij-community,allotria/intellij-community,muntasirsyed/intellij-community,muntasirsyed/intellij-community,vladmm/intellij-community,diorcety/intellij-community,jagguli/intellij-community,signed/intellij-community,amith01994/intellij-community,ol-loginov/intellij-community,da1z/intellij-community,mglukhikh/intellij-community,xfournet/intellij-community,caot/intellij-community,Distrotech/intellij-community,amith01994/intellij-community,ahb0327/intellij-community,FHannes/intellij-community,nicolargo/intellij-community,amith01994/intellij-community,ol-loginov/intellij-community,muntasirsyed/intellij-community,alphafoobar/intellij-community,salguarnieri/intellij-co
mmunity,ibinti/intellij-community,orekyuu/intellij-community,jagguli/intellij-community,akosyakov/intellij-community,ftomassetti/intellij-community,amith01994/intellij-community,youdonghai/intellij-community,vladmm/intellij-community,kool79/intellij-community,kdwink/intellij-community,clumsy/intellij-community,lucafavatella/intellij-community,ol-loginov/intellij-community,clumsy/intellij-community,FHannes/intellij-community,ibinti/intellij-community,ftomassetti/intellij-community,consulo/consulo,salguarnieri/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,orekyuu/intellij-community,retomerz/intellij-community,MichaelNedzelsky/intellij-community,akosyakov/intellij-community,robovm/robovm-studio,joewalnes/idea-community,Lekanich/intellij-community,wreckJ/intellij-community,idea4bsd/idea4bsd,adedayo/intellij-community,ol-loginov/intellij-community,amith01994/intellij-community,jexp/idea2,wreckJ/intellij-community,suncycheng/intellij-community,izonder/intellij-community,tmpgit/intellij-community,ryano144/intellij-community,idea4bsd/idea4bsd,samthor/intellij-community,ivan-fedorov/intellij-community,semonte/intellij-community,youdonghai/intellij-community,vvv1559/intellij-community,ibinti/intellij-community,hurricup/intellij-community,fnouama/intellij-community,ftomassetti/intellij-community,robovm/robovm-studio,jexp/idea2,slisson/intellij-community,mglukhikh/intellij-community,ThiagoGarciaAlves/intellij-community,petteyg/intellij-community,amith01994/intellij-community,FHannes/intellij-community,slisson/intellij-community,MER-GROUP/intellij-community,kdwink/intellij-community,alphafoobar/intellij-community,suncycheng/intellij-community,izonder/intellij-community,semonte/intellij-community,FHannes/intellij-community,ibinti/intellij-community,clumsy/intellij-community,tmpgit/intellij-community,muntasirsyed/intellij-community,wreckJ/intellij-community,amith01994/intellij-community,michaelgallacher/intellij-community,samthor/intellij-community,rya
no144/intellij-community,petteyg/intellij-community,ibinti/intellij-community,kool79/intellij-community,ftomassetti/intellij-community,ahb0327/intellij-community,salguarnieri/intellij-community,orekyuu/intellij-community,Distrotech/intellij-community,ernestp/consulo,akosyakov/intellij-community,robovm/robovm-studio,gnuhub/intellij-community,idea4bsd/idea4bsd,blademainer/intellij-community,allotria/intellij-community,clumsy/intellij-community,kdwink/intellij-community,gnuhub/intellij-community,supersven/intellij-community,robovm/robovm-studio,SerCeMan/intellij-community,allotria/intellij-community,idea4bsd/idea4bsd,izonder/intellij-community,apixandru/intellij-community,idea4bsd/idea4bsd,samthor/intellij-community,michaelgallacher/intellij-community,asedunov/intellij-community,izonder/intellij-community,wreckJ/intellij-community,pwoodworth/intellij-community,ftomassetti/intellij-community,akosyakov/intellij-community,allotria/intellij-community,asedunov/intellij-community,SerCeMan/intellij-community,SerCeMan/intellij-community,ernestp/consulo,adedayo/intellij-community,Lekanich/intellij-community,allotria/intellij-community,ahb0327/intellij-community,tmpgit/intellij-community,joewalnes/idea-community,asedunov/intellij-community,holmes/intellij-community,ryano144/intellij-community,blademainer/intellij-community,signed/intellij-community,fengbaicanhe/intellij-community,tmpgit/intellij-community,dslomov/intellij-community,da1z/intellij-community,hurricup/intellij-community,tmpgit/intellij-community,xfournet/intellij-community,slisson/intellij-community,mglukhikh/intellij-community,muntasirsyed/intellij-community,fitermay/intellij-community,joewalnes/idea-community,salguarnieri/intellij-community,supersven/intellij-community,vvv1559/intellij-community,vvv1559/intellij-community,FHannes/intellij-community,fitermay/intellij-community,adedayo/intellij-community,apixandru/intellij-community,kdwink/intellij-community,supersven/intellij-community,wreckJ/intellij-community,fit
ermay/intellij-community,ivan-fedorov/intellij-community,blademainer/intellij-community,fitermay/intellij-community,vvv1559/intellij-community,hurricup/intellij-community,vladmm/intellij-community,kool79/intellij-community,ThiagoGarciaAlves/intellij-community,signed/intellij-community,michaelgallacher/intellij-community,MichaelNedzelsky/intellij-community,muntasirsyed/intellij-community,wreckJ/intellij-community,asedunov/intellij-community,Distrotech/intellij-community,idea4bsd/idea4bsd,nicolargo/intellij-community,orekyuu/intellij-community,diorcety/intellij-community,mglukhikh/intellij-community,samthor/intellij-community,clumsy/intellij-community,retomerz/intellij-community,adedayo/intellij-community,fnouama/intellij-community,supersven/intellij-community,wreckJ/intellij-community,tmpgit/intellij-community,kool79/intellij-community,idea4bsd/idea4bsd,hurricup/intellij-community,ivan-fedorov/intellij-community,ryano144/intellij-community,youdonghai/intellij-community,diorcety/intellij-community,TangHao1987/intellij-community,jexp/idea2,mglukhikh/intellij-community,kool79/intellij-community,vvv1559/intellij-community,lucafavatella/intellij-community,caot/intellij-community,samthor/intellij-community,youdonghai/intellij-community,gnuhub/intellij-community,samthor/intellij-community,retomerz/intellij-community,lucafavatella/intellij-community,TangHao1987/intellij-community,dslomov/intellij-community,asedunov/intellij-community,signed/intellij-community,semonte/intellij-community,MER-GROUP/intellij-community,apixandru/intellij-community,consulo/consulo,ol-loginov/intellij-community,supersven/intellij-community,slisson/intellij-community,vladmm/intellij-community,signed/intellij-community,apixandru/intellij-community,fengbaicanhe/intellij-community,petteyg/intellij-community,apixandru/intellij-community,salguarnieri/intellij-community,joewalnes/idea-community,ahb0327/intellij-community,caot/intellij-community,izonder/intellij-community,ftomassetti/intellij-community,pwo
odworth/intellij-community,caot/intellij-community,samthor/intellij-community,muntasirsyed/intellij-community,TangHao1987/intellij-community,semonte/intellij-community,ThiagoGarciaAlves/intellij-community,pwoodworth/intellij-community,fengbaicanhe/intellij-community,clumsy/intellij-community,akosyakov/intellij-community,ahb0327/intellij-community,pwoodworth/intellij-community,joewalnes/idea-community,da1z/intellij-community,pwoodworth/intellij-community,MER-GROUP/intellij-community,MER-GROUP/intellij-community,robovm/robovm-studio,xfournet/intellij-community,ernestp/consulo,suncycheng/intellij-community,tmpgit/intellij-community,asedunov/intellij-community,fitermay/intellij-community,kool79/intellij-community,ThiagoGarciaAlves/intellij-community,xfournet/intellij-community,holmes/intellij-community,dslomov/intellij-community,michaelgallacher/intellij-community,ahb0327/intellij-community,izonder/intellij-community,MER-GROUP/intellij-community,semonte/intellij-community,da1z/intellij-community,suncycheng/intellij-community,alphafoobar/intellij-community,supersven/intellij-community,jexp/idea2,lucafavatella/intellij-community,ThiagoGarciaAlves/intellij-community,fengbaicanhe/intellij-community,lucafavatella/intellij-community,signed/intellij-community,fengbaicanhe/intellij-community,vladmm/intellij-community,vladmm/intellij-community,lucafavatella/intellij-community,jagguli/intellij-community,izonder/intellij-community,xfournet/intellij-community,youdonghai/intellij-community,blademainer/intellij-community,pwoodworth/intellij-community,da1z/intellij-community,nicolargo/intellij-community,dslomov/intellij-community,ibinti/intellij-community,muntasirsyed/intellij-community,SerCeMan/intellij-community,alphafoobar/intellij-community,holmes/intellij-community,slisson/intellij-community,tmpgit/intellij-community,jexp/idea2,ThiagoGarciaAlves/intellij-community,FHannes/intellij-community,signed/intellij-community,adedayo/intellij-community,nicolargo/intellij-community,fitermay
/intellij-community,Distrotech/intellij-community,ibinti/intellij-community,fnouama/intellij-community,fnouama/intellij-community,lucafavatella/intellij-community,semonte/intellij-community,signed/intellij-community,caot/intellij-community,da1z/intellij-community,fitermay/intellij-community,slisson/intellij-community,caot/intellij-community,salguarnieri/intellij-community,jexp/idea2,ftomassetti/intellij-community,retomerz/intellij-community,kdwink/intellij-community,nicolargo/intellij-community,jexp/idea2,kool79/intellij-community,apixandru/intellij-community,caot/intellij-community,dslomov/intellij-community,xfournet/intellij-community,youdonghai/intellij-community,tmpgit/intellij-community,SerCeMan/intellij-community,michaelgallacher/intellij-community,semonte/intellij-community,ftomassetti/intellij-community,petteyg/intellij-community,allotria/intellij-community,pwoodworth/intellij-community,joewalnes/idea-community,petteyg/intellij-community,Distrotech/intellij-community,ernestp/consulo,izonder/intellij-community,samthor/intellij-community,retomerz/intellij-community,wreckJ/intellij-community,vladmm/intellij-community,FHannes/intellij-community,blademainer/intellij-community,ivan-fedorov/intellij-community,suncycheng/intellij-community,michaelgallacher/intellij-community,slisson/intellij-community,ftomassetti/intellij-community,orekyuu/intellij-community,fnouama/intellij-community,asedunov/intellij-community,pwoodworth/intellij-community,ThiagoGarciaAlves/intellij-community,michaelgallacher/intellij-community,alphafoobar/intellij-community,jagguli/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,youdonghai/intellij-community,Lekanich/intellij-community,holmes/intellij-community,da1z/intellij-community,semonte/intellij-community,michaelgallacher/intellij-community,petteyg/intellij-community,vvv1559/intellij-community,muntasirsyed/intellij-community,Distrotech/intellij-community,TangHao1987/intellij-community,hurricup/intellij-co
mmunity,diorcety/intellij-community,fnouama/intellij-community,kool79/intellij-community,tmpgit/intellij-community,MER-GROUP/intellij-community,amith01994/intellij-community,MER-GROUP/intellij-community,idea4bsd/idea4bsd,fnouama/intellij-community,blademainer/intellij-community,allotria/intellij-community,kool79/intellij-community,kdwink/intellij-community,holmes/intellij-community,jagguli/intellij-community,vvv1559/intellij-community,ivan-fedorov/intellij-community,joewalnes/idea-community,youdonghai/intellij-community,jagguli/intellij-community,MichaelNedzelsky/intellij-community,diorcety/intellij-community,nicolargo/intellij-community,wreckJ/intellij-community,MichaelNedzelsky/intellij-community,semonte/intellij-community,clumsy/intellij-community,asedunov/intellij-community,retomerz/intellij-community,kdwink/intellij-community,jagguli/intellij-community,Distrotech/intellij-community,pwoodworth/intellij-community,FHannes/intellij-community,ol-loginov/intellij-community,supersven/intellij-community,muntasirsyed/intellij-community,apixandru/intellij-community,ol-loginov/intellij-community,slisson/intellij-community,TangHao1987/intellij-community,gnuhub/intellij-community,suncycheng/intellij-community,suncycheng/intellij-community,Lekanich/intellij-community,diorcety/intellij-community,nicolargo/intellij-community,izonder/intellij-community,caot/intellij-community,adedayo/intellij-community,ivan-fedorov/intellij-community,samthor/intellij-community,ahb0327/intellij-community,alphafoobar/intellij-community,Lekanich/intellij-community,ahb0327/intellij-community,da1z/intellij-community,orekyuu/intellij-community,clumsy/intellij-community,alphafoobar/intellij-community,idea4bsd/idea4bsd,slisson/intellij-community,gnuhub/intellij-community,blademainer/intellij-community,fnouama/intellij-community,akosyakov/intellij-community,orekyuu/intellij-community,fitermay/intellij-community,ryano144/intellij-community,kool79/intellij-community,youdonghai/intellij-community,ThiagoGar
ciaAlves/intellij-community,blademainer/intellij-community,fnouama/intellij-community,vladmm/intellij-community,akosyakov/intellij-community,salguarnieri/intellij-community,mglukhikh/intellij-community,Lekanich/intellij-community,apixandru/intellij-community,tmpgit/intellij-community,ivan-fedorov/intellij-community,jagguli/intellij-community,diorcety/intellij-community,wreckJ/intellij-community,gnuhub/intellij-community,FHannes/intellij-community,signed/intellij-community,apixandru/intellij-community,semonte/intellij-community,Lekanich/intellij-community,kdwink/intellij-community,izonder/intellij-community,jagguli/intellij-community,nicolargo/intellij-community,kdwink/intellij-community,petteyg/intellij-community,clumsy/intellij-community,retomerz/intellij-community,holmes/intellij-community,FHannes/intellij-community,fengbaicanhe/intellij-community,pwoodworth/intellij-community,robovm/robovm-studio,kdwink/intellij-community,MichaelNedzelsky/intellij-community,vladmm/intellij-community,akosyakov/intellij-community,dslomov/intellij-community,da1z/intellij-community,hurricup/intellij-community,orekyuu/intellij-community,asedunov/intellij-community,mglukhikh/intellij-community,slisson/intellij-community,SerCeMan/intellij-community,robovm/robovm-studio,ryano144/intellij-community,asedunov/intellij-community,MichaelNedzelsky/intellij-community,blademainer/intellij-community,holmes/intellij-community,ryano144/intellij-community,robovm/robovm-studio,supersven/intellij-community,MER-GROUP/intellij-community,fengbaicanhe/intellij-community,hurricup/intellij-community,ahb0327/intellij-community,caot/intellij-community,ryano144/intellij-community,da1z/intellij-community,allotria/intellij-community,robovm/robovm-studio,pwoodworth/intellij-community,ryano144/intellij-community,adedayo/intellij-community,Lekanich/intellij-community,Lekanich/intellij-community,lucafavatella/intellij-community,FHannes/intellij-community,joewalnes/idea-community,alphafoobar/intellij-community,pettey
g/intellij-community,orekyuu/intellij-community,lucafavatella/intellij-community,ryano144/intellij-community,akosyakov/intellij-community,orekyuu/intellij-community,lucafavatella/intellij-community,dslomov/intellij-community,vladmm/intellij-community,Distrotech/intellij-community,ThiagoGarciaAlves/intellij-community,da1z/intellij-community,ftomassetti/intellij-community,alphafoobar/intellij-community,suncycheng/intellij-community,fengbaicanhe/intellij-community,gnuhub/intellij-community,fitermay/intellij-community,robovm/robovm-studio,jagguli/intellij-community,Lekanich/intellij-community,asedunov/intellij-community,xfournet/intellij-community,hurricup/intellij-community,allotria/intellij-community,youdonghai/intellij-community,apixandru/intellij-community,ahb0327/intellij-community,ThiagoGarciaAlves/intellij-community,blademainer/intellij-community,hurricup/intellij-community,vvv1559/intellij-community,suncycheng/intellij-community,da1z/intellij-community,xfournet/intellij-community,michaelgallacher/intellij-community,hurricup/intellij-community,SerCeMan/intellij-community,vvv1559/intellij-community,MER-GROUP/intellij-community,muntasirsyed/intellij-community,consulo/consulo,lucafavatella/intellij-community,michaelgallacher/intellij-community,ivan-fedorov/intellij-community,clumsy/intellij-community,asedunov/intellij-community,fitermay/intellij-community,fengbaicanhe/intellij-community,alphafoobar/intellij-community,idea4bsd/idea4bsd,FHannes/intellij-community,samthor/intellij-community,ftomassetti/intellij-community,consulo/consulo,idea4bsd/idea4bsd,amith01994/intellij-community,asedunov/intellij-community,ftomassetti/intellij-community,akosyakov/intellij-community,slisson/intellij-community,kool79/intellij-community,kdwink/intellij-community,fnouama/intellij-community,nicolargo/intellij-community,MichaelNedzelsky/intellij-community,gnuhub/intellij-community,izonder/intellij-community,mglukhikh/intellij-community,gnuhub/intellij-community,gnuhub/intellij-community
,kdwink/intellij-community,ol-loginov/intellij-community,holmes/intellij-community,jagguli/intellij-community,SerCeMan/intellij-community,vladmm/intellij-community,youdonghai/intellij-community,caot/intellij-community,diorcety/intellij-community,muntasirsyed/intellij-community,fengbaicanhe/intellij-community,gnuhub/intellij-community,dslomov/intellij-community,ol-loginov/intellij-community,akosyakov/intellij-community,adedayo/intellij-community,salguarnieri/intellij-community,nicolargo/intellij-community"},"new_contents":{"kind":"string","value":"package com.intellij.openapi.vfs.impl.local;\n\nimport com.intellij.Patches;\nimport com.intellij.ide.ui.UISettings;\nimport com.intellij.openapi.application.ApplicationManager;\nimport com.intellij.openapi.application.ModalityState;\nimport com.intellij.openapi.diagnostic.Logger;\nimport com.intellij.openapi.progress.ProgressIndicator;\nimport com.intellij.openapi.util.SystemInfo;\nimport com.intellij.openapi.util.text.StringUtil;\nimport com.intellij.openapi.vfs.*;\nimport com.intellij.openapi.vfs.ex.ProvidedContent;\nimport com.intellij.util.LocalTimeCounter;\nimport org.jetbrains.annotations.NonNls;\nimport org.jetbrains.annotations.NotNull;\nimport org.jetbrains.annotations.Nullable;\n\nimport java.awt.*;\nimport java.io.*;\n\npublic class VirtualFileImpl extends VirtualFile {\n\n private static final Logger LOG = Logger.getInstance(\"#com.intellij.openapi.vfs.impl.local.VirtualFileImpl\");\n\n private static final LocalFileSystemImpl ourFileSystem = (LocalFileSystemImpl)LocalFileSystem.getInstance();\n\n private static char[] myBuffer = new char[1024];\n\n private VirtualFileImpl myParent;\n private String myName;\n private VirtualFileImpl[] myChildren = null; // null, if not defined yet\n private boolean myDirectoryFlag;\n private Boolean myWritableFlag = null; // null, if not defined yet\n private long myModificationStamp = LocalTimeCounter.currentTime();\n private long myTimeStamp = -1; // -1, if file content has 
not been requested yet\n\n private static final VirtualFileImpl[] EMPTY_VIRTUAL_FILE_ARRAY = new VirtualFileImpl[0];\n\n // used by tests\n public void setTimeStamp(long timeStamp) {\n myTimeStamp = timeStamp;\n }\n\n VirtualFileImpl(\n VirtualFileImpl parent,\n PhysicalFile file,\n boolean isDirectory\n ) {\n myParent = parent;\n setName(file.getName());\n if (myName.length() == 0) {\n LOG.error(\"file:\" + file.getPath());\n }\n myDirectoryFlag = isDirectory;\n if (!myDirectoryFlag) {\n myTimeStamp = file.lastModified();\n }\n }\n\n //for constructing roots\n VirtualFileImpl(String path) {\n int lastSlash = path.lastIndexOf('/');\n LOG.assertTrue(lastSlash >= 0);\n if (lastSlash == path.length() - 1) { // 'c:/' or '/'\n setName(path);\n myDirectoryFlag = true;\n }\n else {\n setName(path.substring(lastSlash + 1));\n String systemPath = path.replace('/', File.separatorChar);\n myDirectoryFlag = new IoFile(systemPath).isDirectory();\n }\n LOG.assertTrue(myName.length() > 0);\n }\n\n boolean areChildrenCached() {\n synchronized (ourFileSystem.LOCK) {\n return myChildren != null;\n }\n }\n\n void setParent(VirtualFileImpl parent) {\n synchronized (ourFileSystem.LOCK) {\n myParent = parent;\n }\n }\n\n PhysicalFile getPhysicalFile() {\n String path = getPath(File.separatorChar);\n return new IoFile(path);\n }\n\n @NotNull\n public VirtualFileSystem getFileSystem() {\n return ourFileSystem;\n }\n\n public String getPath() {\n return getPath('/');\n }\n\n private String getPath(char separatorChar) {\n //ApplicationManager.getApplication().assertReadAccessAllowed();\n synchronized (ourFileSystem.LOCK) {\n try {\n int length = appendPath(myBuffer, separatorChar);\n return new String(myBuffer, 0, length);\n }\n catch (ArrayIndexOutOfBoundsException aiob) {\n myBuffer = new char[myBuffer.length * 2];\n return getPath(separatorChar);\n }\n }\n }\n\n private int appendPath(char[] buffer, char separatorChar) {\n int currentLength = myParent == null ? 
0 : myParent.appendPath(buffer, separatorChar);\n\n if (currentLength > 0 && buffer[currentLength - 1] != separatorChar) {\n buffer[currentLength++] = separatorChar;\n }\n\n String name = myName;\n final int nameLength = name.length();\n\n name.getChars(0, nameLength, buffer, currentLength);\n int newLength = currentLength + nameLength;\n if (currentLength == 0 && separatorChar != '/' ) {\n StringUtil.replaceChar(buffer, '/', separatorChar, currentLength, newLength); // root may contain '/' char\n }\n return newLength;\n }\n\n @NotNull\n public String getName() {\n return myName;\n }\n\n public String getPresentableName() {\n if (UISettings.getInstance().HIDE_KNOWN_EXTENSION_IN_TABS) {\n final String nameWithoutExtension = getNameWithoutExtension();\n return nameWithoutExtension.length() == 0 ? getName() : nameWithoutExtension;\n }\n return getName();\n }\n\n public boolean isWritable() {\n synchronized (ourFileSystem.LOCK) {\n if (myWritableFlag == null) {\n myWritableFlag = isWritable(getPhysicalFile(), isDirectory()) ? 
Boolean.TRUE : Boolean.FALSE;\n }\n }\n return myWritableFlag.booleanValue();\n }\n\n private static boolean isWritable(PhysicalFile physicalFile, boolean isDirectory) {\n if (Patches.ALL_FOLDERS_ARE_WRITABLE && isDirectory) {\n return true;\n }\n else {\n return physicalFile.canWrite();\n }\n }\n\n public boolean isDirectory() {\n return myDirectoryFlag;\n }\n\n public boolean isValid() {\n synchronized (ourFileSystem.LOCK) {\n if (myParent == null) {\n return ourFileSystem.isRoot(this);\n }\n\n return myParent.isValid();\n }\n }\n\n @Nullable\n public VirtualFileImpl getParent() {\n synchronized (ourFileSystem.LOCK) {\n return myParent;\n }\n }\n\n public VirtualFile[] getChildren() {\n if (!isDirectory()) return null;\n synchronized (ourFileSystem.LOCK) {\n if (myChildren == null) {\n PhysicalFile file = getPhysicalFile();\n PhysicalFile[] files = file.listFiles();\n final int length = files.length;\n if (length == 0) {\n myChildren = EMPTY_VIRTUAL_FILE_ARRAY;\n }\n else {\n myChildren = new VirtualFileImpl[ length ];\n for (int i = 0; i < length; ++i) {\n PhysicalFile f = files[i];\n myChildren[i] = new VirtualFileImpl(this, f, f.isDirectory());\n }\n }\n }\n }\n return myChildren;\n }\n\n void replaceChild(VirtualFileImpl oldChild, VirtualFileImpl newChild) {\n for (int i = 0; i < myChildren.length; i++) {\n VirtualFileImpl child = myChildren[i];\n if (child == oldChild) {\n myChildren[i] = newChild;\n return;\n }\n }\n }\n\n public InputStream getInputStream() throws IOException {\n return getProvidedContent().getInputStream();\n }\n\n public long getLength() {\n LOG.assertTrue(!isDirectory());\n ProvidedContent content;\n try {\n content = getProvidedContent();\n }\n catch (IOException e) {\n throw new RuntimeException(e);\n }\n return content.getLength();\n }\n\n\n private ProvidedContent getProvidedContent() throws IOException {\n ApplicationManager.getApplication().assertReadAccessAllowed();\n\n if (isDirectory()) {\n throw new 
IOException(VfsBundle.message(\"file.read.error\", getPhysicalFile().getPath()));\n }\n\n if (myTimeStamp < 0) return physicalContent();\n\n ProvidedContent content = ourFileSystem.getManager().getProvidedContent(this);\n return content == null ? physicalContent() : content;\n\n }\n\n private ProvidedContent physicalContent() {\n return new ProvidedContent() {\n public InputStream getInputStream() throws IOException {\n return new BufferedInputStream(getPhysicalFileInputStream());\n }\n\n public int getLength() {\n return getPhysicalFileLength();\n }\n };\n }\n\n protected InputStream getPhysicalFileInputStream() throws IOException {\n getTimeStamp();\n return getPhysicalFile().createInputStream();\n }\n\n public OutputStream getOutputStream(final Object requestor,\n final long newModificationStamp,\n final long newTimeStamp) throws IOException {\n ApplicationManager.getApplication().assertWriteAccessAllowed();\n\n PhysicalFile physicalFile = getPhysicalFile();\n if (isDirectory()) {\n throw new IOException(VfsBundle.message(\"file.write.error\", physicalFile.getPath()));\n }\n ourFileSystem.fireBeforeContentsChange(requestor, this);\n final OutputStream out = new BufferedOutputStream(physicalFile.createOutputStream());\n if (getBOM() != null) {\n out.write(getBOM());\n }\n return new OutputStream() {\n public void write(int b) throws IOException {\n out.write(b);\n }\n\n public void write(byte[] b) throws IOException {\n out.write(b);\n }\n\n public void write(byte[] b, int off, int len) throws IOException {\n out.write(b, off, len);\n }\n\n public void flush() throws IOException {\n out.flush();\n }\n\n public void close() throws IOException {\n out.close();\n long oldModificationStamp = getModificationStamp();\n myModificationStamp = newModificationStamp >= 0 ? 
newModificationStamp : LocalTimeCounter.currentTime();\n if (newTimeStamp >= 0) {\n getPhysicalFile().setLastModified(newTimeStamp);\n }\n myTimeStamp = getPhysicalFile().lastModified();\n ourFileSystem.fireContentsChanged(requestor, VirtualFileImpl.this, oldModificationStamp);\n }\n };\n }\n\n public byte[] contentsToByteArray() throws IOException {\n InputStream in = getInputStream();\n byte[] bytes = new byte[(int)getLength()];\n try {\n int count = 0;\n while (true) {\n int n = in.read(bytes, count, bytes.length - count);\n if (n <= 0) break;\n count += n;\n }\n }\n finally {\n in.close();\n }\n return bytes;\n }\n\n public long getModificationStamp() {\n return myModificationStamp;\n }\n\n public long getTimeStamp() {\n if (myTimeStamp < 0) {\n myTimeStamp = getPhysicalFile().lastModified();\n }\n return myTimeStamp;\n }\n\n public long getActualTimeStamp() {\n return getPhysicalFile().lastModified();\n }\n\n public void refresh(final boolean asynchronous, final boolean recursive, final Runnable postRunnable) {\n if (!asynchronous) {\n ApplicationManager.getApplication().assertWriteAccessAllowed();\n }\n\n final ModalityState modalityState = EventQueue.isDispatchThread() ? 
ModalityState.current() : ModalityState.NON_MMODAL;\n\n if (LOG.isDebugEnabled()) {\n LOG.debug(\"VirtualFile.refresh():\" + getPresentableUrl() + \", recursive = \" + recursive + \", modalityState = \" + modalityState);\n }\n\n final Runnable runnable = new Runnable() {\n public void run() {\n ourFileSystem.getManager().beforeRefreshStart(asynchronous, modalityState, postRunnable);\n\n PhysicalFile physicalFile = getPhysicalFile();\n if (!physicalFile.exists()) {\n Runnable runnable = new Runnable() {\n public void run() {\n if (!isValid()) return;\n VirtualFileImpl parent = (VirtualFileImpl)getParent();\n if (parent != null) {\n ourFileSystem.fireBeforeFileDeletion(null, VirtualFileImpl.this);\n parent.removeChild(VirtualFileImpl.this);\n ourFileSystem.fireFileDeleted(null, VirtualFileImpl.this, myName, myDirectoryFlag, parent);\n }\n }\n };\n ourFileSystem.getManager().addEventToFireByRefresh(runnable, asynchronous, modalityState);\n }\n else {\n ourFileSystem.refresh(VirtualFileImpl.this, recursive, true, modalityState, asynchronous, false);\n }\n }\n };\n\n final Runnable endTask = new Runnable() {\n public void run() {\n ourFileSystem.getManager().afterRefreshFinish(asynchronous, modalityState);\n }\n };\n\n if (asynchronous) {\n Runnable runnable1 = new Runnable() {\n public void run() {\n LOG.info(\"Executing request:\" + this);\n\n final ProgressIndicator indicator = ourFileSystem.getManager().getRefreshIndicator();\n indicator.start();\n indicator.setText(VfsBundle.message(\"file.synchronize.progress\"));\n\n ApplicationManager.getApplication().runReadAction(runnable);\n\n indicator.stop();\n\n endTask.run();\n }\n };\n\n ourFileSystem.getSynchronizeExecutor().submit(runnable1);\n }\n else {\n runnable.run();\n endTask.run();\n }\n }\n\n public boolean nameEquals(String name) {\n return SystemInfo.isFileSystemCaseSensitive ? 
getName().equals(name) : getName().equalsIgnoreCase(name);\n }\n\n public int getPhysicalFileLength() {\n return (int)getPhysicalFile().length();\n }\n\n void refreshInternal(final boolean recursive,\n final ModalityState modalityState,\n final boolean forceRefresh,\n final boolean asynchronous) {\n if (!asynchronous) {\n ApplicationManager.getApplication().assertWriteAccessAllowed();\n }\n\n if (!isValid()) return;\n\n if (LOG.isDebugEnabled()) {\n LOG.debug(\"refreshInternal recursive = \" + recursive + \" asynchronous = \" + asynchronous + \" file = \" + myName);\n }\n\n PhysicalFile physicalFile = getPhysicalFile();\n\n final boolean isDirectory = physicalFile.isDirectory();\n if (isDirectory != myDirectoryFlag) {\n final PhysicalFile _physicalFile = physicalFile;\n ourFileSystem.getManager().addEventToFireByRefresh(\n new Runnable() {\n public void run() {\n if (!isValid()) return;\n VirtualFileImpl parent = (VirtualFileImpl)getParent();\n if (parent == null) return;\n\n ourFileSystem.fireBeforeFileDeletion(null, VirtualFileImpl.this);\n parent.removeChild(VirtualFileImpl.this);\n ourFileSystem.fireFileDeleted(null, VirtualFileImpl.this, myName, myDirectoryFlag, parent);\n VirtualFileImpl newChild = new VirtualFileImpl(parent, _physicalFile, isDirectory);\n parent.addChild(newChild);\n ourFileSystem.fireFileCreated(null, newChild);\n }\n },\n asynchronous,\n modalityState\n );\n return;\n }\n\n if (isDirectory) {\n if (myChildren == null) return;\n PhysicalFile[] files = physicalFile.listFiles();\n\n final boolean[] found = new boolean[myChildren.length];\n\n VirtualFileImpl[] children = myChildren;\n for (int i = 0; i < files.length; i++) {\n final PhysicalFile file = files[i];\n final String name = file.getName();\n int index = -1;\n if (i < children.length && children[i].myName.equals(name)) {\n index = i;\n }\n else {\n for (int j = 0; j < children.length; j++) {\n VirtualFileImpl child = myChildren[j];\n if (child.myName.equals(name)) index = j;\n }\n 
}\n\n if (index < 0) {\n ourFileSystem.getManager().addEventToFireByRefresh(\n new Runnable() {\n public void run() {\n if (VirtualFileImpl.this.isValid()) {\n if (findChild(file.getName()) != null) return; // was already created\n VirtualFileImpl newChild = new VirtualFileImpl(\n VirtualFileImpl.this,\n file,\n file.isDirectory()\n );\n addChild(newChild);\n ourFileSystem.fireFileCreated(null, newChild);\n }\n }\n },\n asynchronous,\n modalityState\n );\n }\n else {\n found[index] = true;\n }\n }\n for (int i = 0; i < children.length; i++) {\n final VirtualFileImpl child = children[i];\n if (found[i]) {\n if (recursive) {\n child.refreshInternal(recursive, modalityState, false, asynchronous);\n }\n }\n else {\n ourFileSystem.getManager().addEventToFireByRefresh(\n new Runnable() {\n public void run() {\n if (child.isValid()) {\n ourFileSystem.fireBeforeFileDeletion(null, child);\n removeChild(child);\n ourFileSystem.fireFileDeleted(null, child, child.myName, child.myDirectoryFlag, VirtualFileImpl.this);\n }\n }\n },\n asynchronous,\n modalityState\n );\n }\n }\n }\n else {\n if (myTimeStamp > 0) {\n final long timeStamp = physicalFile.lastModified();\n if (timeStamp != myTimeStamp || forceRefresh) {\n ourFileSystem.getManager().addEventToFireByRefresh(\n new Runnable() {\n public void run() {\n if (!isValid()) return;\n\n ourFileSystem.fireBeforeContentsChange(null, VirtualFileImpl.this);\n long oldModificationStamp = getModificationStamp();\n myTimeStamp = timeStamp;\n myModificationStamp = LocalTimeCounter.currentTime();\n ourFileSystem.fireContentsChanged(null, VirtualFileImpl.this, oldModificationStamp);\n }\n },\n asynchronous,\n modalityState\n );\n }\n }\n }\n\n if (myWritableFlag != null) {\n final boolean isWritable = isWritable(physicalFile, isDirectory());\n if (isWritable != myWritableFlag.booleanValue()) {\n ourFileSystem.getManager().addEventToFireByRefresh(\n new Runnable() {\n public void run() {\n if (!isValid()) return;\n\n 
ourFileSystem.fireBeforePropertyChange(\n null, VirtualFileImpl.this, PROP_WRITABLE,\n myWritableFlag, isWritable ? Boolean.TRUE : Boolean.FALSE\n );\n myWritableFlag = isWritable ? Boolean.TRUE : Boolean.FALSE;\n ourFileSystem.firePropertyChanged(\n null, VirtualFileImpl.this, PROP_WRITABLE,\n isWritable ? Boolean.FALSE : Boolean.TRUE, myWritableFlag\n );\n }\n },\n asynchronous,\n modalityState\n );\n }\n }\n }\n\n\n void addChild(VirtualFileImpl child) {\n getChildren(); // to initialize myChildren\n\n synchronized (ourFileSystem.LOCK) {\n VirtualFileImpl[] newChildren = new VirtualFileImpl[myChildren.length + 1];\n System.arraycopy(myChildren, 0, newChildren, 0, myChildren.length);\n newChildren[myChildren.length] = child;\n myChildren = newChildren;\n child.setParent(this);\n }\n }\n\n void removeChild(VirtualFileImpl child) {\n getChildren(); // to initialize myChildren\n\n synchronized (ourFileSystem.LOCK) {\n for (int i = 0; i < myChildren.length; i++) {\n if (myChildren[i] == child) {\n VirtualFileImpl[] newChildren = new VirtualFileImpl[myChildren.length - 1];\n System.arraycopy(myChildren, 0, newChildren, 0, i);\n System.arraycopy(myChildren, i + 1, newChildren, i, newChildren.length - i);\n myChildren = newChildren;\n child.myParent = null;\n return;\n }\n }\n }\n }\n\n @NonNls\n public String toString() {\n return \"VirtualFile: \" + getPresentableUrl();\n }\n\n void setName(String name) {\n myName = name;\n }\n}\n"},"new_file":{"kind":"string","value":"source/com/intellij/openapi/vfs/impl/local/VirtualFileImpl.java"},"old_contents":{"kind":"string","value":"package com.intellij.openapi.vfs.impl.local;\n\nimport com.intellij.Patches;\nimport com.intellij.ide.ui.UISettings;\nimport com.intellij.openapi.application.ApplicationManager;\nimport com.intellij.openapi.application.ModalityState;\nimport com.intellij.openapi.diagnostic.Logger;\nimport com.intellij.openapi.progress.ProgressIndicator;\nimport com.intellij.openapi.util.SystemInfo;\nimport 
com.intellij.openapi.util.text.StringUtil;\nimport com.intellij.openapi.vfs.*;\nimport com.intellij.openapi.vfs.ex.ProvidedContent;\nimport com.intellij.util.LocalTimeCounter;\nimport org.jetbrains.annotations.NonNls;\nimport org.jetbrains.annotations.NotNull;\nimport org.jetbrains.annotations.Nullable;\n\nimport java.awt.*;\nimport java.io.*;\n\npublic class VirtualFileImpl extends VirtualFile {\n\n private static final Logger LOG = Logger.getInstance(\"#com.intellij.openapi.vfs.impl.local.VirtualFileImpl\");\n\n private static final LocalFileSystemImpl ourFileSystem = (LocalFileSystemImpl)LocalFileSystem.getInstance();\n\n private VirtualFileImpl myParent;\n private String myName;\n private VirtualFileImpl[] myChildren = null; // null, if not defined yet\n private boolean myDirectoryFlag;\n private Boolean myWritableFlag = null; // null, if not defined yet\n private long myModificationStamp = LocalTimeCounter.currentTime();\n private long myTimeStamp = -1; // -1, if file content has not been requested yet\n\n private static final VirtualFileImpl[] EMPTY_VIRTUAL_FILE_ARRAY = new VirtualFileImpl[0];\n\n // used by tests\n public void setTimeStamp(long timeStamp) {\n myTimeStamp = timeStamp;\n }\n\n VirtualFileImpl(\n VirtualFileImpl parent,\n PhysicalFile file,\n boolean isDirectory\n ) {\n myParent = parent;\n setName(file.getName());\n if (myName.length() == 0) {\n LOG.error(\"file:\" + file.getPath());\n }\n myDirectoryFlag = isDirectory;\n if (!myDirectoryFlag) {\n myTimeStamp = file.lastModified();\n }\n }\n\n //for constructing roots\n VirtualFileImpl(String path) {\n int lastSlash = path.lastIndexOf('/');\n LOG.assertTrue(lastSlash >= 0);\n if (lastSlash == path.length() - 1) { // 'c:/' or '/'\n setName(path);\n myDirectoryFlag = true;\n }\n else {\n setName(path.substring(lastSlash + 1));\n String systemPath = path.replace('/', File.separatorChar);\n myDirectoryFlag = new IoFile(systemPath).isDirectory();\n }\n LOG.assertTrue(myName.length() > 0);\n }\n\n 
boolean areChildrenCached() {\n synchronized (ourFileSystem.LOCK) {\n return myChildren != null;\n }\n }\n\n void setParent(VirtualFileImpl parent) {\n synchronized (ourFileSystem.LOCK) {\n myParent = parent;\n }\n }\n\n PhysicalFile getPhysicalFile() {\n String path = getPath(File.separatorChar, 1024);\n return new IoFile(path);\n }\n\n @NotNull\n public VirtualFileSystem getFileSystem() {\n return ourFileSystem;\n }\n\n public String getPath() {\n return getPath('/', 1024);\n }\n\n private String getPath(char separatorChar, int bufferLength) {\n //ApplicationManager.getApplication().assertReadAccessAllowed();\n try {\n char[] buffer = new char[bufferLength];\n int length;\n synchronized (ourFileSystem.LOCK) {\n length = appendPath(buffer, separatorChar);\n }\n return StringFactory.createStringFromConstantArray(buffer, 0, length);\n }\n catch(ArrayIndexOutOfBoundsException aiob) {\n return getPath(separatorChar, bufferLength * 2);\n }\n }\n\n private int appendPath(char[] buffer, char separatorChar) {\n int currentLength = myParent == null ? 0 : myParent.appendPath(buffer, separatorChar);\n\n if (currentLength > 0 && buffer[currentLength - 1] != separatorChar) {\n buffer[currentLength++] = separatorChar;\n }\n\n String name = myName;\n final int nameLength = name.length();\n\n name.getChars(0, nameLength, buffer, currentLength);\n int newLength = currentLength + nameLength;\n if (currentLength == 0) {\n StringUtil.replaceChar(buffer, '/', separatorChar, currentLength, newLength); // root may contain '/' char\n }\n return newLength;\n }\n\n @NotNull\n public String getName() {\n return myName;\n }\n\n public String getPresentableName() {\n if (UISettings.getInstance().HIDE_KNOWN_EXTENSION_IN_TABS) {\n final String nameWithoutExtension = getNameWithoutExtension();\n return nameWithoutExtension.length() == 0 ? 
getName() : nameWithoutExtension;\n }\n return getName();\n }\n\n public boolean isWritable() {\n synchronized (ourFileSystem.LOCK) {\n if (myWritableFlag == null) {\n myWritableFlag = isWritable(getPhysicalFile(), isDirectory()) ? Boolean.TRUE : Boolean.FALSE;\n }\n }\n return myWritableFlag.booleanValue();\n }\n\n private static boolean isWritable(PhysicalFile physicalFile, boolean isDirectory) {\n if (Patches.ALL_FOLDERS_ARE_WRITABLE && isDirectory) {\n return true;\n }\n else {\n return physicalFile.canWrite();\n }\n }\n\n public boolean isDirectory() {\n return myDirectoryFlag;\n }\n\n public boolean isValid() {\n synchronized (ourFileSystem.LOCK) {\n if (myParent == null) {\n return ourFileSystem.isRoot(this);\n }\n\n return myParent.isValid();\n }\n }\n\n @Nullable\n public VirtualFileImpl getParent() {\n synchronized (ourFileSystem.LOCK) {\n return myParent;\n }\n }\n\n public VirtualFile[] getChildren() {\n if (!isDirectory()) return null;\n synchronized (ourFileSystem.LOCK) {\n if (myChildren == null) {\n PhysicalFile file = getPhysicalFile();\n PhysicalFile[] files = file.listFiles();\n final int length = files.length;\n if (length == 0) {\n myChildren = EMPTY_VIRTUAL_FILE_ARRAY;\n }\n else {\n myChildren = new VirtualFileImpl[ length ];\n for (int i = 0; i < length; ++i) {\n PhysicalFile f = files[i];\n myChildren[i] = new VirtualFileImpl(this, f, f.isDirectory());\n }\n }\n }\n }\n return myChildren;\n }\n\n void replaceChild(VirtualFileImpl oldChild, VirtualFileImpl newChild) {\n for (int i = 0; i < myChildren.length; i++) {\n VirtualFileImpl child = myChildren[i];\n if (child == oldChild) {\n myChildren[i] = newChild;\n return;\n }\n }\n }\n\n public InputStream getInputStream() throws IOException {\n return getProvidedContent().getInputStream();\n }\n\n public long getLength() {\n LOG.assertTrue(!isDirectory());\n ProvidedContent content;\n try {\n content = getProvidedContent();\n }\n catch (IOException e) {\n throw new RuntimeException(e);\n }\n 
return content.getLength();\n }\n\n\n private ProvidedContent getProvidedContent() throws IOException {\n ApplicationManager.getApplication().assertReadAccessAllowed();\n\n if (isDirectory()) {\n throw new IOException(VfsBundle.message(\"file.read.error\", getPhysicalFile().getPath()));\n }\n\n if (myTimeStamp < 0) return physicalContent();\n\n ProvidedContent content = ourFileSystem.getManager().getProvidedContent(this);\n return content == null ? physicalContent() : content;\n\n }\n\n private ProvidedContent physicalContent() {\n return new ProvidedContent() {\n public InputStream getInputStream() throws IOException {\n return new BufferedInputStream(getPhysicalFileInputStream());\n }\n\n public int getLength() {\n return getPhysicalFileLength();\n }\n };\n }\n\n protected InputStream getPhysicalFileInputStream() throws IOException {\n getTimeStamp();\n return getPhysicalFile().createInputStream();\n }\n\n public OutputStream getOutputStream(final Object requestor,\n final long newModificationStamp,\n final long newTimeStamp) throws IOException {\n ApplicationManager.getApplication().assertWriteAccessAllowed();\n\n PhysicalFile physicalFile = getPhysicalFile();\n if (isDirectory()) {\n throw new IOException(VfsBundle.message(\"file.write.error\", physicalFile.getPath()));\n }\n ourFileSystem.fireBeforeContentsChange(requestor, this);\n final OutputStream out = new BufferedOutputStream(physicalFile.createOutputStream());\n if (getBOM() != null) {\n out.write(getBOM());\n }\n return new OutputStream() {\n public void write(int b) throws IOException {\n out.write(b);\n }\n\n public void write(byte[] b) throws IOException {\n out.write(b);\n }\n\n public void write(byte[] b, int off, int len) throws IOException {\n out.write(b, off, len);\n }\n\n public void flush() throws IOException {\n out.flush();\n }\n\n public void close() throws IOException {\n out.close();\n long oldModificationStamp = getModificationStamp();\n myModificationStamp = newModificationStamp >= 0 
? newModificationStamp : LocalTimeCounter.currentTime();\n if (newTimeStamp >= 0) {\n getPhysicalFile().setLastModified(newTimeStamp);\n }\n myTimeStamp = getPhysicalFile().lastModified();\n ourFileSystem.fireContentsChanged(requestor, VirtualFileImpl.this, oldModificationStamp);\n }\n };\n }\n\n public byte[] contentsToByteArray() throws IOException {\n InputStream in = getInputStream();\n byte[] bytes = new byte[(int)getLength()];\n try {\n int count = 0;\n while (true) {\n int n = in.read(bytes, count, bytes.length - count);\n if (n <= 0) break;\n count += n;\n }\n }\n finally {\n in.close();\n }\n return bytes;\n }\n\n public long getModificationStamp() {\n return myModificationStamp;\n }\n\n public long getTimeStamp() {\n if (myTimeStamp < 0) {\n myTimeStamp = getPhysicalFile().lastModified();\n }\n return myTimeStamp;\n }\n\n public long getActualTimeStamp() {\n return getPhysicalFile().lastModified();\n }\n\n public void refresh(final boolean asynchronous, final boolean recursive, final Runnable postRunnable) {\n if (!asynchronous) {\n ApplicationManager.getApplication().assertWriteAccessAllowed();\n }\n\n final ModalityState modalityState = EventQueue.isDispatchThread() ? 
ModalityState.current() : ModalityState.NON_MMODAL;\n\n if (LOG.isDebugEnabled()) {\n LOG.debug(\"VirtualFile.refresh():\" + getPresentableUrl() + \", recursive = \" + recursive + \", modalityState = \" + modalityState);\n }\n\n final Runnable runnable = new Runnable() {\n public void run() {\n ourFileSystem.getManager().beforeRefreshStart(asynchronous, modalityState, postRunnable);\n\n PhysicalFile physicalFile = getPhysicalFile();\n if (!physicalFile.exists()) {\n Runnable runnable = new Runnable() {\n public void run() {\n if (!isValid()) return;\n VirtualFileImpl parent = (VirtualFileImpl)getParent();\n if (parent != null) {\n ourFileSystem.fireBeforeFileDeletion(null, VirtualFileImpl.this);\n parent.removeChild(VirtualFileImpl.this);\n ourFileSystem.fireFileDeleted(null, VirtualFileImpl.this, myName, myDirectoryFlag, parent);\n }\n }\n };\n ourFileSystem.getManager().addEventToFireByRefresh(runnable, asynchronous, modalityState);\n }\n else {\n ourFileSystem.refresh(VirtualFileImpl.this, recursive, true, modalityState, asynchronous, false);\n }\n }\n };\n\n final Runnable endTask = new Runnable() {\n public void run() {\n ourFileSystem.getManager().afterRefreshFinish(asynchronous, modalityState);\n }\n };\n\n if (asynchronous) {\n Runnable runnable1 = new Runnable() {\n public void run() {\n LOG.info(\"Executing request:\" + this);\n\n final ProgressIndicator indicator = ourFileSystem.getManager().getRefreshIndicator();\n indicator.start();\n indicator.setText(VfsBundle.message(\"file.synchronize.progress\"));\n\n ApplicationManager.getApplication().runReadAction(runnable);\n\n indicator.stop();\n\n endTask.run();\n }\n };\n\n ourFileSystem.getSynchronizeExecutor().submit(runnable1);\n }\n else {\n runnable.run();\n endTask.run();\n }\n }\n\n public boolean nameEquals(String name) {\n return SystemInfo.isFileSystemCaseSensitive ? 
getName().equals(name) : getName().equalsIgnoreCase(name);\n }\n\n public int getPhysicalFileLength() {\n return (int)getPhysicalFile().length();\n }\n\n void refreshInternal(final boolean recursive,\n final ModalityState modalityState,\n final boolean forceRefresh,\n final boolean asynchronous) {\n if (!asynchronous) {\n ApplicationManager.getApplication().assertWriteAccessAllowed();\n }\n\n if (!isValid()) return;\n\n if (LOG.isDebugEnabled()) {\n LOG.debug(\"refreshInternal recursive = \" + recursive + \" asynchronous = \" + asynchronous + \" file = \" + myName);\n }\n\n PhysicalFile physicalFile = getPhysicalFile();\n\n final boolean isDirectory = physicalFile.isDirectory();\n if (isDirectory != myDirectoryFlag) {\n final PhysicalFile _physicalFile = physicalFile;\n ourFileSystem.getManager().addEventToFireByRefresh(\n new Runnable() {\n public void run() {\n if (!isValid()) return;\n VirtualFileImpl parent = (VirtualFileImpl)getParent();\n if (parent == null) return;\n\n ourFileSystem.fireBeforeFileDeletion(null, VirtualFileImpl.this);\n parent.removeChild(VirtualFileImpl.this);\n ourFileSystem.fireFileDeleted(null, VirtualFileImpl.this, myName, myDirectoryFlag, parent);\n VirtualFileImpl newChild = new VirtualFileImpl(parent, _physicalFile, isDirectory);\n parent.addChild(newChild);\n ourFileSystem.fireFileCreated(null, newChild);\n }\n },\n asynchronous,\n modalityState\n );\n return;\n }\n\n if (isDirectory) {\n if (myChildren == null) return;\n PhysicalFile[] files = physicalFile.listFiles();\n\n final boolean[] found = new boolean[myChildren.length];\n\n VirtualFileImpl[] children = myChildren;\n for (int i = 0; i < files.length; i++) {\n final PhysicalFile file = files[i];\n final String name = file.getName();\n int index = -1;\n if (i < children.length && children[i].myName.equals(name)) {\n index = i;\n } else {\n for (int j = 0; j < children.length; j++) {\n VirtualFileImpl child = myChildren[j];\n if (child.myName.equals(name)) index = j;\n }\n }\n\n 
if (index < 0) {\n ourFileSystem.getManager().addEventToFireByRefresh(\n new Runnable() {\n public void run() {\n if (VirtualFileImpl.this.isValid()) {\n if (findChild(file.getName()) != null) return; // was already created\n VirtualFileImpl newChild = new VirtualFileImpl(\n VirtualFileImpl.this,\n file,\n file.isDirectory()\n );\n addChild(newChild);\n ourFileSystem.fireFileCreated(null, newChild);\n }\n }\n },\n asynchronous,\n modalityState\n );\n }\n else {\n found[index] = true;\n }\n }\n for (int i = 0; i < children.length; i++) {\n final VirtualFileImpl child = children[i];\n if (found[i]) {\n if (recursive) {\n child.refreshInternal(recursive, modalityState, false, asynchronous);\n }\n }\n else {\n ourFileSystem.getManager().addEventToFireByRefresh(\n new Runnable() {\n public void run() {\n if (child.isValid()) {\n ourFileSystem.fireBeforeFileDeletion(null, child);\n removeChild(child);\n ourFileSystem.fireFileDeleted(null, child, child.myName, child.myDirectoryFlag, VirtualFileImpl.this);\n }\n }\n },\n asynchronous,\n modalityState\n );\n }\n }\n }\n else {\n if (myTimeStamp > 0) {\n final long timeStamp = physicalFile.lastModified();\n if (timeStamp != myTimeStamp || forceRefresh) {\n ourFileSystem.getManager().addEventToFireByRefresh(\n new Runnable() {\n public void run() {\n if (!isValid()) return;\n\n ourFileSystem.fireBeforeContentsChange(null, VirtualFileImpl.this);\n long oldModificationStamp = getModificationStamp();\n myTimeStamp = timeStamp;\n myModificationStamp = LocalTimeCounter.currentTime();\n ourFileSystem.fireContentsChanged(null, VirtualFileImpl.this, oldModificationStamp);\n }\n },\n asynchronous,\n modalityState\n );\n }\n }\n }\n\n if (myWritableFlag != null) {\n final boolean isWritable = isWritable(physicalFile, isDirectory());\n if (isWritable != myWritableFlag.booleanValue()) {\n ourFileSystem.getManager().addEventToFireByRefresh(\n new Runnable() {\n public void run() {\n if (!isValid()) return;\n\n 
ourFileSystem.fireBeforePropertyChange(\n null, VirtualFileImpl.this, PROP_WRITABLE,\n myWritableFlag, isWritable ? Boolean.TRUE : Boolean.FALSE\n );\n myWritableFlag = isWritable ? Boolean.TRUE : Boolean.FALSE;\n ourFileSystem.firePropertyChanged(\n null, VirtualFileImpl.this, PROP_WRITABLE,\n isWritable ? Boolean.FALSE : Boolean.TRUE, myWritableFlag\n );\n }\n },\n asynchronous,\n modalityState\n );\n }\n }\n }\n\n\n void addChild(VirtualFileImpl child) {\n getChildren(); // to initialize myChildren\n\n synchronized (ourFileSystem.LOCK) {\n VirtualFileImpl[] newChildren = new VirtualFileImpl[myChildren.length + 1];\n System.arraycopy(myChildren, 0, newChildren, 0, myChildren.length);\n newChildren[myChildren.length] = child;\n myChildren = newChildren;\n child.setParent(this);\n }\n }\n\n void removeChild(VirtualFileImpl child) {\n getChildren(); // to initialize myChildren\n\n synchronized (ourFileSystem.LOCK) {\n for (int i = 0; i < myChildren.length; i++) {\n if (myChildren[i] == child) {\n VirtualFileImpl[] newChildren = new VirtualFileImpl[myChildren.length - 1];\n System.arraycopy(myChildren, 0, newChildren, 0, i);\n System.arraycopy(myChildren, i + 1, newChildren, i, newChildren.length - i);\n myChildren = newChildren;\n child.myParent = null;\n return;\n }\n }\n }\n }\n\n @NonNls\n public String toString() {\n return \"VirtualFile: \" + getPresentableUrl();\n }\n\n void setName(String name) {\n myName = name;\n }\n}\n"},"message":{"kind":"string","value":"optimization: getPath() uses static char buffer\n"},"old_file":{"kind":"string","value":"source/com/intellij/openapi/vfs/impl/local/VirtualFileImpl.java"},"subject":{"kind":"string","value":"optimization: getPath() uses static char buffer"},"git_diff":{"kind":"string","value":"ource/com/intellij/openapi/vfs/impl/local/VirtualFileImpl.java\n \n private static final LocalFileSystemImpl ourFileSystem = (LocalFileSystemImpl)LocalFileSystem.getInstance();\n \n private static char[] myBuffer = new 
char[1024];\n\n private VirtualFileImpl myParent;\n private String myName;\n private VirtualFileImpl[] myChildren = null; // null, if not defined yet\n }\n \n PhysicalFile getPhysicalFile() {\n String path = getPath(File.separatorChar, 1024);\n String path = getPath(File.separatorChar);\n return new IoFile(path);\n }\n \n }\n \n public String getPath() {\n return getPath('/', 1024);\n }\n\n private String getPath(char separatorChar, int bufferLength) {\n return getPath('/');\n }\n\n private String getPath(char separatorChar) {\n //ApplicationManager.getApplication().assertReadAccessAllowed();\n try {\n char[] buffer = new char[bufferLength];\n int length;\n synchronized (ourFileSystem.LOCK) {\n length = appendPath(buffer, separatorChar);\n }\n return StringFactory.createStringFromConstantArray(buffer, 0, length);\n }\n catch(ArrayIndexOutOfBoundsException aiob) {\n return getPath(separatorChar, bufferLength * 2);\n synchronized (ourFileSystem.LOCK) {\n try {\n int length = appendPath(myBuffer, separatorChar);\n return new String(myBuffer, 0, length);\n }\n catch (ArrayIndexOutOfBoundsException aiob) {\n myBuffer = new char[myBuffer.length * 2];\n return getPath(separatorChar);\n }\n }\n }\n \n \n name.getChars(0, nameLength, buffer, currentLength);\n int newLength = currentLength + nameLength;\n if (currentLength == 0) {\n if (currentLength == 0 && separatorChar != '/' ) {\n StringUtil.replaceChar(buffer, '/', separatorChar, currentLength, newLength); // root may contain '/' char\n }\n return newLength;\n int index = -1;\n if (i < children.length && children[i].myName.equals(name)) {\n index = i;\n } else {\n }\n else {\n for (int j = 0; j < children.length; j++) {\n VirtualFileImpl child = myChildren[j];\n if (child.myName.equals(name)) index = 
j;"}}},{"rowIdx":2093,"cells":{"lang":{"kind":"string","value":"JavaScript"},"license":{"kind":"string","value":"mit"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"30faf46f0acf9c7777bae99ba401dbf32efbca5c"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"alpertuna/react-metismenu,alpertuna/react-metismenu"},"new_contents":{"kind":"string","value":"import React, {Component} from 'react'\nimport {render} from 'react-dom'\nimport MetisMenu from '../src/main.js'\nimport '../less/style.less'\n\nclass App extends Component{\n render(){\n var menu=[\n {\n icon: 'dashboard',\n label: 'Menu 1',\n href: '#menu-1'\n },\n {\n icon: 'bell',\n label: 'Menu 2',\n href: '#menu-2'\n },\n {\n icon: 'bolt',\n label: 'Menu 3',\n content: [\n {\n icon: 'bolt',\n label: 'Test',\n href: '#test'\n }\n ]\n },\n {\n icon: 'bars',\n label: 'Menu 4',\n content: [\n {\n icon: 'bold',\n label: 'Sub Menu 1',\n href: '#sub-menu-1'\n },\n {\n icon: 'italic',\n label: 'Sub Menu 2',\n content: [\n {\n icon: 'cog',\n label: 'Level 3 Menu 1',\n href: '#level-3-menu-1'\n },\n {\n icon: 'group',\n label: 'Level 3 Menu 2',\n href: '#level-3-menu-2'\n }\n ]\n },\n {\n icon: 'image',\n label: 'Sub Menu 3',\n content: [\n {\n icon: 'cog',\n label: 'Level 3 Menu 1',\n href: '#level-3-menu-1'\n },\n {\n icon: 'group',\n label: 'Level 3 Menu 2',\n href: '#level-3-menu-2'\n }\n ]\n },\n {\n icon: 'check',\n label: 'Sub Menu 4',\n href: '#sub-menu-4'\n }\n ]\n },\n {\n icon: 'user',\n label: 'Menu 5',\n href: '#menu-5'\n }\n ];\n\n return
      \n \n
      \n }\n}\n\nrender(, document.getElementById('root'));\n"},"new_file":{"kind":"string","value":"dev/App.js"},"old_contents":{"kind":"string","value":"import React, {Component} from 'react'\nimport {render} from 'react-dom'\nimport MetisMenu from '../src/main.js'\nimport '../less/style.less'\n\nclass App extends Component{\n render(){\n var menu=[\n {\n icon: 'dashboard',\n label: 'Menu 1',\n href: '#menu-1'\n },\n {\n icon: 'bell',\n label: 'Menu 2',\n href: '#menu-2'\n },\n {\n icon: 'bolt',\n label: 'Menu 3',\n content: [\n {\n icon: 'bolt',\n label: 'Test',\n href: '#test'\n }\n ]\n },\n {\n icon: 'bars',\n label: 'Menu 4',\n content: [\n {\n icon: 'bold',\n label: 'Sub Menu 1',\n href: '#sub-menu-1'\n },\n {\n icon: 'italic',\n label: 'Sub Menu 2',\n content: [\n {\n icon: 'cog',\n label: 'Level 3 Menu 1',\n href: '#level-3-menu-1'\n },\n {\n icon: 'group',\n label: 'Level 3 Menu 2',\n href: '#level-3-menu-2'\n }\n ]\n },\n {\n icon: 'image',\n label: 'Sub Menu 3',\n content: [\n {\n icon: 'cog',\n label: 'Level 3 Menu 1',\n href: '#level-3-menu-1'\n },\n {\n icon: 'group',\n label: 'Level 3 Menu 2',\n href: '#level-3-menu-2'\n }\n ]\n },\n {\n icon: 'check',\n label: 'Sub Menu 4',\n href: '#sub-menu-4'\n }\n ]\n },\n {\n icon: 'user',\n label: 'Menu 5',\n href: '#menu-5'\n }\n ];\n\n return
      \n \n
      \n }\n}\n\nrender(, document.getElementById('root'));\n"},"message":{"kind":"string","value":"Update dev/App.js\n"},"old_file":{"kind":"string","value":"dev/App.js"},"subject":{"kind":"string","value":"Update dev/App.js"},"git_diff":{"kind":"string","value":"ev/App.js\n \n return
      \n iconClassPrefix=\"fa fa-\"\n iconLevelDown=\"arrow-down\"\n iconLevelUp=\"arrow-up\"\n content={menu}\n />\n
      "}}},{"rowIdx":2094,"cells":{"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"bsd-3-clause"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"5cc0cd6ae5a66d1a2e1a7d1033b297309547e269"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"jCoderZ/fawkez-old,jCoderZ/fawkez-old,jCoderZ/fawkez-old"},"new_contents":{"kind":"string","value":"/*\r\n * $Id: LogMessageGenerator.java 1 2006-11-25 14:41:52Z amandel $\r\n *\r\n * Copyright 2006, The jCoderZ.org Project. All rights reserved.\r\n *\r\n * Redistribution and use in source and binary forms, with or without\r\n * modification, are permitted provided that the following conditions are\r\n * met:\r\n *\r\n * * Redistributions of source code must retain the above copyright\r\n * notice, this list of conditions and the following disclaimer.\r\n * * Redistributions in binary form must reproduce the above\r\n * copyright notice, this list of conditions and the following\r\n * disclaimer in the documentation and/or other materials\r\n * provided with the distribution.\r\n * * Neither the name of the jCoderZ.org Project nor the names of\r\n * its contributors may be used to endorse or promote products\r\n * derived from this software without specific prior written\r\n * permission.\r\n *\r\n * THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS \"AS IS\" AND\r\n * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\r\n * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR\r\n * PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE REGENTS AND CONTRIBUTORS\r\n * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR\r\n * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF\r\n * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR\r\n * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,\r\n * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR\r\n * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF\r\n * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\r\n */\r\n\r\npackage org.jcoderz.commons.taskdefs;\r\n\r\nimport java.io.ByteArrayOutputStream;\r\nimport java.io.File;\r\nimport java.io.FileNotFoundException;\r\nimport java.io.FileOutputStream;\r\nimport java.io.IOException;\r\nimport java.io.InputStream;\r\nimport java.io.OutputStream;\r\nimport java.net.HttpURLConnection;\r\nimport java.net.MalformedURLException;\r\nimport java.net.URL;\r\nimport java.util.ArrayList;\r\nimport java.util.Iterator;\r\nimport java.util.List;\r\n\r\nimport org.apache.tools.ant.BuildException;\r\nimport org.apache.tools.ant.Project;\r\nimport org.apache.tools.ant.Task;\r\n\r\nimport com.caucho.hessian.client.HessianProxyFactory;\r\nimport com.luntsys.luntbuild.facades.BuildParams;\r\nimport com.luntsys.luntbuild.facades.Constants;\r\nimport com.luntsys.luntbuild.facades.ILuntbuild;\r\nimport com.luntsys.luntbuild.facades.lb12.BuildFacade;\r\nimport com.luntsys.luntbuild.facades.lb12.ScheduleFacade;\r\n\r\n/**\r\n * Ant task to trigger a build on the Luntbuild system and download results\r\n * afterwards.\r\n * \r\n * @author Albrecht Messner\r\n */\r\npublic class LuntBuildTask\r\n extends Task\r\n{\r\n /**\r\n * Schedule start policy: allows multiple schedules to be running\r\n * simultaneously.\r\n */\r\n public static final String START_MULTIPLE = \"startMultiple\";\r\n /**\r\n * Schedule start policy: skips execution if schedule is currently running.\r\n */\r\n public static final String 
SKIP_IF_RUNNING = \"skipIfRunning\";\r\n /**\r\n * Schedule start policy: fails this task if schedule is currently running.\r\n */\r\n public static final String FAIL_IF_RUNNING = \"failIfRunning\";\r\n \r\n private static final List POLICY_LIST = new ArrayList();\r\n static\r\n {\r\n POLICY_LIST.add(START_MULTIPLE);\r\n POLICY_LIST.add(SKIP_IF_RUNNING);\r\n POLICY_LIST.add(FAIL_IF_RUNNING);\r\n }\r\n \r\n /** Wait time between kicking off schedule and start polling status\r\n * and between schedule termination and log retrieval. */\r\n private static final int WAIT_PERIOD = 5000;\r\n /** Interval to poll build server. */\r\n private static final int POLL_INTERVAL = 2000;\r\n /** Buffer size to read from HTTP stream when retrieving artifacts. */\r\n private static final int BUFFER_SIZE = 512;\r\n\r\n private String mLuntUrl;\r\n private String mUserName;\r\n private String mPassword;\r\n private String mProjectName;\r\n private String mScheduleName;\r\n private String mStartPolicy = FAIL_IF_RUNNING;\r\n private boolean mWaitForSchedule = true;\r\n private String mToDir;\r\n private final List mArtifacts = new ArrayList();\r\n\r\n private ILuntbuild mLuntServer;\r\n \r\n /**\r\n * @param luntUrl The luntUrl to set.\r\n */\r\n public void setLuntUrl (String luntUrl)\r\n {\r\n mLuntUrl = luntUrl;\r\n }\r\n\r\n /**\r\n * @param userName The userName to set.\r\n */\r\n public void setUserName (String userName)\r\n {\r\n mUserName = userName;\r\n }\r\n \r\n /**\r\n * @param password The password to set.\r\n */\r\n public void setPassword (String password)\r\n {\r\n mPassword = password;\r\n }\r\n\r\n /**\r\n * @param projectName The projectName to set.\r\n */\r\n public void setProjectName (String projectName)\r\n {\r\n mProjectName = projectName;\r\n }\r\n \r\n /**\r\n * @param scheduleName The scheduleName to set.\r\n */\r\n public void setScheduleName (String scheduleName)\r\n {\r\n mScheduleName = scheduleName;\r\n }\r\n \r\n /**\r\n * @param startPolicy The 
startPolicy to set.\r\n */\r\n public void setStartPolicy (String startPolicy)\r\n {\r\n if (!POLICY_LIST.contains(startPolicy))\r\n {\r\n throw new BuildException(\"Invalid start policy \" + startPolicy\r\n + \", must be one of \" + POLICY_LIST);\r\n }\r\n mStartPolicy = startPolicy;\r\n }\r\n \r\n /**\r\n * @param waitForSchedule The waitForSchedule to set.\r\n */\r\n public void setWaitForSchedule (boolean waitForSchedule)\r\n {\r\n mWaitForSchedule = waitForSchedule;\r\n }\r\n\r\n /**\r\n * @param toDir The toDir to set.\r\n */\r\n public void setToDir (String toDir)\r\n {\r\n mToDir = toDir;\r\n }\r\n\r\n /**\r\n * Adds an artifact for retrieval.\r\n * @param artifact\r\n */\r\n public void addArtifact (Artifact artifact)\r\n {\r\n mArtifacts.add(artifact);\r\n }\r\n \r\n public void execute () throws BuildException\r\n {\r\n checkParameters();\r\n try\r\n {\r\n \r\n final ScheduleFacade schedule = getSchedule();\r\n final boolean startSchedule;\r\n if (schedule.getStatus() == Constants.SCHEDULE_STATUS_RUNNING)\r\n {\r\n if (mStartPolicy.equals(START_MULTIPLE))\r\n {\r\n startSchedule = true;\r\n }\r\n else if (mStartPolicy.equals(SKIP_IF_RUNNING))\r\n {\r\n startSchedule = false; \r\n }\r\n else\r\n {\r\n throw new BuildException(\r\n \"Can't start build because schedule is already running\");\r\n }\r\n }\r\n else\r\n {\r\n startSchedule = true;\r\n }\r\n \r\n if (startSchedule)\r\n {\r\n startSchedule();\r\n }\r\n \r\n }\r\n catch (BuildException x)\r\n {\r\n throw x;\r\n }\r\n catch (Exception x)\r\n {\r\n throw new BuildException(x);\r\n }\r\n }\r\n\r\n private void startSchedule () throws InterruptedException, IOException\r\n {\r\n log(\"Starting build for \" + mProjectName + \"/\" + mScheduleName \r\n + \" on server \" + mLuntUrl, Project.MSG_INFO);\r\n getLuntServer().triggerBuild(mProjectName, mScheduleName, getBuildParams());\r\n\r\n if (mWaitForSchedule)\r\n {\r\n waitForSchedule();\r\n }\r\n }\r\n\r\n private void waitForSchedule () throws 
InterruptedException, IOException\r\n {\r\n Thread.sleep(WAIT_PERIOD);\r\n log(\"Waiting for build \"\r\n + getLuntServer().getLastBuild(mProjectName, mScheduleName).getVersion()\r\n + \" to finish\");\r\n \r\n while (getSchedule().getStatus() == Constants.SCHEDULE_STATUS_RUNNING)\r\n {\r\n log(\"Schedule running\", Project.MSG_VERBOSE);\r\n Thread.sleep(POLL_INTERVAL);\r\n }\r\n final int termStatus = getSchedule().getStatus();\r\n switch (termStatus)\r\n {\r\n case Constants.SCHEDULE_STATUS_SUCCESS:\r\n log(\"LuntBuild schedule \" + mProjectName + \"/\" + mScheduleName\r\n + \" succeeded\");\r\n dumpLogFile();\r\n retrieveArtifacts();\r\n break;\r\n case Constants.SCHEDULE_STATUS_FAILED:\r\n log(\"LuntBuild schedule \" + mProjectName + \"/\" + mScheduleName\r\n + \" FAILED\");\r\n dumpLogFile();\r\n throw new BuildException(\"LuntBuild schedule \" + mProjectName + \"/\"\r\n + mScheduleName + \" FAILED\");\r\n default:\r\n throw new BuildException(\"Unexpected status for schedule \"\r\n + mProjectName + \"/\" + mScheduleName + \": \" + termStatus);\r\n }\r\n }\r\n\r\n private ScheduleFacade getSchedule () throws MalformedURLException\r\n {\r\n return getLuntServer().getScheduleByName(mProjectName, mScheduleName);\r\n }\r\n\r\n /**\r\n * @throws IOException \r\n * \r\n */\r\n private void retrieveArtifacts () throws IOException\r\n {\r\n final BuildFacade currentBuild\r\n = getLuntServer().getLastBuild(mProjectName, mScheduleName);\r\n final String buildLogUrl = currentBuild.getBuildLogUrl();\r\n final String path = buildLogUrl.substring(0, buildLogUrl.lastIndexOf('/'));\r\n final String artifactsBaseUrl = path + \"/artifacts/\";\r\n log(\"Artifacts base URL: \" + artifactsBaseUrl, Project.MSG_VERBOSE);\r\n HttpURLConnection.setFollowRedirects(true);\r\n \r\n for (final Iterator it = mArtifacts.iterator(); it.hasNext(); )\r\n {\r\n final String artifactName = ((Artifact)it.next()).getName();\r\n final File outputFile = new File(new File(mToDir), artifactName);\r\n 
if (outputFile.exists())\r\n {\r\n throw new BuildException(\"Output file \" + outputFile\r\n + \" already exists\");\r\n }\r\n final String artifactUrl = artifactsBaseUrl + artifactName;\r\n log(\"Retrieving artifact \" + artifactName);\r\n log(\"Retrieving from URL: \" + artifactUrl, Project.MSG_VERBOSE);\r\n log(\"Writing to file: \" + mToDir + File.separator + outputFile,\r\n Project.MSG_VERBOSE);\r\n final HttpURLConnection con\r\n = (HttpURLConnection)new URL(artifactUrl).openConnection();\r\n con.setDoOutput(true);\r\n con.addRequestProperty(\"Keep-alive\", \"false\");\r\n \r\n con.connect();\r\n if (con.getResponseCode() != HttpURLConnection.HTTP_OK)\r\n {\r\n throw new BuildException(\"Failed while retrieving artifact \"\r\n + artifactUrl + \": \" + con.getResponseMessage());\r\n }\r\n \r\n writeArtifactToFile(outputFile, con);\r\n }\r\n }\r\n\r\n private void writeArtifactToFile (\r\n final File outputFile, final HttpURLConnection con)\r\n throws IOException, FileNotFoundException\r\n {\r\n int read = 0;\r\n final byte[] buf = new byte[BUFFER_SIZE];\r\n InputStream artifactInput = null;\r\n OutputStream artifactOutput = null;\r\n try\r\n {\r\n artifactInput = con.getInputStream();\r\n artifactOutput = new FileOutputStream(outputFile);\r\n while ((read = artifactInput.read(buf)) > 0)\r\n {\r\n artifactOutput.write(buf, 0, read);\r\n }\r\n }\r\n finally\r\n {\r\n close(artifactInput);\r\n close(artifactOutput);\r\n }\r\n }\r\n\r\n private void close (InputStream inputStream)\r\n {\r\n if (inputStream != null)\r\n {\r\n try\r\n {\r\n inputStream.close();\r\n }\r\n catch (IOException x)\r\n {\r\n log(\"Failed to close java.io.InputStream: \" + x.getMessage(),\r\n Project.MSG_WARN);\r\n }\r\n }\r\n }\r\n\r\n private void close (OutputStream outputStream)\r\n {\r\n if (outputStream != null)\r\n {\r\n try\r\n {\r\n outputStream.close();\r\n }\r\n catch (IOException x)\r\n {\r\n log(\"Failed to close java.io.OutputStream: \" + x.getMessage(),\r\n 
Project.MSG_WARN);\r\n }\r\n }\r\n }\r\n\r\n /**\r\n * @throws InterruptedException \r\n * @throws IOException \r\n * \r\n */\r\n private void dumpLogFile () throws InterruptedException, IOException\r\n {\r\n Thread.sleep(WAIT_PERIOD);\r\n final BuildFacade currentBuild\r\n = getLuntServer().getLastBuild(mProjectName, mScheduleName);\r\n\r\n final String buildLogUrlHtml = currentBuild.getBuildLogUrl();\r\n log(\"Build log URL (HTML format): \" + buildLogUrlHtml, Project.MSG_VERBOSE);\r\n final String buildLogUrlTxt = buildLogUrlHtml.substring(0,\r\n buildLogUrlHtml.lastIndexOf(\".html\")) + \".txt\";\r\n log(\"Build log URL (Text format): \" + buildLogUrlTxt, Project.MSG_VERBOSE);\r\n\r\n final URL buildLog = new URL(buildLogUrlTxt);\r\n final HttpURLConnection con = (HttpURLConnection)buildLog.openConnection();\r\n log(\"Got HTTP code \" + con.getResponseCode(), Project.MSG_VERBOSE);\r\n\r\n final InputStream is = con.getInputStream();\r\n int read = 0;\r\n final byte[] buf = new byte[256];\r\n final ByteArrayOutputStream bout = new ByteArrayOutputStream();\r\n while ((read = is.read(buf)) > 0)\r\n {\r\n bout.write(buf, 0, read);\r\n }\r\n final String buildLogData = new String(bout.toByteArray());\r\n log(\"===== START Build log =====\", Project.MSG_INFO);\r\n log(buildLogData, Project.MSG_INFO);\r\n log(\"===== END Build log =====\", Project.MSG_INFO);\r\n }\r\n\r\n private BuildParams getBuildParams ()\r\n {\r\n final BuildParams params = new BuildParams();\r\n \r\n params.setBuildNecessaryCondition(\"always\");\r\n params.setBuildType(Constants.BUILD_TYPE_CLEAN);\r\n params.setLabelStrategy(Constants.LABEL_NONE);\r\n params.setNotifyStrategy(Constants.NOTIFY_NONE);\r\n params.setPostbuildStrategy(Constants.POSTBUILD_NONE);\r\n // params.setScheduleId()\r\n params.setTriggerDependencyStrategy(\r\n Constants.TRIGGER_NONE_DEPENDENT_SCHEDULES);\r\n \r\n return params;\r\n }\r\n \r\n /**\r\n * @param luntUrl\r\n */\r\n private void checkNotNull (Object obj, String 
name) \r\n {\r\n if (obj == null)\r\n {\r\n throw new BuildException(\"Parameter \" + name + \" missing\");\r\n }\r\n }\r\n\r\n /**\r\n * \r\n */\r\n private void checkParameters ()\r\n {\r\n checkNotNull(mLuntUrl, \"luntUrl\");\r\n checkNotNull(mUserName, \"userName\");\r\n checkNotNull(mPassword, \"password\");\r\n checkNotNull(mProjectName, \"projectName\");\r\n checkNotNull(mScheduleName, \"scheduleName\");\r\n if (mArtifacts.size() > 0)\r\n {\r\n if (mToDir == null)\r\n {\r\n throw new BuildException(\"'toDir' must be set if artifacts are set\");\r\n }\r\n else\r\n {\r\n AntTaskUtil.ensureDirectory(new File(mToDir));\r\n }\r\n if (!mWaitForSchedule)\r\n {\r\n throw new BuildException(\r\n \"Can't retrieve artifacts when waitForBuild == false\");\r\n }\r\n }\r\n }\r\n\r\n private ILuntbuild getLuntServer () throws MalformedURLException\r\n {\r\n if (mLuntServer == null)\r\n {\r\n HessianProxyFactory factory = new HessianProxyFactory();\r\n factory.setUser(mUserName);\r\n factory.setPassword(mPassword);\r\n mLuntServer = (ILuntbuild)factory.create(ILuntbuild.class, mLuntUrl);\r\n }\r\n return mLuntServer;\r\n }\r\n \r\n public static final class Artifact\r\n {\r\n private String mName;\r\n \r\n /**\r\n * @param name The name to set.\r\n */\r\n public void setName (String name)\r\n {\r\n mName = name;\r\n }\r\n \r\n /**\r\n * @return Returns the name.\r\n */\r\n public String getName ()\r\n {\r\n return mName;\r\n }\r\n }\r\n}\r\n"},"new_file":{"kind":"string","value":"src/java/org/jcoderz/commons/taskdefs/LuntBuildTask.java"},"old_contents":{"kind":"string","value":"//\r\n// Copyright (C) 2006 Media Saturn Systemzentrale. 
All rights reserved.\r\n//\r\n// $Project: Inventory$\r\n// $Revision:$\r\n// $Date:$\r\n// $Log[10]$\r\n//\r\n\r\npackage org.jcoderz.commons.taskdefs;\r\n\r\nimport java.io.ByteArrayOutputStream;\r\nimport java.io.File;\r\nimport java.io.FileNotFoundException;\r\nimport java.io.FileOutputStream;\r\nimport java.io.IOException;\r\nimport java.io.InputStream;\r\nimport java.io.OutputStream;\r\nimport java.net.HttpURLConnection;\r\nimport java.net.MalformedURLException;\r\nimport java.net.URL;\r\nimport java.util.ArrayList;\r\nimport java.util.Iterator;\r\nimport java.util.List;\r\n\r\nimport org.apache.tools.ant.BuildException;\r\nimport org.apache.tools.ant.DefaultLogger;\r\nimport org.apache.tools.ant.Project;\r\nimport org.apache.tools.ant.Task;\r\n\r\nimport com.caucho.hessian.client.HessianProxyFactory;\r\nimport com.luntsys.luntbuild.facades.BuildParams;\r\nimport com.luntsys.luntbuild.facades.Constants;\r\nimport com.luntsys.luntbuild.facades.ILuntbuild;\r\nimport com.luntsys.luntbuild.facades.lb12.BuildFacade;\r\nimport com.luntsys.luntbuild.facades.lb12.ScheduleFacade;\r\n\r\n/**\r\n *\r\n */\r\npublic class LuntBuildTask\r\n extends Task\r\n{\r\n /** Schedule start policy: allows multiple schedules to be running simultaneously. */\r\n public static final String START_MULTIPLE = \"startMultiple\";\r\n /** Schedule start policy: skips execution if schedule is currently running. */\r\n public static final String SKIP_IF_RUNNING = \"skipIfRunning\";\r\n /** Schedule start policy: fails this task if schedule is currently running. */\r\n public static final String FAIL_IF_RUNNING = \"failIfRunning\";\r\n \r\n private static final List POLICY_LIST = new ArrayList();\r\n static\r\n {\r\n POLICY_LIST.add(START_MULTIPLE);\r\n POLICY_LIST.add(SKIP_IF_RUNNING);\r\n POLICY_LIST.add(FAIL_IF_RUNNING);\r\n }\r\n \r\n /** Wait time between kicking off schedule and start polling status\r\n * and between schedule termination and log retrieval. 
*/\r\n private static final int WAIT_PERIOD = 5000;\r\n /** Interval to poll build server. */\r\n private static final int POLL_INTERVAL = 2000;\r\n /** Buffer size to read from HTTP stream when retrieving artifacts. */\r\n private static final int BUFFER_SIZE = 512;\r\n\r\n private String mLuntUrl;\r\n private String mUserName;\r\n private String mPassword;\r\n private String mProjectName;\r\n private String mScheduleName;\r\n private String mStartPolicy = FAIL_IF_RUNNING;\r\n private boolean mWaitForSchedule = true;\r\n private String mToDir;\r\n private List mArtifacts = new ArrayList();\r\n\r\n private ILuntbuild mLuntServer;\r\n \r\n /**\r\n * @param luntUrl The luntUrl to set.\r\n */\r\n public void setLuntUrl(String luntUrl)\r\n {\r\n mLuntUrl = luntUrl;\r\n }\r\n\r\n /**\r\n * @param userName The userName to set.\r\n */\r\n public void setUserName(String userName)\r\n {\r\n mUserName = userName;\r\n }\r\n \r\n /**\r\n * @param password The password to set.\r\n */\r\n public void setPassword(String password)\r\n {\r\n mPassword = password;\r\n }\r\n\r\n /**\r\n * @param projectName The projectName to set.\r\n */\r\n public void setProjectName(String projectName)\r\n {\r\n mProjectName = projectName;\r\n }\r\n \r\n /**\r\n * @param scheduleName The scheduleName to set.\r\n */\r\n public void setScheduleName(String scheduleName)\r\n {\r\n mScheduleName = scheduleName;\r\n }\r\n \r\n /**\r\n * @param startPolicy The startPolicy to set.\r\n */\r\n public void setStartPolicy(String startPolicy)\r\n {\r\n if (!POLICY_LIST.contains(startPolicy))\r\n {\r\n throw new BuildException(\"Invalid start policy \" + startPolicy + \", must be one of \" + POLICY_LIST);\r\n }\r\n mStartPolicy = startPolicy;\r\n }\r\n \r\n /**\r\n * @param waitForSchedule The waitForSchedule to set.\r\n */\r\n public void setWaitForSchedule(boolean waitForSchedule)\r\n {\r\n mWaitForSchedule = waitForSchedule;\r\n }\r\n\r\n /**\r\n * @param toDir The toDir to set.\r\n */\r\n public void 
setToDir(String toDir)\r\n {\r\n mToDir = toDir;\r\n }\r\n\r\n /**\r\n * Adds an artifact for retrieval.\r\n * @param artifact\r\n */\r\n public void addArtifact(Artifact artifact)\r\n {\r\n mArtifacts.add(artifact);\r\n }\r\n \r\n public void execute() throws BuildException\r\n {\r\n checkParameters();\r\n try\r\n {\r\n \r\n final ScheduleFacade schedule = getSchedule();\r\n final boolean startSchedule;\r\n if (schedule.getStatus() == Constants.SCHEDULE_STATUS_RUNNING)\r\n {\r\n if (mStartPolicy.equals(START_MULTIPLE))\r\n {\r\n startSchedule = true;\r\n }\r\n else if (mStartPolicy.equals(SKIP_IF_RUNNING))\r\n {\r\n startSchedule = false; \r\n }\r\n else\r\n {\r\n throw new BuildException(\"Can't start build because schedule is already running\");\r\n }\r\n }\r\n else\r\n {\r\n startSchedule = true;\r\n }\r\n \r\n if (startSchedule)\r\n {\r\n startSchedule();\r\n }\r\n \r\n }\r\n catch (BuildException x)\r\n {\r\n throw x;\r\n }\r\n catch (Exception x)\r\n {\r\n throw new BuildException(x);\r\n }\r\n }\r\n\r\n private void startSchedule() throws InterruptedException, IOException\r\n {\r\n log(\"Starting build for \" + mProjectName + \"/\" + mScheduleName + \" on server \" + mLuntUrl, Project.MSG_INFO);\r\n getLuntServer().triggerBuild(mProjectName, mScheduleName, getBuildParams());\r\n\r\n if (mWaitForSchedule)\r\n {\r\n waitForSchedule();\r\n }\r\n }\r\n\r\n private void waitForSchedule() throws InterruptedException, IOException\r\n {\r\n Thread.sleep(WAIT_PERIOD);\r\n log(\"Waiting for build \" + getLuntServer().getLastBuild(mProjectName, mScheduleName).getVersion() + \" to finish\");\r\n \r\n while (getSchedule().getStatus() == Constants.SCHEDULE_STATUS_RUNNING)\r\n {\r\n log(\"Schedule running\", Project.MSG_VERBOSE);\r\n Thread.sleep(POLL_INTERVAL);\r\n }\r\n final int termStatus = getSchedule().getStatus();\r\n switch (termStatus)\r\n {\r\n case Constants.SCHEDULE_STATUS_SUCCESS:\r\n log(\"LuntBuild schedule \" + mProjectName + \"/\" + mScheduleName + \" 
succeeded\");\r\n dumpLogFile();\r\n retrieveArtifacts();\r\n break;\r\n case Constants.SCHEDULE_STATUS_FAILED:\r\n log(\"LuntBuild schedule \" + mProjectName + \"/\" + mScheduleName + \" FAILED\");\r\n dumpLogFile();\r\n throw new BuildException(\"LuntBuild schedule \" + mProjectName + \"/\" + mScheduleName + \" FAILED\");\r\n default:\r\n throw new BuildException(\"Unexpected status for schedule \" + mProjectName + \"/\" + mScheduleName + \": \" + termStatus);\r\n }\r\n }\r\n\r\n private ScheduleFacade getSchedule() throws MalformedURLException\r\n {\r\n return getLuntServer().getScheduleByName(mProjectName, mScheduleName);\r\n }\r\n\r\n /**\r\n * @throws IOException \r\n * \r\n */\r\n private void retrieveArtifacts() throws IOException\r\n {\r\n final BuildFacade currentBuild = getLuntServer().getLastBuild(mProjectName, mScheduleName);\r\n final String buildLogUrl = currentBuild.getBuildLogUrl();\r\n final String path = buildLogUrl.substring(0, buildLogUrl.lastIndexOf('/'));\r\n final String artifactsBaseUrl = path + \"/artifacts/\";\r\n log(\"Artifacts base URL: \" + artifactsBaseUrl, Project.MSG_VERBOSE);\r\n HttpURLConnection.setFollowRedirects(true);\r\n \r\n for (final Iterator it = mArtifacts.iterator(); it.hasNext(); )\r\n {\r\n final String artifactName = ((Artifact)it.next()).getName();\r\n final File outputFile = new File(new File(mToDir), artifactName);\r\n if (outputFile.exists())\r\n {\r\n throw new BuildException(\"Output file \" + outputFile + \" already exists\");\r\n }\r\n final String artifactUrl = artifactsBaseUrl + artifactName;\r\n log(\"Retrieving artifact \" + artifactName);\r\n log(\"Retrieving from URL: \" + artifactUrl, Project.MSG_VERBOSE);\r\n log(\"Writing to file: \" + mToDir + File.separator + outputFile, Project.MSG_VERBOSE);\r\n final HttpURLConnection con = (HttpURLConnection)new URL(artifactUrl).openConnection();\r\n con.setDoOutput(true);\r\n con.addRequestProperty(\"Keep-alive\", \"false\");\r\n \r\n con.connect();\r\n if 
(con.getResponseCode() != HttpURLConnection.HTTP_OK)\r\n {\r\n throw new BuildException(\"Failed while retrieving artifact \" + artifactUrl + \": \" + con.getResponseMessage());\r\n }\r\n \r\n writeArtifactToFile(outputFile, con);\r\n }\r\n }\r\n\r\n private void writeArtifactToFile(final File outputFile, final HttpURLConnection con) throws IOException, FileNotFoundException\r\n {\r\n int read = 0;\r\n final byte[] buf = new byte[BUFFER_SIZE];\r\n InputStream artifactInput = null;\r\n OutputStream artifactOutput = null;\r\n try\r\n {\r\n artifactInput = con.getInputStream();\r\n artifactOutput = new FileOutputStream(outputFile);\r\n while ((read = artifactInput.read(buf)) > 0)\r\n {\r\n artifactOutput.write(buf, 0, read);\r\n }\r\n }\r\n finally\r\n {\r\n close(artifactInput);\r\n close(artifactOutput);\r\n }\r\n }\r\n\r\n private void close(InputStream inputStream)\r\n {\r\n if (inputStream != null)\r\n {\r\n try\r\n {\r\n inputStream.close();\r\n }\r\n catch (IOException x)\r\n {\r\n log(\"Failed to close java.io.InputStream: \" + x.getMessage(), Project.MSG_WARN);\r\n }\r\n }\r\n }\r\n\r\n private void close(OutputStream outputStream)\r\n {\r\n if (outputStream != null)\r\n {\r\n try\r\n {\r\n outputStream.close();\r\n }\r\n catch (IOException x)\r\n {\r\n log(\"Failed to close java.io.OutputStream: \" + x.getMessage(), Project.MSG_WARN);\r\n }\r\n }\r\n }\r\n\r\n /**\r\n * @throws InterruptedException \r\n * @throws IOException \r\n * \r\n */\r\n private void dumpLogFile() throws InterruptedException, IOException\r\n {\r\n Thread.sleep(WAIT_PERIOD);\r\n final BuildFacade currentBuild = getLuntServer().getLastBuild(mProjectName, mScheduleName);\r\n\r\n final String buildLogUrlHtml = currentBuild.getBuildLogUrl();\r\n log(\"Build log URL (HTML format): \" + buildLogUrlHtml, Project.MSG_VERBOSE);\r\n final String buildLogUrlTxt = buildLogUrlHtml.substring(0, buildLogUrlHtml.lastIndexOf(\".html\")) + \".txt\";\r\n log(\"Build log URL (Text format): \" + 
buildLogUrlTxt, Project.MSG_VERBOSE);\r\n\r\n final URL buildLog = new URL(buildLogUrlTxt);\r\n final HttpURLConnection con = (HttpURLConnection)buildLog.openConnection();\r\n log(\"Got HTTP code \" + con.getResponseCode(), Project.MSG_VERBOSE);\r\n\r\n final InputStream is = con.getInputStream();\r\n int read = 0;\r\n final byte[] buf = new byte[256];\r\n final ByteArrayOutputStream bout = new ByteArrayOutputStream();\r\n while ((read = is.read(buf)) > 0)\r\n {\r\n bout.write(buf, 0, read);\r\n }\r\n final String buildLogData = new String(bout.toByteArray());\r\n log(\"===== START Build log =====\", Project.MSG_INFO);\r\n log(buildLogData, Project.MSG_INFO);\r\n log(\"===== END Build log =====\", Project.MSG_INFO);\r\n }\r\n\r\n private BuildParams getBuildParams()\r\n {\r\n final BuildParams params = new BuildParams();\r\n \r\n params.setBuildNecessaryCondition(\"always\");\r\n params.setBuildType(Constants.BUILD_TYPE_CLEAN);\r\n params.setLabelStrategy(Constants.LABEL_NONE);\r\n params.setNotifyStrategy(Constants.NOTIFY_NONE);\r\n params.setPostbuildStrategy(Constants.POSTBUILD_NONE);\r\n // params.setScheduleId()\r\n params.setTriggerDependencyStrategy(Constants.TRIGGER_NONE_DEPENDENT_SCHEDULES);\r\n \r\n return params;\r\n }\r\n \r\n /**\r\n * @param luntUrl\r\n */\r\n private void checkNotNull(Object obj, String name) \r\n {\r\n if (obj == null)\r\n {\r\n throw new BuildException(\"Parameter \" + name + \" missing\");\r\n }\r\n }\r\n\r\n /**\r\n * \r\n */\r\n private void checkParameters()\r\n {\r\n checkNotNull(mLuntUrl, \"luntUrl\");\r\n checkNotNull(mUserName, \"userName\");\r\n checkNotNull(mPassword, \"password\");\r\n checkNotNull(mProjectName, \"projectName\");\r\n checkNotNull(mScheduleName, \"scheduleName\");\r\n if (mArtifacts.size() > 0)\r\n {\r\n if (mToDir == null)\r\n {\r\n throw new BuildException(\"'toDir' must be set if artifacts are set\");\r\n }\r\n else\r\n {\r\n AntTaskUtil.ensureDirectory(new File(mToDir));\r\n }\r\n if 
(!mWaitForSchedule)\r\n {\r\n throw new BuildException(\"Can't retrieve artifacts when waitForBuild == false\");\r\n }\r\n }\r\n }\r\n\r\n private ILuntbuild getLuntServer() throws MalformedURLException\r\n {\r\n if (mLuntServer == null)\r\n {\r\n HessianProxyFactory factory = new HessianProxyFactory();\r\n factory.setUser(mUserName);\r\n factory.setPassword(mPassword);\r\n mLuntServer = (ILuntbuild)factory.create(ILuntbuild.class, mLuntUrl);\r\n }\r\n return mLuntServer;\r\n }\r\n \r\n public static final class Artifact\r\n {\r\n private String mName;\r\n \r\n /**\r\n * @param name The name to set.\r\n */\r\n public void setName(String name)\r\n {\r\n mName = name;\r\n }\r\n \r\n /**\r\n * @return Returns the name.\r\n */\r\n public String getName()\r\n {\r\n return mName;\r\n }\r\n }\r\n\r\n public static void main(String[] args)\r\n {\r\n final Project dummy = new Project();\r\n DefaultLogger logger = new DefaultLogger();\r\n\r\n logger.setMessageOutputLevel(Project.MSG_INFO);\r\n logger.setOutputPrintStream(System.out);\r\n logger.setErrorPrintStream(System.err);\r\n dummy.addBuildListener(logger);\r\n \r\n final LuntBuildTask lbr = new LuntBuildTask();\r\n lbr.setProject(dummy);\r\n lbr.setProjectName(\"test\");\r\n lbr.setScheduleName(\"on-demand\");\r\n lbr.setLuntUrl(\"http://dev130wks0007:8080/luntbuild/app.do?service=hessian\");\r\n lbr.setUserName(\"luntbuild\");\r\n lbr.setPassword(\"geheim42\");\r\n\r\n final Artifact a1 = new Artifact();\r\n a1.setName(\"artifact1.txt\");\r\n lbr.addArtifact(a1);\r\n final Artifact a2 = new Artifact();\r\n a2.setName(\"artifact2.txt\");\r\n lbr.addArtifact(a2);\r\n\r\n lbr.setToDir(\"C:\\\\temp\");\r\n lbr.execute();\r\n }\r\n}\r\n"},"message":{"kind":"string","value":"code cleanup\n"},"old_file":{"kind":"string","value":"src/java/org/jcoderz/commons/taskdefs/LuntBuildTask.java"},"subject":{"kind":"string","value":"code 
cleanup"},"git_diff":{"kind":"string","value":"rc/java/org/jcoderz/commons/taskdefs/LuntBuildTask.java\n//\n// Copyright (C) 2006 Media Saturn Systemzentrale. All rights reserved.\n//\n// $Project: Inventory$\n// $Revision:$\n// $Date:$\n// $Log[10]$\n//\n/*\n * $Id: LogMessageGenerator.java 1 2006-11-25 14:41:52Z amandel $\n *\n * Copyright 2006, The jCoderZ.org Project. All rights reserved.\n *\n * Redistribution and use in source and binary forms, with or without\n * modification, are permitted provided that the following conditions are\n * met:\n *\n * * Redistributions of source code must retain the above copyright\n * notice, this list of conditions and the following disclaimer.\n * * Redistributions in binary form must reproduce the above\n * copyright notice, this list of conditions and the following\n * disclaimer in the documentation and/or other materials\n * provided with the distribution.\n * * Neither the name of the jCoderZ.org Project nor the names of\n * its contributors may be used to endorse or promote products\n * derived from this software without specific prior written\n * permission.\n *\n * THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS \"AS IS\" AND\n * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR\n * PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE REGENTS AND CONTRIBUTORS\n * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR\n * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF\n * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR\n * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,\n * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR\n * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF\n * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n */\n \n package org.jcoderz.commons.taskdefs;\n \n import java.util.List;\n \n import org.apache.tools.ant.BuildException;\nimport org.apache.tools.ant.DefaultLogger;\n import org.apache.tools.ant.Project;\n import org.apache.tools.ant.Task;\n \n import com.luntsys.luntbuild.facades.lb12.ScheduleFacade;\n \n /**\n *\n * Ant task to trigger a build on the Luntbuild system and download results\n * afterwards.\n * \n * @author Albrecht Messner\n */\n public class LuntBuildTask\n extends Task\n {\n /** Schedule start policy: allows multiple schedules to be running simultaneously. */\n /**\n * Schedule start policy: allows multiple schedules to be running\n * simultaneously.\n */\n public static final String START_MULTIPLE = \"startMultiple\";\n /** Schedule start policy: skips execution if schedule is currently running. */\n /**\n * Schedule start policy: skips execution if schedule is currently running.\n */\n public static final String SKIP_IF_RUNNING = \"skipIfRunning\";\n /** Schedule start policy: fails this task if schedule is currently running. 
*/\n /**\n * Schedule start policy: fails this task if schedule is currently running.\n */\n public static final String FAIL_IF_RUNNING = \"failIfRunning\";\n \n private static final List POLICY_LIST = new ArrayList();\n private String mStartPolicy = FAIL_IF_RUNNING;\n private boolean mWaitForSchedule = true;\n private String mToDir;\n private List mArtifacts = new ArrayList();\n private final List mArtifacts = new ArrayList();\n \n private ILuntbuild mLuntServer;\n \n /**\n * @param luntUrl The luntUrl to set.\n */\n public void setLuntUrl(String luntUrl)\n public void setLuntUrl (String luntUrl)\n {\n mLuntUrl = luntUrl;\n }\n /**\n * @param userName The userName to set.\n */\n public void setUserName(String userName)\n public void setUserName (String userName)\n {\n mUserName = userName;\n }\n /**\n * @param password The password to set.\n */\n public void setPassword(String password)\n public void setPassword (String password)\n {\n mPassword = password;\n }\n /**\n * @param projectName The projectName to set.\n */\n public void setProjectName(String projectName)\n public void setProjectName (String projectName)\n {\n mProjectName = projectName;\n }\n /**\n * @param scheduleName The scheduleName to set.\n */\n public void setScheduleName(String scheduleName)\n public void setScheduleName (String scheduleName)\n {\n mScheduleName = scheduleName;\n }\n /**\n * @param startPolicy The startPolicy to set.\n */\n public void setStartPolicy(String startPolicy)\n public void setStartPolicy (String startPolicy)\n {\n if (!POLICY_LIST.contains(startPolicy))\n {\n throw new BuildException(\"Invalid start policy \" + startPolicy + \", must be one of \" + POLICY_LIST);\n throw new BuildException(\"Invalid start policy \" + startPolicy\n + \", must be one of \" + POLICY_LIST);\n }\n mStartPolicy = startPolicy;\n }\n /**\n * @param waitForSchedule The waitForSchedule to set.\n */\n public void setWaitForSchedule(boolean waitForSchedule)\n public void setWaitForSchedule 
(boolean waitForSchedule)\n {\n mWaitForSchedule = waitForSchedule;\n }\n /**\n * @param toDir The toDir to set.\n */\n public void setToDir(String toDir)\n public void setToDir (String toDir)\n {\n mToDir = toDir;\n }\n * Adds an artifact for retrieval.\n * @param artifact\n */\n public void addArtifact(Artifact artifact)\n public void addArtifact (Artifact artifact)\n {\n mArtifacts.add(artifact);\n }\n \n public void execute() throws BuildException\n public void execute () throws BuildException\n {\n checkParameters();\n try\n }\n else\n {\n throw new BuildException(\"Can't start build because schedule is already running\");\n throw new BuildException(\n \"Can't start build because schedule is already running\");\n }\n }\n else\n }\n }\n \n private void startSchedule() throws InterruptedException, IOException\n {\n log(\"Starting build for \" + mProjectName + \"/\" + mScheduleName + \" on server \" + mLuntUrl, Project.MSG_INFO);\n private void startSchedule () throws InterruptedException, IOException\n {\n log(\"Starting build for \" + mProjectName + \"/\" + mScheduleName \n + \" on server \" + mLuntUrl, Project.MSG_INFO);\n getLuntServer().triggerBuild(mProjectName, mScheduleName, getBuildParams());\n \n if (mWaitForSchedule)\n }\n }\n \n private void waitForSchedule() throws InterruptedException, IOException\n private void waitForSchedule () throws InterruptedException, IOException\n {\n Thread.sleep(WAIT_PERIOD);\n log(\"Waiting for build \" + getLuntServer().getLastBuild(mProjectName, mScheduleName).getVersion() + \" to finish\");\n log(\"Waiting for build \"\n + getLuntServer().getLastBuild(mProjectName, mScheduleName).getVersion()\n + \" to finish\");\n \n while (getSchedule().getStatus() == Constants.SCHEDULE_STATUS_RUNNING)\n {\n switch (termStatus)\n {\n case Constants.SCHEDULE_STATUS_SUCCESS:\n log(\"LuntBuild schedule \" + mProjectName + \"/\" + mScheduleName + \" succeeded\");\n log(\"LuntBuild schedule \" + mProjectName + \"/\" + mScheduleName\n + 
\" succeeded\");\n dumpLogFile();\n retrieveArtifacts();\n break;\n case Constants.SCHEDULE_STATUS_FAILED:\n log(\"LuntBuild schedule \" + mProjectName + \"/\" + mScheduleName + \" FAILED\");\n log(\"LuntBuild schedule \" + mProjectName + \"/\" + mScheduleName\n + \" FAILED\");\n dumpLogFile();\n throw new BuildException(\"LuntBuild schedule \" + mProjectName + \"/\" + mScheduleName + \" FAILED\");\n throw new BuildException(\"LuntBuild schedule \" + mProjectName + \"/\"\n + mScheduleName + \" FAILED\");\n default:\n throw new BuildException(\"Unexpected status for schedule \" + mProjectName + \"/\" + mScheduleName + \": \" + termStatus);\n }\n }\n\n private ScheduleFacade getSchedule() throws MalformedURLException\n throw new BuildException(\"Unexpected status for schedule \"\n + mProjectName + \"/\" + mScheduleName + \": \" + termStatus);\n }\n }\n\n private ScheduleFacade getSchedule () throws MalformedURLException\n {\n return getLuntServer().getScheduleByName(mProjectName, mScheduleName);\n }\n * @throws IOException \n * \n */\n private void retrieveArtifacts() throws IOException\n {\n final BuildFacade currentBuild = getLuntServer().getLastBuild(mProjectName, mScheduleName);\n private void retrieveArtifacts () throws IOException\n {\n final BuildFacade currentBuild\n = getLuntServer().getLastBuild(mProjectName, mScheduleName);\n final String buildLogUrl = currentBuild.getBuildLogUrl();\n final String path = buildLogUrl.substring(0, buildLogUrl.lastIndexOf('/'));\n final String artifactsBaseUrl = path + \"/artifacts/\";\n final File outputFile = new File(new File(mToDir), artifactName);\n if (outputFile.exists())\n {\n throw new BuildException(\"Output file \" + outputFile + \" already exists\");\n throw new BuildException(\"Output file \" + outputFile\n + \" already exists\");\n }\n final String artifactUrl = artifactsBaseUrl + artifactName;\n log(\"Retrieving artifact \" + artifactName);\n log(\"Retrieving from URL: \" + artifactUrl, Project.MSG_VERBOSE);\n 
log(\"Writing to file: \" + mToDir + File.separator + outputFile, Project.MSG_VERBOSE);\n final HttpURLConnection con = (HttpURLConnection)new URL(artifactUrl).openConnection();\n log(\"Writing to file: \" + mToDir + File.separator + outputFile,\n Project.MSG_VERBOSE);\n final HttpURLConnection con\n = (HttpURLConnection)new URL(artifactUrl).openConnection();\n con.setDoOutput(true);\n con.addRequestProperty(\"Keep-alive\", \"false\");\n \n con.connect();\n if (con.getResponseCode() != HttpURLConnection.HTTP_OK)\n {\n throw new BuildException(\"Failed while retrieving artifact \" + artifactUrl + \": \" + con.getResponseMessage());\n throw new BuildException(\"Failed while retrieving artifact \"\n + artifactUrl + \": \" + con.getResponseMessage());\n }\n \n writeArtifactToFile(outputFile, con);\n }\n }\n \n private void writeArtifactToFile(final File outputFile, final HttpURLConnection con) throws IOException, FileNotFoundException\n private void writeArtifactToFile (\n final File outputFile, final HttpURLConnection con)\n throws IOException, FileNotFoundException\n {\n int read = 0;\n final byte[] buf = new byte[BUFFER_SIZE];\n }\n }\n \n private void close(InputStream inputStream)\n private void close (InputStream inputStream)\n {\n if (inputStream != null)\n {\n }\n catch (IOException x)\n {\n log(\"Failed to close java.io.InputStream: \" + x.getMessage(), Project.MSG_WARN);\n }\n }\n }\n\n private void close(OutputStream outputStream)\n log(\"Failed to close java.io.InputStream: \" + x.getMessage(),\n Project.MSG_WARN);\n }\n }\n }\n\n private void close (OutputStream outputStream)\n {\n if (outputStream != null)\n {\n }\n catch (IOException x)\n {\n log(\"Failed to close java.io.OutputStream: \" + x.getMessage(), Project.MSG_WARN);\n log(\"Failed to close java.io.OutputStream: \" + x.getMessage(),\n Project.MSG_WARN);\n }\n }\n }\n * @throws IOException \n * \n */\n private void dumpLogFile() throws InterruptedException, IOException\n private void dumpLogFile 
() throws InterruptedException, IOException\n {\n Thread.sleep(WAIT_PERIOD);\n final BuildFacade currentBuild = getLuntServer().getLastBuild(mProjectName, mScheduleName);\n final BuildFacade currentBuild\n = getLuntServer().getLastBuild(mProjectName, mScheduleName);\n \n final String buildLogUrlHtml = currentBuild.getBuildLogUrl();\n log(\"Build log URL (HTML format): \" + buildLogUrlHtml, Project.MSG_VERBOSE);\n final String buildLogUrlTxt = buildLogUrlHtml.substring(0, buildLogUrlHtml.lastIndexOf(\".html\")) + \".txt\";\n final String buildLogUrlTxt = buildLogUrlHtml.substring(0,\n buildLogUrlHtml.lastIndexOf(\".html\")) + \".txt\";\n log(\"Build log URL (Text format): \" + buildLogUrlTxt, Project.MSG_VERBOSE);\n \n final URL buildLog = new URL(buildLogUrlTxt);\n log(\"===== END Build log =====\", Project.MSG_INFO);\n }\n \n private BuildParams getBuildParams()\n private BuildParams getBuildParams ()\n {\n final BuildParams params = new BuildParams();\n \n params.setNotifyStrategy(Constants.NOTIFY_NONE);\n params.setPostbuildStrategy(Constants.POSTBUILD_NONE);\n // params.setScheduleId()\n params.setTriggerDependencyStrategy(Constants.TRIGGER_NONE_DEPENDENT_SCHEDULES);\n params.setTriggerDependencyStrategy(\n Constants.TRIGGER_NONE_DEPENDENT_SCHEDULES);\n \n return params;\n }\n /**\n * @param luntUrl\n */\n private void checkNotNull(Object obj, String name) \n private void checkNotNull (Object obj, String name) \n {\n if (obj == null)\n {\n /**\n * \n */\n private void checkParameters()\n private void checkParameters ()\n {\n checkNotNull(mLuntUrl, \"luntUrl\");\n checkNotNull(mUserName, \"userName\");\n }\n if (!mWaitForSchedule)\n {\n throw new BuildException(\"Can't retrieve artifacts when waitForBuild == false\");\n }\n }\n }\n\n private ILuntbuild getLuntServer() throws MalformedURLException\n throw new BuildException(\n \"Can't retrieve artifacts when waitForBuild == false\");\n }\n }\n }\n\n private ILuntbuild getLuntServer () throws 
MalformedURLException\n {\n if (mLuntServer == null)\n {\n /**\n * @param name The name to set.\n */\n public void setName(String name)\n public void setName (String name)\n {\n mName = name;\n }\n /**\n * @return Returns the name.\n */\n public String getName()\n public String getName ()\n {\n return mName;\n }\n }\n\n public static void main(String[] args)\n {\n final Project dummy = new Project();\n DefaultLogger logger = new DefaultLogger();\n\n logger.setMessageOutputLevel(Project.MSG_INFO);\n logger.setOutputPrintStream(System.out);\n logger.setErrorPrintStream(System.err);\n dummy.addBuildListener(logger);\n \n final LuntBuildTask lbr = new LuntBuildTask();\n lbr.setProject(dummy);\n lbr.setProjectName(\"test\");\n lbr.setScheduleName(\"on-demand\");\n lbr.setLuntUrl(\"http://dev130wks0007:8080/luntbuild/app.do?service=hessian\");\n lbr.setUserName(\"luntbuild\");\n lbr.setPassword(\"geheim42\");\n\n final Artifact a1 = new Artifact();\n a1.setName(\"artifact1.txt\");\n lbr.addArtifact(a1);\n final Artifact a2 = new Artifact();\n a2.setName(\"artifact2.txt\");\n lbr.addArtifact(a2);\n\n lbr.setToDir(\"C:\\\\temp\");\n lbr.execute();\n }\n }"}}},{"rowIdx":2095,"cells":{"lang":{"kind":"string","value":"JavaScript"},"license":{"kind":"string","value":"mit"},"stderr":{"kind":"string","value":""},"commit":{"kind":"string","value":"59f49a94651d9884aac8f26b70bd6edd72e4e26b"},"returncode":{"kind":"number","value":0,"string":"0"},"repos":{"kind":"string","value":"Revionics/SlickGrid,Revionics/SlickGrid"},"new_contents":{"kind":"string","value":"/**\n * @license\n * (c) 2009-2013 Michael Leibman\n * michael{dot}leibman{at}gmail{dot}com\n * http://github.com/mleibman/slickgrid\n *\n * Distributed under MIT license.\n * All rights reserved.\n *\n * SlickGrid v2.2\n *\n * NOTES:\n * Cell/row DOM manipulations are done directly bypassing jQuery's DOM manipulation methods.\n * This increases the speed dramatically, but can only be done safely because there are no event 
handlers\n * or data associated with any cell/row DOM nodes. Cell editors must make sure they implement .destroy()\n * and do proper cleanup.\n *\n */\n\n// make sure required JavaScript modules are loaded\nif (typeof jQuery === \"undefined\") {\n throw \"SlickGrid requires jquery module to be loaded\";\n}\nif (!jQuery.fn.drag) {\n throw \"SlickGrid requires jquery.event.drag module to be loaded\";\n}\nif (typeof Slick === \"undefined\") {\n throw \"slick.core.js not loaded\";\n}\n\n(function ($) {\n // Slick.Grid\n $.extend(true, window, {\n Slick: {\n Grid: SlickGrid\n }\n });\n\n // shared across all grids on the page\n var scrollbarDimensions;\n var maxSupportedCssHeight; // browser's breaking point\n\n // ////////////////////////////////////////////////////////////////////////////////////////////\n // SlickGrid class implementation (available as Slick.Grid)\n\n /**\n * Creates a new instance of the grid.\n * @class SlickGrid\n * @constructor\n * @param {Node} container Container node to create the grid in.\n * @param {Array,Object} data An array of objects for databinding.\n * @param {Array} columns An array of column definitions.\n * @param {Object} options Grid options.\n **/\n function SlickGrid(container, data, columns, options) {\n // settings\n var defaults = {\n explicitInitialization: false,\n rowHeight: 25,\n defaultColumnWidth: 80,\n enableAddRow: false,\n leaveSpaceForNewRows: false,\n editable: false,\n autoEdit: true,\n enableCellNavigation: true,\n enableColumnReorder: true,\n asyncEditorLoading: false,\n asyncEditorLoadDelay: 100,\n forceFitColumns: false,\n enableAsyncPostRender: false,\n asyncPostRenderDelay: 50,\n autoHeight: false,\n editorLock: Slick.GlobalEditorLock,\n showHeaderRow: false,\n headerRowHeight: 25,\n showTopPanel: false,\n topPanelHeight: 25,\n formatterFactory: null,\n editorFactory: null,\n cellFlashingCssClass: \"flashing\",\n selectedCellCssClass: \"selected\",\n multiSelect: true,\n enableTextSelectionOnCells: false,\n 
dataItemColumnValueExtractor: null,\n frozenBottom: false,\n frozenColumn: -1,\n frozenRow: -1,\n fullWidthRows: false,\n multiColumnSort: false,\n defaultFormatter: defaultFormatter,\n forceSyncScrolling: false\n };\n\n var columnDefaults = {\n name: \"\",\n resizable: true,\n sortable: false,\n minWidth: 30,\n rerenderOnResize: false,\n headerCssClass: null,\n defaultSortAsc: true,\n focusable: true,\n selectable: true\n };\n\n // scroller\n var th; // virtual height\n var h; // real scrollable height\n var ph; // page height\n var n; // number of pages\n var cj; // \"jumpiness\" coefficient\n\n var page = 0; // current page\n var offset = 0; // current page offset\n var vScrollDir = 1;\n\n // private\n var initialized = false;\n var $container;\n var uid = \"slickgrid_\" + Math.round(1000000 * Math.random());\n var self = this;\n var $focusSink, $focusSink2;\n var $headerScroller;\n var $headers;\n var $headerRow, $headerRowScroller, $headerRowSpacerL, $headerRowSpacerR;\n var $topPanelScroller;\n var $topPanel;\n var $viewport;\n var $canvas;\n var $style;\n var $boundAncestors;\n var stylesheet, columnCssRulesL, columnCssRulesR;\n var viewportH, viewportW;\n var canvasWidth, canvasWidthL, canvasWidthR;\n var headersWidth, headersWidthL, headersWidthR;\n var viewportHasHScroll, viewportHasVScroll;\n var headerColumnWidthDiff = 0,\n headerColumnHeightDiff = 0,\n // border+padding\n cellWidthDiff = 0,\n cellHeightDiff = 0;\n var absoluteColumnMinWidth;\n var hasFrozenRows = false;\n var frozenRowsHeight = 0;\n var actualFrozenRow = -1;\n var paneTopH = 0;\n var paneBottomH = 0;\n var viewportTopH = 0;\n var viewportBottomH = 0;\n var topPanelH = 0;\n var headerRowH = 0;\n\n var tabbingDirection = 1;\n var $activeCanvasNode;\n var $activeViewportNode;\n var activePosX;\n var activeRow, activeCell;\n var activeCellNode = null;\n var currentEditor = null;\n var serializedEditorValue;\n var editController;\n\n var rowsCache = {};\n var renderedRows = 0;\n var 
numVisibleRows = 0;\n var prevScrollTop = 0;\n var scrollTop = 0;\n var lastRenderedScrollTop = 0;\n var lastRenderedScrollLeft = 0;\n var prevScrollLeft = 0;\n var scrollLeft = 0;\n\n var selectionModel;\n var selectedRows = [];\n\n var plugins = [];\n var cellCssClasses = {};\n\n var columnsById = {};\n var sortColumns = [];\n var columnPosLeft = [];\n var columnPosRight = [];\n\n // async call handles\n var h_editorLoader = null;\n var h_render = null;\n var h_postrender = null;\n var postProcessedRows = {};\n var postProcessToRow = null;\n var postProcessFromRow = null;\n\n // perf counters\n var counter_rows_rendered = 0;\n var counter_rows_removed = 0;\n\n var $paneHeaderL;\n var $paneHeaderR;\n var $paneTopL;\n var $paneTopR;\n var $paneBottomL;\n var $paneBottomR;\n\n var $headerScrollerL;\n var $headerScrollerR;\n\n var $headerL;\n var $headerR;\n\n var $headerRowScrollerL;\n var $headerRowScrollerR;\n\n var $headerRowL;\n var $headerRowR;\n\n var $topPanelScrollerL;\n var $topPanelScrollerR;\n\n var $topPanelL;\n var $topPanelR;\n\n var $viewportTopL;\n var $viewportTopR;\n var $viewportBottomL;\n var $viewportBottomR;\n\n var $canvasTopL;\n var $canvasTopR;\n var $canvasBottomL;\n var $canvasBottomR;\n\n var $viewportScrollContainerX;\n var $viewportScrollContainerY;\n var $headerScrollContainer;\n var $headerRowScrollContainer;\n\n // ////////////////////////////////////////////////////////////////////////////////////////////\n // Initialization\n\n function init() {\n $container = $(container);\n if ($container.length < 1) {\n throw new Error(\"SlickGrid requires a valid container, \" + container + \" does not exist in the DOM.\");\n }\n\n // calculate these only once and share between grid instances\n maxSupportedCssHeight = maxSupportedCssHeight || getMaxSupportedCssHeight();\n scrollbarDimensions = scrollbarDimensions || measureScrollbar();\n\n options = $.extend({}, defaults, options);\n validateAndEnforceOptions();\n columnDefaults.width = 
options.defaultColumnWidth;\n\n columnsById = {};\n for (var i = 0; i < columns.length; i++) {\n var m = columns[i] = $.extend({}, columnDefaults, columns[i]);\n columnsById[m.id] = i;\n if (m.minWidth && m.width < m.minWidth) {\n m.width = m.minWidth;\n }\n if (m.maxWidth && m.width > m.maxWidth) {\n m.width = m.maxWidth;\n }\n }\n\n // validate loaded JavaScript modules against requested options\n if (options.enableColumnReorder && !$.fn.sortable) {\n throw new Error(\"SlickGrid's 'enableColumnReorder = true' option requires jquery-ui.sortable module to be loaded\");\n }\n\n editController = {\n \"commitCurrentEdit\": commitCurrentEdit,\n \"cancelCurrentEdit\": cancelCurrentEdit\n };\n\n $container\n .empty()\n .css(\"overflow\", \"hidden\")\n .css(\"outline\", 0)\n .addClass(uid)\n .addClass(\"ui-widget\");\n\n // set up a positioning container if needed\n if (!/relative|absolute|fixed/.test($container.css(\"position\"))) {\n $container.css(\"position\", \"relative\");\n }\n\n $focusSink = $(\"
      \").appendTo($container);\n\n // Containers used for scrolling frozen columns and rows\n $paneHeaderL = $(\"
      \").appendTo($container);\n $paneHeaderR = $(\"
      \").appendTo($container);\n $paneTopL = $(\"
      \").appendTo($container);\n $paneTopR = $(\"
      \").appendTo($container);\n $paneBottomL = $(\"
      \").appendTo($container);\n $paneBottomR = $(\"
      \").appendTo($container);\n\n // Append the header scroller containers\n $headerScrollerL = $(\"
      \").appendTo($paneHeaderL);\n $headerScrollerR = $(\"
      \").appendTo($paneHeaderR);\n\n // Cache the header scroller containers\n $headerScroller = $().add($headerScrollerL).add($headerScrollerR);\n\n // Append the columnn containers to the headers\n $headerL = $(\"
      \").appendTo($headerScrollerL);\n $headerR = $(\"
      \").appendTo($headerScrollerR);\n\n // Cache the header columns\n $headers = $().add($headerL).add($headerR);\n\n $headerRowScrollerL = $(\"
      \").appendTo($paneTopL);\n $headerRowScrollerR = $(\"
      \").appendTo($paneTopR);\n\n $headerRowScroller = $().add($headerRowScrollerL).add($headerRowScrollerR);\n\n $headerRowSpacerL = $(\"
      \")\n .css(\"width\", getCanvasWidth() + scrollbarDimensions.width + \"px\")\n .appendTo($headerRowScrollerL);\n $headerRowSpacerR = $(\"
      \")\n .css(\"width\", getCanvasWidth() + scrollbarDimensions.width + \"px\")\n .appendTo($headerRowScrollerR);\n\n\n $headerRowL = $(\"
      \").appendTo($headerRowScrollerL);\n $headerRowR = $(\"
      \").appendTo($headerRowScrollerR);\n\n $headerRow = $().add($headerRowL).add($headerRowR);\n\n // Append the top panel scroller\n $topPanelScrollerL = $(\"
      \").appendTo($paneTopL);\n $topPanelScrollerR = $(\"
      \").appendTo($paneTopR);\n\n $topPanelScroller = $().add($topPanelScrollerL).add($topPanelScrollerR);\n\n // Append the top panel\n $topPanelL = $(\"
      \").appendTo($topPanelScrollerL);\n $topPanelR = $(\"
      \").appendTo($topPanelScrollerR);\n\n $topPanel = $().add($topPanelL).add($topPanelR);\n\n if (!options.showTopPanel) {\n $topPanelScroller.hide();\n }\n\n if (!options.showHeaderRow) {\n $headerRowScroller.hide();\n }\n\n // Append the viewport containers\n $viewportTopL = $(\"
      \").appendTo($paneTopL);\n $viewportTopR = $(\"
      \").appendTo($paneTopR);\n $viewportBottomL = $(\"
      \").appendTo($paneBottomL);\n $viewportBottomR = $(\"
      \").appendTo($paneBottomR);\n\n // Cache the viewports\n $viewport = $().add($viewportTopL).add($viewportTopR).add($viewportBottomL).add($viewportBottomR);\n\n // Default the active viewport to the top left\n $activeViewportNode = $viewportTopL;\n\n // Append the canvas containers\n $canvasTopL = $(\"
      \").appendTo($viewportTopL);\n $canvasTopR = $(\"
      \").appendTo($viewportTopR);\n $canvasBottomL = $(\"
      \").appendTo($viewportBottomL);\n $canvasBottomR = $(\"
      \").appendTo($viewportBottomR);\n\n // Cache the canvases\n $canvas = $().add($canvasTopL).add($canvasTopR).add($canvasBottomL).add($canvasBottomR);\n\n // Default the active canvas to the top left\n $activeCanvasNode = $canvasTopL;\n\n $focusSink2 = $focusSink.clone().appendTo($container);\n\n if (!options.explicitInitialization) {\n finishInitialization();\n }\n }\n\n function finishInitialization() {\n if (!initialized) {\n initialized = true;\n\n getViewportWidth();\n getViewportHeight();\n\n // header columns and cells may have different padding/border\n // skewing width calculations (box-sizing, hello?)\n // calculate the diff so we can set consistent sizes\n measureCellPaddingAndBorder();\n\n // for usability reasons, all text selection in SlickGrid is\n // disabled with the exception of input and textarea elements (selection\n // must be enabled there so that editors work as expected); note that\n // selection in grid cells (grid body) is already unavailable in\n // all browsers except IE\n disableSelection($headers); // disable all text selection in header (including input and textarea)\n\n if (!options.enableTextSelectionOnCells) {\n // disable text selection in grid cells except in input and textarea elements\n // (this is IE-specific, because selectstart event will only fire in IE)\n $viewport.bind(\"selectstart.ui\", function (event) {\n return $(event.target).is(\"input,textarea\");\n });\n }\n\n setFrozenOptions();\n setPaneVisibility();\n setScroller();\n setOverflow();\n\n updateColumnCaches();\n createColumnHeaders();\n setupColumnSort();\n createCssRules();\n resizeCanvas();\n bindAncestorScrollEvents();\n\n $container\n .bind(\"resize.slickgrid\", resizeCanvas);\n $viewport\n //.bind(\"click\", handleClick)\n .bind(\"scroll\", handleScroll);\n if (jQuery.fn.mousewheel && ( options.frozenColumn > -1 || hasFrozenRows )) {\n $viewport\n .bind(\"mousewheel\", handleMouseWheel);\n }\n $headerScroller\n .bind(\"contextmenu\", 
handleHeaderContextMenu)\n .bind(\"click\", handleHeaderClick)\n .delegate(\".slick-header-column\", \"mouseenter\", handleHeaderMouseEnter)\n .delegate(\".slick-header-column\", \"mouseleave\", handleHeaderMouseLeave);\n $headerRowScroller\n .bind(\"scroll\", handleHeaderRowScroll);\n $focusSink.add($focusSink2)\n .bind(\"keydown\", handleKeyDown);\n $canvas\n .bind(\"keydown\", handleKeyDown)\n .bind(\"click\", handleClick)\n .bind(\"dblclick\", handleDblClick)\n .bind(\"contextmenu\", handleContextMenu)\n .bind(\"draginit\", handleDragInit)\n .bind(\"dragstart\", {distance: 3}, handleDragStart)\n .bind(\"drag\", handleDrag)\n .bind(\"dragend\", handleDragEnd)\n .delegate(\".slick-cell\", \"mouseenter\", handleMouseEnter)\n .delegate(\".slick-cell\", \"mouseleave\", handleMouseLeave);\n }\n }\n\n function registerPlugin(plugin) {\n plugins.unshift(plugin);\n plugin.init(self);\n }\n\n function unregisterPlugin(plugin) {\n for (var i = plugins.length; i >= 0; i--) {\n if (plugins[i] === plugin) {\n if (plugins[i].destroy) {\n plugins[i].destroy();\n }\n plugins.splice(i, 1);\n break;\n }\n }\n }\n\n function setSelectionModel(model) {\n if (selectionModel) {\n selectionModel.onSelectedRangesChanged.unsubscribe(handleSelectedRangesChanged);\n if (selectionModel.destroy) {\n selectionModel.destroy();\n }\n }\n\n selectionModel = model;\n if (selectionModel) {\n selectionModel.init(self);\n selectionModel.onSelectedRangesChanged.subscribe(handleSelectedRangesChanged);\n }\n }\n\n function getSelectionModel() {\n return selectionModel;\n }\n\n function getCanvasNode() {\n return $canvas[0];\n }\n\n function getActiveCanvasNode(element) {\n setActiveCanvasNode(element);\n\n return $activeCanvasNode[0];\n }\n\n function getCanvases() {\n return $canvas;\n }\n\n function setActiveCanvasNode(element) {\n if (element) {\n $activeCanvasNode = $(element.target).closest('.grid-canvas');\n }\n }\n\n function getViewportNode() {\n return $viewport[0];\n }\n\n function 
getActiveViewportNode(element) {\n setActiveViewPortNode(element);\n\n return $activeViewportNode[0];\n }\n\n function setActiveViewportNode(element) {\n if (element) {\n $activeViewportNode = $(element.target).closest('.slick-viewport');\n }\n }\n\n function measureScrollbar() {\n var $c = $(\"
      \").appendTo(\"body\");\n var dim = {\n width: $c.width() - $c[0].clientWidth,\n height: $c.height() - $c[0].clientHeight\n };\n $c.remove();\n return dim;\n }\n\n function getHeadersWidth() {\n headersWidth = headersWidthL = headersWidthR = 0;\n\n for (var i = 0, ii = columns.length; i < ii; i++) {\n var width = columns[ i ].width;\n\n if (( options.frozenColumn ) > -1 && ( i > options.frozenColumn )) {\n headersWidthR += width;\n } else {\n headersWidthL += width;\n }\n }\n\n if (options.frozenColumn > -1) {\n headersWidthL = headersWidthL + 1000;\n\n headersWidthR = Math.max(headersWidthR, viewportW) + headersWidthL;\n headersWidthR += scrollbarDimensions.width;\n } else {\n headersWidthL += scrollbarDimensions.width;\n headersWidthL = Math.max(headersWidthL, viewportW) + 1000;\n }\n\n headersWidth = headersWidthL + headersWidthR;\n }\n\n function getCanvasWidth() {\n var availableWidth = viewportHasVScroll ? viewportW - scrollbarDimensions.width : viewportW;\n\n var i = columns.length;\n\n canvasWidthL = canvasWidthR = 0;\n\n while (i--) {\n if ((options.frozenColumn > -1) && (i > options.frozenColumn)) {\n canvasWidthR += columns[i].width;\n } else {\n canvasWidthL += columns[i].width;\n }\n }\n\n var totalRowWidth = canvasWidthL + canvasWidthR;\n\n return options.fullWidthRows ? 
Math.max(totalRowWidth, availableWidth) : totalRowWidth;\n }\n\n function updateCanvasWidth(forceColumnWidthsUpdate) {\n var oldCanvasWidth = canvasWidth;\n var oldCanvasWidthL = canvasWidthL;\n var oldCanvasWidthR = canvasWidthR;\n var widthChanged;\n canvasWidth = getCanvasWidth();\n\n widthChanged = canvasWidth !== oldCanvasWidth || canvasWidthL !== oldCanvasWidthL || canvasWidthR !== oldCanvasWidthR;\n\n if (widthChanged || options.frozenColumn > -1 || hasFrozenRows) {\n $canvasTopL.width(canvasWidthL);\n\n getHeadersWidth();\n\n $headerL.width(headersWidthL);\n $headerR.width(headersWidthR);\n\n if (options.frozenColumn > -1) {\n $canvasTopR.width(canvasWidthR);\n\n $paneHeaderL.width(canvasWidthL);\n $paneHeaderR.css('left', canvasWidthL);\n $paneHeaderR.css('width', viewportW - canvasWidthL);\n\n $paneTopL.width(canvasWidthL);\n $paneTopR.css('left', canvasWidthL);\n $paneTopR.css('width', viewportW - canvasWidthL);\n\n $headerRowScrollerL.width(canvasWidthL);\n $headerRowScrollerR.width(viewportW - canvasWidthL);\n\n $headerRowL.width(canvasWidthL);\n $headerRowR.width(canvasWidthR);\n\n $viewportTopL.width(canvasWidthL);\n $viewportTopR.width(viewportW - canvasWidthL);\n\n if (hasFrozenRows) {\n $paneBottomL.width(canvasWidthL);\n $paneBottomR.css('left', canvasWidthL);\n\n $viewportBottomL.width(canvasWidthL);\n $viewportBottomR.width(viewportW - canvasWidthL);\n\n $canvasBottomL.width(canvasWidthL);\n $canvasBottomR.width(canvasWidthR);\n }\n } else {\n $paneHeaderL.width('100%');\n\n $paneTopL.width('100%');\n\n $headerRowScrollerL.width('100%');\n\n $headerRowL.width(canvasWidth);\n\n $viewportTopL.width('100%');\n\n if (hasFrozenRows) {\n $viewportBottomL.width('100%');\n $canvasBottomL.width(canvasWidthL);\n }\n }\n\n viewportHasHScroll = (canvasWidth > viewportW - scrollbarDimensions.width);\n }\n\n $headerRowSpacerL.width(canvasWidth + (viewportHasVScroll ? 
scrollbarDimensions.width : 0));\n $headerRowSpacerR.width(canvasWidth + (viewportHasVScroll ? scrollbarDimensions.width : 0));\n\n if (widthChanged || forceColumnWidthsUpdate) {\n applyColumnWidths();\n }\n }\n\n function disableSelection($target) {\n if ($target && $target.jquery) {\n $target.attr(\"unselectable\", \"on\").css(\"MozUserSelect\", \"none\").bind(\"selectstart.ui\", function () {\n return false;\n }); // from jquery:ui.core.js 1.7.2\n }\n }\n\n function getMaxSupportedCssHeight() {\n var supportedHeight = 1000000;\n // FF reports the height back but still renders blank after ~6M px\n var testUpTo = navigator.userAgent.toLowerCase().match(/firefox/) ? 6000000 : 1000000000;\n var div = $(\"
      \").appendTo(document.body);\n\n while (true) {\n var test = supportedHeight * 2;\n div.css(\"height\", test);\n if (test > testUpTo || div.height() !== test) {\n break;\n } else {\n supportedHeight = test;\n }\n }\n\n div.remove();\n return supportedHeight;\n }\n\n // TODO: this is static. need to handle page mutation.\n function bindAncestorScrollEvents() {\n var elem = (hasFrozenRows && !options.frozenBottom) ? $canvasBottomL[0] : $canvasTopL[0];\n while ((elem = elem.parentNode) != document.body && elem != null) {\n // bind to scroll containers only\n if (elem == $viewportTopL[0] || elem.scrollWidth != elem.clientWidth || elem.scrollHeight != elem.clientHeight) {\n var $elem = $(elem);\n if (!$boundAncestors) {\n $boundAncestors = $elem;\n } else {\n $boundAncestors = $boundAncestors.add($elem);\n }\n $elem.bind(\"scroll.\" + uid, handleActiveCellPositionChange);\n }\n }\n }\n\n function unbindAncestorScrollEvents() {\n if (!$boundAncestors) {\n return;\n }\n $boundAncestors.unbind(\"scroll.\" + uid);\n $boundAncestors = null;\n }\n\n function updateColumnHeader(columnId, title, toolTip) {\n if (!initialized) {\n return;\n }\n var idx = getColumnIndex(columnId);\n if (idx == null) {\n return;\n }\n\n var columnDef = columns[idx];\n var $header = $headers.children().eq(idx);\n if ($header) {\n if (title !== undefined) {\n columns[idx].name = title;\n }\n if (toolTip !== undefined) {\n columns[idx].toolTip = toolTip;\n }\n\n trigger(self.onBeforeHeaderCellDestroy, {\n \"node\": $header[0],\n \"column\": columnDef\n });\n\n $header.attr(\"title\", toolTip || \"\").children().eq(0).html(title);\n\n trigger(self.onHeaderCellRendered, {\n \"node\": $header[0],\n \"column\": columnDef\n });\n }\n }\n\n function getHeaderRow() {\n return (options.frozenColumn > -1) ? 
$headerRow : $headerRow[0];\n }\n\n function getHeaderRowColumn(columnId) {\n var idx = getColumnIndex(columnId);\n\n var $headerRowTarget;\n\n if (options.frozenColumn > -1) {\n if (idx <= options.frozenColumn) {\n $headerRowTarget = $headerRowL;\n } else {\n $headerRowTarget = $headerRowR;\n\n idx -= options.frozenColumn + 1;\n }\n } else {\n $headerRowTarget = $headerRowL;\n }\n\n var $header = $headerRowTarget.children().eq(idx);\n return $header && $header[0];\n }\n\n function createColumnHeaders() {\n function onMouseEnter() {\n $(this).addClass(\"ui-state-hover\");\n }\n\n function onMouseLeave() {\n $(this).removeClass(\"ui-state-hover\");\n }\n\n $headers.find(\".slick-header-column\")\n .each(function () {\n var columnDef = $(this).data(\"column\");\n if (columnDef) {\n trigger(self.onBeforeHeaderCellDestroy, {\n \"node\": this,\n \"column\": columnDef\n });\n }\n });\n\n $headerL.empty();\n $headerR.empty();\n\n getHeadersWidth();\n\n $headerL.width(headersWidthL);\n $headerR.width(headersWidthR);\n\n $headerRow.find(\".slick-headerrow-column\")\n .each(function () {\n var columnDef = $(this).data(\"column\");\n if (columnDef) {\n trigger(self.onBeforeHeaderRowCellDestroy, {\n \"node\": this,\n \"column\": columnDef\n });\n }\n });\n\n $headerRowL.empty();\n $headerRowR.empty();\n\n for (var i = 0; i < columns.length; i++) {\n var m = columns[i];\n\n var $headerTarget = (options.frozenColumn > -1) ? ((i <= options.frozenColumn) ? $headerL : $headerR) : $headerL;\n var $headerRowTarget = (options.frozenColumn > -1) ? ((i <= options.frozenColumn) ? $headerRowL : $headerRowR) : $headerRowL;\n\n var header = $(\"
      \")\n .html(\"\" + m.name + \"\")\n .width(m.width - headerColumnWidthDiff)\n .attr(\"id\", \"\" + uid + m.id)\n .attr(\"title\", m.toolTip || \"\")\n .data(\"column\", m)\n .addClass(m.headerCssClass || \"\")\n .appendTo($headerTarget);\n\n if (options.enableColumnReorder || m.sortable) {\n header\n .on('mouseenter', onMouseEnter)\n .on('mouseleave', onMouseLeave);\n }\n\n if (m.sortable) {\n header.addClass(\"slick-header-sortable\");\n header.append(\"\");\n }\n\n trigger(self.onHeaderCellRendered, {\n \"node\": header[0],\n \"column\": m\n });\n\n if (options.showHeaderRow) {\n var headerRowCell = $(\"
      \")\n .data(\"column\", m)\n .appendTo($headerRowTarget);\n\n trigger(self.onHeaderRowCellRendered, {\n \"node\": headerRowCell[0],\n \"column\": m\n });\n }\n }\n\n setSortColumns(sortColumns);\n setupColumnResize();\n if (options.enableColumnReorder) {\n setupColumnReorder();\n }\n }\n\n function setupColumnSort() {\n $headers.click(function (e) {\n // temporary workaround for a bug in jQuery 1.7.1\n // (http://bugs.jquery.com/ticket/11328)\n e.metaKey = e.metaKey || e.ctrlKey;\n\n if ($(e.target).hasClass(\"slick-resizable-handle\")) {\n return;\n }\n\n var $col = $(e.target).closest(\".slick-header-column\");\n if (!$col.length) {\n return;\n }\n\n var column = $col.data(\"column\");\n if (column.sortable) {\n if (!getEditorLock().commitCurrentEdit()) {\n return;\n }\n\n var sortOpts = null;\n var i = 0;\n for (; i < sortColumns.length; i++) {\n if (sortColumns[i].columnId == column.id) {\n sortOpts = sortColumns[i];\n sortOpts.sortAsc = !sortOpts.sortAsc;\n break;\n }\n }\n\n if (e.metaKey && options.multiColumnSort) {\n if (sortOpts) {\n sortColumns.splice(i, 1);\n }\n } else {\n if ((!e.shiftKey && !e.metaKey) || !options.multiColumnSort) {\n sortColumns = [];\n }\n\n if (!sortOpts) {\n sortOpts = {\n columnId: column.id,\n sortAsc: true\n };\n sortColumns.push(sortOpts);\n } else if (sortColumns.length == 0) {\n sortColumns.push(sortOpts);\n }\n }\n\n setSortColumns(sortColumns);\n\n if (!options.multiColumnSort) {\n trigger(self.onSort, {\n multiColumnSort: false,\n sortCol: column,\n sortAsc: sortOpts.sortAsc\n }, e);\n } else {\n trigger(\n self.onSort, {\n multiColumnSort: true,\n sortCols: $.map(\n sortColumns, function (col) {\n return {\n sortCol: columns[getColumnIndex(col.columnId)],\n sortAsc: col.sortAsc\n };\n })\n }, e);\n }\n }\n });\n }\n\n function setupColumnReorder() {\n $headers.filter(\":ui-sortable\").sortable(\"destroy\");\n var columnScrollTimer = null;\n\n function scrollColumnsRight() {\n 
$viewportScrollContainerX[0].scrollLeft = $viewportScrollContainerX[0].scrollLeft + 10;\n }\n\n function scrollColumnsLeft() {\n $viewportScrollContainerX[0].scrollLeft = $viewportScrollContainerX[0].scrollLeft - 10;\n }\n\n $headers.sortable({\n containment: \"parent\",\n distance: 3,\n axis: \"x\",\n cursor: \"default\",\n tolerance: \"intersection\",\n helper: \"clone\",\n placeholder: \"slick-sortable-placeholder ui-state-default slick-header-column\",\n start: function (e, ui) {\n ui.placeholder.width(ui.helper.outerWidth() - headerColumnWidthDiff);\n $(ui.helper).addClass(\"slick-header-column-active\");\n },\n beforeStop: function (e, ui) {\n $(ui.helper).removeClass(\"slick-header-column-active\");\n },\n sort: function (e, ui) {\n if (e.originalEvent.pageX > $container[0].clientWidth) {\n if (!(columnScrollTimer)) {\n columnScrollTimer = setInterval(\n scrollColumnsRight, 100);\n }\n } else if (e.originalEvent.pageX < $viewportScrollContainerX.offset().left) {\n if (!(columnScrollTimer)) {\n columnScrollTimer = setInterval(\n scrollColumnsLeft, 100);\n }\n } else {\n clearInterval(columnScrollTimer);\n columnScrollTimer = null;\n }\n },\n stop: function (e) {\n clearInterval(columnScrollTimer);\n columnScrollTimer = null;\n\n if (!getEditorLock().commitCurrentEdit()) {\n $(this).sortable(\"cancel\");\n return;\n }\n\n var reorderedIds = $headerL.sortable(\"toArray\");\n reorderedIds = reorderedIds.concat($headerR.sortable(\"toArray\"));\n\n var reorderedColumns = [];\n for (var i = 0; i < reorderedIds.length; i++) {\n reorderedColumns.push(columns[getColumnIndex(reorderedIds[i].replace(uid, \"\"))]);\n }\n setColumns(reorderedColumns);\n\n trigger(self.onColumnsReordered, {});\n e.stopPropagation();\n setupColumnResize();\n }\n });\n }\n\n function setupColumnResize() {\n var $col, j, c, pageX, columnElements, minPageX, maxPageX, firstResizable, lastResizable;\n columnElements = $headers.children();\n 
columnElements.find(\".slick-resizable-handle\").remove();\n columnElements.each(function (i, e) {\n if (columns[i].resizable) {\n if (firstResizable === undefined) {\n firstResizable = i;\n }\n lastResizable = i;\n }\n });\n if (firstResizable === undefined) {\n return;\n }\n columnElements.each(function (i, e) {\n if (i < firstResizable || (options.forceFitColumns && i >= lastResizable)) {\n return;\n }\n $col = $(e);\n $(\"
      \")\n .appendTo(e)\n .bind(\"dragstart\",function (e, dd) {\n if (!getEditorLock().commitCurrentEdit()) {\n return false;\n }\n pageX = e.pageX;\n $(this).parent().addClass(\"slick-header-column-active\");\n var shrinkLeewayOnRight = null,\n stretchLeewayOnRight = null;\n // lock each column's width option to current width\n columnElements.each(function (i, e) {\n columns[i].previousWidth = $(e).outerWidth();\n });\n if (options.forceFitColumns) {\n shrinkLeewayOnRight = 0;\n stretchLeewayOnRight = 0;\n // colums on right affect maxPageX/minPageX\n for (j = i + 1; j < columnElements.length; j++) {\n c = columns[j];\n if (c.resizable) {\n if (stretchLeewayOnRight !== null) {\n if (c.maxWidth) {\n stretchLeewayOnRight += c.maxWidth - c.previousWidth;\n } else {\n stretchLeewayOnRight = null;\n }\n }\n shrinkLeewayOnRight += c.previousWidth - Math.max(c.minWidth || 0, absoluteColumnMinWidth);\n }\n }\n }\n var shrinkLeewayOnLeft = 0,\n stretchLeewayOnLeft = 0;\n for (j = 0; j <= i; j++) {\n // columns on left only affect minPageX\n c = columns[j];\n if (c.resizable) {\n if (stretchLeewayOnLeft !== null) {\n if (c.maxWidth) {\n stretchLeewayOnLeft += c.maxWidth - c.previousWidth;\n } else {\n stretchLeewayOnLeft = null;\n }\n }\n shrinkLeewayOnLeft += c.previousWidth - Math.max(c.minWidth || 0, absoluteColumnMinWidth);\n }\n }\n if (shrinkLeewayOnRight === null) {\n shrinkLeewayOnRight = 100000;\n }\n if (shrinkLeewayOnLeft === null) {\n shrinkLeewayOnLeft = 100000;\n }\n if (stretchLeewayOnRight === null) {\n stretchLeewayOnRight = 100000;\n }\n if (stretchLeewayOnLeft === null) {\n stretchLeewayOnLeft = 100000;\n }\n maxPageX = pageX + Math.min(shrinkLeewayOnRight, stretchLeewayOnLeft);\n minPageX = pageX - Math.min(shrinkLeewayOnLeft, stretchLeewayOnRight);\n }).bind(\"drag\",function (e, dd) {\n var actualMinWidth, d = Math.min(maxPageX, Math.max(minPageX, e.pageX)) - pageX,\n x;\n\n if (d < 0) { // shrink column\n x = d;\n\n var newCanvasWidthL = 0, 
newCanvasWidthR = 0;\n\n for (j = i; j >= 0; j--) {\n c = columns[j];\n if (c.resizable) {\n actualMinWidth = Math.max(c.minWidth || 0, absoluteColumnMinWidth);\n if (x && c.previousWidth + x < actualMinWidth) {\n x += c.previousWidth - actualMinWidth;\n c.width = actualMinWidth;\n } else {\n c.width = c.previousWidth + x;\n x = 0;\n }\n }\n }\n\n for (k = 0; k <= i; k++) {\n c = columns[k];\n\n if ((options.frozenColumn > -1) && (k > options.frozenColumn)) {\n newCanvasWidthR += c.width;\n } else {\n newCanvasWidthL += c.width;\n }\n }\n\n if (options.forceFitColumns) {\n x = -d;\n\n for (j = i + 1; j < columnElements.length; j++) {\n c = columns[j];\n if (c.resizable) {\n if (x && c.maxWidth && (c.maxWidth - c.previousWidth < x)) {\n x -= c.maxWidth - c.previousWidth;\n c.width = c.maxWidth;\n } else {\n c.width = c.previousWidth + x;\n x = 0;\n }\n\n if ((options.frozenColumn > -1) && (j > options.frozenColumn)) {\n newCanvasWidthR += c.width;\n } else {\n newCanvasWidthL += c.width;\n }\n }\n }\n } else {\n for (j = i + 1; j < columnElements.length; j++) {\n c = columns[j];\n\n if ((options.frozenColumn > -1) && (j > options.frozenColumn)) {\n newCanvasWidthR += c.width;\n } else {\n newCanvasWidthL += c.width;\n }\n }\n }\n } else { // stretch column\n x = d;\n\n var newCanvasWidthL = 0, newCanvasWidthR = 0;\n\n for (j = i; j >= 0; j--) {\n c = columns[j];\n if (c.resizable) {\n if (x && c.maxWidth && (c.maxWidth - c.previousWidth < x)) {\n x -= c.maxWidth - c.previousWidth;\n c.width = c.maxWidth;\n } else {\n c.width = c.previousWidth + x;\n x = 0;\n }\n }\n }\n\n for (k = 0; k <= i; k++) {\n c = columns[k];\n\n if ((options.frozenColumn > -1) && (k > options.frozenColumn)) {\n newCanvasWidthR += c.width;\n } else {\n newCanvasWidthL += c.width;\n }\n }\n\n if (options.forceFitColumns) {\n x = -d;\n\n for (j = i + 1; j < columnElements.length; j++) {\n c = columns[j];\n if (c.resizable) {\n actualMinWidth = Math.max(c.minWidth || 0, 
absoluteColumnMinWidth);\n if (x && c.previousWidth + x < actualMinWidth) {\n x += c.previousWidth - actualMinWidth;\n c.width = actualMinWidth;\n } else {\n c.width = c.previousWidth + x;\n x = 0;\n }\n\n if ((options.frozenColumn > -1) && (j > options.frozenColumn)) {\n newCanvasWidthR += c.width;\n } else {\n newCanvasWidthL += c.width;\n }\n }\n }\n } else {\n for (j = i + 1; j < columnElements.length; j++) {\n c = columns[j];\n\n if ((options.frozenColumn > -1) && (j > options.frozenColumn)) {\n newCanvasWidthR += c.width;\n } else {\n newCanvasWidthL += c.width;\n }\n }\n }\n }\n\n if (options.frozenColumn > -1 && newCanvasWidthL != canvasWidthL) {\n $headerL.width(newCanvasWidthL + 1000);\n $paneHeaderR.css('left', newCanvasWidthL);\n }\n\n applyColumnHeaderWidths();\n if (options.syncColumnCellResize) {\n updateCanvasWidth();\n applyColumnWidths();\n }\n }).bind(\"dragend\", function (e, dd) {\n var newWidth;\n $(this).parent().removeClass(\"slick-header-column-active\");\n for (j = 0; j < columnElements.length; j++) {\n c = columns[j];\n newWidth = $(columnElements[j]).outerWidth();\n\n if (c.previousWidth !== newWidth && c.rerenderOnResize) {\n invalidateAllRows();\n }\n }\n\n updateCanvasWidth(true);\n render();\n trigger(self.onColumnsResized, {});\n });\n });\n }\n\n function getVBoxDelta($el) {\n var p = [\"borderTopWidth\", \"borderBottomWidth\", \"paddingTop\", \"paddingBottom\"];\n var delta = 0;\n $.each(p, function (n, val) {\n delta += parseFloat($el.css(val)) || 0;\n });\n return delta;\n }\n\n function setFrozenOptions() {\n options.frozenColumn = ( options.frozenColumn >= 0\n && options.frozenColumn < columns.length\n )\n ? parseInt(options.frozenColumn)\n : -1;\n\n options.frozenRow = ( options.frozenRow >= 0\n && options.frozenRow < numVisibleRows\n )\n ? 
parseInt(options.frozenRow)\n : -1;\n\n if (options.frozenRow > -1) {\n hasFrozenRows = true;\n frozenRowsHeight = ( options.frozenRow ) * options.rowHeight;\n\n var dataLength = getDataLength() || this.data.length;\n\n actualFrozenRow = ( options.frozenBottom )\n ? ( dataLength - options.frozenRow )\n : options.frozenRow;\n } else {\n hasFrozenRows = false;\n }\n }\n\n function setPaneVisibility() {\n if (options.frozenColumn > -1) {\n $paneHeaderR.show();\n $paneTopR.show();\n\n if (hasFrozenRows) {\n $paneBottomL.show();\n $paneBottomR.show();\n } else {\n $paneBottomR.hide();\n $paneBottomL.hide();\n }\n } else {\n $paneHeaderR.hide();\n $paneTopR.hide();\n $paneBottomR.hide();\n\n if (hasFrozenRows) {\n $paneBottomL.show();\n } else {\n $paneBottomR.hide();\n $paneBottomL.hide();\n }\n }\n }\n\n function setOverflow() {\n $viewportTopL.css({\n 'overflow-x': ( options.frozenColumn > -1 ) ? ( hasFrozenRows ) ? 'hidden' : 'scroll' : ( hasFrozenRows ) ? 'hidden' : 'auto',\n 'overflow-y': ( options.frozenColumn > -1 ) ? ( hasFrozenRows ) ? 'hidden' : 'hidden' : ( hasFrozenRows ) ? 'scroll' : 'auto'\n });\n\n $viewportTopR.css({\n 'overflow-x': ( options.frozenColumn > -1 ) ? ( hasFrozenRows ) ? 'hidden' : 'scroll' : ( hasFrozenRows ) ? 'hidden' : 'auto',\n 'overflow-y': ( options.frozenColumn > -1 ) ? ( hasFrozenRows ) ? 'scroll' : 'auto' : ( hasFrozenRows ) ? 'scroll' : 'auto'\n });\n\n $viewportBottomL.css({\n 'overflow-x': ( options.frozenColumn > -1 ) ? ( hasFrozenRows ) ? 'scroll' : 'auto' : ( hasFrozenRows ) ? 'auto' : 'auto',\n 'overflow-y': ( options.frozenColumn > -1 ) ? ( hasFrozenRows ) ? 'hidden' : 'hidden' : ( hasFrozenRows ) ? 'scroll' : 'auto'\n });\n\n $viewportBottomR.css({\n 'overflow-x': ( options.frozenColumn > -1 ) ? ( hasFrozenRows ) ? 'scroll' : 'auto' : ( hasFrozenRows ) ? 'auto' : 'auto',\n 'overflow-y': ( options.frozenColumn > -1 ) ? ( hasFrozenRows ) ? 'auto' : 'auto' : ( hasFrozenRows ) ? 
'auto' : 'auto'\n });\n }\n\n function setScroller() {\n if (options.frozenColumn > -1) {\n $headerScrollContainer = $headerScrollerR;\n $headerRowScrollContainer = $headerRowScrollerR;\n\n if (hasFrozenRows) {\n if (options.frozenBottom) {\n $viewportScrollContainerX = $viewportBottomR;\n $viewportScrollContainerY = $viewportTopR;\n } else {\n $viewportScrollContainerX = $viewportScrollContainerY = $viewportBottomR;\n }\n } else {\n $viewportScrollContainerX = $viewportScrollContainerY = $viewportTopR;\n }\n } else {\n $headerScrollContainer = $headerScrollerL;\n $headerRowScrollContainer = $headerRowScrollerL;\n\n if (hasFrozenRows) {\n if (options.frozenBottom) {\n $viewportScrollContainerX = $viewportBottomL;\n $viewportScrollContainerY = $viewportTopL;\n } else {\n $viewportScrollContainerX = $viewportScrollContainerY = $viewportBottomL;\n }\n } else {\n $viewportScrollContainerX = $viewportScrollContainerY = $viewportTopL;\n }\n }\n }\n\n function measureCellPaddingAndBorder() {\n var el;\n var h = [\"borderLeftWidth\", \"borderRightWidth\", \"paddingLeft\", \"paddingRight\"];\n var v = [\"borderTopWidth\", \"borderBottomWidth\", \"paddingTop\", \"paddingBottom\"];\n\n el = $(\"\").appendTo($headers);\n headerColumnWidthDiff = headerColumnHeightDiff = 0;\n if (el.css(\"box-sizing\") != \"border-box\" && el.css(\"-moz-box-sizing\") != \"border-box\" && el.css(\"-webkit-box-sizing\") != \"border-box\") {\n $.each(h, function (n, val) {\n headerColumnWidthDiff += parseFloat(el.css(val)) || 0;\n });\n $.each(v, function (n, val) {\n headerColumnHeightDiff += parseFloat(el.css(val)) || 0;\n });\n }\n el.remove();\n\n var r = $(\"
      \").appendTo($canvas);\n el = $(\"\").appendTo(r);\n cellWidthDiff = cellHeightDiff = 0;\n if (el.css(\"box-sizing\") != \"border-box\" && el.css(\"-moz-box-sizing\") != \"border-box\" && el.css(\"-webkit-box-sizing\") != \"border-box\") {\n $.each(h, function (n, val) {\n cellWidthDiff += parseFloat(el.css(val)) || 0;\n });\n $.each(v, function (n, val) {\n cellHeightDiff += parseFloat(el.css(val)) || 0;\n });\n }\n r.remove();\n\n absoluteColumnMinWidth = Math.max(headerColumnWidthDiff, cellWidthDiff);\n }\n\n function createCssRules() {\n $style = $(\"