\n|]\n\napplyUpdateDescription :: Text\napplyUpdateDescription = [text|\nApply the next available update proposal from the blockchain. Note that this\nwill immediately shutdown the node and makes it unavailable for a short while.\n|]\n\npostponeUpdateDescription :: Text\npostponeUpdateDescription = [text|\nDiscard the next available update from the node's local state. Yet, this doesn't\nreject the update which will still be applied as soon as the node is restarted.\n|]\n\nresetWalletStateDescription :: Text\nresetWalletStateDescription = [text|\nWipe-out the node's local state entirely. The only intended use-case for this\nendpoint is during API integration testing. Note also that this will fail by\ndefault unless the node is running in debug mode.\n|]\n\nestimateFeesDescription :: Text\nestimateFeesDescription = [text|\nEstimate the fees which would incur from the input payment. This endpoint\n**does not** require a _spending password_ to be supplied as it generates\nunder the hood an unsigned transaction.\n|]\n\ngetAddressDescription :: Text\ngetAddressDescription = [text|\nThe previous version of this endpoint failed with an HTTP error when the given\naddress was unknown to the wallet.\n\nThis was misleading since an address that is unknown to the wallet may still\nbelong to the wallet (since it could be part of a pending transaction in\nanother instance of the same wallet).\n\nTo reflect this, the V1 endpoint does not fail when an address is not recognised\nand returns a new field which indicates the address' ownership status, from the\nnode point of view.\n|]\n\n--\n-- The API\n--\n\ndata DescriptionEnvironment = DescriptionEnvironment\n { deErrorExample :: !T.Text\n , deDefaultPerPage :: !T.Text\n , deWalletErrorTable :: !T.Text\n , deGitRevision :: !T.Text\n , deSoftwareVersion :: !T.Text\n , deMnemonicExample :: !T.Text\n }\n\napi :: HasSwagger a\n => (CompileTimeInfo, SoftwareVersion)\n -> Proxy a\n -> (DescriptionEnvironment -> T.Text)\n -> Swagger\napi (compileInfo, curSoftwareVersion) walletAPI mkDescription = toSwagger walletAPI\n & info.title .~ \"Sealchain Wallet API\"\n & info.version .~ fromString (show curSoftwareVersion)\n & host ?~ \"127.0.0.1:8090\"\n & info.description ?~ mkDescription DescriptionEnvironment\n { deErrorExample = decodeUtf8 $ encodePretty WalletNotFound\n , deMnemonicExample = decodeUtf8 $ encode (genExample @BackupPhrase)\n , deDefaultPerPage = fromString (show defaultPerPageEntries)\n , deWalletErrorTable = errorsDescription\n , deGitRevision = ctiGitRevision compileInfo\n , deSoftwareVersion = fromString $ show (svNumber curSoftwareVersion)\n }\n & info.license ?~ (\"MIT\" & url ?~ URL \"-project/sealchain/develop/LICENSE\")\n & paths %~ (POST, \"/api/internal/apply-update\") `setDescription` applyUpdateDescription\n & paths %~ (POST, \"/api/internal/postpone-update\") `setDescription` postponeUpdateDescription\n & paths %~ (DELETE, \"/api/internal/reset-wallet-state\") `setDescription` resetWalletStateDescription\n & paths %~ (POST, \"/api/v1/transactions/fees\") `setDescription` estimateFeesDescription\n & paths %~ (GET, \"/api/v1/addresses/{address}\") `setDescription` 
getAddressDescription\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/sealchain-project/sealchain/e97b4bac865fb147979cb14723a12c716a62e51e/wallet/src/Cardano/Wallet/API/V1/Swagger.hs"},"language":{"kind":"string","value":"haskell"},"comments":{"kind":"string","value":"# LANGUAGE DataKinds #\n# LANGUAGE QuasiQuotes #\n# LANGUAGE RankNTypes #\n# LANGUAGE TypeFamilies #\n\n Helper functions\n\n | Surround a Text with another\n | Display a multi-line code-block inline (e.g. in tables)\n | Drill in the 'Swagger' file in an unsafe way to modify a specific operation\n identified by a tuple (verb, path). The function looks a bit scary to use\n but is actually rather simple (see example below).\n\n Note that if the identified path doesn't exist, the function will throw\n at runtime when trying to read the underlying swagger structure!\n\n Example:\n\n swagger\n\n | A combinator to modify the description of an operation, using\n 'alterOperation' under the hood.\n\n\n Example:\n\n swagger\n\n Instances\n\n\n\n-----------------------|-----------------|---------\n 'WalletError'\n 'JSONValidationError'\n 'UnsupportedMimeTypeError'\n TODO 'MnemonicError' ?\n-----------------|-------------------\n | Provide additional insights on V1 documentation\n-----------------|-------------------\n-------------------\ncert ./scripts/tls-files/client.pem \\\ncacert ./scripts/tls-files/ca.crt \\\ncacert ./scripts/tls-files/ca.crt \\\ncert ./scripts/tls-files/client.pem\n-----------\ncacert ./scripts/tls-files/ca.crt \\\ncert ./scripts/tls-files/client.pem\n---------\ncacert ./scripts/tls-files/ca.crt \\\ncert ./scripts/tls-files/client.pem \\\ncacert ./scripts/tls-files/ca.crt \\\ncert ./scripts/tls-files/client.pem\n---------\ncacert ./scripts/tls-files/ca.crt \\\ncert ./scripts/tls-files/client.pem \\\n proof in hex\n--------------\n------------\n--------------------\n---------------------------\nno-tls` flag to the wallet or by running a wallet in debug mode with `--wallet-debug` turned on.\n----------------------------------\ncacert ./scripts/tls-files/ca.crt \\\ncert ./scripts/tls-files/client.pem \\\n----------------------\n-------------------------\ncacert ./scripts/tls-files/ca.crt \\\ncert ./scripts/tls-files/client.pem \\\n---------------\ncacert ./scripts/tls-files/ca.crt \\\ncert ./scripts/tls-files/client.pem \\\ncacert ./scripts/tls-files/ca.crt \\\ncert ./scripts/tls-files/client.pem\ncacert ./scripts/tls-files/ca.crt \\\ncert ./scripts/tls-files/client.pem\n---------------------\n----------------\ncacert ./scripts/tls-files/ca.crt \\\ncert ./scripts/tls-files/client.pem \\\ncacert ./scripts/tls-files/ca.crt \\\ncert ./scripts/tls-files/client.pem\n-------------------------------\ncacert ./scripts/tls-files/ca.crt \\\ncert ./scripts/tls-files/client.pem\n----------------------------\n--------------------------| ------------------------------\ncacert ./scripts/tls-files/ca.crt \\\ncert ./scripts/tls-files/client.pem\ncacert ./scripts/tls-files/ca.crt \\\ncert ./scripts/tls-files/client.pem\ncacert ./scripts/tls-files/ca.crt \\\ncert ./scripts/tls-files/client.pem\n-------------------------------\ncacert ./scripts/tls-files/ca.crt \\\ncert ./scripts/tls-files/client.pem\n------------------------------------------------------------\ncacert ./scripts/tls-files/ca.crt \\\ncert ./scripts/tls-files/client.pem \\\n | Provide an alternative UI (ReDoc) for rendering Swagger documentation.\n\n The API\n"},"code":{"kind":"string","value":"# LANGUAGE 
FlexibleContexts #\n# LANGUAGE FlexibleInstances #\n# LANGUAGE LambdaCase #\n# LANGUAGE UndecidableInstances #\n# LANGUAGE ViewPatterns #\n# OPTIONS_GHC -fno - warn - orphans #\nmodule Cardano.Wallet.API.V1.Swagger where\n\nimport Universum hiding (get, put)\n\nimport Cardano.Wallet.API.Indices (ParamNames)\nimport Cardano.Wallet.API.Request.Filter\nimport Cardano.Wallet.API.Request.Pagination\nimport Cardano.Wallet.API.Request.Sort\nimport Cardano.Wallet.API.Response\nimport Cardano.Wallet.API.V1.Generic (gconsName)\nimport Cardano.Wallet.API.V1.Parameters\nimport Cardano.Wallet.API.V1.Swagger.Example\nimport Cardano.Wallet.API.V1.Types\nimport Cardano.Wallet.TypeLits (KnownSymbols (..))\n\nimport Pos.Chain.Update (SoftwareVersion (svNumber))\nimport Pos.Core.NetworkMagic (NetworkMagic (..))\nimport Pos.Util.CompileInfo (CompileTimeInfo, ctiGitRevision)\nimport Pos.Util.Servant (LoggingApi)\n\nimport Control.Lens (At, Index, IxValue, at, (?~))\nimport Data.Aeson (encode)\nimport Data.Aeson.Encode.Pretty\nimport Data.Map (Map)\nimport Data.Swagger hiding (Example)\nimport Data.Typeable\nimport Formatting (build, sformat)\nimport NeatInterpolation\nimport Servant (Handler, ServantErr (..), Server, StdMethod (..))\nimport Servant.API.Sub\nimport Servant.Swagger\nimport Servant.Swagger.UI (SwaggerSchemaUI')\nimport Servant.Swagger.UI.Core (swaggerSchemaUIServerImpl)\nimport Servant.Swagger.UI.ReDoc (redocFiles)\n\nimport qualified Data.ByteString.Lazy as BL\nimport qualified Data.Map.Strict as M\nimport qualified Data.Text as T\nimport qualified Data.Text.Encoding as T\nimport qualified Pos.Core as Core\nimport qualified Pos.Core.Attributes as Core\nimport qualified Pos.Crypto.Hashing as Crypto\n\n\n\nsurroundedBy :: Text -> Text -> Text\nsurroundedBy wrap context = wrap <> context <> wrap\n\ninlineCodeBlock :: Text -> Text\ninlineCodeBlock txt = \"
\" <> replaceNewLines (replaceWhiteSpaces txt) <> \"
\"\n where\n replaceNewLines = T.replace \"\\n\" \"
\"\n replaceWhiteSpaces = T.replace \" \" \"&nbsp;\"\n\n\n & paths % ~ ( POST , \" /api / v1 / wallets \" ) ` alterOperation ` ( description ? ~ \" foo \" )\n & paths % ~ ( GET , \" /api / v1 / wallets/{walletId } \" ) ` alterOperation ` ( description ? ~ \" bar \" )\nalterOperation ::\n ( IxValue m ~ item\n , Index m ~ FilePath\n , At m\n , HasGet item (Maybe Operation)\n , HasPut item (Maybe Operation)\n , HasPatch item (Maybe Operation)\n , HasPost item (Maybe Operation)\n , HasDelete item (Maybe Operation)\n )\n => (StdMethod, FilePath)\n -> (Operation -> Operation)\n -> m\n -> m\nalterOperation (verb, path) alter =\n at path %~ (Just . unsafeAlterItem)\n where\n errUnreachableEndpoint :: Text\n errUnreachableEndpoint =\n \"Unreachable endpoint: \" <> show verb <> \" \" <> show path\n\n errUnsupportedVerb :: Text\n errUnsupportedVerb =\n \"Used unsupported verb to identify an endpoint: \" <> show verb\n\n unsafeAlterItem ::\n ( HasGet item (Maybe Operation)\n , HasPut item (Maybe Operation)\n , HasPatch item (Maybe Operation)\n , HasPost item (Maybe Operation)\n , HasDelete item (Maybe Operation)\n )\n => Maybe item\n -> item\n unsafeAlterItem = maybe\n (error errUnreachableEndpoint)\n (unsafeLensFor verb %~ (Just . unsafeAlterOperation))\n\n unsafeAlterOperation :: Maybe Operation -> Operation\n unsafeAlterOperation = maybe\n (error errUnreachableEndpoint)\n alter\n\n unsafeLensFor ::\n ( Functor f\n , HasGet item (Maybe Operation)\n , HasPut item (Maybe Operation)\n , HasPatch item (Maybe Operation)\n , HasPost item (Maybe Operation)\n , HasDelete item (Maybe Operation)\n )\n => StdMethod\n -> (Maybe Operation -> f (Maybe Operation))\n -> item\n -> f item\n unsafeLensFor = \\case\n GET -> get\n PUT -> put\n PATCH -> patch\n POST -> post\n DELETE -> delete\n _ -> error errUnsupportedVerb\n\n\n & paths % ~ ( POST , \" /api / v1 / wallets \" ) ` setDescription ` \" foo \"\n & paths % ~ ( GET , \" /api / v1 / wallets/{walletId } \" ) ` setDescription ` \" bar \"\nsetDescription\n :: (IxValue m ~ PathItem, Index m ~ FilePath, At m)\n => (StdMethod, FilePath)\n -> Text\n -> m\n -> m\nsetDescription endpoint str =\n endpoint `alterOperation` (description ?~ str)\n\n\n\ninstance HasSwagger a => HasSwagger (LoggingApi config a) where\n toSwagger _ = toSwagger (Proxy @a)\n\ninstance\n ( Typeable res\n , KnownSymbols syms\n , HasSwagger subApi\n , syms ~ ParamNames res params\n ) => HasSwagger (FilterBy params res :> subApi) where\n toSwagger _ =\n let swgr = toSwagger (Proxy @subApi)\n allOps = map toText $ symbolVals (Proxy @syms)\n in swgr & over (operationsOf swgr . parameters) (addFilterOperations allOps)\n where\n addFilterOperations :: [Text] -> [Referenced Param] -> [Referenced Param]\n addFilterOperations ops xs = map (Inline . newParam) ops <> xs\n\n newParam :: Text -> Param\n newParam opName =\n let typeOfRes = fromString $ show $ typeRep (Proxy @ res)\n in Param {\n _paramName = opName\n , _paramRequired = Nothing\n , _paramDescription = Just $ filterDescription typeOfRes\n , _paramSchema = ParamOther ParamOtherSchema {\n _paramOtherSchemaIn = ParamQuery\n , _paramOtherSchemaAllowEmptyValue = Nothing\n , _paramOtherSchemaParamSchema = mempty\n }\n }\n\nfilterDescription :: Text -> Text\nfilterDescription typeOfRes = mconcat\n [ \"A **FILTER** operation on a \" <> typeOfRes <> \". \"\n , \"Filters support a variety of queries on the resource. 
\"\n , \"These are: \\n\\n\"\n , \"- `EQ[value]` : only allow values equal to `value`\\n\"\n , \"- `LT[value]` : allow resource with attribute less than the `value`\\n\"\n , \"- `GT[value]` : allow objects with an attribute greater than the `value`\\n\"\n , \"- `GTE[value]` : allow objects with an attribute at least the `value`\\n\"\n , \"- `LTE[value]` : allow objects with an attribute at most the `value`\\n\"\n , \"- `RANGE[lo,hi]` : allow objects with the attribute in the range between `lo` and `hi`\\n\"\n , \"- `IN[a,b,c,d]` : allow objects with the attribute belonging to one provided.\\n\\n\"\n ]\n\ninstance\n ( Typeable res\n , KnownSymbols syms\n , syms ~ ParamNames res params\n , HasSwagger subApi\n ) => HasSwagger (SortBy params res :> subApi) where\n toSwagger _ =\n let swgr = toSwagger (Proxy @subApi)\n in swgr & over (operationsOf swgr . parameters) addSortOperation\n where\n addSortOperation :: [Referenced Param] -> [Referenced Param]\n addSortOperation xs = Inline newParam : xs\n\n newParam :: Param\n newParam =\n let typeOfRes = fromString $ show $ typeRep (Proxy @ res)\n allowedKeys = T.intercalate \",\" (map toText $ symbolVals (Proxy @syms))\n in Param {\n _paramName = \"sort_by\"\n , _paramRequired = Just False\n , _paramDescription = Just (sortDescription typeOfRes allowedKeys)\n , _paramSchema = ParamOther ParamOtherSchema {\n _paramOtherSchemaIn = ParamQuery\n , _paramOtherSchemaAllowEmptyValue = Just True\n , _paramOtherSchemaParamSchema = mempty\n }\n }\n\ninstance (HasSwagger subApi) => HasSwagger (WalletRequestParams :> subApi) where\n toSwagger _ =\n let swgr = toSwagger (Proxy @(WithWalletRequestParams subApi))\n in swgr & over (operationsOf swgr . parameters) (map toDescription)\n where\n toDescription :: Referenced Param -> Referenced Param\n toDescription (Inline p@(_paramName -> pName)) =\n case M.lookup pName requestParameterToDescription of\n Nothing -> Inline p\n Just d -> Inline (p & description .~ Just d)\n toDescription x = x\n\ninstance ToParamSchema WalletId\n\ninstance ToSchema Core.Address where\n declareNamedSchema = pure . paramSchemaToNamedSchema defaultSchemaOptions\n\ninstance ToParamSchema Core.Address where\n toParamSchema _ = mempty\n & type_ .~ SwaggerString\n\ninstance ToParamSchema (V1 Core.Address) where\n toParamSchema _ = toParamSchema (Proxy @Core.Address)\n\n\n Descriptions\n\ncustomQueryFlagToDescription :: Map T.Text T.Text\ncustomQueryFlagToDescription = M.fromList [\n (\"force_ntp_check\", forceNtpCheckDescription)\n ]\n\nrequestParameterToDescription :: Map T.Text T.Text\nrequestParameterToDescription = M.fromList [\n (\"page\", pageDescription)\n , (\"per_page\", perPageDescription (fromString $ show maxPerPageEntries) (fromString $ show defaultPerPageEntries))\n ]\n\nforceNtpCheckDescription :: T.Text\nforceNtpCheckDescription = [text|\nIn some cases, API Clients need to force a new NTP check as a previous result gets cached. A typical use-case is after asking a user to fix its system clock. If this flag is set, request will block until NTP server responds or it will timeout if NTP server is not available within a short delay.\n|]\n\npageDescription :: T.Text\npageDescription = [text|\nThe page number to fetch for this request. The minimum is **1**. If nothing is specified, **this value defaults to 1** and always shows the first entries in the requested collection.\n|]\n\nperPageDescription :: T.Text -> T.Text -> T.Text\nperPageDescription maxValue defaultValue = [text|\nThe number of entries to display for each page. 
The minimum is **1**, whereas the maximum is **$maxValue**. If nothing is specified, **this value defaults to $defaultValue**.\n|]\n\nsortDescription :: Text -> Text -> Text\nsortDescription resource allowedKeys = [text|\nA **SORT** operation on this $resource. Allowed keys: `$allowedKeys`.\n|]\n\nerrorsDescription :: Text\nerrorsDescription = [text|\nError Name / Description | HTTP Error code | Example\n$errors\n|] where\n errors = T.intercalate \"\\n\" rows\n rows =\n [ mkRow fmtErr $ NotEnoughMoney (ErrAvailableBalanceIsInsufficient 1400)\n , mkRow fmtErr $ OutputIsRedeem sampleAddress\n , mkRow fmtErr $ UnknownError \"Unexpected internal error.\"\n , mkRow fmtErr $ InvalidAddressFormat \"Provided address format is not valid.\"\n , mkRow fmtErr WalletNotFound\n , mkRow fmtErr $ WalletAlreadyExists exampleWalletId\n , mkRow fmtErr AddressNotFound\n , mkRow fmtErr $ InvalidPublicKey \"Extended public key (for external wallet) is invalid.\"\n , mkRow fmtErr UnsignedTxCreationError\n , mkRow fmtErr $ SignedTxSubmitError \"Unable to submit externally-signed transaction.\"\n , mkRow fmtErr TooBigTransaction\n , mkRow fmtErr TxFailedToStabilize\n , mkRow fmtErr TxRedemptionDepleted\n , mkRow fmtErr $ TxSafeSignerNotFound sampleAddress\n , mkRow fmtErr $ MissingRequiredParams ((\"wallet_id\", \"walletId\") :| [])\n , mkRow fmtErr $ WalletIsNotReadyToProcessPayments genExample\n , mkRow fmtErr $ NodeIsStillSyncing genExample\n , mkRow fmtErr $ CannotCreateAddress \"Cannot create derivation path for new address in external wallet.\"\n , mkRow fmtErr $ RequestThrottled 42\n\n , mkRow fmtErr $ JSONValidationFailed \"Expected String, found Null.\"\n\n , mkRow fmtErr $ UnsupportedMimeTypePresent \"Expected Content-Type's main MIME-type to be 'application/json'.\"\n , mkRow fmtErr $ UtxoNotEnoughFragmented (ErrUtxoNotEnoughFragmented 1 msgUtxoNotEnoughFragmented)\n ]\n mkRow fmt err = T.intercalate \"|\" (fmt err)\n fmtErr err =\n [ surroundedBy \"`\" (gconsName err) <> \"
\" <> toText (sformat build err)\n , show $ errHTTPCode $ toServantError err\n , inlineCodeBlock (T.decodeUtf8 $ BL.toStrict $ encodePretty err)\n ]\n\n sampleAddress = V1 Core.Address\n { Core.addrRoot =\n Crypto.unsafeAbstractHash (\"asdfasdf\" :: String)\n , Core.addrAttributes =\n Core.mkAttributes $ Core.AddrAttributes Nothing Core.BootstrapEraDistr NetworkMainOrStage\n , Core.addrType =\n Core.ATPubKey\n }\n\n\n | Shorter version of the doc below , only for Dev & V0 documentations\nhighLevelShortDescription :: DescriptionEnvironment -> T.Text\nhighLevelShortDescription DescriptionEnvironment{..} = [text|\nThis is the specification for the Sealchain Wallet API, automatically generated as a [Swagger](/) spec from the [Servant](-servant.readthedocs.io/en/stable/) API of [Sealchain](-project/sealchain).\n\nProtocol Version | Git Revision\n$deSoftwareVersion | $deGitRevision\n|]\n\n\nhighLevelDescription :: DescriptionEnvironment -> T.Text\nhighLevelDescription DescriptionEnvironment{..} = [text|\nThis is the specification for the Sealchain Wallet API, automatically generated as a [Swagger](/) spec from the [Servant](-servant.readthedocs.io/en/stable/) API of [Sealchain](-project/sealchain).\n\nProtocol Version | Git Revision\n$deSoftwareVersion | $deGitRevision\n\n\nGetting Started\n===============\n\nIn the following examples, we will use *curl* to illustrate request to an API running on the default port **8090**.\n\nPlease note that wallet web API uses TLS for secure communication. Requests to the API need to\nsend a client CA certificate that was used when launching the node and identifies the client as\nbeing permitted to invoke the server API.\n\nCreating a New Wallet\n\nYou can create your first wallet using the [`POST /api/v1/wallets`](#tag/Wallets%2Fpaths%2F~1api~1v1~1wallets%2Fpost) endpoint as follow:\n\n```\ncurl -X POST :8090/api/v1/wallets \\\n -H \"Accept: application/json; charset=utf-8\" \\\n -H \"Content-Type: application/json; charset=utf-8\" \\\n -d '{\n \"operation\": \"create\",\n \"backupPhrase\": $deMnemonicExample,\n \"assuranceLevel\": \"normal\",\n \"name\": \"MyFirstWallet\",\n \"spendingPassword\": \"5416b2988745725998907addf4613c9b0764f04959030e1b81c603b920a115d0\"\n}'\n```\n\n> **Warning**: Those 12 mnemonic words given for the backup phrase act as an example. **Do\n> not** use them on a production system. See the section below about mnemonic codes for more\n> information.\n\nThe `spendingPassword` is optional but highly recommended. It a string of 32\ncharacters, encoded in base 16, yielding to an hexadecimal sequence of 64 bytes.\nThis passphrase is required for sensitive operations on the wallet and adds\nan extra security layer to it.\n\nTo generate a valid `spendingPassword`, please follow the following steps:\n\n- Pick a long sentence using a wide variety of characters (uppercase, lowercase,\n whitespace, punctuation, etc). Using a computer to randomly generate\n a passphrase is best, as humans aren't a good source of randomness.\n\n- Compute an appropriate hash of this passphrase. You'll need to use an\n algorithm that yields a 32-byte long string (e.g. *SHA256* or *BLAKE2b*).\n\n- Hex-encode the 32-byte hash into a 64-byte sequence of bytes.\n\nAs a response, the API provides you with a unique wallet `id` to be used in subsequent\nrequests. Make sure to store it / write it down. Note that every API response is\n[jsend-compliant](); Sealchain also augments responses with\nmeta-data specific to pagination. 
More details in the section below about [Pagination](#section/Pagination)\n\n```json\n$createWallet\n```\n\nYou have just created your first wallet. Information about this wallet can be retrieved using the [`GET /api/v1/wallets/{walletId}`](#tag/Wallets%2Fpaths%2F~1api~1v1~1wallets~1{walletId}%2Fget)\nendpoint as follows:\n\n```\ncurl -X GET :8090/api/v1/wallets/{{walletId}} \\\n -H \"Accept: application/json; charset=utf-8\" \\\n```\n\nReceiving SEAL (or GD)\n\nTo receive _SEAL_ (or GD) from other users you should provide your address. This address can be obtained\nfrom an account. Each wallet contains at least one account. An account is like a pocket inside\nof your wallet. Vew all existing accounts of a wallet by using the [`GET /api/v1/wallets/{{walletId}}/accounts`](#tag/Accounts%2Fpaths%2F~1api~1v1~1wallets~1{walletId}~1accounts%2Fget)\nendpoint:\n\n```\ncurl -X GET :8090/api/v1/wallets/{{walletId}}/accounts?page=1&per_page=10 \\\n -H \"Accept: application/json; charset=utf-8\" \\\n```\n\nSince you have, for now, only a single wallet, you'll see something like this:\n\n```json\n$readAccounts\n```\n\nAll the wallet's accounts are listed under the `addresses` field. You can communicate one of\nthese addresses to receive _SEAL_(or GD) on the associated account.\n\n\nSending SEAL(or GD)\n\nIn order to send _SEAL_(or GD) from one of your accounts to another address, you must create a new\npayment transaction using the [`POST /api/v1/transactions`](#tag/Transactions%2Fpaths%2F~1api~1v1~1transactions~1payment%2Fpost)\nendpoint as follows:\n\n```\ncurl -X POST :8090/api/v1/transactions/payment \\\n -H \"Accept: application/json; charset=utf-8\" \\\n -H \"Content-Type: application/json; charset=utf-8\" \\\n -d '{\n \"destinations\": [{\n \"amount\": {\n \"coins\": 100000000,\n \"gds\": 100\n }\n \"address\": \"A7k5bz1QR2...Tx561NNmfF\"\n }],\n \"source\": {\n \"accountIndex\": 0,\n \"walletId\": \"Ae2tdPwUPE...8V3AVTnqGZ\"\n },\n \"spendingPassword\": \"5416b2988745725998907addf4613c9b0764f04959030e1b81c603b920a115d0\"\n}'\n```\n\nNote that, in order to perform a transaction, you need to have enough existing _SEAL_(or GD) on the\nsource account! The Sealchain API is designed to accomodate multiple recipients payments\nout-of-the-box; notice how `destinations` is a list of addresses (and corresponding amounts).\n\nWhen the transaction succeeds, funds are no longer available in the sources addresses, and are\nsoon made available to the destinations within a short delay. Note that, you can at any time see\nthe status of your wallets by using the [`GET /api/v1/transactions/payment`](#tag/Transactions%2Fpaths%2F~1api~1v1~1transactions%2Fget)\nendpoint as follows:\n\n```\ncurl -X GET :8090/api/v1/transactions?wallet_id=Ae2tdPwUPE...8V3AVTnqGZ\\\n -H \"Accept: application/json; charset=utf-8\" \\\n```\n\nHere we constrained the request to a specific account. 
After our previous transaction the output\nshould look roughly similar to this:\n\n```json\n$readTransactions\n```\n\nIn addition, and because it is not possible to _preview_ a transaction, one can lookup a\ntransaction's fees using the [`POST /api/v1/transactions/fees`](#tag/Transactions%2Fpaths%2F~1api~1v1~1transactions~1fees%2Fpost)\nendpoint to get an estimation of those fees.\nSee [Estimating Transaction Fees](#section/Common-Use-Cases/Estimating-Transaction-Fees) for more details.\n\nIssue GD\n\nTo increase or decrease GD total supply, The issuer (the GD operator) can create a new\npayment transaction using the [`POST /api/v1/transactions`](#tag/Transactions%2Fpaths%2F~1api~1v1~1transactions~1issurance%2Fpost)\nendpoint as follows:\n\n```\ncurl -X POST :8090/api/v1/transactions/issurance \\\n -H \"Accept: application/json; charset=utf-8\" \\\n -H \"Content-Type: application/json; charset=utf-8\" \\\n -d '{\n \"info\": {\n \"increment\": 10000000,\n },\n \"source\": {\n \"accountIndex\": 0,\n \"walletId\": \"Ae2tdPwUPE...8V3AVTnqGZ\"\n },\n \"spendingPassword\": \"5416b2988745725998907addf4613c9b0764f04959030e1b81c603b920a115d0\"\n}'\n```\n\n\nPagination\n==========\n\n**All GET requests of the API are paginated by default**. Whilst this can be a source of\nsurprise, is the best way of ensuring the performance of GET requests is not affected by the\nsize of the data storage.\n\nVersion `V1` introduced a different way of requesting information to the API. In particular,\nGET requests which returns a _collection_ (i.e. typically a JSON array of resources) lists\nextra parameters which can be used to modify the shape of the response. In particular, those\nare:\n\n* `page`: (Default value: **1**).\n* `per_page`: (Default value: **$deDefaultPerPage**)\n\nFor a more accurate description, see the section `Parameters` of each GET request, but as a\nbrief overview the first two control how many results and which results to access in a\npaginated request.\n\n\nFiltering and Sorting\n=====================\n\n`GET` endpoints which list collection of resources supports filters & sort operations, which\nare clearly marked in the swagger docs with the `FILTER` or `SORT` labels. The query format is\nquite simple, and it goes this way:\n\n\nFilter Operators\n\n| Operator | Description | Example |\n\n| - | If **no operator** is passed, this is equivalent to `EQ` (see below). | `balance=10` |\n| `EQ` | Retrieves the resources with index _equal_ to the one provided. | `balance=EQ[10]` |\n| `LT` | Retrieves the resources with index _less than_ the one provided. | `balance=LT[10]` |\n| `LTE` | Retrieves the resources with index _less than equal_ the one provided. | `balance=LTE[10]` |\n| `GT` | Retrieves the resources with index _greater than_ the one provided. | `balance=GT[10]` |\n| `GTE` | Retrieves the resources with index _greater than equal_ the one provided. | `balance=GTE[10]` |\n| `RANGE` | Retrieves the resources with index _within the inclusive range_ [k,k]. | `balance=RANGE[10,20]` |\n\nSort Operators\n\n| Operator | Description | Example |\n\n| `ASC` | Sorts the resources with the given index in _ascending_ order. | `sort_by=ASC[balance]` |\n| `DES` | Sorts the resources with the given index in _descending_ order. | `sort_by=DES[balance]` |\n| - | If **no operator** is passed, this is equivalent to `DES` (see above). 
| `sort_by=balance` |\n\n\nErrors\n======\n\nIn case a request cannot be served by the API, a non-2xx HTTP response will be issued, together\nwith a [JSend-compliant]() JSON Object describing the error\nin detail together with a numeric error code which can be used by API consumers to implement\nproper error handling in their application. For example, here's a typical error which might be\nissued:\n\n``` json\n$deErrorExample\n```\n\nExisting Wallet Errors\n\n$deWalletErrorTable\n\n\nMonetary Denomination & Units\n=============================\n\nSealchain's platform currency is called _SEAL_. _SEAL_ has up to **8** decimal places; hence the\nsmallest monetary unit that can be represented in the Seaichain's blockhain is: 0.00000001. \n\nSealchain originaly includes stablecoin called GD (GoldDollar), GD has up to **2** decimal places.\n\n> **Warning**: All amounts manipulated in the API are given and expected in smallest monetary unit.\n\n\nMnemonic Codes\n==============\n\nThe full list of accepted mnemonic codes to secure a wallet is defined by the [BIP-39\nspecifications](-0039.mediawiki). Note that\npicking up 12 random words from the list **is not enough** and leads to poor security. Make\nsure to carefully follow the steps described in the protocol when you generate words for a new\nwallet.\n\n\nVersioning & Legacy\n===================\n\nThe API is **versioned**, meaning that is possible to access different versions of the API by adding the _version number_ in the URL.\n\n**For the sake of backward compatibility, we expose the legacy version of the API, available simply as unversioned endpoints.**\n\nThis means that _omitting_ the version number would call the old version of the API. Deprecated\nendpoints are currently grouped under an appropriate section; they would be removed in upcoming\nreleased, if you're starting a new integration with Sealchain, please ignore these.\n\nNote that Compatibility between major versions is not _guaranteed_, i.e. the request & response formats might differ.\n\n\nDisable TLS (Not Recommended)\n\n\n\nCommon Use-Cases\n================\n\nSending Money to Multiple Recipients\n\nAs seen in [Sending SEAL](#section/Getting-Started/Sending-SEAL), you can send _SEAL_ to\nanother party using the [`POST /api/v1/transactions`](#tag/Transactions%2Fpaths%2F~1api~1v1~1transactions%2Fpost) endpoint.\nImportant to notice is the type of the field `destinations`: it's a list, enabling you to provide more\nthan one destination. Each destination is composed of:\n\n- An address\n- A corresponding amount\n\nThe overall transaction corresponds to the sum of each outputs. For instance, to send money to\ntwo parties simultaneously:\n\n```\ncurl -X POST :8090/api/v1/transactions \\\n -H \"Accept: application/json; charset=utf-8\" \\\n -H \"Content-Type: application/json; charset=utf-8\" \\\n -d '{\n \"destinations\": [\n {\n \"amount\": 14,\n \"address\": \"A7k5bz1QR2...Tx561NNmfF\"\n },\n {\n \"amount\": 42,\n \"address\": \"B56n78WKE8...jXAa34NUFz\"\n }\n ],\n \"source\": {\n \"accountIndex\": 0,\n \"walletId\": \"Ae2tdPwUPE...8V3AVTnqGZ\"\n },\n \"spendingPassword\": \"5416b2988745725998907addf4613c9b0764f04959030e1b81c603b920a115d0\"\n}'\n```\n\n\nAbout UTXO Fragmentation\n\nAs described in [Sending Money to Multiple Recipients](#section/Common-Use-Cases/Sending-Money-to-Multiple-Recipients), it is possible to send ada to more than one destination. Sealchain only allows a given UTXO to cover at most one single transaction output. 
As a result,\nwhen the number of transaction outputs is greater than the number the API returns a `UtxoNotEnoughFragmented` error which\nlooks like the following\n```\n{\n \"status\": \"error\",\n \"diagnostic\": {\n \"details\": {\n \"help\": \"Utxo is not enough fragmented to handle the number of outputs of this transaction. Query /api/v1/wallets/{walletId}/statistics/utxos endpoint for more information\",\n \"missingUtxos\": 1\n }\n },\n \"message\": \"UtxoNotEnoughFragmented\"\n}\n```\n\nTo make sure the source account has a sufficient level of UTXO fragmentation (i.e. number of UTXOs),\nplease monitor the state of the UTXOs as described in [Getting UTXO Statistics](#section/Common-Use-Cases/Getting-Utxo-Statistics). The\nnumber of wallet UTXOs should be no less than the transaction outputs, and the sum of all UTXOs should be enough to cover the total\ntransaction amount, including fees.\n\nContrary to a classic accounting model, there's no such thing as spending _part of a UTXO_, and one has to wait for a transaction to be included in a\nblock before spending the remaining change. This is very similar to using bank notes: one can't spend a USD 20 bill at two different shops at the same time,\neven if it is enough to cover both purchases — one has to wait for change from the first transaction before making the second one.\nThere's no \"ideal\" level of fragmentation; it depends on one's needs. However, the more UTXOs that are available, the higher the concurrency capacity\nof one's wallet, allowing multiple transactions to be made at the same time.\n\nSimilarly, there's no practical maximum number of UTXOs, but there is nevertheless a maximum transaction size. By having many small UTXOs,\none is taking the risk of hitting that restriction, should too many inputs be selected to fill a transaction. The only way to\nwork around this is to make multiple smaller transactions.\n\nEstimating Transaction Fees\n\nWhen you submit a transaction to the network, some fees apply depending on, but not only, the\nselected grouping policy and the available inputs on the source wallet. There's actually a\ntrade-off between fees, cryptographic security, throughput and privacy. The more inputs are\nselected, the bigger is the payload, the bigger are the fees.\n\nThe API lets you estimate fees for a given transaction via the [`POST /api/v1/transaction/fees`](#tag/Transactions%2Fpaths%2F~1api~1v1~1transactions~1fees%2Fpost)\nendpoint. The request payload is identical to the one you would make to create a transaction:\n\n```\ncurl -X POST :8090/api/v1/transactions/fees \\\n -H \"Accept: application/json; charset=utf-8\" \\\n -H \"Content-Type: application/json; charset=utf-8\" \\\n -d '{\n \"destinations\": [{\n \"amount\": 14,\n \"address\": \"A7k5bz1QR2...Tx561NNmfF\"\n }],\n \"source\": {\n \"accountIndex\": 0,\n \"walletId\": \"Ae2tdPwUPE...8V3AVTnqGZ\"\n }\n}'\n```\n\nThe API resolves with an estimated amount in _SEAL_. This estimation highly depends on the\ncurrent state of the ledger and diverges with time.\n\n```json\n$readFees\n```\n\n\nManaging Accounts\n\nA wallet isn't limited to one account. It can actually be useful to have more than one account\nin order to separate business activities. With the API, you can retrieve a specific account,\ncreate new ones, list all existing accounts of a wallet or edit a few things on an existing\naccount. By default, your wallet comes with a provided account. 
Let's see how to create a fresh\nnew account on a wallet using [`POST /api/v1/wallets/{{walletId}}/accounts`](#tag/Accounts%2Fpaths%2F~1api~1v1~1wallets~1{walletId}~1accounts%2Fpost):\n\n```\ncurl -X POST \\\n :8090/api/v1/Ae2tdPwUPE...8V3AVTnqGZ/accounts \\\n -H 'Content-Type: application/json;charset=utf-8' \\\n -H 'Accept: application/json;charset=utf-8' \\\n -d '{\n \"name\": \"MyOtherAccount\",\n \"spendingPassword\": \"5416b2988745725998907addf4613c9b0764f04959030e1b81c603b920a115d0\"\n}'\n```\n\nNote that the `spendingPassword` here should match the one provided earlier in [Creating a\nNew Wallet](#section/Getting-Started/Creating-a-New-Wallet).\n\n\n```json\n$createAccount\n```\n\nYou can always retrieve this account description later if needed via [`GET /api/v1/wallets/{{walletId}}/accounts/{{accountId}}`](#tag/Accounts%2Fpaths%2F~1api~1v1~1wallets~1{walletId}~1accounts~1{accountId}%2Fget).\n\nFor example:\n\n```\ncurl -X GET \\\n :8090/api/v1/wallets/Ae2tdPwUPE...8V3AVTnqGZ/accounts/2902829384 \\\n -H 'Accept: application/json;charset=utf-8' \\\n```\n\nFor a broader view, the full list of accounts of a given wallet can be retrieved using [`GET /api/v1/wallets/{{walletId}}/accounts`](#tag/Accounts%2Fpaths%2F~1api~1v1~1wallets~1{walletId}~1accounts%2Fget)\n```\ncurl -X GET \\\n :8090/api/v1/wallets/Ae2tdPwUPE...8V3AVTnqGZ/accounts \\\n -H 'Accept: application/json;charset=utf-8' \\\n```\n\n```json\n$readAccounts\n```\n\nPartial Representations\n\nThe previous endpoint gives you a list of full representations. However, in some cases, it might be interesting to retrieve only a partial representation of an account (e.g. only the balance). There are two extra endpoints one could use to either fetch a given account's balance, and another to retrieve the list of addresses associated to a specific account.\n\n[`GET /api/v1/wallets/{{walletId}}/accounts/{{accountId}}/addresses`](#tag/Accounts%2Fpaths%2F~1api~1v1~1wallets~1%7BwalletId%7D~1accounts~1%7BaccountId%7D~1addresses%2Fget)\n\n```json\n$readAccountAddresses\n```\n\nNote that this endpoint is paginated and allow basic filtering and sorting on\naddresses. Similarly, you can retrieve only the account balance with:\n\n[`GET /api/v1/wallets/{{walletId}}/accounts/{{accountId}}/amount`](#tag/Accounts%2Fpaths%2F~1api~1v1~1wallets~1%7BwalletId%7D~1accounts~1%7BaccountId%7D~1amount%2Fget)\n\n\n```json\n$readAccountBalance\n```\n\n\nManaging Addresses\n\nBy default, wallets you create are provided with an account which has one default address. It\nis possible (and recommended) for an account to manage multiple addresses. Address reuse\nactually reduces privacy for it tights more transactions to a small set of addresses.\n\nWhen paying, the wallet makes many of these choices for you. Addresses are\nselected from a wallet's account based on several different strategies and\npolicies.\n\nTo create a new address, use the [`POST /api/v1/addresses`](#tag/Addresses%2Fpaths%2F~1api~1v1~1addresses%2Fpost)\nendpoint:\n\n```\ncurl -X POST \\\n :8090/api/v1/addresses \\\n -H 'Content-Type: application/json;charset=utf-8' \\\n -H 'Accept: application/json;charset=utf-8' \\\n -d '{\n \"walletId\": \"Ae2tdPwUPE...V3AVTnqGZ4\",\n \"accountIndex\": 2147483648\n}'\n```\n\n```json\n$createAddress\n```\n\nIf your wallet is protected with a password, this password is also required in order to create\nnew addresses for that wallet. 
In such case, the field `spendingPassword` should match the one\ndefined earlier to protect your wallet.\n\nAddresses generated as just described are always valid. When the API encounters\nan invalid address however (e.g. when provided by another party), it will fail with a\nclient error.\n\nYou can always view all your available addresses across all your wallets by using\n[`GET /api/v1/addresses`](#tag/Addresses%2Fpaths%2F~1api~1v1~1addresses%2Fget):\n\n```\ncurl -X GET :8090/api/v1/addresses \\\n -H 'Accept: application/json;charset=utf-8' \\\n```\n\n```json\n$readAddresses\n```\n\nChecking Synchronization Progress\n\nYou can control the synchronization progress of the underlying node hosting the wallet's server\nvia [`GET /api/v1/node-info`](#tag/Info%2Fpaths%2F~1api~1v1~1node-info%2Fget). The output is\nrather verbose and gives real-time progress updates about the current node.\n\n```\ncurl -X GET :8090/api/v1/node-info \\\n -H 'Accept: application/json;charset=utf-8' \\\n```\n\n```json\n$readNodeInfo\n```\n\n\nRetrieving Transaction History\n\nIf needed, applications may regularly poll the wallet's backend to retrieve the history of\ntransactions of a given wallet. Using the [`GET /api/v1/transactions`](#tag/Transactions%2Fpaths%2F~1api~1v1~1transactions%2Fget)\nendpoint, you can view the status of all transactions that ever sent or took money from the\nwallet.\n\nThe following table sums up the available filters (also detailed in the endpoint documentation details):\n\nFilter On | Corresponding Query Parameter(s)\nWallet | `wallet_id`\nWallet's account | `account_index` + `wallet_id`\nAddress | `address`\nTransaction's creation time | `created_at`\nTransaction's id | `id`\n\nFor example, in order to retrieve the last 50 transactions of a particular account,\nordered by descending date:\n\n```\ncurl -X GET :8090/api/v1/transactions?wallet_id=Ae2tdPwU...3AVTnqGZ&account_index=2902829384&sort_by=DES\\[created_at\\]&per_page=50' \\\n -H 'Accept: application/json;charset=utf-8' \\\n```\nFor example, in order to retrieve the last 50 transactions, ordered by descending date:\n\n```\ncurl -X GET ':8090/api/v1/transactions?wallet_id=Ae2tdPwU...3AVTnqGZ &sort_by=DES\\[created_at\\]&per_page=50' \\\n -H 'Accept: application/json;charset=utf-8' \\\n```\n\n\nAnother example, if you were to look for all transactions made since the 1st of January 2018:\n\n```\ncurl -X GET ':8090/api/v1/transactions?wallet_id=Ae2tdPwU...3AVTnqGZ&created_at=GT\\[2018-01-01T00:00:00.00000\\]' \\\n -H 'Accept: application/json;charset=utf-8' \\\n```\n\n\nGetting Utxo statistics\n\nYou can get Utxo statistics of a given wallet using\n [`GET /api/v1/wallets/{{walletId}}/statistics/utxos`](#tag/Accounts%2Fpaths%2F~1api~1v1~1wallets~1{walletId}~1statistics~1utxos%2Fget)\n\n```\ncurl -X GET \\\n :8090/api/v1/wallets/Ae2tdPwUPE...8V3AVTnqGZ/statistics/utxos \\\n -H 'Accept: application/json;charset=utf-8' \\\n```\n\n```json\n$readUtxoStatistics\n```\nMake sure to carefully read the section about [Pagination](#section/Pagination) to fully\nleverage the API capabilities.\n\n\nImporting (Unused) Addresses From a Previous Node (or Version)\n\nWhen restoring a wallet, only the information available on the blockchain can\nbe retrieved. Some pieces of information aren't stored on\nthe blockchain and are only defined as _Metadata_ of the wallet backend. 
This\nincludes:\n\n- The wallet's name\n- The wallet's assurance level\n- The wallet's spending password\n- The wallet's unused addresses\n\nUnused addresses are not recorded on the blockchain and, in the case of random\nderivation, it is unlikely that the same addresses will be generated on two\ndifferent node instances. However, some API users may wish to preserve unused\naddresses between different instances of the wallet backend.\n\nTo enable this, the wallet backend provides an endpoint ([`POST /api/v1/wallets/{{walletId}}/addresses`](#tag/Addresses%2Fpaths%2F~1api~1v1~1wallets~1{walletId}~1addresses%2Fpost))\nto import a list of addresses into a given account. Note that this endpoint is\nquite lenient when it comes to errors: it tries to import all provided addresses\none by one, and ignores any that can't be imported for whatever reason. The\nserver will respond with the total number of successes and, if any, a list of\naddresses that failed to be imported. Trying to import an address that is already\npresent will behave as a no-op.\n\nFor example:\n\n```\ncurl -X POST \\\n :8090/api/v1/wallets/Ae2tdPwUPE...8V3AVTnqGZ/addresses \\\n -H 'Accept: application/json;charset=utf-8' \\\n -d '[\n \"Ae2tdPwUPE...8V3AVTnqGZ\",\n \"Ae2odDwvbA...b6V104CTV8\"\n ]'\n```\n\n> **IMPORTANT**: This feature is experimental and performance is\n> not guaranteed. Users are advised to import small batches only.\n\n|]\n where\n createAccount = decodeUtf8 $ encodePretty $ genExample @(APIResponse Account)\n createAddress = decodeUtf8 $ encodePretty $ genExample @(APIResponse WalletAddress)\n createWallet = decodeUtf8 $ encodePretty $ genExample @(APIResponse Wallet)\n readAccounts = decodeUtf8 $ encodePretty $ genExample @(APIResponse [Account])\n readAccountBalance = decodeUtf8 $ encodePretty $ genExample @(APIResponse AccountBalance)\n readAccountAddresses = decodeUtf8 $ encodePretty $ genExample @(APIResponse AccountAddresses)\n readAddresses = decodeUtf8 $ encodePretty $ genExample @(APIResponse [Address])\n readFees = decodeUtf8 $ encodePretty $ genExample @(APIResponse EstimatedFees)\n readNodeInfo = decodeUtf8 $ encodePretty $ genExample @(APIResponse NodeInfo)\n readTransactions = decodeUtf8 $ encodePretty $ genExample @(APIResponse [Transaction])\n readUtxoStatistics = decodeUtf8 $ encodePretty $ genExample @(APIResponse UtxoStatistics)\n\nswaggerSchemaUIServer\n :: (Server api ~ Handler Swagger)\n => Swagger -> Server (SwaggerSchemaUI' dir api)\nswaggerSchemaUIServer =\n swaggerSchemaUIServerImpl redocIndexTemplate redocFiles\n where\n redocIndexTemplate :: Text\n redocIndexTemplate = [text|\n\n\n \n ReDoc\n \n \n \n \n \n \n \n \n \n|]\n\napplyUpdateDescription :: Text\napplyUpdateDescription = [text|\nApply the next available update proposal from the blockchain. Note that this\nwill immediately shutdown the node and makes it unavailable for a short while.\n|]\n\npostponeUpdateDescription :: Text\npostponeUpdateDescription = [text|\nDiscard the next available update from the node's local state. Yet, this doesn't\nreject the update which will still be applied as soon as the node is restarted.\n|]\n\nresetWalletStateDescription :: Text\nresetWalletStateDescription = [text|\nWipe-out the node's local state entirely. The only intended use-case for this\nendpoint is during API integration testing. 
Note also that this will fail by\ndefault unless the node is running in debug mode.\n|]\n\nestimateFeesDescription :: Text\nestimateFeesDescription = [text|\nEstimate the fees which would incur from the input payment. This endpoint\n**does not** require a _spending password_ to be supplied as it generates\nunder the hood an unsigned transaction.\n|]\n\ngetAddressDescription :: Text\ngetAddressDescription = [text|\nThe previous version of this endpoint failed with an HTTP error when the given\naddress was unknown to the wallet.\n\nThis was misleading since an address that is unknown to the wallet may still\nbelong to the wallet (since it could be part of a pending transaction in\nanother instance of the same wallet).\n\nTo reflect this, the V1 endpoint does not fail when an address is not recognised\nand returns a new field which indicates the address' ownership status, from the\nnode point of view.\n|]\n\n\ndata DescriptionEnvironment = DescriptionEnvironment\n { deErrorExample :: !T.Text\n , deDefaultPerPage :: !T.Text\n , deWalletErrorTable :: !T.Text\n , deGitRevision :: !T.Text\n , deSoftwareVersion :: !T.Text\n , deMnemonicExample :: !T.Text\n }\n\napi :: HasSwagger a\n => (CompileTimeInfo, SoftwareVersion)\n -> Proxy a\n -> (DescriptionEnvironment -> T.Text)\n -> Swagger\napi (compileInfo, curSoftwareVersion) walletAPI mkDescription = toSwagger walletAPI\n & info.title .~ \"Sealchain Wallet API\"\n & info.version .~ fromString (show curSoftwareVersion)\n & host ?~ \"127.0.0.1:8090\"\n & info.description ?~ mkDescription DescriptionEnvironment\n { deErrorExample = decodeUtf8 $ encodePretty WalletNotFound\n , deMnemonicExample = decodeUtf8 $ encode (genExample @BackupPhrase)\n , deDefaultPerPage = fromString (show defaultPerPageEntries)\n , deWalletErrorTable = errorsDescription\n , deGitRevision = ctiGitRevision compileInfo\n , deSoftwareVersion = fromString $ show (svNumber curSoftwareVersion)\n }\n & info.license ?~ (\"MIT\" & url ?~ URL \"-project/sealchain/develop/LICENSE\")\n & paths %~ (POST, \"/api/internal/apply-update\") `setDescription` applyUpdateDescription\n & paths %~ (POST, \"/api/internal/postpone-update\") `setDescription` postponeUpdateDescription\n & paths %~ (DELETE, \"/api/internal/reset-wallet-state\") `setDescription` resetWalletStateDescription\n & paths %~ (POST, \"/api/v1/transactions/fees\") `setDescription` estimateFeesDescription\n & paths %~ (GET, \"/api/v1/addresses/{address}\") `setDescription` getAddressDescription\n"}}},{"rowIdx":610239,"cells":{"_id":{"kind":"string","value":"418f2e4a28c1eee5c23b1e2879f3f0f450980f61b7b287beb97fcf50477102db"},"repository":{"kind":"string","value":"arenadotio/pgx"},"name":{"kind":"string","value":"test_pgx_value_core.ml"},"content":{"kind":"string","value":"open Core_kernel\nmodule Value = Pgx_value_core\n\nlet time_roundtrip str = Value.of_string str |> Value.to_time_exn\nlet printer = Time.to_string_abs ~zone:Time.Zone.utc\n\nlet time_testable =\n Alcotest.testable (fun ppf t -> Format.pp_print_string ppf (printer t)) Time.equal\n;;\n\nlet check_time = Alcotest.check time_testable\nlet check_string = Alcotest.(check string)\n\nlet test_time_of_string _ =\n let expected = Time.of_string \"2016-03-15 19:55:18.123456-04:00\" in\n check_time \"without TZ\" expected (time_roundtrip \"2016-03-15 23:55:18.123456\");\n check_time \"zulu\" expected (time_roundtrip \"2016-03-15 23:55:18.123456Z\");\n check_time \"hour TZ\" expected (time_roundtrip \"2016-03-15 19:55:18.123456-04\");\n check_time \"full TZ\" expected 
(time_roundtrip \"2016-03-15 19:55:18.123456-04:00\")\n;;\n\nlet test_time_of_string_no_ms _ =\n let expected = Time.of_string \"2016-03-15 19:55:18-04:00\" in\n check_time \"without TZ\" expected (time_roundtrip \"2016-03-15 23:55:18\");\n check_time \"zulu\" expected (time_roundtrip \"2016-03-15 23:55:18Z\");\n check_time \"hour TZ\" expected (time_roundtrip \"2016-03-15 19:55:18-04\");\n check_time \"full TZ\" expected (time_roundtrip \"2016-03-15 19:55:18-04:00\")\n;;\n\nlet test_time_conversion_roundtrip _ =\n let expected_str = \"2016-03-15 23:55:18.123456Z\" in\n check_string \"parse-print\" expected_str (time_roundtrip expected_str |> printer);\n let expected_time = Time.of_string expected_str in\n check_time \"print-parse\" expected_time (Value.of_time expected_time |> Value.to_time_exn)\n;;\n\nlet time_tests =\n [ Alcotest.test_case \"test time_of_string\" `Quick test_time_of_string\n ; Alcotest.test_case\n \"test time_of_string no milliseconds\"\n `Quick\n test_time_of_string_no_ms\n ; Alcotest.test_case\n \"test time conversion roundtrip\"\n `Quick\n test_time_conversion_roundtrip\n ]\n;;\n\nlet () = Alcotest.run \"pgx_async_conversions\" [ \"time\", time_tests ]\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/arenadotio/pgx/8d5ca02213faa69e692c5d0dc3e81408db3774a1/pgx_value_core/test/test_pgx_value_core.ml"},"language":{"kind":"string","value":"ocaml"},"comments":{"kind":"string","value":""},"code":{"kind":"string","value":"open Core_kernel\nmodule Value = Pgx_value_core\n\nlet time_roundtrip str = Value.of_string str |> Value.to_time_exn\nlet printer = Time.to_string_abs ~zone:Time.Zone.utc\n\nlet time_testable =\n Alcotest.testable (fun ppf t -> Format.pp_print_string ppf (printer t)) Time.equal\n;;\n\nlet check_time = Alcotest.check time_testable\nlet check_string = Alcotest.(check string)\n\nlet test_time_of_string _ =\n let expected = Time.of_string \"2016-03-15 19:55:18.123456-04:00\" in\n check_time \"without TZ\" expected (time_roundtrip \"2016-03-15 23:55:18.123456\");\n check_time \"zulu\" expected (time_roundtrip \"2016-03-15 23:55:18.123456Z\");\n check_time \"hour TZ\" expected (time_roundtrip \"2016-03-15 19:55:18.123456-04\");\n check_time \"full TZ\" expected (time_roundtrip \"2016-03-15 19:55:18.123456-04:00\")\n;;\n\nlet test_time_of_string_no_ms _ =\n let expected = Time.of_string \"2016-03-15 19:55:18-04:00\" in\n check_time \"without TZ\" expected (time_roundtrip \"2016-03-15 23:55:18\");\n check_time \"zulu\" expected (time_roundtrip \"2016-03-15 23:55:18Z\");\n check_time \"hour TZ\" expected (time_roundtrip \"2016-03-15 19:55:18-04\");\n check_time \"full TZ\" expected (time_roundtrip \"2016-03-15 19:55:18-04:00\")\n;;\n\nlet test_time_conversion_roundtrip _ =\n let expected_str = \"2016-03-15 23:55:18.123456Z\" in\n check_string \"parse-print\" expected_str (time_roundtrip expected_str |> printer);\n let expected_time = Time.of_string expected_str in\n check_time \"print-parse\" expected_time (Value.of_time expected_time |> Value.to_time_exn)\n;;\n\nlet time_tests =\n [ Alcotest.test_case \"test time_of_string\" `Quick test_time_of_string\n ; Alcotest.test_case\n \"test time_of_string no milliseconds\"\n `Quick\n test_time_of_string_no_ms\n ; Alcotest.test_case\n \"test time conversion roundtrip\"\n `Quick\n test_time_conversion_roundtrip\n ]\n;;\n\nlet () = Alcotest.run \"pgx_async_conversions\" [ \"time\", time_tests 
]\n"}}},{"rowIdx":610240,"cells":{"_id":{"kind":"string","value":"8d35a45e41a48d54970a4d4b22cc2ddb8b1634a954206029ec680281a4a49f75"},"repository":{"kind":"string","value":"bytekid/mkbtt"},"name":{"kind":"string","value":"codeTree.ml"},"content":{"kind":"string","value":" Copyright 2010 \n * GNU Lesser General Public License \n * \n * This file is part of MKBtt . \n * \n * is free software : you can redistribute it and/or modify it under \n * the terms of the GNU Lesser General Public License as published by the \n * Free Software Foundation , either version 3 of the License , or ( at your \n * option ) any later version . \n * \n * is distributed in the hope that it will be useful , but WITHOUT \n * ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY or \n * FITNESS FOR A PARTICULAR PURPOSE . See the GNU Lesser General Public \n * License for more details . \n * \n * You should have received a copy of the GNU Lesser General Public \n * License along with MKBtt . If not , see < / > . \n \n * GNU Lesser General Public License\n *\n * This file is part of MKBtt.\n * \n * MKBtt is free software: you can redistribute it and/or modify it under\n * the terms of the GNU Lesser General Public License as published by the\n * Free Software Foundation, either version 3 of the License, or (at your\n * option) any later version.\n * \n * MKBtt is distributed in the hope that it will be useful, but WITHOUT\n * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or\n * FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public\n * License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public\n * License along with MKBtt. If not, see .\n *)\n\n* \n @author \n @since 2009/07/21\n@author Sarah Winkler\n@since 2009/07/21 *)\n\n(** Term indexing using code trees *)\n\n(*** OPENS ********************************************************************)\nopen Util;;\n(*** EXCEPTIONS **********************************************************)\nexception No_back_pointer\n\nexception Malformed_tree of string\n\nexception Not_in_index\n\nexception Empty_branch\n\n(*** MODULES *************************************************************)\nmodule Fun = Rewriting.Function;;\nmodule Pos = Rewriting.Position;;\nmodule Var = Rewriting.Variable;;\nmodule T = U.Term;;\nmodule M = U.Monad;;\n\nopen M;;\n\n: TermIndex . 
T with type entry = Entry.t\n = functor (Entry: TermIndex.ENTRY_TYPE) ->\n struct\n\n(*** SUBMODULES **********************************************************)\nmodule EL = TermIndex.EntryList(Entry);;\n\n(*** TYPES ***************************************************************)\ntype entry = Entry.t\n\n type instruction =\n | Check of Fun.t * instruction * instruction\n | Put of int * instruction * instruction\n | Compare of int * int * instruction * instruction\n | Success of Entry.t list\n | Fail\n ;;\n\n type t = instruction\n\n(* convenient for generalization retrievals *)\ntype flatterm =\n | Fun of Fun.t * flatterm * flatterm * T.t (* next, after, subterm here *)\n | Var of Var.t * flatterm (* next = after *)\n | End\n;;\n\n\n(*** GLOBALS *************************************************************)\n\n(*** FUNCTIONS ***********************************************************)\n\nlet is_empty t = return (t == Fail)\n\nlet cont n = function\n | Check (_, c, _ )\n | Put (_, c, _ )\n | Compare (_, _, c, _ ) -> c\n | _ -> raise (Malformed_tree \"cont does not exist\")\n;;\n\nlet back n = function\n | Check (_, _, b)\n | Put (_, _, b)\n | Compare (_, _, _, b) -> b\n | _ -> raise (Malformed_tree \"cont does not exist\")\n;;\n\nlet set_back instruction b' =\n match instruction with\n | Check (f, c, b) -> Check (f, c, b')\n | Put (n, c, b) -> Put (n, c, b')\n | Compare (m, k, c, b) -> Compare (m, k, c, b')\n | _ -> raise (Malformed_tree \"back does not exist\")\n;;\n\n(* output code *)\nlet rec code_to_string c =\n match c with\n | Check(f, c, b) ->\n let cs, bs = code_to_string c, code_to_string b in\n \"Check(\" ^ (Fun.to_string f) ^ \", \" ^ cs ^ \", \" ^ bs ^ \")\"\n | Put(k, c, b) ->\n let cs, bs = code_to_string c, code_to_string b in\n \"Put(\" ^ (string_of_int k) ^ \", \" ^ cs ^ \", \" ^ bs ^ \")\"\n | Compare(m, k, c, b) ->\n let cs, bs = code_to_string c, code_to_string b in\n let sk, sm = string_of_int k, string_of_int m in\n \"Compare(\" ^ sm ^ \", \" ^ sk ^ \", \" ^ cs ^ \", \" ^ bs ^ \")\"\n | Success values -> \"Success\" ^ (List.join Entry.to_string \" \" values) \n | Fail -> \"Fail\"\n;;\n\nlet lookup table x i code =\n try\n let j = List.assoc x table in\n (Compare(j, i, Fail, Fail)) :: code, table\n with Not_found ->\n code, (x, i) :: table\n;;\n\nlet rec code_list tcodes ccodes i table = function\n | T.Var x -> \n let ccodes', table' = lookup table x i ccodes in\n (Put (i, Fail, Fail)) :: tcodes, ccodes', table', i + 1\n | T.Fun(f, ts) ->\n let tcodes' = (Check(f, Fail, Fail)) :: tcodes in\n List.fold_left app_tcode (tcodes', ccodes, table, i) ts\nand app_tcode (tcodes, ccodes, table, i) t =\n code_list tcodes ccodes i table t\n;;\n\nlet rec combine_code instruction = function\n | [] -> instruction\n | Check(f, _, _) :: l ->\n combine_code (Check(f, instruction, Fail)) l\n | Put(k, _, _) :: l ->\n combine_code (Put(k, instruction, Fail)) l\n | Compare(k, m,_, _) :: l ->\n combine_code (Compare(k, m, instruction, Fail)) l\n | _ -> raise (Malformed_tree \"Compare/Fail/Success not expected\")\n;;\n\nlet code_for_term t =\n let success = Success [] in \n let tcode, ccode, _, _ = code_list [] [] 0 [] t in\n combine_code (combine_code success ccode) tcode\n;;\n\nlet code_for_value (t, v) = \n let success = Success [v] in\n let tcode, ccode, _, _ = code_list [] [] 0 [] t in\n combine_code (combine_code success ccode) tcode\n;;\n\n(* ****************** CONSTRUCTION OF CODE TREES ********************** *)\nlet make () = Fail\n\n(* assume code is just code, not tree (otherwise, 
change case for \n Success in tree *)\nlet rec insert' code tree =\n match code, tree with\n | _, Fail -> code\n | Check(f, c, _), Check(g, c', b') when (Fun.compare f g) == 0 ->\n Check(g, insert' c c', b')\n | Compare(m, k, c, _), Compare(m', k', c', b') when (k == k') && (m == m') ->\n Compare(m', k', insert' c c', b')\n | Put(k, c, _), Put(k', c', b') when k = k' ->\n Put(k, insert' c c', b')\n | _, Check(_, _, b)\n | _, Compare (_, _, _, b) \n | _, Put (_, _, b) ->\n set_back tree (insert' code b)\n | Check(_, _, b), Success vs (* cases relevant? *)\n | Compare(_, _, _, b), Success vs\n | Put(_, _, b), Success vs ->\n set_back code (Success vs)\n | Success v, Success values ->\n Success (EL.union v values) (* variant *)\n | Fail, Success _ -> raise (Malformed_tree \"Fail, Success not expected\")\n;;\n\n(* add entry element into code tree *)\nlet insert tree (term, value) =\nT.to_stringm term > > = fun s - > \n Format.printf \" Insert into index term % s\\n% ! \" s ; \n Format.printf \" Tree before is % s\\n \" ( code_to_string tree ) ;\n Format.printf \"Insert into index term %s\\n%!\" s;\n Format.printf \"Tree before is %s\\n\" (code_to_string tree);*)\n let code = code_for_value (term, value) in\n let tree' = insert' code tree in\n(* Format.printf \"Code is %s\\n\" (code_to_string code);\n Format.printf \"Tree is %s\\n\" (code_to_string tree');*)\n return tree'\n;;\n\nlet rec remove_code code tree v =\n match code, tree with\n | Fail, _ -> raise (Malformed_tree \"Fail in code not expected\")\n | Check(f,c,_), Check(g,c',b') when (Fun.compare f g) == 0 ->\n (try Check(g, remove_code c c' v, b')\n with Empty_branch -> if b' != Fail then b' else raise Empty_branch)\n | Compare(m,k,c,_), Compare(m',k',c',b') when (k==k') && (m==m') ->\n (try Compare(m', k', remove_code c c' v, b')\n with Empty_branch -> if b' != Fail then b' else raise Empty_branch)\n | Put(k, c, b), Put(k', c', b') when k = k' ->\n (try Put(k', remove_code c c' v, b')\n with Empty_branch -> if b' != Fail then b' else raise Empty_branch)\n | _, Check(_, _, b)\n | _, Compare(_, _, _, b)\n | _, Put(_, _, b) ->\n (try set_back tree (remove_code code b v)\n with Empty_branch -> set_back tree Fail)\n | Success v, Success values -> \n if (List.length values) == 1 then raise Empty_branch\n else Success (EL.diff values v) (* variant *)\n | _, Success _ -> raise (Malformed_tree \"Success in tree not expected\")\n | _ -> raise Not_in_index\n;;\n\n(* removes the value from the index. if not found, Not_in_index is raised *)\nlet delete tree value =\nT.to_stringm ( fst value ) > > = fun s - > \n Format.printf \" Remove term % s\\n% ! 
\" s ;\n Format.printf \"Remove term %s\\n%!\" s;*)\n let code = code_for_value value in\n let tree' = try remove_code code tree value with Empty_branch -> Fail in\n return tree'\n;;\n\n(********* RETRIEVAL OPERATIONS ******************************************)\n\n(***** VARIANTS *****)\n\nlet rec retrieve_variants tree code =\n match tree, code with\n | Check(f, c, b), Check(g, c', _) when (Fun.compare f g) == 0 ->\n retrieve_variants c c'\n | Compare(m, k, c, b), Compare(m', k', c', _) when (k == k') && (m == m') ->\n retrieve_variants c c'\n | Put(k, c, b), Put(k', c', _) when k = k' ->\n retrieve_variants c c'\n | Check(_, _, b), _\n | Compare(_, _, _, b), _\n | Put(_, _, b), _ ->\n retrieve_variants b code\n | Success variants, Success _ -> variants\n | Fail, _ \n | Success _, _ -> []\n;;\n\nlet variant_candidates tree term =\n let code = code_for_term term in\n let vars = retrieve_variants tree code in\n U.Term.to_stringm term > > = fun s - > \n Format.printf \" CT : vars 4 % s : % i:\\n%s\\n \" s ( vars ) \n ( List.foldl ( fun s x - > ( Entry.to_string x)^s ) \" \" vars ) ;\n Format.printf \"CT: vars 4 %s: %i:\\n%s\\n\" s (List.length vars)\n (List.foldl (fun s x -> (Entry.to_string x)^s) \"\" vars);*)\n return vars\n;;\n\n(***** GENERALIZATIONS *****)\n\nlet rec flatten' after t = \n match t with\n | T.Var x -> Var (x, after)\n | T.Fun(f, ts) ->\n let flat_ts = List.fold_right (fun t l -> flatten' l t) ts after in\n Fun(f, flat_ts, after, t) (* add t here, required in gen retrieve *)\n;;\n\nlet flatten = flatten' End\n\nlet subst table i =\n try\n List.assoc i table\n with\n Not_found -> raise (Malformed_tree \"compare without put\")\n;;\n\nlet rec retrieve_generalizations tree t_flat sub =\n match tree, t_flat with\n | Check(f, c, b), Fun(g, next, after, _) when (Fun.compare f g) == 0 ->\n let gens = retrieve_generalizations c next sub in\n EL.union (retrieve_generalizations b t_flat sub) gens\n | Compare(m, k, c, b), End ->\n let gens = retrieve_generalizations b End sub in\n if (compare (subst sub m) (subst sub k)) == 0 then\n EL.union (retrieve_generalizations c End sub) gens\n else\n gens\n | Put(k, c, b), Var (x, after) ->\n let subterm = T.Var x in\n let gens = retrieve_generalizations c after ((k, subterm) :: sub) in\n EL.union (retrieve_generalizations b t_flat sub) gens\n | Put(k, c, b), Fun (_, _, after, subterm) ->\n let gens = retrieve_generalizations c after ((k, subterm) :: sub) in\n EL.union (retrieve_generalizations b t_flat sub) gens\n | Check(_, _, b), _ ->\n retrieve_generalizations b t_flat sub\n | Success entries, End -> entries\n | Fail, _\n | Compare _, _\n | Success _, _ -> []\n | Put _, End -> raise (Malformed_tree \"not malformed?\")\n;;\n\n find generalizations for a given term in dtree\nlet generalization_candidates tree term =\n let t_flat = flatten term in\n let gens = retrieve_generalizations tree t_flat [] in\n return gens\n;;\n\n(***** ENCOMPASSMENTS *****)\n given a term , non - var generalization of subterms are returned , \n paired with the subterm 's position . Not strict ! Also not possible \n as indexing destroys nonlinearity .\n paired with the subterm's position. Not strict! Also not possible\n as indexing destroys nonlinearity. 
*)\n\n let encompassment_candidates tree term = \n let pos_st = Termx.nonvar_pos_proper_subterms term in \n let ecs = \n List.fold_left \n ( fun r ( t , p ) - > \n let gs = retrieve_generalizations tree ( flatten t ) [ ] in \n ( List.map ( fun n - > ( n , p ) ) gs ) @ r ) \n [ ] ( ( term , Pos.root ) : : pos_st ) \n in \n return ecs \n ; ;\nlet encompassment_candidates tree term =\n let pos_st = Termx.nonvar_pos_proper_subterms term in\n let ecs =\n List.fold_left\n (fun r (t, p) ->\n let gs = retrieve_generalizations tree (flatten t) [] in\n (List.map (fun n -> (n, p)) gs) @ r)\n [] ((term,Pos.root) :: pos_st)\n in\n return ecs\n;;*)\n\n\n given a term , non - var generalization of subterms are returned , \n paired with the subterm 's position . Not strict !\n paired with the subterm's position. Not strict! *)\nlet encompassment_candidates_below_root tree term =\n let pos_st = Termx.nonvar_pos_proper_subterms term in\n let ecs =\n List.fold_left\n (fun r (t, p) ->\n let gs = retrieve_generalizations tree (flatten t) [] in\n (List.map (fun n -> (n, p)) gs) @ r)\n [] pos_st\n in\n return ecs\n;;\n\nlet encompassment_candidates tree term =\n let at_root = retrieve_generalizations tree (flatten term) [] in\n encompassment_candidates_below_root tree term >>= fun below ->\n let root = flip Pair.make Pos.root in\n return (List.rev_append (List.map root at_root) below)\n;;\n\nlet size t = is_empty t >>= fun b -> return (if b then 0 else 1)\n\nlet overlap1_candidates t = failwith \"CodeTree: overlaps not implemented\"\n\nlet overlap1_candidates_below_root t = \n failwith \"CodeTree: overlaps not implemented\"\n;;\n\nlet overlap2_candidates t = failwith \"CodeTree: overlaps not implemented\"\n\nlet unification_candidates t = \n failwith \"CodeTree: unification not implemented\"\n;;\n\n\n\nend (* Make *)\n\nmodule TermCodeTree = Make(TermIndex.TermEntry)\n\n let test ( ) = \n Format.printf \" testing module CodeTree\\n \" ; \n let c = Fun.of_string \" c \" 0 in \n let f = Fun.of_string \" f \" 1 in \n let g = Fun.of_string \" g \" 2 in \n let x = Term . ( Var.of_string \" x \" ) in \n let y = Term . ( Var.of_string \" y \" ) in \n let f_x = Term . Fun ( f , [ x ] ) in \n let f_f_x = Term . Fun ( f , [ f_x ] ) in \n let c _ = Term . Fun ( c , [ ] ) in \n let g_x_x = Term . Fun(g , [ x ; x ] ) in \n Format.printf \" Code for % s : \\n % s\\n \" \n ( Term.to_string f_f_x ) \n ( TermCodeTree.code_to_string ( TermCodeTree.code_for_value f_f_x ) ) ; \n Format.printf \" Code for % s : \\n % s\\n \" \n ( Term.to_string g_x_x ) \n ( TermCodeTree.code_to_string ( TermCodeTree.code_for_value g_x_x ) ) ; \n let g_f_f_x_c = Term . Fun ( g , [ f_f_x ; c _ ] ) in \n Format.printf \" Code for % s : \\n % s\\n\\n \" \n ( Term.to_string g_f_f_x_c ) \n ( TermCodeTree.code_to_string ( ) ) ; \n let = Term . Fun ( g , [ f_f_x ; f_x ] ) in \n let g_f_f_x_y = Term . Fun ( g , [ f_f_x ; y ] ) in \n Format.printf \" Code for % s : \\n % s\\n\\n \" \n ( Term.to_string g_f_f_x_f_x ) \n ( TermCodeTree.code_to_string ( ) ) ; \n let t = Term . Fun ( g , [ g_f_f_x_f_x ; y ] ) in \n let t ' = Term . 
Fun ( g , [ g_f_f_x_f_x ; g_x_x ] ) in \n Format.printf \" Code for % s : \\n % s\\n\\n \" \n ( Term.to_string t ) \n ( TermCodeTree.code_to_string ( t ) ) ; \n ( * INSERT\nlet test () =\n Format.printf \"testing module CodeTree\\n\";\n let c = Fun.of_string \"c\" 0 in\n let f = Fun.of_string \"f\" 1 in\n let g = Fun.of_string \"g\" 2 in\n let x = Term.Var (Var.of_string \"x\") in\n let y = Term.Var (Var.of_string \"y\") in\n let f_x = Term.Fun (f, [x]) in\n let f_f_x = Term.Fun (f, [f_x]) in\n let c_ = Term.Fun (c, []) in\n let g_x_x = Term.Fun(g, [x; x]) in\n Format.printf \"Code for %s: \\n %s\\n\"\n (Term.to_string f_f_x)\n (TermCodeTree.code_to_string (TermCodeTree.code_for_value f_f_x));\n Format.printf \"Code for %s: \\n %s\\n\"\n (Term.to_string g_x_x)\n (TermCodeTree.code_to_string (TermCodeTree.code_for_value g_x_x));\n let g_f_f_x_c = Term.Fun (g, [f_f_x; c_]) in\n Format.printf \"Code for %s: \\n %s\\n\\n\" \n (Term.to_string g_f_f_x_c) \n (TermCodeTree.code_to_string (TermCodeTree.code_for_value g_f_f_x_c));\n let g_f_f_x_f_x = Term.Fun (g, [f_f_x; f_x]) in\n let g_f_f_x_y = Term.Fun (g, [f_f_x; y]) in\n Format.printf \"Code for %s: \\n %s\\n\\n\"\n (Term.to_string g_f_f_x_f_x)\n (TermCodeTree.code_to_string (TermCodeTree.code_for_value g_f_f_x_f_x));\n let t = Term.Fun (g, [g_f_f_x_f_x; y]) in\n let t' = Term.Fun (g, [g_f_f_x_f_x; g_x_x]) in\n Format.printf \"Code for %s: \\n %s\\n\\n\"\n (Term.to_string t)\n (TermCodeTree.code_to_string (TermCodeTree.code_for_value t));\n(* INSERT *)\n let tree =\n TermCodeTree.insert (TermCodeTree.code_for_value g_f_f_x_c) g_f_f_x_y \n in\n Format.printf \"Code for insert: \\n %s\\n\\n\"\n (TermCodeTree.code_to_string tree);\n let tree' = TermCodeTree.insert tree t in\n Format.printf \"Code for insert: \\n %s\\n\\n\"\n (TermCodeTree.code_to_string tree');\n let g_f_f_y_c = Term.Fun (g, [Term.Fun (f, [Term.Fun (f, [y])]); c_]) in\n let tree' = TermCodeTree.insert tree' g_f_f_y_c in\n Format.printf \"Code for insert g_f_f_y_c: \\n %s\\n\\n\"\n (TermCodeTree.code_to_string tree');\n(* DELETE *)\n let tree'' = TermCodeTree.delete tree' g_f_f_y_c in\n Format.printf \"Code for delete g_f_f_y_c again: \\n %s\\n\\n\"\n (TermCodeTree.code_to_string tree'');\n Format.printf \" Code for delete g_x_x : \\n % s\\n\\n \" \n ( TermCodeTree.code_to_string ( TermCodeTree.delete tree ' g_x_x ) ) ;\n (TermCodeTree.code_to_string (TermCodeTree.delete tree' g_x_x));*)\n(* VARIANTS *)\n let variants = TermCodeTree.variant_candidates tree' g_f_f_x_f_x in\n let variants' = TermCodeTree.variant_candidates tree' g_f_f_x_y in\n Format.printf \"variants for %s: %s, %s: %s\\n\"\n (Term.to_string g_f_f_x_f_x)\n (List.to_string Term.to_string \"\" variants)\n (Term.to_string g_f_f_x_y)\n (List.to_string Term.to_string \"\" variants');\n let tree' = TermCodeTree.insert tree' t' in\n GENERALIZATIONS\n let u = Term.Fun (g, [f_x; y]) in\n let tree' = TermCodeTree.insert tree' u in\n let gens = TermCodeTree.generalization_candidates tree' g_f_f_y_c in\n Format.printf \"generalizations for %s: %s\\n\"\n (Term.to_string g_f_f_y_c)\n (List.to_string Term.to_string \"\" gens); (* ok *)\n let gens = TermCodeTree.generalization_candidates tree' u in\n Format.printf \"generalizations for %s: %s\\n\"\n (Term.to_string u)\n (List.to_string Term.to_string \"\" gens); (* ok *)\n let s = Term.Fun (g, [f_x; x]) in\n let tree' = TermCodeTree.insert tree' s in\n let gens = TermCodeTree.generalization_candidates tree' g_f_f_x_f_x in\n Format.printf \"generalizations for %s: %s\\n\"\n 
(Term.to_string g_f_f_x_f_x)\n (List.to_string Term.to_string \"\" gens);\n(***** ENCOMPASSMENTS *****)\n let gens = TermCodeTree.encompassment_candidates_not_strict tree' t in\n let f (t, p) = (Term.to_string t) ^ \"@\" ^ (Position.to_string p) ^ \"\\n\" in \n Format.printf \"encompassments for %s: %s\\n\"\n (Term.to_string t)\n (List.to_string f \"\" gens);\n;;\n*)\n(* test ()*)\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/bytekid/mkbtt/c2f8e0615389b52eabd12655fe48237aa0fe83fd/src/mkbtt/termindexing/codeTree.ml"},"language":{"kind":"string","value":"ocaml"},"comments":{"kind":"string","value":"* Term indexing using code trees \n** OPENS *******************************************************************\n** EXCEPTIONS *********************************************************\n** MODULES ************************************************************\n** SUBMODULES *********************************************************\n** TYPES **************************************************************\n convenient for generalization retrievals \n next, after, subterm here \n next = after \n** GLOBALS ************************************************************\n** FUNCTIONS **********************************************************\n output code \n ****************** CONSTRUCTION OF CODE TREES ********************** \n assume code is just code, not tree (otherwise, change case for \n Success in tree \n cases relevant? \n variant \n add entry element into code tree \n Format.printf \"Code is %s\\n\" (code_to_string code);\n Format.printf \"Tree is %s\\n\" (code_to_string tree');\n variant \n removes the value from the index. if not found, Not_in_index is raised \n******** RETRIEVAL OPERATIONS *****************************************\n**** VARIANTS ****\n**** GENERALIZATIONS ****\n add t here, required in gen retrieve \n**** ENCOMPASSMENTS ****\n Make \n INSERT \n DELETE \n VARIANTS \n ok \n ok \n**** ENCOMPASSMENTS ****\n test ()"},"code":{"kind":"string","value":" Copyright 2010 \n * GNU Lesser General Public License \n * \n * This file is part of MKBtt . \n * \n * is free software : you can redistribute it and/or modify it under \n * the terms of the GNU Lesser General Public License as published by the \n * Free Software Foundation , either version 3 of the License , or ( at your \n * option ) any later version . \n * \n * is distributed in the hope that it will be useful , but WITHOUT \n * ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY or \n * FITNESS FOR A PARTICULAR PURPOSE . See the GNU Lesser General Public \n * License for more details . \n * \n * You should have received a copy of the GNU Lesser General Public \n * License along with MKBtt . If not , see < / > . \n \n * GNU Lesser General Public License\n *\n * This file is part of MKBtt.\n * \n * MKBtt is free software: you can redistribute it and/or modify it under\n * the terms of the GNU Lesser General Public License as published by the\n * Free Software Foundation, either version 3 of the License, or (at your\n * option) any later version.\n * \n * MKBtt is distributed in the hope that it will be useful, but WITHOUT\n * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or\n * FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public\n * License for more details.\n * \n * You should have received a copy of the GNU Lesser General Public\n * License along with MKBtt. 
If not, see .\n *)\n\n* \n @author \n @since 2009/07/21\n@author Sarah Winkler\n@since 2009/07/21 *)\n\n\nopen Util;;\nexception No_back_pointer\n\nexception Malformed_tree of string\n\nexception Not_in_index\n\nexception Empty_branch\n\nmodule Fun = Rewriting.Function;;\nmodule Pos = Rewriting.Position;;\nmodule Var = Rewriting.Variable;;\nmodule T = U.Term;;\nmodule M = U.Monad;;\n\nopen M;;\n\n: TermIndex . T with type entry = Entry.t\n = functor (Entry: TermIndex.ENTRY_TYPE) ->\n struct\n\nmodule EL = TermIndex.EntryList(Entry);;\n\ntype entry = Entry.t\n\n type instruction =\n | Check of Fun.t * instruction * instruction\n | Put of int * instruction * instruction\n | Compare of int * int * instruction * instruction\n | Success of Entry.t list\n | Fail\n ;;\n\n type t = instruction\n\ntype flatterm =\n | End\n;;\n\n\n\n\nlet is_empty t = return (t == Fail)\n\nlet cont n = function\n | Check (_, c, _ )\n | Put (_, c, _ )\n | Compare (_, _, c, _ ) -> c\n | _ -> raise (Malformed_tree \"cont does not exist\")\n;;\n\nlet back n = function\n | Check (_, _, b)\n | Put (_, _, b)\n | Compare (_, _, _, b) -> b\n | _ -> raise (Malformed_tree \"cont does not exist\")\n;;\n\nlet set_back instruction b' =\n match instruction with\n | Check (f, c, b) -> Check (f, c, b')\n | Put (n, c, b) -> Put (n, c, b')\n | Compare (m, k, c, b) -> Compare (m, k, c, b')\n | _ -> raise (Malformed_tree \"back does not exist\")\n;;\n\nlet rec code_to_string c =\n match c with\n | Check(f, c, b) ->\n let cs, bs = code_to_string c, code_to_string b in\n \"Check(\" ^ (Fun.to_string f) ^ \", \" ^ cs ^ \", \" ^ bs ^ \")\"\n | Put(k, c, b) ->\n let cs, bs = code_to_string c, code_to_string b in\n \"Put(\" ^ (string_of_int k) ^ \", \" ^ cs ^ \", \" ^ bs ^ \")\"\n | Compare(m, k, c, b) ->\n let cs, bs = code_to_string c, code_to_string b in\n let sk, sm = string_of_int k, string_of_int m in\n \"Compare(\" ^ sm ^ \", \" ^ sk ^ \", \" ^ cs ^ \", \" ^ bs ^ \")\"\n | Success values -> \"Success\" ^ (List.join Entry.to_string \" \" values) \n | Fail -> \"Fail\"\n;;\n\nlet lookup table x i code =\n try\n let j = List.assoc x table in\n (Compare(j, i, Fail, Fail)) :: code, table\n with Not_found ->\n code, (x, i) :: table\n;;\n\nlet rec code_list tcodes ccodes i table = function\n | T.Var x -> \n let ccodes', table' = lookup table x i ccodes in\n (Put (i, Fail, Fail)) :: tcodes, ccodes', table', i + 1\n | T.Fun(f, ts) ->\n let tcodes' = (Check(f, Fail, Fail)) :: tcodes in\n List.fold_left app_tcode (tcodes', ccodes, table, i) ts\nand app_tcode (tcodes, ccodes, table, i) t =\n code_list tcodes ccodes i table t\n;;\n\nlet rec combine_code instruction = function\n | [] -> instruction\n | Check(f, _, _) :: l ->\n combine_code (Check(f, instruction, Fail)) l\n | Put(k, _, _) :: l ->\n combine_code (Put(k, instruction, Fail)) l\n | Compare(k, m,_, _) :: l ->\n combine_code (Compare(k, m, instruction, Fail)) l\n | _ -> raise (Malformed_tree \"Compare/Fail/Success not expected\")\n;;\n\nlet code_for_term t =\n let success = Success [] in \n let tcode, ccode, _, _ = code_list [] [] 0 [] t in\n combine_code (combine_code success ccode) tcode\n;;\n\nlet code_for_value (t, v) = \n let success = Success [v] in\n let tcode, ccode, _, _ = code_list [] [] 0 [] t in\n combine_code (combine_code success ccode) tcode\n;;\n\nlet make () = Fail\n\nlet rec insert' code tree =\n match code, tree with\n | _, Fail -> code\n | Check(f, c, _), Check(g, c', b') when (Fun.compare f g) == 0 ->\n Check(g, insert' c c', b')\n | Compare(m, k, c, _), Compare(m', k', 
c', b') when (k == k') && (m == m') ->\n Compare(m', k', insert' c c', b')\n | Put(k, c, _), Put(k', c', b') when k = k' ->\n Put(k, insert' c c', b')\n | _, Check(_, _, b)\n | _, Compare (_, _, _, b) \n | _, Put (_, _, b) ->\n set_back tree (insert' code b)\n | Compare(_, _, _, b), Success vs\n | Put(_, _, b), Success vs ->\n set_back code (Success vs)\n | Success v, Success values ->\n | Fail, Success _ -> raise (Malformed_tree \"Fail, Success not expected\")\n;;\n\nlet insert tree (term, value) =\nT.to_stringm term > > = fun s - > \n Format.printf \" Insert into index term % s\\n% ! \" s ; \n Format.printf \" Tree before is % s\\n \" ( code_to_string tree ) ;\n Format.printf \"Insert into index term %s\\n%!\" s;\n Format.printf \"Tree before is %s\\n\" (code_to_string tree);*)\n let code = code_for_value (term, value) in\n let tree' = insert' code tree in\n return tree'\n;;\n\nlet rec remove_code code tree v =\n match code, tree with\n | Fail, _ -> raise (Malformed_tree \"Fail in code not expected\")\n | Check(f,c,_), Check(g,c',b') when (Fun.compare f g) == 0 ->\n (try Check(g, remove_code c c' v, b')\n with Empty_branch -> if b' != Fail then b' else raise Empty_branch)\n | Compare(m,k,c,_), Compare(m',k',c',b') when (k==k') && (m==m') ->\n (try Compare(m', k', remove_code c c' v, b')\n with Empty_branch -> if b' != Fail then b' else raise Empty_branch)\n | Put(k, c, b), Put(k', c', b') when k = k' ->\n (try Put(k', remove_code c c' v, b')\n with Empty_branch -> if b' != Fail then b' else raise Empty_branch)\n | _, Check(_, _, b)\n | _, Compare(_, _, _, b)\n | _, Put(_, _, b) ->\n (try set_back tree (remove_code code b v)\n with Empty_branch -> set_back tree Fail)\n | Success v, Success values -> \n if (List.length values) == 1 then raise Empty_branch\n | _, Success _ -> raise (Malformed_tree \"Success in tree not expected\")\n | _ -> raise Not_in_index\n;;\n\nlet delete tree value =\nT.to_stringm ( fst value ) > > = fun s - > \n Format.printf \" Remove term % s\\n% ! 
\" s ;\n Format.printf \"Remove term %s\\n%!\" s;*)\n let code = code_for_value value in\n let tree' = try remove_code code tree value with Empty_branch -> Fail in\n return tree'\n;;\n\n\n\nlet rec retrieve_variants tree code =\n match tree, code with\n | Check(f, c, b), Check(g, c', _) when (Fun.compare f g) == 0 ->\n retrieve_variants c c'\n | Compare(m, k, c, b), Compare(m', k', c', _) when (k == k') && (m == m') ->\n retrieve_variants c c'\n | Put(k, c, b), Put(k', c', _) when k = k' ->\n retrieve_variants c c'\n | Check(_, _, b), _\n | Compare(_, _, _, b), _\n | Put(_, _, b), _ ->\n retrieve_variants b code\n | Success variants, Success _ -> variants\n | Fail, _ \n | Success _, _ -> []\n;;\n\nlet variant_candidates tree term =\n let code = code_for_term term in\n let vars = retrieve_variants tree code in\n U.Term.to_stringm term > > = fun s - > \n Format.printf \" CT : vars 4 % s : % i:\\n%s\\n \" s ( vars ) \n ( List.foldl ( fun s x - > ( Entry.to_string x)^s ) \" \" vars ) ;\n Format.printf \"CT: vars 4 %s: %i:\\n%s\\n\" s (List.length vars)\n (List.foldl (fun s x -> (Entry.to_string x)^s) \"\" vars);*)\n return vars\n;;\n\n\nlet rec flatten' after t = \n match t with\n | T.Var x -> Var (x, after)\n | T.Fun(f, ts) ->\n let flat_ts = List.fold_right (fun t l -> flatten' l t) ts after in\n;;\n\nlet flatten = flatten' End\n\nlet subst table i =\n try\n List.assoc i table\n with\n Not_found -> raise (Malformed_tree \"compare without put\")\n;;\n\nlet rec retrieve_generalizations tree t_flat sub =\n match tree, t_flat with\n | Check(f, c, b), Fun(g, next, after, _) when (Fun.compare f g) == 0 ->\n let gens = retrieve_generalizations c next sub in\n EL.union (retrieve_generalizations b t_flat sub) gens\n | Compare(m, k, c, b), End ->\n let gens = retrieve_generalizations b End sub in\n if (compare (subst sub m) (subst sub k)) == 0 then\n EL.union (retrieve_generalizations c End sub) gens\n else\n gens\n | Put(k, c, b), Var (x, after) ->\n let subterm = T.Var x in\n let gens = retrieve_generalizations c after ((k, subterm) :: sub) in\n EL.union (retrieve_generalizations b t_flat sub) gens\n | Put(k, c, b), Fun (_, _, after, subterm) ->\n let gens = retrieve_generalizations c after ((k, subterm) :: sub) in\n EL.union (retrieve_generalizations b t_flat sub) gens\n | Check(_, _, b), _ ->\n retrieve_generalizations b t_flat sub\n | Success entries, End -> entries\n | Fail, _\n | Compare _, _\n | Success _, _ -> []\n | Put _, End -> raise (Malformed_tree \"not malformed?\")\n;;\n\n find generalizations for a given term in dtree\nlet generalization_candidates tree term =\n let t_flat = flatten term in\n let gens = retrieve_generalizations tree t_flat [] in\n return gens\n;;\n\n given a term , non - var generalization of subterms are returned , \n paired with the subterm 's position . Not strict ! Also not possible \n as indexing destroys nonlinearity .\n paired with the subterm's position. Not strict! Also not possible\n as indexing destroys nonlinearity. 
*)\n\n let encompassment_candidates tree term = \n let pos_st = Termx.nonvar_pos_proper_subterms term in \n let ecs = \n List.fold_left \n ( fun r ( t , p ) - > \n let gs = retrieve_generalizations tree ( flatten t ) [ ] in \n ( List.map ( fun n - > ( n , p ) ) gs ) @ r ) \n [ ] ( ( term , Pos.root ) : : pos_st ) \n in \n return ecs \n ; ;\nlet encompassment_candidates tree term =\n let pos_st = Termx.nonvar_pos_proper_subterms term in\n let ecs =\n List.fold_left\n (fun r (t, p) ->\n let gs = retrieve_generalizations tree (flatten t) [] in\n (List.map (fun n -> (n, p)) gs) @ r)\n [] ((term,Pos.root) :: pos_st)\n in\n return ecs\n;;*)\n\n\n given a term , non - var generalization of subterms are returned , \n paired with the subterm 's position . Not strict !\n paired with the subterm's position. Not strict! *)\nlet encompassment_candidates_below_root tree term =\n let pos_st = Termx.nonvar_pos_proper_subterms term in\n let ecs =\n List.fold_left\n (fun r (t, p) ->\n let gs = retrieve_generalizations tree (flatten t) [] in\n (List.map (fun n -> (n, p)) gs) @ r)\n [] pos_st\n in\n return ecs\n;;\n\nlet encompassment_candidates tree term =\n let at_root = retrieve_generalizations tree (flatten term) [] in\n encompassment_candidates_below_root tree term >>= fun below ->\n let root = flip Pair.make Pos.root in\n return (List.rev_append (List.map root at_root) below)\n;;\n\nlet size t = is_empty t >>= fun b -> return (if b then 0 else 1)\n\nlet overlap1_candidates t = failwith \"CodeTree: overlaps not implemented\"\n\nlet overlap1_candidates_below_root t = \n failwith \"CodeTree: overlaps not implemented\"\n;;\n\nlet overlap2_candidates t = failwith \"CodeTree: overlaps not implemented\"\n\nlet unification_candidates t = \n failwith \"CodeTree: unification not implemented\"\n;;\n\n\n\n\nmodule TermCodeTree = Make(TermIndex.TermEntry)\n\n let test ( ) = \n Format.printf \" testing module CodeTree\\n \" ; \n let c = Fun.of_string \" c \" 0 in \n let f = Fun.of_string \" f \" 1 in \n let g = Fun.of_string \" g \" 2 in \n let x = Term . ( Var.of_string \" x \" ) in \n let y = Term . ( Var.of_string \" y \" ) in \n let f_x = Term . Fun ( f , [ x ] ) in \n let f_f_x = Term . Fun ( f , [ f_x ] ) in \n let c _ = Term . Fun ( c , [ ] ) in \n let g_x_x = Term . Fun(g , [ x ; x ] ) in \n Format.printf \" Code for % s : \\n % s\\n \" \n ( Term.to_string f_f_x ) \n ( TermCodeTree.code_to_string ( TermCodeTree.code_for_value f_f_x ) ) ; \n Format.printf \" Code for % s : \\n % s\\n \" \n ( Term.to_string g_x_x ) \n ( TermCodeTree.code_to_string ( TermCodeTree.code_for_value g_x_x ) ) ; \n let g_f_f_x_c = Term . Fun ( g , [ f_f_x ; c _ ] ) in \n Format.printf \" Code for % s : \\n % s\\n\\n \" \n ( Term.to_string g_f_f_x_c ) \n ( TermCodeTree.code_to_string ( ) ) ; \n let = Term . Fun ( g , [ f_f_x ; f_x ] ) in \n let g_f_f_x_y = Term . Fun ( g , [ f_f_x ; y ] ) in \n Format.printf \" Code for % s : \\n % s\\n\\n \" \n ( Term.to_string g_f_f_x_f_x ) \n ( TermCodeTree.code_to_string ( ) ) ; \n let t = Term . Fun ( g , [ g_f_f_x_f_x ; y ] ) in \n let t ' = Term . 
Fun ( g , [ g_f_f_x_f_x ; g_x_x ] ) in \n Format.printf \" Code for % s : \\n % s\\n\\n \" \n ( Term.to_string t ) \n ( TermCodeTree.code_to_string ( t ) ) ; \n ( * INSERT\nlet test () =\n Format.printf \"testing module CodeTree\\n\";\n let c = Fun.of_string \"c\" 0 in\n let f = Fun.of_string \"f\" 1 in\n let g = Fun.of_string \"g\" 2 in\n let x = Term.Var (Var.of_string \"x\") in\n let y = Term.Var (Var.of_string \"y\") in\n let f_x = Term.Fun (f, [x]) in\n let f_f_x = Term.Fun (f, [f_x]) in\n let c_ = Term.Fun (c, []) in\n let g_x_x = Term.Fun(g, [x; x]) in\n Format.printf \"Code for %s: \\n %s\\n\"\n (Term.to_string f_f_x)\n (TermCodeTree.code_to_string (TermCodeTree.code_for_value f_f_x));\n Format.printf \"Code for %s: \\n %s\\n\"\n (Term.to_string g_x_x)\n (TermCodeTree.code_to_string (TermCodeTree.code_for_value g_x_x));\n let g_f_f_x_c = Term.Fun (g, [f_f_x; c_]) in\n Format.printf \"Code for %s: \\n %s\\n\\n\" \n (Term.to_string g_f_f_x_c) \n (TermCodeTree.code_to_string (TermCodeTree.code_for_value g_f_f_x_c));\n let g_f_f_x_f_x = Term.Fun (g, [f_f_x; f_x]) in\n let g_f_f_x_y = Term.Fun (g, [f_f_x; y]) in\n Format.printf \"Code for %s: \\n %s\\n\\n\"\n (Term.to_string g_f_f_x_f_x)\n (TermCodeTree.code_to_string (TermCodeTree.code_for_value g_f_f_x_f_x));\n let t = Term.Fun (g, [g_f_f_x_f_x; y]) in\n let t' = Term.Fun (g, [g_f_f_x_f_x; g_x_x]) in\n Format.printf \"Code for %s: \\n %s\\n\\n\"\n (Term.to_string t)\n (TermCodeTree.code_to_string (TermCodeTree.code_for_value t));\n let tree =\n TermCodeTree.insert (TermCodeTree.code_for_value g_f_f_x_c) g_f_f_x_y \n in\n Format.printf \"Code for insert: \\n %s\\n\\n\"\n (TermCodeTree.code_to_string tree);\n let tree' = TermCodeTree.insert tree t in\n Format.printf \"Code for insert: \\n %s\\n\\n\"\n (TermCodeTree.code_to_string tree');\n let g_f_f_y_c = Term.Fun (g, [Term.Fun (f, [Term.Fun (f, [y])]); c_]) in\n let tree' = TermCodeTree.insert tree' g_f_f_y_c in\n Format.printf \"Code for insert g_f_f_y_c: \\n %s\\n\\n\"\n (TermCodeTree.code_to_string tree');\n let tree'' = TermCodeTree.delete tree' g_f_f_y_c in\n Format.printf \"Code for delete g_f_f_y_c again: \\n %s\\n\\n\"\n (TermCodeTree.code_to_string tree'');\n Format.printf \" Code for delete g_x_x : \\n % s\\n\\n \" \n ( TermCodeTree.code_to_string ( TermCodeTree.delete tree ' g_x_x ) ) ;\n (TermCodeTree.code_to_string (TermCodeTree.delete tree' g_x_x));*)\n let variants = TermCodeTree.variant_candidates tree' g_f_f_x_f_x in\n let variants' = TermCodeTree.variant_candidates tree' g_f_f_x_y in\n Format.printf \"variants for %s: %s, %s: %s\\n\"\n (Term.to_string g_f_f_x_f_x)\n (List.to_string Term.to_string \"\" variants)\n (Term.to_string g_f_f_x_y)\n (List.to_string Term.to_string \"\" variants');\n let tree' = TermCodeTree.insert tree' t' in\n GENERALIZATIONS\n let u = Term.Fun (g, [f_x; y]) in\n let tree' = TermCodeTree.insert tree' u in\n let gens = TermCodeTree.generalization_candidates tree' g_f_f_y_c in\n Format.printf \"generalizations for %s: %s\\n\"\n (Term.to_string g_f_f_y_c)\n let gens = TermCodeTree.generalization_candidates tree' u in\n Format.printf \"generalizations for %s: %s\\n\"\n (Term.to_string u)\n let s = Term.Fun (g, [f_x; x]) in\n let tree' = TermCodeTree.insert tree' s in\n let gens = TermCodeTree.generalization_candidates tree' g_f_f_x_f_x in\n Format.printf \"generalizations for %s: %s\\n\"\n (Term.to_string g_f_f_x_f_x)\n (List.to_string Term.to_string \"\" gens);\n let gens = TermCodeTree.encompassment_candidates_not_strict tree' t in\n let f (t, 
p) = (Term.to_string t) ^ \"@\" ^ (Position.to_string p) ^ \"\\n\" in \n Format.printf \"encompassments for %s: %s\\n\"\n (Term.to_string t)\n (List.to_string f \"\" gens);\n;;\n*)\n"}}},{"rowIdx":610241,"cells":{"_id":{"kind":"string","value":"2d15462f3dfb7abb6a87dfbb0692273cd9c76cdfcae05b89b48377f7c0244d7c"},"repository":{"kind":"string","value":"haskellari/indexed-traversable"},"name":{"kind":"string","value":"GhcList.hs"},"content":{"kind":"string","value":"{-# LANGUAGE CPP #-}\n#if MIN_VERSION_base(4,17,0)\n{-# LANGUAGE Safe #-}\n#elif __GLASGOW_HASKELL__ >= 702\n# LANGUAGE Trustworthy #\n#endif\nmodule GhcList (\n build,\n) where\n\n#if MIN_VERSION_base(4,17,0)\nimport GHC.List (build)\n#else\nimport GHC.Exts (build)\n#endif\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/haskellari/indexed-traversable/8403a52163e5b8f3ec32a2846b53ccc2e8088a6f/indexed-traversable/src/GhcList.hs"},"language":{"kind":"string","value":"haskell"},"comments":{"kind":"string","value":"# LANGUAGE CPP #\n# LANGUAGE Safe #"},"code":{"kind":"string","value":"#if MIN_VERSION_base(4,17,0)\n#elif __GLASGOW_HASKELL__ >= 702\n# LANGUAGE Trustworthy #\n#endif\nmodule GhcList (\n build,\n) where\n\n#if MIN_VERSION_base(4,17,0)\nimport GHC.List (build)\n#else\nimport GHC.Exts (build)\n#endif\n"}}},{"rowIdx":610242,"cells":{"_id":{"kind":"string","value":"5f4bf801f0e07c26630f9b98714dcb90238998b8f63796e2642b954c933544e2"},"repository":{"kind":"string","value":"alanz/ghc-exactprint"},"name":{"kind":"string","value":"SH_Overlap9.hs"},"content":{"kind":"string","value":"# OPTIONS_GHC -fwarn - safe #\n# LANGUAGE FlexibleInstances #\n\n-- | Same as `SH_Overlap6`, but now we are inferring safety. Should be inferred\n-- unsafe due to overlapping instances at call site `f`.\nmodule SH_Overlap9 where\n\nimport SH_Overlap9_A\n\ninstance\n C [a] where\n f _ = \"[a]\"\n\ntest :: String\ntest = f ([1,2,3,4] :: [Int])\n\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/alanz/ghc-exactprint/b6b75027811fa4c336b34122a7a7b1a8df462563/tests/examples/ghc80/SH_Overlap9.hs"},"language":{"kind":"string","value":"haskell"},"comments":{"kind":"string","value":" | Same as `SH_Overlap6`, but now we are inferring safety. Should be inferred\n unsafe due to overlapping instances at call site `f`."},"code":{"kind":"string","value":"# OPTIONS_GHC -fwarn - safe #\n# LANGUAGE FlexibleInstances #\n\nmodule SH_Overlap9 where\n\nimport SH_Overlap9_A\n\ninstance\n C [a] where\n f _ = \"[a]\"\n\ntest :: String\ntest = f ([1,2,3,4] :: [Int])\n\n"}}},{"rowIdx":610243,"cells":{"_id":{"kind":"string","value":"85cccff35599098082d332fcc983d93f978779fc63b43311cca246dda1f99ee0"},"repository":{"kind":"string","value":"janestreet/async_rpc_kernel"},"name":{"kind":"string","value":"rpc_metadata.mli"},"content":{"kind":"string","value":"* Metadata is arbitrary information provided by a caller along with the query . It is \n opaque to the Async RPC protocol , and may not be present on all queries . Metadata \n should generally be small , middleware - provided data that does not affect the callee 's \n behavior ( e.g. tracing ids ) . It may be subject to truncation if values provided are \n too large . See [ Connection.create ] for more info .\n opaque to the Async RPC protocol, and may not be present on all queries. Metadata\n should generally be small, middleware-provided data that does not affect the callee's\n behavior (e.g. tracing ids). 
It may be subject to truncation if values provided are\n too large. See [Connection.create] for more info. *)\n\nopen! Core\n\ntype t = string [@@deriving sexp_of]\n\n* Retrieves the metadata in the context of the current RPC call , if it is available .\nval get : unit -> t option\n\nmodule Private : sig\n val with_metadata : t option -> f:(unit -> 'a) -> 'a\nend\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/janestreet/async_rpc_kernel/541fb417b39fad5c930ac73b729a7aaf59bd1001/src/rpc_metadata.mli"},"language":{"kind":"string","value":"ocaml"},"comments":{"kind":"string","value":""},"code":{"kind":"string","value":"* Metadata is arbitrary information provided by a caller along with the query . It is \n opaque to the Async RPC protocol , and may not be present on all queries . Metadata \n should generally be small , middleware - provided data that does not affect the callee 's \n behavior ( e.g. tracing ids ) . It may be subject to truncation if values provided are \n too large . See [ Connection.create ] for more info .\n opaque to the Async RPC protocol, and may not be present on all queries. Metadata\n should generally be small, middleware-provided data that does not affect the callee's\n behavior (e.g. tracing ids). It may be subject to truncation if values provided are\n too large. See [Connection.create] for more info. *)\n\nopen! Core\n\ntype t = string [@@deriving sexp_of]\n\n* Retrieves the metadata in the context of the current RPC call , if it is available .\nval get : unit -> t option\n\nmodule Private : sig\n val with_metadata : t option -> f:(unit -> 'a) -> 'a\nend\n"}}},{"rowIdx":610244,"cells":{"_id":{"kind":"string","value":"6771c3d64b28efe1bd2eead1a00e0d49576f11f430ff39602d538a7c8f4d162a"},"repository":{"kind":"string","value":"pqwy/notty"},"name":{"kind":"string","value":"notty_top_init.ml"},"content":{"kind":"string","value":" Copyright ( c ) 2017 . All rights reserved . \n See LICENSE.md .\n See LICENSE.md. *)\n\nopen Notty;;\n\n#install_printer Notty.Render.pp_image;;\n#install_printer Notty.Render.pp_attr;;\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/pqwy/notty/389366c023396017aa21efcdbb07ade5ba0974c5/src/notty_top_init.ml"},"language":{"kind":"string","value":"ocaml"},"comments":{"kind":"string","value":""},"code":{"kind":"string","value":" Copyright ( c ) 2017 . All rights reserved . \n See LICENSE.md .\n See LICENSE.md. 
*)\n\nopen Notty;;\n\n#install_printer Notty.Render.pp_image;;\n#install_printer Notty.Render.pp_attr;;\n"}}},{"rowIdx":610245,"cells":{"_id":{"kind":"string","value":"c6b904a109064fdbcdd47b942a4448d1892030c35f153fa778071da196ef2869"},"repository":{"kind":"string","value":"hasktorch/ffi-experimental"},"name":{"kind":"string","value":"Scalar.hs"},"content":{"kind":"string","value":"# LANGUAGE MultiParamTypeClasses #\n# LANGUAGE FlexibleContexts #\n# LANGUAGE FlexibleInstances #\n\nmodule Torch.Scalar where\n\nimport Foreign.ForeignPtr\n\nimport qualified ATen.Const as ATen\nimport qualified ATen.Managed.Type.Scalar as ATen\nimport qualified ATen.Type as ATen\nimport ATen.Managed.Cast\nimport ATen.Class (Castable(..))\nimport ATen.Cast\n\ninstance Castable Float (ForeignPtr ATen.Scalar) where\n cast x f = ATen.newScalar_d (realToFrac x) >>= f\n uncast x f = undefined\n\ninstance Castable Double (ForeignPtr ATen.Scalar) where\n cast x f = ATen.newScalar_d (realToFrac x) >>= f\n uncast x f = undefined\n\ninstance Castable Int (ForeignPtr ATen.Scalar) where\n cast x f = ATen.newScalar_i (fromIntegral x) >>= f\n uncast x f = undefined\n\nclass (Castable a (ForeignPtr ATen.Scalar)) => Scalar a\ninstance Scalar Float\ninstance Scalar Double\ninstance Scalar Int\n\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/hasktorch/ffi-experimental/54192297742221c4d50398586ba8d187451f9ee0/hasktorch/src/Torch/Scalar.hs"},"language":{"kind":"string","value":"haskell"},"comments":{"kind":"string","value":""},"code":{"kind":"string","value":"# LANGUAGE MultiParamTypeClasses #\n# LANGUAGE FlexibleContexts #\n# LANGUAGE FlexibleInstances #\n\nmodule Torch.Scalar where\n\nimport Foreign.ForeignPtr\n\nimport qualified ATen.Const as ATen\nimport qualified ATen.Managed.Type.Scalar as ATen\nimport qualified ATen.Type as ATen\nimport ATen.Managed.Cast\nimport ATen.Class (Castable(..))\nimport ATen.Cast\n\ninstance Castable Float (ForeignPtr ATen.Scalar) where\n cast x f = ATen.newScalar_d (realToFrac x) >>= f\n uncast x f = undefined\n\ninstance Castable Double (ForeignPtr ATen.Scalar) where\n cast x f = ATen.newScalar_d (realToFrac x) >>= f\n uncast x f = undefined\n\ninstance Castable Int (ForeignPtr ATen.Scalar) where\n cast x f = ATen.newScalar_i (fromIntegral x) >>= f\n uncast x f = undefined\n\nclass (Castable a (ForeignPtr ATen.Scalar)) => Scalar a\ninstance Scalar Float\ninstance Scalar Double\ninstance Scalar Int\n\n"}}},{"rowIdx":610246,"cells":{"_id":{"kind":"string","value":"e8e44553c6bc715e30d1f0cfe4aee8dba9a3f8964713f76f40af4bf96db3856f"},"repository":{"kind":"string","value":"threatgrid/ctia"},"name":{"kind":"string","value":"crud.clj"},"content":{"kind":"string","value":"(ns ctia.stores.es.crud\n (:require\n [clojure.set :as set]\n [clojure.string :as string]\n [clojure.tools.logging :as log]\n [ctia.domain.access-control :as ac\n :refer [allow-read? allow-write? 
restricted-read?]]
   [ctia.lib.pagination :refer [list-response-schema]]
   [ctia.schemas.core :refer [SortExtension SortExtensionDefinitions]]
   [ctia.schemas.search-agg
    :refer [AggQuery CardinalityQuery HistogramQuery QueryStringSearchArgs SearchQuery TopnQuery]]
   [ctia.stores.es.sort :as es.sort]
   [ctia.stores.es.query :as es.query]
   [ctia.stores.es.schemas :refer [ESConnState]]
   [ductile.document :as ductile.doc]
   [ductile.query :as q]
   [ring.swagger.coerce :as sc]
   [schema-tools.core :as st]
   [schema.coerce :as c]
   [schema.core :as s]))

(defn make-es-read-params
  "Prepare ES params for read operations, setting the _source field
   and including the mandatory ACL fields."
  [{:keys [fields]
    :as es-params}]
  (cond-> es-params
    (coll? fields)
    (-> (assoc :_source (concat fields ac/acl-fields))
        (dissoc :fields))))

(defn coerce-to-fn
  [Model]
  (c/coercer! Model sc/json-schema-coercion-matcher))

(defn ensure-document-id
  "Returns a document ID. If id is an object ID, the document ID is
   extracted from it; if it is already a document ID, it is returned
   unchanged."
  [id]
  (let [[_orig docid] (re-matches #".*?([^/]+)\z" id)]
    docid))

(defn ensure-document-id-in-map
  "Ensure a document ID in a given filter map"
  [{:keys [id] :as m}]
  (cond-> m
    (string? id) (update :id list)
    id (update :id #(map ensure-document-id %))))

(defn remove-es-actions
  "Removes the ES action level

   [{:index {:_id \"1\"}}
    {:index {:_id \"2\"}}]

   ->

   [{:_id \"1\"}
    {:_id \"2\"}]
  "
  [items]
  (map (comp first vals) items))

(defn build-create-result
  [item coerce-fn]
  (-> item
      (dissoc :_id :_index :_type)
      coerce-fn))

(defn partial-results
  "Build partial results when an error occurs for one or more items
   in the bulk operation.

   Ex:

   [{model1}
    {:error \"Error message item2\"}
    {model3}]"
  [exception-data models coerce-fn]
  (let [{{:keys [items]}
         :es-http-res-body} exception-data]
    {:data (map (fn [{:keys [error _id]} model]
                  (if error
                    {:error error
                     :id _id}
                    (build-create-result model coerce-fn)))
                (remove-es-actions items) models)}))

(s/defn get-docs-with-indices
  "Retrieves documents with a search \"ids\" query. It makes it possible to
   retrieve documents from an alias that points to multiple indices.
   It returns the documents with full hits metadata, including the real index
   in which each document is stored."
  [{:keys [conn index] :as _conn-state} :- ESConnState
   ids :- [s/Str]
   es-params]
  (let [limit (count ids)
        ids-query (q/ids (map ensure-document-id ids))
        res (ductile.doc/query conn
                               index
                               ids-query
                               (assoc (make-es-read-params es-params)
                                      :limit limit
                                      :full-hits? true))]
    (:data res)))
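;; Illustrative sketch only: how the id helpers above normalize long-form
;; entity ids before they are sent to ES. The URL-style id below is a made-up
;; example value, not something produced by this namespace.
(comment
  (ensure-document-id "https://ctia.example.org/ctia/sighting/sighting-b10f54b2")
  ;; => "sighting-b10f54b2"
  (ensure-document-id-in-map {:id "https://ctia.example.org/ctia/sighting/sighting-b10f54b2"})
  ;; => {:id ("sighting-b10f54b2")}
  )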
(s/defn get-doc-with-index
  "Retrieves a document with a search \"ids\" query. It is used to perform a
   get query on an alias that points to multiple indices.
   It returns the document with full hits metadata, including the real index
   in which the document is stored."
  [conn-state :- ESConnState
   _id :- s/Str
   es-params]
  (first (get-docs-with-indices conn-state [_id] es-params)))

(defn ^:private prepare-opts
  [{:keys [props]}
   {:keys [refresh]}]
  {:refresh (or refresh
                (:refresh props)
                "false")})

(s/defn bulk-schema
  [Model :- (s/pred map?)]
  (st/optional-keys
   {:create [Model]
    :index [Model]
    :update [(st/optional-keys Model)]
    :delete [s/Str]}))

(s/defn ^:private prepare-bulk-doc
  [{:keys [props]} :- ESConnState
   mapping :- s/Keyword
   doc :- (s/pred map?)]
  (assoc doc
         :_id (:id doc)
         :_index (:write-index props)
         :_type (name mapping)))

(defn handle-create
  "Generate an ES create handler using some mapping and schema"
  [mapping Model]
  (let [coerce! (coerce-to-fn (s/maybe Model))]
    (s/fn :- [Model]
      [{:keys [conn] :as conn-state} :- ESConnState
       docs :- [Model]
       _ident
       es-params]
      (let [prepare-doc (partial prepare-bulk-doc conn-state mapping)
            prepared (mapv prepare-doc docs)]
        (try
          (ductile.doc/bulk-index-docs conn
                                       prepared
                                       (prepare-opts conn-state es-params))
          docs
          (catch Exception e
            (throw
             (if-let [ex-data (ex-data e)]
               ;; Add partial results to the exception data map
               (ex-info (.getMessage e)
                        (partial-results ex-data docs coerce!))
               e))))))))

(defn handle-update
  "Generate an ES update handler using some mapping and schema"
  [mapping Model]
  (let [coerce! (coerce-to-fn (s/maybe Model))]
    (s/fn :- (s/maybe Model)
      [{:keys [conn] :as conn-state} :- ESConnState
       id :- s/Str
       realized :- Model
       ident
       es-params]
      (when-let [[{index :_index current-doc :_source}]
                 (get-docs-with-indices conn-state [id] {})]
        (if (allow-write? current-doc ident)
          (let [update-doc (assoc realized
                                  :id (ensure-document-id id))]
            (ductile.doc/index-doc conn
                                   index
                                   (name mapping)
                                   update-doc
                                   (prepare-opts conn-state es-params))
            (coerce! update-doc))
          (throw (ex-info "You are not allowed to update this document"
                          {:type :access-control-error})))))))

(defn handle-read
  "Generate an ES read handler using some schema"
  [Model]
  (let [coerce! (coerce-to-fn (s/maybe Model))]
    (s/fn :- (s/maybe Model)
      [{{{:keys [get-in-config]} :ConfigService}
        :services
        :as conn-state}
       :- ESConnState
       id :- s/Str
       ident
       es-params]
      (when-let [doc (-> (get-doc-with-index conn-state
                                             id
                                             (make-es-read-params es-params))
                         :_source
                         coerce!)]
        (if (allow-read? doc ident get-in-config)
          doc
          (throw (ex-info "You are not allowed to read this document"
                          {:type :access-control-error})))))))
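;; Hedged usage sketch for the handle-* constructors above: each returns a
;; store handler closed over a schema, which is later invoked with an ES
;; connection state. `ASchema`, `conn-state`, `ident` and the id string below
;; are placeholders, not values defined in this namespace.
(comment
  (let [read-doc (handle-read ASchema)]
    (read-doc conn-state "judgement-123" ident {:fields [:id :source]})))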
(defn handle-read-many
  "Generate an ES read-many handler using some schema"
  [Model]
  (let [coerce! (coerce-to-fn Model)]
    (s/fn :- [(s/maybe Model)]
      [{{{:keys [get-in-config]} :ConfigService}
        :services
        :as conn-state}
       :- ESConnState
       ids :- [s/Str]
       ident
       {:keys [suppress-access-control-error?]
        :or {suppress-access-control-error? false}
        :as es-params}]
      (sequence
       (comp (map :_source)
             (map coerce!)
             (map (fn [record]
                    (if (allow-read? record ident get-in-config)
                      record
                      (let [ex (ex-info "You are not allowed to read this document"
                                        {:type :access-control-error})]
                        (if suppress-access-control-error?
                          (log/error ex)
                          (throw ex)))))))
       (get-docs-with-indices conn-state ids (make-es-read-params es-params))))))

(defn access-control-filter-list
  "Given an ident, keep only documents it is allowed to read"
  [docs ident get-in-config]
  (filter #(allow-read? % ident get-in-config) docs))

(s/defschema BulkResult
  (st/optional-keys
   {:deleted [s/Str]
    :updated [s/Str]
    :errors (st/optional-keys
             {:forbidden [s/Str]
              :not-found [s/Str]
              :internal-error [s/Str]})}))

(s/defschema ESActionResult
  (st/open-schema
   {:_id s/Str
    :_index s/Str
    :status s/Int
    :result s/Str}))

;; TODO move it to ductile
(s/defschema ESBulkRes
  {:took s/Int
   :errors s/Bool
   :items [{ductile.doc/BulkOps ESActionResult}]})

(s/defn ^:private format-bulk-res
  "Transforms an elasticsearch bulk result into a CTIA Bulk Result.
   ex: -bulk.html#docs-bulk-api-example"
  [bulk-res :- ESBulkRes]
  (let [{:keys [deleted updated not_found]}
        (->> (:items bulk-res)
             (map (comp first vals))
             (group-by :result)
             (into {}
                   (map (fn [[result items]]
                          {(keyword result) (map :_id items)}))))]
    (cond-> {}
      deleted (assoc :deleted deleted)
      updated (assoc :updated updated)
      not_found (assoc-in [:errors :not-found] not_found))))
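;; Worked example (invented values) of the reshaping done by format-bulk-res
;; above: ES bulk item results are grouped by :result and reduced to the
;; BulkResult shape used by the bulk handlers below.
(comment
  (format-bulk-res
   {:took 3
    :errors true
    :items [{:delete {:_id "1" :_index "idx" :status 200 :result "deleted"}}
            {:delete {:_id "2" :_index "idx" :status 404 :result "not_found"}}]})
  ;; => {:deleted ("1"), :errors {:not-found ("2")}}
  )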
(s/defn check-and-prepare-bulk
  :- (st/assoc BulkResult
               (s/optional-key :prepared)
               [(s/pred map?)])
  "prepare a bulk query:
   - retrieve actual indices, deletion cannot be performed on the alias.
   - filter out forbidden entities
   - forbidden and not_found errors are prepared for the response."
  [conn-state :- ESConnState
   ids :- [s/Str]
   ident]
  (let [get-in-config (get-in conn-state [:services :ConfigService :get-in-config])
        doc-ids (map ensure-document-id ids)
        docs-with-indices (get-docs-with-indices conn-state doc-ids {})
        {authorized true forbidden-write false}
        (group-by #(allow-write? (:_source %) ident)
                  docs-with-indices)
        {forbidden true not-visible false}
        (group-by #(allow-read? (:_source %) ident get-in-config)
                  forbidden-write)
        missing (set/difference (set doc-ids)
                                (set (map :_id docs-with-indices)))
        not-found (into (map :_id not-visible) missing)
        prepared-docs (map #(select-keys % [:_index :_type :_id])
                           authorized)]
    (cond-> {}
      forbidden (assoc-in [:errors :forbidden] (map :_id forbidden))
      (seq not-found) (assoc-in [:errors :not-found] not-found)
      authorized (assoc :prepared prepared-docs))))

(s/defn bulk-delete :- BulkResult
  [{:keys [conn] :as conn-state}
   ids :- [s/Str]
   ident
   es-params]
  (let [{:keys [prepared errors]} (check-and-prepare-bulk conn-state ids ident)
        bulk-res (when prepared
                   (try
                     (format-bulk-res
                      (ductile.doc/bulk-delete-docs conn
                                                    prepared
                                                    (prepare-opts conn-state es-params)))
                     (catch Exception e
                       (log/error e
                                  (str "bulk delete failed: " (.getMessage e))
                                  (pr-str prepared))
                       {:errors {:internal-error (map :_id prepared)}})))]
    (cond-> bulk-res
      errors (update :errors
                     #(merge-with concat errors %)))))

(s/defn bulk-update
  "Generate an ES bulk update handler using some schema"
  [Model]
  (s/fn :- BulkResult
    [{:keys [conn] :as conn-state}
     docs :- [Model]
     ident
     es-params]
    (let [by-id (group-by :id docs)
          ids (seq (keys by-id))
          {:keys [prepared errors]} (check-and-prepare-bulk conn-state
                                                            ids
                                                            ident)
          prepared-docs (map (fn [meta]
                               (-> (:_id meta)
                                   by-id
                                   first
                                   (into meta)))
                             prepared)
          bulk-res (when prepared
                     (try
                       (format-bulk-res
                        (ductile.doc/bulk-index-docs conn
                                                     prepared-docs
                                                     (prepare-opts conn-state es-params)))
                       (catch Exception e
                         (log/error (str "bulk update failed: " (.getMessage e))
                                    (pr-str prepared))
                         {:errors {:internal-error (map :_id prepared)}})))]
      (cond-> bulk-res
        errors (update :errors
                       #(merge-with concat errors %))))))

(defn handle-delete
  "Generate an ES delete handler using some mapping"
  [mapping]
  (s/fn :- s/Bool
    [{:keys [conn] :as conn-state} :- ESConnState
     id :- s/Str
     ident
     es-params]
    (if-let [{index :_index doc :_source}
             (get-doc-with-index conn-state id {})]
      (if (allow-write? doc ident)
        (ductile.doc/delete-doc conn
                                index
                                (name mapping)
                                (ensure-document-id id)
                                (prepare-opts conn-state es-params))
        (throw (ex-info "You are not allowed to delete this document"
                        {:type :access-control-error})))
      false)))

(s/defschema FilterSchema
  (st/optional-keys
   {:all-of {s/Any s/Any}
    :one-of {s/Any s/Any}
    :query s/Str}))

(def enumerable-fields-mapping
  "Mapping table for all fields which need to be renamed
   for sorting or aggregation. Instead of using fielddata we can have
   a text field for full text searches, and an unanalysed keyword
   field with doc_values enabled for sorting or aggregation"
  {"title" "title.whole"
   "reason" "reason.whole"})
sort_by)\n (map\n (fn [field]\n (let [[field-name field-order] (string/split field #\":\")]\n (cond-> {:op :field\n :field-name (keyword field-name)}\n field-order (assoc :sort_order field-order))))\n (string/split (name sort_by) #\",\"))\n sort_by))\n\n(defn with-default-sort-field\n [es-params {:keys [default-sort]}]\n (assert (not (:sort_by es-params)))\n (update es-params :sort #(or %\n (some->> default-sort\n parse-sort-by\n (mapv (fn [m] (es.sort/parse-sort-params-op m :asc))))\n [{\"_doc\" :asc} {\"id\" :asc}])))\n\n(s/defn rename-sort-fields\n \"Renames sort fields based on the content of the `enumerable-fields-mapping` table\n and remaps to script extensions.\"\n [{:keys [sort_by sort_order] :as es-params}\n sort-extension-definitions :- (s/maybe SortExtensionDefinitions)]\n (cond-> (dissoc es-params :sort_by :sort_order)\n (and sort_by (not (:sort es-params)))\n (assoc :sort\n (->> sort_by\n parse-sort-by\n (mapv (fn [field]\n {:pre [(= :field (:op field))]}\n (let [{:keys [field-name] :as field}\n (update field :field-name #(or (keyword (enumerable-fields-mapping (name %)))\n %))]\n (assert (simple-keyword? field-name))\n (-> (or (some-> (get sort-extension-definitions field-name)\n (into (select-keys field [:sort_order]))\n (update :field-name #(or % (:field-name field))))\n field)\n (es.sort/parse-sort-params-op (or sort_order :asc))))))))))\n\n(s/defschema MakeQueryParamsArgs\n {:params s/Any\n :props s/Any\n (s/optional-key :sort-extension-definitions) SortExtensionDefinitions})\n\n(s/defn make-query-params :- {s/Keyword s/Any}\n [{:keys [params props sort-extension-definitions]} :- MakeQueryParamsArgs]\n (cond-> (-> params\n (rename-sort-fields sort-extension-definitions)\n (with-default-sort-field props)\n make-es-read-params)\n (<= 7 (:version props)) (assoc :track_total_hits true)))\n\n(defn handle-find\n \"Generate an ES find/list handler using some mapping and schema\"\n [Model]\n (let [response-schema (list-response-schema Model)\n coerce! (coerce-to-fn response-schema)]\n (s/fn :- response-schema\n [{{{:keys [get-in-config]} :ConfigService} :services\n :keys [conn index props]} :- ESConnState\n {:keys [all-of one-of query]\n :or {all-of {} one-of {}}} :- FilterSchema\n ident\n es-params]\n (let [filter-val (cond-> (q/prepare-terms all-of)\n (restricted-read? ident)\n (conj (es.query/find-restriction-query-part ident get-in-config)))\n query_string {:query_string {:query query}}\n date-range-query (es.query/make-date-range-query es-params)\n bool-params (cond-> {:filter filter-val}\n (seq one-of) (into\n {:should (q/prepare-terms one-of)\n :minimum_should_match 1})\n query (update :filter conj query_string)\n (seq date-range-query) (update :filter conj {:range date-range-query}))\n query-params (make-query-params {:params es-params :props props})]\n (cond-> (coerce! (ductile.doc/query conn\n index\n (q/bool bool-params)\n query-params))\n (restricted-read? 
ident) (update :data\n access-control-filter-list\n ident\n get-in-config))))))\n\n(s/defn make-search-query :- {s/Keyword s/Any}\n \"Translate SearchQuery map into ES Query DSL map\"\n [es-conn-state :- ESConnState\n search-query :- SearchQuery\n ident]\n (let [{:keys [services]} es-conn-state\n {{:keys [get-in-config]} :ConfigService} services\n {:keys [filter-map range full-text]} search-query\n range-query (when range\n {:range range})\n filter-terms (-> (ensure-document-id-in-map filter-map)\n q/prepare-terms)]\n {:bool\n {:filter\n (cond-> [(es.query/find-restriction-query-part ident get-in-config)]\n (seq filter-map) (into filter-terms)\n (seq range) (conj range-query)\n (seq full-text) (into (es.query/refine-full-text-query-parts\n es-conn-state full-text)))}}))\n\n(defn handle-query-string-search\n \"Generate an ES query handler for given schema schema\"\n [Model]\n (let [response-schema (list-response-schema Model)\n coerce! (coerce-to-fn response-schema)]\n (s/fn :- response-schema\n [{:keys [props] :as es-conn-state} :- ESConnState\n {:keys [search-query ident] :as query-string-search-args} :- QueryStringSearchArgs]\n (let [{conn :conn, index :index\n {{:keys [get-in-config]} :ConfigService}\n :services} es-conn-state\n query (make-search-query es-conn-state search-query ident)\n query-params (make-query-params (-> (select-keys query-string-search-args [:params :sort-extension-definitions])\n (assoc :props props)))]\n (cond-> (coerce! (ductile.doc/query\n conn\n index\n query\n query-params))\n (restricted-read? ident) (update\n :data\n access-control-filter-list\n ident\n get-in-config))))))\n\n(s/defn handle-delete-search\n \"ES delete by query handler\"\n [{:keys [conn index] :as es-conn-state} :- ESConnState\n search-query :- SearchQuery\n ident\n es-params]\n (let [query (make-search-query es-conn-state search-query ident)]\n (:deleted\n (ductile.doc/delete-by-query conn\n [index]\n query\n (prepare-opts es-conn-state es-params)))))\n\n(s/defn handle-query-string-count :- (s/pred nat-int?)\n \"ES count handler\"\n [{conn :conn\n index :index\n :as es-conn-state} :- ESConnState\n search-query :- SearchQuery\n ident]\n (let [query (make-search-query es-conn-state search-query ident)]\n (ductile.doc/count-docs conn\n index\n query)))\n\n(s/defn make-histogram\n [{:keys [aggregate-on granularity timezone]\n :or {timezone \"+00:00\"}} :- HistogramQuery]\n {:date_histogram\n {:field aggregate-on\n TODO switch to calendar_interval with ES7\n :time_zone timezone}})\n\n(s/defn make-topn\n [{:keys [aggregate-on limit sort_order]\n :or {limit 10 sort_order :desc}} :- TopnQuery]\n {:terms\n {:field (get enumerable-fields-mapping aggregate-on aggregate-on)\n :size limit\n :order {:_count sort_order}}})\n\n(s/defn make-cardinality\n [{:keys [aggregate-on]} :- CardinalityQuery]\n {:cardinality {:field (get enumerable-fields-mapping aggregate-on aggregate-on)\n :precision_threshold 10000}})\n\n(s/defn make-aggregation\n [{:keys [agg-type agg-key aggs]\n :or {agg-key :metric}\n :as agg-query} :- AggQuery]\n (let [root-agg (dissoc agg-query :aggs)\n agg-fn\n (case agg-type\n :topn make-topn\n :cardinality make-cardinality\n :histogram make-histogram\n (throw (ex-info (str \"invalid aggregation type: \" (pr-str agg-type))\n {})))]\n (cond-> {agg-key (agg-fn root-agg)}\n (seq aggs) (assoc :aggs (make-aggregation aggs)))))\n\n(defn format-agg-result\n [agg-type\n {:keys [value buckets] :as _metric-res}]\n (case agg-type\n :cardinality value\n :topn (map #(array-map :key (:key %)\n :value 
(:doc_count %))\n buckets)\n :histogram (map #(array-map :key (:key_as_string %)\n :value (:doc_count %))\n buckets)))\n\n(s/defn handle-aggregate\n \"Generate an ES aggregation handler for given schema\"\n [{:keys [conn index] :as es-conn-state} :- ESConnState\n search-query :- SearchQuery\n {:keys [agg-type] :as agg-query} :- AggQuery\n ident]\n (let [query (make-search-query es-conn-state search-query ident)\n agg (make-aggregation (assoc agg-query :agg-key :metric))\n es-res (ductile.doc/query conn\n index\n query\n agg\n {:limit 0})]\n (format-agg-result agg-type\n (get-in es-res [:aggs :metric]))))\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/threatgrid/ctia/6c11ba6a7c57a44de64c16601d3914f5b0cf308e/src/ctia/stores/es/crud.clj"},"language":{"kind":"string","value":"clojure"},"comments":{"kind":"string","value":" Add partial results to the exception data map"},"code":{"kind":"string","value":"(ns ctia.stores.es.crud\n (:require\n [clojure.set :as set]\n [clojure.string :as string]\n [clojure.tools.logging :as log]\n [ctia.domain.access-control :as ac\n :refer [allow-read? allow-write? restricted-read?]]\n [ctia.lib.pagination :refer [list-response-schema]]\n [ctia.schemas.core :refer [SortExtension SortExtensionDefinitions]]\n [ctia.schemas.search-agg\n :refer [AggQuery CardinalityQuery HistogramQuery QueryStringSearchArgs SearchQuery TopnQuery]]\n [ctia.stores.es.sort :as es.sort]\n [ctia.stores.es.query :as es.query]\n [ctia.stores.es.schemas :refer [ESConnState]]\n [ductile.document :as ductile.doc]\n [ductile.query :as q]\n [ring.swagger.coerce :as sc]\n [schema-tools.core :as st]\n [schema.coerce :as c]\n [schema.core :as s]))\n\n(defn make-es-read-params\n \"Prepare ES Params for read operations, setting the _source field\n and including ACL mandatory ones.\"\n [{:keys [fields]\n :as es-params}]\n (cond-> es-params\n (coll? fields)\n (-> (assoc :_source (concat fields ac/acl-fields))\n (dissoc :fields))))\n\n(defn coerce-to-fn\n [Model]\n (c/coercer! Model sc/json-schema-coercion-matcher))\n\n(defn ensure-document-id\n \"Returns a document ID. if id is a object ID, it extract the\n document ID, if it's a document ID already, it will just return\n that.\"\n [id]\n (let [[_orig docid] (re-matches #\".*?([^/]+)\\z\" id)]\n docid))\n\n(defn ensure-document-id-in-map\n \"Ensure a document ID in a given filter map\"\n [{:keys [id] :as m}]\n (cond-> m\n (string? id) (update :id list)\n id (update :id #(map ensure-document-id %))))\n\n(defn remove-es-actions\n \"Removes the ES action level\n\n [{:index {:_id \\\"1\\\"}}\n {:index {:_id \\\"2\\\"}}]\n\n ->\n\n [{:_id \\\"1\\\"}\n {:_id \\\"2\\\"}]\n \"\n [items]\n (map (comp first vals) items))\n\n(defn build-create-result\n [item coerce-fn]\n (-> item\n (dissoc :_id :_index :_type)\n coerce-fn))\n\n(defn partial-results\n \"Build partial results when an error occurs for one or more items\n in the bulk operation.\n\n Ex:\n\n [{model1}\n {:error \\\"Error message item2\\\"}\n {model3}]\"\n [exception-data models coerce-fn]\n (let [{{:keys [items]}\n :es-http-res-body} exception-data]\n {:data (map (fn [{:keys [error _id]} model]\n (if error\n {:error error\n :id _id}\n (build-create-result model coerce-fn)))\n (remove-es-actions items) models)}))\n\n(s/defn get-docs-with-indices\n \"Retrieves a documents from a search \\\"ids\\\" query. 
It enables to retrieves\n documents from an alias that points to multiple indices.\nIt returns the documents with full hits meta data including the real index in which is stored the document.\"\n [{:keys [conn index] :as _conn-state} :- ESConnState\n ids :- [s/Str]\n es-params]\n (let [limit (count ids)\n ids-query (q/ids (map ensure-document-id ids))\n res (ductile.doc/query conn\n index\n ids-query\n (assoc (make-es-read-params es-params)\n :limit limit\n :full-hits? true))]\n (:data res)))\n\n(s/defn get-doc-with-index\n \"Retrieves a document from a search \\\"ids\\\" query. It is used to perform a get query on an alias that points to multiple indices.\n It returns the document with full hits meta data including the real index in which is stored the document.\"\n [conn-state :- ESConnState\n _id :- s/Str\n es-params]\n (first (get-docs-with-indices conn-state [_id] es-params)))\n\n(defn ^:private prepare-opts\n [{:keys [props]}\n {:keys [refresh]}]\n {:refresh (or refresh\n (:refresh props)\n \"false\")})\n\n(s/defn bulk-schema\n [Model :- (s/pred map?)]\n (st/optional-keys\n {:create [Model]\n :index [Model]\n :update [(st/optional-keys Model)]\n :delete [s/Str]}))\n\n(s/defn ^:private prepare-bulk-doc\n [{:keys [props]} :- ESConnState\n mapping :- s/Keyword\n doc :- (s/pred map?)]\n (assoc doc\n :_id (:id doc)\n :_index (:write-index props)\n :_type (name mapping)))\n\n(defn handle-create\n \"Generate an ES create handler using some mapping and schema\"\n [mapping Model]\n (let [coerce! (coerce-to-fn (s/maybe Model))]\n (s/fn :- [Model]\n [{:keys [conn] :as conn-state} :- ESConnState\n docs :- [Model]\n _ident\n es-params]\n (let [prepare-doc (partial prepare-bulk-doc conn-state mapping)\n prepared (mapv prepare-doc docs)]\n (try\n (ductile.doc/bulk-index-docs conn\n prepared\n (prepare-opts conn-state es-params))\n docs\n (catch Exception e\n (throw\n (if-let [ex-data (ex-data e)]\n (ex-info (.getMessage e)\n (partial-results ex-data docs coerce!))\n e))))))))\n\n(defn handle-update\n \"Generate an ES update handler using some mapping and schema\"\n [mapping Model]\n (let [coerce! (coerce-to-fn (s/maybe Model))]\n (s/fn :- (s/maybe Model)\n [{:keys [conn] :as conn-state} :- ESConnState\n id :- s/Str\n realized :- Model\n ident\n es-params]\n (when-let [[{index :_index current-doc :_source}]\n (get-docs-with-indices conn-state [id] {})]\n (if (allow-write? current-doc ident)\n (let [update-doc (assoc realized\n :id (ensure-document-id id))]\n (ductile.doc/index-doc conn\n index\n (name mapping)\n update-doc\n (prepare-opts conn-state es-params))\n (coerce! update-doc))\n (throw (ex-info \"You are not allowed to update this document\"\n {:type :access-control-error})))))))\n\n(defn handle-read\n \"Generate an ES read handler using some mapping and schema\"\n [Model]\n (let [coerce! (coerce-to-fn (s/maybe Model))]\n (s/fn :- (s/maybe Model)\n [{{{:keys [get-in-config]} :ConfigService}\n :services\n :as conn-state}\n :- ESConnState\n id :- s/Str\n ident\n es-params]\n (when-let [doc (-> (get-doc-with-index conn-state\n id\n (make-es-read-params es-params))\n :_source\n coerce!)]\n (if (allow-read? doc ident get-in-config)\n doc\n (throw (ex-info \"You are not allowed to read this document\"\n {:type :access-control-error})))))))\n\n(defn handle-read-many\n \"Generate an ES read-many handler using some mapping and schema\"\n [Model]\n (let [coerce! 
(coerce-to-fn Model)]\n (s/fn :- [(s/maybe Model)]\n [{{{:keys [get-in-config]} :ConfigService}\n :services\n :as conn-state}\n :- ESConnState\n ids :- [s/Str]\n ident\n {:keys [suppress-access-control-error?]\n :or {suppress-access-control-error? false}\n :as es-params}]\n (sequence\n (comp (map :_source)\n (map coerce!)\n (map (fn [record]\n (if (allow-read? record ident get-in-config)\n record\n (let [ex (ex-info \"You are not allowed to read this document\"\n {:type :access-control-error})]\n (if suppress-access-control-error?\n (log/error ex)\n (throw ex)))))))\n (get-docs-with-indices conn-state ids (make-es-read-params es-params))))))\n\n(defn access-control-filter-list\n \"Given an ident, keep only documents it is allowed to read\"\n [docs ident get-in-config]\n (filter #(allow-read? % ident get-in-config) docs))\n\n(s/defschema BulkResult\n (st/optional-keys\n {:deleted [s/Str]\n :updated [s/Str]\n :errors (st/optional-keys\n {:forbidden [s/Str]\n :not-found [s/Str]\n :internal-error [s/Str]})}))\n\n(s/defschema ESActionResult\n (st/open-schema\n {:_id s/Str\n :_index s/Str\n :status s/Int\n :result s/Str}))\n\n TODO move it to ductile\n(s/defschema ESBulkRes\n {:took s/Int\n :errors s/Bool\n :items [{ductile.doc/BulkOps ESActionResult}]})\n\n(s/defn ^:private format-bulk-res\n \"transform an elasticsearch bulk result into a CTIA Bulk Result.\n ex: -bulk.html#docs-bulk-api-example\"\n [bulk-res :- ESBulkRes]\n (let [{:keys [deleted updated not_found]}\n (->> (:items bulk-res)\n (map (comp first vals))\n (group-by :result)\n (into {}\n (map (fn [[result items]]\n {(keyword result) (map :_id items)}))))]\n (cond-> {}\n deleted (assoc :deleted deleted)\n updated (assoc :updated updated)\n not_found (assoc-in [:errors :not-found] not_found))))\n\n(s/defn check-and-prepare-bulk\n :- (st/assoc BulkResult\n (s/optional-key :prepared)\n [(s/pred map?)])\n \"prepare a bulk query:\n - retrieve actual indices, deletion cannot be performed on the alias.\n - filter out forbidden entitites\n - forbidden and not_found errors are prepared for the response.\"\n [conn-state :- ESConnState\n ids :- [s/Str]\n ident]\n (let [get-in-config (get-in conn-state [:services :ConfigService])\n doc-ids (map ensure-document-id ids)\n docs-with-indices (get-docs-with-indices conn-state doc-ids {})\n {authorized true forbidden-write false}\n (group-by #(allow-write? (:_source %) ident)\n docs-with-indices)\n {forbidden true not-visible false}\n (group-by #(allow-read? 
(:_source %) ident get-in-config)\n forbidden-write)\n missing (set/difference (set doc-ids)\n (set (map :_id docs-with-indices)))\n not-found (into (map :_id not-visible) missing)\n prepared-docs (map #(select-keys % [:_index :_type :_id])\n authorized)]\n (cond-> {}\n forbidden (assoc-in [:errors :forbidden] (map :_id forbidden))\n (seq not-found) (assoc-in [:errors :not-found] not-found)\n authorized (assoc :prepared prepared-docs))))\n\n(s/defn bulk-delete :- BulkResult\n [{:keys [conn] :as conn-state}\n ids :- [s/Str]\n ident\n es-params]\n (let [{:keys [prepared errors]} (check-and-prepare-bulk conn-state ids ident)\n bulk-res (when prepared\n (try\n (format-bulk-res\n (ductile.doc/bulk-delete-docs conn\n prepared\n (prepare-opts conn-state es-params)))\n (catch Exception e\n (log/error e\n (str \"bulk delete failed: \" (.getMessage e))\n (pr-str prepared))\n {:errors {:internal-error (map :_id prepared)}})))]\n (cond-> bulk-res\n errors (update :errors\n #(merge-with concat errors %)))))\n\n(s/defn bulk-update\n \"Generate an ES bulk update handler using some mapping and schema\"\n [Model]\n (s/fn :- BulkResult\n [{:keys [conn] :as conn-state}\n docs :- [Model]\n ident\n es-params]\n (let [by-id (group-by :id docs)\n ids (seq (keys by-id))\n {:keys [prepared errors]} (check-and-prepare-bulk conn-state\n ids\n ident)\n prepared-docs (map (fn [meta]\n (-> (:_id meta)\n by-id\n first\n (into meta)))\n prepared)\n bulk-res (when prepared\n (try\n (format-bulk-res\n (ductile.doc/bulk-index-docs conn\n prepared-docs\n (prepare-opts conn-state es-params)))\n (catch Exception e\n (log/error (str \"bulk update failed: \" (.getMessage e))\n (pr-str prepared))\n {:errors {:internal-error (map :_id prepared)}})))]\n (cond-> bulk-res\n errors (update :errors\n #(merge-with concat errors %))))))\n\n(defn handle-delete\n \"Generate an ES delete handler using some mapping\"\n [mapping]\n (s/fn :- s/Bool\n [{:keys [conn] :as conn-state} :- ESConnState\n id :- s/Str\n ident\n es-params]\n (if-let [{index :_index doc :_source}\n (get-doc-with-index conn-state id {})]\n (if (allow-write? doc ident)\n (ductile.doc/delete-doc conn\n index\n (name mapping)\n (ensure-document-id id)\n (prepare-opts conn-state es-params))\n (throw (ex-info \"You are not allowed to delete this document\"\n {:type :access-control-error})))\n false)))\n\n(s/defschema FilterSchema\n (st/optional-keys\n {:all-of {s/Any s/Any}\n :one-of {s/Any s/Any}\n :query s/Str}))\n\n(def enumerable-fields-mapping\n \"Mapping table for all fields which needs to be renamed\n for the sorting or aggregation. Instead of using fielddata we can have\n a text field for full text searches, and an unanalysed keyword\n field with doc_values enabled for sorting or aggregation\"\n {\"title\" \"title.whole\"\n \"reason\" \"reason.whole\"})\n\n(s/defn parse-sort-by :- [SortExtension]\n \"Parses the sort_by parameter\n Ex:\n \\\"title:ASC,revision:DESC\\\"\n ->\n [{:op :field :field-name \\\"title\\\" :sort_order \\\"ASC\\\"}\n {:op :field :field-name \\\"revision\\\" :sort_order \\\"DESC\\\"}]\"\n [sort_by]\n (if ((some-fn string? simple-ident?) 
sort_by)\n (map\n (fn [field]\n (let [[field-name field-order] (string/split field #\":\")]\n (cond-> {:op :field\n :field-name (keyword field-name)}\n field-order (assoc :sort_order field-order))))\n (string/split (name sort_by) #\",\"))\n sort_by))\n\n(defn with-default-sort-field\n [es-params {:keys [default-sort]}]\n (assert (not (:sort_by es-params)))\n (update es-params :sort #(or %\n (some->> default-sort\n parse-sort-by\n (mapv (fn [m] (es.sort/parse-sort-params-op m :asc))))\n [{\"_doc\" :asc} {\"id\" :asc}])))\n\n(s/defn rename-sort-fields\n \"Renames sort fields based on the content of the `enumerable-fields-mapping` table\n and remaps to script extensions.\"\n [{:keys [sort_by sort_order] :as es-params}\n sort-extension-definitions :- (s/maybe SortExtensionDefinitions)]\n (cond-> (dissoc es-params :sort_by :sort_order)\n (and sort_by (not (:sort es-params)))\n (assoc :sort\n (->> sort_by\n parse-sort-by\n (mapv (fn [field]\n {:pre [(= :field (:op field))]}\n (let [{:keys [field-name] :as field}\n (update field :field-name #(or (keyword (enumerable-fields-mapping (name %)))\n %))]\n (assert (simple-keyword? field-name))\n (-> (or (some-> (get sort-extension-definitions field-name)\n (into (select-keys field [:sort_order]))\n (update :field-name #(or % (:field-name field))))\n field)\n (es.sort/parse-sort-params-op (or sort_order :asc))))))))))\n\n(s/defschema MakeQueryParamsArgs\n {:params s/Any\n :props s/Any\n (s/optional-key :sort-extension-definitions) SortExtensionDefinitions})\n\n(s/defn make-query-params :- {s/Keyword s/Any}\n [{:keys [params props sort-extension-definitions]} :- MakeQueryParamsArgs]\n (cond-> (-> params\n (rename-sort-fields sort-extension-definitions)\n (with-default-sort-field props)\n make-es-read-params)\n (<= 7 (:version props)) (assoc :track_total_hits true)))\n\n(defn handle-find\n \"Generate an ES find/list handler using some mapping and schema\"\n [Model]\n (let [response-schema (list-response-schema Model)\n coerce! (coerce-to-fn response-schema)]\n (s/fn :- response-schema\n [{{{:keys [get-in-config]} :ConfigService} :services\n :keys [conn index props]} :- ESConnState\n {:keys [all-of one-of query]\n :or {all-of {} one-of {}}} :- FilterSchema\n ident\n es-params]\n (let [filter-val (cond-> (q/prepare-terms all-of)\n (restricted-read? ident)\n (conj (es.query/find-restriction-query-part ident get-in-config)))\n query_string {:query_string {:query query}}\n date-range-query (es.query/make-date-range-query es-params)\n bool-params (cond-> {:filter filter-val}\n (seq one-of) (into\n {:should (q/prepare-terms one-of)\n :minimum_should_match 1})\n query (update :filter conj query_string)\n (seq date-range-query) (update :filter conj {:range date-range-query}))\n query-params (make-query-params {:params es-params :props props})]\n (cond-> (coerce! (ductile.doc/query conn\n index\n (q/bool bool-params)\n query-params))\n (restricted-read? 
ident) (update :data\n access-control-filter-list\n ident\n get-in-config))))))\n\n(s/defn make-search-query :- {s/Keyword s/Any}\n \"Translate SearchQuery map into ES Query DSL map\"\n [es-conn-state :- ESConnState\n search-query :- SearchQuery\n ident]\n (let [{:keys [services]} es-conn-state\n {{:keys [get-in-config]} :ConfigService} services\n {:keys [filter-map range full-text]} search-query\n range-query (when range\n {:range range})\n filter-terms (-> (ensure-document-id-in-map filter-map)\n q/prepare-terms)]\n {:bool\n {:filter\n (cond-> [(es.query/find-restriction-query-part ident get-in-config)]\n (seq filter-map) (into filter-terms)\n (seq range) (conj range-query)\n (seq full-text) (into (es.query/refine-full-text-query-parts\n es-conn-state full-text)))}}))\n\n(defn handle-query-string-search\n \"Generate an ES query handler for given schema schema\"\n [Model]\n (let [response-schema (list-response-schema Model)\n coerce! (coerce-to-fn response-schema)]\n (s/fn :- response-schema\n [{:keys [props] :as es-conn-state} :- ESConnState\n {:keys [search-query ident] :as query-string-search-args} :- QueryStringSearchArgs]\n (let [{conn :conn, index :index\n {{:keys [get-in-config]} :ConfigService}\n :services} es-conn-state\n query (make-search-query es-conn-state search-query ident)\n query-params (make-query-params (-> (select-keys query-string-search-args [:params :sort-extension-definitions])\n (assoc :props props)))]\n (cond-> (coerce! (ductile.doc/query\n conn\n index\n query\n query-params))\n (restricted-read? ident) (update\n :data\n access-control-filter-list\n ident\n get-in-config))))))\n\n(s/defn handle-delete-search\n \"ES delete by query handler\"\n [{:keys [conn index] :as es-conn-state} :- ESConnState\n search-query :- SearchQuery\n ident\n es-params]\n (let [query (make-search-query es-conn-state search-query ident)]\n (:deleted\n (ductile.doc/delete-by-query conn\n [index]\n query\n (prepare-opts es-conn-state es-params)))))\n\n(s/defn handle-query-string-count :- (s/pred nat-int?)\n \"ES count handler\"\n [{conn :conn\n index :index\n :as es-conn-state} :- ESConnState\n search-query :- SearchQuery\n ident]\n (let [query (make-search-query es-conn-state search-query ident)]\n (ductile.doc/count-docs conn\n index\n query)))\n\n(s/defn make-histogram\n [{:keys [aggregate-on granularity timezone]\n :or {timezone \"+00:00\"}} :- HistogramQuery]\n {:date_histogram\n {:field aggregate-on\n TODO switch to calendar_interval with ES7\n :time_zone timezone}})\n\n(s/defn make-topn\n [{:keys [aggregate-on limit sort_order]\n :or {limit 10 sort_order :desc}} :- TopnQuery]\n {:terms\n {:field (get enumerable-fields-mapping aggregate-on aggregate-on)\n :size limit\n :order {:_count sort_order}}})\n\n(s/defn make-cardinality\n [{:keys [aggregate-on]} :- CardinalityQuery]\n {:cardinality {:field (get enumerable-fields-mapping aggregate-on aggregate-on)\n :precision_threshold 10000}})\n\n(s/defn make-aggregation\n [{:keys [agg-type agg-key aggs]\n :or {agg-key :metric}\n :as agg-query} :- AggQuery]\n (let [root-agg (dissoc agg-query :aggs)\n agg-fn\n (case agg-type\n :topn make-topn\n :cardinality make-cardinality\n :histogram make-histogram\n (throw (ex-info (str \"invalid aggregation type: \" (pr-str agg-type))\n {})))]\n (cond-> {agg-key (agg-fn root-agg)}\n (seq aggs) (assoc :aggs (make-aggregation aggs)))))\n\n(defn format-agg-result\n [agg-type\n {:keys [value buckets] :as _metric-res}]\n (case agg-type\n :cardinality value\n :topn (map #(array-map :key (:key %)\n :value 
(:doc_count %))\n buckets)\n :histogram (map #(array-map :key (:key_as_string %)\n :value (:doc_count %))\n buckets)))\n\n(s/defn handle-aggregate\n \"Generate an ES aggregation handler for given schema\"\n [{:keys [conn index] :as es-conn-state} :- ESConnState\n search-query :- SearchQuery\n {:keys [agg-type] :as agg-query} :- AggQuery\n ident]\n (let [query (make-search-query es-conn-state search-query ident)\n agg (make-aggregation (assoc agg-query :agg-key :metric))\n es-res (ductile.doc/query conn\n index\n query\n agg\n {:limit 0})]\n (format-agg-result agg-type\n (get-in es-res [:aggs :metric]))))\n"}}},{"rowIdx":610247,"cells":{"_id":{"kind":"string","value":"1474dc7693ddebbfcbd686f2889a6603019a6cd4face2891224b437c797d7c3a"},"repository":{"kind":"string","value":"argp/bap"},"name":{"kind":"string","value":"bench_map.ml"},"content":{"kind":"string","value":" cd .. & & ocamlbuild benchsuite / bench_map.native & & _ build / benchsuite / bench_map.native\n\n(* The purpose of this test is to compare different implementation of\n the Map associative data structure. *)\n\nlet total_length = 500_000\n\nlet (%) = BatPervasives.(%)\n\nmodule MapBench (M : sig val input_length : int end) = struct\n let input_length = M.input_length\n\n let nb_iter =\n max 10 (total_length / input_length)\n\n let () = Printf.printf \"%d iterations\\n\" nb_iter\n\n let random_key () = Random.int input_length\n let random_value () = Random.int input_length\n\n let random_inputs random_elt () =\n BatList.init input_length (fun _ -> random_elt ())\n\n let make_samples input tests () = Bench.bench_funs tests input\n\n we do n't use BatInt to ensure that the same comparison function \n is used ( PMap use Pervasives.compare by default ) , in order to \n have comparable performance results .\n is used (PMap use Pervasives.compare by default), in order to\n have comparable performance results. 
*)\n module StdMap = BatMap.Make(struct type t = int let compare = compare end)\n\n module Map = BatMap\n\n let same_elts stdmap pmap =\n BatList.of_enum (StdMap.enum stdmap)\n = BatList.of_enum (Map.enum pmap)\n\n (* A benchmark for key insertion *)\n let create_std_map input =\n List.fold_left\n (fun t (k, v) -> StdMap.add k v t)\n StdMap.empty input\n\n let create_poly_map input =\n List.fold_left\n (fun t (k, v) -> Map.add k v t)\n Map.empty input\n\n let create_input =\n let keys = random_inputs random_key () in\n let values = random_inputs random_value () in\n BatList.combine keys values\n\n let std_created_map = create_std_map create_input\n let poly_created_map = create_poly_map create_input\n\n let () =\n assert (same_elts std_created_map poly_created_map)\n\n let samples_create = make_samples create_input\n [ \"stdmap create\", ignore % create_std_map;\n \"pmap create\", ignore % create_poly_map ]\n\n (* A benchmark for fast import *)\n let import_std_map input =\n StdMap.of_enum (BatList.enum input)\n\n let import_poly_map input =\n Map.of_enum (BatList.enum input)\n\n let import_input = create_input\n\n let () =\n let std_imported_map = import_std_map import_input in\n assert (same_elts std_imported_map poly_created_map);\n let poly_imported_map = import_poly_map import_input in\n assert (same_elts std_created_map poly_imported_map);\n ()\n\n let samples_import = make_samples import_input\n [ \"stdmap import\", ignore % import_std_map;\n \"pmap import\", ignore % import_poly_map ]\n\n (* A benchmark for key lookup *)\n let lookup_input =\n random_inputs random_key ()\n\n let lookup_std_map input =\n List.iter\n (fun k -> ignore (StdMap.mem k std_created_map))\n input\n\n let lookup_poly_map input =\n List.iter\n (fun k -> ignore (Map.mem k poly_created_map))\n input\n\n let samples_lookup = make_samples lookup_input\n [ \"stdmap lookup\", lookup_std_map;\n \"pmap lookup\", lookup_poly_map ]\n\n (* A benchmark for key removal *)\n let remove_input =\n random_inputs random_key ()\n\n let remove_std_map input =\n List.fold_left\n (fun t k -> StdMap.remove k t)\n std_created_map input\n\n let remove_poly_map input =\n List.fold_left\n (fun t k -> Map.remove k t)\n poly_created_map input\n\n let () =\n assert (same_elts\n (remove_std_map remove_input)\n (remove_poly_map remove_input))\n\n let samples_remove = make_samples remove_input\n [ \"stdmap remove\", ignore % remove_std_map;\n \"pmap remove\", ignore % remove_poly_map ]\n\n\n (* A benchmark for merging *)\n let random_pairlist () =\n BatList.combine\n (random_inputs random_key ())\n (random_inputs random_value ())\n\n let p1 = random_pairlist ()\n let p2 = random_pairlist ()\n\n let merge_fun k a b =\n if k mod 2 = 0 then None else Some ()\n\n let merge_std_map =\n let m1 = StdMap.of_enum (BatList.enum p1) in\n let m2 = StdMap.of_enum (BatList.enum p2) in\n fun () ->\n StdMap.merge merge_fun m1 m2\n\n let merge_poly_map =\n let m1 = Map.of_enum (BatList.enum p1) in\n let m2 = Map.of_enum (BatList.enum p2) in\n fun () ->\n Map.merge merge_fun m1 m2\n\n\n let samples_merge = make_samples () [\n \"stdmap merge\", ignore % merge_std_map;\n \"pmap merge\", ignore % merge_poly_map;\n ]\n\n (* compare fold-based and merge-based union, diff, intersect *)\n let pmap_union (m1, m2) = Map.union m1 m2\n let fold_union (m1, m2) =\n Map.foldi Map.add m1 m2\n let merge_union (m1, m2) =\n let merge_fun k a b = if a <> None then a else b in\n Map.merge merge_fun m1 m2\n\n let union_input =\n let m1 = Map.of_enum (BatList.enum p1) in\n let m2 
= Map.of_enum (BatList.enum p2) in\n m1, m2\n\n let () =\n let li m = BatList.of_enum (Map.enum m) in\n let test impl_union =\n li (pmap_union union_input) = li (impl_union union_input) in\n assert (test fold_union);\n assert (test merge_union);\n ()\n\n let samples_union = make_samples union_input [\n \"pmap union\", ignore % pmap_union;\n \"fold-based union\", ignore % fold_union;\n \"merge-based union\", ignore % merge_union;\n ]\n\n let pmap_diff (m1, m2) =\n Map.diff m1 m2\n let fold_diff (m1, m2) =\n Map.foldi (fun k _ acc -> Map.remove k acc) m2 m1\n let merge_diff (m1, m2) =\n let merge_fun k a b = if b <> None then None else a in\n Map.merge merge_fun m1 m2\n\n let diff_input =\n let m1 = Map.of_enum (BatList.enum p1) in\n let m2 = Map.of_enum (BatList.enum p2) in\n m1, m2\n\n let () =\n let li m = BatList.of_enum (Map.enum m) in\n let test impl_diff =\n li (pmap_diff diff_input) = li (impl_diff diff_input) in\n assert (test fold_diff);\n assert (test merge_diff);\n ()\n\n let samples_diff = make_samples diff_input [\n \"pmap diff\", ignore % pmap_diff;\n \"fold-based diff\", ignore % fold_diff;\n \"merge-based diff\", ignore % merge_diff;\n ]\n\n let pmap_intersect f (m1, m2) =\n Map.intersect f m1 m2\n\n let filter_intersect f (m1, m2) =\n let filter_fun k v1 =\n match\n try Some (Map.find k m2)\n with Not_found -> None\n with\n | None -> None\n | Some v2 -> Some (f v1 v2) in\n Map.filter_map filter_fun m1\n\n let merge_intersect f (m1, m2) =\n let merge_fun k a b =\n match a, b with\n | Some v1, Some v2 -> Some (f v1 v2)\n | None, _ | _, None -> None in\n Map.merge merge_fun m1 m2\n\n let intersect_input =\n let m1 = Map.of_enum (BatList.enum p1) in\n let m2 = Map.of_enum (BatList.enum p2) in\n m1, m2\n\n let () =\n let li m = BatList.of_enum (Map.enum m) in\n let test impl_intersect =\n li (pmap_intersect (-) intersect_input)\n = li (impl_intersect (-) intersect_input) in\n assert (test filter_intersect);\n assert (test merge_intersect);\n ()\n\n let samples_intersect = make_samples intersect_input [\n \"pmap intersect\", ignore % pmap_intersect (-);\n \"filter-based intersect\", ignore % filter_intersect (-);\n \"merge-based intersect\", ignore % merge_intersect (-);\n ]\n\n let () =\n let create = samples_create () in\n let import = samples_import () in\n let lookup = samples_lookup () in\n let remove = samples_remove () in\n let merge = samples_merge () in\n let union = samples_union () in\n let diff = samples_diff () in\n let intersect = samples_intersect () in\n List.iter\n (print_newline % Bench.summarize)\n [\n create;\n import;\n lookup;\n remove;\n merge;\n union;\n diff;\n intersect;\n ]\nend\n\nlet big_length = 100_000\nlet small_length = 500\n\nlet () =\n Printf.printf \"Test with small maps (length = %d)\\n%!\" small_length;\n let () =\n let module M = MapBench(struct let input_length = small_length end) in\n () in\n\n print_newline ();\n print_newline ();\n\n Printf.printf \"Test with big maps (length = %d)\\n%!\" big_length;\n Bench.config.Bench.samples <- 100;\n let () =\n let module M = MapBench(struct let input_length = big_length end) in\n () in\n\n\n ()\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/argp/bap/2f60a35e822200a1ec50eea3a947a322b45da363/batteries/benchsuite/bench_map.ml"},"language":{"kind":"string","value":"ocaml"},"comments":{"kind":"string","value":" The purpose of this test is to compare different implementation of\n the Map associative data structure. 
\n A benchmark for key insertion \n A benchmark for fast import \n A benchmark for key lookup \n A benchmark for key removal \n A benchmark for merging \n compare fold-based and merge-based union, diff, intersect "},"code":{"kind":"string","value":" cd .. & & ocamlbuild benchsuite / bench_map.native & & _ build / benchsuite / bench_map.native\n\n\nlet total_length = 500_000\n\nlet (%) = BatPervasives.(%)\n\nmodule MapBench (M : sig val input_length : int end) = struct\n let input_length = M.input_length\n\n let nb_iter =\n max 10 (total_length / input_length)\n\n let () = Printf.printf \"%d iterations\\n\" nb_iter\n\n let random_key () = Random.int input_length\n let random_value () = Random.int input_length\n\n let random_inputs random_elt () =\n BatList.init input_length (fun _ -> random_elt ())\n\n let make_samples input tests () = Bench.bench_funs tests input\n\n we do n't use BatInt to ensure that the same comparison function \n is used ( PMap use Pervasives.compare by default ) , in order to \n have comparable performance results .\n is used (PMap use Pervasives.compare by default), in order to\n have comparable performance results. *)\n module StdMap = BatMap.Make(struct type t = int let compare = compare end)\n\n module Map = BatMap\n\n let same_elts stdmap pmap =\n BatList.of_enum (StdMap.enum stdmap)\n = BatList.of_enum (Map.enum pmap)\n\n let create_std_map input =\n List.fold_left\n (fun t (k, v) -> StdMap.add k v t)\n StdMap.empty input\n\n let create_poly_map input =\n List.fold_left\n (fun t (k, v) -> Map.add k v t)\n Map.empty input\n\n let create_input =\n let keys = random_inputs random_key () in\n let values = random_inputs random_value () in\n BatList.combine keys values\n\n let std_created_map = create_std_map create_input\n let poly_created_map = create_poly_map create_input\n\n let () =\n assert (same_elts std_created_map poly_created_map)\n\n let samples_create = make_samples create_input\n [ \"stdmap create\", ignore % create_std_map;\n \"pmap create\", ignore % create_poly_map ]\n\n let import_std_map input =\n StdMap.of_enum (BatList.enum input)\n\n let import_poly_map input =\n Map.of_enum (BatList.enum input)\n\n let import_input = create_input\n\n let () =\n let std_imported_map = import_std_map import_input in\n assert (same_elts std_imported_map poly_created_map);\n let poly_imported_map = import_poly_map import_input in\n assert (same_elts std_created_map poly_imported_map);\n ()\n\n let samples_import = make_samples import_input\n [ \"stdmap import\", ignore % import_std_map;\n \"pmap import\", ignore % import_poly_map ]\n\n let lookup_input =\n random_inputs random_key ()\n\n let lookup_std_map input =\n List.iter\n (fun k -> ignore (StdMap.mem k std_created_map))\n input\n\n let lookup_poly_map input =\n List.iter\n (fun k -> ignore (Map.mem k poly_created_map))\n input\n\n let samples_lookup = make_samples lookup_input\n [ \"stdmap lookup\", lookup_std_map;\n \"pmap lookup\", lookup_poly_map ]\n\n let remove_input =\n random_inputs random_key ()\n\n let remove_std_map input =\n List.fold_left\n (fun t k -> StdMap.remove k t)\n std_created_map input\n\n let remove_poly_map input =\n List.fold_left\n (fun t k -> Map.remove k t)\n poly_created_map input\n\n let () =\n assert (same_elts\n (remove_std_map remove_input)\n (remove_poly_map remove_input))\n\n let samples_remove = make_samples remove_input\n [ \"stdmap remove\", ignore % remove_std_map;\n \"pmap remove\", ignore % remove_poly_map ]\n\n\n let random_pairlist () =\n BatList.combine\n 
(random_inputs random_key ())\n (random_inputs random_value ())\n\n let p1 = random_pairlist ()\n let p2 = random_pairlist ()\n\n let merge_fun k a b =\n if k mod 2 = 0 then None else Some ()\n\n let merge_std_map =\n let m1 = StdMap.of_enum (BatList.enum p1) in\n let m2 = StdMap.of_enum (BatList.enum p2) in\n fun () ->\n StdMap.merge merge_fun m1 m2\n\n let merge_poly_map =\n let m1 = Map.of_enum (BatList.enum p1) in\n let m2 = Map.of_enum (BatList.enum p2) in\n fun () ->\n Map.merge merge_fun m1 m2\n\n\n let samples_merge = make_samples () [\n \"stdmap merge\", ignore % merge_std_map;\n \"pmap merge\", ignore % merge_poly_map;\n ]\n\n let pmap_union (m1, m2) = Map.union m1 m2\n let fold_union (m1, m2) =\n Map.foldi Map.add m1 m2\n let merge_union (m1, m2) =\n let merge_fun k a b = if a <> None then a else b in\n Map.merge merge_fun m1 m2\n\n let union_input =\n let m1 = Map.of_enum (BatList.enum p1) in\n let m2 = Map.of_enum (BatList.enum p2) in\n m1, m2\n\n let () =\n let li m = BatList.of_enum (Map.enum m) in\n let test impl_union =\n li (pmap_union union_input) = li (impl_union union_input) in\n assert (test fold_union);\n assert (test merge_union);\n ()\n\n let samples_union = make_samples union_input [\n \"pmap union\", ignore % pmap_union;\n \"fold-based union\", ignore % fold_union;\n \"merge-based union\", ignore % merge_union;\n ]\n\n let pmap_diff (m1, m2) =\n Map.diff m1 m2\n let fold_diff (m1, m2) =\n Map.foldi (fun k _ acc -> Map.remove k acc) m2 m1\n let merge_diff (m1, m2) =\n let merge_fun k a b = if b <> None then None else a in\n Map.merge merge_fun m1 m2\n\n let diff_input =\n let m1 = Map.of_enum (BatList.enum p1) in\n let m2 = Map.of_enum (BatList.enum p2) in\n m1, m2\n\n let () =\n let li m = BatList.of_enum (Map.enum m) in\n let test impl_diff =\n li (pmap_diff diff_input) = li (impl_diff diff_input) in\n assert (test fold_diff);\n assert (test merge_diff);\n ()\n\n let samples_diff = make_samples diff_input [\n \"pmap diff\", ignore % pmap_diff;\n \"fold-based diff\", ignore % fold_diff;\n \"merge-based diff\", ignore % merge_diff;\n ]\n\n let pmap_intersect f (m1, m2) =\n Map.intersect f m1 m2\n\n let filter_intersect f (m1, m2) =\n let filter_fun k v1 =\n match\n try Some (Map.find k m2)\n with Not_found -> None\n with\n | None -> None\n | Some v2 -> Some (f v1 v2) in\n Map.filter_map filter_fun m1\n\n let merge_intersect f (m1, m2) =\n let merge_fun k a b =\n match a, b with\n | Some v1, Some v2 -> Some (f v1 v2)\n | None, _ | _, None -> None in\n Map.merge merge_fun m1 m2\n\n let intersect_input =\n let m1 = Map.of_enum (BatList.enum p1) in\n let m2 = Map.of_enum (BatList.enum p2) in\n m1, m2\n\n let () =\n let li m = BatList.of_enum (Map.enum m) in\n let test impl_intersect =\n li (pmap_intersect (-) intersect_input)\n = li (impl_intersect (-) intersect_input) in\n assert (test filter_intersect);\n assert (test merge_intersect);\n ()\n\n let samples_intersect = make_samples intersect_input [\n \"pmap intersect\", ignore % pmap_intersect (-);\n \"filter-based intersect\", ignore % filter_intersect (-);\n \"merge-based intersect\", ignore % merge_intersect (-);\n ]\n\n let () =\n let create = samples_create () in\n let import = samples_import () in\n let lookup = samples_lookup () in\n let remove = samples_remove () in\n let merge = samples_merge () in\n let union = samples_union () in\n let diff = samples_diff () in\n let intersect = samples_intersect () in\n List.iter\n (print_newline % Bench.summarize)\n [\n create;\n import;\n lookup;\n remove;\n merge;\n 
union;\n diff;\n intersect;\n ]\nend\n\nlet big_length = 100_000\nlet small_length = 500\n\nlet () =\n Printf.printf \"Test with small maps (length = %d)\\n%!\" small_length;\n let () =\n let module M = MapBench(struct let input_length = small_length end) in\n () in\n\n print_newline ();\n print_newline ();\n\n Printf.printf \"Test with big maps (length = %d)\\n%!\" big_length;\n Bench.config.Bench.samples <- 100;\n let () =\n let module M = MapBench(struct let input_length = big_length end) in\n () in\n\n\n ()\n"}}},{"rowIdx":610248,"cells":{"_id":{"kind":"string","value":"679d955390e90fde7e78b0a061ac74e3c7b9e356f35d688d8edc4745799d3804"},"repository":{"kind":"string","value":"Oblosys/proxima"},"name":{"kind":"string","value":"SemHsTokens.hs"},"content":{"kind":"string","value":"\n\n UUAGC 0.9.10 ( SemHsTokens.ag )\nmodule SemHsTokens where\n\nimport qualified Data.Sequence as Seq\nimport Data.Sequence(Seq,empty,singleton,(><))\nimport Data.Foldable(toList)\nimport Pretty\n\nimport TokenDef\nimport HsToken\nimport ErrorMessages\n\n\nimport CommonTypes\nimport UU.Scanner.Position(Pos)\n\nisNTname allnts (Just (NT nt _)) = nt `elem` allnts\nisNTname allnts _ = False\n -----------------------------------------------------\n\n visit 0 : \n inherited attributes : \n allfields : [ ( Identifier , Type , ) ] \n : [ Identifier ] \n attrs : [ ( Identifier , Identifier ) ] \n con : Identifier \n fieldnames : [ Identifier ] \n nt : Identifier \n synthesized attributes : \n errors : Seq Error \n tok : ( Pos , String ) \n : [ ( Identifier , Identifier ) ] \n usedFields : Seq Identifier \n usedLocals : [ Identifier ] \n alternatives : \n alternative AGField : \n child field : { Identifier } \n child attr : { Identifier } \n child pos : { Pos } \n child rdesc : { Maybe String } \n visit 0 : \n local addTrace : _ \n alternative AGLocal : \n child var : { Identifier } \n child pos : { Pos } \n child rdesc : { Maybe String } \n visit 0 : \n local _ tup1 : _ \n local errors : _ \n local tok : _ \n local usedLocals : _ \n alternative CharToken : \n child value : { String } \n child pos : { Pos } \n alternative : \n child mesg : { String } \n child pos : { Pos } \n alternative : \n child value : { String } \n child pos : { Pos } \n alternative StrToken : \n child value : { String } \n child pos : { Pos } \n\n visit 0:\n inherited attributes:\n allfields : [(Identifier,Type,Bool)]\n allnts : [Identifier]\n attrs : [(Identifier,Identifier)]\n con : Identifier\n fieldnames : [Identifier]\n nt : Identifier\n synthesized attributes:\n errors : Seq Error\n tok : (Pos,String)\n usedAttrs : [(Identifier,Identifier)]\n usedFields : Seq Identifier\n usedLocals : [Identifier]\n alternatives:\n alternative AGField:\n child field : {Identifier}\n child attr : {Identifier}\n child pos : {Pos}\n child rdesc : {Maybe String}\n visit 0:\n local addTrace : _\n alternative AGLocal:\n child var : {Identifier}\n child pos : {Pos}\n child rdesc : {Maybe String}\n visit 0:\n local _tup1 : _\n local errors : _\n local tok : _\n local usedLocals : _\n alternative CharToken:\n child value : {String}\n child pos : {Pos}\n alternative Err:\n child mesg : {String}\n child pos : {Pos}\n alternative HsToken:\n child value : {String}\n child pos : {Pos}\n alternative StrToken:\n child value : {String}\n child pos : {Pos}\n-}\n-- cata\nsem_HsToken :: HsToken ->\n T_HsToken \nsem_HsToken (AGField _field _attr _pos _rdesc ) =\n (sem_HsToken_AGField _field _attr _pos _rdesc )\nsem_HsToken (AGLocal _var _pos _rdesc ) =\n (sem_HsToken_AGLocal _var 
_pos _rdesc )\nsem_HsToken (CharToken _value _pos ) =\n (sem_HsToken_CharToken _value _pos )\nsem_HsToken (Err _mesg _pos ) =\n (sem_HsToken_Err _mesg _pos )\nsem_HsToken (HsToken _value _pos ) =\n (sem_HsToken_HsToken _value _pos )\nsem_HsToken (StrToken _value _pos ) =\n (sem_HsToken_StrToken _value _pos )\n-- semantic domain\nnewtype T_HsToken = T_HsToken (([(Identifier,Type,Bool)]) ->\n ([Identifier]) ->\n ([(Identifier,Identifier)]) ->\n Identifier ->\n ([Identifier]) ->\n Identifier ->\n ( (Seq Error),((Pos,String)),([(Identifier,Identifier)]),(Seq Identifier),([Identifier])))\ndata Inh_HsToken = Inh_HsToken {allfields_Inh_HsToken :: [(Identifier,Type,Bool)],allnts_Inh_HsToken :: [Identifier],attrs_Inh_HsToken :: [(Identifier,Identifier)],con_Inh_HsToken :: Identifier,fieldnames_Inh_HsToken :: [Identifier],nt_Inh_HsToken :: Identifier}\ndata Syn_HsToken = Syn_HsToken {errors_Syn_HsToken :: Seq Error,tok_Syn_HsToken :: (Pos,String),usedAttrs_Syn_HsToken :: [(Identifier,Identifier)],usedFields_Syn_HsToken :: Seq Identifier,usedLocals_Syn_HsToken :: [Identifier]}\nwrap_HsToken :: T_HsToken ->\n Inh_HsToken ->\n Syn_HsToken \nwrap_HsToken (T_HsToken sem ) (Inh_HsToken _lhsIallfields _lhsIallnts _lhsIattrs _lhsIcon _lhsIfieldnames _lhsInt ) =\n (let ( _lhsOerrors,_lhsOtok,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals) =\n (sem _lhsIallfields _lhsIallnts _lhsIattrs _lhsIcon _lhsIfieldnames _lhsInt )\n in (Syn_HsToken _lhsOerrors _lhsOtok _lhsOusedAttrs _lhsOusedFields _lhsOusedLocals ))\nsem_HsToken_AGField :: Identifier ->\n Identifier ->\n Pos ->\n (Maybe String) ->\n T_HsToken \nsem_HsToken_AGField field_ attr_ pos_ rdesc_ =\n (T_HsToken (\\ _lhsIallfields\n _lhsIallnts\n _lhsIattrs\n _lhsIcon\n _lhsIfieldnames\n _lhsInt ->\n (let _lhsOerrors :: (Seq Error)\n _lhsOusedAttrs :: ([(Identifier,Identifier)])\n _lhsOtok :: ((Pos,String))\n _lhsOusedFields :: (Seq Identifier)\n _lhsOusedLocals :: ([Identifier])\n \" SemHsTokens.ag\"(line 74 , column 15 )\n _lhsOerrors =\n if (field_,attr_) `elem` _lhsIattrs\n then Seq.empty\n else if not(field_ `elem` (_LHS : _LOC: _lhsIfieldnames))\n then Seq.singleton (UndefChild _lhsInt _lhsIcon field_)\n else Seq.singleton (UndefAttr _lhsInt _lhsIcon field_ attr_ False)\n \" SemHsTokens.ag\"(line 88 , column 13 )\n _lhsOusedAttrs =\n [(field_,attr_)]\n \" SemHsTokens.ag\"(line 115 , column 8)\n _addTrace =\n case rdesc_ of\n Just d -> \\x -> \"(trace \" ++ show (d ++ \" -> \" ++ show field_ ++ \".\" ++ show attr_) ++ \" (\" ++ x ++ \"))\"\n Nothing -> id\n \" SemHsTokens.ag\"(line 118 , column 8)\n _lhsOtok =\n (pos_, _addTrace $ attrname True field_ attr_)\n use rule \" SemHsTokens.ag\"(line 93 , column 40 )\n _lhsOusedFields =\n Seq.empty\n use rule \" SemHsTokens.ag\"(line 84 , column 40 )\n _lhsOusedLocals =\n []\n in ( _lhsOerrors,_lhsOtok,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals))) )\nsem_HsToken_AGLocal :: Identifier ->\n Pos ->\n (Maybe String) ->\n T_HsToken \nsem_HsToken_AGLocal var_ pos_ rdesc_ =\n (T_HsToken (\\ _lhsIallfields\n _lhsIallnts\n _lhsIattrs\n _lhsIcon\n _lhsIfieldnames\n _lhsInt ->\n (let _lhsOusedFields :: (Seq Identifier)\n _lhsOerrors :: (Seq Error)\n _lhsOusedAttrs :: ([(Identifier,Identifier)])\n _lhsOusedLocals :: ([Identifier])\n _lhsOtok :: ((Pos,String))\n \" SemHsTokens.ag\"(line 64 , column 19 )\n __tup1 =\n if var_ `elem` _lhsIfieldnames\n then if isNTname _lhsIallnts (lookup var_ (map (\\(n,t,_) -> (n,t)) _lhsIallfields))\n then (Seq.singleton(ChildAsLocal _lhsInt _lhsIcon var_), (pos_,fieldname var_), [] )\n 
else (Seq.empty, (pos_,fieldname var_), [] )\n else if (_LOC,var_) `elem` _lhsIattrs\n then (Seq.empty , (pos_,locname var_), [var_])\n else (Seq.singleton(UndefLocal _lhsInt _lhsIcon var_), (pos_,locname var_), [] )\n \" SemHsTokens.ag\"(line 64 , column 19 )\n (_errors,_,_) =\n __tup1\n \" SemHsTokens.ag\"(line 64 , column 19 )\n (_,_tok,_) =\n __tup1\n \" SemHsTokens.ag\"(line 64 , column 19 )\n (_,_,_usedLocals) =\n __tup1\n \" SemHsTokens.ag\"(line 96 , column 13 )\n _lhsOusedFields =\n if var_ `elem` _lhsIfieldnames\n then Seq.singleton var_\n else Seq.empty\n use rule \" SemHsTokens.ag\"(line 43 , column 37 )\n _lhsOerrors =\n _errors\n use rule \" SemHsTokens.ag\"(line 85 , column 40 )\n _lhsOusedAttrs =\n []\n use rule \" SemHsTokens.ag\"(line 84 , column 40 )\n _lhsOusedLocals =\n _usedLocals\n -- copy rule (from local)\n _lhsOtok =\n _tok\n in ( _lhsOerrors,_lhsOtok,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals))) )\nsem_HsToken_CharToken :: String ->\n Pos ->\n T_HsToken \nsem_HsToken_CharToken value_ pos_ =\n (T_HsToken (\\ _lhsIallfields\n _lhsIallnts\n _lhsIattrs\n _lhsIcon\n _lhsIfieldnames\n _lhsInt ->\n (let _lhsOtok :: ((Pos,String))\n _lhsOerrors :: (Seq Error)\n _lhsOusedAttrs :: ([(Identifier,Identifier)])\n _lhsOusedFields :: (Seq Identifier)\n _lhsOusedLocals :: ([Identifier])\n \" SemHsTokens.ag\"(line 122 , column 16 )\n _lhsOtok =\n (pos_, if null value_\n then \"\"\n else showCharShort (head value_)\n )\n use rule \" SemHsTokens.ag\"(line 43 , column 37 )\n _lhsOerrors =\n Seq.empty\n use rule \" SemHsTokens.ag\"(line 85 , column 40 )\n _lhsOusedAttrs =\n []\n use rule \" SemHsTokens.ag\"(line 93 , column 40 )\n _lhsOusedFields =\n Seq.empty\n use rule \" SemHsTokens.ag\"(line 84 , column 40 )\n _lhsOusedLocals =\n []\n in ( _lhsOerrors,_lhsOtok,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals))) )\nsem_HsToken_Err :: String ->\n Pos ->\n T_HsToken \nsem_HsToken_Err mesg_ pos_ =\n (T_HsToken (\\ _lhsIallfields\n _lhsIallnts\n _lhsIattrs\n _lhsIcon\n _lhsIfieldnames\n _lhsInt ->\n (let _lhsOerrors :: (Seq Error)\n _lhsOtok :: ((Pos,String))\n _lhsOusedAttrs :: ([(Identifier,Identifier)])\n _lhsOusedFields :: (Seq Identifier)\n _lhsOusedLocals :: ([Identifier])\n \" SemHsTokens.ag\"(line 50 , column 9 )\n _lhsOerrors =\n let m = text mesg_\n in Seq.singleton (CustomError False pos_ m)\n \" SemHsTokens.ag\"(line 128 , column 16 )\n _lhsOtok =\n (pos_, \"\")\n use rule \" SemHsTokens.ag\"(line 85 , column 40 )\n _lhsOusedAttrs =\n []\n use rule \" SemHsTokens.ag\"(line 93 , column 40 )\n _lhsOusedFields =\n Seq.empty\n use rule \" SemHsTokens.ag\"(line 84 , column 40 )\n _lhsOusedLocals =\n []\n in ( _lhsOerrors,_lhsOtok,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals))) )\nsem_HsToken_HsToken :: String ->\n Pos ->\n T_HsToken \nsem_HsToken_HsToken value_ pos_ =\n (T_HsToken (\\ _lhsIallfields\n _lhsIallnts\n _lhsIattrs\n _lhsIcon\n _lhsIfieldnames\n _lhsInt ->\n (let _lhsOtok :: ((Pos,String))\n _lhsOerrors :: (Seq Error)\n _lhsOusedAttrs :: ([(Identifier,Identifier)])\n _lhsOusedFields :: (Seq Identifier)\n _lhsOusedLocals :: ([Identifier])\n \" SemHsTokens.ag\"(line 120 , column 14 )\n _lhsOtok =\n (pos_, value_)\n use rule \" SemHsTokens.ag\"(line 43 , column 37 )\n _lhsOerrors =\n Seq.empty\n use rule \" SemHsTokens.ag\"(line 85 , column 40 )\n _lhsOusedAttrs =\n []\n use rule \" SemHsTokens.ag\"(line 93 , column 40 )\n _lhsOusedFields =\n Seq.empty\n use rule \" SemHsTokens.ag\"(line 84 , column 40 )\n _lhsOusedLocals =\n []\n in ( 
_lhsOerrors,_lhsOtok,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals))) )\nsem_HsToken_StrToken :: String ->\n Pos ->\n T_HsToken \nsem_HsToken_StrToken value_ pos_ =\n (T_HsToken (\\ _lhsIallfields\n _lhsIallnts\n _lhsIattrs\n _lhsIcon\n _lhsIfieldnames\n _lhsInt ->\n (let _lhsOtok :: ((Pos,String))\n _lhsOerrors :: (Seq Error)\n _lhsOusedAttrs :: ([(Identifier,Identifier)])\n _lhsOusedFields :: (Seq Identifier)\n _lhsOusedLocals :: ([Identifier])\n \" SemHsTokens.ag\"(line 127 , column 16 )\n _lhsOtok =\n (pos_, showStrShort value_)\n use rule \" SemHsTokens.ag\"(line 43 , column 37 )\n _lhsOerrors =\n Seq.empty\n use rule \" SemHsTokens.ag\"(line 85 , column 40 )\n _lhsOusedAttrs =\n []\n use rule \" SemHsTokens.ag\"(line 93 , column 40 )\n _lhsOusedFields =\n Seq.empty\n use rule \" SemHsTokens.ag\"(line 84 , column 40 )\n _lhsOusedLocals =\n []\n in ( _lhsOerrors,_lhsOtok,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals))) )\n HsTokens ----------------------------------------------------\n\n visit 0 : \n inherited attributes : \n allfields : [ ( Identifier , Type , ) ] \n : [ Identifier ] \n attrs : [ ( Identifier , Identifier ) ] \n con : Identifier \n fieldnames : [ Identifier ] \n nt : Identifier \n synthesized attributes : \n errors : Seq Error \n tks : [ ( Pos , String ) ] \n : [ ( Identifier , Identifier ) ] \n usedFields : Seq Identifier \n usedLocals : [ Identifier ] \n alternatives : \n alternative Cons : \n child hd : \n child tl : \n alternative : \n\n visit 0:\n inherited attributes:\n allfields : [(Identifier,Type,Bool)]\n allnts : [Identifier]\n attrs : [(Identifier,Identifier)]\n con : Identifier\n fieldnames : [Identifier]\n nt : Identifier\n synthesized attributes:\n errors : Seq Error\n tks : [(Pos,String)]\n usedAttrs : [(Identifier,Identifier)]\n usedFields : Seq Identifier\n usedLocals : [Identifier]\n alternatives:\n alternative Cons:\n child hd : HsToken \n child tl : HsTokens \n alternative Nil:\n-}\n-- cata\nsem_HsTokens :: HsTokens ->\n T_HsTokens \nsem_HsTokens list =\n (Prelude.foldr sem_HsTokens_Cons sem_HsTokens_Nil (Prelude.map sem_HsToken list) )\n-- semantic domain\nnewtype T_HsTokens = T_HsTokens (([(Identifier,Type,Bool)]) ->\n ([Identifier]) ->\n ([(Identifier,Identifier)]) ->\n Identifier ->\n ([Identifier]) ->\n Identifier ->\n ( (Seq Error),([(Pos,String)]),([(Identifier,Identifier)]),(Seq Identifier),([Identifier])))\ndata Inh_HsTokens = Inh_HsTokens {allfields_Inh_HsTokens :: [(Identifier,Type,Bool)],allnts_Inh_HsTokens :: [Identifier],attrs_Inh_HsTokens :: [(Identifier,Identifier)],con_Inh_HsTokens :: Identifier,fieldnames_Inh_HsTokens :: [Identifier],nt_Inh_HsTokens :: Identifier}\ndata Syn_HsTokens = Syn_HsTokens {errors_Syn_HsTokens :: Seq Error,tks_Syn_HsTokens :: [(Pos,String)],usedAttrs_Syn_HsTokens :: [(Identifier,Identifier)],usedFields_Syn_HsTokens :: Seq Identifier,usedLocals_Syn_HsTokens :: [Identifier]}\nwrap_HsTokens :: T_HsTokens ->\n Inh_HsTokens ->\n Syn_HsTokens \nwrap_HsTokens (T_HsTokens sem ) (Inh_HsTokens _lhsIallfields _lhsIallnts _lhsIattrs _lhsIcon _lhsIfieldnames _lhsInt ) =\n (let ( _lhsOerrors,_lhsOtks,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals) =\n (sem _lhsIallfields _lhsIallnts _lhsIattrs _lhsIcon _lhsIfieldnames _lhsInt )\n in (Syn_HsTokens _lhsOerrors _lhsOtks _lhsOusedAttrs _lhsOusedFields _lhsOusedLocals ))\nsem_HsTokens_Cons :: T_HsToken ->\n T_HsTokens ->\n T_HsTokens \nsem_HsTokens_Cons (T_HsToken hd_ ) (T_HsTokens tl_ ) =\n (T_HsTokens (\\ _lhsIallfields\n _lhsIallnts\n _lhsIattrs\n _lhsIcon\n 
_lhsIfieldnames\n _lhsInt ->\n (let _lhsOtks :: ([(Pos,String)])\n _lhsOerrors :: (Seq Error)\n _lhsOusedAttrs :: ([(Identifier,Identifier)])\n _lhsOusedFields :: (Seq Identifier)\n _lhsOusedLocals :: ([Identifier])\n _hdOallfields :: ([(Identifier,Type,Bool)])\n _hdOallnts :: ([Identifier])\n _hdOattrs :: ([(Identifier,Identifier)])\n _hdOcon :: Identifier\n _hdOfieldnames :: ([Identifier])\n _hdOnt :: Identifier\n _tlOallfields :: ([(Identifier,Type,Bool)])\n _tlOallnts :: ([Identifier])\n _tlOattrs :: ([(Identifier,Identifier)])\n _tlOcon :: Identifier\n _tlOfieldnames :: ([Identifier])\n _tlOnt :: Identifier\n _hdIerrors :: (Seq Error)\n _hdItok :: ((Pos,String))\n _hdIusedAttrs :: ([(Identifier,Identifier)])\n _hdIusedFields :: (Seq Identifier)\n _hdIusedLocals :: ([Identifier])\n _tlIerrors :: (Seq Error)\n _tlItks :: ([(Pos,String)])\n _tlIusedAttrs :: ([(Identifier,Identifier)])\n _tlIusedFields :: (Seq Identifier)\n _tlIusedLocals :: ([Identifier])\n \" SemHsTokens.ag\"(line 110 , column 10 )\n _lhsOtks =\n _hdItok : _tlItks\n use rule \" SemHsTokens.ag\"(line 43 , column 37 )\n _lhsOerrors =\n _hdIerrors Seq.>< _tlIerrors\n use rule \" SemHsTokens.ag\"(line 85 , column 40 )\n _lhsOusedAttrs =\n _hdIusedAttrs ++ _tlIusedAttrs\n use rule \" SemHsTokens.ag\"(line 93 , column 40 )\n _lhsOusedFields =\n _hdIusedFields Seq.>< _tlIusedFields\n use rule \" SemHsTokens.ag\"(line 84 , column 40 )\n _lhsOusedLocals =\n _hdIusedLocals ++ _tlIusedLocals\n -- copy rule (down)\n _hdOallfields =\n _lhsIallfields\n -- copy rule (down)\n _hdOallnts =\n _lhsIallnts\n -- copy rule (down)\n _hdOattrs =\n _lhsIattrs\n -- copy rule (down)\n _hdOcon =\n _lhsIcon\n -- copy rule (down)\n _hdOfieldnames =\n _lhsIfieldnames\n -- copy rule (down)\n _hdOnt =\n _lhsInt\n -- copy rule (down)\n _tlOallfields =\n _lhsIallfields\n -- copy rule (down)\n _tlOallnts =\n _lhsIallnts\n -- copy rule (down)\n _tlOattrs =\n _lhsIattrs\n -- copy rule (down)\n _tlOcon =\n _lhsIcon\n -- copy rule (down)\n _tlOfieldnames =\n _lhsIfieldnames\n -- copy rule (down)\n _tlOnt =\n _lhsInt\n ( _hdIerrors,_hdItok,_hdIusedAttrs,_hdIusedFields,_hdIusedLocals) =\n (hd_ _hdOallfields _hdOallnts _hdOattrs _hdOcon _hdOfieldnames _hdOnt )\n ( _tlIerrors,_tlItks,_tlIusedAttrs,_tlIusedFields,_tlIusedLocals) =\n (tl_ _tlOallfields _tlOallnts _tlOattrs _tlOcon _tlOfieldnames _tlOnt )\n in ( _lhsOerrors,_lhsOtks,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals))) )\nsem_HsTokens_Nil :: T_HsTokens \nsem_HsTokens_Nil =\n (T_HsTokens (\\ _lhsIallfields\n _lhsIallnts\n _lhsIattrs\n _lhsIcon\n _lhsIfieldnames\n _lhsInt ->\n (let _lhsOtks :: ([(Pos,String)])\n _lhsOerrors :: (Seq Error)\n _lhsOusedAttrs :: ([(Identifier,Identifier)])\n _lhsOusedFields :: (Seq Identifier)\n _lhsOusedLocals :: ([Identifier])\n \" SemHsTokens.ag\"(line 111 , column 10 )\n _lhsOtks =\n []\n use rule \" SemHsTokens.ag\"(line 43 , column 37 )\n _lhsOerrors =\n Seq.empty\n use rule \" SemHsTokens.ag\"(line 85 , column 40 )\n _lhsOusedAttrs =\n []\n use rule \" SemHsTokens.ag\"(line 93 , column 40 )\n _lhsOusedFields =\n Seq.empty\n use rule \" SemHsTokens.ag\"(line 84 , column 40 )\n _lhsOusedLocals =\n []\n in ( _lhsOerrors,_lhsOtks,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals))) )\n HsTokensRoot ------------------------------------------------\n\n visit 0 : \n inherited attributes : \n allfields : [ ( Identifier , Type , ) ] \n : [ Identifier ] \n attrs : [ ( Identifier , Identifier ) ] \n con : Identifier \n nt : Identifier \n synthesized attributes : \n errors : Seq 
Error \n textLines : [ String ] \n : [ ( Identifier , Identifier ) ] \n usedFields : [ Identifier ] \n usedLocals : [ Identifier ] \n alternatives : \n alternative HsTokensRoot : \n child tokens : HsTokens \n\n visit 0:\n inherited attributes:\n allfields : [(Identifier,Type,Bool)]\n allnts : [Identifier]\n attrs : [(Identifier,Identifier)]\n con : Identifier\n nt : Identifier\n synthesized attributes:\n errors : Seq Error\n textLines : [String]\n usedAttrs : [(Identifier,Identifier)]\n usedFields : [Identifier]\n usedLocals : [Identifier]\n alternatives:\n alternative HsTokensRoot:\n child tokens : HsTokens \n-}\n-- cata\nsem_HsTokensRoot :: HsTokensRoot ->\n T_HsTokensRoot \nsem_HsTokensRoot (HsTokensRoot _tokens ) =\n (sem_HsTokensRoot_HsTokensRoot (sem_HsTokens _tokens ) )\n-- semantic domain\nnewtype T_HsTokensRoot = T_HsTokensRoot (([(Identifier,Type,Bool)]) ->\n ([Identifier]) ->\n ([(Identifier,Identifier)]) ->\n Identifier ->\n Identifier ->\n ( (Seq Error),([String]),([(Identifier,Identifier)]),([Identifier]),([Identifier])))\ndata Inh_HsTokensRoot = Inh_HsTokensRoot {allfields_Inh_HsTokensRoot :: [(Identifier,Type,Bool)],allnts_Inh_HsTokensRoot :: [Identifier],attrs_Inh_HsTokensRoot :: [(Identifier,Identifier)],con_Inh_HsTokensRoot :: Identifier,nt_Inh_HsTokensRoot :: Identifier}\ndata Syn_HsTokensRoot = Syn_HsTokensRoot {errors_Syn_HsTokensRoot :: Seq Error,textLines_Syn_HsTokensRoot :: [String],usedAttrs_Syn_HsTokensRoot :: [(Identifier,Identifier)],usedFields_Syn_HsTokensRoot :: [Identifier],usedLocals_Syn_HsTokensRoot :: [Identifier]}\nwrap_HsTokensRoot :: T_HsTokensRoot ->\n Inh_HsTokensRoot ->\n Syn_HsTokensRoot \nwrap_HsTokensRoot (T_HsTokensRoot sem ) (Inh_HsTokensRoot _lhsIallfields _lhsIallnts _lhsIattrs _lhsIcon _lhsInt ) =\n (let ( _lhsOerrors,_lhsOtextLines,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals) =\n (sem _lhsIallfields _lhsIallnts _lhsIattrs _lhsIcon _lhsInt )\n in (Syn_HsTokensRoot _lhsOerrors _lhsOtextLines _lhsOusedAttrs _lhsOusedFields _lhsOusedLocals ))\nsem_HsTokensRoot_HsTokensRoot :: T_HsTokens ->\n T_HsTokensRoot \nsem_HsTokensRoot_HsTokensRoot (T_HsTokens tokens_ ) =\n (T_HsTokensRoot (\\ _lhsIallfields\n _lhsIallnts\n _lhsIattrs\n _lhsIcon\n _lhsInt ->\n (let _tokensOfieldnames :: ([Identifier])\n _lhsOusedFields :: ([Identifier])\n _lhsOtextLines :: ([String])\n _lhsOerrors :: (Seq Error)\n _lhsOusedAttrs :: ([(Identifier,Identifier)])\n _lhsOusedLocals :: ([Identifier])\n _tokensOallfields :: ([(Identifier,Type,Bool)])\n _tokensOallnts :: ([Identifier])\n _tokensOattrs :: ([(Identifier,Identifier)])\n _tokensOcon :: Identifier\n _tokensOnt :: Identifier\n _tokensIerrors :: (Seq Error)\n _tokensItks :: ([(Pos,String)])\n _tokensIusedAttrs :: ([(Identifier,Identifier)])\n _tokensIusedFields :: (Seq Identifier)\n _tokensIusedLocals :: ([Identifier])\n \" SemHsTokens.ag\"(line 38 , column 18 )\n _tokensOfieldnames =\n map (\\(n,_,_) -> n) _lhsIallfields\n \" SemHsTokens.ag\"(line 100 , column 18 )\n _lhsOusedFields =\n toList _tokensIusedFields\n \" SemHsTokens.ag\"(line 107 , column 18 )\n _lhsOtextLines =\n showTokens _tokensItks\n use rule \" SemHsTokens.ag\"(line 18 , column 18 )\n _lhsOerrors =\n _tokensIerrors\n -- copy rule (up)\n _lhsOusedAttrs =\n _tokensIusedAttrs\n -- copy rule (up)\n _lhsOusedLocals =\n _tokensIusedLocals\n -- copy rule (down)\n _tokensOallfields =\n _lhsIallfields\n -- copy rule (down)\n _tokensOallnts =\n _lhsIallnts\n -- copy rule (down)\n _tokensOattrs =\n _lhsIattrs\n -- copy rule (down)\n _tokensOcon =\n 
_lhsIcon\n -- copy rule (down)\n _tokensOnt =\n _lhsInt\n ( _tokensIerrors,_tokensItks,_tokensIusedAttrs,_tokensIusedFields,_tokensIusedLocals) =\n (tokens_ _tokensOallfields _tokensOallnts _tokensOattrs _tokensOcon _tokensOfieldnames _tokensOnt )\n in ( _lhsOerrors,_lhsOtextLines,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals))) )"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/Oblosys/proxima/f154dff2ccb8afe00eeb325d9d06f5e2a5ee7589/uuagc/src-derived/SemHsTokens.hs"},"language":{"kind":"string","value":"haskell"},"comments":{"kind":"string","value":"---------------------------------------------------\n cata\n semantic domain\n copy rule (from local)\n--------------------------------------------------\n cata\n semantic domain\n copy rule (down)\n copy rule (down)\n copy rule (down)\n copy rule (down)\n copy rule (down)\n copy rule (down)\n copy rule (down)\n copy rule (down)\n copy rule (down)\n copy rule (down)\n copy rule (down)\n copy rule (down)\n----------------------------------------------\n cata\n semantic domain\n copy rule (up)\n copy rule (up)\n copy rule (down)\n copy rule (down)\n copy rule (down)\n copy rule (down)\n copy rule (down)"},"code":{"kind":"string","value":"\n\n UUAGC 0.9.10 ( SemHsTokens.ag )\nmodule SemHsTokens where\n\nimport qualified Data.Sequence as Seq\nimport Data.Sequence(Seq,empty,singleton,(><))\nimport Data.Foldable(toList)\nimport Pretty\n\nimport TokenDef\nimport HsToken\nimport ErrorMessages\n\n\nimport CommonTypes\nimport UU.Scanner.Position(Pos)\n\nisNTname allnts (Just (NT nt _)) = nt `elem` allnts\nisNTname allnts _ = False\n\n visit 0 : \n inherited attributes : \n allfields : [ ( Identifier , Type , ) ] \n : [ Identifier ] \n attrs : [ ( Identifier , Identifier ) ] \n con : Identifier \n fieldnames : [ Identifier ] \n nt : Identifier \n synthesized attributes : \n errors : Seq Error \n tok : ( Pos , String ) \n : [ ( Identifier , Identifier ) ] \n usedFields : Seq Identifier \n usedLocals : [ Identifier ] \n alternatives : \n alternative AGField : \n child field : { Identifier } \n child attr : { Identifier } \n child pos : { Pos } \n child rdesc : { Maybe String } \n visit 0 : \n local addTrace : _ \n alternative AGLocal : \n child var : { Identifier } \n child pos : { Pos } \n child rdesc : { Maybe String } \n visit 0 : \n local _ tup1 : _ \n local errors : _ \n local tok : _ \n local usedLocals : _ \n alternative CharToken : \n child value : { String } \n child pos : { Pos } \n alternative : \n child mesg : { String } \n child pos : { Pos } \n alternative : \n child value : { String } \n child pos : { Pos } \n alternative StrToken : \n child value : { String } \n child pos : { Pos } \n\n visit 0:\n inherited attributes:\n allfields : [(Identifier,Type,Bool)]\n allnts : [Identifier]\n attrs : [(Identifier,Identifier)]\n con : Identifier\n fieldnames : [Identifier]\n nt : Identifier\n synthesized attributes:\n errors : Seq Error\n tok : (Pos,String)\n usedAttrs : [(Identifier,Identifier)]\n usedFields : Seq Identifier\n usedLocals : [Identifier]\n alternatives:\n alternative AGField:\n child field : {Identifier}\n child attr : {Identifier}\n child pos : {Pos}\n child rdesc : {Maybe String}\n visit 0:\n local addTrace : _\n alternative AGLocal:\n child var : {Identifier}\n child pos : {Pos}\n child rdesc : {Maybe String}\n visit 0:\n local _tup1 : _\n local errors : _\n local tok : _\n local usedLocals : _\n alternative CharToken:\n child value : {String}\n child pos : {Pos}\n alternative 
Err:\n child mesg : {String}\n child pos : {Pos}\n alternative HsToken:\n child value : {String}\n child pos : {Pos}\n alternative StrToken:\n child value : {String}\n child pos : {Pos}\n-}\nsem_HsToken :: HsToken ->\n T_HsToken \nsem_HsToken (AGField _field _attr _pos _rdesc ) =\n (sem_HsToken_AGField _field _attr _pos _rdesc )\nsem_HsToken (AGLocal _var _pos _rdesc ) =\n (sem_HsToken_AGLocal _var _pos _rdesc )\nsem_HsToken (CharToken _value _pos ) =\n (sem_HsToken_CharToken _value _pos )\nsem_HsToken (Err _mesg _pos ) =\n (sem_HsToken_Err _mesg _pos )\nsem_HsToken (HsToken _value _pos ) =\n (sem_HsToken_HsToken _value _pos )\nsem_HsToken (StrToken _value _pos ) =\n (sem_HsToken_StrToken _value _pos )\nnewtype T_HsToken = T_HsToken (([(Identifier,Type,Bool)]) ->\n ([Identifier]) ->\n ([(Identifier,Identifier)]) ->\n Identifier ->\n ([Identifier]) ->\n Identifier ->\n ( (Seq Error),((Pos,String)),([(Identifier,Identifier)]),(Seq Identifier),([Identifier])))\ndata Inh_HsToken = Inh_HsToken {allfields_Inh_HsToken :: [(Identifier,Type,Bool)],allnts_Inh_HsToken :: [Identifier],attrs_Inh_HsToken :: [(Identifier,Identifier)],con_Inh_HsToken :: Identifier,fieldnames_Inh_HsToken :: [Identifier],nt_Inh_HsToken :: Identifier}\ndata Syn_HsToken = Syn_HsToken {errors_Syn_HsToken :: Seq Error,tok_Syn_HsToken :: (Pos,String),usedAttrs_Syn_HsToken :: [(Identifier,Identifier)],usedFields_Syn_HsToken :: Seq Identifier,usedLocals_Syn_HsToken :: [Identifier]}\nwrap_HsToken :: T_HsToken ->\n Inh_HsToken ->\n Syn_HsToken \nwrap_HsToken (T_HsToken sem ) (Inh_HsToken _lhsIallfields _lhsIallnts _lhsIattrs _lhsIcon _lhsIfieldnames _lhsInt ) =\n (let ( _lhsOerrors,_lhsOtok,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals) =\n (sem _lhsIallfields _lhsIallnts _lhsIattrs _lhsIcon _lhsIfieldnames _lhsInt )\n in (Syn_HsToken _lhsOerrors _lhsOtok _lhsOusedAttrs _lhsOusedFields _lhsOusedLocals ))\nsem_HsToken_AGField :: Identifier ->\n Identifier ->\n Pos ->\n (Maybe String) ->\n T_HsToken \nsem_HsToken_AGField field_ attr_ pos_ rdesc_ =\n (T_HsToken (\\ _lhsIallfields\n _lhsIallnts\n _lhsIattrs\n _lhsIcon\n _lhsIfieldnames\n _lhsInt ->\n (let _lhsOerrors :: (Seq Error)\n _lhsOusedAttrs :: ([(Identifier,Identifier)])\n _lhsOtok :: ((Pos,String))\n _lhsOusedFields :: (Seq Identifier)\n _lhsOusedLocals :: ([Identifier])\n \" SemHsTokens.ag\"(line 74 , column 15 )\n _lhsOerrors =\n if (field_,attr_) `elem` _lhsIattrs\n then Seq.empty\n else if not(field_ `elem` (_LHS : _LOC: _lhsIfieldnames))\n then Seq.singleton (UndefChild _lhsInt _lhsIcon field_)\n else Seq.singleton (UndefAttr _lhsInt _lhsIcon field_ attr_ False)\n \" SemHsTokens.ag\"(line 88 , column 13 )\n _lhsOusedAttrs =\n [(field_,attr_)]\n \" SemHsTokens.ag\"(line 115 , column 8)\n _addTrace =\n case rdesc_ of\n Just d -> \\x -> \"(trace \" ++ show (d ++ \" -> \" ++ show field_ ++ \".\" ++ show attr_) ++ \" (\" ++ x ++ \"))\"\n Nothing -> id\n \" SemHsTokens.ag\"(line 118 , column 8)\n _lhsOtok =\n (pos_, _addTrace $ attrname True field_ attr_)\n use rule \" SemHsTokens.ag\"(line 93 , column 40 )\n _lhsOusedFields =\n Seq.empty\n use rule \" SemHsTokens.ag\"(line 84 , column 40 )\n _lhsOusedLocals =\n []\n in ( _lhsOerrors,_lhsOtok,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals))) )\nsem_HsToken_AGLocal :: Identifier ->\n Pos ->\n (Maybe String) ->\n T_HsToken \nsem_HsToken_AGLocal var_ pos_ rdesc_ =\n (T_HsToken (\\ _lhsIallfields\n _lhsIallnts\n _lhsIattrs\n _lhsIcon\n _lhsIfieldnames\n _lhsInt ->\n (let _lhsOusedFields :: (Seq Identifier)\n _lhsOerrors :: (Seq 
Error)\n _lhsOusedAttrs :: ([(Identifier,Identifier)])\n _lhsOusedLocals :: ([Identifier])\n _lhsOtok :: ((Pos,String))\n \" SemHsTokens.ag\"(line 64 , column 19 )\n __tup1 =\n if var_ `elem` _lhsIfieldnames\n then if isNTname _lhsIallnts (lookup var_ (map (\\(n,t,_) -> (n,t)) _lhsIallfields))\n then (Seq.singleton(ChildAsLocal _lhsInt _lhsIcon var_), (pos_,fieldname var_), [] )\n else (Seq.empty, (pos_,fieldname var_), [] )\n else if (_LOC,var_) `elem` _lhsIattrs\n then (Seq.empty , (pos_,locname var_), [var_])\n else (Seq.singleton(UndefLocal _lhsInt _lhsIcon var_), (pos_,locname var_), [] )\n \" SemHsTokens.ag\"(line 64 , column 19 )\n (_errors,_,_) =\n __tup1\n \" SemHsTokens.ag\"(line 64 , column 19 )\n (_,_tok,_) =\n __tup1\n \" SemHsTokens.ag\"(line 64 , column 19 )\n (_,_,_usedLocals) =\n __tup1\n \" SemHsTokens.ag\"(line 96 , column 13 )\n _lhsOusedFields =\n if var_ `elem` _lhsIfieldnames\n then Seq.singleton var_\n else Seq.empty\n use rule \" SemHsTokens.ag\"(line 43 , column 37 )\n _lhsOerrors =\n _errors\n use rule \" SemHsTokens.ag\"(line 85 , column 40 )\n _lhsOusedAttrs =\n []\n use rule \" SemHsTokens.ag\"(line 84 , column 40 )\n _lhsOusedLocals =\n _usedLocals\n _lhsOtok =\n _tok\n in ( _lhsOerrors,_lhsOtok,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals))) )\nsem_HsToken_CharToken :: String ->\n Pos ->\n T_HsToken \nsem_HsToken_CharToken value_ pos_ =\n (T_HsToken (\\ _lhsIallfields\n _lhsIallnts\n _lhsIattrs\n _lhsIcon\n _lhsIfieldnames\n _lhsInt ->\n (let _lhsOtok :: ((Pos,String))\n _lhsOerrors :: (Seq Error)\n _lhsOusedAttrs :: ([(Identifier,Identifier)])\n _lhsOusedFields :: (Seq Identifier)\n _lhsOusedLocals :: ([Identifier])\n \" SemHsTokens.ag\"(line 122 , column 16 )\n _lhsOtok =\n (pos_, if null value_\n then \"\"\n else showCharShort (head value_)\n )\n use rule \" SemHsTokens.ag\"(line 43 , column 37 )\n _lhsOerrors =\n Seq.empty\n use rule \" SemHsTokens.ag\"(line 85 , column 40 )\n _lhsOusedAttrs =\n []\n use rule \" SemHsTokens.ag\"(line 93 , column 40 )\n _lhsOusedFields =\n Seq.empty\n use rule \" SemHsTokens.ag\"(line 84 , column 40 )\n _lhsOusedLocals =\n []\n in ( _lhsOerrors,_lhsOtok,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals))) )\nsem_HsToken_Err :: String ->\n Pos ->\n T_HsToken \nsem_HsToken_Err mesg_ pos_ =\n (T_HsToken (\\ _lhsIallfields\n _lhsIallnts\n _lhsIattrs\n _lhsIcon\n _lhsIfieldnames\n _lhsInt ->\n (let _lhsOerrors :: (Seq Error)\n _lhsOtok :: ((Pos,String))\n _lhsOusedAttrs :: ([(Identifier,Identifier)])\n _lhsOusedFields :: (Seq Identifier)\n _lhsOusedLocals :: ([Identifier])\n \" SemHsTokens.ag\"(line 50 , column 9 )\n _lhsOerrors =\n let m = text mesg_\n in Seq.singleton (CustomError False pos_ m)\n \" SemHsTokens.ag\"(line 128 , column 16 )\n _lhsOtok =\n (pos_, \"\")\n use rule \" SemHsTokens.ag\"(line 85 , column 40 )\n _lhsOusedAttrs =\n []\n use rule \" SemHsTokens.ag\"(line 93 , column 40 )\n _lhsOusedFields =\n Seq.empty\n use rule \" SemHsTokens.ag\"(line 84 , column 40 )\n _lhsOusedLocals =\n []\n in ( _lhsOerrors,_lhsOtok,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals))) )\nsem_HsToken_HsToken :: String ->\n Pos ->\n T_HsToken \nsem_HsToken_HsToken value_ pos_ =\n (T_HsToken (\\ _lhsIallfields\n _lhsIallnts\n _lhsIattrs\n _lhsIcon\n _lhsIfieldnames\n _lhsInt ->\n (let _lhsOtok :: ((Pos,String))\n _lhsOerrors :: (Seq Error)\n _lhsOusedAttrs :: ([(Identifier,Identifier)])\n _lhsOusedFields :: (Seq Identifier)\n _lhsOusedLocals :: ([Identifier])\n \" SemHsTokens.ag\"(line 120 , column 14 )\n _lhsOtok =\n (pos_, value_)\n 
use rule \" SemHsTokens.ag\"(line 43 , column 37 )\n _lhsOerrors =\n Seq.empty\n use rule \" SemHsTokens.ag\"(line 85 , column 40 )\n _lhsOusedAttrs =\n []\n use rule \" SemHsTokens.ag\"(line 93 , column 40 )\n _lhsOusedFields =\n Seq.empty\n use rule \" SemHsTokens.ag\"(line 84 , column 40 )\n _lhsOusedLocals =\n []\n in ( _lhsOerrors,_lhsOtok,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals))) )\nsem_HsToken_StrToken :: String ->\n Pos ->\n T_HsToken \nsem_HsToken_StrToken value_ pos_ =\n (T_HsToken (\\ _lhsIallfields\n _lhsIallnts\n _lhsIattrs\n _lhsIcon\n _lhsIfieldnames\n _lhsInt ->\n (let _lhsOtok :: ((Pos,String))\n _lhsOerrors :: (Seq Error)\n _lhsOusedAttrs :: ([(Identifier,Identifier)])\n _lhsOusedFields :: (Seq Identifier)\n _lhsOusedLocals :: ([Identifier])\n \" SemHsTokens.ag\"(line 127 , column 16 )\n _lhsOtok =\n (pos_, showStrShort value_)\n use rule \" SemHsTokens.ag\"(line 43 , column 37 )\n _lhsOerrors =\n Seq.empty\n use rule \" SemHsTokens.ag\"(line 85 , column 40 )\n _lhsOusedAttrs =\n []\n use rule \" SemHsTokens.ag\"(line 93 , column 40 )\n _lhsOusedFields =\n Seq.empty\n use rule \" SemHsTokens.ag\"(line 84 , column 40 )\n _lhsOusedLocals =\n []\n in ( _lhsOerrors,_lhsOtok,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals))) )\n\n visit 0 : \n inherited attributes : \n allfields : [ ( Identifier , Type , ) ] \n : [ Identifier ] \n attrs : [ ( Identifier , Identifier ) ] \n con : Identifier \n fieldnames : [ Identifier ] \n nt : Identifier \n synthesized attributes : \n errors : Seq Error \n tks : [ ( Pos , String ) ] \n : [ ( Identifier , Identifier ) ] \n usedFields : Seq Identifier \n usedLocals : [ Identifier ] \n alternatives : \n alternative Cons : \n child hd : \n child tl : \n alternative : \n\n visit 0:\n inherited attributes:\n allfields : [(Identifier,Type,Bool)]\n allnts : [Identifier]\n attrs : [(Identifier,Identifier)]\n con : Identifier\n fieldnames : [Identifier]\n nt : Identifier\n synthesized attributes:\n errors : Seq Error\n tks : [(Pos,String)]\n usedAttrs : [(Identifier,Identifier)]\n usedFields : Seq Identifier\n usedLocals : [Identifier]\n alternatives:\n alternative Cons:\n child hd : HsToken \n child tl : HsTokens \n alternative Nil:\n-}\nsem_HsTokens :: HsTokens ->\n T_HsTokens \nsem_HsTokens list =\n (Prelude.foldr sem_HsTokens_Cons sem_HsTokens_Nil (Prelude.map sem_HsToken list) )\nnewtype T_HsTokens = T_HsTokens (([(Identifier,Type,Bool)]) ->\n ([Identifier]) ->\n ([(Identifier,Identifier)]) ->\n Identifier ->\n ([Identifier]) ->\n Identifier ->\n ( (Seq Error),([(Pos,String)]),([(Identifier,Identifier)]),(Seq Identifier),([Identifier])))\ndata Inh_HsTokens = Inh_HsTokens {allfields_Inh_HsTokens :: [(Identifier,Type,Bool)],allnts_Inh_HsTokens :: [Identifier],attrs_Inh_HsTokens :: [(Identifier,Identifier)],con_Inh_HsTokens :: Identifier,fieldnames_Inh_HsTokens :: [Identifier],nt_Inh_HsTokens :: Identifier}\ndata Syn_HsTokens = Syn_HsTokens {errors_Syn_HsTokens :: Seq Error,tks_Syn_HsTokens :: [(Pos,String)],usedAttrs_Syn_HsTokens :: [(Identifier,Identifier)],usedFields_Syn_HsTokens :: Seq Identifier,usedLocals_Syn_HsTokens :: [Identifier]}\nwrap_HsTokens :: T_HsTokens ->\n Inh_HsTokens ->\n Syn_HsTokens \nwrap_HsTokens (T_HsTokens sem ) (Inh_HsTokens _lhsIallfields _lhsIallnts _lhsIattrs _lhsIcon _lhsIfieldnames _lhsInt ) =\n (let ( _lhsOerrors,_lhsOtks,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals) =\n (sem _lhsIallfields _lhsIallnts _lhsIattrs _lhsIcon _lhsIfieldnames _lhsInt )\n in (Syn_HsTokens _lhsOerrors _lhsOtks _lhsOusedAttrs 
_lhsOusedFields _lhsOusedLocals ))\nsem_HsTokens_Cons :: T_HsToken ->\n T_HsTokens ->\n T_HsTokens \nsem_HsTokens_Cons (T_HsToken hd_ ) (T_HsTokens tl_ ) =\n (T_HsTokens (\\ _lhsIallfields\n _lhsIallnts\n _lhsIattrs\n _lhsIcon\n _lhsIfieldnames\n _lhsInt ->\n (let _lhsOtks :: ([(Pos,String)])\n _lhsOerrors :: (Seq Error)\n _lhsOusedAttrs :: ([(Identifier,Identifier)])\n _lhsOusedFields :: (Seq Identifier)\n _lhsOusedLocals :: ([Identifier])\n _hdOallfields :: ([(Identifier,Type,Bool)])\n _hdOallnts :: ([Identifier])\n _hdOattrs :: ([(Identifier,Identifier)])\n _hdOcon :: Identifier\n _hdOfieldnames :: ([Identifier])\n _hdOnt :: Identifier\n _tlOallfields :: ([(Identifier,Type,Bool)])\n _tlOallnts :: ([Identifier])\n _tlOattrs :: ([(Identifier,Identifier)])\n _tlOcon :: Identifier\n _tlOfieldnames :: ([Identifier])\n _tlOnt :: Identifier\n _hdIerrors :: (Seq Error)\n _hdItok :: ((Pos,String))\n _hdIusedAttrs :: ([(Identifier,Identifier)])\n _hdIusedFields :: (Seq Identifier)\n _hdIusedLocals :: ([Identifier])\n _tlIerrors :: (Seq Error)\n _tlItks :: ([(Pos,String)])\n _tlIusedAttrs :: ([(Identifier,Identifier)])\n _tlIusedFields :: (Seq Identifier)\n _tlIusedLocals :: ([Identifier])\n \" SemHsTokens.ag\"(line 110 , column 10 )\n _lhsOtks =\n _hdItok : _tlItks\n use rule \" SemHsTokens.ag\"(line 43 , column 37 )\n _lhsOerrors =\n _hdIerrors Seq.>< _tlIerrors\n use rule \" SemHsTokens.ag\"(line 85 , column 40 )\n _lhsOusedAttrs =\n _hdIusedAttrs ++ _tlIusedAttrs\n use rule \" SemHsTokens.ag\"(line 93 , column 40 )\n _lhsOusedFields =\n _hdIusedFields Seq.>< _tlIusedFields\n use rule \" SemHsTokens.ag\"(line 84 , column 40 )\n _lhsOusedLocals =\n _hdIusedLocals ++ _tlIusedLocals\n _hdOallfields =\n _lhsIallfields\n _hdOallnts =\n _lhsIallnts\n _hdOattrs =\n _lhsIattrs\n _hdOcon =\n _lhsIcon\n _hdOfieldnames =\n _lhsIfieldnames\n _hdOnt =\n _lhsInt\n _tlOallfields =\n _lhsIallfields\n _tlOallnts =\n _lhsIallnts\n _tlOattrs =\n _lhsIattrs\n _tlOcon =\n _lhsIcon\n _tlOfieldnames =\n _lhsIfieldnames\n _tlOnt =\n _lhsInt\n ( _hdIerrors,_hdItok,_hdIusedAttrs,_hdIusedFields,_hdIusedLocals) =\n (hd_ _hdOallfields _hdOallnts _hdOattrs _hdOcon _hdOfieldnames _hdOnt )\n ( _tlIerrors,_tlItks,_tlIusedAttrs,_tlIusedFields,_tlIusedLocals) =\n (tl_ _tlOallfields _tlOallnts _tlOattrs _tlOcon _tlOfieldnames _tlOnt )\n in ( _lhsOerrors,_lhsOtks,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals))) )\nsem_HsTokens_Nil :: T_HsTokens \nsem_HsTokens_Nil =\n (T_HsTokens (\\ _lhsIallfields\n _lhsIallnts\n _lhsIattrs\n _lhsIcon\n _lhsIfieldnames\n _lhsInt ->\n (let _lhsOtks :: ([(Pos,String)])\n _lhsOerrors :: (Seq Error)\n _lhsOusedAttrs :: ([(Identifier,Identifier)])\n _lhsOusedFields :: (Seq Identifier)\n _lhsOusedLocals :: ([Identifier])\n \" SemHsTokens.ag\"(line 111 , column 10 )\n _lhsOtks =\n []\n use rule \" SemHsTokens.ag\"(line 43 , column 37 )\n _lhsOerrors =\n Seq.empty\n use rule \" SemHsTokens.ag\"(line 85 , column 40 )\n _lhsOusedAttrs =\n []\n use rule \" SemHsTokens.ag\"(line 93 , column 40 )\n _lhsOusedFields =\n Seq.empty\n use rule \" SemHsTokens.ag\"(line 84 , column 40 )\n _lhsOusedLocals =\n []\n in ( _lhsOerrors,_lhsOtks,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals))) )\n\n visit 0 : \n inherited attributes : \n allfields : [ ( Identifier , Type , ) ] \n : [ Identifier ] \n attrs : [ ( Identifier , Identifier ) ] \n con : Identifier \n nt : Identifier \n synthesized attributes : \n errors : Seq Error \n textLines : [ String ] \n : [ ( Identifier , Identifier ) ] \n usedFields : [ Identifier ] \n 
usedLocals : [ Identifier ] \n alternatives : \n alternative HsTokensRoot : \n child tokens : HsTokens \n\n visit 0:\n inherited attributes:\n allfields : [(Identifier,Type,Bool)]\n allnts : [Identifier]\n attrs : [(Identifier,Identifier)]\n con : Identifier\n nt : Identifier\n synthesized attributes:\n errors : Seq Error\n textLines : [String]\n usedAttrs : [(Identifier,Identifier)]\n usedFields : [Identifier]\n usedLocals : [Identifier]\n alternatives:\n alternative HsTokensRoot:\n child tokens : HsTokens \n-}\nsem_HsTokensRoot :: HsTokensRoot ->\n T_HsTokensRoot \nsem_HsTokensRoot (HsTokensRoot _tokens ) =\n (sem_HsTokensRoot_HsTokensRoot (sem_HsTokens _tokens ) )\nnewtype T_HsTokensRoot = T_HsTokensRoot (([(Identifier,Type,Bool)]) ->\n ([Identifier]) ->\n ([(Identifier,Identifier)]) ->\n Identifier ->\n Identifier ->\n ( (Seq Error),([String]),([(Identifier,Identifier)]),([Identifier]),([Identifier])))\ndata Inh_HsTokensRoot = Inh_HsTokensRoot {allfields_Inh_HsTokensRoot :: [(Identifier,Type,Bool)],allnts_Inh_HsTokensRoot :: [Identifier],attrs_Inh_HsTokensRoot :: [(Identifier,Identifier)],con_Inh_HsTokensRoot :: Identifier,nt_Inh_HsTokensRoot :: Identifier}\ndata Syn_HsTokensRoot = Syn_HsTokensRoot {errors_Syn_HsTokensRoot :: Seq Error,textLines_Syn_HsTokensRoot :: [String],usedAttrs_Syn_HsTokensRoot :: [(Identifier,Identifier)],usedFields_Syn_HsTokensRoot :: [Identifier],usedLocals_Syn_HsTokensRoot :: [Identifier]}\nwrap_HsTokensRoot :: T_HsTokensRoot ->\n Inh_HsTokensRoot ->\n Syn_HsTokensRoot \nwrap_HsTokensRoot (T_HsTokensRoot sem ) (Inh_HsTokensRoot _lhsIallfields _lhsIallnts _lhsIattrs _lhsIcon _lhsInt ) =\n (let ( _lhsOerrors,_lhsOtextLines,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals) =\n (sem _lhsIallfields _lhsIallnts _lhsIattrs _lhsIcon _lhsInt )\n in (Syn_HsTokensRoot _lhsOerrors _lhsOtextLines _lhsOusedAttrs _lhsOusedFields _lhsOusedLocals ))\nsem_HsTokensRoot_HsTokensRoot :: T_HsTokens ->\n T_HsTokensRoot \nsem_HsTokensRoot_HsTokensRoot (T_HsTokens tokens_ ) =\n (T_HsTokensRoot (\\ _lhsIallfields\n _lhsIallnts\n _lhsIattrs\n _lhsIcon\n _lhsInt ->\n (let _tokensOfieldnames :: ([Identifier])\n _lhsOusedFields :: ([Identifier])\n _lhsOtextLines :: ([String])\n _lhsOerrors :: (Seq Error)\n _lhsOusedAttrs :: ([(Identifier,Identifier)])\n _lhsOusedLocals :: ([Identifier])\n _tokensOallfields :: ([(Identifier,Type,Bool)])\n _tokensOallnts :: ([Identifier])\n _tokensOattrs :: ([(Identifier,Identifier)])\n _tokensOcon :: Identifier\n _tokensOnt :: Identifier\n _tokensIerrors :: (Seq Error)\n _tokensItks :: ([(Pos,String)])\n _tokensIusedAttrs :: ([(Identifier,Identifier)])\n _tokensIusedFields :: (Seq Identifier)\n _tokensIusedLocals :: ([Identifier])\n \" SemHsTokens.ag\"(line 38 , column 18 )\n _tokensOfieldnames =\n map (\\(n,_,_) -> n) _lhsIallfields\n \" SemHsTokens.ag\"(line 100 , column 18 )\n _lhsOusedFields =\n toList _tokensIusedFields\n \" SemHsTokens.ag\"(line 107 , column 18 )\n _lhsOtextLines =\n showTokens _tokensItks\n use rule \" SemHsTokens.ag\"(line 18 , column 18 )\n _lhsOerrors =\n _tokensIerrors\n _lhsOusedAttrs =\n _tokensIusedAttrs\n _lhsOusedLocals =\n _tokensIusedLocals\n _tokensOallfields =\n _lhsIallfields\n _tokensOallnts =\n _lhsIallnts\n _tokensOattrs =\n _lhsIattrs\n _tokensOcon =\n _lhsIcon\n _tokensOnt =\n _lhsInt\n ( _tokensIerrors,_tokensItks,_tokensIusedAttrs,_tokensIusedFields,_tokensIusedLocals) =\n (tokens_ _tokensOallfields _tokensOallnts _tokensOattrs _tokensOcon _tokensOfieldnames _tokensOnt )\n in ( 
_lhsOerrors,_lhsOtextLines,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals))) )"}}},{"rowIdx":610249,"cells":{"_id":{"kind":"string","value":"9f01966b071397927bcb7d951499d0e2382d81176ce11c2fd6eaa3a32be64223"},"repository":{"kind":"string","value":"alex-gutev/tridash"},"name":{"kind":"string","value":"macros.lisp"},"content":{"kind":"string","value":" macros.lisp\n;;;;\n;;;; Tridash Programming Language.\n Copyright ( C ) 2019 - 2021 \n;;;;\n;;;; This program is free software: you can redistribute it and/or modify\n it under the terms of the GNU General Public License as published by\n the Free Software Foundation , either version 3 of the License , or\n;;;; (at your option) any later version.\n;;;;\n;;;; This program is distributed in the hope that it will be useful,\n;;;; but WITHOUT ANY WARRANTY; without even the implied warranty of\n;;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n;;;; GNU General Public License for more details.\n;;;;\n You should have received a copy of the GNU General Public License\n;;;; along with this program. If not, see .\n\n;;;; User-Defined Macro Tests\n\n(defpackage :tridash/test.macros\n (:use :generic-cl\n :alexandria\n :anaphora\n :arrows\n :iterate\n :optima\n :named-readtables\n\n :tridash.parser\n :tridash.frontend\n\n :fiveam\n :tridash/test\n :tridash/test.util)\n\n (:shadowing-import-from :generic-cl\n :emptyp\n :multiply\n :accumulate)\n\n (:shadowing-import-from :fiveam :fail)\n\n (:import-from :lol\n :defmacro!\n :lol-syntax)\n\n (:import-from :tridash.frontend\n :tridash->cl-function\n :call-meta-node\n :call-tridash-meta-node\n :call-node\n\n :thunk\n :resolve\n :resolve%\n :tridash-fail\n\n :fail-thunk\n :+empty-list+\n :group-rest-args\n\n :check-arity\n :correct-arity?%\n :fail-arity-error\n\n :+optional-argument+\n :+rest-argument+))\n\n(in-package :tridash/test.macros)\n\n(in-readtable lol-syntax)\n\n\f\n;;; Test Suite Definition\n\n(def-suite macros\n :description \"Test user-defined Tridash macros.\"\n :in frontend)\n\n(in-suite macros)\n\n\f\n Utilities\n\n(defun functor (operator &rest arguments)\n \"Creates a `FUNCTOR-EXPRESSION' with operator OPERATOR and arguments\n ARGUMENTS.\"\n\n (functor-expression operator arguments))\n\n(defun expression= (expected got)\n \"Checks that the CL expression GOT is equal to EXPECTED. Symbols in\n EXPECTED, beginning with $, are replaced with the symbol in GOT\n corresponding to the first occurrence.\"\n\n (let ((aliases (make-hash-map)))\n (flet ((equal? (got expected)\n (match* (got expected)\n (((type symbol) (type symbol))\n (= got\n (cond\n ((starts-with #\\$ (symbol-name expected))\n (ensure-get expected aliases got))\n\n ((starts-with #\\! (symbol-name expected))\n (id-symbol (subseq (symbol-name expected) 1)))\n\n (t\n expected))))\n\n ((_ _)\n (= got expected)))))\n\n (tree-equal got expected :test #'equal?))))\n\n(defmacro with-external-meta-nodes ((&rest names) &body body)\n \"Creates `EXTERNAL-META-NODE's with names NAMES and binds to\n variables with the same identifiers as the names upcased.\"\n\n `(let ,(map #`(,(intern (string-upcase a1)) (make-instance 'external-meta-node :name (id-symbol ,a1))) names)\n ,@body))\n\n(defmacro! 
with-core-nodes ((&rest names) &body body)\n \"Builds the core module and binds the node with names NAMES to\n variables with the same identifiers as the names, upcased.\"\n\n `(with-module-table ,g!modules\n (build-core-module)\n\n (with-nodes ,(map #`(,(intern (string-upcase a1)) ,a1) names) ,g!modules\n ,@body)))\n\n(defmacro mock-meta-node ((&rest operands) expression)\n \"Creates a `META-NODE' which takes operands OPERANDS and has a value\n function consisting of EXPRESSION. OPERANDS is a list of symbols\n naming the dependency nodes. EXPRESSION is evaluated in an\n environment where each symbol in OPERANDS is bound to the\n `NODE-LINK' object corresponding to the operand, and the symbol\n SELF is bound to the `META-NODE' object.\"\n\n (flet ((make-operand (operand)\n (match operand\n ((or (list 'optional symb value)\n (list 'optional symb))\n (list +optional-argument+ (make-instance 'node :name symb) value))\n\n ((list 'rest symb)\n (list +rest-argument+ (make-instance 'node :name symb)))\n\n (_ (make-instance 'node :name operand))))\n\n (operand-node (operand)\n (match operand\n ((list* 'optional symb _)\n symb)\n\n ((list 'rest symb)\n symb)\n\n (_ operand))))\n\n `(let ((self (make-instance 'final-meta-node\n :name 'test-meta-node\n :operands ',(map #'make-operand operands)))\n ,@(map #`(,a1 (node-link (make-instance 'node :name ',a1)))\n (map #'operand-node operands)))\n\n ;; Create an empty `FLAT-NODE-TABLE' to mark meta-node as\n ;; already built\n (setf (definition self) (make-instance 'flat-node-table :nodes (make-hash-set)))\n (setf (value-function (context self nil))\n ,expression)\n\n ,@(map #`(setf (get ',a1 (operands (context self nil))) ,a1)\n (map #'operand-node operands))\n ,@(map #`(setf (get ',a1 (dependencies self)) ,a1)\n (map #'operand-node operands))\n\n self)))\n\n(defmacro test-compile-meta-node ((&rest operands) expression args body)\n \"Creates and compiles a `META-NODE' to a CL LAMBDA expression and\n checks that it has arguments ARGS and body BODY, by EXPRESSION=.\n\n OPERANDS and EXPRESSION correspond to the OPERANDS and EXPRESSION\n arguments of MOCK-META-NODE.\n\n ARGS (not evaluated) is the expected lambda-list of the function.\n\n BODY is the expected body expression within the BLOCK, TAGBODY,\n RETURN expression. The symbol $recur, occurring in BODY is\n substituted with the TAGBODY tag for tail-recursive self\n calls. 
BODY is evaluated in an environment in which the symbol SELF\n is bound to the `META-NODE' object.\"\n\n (flet ((lambda-args (lambda-list)\n (->> (remove-if (rcurry #'memberp lambda-list-keywords) lambda-list)\n (map #'ensure-car)\n (map (compose #'gensym #'symbol-name)))))\n\n `(let ((self (mock-meta-node ,operands ,expression)))\n (is (expression=\n `(lambda ,',args\n (declare (ignorable ,@',(lambda-args args)))\n ,,body)\n\n (tridash->cl-function self))))))\n\n\f\n Tridash to CL Compilation Tests\n\n(test compile-functor-expression\n \"Test compilation of functor expressions to CL.\"\n\n (with-core-nodes (\"if\" \"<\" \"-\")\n (test-compile-meta-node\n\n (a b)\n (functor if (functor < a b) (functor - b a) (functor - a b))\n\n ($a $b)\n '(let nil\n (!|if| (!< $a $b)\n (thunk (!- $b $a))\n (thunk (!- $a $b)))))))\n\n(test compile-if-expression\n \"Test compilation of if expressions to CL.\"\n\n (with-core-nodes (\"<\" \"-\")\n (test-compile-meta-node\n\n (a b)\n (if-expression (functor < a b) (functor - b a) (functor - a b))\n\n ($a $b)\n '(let nil\n (!|if| (!< $a $b)\n (thunk (!- $b $a))\n (thunk (!- $a $b)))))))\n\n(test compile-object-expression\n \"Test compilation of object expressions to CL.\"\n\n (with-core-nodes (\"+\" \"-\")\n (test-compile-meta-node\n\n (x y)\n (object-expression\n `((sum ,(functor + x y))\n (diff ,(functor - x y))))\n\n ($x $y)\n '(let nil\n (alist-hash-map\n (list\n (cons 'sum (thunk (!+ $x $y)))\n (cons 'diff (thunk (!- $x $y)))))))))\n\n(test compile-member-expression\n \"Test compilation of member expressions to CL.\"\n\n (test-compile-meta-node\n\n (object)\n (member-expression\n (member-expression object 'key1) 'key2)\n\n ($obj)\n '(let nil\n (!|member| (!|member| $obj 'key1) 'key2))))\n\n(test compile-catch-expression\n \"Test compilation of catch expressions to CL.\"\n\n (with-core-nodes (\"/\" \"*\")\n (test-compile-meta-node\n\n (a b)\n (catch-expression\n (functor / a b)\n (functor * a b))\n\n ($a $b)\n\n '(let nil\n (!|catch| (!/ $a $b) (thunk (!* $a $b)))))))\n\n(test compile-fail-expression\n \"Test compilation of fail expressions to CL.\"\n\n (test-compile-meta-node\n ()\n (fail-expression)\n\n ()\n '(let nil\n (!|fail|))))\n\n(test compile-expression-block\n \"Test compilation of expression blocks, with reference count = 1, to CL.\"\n\n (with-core-nodes (\"+\")\n (test-compile-meta-node\n\n (a)\n (expression-block\n (functor + a 1))\n\n ($a)\n '(let nil\n (!+ $a 1)))))\n\n(test compile-expression-block-muliple-references\n \"Test compilation of expression blocks, with reference count > 1, to CL.\"\n\n (with-core-nodes (\"+\")\n (test-compile-meta-node\n\n (a)\n (let ((block (expression-block (functor + a 1) :count 2)))\n (functor + block block))\n\n ($a)\n '(let ($a+1)\n (setf $a+1 (thunk (!+ $a 1)))\n\n (!+ $a+1 $a+1)))))\n\n(test compile-meta-node-call\n \"Test compilation of calls to other meta-nodes, to CL.\"\n\n (with-core-nodes (\"-\")\n (let ((meta-node (mock-meta-node (a) a)))\n (test-compile-meta-node\n\n (a)\n (functor meta-node (functor - a))\n\n ($a)\n `(let nil\n (call-tridash-meta-node ,meta-node (list (!- $a))))))))\n\n(test compile-higher-order-external-meta-node\n \"Test compilation of higher order external meta-node.\"\n\n (with-core-nodes (\"not\")\n (let ((apply (mock-meta-node (f x) (functor f x))))\n (test-compile-meta-node\n\n (x)\n (functor apply (meta-node-ref not) x)\n\n ($x)\n `(let nil\n (call-tridash-meta-node\n ,apply\n\n (list\n #'(lambda (&rest $args)\n (if (correct-arity?% '(1 . 
1) (length $args))\n (apply #'!|not| $args)\n (fail-arity-error)))\n $x)))))))\n\n(test compile-higher-order-if-meta-node\n \"Test compilation of higher order if meta-node.\"\n\n (with-core-nodes (\"if\")\n (let ((apply (mock-meta-node (f x y z) (functor f x y z))))\n (test-compile-meta-node\n\n (x y z)\n (functor apply (meta-node-ref if) x y z)\n\n ($x $y $z)\n `(let nil\n (call-tridash-meta-node\n ,apply\n\n (list\n #'(lambda (&rest $args)\n (if (correct-arity?% '(2 . 3) (length $args))\n (apply #'!|if| $args)\n (fail-arity-error)))\n $x $y $z)))))))\n\n(test compile-higher-order-and-meta-node\n \"Test compilation of higher order `and` meta-node.\"\n\n (with-core-nodes (\"and\")\n (let ((apply (mock-meta-node (f x y) (functor f x y))))\n (test-compile-meta-node\n\n (x y)\n (functor apply (meta-node-ref and) x y)\n\n ($x $y)\n `(let nil\n (call-tridash-meta-node\n ,apply\n\n (list\n #'(lambda (&rest $args)\n (if (correct-arity?% '(2 . 2) (length $args))\n (apply #'!|and| $args)\n (fail-arity-error)))\n $x $y)))))))\n\n(test compile-higher-order-or-meta-node\n \"Test compilation of higher order `or` meta-node.\"\n\n (with-core-nodes (\"or\")\n (let ((apply (mock-meta-node (f x y) (functor f x y))))\n (test-compile-meta-node\n\n (x y)\n (functor apply (meta-node-ref or) x y)\n\n ($x $y)\n `(let nil\n (call-tridash-meta-node\n ,apply\n\n (list\n #'(lambda (&rest $args)\n (if (correct-arity?% '(2 . 2) (length $args))\n (apply #'!|or| $args)\n (fail-arity-error)))\n $x $y)))))))\n\n(test compile-higher-order-meta-node\n \"Test compilation of higher-order user defined meta-node.\"\n\n (let ((apply (mock-meta-node (f x) (functor f x)))\n (f (mock-meta-node (x) x)))\n\n (test-compile-meta-node\n\n (x)\n (functor apply (meta-node-ref f) x)\n\n ($x)\n `(let nil\n (call-tridash-meta-node\n ,apply\n (list\n #'(lambda (&rest $args)\n (if (correct-arity?% '(1 . 1) (length $args))\n (destructuring-bind ($x2) $args\n (call-tridash-meta-node ,f (list $x2)))\n (fail-arity-error)))\n $x))))))\n\n(test compile-higher-order-meta-node-optional-arguments\n \"Test compilation of higher-order meta-node with optional arguments.\"\n\n (let ((apply (mock-meta-node (f x) (functor f x)))\n (f (mock-meta-node (x (optional y) (optional z)) x)))\n\n (test-compile-meta-node\n (x)\n (functor apply (meta-node-ref f :optional (list 1 2)) x)\n\n ($x)\n `(let nil\n (call-tridash-meta-node\n ,apply\n (list\n #'(lambda (&rest $args)\n (if (correct-arity?% '(1 . 
3) (length $args))\n (destructuring-bind ($x2 &optional ($y 1) ($z 2)) $args\n (call-tridash-meta-node ,f (list $x2 $y $z)))\n (fail-arity-error)))\n $x))))))\n\n(test compile-higher-order-meta-node-rest-arguments\n \"Test compilation of higher-order meta-node with rest arguments.\"\n\n (let ((apply (mock-meta-node (f x) (functor f x)))\n (f (mock-meta-node (x y (rest xs)) xs)))\n\n (test-compile-meta-node\n (x)\n (functor apply (meta-node-ref f) x)\n\n ($x)\n `(let nil\n (call-tridash-meta-node\n ,apply\n (list\n #'(lambda (&rest $args)\n (if (correct-arity?% '(2) (length $args))\n (destructuring-bind ($x2 $y &rest $xs &aux ($rest (or $xs +empty-list+)))\n $args\n (call-tridash-meta-node ,f (list $x2 $y $rest)))\n (fail-arity-error)))\n $x))))))\n\n(test compile-invoke-higher-order-node\n \"Test compilation of invoking value nodes.\"\n\n (test-compile-meta-node\n\n (f x y)\n (functor f x y)\n\n ($f $x $y)\n `(let nil\n (call-node $f (list $x $y)))))\n\n(test compile-literals\n \"Test compilation of literal values.\"\n\n (with-core-nodes (\"and\")\n (test-compile-meta-node\n\n ()\n (functor and \"hello\" (functor and 1 (functor and 2.3 'symbol)))\n\n ()\n '(let nil\n (!|and| \"hello\"\n (thunk\n (!|and| 1\n (thunk\n (!|and| 2.3 'symbol)))))))))\n\n(test compile-core-arithmetic\n \"Test compilation of core arithmetic meta-nodes.\"\n\n (with-core-nodes (\"/\" \"*\" \"+\" \"-\")\n (test-compile-meta-node\n\n (a b c d)\n (functor\n /\n (functor * (functor + a b) (functor - c d))\n (functor - d))\n\n ($a $b $c $d)\n '(let nil\n (!/\n (!* (!+ $a $b) (!- $c $d))\n (!- $d))))))\n\n(test compile-core-comparison-and-logical\n \"Test compilation of core comparison and logical meta-nodes.\"\n\n (with-core-nodes (\"not\" \"or\" \"and\" \"=\" \"!=\" \"<\" \"<=\" \">\" \">=\")\n (test-compile-meta-node\n\n (x y)\n (functor\n not\n\n (functor\n or\n (functor and (functor < x y) (functor = y x))\n\n (functor\n or\n (functor <= x 10)\n\n (functor\n or\n (functor > 1 y)\n\n (functor\n or\n (functor >= 8 y)\n (functor != x y))))))\n\n ($x $y)\n '(let nil\n (!|not|\n (!|or|\n (!|and| (!< $x $y) (thunk (!= $y $x)))\n (thunk\n (!|or|\n (!<= $x 10)\n\n (thunk\n (!|or|\n (!> 1 $y)\n\n (thunk\n (!|or|\n (!>= 8 $y)\n (thunk (!!= $x $y))))))))))))))\n\n(test compile-core-type-checks\n \"Test compilation of core type checking meta-nodes.\"\n\n (with-core-nodes (\"or\" \"int?\" \"real?\" \"string?\")\n (test-compile-meta-node\n\n (x y z)\n (functor\n or\n (functor int? x)\n\n (functor\n or\n (functor real? y)\n (functor string? 
z)))\n\n ($x $y $z)\n '(let nil\n (!|or|\n (!|int?| $x)\n\n (thunk\n (!|or|\n (!|real?| $y)\n (thunk (!|string?| $z)))))))))\n\n(test compile-tail-recursive-if\n \"Test compilation of if expression in recursive tail position.\"\n\n (with-core-nodes (\"-\" \"*\" \"<\")\n (test-compile-meta-node\n\n (n acc)\n (if-expression (functor < n 2)\n acc\n (functor self (functor - n 1) (functor * n acc)))\n\n ($n $acc)\n `(let nil\n (!|if| (!< $n 2)\n $acc\n (thunk\n (call-tridash-meta-node ,self (list (!- $n 1) (!* $n $acc)))))))))\n\n(test compile-tail-recursive-if-functor\n \"Test compilation of if functor in recursive tail position.\"\n\n (with-core-nodes (\"if\" \"-\" \"*\" \"<\")\n (test-compile-meta-node\n\n (n acc)\n (functor if\n (functor < n 2)\n acc\n (functor self (functor - n 1) (functor * n acc)))\n\n ($n $acc)\n `(let nil\n (!|if| (!< $n 2)\n $acc\n (thunk\n (call-tridash-meta-node ,self (list (!- $n 1) (!* $n $acc)))))))))\n\n(test compile-tail-recursive-expression-block\n \"Test compilation of expression blocks in recursive tail position.\"\n\n (with-core-nodes (\"if\" \"-\" \"*\" \"<\")\n (test-compile-meta-node\n\n (n acc)\n (functor if\n (functor < n 2)\n acc\n (expression-block\n (functor self (functor - n 1) (functor * n acc))))\n\n ($n $acc)\n `(let nil\n (!|if| (!< $n 2)\n $acc\n (thunk\n (call-tridash-meta-node ,self (list (!- $n 1) (!* $n $acc)))))))))\n\n(test compile-tail-recursive-or-functor\n \"Test compilation of `or` functor in recursive tail position.\"\n\n (with-core-nodes (\"or\" \"=\" \"!=\" \"-\")\n (test-compile-meta-node\n\n (n)\n (functor or (functor = n 0) (functor self (functor - n 1)))\n\n ($n)\n `(let nil\n (!|or| (!= $n 0)\n (thunk\n (call-tridash-meta-node ,self (list (!- $n 1)))))))))\n\n(test compile-tail-recursive-and-functor\n \"Test compilation of `and` functor in recursive tail position.\"\n\n (with-core-nodes (\"and\" \"=\" \"!=\" \"-\")\n (test-compile-meta-node\n\n (n)\n (functor and (functor = n 0) (functor self (functor - n 1)))\n\n ($n)\n `(let nil\n (!|and| (!= $n 0)\n (thunk\n (call-tridash-meta-node ,self (list (!- $n 1)))))))))\n\n(test compile-tail-recursive-catch-expression\n \"Test compilation of catch expressions in recursive tail position.\"\n\n (with-core-nodes (\"-\" \"+\")\n (test-compile-meta-node\n\n (n)\n (catch-expression (functor self (functor + n 1))\n (functor self (functor - n 1)))\n\n ($n)\n `(let nil\n (!|catch| (call-tridash-meta-node ,self (list (!+ $n 1)))\n (thunk\n (call-tridash-meta-node ,self (list (!- $n 1)))))))))\n\n(test compile-meta-node-optional-arguments\n \"Test compilation of meta-node with optional arguments.\"\n\n (with-core-nodes (\"+\")\n (test-compile-meta-node\n (n (optional d 1))\n (functor + n d)\n\n ($n &optional ($d 1))\n '(let nil\n (!|+| $n $d)))))\n\n(test compile-meta-node-multiple-optional-arguments\n \"Test compilation of meta-node with multiple optional arguments.\"\n\n (with-core-nodes (\"+\")\n (test-compile-meta-node\n (n (optional d 1) (optional e 2))\n (functor + n (functor + d e))\n\n ($n &optional ($d 1) ($e 2))\n '(let nil\n (!|+| $n (!|+| $d $e))))))\n\n(test compile-meta-node-rest-argument\n \"Test compilation of meta-node with rest argument.\"\n\n (with-core-nodes (\"cons\")\n (test-compile-meta-node\n (x (rest xs))\n (functor cons x xs)\n\n ($x &optional ($xs +empty-list+))\n '(let nil\n (!|cons| $x $xs)))))\n\n(test compile-meta-node-optional-and-rest-arguments\n \"Test compilation of meta-node with optional and rest arguments.\"\n\n (with-core-nodes (\"cons\")\n 
(test-compile-meta-node\n (x (optional y 2) (rest xs))\n (functor cons x (functor cons y xs))\n\n ($x &optional ($y 2) ($xs +empty-list+))\n '(let nil\n (!|cons| $x (thunk (!|cons| $y $xs)))))))\n\n(test compile-cyclic-references\n \"Test compilation of cyclic references.\"\n\n (with-core-nodes (\"cons\")\n (test-compile-meta-node\n (a b)\n (aprog1 (expression-block nil :count 2)\n (setf (expression-block-expression it)\n (functor cons a (functor cons b (cyclic-reference it)))))\n\n ($a $b)\n '(let ($block)\n (setf $block\n (thunk (!|cons| $a (thunk (!|cons| $b $block)))))\n $block))))\n\n(test compile-error-usupported-external-meta-node\n \"Test that compiling an unsupported external-meta-node results in an error.\"\n\n (with-external-meta-nodes (\"not-a-function\")\n (signals\n unsupported-meta-node-error\n\n (tridash->cl-function\n (mock-meta-node\n (arg)\n (functor not-a-function arg))))))\n\n\f\n Test Calling Tridash Meta - Nodes from CL\n\n(test call-meta-node-single-expression\n \"Test calling a single expression meta-node from CL.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core)\"\n \"min(x,y) : case{x < y : x; y}\")\n\n (with-nodes ((min \"min\")) modules\n (is (= 2 (call-meta-node min '(2 10))))\n (is (= 2 (call-meta-node min '(10 2))))\n (is (= -5.3 (call-meta-node min '(-5.3 7.6))))\n (is (= 1 (call-meta-node min '(1 1)))))))\n\n(test call-meta-node-with-if-expression\n \"Test calling a meta-node with if expressions from CL.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core)\"\n \"f(cond, x) : if(cond, x, 0)\")\n\n (with-nodes ((f \"f\")) modules\n (is (= 10 (call-meta-node f '(t 10))))\n (is (= 0 (call-meta-node f '(nil 5))))\n\n (signals tridash-fail (call-meta-node f '(1 5))))))\n\n(test call-meta-node-with-and-expression\n \"Test calling a meta-node with `and` expressions from CL.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core)\"\n \"f(cond, x) : cond and x\")\n\n (with-nodes ((f \"f\")) modules\n (is-true (call-meta-node f '(t t)))\n (is (= nil (call-meta-node f '(nil t))))\n (is (= nil (call-meta-node f '(t nil))))\n (is (= nil (call-meta-node f '(nil nil)))))))\n\n(test call-meta-node-with-or-expression\n \"Test calling a meta-node with `or` expressions from CL.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core)\"\n \"f(cond, x) : cond or x\")\n\n (with-nodes ((f \"f\")) modules\n (is-true (call-meta-node f '(t t)))\n (is-true (call-meta-node f '(nil t)))\n (is-true (call-meta-node f '(t nil)))\n (is (= nil (call-meta-node f '(nil nil)))))))\n\n(test call-meta-node-catch-fail-expression\n \"Test calling a meta-node with multiple nodes and CATCH-FAIL expressions.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core)\"\n \"min(x,y) : { x < y -> (x -> /context(self,c)); y -> /context(self,c) }\")\n\n (with-nodes ((min \"min\")) modules\n (is (= 2 (resolve (call-meta-node min '(2 10)))))\n (is (= 2 (resolve (call-meta-node min '(10 2)))))\n (is (= -5.3 (resolve (call-meta-node min '(-5.3 7.6)))))\n (is (= 1 (resolve (call-meta-node min '(1 1))))))))\n\n(test call-meta-node-recursive\n \"Test calling a recursive meta-node from CL.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core)\"\n \"fact(n) : { case{n < 2 : 1; n * fact(n - 1)} }\")\n\n (with-nodes ((fact \"fact\")) modules\n (is (= 6 (call-meta-node fact '(3))))\n (is (= 120 (call-meta-node fact '(5))))\n (is (= 1 (call-meta-node fact 
'(0)))))))\n\n(test call-meta-node-tail-recursive\n \"Test calling a tail-recursive meta-node from CL.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core)\"\n \"fact(n) : { iter(n,acc) : case{n < 2 : acc; iter(n - 1, n * acc)}; iter(n, 1) }\")\n\n (with-nodes ((fact \"fact\")) modules\n (is (= 6 (call-meta-node fact '(3))))\n (is (= 120 (call-meta-node fact '(5))))\n (is (= 1 (call-meta-node fact '(0)))))))\n\n(test call-meta-node-with-meta-node-call\n \"Test calling a meta-node which calls other meta-nodes.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core)\"\n \"1-(n) : n - 1\"\n \"1+(n) : n + 1\"\n \"f(a, b) : 1-(a) * 1+(b)\")\n\n (with-nodes ((f \"f\")) modules\n (is (= 0 (call-meta-node f '(1 5))))\n (is (= 45 (call-meta-node f '(10 4))))\n (is (= 33 (call-meta-node f '(4 10)))))))\n\n(test call-meta-node-nested-meta-nodes\n \"Test calling a meta-node with nested meta-nodes.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core)\"\n \"f(x, y, z) : { g(n) : n - sum; x + y -> sum; g(z) }\")\n\n (with-nodes ((f \"f\")) modules\n (is (= 0 (call-meta-node f '(1 2 3))))\n (is (= 2 (call-meta-node f '(2 3 7)))))))\n\n(test call-meta-node-optional-arguments-no-default\n \"Test calling a meta-node with optional arguments without default values.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, +, fail-type?)\"\n \"inc(n, :(d)) : n + d\"\n\n \"f(x) : inc(x)\"\n \"g(x) : inc(x, 2)\"\n\n \"h(x) : fail-type?(inc(x), &(No-Value%))\")\n\n (with-nodes ((f \"f\") (g \"g\") (h \"h\")) modules\n (signals tridash-fail (call-meta-node f (list 3)))\n (is (= 7 (call-meta-node g (list 5))))\n\n (is-true (call-meta-node h (list 2))))))\n\n(test call-meta-node-optional-arguments-with-default\n \"Test calling a meta-node with optional arguments without default values.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, +)\"\n \"inc(n, d : 1) : n + d\"\n\n \"f(x) : inc(x)\"\n \"g(x) : inc(x, 2)\")\n\n (with-nodes ((f \"f\") (g \"g\")) modules\n (is (= 4 (call-meta-node f (list 3))))\n (is (= 7 (call-meta-node g (list 5)))))))\n\n(test call-meta-node-keyword-arguments\n \"Test calling a meta-node with keyword arguments\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, +)\"\n \"add(a, b, c : 3, d : 4) : a + b + c + d\"\n \"f(x, y) : add(x, d : 10, b : y)\")\n\n (with-nodes ((f \"f\")) modules\n (is (= 16 (call-meta-node f '(1 2))))\n (is (= 45 (call-meta-node f '(15 17)))))))\n\n(test call-meta-node-rest-argument\n \"Test calling a meta-node with rest argument.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, and, =, Empty)\"\n \"check(..(xs)) : xs = Empty\"\n\n \"f(x) : x and check()\"\n \"g(x) : check(x)\"\n \"h(x) : check(x, 1, 2, 3)\")\n\n (with-nodes ((f \"f\") (g \"g\") (h \"h\")) modules\n (is-true (call-meta-node f '(t)))\n (is (= nil (call-meta-node g '(2))))\n (is (= nil (call-meta-node h '(2)))))))\n\n(test call-higher-order-meta-node\n \"Test calling meta-node with higher order meta-nodes.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, +, not)\"\n \"apply(f, x) : f(x)\"\n \"1+(n) : n + 1\"\n\n \"f(a) : apply(..(not), a)\"\n \"g(a) : apply(..(1+), a)\")\n\n (with-nodes ((f \"f\") (g \"g\")) modules\n (is (= t (call-meta-node f '(nil))))\n (is (= nil (call-meta-node f '(t))))\n\n (is (= 2 (call-meta-node g '(1))))\n (is (= 4 (call-meta-node g '(3)))))))\n\n(test 
call-higher-order-meta-node-optional-arguments\n \"Test calling meta-node with higher-order meta-node with optional arguments.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, +, fail-type?)\"\n \"apply(f, x) : f(x)\"\n \"apply2(f, x, y) : f(x, y)\"\n \"1+(n, :(d)) : n + d\"\n\n \"f(a) : apply(1+, a)\"\n \"g(a, b) : apply2(1+, a, b)\"\n \"h(x) : fail-type?(apply(1+, x), &(No-Value%))\")\n\n (with-nodes ((f \"f\") (g \"g\") (h \"h\")) modules\n (signals tridash-fail (call-meta-node f '(0)))\n\n (is (= 3 (call-meta-node g '(1 2))))\n (is (= 8 (call-meta-node g '(5 3))))\n\n (is-true (call-meta-node h '(1))))))\n\n(test call-higher-order-meta-node-optional-argument-with-default\n \"Test calling meta-node with higher order meta-node with optional argument default values.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, +)\"\n \"apply(f, x) : f(x)\"\n \"apply2(f, x, y) : f(x, y)\"\n \"1+(n, d : 1) : n + d\"\n\n \"f(a) : apply(1+, a)\"\n \"g(a, b) : apply2(1+, a, b)\")\n\n (with-nodes ((f \"f\") (g \"g\")) modules\n (is (= 1 (call-meta-node f '(0))))\n (is (= 2 (call-meta-node f '(1))))\n\n (is (= 3 (call-meta-node g '(1 2))))\n (is (= 8 (call-meta-node g '(5 3)))))))\n\n(test call-higher-order-meta-node-rest-argument\n \"Test calling meta-node with higher order meta-node with rest argument.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, +, cons)\"\n \"apply3(f, x, y, z) : f(x, y, z)\"\n \"apply(f, x) : f(x)\"\n \"l(x, ..(xs)) : cons(x + 1, xs)\"\n\n \"f(a, b, c) : apply3(l, a, b, c)\"\n \"g(x) : apply(l, x)\")\n\n (with-nodes ((f \"f\") (g \"g\")) modules\n (is (= '(2 3 4) (call-meta-node f '(1 3 4))))\n (is (= '(2) (call-meta-node g '(1)))))))\n\n(test call-higher-order-meta-node-rest-argument-empty\n \"Test calling meta-node with higher order meta-node with empty rest argument.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, Empty, =)\"\n\n \"apply(f, x) : f(x)\"\n \"l(x, ..(xs)) : xs = Empty\"\n\n \"f(a) : apply(l, a)\")\n\n (with-nodes ((f \"f\")) modules\n (is-true (bool-value (call-meta-node f '(1)))))))\n\n(test call-higher-order-meta-node-optional-arguments-outer-nodes\n \"Test calling higher order meta-node with optional arguments and outer node references.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, +)\"\n\t \"apply(f, x) : f(x)\"\n\t \"test(a, x) : { f(y, d : 1) : y + d + x; apply(f, a) }\")\n\n (with-nodes ((test \"test\")) modules\n (is (= 6 (call-meta-node test '(2 3)))))))\n\n(test call-higher-order-external-meta-node\n \"Test calling meta-node with higher-order external meta-node.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, -)\"\n \"apply(f, x) : f(x)\"\n \"apply2(f, x, y) : f(x, y)\"\n\n \"f(a) : apply(-, a)\"\n \"g(a, b) : apply2(-, a, b)\")\n\n (with-nodes ((f \"f\") (g \"g\")) modules\n (is (= -1 (call-meta-node f '(1))))\n (is (= -2 (call-meta-node f '(2))))\n\n (is (= 1 (call-meta-node g '(3 2))))\n (is (= 2 (call-meta-node g '(5 3)))))))\n\n(test call-higher-order-meta-node-error\n \"Test error when calling a non-meta-node.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, +)\"\n \"apply(f, x) : f(x)\"\n\n \"x+(n) : n + ..(x)\"\n \"x\"\n\n \"f(a) : apply(..(x+), a)\")\n\n (with-nodes ((f \"f\")) modules\n (signals semantic-error (call-meta-node f '(1))))))\n\n(test call-primitive-function-subtract-and-negate\n \"Test calling `-` meta-node with 2 
arguments and 1 argument.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, -)\"\n \"sub(a, b) : a - b\"\n \"neg(x) : -(x)\")\n\n (with-nodes ((sub \"sub\") (neg \"neg\")) modules\n (is (= 3 (call-meta-node sub '(5 2))))\n (is (= -5 (call-meta-node neg '(5)))))))\n\n(test call-meta-node-object-expressions\n \"Test calling meta-node with object expressions.\"\n\n (with-module-table modules\n (build \"Person(first, last) : { first -> self.first; last -> self.last }\"\n \"get-first(p) : p.first\"\n \"get-last(p) : p.last\")\n\n (with-nodes ((person \"Person\") (get-first \"get-first\") (get-last \"get-last\"))\n modules\n\n (let ((p (call-meta-node person '(\"John\" \"Doe\"))))\n (is (= \"John\" (call-meta-node get-first (list p))))\n (is (= \"Doe\" (call-meta-node get-last (list p))))))))\n\n(test call-meta-node-catch-fail-operand\n \"Test catching failures in functor operand.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, !=)\"\n \"fails(x) : { x != x -> /context(self, catch); True -> /context(self, catch) }\")\n\n (with-nodes ((fails \"fails\")) modules\n (is-false (bool-value (call-meta-node fails '(1))))\n\n (is-true\n (->> (thunk (error 'tridash-fail))\n list\n (call-meta-node fails))))))\n\n(test call-meta-node-catch-fail-operator\n \"Test catching failures in functor operator.\"\n\n ;; Test that failures in the operator of a functor are caught.\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, !=, >, -)\"\n \"neg(x) : -(x)\"\n\n \"getf(f, x) : { x > 0 -> (f -> self) }\"\n \"test(x) : fails((getf(neg, x))(x))\"\n\n \"fails(x) : { x != x -> /context(self, catch); True -> /context(self, catch) }\")\n\n (with-nodes ((test \"test\")) modules\n (is (= nil (call-meta-node test '(1))))\n (is-true (call-meta-node test '(-1))))))\n\n(test call-meta-node-fail-types\n \"Test failure types.\"\n\n (with-module-table modules\n (build-core-module modules)\n (build-source-file \"./test/inputs/macros/failure-types.trd\" modules)\n\n (with-nodes ((check-range \"check-range\")) modules\n (is (= \"\" (call-meta-node check-range '(2 1 3))))\n (is (= \"Error: below minimum!\" (call-meta-node check-range '(0 1 3))))\n (is (= \"Error: above maximum!\" (call-meta-node check-range '(10 2 7)))))))\n\n(test call-meta-node-expression-block\n \"Test calling meta-node with one expression-block.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, +)\"\n \"f(x) : (x + 1) + (x + 1)\")\n\n (with-nodes ((f \"f\")) modules\n (is-true (call-meta-node f '(1)) 4)\n (is-true (call-meta-node f '(2)) 6))))\n\n(test call-meta-node-expression-block-multiple-references\n \"Test calling meta-node with expression-block with multiple references.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, *, +, -)\"\n \"f(x, y) : { x + 1 -> x1; y + 2 -> y2; (x1 + y2) * (x1 - y2) }\")\n\n (with-nodes ((f \"f\")) modules\n (is-true (call-meta-node f '(3 7)) -65)\n (is-true (call-meta-node f '(5 2)) 20))))\n\n(test call-meta-node-cyclic-references\n \"Test calling a meta-node with cyclic references.\"\n\n (with-module-table modules\n (build-core-module)\n (build-source-file \"./test/inputs/macros/cyclic-references.trd\" modules)\n\n (with-nodes ((f \"f\")) modules\n (is-true (call-meta-node f '(1 2)) '(1 2 1 2 1)))))\n\n(test call-meta-node-type-error-arithmetic-functions\n \"Test type errors in arithmetic functions.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, +, 
!=)\"\n \"1+(x) : fails(x + 1)\"\n \"fails(x) : { x != x -> /context(self, catch); True -> /context(self, catch) }\")\n\n (with-nodes ((1+ \"1+\")) modules\n (is (= nil (call-meta-node 1+ '(1))))\n (is-true (call-meta-node 1+ '(\"hello\"))))))\n\n(test call-meta-node-type-error-objects\n \"Test type errors in objects.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, !=)\"\n \"test(x) : fails(x.key)\"\n \"fails(x) : { x != x -> /context(self, catch); True -> /context(self, catch) }\")\n\n (with-nodes ((test \"test\")) modules\n (is-true (bool-value (call-meta-node test '(1))))\n\n (is-true (bool-value (call-meta-node test (list (make-hash-map)))))\n\n (is (= nil\n (->> (list (cons (id-symbol \"key\") 1))\n alist-hash-map\n list\n (call-meta-node test)\n bool-value))))))\n\n\f\n;;; Test Actual Macros\n\n(test macro-compile-time-computation\n \"Test macro which performs computation at compile-time.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core)\"\n\n \"square(x) : x * x\"\n \"/attribute(square, macro, 1)\"\n\n \"a * square(3) -> b\")\n\n (test-not-nodes modules\n '((\"/in\" \"core\" \"*\") \"a\" (\"square\" 3))\n '(\"square\" 3))\n\n (with-nodes ((a \"a\") (a*9 ((\"/in\" \"core\" \"*\") \"a\" 9))\n (b \"b\")\n (* \"*\"))\n modules\n\n (has-value-function (a) a*9 `(,* ,a 9))\n (test-simple-binding a*9 b))))\n\n(test macro-quoted-expression\n \"Test macro which returns quoted expression.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core)\"\n\n \"square(x) : list(/quote(*), x, x)\"\n \"/attribute(square, macro, 1)\"\n\n \"square(a) -> b\")\n\n (test-not-nodes modules '(\"square\" \"a\"))\n\n (with-nodes ((a \"a\") (b \"b\")\n (a*a ((\"/in\" \"core\" \"*\") \"a\" \"a\"))\n (* \"*\"))\n modules\n\n (has-value-function (a) a*a `(,* ,a ,a))\n (test-simple-binding a*a b))))\n\n(test macro-meta-node-reference\n \"Test macro which returns expression with meta-node references.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core)\"\n\n \"square(x) : list(& *, x, x)\"\n \"/attribute(square, macro, 1)\"\n\n \"square(a) -> b\")\n\n (test-not-nodes modules '(\"square\" \"a\"))\n\n (with-nodes ((a \"a\") (b \"b\")\n (a*a ((\"/in\" \"core\" \"*\") \"a\" \"a\"))\n (* \"*\"))\n modules\n\n (has-value-function (a) a*a `(,* ,a ,a))\n (test-simple-binding a*a b))))\n\n(test macro-with-macros\n \"Test expansion of macros in macro meta-nodes.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, ->, list, *)\"\n\n \"'(x) : list(/quote(/quote), x)\"\n \"/attribute(', macro, 1)\"\n\n \"square(x) : list('(*), x, x)\"\n \"/attribute(square, macro, 1)\"\n\n \"square(a) -> b\")\n\n (test-not-nodes modules '(\"square\" \"a\"))\n\n (with-nodes ((a \"a\") (b \"b\")\n (a*a ((\"/in\" \"core\" \"*\") \"a\" \"a\"))\n (* \"*\"))\n modules\n\n (has-value-function (a) a*a `(,* ,a ,a))\n (test-simple-binding a*a b))))\n\n(test macro-multiple-arguments\n \"Test macros with multiple arguments.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, list, ->, if)\"\n\n \"'(x) : list(/quote(/quote), x)\"\n \"/attribute(', macro, 1)\"\n\n \"!-(a, b) : list('(if), a, b)\"\n \"/attribute(!-, macro, 1)\"\n \"/operator(!-, 25, left)\"\n\n \"a !- b -> out\")\n\n (test-not-nodes modules '(\"!-\" \"a\" \"b\"))\n\n (with-nodes ((a \"a\") (b \"b\") (out \"out\")\n (a!-b ((\"/in\" \"builtin\" \"if\") \"a\" \"b\"))\n (if \"if\"))\n modules\n\n (has-value-function (a b) a!-b `(,if ,a 
,b :none))\n (test-simple-binding a!-b out))))\n\n(test macro-keyword-arguments\n \"Test passing macro arguments by keyword\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, list)\"\n\n \"f(x) : x\"\n\n \"call(operator, operand) : list(operator, operand)\"\n \"/attribute(call, macro, True)\"\n\n \"call(operand : in1, operator : f) -> out1\")\n\n (test-not-nodes modules '(\"call\" (\":\" \"operand\" \"in1\") (\":\" \"operator\" \"f\")))\n\n (with-nodes ((in1 \"in1\") (out1 \"out1\")\n (f \"f\")\n (f-in1 (\"f\" \"in1\")))\n modules\n\n (has-value-function (in1) f-in1\n `(,f ,in1))\n\n (test-simple-binding f-in1 out1))))\n\n(test macro-arity-check-required-only\n \"Test macro arity checks with required arguments only.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, *, list)\"\n\n \"square(x) : list(/quote(*), x, x)\"\n \"/attribute(square, macro, 1)\")\n\n (signals arity-error (build \"square(x, y) -> out\"))))\n\n(test macro-arity-check-optional-not-enough\n \"Test macro optional argument arity checks with not enough arguments.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, +, list)\"\n\n \"add3(x, y, z : 1) : list(/quote(+), x, list(/quote(+), y, z))\"\n \"/attribute(add3, macro, 1)\")\n\n (signals arity-error (build \"add3(x)\"))))\n\n(test macro-arity-check-optional-too-many\n \"Test macro optional argument arity checks with too many arguments.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, +, list)\"\n\n \"1+(n, d : 1) : list(/quote(+), x, d)\"\n \"/attribute(1+, macro, 1)\")\n\n (signals arity-error (build \"1+(x, y, z)\"))))\n\n(test macro-arity-check-rest-arguments\n \"Test macro rest argument arity checks.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, cons, list)\"\n \"make-list(x, ..(xs)) : cons(/quote(list), cons(x, xs))\"\n \"/attribute(make-list, macro, 1)\"\n\n \"make-list(x, y, z) -> output\"\n\n \"/attribute(x, input, 1)\"\n \"/attribute(y, input, 1)\"\n \"/attribute(z, input, 1)\")\n\n (with-nodes ((x \"x\") (y \"y\") (z \"z\")\n (list \"list\")\n (output \"output\"))\n (finish-build)\n\n (has-value-function\n (x y z)\n output\n\n `(,list ,(argument-list (list x y z)))))))\n\n(test macro-arity-check-keyword-missing-required\n \"Test macro keyword argument arity check with missing required argument\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, +, list)\"\n\n \"1+(n, d : 1) : list(/quote(+), x, d)\"\n \"/attribute(1+, macro, 1)\")\n\n (signals arity-error (build \"1+(d : 2)\"))))\n\n(test macro-arity-check-keyword-unknown\n \"Test macro keyword argument arity check with unknown keyword\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, +, list)\"\n\n \"1+(n, d : 1) : list(/quote(+), x, d)\"\n \"/attribute(1+, macro, 1)\")\n\n (signals arity-error (build \"1+(d : 2, n : 1, delta : 100)\"))))\n\n(test macro-rest-argument-outer-nodes\n \"Test macros with rest arguments and outer nodes.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, cons, list)\"\n \"make-list(x, ..(xs)) : cons(/quote(list), cons(x, cons(y, xs)))\"\n \"/attribute(make-list, macro, 1)\"\n\n \"/attribute(a, input, 1)\"\n \"/attribute(b, input, 1)\"\n \"/attribute(c, input, 1)\"\n \"/attribute(y, input, 1)\")\n\n (signals macro-outer-node-error (build \"make-list(a, b, c) -> output\"))))\n\n(test macro-build-meta-node-multiple-times\n \"Test building a meta-node 
multiple times when building macro.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, if, -, +, *, <)\"\n\t \"fact(n) : { 1 -> start; iter(n, acc) : if(n < start, acc, iter(n - 1, acc * n)); iter(n,1) }\"\n\n\t \"eval-fact(n) : fact(n)\"\n\t \"/attribute(eval-fact, macro, 1)\"\n\n\t \"fact(in) + eval-fact(3) -> output\"\n\t \"/attribute(in, input, 1)\")\n\n (with-nodes ((in \"in\") (output \"output\")\n\t\t (fact \"fact\") (+ \"+\"))\n\t(finish-build)\n\n (has-value-function (in) output\n `(,+ (,fact ,in) 6))\n\n (with-nodes ((iter \"iter\") (n \"n\")) (definition fact)\n (has-value-function (n) fact\n `(,iter ,n 1))))))\n\n(test macro-error-compile-loop\n \"Test error when compilation loop detected in macro compilation.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, list)\"\n \"test(x,y) : list(&(->), x, test(x,y))\"\n \"/attribute(test, macro, 1)\")\n\n (with-nodes ((test \"test\")) modules\n (signals compile-meta-node-loop-error (call-meta-node test '(1 2))))))\n\n(test macro-error-malformed-list\n \"Test error when macro returns a malformed list.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core)\"\n\n\t \"mac(x, y) : cons(x, y)\"\n\t \"/attribute(mac, macro, 1)\"\n\n \"f(x) : x\"\n \"target-f(s, expr) : cons(s, head(tail(expr)))\"\n \"/attribute(f, target-transform, target-f)\")\n\n (signals tridash-fail (build \"mac(1, 2)\"))\n (signals tridash-fail (build \"a -> f(b)\"))))\n\n(test macro-error-return-empty-list-failure\n \"Test error when macro returns empty list failure.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core)\"\n\n \"mac(x) : list(x, Empty!)\"\n \"/attribute(mac, macro, 1)\"\n\n \"f(x) : x\"\n \"target-f(s, expr) : list(s, Empty!)\"\n \"/attribute(f, target-transform, target-f)\")\n\n (signals tridash-fail (build \"mac(a)\"))\n (signals tridash-fail (build \"x -> f(y)\"))))\n\n\f\n Test Target Node Transform Macros\n\n(test target-transform-single-argument\n \"Test target transformation with single argument.\"\n\n (with-module-table modules\n (build-core-module)\n (build-source-file #p\"./test/inputs/macros/target-transform-1.trd\" modules)\n\n (with-nodes ((in \"in\")\n (out \"out\")\n (int \"int\"))\n\n (finish-build)\n\n (has-value-function (in) out `(,int ,in)))))\n\n(test target-transform-multiple-arguments\n \"Test target transformation with multiple arguments.\"\n\n (with-module-table modules\n (build-core-module)\n (build-source-file #p\"./test/inputs/macros/target-transform-2.trd\" modules)\n\n (with-nodes ((in \"in\") (a \"a\") (b \"b\")\n (- \"-\"))\n (finish-build)\n\n (has-value-function (in a) b `(,- ,in ,a)))))\n\n(test target-transform-arity-check-not-enough\n \"Test arity checks in target transform with not enough arguments.\"\n\n (with-module-table modules\n (build-core-module)\n (signals arity-error\n (build-source-file #p\"./test/inputs/macros/target-transform-3.trd\" modules))))\n\n(test target-transform-arity-check-too-many\n \"Test arity checks in target transform with too many arguments.\"\n\n (with-module-table modules\n (build-core-module)\n (signals arity-error\n (build-source-file #p\"./test/inputs/macros/target-transform-4.trd\" modules))))\n\n(test target-transform-arity-check-rest-argument\n \"Test arity checks in target transform with rest arguments.\"\n\n (with-module-table modules\n (build-core-module)\n (build-source-file #p\"./test/inputs/macros/target-transform-5.trd\" modules)\n\n (with-nodes ((in \"in\") (a 
\"a\") (b \"b\")\n (- \"-\"))\n (finish-build)\n\n (has-value-function (in a) b `(,- ,in ,a)))))\n\n(test target-transform-arity-check-optional-and-rest\n \"Test arity checks in target transform with optional and rest arguments.\"\n\n (with-module-table modules\n (build-core-module)\n (build-source-file #p\"./test/inputs/macros/target-transform-6.trd\" modules)\n\n (with-nodes ((in \"in\") (a \"a\") (b \"b\")\n (- \"-\"))\n (finish-build)\n\n (has-value-function (in a) b `(,- ,in ,a)))))\n\n(test target-transform-arity-check-optional-extra\n \"Test arity checks in target transform with optional extra arguments.\"\n\n (with-module-table modules\n (build-core-module)\n (build-source-file #p\"./test/inputs/macros/target-transform-7.trd\" modules)\n\n (with-nodes ((in \"in\") (a \"a\") (b \"b\")\n (- \"-\"))\n (finish-build)\n\n (has-value-function (in a) b `(,- ,in ,a)))))\n\n\f\n;;; Test Attribute Processor Nodes\n\n(test attribute-processor-meta-node\n \"Test attribute processor with meta-node.\"\n\n (with-module-table modules\n (build-source-file #p\"./test/inputs/macros/attribute-processor-1.trd\" modules)\n\n (with-nodes ((f \"f\") (match-f \"match-f\"))\n modules\n\n (is (eq match-f (attribute :matcher f))))))\n\n(test attribute-processor-external-meta-node\n \"Test attribute processor with external meta-node\"\n\n (with-module-table modules\n (build-source-file #p\"./test/inputs/macros/attribute-processor-2.trd\" modules)\n\n (with-nodes ((f \"f\") (match-f \"match-f\"))\n modules\n\n (is (eq match-f (attribute :matcher f))))))\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/alex-gutev/tridash/c7dbb36efe32a14ad9c4484ed45b1000e2f7132e/test/macros.lisp"},"language":{"kind":"string","value":"lisp"},"comments":{"kind":"string","value":"\n Tridash Programming Language.\n\n This program is free software: you can redistribute it and/or modify\n (at your option) any later version.\n\n This program is distributed in the hope that it will be useful,\n but WITHOUT ANY WARRANTY; without even the implied warranty of\n MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n GNU General Public License for more details.\n\n along with this program. 
If not, see .\n User-Defined Macro Tests\n Test Suite Definition\n Create an empty `FLAT-NODE-TABLE' to mark meta-node as\n already built\n Test that failures in the operator of a functor are caught.\n Test Actual Macros\n Test Attribute Processor Nodes"},"code":{"kind":"string","value":" macros.lisp\n Copyright ( C ) 2019 - 2021 \n it under the terms of the GNU General Public License as published by\n the Free Software Foundation , either version 3 of the License , or\n You should have received a copy of the GNU General Public License\n\n\n(defpackage :tridash/test.macros\n (:use :generic-cl\n :alexandria\n :anaphora\n :arrows\n :iterate\n :optima\n :named-readtables\n\n :tridash.parser\n :tridash.frontend\n\n :fiveam\n :tridash/test\n :tridash/test.util)\n\n (:shadowing-import-from :generic-cl\n :emptyp\n :multiply\n :accumulate)\n\n (:shadowing-import-from :fiveam :fail)\n\n (:import-from :lol\n :defmacro!\n :lol-syntax)\n\n (:import-from :tridash.frontend\n :tridash->cl-function\n :call-meta-node\n :call-tridash-meta-node\n :call-node\n\n :thunk\n :resolve\n :resolve%\n :tridash-fail\n\n :fail-thunk\n :+empty-list+\n :group-rest-args\n\n :check-arity\n :correct-arity?%\n :fail-arity-error\n\n :+optional-argument+\n :+rest-argument+))\n\n(in-package :tridash/test.macros)\n\n(in-readtable lol-syntax)\n\n\f\n\n(def-suite macros\n :description \"Test user-defined Tridash macros.\"\n :in frontend)\n\n(in-suite macros)\n\n\f\n Utilities\n\n(defun functor (operator &rest arguments)\n \"Creates a `FUNCTOR-EXPRESSION' with operator OPERATOR and arguments\n ARGUMENTS.\"\n\n (functor-expression operator arguments))\n\n(defun expression= (expected got)\n \"Checks that the CL expression GOT is equal to EXPECTED. Symbols in\n EXPECTED, beginning with $, are replaced with the symbol in GOT\n corresponding to the first occurrence.\"\n\n (let ((aliases (make-hash-map)))\n (flet ((equal? (got expected)\n (match* (got expected)\n (((type symbol) (type symbol))\n (= got\n (cond\n ((starts-with #\\$ (symbol-name expected))\n (ensure-get expected aliases got))\n\n ((starts-with #\\! (symbol-name expected))\n (id-symbol (subseq (symbol-name expected) 1)))\n\n (t\n expected))))\n\n ((_ _)\n (= got expected)))))\n\n (tree-equal got expected :test #'equal?))))\n\n(defmacro with-external-meta-nodes ((&rest names) &body body)\n \"Creates `EXTERNAL-META-NODE's with names NAMES and binds to\n variables with the same identifiers as the names upcased.\"\n\n `(let ,(map #`(,(intern (string-upcase a1)) (make-instance 'external-meta-node :name (id-symbol ,a1))) names)\n ,@body))\n\n(defmacro! with-core-nodes ((&rest names) &body body)\n \"Builds the core module and binds the node with names NAMES to\n variables with the same identifiers as the names, upcased.\"\n\n `(with-module-table ,g!modules\n (build-core-module)\n\n (with-nodes ,(map #`(,(intern (string-upcase a1)) ,a1) names) ,g!modules\n ,@body)))\n\n(defmacro mock-meta-node ((&rest operands) expression)\n \"Creates a `META-NODE' which takes operands OPERANDS and has a value\n function consisting of EXPRESSION. OPERANDS is a list of symbols\n naming the dependency nodes. 
EXPRESSION is evaluated in an\n environment where each symbol in OPERANDS is bound to the\n `NODE-LINK' object corresponding to the operand, and the symbol\n SELF is bound to the `META-NODE' object.\"\n\n (flet ((make-operand (operand)\n (match operand\n ((or (list 'optional symb value)\n (list 'optional symb))\n (list +optional-argument+ (make-instance 'node :name symb) value))\n\n ((list 'rest symb)\n (list +rest-argument+ (make-instance 'node :name symb)))\n\n (_ (make-instance 'node :name operand))))\n\n (operand-node (operand)\n (match operand\n ((list* 'optional symb _)\n symb)\n\n ((list 'rest symb)\n symb)\n\n (_ operand))))\n\n `(let ((self (make-instance 'final-meta-node\n :name 'test-meta-node\n :operands ',(map #'make-operand operands)))\n ,@(map #`(,a1 (node-link (make-instance 'node :name ',a1)))\n (map #'operand-node operands)))\n\n (setf (definition self) (make-instance 'flat-node-table :nodes (make-hash-set)))\n (setf (value-function (context self nil))\n ,expression)\n\n ,@(map #`(setf (get ',a1 (operands (context self nil))) ,a1)\n (map #'operand-node operands))\n ,@(map #`(setf (get ',a1 (dependencies self)) ,a1)\n (map #'operand-node operands))\n\n self)))\n\n(defmacro test-compile-meta-node ((&rest operands) expression args body)\n \"Creates and compiles a `META-NODE' to a CL LAMBDA expression and\n checks that it has arguments ARGS and body BODY, by EXPRESSION=.\n\n OPERANDS and EXPRESSION correspond to the OPERANDS and EXPRESSION\n arguments of MOCK-META-NODE.\n\n ARGS (not evaluated) is the expected lambda-list of the function.\n\n BODY is the expected body expression within the BLOCK, TAGBODY,\n RETURN expression. The symbol $recur, occurring in BODY is\n substituted with the TAGBODY tag for tail-recursive self\n calls. BODY is evaluated in an environment in which the symbol SELF\n is bound to the `META-NODE' object.\"\n\n (flet ((lambda-args (lambda-list)\n (->> (remove-if (rcurry #'memberp lambda-list-keywords) lambda-list)\n (map #'ensure-car)\n (map (compose #'gensym #'symbol-name)))))\n\n `(let ((self (mock-meta-node ,operands ,expression)))\n (is (expression=\n `(lambda ,',args\n (declare (ignorable ,@',(lambda-args args)))\n ,,body)\n\n (tridash->cl-function self))))))\n\n\f\n Tridash to CL Compilation Tests\n\n(test compile-functor-expression\n \"Test compilation of functor expressions to CL.\"\n\n (with-core-nodes (\"if\" \"<\" \"-\")\n (test-compile-meta-node\n\n (a b)\n (functor if (functor < a b) (functor - b a) (functor - a b))\n\n ($a $b)\n '(let nil\n (!|if| (!< $a $b)\n (thunk (!- $b $a))\n (thunk (!- $a $b)))))))\n\n(test compile-if-expression\n \"Test compilation of if expressions to CL.\"\n\n (with-core-nodes (\"<\" \"-\")\n (test-compile-meta-node\n\n (a b)\n (if-expression (functor < a b) (functor - b a) (functor - a b))\n\n ($a $b)\n '(let nil\n (!|if| (!< $a $b)\n (thunk (!- $b $a))\n (thunk (!- $a $b)))))))\n\n(test compile-object-expression\n \"Test compilation of object expressions to CL.\"\n\n (with-core-nodes (\"+\" \"-\")\n (test-compile-meta-node\n\n (x y)\n (object-expression\n `((sum ,(functor + x y))\n (diff ,(functor - x y))))\n\n ($x $y)\n '(let nil\n (alist-hash-map\n (list\n (cons 'sum (thunk (!+ $x $y)))\n (cons 'diff (thunk (!- $x $y)))))))))\n\n(test compile-member-expression\n \"Test compilation of member expressions to CL.\"\n\n (test-compile-meta-node\n\n (object)\n (member-expression\n (member-expression object 'key1) 'key2)\n\n ($obj)\n '(let nil\n (!|member| (!|member| $obj 'key1) 'key2))))\n\n(test 
compile-catch-expression\n \"Test compilation of catch expressions to CL.\"\n\n (with-core-nodes (\"/\" \"*\")\n (test-compile-meta-node\n\n (a b)\n (catch-expression\n (functor / a b)\n (functor * a b))\n\n ($a $b)\n\n '(let nil\n (!|catch| (!/ $a $b) (thunk (!* $a $b)))))))\n\n(test compile-fail-expression\n \"Test compilation of fail expressions to CL.\"\n\n (test-compile-meta-node\n ()\n (fail-expression)\n\n ()\n '(let nil\n (!|fail|))))\n\n(test compile-expression-block\n \"Test compilation of expression blocks, with reference count = 1, to CL.\"\n\n (with-core-nodes (\"+\")\n (test-compile-meta-node\n\n (a)\n (expression-block\n (functor + a 1))\n\n ($a)\n '(let nil\n (!+ $a 1)))))\n\n(test compile-expression-block-muliple-references\n \"Test compilation of expression blocks, with reference count > 1, to CL.\"\n\n (with-core-nodes (\"+\")\n (test-compile-meta-node\n\n (a)\n (let ((block (expression-block (functor + a 1) :count 2)))\n (functor + block block))\n\n ($a)\n '(let ($a+1)\n (setf $a+1 (thunk (!+ $a 1)))\n\n (!+ $a+1 $a+1)))))\n\n(test compile-meta-node-call\n \"Test compilation of calls to other meta-nodes, to CL.\"\n\n (with-core-nodes (\"-\")\n (let ((meta-node (mock-meta-node (a) a)))\n (test-compile-meta-node\n\n (a)\n (functor meta-node (functor - a))\n\n ($a)\n `(let nil\n (call-tridash-meta-node ,meta-node (list (!- $a))))))))\n\n(test compile-higher-order-external-meta-node\n \"Test compilation of higher order external meta-node.\"\n\n (with-core-nodes (\"not\")\n (let ((apply (mock-meta-node (f x) (functor f x))))\n (test-compile-meta-node\n\n (x)\n (functor apply (meta-node-ref not) x)\n\n ($x)\n `(let nil\n (call-tridash-meta-node\n ,apply\n\n (list\n #'(lambda (&rest $args)\n (if (correct-arity?% '(1 . 1) (length $args))\n (apply #'!|not| $args)\n (fail-arity-error)))\n $x)))))))\n\n(test compile-higher-order-if-meta-node\n \"Test compilation of higher order if meta-node.\"\n\n (with-core-nodes (\"if\")\n (let ((apply (mock-meta-node (f x y z) (functor f x y z))))\n (test-compile-meta-node\n\n (x y z)\n (functor apply (meta-node-ref if) x y z)\n\n ($x $y $z)\n `(let nil\n (call-tridash-meta-node\n ,apply\n\n (list\n #'(lambda (&rest $args)\n (if (correct-arity?% '(2 . 3) (length $args))\n (apply #'!|if| $args)\n (fail-arity-error)))\n $x $y $z)))))))\n\n(test compile-higher-order-and-meta-node\n \"Test compilation of higher order `and` meta-node.\"\n\n (with-core-nodes (\"and\")\n (let ((apply (mock-meta-node (f x y) (functor f x y))))\n (test-compile-meta-node\n\n (x y)\n (functor apply (meta-node-ref and) x y)\n\n ($x $y)\n `(let nil\n (call-tridash-meta-node\n ,apply\n\n (list\n #'(lambda (&rest $args)\n (if (correct-arity?% '(2 . 2) (length $args))\n (apply #'!|and| $args)\n (fail-arity-error)))\n $x $y)))))))\n\n(test compile-higher-order-or-meta-node\n \"Test compilation of higher order `or` meta-node.\"\n\n (with-core-nodes (\"or\")\n (let ((apply (mock-meta-node (f x y) (functor f x y))))\n (test-compile-meta-node\n\n (x y)\n (functor apply (meta-node-ref or) x y)\n\n ($x $y)\n `(let nil\n (call-tridash-meta-node\n ,apply\n\n (list\n #'(lambda (&rest $args)\n (if (correct-arity?% '(2 . 
2) (length $args))\n (apply #'!|or| $args)\n (fail-arity-error)))\n $x $y)))))))\n\n(test compile-higher-order-meta-node\n \"Test compilation of higher-order user defined meta-node.\"\n\n (let ((apply (mock-meta-node (f x) (functor f x)))\n (f (mock-meta-node (x) x)))\n\n (test-compile-meta-node\n\n (x)\n (functor apply (meta-node-ref f) x)\n\n ($x)\n `(let nil\n (call-tridash-meta-node\n ,apply\n (list\n #'(lambda (&rest $args)\n (if (correct-arity?% '(1 . 1) (length $args))\n (destructuring-bind ($x2) $args\n (call-tridash-meta-node ,f (list $x2)))\n (fail-arity-error)))\n $x))))))\n\n(test compile-higher-order-meta-node-optional-arguments\n \"Test compilation of higher-order meta-node with optional arguments.\"\n\n (let ((apply (mock-meta-node (f x) (functor f x)))\n (f (mock-meta-node (x (optional y) (optional z)) x)))\n\n (test-compile-meta-node\n (x)\n (functor apply (meta-node-ref f :optional (list 1 2)) x)\n\n ($x)\n `(let nil\n (call-tridash-meta-node\n ,apply\n (list\n #'(lambda (&rest $args)\n (if (correct-arity?% '(1 . 3) (length $args))\n (destructuring-bind ($x2 &optional ($y 1) ($z 2)) $args\n (call-tridash-meta-node ,f (list $x2 $y $z)))\n (fail-arity-error)))\n $x))))))\n\n(test compile-higher-order-meta-node-rest-arguments\n \"Test compilation of higher-order meta-node with rest arguments.\"\n\n (let ((apply (mock-meta-node (f x) (functor f x)))\n (f (mock-meta-node (x y (rest xs)) xs)))\n\n (test-compile-meta-node\n (x)\n (functor apply (meta-node-ref f) x)\n\n ($x)\n `(let nil\n (call-tridash-meta-node\n ,apply\n (list\n #'(lambda (&rest $args)\n (if (correct-arity?% '(2) (length $args))\n (destructuring-bind ($x2 $y &rest $xs &aux ($rest (or $xs +empty-list+)))\n $args\n (call-tridash-meta-node ,f (list $x2 $y $rest)))\n (fail-arity-error)))\n $x))))))\n\n(test compile-invoke-higher-order-node\n \"Test compilation of invoking value nodes.\"\n\n (test-compile-meta-node\n\n (f x y)\n (functor f x y)\n\n ($f $x $y)\n `(let nil\n (call-node $f (list $x $y)))))\n\n(test compile-literals\n \"Test compilation of literal values.\"\n\n (with-core-nodes (\"and\")\n (test-compile-meta-node\n\n ()\n (functor and \"hello\" (functor and 1 (functor and 2.3 'symbol)))\n\n ()\n '(let nil\n (!|and| \"hello\"\n (thunk\n (!|and| 1\n (thunk\n (!|and| 2.3 'symbol)))))))))\n\n(test compile-core-arithmetic\n \"Test compilation of core arithmetic meta-nodes.\"\n\n (with-core-nodes (\"/\" \"*\" \"+\" \"-\")\n (test-compile-meta-node\n\n (a b c d)\n (functor\n /\n (functor * (functor + a b) (functor - c d))\n (functor - d))\n\n ($a $b $c $d)\n '(let nil\n (!/\n (!* (!+ $a $b) (!- $c $d))\n (!- $d))))))\n\n(test compile-core-comparison-and-logical\n \"Test compilation of core comparison and logical meta-nodes.\"\n\n (with-core-nodes (\"not\" \"or\" \"and\" \"=\" \"!=\" \"<\" \"<=\" \">\" \">=\")\n (test-compile-meta-node\n\n (x y)\n (functor\n not\n\n (functor\n or\n (functor and (functor < x y) (functor = y x))\n\n (functor\n or\n (functor <= x 10)\n\n (functor\n or\n (functor > 1 y)\n\n (functor\n or\n (functor >= 8 y)\n (functor != x y))))))\n\n ($x $y)\n '(let nil\n (!|not|\n (!|or|\n (!|and| (!< $x $y) (thunk (!= $y $x)))\n (thunk\n (!|or|\n (!<= $x 10)\n\n (thunk\n (!|or|\n (!> 1 $y)\n\n (thunk\n (!|or|\n (!>= 8 $y)\n (thunk (!!= $x $y))))))))))))))\n\n(test compile-core-type-checks\n \"Test compilation of core type checking meta-nodes.\"\n\n (with-core-nodes (\"or\" \"int?\" \"real?\" \"string?\")\n (test-compile-meta-node\n\n (x y z)\n (functor\n or\n (functor int? 
x)\n\n (functor\n or\n (functor real? y)\n (functor string? z)))\n\n ($x $y $z)\n '(let nil\n (!|or|\n (!|int?| $x)\n\n (thunk\n (!|or|\n (!|real?| $y)\n (thunk (!|string?| $z)))))))))\n\n(test compile-tail-recursive-if\n \"Test compilation of if expression in recursive tail position.\"\n\n (with-core-nodes (\"-\" \"*\" \"<\")\n (test-compile-meta-node\n\n (n acc)\n (if-expression (functor < n 2)\n acc\n (functor self (functor - n 1) (functor * n acc)))\n\n ($n $acc)\n `(let nil\n (!|if| (!< $n 2)\n $acc\n (thunk\n (call-tridash-meta-node ,self (list (!- $n 1) (!* $n $acc)))))))))\n\n(test compile-tail-recursive-if-functor\n \"Test compilation of if functor in recursive tail position.\"\n\n (with-core-nodes (\"if\" \"-\" \"*\" \"<\")\n (test-compile-meta-node\n\n (n acc)\n (functor if\n (functor < n 2)\n acc\n (functor self (functor - n 1) (functor * n acc)))\n\n ($n $acc)\n `(let nil\n (!|if| (!< $n 2)\n $acc\n (thunk\n (call-tridash-meta-node ,self (list (!- $n 1) (!* $n $acc)))))))))\n\n(test compile-tail-recursive-expression-block\n \"Test compilation of expression blocks in recursive tail position.\"\n\n (with-core-nodes (\"if\" \"-\" \"*\" \"<\")\n (test-compile-meta-node\n\n (n acc)\n (functor if\n (functor < n 2)\n acc\n (expression-block\n (functor self (functor - n 1) (functor * n acc))))\n\n ($n $acc)\n `(let nil\n (!|if| (!< $n 2)\n $acc\n (thunk\n (call-tridash-meta-node ,self (list (!- $n 1) (!* $n $acc)))))))))\n\n(test compile-tail-recursive-or-functor\n \"Test compilation of `or` functor in recursive tail position.\"\n\n (with-core-nodes (\"or\" \"=\" \"!=\" \"-\")\n (test-compile-meta-node\n\n (n)\n (functor or (functor = n 0) (functor self (functor - n 1)))\n\n ($n)\n `(let nil\n (!|or| (!= $n 0)\n (thunk\n (call-tridash-meta-node ,self (list (!- $n 1)))))))))\n\n(test compile-tail-recursive-and-functor\n \"Test compilation of `and` functor in recursive tail position.\"\n\n (with-core-nodes (\"and\" \"=\" \"!=\" \"-\")\n (test-compile-meta-node\n\n (n)\n (functor and (functor = n 0) (functor self (functor - n 1)))\n\n ($n)\n `(let nil\n (!|and| (!= $n 0)\n (thunk\n (call-tridash-meta-node ,self (list (!- $n 1)))))))))\n\n(test compile-tail-recursive-catch-expression\n \"Test compilation of catch expressions in recursive tail position.\"\n\n (with-core-nodes (\"-\" \"+\")\n (test-compile-meta-node\n\n (n)\n (catch-expression (functor self (functor + n 1))\n (functor self (functor - n 1)))\n\n ($n)\n `(let nil\n (!|catch| (call-tridash-meta-node ,self (list (!+ $n 1)))\n (thunk\n (call-tridash-meta-node ,self (list (!- $n 1)))))))))\n\n(test compile-meta-node-optional-arguments\n \"Test compilation of meta-node with optional arguments.\"\n\n (with-core-nodes (\"+\")\n (test-compile-meta-node\n (n (optional d 1))\n (functor + n d)\n\n ($n &optional ($d 1))\n '(let nil\n (!|+| $n $d)))))\n\n(test compile-meta-node-multiple-optional-arguments\n \"Test compilation of meta-node with multiple optional arguments.\"\n\n (with-core-nodes (\"+\")\n (test-compile-meta-node\n (n (optional d 1) (optional e 2))\n (functor + n (functor + d e))\n\n ($n &optional ($d 1) ($e 2))\n '(let nil\n (!|+| $n (!|+| $d $e))))))\n\n(test compile-meta-node-rest-argument\n \"Test compilation of meta-node with rest argument.\"\n\n (with-core-nodes (\"cons\")\n (test-compile-meta-node\n (x (rest xs))\n (functor cons x xs)\n\n ($x &optional ($xs +empty-list+))\n '(let nil\n (!|cons| $x $xs)))))\n\n(test compile-meta-node-optional-and-rest-arguments\n \"Test compilation of meta-node with optional and rest 
arguments.\"\n\n (with-core-nodes (\"cons\")\n (test-compile-meta-node\n (x (optional y 2) (rest xs))\n (functor cons x (functor cons y xs))\n\n ($x &optional ($y 2) ($xs +empty-list+))\n '(let nil\n (!|cons| $x (thunk (!|cons| $y $xs)))))))\n\n(test compile-cyclic-references\n \"Test compilation of cyclic references.\"\n\n (with-core-nodes (\"cons\")\n (test-compile-meta-node\n (a b)\n (aprog1 (expression-block nil :count 2)\n (setf (expression-block-expression it)\n (functor cons a (functor cons b (cyclic-reference it)))))\n\n ($a $b)\n '(let ($block)\n (setf $block\n (thunk (!|cons| $a (thunk (!|cons| $b $block)))))\n $block))))\n\n(test compile-error-usupported-external-meta-node\n \"Test that compiling an unsupported external-meta-node results in an error.\"\n\n (with-external-meta-nodes (\"not-a-function\")\n (signals\n unsupported-meta-node-error\n\n (tridash->cl-function\n (mock-meta-node\n (arg)\n (functor not-a-function arg))))))\n\n\f\n Test Calling Tridash Meta - Nodes from CL\n\n(test call-meta-node-single-expression\n \"Test calling a single expression meta-node from CL.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core)\"\n \"min(x,y) : case{x < y : x; y}\")\n\n (with-nodes ((min \"min\")) modules\n (is (= 2 (call-meta-node min '(2 10))))\n (is (= 2 (call-meta-node min '(10 2))))\n (is (= -5.3 (call-meta-node min '(-5.3 7.6))))\n (is (= 1 (call-meta-node min '(1 1)))))))\n\n(test call-meta-node-with-if-expression\n \"Test calling a meta-node with if expressions from CL.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core)\"\n \"f(cond, x) : if(cond, x, 0)\")\n\n (with-nodes ((f \"f\")) modules\n (is (= 10 (call-meta-node f '(t 10))))\n (is (= 0 (call-meta-node f '(nil 5))))\n\n (signals tridash-fail (call-meta-node f '(1 5))))))\n\n(test call-meta-node-with-and-expression\n \"Test calling a meta-node with `and` expressions from CL.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core)\"\n \"f(cond, x) : cond and x\")\n\n (with-nodes ((f \"f\")) modules\n (is-true (call-meta-node f '(t t)))\n (is (= nil (call-meta-node f '(nil t))))\n (is (= nil (call-meta-node f '(t nil))))\n (is (= nil (call-meta-node f '(nil nil)))))))\n\n(test call-meta-node-with-or-expression\n \"Test calling a meta-node with `or` expressions from CL.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core)\"\n \"f(cond, x) : cond or x\")\n\n (with-nodes ((f \"f\")) modules\n (is-true (call-meta-node f '(t t)))\n (is-true (call-meta-node f '(nil t)))\n (is-true (call-meta-node f '(t nil)))\n (is (= nil (call-meta-node f '(nil nil)))))))\n\n(test call-meta-node-catch-fail-expression\n \"Test calling a meta-node with multiple nodes and CATCH-FAIL expressions.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core)\"\n \"min(x,y) : { x < y -> (x -> /context(self,c)); y -> /context(self,c) }\")\n\n (with-nodes ((min \"min\")) modules\n (is (= 2 (resolve (call-meta-node min '(2 10)))))\n (is (= 2 (resolve (call-meta-node min '(10 2)))))\n (is (= -5.3 (resolve (call-meta-node min '(-5.3 7.6)))))\n (is (= 1 (resolve (call-meta-node min '(1 1))))))))\n\n(test call-meta-node-recursive\n \"Test calling a recursive meta-node from CL.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core)\"\n \"fact(n) : { case{n < 2 : 1; n * fact(n - 1)} }\")\n\n (with-nodes ((fact \"fact\")) modules\n (is (= 6 (call-meta-node fact '(3))))\n (is (= 120 (call-meta-node fact 
'(5))))\n (is (= 1 (call-meta-node fact '(0)))))))\n\n(test call-meta-node-tail-recursive\n \"Test calling a tail-recursive meta-node from CL.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core)\"\n \"fact(n) : { iter(n,acc) : case{n < 2 : acc; iter(n - 1, n * acc)}; iter(n, 1) }\")\n\n (with-nodes ((fact \"fact\")) modules\n (is (= 6 (call-meta-node fact '(3))))\n (is (= 120 (call-meta-node fact '(5))))\n (is (= 1 (call-meta-node fact '(0)))))))\n\n(test call-meta-node-with-meta-node-call\n \"Test calling a meta-node which calls other meta-nodes.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core)\"\n \"1-(n) : n - 1\"\n \"1+(n) : n + 1\"\n \"f(a, b) : 1-(a) * 1+(b)\")\n\n (with-nodes ((f \"f\")) modules\n (is (= 0 (call-meta-node f '(1 5))))\n (is (= 45 (call-meta-node f '(10 4))))\n (is (= 33 (call-meta-node f '(4 10)))))))\n\n(test call-meta-node-nested-meta-nodes\n \"Test calling a meta-node with nested meta-nodes.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core)\"\n \"f(x, y, z) : { g(n) : n - sum; x + y -> sum; g(z) }\")\n\n (with-nodes ((f \"f\")) modules\n (is (= 0 (call-meta-node f '(1 2 3))))\n (is (= 2 (call-meta-node f '(2 3 7)))))))\n\n(test call-meta-node-optional-arguments-no-default\n \"Test calling a meta-node with optional arguments without default values.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, +, fail-type?)\"\n \"inc(n, :(d)) : n + d\"\n\n \"f(x) : inc(x)\"\n \"g(x) : inc(x, 2)\"\n\n \"h(x) : fail-type?(inc(x), &(No-Value%))\")\n\n (with-nodes ((f \"f\") (g \"g\") (h \"h\")) modules\n (signals tridash-fail (call-meta-node f (list 3)))\n (is (= 7 (call-meta-node g (list 5))))\n\n (is-true (call-meta-node h (list 2))))))\n\n(test call-meta-node-optional-arguments-with-default\n \"Test calling a meta-node with optional arguments without default values.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, +)\"\n \"inc(n, d : 1) : n + d\"\n\n \"f(x) : inc(x)\"\n \"g(x) : inc(x, 2)\")\n\n (with-nodes ((f \"f\") (g \"g\")) modules\n (is (= 4 (call-meta-node f (list 3))))\n (is (= 7 (call-meta-node g (list 5)))))))\n\n(test call-meta-node-keyword-arguments\n \"Test calling a meta-node with keyword arguments\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, +)\"\n \"add(a, b, c : 3, d : 4) : a + b + c + d\"\n \"f(x, y) : add(x, d : 10, b : y)\")\n\n (with-nodes ((f \"f\")) modules\n (is (= 16 (call-meta-node f '(1 2))))\n (is (= 45 (call-meta-node f '(15 17)))))))\n\n(test call-meta-node-rest-argument\n \"Test calling a meta-node with rest argument.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, and, =, Empty)\"\n \"check(..(xs)) : xs = Empty\"\n\n \"f(x) : x and check()\"\n \"g(x) : check(x)\"\n \"h(x) : check(x, 1, 2, 3)\")\n\n (with-nodes ((f \"f\") (g \"g\") (h \"h\")) modules\n (is-true (call-meta-node f '(t)))\n (is (= nil (call-meta-node g '(2))))\n (is (= nil (call-meta-node h '(2)))))))\n\n(test call-higher-order-meta-node\n \"Test calling meta-node with higher order meta-nodes.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, +, not)\"\n \"apply(f, x) : f(x)\"\n \"1+(n) : n + 1\"\n\n \"f(a) : apply(..(not), a)\"\n \"g(a) : apply(..(1+), a)\")\n\n (with-nodes ((f \"f\") (g \"g\")) modules\n (is (= t (call-meta-node f '(nil))))\n (is (= nil (call-meta-node f '(t))))\n\n (is (= 2 (call-meta-node g '(1))))\n (is (= 4 
(call-meta-node g '(3)))))))\n\n(test call-higher-order-meta-node-optional-arguments\n \"Test calling meta-node with higher-order meta-node with optional arguments.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, +, fail-type?)\"\n \"apply(f, x) : f(x)\"\n \"apply2(f, x, y) : f(x, y)\"\n \"1+(n, :(d)) : n + d\"\n\n \"f(a) : apply(1+, a)\"\n \"g(a, b) : apply2(1+, a, b)\"\n \"h(x) : fail-type?(apply(1+, x), &(No-Value%))\")\n\n (with-nodes ((f \"f\") (g \"g\") (h \"h\")) modules\n (signals tridash-fail (call-meta-node f '(0)))\n\n (is (= 3 (call-meta-node g '(1 2))))\n (is (= 8 (call-meta-node g '(5 3))))\n\n (is-true (call-meta-node h '(1))))))\n\n(test call-higher-order-meta-node-optional-argument-with-default\n \"Test calling meta-node with higher order meta-node with optional argument default values.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, +)\"\n \"apply(f, x) : f(x)\"\n \"apply2(f, x, y) : f(x, y)\"\n \"1+(n, d : 1) : n + d\"\n\n \"f(a) : apply(1+, a)\"\n \"g(a, b) : apply2(1+, a, b)\")\n\n (with-nodes ((f \"f\") (g \"g\")) modules\n (is (= 1 (call-meta-node f '(0))))\n (is (= 2 (call-meta-node f '(1))))\n\n (is (= 3 (call-meta-node g '(1 2))))\n (is (= 8 (call-meta-node g '(5 3)))))))\n\n(test call-higher-order-meta-node-rest-argument\n \"Test calling meta-node with higher order meta-node with rest argument.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, +, cons)\"\n \"apply3(f, x, y, z) : f(x, y, z)\"\n \"apply(f, x) : f(x)\"\n \"l(x, ..(xs)) : cons(x + 1, xs)\"\n\n \"f(a, b, c) : apply3(l, a, b, c)\"\n \"g(x) : apply(l, x)\")\n\n (with-nodes ((f \"f\") (g \"g\")) modules\n (is (= '(2 3 4) (call-meta-node f '(1 3 4))))\n (is (= '(2) (call-meta-node g '(1)))))))\n\n(test call-higher-order-meta-node-rest-argument-empty\n \"Test calling meta-node with higher order meta-node with empty rest argument.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, Empty, =)\"\n\n \"apply(f, x) : f(x)\"\n \"l(x, ..(xs)) : xs = Empty\"\n\n \"f(a) : apply(l, a)\")\n\n (with-nodes ((f \"f\")) modules\n (is-true (bool-value (call-meta-node f '(1)))))))\n\n(test call-higher-order-meta-node-optional-arguments-outer-nodes\n \"Test calling higher order meta-node with optional arguments and outer node references.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, +)\"\n\t \"apply(f, x) : f(x)\"\n\t \"test(a, x) : { f(y, d : 1) : y + d + x; apply(f, a) }\")\n\n (with-nodes ((test \"test\")) modules\n (is (= 6 (call-meta-node test '(2 3)))))))\n\n(test call-higher-order-external-meta-node\n \"Test calling meta-node with higher-order external meta-node.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, -)\"\n \"apply(f, x) : f(x)\"\n \"apply2(f, x, y) : f(x, y)\"\n\n \"f(a) : apply(-, a)\"\n \"g(a, b) : apply2(-, a, b)\")\n\n (with-nodes ((f \"f\") (g \"g\")) modules\n (is (= -1 (call-meta-node f '(1))))\n (is (= -2 (call-meta-node f '(2))))\n\n (is (= 1 (call-meta-node g '(3 2))))\n (is (= 2 (call-meta-node g '(5 3)))))))\n\n(test call-higher-order-meta-node-error\n \"Test error when calling a non-meta-node.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, +)\"\n \"apply(f, x) : f(x)\"\n\n \"x+(n) : n + ..(x)\"\n \"x\"\n\n \"f(a) : apply(..(x+), a)\")\n\n (with-nodes ((f \"f\")) modules\n (signals semantic-error (call-meta-node f '(1))))))\n\n(test call-primitive-function-subtract-and-negate\n 
\"Test calling `-` meta-node with 2 arguments and 1 argument.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, -)\"\n \"sub(a, b) : a - b\"\n \"neg(x) : -(x)\")\n\n (with-nodes ((sub \"sub\") (neg \"neg\")) modules\n (is (= 3 (call-meta-node sub '(5 2))))\n (is (= -5 (call-meta-node neg '(5)))))))\n\n(test call-meta-node-object-expressions\n \"Test calling meta-node with object expressions.\"\n\n (with-module-table modules\n (build \"Person(first, last) : { first -> self.first; last -> self.last }\"\n \"get-first(p) : p.first\"\n \"get-last(p) : p.last\")\n\n (with-nodes ((person \"Person\") (get-first \"get-first\") (get-last \"get-last\"))\n modules\n\n (let ((p (call-meta-node person '(\"John\" \"Doe\"))))\n (is (= \"John\" (call-meta-node get-first (list p))))\n (is (= \"Doe\" (call-meta-node get-last (list p))))))))\n\n(test call-meta-node-catch-fail-operand\n \"Test catching failures in functor operand.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, !=)\"\n \"fails(x) : { x != x -> /context(self, catch); True -> /context(self, catch) }\")\n\n (with-nodes ((fails \"fails\")) modules\n (is-false (bool-value (call-meta-node fails '(1))))\n\n (is-true\n (->> (thunk (error 'tridash-fail))\n list\n (call-meta-node fails))))))\n\n(test call-meta-node-catch-fail-operator\n \"Test catching failures in functor operator.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, !=, >, -)\"\n \"neg(x) : -(x)\"\n\n \"getf(f, x) : { x > 0 -> (f -> self) }\"\n \"test(x) : fails((getf(neg, x))(x))\"\n\n \"fails(x) : { x != x -> /context(self, catch); True -> /context(self, catch) }\")\n\n (with-nodes ((test \"test\")) modules\n (is (= nil (call-meta-node test '(1))))\n (is-true (call-meta-node test '(-1))))))\n\n(test call-meta-node-fail-types\n \"Test failure types.\"\n\n (with-module-table modules\n (build-core-module modules)\n (build-source-file \"./test/inputs/macros/failure-types.trd\" modules)\n\n (with-nodes ((check-range \"check-range\")) modules\n (is (= \"\" (call-meta-node check-range '(2 1 3))))\n (is (= \"Error: below minimum!\" (call-meta-node check-range '(0 1 3))))\n (is (= \"Error: above maximum!\" (call-meta-node check-range '(10 2 7)))))))\n\n(test call-meta-node-expression-block\n \"Test calling meta-node with one expression-block.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, +)\"\n \"f(x) : (x + 1) + (x + 1)\")\n\n (with-nodes ((f \"f\")) modules\n (is-true (call-meta-node f '(1)) 4)\n (is-true (call-meta-node f '(2)) 6))))\n\n(test call-meta-node-expression-block-multiple-references\n \"Test calling meta-node with expression-block with multiple references.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, *, +, -)\"\n \"f(x, y) : { x + 1 -> x1; y + 2 -> y2; (x1 + y2) * (x1 - y2) }\")\n\n (with-nodes ((f \"f\")) modules\n (is-true (call-meta-node f '(3 7)) -65)\n (is-true (call-meta-node f '(5 2)) 20))))\n\n(test call-meta-node-cyclic-references\n \"Test calling a meta-node with cyclic references.\"\n\n (with-module-table modules\n (build-core-module)\n (build-source-file \"./test/inputs/macros/cyclic-references.trd\" modules)\n\n (with-nodes ((f \"f\")) modules\n (is-true (call-meta-node f '(1 2)) '(1 2 1 2 1)))))\n\n(test call-meta-node-type-error-arithmetic-functions\n \"Test type errors in arithmetic functions.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, +, !=)\"\n \"1+(x) : fails(x + 
1)\"\n \"fails(x) : { x != x -> /context(self, catch); True -> /context(self, catch) }\")\n\n (with-nodes ((1+ \"1+\")) modules\n (is (= nil (call-meta-node 1+ '(1))))\n (is-true (call-meta-node 1+ '(\"hello\"))))))\n\n(test call-meta-node-type-error-objects\n \"Test type errors in objects.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, !=)\"\n \"test(x) : fails(x.key)\"\n \"fails(x) : { x != x -> /context(self, catch); True -> /context(self, catch) }\")\n\n (with-nodes ((test \"test\")) modules\n (is-true (bool-value (call-meta-node test '(1))))\n\n (is-true (bool-value (call-meta-node test (list (make-hash-map)))))\n\n (is (= nil\n (->> (list (cons (id-symbol \"key\") 1))\n alist-hash-map\n list\n (call-meta-node test)\n bool-value))))))\n\n\f\n\n(test macro-compile-time-computation\n \"Test macro which performs computation at compile-time.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core)\"\n\n \"square(x) : x * x\"\n \"/attribute(square, macro, 1)\"\n\n \"a * square(3) -> b\")\n\n (test-not-nodes modules\n '((\"/in\" \"core\" \"*\") \"a\" (\"square\" 3))\n '(\"square\" 3))\n\n (with-nodes ((a \"a\") (a*9 ((\"/in\" \"core\" \"*\") \"a\" 9))\n (b \"b\")\n (* \"*\"))\n modules\n\n (has-value-function (a) a*9 `(,* ,a 9))\n (test-simple-binding a*9 b))))\n\n(test macro-quoted-expression\n \"Test macro which returns quoted expression.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core)\"\n\n \"square(x) : list(/quote(*), x, x)\"\n \"/attribute(square, macro, 1)\"\n\n \"square(a) -> b\")\n\n (test-not-nodes modules '(\"square\" \"a\"))\n\n (with-nodes ((a \"a\") (b \"b\")\n (a*a ((\"/in\" \"core\" \"*\") \"a\" \"a\"))\n (* \"*\"))\n modules\n\n (has-value-function (a) a*a `(,* ,a ,a))\n (test-simple-binding a*a b))))\n\n(test macro-meta-node-reference\n \"Test macro which returns expression with meta-node references.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core)\"\n\n \"square(x) : list(& *, x, x)\"\n \"/attribute(square, macro, 1)\"\n\n \"square(a) -> b\")\n\n (test-not-nodes modules '(\"square\" \"a\"))\n\n (with-nodes ((a \"a\") (b \"b\")\n (a*a ((\"/in\" \"core\" \"*\") \"a\" \"a\"))\n (* \"*\"))\n modules\n\n (has-value-function (a) a*a `(,* ,a ,a))\n (test-simple-binding a*a b))))\n\n(test macro-with-macros\n \"Test expansion of macros in macro meta-nodes.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, ->, list, *)\"\n\n \"'(x) : list(/quote(/quote), x)\"\n \"/attribute(', macro, 1)\"\n\n \"square(x) : list('(*), x, x)\"\n \"/attribute(square, macro, 1)\"\n\n \"square(a) -> b\")\n\n (test-not-nodes modules '(\"square\" \"a\"))\n\n (with-nodes ((a \"a\") (b \"b\")\n (a*a ((\"/in\" \"core\" \"*\") \"a\" \"a\"))\n (* \"*\"))\n modules\n\n (has-value-function (a) a*a `(,* ,a ,a))\n (test-simple-binding a*a b))))\n\n(test macro-multiple-arguments\n \"Test macros with multiple arguments.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, list, ->, if)\"\n\n \"'(x) : list(/quote(/quote), x)\"\n \"/attribute(', macro, 1)\"\n\n \"!-(a, b) : list('(if), a, b)\"\n \"/attribute(!-, macro, 1)\"\n \"/operator(!-, 25, left)\"\n\n \"a !- b -> out\")\n\n (test-not-nodes modules '(\"!-\" \"a\" \"b\"))\n\n (with-nodes ((a \"a\") (b \"b\") (out \"out\")\n (a!-b ((\"/in\" \"builtin\" \"if\") \"a\" \"b\"))\n (if \"if\"))\n modules\n\n (has-value-function (a b) a!-b `(,if ,a ,b :none))\n (test-simple-binding a!-b 
out))))\n\n(test macro-keyword-arguments\n \"Test passing macro arguments by keyword\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, list)\"\n\n \"f(x) : x\"\n\n \"call(operator, operand) : list(operator, operand)\"\n \"/attribute(call, macro, True)\"\n\n \"call(operand : in1, operator : f) -> out1\")\n\n (test-not-nodes modules '(\"call\" (\":\" \"operand\" \"in1\") (\":\" \"operator\" \"f\")))\n\n (with-nodes ((in1 \"in1\") (out1 \"out1\")\n (f \"f\")\n (f-in1 (\"f\" \"in1\")))\n modules\n\n (has-value-function (in1) f-in1\n `(,f ,in1))\n\n (test-simple-binding f-in1 out1))))\n\n(test macro-arity-check-required-only\n \"Test macro arity checks with required arguments only.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, *, list)\"\n\n \"square(x) : list(/quote(*), x, x)\"\n \"/attribute(square, macro, 1)\")\n\n (signals arity-error (build \"square(x, y) -> out\"))))\n\n(test macro-arity-check-optional-not-enough\n \"Test macro optional argument arity checks with not enough arguments.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, +, list)\"\n\n \"add3(x, y, z : 1) : list(/quote(+), x, list(/quote(+), y, z))\"\n \"/attribute(add3, macro, 1)\")\n\n (signals arity-error (build \"add3(x)\"))))\n\n(test macro-arity-check-optional-too-many\n \"Test macro optional argument arity checks with too many arguments.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, +, list)\"\n\n \"1+(n, d : 1) : list(/quote(+), x, d)\"\n \"/attribute(1+, macro, 1)\")\n\n (signals arity-error (build \"1+(x, y, z)\"))))\n\n(test macro-arity-check-rest-arguments\n \"Test macro rest argument arity checks.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, cons, list)\"\n \"make-list(x, ..(xs)) : cons(/quote(list), cons(x, xs))\"\n \"/attribute(make-list, macro, 1)\"\n\n \"make-list(x, y, z) -> output\"\n\n \"/attribute(x, input, 1)\"\n \"/attribute(y, input, 1)\"\n \"/attribute(z, input, 1)\")\n\n (with-nodes ((x \"x\") (y \"y\") (z \"z\")\n (list \"list\")\n (output \"output\"))\n (finish-build)\n\n (has-value-function\n (x y z)\n output\n\n `(,list ,(argument-list (list x y z)))))))\n\n(test macro-arity-check-keyword-missing-required\n \"Test macro keyword argument arity check with missing required argument\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, +, list)\"\n\n \"1+(n, d : 1) : list(/quote(+), x, d)\"\n \"/attribute(1+, macro, 1)\")\n\n (signals arity-error (build \"1+(d : 2)\"))))\n\n(test macro-arity-check-keyword-unknown\n \"Test macro keyword argument arity check with unknown keyword\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, +, list)\"\n\n \"1+(n, d : 1) : list(/quote(+), x, d)\"\n \"/attribute(1+, macro, 1)\")\n\n (signals arity-error (build \"1+(d : 2, n : 1, delta : 100)\"))))\n\n(test macro-rest-argument-outer-nodes\n \"Test macros with rest arguments and outer nodes.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, cons, list)\"\n \"make-list(x, ..(xs)) : cons(/quote(list), cons(x, cons(y, xs)))\"\n \"/attribute(make-list, macro, 1)\"\n\n \"/attribute(a, input, 1)\"\n \"/attribute(b, input, 1)\"\n \"/attribute(c, input, 1)\"\n \"/attribute(y, input, 1)\")\n\n (signals macro-outer-node-error (build \"make-list(a, b, c) -> output\"))))\n\n(test macro-build-meta-node-multiple-times\n \"Test building a meta-node multiple times when building macro.\"\n\n 
(with-module-table modules\n (build-core-module)\n (build \"/import(core, if, -, +, *, <)\"\n\t \"fact(n) : { 1 -> start; iter(n, acc) : if(n < start, acc, iter(n - 1, acc * n)); iter(n,1) }\"\n\n\t \"eval-fact(n) : fact(n)\"\n\t \"/attribute(eval-fact, macro, 1)\"\n\n\t \"fact(in) + eval-fact(3) -> output\"\n\t \"/attribute(in, input, 1)\")\n\n (with-nodes ((in \"in\") (output \"output\")\n\t\t (fact \"fact\") (+ \"+\"))\n\t(finish-build)\n\n (has-value-function (in) output\n `(,+ (,fact ,in) 6))\n\n (with-nodes ((iter \"iter\") (n \"n\")) (definition fact)\n (has-value-function (n) fact\n `(,iter ,n 1))))))\n\n(test macro-error-compile-loop\n \"Test error when compilation loop detected in macro compilation.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core, list)\"\n \"test(x,y) : list(&(->), x, test(x,y))\"\n \"/attribute(test, macro, 1)\")\n\n (with-nodes ((test \"test\")) modules\n (signals compile-meta-node-loop-error (call-meta-node test '(1 2))))))\n\n(test macro-error-malformed-list\n \"Test error when macro returns a malformed list.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core)\"\n\n\t \"mac(x, y) : cons(x, y)\"\n\t \"/attribute(mac, macro, 1)\"\n\n \"f(x) : x\"\n \"target-f(s, expr) : cons(s, head(tail(expr)))\"\n \"/attribute(f, target-transform, target-f)\")\n\n (signals tridash-fail (build \"mac(1, 2)\"))\n (signals tridash-fail (build \"a -> f(b)\"))))\n\n(test macro-error-return-empty-list-failure\n \"Test error when macro returns empty list failure.\"\n\n (with-module-table modules\n (build-core-module)\n (build \"/import(core)\"\n\n \"mac(x) : list(x, Empty!)\"\n \"/attribute(mac, macro, 1)\"\n\n \"f(x) : x\"\n \"target-f(s, expr) : list(s, Empty!)\"\n \"/attribute(f, target-transform, target-f)\")\n\n (signals tridash-fail (build \"mac(a)\"))\n (signals tridash-fail (build \"x -> f(y)\"))))\n\n\f\n Test Target Node Transform Macros\n\n(test target-transform-single-argument\n \"Test target transformation with single argument.\"\n\n (with-module-table modules\n (build-core-module)\n (build-source-file #p\"./test/inputs/macros/target-transform-1.trd\" modules)\n\n (with-nodes ((in \"in\")\n (out \"out\")\n (int \"int\"))\n\n (finish-build)\n\n (has-value-function (in) out `(,int ,in)))))\n\n(test target-transform-multiple-arguments\n \"Test target transformation with multiple arguments.\"\n\n (with-module-table modules\n (build-core-module)\n (build-source-file #p\"./test/inputs/macros/target-transform-2.trd\" modules)\n\n (with-nodes ((in \"in\") (a \"a\") (b \"b\")\n (- \"-\"))\n (finish-build)\n\n (has-value-function (in a) b `(,- ,in ,a)))))\n\n(test target-transform-arity-check-not-enough\n \"Test arity checks in target transform with not enough arguments.\"\n\n (with-module-table modules\n (build-core-module)\n (signals arity-error\n (build-source-file #p\"./test/inputs/macros/target-transform-3.trd\" modules))))\n\n(test target-transform-arity-check-too-many\n \"Test arity checks in target transform with too many arguments.\"\n\n (with-module-table modules\n (build-core-module)\n (signals arity-error\n (build-source-file #p\"./test/inputs/macros/target-transform-4.trd\" modules))))\n\n(test target-transform-arity-check-rest-argument\n \"Test arity checks in target transform with rest arguments.\"\n\n (with-module-table modules\n (build-core-module)\n (build-source-file #p\"./test/inputs/macros/target-transform-5.trd\" modules)\n\n (with-nodes ((in \"in\") (a \"a\") (b \"b\")\n (- \"-\"))\n 
(finish-build)\n\n (has-value-function (in a) b `(,- ,in ,a)))))\n\n(test target-transform-arity-check-optional-and-rest\n \"Test arity checks in target transform with optional and rest arguments.\"\n\n (with-module-table modules\n (build-core-module)\n (build-source-file #p\"./test/inputs/macros/target-transform-6.trd\" modules)\n\n (with-nodes ((in \"in\") (a \"a\") (b \"b\")\n (- \"-\"))\n (finish-build)\n\n (has-value-function (in a) b `(,- ,in ,a)))))\n\n(test target-transform-arity-check-optional-extra\n \"Test arity checks in target transform with optional extra arguments.\"\n\n (with-module-table modules\n (build-core-module)\n (build-source-file #p\"./test/inputs/macros/target-transform-7.trd\" modules)\n\n (with-nodes ((in \"in\") (a \"a\") (b \"b\")\n (- \"-\"))\n (finish-build)\n\n (has-value-function (in a) b `(,- ,in ,a)))))\n\n\f\n\n(test attribute-processor-meta-node\n \"Test attribute processor with meta-node.\"\n\n (with-module-table modules\n (build-source-file #p\"./test/inputs/macros/attribute-processor-1.trd\" modules)\n\n (with-nodes ((f \"f\") (match-f \"match-f\"))\n modules\n\n (is (eq match-f (attribute :matcher f))))))\n\n(test attribute-processor-external-meta-node\n \"Test attribute processor with external meta-node\"\n\n (with-module-table modules\n (build-source-file #p\"./test/inputs/macros/attribute-processor-2.trd\" modules)\n\n (with-nodes ((f \"f\") (match-f \"match-f\"))\n modules\n\n (is (eq match-f (attribute :matcher f))))))\n"}}},{"rowIdx":610250,"cells":{"_id":{"kind":"string","value":"52096dd415375980c51bdb241c422076cda95fd89885d7a1deae03274dfa66d3"},"repository":{"kind":"string","value":"cstar/ec2nodefinder"},"name":{"kind":"string","value":"awssign.erl"},"content":{"kind":"string","value":"-module(awssign).\n-author('').\n-include_lib(\"xmerl/include/xmerl.hrl\").\n-export([sign_and_send/5, describe_instances/5]).\n\nsign_and_send(Params, Host,APIVersion, AccessKey, SecretKey)->\n SortedParams = sort([{\"Timestamp\", create_timestamp()},\n {\"SignatureVersion\", \"2\"},\n {\"Version\", APIVersion},\n {\"AWSAccessKeyId\", AccessKey}, \n {\"SignatureMethod\", \"HmacSHA1\"}\n |Params]),\n EncodedParams = lists:foldl(\n fun({K,V}, Acc)->\n [url_encode(K) ++ \"=\" ++ url_encode(V)| Acc]\n end,[], SortedParams),\n QueryString = string:join(EncodedParams, \"&\"),\n ToSign = \"GET\\n\" ++ Host ++ \"\\n/\\n\" ++ QueryString,\n Signature = url_encode(\n binary_to_list(\n base64:encode(crypto:sha_mac(SecretKey, ToSign)))\n ),\n URL = \"http://\"++ Host ++ \"/?\" ++ QueryString ++ \"&Signature=\" ++ Signature,\n case http:request(URL) of\n {ok, {{_Version, 200, _ReasonPhrase}, _Headers, Body}} -> {ok, Body};\n {ok, {{_Version, Code, ReasonPhrase}, _Headers, _Body}} -> {error, {Code, ReasonPhrase} }\n end.\n\n% lifted from \ncreate_timestamp() -> create_timestamp(calendar:now_to_universal_time(now())).\ncreate_timestamp({{Y, M, D}, {H, Mn, S}}) ->\n\tto_str(Y) ++ \"-\" ++ to_str(M) ++ \"-\" ++ to_str(D) ++ \"T\" ++\n\tto_str(H) ++ \":\" ++ to_str(Mn)++ \":\" ++ to_str(S) ++ \"Z\".\nadd_zeros(L) -> if length(L) == 1 -> [$0|L]; true -> L end.\nto_str(L) -> add_zeros(integer_to_list(L)).\n\n \nsort(Params)->\n lists:sort(fun({A, _}, {X, _}) -> A > X end, Params).\n \ndescribe_instances(SecurityGroup, Host,APIVersion, AccessKey, SecretKey)->\n Params =[ {\"Action\", \"DescribeInstances\"}],\n Res = sign_and_send(Params, Host, APIVersion, AccessKey, SecretKey),\n case Res of\n {ok, XML} ->\n {R,_} = xmerl_scan:string(XML),\n [ V#xmlText.value\n || V<- 
xmerl_xpath:string(\"/DescribeInstancesResponse/reservationSet/item[ groupSet/item/groupId = \\\"\"\n ++ SecurityGroup ++ \"\\\"]/instancesSet/item/privateDnsName/text()\", R)];\n {error, E} ->\n erlang:error ({ describe_instances_failed, E }),\n []\n end.\n\n% lifted from the ever precious yaws_utils.erl \ninteger_to_hex(I) ->\n case catch erlang:integer_to_list(I, 16) of\n {'EXIT', _} ->\n old_integer_to_hex(I);\n Int ->\n Int\n end.\n\nold_integer_to_hex(I) when I<10 ->\n integer_to_list(I);\nold_integer_to_hex(I) when I<16 ->\n [I-10+$A];\nold_integer_to_hex(I) when I>=16 ->\n N = trunc(I/16),\n old_integer_to_hex(N) ++ old_integer_to_hex(I rem 16).\nurl_encode([H|T]) ->\n if\n H >= $a, $z >= H ->\n [H|url_encode(T)];\n H >= $A, $Z >= H ->\n [H|url_encode(T)];\n H >= $0, $9 >= H ->\n [H|url_encode(T)];\n H == $_; H == $.; H == $-; H == $/ -> % FIXME: more..\n [H|url_encode(T)];\n true ->\n case integer_to_hex(H) of\n [X, Y] ->\n [$%, X, Y | url_encode(T)];\n [X] ->\n, $ 0 , X | url_encode(T ) ]\n end\n end;\n\nurl_encode([]) ->\n []."},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/cstar/ec2nodefinder/42534509b88120d5581ad4a4e822bb806f3b950f/src/awssign.erl"},"language":{"kind":"string","value":"erlang"},"comments":{"kind":"string","value":" lifted from \n lifted from the ever precious yaws_utils.erl \n FIXME: more..\n, X, Y | url_encode(T)];"},"code":{"kind":"string","value":"-module(awssign).\n-author('').\n-include_lib(\"xmerl/include/xmerl.hrl\").\n-export([sign_and_send/5, describe_instances/5]).\n\nsign_and_send(Params, Host,APIVersion, AccessKey, SecretKey)->\n SortedParams = sort([{\"Timestamp\", create_timestamp()},\n {\"SignatureVersion\", \"2\"},\n {\"Version\", APIVersion},\n {\"AWSAccessKeyId\", AccessKey}, \n {\"SignatureMethod\", \"HmacSHA1\"}\n |Params]),\n EncodedParams = lists:foldl(\n fun({K,V}, Acc)->\n [url_encode(K) ++ \"=\" ++ url_encode(V)| Acc]\n end,[], SortedParams),\n QueryString = string:join(EncodedParams, \"&\"),\n ToSign = \"GET\\n\" ++ Host ++ \"\\n/\\n\" ++ QueryString,\n Signature = url_encode(\n binary_to_list(\n base64:encode(crypto:sha_mac(SecretKey, ToSign)))\n ),\n URL = \"http://\"++ Host ++ \"/?\" ++ QueryString ++ \"&Signature=\" ++ Signature,\n case http:request(URL) of\n {ok, {{_Version, 200, _ReasonPhrase}, _Headers, Body}} -> {ok, Body};\n {ok, {{_Version, Code, ReasonPhrase}, _Headers, _Body}} -> {error, {Code, ReasonPhrase} }\n end.\n\ncreate_timestamp() -> create_timestamp(calendar:now_to_universal_time(now())).\ncreate_timestamp({{Y, M, D}, {H, Mn, S}}) ->\n\tto_str(Y) ++ \"-\" ++ to_str(M) ++ \"-\" ++ to_str(D) ++ \"T\" ++\n\tto_str(H) ++ \":\" ++ to_str(Mn)++ \":\" ++ to_str(S) ++ \"Z\".\nadd_zeros(L) -> if length(L) == 1 -> [$0|L]; true -> L end.\nto_str(L) -> add_zeros(integer_to_list(L)).\n\n \nsort(Params)->\n lists:sort(fun({A, _}, {X, _}) -> A > X end, Params).\n \ndescribe_instances(SecurityGroup, Host,APIVersion, AccessKey, SecretKey)->\n Params =[ {\"Action\", \"DescribeInstances\"}],\n Res = sign_and_send(Params, Host, APIVersion, AccessKey, SecretKey),\n case Res of\n {ok, XML} ->\n {R,_} = xmerl_scan:string(XML),\n [ V#xmlText.value\n || V<- xmerl_xpath:string(\"/DescribeInstancesResponse/reservationSet/item[ groupSet/item/groupId = \\\"\"\n ++ SecurityGroup ++ \"\\\"]/instancesSet/item/privateDnsName/text()\", R)];\n {error, E} ->\n erlang:error ({ describe_instances_failed, E }),\n []\n end.\n\ninteger_to_hex(I) ->\n case catch erlang:integer_to_list(I, 16) of\n 
{'EXIT', _} ->\n old_integer_to_hex(I);\n Int ->\n Int\n end.\n\nold_integer_to_hex(I) when I<10 ->\n integer_to_list(I);\nold_integer_to_hex(I) when I<16 ->\n [I-10+$A];\nold_integer_to_hex(I) when I>=16 ->\n N = trunc(I/16),\n old_integer_to_hex(N) ++ old_integer_to_hex(I rem 16).\nurl_encode([H|T]) ->\n if\n H >= $a, $z >= H ->\n [H|url_encode(T)];\n H >= $A, $Z >= H ->\n [H|url_encode(T)];\n H >= $0, $9 >= H ->\n [H|url_encode(T)];\n [H|url_encode(T)];\n true ->\n case integer_to_hex(H) of\n [X, Y] ->\n [X] ->\n, $ 0 , X | url_encode(T ) ]\n end\n end;\n\nurl_encode([]) ->\n []."}}},{"rowIdx":610251,"cells":{"_id":{"kind":"string","value":"bc49153ea403dca985ea28d8ecf0e5ce1fa3402b9e79b81628f06680e6d03f53"},"repository":{"kind":"string","value":"gentoo-haskell/hackport"},"name":{"kind":"string","value":"Host.hs"},"content":{"kind":"string","value":"module Portage.Host\n ( getInfo -- :: IO [(String, String)]\n , LocalInfo(..)\n ) where\n\nimport Util (run_cmd)\nimport qualified Data.List.Split as DLS\nimport Data.Maybe (fromJust, isJust, mapMaybe)\n\nimport qualified System.Directory as D\nimport System.FilePath (())\n\nimport System.IO\n\ndata LocalInfo =\n LocalInfo { distfiles_dir :: String\n , overlay_list :: [FilePath]\n , portage_dir :: FilePath\n } deriving (Read, Show)\n\ndefaultInfo :: LocalInfo\ndefaultInfo = LocalInfo { distfiles_dir = \"/usr/portage/distfiles\"\n , overlay_list = []\n , portage_dir = \"/usr/portage\"\n }\n\n query and then emerge\ngetInfo :: IO LocalInfo\ngetInfo = fromJust `fmap`\n performMaybes [ readConfig\n , performMaybes [ getPaludisInfo\n , askPortageq\n , return (Just defaultInfo)\n ] >>= showAnnoyingWarning\n ]\n where performMaybes [] = return Nothing\n performMaybes (act:acts) =\n do r <- act\n if isJust r\n then return r\n else performMaybes acts\n\nshowAnnoyingWarning :: Maybe LocalInfo -> IO (Maybe LocalInfo)\nshowAnnoyingWarning info = do\n hPutStr stderr $ unlines [ \"-- Consider creating ~/\" ++ hackport_config ++ \" file with contents:\"\n , show info\n , \"-- It will speed hackport startup time a bit.\"\n ]\n return info\n\n-- relative to home dir\nhackport_config :: FilePath\nhackport_config = \".hackport\" \"repositories\"\n\n--------------------------\n-- fastest: config reading\n--------------------------\nreadConfig :: IO (Maybe LocalInfo)\nreadConfig =\n do home_dir <- D.getHomeDirectory\n let config_path = home_dir hackport_config\n exists <- D.doesFileExist config_path\n if exists then read <$> readFile config_path else return Nothing\n\n----------\n-- Paludis\n----------\n\ngetPaludisInfo :: IO (Maybe LocalInfo)\ngetPaludisInfo = fmap parsePaludisInfo <$> run_cmd \"cave info\"\n\nparsePaludisInfo :: String -> LocalInfo\nparsePaludisInfo text =\n let chunks = DLS.splitOn [\"\"] . 
lines $ text\n repositories = mapMaybe parseRepository chunks\n in fromJust (mkLocalInfo repositories)\n where\n parseRepository :: [String] -> Maybe (String, (String, String))\n parseRepository [] = Nothing\n parseRepository (firstLine:lns) = do\n name <- case words firstLine of\n [\"Repository\", nm] -> return (init nm)\n _ -> fail \"not a repository chunk\"\n let dict = [ (head ln, unwords (tail ln)) | ln <- map words lns ]\n location <- lookup \"location\" dict\n distfiles <- lookup \"distdir\" dict\n return (name, (location, distfiles))\n\n mkLocalInfo :: [(String, (String, String))] -> Maybe LocalInfo\n mkLocalInfo repos = do\n (gentooLocation, gentooDistfiles) <- lookup \"gentoo\" repos\n let overlays = [ loc | (_, (loc, _dist)) <- repos ]\n return (LocalInfo\n { distfiles_dir = gentooDistfiles\n , portage_dir = gentooLocation\n , overlay_list = overlays\n })\n\n---------\n-- Emerge\n---------\n\naskPortageq :: IO (Maybe LocalInfo)\naskPortageq = do\n distdir <- run_cmd \"portageq distdir\"\n portdir <- run_cmd \"portageq get_repo_path / gentoo\"\n hsRepo <- run_cmd \"portageq get_repo_path / haskell\"\nThere really ought to be both distdir and ,\n --but maybe no hsRepo defined yet.\n let info = if Nothing `elem` [distdir,portdir]\n then Nothing\n else Just LocalInfo\n { distfiles_dir = grab distdir\n , portage_dir = grab portdir\n , overlay_list = iffy hsRepo\n }\n --init: kill newline char\n where grab = init . fromJust\n iffy Nothing = []\n iffy (Just repo) = [init repo]\n return info\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/gentoo-haskell/hackport/61baf96390e7ddc071f9a49fc78919683988c0ca/src/Portage/Host.hs"},"language":{"kind":"string","value":"haskell"},"comments":{"kind":"string","value":" :: IO [(String, String)]\n relative to home dir\n------------------------\n fastest: config reading\n------------------------\n--------\n Paludis\n--------\n-------\n Emerge\n-------\nbut maybe no hsRepo defined yet.\ninit: kill newline char"},"code":{"kind":"string","value":"module Portage.Host\n , LocalInfo(..)\n ) where\n\nimport Util (run_cmd)\nimport qualified Data.List.Split as DLS\nimport Data.Maybe (fromJust, isJust, mapMaybe)\n\nimport qualified System.Directory as D\nimport System.FilePath (())\n\nimport System.IO\n\ndata LocalInfo =\n LocalInfo { distfiles_dir :: String\n , overlay_list :: [FilePath]\n , portage_dir :: FilePath\n } deriving (Read, Show)\n\ndefaultInfo :: LocalInfo\ndefaultInfo = LocalInfo { distfiles_dir = \"/usr/portage/distfiles\"\n , overlay_list = []\n , portage_dir = \"/usr/portage\"\n }\n\n query and then emerge\ngetInfo :: IO LocalInfo\ngetInfo = fromJust `fmap`\n performMaybes [ readConfig\n , performMaybes [ getPaludisInfo\n , askPortageq\n , return (Just defaultInfo)\n ] >>= showAnnoyingWarning\n ]\n where performMaybes [] = return Nothing\n performMaybes (act:acts) =\n do r <- act\n if isJust r\n then return r\n else performMaybes acts\n\nshowAnnoyingWarning :: Maybe LocalInfo -> IO (Maybe LocalInfo)\nshowAnnoyingWarning info = do\n hPutStr stderr $ unlines [ \"-- Consider creating ~/\" ++ hackport_config ++ \" file with contents:\"\n , show info\n , \"-- It will speed hackport startup time a bit.\"\n ]\n return info\n\nhackport_config :: FilePath\nhackport_config = \".hackport\" \"repositories\"\n\nreadConfig :: IO (Maybe LocalInfo)\nreadConfig =\n do home_dir <- D.getHomeDirectory\n let config_path = home_dir hackport_config\n exists <- D.doesFileExist config_path\n if exists then read 
<$> readFile config_path else return Nothing\n\n\ngetPaludisInfo :: IO (Maybe LocalInfo)\ngetPaludisInfo = fmap parsePaludisInfo <$> run_cmd \"cave info\"\n\nparsePaludisInfo :: String -> LocalInfo\nparsePaludisInfo text =\n let chunks = DLS.splitOn [\"\"] . lines $ text\n repositories = mapMaybe parseRepository chunks\n in fromJust (mkLocalInfo repositories)\n where\n parseRepository :: [String] -> Maybe (String, (String, String))\n parseRepository [] = Nothing\n parseRepository (firstLine:lns) = do\n name <- case words firstLine of\n [\"Repository\", nm] -> return (init nm)\n _ -> fail \"not a repository chunk\"\n let dict = [ (head ln, unwords (tail ln)) | ln <- map words lns ]\n location <- lookup \"location\" dict\n distfiles <- lookup \"distdir\" dict\n return (name, (location, distfiles))\n\n mkLocalInfo :: [(String, (String, String))] -> Maybe LocalInfo\n mkLocalInfo repos = do\n (gentooLocation, gentooDistfiles) <- lookup \"gentoo\" repos\n let overlays = [ loc | (_, (loc, _dist)) <- repos ]\n return (LocalInfo\n { distfiles_dir = gentooDistfiles\n , portage_dir = gentooLocation\n , overlay_list = overlays\n })\n\n\naskPortageq :: IO (Maybe LocalInfo)\naskPortageq = do\n distdir <- run_cmd \"portageq distdir\"\n portdir <- run_cmd \"portageq get_repo_path / gentoo\"\n hsRepo <- run_cmd \"portageq get_repo_path / haskell\"\nThere really ought to be both distdir and ,\n let info = if Nothing `elem` [distdir,portdir]\n then Nothing\n else Just LocalInfo\n { distfiles_dir = grab distdir\n , portage_dir = grab portdir\n , overlay_list = iffy hsRepo\n }\n where grab = init . fromJust\n iffy Nothing = []\n iffy (Just repo) = [init repo]\n return info\n"}}},{"rowIdx":610252,"cells":{"_id":{"kind":"string","value":"7e45d3afedd144feee769d4d2d1e918df0da9d7a490075a9bf08f7bd1b16e4dc"},"repository":{"kind":"string","value":"ghcjs/jsaddle-dom"},"name":{"kind":"string","value":"ConvolverNode.hs"},"content":{"kind":"string","value":"# LANGUAGE PatternSynonyms #\n-- For HasCallStack compatibility\n{-# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #-}\n# OPTIONS_GHC -fno - warn - unused - imports #\nmodule JSDOM.Generated.ConvolverNode\n (setBuffer, getBuffer, getBufferUnsafe, getBufferUnchecked,\n setNormalize, getNormalize, ConvolverNode(..), gTypeConvolverNode)\n where\nimport Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))\nimport qualified Prelude (error)\nimport Data.Typeable (Typeable)\nimport Data.Traversable (mapM)\nimport Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))\nimport Data.Int (Int64)\nimport Data.Word (Word, Word64)\nimport JSDOM.Types\nimport Control.Applicative ((<$>))\nimport Control.Monad (void)\nimport Control.Lens.Operators ((^.))\nimport JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)\nimport JSDOM.Enums\n\n | < -US/docs/Web/API/ConvolverNode.buffer Mozilla ConvolverNode.buffer documentation >\nsetBuffer ::\n (MonadDOM m) => ConvolverNode -> Maybe AudioBuffer -> m ()\nsetBuffer self val = liftDOM (self ^. jss \"buffer\" (toJSVal val))\n\n | < -US/docs/Web/API/ConvolverNode.buffer Mozilla ConvolverNode.buffer documentation >\ngetBuffer :: (MonadDOM m) => ConvolverNode -> m (Maybe AudioBuffer)\ngetBuffer self = liftDOM ((self ^. 
js \"buffer\") >>= fromJSVal)\n\n | < -US/docs/Web/API/ConvolverNode.buffer Mozilla ConvolverNode.buffer documentation >\ngetBufferUnsafe ::\n (MonadDOM m, HasCallStack) => ConvolverNode -> m AudioBuffer\ngetBufferUnsafe self\n = liftDOM\n (((self ^. js \"buffer\") >>= fromJSVal) >>=\n maybe (Prelude.error \"Nothing to return\") return)\n\n | < -US/docs/Web/API/ConvolverNode.buffer Mozilla ConvolverNode.buffer documentation >\ngetBufferUnchecked ::\n (MonadDOM m) => ConvolverNode -> m AudioBuffer\ngetBufferUnchecked self\n = liftDOM ((self ^. js \"buffer\") >>= fromJSValUnchecked)\n\n | < -US/docs/Web/API/ConvolverNode.normalize Mozilla ConvolverNode.normalize documentation >\nsetNormalize :: (MonadDOM m) => ConvolverNode -> Bool -> m ()\nsetNormalize self val\n = liftDOM (self ^. jss \"normalize\" (toJSVal val))\n\n | < -US/docs/Web/API/ConvolverNode.normalize Mozilla ConvolverNode.normalize documentation >\ngetNormalize :: (MonadDOM m) => ConvolverNode -> m Bool\ngetNormalize self\n = liftDOM ((self ^. js \"normalize\") >>= valToBool)\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/ghcjs/jsaddle-dom/5f5094277d4b11f3dc3e2df6bb437b75712d268f/src/JSDOM/Generated/ConvolverNode.hs"},"language":{"kind":"string","value":"haskell"},"comments":{"kind":"string","value":" For HasCallStack compatibility\n# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #"},"code":{"kind":"string","value":"# LANGUAGE PatternSynonyms #\n# OPTIONS_GHC -fno - warn - unused - imports #\nmodule JSDOM.Generated.ConvolverNode\n (setBuffer, getBuffer, getBufferUnsafe, getBufferUnchecked,\n setNormalize, getNormalize, ConvolverNode(..), gTypeConvolverNode)\n where\nimport Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))\nimport qualified Prelude (error)\nimport Data.Typeable (Typeable)\nimport Data.Traversable (mapM)\nimport Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))\nimport Data.Int (Int64)\nimport Data.Word (Word, Word64)\nimport JSDOM.Types\nimport Control.Applicative ((<$>))\nimport Control.Monad (void)\nimport Control.Lens.Operators ((^.))\nimport JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)\nimport JSDOM.Enums\n\n | < -US/docs/Web/API/ConvolverNode.buffer Mozilla ConvolverNode.buffer documentation >\nsetBuffer ::\n (MonadDOM m) => ConvolverNode -> Maybe AudioBuffer -> m ()\nsetBuffer self val = liftDOM (self ^. jss \"buffer\" (toJSVal val))\n\n | < -US/docs/Web/API/ConvolverNode.buffer Mozilla ConvolverNode.buffer documentation >\ngetBuffer :: (MonadDOM m) => ConvolverNode -> m (Maybe AudioBuffer)\ngetBuffer self = liftDOM ((self ^. js \"buffer\") >>= fromJSVal)\n\n | < -US/docs/Web/API/ConvolverNode.buffer Mozilla ConvolverNode.buffer documentation >\ngetBufferUnsafe ::\n (MonadDOM m, HasCallStack) => ConvolverNode -> m AudioBuffer\ngetBufferUnsafe self\n = liftDOM\n (((self ^. js \"buffer\") >>= fromJSVal) >>=\n maybe (Prelude.error \"Nothing to return\") return)\n\n | < -US/docs/Web/API/ConvolverNode.buffer Mozilla ConvolverNode.buffer documentation >\ngetBufferUnchecked ::\n (MonadDOM m) => ConvolverNode -> m AudioBuffer\ngetBufferUnchecked self\n = liftDOM ((self ^. 
js \"buffer\") >>= fromJSValUnchecked)\n\n | < -US/docs/Web/API/ConvolverNode.normalize Mozilla ConvolverNode.normalize documentation >\nsetNormalize :: (MonadDOM m) => ConvolverNode -> Bool -> m ()\nsetNormalize self val\n = liftDOM (self ^. jss \"normalize\" (toJSVal val))\n\n | < -US/docs/Web/API/ConvolverNode.normalize Mozilla ConvolverNode.normalize documentation >\ngetNormalize :: (MonadDOM m) => ConvolverNode -> m Bool\ngetNormalize self\n = liftDOM ((self ^. js \"normalize\") >>= valToBool)\n"}}},{"rowIdx":610253,"cells":{"_id":{"kind":"string","value":"238edc5ab7a83ebe55c5f21ab46c7d5b18764a38ae8a2a9fc3e37b27511d2e31"},"repository":{"kind":"string","value":"realark/vert"},"name":{"kind":"string","value":"game-scene.lisp"},"content":{"kind":"string","value":"(in-package :recurse.vert)\n\n@export-class\n(defclass game-scene (scene gl-pipeline)\n ((scene-background :initform nil\n :initarg :background\n :type scene-background\n :accessor scene-background)\n (scene-audio-state :initform nil\n :documentation \"Used to resume audio-state when the scene deactivates.\")\n (scene-music :initarg :music\n :initform nil\n :accessor scene-music\n :documentation \"Music which will play when the scene initializes.\")\n (width :initarg :width\n :initform (error \":width required\")\n :reader width)\n (height :initarg :height\n :initform (error \":height required\")\n :reader height)\n (live-object-radius\n :initarg :live-object-radius\n :initform #.(* 15 16)\n :documentation \"Make a rect centered on camera.\nThe value of this slot will be the distance between the live area and camera rect.\nWhen camera moves outside this rect, rebuild objects to render and update\nThis is an optimization so we don't have to rebuild the render and update queues every frame.\")\n (render-queue :initform (make-instance 'render-queue\n :render-priority -1)\n :documentation \"Render scene objects and backgrounds.\")\n (updatable-objects :initform (make-array 100\n :adjustable t\n :fill-pointer 0\n :element-type '(or null game-object)\n :initial-element nil))\n (updating-p :initform nil :reader updating-p)\n (pending-adds :initform (make-array 10\n :adjustable t\n :fill-pointer 0\n :element-type '(or null game-object)\n :initial-element nil)\n :documentation \"Objects to be added to scene at the start of the next frame.\")\n (pending-removes :initform (make-array 10\n :adjustable t\n :fill-pointer 0\n :element-type '(or null game-object)\n :initial-element nil)\n :documentation \"Objects to be removed from scene at the start of the next frame.\")\n (live-object-rebuild-camera-position\n :initform (vector2)\n :documentation \"Centered camera position used to compute render-queue rebuilds.\")\n (reset-instance-renderers\n :initform (make-array 5\n :adjustable t\n :fill-pointer 0)\n :documentation \"Sequence of instance renderers which have been reset in the current frame.\")\n (spatial-partition :initform nil\n :documentation \"Optimized spatial partition containing every object in the scene.\"\n :reader spatial-partition))\n (:documentation \"A scene which updates and renders game-objects.\"))\n\n(defmethod initialize-instance :after ((game-scene game-scene) &rest args)\n (declare (ignore args))\n (with-slots (spatial-partition render-queue) game-scene\n (gl-pipeline-add game-scene render-queue)\n (setf spatial-partition\n (make-instance 'quadtree))))\n\n@export\n(defgeneric add-to-scene (scene object)\n (:documentation \"Add an object to the game scene\")\n (:method ((scene scene) (overlay overlay))\n (with-slots 
(scene-overlays render-queue) scene\n (unless (find overlay scene-overlays)\n (vector-push-extend overlay scene-overlays)\n overlay)))\n (:method ((scene game-scene) (overlay overlay))\n (with-slots (scene-overlays render-queue) scene\n (unless (find overlay scene-overlays)\n (vector-push-extend overlay scene-overlays)\n ;; (render-queue-add render-queue overlay)\n overlay)))\n (:method ((scene game-scene) (object game-object))\n (if (updating-p scene)\n (with-slots (pending-adds pending-removes) scene\n (if (in-scene-p scene object)\n (when (find object pending-removes)\n (log:debug \"cancel ~A for scene remove\" object)\n (setf pending-removes (delete object pending-removes))\n object)\n (unless (find object pending-adds)\n (log:debug \"queuing ~A for scene add\" object)\n (vector-push-extend object pending-adds)\n object)))\n ;; fast path for adding objects outside of scene update (i.e. initialization)\n (%%add-object-to-scene scene object))))\n\n@export\n(defgeneric remove-from-scene (scene object)\n (:documentation \"Remove an object from the game scene\")\n (:method ((scene scene) (overlay overlay))\n (with-slots (scene-overlays) scene\n (when (find overlay scene-overlays)\n (setf scene-overlays (delete overlay scene-overlays))\n overlay)))\n (:method ((scene game-scene) (overlay overlay))\n (with-slots (scene-overlays) scene\n (when (find overlay scene-overlays)\n (setf scene-overlays (delete overlay scene-overlays))\n ;; (render-queue-remove (slot-value scene 'render-queue) overlay)\n overlay)))\n (:method ((scene game-scene) (object game-object))\n (with-slots (pending-adds pending-removes) scene\n (if (in-scene-p scene object)\n (unless (find object pending-removes)\n (log:debug \"queuing ~A for scene removal\" object)\n (vector-push-extend object pending-removes)\n (unless (updating-p scene)\n (%run-pending-removes scene))\n object)\n (when (find object pending-adds)\n (log:debug \"cancel ~A for scene add\" object)\n (setf pending-adds (delete object pending-adds))\n object)))))\n\n(defmethod scene-activated ((scene game-scene))\n (with-slots ((state scene-audio-state)) scene\n (if state\n (audio-player-load-state *audio* state)\n (audio-player-stop-all *audio*))))\n\n(defmethod scene-deactivated ((scene game-scene))\n (with-slots ((state scene-audio-state)) scene\n (with-sdl-mixer-lock-held\n (unless state\n (setf state (audio-player-copy-state *audio*)))\n (audio-player-copy-state *audio* state)\n (audio-player-stop-music *audio*)\n (audio-player-stop-sfx *audio*)))\n (values))\n\n@export\n(defun scene-teleport-object (scene object &optional new-x new-y new-z)\n \"Move OBJECT within SCENE to the new coordinates instantly. 
OBJECT's position will be recycled internally so it will instantly appear in the new position with no position interpolation.\"\n (when new-x\n (setf (x object) new-x))\n (when new-y\n (setf (y object) new-y))\n (when new-z\n (setf (z object) new-z))\n (recycle object)\n (when (%in-live-object-area-p scene object)\n (with-slots (render-queue updatable-objects) scene\n (render-queue-add render-queue object)\n (unless (find object updatable-objects :test #'eq)\n (vector-push-extend object updatable-objects))))\n object)\n\n(defgeneric found-object-to-update (game-scene game-object)\n (:documentation \"for subclasses to hook object updates\")\n (:method ((scene game-scene) game-object)))\n\n(defun %%add-object-to-scene (scene object)\n (declare (optimize (speed 3))\n (game-scene scene)\n (game-object object))\n (with-slots (spatial-partition render-queue updatable-objects) scene\n (when (start-tracking spatial-partition object)\n (event-subscribe object scene killed)\n (when (%in-live-object-area-p scene object)\n (render-queue-add render-queue object)\n (unless (find object updatable-objects :test #'eq)\n (vector-push-extend object updatable-objects)))\n object)))\n\n(defun %run-pending-removes (scene)\n (declare (optimize (speed 3))\n (game-scene scene))\n (with-slots (pending-removes spatial-partition render-queue updatable-objects) scene\n (declare (vector pending-removes updatable-objects))\n (when (> (length pending-removes) 0)\n (loop :for removed-object :across pending-removes :do\n (event-unsubscribe removed-object scene killed)\n (stop-tracking spatial-partition removed-object)\n (when (%in-live-object-area-p scene removed-object)\n (render-queue-remove render-queue removed-object)\n (setf updatable-objects (delete removed-object updatable-objects)))\n (log:debug \"removed ~A from scene\" removed-object)\n :finally\n (setf (fill-pointer pending-removes) 0))))\n (values))\n\n(defun %run-pending-adds (scene)\n (declare (optimize (speed 3))\n (game-scene scene))\n (with-slots (pending-adds spatial-partition render-queue updatable-objects) scene\n (loop :for object :across pending-adds :do\n (%%add-object-to-scene scene object)\n :finally\n (setf (fill-pointer pending-adds) 0))))\n\n(defun %force-rebuild-live-objects (scene)\n (log:debug \"force live object rebuild.\")\n (with-slots (camera live-object-radius live-object-rebuild-camera-position) scene\n (if (float= 0.0 (x live-object-rebuild-camera-position))\n (setf (x live-object-rebuild-camera-position)\n (+ (width camera) live-object-radius))\n (setf (x live-object-rebuild-camera-position) 0.0))))\n\n(defun %in-live-object-area-p (scene game-object)\n \"T if OBJECT is inside SCENE's current live object area.\"\n (declare (optimize (speed 3))\n (game-scene scene)\n (game-object game-object))\n (with-slots (camera live-object-radius live-object-rebuild-camera-position) scene\n (let ((live-x-min (- (x live-object-rebuild-camera-position)\n (width camera)\n live-object-radius))\n (live-x-max (+ (x live-object-rebuild-camera-position)\n (width camera)\n live-object-radius))\n (live-y-min (- (y live-object-rebuild-camera-position)\n (height camera)\n live-object-radius))\n (live-y-max (+ (y live-object-rebuild-camera-position)\n (height camera)\n live-object-radius)))\n (multiple-value-bind (x y z w h) (world-dimensions game-object)\n (declare (ignore z)\n (single-float x y w h))\n (and (or (<= live-x-min x live-x-max)\n (<= live-x-min (+ x w) live-x-max)\n (and (<= x live-x-min)\n (>= (+ x w) live-x-max)))\n (or (<= live-y-min y 
live-y-max)\n (<= live-y-min (+ y h) live-y-max)\n (and (<= y live-y-min)\n (>= (+ y h) live-y-max))))))))\n\n(defun %rebuild-live-object-area-p (scene)\n (declare (optimize (speed 3))\n (game-scene scene))\n (block camera-moved-outside-render-area-p\n (with-slots (camera live-object-radius live-object-rebuild-camera-position) scene\n (with-accessors ((c-x x) (c-y y) (c-w width) (c-h height)) camera\n (declare (single-float c-x c-y c-w c-h))\n (let* ((camera-centered-x (+ c-x (/ c-w 2.0)))\n (camera-centered-y (+ c-y (/ c-h 2.0)))\n (delta (max\n (abs (- camera-centered-x (x live-object-rebuild-camera-position)))\n (abs (- camera-centered-y (y live-object-rebuild-camera-position))))))\n (when (>= delta live-object-radius)\n (setf (x live-object-rebuild-camera-position) camera-centered-x\n (y live-object-rebuild-camera-position) camera-centered-y)\n t))))))\n\n(defmethod update :around ((scene game-scene))\n (with-slots (updating-p) scene\n (setf updating-p t)\n (unwind-protect\n (call-next-method scene)\n (setf updating-p nil))))\n\n(defmethod update ((game-scene game-scene))\n (declare (optimize (speed 3)))\n (with-slots (live-object-rebuild-camera-position\n live-object-radius\n updatable-objects\n (queue render-queue)\n reset-instance-renderers\n (bg scene-background)\n scene-overlays\n pending-removes\n camera)\n game-scene\n (let ((rebuild-live-objects-p (%rebuild-live-object-area-p game-scene)))\n (%run-pending-removes game-scene)\n (%run-pending-adds game-scene)\n (when rebuild-live-objects-p\n (setf (fill-pointer updatable-objects) 0)\n (render-queue-reset queue)\n (setf (fill-pointer reset-instance-renderers) 0))\n ;; pre-update frame to mark positions\n (pre-update (camera game-scene))\n (when bg\n (pre-update bg)\n (when rebuild-live-objects-p\n (render-queue-add queue bg)))\n (loop :for overlay :across (the (vector overlay) scene-overlays) :do\n (pre-update overlay))\n\n ;; call super\n (call-next-method game-scene)\n\n ;; update frame\n (when rebuild-live-objects-p\n (let ((num-objects-to-update 0)\n (num-objects-to-render 0)\n (live-x-min (- (x live-object-rebuild-camera-position)\n (width camera)\n live-object-radius))\n (live-x-max (+ (x live-object-rebuild-camera-position)\n (width camera)\n live-object-radius))\n (live-y-min (- (y live-object-rebuild-camera-position)\n (height camera)\n live-object-radius))\n (live-y-max (+ (y live-object-rebuild-camera-position)\n (height camera)\n live-object-radius)))\n (declare (fixnum num-objects-to-render num-objects-to-update)\n (single-float live-x-min live-x-max live-y-min live-y-max))\n (log:debug \"rebuilding live-objects\")\n (do-spatial-partition (game-object\n (spatial-partition game-scene)\n :static-iteration-p t\n :min-x live-x-min :max-x live-x-max\n :min-y live-y-min :max-y live-y-max)\n (block found-object-to-render\n ;; TODO: counter is slightly inaccurate because spatial partitions may visit the same object twice\n ;; to fix this, the render queue should return different values if obj is already queued\n (block check-if-instance-rendered\n (if (typep game-object 'instance-rendered-drawable)\n (with-slots ((instance-renderer instance-renderer)) game-object\n (unless (find instance-renderer reset-instance-renderers)\n (incf num-objects-to-render)\n (vector-push-extend instance-renderer reset-instance-renderers)\n (instance-renderer-reset instance-renderer game-scene)))\n (incf num-objects-to-render)))\n (render-queue-add queue game-object))\n (block check-add-to-updatable-objects\n (when (and (not (typep game-object 
'static-object))\n (not (find game-object updatable-objects :test #'eq)))\n (incf num-objects-to-update)\n (vector-push-extend game-object updatable-objects))))\n (log:debug \"Rebuild complete. Found ~A objects to render and ~A objects to update\"\n num-objects-to-render\n num-objects-to-update)))\n (update (camera game-scene))\n (loop :for overlay :across (the (vector overlay) scene-overlays) :do\n (update overlay)\n #+nil\n (when rebuild-live-objects-p\n (render-queue-add render-queue overlay)))\n (when rebuild-live-objects-p\n (render-queue-add queue camera))\n (when bg\n (update bg))\n (loop :for game-object :across updatable-objects :do\n (pre-update game-object)\n (found-object-to-update game-scene game-object)\n (update game-object))\n (values))))\n\n(defmethod render ((scene game-scene) update-percent camera gl-context)\n HACK scene transitions get messed up bc rendering occurs before setup stuff is done\n (prog1 (call-next-method scene update-percent camera gl-context)\n (with-slots (scene-overlays) scene\n (loop :for overlay :across (the (vector overlay) scene-overlays) :do\n (render overlay update-percent (camera scene) gl-context))))))\n\n(defevent-handler killed ((object obb) (game-scene game-scene))\n \"\"\n (remove-from-scene game-scene object))\n\n;; TODO: remove this fn and use scheduler util directly\n@export\n(defun schedule (game-scene timestamp zero-arg-fn)\n \"When the value returned by SCENE-TICKS of GAME-SCENE equals or exceeds TIMESTAMP the ZERO-ARG-FN callback will be invoked.\"\n (scheduler-add game-scene timestamp zero-arg-fn)\n (values))\n\n@export\n(defun get-object-by-id (scene id)\n \"Return the (presumably) unique game-object identified by ID in SCENE.\"\n (declare (game-scene scene))\n (block find-object\n (do-spatial-partition (game-object (spatial-partition scene) :static-iteration-p t)\n (when (equalp (object-id game-object) id)\n (return-from find-object game-object)))))\n\n@export\n(defun in-scene-p (scene object)\n \"Return OBJECT if OBJECT is in SCENE, nil otherwise.\"\n (declare (optimize (speed 3))\n (game-scene scene))\n (block find-object\n (do-spatial-partition (obj (spatial-partition scene) :static-iteration-p t)\n (when (eq obj object)\n (return-from find-object object)))))\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/realark/vert/6b1938be9084224cf9ce1cfcb71f787f0ac14655/src/scene/game-scene.lisp"},"language":{"kind":"string","value":"lisp"},"comments":{"kind":"string","value":" (render-queue-add render-queue overlay)\n fast path for adding objects outside of scene update (i.e. 
initialization)\n (render-queue-remove (slot-value scene 'render-queue) overlay)\n pre-update frame to mark positions\n call super\n update frame\n TODO: counter is slightly inaccurate because spatial partitions may visit the same object twice\n to fix this, the render queue should return different values if obj is already queued\n TODO: remove this fn and use scheduler util directly"},"code":{"kind":"string","value":"(in-package :recurse.vert)\n\n@export-class\n(defclass game-scene (scene gl-pipeline)\n ((scene-background :initform nil\n :initarg :background\n :type scene-background\n :accessor scene-background)\n (scene-audio-state :initform nil\n :documentation \"Used to resume audio-state when the scene deactivates.\")\n (scene-music :initarg :music\n :initform nil\n :accessor scene-music\n :documentation \"Music which will play when the scene initializes.\")\n (width :initarg :width\n :initform (error \":width required\")\n :reader width)\n (height :initarg :height\n :initform (error \":height required\")\n :reader height)\n (live-object-radius\n :initarg :live-object-radius\n :initform #.(* 15 16)\n :documentation \"Make a rect centered on camera.\nThe value of this slot will be the distance between the live area and camera rect.\nWhen camera moves outside this rect, rebuild objects to render and update\nThis is an optimization so we don't have to rebuild the render and update queues every frame.\")\n (render-queue :initform (make-instance 'render-queue\n :render-priority -1)\n :documentation \"Render scene objects and backgrounds.\")\n (updatable-objects :initform (make-array 100\n :adjustable t\n :fill-pointer 0\n :element-type '(or null game-object)\n :initial-element nil))\n (updating-p :initform nil :reader updating-p)\n (pending-adds :initform (make-array 10\n :adjustable t\n :fill-pointer 0\n :element-type '(or null game-object)\n :initial-element nil)\n :documentation \"Objects to be added to scene at the start of the next frame.\")\n (pending-removes :initform (make-array 10\n :adjustable t\n :fill-pointer 0\n :element-type '(or null game-object)\n :initial-element nil)\n :documentation \"Objects to be removed from scene at the start of the next frame.\")\n (live-object-rebuild-camera-position\n :initform (vector2)\n :documentation \"Centered camera position used to compute render-queue rebuilds.\")\n (reset-instance-renderers\n :initform (make-array 5\n :adjustable t\n :fill-pointer 0)\n :documentation \"Sequence of instance renderers which have been reset in the current frame.\")\n (spatial-partition :initform nil\n :documentation \"Optimized spatial partition containing every object in the scene.\"\n :reader spatial-partition))\n (:documentation \"A scene which updates and renders game-objects.\"))\n\n(defmethod initialize-instance :after ((game-scene game-scene) &rest args)\n (declare (ignore args))\n (with-slots (spatial-partition render-queue) game-scene\n (gl-pipeline-add game-scene render-queue)\n (setf spatial-partition\n (make-instance 'quadtree))))\n\n@export\n(defgeneric add-to-scene (scene object)\n (:documentation \"Add an object to the game scene\")\n (:method ((scene scene) (overlay overlay))\n (with-slots (scene-overlays render-queue) scene\n (unless (find overlay scene-overlays)\n (vector-push-extend overlay scene-overlays)\n overlay)))\n (:method ((scene game-scene) (overlay overlay))\n (with-slots (scene-overlays render-queue) scene\n (unless (find overlay scene-overlays)\n (vector-push-extend overlay scene-overlays)\n overlay)))\n (:method ((scene 
game-scene) (object game-object))\n (if (updating-p scene)\n (with-slots (pending-adds pending-removes) scene\n (if (in-scene-p scene object)\n (when (find object pending-removes)\n (log:debug \"cancel ~A for scene remove\" object)\n (setf pending-removes (delete object pending-removes))\n object)\n (unless (find object pending-adds)\n (log:debug \"queuing ~A for scene add\" object)\n (vector-push-extend object pending-adds)\n object)))\n (%%add-object-to-scene scene object))))\n\n@export\n(defgeneric remove-from-scene (scene object)\n (:documentation \"Remove an object from the game scene\")\n (:method ((scene scene) (overlay overlay))\n (with-slots (scene-overlays) scene\n (when (find overlay scene-overlays)\n (setf scene-overlays (delete overlay scene-overlays))\n overlay)))\n (:method ((scene game-scene) (overlay overlay))\n (with-slots (scene-overlays) scene\n (when (find overlay scene-overlays)\n (setf scene-overlays (delete overlay scene-overlays))\n overlay)))\n (:method ((scene game-scene) (object game-object))\n (with-slots (pending-adds pending-removes) scene\n (if (in-scene-p scene object)\n (unless (find object pending-removes)\n (log:debug \"queuing ~A for scene removal\" object)\n (vector-push-extend object pending-removes)\n (unless (updating-p scene)\n (%run-pending-removes scene))\n object)\n (when (find object pending-adds)\n (log:debug \"cancel ~A for scene add\" object)\n (setf pending-adds (delete object pending-adds))\n object)))))\n\n(defmethod scene-activated ((scene game-scene))\n (with-slots ((state scene-audio-state)) scene\n (if state\n (audio-player-load-state *audio* state)\n (audio-player-stop-all *audio*))))\n\n(defmethod scene-deactivated ((scene game-scene))\n (with-slots ((state scene-audio-state)) scene\n (with-sdl-mixer-lock-held\n (unless state\n (setf state (audio-player-copy-state *audio*)))\n (audio-player-copy-state *audio* state)\n (audio-player-stop-music *audio*)\n (audio-player-stop-sfx *audio*)))\n (values))\n\n@export\n(defun scene-teleport-object (scene object &optional new-x new-y new-z)\n \"Move OBJECT within SCENE to the new coordinates instantly. 
OBJECT's position will be recycled internally so it will instantly appear in the new position with no position interpolation.\"\n (when new-x\n (setf (x object) new-x))\n (when new-y\n (setf (y object) new-y))\n (when new-z\n (setf (z object) new-z))\n (recycle object)\n (when (%in-live-object-area-p scene object)\n (with-slots (render-queue updatable-objects) scene\n (render-queue-add render-queue object)\n (unless (find object updatable-objects :test #'eq)\n (vector-push-extend object updatable-objects))))\n object)\n\n(defgeneric found-object-to-update (game-scene game-object)\n (:documentation \"for subclasses to hook object updates\")\n (:method ((scene game-scene) game-object)))\n\n(defun %%add-object-to-scene (scene object)\n (declare (optimize (speed 3))\n (game-scene scene)\n (game-object object))\n (with-slots (spatial-partition render-queue updatable-objects) scene\n (when (start-tracking spatial-partition object)\n (event-subscribe object scene killed)\n (when (%in-live-object-area-p scene object)\n (render-queue-add render-queue object)\n (unless (find object updatable-objects :test #'eq)\n (vector-push-extend object updatable-objects)))\n object)))\n\n(defun %run-pending-removes (scene)\n (declare (optimize (speed 3))\n (game-scene scene))\n (with-slots (pending-removes spatial-partition render-queue updatable-objects) scene\n (declare (vector pending-removes updatable-objects))\n (when (> (length pending-removes) 0)\n (loop :for removed-object :across pending-removes :do\n (event-unsubscribe removed-object scene killed)\n (stop-tracking spatial-partition removed-object)\n (when (%in-live-object-area-p scene removed-object)\n (render-queue-remove render-queue removed-object)\n (setf updatable-objects (delete removed-object updatable-objects)))\n (log:debug \"removed ~A from scene\" removed-object)\n :finally\n (setf (fill-pointer pending-removes) 0))))\n (values))\n\n(defun %run-pending-adds (scene)\n (declare (optimize (speed 3))\n (game-scene scene))\n (with-slots (pending-adds spatial-partition render-queue updatable-objects) scene\n (loop :for object :across pending-adds :do\n (%%add-object-to-scene scene object)\n :finally\n (setf (fill-pointer pending-adds) 0))))\n\n(defun %force-rebuild-live-objects (scene)\n (log:debug \"force live object rebuild.\")\n (with-slots (camera live-object-radius live-object-rebuild-camera-position) scene\n (if (float= 0.0 (x live-object-rebuild-camera-position))\n (setf (x live-object-rebuild-camera-position)\n (+ (width camera) live-object-radius))\n (setf (x live-object-rebuild-camera-position) 0.0))))\n\n(defun %in-live-object-area-p (scene game-object)\n \"T if OBJECT is inside SCENE's current live object area.\"\n (declare (optimize (speed 3))\n (game-scene scene)\n (game-object game-object))\n (with-slots (camera live-object-radius live-object-rebuild-camera-position) scene\n (let ((live-x-min (- (x live-object-rebuild-camera-position)\n (width camera)\n live-object-radius))\n (live-x-max (+ (x live-object-rebuild-camera-position)\n (width camera)\n live-object-radius))\n (live-y-min (- (y live-object-rebuild-camera-position)\n (height camera)\n live-object-radius))\n (live-y-max (+ (y live-object-rebuild-camera-position)\n (height camera)\n live-object-radius)))\n (multiple-value-bind (x y z w h) (world-dimensions game-object)\n (declare (ignore z)\n (single-float x y w h))\n (and (or (<= live-x-min x live-x-max)\n (<= live-x-min (+ x w) live-x-max)\n (and (<= x live-x-min)\n (>= (+ x w) live-x-max)))\n (or (<= live-y-min y 
live-y-max)\n (<= live-y-min (+ y h) live-y-max)\n (and (<= y live-y-min)\n (>= (+ y h) live-y-max))))))))\n\n(defun %rebuild-live-object-area-p (scene)\n (declare (optimize (speed 3))\n (game-scene scene))\n (block camera-moved-outside-render-area-p\n (with-slots (camera live-object-radius live-object-rebuild-camera-position) scene\n (with-accessors ((c-x x) (c-y y) (c-w width) (c-h height)) camera\n (declare (single-float c-x c-y c-w c-h))\n (let* ((camera-centered-x (+ c-x (/ c-w 2.0)))\n (camera-centered-y (+ c-y (/ c-h 2.0)))\n (delta (max\n (abs (- camera-centered-x (x live-object-rebuild-camera-position)))\n (abs (- camera-centered-y (y live-object-rebuild-camera-position))))))\n (when (>= delta live-object-radius)\n (setf (x live-object-rebuild-camera-position) camera-centered-x\n (y live-object-rebuild-camera-position) camera-centered-y)\n t))))))\n\n(defmethod update :around ((scene game-scene))\n (with-slots (updating-p) scene\n (setf updating-p t)\n (unwind-protect\n (call-next-method scene)\n (setf updating-p nil))))\n\n(defmethod update ((game-scene game-scene))\n (declare (optimize (speed 3)))\n (with-slots (live-object-rebuild-camera-position\n live-object-radius\n updatable-objects\n (queue render-queue)\n reset-instance-renderers\n (bg scene-background)\n scene-overlays\n pending-removes\n camera)\n game-scene\n (let ((rebuild-live-objects-p (%rebuild-live-object-area-p game-scene)))\n (%run-pending-removes game-scene)\n (%run-pending-adds game-scene)\n (when rebuild-live-objects-p\n (setf (fill-pointer updatable-objects) 0)\n (render-queue-reset queue)\n (setf (fill-pointer reset-instance-renderers) 0))\n (pre-update (camera game-scene))\n (when bg\n (pre-update bg)\n (when rebuild-live-objects-p\n (render-queue-add queue bg)))\n (loop :for overlay :across (the (vector overlay) scene-overlays) :do\n (pre-update overlay))\n\n (call-next-method game-scene)\n\n (when rebuild-live-objects-p\n (let ((num-objects-to-update 0)\n (num-objects-to-render 0)\n (live-x-min (- (x live-object-rebuild-camera-position)\n (width camera)\n live-object-radius))\n (live-x-max (+ (x live-object-rebuild-camera-position)\n (width camera)\n live-object-radius))\n (live-y-min (- (y live-object-rebuild-camera-position)\n (height camera)\n live-object-radius))\n (live-y-max (+ (y live-object-rebuild-camera-position)\n (height camera)\n live-object-radius)))\n (declare (fixnum num-objects-to-render num-objects-to-update)\n (single-float live-x-min live-x-max live-y-min live-y-max))\n (log:debug \"rebuilding live-objects\")\n (do-spatial-partition (game-object\n (spatial-partition game-scene)\n :static-iteration-p t\n :min-x live-x-min :max-x live-x-max\n :min-y live-y-min :max-y live-y-max)\n (block found-object-to-render\n (block check-if-instance-rendered\n (if (typep game-object 'instance-rendered-drawable)\n (with-slots ((instance-renderer instance-renderer)) game-object\n (unless (find instance-renderer reset-instance-renderers)\n (incf num-objects-to-render)\n (vector-push-extend instance-renderer reset-instance-renderers)\n (instance-renderer-reset instance-renderer game-scene)))\n (incf num-objects-to-render)))\n (render-queue-add queue game-object))\n (block check-add-to-updatable-objects\n (when (and (not (typep game-object 'static-object))\n (not (find game-object updatable-objects :test #'eq)))\n (incf num-objects-to-update)\n (vector-push-extend game-object updatable-objects))))\n (log:debug \"Rebuild complete. 
Found ~A objects to render and ~A objects to update\"\n num-objects-to-render\n num-objects-to-update)))\n (update (camera game-scene))\n (loop :for overlay :across (the (vector overlay) scene-overlays) :do\n (update overlay)\n #+nil\n (when rebuild-live-objects-p\n (render-queue-add render-queue overlay)))\n (when rebuild-live-objects-p\n (render-queue-add queue camera))\n (when bg\n (update bg))\n (loop :for game-object :across updatable-objects :do\n (pre-update game-object)\n (found-object-to-update game-scene game-object)\n (update game-object))\n (values))))\n\n(defmethod render ((scene game-scene) update-percent camera gl-context)\n HACK scene transitions get messed up bc rendering occurs before setup stuff is done\n (prog1 (call-next-method scene update-percent camera gl-context)\n (with-slots (scene-overlays) scene\n (loop :for overlay :across (the (vector overlay) scene-overlays) :do\n (render overlay update-percent (camera scene) gl-context))))))\n\n(defevent-handler killed ((object obb) (game-scene game-scene))\n \"\"\n (remove-from-scene game-scene object))\n\n@export\n(defun schedule (game-scene timestamp zero-arg-fn)\n \"When the value returned by SCENE-TICKS of GAME-SCENE equals or exceeds TIMESTAMP the ZERO-ARG-FN callback will be invoked.\"\n (scheduler-add game-scene timestamp zero-arg-fn)\n (values))\n\n@export\n(defun get-object-by-id (scene id)\n \"Return the (presumably) unique game-object identified by ID in SCENE.\"\n (declare (game-scene scene))\n (block find-object\n (do-spatial-partition (game-object (spatial-partition scene) :static-iteration-p t)\n (when (equalp (object-id game-object) id)\n (return-from find-object game-object)))))\n\n@export\n(defun in-scene-p (scene object)\n \"Return OBJECT if OBJECT is in SCENE, nil otherwise.\"\n (declare (optimize (speed 3))\n (game-scene scene))\n (block find-object\n (do-spatial-partition (obj (spatial-partition scene) :static-iteration-p t)\n (when (eq obj object)\n (return-from find-object object)))))\n"}}},{"rowIdx":610254,"cells":{"_id":{"kind":"string","value":"16f37d0f61a1dd10973c1d70eae652e9e4e695b83cb8aeb9e0189cb2bb4bd772"},"repository":{"kind":"string","value":"ndmitchell/catch"},"name":{"kind":"string","value":"Blur.hs"},"content":{"kind":"string","value":"\nmodule RegExp.Blur(blur) where\n\nimport RegExp.Type\n\n-- the output of blur must be a finite set\n-- assuming a finite input\nblur :: (Eq a, Show a) => RegExp a -> RegExp a\nblur x = f x\n where\n f (RegKleene x) = regKleene (f x)\n f (RegUnion x) = regUnion (map f x)\n f (RegConcat x) = regConcat (g x)\n f x = x\n \n g (a : RegKleene b : c) | a == b = g (RegKleene b : a : c)\n \n g (RegKleene a : b : c : d : e) | a == b && b == c && c == d =\n g (RegKleene a : c : d : e)\n \n g (a:b:c:d) | a == b && b == c =\n g (RegKleene a : b : c : d)\n\n g (x:xs) = x : g xs\n g [] = []\n \n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/ndmitchell/catch/5d834416a27b4df3f7ce7830c4757d4505aaf96e/src/RegExp/Blur.hs"},"language":{"kind":"string","value":"haskell"},"comments":{"kind":"string","value":" the output of blur must be a finite set\n assuming a finite input"},"code":{"kind":"string","value":"\nmodule RegExp.Blur(blur) where\n\nimport RegExp.Type\n\nblur :: (Eq a, Show a) => RegExp a -> RegExp a\nblur x = f x\n where\n f (RegKleene x) = regKleene (f x)\n f (RegUnion x) = regUnion (map f x)\n f (RegConcat x) = regConcat (g x)\n f x = x\n \n g (a : RegKleene b : c) | a == b = g (RegKleene b : a : c)\n \n g 
(RegKleene a : b : c : d : e) | a == b && b == c && c == d =\n g (RegKleene a : c : d : e)\n \n g (a:b:c:d) | a == b && b == c =\n g (RegKleene a : b : c : d)\n\n g (x:xs) = x : g xs\n g [] = []\n \n"}}},{"rowIdx":610255,"cells":{"_id":{"kind":"string","value":"6f38c355955756cad424182fdb05b92e563b0b1feff4286292687e994838d2c9"},"repository":{"kind":"string","value":"ropas/sparrow"},"name":{"kind":"string","value":"instrumentedMem.ml"},"content":{"kind":"string","value":"(***********************************************************************)\n(* *)\n Copyright ( c ) 2007 - present . \n Programming Research Laboratory ( ROPAS ) , Seoul National University .\n(* All rights reserved. *)\n(* *)\n This software is distributed under the term of the BSD license . \n(* See the LICENSE file for details. *)\n(* *)\n(***********************************************************************)\nmodule type S =\nsig\n include MapDom.CPO\n module Access : Access.S with type Loc.t = A.t and type PowLoc.t = PowA.t\n val init_access : unit -> unit\n val return_access : unit -> Access.info\nend\n\nmodule Make (Mem : MapDom.CPO) =\nstruct\n include Mem\n\n module Loc = A\n module Val = B\n module Access = Access.Make(Mem)\n let access = ref Access.Info.empty\n let access_mode = ref false\n let init_access : unit -> unit\n = fun () -> access_mode := true; access := Access.Info.empty; ()\n\n let return_access : unit -> Access.info\n = fun () -> access_mode := false; !access\n\n let add k v m =\n (if !access_mode then\n access := Access.Info.add Access.Info.def k !access);\n add k v m\n\n let weak_add k v m =\n (if !access_mode then\n access := Access.Info.add Access.Info.all k !access);\n weak_add k v m\n\n let find : A.t -> t -> B.t\n = fun k m ->\n (if !access_mode && not (eq m bot) then\n access := Access.Info.add Access.Info.use k !access);\n find k m\nend\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/ropas/sparrow/3ec055b8c87b5c8340ef3ed6cde34f5835865b31/src/domain/instrumentedMem.ml"},"language":{"kind":"string","value":"ocaml"},"comments":{"kind":"string","value":"*********************************************************************\n \n All rights reserved. \n \n See the LICENSE file for details. \n \n*********************************************************************"},"code":{"kind":"string","value":" Copyright ( c ) 2007 - present . \n Programming Research Laboratory ( ROPAS ) , Seoul National University .\n This software is distributed under the term of the BSD license . 
\nmodule type S =\nsig\n include MapDom.CPO\n module Access : Access.S with type Loc.t = A.t and type PowLoc.t = PowA.t\n val init_access : unit -> unit\n val return_access : unit -> Access.info\nend\n\nmodule Make (Mem : MapDom.CPO) =\nstruct\n include Mem\n\n module Loc = A\n module Val = B\n module Access = Access.Make(Mem)\n let access = ref Access.Info.empty\n let access_mode = ref false\n let init_access : unit -> unit\n = fun () -> access_mode := true; access := Access.Info.empty; ()\n\n let return_access : unit -> Access.info\n = fun () -> access_mode := false; !access\n\n let add k v m =\n (if !access_mode then\n access := Access.Info.add Access.Info.def k !access);\n add k v m\n\n let weak_add k v m =\n (if !access_mode then\n access := Access.Info.add Access.Info.all k !access);\n weak_add k v m\n\n let find : A.t -> t -> B.t\n = fun k m ->\n (if !access_mode && not (eq m bot) then\n access := Access.Info.add Access.Info.use k !access);\n find k m\nend\n"}}},{"rowIdx":610256,"cells":{"_id":{"kind":"string","value":"02dd07a645cc9e530baec9c86afa0826065c0687915f626790bb9d08de33a90d"},"repository":{"kind":"string","value":"apache/couchdb-mochiweb"},"name":{"kind":"string","value":"mochiweb_websocket_tests.erl"},"content":{"kind":"string","value":"-module(mochiweb_websocket_tests).\n\n-author('').\n\n The MIT License ( MIT )\n\n Copyright ( c ) 2012 Zadane.pl sp . \n\n%% Permission is hereby granted, free of charge, to any person obtaining a copy\n%% of this software and associated documentation files (the \"Software\"), to deal\n in the Software without restriction , including without limitation the rights\n%% to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n copies of the Software , and to permit persons to whom the Software is\n%% furnished to do so, subject to the following conditions:\n\n%% The above copyright notice and this permission notice shall be included in\n all copies or substantial portions of the Software .\n\n THE SOFTWARE IS PROVIDED \" AS IS \" , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR\n%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,\n%% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n%% THE SOFTWARE.\n\n-include_lib(\"eunit/include/eunit.hrl\").\n\nmake_handshake_for_correct_client_test() ->\n %% Hybi handshake\n Req1 = mochiweb_request:new(nil, 'GET', \"/foo\", {1, 1},\n\t\t\t\tmochiweb_headers:make([{\"Sec-WebSocket-Key\",\n\t\t\t\t\t\t\t\"Xn3fdKyc3qEXPuj2A3O+ZA==\"}])),\n {Version1, {HttpCode1, Headers1, _}} =\n\tmochiweb_websocket:make_handshake(Req1),\n ?assertEqual(hybi, Version1),\n ?assertEqual(101, HttpCode1),\n ?assertEqual(\"Upgrade\",\n\t\t (proplists:get_value(\"Connection\", Headers1))),\n ?assertEqual(<<\"BIFTHkJk4r5t8kuud82tZJaQsCE=\">>,\n\t\t (proplists:get_value(\"Sec-Websocket-Accept\",\n\t\t\t\t Headers1))),\n handshake\n {Version2, {HttpCode2, Headers2, Body2}} =\n\tmochiweb_websocket:hixie_handshake(\"ws://\", \"localhost\",\n\t\t\t\t\t \"/\", \"33j284 9 z63 e 9 7\",\n\t\t\t\t\t \"TF'3|6D12659H 7 70\",\n\t\t\t\t\t <<175, 181, 191, 215, 128, 195, 144,\n\t\t\t\t\t 120>>,\n\t\t\t\t\t \"null\"),\n ?assertEqual(hixie, Version2),\n ?assertEqual(101, HttpCode2),\n ?assertEqual(\"null\",\n\t\t (proplists:get_value(\"Sec-WebSocket-Origin\",\n\t\t\t\t Headers2))),\n ?assertEqual(\"ws/\",\n\t\t (proplists:get_value(\"Sec-WebSocket-Location\",\n\t\t\t\t Headers2))),\n ?assertEqual(<<230, 144, 237, 94, 84, 214, 41, 69, 244,\n\t\t 150, 134, 167, 221, 103, 239, 246>>,\n\t\t Body2).\n\nhybi_frames_decode_test() ->\n ?assertEqual([{1, <<\"foo\">>}],\n\t\t (mochiweb_websocket:parse_hybi_frames(nil,\n\t\t\t\t\t\t <<129, 131, 118, 21, 153,\n\t\t\t\t\t\t\t 58, 16, 122, 246>>,\n\t\t\t\t\t\t []))),\n ?assertEqual([{1, <<\"foo\">>}, {1, <<\"bar\">>}],\n\t\t (mochiweb_websocket:parse_hybi_frames(nil,\n\t\t\t\t\t\t <<129, 131, 1, 225, 201,\n\t\t\t\t\t\t\t 42, 103, 142, 166, 129,\n\t\t\t\t\t\t\t 131, 93, 222, 214, 66,\n\t\t\t\t\t\t\t 63, 191, 164>>,\n\t\t\t\t\t\t []))).\n\nhixie_frames_decode_test() ->\n ?assertEqual([],\n\t\t (mochiweb_websocket:parse_hixie_frames(<<>>, []))),\n ?assertEqual([<<\"foo\">>],\n\t\t (mochiweb_websocket:parse_hixie_frames(<<0, 102, 111,\n\t\t\t\t\t\t\t 111, 255>>,\n\t\t\t\t\t\t\t[]))),\n ?assertEqual([<<\"foo\">>, <<\"bar\">>],\n\t\t (mochiweb_websocket:parse_hixie_frames(<<0, 102, 111,\n\t\t\t\t\t\t\t 111, 255, 0, 98, 97,\n\t\t\t\t\t\t\t 114, 255>>,\n\t\t\t\t\t\t\t[]))).\n\nend_to_end_test_factory(ServerTransport) ->\n mochiweb_test_util:with_server(ServerTransport,\n\t\t\t\t fun end_to_end_server/1,\n\t\t\t\t fun (Transport, Port) ->\n\t\t\t\t\t end_to_end_client(mochiweb_test_util:sock_fun(Transport,\n\t\t\t\t\t\t\t\t\t\t\t Port))\n\t\t\t\t end).\n\nend_to_end_server(Req) ->\n ?assertEqual(\"Upgrade\",\n\t\t (mochiweb_request:get_header_value(\"connection\", Req))),\n ?assertEqual(\"websocket\",\n\t\t (mochiweb_request:get_header_value(\"upgrade\", Req))),\n {ReentryWs, _ReplyChannel} =\n\tmochiweb_websocket:upgrade_connection(Req,\n\t\t\t\t\t fun end_to_end_ws_loop/3),\n ReentryWs(ok).\n\nend_to_end_ws_loop(Payload, State, ReplyChannel) ->\n Echo server\n lists:foreach(ReplyChannel, Payload),\n State.\n\nend_to_end_client(S) ->\n %% Key and Accept per \n UpgradeReq = string:join([\"GET / HTTP/1.1\",\n\t\t\t \"Host: localhost\", \"Upgrade: websocket\",\n\t\t\t \"Connection: Upgrade\",\n\t\t\t \"Sec-WebSocket-Key: dGhlIHNhbXBsZSBub25jZQ==\", \"\",\n\t\t\t \"\"],\n\t\t\t \"\\r\\n\"),\n ok 
= S({send, UpgradeReq}),\n {ok, {http_response, {1, 1}, 101, _}} = S(recv),\n read_expected_headers(S,\n\t\t\t [{'Upgrade', \"websocket\"}, {'Connection', \"Upgrade\"},\n\t\t\t {'Content-Length', \"0\"},\n\t\t\t {\"Sec-Websocket-Accept\",\n\t\t\t \"s3pPLMBiTxaQ9kYGzzhZRbK+xOo=\"}]),\n The first message sent over telegraph :)\n SmallMessage = <<\"What hath God wrought?\">>,\n ok = S({send,\n\t <<1:1, %% Fin\n\t 0:1, %% Rsv1\n\t 0:1, %% Rsv2\n\t 0:1, %% Rsv3\n Opcode , 1 = text frame\n\t 1:1, %% Mask on\n Length , < 125 case\n\t 0:32, %% Mask (trivial)\n\t SmallMessage/binary>>}),\n {ok, WsFrames} = S(recv),\n <<1:1, %% Fin\n 0:1, %% Rsv1\n 0:1, %% Rsv2\n 0:1, %% Rsv3\n 1:4, %% Opcode, text frame (all mochiweb supports for now)\n MsgSize:8, %% Expecting small size\n SmallMessage/binary>> =\n\tWsFrames,\n ?assertEqual(MsgSize, (byte_size(SmallMessage))),\n ok.\n\nread_expected_headers(S, D) ->\n Headers = mochiweb_test_util:read_server_headers(S),\n lists:foreach(fun ({K, V}) ->\n\t\t\t ?assertEqual(V,\n\t\t\t\t (mochiweb_headers:get_value(K, Headers)))\n\t\t end,\n\t\t D).\n\nend_to_end_http_test() ->\n end_to_end_test_factory(plain).\n\nend_to_end_https_test() -> end_to_end_test_factory(ssl).\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/apache/couchdb-mochiweb/fce80ef5e2c105405a39d3cdf4615f21e0d1d734/test/mochiweb_websocket_tests.erl"},"language":{"kind":"string","value":"erlang"},"comments":{"kind":"string","value":" Permission is hereby granted, free of charge, to any person obtaining a copy\n of this software and associated documentation files (the \"Software\"), to deal\n to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n furnished to do so, subject to the following conditions:\n The above copyright notice and this permission notice shall be included in\n IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n THE SOFTWARE.\n Hybi handshake\n Key and Accept per \n Fin\n Rsv1\n Rsv2\n Rsv3\n Mask on\n Mask (trivial)\n Fin\n Rsv1\n Rsv2\n Rsv3\n Opcode, text frame (all mochiweb supports for now)\n Expecting small size"},"code":{"kind":"string","value":"-module(mochiweb_websocket_tests).\n\n-author('').\n\n The MIT License ( MIT )\n\n Copyright ( c ) 2012 Zadane.pl sp . 
\n\n in the Software without restriction , including without limitation the rights\n copies of the Software , and to permit persons to whom the Software is\n\n all copies or substantial portions of the Software .\n\n THE SOFTWARE IS PROVIDED \" AS IS \" , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR\n LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,\n\n-include_lib(\"eunit/include/eunit.hrl\").\n\nmake_handshake_for_correct_client_test() ->\n Req1 = mochiweb_request:new(nil, 'GET', \"/foo\", {1, 1},\n\t\t\t\tmochiweb_headers:make([{\"Sec-WebSocket-Key\",\n\t\t\t\t\t\t\t\"Xn3fdKyc3qEXPuj2A3O+ZA==\"}])),\n {Version1, {HttpCode1, Headers1, _}} =\n\tmochiweb_websocket:make_handshake(Req1),\n ?assertEqual(hybi, Version1),\n ?assertEqual(101, HttpCode1),\n ?assertEqual(\"Upgrade\",\n\t\t (proplists:get_value(\"Connection\", Headers1))),\n ?assertEqual(<<\"BIFTHkJk4r5t8kuud82tZJaQsCE=\">>,\n\t\t (proplists:get_value(\"Sec-Websocket-Accept\",\n\t\t\t\t Headers1))),\n handshake\n {Version2, {HttpCode2, Headers2, Body2}} =\n\tmochiweb_websocket:hixie_handshake(\"ws://\", \"localhost\",\n\t\t\t\t\t \"/\", \"33j284 9 z63 e 9 7\",\n\t\t\t\t\t \"TF'3|6D12659H 7 70\",\n\t\t\t\t\t <<175, 181, 191, 215, 128, 195, 144,\n\t\t\t\t\t 120>>,\n\t\t\t\t\t \"null\"),\n ?assertEqual(hixie, Version2),\n ?assertEqual(101, HttpCode2),\n ?assertEqual(\"null\",\n\t\t (proplists:get_value(\"Sec-WebSocket-Origin\",\n\t\t\t\t Headers2))),\n ?assertEqual(\"ws/\",\n\t\t (proplists:get_value(\"Sec-WebSocket-Location\",\n\t\t\t\t Headers2))),\n ?assertEqual(<<230, 144, 237, 94, 84, 214, 41, 69, 244,\n\t\t 150, 134, 167, 221, 103, 239, 246>>,\n\t\t Body2).\n\nhybi_frames_decode_test() ->\n ?assertEqual([{1, <<\"foo\">>}],\n\t\t (mochiweb_websocket:parse_hybi_frames(nil,\n\t\t\t\t\t\t <<129, 131, 118, 21, 153,\n\t\t\t\t\t\t\t 58, 16, 122, 246>>,\n\t\t\t\t\t\t []))),\n ?assertEqual([{1, <<\"foo\">>}, {1, <<\"bar\">>}],\n\t\t (mochiweb_websocket:parse_hybi_frames(nil,\n\t\t\t\t\t\t <<129, 131, 1, 225, 201,\n\t\t\t\t\t\t\t 42, 103, 142, 166, 129,\n\t\t\t\t\t\t\t 131, 93, 222, 214, 66,\n\t\t\t\t\t\t\t 63, 191, 164>>,\n\t\t\t\t\t\t []))).\n\nhixie_frames_decode_test() ->\n ?assertEqual([],\n\t\t (mochiweb_websocket:parse_hixie_frames(<<>>, []))),\n ?assertEqual([<<\"foo\">>],\n\t\t (mochiweb_websocket:parse_hixie_frames(<<0, 102, 111,\n\t\t\t\t\t\t\t 111, 255>>,\n\t\t\t\t\t\t\t[]))),\n ?assertEqual([<<\"foo\">>, <<\"bar\">>],\n\t\t (mochiweb_websocket:parse_hixie_frames(<<0, 102, 111,\n\t\t\t\t\t\t\t 111, 255, 0, 98, 97,\n\t\t\t\t\t\t\t 114, 255>>,\n\t\t\t\t\t\t\t[]))).\n\nend_to_end_test_factory(ServerTransport) ->\n mochiweb_test_util:with_server(ServerTransport,\n\t\t\t\t fun end_to_end_server/1,\n\t\t\t\t fun (Transport, Port) ->\n\t\t\t\t\t end_to_end_client(mochiweb_test_util:sock_fun(Transport,\n\t\t\t\t\t\t\t\t\t\t\t Port))\n\t\t\t\t end).\n\nend_to_end_server(Req) ->\n ?assertEqual(\"Upgrade\",\n\t\t (mochiweb_request:get_header_value(\"connection\", Req))),\n ?assertEqual(\"websocket\",\n\t\t (mochiweb_request:get_header_value(\"upgrade\", Req))),\n {ReentryWs, _ReplyChannel} =\n\tmochiweb_websocket:upgrade_connection(Req,\n\t\t\t\t\t fun end_to_end_ws_loop/3),\n ReentryWs(ok).\n\nend_to_end_ws_loop(Payload, State, ReplyChannel) ->\n Echo server\n lists:foreach(ReplyChannel, Payload),\n State.\n\nend_to_end_client(S) ->\n UpgradeReq = string:join([\"GET / HTTP/1.1\",\n\t\t\t \"Host: localhost\", \"Upgrade: websocket\",\n\t\t\t \"Connection: Upgrade\",\n\t\t\t \"Sec-WebSocket-Key: 
dGhlIHNhbXBsZSBub25jZQ==\", \"\",\n\t\t\t \"\"],\n\t\t\t \"\\r\\n\"),\n ok = S({send, UpgradeReq}),\n {ok, {http_response, {1, 1}, 101, _}} = S(recv),\n read_expected_headers(S,\n\t\t\t [{'Upgrade', \"websocket\"}, {'Connection', \"Upgrade\"},\n\t\t\t {'Content-Length', \"0\"},\n\t\t\t {\"Sec-Websocket-Accept\",\n\t\t\t \"s3pPLMBiTxaQ9kYGzzhZRbK+xOo=\"}]),\n The first message sent over telegraph :)\n SmallMessage = <<\"What hath God wrought?\">>,\n ok = S({send,\n Opcode , 1 = text frame\n Length , < 125 case\n\t SmallMessage/binary>>}),\n {ok, WsFrames} = S(recv),\n SmallMessage/binary>> =\n\tWsFrames,\n ?assertEqual(MsgSize, (byte_size(SmallMessage))),\n ok.\n\nread_expected_headers(S, D) ->\n Headers = mochiweb_test_util:read_server_headers(S),\n lists:foreach(fun ({K, V}) ->\n\t\t\t ?assertEqual(V,\n\t\t\t\t (mochiweb_headers:get_value(K, Headers)))\n\t\t end,\n\t\t D).\n\nend_to_end_http_test() ->\n end_to_end_test_factory(plain).\n\nend_to_end_https_test() -> end_to_end_test_factory(ssl).\n"}}},{"rowIdx":610257,"cells":{"_id":{"kind":"string","value":"ad0c2e6c0fa094fe033c197f80346df8bac985220b3467aa53920dafb8550612"},"repository":{"kind":"string","value":"naoto-ogawa/h-xproto-mysql"},"name":{"kind":"string","value":"NodeSession.hs"},"content":{"kind":"string","value":" | \n module : Database . MySQLX.NodeSession \n description : Session management \n copyright : ( c ) , 2017 \n license : MIT \n maintainer : \n stability : experimental \n portability : \n\n Session ( a.k.a . Connection ) \n\n\nmodule : Database.MySQLX.NodeSession\ndescription : Session management \ncopyright : (c) naoto ogawa, 2017\nlicense : MIT \nmaintainer : \nstability : experimental\nportability : \n\nSession (a.k.a. Connection)\n\n-}\n# LANGUAGE RecordWildCards #\n\nmodule DataBase.MySQLX.NodeSession \n (\n -- * Message\n Message\n -- * Session Infomation\n , NodeSessionInfo(..)\n , defaultNodeSesssionInfo\n -- * Node Session \n , NodeSession(clientId, auth_data)\n -- * Session Management\n , openNodeSession\n , closeNodeSession\n -- * Transaction\n , begenTrxNodeSession\n , commitNodeSession\n , rollbackNodeSession\n -- * Expectation\n , sendExpectNoError \n , sendExpectUnset\n , sendExpectClose \n -- \n , readMessagesR \n , writeMessageR\n , repeatreadMessagesR\n -- * Helper functions\n , isSocketConnected\n -- * Internal Use Only\n , readMsgLengthR \n , readAllMsgR\n ) where\n\n-- general, standard library\nimport qualified Data.Binary as BIN\nimport qualified Data.ByteString as B\nimport qualified Data.ByteString.Lazy as BL \nimport qualified Data.Int as I\nimport Data.Typeable (TypeRep, Typeable, typeRep, typeOf)\nimport qualified Data.Word as W \n\nimport Network.Socket hiding (recv) \nimport Network.Socket.ByteString (send, sendAll, recv)\n\nimport Control.Exception.Safe (Exception, MonadThrow, SomeException, throwM)\nimport Control.Monad\nimport Control.Monad.Trans.Reader\nimport Control.Monad.IO.Class\n\n-- protocol buffer library\nimport qualified Text.ProtocolBuffers as PB\nimport qualified Text.ProtocolBuffers.Basic as PBB\nimport qualified Text.ProtocolBuffers.Header as PBH\nimport qualified Text.ProtocolBuffers.TextMessage as PBT\nimport qualified Text.ProtocolBuffers.WireMessage as PBW\nimport qualified Text.ProtocolBuffers.Reflections as PBR\n\n-- generated library\nimport qualified Com.Mysql.Cj.Mysqlx.Protobuf.Error as PE \nimport qualified Com.Mysql.Cj.Mysqlx.Protobuf.Frame as PFr\nimport qualified Com.Mysql.Cj.Mysqlx.Protobuf.AuthenticateContinue as PAC\nimport qualified 
Com.Mysql.Cj.Mysqlx.Protobuf.Ok as POk\n\n-- my library\nimport DataBase.MySQLX.Exception\nimport DataBase.MySQLX.Model\nimport DataBase.MySQLX.Util \n\n-- -----------------------------------------------------------------------------\n-- \n-- -----------------------------------------------------------------------------\n\n-- | Node Session Object\ndata NodeSession = NodeSession\n { _socket :: Socket -- ^ socket \n , clientId :: W.Word64 -- ^ client id given by MySQL Server\n ^ auth_data given by MySQL Server\n } deriving Show\n\n-- | Infomation Object of Node Session\ndata NodeSessionInfo = NodeSessionInfo \n { host :: HostName -- ^ host name\n , port :: PortNumber -- ^ port nummber\n , database :: String -- ^ database name\n , user :: String -- ^ user\n , password :: String -- ^ password\n , charset :: String -- ^ charset\n } deriving Show\n\n-- | Default NodeSessionInfo\n-- \n * host : 127.0.0.1\n * port : 33600\n-- * database : \"\"\n-- * user : \"root\"\n-- * password : \"\"\n-- * charset : \"\"\n-- \ndefaultNodeSesssionInfo :: NodeSessionInfo \ndefaultNodeSesssionInfo = NodeSessionInfo \"127.0.0.1\" 33060 \"\" \"root\" \"\" \"\"\n\n-- | a message (type, payload)\ntype Message = (Int, B.ByteString) \n\n-- -----------------------------------------------------------------------------\n-- Session Management\n-- -----------------------------------------------------------------------------\n-- | Open node session.\nopenNodeSession :: (MonadIO m, MonadThrow m) \n => NodeSessionInfo -- ^ NodeSessionInfo\n -> m NodeSession -- ^ NodeSession\nopenNodeSession sessionInfo = do\n\n socket <- _client (host sessionInfo) (port sessionInfo)\n let session = NodeSession socket (fromIntegral 0) BL.empty \n\n x <- runReaderT _negociate session\n\n (t, msg):xs <- runReaderT (_auth sessionInfo) session\n case t of \n TODO\n -- debug \"success\"\n frm <- getFrame msg\n case PFr.payload frm of\n Just x -> do \n changed <- getSessionStateChanged $ BL.toStrict x\n -- debug changed\n ok <- mkAuthenticateOk $ snd $ head xs \n -- debug ok \n id <- getClientId changed\n -- debug $ \"NodeSession is opend; clientId =\" ++ (show id)\n return session {clientId = id} \n Nothing -> throwM $ XProtocolException \"Payload is Nothing\"\n TODO\n err <- getError msg\n throwM $ XProtocolError err\n _ -> error $ \"message type unknown, =\" ++ show t\n\n-- | Close node session.\ncloseNodeSession :: (MonadIO m, MonadThrow m) => NodeSession -> m ()\ncloseNodeSession nodeSess = do\n runReaderT (sendClose >> recieveOk) nodeSess\n liftIO . close $ _socket nodeSess\n debug \" NodeSession is closed . 
\"\n return ()\n\n-- | Make a socket for session.\n_client :: (MonadIO m) => HostName -> PortNumber -> m Socket \n_client host port = liftIO $ withSocketsDo $ do\n addrInfo <- getAddrInfo Nothing (Just host) (Just $ show port)\n let serverAddr = head addrInfo\n sock <- socket (addrFamily serverAddr) Stream defaultProtocol\n connect sock (addrAddress serverAddr)\n return sock\n\n_auth :: (MonadIO m, MonadThrow m) => NodeSessionInfo -> ReaderT NodeSession m [Message]\n_auth NodeSessionInfo{..} = do\n sendAuthenticateStart user\n salt <- recieveSalt\n sendAutenticateContinue database user password salt\n msgs <- readMessagesR \n return msgs \n\nsendCapabilitiesGet :: (MonadIO m, MonadThrow m) => ReaderT NodeSession m () \nsendCapabilitiesGet = writeMessageR mkCapabilitiesGet \n\n_negociate :: (MonadIO m, MonadThrow m) => ReaderT NodeSession m [Message]\n_negociate = do\n sendCapabilitiesGet\n ret@(x:xs) <- readMessagesR \n if fst x == s_error \n then do\n msg <- getError $ snd x\n throwM $ XProtocolError msg\n else do\n return ret \n\nsendAuthenticateStart :: (MonadIO m) => String -> ReaderT NodeSession m () \nsendAuthenticateStart = writeMessageR . mkAuthenticateStart\n\nsendAutenticateContinue :: (MonadIO m) => String -> String -> String -> B.ByteString -> ReaderT NodeSession m ()\nsendAutenticateContinue database user password salt = writeMessageR $ mkAuthenticateContinue database user salt password \n\n-- | Send Close message to the server.\nsendClose :: (MonadIO m) => ReaderT NodeSession m () \nsendClose = writeMessageR mkClose\n\n-- | Retreive a salt given by the server.\nrecieveSalt :: (MonadIO m, MonadThrow m) => ReaderT NodeSession m B.ByteString\nrecieveSalt = do\n msg <- getAuthenticateContinueR\n return $ BL.toStrict $ PAC.auth_data msg\n\nrecieveOk :: (MonadIO m, MonadThrow m) => ReaderT NodeSession m POk.Ok\nrecieveOk = getOkR\n\n-- | Send NoError expectation message to the server.\nsendExpectNoError :: (MonadIO m) => ReaderT NodeSession m () \nsendExpectNoError = writeMessageR mkExpectNoError\n\n-- | Send Unset expectation message to the server.\nsendExpectUnset :: (MonadIO m) => ReaderT NodeSession m () \nsendExpectUnset = writeMessageR mkExpectUnset\n\n\n interfaces as follows : \n\n openNodeSession = do \n sendAuthenticateStart username ( throw NetworkException ) : : aaa - > session - > param1 - > ( ) \n salt < - recieveSalt ( throw ) : : bbb - > session - > ByteString \n sendAuthenticateContinue schema user salt password ( throw NetworkException ) : : - > session - > param { } - > ( ) \n reciveAuthenticateOK ( throw AuthenticateException ) : : ddd - > session - > ( ) \n\n\ninterfaces as follows:\n\nopenNodeSession = do\n sendAuthenticateStart username (throw NetworkException) :: aaa -> session -> param1 -> ()\n salt <- recieveSalt (throw NetworkException) :: bbb -> session -> ByteString\n sendAuthenticateContinue schema user salt password (throw NetworkException) :: ccc -> session -> param{ } -> ()\n reciveAuthenticateOK (throw AuthenticateException) :: ddd -> session -> ()\n\n-}\n\n\n-- {- [C]->[S] -} -- putMsg sock $ getAuthMsg \"root\"\n--\n-- {- [S]->[C] -}\n-- x <- parse2AuthenticateContinue sock\n let salt = S.toStrict $ PAC.auth_data x\n-- print salt\n--\n-- {- [C]->[S] -}\n-- putMsg sock $ getAutCont \"world_x\" \"root\" salt (B8.pack \"root\")\n--\n-- {- [S]->[C] -}\n-- frame <- parse2Frame sock\n-- getSessionStateChanged frame\n-- parse2AuthenticateOK sock\n\n--\n Using NodeSession and making ReaderT\n--\n\nwriteMessage :: (PBT.TextMsg msg\n ,PBR.ReflectDescriptor 
msg\n ,PBW.Wire msg\n ,Show msg\n ,Typeable msg\n ,MonadIO m ) => NodeSession -> msg -> m () \nwriteMessage NodeSession{..} msg = do\n liftIO $ sendAll _socket (BL.toStrict $ (putMessageLengthLE (len + 1)) `BL.append` ty `BL.append` bytes)\n -- liftIO $ putStrLn $ PBT.messagePutText msg \n where \n bytes = PBW.messagePut msg \n len = fromIntegral $ PBW.messageSize msg \n ty = putMessageType $ fromIntegral $ getClientMsgTypeNo msg\n\nsendExpectClose :: (MonadIO m) => ReaderT NodeSession m () \nsendExpectClose = do\n nodeSess <- ask\n liftIO $ writeExpectClose nodeSess \n\nwriteExpectClose NodeSession{..} = do\n liftIO $ sendAll _socket (BL.toStrict $ (putMessageLengthLE (len + 1)) `BL.append` ty `BL.append` bytes)\n where \n bytes = PBW.messagePut mkClose\n len = fromIntegral 0 \n ty = putMessageType $ fromIntegral 25\n\n-- | write a message.\nwriteMessageR :: (PBT.TextMsg msg\n ,PBR.ReflectDescriptor msg\n ,PBW.Wire msg\n ,Show msg\n ,Typeable msg\n ,MonadIO m ) => msg -> ReaderT NodeSession m () \nwriteMessageR msg = do \n session <- ask\n liftIO $ writeMessage session msg\n\ngetErrorR :: (MonadIO m, MonadThrow m) => ReaderT NodeSession m PE.Error \ngetErrorR = readOneMessageR >>= \\(_, msg) -> getError msg \n\ngetFrameR :: (MonadIO m, MonadThrow m) => ReaderT NodeSession m PFr.Frame \ngetFrameR = readOneMessageR >>= \\(_, msg) -> getFrame msg \n\ngetAuthenticateContinueR :: (MonadIO m, MonadThrow m) => ReaderT NodeSession m PAC.AuthenticateContinue\ngetAuthenticateContinueR = readOneMessageR >>= \\(_, msg) -> getAuthenticateContinue msg \n\ngetOkR :: (MonadIO m, MonadThrow m) => ReaderT NodeSession m POk.Ok\ngetOkR = readOneMessageR >>= \\(_, msg) -> getOk msg \n\ngetOneMessageR :: (MonadIO m\n ,MonadThrow m\n ,PBW.Wire a\n ,PBR.ReflectDescriptor a\n ,PBT.TextMsg a\n ,Typeable a) => ReaderT NodeSession m a\ngetOneMessageR = do \n session <- ask \n (_, msg) <- liftIO $ readOneMessage session\n getMessage msg \n\nreadMessages :: (MonadIO m) => NodeSession -> m [Message]\nreadMessages NodeSession{..} = do\n len <- runReaderT readMsgLengthR _socket\n debug $ \" 1st length = \" + + ( show $ getIntFromLE len )\n ret <- runReaderT (readAllMsgR (fromIntegral $ getIntFromLE len)) _socket\n return ret\n\nreadMessagesEither :: (MonadIO m) => NodeSession -> m (Either [Message] [Message])\nreadMessagesEither NodeSession{..} = do\n len <- runReaderT readMsgLengthR _socket\n debug $ \" 1st length = \" + + ( show $ getIntFromLE len )\n ret <- runReaderT (readAllMsgR (fromIntegral $ getIntFromLE len)) _socket\n if hasError ret \n then return $ Left ret -- Error\n Success\n where hasError r = length (filterError r) >= 1 \n filterError xs = filter (\\(t,m) -> t == s_error) xs\n\n-- | retrieve messages from Node session.\nreadMessagesR :: (MonadIO m) => ReaderT NodeSession m [Message] \nreadMessagesR = ask >>= liftIO . 
readMessages\n\n-- | retrieve messages from Node session.\nrepeatreadMessagesR :: (MonadIO m) \n => Bool -- ^ True : Expectation No Error , False : Otherwise\n -> Int -- ^ The number of sending messages.\n -> ([Message], [Message]) -- ^ Initial empty value, whichi should be ([], [])\n ^ fst : Success messages , snd : Error messages\nrepeatreadMessagesR noError num acc = do\n if num == 0\n then return acc\n else do\n nodeSess <- ask\n r <- readMessagesEither nodeSess\n case r of\n Left m -> if noError \n then return (fst acc , m )\n else repeatreadMessagesR noError (num-1) (fst acc , snd acc ++ m)\n Right m -> repeatreadMessagesR noError (num-1) ((fst acc) ++ m , snd acc )\n\nreadOneMessage :: (MonadIO m) => NodeSession -> m Message\nreadOneMessage NodeSession{..} = runReaderT readOneMsgR _socket \n\nreadOneMessageR :: (MonadIO m) => ReaderT NodeSession m Message\nreadOneMessageR = ask >>= liftIO . readOneMessage \n\nreadNMessage :: (MonadIO m) => Int -> NodeSession -> m [Message]\nreadNMessage n NodeSession{..} = runReaderT (readNMsgR n) _socket \n\nreadNMessageR :: (MonadIO m) => Int -> ReaderT NodeSession m [Message]\nreadNMessageR n = ask >>= liftIO . readNMessage n\n\n--\n-- Using Socket \n--\n\nreadSocketR :: (MonadIO m) => Int -> ReaderT Socket m B.ByteString\nreadSocketR len = ask >>= (\\x -> liftIO $ recv x len) \n\nreadMsgLengthR :: (MonadIO m) => ReaderT Socket m B.ByteString\nreadMsgLengthR = readSocketR 4\n\nreadMsgTypeR :: (MonadIO m) => ReaderT Socket m B.ByteString\nreadMsgTypeR = readSocketR 1\n\nreadNextMsgR :: (MonadIO m) => Int -> ReaderT Socket m (B.ByteString, B.ByteString)\nreadNextMsgR len = do \n bytes <- readSocketR (len + 4)\n return $ if B.length bytes == len \n then\n (bytes, B.empty)\n else \n B.splitAt len bytes\n\nreadOneMsgR :: (MonadIO m) => ReaderT Socket m Message\nreadOneMsgR = do\n l <- readMsgLengthR\n t <- readMsgTypeR\n m <- readSocketR $ fromIntegral $ (getIntFromLE l) -1 \n return (byte2Int t, m)\n\nreadNMsgR :: (MonadIO m) => Int -> ReaderT Socket m [Message]\nreadNMsgR n = sequence $ take n . 
repeat $ readOneMsgR\n\nreadAllMsgR :: (MonadIO m) => Int -> ReaderT Socket m [Message]\nreadAllMsgR len = do\n t <- readMsgTypeR\n let t' = byte2Int t \n if t' == s_sql_stmt_execute_ok \n SQL_STMT_EXECUTE_OK is the last message and has no data .\n return [(s_sql_stmt_execute_ok, B.empty)]\n else do\n debug $ \" type= \" + + ( show $ byte2Int t ) + + \" , reading len= \" + + ( show ( len-1 ` max ` 0 ) ) + + \" , plus 4 byte \"\n (msg, len) <- readNextMsgR (len-1)\n -- debug $ (show msg) ++ \" , next length of reading chunk byte is \" ++ (show $ if B.null len then 0 else getIntFromLE len)\n if B.null len \n then \n return [(t', msg)]\n else do\n msgs <- readAllMsgR $ fromIntegral $ getIntFromLE len\n return $ (t', msg): msgs \n\n-- | Begin a transaction.\nbegenTrxNodeSession :: (MonadIO m, MonadThrow m) => NodeSession -> m W.Word64\nbegenTrxNodeSession = doSimpleSessionStateChangeStmt \"begin\"\n\n-- | Commit a transaction.\ncommitNodeSession :: (MonadIO m, MonadThrow m) => NodeSession -> m W.Word64\ncommitNodeSession = doSimpleSessionStateChangeStmt \"commit\"\n\n-- | Rollback a transaction.\nrollbackNodeSession :: (MonadIO m, MonadThrow m) => NodeSession -> m W.Word64\nrollbackNodeSession = doSimpleSessionStateChangeStmt \"rollback\"\n\n-- \n-- helper\n-- \ndoSimpleSessionStateChangeStmt :: (MonadIO m, MonadThrow m) => String -> NodeSession -> m W.Word64\ndoSimpleSessionStateChangeStmt sql nodeSess = do \n -- debug $ \"session state change statement : \" ++ sql\n runReaderT (writeMessageR (mkStmtExecuteSql sql [])) nodeSess\n ret@(x:xs) <- runReaderT readMessagesR nodeSess -- [Message]\n if fst x == 1 \n then do\n msg <- getError $ snd x\n throwM $ XProtocolError msg\n else do\n frm <- (getFrame . snd ) $ head $ filter (\\(t, b) -> t == s_notice) ret -- Frame\n ssc <- getPayloadSessionStateChanged frm\n getRowsAffected ssc\n\n-- | check a raw socket connectin.\nisSocketConnected :: NodeSession -> IO Bool \nisSocketConnected NodeSession{..} = do \n isConnected _socket\n\n\n naming rule \n Application Data < -- recv < -- [ Protocol Buffer Object ] < -- get < -- [ Byte Data ] < -- read < -- [ Socket ] \n Application Data -- > send -- > [ Protocol Buffer Object ] -- > put -- > [ Byte Data ] -- > write -- > [ Socket ] \n\n mkFoo -- > [ Protocol Buffer Object ] \n\n\n\n ( a ) client - > server message implementatin pattern \n\n 1 ) make pure function from some params to a PB object = = > hidden \n\n 2 ) make the above function to Reader -- > open package \n\n ex ) \n mkAuthenticateStart \n | \n V \n sendAuthenticateStart : : ( MonadIO m ) = > String - > ReaderT NodeSession m ( ) \n sendAuthenticateStart = writeMessageR . mkAuthenticateStart \n\n\n ( b ) server - > client message implemention patten \n\n 1 ) make pure function from ByteString to a PB object \n ex ) getAuthenticateContinue : : B.ByteString - > PAC.AuthenticateContinue = = > hidden \n getAuthenticateContinue ' = getMessage \n\n 2 ) make the above function to Reader Monad \n\n 3 ) make a function to get concrete data , not Protocol Buffer Objects = = > open \n ex ) recieveSalt : : ( MonadIO m ) = > ReaderT NodeSession m B.ByteString \n\n ( c ) client - > server - > client message implementation \n\n 1 ) combine ( a ) and ( b ) so that we get a turn - around function between client and server . 
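A minimal sketch of patterns (a), (b) and (c) above, reusing the module's own writeMessageR and readOneMessageR; the names mkPing, getPong and PPong.payload are hypothetical placeholders introduced only for illustration, not part of the real X Protocol API:

-- Pattern (a): the pure constructor stays hidden; only the Reader-wrapped
-- sender is exported.  'mkPing' is an assumed pure message builder.
sendPing :: (MonadIO m) => String -> ReaderT NodeSession m ()
sendPing = writeMessageR . mkPing

-- Pattern (b): decode the raw bytes into a protocol-buffer object and expose
-- only the concrete data.  'getPong' and 'PPong.payload' are assumed here.
receivePongPayload :: (MonadIO m, MonadThrow m) => ReaderT NodeSession m B.ByteString
receivePongPayload = do
  (_, msg) <- readOneMessageR
  pong <- getPong msg
  return $ BL.toStrict $ PPong.payload pong

-- Pattern (c): combine (a) and (b) into a single client -> server -> client
-- turn-around call.
pingPong :: (MonadIO m, MonadThrow m) => String -> ReaderT NodeSession m B.ByteString
pingPong s = sendPing s >> receivePongPayload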
\n\n\n\nnaming rule \n Application Data <-- recv <-- [Protocol Buffer Object] <-- get <-- [Byte Data] <-- read <-- [Socket]\n Application Data --> send --> [Protocol Buffer Object] --> put --> [Byte Data] --> write --> [Socket]\n\n mkFoo --> [Protocol Buffer Object]\n\n\n\n(a) client -> server message implementatin pattern\n\n1) make pure function from some params to a PB object ==> hidden\n\n2) make the above function to Reader Monad\n --> open package\n\nex)\nmkAuthenticateStart\n|\nV\nsendAuthenticateStart :: (MonadIO m) => String -> ReaderT NodeSession m () \nsendAuthenticateStart = writeMessageR . mkAuthenticateStart\n\n\n(b) server -> client message implemention patten\n\n1) make pure function from ByteString to a PB object \n ex) getAuthenticateContinue :: B.ByteString -> PAC.AuthenticateContinue ==> hidden\n getAuthenticateContinue' = getMessage \n\n2) make the above function to Reader Monad\n\n3) make a function to get concrete data, not Protocol Buffer Objects ==> open\n ex) recieveSalt :: (MonadIO m) => ReaderT NodeSession m B.ByteString\n\n(c) client -> server -> client message implementation\n\n1) combine (a) and (b) so that we get a turn-around function between client and server. \n\n\n-}\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/naoto-ogawa/h-xproto-mysql/1eacd6486c99b849016bf088788cb8d8b166f964/src/DataBase/MySQLX/NodeSession.hs"},"language":{"kind":"string","value":"haskell"},"comments":{"kind":"string","value":" * Message\n * Session Infomation\n * Node Session \n * Session Management\n * Transaction\n * Expectation\n \n * Helper functions\n * Internal Use Only\n general, standard library\n protocol buffer library\n generated library\n my library\n -----------------------------------------------------------------------------\n \n -----------------------------------------------------------------------------\n | Node Session Object\n ^ socket \n ^ client id given by MySQL Server\n | Infomation Object of Node Session\n ^ host name\n ^ port nummber\n ^ database name\n ^ user\n ^ password\n ^ charset\n | Default NodeSessionInfo\n \n * database : \"\"\n * user : \"root\"\n * password : \"\"\n * charset : \"\"\n \n | a message (type, payload)\n -----------------------------------------------------------------------------\n Session Management\n -----------------------------------------------------------------------------\n | Open node session.\n ^ NodeSessionInfo\n ^ NodeSession\n debug \"success\"\n debug changed\n debug ok \n debug $ \"NodeSession is opend; clientId =\" ++ (show id)\n | Close node session.\n | Make a socket for session.\n | Send Close message to the server.\n | Retreive a salt given by the server.\n | Send NoError expectation message to the server.\n | Send Unset expectation message to the server.\n {- [C]->[S] -} -- putMsg sock $ getAuthMsg \"root\"\n\n {- [S]->[C] -}\n x <- parse2AuthenticateContinue sock\n print salt\n\n {- [C]->[S] -}\n putMsg sock $ getAutCont \"world_x\" \"root\" salt (B8.pack \"root\")\n\n {- [S]->[C] -}\n frame <- parse2Frame sock\n getSessionStateChanged frame\n parse2AuthenticateOK sock\n\n\n liftIO $ putStrLn $ PBT.messagePutText msg \n | write a message.\n Error\n | retrieve messages from Node session.\n | retrieve messages from Node session.\n ^ True : Expectation No Error , False : Otherwise\n ^ The number of sending messages.\n ^ Initial empty value, whichi should be ([], [])\n\n Using Socket \n\n debug $ (show msg) ++ \" , next length of reading chunk byte is \" ++ 
(show $ if B.null len then 0 else getIntFromLE len)\n | Begin a transaction.\n | Commit a transaction.\n | Rollback a transaction.\n \n helper\n \n debug $ \"session state change statement : \" ++ sql\n [Message]\n Frame\n | check a raw socket connectin.\n recv < -- [ Protocol Buffer Object ] < -- get < -- [ Byte Data ] < -- read < -- [ Socket ] \n > send -- > [ Protocol Buffer Object ] -- > put -- > [ Byte Data ] -- > write -- > [ Socket ] \n > [ Protocol Buffer Object ] \n > open package \n recv <-- [Protocol Buffer Object] <-- get <-- [Byte Data] <-- read <-- [Socket]\n> send --> [Protocol Buffer Object] --> put --> [Byte Data] --> write --> [Socket]\n> [Protocol Buffer Object]\n> open package"},"code":{"kind":"string","value":" | \n module : Database . MySQLX.NodeSession \n description : Session management \n copyright : ( c ) , 2017 \n license : MIT \n maintainer : \n stability : experimental \n portability : \n\n Session ( a.k.a . Connection ) \n\n\nmodule : Database.MySQLX.NodeSession\ndescription : Session management \ncopyright : (c) naoto ogawa, 2017\nlicense : MIT \nmaintainer : \nstability : experimental\nportability : \n\nSession (a.k.a. Connection)\n\n-}\n# LANGUAGE RecordWildCards #\n\nmodule DataBase.MySQLX.NodeSession \n (\n Message\n , NodeSessionInfo(..)\n , defaultNodeSesssionInfo\n , NodeSession(clientId, auth_data)\n , openNodeSession\n , closeNodeSession\n , begenTrxNodeSession\n , commitNodeSession\n , rollbackNodeSession\n , sendExpectNoError \n , sendExpectUnset\n , sendExpectClose \n , readMessagesR \n , writeMessageR\n , repeatreadMessagesR\n , isSocketConnected\n , readMsgLengthR \n , readAllMsgR\n ) where\n\nimport qualified Data.Binary as BIN\nimport qualified Data.ByteString as B\nimport qualified Data.ByteString.Lazy as BL \nimport qualified Data.Int as I\nimport Data.Typeable (TypeRep, Typeable, typeRep, typeOf)\nimport qualified Data.Word as W \n\nimport Network.Socket hiding (recv) \nimport Network.Socket.ByteString (send, sendAll, recv)\n\nimport Control.Exception.Safe (Exception, MonadThrow, SomeException, throwM)\nimport Control.Monad\nimport Control.Monad.Trans.Reader\nimport Control.Monad.IO.Class\n\nimport qualified Text.ProtocolBuffers as PB\nimport qualified Text.ProtocolBuffers.Basic as PBB\nimport qualified Text.ProtocolBuffers.Header as PBH\nimport qualified Text.ProtocolBuffers.TextMessage as PBT\nimport qualified Text.ProtocolBuffers.WireMessage as PBW\nimport qualified Text.ProtocolBuffers.Reflections as PBR\n\nimport qualified Com.Mysql.Cj.Mysqlx.Protobuf.Error as PE \nimport qualified Com.Mysql.Cj.Mysqlx.Protobuf.Frame as PFr\nimport qualified Com.Mysql.Cj.Mysqlx.Protobuf.AuthenticateContinue as PAC\nimport qualified Com.Mysql.Cj.Mysqlx.Protobuf.Ok as POk\n\nimport DataBase.MySQLX.Exception\nimport DataBase.MySQLX.Model\nimport DataBase.MySQLX.Util \n\n\ndata NodeSession = NodeSession\n ^ auth_data given by MySQL Server\n } deriving Show\n\ndata NodeSessionInfo = NodeSessionInfo \n } deriving Show\n\n * host : 127.0.0.1\n * port : 33600\ndefaultNodeSesssionInfo :: NodeSessionInfo \ndefaultNodeSesssionInfo = NodeSessionInfo \"127.0.0.1\" 33060 \"\" \"root\" \"\" \"\"\n\ntype Message = (Int, B.ByteString) \n\nopenNodeSession :: (MonadIO m, MonadThrow m) \nopenNodeSession sessionInfo = do\n\n socket <- _client (host sessionInfo) (port sessionInfo)\n let session = NodeSession socket (fromIntegral 0) BL.empty \n\n x <- runReaderT _negociate session\n\n (t, msg):xs <- runReaderT (_auth sessionInfo) session\n case t of \n TODO\n frm <- getFrame 
msg\n case PFr.payload frm of\n Just x -> do \n changed <- getSessionStateChanged $ BL.toStrict x\n ok <- mkAuthenticateOk $ snd $ head xs \n id <- getClientId changed\n return session {clientId = id} \n Nothing -> throwM $ XProtocolException \"Payload is Nothing\"\n TODO\n err <- getError msg\n throwM $ XProtocolError err\n _ -> error $ \"message type unknown, =\" ++ show t\n\ncloseNodeSession :: (MonadIO m, MonadThrow m) => NodeSession -> m ()\ncloseNodeSession nodeSess = do\n runReaderT (sendClose >> recieveOk) nodeSess\n liftIO . close $ _socket nodeSess\n debug \" NodeSession is closed . \"\n return ()\n\n_client :: (MonadIO m) => HostName -> PortNumber -> m Socket \n_client host port = liftIO $ withSocketsDo $ do\n addrInfo <- getAddrInfo Nothing (Just host) (Just $ show port)\n let serverAddr = head addrInfo\n sock <- socket (addrFamily serverAddr) Stream defaultProtocol\n connect sock (addrAddress serverAddr)\n return sock\n\n_auth :: (MonadIO m, MonadThrow m) => NodeSessionInfo -> ReaderT NodeSession m [Message]\n_auth NodeSessionInfo{..} = do\n sendAuthenticateStart user\n salt <- recieveSalt\n sendAutenticateContinue database user password salt\n msgs <- readMessagesR \n return msgs \n\nsendCapabilitiesGet :: (MonadIO m, MonadThrow m) => ReaderT NodeSession m () \nsendCapabilitiesGet = writeMessageR mkCapabilitiesGet \n\n_negociate :: (MonadIO m, MonadThrow m) => ReaderT NodeSession m [Message]\n_negociate = do\n sendCapabilitiesGet\n ret@(x:xs) <- readMessagesR \n if fst x == s_error \n then do\n msg <- getError $ snd x\n throwM $ XProtocolError msg\n else do\n return ret \n\nsendAuthenticateStart :: (MonadIO m) => String -> ReaderT NodeSession m () \nsendAuthenticateStart = writeMessageR . mkAuthenticateStart\n\nsendAutenticateContinue :: (MonadIO m) => String -> String -> String -> B.ByteString -> ReaderT NodeSession m ()\nsendAutenticateContinue database user password salt = writeMessageR $ mkAuthenticateContinue database user salt password \n\nsendClose :: (MonadIO m) => ReaderT NodeSession m () \nsendClose = writeMessageR mkClose\n\nrecieveSalt :: (MonadIO m, MonadThrow m) => ReaderT NodeSession m B.ByteString\nrecieveSalt = do\n msg <- getAuthenticateContinueR\n return $ BL.toStrict $ PAC.auth_data msg\n\nrecieveOk :: (MonadIO m, MonadThrow m) => ReaderT NodeSession m POk.Ok\nrecieveOk = getOkR\n\nsendExpectNoError :: (MonadIO m) => ReaderT NodeSession m () \nsendExpectNoError = writeMessageR mkExpectNoError\n\nsendExpectUnset :: (MonadIO m) => ReaderT NodeSession m () \nsendExpectUnset = writeMessageR mkExpectUnset\n\n\n interfaces as follows : \n\n openNodeSession = do \n sendAuthenticateStart username ( throw NetworkException ) : : aaa - > session - > param1 - > ( ) \n salt < - recieveSalt ( throw ) : : bbb - > session - > ByteString \n sendAuthenticateContinue schema user salt password ( throw NetworkException ) : : - > session - > param { } - > ( ) \n reciveAuthenticateOK ( throw AuthenticateException ) : : ddd - > session - > ( ) \n\n\ninterfaces as follows:\n\nopenNodeSession = do\n sendAuthenticateStart username (throw NetworkException) :: aaa -> session -> param1 -> ()\n salt <- recieveSalt (throw NetworkException) :: bbb -> session -> ByteString\n sendAuthenticateContinue schema user salt password (throw NetworkException) :: ccc -> session -> param{ } -> ()\n reciveAuthenticateOK (throw AuthenticateException) :: ddd -> session -> ()\n\n-}\n\n\n let salt = S.toStrict $ PAC.auth_data x\n\n Using NodeSession and making ReaderT\n\nwriteMessage :: (PBT.TextMsg msg\n 
,PBR.ReflectDescriptor msg\n ,PBW.Wire msg\n ,Show msg\n ,Typeable msg\n ,MonadIO m ) => NodeSession -> msg -> m () \nwriteMessage NodeSession{..} msg = do\n liftIO $ sendAll _socket (BL.toStrict $ (putMessageLengthLE (len + 1)) `BL.append` ty `BL.append` bytes)\n where \n bytes = PBW.messagePut msg \n len = fromIntegral $ PBW.messageSize msg \n ty = putMessageType $ fromIntegral $ getClientMsgTypeNo msg\n\nsendExpectClose :: (MonadIO m) => ReaderT NodeSession m () \nsendExpectClose = do\n nodeSess <- ask\n liftIO $ writeExpectClose nodeSess \n\nwriteExpectClose NodeSession{..} = do\n liftIO $ sendAll _socket (BL.toStrict $ (putMessageLengthLE (len + 1)) `BL.append` ty `BL.append` bytes)\n where \n bytes = PBW.messagePut mkClose\n len = fromIntegral 0 \n ty = putMessageType $ fromIntegral 25\n\nwriteMessageR :: (PBT.TextMsg msg\n ,PBR.ReflectDescriptor msg\n ,PBW.Wire msg\n ,Show msg\n ,Typeable msg\n ,MonadIO m ) => msg -> ReaderT NodeSession m () \nwriteMessageR msg = do \n session <- ask\n liftIO $ writeMessage session msg\n\ngetErrorR :: (MonadIO m, MonadThrow m) => ReaderT NodeSession m PE.Error \ngetErrorR = readOneMessageR >>= \\(_, msg) -> getError msg \n\ngetFrameR :: (MonadIO m, MonadThrow m) => ReaderT NodeSession m PFr.Frame \ngetFrameR = readOneMessageR >>= \\(_, msg) -> getFrame msg \n\ngetAuthenticateContinueR :: (MonadIO m, MonadThrow m) => ReaderT NodeSession m PAC.AuthenticateContinue\ngetAuthenticateContinueR = readOneMessageR >>= \\(_, msg) -> getAuthenticateContinue msg \n\ngetOkR :: (MonadIO m, MonadThrow m) => ReaderT NodeSession m POk.Ok\ngetOkR = readOneMessageR >>= \\(_, msg) -> getOk msg \n\ngetOneMessageR :: (MonadIO m\n ,MonadThrow m\n ,PBW.Wire a\n ,PBR.ReflectDescriptor a\n ,PBT.TextMsg a\n ,Typeable a) => ReaderT NodeSession m a\ngetOneMessageR = do \n session <- ask \n (_, msg) <- liftIO $ readOneMessage session\n getMessage msg \n\nreadMessages :: (MonadIO m) => NodeSession -> m [Message]\nreadMessages NodeSession{..} = do\n len <- runReaderT readMsgLengthR _socket\n debug $ \" 1st length = \" + + ( show $ getIntFromLE len )\n ret <- runReaderT (readAllMsgR (fromIntegral $ getIntFromLE len)) _socket\n return ret\n\nreadMessagesEither :: (MonadIO m) => NodeSession -> m (Either [Message] [Message])\nreadMessagesEither NodeSession{..} = do\n len <- runReaderT readMsgLengthR _socket\n debug $ \" 1st length = \" + + ( show $ getIntFromLE len )\n ret <- runReaderT (readAllMsgR (fromIntegral $ getIntFromLE len)) _socket\n if hasError ret \n Success\n where hasError r = length (filterError r) >= 1 \n filterError xs = filter (\\(t,m) -> t == s_error) xs\n\nreadMessagesR :: (MonadIO m) => ReaderT NodeSession m [Message] \nreadMessagesR = ask >>= liftIO . readMessages\n\nrepeatreadMessagesR :: (MonadIO m) \n ^ fst : Success messages , snd : Error messages\nrepeatreadMessagesR noError num acc = do\n if num == 0\n then return acc\n else do\n nodeSess <- ask\n r <- readMessagesEither nodeSess\n case r of\n Left m -> if noError \n then return (fst acc , m )\n else repeatreadMessagesR noError (num-1) (fst acc , snd acc ++ m)\n Right m -> repeatreadMessagesR noError (num-1) ((fst acc) ++ m , snd acc )\n\nreadOneMessage :: (MonadIO m) => NodeSession -> m Message\nreadOneMessage NodeSession{..} = runReaderT readOneMsgR _socket \n\nreadOneMessageR :: (MonadIO m) => ReaderT NodeSession m Message\nreadOneMessageR = ask >>= liftIO . 
readOneMessage \n\nreadNMessage :: (MonadIO m) => Int -> NodeSession -> m [Message]\nreadNMessage n NodeSession{..} = runReaderT (readNMsgR n) _socket \n\nreadNMessageR :: (MonadIO m) => Int -> ReaderT NodeSession m [Message]\nreadNMessageR n = ask >>= liftIO . readNMessage n\n\n\nreadSocketR :: (MonadIO m) => Int -> ReaderT Socket m B.ByteString\nreadSocketR len = ask >>= (\\x -> liftIO $ recv x len) \n\nreadMsgLengthR :: (MonadIO m) => ReaderT Socket m B.ByteString\nreadMsgLengthR = readSocketR 4\n\nreadMsgTypeR :: (MonadIO m) => ReaderT Socket m B.ByteString\nreadMsgTypeR = readSocketR 1\n\nreadNextMsgR :: (MonadIO m) => Int -> ReaderT Socket m (B.ByteString, B.ByteString)\nreadNextMsgR len = do \n bytes <- readSocketR (len + 4)\n return $ if B.length bytes == len \n then\n (bytes, B.empty)\n else \n B.splitAt len bytes\n\nreadOneMsgR :: (MonadIO m) => ReaderT Socket m Message\nreadOneMsgR = do\n l <- readMsgLengthR\n t <- readMsgTypeR\n m <- readSocketR $ fromIntegral $ (getIntFromLE l) -1 \n return (byte2Int t, m)\n\nreadNMsgR :: (MonadIO m) => Int -> ReaderT Socket m [Message]\nreadNMsgR n = sequence $ take n . repeat $ readOneMsgR\n\nreadAllMsgR :: (MonadIO m) => Int -> ReaderT Socket m [Message]\nreadAllMsgR len = do\n t <- readMsgTypeR\n let t' = byte2Int t \n if t' == s_sql_stmt_execute_ok \n SQL_STMT_EXECUTE_OK is the last message and has no data .\n return [(s_sql_stmt_execute_ok, B.empty)]\n else do\n debug $ \" type= \" + + ( show $ byte2Int t ) + + \" , reading len= \" + + ( show ( len-1 ` max ` 0 ) ) + + \" , plus 4 byte \"\n (msg, len) <- readNextMsgR (len-1)\n if B.null len \n then \n return [(t', msg)]\n else do\n msgs <- readAllMsgR $ fromIntegral $ getIntFromLE len\n return $ (t', msg): msgs \n\nbegenTrxNodeSession :: (MonadIO m, MonadThrow m) => NodeSession -> m W.Word64\nbegenTrxNodeSession = doSimpleSessionStateChangeStmt \"begin\"\n\ncommitNodeSession :: (MonadIO m, MonadThrow m) => NodeSession -> m W.Word64\ncommitNodeSession = doSimpleSessionStateChangeStmt \"commit\"\n\nrollbackNodeSession :: (MonadIO m, MonadThrow m) => NodeSession -> m W.Word64\nrollbackNodeSession = doSimpleSessionStateChangeStmt \"rollback\"\n\ndoSimpleSessionStateChangeStmt :: (MonadIO m, MonadThrow m) => String -> NodeSession -> m W.Word64\ndoSimpleSessionStateChangeStmt sql nodeSess = do \n runReaderT (writeMessageR (mkStmtExecuteSql sql [])) nodeSess\n if fst x == 1 \n then do\n msg <- getError $ snd x\n throwM $ XProtocolError msg\n else do\n ssc <- getPayloadSessionStateChanged frm\n getRowsAffected ssc\n\nisSocketConnected :: NodeSession -> IO Bool \nisSocketConnected NodeSession{..} = do \n isConnected _socket\n\n\n naming rule \n\n\n\n\n ( a ) client - > server message implementatin pattern \n\n 1 ) make pure function from some params to a PB object = = > hidden \n\n\n ex ) \n mkAuthenticateStart \n | \n V \n sendAuthenticateStart : : ( MonadIO m ) = > String - > ReaderT NodeSession m ( ) \n sendAuthenticateStart = writeMessageR . 
mkAuthenticateStart \n\n\n ( b ) server - > client message implemention patten \n\n 1 ) make pure function from ByteString to a PB object \n ex ) getAuthenticateContinue : : B.ByteString - > PAC.AuthenticateContinue = = > hidden \n getAuthenticateContinue ' = getMessage \n\n 2 ) make the above function to Reader Monad \n\n 3 ) make a function to get concrete data , not Protocol Buffer Objects = = > open \n ex ) recieveSalt : : ( MonadIO m ) = > ReaderT NodeSession m B.ByteString \n\n ( c ) client - > server - > client message implementation \n\n 1 ) combine ( a ) and ( b ) so that we get a turn - around function between client and server . \n\n\n\nnaming rule \n\n\n\n\n(a) client -> server message implementatin pattern\n\n1) make pure function from some params to a PB object ==> hidden\n\n2) make the above function to Reader Monad\n\nex)\nmkAuthenticateStart\n|\nV\nsendAuthenticateStart :: (MonadIO m) => String -> ReaderT NodeSession m () \nsendAuthenticateStart = writeMessageR . mkAuthenticateStart\n\n\n(b) server -> client message implemention patten\n\n1) make pure function from ByteString to a PB object \n ex) getAuthenticateContinue :: B.ByteString -> PAC.AuthenticateContinue ==> hidden\n getAuthenticateContinue' = getMessage \n\n2) make the above function to Reader Monad\n\n3) make a function to get concrete data, not Protocol Buffer Objects ==> open\n ex) recieveSalt :: (MonadIO m) => ReaderT NodeSession m B.ByteString\n\n(c) client -> server -> client message implementation\n\n1) combine (a) and (b) so that we get a turn-around function between client and server. \n\n\n-}\n"}}},{"rowIdx":610258,"cells":{"_id":{"kind":"string","value":"146dd9da32755c31eb2b8d40877a8ded50fee86dfa05ee58556bc2cf291c02bf"},"repository":{"kind":"string","value":"rubenbarroso/EOPL"},"name":{"kind":"string","value":"3_34.scm"},"content":{"kind":"string","value":"(load \"/Users/ruben/Dropbox/EOPL/src/interps/r5rs.scm\")\n(load \"/Users/ruben/Dropbox/EOPL/src/interps/define-datatype.scm\")\n(load \"/Users/ruben/Dropbox/EOPL/src/interps/sllgen.scm\")\n\n(define-datatype environment nameless-environment?\n (empty-nameless-env-record)\n (extended-nameless-env-record\n (vals vector?)\n (env nameless-environment?)))\n\n(define empty-nameless-env\n (lambda ()\n (empty-nameless-env-record)))\n\n(define extend-nameless-env\n (lambda (vals env)\n (extended-nameless-env-record (list->vector vals) env)))\n\n(define extend-nameless-env-recursively\n (lambda (proc-names bodies old-env)\n (let ((len (length proc-names)))\n (let ((vec (make-vector len)))\n (let ((env (extended-nameless-env-record\n vec old-env)))\n (for-each\n (lambda (pos body)\n (vector-set! 
vec pos (closure body env)))\n (iota len) bodies)\n env)))))\n\n;> (apply-nameless-env\n; (extend-nameless-env-recursively\n; '(even odd)\n; '((var-exp h) (var-exp j))\n; (extend-nameless-env\n ' ( 5 28 )\n; (empty-nameless-env)))\n 1 1 )\n28\n\n(define apply-nameless-env\n (lambda (env depth pos)\n (if (= pos -1)\n (eopl:error 'apply-nameless-env\n \"Error accessing free variable at (~s ~s)\"\n depth pos))\n (cases environment env\n (empty-nameless-env-record ()\n (eopl:error 'apply-nameless-env \"No binding for ~s\" sym))\n (extended-nameless-env-record (vals env)\n (if (= depth 0)\n (vector-ref vals pos)\n (apply-nameless-env env (- depth 1) pos))))))\n\n(define scanner-spec-3-13\n '((white-sp\n (whitespace) skip)\n (comment\n (\"%\" (arbno (not #\\newline))) skip)\n (identifier\n (letter (arbno (or letter digit \"?\"))) symbol)\n (number\n (digit (arbno digit)) number)))\n\n(define grammar-3-13\n '((program\n (expression)\n a-program)\n (expression\n (number)\n lit-exp)\n (expression\n (identifier)\n var-exp)\n (expression\n (\"lexvar\" \"(\" number number \")\")\n lexvar-exp)\n (expression\n (primitive \"(\" (separated-list expression \",\") \")\")\n primapp-exp)\n (expression\n (\"if\" expression \"then\" expression \"else\" expression)\n if-exp)\n (expression\n (\"let\" (arbno identifier \"=\" expression) \"in\" expression)\n let-exp)\n (expression\n (\"letrec\" (arbno identifier \"(\" (separated-list identifier \",\") \")\" \"=\" expression) \"in\" expression)\n letrec-exp)\n (expression\n (\"proc\" \"(\" (separated-list identifier \",\") \")\" expression)\n proc-exp)\n (expression\n (\"(\" expression (arbno expression) \")\")\n app-exp)\n (primitive\n (\"+\")\n add-prim)\n (primitive\n (\"-\")\n substract-prim)\n (primitive\n (\"*\")\n mult-prim)\n (primitive\n (\"add1\")\n incr-prim)\n (primitive\n (\"sub1\")\n decr-prim)\n (primitive\n (\"equal?\")\n equal-prim)\n (primitive\n (\"zero?\")\n zero-prim)\n (primitive\n (\"greater?\")\n greater-prim)\n (primitive\n (\"less?\")\n less-prim)))\n\n(define scan&parse\n (sllgen:make-string-parser\n scanner-spec-3-13\n grammar-3-13))\n\n(sllgen:make-define-datatypes scanner-spec-3-13 grammar-3-13)\n\n(define run\n (lambda (string)\n (eval-program\n (lexical-address-calc\n (scan&parse string)))))\n\n;helpers\n(define true-value?\n (lambda (x)\n (not (zero? x))))\n\n; the interpreter\n(define eval-program\n (lambda (pgm)\n (cases program pgm\n (a-program (body)\n (eval-expression body (init-nameless-env))))))\n\n(define eval-expression\n (lambda (exp env)\n (cases expression exp\n (lit-exp (datum) datum)\n (var-exp (id) (eopl:error\n 'eval-expression\n \"var-exp should not appear in the instrumented interpreter\"))\n (lexvar-exp (depth pos) (apply-nameless-env env depth pos))\n (primapp-exp (prim rands)\n (let ((args (eval-rands rands env)))\n (apply-primitive prim args)))\n (if-exp (test-exp true-exp false-exp)\n (if (true-value? (eval-expression test-exp env))\n (eval-expression true-exp env)\n (eval-expression false-exp env)))\n (let-exp (ids rands body)\n (let ((args (eval-rands rands env)))\n (eval-expression body (extend-nameless-env (list->vector args) env))))\n (proc-exp (ids body) (closure body env))\n (app-exp (rator rands)\n (let ((proc (eval-expression rator env))\n (args (eval-rands rands env)))\n (if (procval? 
proc)\n (apply-procval proc args)\n (eopl:error 'eval-expression\n \"Attempt to apply a non-procedure ~s\" proc))))\n (letrec-exp (proc-names idss bodies letrec-body)\n (eval-expression\n letrec-body\n (extend-nameless-env-recursively\n proc-names bodies env))))))\n\n(define eval-rands\n (lambda (rands env)\n (map (lambda (x) (eval-rand x env)) rands)))\n\n(define eval-rand\n (lambda (rand env)\n (eval-expression rand env)))\n\n(define apply-primitive\n (lambda (prim args)\n (cases primitive prim\n (add-prim () (+ (car args) (cadr args)))\n (substract-prim () (- (car args) (cadr args)))\n (mult-prim () (* (car args) (cadr args)))\n (incr-prim () (+ (car args) 1))\n (decr-prim () (- (car args) 1))\n (equal-prim () (if (= (car args) (cadr args)) 1 0))\n (zero-prim () (if (zero? (car args)) 1 0))\n (greater-prim () (if (> (car args) (cadr args)) 1 0))\n (less-prim () (if (< (car args) (cadr args)) 1 0)))))\n\n(define-datatype procval procval?\n (closure\n (body expression?)\n (env nameless-environment?)))\n\n(define apply-procval\n (lambda (proc args)\n (cases procval proc\n (closure (body env)\n (eval-expression body\n (extend-nameless-env args env))))))\n\n(define init-nameless-env\n (lambda ()\n (extend-nameless-env\n '(1 5 10)\n (empty-nameless-env))))\n\n;Helper procedures from exercise 1.31\n\n(define make-lexical-address\n (lambda (v d p)\n (list v ': d p)))\n\n(define get-v\n (lambda (address)\n (car address)))\n\n(define get-d\n (lambda (address)\n (caddr address)))\n\n(define get-p\n (lambda (address)\n (cadddr address)))\n\n(define increment-depth\n (lambda (address)\n (make-lexical-address (get-v address)\n (+ 1 (get-d address))\n (get-p address))))\n\n(define get-lexical-address\n (lambda (exp addresses)\n (define iter\n (lambda (lst)\n (cond ((null? lst) (make-lexical-address exp -1 -1))\n ((eqv? exp (get-v (car lst))) (car lst))\n (else (get-lexical-address exp (cdr lst))))))\n (iter addresses)))\n\n(define index-of\n (lambda (v declarations)\n (define helper\n (lambda (lst index)\n (cond ((null? lst) 'free)\n ((eqv? (car lst) v) index)\n (else (helper (cdr lst) (+ index 1))))))\n (helper declarations 0)))\n\n(define cross-contour\n (lambda (declarations addresses)\n (let ((bound (filter-bound declarations))\n (free (filter-free declarations addresses)))\n (append bound free))))\n\n(define filter-bound\n (lambda (declarations)\n (map (lambda (decl)\n (make-lexical-address decl\n 0\n (index-of decl declarations)))\n declarations)))\n\n(define filter-free\n (lambda (declarations addresses)\n (define iter\n (lambda (lst)\n (cond ((null? 
lst) '())\n ((not (memq (get-v (car lst)) declarations))\n (cons (increment-depth (car lst))\n (iter (cdr lst))))\n (else (iter (cdr lst))))))\n (iter addresses)))\n\n(define lexical-address-calc-helper\n (lambda (exp addresses)\n (cases expression exp\n (lit-exp (datum)\n (lit-exp datum))\n (var-exp (id)\n (let ((lexical-address (get-lexical-address id addresses)))\n (lexvar-exp (get-d lexical-address)\n (get-p lexical-address))))\n (lexvar-exp (depth pos)\n (lexvar-exp depth pos))\n (primapp-exp (prim rands)\n (primapp-exp prim\n (map (lambda (rand)\n (lexical-address-calc-helper rand addresses))\n rands)))\n (if-exp (test-exp true-exp false-exp)\n (if-exp (lexical-address-calc-helper test-exp addresses)\n (lexical-address-calc-helper true-exp addresses)\n (lexical-address-calc-helper false-exp addresses)))\n (let-exp (ids rands body)\n (let-exp ids\n (map (lambda (rand)\n (lexical-address-calc-helper rand addresses))\n rands)\n (lexical-address-calc-helper\n body\n (cross-contour ids addresses))))\n (proc-exp (ids body)\n (proc-exp ids\n (lexical-address-calc-helper\n body\n (cross-contour ids addresses))))\n (app-exp (rator rands)\n (app-exp (lexical-address-calc-helper\n rator\n addresses)\n (map (lambda (rand)\n (lexical-address-calc-helper rand addresses))\n rands)))\n (letrec-exp (proc-names idss bodies letrec-body)\n (let ((new-addresses (cross-contour proc-names addresses)))\n (letrec-exp proc-names\n idss\n (map (lambda (ids body)\n (lexical-address-calc-helper\n body\n (cross-contour ids new-addresses)))\n idss\n bodies)\n (lexical-address-calc-helper\n letrec-body\n new-addresses)))))))\n\n(define letrec-cross-contour\n (lambda (proc-names idss addresses)\n (define iter\n (lambda (the-ids the-addresses)\n (if (null? the-ids)\n the-addresses\n (iter (cdr the-ids) (cross-contour (car the-ids) the-addresses)))))\n (iter idss (cross-contour proc-names addresses))))\n\n(define lexical-address-calc\n (lambda (pgm)\n (a-program\n (cases program pgm\n (a-program (body)\n (lexical-address-calc-helper body '()))))))\n\n;> (lexical-address-calc\n; (scan&parse\n; \"letrec\n even(x ) = if ) then 1 else ( odd ) )\n odd(x ) = if ) then 0 else ( even ) )\n; in (odd 13)\"))\n;(a-program\n ( letrec - exp\n; (even odd)\n; ((x) (x))\n; ((if-exp\n ( primapp - exp ( zero - prim ) ( ( lexvar - exp 0 0 ) ) )\n; (lit-exp 1)\n; (app-exp\n; (lexvar-exp 1 1)\n; ((primapp-exp (decr-prim) ((lexvar-exp 0 0))))))\n; (if-exp\n ( primapp - exp ( zero - prim ) ( ( lexvar - exp 0 0 ) ) )\n; (lit-exp 0)\n; (app-exp\n; (lexvar-exp 1 0)\n; ((primapp-exp (decr-prim) ((lexvar-exp 0 0)))))))\n; (app-exp (lexvar-exp 0 1) ((lit-exp 13)))))\n;\n;> (run\n; \"letrec\n even(x ) = if ) then 1 else ( odd ) )\n odd(x ) = if ) then 0 else ( even ) )\n; in (odd 13)\")\n1\n;> (run\n; \"letrec\n even(x ) = if ) then 1 else ( odd ) )\n odd(x ) = if ) then 0 else ( even ) )\n in ( even 13 ) \" )\n0\n;> (run\n; \"letrec\n fact(x ) = if ) then 1 else * ( x,(fact ) ) )\n in ( fact 6 ) \" )\n720\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/rubenbarroso/EOPL/f9b3c03c2fcbaddf64694ee3243d54be95bfe31d/src/chapter3/3_34.scm"},"language":{"kind":"string","value":"scheme"},"comments":{"kind":"string","value":"> (apply-nameless-env\n (extend-nameless-env-recursively\n '(even odd)\n '((var-exp h) (var-exp j))\n (extend-nameless-env\n (empty-nameless-env)))\nhelpers\n the interpreter\nHelper procedures from exercise 1.31\n> (lexical-address-calc\n (scan&parse\n \"letrec\n in (odd 
13)\"))\n(a-program\n (even odd)\n ((x) (x))\n ((if-exp\n (lit-exp 1)\n (app-exp\n (lexvar-exp 1 1)\n ((primapp-exp (decr-prim) ((lexvar-exp 0 0))))))\n (if-exp\n (lit-exp 0)\n (app-exp\n (lexvar-exp 1 0)\n ((primapp-exp (decr-prim) ((lexvar-exp 0 0)))))))\n (app-exp (lexvar-exp 0 1) ((lit-exp 13)))))\n\n> (run\n \"letrec\n in (odd 13)\")\n> (run\n \"letrec\n> (run\n \"letrec"},"code":{"kind":"string","value":"(load \"/Users/ruben/Dropbox/EOPL/src/interps/r5rs.scm\")\n(load \"/Users/ruben/Dropbox/EOPL/src/interps/define-datatype.scm\")\n(load \"/Users/ruben/Dropbox/EOPL/src/interps/sllgen.scm\")\n\n(define-datatype environment nameless-environment?\n (empty-nameless-env-record)\n (extended-nameless-env-record\n (vals vector?)\n (env nameless-environment?)))\n\n(define empty-nameless-env\n (lambda ()\n (empty-nameless-env-record)))\n\n(define extend-nameless-env\n (lambda (vals env)\n (extended-nameless-env-record (list->vector vals) env)))\n\n(define extend-nameless-env-recursively\n (lambda (proc-names bodies old-env)\n (let ((len (length proc-names)))\n (let ((vec (make-vector len)))\n (let ((env (extended-nameless-env-record\n vec old-env)))\n (for-each\n (lambda (pos body)\n (vector-set! vec pos (closure body env)))\n (iota len) bodies)\n env)))))\n\n ' ( 5 28 )\n 1 1 )\n28\n\n(define apply-nameless-env\n (lambda (env depth pos)\n (if (= pos -1)\n (eopl:error 'apply-nameless-env\n \"Error accessing free variable at (~s ~s)\"\n depth pos))\n (cases environment env\n (empty-nameless-env-record ()\n (eopl:error 'apply-nameless-env \"No binding for ~s\" sym))\n (extended-nameless-env-record (vals env)\n (if (= depth 0)\n (vector-ref vals pos)\n (apply-nameless-env env (- depth 1) pos))))))\n\n(define scanner-spec-3-13\n '((white-sp\n (whitespace) skip)\n (comment\n (\"%\" (arbno (not #\\newline))) skip)\n (identifier\n (letter (arbno (or letter digit \"?\"))) symbol)\n (number\n (digit (arbno digit)) number)))\n\n(define grammar-3-13\n '((program\n (expression)\n a-program)\n (expression\n (number)\n lit-exp)\n (expression\n (identifier)\n var-exp)\n (expression\n (\"lexvar\" \"(\" number number \")\")\n lexvar-exp)\n (expression\n (primitive \"(\" (separated-list expression \",\") \")\")\n primapp-exp)\n (expression\n (\"if\" expression \"then\" expression \"else\" expression)\n if-exp)\n (expression\n (\"let\" (arbno identifier \"=\" expression) \"in\" expression)\n let-exp)\n (expression\n (\"letrec\" (arbno identifier \"(\" (separated-list identifier \",\") \")\" \"=\" expression) \"in\" expression)\n letrec-exp)\n (expression\n (\"proc\" \"(\" (separated-list identifier \",\") \")\" expression)\n proc-exp)\n (expression\n (\"(\" expression (arbno expression) \")\")\n app-exp)\n (primitive\n (\"+\")\n add-prim)\n (primitive\n (\"-\")\n substract-prim)\n (primitive\n (\"*\")\n mult-prim)\n (primitive\n (\"add1\")\n incr-prim)\n (primitive\n (\"sub1\")\n decr-prim)\n (primitive\n (\"equal?\")\n equal-prim)\n (primitive\n (\"zero?\")\n zero-prim)\n (primitive\n (\"greater?\")\n greater-prim)\n (primitive\n (\"less?\")\n less-prim)))\n\n(define scan&parse\n (sllgen:make-string-parser\n scanner-spec-3-13\n grammar-3-13))\n\n(sllgen:make-define-datatypes scanner-spec-3-13 grammar-3-13)\n\n(define run\n (lambda (string)\n (eval-program\n (lexical-address-calc\n (scan&parse string)))))\n\n(define true-value?\n (lambda (x)\n (not (zero? 
x))))\n\n(define eval-program\n (lambda (pgm)\n (cases program pgm\n (a-program (body)\n (eval-expression body (init-nameless-env))))))\n\n(define eval-expression\n (lambda (exp env)\n (cases expression exp\n (lit-exp (datum) datum)\n (var-exp (id) (eopl:error\n 'eval-expression\n \"var-exp should not appear in the instrumented interpreter\"))\n (lexvar-exp (depth pos) (apply-nameless-env env depth pos))\n (primapp-exp (prim rands)\n (let ((args (eval-rands rands env)))\n (apply-primitive prim args)))\n (if-exp (test-exp true-exp false-exp)\n (if (true-value? (eval-expression test-exp env))\n (eval-expression true-exp env)\n (eval-expression false-exp env)))\n (let-exp (ids rands body)\n (let ((args (eval-rands rands env)))\n (eval-expression body (extend-nameless-env (list->vector args) env))))\n (proc-exp (ids body) (closure body env))\n (app-exp (rator rands)\n (let ((proc (eval-expression rator env))\n (args (eval-rands rands env)))\n (if (procval? proc)\n (apply-procval proc args)\n (eopl:error 'eval-expression\n \"Attempt to apply a non-procedure ~s\" proc))))\n (letrec-exp (proc-names idss bodies letrec-body)\n (eval-expression\n letrec-body\n (extend-nameless-env-recursively\n proc-names bodies env))))))\n\n(define eval-rands\n (lambda (rands env)\n (map (lambda (x) (eval-rand x env)) rands)))\n\n(define eval-rand\n (lambda (rand env)\n (eval-expression rand env)))\n\n(define apply-primitive\n (lambda (prim args)\n (cases primitive prim\n (add-prim () (+ (car args) (cadr args)))\n (substract-prim () (- (car args) (cadr args)))\n (mult-prim () (* (car args) (cadr args)))\n (incr-prim () (+ (car args) 1))\n (decr-prim () (- (car args) 1))\n (equal-prim () (if (= (car args) (cadr args)) 1 0))\n (zero-prim () (if (zero? (car args)) 1 0))\n (greater-prim () (if (> (car args) (cadr args)) 1 0))\n (less-prim () (if (< (car args) (cadr args)) 1 0)))))\n\n(define-datatype procval procval?\n (closure\n (body expression?)\n (env nameless-environment?)))\n\n(define apply-procval\n (lambda (proc args)\n (cases procval proc\n (closure (body env)\n (eval-expression body\n (extend-nameless-env args env))))))\n\n(define init-nameless-env\n (lambda ()\n (extend-nameless-env\n '(1 5 10)\n (empty-nameless-env))))\n\n\n(define make-lexical-address\n (lambda (v d p)\n (list v ': d p)))\n\n(define get-v\n (lambda (address)\n (car address)))\n\n(define get-d\n (lambda (address)\n (caddr address)))\n\n(define get-p\n (lambda (address)\n (cadddr address)))\n\n(define increment-depth\n (lambda (address)\n (make-lexical-address (get-v address)\n (+ 1 (get-d address))\n (get-p address))))\n\n(define get-lexical-address\n (lambda (exp addresses)\n (define iter\n (lambda (lst)\n (cond ((null? lst) (make-lexical-address exp -1 -1))\n ((eqv? exp (get-v (car lst))) (car lst))\n (else (get-lexical-address exp (cdr lst))))))\n (iter addresses)))\n\n(define index-of\n (lambda (v declarations)\n (define helper\n (lambda (lst index)\n (cond ((null? lst) 'free)\n ((eqv? (car lst) v) index)\n (else (helper (cdr lst) (+ index 1))))))\n (helper declarations 0)))\n\n(define cross-contour\n (lambda (declarations addresses)\n (let ((bound (filter-bound declarations))\n (free (filter-free declarations addresses)))\n (append bound free))))\n\n(define filter-bound\n (lambda (declarations)\n (map (lambda (decl)\n (make-lexical-address decl\n 0\n (index-of decl declarations)))\n declarations)))\n\n(define filter-free\n (lambda (declarations addresses)\n (define iter\n (lambda (lst)\n (cond ((null? 
lst) '())\n ((not (memq (get-v (car lst)) declarations))\n (cons (increment-depth (car lst))\n (iter (cdr lst))))\n (else (iter (cdr lst))))))\n (iter addresses)))\n\n(define lexical-address-calc-helper\n (lambda (exp addresses)\n (cases expression exp\n (lit-exp (datum)\n (lit-exp datum))\n (var-exp (id)\n (let ((lexical-address (get-lexical-address id addresses)))\n (lexvar-exp (get-d lexical-address)\n (get-p lexical-address))))\n (lexvar-exp (depth pos)\n (lexvar-exp depth pos))\n (primapp-exp (prim rands)\n (primapp-exp prim\n (map (lambda (rand)\n (lexical-address-calc-helper rand addresses))\n rands)))\n (if-exp (test-exp true-exp false-exp)\n (if-exp (lexical-address-calc-helper test-exp addresses)\n (lexical-address-calc-helper true-exp addresses)\n (lexical-address-calc-helper false-exp addresses)))\n (let-exp (ids rands body)\n (let-exp ids\n (map (lambda (rand)\n (lexical-address-calc-helper rand addresses))\n rands)\n (lexical-address-calc-helper\n body\n (cross-contour ids addresses))))\n (proc-exp (ids body)\n (proc-exp ids\n (lexical-address-calc-helper\n body\n (cross-contour ids addresses))))\n (app-exp (rator rands)\n (app-exp (lexical-address-calc-helper\n rator\n addresses)\n (map (lambda (rand)\n (lexical-address-calc-helper rand addresses))\n rands)))\n (letrec-exp (proc-names idss bodies letrec-body)\n (let ((new-addresses (cross-contour proc-names addresses)))\n (letrec-exp proc-names\n idss\n (map (lambda (ids body)\n (lexical-address-calc-helper\n body\n (cross-contour ids new-addresses)))\n idss\n bodies)\n (lexical-address-calc-helper\n letrec-body\n new-addresses)))))))\n\n(define letrec-cross-contour\n (lambda (proc-names idss addresses)\n (define iter\n (lambda (the-ids the-addresses)\n (if (null? the-ids)\n the-addresses\n (iter (cdr the-ids) (cross-contour (car the-ids) the-addresses)))))\n (iter idss (cross-contour proc-names addresses))))\n\n(define lexical-address-calc\n (lambda (pgm)\n (a-program\n (cases program pgm\n (a-program (body)\n (lexical-address-calc-helper body '()))))))\n\n even(x ) = if ) then 1 else ( odd ) )\n odd(x ) = if ) then 0 else ( even ) )\n ( letrec - exp\n ( primapp - exp ( zero - prim ) ( ( lexvar - exp 0 0 ) ) )\n ( primapp - exp ( zero - prim ) ( ( lexvar - exp 0 0 ) ) )\n even(x ) = if ) then 1 else ( odd ) )\n odd(x ) = if ) then 0 else ( even ) )\n1\n even(x ) = if ) then 1 else ( odd ) )\n odd(x ) = if ) then 0 else ( even ) )\n in ( even 13 ) \" )\n0\n fact(x ) = if ) then 1 else * ( x,(fact ) ) )\n in ( fact 6 ) \" )\n720\n"}}},{"rowIdx":610259,"cells":{"_id":{"kind":"string","value":"ad9c4785c7b740ba34b9f2dc0d64d14cbfdc8c0ec6ff040bd568e927bec1d9cd"},"repository":{"kind":"string","value":"jappeace/awesome-project-name"},"name":{"kind":"string","value":"frontend.hs"},"content":{"kind":"string","value":"module Main where\n\nimport qualified Awe.Front.Main as App\nimport Reflex.Dom\n\nmain :: IO ()\nmain = mainWidget $ App.main $ App.IniState Nothing\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/jappeace/awesome-project-name/e80a52dc2673c748a922ec19945cf75368aa3a53/frontend/app/frontend.hs"},"language":{"kind":"string","value":"haskell"},"comments":{"kind":"string","value":""},"code":{"kind":"string","value":"module Main where\n\nimport qualified Awe.Front.Main as App\nimport Reflex.Dom\n\nmain :: IO ()\nmain = mainWidget $ App.main $ App.IniState 
Nothing\n"}}},{"rowIdx":610260,"cells":{"_id":{"kind":"string","value":"0b661532ff59e0c8aa19c3913ebeb1f8d4a5a1d3f696805485357057203940b2"},"repository":{"kind":"string","value":"haskell-gi/gi-gtk-examples"},"name":{"kind":"string","value":"FastDraw.hs"},"content":{"kind":"string","value":"{-# LANGUAGE OverloadedStrings #-}\n{-# LANGUAGE PatternSynonyms #-}\n{-# LANGUAGE ScopedTypeVariables #-}\n{-# OPTIONS -O #-}\n\n-- Example of an drawing graphics onto a canvas.\nimport Control.Applicative\nimport Prelude\nimport Data.IORef\nimport Graphics.Rendering.Cairo\nimport Foreign (allocaArray)\nimport Graphics.Rendering.Cairo.Types (Cairo(..), PixelData)\nimport Foreign.Storable (Storable(..))\nimport Foreign.C (CUChar)\nimport qualified GI.Gtk as GI (init)\nimport GI.Gtk\n (dialogRun, widgetShow, boxPackStart, onWidgetDraw,\n widgetQueueDraw, setWidgetHeightRequest, setWidgetWidthRequest,\n drawingAreaNew, dialogGetContentArea, dialogAddButton,\n dialogNew)\nimport GI.Gtk.Enums (ResponseType(..))\nimport GI.GLib (pattern PRIORITY_LOW, idleAdd)\nimport GI.Cairo.Structs.Context (Context(..))\nimport Control.Monad.Trans.Reader (runReaderT)\nimport Foreign.Ptr (castPtr)\nimport Graphics.Rendering.Cairo.Internal (Render(..))\nimport Data.GI.Base.ManagedPtr (withManagedPtr)\n\n\nmain = do\n GI.init Nothing\n dia <- dialogNew\n dialogAddButton dia \"_OK\" (fromIntegral $ fromEnum ResponseTypeOk)\n contain <- dialogGetContentArea dia\n canvas <- drawingAreaNew\n let w = 256\n h = 256\n chan = 4\n row = w * chan\n stride = row\n setWidgetWidthRequest canvas 256\n setWidgetHeightRequest canvas 256\n\n -- create the Pixbuf\n allocaArray (w * h * chan) $ \\ pbData -> do\n\n -- draw into the Pixbuf\n doFromTo 0 (h-1) $ \\y ->\n doFromTo 0 (w-1) $ \\x -> do\n pokeByteOff pbData (2+x*chan+y*row) (fromIntegral x :: CUChar)\n pokeByteOff pbData (1+x*chan+y*row) (fromIntegral y :: CUChar)\n pokeByteOff pbData (0+x*chan+y*row) (0 :: CUChar)\n\n -- a function to update the Pixbuf\n blueRef <- newIORef (0 :: CUChar)\n dirRef <- newIORef True\n let updateBlue = do\n blue <- readIORef blueRef\n -- print blue\n doFromTo 0 (h-1) $ \\y ->\n doFromTo 0 (w-1) $ \\x ->\n pokeByteOff pbData (0+x*chan+y*row) blue -- unchecked indexing\n\n -- arrange for the canvas to be redrawn now that we've changed\n -- the Pixbuf\n widgetQueueDraw canvas\n\n -- update the blue state ready for next time\n dir <- readIORef dirRef\n let diff = 1\n let blue' = if dir then blue+diff else blue-diff\n if dir then\n if blue<=maxBound-diff then writeIORef blueRef blue' else\n writeIORef blueRef maxBound >> modifyIORef dirRef not\n else\n if blue>=minBound+diff then writeIORef blueRef blue' else\n writeIORef blueRef minBound >> modifyIORef dirRef not\n return True\n\n idleAdd PRIORITY_LOW updateBlue\n onWidgetDraw canvas $ \\(Context fp) -> withManagedPtr fp $ \\p -> (`runReaderT` Cairo (castPtr p)) $ runRender $ do\n updateCanvas pbData w h stride\n return True\n boxPackStart contain canvas True True 0\n widgetShow canvas\n dialogRun dia\n return ()\n\nupdateCanvas :: PixelData -> Int -> Int -> Int -> Render ()\nupdateCanvas pb w h stride = do\n s <- liftIO $ createImageSurfaceForData pb FormatRGB24 w h stride\n setSourceSurface s 0 0\n paint\n\n-- GHC is much better at opimising loops like this :\n--\n-- > doFromTo 0 255 $ \\y - >\n-- > doFromTo 0 255 $ \\x - > do ...\n--\n-- Than it is at optimising loops like this:\n--\n-- > sequence_ [ do ...\n-- > | x < - [ 0 .. 255 ]\n-- > , y < - [ 0 .. 
255 ] ]\n--\n The first kind of loop runs significantly faster ( with GHC 6.2 and 6.4 )\n\n# INLINE doFromTo #\n-- do the action for [from..to], ie it's inclusive.\ndoFromTo :: Int -> Int -> (Int -> IO ()) -> IO ()\ndoFromTo from to action =\n let loop n | n > to = return ()\n | otherwise = do action n\n loop (n+1)\n in loop from\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/haskell-gi/gi-gtk-examples/4c4f06dc91fbb9b9f50cdad295c8afe782e0bdec/fastdraw/FastDraw.hs"},"language":{"kind":"string","value":"haskell"},"comments":{"kind":"string","value":"# LANGUAGE OverloadedStrings #\n# OPTIONS -O #\n Example of an drawing graphics onto a canvas.\n print blue\n unchecked indexing\n arrange for the canvas to be redrawn now that we've changed\n update the blue state ready for next time\n\n\n Than it is at optimising loops like this:\n\n > sequence_ [ do ...\n\n do the action for [from..to], ie it's inclusive."},"code":{"kind":"string","value":"# LANGUAGE PatternSynonyms #\n# LANGUAGE ScopedTypeVariables #\n\nimport Control.Applicative\nimport Prelude\nimport Data.IORef\nimport Graphics.Rendering.Cairo\nimport Foreign (allocaArray)\nimport Graphics.Rendering.Cairo.Types (Cairo(..), PixelData)\nimport Foreign.Storable (Storable(..))\nimport Foreign.C (CUChar)\nimport qualified GI.Gtk as GI (init)\nimport GI.Gtk\n (dialogRun, widgetShow, boxPackStart, onWidgetDraw,\n widgetQueueDraw, setWidgetHeightRequest, setWidgetWidthRequest,\n drawingAreaNew, dialogGetContentArea, dialogAddButton,\n dialogNew)\nimport GI.Gtk.Enums (ResponseType(..))\nimport GI.GLib (pattern PRIORITY_LOW, idleAdd)\nimport GI.Cairo.Structs.Context (Context(..))\nimport Control.Monad.Trans.Reader (runReaderT)\nimport Foreign.Ptr (castPtr)\nimport Graphics.Rendering.Cairo.Internal (Render(..))\nimport Data.GI.Base.ManagedPtr (withManagedPtr)\n\n\nmain = do\n GI.init Nothing\n dia <- dialogNew\n dialogAddButton dia \"_OK\" (fromIntegral $ fromEnum ResponseTypeOk)\n contain <- dialogGetContentArea dia\n canvas <- drawingAreaNew\n let w = 256\n h = 256\n chan = 4\n row = w * chan\n stride = row\n setWidgetWidthRequest canvas 256\n setWidgetHeightRequest canvas 256\n\n create the Pixbuf\n allocaArray (w * h * chan) $ \\ pbData -> do\n\n draw into the Pixbuf\n doFromTo 0 (h-1) $ \\y ->\n doFromTo 0 (w-1) $ \\x -> do\n pokeByteOff pbData (2+x*chan+y*row) (fromIntegral x :: CUChar)\n pokeByteOff pbData (1+x*chan+y*row) (fromIntegral y :: CUChar)\n pokeByteOff pbData (0+x*chan+y*row) (0 :: CUChar)\n\n a function to update the Pixbuf\n blueRef <- newIORef (0 :: CUChar)\n dirRef <- newIORef True\n let updateBlue = do\n blue <- readIORef blueRef\n doFromTo 0 (h-1) $ \\y ->\n doFromTo 0 (w-1) $ \\x ->\n\n the Pixbuf\n widgetQueueDraw canvas\n\n dir <- readIORef dirRef\n let diff = 1\n let blue' = if dir then blue+diff else blue-diff\n if dir then\n if blue<=maxBound-diff then writeIORef blueRef blue' else\n writeIORef blueRef maxBound >> modifyIORef dirRef not\n else\n if blue>=minBound+diff then writeIORef blueRef blue' else\n writeIORef blueRef minBound >> modifyIORef dirRef not\n return True\n\n idleAdd PRIORITY_LOW updateBlue\n onWidgetDraw canvas $ \\(Context fp) -> withManagedPtr fp $ \\p -> (`runReaderT` Cairo (castPtr p)) $ runRender $ do\n updateCanvas pbData w h stride\n return True\n boxPackStart contain canvas True True 0\n widgetShow canvas\n dialogRun dia\n return ()\n\nupdateCanvas :: PixelData -> Int -> Int -> Int -> Render ()\nupdateCanvas pb w h stride = do\n s <- 
liftIO $ createImageSurfaceForData pb FormatRGB24 w h stride\n setSourceSurface s 0 0\n paint\n\n GHC is much better at opimising loops like this :\n > doFromTo 0 255 $ \\y - >\n > doFromTo 0 255 $ \\x - > do ...\n > | x < - [ 0 .. 255 ]\n > , y < - [ 0 .. 255 ] ]\n The first kind of loop runs significantly faster ( with GHC 6.2 and 6.4 )\n\n# INLINE doFromTo #\ndoFromTo :: Int -> Int -> (Int -> IO ()) -> IO ()\ndoFromTo from to action =\n let loop n | n > to = return ()\n | otherwise = do action n\n loop (n+1)\n in loop from\n"}}},{"rowIdx":610261,"cells":{"_id":{"kind":"string","value":"3ccad40dd1db3c7b2e9ee962c405a8f537247873317933be62d5a75a48ee543c"},"repository":{"kind":"string","value":"spell-music/csound-expression"},"name":{"kind":"string","value":"Pretty.hs"},"content":{"kind":"string","value":"module Csound.Dynamic.Render.Pretty(\n Doc, vcatSep,\n ppCsdFile, ppGen, ppNotes, ppInstr, ppStmt, ppTotalDur,\n PrettyE(..), PrettyShowE(..),\n ppE\n) where\n\nimport Control.Monad.Trans.State.Strict\nimport qualified Data.IntMap as IM\n\nimport Text.PrettyPrint.Leijen.Text\nimport Csound.Dynamic.Types\nimport Csound.Dynamic.Tfm.InferTypes qualified as R(Var(..))\nimport Data.Text (Text)\nimport Data.Text qualified as Text\nimport Text.Show.Pretty (ppShow)\nimport Data.Fix (foldFix)\nimport Data.ByteString.Base64 qualified as Base64\nimport Data.Text.Encoding qualified as Text\n\nvcatSep :: [Doc] -> Doc\nvcatSep = vcat . punctuate line\n\nbinaries, unaries :: Text -> [Doc] -> Doc\n\nbinaries op as = binary op (as !! 0) (as !! 1)\nunaries op as = unary op (as !! 0)\n\nbinary :: Text -> Doc -> Doc -> Doc\nbinary op a b = parens $ a <+> textStrict op <+> b\n\nunary :: Text -> Doc -> Doc\nunary op a = parens $ textStrict op <> a\n\nfunc :: Text -> Doc -> Doc\nfunc op a = textStrict op <> parens a\n\nppCsdFile :: Doc -> Doc -> Doc -> [Plugin] -> Doc\nppCsdFile flags orc sco plugins =\n tag \"CsoundSynthesizer\" $ vcatSep [\n tag \"CsOptions\" flags,\n tag \"CsInstruments\" orc,\n tag \"CsScore\" sco,\n ppPlugins plugins\n ]\n\nppPlugins :: [Plugin] -> Doc\nppPlugins plugins = vcatSep $ fmap (\\(Plugin name body) -> tag name (textStrict body)) plugins\n\ntag :: Text -> Doc -> Doc\ntag name content = vcatSep [\n char '<' <> textStrict name <> char '>',\n content,\n text \" textStrict name <> char '>']\n\nppNotes :: InstrId -> [CsdEvent] -> Doc\nppNotes instrId = vcat . fmap (ppNote instrId)\n\nppNote :: InstrId -> CsdEvent -> Doc\nppNote instrId evt = char 'i'\n <+> ppInstrId instrId\n <+> double (csdEventStart evt) <+> double (csdEventDur evt)\n <+> hsep (fmap ppPrim $ csdEventContent evt)\n\nppPrim :: Prim -> Doc\nppPrim x = case x of\n P n -> char 'p' <> int n\n PrimInstrId a -> ppInstrId a\n PString a -> int a\n PrimInt n -> int n\n PrimDouble d -> double d\n PrimString s -> dquotes $ textStrict s\n PrimVar targetRate v -> ppConverter targetRate (varRate v) $ ppVar v\n where\n ppConverter dst src t\n | dst == src = t\n | dst == Ar && src == Kr = a(t)\n | dst == Ar && src == Ir = a(k(t))\n | dst == Kr = k(t)\n | dst == Ir && src == Kr = i(t)\n | dst == Ir && src == Ar = i(k(t))\n | otherwise = t\n where\n tfm ch v = hcat [char ch, parens v]\n a = tfm 'a'\n k = tfm 'k'\n i = tfm 'i'\n\n\nppGen :: Int -> Gen -> Doc\nppGen tabId ft = char 'f'\n <> int tabId\n <+> int 0\n <+> (int $ genSize ft)\n <+> (ppGenId $ genId ft)\n <+> (maybe empty (textStrict . Text.pack . 
show) $ genFile ft)\n <+> (hsep $ map double $ genArgs ft)\n\nppGenId :: GenId -> Doc\nppGenId x = case x of\n IntGenId a -> int a\n StringGenId a -> dquotes $ textStrict a\n\nppInstr :: InstrId -> Doc -> Doc\nppInstr instrId body = vcat [\n text \"instr\" <+> ppInstrHeadId instrId,\n body,\n text \"endin\"]\n\nppInstrHeadId :: InstrId -> Doc\nppInstrHeadId x = case x of\n InstrId den nom -> int nom <> maybe empty ppAfterDot den\n InstrLabel name -> textStrict name\n where ppAfterDot a = textStrict $ Text.pack $ ('.': ) $ reverse $ show a\n\nppInstrId :: InstrId -> Doc\nppInstrId x = case x of\n InstrId den nom -> int nom <> maybe empty ppAfterDot den\n InstrLabel name -> dquotes $ textStrict name\n where ppAfterDot a = textStrict $ Text.pack $ ('.': ) $ reverse $ show a\n\ntype TabDepth = Int\n\nppStmt :: [R.Var] -> Exp R.Var -> State TabDepth Doc\nppStmt outs expr = maybe (ppExp (ppOuts outs) expr) id (maybeStringCopy outs expr)\n\nmaybeStringCopy :: [R.Var] -> Exp R.Var -> Maybe (State TabDepth Doc)\nmaybeStringCopy outs expr = case (outs, expr) of\n ([R.Var Sr _], ExpPrim (PrimVar _rate var)) -> Just $ tab $ ppStringCopy (ppOuts outs) (ppVar var)\n ([R.Var Sr _], ReadVar var) -> Just $ tab $ ppStringCopy (ppOuts outs) (ppVar var)\n ([], WriteVar outVar a) | varRate outVar == Sr -> Just $ tab $ ppStringCopy (ppVar outVar) (ppPrimOrVar a)\n ([R.Var Sr _], ReadArr var as) -> Just $ tab $ ppStringCopy (ppOuts outs) (ppReadArr var $ fmap ppPrimOrVar as)\n ([], WriteArr outVar bs a) | varRate outVar == Sr -> Just $ tab $ ppStringCopy (ppArrIndex outVar $ fmap ppPrimOrVar bs) (ppPrimOrVar a)\n _ -> Nothing\n\nppStringCopy :: Doc -> Doc -> Doc\nppStringCopy outs src = ppOpc outs \"strcpyk\" [src]\n\nppExp :: Doc -> Exp R.Var -> State TabDepth Doc\nppExp res expr = case fmap ppPrimOrVar expr of\n ExpPrim (PString n) -> tab $ ppStrget res n\n ExpPrim p -> tab $ res $= ppPrim p\n Tfm info [a, b] | isInfix info -> tab $ res $= binary (infoName info) a b\n Tfm info xs | isPrefix info -> tab $ res $= prefix (infoName info) xs\n Tfm info xs -> tab $ ppOpc res (infoName info) xs\n ConvertRate to from x -> tab $ ppConvertRate res to from x\n If _ifRate info t e -> tab $ ppIf res (ppCond info) t e\n ExpNum (PreInline op as) -> tab $ res $= ppNumOp op as\n WriteVar v a -> tab $ ppVar v $= a\n InitVar v a -> tab $ ppOpc (ppVar v) \"init\" [a]\n ReadVar v -> tab $ res $= ppVar v\n\n InitArr v as -> tab $ ppOpc (ppArrVar (length as) (ppVar v)) \"init\" as\n ReadArr v as -> tab $ if (varRate v /= Sr) then res $= ppReadArr v as else res <+> text \"strcpy\" <+> ppReadArr v as\n WriteArr v as b -> tab $ ppWriteArr v as b\n WriteInitArr v as b -> tab $ ppWriteInitArr v as b\n TfmArr isInit v op [a,b]| isInfix op -> tab $ ppTfmArrOut isInit v <+> binary (infoName op) a b\n TfmArr isInit v op args | isPrefix op -> tab $ ppTfmArrOut isInit v <+> prefix (infoName op) args\n TfmArr isInit v op xs -> tab $ ppOpc (ppTfmArrOut isInit v) (infoName op) xs\n\n InitPureArr _outRate _procRate initVals -> tab $ ppOpc (ppArrVar 1 res) \"fillarray\" initVals\n ReadPureArr outRate _procRate arr index -> tab $ if (outRate /= Sr) then res $= ppReadPureArr arr [index] else res <+> text \"strcpy\" <+> ppReadPureArr arr [index]\n\n IfBegin _ a -> succTab $ text \"if \" <> ppCond a <> text \" then\"\n IfBlock _ cond (CodeBlock th) -> tab $ ppIf1 res (ppCond cond) th\n IfElseBlock _ cond (CodeBlock th) (CodeBlock el) -> tab $ ppIf res (ppCond cond) th el\n-- ElseIfBegin a -> left >> (succTab $ text \"elseif \" <> ppCond a <> text \" 
then\")\n ElseBegin -> left >> (succTab $ text \"else\")\n IfEnd -> left >> (tab $ text \"endif\")\n UntilBlock _ cond (CodeBlock th) -> tab $ ppUntil res (ppCond cond) th\n WhileBlock _ cond (CodeBlock th) -> tab $ ppWhile res (ppCond cond) th\n WhileRefBlock var (CodeBlock th) -> tab $ ppWhileRef res var th\n\n UntilBegin _ a -> succTab $ text \"until \" <> ppCond a <> text \" do\"\n UntilEnd -> left >> (tab $ text \"od\")\n WhileBegin _ a -> succTab $ text \"while \" <> ppCond a <> text \" do\"\n WhileRefBegin var -> succTab $ text \"while \" <> ppVar var <+> equals <+> text \"1\" <+> text \"do\"\n WhileEnd -> left >> (tab $ text \"od\")\n InitMacrosString name initValue -> tab $ initMacros (textStrict name) (textStrict initValue)\n InitMacrosDouble name initValue -> tab $ initMacros (textStrict name) (double initValue)\n InitMacrosInt name initValue -> tab $ initMacros (textStrict name) (int initValue)\n ReadMacrosString name -> tab $ res <+> text \"strcpy\" <+> readMacro name\n ReadMacrosDouble name -> tab $ res $= readMacro name\n ReadMacrosInt name -> tab $ res $= readMacro name\n EmptyExp -> return empty\n Verbatim str -> return $ textStrict str\n\n Select _rate _n a -> tab $ res $= (\"SELECTS\" <+> a)\n Starts -> tab $ res $= \"STARTS\"\n Seq a b -> tab $ hsep [\"SEQ\", a, b]\n Ends _a -> tab $ \"ENDS\"\n ExpBool _ -> tab \"ExpBool\"\n\n -- x -> error $ \"unknown expression: \" ++ show x\n\n-- pp macros\n\nreadMacro :: Text -> Doc\nreadMacro name = char '$' <> textStrict name\n\ninitMacros :: Doc -> Doc -> Doc\ninitMacros name initValue = vcat\n [ text \"#ifndef\" <+> name\n , text \"#define \" <+> name <+> char '#' <> initValue <> char '#'\n , text \"#end\"\n ]\n\n-- pp arrays\n\nppTfmArrOut :: Bool -> Var -> Doc\nppTfmArrOut isInit v = ppVar v <> (if isInit then (text \"[]\") else empty)\n\nppArrIndex :: Var -> [Doc] -> Doc\nppArrIndex v as = ppVar v <> (hcat $ fmap brackets as)\n\nppArrVar :: Int -> Doc -> Doc\nppArrVar n v = v <> (hcat $ replicate n $ text \"[]\")\n\nppReadArr :: Var -> [Doc] -> Doc\nppReadArr v as = ppArrIndex v as\n\nppReadPureArr :: Doc -> [Doc] -> Doc\nppReadPureArr v as = v <> (hcat $ fmap brackets as)\n\nppWriteArr :: Var -> ArrIndex Doc -> Doc -> Doc\nppWriteArr v as b = ppArrIndex v as <+> equalsWord <+> b\n where equalsWord = if (varRate v == Sr) then text \"strcpy\" else equals\n\nppWriteInitArr :: Var -> [Doc] -> Doc -> Doc\nppWriteInitArr v as b = ppArrIndex v as <+> initWord <+> b\n where initWord = text $ if (varRate v == Sr) then \"strcpy\" else \"init\"\n\n-------------------------------------\n\ntab :: Monad m => Doc -> StateT TabDepth m Doc\ntab doc = fmap (shiftByTab doc) get\n\ntabWidth :: TabDepth\ntabWidth = 4\n\nshiftByTab :: Doc -> TabDepth -> Doc\nshiftByTab doc n\n | n == 0 = doc\n | otherwise = indent (tabWidth * n) doc\n\nleft :: State TabDepth ()\nleft = modify pred\n\nsuccTab :: Monad m => Doc -> StateT TabDepth m Doc\nsuccTab doc = do\n a <- tab doc\n modify succ\n return a\n\nprefix :: Text -> [Doc] -> Doc\nprefix name args = textStrict name <> tupled args\n\nppCond :: Inline CondOp Doc -> Doc\nppCond = ppInline ppCondOp\n\n($=) :: Doc -> Doc -> Doc\n($=) a b = a <+> equals <+> b\n\nppOuts :: [R.Var] -> Doc\nppOuts xs = hsep $ punctuate comma $ map ppRatedVar xs\n\nppPrimOrVar :: PrimOr R.Var -> Doc\nppPrimOrVar x = either ppPrim ppRatedVar $ unPrimOr x\n\nppStrget :: Doc -> Int -> Doc\nppStrget out n = ppOpc out \"strget\" [char 'p' <> int n]\n\nppIf :: Doc -> Doc -> Doc -> Doc -> Doc\nppIf res p t e = vcat\n [ text \"if\" <+> 
p <+> text \"then\"\n , text \" \" <> res <+> char '=' <+> t\n , text \"else\"\n , text \" \" <> res <+> char '=' <+> e\n , text \"endif\"\n ]\n\nppIf1, ppWhile, ppUntil :: Doc -> Doc -> Doc -> Doc\n\nppIf1 = ppIfBy \"if\"\nppWhile = ppIfBy \"while\"\nppUntil = ppIfBy \"until\"\n\nppIfBy :: Text -> Doc -> Doc -> Doc -> Doc\nppIfBy leadTag res p t = vcat\n [ textStrict leadTag <+> p <+> text \"then\"\n , text \" \" <> res <+> char '=' <+> t\n , text \"endif\"\n ]\n\nppWhileRef :: Doc -> Var -> Doc -> Doc\nppWhileRef res p t = vcat\n [ textStrict \"while\" <+> ppVar p <+> text \"then\"\n , text \" \" <> res <+> char '=' <+> t\n , text \"endif\"\n ]\n\nppOpc :: Doc -> Text -> [Doc] -> Doc\nppOpc out name xs = out <+> ppProc name xs\n\nppProc :: Text -> [Doc] -> Doc\nppProc name xs = textStrict name <+> (hsep $ punctuate comma xs)\n\nppVar :: Var -> Doc\nppVar v = case v of\n Var ty rate name -> ppVarType ty <> ppRate rate <> textStrict (Text.cons (varPrefix ty) name)\n VarVerbatim _ name -> textStrict name\n\nvarPrefix :: VarType -> Char\nvarPrefix x = case x of\n LocalVar -> 'l'\n GlobalVar -> 'g'\n\nppVarType :: VarType -> Doc\nppVarType x = case x of\n LocalVar -> empty\n GlobalVar -> char 'g'\n\nppConvertRate :: Doc -> Rate -> Maybe Rate -> Doc -> Doc\nppConvertRate out to from var = case (to, from) of\n (Ar, Just Kr) -> upsamp var\n (Ar, Just Ir) -> upsamp $ toK var\n (Kr, Just Ar) -> downsamp var\n (Kr, Just Ir) -> out $= var\n (Ir, Just Ar) -> downsamp var\n (Ir, Just Kr) -> out $= toI var\n (Ar, Nothing) -> out $= toA var\n (Kr, Nothing) -> out $= toK var\n (Ir, Nothing) -> out $= toI var\n (a, Just b) | a == b -> out $= var\n (a, b) -> error $ \"bug: no rate conversion from \" ++ show b ++ \" to \" ++ show a ++ \".\"\n where\n upsamp x = ppOpc out \"upsamp\" [x]\n downsamp x = ppOpc out \"downsamp\" [x]\n toA = func \"a\"\n toK = func \"k\"\n toI = func \"i\"\n\n-- expressions\n\nppInline :: (a -> [Doc] -> Doc) -> Inline a Doc -> Doc\nppInline ppNode a = iter $ inlineExp a\n where iter x = case x of\n InlinePrim n -> inlineEnv a IM.! n\n InlineExp op args -> ppNode op $ fmap iter args\n\n-- booleans\n\nppCondOp :: CondOp -> [Doc] -> Doc\nppCondOp op = case op of\n TrueOp -> const $ text \"(1 == 1)\"\n FalseOp -> const $ text \"(0 == 1)\"\n And -> bi \"&&\"\n Or -> bi \"||\"\n Equals -> bi \"==\"\n NotEquals -> bi \"!=\"\n Less -> bi \"<\"\n Greater -> bi \">\"\n LessEquals -> bi \"<=\"\n GreaterEquals -> bi \">=\"\n where bi = binaries\n\n-- numeric\n\nppNumOp :: NumOp -> [Doc] -> Doc\nppNumOp op = case op of\n Add -> bi \"+\"\n Sub -> bi \"-\"\n Mul -> bi \"*\"\n Div -> bi \"/\"\n Neg -> uno \"-\"\n Pow -> bi \"^\"\n Mod -> bi \"%\"\n where\n bi = binaries\n uno = unaries\n\nppRatedVar :: R.Var -> Doc\nppRatedVar v = ppRate (R.varType v) <> int (R.varId v)\n\nppRate :: Rate -> Doc\nppRate x = case removeArrRate x of\n Sr -> char 'S'\n _ -> phi x\n where phi = textStrict . Text.toLower . Text.pack . 
show\n\nppTotalDur :: Double -> Doc\nppTotalDur d = text \"f0\" <+> double d\n\n--------------------------------------------------------------\n-- debug\n\nnewtype PrettyShowE = PrettyShowE E\nnewtype PrettyE = PrettyE E\n\ninstance Show PrettyShowE where\n show (PrettyShowE expr) = ppShow expr\n\ninstance Show PrettyE where\n show (PrettyE expr) = show $ ppE expr\n\nppE :: E -> Doc\nppE = foldFix go\n where\n go :: RatedExp Doc -> Doc\n go x = fromExp (fromInfo x) x\n\n fromInfo :: RatedExp Doc -> Doc\n fromInfo RatedExp{..} =\n hsep\n [ ppHash ratedExpHash\n , maybe mempty ppRate ratedExpRate\n , maybe mempty pretty ratedExpDepends\n ]\n\n ppHash = textStrict . Text.take 4 . Text.decodeUtf8 . Base64.encode\n\n fromExp :: Doc -> RatedExp Doc -> Doc\n fromExp info RatedExp{..} = indent 2 $ post $\n case ratedExpExp of\n ExpPrim p -> ppPrim p\n EmptyExp -> textStrict \"EMPTY_EXPR\"\n Tfm inf args -> ppTfm inf args\n ConvertRate to from a -> ppConvert to from a\n Select r n a -> ppSelect r n a\n If rate cond th el -> ppIff rate cond th el\n ExpBool args -> hsep [\"some bool expr\", pretty $ show args]\n ExpNum arg -> ppExpNum arg\n InitVar v a -> ppInitVar v a\n ReadVar v -> \"ReadVar\" <+> ppVar v\n WriteVar v a -> ppVar v $= pp a\n\n TODO\n InitArr _v _size -> undefined\n ReadArr _v _index -> undefined\n WriteArr _v _index _ -> undefined\n WriteInitArr _v _index _ -> undefined\n TfmArr _isInit _v _info _args -> undefined\n\n InitPureArr _outRate _procRate _vals -> undefined\n ReadPureArr _outRate _procRate _arr _index -> undefined\n\n IfBegin rate cond -> hsep [\"IF\", ppRate $ fromIfRate rate, ppCond $ fmap pp cond, \"\\n\"]\n\n IfBlock rate cond (CodeBlock th) -> ppIfBlockBy \"IF-BLOCK\" rate cond th\n IfElseBlock rate cond (CodeBlock th) (CodeBlock el) ->\n ppFun (hsep [\"IF-BLOCK\", ppRate $ fromIfRate rate, ppCond $ fmap pp cond ])\n [ pp th\n , \"ELSE-BLOCK\"\n , pp el\n , \"END-BLOCK\"\n ]\n ElseBegin -> \"ELSE\"\n IfEnd -> \"END_IF\"\n UntilBegin rate cond -> hsep [\"UNTIL\", ppRate $ fromIfRate rate, ppCond $ fmap pp cond, \"\\n\"]\n UntilEnd -> \"END_UNTIL\"\n WhileBegin rate cond -> hsep [\"WHILE\", ppRate $ fromIfRate rate, ppCond $ fmap pp cond, \"\\n\"]\n WhileRefBegin v -> hsep [\"WHILE_REF\", ppVar v]\n WhileEnd -> \"END_WHILE\"\n\n UntilBlock rate cond (CodeBlock th) -> ppIfBlockBy \"UNTIL-BLOCK\" rate cond th\n WhileBlock rate cond (CodeBlock th) -> ppIfBlockBy \"WHILE-BLOCK\" rate cond th\n WhileRefBlock var (CodeBlock th) -> ppWhileRefBlock var th\n\n Verbatim txt -> ppFun \"VERBATIM\" [textStrict txt]\n Starts -> \"STARTS\"\n Seq a b -> vcat [\"SEQ\", pp a, pp b]\n Ends a -> vcat [\"ENDS\", pp a]\n InitMacrosInt _name _n -> undefined\n InitMacrosDouble _name _d -> undefined\n InitMacrosString _name _str -> undefined\n ReadMacrosInt _name -> undefined\n ReadMacrosDouble _name -> undefined\n ReadMacrosString _name -> undefined\n where\n post a = hsep [hcat [\"{\",info, \"}:\"], a]\n\n ppIfBlockBy leadTag rate cond th =\n ppFun (hsep [leadTag, ppRate $ fromIfRate rate, ppCond $ fmap pp cond ])\n [ pp th\n , \"END-BLOCK\"\n ]\n\n ppWhileRefBlock var th =\n ppFun (hsep [\"WHILE-REF-BLOCK\", ppVar var])\n [ pp th\n , \"END-BLOCK\"\n ]\n\n ppTfm info args = ppFun (textStrict $ infoName info) (fmap pp args)\n\n ppConvert to from a =\n ppFun (hsep [textStrict \"Convert-rate\", ppRate to, maybe mempty ppRate from]) [pp a]\n\n ppSelect rate n arg =\n ppFun (hsep [\"select\", ppRate rate, pretty n]) [pp arg]\n\n ppIff rate cond th el =\n vcat\n [ hsep [\"if\", ppRate (fromIfRate 
rate), ppCond $ fmap pp cond]\n , indent 2 $ vcat\n [ \"then\" <+> pp th\n , \"else\" <+> pp el\n ]\n ]\n\n ppExpNum (PreInline op as) = ppNumOp op (fmap pp as)\n\n ppInitVar v a =\n ppFun (hsep [\"InitVar\", ppVar v]) [pp a]\n\n ppFun name args =\n vcat\n [ name\n , indent 2 $ vcat args\n ]\n\n pp = either ppPrim id . unPrimOr\n\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/spell-music/csound-expression/345df2c91c9831dd895f58951990165598504814/csound-expression-dynamic/src/Csound/Dynamic/Render/Pretty.hs"},"language":{"kind":"string","value":"haskell"},"comments":{"kind":"string","value":" ElseIfBegin a -> left >> (succTab $ text \"elseif \" <> ppCond a <> text \" then\")\n x -> error $ \"unknown expression: \" ++ show x\n pp macros\n pp arrays\n-----------------------------------\n expressions\n booleans\n numeric\n------------------------------------------------------------\n debug"},"code":{"kind":"string","value":"module Csound.Dynamic.Render.Pretty(\n Doc, vcatSep,\n ppCsdFile, ppGen, ppNotes, ppInstr, ppStmt, ppTotalDur,\n PrettyE(..), PrettyShowE(..),\n ppE\n) where\n\nimport Control.Monad.Trans.State.Strict\nimport qualified Data.IntMap as IM\n\nimport Text.PrettyPrint.Leijen.Text\nimport Csound.Dynamic.Types\nimport Csound.Dynamic.Tfm.InferTypes qualified as R(Var(..))\nimport Data.Text (Text)\nimport Data.Text qualified as Text\nimport Text.Show.Pretty (ppShow)\nimport Data.Fix (foldFix)\nimport Data.ByteString.Base64 qualified as Base64\nimport Data.Text.Encoding qualified as Text\n\nvcatSep :: [Doc] -> Doc\nvcatSep = vcat . punctuate line\n\nbinaries, unaries :: Text -> [Doc] -> Doc\n\nbinaries op as = binary op (as !! 0) (as !! 1)\nunaries op as = unary op (as !! 0)\n\nbinary :: Text -> Doc -> Doc -> Doc\nbinary op a b = parens $ a <+> textStrict op <+> b\n\nunary :: Text -> Doc -> Doc\nunary op a = parens $ textStrict op <> a\n\nfunc :: Text -> Doc -> Doc\nfunc op a = textStrict op <> parens a\n\nppCsdFile :: Doc -> Doc -> Doc -> [Plugin] -> Doc\nppCsdFile flags orc sco plugins =\n tag \"CsoundSynthesizer\" $ vcatSep [\n tag \"CsOptions\" flags,\n tag \"CsInstruments\" orc,\n tag \"CsScore\" sco,\n ppPlugins plugins\n ]\n\nppPlugins :: [Plugin] -> Doc\nppPlugins plugins = vcatSep $ fmap (\\(Plugin name body) -> tag name (textStrict body)) plugins\n\ntag :: Text -> Doc -> Doc\ntag name content = vcatSep [\n char '<' <> textStrict name <> char '>',\n content,\n text \" textStrict name <> char '>']\n\nppNotes :: InstrId -> [CsdEvent] -> Doc\nppNotes instrId = vcat . 
fmap (ppNote instrId)\n\nppNote :: InstrId -> CsdEvent -> Doc\nppNote instrId evt = char 'i'\n <+> ppInstrId instrId\n <+> double (csdEventStart evt) <+> double (csdEventDur evt)\n <+> hsep (fmap ppPrim $ csdEventContent evt)\n\nppPrim :: Prim -> Doc\nppPrim x = case x of\n P n -> char 'p' <> int n\n PrimInstrId a -> ppInstrId a\n PString a -> int a\n PrimInt n -> int n\n PrimDouble d -> double d\n PrimString s -> dquotes $ textStrict s\n PrimVar targetRate v -> ppConverter targetRate (varRate v) $ ppVar v\n where\n ppConverter dst src t\n | dst == src = t\n | dst == Ar && src == Kr = a(t)\n | dst == Ar && src == Ir = a(k(t))\n | dst == Kr = k(t)\n | dst == Ir && src == Kr = i(t)\n | dst == Ir && src == Ar = i(k(t))\n | otherwise = t\n where\n tfm ch v = hcat [char ch, parens v]\n a = tfm 'a'\n k = tfm 'k'\n i = tfm 'i'\n\n\nppGen :: Int -> Gen -> Doc\nppGen tabId ft = char 'f'\n <> int tabId\n <+> int 0\n <+> (int $ genSize ft)\n <+> (ppGenId $ genId ft)\n <+> (maybe empty (textStrict . Text.pack . show) $ genFile ft)\n <+> (hsep $ map double $ genArgs ft)\n\nppGenId :: GenId -> Doc\nppGenId x = case x of\n IntGenId a -> int a\n StringGenId a -> dquotes $ textStrict a\n\nppInstr :: InstrId -> Doc -> Doc\nppInstr instrId body = vcat [\n text \"instr\" <+> ppInstrHeadId instrId,\n body,\n text \"endin\"]\n\nppInstrHeadId :: InstrId -> Doc\nppInstrHeadId x = case x of\n InstrId den nom -> int nom <> maybe empty ppAfterDot den\n InstrLabel name -> textStrict name\n where ppAfterDot a = textStrict $ Text.pack $ ('.': ) $ reverse $ show a\n\nppInstrId :: InstrId -> Doc\nppInstrId x = case x of\n InstrId den nom -> int nom <> maybe empty ppAfterDot den\n InstrLabel name -> dquotes $ textStrict name\n where ppAfterDot a = textStrict $ Text.pack $ ('.': ) $ reverse $ show a\n\ntype TabDepth = Int\n\nppStmt :: [R.Var] -> Exp R.Var -> State TabDepth Doc\nppStmt outs expr = maybe (ppExp (ppOuts outs) expr) id (maybeStringCopy outs expr)\n\nmaybeStringCopy :: [R.Var] -> Exp R.Var -> Maybe (State TabDepth Doc)\nmaybeStringCopy outs expr = case (outs, expr) of\n ([R.Var Sr _], ExpPrim (PrimVar _rate var)) -> Just $ tab $ ppStringCopy (ppOuts outs) (ppVar var)\n ([R.Var Sr _], ReadVar var) -> Just $ tab $ ppStringCopy (ppOuts outs) (ppVar var)\n ([], WriteVar outVar a) | varRate outVar == Sr -> Just $ tab $ ppStringCopy (ppVar outVar) (ppPrimOrVar a)\n ([R.Var Sr _], ReadArr var as) -> Just $ tab $ ppStringCopy (ppOuts outs) (ppReadArr var $ fmap ppPrimOrVar as)\n ([], WriteArr outVar bs a) | varRate outVar == Sr -> Just $ tab $ ppStringCopy (ppArrIndex outVar $ fmap ppPrimOrVar bs) (ppPrimOrVar a)\n _ -> Nothing\n\nppStringCopy :: Doc -> Doc -> Doc\nppStringCopy outs src = ppOpc outs \"strcpyk\" [src]\n\nppExp :: Doc -> Exp R.Var -> State TabDepth Doc\nppExp res expr = case fmap ppPrimOrVar expr of\n ExpPrim (PString n) -> tab $ ppStrget res n\n ExpPrim p -> tab $ res $= ppPrim p\n Tfm info [a, b] | isInfix info -> tab $ res $= binary (infoName info) a b\n Tfm info xs | isPrefix info -> tab $ res $= prefix (infoName info) xs\n Tfm info xs -> tab $ ppOpc res (infoName info) xs\n ConvertRate to from x -> tab $ ppConvertRate res to from x\n If _ifRate info t e -> tab $ ppIf res (ppCond info) t e\n ExpNum (PreInline op as) -> tab $ res $= ppNumOp op as\n WriteVar v a -> tab $ ppVar v $= a\n InitVar v a -> tab $ ppOpc (ppVar v) \"init\" [a]\n ReadVar v -> tab $ res $= ppVar v\n\n InitArr v as -> tab $ ppOpc (ppArrVar (length as) (ppVar v)) \"init\" as\n ReadArr v as -> tab $ if (varRate v /= Sr) then res $= 
ppReadArr v as else res <+> text \"strcpy\" <+> ppReadArr v as\n WriteArr v as b -> tab $ ppWriteArr v as b\n WriteInitArr v as b -> tab $ ppWriteInitArr v as b\n TfmArr isInit v op [a,b]| isInfix op -> tab $ ppTfmArrOut isInit v <+> binary (infoName op) a b\n TfmArr isInit v op args | isPrefix op -> tab $ ppTfmArrOut isInit v <+> prefix (infoName op) args\n TfmArr isInit v op xs -> tab $ ppOpc (ppTfmArrOut isInit v) (infoName op) xs\n\n InitPureArr _outRate _procRate initVals -> tab $ ppOpc (ppArrVar 1 res) \"fillarray\" initVals\n ReadPureArr outRate _procRate arr index -> tab $ if (outRate /= Sr) then res $= ppReadPureArr arr [index] else res <+> text \"strcpy\" <+> ppReadPureArr arr [index]\n\n IfBegin _ a -> succTab $ text \"if \" <> ppCond a <> text \" then\"\n IfBlock _ cond (CodeBlock th) -> tab $ ppIf1 res (ppCond cond) th\n IfElseBlock _ cond (CodeBlock th) (CodeBlock el) -> tab $ ppIf res (ppCond cond) th el\n ElseBegin -> left >> (succTab $ text \"else\")\n IfEnd -> left >> (tab $ text \"endif\")\n UntilBlock _ cond (CodeBlock th) -> tab $ ppUntil res (ppCond cond) th\n WhileBlock _ cond (CodeBlock th) -> tab $ ppWhile res (ppCond cond) th\n WhileRefBlock var (CodeBlock th) -> tab $ ppWhileRef res var th\n\n UntilBegin _ a -> succTab $ text \"until \" <> ppCond a <> text \" do\"\n UntilEnd -> left >> (tab $ text \"od\")\n WhileBegin _ a -> succTab $ text \"while \" <> ppCond a <> text \" do\"\n WhileRefBegin var -> succTab $ text \"while \" <> ppVar var <+> equals <+> text \"1\" <+> text \"do\"\n WhileEnd -> left >> (tab $ text \"od\")\n InitMacrosString name initValue -> tab $ initMacros (textStrict name) (textStrict initValue)\n InitMacrosDouble name initValue -> tab $ initMacros (textStrict name) (double initValue)\n InitMacrosInt name initValue -> tab $ initMacros (textStrict name) (int initValue)\n ReadMacrosString name -> tab $ res <+> text \"strcpy\" <+> readMacro name\n ReadMacrosDouble name -> tab $ res $= readMacro name\n ReadMacrosInt name -> tab $ res $= readMacro name\n EmptyExp -> return empty\n Verbatim str -> return $ textStrict str\n\n Select _rate _n a -> tab $ res $= (\"SELECTS\" <+> a)\n Starts -> tab $ res $= \"STARTS\"\n Seq a b -> tab $ hsep [\"SEQ\", a, b]\n Ends _a -> tab $ \"ENDS\"\n ExpBool _ -> tab \"ExpBool\"\n\n\n\nreadMacro :: Text -> Doc\nreadMacro name = char '$' <> textStrict name\n\ninitMacros :: Doc -> Doc -> Doc\ninitMacros name initValue = vcat\n [ text \"#ifndef\" <+> name\n , text \"#define \" <+> name <+> char '#' <> initValue <> char '#'\n , text \"#end\"\n ]\n\n\nppTfmArrOut :: Bool -> Var -> Doc\nppTfmArrOut isInit v = ppVar v <> (if isInit then (text \"[]\") else empty)\n\nppArrIndex :: Var -> [Doc] -> Doc\nppArrIndex v as = ppVar v <> (hcat $ fmap brackets as)\n\nppArrVar :: Int -> Doc -> Doc\nppArrVar n v = v <> (hcat $ replicate n $ text \"[]\")\n\nppReadArr :: Var -> [Doc] -> Doc\nppReadArr v as = ppArrIndex v as\n\nppReadPureArr :: Doc -> [Doc] -> Doc\nppReadPureArr v as = v <> (hcat $ fmap brackets as)\n\nppWriteArr :: Var -> ArrIndex Doc -> Doc -> Doc\nppWriteArr v as b = ppArrIndex v as <+> equalsWord <+> b\n where equalsWord = if (varRate v == Sr) then text \"strcpy\" else equals\n\nppWriteInitArr :: Var -> [Doc] -> Doc -> Doc\nppWriteInitArr v as b = ppArrIndex v as <+> initWord <+> b\n where initWord = text $ if (varRate v == Sr) then \"strcpy\" else \"init\"\n\n\ntab :: Monad m => Doc -> StateT TabDepth m Doc\ntab doc = fmap (shiftByTab doc) get\n\ntabWidth :: TabDepth\ntabWidth = 4\n\nshiftByTab :: Doc -> TabDepth -> 
Doc\nshiftByTab doc n\n | n == 0 = doc\n | otherwise = indent (tabWidth * n) doc\n\nleft :: State TabDepth ()\nleft = modify pred\n\nsuccTab :: Monad m => Doc -> StateT TabDepth m Doc\nsuccTab doc = do\n a <- tab doc\n modify succ\n return a\n\nprefix :: Text -> [Doc] -> Doc\nprefix name args = textStrict name <> tupled args\n\nppCond :: Inline CondOp Doc -> Doc\nppCond = ppInline ppCondOp\n\n($=) :: Doc -> Doc -> Doc\n($=) a b = a <+> equals <+> b\n\nppOuts :: [R.Var] -> Doc\nppOuts xs = hsep $ punctuate comma $ map ppRatedVar xs\n\nppPrimOrVar :: PrimOr R.Var -> Doc\nppPrimOrVar x = either ppPrim ppRatedVar $ unPrimOr x\n\nppStrget :: Doc -> Int -> Doc\nppStrget out n = ppOpc out \"strget\" [char 'p' <> int n]\n\nppIf :: Doc -> Doc -> Doc -> Doc -> Doc\nppIf res p t e = vcat\n [ text \"if\" <+> p <+> text \"then\"\n , text \" \" <> res <+> char '=' <+> t\n , text \"else\"\n , text \" \" <> res <+> char '=' <+> e\n , text \"endif\"\n ]\n\nppIf1, ppWhile, ppUntil :: Doc -> Doc -> Doc -> Doc\n\nppIf1 = ppIfBy \"if\"\nppWhile = ppIfBy \"while\"\nppUntil = ppIfBy \"until\"\n\nppIfBy :: Text -> Doc -> Doc -> Doc -> Doc\nppIfBy leadTag res p t = vcat\n [ textStrict leadTag <+> p <+> text \"then\"\n , text \" \" <> res <+> char '=' <+> t\n , text \"endif\"\n ]\n\nppWhileRef :: Doc -> Var -> Doc -> Doc\nppWhileRef res p t = vcat\n [ textStrict \"while\" <+> ppVar p <+> text \"then\"\n , text \" \" <> res <+> char '=' <+> t\n , text \"endif\"\n ]\n\nppOpc :: Doc -> Text -> [Doc] -> Doc\nppOpc out name xs = out <+> ppProc name xs\n\nppProc :: Text -> [Doc] -> Doc\nppProc name xs = textStrict name <+> (hsep $ punctuate comma xs)\n\nppVar :: Var -> Doc\nppVar v = case v of\n Var ty rate name -> ppVarType ty <> ppRate rate <> textStrict (Text.cons (varPrefix ty) name)\n VarVerbatim _ name -> textStrict name\n\nvarPrefix :: VarType -> Char\nvarPrefix x = case x of\n LocalVar -> 'l'\n GlobalVar -> 'g'\n\nppVarType :: VarType -> Doc\nppVarType x = case x of\n LocalVar -> empty\n GlobalVar -> char 'g'\n\nppConvertRate :: Doc -> Rate -> Maybe Rate -> Doc -> Doc\nppConvertRate out to from var = case (to, from) of\n (Ar, Just Kr) -> upsamp var\n (Ar, Just Ir) -> upsamp $ toK var\n (Kr, Just Ar) -> downsamp var\n (Kr, Just Ir) -> out $= var\n (Ir, Just Ar) -> downsamp var\n (Ir, Just Kr) -> out $= toI var\n (Ar, Nothing) -> out $= toA var\n (Kr, Nothing) -> out $= toK var\n (Ir, Nothing) -> out $= toI var\n (a, Just b) | a == b -> out $= var\n (a, b) -> error $ \"bug: no rate conversion from \" ++ show b ++ \" to \" ++ show a ++ \".\"\n where\n upsamp x = ppOpc out \"upsamp\" [x]\n downsamp x = ppOpc out \"downsamp\" [x]\n toA = func \"a\"\n toK = func \"k\"\n toI = func \"i\"\n\n\nppInline :: (a -> [Doc] -> Doc) -> Inline a Doc -> Doc\nppInline ppNode a = iter $ inlineExp a\n where iter x = case x of\n InlinePrim n -> inlineEnv a IM.! 
n\n InlineExp op args -> ppNode op $ fmap iter args\n\n\nppCondOp :: CondOp -> [Doc] -> Doc\nppCondOp op = case op of\n TrueOp -> const $ text \"(1 == 1)\"\n FalseOp -> const $ text \"(0 == 1)\"\n And -> bi \"&&\"\n Or -> bi \"||\"\n Equals -> bi \"==\"\n NotEquals -> bi \"!=\"\n Less -> bi \"<\"\n Greater -> bi \">\"\n LessEquals -> bi \"<=\"\n GreaterEquals -> bi \">=\"\n where bi = binaries\n\n\nppNumOp :: NumOp -> [Doc] -> Doc\nppNumOp op = case op of\n Add -> bi \"+\"\n Sub -> bi \"-\"\n Mul -> bi \"*\"\n Div -> bi \"/\"\n Neg -> uno \"-\"\n Pow -> bi \"^\"\n Mod -> bi \"%\"\n where\n bi = binaries\n uno = unaries\n\nppRatedVar :: R.Var -> Doc\nppRatedVar v = ppRate (R.varType v) <> int (R.varId v)\n\nppRate :: Rate -> Doc\nppRate x = case removeArrRate x of\n Sr -> char 'S'\n _ -> phi x\n where phi = textStrict . Text.toLower . Text.pack . show\n\nppTotalDur :: Double -> Doc\nppTotalDur d = text \"f0\" <+> double d\n\n\nnewtype PrettyShowE = PrettyShowE E\nnewtype PrettyE = PrettyE E\n\ninstance Show PrettyShowE where\n show (PrettyShowE expr) = ppShow expr\n\ninstance Show PrettyE where\n show (PrettyE expr) = show $ ppE expr\n\nppE :: E -> Doc\nppE = foldFix go\n where\n go :: RatedExp Doc -> Doc\n go x = fromExp (fromInfo x) x\n\n fromInfo :: RatedExp Doc -> Doc\n fromInfo RatedExp{..} =\n hsep\n [ ppHash ratedExpHash\n , maybe mempty ppRate ratedExpRate\n , maybe mempty pretty ratedExpDepends\n ]\n\n ppHash = textStrict . Text.take 4 . Text.decodeUtf8 . Base64.encode\n\n fromExp :: Doc -> RatedExp Doc -> Doc\n fromExp info RatedExp{..} = indent 2 $ post $\n case ratedExpExp of\n ExpPrim p -> ppPrim p\n EmptyExp -> textStrict \"EMPTY_EXPR\"\n Tfm inf args -> ppTfm inf args\n ConvertRate to from a -> ppConvert to from a\n Select r n a -> ppSelect r n a\n If rate cond th el -> ppIff rate cond th el\n ExpBool args -> hsep [\"some bool expr\", pretty $ show args]\n ExpNum arg -> ppExpNum arg\n InitVar v a -> ppInitVar v a\n ReadVar v -> \"ReadVar\" <+> ppVar v\n WriteVar v a -> ppVar v $= pp a\n\n TODO\n InitArr _v _size -> undefined\n ReadArr _v _index -> undefined\n WriteArr _v _index _ -> undefined\n WriteInitArr _v _index _ -> undefined\n TfmArr _isInit _v _info _args -> undefined\n\n InitPureArr _outRate _procRate _vals -> undefined\n ReadPureArr _outRate _procRate _arr _index -> undefined\n\n IfBegin rate cond -> hsep [\"IF\", ppRate $ fromIfRate rate, ppCond $ fmap pp cond, \"\\n\"]\n\n IfBlock rate cond (CodeBlock th) -> ppIfBlockBy \"IF-BLOCK\" rate cond th\n IfElseBlock rate cond (CodeBlock th) (CodeBlock el) ->\n ppFun (hsep [\"IF-BLOCK\", ppRate $ fromIfRate rate, ppCond $ fmap pp cond ])\n [ pp th\n , \"ELSE-BLOCK\"\n , pp el\n , \"END-BLOCK\"\n ]\n ElseBegin -> \"ELSE\"\n IfEnd -> \"END_IF\"\n UntilBegin rate cond -> hsep [\"UNTIL\", ppRate $ fromIfRate rate, ppCond $ fmap pp cond, \"\\n\"]\n UntilEnd -> \"END_UNTIL\"\n WhileBegin rate cond -> hsep [\"WHILE\", ppRate $ fromIfRate rate, ppCond $ fmap pp cond, \"\\n\"]\n WhileRefBegin v -> hsep [\"WHILE_REF\", ppVar v]\n WhileEnd -> \"END_WHILE\"\n\n UntilBlock rate cond (CodeBlock th) -> ppIfBlockBy \"UNTIL-BLOCK\" rate cond th\n WhileBlock rate cond (CodeBlock th) -> ppIfBlockBy \"WHILE-BLOCK\" rate cond th\n WhileRefBlock var (CodeBlock th) -> ppWhileRefBlock var th\n\n Verbatim txt -> ppFun \"VERBATIM\" [textStrict txt]\n Starts -> \"STARTS\"\n Seq a b -> vcat [\"SEQ\", pp a, pp b]\n Ends a -> vcat [\"ENDS\", pp a]\n InitMacrosInt _name _n -> undefined\n InitMacrosDouble _name _d -> undefined\n InitMacrosString _name 
_str -> undefined\n ReadMacrosInt _name -> undefined\n ReadMacrosDouble _name -> undefined\n ReadMacrosString _name -> undefined\n where\n post a = hsep [hcat [\"{\",info, \"}:\"], a]\n\n ppIfBlockBy leadTag rate cond th =\n ppFun (hsep [leadTag, ppRate $ fromIfRate rate, ppCond $ fmap pp cond ])\n [ pp th\n , \"END-BLOCK\"\n ]\n\n ppWhileRefBlock var th =\n ppFun (hsep [\"WHILE-REF-BLOCK\", ppVar var])\n [ pp th\n , \"END-BLOCK\"\n ]\n\n ppTfm info args = ppFun (textStrict $ infoName info) (fmap pp args)\n\n ppConvert to from a =\n ppFun (hsep [textStrict \"Convert-rate\", ppRate to, maybe mempty ppRate from]) [pp a]\n\n ppSelect rate n arg =\n ppFun (hsep [\"select\", ppRate rate, pretty n]) [pp arg]\n\n ppIff rate cond th el =\n vcat\n [ hsep [\"if\", ppRate (fromIfRate rate), ppCond $ fmap pp cond]\n , indent 2 $ vcat\n [ \"then\" <+> pp th\n , \"else\" <+> pp el\n ]\n ]\n\n ppExpNum (PreInline op as) = ppNumOp op (fmap pp as)\n\n ppInitVar v a =\n ppFun (hsep [\"InitVar\", ppVar v]) [pp a]\n\n ppFun name args =\n vcat\n [ name\n , indent 2 $ vcat args\n ]\n\n pp = either ppPrim id . unPrimOr\n\n"}}},{"rowIdx":610262,"cells":{"_id":{"kind":"string","value":"3e249bf493c1ef45a931b1c5e58252c2274a7b8ef6ee58e09ad1ff3a8a392510"},"repository":{"kind":"string","value":"clash-lang/clash-compiler"},"name":{"kind":"string","value":"PatError.hs"},"content":{"kind":"string","value":"module PatError where\n\nimport Prelude\n\ntopEntity :: Maybe Int -> Int\ntopEntity (Just x) = x\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/clash-lang/clash-compiler/8e461a910f2f37c900705a0847a9b533bce4d2ea/tests/shouldwork/Basic/PatError.hs"},"language":{"kind":"string","value":"haskell"},"comments":{"kind":"string","value":""},"code":{"kind":"string","value":"module PatError where\n\nimport Prelude\n\ntopEntity :: Maybe Int -> Int\ntopEntity (Just x) = x\n"}}},{"rowIdx":610263,"cells":{"_id":{"kind":"string","value":"1183936d161e464928944a8c3599ab205acb38e27c191c9a3312d98b1103bce9"},"repository":{"kind":"string","value":"klajo/hacks"},"name":{"kind":"string","value":"beam_renamer_tests.erl"},"content":{"kind":"string","value":"%%%-------------------------------------------------------------------\n%%% @doc Test {@link beam_renamer}.\n @author ( )\n%%% @end\n%%%-------------------------------------------------------------------\n-module(beam_renamer_tests).\n\n-include_lib(\"eunit/include/eunit.hrl\").\n\nreplaces_in_atom_table_test() ->\n 'x^' = run_literal(x, 'x^', x).\n\nreplaces_in_constant_pool_test() ->\n ['x^'] = run_literal(x, 'x^', [x]),\n ['x^', 'x^'] = run_literal(x, 'x^', [x, x]),\n {'x^', 'x^'} = run_literal(x, 'x^', {x, x}),\n {[{'x^'}]} = run_literal(x, 'x^', {[{x}]}).\n\nrun_literal(Name0, Name, Term) ->\n run_with_renamed_module(\n fun() -> Name:f() end, \n mk_module(Name0, [erl_syntax:abstract(Term)]), \n Name).\n\nrun_with_renamed_module(Fun, BeamBin, Name) -> \n Bin = beam_renamer:rename(BeamBin, Name),\n unload_module(Name),\n {module, _} = code:load_binary(Name, \"dummy.beam\", Bin),\n try Fun() \n after unload_module(Name)\n end.\n\t \nunload_module(ModName) ->\n code:purge(ModName),\n code:delete(ModName).\n \n\nmk_module(ModName, FuncBody) ->\n {ok, ModName, Bin} = compile:forms(mk_module_forms(ModName, FuncBody)),\n Bin.\n\nmk_module_forms(ModName, FuncBody) ->\n erl_syntax:revert_forms(\n [erl_syntax:attribute(\n\t erl_syntax:atom(module), \n\t [erl_syntax:atom(ModName)]),\n erl_syntax:attribute(\n\t erl_syntax:atom(compile), \n\t 
[erl_syntax:atom(export_all)]),\n erl_syntax:function(\n \t erl_syntax:atom(f), \n \t [erl_syntax:clause([], FuncBody)])]).\n \n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/klajo/hacks/80afdad130b9b914d410cb382ebb1b6ee1236e94/beam/test/beam_renamer_tests.erl"},"language":{"kind":"string","value":"erlang"},"comments":{"kind":"string","value":"-------------------------------------------------------------------\n @doc Test {@link beam_renamer}.\n @end\n-------------------------------------------------------------------"},"code":{"kind":"string","value":" @author ( )\n-module(beam_renamer_tests).\n\n-include_lib(\"eunit/include/eunit.hrl\").\n\nreplaces_in_atom_table_test() ->\n 'x^' = run_literal(x, 'x^', x).\n\nreplaces_in_constant_pool_test() ->\n ['x^'] = run_literal(x, 'x^', [x]),\n ['x^', 'x^'] = run_literal(x, 'x^', [x, x]),\n {'x^', 'x^'} = run_literal(x, 'x^', {x, x}),\n {[{'x^'}]} = run_literal(x, 'x^', {[{x}]}).\n\nrun_literal(Name0, Name, Term) ->\n run_with_renamed_module(\n fun() -> Name:f() end, \n mk_module(Name0, [erl_syntax:abstract(Term)]), \n Name).\n\nrun_with_renamed_module(Fun, BeamBin, Name) -> \n Bin = beam_renamer:rename(BeamBin, Name),\n unload_module(Name),\n {module, _} = code:load_binary(Name, \"dummy.beam\", Bin),\n try Fun() \n after unload_module(Name)\n end.\n\t \nunload_module(ModName) ->\n code:purge(ModName),\n code:delete(ModName).\n \n\nmk_module(ModName, FuncBody) ->\n {ok, ModName, Bin} = compile:forms(mk_module_forms(ModName, FuncBody)),\n Bin.\n\nmk_module_forms(ModName, FuncBody) ->\n erl_syntax:revert_forms(\n [erl_syntax:attribute(\n\t erl_syntax:atom(module), \n\t [erl_syntax:atom(ModName)]),\n erl_syntax:attribute(\n\t erl_syntax:atom(compile), \n\t [erl_syntax:atom(export_all)]),\n erl_syntax:function(\n \t erl_syntax:atom(f), \n \t [erl_syntax:clause([], FuncBody)])]).\n \n"}}},{"rowIdx":610264,"cells":{"_id":{"kind":"string","value":"3e8a841b1a590d0222f030e46885e26f247f3f9885e2f2c7d02e171facb46a0e"},"repository":{"kind":"string","value":"Lysxia/generic-data"},"name":{"kind":"string","value":"Prelude.hs"},"content":{"kind":"string","value":"# LANGUAGE FlexibleContexts #\n\n-- | Generic deriving for standard classes in base\n--\n-- === Warning\n--\n-- This is an internal module: it is not subject to any versioning policy,\n-- breaking changes can happen at any time.\n--\n-- If something here seems useful, please report it or create a pull request to\n-- export it from an external module.\n\nmodule Generic.Data.Internal.Prelude where\n\nimport Control.Applicative (liftA2, Alternative(..))\nimport Data.Function (on)\nimport Data.Functor.Classes\nimport Data.Semigroup\nimport GHC.Generics\n\nimport Generic.Data.Internal.Utils (from', to', liftG2)\n\n * ' '\n\n | Generic \n--\n-- @\n instance ' ' MyType where\n-- ('==') = 'geq'\n-- @\ngeq :: (Generic a, Eq (Rep a ())) => a -> a -> Bool\ngeq = (==) `on` from'\n\n * ' '\n\n-- | Generic 'compare'.\n--\n-- @\n instance ' ' where\n-- 'compare' = 'gcompare'\n-- @\ngcompare :: (Generic a, Ord (Rep a ())) => a -> a -> Ordering\ngcompare = compare `on` from'\n\n-- * 'Semigroup'\n\n-- | Generic @('<>')@ (or 'mappend').\n--\n-- @\n instance ' Semigroup ' where\n-- ('<>') = 'gmappend'\n-- @\n--\n-- See also 'gmempty'.\ngmappend :: (Generic a, Semigroup (Rep a ())) => a -> a -> a\ngmappend = \\a b -> to (from' a <> from' b)\n\n-- * 'Monoid'\n\n | Generic ' ' .\n--\n-- @\n-- instance 'Monoid' MyType where\n ' ' = ' gmempty '\n-- @\ngmempty :: (Generic a, 
Monoid (Rep a ())) => a\ngmempty = to' mempty\n\n-- | Generic @('<>')@ (or @'mappend'@).\n--\n-- The difference from `gmappend' is the 'Monoid' constraint instead of\n-- 'Semigroup', for older versions of base where 'Semigroup' is not a\n-- superclass of 'Monoid'.\ngmappend' :: (Generic a, Monoid (Rep a ())) => a -> a -> a\ngmappend' = \\a b -> to (from' a `mappend` from' b)\n\n-- * 'Functor'\n\n-- | Generic 'fmap'.\n--\n-- @\n instance ' Functor ' where\n-- 'fmap' = 'gfmap'\n-- @\ngfmap :: (Generic1 f, Functor (Rep1 f)) => (a -> b) -> f a -> f b\ngfmap = \\f -> to1 . fmap f . from1\n\n-- | Generic @('<$')@.\n--\n-- See also 'gfmap'.\ngconstmap :: (Generic1 f, Functor (Rep1 f)) => a -> f b -> f a\ngconstmap = \\a -> to1 . (a <$) . from1\n\n-- * 'Applicative'\n\n-- | Generic 'pure'.\n--\n-- @\n instance ' Applicative ' where\n-- 'pure' = 'gpure'\n-- ('<*>') = 'gap'\n-- @\ngpure :: (Generic1 f, Applicative (Rep1 f)) => a -> f a\ngpure = to1 . pure\n\n-- | Generic @('<*>')@ (or 'Control.Monad.ap').\n--\n-- See also 'gpure'.\ngap :: (Generic1 f, Applicative (Rep1 f)) => f (a -> b) -> f a -> f b\ngap = liftG2 (<*>)\n\n-- | Generic 'liftA2'.\n--\n-- See also 'gpure'.\ngliftA2 :: (Generic1 f, Applicative (Rep1 f)) => (a -> b -> c) -> f a -> f b -> f c\ngliftA2 = liftG2 . liftA2\n\n-- * 'Alternative'\n\n-- | Generic 'empty'.\n--\n-- @\n instance ' Alternative ' where\n-- 'empty' = 'gempty'\n-- ('<|>') = 'galt'\n-- @\ngempty :: (Generic1 f, Alternative (Rep1 f)) => f a\ngempty = to1 empty\n\n-- | Generic ('<|>').\n--\n-- See also 'gempty'.\ngalt :: (Generic1 f, Alternative (Rep1 f)) => f a -> f a -> f a\ngalt = liftG2 (<|>)\n\n-- * 'Foldable'\n\n-- | Generic 'foldMap'.\n--\n-- @\n instance ' Foldable ' where\n-- 'foldMap' = 'gfoldMap'\n-- @\n--\n-- This is deprecated but kept around just for reference.\n# DEPRECATED gfoldMap \" This definition has been replaced with ' Generic . Data . ' . \" #\ngfoldMap :: (Generic1 f, Foldable (Rep1 f), Monoid m) => (a -> m) -> f a -> m\ngfoldMap = \\f -> foldMap f . from1\n\n-- | Generic 'foldr'.\n--\n-- @\n instance ' Foldable ' where\n-- 'foldr' = 'gfoldr'\n-- @\n--\n-- See also 'gfoldMap'.\ngfoldr :: (Generic1 f, Foldable (Rep1 f)) => (a -> b -> b) -> b -> f a -> b\ngfoldr = \\f b -> foldr f b . from1\n-- Note: this one is not deprecated because inlining Just Works.\n\n * ' '\n\n-- | Generic 'traverse'.\n--\n-- @\n instance ' ' where\n-- 'traverse' = 'gtraverse'\n-- @\n--\n-- This is deprecated but kept around just for reference.\n{-# DEPRECATED gtraverse \"This definition has been replaced with 'Generic.Data.Internal.gtraverse'.\" #-}\ngtraverse\n :: (Generic1 f, Traversable (Rep1 f), Applicative m)\n => (a -> m b) -> f a -> m (f b)\ngtraverse = \\f -> fmap to1 . traverse f . from1\n\n-- | Generic 'sequenceA'.\n--\n-- @\n instance ' ' where\n-- 'sequenceA' = 'gsequenceA'\n-- @\n--\n-- See also 'gtraverse'.\n--\n-- This is deprecated but kept around just for reference.\n{-# DEPRECATED gsequenceA \"This definition has been replaced with 'Generic.Data.Internal.gsequenceA'.\" #-}\ngsequenceA\n :: (Generic1 f, Traversable (Rep1 f), Applicative m)\n => f (m a) -> m (f a)\ngsequenceA = fmap to1 . sequenceA . from1\n\n * ' Eq1 '\n\n-- | Generic 'liftEq'.\ngliftEq :: (Generic1 f, Eq1 (Rep1 f)) => (a -> b -> Bool) -> f a -> f b -> Bool\ngliftEq = \\(==.) a b -> liftEq (==.) 
(from1 a) (from1 b)\n\n-- * 'Ord1'\n\n-- | Generic 'liftCompare'.\ngliftCompare\n :: (Generic1 f, Ord1 (Rep1 f))\n => (a -> b -> Ordering) -> f a -> f b -> Ordering\ngliftCompare = \\compare' a b -> liftCompare compare' (from1 a) (from1 b)\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/Lysxia/generic-data/846fafb9ec1e4e60424e4f266451665fe25fdfa9/src/Generic/Data/Internal/Prelude.hs"},"language":{"kind":"string","value":"haskell"},"comments":{"kind":"string","value":" | Generic deriving for standard classes in base\n\n === Warning\n\n This is an internal module: it is not subject to any versioning policy,\n breaking changes can happen at any time.\n\n If something here seems useful, please report it or create a pull request to\n export it from an external module.\n\n @\n ('==') = 'geq'\n @\n | Generic 'compare'.\n\n @\n 'compare' = 'gcompare'\n @\n * 'Semigroup'\n | Generic @('<>')@ (or 'mappend').\n\n @\n ('<>') = 'gmappend'\n @\n\n See also 'gmempty'.\n * 'Monoid'\n\n @\n instance 'Monoid' MyType where\n @\n | Generic @('<>')@ (or @'mappend'@).\n\n The difference from `gmappend' is the 'Monoid' constraint instead of\n 'Semigroup', for older versions of base where 'Semigroup' is not a\n superclass of 'Monoid'.\n * 'Functor'\n | Generic 'fmap'.\n\n @\n 'fmap' = 'gfmap'\n @\n | Generic @('<$')@.\n\n See also 'gfmap'.\n * 'Applicative'\n | Generic 'pure'.\n\n @\n 'pure' = 'gpure'\n ('<*>') = 'gap'\n @\n | Generic @('<*>')@ (or 'Control.Monad.ap').\n\n See also 'gpure'.\n | Generic 'liftA2'.\n\n See also 'gpure'.\n * 'Alternative'\n | Generic 'empty'.\n\n @\n 'empty' = 'gempty'\n ('<|>') = 'galt'\n @\n | Generic ('<|>').\n\n See also 'gempty'.\n * 'Foldable'\n | Generic 'foldMap'.\n\n @\n 'foldMap' = 'gfoldMap'\n @\n\n This is deprecated but kept around just for reference.\n | Generic 'foldr'.\n\n @\n 'foldr' = 'gfoldr'\n @\n\n See also 'gfoldMap'.\n Note: this one is not deprecated because inlining Just Works.\n | Generic 'traverse'.\n\n @\n 'traverse' = 'gtraverse'\n @\n\n This is deprecated but kept around just for reference.\n# DEPRECATED gtraverse \"This definition has been replaced with 'Generic.Data.Internal.gtraverse'.\" #\n | Generic 'sequenceA'.\n\n @\n 'sequenceA' = 'gsequenceA'\n @\n\n See also 'gtraverse'.\n\n This is deprecated but kept around just for reference.\n# DEPRECATED gsequenceA \"This definition has been replaced with 'Generic.Data.Internal.gsequenceA'.\" #\n | Generic 'liftEq'.\n * 'Ord1'\n | Generic 'liftCompare'."},"code":{"kind":"string","value":"# LANGUAGE FlexibleContexts #\n\n\nmodule Generic.Data.Internal.Prelude where\n\nimport Control.Applicative (liftA2, Alternative(..))\nimport Data.Function (on)\nimport Data.Functor.Classes\nimport Data.Semigroup\nimport GHC.Generics\n\nimport Generic.Data.Internal.Utils (from', to', liftG2)\n\n * ' '\n\n | Generic \n instance ' ' MyType where\ngeq :: (Generic a, Eq (Rep a ())) => a -> a -> Bool\ngeq = (==) `on` from'\n\n * ' '\n\n instance ' ' where\ngcompare :: (Generic a, Ord (Rep a ())) => a -> a -> Ordering\ngcompare = compare `on` from'\n\n\n instance ' Semigroup ' where\ngmappend :: (Generic a, Semigroup (Rep a ())) => a -> a -> a\ngmappend = \\a b -> to (from' a <> from' b)\n\n\n | Generic ' ' .\n ' ' = ' gmempty '\ngmempty :: (Generic a, Monoid (Rep a ())) => a\ngmempty = to' mempty\n\ngmappend' :: (Generic a, Monoid (Rep a ())) => a -> a -> a\ngmappend' = \\a b -> to (from' a `mappend` from' b)\n\n\n instance ' Functor ' where\ngfmap :: (Generic1 f, Functor 
(Rep1 f)) => (a -> b) -> f a -> f b\ngfmap = \\f -> to1 . fmap f . from1\n\ngconstmap :: (Generic1 f, Functor (Rep1 f)) => a -> f b -> f a\ngconstmap = \\a -> to1 . (a <$) . from1\n\n\n instance ' Applicative ' where\ngpure :: (Generic1 f, Applicative (Rep1 f)) => a -> f a\ngpure = to1 . pure\n\ngap :: (Generic1 f, Applicative (Rep1 f)) => f (a -> b) -> f a -> f b\ngap = liftG2 (<*>)\n\ngliftA2 :: (Generic1 f, Applicative (Rep1 f)) => (a -> b -> c) -> f a -> f b -> f c\ngliftA2 = liftG2 . liftA2\n\n\n instance ' Alternative ' where\ngempty :: (Generic1 f, Alternative (Rep1 f)) => f a\ngempty = to1 empty\n\ngalt :: (Generic1 f, Alternative (Rep1 f)) => f a -> f a -> f a\ngalt = liftG2 (<|>)\n\n\n instance ' Foldable ' where\n# DEPRECATED gfoldMap \" This definition has been replaced with ' Generic . Data . ' . \" #\ngfoldMap :: (Generic1 f, Foldable (Rep1 f), Monoid m) => (a -> m) -> f a -> m\ngfoldMap = \\f -> foldMap f . from1\n\n instance ' Foldable ' where\ngfoldr :: (Generic1 f, Foldable (Rep1 f)) => (a -> b -> b) -> b -> f a -> b\ngfoldr = \\f b -> foldr f b . from1\n\n * ' '\n\n instance ' ' where\ngtraverse\n :: (Generic1 f, Traversable (Rep1 f), Applicative m)\n => (a -> m b) -> f a -> m (f b)\ngtraverse = \\f -> fmap to1 . traverse f . from1\n\n instance ' ' where\ngsequenceA\n :: (Generic1 f, Traversable (Rep1 f), Applicative m)\n => f (m a) -> m (f a)\ngsequenceA = fmap to1 . sequenceA . from1\n\n * ' Eq1 '\n\ngliftEq :: (Generic1 f, Eq1 (Rep1 f)) => (a -> b -> Bool) -> f a -> f b -> Bool\ngliftEq = \\(==.) a b -> liftEq (==.) (from1 a) (from1 b)\n\n\ngliftCompare\n :: (Generic1 f, Ord1 (Rep1 f))\n => (a -> b -> Ordering) -> f a -> f b -> Ordering\ngliftCompare = \\compare' a b -> liftCompare compare' (from1 a) (from1 b)\n"}}},{"rowIdx":610265,"cells":{"_id":{"kind":"string","value":"b3a1dcdaf55a70a28bc1b23a9a68ef333dda5c9779984e25eb61316b079a3c7e"},"repository":{"kind":"string","value":"williamleferrand/accretio"},"name":{"kind":"string","value":"core_invite.ml"},"content":{"kind":"string","value":"(*\n * core - invite\n *\n * \n *\n *)\n\nopen Lwt\n\nopen Printf\nopen CalendarLib\n\nopen Api\n\nopen Eliom_content.Html5\nopen Eliom_content.Html5.D\n\nopen Message_parsers\n\nlet has_already_declined = sprintf \"core-invite-has-already-declined-%d\"\nlet tag_timer_reminder = sprintf \"core-invite-reminded-%d\"\nlet key_email_anchor = sprintf \"core-invite-anchor-%d\"\n\nlet invite context message =\n lwt content = context.get_message_content ~message in\n let emails = Ys_email.get_all_emails content in\n\n lwt supervisor = $society(context.society)->leader in\n lwt supervisor_name = $member(supervisor)->name in\n\n lwt already_members, already_declined, invited =\n Lwt_list.fold_left_s\n (fun (already_members, already_declined, invited) email ->\n context.log_info \"inviting member with email %s to society %d\" email context.society ;\n lwt member =\n match_lwt Object_member.Store.find_by_email email with\n | Some uid -> return uid\n | None ->\n match_lwt Object_member.Store.create\n ~preferred_email:email\n ~emails:[ email ]\n () with\n | `Object_already_exists (_, uid) -> return uid\n | `Object_created member -> return member.Object_member.uid\n in\n match_lwt context.is_member ~member with\n true -> return ((member, email) :: already_members, already_declined, invited)\n | false ->\n (* check if the member hasn't declined already *)\n match_lwt context.get ~key:(has_already_declined member) with\n Some _ -> return (already_members, (member, email) :: already_declined, 
invited)\n | None ->\n lwt _ =\n match_lwt\n context.message_member\n ~member\n ~subject:context.society_name\n ~content:[\n pcdata \"Greetings,\" ; br () ;\n br () ;\n pcdata \"I'm running a group called \" ; i [ pcdata context.society_name ] ; pcdata \". \"; pcdata context.society_description ; br ();\n br () ;\n pcdata \"Would you like to be notified about the upcoming events? No signup is necessary; we usually organize ourselves by email.\" ; br () ;\n br () ;\n pcdata \"Looking forward to hearing from you,\" ; br () ;\n br () ;\n pcdata supervisor_name ;\n ]\n () with\n None -> return_unit\n | Some message_id ->\n context.set ~key:(key_email_anchor member) ~value:(Ys_uid.to_string message_id)\n in\n lwt _ =\n context.set_timer\n ~label:(tag_timer_reminder member)\n ~duration:(Calendar.Period.lmake ~hour:26 ())\n (`RemindMember member)\n in\n return (already_members, already_declined, ((member, email) :: invited)))\n ([], [], [])\n emails\n in\n\n lwt _ =\n context.reply_to\n ~message\n ~content:[\n pcdata \"Great. Here is what I did:\" ; br () ;\n br () ;\n pcdata \"Already members:\" ;\n ul (List.map (fun (_, email) -> li [ pcdata email ]) already_members) ;\n br () ;\n pcdata \"Already declined:\" ;\n ul (List.map (fun (_, email) -> li [ pcdata email ]) already_declined) ;\n br () ;\n pcdata \"Invited:\" ;\n ul (List.map (fun (_, email) -> li [ pcdata email ]) invited) ;\n br () ;\n pcdata \"Let's see what comes back!\"\n ]\n ()\n in\n\n return `None\n\nlet remind context member =\n context.log_info \"sending reminder to member %d\" member ;\n lwt _ =\n context.cancel_timers ~query:(tag_timer_reminder member)\n in\n match_lwt context.get ~key:(key_email_anchor member) with\n None ->\n lwt _ =\n context.message_member\n ~member\n ~subject:context.society_name\n ~content:[\n pcdata \"My apologies for the reminder, but maybe have you missed my previous email.\" ; br () ;\n br () ;\n pcdata \"Would you be interested in hearing more about our \" ; i [ pcdata context.society_name ] ; pcdata \" group?\" ;\n ]\n ()\n in\n return `None\n | Some message ->\n let message = Ys_uid.of_string message in\n lwt _ =\n context.reply_to\n ~message\n ~content:[\n pcdata \"My apologies for the reminder, but maybe have you missed my previous email - would you be interested in hearing more about our group?\" ;\n ]\n ()\n in\n return `None\n\nlet accepted context message =\n lwt member = context.get_message_sender ~message in\n context.log_info \"adding member %d to the society\" member ;\n lwt _ = context.add_member ~member in\n lwt _ =\n context.cancel_timers ~query:(tag_timer_reminder member)\n in\n lwt _ =\n context.reply_to\n ~message\n ~content:[\n pcdata \"Great, I added you to the list of participants, stay tuned!\" ; br ()\n ]\n ()\n in\n return `None\n\nlet declined context message =\n lwt member = context.get_message_sender ~message in\n context.log_info \"removing member %d to the society\" member ;\n lwt _ = context.remove_member ~member in\n lwt _ = context.cancel_timers ~query:(tag_timer_reminder member) in\n lwt _ = context.set ~key:(has_already_declined member) ~value:\"true\" in\n lwt _ =\n context.reply_to\n ~message\n ~content:[\n pcdata \"Ok!\" ; pcdata \" If you change you mind later, don't hesitate to be get back in touch!\" ; br ()\n ]\n ()\n in\n return `None\n\nlet initialize_invites context () =\n lwt _ =\n context.message_supervisor\n ~subject:\"Who do you want to invite?\"\n ~content:[\n pcdata \"Greetings,\" ; br () ;\n br () ;\n pcdata \"Who do you want to invite? 
Just send me a bunch of emails and I'll figure out who to get in touch with\" ; br ()\n ]\n ()\n in\n return `None\n\nCOMPONENT\n\n *initialize_invites ~> `Message of email ~> invite\n\n remind ~> `Declined of email ~> declined ~> `Accepted of email ~> accepted\n -invite ~> `RemindMember of int ~> remind ~> `Accepted of email ~> accepted\n invite ~> `Accepted of email ~> accepted\n invite ~> `Declined of email ~> declined\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/williamleferrand/accretio/394f855e9c2a6a18f0c2da35058d5a01aacf6586/playbooks/core_invite.ml"},"language":{"kind":"string","value":"ocaml"},"comments":{"kind":"string","value":"\n * core - invite\n *\n * \n *\n \n check if the member hasn't declined already "},"code":{"kind":"string","value":"\nopen Lwt\n\nopen Printf\nopen CalendarLib\n\nopen Api\n\nopen Eliom_content.Html5\nopen Eliom_content.Html5.D\n\nopen Message_parsers\n\nlet has_already_declined = sprintf \"core-invite-has-already-declined-%d\"\nlet tag_timer_reminder = sprintf \"core-invite-reminded-%d\"\nlet key_email_anchor = sprintf \"core-invite-anchor-%d\"\n\nlet invite context message =\n lwt content = context.get_message_content ~message in\n let emails = Ys_email.get_all_emails content in\n\n lwt supervisor = $society(context.society)->leader in\n lwt supervisor_name = $member(supervisor)->name in\n\n lwt already_members, already_declined, invited =\n Lwt_list.fold_left_s\n (fun (already_members, already_declined, invited) email ->\n context.log_info \"inviting member with email %s to society %d\" email context.society ;\n lwt member =\n match_lwt Object_member.Store.find_by_email email with\n | Some uid -> return uid\n | None ->\n match_lwt Object_member.Store.create\n ~preferred_email:email\n ~emails:[ email ]\n () with\n | `Object_already_exists (_, uid) -> return uid\n | `Object_created member -> return member.Object_member.uid\n in\n match_lwt context.is_member ~member with\n true -> return ((member, email) :: already_members, already_declined, invited)\n | false ->\n match_lwt context.get ~key:(has_already_declined member) with\n Some _ -> return (already_members, (member, email) :: already_declined, invited)\n | None ->\n lwt _ =\n match_lwt\n context.message_member\n ~member\n ~subject:context.society_name\n ~content:[\n pcdata \"Greetings,\" ; br () ;\n br () ;\n pcdata \"I'm running a group called \" ; i [ pcdata context.society_name ] ; pcdata \". \"; pcdata context.society_description ; br ();\n br () ;\n pcdata \"Would you like to be notified about the upcoming events? No signup is necessary; we usually organize ourselves by email.\" ; br () ;\n br () ;\n pcdata \"Looking forward to hearing from you,\" ; br () ;\n br () ;\n pcdata supervisor_name ;\n ]\n () with\n None -> return_unit\n | Some message_id ->\n context.set ~key:(key_email_anchor member) ~value:(Ys_uid.to_string message_id)\n in\n lwt _ =\n context.set_timer\n ~label:(tag_timer_reminder member)\n ~duration:(Calendar.Period.lmake ~hour:26 ())\n (`RemindMember member)\n in\n return (already_members, already_declined, ((member, email) :: invited)))\n ([], [], [])\n emails\n in\n\n lwt _ =\n context.reply_to\n ~message\n ~content:[\n pcdata \"Great. 
Here is what I did:\" ; br () ;\n br () ;\n pcdata \"Already members:\" ;\n ul (List.map (fun (_, email) -> li [ pcdata email ]) already_members) ;\n br () ;\n pcdata \"Already declined:\" ;\n ul (List.map (fun (_, email) -> li [ pcdata email ]) already_declined) ;\n br () ;\n pcdata \"Invited:\" ;\n ul (List.map (fun (_, email) -> li [ pcdata email ]) invited) ;\n br () ;\n pcdata \"Let's see what comes back!\"\n ]\n ()\n in\n\n return `None\n\nlet remind context member =\n context.log_info \"sending reminder to member %d\" member ;\n lwt _ =\n context.cancel_timers ~query:(tag_timer_reminder member)\n in\n match_lwt context.get ~key:(key_email_anchor member) with\n None ->\n lwt _ =\n context.message_member\n ~member\n ~subject:context.society_name\n ~content:[\n pcdata \"My apologies for the reminder, but maybe have you missed my previous email.\" ; br () ;\n br () ;\n pcdata \"Would you be interested in hearing more about our \" ; i [ pcdata context.society_name ] ; pcdata \" group?\" ;\n ]\n ()\n in\n return `None\n | Some message ->\n let message = Ys_uid.of_string message in\n lwt _ =\n context.reply_to\n ~message\n ~content:[\n pcdata \"My apologies for the reminder, but maybe have you missed my previous email - would you be interested in hearing more about our group?\" ;\n ]\n ()\n in\n return `None\n\nlet accepted context message =\n lwt member = context.get_message_sender ~message in\n context.log_info \"adding member %d to the society\" member ;\n lwt _ = context.add_member ~member in\n lwt _ =\n context.cancel_timers ~query:(tag_timer_reminder member)\n in\n lwt _ =\n context.reply_to\n ~message\n ~content:[\n pcdata \"Great, I added you to the list of participants, stay tuned!\" ; br ()\n ]\n ()\n in\n return `None\n\nlet declined context message =\n lwt member = context.get_message_sender ~message in\n context.log_info \"removing member %d to the society\" member ;\n lwt _ = context.remove_member ~member in\n lwt _ = context.cancel_timers ~query:(tag_timer_reminder member) in\n lwt _ = context.set ~key:(has_already_declined member) ~value:\"true\" in\n lwt _ =\n context.reply_to\n ~message\n ~content:[\n pcdata \"Ok!\" ; pcdata \" If you change you mind later, don't hesitate to be get back in touch!\" ; br ()\n ]\n ()\n in\n return `None\n\nlet initialize_invites context () =\n lwt _ =\n context.message_supervisor\n ~subject:\"Who do you want to invite?\"\n ~content:[\n pcdata \"Greetings,\" ; br () ;\n br () ;\n pcdata \"Who do you want to invite? 
Just send me a bunch of emails and I'll figure out who to get in touch with\" ; br ()\n ]\n ()\n in\n return `None\n\nCOMPONENT\n\n *initialize_invites ~> `Message of email ~> invite\n\n remind ~> `Declined of email ~> declined ~> `Accepted of email ~> accepted\n -invite ~> `RemindMember of int ~> remind ~> `Accepted of email ~> accepted\n invite ~> `Accepted of email ~> accepted\n invite ~> `Declined of email ~> declined\n"}}},{"rowIdx":610266,"cells":{"_id":{"kind":"string","value":"6b79bf8204c2560a98fd1b57438c1c67a852a1f6bdfc379cf3ec924ba2c70262"},"repository":{"kind":"string","value":"LaurentRDC/pandoc-plot"},"name":{"kind":"string","value":"Prelude.hs"},"content":{"kind":"string","value":"{-# LANGUAGE OverloadedStrings #-}\n\n-- |\n-- Module : $header$\n Copyright : ( c ) , 2019 - present\n License : GNU GPL , version 2 or above\n-- Maintainer : \n-- Stability : internal\n-- Portability : portable\n--\n-- Prelude for renderers, containing some helpful utilities.\nmodule Text.Pandoc.Filter.Plot.Renderers.Prelude\n ( module Prelude,\n module Text.Pandoc.Filter.Plot.Monad,\n Text,\n st,\n unpack,\n findExecutable,\n appendCapture,\n toRPath,\n )\nwhere\n\nimport Data.Text (Text, unpack)\nimport System.Directory (findExecutable)\nimport System.FilePath (isPathSeparator)\nimport Text.Pandoc.Filter.Plot.Monad\nimport Text.Shakespeare.Text (st)\n\n-- | A shortcut to append capture script fragments to scripts\nappendCapture ::\n (FigureSpec -> FilePath -> Script) ->\n FigureSpec ->\n FilePath ->\n Script\nappendCapture f s fp = mconcat [script s, \"\\n\", f s fp]\n\n-- | R paths use the '/' path separator\ntoRPath :: FilePath -> FilePath\ntoRPath = fmap (\\c -> if isPathSeparator c then '/' else c)\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/LaurentRDC/pandoc-plot/933daba593196bf3b1ae1f2022d17389552f275c/src/Text/Pandoc/Filter/Plot/Renderers/Prelude.hs"},"language":{"kind":"string","value":"haskell"},"comments":{"kind":"string","value":"# LANGUAGE OverloadedStrings #\n |\n Module : $header$\n Maintainer : \n Stability : internal\n Portability : portable\n\n Prelude for renderers, containing some helpful utilities.\n | A shortcut to append capture script fragments to scripts\n | R paths use the '/' path separator"},"code":{"kind":"string","value":"\n Copyright : ( c ) , 2019 - present\n License : GNU GPL , version 2 or above\nmodule Text.Pandoc.Filter.Plot.Renderers.Prelude\n ( module Prelude,\n module Text.Pandoc.Filter.Plot.Monad,\n Text,\n st,\n unpack,\n findExecutable,\n appendCapture,\n toRPath,\n )\nwhere\n\nimport Data.Text (Text, unpack)\nimport System.Directory (findExecutable)\nimport System.FilePath (isPathSeparator)\nimport Text.Pandoc.Filter.Plot.Monad\nimport Text.Shakespeare.Text (st)\n\nappendCapture ::\n (FigureSpec -> FilePath -> Script) ->\n FigureSpec ->\n FilePath ->\n Script\nappendCapture f s fp = mconcat [script s, \"\\n\", f s fp]\n\ntoRPath :: FilePath -> FilePath\ntoRPath = fmap (\\c -> if isPathSeparator c then '/' else c)\n"}}},{"rowIdx":610267,"cells":{"_id":{"kind":"string","value":"eb2259778274d44093ba70d8ba8192f57d237cb82c1d2726aae5f7cd0b2b8a8f"},"repository":{"kind":"string","value":"rob7hunter/leftparen"},"name":{"kind":"string","value":"loc.scm"},"content":{"kind":"string","value":";; how much code have you written?\n#lang scheme/base\n\n(require \"util.scm\")\n\n(provide loc)\n\n;; counts all lines except for comment lines and blank lines\n(define (loc #:comment-chars (comment-chars (list #\\;)) 
. filenames)\n (fold + 0\n (map (lambda (filename)\n (file-line-fold\n (lambda (line-str total-loc)\n (let ((trimmed (string-trim-both line-str #\\space)))\n (cond ((string=? trimmed \"\") total-loc)\n ((memq (string-ref trimmed 0) comment-chars) total-loc)\n (else (+ 1 total-loc)))))\n 0\n filename))\n filenames)))\n\n\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/rob7hunter/leftparen/169c896bda989b6a049fe49253a04d6f8b62402b/loc.scm"},"language":{"kind":"string","value":"scheme"},"comments":{"kind":"string","value":" how much code have you written?\n counts all lines except for comment lines and blank lines\n)) . filenames)"},"code":{"kind":"string","value":"#lang scheme/base\n\n(require \"util.scm\")\n\n(provide loc)\n\n (fold + 0\n (map (lambda (filename)\n (file-line-fold\n (lambda (line-str total-loc)\n (let ((trimmed (string-trim-both line-str #\\space)))\n (cond ((string=? trimmed \"\") total-loc)\n ((memq (string-ref trimmed 0) comment-chars) total-loc)\n (else (+ 1 total-loc)))))\n 0\n filename))\n filenames)))\n\n\n"}}},{"rowIdx":610268,"cells":{"_id":{"kind":"string","value":"edcbdbbce0fdac8d366b70a28a717a3431f760e7d8d96f0c396af1b9675ca8df"},"repository":{"kind":"string","value":"madmax96/brave-clojure-solutions"},"name":{"kind":"string","value":"section_8.clj"},"content":{"kind":"string","value":"(ns clojure-brave.exercises.section-8)\n\nsetup for exercise 1\n(def order-details-validation\n {:name\n [\"Please enter a name\" not-empty]\n :email\n [\"Please enter an email address\" not-empty\n \"Your email address doesn't look like an email address\"\n #(or (empty? %) (re-seq #\"@\" %))]})\n\n(def order-details-good {:name \"user\" :email \"\"})\n(def order-details-bad {:name \"user\" :email \"usermail.com\"})\n\n(defn error-messages-for\n \"Return a seq of error messages\"\n [to-validate message-validator-pairs]\n (map first (filter #(not ((second %) to-validate))\n (partition 2 message-validator-pairs))))\n(defn validate\n \"Returns a map with a vector of errors for each key\"\n [to-validate validations]\n (reduce (fn [errors validation]\n (let [[fieldname validation-check-groups] validation\n value (get to-validate fieldname)\n error-messages (error-messages-for value validation-check-groups)]\n (if (empty? error-messages)\n errors\n (assoc errors fieldname error-messages))))\n {}\n validations))\n\n;we need if-valid macro in order to implement when-valid in most straightforward way,\n;similar to how 'when' macro from `clojure.core` is implemented in terms of 'if'\n(defmacro if-valid\n \"Handle validation more concisely\"\n [to-validate validations errors-name & then-else]\n `(let [~errors-name (validate ~to-validate ~validations)]\n (if (empty? 
~errors-name)\n ~@then-else)))\n\n1\n(defmacro when-valid\n [data data-validation & actions]\n `(if-valid ~data ~data-validation ~'err (do ~@actions) false))\n\n;Should execute both functions\n(when-valid order-details-good order-details-validation\n (println \"It's a success!\")\n (println :success))\n\n;Should return false\n(when-valid order-details-bad order-details-validation\n (println \"It's a success!\")\n (println :success))\n\n;Check expanded forms\n(macroexpand '(when-valid order-details order-details-validation\n (println \"It's a success!\")\n (println :success)))\n2\n(defmacro my-or\n \"macro for or logic\"\n ([] nil)\n ([x] x)\n ([form & forms]\n `(let [sym# ~form]\n (if sym# sym# (my-or ~@forms)))))\n\n(my-or nil false 2 1)\n(macroexpand '(my-or nil false 2 1))\n\n3\n(defmacro defattrs\n [& assignments]\n `(do\n ~@(map\n (fn [[retr attr]] `(def ~retr ~attr))\n (partition 2 assignments))))\n\n(defattrs c-int :intelligence wokring? :should-work)\n\n(print wokring? c-int)\n\n(macroexpand '(defattrs c-int :intelligence test :should-work))\n\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/madmax96/brave-clojure-solutions/3be234bdcf3704acd2aca62d1a46fa03463e5735/section_8.clj"},"language":{"kind":"string","value":"clojure"},"comments":{"kind":"string","value":"we need if-valid macro in order to implement when-valid in most straightforward way,\nsimilar to how 'when' macro from `clojure.core` is implemented in terms of 'if'\nShould execute both functions\nShould return false\nCheck expanded forms"},"code":{"kind":"string","value":"(ns clojure-brave.exercises.section-8)\n\nsetup for exercise 1\n(def order-details-validation\n {:name\n [\"Please enter a name\" not-empty]\n :email\n [\"Please enter an email address\" not-empty\n \"Your email address doesn't look like an email address\"\n #(or (empty? %) (re-seq #\"@\" %))]})\n\n(def order-details-good {:name \"user\" :email \"\"})\n(def order-details-bad {:name \"user\" :email \"usermail.com\"})\n\n(defn error-messages-for\n \"Return a seq of error messages\"\n [to-validate message-validator-pairs]\n (map first (filter #(not ((second %) to-validate))\n (partition 2 message-validator-pairs))))\n(defn validate\n \"Returns a map with a vector of errors for each key\"\n [to-validate validations]\n (reduce (fn [errors validation]\n (let [[fieldname validation-check-groups] validation\n value (get to-validate fieldname)\n error-messages (error-messages-for value validation-check-groups)]\n (if (empty? error-messages)\n errors\n (assoc errors fieldname error-messages))))\n {}\n validations))\n\n(defmacro if-valid\n \"Handle validation more concisely\"\n [to-validate validations errors-name & then-else]\n `(let [~errors-name (validate ~to-validate ~validations)]\n (if (empty? 
~errors-name)\n ~@then-else)))\n\n1\n(defmacro when-valid\n [data data-validation & actions]\n `(if-valid ~data ~data-validation ~'err (do ~@actions) false))\n\n(when-valid order-details-good order-details-validation\n (println \"It's a success!\")\n (println :success))\n\n(when-valid order-details-bad order-details-validation\n (println \"It's a success!\")\n (println :success))\n\n(macroexpand '(when-valid order-details order-details-validation\n (println \"It's a success!\")\n (println :success)))\n2\n(defmacro my-or\n \"macro for or logic\"\n ([] nil)\n ([x] x)\n ([form & forms]\n `(let [sym# ~form]\n (if sym# sym# (my-or ~@forms)))))\n\n(my-or nil false 2 1)\n(macroexpand '(my-or nil false 2 1))\n\n3\n(defmacro defattrs\n [& assignments]\n `(do\n ~@(map\n (fn [[retr attr]] `(def ~retr ~attr))\n (partition 2 assignments))))\n\n(defattrs c-int :intelligence wokring? :should-work)\n\n(print wokring? c-int)\n\n(macroexpand '(defattrs c-int :intelligence test :should-work))\n\n"}}},{"rowIdx":610269,"cells":{"_id":{"kind":"string","value":"335cd37f510fea2c873504bdbf2484f4eec983570939507828c7671f0c5df325"},"repository":{"kind":"string","value":"geocaml/ocaml-geojson"},"name":{"kind":"string","value":"geojsone.mli"},"content":{"kind":"string","value":" Copyright ( c ) 2021 - 2022 < > \n\n Permission to use , copy , modify , and/or distribute this software for any \n purpose with or without fee is hereby granted , provided that the above \n copyright notice and this permission notice appear in all copies . \n\n THE SOFTWARE IS PROVIDED \" AS IS \" , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR \n , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY , \n FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL \n THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER \n LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING \n FROM , OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER \n DEALINGS IN THE SOFTWARE . \n\n\n Permission to use, copy, modify, and/or distribute this software for any\n purpose with or without fee is hereby granted, provided that the above\n copyright notice and this permission notice appear in all copies.\n\n THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL\n THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER\n DEALINGS IN THE SOFTWARE.\n*)\n\n* A library for manipulating large documents without reading the whole \n document into memory using the { ! streaming , JSON parser .\n document into memory using the {!Jsonm} streaming, JSON parser. *)\n\nmodule Err : sig\n type location = (int * int) * (int * int)\n type t = [ `Error of location * Jsone.error | `EOI | `Unexpected of string ]\n\n val pp : Format.formatter -> t -> unit\nend\n\nmodule G : Geojson.S with type json = Ezjsone.value\n\n* { 2 Maps } \n\n Maps are functions that allow you to manipulate common structure in GeoJson \n objects . These will be written directly back to the destination that you \n provide .\n\n Maps are functions that allow you to manipulate common structure in GeoJson\n objects. These will be written directly back to the destination that you\n provide. 
*)\n\nval map_geometry :\n (G.Geometry.t -> G.Geometry.t) ->\n Jsone.src ->\n Jsone.dst ->\n (unit, Err.t) result\n* [ map_geometry f src dst ] will apply [ f ] to all objects . This is \n essentially any \n { { : #section-3.1 } geometry \n object } . \n\n The map will recurse into geometry collections . Note for the moment if you \n have a single geometry object as your document , this will not work .\n essentially any\n {{:#section-3.1} geometry\n object}.\n\n The map will recurse into geometry collections. Note for the moment if you\n have a single geometry object as your document, this will not work. *)\n\nval map_props :\n (Ezjsone.value -> Ezjsone.value) ->\n Jsone.src ->\n Jsone.dst ->\n (unit, Err.t) result\n* [ map_props ~f ] will apply [ f ] to each feature 's properties field . \n The properties field is decoded into an { ! Ezjsone.value } for convenience .\n The properties field is decoded into an {!Ezjsone.value} for convenience. *)\n\n* { 2 Folds } \n\n Folds are like maps except you can collect items into an accumulator which \n is returned to you . \n\n For example , you might want to collect all of the [ names ] in the \n [ properties ] of features . \n\n { [ \n let get_string_exn = function ` String s - > s | _ - > failwith \" err \" \n\n let = function \n | ` O assoc - > List.assoc \" name \" assoc | > get_string_exn \n | _ - > failwith \" err \" \n\n let places src = \n ( fun acc p - > p : : acc ) [ ] src \n ] }\n\n Folds are like maps except you can collect items into an accumulator which\n is returned to you.\n\n For example, you might want to collect all of the [names] in the\n [properties] of features.\n\n {[\n let get_string_exn = function `String s -> s | _ -> failwith \"err\"\n\n let get_name = function\n | `O assoc -> List.assoc \"name\" assoc |> get_string_exn\n | _ -> failwith \"err\"\n\n let places src =\n Geojsonm.fold_props (fun acc p -> get_name p :: acc) [] src\n ]} *)\n\nval fold_geometry :\n ('a -> G.Geometry.t -> 'a) -> 'a -> Jsone.src -> ('a, Err.t) result\n(** [fold_geometry f acc src] is much like {!map_geometry} but allows you to\n accumulate some result that is then returned to you. *)\n\nval fold_props :\n ('a -> Ezjsone.value -> 'a) -> 'a -> Jsone.src -> ('a, Err.t) result\n(** [fold_props f init src] *)\n\n* { 2 Iterators } \n\n Iterators are similar to map functions except they take a function [ f ] that \n takes a single element from the data - structure as an argument and returns \n [ unit ] . In that sense , they tend to be functions with side - effects , such as \n [ print_endline ] . \n\n For example , we might want to print the JSON value of every geometry object \n in a GeoJSON object . \n\n { [ \n let print_geometry g = \n ( . ) \n\n let values src = Geojsonm.iter_geometry print_geometry src \n ] }\n\n Iterators are similar to map functions except they take a function [f] that\n takes a single element from the data-structure as an argument and returns\n [unit]. 
In that sense, they tend to be functions with side-effects, such as\n [print_endline].\n\n For example, we might want to print the JSON value of every geometry object\n in a GeoJSON object.\n\n {[\n let print_geometry g =\n print_endline @@ Ezjsone.value_to_string (Geojsonm.G.Geometry.to_json g)\n\n let values src = Geojsonm.iter_geometry print_geometry src\n ]} *)\n\nval iter_geometry : (G.t -> unit) -> Jsone.src -> (unit, Err.t) result\n* [ iter_geometry f src ] will apply [ f ] to all objects .\n\nval iter_props : (Ezjsone.value -> unit) -> Jsone.src -> (unit, Err.t) result\n(** [iter_props f src] will apply [f] to each feature's properties field. *)\n\n* { 2 Effect - based , non - blocking libraries } \n\n These libraries use effects to perform non - blocking parsing . They are \n currently a part of Geojsone and exposed for other libraries to use .\n\n These libraries use effects to perform non-blocking parsing. They are\n currently a part of Geojsone and exposed for other libraries to use. *)\n\nmodule Ezjsone = Ezjsone\nmodule Jsone = Jsone\nmodule Uutfe = Uutfe\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/geocaml/ocaml-geojson/1342f4627caa813cd153d5724f73c2fb8f0eac31/src/geojsone/geojsone.mli"},"language":{"kind":"string","value":"ocaml"},"comments":{"kind":"string","value":"* [fold_geometry f acc src] is much like {!map_geometry} but allows you to\n accumulate some result that is then returned to you. \n* [fold_props f init src] \n* [iter_props f src] will apply [f] to each feature's properties field. "},"code":{"kind":"string","value":" Copyright ( c ) 2021 - 2022 < > \n\n Permission to use , copy , modify , and/or distribute this software for any \n purpose with or without fee is hereby granted , provided that the above \n copyright notice and this permission notice appear in all copies . \n\n THE SOFTWARE IS PROVIDED \" AS IS \" , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR \n , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY , \n FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL \n THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER \n LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING \n FROM , OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER \n DEALINGS IN THE SOFTWARE . \n\n\n Permission to use, copy, modify, and/or distribute this software for any\n purpose with or without fee is hereby granted, provided that the above\n copyright notice and this permission notice appear in all copies.\n\n THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL\n THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER\n DEALINGS IN THE SOFTWARE.\n*)\n\n* A library for manipulating large documents without reading the whole \n document into memory using the { ! streaming , JSON parser .\n document into memory using the {!Jsonm} streaming, JSON parser. 
*)\n\nmodule Err : sig\n type location = (int * int) * (int * int)\n type t = [ `Error of location * Jsone.error | `EOI | `Unexpected of string ]\n\n val pp : Format.formatter -> t -> unit\nend\n\nmodule G : Geojson.S with type json = Ezjsone.value\n\n* { 2 Maps } \n\n Maps are functions that allow you to manipulate common structure in GeoJson \n objects . These will be written directly back to the destination that you \n provide .\n\n Maps are functions that allow you to manipulate common structure in GeoJson\n objects. These will be written directly back to the destination that you\n provide. *)\n\nval map_geometry :\n (G.Geometry.t -> G.Geometry.t) ->\n Jsone.src ->\n Jsone.dst ->\n (unit, Err.t) result\n* [ map_geometry f src dst ] will apply [ f ] to all objects . This is \n essentially any \n { { : #section-3.1 } geometry \n object } . \n\n The map will recurse into geometry collections . Note for the moment if you \n have a single geometry object as your document , this will not work .\n essentially any\n {{:#section-3.1} geometry\n object}.\n\n The map will recurse into geometry collections. Note for the moment if you\n have a single geometry object as your document, this will not work. *)\n\nval map_props :\n (Ezjsone.value -> Ezjsone.value) ->\n Jsone.src ->\n Jsone.dst ->\n (unit, Err.t) result\n* [ map_props ~f ] will apply [ f ] to each feature 's properties field . \n The properties field is decoded into an { ! Ezjsone.value } for convenience .\n The properties field is decoded into an {!Ezjsone.value} for convenience. *)\n\n* { 2 Folds } \n\n Folds are like maps except you can collect items into an accumulator which \n is returned to you . \n\n For example , you might want to collect all of the [ names ] in the \n [ properties ] of features . \n\n { [ \n let get_string_exn = function ` String s - > s | _ - > failwith \" err \" \n\n let = function \n | ` O assoc - > List.assoc \" name \" assoc | > get_string_exn \n | _ - > failwith \" err \" \n\n let places src = \n ( fun acc p - > p : : acc ) [ ] src \n ] }\n\n Folds are like maps except you can collect items into an accumulator which\n is returned to you.\n\n For example, you might want to collect all of the [names] in the\n [properties] of features.\n\n {[\n let get_string_exn = function `String s -> s | _ -> failwith \"err\"\n\n let get_name = function\n | `O assoc -> List.assoc \"name\" assoc |> get_string_exn\n | _ -> failwith \"err\"\n\n let places src =\n Geojsonm.fold_props (fun acc p -> get_name p :: acc) [] src\n ]} *)\n\nval fold_geometry :\n ('a -> G.Geometry.t -> 'a) -> 'a -> Jsone.src -> ('a, Err.t) result\n\nval fold_props :\n ('a -> Ezjsone.value -> 'a) -> 'a -> Jsone.src -> ('a, Err.t) result\n\n* { 2 Iterators } \n\n Iterators are similar to map functions except they take a function [ f ] that \n takes a single element from the data - structure as an argument and returns \n [ unit ] . In that sense , they tend to be functions with side - effects , such as \n [ print_endline ] . \n\n For example , we might want to print the JSON value of every geometry object \n in a GeoJSON object . \n\n { [ \n let print_geometry g = \n ( . ) \n\n let values src = Geojsonm.iter_geometry print_geometry src \n ] }\n\n Iterators are similar to map functions except they take a function [f] that\n takes a single element from the data-structure as an argument and returns\n [unit]. 
In that sense, they tend to be functions with side-effects, such as\n [print_endline].\n\n For example, we might want to print the JSON value of every geometry object\n in a GeoJSON object.\n\n {[\n let print_geometry g =\n print_endline @@ Ezjsone.value_to_string (Geojsonm.G.Geometry.to_json g)\n\n let values src = Geojsonm.iter_geometry print_geometry src\n ]} *)\n\nval iter_geometry : (G.t -> unit) -> Jsone.src -> (unit, Err.t) result\n* [ iter_geometry f src ] will apply [ f ] to all objects .\n\nval iter_props : (Ezjsone.value -> unit) -> Jsone.src -> (unit, Err.t) result\n\n* { 2 Effect - based , non - blocking libraries } \n\n These libraries use effects to perform non - blocking parsing . They are \n currently a part of Geojsone and exposed for other libraries to use .\n\n These libraries use effects to perform non-blocking parsing. They are\n currently a part of Geojsone and exposed for other libraries to use. *)\n\nmodule Ezjsone = Ezjsone\nmodule Jsone = Jsone\nmodule Uutfe = Uutfe\n"}}},{"rowIdx":610270,"cells":{"_id":{"kind":"string","value":"bda02469c7de385dfb454449b2e2466c5cdbe512c7de07d562364fffe6bd5f45"},"repository":{"kind":"string","value":"acieroid/scala-am"},"name":{"kind":"string","value":"nqueens.scm"},"content":{"kind":"string","value":"(define (one-to n)\n (letrec ((loop (lambda (i l)\n (if (= i 0)\n l\n (loop (- i 1) (cons i l))))))\n (loop n '())))\n\n(define (ok? row dist placed)\n (if (null? placed)\n #t\n (and (not (= (car placed) (+ row dist)))\n (not (= (car placed) (- row dist)))\n (ok? row (+ dist 1) (cdr placed)))))\n\n(define (try-it x y z)\n (if (null? x)\n (if (null? y)\n 1\n 0)\n (+ (if (ok? (car x) 1 z)\n (try-it (append (cdr x) y) '() (cons (car x) z))\n 0)\n (try-it (cdr x) (cons (car x) y) z))))\n\n(define (nqueens n)\n (try-it (one-to n) '() '()))\n\n(nqueens 8)\n#t\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/acieroid/scala-am/13ef3befbfc664b77f31f56847c30d60f4ee7dfe/test/R5RS/gambit/nqueens.scm"},"language":{"kind":"string","value":"scheme"},"comments":{"kind":"string","value":""},"code":{"kind":"string","value":"(define (one-to n)\n (letrec ((loop (lambda (i l)\n (if (= i 0)\n l\n (loop (- i 1) (cons i l))))))\n (loop n '())))\n\n(define (ok? row dist placed)\n (if (null? placed)\n #t\n (and (not (= (car placed) (+ row dist)))\n (not (= (car placed) (- row dist)))\n (ok? row (+ dist 1) (cdr placed)))))\n\n(define (try-it x y z)\n (if (null? x)\n (if (null? y)\n 1\n 0)\n (+ (if (ok? (car x) 1 z)\n (try-it (append (cdr x) y) '() (cons (car x) z))\n 0)\n (try-it (cdr x) (cons (car x) y) z))))\n\n(define (nqueens n)\n (try-it (one-to n) '() '()))\n\n(nqueens 8)\n#t\n"}}},{"rowIdx":610271,"cells":{"_id":{"kind":"string","value":"a9f730699351ac9802f59db29444d24785563bff66d67a9a0eec9662899ea1a8"},"repository":{"kind":"string","value":"haskell/vector"},"name":{"kind":"string","value":"take.hs"},"content":{"kind":"string","value":"import qualified Data.Vector as U\nimport Data.Bits\nmain = print . U.length . U.take 100000 . U.replicate 1000000 $ (7 :: Int)\n\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/haskell/vector/4c87e88f07aad166c6ae2ccb94fa539fbdd99a91/old-testsuite/microsuite/take.hs"},"language":{"kind":"string","value":"haskell"},"comments":{"kind":"string","value":""},"code":{"kind":"string","value":"import qualified Data.Vector as U\nimport Data.Bits\nmain = print . U.length . U.take 100000 . 
U.replicate 1000000 $ (7 :: Int)\n\n"}}},{"rowIdx":610272,"cells":{"_id":{"kind":"string","value":"d552abab566a0a3f50e8d1787878e8def49d571c4c6be967ef64bece40a65253"},"repository":{"kind":"string","value":"cloudant/monic"},"name":{"kind":"string","value":"monic_utils.erl"},"content":{"kind":"string","value":" Copyright 2011 Cloudant\n%%\n Licensed under the Apache License , Version 2.0 ( the \" License \" ) ; you may not\n%% use this file except in compliance with the License. You may obtain a copy of\n%% the License at\n%%\n%% -2.0\n%%\n%% Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \" AS IS \" BASIS , WITHOUT\n%% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n%% License for the specific language governing permissions and limitations under\n%% the License.\n\n-module(monic_utils).\n-export([path/2, exists/2, open/2]).\n-export([write_term/2, pread_term/2]).\n\n-include(\"monic.hrl\").\n\n-define(MAX_TERM, (1 bsl 16)).\n\npath(ReqData, Context) ->\n Root = proplists:get_value(root, Context, \"tmp\"),\n File = wrq:path_info(file, ReqData),\n filename:join(Root, File).\n\nopen(ReqData, Context) ->\n case monic_file:open(path(ReqData, Context)) of\n {ok, Pid} ->\n monic_file_lru:update(Pid),\n {ok, Pid};\n Else ->\n Else\n end.\n\nexists(ReqData, Context) ->\n filelib:is_file(path(ReqData, Context)).\n\n-spec write_term(term(), term()) -> {ok, integer()} | {error, term()}.\nwrite_term(Fd, Term) ->\n Bin = term_to_binary(Term),\n Size = iolist_size(Bin),\n case Size =< ?MAX_TERM of\n true ->\n case file:write(Fd, <>) of\n ok ->\n {ok, Size + 2};\n Else ->\n Else\n end;\n false ->\n {error, term_too_long}\n end.\n\n-spec pread_term(term(), integer()) -> {ok, integer(), term()} | eof | {error, term()}.\npread_term(Fd, Location) ->\n case file:pread(Fd, Location, 2) of\n {ok, <>} ->\n case file:pread(Fd, Location + 2, Size) of\n {ok, <>} ->\n {ok, Size + 2, binary_to_term(Bin)};\n {ok, _} ->\n eof;\n Else ->\n Else\n end;\n Else ->\n Else\n end.\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/cloudant/monic/9b7670d53ee40efea57c777f044b3de74c66e6de/src/monic_utils.erl"},"language":{"kind":"string","value":"erlang"},"comments":{"kind":"string","value":"\n use this file except in compliance with the License. You may obtain a copy of\n the License at\n\n -2.0\n\n Unless required by applicable law or agreed to in writing, software\n WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n License for the specific language governing permissions and limitations under\n the License."},"code":{"kind":"string","value":" Copyright 2011 Cloudant\n Licensed under the Apache License , Version 2.0 ( the \" License \" ) ; you may not\n distributed under the License is distributed on an \" AS IS \" BASIS , WITHOUT\n\n-module(monic_utils).\n-export([path/2, exists/2, open/2]).\n-export([write_term/2, pread_term/2]).\n\n-include(\"monic.hrl\").\n\n-define(MAX_TERM, (1 bsl 16)).\n\npath(ReqData, Context) ->\n Root = proplists:get_value(root, Context, \"tmp\"),\n File = wrq:path_info(file, ReqData),\n filename:join(Root, File).\n\nopen(ReqData, Context) ->\n case monic_file:open(path(ReqData, Context)) of\n {ok, Pid} ->\n monic_file_lru:update(Pid),\n {ok, Pid};\n Else ->\n Else\n end.\n\nexists(ReqData, Context) ->\n filelib:is_file(path(ReqData, Context)).\n\n-spec write_term(term(), term()) -> {ok, integer()} | {error, term()}.\nwrite_term(Fd, Term) ->\n Bin = term_to_binary(Term),\n Size = iolist_size(Bin),\n case Size =< ?MAX_TERM of\n true ->\n case file:write(Fd, <>) of\n ok ->\n {ok, Size + 2};\n Else ->\n Else\n end;\n false ->\n {error, term_too_long}\n end.\n\n-spec pread_term(term(), integer()) -> {ok, integer(), term()} | eof | {error, term()}.\npread_term(Fd, Location) ->\n case file:pread(Fd, Location, 2) of\n {ok, <>} ->\n case file:pread(Fd, Location + 2, Size) of\n {ok, <>} ->\n {ok, Size + 2, binary_to_term(Bin)};\n {ok, _} ->\n eof;\n Else ->\n Else\n end;\n Else ->\n Else\n end.\n"}}},{"rowIdx":610273,"cells":{"_id":{"kind":"string","value":"9f21b932842b907468e2f9d2612fce45e89c5d990b310af4d7bd5e7d4e7e28d9"},"repository":{"kind":"string","value":"incoherentsoftware/defect-process"},"name":{"kind":"string","value":"BubbleProjectile.hs"},"content":{"kind":"string","value":"module Enemy.All.BubbleTurret.BubbleProjectile\n ( bubbleSpinPath\n , bubbleExplodePath\n , mkBubbleProjectile\n ) where\n\nimport Control.Monad.IO.Class (MonadIO)\nimport qualified Data.Set as S\n\nimport Attack\nimport Attack.Projectile\nimport Collision\nimport Configs.All.Enemy\nimport Configs.All.Enemy.BubbleTurret\nimport Constants\nimport Enemy.All.BubbleTurret.Data\nimport FileCache\nimport Id\nimport Msg\nimport Projectile as P\nimport Util\nimport Window.Graphics\nimport World.ZIndex\n\nbubbleSpinPath =\n PackResourceFilePath \"data/enemies/bubble-turret-enemy.pack\" \"bubble-spin.spr\" :: PackResourceFilePath\nbubbleExplodePath =\n PackResourceFilePath \"data/enemies/bubble-turret-enemy.pack\" \"bubble-explode.atk\" :: PackResourceFilePath\n\nregisteredCollisions = S.fromList\n [ ProjRegisteredPlayerCollision\n ] :: S.Set ProjectileRegisteredCollision\n\ndata BubbleProjVelBehavior\n = InitialRiseVel Secs\n | RiseFallVel Secs\n\ndata BubbleProjData = BubbleProjData\n { _velBehavior :: BubbleProjVelBehavior\n , _pos :: Pos2\n , _dir :: Direction\n , _sprite :: Sprite\n , _explodeAtkDesc :: AttackDescription\n , _config :: BubbleTurretEnemyConfig\n }\n\nmkBubbleProjData\n :: (FileCache m, GraphicsRead m, MonadIO m)\n => Pos2\n -> Direction\n -> BubbleTurretEnemyData\n -> m BubbleProjData\nmkBubbleProjData pos dir bubbleProjData = do\n spr <- loadPackSprite bubbleSpinPath\n explodeAtkDesc <- loadPackAttackDescription bubbleExplodePath\n let cfg = _bubbleTurret $ _config (bubbleProjData :: BubbleTurretEnemyData)\n\n return $ BubbleProjData\n { _velBehavior = InitialRiseVel $ _bubbleProjInitialRiseSecs cfg\n , _pos = pos\n , _dir = dir\n , _sprite = spr\n , _explodeAtkDesc = 
explodeAtkDesc\n , _config = cfg\n }\n\nbubbleProjHitbox :: ProjectileHitbox BubbleProjData\nbubbleProjHitbox bubbleProj = rectHitbox pos width height\n where\n bubbleProjData = _data bubbleProj\n Pos2 x y = _pos (bubbleProjData :: BubbleProjData)\n cfg = _config (bubbleProjData :: BubbleProjData)\n width = _bubbleProjWidth cfg\n height = _bubbleProjHeight cfg\n pos = Pos2 (x - width / 2.0) (y - height / 2.0)\n\nmkBubbleProjectile\n :: (FileCache m, GraphicsRead m, MonadIO m)\n => Pos2\n -> Direction\n -> BubbleTurretEnemyData\n -> m (Some Projectile)\nmkBubbleProjectile pos dir bubbleTurretData = do\n bubbleProjData <- mkBubbleProjData pos dir bubbleTurretData\n msgId <- newId\n\n let\n dummyHbx = dummyHitbox pos\n ttl = _bubbleProjAliveSecs $ _config (bubbleProjData :: BubbleProjData)\n\n return . Some $ (mkProjectile bubbleProjData msgId dummyHbx ttl)\n { _hitbox = bubbleProjHitbox\n , _registeredCollisions = registeredCollisions\n , _think = thinkBubbleProj\n , _update = updateBubbleProj\n , _draw = drawBubbleProj\n , _processCollisions = processBubbleProjCollisions\n }\n\nbubbleProjExplodeRemoveMsgs\n :: (AllowMsgWrite p NewUpdateProjectileMsgPayload, AllowMsgWrite p ProjectileMsgPayload)\n => Projectile BubbleProjData\n -> [Msg p]\nbubbleProjExplodeRemoveMsgs bubbleProj = [mkAtkProjMsg, removeBubbleProjMsg]\n where\n bubbleProjData = _data bubbleProj\n pos = _pos (bubbleProjData :: BubbleProjData)\n dir = _dir (bubbleProjData :: BubbleProjData)\n explodeAtkDesc = _explodeAtkDesc bubbleProjData\n mkAtkProj = mkEnemyAttackProjectile pos dir explodeAtkDesc\n mkAtkProjMsg = mkMsg $ NewUpdateProjectileMsgAddM mkAtkProj\n\n bubbleProjId = P._msgId bubbleProj\n removeBubbleProjMsg = mkMsgTo (ProjectileMsgSetTtl 0.0) bubbleProjId\n\nthinkBubbleProj :: Monad m => ProjectileThink BubbleProjData m\nthinkBubbleProj bubbleProj = return $ if\n | willDisappear -> bubbleProjExplodeRemoveMsgs bubbleProj\n\n | otherwise ->\n let\n bubbleProjData = _data bubbleProj\n cfg = _config (bubbleProjData :: BubbleProjData)\n speedX = _bubbleProjSpeedX cfg\n speedY = _bubbleProjSpeedY cfg\n riseFallPeriodSecs = _bubbleProjRiseFallPeriodSecs cfg\n dir = _dir (bubbleProjData :: BubbleProjData)\n velX = speedX * directionNeg dir\n velY = vecY $ P._vel bubbleProj\n (velBehavior, velY') = case _velBehavior bubbleProjData of\n InitialRiseVel velTtl\n | velTtl <= 0.0 -> (RiseFallVel riseFallPeriodSecs, speedY)\n | otherwise -> (InitialRiseVel (velTtl - timeStep), -speedY)\n RiseFallVel velTtl\n | velTtl <= 0.0 -> (RiseFallVel riseFallPeriodSecs, -velY)\n | otherwise -> (RiseFallVel (velTtl - timeStep), velY)\n\n update = \\p -> p\n { _data = (P._data p) {_velBehavior = velBehavior}\n , _vel = Vel2 velX velY'\n }\n in [mkMsgTo (ProjectileMsgUpdate update) (P._msgId bubbleProj)]\n\n where willDisappear = P._ttl bubbleProj - timeStep <= 0.0\n\nupdateBubbleProj :: Monad m => ProjectileUpdate BubbleProjData m\nupdateBubbleProj bubbleProj = return $ bubbleProj {_data = bubbleProjData'}\n where\n bubbleProjData = _data bubbleProj\n pos = _pos (bubbleProjData :: BubbleProjData)\n vel = P._vel bubbleProj\n pos' = pos `vecAdd` (toPos2 $ vel `vecMul` timeStep)\n spr = _sprite (bubbleProjData :: BubbleProjData)\n bubbleProjData' = bubbleProjData\n { _pos = pos'\n , _sprite = updateSprite spr\n } :: BubbleProjData\n\ndrawBubbleProj :: (GraphicsReadWrite m, MonadIO m) => ProjectileDraw BubbleProjData m\ndrawBubbleProj bubbleProj = drawSprite pos dir enemyAttackProjectileZIndex spr\n where\n bubbleProjData = _data bubbleProj\n pos = 
_pos (bubbleProjData :: BubbleProjData)\n dir = _dir (bubbleProjData :: BubbleProjData)\n spr = _sprite (bubbleProjData :: BubbleProjData)\n\nprocessBubbleProjCollisions :: ProjectileProcessCollisions BubbleProjData\nprocessBubbleProjCollisions collisions bubbleProj = foldr processCollision [] collisions\n where\n processCollision :: ProjectileCollision -> [Msg ThinkCollisionMsgsPhase] -> [Msg ThinkCollisionMsgsPhase]\n processCollision collision !msgs = case collision of\n ProjPlayerCollision _ -> bubbleProjExplodeRemoveMsgs bubbleProj ++ msgs\n _ -> msgs\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/incoherentsoftware/defect-process/8797aad1d93bff5aadd7226c39a48f45cf76746e/src/Enemy/All/BubbleTurret/BubbleProjectile.hs"},"language":{"kind":"string","value":"haskell"},"comments":{"kind":"string","value":""},"code":{"kind":"string","value":"module Enemy.All.BubbleTurret.BubbleProjectile\n ( bubbleSpinPath\n , bubbleExplodePath\n , mkBubbleProjectile\n ) where\n\nimport Control.Monad.IO.Class (MonadIO)\nimport qualified Data.Set as S\n\nimport Attack\nimport Attack.Projectile\nimport Collision\nimport Configs.All.Enemy\nimport Configs.All.Enemy.BubbleTurret\nimport Constants\nimport Enemy.All.BubbleTurret.Data\nimport FileCache\nimport Id\nimport Msg\nimport Projectile as P\nimport Util\nimport Window.Graphics\nimport World.ZIndex\n\nbubbleSpinPath =\n PackResourceFilePath \"data/enemies/bubble-turret-enemy.pack\" \"bubble-spin.spr\" :: PackResourceFilePath\nbubbleExplodePath =\n PackResourceFilePath \"data/enemies/bubble-turret-enemy.pack\" \"bubble-explode.atk\" :: PackResourceFilePath\n\nregisteredCollisions = S.fromList\n [ ProjRegisteredPlayerCollision\n ] :: S.Set ProjectileRegisteredCollision\n\ndata BubbleProjVelBehavior\n = InitialRiseVel Secs\n | RiseFallVel Secs\n\ndata BubbleProjData = BubbleProjData\n { _velBehavior :: BubbleProjVelBehavior\n , _pos :: Pos2\n , _dir :: Direction\n , _sprite :: Sprite\n , _explodeAtkDesc :: AttackDescription\n , _config :: BubbleTurretEnemyConfig\n }\n\nmkBubbleProjData\n :: (FileCache m, GraphicsRead m, MonadIO m)\n => Pos2\n -> Direction\n -> BubbleTurretEnemyData\n -> m BubbleProjData\nmkBubbleProjData pos dir bubbleProjData = do\n spr <- loadPackSprite bubbleSpinPath\n explodeAtkDesc <- loadPackAttackDescription bubbleExplodePath\n let cfg = _bubbleTurret $ _config (bubbleProjData :: BubbleTurretEnemyData)\n\n return $ BubbleProjData\n { _velBehavior = InitialRiseVel $ _bubbleProjInitialRiseSecs cfg\n , _pos = pos\n , _dir = dir\n , _sprite = spr\n , _explodeAtkDesc = explodeAtkDesc\n , _config = cfg\n }\n\nbubbleProjHitbox :: ProjectileHitbox BubbleProjData\nbubbleProjHitbox bubbleProj = rectHitbox pos width height\n where\n bubbleProjData = _data bubbleProj\n Pos2 x y = _pos (bubbleProjData :: BubbleProjData)\n cfg = _config (bubbleProjData :: BubbleProjData)\n width = _bubbleProjWidth cfg\n height = _bubbleProjHeight cfg\n pos = Pos2 (x - width / 2.0) (y - height / 2.0)\n\nmkBubbleProjectile\n :: (FileCache m, GraphicsRead m, MonadIO m)\n => Pos2\n -> Direction\n -> BubbleTurretEnemyData\n -> m (Some Projectile)\nmkBubbleProjectile pos dir bubbleTurretData = do\n bubbleProjData <- mkBubbleProjData pos dir bubbleTurretData\n msgId <- newId\n\n let\n dummyHbx = dummyHitbox pos\n ttl = _bubbleProjAliveSecs $ _config (bubbleProjData :: BubbleProjData)\n\n return . 
Some $ (mkProjectile bubbleProjData msgId dummyHbx ttl)\n { _hitbox = bubbleProjHitbox\n , _registeredCollisions = registeredCollisions\n , _think = thinkBubbleProj\n , _update = updateBubbleProj\n , _draw = drawBubbleProj\n , _processCollisions = processBubbleProjCollisions\n }\n\nbubbleProjExplodeRemoveMsgs\n :: (AllowMsgWrite p NewUpdateProjectileMsgPayload, AllowMsgWrite p ProjectileMsgPayload)\n => Projectile BubbleProjData\n -> [Msg p]\nbubbleProjExplodeRemoveMsgs bubbleProj = [mkAtkProjMsg, removeBubbleProjMsg]\n where\n bubbleProjData = _data bubbleProj\n pos = _pos (bubbleProjData :: BubbleProjData)\n dir = _dir (bubbleProjData :: BubbleProjData)\n explodeAtkDesc = _explodeAtkDesc bubbleProjData\n mkAtkProj = mkEnemyAttackProjectile pos dir explodeAtkDesc\n mkAtkProjMsg = mkMsg $ NewUpdateProjectileMsgAddM mkAtkProj\n\n bubbleProjId = P._msgId bubbleProj\n removeBubbleProjMsg = mkMsgTo (ProjectileMsgSetTtl 0.0) bubbleProjId\n\nthinkBubbleProj :: Monad m => ProjectileThink BubbleProjData m\nthinkBubbleProj bubbleProj = return $ if\n | willDisappear -> bubbleProjExplodeRemoveMsgs bubbleProj\n\n | otherwise ->\n let\n bubbleProjData = _data bubbleProj\n cfg = _config (bubbleProjData :: BubbleProjData)\n speedX = _bubbleProjSpeedX cfg\n speedY = _bubbleProjSpeedY cfg\n riseFallPeriodSecs = _bubbleProjRiseFallPeriodSecs cfg\n dir = _dir (bubbleProjData :: BubbleProjData)\n velX = speedX * directionNeg dir\n velY = vecY $ P._vel bubbleProj\n (velBehavior, velY') = case _velBehavior bubbleProjData of\n InitialRiseVel velTtl\n | velTtl <= 0.0 -> (RiseFallVel riseFallPeriodSecs, speedY)\n | otherwise -> (InitialRiseVel (velTtl - timeStep), -speedY)\n RiseFallVel velTtl\n | velTtl <= 0.0 -> (RiseFallVel riseFallPeriodSecs, -velY)\n | otherwise -> (RiseFallVel (velTtl - timeStep), velY)\n\n update = \\p -> p\n { _data = (P._data p) {_velBehavior = velBehavior}\n , _vel = Vel2 velX velY'\n }\n in [mkMsgTo (ProjectileMsgUpdate update) (P._msgId bubbleProj)]\n\n where willDisappear = P._ttl bubbleProj - timeStep <= 0.0\n\nupdateBubbleProj :: Monad m => ProjectileUpdate BubbleProjData m\nupdateBubbleProj bubbleProj = return $ bubbleProj {_data = bubbleProjData'}\n where\n bubbleProjData = _data bubbleProj\n pos = _pos (bubbleProjData :: BubbleProjData)\n vel = P._vel bubbleProj\n pos' = pos `vecAdd` (toPos2 $ vel `vecMul` timeStep)\n spr = _sprite (bubbleProjData :: BubbleProjData)\n bubbleProjData' = bubbleProjData\n { _pos = pos'\n , _sprite = updateSprite spr\n } :: BubbleProjData\n\ndrawBubbleProj :: (GraphicsReadWrite m, MonadIO m) => ProjectileDraw BubbleProjData m\ndrawBubbleProj bubbleProj = drawSprite pos dir enemyAttackProjectileZIndex spr\n where\n bubbleProjData = _data bubbleProj\n pos = _pos (bubbleProjData :: BubbleProjData)\n dir = _dir (bubbleProjData :: BubbleProjData)\n spr = _sprite (bubbleProjData :: BubbleProjData)\n\nprocessBubbleProjCollisions :: ProjectileProcessCollisions BubbleProjData\nprocessBubbleProjCollisions collisions bubbleProj = foldr processCollision [] collisions\n where\n processCollision :: ProjectileCollision -> [Msg ThinkCollisionMsgsPhase] -> [Msg ThinkCollisionMsgsPhase]\n processCollision collision !msgs = case collision of\n ProjPlayerCollision _ -> bubbleProjExplodeRemoveMsgs bubbleProj ++ msgs\n _ -> 
msgs\n"}}},{"rowIdx":610274,"cells":{"_id":{"kind":"string","value":"be9a2e7955dbf0f7e98f37c1d86c47f5e50fea70b0bd46bda1e799f973b8fbb6"},"repository":{"kind":"string","value":"racket/racket7"},"name":{"kind":"string","value":"main.rkt"},"content":{"kind":"string","value":"#lang racket/base\n(printf \"pkg-b first main\\n\")\n(exit 42)\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/racket/racket7/5dbb62c6bbec198b4a790f1dc08fef0c45c2e32b/pkgs/racket-test/tests/pkg/test-pkgs/pkg-b-first/pkg-b/main.rkt"},"language":{"kind":"string","value":"racket"},"comments":{"kind":"string","value":""},"code":{"kind":"string","value":"#lang racket/base\n(printf \"pkg-b first main\\n\")\n(exit 42)\n"}}},{"rowIdx":610275,"cells":{"_id":{"kind":"string","value":"234b89d41f4977c73e7ed100c381fa1e961cfa8a62cfae3adc0d7444d9778256"},"repository":{"kind":"string","value":"clojerl/clojerl"},"name":{"kind":"string","value":"clojerl_Atom_SUITE.erl"},"content":{"kind":"string","value":"-module(clojerl_Atom_SUITE).\n\n-include(\"clojerl.hrl\").\n-include(\"clj_test_utils.hrl\").\n\n-export([ all/0\n , init_per_suite/1\n , end_per_suite/1\n ]).\n\n-export([ deref/1\n , swap/1\n , reset/1\n , compare_and_set/1\n , equiv/1\n , meta/1\n , str/1\n , complete_coverage/1\n ]).\n\n-spec all() -> [atom()].\nall() -> clj_test_utils:all(?MODULE).\n\n-spec init_per_suite(config()) -> config().\ninit_per_suite(Config) -> clj_test_utils:init_per_suite(Config).\n\n-spec end_per_suite(config()) -> config().\nend_per_suite(Config) -> Config.\n\n%%------------------------------------------------------------------------------\n%% Test Cases\n%%------------------------------------------------------------------------------\n\n-spec deref(config()) -> result().\nderef(_Config) ->\n Atom = 'clojerl.Atom':?CONSTRUCTOR(1),\n\n ct:comment(\"deref an atom\"),\n 1 = clj_rt:deref(Atom),\n\n 2 = 'clojerl.Atom':reset(Atom, 2),\n 2 = clj_rt:deref(Atom),\n\n {comments, \"\"}.\n\n-spec swap(config()) -> result().\nswap(_Config) ->\n Atom = 'clojerl.Atom':?CONSTRUCTOR(2),\n\n ct:comment(\"Successful swaps\"),\n 3 = 'clojerl.Atom':swap(Atom, fun(X) -> X + 1 end),\n 4 = 'clojerl.Atom':swap(Atom, fun(X, Y) -> X + Y end, 1),\n 6 = 'clojerl.Atom':swap(Atom, fun(X, Y, Z) -> X + Y + Z end, 1, 1),\n 9 = 'clojerl.Atom':swap( Atom\n , fun(X, Y, Z, W) -> X + Y + Z + W end\n , 1\n , 1\n , [1]\n ),\n\n ct:comment(\"Concurrent swaps\"),\n Inc = fun(X) -> X + 1 end,\n Self = self(),\n ResetFun = fun(_) ->\n spawn(fun() -> 'clojerl.Atom':swap(Atom, Inc), Self ! ok end)\n end,\n N = 100,\n Result = N + 9,\n lists:foreach(ResetFun, lists:seq(1, N)),\n ok = clj_test_utils:wait_for(ok, N, 1000),\n Result = 'clojerl.Atom':deref(Atom),\n\n {comments, \"\"}.\n\n-spec reset(config()) -> result().\nreset(_Config) ->\n Atom = 'clojerl.Atom':?CONSTRUCTOR(1),\n\n ct:comment(\"Successful resets\"),\n 2 = 'clojerl.Atom':reset(Atom, 2),\n foo = 'clojerl.Atom':reset(Atom, foo),\n bar = 'clojerl.Atom':reset(Atom, bar),\n <<\"baz\">> = 'clojerl.Atom':reset(Atom, <<\"baz\">>),\n\n ct:comment(\"Concurrent resets\"),\n Self = self(),\n ResetFun = fun(N) ->\n spawn(fun() -> 'clojerl.Atom':reset(Atom, N), Self ! 
ok end)\n end,\n N = 100,\n lists:foreach(ResetFun, lists:seq(1, N)),\n ok = clj_test_utils:wait_for(ok, N, 1000),\n\n {comments, \"\"}.\n\n-spec compare_and_set(config()) -> result().\ncompare_and_set(_Config) ->\n Atom = 'clojerl.Atom':?CONSTRUCTOR(2),\n\n true = 'clojerl.Atom':compare_and_set(Atom, 2, 3),\n false = 'clojerl.Atom':compare_and_set(Atom, whatever, 3),\n\n {comments, \"\"}.\n\n-spec equiv(config()) -> result().\nequiv(_Config) ->\n Atom1 = 'clojerl.Atom':?CONSTRUCTOR(1),\n Atom2 = 'clojerl.Atom':?CONSTRUCTOR(2),\n\n ct:comment(\"Check that the same atom with different meta is equivalent\"),\n Atom3 = clj_rt:with_meta(Atom1, #{a => 1}),\n Atom4 = clj_rt:with_meta(Atom1, #{b => 2}),\n true = clj_rt:equiv(Atom3, Atom4),\n\n ct:comment(\"Check that different atoms are not equivalent\"),\n false = clj_rt:equiv(Atom1, Atom2),\n\n ct:comment(\"An atom and something else\"),\n false = clj_rt:equiv(Atom1, whatever),\n false = clj_rt:equiv(Atom1, #{}),\n false = clj_rt:equiv(Atom1, 1),\n\n {comments, \"\"}.\n\n-spec meta(config()) -> result().\nmeta(_Config) ->\n Atom0 = 'clojerl.Atom':?CONSTRUCTOR(1),\n\n Atom1 = clj_rt:with_meta(Atom0, #{a => 1}),\n #{a := 1} = clj_rt:meta(Atom1),\n\n {comments, \"\"}.\n\n-spec str(config()) -> result().\nstr(_Config) ->\n Atom0 = 'clojerl.Atom':?CONSTRUCTOR(1),\n Atom1 = clj_rt:with_meta(Atom0, #{a => 1}),\n\n <<\"#> = clj_rt:str(Atom1),\n\n {comments, \"\"}.\n\n-spec complete_coverage(config()) -> result().\ncomplete_coverage(_Config) ->\n Atom = 'clojerl.Atom':?CONSTRUCTOR(1),\n\n Hash = 'clojerl.IHash':hash(Atom),\n Hash = 'clojerl.IHash':hash(Atom),\n true = erlang:is_integer(Hash),\n\n {noreply, state} = 'clojerl.Atom':handle_cast(msg, state),\n {noreply, state} = 'clojerl.Atom':handle_info(msg, state),\n {ok, state} = 'clojerl.Atom':terminate(msg, state),\n {ok, state} = 'clojerl.Atom':code_change(msg, from, state),\n\n {comments, \"\"}.\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/clojerl/clojerl/aa35847ca64e1c66224867ca4c31ca6de95bc898/test/clojerl_Atom_SUITE.erl"},"language":{"kind":"string","value":"erlang"},"comments":{"kind":"string","value":"------------------------------------------------------------------------------\n Test Cases\n------------------------------------------------------------------------------"},"code":{"kind":"string","value":"-module(clojerl_Atom_SUITE).\n\n-include(\"clojerl.hrl\").\n-include(\"clj_test_utils.hrl\").\n\n-export([ all/0\n , init_per_suite/1\n , end_per_suite/1\n ]).\n\n-export([ deref/1\n , swap/1\n , reset/1\n , compare_and_set/1\n , equiv/1\n , meta/1\n , str/1\n , complete_coverage/1\n ]).\n\n-spec all() -> [atom()].\nall() -> clj_test_utils:all(?MODULE).\n\n-spec init_per_suite(config()) -> config().\ninit_per_suite(Config) -> clj_test_utils:init_per_suite(Config).\n\n-spec end_per_suite(config()) -> config().\nend_per_suite(Config) -> Config.\n\n\n-spec deref(config()) -> result().\nderef(_Config) ->\n Atom = 'clojerl.Atom':?CONSTRUCTOR(1),\n\n ct:comment(\"deref an atom\"),\n 1 = clj_rt:deref(Atom),\n\n 2 = 'clojerl.Atom':reset(Atom, 2),\n 2 = clj_rt:deref(Atom),\n\n {comments, \"\"}.\n\n-spec swap(config()) -> result().\nswap(_Config) ->\n Atom = 'clojerl.Atom':?CONSTRUCTOR(2),\n\n ct:comment(\"Successful swaps\"),\n 3 = 'clojerl.Atom':swap(Atom, fun(X) -> X + 1 end),\n 4 = 'clojerl.Atom':swap(Atom, fun(X, Y) -> X + Y end, 1),\n 6 = 'clojerl.Atom':swap(Atom, fun(X, Y, Z) -> X + Y + Z end, 1, 1),\n 9 = 'clojerl.Atom':swap( Atom\n , 
fun(X, Y, Z, W) -> X + Y + Z + W end\n , 1\n , 1\n , [1]\n ),\n\n ct:comment(\"Concurrent swaps\"),\n Inc = fun(X) -> X + 1 end,\n Self = self(),\n ResetFun = fun(_) ->\n spawn(fun() -> 'clojerl.Atom':swap(Atom, Inc), Self ! ok end)\n end,\n N = 100,\n Result = N + 9,\n lists:foreach(ResetFun, lists:seq(1, N)),\n ok = clj_test_utils:wait_for(ok, N, 1000),\n Result = 'clojerl.Atom':deref(Atom),\n\n {comments, \"\"}.\n\n-spec reset(config()) -> result().\nreset(_Config) ->\n Atom = 'clojerl.Atom':?CONSTRUCTOR(1),\n\n ct:comment(\"Successful resets\"),\n 2 = 'clojerl.Atom':reset(Atom, 2),\n foo = 'clojerl.Atom':reset(Atom, foo),\n bar = 'clojerl.Atom':reset(Atom, bar),\n <<\"baz\">> = 'clojerl.Atom':reset(Atom, <<\"baz\">>),\n\n ct:comment(\"Concurrent resets\"),\n Self = self(),\n ResetFun = fun(N) ->\n spawn(fun() -> 'clojerl.Atom':reset(Atom, N), Self ! ok end)\n end,\n N = 100,\n lists:foreach(ResetFun, lists:seq(1, N)),\n ok = clj_test_utils:wait_for(ok, N, 1000),\n\n {comments, \"\"}.\n\n-spec compare_and_set(config()) -> result().\ncompare_and_set(_Config) ->\n Atom = 'clojerl.Atom':?CONSTRUCTOR(2),\n\n true = 'clojerl.Atom':compare_and_set(Atom, 2, 3),\n false = 'clojerl.Atom':compare_and_set(Atom, whatever, 3),\n\n {comments, \"\"}.\n\n-spec equiv(config()) -> result().\nequiv(_Config) ->\n Atom1 = 'clojerl.Atom':?CONSTRUCTOR(1),\n Atom2 = 'clojerl.Atom':?CONSTRUCTOR(2),\n\n ct:comment(\"Check that the same atom with different meta is equivalent\"),\n Atom3 = clj_rt:with_meta(Atom1, #{a => 1}),\n Atom4 = clj_rt:with_meta(Atom1, #{b => 2}),\n true = clj_rt:equiv(Atom3, Atom4),\n\n ct:comment(\"Check that different atoms are not equivalent\"),\n false = clj_rt:equiv(Atom1, Atom2),\n\n ct:comment(\"An atom and something else\"),\n false = clj_rt:equiv(Atom1, whatever),\n false = clj_rt:equiv(Atom1, #{}),\n false = clj_rt:equiv(Atom1, 1),\n\n {comments, \"\"}.\n\n-spec meta(config()) -> result().\nmeta(_Config) ->\n Atom0 = 'clojerl.Atom':?CONSTRUCTOR(1),\n\n Atom1 = clj_rt:with_meta(Atom0, #{a => 1}),\n #{a := 1} = clj_rt:meta(Atom1),\n\n {comments, \"\"}.\n\n-spec str(config()) -> result().\nstr(_Config) ->\n Atom0 = 'clojerl.Atom':?CONSTRUCTOR(1),\n Atom1 = clj_rt:with_meta(Atom0, #{a => 1}),\n\n <<\"#> = clj_rt:str(Atom1),\n\n {comments, \"\"}.\n\n-spec complete_coverage(config()) -> result().\ncomplete_coverage(_Config) ->\n Atom = 'clojerl.Atom':?CONSTRUCTOR(1),\n\n Hash = 'clojerl.IHash':hash(Atom),\n Hash = 'clojerl.IHash':hash(Atom),\n true = erlang:is_integer(Hash),\n\n {noreply, state} = 'clojerl.Atom':handle_cast(msg, state),\n {noreply, state} = 'clojerl.Atom':handle_info(msg, state),\n {ok, state} = 'clojerl.Atom':terminate(msg, state),\n {ok, state} = 'clojerl.Atom':code_change(msg, from, state),\n\n {comments, \"\"}.\n"}}},{"rowIdx":610276,"cells":{"_id":{"kind":"string","value":"61c56175e2501a86f347f6c09b3b349eaac8d9147c2da5ddfaf2b64bf89ea5f1"},"repository":{"kind":"string","value":"jellelicht/guix"},"name":{"kind":"string","value":"gnu.scm"},"content":{"kind":"string","value":";;; GNU Guix --- Functional package management for GNU\n Copyright © 2014 < >\n;;;\n;;; This file is part of GNU Guix.\n;;;\n GNU is free software ; you can redistribute it and/or modify it\n under the terms of the GNU General Public License as published by\n the Free Software Foundation ; either version 3 of the License , or ( at\n;;; your option) any later version.\n;;;\n;;; GNU Guix is distributed in the hope that it will be useful, but\n;;; WITHOUT ANY WARRANTY; without even the implied 
warranty of\n;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n;;; GNU General Public License for more details.\n;;;\n You should have received a copy of the GNU General Public License\n along with GNU . If not , see < / > .\n\n(define-module (guix scripts import gnu)\n #:use-module (guix ui)\n #:use-module (guix utils)\n #:use-module (guix scripts)\n #:use-module (guix import gnu)\n #:use-module (guix scripts import)\n #:use-module (srfi srfi-1)\n #:use-module (srfi srfi-11)\n #:use-module (srfi srfi-37)\n #:use-module (ice-9 match)\n #:export (guix-import-gnu))\n\n\f\n;;;\n;;; Command-line options.\n;;;\n\n(define %default-options\n '((key-download . interactive)))\n\n(define (show-help)\n (display (_ \"Usage: guix import gnu [OPTION...] PACKAGE\nReturn a package declaration template for PACKAGE, a GNU package.\\n\"))\n ;; '--key-download' taken from (guix scripts refresh).\n (display (_ \"\n --key-download=POLICY\n handle missing OpenPGP keys according to POLICY:\n 'always', 'never', and 'interactive', which is also\n used when 'key-download' is not specified\"))\n (newline)\n (display (_ \"\n -h, --help display this help and exit\"))\n (display (_ \"\n -V, --version display version information and exit\"))\n (newline)\n (show-bug-report-information))\n\n(define %options\n ;; Specification of the command-line options.\n (cons* (option '(#\\h \"help\") #f #f\n (lambda args\n (show-help)\n (exit 0)))\n (option '(#\\V \"version\") #f #f\n (lambda args\n (show-version-and-exit \"guix import gnu\")))\n (option '(\"key-download\") #t #f ;from (guix scripts refresh)\n (lambda (opt name arg result)\n (match arg\n ((or \"interactive\" \"always\" \"never\")\n (alist-cons 'key-download (string->symbol arg)\n result))\n (_\n (leave (_ \"unsupported policy: ~a~%\")\n arg)))))\n %standard-import-options))\n\n\f\n;;;\n;;; Entry point.\n;;;\n\n(define (guix-import-gnu . args)\n (define (parse-options)\n ;; Return the alist of option values.\n (args-fold* args %options\n (lambda (opt name arg result)\n (leave (_ \"~A: unrecognized option~%\") name))\n (lambda (arg result)\n (alist-cons 'argument arg result))\n %default-options))\n\n (let* ((opts (parse-options))\n (args (filter-map (match-lambda\n (('argument . value)\n value)\n (_ #f))\n (reverse opts))))\n (match args\n ((name)\n (with-error-handling\n (gnu->guix-package name\n #:key-download (assoc-ref opts 'key-download))))\n (_\n (leave (_ \"wrong number of arguments~%\"))))))\n\n;;; gnu.scm ends here\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/jellelicht/guix/83cfc9414fca3ab57c949e18c1ceb375a179b59c/guix/scripts/import/gnu.scm"},"language":{"kind":"string","value":"scheme"},"comments":{"kind":"string","value":" GNU Guix --- Functional package management for GNU\n\n This file is part of GNU Guix.\n\n you can redistribute it and/or modify it\n either version 3 of the License , or ( at\n your option) any later version.\n\n GNU Guix is distributed in the hope that it will be useful, but\n WITHOUT ANY WARRANTY; without even the implied warranty of\n MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the\n GNU General Public License for more details.\n\n\n Command-line options.\n\n '--key-download' taken from (guix scripts refresh).\n Specification of the command-line options.\nfrom (guix scripts refresh)\n\n Entry point.\n\n Return the alist of option values.\n gnu.scm ends here"},"code":{"kind":"string","value":" Copyright © 2014 < >\n under the terms of the GNU General Public License as published by\n You should have received a copy of the GNU General Public License\n along with GNU . If not , see < / > .\n\n(define-module (guix scripts import gnu)\n #:use-module (guix ui)\n #:use-module (guix utils)\n #:use-module (guix scripts)\n #:use-module (guix import gnu)\n #:use-module (guix scripts import)\n #:use-module (srfi srfi-1)\n #:use-module (srfi srfi-11)\n #:use-module (srfi srfi-37)\n #:use-module (ice-9 match)\n #:export (guix-import-gnu))\n\n\f\n\n(define %default-options\n '((key-download . interactive)))\n\n(define (show-help)\n (display (_ \"Usage: guix import gnu [OPTION...] PACKAGE\nReturn a package declaration template for PACKAGE, a GNU package.\\n\"))\n (display (_ \"\n --key-download=POLICY\n handle missing OpenPGP keys according to POLICY:\n 'always', 'never', and 'interactive', which is also\n used when 'key-download' is not specified\"))\n (newline)\n (display (_ \"\n -h, --help display this help and exit\"))\n (display (_ \"\n -V, --version display version information and exit\"))\n (newline)\n (show-bug-report-information))\n\n(define %options\n (cons* (option '(#\\h \"help\") #f #f\n (lambda args\n (show-help)\n (exit 0)))\n (option '(#\\V \"version\") #f #f\n (lambda args\n (show-version-and-exit \"guix import gnu\")))\n (lambda (opt name arg result)\n (match arg\n ((or \"interactive\" \"always\" \"never\")\n (alist-cons 'key-download (string->symbol arg)\n result))\n (_\n (leave (_ \"unsupported policy: ~a~%\")\n arg)))))\n %standard-import-options))\n\n\f\n\n(define (guix-import-gnu . args)\n (define (parse-options)\n (args-fold* args %options\n (lambda (opt name arg result)\n (leave (_ \"~A: unrecognized option~%\") name))\n (lambda (arg result)\n (alist-cons 'argument arg result))\n %default-options))\n\n (let* ((opts (parse-options))\n (args (filter-map (match-lambda\n (('argument . 
value)\n value)\n (_ #f))\n (reverse opts))))\n (match args\n ((name)\n (with-error-handling\n (gnu->guix-package name\n #:key-download (assoc-ref opts 'key-download))))\n (_\n (leave (_ \"wrong number of arguments~%\"))))))\n\n"}}},{"rowIdx":610277,"cells":{"_id":{"kind":"string","value":"3808527b4a8541e5865943714d06c824c9f05b816fb0b8e61cc0edc86f522baa"},"repository":{"kind":"string","value":"sampou-org/ghc_users_guide_ja"},"name":{"kind":"string","value":"PhasePrograms.hs"},"content":{"kind":"string","value":"module Options.PhasePrograms where\n\nimport Types\n\nphaseProgramsOptions :: [Flag]\nphaseProgramsOptions =\n [ flag { flagName = \"-pgmL ⟨cmd⟩\"\n , flagDescription = \"⟨cmd⟩ を文芸的コードのプリプロセッサとして使う\"\n , flagType = DynamicFlag\n }\n , flag { flagName = \"-pgmP ⟨cmd⟩\"\n , flagDescription =\n \"⟨cmd⟩ を C プリプロセッサとして使う(``-cpp`` を指定したときのみ)\"\n , flagType = DynamicFlag\n }\n , flag { flagName = \"-pgmc ⟨cmd⟩\"\n , flagDescription = \"⟨cmd⟩ を C のコンパイラとして使う\"\n , flagType = DynamicFlag\n }\n , flag { flagName = \"-pgmlo ⟨cmd⟩\"\n , flagDescription = \"⟨cmd⟩ を LLVM 最適化器として使う\"\n , flagType = DynamicFlag\n }\n , flag { flagName = \"-pgmlc ⟨cmd⟩\"\n , flagDescription = \"⟨cmd⟩ を LLVM コンパイラとして使う\"\n , flagType = DynamicFlag\n }\n , flag { flagName = \"-pgms ⟨cmd⟩\"\n , flagDescription = \"⟨cmd⟩ をスプリッタとして使う\"\n , flagType = DynamicFlag\n }\n , flag { flagName = \"-pgma ⟨cmd⟩\"\n , flagDescription = \"⟨cmd⟩ をアセンブラとして使う\"\n , flagType = DynamicFlag\n }\n , flag { flagName = \"-pgml ⟨cmd⟩\"\n , flagDescription = \"⟨cmd⟩ をリンカとして使う\"\n , flagType = DynamicFlag\n }\n , flag { flagName = \"-pgmdll ⟨cmd⟩\"\n , flagDescription = \"⟨cmd⟩ を DLL 生成器として使う\"\n , flagType = DynamicFlag\n }\n , flag { flagName = \"-pgmF ⟨cmd⟩\"\n , flagDescription = \"⟨cmd⟩ を プリプロセッサとして使う(``-F`` を指定したときのみ)\"\n , flagType = DynamicFlag\n }\n , flag { flagName = \"-pgmwindres ⟨cmd⟩\"\n , flagDescription =\n \"⟨cmd⟩ を Windows でマニフェストを埋め込むためのプログラムとして使う\"\n , flagType = DynamicFlag\n }\n , flag { flagName = \"-pgmlibtool ⟨cmd⟩\"\n , flagDescription =\n \"⟨cmd⟩ を libtool用のコマンドとして使う(``-staticlib`` を指定したときのみ)\"\n , flagType = DynamicFlag\n }\n ]\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/sampou-org/ghc_users_guide_ja/91ac4ee4347802bbfc63686cfcbd4fc12f95a584/8.2.2/mkUserGuidePart/Options/PhasePrograms.hs"},"language":{"kind":"string","value":"haskell"},"comments":{"kind":"string","value":""},"code":{"kind":"string","value":"module Options.PhasePrograms where\n\nimport Types\n\nphaseProgramsOptions :: [Flag]\nphaseProgramsOptions =\n [ flag { flagName = \"-pgmL ⟨cmd⟩\"\n , flagDescription = \"⟨cmd⟩ を文芸的コードのプリプロセッサとして使う\"\n , flagType = DynamicFlag\n }\n , flag { flagName = \"-pgmP ⟨cmd⟩\"\n , flagDescription =\n \"⟨cmd⟩ を C プリプロセッサとして使う(``-cpp`` を指定したときのみ)\"\n , flagType = DynamicFlag\n }\n , flag { flagName = \"-pgmc ⟨cmd⟩\"\n , flagDescription = \"⟨cmd⟩ を C のコンパイラとして使う\"\n , flagType = DynamicFlag\n }\n , flag { flagName = \"-pgmlo ⟨cmd⟩\"\n , flagDescription = \"⟨cmd⟩ を LLVM 最適化器として使う\"\n , flagType = DynamicFlag\n }\n , flag { flagName = \"-pgmlc ⟨cmd⟩\"\n , flagDescription = \"⟨cmd⟩ を LLVM コンパイラとして使う\"\n , flagType = DynamicFlag\n }\n , flag { flagName = \"-pgms ⟨cmd⟩\"\n , flagDescription = \"⟨cmd⟩ をスプリッタとして使う\"\n , flagType = DynamicFlag\n }\n , flag { flagName = \"-pgma ⟨cmd⟩\"\n , flagDescription = \"⟨cmd⟩ をアセンブラとして使う\"\n , flagType = DynamicFlag\n }\n , flag { flagName = \"-pgml ⟨cmd⟩\"\n , flagDescription = \"⟨cmd⟩ をリンカとして使う\"\n , flagType = DynamicFlag\n }\n , 
flag { flagName = \"-pgmdll ⟨cmd⟩\"\n , flagDescription = \"⟨cmd⟩ を DLL 生成器として使う\"\n , flagType = DynamicFlag\n }\n , flag { flagName = \"-pgmF ⟨cmd⟩\"\n , flagDescription = \"⟨cmd⟩ を プリプロセッサとして使う(``-F`` を指定したときのみ)\"\n , flagType = DynamicFlag\n }\n , flag { flagName = \"-pgmwindres ⟨cmd⟩\"\n , flagDescription =\n \"⟨cmd⟩ を Windows でマニフェストを埋め込むためのプログラムとして使う\"\n , flagType = DynamicFlag\n }\n , flag { flagName = \"-pgmlibtool ⟨cmd⟩\"\n , flagDescription =\n \"⟨cmd⟩ を libtool用のコマンドとして使う(``-staticlib`` を指定したときのみ)\"\n , flagType = DynamicFlag\n }\n ]\n"}}},{"rowIdx":610278,"cells":{"_id":{"kind":"string","value":"27cf0ba744fad5994af301dddadbcc762e8e7f814852e58d080e2bfc6b33e369"},"repository":{"kind":"string","value":"philopon/apiary"},"name":{"kind":"string","value":"Persist.hs"},"content":{"kind":"string","value":"# LANGUAGE UndecidableInstances #\n# LANGUAGE OverlappingInstances #\n# LANGUAGE FlexibleInstances #\n# LANGUAGE FlexibleContexts #\n# LANGUAGE TypeOperators #\n{-# LANGUAGE Rank2Types #-}\n# LANGUAGE LambdaCase #\n# LANGUAGE DataKinds #\n{-# LANGUAGE GADTs #-}\n\nmodule Web.Apiary.Database.Persist\n ( Persist\n -- * initializer\n , Migrator(..), With\n , initPersist, initPersistNoLog\n , initPersistPool, initPersistPoolNoLog\n -- ** low level\n , initPersist', initPersistPool'\n -- * query\n , RunSQL(runSql)\n -- * filter\n , sql\n ) where\n\nimport qualified Data.Pool as Pool\nimport Control.Monad(void, mzero)\nimport Control.Monad.IO.Class(MonadIO(..))\nimport Control.Monad.Logger(NoLoggingT(runNoLoggingT))\nimport Control.Monad.Trans.Reader(ReaderT(..), runReaderT, ask)\nimport Control.Monad.Trans.Control(MonadBaseControl)\nimport Web.Apiary.Logger(LogWrapper, runLogWrapper)\n\nimport qualified Database.Persist.Sql as Sql\n\nimport Web.Apiary(Html)\nimport Control.Monad.Apiary.Action(ActionT, applyDict)\nimport Control.Monad.Apiary.Filter(focus, Filter, Doc(DocPrecondition))\nimport qualified Network.Routing.Dict as Dict\nimport qualified Network.Routing as R\nimport Data.Proxy.Compat(Proxy(..))\nimport GHC.TypeLits.Compat(KnownSymbol)\nimport Data.Apiary.Extension\n (Has, Initializer, initializer, Extensions, Extension, MonadExts, getExt)\n\ndata Migrator\n = Logging Sql.Migration\n | Silent Sql.Migration\n | Unsafe Sql.Migration\n | NoMigrate\n\ndata Persist\n = PersistPool Sql.ConnectionPool\n | PersistConn Sql.SqlBackend\n\ninstance Extension Persist\n\ntype With c m = forall a. (c -> m a) -> m a\n\ninitPersist' :: (MonadIO n, MonadBaseControl IO n, Monad m) \n => (forall a. Extensions exts -> n a -> m a)\n -> With Sql.SqlBackend n -> Migrator -> Initializer m exts (Persist ': exts)\ninitPersist' run with migr = initializer $ \\es -> run es $\n with $ \\conn -> do\n doMigration migr conn\n return (PersistConn conn)\n\n-- | construct persist extension initializer with no connection pool.\n--\n-- example: \n--\n-- @\n initPersist ( withSqliteConn \" db.sqlite \" ) migrateAll\n-- @\ninitPersist :: (MonadIO m, MonadBaseControl IO m) \n => With Sql.SqlBackend (LogWrapper exts m) -> Sql.Migration\n -> Initializer m exts (Persist ': exts)\ninitPersist with = initPersist' runLogWrapper with . Logging\n\ninitPersistNoLog :: (MonadIO m, MonadBaseControl IO m) \n => With Sql.SqlBackend (NoLoggingT m)\n -> Sql.Migration -> Initializer m es (Persist ': es)\ninitPersistNoLog with = initPersist' (const runNoLoggingT) with . Silent\n\ninitPersistPool' :: (MonadIO n, MonadBaseControl IO n, Monad m)\n => (forall a. 
Extensions exts -> n a -> m a)\n -> With Sql.ConnectionPool n -> Migrator -> Initializer m exts (Persist ': exts)\ninitPersistPool' run with migr = initializer $ \\es -> run es $\n with $ \\pool -> do\n Pool.withResource pool $ doMigration migr\n return (PersistPool pool)\n\ninitPersistPool :: (MonadIO m, MonadBaseControl IO m)\n => With Sql.ConnectionPool (LogWrapper exts m) -> Sql.Migration\n -> Initializer m exts (Persist ': exts)\ninitPersistPool with = initPersistPool' runLogWrapper with . Logging\n\ninitPersistPoolNoLog :: (MonadIO m, MonadBaseControl IO m)\n => With Sql.ConnectionPool (NoLoggingT m)\n -> Sql.Migration -> Initializer m es (Persist ': es)\ninitPersistPoolNoLog with = initPersistPool' (const runNoLoggingT) with . Silent\n\ndoMigration :: (MonadIO m, MonadBaseControl IO m) => Migrator -> Sql.SqlBackend -> m ()\ndoMigration migr conn = case migr of\n Logging m -> runReaderT (Sql.runMigration m) conn\n Silent m -> runReaderT (void $ Sql.runMigrationSilent m) conn\n Unsafe m -> runReaderT (Sql.runMigrationUnsafe m) conn\n NoMigrate -> return ()\n\n-- | execute sql in action.\nclass RunSQL m where\n runSql :: Sql.SqlPersistT m a -> m a\n\nrunSql' :: MonadBaseControl IO m => Sql.SqlPersistT m a -> Persist -> m a\nrunSql' a persist = case persist of\n PersistPool p -> Sql.runSqlPool a p\n PersistConn c -> Sql.runSqlConn a c\n\ninstance (Has Persist es, MonadExts es m, MonadBaseControl IO m) => RunSQL m where\n runSql a = getExt (Proxy :: Proxy Persist) >>= runSql' a\n\ninstance (MonadBaseControl IO m) => RunSQL (ReaderT Persist m) where\n runSql a = ask >>= runSql' a\n\n-- | filter by sql query. since 0.9.0.0.\nsql :: (KnownSymbol k, Has Persist exts, MonadBaseControl IO actM, k Dict. Maybe Html -- ^ documentation.\n -> proxy k\n -> Sql.SqlPersistT (ActionT exts '[] actM) a\n -> (a -> Maybe b) -- ^ result check function. Nothing: fail filter, Just a: success filter and add parameter.\n -> Filter exts actM m prms (k Dict.:= b ': prms)\nsql doc k q p = focus (maybe id DocPrecondition doc) Nothing $ R.raw \"sql\" $ \\d t ->\n fmap p (runSql $ hoistReaderT (applyDict Dict.emptyDict) q) >>= \\case\n Nothing -> mzero\n Just a -> return (Dict.add k a d, t)\n\nhoistReaderT :: (forall b. m b -> n b) -> ReaderT r m a -> ReaderT r n a\nhoistReaderT f m = ReaderT $ \\b -> f (runReaderT m b)\n# INLINE hoistReaderT #\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/philopon/apiary/7da306fcbfcdec85d073746968298de4540d7235/apiary-persistent/src/Web/Apiary/Database/Persist.hs"},"language":{"kind":"string","value":"haskell"},"comments":{"kind":"string","value":"# LANGUAGE Rank2Types #\n# LANGUAGE GADTs #\n * initializer\n ** low level\n * query\n * filter\n | construct persist extension initializer with no connection pool.\n\n example: \n\n @\n @\n | execute sql in action.\n | filter by sql query. since 0.9.0.0.\n ^ documentation.\n ^ result check function. 
Nothing: fail filter, Just a: success filter and add parameter."},"code":{"kind":"string","value":"# LANGUAGE UndecidableInstances #\n# LANGUAGE OverlappingInstances #\n# LANGUAGE FlexibleInstances #\n# LANGUAGE FlexibleContexts #\n# LANGUAGE TypeOperators #\n# LANGUAGE LambdaCase #\n# LANGUAGE DataKinds #\n\nmodule Web.Apiary.Database.Persist\n ( Persist\n , Migrator(..), With\n , initPersist, initPersistNoLog\n , initPersistPool, initPersistPoolNoLog\n , initPersist', initPersistPool'\n , RunSQL(runSql)\n , sql\n ) where\n\nimport qualified Data.Pool as Pool\nimport Control.Monad(void, mzero)\nimport Control.Monad.IO.Class(MonadIO(..))\nimport Control.Monad.Logger(NoLoggingT(runNoLoggingT))\nimport Control.Monad.Trans.Reader(ReaderT(..), runReaderT, ask)\nimport Control.Monad.Trans.Control(MonadBaseControl)\nimport Web.Apiary.Logger(LogWrapper, runLogWrapper)\n\nimport qualified Database.Persist.Sql as Sql\n\nimport Web.Apiary(Html)\nimport Control.Monad.Apiary.Action(ActionT, applyDict)\nimport Control.Monad.Apiary.Filter(focus, Filter, Doc(DocPrecondition))\nimport qualified Network.Routing.Dict as Dict\nimport qualified Network.Routing as R\nimport Data.Proxy.Compat(Proxy(..))\nimport GHC.TypeLits.Compat(KnownSymbol)\nimport Data.Apiary.Extension\n (Has, Initializer, initializer, Extensions, Extension, MonadExts, getExt)\n\ndata Migrator\n = Logging Sql.Migration\n | Silent Sql.Migration\n | Unsafe Sql.Migration\n | NoMigrate\n\ndata Persist\n = PersistPool Sql.ConnectionPool\n | PersistConn Sql.SqlBackend\n\ninstance Extension Persist\n\ntype With c m = forall a. (c -> m a) -> m a\n\ninitPersist' :: (MonadIO n, MonadBaseControl IO n, Monad m) \n => (forall a. Extensions exts -> n a -> m a)\n -> With Sql.SqlBackend n -> Migrator -> Initializer m exts (Persist ': exts)\ninitPersist' run with migr = initializer $ \\es -> run es $\n with $ \\conn -> do\n doMigration migr conn\n return (PersistConn conn)\n\n initPersist ( withSqliteConn \" db.sqlite \" ) migrateAll\ninitPersist :: (MonadIO m, MonadBaseControl IO m) \n => With Sql.SqlBackend (LogWrapper exts m) -> Sql.Migration\n -> Initializer m exts (Persist ': exts)\ninitPersist with = initPersist' runLogWrapper with . Logging\n\ninitPersistNoLog :: (MonadIO m, MonadBaseControl IO m) \n => With Sql.SqlBackend (NoLoggingT m)\n -> Sql.Migration -> Initializer m es (Persist ': es)\ninitPersistNoLog with = initPersist' (const runNoLoggingT) with . Silent\n\ninitPersistPool' :: (MonadIO n, MonadBaseControl IO n, Monad m)\n => (forall a. Extensions exts -> n a -> m a)\n -> With Sql.ConnectionPool n -> Migrator -> Initializer m exts (Persist ': exts)\ninitPersistPool' run with migr = initializer $ \\es -> run es $\n with $ \\pool -> do\n Pool.withResource pool $ doMigration migr\n return (PersistPool pool)\n\ninitPersistPool :: (MonadIO m, MonadBaseControl IO m)\n => With Sql.ConnectionPool (LogWrapper exts m) -> Sql.Migration\n -> Initializer m exts (Persist ': exts)\ninitPersistPool with = initPersistPool' runLogWrapper with . Logging\n\ninitPersistPoolNoLog :: (MonadIO m, MonadBaseControl IO m)\n => With Sql.ConnectionPool (NoLoggingT m)\n -> Sql.Migration -> Initializer m es (Persist ': es)\ninitPersistPoolNoLog with = initPersistPool' (const runNoLoggingT) with . 
Silent\n\ndoMigration :: (MonadIO m, MonadBaseControl IO m) => Migrator -> Sql.SqlBackend -> m ()\ndoMigration migr conn = case migr of\n Logging m -> runReaderT (Sql.runMigration m) conn\n Silent m -> runReaderT (void $ Sql.runMigrationSilent m) conn\n Unsafe m -> runReaderT (Sql.runMigrationUnsafe m) conn\n NoMigrate -> return ()\n\nclass RunSQL m where\n runSql :: Sql.SqlPersistT m a -> m a\n\nrunSql' :: MonadBaseControl IO m => Sql.SqlPersistT m a -> Persist -> m a\nrunSql' a persist = case persist of\n PersistPool p -> Sql.runSqlPool a p\n PersistConn c -> Sql.runSqlConn a c\n\ninstance (Has Persist es, MonadExts es m, MonadBaseControl IO m) => RunSQL m where\n runSql a = getExt (Proxy :: Proxy Persist) >>= runSql' a\n\ninstance (MonadBaseControl IO m) => RunSQL (ReaderT Persist m) where\n runSql a = ask >>= runSql' a\n\nsql :: (KnownSymbol k, Has Persist exts, MonadBaseControl IO actM, k Dict. proxy k\n -> Sql.SqlPersistT (ActionT exts '[] actM) a\n -> Filter exts actM m prms (k Dict.:= b ': prms)\nsql doc k q p = focus (maybe id DocPrecondition doc) Nothing $ R.raw \"sql\" $ \\d t ->\n fmap p (runSql $ hoistReaderT (applyDict Dict.emptyDict) q) >>= \\case\n Nothing -> mzero\n Just a -> return (Dict.add k a d, t)\n\nhoistReaderT :: (forall b. m b -> n b) -> ReaderT r m a -> ReaderT r n a\nhoistReaderT f m = ReaderT $ \\b -> f (runReaderT m b)\n# INLINE hoistReaderT #\n"}}},{"rowIdx":610279,"cells":{"_id":{"kind":"string","value":"9d928bf2be8912124ea0eca64a778884772879aee1238b0a03647238a4db5afb"},"repository":{"kind":"string","value":"TrustInSoft/tis-interpreter"},"name":{"kind":"string","value":"zones.mli"},"content":{"kind":"string","value":" Modified by TrustInSoft\n\n(**************************************************************************)\n(* *)\n This file is part of Frama - C. \n(* *)\n Copyright ( C ) 2007 - 2015 \n CEA ( Commissariat à l'énergie atomique et aux énergies \n(* alternatives) *)\n(* *)\n(* you can redistribute it and/or modify it under the terms of the GNU *)\n Lesser General Public License as published by the Free Software \n Foundation , version 2.1 . \n(* *)\n(* It is distributed in the hope that it will be useful, *)\n(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)\n(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)\n(* GNU Lesser General Public License for more details. *)\n(* *)\n See the GNU Lesser General Public License version 2.1 \n for more details ( enclosed in the file licenses / LGPLv2.1 ) . \n(* *)\n(**************************************************************************)\n\n\n This file is empty on purpose . Plugins register callbacks in src / kernel / db.ml . \n\n This file is empty on purpose. Plugins register callbacks in src/kernel/db.ml.\n*)\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/TrustInSoft/tis-interpreter/33132ce4a825494ea48bf2dd6fd03a56b62cc5c3/src/plugins/scope/zones.mli"},"language":{"kind":"string","value":"ocaml"},"comments":{"kind":"string","value":"************************************************************************\n \n \n alternatives) \n \n you can redistribute it and/or modify it under the terms of the GNU \n \n It is distributed in the hope that it will be useful, \n but WITHOUT ANY WARRANTY; without even the implied warranty of \n MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the \n GNU Lesser General Public License for more details. 
\n \n \n************************************************************************"},"code":{"kind":"string","value":" Modified by TrustInSoft\n\n This file is part of Frama - C. \n Copyright ( C ) 2007 - 2015 \n CEA ( Commissariat à l'énergie atomique et aux énergies \n Lesser General Public License as published by the Free Software \n Foundation , version 2.1 . \n See the GNU Lesser General Public License version 2.1 \n for more details ( enclosed in the file licenses / LGPLv2.1 ) . \n\n\n This file is empty on purpose . Plugins register callbacks in src / kernel / db.ml . \n\n This file is empty on purpose. Plugins register callbacks in src/kernel/db.ml.\n*)\n"}}},{"rowIdx":610280,"cells":{"_id":{"kind":"string","value":"f058a5e6bc7d4a02c9821e136b9dc035fc3816f4f74ca7863c824ee6b1bc14ba"},"repository":{"kind":"string","value":"brevis-us/brevis"},"name":{"kind":"string","value":"globals.clj"},"content":{"kind":"string","value":"(ns us.brevis.globals\n (:import [us.brevis.graphics BrCamera]))\n\n(def enable-display-text (atom true))\n\n(def default-gui-state {:fullscreen false\n: camera ( BrCamera . 300 300 -50 90 -70 45 60 ( / 4 3 ) 0.1 4000 )\n: camera ( BrCamera . 300 300 -50 162 -56 0 60 ( / 4 3 ) 0.1 4000 )\n :camera (BrCamera. 100 50 -50 0 -90 0 60 640 480 0.1 4000)\n :gui true\n ;:input (BrInput.)\n: rot - x 90 : rot - y -90 : rot - z -45\n: shift - x 300 : shift - y 300 : shift - z -50;-30 \n :last-report-time 0 :simulation-time 0})\n \n(def #^:dynamic *gui-state* (atom default-gui-state))\n(def #^:dynamic *gui-message-board* (atom (sorted-map))) \n(def #^:dynamic *app-thread* (atom nil))\n(def #^:dynamic *screenshot-filename* (atom nil))\n(def #^:dynamic *simulation-state* (atom {}))\n(def #^:dynamic *graphics* (atom {}))\n(def destroy-hooks (atom []))\n\n;(def #^:dynamic *brevis-params* (atom {}))\n;(def #^:dynamic *brevis-state* (atom {}))\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/brevis-us/brevis/de51c173279e82cca6d5990010144167050358a3/src/main/clojure/us/brevis/globals.clj"},"language":{"kind":"string","value":"clojure"},"comments":{"kind":"string","value":":input (BrInput.)\n-30 \n(def #^:dynamic *brevis-params* (atom {}))\n(def #^:dynamic *brevis-state* (atom {}))"},"code":{"kind":"string","value":"(ns us.brevis.globals\n (:import [us.brevis.graphics BrCamera]))\n\n(def enable-display-text (atom true))\n\n(def default-gui-state {:fullscreen false\n: camera ( BrCamera . 300 300 -50 90 -70 45 60 ( / 4 3 ) 0.1 4000 )\n: camera ( BrCamera . 300 300 -50 162 -56 0 60 ( / 4 3 ) 0.1 4000 )\n :camera (BrCamera. 
100 50 -50 0 -90 0 60 640 480 0.1 4000)\n :gui true\n: rot - x 90 : rot - y -90 : rot - z -45\n :last-report-time 0 :simulation-time 0})\n \n(def #^:dynamic *gui-state* (atom default-gui-state))\n(def #^:dynamic *gui-message-board* (atom (sorted-map))) \n(def #^:dynamic *app-thread* (atom nil))\n(def #^:dynamic *screenshot-filename* (atom nil))\n(def #^:dynamic *simulation-state* (atom {}))\n(def #^:dynamic *graphics* (atom {}))\n(def destroy-hooks (atom []))\n\n"}}},{"rowIdx":610281,"cells":{"_id":{"kind":"string","value":"81f66040c8b28d4d5326c5b2d5cc7fd1b91ba8baa56dde06adf56af7a86cb412"},"repository":{"kind":"string","value":"KavehYousefi/Esoteric-programming-languages"},"name":{"kind":"string","value":"types.lisp"},"content":{"kind":"string","value":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;\n;; \n;; This file serves in the declaration of the globally significant\n;; types.\n;; \n;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;\n\n\n\n;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;\n;; -- Declaration of types. -- ;;\n;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;\n\n(deftype list-of (&optional (element-type T))\n \"The ``list-of'' type defines a list of zero or more elements, each\n member of which conforms to the ELEMENT-TYPE, defaulting to the\n comprehensive ``T''.\"\n (let ((predicate (gensym)))\n (declare (type symbol predicate))\n (setf (symbol-function predicate)\n #'(lambda (candidate)\n (declare (type T candidate))\n (and\n (listp candidate)\n (every\n #'(lambda (element)\n (declare (type T element))\n (typep element element-type))\n (the list candidate)))))\n `(satisfies ,predicate)))\n\n;;; -------------------------------------------------------\n\n(deftype hash-table-of (&optional (key-type T) (value-type T))\n \"The ``hash-table-of'' type defines a hash table of zero or more\n entries, each key of which conforms to the KEY-TYPE and associates\n with a value of the VALUE-TYPE, both defaulting to the comprehensive\n ``T''.\"\n (let ((predicate (gensym)))\n (declare (type symbol predicate))\n (setf (symbol-function predicate)\n #'(lambda (candidate)\n (declare (type T candidate))\n (and\n (hash-table-p candidate)\n (loop\n for key\n of-type T\n being the hash-keys in (the hash-table candidate)\n using\n (hash-value value)\n always\n (and (typep key key-type)\n (typep value value-type))))))\n `(satisfies ,predicate)))\n\n;;; -------------------------------------------------------\n\n(deftype attribute-map ()\n \"The ``attribute-map'' type defines a collection of node attributes in\n the form of a hash table mapping which associated keyword symbol\n attribute names to arbitrary values.\"\n '(hash-table-of keyword T))\n\n;;; -------------------------------------------------------\n\n(deftype attribute-list ()\n \"The ``attribute-list'' type defines a list of node attributes in\n terms of a property list, or plist, with each attribute name (key or\n indicator) immediately succeeded by its associated attribute value\n (property value), the former of which must be a keyword symbol,\n whereas the latter may assume the generic type ``T''.\"\n (let ((predicate (gensym)))\n (setf (symbol-function predicate)\n #'(lambda (candidate)\n (declare (type T candidate))\n (and\n (listp candidate)\n (evenp (length (the list candidate)))\n (loop\n for (indicator value)\n of-type (T T)\n on (the list candidate)\n by #'cddr\n always\n (and (typep indicator 'keyword)\n (typep value T))))))\n 
`(satisfies ,predicate)))\n\n;;; -------------------------------------------------------\n\n(deftype node-list ()\n \"The ``node-list'' type defines a list of zero or more ``Node''\n objects.\"\n '(list-of Node))\n\n;;; -------------------------------------------------------\n\n(deftype set-operator ()\n \"The ``set-operator'' type enumerates the recognized binary set\n operations.\"\n '(member\n :union\n :intersection\n :left-difference\n :right-difference))\n\n;;; -------------------------------------------------------\n\n(deftype set-relationship ()\n \"The ``set-relationship'' type enumerates the recognized relationship\n betwixt two sets, most commonly employed in the indagation of a\n loop's continuation predicate.\"\n '(member\n :subset\n :proper-subset\n :not-subset\n :superset\n :proper-superset\n :not-superset\n :equal))\n\n;;; -------------------------------------------------------\n\n(deftype destination ()\n \"The ``destination'' type defines a sink for output operations,\n enumerating, among others, the functions ``format'' and\n ``write-char''.\"\n '(or null (eql T) stream string))\n\n;;; -------------------------------------------------------\n\n(deftype natural-number ()\n \"The ``natural-number'' type defines a positive integer with no upper\n bourne, that is, a commorant of the range [1, +infinity], most\n commonly employed in the context of set members.\"\n '(integer 1 *))\n\n;;; -------------------------------------------------------\n\n(deftype number-list ()\n \"The ``number-list'' type defines a list of zero or more natural\n numbers, that is, positive integers.\"\n '(list-of natural-number))\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/KavehYousefi/Esoteric-programming-languages/86116d6045f426dbe74f881b92944ad76df59c68/SOAP/SOAP_001/types.lisp"},"language":{"kind":"string","value":"lisp"},"comments":{"kind":"string","value":"\n \n This file serves in the declaration of the globally significant\n types.\n \n\n\n -- Declaration of types. 
-- ;;\n\n -------------------------------------------------------\n -------------------------------------------------------\n -------------------------------------------------------\n -------------------------------------------------------\n -------------------------------------------------------\n -------------------------------------------------------\n -------------------------------------------------------\n -------------------------------------------------------\n -------------------------------------------------------"},"code":{"kind":"string","value":"\n\n\n\n(deftype list-of (&optional (element-type T))\n \"The ``list-of'' type defines a list of zero or more elements, each\n member of which conforms to the ELEMENT-TYPE, defaulting to the\n comprehensive ``T''.\"\n (let ((predicate (gensym)))\n (declare (type symbol predicate))\n (setf (symbol-function predicate)\n #'(lambda (candidate)\n (declare (type T candidate))\n (and\n (listp candidate)\n (every\n #'(lambda (element)\n (declare (type T element))\n (typep element element-type))\n (the list candidate)))))\n `(satisfies ,predicate)))\n\n\n(deftype hash-table-of (&optional (key-type T) (value-type T))\n \"The ``hash-table-of'' type defines a hash table of zero or more\n entries, each key of which conforms to the KEY-TYPE and associates\n with a value of the VALUE-TYPE, both defaulting to the comprehensive\n ``T''.\"\n (let ((predicate (gensym)))\n (declare (type symbol predicate))\n (setf (symbol-function predicate)\n #'(lambda (candidate)\n (declare (type T candidate))\n (and\n (hash-table-p candidate)\n (loop\n for key\n of-type T\n being the hash-keys in (the hash-table candidate)\n using\n (hash-value value)\n always\n (and (typep key key-type)\n (typep value value-type))))))\n `(satisfies ,predicate)))\n\n\n(deftype attribute-map ()\n \"The ``attribute-map'' type defines a collection of node attributes in\n the form of a hash table mapping which associated keyword symbol\n attribute names to arbitrary values.\"\n '(hash-table-of keyword T))\n\n\n(deftype attribute-list ()\n \"The ``attribute-list'' type defines a list of node attributes in\n terms of a property list, or plist, with each attribute name (key or\n indicator) immediately succeeded by its associated attribute value\n (property value), the former of which must be a keyword symbol,\n whereas the latter may assume the generic type ``T''.\"\n (let ((predicate (gensym)))\n (setf (symbol-function predicate)\n #'(lambda (candidate)\n (declare (type T candidate))\n (and\n (listp candidate)\n (evenp (length (the list candidate)))\n (loop\n for (indicator value)\n of-type (T T)\n on (the list candidate)\n by #'cddr\n always\n (and (typep indicator 'keyword)\n (typep value T))))))\n `(satisfies ,predicate)))\n\n\n(deftype node-list ()\n \"The ``node-list'' type defines a list of zero or more ``Node''\n objects.\"\n '(list-of Node))\n\n\n(deftype set-operator ()\n \"The ``set-operator'' type enumerates the recognized binary set\n operations.\"\n '(member\n :union\n :intersection\n :left-difference\n :right-difference))\n\n\n(deftype set-relationship ()\n \"The ``set-relationship'' type enumerates the recognized relationship\n betwixt two sets, most commonly employed in the indagation of a\n loop's continuation predicate.\"\n '(member\n :subset\n :proper-subset\n :not-subset\n :superset\n :proper-superset\n :not-superset\n :equal))\n\n\n(deftype destination ()\n \"The ``destination'' type defines a sink for output operations,\n enumerating, among others, the functions 
``format'' and\n ``write-char''.\"\n '(or null (eql T) stream string))\n\n\n(deftype natural-number ()\n \"The ``natural-number'' type defines a positive integer with no upper\n bourne, that is, a commorant of the range [1, +infinity], most\n commonly employed in the context of set members.\"\n '(integer 1 *))\n\n\n(deftype number-list ()\n \"The ``number-list'' type defines a list of zero or more natural\n numbers, that is, positive integers.\"\n '(list-of natural-number))\n"}}},{"rowIdx":610282,"cells":{"_id":{"kind":"string","value":"65ee4e700d62c98cfe275cfc749fa26906769e8a07e90b9c30fd72f70fd27002"},"repository":{"kind":"string","value":"dizengrong/erlang_game"},"name":{"kind":"string","value":"sales_test.erl"},"content":{"kind":"string","value":"-module (sales_test).\n\n-include (\"sales.hrl\").\n-include_lib (\"amnesia/include/amnesia.hrl\").\n\n-compile ([export_all]).\n\npopulate() ->\n amnesia:open({local, sales}, sales),\n\n {ok, Cust1} = amnesia:add_new (sales,\n #customer {customer_code = 102341,\n name = \"John\",\n address = \"XXXXX\"}),\n {ok, Cust2} = amnesia:add_new (sales,\n #customer {customer_code = 394021,\n name = \"Corrado\",\n address = \"YYYYYY\",\n email = \"corrado@yyy\"}),\n {ok, Cust3} = amnesia:add_new (sales,\n #customer {customer_code = 102391,\n name = \"Dave\",\n address = \"Dave's home\",\n email = \"dave@zzz\"}),\n\n {ok, P1} = amnesia:add_new (sales,\n #product { product_code = \"001\",\n description = \"CPU Intel\",\n price = 231.10 }),\n\n {ok, P2} = amnesia:add_new (sales,\n #product { product_code = \"002\",\n description = \"Compact Flash 4G\",\n price = 57.90 }),\n\n {ok, P3} = amnesia:add_new (sales,\n #product { product_code = \"003\",\n description = \"Hard Disk 500G\",\n price = 190.77 }),\n\n {ok, Order} = amnesia:add_new (sales,\n #orders { order_number = 30,\n order_date = {2008, 7, 17},\n customer = Cust2 }),\n\n amnesia:add_new (sales, #order_line { orders = Order,\n product = P2,\n quantity = 3 }),\n\n amnesia:add_new (sales, #order_line { orders = Order,\n product = P1,\n quantity = 10 }),\n\n amnesia:add_new (sales,\n [#product { product_code = \"004\",\n description = \"Data Server\",\n price = 5200.00 },\n #orders { order_number = 31,\n customer = Cust1},\n #order_line { orders = '$2',\n product = P3,\n quantity = 2} ,\n #order_line { orders = '$2',\n product = '$1',\n quantity = 11 }\n ]),\n ok.\n\n\ntest_join () ->\n amnesia:fetch (sales, [customer, ?JOIN, orders, ?JOIN, order_line]).\n\ntest_join (Pid) ->\n amnesia:fetch (Pid, [customer, ?JOIN, orders, ?JOIN, order_line]).\n\ntest_connections () ->\n {ok, [Order]} = amnesia:fetch (sales, orders, {\"order_number = 31\", []}),\n io:format (\"Order #31 is: ~p~n\", [Order]),\n {ok, OrderWithCust} = amnesia:load_referenced (sales, Order),\n io:format (\"Order #31 with customer explicited is: ~p~n\", [OrderWithCust]),\n {ok, OrderLines} = amnesia:load_referenced (sales, Order, order_line),\n io:format (\"The items of order #31 are: ~p~n\", [OrderLines]),\n OrderLinesWithProduct =\n lists:map (fun (Line) ->\n {ok, LineWithProduct} =\n amnesia:load_referenced (sales, Line),\n LineWithProduct\n end, OrderLines),\n io:format (\"The items of order #31, with products explicited, are:~n~p~n\",\n [OrderLinesWithProduct]),\n ok.\n\n\ntest_fetch () ->\n {ok, X1} = amnesia:fetch (sales, customer),\n io:format (\"SIMPLE FETCH = ~p~n~n\", [X1]),\n\n {ok, X2} = amnesia:fetch (sales,\n [customer, ?JOIN, orders, ?JOIN, order_line]),\n io:format (\"FETCH WITH JOINS = ~p~n~n\", [X2]),\n\n {ok, X3} = 
amnesia:fetch (sales, orders, {\"order_number = $1\", [30]}),\n io:format (\"SIMPLE FETCH WITH SELECTION = ~p~n~n\", [X3]),\n\n {ok, X4} = amnesia:fetch (sales,\n [customer, ?JOIN, orders, ?JOIN, order_line],\n {\"name = $1\", [\"Corrado\"]}),\n io:format (\"FETCH WITH JOINS AND SELECTION = ~p~n~n\", [X4]),\n\n {ok, X5} = amnesia:fetch (sales, customer,\n {}, [{order_by, name}]),\n io:format (\"SIMPLE FETCH WITH ORDERING = ~p~n~n\", [X5]),\n\n {ok, X6} = amnesia:fetch (sales,\n [customer, ?JOIN, orders],\n {}, [{order_by, order_number}]),\n io:format (\"FETCH WITH JOINS AND ORDERING = ~p~n~n\", [X6]),\n\n ok.\n\n\ntest_aggregate() ->\n {ok, X1} = amnesia:fetch (sales, customer, {},\n [{aggregate, \"count(*)\", integer}]),\n io:format (\"SIMPLE COUNT = ~p~n~n\", [X1]),\n\n {ok, X2} = amnesia:fetch (sales, product, {},\n [{aggregate, \"max(price)\", decimal}]),\n io:format (\"SIMPLE MAX = ~p~n~n\", [X2]),\n\n {ok, X3} = amnesia:fetch (sales, product, {},\n [{aggregate, \"count(*)\",\n integer, product_code}]),\n io:format (\"COUNT WITH AGGREGATION (GROUP BY) = ~p~n~n\", [X3]),\n\n {ok, X4} = amnesia:fetch (sales, [product, ?JOIN, order_line], {},\n [{aggregate, \"sum(quantity)\",\n integer, product_code}]),\n io:format (\"COUNT WITH AGGREGATION (GROUP BY) AND JOIN = ~p~n~n\", [X4]),\n\n {ok, X5} = amnesia:fetch (sales, [product, ?JOIN, order_line], {},\n [{aggregate, \"sum(quantity)\",\n integer, product_code,\n {\"__aggregated_data__ > $1\", [5]}}]),\n io:format (\"COUNT WITH AGGREGATION (GROUP BY), JOIN AND HAVING= ~p~n~n\",\n [X5]),\n\n {ok, X6} = amnesia:fetch (sales, [product, ?JOIN, order_line], {},\n [{aggregate, \"sum(quantity)\",\n integer, product_code},\n {order_by, '__aggregated_data__', desc}]),\n io:format (\"COUNT WITH AGGREGATION (GROUP BY), JOIN AND ORDERING= ~p~n~n\",\n [X6]),\n\n {ok, X7} = amnesia:fetch (sales,\n [product, ?JOIN, order_line, ?JOIN, orders], {},\n [{aggregate, \"sum(quantity * price)\",\n decimal, order_number}]),\n io:format (\"~p~n~n\",\n [X7]),\n\n X7.\n\n\ntest_cursor () ->\n {ok, CursorID} =\n amnesia:create_cursor (\n sales,\n amnesia:fetch (sales,\n [customer, ?JOIN, orders, ?JOIN, order_line] )),\n io:format (\"CURSOR ID = ~p~n~n\", [CursorID]),\n\n show_cursor_data (CursorID, 1).\n\n\nshow_cursor_data (CursorID, N) ->\n case amnesia:nth (sales, CursorID, N) of\n {end_of_data} -> amnesia:delete_cursor (sales, CursorID);\n {ok, X} ->\n io:format (\"Item #~p = ~p~n~n\", [N, X]),\n show_cursor_data (CursorID, N + 1)\n end.\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/dizengrong/erlang_game/4598f97daa9ca5eecff292ac401dd8f903eea867/gerl/lib/amnesia/examples/sales_test.erl"},"language":{"kind":"string","value":"erlang"},"comments":{"kind":"string","value":""},"code":{"kind":"string","value":"-module (sales_test).\n\n-include (\"sales.hrl\").\n-include_lib (\"amnesia/include/amnesia.hrl\").\n\n-compile ([export_all]).\n\npopulate() ->\n amnesia:open({local, sales}, sales),\n\n {ok, Cust1} = amnesia:add_new (sales,\n #customer {customer_code = 102341,\n name = \"John\",\n address = \"XXXXX\"}),\n {ok, Cust2} = amnesia:add_new (sales,\n #customer {customer_code = 394021,\n name = \"Corrado\",\n address = \"YYYYYY\",\n email = \"corrado@yyy\"}),\n {ok, Cust3} = amnesia:add_new (sales,\n #customer {customer_code = 102391,\n name = \"Dave\",\n address = \"Dave's home\",\n email = \"dave@zzz\"}),\n\n {ok, P1} = amnesia:add_new (sales,\n #product { product_code = \"001\",\n description = \"CPU 
Intel\",\n price = 231.10 }),\n\n {ok, P2} = amnesia:add_new (sales,\n #product { product_code = \"002\",\n description = \"Compact Flash 4G\",\n price = 57.90 }),\n\n {ok, P3} = amnesia:add_new (sales,\n #product { product_code = \"003\",\n description = \"Hard Disk 500G\",\n price = 190.77 }),\n\n {ok, Order} = amnesia:add_new (sales,\n #orders { order_number = 30,\n order_date = {2008, 7, 17},\n customer = Cust2 }),\n\n amnesia:add_new (sales, #order_line { orders = Order,\n product = P2,\n quantity = 3 }),\n\n amnesia:add_new (sales, #order_line { orders = Order,\n product = P1,\n quantity = 10 }),\n\n amnesia:add_new (sales,\n [#product { product_code = \"004\",\n description = \"Data Server\",\n price = 5200.00 },\n #orders { order_number = 31,\n customer = Cust1},\n #order_line { orders = '$2',\n product = P3,\n quantity = 2} ,\n #order_line { orders = '$2',\n product = '$1',\n quantity = 11 }\n ]),\n ok.\n\n\ntest_join () ->\n amnesia:fetch (sales, [customer, ?JOIN, orders, ?JOIN, order_line]).\n\ntest_join (Pid) ->\n amnesia:fetch (Pid, [customer, ?JOIN, orders, ?JOIN, order_line]).\n\ntest_connections () ->\n {ok, [Order]} = amnesia:fetch (sales, orders, {\"order_number = 31\", []}),\n io:format (\"Order #31 is: ~p~n\", [Order]),\n {ok, OrderWithCust} = amnesia:load_referenced (sales, Order),\n io:format (\"Order #31 with customer explicited is: ~p~n\", [OrderWithCust]),\n {ok, OrderLines} = amnesia:load_referenced (sales, Order, order_line),\n io:format (\"The items of order #31 are: ~p~n\", [OrderLines]),\n OrderLinesWithProduct =\n lists:map (fun (Line) ->\n {ok, LineWithProduct} =\n amnesia:load_referenced (sales, Line),\n LineWithProduct\n end, OrderLines),\n io:format (\"The items of order #31, with products explicited, are:~n~p~n\",\n [OrderLinesWithProduct]),\n ok.\n\n\ntest_fetch () ->\n {ok, X1} = amnesia:fetch (sales, customer),\n io:format (\"SIMPLE FETCH = ~p~n~n\", [X1]),\n\n {ok, X2} = amnesia:fetch (sales,\n [customer, ?JOIN, orders, ?JOIN, order_line]),\n io:format (\"FETCH WITH JOINS = ~p~n~n\", [X2]),\n\n {ok, X3} = amnesia:fetch (sales, orders, {\"order_number = $1\", [30]}),\n io:format (\"SIMPLE FETCH WITH SELECTION = ~p~n~n\", [X3]),\n\n {ok, X4} = amnesia:fetch (sales,\n [customer, ?JOIN, orders, ?JOIN, order_line],\n {\"name = $1\", [\"Corrado\"]}),\n io:format (\"FETCH WITH JOINS AND SELECTION = ~p~n~n\", [X4]),\n\n {ok, X5} = amnesia:fetch (sales, customer,\n {}, [{order_by, name}]),\n io:format (\"SIMPLE FETCH WITH ORDERING = ~p~n~n\", [X5]),\n\n {ok, X6} = amnesia:fetch (sales,\n [customer, ?JOIN, orders],\n {}, [{order_by, order_number}]),\n io:format (\"FETCH WITH JOINS AND ORDERING = ~p~n~n\", [X6]),\n\n ok.\n\n\ntest_aggregate() ->\n {ok, X1} = amnesia:fetch (sales, customer, {},\n [{aggregate, \"count(*)\", integer}]),\n io:format (\"SIMPLE COUNT = ~p~n~n\", [X1]),\n\n {ok, X2} = amnesia:fetch (sales, product, {},\n [{aggregate, \"max(price)\", decimal}]),\n io:format (\"SIMPLE MAX = ~p~n~n\", [X2]),\n\n {ok, X3} = amnesia:fetch (sales, product, {},\n [{aggregate, \"count(*)\",\n integer, product_code}]),\n io:format (\"COUNT WITH AGGREGATION (GROUP BY) = ~p~n~n\", [X3]),\n\n {ok, X4} = amnesia:fetch (sales, [product, ?JOIN, order_line], {},\n [{aggregate, \"sum(quantity)\",\n integer, product_code}]),\n io:format (\"COUNT WITH AGGREGATION (GROUP BY) AND JOIN = ~p~n~n\", [X4]),\n\n {ok, X5} = amnesia:fetch (sales, [product, ?JOIN, order_line], {},\n [{aggregate, \"sum(quantity)\",\n integer, product_code,\n {\"__aggregated_data__ > $1\", 
[5]}}]),\n io:format (\"COUNT WITH AGGREGATION (GROUP BY), JOIN AND HAVING= ~p~n~n\",\n [X5]),\n\n {ok, X6} = amnesia:fetch (sales, [product, ?JOIN, order_line], {},\n [{aggregate, \"sum(quantity)\",\n integer, product_code},\n {order_by, '__aggregated_data__', desc}]),\n io:format (\"COUNT WITH AGGREGATION (GROUP BY), JOIN AND ORDERING= ~p~n~n\",\n [X6]),\n\n {ok, X7} = amnesia:fetch (sales,\n [product, ?JOIN, order_line, ?JOIN, orders], {},\n [{aggregate, \"sum(quantity * price)\",\n decimal, order_number}]),\n io:format (\"~p~n~n\",\n [X7]),\n\n X7.\n\n\ntest_cursor () ->\n {ok, CursorID} =\n amnesia:create_cursor (\n sales,\n amnesia:fetch (sales,\n [customer, ?JOIN, orders, ?JOIN, order_line] )),\n io:format (\"CURSOR ID = ~p~n~n\", [CursorID]),\n\n show_cursor_data (CursorID, 1).\n\n\nshow_cursor_data (CursorID, N) ->\n case amnesia:nth (sales, CursorID, N) of\n {end_of_data} -> amnesia:delete_cursor (sales, CursorID);\n {ok, X} ->\n io:format (\"Item #~p = ~p~n~n\", [N, X]),\n show_cursor_data (CursorID, N + 1)\n end.\n"}}},{"rowIdx":610283,"cells":{"_id":{"kind":"string","value":"eafead00353d62cf3f87cb2ce301404270a1b20635a5474485f77056f0028da7"},"repository":{"kind":"string","value":"SuzanneSoy/anaphoric"},"name":{"kind":"string","value":"acond-test.rkt"},"content":{"kind":"string","value":"#lang racket\n\n(require anaphoric/acond\n rackunit)\n\n(define lst '(x y z a b c))\n(define seen 0)\n\n;; With else branch\n(check-equal? (acond\n [(member 'a lst) (set! seen (add1 seen))\n (check-equal? it '(a b c))\n 'seen-01]\n [(member 'b lst) (fail \"acond selected wrong branch\")]\n [else (fail \"acond selected wrong branch\")])\n 'seen-01)\n(check-equal? seen 1) ;; multiple body statements\n\n(check-equal? (acond\n [(member 'absent lst) (fail \"acond selected wrong branch\")]\n [(member 'b lst) (begin (check-equal? it '(b c))\n 'seen-02)]\n [else (fail \"acond selected wrong branch\")])\n 'seen-02)\n\n(check-equal? (acond\n [(member 'absent lst) (fail \"acond selected wrong branch\")]\n [(member 'absent2 lst) (fail \"acond selected wrong branch\")]\n [else 'seen-03])\n 'seen-03)\n\n;; Just else branch\n(check-equal? (acond\n [else 'seen-04])\n 'seen-04)\n\n;; Multiple body statements\n\n(check-equal? (acond\n [(member 'absent lst) (fail \"acond selected wrong branch\")]\n [(member 'absent2 lst) (fail \"acond selected wrong branch\")]\n [else (set! seen (add1 seen))\n 'seen-05])\n 'seen-05)\n(check-equal? seen 2)\n\n;; Without else branch\n(check-equal? (acond\n [(member 'a lst) (set! seen (add1 seen))\n (check-equal? it '(a b c))\n 'seen-06]\n [(member 'b lst) (fail \"acond selected wrong branch\")])\n 'seen-06)\n(check-equal? seen 3)\n\n(check-equal? (acond\n [(member 'absent lst) (fail \"acond selected wrong branch\")]\n [(member 'b lst) (begin (check-equal? it '(b c))\n 'seen-07)])\n 'seen-07)\n\n(check-equal? (acond\n [(member 'absent lst) (fail \"acond selected wrong branch\")]\n [(member 'absent2 lst) (fail \"acond selected wrong branch\")])\n (void))\n\n;; No branch\n(check-equal? (acond)\n (void))\n\n;; Single branch\n(check-equal? (acond\n [(member 'a lst) (begin (check-equal? it '(a b c))\n 'seen-09)])\n 'seen-09)\n\n(check-equal? 
(acond\n [(member 'absent lst) (fail \"acond selected wrong branch\")])\n (void))"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/SuzanneSoy/anaphoric/c648ec2aad6d2b2ec72acc729143454d1e855cf6/test/acond-test.rkt"},"language":{"kind":"string","value":"racket"},"comments":{"kind":"string","value":" With else branch\n multiple body statements\n Just else branch\n Multiple body statements\n Without else branch\n No branch\n Single branch"},"code":{"kind":"string","value":"#lang racket\n\n(require anaphoric/acond\n rackunit)\n\n(define lst '(x y z a b c))\n(define seen 0)\n\n(check-equal? (acond\n [(member 'a lst) (set! seen (add1 seen))\n (check-equal? it '(a b c))\n 'seen-01]\n [(member 'b lst) (fail \"acond selected wrong branch\")]\n [else (fail \"acond selected wrong branch\")])\n 'seen-01)\n\n(check-equal? (acond\n [(member 'absent lst) (fail \"acond selected wrong branch\")]\n [(member 'b lst) (begin (check-equal? it '(b c))\n 'seen-02)]\n [else (fail \"acond selected wrong branch\")])\n 'seen-02)\n\n(check-equal? (acond\n [(member 'absent lst) (fail \"acond selected wrong branch\")]\n [(member 'absent2 lst) (fail \"acond selected wrong branch\")]\n [else 'seen-03])\n 'seen-03)\n\n(check-equal? (acond\n [else 'seen-04])\n 'seen-04)\n\n\n(check-equal? (acond\n [(member 'absent lst) (fail \"acond selected wrong branch\")]\n [(member 'absent2 lst) (fail \"acond selected wrong branch\")]\n [else (set! seen (add1 seen))\n 'seen-05])\n 'seen-05)\n(check-equal? seen 2)\n\n(check-equal? (acond\n [(member 'a lst) (set! seen (add1 seen))\n (check-equal? it '(a b c))\n 'seen-06]\n [(member 'b lst) (fail \"acond selected wrong branch\")])\n 'seen-06)\n(check-equal? seen 3)\n\n(check-equal? (acond\n [(member 'absent lst) (fail \"acond selected wrong branch\")]\n [(member 'b lst) (begin (check-equal? it '(b c))\n 'seen-07)])\n 'seen-07)\n\n(check-equal? (acond\n [(member 'absent lst) (fail \"acond selected wrong branch\")]\n [(member 'absent2 lst) (fail \"acond selected wrong branch\")])\n (void))\n\n(check-equal? (acond)\n (void))\n\n(check-equal? (acond\n [(member 'a lst) (begin (check-equal? it '(a b c))\n 'seen-09)])\n 'seen-09)\n\n(check-equal? 
(acond\n [(member 'absent lst) (fail \"acond selected wrong branch\")])\n (void))"}}},{"rowIdx":610284,"cells":{"_id":{"kind":"string","value":"235f8bad04cb0fa799bd2d2e5e0ee94427f26199188a1fdbedbbe8c0b24e937e"},"repository":{"kind":"string","value":"dyzsr/ocaml-selectml"},"name":{"kind":"string","value":"t330-compact-2.ml"},"content":{"kind":"string","value":" TEST \n include tool - ocaml - lib \n flags = \" -w -a \" \n ocaml_script_as_argument = \" true \" \n * setup - ocaml - build - env \n * * \ninclude tool-ocaml-lib\nflags = \"-w -a\"\nocaml_script_as_argument = \"true\"\n* setup-ocaml-build-env\n** ocaml\n*)\n\nopen Lib;;\nGc.compact ();;\nlet _ = Pervasives.do_at_exit();;\n\n* \n 0 CONSTINT 42 \n 2 PUSHACC0 \n 3 MAKEBLOCK1 0 \n 5 POP 1 \n 7 \n 9 BRANCH 746 \n 11 RESTART \n 12 GRAB 1 \n 14 ACC0 \n 15 BRANCHIFNOT 28 \n 17 ACC1 \n 18 PUSHACC1 \n 19 GETFIELD1 \n 20 PUSHOFFSETCLOSURE0 \n 21 APPLY2 \n 22 PUSHACC1 \n 23 GETFIELD0 \n 24 MAKEBLOCK2 0 \n 26 RETURN 2 \n 28 ACC1 \n 29 RETURN 2 \n 31 RESTART \n 32 GRAB 3 \n 34 CONST0 \n 35 PUSHACC4 \n 36 LEINT \n 37 BRANCHIFNOT 42 \n 39 \n 40 RETURN 4 \n 42 ACC3 \n 43 PUSHACC3 \n 44 PUSHACC3 \n 45 PUSHACC3 \n 46 C_CALL4 caml_input \n 48 PUSHCONST0 \n 49 PUSHACC1 \n 50 EQ \n 51 BRANCHIFNOT 58 \n 53 End_of_file \n 55 MAKEBLOCK1 0 \n 57 RAISE \n 58 ACC0 \n 59 PUSHACC5 \n 60 SUBINT \n 61 PUSHACC1 \n 62 PUSHACC5 \n 63 ADDINT \n 64 PUSHACC4 \n 65 PUSHACC4 \n 66 PUSHOFFSETCLOSURE0 \n 67 APPTERM 4 , 9 \n 70 ACC0 \n 71 C_CALL1 caml_input_scan_line \n 73 PUSHCONST0 \n 74 PUSHACC1 \n 75 EQ \n 76 BRANCHIFNOT 83 \n 78 End_of_file \n 80 MAKEBLOCK1 0 \n 82 RAISE \n 83 \n 84 PUSHACC1 \n 85 \n 86 BRANCHIFNOT 107 \n 88 ACC0 \n 89 OFFSETINT -1 \n 91 C_CALL1 create_string \n 93 PUSHACC1 \n 94 OFFSETINT -1 \n 96 PUSHCONST0 \n 97 PUSHACC2 \n 98 PUSHACC5 \n 99 C_CALL4 caml_input \n 101 ACC2 \n 102 C_CALL1 caml_input_char \n 104 ACC0 \n 105 RETURN 3 \n 107 ACC0 \n 108 NEGINT \n 109 C_CALL1 create_string \n 111 PUSHACC1 \n 112 NEGINT \n 113 PUSHCONST0 \n 114 PUSHACC2 \n 115 PUSHACC5 \n 116 C_CALL4 caml_input \n 118 \n 119 PUSHTRAP 130 \n 121 ACC6 \n 122 PUSHOFFSETCLOSURE0 \n 123 APPLY1 \n 124 PUSHACC5 \n 125 PUSHENVACC1 \n 126 APPLY2 \n 127 POPTRAP \n 128 RETURN 3 \n 130 PUSHGETGLOBAL End_of_file \n 132 PUSHACC1 \n 133 GETFIELD0 \n 134 EQ \n 135 BRANCHIFNOT 140 \n 137 ACC1 \n 138 RETURN 4 \n 140 ACC0 \n 141 RAISE \n 142 ACC0 \n 143 C_CALL1 caml_flush \n 145 RETURN 1 \n 147 RESTART \n 148 GRAB 1 \n 150 ACC1 \n 151 PUSHACC1 \n 152 C_CALL2 caml_output_char \n 154 RETURN 2 \n 156 RESTART \n 157 GRAB 1 \n 159 ACC1 \n 160 PUSHACC1 \n 161 C_CALL2 caml_output_char \n 163 RETURN 2 \n 165 RESTART \n 166 GRAB 1 \n 168 ACC1 \n 169 PUSHACC1 \n 170 C_CALL2 caml_output_int \n 172 RETURN 2 \n 174 RESTART \n 175 GRAB 1 \n 177 ACC1 \n 178 PUSHACC1 \n 179 C_CALL2 caml_seek_out \n 181 RETURN 2 \n 183 ACC0 \n 184 C_CALL1 caml_pos_out \n 186 RETURN 1 \n 188 ACC0 \n 189 C_CALL1 caml_channel_size \n 191 RETURN 1 \n 193 RESTART \n 194 GRAB 1 \n 196 ACC1 \n 197 PUSHACC1 \n 198 C_CALL2 caml_set_binary_mode \n 200 RETURN 2 \n 202 ACC0 \n 203 C_CALL1 caml_input_char \n 205 RETURN 1 \n 207 ACC0 \n 208 C_CALL1 caml_input_char \n 210 RETURN 1 \n 212 ACC0 \n 213 C_CALL1 caml_input_int \n 215 RETURN 1 \n 217 ACC0 \n 218 C_CALL1 input_value \n 220 RETURN 1 \n 222 RESTART \n 223 GRAB 1 \n 225 ACC1 \n 226 PUSHACC1 \n 227 C_CALL2 caml_seek_in \n 229 RETURN 2 \n 231 ACC0 \n 232 C_CALL1 caml_pos_in \n 234 RETURN 1 \n 236 ACC0 \n 237 C_CALL1 caml_channel_size \n 239 RETURN 1 \n 241 ACC0 \n 242 C_CALL1 
caml_close_channel \n 244 RETURN 1 \n 246 RESTART \n 247 GRAB 1 \n 249 ACC1 \n 250 PUSHACC1 \n 251 C_CALL2 caml_set_binary_mode \n 253 RETURN 2 \n 255 CONST0 \n 256 PUSHENVACC1 \n 257 APPLY1 \n 258 ACC0 \n 259 C_CALL1 sys_exit \n 261 RETURN 1 \n 263 CONST0 \n 264 PUSHENVACC1 \n 265 GETFIELD0 \n 266 APPTERM1 2 \n 268 CONST0 \n 269 PUSHENVACC1 \n 270 APPLY1 \n 271 CONST0 \n 272 PUSHENVACC2 \n 273 APPTERM1 2 \n 275 ENVACC1 \n 276 GETFIELD0 \n 277 PUSHACC0 \n 278 PUSHACC2 \n 279 CLOSURE 2 , 268 \n 282 PUSHENVACC1 \n 283 SETFIELD0 \n 284 RETURN 2 \n 286 ENVACC1 \n 287 C_CALL1 caml_flush \n 289 ENVACC2 \n 290 C_CALL1 caml_flush \n 292 RETURN 1 \n 294 CONST0 \n 295 PUSHENVACC1 \n 296 APPLY1 \n 297 C_CALL1 float_of_string \n 299 RETURN 1 \n 301 CONST0 \n 302 PUSHENVACC1 \n 303 APPLY1 \n 304 C_CALL1 int_of_string \n 306 RETURN 1 \n 308 ENVACC2 \n 309 C_CALL1 caml_flush \n 311 ENVACC1 \n 312 PUSHENVACC3 \n 313 APPTERM1 2 \n 315 CONSTINT 13 \n 317 PUSHENVACC1 \n 318 C_CALL2 caml_output_char \n 320 ENVACC1 \n 321 C_CALL1 caml_flush \n 323 RETURN 1 \n 325 ACC0 \n 326 PUSHENVACC1 \n 327 PUSHENVACC2 \n 328 APPLY2 \n 329 CONSTINT 13 \n 331 PUSHENVACC1 \n 332 C_CALL2 caml_output_char \n 334 ENVACC1 \n 335 C_CALL1 caml_flush \n 337 RETURN 1 \n 339 ACC0 \n 340 PUSHENVACC1 \n 341 APPLY1 \n 342 PUSHENVACC2 \n 343 PUSHENVACC3 \n 344 APPTERM2 3 \n 346 ACC0 \n 347 PUSHENVACC1 \n 348 APPLY1 \n 349 PUSHENVACC2 \n 350 PUSHENVACC3 \n 351 APPTERM2 3 \n 353 ACC0 \n 354 PUSHENVACC1 \n 355 PUSHENVACC2 \n 356 APPTERM2 3 \n 358 ACC0 \n 359 PUSHENVACC1 \n 360 C_CALL2 caml_output_char \n 362 RETURN 1 \n 364 CONSTINT 13 \n 366 PUSHENVACC1 \n 367 C_CALL2 caml_output_char \n 369 ENVACC1 \n 370 C_CALL1 caml_flush \n 372 RETURN 1 \n 374 ACC0 \n 375 PUSHENVACC1 \n 376 PUSHENVACC2 \n 377 APPLY2 \n 378 CONSTINT 13 \n 380 PUSHENVACC1 \n 381 C_CALL2 caml_output_char \n 383 RETURN 1 \n 385 ACC0 \n 386 PUSHENVACC1 \n 387 APPLY1 \n 388 PUSHENVACC2 \n 389 PUSHENVACC3 \n 390 APPTERM2 3 \n 392 ACC0 \n 393 PUSHENVACC1 \n 394 APPLY1 \n 395 PUSHENVACC2 \n 396 PUSHENVACC3 \n 397 APPTERM2 3 \n 399 ACC0 \n 400 PUSHENVACC1 \n 401 PUSHENVACC2 \n 402 APPTERM2 3 \n 404 ACC0 \n 405 PUSHENVACC1 \n 406 C_CALL2 caml_output_char \n 408 RETURN 1 \n 410 RESTART \n 411 GRAB 3 \n 413 CONST0 \n 414 PUSHACC3 \n 415 LTINT \n 416 BRANCHIF 427 \n 418 ACC1 \n 419 C_CALL1 ml_string_length \n 421 PUSHACC4 \n 422 PUSHACC4 \n 423 ADDINT \n 424 GTINT \n 425 BRANCHIFNOT 432 \n 427 GETGLOBAL \" really_input \" \n 429 PUSHENVACC1 \n 430 APPTERM1 5 \n 432 ACC3 \n 433 PUSHACC3 \n 434 PUSHACC3 \n 435 PUSHACC3 \n 436 PUSHENVACC2 \n 437 APPTERM 4 , 8 \n 440 RESTART \n 441 GRAB 3 \n 443 CONST0 \n 444 PUSHACC3 \n 445 LTINT \n 446 BRANCHIF 457 \n 448 ACC1 \n 449 C_CALL1 ml_string_length \n 451 PUSHACC4 \n 452 PUSHACC4 \n 453 ADDINT \n 454 \n 455 \" input \" \n 459 PUSHENVACC1 \n 460 APPTERM1 5 \n 462 ACC3 \n 463 PUSHACC3 \n 464 PUSHACC3 \n 465 PUSHACC3 \n 466 C_CALL4 caml_input \n 468 RETURN 4 \n 470 ACC0 \n 471 PUSHCONST0 \n 472 PUSHGETGLOBAL < 0>(0 , < 0>(6 , 0 ) ) \n 474 PUSHENVACC1 \n 475 APPTERM3 4 \n 477 ACC0 \n 478 PUSHCONST0 \n 479 PUSHGETGLOBAL < 0>(0 , < 0>(7 , 0 ) ) \n 481 PUSHENVACC1 \n 482 APPTERM3 4 \n 484 RESTART \n 485 GRAB 2 \n 487 ACC1 \n 488 PUSHACC1 \n 489 PUSHACC4 \n 490 C_CALL3 sys_open \n 492 C_CALL1 caml_open_descriptor \n 494 RETURN 3 \n 496 ACC0 \n 497 C_CALL1 caml_flush \n 499 ACC0 \n 500 C_CALL1 caml_close_channel \n 502 RETURN 1 \n 504 RESTART \n 505 GRAB 1 \n 507 CONST0 \n 508 PUSHACC2 \n 509 PUSHACC2 \n 510 C_CALL3 output_value \n 512 RETURN 2 \n 514 
RESTART \n 515 GRAB 3 \n 517 CONST0 \n 518 PUSHACC3 \n 519 LTINT \n 520 BRANCHIF 531 \n 522 ACC1 \n 523 C_CALL1 ml_string_length \n 525 PUSHACC4 \n 526 PUSHACC4 \n 527 ADDINT \n 528 \n 529 BRANCHIFNOT 536 \n 531 GETGLOBAL \" output \" \n 533 PUSHENVACC1 \n 534 APPTERM1 5 \n 536 ACC3 \n 537 PUSHACC3 \n 538 PUSHACC3 \n 539 PUSHACC3 \n 540 C_CALL4 caml_output \n 542 RETURN 4 \n 544 RESTART \n 545 GRAB 1 \n 547 ACC1 \n 548 C_CALL1 ml_string_length \n 550 PUSHCONST0 \n 551 PUSHACC3 \n 552 PUSHACC3 \n 553 C_CALL4 caml_output \n 555 RETURN 2 \n 557 ACC0 \n 558 PUSHCONSTINT 438 \n 560 PUSHGETGLOBAL < 0>(1 , < 0>(3 , < 0>(4 , < 0>(6 , 0 ) ) ) ) \n 562 PUSHENVACC1 \n 563 APPTERM3 4 \n 565 ACC0 \n 566 PUSHCONSTINT 438 \n 568 PUSHGETGLOBAL < 0>(1 , < 0>(3 , < 0>(4 , < 0>(7 , 0 ) ) ) ) \n 570 PUSHENVACC1 \n 571 APPTERM3 4 \n 573 RESTART \n 574 GRAB 2 \n 576 ACC1 \n 577 PUSHACC1 \n 578 PUSHACC4 \n 579 C_CALL3 sys_open \n 581 C_CALL1 caml_open_descriptor \n 583 RETURN 3 \n 585 ACC0 \n 586 PUSHGETGLOBAL \" % .12 g \" \n 588 C_CALL2 format_float \n 590 RETURN 1 \n 592 ACC0 \n 593 PUSHGETGLOBAL \" % d \" \n 595 C_CALL2 format_int \n 597 RETURN 1 \n 599 \" false \" \n 601 PUSHACC1 \n 602 C_CALL2 string_equal \n 604 BRANCHIFNOT 609 \n 606 CONST0 \n 607 RETURN 1 \n 609 \" true \" \n 611 PUSHACC1 \n 612 C_CALL2 string_equal \n 614 BRANCHIFNOT 619 \n 616 CONST1 \n 617 RETURN 1 \n 619 \" bool_of_string \" \n 621 PUSHENVACC1 \n 622 APPTERM1 2 \n 624 ACC0 \n 625 BRANCHIFNOT 631 \n 627 \" true \" \n 629 RETURN 1 \n 631 \" false \" \n 633 RETURN 1 \n 635 \n 636 PUSHACC1 \n 637 LTINT \n 638 BRANCHIF 646 \n 640 \n 642 PUSHACC1 \n 643 GTINT \n 644 BRANCHIFNOT 651 \n 646 \" char_of_int \" \n 648 PUSHENVACC1 \n 649 APPTERM1 2 \n 651 ACC0 \n 652 RETURN 1 \n 654 RESTART \n 655 GRAB 1 \n 657 ACC0 \n 658 C_CALL1 ml_string_length \n 660 PUSHACC2 \n 661 C_CALL1 ml_string_length \n 663 PUSHACC0 \n 664 PUSHACC2 \n 665 ADDINT \n 666 C_CALL1 create_string \n 668 PUSHACC2 \n 669 PUSHCONST0 \n 670 PUSHACC2 \n 671 PUSHCONST0 \n 672 PUSHACC7 \n 673 C_CALL5 blit_string \n 675 ACC1 \n 676 PUSHACC3 \n 677 PUSHACC2 \n 678 PUSHCONST0 \n 679 PUSHACC 8 \n 681 C_CALL5 blit_string \n 683 ACC0 \n 684 RETURN 5 \n 686 -1 \n 688 PUSHACC1 \n 689 XORINT \n 690 RETURN 1 \n 692 \n 693 PUSHACC1 \n 694 GEINT \n 695 BRANCHIFNOT 700 \n 697 ACC0 \n 698 RETURN 1 \n 700 ACC0 \n 701 NEGINT \n 702 RETURN 1 \n 704 RESTART \n 705 GRAB 1 \n 707 ACC1 \n 708 PUSHACC1 \n 709 C_CALL2 greaterequal \n 711 BRANCHIFNOT 716 \n 713 ACC0 \n 714 RETURN 2 \n 716 ACC1 \n 717 RETURN 2 \n 719 RESTART \n 720 GRAB 1 \n 722 ACC1 \n 723 PUSHACC1 \n 724 C_CALL2 lessequal \n 726 BRANCHIFNOT 731 \n 728 ACC0 \n 729 RETURN 2 \n 731 ACC1 \n 732 RETURN 2 \n 734 ACC0 \n 735 \n 737 MAKEBLOCK2 0 \n 739 RAISE \n 740 ACC0 \n 741 PUSHGETGLOBAL Failure \n 743 MAKEBLOCK2 0 \n 745 RAISE \n 746 CLOSURE 0 , 740 \n 749 PUSH \n 750 CLOSURE 0 , 734 \n 753 PUSHGETGLOBAL \" Pervasives . Exit \" \n 755 MAKEBLOCK1 0 \n 757 PUSHGETGLOBAL \" Pervasives . 
Assert_failure \" \n 759 MAKEBLOCK1 0 \n 761 PUSH \n 762 CLOSURE 0 , 720 \n 765 PUSH \n 766 CLOSURE 0 , 705 \n 769 PUSH \n 770 CLOSURE 0 , 692 \n 773 PUSH \n 774 CLOSURE 0 , 686 \n 777 PUSHCONST0 \n 778 PUSHCONSTINT 31 \n 780 PUSHCONST1 \n 781 LSLINT \n 782 EQ \n 783 BRANCHIFNOT 789 \n 785 CONSTINT 30 \n 787 BRANCH 791 \n 789 CONSTINT 62 \n 791 PUSHCONST1 \n 792 LSLINT \n 793 PUSHACC0 \n 794 OFFSETINT -1 \n 796 PUSH \n 797 CLOSURE 0 , 655 \n 800 PUSHACC 9 \n 802 CLOSURE 1 , 635 \n 805 PUSH \n 806 CLOSURE 0 , 624 \n 809 PUSHACC 11 \n 811 CLOSURE 1 , 599 \n 814 PUSH \n 815 CLOSURE 0 , 592 \n 818 PUSH \n 819 CLOSURE 0 , 585 \n 822 PUSH \n 823 CLOSUREREC 0 , 12 \n 827 \n 828 C_CALL1 caml_open_descriptor \n 830 PUSHCONST1 \n 831 C_CALL1 caml_open_descriptor \n 833 PUSHCONST2 \n 834 C_CALL1 caml_open_descriptor \n 836 PUSH \n 837 CLOSURE 0 , 574 \n 840 PUSHACC0 \n 841 CLOSURE 1 , 565 \n 844 PUSHACC1 \n 845 CLOSURE 1 , 557 \n 848 PUSH \n 849 CLOSURE 0 , 545 \n 852 PUSHACC 22 \n 854 CLOSURE 1 , 515 \n 857 PUSH \n 858 CLOSURE 0 , 505 \n 861 PUSH \n 862 CLOSURE 0 , 496 \n 865 PUSH \n 866 CLOSURE 0 , 485 \n 869 PUSHACC0 \n 870 CLOSURE 1 , 477 \n 873 PUSHACC1 \n 874 CLOSURE 1 , 470 \n 877 PUSHACC 28 \n 879 CLOSURE 1 , 441 \n 882 PUSH \n 883 CLOSUREREC 0 , 32 \n 887 ACC0 \n 888 PUSHACC 31 \n 890 CLOSURE 2 , 411 \n 893 PUSHACC 22 \n 895 CLOSUREREC 1 , 70 \n 899 ACC 15 \n 901 CLOSURE 1 , 404 \n 904 PUSHACC 11 \n 906 PUSHACC 17 \n 908 CLOSURE 2 , 399 \n 911 PUSHACC 12 \n 913 PUSHACC 18 \n 915 PUSHACC 23 \n 917 CLOSURE 3 , 392 \n 920 PUSHACC 13 \n 922 PUSHACC 19 \n 924 PUSHACC 23 \n 926 CLOSURE 3 , 385 \n 929 PUSHACC 14 \n 931 PUSHACC 20 \n 933 CLOSURE 2 , 374 \n 936 PUSHACC 20 \n 938 CLOSURE 1 , 364 \n 941 PUSHACC 20 \n 943 CLOSURE 1 , 358 \n 946 PUSHACC 17 \n 948 PUSHACC 22 \n 950 CLOSURE 2 , 353 \n 953 PUSHACC 18 \n 955 PUSHACC 23 \n 957 PUSHACC 29 \n 959 CLOSURE 3 , 346 \n 962 PUSHACC 19 \n 964 PUSHACC 24 \n 966 PUSHACC 29 \n 968 CLOSURE 3 , 339 \n 971 PUSHACC 20 \n 973 PUSHACC 25 \n 975 CLOSURE 2 , 325 \n 978 PUSHACC 25 \n 980 CLOSURE 1 , 315 \n 983 PUSHACC 12 \n 985 PUSHACC 28 \n 987 PUSHACC 30 \n 989 CLOSURE 3 , 308 \n 992 PUSHACC0 \n 993 CLOSURE 1 , 301 \n 996 PUSHACC1 \n 997 CLOSURE 1 , 294 \n 1000 PUSHACC 29 \n 1002 PUSHACC 31 \n 1004 CLOSURE 2 , 286 \n 1007 MAKEBLOCK1 0 \n 1009 PUSHACC0 \n 1010 CLOSURE 1 , 275 \n 1013 PUSHACC1 \n 1014 CLOSURE 1 , 263 \n 1017 PUSHACC0 \n 1018 CLOSURE 1 , 255 \n 1021 PUSHACC1 \n 1022 PUSHACC 22 \n 1024 PUSHACC4 \n 1025 PUSHACC3 \n 1026 PUSH \n 1027 CLOSURE 0 , 247 \n 1030 PUSH \n 1031 CLOSURE 0 , 241 \n 1034 PUSH \n 1035 CLOSURE 0 , 236 \n 1038 PUSH \n 1039 CLOSURE 0 , 231 \n 1042 PUSH \n 1043 CLOSURE 0 , 223 \n 1046 PUSH \n 1047 CLOSURE 0 , 217 \n 1050 PUSH \n 1051 CLOSURE 0 , 212 \n 1054 PUSH \n 1055 CLOSURE 0 , 207 \n 1058 PUSHACC 32 \n 1060 PUSHACC 35 \n 1062 PUSHACC 33 \n 1064 PUSH \n 1065 CLOSURE 0 , 202 \n 1068 PUSHACC 41 \n 1070 PUSHACC 40 \n 1072 PUSHACC 42 \n 1074 PUSH \n 1075 CLOSURE 0 , 194 \n 1078 PUSHACC 46 \n 1080 PUSH \n 1081 CLOSURE 0 , 188 \n 1084 PUSH \n 1085 CLOSURE 0 , 183 \n 1088 PUSH \n 1089 CLOSURE 0 , 175 \n 1092 PUSHACC 51 \n 1094 PUSH \n 1095 CLOSURE 0 , 166 \n 1098 PUSH \n 1099 CLOSURE 0 , 157 \n 1102 PUSHACC 55 \n 1104 PUSHACC 57 \n 1106 PUSH \n 1107 CLOSURE 0 , 148 \n 1110 PUSH \n 1111 CLOSURE 0 , 142 \n 1114 PUSHACC 63 \n 1116 PUSHACC 62 \n 1118 PUSHACC 64 \n 1120 PUSHACC 38 \n 1122 PUSHACC 40 \n 1124 PUSHACC 42 \n 1126 PUSHACC 44 \n 1128 PUSHACC 46 \n 1130 PUSHACC 48 \n 1132 PUSHACC 50 \n 1134 PUSHACC 52 \n 1136 PUSHACC 54 \n 1138 
PUSHACC 56 \n 1140 PUSHACC 58 \n 1142 PUSHACC 60 \n 1144 PUSHACC 62 \n 1146 PUSHACC 64 \n 1148 PUSHACC 66 \n 1150 PUSHACC 82 \n 1152 PUSHACC 84 \n 1154 PUSHACC 86 \n 1156 PUSHACC 88 \n 1158 PUSHACC 90 \n 1160 PUSHACC 92 \n 1162 PUSHACC 94 \n 1164 PUSHACC 96 \n 1166 PUSHACC 98 \n 1168 PUSHACC 100 \n 1170 PUSHACC 104 \n 1172 PUSHACC 104 \n 1174 PUSHACC 104 \n 1176 PUSHACC 108 \n 1178 PUSHACC 110 \n 1180 PUSHACC 112 \n 1182 PUSHACC 117 \n 1184 PUSHACC 117 \n 1186 PUSHACC 117 \n 1188 PUSHACC 117 \n 1190 MAKEBLOCK 69 , 0 \n 1193 POP 53 \n 1195 SETGLOBAL Pervasives \n 1197 CONST0 \n 1198 C_CALL1 gc_compaction \n 1200 CONST0 \n 1201 PUSHGETGLOBALFIELD Pervasives , 68 \n 1204 APPLY1 \n 1205 ATOM0 \n 1206 SETGLOBAL T330 - compact-2 \n 1208 STOP \n *\n 0 CONSTINT 42\n 2 PUSHACC0\n 3 MAKEBLOCK1 0\n 5 POP 1\n 7 SETGLOBAL Lib\n 9 BRANCH 746\n 11 RESTART\n 12 GRAB 1\n 14 ACC0\n 15 BRANCHIFNOT 28\n 17 ACC1\n 18 PUSHACC1\n 19 GETFIELD1\n 20 PUSHOFFSETCLOSURE0\n 21 APPLY2\n 22 PUSHACC1\n 23 GETFIELD0\n 24 MAKEBLOCK2 0\n 26 RETURN 2\n 28 ACC1\n 29 RETURN 2\n 31 RESTART\n 32 GRAB 3\n 34 CONST0\n 35 PUSHACC4\n 36 LEINT\n 37 BRANCHIFNOT 42\n 39 CONST0\n 40 RETURN 4\n 42 ACC3\n 43 PUSHACC3\n 44 PUSHACC3\n 45 PUSHACC3\n 46 C_CALL4 caml_input\n 48 PUSHCONST0\n 49 PUSHACC1\n 50 EQ\n 51 BRANCHIFNOT 58\n 53 GETGLOBAL End_of_file\n 55 MAKEBLOCK1 0\n 57 RAISE\n 58 ACC0\n 59 PUSHACC5\n 60 SUBINT\n 61 PUSHACC1\n 62 PUSHACC5\n 63 ADDINT\n 64 PUSHACC4\n 65 PUSHACC4\n 66 PUSHOFFSETCLOSURE0\n 67 APPTERM 4, 9\n 70 ACC0\n 71 C_CALL1 caml_input_scan_line\n 73 PUSHCONST0\n 74 PUSHACC1\n 75 EQ\n 76 BRANCHIFNOT 83\n 78 GETGLOBAL End_of_file\n 80 MAKEBLOCK1 0\n 82 RAISE\n 83 CONST0\n 84 PUSHACC1\n 85 GTINT\n 86 BRANCHIFNOT 107\n 88 ACC0\n 89 OFFSETINT -1\n 91 C_CALL1 create_string\n 93 PUSHACC1\n 94 OFFSETINT -1\n 96 PUSHCONST0\n 97 PUSHACC2\n 98 PUSHACC5\n 99 C_CALL4 caml_input\n 101 ACC2\n 102 C_CALL1 caml_input_char\n 104 ACC0\n 105 RETURN 3\n 107 ACC0\n 108 NEGINT\n 109 C_CALL1 create_string\n 111 PUSHACC1\n 112 NEGINT\n 113 PUSHCONST0\n 114 PUSHACC2\n 115 PUSHACC5\n 116 C_CALL4 caml_input\n 118 CONST0\n 119 PUSHTRAP 130\n 121 ACC6\n 122 PUSHOFFSETCLOSURE0\n 123 APPLY1\n 124 PUSHACC5\n 125 PUSHENVACC1\n 126 APPLY2\n 127 POPTRAP\n 128 RETURN 3\n 130 PUSHGETGLOBAL End_of_file\n 132 PUSHACC1\n 133 GETFIELD0\n 134 EQ\n 135 BRANCHIFNOT 140\n 137 ACC1\n 138 RETURN 4\n 140 ACC0\n 141 RAISE\n 142 ACC0\n 143 C_CALL1 caml_flush\n 145 RETURN 1\n 147 RESTART\n 148 GRAB 1\n 150 ACC1\n 151 PUSHACC1\n 152 C_CALL2 caml_output_char\n 154 RETURN 2\n 156 RESTART\n 157 GRAB 1\n 159 ACC1\n 160 PUSHACC1\n 161 C_CALL2 caml_output_char\n 163 RETURN 2\n 165 RESTART\n 166 GRAB 1\n 168 ACC1\n 169 PUSHACC1\n 170 C_CALL2 caml_output_int\n 172 RETURN 2\n 174 RESTART\n 175 GRAB 1\n 177 ACC1\n 178 PUSHACC1\n 179 C_CALL2 caml_seek_out\n 181 RETURN 2\n 183 ACC0\n 184 C_CALL1 caml_pos_out\n 186 RETURN 1\n 188 ACC0\n 189 C_CALL1 caml_channel_size\n 191 RETURN 1\n 193 RESTART\n 194 GRAB 1\n 196 ACC1\n 197 PUSHACC1\n 198 C_CALL2 caml_set_binary_mode\n 200 RETURN 2\n 202 ACC0\n 203 C_CALL1 caml_input_char\n 205 RETURN 1\n 207 ACC0\n 208 C_CALL1 caml_input_char\n 210 RETURN 1\n 212 ACC0\n 213 C_CALL1 caml_input_int\n 215 RETURN 1\n 217 ACC0\n 218 C_CALL1 input_value\n 220 RETURN 1\n 222 RESTART\n 223 GRAB 1\n 225 ACC1\n 226 PUSHACC1\n 227 C_CALL2 caml_seek_in\n 229 RETURN 2\n 231 ACC0\n 232 C_CALL1 caml_pos_in\n 234 RETURN 1\n 236 ACC0\n 237 C_CALL1 caml_channel_size\n 239 RETURN 1\n 241 ACC0\n 242 C_CALL1 caml_close_channel\n 244 RETURN 1\n 246 RESTART\n 247 GRAB 
1\n 249 ACC1\n 250 PUSHACC1\n 251 C_CALL2 caml_set_binary_mode\n 253 RETURN 2\n 255 CONST0\n 256 PUSHENVACC1\n 257 APPLY1\n 258 ACC0\n 259 C_CALL1 sys_exit\n 261 RETURN 1\n 263 CONST0\n 264 PUSHENVACC1\n 265 GETFIELD0\n 266 APPTERM1 2\n 268 CONST0\n 269 PUSHENVACC1\n 270 APPLY1\n 271 CONST0\n 272 PUSHENVACC2\n 273 APPTERM1 2\n 275 ENVACC1\n 276 GETFIELD0\n 277 PUSHACC0\n 278 PUSHACC2\n 279 CLOSURE 2, 268\n 282 PUSHENVACC1\n 283 SETFIELD0\n 284 RETURN 2\n 286 ENVACC1\n 287 C_CALL1 caml_flush\n 289 ENVACC2\n 290 C_CALL1 caml_flush\n 292 RETURN 1\n 294 CONST0\n 295 PUSHENVACC1\n 296 APPLY1\n 297 C_CALL1 float_of_string\n 299 RETURN 1\n 301 CONST0\n 302 PUSHENVACC1\n 303 APPLY1\n 304 C_CALL1 int_of_string\n 306 RETURN 1\n 308 ENVACC2\n 309 C_CALL1 caml_flush\n 311 ENVACC1\n 312 PUSHENVACC3\n 313 APPTERM1 2\n 315 CONSTINT 13\n 317 PUSHENVACC1\n 318 C_CALL2 caml_output_char\n 320 ENVACC1\n 321 C_CALL1 caml_flush\n 323 RETURN 1\n 325 ACC0\n 326 PUSHENVACC1\n 327 PUSHENVACC2\n 328 APPLY2\n 329 CONSTINT 13\n 331 PUSHENVACC1\n 332 C_CALL2 caml_output_char\n 334 ENVACC1\n 335 C_CALL1 caml_flush\n 337 RETURN 1\n 339 ACC0\n 340 PUSHENVACC1\n 341 APPLY1\n 342 PUSHENVACC2\n 343 PUSHENVACC3\n 344 APPTERM2 3\n 346 ACC0\n 347 PUSHENVACC1\n 348 APPLY1\n 349 PUSHENVACC2\n 350 PUSHENVACC3\n 351 APPTERM2 3\n 353 ACC0\n 354 PUSHENVACC1\n 355 PUSHENVACC2\n 356 APPTERM2 3\n 358 ACC0\n 359 PUSHENVACC1\n 360 C_CALL2 caml_output_char\n 362 RETURN 1\n 364 CONSTINT 13\n 366 PUSHENVACC1\n 367 C_CALL2 caml_output_char\n 369 ENVACC1\n 370 C_CALL1 caml_flush\n 372 RETURN 1\n 374 ACC0\n 375 PUSHENVACC1\n 376 PUSHENVACC2\n 377 APPLY2\n 378 CONSTINT 13\n 380 PUSHENVACC1\n 381 C_CALL2 caml_output_char\n 383 RETURN 1\n 385 ACC0\n 386 PUSHENVACC1\n 387 APPLY1\n 388 PUSHENVACC2\n 389 PUSHENVACC3\n 390 APPTERM2 3\n 392 ACC0\n 393 PUSHENVACC1\n 394 APPLY1\n 395 PUSHENVACC2\n 396 PUSHENVACC3\n 397 APPTERM2 3\n 399 ACC0\n 400 PUSHENVACC1\n 401 PUSHENVACC2\n 402 APPTERM2 3\n 404 ACC0\n 405 PUSHENVACC1\n 406 C_CALL2 caml_output_char\n 408 RETURN 1\n 410 RESTART\n 411 GRAB 3\n 413 CONST0\n 414 PUSHACC3\n 415 LTINT\n 416 BRANCHIF 427\n 418 ACC1\n 419 C_CALL1 ml_string_length\n 421 PUSHACC4\n 422 PUSHACC4\n 423 ADDINT\n 424 GTINT\n 425 BRANCHIFNOT 432\n 427 GETGLOBAL \"really_input\"\n 429 PUSHENVACC1\n 430 APPTERM1 5\n 432 ACC3\n 433 PUSHACC3\n 434 PUSHACC3\n 435 PUSHACC3\n 436 PUSHENVACC2\n 437 APPTERM 4, 8\n 440 RESTART\n 441 GRAB 3\n 443 CONST0\n 444 PUSHACC3\n 445 LTINT\n 446 BRANCHIF 457\n 448 ACC1\n 449 C_CALL1 ml_string_length\n 451 PUSHACC4\n 452 PUSHACC4\n 453 ADDINT\n 454 GTINT\n 455 BRANCHIFNOT 462\n 457 GETGLOBAL \"input\"\n 459 PUSHENVACC1\n 460 APPTERM1 5\n 462 ACC3\n 463 PUSHACC3\n 464 PUSHACC3\n 465 PUSHACC3\n 466 C_CALL4 caml_input\n 468 RETURN 4\n 470 ACC0\n 471 PUSHCONST0\n 472 PUSHGETGLOBAL <0>(0, <0>(6, 0))\n 474 PUSHENVACC1\n 475 APPTERM3 4\n 477 ACC0\n 478 PUSHCONST0\n 479 PUSHGETGLOBAL <0>(0, <0>(7, 0))\n 481 PUSHENVACC1\n 482 APPTERM3 4\n 484 RESTART\n 485 GRAB 2\n 487 ACC1\n 488 PUSHACC1\n 489 PUSHACC4\n 490 C_CALL3 sys_open\n 492 C_CALL1 caml_open_descriptor\n 494 RETURN 3\n 496 ACC0\n 497 C_CALL1 caml_flush\n 499 ACC0\n 500 C_CALL1 caml_close_channel\n 502 RETURN 1\n 504 RESTART\n 505 GRAB 1\n 507 CONST0\n 508 PUSHACC2\n 509 PUSHACC2\n 510 C_CALL3 output_value\n 512 RETURN 2\n 514 RESTART\n 515 GRAB 3\n 517 CONST0\n 518 PUSHACC3\n 519 LTINT\n 520 BRANCHIF 531\n 522 ACC1\n 523 C_CALL1 ml_string_length\n 525 PUSHACC4\n 526 PUSHACC4\n 527 ADDINT\n 528 GTINT\n 529 BRANCHIFNOT 536\n 531 GETGLOBAL \"output\"\n 533 
PUSHENVACC1\n 534 APPTERM1 5\n 536 ACC3\n 537 PUSHACC3\n 538 PUSHACC3\n 539 PUSHACC3\n 540 C_CALL4 caml_output\n 542 RETURN 4\n 544 RESTART\n 545 GRAB 1\n 547 ACC1\n 548 C_CALL1 ml_string_length\n 550 PUSHCONST0\n 551 PUSHACC3\n 552 PUSHACC3\n 553 C_CALL4 caml_output\n 555 RETURN 2\n 557 ACC0\n 558 PUSHCONSTINT 438\n 560 PUSHGETGLOBAL <0>(1, <0>(3, <0>(4, <0>(6, 0))))\n 562 PUSHENVACC1\n 563 APPTERM3 4\n 565 ACC0\n 566 PUSHCONSTINT 438\n 568 PUSHGETGLOBAL <0>(1, <0>(3, <0>(4, <0>(7, 0))))\n 570 PUSHENVACC1\n 571 APPTERM3 4\n 573 RESTART\n 574 GRAB 2\n 576 ACC1\n 577 PUSHACC1\n 578 PUSHACC4\n 579 C_CALL3 sys_open\n 581 C_CALL1 caml_open_descriptor\n 583 RETURN 3\n 585 ACC0\n 586 PUSHGETGLOBAL \"%.12g\"\n 588 C_CALL2 format_float\n 590 RETURN 1\n 592 ACC0\n 593 PUSHGETGLOBAL \"%d\"\n 595 C_CALL2 format_int\n 597 RETURN 1\n 599 GETGLOBAL \"false\"\n 601 PUSHACC1\n 602 C_CALL2 string_equal\n 604 BRANCHIFNOT 609\n 606 CONST0\n 607 RETURN 1\n 609 GETGLOBAL \"true\"\n 611 PUSHACC1\n 612 C_CALL2 string_equal\n 614 BRANCHIFNOT 619\n 616 CONST1\n 617 RETURN 1\n 619 GETGLOBAL \"bool_of_string\"\n 621 PUSHENVACC1\n 622 APPTERM1 2\n 624 ACC0\n 625 BRANCHIFNOT 631\n 627 GETGLOBAL \"true\"\n 629 RETURN 1\n 631 GETGLOBAL \"false\"\n 633 RETURN 1\n 635 CONST0\n 636 PUSHACC1\n 637 LTINT\n 638 BRANCHIF 646\n 640 CONSTINT 255\n 642 PUSHACC1\n 643 GTINT\n 644 BRANCHIFNOT 651\n 646 GETGLOBAL \"char_of_int\"\n 648 PUSHENVACC1\n 649 APPTERM1 2\n 651 ACC0\n 652 RETURN 1\n 654 RESTART\n 655 GRAB 1\n 657 ACC0\n 658 C_CALL1 ml_string_length\n 660 PUSHACC2\n 661 C_CALL1 ml_string_length\n 663 PUSHACC0\n 664 PUSHACC2\n 665 ADDINT\n 666 C_CALL1 create_string\n 668 PUSHACC2\n 669 PUSHCONST0\n 670 PUSHACC2\n 671 PUSHCONST0\n 672 PUSHACC7\n 673 C_CALL5 blit_string\n 675 ACC1\n 676 PUSHACC3\n 677 PUSHACC2\n 678 PUSHCONST0\n 679 PUSHACC 8\n 681 C_CALL5 blit_string\n 683 ACC0\n 684 RETURN 5\n 686 CONSTINT -1\n 688 PUSHACC1\n 689 XORINT\n 690 RETURN 1\n 692 CONST0\n 693 PUSHACC1\n 694 GEINT\n 695 BRANCHIFNOT 700\n 697 ACC0\n 698 RETURN 1\n 700 ACC0\n 701 NEGINT\n 702 RETURN 1\n 704 RESTART\n 705 GRAB 1\n 707 ACC1\n 708 PUSHACC1\n 709 C_CALL2 greaterequal\n 711 BRANCHIFNOT 716\n 713 ACC0\n 714 RETURN 2\n 716 ACC1\n 717 RETURN 2\n 719 RESTART\n 720 GRAB 1\n 722 ACC1\n 723 PUSHACC1\n 724 C_CALL2 lessequal\n 726 BRANCHIFNOT 731\n 728 ACC0\n 729 RETURN 2\n 731 ACC1\n 732 RETURN 2\n 734 ACC0\n 735 PUSHGETGLOBAL Invalid_argument\n 737 MAKEBLOCK2 0\n 739 RAISE\n 740 ACC0\n 741 PUSHGETGLOBAL Failure\n 743 MAKEBLOCK2 0\n 745 RAISE\n 746 CLOSURE 0, 740\n 749 PUSH\n 750 CLOSURE 0, 734\n 753 PUSHGETGLOBAL \"Pervasives.Exit\"\n 755 MAKEBLOCK1 0\n 757 PUSHGETGLOBAL \"Pervasives.Assert_failure\"\n 759 MAKEBLOCK1 0\n 761 PUSH\n 762 CLOSURE 0, 720\n 765 PUSH\n 766 CLOSURE 0, 705\n 769 PUSH\n 770 CLOSURE 0, 692\n 773 PUSH\n 774 CLOSURE 0, 686\n 777 PUSHCONST0\n 778 PUSHCONSTINT 31\n 780 PUSHCONST1\n 781 LSLINT\n 782 EQ\n 783 BRANCHIFNOT 789\n 785 CONSTINT 30\n 787 BRANCH 791\n 789 CONSTINT 62\n 791 PUSHCONST1\n 792 LSLINT\n 793 PUSHACC0\n 794 OFFSETINT -1\n 796 PUSH\n 797 CLOSURE 0, 655\n 800 PUSHACC 9\n 802 CLOSURE 1, 635\n 805 PUSH\n 806 CLOSURE 0, 624\n 809 PUSHACC 11\n 811 CLOSURE 1, 599\n 814 PUSH\n 815 CLOSURE 0, 592\n 818 PUSH\n 819 CLOSURE 0, 585\n 822 PUSH\n 823 CLOSUREREC 0, 12\n 827 CONST0\n 828 C_CALL1 caml_open_descriptor\n 830 PUSHCONST1\n 831 C_CALL1 caml_open_descriptor\n 833 PUSHCONST2\n 834 C_CALL1 caml_open_descriptor\n 836 PUSH\n 837 CLOSURE 0, 574\n 840 PUSHACC0\n 841 CLOSURE 1, 565\n 844 PUSHACC1\n 845 CLOSURE 1, 557\n 848 
PUSH\n 849 CLOSURE 0, 545\n 852 PUSHACC 22\n 854 CLOSURE 1, 515\n 857 PUSH\n 858 CLOSURE 0, 505\n 861 PUSH\n 862 CLOSURE 0, 496\n 865 PUSH\n 866 CLOSURE 0, 485\n 869 PUSHACC0\n 870 CLOSURE 1, 477\n 873 PUSHACC1\n 874 CLOSURE 1, 470\n 877 PUSHACC 28\n 879 CLOSURE 1, 441\n 882 PUSH\n 883 CLOSUREREC 0, 32\n 887 ACC0\n 888 PUSHACC 31\n 890 CLOSURE 2, 411\n 893 PUSHACC 22\n 895 CLOSUREREC 1, 70\n 899 ACC 15\n 901 CLOSURE 1, 404\n 904 PUSHACC 11\n 906 PUSHACC 17\n 908 CLOSURE 2, 399\n 911 PUSHACC 12\n 913 PUSHACC 18\n 915 PUSHACC 23\n 917 CLOSURE 3, 392\n 920 PUSHACC 13\n 922 PUSHACC 19\n 924 PUSHACC 23\n 926 CLOSURE 3, 385\n 929 PUSHACC 14\n 931 PUSHACC 20\n 933 CLOSURE 2, 374\n 936 PUSHACC 20\n 938 CLOSURE 1, 364\n 941 PUSHACC 20\n 943 CLOSURE 1, 358\n 946 PUSHACC 17\n 948 PUSHACC 22\n 950 CLOSURE 2, 353\n 953 PUSHACC 18\n 955 PUSHACC 23\n 957 PUSHACC 29\n 959 CLOSURE 3, 346\n 962 PUSHACC 19\n 964 PUSHACC 24\n 966 PUSHACC 29\n 968 CLOSURE 3, 339\n 971 PUSHACC 20\n 973 PUSHACC 25\n 975 CLOSURE 2, 325\n 978 PUSHACC 25\n 980 CLOSURE 1, 315\n 983 PUSHACC 12\n 985 PUSHACC 28\n 987 PUSHACC 30\n 989 CLOSURE 3, 308\n 992 PUSHACC0\n 993 CLOSURE 1, 301\n 996 PUSHACC1\n 997 CLOSURE 1, 294\n 1000 PUSHACC 29\n 1002 PUSHACC 31\n 1004 CLOSURE 2, 286\n 1007 MAKEBLOCK1 0\n 1009 PUSHACC0\n 1010 CLOSURE 1, 275\n 1013 PUSHACC1\n 1014 CLOSURE 1, 263\n 1017 PUSHACC0\n 1018 CLOSURE 1, 255\n 1021 PUSHACC1\n 1022 PUSHACC 22\n 1024 PUSHACC4\n 1025 PUSHACC3\n 1026 PUSH\n 1027 CLOSURE 0, 247\n 1030 PUSH\n 1031 CLOSURE 0, 241\n 1034 PUSH\n 1035 CLOSURE 0, 236\n 1038 PUSH\n 1039 CLOSURE 0, 231\n 1042 PUSH\n 1043 CLOSURE 0, 223\n 1046 PUSH\n 1047 CLOSURE 0, 217\n 1050 PUSH\n 1051 CLOSURE 0, 212\n 1054 PUSH\n 1055 CLOSURE 0, 207\n 1058 PUSHACC 32\n 1060 PUSHACC 35\n 1062 PUSHACC 33\n 1064 PUSH\n 1065 CLOSURE 0, 202\n 1068 PUSHACC 41\n 1070 PUSHACC 40\n 1072 PUSHACC 42\n 1074 PUSH\n 1075 CLOSURE 0, 194\n 1078 PUSHACC 46\n 1080 PUSH\n 1081 CLOSURE 0, 188\n 1084 PUSH\n 1085 CLOSURE 0, 183\n 1088 PUSH\n 1089 CLOSURE 0, 175\n 1092 PUSHACC 51\n 1094 PUSH\n 1095 CLOSURE 0, 166\n 1098 PUSH\n 1099 CLOSURE 0, 157\n 1102 PUSHACC 55\n 1104 PUSHACC 57\n 1106 PUSH\n 1107 CLOSURE 0, 148\n 1110 PUSH\n 1111 CLOSURE 0, 142\n 1114 PUSHACC 63\n 1116 PUSHACC 62\n 1118 PUSHACC 64\n 1120 PUSHACC 38\n 1122 PUSHACC 40\n 1124 PUSHACC 42\n 1126 PUSHACC 44\n 1128 PUSHACC 46\n 1130 PUSHACC 48\n 1132 PUSHACC 50\n 1134 PUSHACC 52\n 1136 PUSHACC 54\n 1138 PUSHACC 56\n 1140 PUSHACC 58\n 1142 PUSHACC 60\n 1144 PUSHACC 62\n 1146 PUSHACC 64\n 1148 PUSHACC 66\n 1150 PUSHACC 82\n 1152 PUSHACC 84\n 1154 PUSHACC 86\n 1156 PUSHACC 88\n 1158 PUSHACC 90\n 1160 PUSHACC 92\n 1162 PUSHACC 94\n 1164 PUSHACC 96\n 1166 PUSHACC 98\n 1168 PUSHACC 100\n 1170 PUSHACC 104\n 1172 PUSHACC 104\n 1174 PUSHACC 104\n 1176 PUSHACC 108\n 1178 PUSHACC 110\n 1180 PUSHACC 112\n 1182 PUSHACC 117\n 1184 PUSHACC 117\n 1186 PUSHACC 117\n 1188 PUSHACC 117\n 1190 MAKEBLOCK 69, 0\n 1193 POP 53\n 1195 SETGLOBAL Pervasives\n 1197 CONST0\n 1198 C_CALL1 gc_compaction\n 1200 CONST0\n 1201 PUSHGETGLOBALFIELD Pervasives, 68\n 1204 APPLY1\n 1205 ATOM0\n 1206 SETGLOBAL T330-compact-2\n 1208 STOP\n**)\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/dyzsr/ocaml-selectml/875544110abb3350e9fb5ec9bbadffa332c270d2/testsuite/tests/tool-ocaml/t330-compact-2.ml"},"language":{"kind":"string","value":"ocaml"},"comments":{"kind":"string","value":""},"code":{"kind":"string","value":" TEST \n include tool - ocaml - lib \n flags = \" -w -a \" \n 
ocaml_script_as_argument = \" true \" \n * setup - ocaml - build - env \n * * \ninclude tool-ocaml-lib\nflags = \"-w -a\"\nocaml_script_as_argument = \"true\"\n* setup-ocaml-build-env\n** ocaml\n*)\n\nopen Lib;;\nGc.compact ();;\nlet _ = Pervasives.do_at_exit();;\n\n* \n 0 CONSTINT 42 \n 2 PUSHACC0 \n 3 MAKEBLOCK1 0 \n 5 POP 1 \n 7 \n 9 BRANCH 746 \n 11 RESTART \n 12 GRAB 1 \n 14 ACC0 \n 15 BRANCHIFNOT 28 \n 17 ACC1 \n 18 PUSHACC1 \n 19 GETFIELD1 \n 20 PUSHOFFSETCLOSURE0 \n 21 APPLY2 \n 22 PUSHACC1 \n 23 GETFIELD0 \n 24 MAKEBLOCK2 0 \n 26 RETURN 2 \n 28 ACC1 \n 29 RETURN 2 \n 31 RESTART \n 32 GRAB 3 \n 34 CONST0 \n 35 PUSHACC4 \n 36 LEINT \n 37 BRANCHIFNOT 42 \n 39 \n 40 RETURN 4 \n 42 ACC3 \n 43 PUSHACC3 \n 44 PUSHACC3 \n 45 PUSHACC3 \n 46 C_CALL4 caml_input \n 48 PUSHCONST0 \n 49 PUSHACC1 \n 50 EQ \n 51 BRANCHIFNOT 58 \n 53 End_of_file \n 55 MAKEBLOCK1 0 \n 57 RAISE \n 58 ACC0 \n 59 PUSHACC5 \n 60 SUBINT \n 61 PUSHACC1 \n 62 PUSHACC5 \n 63 ADDINT \n 64 PUSHACC4 \n 65 PUSHACC4 \n 66 PUSHOFFSETCLOSURE0 \n 67 APPTERM 4 , 9 \n 70 ACC0 \n 71 C_CALL1 caml_input_scan_line \n 73 PUSHCONST0 \n 74 PUSHACC1 \n 75 EQ \n 76 BRANCHIFNOT 83 \n 78 End_of_file \n 80 MAKEBLOCK1 0 \n 82 RAISE \n 83 \n 84 PUSHACC1 \n 85 \n 86 BRANCHIFNOT 107 \n 88 ACC0 \n 89 OFFSETINT -1 \n 91 C_CALL1 create_string \n 93 PUSHACC1 \n 94 OFFSETINT -1 \n 96 PUSHCONST0 \n 97 PUSHACC2 \n 98 PUSHACC5 \n 99 C_CALL4 caml_input \n 101 ACC2 \n 102 C_CALL1 caml_input_char \n 104 ACC0 \n 105 RETURN 3 \n 107 ACC0 \n 108 NEGINT \n 109 C_CALL1 create_string \n 111 PUSHACC1 \n 112 NEGINT \n 113 PUSHCONST0 \n 114 PUSHACC2 \n 115 PUSHACC5 \n 116 C_CALL4 caml_input \n 118 \n 119 PUSHTRAP 130 \n 121 ACC6 \n 122 PUSHOFFSETCLOSURE0 \n 123 APPLY1 \n 124 PUSHACC5 \n 125 PUSHENVACC1 \n 126 APPLY2 \n 127 POPTRAP \n 128 RETURN 3 \n 130 PUSHGETGLOBAL End_of_file \n 132 PUSHACC1 \n 133 GETFIELD0 \n 134 EQ \n 135 BRANCHIFNOT 140 \n 137 ACC1 \n 138 RETURN 4 \n 140 ACC0 \n 141 RAISE \n 142 ACC0 \n 143 C_CALL1 caml_flush \n 145 RETURN 1 \n 147 RESTART \n 148 GRAB 1 \n 150 ACC1 \n 151 PUSHACC1 \n 152 C_CALL2 caml_output_char \n 154 RETURN 2 \n 156 RESTART \n 157 GRAB 1 \n 159 ACC1 \n 160 PUSHACC1 \n 161 C_CALL2 caml_output_char \n 163 RETURN 2 \n 165 RESTART \n 166 GRAB 1 \n 168 ACC1 \n 169 PUSHACC1 \n 170 C_CALL2 caml_output_int \n 172 RETURN 2 \n 174 RESTART \n 175 GRAB 1 \n 177 ACC1 \n 178 PUSHACC1 \n 179 C_CALL2 caml_seek_out \n 181 RETURN 2 \n 183 ACC0 \n 184 C_CALL1 caml_pos_out \n 186 RETURN 1 \n 188 ACC0 \n 189 C_CALL1 caml_channel_size \n 191 RETURN 1 \n 193 RESTART \n 194 GRAB 1 \n 196 ACC1 \n 197 PUSHACC1 \n 198 C_CALL2 caml_set_binary_mode \n 200 RETURN 2 \n 202 ACC0 \n 203 C_CALL1 caml_input_char \n 205 RETURN 1 \n 207 ACC0 \n 208 C_CALL1 caml_input_char \n 210 RETURN 1 \n 212 ACC0 \n 213 C_CALL1 caml_input_int \n 215 RETURN 1 \n 217 ACC0 \n 218 C_CALL1 input_value \n 220 RETURN 1 \n 222 RESTART \n 223 GRAB 1 \n 225 ACC1 \n 226 PUSHACC1 \n 227 C_CALL2 caml_seek_in \n 229 RETURN 2 \n 231 ACC0 \n 232 C_CALL1 caml_pos_in \n 234 RETURN 1 \n 236 ACC0 \n 237 C_CALL1 caml_channel_size \n 239 RETURN 1 \n 241 ACC0 \n 242 C_CALL1 caml_close_channel \n 244 RETURN 1 \n 246 RESTART \n 247 GRAB 1 \n 249 ACC1 \n 250 PUSHACC1 \n 251 C_CALL2 caml_set_binary_mode \n 253 RETURN 2 \n 255 CONST0 \n 256 PUSHENVACC1 \n 257 APPLY1 \n 258 ACC0 \n 259 C_CALL1 sys_exit \n 261 RETURN 1 \n 263 CONST0 \n 264 PUSHENVACC1 \n 265 GETFIELD0 \n 266 APPTERM1 2 \n 268 CONST0 \n 269 PUSHENVACC1 \n 270 APPLY1 \n 271 CONST0 \n 272 PUSHENVACC2 \n 273 APPTERM1 2 \n 275 ENVACC1 \n 276 
GETFIELD0 \n 277 PUSHACC0 \n 278 PUSHACC2 \n 279 CLOSURE 2 , 268 \n 282 PUSHENVACC1 \n 283 SETFIELD0 \n 284 RETURN 2 \n 286 ENVACC1 \n 287 C_CALL1 caml_flush \n 289 ENVACC2 \n 290 C_CALL1 caml_flush \n 292 RETURN 1 \n 294 CONST0 \n 295 PUSHENVACC1 \n 296 APPLY1 \n 297 C_CALL1 float_of_string \n 299 RETURN 1 \n 301 CONST0 \n 302 PUSHENVACC1 \n 303 APPLY1 \n 304 C_CALL1 int_of_string \n 306 RETURN 1 \n 308 ENVACC2 \n 309 C_CALL1 caml_flush \n 311 ENVACC1 \n 312 PUSHENVACC3 \n 313 APPTERM1 2 \n 315 CONSTINT 13 \n 317 PUSHENVACC1 \n 318 C_CALL2 caml_output_char \n 320 ENVACC1 \n 321 C_CALL1 caml_flush \n 323 RETURN 1 \n 325 ACC0 \n 326 PUSHENVACC1 \n 327 PUSHENVACC2 \n 328 APPLY2 \n 329 CONSTINT 13 \n 331 PUSHENVACC1 \n 332 C_CALL2 caml_output_char \n 334 ENVACC1 \n 335 C_CALL1 caml_flush \n 337 RETURN 1 \n 339 ACC0 \n 340 PUSHENVACC1 \n 341 APPLY1 \n 342 PUSHENVACC2 \n 343 PUSHENVACC3 \n 344 APPTERM2 3 \n 346 ACC0 \n 347 PUSHENVACC1 \n 348 APPLY1 \n 349 PUSHENVACC2 \n 350 PUSHENVACC3 \n 351 APPTERM2 3 \n 353 ACC0 \n 354 PUSHENVACC1 \n 355 PUSHENVACC2 \n 356 APPTERM2 3 \n 358 ACC0 \n 359 PUSHENVACC1 \n 360 C_CALL2 caml_output_char \n 362 RETURN 1 \n 364 CONSTINT 13 \n 366 PUSHENVACC1 \n 367 C_CALL2 caml_output_char \n 369 ENVACC1 \n 370 C_CALL1 caml_flush \n 372 RETURN 1 \n 374 ACC0 \n 375 PUSHENVACC1 \n 376 PUSHENVACC2 \n 377 APPLY2 \n 378 CONSTINT 13 \n 380 PUSHENVACC1 \n 381 C_CALL2 caml_output_char \n 383 RETURN 1 \n 385 ACC0 \n 386 PUSHENVACC1 \n 387 APPLY1 \n 388 PUSHENVACC2 \n 389 PUSHENVACC3 \n 390 APPTERM2 3 \n 392 ACC0 \n 393 PUSHENVACC1 \n 394 APPLY1 \n 395 PUSHENVACC2 \n 396 PUSHENVACC3 \n 397 APPTERM2 3 \n 399 ACC0 \n 400 PUSHENVACC1 \n 401 PUSHENVACC2 \n 402 APPTERM2 3 \n 404 ACC0 \n 405 PUSHENVACC1 \n 406 C_CALL2 caml_output_char \n 408 RETURN 1 \n 410 RESTART \n 411 GRAB 3 \n 413 CONST0 \n 414 PUSHACC3 \n 415 LTINT \n 416 BRANCHIF 427 \n 418 ACC1 \n 419 C_CALL1 ml_string_length \n 421 PUSHACC4 \n 422 PUSHACC4 \n 423 ADDINT \n 424 GTINT \n 425 BRANCHIFNOT 432 \n 427 GETGLOBAL \" really_input \" \n 429 PUSHENVACC1 \n 430 APPTERM1 5 \n 432 ACC3 \n 433 PUSHACC3 \n 434 PUSHACC3 \n 435 PUSHACC3 \n 436 PUSHENVACC2 \n 437 APPTERM 4 , 8 \n 440 RESTART \n 441 GRAB 3 \n 443 CONST0 \n 444 PUSHACC3 \n 445 LTINT \n 446 BRANCHIF 457 \n 448 ACC1 \n 449 C_CALL1 ml_string_length \n 451 PUSHACC4 \n 452 PUSHACC4 \n 453 ADDINT \n 454 \n 455 \" input \" \n 459 PUSHENVACC1 \n 460 APPTERM1 5 \n 462 ACC3 \n 463 PUSHACC3 \n 464 PUSHACC3 \n 465 PUSHACC3 \n 466 C_CALL4 caml_input \n 468 RETURN 4 \n 470 ACC0 \n 471 PUSHCONST0 \n 472 PUSHGETGLOBAL < 0>(0 , < 0>(6 , 0 ) ) \n 474 PUSHENVACC1 \n 475 APPTERM3 4 \n 477 ACC0 \n 478 PUSHCONST0 \n 479 PUSHGETGLOBAL < 0>(0 , < 0>(7 , 0 ) ) \n 481 PUSHENVACC1 \n 482 APPTERM3 4 \n 484 RESTART \n 485 GRAB 2 \n 487 ACC1 \n 488 PUSHACC1 \n 489 PUSHACC4 \n 490 C_CALL3 sys_open \n 492 C_CALL1 caml_open_descriptor \n 494 RETURN 3 \n 496 ACC0 \n 497 C_CALL1 caml_flush \n 499 ACC0 \n 500 C_CALL1 caml_close_channel \n 502 RETURN 1 \n 504 RESTART \n 505 GRAB 1 \n 507 CONST0 \n 508 PUSHACC2 \n 509 PUSHACC2 \n 510 C_CALL3 output_value \n 512 RETURN 2 \n 514 RESTART \n 515 GRAB 3 \n 517 CONST0 \n 518 PUSHACC3 \n 519 LTINT \n 520 BRANCHIF 531 \n 522 ACC1 \n 523 C_CALL1 ml_string_length \n 525 PUSHACC4 \n 526 PUSHACC4 \n 527 ADDINT \n 528 \n 529 BRANCHIFNOT 536 \n 531 GETGLOBAL \" output \" \n 533 PUSHENVACC1 \n 534 APPTERM1 5 \n 536 ACC3 \n 537 PUSHACC3 \n 538 PUSHACC3 \n 539 PUSHACC3 \n 540 C_CALL4 caml_output \n 542 RETURN 4 \n 544 RESTART \n 545 GRAB 1 \n 547 ACC1 \n 548 C_CALL1 
ml_string_length \n 550 PUSHCONST0 \n 551 PUSHACC3 \n 552 PUSHACC3 \n 553 C_CALL4 caml_output \n 555 RETURN 2 \n 557 ACC0 \n 558 PUSHCONSTINT 438 \n 560 PUSHGETGLOBAL < 0>(1 , < 0>(3 , < 0>(4 , < 0>(6 , 0 ) ) ) ) \n 562 PUSHENVACC1 \n 563 APPTERM3 4 \n 565 ACC0 \n 566 PUSHCONSTINT 438 \n 568 PUSHGETGLOBAL < 0>(1 , < 0>(3 , < 0>(4 , < 0>(7 , 0 ) ) ) ) \n 570 PUSHENVACC1 \n 571 APPTERM3 4 \n 573 RESTART \n 574 GRAB 2 \n 576 ACC1 \n 577 PUSHACC1 \n 578 PUSHACC4 \n 579 C_CALL3 sys_open \n 581 C_CALL1 caml_open_descriptor \n 583 RETURN 3 \n 585 ACC0 \n 586 PUSHGETGLOBAL \" % .12 g \" \n 588 C_CALL2 format_float \n 590 RETURN 1 \n 592 ACC0 \n 593 PUSHGETGLOBAL \" % d \" \n 595 C_CALL2 format_int \n 597 RETURN 1 \n 599 \" false \" \n 601 PUSHACC1 \n 602 C_CALL2 string_equal \n 604 BRANCHIFNOT 609 \n 606 CONST0 \n 607 RETURN 1 \n 609 \" true \" \n 611 PUSHACC1 \n 612 C_CALL2 string_equal \n 614 BRANCHIFNOT 619 \n 616 CONST1 \n 617 RETURN 1 \n 619 \" bool_of_string \" \n 621 PUSHENVACC1 \n 622 APPTERM1 2 \n 624 ACC0 \n 625 BRANCHIFNOT 631 \n 627 \" true \" \n 629 RETURN 1 \n 631 \" false \" \n 633 RETURN 1 \n 635 \n 636 PUSHACC1 \n 637 LTINT \n 638 BRANCHIF 646 \n 640 \n 642 PUSHACC1 \n 643 GTINT \n 644 BRANCHIFNOT 651 \n 646 \" char_of_int \" \n 648 PUSHENVACC1 \n 649 APPTERM1 2 \n 651 ACC0 \n 652 RETURN 1 \n 654 RESTART \n 655 GRAB 1 \n 657 ACC0 \n 658 C_CALL1 ml_string_length \n 660 PUSHACC2 \n 661 C_CALL1 ml_string_length \n 663 PUSHACC0 \n 664 PUSHACC2 \n 665 ADDINT \n 666 C_CALL1 create_string \n 668 PUSHACC2 \n 669 PUSHCONST0 \n 670 PUSHACC2 \n 671 PUSHCONST0 \n 672 PUSHACC7 \n 673 C_CALL5 blit_string \n 675 ACC1 \n 676 PUSHACC3 \n 677 PUSHACC2 \n 678 PUSHCONST0 \n 679 PUSHACC 8 \n 681 C_CALL5 blit_string \n 683 ACC0 \n 684 RETURN 5 \n 686 -1 \n 688 PUSHACC1 \n 689 XORINT \n 690 RETURN 1 \n 692 \n 693 PUSHACC1 \n 694 GEINT \n 695 BRANCHIFNOT 700 \n 697 ACC0 \n 698 RETURN 1 \n 700 ACC0 \n 701 NEGINT \n 702 RETURN 1 \n 704 RESTART \n 705 GRAB 1 \n 707 ACC1 \n 708 PUSHACC1 \n 709 C_CALL2 greaterequal \n 711 BRANCHIFNOT 716 \n 713 ACC0 \n 714 RETURN 2 \n 716 ACC1 \n 717 RETURN 2 \n 719 RESTART \n 720 GRAB 1 \n 722 ACC1 \n 723 PUSHACC1 \n 724 C_CALL2 lessequal \n 726 BRANCHIFNOT 731 \n 728 ACC0 \n 729 RETURN 2 \n 731 ACC1 \n 732 RETURN 2 \n 734 ACC0 \n 735 \n 737 MAKEBLOCK2 0 \n 739 RAISE \n 740 ACC0 \n 741 PUSHGETGLOBAL Failure \n 743 MAKEBLOCK2 0 \n 745 RAISE \n 746 CLOSURE 0 , 740 \n 749 PUSH \n 750 CLOSURE 0 , 734 \n 753 PUSHGETGLOBAL \" Pervasives . Exit \" \n 755 MAKEBLOCK1 0 \n 757 PUSHGETGLOBAL \" Pervasives . 
Assert_failure \" \n 759 MAKEBLOCK1 0 \n 761 PUSH \n 762 CLOSURE 0 , 720 \n 765 PUSH \n 766 CLOSURE 0 , 705 \n 769 PUSH \n 770 CLOSURE 0 , 692 \n 773 PUSH \n 774 CLOSURE 0 , 686 \n 777 PUSHCONST0 \n 778 PUSHCONSTINT 31 \n 780 PUSHCONST1 \n 781 LSLINT \n 782 EQ \n 783 BRANCHIFNOT 789 \n 785 CONSTINT 30 \n 787 BRANCH 791 \n 789 CONSTINT 62 \n 791 PUSHCONST1 \n 792 LSLINT \n 793 PUSHACC0 \n 794 OFFSETINT -1 \n 796 PUSH \n 797 CLOSURE 0 , 655 \n 800 PUSHACC 9 \n 802 CLOSURE 1 , 635 \n 805 PUSH \n 806 CLOSURE 0 , 624 \n 809 PUSHACC 11 \n 811 CLOSURE 1 , 599 \n 814 PUSH \n 815 CLOSURE 0 , 592 \n 818 PUSH \n 819 CLOSURE 0 , 585 \n 822 PUSH \n 823 CLOSUREREC 0 , 12 \n 827 \n 828 C_CALL1 caml_open_descriptor \n 830 PUSHCONST1 \n 831 C_CALL1 caml_open_descriptor \n 833 PUSHCONST2 \n 834 C_CALL1 caml_open_descriptor \n 836 PUSH \n 837 CLOSURE 0 , 574 \n 840 PUSHACC0 \n 841 CLOSURE 1 , 565 \n 844 PUSHACC1 \n 845 CLOSURE 1 , 557 \n 848 PUSH \n 849 CLOSURE 0 , 545 \n 852 PUSHACC 22 \n 854 CLOSURE 1 , 515 \n 857 PUSH \n 858 CLOSURE 0 , 505 \n 861 PUSH \n 862 CLOSURE 0 , 496 \n 865 PUSH \n 866 CLOSURE 0 , 485 \n 869 PUSHACC0 \n 870 CLOSURE 1 , 477 \n 873 PUSHACC1 \n 874 CLOSURE 1 , 470 \n 877 PUSHACC 28 \n 879 CLOSURE 1 , 441 \n 882 PUSH \n 883 CLOSUREREC 0 , 32 \n 887 ACC0 \n 888 PUSHACC 31 \n 890 CLOSURE 2 , 411 \n 893 PUSHACC 22 \n 895 CLOSUREREC 1 , 70 \n 899 ACC 15 \n 901 CLOSURE 1 , 404 \n 904 PUSHACC 11 \n 906 PUSHACC 17 \n 908 CLOSURE 2 , 399 \n 911 PUSHACC 12 \n 913 PUSHACC 18 \n 915 PUSHACC 23 \n 917 CLOSURE 3 , 392 \n 920 PUSHACC 13 \n 922 PUSHACC 19 \n 924 PUSHACC 23 \n 926 CLOSURE 3 , 385 \n 929 PUSHACC 14 \n 931 PUSHACC 20 \n 933 CLOSURE 2 , 374 \n 936 PUSHACC 20 \n 938 CLOSURE 1 , 364 \n 941 PUSHACC 20 \n 943 CLOSURE 1 , 358 \n 946 PUSHACC 17 \n 948 PUSHACC 22 \n 950 CLOSURE 2 , 353 \n 953 PUSHACC 18 \n 955 PUSHACC 23 \n 957 PUSHACC 29 \n 959 CLOSURE 3 , 346 \n 962 PUSHACC 19 \n 964 PUSHACC 24 \n 966 PUSHACC 29 \n 968 CLOSURE 3 , 339 \n 971 PUSHACC 20 \n 973 PUSHACC 25 \n 975 CLOSURE 2 , 325 \n 978 PUSHACC 25 \n 980 CLOSURE 1 , 315 \n 983 PUSHACC 12 \n 985 PUSHACC 28 \n 987 PUSHACC 30 \n 989 CLOSURE 3 , 308 \n 992 PUSHACC0 \n 993 CLOSURE 1 , 301 \n 996 PUSHACC1 \n 997 CLOSURE 1 , 294 \n 1000 PUSHACC 29 \n 1002 PUSHACC 31 \n 1004 CLOSURE 2 , 286 \n 1007 MAKEBLOCK1 0 \n 1009 PUSHACC0 \n 1010 CLOSURE 1 , 275 \n 1013 PUSHACC1 \n 1014 CLOSURE 1 , 263 \n 1017 PUSHACC0 \n 1018 CLOSURE 1 , 255 \n 1021 PUSHACC1 \n 1022 PUSHACC 22 \n 1024 PUSHACC4 \n 1025 PUSHACC3 \n 1026 PUSH \n 1027 CLOSURE 0 , 247 \n 1030 PUSH \n 1031 CLOSURE 0 , 241 \n 1034 PUSH \n 1035 CLOSURE 0 , 236 \n 1038 PUSH \n 1039 CLOSURE 0 , 231 \n 1042 PUSH \n 1043 CLOSURE 0 , 223 \n 1046 PUSH \n 1047 CLOSURE 0 , 217 \n 1050 PUSH \n 1051 CLOSURE 0 , 212 \n 1054 PUSH \n 1055 CLOSURE 0 , 207 \n 1058 PUSHACC 32 \n 1060 PUSHACC 35 \n 1062 PUSHACC 33 \n 1064 PUSH \n 1065 CLOSURE 0 , 202 \n 1068 PUSHACC 41 \n 1070 PUSHACC 40 \n 1072 PUSHACC 42 \n 1074 PUSH \n 1075 CLOSURE 0 , 194 \n 1078 PUSHACC 46 \n 1080 PUSH \n 1081 CLOSURE 0 , 188 \n 1084 PUSH \n 1085 CLOSURE 0 , 183 \n 1088 PUSH \n 1089 CLOSURE 0 , 175 \n 1092 PUSHACC 51 \n 1094 PUSH \n 1095 CLOSURE 0 , 166 \n 1098 PUSH \n 1099 CLOSURE 0 , 157 \n 1102 PUSHACC 55 \n 1104 PUSHACC 57 \n 1106 PUSH \n 1107 CLOSURE 0 , 148 \n 1110 PUSH \n 1111 CLOSURE 0 , 142 \n 1114 PUSHACC 63 \n 1116 PUSHACC 62 \n 1118 PUSHACC 64 \n 1120 PUSHACC 38 \n 1122 PUSHACC 40 \n 1124 PUSHACC 42 \n 1126 PUSHACC 44 \n 1128 PUSHACC 46 \n 1130 PUSHACC 48 \n 1132 PUSHACC 50 \n 1134 PUSHACC 52 \n 1136 PUSHACC 54 \n 1138 
PUSHACC 56 \n 1140 PUSHACC 58 \n 1142 PUSHACC 60 \n 1144 PUSHACC 62 \n 1146 PUSHACC 64 \n 1148 PUSHACC 66 \n 1150 PUSHACC 82 \n 1152 PUSHACC 84 \n 1154 PUSHACC 86 \n 1156 PUSHACC 88 \n 1158 PUSHACC 90 \n 1160 PUSHACC 92 \n 1162 PUSHACC 94 \n 1164 PUSHACC 96 \n 1166 PUSHACC 98 \n 1168 PUSHACC 100 \n 1170 PUSHACC 104 \n 1172 PUSHACC 104 \n 1174 PUSHACC 104 \n 1176 PUSHACC 108 \n 1178 PUSHACC 110 \n 1180 PUSHACC 112 \n 1182 PUSHACC 117 \n 1184 PUSHACC 117 \n 1186 PUSHACC 117 \n 1188 PUSHACC 117 \n 1190 MAKEBLOCK 69 , 0 \n 1193 POP 53 \n 1195 SETGLOBAL Pervasives \n 1197 CONST0 \n 1198 C_CALL1 gc_compaction \n 1200 CONST0 \n 1201 PUSHGETGLOBALFIELD Pervasives , 68 \n 1204 APPLY1 \n 1205 ATOM0 \n 1206 SETGLOBAL T330 - compact-2 \n 1208 STOP \n *\n 0 CONSTINT 42\n 2 PUSHACC0\n 3 MAKEBLOCK1 0\n 5 POP 1\n 7 SETGLOBAL Lib\n 9 BRANCH 746\n 11 RESTART\n 12 GRAB 1\n 14 ACC0\n 15 BRANCHIFNOT 28\n 17 ACC1\n 18 PUSHACC1\n 19 GETFIELD1\n 20 PUSHOFFSETCLOSURE0\n 21 APPLY2\n 22 PUSHACC1\n 23 GETFIELD0\n 24 MAKEBLOCK2 0\n 26 RETURN 2\n 28 ACC1\n 29 RETURN 2\n 31 RESTART\n 32 GRAB 3\n 34 CONST0\n 35 PUSHACC4\n 36 LEINT\n 37 BRANCHIFNOT 42\n 39 CONST0\n 40 RETURN 4\n 42 ACC3\n 43 PUSHACC3\n 44 PUSHACC3\n 45 PUSHACC3\n 46 C_CALL4 caml_input\n 48 PUSHCONST0\n 49 PUSHACC1\n 50 EQ\n 51 BRANCHIFNOT 58\n 53 GETGLOBAL End_of_file\n 55 MAKEBLOCK1 0\n 57 RAISE\n 58 ACC0\n 59 PUSHACC5\n 60 SUBINT\n 61 PUSHACC1\n 62 PUSHACC5\n 63 ADDINT\n 64 PUSHACC4\n 65 PUSHACC4\n 66 PUSHOFFSETCLOSURE0\n 67 APPTERM 4, 9\n 70 ACC0\n 71 C_CALL1 caml_input_scan_line\n 73 PUSHCONST0\n 74 PUSHACC1\n 75 EQ\n 76 BRANCHIFNOT 83\n 78 GETGLOBAL End_of_file\n 80 MAKEBLOCK1 0\n 82 RAISE\n 83 CONST0\n 84 PUSHACC1\n 85 GTINT\n 86 BRANCHIFNOT 107\n 88 ACC0\n 89 OFFSETINT -1\n 91 C_CALL1 create_string\n 93 PUSHACC1\n 94 OFFSETINT -1\n 96 PUSHCONST0\n 97 PUSHACC2\n 98 PUSHACC5\n 99 C_CALL4 caml_input\n 101 ACC2\n 102 C_CALL1 caml_input_char\n 104 ACC0\n 105 RETURN 3\n 107 ACC0\n 108 NEGINT\n 109 C_CALL1 create_string\n 111 PUSHACC1\n 112 NEGINT\n 113 PUSHCONST0\n 114 PUSHACC2\n 115 PUSHACC5\n 116 C_CALL4 caml_input\n 118 CONST0\n 119 PUSHTRAP 130\n 121 ACC6\n 122 PUSHOFFSETCLOSURE0\n 123 APPLY1\n 124 PUSHACC5\n 125 PUSHENVACC1\n 126 APPLY2\n 127 POPTRAP\n 128 RETURN 3\n 130 PUSHGETGLOBAL End_of_file\n 132 PUSHACC1\n 133 GETFIELD0\n 134 EQ\n 135 BRANCHIFNOT 140\n 137 ACC1\n 138 RETURN 4\n 140 ACC0\n 141 RAISE\n 142 ACC0\n 143 C_CALL1 caml_flush\n 145 RETURN 1\n 147 RESTART\n 148 GRAB 1\n 150 ACC1\n 151 PUSHACC1\n 152 C_CALL2 caml_output_char\n 154 RETURN 2\n 156 RESTART\n 157 GRAB 1\n 159 ACC1\n 160 PUSHACC1\n 161 C_CALL2 caml_output_char\n 163 RETURN 2\n 165 RESTART\n 166 GRAB 1\n 168 ACC1\n 169 PUSHACC1\n 170 C_CALL2 caml_output_int\n 172 RETURN 2\n 174 RESTART\n 175 GRAB 1\n 177 ACC1\n 178 PUSHACC1\n 179 C_CALL2 caml_seek_out\n 181 RETURN 2\n 183 ACC0\n 184 C_CALL1 caml_pos_out\n 186 RETURN 1\n 188 ACC0\n 189 C_CALL1 caml_channel_size\n 191 RETURN 1\n 193 RESTART\n 194 GRAB 1\n 196 ACC1\n 197 PUSHACC1\n 198 C_CALL2 caml_set_binary_mode\n 200 RETURN 2\n 202 ACC0\n 203 C_CALL1 caml_input_char\n 205 RETURN 1\n 207 ACC0\n 208 C_CALL1 caml_input_char\n 210 RETURN 1\n 212 ACC0\n 213 C_CALL1 caml_input_int\n 215 RETURN 1\n 217 ACC0\n 218 C_CALL1 input_value\n 220 RETURN 1\n 222 RESTART\n 223 GRAB 1\n 225 ACC1\n 226 PUSHACC1\n 227 C_CALL2 caml_seek_in\n 229 RETURN 2\n 231 ACC0\n 232 C_CALL1 caml_pos_in\n 234 RETURN 1\n 236 ACC0\n 237 C_CALL1 caml_channel_size\n 239 RETURN 1\n 241 ACC0\n 242 C_CALL1 caml_close_channel\n 244 RETURN 1\n 246 RESTART\n 247 GRAB 
1\n 249 ACC1\n 250 PUSHACC1\n 251 C_CALL2 caml_set_binary_mode\n 253 RETURN 2\n 255 CONST0\n 256 PUSHENVACC1\n 257 APPLY1\n 258 ACC0\n 259 C_CALL1 sys_exit\n 261 RETURN 1\n 263 CONST0\n 264 PUSHENVACC1\n 265 GETFIELD0\n 266 APPTERM1 2\n 268 CONST0\n 269 PUSHENVACC1\n 270 APPLY1\n 271 CONST0\n 272 PUSHENVACC2\n 273 APPTERM1 2\n 275 ENVACC1\n 276 GETFIELD0\n 277 PUSHACC0\n 278 PUSHACC2\n 279 CLOSURE 2, 268\n 282 PUSHENVACC1\n 283 SETFIELD0\n 284 RETURN 2\n 286 ENVACC1\n 287 C_CALL1 caml_flush\n 289 ENVACC2\n 290 C_CALL1 caml_flush\n 292 RETURN 1\n 294 CONST0\n 295 PUSHENVACC1\n 296 APPLY1\n 297 C_CALL1 float_of_string\n 299 RETURN 1\n 301 CONST0\n 302 PUSHENVACC1\n 303 APPLY1\n 304 C_CALL1 int_of_string\n 306 RETURN 1\n 308 ENVACC2\n 309 C_CALL1 caml_flush\n 311 ENVACC1\n 312 PUSHENVACC3\n 313 APPTERM1 2\n 315 CONSTINT 13\n 317 PUSHENVACC1\n 318 C_CALL2 caml_output_char\n 320 ENVACC1\n 321 C_CALL1 caml_flush\n 323 RETURN 1\n 325 ACC0\n 326 PUSHENVACC1\n 327 PUSHENVACC2\n 328 APPLY2\n 329 CONSTINT 13\n 331 PUSHENVACC1\n 332 C_CALL2 caml_output_char\n 334 ENVACC1\n 335 C_CALL1 caml_flush\n 337 RETURN 1\n 339 ACC0\n 340 PUSHENVACC1\n 341 APPLY1\n 342 PUSHENVACC2\n 343 PUSHENVACC3\n 344 APPTERM2 3\n 346 ACC0\n 347 PUSHENVACC1\n 348 APPLY1\n 349 PUSHENVACC2\n 350 PUSHENVACC3\n 351 APPTERM2 3\n 353 ACC0\n 354 PUSHENVACC1\n 355 PUSHENVACC2\n 356 APPTERM2 3\n 358 ACC0\n 359 PUSHENVACC1\n 360 C_CALL2 caml_output_char\n 362 RETURN 1\n 364 CONSTINT 13\n 366 PUSHENVACC1\n 367 C_CALL2 caml_output_char\n 369 ENVACC1\n 370 C_CALL1 caml_flush\n 372 RETURN 1\n 374 ACC0\n 375 PUSHENVACC1\n 376 PUSHENVACC2\n 377 APPLY2\n 378 CONSTINT 13\n 380 PUSHENVACC1\n 381 C_CALL2 caml_output_char\n 383 RETURN 1\n 385 ACC0\n 386 PUSHENVACC1\n 387 APPLY1\n 388 PUSHENVACC2\n 389 PUSHENVACC3\n 390 APPTERM2 3\n 392 ACC0\n 393 PUSHENVACC1\n 394 APPLY1\n 395 PUSHENVACC2\n 396 PUSHENVACC3\n 397 APPTERM2 3\n 399 ACC0\n 400 PUSHENVACC1\n 401 PUSHENVACC2\n 402 APPTERM2 3\n 404 ACC0\n 405 PUSHENVACC1\n 406 C_CALL2 caml_output_char\n 408 RETURN 1\n 410 RESTART\n 411 GRAB 3\n 413 CONST0\n 414 PUSHACC3\n 415 LTINT\n 416 BRANCHIF 427\n 418 ACC1\n 419 C_CALL1 ml_string_length\n 421 PUSHACC4\n 422 PUSHACC4\n 423 ADDINT\n 424 GTINT\n 425 BRANCHIFNOT 432\n 427 GETGLOBAL \"really_input\"\n 429 PUSHENVACC1\n 430 APPTERM1 5\n 432 ACC3\n 433 PUSHACC3\n 434 PUSHACC3\n 435 PUSHACC3\n 436 PUSHENVACC2\n 437 APPTERM 4, 8\n 440 RESTART\n 441 GRAB 3\n 443 CONST0\n 444 PUSHACC3\n 445 LTINT\n 446 BRANCHIF 457\n 448 ACC1\n 449 C_CALL1 ml_string_length\n 451 PUSHACC4\n 452 PUSHACC4\n 453 ADDINT\n 454 GTINT\n 455 BRANCHIFNOT 462\n 457 GETGLOBAL \"input\"\n 459 PUSHENVACC1\n 460 APPTERM1 5\n 462 ACC3\n 463 PUSHACC3\n 464 PUSHACC3\n 465 PUSHACC3\n 466 C_CALL4 caml_input\n 468 RETURN 4\n 470 ACC0\n 471 PUSHCONST0\n 472 PUSHGETGLOBAL <0>(0, <0>(6, 0))\n 474 PUSHENVACC1\n 475 APPTERM3 4\n 477 ACC0\n 478 PUSHCONST0\n 479 PUSHGETGLOBAL <0>(0, <0>(7, 0))\n 481 PUSHENVACC1\n 482 APPTERM3 4\n 484 RESTART\n 485 GRAB 2\n 487 ACC1\n 488 PUSHACC1\n 489 PUSHACC4\n 490 C_CALL3 sys_open\n 492 C_CALL1 caml_open_descriptor\n 494 RETURN 3\n 496 ACC0\n 497 C_CALL1 caml_flush\n 499 ACC0\n 500 C_CALL1 caml_close_channel\n 502 RETURN 1\n 504 RESTART\n 505 GRAB 1\n 507 CONST0\n 508 PUSHACC2\n 509 PUSHACC2\n 510 C_CALL3 output_value\n 512 RETURN 2\n 514 RESTART\n 515 GRAB 3\n 517 CONST0\n 518 PUSHACC3\n 519 LTINT\n 520 BRANCHIF 531\n 522 ACC1\n 523 C_CALL1 ml_string_length\n 525 PUSHACC4\n 526 PUSHACC4\n 527 ADDINT\n 528 GTINT\n 529 BRANCHIFNOT 536\n 531 GETGLOBAL \"output\"\n 533 
PUSHENVACC1\n 534 APPTERM1 5\n 536 ACC3\n 537 PUSHACC3\n 538 PUSHACC3\n 539 PUSHACC3\n 540 C_CALL4 caml_output\n 542 RETURN 4\n 544 RESTART\n 545 GRAB 1\n 547 ACC1\n 548 C_CALL1 ml_string_length\n 550 PUSHCONST0\n 551 PUSHACC3\n 552 PUSHACC3\n 553 C_CALL4 caml_output\n 555 RETURN 2\n 557 ACC0\n 558 PUSHCONSTINT 438\n 560 PUSHGETGLOBAL <0>(1, <0>(3, <0>(4, <0>(6, 0))))\n 562 PUSHENVACC1\n 563 APPTERM3 4\n 565 ACC0\n 566 PUSHCONSTINT 438\n 568 PUSHGETGLOBAL <0>(1, <0>(3, <0>(4, <0>(7, 0))))\n 570 PUSHENVACC1\n 571 APPTERM3 4\n 573 RESTART\n 574 GRAB 2\n 576 ACC1\n 577 PUSHACC1\n 578 PUSHACC4\n 579 C_CALL3 sys_open\n 581 C_CALL1 caml_open_descriptor\n 583 RETURN 3\n 585 ACC0\n 586 PUSHGETGLOBAL \"%.12g\"\n 588 C_CALL2 format_float\n 590 RETURN 1\n 592 ACC0\n 593 PUSHGETGLOBAL \"%d\"\n 595 C_CALL2 format_int\n 597 RETURN 1\n 599 GETGLOBAL \"false\"\n 601 PUSHACC1\n 602 C_CALL2 string_equal\n 604 BRANCHIFNOT 609\n 606 CONST0\n 607 RETURN 1\n 609 GETGLOBAL \"true\"\n 611 PUSHACC1\n 612 C_CALL2 string_equal\n 614 BRANCHIFNOT 619\n 616 CONST1\n 617 RETURN 1\n 619 GETGLOBAL \"bool_of_string\"\n 621 PUSHENVACC1\n 622 APPTERM1 2\n 624 ACC0\n 625 BRANCHIFNOT 631\n 627 GETGLOBAL \"true\"\n 629 RETURN 1\n 631 GETGLOBAL \"false\"\n 633 RETURN 1\n 635 CONST0\n 636 PUSHACC1\n 637 LTINT\n 638 BRANCHIF 646\n 640 CONSTINT 255\n 642 PUSHACC1\n 643 GTINT\n 644 BRANCHIFNOT 651\n 646 GETGLOBAL \"char_of_int\"\n 648 PUSHENVACC1\n 649 APPTERM1 2\n 651 ACC0\n 652 RETURN 1\n 654 RESTART\n 655 GRAB 1\n 657 ACC0\n 658 C_CALL1 ml_string_length\n 660 PUSHACC2\n 661 C_CALL1 ml_string_length\n 663 PUSHACC0\n 664 PUSHACC2\n 665 ADDINT\n 666 C_CALL1 create_string\n 668 PUSHACC2\n 669 PUSHCONST0\n 670 PUSHACC2\n 671 PUSHCONST0\n 672 PUSHACC7\n 673 C_CALL5 blit_string\n 675 ACC1\n 676 PUSHACC3\n 677 PUSHACC2\n 678 PUSHCONST0\n 679 PUSHACC 8\n 681 C_CALL5 blit_string\n 683 ACC0\n 684 RETURN 5\n 686 CONSTINT -1\n 688 PUSHACC1\n 689 XORINT\n 690 RETURN 1\n 692 CONST0\n 693 PUSHACC1\n 694 GEINT\n 695 BRANCHIFNOT 700\n 697 ACC0\n 698 RETURN 1\n 700 ACC0\n 701 NEGINT\n 702 RETURN 1\n 704 RESTART\n 705 GRAB 1\n 707 ACC1\n 708 PUSHACC1\n 709 C_CALL2 greaterequal\n 711 BRANCHIFNOT 716\n 713 ACC0\n 714 RETURN 2\n 716 ACC1\n 717 RETURN 2\n 719 RESTART\n 720 GRAB 1\n 722 ACC1\n 723 PUSHACC1\n 724 C_CALL2 lessequal\n 726 BRANCHIFNOT 731\n 728 ACC0\n 729 RETURN 2\n 731 ACC1\n 732 RETURN 2\n 734 ACC0\n 735 PUSHGETGLOBAL Invalid_argument\n 737 MAKEBLOCK2 0\n 739 RAISE\n 740 ACC0\n 741 PUSHGETGLOBAL Failure\n 743 MAKEBLOCK2 0\n 745 RAISE\n 746 CLOSURE 0, 740\n 749 PUSH\n 750 CLOSURE 0, 734\n 753 PUSHGETGLOBAL \"Pervasives.Exit\"\n 755 MAKEBLOCK1 0\n 757 PUSHGETGLOBAL \"Pervasives.Assert_failure\"\n 759 MAKEBLOCK1 0\n 761 PUSH\n 762 CLOSURE 0, 720\n 765 PUSH\n 766 CLOSURE 0, 705\n 769 PUSH\n 770 CLOSURE 0, 692\n 773 PUSH\n 774 CLOSURE 0, 686\n 777 PUSHCONST0\n 778 PUSHCONSTINT 31\n 780 PUSHCONST1\n 781 LSLINT\n 782 EQ\n 783 BRANCHIFNOT 789\n 785 CONSTINT 30\n 787 BRANCH 791\n 789 CONSTINT 62\n 791 PUSHCONST1\n 792 LSLINT\n 793 PUSHACC0\n 794 OFFSETINT -1\n 796 PUSH\n 797 CLOSURE 0, 655\n 800 PUSHACC 9\n 802 CLOSURE 1, 635\n 805 PUSH\n 806 CLOSURE 0, 624\n 809 PUSHACC 11\n 811 CLOSURE 1, 599\n 814 PUSH\n 815 CLOSURE 0, 592\n 818 PUSH\n 819 CLOSURE 0, 585\n 822 PUSH\n 823 CLOSUREREC 0, 12\n 827 CONST0\n 828 C_CALL1 caml_open_descriptor\n 830 PUSHCONST1\n 831 C_CALL1 caml_open_descriptor\n 833 PUSHCONST2\n 834 C_CALL1 caml_open_descriptor\n 836 PUSH\n 837 CLOSURE 0, 574\n 840 PUSHACC0\n 841 CLOSURE 1, 565\n 844 PUSHACC1\n 845 CLOSURE 1, 557\n 848 
PUSH\n 849 CLOSURE 0, 545\n 852 PUSHACC 22\n 854 CLOSURE 1, 515\n 857 PUSH\n 858 CLOSURE 0, 505\n 861 PUSH\n 862 CLOSURE 0, 496\n 865 PUSH\n 866 CLOSURE 0, 485\n 869 PUSHACC0\n 870 CLOSURE 1, 477\n 873 PUSHACC1\n 874 CLOSURE 1, 470\n 877 PUSHACC 28\n 879 CLOSURE 1, 441\n 882 PUSH\n 883 CLOSUREREC 0, 32\n 887 ACC0\n 888 PUSHACC 31\n 890 CLOSURE 2, 411\n 893 PUSHACC 22\n 895 CLOSUREREC 1, 70\n 899 ACC 15\n 901 CLOSURE 1, 404\n 904 PUSHACC 11\n 906 PUSHACC 17\n 908 CLOSURE 2, 399\n 911 PUSHACC 12\n 913 PUSHACC 18\n 915 PUSHACC 23\n 917 CLOSURE 3, 392\n 920 PUSHACC 13\n 922 PUSHACC 19\n 924 PUSHACC 23\n 926 CLOSURE 3, 385\n 929 PUSHACC 14\n 931 PUSHACC 20\n 933 CLOSURE 2, 374\n 936 PUSHACC 20\n 938 CLOSURE 1, 364\n 941 PUSHACC 20\n 943 CLOSURE 1, 358\n 946 PUSHACC 17\n 948 PUSHACC 22\n 950 CLOSURE 2, 353\n 953 PUSHACC 18\n 955 PUSHACC 23\n 957 PUSHACC 29\n 959 CLOSURE 3, 346\n 962 PUSHACC 19\n 964 PUSHACC 24\n 966 PUSHACC 29\n 968 CLOSURE 3, 339\n 971 PUSHACC 20\n 973 PUSHACC 25\n 975 CLOSURE 2, 325\n 978 PUSHACC 25\n 980 CLOSURE 1, 315\n 983 PUSHACC 12\n 985 PUSHACC 28\n 987 PUSHACC 30\n 989 CLOSURE 3, 308\n 992 PUSHACC0\n 993 CLOSURE 1, 301\n 996 PUSHACC1\n 997 CLOSURE 1, 294\n 1000 PUSHACC 29\n 1002 PUSHACC 31\n 1004 CLOSURE 2, 286\n 1007 MAKEBLOCK1 0\n 1009 PUSHACC0\n 1010 CLOSURE 1, 275\n 1013 PUSHACC1\n 1014 CLOSURE 1, 263\n 1017 PUSHACC0\n 1018 CLOSURE 1, 255\n 1021 PUSHACC1\n 1022 PUSHACC 22\n 1024 PUSHACC4\n 1025 PUSHACC3\n 1026 PUSH\n 1027 CLOSURE 0, 247\n 1030 PUSH\n 1031 CLOSURE 0, 241\n 1034 PUSH\n 1035 CLOSURE 0, 236\n 1038 PUSH\n 1039 CLOSURE 0, 231\n 1042 PUSH\n 1043 CLOSURE 0, 223\n 1046 PUSH\n 1047 CLOSURE 0, 217\n 1050 PUSH\n 1051 CLOSURE 0, 212\n 1054 PUSH\n 1055 CLOSURE 0, 207\n 1058 PUSHACC 32\n 1060 PUSHACC 35\n 1062 PUSHACC 33\n 1064 PUSH\n 1065 CLOSURE 0, 202\n 1068 PUSHACC 41\n 1070 PUSHACC 40\n 1072 PUSHACC 42\n 1074 PUSH\n 1075 CLOSURE 0, 194\n 1078 PUSHACC 46\n 1080 PUSH\n 1081 CLOSURE 0, 188\n 1084 PUSH\n 1085 CLOSURE 0, 183\n 1088 PUSH\n 1089 CLOSURE 0, 175\n 1092 PUSHACC 51\n 1094 PUSH\n 1095 CLOSURE 0, 166\n 1098 PUSH\n 1099 CLOSURE 0, 157\n 1102 PUSHACC 55\n 1104 PUSHACC 57\n 1106 PUSH\n 1107 CLOSURE 0, 148\n 1110 PUSH\n 1111 CLOSURE 0, 142\n 1114 PUSHACC 63\n 1116 PUSHACC 62\n 1118 PUSHACC 64\n 1120 PUSHACC 38\n 1122 PUSHACC 40\n 1124 PUSHACC 42\n 1126 PUSHACC 44\n 1128 PUSHACC 46\n 1130 PUSHACC 48\n 1132 PUSHACC 50\n 1134 PUSHACC 52\n 1136 PUSHACC 54\n 1138 PUSHACC 56\n 1140 PUSHACC 58\n 1142 PUSHACC 60\n 1144 PUSHACC 62\n 1146 PUSHACC 64\n 1148 PUSHACC 66\n 1150 PUSHACC 82\n 1152 PUSHACC 84\n 1154 PUSHACC 86\n 1156 PUSHACC 88\n 1158 PUSHACC 90\n 1160 PUSHACC 92\n 1162 PUSHACC 94\n 1164 PUSHACC 96\n 1166 PUSHACC 98\n 1168 PUSHACC 100\n 1170 PUSHACC 104\n 1172 PUSHACC 104\n 1174 PUSHACC 104\n 1176 PUSHACC 108\n 1178 PUSHACC 110\n 1180 PUSHACC 112\n 1182 PUSHACC 117\n 1184 PUSHACC 117\n 1186 PUSHACC 117\n 1188 PUSHACC 117\n 1190 MAKEBLOCK 69, 0\n 1193 POP 53\n 1195 SETGLOBAL Pervasives\n 1197 CONST0\n 1198 C_CALL1 gc_compaction\n 1200 CONST0\n 1201 PUSHGETGLOBALFIELD Pervasives, 68\n 1204 APPLY1\n 1205 ATOM0\n 1206 SETGLOBAL T330-compact-2\n 1208 STOP\n**)\n"}}},{"rowIdx":610285,"cells":{"_id":{"kind":"string","value":"1066710e01cdda0f4ce9c743f62c21693a49348e3eff5e199fadcac5b51e17b5"},"repository":{"kind":"string","value":"shirok/WiLiKi"},"name":{"kind":"string","value":"rss.scm"},"content":{"kind":"string","value":";;;\n wiliki / rss - an ad - hoc RSS generation routine for WiLiKi\n;;;\n Copyright ( c ) 2000 - 2009 < >\n;;;\n;;; Permission is hereby granted, 
free of charge, to any person\n;;; obtaining a copy of this software and associated documentation\n;;; files (the \"Software\"), to deal in the Software without restriction,\n;;; including without limitation the rights to use, copy, modify,\n;;; merge, publish, distribute, sublicense, and/or sell copies of\n;;; the Software, and to permit persons to whom the Software is\n;;; furnished to do so, subject to the following conditions:\n;;;\n;;; The above copyright notice and this permission notice shall be\n;;; included in all copies or substantial portions of the Software.\n;;;\n;;; THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,\n;;; EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES\n;;; OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND\n;;; NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS\n;;; BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN\n;;; AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF\n;;; OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n;;; IN THE SOFTWARE.\n;;;\n\n;; In future, this might be rewritten to use proper XML framework.\n;; for now, I use an ad-hoc approach.\n\n(define-module wiliki.rss\n (use file.util)\n (use text.html-lite)\n (use text.tree)\n (use util.match)\n (use wiliki.core)\n (export rss-page rss-item-count rss-item-description rss-item-extra-elements\n rss-partial-content-lines rss-source rss-url-format))\n(select-module wiliki.rss)\n\n;; Parameters\n\n;; # of items included in the RSS\n(define rss-item-count (make-parameter 15))\n\n;; What to include in the 'rdf:description' of each item.\n;; none - omit rdf:description\n;; raw - raw wiki-marked up text.\n;; html - html rendered text. (heavy)\n(define rss-item-description (make-parameter 'none))\n\n;; # of maximum lines in the original wiki format to be included\n;; in the partial content (raw-partial, html-partial).\n(define rss-partial-content-lines (make-parameter 20))\n\n;; A procedure that takes maximum # of entries, and returns a list\n;; of entries to be included in the RSS. The returned list should be\n;; in the following form:\n;; <entries> : (<entry> ...)\n;; <entry> : (<key> . <timestamp>) | ((<key> . <title>) . 
<timestamp>)\n(define rss-source\n (make-parameter (cut wiliki:recent-changes-alist :length <>)))\n\n;; Whether the url in RSS should be in the format of url?key or url/key\n(define rss-url-format (make-parameter 'query))\n\n;; If not #f, this is inserted as is into each <item>...</item>\n(define rss-item-extra-elements (make-parameter #f))\n\n;; Main entry\n(define (rss-page :key\n (count (rss-item-count))\n (item-description #f))\n (rss-format ((rss-source) count)\n (case (or item-description (rss-item-description))\n [(raw) (cut raw-content <> #f)]\n [(raw-partial) (cut raw-content <> #t)]\n [(html) (cut html-content <> #f)]\n [(html-partial) (cut html-content <> #t)]\n [else (^_ \"\")])))\n\n(define (rss-format entries item-description-proc)\n (let* ([self (wiliki)]\n [full-url (wiliki:url :full)])\n `(\"Content-type: text/xml\\n\\n\"\n \"<?xml version=\\\"1.0\\\" encoding=\\\"\" ,(wiliki:output-charset) \"\\\" ?>\\n\"\n \"<rdf:RDF\n xmlns:rdf=\\\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\\\"\n xmlns=\\\"http://purl.org/rss/1.0/\\\"\n xmlns:dc=\\\"http://purl.org/dc/elements/1.1/\\\"\n xmlns:content=\\\"http://purl.org/rss/1.0/modules/content/\\\"\n >\\n\"\n ,(rdf-channel\n (wiliki:url :full)\n (rdf-title (ref (wiliki)'title))\n (rdf-link full-url)\n (rdf-description (ref (wiliki)'description))\n (rdf-items-seq (map (^e (rdf-li (entry->url e))) entries)))\n ,(map (^e (let1 url (entry->url e)\n (rdf-item url\n (rdf-title (entry->title e))\n (rdf-link url)\n (item-description-proc (entry->key e))\n (dc-date (entry->timestamp e))\n (or (rss-item-extra-elements) \"\")\n )))\n entries)\n \"</rdf:RDF>\\n\")))\n\n(define (raw-content entry partial?)\n (if-let1 page (wiliki:db-get entry)\n (rdf-description (trim-content (ref page 'content) partial?))\n \"\"))\n\n(define (html-content entry partial?)\n (if-let1 page (wiliki:db-get entry)\n ($ rdf-content $ tree->string $ map wiliki:sxml->stree\n $ wiliki:format-content $ trim-content (~ page'content) partial?)\n \"\"))\n\n(define (trim-content raw-text partial?)\n (if partial?\n (string-join (take* (string-split raw-text \"\\n\")\n (rss-partial-content-lines))\n \"\\n\")\n raw-text))\n\n(define (entry->url entry)\n (case (rss-url-format)\n [(query) (wiliki:url :full \"~a\" (entry->key entry))]\n [(path) (build-path (wiliki:url :full) (entry->key entry))]\n [else (wiliki:url :full \"config-error:invalid-rss-url-format\")]))\n\n(define (entry->title entry)\n (match entry [((key . title) . _) title] [(key . _) key]))\n\n(define (entry->key entry)\n (match entry [((key . title) . _) key] [(key . _) key]))\n\n(define (entry->timestamp entry) (cdr entry))\n\n;; RDF rendering utilities.\n;; NB: these should be implemented within xml framework\n(define (rdf-channel about . content)\n `(\"<channel rdf:about=\\\"\" ,(html-escape-string about) \"\\\">\"\n ,@content\n \"\\n</channel>\\n\"))\n\n(define (rdf-li resource)\n `(\"<rdf:li rdf:resource=\\\"\" ,(html-escape-string resource) \"\\\" />\\n\"))\n\n(define (rdf-simple tag . content)\n `(\"<\" ,tag \">\" ,@content \"</\" ,tag \">\\n\"))\n(define (rdf-item about . content)\n `(\"<item rdf:about=\\\"\" ,(html-escape-string about) \"\\\">\"\n ,@content\n \"</item>\\n\"))\n\n(define (rdf-items-seq . 
items)\n `(\"<items><rdf:Seq>\" ,@items \"</rdf:Seq></items>\\n\"))\n\n(define (rdf-simple-1 tag content)\n `(\"<\" ,tag \">\" ,(html-escape-string content) \"</\" ,tag \">\\n\"))\n\n(define (rdf-title title) (rdf-simple-1 \"title\" title))\n(define (rdf-link link) (rdf-simple-1 \"link\" link))\n(define (rdf-description desc) (rdf-simple-1 \"description\" desc))\n(define (rdf-content content)\n `(\"<content:encoded><![CDATA[\"\n ,(regexp-replace-all #/\\]\\]>/ content \"]]]]><![CDATA[>\")\n \"]]></content:encoded>\"))\n\n(define (dc-date secs)\n (rdf-simple-1 \"dc:date\"\n (sys-strftime \"%Y-%m-%dT%H:%M:%S+00:00\" (sys-gmtime secs))))\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/shirok/WiLiKi/c910d5d936c833887f7c7bc99e0e681e262b5334/src/wiliki/rss.scm"},"language":{"kind":"string","value":"scheme"},"comments":{"kind":"string","value":"\n\n\n Permission is hereby granted, free of charge, to any person\n obtaining a copy of this software and associated documentation\n including without limitation the rights to use, copy, modify,\n merge, publish, distribute, sublicense, and/or sell copies of\n furnished to do so, subject to the following conditions:\n\n The above copyright notice and this permission notice shall be\n\n EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES\n OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND\n AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF\n OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n IN THE SOFTWARE.\n\n In future, this might be rewritten to use proper XML framework.\n for now, I use an ad-hoc approach.\n Parameters\n What to include in the 'rdf:description' of each item.\n none - omit rdf:description\n raw - raw wiki-marked up text.\n html - html rendered text. (heavy)\n # of maximum lines in the original wiki format to be included\n in the partial content (raw-partial, html-partial).\n A procedure that takes maximum # of entries, and returns a list\n in the following form:\n <entries> : (<entry> ...)\n <entry> : (<key> . <timestamp>) | ((<key> . <title>) . <timestamp>)\n If not #f, this is inserted as is into each <item>...</item>\n Main entry"},"code":{"kind":"string","value":" wiliki / rss - an ad - hoc RSS generation routine for WiLiKi\n Copyright ( c ) 2000 - 2009 < >\n files ( the \" Software \" ) , to deal in the Software without restriction ,\n the Software , and to permit persons to whom the Software is\n included in all copies or substantial portions of the Software .\n THE SOFTWARE IS PROVIDED \" AS IS \" , WITHOUT WARRANTY OF ANY KIND ,\n NONINFRINGEMENT . IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS\n BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN\n\n\n(define-module wiliki.rss\n (use file.util)\n (use text.html-lite)\n (use text.tree)\n (use util.match)\n (use wiliki.core)\n (export rss-page rss-item-count rss-item-description rss-item-extra-elements\n rss-partial-content-lines rss-source rss-url-format))\n(select-module wiliki.rss)\n\n\n # of items included in the RSS\n(define rss-item-count (make-parameter 15))\n\n(define rss-item-description (make-parameter 'none))\n\n(define rss-partial-content-lines (make-parameter 20))\n\n of entries to be included in the RSS . 
The returned list should be\n(define rss-source\n (make-parameter (cut wiliki:recent-changes-alist :length <>)))\n\n Whether the url in RSS should be in the format of url?key or url / key\n(define rss-url-format (make-parameter 'query))\n\n(define rss-item-extra-elements (make-parameter #f))\n\n(define (rss-page :key\n (count (rss-item-count))\n (item-description #f))\n (rss-format ((rss-source) count)\n (case (or item-description (rss-item-description))\n [(raw) (cut raw-content <> #f)]\n [(raw-partial) (cut raw-content <> #t)]\n [(html) (cut html-content <> #f)]\n [(html-partial) (cut html-content <> #t)]\n [else (^_ \"\")])))\n\n(define (rss-format entries item-description-proc)\n (let* ([self (wiliki)]\n [full-url (wiliki:url :full)])\n `(\"Content-type: text/xml\\n\\n\"\n \"<?xml version=\\\"1.0\\\" encoding=\\\"\" ,(wiliki:output-charset) \"\\\" ?>\\n\"\n \"<rdf:RDF\n xmlns:rdf=\\\"-rdf-syntax-ns#\\\"\n xmlns=\\\"/\\\"\n xmlns:dc=\\\"/\\\"\n xmlns:content=\\\"/\\\"\n >\\n\"\n ,(rdf-channel\n (wiliki:url :full)\n (rdf-title (ref (wiliki)'title))\n (rdf-link full-url)\n (rdf-description (ref (wiliki)'description))\n (rdf-items-seq (map (^e (rdf-li (entry->url e))) entries)))\n ,(map (^e (let1 url (entry->url e)\n (rdf-item url\n (rdf-title (entry->title e))\n (rdf-link url)\n (item-description-proc (entry->key e))\n (dc-date (entry->timestamp e))\n (or (rss-item-extra-elements) \"\")\n )))\n entries)\n \"</rdf:RDF>\\n\")))\n\n(define (raw-content entry partial?)\n (if-let1 page (wiliki:db-get entry)\n (rdf-description (trim-content (ref page 'content) partial?))\n \"\"))\n\n(define (html-content entry partial?)\n (if-let1 page (wiliki:db-get entry)\n ($ rdf-content $ tree->string $ map wiliki:sxml->stree\n $ wiliki:format-content $ trim-content (~ page'content) partial?)\n \"\"))\n\n(define (trim-content raw-text partial?)\n (if partial?\n (string-join (take* (string-split raw-text \"\\n\")\n (rss-partial-content-lines))\n \"\\n\")\n raw-text))\n\n(define (entry->url entry)\n (case (rss-url-format)\n [(query) (wiliki:url :full \"~a\" (entry->key entry))]\n [(path) (build-path (wiliki:url :full) (entry->key entry))]\n [else (wiliki:url :full \"config-error:invalid-rss-url-format\")]))\n\n(define (entry->title entry)\n (match entry [((key . title) . _) title] [(key . _) key]))\n\n(define (entry->key entry)\n (match entry [((key . title) . _) key] [(key . _) key]))\n\n(define (entry->timestamp entry) (cdr entry))\n\n RDF rendering utilities .\n NB : these should be implemented within xml framework\n(define (rdf-channel about . content)\n `(\"<channel rdf:about=\\\"\" ,(html-escape-string about) \"\\\">\"\n ,@content\n \"\\n</channel>\\n\"))\n\n(define (rdf-li resource)\n `(\"<rdf:li rdf:resource=\\\"\" ,(html-escape-string resource) \"\\\" />\\n\"))\n\n(define (rdf-simple tag . content)\n `(\"<\" ,tag \">\" ,@content \"</\" ,tag \">\\n\"))\n(define (rdf-item about . content)\n `(\"<item rdf:about=\\\"\" ,(html-escape-string about) \"\\\">\"\n ,@content\n \"</item>\\n\"))\n\n(define (rdf-items-seq . 
items)\n `(\"<items><rdf:Seq>\" ,@items \"</rdf:Seq></items>\\n\"))\n\n(define (rdf-simple-1 tag content)\n `(\"<\" ,tag \">\" ,(html-escape-string content) \"</\" ,tag \">\\n\"))\n\n(define (rdf-title title) (rdf-simple-1 \"title\" title))\n(define (rdf-link link) (rdf-simple-1 \"link\" link))\n(define (rdf-description desc) (rdf-simple-1 \"description\" desc))\n(define (rdf-content content)\n `(\"<content:encoded><![CDATA[\"\n ,(regexp-replace-all #/\\]\\]>/ content \"]]]]><![CDATA[>\")\n \"]]></content:encoded>\"))\n\n(define (dc-date secs)\n (rdf-simple-1 \"dc:date\"\n (sys-strftime \"%Y-%m-%dT%H:%M:%S+00:00\" (sys-gmtime secs))))\n"}}},{"rowIdx":610286,"cells":{"_id":{"kind":"string","value":"c690364e591a5769521efd47dd5cff6c7cde5e8d612327964e51d04b0da6ea9d"},"repository":{"kind":"string","value":"tezos-checker/checker"},"name":{"kind":"string","value":"testChecker.ml"},"content":{"kind":"string","value":"open Ctok\nopen Kit\nopen Tok\nopen Lqt\nopen Burrow\nopen OUnit2\nopen TestLib\nopen CheckerTypes\nopen Fa2Interface\nopen Fa2Ledger\nopen Fa2Implementation\nopen Error\nopen Ptr\nopen LiquidationAuctionTypes\nopen LiquidationAuction\n\nlet property_test_count = 10000\nlet qcheck_to_ounit t = OUnit.ounit2_of_ounit1 @@ QCheck_ounit.to_ounit_test t\n\nmodule PtrMap = Map.Make(struct type t = ptr let compare = compare_ptr end)\n\nlet checker_address = !Ligo.Tezos.self_address\n\nlet empty_checker =\n initial_checker\n { ctok_fa2 = ctok_fa2_addr;\n ctez_cfmm = ctez_cfmm_addr;\n oracle = oracle_addr;\n collateral_fa2 = collateral_fa2_addr;\n }\n\n(* The starting checker state should satisfy the invariants to begin with. *)\nlet _ = Checker.assert_checker_invariants empty_checker\n\n Enhance the initial checker state with a populated cfmm in a consistent way .\nlet empty_checker_with_cfmm (cfmm: CfmmTypes.cfmm) =\n let checker_kit = kit_sub cfmm.kit (kit_of_denomination (Ligo.nat_from_literal \"1n\")) in\n let checker_liquidity = lqt_sub cfmm.lqt (lqt_of_denomination (Ligo.nat_from_literal \"1n\")) in\n let checker =\n { empty_checker with\n parameters = { empty_checker.parameters with circulating_kit = checker_kit };\n cfmm = cfmm;\n fa2_state =\n let fa2_state = initial_fa2_state in\n let fa2_state = ledger_issue_lqt (fa2_state, !Ligo.Tezos.self_address, checker_liquidity) in\n let fa2_state = ledger_issue_kit (fa2_state, !Ligo.Tezos.self_address, checker_kit) in\n fa2_state;\n } in\n Checker.assert_checker_invariants checker;\n checker\n\n Produces a checker state with burrows . \n * Returns a list of the liquidatable burrow ids , underburrowed burrow ids , and the contract state \n\n * Returns a list of the liquidatable burrow ids, underburrowed burrow ids, and the contract state\n*)\nlet checker_with_liquidatable_burrows () =\n let checker = empty_checker in\n (* Create some burrows and mint some kit *)\n let alice_burrow_1 = Ligo.nat_from_literal \"0n\" in\n let alice_burrow_nos = List.init 20 (fun i -> Ligo.nat_from_int64 (Int64.of_int (i+1))) in\n let bob_burrow_1 = Ligo.nat_from_literal \"0n\" in\n Alice burrow 1 . Will NOT be \n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:2 ~sender:alice_addr ~amount:Common.tez_zero;\n let _, checker = Checker.entrypoint_create_burrow (checker, (alice_burrow_1, None, tok_of_denomination (Ligo.nat_from_literal \"2_000_000n\"))) in\n burrow 2 : N. 
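;; A minimal usage sketch, not taken from the original source: it assumes a
;; Gauche environment in which this module and the surrounding WiLiKi
;; machinery are already loaded, and shows how the exported parameters above
;; might be adjusted with `parameterize' before rendering the feed with
;; `rss-page'.  The wrapper function name is purely illustrative.
(use wiliki.rss)

(define (recent-changes-feed)
  (parameterize ([rss-item-description 'html-partial] ;; rendered HTML, truncated...
                 [rss-partial-content-lines 10]       ;; ...to at most 10 wiki-source lines
                 [rss-item-count 30]                  ;; 30 items instead of the default 15
                 [rss-url-format 'path])              ;; item links as url/key instead of url?key
    (rss-page)))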
Will be \n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:3 ~sender:alice_addr ~amount:Common.tez_zero;\n let _, checker = Checker.entrypoint_mint_kit (checker, (alice_burrow_1, (kit_of_denomination (Ligo.nat_from_literal \"100n\")))) in\n let checker = List.fold_left (\n fun checker alice_burrow_no ->\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_create_burrow (checker, (alice_burrow_no, None, tok_of_denomination (Ligo.nat_from_literal \"2_000_000n\"))) in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker =\n let max_kit = (Checker.view_burrow_max_mintable_kit ((alice_addr, alice_burrow_no), checker)) in\n Checker.entrypoint_mint_kit (checker, (alice_burrow_no, max_kit)) in\n checker\n )\n checker\n alice_burrow_nos\n in\n Bob burrow 1 . Will be .\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bob_addr ~amount:Common.tez_zero;\n let _, checker = Checker.entrypoint_create_burrow (checker, (bob_burrow_1, None, tok_of_denomination (Ligo.nat_from_literal \"20_000_000n\"))) in\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bob_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker =\n let max_kit = (Checker.view_burrow_max_mintable_kit ((bob_addr, bob_burrow_1), checker)) in\n Checker.entrypoint_mint_kit (checker, (bob_burrow_1, max_kit)) in\n\n Increase value of kit to make some of the burrows by touching checker\n (* Note: setting the transaction to far in the future to ensure that the protected_index will become adequately high\n * for the burrows to be liquidatable.\n *)\n Ligo.Tezos.new_transaction ~seconds_passed:10_000_000 ~blocks_passed:100_000 ~sender:bob_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal \"1_100_000n\")) in\n (* Touch burrows *)\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_touch_burrow (checker, (alice_addr, alice_burrow_1)) in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_touch_burrow (checker, (bob_addr, bob_burrow_1)) in\n let checker = List.fold_left (\n fun checker alice_burrow_no ->\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_touch_burrow (checker, (alice_addr, alice_burrow_no)) in\n checker\n )\n checker\n alice_burrow_nos\n in\n\n (* Check the expected properties of this test fixture *)\n assert_bool \"alice_burrow_1 was liquidatable but it is expected to not be\"\n (not (Burrow.burrow_is_liquidatable checker.parameters (Option.get (Ligo.Big_map.find_opt (alice_addr, alice_burrow_1) checker.burrows))));\n assert_bool \"bob_burrow_1 was not liquidatable but it is expected to be\"\n (Burrow.burrow_is_liquidatable checker.parameters (Option.get (Ligo.Big_map.find_opt (bob_addr, bob_burrow_1) checker.burrows)));\n List.fold_left (\n fun _ alice_burrow_no ->\n assert_bool (\"alice_burrow_\" ^ (Ligo.string_of_nat alice_burrow_no) ^ \" was not liquidatable but it is expected to be\")\n (Burrow.burrow_is_liquidatable 
checker.parameters (Option.get (Ligo.Big_map.find_opt (alice_addr, alice_burrow_no) checker.burrows))))\n ()\n alice_burrow_nos;\n Checker.assert_checker_invariants checker;\n\n let liquidatable_burrow_ids = List.append (List.map (fun x -> (alice_addr, x)) alice_burrow_nos) [(bob_addr, bob_burrow_1)] in\n let underburrowed_burrow_ids = [(alice_addr, alice_burrow_1)] in\n liquidatable_burrow_ids, underburrowed_burrow_ids, checker\n\n(* Produces a checker state with liquidation slices in the queue but no current auction.\n * Returns a list of details for queued slices related to a Close liquidation,\n * a list of details for all other slices in the queue, and the contract state.\n*)\nlet checker_with_queued_liquidation_slices () =\n let liquidatable_burrow_ids, _, checker = checker_with_liquidatable_burrows () in\n Mark the burrows for liquidation . This will add slices to the queue .\n let checker, close_slice_details, other_slice_details = List.fold_left\n (fun (checker, close_liquidation_slices, other_liquidation_slices) burrow_id ->\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_mark_for_liquidation (checker, burrow_id) in\n let new_slice = Option.get (SliceList.slice_list_youngest (SliceList.slice_list_from_auction_state checker.liquidation_auctions burrow_id) checker.liquidation_auctions) in\n let slice_ptr = SliceList.slice_list_element_ptr new_slice in\n let slize_tez = (SliceList.slice_list_element_contents new_slice).tok in\n let is_burrow_now_closed = not (burrow_active (Option.get (Ligo.Big_map.find_opt burrow_id checker.burrows))) in\n let close_liquidation_slices, other_liquidation_slices =\n if is_burrow_now_closed then\n (List.append close_liquidation_slices [(burrow_id, slice_ptr, slize_tez)]), other_liquidation_slices\n else\n close_liquidation_slices, (List.append other_liquidation_slices [(burrow_id, slice_ptr, slize_tez)])\n in\n checker, close_liquidation_slices, other_liquidation_slices\n )\n (checker, [], [])\n liquidatable_burrow_ids\n in\n assert_bool\n \"liquidation auction queue was empty, but it was expected to have some slices\"\n (Option.is_some (Avl.avl_peek_front checker.liquidation_auctions.avl_storage checker.liquidation_auctions.queued_slices));\n assert (List.length close_slice_details > 0);\n assert (List.length other_slice_details > 0);\n close_slice_details, other_slice_details, checker\n\n(* Produces a checker state with an active liquidation auction *)\nlet checker_with_active_auction () =\n let _, _, checker = checker_with_queued_liquidation_slices () in\n Touch checker to start an auction\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_touch (checker, ()) in\n assert_bool \"a current liquidation auction should have been started but was not\" (Option.is_some checker.liquidation_auctions.current_auction);\n checker\n\n(* Produces a checker state with a completed liquidation auction *)\nlet checker_with_completed_auction () =\n let checker = checker_with_active_auction () in\n (* Get the current auction minimum bid *)\n let auction_details = Checker.view_current_liquidation_auction_details ((), checker) in\n (* Mint enough kit to bid *)\n let bidder = alice_addr in\n let new_burrow_no = Ligo.nat_from_literal \"100n\" in\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bidder 
~amount:Common.tez_zero;\n let _, checker = Checker.entrypoint_create_burrow (checker, (new_burrow_no, None, tok_of_denomination (Ligo.nat_from_literal \"1_000_000_000n\"))) in\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bidder ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_mint_kit (checker, (new_burrow_no, auction_details.minimum_bid)) in\n (* Place a bid *)\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bidder ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_liquidation_auction_place_bid (checker, ((Option.get checker.liquidation_auctions.current_auction).contents, auction_details.minimum_bid)) in\n (* Wait until enough time has passed for the auction to be completable then touch checker *)\n Touch checker to start an auction\n Ligo.Tezos.new_transaction ~seconds_passed:1202 ~blocks_passed:22 ~sender:bidder ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_touch (checker, ()) in\n assert_bool\n \"there was not a completed liquidation auction but one should exist\"\n (Option.is_some checker.liquidation_auctions.completed_auctions);\n bidder, checker\n\n Helper for creating new burrows and extracting their ID from the corresponding Ligo Ops\nlet newly_created_burrow (checker: checker) (burrow_no: string) (collateral: tok) : burrow_id * checker =\n let _ops, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal \"0n\", None, collateral)) in\n ((!Ligo.Tezos.sender, Ligo.nat_from_literal burrow_no), checker)\n\nlet get_balance_of (checker: checker) (addr: Ligo.address) (tok: fa2_token_id): Ligo.nat =\n let ops, _checker = Checker.strict_entrypoint_balance_of (checker, { requests = [{ owner=addr; token_id=tok }]; callback=Ligo.contract_of_address addr}) in\n match ops with\n | [ Transaction (FA2BalanceOfResponseTransactionValue [ { request = _; balance = kit } ], _, _) ] -> kit\n | _ -> failwith (\"Unexpected fa2 response, got: \" ^ show_operation_list ops)\n\nlet suite =\n \"Checker tests\" >::: [\n (\"initial touch (noop)\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let checker1 = empty_checker in\n let ops, checker2 = Checker.touch_with_index checker1 (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal \"0n\")) in\n\n assert_operation_list_equal ~expected:[] ~real:ops;\n assert_equal checker1 checker2; (* NOTE: we really want them to be identical here, hence the '='. 
*)\n ()\n );\n\n (\"create_burrow - updates checker storage\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n\n let burrow_id, checker = newly_created_burrow empty_checker \"0n\" (tok_of_denomination (Ligo.nat_from_literal \"1_000_000n\")) in\n\n assert_bool\n \"No matching burrow found after calling create_burrow\"\n (Option.is_some (Ligo.Big_map.find_opt burrow_id checker.burrows));\n assert_bool\n \"The burrow existed before calling create_burrow\"\n (Option.is_none (Ligo.Big_map.find_opt burrow_id empty_checker.burrows))\n );\n\n (\"create_burrow - collateral in burrow representation does not include creation deposit\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n\n let burrow_id, checker = newly_created_burrow empty_checker \"0n\" Constants.creation_deposit in\n\n let expected_collateral = tok_zero in\n match Ligo.Big_map.find_opt burrow_id checker.burrows with\n | Some burrow -> assert_tok_equal ~expected:expected_collateral ~real:(burrow_collateral burrow)\n | None -> assert_failure \"Expected a burrow representation to exist but none was found\"\n );\n\n (\"create_burrow - fails when transaction amount is one mutez below creation deposit\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let amount = tok_sub Constants.creation_deposit (tok_of_denomination (Ligo.nat_from_literal \"1n\")) in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n\n assert_raises\n (Failure (Ligo.string_of_int error_InsufficientFunds))\n (fun () -> Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal \"0n\", None, amount)))\n );\n\n (\"create_burrow - passes when transaction amount is exactly the creation deposit\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let burrow_id, checker = newly_created_burrow empty_checker \"0n\" Constants.creation_deposit in\n\n match Ligo.Big_map.find_opt burrow_id checker.burrows with\n | Some burrow ->\n assert_tok_equal ~expected:tok_zero ~real:(burrow_collateral burrow)\n | None -> assert_failure \"Expected a burrow representation to exist but none was found\"\n );\n\n (\"deposit_collateral - owner can deposit\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let initial_deposit = tok_of_denomination (Ligo.nat_from_literal \"3_000_000n\") in\n let deposit = tok_of_denomination (Ligo.nat_from_literal \"3_000_000n\") in\n let expected_collateral = tok_add deposit (tok_sub initial_deposit Constants.creation_deposit) in\n\n (* Create the burrow *)\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let (_, burrow_no) as burrow_id, checker = newly_created_burrow empty_checker \"0n\" initial_deposit in\n (* Make a deposit *)\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let _, checker = Checker.entrypoint_deposit_collateral (checker, (burrow_no, deposit)) in\n\n match Ligo.Big_map.find_opt burrow_id checker.burrows with\n | Some burrow -> assert_tok_equal ~expected:expected_collateral ~real:(burrow_collateral burrow)\n | None -> assert_failure \"Expected a burrow representation to exist but none was found\"\n );\n\n (\"deposit_collateral - non-owner cannot deposit\" >::\n fun _ ->\n Ligo.Tezos.reset 
();\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n\n let _, checker = newly_created_burrow empty_checker \"0n\" (tok_of_denomination (Ligo.nat_from_literal \"3_000_000n\"))in\n\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:Common.tez_zero;\n assert_raises\n (Failure (Ligo.string_of_int error_NonExistentBurrow))\n (fun () -> Checker.entrypoint_deposit_collateral (checker, (Ligo.nat_from_literal \"0n\", tok_of_denomination (Ligo.nat_from_literal \"1_000_000n\"))))\n );\n\n (\"withdraw_collateral - owner can withdraw\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let initial_deposit = tok_of_denomination (Ligo.nat_from_literal \"3_000_000n\") in\n let withdrawal = tok_of_denomination (Ligo.nat_from_literal \"1_000_000n\") in\n let expected_collateral = tok_sub initial_deposit (tok_add Constants.creation_deposit withdrawal) in\n\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let burrow_id, checker = newly_created_burrow empty_checker \"0n\" initial_deposit in\n\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let _, checker = Checker.entrypoint_withdraw_collateral (checker, (Ligo.nat_from_literal \"0n\", withdrawal)) in\n\n match Ligo.Big_map.find_opt burrow_id checker.burrows with\n | Some burrow -> assert_tok_equal ~expected:expected_collateral ~real:(burrow_collateral burrow)\n | None -> assert_failure \"Expected a burrow representation to exist but none was found\"\n );\n\n (\"withdraw_collateral - non-owner cannot withdraw\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let initial_deposit = tok_of_denomination (Ligo.nat_from_literal \"3_000_000n\") in\n let withdrawal = tok_of_denomination (Ligo.nat_from_literal \"1_000_000n\") in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let _, checker = newly_created_burrow empty_checker \"0n\" initial_deposit in\n\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n assert_raises\n (Failure (Ligo.string_of_int error_NonExistentBurrow))\n (fun () -> Checker.entrypoint_withdraw_collateral (checker, (Ligo.nat_from_literal \"0n\", withdrawal)))\n );\n\n (\"entrypoint_activate_burrow - emits expected operations\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n (* Create a burrow and deactivate it *)\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero;\n let (_, burrow_no), checker = newly_created_burrow empty_checker \"0n\" (tok_of_denomination (Ligo.nat_from_literal \"100_000_000n\"))in\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_deactivate_burrow (checker, (burrow_no, alice_addr)) in\n (* Then activate it *)\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero;\n let ops, _ = Checker.entrypoint_activate_burrow (checker, (burrow_no, Constants.creation_deposit)) in\n let burrow = Option.get (Ligo.Big_map.find_opt (alice_addr, burrow_no) checker.burrows) in\n let expected_ops = [\n (LigoOp.Tezos.fa2_transfer_transaction\n [ Fa2Interface.(\n { from_ = alice_addr;\n txs = [\n { to_ = burrow_address burrow;\n token_id = TokenMetadata.tok_token_id;\n amount = Ligo.nat_from_literal 
\"1_000_000n\";\n };\n ];\n }\n )\n ]\n (Ligo.tez_from_literal \"0mutez\")\n (Option.get (LigoOp.Tezos.get_entrypoint_opt \"%transfer\" checker.external_contracts.collateral_fa2))\n );\n ] in\n assert_operation_list_equal ~expected:expected_ops ~real:ops\n );\n\n (\"entrypoint_add_liquidity - emits expected operations\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let checker = empty_checker in\n (* Create a burrow and mint some kit *)\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero;\n let _, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal \"0n\", None, tok_of_denomination (Ligo.nat_from_literal \"100_000_000n\"))) in\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal \"0n\", (kit_of_denomination (Ligo.nat_from_literal \"10_000_000n\")))) in\n\n Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let ops, _ = Checker.entrypoint_add_liquidity\n (checker,\n (* Note: all values here were arbitrarily chosen based on the amount of kit we minted above *)\n ( ctok_of_denomination (Ligo.nat_from_literal \"5_000_000n\")\n , kit_of_denomination (Ligo.nat_from_literal \"5_000_000n\")\n , lqt_of_denomination (Ligo.nat_from_literal \"5_000_000n\")\n , Ligo.timestamp_from_seconds_literal 999\n )\n ) in\n\n let expected_ops = [\n (LigoOp.Tezos.fa2_transfer_transaction\n [ Fa2Interface.{\n from_ = alice_addr;\n txs = [\n { to_ = checker_address;\n token_id = TokenMetadata.ctok_token_id;\n amount = Ligo.nat_from_literal \"5_000_000n\";\n }\n ]\n }\n ]\n (Ligo.tez_from_literal \"0mutez\")\n (Option.get (LigoOp.Tezos.get_entrypoint_opt \"%transfer\" checker.external_contracts.ctok_fa2))\n );\n ] in\n assert_operation_list_equal ~expected:expected_ops ~real:ops\n );\n\n (\"entrypoint_burn_kit - emits expected operations\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let checker = empty_checker in\n (* Create a burrow and mint some kit *)\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero;\n let _, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal \"0n\", None, tok_of_denomination (Ligo.nat_from_literal \"100_000_000n\"))) in\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal \"0n\", (kit_of_denomination (Ligo.nat_from_literal \"10_000_000n\")))) in\n (* Then burn the kit *)\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let ops, _ = Checker.entrypoint_burn_kit (checker, (Ligo.nat_from_literal \"0n\", (kit_of_denomination (Ligo.nat_from_literal \"10_000_000n\")))) in\n assert_operation_list_equal ~expected:[] ~real:ops\n );\n\n (\"entrypoint_create_burrow - emits expected operations\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero;\n let amnt = tok_of_denomination (Ligo.nat_from_literal \"100_000_000n\") in\n let ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal \"0n\", None, amnt)) in\n match ops with\n Note : it 's not really possible to check the first 
parameter of the contract here which is the \n * function which defines the contract 's logic . \n \n * function which defines the contract's logic.\n *)\n | [ (CreateBurrowContract (_, delegate, tez, storage)) ;\n (Transaction (FA2TransferTransactionValue _, _, _)) as op;\n ] ->\n (* burrow creation values *)\n assert_key_hash_option_equal ~expected:None ~real:delegate;\n assert_tez_equal ~expected:Common.tez_zero ~real:tez;\n assert_equal BurrowTypes.({checker_address=checker_address; collateral_fa2=collateral_fa2_addr}) storage;\n (* collateral initialization values *)\n let burrow = Option.get (Ligo.Big_map.find_opt (alice_addr, (Ligo.nat_from_literal \"0n\")) checker.burrows) in\n assert_operation_equal\n ~expected:(\n LigoOp.Tezos.fa2_transfer_transaction\n [ Fa2Interface.(\n { from_ = alice_addr;\n txs = [\n { to_ = burrow_address burrow;\n token_id = TokenMetadata.tok_token_id;\n amount = tok_to_denomination_nat amnt;\n };\n ];\n }\n )\n ]\n (Ligo.tez_from_literal \"0mutez\")\n (Option.get (LigoOp.Tezos.get_entrypoint_opt \"%transfer\" checker.external_contracts.collateral_fa2))\n )\n ~real:op\n | _ -> failwith (\"Expected [CreateBurrowContract (_, _, _, _); Transaction (FA2TransferTransactionValue _, _, _)] but got \" ^ show_operation_list ops)\n );\n\n (\"entrypoint_deactivate_burrow - emits expected operations\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n (* Create a burrow and deactivate it *)\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero;\n let (_, burrow_no), checker = newly_created_burrow empty_checker \"0n\" (tok_of_denomination (Ligo.nat_from_literal \"100_000_000n\")) in\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let ops, checker = Checker.entrypoint_deactivate_burrow (checker, (burrow_no, alice_addr)) in\n let burrow = Option.get (Ligo.Big_map.find_opt (alice_addr, burrow_no) checker.burrows) in\n let expected_ops = [\n (LigoOp.Tezos.address_nat_transaction\n (alice_addr, (Ligo.nat_from_literal \"100_000_000n\"))\n (Ligo.tez_from_literal \"0mutez\")\n (Option.get (LigoOp.Tezos.get_entrypoint_opt \"%burrowTransfer\" (burrow_address burrow)))\n );\n ] in\n assert_operation_list_equal ~expected:expected_ops ~real:ops\n );\n\n (\"entrypoint_deposit_collateral - emits expected operations\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n (* Create the burrow *)\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let (_, burrow_no), checker = newly_created_burrow empty_checker \"0n\" (tok_of_denomination (Ligo.nat_from_literal \"3_000_000n\")) in\n (* Make a deposit *)\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let ops, checker = Checker.entrypoint_deposit_collateral (checker, (burrow_no, tok_of_denomination (Ligo.nat_from_literal \"3_000_000n\"))) in\n let burrow = Option.get (Ligo.Big_map.find_opt (alice_addr, burrow_no) checker.burrows) in\n let expected_ops = [\n (LigoOp.Tezos.fa2_transfer_transaction\n [ Fa2Interface.(\n { from_ = alice_addr;\n txs = [\n { to_ = burrow_address burrow;\n token_id = TokenMetadata.tok_token_id;\n amount = Ligo.nat_from_literal \"3_000_000n\";\n };\n ];\n }\n )\n ]\n (Ligo.tez_from_literal \"0mutez\")\n (Option.get (LigoOp.Tezos.get_entrypoint_opt \"%transfer\" checker.external_contracts.collateral_fa2))\n );\n ] in\n assert_operation_list_equal ~expected:expected_ops ~real:ops\n );\n\n 
(\"entrypoint_liquidation_auction_place_bid - emits expected operations\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let checker = checker_with_active_auction () in\n (* Lookup the current minimum bid *)\n let auction_details = Checker.view_current_liquidation_auction_details ((), checker) in\n Mint some kit to be able to bid\n let new_burrow_no = Ligo.nat_from_literal \"100n\" in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let _, checker = Checker.entrypoint_create_burrow (checker, (new_burrow_no, None, tok_of_denomination (Ligo.nat_from_literal \"1_000_000_000n\"))) in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_mint_kit (checker, (new_burrow_no, auction_details.minimum_bid)) in\n\n (* Place a bid *)\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let ops, _checker = Checker.entrypoint_liquidation_auction_place_bid\n (checker,\n ((Option.get checker.liquidation_auctions.current_auction).contents, auction_details.minimum_bid))\n in\n assert_operation_list_equal ~expected:[] ~real:ops\n );\n\n (\"entrypoint_mark_for_liquidation - emits expected operations\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n (* Use a checker state already containing some liquidatable burrows *)\n let liquidatable_burrow_ids, _, checker = checker_with_liquidatable_burrows () in\n let burrow_id = List.nth liquidatable_burrow_ids 0 in\n let sender = bob_addr in\n\n (* Mark one of the liquidatable burrows for liquidation *)\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:sender ~amount:(Ligo.tez_from_literal \"0mutez\");\n let ops, _ = Checker.entrypoint_mark_for_liquidation (checker, burrow_id) in\n\n let burrow = Option.get (Ligo.Big_map.find_opt burrow_id checker.burrows) in\n let expected_ops = [\n (LigoOp.Tezos.address_nat_transaction\n (sender, (Ligo.nat_from_literal \"1_001_000n\"))\n (Ligo.tez_from_literal \"0mutez\")\n (Option.get (LigoOp.Tezos.get_entrypoint_opt \"%burrowTransfer\" (burrow_address burrow)))\n );\n ] in\n assert_operation_list_equal ~expected:expected_ops ~real:ops\n );\n\n (\"entrypoint_cancel_liquidation_slice - emits expected operations\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n (* Use a checker state already containing some liquidatable burrows *)\n (* Note: using a non-closed burrow for this test so we don't have to also re-activate the burrow *)\n let _, slice_details, checker = checker_with_queued_liquidation_slices () in\n let ((burrow_owner, burrow_no), slice_ptr, _) = List.nth slice_details 0 in\n\n Deposit some extra collateral to one of the burrows with slices in the auction queue\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:burrow_owner ~amount:Common.tez_zero;\n let _, checker = Checker.entrypoint_deposit_collateral (checker, (burrow_no, tok_of_denomination (Ligo.nat_from_literal \"4_000_000n\"))) in\n\n Now cancel one of the burrow 's liquidation slices\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:burrow_owner ~amount:(Ligo.tez_from_literal \"0mutez\");\n let ops, _ = Checker.entrypoint_cancel_liquidation_slice (checker, slice_ptr) in\n assert_operation_list_equal ~expected:[] ~real:ops\n );\n\n (\"entrypoint_liquidation_auction_claim_win - emits expected operations\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let winning_bidder, checker = 
checker_with_completed_auction () in\n let auction_ptr = (Option.get checker.liquidation_auctions.completed_auctions).oldest in\n let sold_tok = (Option.get (Avl.avl_root_data checker.liquidation_auctions.avl_storage auction_ptr)).sold_tok in\n let slice_ptrs = avl_leaves_to_list checker.liquidation_auctions.avl_storage auction_ptr in\n\n (* Touch the remaining slices so the bid can be claimed. *)\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_touch_liquidation_slices (checker, slice_ptrs) in\n\n (* Claim the winning bid *)\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:winning_bidder ~amount:(Ligo.tez_from_literal \"0mutez\");\n let ops, _ = Checker.entrypoint_liquidation_auction_claim_win (checker, auction_ptr) in\n let expected_ops = [\n (LigoOp.Tezos.fa2_transfer_transaction\n [ Fa2Interface.(\n { from_ = !Ligo.Tezos.self_address;\n txs = [\n { to_ = winning_bidder;\n token_id = TokenMetadata.tok_token_id;\n amount = tok_to_denomination_nat sold_tok;\n };\n ];\n }\n )\n ]\n (Ligo.tez_from_literal \"0mutez\")\n (Option.get (LigoOp.Tezos.get_entrypoint_opt \"%transfer\" checker.external_contracts.collateral_fa2))\n );\n\n ] in\n assert_operation_list_equal ~expected:expected_ops ~real:ops\n );\n\n (\"entrypoint_mint_kit - emits expected operations\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let checker = empty_checker in\n (* Create a burrow and mint some kit *)\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero;\n let _, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal \"0n\", None, tok_of_denomination (Ligo.nat_from_literal \"100_000_000n\"))) in\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let ops, _ = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal \"0n\", (kit_of_denomination (Ligo.nat_from_literal \"10_000_000n\")))) in\n assert_operation_list_equal ~expected:[] ~real:ops\n );\n\n (\"entrypoint_set_burrow_delegate - emits expected operations\" >::\n fun _ ->\n (* NOTE: In a collateral=FA2 deployment this would actually fail. 
*)\n Ligo.Tezos.reset ();\n (* Create the burrow with no delegate *)\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let (_, burrow_no), checker = newly_created_burrow empty_checker \"0n\" (tok_of_denomination (Ligo.nat_from_literal \"3_000_000n\")) in\n (* Then set the burrow's delegate *)\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let ops, checker = Checker.entrypoint_set_burrow_delegate (checker, (burrow_no, Some charles_key_hash)) in\n let burrow = Option.get (Ligo.Big_map.find_opt (alice_addr, burrow_no) checker.burrows) in\n let expected_ops = [\n (LigoOp.Tezos.opt_key_hash_transaction\n (Some charles_key_hash)\n (Ligo.tez_from_literal \"0mutez\")\n (Option.get (LigoOp.Tezos.get_entrypoint_opt \"%burrowSetDelegate\" (burrow_address burrow)))\n );\n ] in\n assert_operation_list_equal ~expected:expected_ops ~real:ops\n );\n\n (\"entrypoint_receive_price - emits expected operations\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let checker = empty_checker in\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:(checker.external_contracts.oracle) ~amount:(Ligo.tez_from_literal \"0mutez\");\n let ops, _ = Checker.entrypoint_receive_price (checker, (Ligo.nat_from_literal \"42n\", Tok.tok_scaling_factor_nat)) in\n assert_operation_list_equal ~expected:[] ~real:ops\n );\n\n (\"entrypoint_remove_liquidity - emits expected operations\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let checker = empty_checker in\n (* Create a burrow and mint some kit *)\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero;\n let _, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal \"0n\", None, tok_of_denomination (Ligo.nat_from_literal \"100_000_000n\"))) in\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal \"0n\", (kit_of_denomination (Ligo.nat_from_literal \"10_000_000n\")))) in\n (* Add some liquidity to the contract *)\n Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_add_liquidity\n (checker,\n (* Note: all values here were arbitrarily chosen based on the amount of kit we minted above *)\n ( ctok_of_denomination (Ligo.nat_from_literal \"5_000_000n\")\n , kit_of_denomination (Ligo.nat_from_literal \"5_000_000n\")\n , lqt_of_denomination (Ligo.nat_from_literal \"5_000_000n\")\n , Ligo.timestamp_from_seconds_literal 999\n )\n ) in\n (* Now remove the liquidity *)\n Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let ops, _ = Checker.entrypoint_remove_liquidity\n (checker,\n (* Note: all values here were arbitrarily chosen based on the amount of kit we minted above *)\n ( lqt_of_denomination (Ligo.nat_from_literal \"5_000_000n\")\n , ctok_of_denomination (Ligo.nat_from_literal \"5_000_000n\")\n , kit_of_denomination (Ligo.nat_from_literal \"5_000_000n\")\n , Ligo.timestamp_from_seconds_literal 999\n )\n ) in\n\n let expected_ops = [\n (LigoOp.Tezos.fa2_transfer_transaction\n [ Fa2Interface.{\n from_ = checker_address;\n txs = [\n { to_ = alice_addr;\n token_id = TokenMetadata.ctok_token_id;\n amount = Ligo.nat_from_literal 
\"5_000_000n\";\n }\n ]\n }\n ]\n (Ligo.tez_from_literal \"0mutez\")\n (Option.get (LigoOp.Tezos.get_entrypoint_opt \"%transfer\" checker.external_contracts.ctok_fa2))\n\n\n );\n ] in\n assert_operation_list_equal ~expected:expected_ops ~real:ops\n );\n\n FIXME : Operations differ between the FA2 deployment and the TEZ deployment \n ( \" entrypoint_touch - emits expected operations when checker needs to be touched \" > : : \n fun _ - > \n Ligo.Tezos.reset ( ) ; \n let checker = empty_checker in \n Ligo . Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender : alice_addr ~amount:(Ligo.tez_from_literal \" 0mutez \" ) ; \n let ops , _ = Checker.entrypoint_touch ( checker , ( ) ) in \n\n let expected_ops = [ \n ( LigoOp . Tezos.nat_contract_transaction \n ( Option.get ( LigoOp . Tezos.get_entrypoint_opt \" % receive_price \" ! . ) ) \n ( Ligo.tez_from_literal \" 0mutez \" ) \n ( CheckerTypes.get_oracle_entrypoint checker.external_contracts ) \n ) ; \n ( LigoOp . Tezos.nat_nat_contract_transaction \n ( Option.get ( LigoOp . Tezos.get_entrypoint_opt \" % receive_ctez_marginal_price \" ! . ) ) \n ( Ligo.tez_from_literal \" 0mutez \" ) \n ( CheckerTypes.get_ctez_cfmm_price_entrypoint checker.external_contracts ) \n ) ; \n ] in \n assert_operation_list_equal ~expected : expected_ops ~real : ops \n ) ; \n \n (\"entrypoint_touch - emits expected operations when checker needs to be touched\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let checker = empty_checker in\n Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let ops, _ = Checker.entrypoint_touch (checker, ()) in\n\n let expected_ops = [\n (LigoOp.Tezos.nat_contract_transaction\n (Option.get (LigoOp.Tezos.get_entrypoint_opt \"%receive_price\" !Ligo.Tezos.self_address))\n (Ligo.tez_from_literal \"0mutez\")\n (CheckerTypes.get_oracle_entrypoint checker.external_contracts)\n );\n (LigoOp.Tezos.nat_nat_contract_transaction\n (Option.get (LigoOp.Tezos.get_entrypoint_opt \"%receive_ctez_marginal_price\" !Ligo.Tezos.self_address))\n (Ligo.tez_from_literal \"0mutez\")\n (CheckerTypes.get_ctez_cfmm_price_entrypoint checker.external_contracts)\n );\n ] in\n assert_operation_list_equal ~expected:expected_ops ~real:ops\n );\n *)\n\n (\"entrypoint_touch - emits expected operations when checker has already been touched\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let checker = empty_checker in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let ops, _ = Checker.entrypoint_touch (checker, ()) in\n assert_operation_list_equal ~expected:[] ~real:ops\n );\n\n (\"entrypoint_touch_liquidation_slices - emits expected operations\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let _, checker = checker_with_completed_auction () in\n let auction_ptr = (Option.get checker.liquidation_auctions.completed_auctions).oldest in\n let slice_ptrs = avl_leaves_to_list checker.liquidation_auctions.avl_storage auction_ptr in\n let slices = List.map (fun ptr -> Avl.avl_read_leaf checker.liquidation_auctions.avl_storage ptr) slice_ptrs in\n\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let ops, _ = Checker.entrypoint_touch_liquidation_slices (checker, slice_ptrs) in\n Note : opening LiquidationAuctionPrimitiveTypes locally here since we have overloaded \n * the \" contents \" record accessor in LiquidationAuctionTypes \n \n * the \"contents\" 
record accessor in LiquidationAuctionTypes\n *)\n\n let expected_ops = let open LiquidationAuctionPrimitiveTypes in\n List.rev (List.map (\n fun slice ->\n let burrow = Option.get (Ligo.Big_map.find_opt slice.contents.burrow checker.burrows) in\n LigoOp.Tezos.address_nat_transaction\n (checker_address, tok_to_denomination_nat slice.contents.tok)\n (Ligo.tez_from_literal \"0mutez\")\n (Option.get (LigoOp.Tezos.get_entrypoint_opt \"%burrowTransfer\" (burrow_address burrow)))\n ) slices) in\n assert_operation_list_equal ~expected:expected_ops ~real:ops\n );\n\n (\"entrypoint_touch_burrow - emits expected operations\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n (* Create the burrow *)\n Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero;\n let _, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal \"0n\", None, tok_of_denomination (Ligo.nat_from_literal \"100_000_000n\"))) in\n (* Then touch it *)\n Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:bob_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let ops, _ = Checker.entrypoint_touch_burrow (checker, (alice_addr, Ligo.nat_from_literal \"0n\")) in\n assert_operation_list_equal ~expected:[] ~real:ops\n );\n\n (\"entrypoint_withdraw_collateral - emits expected operations\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n (* Create a burrow *)\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let (_, burrow_no), checker = newly_created_burrow empty_checker \"0n\" (tok_of_denomination (Ligo.nat_from_literal \"3_000_000n\")) in\n (* Try to withdraw some tez from the untouched burrow *)\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let ops, checker = Checker.entrypoint_withdraw_collateral (checker, (Ligo.nat_from_literal \"0n\", tok_of_denomination (Ligo.nat_from_literal \"1_000_000n\"))) in\n let burrow = Option.get (Ligo.Big_map.find_opt (alice_addr, burrow_no) checker.burrows) in\n let expected_ops = [\n (LigoOp.Tezos.address_nat_transaction\n (alice_addr, (Ligo.nat_from_literal \"1_000_000n\"))\n (Ligo.tez_from_literal \"0mutez\")\n (Option.get (LigoOp.Tezos.get_entrypoint_opt \"%burrowTransfer\" (burrow_address burrow)))\n );\n ] in\n assert_operation_list_equal ~expected:expected_ops ~real:ops\n );\n\n (\"calculate_touch_reward - expected result for last_touched 2s ago\" >::\n fun _ ->\n The division in this case should return a remainder < 1/2\n Ligo.Tezos.reset ();\n let time_delta = 2 in\n remainder : 12000 / 36000\n let expected_reward = Ligo.int_from_literal \"3333\" in\n let last_touched = Ligo.timestamp_from_seconds_literal 0 in\n Ligo.Tezos.new_transaction ~seconds_passed:time_delta ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n\n let actual_reward = kit_to_denomination_int (Checker.calculate_touch_reward last_touched) in\n\n assert_int_equal ~expected:expected_reward ~real:actual_reward;\n );\n\n (\"calculate_touch_reward - expected result for last_touched 3s ago\" >::\n fun _ ->\n (* The division in this case should produce no remainder *)\n Ligo.Tezos.reset ();\n let time_delta = 3 in\n remainder : 0\n let expected_reward = Ligo.int_from_literal \"5000\" in\n let last_touched = Ligo.timestamp_from_seconds_literal 0 in\n Ligo.Tezos.new_transaction ~seconds_passed:time_delta ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n\n let 
actual_reward = kit_to_denomination_int (Checker.calculate_touch_reward last_touched) in\n\n assert_int_equal ~expected:expected_reward ~real:actual_reward;\n );\n\n (\"calculate_touch_reward - expected result for last_touched 4s ago\" >::\n fun _ ->\n The division in this case should return a remainder > 1/2\n Ligo.Tezos.reset ();\n let time_delta = 4 in\n remainder : 24000 / 36000\n let expected_reward = Ligo.int_from_literal \"6666\" in\n let last_touched = Ligo.timestamp_from_seconds_literal 0 in\n Ligo.Tezos.new_transaction ~seconds_passed:time_delta ~blocks_passed:2 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n\n let actual_reward = kit_to_denomination_int (Checker.calculate_touch_reward last_touched) in\n\n assert_int_equal ~expected:expected_reward ~real:actual_reward;\n\n );\n\n (\"burn_kit - owner can burn\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n\n let sender = alice_addr in\n\n (* Create a burrow *)\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:Common.tez_zero;\n let _, checker = newly_created_burrow empty_checker \"0n\" (tok_of_denomination (Ligo.nat_from_literal \"10_000_000n\")) in\n\n Mint as much kit as possible\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal \"0mutez\");\n let (ops, checker) =\n Checker.entrypoint_mint_kit\n ( checker\n , (Ligo.nat_from_literal \"0n\", kit_of_denomination (Ligo.nat_from_literal \"4_285_714n\"))\n ) in\n\n (* There should be no operations emitted. *)\n assert_operation_list_equal ~expected:[] ~real:ops;\n\n (* The owner should be able to burn it back. *)\n let kit_token = kit_of_denomination (Fa2Ledger.get_fa2_ledger_value checker.fa2_state.ledger (TokenMetadata.kit_token_id, sender)) in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _ = Checker.entrypoint_burn_kit (checker, (Ligo.nat_from_literal \"0n\", kit_token)) in\n\n ()\n );\n\n (\"burn_kit - non-owner cannot burn\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n (* Create a burrow *)\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let _, checker = newly_created_burrow empty_checker \"0n\" (tok_of_denomination (Ligo.nat_from_literal \"10_000_000n\")) in\n\n Mint as much kit as possible\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let (ops, checker) =\n Checker.entrypoint_mint_kit\n ( checker\n , (Ligo.nat_from_literal \"0n\", kit_of_denomination (Ligo.nat_from_literal \"4_285_714n\"))\n ) in\n\n (* There should be no operations emitted. *)\n assert_operation_list_equal ~expected:[] ~real:ops;\n\n (* Have the wrong person try to burn it back; this should fail. 
*)\n assert_raises\n (Failure (Ligo.string_of_int error_NonExistentBurrow))\n (fun () ->\n let kit_token = kit_of_denomination (Fa2Ledger.get_fa2_ledger_value checker.fa2_state.ledger (TokenMetadata.kit_token_id, bob_addr)) in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n Checker.entrypoint_burn_kit (checker, (Ligo.nat_from_literal \"0n\", kit_token))\n );\n\n ()\n );\n\n (\n Ligo.Tezos.reset();\n\n qcheck_to_ounit\n @@ QCheck.Test.make\n ~name:\"test_buy_kit_respects_min_kit_expected\"\n ~count:property_test_count\n make_inputs_for_buy_kit_to_succeed\n @@ fun (cfmm, ctok_amount, min_kit_expected, deadline) ->\n\n let sender = alice_addr in\n let checker = empty_checker_with_cfmm cfmm in\n\n let senders_old_kit = Fa2Ledger.get_fa2_ledger_value checker.fa2_state.ledger (TokenMetadata.kit_token_id, sender) in (* before *)\n\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal \"0mutez\");\n let ops, checker = Checker.entrypoint_buy_kit (checker, (ctok_amount, min_kit_expected, deadline)) in\n\n let senders_new_kit = Fa2Ledger.get_fa2_ledger_value checker.fa2_state.ledger (TokenMetadata.kit_token_id, sender) in (* after *)\n\n begin match ops with\n | [Transaction (FA2TransferTransactionValue transfer, _, _)] ->\n assert_fa2_transfer_list_equal\n ~expected:[\n Fa2Interface.{\n from_ = sender;\n txs = [\n { to_ = checker_address;\n token_id = TokenMetadata.ctok_token_id;\n amount = ctok_to_denomination_nat ctok_amount;\n }\n ]\n }\n ]\n ~real:transfer\n | _ -> failwith (\"Expected [Transaction (FA2TransferTransactionValue _, _, _)] but got \" ^ show_operation_list ops)\n end;\n\n Ligo.geq_nat_nat\n senders_new_kit\n (Ligo.add_nat_nat senders_old_kit (kit_to_denomination_nat min_kit_expected))\n );\n\n (\n Ligo.Tezos.reset();\n\n qcheck_to_ounit\n @@ QCheck.Test.make\n ~name:\"test_buy_kit_preserves_kit\"\n ~count:property_test_count\n make_inputs_for_buy_kit_to_succeed\n @@ fun (cfmm, ctok_amount, min_kit_expected, deadline) ->\n\n let checker = empty_checker_with_cfmm cfmm in\n let sender = alice_addr in\n\n let checker_cfmm_old_kit = kit_to_denomination_nat checker.cfmm.kit in\n let senders_old_kit = Fa2Ledger.get_fa2_ledger_value checker.fa2_state.ledger (TokenMetadata.kit_token_id, sender) in (* before *)\n\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal \"0mutez\");\n let ops, checker = Checker.entrypoint_buy_kit (checker, (ctok_amount, min_kit_expected, deadline)) in\n\n let checker_cfmm_new_kit = kit_to_denomination_nat checker.cfmm.kit in\n let senders_new_kit = Fa2Ledger.get_fa2_ledger_value checker.fa2_state.ledger (TokenMetadata.kit_token_id, sender) in (* after *)\n\n begin match ops with\n | [Transaction (FA2TransferTransactionValue transfer, _, _)] ->\n assert_fa2_transfer_list_equal\n ~expected:[\n Fa2Interface.{\n from_ = sender;\n txs = [\n { to_ = checker_address;\n token_id = TokenMetadata.ctok_token_id;\n amount = ctok_to_denomination_nat ctok_amount;\n }\n ]\n }\n ]\n ~real:transfer\n | _ -> failwith (\"Expected [Transaction (FA2TransferTransactionValue _, _, _)] but got \" ^ show_operation_list ops)\n end;\n\n Ligo.eq_nat_nat\n (Ligo.add_nat_nat checker_cfmm_old_kit senders_old_kit)\n (Ligo.add_nat_nat checker_cfmm_new_kit senders_new_kit)\n );\n\n (\n Ligo.Tezos.reset();\n\n qcheck_to_ounit\n @@ QCheck.Test.make\n ~name:\"test_buy_kit_preserves_tez\"\n 
~count:property_test_count\n make_inputs_for_buy_kit_to_succeed\n @@ fun (cfmm, ctok_amount, min_kit_expected, deadline) ->\n let checker = empty_checker_with_cfmm cfmm in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, new_checker = Checker.entrypoint_buy_kit (checker, (ctok_amount, min_kit_expected, deadline)) in\n ctok_add checker.cfmm.ctok ctok_amount = new_checker.cfmm.ctok\n );\n\n (\n Ligo.Tezos.reset();\n\n qcheck_to_ounit\n @@ QCheck.Test.make\n ~name:\"test_sell_kit_respects_min_tez_expected\"\n ~count:property_test_count\n make_inputs_for_sell_kit_to_succeed\n @@ fun (cfmm, kit_amount, min_ctok_expected, deadline) ->\n let sender = alice_addr in\n let checker =\n let checker = empty_checker_with_cfmm cfmm in\n { checker with\n parameters =\n { checker.parameters with circulating_kit = kit_add checker.parameters.circulating_kit kit_amount };\n fa2_state = ledger_issue_kit (checker.fa2_state, sender, kit_amount);\n } in\n Checker.assert_checker_invariants checker;\n\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal \"0mutez\");\n let ops, _ = Checker.entrypoint_sell_kit (checker, (kit_amount, min_ctok_expected, deadline)) in\n let bought_muctok = match ops with\n | [Transaction (FA2TransferTransactionValue [{from_=from_address; txs=[tx];}], _, _)] ->\n begin\n assert_address_equal ~expected:checker_address ~real:from_address;\n assert_address_equal ~expected:sender ~real:tx.to_;\n tx.amount\n end\n | _ -> failwith (\"Expected [Transaction (FA2TransferTransactionValue [{from_=_; txs=[_];}], _, _)] but got \" ^ show_operation_list ops)\n in\n ctok_of_denomination bought_muctok >= min_ctok_expected\n );\n\n (\n Ligo.Tezos.reset();\n\n qcheck_to_ounit\n @@ QCheck.Test.make\n ~name:\"test_sell_kit_preserves_kit\"\n ~count:property_test_count\n make_inputs_for_sell_kit_to_succeed\n @@ fun (cfmm, kit_amount, min_ctok_expected, deadline) ->\n let sender = alice_addr in\n let checker =\n let checker = empty_checker_with_cfmm cfmm in\n { checker with\n parameters =\n { checker.parameters with circulating_kit = kit_add checker.parameters.circulating_kit kit_amount };\n fa2_state = ledger_issue_kit (checker.fa2_state, sender, kit_amount);\n } in\n Checker.assert_checker_invariants checker;\n\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, new_checker = Checker.entrypoint_sell_kit (checker, (kit_amount, min_ctok_expected, deadline)) in\n kit_add checker.cfmm.kit kit_amount = new_checker.cfmm.kit\n );\n\n (\n Ligo.Tezos.reset();\n\n qcheck_to_ounit\n @@ QCheck.Test.make\n ~name:\"test_sell_kit_preserves_tez\"\n ~count:property_test_count\n make_inputs_for_sell_kit_to_succeed\n @@ fun (cfmm, kit_amount, min_ctok_expected, deadline) ->\n let sender = alice_addr in\n let checker =\n let checker = empty_checker_with_cfmm cfmm in\n { checker with\n parameters =\n { checker.parameters with circulating_kit = kit_add checker.parameters.circulating_kit kit_amount };\n fa2_state = ledger_issue_kit (checker.fa2_state, sender, kit_amount);\n } in\n Checker.assert_checker_invariants checker;\n\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal \"0mutez\");\n let ops, new_checker = Checker.entrypoint_sell_kit (checker, (kit_amount, min_ctok_expected, deadline)) in\n\n let bought_muctok = match ops with\n | [Transaction 
(FA2TransferTransactionValue [{from_=from_address; txs=[tx];}], _, _)] ->\n begin\n assert_address_equal ~expected:checker_address ~real:from_address;\n assert_address_equal ~expected:sender ~real:tx.to_;\n tx.amount\n end\n | _ -> failwith (\"Expected [Transaction (FA2TransferTransactionValue [{from_=_; txs=[_];}], _, _)] but got \" ^ show_operation_list ops)\n in\n ctok_add new_checker.cfmm.ctok (ctok_of_denomination bought_muctok) = checker.cfmm.ctok\n );\n\n (\n let cfmm_kit = Ligo.nat_from_literal (\"1_000n\") in\n let cfmm_ctok = ctok_of_denomination (Ligo.nat_from_literal (\"1_000n\")) in\n (* The maximum amount of kit that you can buy with a finite amount of tez is\n * (1 - fee) * cfmm.kit - 1\n *)\n let max_buyable_kit = 997 in\n let arb_kit = QCheck.map (fun x -> kit_of_denomination (Ligo.nat_from_literal (string_of_int x ^ \"n\"))) QCheck.(1 -- max_buyable_kit) in\n let arb_tez = TestArbitrary.arb_small_positive_tez in\n\n qcheck_to_ounit\n @@ QCheck.Test.make\n ~name:\"buy_kit - returns geq min_kit_expected kit for transactions with sufficient tez\"\n ~count:property_test_count\n (QCheck.pair arb_kit arb_tez)\n @@ fun (min_expected_kit, additional_tez) ->\n\n Ligo.Tezos.reset();\n let sender = alice_addr in\n\n (* Populate cfmm with initial liquidity *)\n let open Ratio in\n let checker =\n empty_checker_with_cfmm\n { empty_checker.cfmm with\n ctok = cfmm_ctok;\n kit = kit_of_denomination cfmm_kit;\n } in\n\n (* Calculate minimum tez to get the min_expected kit given the state of the cfmm defined above *)\n let ratio_minimum_tez = div_ratio\n (ratio_of_nat cfmm_kit)\n (\n sub_ratio\n (div_ratio (ratio_of_nat (Ligo.nat_from_literal \"998n\")) (ratio_of_nat (kit_to_denomination_nat min_expected_kit)))\n (ratio_of_nat (Ligo.nat_from_literal \"1n\"))\n ) in\n let minimum_tez = Ligo.mul_nat_tez (Ligo.abs (Common.cdiv_int_int ratio_minimum_tez.num ratio_minimum_tez.den)) (Ligo.tez_from_literal \"1mutez\") in\n (* Adjust transaction by a random amount of extra tez *)\n let ctok_provided = Ctok.ctok_of_denomination (Common.tez_to_mutez_nat (Ligo.add_tez_tez minimum_tez additional_tez)) in (* UNSAFE CAST *)\n\n let senders_old_kit = Fa2Ledger.get_fa2_ledger_value checker.fa2_state.ledger (TokenMetadata.kit_token_id, sender) in (* before *)\n\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal \"0mutez\");\n let ops, checker = Checker.entrypoint_buy_kit (checker, (ctok_provided, min_expected_kit, Ligo.timestamp_from_seconds_literal 1)) in\n\n begin match ops with\n | [Transaction (FA2TransferTransactionValue transfer, _, _)] ->\n assert_fa2_transfer_list_equal\n ~expected:[\n Fa2Interface.{\n from_ = sender;\n txs = [\n { to_ = checker_address;\n token_id = TokenMetadata.ctok_token_id;\n amount = Ctok.ctok_to_denomination_nat ctok_provided;\n }\n ]\n }\n ]\n ~real:transfer\n | _ -> failwith (\"Expected [Transaction (FA2TransferTransactionValue _, _, _)] but got \" ^ show_operation_list ops)\n end;\n\n let senders_new_kit = Fa2Ledger.get_fa2_ledger_value checker.fa2_state.ledger (TokenMetadata.kit_token_id, sender) in (* after *)\n\n Ligo.geq_nat_nat\n senders_new_kit\n (Ligo.add_nat_nat senders_old_kit (kit_to_denomination_nat min_expected_kit))\n (* FIXME: This test only rarely evaluates the 'eq' part of 'geq'. Reducing the range of possible `additional_tez` or increasing the\n * number of QCheck samples may improve this.\n *)\n );\n\n (* FIXME: DISABLING THIS UNIT TEST. 
Disabled this unit test which was written for the case of indexCfmm.ml. Once we have\n * a better way of testing different concrete cfmm implementations we should be able to re-enable this. *)\n\n (* (\"buy_kit - returns expected kit\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n (* Populate the cfmm with some liquidity *)\n let checker =\n empty_checker_with_cfmm\n { empty_checker.cfmm with\n ctok = ctok_of_denomination (Ligo.nat_from_literal \"2n\");\n kit = kit_of_denomination (Ligo.nat_from_literal \"2n\");\n } in\n\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let ops, checker = Checker.entrypoint_buy_kit (checker, (ctok_of_denomination (Ligo.nat_from_literal \"1_000_000n\"), kit_of_denomination (Ligo.nat_from_literal \"1n\"), Ligo.timestamp_from_seconds_literal 1)) in\n let kit = get_balance_of checker alice_addr TokenMetadata.kit_token_id in\n\n let expected_ops = [\n (LigoOp.Tezos.fa2_transfer_transaction\n [ Fa2Interface.{\n from_ = alice_addr;\n txs = [\n { to_ = checker_address;\n token_id = TokenMetadata.ctok_token_id;\n amount = Ligo.nat_from_literal \"1_000_000n\";\n }\n ]\n }\n ]\n (Ligo.tez_from_literal \"0mutez\")\n (Option.get (LigoOp.Tezos.get_entrypoint_opt \"%transfer\" checker.external_contracts.ctok_fa2))\n );\n ] in\n assert_nat_equal ~expected:(Ligo.nat_from_literal \"1n\") ~real:kit;\n assert_operation_list_equal ~expected:expected_ops ~real:ops\n ); *)\n\n\n (* FIXME: DISABLING THIS UNIT TEST. Disabled this unit test which was written for the case of indexCfmm.ml. Once we have\n * a better way of testing different concrete cfmm implementations we should be able to re-enable this. *)\n\n (* (\"sell_kit - returns expected tez\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n\n let kit_to_sell = kit_of_denomination (Ligo.nat_from_literal \"1_000_000n\") in\n let min_ctok_expected = ctok_of_denomination (Ligo.nat_from_literal \"1n\") in\n\n let checker =\n let checker =\n empty_checker_with_cfmm\n { empty_checker.cfmm with\n ctok = ctok_of_denomination (Ligo.nat_from_literal \"2n\");\n kit = kit_of_denomination (Ligo.nat_from_literal \"2n\");\n lqt = lqt_of_denomination (Ligo.nat_from_literal \"1n\");\n } in\n { checker with\n parameters =\n { checker.parameters with circulating_kit = kit_add checker.parameters.circulating_kit kit_to_sell };\n fa2_state = ledger_issue_kit (checker.fa2_state, alice_addr, kit_to_sell);\n } in\n Checker.assert_checker_invariants checker;\n\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let ops, _ = Checker.entrypoint_sell_kit (checker, (kit_to_sell, min_ctok_expected, Ligo.timestamp_from_seconds_literal 1)) in\n\n let expected_ops = [\n (LigoOp.Tezos.fa2_transfer_transaction\n [ Fa2Interface.{\n from_ = checker_address;\n txs = [\n { to_ = alice_addr;\n token_id = TokenMetadata.ctok_token_id;\n amount = Ligo.nat_from_literal \"1n\";\n }\n ]\n }\n ]\n (Ligo.tez_from_literal \"0mutez\")\n (Option.get (LigoOp.Tezos.get_entrypoint_opt \"%transfer\" checker.external_contracts.ctok_fa2))\n );\n ] in\n assert_operation_list_equal ~expected:expected_ops ~real:ops\n ); *)\n\n (\"remove_liquidity - returns expected kit and tez\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n\n let min_kit_expected = kit_of_denomination (Ligo.nat_from_literal \"1n\") in\n let min_ctok_expected = ctok_of_denomination (Ligo.nat_from_literal \"1n\") in\n let my_liquidity_tokens = lqt_of_denomination (Ligo.nat_from_literal \"1n\") in\n let sender = alice_addr in\n\n (* Populate the cfmm with some liquidity (carefully crafted) *)\n let checker =\n { empty_checker with\n parameters = { empty_checker.parameters with circulating_kit = kit_of_denomination (Ligo.nat_from_literal \"1n\")};\n cfmm =\n { empty_checker.cfmm with\n ctok = ctok_of_denomination (Ligo.nat_from_literal \"2n\");\n kit = kit_of_denomination (Ligo.nat_from_literal \"2n\");\n lqt = lqt_of_denomination (Ligo.nat_from_literal \"2n\");\n };\n fa2_state =\n let fa2_state = initial_fa2_state in\n let fa2_state = ledger_issue_lqt (fa2_state, sender, my_liquidity_tokens) in\n let fa2_state = ledger_issue_kit (fa2_state, !Ligo.Tezos.self_address, kit_of_denomination (Ligo.nat_from_literal \"1n\")) in\n fa2_state;\n } in\n Checker.assert_checker_invariants checker;\n\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal \"0mutez\");\n let ops, checker = Checker.entrypoint_remove_liquidity (checker, (my_liquidity_tokens, min_ctok_expected, min_kit_expected, Ligo.timestamp_from_seconds_literal 1)) in\n let ctok = match ops with\n | [Transaction (FA2TransferTransactionValue [{from_=from_address; txs=[tx];}], _, _)] ->\n begin\n assert_address_equal ~expected:checker_address ~real:from_address;\n assert_address_equal ~expected:sender ~real:tx.to_;\n tx.amount\n end\n | _ -> failwith (\"Expected [Transaction (FA2TransferTransactionValue [{from_=_; txs=[_];}], _, _)] but got \" ^ show_operation_list ops)\n in\n let kit = get_balance_of checker sender TokenMetadata.kit_token_id 
in\n\n assert_nat_equal ~expected:(Ligo.nat_from_literal \"1n\") ~real:kit;\n assert_nat_equal ~expected:(Ligo.nat_from_literal \"1n\") ~real:ctok;\n ()\n );\n\n (* ************************************************************************* *)\n (** FA2 *)\n (* ************************************************************************* *)\n (\"fa2 scenario\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let checker = empty_checker in\n\n let initial_addr = Ligo.address_of_string \"INIT_ADDR\" in\n\n (* mint some kit *)\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:initial_addr ~amount:Common.tez_zero;\n let _, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal \"0n\", None, tok_of_denomination (Ligo.nat_from_literal \"100_000_000n\"))) in\n let max_kit = Checker.view_burrow_max_mintable_kit ((initial_addr, Ligo.nat_from_literal \"0n\"), checker) in\n\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:initial_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal \"0n\", max_kit)) in\n\n (* get some liquidity *)\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:initial_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker =\n Checker.entrypoint_add_liquidity\n ( checker,\n ( ctok_of_denomination (Ligo.nat_from_literal \"5_000_000n\")\n , kit_of_denomination (Ligo.nat_from_literal \"5_000_000n\")\n , lqt_of_denomination (Ligo.nat_from_literal \"5n\")\n , Ligo.timestamp_from_seconds_literal 999\n )\n ) in\n\n (* initialize alice, bob, and leena accounts *)\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:initial_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.strict_entrypoint_transfer (checker, [\n { from_ = initial_addr;\n txs = [\n { to_ = alice_addr; token_id = TokenMetadata.kit_token_id; amount = Ligo.nat_from_literal \"5n\" };\n { to_ = bob_addr; token_id = TokenMetadata.lqt_token_id; amount = Ligo.nat_from_literal \"5n\" }\n ];\n }]) in\n\n let balance chk addr tok = Checker.view_get_balance ((addr, tok), chk) in\n\n (* you can see the initial balances here for reference *)\n assert_nat_equal ~real:(balance checker alice_addr TokenMetadata.kit_token_id) ~expected:(Ligo.nat_from_literal \"5n\");\n assert_nat_equal ~real:(balance checker alice_addr TokenMetadata.lqt_token_id) ~expected:(Ligo.nat_from_literal \"0n\");\n\n assert_nat_equal ~real:(balance checker bob_addr TokenMetadata.kit_token_id) ~expected:(Ligo.nat_from_literal \"0n\");\n assert_nat_equal ~real:(balance checker bob_addr TokenMetadata.lqt_token_id) ~expected:(Ligo.nat_from_literal \"5n\");\n\n assert_nat_equal ~real:(balance checker leena_addr TokenMetadata.kit_token_id) ~expected:(Ligo.nat_from_literal \"0n\");\n assert_nat_equal ~real:(balance checker leena_addr TokenMetadata.lqt_token_id) ~expected:(Ligo.nat_from_literal \"0n\");\n\n (* make leena an operator of bob for kit *)\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bob_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_update_operators (checker, [\n (Add_operator { owner = bob_addr; operator = leena_addr; token_id = TokenMetadata.kit_token_id })]) in\n\n assert_equal true (Checker.view_is_operator ((bob_addr, (leena_addr, TokenMetadata.kit_token_id)), checker));\n assert_equal false (Checker.view_is_operator ((bob_addr, (leena_addr, TokenMetadata.lqt_token_id)), checker));\n assert_equal 
false (Checker.view_is_operator ((leena_addr, (bob_addr, TokenMetadata.kit_token_id)), checker));\n\n (* alice can transfer some kit to bob *)\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.strict_entrypoint_transfer (checker, [\n { from_=alice_addr; txs=[{to_=bob_addr; token_id=TokenMetadata.kit_token_id;amount=Ligo.nat_from_literal \"2n\"}]}]) in\n\n assert_nat_equal ~real:(balance checker alice_addr TokenMetadata.kit_token_id) ~expected:(Ligo.nat_from_literal \"3n\");\n assert_nat_equal ~real:(balance checker bob_addr TokenMetadata.kit_token_id) ~expected:(Ligo.nat_from_literal \"2n\");\n\n (* but she can not transfer more than she has *)\n assert_raises\n (Failure \"FA2_INSUFFICIENT_BALANCE\")\n (fun () -> Checker.strict_entrypoint_transfer (checker, [\n { from_=alice_addr; txs=[{to_=bob_addr; token_id=TokenMetadata.kit_token_id; amount=Ligo.nat_from_literal \"10n\"}]}]));\n\n (* and leena can send some of that kit back to alice *)\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:leena_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.strict_entrypoint_transfer (checker, [\n { from_=bob_addr; txs=[{to_=alice_addr; token_id=TokenMetadata.kit_token_id; amount=Ligo.nat_from_literal \"1n\"}]}]) in\n\n assert_nat_equal ~real:(balance checker alice_addr TokenMetadata.kit_token_id) ~expected:(Ligo.nat_from_literal \"4n\");\n assert_nat_equal ~real:(balance checker bob_addr TokenMetadata.kit_token_id) ~expected:(Ligo.nat_from_literal \"1n\");\n\n (* but leena can not even send a single kit from bob's account when he's not an operator anymore *)\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bob_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_update_operators (checker, [\n (Remove_operator { owner = bob_addr; operator = leena_addr; token_id = TokenMetadata.kit_token_id })]) in\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:leena_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n\n assert_raises\n (Failure \"FA2_NOT_OPERATOR\")\n (fun () -> Checker.strict_entrypoint_transfer (checker, [\n { from_=bob_addr; txs=[{to_=alice_addr; token_id=TokenMetadata.kit_token_id; amount=Ligo.nat_from_literal \"1n\"}]}]));\n ()\n );\n\n (\"view_total_supply (FA2) - initial kit supply\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let total_kit_amount = Checker.view_total_supply (TokenMetadata.kit_token_id, empty_checker) in\n assert_nat_equal ~expected:(Ligo.nat_from_literal \"0n\") ~real:total_kit_amount;\n ()\n );\n\n (\"view_total_supply (FA2) - initial lqt supply\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let total_lqt_amount = Checker.view_total_supply (TokenMetadata.lqt_token_id, empty_checker) in\n assert_nat_equal ~expected:(Ligo.nat_from_literal \"0n\") ~real:total_lqt_amount;\n ()\n );\n\n (\"view_total_supply (FA2) - undefined token id\" >::\n fun _ ->\n assert_raises\n (Failure \"FA2_TOKEN_UNDEFINED\")\n (fun () -> Checker.view_total_supply (Ligo.nat_from_literal \"3n\", empty_checker))\n );\n\n (\"view_all_tokens (FA2)\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let all_tokens = Checker.view_all_tokens ((), empty_checker) in\n assert_nat_list_equal\n ~expected:[ TokenMetadata.kit_token_id; TokenMetadata.lqt_token_id ]\n ~real:all_tokens;\n ()\n );\n\n (* ************************************************************************* *)\n (** LiquidationAuctions *)\n (* 
************************************************************************* *)\n (\"entrypoint_liquidation_auction_place_bid: should only allow the current auction\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let checker = { empty_checker with last_index = Some (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal \"1_000_000n\")) } in\n\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_touch (checker, ()) in\n\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero;\n let _, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal \"0n\", None, tok_of_denomination (Ligo.nat_from_literal \"200_000_000n\"))) in\n let max_kit = Checker.view_burrow_max_mintable_kit ((alice_addr, Ligo.nat_from_literal \"0n\"), checker) in\n\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal \"0n\", max_kit)) in\n let checker = { checker with last_index = Some (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal \"10_000_000n\")) } in\n let _, checker = Checker.entrypoint_touch (checker, ()) in\n\n Ligo.Tezos.new_transaction ~seconds_passed:1_000_000 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_touch (checker, ()) in\n\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_touch_burrow (checker, (alice_addr, Ligo.nat_from_literal \"0n\")) in\n let _, checker = Checker.entrypoint_mark_for_liquidation (checker, (alice_addr, Ligo.nat_from_literal \"0n\")) in\n let _, checker = Checker.entrypoint_touch (checker, ()) in\n\n let res = Checker.view_current_liquidation_auction_details ((), checker) in\n let other_ptr = match res.auction_id with AVLPtr i -> Ptr.ptr_next i in\n\n assert_raises\n (Failure (Ligo.string_of_int error_InvalidLiquidationAuction))\n (fun () -> Checker.entrypoint_liquidation_auction_place_bid (checker, (AVLPtr other_ptr, res.minimum_bid)));\n );\n\n (\"can complete a liquidation auction\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let checker = empty_checker in\n\n (* mint some kit to convert to liquidity *)\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let _, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal \"0n\", None, tok_of_denomination (Ligo.nat_from_literal \"200_000_000n\"))) in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal \"0n\", kit_of_denomination (Ligo.nat_from_literal \"10_000_000n\"))) in\n\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _lqt_minted_ret_kit_ops, checker =\n Checker.entrypoint_add_liquidity\n ( checker\n , ( ctok_of_denomination (Ligo.nat_from_literal \"1_000_000n\")\n , kit_one\n , lqt_of_denomination (Ligo.nat_from_literal \"1n\")\n , Ligo.timestamp_from_seconds_literal 1\n )\n ) in (* barely on time *)\n\n (* Activation/deactivation tests *)\n let () =\n (* Creation/deactivation does not incur any costs. 
*)\n let tez = tok_of_denomination (Ligo.nat_from_literal \"12_345_678n\") in (* NOTE: tez is a misnomer; it's tok really *)\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:Common.tez_zero;\n let (ops, checker0) = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal \"0n\", None, tez)) in\n (* created burrow should be deposited (incl. the creation deposit) *)\n let burrow_addr =\n burrow_address\n (Option.get (Ligo.Big_map.find_opt (bob_addr, Ligo.nat_from_literal \"0n\") checker0.burrows)) in\n let () = match ops with\n | [ CreateBurrowContract (_, cb_delegate, cb_tez, cb_storage) ;\n (Transaction (FA2TransferTransactionValue _, _, _)) as op ;\n ] ->\n (* burrow creation values *)\n assert_key_hash_option_equal ~expected:None ~real:cb_delegate;\n assert_tez_equal ~expected:Common.tez_zero ~real:cb_tez;\n assert_equal BurrowTypes.({checker_address=checker_address; collateral_fa2=collateral_fa2_addr}) cb_storage;\n (* collateral initialization values *)\n assert_operation_equal\n ~expected:(\n LigoOp.Tezos.fa2_transfer_transaction\n [ Fa2Interface.(\n { from_ = bob_addr;\n txs = [\n { to_ = burrow_addr;\n token_id = TokenMetadata.tok_token_id;\n amount = tok_to_denomination_nat tez;\n };\n ];\n }\n )\n ]\n (Ligo.tez_from_literal \"0mutez\")\n (Option.get (LigoOp.Tezos.get_entrypoint_opt \"%transfer\" checker.external_contracts.collateral_fa2))\n )\n ~real:op\n | _ -> assert_failure (\"Expected [CreateBurrowContract (_, _, _, _); Transaction (FA2TransferTransactionValue _, _, _)] but got \" ^ show_operation_list ops) in\n\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let (ops, checker1) = Checker.entrypoint_deactivate_burrow (checker0, (Ligo.nat_from_literal \"0n\", alice_addr)) in\n assert_operation_list_equal\n ~expected:[\n LigoOp.Tezos.address_nat_transaction\n (alice_addr, tok_to_denomination_nat tez)\n (Ligo.tez_from_literal \"0mutez\")\n (Option.get (LigoOp.Tezos.get_entrypoint_opt \"%burrowTransfer\" burrow_addr))\n ]\n ~real:ops;\n (* deactivation/activation = identity (if conditions are met ofc). *)\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:Common.tez_zero;\n let _ops, checker2 = Checker.entrypoint_activate_burrow (checker1, (Ligo.nat_from_literal \"0n\", tez)) in\n (* FIXME: cfmm contains a ratio, which can not be compared for equality using (=). 
So, the next line can give false positives. *)\n assert_equal checker0 checker2;\n () in\n\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:Common.tez_zero;\n let (_, checker) = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal \"0n\", None, tok_of_denomination (Ligo.nat_from_literal \"10_000_000n\"))) in\n let burrow_id = (bob_addr, Ligo.nat_from_literal \"0n\") in\n let burrow_addr =\n burrow_address\n (Option.get (Ligo.Big_map.find_opt (bob_addr, Ligo.nat_from_literal \"0n\") checker.burrows)) in\n\n (* Mint as much kit as possible *)\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let (_ops, checker) =\n Checker.entrypoint_mint_kit\n ( checker\n , (Ligo.nat_from_literal \"0n\", kit_of_denomination (Ligo.nat_from_literal \"4_285_714n\"))\n ) in\n\n let kit = get_balance_of checker bob_addr TokenMetadata.kit_token_id in\n assert_nat_equal ~expected:(Ligo.nat_from_literal \"4_285_714n\") ~real:kit;\n\n assert_bool\n \"should not be overburrowed right after minting\"\n (not\n @@ burrow_is_overburrowed\n checker.parameters\n (Option.get (Ligo.Big_map.find_opt burrow_id checker.burrows))\n );\n\n (* Minting another kit should fail *)\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n assert_raises\n (Failure (Ligo.string_of_int error_MintKitFailure))\n (fun () ->\n Checker.entrypoint_mint_kit\n ( checker\n , (Ligo.nat_from_literal \"0n\", kit_of_denomination (Ligo.nat_from_literal \"1n\"))\n )\n );\n\n (* Over time the burrows with outstanding kit should be overburrowed\n \t* (NOTE: even if the index stays where it was before, but that would\n \t* take more time I guess). *)\n Ligo.Tezos.new_transaction ~seconds_passed:60 ~blocks_passed:1 ~sender:bob_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n\n let _ops, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal \"1_000_001n\")) in\n\n let ops, checker = Checker.entrypoint_touch_burrow (checker, burrow_id) in\n assert_operation_list_equal ~expected:[] ~real:ops;\n\n assert_bool\n \"if the index goes up, then burrows should become overburrowed\"\n (burrow_is_overburrowed\n checker.parameters\n (Option.get (Ligo.Big_map.find_opt burrow_id checker.burrows))\n );\n\n (* If enough time passes and the index remains up, then the burrow is even liquidatable. *)\n Ligo.Tezos.new_transaction ~seconds_passed:(211*60) ~blocks_passed:211 ~sender:bob_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n\n let kit_before_reward = get_balance_of checker bob_addr TokenMetadata.kit_token_id in\n let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal \"1_200_000n\")) in\n let kit_after_reward = get_balance_of checker bob_addr TokenMetadata.kit_token_id in\n\n let touch_reward = Ligo.sub_nat_nat kit_after_reward kit_before_reward in\n\n let ops, checker = Checker.entrypoint_touch_burrow (checker, burrow_id) in\n assert_operation_list_equal ~expected:[] ~real:ops;\n\n assert_int_equal\n ~expected:(Ligo.int_from_literal \"202_000_000\") (* wow, high reward, many blocks have passed. 
*)\n ~real:touch_reward;\n\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let (ops, checker) = Checker.entrypoint_mark_for_liquidation (checker, burrow_id) in\n\n assert_operation_list_equal\n ~expected:[\n LigoOp.Tezos.address_nat_transaction\n (alice_addr, Ligo.nat_from_literal \"1_009_000n\")\n (Ligo.tez_from_literal \"0mutez\")\n (Option.get (LigoOp.Tezos.get_entrypoint_opt \"%burrowTransfer\" burrow_addr))\n ]\n ~real:ops;\n\n let slice =\n (Ligo.Big_map.find_opt burrow_id checker.liquidation_auctions.burrow_slices)\n |> Option.get\n |> fun i -> i.youngest_slice in\n\n (* We shouldn't be able to cancel the liquidation of this slice if the\n * prices don't change, even if it's not in an auction yet. *)\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n assert_raises\n (Failure (Ligo.string_of_int error_UnwarrantedCancellation))\n (fun () -> Checker.entrypoint_cancel_liquidation_slice (checker, slice));\n\n (* Trying to cancel a liquidation using an invalid pointer should fail. *)\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n assert_raises\n (Failure (Ligo.string_of_int error_InvalidLeafPtr))\n (fun () ->\n let undefined_slice = LiquidationAuctionPrimitiveTypes.LeafPtr (ptr_next checker.liquidation_auctions.avl_storage.last_ptr) in\n Checker.entrypoint_cancel_liquidation_slice (checker, undefined_slice)\n );\n\n Ligo.Tezos.new_transaction ~seconds_passed:(5*60) ~blocks_passed:5 ~sender:bob_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n assert_raises\n (Failure (Ligo.string_of_int error_NoOpenAuction))\n (fun () -> Checker.view_current_liquidation_auction_details ((), checker));\n\n let kit_before_reward = get_balance_of checker bob_addr TokenMetadata.kit_token_id in\n let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal \"1_200_000n\")) in\n let kit_after_reward = get_balance_of checker bob_addr TokenMetadata.kit_token_id in\n\n let touch_reward = Ligo.sub_nat_nat kit_after_reward kit_before_reward in\n\n assert_bool \"should start an auction\"\n (Option.is_some checker.liquidation_auctions.current_auction);\n\n assert_int_equal\n ~expected:(Ligo.int_from_literal \"500_000\")\n ~real:touch_reward;\n\n Ligo.Tezos.new_transaction ~seconds_passed:(5*60) ~blocks_passed:5 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n\n let kit_before_reward = get_balance_of checker alice_addr TokenMetadata.kit_token_id in\n let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal \"1_200_000n\")) in\n let kit_after_reward = get_balance_of checker alice_addr TokenMetadata.kit_token_id in\n\n let touch_reward = Ligo.sub_nat_nat kit_after_reward kit_before_reward in\n let min_bid = Checker.view_current_liquidation_auction_details ((), checker) in\n\n let auction_id =\n min_bid.auction_id in\n assert_kit_equal\n ~expected:(kit_of_denomination (Ligo.nat_from_literal \"2_709_183n\"))\n ~real:min_bid.minimum_bid;\n\n (* Bid the minimum first *)\n let (ops, checker) =\n Checker.entrypoint_liquidation_auction_place_bid (checker, (auction_id, min_bid.minimum_bid)) in\n assert_operation_list_equal ~expected:[] ~real:ops;\n\n (* Same person increases the bid *)\n let (ops, checker) =\n Checker.entrypoint_liquidation_auction_place_bid\n ( checker\n , (auction_id, 
kit_of_denomination (Ligo.nat_from_literal \"4_200_000n\"))\n ) in\n\n let auction_id =\n match checker.liquidation_auctions.current_auction with\n | None -> assert_failure \"entrypoint_liquidation_auction_place_bid should have succeeded\"\n | Some current_auction -> current_auction.contents in\n\n assert_operation_list_equal ~expected:[] ~real:ops;\n\n assert_int_equal\n ~expected:(Ligo.int_from_literal \"500_000\")\n ~real:touch_reward;\n\n Ligo.Tezos.new_transaction ~seconds_passed:(30*60) ~blocks_passed:30 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n\n let kit_before_reward = get_balance_of checker alice_addr TokenMetadata.kit_token_id in\n let _ops, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal \"1_200_000n\")) in\n let kit_after_reward = get_balance_of checker alice_addr TokenMetadata.kit_token_id in\n\n let touch_reward = Ligo.sub_nat_nat kit_after_reward kit_before_reward in\n\n assert_bool \"auction should be completed\"\n (Option.is_none checker.liquidation_auctions.current_auction);\n\n assert_int_equal\n ~expected:(Ligo.int_from_literal \"21_000_000\")\n ~real:touch_reward;\n\n (* FIXME: Operations differ between the FA2 deployment and the TEZ deployment\n (* Check that all the requests for burrows to send tez come _before_ the\n * request to the oracle to update the index. *)\n begin match ops with\n | [\n Transaction (AddressNatTransactionValue _, _, _); (* send tez requests *)\n Transaction (NatContractTransactionValue _, _, _); (* oracle call *)\n call\n ] -> ()\n | _ -> assert_failure (\"Unexpected operations/operation order: \" ^ show_operation_list ops)\n end;\n *)\n\n (* We don't need to touch the slice on this test case since\n * Checker.entrypoint_touch_with_index already touches the oldest 5\n * slices. *)\n assert_raises\n (Failure (Ligo.string_of_int error_InvalidLeafPtr))\n (fun () -> Checker.entrypoint_touch_liquidation_slices (checker, [slice]));\n\n assert_bool \"burrow should have no liquidation slices\"\n (Ligo.Big_map.find_opt burrow_id checker.liquidation_auctions.burrow_slices = None);\n\n let result = Option.get (Ligo.Big_map.find_opt burrow_id checker.burrows) in\n assert_tok_equal\n ~expected:tok_zero\n ~real:(burrow_collateral_at_auction result);\n\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let (ops, checker) = Checker.entrypoint_liquidation_auction_claim_win (checker, auction_id) in\n\n assert_operation_list_equal\n ~expected:[\n (LigoOp.Tezos.fa2_transfer_transaction\n [ Fa2Interface.(\n { from_ = checker_address;\n txs = [\n { to_ = alice_addr;\n token_id = TokenMetadata.tok_token_id;\n amount = Ligo.nat_from_literal \"3_156_446n\";\n };\n ];\n }\n )\n ]\n (Ligo.tez_from_literal \"0mutez\")\n (Option.get (LigoOp.Tezos.get_entrypoint_opt \"%transfer\" checker.external_contracts.collateral_fa2))\n );\n ]\n ~real:ops;\n (* This should fail; shouldn't be able to claim the win twice. *)\n assert_raises\n (Failure (Ligo.string_of_int error_InvalidAvlPtr))\n (fun () -> Checker.entrypoint_liquidation_auction_claim_win (checker, auction_id));\n\n ()\n );\n\n (\"entrypoint_mark_for_liquidation - should not create empty slices\" >::\n fun _ ->\n (* Setup. 
*)\n Ligo.Tezos.reset ();\n let sender = alice_addr in\n let checker = empty_checker in\n\n (* Create a burrow with a very little tez in it. *)\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:Common.tez_zero;\n let (_, burrow_no) as burrow_id, checker = newly_created_burrow checker \"0n\" (tok_of_denomination (Ligo.nat_from_literal \"2_001_001n\")) in\n\n (* CALCULATIONS\n ~~~~~~~~~~~~\n Tez in the burrow is (1_001_001mutez + 1tez) so the reward is\n (1tez + 1_001mutez = 1_001_001). This means that\n - The slice we WOULD send to auctions is empty.\n - The burrow that remains is empty, so the next liquidation WOULD create another empty slice to auctions.\n *)\n\n (* Mint as much kit as possible. *)\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal \"0mutez\");\n let (_ops, checker) = Checker.entrypoint_mint_kit (checker, (burrow_no, kit_of_denomination (Ligo.nat_from_literal \"476_667n\"))) in\n\n (* Let some time pass. Over time the burrows with outstanding kit should\n \t* become overburrowed, and eventually liquidatable. Note that this\n \t* could be because of the index, but also it can happen because of the\n \t* fees alone if the index remains the same. *)\n let blocks_passed = 211 in (* NOTE: I am a little surprised/worried about this being again 211... *)\n Ligo.Tezos.new_transaction ~seconds_passed:(60*blocks_passed) ~blocks_passed:blocks_passed ~sender:bob_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _ops, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal \"1_105_283n\")) in (* sup *)\n let _ops, checker = Checker.entrypoint_touch_burrow (checker, burrow_id) in\n\n (* Ensure that the burrow is liquidatable. *)\n begin match Ligo.Big_map.find_opt burrow_id checker.burrows with\n | None -> assert_failure \"bug\"\n | Some burrow -> assert_bool \"burrow needs to be liquidatable for the test to be potent.\" (Burrow.burrow_is_liquidatable checker.parameters burrow);\n end;\n\n (* Let's mark the burrow for liquidation now (first pass: leaves it empty but active). *)\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let (_ops, checker) = Checker.entrypoint_mark_for_liquidation (checker, burrow_id) in\n Checker.assert_checker_invariants checker; (* Ensures no empty slices in the queue. *)\n\n (* Let's mark the burrow for liquidation now (second pass: deactivates it). *)\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let (_ops, checker) = Checker.entrypoint_mark_for_liquidation (checker, burrow_id) in\n Checker.assert_checker_invariants checker; (* Ensures no empty slices in the queue. 
*)\n\n ()\n );\n\n (\"deposit_collateral - does not fail on untouched burrows\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let amount = Constants.creation_deposit in\n (* Create a burrow *)\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal \"0n\", None, amount)) in\n Touch checker\n Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal \"1_000_000n\")) in\n (* Try to deposit some tez to the untouched burrow *)\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let _ = Checker.entrypoint_deposit_collateral (checker, (Ligo.nat_from_literal \"0n\", amount)) in\n ()\n );\n\n (\"entrypoint_withdraw_collateral - does not fail on untouched burrows\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let amount = tok_add Constants.creation_deposit Constants.creation_deposit in\n (* Create a burrow *)\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal \"0n\", None, amount)) in\n Touch checker\n Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal \"1_000_000n\")) in\n (* Try to withdraw some tez from the untouched burrow *)\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let _ = Checker.entrypoint_withdraw_collateral (checker, (Ligo.nat_from_literal \"0n\", Constants.creation_deposit)) in\n ()\n );\n\n (\"entrypoint_mint_kit - does not fail on untouched burrows\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n (* Create a burrow *)\n let amount = tok_add Constants.creation_deposit Constants.creation_deposit in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal \"0n\", None, amount)) in\n Touch checker\n Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal \"1_000_000n\")) in\n (* Try to mint some kit out of the untouched burrow *)\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _ = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal \"0n\", kit_of_denomination (Ligo.nat_from_literal \"1n\"))) in\n ()\n );\n\n (\"entrypoint_burn_kit - does not fail on untouched burrows\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let amount = tok_add Constants.creation_deposit Constants.creation_deposit in\n (* Create a burrow *)\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal \"0n\", None, amount)) in\n Mint some kit out of the burrow\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 
~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _ops, checker = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal \"0n\", kit_of_denomination (Ligo.nat_from_literal \"1n\"))) in\n Touch checker\n Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal \"1_000_000n\")) in\n (* Try to burn some kit into the untouched burrow *)\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _ = Checker.entrypoint_burn_kit (checker, (Ligo.nat_from_literal \"0n\", kit_of_denomination (Ligo.nat_from_literal \"1n\"))) in\n ()\n );\n\n (\"entrypoint_activate_burrow - does not fail on untouched burrows\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let amount = Constants.creation_deposit in\n (* Create a burrow *)\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal \"0n\", None, amount)) in\n (* Deactivate the burrow *)\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _ops, checker = Checker.entrypoint_deactivate_burrow (checker, (Ligo.nat_from_literal \"0n\", !Ligo.Tezos.sender)) in\n Touch checker\n Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal \"1_000_000n\")) in\n (* Try to activate the untouched burrow *)\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let _ = Checker.entrypoint_activate_burrow (checker, (Ligo.nat_from_literal \"0n\", amount)) in\n ()\n );\n\n (\"entrypoint_deactivate_burrow - does not fail on untouched burrows\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let amount = Constants.creation_deposit in\n (* Create a burrow *)\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal \"0n\", None, amount)) in\n Touch checker\n Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal \"1_000_000n\")) in\n (* Try to deactivate the untouched burrow *)\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _ = Checker.entrypoint_deactivate_burrow (checker, (Ligo.nat_from_literal \"0n\", !Ligo.Tezos.sender)) in\n ()\n );\n\n (\"entrypoint_mark_for_liquidation - does not fail on untouched burrows\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let amount = Constants.creation_deposit in\n (* Create a burrow *)\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal \"0n\", None, amount)) in\n let burrow_id = (!Ligo.Tezos.sender, Ligo.nat_from_literal \"0n\") in\n Touch checker\n Ligo.Tezos.new_transaction ~seconds_passed:1 
~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal \"1_000_000n\")) in\n (* Try to mark the untouched burrow for liquidation *)\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n (* TODO: Would be nice to create the conditions for entrypoint_mark_for_liquidation\n * to really succeed instead of failing for another reason. *)\n assert_raises\n (Failure (Ligo.string_of_int error_NotLiquidationCandidate))\n (fun () -> Checker.entrypoint_mark_for_liquidation (checker, burrow_id));\n );\n\n (* TODO: Add test \"entrypoint_cancel_liquidation_slice - fails on untouched burrows\" *)\n\n (\"entrypoint_set_burrow_delegate - does not fail on untouched burrows\" >::\n fun _ ->\n (* NOTE: In a collateral=FA2 deployment this would actually fail. *)\n Ligo.Tezos.reset ();\n let amount = Constants.creation_deposit in\n (* Create a burrow *)\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal \"0n\", None, amount)) in\n Touch checker\n Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal \"1_000_000n\")) in\n (* Try to set the delegate of the untouched burrow *)\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _ = Checker.entrypoint_set_burrow_delegate (checker, (Ligo.nat_from_literal \"0n\", None)) in\n ()\n );\n\n (\"cfmm views\" >:::\n let\n with_cfmm_setup f =\n fun _ ->\n Ligo.Tezos.reset ();\n let checker = empty_checker in\n let burrow_id = Ligo.nat_from_literal \"42n\" in\n (* Create a burrow *)\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let _ops, checker = Checker.entrypoint_create_burrow (checker, (burrow_id, None, tok_of_denomination (Ligo.nat_from_literal \"10_000_000n\"))) in\n (* Mint some kit *)\n Ligo.Tezos.new_transaction ~seconds_passed:62 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _ops, checker = Checker.entrypoint_mint_kit (checker, (burrow_id, kit_one)) in\n (* Add some liquidity *)\n Ligo.Tezos.new_transaction ~seconds_passed:121 ~blocks_passed:2 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let ctok_to_give = Ctok.ctok_of_denomination (Ligo.nat_from_literal \"400_000n\") in\n let kit_to_give = Kit.kit_of_denomination (Ligo.nat_from_literal \"400_000n\") in\n let min_lqt_to_mint = Lqt.lqt_of_denomination (Ligo.nat_from_literal \"5n\") in\n let deadline = Ligo.add_timestamp_int !Ligo.Tezos.now (Ligo.int_from_literal \"20\") in\n let _ops, checker = Checker.entrypoint_add_liquidity (checker, (ctok_to_give, kit_to_give, min_lqt_to_mint, deadline)) in\n\n Ligo.Tezos.new_transaction ~seconds_passed:59 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _ = f checker in ()\n in\n [\n \"view_buy_kit_min_kit_expected\" >:: with_cfmm_setup\n (fun checker ->\n let ctok_to_sell = Ctok.ctok_of_denomination (Ligo.nat_from_literal \"100_000n\") in\n let min_kit_to_buy = Checker.view_buy_kit_min_kit_expected (ctok_to_sell, 
checker) in\n let deadline = Ligo.add_timestamp_int !Ligo.Tezos.now (Ligo.int_from_literal \"20\") in\n (* must succeed, otherwise view_buy_kit_min_kit_expected overapproximated *)\n Checker.entrypoint_buy_kit (checker, (ctok_to_sell, min_kit_to_buy, deadline)));\n\n \"view_buy_kit_min_kit_expected - fail if no ctok is given\" >:: with_cfmm_setup\n (fun checker ->\n assert_raises\n (Failure (Ligo.string_of_int error_BuyKitNoCtokGiven))\n (fun () -> Checker.view_buy_kit_min_kit_expected (Ctok.ctok_zero, checker))\n );\n\n \"view_sell_kit_min_ctok_expected\" >:: with_cfmm_setup\n (fun checker ->\n let kit_to_sell = Kit.kit_of_denomination (Ligo.nat_from_literal \"100_000n\") in\n let min_ctok_to_buy = Checker.view_sell_kit_min_ctok_expected (kit_to_sell, checker) in\n let deadline = Ligo.add_timestamp_int !Ligo.Tezos.now (Ligo.int_from_literal \"20\") in\n (* must succeed, otherwise view_sell_kit_min_ctok_expected overapproximated *)\n Checker.entrypoint_sell_kit (checker, (kit_to_sell, min_ctok_to_buy, deadline)));\n\n \"view_sell_kit_min_ctok_expected - fail if no kit is given\" >:: with_cfmm_setup\n (fun checker ->\n assert_raises\n (Failure (Ligo.string_of_int error_SellKitNoKitGiven))\n (fun () -> Checker.view_sell_kit_min_ctok_expected (Kit.kit_zero, checker))\n );\n\n \"view_add_liquidity_max_kit_deposited / view_add_liquidity_min_lqt_minted\" >:: with_cfmm_setup\n (fun checker ->\n let ctok_to_sell = Ctok.ctok_of_denomination (Ligo.nat_from_literal \"100_000n\") in\n let max_kit_to_sell = Checker.view_add_liquidity_max_kit_deposited (ctok_to_sell, checker) in\n let min_lqt_to_buy = Checker.view_add_liquidity_min_lqt_minted (ctok_to_sell, checker) in\n let deadline = Ligo.add_timestamp_int !Ligo.Tezos.now (Ligo.int_from_literal \"20\") in\n (* must succeed, otherwise\n * view_add_liquidity_max_kit_deposited underapproximated or\n * view_add_liquidity_min_lqt_minted overapproximated (or both of them did) *)\n Checker.entrypoint_add_liquidity (checker, (ctok_to_sell, max_kit_to_sell, min_lqt_to_buy, deadline)));\n\n \"view_add_liquidity_max_kit_deposited - fail if no ctok is given\" >:: with_cfmm_setup\n (fun checker ->\n assert_raises\n (Failure (Ligo.string_of_int error_AddLiquidityNoCtokGiven))\n (fun () -> Checker.view_add_liquidity_max_kit_deposited (Ctok.ctok_zero, checker))\n );\n\n \"view_add_liquidity_min_lqt_minted - fail if no ctok is given\" >:: with_cfmm_setup\n (fun checker ->\n assert_raises\n (Failure (Ligo.string_of_int error_AddLiquidityNoCtokGiven))\n (fun () -> Checker.view_add_liquidity_min_lqt_minted (Ctok.ctok_zero, checker))\n );\n\n \"view_remove_liquidity_min_ctok_withdrawn / view_remove_liquidity_min_kit_withdrawn\" >:: with_cfmm_setup\n (fun checker ->\n let lqt_to_sell = Lqt.lqt_of_denomination (Ligo.nat_from_literal \"5n\") in\n let min_ctok_to_buy = Checker.view_remove_liquidity_min_ctok_withdrawn (lqt_to_sell, checker) in\n let min_kit_to_buy = Checker.view_remove_liquidity_min_kit_withdrawn (lqt_to_sell, checker) in\n let deadline = Ligo.add_timestamp_int !Ligo.Tezos.now (Ligo.int_from_literal \"20\") in\n (* must succeed, otherwise\n * view_remove_liquidity_min_ctok_withdrawn overapproximated or\n * view_remove_liquidity_min_kit_withdrawn overapproximated (or both of them did) *)\n Checker.entrypoint_remove_liquidity (checker, (lqt_to_sell, min_ctok_to_buy, min_kit_to_buy, deadline)));\n\n \"view_remove_liquidity_min_ctok_withdrawn - fail if no liquidity is given\" >:: with_cfmm_setup\n (fun checker ->\n assert_raises\n (Failure (Ligo.string_of_int 
error_RemoveLiquidityNoLiquidityBurned))\n (fun () -> Checker.view_remove_liquidity_min_ctok_withdrawn (Lqt.lqt_zero, checker))\n );\n\n \"view_remove_liquidity_min_ctok_withdrawn - too much lqt withdrawn (equal)\" >:: with_cfmm_setup\n (fun checker ->\n let lqt_to_withdraw = checker.cfmm.lqt in\n assert_raises\n (Failure (Ligo.string_of_int error_RemoveLiquidityTooMuchLiquidityWithdrawn))\n (fun () -> Checker.view_remove_liquidity_min_ctok_withdrawn (lqt_to_withdraw, checker))\n );\n\n \"view_remove_liquidity_min_ctok_withdrawn - too much lqt withdrawn (more than)\" >:: with_cfmm_setup\n (fun checker ->\n let lqt_to_withdraw = Lqt.lqt_add checker.cfmm.lqt (Lqt.lqt_of_denomination (Ligo.nat_from_literal \"1n\")) in\n assert_raises\n (Failure (Ligo.string_of_int error_RemoveLiquidityTooMuchLiquidityWithdrawn))\n (fun () -> Checker.view_remove_liquidity_min_ctok_withdrawn (lqt_to_withdraw, checker))\n );\n\n \"view_remove_liquidity_min_kit_withdrawn - fail if no liquidity is given\" >:: with_cfmm_setup\n (fun checker ->\n assert_raises\n (Failure (Ligo.string_of_int error_RemoveLiquidityNoLiquidityBurned))\n (fun () -> Checker.view_remove_liquidity_min_kit_withdrawn (Lqt.lqt_zero, checker))\n );\n\n \"view_remove_liquidity_min_kit_withdrawn - too much lqt withdrawn (equal)\" >:: with_cfmm_setup\n (fun checker ->\n let lqt_to_withdraw = checker.cfmm.lqt in\n assert_raises\n (Failure (Ligo.string_of_int error_RemoveLiquidityTooMuchLiquidityWithdrawn))\n (fun () -> Checker.view_remove_liquidity_min_kit_withdrawn (lqt_to_withdraw, checker))\n );\n\n \"view_remove_liquidity_min_kit_withdrawn - too much lqt withdrawn (more than)\" >:: with_cfmm_setup\n (fun checker ->\n let lqt_to_withdraw = Lqt.lqt_add checker.cfmm.lqt (Lqt.lqt_of_denomination (Ligo.nat_from_literal \"1n\")) in\n assert_raises\n (Failure (Ligo.string_of_int error_RemoveLiquidityTooMuchLiquidityWithdrawn))\n (fun () -> Checker.view_remove_liquidity_min_kit_withdrawn (lqt_to_withdraw, checker))\n );\n ]\n );\n\n (\"view_burrow_max_mintable_kit - does not fail on untouched burrows\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let amount = Constants.creation_deposit in\n (* Create a burrow *)\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal \"0n\", None, amount)) in\n let burrow_id = (!Ligo.Tezos.sender, Ligo.nat_from_literal \"0n\") in\n Touch checker\n Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal \"1_000_000n\")) in\n (* Try to view the max mintable kit from the untouched burrow *)\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _ = Checker.view_burrow_max_mintable_kit (burrow_id, checker) in\n ()\n );\n\n (\"view_is_burrow_overburrowed - does not fail on untouched burrows\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let amount = Constants.creation_deposit in\n (* Create a burrow *)\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal \"0n\", None, amount)) in\n let burrow_id = (!Ligo.Tezos.sender, Ligo.nat_from_literal \"0n\") in\n Touch checker\n Ligo.Tezos.new_transaction 
~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal \"1_000_000n\")) in\n (* Try to view whether the untouched burrow is overburrowed *)\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _ = Checker.view_is_burrow_overburrowed (burrow_id, checker) in\n ()\n );\n\n (\"view_is_burrow_liquidatable - does not fail on untouched burrows\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let amount = Constants.creation_deposit in\n (* Create a burrow *)\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal \"0n\", None, amount)) in\n let burrow_id = (!Ligo.Tezos.sender, Ligo.nat_from_literal \"0n\") in\n Touch checker\n Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal \"1_000_000n\")) in\n Try to view whether the untouched burrow is \n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _ = Checker.view_is_burrow_liquidatable (burrow_id, checker) in\n ()\n );\n\n (\"view_current_liquidation_auction_details - raises error when there is no current auction\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let checker = empty_checker in\n assert_raises\n (Failure (Ligo.string_of_int error_NoOpenAuction))\n (fun _ -> Checker.view_current_liquidation_auction_details ((), checker))\n );\n\n (\"view_current_liquidation_auction_details - expected value for descending auction\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let checker = checker_with_active_auction () in\n let auction = Option.get checker.liquidation_auctions.current_auction in\n let auction_details = Checker.view_current_liquidation_auction_details ((), checker) in\n let expected_auction_details = {\n auction_id = auction.contents;\n collateral = tok_of_denomination (Ligo.nat_from_literal \"23_669_648n\");\n minimum_bid = liquidation_auction_current_auction_minimum_bid auction;\n current_bid = None;\n remaining_blocks = None;\n remaining_seconds = None;\n }\n in\n assert_view_current_liquidation_auction_details_result_equal ~expected:expected_auction_details ~real:auction_details\n );\n\n (\"view_current_liquidation_auction_details - expected value for ascending auction\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let checker = checker_with_active_auction () in\n let auction = Option.get checker.liquidation_auctions.current_auction in\n Place a bid to turn the descending auction into an ascending one\n let bidder = bob_addr in\n let bid_amnt = liquidation_auction_current_auction_minimum_bid auction in\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bidder ~amount:Common.tez_zero;\n let _, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal \"1n\", None, tok_of_denomination (Ligo.nat_from_literal \"1_000_000_000n\"))) in\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bidder ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal \"1n\", bid_amnt)) in\n Ligo.Tezos.new_transaction 
~seconds_passed:10 ~blocks_passed:1 ~sender:bidder ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_liquidation_auction_place_bid (checker, (auction.contents, bid_amnt)) in\n\n Ligo.Tezos.new_transaction ~seconds_passed:500 ~blocks_passed:22 ~sender:bidder ~amount:(Ligo.tez_from_literal \"0mutez\");\n let auction = Option.get checker.liquidation_auctions.current_auction in\n let auction_details = Checker.view_current_liquidation_auction_details ((), checker) in\n let expected_auction_details = {\n auction_id = auction.contents;\n collateral = tok_of_denomination (Ligo.nat_from_literal \"23_669_648n\");\n minimum_bid = liquidation_auction_current_auction_minimum_bid auction;\n current_bid = Some LiquidationAuctionPrimitiveTypes.({address=bidder; kit=bid_amnt;});\n remaining_blocks = Some (Ligo.int_from_literal \"-2\");\n remaining_seconds = Some (Ligo.int_from_literal \"700\");\n }\n in\n assert_view_current_liquidation_auction_details_result_equal ~expected:expected_auction_details ~real:auction_details\n );\n ]\n\nlet () =\n run_test_tt_main\n suite\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/tezos-checker/checker/e4bd0f16aa14e10e8a62b28e85f8c98c388a0a6a/tests/testChecker.ml"},"language":{"kind":"string","value":"ocaml"},"comments":{"kind":"string","value":" The starting checker state should satisfy the invariants to begin with. \n Create some burrows and mint some kit \n Note: setting the transaction to far in the future to ensure that the protected_index will become adequately high\n * for the burrows to be liquidatable.\n \n Touch burrows \n Check the expected properties of this test fixture \n Produces a checker state with liquidation slices in the queue but no current auction.\n * Returns a list of details for queued slices related to a Close liquidation,\n * a list of details for all other slices in the queue, and the contract state.\n\n Produces a checker state with an active liquidation auction \n Produces a checker state with a completed liquidation auction \n Get the current auction minimum bid \n Mint enough kit to bid \n Place a bid \n Wait until enough time has passed for the auction to be completable then touch checker \n NOTE: we really want them to be identical here, hence the '='. \n Create the burrow \n Make a deposit \n Create a burrow and deactivate it \n Then activate it \n Create a burrow and mint some kit \n Note: all values here were arbitrarily chosen based on the amount of kit we minted above \n Create a burrow and mint some kit \n Then burn the kit \n burrow creation values \n collateral initialization values \n Create a burrow and deactivate it \n Create the burrow \n Make a deposit \n Lookup the current minimum bid \n Place a bid \n Use a checker state already containing some liquidatable burrows \n Mark one of the liquidatable burrows for liquidation \n Use a checker state already containing some liquidatable burrows \n Note: using a non-closed burrow for this test so we don't have to also re-activate the burrow \n Touch the remaining slices so the bid can be claimed. \n Claim the winning bid \n Create a burrow and mint some kit \n NOTE: In a collateral=FA2 deployment this would actually fail. 
\n Create the burrow with no delegate \n Then set the burrow's delegate \n Create a burrow and mint some kit \n Add some liquidity to the contract \n Note: all values here were arbitrarily chosen based on the amount of kit we minted above \n Now remove the liquidity \n Note: all values here were arbitrarily chosen based on the amount of kit we minted above \n Create the burrow \n Then touch it \n Create a burrow \n Try to withdraw some tez from the untouched burrow \n The division in this case should produce no remainder \n Create a burrow \n There should be no operations emitted. \n The owner should be able to burn it back. \n Create a burrow \n There should be no operations emitted. \n Have the wrong person try to burn it back; this should fail. \n before \n after \n before \n after \n Adjust transaction by a random amount of extra tez \n UNSAFE CAST \n before \n after \n FIXME: This test only rarely evaluates the 'eq' part of 'geq'. Reducing the range of possible `additional_tez` or increasing the\n * number of QCheck samples may improve this.\n \n (\"buy_kit - returns expected kit\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n (* Populate the cfmm with some liquidity \n Populate the cfmm with some liquidity (carefully crafted) \n ************************************************************************* \n* FA2 \n ************************************************************************* \n mint some kit \n get some liquidity \n you can see the initial balances here for reference \n make leena an operator of bob for kit \n alice can transfer some kit to bob \n but she can not transfer more than she has \n and leena can send some of that kit back to alice \n ************************************************************************* \n* LiquidationAuctions \n ************************************************************************* \n mint some kit to convert to liquidity \n barely on time \n Activation/deactivation tests \n Creation/deactivation does not incur any costs. \n NOTE: tez is a misnomer; it's tok really \n created burrow should be deposited (incl. the creation deposit) \n burrow creation values \n collateral initialization values \n deactivation/activation = identity (if conditions are met ofc). \n Minting another kit should fail \n Over time the burrows with outstanding kit should be overburrowed\n \t* (NOTE: even if the index stays where it was before, but that would\n \t* take more time I guess). \n If enough time passes and the index remains up, then the burrow is even liquidatable. \n wow, high reward, many blocks have passed. \n We shouldn't be able to cancel the liquidation of this slice if the\n * prices don't change, even if it's not in an auction yet. \n Trying to cancel a liquidation using an invalid pointer should fail. \n Bid the minimum first \n Same person increases the bid \n Check that all the requests for burrows to send tez come _before_ the\n * request to the oracle to update the index. \n send tez requests \n oracle call \n This should fail; shouldn't be able to claim the win twice. \n Setup. \n Create a burrow with a very little tez in it. \n Let some time pass. Over time the burrows with outstanding kit should\n \t* become overburrowed, and eventually liquidatable. Note that this\n \t* could be because of the index, but also it can happen because of the\n \t* fees alone if the index remains the same. \n sup \n Ensures no empty slices in the queue. \n Ensures no empty slices in the queue. 
\n Create a burrow \n Try to deposit some tez to the untouched burrow \n Create a burrow \n Try to withdraw some tez from the untouched burrow \n Create a burrow \n Try to mint some kit out of the untouched burrow \n Create a burrow \n Try to burn some kit into the untouched burrow \n Create a burrow \n Deactivate the burrow \n Try to activate the untouched burrow \n Create a burrow \n Try to deactivate the untouched burrow \n Create a burrow \n Try to mark the untouched burrow for liquidation \n TODO: Would be nice to create the conditions for entrypoint_mark_for_liquidation\n * to really succeed instead of failing for another reason. \n TODO: Add test \"entrypoint_cancel_liquidation_slice - fails on untouched burrows\" \n NOTE: In a collateral=FA2 deployment this would actually fail. \n Create a burrow \n Try to set the delegate of the untouched burrow \n Create a burrow \n Mint some kit \n Add some liquidity \n must succeed, otherwise view_buy_kit_min_kit_expected overapproximated \n must succeed, otherwise view_sell_kit_min_ctok_expected overapproximated \n must succeed, otherwise\n * view_add_liquidity_max_kit_deposited underapproximated or\n * view_add_liquidity_min_lqt_minted overapproximated (or both of them did) \n must succeed, otherwise\n * view_remove_liquidity_min_ctok_withdrawn overapproximated or\n * view_remove_liquidity_min_kit_withdrawn overapproximated (or both of them did) \n Create a burrow \n Try to view the max mintable kit from the untouched burrow \n Create a burrow \n Try to view whether the untouched burrow is overburrowed \n Create a burrow "},"code":{"kind":"string","value":"open Ctok\nopen Kit\nopen Tok\nopen Lqt\nopen Burrow\nopen OUnit2\nopen TestLib\nopen CheckerTypes\nopen Fa2Interface\nopen Fa2Ledger\nopen Fa2Implementation\nopen Error\nopen Ptr\nopen LiquidationAuctionTypes\nopen LiquidationAuction\n\nlet property_test_count = 10000\nlet qcheck_to_ounit t = OUnit.ounit2_of_ounit1 @@ QCheck_ounit.to_ounit_test t\n\nmodule PtrMap = Map.Make(struct type t = ptr let compare = compare_ptr end)\n\nlet checker_address = !Ligo.Tezos.self_address\n\nlet empty_checker =\n initial_checker\n { ctok_fa2 = ctok_fa2_addr;\n ctez_cfmm = ctez_cfmm_addr;\n oracle = oracle_addr;\n collateral_fa2 = collateral_fa2_addr;\n }\n\nlet _ = Checker.assert_checker_invariants empty_checker\n\n Enhance the initial checker state with a populated cfmm in a consistent way .\nlet empty_checker_with_cfmm (cfmm: CfmmTypes.cfmm) =\n let checker_kit = kit_sub cfmm.kit (kit_of_denomination (Ligo.nat_from_literal \"1n\")) in\n let checker_liquidity = lqt_sub cfmm.lqt (lqt_of_denomination (Ligo.nat_from_literal \"1n\")) in\n let checker =\n { empty_checker with\n parameters = { empty_checker.parameters with circulating_kit = checker_kit };\n cfmm = cfmm;\n fa2_state =\n let fa2_state = initial_fa2_state in\n let fa2_state = ledger_issue_lqt (fa2_state, !Ligo.Tezos.self_address, checker_liquidity) in\n let fa2_state = ledger_issue_kit (fa2_state, !Ligo.Tezos.self_address, checker_kit) in\n fa2_state;\n } in\n Checker.assert_checker_invariants checker;\n checker\n\n Produces a checker state with burrows . 
\n * Returns a list of the liquidatable burrow ids , underburrowed burrow ids , and the contract state \n\n * Returns a list of the liquidatable burrow ids, underburrowed burrow ids, and the contract state\n*)\nlet checker_with_liquidatable_burrows () =\n let checker = empty_checker in\n let alice_burrow_1 = Ligo.nat_from_literal \"0n\" in\n let alice_burrow_nos = List.init 20 (fun i -> Ligo.nat_from_int64 (Int64.of_int (i+1))) in\n let bob_burrow_1 = Ligo.nat_from_literal \"0n\" in\n Alice burrow 1 . Will NOT be \n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:2 ~sender:alice_addr ~amount:Common.tez_zero;\n let _, checker = Checker.entrypoint_create_burrow (checker, (alice_burrow_1, None, tok_of_denomination (Ligo.nat_from_literal \"2_000_000n\"))) in\n burrow 2 : N. Will be \n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:3 ~sender:alice_addr ~amount:Common.tez_zero;\n let _, checker = Checker.entrypoint_mint_kit (checker, (alice_burrow_1, (kit_of_denomination (Ligo.nat_from_literal \"100n\")))) in\n let checker = List.fold_left (\n fun checker alice_burrow_no ->\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_create_burrow (checker, (alice_burrow_no, None, tok_of_denomination (Ligo.nat_from_literal \"2_000_000n\"))) in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker =\n let max_kit = (Checker.view_burrow_max_mintable_kit ((alice_addr, alice_burrow_no), checker)) in\n Checker.entrypoint_mint_kit (checker, (alice_burrow_no, max_kit)) in\n checker\n )\n checker\n alice_burrow_nos\n in\n Bob burrow 1 . Will be .\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bob_addr ~amount:Common.tez_zero;\n let _, checker = Checker.entrypoint_create_burrow (checker, (bob_burrow_1, None, tok_of_denomination (Ligo.nat_from_literal \"20_000_000n\"))) in\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bob_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker =\n let max_kit = (Checker.view_burrow_max_mintable_kit ((bob_addr, bob_burrow_1), checker)) in\n Checker.entrypoint_mint_kit (checker, (bob_burrow_1, max_kit)) in\n\n Increase value of kit to make some of the burrows by touching checker\n Ligo.Tezos.new_transaction ~seconds_passed:10_000_000 ~blocks_passed:100_000 ~sender:bob_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal \"1_100_000n\")) in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_touch_burrow (checker, (alice_addr, alice_burrow_1)) in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_touch_burrow (checker, (bob_addr, bob_burrow_1)) in\n let checker = List.fold_left (\n fun checker alice_burrow_no ->\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_touch_burrow (checker, (alice_addr, alice_burrow_no)) in\n checker\n )\n checker\n alice_burrow_nos\n in\n\n assert_bool \"alice_burrow_1 was liquidatable but it is expected to not be\"\n (not 
(Burrow.burrow_is_liquidatable checker.parameters (Option.get (Ligo.Big_map.find_opt (alice_addr, alice_burrow_1) checker.burrows))));\n assert_bool \"bob_burrow_1 was not liquidatable but it is expected to be\"\n (Burrow.burrow_is_liquidatable checker.parameters (Option.get (Ligo.Big_map.find_opt (bob_addr, bob_burrow_1) checker.burrows)));\n List.fold_left (\n fun _ alice_burrow_no ->\n assert_bool (\"alice_burrow_\" ^ (Ligo.string_of_nat alice_burrow_no) ^ \" was not liquidatable but it is expected to be\")\n (Burrow.burrow_is_liquidatable checker.parameters (Option.get (Ligo.Big_map.find_opt (alice_addr, alice_burrow_no) checker.burrows))))\n ()\n alice_burrow_nos;\n Checker.assert_checker_invariants checker;\n\n let liquidatable_burrow_ids = List.append (List.map (fun x -> (alice_addr, x)) alice_burrow_nos) [(bob_addr, bob_burrow_1)] in\n let underburrowed_burrow_ids = [(alice_addr, alice_burrow_1)] in\n liquidatable_burrow_ids, underburrowed_burrow_ids, checker\n\nlet checker_with_queued_liquidation_slices () =\n let liquidatable_burrow_ids, _, checker = checker_with_liquidatable_burrows () in\n Mark the burrows for liquidation . This will add slices to the queue .\n let checker, close_slice_details, other_slice_details = List.fold_left\n (fun (checker, close_liquidation_slices, other_liquidation_slices) burrow_id ->\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_mark_for_liquidation (checker, burrow_id) in\n let new_slice = Option.get (SliceList.slice_list_youngest (SliceList.slice_list_from_auction_state checker.liquidation_auctions burrow_id) checker.liquidation_auctions) in\n let slice_ptr = SliceList.slice_list_element_ptr new_slice in\n let slize_tez = (SliceList.slice_list_element_contents new_slice).tok in\n let is_burrow_now_closed = not (burrow_active (Option.get (Ligo.Big_map.find_opt burrow_id checker.burrows))) in\n let close_liquidation_slices, other_liquidation_slices =\n if is_burrow_now_closed then\n (List.append close_liquidation_slices [(burrow_id, slice_ptr, slize_tez)]), other_liquidation_slices\n else\n close_liquidation_slices, (List.append other_liquidation_slices [(burrow_id, slice_ptr, slize_tez)])\n in\n checker, close_liquidation_slices, other_liquidation_slices\n )\n (checker, [], [])\n liquidatable_burrow_ids\n in\n assert_bool\n \"liquidation auction queue was empty, but it was expected to have some slices\"\n (Option.is_some (Avl.avl_peek_front checker.liquidation_auctions.avl_storage checker.liquidation_auctions.queued_slices));\n assert (List.length close_slice_details > 0);\n assert (List.length other_slice_details > 0);\n close_slice_details, other_slice_details, checker\n\nlet checker_with_active_auction () =\n let _, _, checker = checker_with_queued_liquidation_slices () in\n Touch checker to start an auction\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_touch (checker, ()) in\n assert_bool \"a current liquidation auction should have been started but was not\" (Option.is_some checker.liquidation_auctions.current_auction);\n checker\n\nlet checker_with_completed_auction () =\n let checker = checker_with_active_auction () in\n let auction_details = Checker.view_current_liquidation_auction_details ((), checker) in\n let bidder = alice_addr in\n let new_burrow_no = Ligo.nat_from_literal \"100n\" in\n 
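(* NOTE (illustrative sketch, not part of the original test file; figures are
   inferred from the expectations in this file rather than quoted from the
   Constants module): the fixture below has [bidder] win an auction by
   creating a large burrow, minting exactly the auction's minimum bid in kit,
   placing that bid, and then waiting long enough for the auction to close on
   the next touch.  The wait used further down (~seconds_passed:1202,
   ~blocks_passed:22) is consistent with a bid interval of roughly 20 blocks
   and 1200 seconds, which the view_current_liquidation_auction_details
   ascending-auction expectations elsewhere in this file also suggest:

     remaining_blocks  after 22 blocks since the bid :   20 -  22 = -2
     remaining_seconds after 500 s     since the bid : 1200 - 500 = 700

   so waiting 22 blocks / 1202 seconds before the final touch is just past
   both limits. *)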
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bidder ~amount:Common.tez_zero;\n let _, checker = Checker.entrypoint_create_burrow (checker, (new_burrow_no, None, tok_of_denomination (Ligo.nat_from_literal \"1_000_000_000n\"))) in\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bidder ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_mint_kit (checker, (new_burrow_no, auction_details.minimum_bid)) in\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bidder ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_liquidation_auction_place_bid (checker, ((Option.get checker.liquidation_auctions.current_auction).contents, auction_details.minimum_bid)) in\n Touch checker to start an auction\n Ligo.Tezos.new_transaction ~seconds_passed:1202 ~blocks_passed:22 ~sender:bidder ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_touch (checker, ()) in\n assert_bool\n \"there was not a completed liquidation auction but one should exist\"\n (Option.is_some checker.liquidation_auctions.completed_auctions);\n bidder, checker\n\n Helper for creating new burrows and extracting their ID from the corresponding Ligo Ops\nlet newly_created_burrow (checker: checker) (burrow_no: string) (collateral: tok) : burrow_id * checker =\n let _ops, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal \"0n\", None, collateral)) in\n ((!Ligo.Tezos.sender, Ligo.nat_from_literal burrow_no), checker)\n\nlet get_balance_of (checker: checker) (addr: Ligo.address) (tok: fa2_token_id): Ligo.nat =\n let ops, _checker = Checker.strict_entrypoint_balance_of (checker, { requests = [{ owner=addr; token_id=tok }]; callback=Ligo.contract_of_address addr}) in\n match ops with\n | [ Transaction (FA2BalanceOfResponseTransactionValue [ { request = _; balance = kit } ], _, _) ] -> kit\n | _ -> failwith (\"Unexpected fa2 response, got: \" ^ show_operation_list ops)\n\nlet suite =\n \"Checker tests\" >::: [\n (\"initial touch (noop)\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let checker1 = empty_checker in\n let ops, checker2 = Checker.touch_with_index checker1 (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal \"0n\")) in\n\n assert_operation_list_equal ~expected:[] ~real:ops;\n ()\n );\n\n (\"create_burrow - updates checker storage\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n\n let burrow_id, checker = newly_created_burrow empty_checker \"0n\" (tok_of_denomination (Ligo.nat_from_literal \"1_000_000n\")) in\n\n assert_bool\n \"No matching burrow found after calling create_burrow\"\n (Option.is_some (Ligo.Big_map.find_opt burrow_id checker.burrows));\n assert_bool\n \"The burrow existed before calling create_burrow\"\n (Option.is_none (Ligo.Big_map.find_opt burrow_id empty_checker.burrows))\n );\n\n (\"create_burrow - collateral in burrow representation does not include creation deposit\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n\n let burrow_id, checker = newly_created_burrow empty_checker \"0n\" Constants.creation_deposit in\n\n let expected_collateral = tok_zero in\n match Ligo.Big_map.find_opt burrow_id checker.burrows with\n | Some burrow -> assert_tok_equal ~expected:expected_collateral ~real:(burrow_collateral burrow)\n | None -> assert_failure 
\"Expected a burrow representation to exist but none was found\"\n );\n\n (\"create_burrow - fails when transaction amount is one mutez below creation deposit\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let amount = tok_sub Constants.creation_deposit (tok_of_denomination (Ligo.nat_from_literal \"1n\")) in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n\n assert_raises\n (Failure (Ligo.string_of_int error_InsufficientFunds))\n (fun () -> Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal \"0n\", None, amount)))\n );\n\n (\"create_burrow - passes when transaction amount is exactly the creation deposit\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let burrow_id, checker = newly_created_burrow empty_checker \"0n\" Constants.creation_deposit in\n\n match Ligo.Big_map.find_opt burrow_id checker.burrows with\n | Some burrow ->\n assert_tok_equal ~expected:tok_zero ~real:(burrow_collateral burrow)\n | None -> assert_failure \"Expected a burrow representation to exist but none was found\"\n );\n\n (\"deposit_collateral - owner can deposit\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let initial_deposit = tok_of_denomination (Ligo.nat_from_literal \"3_000_000n\") in\n let deposit = tok_of_denomination (Ligo.nat_from_literal \"3_000_000n\") in\n let expected_collateral = tok_add deposit (tok_sub initial_deposit Constants.creation_deposit) in\n\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let (_, burrow_no) as burrow_id, checker = newly_created_burrow empty_checker \"0n\" initial_deposit in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let _, checker = Checker.entrypoint_deposit_collateral (checker, (burrow_no, deposit)) in\n\n match Ligo.Big_map.find_opt burrow_id checker.burrows with\n | Some burrow -> assert_tok_equal ~expected:expected_collateral ~real:(burrow_collateral burrow)\n | None -> assert_failure \"Expected a burrow representation to exist but none was found\"\n );\n\n (\"deposit_collateral - non-owner cannot deposit\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n\n let _, checker = newly_created_burrow empty_checker \"0n\" (tok_of_denomination (Ligo.nat_from_literal \"3_000_000n\"))in\n\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:Common.tez_zero;\n assert_raises\n (Failure (Ligo.string_of_int error_NonExistentBurrow))\n (fun () -> Checker.entrypoint_deposit_collateral (checker, (Ligo.nat_from_literal \"0n\", tok_of_denomination (Ligo.nat_from_literal \"1_000_000n\"))))\n );\n\n (\"withdraw_collateral - owner can withdraw\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let initial_deposit = tok_of_denomination (Ligo.nat_from_literal \"3_000_000n\") in\n let withdrawal = tok_of_denomination (Ligo.nat_from_literal \"1_000_000n\") in\n let expected_collateral = tok_sub initial_deposit (tok_add Constants.creation_deposit withdrawal) in\n\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let burrow_id, checker = newly_created_burrow empty_checker \"0n\" initial_deposit in\n\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let _, 
checker = Checker.entrypoint_withdraw_collateral (checker, (Ligo.nat_from_literal \"0n\", withdrawal)) in\n\n match Ligo.Big_map.find_opt burrow_id checker.burrows with\n | Some burrow -> assert_tok_equal ~expected:expected_collateral ~real:(burrow_collateral burrow)\n | None -> assert_failure \"Expected a burrow representation to exist but none was found\"\n );\n\n (\"withdraw_collateral - non-owner cannot withdraw\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let initial_deposit = tok_of_denomination (Ligo.nat_from_literal \"3_000_000n\") in\n let withdrawal = tok_of_denomination (Ligo.nat_from_literal \"1_000_000n\") in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let _, checker = newly_created_burrow empty_checker \"0n\" initial_deposit in\n\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n assert_raises\n (Failure (Ligo.string_of_int error_NonExistentBurrow))\n (fun () -> Checker.entrypoint_withdraw_collateral (checker, (Ligo.nat_from_literal \"0n\", withdrawal)))\n );\n\n (\"entrypoint_activate_burrow - emits expected operations\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero;\n let (_, burrow_no), checker = newly_created_burrow empty_checker \"0n\" (tok_of_denomination (Ligo.nat_from_literal \"100_000_000n\"))in\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_deactivate_burrow (checker, (burrow_no, alice_addr)) in\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero;\n let ops, _ = Checker.entrypoint_activate_burrow (checker, (burrow_no, Constants.creation_deposit)) in\n let burrow = Option.get (Ligo.Big_map.find_opt (alice_addr, burrow_no) checker.burrows) in\n let expected_ops = [\n (LigoOp.Tezos.fa2_transfer_transaction\n [ Fa2Interface.(\n { from_ = alice_addr;\n txs = [\n { to_ = burrow_address burrow;\n token_id = TokenMetadata.tok_token_id;\n amount = Ligo.nat_from_literal \"1_000_000n\";\n };\n ];\n }\n )\n ]\n (Ligo.tez_from_literal \"0mutez\")\n (Option.get (LigoOp.Tezos.get_entrypoint_opt \"%transfer\" checker.external_contracts.collateral_fa2))\n );\n ] in\n assert_operation_list_equal ~expected:expected_ops ~real:ops\n );\n\n (\"entrypoint_add_liquidity - emits expected operations\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let checker = empty_checker in\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero;\n let _, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal \"0n\", None, tok_of_denomination (Ligo.nat_from_literal \"100_000_000n\"))) in\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal \"0n\", (kit_of_denomination (Ligo.nat_from_literal \"10_000_000n\")))) in\n\n Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let ops, _ = Checker.entrypoint_add_liquidity\n (checker,\n ( ctok_of_denomination (Ligo.nat_from_literal \"5_000_000n\")\n , kit_of_denomination (Ligo.nat_from_literal \"5_000_000n\")\n , lqt_of_denomination (Ligo.nat_from_literal 
\"5_000_000n\")\n , Ligo.timestamp_from_seconds_literal 999\n )\n ) in\n\n let expected_ops = [\n (LigoOp.Tezos.fa2_transfer_transaction\n [ Fa2Interface.{\n from_ = alice_addr;\n txs = [\n { to_ = checker_address;\n token_id = TokenMetadata.ctok_token_id;\n amount = Ligo.nat_from_literal \"5_000_000n\";\n }\n ]\n }\n ]\n (Ligo.tez_from_literal \"0mutez\")\n (Option.get (LigoOp.Tezos.get_entrypoint_opt \"%transfer\" checker.external_contracts.ctok_fa2))\n );\n ] in\n assert_operation_list_equal ~expected:expected_ops ~real:ops\n );\n\n (\"entrypoint_burn_kit - emits expected operations\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let checker = empty_checker in\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero;\n let _, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal \"0n\", None, tok_of_denomination (Ligo.nat_from_literal \"100_000_000n\"))) in\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal \"0n\", (kit_of_denomination (Ligo.nat_from_literal \"10_000_000n\")))) in\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let ops, _ = Checker.entrypoint_burn_kit (checker, (Ligo.nat_from_literal \"0n\", (kit_of_denomination (Ligo.nat_from_literal \"10_000_000n\")))) in\n assert_operation_list_equal ~expected:[] ~real:ops\n );\n\n (\"entrypoint_create_burrow - emits expected operations\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero;\n let amnt = tok_of_denomination (Ligo.nat_from_literal \"100_000_000n\") in\n let ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal \"0n\", None, amnt)) in\n match ops with\n Note : it 's not really possible to check the first parameter of the contract here which is the \n * function which defines the contract 's logic . 
\n \n * function which defines the contract's logic.\n *)\n | [ (CreateBurrowContract (_, delegate, tez, storage)) ;\n (Transaction (FA2TransferTransactionValue _, _, _)) as op;\n ] ->\n assert_key_hash_option_equal ~expected:None ~real:delegate;\n assert_tez_equal ~expected:Common.tez_zero ~real:tez;\n assert_equal BurrowTypes.({checker_address=checker_address; collateral_fa2=collateral_fa2_addr}) storage;\n let burrow = Option.get (Ligo.Big_map.find_opt (alice_addr, (Ligo.nat_from_literal \"0n\")) checker.burrows) in\n assert_operation_equal\n ~expected:(\n LigoOp.Tezos.fa2_transfer_transaction\n [ Fa2Interface.(\n { from_ = alice_addr;\n txs = [\n { to_ = burrow_address burrow;\n token_id = TokenMetadata.tok_token_id;\n amount = tok_to_denomination_nat amnt;\n };\n ];\n }\n )\n ]\n (Ligo.tez_from_literal \"0mutez\")\n (Option.get (LigoOp.Tezos.get_entrypoint_opt \"%transfer\" checker.external_contracts.collateral_fa2))\n )\n ~real:op\n | _ -> failwith (\"Expected [CreateBurrowContract (_, _, _, _); Transaction (FA2TransferTransactionValue _, _, _)] but got \" ^ show_operation_list ops)\n );\n\n (\"entrypoint_deactivate_burrow - emits expected operations\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero;\n let (_, burrow_no), checker = newly_created_burrow empty_checker \"0n\" (tok_of_denomination (Ligo.nat_from_literal \"100_000_000n\")) in\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let ops, checker = Checker.entrypoint_deactivate_burrow (checker, (burrow_no, alice_addr)) in\n let burrow = Option.get (Ligo.Big_map.find_opt (alice_addr, burrow_no) checker.burrows) in\n let expected_ops = [\n (LigoOp.Tezos.address_nat_transaction\n (alice_addr, (Ligo.nat_from_literal \"100_000_000n\"))\n (Ligo.tez_from_literal \"0mutez\")\n (Option.get (LigoOp.Tezos.get_entrypoint_opt \"%burrowTransfer\" (burrow_address burrow)))\n );\n ] in\n assert_operation_list_equal ~expected:expected_ops ~real:ops\n );\n\n (\"entrypoint_deposit_collateral - emits expected operations\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let (_, burrow_no), checker = newly_created_burrow empty_checker \"0n\" (tok_of_denomination (Ligo.nat_from_literal \"3_000_000n\")) in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let ops, checker = Checker.entrypoint_deposit_collateral (checker, (burrow_no, tok_of_denomination (Ligo.nat_from_literal \"3_000_000n\"))) in\n let burrow = Option.get (Ligo.Big_map.find_opt (alice_addr, burrow_no) checker.burrows) in\n let expected_ops = [\n (LigoOp.Tezos.fa2_transfer_transaction\n [ Fa2Interface.(\n { from_ = alice_addr;\n txs = [\n { to_ = burrow_address burrow;\n token_id = TokenMetadata.tok_token_id;\n amount = Ligo.nat_from_literal \"3_000_000n\";\n };\n ];\n }\n )\n ]\n (Ligo.tez_from_literal \"0mutez\")\n (Option.get (LigoOp.Tezos.get_entrypoint_opt \"%transfer\" checker.external_contracts.collateral_fa2))\n );\n ] in\n assert_operation_list_equal ~expected:expected_ops ~real:ops\n );\n\n (\"entrypoint_liquidation_auction_place_bid - emits expected operations\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let checker = checker_with_active_auction () in\n let auction_details = Checker.view_current_liquidation_auction_details ((), checker) in\n Mint 
some kit to be able to bid\n let new_burrow_no = Ligo.nat_from_literal \"100n\" in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let _, checker = Checker.entrypoint_create_burrow (checker, (new_burrow_no, None, tok_of_denomination (Ligo.nat_from_literal \"1_000_000_000n\"))) in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_mint_kit (checker, (new_burrow_no, auction_details.minimum_bid)) in\n\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let ops, _checker = Checker.entrypoint_liquidation_auction_place_bid\n (checker,\n ((Option.get checker.liquidation_auctions.current_auction).contents, auction_details.minimum_bid))\n in\n assert_operation_list_equal ~expected:[] ~real:ops\n );\n\n (\"entrypoint_mark_for_liquidation - emits expected operations\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let liquidatable_burrow_ids, _, checker = checker_with_liquidatable_burrows () in\n let burrow_id = List.nth liquidatable_burrow_ids 0 in\n let sender = bob_addr in\n\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:sender ~amount:(Ligo.tez_from_literal \"0mutez\");\n let ops, _ = Checker.entrypoint_mark_for_liquidation (checker, burrow_id) in\n\n let burrow = Option.get (Ligo.Big_map.find_opt burrow_id checker.burrows) in\n let expected_ops = [\n (LigoOp.Tezos.address_nat_transaction\n (sender, (Ligo.nat_from_literal \"1_001_000n\"))\n (Ligo.tez_from_literal \"0mutez\")\n (Option.get (LigoOp.Tezos.get_entrypoint_opt \"%burrowTransfer\" (burrow_address burrow)))\n );\n ] in\n assert_operation_list_equal ~expected:expected_ops ~real:ops\n );\n\n (\"entrypoint_cancel_liquidation_slice - emits expected operations\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let _, slice_details, checker = checker_with_queued_liquidation_slices () in\n let ((burrow_owner, burrow_no), slice_ptr, _) = List.nth slice_details 0 in\n\n Deposit some extra collateral to one of the burrows with slices in the auction queue\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:burrow_owner ~amount:Common.tez_zero;\n let _, checker = Checker.entrypoint_deposit_collateral (checker, (burrow_no, tok_of_denomination (Ligo.nat_from_literal \"4_000_000n\"))) in\n\n Now cancel one of the burrow 's liquidation slices\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:burrow_owner ~amount:(Ligo.tez_from_literal \"0mutez\");\n let ops, _ = Checker.entrypoint_cancel_liquidation_slice (checker, slice_ptr) in\n assert_operation_list_equal ~expected:[] ~real:ops\n );\n\n (\"entrypoint_liquidation_auction_claim_win - emits expected operations\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let winning_bidder, checker = checker_with_completed_auction () in\n let auction_ptr = (Option.get checker.liquidation_auctions.completed_auctions).oldest in\n let sold_tok = (Option.get (Avl.avl_root_data checker.liquidation_auctions.avl_storage auction_ptr)).sold_tok in\n let slice_ptrs = avl_leaves_to_list checker.liquidation_auctions.avl_storage auction_ptr in\n\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_touch_liquidation_slices (checker, slice_ptrs) in\n\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 
~sender:winning_bidder ~amount:(Ligo.tez_from_literal \"0mutez\");\n let ops, _ = Checker.entrypoint_liquidation_auction_claim_win (checker, auction_ptr) in\n let expected_ops = [\n (LigoOp.Tezos.fa2_transfer_transaction\n [ Fa2Interface.(\n { from_ = !Ligo.Tezos.self_address;\n txs = [\n { to_ = winning_bidder;\n token_id = TokenMetadata.tok_token_id;\n amount = tok_to_denomination_nat sold_tok;\n };\n ];\n }\n )\n ]\n (Ligo.tez_from_literal \"0mutez\")\n (Option.get (LigoOp.Tezos.get_entrypoint_opt \"%transfer\" checker.external_contracts.collateral_fa2))\n );\n\n ] in\n assert_operation_list_equal ~expected:expected_ops ~real:ops\n );\n\n (\"entrypoint_mint_kit - emits expected operations\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let checker = empty_checker in\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero;\n let _, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal \"0n\", None, tok_of_denomination (Ligo.nat_from_literal \"100_000_000n\"))) in\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let ops, _ = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal \"0n\", (kit_of_denomination (Ligo.nat_from_literal \"10_000_000n\")))) in\n assert_operation_list_equal ~expected:[] ~real:ops\n );\n\n (\"entrypoint_set_burrow_delegate - emits expected operations\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let (_, burrow_no), checker = newly_created_burrow empty_checker \"0n\" (tok_of_denomination (Ligo.nat_from_literal \"3_000_000n\")) in\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let ops, checker = Checker.entrypoint_set_burrow_delegate (checker, (burrow_no, Some charles_key_hash)) in\n let burrow = Option.get (Ligo.Big_map.find_opt (alice_addr, burrow_no) checker.burrows) in\n let expected_ops = [\n (LigoOp.Tezos.opt_key_hash_transaction\n (Some charles_key_hash)\n (Ligo.tez_from_literal \"0mutez\")\n (Option.get (LigoOp.Tezos.get_entrypoint_opt \"%burrowSetDelegate\" (burrow_address burrow)))\n );\n ] in\n assert_operation_list_equal ~expected:expected_ops ~real:ops\n );\n\n (\"entrypoint_receive_price - emits expected operations\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let checker = empty_checker in\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:(checker.external_contracts.oracle) ~amount:(Ligo.tez_from_literal \"0mutez\");\n let ops, _ = Checker.entrypoint_receive_price (checker, (Ligo.nat_from_literal \"42n\", Tok.tok_scaling_factor_nat)) in\n assert_operation_list_equal ~expected:[] ~real:ops\n );\n\n (\"entrypoint_remove_liquidity - emits expected operations\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let checker = empty_checker in\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero;\n let _, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal \"0n\", None, tok_of_denomination (Ligo.nat_from_literal \"100_000_000n\"))) in\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal \"0n\", (kit_of_denomination (Ligo.nat_from_literal \"10_000_000n\")))) in\n 
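(* NOTE (illustrative, not part of the original test file): a reminder of the
   argument order used just below, inferred from the corresponding view names
   in this file (view_add_liquidity_max_kit_deposited,
   view_add_liquidity_min_lqt_minted, view_remove_liquidity_min_ctok_withdrawn,
   view_remove_liquidity_min_kit_withdrawn):

     entrypoint_add_liquidity    (checker, (ctok_deposited, max_kit_deposited, min_lqt_minted,     deadline))
     entrypoint_remove_liquidity (checker, (lqt_burned,     min_ctok_withdrawn, min_kit_withdrawn, deadline))

   Only the ctok leg produces an emitted operation (an FA2 %transfer against
   external_contracts.ctok_fa2, as asserted at the end of this test); kit and
   lqt balances live in checker's own FA2 ledger, which is also why
   entrypoint_mint_kit and entrypoint_burn_kit above emit no operations. *)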
Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_add_liquidity\n (checker,\n ( ctok_of_denomination (Ligo.nat_from_literal \"5_000_000n\")\n , kit_of_denomination (Ligo.nat_from_literal \"5_000_000n\")\n , lqt_of_denomination (Ligo.nat_from_literal \"5_000_000n\")\n , Ligo.timestamp_from_seconds_literal 999\n )\n ) in\n Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let ops, _ = Checker.entrypoint_remove_liquidity\n (checker,\n ( lqt_of_denomination (Ligo.nat_from_literal \"5_000_000n\")\n , ctok_of_denomination (Ligo.nat_from_literal \"5_000_000n\")\n , kit_of_denomination (Ligo.nat_from_literal \"5_000_000n\")\n , Ligo.timestamp_from_seconds_literal 999\n )\n ) in\n\n let expected_ops = [\n (LigoOp.Tezos.fa2_transfer_transaction\n [ Fa2Interface.{\n from_ = checker_address;\n txs = [\n { to_ = alice_addr;\n token_id = TokenMetadata.ctok_token_id;\n amount = Ligo.nat_from_literal \"5_000_000n\";\n }\n ]\n }\n ]\n (Ligo.tez_from_literal \"0mutez\")\n (Option.get (LigoOp.Tezos.get_entrypoint_opt \"%transfer\" checker.external_contracts.ctok_fa2))\n\n\n );\n ] in\n assert_operation_list_equal ~expected:expected_ops ~real:ops\n );\n\n FIXME : Operations differ between the FA2 deployment and the TEZ deployment \n ( \" entrypoint_touch - emits expected operations when checker needs to be touched \" > : : \n fun _ - > \n Ligo.Tezos.reset ( ) ; \n let checker = empty_checker in \n Ligo . Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender : alice_addr ~amount:(Ligo.tez_from_literal \" 0mutez \" ) ; \n let ops , _ = Checker.entrypoint_touch ( checker , ( ) ) in \n\n let expected_ops = [ \n ( LigoOp . Tezos.nat_contract_transaction \n ( Option.get ( LigoOp . Tezos.get_entrypoint_opt \" % receive_price \" ! . ) ) \n ( Ligo.tez_from_literal \" 0mutez \" ) \n ( CheckerTypes.get_oracle_entrypoint checker.external_contracts ) \n ) ; \n ( LigoOp . Tezos.nat_nat_contract_transaction \n ( Option.get ( LigoOp . Tezos.get_entrypoint_opt \" % receive_ctez_marginal_price \" ! . 
) ) \n ( Ligo.tez_from_literal \" 0mutez \" ) \n ( CheckerTypes.get_ctez_cfmm_price_entrypoint checker.external_contracts ) \n ) ; \n ] in \n assert_operation_list_equal ~expected : expected_ops ~real : ops \n ) ; \n \n (\"entrypoint_touch - emits expected operations when checker needs to be touched\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let checker = empty_checker in\n Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let ops, _ = Checker.entrypoint_touch (checker, ()) in\n\n let expected_ops = [\n (LigoOp.Tezos.nat_contract_transaction\n (Option.get (LigoOp.Tezos.get_entrypoint_opt \"%receive_price\" !Ligo.Tezos.self_address))\n (Ligo.tez_from_literal \"0mutez\")\n (CheckerTypes.get_oracle_entrypoint checker.external_contracts)\n );\n (LigoOp.Tezos.nat_nat_contract_transaction\n (Option.get (LigoOp.Tezos.get_entrypoint_opt \"%receive_ctez_marginal_price\" !Ligo.Tezos.self_address))\n (Ligo.tez_from_literal \"0mutez\")\n (CheckerTypes.get_ctez_cfmm_price_entrypoint checker.external_contracts)\n );\n ] in\n assert_operation_list_equal ~expected:expected_ops ~real:ops\n );\n *)\n\n (\"entrypoint_touch - emits expected operations when checker has already been touched\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let checker = empty_checker in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let ops, _ = Checker.entrypoint_touch (checker, ()) in\n assert_operation_list_equal ~expected:[] ~real:ops\n );\n\n (\"entrypoint_touch_liquidation_slices - emits expected operations\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let _, checker = checker_with_completed_auction () in\n let auction_ptr = (Option.get checker.liquidation_auctions.completed_auctions).oldest in\n let slice_ptrs = avl_leaves_to_list checker.liquidation_auctions.avl_storage auction_ptr in\n let slices = List.map (fun ptr -> Avl.avl_read_leaf checker.liquidation_auctions.avl_storage ptr) slice_ptrs in\n\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let ops, _ = Checker.entrypoint_touch_liquidation_slices (checker, slice_ptrs) in\n Note : opening LiquidationAuctionPrimitiveTypes locally here since we have overloaded \n * the \" contents \" record accessor in LiquidationAuctionTypes \n \n * the \"contents\" record accessor in LiquidationAuctionTypes\n *)\n\n let expected_ops = let open LiquidationAuctionPrimitiveTypes in\n List.rev (List.map (\n fun slice ->\n let burrow = Option.get (Ligo.Big_map.find_opt slice.contents.burrow checker.burrows) in\n LigoOp.Tezos.address_nat_transaction\n (checker_address, tok_to_denomination_nat slice.contents.tok)\n (Ligo.tez_from_literal \"0mutez\")\n (Option.get (LigoOp.Tezos.get_entrypoint_opt \"%burrowTransfer\" (burrow_address burrow)))\n ) slices) in\n assert_operation_list_equal ~expected:expected_ops ~real:ops\n );\n\n (\"entrypoint_touch_burrow - emits expected operations\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero;\n let _, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal \"0n\", None, tok_of_denomination (Ligo.nat_from_literal \"100_000_000n\"))) in\n Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:bob_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let ops, _ = Checker.entrypoint_touch_burrow 
(checker, (alice_addr, Ligo.nat_from_literal \"0n\")) in\n assert_operation_list_equal ~expected:[] ~real:ops\n );\n\n (\"entrypoint_withdraw_collateral - emits expected operations\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let (_, burrow_no), checker = newly_created_burrow empty_checker \"0n\" (tok_of_denomination (Ligo.nat_from_literal \"3_000_000n\")) in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let ops, checker = Checker.entrypoint_withdraw_collateral (checker, (Ligo.nat_from_literal \"0n\", tok_of_denomination (Ligo.nat_from_literal \"1_000_000n\"))) in\n let burrow = Option.get (Ligo.Big_map.find_opt (alice_addr, burrow_no) checker.burrows) in\n let expected_ops = [\n (LigoOp.Tezos.address_nat_transaction\n (alice_addr, (Ligo.nat_from_literal \"1_000_000n\"))\n (Ligo.tez_from_literal \"0mutez\")\n (Option.get (LigoOp.Tezos.get_entrypoint_opt \"%burrowTransfer\" (burrow_address burrow)))\n );\n ] in\n assert_operation_list_equal ~expected:expected_ops ~real:ops\n );\n\n (\"calculate_touch_reward - expected result for last_touched 2s ago\" >::\n fun _ ->\n The division in this case should return a remainder < 1/2\n Ligo.Tezos.reset ();\n let time_delta = 2 in\n remainder : 12000 / 36000\n let expected_reward = Ligo.int_from_literal \"3333\" in\n let last_touched = Ligo.timestamp_from_seconds_literal 0 in\n Ligo.Tezos.new_transaction ~seconds_passed:time_delta ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n\n let actual_reward = kit_to_denomination_int (Checker.calculate_touch_reward last_touched) in\n\n assert_int_equal ~expected:expected_reward ~real:actual_reward;\n );\n\n (\"calculate_touch_reward - expected result for last_touched 3s ago\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let time_delta = 3 in\n remainder : 0\n let expected_reward = Ligo.int_from_literal \"5000\" in\n let last_touched = Ligo.timestamp_from_seconds_literal 0 in\n Ligo.Tezos.new_transaction ~seconds_passed:time_delta ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n\n let actual_reward = kit_to_denomination_int (Checker.calculate_touch_reward last_touched) in\n\n assert_int_equal ~expected:expected_reward ~real:actual_reward;\n );\n\n (\"calculate_touch_reward - expected result for last_touched 4s ago\" >::\n fun _ ->\n The division in this case should return a remainder > 1/2\n Ligo.Tezos.reset ();\n let time_delta = 4 in\n remainder : 24000 / 36000\n let expected_reward = Ligo.int_from_literal \"6666\" in\n let last_touched = Ligo.timestamp_from_seconds_literal 0 in\n Ligo.Tezos.new_transaction ~seconds_passed:time_delta ~blocks_passed:2 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n\n let actual_reward = kit_to_denomination_int (Checker.calculate_touch_reward last_touched) in\n\n assert_int_equal ~expected:expected_reward ~real:actual_reward;\n\n );\n\n (\"burn_kit - owner can burn\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n\n let sender = alice_addr in\n\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:Common.tez_zero;\n let _, checker = newly_created_burrow empty_checker \"0n\" (tok_of_denomination (Ligo.nat_from_literal \"10_000_000n\")) in\n\n Mint as much kit as possible\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal \"0mutez\");\n 
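(* NOTE (illustrative, not part of the original test file): where the
   4_285_714n figure used just below comes from.  The burrow was funded with
   10_000_000n tok, of which 1_000_000n is the creation deposit (the same
   1_000_000n that appears in the entrypoint_activate_burrow expectation
   above), leaving 9_000_000n of collateral.  With the index at parity the
   maximum mintable kit works out to

     9_000_000 / 2.1 = 4_285_714.28...  ->  4_285_714n  (rounded down)

   so the test mints the largest amount the burrow can support.  The factor
   2.1 is inferred from this arithmetic; the authoritative minting factor is
   defined in the project's Constants module. *)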
let (ops, checker) =\n Checker.entrypoint_mint_kit\n ( checker\n , (Ligo.nat_from_literal \"0n\", kit_of_denomination (Ligo.nat_from_literal \"4_285_714n\"))\n ) in\n\n assert_operation_list_equal ~expected:[] ~real:ops;\n\n let kit_token = kit_of_denomination (Fa2Ledger.get_fa2_ledger_value checker.fa2_state.ledger (TokenMetadata.kit_token_id, sender)) in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _ = Checker.entrypoint_burn_kit (checker, (Ligo.nat_from_literal \"0n\", kit_token)) in\n\n ()\n );\n\n (\"burn_kit - non-owner cannot burn\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let _, checker = newly_created_burrow empty_checker \"0n\" (tok_of_denomination (Ligo.nat_from_literal \"10_000_000n\")) in\n\n Mint as much kit as possible\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let (ops, checker) =\n Checker.entrypoint_mint_kit\n ( checker\n , (Ligo.nat_from_literal \"0n\", kit_of_denomination (Ligo.nat_from_literal \"4_285_714n\"))\n ) in\n\n assert_operation_list_equal ~expected:[] ~real:ops;\n\n assert_raises\n (Failure (Ligo.string_of_int error_NonExistentBurrow))\n (fun () ->\n let kit_token = kit_of_denomination (Fa2Ledger.get_fa2_ledger_value checker.fa2_state.ledger (TokenMetadata.kit_token_id, bob_addr)) in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n Checker.entrypoint_burn_kit (checker, (Ligo.nat_from_literal \"0n\", kit_token))\n );\n\n ()\n );\n\n (\n Ligo.Tezos.reset();\n\n qcheck_to_ounit\n @@ QCheck.Test.make\n ~name:\"test_buy_kit_respects_min_kit_expected\"\n ~count:property_test_count\n make_inputs_for_buy_kit_to_succeed\n @@ fun (cfmm, ctok_amount, min_kit_expected, deadline) ->\n\n let sender = alice_addr in\n let checker = empty_checker_with_cfmm cfmm in\n\n\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal \"0mutez\");\n let ops, checker = Checker.entrypoint_buy_kit (checker, (ctok_amount, min_kit_expected, deadline)) in\n\n\n begin match ops with\n | [Transaction (FA2TransferTransactionValue transfer, _, _)] ->\n assert_fa2_transfer_list_equal\n ~expected:[\n Fa2Interface.{\n from_ = sender;\n txs = [\n { to_ = checker_address;\n token_id = TokenMetadata.ctok_token_id;\n amount = ctok_to_denomination_nat ctok_amount;\n }\n ]\n }\n ]\n ~real:transfer\n | _ -> failwith (\"Expected [Transaction (FA2TransferTransactionValue _, _, _)] but got \" ^ show_operation_list ops)\n end;\n\n Ligo.geq_nat_nat\n senders_new_kit\n (Ligo.add_nat_nat senders_old_kit (kit_to_denomination_nat min_kit_expected))\n );\n\n (\n Ligo.Tezos.reset();\n\n qcheck_to_ounit\n @@ QCheck.Test.make\n ~name:\"test_buy_kit_preserves_kit\"\n ~count:property_test_count\n make_inputs_for_buy_kit_to_succeed\n @@ fun (cfmm, ctok_amount, min_kit_expected, deadline) ->\n\n let checker = empty_checker_with_cfmm cfmm in\n let sender = alice_addr in\n\n let checker_cfmm_old_kit = kit_to_denomination_nat checker.cfmm.kit in\n\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal \"0mutez\");\n let ops, checker = Checker.entrypoint_buy_kit (checker, (ctok_amount, min_kit_expected, deadline)) in\n\n let checker_cfmm_new_kit = kit_to_denomination_nat 
checker.cfmm.kit in\n\n begin match ops with\n | [Transaction (FA2TransferTransactionValue transfer, _, _)] ->\n assert_fa2_transfer_list_equal\n ~expected:[\n Fa2Interface.{\n from_ = sender;\n txs = [\n { to_ = checker_address;\n token_id = TokenMetadata.ctok_token_id;\n amount = ctok_to_denomination_nat ctok_amount;\n }\n ]\n }\n ]\n ~real:transfer\n | _ -> failwith (\"Expected [Transaction (FA2TransferTransactionValue _, _, _)] but got \" ^ show_operation_list ops)\n end;\n\n Ligo.eq_nat_nat\n (Ligo.add_nat_nat checker_cfmm_old_kit senders_old_kit)\n (Ligo.add_nat_nat checker_cfmm_new_kit senders_new_kit)\n );\n\n (\n Ligo.Tezos.reset();\n\n qcheck_to_ounit\n @@ QCheck.Test.make\n ~name:\"test_buy_kit_preserves_tez\"\n ~count:property_test_count\n make_inputs_for_buy_kit_to_succeed\n @@ fun (cfmm, ctok_amount, min_kit_expected, deadline) ->\n let checker = empty_checker_with_cfmm cfmm in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, new_checker = Checker.entrypoint_buy_kit (checker, (ctok_amount, min_kit_expected, deadline)) in\n ctok_add checker.cfmm.ctok ctok_amount = new_checker.cfmm.ctok\n );\n\n (\n Ligo.Tezos.reset();\n\n qcheck_to_ounit\n @@ QCheck.Test.make\n ~name:\"test_sell_kit_respects_min_tez_expected\"\n ~count:property_test_count\n make_inputs_for_sell_kit_to_succeed\n @@ fun (cfmm, kit_amount, min_ctok_expected, deadline) ->\n let sender = alice_addr in\n let checker =\n let checker = empty_checker_with_cfmm cfmm in\n { checker with\n parameters =\n { checker.parameters with circulating_kit = kit_add checker.parameters.circulating_kit kit_amount };\n fa2_state = ledger_issue_kit (checker.fa2_state, sender, kit_amount);\n } in\n Checker.assert_checker_invariants checker;\n\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal \"0mutez\");\n let ops, _ = Checker.entrypoint_sell_kit (checker, (kit_amount, min_ctok_expected, deadline)) in\n let bought_muctok = match ops with\n | [Transaction (FA2TransferTransactionValue [{from_=from_address; txs=[tx];}], _, _)] ->\n begin\n assert_address_equal ~expected:checker_address ~real:from_address;\n assert_address_equal ~expected:sender ~real:tx.to_;\n tx.amount\n end\n | _ -> failwith (\"Expected [Transaction (FA2TransferTransactionValue [{from_=_; txs=[_];}], _, _)] but got \" ^ show_operation_list ops)\n in\n ctok_of_denomination bought_muctok >= min_ctok_expected\n );\n\n (\n Ligo.Tezos.reset();\n\n qcheck_to_ounit\n @@ QCheck.Test.make\n ~name:\"test_sell_kit_preserves_kit\"\n ~count:property_test_count\n make_inputs_for_sell_kit_to_succeed\n @@ fun (cfmm, kit_amount, min_ctok_expected, deadline) ->\n let sender = alice_addr in\n let checker =\n let checker = empty_checker_with_cfmm cfmm in\n { checker with\n parameters =\n { checker.parameters with circulating_kit = kit_add checker.parameters.circulating_kit kit_amount };\n fa2_state = ledger_issue_kit (checker.fa2_state, sender, kit_amount);\n } in\n Checker.assert_checker_invariants checker;\n\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, new_checker = Checker.entrypoint_sell_kit (checker, (kit_amount, min_ctok_expected, deadline)) in\n kit_add checker.cfmm.kit kit_amount = new_checker.cfmm.kit\n );\n\n (\n Ligo.Tezos.reset();\n\n qcheck_to_ounit\n @@ QCheck.Test.make\n ~name:\"test_sell_kit_preserves_tez\"\n ~count:property_test_count\n 
make_inputs_for_sell_kit_to_succeed\n @@ fun (cfmm, kit_amount, min_ctok_expected, deadline) ->\n let sender = alice_addr in\n let checker =\n let checker = empty_checker_with_cfmm cfmm in\n { checker with\n parameters =\n { checker.parameters with circulating_kit = kit_add checker.parameters.circulating_kit kit_amount };\n fa2_state = ledger_issue_kit (checker.fa2_state, sender, kit_amount);\n } in\n Checker.assert_checker_invariants checker;\n\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal \"0mutez\");\n let ops, new_checker = Checker.entrypoint_sell_kit (checker, (kit_amount, min_ctok_expected, deadline)) in\n\n let bought_muctok = match ops with\n | [Transaction (FA2TransferTransactionValue [{from_=from_address; txs=[tx];}], _, _)] ->\n begin\n assert_address_equal ~expected:checker_address ~real:from_address;\n assert_address_equal ~expected:sender ~real:tx.to_;\n tx.amount\n end\n | _ -> failwith (\"Expected [Transaction (FA2TransferTransactionValue [{from_=_; txs=[_];}], _, _)] but got \" ^ show_operation_list ops)\n in\n ctok_add new_checker.cfmm.ctok (ctok_of_denomination bought_muctok) = checker.cfmm.ctok\n );\n\n (\n let cfmm_kit = Ligo.nat_from_literal (\"1_000n\") in\n let cfmm_ctok = ctok_of_denomination (Ligo.nat_from_literal (\"1_000n\")) in\n The maximum amount of kit that you can buy with a finite amount of tez is \n * ( 1 - fee ) * cfmm.kit - 1 \n \n * (1 - fee) * cfmm.kit - 1\n *)\n let max_buyable_kit = 997 in\n let arb_kit = QCheck.map (fun x -> kit_of_denomination (Ligo.nat_from_literal (string_of_int x ^ \"n\"))) QCheck.(1 -- max_buyable_kit) in\n let arb_tez = TestArbitrary.arb_small_positive_tez in\n\n qcheck_to_ounit\n @@ QCheck.Test.make\n ~name:\"buy_kit - returns geq min_kit_expected kit for transactions with sufficient tez\"\n ~count:property_test_count\n (QCheck.pair arb_kit arb_tez)\n @@ fun (min_expected_kit, additional_tez) ->\n\n Ligo.Tezos.reset();\n let sender = alice_addr in\n\n Populate cfmm with initial liquidity\n let open Ratio in\n let checker =\n empty_checker_with_cfmm\n { empty_checker.cfmm with\n ctok = cfmm_ctok;\n kit = kit_of_denomination cfmm_kit;\n } in\n\n Calculate minimum tez to get the min_expected kit given the state of the cfmm defined above\n let ratio_minimum_tez = div_ratio\n (ratio_of_nat cfmm_kit)\n (\n sub_ratio\n (div_ratio (ratio_of_nat (Ligo.nat_from_literal \"998n\")) (ratio_of_nat (kit_to_denomination_nat min_expected_kit)))\n (ratio_of_nat (Ligo.nat_from_literal \"1n\"))\n ) in\n let minimum_tez = Ligo.mul_nat_tez (Ligo.abs (Common.cdiv_int_int ratio_minimum_tez.num ratio_minimum_tez.den)) (Ligo.tez_from_literal \"1mutez\") in\n\n\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal \"0mutez\");\n let ops, checker = Checker.entrypoint_buy_kit (checker, (ctok_provided, min_expected_kit, Ligo.timestamp_from_seconds_literal 1)) in\n\n begin match ops with\n | [Transaction (FA2TransferTransactionValue transfer, _, _)] ->\n assert_fa2_transfer_list_equal\n ~expected:[\n Fa2Interface.{\n from_ = sender;\n txs = [\n { to_ = checker_address;\n token_id = TokenMetadata.ctok_token_id;\n amount = Ctok.ctok_to_denomination_nat ctok_provided;\n }\n ]\n }\n ]\n ~real:transfer\n | _ -> failwith (\"Expected [Transaction (FA2TransferTransactionValue _, _, _)] but got \" ^ show_operation_list ops)\n end;\n\n\n Ligo.geq_nat_nat\n senders_new_kit\n (Ligo.add_nat_nat senders_old_kit (kit_to_denomination_nat 
min_expected_kit))\n  );\n\n  (* FIXME: DISABLING THIS UNIT TEST. Disabled this unit test which was written for the case of indexCfmm.ml. Once we have\n   * a better way of testing different concrete cfmm implementations we should be able to re-enable this. *)\n  (*\n     let checker =\n       empty_checker_with_cfmm\n         { empty_checker.cfmm with\n           ctok = ctok_of_denomination (Ligo.nat_from_literal \"2n\");\n           kit = kit_of_denomination (Ligo.nat_from_literal \"2n\");\n         } in\n\n     Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n     let ops, checker = Checker.entrypoint_buy_kit (checker, (ctok_of_denomination (Ligo.nat_from_literal \"1_000_000n\"), kit_of_denomination (Ligo.nat_from_literal \"1n\"), Ligo.timestamp_from_seconds_literal 1)) in\n     let kit = get_balance_of checker alice_addr TokenMetadata.kit_token_id in\n\n     let expected_ops = [\n       (LigoOp.Tezos.fa2_transfer_transaction\n          [ Fa2Interface.{\n                from_ = alice_addr;\n                txs = [\n                  { to_ = checker_address;\n                    token_id = TokenMetadata.ctok_token_id;\n                    amount = Ligo.nat_from_literal \"1_000_000n\";\n                  }\n                ]\n              }\n          ]\n          (Ligo.tez_from_literal \"0mutez\")\n          (Option.get (LigoOp.Tezos.get_entrypoint_opt \"%transfer\" checker.external_contracts.ctok_fa2))\n       );\n     ] in\n     assert_nat_equal ~expected:(Ligo.nat_from_literal \"1n\") ~real:kit;\n     assert_operation_list_equal ~expected:expected_ops ~real:ops\n  ); *)\n\n
  (* FIXME: DISABLING THIS UNIT TEST. Disabled this unit test which was written for the case of indexCfmm.ml. Once we have\n   * a better way of testing different concrete cfmm implementations we should be able to re-enable this. *)\n  (*\n  (\"sell_kit - returns expected tez\" >::\n     fun _ ->\n       Ligo.Tezos.reset ();\n\n       let kit_to_sell = kit_of_denomination (Ligo.nat_from_literal \"1_000_000n\") in\n       let min_ctok_expected = ctok_of_denomination (Ligo.nat_from_literal \"1n\") in\n\n       let checker =\n         let checker =\n           empty_checker_with_cfmm\n             { empty_checker.cfmm with\n               ctok = ctok_of_denomination (Ligo.nat_from_literal \"2n\");\n               kit = kit_of_denomination (Ligo.nat_from_literal \"2n\");\n               lqt = lqt_of_denomination (Ligo.nat_from_literal \"1n\");\n             } in\n         { checker with\n           parameters =\n             { checker.parameters with circulating_kit = kit_add checker.parameters.circulating_kit kit_to_sell };\n           fa2_state = ledger_issue_kit (checker.fa2_state, alice_addr, kit_to_sell);\n         } in\n       Checker.assert_checker_invariants checker;\n\n       Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n       let ops, _ = Checker.entrypoint_sell_kit (checker, (kit_to_sell, min_ctok_expected, Ligo.timestamp_from_seconds_literal 1)) in\n\n       let expected_ops = [\n         (LigoOp.Tezos.fa2_transfer_transaction\n            [ Fa2Interface.{\n                  from_ = checker_address;\n                  txs = [\n                    { to_ = alice_addr;\n                      token_id = TokenMetadata.ctok_token_id;\n                      amount = Ligo.nat_from_literal \"1n\";\n                    }\n                  ]\n                }\n            ]\n            (Ligo.tez_from_literal \"0mutez\")\n            (Option.get (LigoOp.Tezos.get_entrypoint_opt \"%transfer\" checker.external_contracts.ctok_fa2))\n         );\n       ] in\n       assert_operation_list_equal ~expected:expected_ops ~real:ops\n  ); *)\n\n
  (\"remove_liquidity - returns expected kit and tez\" >::\n     fun _ ->\n       Ligo.Tezos.reset ();\n\n       let min_kit_expected = kit_of_denomination (Ligo.nat_from_literal \"1n\") in\n       let min_ctok_expected = ctok_of_denomination (Ligo.nat_from_literal \"1n\") in\n       let my_liquidity_tokens = lqt_of_denomination (Ligo.nat_from_literal \"1n\") in\n       let sender = alice_addr in\n\n       let checker =\n         { empty_checker with\n           parameters = { empty_checker.parameters with circulating_kit = kit_of_denomination (Ligo.nat_from_literal \"1n\")};\n           cfmm =\n             { empty_checker.cfmm with\n               ctok = ctok_of_denomination (Ligo.nat_from_literal \"2n\");\n               kit = kit_of_denomination (Ligo.nat_from_literal \"2n\");\n               lqt = lqt_of_denomination (Ligo.nat_from_literal \"2n\");\n             };\n           fa2_state =\n             let fa2_state = initial_fa2_state in\n             let fa2_state = ledger_issue_lqt (fa2_state, sender, my_liquidity_tokens) in\n             let fa2_state = ledger_issue_kit (fa2_state, !Ligo.Tezos.self_address, kit_of_denomination (Ligo.nat_from_literal \"1n\")) in\n             fa2_state;\n         } in\n       Checker.assert_checker_invariants checker;\n\n       Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal \"0mutez\");\n       let ops, checker = Checker.entrypoint_remove_liquidity (checker, (my_liquidity_tokens, min_ctok_expected, min_kit_expected, Ligo.timestamp_from_seconds_literal 1)) in\n       let ctok = match ops with\n         | [Transaction (FA2TransferTransactionValue [{from_=from_address; txs=[tx];}], _, _)] ->\n           begin\n             assert_address_equal ~expected:checker_address ~real:from_address;\n             assert_address_equal ~expected:sender ~real:tx.to_;\n             tx.amount\n           end\n         | _ -> failwith (\"Expected [Transaction (FA2TransferTransactionValue [{from_=_; txs=[_];}], _, _)] but got \" ^ show_operation_list ops)\n       in\n       let kit = get_balance_of checker sender TokenMetadata.kit_token_id in\n\n       assert_nat_equal ~expected:(Ligo.nat_from_literal \"1n\") 
~real:kit;\n assert_nat_equal ~expected:(Ligo.nat_from_literal \"1n\") ~real:ctok;\n ()\n );\n\n (\"fa2 scenario\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let checker = empty_checker in\n\n let initial_addr = Ligo.address_of_string \"INIT_ADDR\" in\n\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:initial_addr ~amount:Common.tez_zero;\n let _, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal \"0n\", None, tok_of_denomination (Ligo.nat_from_literal \"100_000_000n\"))) in\n let max_kit = Checker.view_burrow_max_mintable_kit ((initial_addr, Ligo.nat_from_literal \"0n\"), checker) in\n\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:initial_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal \"0n\", max_kit)) in\n\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:initial_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker =\n Checker.entrypoint_add_liquidity\n ( checker,\n ( ctok_of_denomination (Ligo.nat_from_literal \"5_000_000n\")\n , kit_of_denomination (Ligo.nat_from_literal \"5_000_000n\")\n , lqt_of_denomination (Ligo.nat_from_literal \"5n\")\n , Ligo.timestamp_from_seconds_literal 999\n )\n ) in\n\n initialize alice , and leena accounts\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:initial_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.strict_entrypoint_transfer (checker, [\n { from_ = initial_addr;\n txs = [\n { to_ = alice_addr; token_id = TokenMetadata.kit_token_id; amount = Ligo.nat_from_literal \"5n\" };\n { to_ = bob_addr; token_id = TokenMetadata.lqt_token_id; amount = Ligo.nat_from_literal \"5n\" }\n ];\n }]) in\n\n let balance chk addr tok = Checker.view_get_balance ((addr, tok), chk) in\n\n assert_nat_equal ~real:(balance checker alice_addr TokenMetadata.kit_token_id) ~expected:(Ligo.nat_from_literal \"5n\");\n assert_nat_equal ~real:(balance checker alice_addr TokenMetadata.lqt_token_id) ~expected:(Ligo.nat_from_literal \"0n\");\n\n assert_nat_equal ~real:(balance checker bob_addr TokenMetadata.kit_token_id) ~expected:(Ligo.nat_from_literal \"0n\");\n assert_nat_equal ~real:(balance checker bob_addr TokenMetadata.lqt_token_id) ~expected:(Ligo.nat_from_literal \"5n\");\n\n assert_nat_equal ~real:(balance checker leena_addr TokenMetadata.kit_token_id) ~expected:(Ligo.nat_from_literal \"0n\");\n assert_nat_equal ~real:(balance checker leena_addr TokenMetadata.lqt_token_id) ~expected:(Ligo.nat_from_literal \"0n\");\n\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bob_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_update_operators (checker, [\n (Add_operator { owner = bob_addr; operator = leena_addr; token_id = TokenMetadata.kit_token_id })]) in\n\n assert_equal true (Checker.view_is_operator ((bob_addr, (leena_addr, TokenMetadata.kit_token_id)), checker));\n assert_equal false (Checker.view_is_operator ((bob_addr, (leena_addr, TokenMetadata.lqt_token_id)), checker));\n assert_equal false (Checker.view_is_operator ((leena_addr, (bob_addr, TokenMetadata.kit_token_id)), checker));\n\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.strict_entrypoint_transfer (checker, [\n { from_=alice_addr; txs=[{to_=bob_addr; 
token_id=TokenMetadata.kit_token_id;amount=Ligo.nat_from_literal \"2n\"}]}]) in\n\n assert_nat_equal ~real:(balance checker alice_addr TokenMetadata.kit_token_id) ~expected:(Ligo.nat_from_literal \"3n\");\n assert_nat_equal ~real:(balance checker bob_addr TokenMetadata.kit_token_id) ~expected:(Ligo.nat_from_literal \"2n\");\n\n assert_raises\n (Failure \"FA2_INSUFFICIENT_BALANCE\")\n (fun () -> Checker.strict_entrypoint_transfer (checker, [\n { from_=alice_addr; txs=[{to_=bob_addr; token_id=TokenMetadata.kit_token_id; amount=Ligo.nat_from_literal \"10n\"}]}]));\n\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:leena_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.strict_entrypoint_transfer (checker, [\n { from_=bob_addr; txs=[{to_=alice_addr; token_id=TokenMetadata.kit_token_id; amount=Ligo.nat_from_literal \"1n\"}]}]) in\n\n assert_nat_equal ~real:(balance checker alice_addr TokenMetadata.kit_token_id) ~expected:(Ligo.nat_from_literal \"4n\");\n assert_nat_equal ~real:(balance checker bob_addr TokenMetadata.kit_token_id) ~expected:(Ligo.nat_from_literal \"1n\");\n\n but leena can not even send a single kit from 's account when he 's not an operator anymore\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bob_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_update_operators (checker, [\n (Remove_operator { owner = bob_addr; operator = leena_addr; token_id = TokenMetadata.kit_token_id })]) in\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:leena_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n\n assert_raises\n (Failure \"FA2_NOT_OPERATOR\")\n (fun () -> Checker.strict_entrypoint_transfer (checker, [\n { from_=bob_addr; txs=[{to_=alice_addr; token_id=TokenMetadata.kit_token_id; amount=Ligo.nat_from_literal \"1n\"}]}]));\n ()\n );\n\n (\"view_total_supply (FA2) - initial kit supply\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let total_kit_amount = Checker.view_total_supply (TokenMetadata.kit_token_id, empty_checker) in\n assert_nat_equal ~expected:(Ligo.nat_from_literal \"0n\") ~real:total_kit_amount;\n ()\n );\n\n (\"view_total_supply (FA2) - initial lqt supply\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let total_lqt_amount = Checker.view_total_supply (TokenMetadata.lqt_token_id, empty_checker) in\n assert_nat_equal ~expected:(Ligo.nat_from_literal \"0n\") ~real:total_lqt_amount;\n ()\n );\n\n (\"view_total_supply (FA2) - undefined token id\" >::\n fun _ ->\n assert_raises\n (Failure \"FA2_TOKEN_UNDEFINED\")\n (fun () -> Checker.view_total_supply (Ligo.nat_from_literal \"3n\", empty_checker))\n );\n\n (\"view_all_tokens (FA2)\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let all_tokens = Checker.view_all_tokens ((), empty_checker) in\n assert_nat_list_equal\n ~expected:[ TokenMetadata.kit_token_id; TokenMetadata.lqt_token_id ]\n ~real:all_tokens;\n ()\n );\n\n (\"entrypoint_liquidation_auction_place_bid: should only allow the current auction\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let checker = { empty_checker with last_index = Some (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal \"1_000_000n\")) } in\n\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_touch (checker, ()) in\n\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero;\n let _, checker = Checker.entrypoint_create_burrow 
(checker, (Ligo.nat_from_literal \"0n\", None, tok_of_denomination (Ligo.nat_from_literal \"200_000_000n\"))) in\n let max_kit = Checker.view_burrow_max_mintable_kit ((alice_addr, Ligo.nat_from_literal \"0n\"), checker) in\n\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal \"0n\", max_kit)) in\n let checker = { checker with last_index = Some (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal \"10_000_000n\")) } in\n let _, checker = Checker.entrypoint_touch (checker, ()) in\n\n Ligo.Tezos.new_transaction ~seconds_passed:1_000_000 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_touch (checker, ()) in\n\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_touch_burrow (checker, (alice_addr, Ligo.nat_from_literal \"0n\")) in\n let _, checker = Checker.entrypoint_mark_for_liquidation (checker, (alice_addr, Ligo.nat_from_literal \"0n\")) in\n let _, checker = Checker.entrypoint_touch (checker, ()) in\n\n let res = Checker.view_current_liquidation_auction_details ((), checker) in\n let other_ptr = match res.auction_id with AVLPtr i -> Ptr.ptr_next i in\n\n assert_raises\n (Failure (Ligo.string_of_int error_InvalidLiquidationAuction))\n (fun () -> Checker.entrypoint_liquidation_auction_place_bid (checker, (AVLPtr other_ptr, res.minimum_bid)));\n );\n\n (\"can complete a liquidation auction\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let checker = empty_checker in\n\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let _, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal \"0n\", None, tok_of_denomination (Ligo.nat_from_literal \"200_000_000n\"))) in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal \"0n\", kit_of_denomination (Ligo.nat_from_literal \"10_000_000n\"))) in\n\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _lqt_minted_ret_kit_ops, checker =\n Checker.entrypoint_add_liquidity\n ( checker\n , ( ctok_of_denomination (Ligo.nat_from_literal \"1_000_000n\")\n , kit_one\n , lqt_of_denomination (Ligo.nat_from_literal \"1n\")\n , Ligo.timestamp_from_seconds_literal 1\n )\n\n let () =\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:Common.tez_zero;\n let (ops, checker0) = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal \"0n\", None, tez)) in\n let burrow_addr =\n burrow_address\n (Option.get (Ligo.Big_map.find_opt (bob_addr, Ligo.nat_from_literal \"0n\") checker0.burrows)) in\n let () = match ops with\n | [ CreateBurrowContract (_, cb_delegate, cb_tez, cb_storage) ;\n (Transaction (FA2TransferTransactionValue _, _, _)) as op ;\n ] ->\n assert_key_hash_option_equal ~expected:None ~real:cb_delegate;\n assert_tez_equal ~expected:Common.tez_zero ~real:cb_tez;\n assert_equal BurrowTypes.({checker_address=checker_address; collateral_fa2=collateral_fa2_addr}) cb_storage;\n assert_operation_equal\n ~expected:(\n LigoOp.Tezos.fa2_transfer_transaction\n [ Fa2Interface.(\n { from_ = 
bob_addr;\n txs = [\n { to_ = burrow_addr;\n token_id = TokenMetadata.tok_token_id;\n amount = tok_to_denomination_nat tez;\n };\n ];\n }\n )\n ]\n (Ligo.tez_from_literal \"0mutez\")\n (Option.get (LigoOp.Tezos.get_entrypoint_opt \"%transfer\" checker.external_contracts.collateral_fa2))\n )\n ~real:op\n | _ -> assert_failure (\"Expected [CreateBurrowContract (_, _, _, _); Transaction (FA2TransferTransactionValue _, _, _)] but got \" ^ show_operation_list ops) in\n\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let (ops, checker1) = Checker.entrypoint_deactivate_burrow (checker0, (Ligo.nat_from_literal \"0n\", alice_addr)) in\n assert_operation_list_equal\n ~expected:[\n LigoOp.Tezos.address_nat_transaction\n (alice_addr, tok_to_denomination_nat tez)\n (Ligo.tez_from_literal \"0mutez\")\n (Option.get (LigoOp.Tezos.get_entrypoint_opt \"%burrowTransfer\" burrow_addr))\n ]\n ~real:ops;\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:Common.tez_zero;\n let _ops, checker2 = Checker.entrypoint_activate_burrow (checker1, (Ligo.nat_from_literal \"0n\", tez)) in\n FIXME : cfmm contains a ratio , which can not be compared for equality using ( =) . So , the next line can give false positives .\n assert_equal checker0 checker2;\n () in\n\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:Common.tez_zero;\n let (_, checker) = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal \"0n\", None, tok_of_denomination (Ligo.nat_from_literal \"10_000_000n\"))) in\n let burrow_id = (bob_addr, Ligo.nat_from_literal \"0n\") in\n let burrow_addr =\n burrow_address\n (Option.get (Ligo.Big_map.find_opt (bob_addr, Ligo.nat_from_literal \"0n\") checker.burrows)) in\n\n Mint as much kit as possible\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let (_ops, checker) =\n Checker.entrypoint_mint_kit\n ( checker\n , (Ligo.nat_from_literal \"0n\", kit_of_denomination (Ligo.nat_from_literal \"4_285_714n\"))\n ) in\n\n let kit = get_balance_of checker bob_addr TokenMetadata.kit_token_id in\n assert_nat_equal ~expected:(Ligo.nat_from_literal \"4_285_714n\") ~real:kit;\n\n assert_bool\n \"should not be overburrowed right after minting\"\n (not\n @@ burrow_is_overburrowed\n checker.parameters\n (Option.get (Ligo.Big_map.find_opt burrow_id checker.burrows))\n );\n\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n assert_raises\n (Failure (Ligo.string_of_int error_MintKitFailure))\n (fun () ->\n Checker.entrypoint_mint_kit\n ( checker\n , (Ligo.nat_from_literal \"0n\", kit_of_denomination (Ligo.nat_from_literal \"1n\"))\n )\n );\n\n Ligo.Tezos.new_transaction ~seconds_passed:60 ~blocks_passed:1 ~sender:bob_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n\n let _ops, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal \"1_000_001n\")) in\n\n let ops, checker = Checker.entrypoint_touch_burrow (checker, burrow_id) in\n assert_operation_list_equal ~expected:[] ~real:ops;\n\n assert_bool\n \"if the index goes up, then burrows should become overburrowed\"\n (burrow_is_overburrowed\n checker.parameters\n (Option.get (Ligo.Big_map.find_opt burrow_id checker.burrows))\n );\n\n Ligo.Tezos.new_transaction ~seconds_passed:(211*60) ~blocks_passed:211 ~sender:bob_addr 
~amount:(Ligo.tez_from_literal \"0mutez\");\n\n let kit_before_reward = get_balance_of checker bob_addr TokenMetadata.kit_token_id in\n let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal \"1_200_000n\")) in\n let kit_after_reward = get_balance_of checker bob_addr TokenMetadata.kit_token_id in\n\n let touch_reward = Ligo.sub_nat_nat kit_after_reward kit_before_reward in\n\n let ops, checker = Checker.entrypoint_touch_burrow (checker, burrow_id) in\n assert_operation_list_equal ~expected:[] ~real:ops;\n\n assert_int_equal\n ~real:touch_reward;\n\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let (ops, checker) = Checker.entrypoint_mark_for_liquidation (checker, burrow_id) in\n\n assert_operation_list_equal\n ~expected:[\n LigoOp.Tezos.address_nat_transaction\n (alice_addr, Ligo.nat_from_literal \"1_009_000n\")\n (Ligo.tez_from_literal \"0mutez\")\n (Option.get (LigoOp.Tezos.get_entrypoint_opt \"%burrowTransfer\" burrow_addr))\n ]\n ~real:ops;\n\n let slice =\n (Ligo.Big_map.find_opt burrow_id checker.liquidation_auctions.burrow_slices)\n |> Option.get\n |> fun i -> i.youngest_slice in\n\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n assert_raises\n (Failure (Ligo.string_of_int error_UnwarrantedCancellation))\n (fun () -> Checker.entrypoint_cancel_liquidation_slice (checker, slice));\n\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n assert_raises\n (Failure (Ligo.string_of_int error_InvalidLeafPtr))\n (fun () ->\n let undefined_slice = LiquidationAuctionPrimitiveTypes.LeafPtr (ptr_next checker.liquidation_auctions.avl_storage.last_ptr) in\n Checker.entrypoint_cancel_liquidation_slice (checker, undefined_slice)\n );\n\n Ligo.Tezos.new_transaction ~seconds_passed:(5*60) ~blocks_passed:5 ~sender:bob_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n assert_raises\n (Failure (Ligo.string_of_int error_NoOpenAuction))\n (fun () -> Checker.view_current_liquidation_auction_details ((), checker));\n\n let kit_before_reward = get_balance_of checker bob_addr TokenMetadata.kit_token_id in\n let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal \"1_200_000n\")) in\n let kit_after_reward = get_balance_of checker bob_addr TokenMetadata.kit_token_id in\n\n let touch_reward = Ligo.sub_nat_nat kit_after_reward kit_before_reward in\n\n assert_bool \"should start an auction\"\n (Option.is_some checker.liquidation_auctions.current_auction);\n\n assert_int_equal\n ~expected:(Ligo.int_from_literal \"500_000\")\n ~real:touch_reward;\n\n Ligo.Tezos.new_transaction ~seconds_passed:(5*60) ~blocks_passed:5 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n\n let kit_before_reward = get_balance_of checker alice_addr TokenMetadata.kit_token_id in\n let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal \"1_200_000n\")) in\n let kit_after_reward = get_balance_of checker alice_addr TokenMetadata.kit_token_id in\n\n let touch_reward = Ligo.sub_nat_nat kit_after_reward kit_before_reward in\n let min_bid = Checker.view_current_liquidation_auction_details ((), checker) in\n\n let auction_id =\n min_bid.auction_id in\n assert_kit_equal\n ~expected:(kit_of_denomination (Ligo.nat_from_literal \"2_709_183n\"))\n 
~real:min_bid.minimum_bid;\n\n let (ops, checker) =\n Checker.entrypoint_liquidation_auction_place_bid (checker, (auction_id, min_bid.minimum_bid)) in\n assert_operation_list_equal ~expected:[] ~real:ops;\n\n let (ops, checker) =\n Checker.entrypoint_liquidation_auction_place_bid\n ( checker\n , (auction_id, kit_of_denomination (Ligo.nat_from_literal \"4_200_000n\"))\n ) in\n\n let auction_id =\n match checker.liquidation_auctions.current_auction with\n | None -> assert_failure \"entrypoint_liquidation_auction_place_bid should have succeeded\"\n | Some current_auction -> current_auction.contents in\n\n assert_operation_list_equal ~expected:[] ~real:ops;\n\n assert_int_equal\n ~expected:(Ligo.int_from_literal \"500_000\")\n ~real:touch_reward;\n\n Ligo.Tezos.new_transaction ~seconds_passed:(30*60) ~blocks_passed:30 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n\n let kit_before_reward = get_balance_of checker alice_addr TokenMetadata.kit_token_id in\n let _ops, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal \"1_200_000n\")) in\n let kit_after_reward = get_balance_of checker alice_addr TokenMetadata.kit_token_id in\n\n let touch_reward = Ligo.sub_nat_nat kit_after_reward kit_before_reward in\n\n assert_bool \"auction should be completed\"\n (Option.is_none checker.liquidation_auctions.current_auction);\n\n assert_int_equal\n ~expected:(Ligo.int_from_literal \"21_000_000\")\n ~real:touch_reward;\n\n FIXME : Operations differ between the FA2 deployment and the TEZ deployment \n ( * Check that all the requests for burrows to send tez come _ before _ the \n * request to the oracle to update the index .\n begin match ops with\n | [\n call\n ] -> ()\n | _ -> assert_failure (\"Unexpected operations/operation order: \" ^ show_operation_list ops)\n end;\n *)\n\n We do n't need to touch the slice on this test case since \n * Checker.entrypoint_touch_with_index already touches the oldest 5 \n * slices .\n * Checker.entrypoint_touch_with_index already touches the oldest 5\n * slices. 
*)\n assert_raises\n (Failure (Ligo.string_of_int error_InvalidLeafPtr))\n (fun () -> Checker.entrypoint_touch_liquidation_slices (checker, [slice]));\n\n assert_bool \"burrow should have no liquidation slices\"\n (Ligo.Big_map.find_opt burrow_id checker.liquidation_auctions.burrow_slices= None);\n\n let result = Option.get (Ligo.Big_map.find_opt burrow_id checker.burrows) in\n assert_tok_equal\n ~expected:tok_zero\n ~real:(burrow_collateral_at_auction result);\n\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let (ops, checker) = Checker.entrypoint_liquidation_auction_claim_win (checker, auction_id) in\n\n assert_operation_list_equal\n ~expected:[\n (LigoOp.Tezos.fa2_transfer_transaction\n [ Fa2Interface.(\n { from_ = checker_address;\n txs = [\n { to_ = alice_addr;\n token_id = TokenMetadata.tok_token_id;\n amount = Ligo.nat_from_literal \"3_156_446n\";\n };\n ];\n }\n )\n ]\n (Ligo.tez_from_literal \"0mutez\")\n (Option.get (LigoOp.Tezos.get_entrypoint_opt \"%transfer\" checker.external_contracts.collateral_fa2))\n );\n ]\n ~real:ops;\n assert_raises\n (Failure (Ligo.string_of_int error_InvalidAvlPtr))\n (fun () -> Checker.entrypoint_liquidation_auction_claim_win (checker, auction_id));\n\n ()\n );\n\n (\"entrypoint_mark_for_liquidation - should not create empty slices\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let sender = alice_addr in\n let checker = empty_checker in\n\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:Common.tez_zero;\n let (_, burrow_no) as burrow_id, checker = newly_created_burrow checker \"0n\" (tok_of_denomination (Ligo.nat_from_literal \"2_001_001n\")) in\n\n CALCULATIONS \n ~~~~~~~~~~~~ \n Tez in the burrow is ( 1_001_001mutez + 1tez ) so the reward is \n ( 1tez + 1_001mutez = 1_001_001 ) . This means that \n - The slice we WOULD send to auctions is empty . \n - The burrow remains is empty so the next liquidation WOULD create another empty slice to auctions . \n \n ~~~~~~~~~~~~\n Tez in the burrow is (1_001_001mutez + 1tez) so the reward is\n (1tez + 1_001mutez = 1_001_001). 
This means that\n - The slice we WOULD send to auctions is empty.\n - The burrow remains is empty so the next liquidation WOULD create another empty slice to auctions.\n *)\n\n Mint as much kit as possible .\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal \"0mutez\");\n let (_ops, checker) = Checker.entrypoint_mint_kit (checker, (burrow_no, kit_of_denomination (Ligo.nat_from_literal \"476_667n\"))) in\n\n NOTE : I am a little surprised / worried about this being again 211 ...\n Ligo.Tezos.new_transaction ~seconds_passed:(60*blocks_passed) ~blocks_passed:blocks_passed ~sender:bob_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _ops, checker = Checker.entrypoint_touch_burrow (checker, burrow_id) in\n\n Ensure that the burrow is .\n begin match Ligo.Big_map.find_opt burrow_id checker.burrows with\n | None -> assert_failure \"bug\"\n | Some burrow -> assert_bool \"burrow needs to be liquidatable for the test to be potent.\" (Burrow.burrow_is_liquidatable checker.parameters burrow);\n end;\n\n Let 's mark the burrow for liquidation now ( first pass : leaves it empty but active ) .\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let (_ops, checker) = Checker.entrypoint_mark_for_liquidation (checker, burrow_id) in\n\n Let 's mark the burrow for liquidation now ( second pass : deactivates it ) .\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let (_ops, checker) = Checker.entrypoint_mark_for_liquidation (checker, burrow_id) in\n\n ()\n );\n\n (\"deposit_collateral - does not fail on untouched burrows\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let amount = Constants.creation_deposit in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal \"0n\", None, amount)) in\n Touch checker\n Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal \"1_000_000n\")) in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let _ = Checker.entrypoint_deposit_collateral (checker, (Ligo.nat_from_literal \"0n\", amount)) in\n ()\n );\n\n (\"entrypoint_withdraw_collateral - does not fail on untouched burrows\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let amount = tok_add Constants.creation_deposit Constants.creation_deposit in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal \"0n\", None, amount)) in\n Touch checker\n Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal \"1_000_000n\")) in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let _ = Checker.entrypoint_withdraw_collateral (checker, (Ligo.nat_from_literal \"0n\", Constants.creation_deposit)) in\n ()\n );\n\n (\"entrypoint_mint_kit - does not fail on untouched burrows\" >::\n 
fun _ ->\n Ligo.Tezos.reset ();\n let amount = tok_add Constants.creation_deposit Constants.creation_deposit in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal \"0n\", None, amount)) in\n Touch checker\n Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal \"1_000_000n\")) in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _ = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal \"0n\", kit_of_denomination (Ligo.nat_from_literal \"1n\"))) in\n ()\n );\n\n (\"entrypoint_burn_kit - does not fail on untouched burrows\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let amount = tok_add Constants.creation_deposit Constants.creation_deposit in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal \"0n\", None, amount)) in\n Mint some kit out of the burrow\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _ops, checker = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal \"0n\", kit_of_denomination (Ligo.nat_from_literal \"1n\"))) in\n Touch checker\n Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal \"1_000_000n\")) in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _ = Checker.entrypoint_burn_kit (checker, (Ligo.nat_from_literal \"0n\", kit_of_denomination (Ligo.nat_from_literal \"1n\"))) in\n ()\n );\n\n (\"entrypoint_activate_burrow - does not fail on untouched burrows\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let amount = Constants.creation_deposit in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal \"0n\", None, amount)) in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _ops, checker = Checker.entrypoint_deactivate_burrow (checker, (Ligo.nat_from_literal \"0n\", !Ligo.Tezos.sender)) in\n Touch checker\n Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal \"1_000_000n\")) in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let _ = Checker.entrypoint_activate_burrow (checker, (Ligo.nat_from_literal \"0n\", amount)) in\n ()\n );\n\n (\"entrypoint_deactivate_burrow - does not fail on untouched burrows\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let amount = Constants.creation_deposit in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let _ops, checker = 
Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal \"0n\", None, amount)) in\n Touch checker\n Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal \"1_000_000n\")) in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _ = Checker.entrypoint_deactivate_burrow (checker, (Ligo.nat_from_literal \"0n\", !Ligo.Tezos.sender)) in\n ()\n );\n\n (\"entrypoint_mark_for_liquidation - does not fail on untouched burrows\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let amount = Constants.creation_deposit in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal \"0n\", None, amount)) in\n let burrow_id = (!Ligo.Tezos.sender, Ligo.nat_from_literal \"0n\") in\n Touch checker\n Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal \"1_000_000n\")) in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n assert_raises\n (Failure (Ligo.string_of_int error_NotLiquidationCandidate))\n (fun () -> Checker.entrypoint_mark_for_liquidation (checker, burrow_id));\n );\n\n\n (\"entrypoint_set_burrow_delegate - does not fail on untouched burrows\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let amount = Constants.creation_deposit in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal \"0n\", None, amount)) in\n Touch checker\n Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal \"1_000_000n\")) in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _ = Checker.entrypoint_set_burrow_delegate (checker, (Ligo.nat_from_literal \"0n\", None)) in\n ()\n );\n\n (\"cfmm views\" >:::\n let\n with_cfmm_setup f =\n fun _ ->\n Ligo.Tezos.reset ();\n let checker = empty_checker in\n let burrow_id = Ligo.nat_from_literal \"42n\" in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let _ops, checker = Checker.entrypoint_create_burrow (checker, (burrow_id, None, tok_of_denomination (Ligo.nat_from_literal \"10_000_000n\"))) in\n Ligo.Tezos.new_transaction ~seconds_passed:62 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _ops, checker = Checker.entrypoint_mint_kit (checker, (burrow_id, kit_one)) in\n Ligo.Tezos.new_transaction ~seconds_passed:121 ~blocks_passed:2 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let ctok_to_give = Ctok.ctok_of_denomination (Ligo.nat_from_literal \"400_000n\") in\n let kit_to_give = Kit.kit_of_denomination (Ligo.nat_from_literal \"400_000n\") in\n let min_lqt_to_mint = Lqt.lqt_of_denomination (Ligo.nat_from_literal \"5n\") in\n 
let deadline = Ligo.add_timestamp_int !Ligo.Tezos.now (Ligo.int_from_literal \"20\") in\n let _ops, checker = Checker.entrypoint_add_liquidity (checker, (ctok_to_give, kit_to_give, min_lqt_to_mint, deadline)) in\n\n Ligo.Tezos.new_transaction ~seconds_passed:59 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _ = f checker in ()\n in\n [\n \"view_buy_kit_min_kit_expected\" >:: with_cfmm_setup\n (fun checker ->\n let ctok_to_sell = Ctok.ctok_of_denomination (Ligo.nat_from_literal \"100_000n\") in\n let min_kit_to_buy = Checker.view_buy_kit_min_kit_expected (ctok_to_sell, checker) in\n let deadline = Ligo.add_timestamp_int !Ligo.Tezos.now (Ligo.int_from_literal \"20\") in\n Checker.entrypoint_buy_kit (checker, (ctok_to_sell, min_kit_to_buy, deadline)));\n\n \"view_buy_kit_min_kit_expected - fail if no ctok is given\" >:: with_cfmm_setup\n (fun checker ->\n assert_raises\n (Failure (Ligo.string_of_int error_BuyKitNoCtokGiven))\n (fun () -> Checker.view_buy_kit_min_kit_expected (Ctok.ctok_zero, checker))\n );\n\n \"view_sell_kit_min_ctok_expected\" >:: with_cfmm_setup\n (fun checker ->\n let kit_to_sell = Kit.kit_of_denomination (Ligo.nat_from_literal \"100_000n\") in\n let min_ctok_to_buy = Checker.view_sell_kit_min_ctok_expected (kit_to_sell, checker) in\n let deadline = Ligo.add_timestamp_int !Ligo.Tezos.now (Ligo.int_from_literal \"20\") in\n Checker.entrypoint_sell_kit (checker, (kit_to_sell, min_ctok_to_buy, deadline)));\n\n \"view_sell_kit_min_ctok_expected - fail if no kit is given\" >:: with_cfmm_setup\n (fun checker ->\n assert_raises\n (Failure (Ligo.string_of_int error_SellKitNoKitGiven))\n (fun () -> Checker.view_sell_kit_min_ctok_expected (Kit.kit_zero, checker))\n );\n\n \"view_add_liquidity_max_kit_deposited / view_add_liquidity_min_lqt_minted\" >:: with_cfmm_setup\n (fun checker ->\n let ctok_to_sell = Ctok.ctok_of_denomination (Ligo.nat_from_literal \"100_000n\") in\n let max_kit_to_sell = Checker.view_add_liquidity_max_kit_deposited (ctok_to_sell, checker) in\n let min_lqt_to_buy = Checker.view_add_liquidity_min_lqt_minted (ctok_to_sell, checker) in\n let deadline = Ligo.add_timestamp_int !Ligo.Tezos.now (Ligo.int_from_literal \"20\") in\n Checker.entrypoint_add_liquidity (checker, (ctok_to_sell, max_kit_to_sell, min_lqt_to_buy, deadline)));\n\n \"view_add_liquidity_max_kit_deposited - fail if no ctok is given\" >:: with_cfmm_setup\n (fun checker ->\n assert_raises\n (Failure (Ligo.string_of_int error_AddLiquidityNoCtokGiven))\n (fun () -> Checker.view_add_liquidity_max_kit_deposited (Ctok.ctok_zero, checker))\n );\n\n \"view_add_liquidity_min_lqt_minted - fail if no ctok is given\" >:: with_cfmm_setup\n (fun checker ->\n assert_raises\n (Failure (Ligo.string_of_int error_AddLiquidityNoCtokGiven))\n (fun () -> Checker.view_add_liquidity_min_lqt_minted (Ctok.ctok_zero, checker))\n );\n\n \"view_remove_liquidity_min_ctok_withdrawn / view_remove_liquidity_min_kit_withdrawn\" >:: with_cfmm_setup\n (fun checker ->\n let lqt_to_sell = Lqt.lqt_of_denomination (Ligo.nat_from_literal \"5n\") in\n let min_ctok_to_buy = Checker.view_remove_liquidity_min_ctok_withdrawn (lqt_to_sell, checker) in\n let min_kit_to_buy = Checker.view_remove_liquidity_min_kit_withdrawn (lqt_to_sell, checker) in\n let deadline = Ligo.add_timestamp_int !Ligo.Tezos.now (Ligo.int_from_literal \"20\") in\n Checker.entrypoint_remove_liquidity (checker, (lqt_to_sell, min_ctok_to_buy, min_kit_to_buy, deadline)));\n\n \"view_remove_liquidity_min_ctok_withdrawn - fail if no 
liquidity is given\" >:: with_cfmm_setup\n (fun checker ->\n assert_raises\n (Failure (Ligo.string_of_int error_RemoveLiquidityNoLiquidityBurned))\n (fun () -> Checker.view_remove_liquidity_min_ctok_withdrawn (Lqt.lqt_zero, checker))\n );\n\n \"view_remove_liquidity_min_ctok_withdrawn - too much lqt withdrawn (equal)\" >:: with_cfmm_setup\n (fun checker ->\n let lqt_to_withdraw = checker.cfmm.lqt in\n assert_raises\n (Failure (Ligo.string_of_int error_RemoveLiquidityTooMuchLiquidityWithdrawn))\n (fun () -> Checker.view_remove_liquidity_min_ctok_withdrawn (lqt_to_withdraw, checker))\n );\n\n \"view_remove_liquidity_min_ctok_withdrawn - too much lqt withdrawn (more than)\" >:: with_cfmm_setup\n (fun checker ->\n let lqt_to_withdraw = Lqt.lqt_add checker.cfmm.lqt (Lqt.lqt_of_denomination (Ligo.nat_from_literal \"1n\")) in\n assert_raises\n (Failure (Ligo.string_of_int error_RemoveLiquidityTooMuchLiquidityWithdrawn))\n (fun () -> Checker.view_remove_liquidity_min_ctok_withdrawn (lqt_to_withdraw, checker))\n );\n\n \"view_remove_liquidity_min_kit_withdrawn - fail if no liquidity is given\" >:: with_cfmm_setup\n (fun checker ->\n assert_raises\n (Failure (Ligo.string_of_int error_RemoveLiquidityNoLiquidityBurned))\n (fun () -> Checker.view_remove_liquidity_min_kit_withdrawn (Lqt.lqt_zero, checker))\n );\n\n \"view_remove_liquidity_min_kit_withdrawn - too much lqt withdrawn (equal)\" >:: with_cfmm_setup\n (fun checker ->\n let lqt_to_withdraw = checker.cfmm.lqt in\n assert_raises\n (Failure (Ligo.string_of_int error_RemoveLiquidityTooMuchLiquidityWithdrawn))\n (fun () -> Checker.view_remove_liquidity_min_kit_withdrawn (lqt_to_withdraw, checker))\n );\n\n \"view_remove_liquidity_min_kit_withdrawn - too much lqt withdrawn (more than)\" >:: with_cfmm_setup\n (fun checker ->\n let lqt_to_withdraw = Lqt.lqt_add checker.cfmm.lqt (Lqt.lqt_of_denomination (Ligo.nat_from_literal \"1n\")) in\n assert_raises\n (Failure (Ligo.string_of_int error_RemoveLiquidityTooMuchLiquidityWithdrawn))\n (fun () -> Checker.view_remove_liquidity_min_kit_withdrawn (lqt_to_withdraw, checker))\n );\n ]\n );\n\n (\"view_burrow_max_mintable_kit - does not fail on untouched burrows\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let amount = Constants.creation_deposit in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal \"0n\", None, amount)) in\n let burrow_id = (!Ligo.Tezos.sender, Ligo.nat_from_literal \"0n\") in\n Touch checker\n Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal \"1_000_000n\")) in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _ = Checker.view_burrow_max_mintable_kit (burrow_id, checker) in\n ()\n );\n\n (\"view_is_burrow_overburrowed - does not fail on untouched burrows\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let amount = Constants.creation_deposit in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal \"0n\", None, amount)) in\n let burrow_id = (!Ligo.Tezos.sender, Ligo.nat_from_literal \"0n\") in\n Touch checker\n Ligo.Tezos.new_transaction 
~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal \"1_000_000n\")) in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _ = Checker.view_is_burrow_overburrowed (burrow_id, checker) in\n ()\n );\n\n (\"view_is_burrow_liquidatable - does not fail on untouched burrows\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let amount = Constants.creation_deposit in\n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero;\n let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal \"0n\", None, amount)) in\n let burrow_id = (!Ligo.Tezos.sender, Ligo.nat_from_literal \"0n\") in\n Touch checker\n Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal \"1_000_000n\")) in\n Try to view whether the untouched burrow is \n Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _ = Checker.view_is_burrow_liquidatable (burrow_id, checker) in\n ()\n );\n\n (\"view_current_liquidation_auction_details - raises error when there is no current auction\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let checker = empty_checker in\n assert_raises\n (Failure (Ligo.string_of_int error_NoOpenAuction))\n (fun _ -> Checker.view_current_liquidation_auction_details ((), checker))\n );\n\n (\"view_current_liquidation_auction_details - expected value for descending auction\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let checker = checker_with_active_auction () in\n let auction = Option.get checker.liquidation_auctions.current_auction in\n let auction_details = Checker.view_current_liquidation_auction_details ((), checker) in\n let expected_auction_details = {\n auction_id = auction.contents;\n collateral = tok_of_denomination (Ligo.nat_from_literal \"23_669_648n\");\n minimum_bid = liquidation_auction_current_auction_minimum_bid auction;\n current_bid = None;\n remaining_blocks = None;\n remaining_seconds = None;\n }\n in\n assert_view_current_liquidation_auction_details_result_equal ~expected:expected_auction_details ~real:auction_details\n );\n\n (\"view_current_liquidation_auction_details - expected value for ascending auction\" >::\n fun _ ->\n Ligo.Tezos.reset ();\n let checker = checker_with_active_auction () in\n let auction = Option.get checker.liquidation_auctions.current_auction in\n Place a bid to turn the descending auction into an ascending one\n let bidder = bob_addr in\n let bid_amnt = liquidation_auction_current_auction_minimum_bid auction in\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bidder ~amount:Common.tez_zero;\n let _, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal \"1n\", None, tok_of_denomination (Ligo.nat_from_literal \"1_000_000_000n\"))) in\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bidder ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, checker = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal \"1n\", bid_amnt)) in\n Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bidder ~amount:(Ligo.tez_from_literal \"0mutez\");\n let _, 
checker = Checker.entrypoint_liquidation_auction_place_bid (checker, (auction.contents, bid_amnt)) in\n\n      Ligo.Tezos.new_transaction ~seconds_passed:500 ~blocks_passed:22 ~sender:bidder ~amount:(Ligo.tez_from_literal \"0mutez\");\n      let auction = Option.get checker.liquidation_auctions.current_auction in\n      let auction_details = Checker.view_current_liquidation_auction_details ((), checker) in\n      let expected_auction_details = {\n        auction_id = auction.contents;\n        collateral = tok_of_denomination (Ligo.nat_from_literal \"23_669_648n\");\n        minimum_bid = liquidation_auction_current_auction_minimum_bid auction;\n        current_bid = Some LiquidationAuctionPrimitiveTypes.({address=bidder; kit=bid_amnt;});\n        remaining_blocks = Some (Ligo.int_from_literal \"-2\");\n        remaining_seconds = Some (Ligo.int_from_literal \"700\");\n      }\n      in\n      assert_view_current_liquidation_auction_details_result_equal ~expected:expected_auction_details ~real:auction_details\n  );\n  ]\n\nlet () =\n  run_test_tt_main\n    suite\n

{"rowIdx":610287, "repository":"exercism/common-lisp", "name":"affine-cipher.lisp", "language":"lisp", "download_url":"https://raw.githubusercontent.com/exercism/common-lisp/4bf94609c7ef0f9ca7ec0b6dca04cc10314cb598/exercises/practice/affine-cipher/affine-cipher.lisp"}

(defpackage :affine-cipher
  (:use :cl)
  (:export :encode
           :decode))

(in-package :affine-cipher)

(defun encode (plaintext a b))

(defun decode (ciphertext a b))
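;; A minimal sketch of one possible way to fill in the empty `encode`/`decode` stubs
;; above, assuming the classic affine cipher over the 26-letter Latin alphabet:
;; E(x) = (a*x + b) mod 26 and D(y) = a^-1 * (y - b) mod 26, with `a` coprime to 26.
;; The exercism exercise additionally groups ciphertext into 5-character blocks, which
;; is omitted here; the helper names `mmi` and `translate` are made up for the sketch.
(defun mmi (a m)
  "Return the modular multiplicative inverse of A modulo M, or NIL if none exists."
  (loop for x from 1 below m
        when (= 1 (mod (* a x) m))
          return x))

(defun translate (text fn)
  "Apply FN to the 0-25 index of each letter in TEXT; keep digits, drop everything else."
  (with-output-to-string (out)
    (loop for ch across (string-downcase text)
          do (cond ((alpha-char-p ch)
                    (write-char (code-char (+ (char-code #\a)
                                              (funcall fn (- (char-code ch) (char-code #\a)))))
                                out))
                   ((digit-char-p ch) (write-char ch out))))))

(defun encode (plaintext a b)
  (unless (mmi a 26) (error "a and the alphabet size must be coprime"))
  (translate plaintext (lambda (x) (mod (+ (* a x) b) 26))))

(defun decode (ciphertext a b)
  (let ((a-inv (or (mmi a 26) (error "a and the alphabet size must be coprime"))))
    (translate ciphertext (lambda (y) (mod (* a-inv (- y b)) 26)))))

;; Example under these assumptions: (encode "test" 5 7) => "ybty", (decode "ybty" 5 7) => "test"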
{"rowIdx":610288, "repository":"coq/coq", "name":"extend.mli", "language":"ocaml", "download_url":"https://raw.githubusercontent.com/coq/coq/f66b58cc7e6a8e245b35c3858989181825c591ce/parsing/extend.mli"}

(************************************************************************)
(*         *   The Coq Proof Assistant / The Coq Development Team       *)
(*  v      *         Copyright INRIA, CNRS and contributors             *)
(* <O___,, * (see version control and CREDITS file for authors & dates) *)
(*   \VV/  **************************************************************)
(*    //   *    This file is distributed under the terms of the         *)
(*         *     GNU Lesser General Public License Version 2.1          *)
(*         *     (see LICENSE file for the text of the license)         *)
(************************************************************************)

(** Entry keys for constr notations *)

type side = Left | Right

type production_position =
  | BorderProd of side * Gramlib.Gramext.g_assoc option
  | InternalProd

type production_level =
  | NextLevel
  | NumLevel of int
  | DefaultLevel (** Interpreted differently at the border or inside a rule *)

val production_level_eq : production_level -> production_level -> bool

(** User-level types used to tell how to parse or interpret of the non-terminal *)

type 'a constr_entry_key_gen =
  | ETIdent
  | ETName
  | ETGlobal
  | ETBigint
  | ETBinder of bool (* open list of binders if true, closed list of binders otherwise *)
  | ETConstr of Constrexpr.notation_entry * Notation_term.notation_binder_kind option * 'a
  | ETPattern of bool * int option (* true = strict pattern, i.e. not a single variable *)

(** Entries level (left-hand side of grammar rules) *)

type constr_entry_key =
  (production_level * production_position) constr_entry_key_gen

val constr_entry_key_eq : constr_entry_key -> constr_entry_key -> bool

(** Entries used in productions, vernac side (e.g. "x bigint" or "x ident") *)

type simple_constr_prod_entry_key =
  production_level constr_entry_key_gen

(** Entries used in productions (in right-hand-side of grammar rules), to parse non-terminals *)

type binder_target = ForBinder | ForTerm

type binder_entry_kind = ETBinderOpen | ETBinderClosed of constr_prod_entry_key option * (bool * string) list

and constr_prod_entry_key =
  as an ident
  as a name ( ident or _ )
  as a global reference
  as an ( unbounded ) integer
  as name , or name : type or ' pattern , possibly in closed form
  as or pattern , or a subentry of those
  as pattern as a binder ( as subpart of a constr )
  as non - empty list of constr , or subentries of those
  as non - empty list of local binders

(** {5 AST for user-provided entries} *)

type 'a user_symbol =
  | Ulist1 of 'a user_symbol
  | Ulist1sep of 'a user_symbol * string
  | Ulist0 of 'a user_symbol
  | Ulist0sep of 'a user_symbol * string
  | Uopt of 'a user_symbol
  | Uentry of 'a
  | Uentryl of 'a * int

type ('a,'b,'c) ty_user_symbol =
  | TUlist1 : ('a,'b,'c) ty_user_symbol -> ('a list,'b list,'c list) ty_user_symbol
  | TUlist1sep : ('a,'b,'c) ty_user_symbol * string -> ('a list,'b list,'c list) ty_user_symbol
  | TUlist0 : ('a,'b,'c) ty_user_symbol -> ('a list,'b list,'c list) ty_user_symbol
  | TUlist0sep : ('a,'b,'c) ty_user_symbol * string -> ('a list,'b list,'c list) ty_user_symbol
  | TUopt : ('a,'b,'c) ty_user_symbol -> ('a option, 'b option, 'c option) ty_user_symbol
  | TUentry : ('a, 'b, 'c) Genarg.ArgT.tag -> ('a,'b,'c) ty_user_symbol
  | TUentryl : ('a, 'b, 'c) Genarg.ArgT.tag * int -> ('a,'b,'c) ty_user_symbol
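(* A small illustrative value, not part of extend.mli: assuming the [user_symbol]
   constructors declared above, a non-empty comma-separated list of some entry is
   described by nesting [Ulist1sep] around [Uentry]. The entry name "constr" is a
   placeholder chosen only for this example. *)
let example_symbol : string user_symbol =
  Ulist1sep (Uentry "constr", ",")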
{"rowIdx":610289, "repository":"emanjavacas/cosycat", "name":"results_frame.cljs", "language":"clojure", "download_url":"https://raw.githubusercontent.com/emanjavacas/cosycat/a7186363d3c0bdc7b714af126feb565f98793a6e/src/cljs/cosycat/review/components/results_frame.cljs"}

(ns cosycat.review.components.results-frame
  (:require [reagent.core :as reagent]
            [re-frame.core :as re-frame]
            [react-bootstrap.components :as bs]
            [cosycat.components :refer [error-panel throbbing-panel]]
            [cosycat.app-utils :refer [parse-hit-id]]
            [cosycat.snippet :refer [snippet-modal]]
            [cosycat.annotation.components.annotation-component
             :refer [annotation-component]]))

(defn highlight-fn [{{:keys [anns]} :meta}]
  (fn [{id :_id}]
    (contains? anns id)))

(defn hit-row [hit-id]
  (let [hit-map (re-frame/subscribe [:project-session :review :results :results-by-id hit-id])
        color-map (re-frame/subscribe [:project-users-colors])]
    (fn [hit-id]
      [:div.row
       (if (get-in @hit-map [:meta :throbbing?])
         "loading..."
         [annotation-component @hit-map color-map
          :db-path :review
          :corpus (get-in @hit-map [:meta :corpus])
          :editable? true
          :highlight-fn (highlight-fn @hit-map)
          :show-match? false
          :show-hit-id? true])])))

(defn sort-by-doc [hit-ids]
  (sort-by #(let [{:keys [hit-start doc-id]} (parse-hit-id %)] [doc-id hit-start]) hit-ids))

(defn results-frame []
  (let [results (re-frame/subscribe [:project-session :review :results :results-by-id])
        throbbing? (re-frame/subscribe [:throbbing? :review-frame])]
    (fn []
      [:div.container-fluid
       (cond
         (empty? @results) [:div.row [error-panel {:status "Ooops! Found zero annotations"}]]
         @throbbing? [:div.row [throbbing-panel :throbber :horizontal-loader]]
         :else [:div.row (doall (for [hit-id (sort-by-doc (keys @results))]
                                  ^{:key (str "review-" hit-id)} [hit-row hit-id]))])
       [snippet-modal :review]])))
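;; Illustrative only, not part of the file above: `highlight-fn` closes over the hit's
;; annotation map and returns a predicate over tokens that is truthy exactly for tokens
;; whose :_id carries an annotation. With hypothetical data:
;;   (def pred (highlight-fn {:meta {:anns {"tok-1" {}}}}))
;;   (pred {:_id "tok-1"}) ;; => true
;;   (pred {:_id "tok-2"}) ;; => false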
anns id)))\n\n(defn hit-row [hit-id]\n (let [hit-map (re-frame/subscribe [:project-session :review :results :results-by-id hit-id])\n color-map (re-frame/subscribe [:project-users-colors])]\n (fn [hit-id]\n [:div.row\n (if (get-in @hit-map [:meta :throbbing?])\n \"loading...\"\n [annotation-component @hit-map color-map\n :db-path :review\n :corpus (get-in @hit-map [:meta :corpus])\n :editable? true\n :highlight-fn (highlight-fn @hit-map)\n :show-match? false\n :show-hit-id? true])])))\n\n(defn sort-by-doc [hit-ids]\n (sort-by #(let [{:keys [hit-start doc-id]} (parse-hit-id %)] [doc-id hit-start]) hit-ids))\n\n(defn results-frame []\n (let [results (re-frame/subscribe [:project-session :review :results :results-by-id])\n throbbing? (re-frame/subscribe [:throbbing? :review-frame])]\n (fn []\n [:div.container-fluid\n (cond\n (empty? @results) [:div.row [error-panel {:status \"Ooops! Found zero annotations\"}]]\n @throbbing? [:div.row [throbbing-panel :throbber :horizontal-loader]]\n :else [:div.row (doall (for [hit-id (sort-by-doc (keys @results))]\n ^{:key (str \"review-\" hit-id)} [hit-row hit-id]))])\n [snippet-modal :review]])))\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/emanjavacas/cosycat/a7186363d3c0bdc7b714af126feb565f98793a6e/src/cljs/cosycat/review/components/results_frame.cljs"},"language":{"kind":"string","value":"clojure"},"comments":{"kind":"string","value":""},"code":{"kind":"string","value":"(ns cosycat.review.components.results-frame\n (:require [reagent.core :as reagent]\n [re-frame.core :as re-frame]\n [react-bootstrap.components :as bs]\n [cosycat.components :refer [error-panel throbbing-panel]]\n [cosycat.app-utils :refer [parse-hit-id]]\n [cosycat.snippet :refer [snippet-modal]]\n [cosycat.annotation.components.annotation-component\n :refer [annotation-component]]))\n\n(defn highlight-fn [{{:keys [anns]} :meta}]\n (fn [{id :_id}]\n (contains? anns id)))\n\n(defn hit-row [hit-id]\n (let [hit-map (re-frame/subscribe [:project-session :review :results :results-by-id hit-id])\n color-map (re-frame/subscribe [:project-users-colors])]\n (fn [hit-id]\n [:div.row\n (if (get-in @hit-map [:meta :throbbing?])\n \"loading...\"\n [annotation-component @hit-map color-map\n :db-path :review\n :corpus (get-in @hit-map [:meta :corpus])\n :editable? true\n :highlight-fn (highlight-fn @hit-map)\n :show-match? false\n :show-hit-id? true])])))\n\n(defn sort-by-doc [hit-ids]\n (sort-by #(let [{:keys [hit-start doc-id]} (parse-hit-id %)] [doc-id hit-start]) hit-ids))\n\n(defn results-frame []\n (let [results (re-frame/subscribe [:project-session :review :results :results-by-id])\n throbbing? (re-frame/subscribe [:throbbing? :review-frame])]\n (fn []\n [:div.container-fluid\n (cond\n (empty? @results) [:div.row [error-panel {:status \"Ooops! Found zero annotations\"}]]\n @throbbing? 
[:div.row [throbbing-panel :throbber :horizontal-loader]]\n :else [:div.row (doall (for [hit-id (sort-by-doc (keys @results))]\n ^{:key (str \"review-\" hit-id)} [hit-row hit-id]))])\n [snippet-modal :review]])))\n"}}},{"rowIdx":610290,"cells":{"_id":{"kind":"string","value":"cb9ed9478a4d22f62b2ca09141e35797bb1fe89e08300a8ab41d4b959ce3b598"},"repository":{"kind":"string","value":"kiselgra/c-mera"},"name":{"kind":"string","value":"version.lisp"},"content":{"kind":"string","value":"(in-package :c-mera)\n\n(defparameter *version* (asdf:component-version (asdf:find-system :c-mera)))\n(defparameter *generator* :undefined)\n\n(defun print-version ()\n (format t \"~a~%\" *version*))\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/kiselgra/c-mera/d06ed96d50a40a3fefe188202c8c535d6784f392/src/c-mera/version.lisp"},"language":{"kind":"string","value":"lisp"},"comments":{"kind":"string","value":""},"code":{"kind":"string","value":"(in-package :c-mera)\n\n(defparameter *version* (asdf:component-version (asdf:find-system :c-mera)))\n(defparameter *generator* :undefined)\n\n(defun print-version ()\n (format t \"~a~%\" *version*))\n"}}},{"rowIdx":610291,"cells":{"_id":{"kind":"string","value":"a1a25142787e2a42ab680a10e6441fea2092ed0d95338d09d9706830d633b512"},"repository":{"kind":"string","value":"jeffshrager/biobike"},"name":{"kind":"string","value":"doc-objects.lisp"},"content":{"kind":"string","value":" -*- Package : help ; mode : lisp ; base : 10 ; Syntax : Common - Lisp ; -*-\n\n(in-package :help)\n\n;;; +=========================================================================+\n | Copyright ( c ) 2002 - 2006 JP , , |\n;;; | |\n;;; | Permission is hereby granted, free of charge, to any person obtaining |\n;;; | a copy of this software and associated documentation files (the |\n | \" Software \" ) , to deal in the Software without restriction , including |\n;;; | without limitation the rights to use, copy, modify, merge, publish, |\n | distribute , sublicense , and/or sell copies of the Software , and to |\n | permit persons to whom the Software is furnished to do so , subject to |\n;;; | the following conditions: |\n;;; | |\n;;; | The above copyright notice and this permission notice shall be included |\n | in all copies or substantial portions of the Software . |\n;;; | |\n | THE SOFTWARE IS PROVIDED \" AS IS \" , WITHOUT WARRANTY OF ANY KIND , |\n;;; | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF |\n;;; | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. |\n;;; | IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY |\n | CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , |\n;;; | TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE |\n;;; | SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
|\n;;; +=========================================================================+\n\n Authors : JP Massar , .\n\n\n;;; All the various types of documentation (theoretically) available\n;;; in the system.\n\n;;; Arguably, FUNCTION-DOCUMENTATION and SYMBOL-DOC should be merged\n\n(defparameter *documentation-types*\n '(\n documentation-file\n function-documentation\n glossary-entry\n ;; macro-documentation \n module\n symbol-doc\n topic\n tutorial\n ;; variable-documentation\n ))\n\n(defparameter *doc-types-hash-types*\n '(\n (documentation-file equal)\n (function-documentation eq)\n (glossary-entry equalp)\n ;; macro-documentation \n (module equalp)\n (symbol-doc eq)\n (topic equalp)\n (tutorial equalp)\n ;; variable-documentation\n ))\n\n;; Where all documentation objects are stored.\n;; Use FIND-DOCUMENTATION to pull something out\n\n(eval-when (:compile-toplevel :load-toplevel :execute)\n (defun create-doc-hash-tables ()\n (let ((ht (make-hash-table)))\n (loop for (doc-type hash-test) in *doc-types-hash-types* do\n (setf (gethash doc-type ht) (make-hash-table :test hash-test)))\n ht\n )))\n\n(defvar *documentation* (create-doc-hash-tables))\n\n(defun intern-documentation (name type)\n (or (find-documentation name type)\n (setf (gethash name (gethash type *documentation*)) \n (make-instance type :name name))))\n\n(defun remove-documentation (name type)\n (remhash name (gethash type *documentation*)) (make-instance type :name name))\n\n(defun find-documentation (name type)\n (gethash name (gethash type *documentation*)))\n\n(defun clear-documentation ()\n (setf *documentation* (create-doc-hash-tables)))\n\n;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;\n\n\n;;; The hierarchy of documentation classes\n\n(defclass basicdoc ()\n ((name :initarg :name :accessor name)\n ;; AKA 'summary'\n (docstring :initform nil :initarg :docstring :accessor docstring)\n (referred-to-by :initform nil :accessor referred-to-by)\n ))\n\n(defmethod summary ((obj basicdoc)) (docstring obj))\n(defmethod text ((obj basicdoc)) nil)\n(defmethod keywords ((obj basicdoc)) nil)\n(defmethod see-also ((obj basicdoc)) nil)\n(defmethod explicitly-documented-p ((obj basicdoc)) nil)\n(defmethod author ((obj basicdoc)) nil)\n\n(defmethod print-object ((obj basicdoc) stream)\n (format stream \"<Docobj ~A (~A)>\" (help:name obj) (type-of obj)))\n\n(defclass documented (basicdoc)\n ((text :initform nil :accessor text)\n (keywords :initform nil :accessor keywords)\n (see-also :initform nil :accessor see-also)\n (author :initform nil :accessor author)\n (explicitly-documented-p :initform nil :accessor explicitly-documented-p)))\n\n(defclass mode-documented (documented) \n ((display-modes \n :initform (list :all) \n :accessor display-modes\n )))\n\n(defclass documentation-file (mode-documented) \n ((label :initform nil :accessor label)\n (source-file :initform nil :accessor source-file)\n (associated-text-file \n :initform nil\n :accessor associated-text-file\n )\n (matches :initform nil :accessor matches)\n (descriptor :initform nil :accessor descriptor)\n ))\n \n;; the reader methods are defined in document-function.lisp\n(defclass function-documentation (documented module-element)\n ((parameters :initform nil :writer (setf parameters))\n (return-values :initform nil :writer (setf return-values))\n (syntax :initform nil :writer (setf syntax))\n (vpl-syntax :initform nil :writer (setf vpl-syntax))\n (examples :initform nil :writer (setf examples))\n (examples-package :initform nil :writer (setf examples-package))\n 
(synonyms :initform nil :writer (setf synonyms))\n (flavor :initform :defun :writer (setf flavor))\n (canonical :initform nil :accessor canonical)\n (aliased :initform nil :accessor aliased)\n ))\n\n\n(defmethod print-object ((obj function-documentation) stream)\n (print-symbol-docobj obj stream \"DocFunc\"))\n\n(defclass glossary-entry (documented) ())\n\n;; If/when actually implemented, should become a subtype of DOCUMENTED\n(defclass macro-documentation (basicdoc) ())\n\n(defclass module (mode-documented)\n ((functions :initform nil :accessor functions)\n (variables :initform nil :accessor variables)\n (macros :initform nil :accessor macros)\n (submodules :initform nil :accessor submodules)\n (toplevel? :initform t :accessor toplevel?)\n (alpha-listing? :initform t :accessor alpha-listing?)\n ))\n\n(defclass symbol-doc (basicdoc) \n (\n one of : special - operator , : define - function , : macro , : function ,\n ;; :constant, :variable, or :type\n (stype :initform nil :initarg :stype :accessor stype)\n one of : function , : variable , or : type\n (dtype :initform nil :initarg :dtype :accessor dtype)))\n\n(defmethod print-object ((obj symbol-doc) stream)\n (print-symbol-docobj obj stream \"Symbol\"))\n\n(defclass topic (mode-documented) ())\n\n(defclass tutorial (mode-documented) \n ;; :filename -- a string, must be full pathname \n : file - type -- either : html or : \n ;; :user-mode -- a keyword or a list of keywords\n ;; :sort-order -- an integer \n ;; :description -- a string, this is really the summary \n : section - header -- two strings , a title , and a color\n : lhtml - function -- used only wth file type lhtml , must be a symbol\n : start - function -- used only with file type : lhtml , must be a symbol\n ((filename :initform nil :accessor filename)\n (file-type :initform nil :accessor file-type)\n (user-mode :initform nil :accessor user-mode)\n (sort-order :initform nil :accessor sort-order)\n (description :initform nil :accessor description)\n (lhtml-function :initform nil :accessor lhtml-function)\n (start-function :initform nil :accessor start-function)\n (section-header :initform nil :accessor section-header)\n ))\n\n;; If/when actually implemented, should become a subtype of DOCUMENTED\n(defclass variable-documentation (basicdoc) ())\n\n;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;\n\n\n;;; The definitions that create the verifiers and parsers for\n;;; the definition forms for each documentation object.\n\n\n(define-doc-definer \n documentation-file \n def-documentation-file\n create-documentation-file\n ((:summary :one-or-none ddd-string-or-nil identity help:docstring)\n (:keywords :list ddd-all-symbols-or-strings identity help:keywords)\n (:see-also :list verify-see-also-entries parse-see-also-entries help:see-also)\n (:author :list ddd-all-strings identity help:author)\n (:descriptor :one-or-none ddd-string-or-nil identity help:descriptor)\n ))\n\n;; function-documentation has no define-doc-definer, its verifer and parser\n;; are implemented by hand in document-function.lisp\n\n(define-doc-definer \n glossary-entry \n def-glossary-entry \n create-glossary-entry\n ((:summary :one-or-none ddd-string-or-nil identity help:docstring)\n (:text :non-nil-list ddd-identity identity help:text)\n (:keywords :list ddd-all-symbols-or-strings identity help:keywords)\n (:see-also :list verify-see-also-entries parse-see-also-entries help:see-also)\n (:author :list ddd-all-strings identity help:author)\n ))\n\n;; not doing macro-documentation for now since it is not 
used \n\n(define-doc-definer \n module \n def-module\n create-module\n ((:summary :one-or-none ddd-string-or-nil identity help:docstring)\n (:text :non-nil-list ddd-identity identity help:text)\n (:keywords :list ddd-all-symbols-or-strings identity help:keywords)\n (:see-also :list verify-see-also-entries parse-see-also-entries help:see-also)\n (:author :list ddd-all-strings identity help:author)\n (:functions :list ddd-all-symbols identity help:functions)\n (:variables :list ddd-all-symbols identity help:variables)\n (:macros :list ddd-all-symbols identity help:macros)\n (:submodules :list ddd-all-symbols identity help:submodules)\n (:toplevel? :exactly-one ddd-boolean identity help:toplevel?)\n (:alpha-listing? :exactly-one ddd-boolean identity help:alpha-listing?)\n (:display-modes :list ddd-all-symbols identity help:display-modes)\n )\n :after-code (setf (explicitly-documented-p obj) t))\n\n(defmacro document-module (name &body (docstring &rest meta))\n `(def-module ,(string name) (:summary ,docstring) ,@meta))\n\n(defmacro undocument-module (name &key remove-functions)\n `(progn\n (let ((module (find-documentation ',name 'module)))\n (if ,remove-functions\n\t (loop for fn in (functions module) do \n (remove-documentation fn 'function-documentation))\n\t (let ((uncategorized (intern-documentation 'uncategorized 'module)))\n\t (loop for fn in (functions module)\n\t\tfor fn-doc = (find-documentation fn 'function-documentation)\n\t\twhen fn-doc do\n\t\t (setf (module fn-doc) uncategorized)\n\t\t (push fn (functions uncategorized))))))\n (remove-documentation ',name 'module)))\n\n(defun modules () (hash-table-values (gethash 'module *documentation*)))\n\n(defparameter *uncategorized-key* \"UNCATEGORIZED\")\n\n;;; Setup the Uncategorized module.\n(let ((uncategorized (intern-documentation *uncategorized-key* 'module)))\n (setf (docstring uncategorized) \n \"Documented elements not part of any other module.\"))\n\n;; No symbol-doc creator macro because symbol-doc entries are reserved for\n;; those exported symbols which do not have define-function entries. These\n;; symbols are to be searched out at the end of the system load and\n;; at that point symbol-doc objects are created for each such symbol\n;; (using the below function)\n\n(defun create-symbol-doc (symbol &key docstring dtype stype)\n (make-instance\n 'help:symbol-doc \n :name symbol :docstring docstring :dtype dtype :stype stype))\n\n(defun create-symbol-doc-entries (&key (mode :external))\n (declare (ignore mode))\n (loop \n with hash = (gethash 'help:symbol-doc *documentation*)\n with packages-not-to-search =\n (remove (find-package :cl-user) cl-user::*startup-packages*)\n with cl-package = (find-package :common-lisp)\n for package in (list-all-packages) \n do\n ;; The startup packages are those that exist at the start\n ;; of our system load. 
Hence we only look for symbols in\n our own packages , CL , and third party stuff we load , like PPCRE\n (unless (and (member package packages-not-to-search) \n (not (eq package cl-package)))\n (do-external-symbols (symbol package) \n (when (or (eq package cl-package) \n (not (eq (symbol-package symbol) cl-package)))\n (cond\n ((get symbol :alias-of) (create-alias-for symbol))\n (t \n (vwhen (docs (maybe-create-symbol-docs symbol))\n (setf (gethash symbol hash) docs)\n ))))))))\n\n\n;; create a dummy function-documentation object whose only meaningful slots\n;; are explicitly-documented-p, which is given the value :alias-of to denote\n;; that this is a dummy, and see-also, which contains the real function\n;; that the symbol is an alias for. \n(defun create-alias-for (symbol)\n (let ((real-function (get symbol :alias-of))\n (docobj (intern-documentation symbol 'help:function-documentation)))\n (setf (explicitly-documented-p docobj) :alias-of)\n (setf (docstring docobj) (formatn \"Alias for ~A\" real-function))\n (setf (see-also docobj) nil)\n ;; (list (find-documentation real-function 'help:function-documentation))\n ))\n \n\n;;; Create a set of HELP:SYMBOL-DOC data structures, for a symbol\n\n(defun maybe-create-symbol-docs (symbol) \n (remove-if \n 'null \n (list \n (when (fboundp symbol) \n ;; Don't create an entry if the symbol is already\n ;; documented by DOCUMENT-FUNCTION\n (unless (find-documentation symbol 'help:function-documentation)\n (create-symbol-doc\n symbol\n :docstring (documentation symbol 'function)\n :stype \n (cond \n ((special-operator-p symbol) :special-operator)\n ((define-function-p symbol) :define-function)\n ((macro-function symbol) :macro)\n (t :function))\n :dtype :function\n )))\n (when (boundp symbol)\n (create-symbol-doc\n symbol \n :docstring (documentation symbol 'variable) \n :stype\n (cond\n ((constantp symbol) :constant)\n (t :variable))\n :dtype :variable\n ))\n ;; Figure out if a symbol denotes a type. Not portable.\n ;; This type checking causes the autoloading of the stream goo in ACL.\n (ignore-errors \n (typep nil symbol)\n (create-symbol-doc\n symbol \n :docstring (documentation symbol 'type)\n :stype :type\n :dtype :type\n )))))\n \n(define-doc-definer \n topic \n def-topic \n create-topic\n ((:summary :one-or-none ddd-string-or-nil identity help:docstring)\n (:text :non-nil-list ddd-identity identity help:text)\n (:keywords :list ddd-all-symbols-or-strings identity help:keywords)\n (:see-also :list verify-see-also-entries parse-see-also-entries help:see-also)\n (:author :list ddd-all-strings identity help:author)\n ))\n\n The define - doc - definer for tutorials is in live-tutorial.lisp\n\n;; not doing variable-documentation for now since it is not used \n\n#+not-used\n(defmacro document-variable (name docstring)\n `(let ((thing (intern-documentation ',name 'variable-documentation)))\n (setf (explicitly-documented-p thing) t)\n (setf (docstring thing) ,docstring)))\n\n;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;\n\n;; Each documentation type has an associated URL which displays\n;; the documentation object identified by the parameters of the URL\n;; (which are taken from the SEE-ALSO data structure). See-also \n;; lists can also contain objects which are not documentation-objects\n per se ( e.g. 
, references , URLs and frames )\n\n(wb::define-url&pkg&args \n help-documentation-file-url\n \"/new-help/help-documentation-file-url\" :name)\n\n;; File documentation doesn't have its own URL because the documentation\n directory already has its own AllegroServe PUBLISH - DIRECTORY url\n\n(wb::define-url&pkg&args\n help-function-documentation-url \n \"/new-help/help-function-documentation-url\" :name :package)\n\n(wb::define-url&pkg&args\n help-glossary-entry-url \"/new-help/help-glossary-entry-url\" :name)\n\n;; not doing macro-documentation because it's not used. \n\n(wb::define-url&pkg&args\n help-module-url \"/new-help/help-module-url\" :name)\n\n(wb::define-url&pkg&args\n help-symbol-doc-url \n \"/new-help/help-symbol-doc-url\" :name :package :type)\n\n(wb::define-url&pkg&args\n help-topic-url \"/new-help/help-topic-url\" :name)\n\n(wb::define-url&pkg&args\n help-tutorial-url \"/new-help/help-tutorial-url\" :name)\n\n;; not doing variable-documentation because it's not used. \n\n;; URLs don't have their own URL because they are already a URL!\n;; Frames don't have their own URL here because one exists already.\n\n;;; A page which lists all the glossary entries\n\n(wb::define-url&pkg&args\n help-glossary-url \"/new-help/help-glossary-url\")\n\n;;; A page which lists all the pertinent modules\n\n(wb::define-url&pkg&args help-modules-url \"/help/modules\")\n\n\n\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/jeffshrager/biobike/5313ec1fe8e82c21430d645e848ecc0386436f57/BioLisp/Help/doc-objects.lisp"},"language":{"kind":"string","value":"lisp"},"comments":{"kind":"string","value":" mode : lisp ; base : 10 ; Syntax : Common - Lisp ; -*-\n +=========================================================================+\n | |\n | Permission is hereby granted, free of charge, to any person obtaining |\n | a copy of this software and associated documentation files (the |\n | without limitation the rights to use, copy, modify, merge, publish, |\n | the following conditions: |\n | |\n | The above copyright notice and this permission notice shall be included |\n | |\n | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF |\n | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. |\n | IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY |\n | TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE |\n | SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
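The doc-objects.lisp content in this row stores every documentation object in a two-level table: an outer table keyed by documentation type, each entry holding an inner table keyed by name, with intern-documentation creating an object on first lookup. A minimal Haskell sketch of that intern/find pattern follows; the names and types are invented for the illustration and are not part of the BioLisp sources.

import qualified Data.Map.Strict as M
import Data.IORef

-- Outer map keyed by documentation type, inner map keyed by entry name.
type DocStore doc = IORef (M.Map String (M.Map String doc))

findDoc :: DocStore doc -> String -> String -> IO (Maybe doc)
findDoc store dtype name = do
  byType <- readIORef store
  pure (M.lookup dtype byType >>= M.lookup name)

-- Like intern-documentation: return the existing entry or install a fresh default.
internDoc :: DocStore doc -> String -> String -> doc -> IO doc
internDoc store dtype name def = do
  existing <- findDoc store dtype name
  case existing of
    Just d  -> pure d
    Nothing -> do
      modifyIORef' store (M.insertWith M.union dtype (M.singleton name def))
      pure def

An empty store is just newIORef M.empty; the Lisp original additionally pre-creates one inner table per known documentation type and picks a different hash test (eq, equal, equalp) for each, which the sketch leaves out.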
|\n +=========================================================================+\n All the various types of documentation (theoretically) available\n in the system.\n Arguably, FUNCTION-DOCUMENTATION and SYMBOL-DOC should be merged\n macro-documentation \n variable-documentation\n macro-documentation \n variable-documentation\n Where all documentation objects are stored.\n Use FIND-DOCUMENTATION to pull something out\n\n The hierarchy of documentation classes\n AKA 'summary'\n the reader methods are defined in document-function.lisp\n If/when actually implemented, should become a subtype of DOCUMENTED\n :constant, :variable, or :type\n :filename -- a string, must be full pathname \n :user-mode -- a keyword or a list of keywords\n :sort-order -- an integer \n :description -- a string, this is really the summary \n If/when actually implemented, should become a subtype of DOCUMENTED\n\n The definitions that create the verifiers and parsers for\n the definition forms for each documentation object.\n function-documentation has no define-doc-definer, its verifer and parser\n are implemented by hand in document-function.lisp\n not doing macro-documentation for now since it is not used \n Setup the Uncategorized module.\n No symbol-doc creator macro because symbol-doc entries are reserved for\n those exported symbols which do not have define-function entries. These\n symbols are to be searched out at the end of the system load and\n at that point symbol-doc objects are created for each such symbol\n (using the below function)\n The startup packages are those that exist at the start\n of our system load. Hence we only look for symbols in\n create a dummy function-documentation object whose only meaningful slots\n are explicitly-documented-p, which is given the value :alias-of to denote\n that this is a dummy, and see-also, which contains the real function\n that the symbol is an alias for. \n (list (find-documentation real-function 'help:function-documentation))\n Create a set of HELP:SYMBOL-DOC data structures, for a symbol\n Don't create an entry if the symbol is already\n documented by DOCUMENT-FUNCTION\n Figure out if a symbol denotes a type. Not portable.\n This type checking causes the autoloading of the stream goo in ACL.\n not doing variable-documentation for now since it is not used \n\n Each documentation type has an associated URL which displays\n the documentation object identified by the parameters of the URL\n (which are taken from the SEE-ALSO data structure). See-also \n lists can also contain objects which are not documentation-objects\n File documentation doesn't have its own URL because the documentation\n not doing macro-documentation because it's not used. \n not doing variable-documentation because it's not used. \n URLs don't have their own URL because they are already a URL!\n Frames don't have their own URL here because one exists already.\n A page which lists all the glossary entries\n A page which lists all the pertinent modules"},"code":{"kind":"string","value":"\n(in-package :help)\n\n | Copyright ( c ) 2002 - 2006 JP , , |\n | \" Software \" ) , to deal in the Software without restriction , including |\n | distribute , sublicense , and/or sell copies of the Software , and to |\n | permit persons to whom the Software is furnished to do so , subject to |\n | in all copies or substantial portions of the Software . 
|\n | THE SOFTWARE IS PROVIDED \" AS IS \" , WITHOUT WARRANTY OF ANY KIND , |\n | CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , |\n\n Authors : JP Massar , .\n\n\n\n\n(defparameter *documentation-types*\n '(\n documentation-file\n function-documentation\n glossary-entry\n module\n symbol-doc\n topic\n tutorial\n ))\n\n(defparameter *doc-types-hash-types*\n '(\n (documentation-file equal)\n (function-documentation eq)\n (glossary-entry equalp)\n (module equalp)\n (symbol-doc eq)\n (topic equalp)\n (tutorial equalp)\n ))\n\n\n(eval-when (:compile-toplevel :load-toplevel :execute)\n (defun create-doc-hash-tables ()\n (let ((ht (make-hash-table)))\n (loop for (doc-type hash-test) in *doc-types-hash-types* do\n (setf (gethash doc-type ht) (make-hash-table :test hash-test)))\n ht\n )))\n\n(defvar *documentation* (create-doc-hash-tables))\n\n(defun intern-documentation (name type)\n (or (find-documentation name type)\n (setf (gethash name (gethash type *documentation*)) \n (make-instance type :name name))))\n\n(defun remove-documentation (name type)\n (remhash name (gethash type *documentation*)) (make-instance type :name name))\n\n(defun find-documentation (name type)\n (gethash name (gethash type *documentation*)))\n\n(defun clear-documentation ()\n (setf *documentation* (create-doc-hash-tables)))\n\n\n\n\n(defclass basicdoc ()\n ((name :initarg :name :accessor name)\n (docstring :initform nil :initarg :docstring :accessor docstring)\n (referred-to-by :initform nil :accessor referred-to-by)\n ))\n\n(defmethod summary ((obj basicdoc)) (docstring obj))\n(defmethod text ((obj basicdoc)) nil)\n(defmethod keywords ((obj basicdoc)) nil)\n(defmethod see-also ((obj basicdoc)) nil)\n(defmethod explicitly-documented-p ((obj basicdoc)) nil)\n(defmethod author ((obj basicdoc)) nil)\n\n(defmethod print-object ((obj basicdoc) stream)\n (format stream \"<Docobj ~A (~A)>\" (help:name obj) (type-of obj)))\n\n(defclass documented (basicdoc)\n ((text :initform nil :accessor text)\n (keywords :initform nil :accessor keywords)\n (see-also :initform nil :accessor see-also)\n (author :initform nil :accessor author)\n (explicitly-documented-p :initform nil :accessor explicitly-documented-p)))\n\n(defclass mode-documented (documented) \n ((display-modes \n :initform (list :all) \n :accessor display-modes\n )))\n\n(defclass documentation-file (mode-documented) \n ((label :initform nil :accessor label)\n (source-file :initform nil :accessor source-file)\n (associated-text-file \n :initform nil\n :accessor associated-text-file\n )\n (matches :initform nil :accessor matches)\n (descriptor :initform nil :accessor descriptor)\n ))\n \n(defclass function-documentation (documented module-element)\n ((parameters :initform nil :writer (setf parameters))\n (return-values :initform nil :writer (setf return-values))\n (syntax :initform nil :writer (setf syntax))\n (vpl-syntax :initform nil :writer (setf vpl-syntax))\n (examples :initform nil :writer (setf examples))\n (examples-package :initform nil :writer (setf examples-package))\n (synonyms :initform nil :writer (setf synonyms))\n (flavor :initform :defun :writer (setf flavor))\n (canonical :initform nil :accessor canonical)\n (aliased :initform nil :accessor aliased)\n ))\n\n\n(defmethod print-object ((obj function-documentation) stream)\n (print-symbol-docobj obj stream \"DocFunc\"))\n\n(defclass glossary-entry (documented) ())\n\n(defclass macro-documentation (basicdoc) ())\n\n(defclass module (mode-documented)\n ((functions :initform nil :accessor 
functions)\n (variables :initform nil :accessor variables)\n (macros :initform nil :accessor macros)\n (submodules :initform nil :accessor submodules)\n (toplevel? :initform t :accessor toplevel?)\n (alpha-listing? :initform t :accessor alpha-listing?)\n ))\n\n(defclass symbol-doc (basicdoc) \n (\n one of : special - operator , : define - function , : macro , : function ,\n (stype :initform nil :initarg :stype :accessor stype)\n one of : function , : variable , or : type\n (dtype :initform nil :initarg :dtype :accessor dtype)))\n\n(defmethod print-object ((obj symbol-doc) stream)\n (print-symbol-docobj obj stream \"Symbol\"))\n\n(defclass topic (mode-documented) ())\n\n(defclass tutorial (mode-documented) \n : file - type -- either : html or : \n : section - header -- two strings , a title , and a color\n : lhtml - function -- used only wth file type lhtml , must be a symbol\n : start - function -- used only with file type : lhtml , must be a symbol\n ((filename :initform nil :accessor filename)\n (file-type :initform nil :accessor file-type)\n (user-mode :initform nil :accessor user-mode)\n (sort-order :initform nil :accessor sort-order)\n (description :initform nil :accessor description)\n (lhtml-function :initform nil :accessor lhtml-function)\n (start-function :initform nil :accessor start-function)\n (section-header :initform nil :accessor section-header)\n ))\n\n(defclass variable-documentation (basicdoc) ())\n\n\n\n\n\n(define-doc-definer \n documentation-file \n def-documentation-file\n create-documentation-file\n ((:summary :one-or-none ddd-string-or-nil identity help:docstring)\n (:keywords :list ddd-all-symbols-or-strings identity help:keywords)\n (:see-also :list verify-see-also-entries parse-see-also-entries help:see-also)\n (:author :list ddd-all-strings identity help:author)\n (:descriptor :one-or-none ddd-string-or-nil identity help:descriptor)\n ))\n\n\n(define-doc-definer \n glossary-entry \n def-glossary-entry \n create-glossary-entry\n ((:summary :one-or-none ddd-string-or-nil identity help:docstring)\n (:text :non-nil-list ddd-identity identity help:text)\n (:keywords :list ddd-all-symbols-or-strings identity help:keywords)\n (:see-also :list verify-see-also-entries parse-see-also-entries help:see-also)\n (:author :list ddd-all-strings identity help:author)\n ))\n\n\n(define-doc-definer \n module \n def-module\n create-module\n ((:summary :one-or-none ddd-string-or-nil identity help:docstring)\n (:text :non-nil-list ddd-identity identity help:text)\n (:keywords :list ddd-all-symbols-or-strings identity help:keywords)\n (:see-also :list verify-see-also-entries parse-see-also-entries help:see-also)\n (:author :list ddd-all-strings identity help:author)\n (:functions :list ddd-all-symbols identity help:functions)\n (:variables :list ddd-all-symbols identity help:variables)\n (:macros :list ddd-all-symbols identity help:macros)\n (:submodules :list ddd-all-symbols identity help:submodules)\n (:toplevel? :exactly-one ddd-boolean identity help:toplevel?)\n (:alpha-listing? 
:exactly-one ddd-boolean identity help:alpha-listing?)\n (:display-modes :list ddd-all-symbols identity help:display-modes)\n )\n :after-code (setf (explicitly-documented-p obj) t))\n\n(defmacro document-module (name &body (docstring &rest meta))\n `(def-module ,(string name) (:summary ,docstring) ,@meta))\n\n(defmacro undocument-module (name &key remove-functions)\n `(progn\n (let ((module (find-documentation ',name 'module)))\n (if ,remove-functions\n\t (loop for fn in (functions module) do \n (remove-documentation fn 'function-documentation))\n\t (let ((uncategorized (intern-documentation 'uncategorized 'module)))\n\t (loop for fn in (functions module)\n\t\tfor fn-doc = (find-documentation fn 'function-documentation)\n\t\twhen fn-doc do\n\t\t (setf (module fn-doc) uncategorized)\n\t\t (push fn (functions uncategorized))))))\n (remove-documentation ',name 'module)))\n\n(defun modules () (hash-table-values (gethash 'module *documentation*)))\n\n(defparameter *uncategorized-key* \"UNCATEGORIZED\")\n\n(let ((uncategorized (intern-documentation *uncategorized-key* 'module)))\n (setf (docstring uncategorized) \n \"Documented elements not part of any other module.\"))\n\n\n(defun create-symbol-doc (symbol &key docstring dtype stype)\n (make-instance\n 'help:symbol-doc \n :name symbol :docstring docstring :dtype dtype :stype stype))\n\n(defun create-symbol-doc-entries (&key (mode :external))\n (declare (ignore mode))\n (loop \n with hash = (gethash 'help:symbol-doc *documentation*)\n with packages-not-to-search =\n (remove (find-package :cl-user) cl-user::*startup-packages*)\n with cl-package = (find-package :common-lisp)\n for package in (list-all-packages) \n do\n our own packages , CL , and third party stuff we load , like PPCRE\n (unless (and (member package packages-not-to-search) \n (not (eq package cl-package)))\n (do-external-symbols (symbol package) \n (when (or (eq package cl-package) \n (not (eq (symbol-package symbol) cl-package)))\n (cond\n ((get symbol :alias-of) (create-alias-for symbol))\n (t \n (vwhen (docs (maybe-create-symbol-docs symbol))\n (setf (gethash symbol hash) docs)\n ))))))))\n\n\n(defun create-alias-for (symbol)\n (let ((real-function (get symbol :alias-of))\n (docobj (intern-documentation symbol 'help:function-documentation)))\n (setf (explicitly-documented-p docobj) :alias-of)\n (setf (docstring docobj) (formatn \"Alias for ~A\" real-function))\n (setf (see-also docobj) nil)\n ))\n \n\n\n(defun maybe-create-symbol-docs (symbol) \n (remove-if \n 'null \n (list \n (when (fboundp symbol) \n (unless (find-documentation symbol 'help:function-documentation)\n (create-symbol-doc\n symbol\n :docstring (documentation symbol 'function)\n :stype \n (cond \n ((special-operator-p symbol) :special-operator)\n ((define-function-p symbol) :define-function)\n ((macro-function symbol) :macro)\n (t :function))\n :dtype :function\n )))\n (when (boundp symbol)\n (create-symbol-doc\n symbol \n :docstring (documentation symbol 'variable) \n :stype\n (cond\n ((constantp symbol) :constant)\n (t :variable))\n :dtype :variable\n ))\n (ignore-errors \n (typep nil symbol)\n (create-symbol-doc\n symbol \n :docstring (documentation symbol 'type)\n :stype :type\n :dtype :type\n )))))\n \n(define-doc-definer \n topic \n def-topic \n create-topic\n ((:summary :one-or-none ddd-string-or-nil identity help:docstring)\n (:text :non-nil-list ddd-identity identity help:text)\n (:keywords :list ddd-all-symbols-or-strings identity help:keywords)\n (:see-also :list verify-see-also-entries 
parse-see-also-entries help:see-also)\n (:author :list ddd-all-strings identity help:author)\n ))\n\n The define - doc - definer for tutorials is in live-tutorial.lisp\n\n\n#+not-used\n(defmacro document-variable (name docstring)\n `(let ((thing (intern-documentation ',name 'variable-documentation)))\n (setf (explicitly-documented-p thing) t)\n (setf (docstring thing) ,docstring)))\n\n\n per se ( e.g. , references , URLs and frames )\n\n(wb::define-url&pkg&args \n help-documentation-file-url\n \"/new-help/help-documentation-file-url\" :name)\n\n directory already has its own AllegroServe PUBLISH - DIRECTORY url\n\n(wb::define-url&pkg&args\n help-function-documentation-url \n \"/new-help/help-function-documentation-url\" :name :package)\n\n(wb::define-url&pkg&args\n help-glossary-entry-url \"/new-help/help-glossary-entry-url\" :name)\n\n\n(wb::define-url&pkg&args\n help-module-url \"/new-help/help-module-url\" :name)\n\n(wb::define-url&pkg&args\n help-symbol-doc-url \n \"/new-help/help-symbol-doc-url\" :name :package :type)\n\n(wb::define-url&pkg&args\n help-topic-url \"/new-help/help-topic-url\" :name)\n\n(wb::define-url&pkg&args\n help-tutorial-url \"/new-help/help-tutorial-url\" :name)\n\n\n\n\n(wb::define-url&pkg&args\n help-glossary-url \"/new-help/help-glossary-url\")\n\n\n(wb::define-url&pkg&args help-modules-url \"/help/modules\")\n\n\n\n"}}},{"rowIdx":610292,"cells":{"_id":{"kind":"string","value":"30a50b63c3ecf654875c6c6afdd4fa9e3bf4343a85b180670388b1bef846aa42"},"repository":{"kind":"string","value":"jixiuf/helloerlang"},"name":{"kind":"string","value":"emysql_app.erl"},"content":{"kind":"string","value":" Copyright ( c ) 2009\n < >\n < >\n%%\n%% Permission is hereby granted, free of charge, to any person\n%% obtaining a copy of this software and associated documentation\n files ( the \" Software \" ) , to deal in the Software without\n%% restriction, including without limitation the rights to use,\n%% copy, modify, merge, publish, distribute, sublicense, and/or sell\n copies of the Software , and to permit persons to whom the\n%% Software is furnished to do so, subject to the following\n%% conditions:\n%%\n%% The above copyright notice and this permission notice shall be\n included in all copies or substantial portions of the Software .\n%%\n THE SOFTWARE IS PROVIDED \" AS IS \" , WITHOUT WARRANTY OF ANY KIND ,\n%% EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES\n%% OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND\n%% NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT\n%% HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,\n%% WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR\n%% OTHER DEALINGS IN THE SOFTWARE.\n-module(emysql_app).\n-behaviour(application).\n\n-export([start/2, stop/1, modules/0, default_timeout/0, lock_timeout/0, pools/0]).\n\n-include(\"emysql.hrl\").\n\nstart(_Type, _StartArgs) ->\n\n case of\n\t%\t\"%MAKETIME%\" -> ok; % happens with rebar build\n\t _ - > io : format(\"Build time : ~p ~ n \" , )\n\t% end,\n\t\n\temysql_sup:start_link().\n\nstop(_State) ->\n\tlists:foreach(\n\t\tfun(Pool) ->\n\t\t\tlists:foreach(\n\t\t\t\tfun emysql_conn:close_connection/1,\n\t\t\t\tlists:append(queue:to_list(Pool#pool.available), gb_trees:values(Pool#pool.locked))\n\t\t\t)\n\t\tend,\n\t\temysql_conn_mgr:pools()\n\t),\n\tok.\n\nmodules() ->\n\t{ok, Modules} = application_controller:get_key(emysql, modules), Modules.\n\ndefault_timeout() ->\n\tcase application:get_env(emysql, default_timeout) of\n\t\tundefined -> ?TIMEOUT;\n\t\t{ok, Timeout} -> Timeout\n\tend.\n\nlock_timeout() ->\n\tcase application:get_env(emysql, lock_timeout) of\n\t\tundefined -> ?LOCK_TIMEOUT;\n\t\t{ok, Timeout} -> Timeout\n\tend.\n\npools() ->\n\tcase application:get_env(emysql, pools) of\n\t\t{ok, Pools} when is_list(Pools) ->\n\t\t\tPools;\n\t\t_ ->\n\t\t\t[]\n\tend.\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/jixiuf/helloerlang/3960eb4237b026f98edf35d6064539259a816d58/gls/sgLogServer/deps/emysql/src/emysql_app.erl"},"language":{"kind":"string","value":"erlang"},"comments":{"kind":"string","value":"\n Permission is hereby granted, free of charge, to any person\n obtaining a copy of this software and associated documentation\n restriction, including without limitation the rights to use,\n copy, modify, merge, publish, distribute, sublicense, and/or sell\n Software is furnished to do so, subject to the following\n conditions:\n\n The above copyright notice and this permission notice shall be\n\n EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES\n OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND\n NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT\n HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,\n WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR\n OTHER DEALINGS IN THE SOFTWARE.\n\t\"%MAKETIME%\" -> ok; % happens with rebar build\n end,"},"code":{"kind":"string","value":" Copyright ( c ) 2009\n < >\n < >\n files ( the \" Software \" ) , to deal in the Software without\n copies of the Software , and to permit persons to whom the\n included in all copies or substantial portions of the Software .\n THE SOFTWARE IS PROVIDED \" AS IS \" , WITHOUT WARRANTY OF ANY KIND ,\n-module(emysql_app).\n-behaviour(application).\n\n-export([start/2, stop/1, modules/0, default_timeout/0, lock_timeout/0, pools/0]).\n\n-include(\"emysql.hrl\").\n\nstart(_Type, _StartArgs) ->\n\n case of\n\t _ - > io : format(\"Build time : ~p ~ n \" , )\n\t\n\temysql_sup:start_link().\n\nstop(_State) ->\n\tlists:foreach(\n\t\tfun(Pool) ->\n\t\t\tlists:foreach(\n\t\t\t\tfun emysql_conn:close_connection/1,\n\t\t\t\tlists:append(queue:to_list(Pool#pool.available), gb_trees:values(Pool#pool.locked))\n\t\t\t)\n\t\tend,\n\t\temysql_conn_mgr:pools()\n\t),\n\tok.\n\nmodules() ->\n\t{ok, Modules} = application_controller:get_key(emysql, modules), Modules.\n\ndefault_timeout() ->\n\tcase application:get_env(emysql, default_timeout) of\n\t\tundefined -> ?TIMEOUT;\n\t\t{ok, Timeout} -> Timeout\n\tend.\n\nlock_timeout() ->\n\tcase application:get_env(emysql, lock_timeout) of\n\t\tundefined -> ?LOCK_TIMEOUT;\n\t\t{ok, Timeout} -> Timeout\n\tend.\n\npools() ->\n\tcase application:get_env(emysql, pools) of\n\t\t{ok, Pools} when is_list(Pools) ->\n\t\t\tPools;\n\t\t_ ->\n\t\t\t[]\n\tend.\n"}}},{"rowIdx":610293,"cells":{"_id":{"kind":"string","value":"880432877c4576b2b53a6ab8d82cfcf659217ae9f40897338270e33faf2d362d"},"repository":{"kind":"string","value":"kenbot/church"},"name":{"kind":"string","value":"ChurchList.hs"},"content":{"kind":"string","value":"{-# LANGUAGE RankNTypes #-}\n\nmodule ChurchList where\n\ntype CList a = forall r. (a -> r -> r) -> r -> r\n\ncNil :: CList a \ncNil f nil = nil\n\ncCons :: a -> CList a -> CList a\ncCons a clist = \\f b -> f a (clist f b) \n\ncListToList :: CList a -> [a]\ncListToList clist = clist (:) [] \n\nlistToCList :: [a] -> CList a\nlistToCList [] = cNil\nlistToCList (a : as) = a `cCons` (listToCList as) \n\ncListToString :: Show a => CList a -> String\ncListToString = show . cListToList \n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/kenbot/church/a3da46b584dde00b66da14943154f225f062eb86/ChurchList.hs"},"language":{"kind":"string","value":"haskell"},"comments":{"kind":"string","value":"# LANGUAGE RankNTypes #"},"code":{"kind":"string","value":"\nmodule ChurchList where\n\ntype CList a = forall r. (a -> r -> r) -> r -> r\n\ncNil :: CList a \ncNil f nil = nil\n\ncCons :: a -> CList a -> CList a\ncCons a clist = \\f b -> f a (clist f b) \n\ncListToList :: CList a -> [a]\ncListToList clist = clist (:) [] \n\nlistToCList :: [a] -> CList a\nlistToCList [] = cNil\nlistToCList (a : as) = a `cCons` (listToCList as) \n\ncListToString :: Show a => CList a -> String\ncListToString = show . 
cListToList \n"}}},{"rowIdx":610294,"cells":{"_id":{"kind":"string","value":"4fe3c1118b59f0dea430902df4d1007ca2565110ef0da6b5820a22aaf91e766e"},"repository":{"kind":"string","value":"vvvvalvalval/mapdag"},"name":{"kind":"string","value":"default.cljc"},"content":{"kind":"string","value":"(ns mapdag.test.runtime.default\n (:require [clojure.test :refer :all]\n [mapdag.test.core]\n [mapdag.runtime.default]))\n\n(deftest compute--examples\n (mapdag.test.core/test-implementation--examples mapdag.runtime.default/compute))\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/vvvvalvalval/mapdag/c0758a7dcd986e7062d80c3dd368ea769d0d5b41/test/mapdag/test/runtime/default.cljc"},"language":{"kind":"string","value":"clojure"},"comments":{"kind":"string","value":""},"code":{"kind":"string","value":"(ns mapdag.test.runtime.default\n (:require [clojure.test :refer :all]\n [mapdag.test.core]\n [mapdag.runtime.default]))\n\n(deftest compute--examples\n (mapdag.test.core/test-implementation--examples mapdag.runtime.default/compute))\n"}}},{"rowIdx":610295,"cells":{"_id":{"kind":"string","value":"eb9144d628de1e3551cfcd9ac43fcb375008cf9a1d19819e0bdcb413fc287fd3"},"repository":{"kind":"string","value":"alevy/postgresql-orm"},"name":{"kind":"string","value":"Model_old.hs"},"content":{"kind":"string","value":"# LANGUAGE Trustworthy #\n{-# LANGUAGE RankNTypes #-}\n# LANGUAGE DeriveGeneric #\n# LANGUAGE MultiParamTypeClasses #\n{-# LANGUAGE OverloadedStrings #-}\n{-# LANGUAGE DeriveDataTypeable #-}\n# LANGUAGE ScopedTypeVariables #\n# LANGUAGE FunctionalDependencies #\n# LANGUAGE TypeOperators #\n# LANGUAGE FlexibleInstances #\n# LANGUAGE DefaultSignatures #\n\nmodule Database.PostgreSQL.ORM.LIO.Model where\n\nimport qualified Data.ByteString as S\nimport qualified Database.PostgreSQL.Simple as M\nimport Database.PostgreSQL.Simple.Types\nimport Database.PostgreSQL.ORM.Model (Model, GDBRef)\nimport qualified Database.PostgreSQL.ORM.Model as M\nimport qualified Database.PostgreSQL.ORM.DBSelect as M\nimport qualified Database.PostgreSQL.ORM as M\nimport Data.Typeable\nimport LIO\nimport LIO.DCLabel\nimport LIO.TCB\n\nimport GHC.Generics\nimport Database.PostgreSQL.ORM.CreateTable\nimport Data.Vector (Vector, toList)\n\nfindAllP :: (Model r, ModelPolicy c r m) => Connection c -> DC [DCLabeled m]\nfindAllP (ConnectionTCB c dcc) = do\n rows <- ioTCB $ M.dbSelect c selectModel\n mapM (labelModel dcc) rows\n\nfindRow :: (Model r, ModelPolicy c r m)\n => Connection c -> GDBRef rt r -> DC (Maybe (DCLabeled m))\nfindRow (ConnectionTCB c dcc) k = do\n mrow <- ioTCB $ M.findRow c k\n case mrow of\n Nothing -> return Nothing\n Just row -> labelModel dcc row >>= \\lr -> return $ Just lr\n\ndata Connection c = ConnectionTCB M.Connection c\n\nclass DCConnection c => ModelPolicy c a b | a -> b, b -> c, b -> a where\n labelModel :: c -> a -> DC (DCLabeled b)\n\n selectModel :: M.DBSelect a\n default selectModel :: (Model a) => M.DBSelect a\n selectModel = M.modelDBSelect\n\n lookupModel :: M.DBSelect a\n default lookupModel :: Model a => M.DBSelect a\n lookupModel =\n let primKey = M.modelQPrimaryColumn (M.modelIdentifiers :: M.ModelIdentifiers a)\n in M.addWhere_ (Query $ S.concat [primKey, \" = ?\"]) $ M.modelDBSelect\n\nclass Typeable c => DCConnection c where\n newConnection :: DCPriv -> c\n\nconnect :: forall c. 
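ChurchList.hs in the kenbot/church row above represents a list as its own right fold. The short sketch below (an assumed usage example, not taken from that repository) shows how length and map fall out directly from the encoding.

{-# LANGUAGE RankNTypes #-}

type CList a = forall r. (a -> r -> r) -> r -> r

-- A concrete Church list, written out by hand.
oneTwoThree :: CList Int
oneTwoThree cons nil = cons 1 (cons 2 (cons 3 nil))

-- Length just counts with the fold.
cLength :: CList a -> Int
cLength xs = xs (\_ n -> n + 1) 0

-- Map rebuilds the list, transforming each element as it is re-consed.
cMap :: (a -> b) -> CList a -> CList b
cMap f xs = \cons nil -> xs (\a r -> cons (f a) r) nil

-- cLength oneTwoThree           == 3
-- cMap (* 2) oneTwoThree (:) [] == [2,4,6]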
DCConnection c => DC (Connection c)\nconnect = do\n let tc = typeRepTyCon $ typeOf (undefined :: c)\n pd = concat\n [ tyConPackage tc\n , \":\"\n , tyConModule tc\n , \".\"\n , tyConName tc ]\n cpriv = PrivTCB $ toCNF $ principal pd\n M.defaultConnectInfo -- { M.connectDatabase = pd }\n return $ ConnectionTCB conn $ newConnection cpriv\n\n--- EXAMPLE\n\ndata MyConn = MyConnTCB DCPriv deriving (Typeable)\n\ninstance DCConnection MyConn where\n newConnection = MyConnTCB\n\ndata Owner = Owner { ownerId :: M.DBKey\n , ownerPrincipal :: String } deriving (Generic, Show)\n\ndata Region = Region { regionId :: M.DBKey\n , regionName :: String\n , regionOwner :: M.DBRef Owner } deriving (Generic, Show)\n\ninstance Model Region where\n modelInfo = M.underscoreModelInfo \"region\"\n\ninstance Model Owner where\n modelInfo = M.underscoreModelInfo \"region\"\n\ninstance ModelPolicy MyConn (Region M.:. Owner) Region where\n selectModel = M.addExpression \"\" $ M.modelDBSelect\n labelModel (MyConnTCB mypriv) (region M.:. owner) = do\n labelP mypriv (ownerPrincipal owner \\/ mypriv %% ownerPrincipal owner \\/ mypriv) region\n\ninstance ModelPolicy MyConn Owner Owner where\n labelModel (MyConnTCB mypriv) owner =\n labelP mypriv (True %% mypriv) owner\n\ndata Owners = Owners { ownersId :: M.DBKey, owners :: Vector String } deriving (Generic, Typeable)\n\ninstance Model Owners where\n\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/alevy/postgresql-orm/9316db2f226c512036c2b72983020f6bdefd41bd/src/Database/PostgreSQL/ORM/LIO/Model_old.hs"},"language":{"kind":"string","value":"haskell"},"comments":{"kind":"string","value":"# LANGUAGE RankNTypes #\n# LANGUAGE OverloadedStrings #\n# LANGUAGE DeriveDataTypeable #\n { M.connectDatabase = pd }\n- EXAMPLE"},"code":{"kind":"string","value":"# LANGUAGE Trustworthy #\n# LANGUAGE DeriveGeneric #\n# LANGUAGE MultiParamTypeClasses #\n# LANGUAGE ScopedTypeVariables #\n# LANGUAGE FunctionalDependencies #\n# LANGUAGE TypeOperators #\n# LANGUAGE FlexibleInstances #\n# LANGUAGE DefaultSignatures #\n\nmodule Database.PostgreSQL.ORM.LIO.Model where\n\nimport qualified Data.ByteString as S\nimport qualified Database.PostgreSQL.Simple as M\nimport Database.PostgreSQL.Simple.Types\nimport Database.PostgreSQL.ORM.Model (Model, GDBRef)\nimport qualified Database.PostgreSQL.ORM.Model as M\nimport qualified Database.PostgreSQL.ORM.DBSelect as M\nimport qualified Database.PostgreSQL.ORM as M\nimport Data.Typeable\nimport LIO\nimport LIO.DCLabel\nimport LIO.TCB\n\nimport GHC.Generics\nimport Database.PostgreSQL.ORM.CreateTable\nimport Data.Vector (Vector, toList)\n\nfindAllP :: (Model r, ModelPolicy c r m) => Connection c -> DC [DCLabeled m]\nfindAllP (ConnectionTCB c dcc) = do\n rows <- ioTCB $ M.dbSelect c selectModel\n mapM (labelModel dcc) rows\n\nfindRow :: (Model r, ModelPolicy c r m)\n => Connection c -> GDBRef rt r -> DC (Maybe (DCLabeled m))\nfindRow (ConnectionTCB c dcc) k = do\n mrow <- ioTCB $ M.findRow c k\n case mrow of\n Nothing -> return Nothing\n Just row -> labelModel dcc row >>= \\lr -> return $ Just lr\n\ndata Connection c = ConnectionTCB M.Connection c\n\nclass DCConnection c => ModelPolicy c a b | a -> b, b -> c, b -> a where\n labelModel :: c -> a -> DC (DCLabeled b)\n\n selectModel :: M.DBSelect a\n default selectModel :: (Model a) => M.DBSelect a\n selectModel = M.modelDBSelect\n\n lookupModel :: M.DBSelect a\n default lookupModel :: Model a => M.DBSelect a\n lookupModel =\n let primKey = 
M.modelQPrimaryColumn (M.modelIdentifiers :: M.ModelIdentifiers a)\n in M.addWhere_ (Query $ S.concat [primKey, \" = ?\"]) $ M.modelDBSelect\n\nclass Typeable c => DCConnection c where\n newConnection :: DCPriv -> c\n\nconnect :: forall c. DCConnection c => DC (Connection c)\nconnect = do\n let tc = typeRepTyCon $ typeOf (undefined :: c)\n pd = concat\n [ tyConPackage tc\n , \":\"\n , tyConModule tc\n , \".\"\n , tyConName tc ]\n cpriv = PrivTCB $ toCNF $ principal pd\n return $ ConnectionTCB conn $ newConnection cpriv\n\n\ndata MyConn = MyConnTCB DCPriv deriving (Typeable)\n\ninstance DCConnection MyConn where\n newConnection = MyConnTCB\n\ndata Owner = Owner { ownerId :: M.DBKey\n , ownerPrincipal :: String } deriving (Generic, Show)\n\ndata Region = Region { regionId :: M.DBKey\n , regionName :: String\n , regionOwner :: M.DBRef Owner } deriving (Generic, Show)\n\ninstance Model Region where\n modelInfo = M.underscoreModelInfo \"region\"\n\ninstance Model Owner where\n modelInfo = M.underscoreModelInfo \"region\"\n\ninstance ModelPolicy MyConn (Region M.:. Owner) Region where\n selectModel = M.addExpression \"\" $ M.modelDBSelect\n labelModel (MyConnTCB mypriv) (region M.:. owner) = do\n labelP mypriv (ownerPrincipal owner \\/ mypriv %% ownerPrincipal owner \\/ mypriv) region\n\ninstance ModelPolicy MyConn Owner Owner where\n labelModel (MyConnTCB mypriv) owner =\n labelP mypriv (True %% mypriv) owner\n\ndata Owners = Owners { ownersId :: M.DBKey, owners :: Vector String } deriving (Generic, Typeable)\n\ninstance Model Owners where\n\n"}}},{"rowIdx":610296,"cells":{"_id":{"kind":"string","value":"079a9581678803d1db11a5532e99753bb7f9380076b7da3e20f4f3725a4665f2"},"repository":{"kind":"string","value":"fission-codes/fission"},"name":{"kind":"string","value":"Init.hs"},"content":{"kind":"string","value":"-- | Initialize a new Fission app in an existing directory\nmodule Fission.CLI.Handler.App.Init (appInit) where\n\nimport qualified Crypto.PubKey.Ed25519 as Ed25519\nimport qualified Data.Yaml as YAML\nimport qualified System.Console.ANSI as ANSI\n\nimport Fission.Prelude\n\nimport qualified Fission.App.Name as App\nimport Fission.Authorization.ServerDID\nimport Fission.Error.Types\nimport qualified Fission.Internal.UTF8 as UTF8\n\nimport Fission.Web.Auth.Token.Types\nimport Fission.Web.Client\n\nimport Fission.CLI.Display.Text\n\nimport qualified Fission.CLI.Display.Error as CLI.Error\nimport qualified Fission.CLI.Display.Success as CLI.Success\n\nimport qualified Fission.CLI.App.Environment as App.Env\nimport qualified Fission.CLI.Prompt.BuildDir as BuildDir\n\nimport Fission.CLI.Environment\nimport Fission.CLI.WebNative.Mutation.Auth.Store as UCAN\n\n | Sync the current working directory to the server over IPFS\nappInit ::\n ( MonadIO m\n , MonadTime m\n , MonadLogger m\n , MonadEnvironment m\n , UCAN.MonadStore m\n , MonadWebClient m\n , ServerDID m\n\n , MonadCleanup m\n , m `Raises` ClientError\n , m `Raises` YAML.ParseException\n , m `Raises` NotFound FilePath\n\n , Contains (Errors m) (Errors m)\n , Display (OpenUnion (Errors m))\n , Show (OpenUnion (Errors m))\n\n , MonadWebAuth m Token\n , MonadWebAuth m Ed25519.SecretKey\n )\n => FilePath\n -> Maybe FilePath\n -> Maybe App.Name\n -> m ()\nappInit appDir mayBuildDir' mayAppName = do\n logDebug @Text \"appInit\"\n\n proof <- getRootUserProof\n attempt (sendAuthedRequest proof $ createApp mayAppName) >>= \\case\n Left err -> do\n logDebug $ textDisplay err\n CLI.Error.put err $ textDisplay err\n raise err\n\n Right appURL -> do\n 
logDebug $ \"Created app \" <> textDisplay appURL\n\n case mayBuildDir' of\n Nothing -> do\n guess <- BuildDir.prompt appDir\n App.Env.create appURL $ fromMaybe guess mayBuildDir'\n\n Just dir -> do\n logDebug $ \"BuildDir passed from flag: \" <> dir\n App.Env.create appURL dir\n\n CLI.Success.putOk $ \"App initialized as \" <> textDisplay appURL\n\n UTF8.putText \"⏯️ Next, run \"\n\n colourized [ANSI.SetColor ANSI.Foreground ANSI.Vivid ANSI.Blue] do\n UTF8.putText \"fission app publish [--open|--watch]\"\n\n UTF8.putText \" to sync data\\n\"\n\n UTF8.putText \"💁 It may take DNS time to propagate this initial setup globally. In this case, you can always view your app at \"\n\n colourized [ANSI.SetColor ANSI.Foreground ANSI.Vivid ANSI.Blue] do\n UTF8.putText $ \"/\" <> textDisplay appURL <> \"\\n\"\n\n return ()\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/fission-codes/fission/e5a5d6f30fb4451918efba5b72787cbc7632aecf/fission-cli/library/Fission/CLI/Handler/App/Init.hs"},"language":{"kind":"string","value":"haskell"},"comments":{"kind":"string","value":" | Initialize a new Fission app in an existing directory"},"code":{"kind":"string","value":"module Fission.CLI.Handler.App.Init (appInit) where\n\nimport qualified Crypto.PubKey.Ed25519 as Ed25519\nimport qualified Data.Yaml as YAML\nimport qualified System.Console.ANSI as ANSI\n\nimport Fission.Prelude\n\nimport qualified Fission.App.Name as App\nimport Fission.Authorization.ServerDID\nimport Fission.Error.Types\nimport qualified Fission.Internal.UTF8 as UTF8\n\nimport Fission.Web.Auth.Token.Types\nimport Fission.Web.Client\n\nimport Fission.CLI.Display.Text\n\nimport qualified Fission.CLI.Display.Error as CLI.Error\nimport qualified Fission.CLI.Display.Success as CLI.Success\n\nimport qualified Fission.CLI.App.Environment as App.Env\nimport qualified Fission.CLI.Prompt.BuildDir as BuildDir\n\nimport Fission.CLI.Environment\nimport Fission.CLI.WebNative.Mutation.Auth.Store as UCAN\n\n | Sync the current working directory to the server over IPFS\nappInit ::\n ( MonadIO m\n , MonadTime m\n , MonadLogger m\n , MonadEnvironment m\n , UCAN.MonadStore m\n , MonadWebClient m\n , ServerDID m\n\n , MonadCleanup m\n , m `Raises` ClientError\n , m `Raises` YAML.ParseException\n , m `Raises` NotFound FilePath\n\n , Contains (Errors m) (Errors m)\n , Display (OpenUnion (Errors m))\n , Show (OpenUnion (Errors m))\n\n , MonadWebAuth m Token\n , MonadWebAuth m Ed25519.SecretKey\n )\n => FilePath\n -> Maybe FilePath\n -> Maybe App.Name\n -> m ()\nappInit appDir mayBuildDir' mayAppName = do\n logDebug @Text \"appInit\"\n\n proof <- getRootUserProof\n attempt (sendAuthedRequest proof $ createApp mayAppName) >>= \\case\n Left err -> do\n logDebug $ textDisplay err\n CLI.Error.put err $ textDisplay err\n raise err\n\n Right appURL -> do\n logDebug $ \"Created app \" <> textDisplay appURL\n\n case mayBuildDir' of\n Nothing -> do\n guess <- BuildDir.prompt appDir\n App.Env.create appURL $ fromMaybe guess mayBuildDir'\n\n Just dir -> do\n logDebug $ \"BuildDir passed from flag: \" <> dir\n App.Env.create appURL dir\n\n CLI.Success.putOk $ \"App initialized as \" <> textDisplay appURL\n\n UTF8.putText \"⏯️ Next, run \"\n\n colourized [ANSI.SetColor ANSI.Foreground ANSI.Vivid ANSI.Blue] do\n UTF8.putText \"fission app publish [--open|--watch]\"\n\n UTF8.putText \" to sync data\\n\"\n\n UTF8.putText \"💁 It may take DNS time to propagate this initial setup globally. 
In this case, you can always view your app at \"\n\n colourized [ANSI.SetColor ANSI.Foreground ANSI.Vivid ANSI.Blue] do\n UTF8.putText $ \"/\" <> textDisplay appURL <> \"\\n\"\n\n return ()\n"}}},{"rowIdx":610297,"cells":{"_id":{"kind":"string","value":"755f3076fcb051d464f7dacf35ce09e4f8a5bb8d808cd80d344aad7c19313bc2"},"repository":{"kind":"string","value":"dbuenzli/remat"},"name":{"kind":"string","value":"descr.ml"},"content":{"kind":"string","value":"--------------------------------------------------------------------------- \n Copyright 2012 . All rights reserved . \n Distributed under the BSD3 license , see license at the end of the file . \n % % NAME%% release % % ---------------------------------------------------------------------------\n Copyright 2012 Daniel C. Bünzli. All rights reserved.\n Distributed under the BSD3 license, see license at the end of the file.\n %%NAME%% release %%VERSION%%\n ---------------------------------------------------------------------------*)\n\nopen Rresult\nopen Bos\n\n(* Repository description *)\n\ntype t =\n { dir : Path.t;\n mutable repo : Ddescr.Repo.t option;\n mutable index_ids : D.index_id list option;\n indexes : (D.index_id, Ddescr.Index.t) Hashtbl.t;\n mutable doc_ids : D.doc_id list option;\n docs : (D.doc_id, Ddescr.Doc.t * Ddescr.Doc.meta) Hashtbl.t; }\n\n(* Description filename lookup *)\n\nlet warn_junk_file = format_of_string \"suspicious file `%a` in %s directory\"\nlet err_miss_repo p _ = R.msgf \"no repository description file `%a'\" Path.pp p\nlet err_miss_dir dir p _ = R.msgf \"missing %s directory `%a'\" dir Path.pp p\nlet err_miss_file k id p _ =\n R.msgf \"%s `%s': missing description file `%a'\" k id Path.pp p\n\nlet lookup_file err_msg f =\n (OS.File.exists ~err:true f >>= fun _ -> R.ok f)\n |> R.reword_error_msg ~replace:true (err_msg f)\n\nlet lookup_dir err_msg d =\n (OS.Dir.exists ~err:true d >>= fun _ -> R.ok d)\n |> R.reword_error_msg ~replace:true (err_msg d)\n\nlet repo_file d = lookup_file err_miss_repo Path.(d.dir / \"repo.json\")\nlet index_path d = Path.(d.dir / \"i\")\nlet index_dir d = lookup_dir (err_miss_dir \"index\") (index_path d)\nlet index_file d id =\n let err = err_miss_file \"index\" id in\n lookup_file err Path.(index_path d / strf \"%s.json\" id)\n\nlet doc_path d = Path.(d.dir / \"d\")\nlet doc_dir d = lookup_dir (err_miss_dir \"document\") (doc_path d)\nlet doc_file d id =\n let err = err_miss_file \"document\" id in\n lookup_file err Path.(doc_path d / strf \"%s.json\" id)\n\n(* Description decoder *)\n\nlet decode_file file codec =\n let decode ic () =\n let d = Jsonm.decoder (`Channel ic) in\n let d = Jsont.decoder ~dups:`Error ~unknown:`Error d codec in\n let rec loop () = match Jsont.decode d with\n | `Ok v -> R.ok v\n | `Await -> loop ()\n | `Error (loc, e) ->\n let err = (Jsont.error_to_string e) in\n Log.show \"%a:%a: %s\" Path.pp file Fmt.pp_range loc err;\n loop ()\n in\n loop ()\n in\n OS.File.with_inf decode file ()\n\nlet create dir =\n OS.Dir.exists ~err:true dir\n >>= fun _ -> R.ok { dir;\n repo = None;\n index_ids = None;\n indexes = Hashtbl.create 100;\n doc_ids = None;\n docs = Hashtbl.create 1000; }\n\nlet rec repo d = match d.repo with\n| Some r -> r\n| None ->\n (repo_file d >>= fun file -> decode_file file Ddescr.Repo.codec)\n |> Log.on_error_msg ~use:Jsont.(invalid_def (default Ddescr.Repo.codec))\n |> fun (_, r) -> d.repo <- Some r; r\n\nlet find_ids kind dir =\n let add_id acc p =\n if Path.has_ext `Json p then Path.(basename (rem_ext p)) :: acc else\n (Log.warn 
warn_junk_file Path.pp p kind; acc)\n in\n (dir\n >>= OS.Dir.contents\n >>= fun paths -> R.ok (List.fold_left add_id [] paths))\n |> Log.on_error_msg ~use:[]\n\nlet index_ids d = match d.index_ids with\n| Some ids -> ids\n| None ->\n let ids = find_ids \"index\" (index_dir d) in\n d.index_ids <- Some ids; ids\n\nlet index d id =\n match try Some (Hashtbl.find d.indexes id) with Not_found -> None with\n | Some i -> i\n | None ->\n (index_file d id >>= fun file -> decode_file file Ddescr.Index.codec)\n |> Log.on_error_msg ~use:Jsont.(invalid_def (default Ddescr.Index.codec))\n |> fun (_, i) -> Hashtbl.add d.indexes id i; i\n\nlet doc_ids d = match d.doc_ids with\n| Some ids -> ids\n| None ->\n let ids = find_ids \"document\" (doc_dir d) in\n d.doc_ids <- Some ids; ids\n\n FIXME see if modification of jsont can avoid double parse\n match try Some (Hashtbl.find d.docs id) with Not_found -> None with\n | Some d -> d\n | None ->\n (doc_file d id\n >>= fun file -> decode_file file Ddescr.Doc.codec\n >>= fun (_, doc) -> decode_file file Jsont.json\n >>= fun (_, meta) -> R.ok (doc, meta))\n |> Log.on_error_msg ~use:(Jsont.(default Ddescr.Doc.codec), `O [])\n |> fun doc -> Hashtbl.add d.docs id doc; doc\n\n(* Member lookup *)\n\nlet path_to_str ps = String.concat \".\" ps\nlet value_type = function\n| `Null -> \"null\" | `Bool _ -> \"boolean\" | `Float _ -> \"number\"\n| `String _ -> \"string\" | `A _ -> \"array\" | `O _ -> \"object\"\n\nlet err_find_type path seen j =\n R.error_msgf \"path %s stops at %s: value of type %s\"\n (path_to_str path) (path_to_str seen) (value_type j)\n\nlet err_find_name path seen =\n R.error_msgf \"path %s stops at %s: no such member.\"\n (path_to_str path) (path_to_str seen)\n\nlet json_find path j =\n let rec loop j seen = function\n | [] -> R.ok j\n | p :: ps ->\n match j with\n | `O mems ->\n begin match try Some (List.assoc p mems) with Not_found -> None with\n | None -> err_find_name path (List.rev (p :: seen))\n | Some j -> loop j (p :: seen) ps\n end\n | j -> err_find_type path (List.rev (p :: seen)) j\n in\n loop j [] path\n\nlet lookup_to_str = function\n| `Bool b -> R.ok (strf \"%b\" b)\n| `Float f -> R.ok (strf \"%g\" f)\n| `String s -> R.ok s\n| `A _ | `O _ | `Null as v ->\n R.error_msgf \"unexpected %s in member data\" (value_type v)\n\nlet lookup path obj =\n json_find path obj >>= function\n | `A vs ->\n let rec loop acc = function\n | v :: vs -> lookup_to_str v >>= fun s -> loop (s :: acc) vs\n | [] -> R.ok (List.rev acc)\n in\n loop [] vs\n | v -> lookup_to_str v >>= fun s -> R.ok [s]\n\n(* Formatting\n\n TODO better error reports, correct string extractors.\n*)\n\nlet parse_fuzzy_date s =\n let is_digit c = (0x0030 <= c && c <= 0x0039) || c = 0x23 (* # *) in\n let check_digits n s =\n let len = String.length s in\n if len <> n then false else\n try\n for i = 0 to len - 1 do\n if not (is_digit (Char.code s.[i])) then raise Exit\n done;\n true\n with Exit -> false\n in\n match String.split ~sep:\"-\" s with\n | [y; m; d] when check_digits 4 y && check_digits 2 m && check_digits 2 d ->\n R.ok (y, Some m, Some d)\n | [y; m] when check_digits 4 y && check_digits 2 m ->\n R.ok (y, Some m, None)\n | [y] when check_digits 4 y ->\n R.ok (y, None, None)\n | _ ->\n R.error_msgf \"could not parse fuzzy date (%s)\" s\n\nlet map_todo m =\n let err = R.msgf \"map %s is unimplemented\" m in\n Ok (fun s -> R.error (err, s))\n\n(* let err fmt = Printf.ksprintf (fun e -> R.error e) fmt *)\nlet err_map ~use fmt = Printf.ksprintf (fun e -> R.error (`Msg e, use)) 
fmt\n\nlet map_case var kind = match kind with\n| \"less\" | \"lower\" | \"upper\" -> map_todo (\"case_\" ^ kind)\n| _ -> R.error_msgf \"variable $(%s): unknown case map kind `%s`\" var kind\n\n TODO implement dates correctly\n\nlet map_date_y s = match parse_fuzzy_date s with\n| Error err -> Error (err, s)\n| Ok (y, _, _) -> Ok y\n\nlet map_date_yy s = match parse_fuzzy_date s with\n| Error err -> Error (err, s)\n| Ok (y, _, _) -> Ok (String.sub y 2 2)\n\nlet map_date_yyyy s = match parse_fuzzy_date s with\n| Error err -> Error (err, s)\n| Ok (y, _, _) -> Ok y\n\nlet map_date_m s = match parse_fuzzy_date s with\n| Error err -> Error (err, s)\n| Ok (_, m, _) -> Ok (match m with None -> \"#\" | Some m -> m)\n\nlet map_date_mm s = match parse_fuzzy_date s with\n| Error err -> Error (err, s)\n| Ok (_, m, _) -> Ok (match m with None -> \"##\" | Some m -> m)\n\nlet map_date_d s = match parse_fuzzy_date s with\n| Error err -> Error (err, s)\n| Ok (_, _, d) -> Ok (match d with None -> \"#\" | Some m -> m)\n\nlet map_date_dd s = match parse_fuzzy_date s with\n| Error err -> Error (err, s)\n| Ok (_, _, d) -> Ok (match d with None -> \"##\" | Some m -> m)\n\nlet map_date var kind = match kind with\n| \"Y\" -> Ok map_date_y | \"YY\" -> Ok map_date_yy | \"YYYY\" -> Ok map_date_yyyy\n| \"M\" -> Ok map_date_m | \"MM\" -> Ok map_date_mm\n| \"d\" -> Ok map_date_d | \"dd\" -> Ok map_date_dd\n| \"e\" -> map_todo \"date_e\"\n| _ -> R.error_msgf \"variable $(%s): unknown date map kind `%s`\" var kind\n\nlet map_letter var n = match R.int_of_string n with\n| None -> R.error_msgf \"variable $(%s): unknown letter map kind `%s`\" var n\n| Some n ->\n let map s = Ok (if n > String.length s then s else (String.sub s 0 n)) in\n Ok map\n\nlet map_int var count = match R.int_of_string count with\n| None -> R.error_msgf \"variable $(%s): unknown int map kind `%s`\" var count\n| Some count ->\n let map s =\n let fmt count i = Printf.sprintf \"%0*d\" count i in\n try Ok (fmt count (int_of_string s)) with\n | Failure _ ->\n err_map ~use:(fmt count 0)\n \"variable $(%s): value `%s` not an int\" var s\n in\n Ok map\n\nlet map_id_find var smaps id = match (String.Map.find id smaps) with\n| None -> R.error_msgf \"variable $(%s): unknown map id `%s`\" var id\n| Some m -> Ok m\n\nlet map_id var smaps id =\n map_id_find var smaps id >>= fun m ->\n let map s = match String.Map.find s m with\n | Some v -> Ok v\n | None ->\n err_map ~use:s\n \"variable $(%s): map id `%s` could not map `%s`\" var id s\n in\n Ok map\n\nlet pmap_id var smaps id = match map_id_find var smaps id with\n| Error _ as e -> e\n| Ok m ->\n let map s = match String.Map.find s m with\n | None -> Ok s\n | Some s -> Ok s\n in\n Ok map\n\nlet get_map var smaps m = match String.cut ~sep:\"_\" (String.trim m) with\n| Some (\"case\", kind) -> map_case var kind\n| Some (\"letter\", n) -> map_letter var n\n| Some (\"date\", kind) -> map_date var kind\n| Some (\"int\", count) -> map_int var count\n| Some (\"map\", id) -> map_id var smaps m\n| Some (\"pmap\", id) -> pmap_id var smaps m\n| None | _ -> R.error_msgf \"variable $(%s): unknown map `%s`\" var m\n\n TODO splicing , de - uglify\n let r = match String.split ~sep:\",\" var_spec with\n | var :: maps ->\n let add_map acc m = match acc with\n | Error _ as e -> e\n | Ok maps ->\n match get_map var smaps m with\n | Error _ as e -> e\n | Ok m -> Ok (m :: maps)\n in\n begin match List.fold_left add_map (Ok []) maps with\n | Error err -> Error (err, \"MAPERROR\")\n | Ok maps -> Ok (String.trim var, List.rev maps)\n end\n 
| _ ->\n Error (R.msgf \"var `$(%s)`: illegal format variable.\" var_spec, \"ILLEGAL\")\n in\n match r with\n | Error _ as e -> e\n | Ok (var, maps) ->\n match String.Map.find var env with\n | None | Some [] ->\n Error (R.msgf \"var `%s`: undefined variable: `$(%s)'\" var_spec var,\n \"UNDEFINED\")\n | Some [v] ->\n let apply acc m = match acc with\n | Error _ as e -> e\n | Ok s -> m s\n in\n List.fold_left apply (Ok v) maps\n | Some l ->\n Error (R.msgf \"var `%s`: unspliced multiple value\" var_spec,\n \"UNSPLICED\")\n\nlet format ?buf fmt ~env ~smaps = failwith \"TODO\"\n\n let buf = match buf with Some b - > b | None - > Buffer.create 255 in \n let err = ref ( ` Msg \" \" ) in \n let lookup_var = match lookup_var env with \n | Error ( e , v ) - > err : = e ; v \n | Ok v - > v \n in \n Buffer.clear buf ; Buffer.add_substitute buf lookup_var fmt ; \n let data = Buffer.contents buf in \n if ! err < > ( ` Msg \" \" ) then Error ( ! err , data ) else Ok data \n\n let buf = match buf with Some b -> b | None -> Buffer.create 255 in\n let err = ref (`Msg \"\") in\n let lookup_var var_spec = match lookup_var env smaps var_spec with\n | Error (e, v) -> err := e; v\n | Ok v -> v\n in\n Buffer.clear buf; Buffer.add_substitute buf lookup_var fmt;\n let data = Buffer.contents buf in\n if !err <> (`Msg \"\") then Error (!err, data) else Ok data\n*)\n\nlet formats ?buf fmt ~env ~smaps = failwith \"TODO\"\n\n(*\n\nlet rec product vss = (* ordered cartesian product of lists. *)\n let rec push_v acc v = function\n | l :: lists -> push_v ((v :: l) :: acc) v lists\n | [] -> acc\n in\n let rec push_vs acc lists = function\n | v :: vs -> push_vs (push_v acc v lists) lists vs\n | [] -> acc\n in\n let rec loop acc = function\n | vs :: vss -> loop (push_vs [] (List.rev acc) (List.rev vs)) vss\n | [] -> acc\n in\n if vss = [] then [] else loop [[]] (List.rev vss)\n\n FIXME better error report\n let lookup_var env var =\n match try Some (List.assoc var env) with Not_found -> None with\n | None ->\n FIXME this should n't occur here\n Log.err \"variable %s undefined\" var; \"UNDEFINED\"\n | Some l -> l\n in\n\n let rec assigns acc = function\n | [] -> acc\n | (name, Error e) :: vars ->\n Log.err \"var %s lookup error: %s\" name e;\n assigns ([(name, \"ERROR\")] :: acc) vars\n | (name, Ok vs) :: vars ->\n assigns ((List.map (fun v -> (name, v)) vs) :: acc) vars\n in\n let vars = Ddescr.Formatter.vars fmt in\n let assigns = assigns [] (List.map (fun (k, l) -> k, lookup l j) vars) in\n let envs = product assigns in\n let format = Ddescr.Formatter.format fmt in\n let add_run b acc run =\n Buffer.clear b;\n Buffer.add_substitute b (lookup_var run) format;\n Buffer.contents b :: acc\n in\n let b = Buffer.create 255 in\n List.fold_left (add_run b) [] envs\n\nlet format_str fmt j =\n FIXME report error in case of list ?\n String.concat \"\" (format fmt j)\n*)\n\n(* Variable environements *)\n\nlet cache = Hashtbl.create 255\n\ntype fmt = [`Lit of string | `Var of string ] list\nlet parse_fmt ?buf s =\n try\n let b = match buf with\n | None -> Buffer.create 255 | Some buf -> Buffer.clear buf; buf\n in\n let acc = ref [] in\n let flush b = let s = Buffer.contents b in (Buffer.clear b; s) in\n let flush_lit b =\n if Buffer.length b <> 0 then acc := `Lit (flush b) :: !acc\n in\n let state = ref `Lit in\n for i = 0 to String.length s - 1 do match !state with\n | `Lit ->\n begin match s.[i] with\n | '$' -> state := `Dollar\n | c -> Buffer.add_char b c\n end\n | `Dollar ->\n begin match s.[i] with\n | '$' -> state := `Lit; 
Buffer.add_char b '$'\n | '(' -> state := `Var; flush_lit b;\n | _ -> raise Exit\n end\n | `Var ->\n begin match s.[i] with\n | ')' -> state := `Lit; acc := (`Var (flush b)) :: !acc;\n | c -> Buffer.add_char b c\n end\n done;\n if !state <> `Lit then raise Exit else\n (flush_lit b; Ok (List.rev !acc))\n with Exit -> Error (strf \"malformed format: `%s`\" s)\n\n\nlet cache = Hashtbl.create 255\n\nlet file_scan pat = try Hashtbl.find cache pat with\n| Not_found ->\n (OS.Path.unify (Path.of_string pat)\n >>= fun envs -> R.ok (List.rev_map snd envs))\n |> Log.on_error_msg ~use:[]\n |> fun envs -> Hashtbl.add cache pat envs; envs\n\n\n\n\n\n--------------------------------------------------------------------------- \n Copyright 2012 \n All rights reserved . \n\n Redistribution and use in source and binary forms , with or without \n modification , are permitted provided that the following conditions \n are met : \n\n 1 . Redistributions of source code must retain the above copyright \n notice , this list of conditions and the following disclaimer . \n\n 2 . Redistributions in binary form must reproduce the above \n copyright notice , this list of conditions and the following \n disclaimer in the documentation and/or other materials provided \n with the distribution . \n\n 3 . Neither the name of nor the names of \n contributors may be used to endorse or promote products derived \n from this software without specific prior written permission . \n\n THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \n \" AS IS \" AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT \n LIMITED TO , THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR \n A PARTICULAR PURPOSE ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT \n OWNER OR FOR ANY DIRECT , INDIRECT , INCIDENTAL , \n SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT \n LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , \n DATA , OR PROFITS ; OR BUSINESS INTERRUPTION ) HOWEVER CAUSED AND ON ANY \n THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT \n ( INCLUDING NEGLIGENCE OR OTHERWISE ) ARISING IN ANY WAY OUT OF THE USE \n OF THIS SOFTWARE , EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE . \n ---------------------------------------------------------------------------\n Copyright 2012 Daniel C. Bünzli\n All rights reserved.\n\n Redistribution and use in source and binary forms, with or without\n modification, are permitted provided that the following conditions\n are met:\n\n 1. Redistributions of source code must retain the above copyright\n notice, this list of conditions and the following disclaimer.\n\n 2. Redistributions in binary form must reproduce the above\n copyright notice, this list of conditions and the following\n disclaimer in the documentation and/or other materials provided\n with the distribution.\n\n 3. Neither the name of Daniel C. Bünzli nor the names of\n contributors may be used to endorse or promote products derived\n from this software without specific prior written permission.\n\n THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n ---------------------------------------------------------------------------*)\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/dbuenzli/remat/28d572e77bbd1ad46bbfde87c0ba8bd0ab99ed28/src-remat/descr.ml"},"language":{"kind":"string","value":"ocaml"},"comments":{"kind":"string","value":" Repository description \n Description filename lookup \n Description decoder \n Member lookup \n Formatting\n\n TODO better error reports, correct string extractors.\n\n # \n let err fmt = Printf.ksprintf (fun e -> R.error e) fmt \n\n\nlet rec product vss = (* ordered cartesian product of lists. \n Variable environements "},"code":{"kind":"string","value":"--------------------------------------------------------------------------- \n Copyright 2012 . All rights reserved . \n Distributed under the BSD3 license , see license at the end of the file . \n % % NAME%% release % % ---------------------------------------------------------------------------\n Copyright 2012 Daniel C. Bünzli. All rights reserved.\n Distributed under the BSD3 license, see license at the end of the file.\n %%NAME%% release %%VERSION%%\n ---------------------------------------------------------------------------*)\n\nopen Rresult\nopen Bos\n\n\ntype t =\n { dir : Path.t;\n mutable repo : Ddescr.Repo.t option;\n mutable index_ids : D.index_id list option;\n indexes : (D.index_id, Ddescr.Index.t) Hashtbl.t;\n mutable doc_ids : D.doc_id list option;\n docs : (D.doc_id, Ddescr.Doc.t * Ddescr.Doc.meta) Hashtbl.t; }\n\n\nlet warn_junk_file = format_of_string \"suspicious file `%a` in %s directory\"\nlet err_miss_repo p _ = R.msgf \"no repository description file `%a'\" Path.pp p\nlet err_miss_dir dir p _ = R.msgf \"missing %s directory `%a'\" dir Path.pp p\nlet err_miss_file k id p _ =\n R.msgf \"%s `%s': missing description file `%a'\" k id Path.pp p\n\nlet lookup_file err_msg f =\n (OS.File.exists ~err:true f >>= fun _ -> R.ok f)\n |> R.reword_error_msg ~replace:true (err_msg f)\n\nlet lookup_dir err_msg d =\n (OS.Dir.exists ~err:true d >>= fun _ -> R.ok d)\n |> R.reword_error_msg ~replace:true (err_msg d)\n\nlet repo_file d = lookup_file err_miss_repo Path.(d.dir / \"repo.json\")\nlet index_path d = Path.(d.dir / \"i\")\nlet index_dir d = lookup_dir (err_miss_dir \"index\") (index_path d)\nlet index_file d id =\n let err = err_miss_file \"index\" id in\n lookup_file err Path.(index_path d / strf \"%s.json\" id)\n\nlet doc_path d = Path.(d.dir / \"d\")\nlet doc_dir d = lookup_dir (err_miss_dir \"document\") (doc_path d)\nlet doc_file d id =\n let err = err_miss_file \"document\" id in\n lookup_file err Path.(doc_path d / strf \"%s.json\" id)\n\n\nlet decode_file file codec =\n let decode ic () =\n let d = Jsonm.decoder (`Channel ic) in\n let d = Jsont.decoder ~dups:`Error ~unknown:`Error d codec in\n let rec loop () = match Jsont.decode d with\n | `Ok v -> R.ok v\n | `Await -> loop ()\n | `Error (loc, e) ->\n let err = (Jsont.error_to_string e) in\n Log.show \"%a:%a: 
%s\" Path.pp file Fmt.pp_range loc err;\n loop ()\n in\n loop ()\n in\n OS.File.with_inf decode file ()\n\nlet create dir =\n OS.Dir.exists ~err:true dir\n >>= fun _ -> R.ok { dir;\n repo = None;\n index_ids = None;\n indexes = Hashtbl.create 100;\n doc_ids = None;\n docs = Hashtbl.create 1000; }\n\nlet rec repo d = match d.repo with\n| Some r -> r\n| None ->\n (repo_file d >>= fun file -> decode_file file Ddescr.Repo.codec)\n |> Log.on_error_msg ~use:Jsont.(invalid_def (default Ddescr.Repo.codec))\n |> fun (_, r) -> d.repo <- Some r; r\n\nlet find_ids kind dir =\n let add_id acc p =\n if Path.has_ext `Json p then Path.(basename (rem_ext p)) :: acc else\n (Log.warn warn_junk_file Path.pp p kind; acc)\n in\n (dir\n >>= OS.Dir.contents\n >>= fun paths -> R.ok (List.fold_left add_id [] paths))\n |> Log.on_error_msg ~use:[]\n\nlet index_ids d = match d.index_ids with\n| Some ids -> ids\n| None ->\n let ids = find_ids \"index\" (index_dir d) in\n d.index_ids <- Some ids; ids\n\nlet index d id =\n match try Some (Hashtbl.find d.indexes id) with Not_found -> None with\n | Some i -> i\n | None ->\n (index_file d id >>= fun file -> decode_file file Ddescr.Index.codec)\n |> Log.on_error_msg ~use:Jsont.(invalid_def (default Ddescr.Index.codec))\n |> fun (_, i) -> Hashtbl.add d.indexes id i; i\n\nlet doc_ids d = match d.doc_ids with\n| Some ids -> ids\n| None ->\n let ids = find_ids \"document\" (doc_dir d) in\n d.doc_ids <- Some ids; ids\n\n FIXME see if modification of jsont can avoid double parse\n match try Some (Hashtbl.find d.docs id) with Not_found -> None with\n | Some d -> d\n | None ->\n (doc_file d id\n >>= fun file -> decode_file file Ddescr.Doc.codec\n >>= fun (_, doc) -> decode_file file Jsont.json\n >>= fun (_, meta) -> R.ok (doc, meta))\n |> Log.on_error_msg ~use:(Jsont.(default Ddescr.Doc.codec), `O [])\n |> fun doc -> Hashtbl.add d.docs id doc; doc\n\n\nlet path_to_str ps = String.concat \".\" ps\nlet value_type = function\n| `Null -> \"null\" | `Bool _ -> \"boolean\" | `Float _ -> \"number\"\n| `String _ -> \"string\" | `A _ -> \"array\" | `O _ -> \"object\"\n\nlet err_find_type path seen j =\n R.error_msgf \"path %s stops at %s: value of type %s\"\n (path_to_str path) (path_to_str seen) (value_type j)\n\nlet err_find_name path seen =\n R.error_msgf \"path %s stops at %s: no such member.\"\n (path_to_str path) (path_to_str seen)\n\nlet json_find path j =\n let rec loop j seen = function\n | [] -> R.ok j\n | p :: ps ->\n match j with\n | `O mems ->\n begin match try Some (List.assoc p mems) with Not_found -> None with\n | None -> err_find_name path (List.rev (p :: seen))\n | Some j -> loop j (p :: seen) ps\n end\n | j -> err_find_type path (List.rev (p :: seen)) j\n in\n loop j [] path\n\nlet lookup_to_str = function\n| `Bool b -> R.ok (strf \"%b\" b)\n| `Float f -> R.ok (strf \"%g\" f)\n| `String s -> R.ok s\n| `A _ | `O _ | `Null as v ->\n R.error_msgf \"unexpected %s in member data\" (value_type v)\n\nlet lookup path obj =\n json_find path obj >>= function\n | `A vs ->\n let rec loop acc = function\n | v :: vs -> lookup_to_str v >>= fun s -> loop (s :: acc) vs\n | [] -> R.ok (List.rev acc)\n in\n loop [] vs\n | v -> lookup_to_str v >>= fun s -> R.ok [s]\n\n\nlet parse_fuzzy_date s =\n let check_digits n s =\n let len = String.length s in\n if len <> n then false else\n try\n for i = 0 to len - 1 do\n if not (is_digit (Char.code s.[i])) then raise Exit\n done;\n true\n with Exit -> false\n in\n match String.split ~sep:\"-\" s with\n | [y; m; d] when check_digits 4 y && check_digits 
2 m && check_digits 2 d ->\n R.ok (y, Some m, Some d)\n | [y; m] when check_digits 4 y && check_digits 2 m ->\n R.ok (y, Some m, None)\n | [y] when check_digits 4 y ->\n R.ok (y, None, None)\n | _ ->\n R.error_msgf \"could not parse fuzzy date (%s)\" s\n\nlet map_todo m =\n let err = R.msgf \"map %s is unimplemented\" m in\n Ok (fun s -> R.error (err, s))\n\nlet err_map ~use fmt = Printf.ksprintf (fun e -> R.error (`Msg e, use)) fmt\n\nlet map_case var kind = match kind with\n| \"less\" | \"lower\" | \"upper\" -> map_todo (\"case_\" ^ kind)\n| _ -> R.error_msgf \"variable $(%s): unknown case map kind `%s`\" var kind\n\n TODO implement dates correctly\n\nlet map_date_y s = match parse_fuzzy_date s with\n| Error err -> Error (err, s)\n| Ok (y, _, _) -> Ok y\n\nlet map_date_yy s = match parse_fuzzy_date s with\n| Error err -> Error (err, s)\n| Ok (y, _, _) -> Ok (String.sub y 2 2)\n\nlet map_date_yyyy s = match parse_fuzzy_date s with\n| Error err -> Error (err, s)\n| Ok (y, _, _) -> Ok y\n\nlet map_date_m s = match parse_fuzzy_date s with\n| Error err -> Error (err, s)\n| Ok (_, m, _) -> Ok (match m with None -> \"#\" | Some m -> m)\n\nlet map_date_mm s = match parse_fuzzy_date s with\n| Error err -> Error (err, s)\n| Ok (_, m, _) -> Ok (match m with None -> \"##\" | Some m -> m)\n\nlet map_date_d s = match parse_fuzzy_date s with\n| Error err -> Error (err, s)\n| Ok (_, _, d) -> Ok (match d with None -> \"#\" | Some m -> m)\n\nlet map_date_dd s = match parse_fuzzy_date s with\n| Error err -> Error (err, s)\n| Ok (_, _, d) -> Ok (match d with None -> \"##\" | Some m -> m)\n\nlet map_date var kind = match kind with\n| \"Y\" -> Ok map_date_y | \"YY\" -> Ok map_date_yy | \"YYYY\" -> Ok map_date_yyyy\n| \"M\" -> Ok map_date_m | \"MM\" -> Ok map_date_mm\n| \"d\" -> Ok map_date_d | \"dd\" -> Ok map_date_dd\n| \"e\" -> map_todo \"date_e\"\n| _ -> R.error_msgf \"variable $(%s): unknown date map kind `%s`\" var kind\n\nlet map_letter var n = match R.int_of_string n with\n| None -> R.error_msgf \"variable $(%s): unknown letter map kind `%s`\" var n\n| Some n ->\n let map s = Ok (if n > String.length s then s else (String.sub s 0 n)) in\n Ok map\n\nlet map_int var count = match R.int_of_string count with\n| None -> R.error_msgf \"variable $(%s): unknown int map kind `%s`\" var count\n| Some count ->\n let map s =\n let fmt count i = Printf.sprintf \"%0*d\" count i in\n try Ok (fmt count (int_of_string s)) with\n | Failure _ ->\n err_map ~use:(fmt count 0)\n \"variable $(%s): value `%s` not an int\" var s\n in\n Ok map\n\nlet map_id_find var smaps id = match (String.Map.find id smaps) with\n| None -> R.error_msgf \"variable $(%s): unknown map id `%s`\" var id\n| Some m -> Ok m\n\nlet map_id var smaps id =\n map_id_find var smaps id >>= fun m ->\n let map s = match String.Map.find s m with\n | Some v -> Ok v\n | None ->\n err_map ~use:s\n \"variable $(%s): map id `%s` could not map `%s`\" var id s\n in\n Ok map\n\nlet pmap_id var smaps id = match map_id_find var smaps id with\n| Error _ as e -> e\n| Ok m ->\n let map s = match String.Map.find s m with\n | None -> Ok s\n | Some s -> Ok s\n in\n Ok map\n\nlet get_map var smaps m = match String.cut ~sep:\"_\" (String.trim m) with\n| Some (\"case\", kind) -> map_case var kind\n| Some (\"letter\", n) -> map_letter var n\n| Some (\"date\", kind) -> map_date var kind\n| Some (\"int\", count) -> map_int var count\n| Some (\"map\", id) -> map_id var smaps m\n| Some (\"pmap\", id) -> pmap_id var smaps m\n| None | _ -> R.error_msgf \"variable $(%s): unknown map 
`%s`\" var m\n\n TODO splicing , de - uglify\n let r = match String.split ~sep:\",\" var_spec with\n | var :: maps ->\n let add_map acc m = match acc with\n | Error _ as e -> e\n | Ok maps ->\n match get_map var smaps m with\n | Error _ as e -> e\n | Ok m -> Ok (m :: maps)\n in\n begin match List.fold_left add_map (Ok []) maps with\n | Error err -> Error (err, \"MAPERROR\")\n | Ok maps -> Ok (String.trim var, List.rev maps)\n end\n | _ ->\n Error (R.msgf \"var `$(%s)`: illegal format variable.\" var_spec, \"ILLEGAL\")\n in\n match r with\n | Error _ as e -> e\n | Ok (var, maps) ->\n match String.Map.find var env with\n | None | Some [] ->\n Error (R.msgf \"var `%s`: undefined variable: `$(%s)'\" var_spec var,\n \"UNDEFINED\")\n | Some [v] ->\n let apply acc m = match acc with\n | Error _ as e -> e\n | Ok s -> m s\n in\n List.fold_left apply (Ok v) maps\n | Some l ->\n Error (R.msgf \"var `%s`: unspliced multiple value\" var_spec,\n \"UNSPLICED\")\n\nlet format ?buf fmt ~env ~smaps = failwith \"TODO\"\n\n let buf = match buf with Some b - > b | None - > Buffer.create 255 in \n let err = ref ( ` Msg \" \" ) in \n let lookup_var = match lookup_var env with \n | Error ( e , v ) - > err : = e ; v \n | Ok v - > v \n in \n Buffer.clear buf ; Buffer.add_substitute buf lookup_var fmt ; \n let data = Buffer.contents buf in \n if ! err < > ( ` Msg \" \" ) then Error ( ! err , data ) else Ok data \n\n let buf = match buf with Some b -> b | None -> Buffer.create 255 in\n let err = ref (`Msg \"\") in\n let lookup_var var_spec = match lookup_var env smaps var_spec with\n | Error (e, v) -> err := e; v\n | Ok v -> v\n in\n Buffer.clear buf; Buffer.add_substitute buf lookup_var fmt;\n let data = Buffer.contents buf in\n if !err <> (`Msg \"\") then Error (!err, data) else Ok data\n*)\n\nlet formats ?buf fmt ~env ~smaps = failwith \"TODO\"\n\n let rec push_v acc v = function\n | l :: lists -> push_v ((v :: l) :: acc) v lists\n | [] -> acc\n in\n let rec push_vs acc lists = function\n | v :: vs -> push_vs (push_v acc v lists) lists vs\n | [] -> acc\n in\n let rec loop acc = function\n | vs :: vss -> loop (push_vs [] (List.rev acc) (List.rev vs)) vss\n | [] -> acc\n in\n if vss = [] then [] else loop [[]] (List.rev vss)\n\n FIXME better error report\n let lookup_var env var =\n match try Some (List.assoc var env) with Not_found -> None with\n | None ->\n FIXME this should n't occur here\n Log.err \"variable %s undefined\" var; \"UNDEFINED\"\n | Some l -> l\n in\n\n let rec assigns acc = function\n | [] -> acc\n | (name, Error e) :: vars ->\n Log.err \"var %s lookup error: %s\" name e;\n assigns ([(name, \"ERROR\")] :: acc) vars\n | (name, Ok vs) :: vars ->\n assigns ((List.map (fun v -> (name, v)) vs) :: acc) vars\n in\n let vars = Ddescr.Formatter.vars fmt in\n let assigns = assigns [] (List.map (fun (k, l) -> k, lookup l j) vars) in\n let envs = product assigns in\n let format = Ddescr.Formatter.format fmt in\n let add_run b acc run =\n Buffer.clear b;\n Buffer.add_substitute b (lookup_var run) format;\n Buffer.contents b :: acc\n in\n let b = Buffer.create 255 in\n List.fold_left (add_run b) [] envs\n\nlet format_str fmt j =\n FIXME report error in case of list ?\n String.concat \"\" (format fmt j)\n*)\n\n\nlet cache = Hashtbl.create 255\n\ntype fmt = [`Lit of string | `Var of string ] list\nlet parse_fmt ?buf s =\n try\n let b = match buf with\n | None -> Buffer.create 255 | Some buf -> Buffer.clear buf; buf\n in\n let acc = ref [] in\n let flush b = let s = Buffer.contents b in (Buffer.clear b; s) in\n let 
flush_lit b =\n if Buffer.length b <> 0 then acc := `Lit (flush b) :: !acc\n in\n let state = ref `Lit in\n for i = 0 to String.length s - 1 do match !state with\n | `Lit ->\n begin match s.[i] with\n | '$' -> state := `Dollar\n | c -> Buffer.add_char b c\n end\n | `Dollar ->\n begin match s.[i] with\n | '$' -> state := `Lit; Buffer.add_char b '$'\n | '(' -> state := `Var; flush_lit b;\n | _ -> raise Exit\n end\n | `Var ->\n begin match s.[i] with\n | ')' -> state := `Lit; acc := (`Var (flush b)) :: !acc;\n | c -> Buffer.add_char b c\n end\n done;\n if !state <> `Lit then raise Exit else\n (flush_lit b; Ok (List.rev !acc))\n with Exit -> Error (strf \"malformed format: `%s`\" s)\n\n\nlet cache = Hashtbl.create 255\n\nlet file_scan pat = try Hashtbl.find cache pat with\n| Not_found ->\n (OS.Path.unify (Path.of_string pat)\n >>= fun envs -> R.ok (List.rev_map snd envs))\n |> Log.on_error_msg ~use:[]\n |> fun envs -> Hashtbl.add cache pat envs; envs\n\n\n\n\n\n--------------------------------------------------------------------------- \n Copyright 2012 \n All rights reserved . \n\n Redistribution and use in source and binary forms , with or without \n modification , are permitted provided that the following conditions \n are met : \n\n 1 . Redistributions of source code must retain the above copyright \n notice , this list of conditions and the following disclaimer . \n\n 2 . Redistributions in binary form must reproduce the above \n copyright notice , this list of conditions and the following \n disclaimer in the documentation and/or other materials provided \n with the distribution . \n\n 3 . Neither the name of nor the names of \n contributors may be used to endorse or promote products derived \n from this software without specific prior written permission . \n\n THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \n \" AS IS \" AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT \n LIMITED TO , THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR \n A PARTICULAR PURPOSE ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT \n OWNER OR FOR ANY DIRECT , INDIRECT , INCIDENTAL , \n SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT \n LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , \n DATA , OR PROFITS ; OR BUSINESS INTERRUPTION ) HOWEVER CAUSED AND ON ANY \n THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT \n ( INCLUDING NEGLIGENCE OR OTHERWISE ) ARISING IN ANY WAY OUT OF THE USE \n OF THIS SOFTWARE , EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE . \n ---------------------------------------------------------------------------\n Copyright 2012 Daniel C. Bünzli\n All rights reserved.\n\n Redistribution and use in source and binary forms, with or without\n modification, are permitted provided that the following conditions\n are met:\n\n 1. Redistributions of source code must retain the above copyright\n notice, this list of conditions and the following disclaimer.\n\n 2. Redistributions in binary form must reproduce the above\n copyright notice, this list of conditions and the following\n disclaimer in the documentation and/or other materials provided\n with the distribution.\n\n 3. Neither the name of Daniel C. 
Bünzli nor the names of\n contributors may be used to endorse or promote products derived\n from this software without specific prior written permission.\n\n THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n ---------------------------------------------------------------------------*)\n"}}},{"rowIdx":610298,"cells":{"_id":{"kind":"string","value":"657a6cd99033f97e945cf6b4f87a957ce5827c72d837271d7e728fce049d8792"},"repository":{"kind":"string","value":"acieroid/scala-am"},"name":{"kind":"string","value":"church-2-num-1.scm"},"content":{"kind":"string","value":"(letrec ((zero (lambda (f x) x))\n (inc (lambda (n)\n (lambda (f x)\n (f (n f x)))))\n (plus (lambda (m n)\n (lambda (f x)\n (m f (n f x))))))\n ((inc (inc zero)) (lambda (x) (+ x 1)) 0))\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/acieroid/scala-am/13ef3befbfc664b77f31f56847c30d60f4ee7dfe/test/changesBenevolPaper/church-2-num-1.scm"},"language":{"kind":"string","value":"scheme"},"comments":{"kind":"string","value":""},"code":{"kind":"string","value":"(letrec ((zero (lambda (f x) x))\n (inc (lambda (n)\n (lambda (f x)\n (f (n f x)))))\n (plus (lambda (m n)\n (lambda (f x)\n (m f (n f x))))))\n ((inc (inc zero)) (lambda (x) (+ x 1)) 0))\n"}}},{"rowIdx":610299,"cells":{"_id":{"kind":"string","value":"6febfa553216a882c43330bcab22bc2bad66244caf548a9e136e371549e11481"},"repository":{"kind":"string","value":"funcool/httpurr"},"name":{"kind":"string","value":"generators.cljc"},"content":{"kind":"string","value":"(ns httpurr.test.generators\n (:require\n [clojure.test.check.generators :as gen]\n [httpurr.status :as http]))\n\n(defn gen-statuses\n [coll]\n (gen/such-that\n #(not (empty? %)) (gen/map (gen/return :status)\n (gen/elements coll))))\n\n(def informational-response\n (gen-statuses http/informational-codes))\n\n(def success-response\n (gen-statuses http/success-codes))\n\n(def redirection-response\n (gen-statuses http/redirection-codes))\n\n(def client-error-response\n (gen-statuses http/client-error-codes))\n\n(def server-error-response\n (gen-statuses http/server-error-codes))\n\n(def error-response\n (gen-statuses (concat http/client-error-codes\n http/server-error-codes)))\n"},"license":{"kind":"null"},"download_url":{"kind":"string","value":"https://raw.githubusercontent.com/funcool/httpurr/22fb1b921864155a6b4eff113e2456ee924dd681/test/httpurr/test/generators.cljc"},"language":{"kind":"string","value":"clojure"},"comments":{"kind":"string","value":""},"code":{"kind":"string","value":"(ns httpurr.test.generators\n (:require\n [clojure.test.check.generators :as gen]\n [httpurr.status :as http]))\n\n(defn gen-statuses\n [coll]\n (gen/such-that\n #(not (empty? 
 %)) (gen/map (gen/return :status)\n (gen/elements coll))))\n\n(def informational-response\n (gen-statuses http/informational-codes))\n\n(def success-response\n (gen-statuses http/success-codes))\n\n(def redirection-response\n (gen-statuses http/redirection-codes))\n\n(def client-error-response\n (gen-statuses http/client-error-codes))\n\n(def server-error-response\n (gen-statuses http/server-error-codes))\n\n(def error-response\n (gen-statuses (concat http/client-error-codes\n http/server-error-codes)))\n"}}}],"truncated":false,"partial":false}
width="13.2" height="30" fill-opacity="0"></rect></g></svg> <div class="relative font-light text-gray-400" style="height: 10px; width: 130px;"><div class="absolute left-0 overflow-hidden text-ellipsis whitespace-nowrap" style="max-width: 60px">4</div> <div class="absolute overflow-hidden text-ellipsis whitespace-nowrap" style="right: 0px; max-width: 60px">110</div> </div></div></div></div> <div class="absolute right-0 top-0 z-10 h-full w-1 cursor-col-resize hover:bg-indigo-100 active:bg-indigo-500 dark:hover:bg-indigo-800 dark:active:bg-indigo-600/80"><div class="absolute right-0 top-0 h-full w-1"></div> </div> </th><th class="h-full max-w-sm p-2 text-left relative w-auto"><div class="flex h-full flex-col flex-nowrap justify-between"><div><div class="flex items-center justify-between">content <form class="flex flex-col"><button id="asc" class="-mr-1 ml-2 h-[0.4rem] w-[0.8rem] transition ease-in-out"><svg class="-rotate-180 transform text-gray-300 hover:text-gray-500" xmlns="http://www.w3.org/2000/svg" viewBox="0 64 256 128" fill="currentColor" aria-hidden="true"><path d="M213.65674,101.657l-80,79.99976a7.99945,7.99945,0,0,1-11.31348,0l-80-79.99976A8,8,0,0,1,48,88H208a8,8,0,0,1,5.65674,13.657Z"></path></svg></button> <button id="desc" class="-mr-1 ml-2 h-[0.4rem] w-[0.8rem] transition ease-in-out"><svg class="text-gray-300 hover:text-gray-500" xmlns="http://www.w3.org/2000/svg" viewBox="0 64 256 128" fill="currentColor" aria-hidden="true"><path d="M213.65674,101.657l-80,79.99976a7.99945,7.99945,0,0,1-11.31348,0l-80-79.99976A8,8,0,0,1,48,88H208a8,8,0,0,1,5.65674,13.657Z"></path></svg></button></form></div> <div class="mb-2 whitespace-nowrap text-xs font-normal text-gray-500"><span>string</span><span class="italic text-gray-400 before:mx-1 before:content-['·']">lengths</span></div></div> <div><div class="" style="height: 40px; padding-top: 2px"><svg width="130" height="28"><g><rect class="fill-gray-400 dark:fill-gray-500/80" rx="2" x="0" y="0" width="11.2" height="30" fill-opacity="1"></rect><rect class="fill-gray-400 dark:fill-gray-500/80" rx="2" x="13.2" y="25" width="11.2" height="5" fill-opacity="1"></rect><rect class="fill-gray-400 dark:fill-gray-500/80" rx="2" x="26.4" y="25" width="11.2" height="5" fill-opacity="1"></rect><rect class="fill-gray-400 dark:fill-gray-500/80" rx="2" x="39.599999999999994" y="25" width="11.2" height="5" fill-opacity="1"></rect><rect class="fill-gray-400 dark:fill-gray-500/80" rx="2" x="52.8" y="25" width="11.2" height="5" fill-opacity="1"></rect><rect class="fill-gray-400 dark:fill-gray-500/80" rx="2" x="66" y="25" width="11.2" height="5" fill-opacity="1"></rect><rect class="fill-gray-400 dark:fill-gray-500/80" rx="2" x="79.19999999999999" y="25" width="11.2" height="5" fill-opacity="1"></rect><rect class="fill-gray-400 dark:fill-gray-500/80" rx="2" x="92.39999999999999" y="25" width="11.2" height="5" fill-opacity="1"></rect><rect class="fill-gray-400 dark:fill-gray-500/80" rx="2" x="105.6" y="26" width="11.2" height="4" fill-opacity="1"></rect><rect class="fill-gray-400 dark:fill-gray-500/80" rx="2" x="118.8" y="25" width="11.2" height="5" fill-opacity="1"></rect></g><rect class="fill-white dark:fill-gray-900" x="0" y="26" width="130" height="2" stroke-opacity="1"></rect><line class="stroke-gray-100 dark:stroke-gray-500/20" x1="0" y1="27.5" x2="130" y2="27.5" stroke-opacity="1"></line><g><rect class="fill-indigo-500 cursor-pointer" x="-1" y="0" width="13.2" height="30" fill-opacity="0"></rect><rect class="fill-indigo-500 cursor-pointer" x="12.2" y="0" 
width="13.2" height="30" fill-opacity="0"></rect><rect class="fill-indigo-500 cursor-pointer" x="25.4" y="0" width="13.2" height="30" fill-opacity="0"></rect><rect class="fill-indigo-500 cursor-pointer" x="38.599999999999994" y="0" width="13.2" height="30" fill-opacity="0"></rect><rect class="fill-indigo-500 cursor-pointer" x="51.8" y="0" width="13.2" height="30" fill-opacity="0"></rect><rect class="fill-indigo-500 cursor-pointer" x="65" y="0" width="13.2" height="30" fill-opacity="0"></rect><rect class="fill-indigo-500 cursor-pointer" x="78.19999999999999" y="0" width="13.2" height="30" fill-opacity="0"></rect><rect class="fill-indigo-500 cursor-pointer" x="91.39999999999999" y="0" width="13.2" height="30" fill-opacity="0"></rect><rect class="fill-indigo-500 cursor-pointer" x="104.6" y="0" width="13.2" height="30" fill-opacity="0"></rect><rect class="fill-indigo-500 cursor-pointer" x="117.8" y="0" width="13.2" height="30" fill-opacity="0"></rect></g></svg> <div class="relative font-light text-gray-400" style="height: 10px; width: 130px;"><div class="absolute left-0 overflow-hidden text-ellipsis whitespace-nowrap" style="max-width: 60px">0</div> <div class="absolute overflow-hidden text-ellipsis whitespace-nowrap" style="right: 0px; max-width: 60px">248k</div> </div></div></div></div> <div class="absolute right-0 top-0 z-10 h-full w-1 cursor-col-resize hover:bg-indigo-100 active:bg-indigo-500 dark:hover:bg-indigo-800 dark:active:bg-indigo-600/80"><div class="absolute right-0 top-0 h-full w-1"></div> </div> </th><th class="h-full max-w-sm p-2 text-left relative w-auto"><div class="flex h-full flex-col flex-nowrap justify-between"><div><div class="flex items-center justify-between">license <form class="flex flex-col"><button id="asc" class="-mr-1 ml-2 h-[0.4rem] w-[0.8rem] transition ease-in-out"><svg class="-rotate-180 transform text-gray-300 hover:text-gray-500" xmlns="http://www.w3.org/2000/svg" viewBox="0 64 256 128" fill="currentColor" aria-hidden="true"><path d="M213.65674,101.657l-80,79.99976a7.99945,7.99945,0,0,1-11.31348,0l-80-79.99976A8,8,0,0,1,48,88H208a8,8,0,0,1,5.65674,13.657Z"></path></svg></button> <button id="desc" class="-mr-1 ml-2 h-[0.4rem] w-[0.8rem] transition ease-in-out"><svg class="text-gray-300 hover:text-gray-500" xmlns="http://www.w3.org/2000/svg" viewBox="0 64 256 128" fill="currentColor" aria-hidden="true"><path d="M213.65674,101.657l-80,79.99976a7.99945,7.99945,0,0,1-11.31348,0l-80-79.99976A8,8,0,0,1,48,88H208a8,8,0,0,1,5.65674,13.657Z"></path></svg></button></form></div> <div class="mb-2 whitespace-nowrap text-xs font-normal text-gray-500"><span>null</span></div></div> <div></div></div> <div class="absolute right-0 top-0 z-10 h-full w-1 cursor-col-resize hover:bg-indigo-100 active:bg-indigo-500 dark:hover:bg-indigo-800 dark:active:bg-indigo-600/80"><div class="absolute right-0 top-0 h-full w-1"></div> </div> </th><th class="h-full max-w-sm p-2 text-left relative w-auto"><div class="flex h-full flex-col flex-nowrap justify-between"><div><div class="flex items-center justify-between">download_url <form class="flex flex-col"><button id="asc" class="-mr-1 ml-2 h-[0.4rem] w-[0.8rem] transition ease-in-out"><svg class="-rotate-180 transform text-gray-300 hover:text-gray-500" xmlns="http://www.w3.org/2000/svg" viewBox="0 64 256 128" fill="currentColor" aria-hidden="true"><path d="M213.65674,101.657l-80,79.99976a7.99945,7.99945,0,0,1-11.31348,0l-80-79.99976A8,8,0,0,1,48,88H208a8,8,0,0,1,5.65674,13.657Z"></path></svg></button> <button id="desc" class="-mr-1 ml-2 
h-[0.4rem] w-[0.8rem] transition ease-in-out"><svg class="text-gray-300 hover:text-gray-500" xmlns="http://www.w3.org/2000/svg" viewBox="0 64 256 128" fill="currentColor" aria-hidden="true"><path d="M213.65674,101.657l-80,79.99976a7.99945,7.99945,0,0,1-11.31348,0l-80-79.99976A8,8,0,0,1,48,88H208a8,8,0,0,1,5.65674,13.657Z"></path></svg></button></form></div> <div class="mb-2 whitespace-nowrap text-xs font-normal text-gray-500"><span>string</span><span class="italic text-gray-400 before:mx-1 before:content-['·']">lengths</span></div></div> <div><div class="" style="height: 40px; padding-top: 2px"><svg width="130" height="28"><g><rect class="fill-gray-400 dark:fill-gray-500/80" rx="2" x="0" y="0" width="11.2" height="30" fill-opacity="1"></rect><rect class="fill-gray-400 dark:fill-gray-500/80" rx="2" x="13.2" y="4.312757149468425" width="11.2" height="25.687242850531575" fill-opacity="1"></rect><rect class="fill-gray-400 dark:fill-gray-500/80" rx="2" x="26.4" y="23.48149157785963" width="11.2" height="6.518508422140371" fill-opacity="1"></rect><rect class="fill-gray-400 dark:fill-gray-500/80" rx="2" x="39.599999999999994" y="25" width="11.2" height="5" fill-opacity="1"></rect><rect class="fill-gray-400 dark:fill-gray-500/80" rx="2" x="52.8" y="25" width="11.2" height="5" fill-opacity="1"></rect><rect class="fill-gray-400 dark:fill-gray-500/80" rx="2" x="66" y="25" width="11.2" height="5" fill-opacity="1"></rect><rect class="fill-gray-400 dark:fill-gray-500/80" rx="2" x="79.19999999999999" y="25" width="11.2" height="5" fill-opacity="1"></rect><rect class="fill-gray-400 dark:fill-gray-500/80" rx="2" x="92.39999999999999" y="26" width="11.2" height="4" fill-opacity="1"></rect><rect class="fill-gray-400 dark:fill-gray-500/80" rx="2" x="105.6" y="26" width="11.2" height="4" fill-opacity="1"></rect><rect class="fill-gray-400 dark:fill-gray-500/80" rx="2" x="118.8" y="25" width="11.2" height="5" fill-opacity="1"></rect></g><rect class="fill-white dark:fill-gray-900" x="0" y="26" width="130" height="2" stroke-opacity="1"></rect><line class="stroke-gray-100 dark:stroke-gray-500/20" x1="0" y1="27.5" x2="130" y2="27.5" stroke-opacity="1"></line><g><rect class="fill-indigo-500 cursor-pointer" x="-1" y="0" width="13.2" height="30" fill-opacity="0"></rect><rect class="fill-indigo-500 cursor-pointer" x="12.2" y="0" width="13.2" height="30" fill-opacity="0"></rect><rect class="fill-indigo-500 cursor-pointer" x="25.4" y="0" width="13.2" height="30" fill-opacity="0"></rect><rect class="fill-indigo-500 cursor-pointer" x="38.599999999999994" y="0" width="13.2" height="30" fill-opacity="0"></rect><rect class="fill-indigo-500 cursor-pointer" x="51.8" y="0" width="13.2" height="30" fill-opacity="0"></rect><rect class="fill-indigo-500 cursor-pointer" x="65" y="0" width="13.2" height="30" fill-opacity="0"></rect><rect class="fill-indigo-500 cursor-pointer" x="78.19999999999999" y="0" width="13.2" height="30" fill-opacity="0"></rect><rect class="fill-indigo-500 cursor-pointer" x="91.39999999999999" y="0" width="13.2" height="30" fill-opacity="0"></rect><rect class="fill-indigo-500 cursor-pointer" x="104.6" y="0" width="13.2" height="30" fill-opacity="0"></rect><rect class="fill-indigo-500 cursor-pointer" x="117.8" y="0" width="13.2" height="30" fill-opacity="0"></rect></g></svg> <div class="relative font-light text-gray-400" style="height: 10px; width: 130px;"><div class="absolute left-0 overflow-hidden text-ellipsis whitespace-nowrap" style="max-width: 60px">89</div> <div class="absolute overflow-hidden 
text-ellipsis whitespace-nowrap" style="right: 0px; max-width: 60px">454</div> </div></div></div></div> <div class="absolute right-0 top-0 z-10 h-full w-1 cursor-col-resize hover:bg-indigo-100 active:bg-indigo-500 dark:hover:bg-indigo-800 dark:active:bg-indigo-600/80"><div class="absolute right-0 top-0 h-full w-1"></div> </div> </th><th class="h-full max-w-sm p-2 text-left relative w-auto"><div class="flex h-full flex-col flex-nowrap justify-between"><div><div class="flex items-center justify-between">language <form class="flex flex-col"><button id="asc" class="-mr-1 ml-2 h-[0.4rem] w-[0.8rem] transition ease-in-out"><svg class="-rotate-180 transform text-gray-300 hover:text-gray-500" xmlns="http://www.w3.org/2000/svg" viewBox="0 64 256 128" fill="currentColor" aria-hidden="true"><path d="M213.65674,101.657l-80,79.99976a7.99945,7.99945,0,0,1-11.31348,0l-80-79.99976A8,8,0,0,1,48,88H208a8,8,0,0,1,5.65674,13.657Z"></path></svg></button> <button id="desc" class="-mr-1 ml-2 h-[0.4rem] w-[0.8rem] transition ease-in-out"><svg class="text-gray-300 hover:text-gray-500" xmlns="http://www.w3.org/2000/svg" viewBox="0 64 256 128" fill="currentColor" aria-hidden="true"><path d="M213.65674,101.657l-80,79.99976a7.99945,7.99945,0,0,1-11.31348,0l-80-79.99976A8,8,0,0,1,48,88H208a8,8,0,0,1,5.65674,13.657Z"></path></svg></button></form></div> <div class="mb-2 whitespace-nowrap text-xs font-normal text-gray-500"><span>string</span><span class="italic text-gray-400 before:mx-1 before:content-['·']">classes</span></div></div> <div><div class="" style="height: 40px; padding-top: 2px"><svg width="130" height="28"><defs><clipPath id="rounded-bar"><rect x="0" y="0" width="130" height="8" rx="4"></rect></clipPath><pattern id="hatching" patternUnits="userSpaceOnUse" patternTransform="rotate(-45)" height="1" width="5"><line y1="0" class="stroke-gray-400 dark:stroke-gray-500/80" stroke-width="3" y2="1" x1="2" x2="2"></line></pattern><pattern id="hatching-faded" patternUnits="userSpaceOnUse" patternTransform="rotate(-45)" height="1" width="5"><line y1="0" class="stroke-gray-100 dark:stroke-gray-500/20" stroke-width="3" y2="1" x1="2" x2="2"></line></pattern></defs><g height="8" style="transform: translateY(20px)" clip-path="url(#rounded-bar)"><g style="transform: scaleX(1.0153846153846153) translateX(-1px)"><g><rect class="fill-indigo-500 dark:fill-indigo-600/80" x="1" y="0" width="39.36068055278567" height="8" fill-opacity="1"></rect><rect class="fill-indigo-500 dark:fill-indigo-600/80" x="42.36068055278567" y="0" width="25.61770561907222" height="8" fill-opacity="1"></rect><rect class="fill-indigo-500 dark:fill-indigo-600/80" x="69.97838617185789" y="0" width="21.620193612298078" height="8" fill-opacity="1"></rect><rect class="fill-indigo-500 dark:fill-indigo-600/80" x="93.59857978415597" y="0" width="10.067126776495819" height="8" fill-opacity="1"></rect><rect class="fill-indigo-500 dark:fill-indigo-600/80" x="105.66570656065178" y="0" width="9.247903514249565" height="8" fill-opacity="1"></rect><rect class="fill-indigo-500 dark:fill-indigo-600/80" x="116.91361007490136" y="0" width="5.388947555979848" height="8" fill-opacity="1"></rect><rect class="fill-indigo-500 dark:fill-indigo-600/80" x="124.30255763088121" y="0" width="4.697442369118806" height="8" fill-opacity="1"></rect></g></g></g><g style="transform: scaleX(1.0153846153846153) translateX(-1px)"><g><rect class="fill-white cursor-pointer" x="0" y="0" width="41.36068055278567" height="28" fill-opacity="0"></rect><rect class="fill-white cursor-pointer" 
x="41.36068055278567" y="0" width="27.61770561907222" height="28" fill-opacity="0"></rect><rect class="fill-white cursor-pointer" x="68.97838617185789" y="0" width="23.620193612298078" height="28" fill-opacity="0"></rect><rect class="fill-white cursor-pointer" x="92.59857978415597" y="0" width="12.067126776495819" height="28" fill-opacity="0"></rect><rect class="fill-white cursor-pointer" x="104.66570656065178" y="0" width="11.247903514249565" height="28" fill-opacity="0"></rect><rect class="fill-white cursor-pointer" x="115.91361007490136" y="0" width="7.388947555979848" height="28" fill-opacity="0"></rect><rect class="fill-white cursor-pointer" x="123.30255763088121" y="0" width="6.697442369118806" height="28" fill-opacity="0"></rect></g></g></svg> <div class="relative font-light text-gray-400" style="height: 10px; width: 130px;"><div class="absolute left-0 max-w-full overflow-hidden text-ellipsis whitespace-nowrap">7 values</div></div></div></div></div> <div class="absolute right-0 top-0 z-10 h-full w-1 cursor-col-resize hover:bg-indigo-100 active:bg-indigo-500 dark:hover:bg-indigo-800 dark:active:bg-indigo-600/80"><div class="absolute right-0 top-0 h-full w-1"></div> </div> </th><th class="h-full max-w-sm p-2 text-left relative w-auto"><div class="flex h-full flex-col flex-nowrap justify-between"><div><div class="flex items-center justify-between">comments <form class="flex flex-col"><button id="asc" class="-mr-1 ml-2 h-[0.4rem] w-[0.8rem] transition ease-in-out"><svg class="-rotate-180 transform text-gray-300 hover:text-gray-500" xmlns="http://www.w3.org/2000/svg" viewBox="0 64 256 128" fill="currentColor" aria-hidden="true"><path d="M213.65674,101.657l-80,79.99976a7.99945,7.99945,0,0,1-11.31348,0l-80-79.99976A8,8,0,0,1,48,88H208a8,8,0,0,1,5.65674,13.657Z"></path></svg></button> <button id="desc" class="-mr-1 ml-2 h-[0.4rem] w-[0.8rem] transition ease-in-out"><svg class="text-gray-300 hover:text-gray-500" xmlns="http://www.w3.org/2000/svg" viewBox="0 64 256 128" fill="currentColor" aria-hidden="true"><path d="M213.65674,101.657l-80,79.99976a7.99945,7.99945,0,0,1-11.31348,0l-80-79.99976A8,8,0,0,1,48,88H208a8,8,0,0,1,5.65674,13.657Z"></path></svg></button></form></div> <div class="mb-2 whitespace-nowrap text-xs font-normal text-gray-500"><span>string</span><span class="italic text-gray-400 before:mx-1 before:content-['·']">lengths</span></div></div> <div><div class="" style="height: 40px; padding-top: 2px"><svg width="130" height="28"><g><rect class="fill-gray-400 dark:fill-gray-500/80" rx="2" x="0" y="0" width="11.2" height="30" fill-opacity="1"></rect><rect class="fill-gray-400 dark:fill-gray-500/80" rx="2" x="13.2" y="25" width="11.2" height="5" fill-opacity="1"></rect><rect class="fill-gray-400 dark:fill-gray-500/80" rx="2" x="26.4" y="25" width="11.2" height="5" fill-opacity="1"></rect><rect class="fill-gray-400 dark:fill-gray-500/80" rx="2" x="39.599999999999994" y="25" width="11.2" height="5" fill-opacity="1"></rect><rect class="fill-gray-400 dark:fill-gray-500/80" rx="2" x="52.8" y="25" width="11.2" height="5" fill-opacity="1"></rect><rect class="fill-gray-400 dark:fill-gray-500/80" rx="2" x="66" y="25" width="11.2" height="5" fill-opacity="1"></rect><rect class="fill-gray-400 dark:fill-gray-500/80" rx="2" x="79.19999999999999" y="25" width="11.2" height="5" fill-opacity="1"></rect><rect class="fill-gray-400 dark:fill-gray-500/80" rx="2" x="92.39999999999999" y="25" width="11.2" height="5" fill-opacity="1"></rect><rect class="fill-gray-400 dark:fill-gray-500/80" rx="2" x="105.6" 
y="25" width="11.2" height="5" fill-opacity="1"></rect><rect class="fill-gray-400 dark:fill-gray-500/80" rx="2" x="118.8" y="25" width="11.2" height="5" fill-opacity="1"></rect></g><rect class="fill-white dark:fill-gray-900" x="0" y="26" width="130" height="2" stroke-opacity="1"></rect><line class="stroke-gray-100 dark:stroke-gray-500/20" x1="0" y1="27.5" x2="130" y2="27.5" stroke-opacity="1"></line><g><rect class="fill-indigo-500 cursor-pointer" x="-1" y="0" width="13.2" height="30" fill-opacity="0"></rect><rect class="fill-indigo-500 cursor-pointer" x="12.2" y="0" width="13.2" height="30" fill-opacity="0"></rect><rect class="fill-indigo-500 cursor-pointer" x="25.4" y="0" width="13.2" height="30" fill-opacity="0"></rect><rect class="fill-indigo-500 cursor-pointer" x="38.599999999999994" y="0" width="13.2" height="30" fill-opacity="0"></rect><rect class="fill-indigo-500 cursor-pointer" x="51.8" y="0" width="13.2" height="30" fill-opacity="0"></rect><rect class="fill-indigo-500 cursor-pointer" x="65" y="0" width="13.2" height="30" fill-opacity="0"></rect><rect class="fill-indigo-500 cursor-pointer" x="78.19999999999999" y="0" width="13.2" height="30" fill-opacity="0"></rect><rect class="fill-indigo-500 cursor-pointer" x="91.39999999999999" y="0" width="13.2" height="30" fill-opacity="0"></rect><rect class="fill-indigo-500 cursor-pointer" x="104.6" y="0" width="13.2" height="30" fill-opacity="0"></rect><rect class="fill-indigo-500 cursor-pointer" x="117.8" y="0" width="13.2" height="30" fill-opacity="0"></rect></g></svg> <div class="relative font-light text-gray-400" style="height: 10px; width: 130px;"><div class="absolute left-0 overflow-hidden text-ellipsis whitespace-nowrap" style="max-width: 60px">0</div> <div class="absolute overflow-hidden text-ellipsis whitespace-nowrap" style="right: 0px; max-width: 60px">74.6k</div> </div></div></div></div> <div class="absolute right-0 top-0 z-10 h-full w-1 cursor-col-resize hover:bg-indigo-100 active:bg-indigo-500 dark:hover:bg-indigo-800 dark:active:bg-indigo-600/80"><div class="absolute right-0 top-0 h-full w-1"></div> </div> </th><th class="h-full max-w-sm p-2 text-left relative w-auto"><div class="flex h-full flex-col flex-nowrap justify-between"><div><div class="flex items-center justify-between">code <form class="flex flex-col"><button id="asc" class="-mr-1 ml-2 h-[0.4rem] w-[0.8rem] transition ease-in-out"><svg class="-rotate-180 transform text-gray-300 hover:text-gray-500" xmlns="http://www.w3.org/2000/svg" viewBox="0 64 256 128" fill="currentColor" aria-hidden="true"><path d="M213.65674,101.657l-80,79.99976a7.99945,7.99945,0,0,1-11.31348,0l-80-79.99976A8,8,0,0,1,48,88H208a8,8,0,0,1,5.65674,13.657Z"></path></svg></button> <button id="desc" class="-mr-1 ml-2 h-[0.4rem] w-[0.8rem] transition ease-in-out"><svg class="text-gray-300 hover:text-gray-500" xmlns="http://www.w3.org/2000/svg" viewBox="0 64 256 128" fill="currentColor" aria-hidden="true"><path d="M213.65674,101.657l-80,79.99976a7.99945,7.99945,0,0,1-11.31348,0l-80-79.99976A8,8,0,0,1,48,88H208a8,8,0,0,1,5.65674,13.657Z"></path></svg></button></form></div> <div class="mb-2 whitespace-nowrap text-xs font-normal text-gray-500"><span>string</span><span class="italic text-gray-400 before:mx-1 before:content-['·']">lengths</span></div></div> <div><div class="" style="height: 40px; padding-top: 2px"><svg width="130" height="28"><g><rect class="fill-gray-400 dark:fill-gray-500/80" rx="2" x="0" y="0" width="11.2" height="30" fill-opacity="1"></rect><rect class="fill-gray-400 
dark:fill-gray-500/80" rx="2" x="13.2" y="25" width="11.2" height="5" fill-opacity="1"></rect><rect class="fill-gray-400 dark:fill-gray-500/80" rx="2" x="26.4" y="25" width="11.2" height="5" fill-opacity="1"></rect><rect class="fill-gray-400 dark:fill-gray-500/80" rx="2" x="39.599999999999994" y="25" width="11.2" height="5" fill-opacity="1"></rect><rect class="fill-gray-400 dark:fill-gray-500/80" rx="2" x="52.8" y="25" width="11.2" height="5" fill-opacity="1"></rect><rect class="fill-gray-400 dark:fill-gray-500/80" rx="2" x="66" y="25" width="11.2" height="5" fill-opacity="1"></rect><rect class="fill-gray-400 dark:fill-gray-500/80" rx="2" x="79.19999999999999" y="25" width="11.2" height="5" fill-opacity="1"></rect><rect class="fill-gray-400 dark:fill-gray-500/80" rx="2" x="92.39999999999999" y="25" width="11.2" height="5" fill-opacity="1"></rect><rect class="fill-gray-400 dark:fill-gray-500/80" rx="2" x="105.6" y="26" width="11.2" height="4" fill-opacity="1"></rect><rect class="fill-gray-400 dark:fill-gray-500/80" rx="2" x="118.8" y="25" width="11.2" height="5" fill-opacity="1"></rect></g><rect class="fill-white dark:fill-gray-900" x="0" y="26" width="130" height="2" stroke-opacity="1"></rect><line class="stroke-gray-100 dark:stroke-gray-500/20" x1="0" y1="27.5" x2="130" y2="27.5" stroke-opacity="1"></line><g><rect class="fill-indigo-500 cursor-pointer" x="-1" y="0" width="13.2" height="30" fill-opacity="0"></rect><rect class="fill-indigo-500 cursor-pointer" x="12.2" y="0" width="13.2" height="30" fill-opacity="0"></rect><rect class="fill-indigo-500 cursor-pointer" x="25.4" y="0" width="13.2" height="30" fill-opacity="0"></rect><rect class="fill-indigo-500 cursor-pointer" x="38.599999999999994" y="0" width="13.2" height="30" fill-opacity="0"></rect><rect class="fill-indigo-500 cursor-pointer" x="51.8" y="0" width="13.2" height="30" fill-opacity="0"></rect><rect class="fill-indigo-500 cursor-pointer" x="65" y="0" width="13.2" height="30" fill-opacity="0"></rect><rect class="fill-indigo-500 cursor-pointer" x="78.19999999999999" y="0" width="13.2" height="30" fill-opacity="0"></rect><rect class="fill-indigo-500 cursor-pointer" x="91.39999999999999" y="0" width="13.2" height="30" fill-opacity="0"></rect><rect class="fill-indigo-500 cursor-pointer" x="104.6" y="0" width="13.2" height="30" fill-opacity="0"></rect><rect class="fill-indigo-500 cursor-pointer" x="117.8" y="0" width="13.2" height="30" fill-opacity="0"></rect></g></svg> <div class="relative font-light text-gray-400" style="height: 10px; width: 130px;"><div class="absolute left-0 overflow-hidden text-ellipsis whitespace-nowrap" style="max-width: 60px">0</div> <div class="absolute overflow-hidden text-ellipsis whitespace-nowrap" style="right: 0px; max-width: 60px">248k</div> </div></div></div></div> <div class="absolute right-0 top-0 z-10 h-full w-1 cursor-col-resize hover:bg-indigo-100 active:bg-indigo-500 dark:hover:bg-indigo-800 dark:active:bg-indigo-600/80"><div class="absolute right-0 top-0 h-full w-1"></div> </div> </th></tr></thead> <tbody class="h-16 overflow-scroll"><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610200"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">f8fbe4cde44f829b16ed92a2847312809cc43baf3c73a042095c9663dc4340be</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 
"><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">cunger/pythia</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">spotlight.clj</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">(ns core.external.ner.spotlight (:require [settings] [core.external.http :as http] [clojure.data.json :as json])) (declare type-list) (defn http-request [input] (settings/language (settings/domain { :dbpedia { :de (str "=" input "&spotter=Default") :en (str "=" input "&spotter=Default&confidence=0.5&support=20") :es (str ":2231/rest/annotate?text=" input "&spotter=Default") }}))) ;; Main ;; must implement: get-entities, filter-entities (defn get-entities [input] (let [request (http-request (http/urlize input)) response (http/get-response :get request {:headers {"accept" "application/json"}} identity) status (:status response)] (if (= status 200) (let [body (json/read-str (:body response))] (if (contains? body "Resources") (for [resource (get body "Resources")] { :uri (get resource "@URI" ) :form (get resource "@surfaceForm") :offset (get resource "@offset") :types (type-list (get resource "@types"))}))) []))) ;; Aux (defn type-list [string] (map #(clojure.string/replace % "DBpedia:" "/") (filter #(.startsWith % "DBpedia:") (clojure.string/split string #"\,")))) (defn filter-entities [entities] (remove #(or (empty? (:types %)) (some #{""} (:types %))) entities)) (defn most-general-type [entity] (clojure.string/replace (last (:types entity)) "/" ""))</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/cunger/pythia/f58e35395968d4c46aef495fd363c26b1102003c/src/core/external/ner/spotlight.clj</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">clojure</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "> Main must implement: get-entities, filter-entities Aux</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">(ns core.external.ner.spotlight (:require [settings] [core.external.http :as http] [clojure.data.json :as json])) (declare type-list) (defn http-request [input] (settings/language (settings/domain { :dbpedia { :de (str "=" input "&spotter=Default") :en (str "=" input "&spotter=Default&confidence=0.5&support=20") :es (str ":2231/rest/annotate?text=" input "&spotter=Default") }}))) (defn get-entities [input] (let [request (http-request (http/urlize input)) response (http/get-response :get request {:headers {"accept" "application/json"}} identity) status (:status response)] (if (= status 200) (let [body (json/read-str (:body response))] (if (contains? 
body "Resources") (for [resource (get body "Resources")] { :uri (get resource "@URI" ) :form (get resource "@surfaceForm") :offset (get resource "@offset") :types (type-list (get resource "@types"))}))) []))) (defn type-list [string] (map #(clojure.string/replace % "DBpedia:" "/") (filter #(.startsWith % "DBpedia:") (clojure.string/split string #"\,")))) (defn filter-entities [entities] (remove #(or (empty? (:types %)) (some #{""} (:types %))) entities)) (defn most-general-type [entity] (clojure.string/replace (last (:types entity)) "/" ""))</span></div> </div></div> </td> </tr><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610201"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">8d48afaf562f9e5a378743add857efba78a94da33189c9cf5414ef2f1c7aa891</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">ds-wizard/engine-backend</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">DocumentTemplateFormatSM.hs</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">module Shared.Api.Resource.DocumentTemplate.DocumentTemplateFormatSM where import Data.Swagger import Shared.Api.Resource.DocumentTemplate.DocumentTemplateFormatDTO import Shared.Api.Resource.DocumentTemplate.DocumentTemplateFormatJM () import Shared.Database.Migration.Development.DocumentTemplate.Data.DocumentTemplateFormats import Shared.Service.DocumentTemplate.DocumentTemplateMapper import Shared.Util.Swagger instance ToSchema DocumentTemplateFormatDTO where declareNamedSchema = toSwagger (toFormatDTO formatJson) </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/ds-wizard/engine-backend/d392b751192a646064305d3534c57becaa229f28/engine-shared/src/Shared/Api/Resource/DocumentTemplate/DocumentTemplateFormatSM.hs</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">haskell</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "></span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">module Shared.Api.Resource.DocumentTemplate.DocumentTemplateFormatSM where import Data.Swagger import Shared.Api.Resource.DocumentTemplate.DocumentTemplateFormatDTO import Shared.Api.Resource.DocumentTemplate.DocumentTemplateFormatJM () import Shared.Database.Migration.Development.DocumentTemplate.Data.DocumentTemplateFormats import Shared.Service.DocumentTemplate.DocumentTemplateMapper import Shared.Util.Swagger instance ToSchema DocumentTemplateFormatDTO where declareNamedSchema = toSwagger (toFormatDTO formatJson) 
</span></div> </div></div> </td> </tr><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610202"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">240f2e36657f1341c292d0b81fd24559402c623c1ad95501d7c8fcac07e629fd</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">kepler16/gx.cljc</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">impl.cljc</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">(ns k16.gx.beta.impl (:refer-clojure :exclude [ref]) #?(:cljs (:require-macros [k16.gx.beta.error-context :refer [with-err-ctx]])) (:require [clojure.walk :as walk] [k16.gx.beta.errors :as gx.err] [k16.gx.beta.schema :as gx.schema] #?(:cljs [clojure.string :as string]) #?(:cljs [k16.gx.beta.registry :as gx.reg]) #?(:clj [k16.gx.beta.error-context :refer [with-err-ctx]]))) (defn sccs "Returns a topologically sorted list of strongly connected components. Tarjan's algorithm." ([g] (sccs g [])) ([g sccs-init] (let [strong-connect (fn strong-connect [acc v] (let [acc (-> acc (assoc-in [:idxs v] (:idx acc)) (assoc-in [:low-links v] (:idx acc)) (update :idx inc) (update :S conj v) (assoc-in [:on-stack v] true)) acc (reduce (fn [acc w] (cond (not (get-in acc [:idxs w])) (let [acc (strong-connect acc w)] (update-in acc [:low-links v] min (get-in acc [:low-links w]))) (get-in acc [:on-stack w]) (update-in acc [:low-links v] min (get-in acc [:idxs w])) :else acc)) acc (get g v))] (if (= (get-in acc [:idxs v]) (get-in acc [:low-links v])) (let [[S on-stack scc] (loop [S (:S acc) on-stack (:on-stack acc) scc #{}] (let [w (peek S) S (pop S) on-stack (dissoc on-stack w) scc (conj scc w)] (if (= v w) [S on-stack scc] (recur S on-stack scc))))] (-> acc (assoc :S S :on-stack on-stack) (update :sccs conj scc))) acc)))] (:sccs (reduce (fn [acc v] (if-not (contains? (:idxs acc) v) (strong-connect acc v) acc)) {:S () :idx 0 :sccs sccs-init} (keys g)))))) (defn cycles [sccs g] (filter #(or (>= (count %) 2) (get-in g [(first %) (first %)])) sccs)) (defn dependency-errors [g sccs] (concat (mapcat (fn [[k v]] (seq (map (fn [does-not-exist] {:type :missing :from k :to does-not-exist}) (remove #(contains? 
g %) v)))) g) (map (fn [cycle] {:type :cycle :between cycle}) (cycles sccs g)))) (defn human-render-dependency-error [dependency-error] (case (:type dependency-error) :missing (str (:from dependency-error) " depends on " (:to dependency-error) ", but " (:to dependency-error) " doesn't exist") :cycle (str "circular " (apply str (interpose " -> " (concat (reverse (:between dependency-error)) [(first (reverse (:between dependency-error)))])))) (pr-str dependency-error))) #?(:cljs (defn resolve-exported-symbol [sym-str] (let [path (-> sym-str (string/replace #"-" "_") (string/replace #"/" ".") (string/split #"\."))] (loop [p path obj goog.global] (if (and (seq p) obj) (recur (rest p) (aget obj (first p))) obj))))) #?(:cljs (defn sym->js-resolve [sym] (let [ssym (str sym)] (or (get @gx.reg/registry* ssym) (resolve-exported-symbol ssym))))) (defn namespace-symbol "Returns symbol unchanged if it has a namespace, or with clojure.core as it's namespace otherwise." [sym] (cond (namespace sym) #?(:clj sym :cljs (sym->js-resolve sym)) :else #?(:clj (symbol "clojure.core" (name sym)) :cljs ((ns-publics 'cljs.core) sym)))) (def mergable? (every-pred map? (complement record?))) (defn merger [left right] (if (mergable? left right) (merge-with merger left right) (or right left))) (defn deep-merge "Recursively merges maps." [& maps] (reduce merger maps)) (def locals #{'gx/ref 'gx/ref-keys}) (defn local-form? [form] (and (seq? form) (locals (first form)))) (defn parse-local [env form] (condp = (first form) 'gx/ref (get env (second form)) 'gx/ref-keys (select-keys env (second form)))) (defn postwalk-evaluate "A postwalk runtime signal processor evaluator, works most of the time. Doesn't support special symbols and macros, basically just function application. For cljs, consider compiled components or sci-evaluator, would require allowing for swappable evaluation stategies. Point to docs, to inform how to swap evaluator, or alternative ways to specify functions (that get compiled) that can be used." [props form initial-form] (walk/postwalk (fn [x] (cond (local-form? x) (parse-local props x) (and (seq? x) (ifn? (first x))) (try (apply (first x) (rest x)) (catch #?(:clj Throwable :cljs :default) e (gx.err/throw-gx-err (str "Form evaluate error:\n\t>> " initial-form) {:props props} e))) :else x)) form)) (defn resolve-symbol [sym] (if (symbol? sym) (if-let [nss #?(:cljs (namespace-symbol sym) :clj (try (some->> sym (namespace-symbol) (requiring-resolve) (var-get)) (catch Throwable e (gx.err/add-err-cause {:title :symbol-cannot-be-resolved :data sym :exception e}))))] nss (gx.err/add-err-cause {:title :symbol-cannot-be-resolved :data sym})) sym)) (defn form->runnable [form-def] (let [props* (atom #{}) resolved-form (->> form-def (walk/postwalk (fn [sub-form] (cond (locals sub-form) sub-form (local-form? sub-form) (do (swap! props* concat (-> sub-form rest flatten)) sub-form) (special-symbol? sub-form) (gx.err/throw-gx-err "Special forms are not supported" {:form-def form-def :token sub-form}) (resolve-symbol sub-form) (resolve-symbol sub-form) (symbol? sub-form) (gx.err/throw-gx-err "Unable to resolve symbol" {:form-def form-def :token sub-form}) :else sub-form))))] {:env @props* :initial-form form-def :form resolved-form})) (defn push-down-props [{{:keys [props-signals]} :normalize} {:gx/keys [props] :as node-def}] (if (and props (seq props-signals)) (reduce-kv (fn [m k v] (if (and (contains? 
props-signals k) (not (:gx/props v))) (assoc-in m [k :gx/props] props) m)) node-def node-def) node-def)) (defn remap-signals [from-signals to-signals] (cond (and (seq from-signals) (seq to-signals)) (if from-signals (->> to-signals (map (fn [[k v]] [k (v from-signals)])) (into {})) to-signals) (seq from-signals) from-signals :else to-signals)) (defn flatten-component "Flattens nested components by creating one root component using signal mappings from context (if any)" [context root-component] (let [root-component (assoc root-component :gx/signal-mapping (or (:gx/signal-mapping root-component) (:signal-mapping context)))] (loop [{:gx/keys [component signal-mapping] :as current} root-component] (if-let [nested component] (recur (update nested :gx/signal-mapping #(remap-signals % signal-mapping))) (if-let [mapping (seq (:gx/signal-mapping current))] (->> mapping (map (fn [[k v]] [k (get current v)])) (into root-component)) (dissoc current :gx/signal-mapping)))))) (defn resolve-component "Resolve component by it's symbol and validate against malli schema" [context component] (when component (with-err-ctx {:error-type :normalize-node-component} (let [resolved (some->> component (resolve-symbol) (flatten-component context)) [issues schema] (when resolved (gx.schema/validate-component context resolved))] (cond (not resolved) (gx.err/throw-gx-err "Component could not be resolved" {:component component}) issues (gx.err/throw-gx-err "Component schema error" {:component resolved :component-schema schema :schema-error (set issues)}) :else resolved))))) </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/kepler16/gx.cljc/fdd8103ce5a1fcf7fc974b82493fab0b9b53002f/src/k16/gx/beta/impl.cljc</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">clojure</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "></span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">(ns k16.gx.beta.impl (:refer-clojure :exclude [ref]) #?(:cljs (:require-macros [k16.gx.beta.error-context :refer [with-err-ctx]])) (:require [clojure.walk :as walk] [k16.gx.beta.errors :as gx.err] [k16.gx.beta.schema :as gx.schema] #?(:cljs [clojure.string :as string]) #?(:cljs [k16.gx.beta.registry :as gx.reg]) #?(:clj [k16.gx.beta.error-context :refer [with-err-ctx]]))) (defn sccs "Returns a topologically sorted list of strongly connected components. Tarjan's algorithm." 
([g] (sccs g [])) ([g sccs-init] (let [strong-connect (fn strong-connect [acc v] (let [acc (-> acc (assoc-in [:idxs v] (:idx acc)) (assoc-in [:low-links v] (:idx acc)) (update :idx inc) (update :S conj v) (assoc-in [:on-stack v] true)) acc (reduce (fn [acc w] (cond (not (get-in acc [:idxs w])) (let [acc (strong-connect acc w)] (update-in acc [:low-links v] min (get-in acc [:low-links w]))) (get-in acc [:on-stack w]) (update-in acc [:low-links v] min (get-in acc [:idxs w])) :else acc)) acc (get g v))] (if (= (get-in acc [:idxs v]) (get-in acc [:low-links v])) (let [[S on-stack scc] (loop [S (:S acc) on-stack (:on-stack acc) scc #{}] (let [w (peek S) S (pop S) on-stack (dissoc on-stack w) scc (conj scc w)] (if (= v w) [S on-stack scc] (recur S on-stack scc))))] (-> acc (assoc :S S :on-stack on-stack) (update :sccs conj scc))) acc)))] (:sccs (reduce (fn [acc v] (if-not (contains? (:idxs acc) v) (strong-connect acc v) acc)) {:S () :idx 0 :sccs sccs-init} (keys g)))))) (defn cycles [sccs g] (filter #(or (>= (count %) 2) (get-in g [(first %) (first %)])) sccs)) (defn dependency-errors [g sccs] (concat (mapcat (fn [[k v]] (seq (map (fn [does-not-exist] {:type :missing :from k :to does-not-exist}) (remove #(contains? g %) v)))) g) (map (fn [cycle] {:type :cycle :between cycle}) (cycles sccs g)))) (defn human-render-dependency-error [dependency-error] (case (:type dependency-error) :missing (str (:from dependency-error) " depends on " (:to dependency-error) ", but " (:to dependency-error) " doesn't exist") :cycle (str "circular " (apply str (interpose " -> " (concat (reverse (:between dependency-error)) [(first (reverse (:between dependency-error)))])))) (pr-str dependency-error))) #?(:cljs (defn resolve-exported-symbol [sym-str] (let [path (-> sym-str (string/replace #"-" "_") (string/replace #"/" ".") (string/split #"\."))] (loop [p path obj goog.global] (if (and (seq p) obj) (recur (rest p) (aget obj (first p))) obj))))) #?(:cljs (defn sym->js-resolve [sym] (let [ssym (str sym)] (or (get @gx.reg/registry* ssym) (resolve-exported-symbol ssym))))) (defn namespace-symbol "Returns symbol unchanged if it has a namespace, or with clojure.core as it's namespace otherwise." [sym] (cond (namespace sym) #?(:clj sym :cljs (sym->js-resolve sym)) :else #?(:clj (symbol "clojure.core" (name sym)) :cljs ((ns-publics 'cljs.core) sym)))) (def mergable? (every-pred map? (complement record?))) (defn merger [left right] (if (mergable? left right) (merge-with merger left right) (or right left))) (defn deep-merge "Recursively merges maps." [& maps] (reduce merger maps)) (def locals #{'gx/ref 'gx/ref-keys}) (defn local-form? [form] (and (seq? form) (locals (first form)))) (defn parse-local [env form] (condp = (first form) 'gx/ref (get env (second form)) 'gx/ref-keys (select-keys env (second form)))) (defn postwalk-evaluate "A postwalk runtime signal processor evaluator, works most of the time. Doesn't support special symbols and macros, basically just function application. For cljs, consider compiled components or sci-evaluator, would require allowing for swappable evaluation stategies. Point to docs, to inform how to swap evaluator, or alternative ways to specify functions (that get compiled) that can be used." [props form initial-form] (walk/postwalk (fn [x] (cond (local-form? x) (parse-local props x) (and (seq? x) (ifn? 
(first x))) (try (apply (first x) (rest x)) (catch #?(:clj Throwable :cljs :default) e (gx.err/throw-gx-err (str "Form evaluate error:\n\t>> " initial-form) {:props props} e))) :else x)) form)) (defn resolve-symbol [sym] (if (symbol? sym) (if-let [nss #?(:cljs (namespace-symbol sym) :clj (try (some->> sym (namespace-symbol) (requiring-resolve) (var-get)) (catch Throwable e (gx.err/add-err-cause {:title :symbol-cannot-be-resolved :data sym :exception e}))))] nss (gx.err/add-err-cause {:title :symbol-cannot-be-resolved :data sym})) sym)) (defn form->runnable [form-def] (let [props* (atom #{}) resolved-form (->> form-def (walk/postwalk (fn [sub-form] (cond (locals sub-form) sub-form (local-form? sub-form) (do (swap! props* concat (-> sub-form rest flatten)) sub-form) (special-symbol? sub-form) (gx.err/throw-gx-err "Special forms are not supported" {:form-def form-def :token sub-form}) (resolve-symbol sub-form) (resolve-symbol sub-form) (symbol? sub-form) (gx.err/throw-gx-err "Unable to resolve symbol" {:form-def form-def :token sub-form}) :else sub-form))))] {:env @props* :initial-form form-def :form resolved-form})) (defn push-down-props [{{:keys [props-signals]} :normalize} {:gx/keys [props] :as node-def}] (if (and props (seq props-signals)) (reduce-kv (fn [m k v] (if (and (contains? props-signals k) (not (:gx/props v))) (assoc-in m [k :gx/props] props) m)) node-def node-def) node-def)) (defn remap-signals [from-signals to-signals] (cond (and (seq from-signals) (seq to-signals)) (if from-signals (->> to-signals (map (fn [[k v]] [k (v from-signals)])) (into {})) to-signals) (seq from-signals) from-signals :else to-signals)) (defn flatten-component "Flattens nested components by creating one root component using signal mappings from context (if any)" [context root-component] (let [root-component (assoc root-component :gx/signal-mapping (or (:gx/signal-mapping root-component) (:signal-mapping context)))] (loop [{:gx/keys [component signal-mapping] :as current} root-component] (if-let [nested component] (recur (update nested :gx/signal-mapping #(remap-signals % signal-mapping))) (if-let [mapping (seq (:gx/signal-mapping current))] (->> mapping (map (fn [[k v]] [k (get current v)])) (into root-component)) (dissoc current :gx/signal-mapping)))))) (defn resolve-component "Resolve component by it's symbol and validate against malli schema" [context component] (when component (with-err-ctx {:error-type :normalize-node-component} (let [resolved (some->> component (resolve-symbol) (flatten-component context)) [issues schema] (when resolved (gx.schema/validate-component context resolved))] (cond (not resolved) (gx.err/throw-gx-err "Component could not be resolved" {:component component}) issues (gx.err/throw-gx-err "Component schema error" {:component resolved :component-schema schema :schema-error (set issues)}) :else resolved))))) </span></div> </div></div> </td> </tr><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610203"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">7236d404ea9376a8e5feceab066a4058598b6bbc6bca6d7bb370333ec46b2af1</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">riemann/riemann</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 
"><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">logging.clj</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">(ns riemann.logging (:import (org.slf4j LoggerFactory) (ch.qos.logback.classic Level Logger) (ch.qos.logback.core ConsoleAppender FileAppender) (ch.qos.logback.core.util FileSize) (ch.qos.logback.core.encoder LayoutWrappingEncoder) (ch.qos.logback.core.rolling RollingFileAppender TimeBasedRollingPolicy FixedWindowRollingPolicy SizeBasedTriggeringPolicy) (ch.qos.logback.classic.encoder PatternLayoutEncoder) (net.logstash.logback JSONEventLayoutV0 JSONEventLayoutV1) (net.logstash.logback.encoder LogstashEncoder) (java.net URL) (ch.qos.logback.classic.joran JoranConfigurator)) (:require wall.hack)) (defn get-logger ([] (LoggerFactory/getLogger Logger/ROOT_LOGGER_NAME)) ([logger] (LoggerFactory/getLogger logger))) (defn- get-context [] (LoggerFactory/getILoggerFactory)) (defmulti encoder identity) (defmethod encoder :json [type] (LogstashEncoder.)) (defmethod encoder :json-event [type] (encoder :json-event-v0)) (defmethod encoder :json-event-v0 [type] (doto (LayoutWrappingEncoder.) (.setLayout (JSONEventLayoutV0.)))) (defmethod encoder :json-event-v1 [type] (doto (LayoutWrappingEncoder.) (.setLayout (JSONEventLayoutV1.)))) (defmethod encoder :riemann [type] (doto (PatternLayoutEncoder.) (.setPattern "%p [%d] %t - %c - %m%n%throwable"))) (defmethod encoder :default [type] (binding [*out* *err*] (println "invalid logging layout specified: " type)) (encoder :riemann)) (defn set-level "Set the level for the given logger, by string name. Example: (set-level Level/INFO) or (set-level \"riemann.client\", Level/DEBUG)" ([level] (. (get-logger) (setLevel level))) ([logger level] (. (get-logger logger) (setLevel level)))) (defmacro suppress "Turns off logging for the evaluation of body." [loggers & body] (let [[logger & more] (flatten [loggers])] (if logger `(let [old-level# (.getLevel (get-logger ~logger))] (try (set-level ~logger Level/ERROR) (suppress ~more ~@body) (finally (set-level ~logger old-level#)))) `(do ~@body)))) (defn configure-from-file "Configure logging from a configuration file" [context config-file] (doto (JoranConfigurator.) (.setContext context) (.doConfigure (URL. config-file)))) (defn configure-from-opts "Configure logging from opts" [logger context opts] (let [{:keys [console? console-layout file file-layout files rotate-count logsize-rotate] :or {console? true console-layout :riemann file-layout :riemann}} opts] (do (when console? (let [encoder (doto (encoder console-layout) (.setContext context) (.start)) console-appender (doto (ConsoleAppender.) (.setContext context) (.setEncoder encoder) (.start))] (.addAppender logger console-appender))) (doseq [{:keys [file file-layout]} (conj files {:file file :file-layout file-layout}) :when file] (if logsize-rotate (let [encoder (doto (encoder file-layout) (.setContext context) (.start)) log-appender (doto (RollingFileAppender.) (.setFile file) (.setContext context) (.setEncoder encoder)) rolling-policy (doto (FixedWindowRollingPolicy.) (.setMinIndex 1) (.setMaxIndex (or rotate-count 10)) (.setFileNamePattern (str file ".%i")) (.setParent log-appender) (.setContext context) (.start)) triggering-policy (doto (SizeBasedTriggeringPolicy.) (.setMaxFileSize (FileSize. 
logsize-rotate)) (.setContext context) (.start)) log-appender (doto log-appender (.setRollingPolicy rolling-policy) (.setTriggeringPolicy triggering-policy) (.start))] (.addAppender logger log-appender)) (let [encoder (doto (encoder file-layout) (.setContext context) (.start)) log-appender (doto (RollingFileAppender.) (.setFile file) (.setContext context) (.setEncoder encoder)) rolling-policy (doto (TimeBasedRollingPolicy.) (.setMaxHistory (or rotate-count 10)) (.setFileNamePattern (str file ".%d{yyyy-MM-dd}")) (.setParent log-appender) (.setContext context) (.start)) log-appender (doto log-appender (.setRollingPolicy rolling-policy) (.start))] (.addAppender logger log-appender)))) (set-level Level/INFO) (set-level "riemann.client" Level/DEBUG) (set-level "riemann.server" Level/DEBUG) (set-level "riemann.streams" Level/DEBUG) (set-level "riemann.graphite" Level/DEBUG)))) (defn init "Initialize logging. You will probably call this from the config file. You can call init more than once; its changes are destructive. Options: - :console? Determine if logging should happen on the console. - :console-layout Specifying console layout. - :file The file to log to. If omitted, log to console only. - :file-layout Specifying file layout. - :files A list of files to log to. If provided, a seq or vector is expected containing maps with a :file and an :file-layout - :logsize-rotate If size (in bytes) is specified use size based rotation otherwise use default time based rotation. - :rotate-count Specifying the number of rotated files to keep. If omitted, keep last 10 rotated files. Layout can be :riemann or :json. If layout is omitted, the default layout :riemann will be used. For example: ```clojure ; Basic console logging (init) ; Also log to a file (init {:file \"/var/log/riemann.log\"}) ; With rotation (init {:console? false :file \"/var/log/riemann.log\" :rotate-count 10}) ; Rotate at a certain size (init {:console? false :file \"/var/log/riemann.log\" :logsize-rotate 1000000000}) ; Multiple files in different formats (init {:console? false :files [{:file \"/var/log/riemann.log\"}, {:file \"/var/log/riemann.json.log\" :file-layout :json}] :logsize-rotate 100 :rotate-count 5}) ```" ([] (init {})) ([opts] (let [logger (get-logger) context (get-context)] (.detachAndStopAllAppenders logger) (if-let [config-file (System/getProperty "logback.configurationFile")] (configure-from-file context config-file) (configure-from-opts logger context opts))))) (defn nice-syntax-error "Rewrites clojure.lang.LispReader$ReaderException to have error messages that might actually help someone." ([e] (nice-syntax-error e "(no file)")) ([e file] ; Lord help me. (let [line (wall.hack/field (class e) :line e) msg (.getMessage (or (.getCause e) e))] (RuntimeException. 
(str "Syntax error (" file ":" line ") " msg))))) </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/riemann/riemann/1649687c0bd913c378701ee0b964a9863bde7c7c/src/riemann/logging.clj</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">clojure</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "> its changes are destructive. Options: Basic console logging Also log to a file With rotation Rotate at a certain size Multiple files in different formats Lord help me.</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">(ns riemann.logging (:import (org.slf4j LoggerFactory) (ch.qos.logback.classic Level Logger) (ch.qos.logback.core ConsoleAppender FileAppender) (ch.qos.logback.core.util FileSize) (ch.qos.logback.core.encoder LayoutWrappingEncoder) (ch.qos.logback.core.rolling RollingFileAppender TimeBasedRollingPolicy FixedWindowRollingPolicy SizeBasedTriggeringPolicy) (ch.qos.logback.classic.encoder PatternLayoutEncoder) (net.logstash.logback JSONEventLayoutV0 JSONEventLayoutV1) (net.logstash.logback.encoder LogstashEncoder) (java.net URL) (ch.qos.logback.classic.joran JoranConfigurator)) (:require wall.hack)) (defn get-logger ([] (LoggerFactory/getLogger Logger/ROOT_LOGGER_NAME)) ([logger] (LoggerFactory/getLogger logger))) (defn- get-context [] (LoggerFactory/getILoggerFactory)) (defmulti encoder identity) (defmethod encoder :json [type] (LogstashEncoder.)) (defmethod encoder :json-event [type] (encoder :json-event-v0)) (defmethod encoder :json-event-v0 [type] (doto (LayoutWrappingEncoder.) (.setLayout (JSONEventLayoutV0.)))) (defmethod encoder :json-event-v1 [type] (doto (LayoutWrappingEncoder.) (.setLayout (JSONEventLayoutV1.)))) (defmethod encoder :riemann [type] (doto (PatternLayoutEncoder.) (.setPattern "%p [%d] %t - %c - %m%n%throwable"))) (defmethod encoder :default [type] (binding [*out* *err*] (println "invalid logging layout specified: " type)) (encoder :riemann)) (defn set-level "Set the level for the given logger, by string name. Example: (set-level Level/INFO) or (set-level \"riemann.client\", Level/DEBUG)" ([level] (. (get-logger) (setLevel level))) ([logger level] (. (get-logger logger) (setLevel level)))) (defmacro suppress "Turns off logging for the evaluation of body." [loggers & body] (let [[logger & more] (flatten [loggers])] (if logger `(let [old-level# (.getLevel (get-logger ~logger))] (try (set-level ~logger Level/ERROR) (suppress ~more ~@body) (finally (set-level ~logger old-level#)))) `(do ~@body)))) (defn configure-from-file "Configure logging from a configuration file" [context config-file] (doto (JoranConfigurator.) (.setContext context) (.doConfigure (URL. config-file)))) (defn configure-from-opts "Configure logging from opts" [logger context opts] (let [{:keys [console? console-layout file file-layout files rotate-count logsize-rotate] :or {console? true console-layout :riemann file-layout :riemann}} opts] (do (when console? 
(str "Syntax error (" file ":" line ") " msg))))) </span></div> </div></div> </td> </tr><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610204"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">dbd95e2a65d61f146144cf6660ca696c0e65b4bebea2df903bcfeca44af4e5fb</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">static-analysis-engineering/codehawk</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">bCHExtractInvariants.mli</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "> = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = CodeHawk Binary Analyzer Author : ------------------------------------------------------------------------------ The MIT License ( MIT ) Copyright ( c ) 2005 - 2019 Kestrel Technology LLC Copyright ( c ) 2020 ( c ) 2021 - 2023 Aarno Labs LLC Permission is hereby granted , free of charge , to any person obtaining a copy of this software and associated documentation files ( the " Software " ) , to deal in the Software without restriction , including without limitation the rights to use , copy , modify , merge , publish , distribute , sublicense , and/or sell copies of the Software , and to permit persons to whom the Software is furnished to do so , subject to the following conditions : The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software . THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM , OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE . = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = CodeHawk Binary Analyzer Author: Henny Sipma ------------------------------------------------------------------------------ The MIT License (MIT) Copyright (c) 2005-2019 Kestrel Technology LLC Copyright (c) 2020 Henny Sipma Copyright (c) 2021-2023 Aarno Labs LLC Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
   THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
   ============================================================================= *)

(* chlib *)
open CHAtlas

(* bchlib *)
open BCHLibTypes

val extract_ranges: function_info_int -> (string, (string, atlas_t) Hashtbl.t) Hashtbl.t -> unit

val extract_linear_equalities: function_info_int -> (string, (string, atlas_t) Hashtbl.t) Hashtbl.t -> unit

val extract_valuesets: function_info_int -> (string, (string, atlas_t) Hashtbl.t) Hashtbl.t -> unit
Repository: tolysz/ghcjs-stack
File: GlobalFlags.hs
Language: haskell
Source: https://raw.githubusercontent.com/tolysz/ghcjs-stack/83d5be83e87286d984e89635d5926702c55b9f29/special/cabal-next/cabal-install/Distribution/Client/GlobalFlags.hs

{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE RecordWildCards #-}

module Distribution.Client.GlobalFlags ( GlobalFlags(..) , defaultGlobalFlags , RepoContext(..) , withRepoContext , withRepoContext' ) where

import Distribution.Client.Types ( Repo(..), RemoteRepo(..)
) import Distribution.Compat.Semigroup import Distribution.Simple.Setup ( Flag(..), fromFlag, flagToMaybe ) import Distribution.Utils.NubList ( NubList, fromNubList ) import Distribution.Client.HttpUtils ( HttpTransport, configureTransport ) import Distribution.Verbosity ( Verbosity ) import Distribution.Simple.Utils ( info ) import Data.Maybe ( fromMaybe ) import Control.Concurrent ( MVar, newMVar, modifyMVar ) import Control.Exception ( throwIO ) import Control.Monad ( when ) import System.FilePath ( (</>) ) import Network.URI ( uriScheme, uriPath ) import Data.Map ( Map ) import qualified Data.Map as Map import GHC.Generics ( Generic ) import qualified Hackage.Security.Client as Sec import qualified Hackage.Security.Util.Path as Sec import qualified Hackage.Security.Util.Pretty as Sec import qualified Hackage.Security.Client.Repository.Cache as Sec import qualified Hackage.Security.Client.Repository.Local as Sec.Local import qualified Hackage.Security.Client.Repository.Remote as Sec.Remote import qualified Distribution.Client.Security.HTTP as Sec.HTTP -- ------------------------------------------------------------ -- * Global flags -- ------------------------------------------------------------ -- | Flags that apply at the top level, not to any sub-command. data GlobalFlags = GlobalFlags { globalVersion :: Flag Bool, globalNumericVersion :: Flag Bool, globalConfigFile :: Flag FilePath, globalSandboxConfigFile :: Flag FilePath, globalConstraintsFile :: Flag FilePath, globalRemoteRepos :: NubList RemoteRepo, -- ^ Available Hackage servers. globalCacheDir :: Flag FilePath, globalLocalRepos :: NubList FilePath, globalLogsDir :: Flag FilePath, globalWorldFile :: Flag FilePath, globalRequireSandbox :: Flag Bool, globalIgnoreSandbox :: Flag Bool, globalIgnoreExpiry :: Flag Bool, -- ^ Ignore security expiry dates globalHttpTransport :: Flag String } deriving Generic defaultGlobalFlags :: GlobalFlags defaultGlobalFlags = GlobalFlags { globalVersion = Flag False, globalNumericVersion = Flag False, globalConfigFile = mempty, globalSandboxConfigFile = mempty, globalConstraintsFile = mempty, globalRemoteRepos = mempty, globalCacheDir = mempty, globalLocalRepos = mempty, globalLogsDir = mempty, globalWorldFile = mempty, globalRequireSandbox = Flag False, globalIgnoreSandbox = Flag False, globalIgnoreExpiry = Flag False, globalHttpTransport = mempty } instance Monoid GlobalFlags where mempty = gmempty mappend = (<>) instance Semigroup GlobalFlags where (<>) = gmappend -- ------------------------------------------------------------ -- * Repo context -- ------------------------------------------------------------ -- | Access to repositories data RepoContext = RepoContext { -- | All user-specified repositories repoContextRepos :: [Repo] -- | Get the HTTP transport -- The transport will be initialized on the first call to this function . -- -- NOTE: It is important that we don't eagerly initialize the transport. -- Initializing the transport is not free, and especially in contexts where -- we don't know a-priori whether or not we need the transport (for instance -- when using cabal in "nix mode") incurring the overhead of transport initialization on _ every _ invocation ( eg ) is undesirable . , repoContextGetTransport :: IO HttpTransport -- | Get the (initialized) secure repo -- -- (the 'Repo' type itself is stateless and must remain so, because it -- must be serializable) , repoContextWithSecureRepo :: forall a. Repo -> (forall down. 
Sec.Repository down -> IO a) -> IO a -- | Should we ignore expiry times (when checking security)? , repoContextIgnoreExpiry :: Bool } -- | Wrapper around 'Repository', hiding the type argument data SecureRepo = forall down. SecureRepo (Sec.Repository down) withRepoContext :: Verbosity -> GlobalFlags -> (RepoContext -> IO a) -> IO a withRepoContext verbosity globalFlags = withRepoContext' verbosity (fromNubList (globalRemoteRepos globalFlags)) (fromNubList (globalLocalRepos globalFlags)) (fromFlag (globalCacheDir globalFlags)) (flagToMaybe (globalHttpTransport globalFlags)) (flagToMaybe (globalIgnoreExpiry globalFlags)) withRepoContext' :: Verbosity -> [RemoteRepo] -> [FilePath] -> FilePath -> Maybe String -> Maybe Bool -> (RepoContext -> IO a) -> IO a withRepoContext' verbosity remoteRepos localRepos sharedCacheDir httpTransport ignoreExpiry = \callback -> do transportRef <- newMVar Nothing let httpLib = Sec.HTTP.transportAdapter verbosity (getTransport transportRef) initSecureRepos verbosity httpLib secureRemoteRepos $ \secureRepos' -> callback RepoContext { repoContextRepos = allRemoteRepos ++ map RepoLocal localRepos , repoContextGetTransport = getTransport transportRef , repoContextWithSecureRepo = withSecureRepo secureRepos' , repoContextIgnoreExpiry = fromMaybe False ignoreExpiry } where secureRemoteRepos = [ (remote, cacheDir) | RepoSecure remote cacheDir <- allRemoteRepos ] allRemoteRepos = [ (if isSecure then RepoSecure else RepoRemote) remote cacheDir | remote <- remoteRepos , let cacheDir = sharedCacheDir </> remoteRepoName remote isSecure = remoteRepoSecure remote == Just True ] getTransport :: MVar (Maybe HttpTransport) -> IO HttpTransport getTransport transportRef = modifyMVar transportRef $ \mTransport -> do transport <- case mTransport of Just tr -> return tr Nothing -> configureTransport verbosity httpTransport return (Just transport, transport) withSecureRepo :: Map Repo SecureRepo -> Repo -> (forall down. Sec.Repository down -> IO a) -> IO a withSecureRepo secureRepos repo callback = case Map.lookup repo secureRepos of Just (SecureRepo secureRepo) -> callback secureRepo Nothing -> throwIO $ userError "repoContextWithSecureRepo: unknown repo" -- | Initialize the provided secure repositories -- -- Assumed invariant: `remoteRepoSecure` should be set for all these repos. initSecureRepos :: forall a. Verbosity -> Sec.HTTP.HttpLib -> [(RemoteRepo, FilePath)] -> (Map Repo SecureRepo -> IO a) -> IO a initSecureRepos verbosity httpLib repos callback = go Map.empty repos where go :: Map Repo SecureRepo -> [(RemoteRepo, FilePath)] -> IO a go !acc [] = callback acc go !acc ((r,cacheDir):rs) = do cachePath <- Sec.makeAbsolute $ Sec.fromFilePath cacheDir initSecureRepo verbosity httpLib r cachePath $ \r' -> go (Map.insert (RepoSecure r cacheDir) r' acc) rs -- | Initialize the given secure repo -- -- The security library has its own concept of a "local" repository, distinct from - install@ 's ; these are secure repositories , but live in the local -- file system. We use the convention that these repositories are identified by -- URLs of the form @file:/path/to/local/repo@. 
initSecureRepo :: Verbosity
               -> Sec.HTTP.HttpLib
               -> RemoteRepo            -- ^ Secure repo ('remoteRepoSecure' assumed)
               -> Sec.Path Sec.Absolute -- ^ Cache dir
               -> (SecureRepo -> IO a)  -- ^ Callback
               -> IO a
initSecureRepo verbosity httpLib RemoteRepo{..} cachePath = \callback -> do
    withRepo $ \r -> do
      requiresBootstrap <- Sec.requiresBootstrap r
      when requiresBootstrap $
        Sec.uncheckClientErrors $
          Sec.bootstrap r (map Sec.KeyId remoteRepoRootKeys)
                          (Sec.KeyThreshold (fromIntegral remoteRepoKeyThreshold))
      callback $ SecureRepo r
  where
    -- Initialize local or remote repo depending on the URI
    withRepo :: (forall down. Sec.Repository down -> IO a) -> IO a
    withRepo callback | uriScheme remoteRepoURI == "file:" = do
        dir <- Sec.makeAbsolute $ Sec.fromFilePath (uriPath remoteRepoURI)
        Sec.Local.withRepository dir cache Sec.hackageRepoLayout Sec.hackageIndexLayout logTUF callback
    withRepo callback =
        Sec.Remote.withRepository httpLib [remoteRepoURI] Sec.Remote.defaultRepoOpts cache Sec.hackageRepoLayout Sec.hackageIndexLayout logTUF callback

    cache :: Sec.Cache
    cache = Sec.Cache { cacheRoot = cachePath, cacheLayout = Sec.cabalCacheLayout }

    -- We display any TUF progress only in verbose mode, including any transient
    -- verification errors. If verification fails, then the final exception that
    -- is thrown will of course be shown.
    logTUF :: Sec.LogMessage -> IO ()
    logTUF = info verbosity . Sec.pretty
Repository: stassats/lisp-bots
File: mop.lisp
Language: lisp
Source: https://raw.githubusercontent.com/stassats/lisp-bots/09bfce724afd20c91a08acde8816be6faf5f54b2/specs/mop.lisp

((:name "mop" :description "Metaobject Protocol" :url-prefix "-MOP/" :abbreviate t) ("accessor-method-slot-definition" "accessor-method-slot-definition.html") ("add-dependent" "add-dependent.html") ("add-direct-method" "add-direct-method.html") ("add-direct-subclass" "add-direct-subclass.html") ("add-method" "add-method.html") ("allocate-instance" "allocate-instance.html") ("built-in-class" "class-built-in-class.html") ("class" "class-class.html") ("class-default-initargs" "class-default-initargs.html") ("class-direct-default-initargs" "class-direct-default-initargs.html") ("class-direct-slots" "class-direct-slots.html") ("class-direct-subclasses" "class-direct-subclasses.html") ("class-direct-superclasses" "class-direct-superclasses.html") ("class-finalized-p" "class-finalized-p.html") ("class-name" "class-name.html") ("class-precedence-list" "class-precedence-list.html") ("class-prototype" "class-prototype.html") ("class-slots" "class-slots.html") ("compute-applicable-methods" "compute-applicable-methods.html") ("compute-applicable-methods-using-classes" "compute-applicable-methods-using-classes.html") ("compute-class-precedence-list" "compute-class-precedence-list.html") ("compute-default-initargs" "compute-default-initargs.html") ("compute-discriminating-function" "compute-discriminating-function.html") ("compute-effective-method" "compute-effective-method.html") ("compute-effective-slot-definition" "compute-effective-slot-definition.html") ("compute-slots" "compute-slots.html") ("direct-slot-definition" "class-direct-slot-definition.html") ("direct-slot-definition-class" "direct-slot-definition-class.html") ("effective-slot-definition" "class-effective-slot-definition.html") ("effective-slot-definition-class" "effective-slot-definition-class.html") ("ensure-class" "ensure-class.html") ("ensure-class-using-class" "ensure-class-using-class.html") ("ensure-generic-function" "ensure-generic-function.html")
("ensure-generic-function-using-class" "ensure-generic-function-using-class.html") ("eql-specializer" "class-eql-specializer.html") ("eql-specializer-object" "eql-specializer-object.html") ("extract-lambda-list" "extract-lambda-list.html") ("extract-lambda-list" "extract-lambda-list.html") ("extract-specializer-names" "extract-specializer-names.html") ("finalize-inheritance" "finalize-inheritance.html") ("find-method-combination" "find-method-combination.html") ("forward-referenced-class" "class-forward-referenced-class.html") ("funcallable-standard-class" "class-funcallable-standard-class.html") ("funcallable-standard-instance-access" "funcallable-standard-instance-access.html") ("funcallable-standard-object" "class-funcallable-standard-object.html") ("function" "class-function.html") ("generic-function" "class-generic-function.html") ("generic-function-argument-precedence-order" "generic-function-argument-precedence-order.html") ("generic-function-declarations" "generic-function-declarations.html") ("generic-function-lambda-list" "generic-function-lambda-list.html") ("generic-function-method-class" "generic-function-method-class.html") ("generic-function-method-combination" "generic-function-method-combination.html") ("generic-function-methods" "generic-function-methods.html") ("generic-function-name" "generic-function-name.html") ("intern-eql-specializer" "intern-eql-specializer.html") ("make-instance" "make-instance.html") ("make-method-lambda" "make-method-lambda.html") ("map-dependents" "map-dependents.html") ("metaobject" "class-metaobject.html") ("method" "class-method.html") ("method-combination" "class-method-combination.html") ("method-function" "method-function.html") ("method-generic-function" "method-generic-function.html") ("method-lambda-list" "method-lambda-list.html") ("method-qualifiers" "method-qualifiers.html") ("method-specializers" "method-specializers.html") ("reader-method-class" "reader-method-class.html") ("remove-dependent" "remove-dependent.html") ("remove-direct-method" "remove-direct-method.html") ("remove-direct-subclass" "remove-direct-subclass.html") ("remove-method" "remove-method.html") ("set-funcallable-instance-function" "set-funcallable-instance-function.html") ("setf-class-name" "setf-class-name.html") ("setf-generic-function-name" "setf-generic-function-name.html") ("setf-slot-value-using-class" "setf-slot-value-using-class.html") ("slot-boundp-using-class" "slot-boundp-using-class.html") ("slot-definition" "class-slot-definition.html") ("slot-definition-allocation" "slot-definition-allocation.html") ("slot-definition-initargs" "slot-definition-initargs.html") ("slot-definition-initform" "slot-definition-initform.html") ("slot-definition-initfunction" "slot-definition-initfunction.html") ("slot-definition-location" "slot-definition-location.html") ("slot-definition-name" "slot-definition-name.html") ("slot-definition-readers" "slot-definition-readers.html") ("slot-definition-type" "slot-definition-type.html") ("slot-definition-writers" "slot-definition-writers.html") ("slot-makunbound-using-class" "slot-makunbound-using-class.html") ("slot-value-using-class" "slot-value-using-class.html") ("specializer" "class-specializer.html") ("specializer" "class-specializer.html") ("specializer-direct-generic-functions" "specializer-direct-generic-functions.html") ("specializer-direct-methods" "specializer-direct-methods.html") ("standard-accessor-method" "class-standard-accessor-method.html") ("standard-class" "class-standard-class.html") 
("standard-direct-slot-definition" "class-standard-direct-slot-definition.html") ("standard-effective-slot-definition" "class-standard-effective-slot-definition.html") ("standard-generic-function" "class-standard-generic-function.html") ("standard-instance-access" "standard-instance-access.html") ("standard-method" "class-standard-method.html") ("standard-object" "class-standard-object.html") ("standard-reader-method" "class-standard-reader-method.html") ("standard-slot-definition" "class-standard-slot-definition.html") ("standard-writer-method" "class-standard-writer-method.html") ("t" "class-t.html") ("update-dependent" "update-dependent.html") ("validate-superclass" "validate-superclass.html") ("writer-method-class" "writer-method-class.html")) </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/stassats/lisp-bots/09bfce724afd20c91a08acde8816be6faf5f54b2/specs/mop.lisp</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">lisp</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "></span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">((:name "mop" :description "Metaobject Protocol" :url-prefix "-MOP/" :abbreviate t) ("accessor-method-slot-definition" "accessor-method-slot-definition.html") ("add-dependent" "add-dependent.html") ("add-direct-method" "add-direct-method.html") ("add-direct-subclass" "add-direct-subclass.html") ("add-method" "add-method.html") ("allocate-instance" "allocate-instance.html") ("built-in-class" "class-built-in-class.html") ("class" "class-class.html") ("class-default-initargs" "class-default-initargs.html") ("class-direct-default-initargs" "class-direct-default-initargs.html") ("class-direct-slots" "class-direct-slots.html") ("class-direct-subclasses" "class-direct-subclasses.html") ("class-direct-superclasses" "class-direct-superclasses.html") ("class-finalized-p" "class-finalized-p.html") ("class-name" "class-name.html") ("class-precedence-list" "class-precedence-list.html") ("class-prototype" "class-prototype.html") ("class-slots" "class-slots.html") ("compute-applicable-methods" "compute-applicable-methods.html") ("compute-applicable-methods-using-classes" "compute-applicable-methods-using-classes.html") ("compute-class-precedence-list" "compute-class-precedence-list.html") ("compute-default-initargs" "compute-default-initargs.html") ("compute-discriminating-function" "compute-discriminating-function.html") ("compute-effective-method" "compute-effective-method.html") ("compute-effective-slot-definition" "compute-effective-slot-definition.html") ("compute-slots" "compute-slots.html") ("direct-slot-definition" "class-direct-slot-definition.html") ("direct-slot-definition-class" "direct-slot-definition-class.html") ("effective-slot-definition" "class-effective-slot-definition.html") ("effective-slot-definition-class" "effective-slot-definition-class.html") ("ensure-class" "ensure-class.html") ("ensure-class-using-class" 
"ensure-class-using-class.html") ("ensure-generic-function" "ensure-generic-function.html") ("ensure-generic-function-using-class" "ensure-generic-function-using-class.html") ("eql-specializer" "class-eql-specializer.html") ("eql-specializer-object" "eql-specializer-object.html") ("extract-lambda-list" "extract-lambda-list.html") ("extract-lambda-list" "extract-lambda-list.html") ("extract-specializer-names" "extract-specializer-names.html") ("finalize-inheritance" "finalize-inheritance.html") ("find-method-combination" "find-method-combination.html") ("forward-referenced-class" "class-forward-referenced-class.html") ("funcallable-standard-class" "class-funcallable-standard-class.html") ("funcallable-standard-instance-access" "funcallable-standard-instance-access.html") ("funcallable-standard-object" "class-funcallable-standard-object.html") ("function" "class-function.html") ("generic-function" "class-generic-function.html") ("generic-function-argument-precedence-order" "generic-function-argument-precedence-order.html") ("generic-function-declarations" "generic-function-declarations.html") ("generic-function-lambda-list" "generic-function-lambda-list.html") ("generic-function-method-class" "generic-function-method-class.html") ("generic-function-method-combination" "generic-function-method-combination.html") ("generic-function-methods" "generic-function-methods.html") ("generic-function-name" "generic-function-name.html") ("intern-eql-specializer" "intern-eql-specializer.html") ("make-instance" "make-instance.html") ("make-method-lambda" "make-method-lambda.html") ("map-dependents" "map-dependents.html") ("metaobject" "class-metaobject.html") ("method" "class-method.html") ("method-combination" "class-method-combination.html") ("method-function" "method-function.html") ("method-generic-function" "method-generic-function.html") ("method-lambda-list" "method-lambda-list.html") ("method-qualifiers" "method-qualifiers.html") ("method-specializers" "method-specializers.html") ("reader-method-class" "reader-method-class.html") ("remove-dependent" "remove-dependent.html") ("remove-direct-method" "remove-direct-method.html") ("remove-direct-subclass" "remove-direct-subclass.html") ("remove-method" "remove-method.html") ("set-funcallable-instance-function" "set-funcallable-instance-function.html") ("setf-class-name" "setf-class-name.html") ("setf-generic-function-name" "setf-generic-function-name.html") ("setf-slot-value-using-class" "setf-slot-value-using-class.html") ("slot-boundp-using-class" "slot-boundp-using-class.html") ("slot-definition" "class-slot-definition.html") ("slot-definition-allocation" "slot-definition-allocation.html") ("slot-definition-initargs" "slot-definition-initargs.html") ("slot-definition-initform" "slot-definition-initform.html") ("slot-definition-initfunction" "slot-definition-initfunction.html") ("slot-definition-location" "slot-definition-location.html") ("slot-definition-name" "slot-definition-name.html") ("slot-definition-readers" "slot-definition-readers.html") ("slot-definition-type" "slot-definition-type.html") ("slot-definition-writers" "slot-definition-writers.html") ("slot-makunbound-using-class" "slot-makunbound-using-class.html") ("slot-value-using-class" "slot-value-using-class.html") ("specializer" "class-specializer.html") ("specializer" "class-specializer.html") ("specializer-direct-generic-functions" "specializer-direct-generic-functions.html") ("specializer-direct-methods" "specializer-direct-methods.html") ("standard-accessor-method" 
"class-standard-accessor-method.html") ("standard-class" "class-standard-class.html") ("standard-direct-slot-definition" "class-standard-direct-slot-definition.html") ("standard-effective-slot-definition" "class-standard-effective-slot-definition.html") ("standard-generic-function" "class-standard-generic-function.html") ("standard-instance-access" "standard-instance-access.html") ("standard-method" "class-standard-method.html") ("standard-object" "class-standard-object.html") ("standard-reader-method" "class-standard-reader-method.html") ("standard-slot-definition" "class-standard-slot-definition.html") ("standard-writer-method" "class-standard-writer-method.html") ("t" "class-t.html") ("update-dependent" "update-dependent.html") ("validate-superclass" "validate-superclass.html") ("writer-method-class" "writer-method-class.html")) </span></div> </div></div> </td> </tr><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610207"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">5901ac1b622ad79f188ada09cd1a41f8e092b16fa144a7097474c5f0dcf4cd0b</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">kowainik/cake-slayer</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">Random.hs</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">{- | Utilities for generating random strings. -} module CakeSlayer.Random ( mkRandomDigits , mkRandomString ) where import Relude.Unsafe ((!!)) import System.Random (newStdGen, randomRIO, randomRs) -- | Generates @n@ random digits. mkRandomDigits :: (MonadIO m) => Int -> m Text mkRandomDigits len = toText . take len . randomRs ('0', '9') <$> liftIO newStdGen | Make a random string comprised of the following letters of a given length : 1 . Lowercase characters @[a .. z]@ 2 . Uppercase characters @[A .. Z]@ 3 . Digits @[0 .. 9]@. Returns empty string if given length is less than zero . 1. Lowercase characters @[a..z]@ 2. Uppercase characters @[A..Z]@ 3. Digits @[0..9]@. Returns empty string if given length is less than zero. -} mkRandomString :: MonadIO m => Int -- ^ Length of the string -> m Text -- ^ Generated string of the given length mkRandomString n = liftIO $ toText <$> replicateM n peekRandomChar where alphabet :: String alphabet = ['A'..'Z'] ++ ['a'..'z'] ++ ['0'..'9'] alphabetLength :: Int alphabetLength = length alphabet peekRandomChar :: IO Char peekRandomChar = do i <- randomRIO (0, alphabetLength - 1) pure $ alphabet !! 
Repository: startalkIM/ejabberd
File: node_public.erl
Language: erlang
Source: https://raw.githubusercontent.com/startalkIM/ejabberd/718d86cd2f5681099fad14dab5f2541ddc612c8b/src/node_public.erl

%%%----------------------------------------------------------------------
%%% File    : node_public.erl
%%% Author  :
%%% Purpose :
%%% Created : 1 Dec 2007 by
%%%
%%% ejabberd, Copyright (C) 2002-2016 ProcessOne
%%%
%%% This program is free software; you can redistribute it and/or
%%% modify it under the terms of the GNU General Public License as
%%% published by the Free Software Foundation; either version 2 of the
%%% License, or (at your option) any later version.
%%% %%% This program is distributed in the hope that it will be useful, %%% but WITHOUT ANY WARRANTY; without even the implied warranty of %%% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU %%% General Public License for more details. %%% You should have received a copy of the GNU General Public License along with this program ; if not , write to the Free Software Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston , USA . %%% %%%---------------------------------------------------------------------- -module(node_public). -behaviour(gen_pubsub_node). -author(''). -include("pubsub.hrl"). -include("jlib.hrl"). -export([init/3, terminate/2, options/0, features/0, create_node_permission/6, create_node/2, delete_node/1, purge_node/2, subscribe_node/8, unsubscribe_node/4, publish_item/7, delete_item/4, remove_extra_items/3, get_entity_affiliations/2, get_node_affiliations/1, get_affiliation/2, set_affiliation/3, get_entity_subscriptions/2, get_node_subscriptions/1, get_subscriptions/2, set_subscriptions/4, get_pending_nodes/2, get_states/1, get_state/2, set_state/1, get_items/7, get_items/3, get_item/7, get_item/2, set_item/1, get_item_name/3, node_to_path/1, path_to_node/1]). init(Host, ServerHost, Opts) -> node_flat:init(Host, ServerHost, Opts). terminate(Host, ServerHost) -> node_flat:terminate(Host, ServerHost). options() -> [{deliver_payloads, true}, {notify_config, false}, {notify_delete, false}, {notify_retract, true}, {purge_offline, false}, {persist_items, true}, {max_items, ?MAXITEMS}, {subscribe, true}, {access_model, open}, {roster_groups_allowed, []}, {publish_model, publishers}, {notification_type, headline}, {max_payload_size, ?MAX_PAYLOAD_SIZE}, {send_last_published_item, never}, {deliver_notifications, true}, {presence_based_delivery, false}, {itemreply, none}]. features() -> [<<"create-nodes">>, <<"delete-nodes">>, <<"delete-items">>, <<"instant-nodes">>, <<"outcast-affiliation">>, <<"persistent-items">>, <<"publish">>, <<"purge-nodes">>, <<"retract-items">>, <<"retrieve-affiliations">>, <<"retrieve-items">>, <<"retrieve-subscriptions">>, <<"subscribe">>, <<"subscription-notifications">>]. create_node_permission(Host, ServerHost, Node, ParentNode, Owner, Access) -> node_flat:create_node_permission(Host, ServerHost, Node, ParentNode, Owner, Access). create_node(Nidx, Owner) -> node_flat:create_node(Nidx, Owner). delete_node(Removed) -> node_flat:delete_node(Removed). subscribe_node(Nidx, Sender, Subscriber, AccessModel, SendLast, PresenceSubscription, RosterGroup, Options) -> node_flat:subscribe_node(Nidx, Sender, Subscriber, AccessModel, SendLast, PresenceSubscription, RosterGroup, Options). unsubscribe_node(Nidx, Sender, Subscriber, SubId) -> node_flat:unsubscribe_node(Nidx, Sender, Subscriber, SubId). publish_item(Nidx, Publisher, Model, MaxItems, ItemId, Payload, PubOpts) -> node_flat:publish_item(Nidx, Publisher, Model, MaxItems, ItemId, Payload, PubOpts). remove_extra_items(Nidx, MaxItems, ItemIds) -> node_flat:remove_extra_items(Nidx, MaxItems, ItemIds). delete_item(Nidx, Publisher, PublishModel, ItemId) -> node_flat:delete_item(Nidx, Publisher, PublishModel, ItemId). purge_node(Nidx, Owner) -> node_flat:purge_node(Nidx, Owner). get_entity_affiliations(Host, Owner) -> node_flat:get_entity_affiliations(Host, Owner). get_node_affiliations(Nidx) -> node_flat:get_node_affiliations(Nidx). get_affiliation(Nidx, Owner) -> node_flat:get_affiliation(Nidx, Owner). 
set_affiliation(Nidx, Owner, Affiliation) -> node_flat:set_affiliation(Nidx, Owner, Affiliation). get_entity_subscriptions(Host, Owner) -> node_flat:get_entity_subscriptions(Host, Owner). get_node_subscriptions(Nidx) -> node_flat:get_node_subscriptions(Nidx). get_subscriptions(Nidx, Owner) -> node_flat:get_subscriptions(Nidx, Owner). set_subscriptions(Nidx, Owner, Subscription, SubId) -> node_flat:set_subscriptions(Nidx, Owner, Subscription, SubId). get_pending_nodes(Host, Owner) -> node_flat:get_pending_nodes(Host, Owner). get_states(Nidx) -> node_flat:get_states(Nidx). get_state(Nidx, JID) -> node_flat:get_state(Nidx, JID). set_state(State) -> node_flat:set_state(State). get_items(Nidx, From, RSM) -> node_flat:get_items(Nidx, From, RSM). get_items(Nidx, JID, AccessModel, PresenceSubscription, RosterGroup, SubId, RSM) -> node_flat:get_items(Nidx, JID, AccessModel, PresenceSubscription, RosterGroup, SubId, RSM). get_item(Nidx, ItemId) -> node_flat:get_item(Nidx, ItemId). get_item(Nidx, ItemId, JID, AccessModel, PresenceSubscription, RosterGroup, SubId) -> node_flat:get_item(Nidx, ItemId, JID, AccessModel, PresenceSubscription, RosterGroup, SubId). set_item(Item) -> node_flat:set_item(Item). get_item_name(Host, Node, Id) -> node_flat:get_item_name(Host, Node, Id). node_to_path(Node) -> node_flat:node_to_path(Node). path_to_node(Path) -> node_flat:path_to_node(Path). </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/startalkIM/ejabberd/718d86cd2f5681099fad14dab5f2541ddc612c8b/src/node_public.erl</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">erlang</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">---------------------------------------------------------------------- Purpose : This program is free software; you can redistribute it and/or License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. ----------------------------------------------------------------------</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "> File : node_public.erl Author : Created : 1 Dec 2007 by ejabberd , Copyright ( C ) 2002 - 2016 ProcessOne modify it under the terms of the GNU General Public License as published by the Free Software Foundation ; either version 2 of the You should have received a copy of the GNU General Public License along with this program ; if not , write to the Free Software Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston , USA . -module(node_public). -behaviour(gen_pubsub_node). -author(''). -include("pubsub.hrl"). -include("jlib.hrl"). 
-export([init/3, terminate/2, options/0, features/0, create_node_permission/6, create_node/2, delete_node/1, purge_node/2, subscribe_node/8, unsubscribe_node/4, publish_item/7, delete_item/4, remove_extra_items/3, get_entity_affiliations/2, get_node_affiliations/1, get_affiliation/2, set_affiliation/3, get_entity_subscriptions/2, get_node_subscriptions/1, get_subscriptions/2, set_subscriptions/4, get_pending_nodes/2, get_states/1, get_state/2, set_state/1, get_items/7, get_items/3, get_item/7, get_item/2, set_item/1, get_item_name/3, node_to_path/1, path_to_node/1]). init(Host, ServerHost, Opts) -> node_flat:init(Host, ServerHost, Opts). terminate(Host, ServerHost) -> node_flat:terminate(Host, ServerHost). options() -> [{deliver_payloads, true}, {notify_config, false}, {notify_delete, false}, {notify_retract, true}, {purge_offline, false}, {persist_items, true}, {max_items, ?MAXITEMS}, {subscribe, true}, {access_model, open}, {roster_groups_allowed, []}, {publish_model, publishers}, {notification_type, headline}, {max_payload_size, ?MAX_PAYLOAD_SIZE}, {send_last_published_item, never}, {deliver_notifications, true}, {presence_based_delivery, false}, {itemreply, none}]. features() -> [<<"create-nodes">>, <<"delete-nodes">>, <<"delete-items">>, <<"instant-nodes">>, <<"outcast-affiliation">>, <<"persistent-items">>, <<"publish">>, <<"purge-nodes">>, <<"retract-items">>, <<"retrieve-affiliations">>, <<"retrieve-items">>, <<"retrieve-subscriptions">>, <<"subscribe">>, <<"subscription-notifications">>]. create_node_permission(Host, ServerHost, Node, ParentNode, Owner, Access) -> node_flat:create_node_permission(Host, ServerHost, Node, ParentNode, Owner, Access). create_node(Nidx, Owner) -> node_flat:create_node(Nidx, Owner). delete_node(Removed) -> node_flat:delete_node(Removed). subscribe_node(Nidx, Sender, Subscriber, AccessModel, SendLast, PresenceSubscription, RosterGroup, Options) -> node_flat:subscribe_node(Nidx, Sender, Subscriber, AccessModel, SendLast, PresenceSubscription, RosterGroup, Options). unsubscribe_node(Nidx, Sender, Subscriber, SubId) -> node_flat:unsubscribe_node(Nidx, Sender, Subscriber, SubId). publish_item(Nidx, Publisher, Model, MaxItems, ItemId, Payload, PubOpts) -> node_flat:publish_item(Nidx, Publisher, Model, MaxItems, ItemId, Payload, PubOpts). remove_extra_items(Nidx, MaxItems, ItemIds) -> node_flat:remove_extra_items(Nidx, MaxItems, ItemIds). delete_item(Nidx, Publisher, PublishModel, ItemId) -> node_flat:delete_item(Nidx, Publisher, PublishModel, ItemId). purge_node(Nidx, Owner) -> node_flat:purge_node(Nidx, Owner). get_entity_affiliations(Host, Owner) -> node_flat:get_entity_affiliations(Host, Owner). get_node_affiliations(Nidx) -> node_flat:get_node_affiliations(Nidx). get_affiliation(Nidx, Owner) -> node_flat:get_affiliation(Nidx, Owner). set_affiliation(Nidx, Owner, Affiliation) -> node_flat:set_affiliation(Nidx, Owner, Affiliation). get_entity_subscriptions(Host, Owner) -> node_flat:get_entity_subscriptions(Host, Owner). get_node_subscriptions(Nidx) -> node_flat:get_node_subscriptions(Nidx). get_subscriptions(Nidx, Owner) -> node_flat:get_subscriptions(Nidx, Owner). set_subscriptions(Nidx, Owner, Subscription, SubId) -> node_flat:set_subscriptions(Nidx, Owner, Subscription, SubId). get_pending_nodes(Host, Owner) -> node_flat:get_pending_nodes(Host, Owner). get_states(Nidx) -> node_flat:get_states(Nidx). get_state(Nidx, JID) -> node_flat:get_state(Nidx, JID). set_state(State) -> node_flat:set_state(State). 
get_items(Nidx, From, RSM) -> node_flat:get_items(Nidx, From, RSM). get_items(Nidx, JID, AccessModel, PresenceSubscription, RosterGroup, SubId, RSM) -> node_flat:get_items(Nidx, JID, AccessModel, PresenceSubscription, RosterGroup, SubId, RSM). get_item(Nidx, ItemId) -> node_flat:get_item(Nidx, ItemId). get_item(Nidx, ItemId, JID, AccessModel, PresenceSubscription, RosterGroup, SubId) -> node_flat:get_item(Nidx, ItemId, JID, AccessModel, PresenceSubscription, RosterGroup, SubId). set_item(Item) -> node_flat:set_item(Item). get_item_name(Host, Node, Id) -> node_flat:get_item_name(Host, Node, Id). node_to_path(Node) -> node_flat:node_to_path(Node). path_to_node(Path) -> node_flat:path_to_node(Path). </span></div> </div></div> </td> </tr><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610209"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">e96e8c5eea53204a2c9a461284fcf2aa2dafe422aae74abe9ce785496dc3c9fb</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">CatalaLang/catala</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">from_dcalc.mli</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "> This file is part of the Catala compiler , a specification language for tax and social benefits computation rules . Copyright ( C ) 2020 , contributor : < > Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may not use this file except in compliance with the License . You may obtain a copy of the License at -2.0 Unless required by applicable law or agreed to in writing , software distributed under the License is distributed on an " AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied . See the License for the specific language governing permissions and limitations under the License . and social benefits computation rules. Copyright (C) 2020 Inria, contributor: Denis Merigoux <> Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at -2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. *) val translate_program_with_exceptions : 'm Dcalc.Ast.program -> 'm Ast.program (** Translation from the default calculus to the lambda calculus. This translation uses exceptions to handle empty default terms. *) val translate_program_without_exceptions : 'm Dcalc.Ast.program -> 'm Ast.program * Translation from the default calculus to the lambda calculus . This translation uses an option monad to handle empty defaults terms . This transformation is one piece to permit to compile toward legacy languages that does not contains exceptions . translation uses an option monad to handle empty defaults terms. 
This transformation is one piece to permit to compile toward legacy languages that does not contains exceptions. *) </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/CatalaLang/catala/4f059883c1b30d5c4efde77cecbd977704cbf972/compiler/lcalc/from_dcalc.mli</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">ocaml</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">* Translation from the default calculus to the lambda calculus. This translation uses exceptions to handle empty default terms. </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "> This file is part of the Catala compiler , a specification language for tax and social benefits computation rules . Copyright ( C ) 2020 , contributor : < > Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may not use this file except in compliance with the License . You may obtain a copy of the License at -2.0 Unless required by applicable law or agreed to in writing , software distributed under the License is distributed on an " AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied . See the License for the specific language governing permissions and limitations under the License . and social benefits computation rules. Copyright (C) 2020 Inria, contributor: Denis Merigoux <> Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at -2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. *) val translate_program_with_exceptions : 'm Dcalc.Ast.program -> 'm Ast.program val translate_program_without_exceptions : 'm Dcalc.Ast.program -> 'm Ast.program * Translation from the default calculus to the lambda calculus . This translation uses an option monad to handle empty defaults terms . This transformation is one piece to permit to compile toward legacy languages that does not contains exceptions . translation uses an option monad to handle empty defaults terms. This transformation is one piece to permit to compile toward legacy languages that does not contains exceptions. 
*) </span></div> </div></div> </td> </tr><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610210"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">9993a7adcba57a62452659ffdeacf2d79bfd08cd46acc1b124c7eb3a293d3444</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">nikita-volkov/rebase</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">Strict.hs</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">module Rebase.Data.STRef.Strict ( module Data.STRef.Strict ) where import Data.STRef.Strict </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/nikita-volkov/rebase/7c77a0443e80bdffd4488a4239628177cac0761b/library/Rebase/Data/STRef/Strict.hs</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">haskell</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "></span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">module Rebase.Data.STRef.Strict ( module Data.STRef.Strict ) where import Data.STRef.Strict </span></div> </div></div> </td> </tr><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610211"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">2c5377437517b03d96690384e664cf3266aa582b219a6f0b0d342cbca83a1520</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">fpco/stack-docker-image-build</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">Main.hs</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">{-# LANGUAGE OverloadedStrings #-} module Main (main) where import System.Process.Typed import System.FilePath import Data.Foldable import System.Directory import System.Environment (getArgs) import Control.Exception import Control.Monad import qualified Data.Text as T import qualified Data.Text.Lazy as TL import Data.Text.Lazy.Encoding (decodeUtf8) import Data.Yaml import qualified Data.HashMap.Strict as HashMap import qualified Data.Vector as V import Data.Maybe parseStackYaml :: IO ([String], [String]) parseStackYaml = do val 
<- decodeFileEither "stack.yaml" >>= either throwIO return let buildFirst = fromMaybe [] $ do Object o1 <- Just val Object o2 <- HashMap.lookup "x-stack-docker-image-build" o1 Array vals <- HashMap.lookup "build-first" o2 Just [T.unpack dep | String dep <- V.toList vals] extraDeps = fromMaybe [] $ do Object o <- Just val Array vals <- HashMap.lookup "extra-deps" o Just [T.unpack dep | String dep <- V.toList vals] return (buildFirst, extraDeps) stack :: [String] -> ProcessConfig () () () stack args = proc "stack" $ ["--no-install-ghc", "--system-ghc"] ++ args runStack :: [String] -> IO () runStack = runProcess_ . stack readStack :: [String] -> IO String readStack = fmap (TL.unpack . decodeUtf8 . fst) . readProcess_ . stack getDir :: String -> IO FilePath getDir flag = do dirRaw <- readStack ["path", flag] return $ takeWhile (/= '\n') dirRaw getDBDir :: String -> IO FilePath getDBDir typ = getDir $ concat ["--", typ, "-pkg-db"] getBinDir :: String -> IO FilePath getBinDir typ = do dir <- getDir $ concat ["--", typ, "-install-root"] return $ dir </> "bin" main :: IO () main = do args <- getArgs (buildFirst, deps) <- parseStackYaml forM_ buildFirst $ \pkg -> do putStrLn $ "Building " ++ pkg ++ " from build-first" runStack $ "build" : words pkg ++ args unless (null deps) $ do putStrLn "Building extra-deps" runStack $ "build" : deps ++ args putStrLn "Performing build local" runStack $ "build" : args globaldb <- getDBDir "global" forM_ (words "snapshot local") $ \typ -> do bindir <- getBinDir typ bindirexists <- doesDirectoryExist bindir bincontents <- if bindirexists then getDirectoryContents bindir else return [] forM_ bincontents $ \file -> do let fp = bindir </> file exists <- doesFileExist fp when exists $ do putStrLn $ "Linking " ++ fp runProcess_ $ proc "ln" [fp, "/usr/local/bin" </> file] dbdir <- getDBDir typ dbdirexists <- doesDirectoryExist dbdir dbcontents <- if dbdirexists then getDirectoryContents dbdir else return [] forM_ dbcontents $ \file -> when (takeExtension file == ".conf") $ do let fp = dbdir </> file putStrLn $ "Registering: " ++ file runStack [ "exec" , "--" , "ghc-pkg" , "register" , fp , "--package-db" , globaldb , "--force" ] stackDir <- getAppUserDataDirectory "stack" stackContents <- getDirectoryContents stackDir let toKeep "." = True toKeep ".." = True toKeep "snapshots" = True toKeep _ = False forM_ (filter (not . 
toKeep) stackContents) $ \x -> runProcess_ $ proc "rm" ["-rf", stackDir </> x] </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/fpco/stack-docker-image-build/8aa2ff15e7a5807e295f6ff6b2f70f74f5d08722/app/Main.hs</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">haskell</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "># LANGUAGE OverloadedStrings #</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">module Main (main) where import System.Process.Typed import System.FilePath import Data.Foldable import System.Directory import System.Environment (getArgs) import Control.Exception import Control.Monad import qualified Data.Text as T import qualified Data.Text.Lazy as TL import Data.Text.Lazy.Encoding (decodeUtf8) import Data.Yaml import qualified Data.HashMap.Strict as HashMap import qualified Data.Vector as V import Data.Maybe parseStackYaml :: IO ([String], [String]) parseStackYaml = do val <- decodeFileEither "stack.yaml" >>= either throwIO return let buildFirst = fromMaybe [] $ do Object o1 <- Just val Object o2 <- HashMap.lookup "x-stack-docker-image-build" o1 Array vals <- HashMap.lookup "build-first" o2 Just [T.unpack dep | String dep <- V.toList vals] extraDeps = fromMaybe [] $ do Object o <- Just val Array vals <- HashMap.lookup "extra-deps" o Just [T.unpack dep | String dep <- V.toList vals] return (buildFirst, extraDeps) stack :: [String] -> ProcessConfig () () () stack args = proc "stack" $ ["--no-install-ghc", "--system-ghc"] ++ args runStack :: [String] -> IO () runStack = runProcess_ . stack readStack :: [String] -> IO String readStack = fmap (TL.unpack . decodeUtf8 . fst) . readProcess_ . 
stack getDir :: String -> IO FilePath getDir flag = do dirRaw <- readStack ["path", flag] return $ takeWhile (/= '\n') dirRaw getDBDir :: String -> IO FilePath getDBDir typ = getDir $ concat ["--", typ, "-pkg-db"] getBinDir :: String -> IO FilePath getBinDir typ = do dir <- getDir $ concat ["--", typ, "-install-root"] return $ dir </> "bin" main :: IO () main = do args <- getArgs (buildFirst, deps) <- parseStackYaml forM_ buildFirst $ \pkg -> do putStrLn $ "Building " ++ pkg ++ " from build-first" runStack $ "build" : words pkg ++ args unless (null deps) $ do putStrLn "Building extra-deps" runStack $ "build" : deps ++ args putStrLn "Performing build local" runStack $ "build" : args globaldb <- getDBDir "global" forM_ (words "snapshot local") $ \typ -> do bindir <- getBinDir typ bindirexists <- doesDirectoryExist bindir bincontents <- if bindirexists then getDirectoryContents bindir else return [] forM_ bincontents $ \file -> do let fp = bindir </> file exists <- doesFileExist fp when exists $ do putStrLn $ "Linking " ++ fp runProcess_ $ proc "ln" [fp, "/usr/local/bin" </> file] dbdir <- getDBDir typ dbdirexists <- doesDirectoryExist dbdir dbcontents <- if dbdirexists then getDirectoryContents dbdir else return [] forM_ dbcontents $ \file -> when (takeExtension file == ".conf") $ do let fp = dbdir </> file putStrLn $ "Registering: " ++ file runStack [ "exec" , "--" , "ghc-pkg" , "register" , fp , "--package-db" , globaldb , "--force" ] stackDir <- getAppUserDataDirectory "stack" stackContents <- getDirectoryContents stackDir let toKeep "." = True toKeep ".." = True toKeep "snapshots" = True toKeep _ = False forM_ (filter (not . toKeep) stackContents) $ \x -> runProcess_ $ proc "rm" ["-rf", stackDir </> x] </span></div> </div></div> </td> </tr><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610212"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">52aaf9bbbb665fac4c939b34fdbecf3f098567139b92bfe5f9deb1c2b6833d0d</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">lmj/lparallel</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">kernel-test.lisp</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "> Copyright ( c ) 2011 - 2012 , . All rights reserved . ;;; ;;; Redistribution and use in source and binary forms, with or without ;;; modification, are permitted provided that the following conditions ;;; are met: ;;; ;;; * Redistributions of source code must retain the above copyright ;;; notice, this list of conditions and the following disclaimer. ;;; ;;; * Redistributions in binary form must reproduce the above ;;; copyright notice, this list of conditions and the following ;;; disclaimer in the documentation and/or other materials provided ;;; with the distribution. ;;; ;;; * Neither the name of the project nor the names of its ;;; contributors may be used to endorse or promote products derived ;;; from this software without specific prior written permission. 
;;; ;;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT ;;; LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR ;;; A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , ;;; DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT ;;; (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE ;;; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. (in-package #:lparallel-test) (full-test kernel-test (let ((channel (make-channel))) (mapcar (lambda (x) (submit-task channel (lambda () (* x x)))) (list 5 6 7 8)) (is (equal (list 25 36 49 64) (sort (collect-n 4 (receive-result channel)) '<))))) (full-test no-kernel-test (let ((*kernel* nil)) (signals no-kernel-error (submit-task (make-channel) (lambda ()))))) (base-test end-kernel-test (repeat 10 (loop for n from 1 below 32 do (with-temp-kernel (n) (is (= 1 1)))))) (full-test many-task-test (let ((channel (make-channel))) (repeat 1000 (submit-task channel (lambda ())) (is (null (receive-result channel)))) (repeat 1000 (submit-task channel (lambda ()))) (repeat 1000 (is (null (receive-result channel)))) (repeat 1000 (let ((*task-priority* :low)) (submit-task channel (lambda ()))) (is (null (receive-result channel)))) (repeat 1000 (let ((*task-priority* :low)) (submit-task channel (lambda ())))) (repeat 1000 (is (null (receive-result channel)))))) #-lparallel.without-kill (base-test kill-during-end-kernel-test (let* ((*kernel* (make-kernel 2)) (kernel *kernel*) (out *standard-output*) (channel (make-channel)) (handled (make-queue)) (finished (make-queue))) (task-handler-bind ((error #'invoke-transfer-error)) (submit-task channel (lambda () (setf *error-output* (make-broadcast-stream)) (infinite-loop)))) (with-thread () (block top (handler-bind ((task-killed-error (lambda (e) (declare (ignore e)) (push-queue t handled) (return-from top)))) (receive-result channel)))) (sleep 0.2) (let ((thread (with-thread () (let ((*standard-output* out)) (let ((*kernel* kernel)) (end-kernel :wait t) (push-queue t finished)))))) (sleep 0.2) (is (null (peek-queue finished))) (is (eql 1 (kill-tasks :default))) (sleep 0.2) (is (eq t (peek-queue handled))) (is (eq t (peek-queue finished))) (is (not (null thread)))))) (full-test channel-capacity-test (let ((channel (make-channel :fixed-capacity 1))) (submit-task channel (lambda () 3)) (submit-task channel (lambda () 4)) (submit-task channel (lambda () 5)) (is (equal '(3 4 5) avoid sbcl warning (locally (declare (notinline sort)) (sort (list (receive-result channel) (receive-result channel) (receive-result channel)) #'<)))))) (full-test try-receive-test (let ((channel (make-channel))) (multiple-value-bind (a b) (try-receive-result channel) (is (null a)) (is (null b))) (submit-task channel (lambda () 3)) (sleep 0.1) (multiple-value-bind (a b) (try-receive-result channel) (is (= 3 a)) (is (eq t b))) (multiple-value-bind (a b) (try-receive-result channel) (is (null a)) (is (null b))))) #-lparallel.without-bordeaux-threads-condition-wait-timeout (full-test try-receive-timeout-test (let ((channel (make-channel))) (multiple-value-bind (a b) (try-receive-result channel :timeout 0.1) (is (null a)) (is (null 
b))) (submit-task channel (lambda () 3)) (sleep 0.1) (multiple-value-bind (a b) (try-receive-result channel :timeout 0.1) (is (= 3 a)) (is (eq t b))) (multiple-value-bind (a b) (try-receive-result channel :timeout 0.1) (is (null a)) (is (null b))))) (full-test kernel-client-error-test (task-handler-bind ((client-error #'invoke-transfer-error)) (let ((channel (make-channel))) (submit-task channel (lambda () (error 'client-error))) (signals client-error (receive-result channel)))) (task-handler-bind ((error (lambda (e) (declare (ignore e)) (invoke-restart 'transfer-error (make-condition 'foo-error))))) (let ((channel (make-channel))) (submit-task channel (lambda () (error 'client-error))) (signals foo-error (receive-result channel)))) (task-handler-bind ((error (lambda (e) (declare (ignore e)) (invoke-restart 'transfer-error 'foo-error)))) (let ((channel (make-channel))) (submit-task channel (lambda () (error 'client-error))) (signals foo-error (receive-result channel))))) (full-test user-restart-test (task-handler-bind ((foo-error (lambda (e) (declare (ignore e)) (invoke-restart 'eleven)))) (let ((channel (make-channel))) (submit-task channel (lambda () (restart-case (error 'foo-error) (eleven () 11)))) (is (eql 11 (receive-result channel))))) (task-handler-bind ((error (lambda (e) (declare (ignore e)) (invoke-restart 'eleven)))) (let ((channel (make-channel))) (submit-task channel (lambda () (restart-case (error 'foo-error) (eleven () 11)))) (is (eql 11 (receive-result channel)))))) (full-test error-cascade-test (task-handler-bind ((error (lambda (e) (invoke-restart 'transfer-error e)))) (task-handler-bind ((error (lambda (e) (declare (ignore e)) (error 'foo-error)))) (let ((channel (make-channel))) (submit-task channel (lambda () (error 'client-error))) (signals foo-error (receive-result channel)))))) (base-test complex-handler-test (flet ((estr (e) (with-output-to-string (out) (write e :escape nil :stream out)))) (let ((queue (make-queue))) (ignore-errors (handler-bind ((error (lambda (e) (push-queue (cons 'a (estr e)) queue)))) (handler-bind ((error (lambda (e) (push-queue (cons 'b (estr e)) queue) (error "Z")))) (handler-bind ((error (lambda (e) (push-queue (cons 'c (estr e)) queue) (error "Y")))) (handler-bind ((error (lambda (e) (push-queue (cons 'd (estr e)) queue)))) (error "X")))))) (is (equal '((D . "X") (C . "X") (B . "Y") (A . "Z")) (extract-queue queue)))) (with-temp-kernel (2) (let ((queue (make-queue))) (task-handler-bind ((error #'invoke-transfer-error)) (task-handler-bind ((error (lambda (e) (push-queue (cons 'a (estr e)) queue)))) (task-handler-bind ((error (lambda (e) (push-queue (cons 'b (estr e)) queue) (error "Z")))) (task-handler-bind ((error (lambda (e) (push-queue (cons 'c (estr e)) queue) (error "Y")))) (task-handler-bind ((error (lambda (e) (push-queue (cons 'd (estr e)) queue)))) (submit-task (make-channel) #'error "X")))))) (is (equal '((D . "X") (C . "X") (B . "Y") (A . 
"Z")) (loop repeat 4 collect (pop-queue queue)))))))) (base-test kernel-worker-context-test (with-temp-kernel (2 :context (lambda (run) (let ((*memo* 9)) (funcall run)))) (let ((channel (make-channel))) (setf *memo* 7) (submit-task channel (lambda () *memo*)) (is (eql 9 (receive-result channel))) (is (eql 7 *memo*))))) (base-test kernel-binding-test (unwind-protect (progn (end-kernel) (setf *kernel* (make-kernel 4)) (let ((channel (make-channel))) (setf *memo* :main) (submit-task channel (lambda () (setf *memo* :worker) *memo*)) (is (eq :worker (receive-result channel))) (is (eq :worker *memo*)))) (end-kernel)) (with-temp-kernel (4 :bindings (acons '*memo* :worker nil)) (let ((node (assoc '*memo* (kernel-bindings)))) (is (eq (cdr node) :worker))) (let ((channel (make-channel))) (setf *memo* :main) (submit-task channel (lambda () *memo*)) (is (eq :worker (receive-result channel))) (is (eq :main *memo*))))) (full-test kernel-var-test (let ((channel (make-channel))) (submit-task channel (lambda () *kernel*)) (is (eq *kernel* (receive-result channel))))) (base-test task-categories-test (with-temp-kernel (2) (is (notany #'identity (task-categories-running))) (let ((channel (make-channel))) (submit-task channel (lambda () (sleep 0.4))) (sleep 0.2) (is (eql 1 (count :default (task-categories-running)))))) (with-temp-kernel (2) (let ((channel (make-channel))) (let ((*task-category* :foo)) (submit-task channel (lambda () (sleep 0.4)))) (sleep 0.2) (is (eql 1 (count :foo (task-categories-running)))))) (with-temp-kernel (2) (let ((channel (make-channel))) (let ((*task-category* 999)) (submit-task channel (lambda () (sleep 0.4)))) (sleep 0.2) (is (eql 1 (count 999 (task-categories-running)))))) (with-temp-kernel (2) (let ((channel (make-channel))) (let ((*task-category* :foo)) (submit-task channel (lambda () (sleep 0.4))) (submit-task channel (lambda () (sleep 0.4)))) (sleep 0.2) (is (eql 2 (count :foo (task-categories-running))))))) (base-test no-kernel-restart-test (let ((*kernel* nil)) (unwind-protect (let ((flag nil)) (handler-bind ((no-kernel-error (lambda (c) (setf flag :called) (invoke-restart (find-restart 'make-kernel c) 3)))) (let ((channel (make-channel))) (submit-task channel (lambda (x) (* x x)) 3) (is (= 9 (receive-result channel)))) (is (= 3 (kernel-worker-count))) (is (eq :called flag)))) (end-kernel)))) (base-test kernel-warnings-test (let ((*error-output* (make-string-output-stream))) (with-temp-kernel (3) (is (zerop (length (get-output-stream-string *error-output*)))) (let ((channel (make-channel))) (submit-task channel (lambda () (warn "blah"))) (receive-result channel)) (is (search "blah" (get-output-stream-string *error-output*)))))) (full-test handler-bind-test (task-handler-bind ((foo-error (lambda (e) (declare (ignore e)) (invoke-restart 'double-me 3)))) (let ((channel (make-channel))) (repeat 3 (submit-task channel (lambda () (restart-case (error 'foo-error) (double-me (x) ;; clisp warns unless interactive is given :interactive (lambda ()) (* 2 x)))))) (is (equal '(6 6 6) (collect-n 3 (receive-result channel))))))) (full-test aborted-worker-test (task-handler-bind ((foo-error (lambda (e) (declare (ignore e)) (invoke-abort-thread)))) (let ((channel (make-channel))) (submit-task channel (lambda () (setf *error-output* (make-broadcast-stream)) (restart-case (error 'foo-error) (eleven () 11)))) (signals task-killed-error (receive-result channel))))) (defun all-workers-alive-p () (sleep 0.2) (every #'bordeaux-threads:thread-alive-p (map 'list #'lparallel.kernel::thread 
(lparallel.kernel::workers *kernel*)))) (base-test active-worker-replacement-test (with-thread-count-check (with-temp-kernel (2) (is (all-workers-alive-p)) (task-handler-bind ((foo-error (lambda (e) (declare (ignore e)) (invoke-abort-thread)))) (let ((channel (make-channel))) (submit-task channel (lambda () (setf *error-output* (make-broadcast-stream)) (error 'foo-error))) (signals task-killed-error (receive-result channel)))) (is (all-workers-alive-p))))) #-lparallel.without-kill (base-test sleeping-worker-replacement-test (with-thread-count-check (with-temp-kernel (2 :bindings (list (cons '*error-output* (make-broadcast-stream)))) (is (all-workers-alive-p)) (destroy-thread (lparallel.kernel::thread (aref (lparallel.kernel::workers *kernel*) 0))) (is (all-workers-alive-p)) (destroy-thread (lparallel.kernel::thread (aref (lparallel.kernel::workers *kernel*) 0))) (destroy-thread (lparallel.kernel::thread (aref (lparallel.kernel::workers *kernel*) 1))) (is (all-workers-alive-p))))) (define-condition foo-condition () ()) (full-test non-error-condition-test (let ((result nil)) (task-handler-bind ((foo-condition (lambda (c) (declare (ignore c)) (setf result :called)))) (let ((channel (make-channel))) (submit-task channel (lambda () (signal 'foo-condition))) (receive-result channel))) (is (eq :called result)))) #-lparallel.without-kill (base-test custom-kill-task-test (with-thread-count-check (with-temp-kernel (2) (let ((channel (make-channel))) (let ((*task-category* 'blah)) (submit-task channel (lambda () (setf *error-output* (make-broadcast-stream)) (infinite-loop))) (submit-task channel (lambda () (setf *error-output* (make-broadcast-stream)) (infinite-loop)))) (sleep 0.2) (submit-task channel (lambda () 'survived)) (sleep 0.2) (kill-tasks 'blah) (sleep 0.2) (let ((errors nil) (regulars nil)) (repeat 3 (handler-case (push (receive-result channel) regulars) (task-killed-error (e) (push e errors)))) (is (= 2 (length errors))) (is (equal '(survived) regulars))))))) #-lparallel.without-kill (base-test default-kill-task-test (with-thread-count-check (with-temp-kernel (2) (let ((channel (make-channel))) (submit-task channel (lambda () (setf *error-output* (make-broadcast-stream)) (infinite-loop))) (submit-task channel (lambda () (setf *error-output* (make-broadcast-stream)) (infinite-loop))) (sleep 0.2) (submit-task channel (lambda () 'survived)) (sleep 0.2) (kill-tasks *task-category*) (sleep 0.2) (let ((errors nil) (regulars nil)) (repeat 3 (handler-case (push (receive-result channel) regulars) (task-killed-error (e) (push e errors)))) (is (= 2 (length errors))) (is (equal '(survived) regulars))))))) (base-test submit-timeout-test (with-temp-kernel (2) (let ((channel (make-channel))) (declare (notinline submit-timeout)) (submit-timeout channel 0.1 'timeout) (submit-task channel (lambda () 3)) (is (eql 3 (receive-result channel))) (is (eq 'timeout (receive-result channel)))))) #-lparallel.without-kill (base-test cancel-timeout-test (with-temp-kernel (2) (locally (declare (notinline submit-timeout cancel-timeout)) (let* ((channel (make-channel)) (timeout (submit-timeout channel 999 'timeout))) (sleep 0.2) (cancel-timeout timeout 'a) (is (eq 'a (receive-result channel))))))) #-lparallel.without-kill (base-test kill-timeout-test (with-temp-kernel (2) (locally (declare (notinline submit-timeout)) (let* ((channel (make-channel)) (timeout (submit-timeout channel 999 'timeout))) (sleep 0.2) (lparallel.kernel::with-timeout-slots (lparallel.kernel::thread) timeout (destroy-thread 
lparallel.kernel::thread)) (signals task-killed-error (receive-result channel)))))) (define-condition foo-condition-2 (condition) ()) (full-test signaling-after-signal-test (let ((q (make-queue))) (task-handler-bind ((foo-condition-2 (lambda (c) (declare (ignore c)) (push-queue 'outer q)))) (task-handler-bind ((foo-condition (lambda (c) (declare (ignore c)) (push-queue 'inner q) (signal 'foo-condition-2)))) (let ((channel (make-channel))) (submit-task channel (lambda () (signal 'foo-condition))) (receive-result channel)))) (is (equal '(inner outer) (extract-queue q))))) (base-test task-handler-bind-syntax-test (signals error (macroexpand '(task-handler-bind ((()))))) (signals error (macroexpand '(task-handler-bind (())))) (signals error (macroexpand '(task-handler-bind ((x))))) (signals error (macroexpand '(task-handler-bind ((x y z)))))) (full-test print-kernel-test (is (plusp (length (with-output-to-string (s) (print *kernel* s)))))) (base-test end-kernel-wait-test (with-thread-count-check (let ((*kernel* (make-kernel 3))) (unwind-protect (let ((channel (make-channel))) (submit-task channel (lambda () (sleep 1)))) (is (eql 3 (length (end-kernel :wait t)))))))) (base-test steal-work-test (with-temp-kernel (2) (let ((channel (make-channel))) (submit-task channel (lambda () (sleep 0.4))) (submit-task channel (lambda () (sleep 0.4))) (sleep 0.1) (let ((execp nil)) (submit-task channel (lambda () (setf execp t))) (sleep 0.1) (is (eq t (lparallel.kernel::steal-work *kernel* lparallel.kernel::*worker*))) (is (eq t execp)) (is (eq nil (lparallel.kernel::steal-work *kernel* lparallel.kernel::*worker*)))))) (with-temp-kernel (2) (let ((channel (make-channel))) (submit-task channel (lambda () (sleep 0.2))) (submit-task channel (lambda () (sleep 0.2))) (sleep 0.1) (is (eq nil (lparallel.kernel::steal-work *kernel* lparallel.kernel::*worker*)))))) (base-test kernel-store-value-test (unwind-protect (handler-bind ((no-kernel-error (lambda (e) (declare (ignore e)) (invoke-restart 'store-value (make-kernel 2))))) (let ((channel (make-channel))) (submit-task channel 'identity 3) (is (= 3 (receive-result channel))))) (end-kernel))) #-lparallel.without-kill (base-test reject-kill-nil-test (with-temp-kernel (2) (let ((channel (make-channel))) (submit-task channel (lambda () (setf *error-output* (make-broadcast-stream)) (sleep 999))) (sleep 0.2) (signals error (kill-tasks nil)) (= 1 (kill-tasks :default))))) #-lparallel.without-kill (full-test worker-suicide-test (let ((channel (make-channel))) (submit-task channel (lambda () (setf *error-output* (make-broadcast-stream)) (kill-tasks :default))) (signals task-killed-error (receive-result channel))) (let ((channel (make-channel)) (*task-category* 'foo)) (submit-task channel (lambda () (setf *error-output* (make-broadcast-stream)) (kill-tasks 'foo))) (signals task-killed-error (receive-result channel)))) (full-test submit-after-end-kernel-test (let ((channel (make-channel))) (end-kernel :wait t) (signals error (submit-task channel (lambda ()))))) (base-test double-end-kernel-test (let* ((kernel (make-kernel 2)) (*kernel* kernel)) (end-kernel :wait t) (let ((*kernel* kernel)) (end-kernel :wait t))) ;; got here without an error (is (= 1 1))) (base-test kernel-reader-test (setf *memo* nil) (let ((context (lambda (worker-loop) (let ((*memo* 3)) (funcall worker-loop))))) (with-temp-kernel (2 :name "foo" :bindings `((*blah* . 
99)) :context context) (let ((channel (make-channel))) (submit-task channel (lambda () (declare (special *blah*)) (list *memo* *blah*))) (is (equal '(3 99) (receive-result channel)))) (is (string-equal "foo" (kernel-name))) (is (equal '((*blah* . 99)) (kernel-bindings))) (is (eq context (kernel-context)))))) (defun aborting-context (worker-loop) (declare (ignore worker-loop)) (invoke-abort-thread)) (defun non-funcalling-context (worker-loop) (declare (ignore worker-loop))) (base-test context-error-test (dolist (n '(1 2 4 8)) (with-thread-count-check (signals kernel-creation-error (make-kernel n :context #'aborting-context))))) (base-test non-funcalling-context-test (dolist (n '(1 2 4 8)) (with-thread-count-check (signals kernel-creation-error (make-kernel n :context 'non-funcalling-context))))) (base-test nonexistent-context-test (with-thread-count-check (signals error (make-kernel 1 :context 'nonexistent-function)))) (base-test broadcast-test (setf *memo* 0) (dolist (n '(1 2 3 4 7 8 15 16)) (with-temp-kernel (n :bindings '((*memo* . 1))) (is (= 0 *memo*)) (let ((channel (make-channel))) (repeat 100 (submit-task channel (lambda () *memo*))) (repeat 100 (is (= 1 (receive-result channel))))) (is (every (lambda (x) (= x 1)) (broadcast-task (lambda () *memo*)))) (let ((channel (make-channel))) (repeat (kernel-worker-count) (submit-task channel #'sleep 0.2))) (is (every (lambda (x) (= x 99)) (broadcast-task (lambda () (setf *memo* 99))))) (let ((channel (make-channel))) (repeat 1000 (submit-task channel (lambda ())))) (is (every (lambda (x) (= x 99)) (broadcast-task (lambda () (setf *memo* 99))))) (is (every (lambda (x) (= x 99)) (broadcast-task (lambda () (setf *memo* 99))))) (is (= 0 *memo*)) (let ((channel (make-channel))) (repeat 100 (submit-task channel (lambda () *memo*))) (repeat 100 (is (= 99 (receive-result channel))))) (let ((channel (make-channel))) (repeat 1000 (submit-task channel (lambda ())))) (is (every (lambda (x) (= x 99)) (broadcast-task (lambda () *memo*)))) (is (every (lambda (x) (= x 99)) (broadcast-task (lambda () *memo*)))) (is (every (lambda (x) (= x 5)) (broadcast-task #'+ 2 3)))))) (full-test broadcast-error-test (let ((*kernel* nil)) (signals no-kernel-error (broadcast-task (lambda ())))) (signals error (broadcast-task 3)) (signals error (broadcast-task "foo")) (task-handler-bind ((error #'invoke-transfer-error)) (signals foo-error (broadcast-task #'error 'foo-error)) (let ((channel (make-channel))) (submit-task channel (lambda () (broadcast-task (lambda ())))) (signals error (receive-result channel))) (signals error (broadcast-task (lambda () (broadcast-task (lambda ()))))))) (full-test worker-index-test (is (null (kernel-worker-index))) (let ((channel (make-channel))) (repeat 1000 (submit-task channel #'kernel-worker-index)) (repeat 1000 (let ((x (receive-result channel))) (is (and (>= x 0) (< x (kernel-worker-count))))))) (loop for i across (sort (broadcast-task #'kernel-worker-index) #'<) for j from 0 do (is (= i j)))) ;;;; check for messed up imports (defun packages-matching (string) (remove-if-not (lambda (package) (search string (package-name package) :test #'equalp)) (list-all-packages))) (defun assert-internal-symbols-not-imported (&key own-packages third-party-packages) (let ((third-party-packages (mapcar #'find-package third-party-packages))) (dolist (own-package own-packages) (do-symbols (symbol own-package) (when-let (third-party-package (find (symbol-package symbol) third-party-packages)) (when (eq :internal (nth-value 1 (find-symbol (symbol-name 
symbol) third-party-package))) (error "Internal symbol ~s was imported into ~a." symbol (package-name own-package)))))))) (base-test package-test (assert-internal-symbols-not-imported :own-packages (packages-matching "lparallel") :third-party-packages '(#:alexandria #:bordeaux-threads)) (is t)) </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/lmj/lparallel/9c11f40018155a472c540b63684049acc9b36e15/test/kernel-test.lisp</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">lisp</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "> Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the project nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT LOSS OF USE , DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. clisp warns unless interactive is given got here without an error check for messed up imports</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "> Copyright ( c ) 2011 - 2012 , . All rights reserved . 
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT HOLDER OR FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT (in-package #:lparallel-test) (full-test kernel-test (let ((channel (make-channel))) (mapcar (lambda (x) (submit-task channel (lambda () (* x x)))) (list 5 6 7 8)) (is (equal (list 25 36 49 64) (sort (collect-n 4 (receive-result channel)) '<))))) (full-test no-kernel-test (let ((*kernel* nil)) (signals no-kernel-error (submit-task (make-channel) (lambda ()))))) (base-test end-kernel-test (repeat 10 (loop for n from 1 below 32 do (with-temp-kernel (n) (is (= 1 1)))))) (full-test many-task-test (let ((channel (make-channel))) (repeat 1000 (submit-task channel (lambda ())) (is (null (receive-result channel)))) (repeat 1000 (submit-task channel (lambda ()))) (repeat 1000 (is (null (receive-result channel)))) (repeat 1000 (let ((*task-priority* :low)) (submit-task channel (lambda ()))) (is (null (receive-result channel)))) (repeat 1000 (let ((*task-priority* :low)) (submit-task channel (lambda ())))) (repeat 1000 (is (null (receive-result channel)))))) #-lparallel.without-kill (base-test kill-during-end-kernel-test (let* ((*kernel* (make-kernel 2)) (kernel *kernel*) (out *standard-output*) (channel (make-channel)) (handled (make-queue)) (finished (make-queue))) (task-handler-bind ((error #'invoke-transfer-error)) (submit-task channel (lambda () (setf *error-output* (make-broadcast-stream)) (infinite-loop)))) (with-thread () (block top (handler-bind ((task-killed-error (lambda (e) (declare (ignore e)) (push-queue t handled) (return-from top)))) (receive-result channel)))) (sleep 0.2) (let ((thread (with-thread () (let ((*standard-output* out)) (let ((*kernel* kernel)) (end-kernel :wait t) (push-queue t finished)))))) (sleep 0.2) (is (null (peek-queue finished))) (is (eql 1 (kill-tasks :default))) (sleep 0.2) (is (eq t (peek-queue handled))) (is (eq t (peek-queue finished))) (is (not (null thread)))))) (full-test channel-capacity-test (let ((channel (make-channel :fixed-capacity 1))) (submit-task channel (lambda () 3)) (submit-task channel (lambda () 4)) (submit-task channel (lambda () 5)) (is (equal '(3 4 5) avoid sbcl warning (locally (declare (notinline sort)) (sort (list (receive-result channel) (receive-result channel) (receive-result channel)) #'<)))))) (full-test try-receive-test (let ((channel (make-channel))) (multiple-value-bind (a b) (try-receive-result channel) (is (null a)) (is (null b))) (submit-task channel (lambda () 3)) (sleep 0.1) (multiple-value-bind (a b) (try-receive-result channel) (is (= 3 a)) (is (eq t b))) (multiple-value-bind (a b) (try-receive-result channel) (is (null a)) (is (null b))))) #-lparallel.without-bordeaux-threads-condition-wait-timeout (full-test try-receive-timeout-test (let ((channel (make-channel))) (multiple-value-bind (a b) (try-receive-result channel :timeout 0.1) (is (null a)) (is (null b))) (submit-task channel (lambda () 3)) (sleep 0.1) (multiple-value-bind (a b) (try-receive-result channel :timeout 0.1) (is (= 3 a)) (is (eq t b))) (multiple-value-bind (a b) (try-receive-result channel :timeout 0.1) (is (null a)) (is (null b))))) (full-test kernel-client-error-test (task-handler-bind ((client-error #'invoke-transfer-error)) (let ((channel (make-channel))) (submit-task channel (lambda () (error 'client-error))) (signals client-error (receive-result channel)))) (task-handler-bind ((error 
(lambda (e) (declare (ignore e)) (invoke-restart 'transfer-error (make-condition 'foo-error))))) (let ((channel (make-channel))) (submit-task channel (lambda () (error 'client-error))) (signals foo-error (receive-result channel)))) (task-handler-bind ((error (lambda (e) (declare (ignore e)) (invoke-restart 'transfer-error 'foo-error)))) (let ((channel (make-channel))) (submit-task channel (lambda () (error 'client-error))) (signals foo-error (receive-result channel))))) (full-test user-restart-test (task-handler-bind ((foo-error (lambda (e) (declare (ignore e)) (invoke-restart 'eleven)))) (let ((channel (make-channel))) (submit-task channel (lambda () (restart-case (error 'foo-error) (eleven () 11)))) (is (eql 11 (receive-result channel))))) (task-handler-bind ((error (lambda (e) (declare (ignore e)) (invoke-restart 'eleven)))) (let ((channel (make-channel))) (submit-task channel (lambda () (restart-case (error 'foo-error) (eleven () 11)))) (is (eql 11 (receive-result channel)))))) (full-test error-cascade-test (task-handler-bind ((error (lambda (e) (invoke-restart 'transfer-error e)))) (task-handler-bind ((error (lambda (e) (declare (ignore e)) (error 'foo-error)))) (let ((channel (make-channel))) (submit-task channel (lambda () (error 'client-error))) (signals foo-error (receive-result channel)))))) (base-test complex-handler-test (flet ((estr (e) (with-output-to-string (out) (write e :escape nil :stream out)))) (let ((queue (make-queue))) (ignore-errors (handler-bind ((error (lambda (e) (push-queue (cons 'a (estr e)) queue)))) (handler-bind ((error (lambda (e) (push-queue (cons 'b (estr e)) queue) (error "Z")))) (handler-bind ((error (lambda (e) (push-queue (cons 'c (estr e)) queue) (error "Y")))) (handler-bind ((error (lambda (e) (push-queue (cons 'd (estr e)) queue)))) (error "X")))))) (is (equal '((D . "X") (C . "X") (B . "Y") (A . "Z")) (extract-queue queue)))) (with-temp-kernel (2) (let ((queue (make-queue))) (task-handler-bind ((error #'invoke-transfer-error)) (task-handler-bind ((error (lambda (e) (push-queue (cons 'a (estr e)) queue)))) (task-handler-bind ((error (lambda (e) (push-queue (cons 'b (estr e)) queue) (error "Z")))) (task-handler-bind ((error (lambda (e) (push-queue (cons 'c (estr e)) queue) (error "Y")))) (task-handler-bind ((error (lambda (e) (push-queue (cons 'd (estr e)) queue)))) (submit-task (make-channel) #'error "X")))))) (is (equal '((D . "X") (C . "X") (B . "Y") (A . 
"Z")) (loop repeat 4 collect (pop-queue queue)))))))) (base-test kernel-worker-context-test (with-temp-kernel (2 :context (lambda (run) (let ((*memo* 9)) (funcall run)))) (let ((channel (make-channel))) (setf *memo* 7) (submit-task channel (lambda () *memo*)) (is (eql 9 (receive-result channel))) (is (eql 7 *memo*))))) (base-test kernel-binding-test (unwind-protect (progn (end-kernel) (setf *kernel* (make-kernel 4)) (let ((channel (make-channel))) (setf *memo* :main) (submit-task channel (lambda () (setf *memo* :worker) *memo*)) (is (eq :worker (receive-result channel))) (is (eq :worker *memo*)))) (end-kernel)) (with-temp-kernel (4 :bindings (acons '*memo* :worker nil)) (let ((node (assoc '*memo* (kernel-bindings)))) (is (eq (cdr node) :worker))) (let ((channel (make-channel))) (setf *memo* :main) (submit-task channel (lambda () *memo*)) (is (eq :worker (receive-result channel))) (is (eq :main *memo*))))) (full-test kernel-var-test (let ((channel (make-channel))) (submit-task channel (lambda () *kernel*)) (is (eq *kernel* (receive-result channel))))) (base-test task-categories-test (with-temp-kernel (2) (is (notany #'identity (task-categories-running))) (let ((channel (make-channel))) (submit-task channel (lambda () (sleep 0.4))) (sleep 0.2) (is (eql 1 (count :default (task-categories-running)))))) (with-temp-kernel (2) (let ((channel (make-channel))) (let ((*task-category* :foo)) (submit-task channel (lambda () (sleep 0.4)))) (sleep 0.2) (is (eql 1 (count :foo (task-categories-running)))))) (with-temp-kernel (2) (let ((channel (make-channel))) (let ((*task-category* 999)) (submit-task channel (lambda () (sleep 0.4)))) (sleep 0.2) (is (eql 1 (count 999 (task-categories-running)))))) (with-temp-kernel (2) (let ((channel (make-channel))) (let ((*task-category* :foo)) (submit-task channel (lambda () (sleep 0.4))) (submit-task channel (lambda () (sleep 0.4)))) (sleep 0.2) (is (eql 2 (count :foo (task-categories-running))))))) (base-test no-kernel-restart-test (let ((*kernel* nil)) (unwind-protect (let ((flag nil)) (handler-bind ((no-kernel-error (lambda (c) (setf flag :called) (invoke-restart (find-restart 'make-kernel c) 3)))) (let ((channel (make-channel))) (submit-task channel (lambda (x) (* x x)) 3) (is (= 9 (receive-result channel)))) (is (= 3 (kernel-worker-count))) (is (eq :called flag)))) (end-kernel)))) (base-test kernel-warnings-test (let ((*error-output* (make-string-output-stream))) (with-temp-kernel (3) (is (zerop (length (get-output-stream-string *error-output*)))) (let ((channel (make-channel))) (submit-task channel (lambda () (warn "blah"))) (receive-result channel)) (is (search "blah" (get-output-stream-string *error-output*)))))) (full-test handler-bind-test (task-handler-bind ((foo-error (lambda (e) (declare (ignore e)) (invoke-restart 'double-me 3)))) (let ((channel (make-channel))) (repeat 3 (submit-task channel (lambda () (restart-case (error 'foo-error) (double-me (x) :interactive (lambda ()) (* 2 x)))))) (is (equal '(6 6 6) (collect-n 3 (receive-result channel))))))) (full-test aborted-worker-test (task-handler-bind ((foo-error (lambda (e) (declare (ignore e)) (invoke-abort-thread)))) (let ((channel (make-channel))) (submit-task channel (lambda () (setf *error-output* (make-broadcast-stream)) (restart-case (error 'foo-error) (eleven () 11)))) (signals task-killed-error (receive-result channel))))) (defun all-workers-alive-p () (sleep 0.2) (every #'bordeaux-threads:thread-alive-p (map 'list #'lparallel.kernel::thread (lparallel.kernel::workers *kernel*)))) (base-test 
active-worker-replacement-test (with-thread-count-check (with-temp-kernel (2) (is (all-workers-alive-p)) (task-handler-bind ((foo-error (lambda (e) (declare (ignore e)) (invoke-abort-thread)))) (let ((channel (make-channel))) (submit-task channel (lambda () (setf *error-output* (make-broadcast-stream)) (error 'foo-error))) (signals task-killed-error (receive-result channel)))) (is (all-workers-alive-p))))) #-lparallel.without-kill (base-test sleeping-worker-replacement-test (with-thread-count-check (with-temp-kernel (2 :bindings (list (cons '*error-output* (make-broadcast-stream)))) (is (all-workers-alive-p)) (destroy-thread (lparallel.kernel::thread (aref (lparallel.kernel::workers *kernel*) 0))) (is (all-workers-alive-p)) (destroy-thread (lparallel.kernel::thread (aref (lparallel.kernel::workers *kernel*) 0))) (destroy-thread (lparallel.kernel::thread (aref (lparallel.kernel::workers *kernel*) 1))) (is (all-workers-alive-p))))) (define-condition foo-condition () ()) (full-test non-error-condition-test (let ((result nil)) (task-handler-bind ((foo-condition (lambda (c) (declare (ignore c)) (setf result :called)))) (let ((channel (make-channel))) (submit-task channel (lambda () (signal 'foo-condition))) (receive-result channel))) (is (eq :called result)))) #-lparallel.without-kill (base-test custom-kill-task-test (with-thread-count-check (with-temp-kernel (2) (let ((channel (make-channel))) (let ((*task-category* 'blah)) (submit-task channel (lambda () (setf *error-output* (make-broadcast-stream)) (infinite-loop))) (submit-task channel (lambda () (setf *error-output* (make-broadcast-stream)) (infinite-loop)))) (sleep 0.2) (submit-task channel (lambda () 'survived)) (sleep 0.2) (kill-tasks 'blah) (sleep 0.2) (let ((errors nil) (regulars nil)) (repeat 3 (handler-case (push (receive-result channel) regulars) (task-killed-error (e) (push e errors)))) (is (= 2 (length errors))) (is (equal '(survived) regulars))))))) #-lparallel.without-kill (base-test default-kill-task-test (with-thread-count-check (with-temp-kernel (2) (let ((channel (make-channel))) (submit-task channel (lambda () (setf *error-output* (make-broadcast-stream)) (infinite-loop))) (submit-task channel (lambda () (setf *error-output* (make-broadcast-stream)) (infinite-loop))) (sleep 0.2) (submit-task channel (lambda () 'survived)) (sleep 0.2) (kill-tasks *task-category*) (sleep 0.2) (let ((errors nil) (regulars nil)) (repeat 3 (handler-case (push (receive-result channel) regulars) (task-killed-error (e) (push e errors)))) (is (= 2 (length errors))) (is (equal '(survived) regulars))))))) (base-test submit-timeout-test (with-temp-kernel (2) (let ((channel (make-channel))) (declare (notinline submit-timeout)) (submit-timeout channel 0.1 'timeout) (submit-task channel (lambda () 3)) (is (eql 3 (receive-result channel))) (is (eq 'timeout (receive-result channel)))))) #-lparallel.without-kill (base-test cancel-timeout-test (with-temp-kernel (2) (locally (declare (notinline submit-timeout cancel-timeout)) (let* ((channel (make-channel)) (timeout (submit-timeout channel 999 'timeout))) (sleep 0.2) (cancel-timeout timeout 'a) (is (eq 'a (receive-result channel))))))) #-lparallel.without-kill (base-test kill-timeout-test (with-temp-kernel (2) (locally (declare (notinline submit-timeout)) (let* ((channel (make-channel)) (timeout (submit-timeout channel 999 'timeout))) (sleep 0.2) (lparallel.kernel::with-timeout-slots (lparallel.kernel::thread) timeout (destroy-thread lparallel.kernel::thread)) (signals task-killed-error (receive-result 
channel)))))) (define-condition foo-condition-2 (condition) ()) (full-test signaling-after-signal-test (let ((q (make-queue))) (task-handler-bind ((foo-condition-2 (lambda (c) (declare (ignore c)) (push-queue 'outer q)))) (task-handler-bind ((foo-condition (lambda (c) (declare (ignore c)) (push-queue 'inner q) (signal 'foo-condition-2)))) (let ((channel (make-channel))) (submit-task channel (lambda () (signal 'foo-condition))) (receive-result channel)))) (is (equal '(inner outer) (extract-queue q))))) (base-test task-handler-bind-syntax-test (signals error (macroexpand '(task-handler-bind ((()))))) (signals error (macroexpand '(task-handler-bind (())))) (signals error (macroexpand '(task-handler-bind ((x))))) (signals error (macroexpand '(task-handler-bind ((x y z)))))) (full-test print-kernel-test (is (plusp (length (with-output-to-string (s) (print *kernel* s)))))) (base-test end-kernel-wait-test (with-thread-count-check (let ((*kernel* (make-kernel 3))) (unwind-protect (let ((channel (make-channel))) (submit-task channel (lambda () (sleep 1)))) (is (eql 3 (length (end-kernel :wait t)))))))) (base-test steal-work-test (with-temp-kernel (2) (let ((channel (make-channel))) (submit-task channel (lambda () (sleep 0.4))) (submit-task channel (lambda () (sleep 0.4))) (sleep 0.1) (let ((execp nil)) (submit-task channel (lambda () (setf execp t))) (sleep 0.1) (is (eq t (lparallel.kernel::steal-work *kernel* lparallel.kernel::*worker*))) (is (eq t execp)) (is (eq nil (lparallel.kernel::steal-work *kernel* lparallel.kernel::*worker*)))))) (with-temp-kernel (2) (let ((channel (make-channel))) (submit-task channel (lambda () (sleep 0.2))) (submit-task channel (lambda () (sleep 0.2))) (sleep 0.1) (is (eq nil (lparallel.kernel::steal-work *kernel* lparallel.kernel::*worker*)))))) (base-test kernel-store-value-test (unwind-protect (handler-bind ((no-kernel-error (lambda (e) (declare (ignore e)) (invoke-restart 'store-value (make-kernel 2))))) (let ((channel (make-channel))) (submit-task channel 'identity 3) (is (= 3 (receive-result channel))))) (end-kernel))) #-lparallel.without-kill (base-test reject-kill-nil-test (with-temp-kernel (2) (let ((channel (make-channel))) (submit-task channel (lambda () (setf *error-output* (make-broadcast-stream)) (sleep 999))) (sleep 0.2) (signals error (kill-tasks nil)) (= 1 (kill-tasks :default))))) #-lparallel.without-kill (full-test worker-suicide-test (let ((channel (make-channel))) (submit-task channel (lambda () (setf *error-output* (make-broadcast-stream)) (kill-tasks :default))) (signals task-killed-error (receive-result channel))) (let ((channel (make-channel)) (*task-category* 'foo)) (submit-task channel (lambda () (setf *error-output* (make-broadcast-stream)) (kill-tasks 'foo))) (signals task-killed-error (receive-result channel)))) (full-test submit-after-end-kernel-test (let ((channel (make-channel))) (end-kernel :wait t) (signals error (submit-task channel (lambda ()))))) (base-test double-end-kernel-test (let* ((kernel (make-kernel 2)) (*kernel* kernel)) (end-kernel :wait t) (let ((*kernel* kernel)) (end-kernel :wait t))) (is (= 1 1))) (base-test kernel-reader-test (setf *memo* nil) (let ((context (lambda (worker-loop) (let ((*memo* 3)) (funcall worker-loop))))) (with-temp-kernel (2 :name "foo" :bindings `((*blah* . 
99)) :context context) (let ((channel (make-channel))) (submit-task channel (lambda () (declare (special *blah*)) (list *memo* *blah*))) (is (equal '(3 99) (receive-result channel)))) (is (string-equal "foo" (kernel-name))) (is (equal '((*blah* . 99)) (kernel-bindings))) (is (eq context (kernel-context)))))) (defun aborting-context (worker-loop) (declare (ignore worker-loop)) (invoke-abort-thread)) (defun non-funcalling-context (worker-loop) (declare (ignore worker-loop))) (base-test context-error-test (dolist (n '(1 2 4 8)) (with-thread-count-check (signals kernel-creation-error (make-kernel n :context #'aborting-context))))) (base-test non-funcalling-context-test (dolist (n '(1 2 4 8)) (with-thread-count-check (signals kernel-creation-error (make-kernel n :context 'non-funcalling-context))))) (base-test nonexistent-context-test (with-thread-count-check (signals error (make-kernel 1 :context 'nonexistent-function)))) (base-test broadcast-test (setf *memo* 0) (dolist (n '(1 2 3 4 7 8 15 16)) (with-temp-kernel (n :bindings '((*memo* . 1))) (is (= 0 *memo*)) (let ((channel (make-channel))) (repeat 100 (submit-task channel (lambda () *memo*))) (repeat 100 (is (= 1 (receive-result channel))))) (is (every (lambda (x) (= x 1)) (broadcast-task (lambda () *memo*)))) (let ((channel (make-channel))) (repeat (kernel-worker-count) (submit-task channel #'sleep 0.2))) (is (every (lambda (x) (= x 99)) (broadcast-task (lambda () (setf *memo* 99))))) (let ((channel (make-channel))) (repeat 1000 (submit-task channel (lambda ())))) (is (every (lambda (x) (= x 99)) (broadcast-task (lambda () (setf *memo* 99))))) (is (every (lambda (x) (= x 99)) (broadcast-task (lambda () (setf *memo* 99))))) (is (= 0 *memo*)) (let ((channel (make-channel))) (repeat 100 (submit-task channel (lambda () *memo*))) (repeat 100 (is (= 99 (receive-result channel))))) (let ((channel (make-channel))) (repeat 1000 (submit-task channel (lambda ())))) (is (every (lambda (x) (= x 99)) (broadcast-task (lambda () *memo*)))) (is (every (lambda (x) (= x 99)) (broadcast-task (lambda () *memo*)))) (is (every (lambda (x) (= x 5)) (broadcast-task #'+ 2 3)))))) (full-test broadcast-error-test (let ((*kernel* nil)) (signals no-kernel-error (broadcast-task (lambda ())))) (signals error (broadcast-task 3)) (signals error (broadcast-task "foo")) (task-handler-bind ((error #'invoke-transfer-error)) (signals foo-error (broadcast-task #'error 'foo-error)) (let ((channel (make-channel))) (submit-task channel (lambda () (broadcast-task (lambda ())))) (signals error (receive-result channel))) (signals error (broadcast-task (lambda () (broadcast-task (lambda ()))))))) (full-test worker-index-test (is (null (kernel-worker-index))) (let ((channel (make-channel))) (repeat 1000 (submit-task channel #'kernel-worker-index)) (repeat 1000 (let ((x (receive-result channel))) (is (and (>= x 0) (< x (kernel-worker-count))))))) (loop for i across (sort (broadcast-task #'kernel-worker-index) #'<) for j from 0 do (is (= i j)))) (defun packages-matching (string) (remove-if-not (lambda (package) (search string (package-name package) :test #'equalp)) (list-all-packages))) (defun assert-internal-symbols-not-imported (&key own-packages third-party-packages) (let ((third-party-packages (mapcar #'find-package third-party-packages))) (dolist (own-package own-packages) (do-symbols (symbol own-package) (when-let (third-party-package (find (symbol-package symbol) third-party-packages)) (when (eq :internal (nth-value 1 (find-symbol (symbol-name symbol) third-party-package))) (error 
"Internal symbol ~s was imported into ~a." symbol (package-name own-package)))))))) (base-test package-test (assert-internal-symbols-not-imported :own-packages (packages-matching "lparallel") :third-party-packages '(#:alexandria #:bordeaux-threads)) (is t)) </span></div> </div></div> </td> </tr><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610213"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">36542d02c484f2d7b330af14cb44c5f7244a919497c9b311045a883bdd513bea</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">madstap/comfy</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">project.clj</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">(defproject madstap/comfy "1.0.5" :description "Clojure(script) utils" :url "" :license {:name "Eclipse Public License" :url "-v10.html"} :dependencies [[org.clojure/clojure "1.9.0"] [org.clojure/clojurescript "1.9.946" :scope "provided"]] :plugins [[lein-codox "0.10.3"] [lein-cljsbuild "1.1.6"] [lein-doo "0.1.7"]] :codox {:output-path "docs" :metadata {:doc/format :markdown} :source-uri "/{version}/{filepath}#L{line}"} :cljsbuild {:builds {:test {:source-paths ["src" "test"] :compiler {:output-to "target/main.js" :output-dir "target" :main madstap.comfy.test-runner :process-shim false ; ¯\_(ツ)_/¯ :optimizations :simple}}}} :doo {:paths {:rhino "lein run -m org.mozilla.javascript.tools.shell.Main"}} :aliases {"test-cljs" ["doo" "rhino" "test" "once"] "test-clj" ["with-profile" "+1.9" "test"] "test-all" ["do" ["test-clj"] ["test-cljs"]]} :profiles {:dev {:dependencies [[com.cemerick/piggieback "0.2.2"] [org.clojure/test.check "0.10.0-alpha2"] [org.clojure/tools.nrepl "0.2.10"]] :repl-options {:nrepl-middleware [cemerick.piggieback/wrap-cljs-repl]}} :test {:dependencies [[org.mozilla/rhino "1.7.7.2"]]} :1.9 {:dependencies [[org.clojure/clojure "1.9.0-beta1"]]}}) </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/madstap/comfy/0af9cc940bcf1726e0c11c6f19986620d8cfd345/project.clj</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">clojure</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "> ¯\_(ツ)_/¯</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">(defproject madstap/comfy "1.0.5" :description "Clojure(script) utils" :url "" :license {:name "Eclipse Public License" :url "-v10.html"} :dependencies [[org.clojure/clojure "1.9.0"] [org.clojure/clojurescript "1.9.946" :scope "provided"]] :plugins 
[[lein-codox "0.10.3"] [lein-cljsbuild "1.1.6"] [lein-doo "0.1.7"]] :codox {:output-path "docs" :metadata {:doc/format :markdown} :source-uri "/{version}/{filepath}#L{line}"} :cljsbuild {:builds {:test {:source-paths ["src" "test"] :compiler {:output-to "target/main.js" :output-dir "target" :main madstap.comfy.test-runner :optimizations :simple}}}} :doo {:paths {:rhino "lein run -m org.mozilla.javascript.tools.shell.Main"}} :aliases {"test-cljs" ["doo" "rhino" "test" "once"] "test-clj" ["with-profile" "+1.9" "test"] "test-all" ["do" ["test-clj"] ["test-cljs"]]} :profiles {:dev {:dependencies [[com.cemerick/piggieback "0.2.2"] [org.clojure/test.check "0.10.0-alpha2"] [org.clojure/tools.nrepl "0.2.10"]] :repl-options {:nrepl-middleware [cemerick.piggieback/wrap-cljs-repl]}} :test {:dependencies [[org.mozilla/rhino "1.7.7.2"]]} :1.9 {:dependencies [[org.clojure/clojure "1.9.0-beta1"]]}}) </span></div> </div></div> </td> </tr><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610214"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">c0485baea4035191ebeba91eed2285ce5edb45d00e7aba43a08b49d1b9b0e904</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">charlieg/Sparser</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">compile-singletons.lisp</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">;;; -*- Mode:LISP; Syntax:Common-Lisp; Package:(CL-USER COMMON-LISP) -*- Copyright ( c ) 2007 BBNT Solutions LLC . All Rights Reserved $ I d : compile-singletons.lisp 207 2009 - 06 - 18 20:59:16Z cgreenba $ ;;; ;;; File: "compile-singletons" ;;; Module: "init;scripts:" version : March 2007 ;; Derived from compile-everthing on 3/20/07 (in-package :sparser) ;;;-------------------------------------------------- ;;; compile individual files in the preloader, etc. 
;;;-------------------------------------------------- (just-compile "init;everything") (just-compile "init;Lisp:kind-of-lisp") (just-compile "init;Lisp:grammar-module") (just-compile "init;Lisp:ddef-logical") (just-compile "init;Lisp:lload") (just-compile "init;scripts:just-dm&p") (when nil (just-compile "init;scripts:Academic version") (just-compile "init;scripts:Apple loader") (just-compile "init;scripts:BBN") (just-compile "init;scripts:compile everything") (just-compile "init;scripts:compile academic") (just-compile "init;scripts:copy everything") (just-compile "init;scripts:copy academic") (just-compile "init;scripts:just dm&p") (just-compile "init;scripts:no grammar") (just-compile "init;scripts:SUN") (just-compile "init;scripts:v2.3a") ;; standard (just-compile "init;scripts:v2.3ag") ;; "academic grammar" (just-compile "init;scripts:v2.3g") ;; (public) "grammar" ) (just-compile "version;loaders:grammar") (just-compile "version;loaders:grammar modules") (just-compile "version;loaders:lisp-switch-settings") (just-compile "version;loaders:logicals") (just-compile "version;loaders:master-loader") (just-compile "version;loaders:model") (unless *nothing-Mac-specific* (just-compile "version;loaders:save routine")) (just-compile "version;loaders:stubs") (just-compile "version;salutation") (just-compile "version;updating") (just-compile "config;explicitly-loaded-files") (just-compile "config;image") (just-compile "config;launch") (just-compile "config;load") (when nil (just-compile "grammar-configurations;academic grammar") (just-compile "grammar-configurations;AssetNet") (just-compile "grammar-configurations;bbn") (just-compile "grammar-configurations;Debris analysis") (just-compile "grammar-configurations;full grammar") (just-compile "grammar-configurations;minimal dm&p grammar") (just-compile "grammar-configurations;partial grammar") (just-compile "grammar-configurations;public grammar") (just-compile "grammar-configurations;SUN") ) (unless *nothing-Mac-specific* (just-compile "images;do-the-save")) (when nil ( just - compile " init;workspaces : Apple " ) -- references (just-compile "init;workspaces:Darwin") (just-compile "init;workspaces:dm&p") (just-compile "init;workspaces:generic") (just-compile "init;workspaces:Mari") (just-compile "init;workspaces:quarterly earnings") (just-compile "init;workspaces:Sandia") (just-compile "init;workspaces:SUN") (just-compile "init;workspaces:SUN1") (just-compile "init;workspaces:SUN2") (just-compile "init;workspaces:Switchboard") (just-compile "init;workspaces:text segments") (just-compile "init;workspaces:tipster") (just-compile "init;workspaces:Who's News") (just-compile "init;workspaces:workbench") :finished-compilation </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/charlieg/Sparser/b9bb7d01d2e40f783f3214fc104062db3d15e608/Sparser/code/s/init/scripts/compile-singletons.lisp</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">lisp</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "> -*- Mode:LISP; Syntax:Common-Lisp; 
Package:(CL-USER COMMON-LISP) -*- File: "compile-singletons" Module: "init;scripts:" Derived from compile-everthing on 3/20/07 -------------------------------------------------- compile individual files in the preloader, etc. -------------------------------------------------- standard "academic grammar" (public) "grammar"</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "> Copyright ( c ) 2007 BBNT Solutions LLC . All Rights Reserved $ I d : compile-singletons.lisp 207 2009 - 06 - 18 20:59:16Z cgreenba $ version : March 2007 (in-package :sparser) (just-compile "init;everything") (just-compile "init;Lisp:kind-of-lisp") (just-compile "init;Lisp:grammar-module") (just-compile "init;Lisp:ddef-logical") (just-compile "init;Lisp:lload") (just-compile "init;scripts:just-dm&p") (when nil (just-compile "init;scripts:Academic version") (just-compile "init;scripts:Apple loader") (just-compile "init;scripts:BBN") (just-compile "init;scripts:compile everything") (just-compile "init;scripts:compile academic") (just-compile "init;scripts:copy everything") (just-compile "init;scripts:copy academic") (just-compile "init;scripts:just dm&p") (just-compile "init;scripts:no grammar") (just-compile "init;scripts:SUN") ) (just-compile "version;loaders:grammar") (just-compile "version;loaders:grammar modules") (just-compile "version;loaders:lisp-switch-settings") (just-compile "version;loaders:logicals") (just-compile "version;loaders:master-loader") (just-compile "version;loaders:model") (unless *nothing-Mac-specific* (just-compile "version;loaders:save routine")) (just-compile "version;loaders:stubs") (just-compile "version;salutation") (just-compile "version;updating") (just-compile "config;explicitly-loaded-files") (just-compile "config;image") (just-compile "config;launch") (just-compile "config;load") (when nil (just-compile "grammar-configurations;academic grammar") (just-compile "grammar-configurations;AssetNet") (just-compile "grammar-configurations;bbn") (just-compile "grammar-configurations;Debris analysis") (just-compile "grammar-configurations;full grammar") (just-compile "grammar-configurations;minimal dm&p grammar") (just-compile "grammar-configurations;partial grammar") (just-compile "grammar-configurations;public grammar") (just-compile "grammar-configurations;SUN") ) (unless *nothing-Mac-specific* (just-compile "images;do-the-save")) (when nil ( just - compile " init;workspaces : Apple " ) -- references (just-compile "init;workspaces:Darwin") (just-compile "init;workspaces:dm&p") (just-compile "init;workspaces:generic") (just-compile "init;workspaces:Mari") (just-compile "init;workspaces:quarterly earnings") (just-compile "init;workspaces:Sandia") (just-compile "init;workspaces:SUN") (just-compile "init;workspaces:SUN1") (just-compile "init;workspaces:SUN2") (just-compile "init;workspaces:Switchboard") (just-compile "init;workspaces:text segments") (just-compile "init;workspaces:tipster") (just-compile "init;workspaces:Who's News") (just-compile "init;workspaces:workbench") :finished-compilation </span></div> </div></div> </td> </tr><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610215"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block 
">2a158bd53e3f5eaf51db78ecd149879964f7516bedd85dc706efa569256e1f3f</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">avsm/platform</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">test.ml</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">(**************************************************************************) (* *) : a generic graph library for OCaml Copyright ( C ) 2004 - 2007 , and (* *) (* This software is free software; you can redistribute it and/or *) modify it under the terms of the GNU Library General Public License version 2 , with the special exception on linking (* described in file LICENSE. *) (* *) (* This software is distributed in the hope that it will be useful, *) (* but WITHOUT ANY WARRANTY; without even the implied warranty of *) (* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. *) (* *) (**************************************************************************) open Format open Graph module Int = struct type t = int let compare = compare let hash = Hashtbl.hash let equal = (=) let default = 0 end module G = Persistent.Digraph.ConcreteLabeled(Int)(Int) let g = G.empty let g = G.add_vertex g 1 let g = G.add_edge_e g (G.E.create 1 10 2) let g = G.add_edge_e g (G.E.create 2 50 3) let g = G.add_edge_e g (G.E.create 1 30 4) let g = G.add_edge_e g (G.E.create 1 100 5) let g = G.add_edge_e g (G.E.create 3 10 5) let g = G.add_edge_e g (G.E.create 4 20 3) let g = G.add_edge_e g (G.E.create 4 60 5) let g = G.remove_vertex g 4 let gc = G.add_edge_e g (G.E.create 5 10 1) let gc = G.add_vertex gc 6 module W = struct type edge = G.E.t type t = int let weight e = G.E.label e let zero = 0 let add = (+) let sub = (-) let compare = compare end module Dij = Path.Dijkstra(G)(W) let p,w = Dij.shortest_path gc 1 5 open G.E let () = List.iter (fun e -> printf "[%d -> %d]" (src e) (dst e)) p; printf "@." module Comp = Components.Make(G) let g = G.add_edge g 3 2 let n, f = Comp.scc g let () = G.iter_edges (fun u v -> printf "%d -> %d@." u v) g let () = printf "%d components@." n let () = G.iter_vertex (fun v -> printf " %d -> %d@." v (f v)) g </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/avsm/platform/b254e3c6b60f3c0c09dfdcde92eb1abdc267fa1c/duniverse/ocamlgraph.1.8.8%2Bdune/tests/test.ml</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">ocaml</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">************************************************************************ This software is free software; you can redistribute it and/or described in file LICENSE. This software is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
************************************************************************</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "> : a generic graph library for OCaml Copyright ( C ) 2004 - 2007 , and modify it under the terms of the GNU Library General Public License version 2 , with the special exception on linking open Format open Graph module Int = struct type t = int let compare = compare let hash = Hashtbl.hash let equal = (=) let default = 0 end module G = Persistent.Digraph.ConcreteLabeled(Int)(Int) let g = G.empty let g = G.add_vertex g 1 let g = G.add_edge_e g (G.E.create 1 10 2) let g = G.add_edge_e g (G.E.create 2 50 3) let g = G.add_edge_e g (G.E.create 1 30 4) let g = G.add_edge_e g (G.E.create 1 100 5) let g = G.add_edge_e g (G.E.create 3 10 5) let g = G.add_edge_e g (G.E.create 4 20 3) let g = G.add_edge_e g (G.E.create 4 60 5) let g = G.remove_vertex g 4 let gc = G.add_edge_e g (G.E.create 5 10 1) let gc = G.add_vertex gc 6 module W = struct type edge = G.E.t type t = int let weight e = G.E.label e let zero = 0 let add = (+) let sub = (-) let compare = compare end module Dij = Path.Dijkstra(G)(W) let p,w = Dij.shortest_path gc 1 5 open G.E let () = List.iter (fun e -> printf "[%d -> %d]" (src e) (dst e)) p; printf "@." module Comp = Components.Make(G) let g = G.add_edge g 3 2 let n, f = Comp.scc g let () = G.iter_edges (fun u v -> printf "%d -> %d@." u v) g let () = printf "%d components@." n let () = G.iter_vertex (fun v -> printf " %d -> %d@." v (f v)) g </span></div> </div></div> </td> </tr><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610216"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">1fa4dbcb1799f9f9cf25347ecd197741923658e9810bed2056d07b67775ba2d6</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">yellowtides/owenbot-hs</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">Config.hs</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "># LANGUAGE DeriveGeneric # module Config where import Control.Exception (IOException, try) import Data.Aeson (FromJSON, ToJSON, eitherDecode, encode) import qualified Data.ByteString as BS (ByteString, readFile, writeFile) import qualified Data.ByteString.Lazy as BL (ByteString, fromStrict, toStrict) import qualified Data.HashMap.Strict as HM import qualified Data.Text as T import GHC.Generics import System.Directory (XdgDirectory(XdgConfig), createDirectoryIfMissing, getXdgDirectory) import Discord.Types (ChannelId) | OwenConfig represents the configuration of Owenbot ! 
data OwenConfig = OwenConfig { owenConfigToken :: T.Text , owenConfigDevs :: [T.Text] these two do n't do anything yet , owenConfigDadFreq :: Int -- because reading values every time is slow and a solution can't be thought of , owenConfigRepoDir :: Maybe FilePath , owenConfigStartupChan :: ChannelId , owenConfigQuizChan :: ChannelId -- maybe move this into a per-guild db } deriving (Generic, Show) instance FromJSON OwenConfig instance ToJSON OwenConfig getConfigDir :: IO FilePath getConfigDir = getXdgDirectory XdgConfig "owen" | Takes a filename and reads from it into a data structure . readConfig :: IO OwenConfig readConfig = do createDirectoryIfMissing True <$> getConfigDir fp <- (<> "/config.json") <$> getConfigDir json <- BS.readFile fp case eitherDecode (BL.fromStrict json) of Left e -> error $ "Incorrect config format, can't continue running Owen:\n[ERROR] " <> e Right cfg -> pure cfg -- (commented since writing to config is never necessary and goes against rules) -- | Takes a filename (with no suffix) and a data structure, and writes a json -- file to that location. writeConfig : : ToJSON a = > String - > a - > IO ( ) -- writeConfig file db = do -- fp <- mkPath file -- BS.writeFile fp $ BL.toStrict $ encode db </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/yellowtides/owenbot-hs/74669e8620b5202e7ad9c5ac69cbf74118fc9b64/src/Config.hs</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">haskell</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "> because reading values every time is slow and a solution can't be thought of maybe move this into a per-guild db (commented since writing to config is never necessary and goes against rules) | Takes a filename (with no suffix) and a data structure, and writes a json file to that location. writeConfig file db = do fp <- mkPath file BS.writeFile fp $ BL.toStrict $ encode db</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "># LANGUAGE DeriveGeneric # module Config where import Control.Exception (IOException, try) import Data.Aeson (FromJSON, ToJSON, eitherDecode, encode) import qualified Data.ByteString as BS (ByteString, readFile, writeFile) import qualified Data.ByteString.Lazy as BL (ByteString, fromStrict, toStrict) import qualified Data.HashMap.Strict as HM import qualified Data.Text as T import GHC.Generics import System.Directory (XdgDirectory(XdgConfig), createDirectoryIfMissing, getXdgDirectory) import Discord.Types (ChannelId) | OwenConfig represents the configuration of Owenbot ! data OwenConfig = OwenConfig { owenConfigToken :: T.Text , owenConfigDevs :: [T.Text] these two do n't do anything yet , owenConfigRepoDir :: Maybe FilePath , owenConfigStartupChan :: ChannelId } deriving (Generic, Show) instance FromJSON OwenConfig instance ToJSON OwenConfig getConfigDir :: IO FilePath getConfigDir = getXdgDirectory XdgConfig "owen" | Takes a filename and reads from it into a data structure . 
readConfig :: IO OwenConfig readConfig = do createDirectoryIfMissing True <$> getConfigDir fp <- (<> "/config.json") <$> getConfigDir json <- BS.readFile fp case eitherDecode (BL.fromStrict json) of Left e -> error $ "Incorrect config format, can't continue running Owen:\n[ERROR] " <> e Right cfg -> pure cfg writeConfig :: ToJSON a => String -> a -> IO ()

row 610217 · 1d6e98dc726ddaa88ed26b7ba120d3d0ee53f3e041cedee9b3a288cc4547052c · haskell-tools/haskell-tools · Quoted.hs · haskell
https://raw.githubusercontent.com/haskell-tools/haskell-tools/b1189ab4f63b29bbf1aa14af4557850064931e32/src/refactor/examples/TH/Quoted.hs
{-# LANGUAGE TemplateHaskell #-} module TH.Quoted where import qualified Text.Read.Lex (Lexeme) $(let x = ''Text.Read.Lex.Lexeme in return [])

row 610218 · 8e48bab51ee4eac5a200fe448dee61e5cb123b9e425f4e58810d21b0fc0bf685 · bobzhang/fan · S.ml
open Fan.Syntax; module Ast = Camlp4Ast; open FanUtil; let open FanParsers in begin pa_r
(module Fan); pa_rp (module Fan); pa_q (module Fan); pa_g (module Fan); pa_l (module Fan); pa_m (module Fan); end; Fan.iter_and_take_callbacks (fun (_,f) -> f ()) ; let t e s = Gram.parse_string e FanLoc.string_loc s; (* {:extend.create|Gram s v|}; *) with " patt " (* {:extend|Gram *) (* s: *) (* ["`"; a_ident{s} -> {| `$s |} *) | " ` " ; a_ident{v } ; ` ANT ( ( " " | " anti " as n ) , s ) - > { | ` $ v $ ( anti : mk_anti ~c:"patt " n s)| } | " ` " ; a_ident{s } ; ` STR(_,v ) - > { | ` $ s $ str : v | } (* |"`"; a_ident{s}; `LID x -> {| `$s $lid:x |} *) (* |"`"; a_ident{s}; "("; L1 v SEP ","{v}; ")" -> *) (* match v with *) (* [ [x] -> {| `$s $x |} *) | [ x::xs ] - > { | ` $ s ( $ x,$list : xs ) | } (* | _ -> assert false ] ] *) (* v: *) (* [ `STR(_,s) -> {| $str:s|} *) (* | `LID x -> (\* {| $(id:{:ident|$lid:x|}) |} *\) {| $lid:x|} *) | S{p1 } ; " | " ; S{p2 } - > { |$p1 | $ p2 | } | " ( " ; S{p1 } ; " as " ; S{p2 } ; " ) " - > { | ( $ p1 as $ p2 ) | } (* ] *) (* |}; *) (* (\* *) t s " ` A ( ( \"x\"|\"y\ " as n),s ) " ; (* t s "`A $x"; *) (* t s `UID ("First"|"Last" as x ) *) Comparing two ant (* *\) *) (* Gram.dump Format.std_formatter expr; *) (* {:delete|Gram ident:[`UID i]|}; *) ( \ * { : delete|Gram expr:[TRY ; S ; " ) " ] | } ; * \ ) (* t expr "A.B.C.D.c"; *) {:extend.create|Gram a b a_eoi |} ; {:extend|Gram a: [ TRY module_longident_dot_lparen{s} -> s | b{s} -> s ] b "ident": [ (* a_UIDENT{i} -> {| $uid:i |} *) (* | *) a_LIDENT{i} -> {| $lid:i |} | `UID i -> {|$uid:i|} | `UID i; "."; S{j} -> {| $uid:i.$j |} (* | a_UIDENT{i}; "."; S{j} -> {| $uid:i.$j |} *)] [ ` LID i - > { | $ lid : } | ` UID s ; " . " ; S{j } - > { |$uid : s.$j| } ] a_eoi: [a{i} ; `EOI -> i] |}; (* {:extend.create|Gram c|} ; *) (* with "ident"{:extend|Gram local:d; *) c : [ d { x } ; " ( " - > { | ] (* d:[`UID x -> x ] *) (* |}; *) t a_eoi "A.C.U.b" ; </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/bobzhang/fan/7ed527d96c5a006da43d3813f32ad8a5baa31b7f/src/todoml/testr/S.ml</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">ocaml</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "> {:extend.create|Gram s v|}; {:extend|Gram s: ["`"; a_ident{s} -> {| `$s |} |"`"; a_ident{s}; `LID x -> {| `$s $lid:x |} |"`"; a_ident{s}; "("; L1 v SEP ","{v}; ")" -> match v with [ [x] -> {| `$s $x |} | _ -> assert false ] ] v: [ `STR(_,s) -> {| $str:s|} | `LID x -> (\* {| $(id:{:ident|$lid:x|}) |} *\) {| $lid:x|} ] |}; (\* t s "`A $x"; t s `UID ("First"|"Last" as x ) *\) Gram.dump Format.std_formatter expr; {:delete|Gram ident:[`UID i]|}; t expr "A.B.C.D.c"; a_UIDENT{i} -> {| $uid:i |} | | a_UIDENT{i}; "."; S{j} -> {| $uid:i.$j |} {:extend.create|Gram c|} ; with "ident"{:extend|Gram local:d; d:[`UID x -> x ] |}; </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">open Fan.Syntax; module Ast = Camlp4Ast; open FanUtil; let open FanParsers in begin pa_r (module Fan); pa_rp (module Fan); pa_q (module Fan); 
pa_g (module Fan); pa_l (module Fan); pa_m (module Fan); end; Fan.iter_and_take_callbacks (fun (_,f) -> f ()) ; let t e s = Gram.parse_string e FanLoc.string_loc s; with " patt " | " ` " ; a_ident{v } ; ` ANT ( ( " " | " anti " as n ) , s ) - > { | ` $ v $ ( anti : mk_anti ~c:"patt " n s)| } | " ` " ; a_ident{s } ; ` STR(_,v ) - > { | ` $ s $ str : v | } | [ x::xs ] - > { | ` $ s ( $ x,$list : xs ) | } | S{p1 } ; " | " ; S{p2 } - > { |$p1 | $ p2 | } | " ( " ; S{p1 } ; " as " ; S{p2 } ; " ) " - > { | ( $ p1 as $ p2 ) | } t s " ` A ( ( \"x\"|\"y\ " as n),s ) " ; Comparing two ant ( \ * { : delete|Gram expr:[TRY ; S ; " ) " ] | } ; * \ ) {:extend.create|Gram a b a_eoi |} ; {:extend|Gram a: [ TRY module_longident_dot_lparen{s} -> s | b{s} -> s ] b "ident": [ | `UID i -> {|$uid:i|} | `UID i; "."; S{j} -> {| $uid:i.$j |} [ ` LID i - > { | $ lid : } | ` UID s ; " . " ; S{j } - > { |$uid : s.$j| } ] a_eoi: [a{i} ; `EOI -> i] |}; c : [ d { x } ; " ( " - > { | ] t a_eoi "A.C.U.b" ; </span></div> </div></div> </td> </tr><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610219"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">08f2630fa66f364d7212d5ff5520548fbb64665e67e8558fc24cb9a99ae9a8de</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">naushadh/hello-world</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">Lib.hs</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">{-# LANGUAGE OverloadedStrings #-} module Lib ( dbFour , PSQL.defaultConnectInfo , PSQL.ConnectInfo(..) ) where import qualified Database.PostgreSQL.Simple as PSQL dbFour :: PSQL.ConnectInfo -> IO () dbFour connectInfo = do conn <- PSQL.connect connectInfo [PSQL.Only i] <- PSQL.query_ conn "select 2 + 2" putStrLn "dbFour" putStrLn . show $ (i :: Int) return ()</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/naushadh/hello-world/742b24ed9be53d95a7f6f9177b44132c635b78ab/hello-postgresql/src/Lib.hs</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">haskell</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "># LANGUAGE OverloadedStrings #</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "> module Lib ( dbFour , PSQL.defaultConnectInfo , PSQL.ConnectInfo(..) 
) where import qualified Database.PostgreSQL.Simple as PSQL dbFour :: PSQL.ConnectInfo -> IO () dbFour connectInfo = do conn <- PSQL.connect connectInfo [PSQL.Only i] <- PSQL.query_ conn "select 2 + 2" putStrLn "dbFour" putStrLn . show $ (i :: Int) return ()</span></div> </div></div> </td> </tr><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610220"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">a5fca846a777dc63dcaf9066866da266ab6fe1e1cfe8fd8efce0a870b3b68ff9</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">liquidz/antq</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">edn.clj</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">(ns antq.report.edn (:require [antq.report :as report])) (defmethod report/reporter "edn" [deps _options] (->> deps ;; Convert a record to just a map (map #(merge {} %)) ;; NOTE Add diff-url for backward compatibility (map #(assoc % :diff-url (:changes-url %))) (pr-str) (println))) </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/liquidz/antq/ca8472b28702f5e568492001bc476fb09e5b2e6b/src/antq/report/edn.clj</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">clojure</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "> Convert a record to just a map NOTE Add diff-url for backward compatibility</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">(ns antq.report.edn (:require [antq.report :as report])) (defmethod report/reporter "edn" [deps _options] (->> deps (map #(merge {} %)) (map #(assoc % :diff-url (:changes-url %))) (pr-str) (println))) </span></div> </div></div> </td> </tr><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610221"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">9facb518cfcdb95f99f0c96280bdaabcb3dafcd819a13f5dccd5aeb1d767a751</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">avsm/platform</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">chaoticIteration.mli</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div 
class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">(**************************************************************************) (* *) : a generic graph library for OCaml Copyright ( C ) 2004 - 2010 , and (* *) (* This software is free software; you can redistribute it and/or *) modify it under the terms of the GNU Library General Public License version 2.1 , with the special exception on linking (* described in file LICENSE. *) (* *) (* This software is distributed in the hope that it will be useful, *) (* but WITHOUT ANY WARRANTY; without even the implied warranty of *) (* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. *) (* *) (**************************************************************************) Copyright © 2015 thi.suzanne ( @ ) gmail.com > * École Normale Supérieure , Département d'Informatique * Paris Sciences et Lettres * École Normale Supérieure, Département d'Informatique * Paris Sciences et Lettres *) * computation with widenings using weak topological orderings as defined by and implemented in { ! WeakTopological } . { ! } is another ( simpler ) fixpoint computation module , with general references . The general idea of fixpoint computation is to iteratively compute the result of the analysis a vertex from the results of its predecessors , until stabilisation is achieved on every vertex . The way to determine , at each step , the next vertex to analyse is called a { e chaotic iteration strategy } . A good strategy can make the analysis much faster . To enforce the termination of the analyse and speed it up when it terminates in too many steps , one can also use a { e widening } , to ensure that there is no infinite ( nor too big ) sequence of intermediary results for a given vertex . However , it usually results in a loss of precision , which is why choosing a good widening set ( the set of points on which the widening will be performed ) is mandatory . This module computes a fixpoint over a graph using weak topological ordering , which can be used to get both the iteration strategy and the widening set . The module { ! WeakTopological } aims to compute weak topological orderings which are known to be excellent decompositions w.r.t these two critical points . @author @see " Efficient chaotic iteration strategies with widenings " , , Formal Methods in Programming and their Applications , Springer Berlin Heidelberg , 1993 orderings as defined by François Bourdoncle and implemented in {!WeakTopological}. {!Fixpoint} is another (simpler) fixpoint computation module, with general references. The general idea of fixpoint computation is to iteratively compute the result of the analysis a vertex from the results of its predecessors, until stabilisation is achieved on every vertex. The way to determine, at each step, the next vertex to analyse is called a {e chaotic iteration strategy}. A good strategy can make the analysis much faster. To enforce the termination of the analyse and speed it up when it terminates in too many steps, one can also use a {e widening}, to ensure that there is no infinite (nor too big) sequence of intermediary results for a given vertex. However, it usually results in a loss of precision, which is why choosing a good widening set (the set of points on which the widening will be performed) is mandatory. This module computes a fixpoint over a graph using weak topological ordering, which can be used to get both the iteration strategy and the widening set. 
The module {!WeakTopological} aims to compute weak topological orderings which are known to be excellent decompositions w.r.t these two critical points. @author Thibault Suzanne @see "Efficient chaotic iteration strategies with widenings", François Bourdoncle, Formal Methods in Programming and their Applications, Springer Berlin Heidelberg, 1993 *) * How to determine which vertices are to be considered as widening points . - [ FromWto ] indicates to use as widening points the heads of the weak topological ordering given as a parameter of the analysis function . This will always be a safe choice , and in most cases it will also be a good one with respect to the precision of the analysis . - [ Predicate f ] indicates to use [ f ] as the characteristic function of the widening set . [ Predicate ( fun _ - > false ) ] can be used if a widening is not needed . This variant can be used when there is a special knowledge of the graph to achieve a better precision of the analysis . For instance , if the graph happens to be the flow graph of a program , the predicate should be true for control structures heads . In any case , a condition for a safe widening predicate is that every cycle of the graph should go through at least one widening point . Otherwise , the analysis may not terminate . Note that even with a safe predicate , ensuring the termination does still require a correct widening definition . points. - [FromWto] indicates to use as widening points the heads of the weak topological ordering given as a parameter of the analysis function. This will always be a safe choice, and in most cases it will also be a good one with respect to the precision of the analysis. - [Predicate f] indicates to use [f] as the characteristic function of the widening set. [Predicate (fun _ -> false)] can be used if a widening is not needed. This variant can be used when there is a special knowledge of the graph to achieve a better precision of the analysis. For instance, if the graph happens to be the flow graph of a program, the predicate should be true for control structures heads. In any case, a condition for a safe widening predicate is that every cycle of the graph should go through at least one widening point. Otherwise, the analysis may not terminate. Note that even with a safe predicate, ensuring the termination does still require a correct widening definition. *) type 'a widening_set = | FromWto | Predicate of ('a -> bool) * Minimal graph signature for the algorithm . Sub - signature of [ Traverse . G ] . Sub-signature of [Traverse.G]. *) module type G = sig type t module V : Sig.COMPARABLE module E : sig type t val src : t -> V.t end val fold_pred_e : (E.t -> 'a -> 'a) -> t -> V.t -> 'a -> 'a end (** Parameters of the analysis. *) module type Data = sig type t (** Information stored at each vertex. *) type edge (** Edge of the graph. *) val join : t -> t -> t (** Operation to join data when several paths meet. *) val equal : t -> t -> bool (** Equality test for data. *) val analyze : edge -> t -> t * How to analyze one edge : given an edge and the data stored at its origin , it must compute the resulting data to be stored at its destination . its origin, it must compute the resulting data to be stored at its destination. *) val widening : t -> t -> t (** The widening operator. [fun _ x -> x] is correct and is equivalent to not doing widening. Note that to enforce termination, the following property should hold: for all sequence [x_0, x_1, ...] 
of data, the sequence defined by [y_0 = x_0; y_{i+1} = widening y_i x_i] stabilizes in finite time. *) end module Make (G : G) (D : Data with type edge = G.E.t) : sig module M : Map.S with type key = G.V.t (** Map used to store the result of the analysis *) val recurse : G.t -> G.V.t WeakTopological.t -> (G.V.t -> D.t) -> G.V.t widening_set -> int -> D.t M.t * [ recurse g wto init widening_set widening_delay ] computes the fixpoint of the analysis of a graph . This function uses the recursive iteration strategy : it recursively stabilizes the subcomponents of every component every time the component is stabilized ( cf . Bourdoncle 's paper ) . @param g The graph to analyse . @param wto A weak topological ordering of the vertices of [ g ] . @param widening_set On which points to do the widening . @param widening_delay How many computations steps will be done before using widening to speed up the stabilisation . This counter is reset when entering each component , and is shared between all outermost vertices of this component . A negative value means [ 0 ] . @param init How to compute the initial analysis data . @return A map from vertices of [ g ] to their analysis result . fixpoint of the analysis of a graph. This function uses the recursive iteration strategy: it recursively stabilizes the subcomponents of every component every time the component is stabilized (cf. Bourdoncle's paper). @param g The graph to analyse. @param wto A weak topological ordering of the vertices of [g]. @param widening_set On which points to do the widening. @param widening_delay How many computations steps will be done before using widening to speed up the stabilisation. This counter is reset when entering each component, and is shared between all outermost vertices of this component. A negative value means [0]. @param init How to compute the initial analysis data. @return A map from vertices of [g] to their analysis result. *) end </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/avsm/platform/b254e3c6b60f3c0c09dfdcde92eb1abdc267fa1c/duniverse/ocamlgraph.1.8.8%2Bdune/src/chaoticIteration.mli</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">ocaml</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">************************************************************************ This software is free software; you can redistribute it and/or described in file LICENSE. This software is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. ************************************************************************ * Parameters of the analysis. * Information stored at each vertex. * Edge of the graph. * Operation to join data when several paths meet. * Equality test for data. * The widening operator. [fun _ x -> x] is correct and is equivalent to not doing widening. Note that to enforce termination, the following property should hold: for all sequence [x_0, x_1, ...] 
ca13a580e63fd3733485a2e16698253fa27ce22e67c8d6ac0aa9bb2285db8321
jrm-code-project/LISP-Machine
character.lisp

;;; -*- Mode: LISP; Package: SI; Cold-Load: T; : CL; Lowercase: T -*-
;;; Character functions and variables.

; character lossage of the most complete kind

(defconstant char-code-limit #o400 "Character code values must be less than this.")
(defconstant char-font-limit #o400 "Font codes in characters must be less than this.")
(defconstant char-bits-limit #o20
  "All the special bits in a character must be less than this.
They are Control, Meta, Super and Hyper.")
(defconstant char-control-bit 1 "This bit within the bits of a character is the Control bit.")
(defconstant char-meta-bit 2 "This bit, within the bits of a character, is the Meta bit.")
(defconstant char-super-bit 4 "This bit, within the bits of a character, is the Super bit.")
(defconstant char-hyper-bit 8. "This bit, within the bits of a character, is the Hyper bit.")

(defsubst char-code (char)
  "Returns the character code of the character CHAR.
This is sans the font number and meta bits."
  (ldb %%ch-char char))

(defsubst char-font (char)
  "Returns the font number of character CHAR."
  (ldb %%ch-font char))

(defsubst char-bits (char)
  "Returns the special bits of the character CHAR."
  (%logldb %%kbd-control-meta char))

;These are now microcoded
;(defsubst alpha-char-p (char)
;  "T if CHAR is alphabetic with no meta bits."
;  (and (zerop (ldb %%kbd-control-meta char))
;       (or ( #/A (ldb %%ch-char char) #/Z)
;           ( #/a (ldb %%ch-char char) #/z))))

;(defsubst upper-case-p (char)
;  "T if CHAR is an upper case letter with no meta bits."
;  (and (zerop (ldb %%kbd-control-meta char))
;       ( #/A (ldb %%ch-char char) #/Z)))

;(defsubst lower-case-p (char)
;  "T if CHAR is an upper case letter with no meta bits."
;  (and (zerop (ldb %%kbd-control-meta char))
;       ( #/a (ldb %%ch-char char) #/z)))

;(defsubst both-case-p (char)
;  "T if CHAR is a character which has upper and lower case forms, with no meta bits.
;This is just letters."
;  (and (zerop (ldb %%kbd-control-meta char))
;       (or ( #/A (ldb %%ch-char char) #/Z)
;           ( #/a (ldb %%ch-char char) #/z))))

;( (char)
;  "T if CHAR is a letter or digit, with no meta bits."
;  (and (zerop (ldb %%kbd-control-meta char))
;       (or ( #/0 (ldb %%ch-char char) #/9)
;           ( #/A (ldb %%ch-char char) #/Z)
;           ( #/a (ldb %%ch-char char) #/z))))

(defsubst char< (&rest chars)
  "T if all the characters are monotonically increasing, considering bits, font and case."
  (apply #'< chars))

(defsubst char> (&rest chars)
  "T if all the characters are monotonically decreasing, considering bits, font and case."
  (apply #'> chars))

(defsubst char<= (&rest chars)
  "T if all the characters are monotonically nondecreasing, considering bits, font and case."
  (apply #' chars))

(defsubst char>= (&rest chars)
  "T if all the characters are monotonically nonincreasing, considering bits, font and case."
  (apply #' chars))

(defsubst char (&rest chars)
  "T if all the characters are monotonically nondecreasing, considering bits, font and case."
  (apply #' chars))

(defsubst char (&rest chars)
  "T if all the characters are monotonically nonincreasing, considering bits, font and case."
  (apply #' chars))

(defsubst char/= (&rest chars)
  "T if all the characters are distinct (no two equal), considering bits, font and case."
  (apply #' chars))

(defsubst char= (&rest chars)
  "T if all the characters are equal, considering bits, font and case."
  (apply #'= chars))

(defsubst char (&rest chars)
  "T if all the characters are distinct (no two equal), considering bits, font and case."
  (apply #' chars))

(defun standard-char-p (char)
  "T if CHAR is one of the ASCII printing characters or the Newline character."
  (or (char= char #\Newline)
      ( (char-int #\space) (char-int char) #o176)))

(defsubst graphic-char-p (char)
  "T if CHAR is a graphic character, one which prints as a single glyph.
Things like #\NEWLINE and #\RESUME and #\CONTROL-A are not graphic."
  ( 0 (char-int char) #o177))

(defsubst string-char-p (char)
  "T if CHAR is a character which ordinary strings can contain.
Note that ART-FAT-STRING arrays can contain additional characters,
for which this function nevertheless returns NIL."
  ( 0 (char-int char) #o377))

;>> flush
(defsubst fat-string-char-p (char)
  "T if CHAR is a character which a fat string can contain."
  ( 0 (char-int char) #o177777))

(defun digit-char-p (char &optional (radix 10.))
  "Weight of CHAR as a digit, if it is a digit in radix RADIX; else NIL.
The weights of #\0 through #\9 are 0 through 9; the weights of letters start at ten for A.
RADIX does not affect the weight of any digit, but it affects whether NIL is returned."
  (and (zerop (char-bits char))
       (let ((basic (char-code char)))
         (and (if ( radix 10.)
                  ( (char-int #\0) basic (+ (char-int #\0) radix -1))
                (or ( (char-int #\0) basic (char-int #\9))
                    ( (char-int #\A) (setq basic (char-code (char-upcase char))) (+ (char-int #\A) radix -11.))))
              (if ( basic (char-int #\9))
                  (- basic (char-int #\0))
                (+ 10. (- basic (char-int #\A))))))))

;;; This is symbol*cs braindeath. Darn if I know what it's for.
;;; It's apparently something to do with their way of making
;;; standard characters. It is not a common lisp thing.
(defun char-standard (char)
  (declare (ignore char))
  t)

(defun char-not-equal (&rest chars)
  "T if all the characters are distinct, ignoring bits, font and case."
  (do ((tail chars (cdr tail)))
      ((null (cdr tail)) t)
    (let ((char1 (car tail)))
      (dolist (char2 (cdr tail))
        (if (char-equal char1 char2) (return-from char-not-equal nil))))))

;; compiled code usually calls the char-equal microinstruction
(defun char-equal (&rest chars)
  "T if all the characters are equal, ignoring bits, font and case."
  (do ((tail chars (cdr tail)))
      ((null (cdr tail)) t)
    (unless (char-equal (car tail) (cadr tail)) (return nil))))

(defun char-lessp (&rest chars)
  "T if all the characters are monotonically increasing, ignoring bits, font and case."
  (do ((tail chars (cdr tail)))
      ((null (cdr tail)) t)
    (let ((ch1 (char-code (car tail)))
          (ch2 (char-code (cadr tail))))
      (setq ch1 (char-upcase ch1))
      (setq ch2 (char-upcase ch2))
      (unless (< ch1 ch2) (return nil)))))

(defun char-greaterp (&rest chars)
  "T if all the characters are monotonically decreasing, ignoring bits, font and case."
  (do ((tail chars (cdr tail)))
      ((null (cdr tail)) t)
    (let ((ch1 (char-code (car tail)))
          (ch2 (char-code (cadr tail))))
      (setq ch1 (char-upcase ch1))
      (setq ch2 (char-upcase ch2))
      (unless (> ch1 ch2) (return nil)))))

(defun char-not-lessp (&rest chars)
  "T if all the characters are monotonically nonincreasing, ignoring bits, font and case."
  (do ((tail chars (cdr tail)))
      ((null (cdr tail)) t)
    (let ((ch1 (char-code (car tail)))
          (ch2 (char-code (cadr tail))))
      (setq ch1 (char-upcase ch1))
      (setq ch2 (char-upcase ch2))
      (unless ( ch1 ch2) (return nil)))))

(defun char-not-greaterp (&rest chars)
  "T if all the characters are monotonically nondecreasing, ignoring bits, font and case."
  (do ((tail chars (cdr tail)))
      ((null (cdr tail)) t)
    (let ((ch1 (char-code (car tail)))
          (ch2 (char-code (cadr tail))))
      (setq ch1 (char-upcase ch1))
      (setq ch2 (char-upcase ch2))
      (unless ( ch1 ch2) (return nil)))))

;; now microcoded
;(defun char-upcase (char &aux subchar)
;  "Return the uppercase version of CHAR.
;If CHAR does not have an uppercase version, it is returned unchanged."
;  ( (char-code char))
;  (if (  # #/z)
;      (if (fixnump char)
;          (logxor #o40 char)
;        (int-char (logxor #o40 char)))
;    char))

;(defun char-downcase (char &aux subchar)
;  "Return the lowercase version of CHAR.
;If CHAR does not have a lowercase version, it is returned unchanged."
;  ( (ldb %%ch-char char))
;  (if ( #/A subchar #/Z)
;      (if (fixnump char)
;          (logxor #o40 char)
;        (int-char (logxor #o40 char)))
;    char))

(defun char-flipcase (char)
  "If CHAR is an uppercase character, return its lowercase counterpart, and vice-versa.
Returns CHAR unchanged if CHAR is neither upper nor lower case."
  (cond ((upper-case-p char) (char-downcase char))
        ((lower-case-p char) (char-upcase char))
        (t char)))

(defun code-char (code &optional (bits 0) (font 0))
  "Returns a character whose code comes from CODE, bits from BITS and font from FONT.
CODE can be a number or a character.
NIL is returned if it is not possible to have a character object with the specified FONT and BITS."
  (if (and ( 0 bits (1- char-bits-limit))
           ( 0 font (1- char-font-limit)))
      (%make-pointer dtp-character (%logdpb bits %%kbd-control-meta (dpb font %%ch-font code)))
    nil))

(deff make-char 'code-char)

(defun digit-char (weight &optional (radix 10.) (font 0))
  "Return a character which signifies WEIGHT in radix RADIX, with FONT as specified.
This is always NIL if WEIGHT is  RADIX.
Otherwise, for WEIGHT between 0 and 9, you get characters 0 through 9;
for higher weights, you get letters."
  (if (not ( 0 weight (1- radix)))
      nil
    (if (not ( 0 font char-font-limit))
        nil
      (%make-pointer dtp-character
                     (dpb font %%ch-font
                          (if (< weight 10.)
                              (+ (char-code #\0) weight)
                            (+ (char-code #\A) weight -10.)))))))

;Now
;(defun char-int (char)
;  "Returns an integer whose value corresponds to CHAR.
;On the Lisp machine, this conversion will happen automatically
;in most places that an integer can be used."
;  (dont-optimize (%pointer char)))

(defun char-name (char)
  "Returns the standard name of CHAR, as a string; or NIL if there is none.
For example, \"RETURN\" for the character Return.
Only works for characters which are not GRAPHIC-CHAR-P (unlike \"a\", for example.)"
  ;character lossage
  (let ((elt (rassq (char-int char) xr-special-character-names)))
    (if elt (symbol-name (car elt)))))

(defun name-char (name)
  "Returns a character object which is the meaning of NAME as a character name,
or NIL if NAME has none."
  (let ((found (cdr (ass 'string-equal name xr-special-character-names))))
    (and found (int-char found))))

(defparameter *char-bit-alist* `((:control . ,%%kbd-control)
                                 (:meta . ,%%kbd-meta)
                                 (:super . ,%%kbd-super)
                                 (:hyper . ,%%kbd-hyper))
  "Alist of bit names for CHAR-BIT vs byte specifiers to extract those bits from a character.")

(defun char-bit (char bit-name)
  "T if the bit spec'd by BIT-NAME (a keyword) is on in CHAR.
BIT-NAME can be :CONTROL, :META, :SUPER or :HYPER."
  (let ((byte (cdr (assq bit-name *char-bit-alist*))))
    (if byte
        (%logldb-test byte char)
      (ferror "~S is not a valid character-bit specifier" bit-name))))

(defun set-char-bit (char bit-name new-value)
  "Returns a character like CHAR except that the bit BIT-NAME has value NEW-VALUE in it.
BIT-NAME can be :CONTROL, :META, :SUPER or :HYPER. NEW-VALUE should be T or NIL."
  (let ((byte (cdr (assq bit-name *char-bit-alist*))))
    (if byte
        (let* ((new-char (%logdpb (if new-value 1 0) byte char)))
          (if (typep char 'character) (int-char new-char) new-char))
      (ferror "~S is not a valid character-bit specifier" bit-name))))

https://raw.githubusercontent.com/jrm-code-project/LISP-Machine/0a448d27f40761fafabe5775ffc550637be537b2/lambda/sys2/character.lisp
lisp
8f25ba741d7e981839e9fa0c28447ede0172f200bb696cb2adde349694076dff
returntocorp/semgrep
ast_php.ml

(*
 * Copyright (C) 2011-2013 Facebook
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public License
 * version 2.1 as published by the Free Software Foundation, with the
 * special exception on linking described in file license.txt.
 *
 * This library is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the file
 * license.txt for more details.
 *)

(*****************************************************************************)
(* Prelude *)
(*****************************************************************************)
(* A (real) Abstract Syntax Tree for PHP, not a Concrete Syntax Tree
 * as in cst_php.ml.
 *
 * This file contains a simplified PHP abstract syntax tree. The original
 * PHP syntax tree (cst_php.ml) is good for code refactoring or
 * code visualization; the types used match exactly the source. However,
 * for other algorithms, the nature of the AST makes the code a bit
 * redundant. Hence the idea of a SimpleAST which is the
 * original AST where certain constructions have been factorized
 * or even removed.
 *
 * Here is a list of the simplifications/factorizations:
 * - no purely syntactical tokens in the AST like parenthesis, brackets,
 *   braces, angles, commas, semicolons, antislash, etc. No ParenExpr.
 *   No FinalDef. No NotParsedCorrectly. The only token information kept
 *   is for identifiers for error reporting. See wrap() below.
 *
 * - support for old syntax is removed. No IfColon, ColonStmt,
 *   CaseColonList.
 * - support for extra tools is removed. No XdebugXxx
 *   update: but support for semgrep is restored (Ellipsis)
 * - support for features we don't really use in our code is removed
 *   e.g. unset cast. No Use, UseDirect, UseParen. No CastUnset.
 *   Also no StaticObjCallVar.
 * - some known directives like 'declare(ticks=1);' or 'declare(strict=1);'
 *   are skipped because they don't have a useful semantic for
 *   the abstract interpreter or the type inference engine. No Declare.
 *
 * - sugar is removed, no ArrayLong vs ArrayShort, no InlineHtml,
 *   no HereDoc, no EncapsXxx, no XhpSingleton (but kept Xhp), no
 *   implicit fields via constructor parameters.
 * - some builtins, for instance 'echo', are transformed in "__builtin__echo".
 *   See builtin() below.
 * - no include/require, they are transformed in call
 *   to __builtin__require (maybe not a good idea)
 * - some special keywords, for instance 'self', are transformed in
 *   "__special__self". See special() below.
 *   The comment is still relevant but we should use a different example than self.
 * - the different ways to define namespaces are merged, no
 *   NamespaceBracketDef.
 *
 * - a simpler stmt type; no extra toplevel and stmt_and_def types,
 *   no FuncDefNested, no ClassDefNested. No StmtList.
 * - a simpler expr type; no lvalue vs expr vs static_scalar vs attribute
 *   (update: now static_scalar = expr = lvalue also in cst_php.ml).
 *   Also no scalar. No Sc, no C. No Lv. Pattern matching constants
 *   is simpler: | Sc (C (String ...)) -> ... becomes just | String -> ....
 *   Also no arg type. No Arg, ArgRef, ArgUnpack. Also no xhp_attr_value type.
 *   No XhpAttrString, XhpAttrExpr.
 * - no EmptyStmt, it is transformed in an empty Block
 * - a simpler If. 'elseif' are transformed in nested If, and empty 'else'
 *   in an empty Block.
 * - a simpler Foreach, foreach_var_either and foreach_arrow are transformed
 *   into expressions with a new Arrow constructor (maybe not good idea)
 * - some special constructs like AssignRef were transformed into
 *   composite calls to Assign and Ref. Same for AssignList, AssignNew.
 *   Same for arguments passed by reference, no Arg, ArgRef, ArgUnpack.
 *   Same for refs in arrays, no ArrayRef, ArrayArrowRef. Also no ListVar,
 *   ListList, ListEmpty. No ForeachVar, ForeachList.
 *   Array values are also decomposed in regular expr or Arrow, no
 *   ArrayArrowExpr, no ForeachArrow. More orthogonal.
 * - a unified Call. No FunCallSimple, FunCallVar, MethodCallSimple,
 *   StaticMethodCallSimple, StaticMethodCallVar
 *   (update: same in cst_php.ml now)
 * - a unified Array_get.
 *   No VArrayAccess, VArrayAccessXhp,
 *   VBraceAccess, OArrayAccess, OBraceAccess
 *   (update: same in cst_php.ml now)
 * - unified Class_get and Obj_get instead of lots of duplication in
 *   many constructors, e.g. no ClassConstant in a separate scalar type,
 *   no retarded obj_prop_access/obj_dim types,
 *   no OName, CName, ObjProp, ObjPropVar, ObjAccessSimple vs ObjAccess,
 *   no ClassNameRefDynamic, no VQualifier, ClassVar, DynamicClassVar,
 *   etc.
 *   (update: same in cst_php.ml now)
 * - unified eval_var, some constructs were transformed into calls to
 *   "eval_var" builtin, e.g. no GlobalDollar, no VBrace, no Indirect/Deref.
 *
 * - a simpler 'name' for identifiers, xhp names and regular names are merged,
 *   the special keywords self/parent/static are merged,
 *   so the complex Id (XName [QI (Name "foo")]) becomes just Id ["foo"].
 * - ...
 *
 * todo:
 * - put back types! at least the basic one like f_return_type
 *   with no generics
 * - less: factorize more? string vs Guil?
 *)

(*****************************************************************************)
(* Token (leaves) *)
(*****************************************************************************)
type tok = Parse_info.t [@@deriving show]
type 'a wrap = 'a * tok [@@deriving show] (* with tarzan *)

(* round(), square[], curly{}, angle<> brackets *)
type 'a bracket = tok * 'a * tok [@@deriving show] (* with tarzan *)

type ident = string wrap [@@deriving show] (* with tarzan *)

(* the string contains the $ prefix *)
type var = string wrap [@@deriving show] (* with tarzan *)

(* The keyword 'namespace' can be in a leading position. The special
 * ident 'ROOT' can also be leading. *)
type qualified_ident = ident list [@@deriving show] (* with tarzan *)

type name = qualified_ident [@@deriving show] (* with tarzan *)

(*****************************************************************************)
(* Expression *)
(*****************************************************************************)
(* lvalue and expr have been mixed in this AST, but an lvalue should be
 * an expr restricted to: Var $var, Array_get, Obj_get, Class_get, or List. *)
type expr =
  (* booleans are really just Int in PHP :( *)
  (* I don't think ^ is true. It reads like a boolean represents a truth
   * value, where for purposes of conversion 0 is cast to false and non-0 is
   * cast to true *)
  | Bool of bool wrap
  | Int of int option wrap
  | Double of float option wrap
  (* PHP has no first-class functions so entities are sometimes passed
   * as strings so the string wrap below can actually correspond to a
   * 'Id name' sometimes. Some magic functions like param_post() also
   * introduce entities (variables) via strings. *)
  | String of string wrap (* TODO: bracket *)
  (* Id is valid for "entities" (functions, classes, constants). Id is also
   * used for class methods/fields/constants. It can also contain
   * "self/parent" or "static", "class". It can be "true", "false", "null"
   * and many other builtin constants. See builtin() and special() below.
   *
   * todo: For field name, if in the code they are referenced like $this->fld,
   * we should prepend a $ to fld to match their definition. *)
  | Id of name (* less: should be renamed Name *)
  | IdSpecial of special wrap
  (* Var used to be merged with Id. But then we were doing lots of
   * 'when Ast.is_variable name' so maybe better to have Id and Var
   * (at the same time OCaml does not differentiate Id from Var).
   * The string contains the '$'. *)
  | Var of var
  (* when None it means add to the end when used in lvalue position *)
  | Array_get of expr * expr option bracket
  (* Unified method/field access.
   * ex: $o->foo() ==> Call(Obj_get(Var "$o", Id "foo"), [])
   * ex: A::foo()  ==> Call(Class_get(Id "A", Id "foo"), [])
   * note that Id can be "self", "parent", "static". *)
  | Obj_get of expr * tok * expr
  | Class_get of expr * tok * expr
  | New of tok * expr * argument list
  | NewAnonClass of tok * argument list * class_def
  | InstanceOf of tok * expr * expr
  (* pad: could perhaps be at the statement level? The left expr
   * must be an lvalue (e.g. a variable). *)
  | Assign of expr * tok * expr
  | AssignOp of expr * binaryOp wrap * expr
  (* really a destructuring tuple let; always used as part of an Assign or
   * in foreach_pattern. *)
  | List of expr list bracket
  (* used only inside array_value or foreach_pattern, or for yield
   * (which is translated as a builtin and so a Call) *)
  | Arrow of expr * tok * expr
  (* $y =& $x is transformed into an Assign(Var "$y", Ref (Var "$x")). In
   * PHP refs are always used in an Assign context. *)
  | Ref of tok * expr
  (* e.g. f(...$x) *)
  | Unpack of expr
  | Call of expr * argument list bracket
  | Throw of tok * expr
  (* todo? transform into Call (builtin ...) ? *)
  | Infix of AST_generic.incr_decr wrap * expr
  | Postfix of AST_generic.incr_decr wrap * expr
  | Binop of expr * binaryOp wrap * expr
  | Unop of unaryOp wrap * expr
  | Guil of expr list bracket
  | ConsArray of array_value list bracket
  | CondExpr of expr * expr * expr
  | Cast of cast_type wrap * expr
  (* yeah! PHP 5.3 is becoming a real language *)
  | Lambda of func_def
  | Match of tok * expr * match_ list
  (* sgrep-ext: *)
  | Ellipsis of tok
  | DeepEllipsis of expr bracket

and match_ = MCase of expr list * expr | MDefault of tok * expr

and cast_type =
  | BoolTy
  | IntTy
  | DoubleTy (* float *)
  | StringTy
  | ArrayTy
  | ObjectTy

and special =
  (* often transformed in Var "$this" in the analysis *)
  | This
  (* represents the "self" keyword expression in a class *)
  | Self
  (* represents the "parent" keyword expression in a class *)
  | Parent
  | FuncLike of funclike (* language constructs that look like functions *)

and funclike = Empty | Eval | Exit | Isset | Unset

and binaryOp =
  (* TODO: now available in AST_generic_? *)
  | BinaryConcat
  | CombinedComparison
  | ArithOp of AST_generic.operator

and unaryOp = AST_generic.operator

and argument =
  | Arg of expr
  | ArgRef of tok * expr
  | ArgUnpack of tok * expr
  | ArgLabel of ident * tok * expr

(* only Var, List, or Arrow, and apparently also Array_get is ok, so
 * basically any lvalue *)
and foreach_pattern = expr

(* often an Arrow *)
and array_value = expr

(* string_const_expr is for shape field names which are permitted to be either
 * literal strings or class constants. *)
and string_const_expr = expr

(*****************************************************************************)
(* Types *)
(*****************************************************************************)
and hint_type =
  | Hint of name (* todo: add the generics *)
  | HintArray of tok
  | HintQuestion of tok * hint_type
  | HintTuple of hint_type list bracket
  | HintCallback of hint_type list * hint_type option
  | HintTypeConst of hint_type * tok * hint_type (* ?? *)
  | HintVariadic of tok * hint_type option

and class_name = hint_type

(*****************************************************************************)
(* Statement *)
(*****************************************************************************)
and stmt =
  | Expr of expr * tok
  | Block of stmt list bracket
  | If of tok * expr * stmt * stmt
  | Switch of tok * expr * case list
  | While of tok * expr * stmt
  | Do of tok * stmt * expr
  | For of tok * expr list * expr list * expr list * stmt
  (* 'foreach ($xs as $k)', '... ($xs as $k => $v)', '... ($xs as list($...))' *)
  | Foreach of tok * expr * tok * foreach_pattern * stmt
  | Return of tok * expr option
  | Break of tok * expr option
  | Continue of tok * expr option
  | Label of ident * tok (* : *) * stmt
  | Goto of tok * ident
  | Try of tok * stmt * catch list * finally list
  (* only at toplevel in most of our code *)
  | ClassDef of class_def
  | FuncDef of func_def
  (* only at toplevel *)
  | ConstantDef of constant_def
  | TypeDef of type_def
  (* the qualified_ident below can not have a leading '\', it can also
   * be the root namespace *)
  | NamespaceDef of tok * qualified_ident * stmt list bracket
  | NamespaceUse of tok * qualified_ident * ident option (* when alias *)
  (* Note that there is no LocalVars constructor. Variables in PHP are
   * declared when they are first assigned. *)
  | StaticVars of tok * (var * expr option) list
  (* expr is most of the time a simple variable name *)
  | Global of tok * expr list

and case = Case of tok * expr * stmt list | Default of tok * stmt list

(* catch(Exception $exn) { ... } => ("Exception", "$exn", [...])
 * TODO: can now be a list of hint_type, Exn1 | Exn2 like in Java. *)
and catch = tok * hint_type * var * stmt

and finally = tok * stmt

(*****************************************************************************)
(* Definitions *)
(*****************************************************************************)
(* TODO: factorize xx_name in an entity type like in AST_generic.ml,
 * which also leads to a cleaner Lambda and NewAnonClass.
 * TODO: factorize also the xx_modifiers and xx_attrs? *)

(* The func_def type below is actually used both for functions and methods.
 *
 * For methods, a few names are specials:
 * - __construct, __destruct
 * - __call, __callStatic
 *)
and func_def = {
  (* TODO: "_lambda" when used for lambda, see also AnonLambda for f_kind below *)
  f_name : ident;
  f_kind : function_kind wrap;
  (* TODO bracket *)
  f_return_type : hint_type option;
  (* functions returning a ref are rare *)
  f_ref : bool;
  (* only for methods; always empty for functions *)
  m_modifiers : modifier list;
  (* only for AnonLambda (could also abuse parameter), not for ShortLambda *)
  l_uses : (bool (* is_ref *) * var) list;
  f_attrs : attribute list;
  f_body : stmt;
}

and function_kind =
  | Function
  | AnonLambda
  | ShortLambda (* they have different scoping rules for free variables *)
  | Method

and parameter =
  | ParamClassic of parameter_classic
  (* sgrep-ext: *)
  | ParamEllipsis of tok

and parameter_classic = {
  p_type : hint_type option;
  p_ref : tok option;
  p_name : var;
  p_default : expr option;
  p_attrs : attribute list;
  p_variadic : tok option;
}

(* for methods, and below for fields too *)
and modifier = keyword_modifier wrap

and keyword_modifier = Public | Private | Protected | Abstract | Final | Static | Async

(* normally either an Id or Call with only static arguments *)
and attribute = expr

and constant_def = {
  cst_tok : tok;
  cst_name : ident;
  (* normally a static scalar *)
  cst_body : expr;
}

and enum_type = { e_base : hint_type; e_constraint : hint_type option }

and class_def = {
  c_name : ident;
  c_kind : class_kind wrap;
  c_extends : class_name option;
  c_implements : class_name list;
  c_uses : class_name list; (* traits *)
  (* If this class is an enum, what is the underlying type (and
   * constraint) of the enum? *)
  c_enum_type : enum_type option;
  c_modifiers : modifier list;
  c_attrs : attribute list;
  c_constants : constant_def list;
  c_variables : class_var list;
  c_methods : method_def list;
  c_braces : unit bracket;
}

and class_kind = Class | Interface | Trait | Enum

and xhp_field = class_var * bool

and class_var = {
  (* note that the name will contain a $ *)
  cv_name : var;
  cv_type : hint_type option;
  cv_value : expr option;
  cv_modifiers : modifier list;
}

and method_def = func_def

and type_def = { t_name : ident; t_kind : type_def_kind }
and type_def_kind = Alias of hint_type
[@@deriving show { with_path = false }] (* with tarzan *)

(*****************************************************************************)
(* Program *)
(*****************************************************************************)
type program = stmt list [@@deriving show { with_path = false }] (* with tarzan *)

(*****************************************************************************)
(* Any *)
(*****************************************************************************)
type partial = PartialIf of tok * expr [@@deriving show { with_path = false }] (* with tarzan *)

type any =
  | Program of program
  | Stmt of stmt
  | Expr2 of expr
  | Param of parameter
  | Partial of partial
[@@deriving show { with_path = false }] (* with tarzan *)

(*****************************************************************************)
(* Helpers *)
(*****************************************************************************)
let unwrap x = fst x
let wrap_fake s = (s, Parse_info.fake_info s)

(* TODO: replace builtin() by IdSpecial like I do in AST_generic.ml
 * builtin() is used for:
 * - 'eval', and implicitly generated eval/reflection like functions:
 *   "eval_var" (e.g. for echo $$x, echo ${"x"."y"}),
 * - 'clone',
 * - 'exit', 'yield', 'yield_break' TODO 'yield_from?'
 * - 'unset', 'isset', 'empty'
 * - 'echo', 'print',
 * - '@', '`',
 * - 'include', 'require', 'include_once', 'require_once'.
 * - __LINE__/__FILE/__DIR/__CLASS/__TRAIT/__FUNCTION/__METHOD/
 *
 * See also data/php_stdlib/pfff.php which declares those builtins.
 * See also tests/php/semantic/ for example of uses of those builtins.
 *
 * coupling: if you modify the string, git grep it because it's probably
 * used in patterns too. *)
let builtin x = "__builtin__" ^ x

(* for 'self'/'parent', 'static', 'lambda', 'namespace', root namespace '\',
 * 'class' as in C::class
 * TODO: transform in IdSpecial! *)
let special x = "__special__" ^ x

(* AST helpers *)
let has_modifier cv = List.length cv.cv_modifiers > 0
let is_static modifiers = List.mem Static (List.map unwrap modifiers)
let is_private modifiers = List.mem Private (List.map unwrap modifiers)

let string_of_xhp_tag xs = ":" ^ Common.join ":" xs

let str_of_ident (s, _) = s
let tok_of_ident (_, x) = x

exception TodoNamespace of tok

let str_of_name = function
  | [ id ] -> str_of_ident id
  | [] -> raise Common.Impossible
  | x :: _xs -> raise (TodoNamespace (tok_of_ident x))

let tok_of_name = function
  | [ id ] -> tok_of_ident id
  | [] -> raise Common.Impossible
  (* pick first one *)
  | x :: _xs -> tok_of_ident x

(* we sometimes need to remove the '$' prefix *)
let remove_first_char s = String.sub s 1 (String.length s - 1)

let str_of_class_name x =
  match x with
  | Hint name -> str_of_name name
  | _ -> raise Common.Impossible

let name_of_class_name x =
  match x with
  | Hint [ name ] -> name
  | Hint [] -> raise Common.Impossible
  | Hint name -> raise (TodoNamespace (tok_of_name name))
  | _ -> raise Common.Impossible

https://raw.githubusercontent.com/returntocorp/semgrep/dcea978347df81cbc8f2c2b49b80c1980f6194cf/languages/php/ast/ast_php.ml
ocaml
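To illustrate the Call/Obj_get shape documented in the expr type above ($o->foo() becoming Call (Obj_get ...)) and the builtin naming convention, here is a small, hypothetical sketch. It assumes this file is compiled as a module named Ast_php and reuses Parse_info.fake_info the same way wrap_fake does; the value names are invented and this is not code from the repository.

(* Hypothetical illustration only: builds the AST value for the PHP
   expression $o->foo(), plus the identifier used to represent 'echo'. *)
open Ast_php

(* fake tokens, as wrap_fake does above *)
let fk s : tok = Parse_info.fake_info s

(* $o->foo()  ==>  Call (Obj_get (Var "$o", "->", Id ["foo"]), []) *)
let obj_call : expr =
  Call
    ( Obj_get (Var ("$o", fk "$o"), fk "->", Id [ ("foo", fk "foo") ]),
      (fk "(", [], fk ")") )

(* 'echo' is not kept as-is: it becomes the identifier "__builtin__echo" *)
let echo_id : expr = Id [ (builtin "echo", fk "echo") ]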
File: Bump.hs (juhp/fbrnch, language: haskell)
Source: https://raw.githubusercontent.com/juhp/fbrnch/c724daa9e24a999328c3f7cad0213dafdf8183a8/src/Cmd/Bump.hs

module Cmd.Bump (
  bumpPkgs,
  ) where

import Branches
import Common
import Common.System
import Git
import Koji
import Package
import System.IO.Extra

-- FIXME --force
-- FIXME --target
-- FIXME detect rpmautospec and add empty commit
bumpPkgs :: Bool -> Maybe CommitOpt -> (BranchesReq,[String]) -> IO ()
bumpPkgs local mopt =
  withPackagesByBranches (boolHeader local) False
    (if local then cleanGit else cleanGitFetchActive) AnyNumber bumpPkg
  where
    bumpPkg :: Package -> AnyBranch -> IO ()
    bumpPkg pkg br = do
      dead <- doesFileExist "dead.package"
      if dead
        then putStrLn "dead package"
        else do
          spec <- localBranchSpecFile pkg br
          rbr <- case br of
                   RelBranch rbr -> return rbr
                   OtherBranch _ -> systemBranch
          newnvr <- pkgNameVerRel' rbr spec
          moldnvr <-
            if local
            then withTempFile $ \tempfile -> do
              git "show" ["origin:" ++ spec] >>= writeFile tempfile
              pkgNameVerRel rbr tempfile
            else case br of
              RelBranch rbr' ->
                let tag = branchDestTag rbr'
                in kojiLatestNVR tag $ unPackage pkg
              -- FIXME fallback to local?
              _ -> return Nothing
          if equivNVR newnvr (fromMaybe "" moldnvr)
            then do
              git_ "log" ["origin..HEAD", "--pretty=oneline"]
              let clmsg = case mopt of
                            Just (CommitMsg msg) -> msg
                            _ -> "rebuild"
              -- FIXME check for rpmautospec first
              cmd_ "rpmdev-bumpspec" ["-c", clmsg, spec]
              let copts = case mopt of
                            Nothing -> ["-m", "bump release"]
                            Just opt -> case opt of
                              CommitMsg msg -> ["-m", msg]
                              -- FIXME reject amend if already pushed
                              CommitAmend -> ["--amend", "--no-edit"]
              -- FIXME quiet commit?
              git_ "commit" $ "-a" : copts
            else putStrLn "already bumped"

File: ParameterisedState.hs (slindley/effect-handlers, language: haskell)
Source: https://raw.githubusercontent.com/slindley/effect-handlers/39d0d09582d198dd6210177a0896db55d92529f4/Examples/experimental/ParameterisedState.hs

-- state using parameterised handlers
{-# LANGUAGE TypeFamilies, NoMonomorphismRestriction,
             FlexibleContexts, TypeOperators, ScopedTypeVariables #-}

import ParameterisedHandlers

data Get s = Get
instance Op (Get s) where
  type Param (Get s) = ()
  type Return (Get s) = s
get = applyOp Get

data Put s = Put
instance Op (Put s) where
  type Param (Put s) = s
  type Return (Put s) = ()
put = applyOp Put

-- handle state in the standard way
handleState :: Monad m => s -> Comp (Get s, (Put s, ())) a -> m a
handleState = handleStateWith Empty

-- The handleStateWith function generalises handleState to support
-- horizontal composition, either for throwing other effects or for
-- composing with compatible effects such as random choice.
handleStateWith :: (Get s `NotIn` e, Put s `NotIn` e, Monad m) =>
                   OpHandler e (m a) s -> s -> Comp (Get s, (Put s, e)) a -> m a
handleStateWith h s comp =
  handle s comp
    (Get |-> (\() k -> k s s) :<:
     Put |-> (\s k -> k s ()) :<: h, return)

data Mode = Handle | Forward

mcbrideState mode (s :: Int) comp =
  handle mode comp
    ((Get |-> case mode of
                Handle  -> (\() k -> mcbrideState Forward s (k Forward s))
                Forward -> (\p k -> App makeWitness Get p (k Forward))) :<:
     (Put |-> case mode of
                Handle  -> (\s k -> mcbrideState Forward s (k Forward ()))
                Forward -> (\p k -> App makeWitness Put p (k Forward))) :<: Empty,
     return)

getInt :: In (Get Int) e => () -> Comp e Int
getInt = get

putInt :: In (Put Int) e => Int -> Comp e ()
putInt = put

count :: Comp (Get Int, (Put Int, ())) ()
count = do {n <- get ();
            if n == (0 :: Int) then return ()
            else do {put (n-1); count}}
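The count computation above only builds a description of Get/Put effects; to see the handlers in action it has to be pushed through handleState at a concrete state type. A minimal sketch (not part of the original file, and assuming the Comp/handle machinery imported from ParameterisedHandlers behaves as the type signatures above indicate):

-- Usage sketch (hypothetical, not in the original module): run 'count'
-- with the standard state handler, instantiated at m = IO.
main :: IO ()
main = do
  handleState (3 :: Int) count   -- Get/Put repeatedly until the state reaches 0
  putStrLn "counted down"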
File: ParseAS.hs (spechub/Hets, language: haskell)
Source: https://raw.githubusercontent.com/spechub/Hets/c27bd92f22f3b92e792eff0adaa3baec9d61c2b1/OWL2/ParseAS.hs

{-# LANGUAGE TupleSections #-}

module OWL2.ParseAS where

import Prelude hiding (lookup)

import OWL2.AS as AS
import Common.AnnoParser (newlineOrEof, commentLine)
import Common.IRI hiding (parseIRI)
import Common.Parsec
import Common.Lexer (getNumber, value, nestCommentOut)
import qualified Common.GlobalAnnotations as GA (PrefixMap)

import Text.ParserCombinators.Parsec
import Data.Char
import Data.Map (union, fromList)

{- | @followedBy c p@ first parses @p@ then looks ahead for @c@. Doesn't consume
any input on failure. -}
followedBy :: CharParser st b -> CharParser st a -> CharParser st a
followedBy cond p = try $ do
  r <- p
  lookAhead cond
  return r

-- | Performs an arbitrary lookahead over choices of parsers
arbitraryLookaheadOption :: [CharParser st a] -> CharParser st a
arbitraryLookaheadOption p = try $ lookAhead $ choice p

{- | @manyN n p@ parses @n@ or more occurrences of @p@ -}
manyN :: Int -> CharParser st a -> CharParser st [a]
manyN n p =
  foldr (\ _ r -> p <:> r) (return []) [1 .. n]
  <++> many p

-- | alias for @return Nothing@
never :: CharParser st (Maybe a)
never = return Nothing

-- # Basic constructs

-- | Parses a comment
comment :: CharParser st String
comment = try $ do
  char '#'
  manyTill anyChar newlineOrEof

-- | Skips trailing whitespaces and comments
skips :: CharParser st a -> CharParser st a
skips = (<< skips')

{- | Skips whitespaces and comments -}
skips' :: CharParser st ()
skips' = skipMany
  (forget space <|> forget comment <|> forget commentLine <|> forget nestCommentOut)

-- | Parses plain string with skip
keyword :: String -> CharParser st ()
keyword s = try $ skips (string s >> notFollowedBy alphaNum)

-- | Parses a full iri
fullIri :: CharParser st IRI
fullIri = angles iriParser

ncNameStart :: Char -> Bool
ncNameStart c = isAlpha c || c == '_'

-- | rfc3987 plus '+' from scheme (scheme does not allow the dots)
ncNameChar :: Char -> Bool
ncNameChar c = isAlphaNum c || elem c ".+-_\183"

-- | Parses a prefix name (PNAME_NS of )
prefix :: CharParser st String
prefix = skips $
  option "" (satisfy ncNameStart <:> many (satisfy ncNameChar)) << char ':'

-- | Parses an abbreviated or full iri
parseIRI :: GA.PrefixMap -> CharParser st IRI
parseIRI pm = skips (expandIRI pm <$> (fullIri <|> compoundIriCurie) <?> "IRI")

{- | @parseEnclosedWithKeyword k p@ parses the keyword @k@ followed by @p@
enclosed in parentheses. Skips spaces and comments before and after @p@.
-} parseEnclosedWithKeyword :: String -> CharParser st a -> CharParser st a parseEnclosedWithKeyword s p = do keyword s skips $ char '(' r <- skips p skips $ char ')' return r parsePrefixDeclaration :: CharParser st (String, IRI) parsePrefixDeclaration = parseEnclosedWithKeyword "Prefix" $ do p <- prefix skips $ char '=' iri <- fullIri return $ (p, iri) parseDirectlyImportsDocument :: GA.PrefixMap -> CharParser st IRI parseDirectlyImportsDocument pm = parseEnclosedWithKeyword "Import" (parseIRI pm) <?> "Import" -- # Entities, Literals, and Individuals -- ## Entities parseEntity' :: GA.PrefixMap -> EntityType -> String -> CharParser st Entity parseEntity' pm t k = parseEnclosedWithKeyword k $ do iri <- parseIRI pm return $ mkEntity t iri parseEntity :: GA.PrefixMap -> CharParser st Entity parseEntity pm = parseEntity' pm Class "Class" <|> parseEntity' pm Datatype "Datatype" <|> parseEntity' pm ObjectProperty "ObjectProperty" <|> parseEntity' pm DataProperty "DataProperty" <|> parseEntity' pm AnnotationProperty "AnnotationProperty" <|> parseEntity' pm NamedIndividual "NamedIndividual" <?> "Entity" # # Literals charOrEscaped :: CharParser st Char charOrEscaped = (try $ string "\\\"" >> return '"') <|> (try $ string "\\\\" >> return '\\') <|> anyChar parseTypeSignature :: GA.PrefixMap -> CharParser st IRI parseTypeSignature pm = do string "^^" parseIRI pm parseLanguageTag :: CharParser st String parseLanguageTag = do char '@' many1 (letter <|> char '-') parseLiteral :: GA.PrefixMap -> CharParser st Literal parseLiteral pm = do char '"' s <- manyTill charOrEscaped (try $ char '"') typ <- (Typed <$> parseTypeSignature pm) <|> (Untyped <$> optionMaybe parseLanguageTag) return $ Literal s typ -- ## Individuals parseAnonymousIndividual :: GA.PrefixMap -> CharParser st AnonymousIndividual parseAnonymousIndividual pm = skips $ expandIRI pm <$> iriCurie parseIndividual :: GA.PrefixMap -> CharParser st Individual parseIndividual pm = parseIRI pm <|> parseAnonymousIndividual pm <?> "Individual" -- # Annotations parseAnnotationValue :: GA.PrefixMap -> CharParser st AnnotationValue parseAnnotationValue pm = (parseLiteral pm >>= return . AnnValLit) <|> (parseIRI pm >>= return . AnnValue) <|> (parseAnonymousIndividual pm >>= return . 
AnnAnInd) <?> "AnnotationValue" parseAnnotationSubject :: GA.PrefixMap -> CharParser st AnnotationSubject parseAnnotationSubject pm = (AnnSubAnInd <$> parseAnonymousIndividual pm) <|> (AnnSubIri <$> parseIRI pm) parseAnnotations :: GA.PrefixMap -> CharParser st [Annotation] parseAnnotations pm = many $ parseAnnotation pm parseAnnotation :: GA.PrefixMap -> CharParser st Annotation parseAnnotation pm = (flip (<?>)) "Annotation" $ parseEnclosedWithKeyword "Annotation" $ do an <- (many (parseAnnotation pm)) property <- (parseIRI pm) v <- parseAnnotationValue pm return $ Annotation an property v -- ## Data Range parseDataJunction' :: GA.PrefixMap -> String -> JunctionType -> CharParser st DataRange parseDataJunction' pm k t = parseEnclosedWithKeyword k $ DataJunction t <$> manyN 2 (parseDataRange pm) parseDataJunction :: GA.PrefixMap -> CharParser st DataRange parseDataJunction pm = parseDataJunction' pm "DataUnionOf" UnionOf <|> parseDataJunction' pm "DataIntersectionOf" IntersectionOf parseDataComplementOf :: GA.PrefixMap -> CharParser st DataRange parseDataComplementOf pm = parseEnclosedWithKeyword "DataComplementOf" $ DataComplementOf <$> parseDataRange pm parseDataOneOf :: GA.PrefixMap -> CharParser st DataRange parseDataOneOf pm = parseEnclosedWithKeyword "DataOneOf" $ DataOneOf <$> many1 (parseLiteral pm) parseDatatypeResComponent :: GA.PrefixMap -> CharParser st (ConstrainingFacet, RestrictionValue) parseDatatypeResComponent pm = (,) <$> (parseIRI pm) <*> (parseLiteral pm) parseDatatypeRestriction :: GA.PrefixMap -> CharParser st DataRange parseDatatypeRestriction pm = parseEnclosedWithKeyword "DatatypeRestriction" $ do dataType <- (parseIRI pm) restrictions <- many1 (parseDatatypeResComponent pm) return $ DataType dataType restrictions parseDataRange :: GA.PrefixMap -> CharParser st DataRange parseDataRange pm = (parseDataJunction pm) <|> (parseDataComplementOf pm) <|> (parseDataOneOf pm) <|> (parseDatatypeRestriction pm) <|> (DataType <$> (parseIRI pm) <*> return []) <?> "DataRange" -- # Axioms # # Declaration parseDeclaration :: GA.PrefixMap -> CharParser st Axiom parseDeclaration pm = parseEnclosedWithKeyword "Declaration" $ do annotations <- many (parseAnnotation pm) entity <- (parseEntity pm) return $ Declaration annotations entity # # ClassExpressions parseObjectIntersectionOf :: GA.PrefixMap -> CharParser st ClassExpression parseObjectIntersectionOf pm = parseEnclosedWithKeyword "ObjectIntersectionOf" $ ObjectJunction IntersectionOf <$> manyN 2 (parseClassExpression pm) parseObjectUnionOf :: GA.PrefixMap -> CharParser st ClassExpression parseObjectUnionOf pm = parseEnclosedWithKeyword "ObjectUnionOf" $ ObjectJunction UnionOf <$> manyN 2 (parseClassExpression pm) parseObjectComplementOf :: GA.PrefixMap -> CharParser st ClassExpression parseObjectComplementOf pm = parseEnclosedWithKeyword "ObjectComplementOf" $ ObjectComplementOf <$> (parseClassExpression pm) parseObjectOneOf :: GA.PrefixMap -> CharParser st ClassExpression parseObjectOneOf pm = parseEnclosedWithKeyword "ObjectOneOf" $ ObjectOneOf <$> many1 (parseIndividual pm) parseObjectProperty :: GA.PrefixMap -> CharParser st ObjectPropertyExpression parseObjectProperty pm = ObjectProp <$> (parseIRI pm) parseInverseObjectProperty :: GA.PrefixMap -> CharParser st ObjectPropertyExpression parseInverseObjectProperty pm = parseEnclosedWithKeyword "ObjectInverseOf" $ ObjectInverseOf <$> (parseObjectProperty pm) parseObjectPropertyExpression :: GA.PrefixMap -> CharParser st ObjectPropertyExpression parseObjectPropertyExpression pm = 
(parseInverseObjectProperty pm) <|> (parseObjectProperty pm) <?> "ObjectPropertyExpression" parseObjectSomeValuesFrom :: GA.PrefixMap -> CharParser st ClassExpression parseObjectSomeValuesFrom pm = parseEnclosedWithKeyword "ObjectSomeValuesFrom" $ do objectPropertyExpr <- (parseObjectPropertyExpression pm) classExpr <- (parseClassExpression pm) return $ ObjectValuesFrom SomeValuesFrom objectPropertyExpr classExpr parseObjectAllValuesFrom :: GA.PrefixMap -> CharParser st ClassExpression parseObjectAllValuesFrom pm = parseEnclosedWithKeyword "ObjectAllValuesFrom" $ do objectPropertyExpr <- (parseObjectPropertyExpression pm) classExpr <- (parseClassExpression pm) return $ ObjectValuesFrom AllValuesFrom objectPropertyExpr classExpr parseObjectHasValue :: GA.PrefixMap -> CharParser st ClassExpression parseObjectHasValue pm = parseEnclosedWithKeyword "ObjectHasValue" $ do objectPropertyExpr <- (parseObjectPropertyExpression pm) val <- (parseIndividual pm) return $ ObjectHasValue objectPropertyExpr val parseObjectHasSelf :: GA.PrefixMap -> CharParser st ClassExpression parseObjectHasSelf pm = parseEnclosedWithKeyword "ObjectHasSelf" $ ObjectHasSelf <$> (parseObjectPropertyExpression pm) parseCardinality' :: CardinalityType -> String -> CharParser st a -> CharParser st b -> CharParser st (Cardinality a b) parseCardinality' c k pa pb = parseEnclosedWithKeyword k $ do n <- skips $ value 10 <$> getNumber objectPropertyExpr <- pa classExpr <- optionMaybe pb return $ Cardinality c n objectPropertyExpr classExpr parseObjectCardinality :: GA.PrefixMap -> CharParser st ClassExpression parseObjectCardinality pm = ObjectCardinality <$> ( cardinality "ObjectMinCardinality" MinCardinality <|> cardinality "ObjectMaxCardinality" MaxCardinality <|> cardinality "ObjectExactCardinality" ExactCardinality ) where cardinality s t = parseCardinality' t s a b a = (parseObjectPropertyExpression pm) b = (parseClassExpression pm) parseDataCardinality :: GA.PrefixMap -> CharParser st ClassExpression parseDataCardinality pm = DataCardinality <$> ( cardinality "DataMinCardinality" MinCardinality <|> cardinality "DataMaxCardinality" MaxCardinality <|> cardinality "DataExactCardinality" ExactCardinality ) where cardinality s t = parseCardinality' t s a b a = (parseIRI pm) b = (parseDataRange pm) parseDataSomeValuesFrom :: GA.PrefixMap -> CharParser st ClassExpression parseDataSomeValuesFrom pm = parseEnclosedWithKeyword "DataSomeValuesFrom" $ do exprs <- many1 (followedBy ((parseDataRange pm)) ((parseIRI pm))) range <- (parseDataRange pm) return $ DataValuesFrom SomeValuesFrom exprs range parseDataAllValuesFrom :: GA.PrefixMap -> CharParser st ClassExpression parseDataAllValuesFrom pm = parseEnclosedWithKeyword "DataAllValuesFrom" $ do exprs <- many1 (followedBy (parseDataRange pm) ((parseIRI pm))) range <- (parseDataRange pm) return $ DataValuesFrom AllValuesFrom exprs range parseDataHasValue :: GA.PrefixMap -> CharParser st ClassExpression parseDataHasValue pm = parseEnclosedWithKeyword "DataHasValue" $ DataHasValue <$> (parseIRI pm) <*> (parseLiteral pm) parseClassExpression :: GA.PrefixMap -> CharParser st ClassExpression parseClassExpression pm = (parseObjectIntersectionOf pm) <|> (parseObjectUnionOf pm) <|> (parseObjectComplementOf pm) <|> (parseObjectOneOf pm) <|> (parseObjectCardinality pm) <|> (parseObjectSomeValuesFrom pm) <|> (parseObjectAllValuesFrom pm) <|> (parseObjectHasValue pm) <|> (parseObjectHasSelf pm) <|> (parseDataSomeValuesFrom pm) <|> (parseDataAllValuesFrom pm) <|> (parseDataHasValue pm) <|> 
(parseDataCardinality pm) <|> (Expression <$> (parseIRI pm)) <?> "ClassExpression" -- ## Class Axioms parseSubClassOf :: GA.PrefixMap -> CharParser st ClassAxiom parseSubClassOf pm = parseEnclosedWithKeyword "SubClassOf" $ do annotations <- many (parseAnnotation pm) subClassExpression <- (parseClassExpression pm) superClassExpression <- (parseClassExpression pm) return $ SubClassOf annotations subClassExpression superClassExpression parseEquivalentClasses :: GA.PrefixMap -> CharParser st ClassAxiom parseEquivalentClasses pm = parseEnclosedWithKeyword "EquivalentClasses" $ EquivalentClasses <$> (parseAnnotations pm) <*> manyN 2 (parseClassExpression pm) parseDisjointClasses :: GA.PrefixMap -> CharParser st ClassAxiom parseDisjointClasses pm = parseEnclosedWithKeyword "DisjointClasses" $ DisjointClasses <$> (parseAnnotations pm) <*> manyN 2 (parseClassExpression pm) parseDisjointUnion :: GA.PrefixMap -> CharParser st ClassAxiom parseDisjointUnion pm = parseEnclosedWithKeyword "DisjointUnion" $ DisjointUnion <$> (parseAnnotations pm) <*> (parseIRI pm) <*> manyN 2 (parseClassExpression pm) parseClassAxiom :: GA.PrefixMap -> CharParser st Axiom parseClassAxiom pm = ClassAxiom <$> ( (parseSubClassOf pm) <|> (parseEquivalentClasses pm) <|> (parseDisjointClasses pm) <|> (parseDisjointUnion pm) <?> "ClassAxiom" ) # # Object Property Axioms parseEquivalentObjectProperties :: GA.PrefixMap -> CharParser st ObjectPropertyAxiom parseEquivalentObjectProperties pm = parseEnclosedWithKeyword "EquivalentObjectProperties" $ EquivalentObjectProperties <$> (parseAnnotations pm) <*> manyN 2 (parseObjectPropertyExpression pm) parseDisjointObjectProperties :: GA.PrefixMap -> CharParser st ObjectPropertyAxiom parseDisjointObjectProperties pm = parseEnclosedWithKeyword "DisjointObjectProperties" $ DisjointObjectProperties <$> (parseAnnotations pm) <*> manyN 2 (parseObjectPropertyExpression pm) parseObjectPropertyDomain :: GA.PrefixMap -> CharParser st ObjectPropertyAxiom parseObjectPropertyDomain pm = parseEnclosedWithKeyword "ObjectPropertyDomain" $ ObjectPropertyDomain <$> (parseAnnotations pm) <*> (parseObjectPropertyExpression pm) <*> (parseClassExpression pm) parseObjectPropertyRange :: GA.PrefixMap -> CharParser st ObjectPropertyAxiom parseObjectPropertyRange pm = parseEnclosedWithKeyword "ObjectPropertyRange" $ ObjectPropertyRange <$> (parseAnnotations pm) <*> (parseObjectPropertyExpression pm) <*> (parseClassExpression pm) parseInverseObjectProperties :: GA.PrefixMap -> CharParser st ObjectPropertyAxiom parseInverseObjectProperties pm = parseEnclosedWithKeyword "InverseObjectProperties" $ InverseObjectProperties <$> (parseAnnotations pm) <*> (parseObjectPropertyExpression pm) <*> (parseObjectPropertyExpression pm) # # # SubObjectPropertyOf parseObjectPropertyExpressionChain :: GA.PrefixMap -> CharParser st PropertyExpressionChain parseObjectPropertyExpressionChain pm = parseEnclosedWithKeyword "ObjectPropertyChain" $ many1 (parseObjectPropertyExpression pm) parseSubObjectPropertyExpression :: GA.PrefixMap -> CharParser st SubObjectPropertyExpression parseSubObjectPropertyExpression pm = SubObjPropExpr_exprchain <$> (parseObjectPropertyExpressionChain pm) <|> SubObjPropExpr_obj <$> (parseObjectPropertyExpression pm) <?> "SubObjectPropertyExpression" parseSubObjectPropertyOf :: GA.PrefixMap -> CharParser st ObjectPropertyAxiom parseSubObjectPropertyOf pm = parseEnclosedWithKeyword "SubObjectPropertyOf" $ SubObjectPropertyOf <$> (parseAnnotations pm) <*> (parseSubObjectPropertyExpression pm) <*> 
(parseObjectPropertyExpression pm) -- | Helper function for *C*ommon*O*bject*P*roperty*A*xioms parseCOPA :: GA.PrefixMap -> ( AxiomAnnotations -> ObjectPropertyExpression -> ObjectPropertyAxiom ) -> String -> CharParser st ObjectPropertyAxiom parseCOPA pm c s = parseEnclosedWithKeyword s $ c <$> (parseAnnotations pm) <*> (parseObjectPropertyExpression pm) parseObjectPropertyAxiom :: GA.PrefixMap -> CharParser st Axiom parseObjectPropertyAxiom pm = ObjectPropertyAxiom <$> ( (parseSubObjectPropertyOf pm) <|> (parseEquivalentObjectProperties pm) <|> (parseDisjointObjectProperties pm) <|> (parseObjectPropertyDomain pm) <|> (parseObjectPropertyRange pm) <|> (parseInverseObjectProperties pm) <|> parseCOPA pm FunctionalObjectProperty "FunctionalObjectProperty" <|> parseCOPA pm InverseFunctionalObjectProperty "InverseFunctionalObjectProperty" <|> parseCOPA pm ReflexiveObjectProperty "ReflexiveObjectProperty" <|> parseCOPA pm IrreflexiveObjectProperty "IrreflexiveObjectProperty" <|> parseCOPA pm SymmetricObjectProperty "SymmetricObjectProperty" <|> parseCOPA pm AsymmetricObjectProperty "AsymmetricObjectProperty" <|> parseCOPA pm TransitiveObjectProperty "TransitiveObjectProperty" <?> "ObjectPropertyAxiom" ) # # DataPropertyAxioms parseSubDataPropertyOf :: GA.PrefixMap -> CharParser st DataPropertyAxiom parseSubDataPropertyOf pm = parseEnclosedWithKeyword "SubDataPropertyOf" $ SubDataPropertyOf <$> parseAnnotations pm <*> (parseIRI pm) <*> (parseIRI pm) parseEquivalentDataProperties :: GA.PrefixMap -> CharParser st DataPropertyAxiom parseEquivalentDataProperties pm = parseEnclosedWithKeyword "EquivalentDataProperties" $ EquivalentDataProperties <$> (parseAnnotations pm) <*> manyN 2 (parseIRI pm) parseDisjointDataProperties :: GA.PrefixMap -> CharParser st DataPropertyAxiom parseDisjointDataProperties pm = parseEnclosedWithKeyword "DisjointDataProperties" $ DisjointDataProperties <$> parseAnnotations pm <*> manyN 2 (parseIRI pm) parseDataPropertyDomain :: GA.PrefixMap -> CharParser st DataPropertyAxiom parseDataPropertyDomain pm = parseEnclosedWithKeyword "DataPropertyDomain" $ DataPropertyDomain <$> parseAnnotations pm <*> (parseIRI pm) <*> parseClassExpression pm parseDataPropertyRange :: GA.PrefixMap -> CharParser st DataPropertyAxiom parseDataPropertyRange pm = parseEnclosedWithKeyword "DataPropertyRange" $ DataPropertyRange <$> parseAnnotations pm <*> (parseIRI pm) <*> parseDataRange pm parseFunctionalDataProperty :: GA.PrefixMap -> CharParser st DataPropertyAxiom parseFunctionalDataProperty pm = parseEnclosedWithKeyword "FunctionalDataProperty" $ FunctionalDataProperty <$> parseAnnotations pm <*> (parseIRI pm) parseDataPropertyAxiom :: GA.PrefixMap -> CharParser st Axiom parseDataPropertyAxiom pm = DataPropertyAxiom <$> ( parseSubDataPropertyOf pm <|> parseEquivalentDataProperties pm <|> parseDisjointDataProperties pm <|> parseDataPropertyDomain pm <|> parseDataPropertyRange pm <|> parseFunctionalDataProperty pm <?> "DataPropertyAxiom" ) -- ## Data Type Definition parseDataTypeDefinition :: GA.PrefixMap -> CharParser st Axiom parseDataTypeDefinition pm = parseEnclosedWithKeyword "DatatypeDefinition" $ DatatypeDefinition <$> parseAnnotations pm <*> (parseIRI pm) <*> parseDataRange pm skipChar :: Char -> CharParser st () skipChar = forget . skips . 
char parensP :: CharParser st a -> CharParser st a parensP = between (skipChar '(') (skipChar ')') # # HasKey parseHasKey :: GA.PrefixMap -> CharParser st Axiom parseHasKey pm = parseEnclosedWithKeyword "HasKey" $ do annotations <- (parseAnnotations pm) classExpr <- (parseClassExpression pm) objectPropertyExprs <- parensP $ many (parseObjectPropertyExpression pm) dataPropertyExprs <- parensP $ many (parseIRI pm) return $ HasKey annotations classExpr objectPropertyExprs dataPropertyExprs # # Assertion parseSameIndividual :: GA.PrefixMap -> CharParser st Assertion parseSameIndividual pm = parseEnclosedWithKeyword "SameIndividual" $ SameIndividual <$> (parseAnnotations pm) <*> manyN 2 (parseIndividual pm) parseDifferentIndividuals :: GA.PrefixMap -> CharParser st Assertion parseDifferentIndividuals pm = parseEnclosedWithKeyword "DifferentIndividuals" $ DifferentIndividuals <$> (parseAnnotations pm) <*> manyN 2 (parseIndividual pm) parseClassAssertion :: GA.PrefixMap -> CharParser st Assertion parseClassAssertion pm = parseEnclosedWithKeyword "ClassAssertion" $ ClassAssertion <$> (parseAnnotations pm) <*> (parseClassExpression pm) <*> (parseIndividual pm) parseObjectPropertyAssertion :: GA.PrefixMap -> CharParser st Assertion parseObjectPropertyAssertion pm = parseEnclosedWithKeyword "ObjectPropertyAssertion" $ ObjectPropertyAssertion <$> (parseAnnotations pm) <*> (parseObjectPropertyExpression pm) <*> (parseIndividual pm) <*> (parseIndividual pm) parseNegativeObjectPropertyAssertion :: GA.PrefixMap -> CharParser st Assertion parseNegativeObjectPropertyAssertion pm = parseEnclosedWithKeyword "NegativeObjectPropertyAssertion" $ NegativeObjectPropertyAssertion <$> (parseAnnotations pm) <*> (parseObjectPropertyExpression pm) <*> (parseIndividual pm) <*> (parseIndividual pm) parseDataPropertyAssertion :: GA.PrefixMap -> CharParser st Assertion parseDataPropertyAssertion pm = parseEnclosedWithKeyword "DataPropertyAssertion" $ DataPropertyAssertion <$> (parseAnnotations pm) <*> (parseIRI pm) <*> (parseIndividual pm) <*> (parseLiteral pm) parseNegativeDataPropertyAssertion :: GA.PrefixMap -> CharParser st Assertion parseNegativeDataPropertyAssertion pm = parseEnclosedWithKeyword "NegativeDataPropertyAssertion" $ NegativeDataPropertyAssertion <$> (parseAnnotations pm) <*> (parseIRI pm) <*> (parseIndividual pm) <*> (parseLiteral pm) parseAssertion :: GA.PrefixMap -> CharParser st Axiom parseAssertion pm = Assertion <$> ( (parseSameIndividual pm) <|> (parseDifferentIndividuals pm) <|> (parseClassAssertion pm) <|> (parseObjectPropertyAssertion pm) <|> (parseNegativeObjectPropertyAssertion pm) <|> (parseDataPropertyAssertion pm) <|> (parseNegativeDataPropertyAssertion pm) ) parseAnnotationAssertion :: GA.PrefixMap -> CharParser st AnnotationAxiom parseAnnotationAssertion pm = parseEnclosedWithKeyword "AnnotationAssertion" $ AnnotationAssertion <$> (parseAnnotations pm) <*> (parseIRI pm) <*> (parseAnnotationSubject pm) <*> (parseAnnotationValue pm) parseSubAnnotationPropertyOf :: GA.PrefixMap -> CharParser st AnnotationAxiom parseSubAnnotationPropertyOf pm = parseEnclosedWithKeyword "SubAnnotationPropertyOf" $ SubAnnotationPropertyOf <$> (parseAnnotations pm) <*> (parseIRI pm) <*> (parseIRI pm) parseAnnotationPropertyDomain :: GA.PrefixMap -> CharParser st AnnotationAxiom parseAnnotationPropertyDomain pm = parseEnclosedWithKeyword "AnnotationPropertyDomain" $ AnnotationPropertyDomain <$> (parseAnnotations pm) <*> (parseIRI pm) <*> (parseIRI pm) parseAnnotationPropertyRange :: GA.PrefixMap -> CharParser st 
AnnotationAxiom parseAnnotationPropertyRange pm = parseEnclosedWithKeyword "AnnotationPropertyRange" $ AnnotationPropertyRange <$> (parseAnnotations pm) <*> (parseIRI pm) <*> (parseIRI pm) parseAnnotationAxiom :: GA.PrefixMap -> CharParser st Axiom parseAnnotationAxiom pm = AnnotationAxiom <$> ( (parseAnnotationAssertion pm) <|> (parseSubAnnotationPropertyOf pm) <|> (parseAnnotationPropertyDomain pm) <|> (parseAnnotationPropertyRange pm) ) parseIndividualArg :: GA.PrefixMap -> CharParser st IndividualArg parseIndividualArg pm = -- Apparently the keyword is "Variable" instead of "IndividualVariable" IVar <$> parseEnclosedWithKeyword "Variable" (parseIRI pm) <|> IArg <$> parseAnonymousIndividual pm parseDataArg :: GA.PrefixMap -> CharParser st DataArg parseDataArg pm = Apparently the keyword is " Literal " instead of " LiteralVariable " DVar <$> parseEnclosedWithKeyword "Variable" (parseIRI pm) <|> DArg <$> parseLiteral pm parseClassAtom :: GA.PrefixMap -> CharParser st Atom parseClassAtom pm = parseEnclosedWithKeyword "ClassAtom" $ ClassAtom <$> parseClassExpression pm <*> parseIndividualArg pm parseDataRangeAtom :: GA.PrefixMap -> CharParser st Atom parseDataRangeAtom pm = parseEnclosedWithKeyword "DataRangeAtom" $ DataRangeAtom <$> parseDataRange pm <*> parseDataArg pm parseObjectPropertyAtom :: GA.PrefixMap -> CharParser st Atom parseObjectPropertyAtom pm = parseEnclosedWithKeyword "ObjectPropertyAtom" $ ObjectPropertyAtom <$> parseObjectPropertyExpression pm <*> parseIndividualArg pm <*> parseIndividualArg pm parseDataPropertyAtom :: GA.PrefixMap -> CharParser st Atom parseDataPropertyAtom pm = parseEnclosedWithKeyword "DataPropertyAtom" $ DataPropertyAtom <$> parseIRI pm <*> parseIndividualArg pm <*> parseDataArg pm parseBuiltInAtom :: GA.PrefixMap -> CharParser st Atom parseBuiltInAtom pm = parseEnclosedWithKeyword "BuiltInAtom" $ BuiltInAtom <$> parseIRI pm <*> many1 (parseDataArg pm) parseSameIndividualAtom :: GA.PrefixMap -> CharParser st Atom parseSameIndividualAtom pm = parseEnclosedWithKeyword "SameIndividualAtom" $ SameIndividualAtom <$> parseIndividualArg pm <*> parseIndividualArg pm parseDifferentIndividualsAtom :: GA.PrefixMap -> CharParser st Atom parseDifferentIndividualsAtom pm = parseEnclosedWithKeyword "DifferentIndividualsAtom" $ DifferentIndividualsAtom <$> parseIndividualArg pm <*> parseIndividualArg pm parseAtom :: GA.PrefixMap -> CharParser st Atom parseAtom pm = parseClassAtom pm <|> parseDataRangeAtom pm <|> parseObjectPropertyAtom pm <|> parseDataPropertyAtom pm <|> parseBuiltInAtom pm<|> parseSameIndividualAtom pm <|> parseDifferentIndividualsAtom pm <?> "Atom" parseBody :: GA.PrefixMap -> CharParser st Body parseBody pm = do parseEnclosedWithKeyword "Body" $ many (parseAtom pm) parseHead :: GA.PrefixMap -> CharParser st Body parseHead pm = do parseEnclosedWithKeyword "Head" $ many (parseAtom pm) parseDLSafeRule :: GA.PrefixMap -> CharParser st Rule parseDLSafeRule pm = parseEnclosedWithKeyword "DLSafeRule" $ DLSafeRule <$> parseAnnotations pm <*> parseBody pm <*> parseHead pm parseDGClassAtom :: GA.PrefixMap -> CharParser st DGAtom parseDGClassAtom pm = parseEnclosedWithKeyword "ClassAtom" $ DGClassAtom <$> parseClassExpression pm <*> parseIndividualArg pm parseDGObjectPropertyAtom :: GA.PrefixMap -> CharParser st DGAtom parseDGObjectPropertyAtom pm = parseEnclosedWithKeyword "ObjectPropertyAtom" $ DGObjectPropertyAtom <$> parseObjectPropertyExpression pm <*> parseIndividualArg pm <*> parseIndividualArg pm parseDGAtom :: GA.PrefixMap -> CharParser st DGAtom 
parseDGAtom pm = parseDGClassAtom pm <|> parseDGObjectPropertyAtom pm parseDGBody :: GA.PrefixMap -> CharParser st DGBody parseDGBody pm = parseEnclosedWithKeyword "Body" $ many (parseDGAtom pm) parseDGHead :: GA.PrefixMap -> CharParser st DGHead parseDGHead pm = parseEnclosedWithKeyword "Head" $ many (parseDGAtom pm) parseDGRule :: GA.PrefixMap -> CharParser st Rule parseDGRule pm = parseEnclosedWithKeyword "DescriptionGraphRule" $ DGRule <$> parseAnnotations pm <*> parseDGBody pm <*> parseDGHead pm parseRule :: GA.PrefixMap -> CharParser st Axiom parseRule pm = Rule <$> (parseDLSafeRule pm <|> parseDGRule pm) parseDGNodeAssertion :: GA.PrefixMap -> CharParser st DGNodeAssertion parseDGNodeAssertion pm = parseEnclosedWithKeyword "NodeAssertion" $ DGNodeAssertion <$> parseIRI pm <*> parseIRI pm parseDGNodes :: GA.PrefixMap -> CharParser st DGNodes parseDGNodes pm = parseEnclosedWithKeyword "Nodes" $ many1 (parseDGNodeAssertion pm) parseDGEdgeAssertion :: GA.PrefixMap -> CharParser st DGEdgeAssertion parseDGEdgeAssertion pm = parseEnclosedWithKeyword "EdgeAssertion" $ DGEdgeAssertion <$> parseIRI pm <*> parseIRI pm <*> parseIRI pm parseDGEdes :: GA.PrefixMap -> CharParser st DGEdges parseDGEdes pm = parseEnclosedWithKeyword "Edges" $ many1 (parseDGEdgeAssertion pm) parseMainClasses :: GA.PrefixMap -> CharParser st MainClasses parseMainClasses pm = parseEnclosedWithKeyword "MainClasses" $ many1 (parseIRI pm) parseDGAxiom :: GA.PrefixMap -> CharParser st Axiom parseDGAxiom pm = parseEnclosedWithKeyword "DescriptionGraph" $ DGAxiom <$> parseAnnotations pm <*> parseIRI pm <*> parseDGNodes pm <*> parseDGEdes pm <*> parseMainClasses pm parseAxiom :: GA.PrefixMap -> CharParser st Axiom parseAxiom pm = (parseDeclaration pm) <|> (parseClassAxiom pm) <|> (parseObjectPropertyAxiom pm) <|> (parseDataPropertyAxiom pm) <|> (parseDataTypeDefinition pm) <|> (parseHasKey pm) <|> (parseAssertion pm) <|> (parseAnnotationAxiom pm) <|> (parseRule pm) <|> (parseDGAxiom pm) <?> "Axiom" parseOntology :: GA.PrefixMap -> CharParser st Ontology parseOntology pm = let parseIriIfNotImportOrAxiomOrAnnotation = (arbitraryLookaheadOption [ forget (parseDirectlyImportsDocument pm), forget (parseAnnotation pm), forget (parseAxiom pm) ] >> never) <|> optionMaybe (parseIRI pm) in parseEnclosedWithKeyword "Ontology" $ do ontologyIri <- parseIriIfNotImportOrAxiomOrAnnotation versionIri <- parseIriIfNotImportOrAxiomOrAnnotation imports <- many (parseDirectlyImportsDocument pm) annotations <- many (parseAnnotation pm) axs <- many (parseAxiom pm) return $ Ontology ontologyIri versionIri (imports) annotations axs {- | Parses an OntologyDocument from Owl2 Functional Syntax -} parseOntologyDocument :: GA.PrefixMap -> CharParser st OntologyDocument parseOntologyDocument gapm = do skips' prefixes <- many parsePrefixDeclaration let pm = union gapm (fromList prefixes) onto <- parseOntology pm return $ OntologyDocument (OntologyMetadata AS) pm onto

Source: https://raw.githubusercontent.com/spechub/Hets/c27bd92f22f3b92e792eff0adaa3baec9d61c2b1/OWL2/ParseAS.hs (haskell)

Extracted comments: | Performs an arbitrary lookahead over choices of parsers # Basic constructs | Parses a comment | Skips trailing whitespaces and comments | Parses plain string with skip | Parses a full iri | rfc3987 plus '+' from scheme (scheme does not allow the dots) | Parses an abbreviated or full iri # Entities, Literals, and Individuals ## Entities ## Individuals # Annotations ## Data Range # Axioms ## Class Axioms | Helper function for *C*ommon*O*bject*P*roperty*A*xioms ## Data Type Definition Apparently the keyword is "Variable" instead of "IndividualVariable"

{-# LANGUAGE TupleSections #-} module OWL2.ParseAS where import Prelude hiding (lookup) import OWL2.AS as AS import Common.AnnoParser (newlineOrEof, commentLine) import Common.IRI hiding (parseIRI) import Common.Parsec import Common.Lexer (getNumber, value, nestCommentOut) import qualified Common.GlobalAnnotations as GA (PrefixMap) import Text.ParserCombinators.Parsec import Data.Char import Data.Map (union, fromList) {- | @followedBy c p@ first parses @p@ then looks ahead for @c@. Doesn't consume any input on failure. -} followedBy :: CharParser st b -> CharParser st a -> CharParser st a followedBy cond p = try $ do r <- p lookAhead cond return r {- | Performs an arbitrary lookahead over choices of parsers -} arbitraryLookaheadOption :: [CharParser st a] -> CharParser st a arbitraryLookaheadOption p = try $ lookAhead $ choice p {- | @manyN n p@ parses @n@ or more occurrences of @p@ -} manyN :: Int -> CharParser st a -> CharParser st [a] manyN n p = foldr (\ _ r -> p <:> r) (return []) [1 .. n] <++> many p {- | alias for @return Nothing@ -} never :: CharParser st (Maybe a) never = return Nothing comment :: CharParser st String comment = try $ do char '#' manyTill anyChar newlineOrEof skips :: CharParser st a -> CharParser st a skips = (<< skips') {- | Skips trailing whitespaces and comments -} skips' :: CharParser st () skips' = skipMany (forget space <|> forget comment <|> forget commentLine <|> forget nestCommentOut) keyword :: String -> CharParser st () keyword s = try $ skips (string s >> notFollowedBy alphaNum) fullIri :: CharParser st IRI fullIri = angles iriParser ncNameStart :: Char -> Bool ncNameStart c = isAlpha c || c == '_' ncNameChar :: Char -> Bool ncNameChar c = isAlphaNum c || elem c ".+-_\183" {- | Parses a prefix name ( PNAME_NS of ) -} prefix :: CharParser st String prefix = skips $ option "" (satisfy ncNameStart <:> many (satisfy ncNameChar)) << char ':' parseIRI :: GA.PrefixMap -> CharParser st IRI parseIRI pm = skips (expandIRI pm <$> (fullIri <|> compoundIriCurie) <?> "IRI") {- | @parseEnclosedWithKeyword k p@ parses the keyword @k@ followed by @p@ enclosed in parentheses. Skips spaces and comments before and after @p@.
-} parseEnclosedWithKeyword :: String -> CharParser st a -> CharParser st a parseEnclosedWithKeyword s p = do keyword s skips $ char '(' r <- skips p skips $ char ')' return r parsePrefixDeclaration :: CharParser st (String, IRI) parsePrefixDeclaration = parseEnclosedWithKeyword "Prefix" $ do p <- prefix skips $ char '=' iri <- fullIri return $ (p, iri) parseDirectlyImportsDocument :: GA.PrefixMap -> CharParser st IRI parseDirectlyImportsDocument pm = parseEnclosedWithKeyword "Import" (parseIRI pm) <?> "Import" parseEntity' :: GA.PrefixMap -> EntityType -> String -> CharParser st Entity parseEntity' pm t k = parseEnclosedWithKeyword k $ do iri <- parseIRI pm return $ mkEntity t iri parseEntity :: GA.PrefixMap -> CharParser st Entity parseEntity pm = parseEntity' pm Class "Class" <|> parseEntity' pm Datatype "Datatype" <|> parseEntity' pm ObjectProperty "ObjectProperty" <|> parseEntity' pm DataProperty "DataProperty" <|> parseEntity' pm AnnotationProperty "AnnotationProperty" <|> parseEntity' pm NamedIndividual "NamedIndividual" <?> "Entity" # # Literals charOrEscaped :: CharParser st Char charOrEscaped = (try $ string "\\\"" >> return '"') <|> (try $ string "\\\\" >> return '\\') <|> anyChar parseTypeSignature :: GA.PrefixMap -> CharParser st IRI parseTypeSignature pm = do string "^^" parseIRI pm parseLanguageTag :: CharParser st String parseLanguageTag = do char '@' many1 (letter <|> char '-') parseLiteral :: GA.PrefixMap -> CharParser st Literal parseLiteral pm = do char '"' s <- manyTill charOrEscaped (try $ char '"') typ <- (Typed <$> parseTypeSignature pm) <|> (Untyped <$> optionMaybe parseLanguageTag) return $ Literal s typ parseAnonymousIndividual :: GA.PrefixMap -> CharParser st AnonymousIndividual parseAnonymousIndividual pm = skips $ expandIRI pm <$> iriCurie parseIndividual :: GA.PrefixMap -> CharParser st Individual parseIndividual pm = parseIRI pm <|> parseAnonymousIndividual pm <?> "Individual" parseAnnotationValue :: GA.PrefixMap -> CharParser st AnnotationValue parseAnnotationValue pm = (parseLiteral pm >>= return . AnnValLit) <|> (parseIRI pm >>= return . AnnValue) <|> (parseAnonymousIndividual pm >>= return . 
AnnAnInd) <?> "AnnotationValue" parseAnnotationSubject :: GA.PrefixMap -> CharParser st AnnotationSubject parseAnnotationSubject pm = (AnnSubAnInd <$> parseAnonymousIndividual pm) <|> (AnnSubIri <$> parseIRI pm) parseAnnotations :: GA.PrefixMap -> CharParser st [Annotation] parseAnnotations pm = many $ parseAnnotation pm parseAnnotation :: GA.PrefixMap -> CharParser st Annotation parseAnnotation pm = (flip (<?>)) "Annotation" $ parseEnclosedWithKeyword "Annotation" $ do an <- (many (parseAnnotation pm)) property <- (parseIRI pm) v <- parseAnnotationValue pm return $ Annotation an property v parseDataJunction' :: GA.PrefixMap -> String -> JunctionType -> CharParser st DataRange parseDataJunction' pm k t = parseEnclosedWithKeyword k $ DataJunction t <$> manyN 2 (parseDataRange pm) parseDataJunction :: GA.PrefixMap -> CharParser st DataRange parseDataJunction pm = parseDataJunction' pm "DataUnionOf" UnionOf <|> parseDataJunction' pm "DataIntersectionOf" IntersectionOf parseDataComplementOf :: GA.PrefixMap -> CharParser st DataRange parseDataComplementOf pm = parseEnclosedWithKeyword "DataComplementOf" $ DataComplementOf <$> parseDataRange pm parseDataOneOf :: GA.PrefixMap -> CharParser st DataRange parseDataOneOf pm = parseEnclosedWithKeyword "DataOneOf" $ DataOneOf <$> many1 (parseLiteral pm) parseDatatypeResComponent :: GA.PrefixMap -> CharParser st (ConstrainingFacet, RestrictionValue) parseDatatypeResComponent pm = (,) <$> (parseIRI pm) <*> (parseLiteral pm) parseDatatypeRestriction :: GA.PrefixMap -> CharParser st DataRange parseDatatypeRestriction pm = parseEnclosedWithKeyword "DatatypeRestriction" $ do dataType <- (parseIRI pm) restrictions <- many1 (parseDatatypeResComponent pm) return $ DataType dataType restrictions parseDataRange :: GA.PrefixMap -> CharParser st DataRange parseDataRange pm = (parseDataJunction pm) <|> (parseDataComplementOf pm) <|> (parseDataOneOf pm) <|> (parseDatatypeRestriction pm) <|> (DataType <$> (parseIRI pm) <*> return []) <?> "DataRange" # # Declaration parseDeclaration :: GA.PrefixMap -> CharParser st Axiom parseDeclaration pm = parseEnclosedWithKeyword "Declaration" $ do annotations <- many (parseAnnotation pm) entity <- (parseEntity pm) return $ Declaration annotations entity # # ClassExpressions parseObjectIntersectionOf :: GA.PrefixMap -> CharParser st ClassExpression parseObjectIntersectionOf pm = parseEnclosedWithKeyword "ObjectIntersectionOf" $ ObjectJunction IntersectionOf <$> manyN 2 (parseClassExpression pm) parseObjectUnionOf :: GA.PrefixMap -> CharParser st ClassExpression parseObjectUnionOf pm = parseEnclosedWithKeyword "ObjectUnionOf" $ ObjectJunction UnionOf <$> manyN 2 (parseClassExpression pm) parseObjectComplementOf :: GA.PrefixMap -> CharParser st ClassExpression parseObjectComplementOf pm = parseEnclosedWithKeyword "ObjectComplementOf" $ ObjectComplementOf <$> (parseClassExpression pm) parseObjectOneOf :: GA.PrefixMap -> CharParser st ClassExpression parseObjectOneOf pm = parseEnclosedWithKeyword "ObjectOneOf" $ ObjectOneOf <$> many1 (parseIndividual pm) parseObjectProperty :: GA.PrefixMap -> CharParser st ObjectPropertyExpression parseObjectProperty pm = ObjectProp <$> (parseIRI pm) parseInverseObjectProperty :: GA.PrefixMap -> CharParser st ObjectPropertyExpression parseInverseObjectProperty pm = parseEnclosedWithKeyword "ObjectInverseOf" $ ObjectInverseOf <$> (parseObjectProperty pm) parseObjectPropertyExpression :: GA.PrefixMap -> CharParser st ObjectPropertyExpression parseObjectPropertyExpression pm = (parseInverseObjectProperty 
pm) <|> (parseObjectProperty pm) <?> "ObjectPropertyExpression" parseObjectSomeValuesFrom :: GA.PrefixMap -> CharParser st ClassExpression parseObjectSomeValuesFrom pm = parseEnclosedWithKeyword "ObjectSomeValuesFrom" $ do objectPropertyExpr <- (parseObjectPropertyExpression pm) classExpr <- (parseClassExpression pm) return $ ObjectValuesFrom SomeValuesFrom objectPropertyExpr classExpr parseObjectAllValuesFrom :: GA.PrefixMap -> CharParser st ClassExpression parseObjectAllValuesFrom pm = parseEnclosedWithKeyword "ObjectAllValuesFrom" $ do objectPropertyExpr <- (parseObjectPropertyExpression pm) classExpr <- (parseClassExpression pm) return $ ObjectValuesFrom AllValuesFrom objectPropertyExpr classExpr parseObjectHasValue :: GA.PrefixMap -> CharParser st ClassExpression parseObjectHasValue pm = parseEnclosedWithKeyword "ObjectHasValue" $ do objectPropertyExpr <- (parseObjectPropertyExpression pm) val <- (parseIndividual pm) return $ ObjectHasValue objectPropertyExpr val parseObjectHasSelf :: GA.PrefixMap -> CharParser st ClassExpression parseObjectHasSelf pm = parseEnclosedWithKeyword "ObjectHasSelf" $ ObjectHasSelf <$> (parseObjectPropertyExpression pm) parseCardinality' :: CardinalityType -> String -> CharParser st a -> CharParser st b -> CharParser st (Cardinality a b) parseCardinality' c k pa pb = parseEnclosedWithKeyword k $ do n <- skips $ value 10 <$> getNumber objectPropertyExpr <- pa classExpr <- optionMaybe pb return $ Cardinality c n objectPropertyExpr classExpr parseObjectCardinality :: GA.PrefixMap -> CharParser st ClassExpression parseObjectCardinality pm = ObjectCardinality <$> ( cardinality "ObjectMinCardinality" MinCardinality <|> cardinality "ObjectMaxCardinality" MaxCardinality <|> cardinality "ObjectExactCardinality" ExactCardinality ) where cardinality s t = parseCardinality' t s a b a = (parseObjectPropertyExpression pm) b = (parseClassExpression pm) parseDataCardinality :: GA.PrefixMap -> CharParser st ClassExpression parseDataCardinality pm = DataCardinality <$> ( cardinality "DataMinCardinality" MinCardinality <|> cardinality "DataMaxCardinality" MaxCardinality <|> cardinality "DataExactCardinality" ExactCardinality ) where cardinality s t = parseCardinality' t s a b a = (parseIRI pm) b = (parseDataRange pm) parseDataSomeValuesFrom :: GA.PrefixMap -> CharParser st ClassExpression parseDataSomeValuesFrom pm = parseEnclosedWithKeyword "DataSomeValuesFrom" $ do exprs <- many1 (followedBy ((parseDataRange pm)) ((parseIRI pm))) range <- (parseDataRange pm) return $ DataValuesFrom SomeValuesFrom exprs range parseDataAllValuesFrom :: GA.PrefixMap -> CharParser st ClassExpression parseDataAllValuesFrom pm = parseEnclosedWithKeyword "DataAllValuesFrom" $ do exprs <- many1 (followedBy (parseDataRange pm) ((parseIRI pm))) range <- (parseDataRange pm) return $ DataValuesFrom AllValuesFrom exprs range parseDataHasValue :: GA.PrefixMap -> CharParser st ClassExpression parseDataHasValue pm = parseEnclosedWithKeyword "DataHasValue" $ DataHasValue <$> (parseIRI pm) <*> (parseLiteral pm) parseClassExpression :: GA.PrefixMap -> CharParser st ClassExpression parseClassExpression pm = (parseObjectIntersectionOf pm) <|> (parseObjectUnionOf pm) <|> (parseObjectComplementOf pm) <|> (parseObjectOneOf pm) <|> (parseObjectCardinality pm) <|> (parseObjectSomeValuesFrom pm) <|> (parseObjectAllValuesFrom pm) <|> (parseObjectHasValue pm) <|> (parseObjectHasSelf pm) <|> (parseDataSomeValuesFrom pm) <|> (parseDataAllValuesFrom pm) <|> (parseDataHasValue pm) <|> (parseDataCardinality pm) <|> 
(Expression <$> (parseIRI pm)) <?> "ClassExpression" parseSubClassOf :: GA.PrefixMap -> CharParser st ClassAxiom parseSubClassOf pm = parseEnclosedWithKeyword "SubClassOf" $ do annotations <- many (parseAnnotation pm) subClassExpression <- (parseClassExpression pm) superClassExpression <- (parseClassExpression pm) return $ SubClassOf annotations subClassExpression superClassExpression parseEquivalentClasses :: GA.PrefixMap -> CharParser st ClassAxiom parseEquivalentClasses pm = parseEnclosedWithKeyword "EquivalentClasses" $ EquivalentClasses <$> (parseAnnotations pm) <*> manyN 2 (parseClassExpression pm) parseDisjointClasses :: GA.PrefixMap -> CharParser st ClassAxiom parseDisjointClasses pm = parseEnclosedWithKeyword "DisjointClasses" $ DisjointClasses <$> (parseAnnotations pm) <*> manyN 2 (parseClassExpression pm) parseDisjointUnion :: GA.PrefixMap -> CharParser st ClassAxiom parseDisjointUnion pm = parseEnclosedWithKeyword "DisjointUnion" $ DisjointUnion <$> (parseAnnotations pm) <*> (parseIRI pm) <*> manyN 2 (parseClassExpression pm) parseClassAxiom :: GA.PrefixMap -> CharParser st Axiom parseClassAxiom pm = ClassAxiom <$> ( (parseSubClassOf pm) <|> (parseEquivalentClasses pm) <|> (parseDisjointClasses pm) <|> (parseDisjointUnion pm) <?> "ClassAxiom" ) # # Object Property Axioms parseEquivalentObjectProperties :: GA.PrefixMap -> CharParser st ObjectPropertyAxiom parseEquivalentObjectProperties pm = parseEnclosedWithKeyword "EquivalentObjectProperties" $ EquivalentObjectProperties <$> (parseAnnotations pm) <*> manyN 2 (parseObjectPropertyExpression pm) parseDisjointObjectProperties :: GA.PrefixMap -> CharParser st ObjectPropertyAxiom parseDisjointObjectProperties pm = parseEnclosedWithKeyword "DisjointObjectProperties" $ DisjointObjectProperties <$> (parseAnnotations pm) <*> manyN 2 (parseObjectPropertyExpression pm) parseObjectPropertyDomain :: GA.PrefixMap -> CharParser st ObjectPropertyAxiom parseObjectPropertyDomain pm = parseEnclosedWithKeyword "ObjectPropertyDomain" $ ObjectPropertyDomain <$> (parseAnnotations pm) <*> (parseObjectPropertyExpression pm) <*> (parseClassExpression pm) parseObjectPropertyRange :: GA.PrefixMap -> CharParser st ObjectPropertyAxiom parseObjectPropertyRange pm = parseEnclosedWithKeyword "ObjectPropertyRange" $ ObjectPropertyRange <$> (parseAnnotations pm) <*> (parseObjectPropertyExpression pm) <*> (parseClassExpression pm) parseInverseObjectProperties :: GA.PrefixMap -> CharParser st ObjectPropertyAxiom parseInverseObjectProperties pm = parseEnclosedWithKeyword "InverseObjectProperties" $ InverseObjectProperties <$> (parseAnnotations pm) <*> (parseObjectPropertyExpression pm) <*> (parseObjectPropertyExpression pm) # # # SubObjectPropertyOf parseObjectPropertyExpressionChain :: GA.PrefixMap -> CharParser st PropertyExpressionChain parseObjectPropertyExpressionChain pm = parseEnclosedWithKeyword "ObjectPropertyChain" $ many1 (parseObjectPropertyExpression pm) parseSubObjectPropertyExpression :: GA.PrefixMap -> CharParser st SubObjectPropertyExpression parseSubObjectPropertyExpression pm = SubObjPropExpr_exprchain <$> (parseObjectPropertyExpressionChain pm) <|> SubObjPropExpr_obj <$> (parseObjectPropertyExpression pm) <?> "SubObjectPropertyExpression" parseSubObjectPropertyOf :: GA.PrefixMap -> CharParser st ObjectPropertyAxiom parseSubObjectPropertyOf pm = parseEnclosedWithKeyword "SubObjectPropertyOf" $ SubObjectPropertyOf <$> (parseAnnotations pm) <*> (parseSubObjectPropertyExpression pm) <*> (parseObjectPropertyExpression pm) parseCOPA :: GA.PrefixMap -> 
( AxiomAnnotations -> ObjectPropertyExpression -> ObjectPropertyAxiom ) -> String -> CharParser st ObjectPropertyAxiom parseCOPA pm c s = parseEnclosedWithKeyword s $ c <$> (parseAnnotations pm) <*> (parseObjectPropertyExpression pm) parseObjectPropertyAxiom :: GA.PrefixMap -> CharParser st Axiom parseObjectPropertyAxiom pm = ObjectPropertyAxiom <$> ( (parseSubObjectPropertyOf pm) <|> (parseEquivalentObjectProperties pm) <|> (parseDisjointObjectProperties pm) <|> (parseObjectPropertyDomain pm) <|> (parseObjectPropertyRange pm) <|> (parseInverseObjectProperties pm) <|> parseCOPA pm FunctionalObjectProperty "FunctionalObjectProperty" <|> parseCOPA pm InverseFunctionalObjectProperty "InverseFunctionalObjectProperty" <|> parseCOPA pm ReflexiveObjectProperty "ReflexiveObjectProperty" <|> parseCOPA pm IrreflexiveObjectProperty "IrreflexiveObjectProperty" <|> parseCOPA pm SymmetricObjectProperty "SymmetricObjectProperty" <|> parseCOPA pm AsymmetricObjectProperty "AsymmetricObjectProperty" <|> parseCOPA pm TransitiveObjectProperty "TransitiveObjectProperty" <?> "ObjectPropertyAxiom" ) # # DataPropertyAxioms parseSubDataPropertyOf :: GA.PrefixMap -> CharParser st DataPropertyAxiom parseSubDataPropertyOf pm = parseEnclosedWithKeyword "SubDataPropertyOf" $ SubDataPropertyOf <$> parseAnnotations pm <*> (parseIRI pm) <*> (parseIRI pm) parseEquivalentDataProperties :: GA.PrefixMap -> CharParser st DataPropertyAxiom parseEquivalentDataProperties pm = parseEnclosedWithKeyword "EquivalentDataProperties" $ EquivalentDataProperties <$> (parseAnnotations pm) <*> manyN 2 (parseIRI pm) parseDisjointDataProperties :: GA.PrefixMap -> CharParser st DataPropertyAxiom parseDisjointDataProperties pm = parseEnclosedWithKeyword "DisjointDataProperties" $ DisjointDataProperties <$> parseAnnotations pm <*> manyN 2 (parseIRI pm) parseDataPropertyDomain :: GA.PrefixMap -> CharParser st DataPropertyAxiom parseDataPropertyDomain pm = parseEnclosedWithKeyword "DataPropertyDomain" $ DataPropertyDomain <$> parseAnnotations pm <*> (parseIRI pm) <*> parseClassExpression pm parseDataPropertyRange :: GA.PrefixMap -> CharParser st DataPropertyAxiom parseDataPropertyRange pm = parseEnclosedWithKeyword "DataPropertyRange" $ DataPropertyRange <$> parseAnnotations pm <*> (parseIRI pm) <*> parseDataRange pm parseFunctionalDataProperty :: GA.PrefixMap -> CharParser st DataPropertyAxiom parseFunctionalDataProperty pm = parseEnclosedWithKeyword "FunctionalDataProperty" $ FunctionalDataProperty <$> parseAnnotations pm <*> (parseIRI pm) parseDataPropertyAxiom :: GA.PrefixMap -> CharParser st Axiom parseDataPropertyAxiom pm = DataPropertyAxiom <$> ( parseSubDataPropertyOf pm <|> parseEquivalentDataProperties pm <|> parseDisjointDataProperties pm <|> parseDataPropertyDomain pm <|> parseDataPropertyRange pm <|> parseFunctionalDataProperty pm <?> "DataPropertyAxiom" ) parseDataTypeDefinition :: GA.PrefixMap -> CharParser st Axiom parseDataTypeDefinition pm = parseEnclosedWithKeyword "DatatypeDefinition" $ DatatypeDefinition <$> parseAnnotations pm <*> (parseIRI pm) <*> parseDataRange pm skipChar :: Char -> CharParser st () skipChar = forget . skips . 
char parensP :: CharParser st a -> CharParser st a parensP = between (skipChar '(') (skipChar ')') # # HasKey parseHasKey :: GA.PrefixMap -> CharParser st Axiom parseHasKey pm = parseEnclosedWithKeyword "HasKey" $ do annotations <- (parseAnnotations pm) classExpr <- (parseClassExpression pm) objectPropertyExprs <- parensP $ many (parseObjectPropertyExpression pm) dataPropertyExprs <- parensP $ many (parseIRI pm) return $ HasKey annotations classExpr objectPropertyExprs dataPropertyExprs # # Assertion parseSameIndividual :: GA.PrefixMap -> CharParser st Assertion parseSameIndividual pm = parseEnclosedWithKeyword "SameIndividual" $ SameIndividual <$> (parseAnnotations pm) <*> manyN 2 (parseIndividual pm) parseDifferentIndividuals :: GA.PrefixMap -> CharParser st Assertion parseDifferentIndividuals pm = parseEnclosedWithKeyword "DifferentIndividuals" $ DifferentIndividuals <$> (parseAnnotations pm) <*> manyN 2 (parseIndividual pm) parseClassAssertion :: GA.PrefixMap -> CharParser st Assertion parseClassAssertion pm = parseEnclosedWithKeyword "ClassAssertion" $ ClassAssertion <$> (parseAnnotations pm) <*> (parseClassExpression pm) <*> (parseIndividual pm) parseObjectPropertyAssertion :: GA.PrefixMap -> CharParser st Assertion parseObjectPropertyAssertion pm = parseEnclosedWithKeyword "ObjectPropertyAssertion" $ ObjectPropertyAssertion <$> (parseAnnotations pm) <*> (parseObjectPropertyExpression pm) <*> (parseIndividual pm) <*> (parseIndividual pm) parseNegativeObjectPropertyAssertion :: GA.PrefixMap -> CharParser st Assertion parseNegativeObjectPropertyAssertion pm = parseEnclosedWithKeyword "NegativeObjectPropertyAssertion" $ NegativeObjectPropertyAssertion <$> (parseAnnotations pm) <*> (parseObjectPropertyExpression pm) <*> (parseIndividual pm) <*> (parseIndividual pm) parseDataPropertyAssertion :: GA.PrefixMap -> CharParser st Assertion parseDataPropertyAssertion pm = parseEnclosedWithKeyword "DataPropertyAssertion" $ DataPropertyAssertion <$> (parseAnnotations pm) <*> (parseIRI pm) <*> (parseIndividual pm) <*> (parseLiteral pm) parseNegativeDataPropertyAssertion :: GA.PrefixMap -> CharParser st Assertion parseNegativeDataPropertyAssertion pm = parseEnclosedWithKeyword "NegativeDataPropertyAssertion" $ NegativeDataPropertyAssertion <$> (parseAnnotations pm) <*> (parseIRI pm) <*> (parseIndividual pm) <*> (parseLiteral pm) parseAssertion :: GA.PrefixMap -> CharParser st Axiom parseAssertion pm = Assertion <$> ( (parseSameIndividual pm) <|> (parseDifferentIndividuals pm) <|> (parseClassAssertion pm) <|> (parseObjectPropertyAssertion pm) <|> (parseNegativeObjectPropertyAssertion pm) <|> (parseDataPropertyAssertion pm) <|> (parseNegativeDataPropertyAssertion pm) ) parseAnnotationAssertion :: GA.PrefixMap -> CharParser st AnnotationAxiom parseAnnotationAssertion pm = parseEnclosedWithKeyword "AnnotationAssertion" $ AnnotationAssertion <$> (parseAnnotations pm) <*> (parseIRI pm) <*> (parseAnnotationSubject pm) <*> (parseAnnotationValue pm) parseSubAnnotationPropertyOf :: GA.PrefixMap -> CharParser st AnnotationAxiom parseSubAnnotationPropertyOf pm = parseEnclosedWithKeyword "SubAnnotationPropertyOf" $ SubAnnotationPropertyOf <$> (parseAnnotations pm) <*> (parseIRI pm) <*> (parseIRI pm) parseAnnotationPropertyDomain :: GA.PrefixMap -> CharParser st AnnotationAxiom parseAnnotationPropertyDomain pm = parseEnclosedWithKeyword "AnnotationPropertyDomain" $ AnnotationPropertyDomain <$> (parseAnnotations pm) <*> (parseIRI pm) <*> (parseIRI pm) parseAnnotationPropertyRange :: GA.PrefixMap -> CharParser st 
AnnotationAxiom parseAnnotationPropertyRange pm = parseEnclosedWithKeyword "AnnotationPropertyRange" $ AnnotationPropertyRange <$> (parseAnnotations pm) <*> (parseIRI pm) <*> (parseIRI pm) parseAnnotationAxiom :: GA.PrefixMap -> CharParser st Axiom parseAnnotationAxiom pm = AnnotationAxiom <$> ( (parseAnnotationAssertion pm) <|> (parseSubAnnotationPropertyOf pm) <|> (parseAnnotationPropertyDomain pm) <|> (parseAnnotationPropertyRange pm) ) parseIndividualArg :: GA.PrefixMap -> CharParser st IndividualArg parseIndividualArg pm = IVar <$> parseEnclosedWithKeyword "Variable" (parseIRI pm) <|> IArg <$> parseAnonymousIndividual pm parseDataArg :: GA.PrefixMap -> CharParser st DataArg parseDataArg pm = Apparently the keyword is " Literal " instead of " LiteralVariable " DVar <$> parseEnclosedWithKeyword "Variable" (parseIRI pm) <|> DArg <$> parseLiteral pm parseClassAtom :: GA.PrefixMap -> CharParser st Atom parseClassAtom pm = parseEnclosedWithKeyword "ClassAtom" $ ClassAtom <$> parseClassExpression pm <*> parseIndividualArg pm parseDataRangeAtom :: GA.PrefixMap -> CharParser st Atom parseDataRangeAtom pm = parseEnclosedWithKeyword "DataRangeAtom" $ DataRangeAtom <$> parseDataRange pm <*> parseDataArg pm parseObjectPropertyAtom :: GA.PrefixMap -> CharParser st Atom parseObjectPropertyAtom pm = parseEnclosedWithKeyword "ObjectPropertyAtom" $ ObjectPropertyAtom <$> parseObjectPropertyExpression pm <*> parseIndividualArg pm <*> parseIndividualArg pm parseDataPropertyAtom :: GA.PrefixMap -> CharParser st Atom parseDataPropertyAtom pm = parseEnclosedWithKeyword "DataPropertyAtom" $ DataPropertyAtom <$> parseIRI pm <*> parseIndividualArg pm <*> parseDataArg pm parseBuiltInAtom :: GA.PrefixMap -> CharParser st Atom parseBuiltInAtom pm = parseEnclosedWithKeyword "BuiltInAtom" $ BuiltInAtom <$> parseIRI pm <*> many1 (parseDataArg pm) parseSameIndividualAtom :: GA.PrefixMap -> CharParser st Atom parseSameIndividualAtom pm = parseEnclosedWithKeyword "SameIndividualAtom" $ SameIndividualAtom <$> parseIndividualArg pm <*> parseIndividualArg pm parseDifferentIndividualsAtom :: GA.PrefixMap -> CharParser st Atom parseDifferentIndividualsAtom pm = parseEnclosedWithKeyword "DifferentIndividualsAtom" $ DifferentIndividualsAtom <$> parseIndividualArg pm <*> parseIndividualArg pm parseAtom :: GA.PrefixMap -> CharParser st Atom parseAtom pm = parseClassAtom pm <|> parseDataRangeAtom pm <|> parseObjectPropertyAtom pm <|> parseDataPropertyAtom pm <|> parseBuiltInAtom pm<|> parseSameIndividualAtom pm <|> parseDifferentIndividualsAtom pm <?> "Atom" parseBody :: GA.PrefixMap -> CharParser st Body parseBody pm = do parseEnclosedWithKeyword "Body" $ many (parseAtom pm) parseHead :: GA.PrefixMap -> CharParser st Body parseHead pm = do parseEnclosedWithKeyword "Head" $ many (parseAtom pm) parseDLSafeRule :: GA.PrefixMap -> CharParser st Rule parseDLSafeRule pm = parseEnclosedWithKeyword "DLSafeRule" $ DLSafeRule <$> parseAnnotations pm <*> parseBody pm <*> parseHead pm parseDGClassAtom :: GA.PrefixMap -> CharParser st DGAtom parseDGClassAtom pm = parseEnclosedWithKeyword "ClassAtom" $ DGClassAtom <$> parseClassExpression pm <*> parseIndividualArg pm parseDGObjectPropertyAtom :: GA.PrefixMap -> CharParser st DGAtom parseDGObjectPropertyAtom pm = parseEnclosedWithKeyword "ObjectPropertyAtom" $ DGObjectPropertyAtom <$> parseObjectPropertyExpression pm <*> parseIndividualArg pm <*> parseIndividualArg pm parseDGAtom :: GA.PrefixMap -> CharParser st DGAtom parseDGAtom pm = parseDGClassAtom pm <|> parseDGObjectPropertyAtom pm 
parseDGBody :: GA.PrefixMap -> CharParser st DGBody parseDGBody pm = parseEnclosedWithKeyword "Body" $ many (parseDGAtom pm) parseDGHead :: GA.PrefixMap -> CharParser st DGHead parseDGHead pm = parseEnclosedWithKeyword "Head" $ many (parseDGAtom pm) parseDGRule :: GA.PrefixMap -> CharParser st Rule parseDGRule pm = parseEnclosedWithKeyword "DescriptionGraphRule" $ DGRule <$> parseAnnotations pm <*> parseDGBody pm <*> parseDGHead pm parseRule :: GA.PrefixMap -> CharParser st Axiom parseRule pm = Rule <$> (parseDLSafeRule pm <|> parseDGRule pm) parseDGNodeAssertion :: GA.PrefixMap -> CharParser st DGNodeAssertion parseDGNodeAssertion pm = parseEnclosedWithKeyword "NodeAssertion" $ DGNodeAssertion <$> parseIRI pm <*> parseIRI pm parseDGNodes :: GA.PrefixMap -> CharParser st DGNodes parseDGNodes pm = parseEnclosedWithKeyword "Nodes" $ many1 (parseDGNodeAssertion pm) parseDGEdgeAssertion :: GA.PrefixMap -> CharParser st DGEdgeAssertion parseDGEdgeAssertion pm = parseEnclosedWithKeyword "EdgeAssertion" $ DGEdgeAssertion <$> parseIRI pm <*> parseIRI pm <*> parseIRI pm parseDGEdes :: GA.PrefixMap -> CharParser st DGEdges parseDGEdes pm = parseEnclosedWithKeyword "Edges" $ many1 (parseDGEdgeAssertion pm) parseMainClasses :: GA.PrefixMap -> CharParser st MainClasses parseMainClasses pm = parseEnclosedWithKeyword "MainClasses" $ many1 (parseIRI pm) parseDGAxiom :: GA.PrefixMap -> CharParser st Axiom parseDGAxiom pm = parseEnclosedWithKeyword "DescriptionGraph" $ DGAxiom <$> parseAnnotations pm <*> parseIRI pm <*> parseDGNodes pm <*> parseDGEdes pm <*> parseMainClasses pm parseAxiom :: GA.PrefixMap -> CharParser st Axiom parseAxiom pm = (parseDeclaration pm) <|> (parseClassAxiom pm) <|> (parseObjectPropertyAxiom pm) <|> (parseDataPropertyAxiom pm) <|> (parseDataTypeDefinition pm) <|> (parseHasKey pm) <|> (parseAssertion pm) <|> (parseAnnotationAxiom pm) <|> (parseRule pm) <|> (parseDGAxiom pm) <?> "Axiom" parseOntology :: GA.PrefixMap -> CharParser st Ontology parseOntology pm = let parseIriIfNotImportOrAxiomOrAnnotation = (arbitraryLookaheadOption [ forget (parseDirectlyImportsDocument pm), forget (parseAnnotation pm), forget (parseAxiom pm) ] >> never) <|> optionMaybe (parseIRI pm) in parseEnclosedWithKeyword "Ontology" $ do ontologyIri <- parseIriIfNotImportOrAxiomOrAnnotation versionIri <- parseIriIfNotImportOrAxiomOrAnnotation imports <- many (parseDirectlyImportsDocument pm) annotations <- many (parseAnnotation pm) axs <- many (parseAxiom pm) return $ Ontology ontologyIri versionIri (imports) annotations axs {- | Parses an OntologyDocument from Owl2 Functional Syntax -} parseOntologyDocument :: GA.PrefixMap -> CharParser st OntologyDocument parseOntologyDocument gapm = do skips' prefixes <- many parsePrefixDeclaration let pm = union gapm (fromList prefixes) onto <- parseOntology pm return $ OntologyDocument (OntologyMetadata AS) pm onto

cb8cea1d77d4a354285424ae00b7c005b39106930dcd9adb400fa2a4ac0f7136 | yzh44yzh/practical_erlang | mylib_worker.erl (erlang)
Source: https://raw.githubusercontent.com/yzh44yzh/practical_erlang/c9eec8cf44e152bf50d9bc6d5cb87fee4764f609/13_application/exercise/src/mylib_worker.erl
-module(mylib_worker).
-behavior(gen_server).
-export([start_link/0, get_version/0, get_modules/0, get_min_val/0, get_connection_timeout/0, all_apps/0]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).

a9507e4dbe1ec744425b8d15120235a056048efe1cd0ec836bbca10d1c819ac6 | karimarttila/clojure | config.clj (clojure)
Source: https://raw.githubusercontent.com/karimarttila/clojure/ee1261b9a8e6be92cb47aeb325f82a278f2c1ed3/webstore-demo/re-frame-demo/src/clj/simpleserver/util/config.clj
(ns simpleserver.util.config
  (:require [aero.core :as aero]
            [clojure.java.io :as io])) ;; clj-kondo requires this?
(defn create-config []
  (aero/read-config (io/resource "config.edn")))
;; Commented out for clj-kondo
;; Testing locally.
#_(comment
    (def config (create-config))
    (def table-name "session")
    (def my-env :dev)
    (def my-table-prefix "ss")
    )

9e9c89c914ee51670a65eea8f30b98d54497e410f137deb24baf2f55c1085cec | juxtin/clj-bob | lang.clj (clojure)
Source: https://raw.githubusercontent.com/juxtin/clj-bob/daec6cae0582e9bb4a72153e97f01fab5872ed1f/src/clj_bob/lang.clj
(ns clj-bob.lang
  (:refer-clojure :exclude [atom cons + < num if])
  (:require [clojure.string :as str]))
(defn if-nil [q a e] (if (or (nil? q) (= 'nil q)) (e) (a)))
(defn if [Q A E] (if-nil Q (fn [] A) (fn [] E)))
(defrecord Pair [car cdr])
(defmethod print-method Pair [p writer] (.write writer (format "(%s . %s)" (:car p) (:cdr p))))
(defn s-car [x] (if (instance? Pair x) (:car x) (first x)))
(defn s-cdr [x] (if (instance? Pair x) (:cdr x) (rest x)))
(def s-+ clojure.core/+)
(def s-< clojure.core/<)
(defn cons [h t] (if (sequential? t) (apply list (concat [h] t)) (Pair. h t)))
(defn equal "HAHAHAHA equality in Scheme is very weak." [x y] (= (str/lower-case x) (str/lower-case y)))
(defn pair? [x] (or (instance? Pair x) (and (list? x) (seq x))))
;; this is a bit different
(defn num [x] (let [num-sym? #(re-find #"^\d+$" (str %))] (cond (number? x) x (num-sym? x) (Integer/parseInt (str x)) :else 0)))
(defn atom [x] (if (pair? x) 'nil 't))
(defn car [x] (if (pair? x) (s-car x) ()))
(defn cdr [x] (if (pair? x) (s-cdr x) ()))
(defn equal [x y] (if (= x y) 't 'nil))
(defn < [x y] (if (s-< (num x) (num y)) 't 'nil))
(defn nat? [x] (if (and (integer? x) (< 0 x)) 't 'nil))
(def natp nat?)
(defn + [x y] (s-+ (num x) (num y)))
(defmacro defun [name args & body] `(defn ~name ~(vec args) ~@body))
(defmacro dethm [name args & body] `(defn ~name ~(vec args) ~@body))
(defn size [x] (if (atom x) 0 (+ 1 (+ (size (car x)) (size (cdr x))))))

0682241e1480796801caa572d6bc9cc343975ae48d530719843bba1b23f5fa46 | kitnil/dotfiles | dotfiles.scm (scheme)
Source: https://raw.githubusercontent.com/kitnil/dotfiles/68c67af0cbaa7f56f3e53f660f7e3b46e6d3fb4e/dotfiles/manifests/dotfiles.scm
(use-modules (packages containers)
             (packages kubernetes)
             (packages networking))
(packages->manifest
 (list cisco plumber k3d k9s kompose kubectl kubernetes-helm nerdctl virtctl))

41f6f42a4460c026a280dbde9405f87aa4a0dacd837d688b40b0cdb34f035f40 | avsm/mirage-duniverse | tcptimer.mli (ocaml)
Source: https://raw.githubusercontent.com/avsm/mirage-duniverse/983e115ff5a9fb37e3176c373e227e9379f0d777/ocaml_modules/tcpip/src/tcp/tcptimer.mli
(*
 * Copyright (c) 2012 Balraj Singh <>
 *
 * Permission to use, copy, modify, and distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 *)
type t
type time = int64
type tr =
  | Stoptimer
  | Continue of Sequence.t
  | ContinueSetPeriod of (time * Sequence.t)
module Make(T:Mirage_time_lwt.S) : sig
  val t : period_ns: time -> expire: (Sequence.t -> tr Lwt.t) -> t
  val start : t -> ?p:time -> Sequence.t -> unit Lwt.t
end

115cd7e38aa11596c46d2ebaf3c18d2b1729a1e68810fe3da6a76c689e54e1e4 | debasishg/hask | Service.hs
-- | All service-related functions.
module Lib.Service ( module Service ) where import Lib.Service.AccountService as Service (AccountService (..)) </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/debasishg/hask/1745ed50c8175cd035e8070c9cb988f4f5063653/h3layer/src/Lib/Service.hs</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">haskell</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "> | All service-related functions.</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "> module Lib.Service ( module Service ) where import Lib.Service.AccountService as Service (AccountService (..)) </span></div> </div></div> </td> </tr><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610233"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">db416a063be971c5827c9d52838b7c0b18972dca0eda900ef6b9434fb2ac762f</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">alesaccoia/festival_flinger</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">ogi_aec_diphone.scm</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">;;;;;;;;;;;;;;;;;;;;;;;;;;;;;<--OHSU-->;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; ;; Oregon Health & Science University ; ; Division of Biomedical Computer Science ; ; Center for Spoken Language Understanding ; ; Portland , OR USA ; ; Copyright ( c ) 2000 ; ; ;; ;; This module is not part of the CSTR / University of Edinburgh ; ; ;; release of the Festival TTS system. ;; ;; ;; ;; In addition to any conditions disclaimers below, please see the file ;; " NE Copyright Materials License.txt " distributed with this software ; ; ;; for information on usage and redistribution, and for a DISCLAIMER OF ;; ;; ALL WARRANTIES. ;; ;; ;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;<--OHSU-->;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;; Set up diphone voice OGI AEC diphones : male American English collected May 1997 ;; load unit selection routine (set! ogi_aec_diphone_dir (cdr (assoc 'ogi_aec_diphone voice-locations))) (set! load-path (cons (path-append ogi_aec_diphone_dir "festvox") load-path)) (set! load-path (cons (path-append libdir "ogi") load-path)) (require 'ogi_configure_voice) ;; select voice ;; this defines all the modules to use when synthesizing text using the current voice (define (voice_ogi_aec_diphone) "(voice_ogi_aec_diphone) Set up the current voice to be an American male AEC using the aec diphone set." ;; set target average pitch and variation (set! ogi_aec_diphone_f0_mean 105) (set! 
ogi_aec_diphone_f0_std 19) ;; set unit selection method (set! OGI_unitsel OGI_diphone_unitsel) ;; use the grouped file by default (set! ogi_grouped_or_ungrouped 'grouped) use or sinLPC signal processing (set! ogi_resORsinLPC 'resLPC) ;; configure voice using defaults (ogi_configure_voice 'ogi_aec_diphone) ;; overwrite defaults here: ;; diphone unit selection fallbacks (set! ogi_di_alt_L '((m= (m)) (n= (n)) (l= (l)) (h (pau)) (j (i:)) (dx (t d)) (& (^)) (k>9r (k)) (k>w (k)) (k>l (k)) (p>9r (p)) (p>w (p)) (p>l (p)) (t>9r (t)) (t>w (t)) (t>l (t)) (t>9r<s (t>9r t<s t)) (p>9r<s (p>9r p)) (t<s (t)))) (set! ogi_di_alt_R '((m= (m)) (n= (n)) (l= (l)) (h (pau)) (j (i:)) (dx (t d)) (& (^)) (k>9r (k)) (k>w (k)) (k>l (k)) (p>9r (p)) (p>w (p)) (p>l (p)) (t>9r (t)) (t>w (t)) (t>l (t)) (t>9r<s (t>9r t<s t)) (p>9r<s (p>9r p)) (t<s (t)))) (set! ogi_di_default "pau-h") ;; reslpc ungrouped analysis ;; define analysis parameters for OGIresLPC module (set! voicename 'ogi_aec_diphone) ;; define analysis parameters for OGIresLPC module (if (string-equal ogi_grouped_or_ungrouped "grouped") (set! ogi_resLPC_analysis_params (list (list 'dbname voice_dirname) (list 'groupfile (path-append ogi_diphone_dir "group" (string-append voicename "_resLPC.group"))) '(data_type "resLPC") '(access_mode "ondemand") )) ;; else if ungrouped (set! ogi_resLPC_analysis_params (list (list 'dbname voice_dirname) (list 'unitdic_file (path-append ogi_diphone_dir "ungrouped" "unittable.ms")) (list 'gain_file (path-append ogi_diphone_dir "festvox" "gain.dat")) '(phoneset "worldbet") (list 'base_dir (path-append ogi_diphone_dir "ungrouped/")) '(lpc_dir "lpc/") '(lpc_ext ".lsf") '(exc_dir "lpc/") '(exc_ext ".res") '(pm_dir "pm/") '(pm_ext ".pmv") '(data_type "resLPC") '(access_mode "ondemand") '(samp_freq 16000) '(sig_band 0.010) '(isCompressed "Y") ;; if "Y", compress when saving group file '(preemph 0.96) )) ) ;; initialize signal processing module (initialize_OGIsynth) ) ;; proclaim voice (proclaim_voice 'ogi_aec_diphone '((language english) (gender male) (dialect american) (description "This voice provides an American English male voice using a residual excited or sinusoidal LPC diphone synthesis module created at OGI. It uses a lexicon compiled from MOBY and CMU lexicons, and other trained modules used by CSTR voices.") (samplerate 16000))) comment this out if you want changes in this file to take effect without restarting Festival (provide 'ogi_aec_diphone) </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/alesaccoia/festival_flinger/87345aad3a3230751a8ff479f74ba1676217accd/lib/voices/english/ogi_aec_diphone/festvox/ogi_aec_diphone.scm</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">scheme</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "><--OHSU-->;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; ; ; ; ; ; ;; ; release of the Festival TTS system. ;; ;; In addition to any conditions disclaimers below, please see the file ;; ; for information on usage and redistribution, and for a DISCLAIMER OF ;; ALL WARRANTIES. 
;; ;; <--OHSU-->;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; Set up diphone voice load unit selection routine select voice this defines all the modules to use when synthesizing text using the current voice set target average pitch and variation set unit selection method use the grouped file by default configure voice using defaults overwrite defaults here: diphone unit selection fallbacks reslpc ungrouped analysis define analysis parameters for OGIresLPC module define analysis parameters for OGIresLPC module else if ungrouped if "Y", compress when saving group file initialize signal processing module proclaim voice</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "> OGI AEC diphones : male American English collected May 1997 (set! ogi_aec_diphone_dir (cdr (assoc 'ogi_aec_diphone voice-locations))) (set! load-path (cons (path-append ogi_aec_diphone_dir "festvox") load-path)) (set! load-path (cons (path-append libdir "ogi") load-path)) (require 'ogi_configure_voice) (define (voice_ogi_aec_diphone) "(voice_ogi_aec_diphone) Set up the current voice to be an American male AEC using the aec diphone set." (set! ogi_aec_diphone_f0_mean 105) (set! ogi_aec_diphone_f0_std 19) (set! OGI_unitsel OGI_diphone_unitsel) (set! ogi_grouped_or_ungrouped 'grouped) use or sinLPC signal processing (set! ogi_resORsinLPC 'resLPC) (ogi_configure_voice 'ogi_aec_diphone) (set! ogi_di_alt_L '((m= (m)) (n= (n)) (l= (l)) (h (pau)) (j (i:)) (dx (t d)) (& (^)) (k>9r (k)) (k>w (k)) (k>l (k)) (p>9r (p)) (p>w (p)) (p>l (p)) (t>9r (t)) (t>w (t)) (t>l (t)) (t>9r<s (t>9r t<s t)) (p>9r<s (p>9r p)) (t<s (t)))) (set! ogi_di_alt_R '((m= (m)) (n= (n)) (l= (l)) (h (pau)) (j (i:)) (dx (t d)) (& (^)) (k>9r (k)) (k>w (k)) (k>l (k)) (p>9r (p)) (p>w (p)) (p>l (p)) (t>9r (t)) (t>w (t)) (t>l (t)) (t>9r<s (t>9r t<s t)) (p>9r<s (p>9r p)) (t<s (t)))) (set! ogi_di_default "pau-h") (set! voicename 'ogi_aec_diphone) (if (string-equal ogi_grouped_or_ungrouped "grouped") (set! ogi_resLPC_analysis_params (list (list 'dbname voice_dirname) (list 'groupfile (path-append ogi_diphone_dir "group" (string-append voicename "_resLPC.group"))) '(data_type "resLPC") '(access_mode "ondemand") )) (set! ogi_resLPC_analysis_params (list (list 'dbname voice_dirname) (list 'unitdic_file (path-append ogi_diphone_dir "ungrouped" "unittable.ms")) (list 'gain_file (path-append ogi_diphone_dir "festvox" "gain.dat")) '(phoneset "worldbet") (list 'base_dir (path-append ogi_diphone_dir "ungrouped/")) '(lpc_dir "lpc/") '(lpc_ext ".lsf") '(exc_dir "lpc/") '(exc_ext ".res") '(pm_dir "pm/") '(pm_ext ".pmv") '(data_type "resLPC") '(access_mode "ondemand") '(samp_freq 16000) '(sig_band 0.010) '(preemph 0.96) )) ) (initialize_OGIsynth) ) (proclaim_voice 'ogi_aec_diphone '((language english) (gender male) (dialect american) (description "This voice provides an American English male voice using a residual excited or sinusoidal LPC diphone synthesis module created at OGI. 
It uses a lexicon compiled from MOBY and CMU lexicons, and other trained modules used by CSTR voices.") (samplerate 16000))) comment this out if you want changes in this file to take effect without restarting Festival (provide 'ogi_aec_diphone) </span></div> </div></div> </td> </tr><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610234"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">d2aef96194b2e5a861a7f58525e6fe3718ad328b8d907a9367c287dc7f0310e1</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">ayato-p/mokuhan</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">renderer_test.cljc</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">(ns org.panchromatic.mokuhan.renderer-test (:require [clojure.test :as t] [org.panchromatic.mokuhan.renderer :as sut] [org.panchromatic.mokuhan.ast :as ast])) (def ^:private delimiters {:open "{{" :close "}}"}) (t/deftest render-escaped-variable-test (t/testing "Single path" (let [v (ast/new-escaped-variable ["x"] delimiters)] (t/testing "String" (t/is (= "Hi" (sut/render v {:x "Hi"})))) (t/testing "Integer" (t/is (= "42" (sut/render v {:x 42})))) (t/testing "Boolean" (t/is (= "true" (sut/render v {:x true}))) (t/is (= "false" (sut/render v {:x false})))) (t/testing "HTML string" (t/is (= "&amp;&lt;&gt;&#39;&quot;" (sut/render v {:x "&<>'\""})))) (t/testing "Map" (t/is (= "{:foo 1}" (sut/render v {:x {:foo 1}})))) (t/testing "Vector" (t/is (= "[1 2]" (sut/render v {:x [1 2]})))) (t/testing "Object" (t/is (= "object!" (sut/render v {:x (reify Object (toString [this] "object!"))})))) (t/testing "nil" (t/is (= "" (sut/render v {:x nil})))) (t/testing "missing" (t/is (= "" (sut/render v {})))))) (t/testing "Dotted path" (let [v (ast/new-escaped-variable ["x" "y"] delimiters)] (t/testing "String" (t/is (= "Hi" (sut/render v {:x {:y "Hi"}})))) (t/testing "Integer" (t/is (= "42" (sut/render v {:x {:y 42}})))) (t/testing "Boolean" (t/is (= "true" (sut/render v {:x {:y true}}))) (t/is (= "false" (sut/render v {:x {:y false}})))) (t/testing "HTML string" (t/is (= "&amp;&lt;&gt;&#39;&quot;" (sut/render v {:x {:y "&<>'\""}})))) (t/testing "Map" (t/is (= "{:foo 1}" (sut/render v {:x {:y {:foo 1}}})))) (t/testing "Vector" (t/is (= "[1 2]" (sut/render v {:x {:y [1 2]}})))) (t/testing "nil" (t/is (= "" (sut/render v {:x {:y nil}})))) (t/testing "missing" (t/is (= "" (sut/render v {:x {}})))))) (t/testing "Include index of list" (let [v (ast/new-escaped-variable ["x" 1 "y"] delimiters)] (t/is (= "42" (sut/render v {:x [{:y 41} {:y 42}]}))) (t/is (= "" (sut/render v {:x [{:y 41}]}))))) (t/testing "Dot" (let [v (ast/new-escaped-variable ["."] delimiters)] (t/is (= "{:x 42}" (sut/render v {:x 42})))))) (t/deftest render-standard-section-test (t/testing "single path section" (let [v (-> (ast/new-standard-section ["x"] delimiters) (update :contents conj (ast/new-text "!!")))] (t/is (= "!!" 
(sut/render v {:x true}) (sut/render v {:x {}}) (sut/render v {:x 42}) (sut/render v {:x "Hello"}))) (t/is (= "" (sut/render v {:x false}) (sut/render v {:x []}) (sut/render v {:x nil}) (sut/render v {}) (sut/render v nil))) (t/is (= "!!!!" (sut/render v {:x [1 1]}))) (t/is (= "Hello!!" (sut/render v {:x #(str "Hello" %)}))))) (t/testing "dotted path section" (let [v (-> (ast/new-standard-section ["x" "y"] delimiters) (update :contents conj (ast/new-text "!!")))] (t/is (= "!!" (sut/render v {:x {:y true}}) (sut/render v {:x {:y {}}}) (sut/render v {:x {:y 42}}) (sut/render v {:x {:y "Hello"}}))) (t/is (= "" (sut/render v {:x {:y false}}) (sut/render v {:x {:y []}}) (sut/render v {:x {:y nil}}) (sut/render v {:x {}}) (sut/render v {:x nil}))) (t/is (= "!!!!" (sut/render v {:x {:y [1 1]}}))) (t/is (= "Hello!!" (sut/render v {:x {:y #(str "Hello" %)}}))))) (t/testing "nested section" (let [v (-> (ast/new-standard-section ["x"] delimiters) (update :contents conj (-> (ast/new-standard-section ["y"] delimiters) (update :contents conj (ast/new-text "!!")))))] (t/is (= "!!" (sut/render v {:x {:y true}}))) (t/is (= "!!!!" (sut/render v {:x {:y [1 1]}}))) (t/is (= "!!!!!!!!" (sut/render v {:x [{:y [1 1]} {:y [1 1]}]}))) (t/is (= "!!!!" (sut/render v {:x [{:y [1 1]} {:y []}]}) (sut/render v {:x [{:y true} {:y false} {:y true}]}))))) (t/testing "nested and don't use outer key" (let [v (-> [(-> (ast/new-standard-section ["x"] delimiters) (update :contents conj (-> (ast/new-standard-section ["y"] delimiters) (update :contents conj (ast/new-text "Hello")))))] ast/new-mustache)] (t/is (= "" (sut/render v {:x [{:y false}] :y true})))))) </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/ayato-p/mokuhan/8f6de17b5c4a3712aa83ba4f37234de86f3c630b/test/org/panchromatic/mokuhan/renderer_test.cljc</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">clojure</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "></span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">(ns org.panchromatic.mokuhan.renderer-test (:require [clojure.test :as t] [org.panchromatic.mokuhan.renderer :as sut] [org.panchromatic.mokuhan.ast :as ast])) (def ^:private delimiters {:open "{{" :close "}}"}) (t/deftest render-escaped-variable-test (t/testing "Single path" (let [v (ast/new-escaped-variable ["x"] delimiters)] (t/testing "String" (t/is (= "Hi" (sut/render v {:x "Hi"})))) (t/testing "Integer" (t/is (= "42" (sut/render v {:x 42})))) (t/testing "Boolean" (t/is (= "true" (sut/render v {:x true}))) (t/is (= "false" (sut/render v {:x false})))) (t/testing "HTML string" (t/is (= "&amp;&lt;&gt;&#39;&quot;" (sut/render v {:x "&<>'\""})))) (t/testing "Map" (t/is (= "{:foo 1}" (sut/render v {:x {:foo 1}})))) (t/testing "Vector" (t/is (= "[1 2]" (sut/render v {:x [1 2]})))) (t/testing "Object" (t/is (= "object!" 
(sut/render v {:x (reify Object (toString [this] "object!"))})))) (t/testing "nil" (t/is (= "" (sut/render v {:x nil})))) (t/testing "missing" (t/is (= "" (sut/render v {})))))) (t/testing "Dotted path" (let [v (ast/new-escaped-variable ["x" "y"] delimiters)] (t/testing "String" (t/is (= "Hi" (sut/render v {:x {:y "Hi"}})))) (t/testing "Integer" (t/is (= "42" (sut/render v {:x {:y 42}})))) (t/testing "Boolean" (t/is (= "true" (sut/render v {:x {:y true}}))) (t/is (= "false" (sut/render v {:x {:y false}})))) (t/testing "HTML string" (t/is (= "&amp;&lt;&gt;&#39;&quot;" (sut/render v {:x {:y "&<>'\""}})))) (t/testing "Map" (t/is (= "{:foo 1}" (sut/render v {:x {:y {:foo 1}}})))) (t/testing "Vector" (t/is (= "[1 2]" (sut/render v {:x {:y [1 2]}})))) (t/testing "nil" (t/is (= "" (sut/render v {:x {:y nil}})))) (t/testing "missing" (t/is (= "" (sut/render v {:x {}})))))) (t/testing "Include index of list" (let [v (ast/new-escaped-variable ["x" 1 "y"] delimiters)] (t/is (= "42" (sut/render v {:x [{:y 41} {:y 42}]}))) (t/is (= "" (sut/render v {:x [{:y 41}]}))))) (t/testing "Dot" (let [v (ast/new-escaped-variable ["."] delimiters)] (t/is (= "{:x 42}" (sut/render v {:x 42})))))) (t/deftest render-standard-section-test (t/testing "single path section" (let [v (-> (ast/new-standard-section ["x"] delimiters) (update :contents conj (ast/new-text "!!")))] (t/is (= "!!" (sut/render v {:x true}) (sut/render v {:x {}}) (sut/render v {:x 42}) (sut/render v {:x "Hello"}))) (t/is (= "" (sut/render v {:x false}) (sut/render v {:x []}) (sut/render v {:x nil}) (sut/render v {}) (sut/render v nil))) (t/is (= "!!!!" (sut/render v {:x [1 1]}))) (t/is (= "Hello!!" (sut/render v {:x #(str "Hello" %)}))))) (t/testing "dotted path section" (let [v (-> (ast/new-standard-section ["x" "y"] delimiters) (update :contents conj (ast/new-text "!!")))] (t/is (= "!!" (sut/render v {:x {:y true}}) (sut/render v {:x {:y {}}}) (sut/render v {:x {:y 42}}) (sut/render v {:x {:y "Hello"}}))) (t/is (= "" (sut/render v {:x {:y false}}) (sut/render v {:x {:y []}}) (sut/render v {:x {:y nil}}) (sut/render v {:x {}}) (sut/render v {:x nil}))) (t/is (= "!!!!" (sut/render v {:x {:y [1 1]}}))) (t/is (= "Hello!!" (sut/render v {:x {:y #(str "Hello" %)}}))))) (t/testing "nested section" (let [v (-> (ast/new-standard-section ["x"] delimiters) (update :contents conj (-> (ast/new-standard-section ["y"] delimiters) (update :contents conj (ast/new-text "!!")))))] (t/is (= "!!" (sut/render v {:x {:y true}}))) (t/is (= "!!!!" (sut/render v {:x {:y [1 1]}}))) (t/is (= "!!!!!!!!" (sut/render v {:x [{:y [1 1]} {:y [1 1]}]}))) (t/is (= "!!!!" 
(sut/render v {:x [{:y [1 1]} {:y []}]}) (sut/render v {:x [{:y true} {:y false} {:y true}]}))))) (t/testing "nested and don't use outer key" (let [v (-> [(-> (ast/new-standard-section ["x"] delimiters) (update :contents conj (-> (ast/new-standard-section ["y"] delimiters) (update :contents conj (ast/new-text "Hello")))))] ast/new-mustache)] (t/is (= "" (sut/render v {:x [{:y false}] :y true})))))) </span></div> </div></div> </td> </tr><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610235"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">6f4947175e9394e316d0b7679bf7a33f69138a679b5146778f63f7a99cfa0bf6</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">monadbobo/ocaml-core</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">test.ml</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">type t = {foo:int ; bar : int ; baz : int} with compare </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/monadbobo/ocaml-core/9c1c06e7a1af7e15b6019a325d7dbdbd4cdb4020/base/compare/sample_output/test.ml</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">ocaml</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "></span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">type t = {foo:int ; bar : int ; baz : int} with compare </span></div> </div></div> </td> </tr><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610236"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">957195459c8ca38faaab9927d168140725b48dd661f7e529ea9bcc5c468f3a9f</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">nikomatsakis/a-mir-formality</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">wf--outlives.rkt</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">#lang racket (require redex/reduction-semantics "../../util.rkt" "../grammar.rkt" "../prove.rkt" "../libcore.rkt" ) (module+ test (redex-let* formality-rust [(Rust/Program (term ([(crate C { (struct 
Ref[(type T) (lifetime a)] where [(T : a)] { }) (struct NoRef[(type T) (lifetime a)] where [] { }) })] C))) ] (traced '() (test-term-true (rust:can-prove-where-clause-in-program Rust/Program (∀ [(type A)] where [] ; key point here: ; ; requires proving `A : 'b`, but that's implied by Ref < A , ' b > being WF (for[(lifetime b)] ((Ref < A b >) : b)) ) ) )) (traced '() (test-term-false (rust:can-prove-where-clause-in-program Rust/Program (∀ [(type A)] where [] in contrast to previous test , the ` NoRef ` struct does not ; imply a connection between `A` and `b` (for[(lifetime b)] ((NoRef < A b >) : b)) ) ) )) ) ) </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/nikomatsakis/a-mir-formality/71be4d5c4bd5e91d326277eaedd19a7abe3ac76a/racket-src/rust/test/wf--outlives.rkt</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">racket</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "> key point here: requires proving `A : 'b`, but that's implied by imply a connection between `A` and `b`</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">#lang racket (require redex/reduction-semantics "../../util.rkt" "../grammar.rkt" "../prove.rkt" "../libcore.rkt" ) (module+ test (redex-let* formality-rust [(Rust/Program (term ([(crate C { (struct Ref[(type T) (lifetime a)] where [(T : a)] { }) (struct NoRef[(type T) (lifetime a)] where [] { }) })] C))) ] (traced '() (test-term-true (rust:can-prove-where-clause-in-program Rust/Program (∀ [(type A)] where [] Ref < A , ' b > being WF (for[(lifetime b)] ((Ref < A b >) : b)) ) ) )) (traced '() (test-term-false (rust:can-prove-where-clause-in-program Rust/Program (∀ [(type A)] where [] in contrast to previous test , the ` NoRef ` struct does not (for[(lifetime b)] ((NoRef < A b >) : b)) ) ) )) ) ) </span></div> </div></div> </td> </tr><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610237"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">fee5dd349e94e0dde694da8c30a7cf772f3e9ce5ed542303f2d7e7c307f81b2c</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">sKabYY/palestra</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">p68.scm</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">(load "stream.scm") ; Don't run this file!!! 
(define (wrong-pairs s t) (interleave (stream-map-n (lambda (x) (list (stream-car s) x)) t) (wrong-pairs (stream-cdr s) (stream-cdr t)))) ;(define hehe (wrong-pairs integers integers)) ;(stream-for-n println hehe 10) (display (stream-car (wrong-pairs integers integers) ))(newline) </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/sKabYY/palestra/0906cc3a1fb786093a388d5ae7d59120f5aae16c/old1/sicp/3/p68.scm</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">scheme</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "> Don't run this file!!! (define hehe (wrong-pairs integers integers)) (stream-for-n println hehe 10)</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">(load "stream.scm") (define (wrong-pairs s t) (interleave (stream-map-n (lambda (x) (list (stream-car s) x)) t) (wrong-pairs (stream-cdr s) (stream-cdr t)))) (display (stream-car (wrong-pairs integers integers) ))(newline) </span></div> </div></div> </td> </tr><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610238"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">e002d82651e9e1a5af85311d7c0681f286cb3e21d431075194beea685fc86366</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">sealchain-project/sealchain</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">Swagger.hs</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">{-# LANGUAGE DataKinds #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE QuasiQuotes #-} {-# LANGUAGE RankNTypes #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE UndecidableInstances #-} {-# LANGUAGE ViewPatterns #-} {-# OPTIONS_GHC -fno-warn-orphans #-} module Cardano.Wallet.API.V1.Swagger where import Universum hiding (get, put) import Cardano.Wallet.API.Indices (ParamNames) import Cardano.Wallet.API.Request.Filter import Cardano.Wallet.API.Request.Pagination import Cardano.Wallet.API.Request.Sort import Cardano.Wallet.API.Response import Cardano.Wallet.API.V1.Generic (gconsName) import Cardano.Wallet.API.V1.Parameters import Cardano.Wallet.API.V1.Swagger.Example import Cardano.Wallet.API.V1.Types import Cardano.Wallet.TypeLits (KnownSymbols (..)) import Pos.Chain.Update (SoftwareVersion (svNumber)) import Pos.Core.NetworkMagic (NetworkMagic (..)) import Pos.Util.CompileInfo (CompileTimeInfo, ctiGitRevision) import Pos.Util.Servant (LoggingApi) import Control.Lens (At, Index, IxValue, at, (?~)) import Data.Aeson 
(encode) import Data.Aeson.Encode.Pretty import Data.Map (Map) import Data.Swagger hiding (Example) import Data.Typeable import Formatting (build, sformat) import NeatInterpolation import Servant (Handler, ServantErr (..), Server, StdMethod (..)) import Servant.API.Sub import Servant.Swagger import Servant.Swagger.UI (SwaggerSchemaUI') import Servant.Swagger.UI.Core (swaggerSchemaUIServerImpl) import Servant.Swagger.UI.ReDoc (redocFiles) import qualified Data.ByteString.Lazy as BL import qualified Data.Map.Strict as M import qualified Data.Text as T import qualified Data.Text.Encoding as T import qualified Pos.Core as Core import qualified Pos.Core.Attributes as Core import qualified Pos.Crypto.Hashing as Crypto -- -- Helper functions -- -- | Surround a Text with another surroundedBy :: Text -> Text -> Text surroundedBy wrap context = wrap <> context <> wrap -- | Display a multi-line code-block inline (e.g. in tables) inlineCodeBlock :: Text -> Text inlineCodeBlock txt = "<pre>" <> replaceNewLines (replaceWhiteSpaces txt) <> "</pre>" where replaceNewLines = T.replace "\n" "<br/>" replaceWhiteSpaces = T.replace " " "&nbsp;" -- | Drill in the 'Swagger' file in an unsafe way to modify a specific operation -- identified by a tuple (verb, path). The function looks a bit scary to use -- but is actually rather simple (see example below). -- -- Note that if the identified path doesn't exist, the function will throw -- at runtime when trying to read the underlying swagger structure! -- -- Example: -- -- swagger & paths % ~ ( POST , " /api / v1 / wallets " ) ` alterOperation ` ( description ? ~ " foo " ) & paths % ~ ( GET , " /api / v1 / wallets/{walletId } " ) ` alterOperation ` ( description ? ~ " bar " ) -- alterOperation :: ( IxValue m ~ item , Index m ~ FilePath , At m , HasGet item (Maybe Operation) , HasPut item (Maybe Operation) , HasPatch item (Maybe Operation) , HasPost item (Maybe Operation) , HasDelete item (Maybe Operation) ) => (StdMethod, FilePath) -> (Operation -> Operation) -> m -> m alterOperation (verb, path) alter = at path %~ (Just . unsafeAlterItem) where errUnreachableEndpoint :: Text errUnreachableEndpoint = "Unreachable endpoint: " <> show verb <> " " <> show path errUnsupportedVerb :: Text errUnsupportedVerb = "Used unsupported verb to identify an endpoint: " <> show verb unsafeAlterItem :: ( HasGet item (Maybe Operation) , HasPut item (Maybe Operation) , HasPatch item (Maybe Operation) , HasPost item (Maybe Operation) , HasDelete item (Maybe Operation) ) => Maybe item -> item unsafeAlterItem = maybe (error errUnreachableEndpoint) (unsafeLensFor verb %~ (Just . unsafeAlterOperation)) unsafeAlterOperation :: Maybe Operation -> Operation unsafeAlterOperation = maybe (error errUnreachableEndpoint) alter unsafeLensFor :: ( Functor f , HasGet item (Maybe Operation) , HasPut item (Maybe Operation) , HasPatch item (Maybe Operation) , HasPost item (Maybe Operation) , HasDelete item (Maybe Operation) ) => StdMethod -> (Maybe Operation -> f (Maybe Operation)) -> item -> f item unsafeLensFor = \case GET -> get PUT -> put PATCH -> patch POST -> post DELETE -> delete _ -> error errUnsupportedVerb -- | A combinator to modify the description of an operation, using -- 'alterOperation' under the hood. 
-- -- -- Example: -- -- swagger & paths % ~ ( POST , " /api / v1 / wallets " ) ` setDescription ` " foo " & paths % ~ ( GET , " /api / v1 / wallets/{walletId } " ) ` setDescription ` " bar " setDescription :: (IxValue m ~ PathItem, Index m ~ FilePath, At m) => (StdMethod, FilePath) -> Text -> m -> m setDescription endpoint str = endpoint `alterOperation` (description ?~ str) -- -- Instances -- instance HasSwagger a => HasSwagger (LoggingApi config a) where toSwagger _ = toSwagger (Proxy @a) instance ( Typeable res , KnownSymbols syms , HasSwagger subApi , syms ~ ParamNames res params ) => HasSwagger (FilterBy params res :> subApi) where toSwagger _ = let swgr = toSwagger (Proxy @subApi) allOps = map toText $ symbolVals (Proxy @syms) in swgr & over (operationsOf swgr . parameters) (addFilterOperations allOps) where addFilterOperations :: [Text] -> [Referenced Param] -> [Referenced Param] addFilterOperations ops xs = map (Inline . newParam) ops <> xs newParam :: Text -> Param newParam opName = let typeOfRes = fromString $ show $ typeRep (Proxy @ res) in Param { _paramName = opName , _paramRequired = Nothing , _paramDescription = Just $ filterDescription typeOfRes , _paramSchema = ParamOther ParamOtherSchema { _paramOtherSchemaIn = ParamQuery , _paramOtherSchemaAllowEmptyValue = Nothing , _paramOtherSchemaParamSchema = mempty } } filterDescription :: Text -> Text filterDescription typeOfRes = mconcat [ "A **FILTER** operation on a " <> typeOfRes <> ". " , "Filters support a variety of queries on the resource. " , "These are: \n\n" , "- `EQ[value]` : only allow values equal to `value`\n" , "- `LT[value]` : allow resource with attribute less than the `value`\n" , "- `GT[value]` : allow objects with an attribute greater than the `value`\n" , "- `GTE[value]` : allow objects with an attribute at least the `value`\n" , "- `LTE[value]` : allow objects with an attribute at most the `value`\n" , "- `RANGE[lo,hi]` : allow objects with the attribute in the range between `lo` and `hi`\n" , "- `IN[a,b,c,d]` : allow objects with the attribute belonging to one provided.\n\n" ] instance ( Typeable res , KnownSymbols syms , syms ~ ParamNames res params , HasSwagger subApi ) => HasSwagger (SortBy params res :> subApi) where toSwagger _ = let swgr = toSwagger (Proxy @subApi) in swgr & over (operationsOf swgr . parameters) addSortOperation where addSortOperation :: [Referenced Param] -> [Referenced Param] addSortOperation xs = Inline newParam : xs newParam :: Param newParam = let typeOfRes = fromString $ show $ typeRep (Proxy @ res) allowedKeys = T.intercalate "," (map toText $ symbolVals (Proxy @syms)) in Param { _paramName = "sort_by" , _paramRequired = Just False , _paramDescription = Just (sortDescription typeOfRes allowedKeys) , _paramSchema = ParamOther ParamOtherSchema { _paramOtherSchemaIn = ParamQuery , _paramOtherSchemaAllowEmptyValue = Just True , _paramOtherSchemaParamSchema = mempty } } instance (HasSwagger subApi) => HasSwagger (WalletRequestParams :> subApi) where toSwagger _ = let swgr = toSwagger (Proxy @(WithWalletRequestParams subApi)) in swgr & over (operationsOf swgr . parameters) (map toDescription) where toDescription :: Referenced Param -> Referenced Param toDescription (Inline p@(_paramName -> pName)) = case M.lookup pName requestParameterToDescription of Nothing -> Inline p Just d -> Inline (p & description .~ Just d) toDescription x = x instance ToParamSchema WalletId instance ToSchema Core.Address where declareNamedSchema = pure . 
paramSchemaToNamedSchema defaultSchemaOptions instance ToParamSchema Core.Address where toParamSchema _ = mempty & type_ .~ SwaggerString instance ToParamSchema (V1 Core.Address) where toParamSchema _ = toParamSchema (Proxy @Core.Address) -- Descriptions -- customQueryFlagToDescription :: Map T.Text T.Text customQueryFlagToDescription = M.fromList [ ("force_ntp_check", forceNtpCheckDescription) ] requestParameterToDescription :: Map T.Text T.Text requestParameterToDescription = M.fromList [ ("page", pageDescription) , ("per_page", perPageDescription (fromString $ show maxPerPageEntries) (fromString $ show defaultPerPageEntries)) ] forceNtpCheckDescription :: T.Text forceNtpCheckDescription = [text| In some cases, API Clients need to force a new NTP check as a previous result gets cached. A typical use-case is after asking a user to fix its system clock. If this flag is set, request will block until NTP server responds or it will timeout if NTP server is not available within a short delay. |] pageDescription :: T.Text pageDescription = [text| The page number to fetch for this request. The minimum is **1**. If nothing is specified, **this value defaults to 1** and always shows the first entries in the requested collection. |] perPageDescription :: T.Text -> T.Text -> T.Text perPageDescription maxValue defaultValue = [text| The number of entries to display for each page. The minimum is **1**, whereas the maximum is **$maxValue**. If nothing is specified, **this value defaults to $defaultValue**. |] sortDescription :: Text -> Text -> Text sortDescription resource allowedKeys = [text| A **SORT** operation on this $resource. Allowed keys: `$allowedKeys`. |] errorsDescription :: Text errorsDescription = [text| Error Name / Description | HTTP Error code | Example -------------------------|-----------------|--------- $errors |] where errors = T.intercalate "\n" rows rows = -- 'WalletError' [ mkRow fmtErr $ NotEnoughMoney (ErrAvailableBalanceIsInsufficient 1400) , mkRow fmtErr $ OutputIsRedeem sampleAddress , mkRow fmtErr $ UnknownError "Unexpected internal error." , mkRow fmtErr $ InvalidAddressFormat "Provided address format is not valid." , mkRow fmtErr WalletNotFound , mkRow fmtErr $ WalletAlreadyExists exampleWalletId , mkRow fmtErr AddressNotFound , mkRow fmtErr $ InvalidPublicKey "Extended public key (for external wallet) is invalid." , mkRow fmtErr UnsignedTxCreationError , mkRow fmtErr $ SignedTxSubmitError "Unable to submit externally-signed transaction." , mkRow fmtErr TooBigTransaction , mkRow fmtErr TxFailedToStabilize , mkRow fmtErr TxRedemptionDepleted , mkRow fmtErr $ TxSafeSignerNotFound sampleAddress , mkRow fmtErr $ MissingRequiredParams (("wallet_id", "walletId") :| []) , mkRow fmtErr $ WalletIsNotReadyToProcessPayments genExample , mkRow fmtErr $ NodeIsStillSyncing genExample , mkRow fmtErr $ CannotCreateAddress "Cannot create derivation path for new address in external wallet." , mkRow fmtErr $ RequestThrottled 42 -- 'JSONValidationError' , mkRow fmtErr $ JSONValidationFailed "Expected String, found Null." -- 'UnsupportedMimeTypeError' , mkRow fmtErr $ UnsupportedMimeTypePresent "Expected Content-Type's main MIME-type to be 'application/json'." , mkRow fmtErr $ UtxoNotEnoughFragmented (ErrUtxoNotEnoughFragmented 1 msgUtxoNotEnoughFragmented) -- TODO 'MnemonicError' ? 
] mkRow fmt err = T.intercalate "|" (fmt err) fmtErr err = [ surroundedBy "`" (gconsName err) <> "<br/>" <> toText (sformat build err) , show $ errHTTPCode $ toServantError err , inlineCodeBlock (T.decodeUtf8 $ BL.toStrict $ encodePretty err) ] sampleAddress = V1 Core.Address { Core.addrRoot = Crypto.unsafeAbstractHash ("asdfasdf" :: String) , Core.addrAttributes = Core.mkAttributes $ Core.AddrAttributes Nothing Core.BootstrapEraDistr NetworkMainOrStage , Core.addrType = Core.ATPubKey } | Shorter version of the doc below , only for Dev & V0 documentations highLevelShortDescription :: DescriptionEnvironment -> T.Text highLevelShortDescription DescriptionEnvironment{..} = [text| This is the specification for the Sealchain Wallet API, automatically generated as a [Swagger](/) spec from the [Servant](-servant.readthedocs.io/en/stable/) API of [Sealchain](-project/sealchain). Protocol Version | Git Revision -------------------|------------------- $deSoftwareVersion | $deGitRevision |] -- | Provide additional insights on V1 documentation highLevelDescription :: DescriptionEnvironment -> T.Text highLevelDescription DescriptionEnvironment{..} = [text| This is the specification for the Sealchain Wallet API, automatically generated as a [Swagger](/) spec from the [Servant](-servant.readthedocs.io/en/stable/) API of [Sealchain](-project/sealchain). Protocol Version | Git Revision -------------------|------------------- $deSoftwareVersion | $deGitRevision Getting Started =============== In the following examples, we will use *curl* to illustrate request to an API running on the default port **8090**. Please note that wallet web API uses TLS for secure communication. Requests to the API need to send a client CA certificate that was used when launching the node and identifies the client as being permitted to invoke the server API. Creating a New Wallet --------------------- You can create your first wallet using the [`POST /api/v1/wallets`](#tag/Wallets%2Fpaths%2F~1api~1v1~1wallets%2Fpost) endpoint as follow: ``` curl -X POST :8090/api/v1/wallets \ -H "Accept: application/json; charset=utf-8" \ -H "Content-Type: application/json; charset=utf-8" \ --cert ./scripts/tls-files/client.pem \ --cacert ./scripts/tls-files/ca.crt \ -d '{ "operation": "create", "backupPhrase": $deMnemonicExample, "assuranceLevel": "normal", "name": "MyFirstWallet", "spendingPassword": "5416b2988745725998907addf4613c9b0764f04959030e1b81c603b920a115d0" }' ``` > **Warning**: Those 12 mnemonic words given for the backup phrase act as an example. **Do > not** use them on a production system. See the section below about mnemonic codes for more > information. The `spendingPassword` is optional but highly recommended. It a string of 32 characters, encoded in base 16, yielding to an hexadecimal sequence of 64 bytes. This passphrase is required for sensitive operations on the wallet and adds an extra security layer to it. To generate a valid `spendingPassword`, please follow the following steps: - Pick a long sentence using a wide variety of characters (uppercase, lowercase, whitespace, punctuation, etc). Using a computer to randomly generate a passphrase is best, as humans aren't a good source of randomness. - Compute an appropriate hash of this passphrase. You'll need to use an algorithm that yields a 32-byte long string (e.g. *SHA256* or *BLAKE2b*). - Hex-encode the 32-byte hash into a 64-byte sequence of bytes. As a response, the API provides you with a unique wallet `id` to be used in subsequent requests. 
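A minimal sketch of the passphrase-hashing steps just described, assuming the `cryptonite` and `bytestring` packages; this snippet is illustrative only and is not part of the wallet backend. Any 32-byte hash (e.g. BLAKE2b-256) would serve equally well:

```haskell
-- Illustrative only: turn a long passphrase into the 32-byte, 64-character
-- hexadecimal string expected for `spendingPassword`.
module Main where

import           Crypto.Hash           (Digest, SHA256, hash)
import qualified Data.ByteString.Char8 as BS

-- SHA-256 yields a 32-byte digest; Digest's Show instance prints lowercase hex.
spendingPasswordFromPassphrase :: BS.ByteString -> String
spendingPasswordFromPassphrase = show . (hash :: BS.ByteString -> Digest SHA256)

main :: IO ()
main = putStrLn (spendingPasswordFromPassphrase (BS.pack "a long, randomly generated passphrase goes here"))
```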
Make sure to store it / write it down. Note that every API response is [jsend-compliant](); Sealchain also augments responses with meta-data specific to pagination. More details in the section below about [Pagination](#section/Pagination) ```json $createWallet ``` You have just created your first wallet. Information about this wallet can be retrieved using the [`GET /api/v1/wallets/{walletId}`](#tag/Wallets%2Fpaths%2F~1api~1v1~1wallets~1{walletId}%2Fget) endpoint as follows: ``` curl -X GET :8090/api/v1/wallets/{{walletId}} \ -H "Accept: application/json; charset=utf-8" \ --cacert ./scripts/tls-files/ca.crt \ --cert ./scripts/tls-files/client.pem ``` Receiving SEAL (or GD) ------------- To receive _SEAL_ (or GD) from other users you should provide your address. This address can be obtained from an account. Each wallet contains at least one account. An account is like a pocket inside of your wallet. Vew all existing accounts of a wallet by using the [`GET /api/v1/wallets/{{walletId}}/accounts`](#tag/Accounts%2Fpaths%2F~1api~1v1~1wallets~1{walletId}~1accounts%2Fget) endpoint: ``` curl -X GET :8090/api/v1/wallets/{{walletId}}/accounts?page=1&per_page=10 \ -H "Accept: application/json; charset=utf-8" \ --cacert ./scripts/tls-files/ca.crt \ --cert ./scripts/tls-files/client.pem ``` Since you have, for now, only a single wallet, you'll see something like this: ```json $readAccounts ``` All the wallet's accounts are listed under the `addresses` field. You can communicate one of these addresses to receive _SEAL_(or GD) on the associated account. Sending SEAL(or GD) ----------- In order to send _SEAL_(or GD) from one of your accounts to another address, you must create a new payment transaction using the [`POST /api/v1/transactions`](#tag/Transactions%2Fpaths%2F~1api~1v1~1transactions~1payment%2Fpost) endpoint as follows: ``` curl -X POST :8090/api/v1/transactions/payment \ -H "Accept: application/json; charset=utf-8" \ -H "Content-Type: application/json; charset=utf-8" \ --cacert ./scripts/tls-files/ca.crt \ --cert ./scripts/tls-files/client.pem \ -d '{ "destinations": [{ "amount": { "coins": 100000000, "gds": 100 } "address": "A7k5bz1QR2...Tx561NNmfF" }], "source": { "accountIndex": 0, "walletId": "Ae2tdPwUPE...8V3AVTnqGZ" }, "spendingPassword": "5416b2988745725998907addf4613c9b0764f04959030e1b81c603b920a115d0" }' ``` Note that, in order to perform a transaction, you need to have enough existing _SEAL_(or GD) on the source account! The Sealchain API is designed to accomodate multiple recipients payments out-of-the-box; notice how `destinations` is a list of addresses (and corresponding amounts). When the transaction succeeds, funds are no longer available in the sources addresses, and are soon made available to the destinations within a short delay. Note that, you can at any time see the status of your wallets by using the [`GET /api/v1/transactions/payment`](#tag/Transactions%2Fpaths%2F~1api~1v1~1transactions%2Fget) endpoint as follows: ``` curl -X GET :8090/api/v1/transactions?wallet_id=Ae2tdPwUPE...8V3AVTnqGZ\ -H "Accept: application/json; charset=utf-8" \ --cacert ./scripts/tls-files/ca.crt \ --cert ./scripts/tls-files/client.pem ``` Here we constrained the request to a specific account. 
After our previous transaction the output should look roughly similar to this: ```json $readTransactions ``` In addition, and because it is not possible to _preview_ a transaction, one can lookup a transaction's fees using the [`POST /api/v1/transactions/fees`](#tag/Transactions%2Fpaths%2F~1api~1v1~1transactions~1fees%2Fpost) endpoint to get an estimation of those fees. See [Estimating Transaction Fees](#section/Common-Use-Cases/Estimating-Transaction-Fees) for more details. Issue GD ----------- To increase or decrease GD total supply, The issuer (the GD operator) can create a new payment transaction using the [`POST /api/v1/transactions`](#tag/Transactions%2Fpaths%2F~1api~1v1~1transactions~1issurance%2Fpost) endpoint as follows: ``` curl -X POST :8090/api/v1/transactions/issurance \ -H "Accept: application/json; charset=utf-8" \ -H "Content-Type: application/json; charset=utf-8" \ --cacert ./scripts/tls-files/ca.crt \ --cert ./scripts/tls-files/client.pem \ -d '{ "info": { "increment": 10000000, "proof": "692068617665206120746f6e206f6620676f6c647320696e204a50204d6f7267616e2e" -- proof in hex }, "source": { "accountIndex": 0, "walletId": "Ae2tdPwUPE...8V3AVTnqGZ" }, "spendingPassword": "5416b2988745725998907addf4613c9b0764f04959030e1b81c603b920a115d0" }' ``` Pagination ========== **All GET requests of the API are paginated by default**. Whilst this can be a source of surprise, is the best way of ensuring the performance of GET requests is not affected by the size of the data storage. Version `V1` introduced a different way of requesting information to the API. In particular, GET requests which returns a _collection_ (i.e. typically a JSON array of resources) lists extra parameters which can be used to modify the shape of the response. In particular, those are: * `page`: (Default value: **1**). * `per_page`: (Default value: **$deDefaultPerPage**) For a more accurate description, see the section `Parameters` of each GET request, but as a brief overview the first two control how many results and which results to access in a paginated request. Filtering and Sorting ===================== `GET` endpoints which list collection of resources supports filters & sort operations, which are clearly marked in the swagger docs with the `FILTER` or `SORT` labels. The query format is quite simple, and it goes this way: Filter Operators ---------------- | Operator | Description | Example | | - | If **no operator** is passed, this is equivalent to `EQ` (see below). | `balance=10` | | `EQ` | Retrieves the resources with index _equal_ to the one provided. | `balance=EQ[10]` | | `LT` | Retrieves the resources with index _less than_ the one provided. | `balance=LT[10]` | | `LTE` | Retrieves the resources with index _less than equal_ the one provided. | `balance=LTE[10]` | | `GT` | Retrieves the resources with index _greater than_ the one provided. | `balance=GT[10]` | | `GTE` | Retrieves the resources with index _greater than equal_ the one provided. | `balance=GTE[10]` | | `RANGE` | Retrieves the resources with index _within the inclusive range_ [k,k]. | `balance=RANGE[10,20]` | Sort Operators -------------- | Operator | Description | Example | | `ASC` | Sorts the resources with the given index in _ascending_ order. | `sort_by=ASC[balance]` | | `DES` | Sorts the resources with the given index in _descending_ order. | `sort_by=DES[balance]` | | - | If **no operator** is passed, this is equivalent to `DES` (see above). 
| `sort_by=balance` | Errors ====== In case a request cannot be served by the API, a non-2xx HTTP response will be issued, together with a [JSend-compliant]() JSON Object describing the error in detail together with a numeric error code which can be used by API consumers to implement proper error handling in their application. For example, here's a typical error which might be issued: ``` json $deErrorExample ``` Existing Wallet Errors ---------------------- $deWalletErrorTable Monetary Denomination & Units ============================= Sealchain's platform currency is called _SEAL_. _SEAL_ has up to **8** decimal places; hence the smallest monetary unit that can be represented in the Seaichain's blockhain is: 0.00000001. Sealchain originaly includes stablecoin called GD (GoldDollar), GD has up to **2** decimal places. > **Warning**: All amounts manipulated in the API are given and expected in smallest monetary unit. Mnemonic Codes ============== The full list of accepted mnemonic codes to secure a wallet is defined by the [BIP-39 specifications](-0039.mediawiki). Note that picking up 12 random words from the list **is not enough** and leads to poor security. Make sure to carefully follow the steps described in the protocol when you generate words for a new wallet. Versioning & Legacy =================== The API is **versioned**, meaning that is possible to access different versions of the API by adding the _version number_ in the URL. **For the sake of backward compatibility, we expose the legacy version of the API, available simply as unversioned endpoints.** This means that _omitting_ the version number would call the old version of the API. Deprecated endpoints are currently grouped under an appropriate section; they would be removed in upcoming released, if you're starting a new integration with Sealchain, please ignore these. Note that Compatibility between major versions is not _guaranteed_, i.e. the request & response formats might differ. Disable TLS (Not Recommended) ----------------------------- If needed, you can disable TLS by providing the `--no-tls` flag to the wallet or by running a wallet in debug mode with `--wallet-debug` turned on. Common Use-Cases ================ Sending Money to Multiple Recipients ------------------------------------ As seen in [Sending SEAL](#section/Getting-Started/Sending-SEAL), you can send _SEAL_ to another party using the [`POST /api/v1/transactions`](#tag/Transactions%2Fpaths%2F~1api~1v1~1transactions%2Fpost) endpoint. Important to notice is the type of the field `destinations`: it's a list, enabling you to provide more than one destination. Each destination is composed of: - An address - A corresponding amount The overall transaction corresponds to the sum of each outputs. 
For instance, to send money to two parties simultaneously: ``` curl -X POST :8090/api/v1/transactions \ -H "Accept: application/json; charset=utf-8" \ -H "Content-Type: application/json; charset=utf-8" \ --cacert ./scripts/tls-files/ca.crt \ --cert ./scripts/tls-files/client.pem \ -d '{ "destinations": [ { "amount": 14, "address": "A7k5bz1QR2...Tx561NNmfF" }, { "amount": 42, "address": "B56n78WKE8...jXAa34NUFz" } ], "source": { "accountIndex": 0, "walletId": "Ae2tdPwUPE...8V3AVTnqGZ" }, "spendingPassword": "5416b2988745725998907addf4613c9b0764f04959030e1b81c603b920a115d0" }' ``` About UTXO Fragmentation ------------------------ As described in [Sending Money to Multiple Recipients](#section/Common-Use-Cases/Sending-Money-to-Multiple-Recipients), it is possible to send ada to more than one destination. Sealchain only allows a given UTXO to cover at most one single transaction output. As a result, when the number of transaction outputs is greater than the number the API returns a `UtxoNotEnoughFragmented` error which looks like the following ``` { "status": "error", "diagnostic": { "details": { "help": "Utxo is not enough fragmented to handle the number of outputs of this transaction. Query /api/v1/wallets/{walletId}/statistics/utxos endpoint for more information", "missingUtxos": 1 } }, "message": "UtxoNotEnoughFragmented" } ``` To make sure the source account has a sufficient level of UTXO fragmentation (i.e. number of UTXOs), please monitor the state of the UTXOs as described in [Getting UTXO Statistics](#section/Common-Use-Cases/Getting-Utxo-Statistics). The number of wallet UTXOs should be no less than the transaction outputs, and the sum of all UTXOs should be enough to cover the total transaction amount, including fees. Contrary to a classic accounting model, there's no such thing as spending _part of a UTXO_, and one has to wait for a transaction to be included in a block before spending the remaining change. This is very similar to using bank notes: one can't spend a USD 20 bill at two different shops at the same time, even if it is enough to cover both purchases — one has to wait for change from the first transaction before making the second one. There's no "ideal" level of fragmentation; it depends on one's needs. However, the more UTXOs that are available, the higher the concurrency capacity of one's wallet, allowing multiple transactions to be made at the same time. Similarly, there's no practical maximum number of UTXOs, but there is nevertheless a maximum transaction size. By having many small UTXOs, one is taking the risk of hitting that restriction, should too many inputs be selected to fill a transaction. The only way to work around this is to make multiple smaller transactions. Estimating Transaction Fees --------------------------- When you submit a transaction to the network, some fees apply depending on, but not only, the selected grouping policy and the available inputs on the source wallet. There's actually a trade-off between fees, cryptographic security, throughput and privacy. The more inputs are selected, the bigger is the payload, the bigger are the fees. The API lets you estimate fees for a given transaction via the [`POST /api/v1/transaction/fees`](#tag/Transactions%2Fpaths%2F~1api~1v1~1transactions~1fees%2Fpost) endpoint. 
The request payload is identical to the one you would make to create a transaction: ``` curl -X POST :8090/api/v1/transactions/fees \ -H "Accept: application/json; charset=utf-8" \ -H "Content-Type: application/json; charset=utf-8" \ --cacert ./scripts/tls-files/ca.crt \ --cert ./scripts/tls-files/client.pem \ -d '{ "destinations": [{ "amount": 14, "address": "A7k5bz1QR2...Tx561NNmfF" }], "source": { "accountIndex": 0, "walletId": "Ae2tdPwUPE...8V3AVTnqGZ" } }' ``` The API resolves with an estimated amount in _SEAL_. This estimation highly depends on the current state of the ledger and diverges with time. ```json $readFees ``` Managing Accounts ----------------- A wallet isn't limited to one account. It can actually be useful to have more than one account in order to separate business activities. With the API, you can retrieve a specific account, create new ones, list all existing accounts of a wallet or edit a few things on an existing account. By default, your wallet comes with a provided account. Let's see how to create a fresh new account on a wallet using [`POST /api/v1/wallets/{{walletId}}/accounts`](#tag/Accounts%2Fpaths%2F~1api~1v1~1wallets~1{walletId}~1accounts%2Fpost): ``` curl -X POST \ :8090/api/v1/Ae2tdPwUPE...8V3AVTnqGZ/accounts \ -H 'Content-Type: application/json;charset=utf-8' \ -H 'Accept: application/json;charset=utf-8' \ --cacert ./scripts/tls-files/ca.crt \ --cert ./scripts/tls-files/client.pem \ -d '{ "name": "MyOtherAccount", "spendingPassword": "5416b2988745725998907addf4613c9b0764f04959030e1b81c603b920a115d0" }' ``` Note that the `spendingPassword` here should match the one provided earlier in [Creating a New Wallet](#section/Getting-Started/Creating-a-New-Wallet). ```json $createAccount ``` You can always retrieve this account description later if needed via [`GET /api/v1/wallets/{{walletId}}/accounts/{{accountId}}`](#tag/Accounts%2Fpaths%2F~1api~1v1~1wallets~1{walletId}~1accounts~1{accountId}%2Fget). For example: ``` curl -X GET \ :8090/api/v1/wallets/Ae2tdPwUPE...8V3AVTnqGZ/accounts/2902829384 \ -H 'Accept: application/json;charset=utf-8' \ --cacert ./scripts/tls-files/ca.crt \ --cert ./scripts/tls-files/client.pem ``` For a broader view, the full list of accounts of a given wallet can be retrieved using [`GET /api/v1/wallets/{{walletId}}/accounts`](#tag/Accounts%2Fpaths%2F~1api~1v1~1wallets~1{walletId}~1accounts%2Fget) ``` curl -X GET \ :8090/api/v1/wallets/Ae2tdPwUPE...8V3AVTnqGZ/accounts \ -H 'Accept: application/json;charset=utf-8' \ --cacert ./scripts/tls-files/ca.crt \ --cert ./scripts/tls-files/client.pem ``` ```json $readAccounts ``` Partial Representations ----------------------- The previous endpoint gives you a list of full representations. However, in some cases, it might be interesting to retrieve only a partial representation of an account (e.g. only the balance). There are two extra endpoints one could use to either fetch a given account's balance, and another to retrieve the list of addresses associated to a specific account. [`GET /api/v1/wallets/{{walletId}}/accounts/{{accountId}}/addresses`](#tag/Accounts%2Fpaths%2F~1api~1v1~1wallets~1%7BwalletId%7D~1accounts~1%7BaccountId%7D~1addresses%2Fget) ```json $readAccountAddresses ``` Note that this endpoint is paginated and allow basic filtering and sorting on addresses. 
Similarly, you can retrieve only the account balance with: [`GET /api/v1/wallets/{{walletId}}/accounts/{{accountId}}/amount`](#tag/Accounts%2Fpaths%2F~1api~1v1~1wallets~1%7BwalletId%7D~1accounts~1%7BaccountId%7D~1amount%2Fget) ```json $readAccountBalance ``` Managing Addresses ------------------ By default, wallets you create are provided with an account which has one default address. It is possible (and recommended) for an account to manage multiple addresses. Address reuse actually reduces privacy for it tights more transactions to a small set of addresses. When paying, the wallet makes many of these choices for you. Addresses are selected from a wallet's account based on several different strategies and policies. To create a new address, use the [`POST /api/v1/addresses`](#tag/Addresses%2Fpaths%2F~1api~1v1~1addresses%2Fpost) endpoint: ``` curl -X POST \ :8090/api/v1/addresses \ -H 'Content-Type: application/json;charset=utf-8' \ -H 'Accept: application/json;charset=utf-8' \ --cacert ./scripts/tls-files/ca.crt \ --cert ./scripts/tls-files/client.pem \ -d '{ "walletId": "Ae2tdPwUPE...V3AVTnqGZ4", "accountIndex": 2147483648 }' ``` ```json $createAddress ``` If your wallet is protected with a password, this password is also required in order to create new addresses for that wallet. In such case, the field `spendingPassword` should match the one defined earlier to protect your wallet. Addresses generated as just described are always valid. When the API encounters an invalid address however (e.g. when provided by another party), it will fail with a client error. You can always view all your available addresses across all your wallets by using [`GET /api/v1/addresses`](#tag/Addresses%2Fpaths%2F~1api~1v1~1addresses%2Fget): ``` curl -X GET :8090/api/v1/addresses \ -H 'Accept: application/json;charset=utf-8' \ --cacert ./scripts/tls-files/ca.crt \ --cert ./scripts/tls-files/client.pem ``` ```json $readAddresses ``` Checking Synchronization Progress --------------------------------- You can control the synchronization progress of the underlying node hosting the wallet's server via [`GET /api/v1/node-info`](#tag/Info%2Fpaths%2F~1api~1v1~1node-info%2Fget). The output is rather verbose and gives real-time progress updates about the current node. ``` curl -X GET :8090/api/v1/node-info \ -H 'Accept: application/json;charset=utf-8' \ --cacert ./scripts/tls-files/ca.crt \ --cert ./scripts/tls-files/client.pem ``` ```json $readNodeInfo ``` Retrieving Transaction History ------------------------------ If needed, applications may regularly poll the wallet's backend to retrieve the history of transactions of a given wallet. Using the [`GET /api/v1/transactions`](#tag/Transactions%2Fpaths%2F~1api~1v1~1transactions%2Fget) endpoint, you can view the status of all transactions that ever sent or took money from the wallet. 
The following table sums up the available filters (also detailed in the endpoint documentation):

Filter On                   | Corresponding Query Parameter(s)
----------------------------| ------------------------------
Wallet                      | `wallet_id`
Wallet's account            | `account_index` + `wallet_id`
Address                     | `address`
Transaction's creation time | `created_at`
Transaction's id            | `id`

For example, in order to retrieve the last 50 transactions of a particular account, ordered by descending date:

```
curl -X GET ':8090/api/v1/transactions?wallet_id=Ae2tdPwU...3AVTnqGZ&account_index=2902829384&sort_by=DES\[created_at\]&per_page=50' \
  -H 'Accept: application/json;charset=utf-8' \
  --cacert ./scripts/tls-files/ca.crt \
  --cert ./scripts/tls-files/client.pem
```

Similarly, to retrieve the last 50 transactions of the whole wallet, ordered by descending date:

```
curl -X GET ':8090/api/v1/transactions?wallet_id=Ae2tdPwU...3AVTnqGZ&sort_by=DES\[created_at\]&per_page=50' \
  -H 'Accept: application/json;charset=utf-8' \
  --cacert ./scripts/tls-files/ca.crt \
  --cert ./scripts/tls-files/client.pem
```

Another example, if you were to look for all transactions made since the 1st of January 2018:

```
curl -X GET ':8090/api/v1/transactions?wallet_id=Ae2tdPwU...3AVTnqGZ&created_at=GT\[2018-01-01T00:00:00.00000\]' \
  -H 'Accept: application/json;charset=utf-8' \
  --cacert ./scripts/tls-files/ca.crt \
  --cert ./scripts/tls-files/client.pem
```

Getting Utxo Statistics
-----------------------

You can get Utxo statistics of a given wallet using [`GET /api/v1/wallets/{{walletId}}/statistics/utxos`](#tag/Accounts%2Fpaths%2F~1api~1v1~1wallets~1{walletId}~1statistics~1utxos%2Fget)

```
curl -X GET \
  :8090/api/v1/wallets/Ae2tdPwUPE...8V3AVTnqGZ/statistics/utxos \
  -H 'Accept: application/json;charset=utf-8' \
  --cacert ./scripts/tls-files/ca.crt \
  --cert ./scripts/tls-files/client.pem
```

```json
$readUtxoStatistics
```

Make sure to carefully read the section about [Pagination](#section/Pagination) to fully leverage the API capabilities.

Importing (Unused) Addresses From a Previous Node (or Version)
--------------------------------------------------------------

When restoring a wallet, only the information available on the blockchain can be retrieved. Some pieces of information aren't stored on the blockchain and are only defined as _Metadata_ of the wallet backend. This includes:

- The wallet's name
- The wallet's assurance level
- The wallet's spending password
- The wallet's unused addresses

Unused addresses are not recorded on the blockchain and, in the case of random derivation, it is unlikely that the same addresses will be generated on two different node instances. However, some API users may wish to preserve unused addresses between different instances of the wallet backend.

To enable this, the wallet backend provides an endpoint ([`POST /api/v1/wallets/{{walletId}}/addresses`](#tag/Addresses%2Fpaths%2F~1api~1v1~1wallets~1{walletId}~1addresses%2Fpost)) to import a list of addresses into a given account. Note that this endpoint is quite lenient when it comes to errors: it tries to import all provided addresses one by one, and ignores any that can't be imported for whatever reason. The server will respond with the total number of successes and, if any, a list of addresses that failed to be imported. Trying to import an address that is already present will behave as a no-op.
For example: ``` curl -X POST \ :8090/api/v1/wallets/Ae2tdPwUPE...8V3AVTnqGZ/addresses \ -H 'Accept: application/json;charset=utf-8' \ --cacert ./scripts/tls-files/ca.crt \ --cert ./scripts/tls-files/client.pem \ -d '[ "Ae2tdPwUPE...8V3AVTnqGZ", "Ae2odDwvbA...b6V104CTV8" ]' ``` > **IMPORTANT**: This feature is experimental and performance is > not guaranteed. Users are advised to import small batches only. |] where createAccount = decodeUtf8 $ encodePretty $ genExample @(APIResponse Account) createAddress = decodeUtf8 $ encodePretty $ genExample @(APIResponse WalletAddress) createWallet = decodeUtf8 $ encodePretty $ genExample @(APIResponse Wallet) readAccounts = decodeUtf8 $ encodePretty $ genExample @(APIResponse [Account]) readAccountBalance = decodeUtf8 $ encodePretty $ genExample @(APIResponse AccountBalance) readAccountAddresses = decodeUtf8 $ encodePretty $ genExample @(APIResponse AccountAddresses) readAddresses = decodeUtf8 $ encodePretty $ genExample @(APIResponse [Address]) readFees = decodeUtf8 $ encodePretty $ genExample @(APIResponse EstimatedFees) readNodeInfo = decodeUtf8 $ encodePretty $ genExample @(APIResponse NodeInfo) readTransactions = decodeUtf8 $ encodePretty $ genExample @(APIResponse [Transaction]) readUtxoStatistics = decodeUtf8 $ encodePretty $ genExample @(APIResponse UtxoStatistics) -- | Provide an alternative UI (ReDoc) for rendering Swagger documentation. swaggerSchemaUIServer :: (Server api ~ Handler Swagger) => Swagger -> Server (SwaggerSchemaUI' dir api) swaggerSchemaUIServer = swaggerSchemaUIServerImpl redocIndexTemplate redocFiles where redocIndexTemplate :: Text redocIndexTemplate = [text| <!doctype html> <html lang="en"> <head> <title>ReDoc</title> <meta charset="utf-8"/> <meta name="viewport" content="width=device-width, initial-scale=1"> <style> body { margin: 0; padding: 0; } </style> <script> // Force Strict-URL Routing for assets relative paths (function onload() { if (!window.location.pathname.endsWith("/")) { window.location.pathname += "/"; } }()); </script> </head> <body> <redoc spec-url="../SERVANT_SWAGGER_UI_SCHEMA"></redoc> <script src="redoc.min.js"> </script> </body> </html>|] applyUpdateDescription :: Text applyUpdateDescription = [text| Apply the next available update proposal from the blockchain. Note that this will immediately shutdown the node and makes it unavailable for a short while. |] postponeUpdateDescription :: Text postponeUpdateDescription = [text| Discard the next available update from the node's local state. Yet, this doesn't reject the update which will still be applied as soon as the node is restarted. |] resetWalletStateDescription :: Text resetWalletStateDescription = [text| Wipe-out the node's local state entirely. The only intended use-case for this endpoint is during API integration testing. Note also that this will fail by default unless the node is running in debug mode. |] estimateFeesDescription :: Text estimateFeesDescription = [text| Estimate the fees which would incur from the input payment. This endpoint **does not** require a _spending password_ to be supplied as it generates under the hood an unsigned transaction. |] getAddressDescription :: Text getAddressDescription = [text| The previous version of this endpoint failed with an HTTP error when the given address was unknown to the wallet. This was misleading since an address that is unknown to the wallet may still belong to the wallet (since it could be part of a pending transaction in another instance of the same wallet). 
To reflect this, the V1 endpoint does not fail when an address is not recognised and returns a new field which indicates the address' ownership status, from the node point of view.
|]

--
-- The API
--

data DescriptionEnvironment = DescriptionEnvironment
  { deErrorExample      :: !T.Text
  , deDefaultPerPage    :: !T.Text
  , deWalletErrorTable  :: !T.Text
  , deGitRevision       :: !T.Text
  , deSoftwareVersion   :: !T.Text
  , deMnemonicExample   :: !T.Text
  }

api :: HasSwagger a
    => (CompileTimeInfo, SoftwareVersion)
    -> Proxy a
    -> (DescriptionEnvironment -> T.Text)
    -> Swagger
api (compileInfo, curSoftwareVersion) walletAPI mkDescription = toSwagger walletAPI
  & info.title .~ "Sealchain Wallet API"
  & info.version .~ fromString (show curSoftwareVersion)
  & host ?~ "127.0.0.1:8090"
  & info.description ?~ mkDescription DescriptionEnvironment
      { deErrorExample = decodeUtf8 $ encodePretty WalletNotFound
      , deMnemonicExample = decodeUtf8 $ encode (genExample @BackupPhrase)
      , deDefaultPerPage = fromString (show defaultPerPageEntries)
      , deWalletErrorTable = errorsDescription
      , deGitRevision = ctiGitRevision compileInfo
      , deSoftwareVersion = fromString $ show (svNumber curSoftwareVersion)
      }
  & info.license ?~ ("MIT" & url ?~ URL "-project/sealchain/develop/LICENSE")
  & paths %~ (POST, "/api/internal/apply-update") `setDescription` applyUpdateDescription
  & paths %~ (POST, "/api/internal/postpone-update") `setDescription` postponeUpdateDescription
  & paths %~ (DELETE, "/api/internal/reset-wallet-state") `setDescription` resetWalletStateDescription
  & paths %~ (POST, "/api/v1/transactions/fees") `setDescription` estimateFeesDescription
  & paths %~ (GET, "/api/v1/addresses/{address}") `setDescription` getAddressDescription
{-# LANGUAGE DataKinds            #-}
{-# LANGUAGE FlexibleContexts     #-}
{-# LANGUAGE FlexibleInstances    #-}
{-# LANGUAGE LambdaCase           #-}
{-# LANGUAGE QuasiQuotes          #-}
{-# LANGUAGE RankNTypes           #-}
{-# LANGUAGE TypeFamilies         #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE ViewPatterns         #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}

module Cardano.Wallet.API.V1.Swagger where

import Universum hiding (get, put)

import Cardano.Wallet.API.Indices (ParamNames)
import Cardano.Wallet.API.Request.Filter
import Cardano.Wallet.API.Request.Pagination
import Cardano.Wallet.API.Request.Sort
import Cardano.Wallet.API.Response
import Cardano.Wallet.API.V1.Generic (gconsName)
import Cardano.Wallet.API.V1.Parameters
import Cardano.Wallet.API.V1.Swagger.Example
import Cardano.Wallet.API.V1.Types
import Cardano.Wallet.TypeLits (KnownSymbols (..))
import Pos.Chain.Update (SoftwareVersion (svNumber))
import Pos.Core.NetworkMagic (NetworkMagic (..))
import Pos.Util.CompileInfo (CompileTimeInfo, ctiGitRevision)
import Pos.Util.Servant (LoggingApi)

import Control.Lens (At, Index, IxValue, at, (?~))
import Data.Aeson (encode)
import Data.Aeson.Encode.Pretty
import Data.Map (Map)
import Data.Swagger hiding (Example)
import Data.Typeable
import Formatting (build, sformat)
import NeatInterpolation
import Servant (Handler, ServantErr (..), Server, StdMethod (..))
import Servant.API.Sub
import Servant.Swagger
import Servant.Swagger.UI (SwaggerSchemaUI')
import Servant.Swagger.UI.Core (swaggerSchemaUIServerImpl)
import Servant.Swagger.UI.ReDoc (redocFiles)

import qualified Data.ByteString.Lazy as BL
import qualified Data.Map.Strict as M
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Pos.Core as Core
import qualified Pos.Core.Attributes as Core
import qualified Pos.Crypto.Hashing as Crypto

-- | Surround a Text with another
surroundedBy :: Text -> Text -> Text
surroundedBy wrap context = wrap <> context <> wrap

-- | Display a multi-line code-block inline (e.g. in tables)
inlineCodeBlock :: Text -> Text
inlineCodeBlock txt = "<pre>" <> replaceNewLines (replaceWhiteSpaces txt) <> "</pre>"
  where
    replaceNewLines    = T.replace "\n" "<br/>"
    replaceWhiteSpaces = T.replace " " "&nbsp;"

-- | Drill into the 'Swagger' structure in an unsafe way to modify a specific
-- operation identified by a tuple (verb, path). Throws at runtime if the
-- identified path does not exist.
--
-- Example:
--
--   swagger
--     & paths %~ (POST, "/api/v1/wallets") `alterOperation` (description ?~ "foo")
--     & paths %~ (GET, "/api/v1/wallets/{walletId}") `alterOperation` (description ?~ "bar")
alterOperation ::
    ( IxValue m ~ item
    , Index m ~ FilePath
    , At m
    , HasGet item (Maybe Operation)
    , HasPut item (Maybe Operation)
    , HasPatch item (Maybe Operation)
    , HasPost item (Maybe Operation)
    , HasDelete item (Maybe Operation)
    )
    => (StdMethod, FilePath)
    -> (Operation -> Operation)
    -> m
    -> m
alterOperation (verb, path) alter =
    at path %~ (Just . unsafeAlterItem)
  where
    errUnreachableEndpoint :: Text
    errUnreachableEndpoint = "Unreachable endpoint: " <> show verb <> " " <> show path
    errUnsupportedVerb :: Text
    errUnsupportedVerb = "Used unsupported verb to identify an endpoint: " <> show verb
    unsafeAlterItem ::
        ( HasGet item (Maybe Operation)
        , HasPut item (Maybe Operation)
        , HasPatch item (Maybe Operation)
        , HasPost item (Maybe Operation)
        , HasDelete item (Maybe Operation)
        )
        => Maybe item -> item
    unsafeAlterItem = maybe (error errUnreachableEndpoint) (unsafeLensFor verb %~ (Just .
unsafeAlterOperation)) unsafeAlterOperation :: Maybe Operation -> Operation unsafeAlterOperation = maybe (error errUnreachableEndpoint) alter unsafeLensFor :: ( Functor f , HasGet item (Maybe Operation) , HasPut item (Maybe Operation) , HasPatch item (Maybe Operation) , HasPost item (Maybe Operation) , HasDelete item (Maybe Operation) ) => StdMethod -> (Maybe Operation -> f (Maybe Operation)) -> item -> f item unsafeLensFor = \case GET -> get PUT -> put PATCH -> patch POST -> post DELETE -> delete _ -> error errUnsupportedVerb & paths % ~ ( POST , " /api / v1 / wallets " ) ` setDescription ` " foo " & paths % ~ ( GET , " /api / v1 / wallets/{walletId } " ) ` setDescription ` " bar " setDescription :: (IxValue m ~ PathItem, Index m ~ FilePath, At m) => (StdMethod, FilePath) -> Text -> m -> m setDescription endpoint str = endpoint `alterOperation` (description ?~ str) instance HasSwagger a => HasSwagger (LoggingApi config a) where toSwagger _ = toSwagger (Proxy @a) instance ( Typeable res , KnownSymbols syms , HasSwagger subApi , syms ~ ParamNames res params ) => HasSwagger (FilterBy params res :> subApi) where toSwagger _ = let swgr = toSwagger (Proxy @subApi) allOps = map toText $ symbolVals (Proxy @syms) in swgr & over (operationsOf swgr . parameters) (addFilterOperations allOps) where addFilterOperations :: [Text] -> [Referenced Param] -> [Referenced Param] addFilterOperations ops xs = map (Inline . newParam) ops <> xs newParam :: Text -> Param newParam opName = let typeOfRes = fromString $ show $ typeRep (Proxy @ res) in Param { _paramName = opName , _paramRequired = Nothing , _paramDescription = Just $ filterDescription typeOfRes , _paramSchema = ParamOther ParamOtherSchema { _paramOtherSchemaIn = ParamQuery , _paramOtherSchemaAllowEmptyValue = Nothing , _paramOtherSchemaParamSchema = mempty } } filterDescription :: Text -> Text filterDescription typeOfRes = mconcat [ "A **FILTER** operation on a " <> typeOfRes <> ". " , "Filters support a variety of queries on the resource. " , "These are: \n\n" , "- `EQ[value]` : only allow values equal to `value`\n" , "- `LT[value]` : allow resource with attribute less than the `value`\n" , "- `GT[value]` : allow objects with an attribute greater than the `value`\n" , "- `GTE[value]` : allow objects with an attribute at least the `value`\n" , "- `LTE[value]` : allow objects with an attribute at most the `value`\n" , "- `RANGE[lo,hi]` : allow objects with the attribute in the range between `lo` and `hi`\n" , "- `IN[a,b,c,d]` : allow objects with the attribute belonging to one provided.\n\n" ] instance ( Typeable res , KnownSymbols syms , syms ~ ParamNames res params , HasSwagger subApi ) => HasSwagger (SortBy params res :> subApi) where toSwagger _ = let swgr = toSwagger (Proxy @subApi) in swgr & over (operationsOf swgr . 
parameters) addSortOperation where addSortOperation :: [Referenced Param] -> [Referenced Param] addSortOperation xs = Inline newParam : xs newParam :: Param newParam = let typeOfRes = fromString $ show $ typeRep (Proxy @ res) allowedKeys = T.intercalate "," (map toText $ symbolVals (Proxy @syms)) in Param { _paramName = "sort_by" , _paramRequired = Just False , _paramDescription = Just (sortDescription typeOfRes allowedKeys) , _paramSchema = ParamOther ParamOtherSchema { _paramOtherSchemaIn = ParamQuery , _paramOtherSchemaAllowEmptyValue = Just True , _paramOtherSchemaParamSchema = mempty } } instance (HasSwagger subApi) => HasSwagger (WalletRequestParams :> subApi) where toSwagger _ = let swgr = toSwagger (Proxy @(WithWalletRequestParams subApi)) in swgr & over (operationsOf swgr . parameters) (map toDescription) where toDescription :: Referenced Param -> Referenced Param toDescription (Inline p@(_paramName -> pName)) = case M.lookup pName requestParameterToDescription of Nothing -> Inline p Just d -> Inline (p & description .~ Just d) toDescription x = x instance ToParamSchema WalletId instance ToSchema Core.Address where declareNamedSchema = pure . paramSchemaToNamedSchema defaultSchemaOptions instance ToParamSchema Core.Address where toParamSchema _ = mempty & type_ .~ SwaggerString instance ToParamSchema (V1 Core.Address) where toParamSchema _ = toParamSchema (Proxy @Core.Address) Descriptions customQueryFlagToDescription :: Map T.Text T.Text customQueryFlagToDescription = M.fromList [ ("force_ntp_check", forceNtpCheckDescription) ] requestParameterToDescription :: Map T.Text T.Text requestParameterToDescription = M.fromList [ ("page", pageDescription) , ("per_page", perPageDescription (fromString $ show maxPerPageEntries) (fromString $ show defaultPerPageEntries)) ] forceNtpCheckDescription :: T.Text forceNtpCheckDescription = [text| In some cases, API Clients need to force a new NTP check as a previous result gets cached. A typical use-case is after asking a user to fix its system clock. If this flag is set, request will block until NTP server responds or it will timeout if NTP server is not available within a short delay. |] pageDescription :: T.Text pageDescription = [text| The page number to fetch for this request. The minimum is **1**. If nothing is specified, **this value defaults to 1** and always shows the first entries in the requested collection. |] perPageDescription :: T.Text -> T.Text -> T.Text perPageDescription maxValue defaultValue = [text| The number of entries to display for each page. The minimum is **1**, whereas the maximum is **$maxValue**. If nothing is specified, **this value defaults to $defaultValue**. |] sortDescription :: Text -> Text -> Text sortDescription resource allowedKeys = [text| A **SORT** operation on this $resource. Allowed keys: `$allowedKeys`. |] errorsDescription :: Text errorsDescription = [text| Error Name / Description | HTTP Error code | Example $errors |] where errors = T.intercalate "\n" rows rows = [ mkRow fmtErr $ NotEnoughMoney (ErrAvailableBalanceIsInsufficient 1400) , mkRow fmtErr $ OutputIsRedeem sampleAddress , mkRow fmtErr $ UnknownError "Unexpected internal error." , mkRow fmtErr $ InvalidAddressFormat "Provided address format is not valid." , mkRow fmtErr WalletNotFound , mkRow fmtErr $ WalletAlreadyExists exampleWalletId , mkRow fmtErr AddressNotFound , mkRow fmtErr $ InvalidPublicKey "Extended public key (for external wallet) is invalid." 
, mkRow fmtErr UnsignedTxCreationError , mkRow fmtErr $ SignedTxSubmitError "Unable to submit externally-signed transaction." , mkRow fmtErr TooBigTransaction , mkRow fmtErr TxFailedToStabilize , mkRow fmtErr TxRedemptionDepleted , mkRow fmtErr $ TxSafeSignerNotFound sampleAddress , mkRow fmtErr $ MissingRequiredParams (("wallet_id", "walletId") :| []) , mkRow fmtErr $ WalletIsNotReadyToProcessPayments genExample , mkRow fmtErr $ NodeIsStillSyncing genExample , mkRow fmtErr $ CannotCreateAddress "Cannot create derivation path for new address in external wallet." , mkRow fmtErr $ RequestThrottled 42 , mkRow fmtErr $ JSONValidationFailed "Expected String, found Null." , mkRow fmtErr $ UnsupportedMimeTypePresent "Expected Content-Type's main MIME-type to be 'application/json'." , mkRow fmtErr $ UtxoNotEnoughFragmented (ErrUtxoNotEnoughFragmented 1 msgUtxoNotEnoughFragmented) ] mkRow fmt err = T.intercalate "|" (fmt err) fmtErr err = [ surroundedBy "`" (gconsName err) <> "<br/>" <> toText (sformat build err) , show $ errHTTPCode $ toServantError err , inlineCodeBlock (T.decodeUtf8 $ BL.toStrict $ encodePretty err) ] sampleAddress = V1 Core.Address { Core.addrRoot = Crypto.unsafeAbstractHash ("asdfasdf" :: String) , Core.addrAttributes = Core.mkAttributes $ Core.AddrAttributes Nothing Core.BootstrapEraDistr NetworkMainOrStage , Core.addrType = Core.ATPubKey } | Shorter version of the doc below , only for Dev & V0 documentations highLevelShortDescription :: DescriptionEnvironment -> T.Text highLevelShortDescription DescriptionEnvironment{..} = [text| This is the specification for the Sealchain Wallet API, automatically generated as a [Swagger](/) spec from the [Servant](-servant.readthedocs.io/en/stable/) API of [Sealchain](-project/sealchain). Protocol Version | Git Revision $deSoftwareVersion | $deGitRevision |] highLevelDescription :: DescriptionEnvironment -> T.Text highLevelDescription DescriptionEnvironment{..} = [text| This is the specification for the Sealchain Wallet API, automatically generated as a [Swagger](/) spec from the [Servant](-servant.readthedocs.io/en/stable/) API of [Sealchain](-project/sealchain). Protocol Version | Git Revision $deSoftwareVersion | $deGitRevision Getting Started =============== In the following examples, we will use *curl* to illustrate request to an API running on the default port **8090**. Please note that wallet web API uses TLS for secure communication. Requests to the API need to send a client CA certificate that was used when launching the node and identifies the client as being permitted to invoke the server API. Creating a New Wallet You can create your first wallet using the [`POST /api/v1/wallets`](#tag/Wallets%2Fpaths%2F~1api~1v1~1wallets%2Fpost) endpoint as follow: ``` curl -X POST :8090/api/v1/wallets \ -H "Accept: application/json; charset=utf-8" \ -H "Content-Type: application/json; charset=utf-8" \ -d '{ "operation": "create", "backupPhrase": $deMnemonicExample, "assuranceLevel": "normal", "name": "MyFirstWallet", "spendingPassword": "5416b2988745725998907addf4613c9b0764f04959030e1b81c603b920a115d0" }' ``` > **Warning**: Those 12 mnemonic words given for the backup phrase act as an example. **Do > not** use them on a production system. See the section below about mnemonic codes for more > information. The `spendingPassword` is optional but highly recommended. It a string of 32 characters, encoded in base 16, yielding to an hexadecimal sequence of 64 bytes. 
This passphrase is required for sensitive operations on the wallet and adds an extra security layer to it. To generate a valid `spendingPassword`, please follow these steps:

- Pick a long sentence using a wide variety of characters (uppercase, lowercase, whitespace, punctuation, etc). Using a computer to randomly generate a passphrase is best, as humans aren't a good source of randomness.
- Compute an appropriate hash of this passphrase. You'll need to use an algorithm that yields a 32-byte long string (e.g. *SHA256* or *BLAKE2b*).
- Hex-encode the 32-byte hash into a 64-character hexadecimal string.

As a response, the API provides you with a unique wallet `id` to be used in subsequent requests. Make sure to store it / write it down. Note that every API response is [jsend-compliant](); Sealchain also augments responses with meta-data specific to pagination. More details in the section below about [Pagination](#section/Pagination).

```json
$createWallet
```

You have just created your first wallet. Information about this wallet can be retrieved using the [`GET /api/v1/wallets/{walletId}`](#tag/Wallets%2Fpaths%2F~1api~1v1~1wallets~1{walletId}%2Fget) endpoint as follows:

```
curl -X GET :8090/api/v1/wallets/{{walletId}} \
  -H "Accept: application/json; charset=utf-8" \
  --cacert ./scripts/tls-files/ca.crt \
  --cert ./scripts/tls-files/client.pem
```

Receiving SEAL (or GD)
----------------------

To receive _SEAL_ (or GD) from other users you should provide your address. This address can be obtained from an account. Each wallet contains at least one account. An account is like a pocket inside of your wallet.

View all existing accounts of a wallet by using the [`GET /api/v1/wallets/{{walletId}}/accounts`](#tag/Accounts%2Fpaths%2F~1api~1v1~1wallets~1{walletId}~1accounts%2Fget) endpoint:

```
curl -X GET :8090/api/v1/wallets/{{walletId}}/accounts?page=1&per_page=10 \
  -H "Accept: application/json; charset=utf-8" \
  --cacert ./scripts/tls-files/ca.crt \
  --cert ./scripts/tls-files/client.pem
```

Since you have, for now, only a single wallet, you'll see something like this:

```json
$readAccounts
```

All the wallet's accounts are listed under the `addresses` field. You can communicate one of these addresses to receive _SEAL_ (or GD) on the associated account.

Sending SEAL (or GD)
--------------------

In order to send _SEAL_ (or GD) from one of your accounts to another address, you must create a new payment transaction using the [`POST /api/v1/transactions/payment`](#tag/Transactions%2Fpaths%2F~1api~1v1~1transactions~1payment%2Fpost) endpoint as follows:

```
curl -X POST :8090/api/v1/transactions/payment \
  -H "Accept: application/json; charset=utf-8" \
  -H "Content-Type: application/json; charset=utf-8" \
  -d '{
    "destinations": [{
      "amount": { "coins": 100000000, "gds": 100 },
      "address": "A7k5bz1QR2...Tx561NNmfF"
    }],
    "source": {
      "accountIndex": 0,
      "walletId": "Ae2tdPwUPE...8V3AVTnqGZ"
    },
    "spendingPassword": "5416b2988745725998907addf4613c9b0764f04959030e1b81c603b920a115d0"
  }'
```

Note that, in order to perform a transaction, you need to have enough existing _SEAL_ (or GD) on the source account! The Sealchain API is designed to accommodate payments to multiple recipients out-of-the-box; notice how `destinations` is a list of addresses (and corresponding amounts).

When the transaction succeeds, funds are no longer available in the source addresses, and are soon made available to the destinations within a short delay.
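Once the payment has been submitted, one simple way to follow the money is to re-read the source wallet's accounts and watch the balances change (a sketch reusing the placeholder wallet id from above; the exact figures depend on the state of your own ledger):

```
# placeholder wallet id; replace it with your own
curl -X GET ':8090/api/v1/wallets/Ae2tdPwUPE...8V3AVTnqGZ/accounts?page=1&per_page=10' \
  -H "Accept: application/json; charset=utf-8" \
  --cacert ./scripts/tls-files/ca.crt \
  --cert ./scripts/tls-files/client.pem
```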
Note that you can at any time see the status of your transactions by using the [`GET /api/v1/transactions`](#tag/Transactions%2Fpaths%2F~1api~1v1~1transactions%2Fget) endpoint as follows:

```
curl -X GET :8090/api/v1/transactions?wallet_id=Ae2tdPwUPE...8V3AVTnqGZ \
  -H "Accept: application/json; charset=utf-8" \
  --cacert ./scripts/tls-files/ca.crt \
  --cert ./scripts/tls-files/client.pem
```

Here we constrained the request to a specific wallet. After our previous transaction the output should look roughly similar to this:

```json
$readTransactions
```

In addition, and because it is not possible to _preview_ a transaction, one can look up a transaction's fees using the [`POST /api/v1/transactions/fees`](#tag/Transactions%2Fpaths%2F~1api~1v1~1transactions~1fees%2Fpost) endpoint to get an estimation of those fees. See [Estimating Transaction Fees](#section/Common-Use-Cases/Estimating-Transaction-Fees) for more details.

Issue GD
--------

To increase or decrease the total GD supply, the issuer (the GD operator) can create a new issuance transaction using the [`POST /api/v1/transactions/issurance`](#tag/Transactions%2Fpaths%2F~1api~1v1~1transactions~1issurance%2Fpost) endpoint as follows:

```
curl -X POST :8090/api/v1/transactions/issurance \
  -H "Accept: application/json; charset=utf-8" \
  -H "Content-Type: application/json; charset=utf-8" \
  -d '{
    "info": {
      "increment": 10000000
    },
    "source": {
      "accountIndex": 0,
      "walletId": "Ae2tdPwUPE...8V3AVTnqGZ"
    },
    "spendingPassword": "5416b2988745725998907addf4613c9b0764f04959030e1b81c603b920a115d0"
  }'
```

Pagination
==========

**All GET requests of the API are paginated by default**. Whilst this can be a source of surprise, it is the best way of ensuring that the performance of GET requests is not affected by the size of the data storage.

Version `V1` introduced a different way of requesting information from the API. In particular, GET requests which return a _collection_ (i.e. typically a JSON array of resources) accept extra parameters which can be used to modify the shape of the response. In particular, those are:

* `page`: (Default value: **1**).
* `per_page`: (Default value: **$deDefaultPerPage**)

For a more accurate description, see the section `Parameters` of each GET request, but as a brief overview, these two parameters control how many results are returned and which page of results to access in a paginated request.

Filtering and Sorting
=====================

`GET` endpoints which list collections of resources support filter & sort operations, which are clearly marked in the swagger docs with the `FILTER` or `SORT` labels. The query format is quite simple, and it goes this way:

Filter Operators
----------------

| Operator | Description | Example |
| --- | --- | --- |
| - | If **no operator** is passed, this is equivalent to `EQ` (see below). | `balance=10` |
| `EQ` | Retrieves the resources with index _equal_ to the one provided. | `balance=EQ[10]` |
| `LT` | Retrieves the resources with index _less than_ the one provided. | `balance=LT[10]` |
| `LTE` | Retrieves the resources with index _less than or equal to_ the one provided. | `balance=LTE[10]` |
| `GT` | Retrieves the resources with index _greater than_ the one provided. | `balance=GT[10]` |
| `GTE` | Retrieves the resources with index _greater than or equal to_ the one provided. | `balance=GTE[10]` |
| `RANGE` | Retrieves the resources with index _within the inclusive range_ `[lo,hi]`. | `balance=RANGE[10,20]` |

Sort Operators
--------------

| Operator | Description | Example |
| --- | --- | --- |
| `ASC` | Sorts the resources with the given index in _ascending_ order. | `sort_by=ASC[balance]` |
| `DES` | Sorts the resources with the given index in _descending_ order. | `sort_by=DES[balance]` |
| - | If **no operator** is passed, this is equivalent to `DES` (see above). | `sort_by=balance` |

Errors
======

In case a request cannot be served by the API, a non-2xx HTTP response will be issued, together with a [JSend-compliant]() JSON Object describing the error in detail together with a numeric error code which can be used by API consumers to implement proper error handling in their application. For example, here's a typical error which might be issued:

``` json
$deErrorExample
```

Existing Wallet Errors
----------------------

$deWalletErrorTable

Monetary Denomination & Units
=============================

Sealchain's platform currency is called _SEAL_. _SEAL_ has up to **8** decimal places; hence the smallest monetary unit that can be represented in the Sealchain blockchain is 0.00000001. Sealchain originally includes a stablecoin called GD (GoldDollar); GD has up to **2** decimal places.

> **Warning**: All amounts manipulated in the API are given and expected in the smallest monetary unit.

Mnemonic Codes
==============

The full list of accepted mnemonic codes to secure a wallet is defined by the [BIP-39 specifications](-0039.mediawiki). Note that picking up 12 random words from the list **is not enough** and leads to poor security. Make sure to carefully follow the steps described in the protocol when you generate words for a new wallet.

Versioning & Legacy
===================

The API is **versioned**, meaning that it is possible to access different versions of the API by adding the _version number_ in the URL.

**For the sake of backward compatibility, we expose the legacy version of the API, available simply as unversioned endpoints.** This means that _omitting_ the version number would call the old version of the API. Deprecated endpoints are currently grouped under an appropriate section; they will be removed in upcoming releases, so if you're starting a new integration with Sealchain, please ignore these.

Note that compatibility between major versions is not _guaranteed_, i.e. the request & response formats might differ.

Disable TLS (Not Recommended)
-----------------------------

If needed, you can disable TLS by providing the `--no-tls` flag to the wallet or by running a wallet in debug mode with `--wallet-debug` turned on.

Common Use-Cases
================

Sending Money to Multiple Recipients
------------------------------------

As seen in [Sending SEAL](#section/Getting-Started/Sending-SEAL), you can send _SEAL_ to another party using the [`POST /api/v1/transactions`](#tag/Transactions%2Fpaths%2F~1api~1v1~1transactions%2Fpost) endpoint. Important to notice is the type of the field `destinations`: it's a list, enabling you to provide more than one destination. Each destination is composed of:

- An address
- A corresponding amount

The overall transaction corresponds to the sum of all outputs. For instance, to send money to two parties simultaneously:

```
curl -X POST :8090/api/v1/transactions \
  -H "Accept: application/json; charset=utf-8" \
  -H "Content-Type: application/json; charset=utf-8" \
  -d '{
    "destinations": [
      { "amount": 14, "address": "A7k5bz1QR2...Tx561NNmfF" },
      { "amount": 42, "address": "B56n78WKE8...jXAa34NUFz" }
    ],
    "source": {
      "accountIndex": 0,
      "walletId": "Ae2tdPwUPE...8V3AVTnqGZ"
    },
    "spendingPassword": "5416b2988745725998907addf4613c9b0764f04959030e1b81c603b920a115d0"
  }'
```

About UTXO Fragmentation
------------------------

As described in [Sending Money to Multiple Recipients](#section/Common-Use-Cases/Sending-Money-to-Multiple-Recipients), it is possible to send _SEAL_ to more than one destination. Sealchain only allows a given UTXO to cover at most one single transaction output.
As a result, when the number of transaction outputs is greater than the number of available UTXOs, the API returns a `UtxoNotEnoughFragmented` error which looks like the following:

```
{
  "status": "error",
  "diagnostic": {
    "details": {
      "help": "Utxo is not enough fragmented to handle the number of outputs of this transaction. Query /api/v1/wallets/{walletId}/statistics/utxos endpoint for more information",
      "missingUtxos": 1
    }
  },
  "message": "UtxoNotEnoughFragmented"
}
```

To make sure the source account has a sufficient level of UTXO fragmentation (i.e. number of UTXOs), please monitor the state of the UTXOs as described in [Getting UTXO Statistics](#section/Common-Use-Cases/Getting-Utxo-Statistics). The number of wallet UTXOs should be no less than the number of transaction outputs, and the sum of all UTXOs should be enough to cover the total transaction amount, including fees.

Contrary to a classic accounting model, there's no such thing as spending _part of a UTXO_, and one has to wait for a transaction to be included in a block before spending the remaining change. This is very similar to using bank notes: one can't spend a USD 20 bill at two different shops at the same time, even if it is enough to cover both purchases; one has to wait for change from the first transaction before making the second one.

There's no "ideal" level of fragmentation; it depends on one's needs. However, the more UTXOs that are available, the higher the concurrency capacity of one's wallet, allowing multiple transactions to be made at the same time. Similarly, there's no practical maximum number of UTXOs, but there is nevertheless a maximum transaction size. By having many small UTXOs, one is taking the risk of hitting that restriction, should too many inputs be selected to fill a transaction. The only way to work around this is to make multiple smaller transactions.

Estimating Transaction Fees
---------------------------

When you submit a transaction to the network, some fees apply depending on, but not only, the selected grouping policy and the available inputs on the source wallet. There's actually a trade-off between fees, cryptographic security, throughput and privacy. The more inputs are selected, the bigger the payload, and the bigger the fees. The API lets you estimate fees for a given transaction via the [`POST /api/v1/transactions/fees`](#tag/Transactions%2Fpaths%2F~1api~1v1~1transactions~1fees%2Fpost) endpoint.
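The request payload mirrors the one used to create a transaction; for instance, reusing the placeholder destination address and wallet id from the payment examples above (a sketch, not a complete walkthrough):

```
# placeholder address and wallet id from this guide; replace them with your own
curl -X POST :8090/api/v1/transactions/fees \
  -H "Accept: application/json; charset=utf-8" \
  -H "Content-Type: application/json; charset=utf-8" \
  --cacert ./scripts/tls-files/ca.crt \
  --cert ./scripts/tls-files/client.pem \
  -d '{
    "destinations": [{
      "amount": 14,
      "address": "A7k5bz1QR2...Tx561NNmfF"
    }],
    "source": {
      "accountIndex": 0,
      "walletId": "Ae2tdPwUPE...8V3AVTnqGZ"
    }
  }'
```

The result is only an estimate: it depends on the current state of the ledger and will drift over time.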
arenadotio/pgx
test_pgx_value_core.ml

open Core_kernel
module Value = Pgx_value_core

let time_roundtrip str = Value.of_string str |> Value.to_time_exn
let printer = Time.to_string_abs ~zone:Time.Zone.utc

let time_testable =
  Alcotest.testable (fun ppf t -> Format.pp_print_string ppf (printer t)) Time.equal
;;

let check_time = Alcotest.check time_testable
let check_string = Alcotest.(check string)
19:55:18.123456-04:00" in check_time "without TZ" expected (time_roundtrip "2016-03-15 23:55:18.123456"); check_time "zulu" expected (time_roundtrip "2016-03-15 23:55:18.123456Z"); check_time "hour TZ" expected (time_roundtrip "2016-03-15 19:55:18.123456-04"); check_time "full TZ" expected (time_roundtrip "2016-03-15 19:55:18.123456-04:00") ;; let test_time_of_string_no_ms _ = let expected = Time.of_string "2016-03-15 19:55:18-04:00" in check_time "without TZ" expected (time_roundtrip "2016-03-15 23:55:18"); check_time "zulu" expected (time_roundtrip "2016-03-15 23:55:18Z"); check_time "hour TZ" expected (time_roundtrip "2016-03-15 19:55:18-04"); check_time "full TZ" expected (time_roundtrip "2016-03-15 19:55:18-04:00") ;; let test_time_conversion_roundtrip _ = let expected_str = "2016-03-15 23:55:18.123456Z" in check_string "parse-print" expected_str (time_roundtrip expected_str |> printer); let expected_time = Time.of_string expected_str in check_time "print-parse" expected_time (Value.of_time expected_time |> Value.to_time_exn) ;; let time_tests = [ Alcotest.test_case "test time_of_string" `Quick test_time_of_string ; Alcotest.test_case "test time_of_string no milliseconds" `Quick test_time_of_string_no_ms ; Alcotest.test_case "test time conversion roundtrip" `Quick test_time_conversion_roundtrip ] ;; let () = Alcotest.run "pgx_async_conversions" [ "time", time_tests ] </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/arenadotio/pgx/8d5ca02213faa69e692c5d0dc3e81408db3774a1/pgx_value_core/test/test_pgx_value_core.ml</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">ocaml</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "></span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">open Core_kernel module Value = Pgx_value_core let time_roundtrip str = Value.of_string str |> Value.to_time_exn let printer = Time.to_string_abs ~zone:Time.Zone.utc let time_testable = Alcotest.testable (fun ppf t -> Format.pp_print_string ppf (printer t)) Time.equal ;; let check_time = Alcotest.check time_testable let check_string = Alcotest.(check string) let test_time_of_string _ = let expected = Time.of_string "2016-03-15 19:55:18.123456-04:00" in check_time "without TZ" expected (time_roundtrip "2016-03-15 23:55:18.123456"); check_time "zulu" expected (time_roundtrip "2016-03-15 23:55:18.123456Z"); check_time "hour TZ" expected (time_roundtrip "2016-03-15 19:55:18.123456-04"); check_time "full TZ" expected (time_roundtrip "2016-03-15 19:55:18.123456-04:00") ;; let test_time_of_string_no_ms _ = let expected = Time.of_string "2016-03-15 19:55:18-04:00" in check_time "without TZ" expected (time_roundtrip "2016-03-15 23:55:18"); check_time "zulu" expected (time_roundtrip "2016-03-15 23:55:18Z"); check_time "hour TZ" expected (time_roundtrip "2016-03-15 19:55:18-04"); check_time "full TZ" expected (time_roundtrip "2016-03-15 19:55:18-04:00") ;; let test_time_conversion_roundtrip _ = let 
expected_str = "2016-03-15 23:55:18.123456Z" in check_string "parse-print" expected_str (time_roundtrip expected_str |> printer); let expected_time = Time.of_string expected_str in check_time "print-parse" expected_time (Value.of_time expected_time |> Value.to_time_exn) ;; let time_tests = [ Alcotest.test_case "test time_of_string" `Quick test_time_of_string ; Alcotest.test_case "test time_of_string no milliseconds" `Quick test_time_of_string_no_ms ; Alcotest.test_case "test time conversion roundtrip" `Quick test_time_conversion_roundtrip ] ;; let () = Alcotest.run "pgx_async_conversions" [ "time", time_tests ] </span></div> </div></div> </td> </tr><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610240"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">8d35a45e41a48d54970a4d4b22cc2ddb8b1634a954206029ec680281a4a49f75</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">bytekid/mkbtt</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">codeTree.ml</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "> Copyright 2010 * GNU Lesser General Public License * * This file is part of MKBtt . * * is free software : you can redistribute it and/or modify it under * the terms of the GNU Lesser General Public License as published by the * Free Software Foundation , either version 3 of the License , or ( at your * option ) any later version . * * is distributed in the hope that it will be useful , but WITHOUT * ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE . See the GNU Lesser General Public * License for more details . * * You should have received a copy of the GNU Lesser General Public * License along with MKBtt . If not , see < / > . * GNU Lesser General Public License * * This file is part of MKBtt. * * MKBtt is free software: you can redistribute it and/or modify it under * the terms of the GNU Lesser General Public License as published by the * Free Software Foundation, either version 3 of the License, or (at your * option) any later version. * * MKBtt is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public * License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with MKBtt. If not, see </>. *) * @author @since 2009/07/21 @author Sarah Winkler @since 2009/07/21 *) (** Term indexing using code trees *) (*** OPENS ********************************************************************) open Util;; (*** EXCEPTIONS **********************************************************) exception No_back_pointer exception Malformed_tree of string exception Not_in_index exception Empty_branch (*** MODULES *************************************************************) module Fun = Rewriting.Function;; module Pos = Rewriting.Position;; module Var = Rewriting.Variable;; module T = U.Term;; module M = U.Monad;; open M;; : TermIndex . 
T with type entry = Entry.t = functor (Entry: TermIndex.ENTRY_TYPE) -> struct (*** SUBMODULES **********************************************************) module EL = TermIndex.EntryList(Entry);; (*** TYPES ***************************************************************) type entry = Entry.t type instruction = | Check of Fun.t * instruction * instruction | Put of int * instruction * instruction | Compare of int * int * instruction * instruction | Success of Entry.t list | Fail ;; type t = instruction (* convenient for generalization retrievals *) type flatterm = | Fun of Fun.t * flatterm * flatterm * T.t (* next, after, subterm here *) | Var of Var.t * flatterm (* next = after *) | End ;; (*** GLOBALS *************************************************************) (*** FUNCTIONS ***********************************************************) let is_empty t = return (t == Fail) let cont n = function | Check (_, c, _ ) | Put (_, c, _ ) | Compare (_, _, c, _ ) -> c | _ -> raise (Malformed_tree "cont does not exist") ;; let back n = function | Check (_, _, b) | Put (_, _, b) | Compare (_, _, _, b) -> b | _ -> raise (Malformed_tree "cont does not exist") ;; let set_back instruction b' = match instruction with | Check (f, c, b) -> Check (f, c, b') | Put (n, c, b) -> Put (n, c, b') | Compare (m, k, c, b) -> Compare (m, k, c, b') | _ -> raise (Malformed_tree "back does not exist") ;; (* output code *) let rec code_to_string c = match c with | Check(f, c, b) -> let cs, bs = code_to_string c, code_to_string b in "Check(" ^ (Fun.to_string f) ^ ", " ^ cs ^ ", " ^ bs ^ ")" | Put(k, c, b) -> let cs, bs = code_to_string c, code_to_string b in "Put(" ^ (string_of_int k) ^ ", " ^ cs ^ ", " ^ bs ^ ")" | Compare(m, k, c, b) -> let cs, bs = code_to_string c, code_to_string b in let sk, sm = string_of_int k, string_of_int m in "Compare(" ^ sm ^ ", " ^ sk ^ ", " ^ cs ^ ", " ^ bs ^ ")" | Success values -> "Success" ^ (List.join Entry.to_string " " values) | Fail -> "Fail" ;; let lookup table x i code = try let j = List.assoc x table in (Compare(j, i, Fail, Fail)) :: code, table with Not_found -> code, (x, i) :: table ;; let rec code_list tcodes ccodes i table = function | T.Var x -> let ccodes', table' = lookup table x i ccodes in (Put (i, Fail, Fail)) :: tcodes, ccodes', table', i + 1 | T.Fun(f, ts) -> let tcodes' = (Check(f, Fail, Fail)) :: tcodes in List.fold_left app_tcode (tcodes', ccodes, table, i) ts and app_tcode (tcodes, ccodes, table, i) t = code_list tcodes ccodes i table t ;; let rec combine_code instruction = function | [] -> instruction | Check(f, _, _) :: l -> combine_code (Check(f, instruction, Fail)) l | Put(k, _, _) :: l -> combine_code (Put(k, instruction, Fail)) l | Compare(k, m,_, _) :: l -> combine_code (Compare(k, m, instruction, Fail)) l | _ -> raise (Malformed_tree "Compare/Fail/Success not expected") ;; let code_for_term t = let success = Success [] in let tcode, ccode, _, _ = code_list [] [] 0 [] t in combine_code (combine_code success ccode) tcode ;; let code_for_value (t, v) = let success = Success [v] in let tcode, ccode, _, _ = code_list [] [] 0 [] t in combine_code (combine_code success ccode) tcode ;; (* ****************** CONSTRUCTION OF CODE TREES ********************** *) let make () = Fail (* assume code is just code, not tree (otherwise, change case for Success in tree *) let rec insert' code tree = match code, tree with | _, Fail -> code | Check(f, c, _), Check(g, c', b') when (Fun.compare f g) == 0 -> Check(g, insert' c c', b') | Compare(m, k, c, _), Compare(m', k', c', b') 
when (k == k') && (m == m') -> Compare(m', k', insert' c c', b')
  | Put(k, c, _), Put(k', c', b') when k = k' -> Put(k, insert' c c', b')
  | _, Check(_, _, b) | _, Compare (_, _, _, b) | _, Put (_, _, b) ->
    set_back tree (insert' code b)
  | Check(_, _, b), Success vs (* cases relevant? *)
  | Compare(_, _, _, b), Success vs
  | Put(_, _, b), Success vs -> set_back code (Success vs)
  | Success v, Success values -> Success (EL.union v values) (* variant *)
  | Fail, Success _ -> raise (Malformed_tree "Fail, Success not expected")
;;

(* add entry element into code tree *)
let insert tree (term, value) =
  (*T.to_stringm term >>= fun s ->
  Format.printf "Insert into index term %s\n%!" s;
  Format.printf "Tree before is %s\n" (code_to_string tree);*)
  let code = code_for_value (term, value) in
  let tree' = insert' code tree in
  (* Format.printf "Code is %s\n" (code_to_string code);
  Format.printf "Tree is %s\n" (code_to_string tree');*)
  return tree'
;;

let rec remove_code code tree v = match code, tree with
  | Fail, _ -> raise (Malformed_tree "Fail in code not expected")
  | Check(f,c,_), Check(g,c',b') when (Fun.compare f g) == 0 ->
    (try Check(g, remove_code c c' v, b')
     with Empty_branch -> if b' != Fail then b' else raise Empty_branch)
  | Compare(m,k,c,_), Compare(m',k',c',b') when (k==k') && (m==m') ->
    (try Compare(m', k', remove_code c c' v, b')
     with Empty_branch -> if b' != Fail then b' else raise Empty_branch)
  | Put(k, c, b), Put(k', c', b') when k = k' ->
    (try Put(k', remove_code c c' v, b')
     with Empty_branch -> if b' != Fail then b' else raise Empty_branch)
  | _, Check(_, _, b) | _, Compare(_, _, _, b) | _, Put(_, _, b) ->
    (try set_back tree (remove_code code b v)
     with Empty_branch -> set_back tree Fail)
  | Success v, Success values ->
    if (List.length values) == 1 then raise Empty_branch
    else Success (EL.diff values v) (* variant *)
  | _, Success _ -> raise (Malformed_tree "Success in tree not expected")
  | _ -> raise Not_in_index
;;

(* removes the value from the index. if not found, Not_in_index is raised *)
let delete tree value =
  (*T.to_stringm (fst value) >>= fun s ->
  Format.printf "Remove term %s\n%!"
s;*)
  let code = code_for_value value in
  let tree' = try remove_code code tree value with Empty_branch -> Fail in
  return tree'
;;

(********* RETRIEVAL OPERATIONS ******************************************)
(***** VARIANTS *****)
let rec retrieve_variants tree code = match tree, code with
  | Check(f, c, b), Check(g, c', _) when (Fun.compare f g) == 0 -> retrieve_variants c c'
  | Compare(m, k, c, b), Compare(m', k', c', _) when (k == k') && (m == m') -> retrieve_variants c c'
  | Put(k, c, b), Put(k', c', _) when k = k' -> retrieve_variants c c'
  | Check(_, _, b), _ | Compare(_, _, _, b), _ | Put(_, _, b), _ -> retrieve_variants b code
  | Success variants, Success _ -> variants
  | Fail, _ | Success _, _ -> []
;;

let variant_candidates tree term =
  let code = code_for_term term in
  let vars = retrieve_variants tree code in
  (*U.Term.to_stringm term >>= fun s ->
  Format.printf "CT: vars 4 %s: %i:\n%s\n" s (List.length vars)
    (List.foldl (fun s x -> (Entry.to_string x)^s) "" vars);*)
  return vars
;;

(***** GENERALIZATIONS *****)
let rec flatten' after t = match t with
  | T.Var x -> Var (x, after)
  | T.Fun(f, ts) ->
    let flat_ts = List.fold_right (fun t l -> flatten' l t) ts after in
    Fun(f, flat_ts, after, t) (* add t here, required in gen retrieve *)
;;

let flatten = flatten' End

let subst table i =
  try List.assoc i table
  with Not_found -> raise (Malformed_tree "compare without put")
;;

let rec retrieve_generalizations tree t_flat sub = match tree, t_flat with
  | Check(f, c, b), Fun(g, next, after, _) when (Fun.compare f g) == 0 ->
    let gens = retrieve_generalizations c next sub in
    EL.union (retrieve_generalizations b t_flat sub) gens
  | Compare(m, k, c, b), End ->
    let gens = retrieve_generalizations b End sub in
    if (compare (subst sub m) (subst sub k)) == 0
    then EL.union (retrieve_generalizations c End sub) gens
    else gens
  | Put(k, c, b), Var (x, after) ->
    let subterm = T.Var x in
    let gens = retrieve_generalizations c after ((k, subterm) :: sub) in
    EL.union (retrieve_generalizations b t_flat sub) gens
  | Put(k, c, b), Fun (_, _, after, subterm) ->
    let gens = retrieve_generalizations c after ((k, subterm) :: sub) in
    EL.union (retrieve_generalizations b t_flat sub) gens
  | Check(_, _, b), _ -> retrieve_generalizations b t_flat sub
  | Success entries, End -> entries
  | Fail, _ | Compare _, _ | Success _, _ -> []
  | Put _, End -> raise (Malformed_tree "not malformed?")
;;

(* find generalizations for a given term in dtree *)
let generalization_candidates tree term =
  let t_flat = flatten term in
  let gens = retrieve_generalizations tree t_flat [] in
  return gens
;;

(***** ENCOMPASSMENTS *****)
(* given a term, non-var generalization of subterms are returned,
   paired with the subterm's position. Not strict!
   Also not possible as indexing destroys nonlinearity.
let encompassment_candidates tree term =
  let pos_st = Termx.nonvar_pos_proper_subterms term in
  let ecs =
    List.fold_left (fun r (t, p) ->
      let gs = retrieve_generalizations tree (flatten t) [] in
      (List.map (fun n -> (n, p)) gs) @ r) [] ((term,Pos.root) :: pos_st)
  in return ecs
;;*)

(* given a term, non-var generalization of subterms are returned,
   paired with the subterm's position. Not strict! *)
let encompassment_candidates_below_root tree term =
  let pos_st = Termx.nonvar_pos_proper_subterms term in
  let ecs =
    List.fold_left (fun r (t, p) ->
      let gs = retrieve_generalizations tree (flatten t) [] in
      (List.map (fun n -> (n, p)) gs) @ r) [] pos_st
  in return ecs
;;

let encompassment_candidates tree term =
  let at_root = retrieve_generalizations tree (flatten term) [] in
  encompassment_candidates_below_root tree term >>= fun below ->
  let root = flip Pair.make Pos.root in
  return (List.rev_append (List.map root at_root) below)
;;

let size t = is_empty t >>= fun b -> return (if b then 0 else 1)

let overlap1_candidates t = failwith "CodeTree: overlaps not implemented"
let overlap1_candidates_below_root t = failwith "CodeTree: overlaps not implemented"
;;
let overlap2_candidates t = failwith "CodeTree: overlaps not implemented"
let unification_candidates t = failwith "CodeTree: unification not implemented"
;;
end (* Make *)

module TermCodeTree = Make(TermIndex.TermEntry)

(*
let test () =
  Format.printf "testing module CodeTree\n";
  let c = Fun.of_string "c" 0 in
  let f = Fun.of_string "f" 1 in
  let g = Fun.of_string "g" 2 in
  let x = Term.Var (Var.of_string "x") in
  let y = Term.Var (Var.of_string "y") in
  let f_x = Term.Fun (f, [x]) in
  let f_f_x = Term.Fun (f, [f_x]) in
  let c_ = Term.Fun (c, []) in
  let g_x_x = Term.Fun(g, [x; x]) in
  Format.printf "Code for %s: \n %s\n" (Term.to_string f_f_x)
    (TermCodeTree.code_to_string (TermCodeTree.code_for_value f_f_x));
  Format.printf "Code for %s: \n %s\n" (Term.to_string g_x_x)
    (TermCodeTree.code_to_string (TermCodeTree.code_for_value g_x_x));
  let g_f_f_x_c = Term.Fun (g, [f_f_x; c_]) in
  Format.printf "Code for %s: \n %s\n\n" (Term.to_string g_f_f_x_c)
    (TermCodeTree.code_to_string (TermCodeTree.code_for_value g_f_f_x_c));
  let g_f_f_x_f_x = Term.Fun (g, [f_f_x; f_x]) in
  let g_f_f_x_y = Term.Fun (g, [f_f_x; y]) in
  Format.printf "Code for %s: \n %s\n\n" (Term.to_string g_f_f_x_f_x)
    (TermCodeTree.code_to_string (TermCodeTree.code_for_value g_f_f_x_f_x));
  let t = Term.Fun (g, [g_f_f_x_f_x; y]) in
  let t' = Term.Fun (g, [g_f_f_x_f_x; g_x_x]) in
  Format.printf "Code for %s: \n %s\n\n" (Term.to_string t)
    (TermCodeTree.code_to_string (TermCodeTree.code_for_value t));
  (* INSERT *)
  let tree = TermCodeTree.insert (TermCodeTree.code_for_value g_f_f_x_c) g_f_f_x_y in
  Format.printf "Code for insert: \n %s\n\n" (TermCodeTree.code_to_string tree);
  let tree' = TermCodeTree.insert tree t in
  Format.printf "Code for insert: \n %s\n\n" (TermCodeTree.code_to_string tree');
  let g_f_f_y_c = Term.Fun (g, [Term.Fun (f, [Term.Fun (f, [y])]); c_]) in
  let tree' = TermCodeTree.insert tree' g_f_f_y_c in
  Format.printf "Code for insert g_f_f_y_c: \n %s\n\n" (TermCodeTree.code_to_string tree');
  (* DELETE *)
  let tree'' = TermCodeTree.delete tree' g_f_f_y_c in
  Format.printf "Code for delete g_f_f_y_c again: \n %s\n\n" (TermCodeTree.code_to_string tree'');
  (*Format.printf "Code for delete g_x_x: \n %s\n\n"
    (TermCodeTree.code_to_string (TermCodeTree.delete tree' g_x_x));*)
  (* VARIANTS *)
  let variants = TermCodeTree.variant_candidates tree' g_f_f_x_f_x in
  let variants' = TermCodeTree.variant_candidates tree' g_f_f_x_y in
  Format.printf "variants for %s: %s, %s: %s\n"
    (Term.to_string g_f_f_x_f_x) (List.to_string Term.to_string "" variants)
    (Term.to_string g_f_f_x_y) (List.to_string Term.to_string "" variants');
  let tree' = TermCodeTree.insert tree' t' in
  (* GENERALIZATIONS *)
  let u = Term.Fun (g, [f_x; y]) in
  let tree' = TermCodeTree.insert tree' u in
  let gens = TermCodeTree.generalization_candidates tree' g_f_f_y_c in
  Format.printf "generalizations for %s: %s\n" (Term.to_string g_f_f_y_c)
    (List.to_string Term.to_string "" gens); (* ok *)
  let gens = TermCodeTree.generalization_candidates tree' u in
  Format.printf "generalizations for %s: %s\n" (Term.to_string u)
    (List.to_string Term.to_string "" gens); (* ok *)
  let s = Term.Fun (g, [f_x; x]) in
  let tree' = TermCodeTree.insert tree' s in
  let gens = TermCodeTree.generalization_candidates tree' g_f_f_x_f_x in
  Format.printf "generalizations for %s: %s\n" (Term.to_string g_f_f_x_f_x)
    (List.to_string Term.to_string "" gens);
  (***** ENCOMPASSMENTS *****)
  let gens = TermCodeTree.encompassment_candidates_not_strict tree' t in
  let f (t, p) = (Term.to_string t) ^ "@" ^ (Position.to_string p) ^ "\n" in
  Format.printf "encompassments for %s: %s\n" (Term.to_string t) (List.to_string f "" gens);
;;
*)
(* test ()*)

Source: https://raw.githubusercontent.com/bytekid/mkbtt/c2f8e0615389b52eabd12655fe48237aa0fe83fd/src/mkbtt/termindexing/codeTree.ml
Language: OCaml
Repository: haskellari/indexed-traversable
File: GhcList.hs
Content hash: 2d15462f3dfb7abb6a87dfbb0692273cd9c76cdfcae05b89b48377f7c0244d7c

{-# LANGUAGE CPP #-}
#if MIN_VERSION_base(4,17,0)
{-# LANGUAGE Safe #-}
#elif __GLASGOW_HASKELL__ >= 702
{-# LANGUAGE Trustworthy #-}
#endif
module GhcList (
    build,
) where

#if MIN_VERSION_base(4,17,0)
import GHC.List (build)
#else
import GHC.Exts (build)
#endif

Source: https://raw.githubusercontent.com/haskellari/indexed-traversable/8403a52163e5b8f3ec32a2846b53ccc2e8088a6f/indexed-traversable/src/GhcList.hs
Language: Haskell

Repository: alanz/ghc-exactprint
File: SH_Overlap9.hs
Content hash: 5f4bf801f0e07c26630f9b98714dcb90238998b8f63796e2642b954c933544e2

{-# OPTIONS_GHC -fwarn-safe #-}
{-# LANGUAGE FlexibleInstances #-}

-- | Same as `SH_Overlap6`, but now we are inferring safety. Should be inferred
-- unsafe due to overlapping instances at call site `f`.
module SH_Overlap9 where

import SH_Overlap9_A

instance C [a] where
  f _ = "[a]"

test :: String
test = f ([1,2,3,4] :: [Int])

Source: https://raw.githubusercontent.com/alanz/ghc-exactprint/b6b75027811fa4c336b34122a7a7b1a8df462563/tests/examples/ghc80/SH_Overlap9.hs
Language: Haskell

Repository: janestreet/async_rpc_kernel
File: rpc_metadata.mli
Content hash: 85cccff35599098082d332fcc983d93f978779fc63b43311cca246dda1f99ee0

(** Metadata is arbitrary information provided by a caller along with the query. It is
    opaque to the Async RPC protocol, and may not be present on all queries. Metadata
    should generally be small, middleware-provided data that does not affect the
    callee's behavior (e.g. tracing ids). It may be subject to truncation if values
    provided are too large. See [Connection.create] for more info. *)

open! Core

type t = string [@@deriving sexp_of]

(** Retrieves the metadata in the context of the current RPC call, if it is available. *)
val get : unit -> t option

module Private : sig
  val with_metadata : t option -> f:(unit -> 'a) -> 'a
end

Source: https://raw.githubusercontent.com/janestreet/async_rpc_kernel/541fb417b39fad5c930ac73b729a7aaf59bd1001/src/rpc_metadata.mli
Language: OCaml
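A hedged sketch, not the Async RPC implementation: the interface above suggests a dynamic-scoping pattern in which Private.with_metadata installs metadata for the extent of f and get reads it back. The toy module below re-implements just that shape; the ref-based body and the name Rpc_metadata_sketch are assumptions made for illustration only.

module Rpc_metadata_sketch : sig
  type t = string
  val get : unit -> t option
  module Private : sig
    val with_metadata : t option -> f:(unit -> 'a) -> 'a
  end
end = struct
  type t = string

  (* Assumed storage: a single ref, standing in for whatever per-call context the
     real library uses. *)
  let current : t option ref = ref None

  let get () = !current

  module Private = struct
    let with_metadata md ~f =
      let saved = !current in
      current := md;
      (* Restore the previous value even if [f] raises. *)
      Fun.protect ~finally:(fun () -> current := saved) f
  end
end

let () =
  Rpc_metadata_sketch.Private.with_metadata (Some "trace-id: abc123") ~f:(fun () ->
    match Rpc_metadata_sketch.get () with
    | Some md -> print_endline ("metadata in scope: " ^ md)
    | None -> print_endline "no metadata")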
Repository: pqwy/notty
File: notty_top_init.ml
Content hash: 6771c3d64b28efe1bd2eead1a00e0d49576f11f430ff39602d538a7c8f4d162a

(* Copyright (c) 2017. All rights reserved. See LICENSE.md. *)

open Notty;;
#install_printer Notty.Render.pp_image;;
#install_printer Notty.Render.pp_attr;;

Source: https://raw.githubusercontent.com/pqwy/notty/389366c023396017aa21efcdbb07ade5ba0974c5/src/notty_top_init.ml
Language: OCaml

Repository: hasktorch/ffi-experimental
File: Scalar.hs
Content hash: c6b904a109064fdbcdd47b942a4448d1892030c35f153fa778071da196ef2869

{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}

module Torch.Scalar where

import Foreign.ForeignPtr
import qualified ATen.Const as ATen
import qualified ATen.Managed.Type.Scalar as ATen
import qualified ATen.Type as ATen
import ATen.Managed.Cast
import ATen.Class (Castable(..))
import ATen.Cast

instance Castable Float (ForeignPtr ATen.Scalar) where
  cast x f = ATen.newScalar_d (realToFrac x) >>= f
  uncast x f = undefined

instance Castable Double (ForeignPtr ATen.Scalar) where
  cast x f = ATen.newScalar_d (realToFrac x) >>= f
  uncast x f = undefined

instance Castable Int (ForeignPtr ATen.Scalar) where
  cast x f = ATen.newScalar_i (fromIntegral x) >>= f
  uncast x f = undefined

class (Castable a (ForeignPtr ATen.Scalar)) => Scalar a
instance Scalar Float
instance Scalar Double
instance Scalar Int

Source: https://raw.githubusercontent.com/hasktorch/ffi-experimental/54192297742221c4d50398586ba8d187451f9ee0/hasktorch/src/Torch/Scalar.hs
class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">haskell</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "></span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "># LANGUAGE MultiParamTypeClasses # # LANGUAGE FlexibleContexts # # LANGUAGE FlexibleInstances # module Torch.Scalar where import Foreign.ForeignPtr import qualified ATen.Const as ATen import qualified ATen.Managed.Type.Scalar as ATen import qualified ATen.Type as ATen import ATen.Managed.Cast import ATen.Class (Castable(..)) import ATen.Cast instance Castable Float (ForeignPtr ATen.Scalar) where cast x f = ATen.newScalar_d (realToFrac x) >>= f uncast x f = undefined instance Castable Double (ForeignPtr ATen.Scalar) where cast x f = ATen.newScalar_d (realToFrac x) >>= f uncast x f = undefined instance Castable Int (ForeignPtr ATen.Scalar) where cast x f = ATen.newScalar_i (fromIntegral x) >>= f uncast x f = undefined class (Castable a (ForeignPtr ATen.Scalar)) => Scalar a instance Scalar Float instance Scalar Double instance Scalar Int </span></div> </div></div> </td> </tr><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610246"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">e8e44553c6bc715e30d1f0cfe4aee8dba9a3f8964713f76f40af4bf96db3856f</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">threatgrid/ctia</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">crud.clj</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">(ns ctia.stores.es.crud (:require [clojure.set :as set] [clojure.string :as string] [clojure.tools.logging :as log] [ctia.domain.access-control :as ac :refer [allow-read? allow-write? restricted-read?]] [ctia.lib.pagination :refer [list-response-schema]] [ctia.schemas.core :refer [SortExtension SortExtensionDefinitions]] [ctia.schemas.search-agg :refer [AggQuery CardinalityQuery HistogramQuery QueryStringSearchArgs SearchQuery TopnQuery]] [ctia.stores.es.sort :as es.sort] [ctia.stores.es.query :as es.query] [ctia.stores.es.schemas :refer [ESConnState]] [ductile.document :as ductile.doc] [ductile.query :as q] [ring.swagger.coerce :as sc] [schema-tools.core :as st] [schema.coerce :as c] [schema.core :as s])) (defn make-es-read-params "Prepare ES Params for read operations, setting the _source field and including ACL mandatory ones." [{:keys [fields] :as es-params}] (cond-> es-params (coll? fields) (-> (assoc :_source (concat fields ac/acl-fields)) (dissoc :fields)))) (defn coerce-to-fn [Model] (c/coercer! Model sc/json-schema-coercion-matcher)) (defn ensure-document-id "Returns a document ID. if id is a object ID, it extract the document ID, if it's a document ID already, it will just return that." 
[id] (let [[_orig docid] (re-matches #".*?([^/]+)\z" id)] docid)) (defn ensure-document-id-in-map "Ensure a document ID in a given filter map" [{:keys [id] :as m}] (cond-> m (string? id) (update :id list) id (update :id #(map ensure-document-id %)))) (defn remove-es-actions "Removes the ES action level [{:index {:_id \"1\"}} {:index {:_id \"2\"}}] -> [{:_id \"1\"} {:_id \"2\"}] " [items] (map (comp first vals) items)) (defn build-create-result [item coerce-fn] (-> item (dissoc :_id :_index :_type) coerce-fn)) (defn partial-results "Build partial results when an error occurs for one or more items in the bulk operation. Ex: [{model1} {:error \"Error message item2\"} {model3}]" [exception-data models coerce-fn] (let [{{:keys [items]} :es-http-res-body} exception-data] {:data (map (fn [{:keys [error _id]} model] (if error {:error error :id _id} (build-create-result model coerce-fn))) (remove-es-actions items) models)})) (s/defn get-docs-with-indices "Retrieves a documents from a search \"ids\" query. It enables to retrieves documents from an alias that points to multiple indices. It returns the documents with full hits meta data including the real index in which is stored the document." [{:keys [conn index] :as _conn-state} :- ESConnState ids :- [s/Str] es-params] (let [limit (count ids) ids-query (q/ids (map ensure-document-id ids)) res (ductile.doc/query conn index ids-query (assoc (make-es-read-params es-params) :limit limit :full-hits? true))] (:data res))) (s/defn get-doc-with-index "Retrieves a document from a search \"ids\" query. It is used to perform a get query on an alias that points to multiple indices. It returns the document with full hits meta data including the real index in which is stored the document." [conn-state :- ESConnState _id :- s/Str es-params] (first (get-docs-with-indices conn-state [_id] es-params))) (defn ^:private prepare-opts [{:keys [props]} {:keys [refresh]}] {:refresh (or refresh (:refresh props) "false")}) (s/defn bulk-schema [Model :- (s/pred map?)] (st/optional-keys {:create [Model] :index [Model] :update [(st/optional-keys Model)] :delete [s/Str]})) (s/defn ^:private prepare-bulk-doc [{:keys [props]} :- ESConnState mapping :- s/Keyword doc :- (s/pred map?)] (assoc doc :_id (:id doc) :_index (:write-index props) :_type (name mapping))) (defn handle-create "Generate an ES create handler using some mapping and schema" [mapping Model] (let [coerce! (coerce-to-fn (s/maybe Model))] (s/fn :- [Model] [{:keys [conn] :as conn-state} :- ESConnState docs :- [Model] _ident es-params] (let [prepare-doc (partial prepare-bulk-doc conn-state mapping) prepared (mapv prepare-doc docs)] (try (ductile.doc/bulk-index-docs conn prepared (prepare-opts conn-state es-params)) docs (catch Exception e (throw (if-let [ex-data (ex-data e)] ;; Add partial results to the exception data map (ex-info (.getMessage e) (partial-results ex-data docs coerce!)) e)))))))) (defn handle-update "Generate an ES update handler using some mapping and schema" [mapping Model] (let [coerce! (coerce-to-fn (s/maybe Model))] (s/fn :- (s/maybe Model) [{:keys [conn] :as conn-state} :- ESConnState id :- s/Str realized :- Model ident es-params] (when-let [[{index :_index current-doc :_source}] (get-docs-with-indices conn-state [id] {})] (if (allow-write? current-doc ident) (let [update-doc (assoc realized :id (ensure-document-id id))] (ductile.doc/index-doc conn index (name mapping) update-doc (prepare-opts conn-state es-params)) (coerce! 
Source: https://raw.githubusercontent.com/threatgrid/ctia/6c11ba6a7c57a44de64c16601d3914f5b0cf308e/src/ctia/stores/es/crud.clj
Language: clojure
Description: Add partial results to the exception data map

(ns ctia.stores.es.crud (:require [clojure.set :as set] [clojure.string :as 
string] [clojure.tools.logging :as log] [ctia.domain.access-control :as ac :refer [allow-read? allow-write? restricted-read?]] [ctia.lib.pagination :refer [list-response-schema]] [ctia.schemas.core :refer [SortExtension SortExtensionDefinitions]] [ctia.schemas.search-agg :refer [AggQuery CardinalityQuery HistogramQuery QueryStringSearchArgs SearchQuery TopnQuery]] [ctia.stores.es.sort :as es.sort] [ctia.stores.es.query :as es.query] [ctia.stores.es.schemas :refer [ESConnState]] [ductile.document :as ductile.doc] [ductile.query :as q] [ring.swagger.coerce :as sc] [schema-tools.core :as st] [schema.coerce :as c] [schema.core :as s])) (defn make-es-read-params "Prepare ES Params for read operations, setting the _source field and including ACL mandatory ones." [{:keys [fields] :as es-params}] (cond-> es-params (coll? fields) (-> (assoc :_source (concat fields ac/acl-fields)) (dissoc :fields)))) (defn coerce-to-fn [Model] (c/coercer! Model sc/json-schema-coercion-matcher)) (defn ensure-document-id "Returns a document ID. if id is a object ID, it extract the document ID, if it's a document ID already, it will just return that." [id] (let [[_orig docid] (re-matches #".*?([^/]+)\z" id)] docid)) (defn ensure-document-id-in-map "Ensure a document ID in a given filter map" [{:keys [id] :as m}] (cond-> m (string? id) (update :id list) id (update :id #(map ensure-document-id %)))) (defn remove-es-actions "Removes the ES action level [{:index {:_id \"1\"}} {:index {:_id \"2\"}}] -> [{:_id \"1\"} {:_id \"2\"}] " [items] (map (comp first vals) items)) (defn build-create-result [item coerce-fn] (-> item (dissoc :_id :_index :_type) coerce-fn)) (defn partial-results "Build partial results when an error occurs for one or more items in the bulk operation. Ex: [{model1} {:error \"Error message item2\"} {model3}]" [exception-data models coerce-fn] (let [{{:keys [items]} :es-http-res-body} exception-data] {:data (map (fn [{:keys [error _id]} model] (if error {:error error :id _id} (build-create-result model coerce-fn))) (remove-es-actions items) models)})) (s/defn get-docs-with-indices "Retrieves a documents from a search \"ids\" query. It enables to retrieves documents from an alias that points to multiple indices. It returns the documents with full hits meta data including the real index in which is stored the document." [{:keys [conn index] :as _conn-state} :- ESConnState ids :- [s/Str] es-params] (let [limit (count ids) ids-query (q/ids (map ensure-document-id ids)) res (ductile.doc/query conn index ids-query (assoc (make-es-read-params es-params) :limit limit :full-hits? true))] (:data res))) (s/defn get-doc-with-index "Retrieves a document from a search \"ids\" query. It is used to perform a get query on an alias that points to multiple indices. It returns the document with full hits meta data including the real index in which is stored the document." 
[conn-state :- ESConnState _id :- s/Str es-params] (first (get-docs-with-indices conn-state [_id] es-params))) (defn ^:private prepare-opts [{:keys [props]} {:keys [refresh]}] {:refresh (or refresh (:refresh props) "false")}) (s/defn bulk-schema [Model :- (s/pred map?)] (st/optional-keys {:create [Model] :index [Model] :update [(st/optional-keys Model)] :delete [s/Str]})) (s/defn ^:private prepare-bulk-doc [{:keys [props]} :- ESConnState mapping :- s/Keyword doc :- (s/pred map?)] (assoc doc :_id (:id doc) :_index (:write-index props) :_type (name mapping))) (defn handle-create "Generate an ES create handler using some mapping and schema" [mapping Model] (let [coerce! (coerce-to-fn (s/maybe Model))] (s/fn :- [Model] [{:keys [conn] :as conn-state} :- ESConnState docs :- [Model] _ident es-params] (let [prepare-doc (partial prepare-bulk-doc conn-state mapping) prepared (mapv prepare-doc docs)] (try (ductile.doc/bulk-index-docs conn prepared (prepare-opts conn-state es-params)) docs (catch Exception e (throw (if-let [ex-data (ex-data e)] (ex-info (.getMessage e) (partial-results ex-data docs coerce!)) e)))))))) (defn handle-update "Generate an ES update handler using some mapping and schema" [mapping Model] (let [coerce! (coerce-to-fn (s/maybe Model))] (s/fn :- (s/maybe Model) [{:keys [conn] :as conn-state} :- ESConnState id :- s/Str realized :- Model ident es-params] (when-let [[{index :_index current-doc :_source}] (get-docs-with-indices conn-state [id] {})] (if (allow-write? current-doc ident) (let [update-doc (assoc realized :id (ensure-document-id id))] (ductile.doc/index-doc conn index (name mapping) update-doc (prepare-opts conn-state es-params)) (coerce! update-doc)) (throw (ex-info "You are not allowed to update this document" {:type :access-control-error}))))))) (defn handle-read "Generate an ES read handler using some mapping and schema" [Model] (let [coerce! (coerce-to-fn (s/maybe Model))] (s/fn :- (s/maybe Model) [{{{:keys [get-in-config]} :ConfigService} :services :as conn-state} :- ESConnState id :- s/Str ident es-params] (when-let [doc (-> (get-doc-with-index conn-state id (make-es-read-params es-params)) :_source coerce!)] (if (allow-read? doc ident get-in-config) doc (throw (ex-info "You are not allowed to read this document" {:type :access-control-error}))))))) (defn handle-read-many "Generate an ES read-many handler using some mapping and schema" [Model] (let [coerce! (coerce-to-fn Model)] (s/fn :- [(s/maybe Model)] [{{{:keys [get-in-config]} :ConfigService} :services :as conn-state} :- ESConnState ids :- [s/Str] ident {:keys [suppress-access-control-error?] :or {suppress-access-control-error? false} :as es-params}] (sequence (comp (map :_source) (map coerce!) (map (fn [record] (if (allow-read? record ident get-in-config) record (let [ex (ex-info "You are not allowed to read this document" {:type :access-control-error})] (if suppress-access-control-error? (log/error ex) (throw ex))))))) (get-docs-with-indices conn-state ids (make-es-read-params es-params)))))) (defn access-control-filter-list "Given an ident, keep only documents it is allowed to read" [docs ident get-in-config] (filter #(allow-read? 
% ident get-in-config) docs)) (s/defschema BulkResult (st/optional-keys {:deleted [s/Str] :updated [s/Str] :errors (st/optional-keys {:forbidden [s/Str] :not-found [s/Str] :internal-error [s/Str]})})) (s/defschema ESActionResult (st/open-schema {:_id s/Str :_index s/Str :status s/Int :result s/Str})) TODO move it to ductile (s/defschema ESBulkRes {:took s/Int :errors s/Bool :items [{ductile.doc/BulkOps ESActionResult}]}) (s/defn ^:private format-bulk-res "transform an elasticsearch bulk result into a CTIA Bulk Result. ex: -bulk.html#docs-bulk-api-example" [bulk-res :- ESBulkRes] (let [{:keys [deleted updated not_found]} (->> (:items bulk-res) (map (comp first vals)) (group-by :result) (into {} (map (fn [[result items]] {(keyword result) (map :_id items)}))))] (cond-> {} deleted (assoc :deleted deleted) updated (assoc :updated updated) not_found (assoc-in [:errors :not-found] not_found)))) (s/defn check-and-prepare-bulk :- (st/assoc BulkResult (s/optional-key :prepared) [(s/pred map?)]) "prepare a bulk query: - retrieve actual indices, deletion cannot be performed on the alias. - filter out forbidden entitites - forbidden and not_found errors are prepared for the response." [conn-state :- ESConnState ids :- [s/Str] ident] (let [get-in-config (get-in conn-state [:services :ConfigService]) doc-ids (map ensure-document-id ids) docs-with-indices (get-docs-with-indices conn-state doc-ids {}) {authorized true forbidden-write false} (group-by #(allow-write? (:_source %) ident) docs-with-indices) {forbidden true not-visible false} (group-by #(allow-read? (:_source %) ident get-in-config) forbidden-write) missing (set/difference (set doc-ids) (set (map :_id docs-with-indices))) not-found (into (map :_id not-visible) missing) prepared-docs (map #(select-keys % [:_index :_type :_id]) authorized)] (cond-> {} forbidden (assoc-in [:errors :forbidden] (map :_id forbidden)) (seq not-found) (assoc-in [:errors :not-found] not-found) authorized (assoc :prepared prepared-docs)))) (s/defn bulk-delete :- BulkResult [{:keys [conn] :as conn-state} ids :- [s/Str] ident es-params] (let [{:keys [prepared errors]} (check-and-prepare-bulk conn-state ids ident) bulk-res (when prepared (try (format-bulk-res (ductile.doc/bulk-delete-docs conn prepared (prepare-opts conn-state es-params))) (catch Exception e (log/error e (str "bulk delete failed: " (.getMessage e)) (pr-str prepared)) {:errors {:internal-error (map :_id prepared)}})))] (cond-> bulk-res errors (update :errors #(merge-with concat errors %))))) (s/defn bulk-update "Generate an ES bulk update handler using some mapping and schema" [Model] (s/fn :- BulkResult [{:keys [conn] :as conn-state} docs :- [Model] ident es-params] (let [by-id (group-by :id docs) ids (seq (keys by-id)) {:keys [prepared errors]} (check-and-prepare-bulk conn-state ids ident) prepared-docs (map (fn [meta] (-> (:_id meta) by-id first (into meta))) prepared) bulk-res (when prepared (try (format-bulk-res (ductile.doc/bulk-index-docs conn prepared-docs (prepare-opts conn-state es-params))) (catch Exception e (log/error (str "bulk update failed: " (.getMessage e)) (pr-str prepared)) {:errors {:internal-error (map :_id prepared)}})))] (cond-> bulk-res errors (update :errors #(merge-with concat errors %)))))) (defn handle-delete "Generate an ES delete handler using some mapping" [mapping] (s/fn :- s/Bool [{:keys [conn] :as conn-state} :- ESConnState id :- s/Str ident es-params] (if-let [{index :_index doc :_source} (get-doc-with-index conn-state id {})] (if (allow-write? 
doc ident) (ductile.doc/delete-doc conn index (name mapping) (ensure-document-id id) (prepare-opts conn-state es-params)) (throw (ex-info "You are not allowed to delete this document" {:type :access-control-error}))) false))) (s/defschema FilterSchema (st/optional-keys {:all-of {s/Any s/Any} :one-of {s/Any s/Any} :query s/Str})) (def enumerable-fields-mapping "Mapping table for all fields which needs to be renamed for the sorting or aggregation. Instead of using fielddata we can have a text field for full text searches, and an unanalysed keyword field with doc_values enabled for sorting or aggregation" {"title" "title.whole" "reason" "reason.whole"}) (s/defn parse-sort-by :- [SortExtension] "Parses the sort_by parameter Ex: \"title:ASC,revision:DESC\" -> [{:op :field :field-name \"title\" :sort_order \"ASC\"} {:op :field :field-name \"revision\" :sort_order \"DESC\"}]" [sort_by] (if ((some-fn string? simple-ident?) sort_by) (map (fn [field] (let [[field-name field-order] (string/split field #":")] (cond-> {:op :field :field-name (keyword field-name)} field-order (assoc :sort_order field-order)))) (string/split (name sort_by) #",")) sort_by)) (defn with-default-sort-field [es-params {:keys [default-sort]}] (assert (not (:sort_by es-params))) (update es-params :sort #(or % (some->> default-sort parse-sort-by (mapv (fn [m] (es.sort/parse-sort-params-op m :asc)))) [{"_doc" :asc} {"id" :asc}]))) (s/defn rename-sort-fields "Renames sort fields based on the content of the `enumerable-fields-mapping` table and remaps to script extensions." [{:keys [sort_by sort_order] :as es-params} sort-extension-definitions :- (s/maybe SortExtensionDefinitions)] (cond-> (dissoc es-params :sort_by :sort_order) (and sort_by (not (:sort es-params))) (assoc :sort (->> sort_by parse-sort-by (mapv (fn [field] {:pre [(= :field (:op field))]} (let [{:keys [field-name] :as field} (update field :field-name #(or (keyword (enumerable-fields-mapping (name %))) %))] (assert (simple-keyword? field-name)) (-> (or (some-> (get sort-extension-definitions field-name) (into (select-keys field [:sort_order])) (update :field-name #(or % (:field-name field)))) field) (es.sort/parse-sort-params-op (or sort_order :asc)))))))))) (s/defschema MakeQueryParamsArgs {:params s/Any :props s/Any (s/optional-key :sort-extension-definitions) SortExtensionDefinitions}) (s/defn make-query-params :- {s/Keyword s/Any} [{:keys [params props sort-extension-definitions]} :- MakeQueryParamsArgs] (cond-> (-> params (rename-sort-fields sort-extension-definitions) (with-default-sort-field props) make-es-read-params) (<= 7 (:version props)) (assoc :track_total_hits true))) (defn handle-find "Generate an ES find/list handler using some mapping and schema" [Model] (let [response-schema (list-response-schema Model) coerce! (coerce-to-fn response-schema)] (s/fn :- response-schema [{{{:keys [get-in-config]} :ConfigService} :services :keys [conn index props]} :- ESConnState {:keys [all-of one-of query] :or {all-of {} one-of {}}} :- FilterSchema ident es-params] (let [filter-val (cond-> (q/prepare-terms all-of) (restricted-read? 
ident) (conj (es.query/find-restriction-query-part ident get-in-config))) query_string {:query_string {:query query}} date-range-query (es.query/make-date-range-query es-params) bool-params (cond-> {:filter filter-val} (seq one-of) (into {:should (q/prepare-terms one-of) :minimum_should_match 1}) query (update :filter conj query_string) (seq date-range-query) (update :filter conj {:range date-range-query})) query-params (make-query-params {:params es-params :props props})] (cond-> (coerce! (ductile.doc/query conn index (q/bool bool-params) query-params)) (restricted-read? ident) (update :data access-control-filter-list ident get-in-config)))))) (s/defn make-search-query :- {s/Keyword s/Any} "Translate SearchQuery map into ES Query DSL map" [es-conn-state :- ESConnState search-query :- SearchQuery ident] (let [{:keys [services]} es-conn-state {{:keys [get-in-config]} :ConfigService} services {:keys [filter-map range full-text]} search-query range-query (when range {:range range}) filter-terms (-> (ensure-document-id-in-map filter-map) q/prepare-terms)] {:bool {:filter (cond-> [(es.query/find-restriction-query-part ident get-in-config)] (seq filter-map) (into filter-terms) (seq range) (conj range-query) (seq full-text) (into (es.query/refine-full-text-query-parts es-conn-state full-text)))}})) (defn handle-query-string-search "Generate an ES query handler for given schema schema" [Model] (let [response-schema (list-response-schema Model) coerce! (coerce-to-fn response-schema)] (s/fn :- response-schema [{:keys [props] :as es-conn-state} :- ESConnState {:keys [search-query ident] :as query-string-search-args} :- QueryStringSearchArgs] (let [{conn :conn, index :index {{:keys [get-in-config]} :ConfigService} :services} es-conn-state query (make-search-query es-conn-state search-query ident) query-params (make-query-params (-> (select-keys query-string-search-args [:params :sort-extension-definitions]) (assoc :props props)))] (cond-> (coerce! (ductile.doc/query conn index query query-params)) (restricted-read? ident) (update :data access-control-filter-list ident get-in-config)))))) (s/defn handle-delete-search "ES delete by query handler" [{:keys [conn index] :as es-conn-state} :- ESConnState search-query :- SearchQuery ident es-params] (let [query (make-search-query es-conn-state search-query ident)] (:deleted (ductile.doc/delete-by-query conn [index] query (prepare-opts es-conn-state es-params))))) (s/defn handle-query-string-count :- (s/pred nat-int?) 
"ES count handler" [{conn :conn index :index :as es-conn-state} :- ESConnState search-query :- SearchQuery ident] (let [query (make-search-query es-conn-state search-query ident)] (ductile.doc/count-docs conn index query))) (s/defn make-histogram [{:keys [aggregate-on granularity timezone] :or {timezone "+00:00"}} :- HistogramQuery] {:date_histogram {:field aggregate-on TODO switch to calendar_interval with ES7 :time_zone timezone}}) (s/defn make-topn [{:keys [aggregate-on limit sort_order] :or {limit 10 sort_order :desc}} :- TopnQuery] {:terms {:field (get enumerable-fields-mapping aggregate-on aggregate-on) :size limit :order {:_count sort_order}}}) (s/defn make-cardinality [{:keys [aggregate-on]} :- CardinalityQuery] {:cardinality {:field (get enumerable-fields-mapping aggregate-on aggregate-on) :precision_threshold 10000}}) (s/defn make-aggregation [{:keys [agg-type agg-key aggs] :or {agg-key :metric} :as agg-query} :- AggQuery] (let [root-agg (dissoc agg-query :aggs) agg-fn (case agg-type :topn make-topn :cardinality make-cardinality :histogram make-histogram (throw (ex-info (str "invalid aggregation type: " (pr-str agg-type)) {})))] (cond-> {agg-key (agg-fn root-agg)} (seq aggs) (assoc :aggs (make-aggregation aggs))))) (defn format-agg-result [agg-type {:keys [value buckets] :as _metric-res}] (case agg-type :cardinality value :topn (map #(array-map :key (:key %) :value (:doc_count %)) buckets) :histogram (map #(array-map :key (:key_as_string %) :value (:doc_count %)) buckets))) (s/defn handle-aggregate "Generate an ES aggregation handler for given schema" [{:keys [conn index] :as es-conn-state} :- ESConnState search-query :- SearchQuery {:keys [agg-type] :as agg-query} :- AggQuery ident] (let [query (make-search-query es-conn-state search-query ident) agg (make-aggregation (assoc agg-query :agg-key :metric)) es-res (ductile.doc/query conn index query agg {:limit 0})] (format-agg-result agg-type (get-in es-res [:aggs :metric])))) </span></div> </div></div> </td> </tr><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610247"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">1474dc7693ddebbfcbd686f2889a6603019a6cd4face2891224b437c797d7c3a</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">argp/bap</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">bench_map.ml</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "> cd .. & & ocamlbuild benchsuite / bench_map.native & & _ build / benchsuite / bench_map.native (* The purpose of this test is to compare different implementation of the Map associative data structure. 
*) let total_length = 500_000 let (%) = BatPervasives.(%) module MapBench (M : sig val input_length : int end) = struct let input_length = M.input_length let nb_iter = max 10 (total_length / input_length) let () = Printf.printf "%d iterations\n" nb_iter let random_key () = Random.int input_length let random_value () = Random.int input_length let random_inputs random_elt () = BatList.init input_length (fun _ -> random_elt ()) let make_samples input tests () = Bench.bench_funs tests input we do n't use BatInt to ensure that the same comparison function is used ( PMap use Pervasives.compare by default ) , in order to have comparable performance results . is used (PMap use Pervasives.compare by default), in order to have comparable performance results. *) module StdMap = BatMap.Make(struct type t = int let compare = compare end) module Map = BatMap let same_elts stdmap pmap = BatList.of_enum (StdMap.enum stdmap) = BatList.of_enum (Map.enum pmap) (* A benchmark for key insertion *) let create_std_map input = List.fold_left (fun t (k, v) -> StdMap.add k v t) StdMap.empty input let create_poly_map input = List.fold_left (fun t (k, v) -> Map.add k v t) Map.empty input let create_input = let keys = random_inputs random_key () in let values = random_inputs random_value () in BatList.combine keys values let std_created_map = create_std_map create_input let poly_created_map = create_poly_map create_input let () = assert (same_elts std_created_map poly_created_map) let samples_create = make_samples create_input [ "stdmap create", ignore % create_std_map; "pmap create", ignore % create_poly_map ] (* A benchmark for fast import *) let import_std_map input = StdMap.of_enum (BatList.enum input) let import_poly_map input = Map.of_enum (BatList.enum input) let import_input = create_input let () = let std_imported_map = import_std_map import_input in assert (same_elts std_imported_map poly_created_map); let poly_imported_map = import_poly_map import_input in assert (same_elts std_created_map poly_imported_map); () let samples_import = make_samples import_input [ "stdmap import", ignore % import_std_map; "pmap import", ignore % import_poly_map ] (* A benchmark for key lookup *) let lookup_input = random_inputs random_key () let lookup_std_map input = List.iter (fun k -> ignore (StdMap.mem k std_created_map)) input let lookup_poly_map input = List.iter (fun k -> ignore (Map.mem k poly_created_map)) input let samples_lookup = make_samples lookup_input [ "stdmap lookup", lookup_std_map; "pmap lookup", lookup_poly_map ] (* A benchmark for key removal *) let remove_input = random_inputs random_key () let remove_std_map input = List.fold_left (fun t k -> StdMap.remove k t) std_created_map input let remove_poly_map input = List.fold_left (fun t k -> Map.remove k t) poly_created_map input let () = assert (same_elts (remove_std_map remove_input) (remove_poly_map remove_input)) let samples_remove = make_samples remove_input [ "stdmap remove", ignore % remove_std_map; "pmap remove", ignore % remove_poly_map ] (* A benchmark for merging *) let random_pairlist () = BatList.combine (random_inputs random_key ()) (random_inputs random_value ()) let p1 = random_pairlist () let p2 = random_pairlist () let merge_fun k a b = if k mod 2 = 0 then None else Some () let merge_std_map = let m1 = StdMap.of_enum (BatList.enum p1) in let m2 = StdMap.of_enum (BatList.enum p2) in fun () -> StdMap.merge merge_fun m1 m2 let merge_poly_map = let m1 = Map.of_enum (BatList.enum p1) in let m2 = Map.of_enum (BatList.enum p2) in fun () -> 
Map.merge merge_fun m1 m2 let samples_merge = make_samples () [ "stdmap merge", ignore % merge_std_map; "pmap merge", ignore % merge_poly_map; ] (* compare fold-based and merge-based union, diff, intersect *) let pmap_union (m1, m2) = Map.union m1 m2 let fold_union (m1, m2) = Map.foldi Map.add m1 m2 let merge_union (m1, m2) = let merge_fun k a b = if a <> None then a else b in Map.merge merge_fun m1 m2 let union_input = let m1 = Map.of_enum (BatList.enum p1) in let m2 = Map.of_enum (BatList.enum p2) in m1, m2 let () = let li m = BatList.of_enum (Map.enum m) in let test impl_union = li (pmap_union union_input) = li (impl_union union_input) in assert (test fold_union); assert (test merge_union); () let samples_union = make_samples union_input [ "pmap union", ignore % pmap_union; "fold-based union", ignore % fold_union; "merge-based union", ignore % merge_union; ] let pmap_diff (m1, m2) = Map.diff m1 m2 let fold_diff (m1, m2) = Map.foldi (fun k _ acc -> Map.remove k acc) m2 m1 let merge_diff (m1, m2) = let merge_fun k a b = if b <> None then None else a in Map.merge merge_fun m1 m2 let diff_input = let m1 = Map.of_enum (BatList.enum p1) in let m2 = Map.of_enum (BatList.enum p2) in m1, m2 let () = let li m = BatList.of_enum (Map.enum m) in let test impl_diff = li (pmap_diff diff_input) = li (impl_diff diff_input) in assert (test fold_diff); assert (test merge_diff); () let samples_diff = make_samples diff_input [ "pmap diff", ignore % pmap_diff; "fold-based diff", ignore % fold_diff; "merge-based diff", ignore % merge_diff; ] let pmap_intersect f (m1, m2) = Map.intersect f m1 m2 let filter_intersect f (m1, m2) = let filter_fun k v1 = match try Some (Map.find k m2) with Not_found -> None with | None -> None | Some v2 -> Some (f v1 v2) in Map.filter_map filter_fun m1 let merge_intersect f (m1, m2) = let merge_fun k a b = match a, b with | Some v1, Some v2 -> Some (f v1 v2) | None, _ | _, None -> None in Map.merge merge_fun m1 m2 let intersect_input = let m1 = Map.of_enum (BatList.enum p1) in let m2 = Map.of_enum (BatList.enum p2) in m1, m2 let () = let li m = BatList.of_enum (Map.enum m) in let test impl_intersect = li (pmap_intersect (-) intersect_input) = li (impl_intersect (-) intersect_input) in assert (test filter_intersect); assert (test merge_intersect); () let samples_intersect = make_samples intersect_input [ "pmap intersect", ignore % pmap_intersect (-); "filter-based intersect", ignore % filter_intersect (-); "merge-based intersect", ignore % merge_intersect (-); ] let () = let create = samples_create () in let import = samples_import () in let lookup = samples_lookup () in let remove = samples_remove () in let merge = samples_merge () in let union = samples_union () in let diff = samples_diff () in let intersect = samples_intersect () in List.iter (print_newline % Bench.summarize) [ create; import; lookup; remove; merge; union; diff; intersect; ] end let big_length = 100_000 let small_length = 500 let () = Printf.printf "Test with small maps (length = %d)\n%!" small_length; let () = let module M = MapBench(struct let input_length = small_length end) in () in print_newline (); print_newline (); Printf.printf "Test with big maps (length = %d)\n%!" 
big_length; Bench.config.Bench.samples <- 100; let () = let module M = MapBench(struct let input_length = big_length end) in () in ()

Source: https://raw.githubusercontent.com/argp/bap/2f60a35e822200a1ec50eea3a947a322b45da363/batteries/benchsuite/bench_map.ml
Language: ocaml

Repository: Oblosys/proxima
File: SemHsTokens.hs

UUAGC 0.9.10 ( SemHsTokens.ag ) module SemHsTokens where import qualified Data.Sequence as Seq import Data.Sequence(Seq,empty,singleton,(><)) import Data.Foldable(toList) import Pretty import TokenDef import HsToken import ErrorMessages import CommonTypes import UU.Scanner.Position(Pos) isNTname allnts (Just (NT nt _)) = nt `elem` allnts isNTname allnts _ = False ----------------------------------------------------- {- visit 0: inherited attributes: allfields : [(Identifier,Type,Bool)] allnts : [Identifier] attrs : [(Identifier,Identifier)] con : Identifier fieldnames : [Identifier] nt : Identifier synthesized attributes: errors : Seq Error tok : (Pos,String) usedAttrs : [(Identifier,Identifier)] usedFields : Seq Identifier usedLocals : [Identifier] alternatives: alternative AGField: child field : {Identifier} child attr : {Identifier} child pos : {Pos} child rdesc : {Maybe String} visit 0: local addTrace : _ alternative AGLocal: child var : {Identifier} child pos : {Pos} child rdesc : {Maybe String} visit 0: local _tup1 : _ local errors : _ local tok : _ local usedLocals : _ alternative CharToken: child value : {String} child pos : {Pos} alternative Err: child mesg : {String} child pos : {Pos} alternative HsToken: child value : {String} child pos : {Pos} alternative StrToken: child value : {String} child pos : {Pos} -} -- cata sem_HsToken :: HsToken -> T_HsToken sem_HsToken (AGField _field _attr _pos _rdesc ) = (sem_HsToken_AGField _field _attr _pos _rdesc ) sem_HsToken (AGLocal _var _pos _rdesc ) = 
(sem_HsToken_AGLocal _var _pos _rdesc ) sem_HsToken (CharToken _value _pos ) = (sem_HsToken_CharToken _value _pos ) sem_HsToken (Err _mesg _pos ) = (sem_HsToken_Err _mesg _pos ) sem_HsToken (HsToken _value _pos ) = (sem_HsToken_HsToken _value _pos ) sem_HsToken (StrToken _value _pos ) = (sem_HsToken_StrToken _value _pos ) -- semantic domain newtype T_HsToken = T_HsToken (([(Identifier,Type,Bool)]) -> ([Identifier]) -> ([(Identifier,Identifier)]) -> Identifier -> ([Identifier]) -> Identifier -> ( (Seq Error),((Pos,String)),([(Identifier,Identifier)]),(Seq Identifier),([Identifier]))) data Inh_HsToken = Inh_HsToken {allfields_Inh_HsToken :: [(Identifier,Type,Bool)],allnts_Inh_HsToken :: [Identifier],attrs_Inh_HsToken :: [(Identifier,Identifier)],con_Inh_HsToken :: Identifier,fieldnames_Inh_HsToken :: [Identifier],nt_Inh_HsToken :: Identifier} data Syn_HsToken = Syn_HsToken {errors_Syn_HsToken :: Seq Error,tok_Syn_HsToken :: (Pos,String),usedAttrs_Syn_HsToken :: [(Identifier,Identifier)],usedFields_Syn_HsToken :: Seq Identifier,usedLocals_Syn_HsToken :: [Identifier]} wrap_HsToken :: T_HsToken -> Inh_HsToken -> Syn_HsToken wrap_HsToken (T_HsToken sem ) (Inh_HsToken _lhsIallfields _lhsIallnts _lhsIattrs _lhsIcon _lhsIfieldnames _lhsInt ) = (let ( _lhsOerrors,_lhsOtok,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals) = (sem _lhsIallfields _lhsIallnts _lhsIattrs _lhsIcon _lhsIfieldnames _lhsInt ) in (Syn_HsToken _lhsOerrors _lhsOtok _lhsOusedAttrs _lhsOusedFields _lhsOusedLocals )) sem_HsToken_AGField :: Identifier -> Identifier -> Pos -> (Maybe String) -> T_HsToken sem_HsToken_AGField field_ attr_ pos_ rdesc_ = (T_HsToken (\ _lhsIallfields _lhsIallnts _lhsIattrs _lhsIcon _lhsIfieldnames _lhsInt -> (let _lhsOerrors :: (Seq Error) _lhsOusedAttrs :: ([(Identifier,Identifier)]) _lhsOtok :: ((Pos,String)) _lhsOusedFields :: (Seq Identifier) _lhsOusedLocals :: ([Identifier]) " SemHsTokens.ag"(line 74 , column 15 ) _lhsOerrors = if (field_,attr_) `elem` _lhsIattrs then Seq.empty else if not(field_ `elem` (_LHS : _LOC: _lhsIfieldnames)) then Seq.singleton (UndefChild _lhsInt _lhsIcon field_) else Seq.singleton (UndefAttr _lhsInt _lhsIcon field_ attr_ False) " SemHsTokens.ag"(line 88 , column 13 ) _lhsOusedAttrs = [(field_,attr_)] " SemHsTokens.ag"(line 115 , column 8) _addTrace = case rdesc_ of Just d -> \x -> "(trace " ++ show (d ++ " -> " ++ show field_ ++ "." 
++ show attr_) ++ " (" ++ x ++ "))" Nothing -> id " SemHsTokens.ag"(line 118 , column 8) _lhsOtok = (pos_, _addTrace $ attrname True field_ attr_) use rule " SemHsTokens.ag"(line 93 , column 40 ) _lhsOusedFields = Seq.empty use rule " SemHsTokens.ag"(line 84 , column 40 ) _lhsOusedLocals = [] in ( _lhsOerrors,_lhsOtok,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals))) ) sem_HsToken_AGLocal :: Identifier -> Pos -> (Maybe String) -> T_HsToken sem_HsToken_AGLocal var_ pos_ rdesc_ = (T_HsToken (\ _lhsIallfields _lhsIallnts _lhsIattrs _lhsIcon _lhsIfieldnames _lhsInt -> (let _lhsOusedFields :: (Seq Identifier) _lhsOerrors :: (Seq Error) _lhsOusedAttrs :: ([(Identifier,Identifier)]) _lhsOusedLocals :: ([Identifier]) _lhsOtok :: ((Pos,String)) " SemHsTokens.ag"(line 64 , column 19 ) __tup1 = if var_ `elem` _lhsIfieldnames then if isNTname _lhsIallnts (lookup var_ (map (\(n,t,_) -> (n,t)) _lhsIallfields)) then (Seq.singleton(ChildAsLocal _lhsInt _lhsIcon var_), (pos_,fieldname var_), [] ) else (Seq.empty, (pos_,fieldname var_), [] ) else if (_LOC,var_) `elem` _lhsIattrs then (Seq.empty , (pos_,locname var_), [var_]) else (Seq.singleton(UndefLocal _lhsInt _lhsIcon var_), (pos_,locname var_), [] ) " SemHsTokens.ag"(line 64 , column 19 ) (_errors,_,_) = __tup1 " SemHsTokens.ag"(line 64 , column 19 ) (_,_tok,_) = __tup1 " SemHsTokens.ag"(line 64 , column 19 ) (_,_,_usedLocals) = __tup1 " SemHsTokens.ag"(line 96 , column 13 ) _lhsOusedFields = if var_ `elem` _lhsIfieldnames then Seq.singleton var_ else Seq.empty use rule " SemHsTokens.ag"(line 43 , column 37 ) _lhsOerrors = _errors use rule " SemHsTokens.ag"(line 85 , column 40 ) _lhsOusedAttrs = [] use rule " SemHsTokens.ag"(line 84 , column 40 ) _lhsOusedLocals = _usedLocals -- copy rule (from local) _lhsOtok = _tok in ( _lhsOerrors,_lhsOtok,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals))) ) sem_HsToken_CharToken :: String -> Pos -> T_HsToken sem_HsToken_CharToken value_ pos_ = (T_HsToken (\ _lhsIallfields _lhsIallnts _lhsIattrs _lhsIcon _lhsIfieldnames _lhsInt -> (let _lhsOtok :: ((Pos,String)) _lhsOerrors :: (Seq Error) _lhsOusedAttrs :: ([(Identifier,Identifier)]) _lhsOusedFields :: (Seq Identifier) _lhsOusedLocals :: ([Identifier]) " SemHsTokens.ag"(line 122 , column 16 ) _lhsOtok = (pos_, if null value_ then "" else showCharShort (head value_) ) use rule " SemHsTokens.ag"(line 43 , column 37 ) _lhsOerrors = Seq.empty use rule " SemHsTokens.ag"(line 85 , column 40 ) _lhsOusedAttrs = [] use rule " SemHsTokens.ag"(line 93 , column 40 ) _lhsOusedFields = Seq.empty use rule " SemHsTokens.ag"(line 84 , column 40 ) _lhsOusedLocals = [] in ( _lhsOerrors,_lhsOtok,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals))) ) sem_HsToken_Err :: String -> Pos -> T_HsToken sem_HsToken_Err mesg_ pos_ = (T_HsToken (\ _lhsIallfields _lhsIallnts _lhsIattrs _lhsIcon _lhsIfieldnames _lhsInt -> (let _lhsOerrors :: (Seq Error) _lhsOtok :: ((Pos,String)) _lhsOusedAttrs :: ([(Identifier,Identifier)]) _lhsOusedFields :: (Seq Identifier) _lhsOusedLocals :: ([Identifier]) " SemHsTokens.ag"(line 50 , column 9 ) _lhsOerrors = let m = text mesg_ in Seq.singleton (CustomError False pos_ m) " SemHsTokens.ag"(line 128 , column 16 ) _lhsOtok = (pos_, "") use rule " SemHsTokens.ag"(line 85 , column 40 ) _lhsOusedAttrs = [] use rule " SemHsTokens.ag"(line 93 , column 40 ) _lhsOusedFields = Seq.empty use rule " SemHsTokens.ag"(line 84 , column 40 ) _lhsOusedLocals = [] in ( _lhsOerrors,_lhsOtok,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals))) ) sem_HsToken_HsToken :: String -> Pos -> 
T_HsToken sem_HsToken_HsToken value_ pos_ = (T_HsToken (\ _lhsIallfields _lhsIallnts _lhsIattrs _lhsIcon _lhsIfieldnames _lhsInt -> (let _lhsOtok :: ((Pos,String)) _lhsOerrors :: (Seq Error) _lhsOusedAttrs :: ([(Identifier,Identifier)]) _lhsOusedFields :: (Seq Identifier) _lhsOusedLocals :: ([Identifier]) " SemHsTokens.ag"(line 120 , column 14 ) _lhsOtok = (pos_, value_) use rule " SemHsTokens.ag"(line 43 , column 37 ) _lhsOerrors = Seq.empty use rule " SemHsTokens.ag"(line 85 , column 40 ) _lhsOusedAttrs = [] use rule " SemHsTokens.ag"(line 93 , column 40 ) _lhsOusedFields = Seq.empty use rule " SemHsTokens.ag"(line 84 , column 40 ) _lhsOusedLocals = [] in ( _lhsOerrors,_lhsOtok,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals))) ) sem_HsToken_StrToken :: String -> Pos -> T_HsToken sem_HsToken_StrToken value_ pos_ = (T_HsToken (\ _lhsIallfields _lhsIallnts _lhsIattrs _lhsIcon _lhsIfieldnames _lhsInt -> (let _lhsOtok :: ((Pos,String)) _lhsOerrors :: (Seq Error) _lhsOusedAttrs :: ([(Identifier,Identifier)]) _lhsOusedFields :: (Seq Identifier) _lhsOusedLocals :: ([Identifier]) " SemHsTokens.ag"(line 127 , column 16 ) _lhsOtok = (pos_, showStrShort value_) use rule " SemHsTokens.ag"(line 43 , column 37 ) _lhsOerrors = Seq.empty use rule " SemHsTokens.ag"(line 85 , column 40 ) _lhsOusedAttrs = [] use rule " SemHsTokens.ag"(line 93 , column 40 ) _lhsOusedFields = Seq.empty use rule " SemHsTokens.ag"(line 84 , column 40 ) _lhsOusedLocals = [] in ( _lhsOerrors,_lhsOtok,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals))) ) HsTokens ---------------------------------------------------- visit 0 : inherited attributes : allfields : [ ( Identifier , Type , ) ] : [ Identifier ] attrs : [ ( Identifier , Identifier ) ] con : Identifier fieldnames : [ Identifier ] nt : Identifier synthesized attributes : errors : Seq Error tks : [ ( Pos , String ) ] : [ ( Identifier , Identifier ) ] usedFields : Seq Identifier usedLocals : [ Identifier ] alternatives : alternative Cons : child hd : child tl : alternative : visit 0: inherited attributes: allfields : [(Identifier,Type,Bool)] allnts : [Identifier] attrs : [(Identifier,Identifier)] con : Identifier fieldnames : [Identifier] nt : Identifier synthesized attributes: errors : Seq Error tks : [(Pos,String)] usedAttrs : [(Identifier,Identifier)] usedFields : Seq Identifier usedLocals : [Identifier] alternatives: alternative Cons: child hd : HsToken child tl : HsTokens alternative Nil: -} -- cata sem_HsTokens :: HsTokens -> T_HsTokens sem_HsTokens list = (Prelude.foldr sem_HsTokens_Cons sem_HsTokens_Nil (Prelude.map sem_HsToken list) ) -- semantic domain newtype T_HsTokens = T_HsTokens (([(Identifier,Type,Bool)]) -> ([Identifier]) -> ([(Identifier,Identifier)]) -> Identifier -> ([Identifier]) -> Identifier -> ( (Seq Error),([(Pos,String)]),([(Identifier,Identifier)]),(Seq Identifier),([Identifier]))) data Inh_HsTokens = Inh_HsTokens {allfields_Inh_HsTokens :: [(Identifier,Type,Bool)],allnts_Inh_HsTokens :: [Identifier],attrs_Inh_HsTokens :: [(Identifier,Identifier)],con_Inh_HsTokens :: Identifier,fieldnames_Inh_HsTokens :: [Identifier],nt_Inh_HsTokens :: Identifier} data Syn_HsTokens = Syn_HsTokens {errors_Syn_HsTokens :: Seq Error,tks_Syn_HsTokens :: [(Pos,String)],usedAttrs_Syn_HsTokens :: [(Identifier,Identifier)],usedFields_Syn_HsTokens :: Seq Identifier,usedLocals_Syn_HsTokens :: [Identifier]} wrap_HsTokens :: T_HsTokens -> Inh_HsTokens -> Syn_HsTokens wrap_HsTokens (T_HsTokens sem ) (Inh_HsTokens _lhsIallfields _lhsIallnts _lhsIattrs _lhsIcon _lhsIfieldnames 
_lhsInt ) = (let ( _lhsOerrors,_lhsOtks,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals) = (sem _lhsIallfields _lhsIallnts _lhsIattrs _lhsIcon _lhsIfieldnames _lhsInt ) in (Syn_HsTokens _lhsOerrors _lhsOtks _lhsOusedAttrs _lhsOusedFields _lhsOusedLocals )) sem_HsTokens_Cons :: T_HsToken -> T_HsTokens -> T_HsTokens sem_HsTokens_Cons (T_HsToken hd_ ) (T_HsTokens tl_ ) = (T_HsTokens (\ _lhsIallfields _lhsIallnts _lhsIattrs _lhsIcon _lhsIfieldnames _lhsInt -> (let _lhsOtks :: ([(Pos,String)]) _lhsOerrors :: (Seq Error) _lhsOusedAttrs :: ([(Identifier,Identifier)]) _lhsOusedFields :: (Seq Identifier) _lhsOusedLocals :: ([Identifier]) _hdOallfields :: ([(Identifier,Type,Bool)]) _hdOallnts :: ([Identifier]) _hdOattrs :: ([(Identifier,Identifier)]) _hdOcon :: Identifier _hdOfieldnames :: ([Identifier]) _hdOnt :: Identifier _tlOallfields :: ([(Identifier,Type,Bool)]) _tlOallnts :: ([Identifier]) _tlOattrs :: ([(Identifier,Identifier)]) _tlOcon :: Identifier _tlOfieldnames :: ([Identifier]) _tlOnt :: Identifier _hdIerrors :: (Seq Error) _hdItok :: ((Pos,String)) _hdIusedAttrs :: ([(Identifier,Identifier)]) _hdIusedFields :: (Seq Identifier) _hdIusedLocals :: ([Identifier]) _tlIerrors :: (Seq Error) _tlItks :: ([(Pos,String)]) _tlIusedAttrs :: ([(Identifier,Identifier)]) _tlIusedFields :: (Seq Identifier) _tlIusedLocals :: ([Identifier]) " SemHsTokens.ag"(line 110 , column 10 ) _lhsOtks = _hdItok : _tlItks use rule " SemHsTokens.ag"(line 43 , column 37 ) _lhsOerrors = _hdIerrors Seq.>< _tlIerrors use rule " SemHsTokens.ag"(line 85 , column 40 ) _lhsOusedAttrs = _hdIusedAttrs ++ _tlIusedAttrs use rule " SemHsTokens.ag"(line 93 , column 40 ) _lhsOusedFields = _hdIusedFields Seq.>< _tlIusedFields use rule " SemHsTokens.ag"(line 84 , column 40 ) _lhsOusedLocals = _hdIusedLocals ++ _tlIusedLocals -- copy rule (down) _hdOallfields = _lhsIallfields -- copy rule (down) _hdOallnts = _lhsIallnts -- copy rule (down) _hdOattrs = _lhsIattrs -- copy rule (down) _hdOcon = _lhsIcon -- copy rule (down) _hdOfieldnames = _lhsIfieldnames -- copy rule (down) _hdOnt = _lhsInt -- copy rule (down) _tlOallfields = _lhsIallfields -- copy rule (down) _tlOallnts = _lhsIallnts -- copy rule (down) _tlOattrs = _lhsIattrs -- copy rule (down) _tlOcon = _lhsIcon -- copy rule (down) _tlOfieldnames = _lhsIfieldnames -- copy rule (down) _tlOnt = _lhsInt ( _hdIerrors,_hdItok,_hdIusedAttrs,_hdIusedFields,_hdIusedLocals) = (hd_ _hdOallfields _hdOallnts _hdOattrs _hdOcon _hdOfieldnames _hdOnt ) ( _tlIerrors,_tlItks,_tlIusedAttrs,_tlIusedFields,_tlIusedLocals) = (tl_ _tlOallfields _tlOallnts _tlOattrs _tlOcon _tlOfieldnames _tlOnt ) in ( _lhsOerrors,_lhsOtks,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals))) ) sem_HsTokens_Nil :: T_HsTokens sem_HsTokens_Nil = (T_HsTokens (\ _lhsIallfields _lhsIallnts _lhsIattrs _lhsIcon _lhsIfieldnames _lhsInt -> (let _lhsOtks :: ([(Pos,String)]) _lhsOerrors :: (Seq Error) _lhsOusedAttrs :: ([(Identifier,Identifier)]) _lhsOusedFields :: (Seq Identifier) _lhsOusedLocals :: ([Identifier]) " SemHsTokens.ag"(line 111 , column 10 ) _lhsOtks = [] use rule " SemHsTokens.ag"(line 43 , column 37 ) _lhsOerrors = Seq.empty use rule " SemHsTokens.ag"(line 85 , column 40 ) _lhsOusedAttrs = [] use rule " SemHsTokens.ag"(line 93 , column 40 ) _lhsOusedFields = Seq.empty use rule " SemHsTokens.ag"(line 84 , column 40 ) _lhsOusedLocals = [] in ( _lhsOerrors,_lhsOtks,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals))) ) HsTokensRoot ------------------------------------------------ visit 0 : inherited attributes : allfields : [ ( 
Identifier , Type , ) ] : [ Identifier ] attrs : [ ( Identifier , Identifier ) ] con : Identifier nt : Identifier synthesized attributes : errors : Seq Error textLines : [ String ] : [ ( Identifier , Identifier ) ] usedFields : [ Identifier ] usedLocals : [ Identifier ] alternatives : alternative HsTokensRoot : child tokens : HsTokens visit 0: inherited attributes: allfields : [(Identifier,Type,Bool)] allnts : [Identifier] attrs : [(Identifier,Identifier)] con : Identifier nt : Identifier synthesized attributes: errors : Seq Error textLines : [String] usedAttrs : [(Identifier,Identifier)] usedFields : [Identifier] usedLocals : [Identifier] alternatives: alternative HsTokensRoot: child tokens : HsTokens -} -- cata sem_HsTokensRoot :: HsTokensRoot -> T_HsTokensRoot sem_HsTokensRoot (HsTokensRoot _tokens ) = (sem_HsTokensRoot_HsTokensRoot (sem_HsTokens _tokens ) ) -- semantic domain newtype T_HsTokensRoot = T_HsTokensRoot (([(Identifier,Type,Bool)]) -> ([Identifier]) -> ([(Identifier,Identifier)]) -> Identifier -> Identifier -> ( (Seq Error),([String]),([(Identifier,Identifier)]),([Identifier]),([Identifier]))) data Inh_HsTokensRoot = Inh_HsTokensRoot {allfields_Inh_HsTokensRoot :: [(Identifier,Type,Bool)],allnts_Inh_HsTokensRoot :: [Identifier],attrs_Inh_HsTokensRoot :: [(Identifier,Identifier)],con_Inh_HsTokensRoot :: Identifier,nt_Inh_HsTokensRoot :: Identifier} data Syn_HsTokensRoot = Syn_HsTokensRoot {errors_Syn_HsTokensRoot :: Seq Error,textLines_Syn_HsTokensRoot :: [String],usedAttrs_Syn_HsTokensRoot :: [(Identifier,Identifier)],usedFields_Syn_HsTokensRoot :: [Identifier],usedLocals_Syn_HsTokensRoot :: [Identifier]} wrap_HsTokensRoot :: T_HsTokensRoot -> Inh_HsTokensRoot -> Syn_HsTokensRoot wrap_HsTokensRoot (T_HsTokensRoot sem ) (Inh_HsTokensRoot _lhsIallfields _lhsIallnts _lhsIattrs _lhsIcon _lhsInt ) = (let ( _lhsOerrors,_lhsOtextLines,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals) = (sem _lhsIallfields _lhsIallnts _lhsIattrs _lhsIcon _lhsInt ) in (Syn_HsTokensRoot _lhsOerrors _lhsOtextLines _lhsOusedAttrs _lhsOusedFields _lhsOusedLocals )) sem_HsTokensRoot_HsTokensRoot :: T_HsTokens -> T_HsTokensRoot sem_HsTokensRoot_HsTokensRoot (T_HsTokens tokens_ ) = (T_HsTokensRoot (\ _lhsIallfields _lhsIallnts _lhsIattrs _lhsIcon _lhsInt -> (let _tokensOfieldnames :: ([Identifier]) _lhsOusedFields :: ([Identifier]) _lhsOtextLines :: ([String]) _lhsOerrors :: (Seq Error) _lhsOusedAttrs :: ([(Identifier,Identifier)]) _lhsOusedLocals :: ([Identifier]) _tokensOallfields :: ([(Identifier,Type,Bool)]) _tokensOallnts :: ([Identifier]) _tokensOattrs :: ([(Identifier,Identifier)]) _tokensOcon :: Identifier _tokensOnt :: Identifier _tokensIerrors :: (Seq Error) _tokensItks :: ([(Pos,String)]) _tokensIusedAttrs :: ([(Identifier,Identifier)]) _tokensIusedFields :: (Seq Identifier) _tokensIusedLocals :: ([Identifier]) " SemHsTokens.ag"(line 38 , column 18 ) _tokensOfieldnames = map (\(n,_,_) -> n) _lhsIallfields " SemHsTokens.ag"(line 100 , column 18 ) _lhsOusedFields = toList _tokensIusedFields " SemHsTokens.ag"(line 107 , column 18 ) _lhsOtextLines = showTokens _tokensItks use rule " SemHsTokens.ag"(line 18 , column 18 ) _lhsOerrors = _tokensIerrors -- copy rule (up) _lhsOusedAttrs = _tokensIusedAttrs -- copy rule (up) _lhsOusedLocals = _tokensIusedLocals -- copy rule (down) _tokensOallfields = _lhsIallfields -- copy rule (down) _tokensOallnts = _lhsIallnts -- copy rule (down) _tokensOattrs = _lhsIattrs -- copy rule (down) _tokensOcon = _lhsIcon -- copy rule (down) _tokensOnt = _lhsInt ( 
_tokensIerrors,_tokensItks,_tokensIusedAttrs,_tokensIusedFields,_tokensIusedLocals) = (tokens_ _tokensOallfields _tokensOallnts _tokensOattrs _tokensOcon _tokensOfieldnames _tokensOnt ) in ( _lhsOerrors,_lhsOtextLines,_lhsOusedAttrs,_lhsOusedFields,_lhsOusedLocals))) )

Source: https://raw.githubusercontent.com/Oblosys/proxima/f154dff2ccb8afe00eeb325d9d06f5e2a5ee7589/uuagc/src-derived/SemHsTokens.hs
Language: haskell
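The generated module exposes the standard UUAGC wrapper pattern visible above: the catamorphism sem_HsTokensRoot turns an HsTokensRoot tree into a semantic function, Inh_HsTokensRoot carries the inherited attributes (allfields, allnts, attrs, con, nt), and Syn_HsTokensRoot returns the synthesized attributes (errors, textLines, usedAttrs, usedFields, usedLocals). The driver below is a minimal sketch, not part of the generated file; it assumes SemHsTokens and its imports (CommonTypes, ErrorMessages, Data.Sequence) are in scope, and the name evalHsTokensRoot is hypothetical.

-- Hypothetical driver (not from SemHsTokens.hs): supply the inherited
-- attributes, run the semantic function, and read back the errors and the
-- rendered text lines.
evalHsTokensRoot :: HsTokensRoot
                 -> [(Identifier,Type,Bool)]   -- allfields
                 -> [Identifier]               -- allnts
                 -> [(Identifier,Identifier)]  -- attrs
                 -> Identifier                 -- con
                 -> Identifier                 -- nt
                 -> (Seq Error, [String])
evalHsTokensRoot root allfields allnts attrs con nt =
  let syn = wrap_HsTokensRoot (sem_HsTokensRoot root)
                              (Inh_HsTokensRoot allfields allnts attrs con nt)
  in (errors_Syn_HsTokensRoot syn, textLines_Syn_HsTokensRoot syn)

The same shape applies to the other nonterminals: wrap_HsToken and wrap_HsTokens pair their T_* semantic functions with the corresponding Inh_* records, which additionally carry the fieldnames attribute that HsTokensRoot computes internally from allfields.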
dir="auto"> <div> <span class="block ">9f01966b071397927bcb7d951499d0e2382d81176ce11c2fd6eaa3a32be64223</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">alex-gutev/tridash</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">macros.lisp</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "> macros.lisp ;;;; ;;;; Tridash Programming Language. Copyright ( C ) 2019 - 2021 ;;;; ;;;; This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation , either version 3 of the License , or ;;;; (at your option) any later version. ;;;; ;;;; This program is distributed in the hope that it will be useful, ;;;; but WITHOUT ANY WARRANTY; without even the implied warranty of ;;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ;;;; GNU General Public License for more details. ;;;; You should have received a copy of the GNU General Public License ;;;; along with this program. If not, see </>. ;;;; User-Defined Macro Tests (defpackage :tridash/test.macros (:use :generic-cl :alexandria :anaphora :arrows :iterate :optima :named-readtables :tridash.parser :tridash.frontend :fiveam :tridash/test :tridash/test.util) (:shadowing-import-from :generic-cl :emptyp :multiply :accumulate) (:shadowing-import-from :fiveam :fail) (:import-from :lol :defmacro! :lol-syntax) (:import-from :tridash.frontend :tridash->cl-function :call-meta-node :call-tridash-meta-node :call-node :thunk :resolve :resolve% :tridash-fail :fail-thunk :+empty-list+ :group-rest-args :check-arity :correct-arity?% :fail-arity-error :+optional-argument+ :+rest-argument+)) (in-package :tridash/test.macros) (in-readtable lol-syntax) ;;; Test Suite Definition (def-suite macros :description "Test user-defined Tridash macros." :in frontend) (in-suite macros) Utilities (defun functor (operator &rest arguments) "Creates a `FUNCTOR-EXPRESSION' with operator OPERATOR and arguments ARGUMENTS." (functor-expression operator arguments)) (defun expression= (expected got) "Checks that the CL expression GOT is equal to EXPECTED. Symbols in EXPECTED, beginning with $, are replaced with the symbol in GOT corresponding to the first occurrence." (let ((aliases (make-hash-map))) (flet ((equal? (got expected) (match* (got expected) (((type symbol) (type symbol)) (= got (cond ((starts-with #\$ (symbol-name expected)) (ensure-get expected aliases got)) ((starts-with #\! (symbol-name expected)) (id-symbol (subseq (symbol-name expected) 1))) (t expected)))) ((_ _) (= got expected))))) (tree-equal got expected :test #'equal?)))) (defmacro with-external-meta-nodes ((&rest names) &body body) "Creates `EXTERNAL-META-NODE's with names NAMES and binds to variables with the same identifiers as the names upcased." `(let ,(map #`(,(intern (string-upcase a1)) (make-instance 'external-meta-node :name (id-symbol ,a1))) names) ,@body)) (defmacro! with-core-nodes ((&rest names) &body body) "Builds the core module and binds the node with names NAMES to variables with the same identifiers as the names, upcased." 
`(with-module-table ,g!modules (build-core-module) (with-nodes ,(map #`(,(intern (string-upcase a1)) ,a1) names) ,g!modules ,@body))) (defmacro mock-meta-node ((&rest operands) expression) "Creates a `META-NODE' which takes operands OPERANDS and has a value function consisting of EXPRESSION. OPERANDS is a list of symbols naming the dependency nodes. EXPRESSION is evaluated in an environment where each symbol in OPERANDS is bound to the `NODE-LINK' object corresponding to the operand, and the symbol SELF is bound to the `META-NODE' object." (flet ((make-operand (operand) (match operand ((or (list 'optional symb value) (list 'optional symb)) (list +optional-argument+ (make-instance 'node :name symb) value)) ((list 'rest symb) (list +rest-argument+ (make-instance 'node :name symb))) (_ (make-instance 'node :name operand)))) (operand-node (operand) (match operand ((list* 'optional symb _) symb) ((list 'rest symb) symb) (_ operand)))) `(let ((self (make-instance 'final-meta-node :name 'test-meta-node :operands ',(map #'make-operand operands))) ,@(map #`(,a1 (node-link (make-instance 'node :name ',a1))) (map #'operand-node operands))) ;; Create an empty `FLAT-NODE-TABLE' to mark meta-node as ;; already built (setf (definition self) (make-instance 'flat-node-table :nodes (make-hash-set))) (setf (value-function (context self nil)) ,expression) ,@(map #`(setf (get ',a1 (operands (context self nil))) ,a1) (map #'operand-node operands)) ,@(map #`(setf (get ',a1 (dependencies self)) ,a1) (map #'operand-node operands)) self))) (defmacro test-compile-meta-node ((&rest operands) expression args body) "Creates and compiles a `META-NODE' to a CL LAMBDA expression and checks that it has arguments ARGS and body BODY, by EXPRESSION=. OPERANDS and EXPRESSION correspond to the OPERANDS and EXPRESSION arguments of MOCK-META-NODE. ARGS (not evaluated) is the expected lambda-list of the function. BODY is the expected body expression within the BLOCK, TAGBODY, RETURN expression. The symbol $recur, occurring in BODY is substituted with the TAGBODY tag for tail-recursive self calls. BODY is evaluated in an environment in which the symbol SELF is bound to the `META-NODE' object." (flet ((lambda-args (lambda-list) (->> (remove-if (rcurry #'memberp lambda-list-keywords) lambda-list) (map #'ensure-car) (map (compose #'gensym #'symbol-name))))) `(let ((self (mock-meta-node ,operands ,expression))) (is (expression= `(lambda ,',args (declare (ignorable ,@',(lambda-args args))) ,,body) (tridash->cl-function self)))))) Tridash to CL Compilation Tests (test compile-functor-expression "Test compilation of functor expressions to CL." (with-core-nodes ("if" "<" "-") (test-compile-meta-node (a b) (functor if (functor < a b) (functor - b a) (functor - a b)) ($a $b) '(let nil (!|if| (!< $a $b) (thunk (!- $b $a)) (thunk (!- $a $b))))))) (test compile-if-expression "Test compilation of if expressions to CL." (with-core-nodes ("<" "-") (test-compile-meta-node (a b) (if-expression (functor < a b) (functor - b a) (functor - a b)) ($a $b) '(let nil (!|if| (!< $a $b) (thunk (!- $b $a)) (thunk (!- $a $b))))))) (test compile-object-expression "Test compilation of object expressions to CL." (with-core-nodes ("+" "-") (test-compile-meta-node (x y) (object-expression `((sum ,(functor + x y)) (diff ,(functor - x y)))) ($x $y) '(let nil (alist-hash-map (list (cons 'sum (thunk (!+ $x $y))) (cons 'diff (thunk (!- $x $y))))))))) (test compile-member-expression "Test compilation of member expressions to CL." 
(test-compile-meta-node (object) (member-expression (member-expression object 'key1) 'key2) ($obj) '(let nil (!|member| (!|member| $obj 'key1) 'key2)))) (test compile-catch-expression "Test compilation of catch expressions to CL." (with-core-nodes ("/" "*") (test-compile-meta-node (a b) (catch-expression (functor / a b) (functor * a b)) ($a $b) '(let nil (!|catch| (!/ $a $b) (thunk (!* $a $b))))))) (test compile-fail-expression "Test compilation of fail expressions to CL." (test-compile-meta-node () (fail-expression) () '(let nil (!|fail|)))) (test compile-expression-block "Test compilation of expression blocks, with reference count = 1, to CL." (with-core-nodes ("+") (test-compile-meta-node (a) (expression-block (functor + a 1)) ($a) '(let nil (!+ $a 1))))) (test compile-expression-block-muliple-references "Test compilation of expression blocks, with reference count > 1, to CL." (with-core-nodes ("+") (test-compile-meta-node (a) (let ((block (expression-block (functor + a 1) :count 2))) (functor + block block)) ($a) '(let ($a+1) (setf $a+1 (thunk (!+ $a 1))) (!+ $a+1 $a+1))))) (test compile-meta-node-call "Test compilation of calls to other meta-nodes, to CL." (with-core-nodes ("-") (let ((meta-node (mock-meta-node (a) a))) (test-compile-meta-node (a) (functor meta-node (functor - a)) ($a) `(let nil (call-tridash-meta-node ,meta-node (list (!- $a)))))))) (test compile-higher-order-external-meta-node "Test compilation of higher order external meta-node." (with-core-nodes ("not") (let ((apply (mock-meta-node (f x) (functor f x)))) (test-compile-meta-node (x) (functor apply (meta-node-ref not) x) ($x) `(let nil (call-tridash-meta-node ,apply (list #'(lambda (&rest $args) (if (correct-arity?% '(1 . 1) (length $args)) (apply #'!|not| $args) (fail-arity-error))) $x))))))) (test compile-higher-order-if-meta-node "Test compilation of higher order if meta-node." (with-core-nodes ("if") (let ((apply (mock-meta-node (f x y z) (functor f x y z)))) (test-compile-meta-node (x y z) (functor apply (meta-node-ref if) x y z) ($x $y $z) `(let nil (call-tridash-meta-node ,apply (list #'(lambda (&rest $args) (if (correct-arity?% '(2 . 3) (length $args)) (apply #'!|if| $args) (fail-arity-error))) $x $y $z))))))) (test compile-higher-order-and-meta-node "Test compilation of higher order `and` meta-node." (with-core-nodes ("and") (let ((apply (mock-meta-node (f x y) (functor f x y)))) (test-compile-meta-node (x y) (functor apply (meta-node-ref and) x y) ($x $y) `(let nil (call-tridash-meta-node ,apply (list #'(lambda (&rest $args) (if (correct-arity?% '(2 . 2) (length $args)) (apply #'!|and| $args) (fail-arity-error))) $x $y))))))) (test compile-higher-order-or-meta-node "Test compilation of higher order `or` meta-node." (with-core-nodes ("or") (let ((apply (mock-meta-node (f x y) (functor f x y)))) (test-compile-meta-node (x y) (functor apply (meta-node-ref or) x y) ($x $y) `(let nil (call-tridash-meta-node ,apply (list #'(lambda (&rest $args) (if (correct-arity?% '(2 . 2) (length $args)) (apply #'!|or| $args) (fail-arity-error))) $x $y))))))) (test compile-higher-order-meta-node "Test compilation of higher-order user defined meta-node." (let ((apply (mock-meta-node (f x) (functor f x))) (f (mock-meta-node (x) x))) (test-compile-meta-node (x) (functor apply (meta-node-ref f) x) ($x) `(let nil (call-tridash-meta-node ,apply (list #'(lambda (&rest $args) (if (correct-arity?% '(1 . 
1) (length $args)) (destructuring-bind ($x2) $args (call-tridash-meta-node ,f (list $x2))) (fail-arity-error))) $x)))))) (test compile-higher-order-meta-node-optional-arguments "Test compilation of higher-order meta-node with optional arguments." (let ((apply (mock-meta-node (f x) (functor f x))) (f (mock-meta-node (x (optional y) (optional z)) x))) (test-compile-meta-node (x) (functor apply (meta-node-ref f :optional (list 1 2)) x) ($x) `(let nil (call-tridash-meta-node ,apply (list #'(lambda (&rest $args) (if (correct-arity?% '(1 . 3) (length $args)) (destructuring-bind ($x2 &optional ($y 1) ($z 2)) $args (call-tridash-meta-node ,f (list $x2 $y $z))) (fail-arity-error))) $x)))))) (test compile-higher-order-meta-node-rest-arguments "Test compilation of higher-order meta-node with rest arguments." (let ((apply (mock-meta-node (f x) (functor f x))) (f (mock-meta-node (x y (rest xs)) xs))) (test-compile-meta-node (x) (functor apply (meta-node-ref f) x) ($x) `(let nil (call-tridash-meta-node ,apply (list #'(lambda (&rest $args) (if (correct-arity?% '(2) (length $args)) (destructuring-bind ($x2 $y &rest $xs &aux ($rest (or $xs +empty-list+))) $args (call-tridash-meta-node ,f (list $x2 $y $rest))) (fail-arity-error))) $x)))))) (test compile-invoke-higher-order-node "Test compilation of invoking value nodes." (test-compile-meta-node (f x y) (functor f x y) ($f $x $y) `(let nil (call-node $f (list $x $y))))) (test compile-literals "Test compilation of literal values." (with-core-nodes ("and") (test-compile-meta-node () (functor and "hello" (functor and 1 (functor and 2.3 'symbol))) () '(let nil (!|and| "hello" (thunk (!|and| 1 (thunk (!|and| 2.3 'symbol))))))))) (test compile-core-arithmetic "Test compilation of core arithmetic meta-nodes." (with-core-nodes ("/" "*" "+" "-") (test-compile-meta-node (a b c d) (functor / (functor * (functor + a b) (functor - c d)) (functor - d)) ($a $b $c $d) '(let nil (!/ (!* (!+ $a $b) (!- $c $d)) (!- $d)))))) (test compile-core-comparison-and-logical "Test compilation of core comparison and logical meta-nodes." (with-core-nodes ("not" "or" "and" "=" "!=" "<" "<=" ">" ">=") (test-compile-meta-node (x y) (functor not (functor or (functor and (functor < x y) (functor = y x)) (functor or (functor <= x 10) (functor or (functor > 1 y) (functor or (functor >= 8 y) (functor != x y)))))) ($x $y) '(let nil (!|not| (!|or| (!|and| (!< $x $y) (thunk (!= $y $x))) (thunk (!|or| (!<= $x 10) (thunk (!|or| (!> 1 $y) (thunk (!|or| (!>= 8 $y) (thunk (!!= $x $y)))))))))))))) (test compile-core-type-checks "Test compilation of core type checking meta-nodes." (with-core-nodes ("or" "int?" "real?" "string?") (test-compile-meta-node (x y z) (functor or (functor int? x) (functor or (functor real? y) (functor string? z))) ($x $y $z) '(let nil (!|or| (!|int?| $x) (thunk (!|or| (!|real?| $y) (thunk (!|string?| $z))))))))) (test compile-tail-recursive-if "Test compilation of if expression in recursive tail position." (with-core-nodes ("-" "*" "<") (test-compile-meta-node (n acc) (if-expression (functor < n 2) acc (functor self (functor - n 1) (functor * n acc))) ($n $acc) `(let nil (!|if| (!< $n 2) $acc (thunk (call-tridash-meta-node ,self (list (!- $n 1) (!* $n $acc))))))))) (test compile-tail-recursive-if-functor "Test compilation of if functor in recursive tail position." 
(with-core-nodes ("if" "-" "*" "<") (test-compile-meta-node (n acc) (functor if (functor < n 2) acc (functor self (functor - n 1) (functor * n acc))) ($n $acc) `(let nil (!|if| (!< $n 2) $acc (thunk (call-tridash-meta-node ,self (list (!- $n 1) (!* $n $acc))))))))) (test compile-tail-recursive-expression-block "Test compilation of expression blocks in recursive tail position." (with-core-nodes ("if" "-" "*" "<") (test-compile-meta-node (n acc) (functor if (functor < n 2) acc (expression-block (functor self (functor - n 1) (functor * n acc)))) ($n $acc) `(let nil (!|if| (!< $n 2) $acc (thunk (call-tridash-meta-node ,self (list (!- $n 1) (!* $n $acc))))))))) (test compile-tail-recursive-or-functor "Test compilation of `or` functor in recursive tail position." (with-core-nodes ("or" "=" "!=" "-") (test-compile-meta-node (n) (functor or (functor = n 0) (functor self (functor - n 1))) ($n) `(let nil (!|or| (!= $n 0) (thunk (call-tridash-meta-node ,self (list (!- $n 1))))))))) (test compile-tail-recursive-and-functor "Test compilation of `and` functor in recursive tail position." (with-core-nodes ("and" "=" "!=" "-") (test-compile-meta-node (n) (functor and (functor = n 0) (functor self (functor - n 1))) ($n) `(let nil (!|and| (!= $n 0) (thunk (call-tridash-meta-node ,self (list (!- $n 1))))))))) (test compile-tail-recursive-catch-expression "Test compilation of catch expressions in recursive tail position." (with-core-nodes ("-" "+") (test-compile-meta-node (n) (catch-expression (functor self (functor + n 1)) (functor self (functor - n 1))) ($n) `(let nil (!|catch| (call-tridash-meta-node ,self (list (!+ $n 1))) (thunk (call-tridash-meta-node ,self (list (!- $n 1))))))))) (test compile-meta-node-optional-arguments "Test compilation of meta-node with optional arguments." (with-core-nodes ("+") (test-compile-meta-node (n (optional d 1)) (functor + n d) ($n &optional ($d 1)) '(let nil (!|+| $n $d))))) (test compile-meta-node-multiple-optional-arguments "Test compilation of meta-node with multiple optional arguments." (with-core-nodes ("+") (test-compile-meta-node (n (optional d 1) (optional e 2)) (functor + n (functor + d e)) ($n &optional ($d 1) ($e 2)) '(let nil (!|+| $n (!|+| $d $e)))))) (test compile-meta-node-rest-argument "Test compilation of meta-node with rest argument." (with-core-nodes ("cons") (test-compile-meta-node (x (rest xs)) (functor cons x xs) ($x &optional ($xs +empty-list+)) '(let nil (!|cons| $x $xs))))) (test compile-meta-node-optional-and-rest-arguments "Test compilation of meta-node with optional and rest arguments." (with-core-nodes ("cons") (test-compile-meta-node (x (optional y 2) (rest xs)) (functor cons x (functor cons y xs)) ($x &optional ($y 2) ($xs +empty-list+)) '(let nil (!|cons| $x (thunk (!|cons| $y $xs))))))) (test compile-cyclic-references "Test compilation of cyclic references." (with-core-nodes ("cons") (test-compile-meta-node (a b) (aprog1 (expression-block nil :count 2) (setf (expression-block-expression it) (functor cons a (functor cons b (cyclic-reference it))))) ($a $b) '(let ($block) (setf $block (thunk (!|cons| $a (thunk (!|cons| $b $block))))) $block)))) (test compile-error-usupported-external-meta-node "Test that compiling an unsupported external-meta-node results in an error." 
(with-external-meta-nodes ("not-a-function") (signals unsupported-meta-node-error (tridash->cl-function (mock-meta-node (arg) (functor not-a-function arg)))))) Test Calling Tridash Meta - Nodes from CL (test call-meta-node-single-expression "Test calling a single expression meta-node from CL." (with-module-table modules (build-core-module) (build "/import(core)" "min(x,y) : case{x < y : x; y}") (with-nodes ((min "min")) modules (is (= 2 (call-meta-node min '(2 10)))) (is (= 2 (call-meta-node min '(10 2)))) (is (= -5.3 (call-meta-node min '(-5.3 7.6)))) (is (= 1 (call-meta-node min '(1 1))))))) (test call-meta-node-with-if-expression "Test calling a meta-node with if expressions from CL." (with-module-table modules (build-core-module) (build "/import(core)" "f(cond, x) : if(cond, x, 0)") (with-nodes ((f "f")) modules (is (= 10 (call-meta-node f '(t 10)))) (is (= 0 (call-meta-node f '(nil 5)))) (signals tridash-fail (call-meta-node f '(1 5)))))) (test call-meta-node-with-and-expression "Test calling a meta-node with `and` expressions from CL." (with-module-table modules (build-core-module) (build "/import(core)" "f(cond, x) : cond and x") (with-nodes ((f "f")) modules (is-true (call-meta-node f '(t t))) (is (= nil (call-meta-node f '(nil t)))) (is (= nil (call-meta-node f '(t nil)))) (is (= nil (call-meta-node f '(nil nil))))))) (test call-meta-node-with-or-expression "Test calling a meta-node with `or` expressions from CL." (with-module-table modules (build-core-module) (build "/import(core)" "f(cond, x) : cond or x") (with-nodes ((f "f")) modules (is-true (call-meta-node f '(t t))) (is-true (call-meta-node f '(nil t))) (is-true (call-meta-node f '(t nil))) (is (= nil (call-meta-node f '(nil nil))))))) (test call-meta-node-catch-fail-expression "Test calling a meta-node with multiple nodes and CATCH-FAIL expressions." (with-module-table modules (build-core-module) (build "/import(core)" "min(x,y) : { x < y -> (x -> /context(self,c)); y -> /context(self,c) }") (with-nodes ((min "min")) modules (is (= 2 (resolve (call-meta-node min '(2 10))))) (is (= 2 (resolve (call-meta-node min '(10 2))))) (is (= -5.3 (resolve (call-meta-node min '(-5.3 7.6))))) (is (= 1 (resolve (call-meta-node min '(1 1)))))))) (test call-meta-node-recursive "Test calling a recursive meta-node from CL." (with-module-table modules (build-core-module) (build "/import(core)" "fact(n) : { case{n < 2 : 1; n * fact(n - 1)} }") (with-nodes ((fact "fact")) modules (is (= 6 (call-meta-node fact '(3)))) (is (= 120 (call-meta-node fact '(5)))) (is (= 1 (call-meta-node fact '(0))))))) (test call-meta-node-tail-recursive "Test calling a tail-recursive meta-node from CL." (with-module-table modules (build-core-module) (build "/import(core)" "fact(n) : { iter(n,acc) : case{n < 2 : acc; iter(n - 1, n * acc)}; iter(n, 1) }") (with-nodes ((fact "fact")) modules (is (= 6 (call-meta-node fact '(3)))) (is (= 120 (call-meta-node fact '(5)))) (is (= 1 (call-meta-node fact '(0))))))) (test call-meta-node-with-meta-node-call "Test calling a meta-node which calls other meta-nodes." (with-module-table modules (build-core-module) (build "/import(core)" "1-(n) : n - 1" "1+(n) : n + 1" "f(a, b) : 1-(a) * 1+(b)") (with-nodes ((f "f")) modules (is (= 0 (call-meta-node f '(1 5)))) (is (= 45 (call-meta-node f '(10 4)))) (is (= 33 (call-meta-node f '(4 10))))))) (test call-meta-node-nested-meta-nodes "Test calling a meta-node with nested meta-nodes." 
(with-module-table modules (build-core-module) (build "/import(core)" "f(x, y, z) : { g(n) : n - sum; x + y -> sum; g(z) }") (with-nodes ((f "f")) modules (is (= 0 (call-meta-node f '(1 2 3)))) (is (= 2 (call-meta-node f '(2 3 7))))))) (test call-meta-node-optional-arguments-no-default "Test calling a meta-node with optional arguments without default values." (with-module-table modules (build-core-module) (build "/import(core, +, fail-type?)" "inc(n, :(d)) : n + d" "f(x) : inc(x)" "g(x) : inc(x, 2)" "h(x) : fail-type?(inc(x), &(No-Value%))") (with-nodes ((f "f") (g "g") (h "h")) modules (signals tridash-fail (call-meta-node f (list 3))) (is (= 7 (call-meta-node g (list 5)))) (is-true (call-meta-node h (list 2)))))) (test call-meta-node-optional-arguments-with-default "Test calling a meta-node with optional arguments without default values." (with-module-table modules (build-core-module) (build "/import(core, +)" "inc(n, d : 1) : n + d" "f(x) : inc(x)" "g(x) : inc(x, 2)") (with-nodes ((f "f") (g "g")) modules (is (= 4 (call-meta-node f (list 3)))) (is (= 7 (call-meta-node g (list 5))))))) (test call-meta-node-keyword-arguments "Test calling a meta-node with keyword arguments" (with-module-table modules (build-core-module) (build "/import(core, +)" "add(a, b, c : 3, d : 4) : a + b + c + d" "f(x, y) : add(x, d : 10, b : y)") (with-nodes ((f "f")) modules (is (= 16 (call-meta-node f '(1 2)))) (is (= 45 (call-meta-node f '(15 17))))))) (test call-meta-node-rest-argument "Test calling a meta-node with rest argument." (with-module-table modules (build-core-module) (build "/import(core, and, =, Empty)" "check(..(xs)) : xs = Empty" "f(x) : x and check()" "g(x) : check(x)" "h(x) : check(x, 1, 2, 3)") (with-nodes ((f "f") (g "g") (h "h")) modules (is-true (call-meta-node f '(t))) (is (= nil (call-meta-node g '(2)))) (is (= nil (call-meta-node h '(2))))))) (test call-higher-order-meta-node "Test calling meta-node with higher order meta-nodes." (with-module-table modules (build-core-module) (build "/import(core, +, not)" "apply(f, x) : f(x)" "1+(n) : n + 1" "f(a) : apply(..(not), a)" "g(a) : apply(..(1+), a)") (with-nodes ((f "f") (g "g")) modules (is (= t (call-meta-node f '(nil)))) (is (= nil (call-meta-node f '(t)))) (is (= 2 (call-meta-node g '(1)))) (is (= 4 (call-meta-node g '(3))))))) (test call-higher-order-meta-node-optional-arguments "Test calling meta-node with higher-order meta-node with optional arguments." (with-module-table modules (build-core-module) (build "/import(core, +, fail-type?)" "apply(f, x) : f(x)" "apply2(f, x, y) : f(x, y)" "1+(n, :(d)) : n + d" "f(a) : apply(1+, a)" "g(a, b) : apply2(1+, a, b)" "h(x) : fail-type?(apply(1+, x), &(No-Value%))") (with-nodes ((f "f") (g "g") (h "h")) modules (signals tridash-fail (call-meta-node f '(0))) (is (= 3 (call-meta-node g '(1 2)))) (is (= 8 (call-meta-node g '(5 3)))) (is-true (call-meta-node h '(1)))))) (test call-higher-order-meta-node-optional-argument-with-default "Test calling meta-node with higher order meta-node with optional argument default values." 
(with-module-table modules (build-core-module) (build "/import(core, +)" "apply(f, x) : f(x)" "apply2(f, x, y) : f(x, y)" "1+(n, d : 1) : n + d" "f(a) : apply(1+, a)" "g(a, b) : apply2(1+, a, b)") (with-nodes ((f "f") (g "g")) modules (is (= 1 (call-meta-node f '(0)))) (is (= 2 (call-meta-node f '(1)))) (is (= 3 (call-meta-node g '(1 2)))) (is (= 8 (call-meta-node g '(5 3))))))) (test call-higher-order-meta-node-rest-argument "Test calling meta-node with higher order meta-node with rest argument." (with-module-table modules (build-core-module) (build "/import(core, +, cons)" "apply3(f, x, y, z) : f(x, y, z)" "apply(f, x) : f(x)" "l(x, ..(xs)) : cons(x + 1, xs)" "f(a, b, c) : apply3(l, a, b, c)" "g(x) : apply(l, x)") (with-nodes ((f "f") (g "g")) modules (is (= '(2 3 4) (call-meta-node f '(1 3 4)))) (is (= '(2) (call-meta-node g '(1))))))) (test call-higher-order-meta-node-rest-argument-empty "Test calling meta-node with higher order meta-node with empty rest argument." (with-module-table modules (build-core-module) (build "/import(core, Empty, =)" "apply(f, x) : f(x)" "l(x, ..(xs)) : xs = Empty" "f(a) : apply(l, a)") (with-nodes ((f "f")) modules (is-true (bool-value (call-meta-node f '(1))))))) (test call-higher-order-meta-node-optional-arguments-outer-nodes "Test calling higher order meta-node with optional arguments and outer node references." (with-module-table modules (build-core-module) (build "/import(core, +)" "apply(f, x) : f(x)" "test(a, x) : { f(y, d : 1) : y + d + x; apply(f, a) }") (with-nodes ((test "test")) modules (is (= 6 (call-meta-node test '(2 3))))))) (test call-higher-order-external-meta-node "Test calling meta-node with higher-order external meta-node." (with-module-table modules (build-core-module) (build "/import(core, -)" "apply(f, x) : f(x)" "apply2(f, x, y) : f(x, y)" "f(a) : apply(-, a)" "g(a, b) : apply2(-, a, b)") (with-nodes ((f "f") (g "g")) modules (is (= -1 (call-meta-node f '(1)))) (is (= -2 (call-meta-node f '(2)))) (is (= 1 (call-meta-node g '(3 2)))) (is (= 2 (call-meta-node g '(5 3))))))) (test call-higher-order-meta-node-error "Test error when calling a non-meta-node." (with-module-table modules (build-core-module) (build "/import(core, +)" "apply(f, x) : f(x)" "x+(n) : n + ..(x)" "x" "f(a) : apply(..(x+), a)") (with-nodes ((f "f")) modules (signals semantic-error (call-meta-node f '(1)))))) (test call-primitive-function-subtract-and-negate "Test calling `-` meta-node with 2 arguments and 1 argument." (with-module-table modules (build-core-module) (build "/import(core, -)" "sub(a, b) : a - b" "neg(x) : -(x)") (with-nodes ((sub "sub") (neg "neg")) modules (is (= 3 (call-meta-node sub '(5 2)))) (is (= -5 (call-meta-node neg '(5))))))) (test call-meta-node-object-expressions "Test calling meta-node with object expressions." (with-module-table modules (build "Person(first, last) : { first -> self.first; last -> self.last }" "get-first(p) : p.first" "get-last(p) : p.last") (with-nodes ((person "Person") (get-first "get-first") (get-last "get-last")) modules (let ((p (call-meta-node person '("John" "Doe")))) (is (= "John" (call-meta-node get-first (list p)))) (is (= "Doe" (call-meta-node get-last (list p)))))))) (test call-meta-node-catch-fail-operand "Test catching failures in functor operand." 
(with-module-table modules (build-core-module) (build "/import(core, !=)" "fails(x) : { x != x -> /context(self, catch); True -> /context(self, catch) }") (with-nodes ((fails "fails")) modules (is-false (bool-value (call-meta-node fails '(1)))) (is-true (->> (thunk (error 'tridash-fail)) list (call-meta-node fails)))))) (test call-meta-node-catch-fail-operator "Test catching failures in functor operator." ;; Test that failures in the operator of a functor are caught. (with-module-table modules (build-core-module) (build "/import(core, !=, >, -)" "neg(x) : -(x)" "getf(f, x) : { x > 0 -> (f -> self) }" "test(x) : fails((getf(neg, x))(x))" "fails(x) : { x != x -> /context(self, catch); True -> /context(self, catch) }") (with-nodes ((test "test")) modules (is (= nil (call-meta-node test '(1)))) (is-true (call-meta-node test '(-1)))))) (test call-meta-node-fail-types "Test failure types." (with-module-table modules (build-core-module modules) (build-source-file "./test/inputs/macros/failure-types.trd" modules) (with-nodes ((check-range "check-range")) modules (is (= "" (call-meta-node check-range '(2 1 3)))) (is (= "Error: below minimum!" (call-meta-node check-range '(0 1 3)))) (is (= "Error: above maximum!" (call-meta-node check-range '(10 2 7))))))) (test call-meta-node-expression-block "Test calling meta-node with one expression-block." (with-module-table modules (build-core-module) (build "/import(core, +)" "f(x) : (x + 1) + (x + 1)") (with-nodes ((f "f")) modules (is-true (call-meta-node f '(1)) 4) (is-true (call-meta-node f '(2)) 6)))) (test call-meta-node-expression-block-multiple-references "Test calling meta-node with expression-block with multiple references." (with-module-table modules (build-core-module) (build "/import(core, *, +, -)" "f(x, y) : { x + 1 -> x1; y + 2 -> y2; (x1 + y2) * (x1 - y2) }") (with-nodes ((f "f")) modules (is-true (call-meta-node f '(3 7)) -65) (is-true (call-meta-node f '(5 2)) 20)))) (test call-meta-node-cyclic-references "Test calling a meta-node with cyclic references." (with-module-table modules (build-core-module) (build-source-file "./test/inputs/macros/cyclic-references.trd" modules) (with-nodes ((f "f")) modules (is-true (call-meta-node f '(1 2)) '(1 2 1 2 1))))) (test call-meta-node-type-error-arithmetic-functions "Test type errors in arithmetic functions." (with-module-table modules (build-core-module) (build "/import(core, +, !=)" "1+(x) : fails(x + 1)" "fails(x) : { x != x -> /context(self, catch); True -> /context(self, catch) }") (with-nodes ((1+ "1+")) modules (is (= nil (call-meta-node 1+ '(1)))) (is-true (call-meta-node 1+ '("hello")))))) (test call-meta-node-type-error-objects "Test type errors in objects." (with-module-table modules (build-core-module) (build "/import(core, !=)" "test(x) : fails(x.key)" "fails(x) : { x != x -> /context(self, catch); True -> /context(self, catch) }") (with-nodes ((test "test")) modules (is-true (bool-value (call-meta-node test '(1)))) (is-true (bool-value (call-meta-node test (list (make-hash-map))))) (is (= nil (->> (list (cons (id-symbol "key") 1)) alist-hash-map list (call-meta-node test) bool-value)))))) ;;; Test Actual Macros (test macro-compile-time-computation "Test macro which performs computation at compile-time." 
(with-module-table modules (build-core-module) (build "/import(core)" "square(x) : x * x" "/attribute(square, macro, 1)" "a * square(3) -> b") (test-not-nodes modules '(("/in" "core" "*") "a" ("square" 3)) '("square" 3)) (with-nodes ((a "a") (a*9 (("/in" "core" "*") "a" 9)) (b "b") (* "*")) modules (has-value-function (a) a*9 `(,* ,a 9)) (test-simple-binding a*9 b)))) (test macro-quoted-expression "Test macro which returns quoted expression." (with-module-table modules (build-core-module) (build "/import(core)" "square(x) : list(/quote(*), x, x)" "/attribute(square, macro, 1)" "square(a) -> b") (test-not-nodes modules '("square" "a")) (with-nodes ((a "a") (b "b") (a*a (("/in" "core" "*") "a" "a")) (* "*")) modules (has-value-function (a) a*a `(,* ,a ,a)) (test-simple-binding a*a b)))) (test macro-meta-node-reference "Test macro which returns expression with meta-node references." (with-module-table modules (build-core-module) (build "/import(core)" "square(x) : list(& *, x, x)" "/attribute(square, macro, 1)" "square(a) -> b") (test-not-nodes modules '("square" "a")) (with-nodes ((a "a") (b "b") (a*a (("/in" "core" "*") "a" "a")) (* "*")) modules (has-value-function (a) a*a `(,* ,a ,a)) (test-simple-binding a*a b)))) (test macro-with-macros "Test expansion of macros in macro meta-nodes." (with-module-table modules (build-core-module) (build "/import(core, ->, list, *)" "'(x) : list(/quote(/quote), x)" "/attribute(', macro, 1)" "square(x) : list('(*), x, x)" "/attribute(square, macro, 1)" "square(a) -> b") (test-not-nodes modules '("square" "a")) (with-nodes ((a "a") (b "b") (a*a (("/in" "core" "*") "a" "a")) (* "*")) modules (has-value-function (a) a*a `(,* ,a ,a)) (test-simple-binding a*a b)))) (test macro-multiple-arguments "Test macros with multiple arguments." (with-module-table modules (build-core-module) (build "/import(core, list, ->, if)" "'(x) : list(/quote(/quote), x)" "/attribute(', macro, 1)" "!-(a, b) : list('(if), a, b)" "/attribute(!-, macro, 1)" "/operator(!-, 25, left)" "a !- b -> out") (test-not-nodes modules '("!-" "a" "b")) (with-nodes ((a "a") (b "b") (out "out") (a!-b (("/in" "builtin" "if") "a" "b")) (if "if")) modules (has-value-function (a b) a!-b `(,if ,a ,b :none)) (test-simple-binding a!-b out)))) (test macro-keyword-arguments "Test passing macro arguments by keyword" (with-module-table modules (build-core-module) (build "/import(core, list)" "f(x) : x" "call(operator, operand) : list(operator, operand)" "/attribute(call, macro, True)" "call(operand : in1, operator : f) -> out1") (test-not-nodes modules '("call" (":" "operand" "in1") (":" "operator" "f"))) (with-nodes ((in1 "in1") (out1 "out1") (f "f") (f-in1 ("f" "in1"))) modules (has-value-function (in1) f-in1 `(,f ,in1)) (test-simple-binding f-in1 out1)))) (test macro-arity-check-required-only "Test macro arity checks with required arguments only." (with-module-table modules (build-core-module) (build "/import(core, *, list)" "square(x) : list(/quote(*), x, x)" "/attribute(square, macro, 1)") (signals arity-error (build "square(x, y) -> out")))) (test macro-arity-check-optional-not-enough "Test macro optional argument arity checks with not enough arguments." (with-module-table modules (build-core-module) (build "/import(core, +, list)" "add3(x, y, z : 1) : list(/quote(+), x, list(/quote(+), y, z))" "/attribute(add3, macro, 1)") (signals arity-error (build "add3(x)")))) (test macro-arity-check-optional-too-many "Test macro optional argument arity checks with too many arguments." 
(with-module-table modules (build-core-module) (build "/import(core, +, list)" "1+(n, d : 1) : list(/quote(+), x, d)" "/attribute(1+, macro, 1)") (signals arity-error (build "1+(x, y, z)")))) (test macro-arity-check-rest-arguments "Test macro rest argument arity checks." (with-module-table modules (build-core-module) (build "/import(core, cons, list)" "make-list(x, ..(xs)) : cons(/quote(list), cons(x, xs))" "/attribute(make-list, macro, 1)" "make-list(x, y, z) -> output" "/attribute(x, input, 1)" "/attribute(y, input, 1)" "/attribute(z, input, 1)") (with-nodes ((x "x") (y "y") (z "z") (list "list") (output "output")) (finish-build) (has-value-function (x y z) output `(,list ,(argument-list (list x y z))))))) (test macro-arity-check-keyword-missing-required "Test macro keyword argument arity check with missing required argument" (with-module-table modules (build-core-module) (build "/import(core, +, list)" "1+(n, d : 1) : list(/quote(+), x, d)" "/attribute(1+, macro, 1)") (signals arity-error (build "1+(d : 2)")))) (test macro-arity-check-keyword-unknown "Test macro keyword argument arity check with unknown keyword" (with-module-table modules (build-core-module) (build "/import(core, +, list)" "1+(n, d : 1) : list(/quote(+), x, d)" "/attribute(1+, macro, 1)") (signals arity-error (build "1+(d : 2, n : 1, delta : 100)")))) (test macro-rest-argument-outer-nodes "Test macros with rest arguments and outer nodes." (with-module-table modules (build-core-module) (build "/import(core, cons, list)" "make-list(x, ..(xs)) : cons(/quote(list), cons(x, cons(y, xs)))" "/attribute(make-list, macro, 1)" "/attribute(a, input, 1)" "/attribute(b, input, 1)" "/attribute(c, input, 1)" "/attribute(y, input, 1)") (signals macro-outer-node-error (build "make-list(a, b, c) -> output")))) (test macro-build-meta-node-multiple-times "Test building a meta-node multiple times when building macro." (with-module-table modules (build-core-module) (build "/import(core, if, -, +, *, <)" "fact(n) : { 1 -> start; iter(n, acc) : if(n < start, acc, iter(n - 1, acc * n)); iter(n,1) }" "eval-fact(n) : fact(n)" "/attribute(eval-fact, macro, 1)" "fact(in) + eval-fact(3) -> output" "/attribute(in, input, 1)") (with-nodes ((in "in") (output "output") (fact "fact") (+ "+")) (finish-build) (has-value-function (in) output `(,+ (,fact ,in) 6)) (with-nodes ((iter "iter") (n "n")) (definition fact) (has-value-function (n) fact `(,iter ,n 1)))))) (test macro-error-compile-loop "Test error when compilation loop detected in macro compilation." (with-module-table modules (build-core-module) (build "/import(core, list)" "test(x,y) : list(&(->), x, test(x,y))" "/attribute(test, macro, 1)") (with-nodes ((test "test")) modules (signals compile-meta-node-loop-error (call-meta-node test '(1 2)))))) (test macro-error-malformed-list "Test error when macro returns a malformed list." (with-module-table modules (build-core-module) (build "/import(core)" "mac(x, y) : cons(x, y)" "/attribute(mac, macro, 1)" "f(x) : x" "target-f(s, expr) : cons(s, head(tail(expr)))" "/attribute(f, target-transform, target-f)") (signals tridash-fail (build "mac(1, 2)")) (signals tridash-fail (build "a -> f(b)")))) (test macro-error-return-empty-list-failure "Test error when macro returns empty list failure." 
(with-module-table modules (build-core-module) (build "/import(core)" "mac(x) : list(x, Empty!)" "/attribute(mac, macro, 1)" "f(x) : x" "target-f(s, expr) : list(s, Empty!)" "/attribute(f, target-transform, target-f)") (signals tridash-fail (build "mac(a)")) (signals tridash-fail (build "x -> f(y)")))) Test Target Node Transform Macros (test target-transform-single-argument "Test target transformation with single argument." (with-module-table modules (build-core-module) (build-source-file #p"./test/inputs/macros/target-transform-1.trd" modules) (with-nodes ((in "in") (out "out") (int "int")) (finish-build) (has-value-function (in) out `(,int ,in))))) (test target-transform-multiple-arguments "Test target transformation with multiple arguments." (with-module-table modules (build-core-module) (build-source-file #p"./test/inputs/macros/target-transform-2.trd" modules) (with-nodes ((in "in") (a "a") (b "b") (- "-")) (finish-build) (has-value-function (in a) b `(,- ,in ,a))))) (test target-transform-arity-check-not-enough "Test arity checks in target transform with not enough arguments." (with-module-table modules (build-core-module) (signals arity-error (build-source-file #p"./test/inputs/macros/target-transform-3.trd" modules)))) (test target-transform-arity-check-too-many "Test arity checks in target transform with too many arguments." (with-module-table modules (build-core-module) (signals arity-error (build-source-file #p"./test/inputs/macros/target-transform-4.trd" modules)))) (test target-transform-arity-check-rest-argument "Test arity checks in target transform with rest arguments." (with-module-table modules (build-core-module) (build-source-file #p"./test/inputs/macros/target-transform-5.trd" modules) (with-nodes ((in "in") (a "a") (b "b") (- "-")) (finish-build) (has-value-function (in a) b `(,- ,in ,a))))) (test target-transform-arity-check-optional-and-rest "Test arity checks in target transform with optional and rest arguments." (with-module-table modules (build-core-module) (build-source-file #p"./test/inputs/macros/target-transform-6.trd" modules) (with-nodes ((in "in") (a "a") (b "b") (- "-")) (finish-build) (has-value-function (in a) b `(,- ,in ,a))))) (test target-transform-arity-check-optional-extra "Test arity checks in target transform with optional extra arguments." (with-module-table modules (build-core-module) (build-source-file #p"./test/inputs/macros/target-transform-7.trd" modules) (with-nodes ((in "in") (a "a") (b "b") (- "-")) (finish-build) (has-value-function (in a) b `(,- ,in ,a))))) ;;; Test Attribute Processor Nodes (test attribute-processor-meta-node "Test attribute processor with meta-node." 
(with-module-table modules (build-source-file #p"./test/inputs/macros/attribute-processor-1.trd" modules) (with-nodes ((f "f") (match-f "match-f")) modules (is (eq match-f (attribute :matcher f)))))) (test attribute-processor-external-meta-node "Test attribute processor with external meta-node" (with-module-table modules (build-source-file #p"./test/inputs/macros/attribute-processor-2.trd" modules) (with-nodes ((f "f") (match-f "match-f")) modules (is (eq match-f (attribute :matcher f))))))

source: https://raw.githubusercontent.com/alex-gutev/tridash/c7dbb36efe32a14ad9c4484ed45b1000e2f7132e/test/macros.lisp
language: lisp
Row 610250
repo: cstar/ec2nodefinder
file: awssign.erl
hash: 52096dd415375980c51bdb241c422076cda95fd89885d7a1deae03274dfa66d3

-module(awssign).
-author('').
-include_lib("xmerl/include/xmerl.hrl").
-export([sign_and_send/5, describe_instances/5]).

sign_and_send(Params, Host, APIVersion, AccessKey, SecretKey) ->
    SortedParams = sort([{"Timestamp", create_timestamp()},
                         {"SignatureVersion", "2"},
                         {"Version", APIVersion},
                         {"AWSAccessKeyId", AccessKey},
                         {"SignatureMethod", "HmacSHA1"} | Params]),
    EncodedParams = lists:foldl(
                      fun({K, V}, Acc) -> [url_encode(K) ++ "=" ++ url_encode(V) | Acc] end,
                      [], SortedParams),
    QueryString = string:join(EncodedParams, "&"),
    ToSign = "GET\n" ++ Host ++ "\n/\n" ++ QueryString,
    Signature = url_encode(binary_to_list(base64:encode(crypto:sha_mac(SecretKey, ToSign)))),
    URL = "http://" ++ Host ++ "/?" ++ QueryString ++ "&Signature=" ++ Signature,
    case http:request(URL) of
        {ok, {{_Version, 200, _ReasonPhrase}, _Headers, Body}} -> {ok, Body};
        {ok, {{_Version, Code, ReasonPhrase}, _Headers, _Body}} -> {error, {Code, ReasonPhrase}}
    end.

% lifted from
create_timestamp() -> create_timestamp(calendar:now_to_universal_time(now())).

create_timestamp({{Y, M, D}, {H, Mn, S}}) ->
    to_str(Y) ++ "-" ++ to_str(M) ++ "-" ++ to_str(D) ++ "T" ++
    to_str(H) ++ ":" ++ to_str(Mn) ++ ":" ++ to_str(S) ++ "Z".

add_zeros(L) -> if length(L) == 1 -> [$0|L]; true -> L end.

to_str(L) -> add_zeros(integer_to_list(L)).

sort(Params) -> lists:sort(fun({A, _}, {X, _}) -> A > X end, Params).

describe_instances(SecurityGroup, Host, APIVersion, AccessKey, SecretKey) ->
    Params = [{"Action", "DescribeInstances"}],
    Res = sign_and_send(Params, Host, APIVersion, AccessKey, SecretKey),
    case Res of
        {ok, XML} ->
            {R, _} = xmerl_scan:string(XML),
            [ V#xmlText.value ||
                V <- xmerl_xpath:string("/DescribeInstancesResponse/reservationSet/item[ groupSet/item/groupId = \"" ++ SecurityGroup ++ "\"]/instancesSet/item/privateDnsName/text()", R) ];
        {error, E} ->
            erlang:error({describe_instances_failed, E}),
            []
    end.

% lifted from the ever precious yaws_utils.erl
integer_to_hex(I) ->
    case catch erlang:integer_to_list(I, 16) of
        {'EXIT', _} -> old_integer_to_hex(I);
        Int -> Int
    end.
old_integer_to_hex(I) when I < 10 -> integer_to_list(I);
old_integer_to_hex(I) when I < 16 -> [I - 10 + $A];
old_integer_to_hex(I) when I >= 16 ->
    N = trunc(I/16),
    old_integer_to_hex(N) ++ old_integer_to_hex(I rem 16).

url_encode([H|T]) ->
    if
        H >= $a, $z >= H -> [H|url_encode(T)];
        H >= $A, $Z >= H -> [H|url_encode(T)];
        H >= $0, $9 >= H -> [H|url_encode(T)];
        H == $_; H == $.; H == $-; H == $/ -> % FIXME: more..
            [H|url_encode(T)];
        true ->
            case integer_to_hex(H) of
                [X, Y] -> [$%, X, Y | url_encode(T)];
                [X] -> [$%, $0, X | url_encode(T)]
            end
    end;
url_encode([]) -> [].

source: https://raw.githubusercontent.com/cstar/ec2nodefinder/42534509b88120d5581ad4a4e822bb806f3b950f/src/awssign.erl
language: erlang
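sign_and_send/5 above is a plain AWS Query (Signature Version 2) signer: it canonicalises the query parameters, builds the string to sign as "GET\n" ++ Host ++ "\n/\n" ++ QueryString, computes an HMAC-SHA1 over it with the secret key, and Base64-encodes the result. The Haskell fragment below is only an illustrative sketch of that same string-to-sign construction, not part of either repository; it assumes the cryptonite and base64-bytestring packages, the name signV2 is invented, URL-encoding of keys and values is omitted, and parameters are sorted ascending here even though sort/1 above orders them descending.

{-# LANGUAGE OverloadedStrings #-}
module SignV2Sketch (signV2) where

import Crypto.Hash (SHA1)
import Crypto.MAC.HMAC (HMAC, hmac)
import Data.List (sortOn)
import qualified Data.ByteArray as BA
import qualified Data.ByteString.Base64 as B64
import qualified Data.ByteString.Char8 as B

-- Build the Signature Version 2 string-to-sign and return its
-- Base64-encoded HMAC-SHA1, mirroring ToSign/Signature above.
signV2 :: B.ByteString                   -- secret key
       -> B.ByteString                   -- host
       -> [(B.ByteString, B.ByteString)] -- query parameters
       -> B.ByteString
signV2 secretKey host params =
    let query  = B.intercalate "&" [ k <> "=" <> v | (k, v) <- sortOn fst params ]
        toSign = B.intercalate "\n" ["GET", host, "/", query]
    in B64.encode (BA.convert (hmac secretKey toSign :: HMAC SHA1))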
Row 610251
repo: gentoo-haskell/hackport
file: Host.hs
hash: bc49153ea403dca985ea28d8ecf0e5ce1fa3402b9e79b81628f06680e6d03f53

module Portage.Host
  ( getInfo -- :: IO [(String, String)]
  , LocalInfo(..)
  ) where

import Util (run_cmd)
import qualified Data.List.Split as DLS
import Data.Maybe (fromJust, isJust, mapMaybe)
import qualified System.Directory as D
import System.FilePath ((</>))
import System.IO

data LocalInfo = LocalInfo { distfiles_dir :: String
                           , overlay_list :: [FilePath]
                           , portage_dir :: FilePath
                           } deriving (Read, Show)

defaultInfo :: LocalInfo
defaultInfo = LocalInfo { distfiles_dir = "/usr/portage/distfiles"
                        , overlay_list = []
                        , portage_dir = "/usr/portage"
                        }

-- query and then emerge
getInfo :: IO LocalInfo
getInfo = fromJust `fmap`
    performMaybes [ readConfig
                  , performMaybes [ getPaludisInfo
                                  , askPortageq
                                  , return (Just defaultInfo)
                                  ] >>= showAnnoyingWarning
                  ]
  where
    performMaybes [] = return Nothing
    performMaybes (act:acts) = do
      r <- act
      if isJust r
        then return r
        else performMaybes acts

showAnnoyingWarning :: Maybe LocalInfo -> IO (Maybe LocalInfo)
showAnnoyingWarning info = do
    hPutStr stderr $ unlines
      [ "-- Consider creating ~/" ++ hackport_config ++ " file with contents:"
      , show info
      , "-- It will speed hackport startup time a bit."
      ]
    return info

-- relative to home dir
hackport_config :: FilePath
hackport_config = ".hackport" </> "repositories"

--------------------------
-- fastest: config reading
--------------------------
readConfig :: IO (Maybe LocalInfo)
readConfig = do
    home_dir <- D.getHomeDirectory
    let config_path = home_dir </> hackport_config
    exists <- D.doesFileExist config_path
    if exists
      then read <$> readFile config_path
      else return Nothing

----------
-- Paludis
----------
getPaludisInfo :: IO (Maybe LocalInfo)
getPaludisInfo = fmap parsePaludisInfo <$> run_cmd "cave info"

parsePaludisInfo :: String -> LocalInfo
parsePaludisInfo text =
  let chunks = DLS.splitOn [""] .
                 lines $ text
      repositories = mapMaybe parseRepository chunks
  in fromJust (mkLocalInfo repositories)
  where
    parseRepository :: [String] -> Maybe (String, (String, String))
    parseRepository [] = Nothing
    parseRepository (firstLine:lns) = do
      name <- case words firstLine of
                ["Repository", nm] -> return (init nm)
                _ -> fail "not a repository chunk"
      let dict = [ (head ln, unwords (tail ln)) | ln <- map words lns ]
      location <- lookup "location" dict
      distfiles <- lookup "distdir" dict
      return (name, (location, distfiles))

    mkLocalInfo :: [(String, (String, String))] -> Maybe LocalInfo
    mkLocalInfo repos = do
      (gentooLocation, gentooDistfiles) <- lookup "gentoo" repos
      let overlays = [ loc | (_, (loc, _dist)) <- repos ]
      return (LocalInfo { distfiles_dir = gentooDistfiles
                        , portage_dir = gentooLocation
                        , overlay_list = overlays })

---------
-- Emerge
---------
askPortageq :: IO (Maybe LocalInfo)
askPortageq = do
    distdir <- run_cmd "portageq distdir"
    portdir <- run_cmd "portageq get_repo_path / gentoo"
    hsRepo <- run_cmd "portageq get_repo_path / haskell"
    --There really ought to be both distdir and ,
    --but maybe no hsRepo defined yet.
    let info = if Nothing `elem` [distdir,portdir]
                 then Nothing
                 else Just LocalInfo { distfiles_dir = grab distdir
                                     , portage_dir = grab portdir
                                     , overlay_list = iffy hsRepo }
                 --init: kill newline char
                 where grab = init . fromJust
                       iffy Nothing = []
                       iffy (Just repo) = [init repo]
    return info

source: https://raw.githubusercontent.com/gentoo-haskell/hackport/61baf96390e7ddc071f9a49fc78919683988c0ca/src/Portage/Host.hs
language: haskell
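getInfo above tries the cache file first, then `cave info`, then portageq. The fragment below is a hypothetical illustration, not part of hackport: readConfig simply `read`s back a Maybe LocalInfo, so the ~/.hackport/repositories file holds the Show rendering of such a value; the directory paths shown are invented, and the main function is just a minimal caller.

-- Possible contents of ~/.hackport/repositories (paths are examples only):
--
--   Just (LocalInfo { distfiles_dir = "/var/cache/distfiles"
--                   , overlay_list = ["/var/db/repos/haskell"]
--                   , portage_dir = "/var/db/repos/gentoo" })

import Portage.Host (LocalInfo(..), getInfo)

main :: IO ()
main = do
    info <- getInfo                               -- cache, then paludis, then portageq
    putStrLn ("distfiles: " ++ distfiles_dir info)
    mapM_ putStrLn (overlay_list info)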
Repository: ghcjs/jsaddle-dom
File: ConvolverNode.hs

{-# LANGUAGE PatternSynonyms #-}
-- For HasCallStack compatibility
{-# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module JSDOM.Generated.ConvolverNode
       (setBuffer, getBuffer, getBufferUnsafe, getBufferUnchecked,
        setNormalize, getNormalize, ConvolverNode(..), gTypeConvolverNode)
       where

import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))
import qualified Prelude (error)
import Data.Typeable (Typeable)
import Data.Traversable (mapM)
import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import JSDOM.Types
import Control.Applicative ((<$>))
import Control.Monad (void)
import Control.Lens.Operators ((^.))
import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import JSDOM.Enums

-- | < -US/docs/Web/API/ConvolverNode.buffer Mozilla ConvolverNode.buffer documentation >
setBuffer :: (MonadDOM m) => ConvolverNode -> Maybe AudioBuffer -> m ()
setBuffer self val = liftDOM (self ^. jss "buffer" (toJSVal val))

-- | < -US/docs/Web/API/ConvolverNode.buffer Mozilla ConvolverNode.buffer documentation >
getBuffer :: (MonadDOM m) => ConvolverNode -> m (Maybe AudioBuffer)
getBuffer self = liftDOM ((self ^. js "buffer") >>= fromJSVal)

-- | < -US/docs/Web/API/ConvolverNode.buffer Mozilla ConvolverNode.buffer documentation >
getBufferUnsafe :: (MonadDOM m, HasCallStack) => ConvolverNode -> m AudioBuffer
getBufferUnsafe self
  = liftDOM
      (((self ^. js "buffer") >>= fromJSVal) >>=
         maybe (Prelude.error "Nothing to return") return)

-- | < -US/docs/Web/API/ConvolverNode.buffer Mozilla ConvolverNode.buffer documentation >
getBufferUnchecked :: (MonadDOM m) => ConvolverNode -> m AudioBuffer
getBufferUnchecked self
  = liftDOM ((self ^. js "buffer") >>= fromJSValUnchecked)

-- | < -US/docs/Web/API/ConvolverNode.normalize Mozilla ConvolverNode.normalize documentation >
setNormalize :: (MonadDOM m) => ConvolverNode -> Bool -> m ()
setNormalize self val = liftDOM (self ^. jss "normalize" (toJSVal val))

-- | < -US/docs/Web/API/ConvolverNode.normalize Mozilla ConvolverNode.normalize documentation >
getNormalize :: (MonadDOM m) => ConvolverNode -> m Bool
getNormalize self = liftDOM ((self ^. js "normalize") >>= valToBool)
js "normalize") >>= valToBool) </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/ghcjs/jsaddle-dom/5f5094277d4b11f3dc3e2df6bb437b75712d268f/src/JSDOM/Generated/ConvolverNode.hs</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">haskell</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "> For HasCallStack compatibility # LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "># LANGUAGE PatternSynonyms # # OPTIONS_GHC -fno - warn - unused - imports # module JSDOM.Generated.ConvolverNode (setBuffer, getBuffer, getBufferUnsafe, getBufferUnchecked, setNormalize, getNormalize, ConvolverNode(..), gTypeConvolverNode) where import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..)) import qualified Prelude (error) import Data.Typeable (Typeable) import Data.Traversable (mapM) import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!)) import Data.Int (Int64) import Data.Word (Word, Word64) import JSDOM.Types import Control.Applicative ((<$>)) import Control.Monad (void) import Control.Lens.Operators ((^.)) import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync) import JSDOM.Enums | < -US/docs/Web/API/ConvolverNode.buffer Mozilla ConvolverNode.buffer documentation > setBuffer :: (MonadDOM m) => ConvolverNode -> Maybe AudioBuffer -> m () setBuffer self val = liftDOM (self ^. jss "buffer" (toJSVal val)) | < -US/docs/Web/API/ConvolverNode.buffer Mozilla ConvolverNode.buffer documentation > getBuffer :: (MonadDOM m) => ConvolverNode -> m (Maybe AudioBuffer) getBuffer self = liftDOM ((self ^. js "buffer") >>= fromJSVal) | < -US/docs/Web/API/ConvolverNode.buffer Mozilla ConvolverNode.buffer documentation > getBufferUnsafe :: (MonadDOM m, HasCallStack) => ConvolverNode -> m AudioBuffer getBufferUnsafe self = liftDOM (((self ^. js "buffer") >>= fromJSVal) >>= maybe (Prelude.error "Nothing to return") return) | < -US/docs/Web/API/ConvolverNode.buffer Mozilla ConvolverNode.buffer documentation > getBufferUnchecked :: (MonadDOM m) => ConvolverNode -> m AudioBuffer getBufferUnchecked self = liftDOM ((self ^. js "buffer") >>= fromJSValUnchecked) | < -US/docs/Web/API/ConvolverNode.normalize Mozilla ConvolverNode.normalize documentation > setNormalize :: (MonadDOM m) => ConvolverNode -> Bool -> m () setNormalize self val = liftDOM (self ^. jss "normalize" (toJSVal val)) | < -US/docs/Web/API/ConvolverNode.normalize Mozilla ConvolverNode.normalize documentation > getNormalize :: (MonadDOM m) => ConvolverNode -> m Bool getNormalize self = liftDOM ((self ^. 
js "normalize") >>= valToBool) </span></div> </div></div> </td> </tr><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610253"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">238edc5ab7a83ebe55c5f21ab46c7d5b18764a38ae8a2a9fc3e37b27511d2e31</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">realark/vert</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">game-scene.lisp</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">(in-package :recurse.vert) @export-class (defclass game-scene (scene gl-pipeline) ((scene-background :initform nil :initarg :background :type scene-background :accessor scene-background) (scene-audio-state :initform nil :documentation "Used to resume audio-state when the scene deactivates.") (scene-music :initarg :music :initform nil :accessor scene-music :documentation "Music which will play when the scene initializes.") (width :initarg :width :initform (error ":width required") :reader width) (height :initarg :height :initform (error ":height required") :reader height) (live-object-radius :initarg :live-object-radius :initform #.(* 15 16) :documentation "Make a rect centered on camera. The value of this slot will be the distance between the live area and camera rect. When camera moves outside this rect, rebuild objects to render and update This is an optimization so we don't have to rebuild the render and update queues every frame.") (render-queue :initform (make-instance 'render-queue :render-priority -1) :documentation "Render scene objects and backgrounds.") (updatable-objects :initform (make-array 100 :adjustable t :fill-pointer 0 :element-type '(or null game-object) :initial-element nil)) (updating-p :initform nil :reader updating-p) (pending-adds :initform (make-array 10 :adjustable t :fill-pointer 0 :element-type '(or null game-object) :initial-element nil) :documentation "Objects to be added to scene at the start of the next frame.") (pending-removes :initform (make-array 10 :adjustable t :fill-pointer 0 :element-type '(or null game-object) :initial-element nil) :documentation "Objects to be removed from scene at the start of the next frame.") (live-object-rebuild-camera-position :initform (vector2) :documentation "Centered camera position used to compute render-queue rebuilds.") (reset-instance-renderers :initform (make-array 5 :adjustable t :fill-pointer 0) :documentation "Sequence of instance renderers which have been reset in the current frame.") (spatial-partition :initform nil :documentation "Optimized spatial partition containing every object in the scene." 
:reader spatial-partition)) (:documentation "A scene which updates and renders game-objects.")) (defmethod initialize-instance :after ((game-scene game-scene) &rest args) (declare (ignore args)) (with-slots (spatial-partition render-queue) game-scene (gl-pipeline-add game-scene render-queue) (setf spatial-partition (make-instance 'quadtree)))) @export (defgeneric add-to-scene (scene object) (:documentation "Add an object to the game scene") (:method ((scene scene) (overlay overlay)) (with-slots (scene-overlays render-queue) scene (unless (find overlay scene-overlays) (vector-push-extend overlay scene-overlays) overlay))) (:method ((scene game-scene) (overlay overlay)) (with-slots (scene-overlays render-queue) scene (unless (find overlay scene-overlays) (vector-push-extend overlay scene-overlays) ;; (render-queue-add render-queue overlay) overlay))) (:method ((scene game-scene) (object game-object)) (if (updating-p scene) (with-slots (pending-adds pending-removes) scene (if (in-scene-p scene object) (when (find object pending-removes) (log:debug "cancel ~A for scene remove" object) (setf pending-removes (delete object pending-removes)) object) (unless (find object pending-adds) (log:debug "queuing ~A for scene add" object) (vector-push-extend object pending-adds) object))) ;; fast path for adding objects outside of scene update (i.e. initialization) (%%add-object-to-scene scene object)))) @export (defgeneric remove-from-scene (scene object) (:documentation "Remove an object from the game scene") (:method ((scene scene) (overlay overlay)) (with-slots (scene-overlays) scene (when (find overlay scene-overlays) (setf scene-overlays (delete overlay scene-overlays)) overlay))) (:method ((scene game-scene) (overlay overlay)) (with-slots (scene-overlays) scene (when (find overlay scene-overlays) (setf scene-overlays (delete overlay scene-overlays)) ;; (render-queue-remove (slot-value scene 'render-queue) overlay) overlay))) (:method ((scene game-scene) (object game-object)) (with-slots (pending-adds pending-removes) scene (if (in-scene-p scene object) (unless (find object pending-removes) (log:debug "queuing ~A for scene removal" object) (vector-push-extend object pending-removes) (unless (updating-p scene) (%run-pending-removes scene)) object) (when (find object pending-adds) (log:debug "cancel ~A for scene add" object) (setf pending-adds (delete object pending-adds)) object))))) (defmethod scene-activated ((scene game-scene)) (with-slots ((state scene-audio-state)) scene (if state (audio-player-load-state *audio* state) (audio-player-stop-all *audio*)))) (defmethod scene-deactivated ((scene game-scene)) (with-slots ((state scene-audio-state)) scene (with-sdl-mixer-lock-held (unless state (setf state (audio-player-copy-state *audio*))) (audio-player-copy-state *audio* state) (audio-player-stop-music *audio*) (audio-player-stop-sfx *audio*))) (values)) @export (defun scene-teleport-object (scene object &optional new-x new-y new-z) "Move OBJECT within SCENE to the new coordinates instantly. OBJECT's position will be recycled internally so it will instantly appear in the new position with no position interpolation." 
(when new-x (setf (x object) new-x)) (when new-y (setf (y object) new-y)) (when new-z (setf (z object) new-z)) (recycle object) (when (%in-live-object-area-p scene object) (with-slots (render-queue updatable-objects) scene (render-queue-add render-queue object) (unless (find object updatable-objects :test #'eq) (vector-push-extend object updatable-objects)))) object) (defgeneric found-object-to-update (game-scene game-object) (:documentation "for subclasses to hook object updates") (:method ((scene game-scene) game-object))) (defun %%add-object-to-scene (scene object) (declare (optimize (speed 3)) (game-scene scene) (game-object object)) (with-slots (spatial-partition render-queue updatable-objects) scene (when (start-tracking spatial-partition object) (event-subscribe object scene killed) (when (%in-live-object-area-p scene object) (render-queue-add render-queue object) (unless (find object updatable-objects :test #'eq) (vector-push-extend object updatable-objects))) object))) (defun %run-pending-removes (scene) (declare (optimize (speed 3)) (game-scene scene)) (with-slots (pending-removes spatial-partition render-queue updatable-objects) scene (declare (vector pending-removes updatable-objects)) (when (> (length pending-removes) 0) (loop :for removed-object :across pending-removes :do (event-unsubscribe removed-object scene killed) (stop-tracking spatial-partition removed-object) (when (%in-live-object-area-p scene removed-object) (render-queue-remove render-queue removed-object) (setf updatable-objects (delete removed-object updatable-objects))) (log:debug "removed ~A from scene" removed-object) :finally (setf (fill-pointer pending-removes) 0)))) (values)) (defun %run-pending-adds (scene) (declare (optimize (speed 3)) (game-scene scene)) (with-slots (pending-adds spatial-partition render-queue updatable-objects) scene (loop :for object :across pending-adds :do (%%add-object-to-scene scene object) :finally (setf (fill-pointer pending-adds) 0)))) (defun %force-rebuild-live-objects (scene) (log:debug "force live object rebuild.") (with-slots (camera live-object-radius live-object-rebuild-camera-position) scene (if (float= 0.0 (x live-object-rebuild-camera-position)) (setf (x live-object-rebuild-camera-position) (+ (width camera) live-object-radius)) (setf (x live-object-rebuild-camera-position) 0.0)))) (defun %in-live-object-area-p (scene game-object) "T if OBJECT is inside SCENE's current live object area." 
(declare (optimize (speed 3)) (game-scene scene) (game-object game-object)) (with-slots (camera live-object-radius live-object-rebuild-camera-position) scene (let ((live-x-min (- (x live-object-rebuild-camera-position) (width camera) live-object-radius)) (live-x-max (+ (x live-object-rebuild-camera-position) (width camera) live-object-radius)) (live-y-min (- (y live-object-rebuild-camera-position) (height camera) live-object-radius)) (live-y-max (+ (y live-object-rebuild-camera-position) (height camera) live-object-radius))) (multiple-value-bind (x y z w h) (world-dimensions game-object) (declare (ignore z) (single-float x y w h)) (and (or (<= live-x-min x live-x-max) (<= live-x-min (+ x w) live-x-max) (and (<= x live-x-min) (>= (+ x w) live-x-max))) (or (<= live-y-min y live-y-max) (<= live-y-min (+ y h) live-y-max) (and (<= y live-y-min) (>= (+ y h) live-y-max)))))))) (defun %rebuild-live-object-area-p (scene) (declare (optimize (speed 3)) (game-scene scene)) (block camera-moved-outside-render-area-p (with-slots (camera live-object-radius live-object-rebuild-camera-position) scene (with-accessors ((c-x x) (c-y y) (c-w width) (c-h height)) camera (declare (single-float c-x c-y c-w c-h)) (let* ((camera-centered-x (+ c-x (/ c-w 2.0))) (camera-centered-y (+ c-y (/ c-h 2.0))) (delta (max (abs (- camera-centered-x (x live-object-rebuild-camera-position))) (abs (- camera-centered-y (y live-object-rebuild-camera-position)))))) (when (>= delta live-object-radius) (setf (x live-object-rebuild-camera-position) camera-centered-x (y live-object-rebuild-camera-position) camera-centered-y) t)))))) (defmethod update :around ((scene game-scene)) (with-slots (updating-p) scene (setf updating-p t) (unwind-protect (call-next-method scene) (setf updating-p nil)))) (defmethod update ((game-scene game-scene)) (declare (optimize (speed 3))) (with-slots (live-object-rebuild-camera-position live-object-radius updatable-objects (queue render-queue) reset-instance-renderers (bg scene-background) scene-overlays pending-removes camera) game-scene (let ((rebuild-live-objects-p (%rebuild-live-object-area-p game-scene))) (%run-pending-removes game-scene) (%run-pending-adds game-scene) (when rebuild-live-objects-p (setf (fill-pointer updatable-objects) 0) (render-queue-reset queue) (setf (fill-pointer reset-instance-renderers) 0)) ;; pre-update frame to mark positions (pre-update (camera game-scene)) (when bg (pre-update bg) (when rebuild-live-objects-p (render-queue-add queue bg))) (loop :for overlay :across (the (vector overlay) scene-overlays) :do (pre-update overlay)) ;; call super (call-next-method game-scene) ;; update frame (when rebuild-live-objects-p (let ((num-objects-to-update 0) (num-objects-to-render 0) (live-x-min (- (x live-object-rebuild-camera-position) (width camera) live-object-radius)) (live-x-max (+ (x live-object-rebuild-camera-position) (width camera) live-object-radius)) (live-y-min (- (y live-object-rebuild-camera-position) (height camera) live-object-radius)) (live-y-max (+ (y live-object-rebuild-camera-position) (height camera) live-object-radius))) (declare (fixnum num-objects-to-render num-objects-to-update) (single-float live-x-min live-x-max live-y-min live-y-max)) (log:debug "rebuilding live-objects") (do-spatial-partition (game-object (spatial-partition game-scene) :static-iteration-p t :min-x live-x-min :max-x live-x-max :min-y live-y-min :max-y live-y-max) (block found-object-to-render ;; TODO: counter is slightly inaccurate because spatial partitions may visit the same object twice ;; 
to fix this, the render queue should return different values if obj is already queued (block check-if-instance-rendered (if (typep game-object 'instance-rendered-drawable) (with-slots ((instance-renderer instance-renderer)) game-object (unless (find instance-renderer reset-instance-renderers) (incf num-objects-to-render) (vector-push-extend instance-renderer reset-instance-renderers) (instance-renderer-reset instance-renderer game-scene))) (incf num-objects-to-render))) (render-queue-add queue game-object)) (block check-add-to-updatable-objects (when (and (not (typep game-object 'static-object)) (not (find game-object updatable-objects :test #'eq))) (incf num-objects-to-update) (vector-push-extend game-object updatable-objects)))) (log:debug "Rebuild complete. Found ~A objects to render and ~A objects to update" num-objects-to-render num-objects-to-update))) (update (camera game-scene)) (loop :for overlay :across (the (vector overlay) scene-overlays) :do (update overlay) #+nil (when rebuild-live-objects-p (render-queue-add render-queue overlay))) (when rebuild-live-objects-p (render-queue-add queue camera)) (when bg (update bg)) (loop :for game-object :across updatable-objects :do (pre-update game-object) (found-object-to-update game-scene game-object) (update game-object)) (values)))) (defmethod render ((scene game-scene) update-percent camera gl-context) HACK scene transitions get messed up bc rendering occurs before setup stuff is done (prog1 (call-next-method scene update-percent camera gl-context) (with-slots (scene-overlays) scene (loop :for overlay :across (the (vector overlay) scene-overlays) :do (render overlay update-percent (camera scene) gl-context)))))) (defevent-handler killed ((object obb) (game-scene game-scene)) "" (remove-from-scene game-scene object)) ;; TODO: remove this fn and use scheduler util directly @export (defun schedule (game-scene timestamp zero-arg-fn) "When the value returned by SCENE-TICKS of GAME-SCENE equals or exceeds TIMESTAMP the ZERO-ARG-FN callback will be invoked." (scheduler-add game-scene timestamp zero-arg-fn) (values)) @export (defun get-object-by-id (scene id) "Return the (presumably) unique game-object identified by ID in SCENE." (declare (game-scene scene)) (block find-object (do-spatial-partition (game-object (spatial-partition scene) :static-iteration-p t) (when (equalp (object-id game-object) id) (return-from find-object game-object))))) @export (defun in-scene-p (scene object) "Return OBJECT if OBJECT is in SCENE, nil otherwise." 
(declare (optimize (speed 3)) (game-scene scene)) (block find-object (do-spatial-partition (obj (spatial-partition scene) :static-iteration-p t) (when (eq obj object) (return-from find-object object)))))

Source: https://raw.githubusercontent.com/realark/vert/6b1938be9084224cf9ce1cfcb71f787f0ac14655/src/scene/game-scene.lisp
Language: lisp
Repository: ndmitchell/catch
File: Blur.hs

module RegExp.Blur(blur) where

import RegExp.Type

-- the output of blur must be a finite set
-- assuming a finite input
blur :: (Eq a, Show a) => RegExp a -> RegExp a
blur x = f x
    where
        f (RegKleene x) = regKleene (f x)
        f (RegUnion  x) = regUnion  (map f x)
        f (RegConcat x) = regConcat (g x)
        f x = x

        g (a : RegKleene b : c) | a == b = g (RegKleene b : a : c)
        g (RegKleene a : b : c : d : e) | a == b && b == c && c == d = g (RegKleene a : c : d : e)
        g (a:b:c:d) | a == b && b == c = g (RegKleene a : b : c : d)
        g (x:xs) = x : g xs
        g [] = []

Source: https://raw.githubusercontent.com/ndmitchell/catch/5d834416a27b4df3f7ce7830c4757d4505aaf96e/src/RegExp/Blur.hs
Language: haskell
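A small, hedged illustration of what blur does in the RegConcat case: a run of three or more equal elements gets a Kleene-starred element prefixed, which is what keeps blurred regular expressions bounded. The literal sym below is hypothetical (the literal constructor of RegExp.Type is not shown here), and the exact result depends on what the regConcat smart constructor simplifies.

-- Hypothetical illustration; 'sym' is assumed to be some literal RegExp Char.
collapse :: RegExp Char -> RegExp Char
collapse sym = blur (regConcat (replicate 4 sym))
-- The third rule of 'g' fires on the run of equal elements and rewrites it
-- with a leading RegKleene sym, so repeated concatenation cannot grow without
-- bound (roughly: regConcat [RegKleene sym, sym, sym]).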
">6f38c355955756cad424182fdb05b92e563b0b1feff4286292687e994838d2c9</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">ropas/sparrow</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">instrumentedMem.ml</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">(***********************************************************************) (* *) Copyright ( c ) 2007 - present . Programming Research Laboratory ( ROPAS ) , Seoul National University . (* All rights reserved. *) (* *) This software is distributed under the term of the BSD license . (* See the LICENSE file for details. *) (* *) (***********************************************************************) module type S = sig include MapDom.CPO module Access : Access.S with type Loc.t = A.t and type PowLoc.t = PowA.t val init_access : unit -> unit val return_access : unit -> Access.info end module Make (Mem : MapDom.CPO) = struct include Mem module Loc = A module Val = B module Access = Access.Make(Mem) let access = ref Access.Info.empty let access_mode = ref false let init_access : unit -> unit = fun () -> access_mode := true; access := Access.Info.empty; () let return_access : unit -> Access.info = fun () -> access_mode := false; !access let add k v m = (if !access_mode then access := Access.Info.add Access.Info.def k !access); add k v m let weak_add k v m = (if !access_mode then access := Access.Info.add Access.Info.all k !access); weak_add k v m let find : A.t -> t -> B.t = fun k m -> (if !access_mode && not (eq m bot) then access := Access.Info.add Access.Info.use k !access); find k m end </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/ropas/sparrow/3ec055b8c87b5c8340ef3ed6cde34f5835865b31/src/domain/instrumentedMem.ml</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">ocaml</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">********************************************************************* All rights reserved. See the LICENSE file for details. *********************************************************************</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "> Copyright ( c ) 2007 - present . Programming Research Laboratory ( ROPAS ) , Seoul National University . This software is distributed under the term of the BSD license . 
Repository: apache/couchdb-mochiweb
File: mochiweb_websocket_tests.erl

-module(mochiweb_websocket_tests).
-author('').

%% The MIT License (MIT)

%% Copyright (c) 2012 Zadane.pl sp.

%% Permission is hereby granted, free of charge, to any person obtaining a copy
%% of this software and associated documentation files (the "Software"), to deal
%% in the Software without restriction, including without limitation the rights
%% to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
%% copies of the Software, and to permit persons to whom the Software is
%% furnished to do so, subject to the following conditions:

%% The above copyright notice and this permission notice shall be included in
%% all copies or substantial portions of the Software.

%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
%% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
%% THE SOFTWARE.

-include_lib("eunit/include/eunit.hrl").

make_handshake_for_correct_client_test() -> %% Hybi handshake Req1 = mochiweb_request:new(nil, 'GET', "/foo", {1, 1}, mochiweb_headers:make([{"Sec-WebSocket-Key", "Xn3fdKyc3qEXPuj2A3O+ZA=="}])), {Version1, {HttpCode1, Headers1, _}} = mochiweb_websocket:make_handshake(Req1), ?assertEqual(hybi, Version1), ?assertEqual(101, HttpCode1), ?assertEqual("Upgrade", (proplists:get_value("Connection", Headers1))), ?assertEqual(<<"BIFTHkJk4r5t8kuud82tZJaQsCE=">>, (proplists:get_value("Sec-Websocket-Accept", Headers1))), handshake {Version2, {HttpCode2, Headers2, Body2}} = mochiweb_websocket:hixie_handshake("ws://", "localhost", "/", "33j284 9 z63 e 9 7", "TF'3|6D12659H 7 70", <<175, 181, 191, 215, 128, 195, 144, 120>>, "null"), ?assertEqual(hixie, Version2), ?assertEqual(101, HttpCode2), ?assertEqual("null", (proplists:get_value("Sec-WebSocket-Origin", Headers2))), ?assertEqual("ws/", (proplists:get_value("Sec-WebSocket-Location", Headers2))), ?assertEqual(<<230, 144, 237, 94, 84, 214, 41, 69, 244, 150, 134, 167, 221, 103, 239, 246>>, Body2). hybi_frames_decode_test() -> ?assertEqual([{1, <<"foo">>}], (mochiweb_websocket:parse_hybi_frames(nil, <<129, 131, 118, 21, 153, 58, 16, 122, 246>>, []))), ?assertEqual([{1, <<"foo">>}, {1, <<"bar">>}], (mochiweb_websocket:parse_hybi_frames(nil, <<129, 131, 1, 225, 201, 42, 103, 142, 166, 129, 131, 93, 222, 214, 66, 63, 191, 164>>, []))). hixie_frames_decode_test() -> ?assertEqual([], (mochiweb_websocket:parse_hixie_frames(<<>>, []))), ?assertEqual([<<"foo">>], (mochiweb_websocket:parse_hixie_frames(<<0, 102, 111, 111, 255>>, []))), ?assertEqual([<<"foo">>, <<"bar">>], (mochiweb_websocket:parse_hixie_frames(<<0, 102, 111, 111, 255, 0, 98, 97, 114, 255>>, []))). end_to_end_test_factory(ServerTransport) -> mochiweb_test_util:with_server(ServerTransport, fun end_to_end_server/1, fun (Transport, Port) -> end_to_end_client(mochiweb_test_util:sock_fun(Transport, Port)) end). end_to_end_server(Req) -> ?assertEqual("Upgrade", (mochiweb_request:get_header_value("connection", Req))), ?assertEqual("websocket", (mochiweb_request:get_header_value("upgrade", Req))), {ReentryWs, _ReplyChannel} = mochiweb_websocket:upgrade_connection(Req, fun end_to_end_ws_loop/3), ReentryWs(ok). end_to_end_ws_loop(Payload, State, ReplyChannel) -> Echo server lists:foreach(ReplyChannel, Payload), State. end_to_end_client(S) -> %% Key and Accept per UpgradeReq = string:join(["GET / HTTP/1.1", "Host: localhost", "Upgrade: websocket", "Connection: Upgrade", "Sec-WebSocket-Key: dGhlIHNhbXBsZSBub25jZQ==", "", ""], "\r\n"), ok = S({send, UpgradeReq}), {ok, {http_response, {1, 1}, 101, _}} = S(recv), read_expected_headers(S, [{'Upgrade', "websocket"}, {'Connection', "Upgrade"}, {'Content-Length', "0"}, {"Sec-Websocket-Accept", "s3pPLMBiTxaQ9kYGzzhZRbK+xOo="}]), The first message sent over telegraph :) SmallMessage = <<"What hath God wrought?">>, ok = S({send, <<1:1, %% Fin 0:1, %% Rsv1 0:1, %% Rsv2 0:1, %% Rsv3 Opcode , 1 = text frame 1:1, %% Mask on Length , < 125 case 0:32, %% Mask (trivial) SmallMessage/binary>>}), {ok, WsFrames} = S(recv), <<1:1, %% Fin 0:1, %% Rsv1 0:1, %% Rsv2 0:1, %% Rsv3 1:4, %% Opcode, text frame (all mochiweb supports for now) MsgSize:8, %% Expecting small size SmallMessage/binary>> = WsFrames, ?assertEqual(MsgSize, (byte_size(SmallMessage))), ok. read_expected_headers(S, D) -> Headers = mochiweb_test_util:read_server_headers(S), lists:foreach(fun ({K, V}) -> ?assertEqual(V, (mochiweb_headers:get_value(K, Headers))) end, D). 
end_to_end_http_test() ->
    end_to_end_test_factory(plain).

end_to_end_https_test() ->
    end_to_end_test_factory(ssl).

Source: https://raw.githubusercontent.com/apache/couchdb-mochiweb/fce80ef5e2c105405a39d3cdf4615f21e0d1d734/test/mochiweb_websocket_tests.erl
Language: erlang
Repository: naoto-ogawa/h-xproto-mysql
File: NodeSession.hs

{- |
Connection ) module : Database.MySQLX.NodeSession description : Session management copyright : (c) naoto ogawa, 2017 license : MIT maintainer : stability : experimental portability : Session (a.k.a. Connection) -} # LANGUAGE RecordWildCards # module DataBase.MySQLX.NodeSession ( -- * Message Message -- * Session Infomation , NodeSessionInfo(..) , defaultNodeSesssionInfo -- * Node Session , NodeSession(clientId, auth_data) -- * Session Management , openNodeSession , closeNodeSession -- * Transaction , begenTrxNodeSession , commitNodeSession , rollbackNodeSession -- * Expectation , sendExpectNoError , sendExpectUnset , sendExpectClose -- , readMessagesR , writeMessageR , repeatreadMessagesR -- * Helper functions , isSocketConnected -- * Internal Use Only , readMsgLengthR , readAllMsgR ) where -- general, standard library import qualified Data.Binary as BIN import qualified Data.ByteString as B import qualified Data.ByteString.Lazy as BL import qualified Data.Int as I import Data.Typeable (TypeRep, Typeable, typeRep, typeOf) import qualified Data.Word as W import Network.Socket hiding (recv) import Network.Socket.ByteString (send, sendAll, recv) import Control.Exception.Safe (Exception, MonadThrow, SomeException, throwM) import Control.Monad import Control.Monad.Trans.Reader import Control.Monad.IO.Class -- protocol buffer library import qualified Text.ProtocolBuffers as PB import qualified Text.ProtocolBuffers.Basic as PBB import qualified Text.ProtocolBuffers.Header as PBH import qualified Text.ProtocolBuffers.TextMessage as PBT import qualified Text.ProtocolBuffers.WireMessage as PBW import qualified Text.ProtocolBuffers.Reflections as PBR -- generated library import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Error as PE import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Frame as PFr import qualified Com.Mysql.Cj.Mysqlx.Protobuf.AuthenticateContinue as PAC import qualified Com.Mysql.Cj.Mysqlx.Protobuf.Ok as POk -- my library import DataBase.MySQLX.Exception import DataBase.MySQLX.Model import DataBase.MySQLX.Util -- ----------------------------------------------------------------------------- -- -- ----------------------------------------------------------------------------- -- | Node Session Object data NodeSession = NodeSession { _socket :: Socket -- ^ socket , clientId :: W.Word64 -- ^ client id given by MySQL Server ^ auth_data given by MySQL Server } deriving Show -- | Infomation Object of Node Session data NodeSessionInfo = NodeSessionInfo { host :: HostName -- ^ host name , port :: PortNumber -- ^ port nummber , database :: String -- ^ database name , user :: String -- ^ user , password :: String -- ^ password , charset :: String -- ^ charset } deriving Show -- | Default NodeSessionInfo -- * host : 127.0.0.1 * port : 33600 -- * database : "" -- * user : "root" -- * password : "" -- * charset : "" -- defaultNodeSesssionInfo :: NodeSessionInfo defaultNodeSesssionInfo = NodeSessionInfo "127.0.0.1" 33060 "" "root" "" "" -- | a message (type, payload) type Message = (Int, B.ByteString) -- ----------------------------------------------------------------------------- -- Session Management -- ----------------------------------------------------------------------------- -- | Open node session. 
openNodeSession :: (MonadIO m, MonadThrow m) => NodeSessionInfo -- ^ NodeSessionInfo -> m NodeSession -- ^ NodeSession openNodeSession sessionInfo = do socket <- _client (host sessionInfo) (port sessionInfo) let session = NodeSession socket (fromIntegral 0) BL.empty x <- runReaderT _negociate session (t, msg):xs <- runReaderT (_auth sessionInfo) session case t of TODO -- debug "success" frm <- getFrame msg case PFr.payload frm of Just x -> do changed <- getSessionStateChanged $ BL.toStrict x -- debug changed ok <- mkAuthenticateOk $ snd $ head xs -- debug ok id <- getClientId changed -- debug $ "NodeSession is opend; clientId =" ++ (show id) return session {clientId = id} Nothing -> throwM $ XProtocolException "Payload is Nothing" TODO err <- getError msg throwM $ XProtocolError err _ -> error $ "message type unknown, =" ++ show t -- | Close node session. closeNodeSession :: (MonadIO m, MonadThrow m) => NodeSession -> m () closeNodeSession nodeSess = do runReaderT (sendClose >> recieveOk) nodeSess liftIO . close $ _socket nodeSess debug " NodeSession is closed . " return () -- | Make a socket for session. _client :: (MonadIO m) => HostName -> PortNumber -> m Socket _client host port = liftIO $ withSocketsDo $ do addrInfo <- getAddrInfo Nothing (Just host) (Just $ show port) let serverAddr = head addrInfo sock <- socket (addrFamily serverAddr) Stream defaultProtocol connect sock (addrAddress serverAddr) return sock _auth :: (MonadIO m, MonadThrow m) => NodeSessionInfo -> ReaderT NodeSession m [Message] _auth NodeSessionInfo{..} = do sendAuthenticateStart user salt <- recieveSalt sendAutenticateContinue database user password salt msgs <- readMessagesR return msgs sendCapabilitiesGet :: (MonadIO m, MonadThrow m) => ReaderT NodeSession m () sendCapabilitiesGet = writeMessageR mkCapabilitiesGet _negociate :: (MonadIO m, MonadThrow m) => ReaderT NodeSession m [Message] _negociate = do sendCapabilitiesGet ret@(x:xs) <- readMessagesR if fst x == s_error then do msg <- getError $ snd x throwM $ XProtocolError msg else do return ret sendAuthenticateStart :: (MonadIO m) => String -> ReaderT NodeSession m () sendAuthenticateStart = writeMessageR . mkAuthenticateStart sendAutenticateContinue :: (MonadIO m) => String -> String -> String -> B.ByteString -> ReaderT NodeSession m () sendAutenticateContinue database user password salt = writeMessageR $ mkAuthenticateContinue database user salt password -- | Send Close message to the server. sendClose :: (MonadIO m) => ReaderT NodeSession m () sendClose = writeMessageR mkClose -- | Retreive a salt given by the server. recieveSalt :: (MonadIO m, MonadThrow m) => ReaderT NodeSession m B.ByteString recieveSalt = do msg <- getAuthenticateContinueR return $ BL.toStrict $ PAC.auth_data msg recieveOk :: (MonadIO m, MonadThrow m) => ReaderT NodeSession m POk.Ok recieveOk = getOkR -- | Send NoError expectation message to the server. sendExpectNoError :: (MonadIO m) => ReaderT NodeSession m () sendExpectNoError = writeMessageR mkExpectNoError -- | Send Unset expectation message to the server. 
sendExpectUnset :: (MonadIO m) => ReaderT NodeSession m () sendExpectUnset = writeMessageR mkExpectUnset interfaces as follows : openNodeSession = do sendAuthenticateStart username ( throw NetworkException ) : : aaa - > session - > param1 - > ( ) salt < - recieveSalt ( throw ) : : bbb - > session - > ByteString sendAuthenticateContinue schema user salt password ( throw NetworkException ) : : - > session - > param { } - > ( ) reciveAuthenticateOK ( throw AuthenticateException ) : : ddd - > session - > ( ) interfaces as follows: openNodeSession = do sendAuthenticateStart username (throw NetworkException) :: aaa -> session -> param1 -> () salt <- recieveSalt (throw NetworkException) :: bbb -> session -> ByteString sendAuthenticateContinue schema user salt password (throw NetworkException) :: ccc -> session -> param{ } -> () reciveAuthenticateOK (throw AuthenticateException) :: ddd -> session -> () -} -- {- [C]->[S] -} -- putMsg sock $ getAuthMsg "root" -- -- {- [S]->[C] -} -- x <- parse2AuthenticateContinue sock let salt = S.toStrict $ PAC.auth_data x -- print salt -- -- {- [C]->[S] -} -- putMsg sock $ getAutCont "world_x" "root" salt (B8.pack "root") -- -- {- [S]->[C] -} -- frame <- parse2Frame sock -- getSessionStateChanged frame -- parse2AuthenticateOK sock -- Using NodeSession and making ReaderT -- writeMessage :: (PBT.TextMsg msg ,PBR.ReflectDescriptor msg ,PBW.Wire msg ,Show msg ,Typeable msg ,MonadIO m ) => NodeSession -> msg -> m () writeMessage NodeSession{..} msg = do liftIO $ sendAll _socket (BL.toStrict $ (putMessageLengthLE (len + 1)) `BL.append` ty `BL.append` bytes) -- liftIO $ putStrLn $ PBT.messagePutText msg where bytes = PBW.messagePut msg len = fromIntegral $ PBW.messageSize msg ty = putMessageType $ fromIntegral $ getClientMsgTypeNo msg sendExpectClose :: (MonadIO m) => ReaderT NodeSession m () sendExpectClose = do nodeSess <- ask liftIO $ writeExpectClose nodeSess writeExpectClose NodeSession{..} = do liftIO $ sendAll _socket (BL.toStrict $ (putMessageLengthLE (len + 1)) `BL.append` ty `BL.append` bytes) where bytes = PBW.messagePut mkClose len = fromIntegral 0 ty = putMessageType $ fromIntegral 25 -- | write a message. 
writeMessageR :: (PBT.TextMsg msg
                 ,PBR.ReflectDescriptor msg
                 ,PBW.Wire msg
                 ,Show msg
                 ,Typeable msg
                 ,MonadIO m
                 ) => msg -> ReaderT NodeSession m ()
writeMessageR msg = do
  session <- ask
  liftIO $ writeMessage session msg

getErrorR :: (MonadIO m, MonadThrow m) => ReaderT NodeSession m PE.Error
getErrorR = readOneMessageR >>= \(_, msg) -> getError msg

getFrameR :: (MonadIO m, MonadThrow m) => ReaderT NodeSession m PFr.Frame
getFrameR = readOneMessageR >>= \(_, msg) -> getFrame msg

getAuthenticateContinueR :: (MonadIO m, MonadThrow m) => ReaderT NodeSession m PAC.AuthenticateContinue
getAuthenticateContinueR = readOneMessageR >>= \(_, msg) -> getAuthenticateContinue msg

getOkR :: (MonadIO m, MonadThrow m) => ReaderT NodeSession m POk.Ok
getOkR = readOneMessageR >>= \(_, msg) -> getOk msg

getOneMessageR :: (MonadIO m
                  ,MonadThrow m
                  ,PBW.Wire a
                  ,PBR.ReflectDescriptor a
                  ,PBT.TextMsg a
                  ,Typeable a) => ReaderT NodeSession m a
getOneMessageR = do
  session <- ask
  (_, msg) <- liftIO $ readOneMessage session
  getMessage msg

readMessages :: (MonadIO m) => NodeSession -> m [Message]
readMessages NodeSession{..} = do
  len <- runReaderT readMsgLengthR _socket
  debug $ "1st length = " ++ (show $ getIntFromLE len)
  ret <- runReaderT (readAllMsgR (fromIntegral $ getIntFromLE len)) _socket
  return ret

readMessagesEither :: (MonadIO m) => NodeSession -> m (Either [Message] [Message])
readMessagesEither NodeSession{..} = do
  len <- runReaderT readMsgLengthR _socket
  debug $ "1st length = " ++ (show $ getIntFromLE len)
  ret <- runReaderT (readAllMsgR (fromIntegral $ getIntFromLE len)) _socket
  if hasError ret
    then return $ Left ret   -- Error
    else return $ Right ret  -- Success
  where
    hasError r = length (filterError r) >= 1
    filterError xs = filter (\(t,m) -> t == s_error) xs

-- | retrieve messages from Node session.
readMessagesR :: (MonadIO m) => ReaderT NodeSession m [Message]
readMessagesR = ask >>= liftIO . readMessages

-- | retrieve messages from Node session.
repeatreadMessagesR :: (MonadIO m)
  => Bool                      -- ^ True : Expectation No Error , False : Otherwise
  -> Int                       -- ^ The number of sending messages.
  -> ([Message], [Message])    -- ^ Initial empty value, which should be ([], [])
  -> ReaderT NodeSession m ([Message], [Message]) -- ^ fst : Success messages , snd : Error messages
repeatreadMessagesR noError num acc = do
  if num == 0
    then return acc
    else do
      nodeSess <- ask
      r <- readMessagesEither nodeSess
      case r of
        Left m  -> if noError
                     then return (fst acc, m)
                     else repeatreadMessagesR noError (num-1) (fst acc, snd acc ++ m)
        Right m -> repeatreadMessagesR noError (num-1) ((fst acc) ++ m, snd acc)

readOneMessage :: (MonadIO m) => NodeSession -> m Message
readOneMessage NodeSession{..} = runReaderT readOneMsgR _socket

readOneMessageR :: (MonadIO m) => ReaderT NodeSession m Message
readOneMessageR = ask >>= liftIO . readOneMessage

readNMessage :: (MonadIO m) => Int -> NodeSession -> m [Message]
readNMessage n NodeSession{..} = runReaderT (readNMsgR n) _socket

readNMessageR :: (MonadIO m) => Int -> ReaderT NodeSession m [Message]
readNMessageR n = ask >>= liftIO .
readNMessage n -- -- Using Socket -- readSocketR :: (MonadIO m) => Int -> ReaderT Socket m B.ByteString readSocketR len = ask >>= (\x -> liftIO $ recv x len) readMsgLengthR :: (MonadIO m) => ReaderT Socket m B.ByteString readMsgLengthR = readSocketR 4 readMsgTypeR :: (MonadIO m) => ReaderT Socket m B.ByteString readMsgTypeR = readSocketR 1 readNextMsgR :: (MonadIO m) => Int -> ReaderT Socket m (B.ByteString, B.ByteString) readNextMsgR len = do bytes <- readSocketR (len + 4) return $ if B.length bytes == len then (bytes, B.empty) else B.splitAt len bytes readOneMsgR :: (MonadIO m) => ReaderT Socket m Message readOneMsgR = do l <- readMsgLengthR t <- readMsgTypeR m <- readSocketR $ fromIntegral $ (getIntFromLE l) -1 return (byte2Int t, m) readNMsgR :: (MonadIO m) => Int -> ReaderT Socket m [Message] readNMsgR n = sequence $ take n . repeat $ readOneMsgR readAllMsgR :: (MonadIO m) => Int -> ReaderT Socket m [Message] readAllMsgR len = do t <- readMsgTypeR let t' = byte2Int t if t' == s_sql_stmt_execute_ok SQL_STMT_EXECUTE_OK is the last message and has no data . return [(s_sql_stmt_execute_ok, B.empty)] else do debug $ " type= " + + ( show $ byte2Int t ) + + " , reading len= " + + ( show ( len-1 ` max ` 0 ) ) + + " , plus 4 byte " (msg, len) <- readNextMsgR (len-1) -- debug $ (show msg) ++ " , next length of reading chunk byte is " ++ (show $ if B.null len then 0 else getIntFromLE len) if B.null len then return [(t', msg)] else do msgs <- readAllMsgR $ fromIntegral $ getIntFromLE len return $ (t', msg): msgs -- | Begin a transaction. begenTrxNodeSession :: (MonadIO m, MonadThrow m) => NodeSession -> m W.Word64 begenTrxNodeSession = doSimpleSessionStateChangeStmt "begin" -- | Commit a transaction. commitNodeSession :: (MonadIO m, MonadThrow m) => NodeSession -> m W.Word64 commitNodeSession = doSimpleSessionStateChangeStmt "commit" -- | Rollback a transaction. rollbackNodeSession :: (MonadIO m, MonadThrow m) => NodeSession -> m W.Word64 rollbackNodeSession = doSimpleSessionStateChangeStmt "rollback" -- -- helper -- doSimpleSessionStateChangeStmt :: (MonadIO m, MonadThrow m) => String -> NodeSession -> m W.Word64 doSimpleSessionStateChangeStmt sql nodeSess = do -- debug $ "session state change statement : " ++ sql runReaderT (writeMessageR (mkStmtExecuteSql sql [])) nodeSess ret@(x:xs) <- runReaderT readMessagesR nodeSess -- [Message] if fst x == 1 then do msg <- getError $ snd x throwM $ XProtocolError msg else do frm <- (getFrame . snd ) $ head $ filter (\(t, b) -> t == s_notice) ret -- Frame ssc <- getPayloadSessionStateChanged frm getRowsAffected ssc -- | check a raw socket connectin. isSocketConnected :: NodeSession -> IO Bool isSocketConnected NodeSession{..} = do isConnected _socket naming rule Application Data < -- recv < -- [ Protocol Buffer Object ] < -- get < -- [ Byte Data ] < -- read < -- [ Socket ] Application Data -- > send -- > [ Protocol Buffer Object ] -- > put -- > [ Byte Data ] -- > write -- > [ Socket ] mkFoo -- > [ Protocol Buffer Object ] ( a ) client - > server message implementatin pattern 1 ) make pure function from some params to a PB object = = > hidden 2 ) make the above function to Reader -- > open package ex ) mkAuthenticateStart | V sendAuthenticateStart : : ( MonadIO m ) = > String - > ReaderT NodeSession m ( ) sendAuthenticateStart = writeMessageR . 
mkAuthenticateStart ( b ) server - > client message implemention patten 1 ) make pure function from ByteString to a PB object ex ) getAuthenticateContinue : : B.ByteString - > PAC.AuthenticateContinue = = > hidden getAuthenticateContinue ' = getMessage 2 ) make the above function to Reader Monad 3 ) make a function to get concrete data , not Protocol Buffer Objects = = > open ex ) recieveSalt : : ( MonadIO m ) = > ReaderT NodeSession m B.ByteString ( c ) client - > server - > client message implementation 1 ) combine ( a ) and ( b ) so that we get a turn - around function between client and server . naming rule Application Data <-- recv <-- [Protocol Buffer Object] <-- get <-- [Byte Data] <-- read <-- [Socket] Application Data --> send --> [Protocol Buffer Object] --> put --> [Byte Data] --> write --> [Socket] mkFoo --> [Protocol Buffer Object] (a) client -> server message implementatin pattern 1) make pure function from some params to a PB object ==> hidden 2) make the above function to Reader Monad --> open package ex) mkAuthenticateStart | V sendAuthenticateStart :: (MonadIO m) => String -> ReaderT NodeSession m () sendAuthenticateStart = writeMessageR . mkAuthenticateStart (b) server -> client message implemention patten 1) make pure function from ByteString to a PB object ex) getAuthenticateContinue :: B.ByteString -> PAC.AuthenticateContinue ==> hidden getAuthenticateContinue' = getMessage 2) make the above function to Reader Monad 3) make a function to get concrete data, not Protocol Buffer Objects ==> open ex) recieveSalt :: (MonadIO m) => ReaderT NodeSession m B.ByteString (c) client -> server -> client message implementation 1) combine (a) and (b) so that we get a turn-around function between client and server. -} </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/naoto-ogawa/h-xproto-mysql/1eacd6486c99b849016bf088788cb8d8b166f964/src/DataBase/MySQLX/NodeSession.hs</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">haskell</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "> * Message * Session Infomation * Node Session * Session Management * Transaction * Expectation * Helper functions * Internal Use Only general, standard library protocol buffer library generated library my library ----------------------------------------------------------------------------- ----------------------------------------------------------------------------- | Node Session Object ^ socket ^ client id given by MySQL Server | Infomation Object of Node Session ^ host name ^ port nummber ^ database name ^ user ^ password ^ charset | Default NodeSessionInfo * database : "" * user : "root" * password : "" * charset : "" | a message (type, payload) ----------------------------------------------------------------------------- Session Management ----------------------------------------------------------------------------- | Open node session. 
-} </span></div> </div></div> </td> </tr><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610258"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">146dd9da32755c31eb2b8d40877a8ded50fee86dfa05ee58556bc2cf291c02bf</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">rubenbarroso/EOPL</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">3_34.scm</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">(load "/Users/ruben/Dropbox/EOPL/src/interps/r5rs.scm") (load "/Users/ruben/Dropbox/EOPL/src/interps/define-datatype.scm") (load "/Users/ruben/Dropbox/EOPL/src/interps/sllgen.scm") (define-datatype environment nameless-environment? (empty-nameless-env-record) (extended-nameless-env-record (vals vector?) (env nameless-environment?))) (define empty-nameless-env (lambda () (empty-nameless-env-record))) (define extend-nameless-env (lambda (vals env) (extended-nameless-env-record (list->vector vals) env))) (define extend-nameless-env-recursively (lambda (proc-names bodies old-env) (let ((len (length proc-names))) (let ((vec (make-vector len))) (let ((env (extended-nameless-env-record vec old-env))) (for-each (lambda (pos body) (vector-set! vec pos (closure body env))) (iota len) bodies) env))))) ;> (apply-nameless-env ; (extend-nameless-env-recursively ; '(even odd) ; '((var-exp h) (var-exp j)) ; (extend-nameless-env ' ( 5 28 ) ; (empty-nameless-env))) 1 1 ) 28 (define apply-nameless-env (lambda (env depth pos) (if (= pos -1) (eopl:error 'apply-nameless-env "Error accessing free variable at (~s ~s)" depth pos)) (cases environment env (empty-nameless-env-record () (eopl:error 'apply-nameless-env "No binding for ~s" sym)) (extended-nameless-env-record (vals env) (if (= depth 0) (vector-ref vals pos) (apply-nameless-env env (- depth 1) pos)))))) (define scanner-spec-3-13 '((white-sp (whitespace) skip) (comment ("%" (arbno (not #\newline))) skip) (identifier (letter (arbno (or letter digit "?"))) symbol) (number (digit (arbno digit)) number))) (define grammar-3-13 '((program (expression) a-program) (expression (number) lit-exp) (expression (identifier) var-exp) (expression ("lexvar" "(" number number ")") lexvar-exp) (expression (primitive "(" (separated-list expression ",") ")") primapp-exp) (expression ("if" expression "then" expression "else" expression) if-exp) (expression ("let" (arbno identifier "=" expression) "in" expression) let-exp) (expression ("letrec" (arbno identifier "(" (separated-list identifier ",") ")" "=" expression) "in" expression) letrec-exp) (expression ("proc" "(" (separated-list identifier ",") ")" expression) proc-exp) (expression ("(" expression (arbno expression) ")") app-exp) (primitive ("+") add-prim) (primitive ("-") substract-prim) (primitive ("*") mult-prim) (primitive ("add1") incr-prim) (primitive ("sub1") decr-prim) (primitive ("equal?") equal-prim) (primitive ("zero?") zero-prim) (primitive ("greater?") greater-prim) (primitive ("less?") less-prim))) (define scan&parse (sllgen:make-string-parser scanner-spec-3-13 grammar-3-13)) 
(sllgen:make-define-datatypes scanner-spec-3-13 grammar-3-13) (define run (lambda (string) (eval-program (lexical-address-calc (scan&parse string))))) ;helpers (define true-value? (lambda (x) (not (zero? x)))) ; the interpreter (define eval-program (lambda (pgm) (cases program pgm (a-program (body) (eval-expression body (init-nameless-env)))))) (define eval-expression (lambda (exp env) (cases expression exp (lit-exp (datum) datum) (var-exp (id) (eopl:error 'eval-expression "var-exp should not appear in the instrumented interpreter")) (lexvar-exp (depth pos) (apply-nameless-env env depth pos)) (primapp-exp (prim rands) (let ((args (eval-rands rands env))) (apply-primitive prim args))) (if-exp (test-exp true-exp false-exp) (if (true-value? (eval-expression test-exp env)) (eval-expression true-exp env) (eval-expression false-exp env))) (let-exp (ids rands body) (let ((args (eval-rands rands env))) (eval-expression body (extend-nameless-env (list->vector args) env)))) (proc-exp (ids body) (closure body env)) (app-exp (rator rands) (let ((proc (eval-expression rator env)) (args (eval-rands rands env))) (if (procval? proc) (apply-procval proc args) (eopl:error 'eval-expression "Attempt to apply a non-procedure ~s" proc)))) (letrec-exp (proc-names idss bodies letrec-body) (eval-expression letrec-body (extend-nameless-env-recursively proc-names bodies env)))))) (define eval-rands (lambda (rands env) (map (lambda (x) (eval-rand x env)) rands))) (define eval-rand (lambda (rand env) (eval-expression rand env))) (define apply-primitive (lambda (prim args) (cases primitive prim (add-prim () (+ (car args) (cadr args))) (substract-prim () (- (car args) (cadr args))) (mult-prim () (* (car args) (cadr args))) (incr-prim () (+ (car args) 1)) (decr-prim () (- (car args) 1)) (equal-prim () (if (= (car args) (cadr args)) 1 0)) (zero-prim () (if (zero? (car args)) 1 0)) (greater-prim () (if (> (car args) (cadr args)) 1 0)) (less-prim () (if (< (car args) (cadr args)) 1 0))))) (define-datatype procval procval? (closure (body expression?) (env nameless-environment?))) (define apply-procval (lambda (proc args) (cases procval proc (closure (body env) (eval-expression body (extend-nameless-env args env)))))) (define init-nameless-env (lambda () (extend-nameless-env '(1 5 10) (empty-nameless-env)))) ;Helper procedures from exercise 1.31 (define make-lexical-address (lambda (v d p) (list v ': d p))) (define get-v (lambda (address) (car address))) (define get-d (lambda (address) (caddr address))) (define get-p (lambda (address) (cadddr address))) (define increment-depth (lambda (address) (make-lexical-address (get-v address) (+ 1 (get-d address)) (get-p address)))) (define get-lexical-address (lambda (exp addresses) (define iter (lambda (lst) (cond ((null? lst) (make-lexical-address exp -1 -1)) ((eqv? exp (get-v (car lst))) (car lst)) (else (get-lexical-address exp (cdr lst)))))) (iter addresses))) (define index-of (lambda (v declarations) (define helper (lambda (lst index) (cond ((null? lst) 'free) ((eqv? (car lst) v) index) (else (helper (cdr lst) (+ index 1)))))) (helper declarations 0))) (define cross-contour (lambda (declarations addresses) (let ((bound (filter-bound declarations)) (free (filter-free declarations addresses))) (append bound free)))) (define filter-bound (lambda (declarations) (map (lambda (decl) (make-lexical-address decl 0 (index-of decl declarations))) declarations))) (define filter-free (lambda (declarations addresses) (define iter (lambda (lst) (cond ((null? 
lst) '()) ((not (memq (get-v (car lst)) declarations)) (cons (increment-depth (car lst)) (iter (cdr lst)))) (else (iter (cdr lst)))))) (iter addresses))) (define lexical-address-calc-helper (lambda (exp addresses) (cases expression exp (lit-exp (datum) (lit-exp datum)) (var-exp (id) (let ((lexical-address (get-lexical-address id addresses))) (lexvar-exp (get-d lexical-address) (get-p lexical-address)))) (lexvar-exp (depth pos) (lexvar-exp depth pos)) (primapp-exp (prim rands) (primapp-exp prim (map (lambda (rand) (lexical-address-calc-helper rand addresses)) rands))) (if-exp (test-exp true-exp false-exp) (if-exp (lexical-address-calc-helper test-exp addresses) (lexical-address-calc-helper true-exp addresses) (lexical-address-calc-helper false-exp addresses))) (let-exp (ids rands body) (let-exp ids (map (lambda (rand) (lexical-address-calc-helper rand addresses)) rands) (lexical-address-calc-helper body (cross-contour ids addresses)))) (proc-exp (ids body) (proc-exp ids (lexical-address-calc-helper body (cross-contour ids addresses)))) (app-exp (rator rands) (app-exp (lexical-address-calc-helper rator addresses) (map (lambda (rand) (lexical-address-calc-helper rand addresses)) rands))) (letrec-exp (proc-names idss bodies letrec-body) (let ((new-addresses (cross-contour proc-names addresses))) (letrec-exp proc-names idss (map (lambda (ids body) (lexical-address-calc-helper body (cross-contour ids new-addresses))) idss bodies) (lexical-address-calc-helper letrec-body new-addresses))))))) (define letrec-cross-contour (lambda (proc-names idss addresses) (define iter (lambda (the-ids the-addresses) (if (null? the-ids) the-addresses (iter (cdr the-ids) (cross-contour (car the-ids) the-addresses))))) (iter idss (cross-contour proc-names addresses)))) (define lexical-address-calc (lambda (pgm) (a-program (cases program pgm (a-program (body) (lexical-address-calc-helper body '())))))) ;> (lexical-address-calc ; (scan&parse ; "letrec even(x ) = if ) then 1 else ( odd ) ) odd(x ) = if ) then 0 else ( even ) ) ; in (odd 13)")) ;(a-program ( letrec - exp ; (even odd) ; ((x) (x)) ; ((if-exp ( primapp - exp ( zero - prim ) ( ( lexvar - exp 0 0 ) ) ) ; (lit-exp 1) ; (app-exp ; (lexvar-exp 1 1) ; ((primapp-exp (decr-prim) ((lexvar-exp 0 0)))))) ; (if-exp ( primapp - exp ( zero - prim ) ( ( lexvar - exp 0 0 ) ) ) ; (lit-exp 0) ; (app-exp ; (lexvar-exp 1 0) ; ((primapp-exp (decr-prim) ((lexvar-exp 0 0))))))) ; (app-exp (lexvar-exp 0 1) ((lit-exp 13))))) ; ;> (run ; "letrec even(x ) = if ) then 1 else ( odd ) ) odd(x ) = if ) then 0 else ( even ) ) ; in (odd 13)") 1 ;> (run ; "letrec even(x ) = if ) then 1 else ( odd ) ) odd(x ) = if ) then 0 else ( even ) ) in ( even 13 ) " ) 0 ;> (run ; "letrec fact(x ) = if ) then 1 else * ( x,(fact ) ) ) in ( fact 6 ) " ) 720 </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/rubenbarroso/EOPL/f9b3c03c2fcbaddf64694ee3243d54be95bfe31d/src/chapter3/3_34.scm</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">scheme</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span 
class="block ">> (apply-nameless-env (extend-nameless-env-recursively '(even odd) '((var-exp h) (var-exp j)) (extend-nameless-env (empty-nameless-env))) helpers the interpreter Helper procedures from exercise 1.31 > (lexical-address-calc (scan&parse "letrec in (odd 13)")) (a-program (even odd) ((x) (x)) ((if-exp (lit-exp 1) (app-exp (lexvar-exp 1 1) ((primapp-exp (decr-prim) ((lexvar-exp 0 0)))))) (if-exp (lit-exp 0) (app-exp (lexvar-exp 1 0) ((primapp-exp (decr-prim) ((lexvar-exp 0 0))))))) (app-exp (lexvar-exp 0 1) ((lit-exp 13))))) > (run "letrec in (odd 13)") > (run "letrec > (run "letrec</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">(load "/Users/ruben/Dropbox/EOPL/src/interps/r5rs.scm") (load "/Users/ruben/Dropbox/EOPL/src/interps/define-datatype.scm") (load "/Users/ruben/Dropbox/EOPL/src/interps/sllgen.scm") (define-datatype environment nameless-environment? (empty-nameless-env-record) (extended-nameless-env-record (vals vector?) (env nameless-environment?))) (define empty-nameless-env (lambda () (empty-nameless-env-record))) (define extend-nameless-env (lambda (vals env) (extended-nameless-env-record (list->vector vals) env))) (define extend-nameless-env-recursively (lambda (proc-names bodies old-env) (let ((len (length proc-names))) (let ((vec (make-vector len))) (let ((env (extended-nameless-env-record vec old-env))) (for-each (lambda (pos body) (vector-set! vec pos (closure body env))) (iota len) bodies) env))))) ' ( 5 28 ) 1 1 ) 28 (define apply-nameless-env (lambda (env depth pos) (if (= pos -1) (eopl:error 'apply-nameless-env "Error accessing free variable at (~s ~s)" depth pos)) (cases environment env (empty-nameless-env-record () (eopl:error 'apply-nameless-env "No binding for ~s" sym)) (extended-nameless-env-record (vals env) (if (= depth 0) (vector-ref vals pos) (apply-nameless-env env (- depth 1) pos)))))) (define scanner-spec-3-13 '((white-sp (whitespace) skip) (comment ("%" (arbno (not #\newline))) skip) (identifier (letter (arbno (or letter digit "?"))) symbol) (number (digit (arbno digit)) number))) (define grammar-3-13 '((program (expression) a-program) (expression (number) lit-exp) (expression (identifier) var-exp) (expression ("lexvar" "(" number number ")") lexvar-exp) (expression (primitive "(" (separated-list expression ",") ")") primapp-exp) (expression ("if" expression "then" expression "else" expression) if-exp) (expression ("let" (arbno identifier "=" expression) "in" expression) let-exp) (expression ("letrec" (arbno identifier "(" (separated-list identifier ",") ")" "=" expression) "in" expression) letrec-exp) (expression ("proc" "(" (separated-list identifier ",") ")" expression) proc-exp) (expression ("(" expression (arbno expression) ")") app-exp) (primitive ("+") add-prim) (primitive ("-") substract-prim) (primitive ("*") mult-prim) (primitive ("add1") incr-prim) (primitive ("sub1") decr-prim) (primitive ("equal?") equal-prim) (primitive ("zero?") zero-prim) (primitive ("greater?") greater-prim) (primitive ("less?") less-prim))) (define scan&parse (sllgen:make-string-parser scanner-spec-3-13 grammar-3-13)) (sllgen:make-define-datatypes scanner-spec-3-13 grammar-3-13) (define run (lambda (string) (eval-program (lexical-address-calc (scan&parse string))))) (define true-value? (lambda (x) (not (zero? 
x)))) (define eval-program (lambda (pgm) (cases program pgm (a-program (body) (eval-expression body (init-nameless-env)))))) (define eval-expression (lambda (exp env) (cases expression exp (lit-exp (datum) datum) (var-exp (id) (eopl:error 'eval-expression "var-exp should not appear in the instrumented interpreter")) (lexvar-exp (depth pos) (apply-nameless-env env depth pos)) (primapp-exp (prim rands) (let ((args (eval-rands rands env))) (apply-primitive prim args))) (if-exp (test-exp true-exp false-exp) (if (true-value? (eval-expression test-exp env)) (eval-expression true-exp env) (eval-expression false-exp env))) (let-exp (ids rands body) (let ((args (eval-rands rands env))) (eval-expression body (extend-nameless-env (list->vector args) env)))) (proc-exp (ids body) (closure body env)) (app-exp (rator rands) (let ((proc (eval-expression rator env)) (args (eval-rands rands env))) (if (procval? proc) (apply-procval proc args) (eopl:error 'eval-expression "Attempt to apply a non-procedure ~s" proc)))) (letrec-exp (proc-names idss bodies letrec-body) (eval-expression letrec-body (extend-nameless-env-recursively proc-names bodies env)))))) (define eval-rands (lambda (rands env) (map (lambda (x) (eval-rand x env)) rands))) (define eval-rand (lambda (rand env) (eval-expression rand env))) (define apply-primitive (lambda (prim args) (cases primitive prim (add-prim () (+ (car args) (cadr args))) (substract-prim () (- (car args) (cadr args))) (mult-prim () (* (car args) (cadr args))) (incr-prim () (+ (car args) 1)) (decr-prim () (- (car args) 1)) (equal-prim () (if (= (car args) (cadr args)) 1 0)) (zero-prim () (if (zero? (car args)) 1 0)) (greater-prim () (if (> (car args) (cadr args)) 1 0)) (less-prim () (if (< (car args) (cadr args)) 1 0))))) (define-datatype procval procval? (closure (body expression?) (env nameless-environment?))) (define apply-procval (lambda (proc args) (cases procval proc (closure (body env) (eval-expression body (extend-nameless-env args env)))))) (define init-nameless-env (lambda () (extend-nameless-env '(1 5 10) (empty-nameless-env)))) (define make-lexical-address (lambda (v d p) (list v ': d p))) (define get-v (lambda (address) (car address))) (define get-d (lambda (address) (caddr address))) (define get-p (lambda (address) (cadddr address))) (define increment-depth (lambda (address) (make-lexical-address (get-v address) (+ 1 (get-d address)) (get-p address)))) (define get-lexical-address (lambda (exp addresses) (define iter (lambda (lst) (cond ((null? lst) (make-lexical-address exp -1 -1)) ((eqv? exp (get-v (car lst))) (car lst)) (else (get-lexical-address exp (cdr lst)))))) (iter addresses))) (define index-of (lambda (v declarations) (define helper (lambda (lst index) (cond ((null? lst) 'free) ((eqv? (car lst) v) index) (else (helper (cdr lst) (+ index 1)))))) (helper declarations 0))) (define cross-contour (lambda (declarations addresses) (let ((bound (filter-bound declarations)) (free (filter-free declarations addresses))) (append bound free)))) (define filter-bound (lambda (declarations) (map (lambda (decl) (make-lexical-address decl 0 (index-of decl declarations))) declarations))) (define filter-free (lambda (declarations addresses) (define iter (lambda (lst) (cond ((null? 
lst) '()) ((not (memq (get-v (car lst)) declarations)) (cons (increment-depth (car lst)) (iter (cdr lst)))) (else (iter (cdr lst)))))) (iter addresses))) (define lexical-address-calc-helper (lambda (exp addresses) (cases expression exp (lit-exp (datum) (lit-exp datum)) (var-exp (id) (let ((lexical-address (get-lexical-address id addresses))) (lexvar-exp (get-d lexical-address) (get-p lexical-address)))) (lexvar-exp (depth pos) (lexvar-exp depth pos)) (primapp-exp (prim rands) (primapp-exp prim (map (lambda (rand) (lexical-address-calc-helper rand addresses)) rands))) (if-exp (test-exp true-exp false-exp) (if-exp (lexical-address-calc-helper test-exp addresses) (lexical-address-calc-helper true-exp addresses) (lexical-address-calc-helper false-exp addresses))) (let-exp (ids rands body) (let-exp ids (map (lambda (rand) (lexical-address-calc-helper rand addresses)) rands) (lexical-address-calc-helper body (cross-contour ids addresses)))) (proc-exp (ids body) (proc-exp ids (lexical-address-calc-helper body (cross-contour ids addresses)))) (app-exp (rator rands) (app-exp (lexical-address-calc-helper rator addresses) (map (lambda (rand) (lexical-address-calc-helper rand addresses)) rands))) (letrec-exp (proc-names idss bodies letrec-body) (let ((new-addresses (cross-contour proc-names addresses))) (letrec-exp proc-names idss (map (lambda (ids body) (lexical-address-calc-helper body (cross-contour ids new-addresses))) idss bodies) (lexical-address-calc-helper letrec-body new-addresses))))))) (define letrec-cross-contour (lambda (proc-names idss addresses) (define iter (lambda (the-ids the-addresses) (if (null? the-ids) the-addresses (iter (cdr the-ids) (cross-contour (car the-ids) the-addresses))))) (iter idss (cross-contour proc-names addresses)))) (define lexical-address-calc (lambda (pgm) (a-program (cases program pgm (a-program (body) (lexical-address-calc-helper body '())))))) even(x ) = if ) then 1 else ( odd ) ) odd(x ) = if ) then 0 else ( even ) ) ( letrec - exp ( primapp - exp ( zero - prim ) ( ( lexvar - exp 0 0 ) ) ) ( primapp - exp ( zero - prim ) ( ( lexvar - exp 0 0 ) ) ) even(x ) = if ) then 1 else ( odd ) ) odd(x ) = if ) then 0 else ( even ) ) 1 even(x ) = if ) then 1 else ( odd ) ) odd(x ) = if ) then 0 else ( even ) ) in ( even 13 ) " ) 0 fact(x ) = if ) then 1 else * ( x,(fact ) ) ) in ( fact 6 ) " ) 720 </span></div> </div></div> </td> </tr><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610259"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">ad9c4785c7b740ba34b9f2dc0d64d14cbfdc8c0ec6ff040bd568e927bec1d9cd</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">jappeace/awesome-project-name</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">frontend.hs</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">module Main where import qualified Awe.Front.Main as App import Reflex.Dom main :: IO () main = mainWidget $ App.main $ App.IniState Nothing </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 
"><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/jappeace/awesome-project-name/e80a52dc2673c748a922ec19945cf75368aa3a53/frontend/app/frontend.hs</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">haskell</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "></span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">module Main where import qualified Awe.Front.Main as App import Reflex.Dom main :: IO () main = mainWidget $ App.main $ App.IniState Nothing </span></div> </div></div> </td> </tr><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610260"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">0b661532ff59e0c8aa19c3913ebeb1f8d4a5a1d3f696805485357057203940b2</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">haskell-gi/gi-gtk-examples</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">FastDraw.hs</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">{-# LANGUAGE OverloadedStrings #-} # LANGUAGE PatternSynonyms # # LANGUAGE ScopedTypeVariables # {-# OPTIONS -O #-} -- Example of an drawing graphics onto a canvas. 
import Control.Applicative
import Prelude
import Data.IORef
import Graphics.Rendering.Cairo
import Foreign (allocaArray)
import Graphics.Rendering.Cairo.Types (Cairo(..), PixelData)
import Foreign.Storable (Storable(..))
import Foreign.C (CUChar)
import qualified GI.Gtk as GI (init)
import GI.Gtk
       (dialogRun, widgetShow, boxPackStart, onWidgetDraw, widgetQueueDraw,
        setWidgetHeightRequest, setWidgetWidthRequest, drawingAreaNew,
        dialogGetContentArea, dialogAddButton, dialogNew)
import GI.Gtk.Enums (ResponseType(..))
import GI.GLib (pattern PRIORITY_LOW, idleAdd)
import GI.Cairo.Structs.Context (Context(..))
import Control.Monad.Trans.Reader (runReaderT)
import Foreign.Ptr (castPtr)
import Graphics.Rendering.Cairo.Internal (Render(..))
import Data.GI.Base.ManagedPtr (withManagedPtr)

main = do
  GI.init Nothing
  dia <- dialogNew
  dialogAddButton dia "_OK" (fromIntegral $ fromEnum ResponseTypeOk)
  contain <- dialogGetContentArea dia
  canvas <- drawingAreaNew
  let w = 256
      h = 256
      chan = 4
      row = w * chan
      stride = row
  setWidgetWidthRequest canvas 256
  setWidgetHeightRequest canvas 256
  -- create the Pixbuf
  allocaArray (w * h * chan) $ \ pbData -> do
    -- draw into the Pixbuf
    doFromTo 0 (h-1) $ \y ->
      doFromTo 0 (w-1) $ \x -> do
        pokeByteOff pbData (2+x*chan+y*row) (fromIntegral x :: CUChar)
        pokeByteOff pbData (1+x*chan+y*row) (fromIntegral y :: CUChar)
        pokeByteOff pbData (0+x*chan+y*row) (0 :: CUChar)
    -- a function to update the Pixbuf
    blueRef <- newIORef (0 :: CUChar)
    dirRef <- newIORef True
    let updateBlue = do
          blue <- readIORef blueRef
          -- print blue
          doFromTo 0 (h-1) $ \y ->
            doFromTo 0 (w-1) $ \x ->
              pokeByteOff pbData (0+x*chan+y*row) blue -- unchecked indexing
          -- arrange for the canvas to be redrawn now that we've changed the Pixbuf
          widgetQueueDraw canvas
          -- update the blue state ready for next time
          dir <- readIORef dirRef
          let diff = 1
          let blue' = if dir then blue+diff else blue-diff
          if dir
            then if blue<=maxBound-diff
                   then writeIORef blueRef blue'
                   else writeIORef blueRef maxBound >> modifyIORef dirRef not
            else if blue>=minBound+diff
                   then writeIORef blueRef blue'
                   else writeIORef blueRef minBound >> modifyIORef dirRef not
          return True
    idleAdd PRIORITY_LOW updateBlue
    onWidgetDraw canvas $ \(Context fp) ->
      withManagedPtr fp $ \p ->
        (`runReaderT` Cairo (castPtr p)) $ runRender $ do
          updateCanvas pbData w h stride
          return True
    boxPackStart contain canvas True True 0
    widgetShow canvas
    dialogRun dia
    return ()

updateCanvas :: PixelData -> Int -> Int -> Int -> Render ()
updateCanvas pb w h stride = do
  s <- liftIO $ createImageSurfaceForData pb FormatRGB24 w h stride
  setSourceSurface s 0 0
  paint

-- GHC is much better at optimising loops like this:
--
-- > doFromTo 0 255 $ \y ->
-- >   doFromTo 0 255 $ \x -> do ...
--
-- Than it is at optimising loops like this:
--
-- > sequence_ [ do ...
-- >           | x <- [0..255]
-- >           , y <- [0..255] ]
--
-- The first kind of loop runs significantly faster (with GHC 6.2 and 6.4)
{-# INLINE doFromTo #-}
-- do the action for [from..to], ie it's inclusive.
doFromTo :: Int -> Int -> (Int -> IO ()) -> IO () doFromTo from to action = let loop n | n > to = return () | otherwise = do action n loop (n+1) in loop from </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/haskell-gi/gi-gtk-examples/4c4f06dc91fbb9b9f50cdad295c8afe782e0bdec/fastdraw/FastDraw.hs</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">haskell</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "># LANGUAGE OverloadedStrings # # OPTIONS -O # Example of an drawing graphics onto a canvas. print blue unchecked indexing arrange for the canvas to be redrawn now that we've changed update the blue state ready for next time Than it is at optimising loops like this: > sequence_ [ do ... do the action for [from..to], ie it's inclusive.</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "># LANGUAGE PatternSynonyms # # LANGUAGE ScopedTypeVariables # import Control.Applicative import Prelude import Data.IORef import Graphics.Rendering.Cairo import Foreign (allocaArray) import Graphics.Rendering.Cairo.Types (Cairo(..), PixelData) import Foreign.Storable (Storable(..)) import Foreign.C (CUChar) import qualified GI.Gtk as GI (init) import GI.Gtk (dialogRun, widgetShow, boxPackStart, onWidgetDraw, widgetQueueDraw, setWidgetHeightRequest, setWidgetWidthRequest, drawingAreaNew, dialogGetContentArea, dialogAddButton, dialogNew) import GI.Gtk.Enums (ResponseType(..)) import GI.GLib (pattern PRIORITY_LOW, idleAdd) import GI.Cairo.Structs.Context (Context(..)) import Control.Monad.Trans.Reader (runReaderT) import Foreign.Ptr (castPtr) import Graphics.Rendering.Cairo.Internal (Render(..)) import Data.GI.Base.ManagedPtr (withManagedPtr) main = do GI.init Nothing dia <- dialogNew dialogAddButton dia "_OK" (fromIntegral $ fromEnum ResponseTypeOk) contain <- dialogGetContentArea dia canvas <- drawingAreaNew let w = 256 h = 256 chan = 4 row = w * chan stride = row setWidgetWidthRequest canvas 256 setWidgetHeightRequest canvas 256 create the Pixbuf allocaArray (w * h * chan) $ \ pbData -> do draw into the Pixbuf doFromTo 0 (h-1) $ \y -> doFromTo 0 (w-1) $ \x -> do pokeByteOff pbData (2+x*chan+y*row) (fromIntegral x :: CUChar) pokeByteOff pbData (1+x*chan+y*row) (fromIntegral y :: CUChar) pokeByteOff pbData (0+x*chan+y*row) (0 :: CUChar) a function to update the Pixbuf blueRef <- newIORef (0 :: CUChar) dirRef <- newIORef True let updateBlue = do blue <- readIORef blueRef doFromTo 0 (h-1) $ \y -> doFromTo 0 (w-1) $ \x -> the Pixbuf widgetQueueDraw canvas dir <- readIORef dirRef let diff = 1 let blue' = if dir then blue+diff else blue-diff if dir then if blue<=maxBound-diff then writeIORef blueRef blue' else writeIORef blueRef maxBound >> modifyIORef dirRef not else if blue>=minBound+diff then writeIORef blueRef blue' else writeIORef blueRef minBound >> modifyIORef dirRef not return True idleAdd PRIORITY_LOW updateBlue onWidgetDraw canvas $ \(Context fp) -> withManagedPtr fp $ \p -> 
(`runReaderT` Cairo (castPtr p)) $ runRender $ do updateCanvas pbData w h stride return True boxPackStart contain canvas True True 0 widgetShow canvas dialogRun dia return () updateCanvas :: PixelData -> Int -> Int -> Int -> Render () updateCanvas pb w h stride = do s <- liftIO $ createImageSurfaceForData pb FormatRGB24 w h stride setSourceSurface s 0 0 paint GHC is much better at opimising loops like this : > doFromTo 0 255 $ \y - > > doFromTo 0 255 $ \x - > do ... > | x < - [ 0 .. 255 ] > , y < - [ 0 .. 255 ] ] The first kind of loop runs significantly faster ( with GHC 6.2 and 6.4 ) # INLINE doFromTo # doFromTo :: Int -> Int -> (Int -> IO ()) -> IO () doFromTo from to action = let loop n | n > to = return () | otherwise = do action n loop (n+1) in loop from </span></div> </div></div> </td> </tr><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610261"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">3ccad40dd1db3c7b2e9ee962c405a8f537247873317933be62d5a75a48ee543c</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">spell-music/csound-expression</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">Pretty.hs</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">module Csound.Dynamic.Render.Pretty( Doc, vcatSep, ppCsdFile, ppGen, ppNotes, ppInstr, ppStmt, ppTotalDur, PrettyE(..), PrettyShowE(..), ppE ) where import Control.Monad.Trans.State.Strict import qualified Data.IntMap as IM import Text.PrettyPrint.Leijen.Text import Csound.Dynamic.Types import Csound.Dynamic.Tfm.InferTypes qualified as R(Var(..)) import Data.Text (Text) import Data.Text qualified as Text import Text.Show.Pretty (ppShow) import Data.Fix (foldFix) import Data.ByteString.Base64 qualified as Base64 import Data.Text.Encoding qualified as Text vcatSep :: [Doc] -> Doc vcatSep = vcat . punctuate line binaries, unaries :: Text -> [Doc] -> Doc binaries op as = binary op (as !! 0) (as !! 1) unaries op as = unary op (as !! 0) binary :: Text -> Doc -> Doc -> Doc binary op a b = parens $ a <+> textStrict op <+> b unary :: Text -> Doc -> Doc unary op a = parens $ textStrict op <> a func :: Text -> Doc -> Doc func op a = textStrict op <> parens a ppCsdFile :: Doc -> Doc -> Doc -> [Plugin] -> Doc ppCsdFile flags orc sco plugins = tag "CsoundSynthesizer" $ vcatSep [ tag "CsOptions" flags, tag "CsInstruments" orc, tag "CsScore" sco, ppPlugins plugins ] ppPlugins :: [Plugin] -> Doc ppPlugins plugins = vcatSep $ fmap (\(Plugin name body) -> tag name (textStrict body)) plugins tag :: Text -> Doc -> Doc tag name content = vcatSep [ char '<' <> textStrict name <> char '>', content, text "</" <> textStrict name <> char '>'] ppNotes :: InstrId -> [CsdEvent] -> Doc ppNotes instrId = vcat . 
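The closing comment of FastDraw.hs contrasts an explicit doFromTo loop with a sequence_ over a list comprehension. A minimal, self-contained sketch of the two styles follows; the counter-bumping touch action is purely illustrative, standing in for the per-pixel pokeByteOff calls above.

import Data.IORef (newIORef, readIORef, modifyIORef')

-- Same inclusive counting loop as in FastDraw.hs.
doFromTo :: Int -> Int -> (Int -> IO ()) -> IO ()
doFromTo from to action = loop from
  where loop n | n > to    = return ()
               | otherwise = action n >> loop (n + 1)

main :: IO ()
main = do
  counter <- newIORef (0 :: Int)
  let touch _x _y = modifyIORef' counter (+ 1)   -- placeholder for real per-pixel work
  -- loop style the comment recommends: nested doFromTo calls
  doFromTo 0 255 $ \y -> doFromTo 0 255 $ \x -> touch x y
  -- loop style it warns against: sequence_ over a list comprehension
  sequence_ [ touch x y | y <- [0 .. 255 :: Int], x <- [0 .. 255] ]
  readIORef counter >>= print   -- 131072 = 2 * 256 * 256: each loop touched every pixel once

Both loops do the same work; the comment's point is only that GHC generates tighter code for the first form.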
Repository: spell-music/csound-expression
File: Pretty.hs (haskell)
Source: https://raw.githubusercontent.com/spell-music/csound-expression/345df2c91c9831dd895f58951990165598504814/csound-expression-dynamic/src/Csound/Dynamic/Render/Pretty.hs
Content hash: 3ccad40dd1db3c7b2e9ee962c405a8f537247873317933be62d5a75a48ee543c

module Csound.Dynamic.Render.Pretty(
    Doc, vcatSep,
    ppCsdFile, ppGen, ppNotes, ppInstr, ppStmt, ppTotalDur,
    PrettyE(..), PrettyShowE(..), ppE
) where

import Control.Monad.Trans.State.Strict
import qualified Data.IntMap as IM
import Text.PrettyPrint.Leijen.Text
import Csound.Dynamic.Types
import Csound.Dynamic.Tfm.InferTypes qualified as R(Var(..))
import Data.Text (Text)
import Data.Text qualified as Text
import Text.Show.Pretty (ppShow)
import Data.Fix (foldFix)
import Data.ByteString.Base64 qualified as Base64
import Data.Text.Encoding qualified as Text

vcatSep :: [Doc] -> Doc
vcatSep = vcat . punctuate line

binaries, unaries :: Text -> [Doc] -> Doc
binaries op as = binary op (as !! 0) (as !! 1)
unaries op as = unary op (as !! 0)

binary :: Text -> Doc -> Doc -> Doc
binary op a b = parens $ a <+> textStrict op <+> b

unary :: Text -> Doc -> Doc
unary op a = parens $ textStrict op <> a

func :: Text -> Doc -> Doc
func op a = textStrict op <> parens a

ppCsdFile :: Doc -> Doc -> Doc -> [Plugin] -> Doc
ppCsdFile flags orc sco plugins =
    tag "CsoundSynthesizer" $ vcatSep [
        tag "CsOptions" flags,
        tag "CsInstruments" orc,
        tag "CsScore" sco,
        ppPlugins plugins ]

ppPlugins :: [Plugin] -> Doc
ppPlugins plugins = vcatSep $ fmap (\(Plugin name body) -> tag name (textStrict body)) plugins

tag :: Text -> Doc -> Doc
tag name content = vcatSep [
    char '<' <> textStrict name <> char '>',
    content,
    text "</" <> textStrict name <> char '>']

ppNotes :: InstrId -> [CsdEvent] -> Doc
ppNotes instrId = vcat . fmap (ppNote instrId)

ppNote :: InstrId -> CsdEvent -> Doc
ppNote instrId evt = char 'i' <+> ppInstrId instrId <+> double (csdEventStart evt) <+> double (csdEventDur evt) <+> hsep (fmap ppPrim $ csdEventContent evt)

ppPrim :: Prim -> Doc
ppPrim x = case x of
    P n -> char 'p' <> int n
    PrimInstrId a -> ppInstrId a
    PString a -> int a
    PrimInt n -> int n
    PrimDouble d -> double d
    PrimString s -> dquotes $ textStrict s
    PrimVar targetRate v -> ppConverter targetRate (varRate v) $ ppVar v
    where
        ppConverter dst src t
            | dst == src = t
            | dst == Ar && src == Kr = a(t)
            | dst == Ar && src == Ir = a(k(t))
            | dst == Kr = k(t)
            | dst == Ir && src == Kr = i(t)
            | dst == Ir && src == Ar = i(k(t))
            | otherwise = t
            where
                tfm ch v = hcat [char ch, parens v]
                a = tfm 'a'
                k = tfm 'k'
                i = tfm 'i'

ppGen :: Int -> Gen -> Doc
ppGen tabId ft = char 'f'
    <> int tabId
    <+> int 0
    <+> (int $ genSize ft)
    <+> (ppGenId $ genId ft)
    <+> (maybe empty (textStrict . Text.pack . show) $ genFile ft)
    <+> (hsep $ map double $ genArgs ft)

ppGenId :: GenId -> Doc
ppGenId x = case x of
    IntGenId a -> int a
    StringGenId a -> dquotes $ textStrict a

ppInstr :: InstrId -> Doc -> Doc
ppInstr instrId body = vcat [
    text "instr" <+> ppInstrHeadId instrId,
    body,
    text "endin"]

ppInstrHeadId :: InstrId -> Doc
ppInstrHeadId x = case x of
    InstrId den nom -> int nom <> maybe empty ppAfterDot den
    InstrLabel name -> textStrict name
    where ppAfterDot a = textStrict $ Text.pack $ ('.': ) $ reverse $ show a

ppInstrId :: InstrId -> Doc
ppInstrId x = case x of
    InstrId den nom -> int nom <> maybe empty ppAfterDot den
    InstrLabel name -> dquotes $ textStrict name
    where ppAfterDot a = textStrict $ Text.pack $ ('.': ) $ reverse $ show a

type TabDepth = Int

ppStmt :: [R.Var] -> Exp R.Var -> State TabDepth Doc
ppStmt outs expr = maybe (ppExp (ppOuts outs) expr) id (maybeStringCopy outs expr)

maybeStringCopy :: [R.Var] -> Exp R.Var -> Maybe (State TabDepth Doc)
maybeStringCopy outs expr = case (outs, expr) of
    ([R.Var Sr _], ExpPrim (PrimVar _rate var)) -> Just $ tab $ ppStringCopy (ppOuts outs) (ppVar var)
    ([R.Var Sr _], ReadVar var) -> Just $ tab $ ppStringCopy (ppOuts outs) (ppVar var)
    ([], WriteVar outVar a) | varRate outVar == Sr -> Just $ tab $ ppStringCopy (ppVar outVar) (ppPrimOrVar a)
    ([R.Var Sr _], ReadArr var as) -> Just $ tab $ ppStringCopy (ppOuts outs) (ppReadArr var $ fmap ppPrimOrVar as)
    ([], WriteArr outVar bs a) | varRate outVar == Sr -> Just $ tab $ ppStringCopy (ppArrIndex outVar $ fmap ppPrimOrVar bs) (ppPrimOrVar a)
    _ -> Nothing

ppStringCopy :: Doc -> Doc -> Doc
ppStringCopy outs src = ppOpc outs "strcpyk" [src]

ppExp :: Doc -> Exp R.Var -> State TabDepth Doc
ppExp res expr = case fmap ppPrimOrVar expr of
    ExpPrim (PString n) -> tab $ ppStrget res n
    ExpPrim p -> tab $ res $= ppPrim p
    Tfm info [a, b] | isInfix info -> tab $ res $= binary (infoName info) a b
    Tfm info xs | isPrefix info -> tab $ res $= prefix (infoName info) xs
    Tfm info xs -> tab $ ppOpc res (infoName info) xs
    ConvertRate to from x -> tab $ ppConvertRate res to from x
    If _ifRate info t e -> tab $ ppIf res (ppCond info) t e
    ExpNum (PreInline op as) -> tab $ res $= ppNumOp op as
    WriteVar v a -> tab $ ppVar v $= a
    InitVar v a -> tab $ ppOpc (ppVar v) "init" [a]
    ReadVar v -> tab $ res $= ppVar v
    InitArr v as -> tab $ ppOpc (ppArrVar (length as) (ppVar v)) "init" as
    ReadArr v as -> tab $ if (varRate v /= Sr) then res $= ppReadArr v as else res <+> text "strcpy" <+> ppReadArr v as
    WriteArr v as b -> tab $ ppWriteArr v as b
    WriteInitArr v as b -> tab $ ppWriteInitArr v as b
    TfmArr isInit v op [a,b] | isInfix op -> tab $ ppTfmArrOut isInit v <+> binary (infoName op) a b
    TfmArr isInit v op args | isPrefix op -> tab $ ppTfmArrOut isInit v <+> prefix (infoName op) args
    TfmArr isInit v op xs -> tab $ ppOpc (ppTfmArrOut isInit v) (infoName op) xs
    InitPureArr _outRate _procRate initVals -> tab $ ppOpc (ppArrVar 1 res) "fillarray" initVals
    ReadPureArr outRate _procRate arr index -> tab $ if (outRate /= Sr) then res $= ppReadPureArr arr [index] else res <+> text "strcpy" <+> ppReadPureArr arr [index]
    IfBegin _ a -> succTab $ text "if " <> ppCond a <> text " then"
    IfBlock _ cond (CodeBlock th) -> tab $ ppIf1 res (ppCond cond) th
    IfElseBlock _ cond (CodeBlock th) (CodeBlock el) -> tab $ ppIf res (ppCond cond) th el
    -- ElseIfBegin a -> left >> (succTab $ text "elseif " <> ppCond a <> text " then")
    ElseBegin -> left >> (succTab $ text "else")
    IfEnd -> left >> (tab $ text "endif")
    UntilBlock _ cond (CodeBlock th) -> tab $ ppUntil res (ppCond cond) th
    WhileBlock _ cond (CodeBlock th) -> tab $ ppWhile res (ppCond cond) th
    WhileRefBlock var (CodeBlock th) -> tab $ ppWhileRef res var th
    UntilBegin _ a -> succTab $ text "until " <> ppCond a <> text " do"
    UntilEnd -> left >> (tab $ text "od")
    WhileBegin _ a -> succTab $ text "while " <> ppCond a <> text " do"
    WhileRefBegin var -> succTab $ text "while " <> ppVar var <+> equals <+> text "1" <+> text "do"
    WhileEnd -> left >> (tab $ text "od")
    InitMacrosString name initValue -> tab $ initMacros (textStrict name) (textStrict initValue)
    InitMacrosDouble name initValue -> tab $ initMacros (textStrict name) (double initValue)
    InitMacrosInt name initValue -> tab $ initMacros (textStrict name) (int initValue)
    ReadMacrosString name -> tab $ res <+> text "strcpy" <+> readMacro name
    ReadMacrosDouble name -> tab $ res $= readMacro name
    ReadMacrosInt name -> tab $ res $= readMacro name
    EmptyExp -> return empty
    Verbatim str -> return $ textStrict str
    Select _rate _n a -> tab $ res $= ("SELECTS" <+> a)
    Starts -> tab $ res $= "STARTS"
    Seq a b -> tab $ hsep ["SEQ", a, b]
    Ends _a -> tab $ "ENDS"
    ExpBool _ -> tab "ExpBool"
    -- x -> error $ "unknown expression: " ++ show x

-- pp macros

readMacro :: Text -> Doc
readMacro name = char '$' <> textStrict name

initMacros :: Doc -> Doc -> Doc
initMacros name initValue = vcat
    [ text "#ifndef" <+> name
    , text "#define " <+> name <+> char '#' <> initValue <> char '#'
    , text "#end"
    ]

-- pp arrays

ppTfmArrOut :: Bool -> Var -> Doc
ppTfmArrOut isInit v = ppVar v <> (if isInit then (text "[]") else empty)

ppArrIndex :: Var -> [Doc] -> Doc
ppArrIndex v as = ppVar v <> (hcat $ fmap brackets as)

ppArrVar :: Int -> Doc -> Doc
ppArrVar n v = v <> (hcat $ replicate n $ text "[]")

ppReadArr :: Var -> [Doc] -> Doc
ppReadArr v as = ppArrIndex v as

ppReadPureArr :: Doc -> [Doc] -> Doc
ppReadPureArr v as = v <> (hcat $ fmap brackets as)

ppWriteArr :: Var -> ArrIndex Doc -> Doc -> Doc
ppWriteArr v as b = ppArrIndex v as <+> equalsWord <+> b
    where equalsWord = if (varRate v == Sr) then text "strcpy" else equals

ppWriteInitArr :: Var -> [Doc] -> Doc -> Doc
ppWriteInitArr v as b = ppArrIndex v as <+> initWord <+> b
    where initWord = text $ if (varRate v == Sr) then "strcpy" else "init"

-------------------------------------

tab :: Monad m => Doc -> StateT TabDepth m Doc
tab doc = fmap (shiftByTab doc) get

tabWidth :: TabDepth
tabWidth = 4

shiftByTab :: Doc -> TabDepth -> Doc
shiftByTab doc n
    | n == 0 = doc
    | otherwise = indent (tabWidth * n) doc

left :: State TabDepth ()
left = modify pred

succTab :: Monad m => Doc -> StateT TabDepth m Doc
succTab doc = do
    a <- tab doc
    modify succ
    return a

prefix :: Text -> [Doc] -> Doc
prefix name args = textStrict name <> tupled args

ppCond :: Inline CondOp Doc -> Doc
ppCond = ppInline ppCondOp

($=) :: Doc -> Doc -> Doc
($=) a b = a <+> equals <+> b

ppOuts :: [R.Var] -> Doc
ppOuts xs = hsep $ punctuate comma $ map ppRatedVar xs

ppPrimOrVar :: PrimOr R.Var -> Doc
ppPrimOrVar x = either ppPrim ppRatedVar $ unPrimOr x

ppStrget :: Doc -> Int -> Doc
ppStrget out n = ppOpc out "strget" [char 'p' <> int n]

ppIf :: Doc -> Doc -> Doc -> Doc -> Doc
ppIf res p t e = vcat
    [ text "if" <+> p <+> text "then"
    , text " " <> res <+> char '=' <+> t
    , text "else"
    , text " " <> res <+> char '=' <+> e
    , text "endif"
    ]

ppIf1, ppWhile, ppUntil :: Doc -> Doc -> Doc -> Doc
ppIf1 = ppIfBy "if"
ppWhile = ppIfBy "while"
ppUntil = ppIfBy "until"

ppIfBy :: Text -> Doc -> Doc -> Doc -> Doc
ppIfBy leadTag res p t = vcat
    [ textStrict leadTag <+> p <+> text "then"
    , text " " <> res <+> char '=' <+> t
    , text "endif"
    ]

ppWhileRef :: Doc -> Var -> Doc -> Doc
ppWhileRef res p t = vcat
    [ textStrict "while" <+> ppVar p <+> text "then"
    , text " " <> res <+> char '=' <+> t
    , text "endif"
    ]

ppOpc :: Doc -> Text -> [Doc] -> Doc
ppOpc out name xs = out <+> ppProc name xs

ppProc :: Text -> [Doc] -> Doc
ppProc name xs = textStrict name <+> (hsep $ punctuate comma xs)

ppVar :: Var -> Doc
ppVar v = case v of
    Var ty rate name -> ppVarType ty <> ppRate rate <> textStrict (Text.cons (varPrefix ty) name)
    VarVerbatim _ name -> textStrict name

varPrefix :: VarType -> Char
varPrefix x = case x of
    LocalVar -> 'l'
    GlobalVar -> 'g'

ppVarType :: VarType -> Doc
ppVarType x = case x of
    LocalVar -> empty
    GlobalVar -> char 'g'

ppConvertRate :: Doc -> Rate -> Maybe Rate -> Doc -> Doc
ppConvertRate out to from var = case (to, from) of
    (Ar, Just Kr) -> upsamp var
    (Ar, Just Ir) -> upsamp $ toK var
    (Kr, Just Ar) -> downsamp var
    (Kr, Just Ir) -> out $= var
    (Ir, Just Ar) -> downsamp var
    (Ir, Just Kr) -> out $= toI var
    (Ar, Nothing) -> out $= toA var
    (Kr, Nothing) -> out $= toK var
    (Ir, Nothing) -> out $= toI var
    (a, Just b) | a == b -> out $= var
    (a, b) -> error $ "bug: no rate conversion from " ++ show b ++ " to " ++ show a ++ "."
    where
        upsamp x = ppOpc out "upsamp" [x]
        downsamp x = ppOpc out "downsamp" [x]
        toA = func "a"
        toK = func "k"
        toI = func "i"

-- expressions

ppInline :: (a -> [Doc] -> Doc) -> Inline a Doc -> Doc
ppInline ppNode a = iter $ inlineExp a
    where iter x = case x of
              InlinePrim n -> inlineEnv a IM.! n
              InlineExp op args -> ppNode op $ fmap iter args

-- booleans

ppCondOp :: CondOp -> [Doc] -> Doc
ppCondOp op = case op of
    TrueOp -> const $ text "(1 == 1)"
    FalseOp -> const $ text "(0 == 1)"
    And -> bi "&&"
    Or -> bi "||"
    Equals -> bi "=="
    NotEquals -> bi "!="
    Less -> bi "<"
    Greater -> bi ">"
    LessEquals -> bi "<="
    GreaterEquals -> bi ">="
    where bi = binaries

-- numeric

ppNumOp :: NumOp -> [Doc] -> Doc
ppNumOp op = case op of
    Add -> bi "+"
    Sub -> bi "-"
    Mul -> bi "*"
    Div -> bi "/"
    Neg -> uno "-"
    Pow -> bi "^"
    Mod -> bi "%"
    where
        bi = binaries
        uno = unaries

ppRatedVar :: R.Var -> Doc
ppRatedVar v = ppRate (R.varType v) <> int (R.varId v)

ppRate :: Rate -> Doc
ppRate x = case removeArrRate x of
    Sr -> char 'S'
    _ -> phi x
    where phi = textStrict . Text.toLower . Text.pack . show

ppTotalDur :: Double -> Doc
ppTotalDur d = text "f0" <+> double d

--------------------------------------------------------------
-- debug

newtype PrettyShowE = PrettyShowE E
newtype PrettyE = PrettyE E

instance Show PrettyShowE where
    show (PrettyShowE expr) = ppShow expr

instance Show PrettyE where
    show (PrettyE expr) = show $ ppE expr

ppE :: E -> Doc
ppE = foldFix go
    where
        go :: RatedExp Doc -> Doc
        go x = fromExp (fromInfo x) x

        fromInfo :: RatedExp Doc -> Doc
        fromInfo RatedExp{..} = hsep
            [ ppHash ratedExpHash
            , maybe mempty ppRate ratedExpRate
            , maybe mempty pretty ratedExpDepends
            ]

        ppHash = textStrict . Text.take 4 . Text.decodeUtf8 . Base64.encode

        fromExp :: Doc -> RatedExp Doc -> Doc
        fromExp info RatedExp{..} = indent 2 $ post $
            case ratedExpExp of
                ExpPrim p -> ppPrim p
                EmptyExp -> textStrict "EMPTY_EXPR"
                Tfm inf args -> ppTfm inf args
                ConvertRate to from a -> ppConvert to from a
                Select r n a -> ppSelect r n a
                If rate cond th el -> ppIff rate cond th el
                ExpBool args -> hsep ["some bool expr", pretty $ show args]
                ExpNum arg -> ppExpNum arg
                InitVar v a -> ppInitVar v a
                ReadVar v -> "ReadVar" <+> ppVar v
                WriteVar v a -> ppVar v $= pp a
                -- TODO
                InitArr _v _size -> undefined
                ReadArr _v _index -> undefined
                WriteArr _v _index _ -> undefined
                WriteInitArr _v _index _ -> undefined
                TfmArr _isInit _v _info _args -> undefined
                InitPureArr _outRate _procRate _vals -> undefined
                ReadPureArr _outRate _procRate _arr _index -> undefined
                IfBegin rate cond -> hsep ["IF", ppRate $ fromIfRate rate, ppCond $ fmap pp cond, "\n"]
                IfBlock rate cond (CodeBlock th) -> ppIfBlockBy "IF-BLOCK" rate cond th
                IfElseBlock rate cond (CodeBlock th) (CodeBlock el) ->
                    ppFun (hsep ["IF-BLOCK", ppRate $ fromIfRate rate, ppCond $ fmap pp cond ])
                        [ pp th
                        , "ELSE-BLOCK"
                        , pp el
                        , "END-BLOCK"
                        ]
                ElseBegin -> "ELSE"
                IfEnd -> "END_IF"
                UntilBegin rate cond -> hsep ["UNTIL", ppRate $ fromIfRate rate, ppCond $ fmap pp cond, "\n"]
                UntilEnd -> "END_UNTIL"
                WhileBegin rate cond -> hsep ["WHILE", ppRate $ fromIfRate rate, ppCond $ fmap pp cond, "\n"]
                WhileRefBegin v -> hsep ["WHILE_REF", ppVar v]
                WhileEnd -> "END_WHILE"
                UntilBlock rate cond (CodeBlock th) -> ppIfBlockBy "UNTIL-BLOCK" rate cond th
                WhileBlock rate cond (CodeBlock th) -> ppIfBlockBy "WHILE-BLOCK" rate cond th
                WhileRefBlock var (CodeBlock th) -> ppWhileRefBlock var th
                Verbatim txt -> ppFun "VERBATIM" [textStrict txt]
                Starts -> "STARTS"
                Seq a b -> vcat ["SEQ", pp a, pp b]
                Ends a -> vcat ["ENDS", pp a]
                InitMacrosInt _name _n -> undefined
                InitMacrosDouble _name _d -> undefined
                InitMacrosString _name _str -> undefined
                ReadMacrosInt _name -> undefined
                ReadMacrosDouble _name -> undefined
                ReadMacrosString _name -> undefined
            where
                post a = hsep [hcat ["{", info, "}:"], a]

        ppIfBlockBy leadTag rate cond th =
            ppFun (hsep [leadTag, ppRate $ fromIfRate rate, ppCond $ fmap pp cond ])
                [ pp th
                , "END-BLOCK"
                ]

        ppWhileRefBlock var th =
            ppFun (hsep ["WHILE-REF-BLOCK", ppVar var])
                [ pp th
                , "END-BLOCK"
                ]

        ppTfm info args = ppFun (textStrict $ infoName info) (fmap pp args)

        ppConvert to from a = ppFun (hsep [textStrict "Convert-rate", ppRate to, maybe mempty ppRate from]) [pp a]

        ppSelect rate n arg = ppFun (hsep ["select", ppRate rate, pretty n]) [pp arg]

        ppIff rate cond th el = vcat
            [ hsep ["if", ppRate (fromIfRate rate), ppCond $ fmap pp cond]
            , indent 2 $ vcat
                [ "then" <+> pp th
                , "else" <+> pp el
                ]
            ]

        ppExpNum (PreInline op as) = ppNumOp op (fmap pp as)

        ppInitVar v a = ppFun (hsep ["InitVar", ppVar v]) [pp a]

        ppFun name args = vcat
            [ name
            , indent 2 $ vcat args
            ]

        pp = either ppPrim id . unPrimOr
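Pretty.hs builds its output with wl-pprint-text combinators; ppCsdFile, for example, nests tag sections separated by blank lines via vcatSep. A small standalone sketch of that pattern, using only the Text.PrettyPrint.Leijen.Text module the file already imports; the section contents below are placeholder strings, not real Csound code.

import qualified Data.Text.Lazy as LT
import Text.PrettyPrint.Leijen.Text (Doc, vcat, punctuate, line, char, text, putDoc)

-- Blank line between documents, as in Pretty.hs.
vcatSep :: [Doc] -> Doc
vcatSep = vcat . punctuate line

-- XML-ish wrapper, as in Pretty.hs (simplified to lazy Text).
tag :: LT.Text -> Doc -> Doc
tag name content = vcatSep [ char '<' <> text name <> char '>'
                           , content
                           , text (LT.pack "</") <> text name <> char '>' ]

main :: IO ()
main = putDoc $ tag (LT.pack "CsoundSynthesizer") $ vcatSep
  [ tag (LT.pack "CsOptions")     (text (LT.pack "-o out.wav"))
  , tag (LT.pack "CsInstruments") (text (LT.pack "instr 1 ... endin"))
  , tag (LT.pack "CsScore")       (text (LT.pack "f0 10"))
  ]

This prints the CsoundSynthesizer skeleton that ppCsdFile produces, with each section wrapped in its tag and separated by blank lines.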
Repository: clash-lang/clash-compiler
File: PatError.hs (haskell)
Source: https://raw.githubusercontent.com/clash-lang/clash-compiler/8e461a910f2f37c900705a0847a9b533bce4d2ea/tests/shouldwork/Basic/PatError.hs
Content hash: 3e249bf493c1ef45a931b1c5e58252c2274a7b8ef6ee58e09ad1ff3a8a392510

module PatError where

import Prelude

topEntity :: Maybe Int -> Int
topEntity (Just x) = x
Repository: klajo/hacks
File: beam_renamer_tests.erl (erlang)
Source: https://raw.githubusercontent.com/klajo/hacks/80afdad130b9b914d410cb382ebb1b6ee1236e94/beam/test/beam_renamer_tests.erl
Content hash: 1183936d161e464928944a8c3599ab205acb38e27c191c9a3312d98b1103bce9

%%%-------------------------------------------------------------------
%%% @doc Test {@link beam_renamer}.
%%% @author ( )
%%% @end
%%%-------------------------------------------------------------------
-module(beam_renamer_tests).

-include_lib("eunit/include/eunit.hrl").

replaces_in_atom_table_test() ->
    'x^' = run_literal(x, 'x^', x).

replaces_in_constant_pool_test() ->
    ['x^'] = run_literal(x, 'x^', [x]),
    ['x^', 'x^'] = run_literal(x, 'x^', [x, x]),
    {'x^', 'x^'} = run_literal(x, 'x^', {x, x}),
    {[{'x^'}]} = run_literal(x, 'x^', {[{x}]}).

run_literal(Name0, Name, Term) ->
    run_with_renamed_module(
      fun() -> Name:f() end,
      mk_module(Name0, [erl_syntax:abstract(Term)]),
      Name).

run_with_renamed_module(Fun, BeamBin, Name) ->
    Bin = beam_renamer:rename(BeamBin, Name),
    unload_module(Name),
    {module, _} = code:load_binary(Name, "dummy.beam", Bin),
    try Fun()
    after unload_module(Name)
    end.

unload_module(ModName) ->
    code:purge(ModName),
    code:delete(ModName).

mk_module(ModName, FuncBody) ->
    {ok, ModName, Bin} = compile:forms(mk_module_forms(ModName, FuncBody)),
    Bin.

mk_module_forms(ModName, FuncBody) ->
    erl_syntax:revert_forms(
      [erl_syntax:attribute(
         erl_syntax:atom(module), [erl_syntax:atom(ModName)]),
       erl_syntax:attribute(
         erl_syntax:atom(compile), [erl_syntax:atom(export_all)]),
       erl_syntax:function(
         erl_syntax:atom(f), [erl_syntax:clause([], FuncBody)])]).
Repository: Lysxia/generic-data
File: Prelude.hs (haskell)
Source: https://raw.githubusercontent.com/Lysxia/generic-data/846fafb9ec1e4e60424e4f266451665fe25fdfa9/src/Generic/Data/Internal/Prelude.hs
Content hash: 3e8a841b1a590d0222f030e46885e26f247f3f9885e2f2c7d02e171facb46a0e

{-# LANGUAGE FlexibleContexts #-}

-- | Generic deriving for standard classes in base
--
-- === Warning
--
-- This is an internal module: it is not subject to any versioning policy,
-- breaking changes can happen at any time.
--
-- If something here seems useful, please report it or create a pull request to
-- export it from an external module.
module Generic.Data.Internal.Prelude where

import Control.Applicative (liftA2, Alternative(..))
import Data.Function (on)
import Data.Functor.Classes
import Data.Semigroup
import GHC.Generics
import Generic.Data.Internal.Utils (from', to', liftG2)

-- * 'Eq'

-- | Generic ('==').
--
-- @
-- instance 'Eq' MyType where
--   ('==') = 'geq'
-- @
geq :: (Generic a, Eq (Rep a ())) => a -> a -> Bool
geq = (==) `on` from'

-- * 'Ord'

-- | Generic 'compare'.
--
-- @
-- instance 'Ord' MyType where
--   'compare' = 'gcompare'
-- @
gcompare :: (Generic a, Ord (Rep a ())) => a -> a -> Ordering
gcompare = compare `on` from'

-- * 'Semigroup'

-- | Generic @('<>')@ (or 'mappend').
--
-- @
-- instance 'Semigroup' MyType where
--   ('<>') = 'gmappend'
-- @
--
-- See also 'gmempty'.
gmappend :: (Generic a, Semigroup (Rep a ())) => a -> a -> a
gmappend = \a b -> to (from' a <> from' b)

-- * 'Monoid'

-- | Generic 'mempty'.
--
-- @
-- instance 'Monoid' MyType where
--   'mempty' = 'gmempty'
-- @
gmempty :: (Generic a, Monoid (Rep a ())) => a
gmempty = to' mempty

-- | Generic @('<>')@ (or @'mappend'@).
--
-- The difference from `gmappend' is the 'Monoid' constraint instead of
-- 'Semigroup', for older versions of base where 'Semigroup' is not a
-- superclass of 'Monoid'.
gmappend' :: (Generic a, Monoid (Rep a ())) => a -> a -> a
gmappend' = \a b -> to (from' a `mappend` from' b)

-- * 'Functor'

-- | Generic 'fmap'.
--
-- @
-- instance 'Functor' ... where
--   'fmap' = 'gfmap'
-- @
gfmap :: (Generic1 f, Functor (Rep1 f)) => (a -> b) -> f a -> f b
gfmap = \f -> to1 . fmap f . from1

-- | Generic @('<$')@.
--
-- See also 'gfmap'.
gconstmap :: (Generic1 f, Functor (Rep1 f)) => a -> f b -> f a
gconstmap = \a -> to1 . (a <$) . from1

-- * 'Applicative'

-- | Generic 'pure'.
--
-- @
-- instance 'Applicative' ... where
--   'pure' = 'gpure'
--   ('<*>') = 'gap'
-- @
gpure :: (Generic1 f, Applicative (Rep1 f)) => a -> f a
gpure = to1 . pure

-- | Generic @('<*>')@ (or 'Control.Monad.ap').
--
-- See also 'gpure'.
gap :: (Generic1 f, Applicative (Rep1 f)) => f (a -> b) -> f a -> f b
gap = liftG2 (<*>)

-- | Generic 'liftA2'.
--
-- See also 'gpure'.
gliftA2 :: (Generic1 f, Applicative (Rep1 f)) => (a -> b -> c) -> f a -> f b -> f c
gliftA2 = liftG2 . liftA2

-- * 'Alternative'

-- | Generic 'empty'.
--
-- @
-- instance 'Alternative' ... where
--   'empty' = 'gempty'
--   ('<|>') = 'galt'
-- @
gempty :: (Generic1 f, Alternative (Rep1 f)) => f a
gempty = to1 empty

-- | Generic ('<|>').
--
-- See also 'gempty'.
galt :: (Generic1 f, Alternative (Rep1 f)) => f a -> f a -> f a
galt = liftG2 (<|>)

-- * 'Foldable'

-- | Generic 'foldMap'.
--
-- @
-- instance 'Foldable' ... where
--   'foldMap' = 'gfoldMap'
-- @
--
-- This is deprecated but kept around just for reference.
{-# DEPRECATED gfoldMap "This definition has been replaced with 'Generic.Data.Internal.gfoldMap'." #-}
gfoldMap :: (Generic1 f, Foldable (Rep1 f), Monoid m) => (a -> m) -> f a -> m
gfoldMap = \f -> foldMap f . from1

-- | Generic 'foldr'.
--
-- @
-- instance 'Foldable' ... where
--   'foldr' = 'gfoldr'
-- @
--
-- See also 'gfoldMap'.
gfoldr :: (Generic1 f, Foldable (Rep1 f)) => (a -> b -> b) -> b -> f a -> b
gfoldr = \f b -> foldr f b . from1
-- Note: this one is not deprecated because inlining Just Works.

-- * 'Traversable'

-- | Generic 'traverse'.
--
-- @
-- instance 'Traversable' ... where
--   'traverse' = 'gtraverse'
-- @
--
-- This is deprecated but kept around just for reference.
{-# DEPRECATED gtraverse "This definition has been replaced with 'Generic.Data.Internal.gtraverse'." #-}
gtraverse :: (Generic1 f, Traversable (Rep1 f), Applicative m) => (a -> m b) -> f a -> m (f b)
gtraverse = \f -> fmap to1 . traverse f . from1

-- | Generic 'sequenceA'.
--
-- @
-- instance 'Traversable' ... where
--   'sequenceA' = 'gsequenceA'
-- @
--
-- See also 'gtraverse'.
--
-- This is deprecated but kept around just for reference.
{-# DEPRECATED gsequenceA "This definition has been replaced with 'Generic.Data.Internal.gsequenceA'." #-}
gsequenceA :: (Generic1 f, Traversable (Rep1 f), Applicative m) => f (m a) -> m (f a)
gsequenceA = fmap to1 . sequenceA . from1

-- * 'Eq1'

-- | Generic 'liftEq'.
gliftEq :: (Generic1 f, Eq1 (Rep1 f)) => (a -> b -> Bool) -> f a -> f b -> Bool
gliftEq = \(==.) a b -> liftEq (==.) (from1 a) (from1 b)

-- * 'Ord1'

-- | Generic 'liftCompare'.
gliftCompare :: (Generic1 f, Ord1 (Rep1 f)) => (a -> b -> Ordering) -> f a -> f b -> Ordering
gliftCompare = \compare' a b -> liftCompare compare' (from1 a) (from1 b)
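The Haddock comments above spell out the intended usage: derive Generic for your type and define the class methods with the g-prefixed helpers. A minimal sketch of that pattern follows; MyType is a hypothetical example type, and the helpers are assumed to be re-exported from the package's public Generic.Data module (otherwise they can be imported from the internal module above).

{-# LANGUAGE DeriveGeneric #-}

import GHC.Generics (Generic)
import Generic.Data (geq, gmappend, gmempty)  -- assumed public re-exports of the helpers above

data MyType = MyType [Int] String
  deriving (Show, Generic)

instance Eq MyType where
  (==) = geq            -- generic (==), as in the docs above

instance Semigroup MyType where
  (<>) = gmappend       -- generic field-wise (<>)

instance Monoid MyType where
  mempty = gmempty      -- generic mempty

main :: IO ()
main = do
  print (MyType [1] "a" == MyType [1] "a")   -- True
  print (MyType [1] "a" <> MyType [2] "b")   -- MyType [1,2] "ab"
  print (mempty :: MyType)                   -- MyType [] ""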
Repository: williamleferrand/accretio
File: core_invite.ml (ocaml)
Source: https://raw.githubusercontent.com/williamleferrand/accretio/394f855e9c2a6a18f0c2da35058d5a01aacf6586/playbooks/core_invite.ml
Content hash: b3a1dcdaf55a70a28bc1b23a9a68ef333dda5c9779984e25eb61316b079a3c7e

(*
 * core - invite
 *
 *
 *
 *)

open Lwt
open Printf
open CalendarLib

open Api

open Eliom_content.Html5
open Eliom_content.Html5.D

open Message_parsers

let has_already_declined = sprintf "core-invite-has-already-declined-%d"
let tag_timer_reminder = sprintf "core-invite-reminded-%d"
let key_email_anchor = sprintf "core-invite-anchor-%d"

let invite context message =
  lwt content = context.get_message_content ~message in
  let emails = Ys_email.get_all_emails content in
  lwt supervisor = $society(context.society)->leader in
  lwt supervisor_name = $member(supervisor)->name in
  lwt already_members, already_declined, invited =
    Lwt_list.fold_left_s
      (fun (already_members, already_declined, invited) email ->
        context.log_info "inviting member with email %s to society %d" email context.society ;
        lwt member =
          match_lwt Object_member.Store.find_by_email email with
          | Some uid -> return uid
          | None ->
            match_lwt Object_member.Store.create ~preferred_email:email ~emails:[ email ] () with
            | `Object_already_exists (_, uid) -> return uid
            | `Object_created member -> return member.Object_member.uid
        in
        match_lwt context.is_member ~member with
          true -> return ((member, email) :: already_members, already_declined, invited)
        | false ->
          (* check if the member hasn't declined already *)
          match_lwt context.get ~key:(has_already_declined member) with
            Some _ -> return (already_members, (member, email) :: already_declined, invited)
          | None ->
            lwt _ =
              match_lwt
                context.message_member
                  ~member
                  ~subject:context.society_name
                  ~content:[
                    pcdata "Greetings," ; br () ; br () ;
                    pcdata "I'm running a group called " ; i [ pcdata context.society_name ] ; pcdata ". " ;
                    pcdata context.society_description ; br () ; br () ;
                    pcdata "Would you like to be notified about the upcoming events? No signup is necessary; we usually organize ourselves by email." ; br () ; br () ;
                    pcdata "Looking forward to hearing from you," ; br () ; br () ;
                    pcdata supervisor_name ;
                  ]
                  ()
              with
                None -> return_unit
              | Some message_id -> context.set ~key:(key_email_anchor member) ~value:(Ys_uid.to_string message_id)
            in
            lwt _ =
              context.set_timer
                ~label:(tag_timer_reminder member)
                ~duration:(Calendar.Period.lmake ~hour:26 ())
                (`RemindMember member)
            in
            return (already_members, already_declined, ((member, email) :: invited)))
      ([], [], [])
      emails
  in
  lwt _ =
    context.reply_to
      ~message
      ~content:[
        pcdata "Great. Here is what I did:" ; br () ; br () ;
        pcdata "Already members:" ;
        ul (List.map (fun (_, email) -> li [ pcdata email ]) already_members) ; br () ;
        pcdata "Already declined:" ;
        ul (List.map (fun (_, email) -> li [ pcdata email ]) already_declined) ; br () ;
        pcdata "Invited:" ;
        ul (List.map (fun (_, email) -> li [ pcdata email ]) invited) ; br () ;
        pcdata "Let's see what comes back!"
      ]
      ()
  in
  return `None

let remind context member =
  context.log_info "sending reminder to member %d" member ;
  lwt _ = context.cancel_timers ~query:(tag_timer_reminder member) in
  match_lwt context.get ~key:(key_email_anchor member) with
    None ->
    lwt _ =
      context.message_member
        ~member
        ~subject:context.society_name
        ~content:[
          pcdata "My apologies for the reminder, but maybe have you missed my previous email." ; br () ; br () ;
          pcdata "Would you be interested in hearing more about our " ; i [ pcdata context.society_name ] ; pcdata " group?" ;
        ]
        ()
    in
    return `None
  | Some message ->
    let message = Ys_uid.of_string message in
    lwt _ =
      context.reply_to
        ~message
        ~content:[
          pcdata "My apologies for the reminder, but maybe have you missed my previous email - would you be interested in hearing more about our group?" ;
        ]
        ()
    in
    return `None

let accepted context message =
  lwt member = context.get_message_sender ~message in
  context.log_info "adding member %d to the society" member ;
  lwt _ = context.add_member ~member in
  lwt _ = context.cancel_timers ~query:(tag_timer_reminder member) in
  lwt _ =
    context.reply_to
      ~message
      ~content:[ pcdata "Great, I added you to the list of participants, stay tuned!" ; br () ]
      ()
  in
  return `None

let declined context message =
  lwt member = context.get_message_sender ~message in
  context.log_info "removing member %d to the society" member ;
  lwt _ = context.remove_member ~member in
  lwt _ = context.cancel_timers ~query:(tag_timer_reminder member) in
  lwt _ = context.set ~key:(has_already_declined member) ~value:"true" in
  lwt _ =
    context.reply_to
      ~message
      ~content:[ pcdata "Ok!" ; pcdata " If you change you mind later, don't hesitate to be get back in touch!" ; br () ]
      ()
  in
  return `None

let initialize_invites context () =
  lwt _ =
    context.message_supervisor
      ~subject:"Who do you want to invite?"
      ~content:[
        pcdata "Greetings," ; br () ; br () ;
        pcdata "Who do you want to invite? Just send me a bunch of emails and I'll figure out who to get in touch with" ; br ()
      ]
      ()
  in
  return `None

COMPONENT

  *initialize_invites<forward> ~> `Message of email ~> invite

  remind ~> `Declined of email ~> declined
         ~> `Accepted of email ~> accepted

  -invite ~> `RemindMember of int ~> remind
          ~> `Accepted of email ~> accepted

  invite ~> `Accepted of email ~> accepted
  invite ~> `Declined of email ~> declined
; br () ; br () ; pcdata "Looking forward to hearing from you," ; br () ; br () ; pcdata supervisor_name ; ] () with None -> return_unit | Some message_id -> context.set ~key:(key_email_anchor member) ~value:(Ys_uid.to_string message_id) in lwt _ = context.set_timer ~label:(tag_timer_reminder member) ~duration:(Calendar.Period.lmake ~hour:26 ()) (`RemindMember member) in return (already_members, already_declined, ((member, email) :: invited))) ([], [], []) emails in lwt _ = context.reply_to ~message ~content:[ pcdata "Great. Here is what I did:" ; br () ; br () ; pcdata "Already members:" ; ul (List.map (fun (_, email) -> li [ pcdata email ]) already_members) ; br () ; pcdata "Already declined:" ; ul (List.map (fun (_, email) -> li [ pcdata email ]) already_declined) ; br () ; pcdata "Invited:" ; ul (List.map (fun (_, email) -> li [ pcdata email ]) invited) ; br () ; pcdata "Let's see what comes back!" ] () in return `None let remind context member = context.log_info "sending reminder to member %d" member ; lwt _ = context.cancel_timers ~query:(tag_timer_reminder member) in match_lwt context.get ~key:(key_email_anchor member) with None -> lwt _ = context.message_member ~member ~subject:context.society_name ~content:[ pcdata "My apologies for the reminder, but maybe have you missed my previous email." ; br () ; br () ; pcdata "Would you be interested in hearing more about our " ; i [ pcdata context.society_name ] ; pcdata " group?" ; ] () in return `None | Some message -> let message = Ys_uid.of_string message in lwt _ = context.reply_to ~message ~content:[ pcdata "My apologies for the reminder, but maybe have you missed my previous email - would you be interested in hearing more about our group?" ; ] () in return `None let accepted context message = lwt member = context.get_message_sender ~message in context.log_info "adding member %d to the society" member ; lwt _ = context.add_member ~member in lwt _ = context.cancel_timers ~query:(tag_timer_reminder member) in lwt _ = context.reply_to ~message ~content:[ pcdata "Great, I added you to the list of participants, stay tuned!" ; br () ] () in return `None let declined context message = lwt member = context.get_message_sender ~message in context.log_info "removing member %d to the society" member ; lwt _ = context.remove_member ~member in lwt _ = context.cancel_timers ~query:(tag_timer_reminder member) in lwt _ = context.set ~key:(has_already_declined member) ~value:"true" in lwt _ = context.reply_to ~message ~content:[ pcdata "Ok!" ; pcdata " If you change you mind later, don't hesitate to be get back in touch!" ; br () ] () in return `None let initialize_invites context () = lwt _ = context.message_supervisor ~subject:"Who do you want to invite?" ~content:[ pcdata "Greetings," ; br () ; br () ; pcdata "Who do you want to invite? 
Just send me a bunch of emails and I'll figure out who to get in touch with" ; br () ] () in return `None COMPONENT *initialize_invites<forward> ~> `Message of email ~> invite remind ~> `Declined of email ~> declined ~> `Accepted of email ~> accepted -invite ~> `RemindMember of int ~> remind ~> `Accepted of email ~> accepted invite ~> `Accepted of email ~> accepted invite ~> `Declined of email ~> declined </span></div> </div></div> </td> </tr><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610266"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">6b79bf8204c2560a98fd1b57438c1c67a852a1f6bdfc379cf3ec924ba2c70262</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">LaurentRDC/pandoc-plot</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">Prelude.hs</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">{-# LANGUAGE OverloadedStrings #-} -- | -- Module : $header$ Copyright : ( c ) , 2019 - present License : GNU GPL , version 2 or above -- Maintainer : -- Stability : internal -- Portability : portable -- -- Prelude for renderers, containing some helpful utilities. module Text.Pandoc.Filter.Plot.Renderers.Prelude ( module Prelude, module Text.Pandoc.Filter.Plot.Monad, Text, st, unpack, findExecutable, appendCapture, toRPath, ) where import Data.Text (Text, unpack) import System.Directory (findExecutable) import System.FilePath (isPathSeparator) import Text.Pandoc.Filter.Plot.Monad import Text.Shakespeare.Text (st) -- | A shortcut to append capture script fragments to scripts appendCapture :: (FigureSpec -> FilePath -> Script) -> FigureSpec -> FilePath -> Script appendCapture f s fp = mconcat [script s, "\n", f s fp] -- | R paths use the '/' path separator toRPath :: FilePath -> FilePath toRPath = fmap (\c -> if isPathSeparator c then '/' else c) </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/LaurentRDC/pandoc-plot/933daba593196bf3b1ae1f2022d17389552f275c/src/Text/Pandoc/Filter/Plot/Renderers/Prelude.hs</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">haskell</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "># LANGUAGE OverloadedStrings # | Module : $header$ Maintainer : Stability : internal Portability : portable Prelude for renderers, containing some helpful utilities. 
| A shortcut to append capture script fragments to scripts | R paths use the '/' path separator</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "> Copyright : ( c ) , 2019 - present License : GNU GPL , version 2 or above module Text.Pandoc.Filter.Plot.Renderers.Prelude ( module Prelude, module Text.Pandoc.Filter.Plot.Monad, Text, st, unpack, findExecutable, appendCapture, toRPath, ) where import Data.Text (Text, unpack) import System.Directory (findExecutable) import System.FilePath (isPathSeparator) import Text.Pandoc.Filter.Plot.Monad import Text.Shakespeare.Text (st) appendCapture :: (FigureSpec -> FilePath -> Script) -> FigureSpec -> FilePath -> Script appendCapture f s fp = mconcat [script s, "\n", f s fp] toRPath :: FilePath -> FilePath toRPath = fmap (\c -> if isPathSeparator c then '/' else c) </span></div> </div></div> </td> </tr><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610267"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">eb2259778274d44093ba70d8ba8192f57d237cb82c1d2726aae5f7cd0b2b8a8f</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">rob7hunter/leftparen</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">loc.scm</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">;; how much code have you written? #lang scheme/base (require "util.scm") (provide loc) ;; counts all lines except for comment lines and blank lines (define (loc #:comment-chars (comment-chars (list #\;)) . filenames) (fold + 0 (map (lambda (filename) (file-line-fold (lambda (line-str total-loc) (let ((trimmed (string-trim-both line-str #\space))) (cond ((string=? trimmed "") total-loc) ((memq (string-ref trimmed 0) comment-chars) total-loc) (else (+ 1 total-loc))))) 0 filename)) filenames))) </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/rob7hunter/leftparen/169c896bda989b6a049fe49253a04d6f8b62402b/loc.scm</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">scheme</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "> how much code have you written? counts all lines except for comment lines and blank lines )) . 
filenames)</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">#lang scheme/base (require "util.scm") (provide loc) (fold + 0 (map (lambda (filename) (file-line-fold (lambda (line-str total-loc) (let ((trimmed (string-trim-both line-str #\space))) (cond ((string=? trimmed "") total-loc) ((memq (string-ref trimmed 0) comment-chars) total-loc) (else (+ 1 total-loc))))) 0 filename)) filenames))) </span></div> </div></div> </td> </tr><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610268"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">edcbdbbce0fdac8d366b70a28a717a3431f760e7d8d96f0c396af1b9675ca8df</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">madmax96/brave-clojure-solutions</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">section_8.clj</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">(ns clojure-brave.exercises.section-8) setup for exercise 1 (def order-details-validation {:name ["Please enter a name" not-empty] :email ["Please enter an email address" not-empty "Your email address doesn't look like an email address" #(or (empty? %) (re-seq #"@" %))]}) (def order-details-good {:name "user" :email ""}) (def order-details-bad {:name "user" :email "usermail.com"}) (defn error-messages-for "Return a seq of error messages" [to-validate message-validator-pairs] (map first (filter #(not ((second %) to-validate)) (partition 2 message-validator-pairs)))) (defn validate "Returns a map with a vector of errors for each key" [to-validate validations] (reduce (fn [errors validation] (let [[fieldname validation-check-groups] validation value (get to-validate fieldname) error-messages (error-messages-for value validation-check-groups)] (if (empty? error-messages) errors (assoc errors fieldname error-messages)))) {} validations)) ;we need if-valid macro in order to implement when-valid in most straightforward way, ;similar to how 'when' macro from `clojure.core` is implemented in terms of 'if' (defmacro if-valid "Handle validation more concisely" [to-validate validations errors-name & then-else] `(let [~errors-name (validate ~to-validate ~validations)] (if (empty? 
~errors-name) ~@then-else))) 1 (defmacro when-valid [data data-validation & actions] `(if-valid ~data ~data-validation ~'err (do ~@actions) false)) ;Should execute both functions (when-valid order-details-good order-details-validation (println "It's a success!") (println :success)) ;Should return false (when-valid order-details-bad order-details-validation (println "It's a success!") (println :success)) ;Check expanded forms (macroexpand '(when-valid order-details order-details-validation (println "It's a success!") (println :success))) 2 (defmacro my-or "macro for or logic" ([] nil) ([x] x) ([form & forms] `(let [sym# ~form] (if sym# sym# (my-or ~@forms))))) (my-or nil false 2 1) (macroexpand '(my-or nil false 2 1)) 3 (defmacro defattrs [& assignments] `(do ~@(map (fn [[retr attr]] `(def ~retr ~attr)) (partition 2 assignments)))) (defattrs c-int :intelligence wokring? :should-work) (print wokring? c-int) (macroexpand '(defattrs c-int :intelligence test :should-work)) </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/madmax96/brave-clojure-solutions/3be234bdcf3704acd2aca62d1a46fa03463e5735/section_8.clj</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">clojure</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">we need if-valid macro in order to implement when-valid in most straightforward way, similar to how 'when' macro from `clojure.core` is implemented in terms of 'if' Should execute both functions Should return false Check expanded forms</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">(ns clojure-brave.exercises.section-8) setup for exercise 1 (def order-details-validation {:name ["Please enter a name" not-empty] :email ["Please enter an email address" not-empty "Your email address doesn't look like an email address" #(or (empty? %) (re-seq #"@" %))]}) (def order-details-good {:name "user" :email ""}) (def order-details-bad {:name "user" :email "usermail.com"}) (defn error-messages-for "Return a seq of error messages" [to-validate message-validator-pairs] (map first (filter #(not ((second %) to-validate)) (partition 2 message-validator-pairs)))) (defn validate "Returns a map with a vector of errors for each key" [to-validate validations] (reduce (fn [errors validation] (let [[fieldname validation-check-groups] validation value (get to-validate fieldname) error-messages (error-messages-for value validation-check-groups)] (if (empty? error-messages) errors (assoc errors fieldname error-messages)))) {} validations)) (defmacro if-valid "Handle validation more concisely" [to-validate validations errors-name & then-else] `(let [~errors-name (validate ~to-validate ~validations)] (if (empty? 
~errors-name) ~@then-else))) 1 (defmacro when-valid [data data-validation & actions] `(if-valid ~data ~data-validation ~'err (do ~@actions) false)) (when-valid order-details-good order-details-validation (println "It's a success!") (println :success)) (when-valid order-details-bad order-details-validation (println "It's a success!") (println :success)) (macroexpand '(when-valid order-details order-details-validation (println "It's a success!") (println :success))) 2 (defmacro my-or "macro for or logic" ([] nil) ([x] x) ([form & forms] `(let [sym# ~form] (if sym# sym# (my-or ~@forms))))) (my-or nil false 2 1) (macroexpand '(my-or nil false 2 1)) 3 (defmacro defattrs [& assignments] `(do ~@(map (fn [[retr attr]] `(def ~retr ~attr)) (partition 2 assignments)))) (defattrs c-int :intelligence wokring? :should-work) (print wokring? c-int) (macroexpand '(defattrs c-int :intelligence test :should-work)) </span></div> </div></div> </td> </tr><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610269"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">335cd37f510fea2c873504bdbf2484f4eec983570939507828c7671f0c5df325</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">geocaml/ocaml-geojson</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">geojsone.mli</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "> Copyright ( c ) 2021 - 2022 < > Permission to use , copy , modify , and/or distribute this software for any purpose with or without fee is hereby granted , provided that the above copyright notice and this permission notice appear in all copies . THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM , OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE . Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. *) * A library for manipulating large documents without reading the whole document into memory using the { ! streaming , JSON parser . document into memory using the {!Jsonm} streaming, JSON parser. 
*) module Err : sig type location = (int * int) * (int * int) type t = [ `Error of location * Jsone.error | `EOI | `Unexpected of string ] val pp : Format.formatter -> t -> unit end module G : Geojson.S with type json = Ezjsone.value * { 2 Maps } Maps are functions that allow you to manipulate common structure in GeoJson objects . These will be written directly back to the destination that you provide . Maps are functions that allow you to manipulate common structure in GeoJson objects. These will be written directly back to the destination that you provide. *) val map_geometry : (G.Geometry.t -> G.Geometry.t) -> Jsone.src -> Jsone.dst -> (unit, Err.t) result * [ map_geometry f src dst ] will apply [ f ] to all objects . This is essentially any { { : #section-3.1 } geometry object } . The map will recurse into geometry collections . Note for the moment if you have a single geometry object as your document , this will not work . essentially any {{:#section-3.1} geometry object}. The map will recurse into geometry collections. Note for the moment if you have a single geometry object as your document, this will not work. *) val map_props : (Ezjsone.value -> Ezjsone.value) -> Jsone.src -> Jsone.dst -> (unit, Err.t) result * [ map_props ~f ] will apply [ f ] to each feature 's properties field . The properties field is decoded into an { ! Ezjsone.value } for convenience . The properties field is decoded into an {!Ezjsone.value} for convenience. *) * { 2 Folds } Folds are like maps except you can collect items into an accumulator which is returned to you . For example , you might want to collect all of the [ names ] in the [ properties ] of features . { [ let get_string_exn = function ` String s - > s | _ - > failwith " err " let = function | ` O assoc - > List.assoc " name " assoc | > get_string_exn | _ - > failwith " err " let places src = ( fun acc p - > p : : acc ) [ ] src ] } Folds are like maps except you can collect items into an accumulator which is returned to you. For example, you might want to collect all of the [names] in the [properties] of features. {[ let get_string_exn = function `String s -> s | _ -> failwith "err" let get_name = function | `O assoc -> List.assoc "name" assoc |> get_string_exn | _ -> failwith "err" let places src = Geojsonm.fold_props (fun acc p -> get_name p :: acc) [] src ]} *) val fold_geometry : ('a -> G.Geometry.t -> 'a) -> 'a -> Jsone.src -> ('a, Err.t) result (** [fold_geometry f acc src] is much like {!map_geometry} but allows you to accumulate some result that is then returned to you. *) val fold_props : ('a -> Ezjsone.value -> 'a) -> 'a -> Jsone.src -> ('a, Err.t) result (** [fold_props f init src] *) * { 2 Iterators } Iterators are similar to map functions except they take a function [ f ] that takes a single element from the data - structure as an argument and returns [ unit ] . In that sense , they tend to be functions with side - effects , such as [ print_endline ] . For example , we might want to print the JSON value of every geometry object in a GeoJSON object . { [ let print_geometry g = ( . ) let values src = Geojsonm.iter_geometry print_geometry src ] } Iterators are similar to map functions except they take a function [f] that takes a single element from the data-structure as an argument and returns [unit]. In that sense, they tend to be functions with side-effects, such as [print_endline]. For example, we might want to print the JSON value of every geometry object in a GeoJSON object. 
{[ let print_geometry g = print_endline @@ Ezjsone.value_to_string (Geojsonm.G.Geometry.to_json g) let values src = Geojsonm.iter_geometry print_geometry src ]} *) val iter_geometry : (G.t -> unit) -> Jsone.src -> (unit, Err.t) result * [ iter_geometry f src ] will apply [ f ] to all objects . val iter_props : (Ezjsone.value -> unit) -> Jsone.src -> (unit, Err.t) result (** [iter_props f src] will apply [f] to each feature's properties field. *) * { 2 Effect - based , non - blocking libraries } These libraries use effects to perform non - blocking parsing . They are currently a part of Geojsone and exposed for other libraries to use . These libraries use effects to perform non-blocking parsing. They are currently a part of Geojsone and exposed for other libraries to use. *) module Ezjsone = Ezjsone module Jsone = Jsone module Uutfe = Uutfe </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/geocaml/ocaml-geojson/1342f4627caa813cd153d5724f73c2fb8f0eac31/src/geojsone/geojsone.mli</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">ocaml</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">* [fold_geometry f acc src] is much like {!map_geometry} but allows you to accumulate some result that is then returned to you. * [fold_props f init src] * [iter_props f src] will apply [f] to each feature's properties field. </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "> Copyright ( c ) 2021 - 2022 < > Permission to use , copy , modify , and/or distribute this software for any purpose with or without fee is hereby granted , provided that the above copyright notice and this permission notice appear in all copies . THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM , OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE . Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. *) * A library for manipulating large documents without reading the whole document into memory using the { ! streaming , JSON parser . 
document into memory using the {!Jsonm} streaming, JSON parser. *) module Err : sig type location = (int * int) * (int * int) type t = [ `Error of location * Jsone.error | `EOI | `Unexpected of string ] val pp : Format.formatter -> t -> unit end module G : Geojson.S with type json = Ezjsone.value * { 2 Maps } Maps are functions that allow you to manipulate common structure in GeoJson objects . These will be written directly back to the destination that you provide . Maps are functions that allow you to manipulate common structure in GeoJson objects. These will be written directly back to the destination that you provide. *) val map_geometry : (G.Geometry.t -> G.Geometry.t) -> Jsone.src -> Jsone.dst -> (unit, Err.t) result * [ map_geometry f src dst ] will apply [ f ] to all objects . This is essentially any { { : #section-3.1 } geometry object } . The map will recurse into geometry collections . Note for the moment if you have a single geometry object as your document , this will not work . essentially any {{:#section-3.1} geometry object}. The map will recurse into geometry collections. Note for the moment if you have a single geometry object as your document, this will not work. *) val map_props : (Ezjsone.value -> Ezjsone.value) -> Jsone.src -> Jsone.dst -> (unit, Err.t) result * [ map_props ~f ] will apply [ f ] to each feature 's properties field . The properties field is decoded into an { ! Ezjsone.value } for convenience . The properties field is decoded into an {!Ezjsone.value} for convenience. *) * { 2 Folds } Folds are like maps except you can collect items into an accumulator which is returned to you . For example , you might want to collect all of the [ names ] in the [ properties ] of features . { [ let get_string_exn = function ` String s - > s | _ - > failwith " err " let = function | ` O assoc - > List.assoc " name " assoc | > get_string_exn | _ - > failwith " err " let places src = ( fun acc p - > p : : acc ) [ ] src ] } Folds are like maps except you can collect items into an accumulator which is returned to you. For example, you might want to collect all of the [names] in the [properties] of features. {[ let get_string_exn = function `String s -> s | _ -> failwith "err" let get_name = function | `O assoc -> List.assoc "name" assoc |> get_string_exn | _ -> failwith "err" let places src = Geojsonm.fold_props (fun acc p -> get_name p :: acc) [] src ]} *) val fold_geometry : ('a -> G.Geometry.t -> 'a) -> 'a -> Jsone.src -> ('a, Err.t) result val fold_props : ('a -> Ezjsone.value -> 'a) -> 'a -> Jsone.src -> ('a, Err.t) result * { 2 Iterators } Iterators are similar to map functions except they take a function [ f ] that takes a single element from the data - structure as an argument and returns [ unit ] . In that sense , they tend to be functions with side - effects , such as [ print_endline ] . For example , we might want to print the JSON value of every geometry object in a GeoJSON object . { [ let print_geometry g = ( . ) let values src = Geojsonm.iter_geometry print_geometry src ] } Iterators are similar to map functions except they take a function [f] that takes a single element from the data-structure as an argument and returns [unit]. In that sense, they tend to be functions with side-effects, such as [print_endline]. For example, we might want to print the JSON value of every geometry object in a GeoJSON object. 
{[ let print_geometry g = print_endline @@ Ezjsone.value_to_string (Geojsonm.G.Geometry.to_json g) let values src = Geojsonm.iter_geometry print_geometry src ]} *) val iter_geometry : (G.t -> unit) -> Jsone.src -> (unit, Err.t) result * [ iter_geometry f src ] will apply [ f ] to all objects . val iter_props : (Ezjsone.value -> unit) -> Jsone.src -> (unit, Err.t) result * { 2 Effect - based , non - blocking libraries } These libraries use effects to perform non - blocking parsing . They are currently a part of Geojsone and exposed for other libraries to use . These libraries use effects to perform non-blocking parsing. They are currently a part of Geojsone and exposed for other libraries to use. *) module Ezjsone = Ezjsone module Jsone = Jsone module Uutfe = Uutfe </span></div> </div></div> </td> </tr><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610270"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">bda02469c7de385dfb454449b2e2466c5cdbe512c7de07d562364fffe6bd5f45</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">acieroid/scala-am</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">nqueens.scm</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">(define (one-to n) (letrec ((loop (lambda (i l) (if (= i 0) l (loop (- i 1) (cons i l)))))) (loop n '()))) (define (ok? row dist placed) (if (null? placed) #t (and (not (= (car placed) (+ row dist))) (not (= (car placed) (- row dist))) (ok? row (+ dist 1) (cdr placed))))) (define (try-it x y z) (if (null? x) (if (null? y) 1 0) (+ (if (ok? (car x) 1 z) (try-it (append (cdr x) y) '() (cons (car x) z)) 0) (try-it (cdr x) (cons (car x) y) z)))) (define (nqueens n) (try-it (one-to n) '() '())) (nqueens 8) #t </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/acieroid/scala-am/13ef3befbfc664b77f31f56847c30d60f4ee7dfe/test/R5RS/gambit/nqueens.scm</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">scheme</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "></span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">(define (one-to n) (letrec ((loop (lambda (i l) (if (= i 0) l (loop (- i 1) (cons i l)))))) (loop n '()))) (define (ok? row dist placed) (if (null? placed) #t (and (not (= (car placed) (+ row dist))) (not (= (car placed) (- row dist))) (ok? row (+ dist 1) (cdr placed))))) (define (try-it x y z) (if (null? x) (if (null? y) 1 0) (+ (if (ok? 
(car x) 1 z) (try-it (append (cdr x) y) '() (cons (car x) z)) 0) (try-it (cdr x) (cons (car x) y) z)))) (define (nqueens n) (try-it (one-to n) '() '())) (nqueens 8) #t </span></div> </div></div> </td> </tr><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610271"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">a9f730699351ac9802f59db29444d24785563bff66d67a9a0eec9662899ea1a8</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">haskell/vector</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">take.hs</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">import qualified Data.Vector as U import Data.Bits main = print . U.length . U.take 100000 . U.replicate 1000000 $ (7 :: Int) </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/haskell/vector/4c87e88f07aad166c6ae2ccb94fa539fbdd99a91/old-testsuite/microsuite/take.hs</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">haskell</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "></span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">import qualified Data.Vector as U import Data.Bits main = print . U.length . U.take 100000 . U.replicate 1000000 $ (7 :: Int) </span></div> </div></div> </td> </tr><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610272"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">d552abab566a0a3f50e8d1787878e8def49d571c4c6be967ef64bece40a65253</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">cloudant/monic</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">monic_utils.erl</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "> Copyright 2011 Cloudant %% Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may not %% use this file except in compliance with the License. 
You may obtain a copy of %% the License at %% %% -2.0 %% %% Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an " AS IS " BASIS , WITHOUT %% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the %% License for the specific language governing permissions and limitations under %% the License. -module(monic_utils). -export([path/2, exists/2, open/2]). -export([write_term/2, pread_term/2]). -include("monic.hrl"). -define(MAX_TERM, (1 bsl 16)). path(ReqData, Context) -> Root = proplists:get_value(root, Context, "tmp"), File = wrq:path_info(file, ReqData), filename:join(Root, File). open(ReqData, Context) -> case monic_file:open(path(ReqData, Context)) of {ok, Pid} -> monic_file_lru:update(Pid), {ok, Pid}; Else -> Else end. exists(ReqData, Context) -> filelib:is_file(path(ReqData, Context)). -spec write_term(term(), term()) -> {ok, integer()} | {error, term()}. write_term(Fd, Term) -> Bin = term_to_binary(Term), Size = iolist_size(Bin), case Size =< ?MAX_TERM of true -> case file:write(Fd, <<Size:16/integer, Bin/binary>>) of ok -> {ok, Size + 2}; Else -> Else end; false -> {error, term_too_long} end. -spec pread_term(term(), integer()) -> {ok, integer(), term()} | eof | {error, term()}. pread_term(Fd, Location) -> case file:pread(Fd, Location, 2) of {ok, <<Size:16/integer>>} -> case file:pread(Fd, Location + 2, Size) of {ok, <<Bin:Size/binary>>} -> {ok, Size + 2, binary_to_term(Bin)}; {ok, _} -> eof; Else -> Else end; Else -> Else end. </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/cloudant/monic/9b7670d53ee40efea57c777f044b3de74c66e6de/src/monic_utils.erl</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">erlang</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "> use this file except in compliance with the License. You may obtain a copy of the License at -2.0 Unless required by applicable law or agreed to in writing, software WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "> Copyright 2011 Cloudant Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may not distributed under the License is distributed on an " AS IS " BASIS , WITHOUT -module(monic_utils). -export([path/2, exists/2, open/2]). -export([write_term/2, pread_term/2]). -include("monic.hrl"). -define(MAX_TERM, (1 bsl 16)). path(ReqData, Context) -> Root = proplists:get_value(root, Context, "tmp"), File = wrq:path_info(file, ReqData), filename:join(Root, File). open(ReqData, Context) -> case monic_file:open(path(ReqData, Context)) of {ok, Pid} -> monic_file_lru:update(Pid), {ok, Pid}; Else -> Else end. exists(ReqData, Context) -> filelib:is_file(path(ReqData, Context)). 
-spec write_term(term(), term()) -> {ok, integer()} | {error, term()}. write_term(Fd, Term) -> Bin = term_to_binary(Term), Size = iolist_size(Bin), case Size =< ?MAX_TERM of true -> case file:write(Fd, <<Size:16/integer, Bin/binary>>) of ok -> {ok, Size + 2}; Else -> Else end; false -> {error, term_too_long} end. -spec pread_term(term(), integer()) -> {ok, integer(), term()} | eof | {error, term()}. pread_term(Fd, Location) -> case file:pread(Fd, Location, 2) of {ok, <<Size:16/integer>>} -> case file:pread(Fd, Location + 2, Size) of {ok, <<Bin:Size/binary>>} -> {ok, Size + 2, binary_to_term(Bin)}; {ok, _} -> eof; Else -> Else end; Else -> Else end. </span></div> </div></div> </td> </tr><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610273"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">9f21b932842b907468e2f9d2612fce45e89c5d990b310af4d7bd5e7d4e7e28d9</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">incoherentsoftware/defect-process</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">BubbleProjectile.hs</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">module Enemy.All.BubbleTurret.BubbleProjectile ( bubbleSpinPath , bubbleExplodePath , mkBubbleProjectile ) where import Control.Monad.IO.Class (MonadIO) import qualified Data.Set as S import Attack import Attack.Projectile import Collision import Configs.All.Enemy import Configs.All.Enemy.BubbleTurret import Constants import Enemy.All.BubbleTurret.Data import FileCache import Id import Msg import Projectile as P import Util import Window.Graphics import World.ZIndex bubbleSpinPath = PackResourceFilePath "data/enemies/bubble-turret-enemy.pack" "bubble-spin.spr" :: PackResourceFilePath bubbleExplodePath = PackResourceFilePath "data/enemies/bubble-turret-enemy.pack" "bubble-explode.atk" :: PackResourceFilePath registeredCollisions = S.fromList [ ProjRegisteredPlayerCollision ] :: S.Set ProjectileRegisteredCollision data BubbleProjVelBehavior = InitialRiseVel Secs | RiseFallVel Secs data BubbleProjData = BubbleProjData { _velBehavior :: BubbleProjVelBehavior , _pos :: Pos2 , _dir :: Direction , _sprite :: Sprite , _explodeAtkDesc :: AttackDescription , _config :: BubbleTurretEnemyConfig } mkBubbleProjData :: (FileCache m, GraphicsRead m, MonadIO m) => Pos2 -> Direction -> BubbleTurretEnemyData -> m BubbleProjData mkBubbleProjData pos dir bubbleProjData = do spr <- loadPackSprite bubbleSpinPath explodeAtkDesc <- loadPackAttackDescription bubbleExplodePath let cfg = _bubbleTurret $ _config (bubbleProjData :: BubbleTurretEnemyData) return $ BubbleProjData { _velBehavior = InitialRiseVel $ _bubbleProjInitialRiseSecs cfg , _pos = pos , _dir = dir , _sprite = spr , _explodeAtkDesc = explodeAtkDesc , _config = cfg } bubbleProjHitbox :: ProjectileHitbox BubbleProjData bubbleProjHitbox bubbleProj = rectHitbox pos width height where bubbleProjData = _data bubbleProj Pos2 x y = _pos (bubbleProjData :: BubbleProjData) cfg = _config (bubbleProjData :: BubbleProjData) width = _bubbleProjWidth cfg height = _bubbleProjHeight 
cfg pos = Pos2 (x - width / 2.0) (y - height / 2.0) mkBubbleProjectile :: (FileCache m, GraphicsRead m, MonadIO m) => Pos2 -> Direction -> BubbleTurretEnemyData -> m (Some Projectile) mkBubbleProjectile pos dir bubbleTurretData = do bubbleProjData <- mkBubbleProjData pos dir bubbleTurretData msgId <- newId let dummyHbx = dummyHitbox pos ttl = _bubbleProjAliveSecs $ _config (bubbleProjData :: BubbleProjData) return . Some $ (mkProjectile bubbleProjData msgId dummyHbx ttl) { _hitbox = bubbleProjHitbox , _registeredCollisions = registeredCollisions , _think = thinkBubbleProj , _update = updateBubbleProj , _draw = drawBubbleProj , _processCollisions = processBubbleProjCollisions } bubbleProjExplodeRemoveMsgs :: (AllowMsgWrite p NewUpdateProjectileMsgPayload, AllowMsgWrite p ProjectileMsgPayload) => Projectile BubbleProjData -> [Msg p] bubbleProjExplodeRemoveMsgs bubbleProj = [mkAtkProjMsg, removeBubbleProjMsg] where bubbleProjData = _data bubbleProj pos = _pos (bubbleProjData :: BubbleProjData) dir = _dir (bubbleProjData :: BubbleProjData) explodeAtkDesc = _explodeAtkDesc bubbleProjData mkAtkProj = mkEnemyAttackProjectile pos dir explodeAtkDesc mkAtkProjMsg = mkMsg $ NewUpdateProjectileMsgAddM mkAtkProj bubbleProjId = P._msgId bubbleProj removeBubbleProjMsg = mkMsgTo (ProjectileMsgSetTtl 0.0) bubbleProjId thinkBubbleProj :: Monad m => ProjectileThink BubbleProjData m thinkBubbleProj bubbleProj = return $ if | willDisappear -> bubbleProjExplodeRemoveMsgs bubbleProj | otherwise -> let bubbleProjData = _data bubbleProj cfg = _config (bubbleProjData :: BubbleProjData) speedX = _bubbleProjSpeedX cfg speedY = _bubbleProjSpeedY cfg riseFallPeriodSecs = _bubbleProjRiseFallPeriodSecs cfg dir = _dir (bubbleProjData :: BubbleProjData) velX = speedX * directionNeg dir velY = vecY $ P._vel bubbleProj (velBehavior, velY') = case _velBehavior bubbleProjData of InitialRiseVel velTtl | velTtl <= 0.0 -> (RiseFallVel riseFallPeriodSecs, speedY) | otherwise -> (InitialRiseVel (velTtl - timeStep), -speedY) RiseFallVel velTtl | velTtl <= 0.0 -> (RiseFallVel riseFallPeriodSecs, -velY) | otherwise -> (RiseFallVel (velTtl - timeStep), velY) update = \p -> p { _data = (P._data p) {_velBehavior = velBehavior} , _vel = Vel2 velX velY' } in [mkMsgTo (ProjectileMsgUpdate update) (P._msgId bubbleProj)] where willDisappear = P._ttl bubbleProj - timeStep <= 0.0 updateBubbleProj :: Monad m => ProjectileUpdate BubbleProjData m updateBubbleProj bubbleProj = return $ bubbleProj {_data = bubbleProjData'} where bubbleProjData = _data bubbleProj pos = _pos (bubbleProjData :: BubbleProjData) vel = P._vel bubbleProj pos' = pos `vecAdd` (toPos2 $ vel `vecMul` timeStep) spr = _sprite (bubbleProjData :: BubbleProjData) bubbleProjData' = bubbleProjData { _pos = pos' , _sprite = updateSprite spr } :: BubbleProjData drawBubbleProj :: (GraphicsReadWrite m, MonadIO m) => ProjectileDraw BubbleProjData m drawBubbleProj bubbleProj = drawSprite pos dir enemyAttackProjectileZIndex spr where bubbleProjData = _data bubbleProj pos = _pos (bubbleProjData :: BubbleProjData) dir = _dir (bubbleProjData :: BubbleProjData) spr = _sprite (bubbleProjData :: BubbleProjData) processBubbleProjCollisions :: ProjectileProcessCollisions BubbleProjData processBubbleProjCollisions collisions bubbleProj = foldr processCollision [] collisions where processCollision :: ProjectileCollision -> [Msg ThinkCollisionMsgsPhase] -> [Msg ThinkCollisionMsgsPhase] processCollision collision !msgs = case collision of ProjPlayerCollision _ -> bubbleProjExplodeRemoveMsgs bubbleProj 
++ msgs _ -> msgs </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/incoherentsoftware/defect-process/8797aad1d93bff5aadd7226c39a48f45cf76746e/src/Enemy/All/BubbleTurret/BubbleProjectile.hs</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">haskell</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "></span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">module Enemy.All.BubbleTurret.BubbleProjectile ( bubbleSpinPath , bubbleExplodePath , mkBubbleProjectile ) where import Control.Monad.IO.Class (MonadIO) import qualified Data.Set as S import Attack import Attack.Projectile import Collision import Configs.All.Enemy import Configs.All.Enemy.BubbleTurret import Constants import Enemy.All.BubbleTurret.Data import FileCache import Id import Msg import Projectile as P import Util import Window.Graphics import World.ZIndex bubbleSpinPath = PackResourceFilePath "data/enemies/bubble-turret-enemy.pack" "bubble-spin.spr" :: PackResourceFilePath bubbleExplodePath = PackResourceFilePath "data/enemies/bubble-turret-enemy.pack" "bubble-explode.atk" :: PackResourceFilePath registeredCollisions = S.fromList [ ProjRegisteredPlayerCollision ] :: S.Set ProjectileRegisteredCollision data BubbleProjVelBehavior = InitialRiseVel Secs | RiseFallVel Secs data BubbleProjData = BubbleProjData { _velBehavior :: BubbleProjVelBehavior , _pos :: Pos2 , _dir :: Direction , _sprite :: Sprite , _explodeAtkDesc :: AttackDescription , _config :: BubbleTurretEnemyConfig } mkBubbleProjData :: (FileCache m, GraphicsRead m, MonadIO m) => Pos2 -> Direction -> BubbleTurretEnemyData -> m BubbleProjData mkBubbleProjData pos dir bubbleProjData = do spr <- loadPackSprite bubbleSpinPath explodeAtkDesc <- loadPackAttackDescription bubbleExplodePath let cfg = _bubbleTurret $ _config (bubbleProjData :: BubbleTurretEnemyData) return $ BubbleProjData { _velBehavior = InitialRiseVel $ _bubbleProjInitialRiseSecs cfg , _pos = pos , _dir = dir , _sprite = spr , _explodeAtkDesc = explodeAtkDesc , _config = cfg } bubbleProjHitbox :: ProjectileHitbox BubbleProjData bubbleProjHitbox bubbleProj = rectHitbox pos width height where bubbleProjData = _data bubbleProj Pos2 x y = _pos (bubbleProjData :: BubbleProjData) cfg = _config (bubbleProjData :: BubbleProjData) width = _bubbleProjWidth cfg height = _bubbleProjHeight cfg pos = Pos2 (x - width / 2.0) (y - height / 2.0) mkBubbleProjectile :: (FileCache m, GraphicsRead m, MonadIO m) => Pos2 -> Direction -> BubbleTurretEnemyData -> m (Some Projectile) mkBubbleProjectile pos dir bubbleTurretData = do bubbleProjData <- mkBubbleProjData pos dir bubbleTurretData msgId <- newId let dummyHbx = dummyHitbox pos ttl = _bubbleProjAliveSecs $ _config (bubbleProjData :: BubbleProjData) return . 
Some $ (mkProjectile bubbleProjData msgId dummyHbx ttl) { _hitbox = bubbleProjHitbox , _registeredCollisions = registeredCollisions , _think = thinkBubbleProj , _update = updateBubbleProj , _draw = drawBubbleProj , _processCollisions = processBubbleProjCollisions } bubbleProjExplodeRemoveMsgs :: (AllowMsgWrite p NewUpdateProjectileMsgPayload, AllowMsgWrite p ProjectileMsgPayload) => Projectile BubbleProjData -> [Msg p] bubbleProjExplodeRemoveMsgs bubbleProj = [mkAtkProjMsg, removeBubbleProjMsg] where bubbleProjData = _data bubbleProj pos = _pos (bubbleProjData :: BubbleProjData) dir = _dir (bubbleProjData :: BubbleProjData) explodeAtkDesc = _explodeAtkDesc bubbleProjData mkAtkProj = mkEnemyAttackProjectile pos dir explodeAtkDesc mkAtkProjMsg = mkMsg $ NewUpdateProjectileMsgAddM mkAtkProj bubbleProjId = P._msgId bubbleProj removeBubbleProjMsg = mkMsgTo (ProjectileMsgSetTtl 0.0) bubbleProjId thinkBubbleProj :: Monad m => ProjectileThink BubbleProjData m thinkBubbleProj bubbleProj = return $ if | willDisappear -> bubbleProjExplodeRemoveMsgs bubbleProj | otherwise -> let bubbleProjData = _data bubbleProj cfg = _config (bubbleProjData :: BubbleProjData) speedX = _bubbleProjSpeedX cfg speedY = _bubbleProjSpeedY cfg riseFallPeriodSecs = _bubbleProjRiseFallPeriodSecs cfg dir = _dir (bubbleProjData :: BubbleProjData) velX = speedX * directionNeg dir velY = vecY $ P._vel bubbleProj (velBehavior, velY') = case _velBehavior bubbleProjData of InitialRiseVel velTtl | velTtl <= 0.0 -> (RiseFallVel riseFallPeriodSecs, speedY) | otherwise -> (InitialRiseVel (velTtl - timeStep), -speedY) RiseFallVel velTtl | velTtl <= 0.0 -> (RiseFallVel riseFallPeriodSecs, -velY) | otherwise -> (RiseFallVel (velTtl - timeStep), velY) update = \p -> p { _data = (P._data p) {_velBehavior = velBehavior} , _vel = Vel2 velX velY' } in [mkMsgTo (ProjectileMsgUpdate update) (P._msgId bubbleProj)] where willDisappear = P._ttl bubbleProj - timeStep <= 0.0 updateBubbleProj :: Monad m => ProjectileUpdate BubbleProjData m updateBubbleProj bubbleProj = return $ bubbleProj {_data = bubbleProjData'} where bubbleProjData = _data bubbleProj pos = _pos (bubbleProjData :: BubbleProjData) vel = P._vel bubbleProj pos' = pos `vecAdd` (toPos2 $ vel `vecMul` timeStep) spr = _sprite (bubbleProjData :: BubbleProjData) bubbleProjData' = bubbleProjData { _pos = pos' , _sprite = updateSprite spr } :: BubbleProjData drawBubbleProj :: (GraphicsReadWrite m, MonadIO m) => ProjectileDraw BubbleProjData m drawBubbleProj bubbleProj = drawSprite pos dir enemyAttackProjectileZIndex spr where bubbleProjData = _data bubbleProj pos = _pos (bubbleProjData :: BubbleProjData) dir = _dir (bubbleProjData :: BubbleProjData) spr = _sprite (bubbleProjData :: BubbleProjData) processBubbleProjCollisions :: ProjectileProcessCollisions BubbleProjData processBubbleProjCollisions collisions bubbleProj = foldr processCollision [] collisions where processCollision :: ProjectileCollision -> [Msg ThinkCollisionMsgsPhase] -> [Msg ThinkCollisionMsgsPhase] processCollision collision !msgs = case collision of ProjPlayerCollision _ -> bubbleProjExplodeRemoveMsgs bubbleProj ++ msgs _ -> msgs </span></div> </div></div> </td> </tr><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610274"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block 
">be9a2e7955dbf0f7e98f37c1d86c47f5e50fea70b0bd46bda1e799f973b8fbb6</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">racket/racket7</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">main.rkt</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">#lang racket/base (printf "pkg-b first main\n") (exit 42) </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/racket/racket7/5dbb62c6bbec198b4a790f1dc08fef0c45c2e32b/pkgs/racket-test/tests/pkg/test-pkgs/pkg-b-first/pkg-b/main.rkt</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">racket</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "></span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">#lang racket/base (printf "pkg-b first main\n") (exit 42) </span></div> </div></div> </td> </tr><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610275"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">234b89d41f4977c73e7ed100c381fa1e961cfa8a62cfae3adc0d7444d9778256</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">clojerl/clojerl</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">clojerl_Atom_SUITE.erl</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">-module(clojerl_Atom_SUITE). -include("clojerl.hrl"). -include("clj_test_utils.hrl"). -export([ all/0 , init_per_suite/1 , end_per_suite/1 ]). -export([ deref/1 , swap/1 , reset/1 , compare_and_set/1 , equiv/1 , meta/1 , str/1 , complete_coverage/1 ]). -spec all() -> [atom()]. all() -> clj_test_utils:all(?MODULE). -spec init_per_suite(config()) -> config(). init_per_suite(Config) -> clj_test_utils:init_per_suite(Config). -spec end_per_suite(config()) -> config(). end_per_suite(Config) -> Config. %%------------------------------------------------------------------------------ %% Test Cases %%------------------------------------------------------------------------------ -spec deref(config()) -> result(). deref(_Config) -> Atom = 'clojerl.Atom':?CONSTRUCTOR(1), ct:comment("deref an atom"), 1 = clj_rt:deref(Atom), 2 = 'clojerl.Atom':reset(Atom, 2), 2 = clj_rt:deref(Atom), {comments, ""}. -spec swap(config()) -> result(). 
swap(_Config) -> Atom = 'clojerl.Atom':?CONSTRUCTOR(2), ct:comment("Successful swaps"), 3 = 'clojerl.Atom':swap(Atom, fun(X) -> X + 1 end), 4 = 'clojerl.Atom':swap(Atom, fun(X, Y) -> X + Y end, 1), 6 = 'clojerl.Atom':swap(Atom, fun(X, Y, Z) -> X + Y + Z end, 1, 1), 9 = 'clojerl.Atom':swap( Atom , fun(X, Y, Z, W) -> X + Y + Z + W end , 1 , 1 , [1] ), ct:comment("Concurrent swaps"), Inc = fun(X) -> X + 1 end, Self = self(), ResetFun = fun(_) -> spawn(fun() -> 'clojerl.Atom':swap(Atom, Inc), Self ! ok end) end, N = 100, Result = N + 9, lists:foreach(ResetFun, lists:seq(1, N)), ok = clj_test_utils:wait_for(ok, N, 1000), Result = 'clojerl.Atom':deref(Atom), {comments, ""}. -spec reset(config()) -> result(). reset(_Config) -> Atom = 'clojerl.Atom':?CONSTRUCTOR(1), ct:comment("Successful resets"), 2 = 'clojerl.Atom':reset(Atom, 2), foo = 'clojerl.Atom':reset(Atom, foo), bar = 'clojerl.Atom':reset(Atom, bar), <<"baz">> = 'clojerl.Atom':reset(Atom, <<"baz">>), ct:comment("Concurrent resets"), Self = self(), ResetFun = fun(N) -> spawn(fun() -> 'clojerl.Atom':reset(Atom, N), Self ! ok end) end, N = 100, lists:foreach(ResetFun, lists:seq(1, N)), ok = clj_test_utils:wait_for(ok, N, 1000), {comments, ""}. -spec compare_and_set(config()) -> result(). compare_and_set(_Config) -> Atom = 'clojerl.Atom':?CONSTRUCTOR(2), true = 'clojerl.Atom':compare_and_set(Atom, 2, 3), false = 'clojerl.Atom':compare_and_set(Atom, whatever, 3), {comments, ""}. -spec equiv(config()) -> result(). equiv(_Config) -> Atom1 = 'clojerl.Atom':?CONSTRUCTOR(1), Atom2 = 'clojerl.Atom':?CONSTRUCTOR(2), ct:comment("Check that the same atom with different meta is equivalent"), Atom3 = clj_rt:with_meta(Atom1, #{a => 1}), Atom4 = clj_rt:with_meta(Atom1, #{b => 2}), true = clj_rt:equiv(Atom3, Atom4), ct:comment("Check that different atoms are not equivalent"), false = clj_rt:equiv(Atom1, Atom2), ct:comment("An atom and something else"), false = clj_rt:equiv(Atom1, whatever), false = clj_rt:equiv(Atom1, #{}), false = clj_rt:equiv(Atom1, 1), {comments, ""}. -spec meta(config()) -> result(). meta(_Config) -> Atom0 = 'clojerl.Atom':?CONSTRUCTOR(1), Atom1 = clj_rt:with_meta(Atom0, #{a => 1}), #{a := 1} = clj_rt:meta(Atom1), {comments, ""}. -spec str(config()) -> result(). str(_Config) -> Atom0 = 'clojerl.Atom':?CONSTRUCTOR(1), Atom1 = clj_rt:with_meta(Atom0, #{a => 1}), <<"#<clojerl.Atom ", _/binary>> = clj_rt:str(Atom1), {comments, ""}. -spec complete_coverage(config()) -> result(). complete_coverage(_Config) -> Atom = 'clojerl.Atom':?CONSTRUCTOR(1), Hash = 'clojerl.IHash':hash(Atom), Hash = 'clojerl.IHash':hash(Atom), true = erlang:is_integer(Hash), {noreply, state} = 'clojerl.Atom':handle_cast(msg, state), {noreply, state} = 'clojerl.Atom':handle_info(msg, state), {ok, state} = 'clojerl.Atom':terminate(msg, state), {ok, state} = 'clojerl.Atom':code_change(msg, from, state), {comments, ""}. 
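A note on the swap/2 behaviour exercised above: the suite only asserts results, so as a reader's aid here is a minimal sketch, not taken from the clojerl sources, of how an atomic swap is conventionally built from the deref/1 and compare_and_set/3 operations this module tests: read the current value, apply the function, and retry if another process changed the value in between. Whatever the actual implementation, swap must apply each update exactly once, which is why the concurrent-swap test can assert a final value of exactly N + 9.

-module(atom_swap_sketch).
-export([swap_sketch/2]).

%% Minimal sketch, assuming only the 'clojerl.Atom' deref/1 and
%% compare_and_set/3 calls shown in the suite above; swap_sketch/2 is a
%% hypothetical helper for illustration, not part of clojerl.
swap_sketch(Atom, Fun) ->
    Old = 'clojerl.Atom':deref(Atom),
    New = Fun(Old),
    case 'clojerl.Atom':compare_and_set(Atom, Old, New) of
        true  -> New;
        false -> swap_sketch(Atom, Fun)  %% lost a race; re-read and retry
    end.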
</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/clojerl/clojerl/aa35847ca64e1c66224867ca4c31ca6de95bc898/test/clojerl_Atom_SUITE.erl</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">erlang</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">------------------------------------------------------------------------------ Test Cases ------------------------------------------------------------------------------</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">-module(clojerl_Atom_SUITE). -include("clojerl.hrl"). -include("clj_test_utils.hrl"). -export([ all/0 , init_per_suite/1 , end_per_suite/1 ]). -export([ deref/1 , swap/1 , reset/1 , compare_and_set/1 , equiv/1 , meta/1 , str/1 , complete_coverage/1 ]). -spec all() -> [atom()]. all() -> clj_test_utils:all(?MODULE). -spec init_per_suite(config()) -> config(). init_per_suite(Config) -> clj_test_utils:init_per_suite(Config). -spec end_per_suite(config()) -> config(). end_per_suite(Config) -> Config. -spec deref(config()) -> result(). deref(_Config) -> Atom = 'clojerl.Atom':?CONSTRUCTOR(1), ct:comment("deref an atom"), 1 = clj_rt:deref(Atom), 2 = 'clojerl.Atom':reset(Atom, 2), 2 = clj_rt:deref(Atom), {comments, ""}. -spec swap(config()) -> result(). swap(_Config) -> Atom = 'clojerl.Atom':?CONSTRUCTOR(2), ct:comment("Successful swaps"), 3 = 'clojerl.Atom':swap(Atom, fun(X) -> X + 1 end), 4 = 'clojerl.Atom':swap(Atom, fun(X, Y) -> X + Y end, 1), 6 = 'clojerl.Atom':swap(Atom, fun(X, Y, Z) -> X + Y + Z end, 1, 1), 9 = 'clojerl.Atom':swap( Atom , fun(X, Y, Z, W) -> X + Y + Z + W end , 1 , 1 , [1] ), ct:comment("Concurrent swaps"), Inc = fun(X) -> X + 1 end, Self = self(), ResetFun = fun(_) -> spawn(fun() -> 'clojerl.Atom':swap(Atom, Inc), Self ! ok end) end, N = 100, Result = N + 9, lists:foreach(ResetFun, lists:seq(1, N)), ok = clj_test_utils:wait_for(ok, N, 1000), Result = 'clojerl.Atom':deref(Atom), {comments, ""}. -spec reset(config()) -> result(). reset(_Config) -> Atom = 'clojerl.Atom':?CONSTRUCTOR(1), ct:comment("Successful resets"), 2 = 'clojerl.Atom':reset(Atom, 2), foo = 'clojerl.Atom':reset(Atom, foo), bar = 'clojerl.Atom':reset(Atom, bar), <<"baz">> = 'clojerl.Atom':reset(Atom, <<"baz">>), ct:comment("Concurrent resets"), Self = self(), ResetFun = fun(N) -> spawn(fun() -> 'clojerl.Atom':reset(Atom, N), Self ! ok end) end, N = 100, lists:foreach(ResetFun, lists:seq(1, N)), ok = clj_test_utils:wait_for(ok, N, 1000), {comments, ""}. -spec compare_and_set(config()) -> result(). compare_and_set(_Config) -> Atom = 'clojerl.Atom':?CONSTRUCTOR(2), true = 'clojerl.Atom':compare_and_set(Atom, 2, 3), false = 'clojerl.Atom':compare_and_set(Atom, whatever, 3), {comments, ""}. -spec equiv(config()) -> result(). 
equiv(_Config) -> Atom1 = 'clojerl.Atom':?CONSTRUCTOR(1), Atom2 = 'clojerl.Atom':?CONSTRUCTOR(2), ct:comment("Check that the same atom with different meta is equivalent"), Atom3 = clj_rt:with_meta(Atom1, #{a => 1}), Atom4 = clj_rt:with_meta(Atom1, #{b => 2}), true = clj_rt:equiv(Atom3, Atom4), ct:comment("Check that different atoms are not equivalent"), false = clj_rt:equiv(Atom1, Atom2), ct:comment("An atom and something else"), false = clj_rt:equiv(Atom1, whatever), false = clj_rt:equiv(Atom1, #{}), false = clj_rt:equiv(Atom1, 1), {comments, ""}. -spec meta(config()) -> result(). meta(_Config) -> Atom0 = 'clojerl.Atom':?CONSTRUCTOR(1), Atom1 = clj_rt:with_meta(Atom0, #{a => 1}), #{a := 1} = clj_rt:meta(Atom1), {comments, ""}. -spec str(config()) -> result(). str(_Config) -> Atom0 = 'clojerl.Atom':?CONSTRUCTOR(1), Atom1 = clj_rt:with_meta(Atom0, #{a => 1}), <<"#<clojerl.Atom ", _/binary>> = clj_rt:str(Atom1), {comments, ""}. -spec complete_coverage(config()) -> result(). complete_coverage(_Config) -> Atom = 'clojerl.Atom':?CONSTRUCTOR(1), Hash = 'clojerl.IHash':hash(Atom), Hash = 'clojerl.IHash':hash(Atom), true = erlang:is_integer(Hash), {noreply, state} = 'clojerl.Atom':handle_cast(msg, state), {noreply, state} = 'clojerl.Atom':handle_info(msg, state), {ok, state} = 'clojerl.Atom':terminate(msg, state), {ok, state} = 'clojerl.Atom':code_change(msg, from, state), {comments, ""}. </span></div> </div></div> </td> </tr><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610276"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">61c56175e2501a86f347f6c09b3b349eaac8d9147c2da5ddfaf2b64bf89ea5f1</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">jellelicht/guix</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">gnu.scm</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">;;; GNU Guix --- Functional package management for GNU Copyright © 2014 < > ;;; ;;; This file is part of GNU Guix. ;;; GNU is free software ; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation ; either version 3 of the License , or ( at ;;; your option) any later version. ;;; ;;; GNU Guix is distributed in the hope that it will be useful, but ;;; WITHOUT ANY WARRANTY; without even the implied warranty of ;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ;;; GNU General Public License for more details. ;;; You should have received a copy of the GNU General Public License along with GNU . If not , see < / > . (define-module (guix scripts import gnu) #:use-module (guix ui) #:use-module (guix utils) #:use-module (guix scripts) #:use-module (guix import gnu) #:use-module (guix scripts import) #:use-module (srfi srfi-1) #:use-module (srfi srfi-11) #:use-module (srfi srfi-37) #:use-module (ice-9 match) #:export (guix-import-gnu)) ;;; ;;; Command-line options. ;;; (define %default-options '((key-download . interactive))) (define (show-help) (display (_ "Usage: guix import gnu [OPTION...] 
PACKAGE Return a package declaration template for PACKAGE, a GNU package.\n")) ;; '--key-download' taken from (guix scripts refresh). (display (_ " --key-download=POLICY handle missing OpenPGP keys according to POLICY: 'always', 'never', and 'interactive', which is also used when 'key-download' is not specified")) (newline) (display (_ " -h, --help display this help and exit")) (display (_ " -V, --version display version information and exit")) (newline) (show-bug-report-information)) (define %options ;; Specification of the command-line options. (cons* (option '(#\h "help") #f #f (lambda args (show-help) (exit 0))) (option '(#\V "version") #f #f (lambda args (show-version-and-exit "guix import gnu"))) (option '("key-download") #t #f ;from (guix scripts refresh) (lambda (opt name arg result) (match arg ((or "interactive" "always" "never") (alist-cons 'key-download (string->symbol arg) result)) (_ (leave (_ "unsupported policy: ~a~%") arg))))) %standard-import-options)) ;;; ;;; Entry point. ;;; (define (guix-import-gnu . args) (define (parse-options) ;; Return the alist of option values. (args-fold* args %options (lambda (opt name arg result) (leave (_ "~A: unrecognized option~%") name)) (lambda (arg result) (alist-cons 'argument arg result)) %default-options)) (let* ((opts (parse-options)) (args (filter-map (match-lambda (('argument . value) value) (_ #f)) (reverse opts)))) (match args ((name) (with-error-handling (gnu->guix-package name #:key-download (assoc-ref opts 'key-download)))) (_ (leave (_ "wrong number of arguments~%")))))) ;;; gnu.scm ends here </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/jellelicht/guix/83cfc9414fca3ab57c949e18c1ceb375a179b59c/guix/scripts/import/gnu.scm</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">scheme</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "> GNU Guix --- Functional package management for GNU This file is part of GNU Guix. you can redistribute it and/or modify it either version 3 of the License , or ( at your option) any later version. GNU Guix is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. Command-line options. '--key-download' taken from (guix scripts refresh). Specification of the command-line options. from (guix scripts refresh) Entry point. Return the alist of option values. gnu.scm ends here</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "> Copyright © 2014 < > under the terms of the GNU General Public License as published by You should have received a copy of the GNU General Public License along with GNU . If not , see < / > . 
(define-module (guix scripts import gnu) #:use-module (guix ui) #:use-module (guix utils) #:use-module (guix scripts) #:use-module (guix import gnu) #:use-module (guix scripts import) #:use-module (srfi srfi-1) #:use-module (srfi srfi-11) #:use-module (srfi srfi-37) #:use-module (ice-9 match) #:export (guix-import-gnu)) (define %default-options '((key-download . interactive))) (define (show-help) (display (_ "Usage: guix import gnu [OPTION...] PACKAGE Return a package declaration template for PACKAGE, a GNU package.\n")) (display (_ " --key-download=POLICY handle missing OpenPGP keys according to POLICY: 'always', 'never', and 'interactive', which is also used when 'key-download' is not specified")) (newline) (display (_ " -h, --help display this help and exit")) (display (_ " -V, --version display version information and exit")) (newline) (show-bug-report-information)) (define %options (cons* (option '(#\h "help") #f #f (lambda args (show-help) (exit 0))) (option '(#\V "version") #f #f (lambda args (show-version-and-exit "guix import gnu"))) (lambda (opt name arg result) (match arg ((or "interactive" "always" "never") (alist-cons 'key-download (string->symbol arg) result)) (_ (leave (_ "unsupported policy: ~a~%") arg))))) %standard-import-options)) (define (guix-import-gnu . args) (define (parse-options) (args-fold* args %options (lambda (opt name arg result) (leave (_ "~A: unrecognized option~%") name)) (lambda (arg result) (alist-cons 'argument arg result)) %default-options)) (let* ((opts (parse-options)) (args (filter-map (match-lambda (('argument . value) value) (_ #f)) (reverse opts)))) (match args ((name) (with-error-handling (gnu->guix-package name #:key-download (assoc-ref opts 'key-download)))) (_ (leave (_ "wrong number of arguments~%")))))) </span></div> </div></div> </td> </tr><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610277"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">3808527b4a8541e5865943714d06c824c9f05b816fb0b8e61cc0edc86f522baa</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">sampou-org/ghc_users_guide_ja</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">PhasePrograms.hs</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">module Options.PhasePrograms where import Types phaseProgramsOptions :: [Flag] phaseProgramsOptions = [ flag { flagName = "-pgmL ⟨cmd⟩" , flagDescription = "⟨cmd⟩ を文芸的コードのプリプロセッサとして使う" , flagType = DynamicFlag } , flag { flagName = "-pgmP ⟨cmd⟩" , flagDescription = "⟨cmd⟩ を C プリプロセッサとして使う(``-cpp`` を指定したときのみ)" , flagType = DynamicFlag } , flag { flagName = "-pgmc ⟨cmd⟩" , flagDescription = "⟨cmd⟩ を C のコンパイラとして使う" , flagType = DynamicFlag } , flag { flagName = "-pgmlo ⟨cmd⟩" , flagDescription = "⟨cmd⟩ を LLVM 最適化器として使う" , flagType = DynamicFlag } , flag { flagName = "-pgmlc ⟨cmd⟩" , flagDescription = "⟨cmd⟩ を LLVM コンパイラとして使う" , flagType = DynamicFlag } , flag { flagName = "-pgms ⟨cmd⟩" , flagDescription = "⟨cmd⟩ をスプリッタとして使う" , flagType = DynamicFlag } , flag { flagName = "-pgma ⟨cmd⟩" , flagDescription = "⟨cmd⟩ 
をアセンブラとして使う" , flagType = DynamicFlag } , flag { flagName = "-pgml ⟨cmd⟩" , flagDescription = "⟨cmd⟩ をリンカとして使う" , flagType = DynamicFlag } , flag { flagName = "-pgmdll ⟨cmd⟩" , flagDescription = "⟨cmd⟩ を DLL 生成器として使う" , flagType = DynamicFlag } , flag { flagName = "-pgmF ⟨cmd⟩" , flagDescription = "⟨cmd⟩ を プリプロセッサとして使う(``-F`` を指定したときのみ)" , flagType = DynamicFlag } , flag { flagName = "-pgmwindres ⟨cmd⟩" , flagDescription = "⟨cmd⟩ を Windows でマニフェストを埋め込むためのプログラムとして使う" , flagType = DynamicFlag } , flag { flagName = "-pgmlibtool ⟨cmd⟩" , flagDescription = "⟨cmd⟩ を libtool用のコマンドとして使う(``-staticlib`` を指定したときのみ)" , flagType = DynamicFlag } ] </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/sampou-org/ghc_users_guide_ja/91ac4ee4347802bbfc63686cfcbd4fc12f95a584/8.2.2/mkUserGuidePart/Options/PhasePrograms.hs</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">haskell</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "></span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">module Options.PhasePrograms where import Types phaseProgramsOptions :: [Flag] phaseProgramsOptions = [ flag { flagName = "-pgmL ⟨cmd⟩" , flagDescription = "⟨cmd⟩ を文芸的コードのプリプロセッサとして使う" , flagType = DynamicFlag } , flag { flagName = "-pgmP ⟨cmd⟩" , flagDescription = "⟨cmd⟩ を C プリプロセッサとして使う(``-cpp`` を指定したときのみ)" , flagType = DynamicFlag } , flag { flagName = "-pgmc ⟨cmd⟩" , flagDescription = "⟨cmd⟩ を C のコンパイラとして使う" , flagType = DynamicFlag } , flag { flagName = "-pgmlo ⟨cmd⟩" , flagDescription = "⟨cmd⟩ を LLVM 最適化器として使う" , flagType = DynamicFlag } , flag { flagName = "-pgmlc ⟨cmd⟩" , flagDescription = "⟨cmd⟩ を LLVM コンパイラとして使う" , flagType = DynamicFlag } , flag { flagName = "-pgms ⟨cmd⟩" , flagDescription = "⟨cmd⟩ をスプリッタとして使う" , flagType = DynamicFlag } , flag { flagName = "-pgma ⟨cmd⟩" , flagDescription = "⟨cmd⟩ をアセンブラとして使う" , flagType = DynamicFlag } , flag { flagName = "-pgml ⟨cmd⟩" , flagDescription = "⟨cmd⟩ をリンカとして使う" , flagType = DynamicFlag } , flag { flagName = "-pgmdll ⟨cmd⟩" , flagDescription = "⟨cmd⟩ を DLL 生成器として使う" , flagType = DynamicFlag } , flag { flagName = "-pgmF ⟨cmd⟩" , flagDescription = "⟨cmd⟩ を プリプロセッサとして使う(``-F`` を指定したときのみ)" , flagType = DynamicFlag } , flag { flagName = "-pgmwindres ⟨cmd⟩" , flagDescription = "⟨cmd⟩ を Windows でマニフェストを埋め込むためのプログラムとして使う" , flagType = DynamicFlag } , flag { flagName = "-pgmlibtool ⟨cmd⟩" , flagDescription = "⟨cmd⟩ を libtool用のコマンドとして使う(``-staticlib`` を指定したときのみ)" , flagType = DynamicFlag } ] </span></div> </div></div> </td> </tr><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610278"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">27cf0ba744fad5994af301dddadbcc762e8e7f814852e58d080e2bfc6b33e369</span></div> </div></div> </td><td 
class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">philopon/apiary</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">Persist.hs</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "># LANGUAGE UndecidableInstances # # LANGUAGE OverlappingInstances # # LANGUAGE FlexibleInstances # # LANGUAGE FlexibleContexts # # LANGUAGE TypeOperators # {-# LANGUAGE Rank2Types #-} # LANGUAGE LambdaCase # # LANGUAGE DataKinds # {-# LANGUAGE GADTs #-} module Web.Apiary.Database.Persist ( Persist -- * initializer , Migrator(..), With , initPersist, initPersistNoLog , initPersistPool, initPersistPoolNoLog -- ** low level , initPersist', initPersistPool' -- * query , RunSQL(runSql) -- * filter , sql ) where import qualified Data.Pool as Pool import Control.Monad(void, mzero) import Control.Monad.IO.Class(MonadIO(..)) import Control.Monad.Logger(NoLoggingT(runNoLoggingT)) import Control.Monad.Trans.Reader(ReaderT(..), runReaderT, ask) import Control.Monad.Trans.Control(MonadBaseControl) import Web.Apiary.Logger(LogWrapper, runLogWrapper) import qualified Database.Persist.Sql as Sql import Web.Apiary(Html) import Control.Monad.Apiary.Action(ActionT, applyDict) import Control.Monad.Apiary.Filter(focus, Filter, Doc(DocPrecondition)) import qualified Network.Routing.Dict as Dict import qualified Network.Routing as R import Data.Proxy.Compat(Proxy(..)) import GHC.TypeLits.Compat(KnownSymbol) import Data.Apiary.Extension (Has, Initializer, initializer, Extensions, Extension, MonadExts, getExt) data Migrator = Logging Sql.Migration | Silent Sql.Migration | Unsafe Sql.Migration | NoMigrate data Persist = PersistPool Sql.ConnectionPool | PersistConn Sql.SqlBackend instance Extension Persist type With c m = forall a. (c -> m a) -> m a initPersist' :: (MonadIO n, MonadBaseControl IO n, Monad m) => (forall a. Extensions exts -> n a -> m a) -> With Sql.SqlBackend n -> Migrator -> Initializer m exts (Persist ': exts) initPersist' run with migr = initializer $ \es -> run es $ with $ \conn -> do doMigration migr conn return (PersistConn conn) -- | construct persist extension initializer with no connection pool. -- -- example: -- -- @ initPersist ( withSqliteConn " db.sqlite " ) migrateAll -- @ initPersist :: (MonadIO m, MonadBaseControl IO m) => With Sql.SqlBackend (LogWrapper exts m) -> Sql.Migration -> Initializer m exts (Persist ': exts) initPersist with = initPersist' runLogWrapper with . Logging initPersistNoLog :: (MonadIO m, MonadBaseControl IO m) => With Sql.SqlBackend (NoLoggingT m) -> Sql.Migration -> Initializer m es (Persist ': es) initPersistNoLog with = initPersist' (const runNoLoggingT) with . Silent initPersistPool' :: (MonadIO n, MonadBaseControl IO n, Monad m) => (forall a. Extensions exts -> n a -> m a) -> With Sql.ConnectionPool n -> Migrator -> Initializer m exts (Persist ': exts) initPersistPool' run with migr = initializer $ \es -> run es $ with $ \pool -> do Pool.withResource pool $ doMigration migr return (PersistPool pool) initPersistPool :: (MonadIO m, MonadBaseControl IO m) => With Sql.ConnectionPool (LogWrapper exts m) -> Sql.Migration -> Initializer m exts (Persist ': exts) initPersistPool with = initPersistPool' runLogWrapper with . 
Logging initPersistPoolNoLog :: (MonadIO m, MonadBaseControl IO m) => With Sql.ConnectionPool (NoLoggingT m) -> Sql.Migration -> Initializer m es (Persist ': es) initPersistPoolNoLog with = initPersistPool' (const runNoLoggingT) with . Silent doMigration :: (MonadIO m, MonadBaseControl IO m) => Migrator -> Sql.SqlBackend -> m () doMigration migr conn = case migr of Logging m -> runReaderT (Sql.runMigration m) conn Silent m -> runReaderT (void $ Sql.runMigrationSilent m) conn Unsafe m -> runReaderT (Sql.runMigrationUnsafe m) conn NoMigrate -> return () -- | execute sql in action. class RunSQL m where runSql :: Sql.SqlPersistT m a -> m a runSql' :: MonadBaseControl IO m => Sql.SqlPersistT m a -> Persist -> m a runSql' a persist = case persist of PersistPool p -> Sql.runSqlPool a p PersistConn c -> Sql.runSqlConn a c instance (Has Persist es, MonadExts es m, MonadBaseControl IO m) => RunSQL m where runSql a = getExt (Proxy :: Proxy Persist) >>= runSql' a instance (MonadBaseControl IO m) => RunSQL (ReaderT Persist m) where runSql a = ask >>= runSql' a -- | filter by sql query. since 0.9.0.0. sql :: (KnownSymbol k, Has Persist exts, MonadBaseControl IO actM, k Dict.</ prms) => Maybe Html -- ^ documentation. -> proxy k -> Sql.SqlPersistT (ActionT exts '[] actM) a -> (a -> Maybe b) -- ^ result check function. Nothing: fail filter, Just a: success filter and add parameter. -> Filter exts actM m prms (k Dict.:= b ': prms) sql doc k q p = focus (maybe id DocPrecondition doc) Nothing $ R.raw "sql" $ \d t -> fmap p (runSql $ hoistReaderT (applyDict Dict.emptyDict) q) >>= \case Nothing -> mzero Just a -> return (Dict.add k a d, t) hoistReaderT :: (forall b. m b -> n b) -> ReaderT r m a -> ReaderT r n a hoistReaderT f m = ReaderT $ \b -> f (runReaderT m b) # INLINE hoistReaderT # </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/philopon/apiary/7da306fcbfcdec85d073746968298de4540d7235/apiary-persistent/src/Web/Apiary/Database/Persist.hs</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">haskell</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "># LANGUAGE Rank2Types # # LANGUAGE GADTs # * initializer ** low level * query * filter | construct persist extension initializer with no connection pool. example: @ @ | execute sql in action. | filter by sql query. since 0.9.0.0. ^ documentation. ^ result check function. 
Nothing: fail filter, Just a: success filter and add parameter.</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "># LANGUAGE UndecidableInstances # # LANGUAGE OverlappingInstances # # LANGUAGE FlexibleInstances # # LANGUAGE FlexibleContexts # # LANGUAGE TypeOperators # # LANGUAGE LambdaCase # # LANGUAGE DataKinds # module Web.Apiary.Database.Persist ( Persist , Migrator(..), With , initPersist, initPersistNoLog , initPersistPool, initPersistPoolNoLog , initPersist', initPersistPool' , RunSQL(runSql) , sql ) where import qualified Data.Pool as Pool import Control.Monad(void, mzero) import Control.Monad.IO.Class(MonadIO(..)) import Control.Monad.Logger(NoLoggingT(runNoLoggingT)) import Control.Monad.Trans.Reader(ReaderT(..), runReaderT, ask) import Control.Monad.Trans.Control(MonadBaseControl) import Web.Apiary.Logger(LogWrapper, runLogWrapper) import qualified Database.Persist.Sql as Sql import Web.Apiary(Html) import Control.Monad.Apiary.Action(ActionT, applyDict) import Control.Monad.Apiary.Filter(focus, Filter, Doc(DocPrecondition)) import qualified Network.Routing.Dict as Dict import qualified Network.Routing as R import Data.Proxy.Compat(Proxy(..)) import GHC.TypeLits.Compat(KnownSymbol) import Data.Apiary.Extension (Has, Initializer, initializer, Extensions, Extension, MonadExts, getExt) data Migrator = Logging Sql.Migration | Silent Sql.Migration | Unsafe Sql.Migration | NoMigrate data Persist = PersistPool Sql.ConnectionPool | PersistConn Sql.SqlBackend instance Extension Persist type With c m = forall a. (c -> m a) -> m a initPersist' :: (MonadIO n, MonadBaseControl IO n, Monad m) => (forall a. Extensions exts -> n a -> m a) -> With Sql.SqlBackend n -> Migrator -> Initializer m exts (Persist ': exts) initPersist' run with migr = initializer $ \es -> run es $ with $ \conn -> do doMigration migr conn return (PersistConn conn) initPersist ( withSqliteConn " db.sqlite " ) migrateAll initPersist :: (MonadIO m, MonadBaseControl IO m) => With Sql.SqlBackend (LogWrapper exts m) -> Sql.Migration -> Initializer m exts (Persist ': exts) initPersist with = initPersist' runLogWrapper with . Logging initPersistNoLog :: (MonadIO m, MonadBaseControl IO m) => With Sql.SqlBackend (NoLoggingT m) -> Sql.Migration -> Initializer m es (Persist ': es) initPersistNoLog with = initPersist' (const runNoLoggingT) with . Silent initPersistPool' :: (MonadIO n, MonadBaseControl IO n, Monad m) => (forall a. Extensions exts -> n a -> m a) -> With Sql.ConnectionPool n -> Migrator -> Initializer m exts (Persist ': exts) initPersistPool' run with migr = initializer $ \es -> run es $ with $ \pool -> do Pool.withResource pool $ doMigration migr return (PersistPool pool) initPersistPool :: (MonadIO m, MonadBaseControl IO m) => With Sql.ConnectionPool (LogWrapper exts m) -> Sql.Migration -> Initializer m exts (Persist ': exts) initPersistPool with = initPersistPool' runLogWrapper with . Logging initPersistPoolNoLog :: (MonadIO m, MonadBaseControl IO m) => With Sql.ConnectionPool (NoLoggingT m) -> Sql.Migration -> Initializer m es (Persist ': es) initPersistPoolNoLog with = initPersistPool' (const runNoLoggingT) with . 
Silent doMigration :: (MonadIO m, MonadBaseControl IO m) => Migrator -> Sql.SqlBackend -> m () doMigration migr conn = case migr of Logging m -> runReaderT (Sql.runMigration m) conn Silent m -> runReaderT (void $ Sql.runMigrationSilent m) conn Unsafe m -> runReaderT (Sql.runMigrationUnsafe m) conn NoMigrate -> return () class RunSQL m where runSql :: Sql.SqlPersistT m a -> m a runSql' :: MonadBaseControl IO m => Sql.SqlPersistT m a -> Persist -> m a runSql' a persist = case persist of PersistPool p -> Sql.runSqlPool a p PersistConn c -> Sql.runSqlConn a c instance (Has Persist es, MonadExts es m, MonadBaseControl IO m) => RunSQL m where runSql a = getExt (Proxy :: Proxy Persist) >>= runSql' a instance (MonadBaseControl IO m) => RunSQL (ReaderT Persist m) where runSql a = ask >>= runSql' a sql :: (KnownSymbol k, Has Persist exts, MonadBaseControl IO actM, k Dict.</ prms) -> proxy k -> Sql.SqlPersistT (ActionT exts '[] actM) a -> Filter exts actM m prms (k Dict.:= b ': prms) sql doc k q p = focus (maybe id DocPrecondition doc) Nothing $ R.raw "sql" $ \d t -> fmap p (runSql $ hoistReaderT (applyDict Dict.emptyDict) q) >>= \case Nothing -> mzero Just a -> return (Dict.add k a d, t) hoistReaderT :: (forall b. m b -> n b) -> ReaderT r m a -> ReaderT r n a hoistReaderT f m = ReaderT $ \b -> f (runReaderT m b) # INLINE hoistReaderT # </span></div> </div></div> </td> </tr><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610279"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">9d928bf2be8912124ea0eca64a778884772879aee1238b0a03647238a4db5afb</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">TrustInSoft/tis-interpreter</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">zones.mli</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "> Modified by TrustInSoft (**************************************************************************) (* *) This file is part of Frama - C. (* *) Copyright ( C ) 2007 - 2015 CEA ( Commissariat à l'énergie atomique et aux énergies (* alternatives) *) (* *) (* you can redistribute it and/or modify it under the terms of the GNU *) Lesser General Public License as published by the Free Software Foundation , version 2.1 . (* *) (* It is distributed in the hope that it will be useful, *) (* but WITHOUT ANY WARRANTY; without even the implied warranty of *) (* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *) (* GNU Lesser General Public License for more details. *) (* *) See the GNU Lesser General Public License version 2.1 for more details ( enclosed in the file licenses / LGPLv2.1 ) . (* *) (**************************************************************************) This file is empty on purpose . Plugins register callbacks in src / kernel / db.ml . This file is empty on purpose. Plugins register callbacks in src/kernel/db.ml. 
*) </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/TrustInSoft/tis-interpreter/33132ce4a825494ea48bf2dd6fd03a56b62cc5c3/src/plugins/scope/zones.mli</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">ocaml</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">************************************************************************ alternatives) you can redistribute it and/or modify it under the terms of the GNU It is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. ************************************************************************</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "> Modified by TrustInSoft This file is part of Frama - C. Copyright ( C ) 2007 - 2015 CEA ( Commissariat à l'énergie atomique et aux énergies Lesser General Public License as published by the Free Software Foundation , version 2.1 . See the GNU Lesser General Public License version 2.1 for more details ( enclosed in the file licenses / LGPLv2.1 ) . This file is empty on purpose . Plugins register callbacks in src / kernel / db.ml . This file is empty on purpose. Plugins register callbacks in src/kernel/db.ml. *) </span></div> </div></div> </td> </tr><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610280"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">f058a5e6bc7d4a02c9821e136b9dc035fc3816f4f74ca7863c824ee6b1bc14ba</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">brevis-us/brevis</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">globals.clj</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">(ns us.brevis.globals (:import [us.brevis.graphics BrCamera])) (def enable-display-text (atom true)) (def default-gui-state {:fullscreen false : camera ( BrCamera . 300 300 -50 90 -70 45 60 ( / 4 3 ) 0.1 4000 ) : camera ( BrCamera . 300 300 -50 162 -56 0 60 ( / 4 3 ) 0.1 4000 ) :camera (BrCamera. 100 50 -50 0 -90 0 60 640 480 0.1 4000) :gui true ;:input (BrInput.) 
: rot - x 90 : rot - y -90 : rot - z -45 : shift - x 300 : shift - y 300 : shift - z -50;-30 :last-report-time 0 :simulation-time 0}) (def #^:dynamic *gui-state* (atom default-gui-state)) (def #^:dynamic *gui-message-board* (atom (sorted-map))) (def #^:dynamic *app-thread* (atom nil)) (def #^:dynamic *screenshot-filename* (atom nil)) (def #^:dynamic *simulation-state* (atom {})) (def #^:dynamic *graphics* (atom {})) (def destroy-hooks (atom [])) ;(def #^:dynamic *brevis-params* (atom {})) ;(def #^:dynamic *brevis-state* (atom {})) </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/brevis-us/brevis/de51c173279e82cca6d5990010144167050358a3/src/main/clojure/us/brevis/globals.clj</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">clojure</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">:input (BrInput.) -30 (def #^:dynamic *brevis-params* (atom {})) (def #^:dynamic *brevis-state* (atom {}))</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">(ns us.brevis.globals (:import [us.brevis.graphics BrCamera])) (def enable-display-text (atom true)) (def default-gui-state {:fullscreen false : camera ( BrCamera . 300 300 -50 90 -70 45 60 ( / 4 3 ) 0.1 4000 ) : camera ( BrCamera . 300 300 -50 162 -56 0 60 ( / 4 3 ) 0.1 4000 ) :camera (BrCamera. 100 50 -50 0 -90 0 60 640 480 0.1 4000) :gui true : rot - x 90 : rot - y -90 : rot - z -45 :last-report-time 0 :simulation-time 0}) (def #^:dynamic *gui-state* (atom default-gui-state)) (def #^:dynamic *gui-message-board* (atom (sorted-map))) (def #^:dynamic *app-thread* (atom nil)) (def #^:dynamic *screenshot-filename* (atom nil)) (def #^:dynamic *simulation-state* (atom {})) (def #^:dynamic *graphics* (atom {})) (def destroy-hooks (atom [])) </span></div> </div></div> </td> </tr><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610281"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">81f66040c8b28d4d5326c5b2d5cc7fd1b91ba8baa56dde06adf56af7a86cb412</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">KavehYousefi/Esoteric-programming-languages</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">types.lisp</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; ;; This file serves in the declaration of the globally significant ;; types. 
;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; -- Declaration of types. -- ;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; (deftype list-of (&optional (element-type T)) "The ``list-of'' type defines a list of zero or more elements, each member of which conforms to the ELEMENT-TYPE, defaulting to the comprehensive ``T''." (let ((predicate (gensym))) (declare (type symbol predicate)) (setf (symbol-function predicate) #'(lambda (candidate) (declare (type T candidate)) (and (listp candidate) (every #'(lambda (element) (declare (type T element)) (typep element element-type)) (the list candidate))))) `(satisfies ,predicate))) ;;; ------------------------------------------------------- (deftype hash-table-of (&optional (key-type T) (value-type T)) "The ``hash-table-of'' type defines a hash table of zero or more entries, each key of which conforms to the KEY-TYPE and associates with a value of the VALUE-TYPE, both defaulting to the comprehensive ``T''." (let ((predicate (gensym))) (declare (type symbol predicate)) (setf (symbol-function predicate) #'(lambda (candidate) (declare (type T candidate)) (and (hash-table-p candidate) (loop for key of-type T being the hash-keys in (the hash-table candidate) using (hash-value value) always (and (typep key key-type) (typep value value-type)))))) `(satisfies ,predicate))) ;;; ------------------------------------------------------- (deftype attribute-map () "The ``attribute-map'' type defines a collection of node attributes in the form of a hash table mapping which associated keyword symbol attribute names to arbitrary values." '(hash-table-of keyword T)) ;;; ------------------------------------------------------- (deftype attribute-list () "The ``attribute-list'' type defines a list of node attributes in terms of a property list, or plist, with each attribute name (key or indicator) immediately succeeded by its associated attribute value (property value), the former of which must be a keyword symbol, whereas the latter may assume the generic type ``T''." (let ((predicate (gensym))) (setf (symbol-function predicate) #'(lambda (candidate) (declare (type T candidate)) (and (listp candidate) (evenp (length (the list candidate))) (loop for (indicator value) of-type (T T) on (the list candidate) by #'cddr always (and (typep indicator 'keyword) (typep value T)))))) `(satisfies ,predicate))) ;;; ------------------------------------------------------- (deftype node-list () "The ``node-list'' type defines a list of zero or more ``Node'' objects." '(list-of Node)) ;;; ------------------------------------------------------- (deftype set-operator () "The ``set-operator'' type enumerates the recognized binary set operations." '(member :union :intersection :left-difference :right-difference)) ;;; ------------------------------------------------------- (deftype set-relationship () "The ``set-relationship'' type enumerates the recognized relationship betwixt two sets, most commonly employed in the indagation of a loop's continuation predicate." '(member :subset :proper-subset :not-subset :superset :proper-superset :not-superset :equal)) ;;; ------------------------------------------------------- (deftype destination () "The ``destination'' type defines a sink for output operations, enumerating, among others, the functions ``format'' and ``write-char''." 
'(or null (eql T) stream string)) ;;; ------------------------------------------------------- (deftype natural-number () "The ``natural-number'' type defines a positive integer with no upper bourne, that is, a commorant of the range [1, +infinity], most commonly employed in the context of set members." '(integer 1 *)) ;;; ------------------------------------------------------- (deftype number-list () "The ``number-list'' type defines a list of zero or more natural numbers, that is, positive integers." '(list-of natural-number)) </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/KavehYousefi/Esoteric-programming-languages/86116d6045f426dbe74f881b92944ad76df59c68/SOAP/SOAP_001/types.lisp</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">lisp</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "> This file serves in the declaration of the globally significant types. -- Declaration of types. -- ;; ------------------------------------------------------- ------------------------------------------------------- ------------------------------------------------------- ------------------------------------------------------- ------------------------------------------------------- ------------------------------------------------------- ------------------------------------------------------- ------------------------------------------------------- -------------------------------------------------------</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "> (deftype list-of (&optional (element-type T)) "The ``list-of'' type defines a list of zero or more elements, each member of which conforms to the ELEMENT-TYPE, defaulting to the comprehensive ``T''." (let ((predicate (gensym))) (declare (type symbol predicate)) (setf (symbol-function predicate) #'(lambda (candidate) (declare (type T candidate)) (and (listp candidate) (every #'(lambda (element) (declare (type T element)) (typep element element-type)) (the list candidate))))) `(satisfies ,predicate))) (deftype hash-table-of (&optional (key-type T) (value-type T)) "The ``hash-table-of'' type defines a hash table of zero or more entries, each key of which conforms to the KEY-TYPE and associates with a value of the VALUE-TYPE, both defaulting to the comprehensive ``T''." (let ((predicate (gensym))) (declare (type symbol predicate)) (setf (symbol-function predicate) #'(lambda (candidate) (declare (type T candidate)) (and (hash-table-p candidate) (loop for key of-type T being the hash-keys in (the hash-table candidate) using (hash-value value) always (and (typep key key-type) (typep value value-type)))))) `(satisfies ,predicate))) (deftype attribute-map () "The ``attribute-map'' type defines a collection of node attributes in the form of a hash table mapping which associated keyword symbol attribute names to arbitrary values." 
'(hash-table-of keyword T)) (deftype attribute-list () "The ``attribute-list'' type defines a list of node attributes in terms of a property list, or plist, with each attribute name (key or indicator) immediately succeeded by its associated attribute value (property value), the former of which must be a keyword symbol, whereas the latter may assume the generic type ``T''." (let ((predicate (gensym))) (setf (symbol-function predicate) #'(lambda (candidate) (declare (type T candidate)) (and (listp candidate) (evenp (length (the list candidate))) (loop for (indicator value) of-type (T T) on (the list candidate) by #'cddr always (and (typep indicator 'keyword) (typep value T)))))) `(satisfies ,predicate))) (deftype node-list () "The ``node-list'' type defines a list of zero or more ``Node'' objects." '(list-of Node)) (deftype set-operator () "The ``set-operator'' type enumerates the recognized binary set operations." '(member :union :intersection :left-difference :right-difference)) (deftype set-relationship () "The ``set-relationship'' type enumerates the recognized relationship betwixt two sets, most commonly employed in the indagation of a loop's continuation predicate." '(member :subset :proper-subset :not-subset :superset :proper-superset :not-superset :equal)) (deftype destination () "The ``destination'' type defines a sink for output operations, enumerating, among others, the functions ``format'' and ``write-char''." '(or null (eql T) stream string)) (deftype natural-number () "The ``natural-number'' type defines a positive integer with no upper bourne, that is, a commorant of the range [1, +infinity], most commonly employed in the context of set members." '(integer 1 *)) (deftype number-list () "The ``number-list'' type defines a list of zero or more natural numbers, that is, positive integers." '(list-of natural-number)) </span></div> </div></div> </td> </tr><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610282"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">65ee4e700d62c98cfe275cfc749fa26906769e8a07e90b9c30fd72f70fd27002</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">dizengrong/erlang_game</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">sales_test.erl</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">-module (sales_test). -include ("sales.hrl"). -include_lib ("amnesia/include/amnesia.hrl"). -compile ([export_all]). 
populate() -> amnesia:open({local, sales}, sales), {ok, Cust1} = amnesia:add_new (sales, #customer {customer_code = 102341, name = "John", address = "XXXXX"}), {ok, Cust2} = amnesia:add_new (sales, #customer {customer_code = 394021, name = "Corrado", address = "YYYYYY", email = "corrado@yyy"}), {ok, Cust3} = amnesia:add_new (sales, #customer {customer_code = 102391, name = "Dave", address = "Dave's home", email = "dave@zzz"}), {ok, P1} = amnesia:add_new (sales, #product { product_code = "001", description = "CPU Intel", price = 231.10 }), {ok, P2} = amnesia:add_new (sales, #product { product_code = "002", description = "Compact Flash 4G", price = 57.90 }), {ok, P3} = amnesia:add_new (sales, #product { product_code = "003", description = "Hard Disk 500G", price = 190.77 }), {ok, Order} = amnesia:add_new (sales, #orders { order_number = 30, order_date = {2008, 7, 17}, customer = Cust2 }), amnesia:add_new (sales, #order_line { orders = Order, product = P2, quantity = 3 }), amnesia:add_new (sales, #order_line { orders = Order, product = P1, quantity = 10 }), amnesia:add_new (sales, [#product { product_code = "004", description = "Data Server", price = 5200.00 }, #orders { order_number = 31, customer = Cust1}, #order_line { orders = '$2', product = P3, quantity = 2} , #order_line { orders = '$2', product = '$1', quantity = 11 } ]), ok. test_join () -> amnesia:fetch (sales, [customer, ?JOIN, orders, ?JOIN, order_line]). test_join (Pid) -> amnesia:fetch (Pid, [customer, ?JOIN, orders, ?JOIN, order_line]). test_connections () -> {ok, [Order]} = amnesia:fetch (sales, orders, {"order_number = 31", []}), io:format ("Order #31 is: ~p~n", [Order]), {ok, OrderWithCust} = amnesia:load_referenced (sales, Order), io:format ("Order #31 with customer explicited is: ~p~n", [OrderWithCust]), {ok, OrderLines} = amnesia:load_referenced (sales, Order, order_line), io:format ("The items of order #31 are: ~p~n", [OrderLines]), OrderLinesWithProduct = lists:map (fun (Line) -> {ok, LineWithProduct} = amnesia:load_referenced (sales, Line), LineWithProduct end, OrderLines), io:format ("The items of order #31, with products explicited, are:~n~p~n", [OrderLinesWithProduct]), ok. test_fetch () -> {ok, X1} = amnesia:fetch (sales, customer), io:format ("SIMPLE FETCH = ~p~n~n", [X1]), {ok, X2} = amnesia:fetch (sales, [customer, ?JOIN, orders, ?JOIN, order_line]), io:format ("FETCH WITH JOINS = ~p~n~n", [X2]), {ok, X3} = amnesia:fetch (sales, orders, {"order_number = $1", [30]}), io:format ("SIMPLE FETCH WITH SELECTION = ~p~n~n", [X3]), {ok, X4} = amnesia:fetch (sales, [customer, ?JOIN, orders, ?JOIN, order_line], {"name = $1", ["Corrado"]}), io:format ("FETCH WITH JOINS AND SELECTION = ~p~n~n", [X4]), {ok, X5} = amnesia:fetch (sales, customer, {}, [{order_by, name}]), io:format ("SIMPLE FETCH WITH ORDERING = ~p~n~n", [X5]), {ok, X6} = amnesia:fetch (sales, [customer, ?JOIN, orders], {}, [{order_by, order_number}]), io:format ("FETCH WITH JOINS AND ORDERING = ~p~n~n", [X6]), ok. 
test_aggregate() -> {ok, X1} = amnesia:fetch (sales, customer, {}, [{aggregate, "count(*)", integer}]), io:format ("SIMPLE COUNT = ~p~n~n", [X1]), {ok, X2} = amnesia:fetch (sales, product, {}, [{aggregate, "max(price)", decimal}]), io:format ("SIMPLE MAX = ~p~n~n", [X2]), {ok, X3} = amnesia:fetch (sales, product, {}, [{aggregate, "count(*)", integer, product_code}]), io:format ("COUNT WITH AGGREGATION (GROUP BY) = ~p~n~n", [X3]), {ok, X4} = amnesia:fetch (sales, [product, ?JOIN, order_line], {}, [{aggregate, "sum(quantity)", integer, product_code}]), io:format ("COUNT WITH AGGREGATION (GROUP BY) AND JOIN = ~p~n~n", [X4]), {ok, X5} = amnesia:fetch (sales, [product, ?JOIN, order_line], {}, [{aggregate, "sum(quantity)", integer, product_code, {"__aggregated_data__ > $1", [5]}}]), io:format ("COUNT WITH AGGREGATION (GROUP BY), JOIN AND HAVING= ~p~n~n", [X5]), {ok, X6} = amnesia:fetch (sales, [product, ?JOIN, order_line], {}, [{aggregate, "sum(quantity)", integer, product_code}, {order_by, '__aggregated_data__', desc}]), io:format ("COUNT WITH AGGREGATION (GROUP BY), JOIN AND ORDERING= ~p~n~n", [X6]), {ok, X7} = amnesia:fetch (sales, [product, ?JOIN, order_line, ?JOIN, orders], {}, [{aggregate, "sum(quantity * price)", decimal, order_number}]), io:format ("~p~n~n", [X7]), X7. test_cursor () -> {ok, CursorID} = amnesia:create_cursor ( sales, amnesia:fetch (sales, [customer, ?JOIN, orders, ?JOIN, order_line] )), io:format ("CURSOR ID = ~p~n~n", [CursorID]), show_cursor_data (CursorID, 1). show_cursor_data (CursorID, N) -> case amnesia:nth (sales, CursorID, N) of {end_of_data} -> amnesia:delete_cursor (sales, CursorID); {ok, X} -> io:format ("Item #~p = ~p~n~n", [N, X]), show_cursor_data (CursorID, N + 1) end. </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/dizengrong/erlang_game/4598f97daa9ca5eecff292ac401dd8f903eea867/gerl/lib/amnesia/examples/sales_test.erl</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">erlang</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "></span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">-module (sales_test). -include ("sales.hrl"). -include_lib ("amnesia/include/amnesia.hrl"). -compile ([export_all]). 
populate() -> amnesia:open({local, sales}, sales), {ok, Cust1} = amnesia:add_new (sales, #customer {customer_code = 102341, name = "John", address = "XXXXX"}), {ok, Cust2} = amnesia:add_new (sales, #customer {customer_code = 394021, name = "Corrado", address = "YYYYYY", email = "corrado@yyy"}), {ok, Cust3} = amnesia:add_new (sales, #customer {customer_code = 102391, name = "Dave", address = "Dave's home", email = "dave@zzz"}), {ok, P1} = amnesia:add_new (sales, #product { product_code = "001", description = "CPU Intel", price = 231.10 }), {ok, P2} = amnesia:add_new (sales, #product { product_code = "002", description = "Compact Flash 4G", price = 57.90 }), {ok, P3} = amnesia:add_new (sales, #product { product_code = "003", description = "Hard Disk 500G", price = 190.77 }), {ok, Order} = amnesia:add_new (sales, #orders { order_number = 30, order_date = {2008, 7, 17}, customer = Cust2 }), amnesia:add_new (sales, #order_line { orders = Order, product = P2, quantity = 3 }), amnesia:add_new (sales, #order_line { orders = Order, product = P1, quantity = 10 }), amnesia:add_new (sales, [#product { product_code = "004", description = "Data Server", price = 5200.00 }, #orders { order_number = 31, customer = Cust1}, #order_line { orders = '$2', product = P3, quantity = 2} , #order_line { orders = '$2', product = '$1', quantity = 11 } ]), ok. test_join () -> amnesia:fetch (sales, [customer, ?JOIN, orders, ?JOIN, order_line]). test_join (Pid) -> amnesia:fetch (Pid, [customer, ?JOIN, orders, ?JOIN, order_line]). test_connections () -> {ok, [Order]} = amnesia:fetch (sales, orders, {"order_number = 31", []}), io:format ("Order #31 is: ~p~n", [Order]), {ok, OrderWithCust} = amnesia:load_referenced (sales, Order), io:format ("Order #31 with customer explicited is: ~p~n", [OrderWithCust]), {ok, OrderLines} = amnesia:load_referenced (sales, Order, order_line), io:format ("The items of order #31 are: ~p~n", [OrderLines]), OrderLinesWithProduct = lists:map (fun (Line) -> {ok, LineWithProduct} = amnesia:load_referenced (sales, Line), LineWithProduct end, OrderLines), io:format ("The items of order #31, with products explicited, are:~n~p~n", [OrderLinesWithProduct]), ok. test_fetch () -> {ok, X1} = amnesia:fetch (sales, customer), io:format ("SIMPLE FETCH = ~p~n~n", [X1]), {ok, X2} = amnesia:fetch (sales, [customer, ?JOIN, orders, ?JOIN, order_line]), io:format ("FETCH WITH JOINS = ~p~n~n", [X2]), {ok, X3} = amnesia:fetch (sales, orders, {"order_number = $1", [30]}), io:format ("SIMPLE FETCH WITH SELECTION = ~p~n~n", [X3]), {ok, X4} = amnesia:fetch (sales, [customer, ?JOIN, orders, ?JOIN, order_line], {"name = $1", ["Corrado"]}), io:format ("FETCH WITH JOINS AND SELECTION = ~p~n~n", [X4]), {ok, X5} = amnesia:fetch (sales, customer, {}, [{order_by, name}]), io:format ("SIMPLE FETCH WITH ORDERING = ~p~n~n", [X5]), {ok, X6} = amnesia:fetch (sales, [customer, ?JOIN, orders], {}, [{order_by, order_number}]), io:format ("FETCH WITH JOINS AND ORDERING = ~p~n~n", [X6]), ok. 

Repository: SuzanneSoy/anaphoric
File: acond-test.rkt
Language: racket
Source: https://raw.githubusercontent.com/SuzanneSoy/anaphoric/c648ec2aad6d2b2ec72acc729143454d1e855cf6/test/acond-test.rkt

#lang racket
(require anaphoric/acond rackunit)

(define lst '(x y z a b c))
(define seen 0)

;; With else branch
(check-equal? (acond [(member 'a lst)
                      (set! seen (add1 seen))
                      (check-equal? it '(a b c))
                      'seen-01]
                     [(member 'b lst) (fail "acond selected wrong branch")]
                     [else (fail "acond selected wrong branch")])
              'seen-01)
(check-equal? seen 1)

;; multiple body statements
(check-equal? (acond [(member 'absent lst) (fail "acond selected wrong branch")]
                     [(member 'b lst) (begin (check-equal? it '(b c)) 'seen-02)]
                     [else (fail "acond selected wrong branch")])
              'seen-02)

(check-equal? (acond [(member 'absent lst) (fail "acond selected wrong branch")]
                     [(member 'absent2 lst) (fail "acond selected wrong branch")]
                     [else 'seen-03])
              'seen-03)

;; Just else branch
(check-equal? (acond [else 'seen-04]) 'seen-04)

;; Multiple body statements
(check-equal? (acond [(member 'absent lst) (fail "acond selected wrong branch")]
                     [(member 'absent2 lst) (fail "acond selected wrong branch")]
                     [else (set! seen (add1 seen)) 'seen-05])
              'seen-05)
(check-equal? seen 2)

;; Without else branch
(check-equal? (acond [(member 'a lst)
                      (set! seen (add1 seen))
                      (check-equal? it '(a b c))
                      'seen-06]
                     [(member 'b lst) (fail "acond selected wrong branch")])
              'seen-06)
(check-equal? seen 3)

(check-equal? (acond [(member 'absent lst) (fail "acond selected wrong branch")]
                     [(member 'b lst) (begin (check-equal? it '(b c)) 'seen-07)])
              'seen-07)

(check-equal? (acond [(member 'absent lst) (fail "acond selected wrong branch")]
                     [(member 'absent2 lst) (fail "acond selected wrong branch")])
              (void))

;; No branch
(check-equal? (acond) (void))

;; Single branch
(check-equal? (acond [(member 'a lst) (begin (check-equal? it '(a b c)) 'seen-09)])
              'seen-09)
(check-equal? (acond [(member 'absent lst) (fail "acond selected wrong branch")])
              (void))

Repository: dyzsr/ocaml-selectml
File: t330-compact-2.ml
Language: ocaml
Source: https://raw.githubusercontent.com/dyzsr/ocaml-selectml/875544110abb3350e9fb5ec9bbadffa332c270d2/testsuite/tests/tool-ocaml/t330-compact-2.ml

(* TEST
include tool-ocaml-lib
flags = "-w -a"
ocaml_script_as_argument = "true"
* setup-ocaml-build-env
** ocaml
*)

open Lib;;
Gc.compact ();;

let _ = Pervasives.do_at_exit();;

(**
Pervasives , 68 1204 APPLY1 1205 ATOM0 1206 SETGLOBAL T330 - compact-2 1208 STOP * 0 CONSTINT 42 2 PUSHACC0 3 MAKEBLOCK1 0 5 POP 1 7 SETGLOBAL Lib 9 BRANCH 746 11 RESTART 12 GRAB 1 14 ACC0 15 BRANCHIFNOT 28 17 ACC1 18 PUSHACC1 19 GETFIELD1 20 PUSHOFFSETCLOSURE0 21 APPLY2 22 PUSHACC1 23 GETFIELD0 24 MAKEBLOCK2 0 26 RETURN 2 28 ACC1 29 RETURN 2 31 RESTART 32 GRAB 3 34 CONST0 35 PUSHACC4 36 LEINT 37 BRANCHIFNOT 42 39 CONST0 40 RETURN 4 42 ACC3 43 PUSHACC3 44 PUSHACC3 45 PUSHACC3 46 C_CALL4 caml_input 48 PUSHCONST0 49 PUSHACC1 50 EQ 51 BRANCHIFNOT 58 53 GETGLOBAL End_of_file 55 MAKEBLOCK1 0 57 RAISE 58 ACC0 59 PUSHACC5 60 SUBINT 61 PUSHACC1 62 PUSHACC5 63 ADDINT 64 PUSHACC4 65 PUSHACC4 66 PUSHOFFSETCLOSURE0 67 APPTERM 4, 9 70 ACC0 71 C_CALL1 caml_input_scan_line 73 PUSHCONST0 74 PUSHACC1 75 EQ 76 BRANCHIFNOT 83 78 GETGLOBAL End_of_file 80 MAKEBLOCK1 0 82 RAISE 83 CONST0 84 PUSHACC1 85 GTINT 86 BRANCHIFNOT 107 88 ACC0 89 OFFSETINT -1 91 C_CALL1 create_string 93 PUSHACC1 94 OFFSETINT -1 96 PUSHCONST0 97 PUSHACC2 98 PUSHACC5 99 C_CALL4 caml_input 101 ACC2 102 C_CALL1 caml_input_char 104 ACC0 105 RETURN 3 107 ACC0 108 NEGINT 109 C_CALL1 create_string 111 PUSHACC1 112 NEGINT 113 PUSHCONST0 114 PUSHACC2 115 PUSHACC5 116 C_CALL4 caml_input 118 CONST0 119 PUSHTRAP 130 121 ACC6 122 PUSHOFFSETCLOSURE0 123 APPLY1 124 PUSHACC5 125 PUSHENVACC1 126 APPLY2 127 POPTRAP 128 RETURN 3 130 PUSHGETGLOBAL End_of_file 132 PUSHACC1 133 GETFIELD0 134 EQ 135 BRANCHIFNOT 140 137 ACC1 138 RETURN 4 140 ACC0 141 RAISE 142 ACC0 143 C_CALL1 caml_flush 145 RETURN 1 147 RESTART 148 GRAB 1 150 ACC1 151 PUSHACC1 152 C_CALL2 caml_output_char 154 RETURN 2 156 RESTART 157 GRAB 1 159 ACC1 160 PUSHACC1 161 C_CALL2 caml_output_char 163 RETURN 2 165 RESTART 166 GRAB 1 168 ACC1 169 PUSHACC1 170 C_CALL2 caml_output_int 172 RETURN 2 174 RESTART 175 GRAB 1 177 ACC1 178 PUSHACC1 179 C_CALL2 caml_seek_out 181 RETURN 2 183 ACC0 184 C_CALL1 caml_pos_out 186 RETURN 1 188 ACC0 189 C_CALL1 caml_channel_size 191 RETURN 1 193 RESTART 194 GRAB 1 196 ACC1 197 PUSHACC1 198 C_CALL2 caml_set_binary_mode 200 RETURN 2 202 ACC0 203 C_CALL1 caml_input_char 205 RETURN 1 207 ACC0 208 C_CALL1 caml_input_char 210 RETURN 1 212 ACC0 213 C_CALL1 caml_input_int 215 RETURN 1 217 ACC0 218 C_CALL1 input_value 220 RETURN 1 222 RESTART 223 GRAB 1 225 ACC1 226 PUSHACC1 227 C_CALL2 caml_seek_in 229 RETURN 2 231 ACC0 232 C_CALL1 caml_pos_in 234 RETURN 1 236 ACC0 237 C_CALL1 caml_channel_size 239 RETURN 1 241 ACC0 242 C_CALL1 caml_close_channel 244 RETURN 1 246 RESTART 247 GRAB 1 249 ACC1 250 PUSHACC1 251 C_CALL2 caml_set_binary_mode 253 RETURN 2 255 CONST0 256 PUSHENVACC1 257 APPLY1 258 ACC0 259 C_CALL1 sys_exit 261 RETURN 1 263 CONST0 264 PUSHENVACC1 265 GETFIELD0 266 APPTERM1 2 268 CONST0 269 PUSHENVACC1 270 APPLY1 271 CONST0 272 PUSHENVACC2 273 APPTERM1 2 275 ENVACC1 276 GETFIELD0 277 PUSHACC0 278 PUSHACC2 279 CLOSURE 2, 268 282 PUSHENVACC1 283 SETFIELD0 284 RETURN 2 286 ENVACC1 287 C_CALL1 caml_flush 289 ENVACC2 290 C_CALL1 caml_flush 292 RETURN 1 294 CONST0 295 PUSHENVACC1 296 APPLY1 297 C_CALL1 float_of_string 299 RETURN 1 301 CONST0 302 PUSHENVACC1 303 APPLY1 304 C_CALL1 int_of_string 306 RETURN 1 308 ENVACC2 309 C_CALL1 caml_flush 311 ENVACC1 312 PUSHENVACC3 313 APPTERM1 2 315 CONSTINT 13 317 PUSHENVACC1 318 C_CALL2 caml_output_char 320 ENVACC1 321 C_CALL1 caml_flush 323 RETURN 1 325 ACC0 326 PUSHENVACC1 327 PUSHENVACC2 328 APPLY2 329 CONSTINT 13 331 PUSHENVACC1 332 C_CALL2 caml_output_char 334 ENVACC1 335 C_CALL1 caml_flush 337 RETURN 1 339 ACC0 340 PUSHENVACC1 
341 APPLY1 342 PUSHENVACC2 343 PUSHENVACC3 344 APPTERM2 3 346 ACC0 347 PUSHENVACC1 348 APPLY1 349 PUSHENVACC2 350 PUSHENVACC3 351 APPTERM2 3 353 ACC0 354 PUSHENVACC1 355 PUSHENVACC2 356 APPTERM2 3 358 ACC0 359 PUSHENVACC1 360 C_CALL2 caml_output_char 362 RETURN 1 364 CONSTINT 13 366 PUSHENVACC1 367 C_CALL2 caml_output_char 369 ENVACC1 370 C_CALL1 caml_flush 372 RETURN 1 374 ACC0 375 PUSHENVACC1 376 PUSHENVACC2 377 APPLY2 378 CONSTINT 13 380 PUSHENVACC1 381 C_CALL2 caml_output_char 383 RETURN 1 385 ACC0 386 PUSHENVACC1 387 APPLY1 388 PUSHENVACC2 389 PUSHENVACC3 390 APPTERM2 3 392 ACC0 393 PUSHENVACC1 394 APPLY1 395 PUSHENVACC2 396 PUSHENVACC3 397 APPTERM2 3 399 ACC0 400 PUSHENVACC1 401 PUSHENVACC2 402 APPTERM2 3 404 ACC0 405 PUSHENVACC1 406 C_CALL2 caml_output_char 408 RETURN 1 410 RESTART 411 GRAB 3 413 CONST0 414 PUSHACC3 415 LTINT 416 BRANCHIF 427 418 ACC1 419 C_CALL1 ml_string_length 421 PUSHACC4 422 PUSHACC4 423 ADDINT 424 GTINT 425 BRANCHIFNOT 432 427 GETGLOBAL "really_input" 429 PUSHENVACC1 430 APPTERM1 5 432 ACC3 433 PUSHACC3 434 PUSHACC3 435 PUSHACC3 436 PUSHENVACC2 437 APPTERM 4, 8 440 RESTART 441 GRAB 3 443 CONST0 444 PUSHACC3 445 LTINT 446 BRANCHIF 457 448 ACC1 449 C_CALL1 ml_string_length 451 PUSHACC4 452 PUSHACC4 453 ADDINT 454 GTINT 455 BRANCHIFNOT 462 457 GETGLOBAL "input" 459 PUSHENVACC1 460 APPTERM1 5 462 ACC3 463 PUSHACC3 464 PUSHACC3 465 PUSHACC3 466 C_CALL4 caml_input 468 RETURN 4 470 ACC0 471 PUSHCONST0 472 PUSHGETGLOBAL <0>(0, <0>(6, 0)) 474 PUSHENVACC1 475 APPTERM3 4 477 ACC0 478 PUSHCONST0 479 PUSHGETGLOBAL <0>(0, <0>(7, 0)) 481 PUSHENVACC1 482 APPTERM3 4 484 RESTART 485 GRAB 2 487 ACC1 488 PUSHACC1 489 PUSHACC4 490 C_CALL3 sys_open 492 C_CALL1 caml_open_descriptor 494 RETURN 3 496 ACC0 497 C_CALL1 caml_flush 499 ACC0 500 C_CALL1 caml_close_channel 502 RETURN 1 504 RESTART 505 GRAB 1 507 CONST0 508 PUSHACC2 509 PUSHACC2 510 C_CALL3 output_value 512 RETURN 2 514 RESTART 515 GRAB 3 517 CONST0 518 PUSHACC3 519 LTINT 520 BRANCHIF 531 522 ACC1 523 C_CALL1 ml_string_length 525 PUSHACC4 526 PUSHACC4 527 ADDINT 528 GTINT 529 BRANCHIFNOT 536 531 GETGLOBAL "output" 533 PUSHENVACC1 534 APPTERM1 5 536 ACC3 537 PUSHACC3 538 PUSHACC3 539 PUSHACC3 540 C_CALL4 caml_output 542 RETURN 4 544 RESTART 545 GRAB 1 547 ACC1 548 C_CALL1 ml_string_length 550 PUSHCONST0 551 PUSHACC3 552 PUSHACC3 553 C_CALL4 caml_output 555 RETURN 2 557 ACC0 558 PUSHCONSTINT 438 560 PUSHGETGLOBAL <0>(1, <0>(3, <0>(4, <0>(6, 0)))) 562 PUSHENVACC1 563 APPTERM3 4 565 ACC0 566 PUSHCONSTINT 438 568 PUSHGETGLOBAL <0>(1, <0>(3, <0>(4, <0>(7, 0)))) 570 PUSHENVACC1 571 APPTERM3 4 573 RESTART 574 GRAB 2 576 ACC1 577 PUSHACC1 578 PUSHACC4 579 C_CALL3 sys_open 581 C_CALL1 caml_open_descriptor 583 RETURN 3 585 ACC0 586 PUSHGETGLOBAL "%.12g" 588 C_CALL2 format_float 590 RETURN 1 592 ACC0 593 PUSHGETGLOBAL "%d" 595 C_CALL2 format_int 597 RETURN 1 599 GETGLOBAL "false" 601 PUSHACC1 602 C_CALL2 string_equal 604 BRANCHIFNOT 609 606 CONST0 607 RETURN 1 609 GETGLOBAL "true" 611 PUSHACC1 612 C_CALL2 string_equal 614 BRANCHIFNOT 619 616 CONST1 617 RETURN 1 619 GETGLOBAL "bool_of_string" 621 PUSHENVACC1 622 APPTERM1 2 624 ACC0 625 BRANCHIFNOT 631 627 GETGLOBAL "true" 629 RETURN 1 631 GETGLOBAL "false" 633 RETURN 1 635 CONST0 636 PUSHACC1 637 LTINT 638 BRANCHIF 646 640 CONSTINT 255 642 PUSHACC1 643 GTINT 644 BRANCHIFNOT 651 646 GETGLOBAL "char_of_int" 648 PUSHENVACC1 649 APPTERM1 2 651 ACC0 652 RETURN 1 654 RESTART 655 GRAB 1 657 ACC0 658 C_CALL1 ml_string_length 660 PUSHACC2 661 C_CALL1 ml_string_length 663 PUSHACC0 664 PUSHACC2 665 
ADDINT 666 C_CALL1 create_string 668 PUSHACC2 669 PUSHCONST0 670 PUSHACC2 671 PUSHCONST0 672 PUSHACC7 673 C_CALL5 blit_string 675 ACC1 676 PUSHACC3 677 PUSHACC2 678 PUSHCONST0 679 PUSHACC 8 681 C_CALL5 blit_string 683 ACC0 684 RETURN 5 686 CONSTINT -1 688 PUSHACC1 689 XORINT 690 RETURN 1 692 CONST0 693 PUSHACC1 694 GEINT 695 BRANCHIFNOT 700 697 ACC0 698 RETURN 1 700 ACC0 701 NEGINT 702 RETURN 1 704 RESTART 705 GRAB 1 707 ACC1 708 PUSHACC1 709 C_CALL2 greaterequal 711 BRANCHIFNOT 716 713 ACC0 714 RETURN 2 716 ACC1 717 RETURN 2 719 RESTART 720 GRAB 1 722 ACC1 723 PUSHACC1 724 C_CALL2 lessequal 726 BRANCHIFNOT 731 728 ACC0 729 RETURN 2 731 ACC1 732 RETURN 2 734 ACC0 735 PUSHGETGLOBAL Invalid_argument 737 MAKEBLOCK2 0 739 RAISE 740 ACC0 741 PUSHGETGLOBAL Failure 743 MAKEBLOCK2 0 745 RAISE 746 CLOSURE 0, 740 749 PUSH 750 CLOSURE 0, 734 753 PUSHGETGLOBAL "Pervasives.Exit" 755 MAKEBLOCK1 0 757 PUSHGETGLOBAL "Pervasives.Assert_failure" 759 MAKEBLOCK1 0 761 PUSH 762 CLOSURE 0, 720 765 PUSH 766 CLOSURE 0, 705 769 PUSH 770 CLOSURE 0, 692 773 PUSH 774 CLOSURE 0, 686 777 PUSHCONST0 778 PUSHCONSTINT 31 780 PUSHCONST1 781 LSLINT 782 EQ 783 BRANCHIFNOT 789 785 CONSTINT 30 787 BRANCH 791 789 CONSTINT 62 791 PUSHCONST1 792 LSLINT 793 PUSHACC0 794 OFFSETINT -1 796 PUSH 797 CLOSURE 0, 655 800 PUSHACC 9 802 CLOSURE 1, 635 805 PUSH 806 CLOSURE 0, 624 809 PUSHACC 11 811 CLOSURE 1, 599 814 PUSH 815 CLOSURE 0, 592 818 PUSH 819 CLOSURE 0, 585 822 PUSH 823 CLOSUREREC 0, 12 827 CONST0 828 C_CALL1 caml_open_descriptor 830 PUSHCONST1 831 C_CALL1 caml_open_descriptor 833 PUSHCONST2 834 C_CALL1 caml_open_descriptor 836 PUSH 837 CLOSURE 0, 574 840 PUSHACC0 841 CLOSURE 1, 565 844 PUSHACC1 845 CLOSURE 1, 557 848 PUSH 849 CLOSURE 0, 545 852 PUSHACC 22 854 CLOSURE 1, 515 857 PUSH 858 CLOSURE 0, 505 861 PUSH 862 CLOSURE 0, 496 865 PUSH 866 CLOSURE 0, 485 869 PUSHACC0 870 CLOSURE 1, 477 873 PUSHACC1 874 CLOSURE 1, 470 877 PUSHACC 28 879 CLOSURE 1, 441 882 PUSH 883 CLOSUREREC 0, 32 887 ACC0 888 PUSHACC 31 890 CLOSURE 2, 411 893 PUSHACC 22 895 CLOSUREREC 1, 70 899 ACC 15 901 CLOSURE 1, 404 904 PUSHACC 11 906 PUSHACC 17 908 CLOSURE 2, 399 911 PUSHACC 12 913 PUSHACC 18 915 PUSHACC 23 917 CLOSURE 3, 392 920 PUSHACC 13 922 PUSHACC 19 924 PUSHACC 23 926 CLOSURE 3, 385 929 PUSHACC 14 931 PUSHACC 20 933 CLOSURE 2, 374 936 PUSHACC 20 938 CLOSURE 1, 364 941 PUSHACC 20 943 CLOSURE 1, 358 946 PUSHACC 17 948 PUSHACC 22 950 CLOSURE 2, 353 953 PUSHACC 18 955 PUSHACC 23 957 PUSHACC 29 959 CLOSURE 3, 346 962 PUSHACC 19 964 PUSHACC 24 966 PUSHACC 29 968 CLOSURE 3, 339 971 PUSHACC 20 973 PUSHACC 25 975 CLOSURE 2, 325 978 PUSHACC 25 980 CLOSURE 1, 315 983 PUSHACC 12 985 PUSHACC 28 987 PUSHACC 30 989 CLOSURE 3, 308 992 PUSHACC0 993 CLOSURE 1, 301 996 PUSHACC1 997 CLOSURE 1, 294 1000 PUSHACC 29 1002 PUSHACC 31 1004 CLOSURE 2, 286 1007 MAKEBLOCK1 0 1009 PUSHACC0 1010 CLOSURE 1, 275 1013 PUSHACC1 1014 CLOSURE 1, 263 1017 PUSHACC0 1018 CLOSURE 1, 255 1021 PUSHACC1 1022 PUSHACC 22 1024 PUSHACC4 1025 PUSHACC3 1026 PUSH 1027 CLOSURE 0, 247 1030 PUSH 1031 CLOSURE 0, 241 1034 PUSH 1035 CLOSURE 0, 236 1038 PUSH 1039 CLOSURE 0, 231 1042 PUSH 1043 CLOSURE 0, 223 1046 PUSH 1047 CLOSURE 0, 217 1050 PUSH 1051 CLOSURE 0, 212 1054 PUSH 1055 CLOSURE 0, 207 1058 PUSHACC 32 1060 PUSHACC 35 1062 PUSHACC 33 1064 PUSH 1065 CLOSURE 0, 202 1068 PUSHACC 41 1070 PUSHACC 40 1072 PUSHACC 42 1074 PUSH 1075 CLOSURE 0, 194 1078 PUSHACC 46 1080 PUSH 1081 CLOSURE 0, 188 1084 PUSH 1085 CLOSURE 0, 183 1088 PUSH 1089 CLOSURE 0, 175 1092 PUSHACC 51 1094 PUSH 1095 CLOSURE 0, 166 1098 
PUSH 1099 CLOSURE 0, 157 1102 PUSHACC 55 1104 PUSHACC 57 1106 PUSH 1107 CLOSURE 0, 148 1110 PUSH 1111 CLOSURE 0, 142 1114 PUSHACC 63 1116 PUSHACC 62 1118 PUSHACC 64 1120 PUSHACC 38 1122 PUSHACC 40 1124 PUSHACC 42 1126 PUSHACC 44 1128 PUSHACC 46 1130 PUSHACC 48 1132 PUSHACC 50 1134 PUSHACC 52 1136 PUSHACC 54 1138 PUSHACC 56 1140 PUSHACC 58 1142 PUSHACC 60 1144 PUSHACC 62 1146 PUSHACC 64 1148 PUSHACC 66 1150 PUSHACC 82 1152 PUSHACC 84 1154 PUSHACC 86 1156 PUSHACC 88 1158 PUSHACC 90 1160 PUSHACC 92 1162 PUSHACC 94 1164 PUSHACC 96 1166 PUSHACC 98 1168 PUSHACC 100 1170 PUSHACC 104 1172 PUSHACC 104 1174 PUSHACC 104 1176 PUSHACC 108 1178 PUSHACC 110 1180 PUSHACC 112 1182 PUSHACC 117 1184 PUSHACC 117 1186 PUSHACC 117 1188 PUSHACC 117 1190 MAKEBLOCK 69, 0 1193 POP 53 1195 SETGLOBAL Pervasives 1197 CONST0 1198 C_CALL1 gc_compaction 1200 CONST0 1201 PUSHGETGLOBALFIELD Pervasives, 68 1204 APPLY1 1205 ATOM0 1206 SETGLOBAL T330-compact-2 1208 STOP **)

Repository: shirok/WiLiKi
File: rss.scm
Language: scheme
Source: https://raw.githubusercontent.com/shirok/WiLiKi/c910d5d936c833887f7c7bc99e0e681e262b5334/src/wiliki/rss.scm

;;; wiliki/rss - an ad-hoc RSS generation routine for WiLiKi
;;;
;;;  Copyright (c) 2000-2009 < >
;;;
;;;   Permission is hereby granted, free of charge, to any person
;;;   obtaining a copy of this software and associated documentation
;;;   files (the "Software"), to deal in the Software without restriction,
;;;   including without limitation the rights to use, copy, modify,
;;;   merge, publish, distribute, sublicense, and/or sell copies of
;;;   the Software, and to permit persons to whom the Software is
;;;   furnished to do so, subject to the following conditions:
;;;
;;;   The above copyright notice and this permission notice shall be
;;;   included in all copies or substantial portions of the Software.
;;;
;;;   THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
;;;   EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
;;;   OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
;;;   NONINFRINGEMENT.  IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
;;;   HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
;;;   WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
;;;   FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
;;;   OTHER DEALINGS IN THE SOFTWARE.
;;;

;; In future, this might be rewritten to use proper XML framework.
;; for now, I use an ad-hoc approach.
(define-module wiliki.rss
  (use file.util)
  (use text.html-lite)
  (use text.tree)
  (use util.match)
  (use wiliki.core)
  (export rss-page
          rss-item-count rss-item-description rss-item-extra-elements
          rss-partial-content-lines rss-source rss-url-format))
(select-module wiliki.rss)

;; Parameters

;; # of items included in the RSS
(define rss-item-count (make-parameter 15))

;; What to include in the 'rdf:description' of each item.
;;   none - omit rdf:description
;;   raw  - raw wiki-marked up text.
;;   html - html rendered text. (heavy)
(define rss-item-description (make-parameter 'none))

;; # of maximum lines in the original wiki format to be included
;; in the partial content (raw-partial, html-partial).
(define rss-partial-content-lines (make-parameter 20))

;; A procedure that takes maximum # of entries, and returns a list
;; of entries to be included in the RSS.  The returned list should be
;; in the following form:
;;   <entries> : (<entry> ...)
;;   <entry>   : (<key> . <timestamp>) | ((<key> . <title>) . <timestamp>)
(define rss-source (make-parameter (cut wiliki:recent-changes-alist :length <>)))

;; Whether the url in RSS should be in the format of url?key or url/key
(define rss-url-format (make-parameter 'query))

;; If not #f, this is inserted as is into each <item>...</item>
(define rss-item-extra-elements (make-parameter #f))

;; Main entry
(define (rss-page :key (count (rss-item-count)) (item-description #f))
  (rss-format ((rss-source) count)
              (case (or item-description (rss-item-description))
                [(raw)          (cut raw-content <> #f)]
                [(raw-partial)  (cut raw-content <> #t)]
                [(html)         (cut html-content <> #f)]
                [(html-partial) (cut html-content <> #t)]
                [else (^_ "")])))

(define (rss-format entries item-description-proc)
  (let* ([self (wiliki)] [full-url (wiliki:url :full)])
    `("Content-type: text/xml\n\n"
      "<?xml version=\"1.0\" encoding=\"" ,(wiliki:output-charset) "\" ?>\n"
      "<rdf:RDF xmlns:rdf=\"-rdf-syntax-ns#\" xmlns=\"/\" xmlns:dc=\"/\" xmlns:content=\"/\" >\n"
      ,(rdf-channel (wiliki:url :full)
                    (rdf-title (ref (wiliki)'title))
                    (rdf-link full-url)
                    (rdf-description (ref (wiliki)'description))
                    (rdf-items-seq (map (^e (rdf-li (entry->url e))) entries)))
      ,(map (^e (let1 url (entry->url e)
                  (rdf-item url
                            (rdf-title (entry->title e))
                            (rdf-link url)
                            (item-description-proc (entry->key e))
                            (dc-date (entry->timestamp e))
                            (or (rss-item-extra-elements) ""))))
            entries)
      "</rdf:RDF>\n")))

(define (raw-content entry partial?)
  (if-let1 page (wiliki:db-get entry)
    (rdf-description (trim-content (ref page 'content) partial?))
    ""))

(define (html-content entry partial?)
  (if-let1 page (wiliki:db-get entry)
    ($ rdf-content $ tree->string
       $ map wiliki:sxml->stree $ wiliki:format-content
       $ trim-content (~ page'content) partial?)
    ""))

(define (trim-content raw-text partial?)
  (if partial?
    (string-join (take* (string-split raw-text "\n") (rss-partial-content-lines)) "\n")
    raw-text))

(define (entry->url entry)
  (case (rss-url-format)
    [(query) (wiliki:url :full "~a" (entry->key entry))]
    [(path)  (build-path (wiliki:url :full) (entry->key entry))]
    [else (wiliki:url :full "config-error:invalid-rss-url-format")]))

(define (entry->title entry)
  (match entry [((key . title) . _) title] [(key . _) key]))

(define (entry->key entry)
  (match entry [((key . title) . _) key] [(key . _) key]))

(define (entry->timestamp entry) (cdr entry))
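;; A hedged illustration (not from the source) of the shape that rss-source
;; is expected to return, following the <entries> grammar documented above.
;; The accessors entry->key, entry->title and entry->timestamp destructure
;; either variant:
;;
;;   (define sample-entries            ; hypothetical data
;;     '(("HomePage" . 1234567890)
;;       (("2009-01-01" . "News for 2009") . 1234500000)))
;;
;;   (entry->key   (car sample-entries))      ; => "HomePage"
;;   (entry->title (cadr sample-entries))     ; => "News for 2009"
;;   (entry->timestamp (car sample-entries))  ; => 1234567890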
;; RDF rendering utilities.  NB: these should be implemented within xml framework
(define (rdf-channel about . content)
  `("<channel rdf:about=\"" ,(html-escape-string about) "\">" ,@content "\n</channel>\n"))
(define (rdf-li resource)
  `("<rdf:li rdf:resource=\"" ,(html-escape-string resource) "\" />\n"))
(define (rdf-simple tag . content)
  `("<" ,tag ">" ,@content "</" ,tag ">\n"))
(define (rdf-item about . content)
  `("<item rdf:about=\"" ,(html-escape-string about) "\">" ,@content "</item>\n"))
(define (rdf-items-seq . items)
  `("<items><rdf:Seq>" ,@items "</rdf:Seq></items>\n"))
(define (rdf-simple-1 tag content)
  `("<" ,tag ">" ,(html-escape-string content) "</" ,tag ">\n"))
(define (rdf-title title) (rdf-simple-1 "title" title))
(define (rdf-link link) (rdf-simple-1 "link" link))
(define (rdf-description desc) (rdf-simple-1 "description" desc))
(define (rdf-content content)
  `("<content:encoded><![CDATA["
    ,(regexp-replace-all #/\]\]>/ content "]]]]><![CDATA[>")
    "]]></content:encoded>"))
(define (dc-date secs)
  (rdf-simple-1 "dc:date" (sys-strftime "%Y-%m-%dT%H:%M:%S+00:00" (sys-gmtime secs))))
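;; A quick sanity check of the CDATA escaping performed by rdf-content above
;; (hypothetical input, not from the source).  Any literal "]]>" in the
;; rendered page content is split across two CDATA sections so the emitted
;; XML stays well-formed:
;;
;;   (tree->string (rdf-content "a]]>b"))
;;   ; => "<content:encoded><![CDATA[a]]]]><![CDATA[>b]]></content:encoded>"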
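;; A minimal usage sketch, assuming the parameters above behave as ordinary
;; Gauche parameters (so parameterize works on them); the values are
;; illustrative, not from the source:
;;
;;   (parameterize ((rss-item-count 20)
;;                  (rss-item-description 'html)
;;                  (rss-url-format 'path))
;;     (rss-page))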
c690364e591a5769521efd47dd5cff6c7cde5e8d612327964e51d04b0da6ea9d | tezos-checker/checker | testChecker.ml

open Ctok
open Kit
open Tok
open Lqt
open Burrow
open OUnit2
open TestLib
open CheckerTypes
open Fa2Interface
open Fa2Ledger
open Fa2Implementation
open Error
open Ptr
open LiquidationAuctionTypes
open LiquidationAuction

let property_test_count = 10000
let qcheck_to_ounit t = OUnit.ounit2_of_ounit1 @@ QCheck_ounit.to_ounit_test t

module PtrMap = Map.Make(struct type t = ptr let compare = compare_ptr end)

let checker_address = !Ligo.Tezos.self_address

let empty_checker =
  initial_checker
    { ctok_fa2 = ctok_fa2_addr;
      ctez_cfmm = ctez_cfmm_addr;
      oracle = oracle_addr;
      collateral_fa2 = collateral_fa2_addr;
    }

(* The starting checker state should satisfy the invariants to begin with. *)
let _ = Checker.assert_checker_invariants empty_checker

(* Enhance the initial checker state with a populated cfmm in a consistent way. *)
let empty_checker_with_cfmm (cfmm: CfmmTypes.cfmm) =
  let checker_kit = kit_sub cfmm.kit (kit_of_denomination (Ligo.nat_from_literal "1n")) in
  let checker_liquidity = lqt_sub cfmm.lqt (lqt_of_denomination (Ligo.nat_from_literal "1n")) in
  let checker =
    { empty_checker with
      parameters = { empty_checker.parameters with circulating_kit = checker_kit };
      cfmm = cfmm;
      fa2_state =
        let fa2_state = initial_fa2_state in
        let fa2_state = ledger_issue_lqt (fa2_state, !Ligo.Tezos.self_address, checker_liquidity) in
        let fa2_state = ledger_issue_kit (fa2_state, !Ligo.Tezos.self_address, checker_kit) in
        fa2_state;
    } in
  Checker.assert_checker_invariants checker;
  checker
(* Produces a checker state with burrows.
 * Returns a list of the liquidatable burrow ids, underburrowed burrow ids, and the contract state *)
let checker_with_liquidatable_burrows () =
  let checker = empty_checker in
  (* Create some burrows and mint some kit *)
  let alice_burrow_1 = Ligo.nat_from_literal "0n" in
  let alice_burrow_nos = List.init 20 (fun i -> Ligo.nat_from_int64 (Int64.of_int (i+1))) in
  let bob_burrow_1 = Ligo.nat_from_literal "0n" in
  (* Alice burrow 1. Will NOT be *)
  Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:2 ~sender:alice_addr ~amount:Common.tez_zero;
  let _, checker = Checker.entrypoint_create_burrow (checker, (alice_burrow_1, None, tok_of_denomination (Ligo.nat_from_literal "2_000_000n"))) in
  (* burrow 2:N. Will be *)
  Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:3 ~sender:alice_addr ~amount:Common.tez_zero;
  let _, checker = Checker.entrypoint_mint_kit (checker, (alice_burrow_1, (kit_of_denomination (Ligo.nat_from_literal "100n")))) in
  let checker = List.fold_left (
      fun checker alice_burrow_no ->
        Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
        let _, checker = Checker.entrypoint_create_burrow (checker, (alice_burrow_no, None, tok_of_denomination (Ligo.nat_from_literal "2_000_000n"))) in
        Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
        let _, checker =
          let max_kit = (Checker.view_burrow_max_mintable_kit ((alice_addr, alice_burrow_no), checker)) in
          Checker.entrypoint_mint_kit (checker, (alice_burrow_no, max_kit))
        in
        checker
    ) checker alice_burrow_nos in
  (* Bob burrow 1. Will be. *)
  Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bob_addr ~amount:Common.tez_zero;
  let _, checker = Checker.entrypoint_create_burrow (checker, (bob_burrow_1, None, tok_of_denomination (Ligo.nat_from_literal "20_000_000n"))) in
  Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez");
  let _, checker =
    let max_kit = (Checker.view_burrow_max_mintable_kit ((bob_addr, bob_burrow_1), checker)) in
    Checker.entrypoint_mint_kit (checker, (bob_burrow_1, max_kit))
  in
  (* Increase value of kit to make some of the burrows by touching checker *)
  (* Note: setting the transaction to far in the future to ensure that the protected_index will become adequately high
   * for the burrows to be liquidatable.
*) Ligo.Tezos.new_transaction ~seconds_passed:10_000_000 ~blocks_passed:100_000 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_100_000n")) in (* Touch burrows *) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_touch_burrow (checker, (alice_addr, alice_burrow_1)) in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_touch_burrow (checker, (bob_addr, bob_burrow_1)) in let checker = List.fold_left ( fun checker alice_burrow_no -> Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_touch_burrow (checker, (alice_addr, alice_burrow_no)) in checker ) checker alice_burrow_nos in (* Check the expected properties of this test fixture *) assert_bool "alice_burrow_1 was liquidatable but it is expected to not be" (not (Burrow.burrow_is_liquidatable checker.parameters (Option.get (Ligo.Big_map.find_opt (alice_addr, alice_burrow_1) checker.burrows)))); assert_bool "bob_burrow_1 was not liquidatable but it is expected to be" (Burrow.burrow_is_liquidatable checker.parameters (Option.get (Ligo.Big_map.find_opt (bob_addr, bob_burrow_1) checker.burrows))); List.fold_left ( fun _ alice_burrow_no -> assert_bool ("alice_burrow_" ^ (Ligo.string_of_nat alice_burrow_no) ^ " was not liquidatable but it is expected to be") (Burrow.burrow_is_liquidatable checker.parameters (Option.get (Ligo.Big_map.find_opt (alice_addr, alice_burrow_no) checker.burrows)))) () alice_burrow_nos; Checker.assert_checker_invariants checker; let liquidatable_burrow_ids = List.append (List.map (fun x -> (alice_addr, x)) alice_burrow_nos) [(bob_addr, bob_burrow_1)] in let underburrowed_burrow_ids = [(alice_addr, alice_burrow_1)] in liquidatable_burrow_ids, underburrowed_burrow_ids, checker (* Produces a checker state with liquidation slices in the queue but no current auction. * Returns a list of details for queued slices related to a Close liquidation, * a list of details for all other slices in the queue, and the contract state. *) let checker_with_queued_liquidation_slices () = let liquidatable_burrow_ids, _, checker = checker_with_liquidatable_burrows () in Mark the burrows for liquidation . This will add slices to the queue . 
let checker, close_slice_details, other_slice_details = List.fold_left (fun (checker, close_liquidation_slices, other_liquidation_slices) burrow_id -> Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_mark_for_liquidation (checker, burrow_id) in let new_slice = Option.get (SliceList.slice_list_youngest (SliceList.slice_list_from_auction_state checker.liquidation_auctions burrow_id) checker.liquidation_auctions) in let slice_ptr = SliceList.slice_list_element_ptr new_slice in let slize_tez = (SliceList.slice_list_element_contents new_slice).tok in let is_burrow_now_closed = not (burrow_active (Option.get (Ligo.Big_map.find_opt burrow_id checker.burrows))) in let close_liquidation_slices, other_liquidation_slices = if is_burrow_now_closed then (List.append close_liquidation_slices [(burrow_id, slice_ptr, slize_tez)]), other_liquidation_slices else close_liquidation_slices, (List.append other_liquidation_slices [(burrow_id, slice_ptr, slize_tez)]) in checker, close_liquidation_slices, other_liquidation_slices ) (checker, [], []) liquidatable_burrow_ids in assert_bool "liquidation auction queue was empty, but it was expected to have some slices" (Option.is_some (Avl.avl_peek_front checker.liquidation_auctions.avl_storage checker.liquidation_auctions.queued_slices)); assert (List.length close_slice_details > 0); assert (List.length other_slice_details > 0); close_slice_details, other_slice_details, checker (* Produces a checker state with an active liquidation auction *) let checker_with_active_auction () = let _, _, checker = checker_with_queued_liquidation_slices () in Touch checker to start an auction Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_touch (checker, ()) in assert_bool "a current liquidation auction should have been started but was not" (Option.is_some checker.liquidation_auctions.current_auction); checker (* Produces a checker state with a completed liquidation auction *) let checker_with_completed_auction () = let checker = checker_with_active_auction () in (* Get the current auction minimum bid *) let auction_details = Checker.view_current_liquidation_auction_details ((), checker) in (* Mint enough kit to bid *) let bidder = alice_addr in let new_burrow_no = Ligo.nat_from_literal "100n" in Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bidder ~amount:Common.tez_zero; let _, checker = Checker.entrypoint_create_burrow (checker, (new_burrow_no, None, tok_of_denomination (Ligo.nat_from_literal "1_000_000_000n"))) in Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bidder ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_mint_kit (checker, (new_burrow_no, auction_details.minimum_bid)) in (* Place a bid *) Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bidder ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_liquidation_auction_place_bid (checker, ((Option.get checker.liquidation_auctions.current_auction).contents, auction_details.minimum_bid)) in (* Wait until enough time has passed for the auction to be completable then touch checker *) Touch checker to start an auction Ligo.Tezos.new_transaction ~seconds_passed:1202 ~blocks_passed:22 ~sender:bidder ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_touch 
(checker, ()) in assert_bool "there was not a completed liquidation auction but one should exist" (Option.is_some checker.liquidation_auctions.completed_auctions); bidder, checker Helper for creating new burrows and extracting their ID from the corresponding Ligo Ops let newly_created_burrow (checker: checker) (burrow_no: string) (collateral: tok) : burrow_id * checker = let _ops, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal "0n", None, collateral)) in ((!Ligo.Tezos.sender, Ligo.nat_from_literal burrow_no), checker) let get_balance_of (checker: checker) (addr: Ligo.address) (tok: fa2_token_id): Ligo.nat = let ops, _checker = Checker.strict_entrypoint_balance_of (checker, { requests = [{ owner=addr; token_id=tok }]; callback=Ligo.contract_of_address addr}) in match ops with | [ Transaction (FA2BalanceOfResponseTransactionValue [ { request = _; balance = kit } ], _, _) ] -> kit | _ -> failwith ("Unexpected fa2 response, got: " ^ show_operation_list ops) let suite = "Checker tests" >::: [ ("initial touch (noop)" >:: fun _ -> Ligo.Tezos.reset (); let checker1 = empty_checker in let ops, checker2 = Checker.touch_with_index checker1 (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "0n")) in assert_operation_list_equal ~expected:[] ~real:ops; assert_equal checker1 checker2; (* NOTE: we really want them to be identical here, hence the '='. *) () ); ("create_burrow - updates checker storage" >:: fun _ -> Ligo.Tezos.reset (); Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let burrow_id, checker = newly_created_burrow empty_checker "0n" (tok_of_denomination (Ligo.nat_from_literal "1_000_000n")) in assert_bool "No matching burrow found after calling create_burrow" (Option.is_some (Ligo.Big_map.find_opt burrow_id checker.burrows)); assert_bool "The burrow existed before calling create_burrow" (Option.is_none (Ligo.Big_map.find_opt burrow_id empty_checker.burrows)) ); ("create_burrow - collateral in burrow representation does not include creation deposit" >:: fun _ -> Ligo.Tezos.reset (); Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let burrow_id, checker = newly_created_burrow empty_checker "0n" Constants.creation_deposit in let expected_collateral = tok_zero in match Ligo.Big_map.find_opt burrow_id checker.burrows with | Some burrow -> assert_tok_equal ~expected:expected_collateral ~real:(burrow_collateral burrow) | None -> assert_failure "Expected a burrow representation to exist but none was found" ); ("create_burrow - fails when transaction amount is one mutez below creation deposit" >:: fun _ -> Ligo.Tezos.reset (); let amount = tok_sub Constants.creation_deposit (tok_of_denomination (Ligo.nat_from_literal "1n")) in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; assert_raises (Failure (Ligo.string_of_int error_InsufficientFunds)) (fun () -> Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amount))) ); ("create_burrow - passes when transaction amount is exactly the creation deposit" >:: fun _ -> Ligo.Tezos.reset (); Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let burrow_id, checker = newly_created_burrow empty_checker "0n" Constants.creation_deposit in match Ligo.Big_map.find_opt burrow_id checker.burrows with | Some burrow -> assert_tok_equal ~expected:tok_zero ~real:(burrow_collateral 
burrow) | None -> assert_failure "Expected a burrow representation to exist but none was found" ); ("deposit_collateral - owner can deposit" >:: fun _ -> Ligo.Tezos.reset (); let initial_deposit = tok_of_denomination (Ligo.nat_from_literal "3_000_000n") in let deposit = tok_of_denomination (Ligo.nat_from_literal "3_000_000n") in let expected_collateral = tok_add deposit (tok_sub initial_deposit Constants.creation_deposit) in (* Create the burrow *) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let (_, burrow_no) as burrow_id, checker = newly_created_burrow empty_checker "0n" initial_deposit in (* Make a deposit *) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let _, checker = Checker.entrypoint_deposit_collateral (checker, (burrow_no, deposit)) in match Ligo.Big_map.find_opt burrow_id checker.burrows with | Some burrow -> assert_tok_equal ~expected:expected_collateral ~real:(burrow_collateral burrow) | None -> assert_failure "Expected a burrow representation to exist but none was found" ); ("deposit_collateral - non-owner cannot deposit" >:: fun _ -> Ligo.Tezos.reset (); Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let _, checker = newly_created_burrow empty_checker "0n" (tok_of_denomination (Ligo.nat_from_literal "3_000_000n"))in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:Common.tez_zero; assert_raises (Failure (Ligo.string_of_int error_NonExistentBurrow)) (fun () -> Checker.entrypoint_deposit_collateral (checker, (Ligo.nat_from_literal "0n", tok_of_denomination (Ligo.nat_from_literal "1_000_000n")))) ); ("withdraw_collateral - owner can withdraw" >:: fun _ -> Ligo.Tezos.reset (); let initial_deposit = tok_of_denomination (Ligo.nat_from_literal "3_000_000n") in let withdrawal = tok_of_denomination (Ligo.nat_from_literal "1_000_000n") in let expected_collateral = tok_sub initial_deposit (tok_add Constants.creation_deposit withdrawal) in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let burrow_id, checker = newly_created_burrow empty_checker "0n" initial_deposit in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let _, checker = Checker.entrypoint_withdraw_collateral (checker, (Ligo.nat_from_literal "0n", withdrawal)) in match Ligo.Big_map.find_opt burrow_id checker.burrows with | Some burrow -> assert_tok_equal ~expected:expected_collateral ~real:(burrow_collateral burrow) | None -> assert_failure "Expected a burrow representation to exist but none was found" ); ("withdraw_collateral - non-owner cannot withdraw" >:: fun _ -> Ligo.Tezos.reset (); let initial_deposit = tok_of_denomination (Ligo.nat_from_literal "3_000_000n") in let withdrawal = tok_of_denomination (Ligo.nat_from_literal "1_000_000n") in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let _, checker = newly_created_burrow empty_checker "0n" initial_deposit in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez"); assert_raises (Failure (Ligo.string_of_int error_NonExistentBurrow)) (fun () -> Checker.entrypoint_withdraw_collateral (checker, (Ligo.nat_from_literal "0n", withdrawal))) ); ("entrypoint_activate_burrow - emits expected operations" >:: fun _ -> 
Ligo.Tezos.reset (); (* Create a burrow and deactivate it *) Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero; let (_, burrow_no), checker = newly_created_burrow empty_checker "0n" (tok_of_denomination (Ligo.nat_from_literal "100_000_000n"))in Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_deactivate_burrow (checker, (burrow_no, alice_addr)) in (* Then activate it *) Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero; let ops, _ = Checker.entrypoint_activate_burrow (checker, (burrow_no, Constants.creation_deposit)) in let burrow = Option.get (Ligo.Big_map.find_opt (alice_addr, burrow_no) checker.burrows) in let expected_ops = [ (LigoOp.Tezos.fa2_transfer_transaction [ Fa2Interface.( { from_ = alice_addr; txs = [ { to_ = burrow_address burrow; token_id = TokenMetadata.tok_token_id; amount = Ligo.nat_from_literal "1_000_000n"; }; ]; } ) ] (Ligo.tez_from_literal "0mutez") (Option.get (LigoOp.Tezos.get_entrypoint_opt "%transfer" checker.external_contracts.collateral_fa2)) ); ] in assert_operation_list_equal ~expected:expected_ops ~real:ops ); ("entrypoint_add_liquidity - emits expected operations" >:: fun _ -> Ligo.Tezos.reset (); let checker = empty_checker in (* Create a burrow and mint some kit *) Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero; let _, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal "0n", None, tok_of_denomination (Ligo.nat_from_literal "100_000_000n"))) in Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal "0n", (kit_of_denomination (Ligo.nat_from_literal "10_000_000n")))) in Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let ops, _ = Checker.entrypoint_add_liquidity (checker, (* Note: all values here were arbitrarily chosen based on the amount of kit we minted above *) ( ctok_of_denomination (Ligo.nat_from_literal "5_000_000n") , kit_of_denomination (Ligo.nat_from_literal "5_000_000n") , lqt_of_denomination (Ligo.nat_from_literal "5_000_000n") , Ligo.timestamp_from_seconds_literal 999 ) ) in let expected_ops = [ (LigoOp.Tezos.fa2_transfer_transaction [ Fa2Interface.{ from_ = alice_addr; txs = [ { to_ = checker_address; token_id = TokenMetadata.ctok_token_id; amount = Ligo.nat_from_literal "5_000_000n"; } ] } ] (Ligo.tez_from_literal "0mutez") (Option.get (LigoOp.Tezos.get_entrypoint_opt "%transfer" checker.external_contracts.ctok_fa2)) ); ] in assert_operation_list_equal ~expected:expected_ops ~real:ops ); ("entrypoint_burn_kit - emits expected operations" >:: fun _ -> Ligo.Tezos.reset (); let checker = empty_checker in (* Create a burrow and mint some kit *) Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero; let _, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal "0n", None, tok_of_denomination (Ligo.nat_from_literal "100_000_000n"))) in Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal "0n", (kit_of_denomination 
(Ligo.nat_from_literal "10_000_000n")))) in (* Then burn the kit *) Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let ops, _ = Checker.entrypoint_burn_kit (checker, (Ligo.nat_from_literal "0n", (kit_of_denomination (Ligo.nat_from_literal "10_000_000n")))) in assert_operation_list_equal ~expected:[] ~real:ops ); ("entrypoint_create_burrow - emits expected operations" >:: fun _ -> Ligo.Tezos.reset (); Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero; let amnt = tok_of_denomination (Ligo.nat_from_literal "100_000_000n") in let ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amnt)) in match ops with Note : it 's not really possible to check the first parameter of the contract here which is the * function which defines the contract 's logic . * function which defines the contract's logic. *) | [ (CreateBurrowContract (_, delegate, tez, storage)) ; (Transaction (FA2TransferTransactionValue _, _, _)) as op; ] -> (* burrow creation values *) assert_key_hash_option_equal ~expected:None ~real:delegate; assert_tez_equal ~expected:Common.tez_zero ~real:tez; assert_equal BurrowTypes.({checker_address=checker_address; collateral_fa2=collateral_fa2_addr}) storage; (* collateral initialization values *) let burrow = Option.get (Ligo.Big_map.find_opt (alice_addr, (Ligo.nat_from_literal "0n")) checker.burrows) in assert_operation_equal ~expected:( LigoOp.Tezos.fa2_transfer_transaction [ Fa2Interface.( { from_ = alice_addr; txs = [ { to_ = burrow_address burrow; token_id = TokenMetadata.tok_token_id; amount = tok_to_denomination_nat amnt; }; ]; } ) ] (Ligo.tez_from_literal "0mutez") (Option.get (LigoOp.Tezos.get_entrypoint_opt "%transfer" checker.external_contracts.collateral_fa2)) ) ~real:op | _ -> failwith ("Expected [CreateBurrowContract (_, _, _, _); Transaction (FA2TransferTransactionValue _, _, _)] but got " ^ show_operation_list ops) ); ("entrypoint_deactivate_burrow - emits expected operations" >:: fun _ -> Ligo.Tezos.reset (); (* Create a burrow and deactivate it *) Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero; let (_, burrow_no), checker = newly_created_burrow empty_checker "0n" (tok_of_denomination (Ligo.nat_from_literal "100_000_000n")) in Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let ops, checker = Checker.entrypoint_deactivate_burrow (checker, (burrow_no, alice_addr)) in let burrow = Option.get (Ligo.Big_map.find_opt (alice_addr, burrow_no) checker.burrows) in let expected_ops = [ (LigoOp.Tezos.address_nat_transaction (alice_addr, (Ligo.nat_from_literal "100_000_000n")) (Ligo.tez_from_literal "0mutez") (Option.get (LigoOp.Tezos.get_entrypoint_opt "%burrowTransfer" (burrow_address burrow))) ); ] in assert_operation_list_equal ~expected:expected_ops ~real:ops ); ("entrypoint_deposit_collateral - emits expected operations" >:: fun _ -> Ligo.Tezos.reset (); (* Create the burrow *) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let (_, burrow_no), checker = newly_created_burrow empty_checker "0n" (tok_of_denomination (Ligo.nat_from_literal "3_000_000n")) in (* Make a deposit *) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let ops, checker = 
Checker.entrypoint_deposit_collateral (checker, (burrow_no, tok_of_denomination (Ligo.nat_from_literal "3_000_000n"))) in let burrow = Option.get (Ligo.Big_map.find_opt (alice_addr, burrow_no) checker.burrows) in let expected_ops = [ (LigoOp.Tezos.fa2_transfer_transaction [ Fa2Interface.( { from_ = alice_addr; txs = [ { to_ = burrow_address burrow; token_id = TokenMetadata.tok_token_id; amount = Ligo.nat_from_literal "3_000_000n"; }; ]; } ) ] (Ligo.tez_from_literal "0mutez") (Option.get (LigoOp.Tezos.get_entrypoint_opt "%transfer" checker.external_contracts.collateral_fa2)) ); ] in assert_operation_list_equal ~expected:expected_ops ~real:ops ); ("entrypoint_liquidation_auction_place_bid - emits expected operations" >:: fun _ -> Ligo.Tezos.reset (); let checker = checker_with_active_auction () in (* Lookup the current minimum bid *) let auction_details = Checker.view_current_liquidation_auction_details ((), checker) in Mint some kit to be able to bid let new_burrow_no = Ligo.nat_from_literal "100n" in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let _, checker = Checker.entrypoint_create_burrow (checker, (new_burrow_no, None, tok_of_denomination (Ligo.nat_from_literal "1_000_000_000n"))) in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_mint_kit (checker, (new_burrow_no, auction_details.minimum_bid)) in (* Place a bid *) Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let ops, _checker = Checker.entrypoint_liquidation_auction_place_bid (checker, ((Option.get checker.liquidation_auctions.current_auction).contents, auction_details.minimum_bid)) in assert_operation_list_equal ~expected:[] ~real:ops ); ("entrypoint_mark_for_liquidation - emits expected operations" >:: fun _ -> Ligo.Tezos.reset (); (* Use a checker state already containing some liquidatable burrows *) let liquidatable_burrow_ids, _, checker = checker_with_liquidatable_burrows () in let burrow_id = List.nth liquidatable_burrow_ids 0 in let sender = bob_addr in (* Mark one of the liquidatable burrows for liquidation *) Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:sender ~amount:(Ligo.tez_from_literal "0mutez"); let ops, _ = Checker.entrypoint_mark_for_liquidation (checker, burrow_id) in let burrow = Option.get (Ligo.Big_map.find_opt burrow_id checker.burrows) in let expected_ops = [ (LigoOp.Tezos.address_nat_transaction (sender, (Ligo.nat_from_literal "1_001_000n")) (Ligo.tez_from_literal "0mutez") (Option.get (LigoOp.Tezos.get_entrypoint_opt "%burrowTransfer" (burrow_address burrow))) ); ] in assert_operation_list_equal ~expected:expected_ops ~real:ops ); ("entrypoint_cancel_liquidation_slice - emits expected operations" >:: fun _ -> Ligo.Tezos.reset (); (* Use a checker state already containing some liquidatable burrows *) (* Note: using a non-closed burrow for this test so we don't have to also re-activate the burrow *) let _, slice_details, checker = checker_with_queued_liquidation_slices () in let ((burrow_owner, burrow_no), slice_ptr, _) = List.nth slice_details 0 in Deposit some extra collateral to one of the burrows with slices in the auction queue Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:burrow_owner ~amount:Common.tez_zero; let _, checker = Checker.entrypoint_deposit_collateral (checker, (burrow_no, tok_of_denomination 
(Ligo.nat_from_literal "4_000_000n"))) in Now cancel one of the burrow 's liquidation slices Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:burrow_owner ~amount:(Ligo.tez_from_literal "0mutez"); let ops, _ = Checker.entrypoint_cancel_liquidation_slice (checker, slice_ptr) in assert_operation_list_equal ~expected:[] ~real:ops ); ("entrypoint_liquidation_auction_claim_win - emits expected operations" >:: fun _ -> Ligo.Tezos.reset (); let winning_bidder, checker = checker_with_completed_auction () in let auction_ptr = (Option.get checker.liquidation_auctions.completed_auctions).oldest in let sold_tok = (Option.get (Avl.avl_root_data checker.liquidation_auctions.avl_storage auction_ptr)).sold_tok in let slice_ptrs = avl_leaves_to_list checker.liquidation_auctions.avl_storage auction_ptr in (* Touch the remaining slices so the bid can be claimed. *) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_touch_liquidation_slices (checker, slice_ptrs) in (* Claim the winning bid *) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:winning_bidder ~amount:(Ligo.tez_from_literal "0mutez"); let ops, _ = Checker.entrypoint_liquidation_auction_claim_win (checker, auction_ptr) in let expected_ops = [ (LigoOp.Tezos.fa2_transfer_transaction [ Fa2Interface.( { from_ = !Ligo.Tezos.self_address; txs = [ { to_ = winning_bidder; token_id = TokenMetadata.tok_token_id; amount = tok_to_denomination_nat sold_tok; }; ]; } ) ] (Ligo.tez_from_literal "0mutez") (Option.get (LigoOp.Tezos.get_entrypoint_opt "%transfer" checker.external_contracts.collateral_fa2)) ); ] in assert_operation_list_equal ~expected:expected_ops ~real:ops ); ("entrypoint_mint_kit - emits expected operations" >:: fun _ -> Ligo.Tezos.reset (); let checker = empty_checker in (* Create a burrow and mint some kit *) Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero; let _, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal "0n", None, tok_of_denomination (Ligo.nat_from_literal "100_000_000n"))) in Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let ops, _ = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal "0n", (kit_of_denomination (Ligo.nat_from_literal "10_000_000n")))) in assert_operation_list_equal ~expected:[] ~real:ops ); ("entrypoint_set_burrow_delegate - emits expected operations" >:: fun _ -> (* NOTE: In a collateral=FA2 deployment this would actually fail. 
*) Ligo.Tezos.reset (); (* Create the burrow with no delegate *) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let (_, burrow_no), checker = newly_created_burrow empty_checker "0n" (tok_of_denomination (Ligo.nat_from_literal "3_000_000n")) in (* Then set the burrow's delegate *) Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let ops, checker = Checker.entrypoint_set_burrow_delegate (checker, (burrow_no, Some charles_key_hash)) in let burrow = Option.get (Ligo.Big_map.find_opt (alice_addr, burrow_no) checker.burrows) in let expected_ops = [ (LigoOp.Tezos.opt_key_hash_transaction (Some charles_key_hash) (Ligo.tez_from_literal "0mutez") (Option.get (LigoOp.Tezos.get_entrypoint_opt "%burrowSetDelegate" (burrow_address burrow))) ); ] in assert_operation_list_equal ~expected:expected_ops ~real:ops ); ("entrypoint_receive_price - emits expected operations" >:: fun _ -> Ligo.Tezos.reset (); let checker = empty_checker in Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:(checker.external_contracts.oracle) ~amount:(Ligo.tez_from_literal "0mutez"); let ops, _ = Checker.entrypoint_receive_price (checker, (Ligo.nat_from_literal "42n", Tok.tok_scaling_factor_nat)) in assert_operation_list_equal ~expected:[] ~real:ops ); ("entrypoint_remove_liquidity - emits expected operations" >:: fun _ -> Ligo.Tezos.reset (); let checker = empty_checker in (* Create a burrow and mint some kit *) Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero; let _, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal "0n", None, tok_of_denomination (Ligo.nat_from_literal "100_000_000n"))) in Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal "0n", (kit_of_denomination (Ligo.nat_from_literal "10_000_000n")))) in (* Add some liquidity to the contract *) Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_add_liquidity (checker, (* Note: all values here were arbitrarily chosen based on the amount of kit we minted above *) ( ctok_of_denomination (Ligo.nat_from_literal "5_000_000n") , kit_of_denomination (Ligo.nat_from_literal "5_000_000n") , lqt_of_denomination (Ligo.nat_from_literal "5_000_000n") , Ligo.timestamp_from_seconds_literal 999 ) ) in (* Now remove the liquidity *) Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let ops, _ = Checker.entrypoint_remove_liquidity (checker, (* Note: all values here were arbitrarily chosen based on the amount of kit we minted above *) ( lqt_of_denomination (Ligo.nat_from_literal "5_000_000n") , ctok_of_denomination (Ligo.nat_from_literal "5_000_000n") , kit_of_denomination (Ligo.nat_from_literal "5_000_000n") , Ligo.timestamp_from_seconds_literal 999 ) ) in let expected_ops = [ (LigoOp.Tezos.fa2_transfer_transaction [ Fa2Interface.{ from_ = checker_address; txs = [ { to_ = alice_addr; token_id = TokenMetadata.ctok_token_id; amount = Ligo.nat_from_literal "5_000_000n"; } ] } ] (Ligo.tez_from_literal "0mutez") (Option.get (LigoOp.Tezos.get_entrypoint_opt "%transfer" checker.external_contracts.ctok_fa2)) ); ] in 
assert_operation_list_equal ~expected:expected_ops ~real:ops );
(* FIXME: Operations differ between the FA2 deployment and the TEZ deployment
   ("entrypoint_touch - emits expected operations when checker needs to be touched" >:: fun _ ->
      Ligo.Tezos.reset ();
      let checker = empty_checker in
      Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
      let ops, _ = Checker.entrypoint_touch (checker, ()) in
      let expected_ops =
        [ (LigoOp.Tezos.nat_contract_transaction
             (Option.get (LigoOp.Tezos.get_entrypoint_opt "%receive_price" !Ligo.Tezos.self_address))
             (Ligo.tez_from_literal "0mutez")
             (CheckerTypes.get_oracle_entrypoint checker.external_contracts)
          );
          (LigoOp.Tezos.nat_nat_contract_transaction
             (Option.get (LigoOp.Tezos.get_entrypoint_opt "%receive_ctez_marginal_price" !Ligo.Tezos.self_address))
             (Ligo.tez_from_literal "0mutez")
             (CheckerTypes.get_ctez_cfmm_price_entrypoint checker.external_contracts)
          );
        ] in
      assert_operation_list_equal ~expected:expected_ops ~real:ops
   );
*)
("entrypoint_touch - emits expected operations when checker has already been touched" >:: fun _ ->
  Ligo.Tezos.reset ();
  let checker = empty_checker in
  Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
  let ops, _ = Checker.entrypoint_touch (checker, ()) in
  assert_operation_list_equal ~expected:[] ~real:ops
);
("entrypoint_touch_liquidation_slices - emits expected operations" >:: fun _ ->
  Ligo.Tezos.reset ();
  let _, checker = checker_with_completed_auction () in
  let auction_ptr = (Option.get checker.liquidation_auctions.completed_auctions).oldest in
  let slice_ptrs = avl_leaves_to_list checker.liquidation_auctions.avl_storage auction_ptr in
  let slices = List.map (fun ptr -> Avl.avl_read_leaf checker.liquidation_auctions.avl_storage ptr) slice_ptrs in
  Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez");
  let ops, _ = Checker.entrypoint_touch_liquidation_slices (checker, slice_ptrs) in
  (* Note: opening LiquidationAuctionPrimitiveTypes locally here since we have overloaded
   * the "contents" record accessor in LiquidationAuctionTypes *)
  let expected_ops =
    let open LiquidationAuctionPrimitiveTypes in
    List.rev (List.map (
        fun slice ->
          let burrow = Option.get (Ligo.Big_map.find_opt slice.contents.burrow checker.burrows) in
          LigoOp.Tezos.address_nat_transaction
            (checker_address, tok_to_denomination_nat slice.contents.tok)
            (Ligo.tez_from_literal "0mutez")
            (Option.get
(LigoOp.Tezos.get_entrypoint_opt "%burrowTransfer" (burrow_address burrow))) ) slices) in assert_operation_list_equal ~expected:expected_ops ~real:ops ); ("entrypoint_touch_burrow - emits expected operations" >:: fun _ -> Ligo.Tezos.reset (); (* Create the burrow *) Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero; let _, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, tok_of_denomination (Ligo.nat_from_literal "100_000_000n"))) in (* Then touch it *) Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez"); let ops, _ = Checker.entrypoint_touch_burrow (checker, (alice_addr, Ligo.nat_from_literal "0n")) in assert_operation_list_equal ~expected:[] ~real:ops ); ("entrypoint_withdraw_collateral - emits expected operations" >:: fun _ -> Ligo.Tezos.reset (); (* Create a burrow *) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let (_, burrow_no), checker = newly_created_burrow empty_checker "0n" (tok_of_denomination (Ligo.nat_from_literal "3_000_000n")) in (* Try to withdraw some tez from the untouched burrow *) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let ops, checker = Checker.entrypoint_withdraw_collateral (checker, (Ligo.nat_from_literal "0n", tok_of_denomination (Ligo.nat_from_literal "1_000_000n"))) in let burrow = Option.get (Ligo.Big_map.find_opt (alice_addr, burrow_no) checker.burrows) in let expected_ops = [ (LigoOp.Tezos.address_nat_transaction (alice_addr, (Ligo.nat_from_literal "1_000_000n")) (Ligo.tez_from_literal "0mutez") (Option.get (LigoOp.Tezos.get_entrypoint_opt "%burrowTransfer" (burrow_address burrow))) ); ] in assert_operation_list_equal ~expected:expected_ops ~real:ops ); ("calculate_touch_reward - expected result for last_touched 2s ago" >:: fun _ -> The division in this case should return a remainder < 1/2 Ligo.Tezos.reset (); let time_delta = 2 in remainder : 12000 / 36000 let expected_reward = Ligo.int_from_literal "3333" in let last_touched = Ligo.timestamp_from_seconds_literal 0 in Ligo.Tezos.new_transaction ~seconds_passed:time_delta ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let actual_reward = kit_to_denomination_int (Checker.calculate_touch_reward last_touched) in assert_int_equal ~expected:expected_reward ~real:actual_reward; ); ("calculate_touch_reward - expected result for last_touched 3s ago" >:: fun _ -> (* The division in this case should produce no remainder *) Ligo.Tezos.reset (); let time_delta = 3 in remainder : 0 let expected_reward = Ligo.int_from_literal "5000" in let last_touched = Ligo.timestamp_from_seconds_literal 0 in Ligo.Tezos.new_transaction ~seconds_passed:time_delta ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let actual_reward = kit_to_denomination_int (Checker.calculate_touch_reward last_touched) in assert_int_equal ~expected:expected_reward ~real:actual_reward; ); ("calculate_touch_reward - expected result for last_touched 4s ago" >:: fun _ -> The division in this case should return a remainder > 1/2 Ligo.Tezos.reset (); let time_delta = 4 in remainder : 24000 / 36000 let expected_reward = Ligo.int_from_literal "6666" in let last_touched = Ligo.timestamp_from_seconds_literal 0 in Ligo.Tezos.new_transaction ~seconds_passed:time_delta ~blocks_passed:2 ~sender:alice_addr 
~amount:(Ligo.tez_from_literal "0mutez"); let actual_reward = kit_to_denomination_int (Checker.calculate_touch_reward last_touched) in assert_int_equal ~expected:expected_reward ~real:actual_reward; ); ("burn_kit - owner can burn" >:: fun _ -> Ligo.Tezos.reset (); let sender = alice_addr in (* Create a burrow *) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:Common.tez_zero; let _, checker = newly_created_burrow empty_checker "0n" (tok_of_denomination (Ligo.nat_from_literal "10_000_000n")) in Mint as much kit as possible Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal "0mutez"); let (ops, checker) = Checker.entrypoint_mint_kit ( checker , (Ligo.nat_from_literal "0n", kit_of_denomination (Ligo.nat_from_literal "4_285_714n")) ) in (* There should be no operations emitted. *) assert_operation_list_equal ~expected:[] ~real:ops; (* The owner should be able to burn it back. *) let kit_token = kit_of_denomination (Fa2Ledger.get_fa2_ledger_value checker.fa2_state.ledger (TokenMetadata.kit_token_id, sender)) in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal "0mutez"); let _ = Checker.entrypoint_burn_kit (checker, (Ligo.nat_from_literal "0n", kit_token)) in () ); ("burn_kit - non-owner cannot burn" >:: fun _ -> Ligo.Tezos.reset (); (* Create a burrow *) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let _, checker = newly_created_burrow empty_checker "0n" (tok_of_denomination (Ligo.nat_from_literal "10_000_000n")) in Mint as much kit as possible Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let (ops, checker) = Checker.entrypoint_mint_kit ( checker , (Ligo.nat_from_literal "0n", kit_of_denomination (Ligo.nat_from_literal "4_285_714n")) ) in (* There should be no operations emitted. *) assert_operation_list_equal ~expected:[] ~real:ops; (* Have the wrong person try to burn it back; this should fail. 
*) assert_raises (Failure (Ligo.string_of_int error_NonExistentBurrow)) (fun () -> let kit_token = kit_of_denomination (Fa2Ledger.get_fa2_ledger_value checker.fa2_state.ledger (TokenMetadata.kit_token_id, bob_addr)) in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez"); Checker.entrypoint_burn_kit (checker, (Ligo.nat_from_literal "0n", kit_token)) ); () ); ( Ligo.Tezos.reset(); qcheck_to_ounit @@ QCheck.Test.make ~name:"test_buy_kit_respects_min_kit_expected" ~count:property_test_count make_inputs_for_buy_kit_to_succeed @@ fun (cfmm, ctok_amount, min_kit_expected, deadline) -> let sender = alice_addr in let checker = empty_checker_with_cfmm cfmm in let senders_old_kit = Fa2Ledger.get_fa2_ledger_value checker.fa2_state.ledger (TokenMetadata.kit_token_id, sender) in (* before *) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal "0mutez"); let ops, checker = Checker.entrypoint_buy_kit (checker, (ctok_amount, min_kit_expected, deadline)) in let senders_new_kit = Fa2Ledger.get_fa2_ledger_value checker.fa2_state.ledger (TokenMetadata.kit_token_id, sender) in (* after *) begin match ops with | [Transaction (FA2TransferTransactionValue transfer, _, _)] -> assert_fa2_transfer_list_equal ~expected:[ Fa2Interface.{ from_ = sender; txs = [ { to_ = checker_address; token_id = TokenMetadata.ctok_token_id; amount = ctok_to_denomination_nat ctok_amount; } ] } ] ~real:transfer | _ -> failwith ("Expected [Transaction (FA2TransferTransactionValue _, _, _)] but got " ^ show_operation_list ops) end; Ligo.geq_nat_nat senders_new_kit (Ligo.add_nat_nat senders_old_kit (kit_to_denomination_nat min_kit_expected)) ); ( Ligo.Tezos.reset(); qcheck_to_ounit @@ QCheck.Test.make ~name:"test_buy_kit_preserves_kit" ~count:property_test_count make_inputs_for_buy_kit_to_succeed @@ fun (cfmm, ctok_amount, min_kit_expected, deadline) -> let checker = empty_checker_with_cfmm cfmm in let sender = alice_addr in let checker_cfmm_old_kit = kit_to_denomination_nat checker.cfmm.kit in let senders_old_kit = Fa2Ledger.get_fa2_ledger_value checker.fa2_state.ledger (TokenMetadata.kit_token_id, sender) in (* before *) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal "0mutez"); let ops, checker = Checker.entrypoint_buy_kit (checker, (ctok_amount, min_kit_expected, deadline)) in let checker_cfmm_new_kit = kit_to_denomination_nat checker.cfmm.kit in let senders_new_kit = Fa2Ledger.get_fa2_ledger_value checker.fa2_state.ledger (TokenMetadata.kit_token_id, sender) in (* after *) begin match ops with | [Transaction (FA2TransferTransactionValue transfer, _, _)] -> assert_fa2_transfer_list_equal ~expected:[ Fa2Interface.{ from_ = sender; txs = [ { to_ = checker_address; token_id = TokenMetadata.ctok_token_id; amount = ctok_to_denomination_nat ctok_amount; } ] } ] ~real:transfer | _ -> failwith ("Expected [Transaction (FA2TransferTransactionValue _, _, _)] but got " ^ show_operation_list ops) end; Ligo.eq_nat_nat (Ligo.add_nat_nat checker_cfmm_old_kit senders_old_kit) (Ligo.add_nat_nat checker_cfmm_new_kit senders_new_kit) ); ( Ligo.Tezos.reset(); qcheck_to_ounit @@ QCheck.Test.make ~name:"test_buy_kit_preserves_tez" ~count:property_test_count make_inputs_for_buy_kit_to_succeed @@ fun (cfmm, ctok_amount, min_kit_expected, deadline) -> let checker = empty_checker_with_cfmm cfmm in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr 
~amount:(Ligo.tez_from_literal "0mutez"); let _, new_checker = Checker.entrypoint_buy_kit (checker, (ctok_amount, min_kit_expected, deadline)) in ctok_add checker.cfmm.ctok ctok_amount = new_checker.cfmm.ctok ); ( Ligo.Tezos.reset(); qcheck_to_ounit @@ QCheck.Test.make ~name:"test_sell_kit_respects_min_tez_expected" ~count:property_test_count make_inputs_for_sell_kit_to_succeed @@ fun (cfmm, kit_amount, min_ctok_expected, deadline) -> let sender = alice_addr in let checker = let checker = empty_checker_with_cfmm cfmm in { checker with parameters = { checker.parameters with circulating_kit = kit_add checker.parameters.circulating_kit kit_amount }; fa2_state = ledger_issue_kit (checker.fa2_state, sender, kit_amount); } in Checker.assert_checker_invariants checker; Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal "0mutez"); let ops, _ = Checker.entrypoint_sell_kit (checker, (kit_amount, min_ctok_expected, deadline)) in let bought_muctok = match ops with | [Transaction (FA2TransferTransactionValue [{from_=from_address; txs=[tx];}], _, _)] -> begin assert_address_equal ~expected:checker_address ~real:from_address; assert_address_equal ~expected:sender ~real:tx.to_; tx.amount end | _ -> failwith ("Expected [Transaction (FA2TransferTransactionValue [{from_=_; txs=[_];}], _, _)] but got " ^ show_operation_list ops) in ctok_of_denomination bought_muctok >= min_ctok_expected ); ( Ligo.Tezos.reset(); qcheck_to_ounit @@ QCheck.Test.make ~name:"test_sell_kit_preserves_kit" ~count:property_test_count make_inputs_for_sell_kit_to_succeed @@ fun (cfmm, kit_amount, min_ctok_expected, deadline) -> let sender = alice_addr in let checker = let checker = empty_checker_with_cfmm cfmm in { checker with parameters = { checker.parameters with circulating_kit = kit_add checker.parameters.circulating_kit kit_amount }; fa2_state = ledger_issue_kit (checker.fa2_state, sender, kit_amount); } in Checker.assert_checker_invariants checker; Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal "0mutez"); let _, new_checker = Checker.entrypoint_sell_kit (checker, (kit_amount, min_ctok_expected, deadline)) in kit_add checker.cfmm.kit kit_amount = new_checker.cfmm.kit ); ( Ligo.Tezos.reset(); qcheck_to_ounit @@ QCheck.Test.make ~name:"test_sell_kit_preserves_tez" ~count:property_test_count make_inputs_for_sell_kit_to_succeed @@ fun (cfmm, kit_amount, min_ctok_expected, deadline) -> let sender = alice_addr in let checker = let checker = empty_checker_with_cfmm cfmm in { checker with parameters = { checker.parameters with circulating_kit = kit_add checker.parameters.circulating_kit kit_amount }; fa2_state = ledger_issue_kit (checker.fa2_state, sender, kit_amount); } in Checker.assert_checker_invariants checker; Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal "0mutez"); let ops, new_checker = Checker.entrypoint_sell_kit (checker, (kit_amount, min_ctok_expected, deadline)) in let bought_muctok = match ops with | [Transaction (FA2TransferTransactionValue [{from_=from_address; txs=[tx];}], _, _)] -> begin assert_address_equal ~expected:checker_address ~real:from_address; assert_address_equal ~expected:sender ~real:tx.to_; tx.amount end | _ -> failwith ("Expected [Transaction (FA2TransferTransactionValue [{from_=_; txs=[_];}], _, _)] but got " ^ show_operation_list ops) in ctok_add new_checker.cfmm.ctok (ctok_of_denomination bought_muctok) = checker.cfmm.ctok ); ( 
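(* A rough sanity check of the minimum-tez calculation in the next property test,
 * assuming the cfmm behaves like a constant-product market with a 0.2% fee (this
 * fee model is an assumption; only the concrete numbers below are taken from the test):
 * with cfmm.kit = cfmm.ctok = 1_000 and fee = 2/1000, providing x ctok yields roughly
 * (1 - fee) * cfmm.kit * x / (cfmm.ctok + x) kit, so obtaining at least m kit needs
 * roughly x >= 1_000 / (998/m - 1), which is the ratio the test computes, and the
 * amount bought can never reach (1 - fee) * cfmm.kit = 998, hence max_buyable_kit = 997. *)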
let cfmm_kit = Ligo.nat_from_literal ("1_000n") in let cfmm_ctok = ctok_of_denomination (Ligo.nat_from_literal ("1_000n")) in
(* The maximum amount of kit that you can buy with a finite amount of tez is
 * (1 - fee) * cfmm.kit - 1
*)
let max_buyable_kit = 997 in let arb_kit = QCheck.map (fun x -> kit_of_denomination (Ligo.nat_from_literal (string_of_int x ^ "n"))) QCheck.(1 -- max_buyable_kit) in let arb_tez = TestArbitrary.arb_small_positive_tez in qcheck_to_ounit @@ QCheck.Test.make ~name:"buy_kit - returns geq min_kit_expected kit for transactions with sufficient tez" ~count:property_test_count (QCheck.pair arb_kit arb_tez) @@ fun (min_expected_kit, additional_tez) -> Ligo.Tezos.reset(); let sender = alice_addr in (* Populate cfmm with initial liquidity *) let open Ratio in let checker = empty_checker_with_cfmm { empty_checker.cfmm with ctok = cfmm_ctok; kit = kit_of_denomination cfmm_kit; } in (* Calculate minimum tez to get the min_expected kit given the state of the cfmm defined above *) let ratio_minimum_tez = div_ratio (ratio_of_nat cfmm_kit) ( sub_ratio (div_ratio (ratio_of_nat (Ligo.nat_from_literal "998n")) (ratio_of_nat (kit_to_denomination_nat min_expected_kit))) (ratio_of_nat (Ligo.nat_from_literal "1n")) ) in let minimum_tez = Ligo.mul_nat_tez (Ligo.abs (Common.cdiv_int_int ratio_minimum_tez.num ratio_minimum_tez.den)) (Ligo.tez_from_literal "1mutez") in (* Adjust transaction by a random amount of extra tez *) let ctok_provided = Ctok.ctok_of_denomination (Common.tez_to_mutez_nat (Ligo.add_tez_tez minimum_tez additional_tez)) in (* UNSAFE CAST *) let senders_old_kit = Fa2Ledger.get_fa2_ledger_value checker.fa2_state.ledger (TokenMetadata.kit_token_id, sender) in (* before *) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal "0mutez"); let ops, checker = Checker.entrypoint_buy_kit (checker, (ctok_provided, min_expected_kit, Ligo.timestamp_from_seconds_literal 1)) in begin match ops with | [Transaction (FA2TransferTransactionValue transfer, _, _)] -> assert_fa2_transfer_list_equal ~expected:[ Fa2Interface.{ from_ = sender; txs = [ { to_ = checker_address; token_id = TokenMetadata.ctok_token_id; amount = Ctok.ctok_to_denomination_nat ctok_provided; } ] } ] ~real:transfer | _ -> failwith ("Expected [Transaction (FA2TransferTransactionValue _, _, _)] but got " ^ show_operation_list ops) end; let senders_new_kit = Fa2Ledger.get_fa2_ledger_value checker.fa2_state.ledger (TokenMetadata.kit_token_id, sender) in (* after *) Ligo.geq_nat_nat senders_new_kit (Ligo.add_nat_nat senders_old_kit (kit_to_denomination_nat min_expected_kit)) (* FIXME: This test only rarely evaluates the 'eq' part of 'geq'. Reducing the range of possible `additional_tez` or increasing the * number of QCheck samples may improve this. *) );
(* FIXME: DISABLING THIS UNIT TEST. Disabled this unit test which was written for the case of indexCfmm.ml. Once we have
 * a better way of testing different concrete cfmm implementations we should be able to re-enable this.
*) (* ("buy_kit - returns expected kit" >:: fun _ -> Ligo.Tezos.reset (); (* Populate the cfmm with some liquidity *) let checker = empty_checker_with_cfmm { empty_checker.cfmm with ctok = ctok_of_denomination (Ligo.nat_from_literal "2n"); kit = kit_of_denomination (Ligo.nat_from_literal "2n"); } in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let ops, checker = Checker.entrypoint_buy_kit (checker, (ctok_of_denomination (Ligo.nat_from_literal "1_000_000n"), kit_of_denomination (Ligo.nat_from_literal "1n"), Ligo.timestamp_from_seconds_literal 1)) in let kit = get_balance_of checker alice_addr TokenMetadata.kit_token_id in let expected_ops = [ (LigoOp.Tezos.fa2_transfer_transaction [ Fa2Interface.{ from_ = alice_addr; txs = [ { to_ = checker_address; token_id = TokenMetadata.ctok_token_id; amount = Ligo.nat_from_literal "1_000_000n"; } ] } ] (Ligo.tez_from_literal "0mutez") (Option.get (LigoOp.Tezos.get_entrypoint_opt "%transfer" checker.external_contracts.ctok_fa2)) ); ] in assert_nat_equal ~expected:(Ligo.nat_from_literal "1n") ~real:kit; assert_operation_list_equal ~expected:expected_ops ~real:ops ); *) FIXME : DISABLING THIS UNIT TEST . Disabled this unit test which was written for the case of indexCfmm.ml . Once we have * a better way of testing different concrete cfmm implementations we should be able to re - enable this . * a better way of testing different concrete cfmm implementations we should be able to re-enable this. *) ( " sell_kit - returns expected tez " > : : fun _ - > Ligo.Tezos.reset ( ) ; let kit_to_sell = kit_of_denomination ( Ligo.nat_from_literal " 1_000_000n " ) in let min_ctok_expected = ctok_of_denomination ( Ligo.nat_from_literal " 1n " ) in let checker = let checker = empty_checker_with_cfmm { empty_checker.cfmm with ctok = ctok_of_denomination ( Ligo.nat_from_literal " 2n " ) ; kit = kit_of_denomination ( Ligo.nat_from_literal " 2n " ) ; lqt = lqt_of_denomination ( Ligo.nat_from_literal " 1n " ) ; } in { checker with parameters = { checker.parameters with circulating_kit = kit_add } ; fa2_state = ledger_issue_kit ( checker.fa2_state , alice_addr , kit_to_sell ) ; } in Checker.assert_checker_invariants checker ; Ligo . Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender : alice_addr ~amount:(Ligo.tez_from_literal " 0mutez " ) ; let ops , _ = Checker.entrypoint_sell_kit ( checker , ( kit_to_sell , min_ctok_expected , Ligo.timestamp_from_seconds_literal 1 ) ) in let expected_ops = [ ( LigoOp . Tezos.fa2_transfer_transaction [ Fa2Interface . { from _ = checker_address ; = [ { to _ = alice_addr ; token_id = TokenMetadata.ctok_token_id ; amount = Ligo.nat_from_literal " 1n " ; } ] } ] ( Ligo.tez_from_literal " 0mutez " ) ( Option.get ( LigoOp . 
Tezos.get_entrypoint_opt " % transfer " checker.external_contracts.ctok_fa2 ) ) ) ; ] in assert_operation_list_equal ~expected : expected_ops ~real : ops ) ; fun _ -> Ligo.Tezos.reset (); let kit_to_sell = kit_of_denomination (Ligo.nat_from_literal "1_000_000n") in let min_ctok_expected = ctok_of_denomination (Ligo.nat_from_literal "1n") in let checker = let checker = empty_checker_with_cfmm { empty_checker.cfmm with ctok = ctok_of_denomination (Ligo.nat_from_literal "2n"); kit = kit_of_denomination (Ligo.nat_from_literal "2n"); lqt = lqt_of_denomination (Ligo.nat_from_literal "1n"); } in { checker with parameters = { checker.parameters with circulating_kit = kit_add checker.parameters.circulating_kit kit_to_sell }; fa2_state = ledger_issue_kit (checker.fa2_state, alice_addr, kit_to_sell); } in Checker.assert_checker_invariants checker; Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let ops, _ = Checker.entrypoint_sell_kit (checker, (kit_to_sell, min_ctok_expected, Ligo.timestamp_from_seconds_literal 1)) in let expected_ops = [ (LigoOp.Tezos.fa2_transfer_transaction [ Fa2Interface.{ from_ = checker_address; txs = [ { to_ = alice_addr; token_id = TokenMetadata.ctok_token_id; amount = Ligo.nat_from_literal "1n"; } ] } ] (Ligo.tez_from_literal "0mutez") (Option.get (LigoOp.Tezos.get_entrypoint_opt "%transfer" checker.external_contracts.ctok_fa2)) ); ] in assert_operation_list_equal ~expected:expected_ops ~real:ops ); *) ("remove_liquidity - returns expected kit and tez" >:: fun _ -> Ligo.Tezos.reset (); let min_kit_expected = kit_of_denomination (Ligo.nat_from_literal "1n") in let min_ctok_expected = ctok_of_denomination (Ligo.nat_from_literal "1n") in let my_liquidity_tokens = lqt_of_denomination (Ligo.nat_from_literal "1n") in let sender = alice_addr in (* Populate the cfmm with some liquidity (carefully crafted) *) let checker = { empty_checker with parameters = { empty_checker.parameters with circulating_kit = kit_of_denomination (Ligo.nat_from_literal "1n")}; cfmm = { empty_checker.cfmm with ctok = ctok_of_denomination (Ligo.nat_from_literal "2n"); kit = kit_of_denomination (Ligo.nat_from_literal "2n"); lqt = lqt_of_denomination (Ligo.nat_from_literal "2n"); }; fa2_state = let fa2_state = initial_fa2_state in let fa2_state = ledger_issue_lqt (fa2_state, sender, my_liquidity_tokens) in let fa2_state = ledger_issue_kit (fa2_state, !Ligo.Tezos.self_address, kit_of_denomination (Ligo.nat_from_literal "1n")) in fa2_state; } in Checker.assert_checker_invariants checker; Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal "0mutez"); let ops, checker = Checker.entrypoint_remove_liquidity (checker, (my_liquidity_tokens, min_ctok_expected, min_kit_expected, Ligo.timestamp_from_seconds_literal 1)) in let ctok = match ops with | [Transaction (FA2TransferTransactionValue [{from_=from_address; txs=[tx];}], _, _)] -> begin assert_address_equal ~expected:checker_address ~real:from_address; assert_address_equal ~expected:sender ~real:tx.to_; tx.amount end | _ -> failwith ("Expected [Transaction (FA2TransferTransactionValue [{from_=_; txs=[_];}], _, _)] but got " ^ show_operation_list ops) in let kit = get_balance_of checker sender TokenMetadata.kit_token_id in assert_nat_equal ~expected:(Ligo.nat_from_literal "1n") ~real:kit; assert_nat_equal ~expected:(Ligo.nat_from_literal "1n") ~real:ctok; () ); (* ************************************************************************* 
*) (** FA2 *) (* ************************************************************************* *)
("fa2 scenario" >:: fun _ -> Ligo.Tezos.reset (); let checker = empty_checker in let initial_addr = Ligo.address_of_string "INIT_ADDR" in (* mint some kit *) Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:initial_addr ~amount:Common.tez_zero; let _, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal "0n", None, tok_of_denomination (Ligo.nat_from_literal "100_000_000n"))) in let max_kit = Checker.view_burrow_max_mintable_kit ((initial_addr, Ligo.nat_from_literal "0n"), checker) in Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:initial_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal "0n", max_kit)) in (* get some liquidity *) Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:initial_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_add_liquidity ( checker, ( ctok_of_denomination (Ligo.nat_from_literal "5_000_000n") , kit_of_denomination (Ligo.nat_from_literal "5_000_000n") , lqt_of_denomination (Ligo.nat_from_literal "5n") , Ligo.timestamp_from_seconds_literal 999 ) ) in (* initialize alice, bob, and leena accounts *) Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:initial_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.strict_entrypoint_transfer (checker, [ { from_ = initial_addr; txs = [ { to_ = alice_addr; token_id = TokenMetadata.kit_token_id; amount = Ligo.nat_from_literal "5n" }; { to_ = bob_addr; token_id = TokenMetadata.lqt_token_id; amount = Ligo.nat_from_literal "5n" } ]; }]) in let balance chk addr tok = Checker.view_get_balance ((addr, tok), chk) in (* you can see the initial balances here for reference *) assert_nat_equal ~real:(balance checker alice_addr TokenMetadata.kit_token_id) ~expected:(Ligo.nat_from_literal "5n"); assert_nat_equal ~real:(balance checker alice_addr TokenMetadata.lqt_token_id) ~expected:(Ligo.nat_from_literal "0n"); assert_nat_equal ~real:(balance checker bob_addr TokenMetadata.kit_token_id) ~expected:(Ligo.nat_from_literal "0n"); assert_nat_equal ~real:(balance checker bob_addr TokenMetadata.lqt_token_id) ~expected:(Ligo.nat_from_literal "5n"); assert_nat_equal ~real:(balance checker leena_addr TokenMetadata.kit_token_id) ~expected:(Ligo.nat_from_literal "0n"); assert_nat_equal ~real:(balance checker leena_addr TokenMetadata.lqt_token_id) ~expected:(Ligo.nat_from_literal "0n"); (* make leena an operator of bob for kit *) Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_update_operators (checker, [ (Add_operator { owner = bob_addr; operator = leena_addr; token_id = TokenMetadata.kit_token_id })]) in assert_equal true (Checker.view_is_operator ((bob_addr, (leena_addr, TokenMetadata.kit_token_id)), checker)); assert_equal false (Checker.view_is_operator ((bob_addr, (leena_addr, TokenMetadata.lqt_token_id)), checker)); assert_equal false (Checker.view_is_operator ((leena_addr, (bob_addr, TokenMetadata.kit_token_id)), checker)); (* alice can transfer some kit to bob *) Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.strict_entrypoint_transfer (checker, [ { from_=alice_addr; txs=[{to_=bob_addr;
token_id=TokenMetadata.kit_token_id;amount=Ligo.nat_from_literal "2n"}]}]) in assert_nat_equal ~real:(balance checker alice_addr TokenMetadata.kit_token_id) ~expected:(Ligo.nat_from_literal "3n"); assert_nat_equal ~real:(balance checker bob_addr TokenMetadata.kit_token_id) ~expected:(Ligo.nat_from_literal "2n"); (* but she can not transfer more than she has *) assert_raises (Failure "FA2_INSUFFICIENT_BALANCE") (fun () -> Checker.strict_entrypoint_transfer (checker, [ { from_=alice_addr; txs=[{to_=bob_addr; token_id=TokenMetadata.kit_token_id; amount=Ligo.nat_from_literal "10n"}]}])); (* and leena can send some of that kit back to alice *) Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:leena_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.strict_entrypoint_transfer (checker, [ { from_=bob_addr; txs=[{to_=alice_addr; token_id=TokenMetadata.kit_token_id; amount=Ligo.nat_from_literal "1n"}]}]) in assert_nat_equal ~real:(balance checker alice_addr TokenMetadata.kit_token_id) ~expected:(Ligo.nat_from_literal "4n"); assert_nat_equal ~real:(balance checker bob_addr TokenMetadata.kit_token_id) ~expected:(Ligo.nat_from_literal "1n"); (* but leena can not even send a single kit from bob's account when she's not an operator anymore *) Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_update_operators (checker, [ (Remove_operator { owner = bob_addr; operator = leena_addr; token_id = TokenMetadata.kit_token_id })]) in Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:leena_addr ~amount:(Ligo.tez_from_literal "0mutez"); assert_raises (Failure "FA2_NOT_OPERATOR") (fun () -> Checker.strict_entrypoint_transfer (checker, [ { from_=bob_addr; txs=[{to_=alice_addr; token_id=TokenMetadata.kit_token_id; amount=Ligo.nat_from_literal "1n"}]}])); () );
("view_total_supply (FA2) - initial kit supply" >:: fun _ -> Ligo.Tezos.reset (); let total_kit_amount = Checker.view_total_supply (TokenMetadata.kit_token_id, empty_checker) in assert_nat_equal ~expected:(Ligo.nat_from_literal "0n") ~real:total_kit_amount; () );
("view_total_supply (FA2) - initial lqt supply" >:: fun _ -> Ligo.Tezos.reset (); let total_lqt_amount = Checker.view_total_supply (TokenMetadata.lqt_token_id, empty_checker) in assert_nat_equal ~expected:(Ligo.nat_from_literal "0n") ~real:total_lqt_amount; () );
("view_total_supply (FA2) - undefined token id" >:: fun _ -> assert_raises (Failure "FA2_TOKEN_UNDEFINED") (fun () -> Checker.view_total_supply (Ligo.nat_from_literal "3n", empty_checker)) );
("view_all_tokens (FA2)" >:: fun _ -> Ligo.Tezos.reset (); let all_tokens = Checker.view_all_tokens ((), empty_checker) in assert_nat_list_equal ~expected:[ TokenMetadata.kit_token_id; TokenMetadata.lqt_token_id ] ~real:all_tokens; () );
(* ************************************************************************* *) (** LiquidationAuctions *) (* ************************************************************************* *)
("entrypoint_liquidation_auction_place_bid: should only allow the current auction" >:: fun _ -> Ligo.Tezos.reset (); let checker = { empty_checker with last_index = Some (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_000n")) } in Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_touch (checker, ()) in Ligo.Tezos.new_transaction ~seconds_passed:10
~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero; let _, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal "0n", None, tok_of_denomination (Ligo.nat_from_literal "200_000_000n"))) in let max_kit = Checker.view_burrow_max_mintable_kit ((alice_addr, Ligo.nat_from_literal "0n"), checker) in Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal "0n", max_kit)) in let checker = { checker with last_index = Some (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "10_000_000n")) } in let _, checker = Checker.entrypoint_touch (checker, ()) in Ligo.Tezos.new_transaction ~seconds_passed:1_000_000 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_touch (checker, ()) in Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_touch_burrow (checker, (alice_addr, Ligo.nat_from_literal "0n")) in let _, checker = Checker.entrypoint_mark_for_liquidation (checker, (alice_addr, Ligo.nat_from_literal "0n")) in let _, checker = Checker.entrypoint_touch (checker, ()) in let res = Checker.view_current_liquidation_auction_details ((), checker) in let other_ptr = match res.auction_id with AVLPtr i -> Ptr.ptr_next i in assert_raises (Failure (Ligo.string_of_int error_InvalidLiquidationAuction)) (fun () -> Checker.entrypoint_liquidation_auction_place_bid (checker, (AVLPtr other_ptr, res.minimum_bid))); ); ("can complete a liquidation auction" >:: fun _ -> Ligo.Tezos.reset (); let checker = empty_checker in (* mint some kit to convert to liquidity *) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let _, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal "0n", None, tok_of_denomination (Ligo.nat_from_literal "200_000_000n"))) in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal "0n", kit_of_denomination (Ligo.nat_from_literal "10_000_000n"))) in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _lqt_minted_ret_kit_ops, checker = Checker.entrypoint_add_liquidity ( checker , ( ctok_of_denomination (Ligo.nat_from_literal "1_000_000n") , kit_one , lqt_of_denomination (Ligo.nat_from_literal "1n") , Ligo.timestamp_from_seconds_literal 1 ) ) in (* barely on time *) (* Activation/deactivation tests *) let () = (* Creation/deactivation does not incur any costs. *) let tez = tok_of_denomination (Ligo.nat_from_literal "12_345_678n") in (* NOTE: tez is a misnomer; it's tok really *) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:Common.tez_zero; let (ops, checker0) = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal "0n", None, tez)) in (* created burrow should be deposited (incl. 
the creation deposit) *) let burrow_addr = burrow_address (Option.get (Ligo.Big_map.find_opt (bob_addr, Ligo.nat_from_literal "0n") checker0.burrows)) in let () = match ops with | [ CreateBurrowContract (_, cb_delegate, cb_tez, cb_storage) ; (Transaction (FA2TransferTransactionValue _, _, _)) as op ; ] -> (* burrow creation values *) assert_key_hash_option_equal ~expected:None ~real:cb_delegate; assert_tez_equal ~expected:Common.tez_zero ~real:cb_tez; assert_equal BurrowTypes.({checker_address=checker_address; collateral_fa2=collateral_fa2_addr}) cb_storage; (* collateral initialization values *) assert_operation_equal ~expected:( LigoOp.Tezos.fa2_transfer_transaction [ Fa2Interface.( { from_ = bob_addr; txs = [ { to_ = burrow_addr; token_id = TokenMetadata.tok_token_id; amount = tok_to_denomination_nat tez; }; ]; } ) ] (Ligo.tez_from_literal "0mutez") (Option.get (LigoOp.Tezos.get_entrypoint_opt "%transfer" checker.external_contracts.collateral_fa2)) ) ~real:op | _ -> assert_failure ("Expected [CreateBurrowContract (_, _, _, _); Transaction (FA2TransferTransactionValue _, _, _)] but got " ^ show_operation_list ops) in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez"); let (ops, checker1) = Checker.entrypoint_deactivate_burrow (checker0, (Ligo.nat_from_literal "0n", alice_addr)) in assert_operation_list_equal ~expected:[ LigoOp.Tezos.address_nat_transaction (alice_addr, tok_to_denomination_nat tez) (Ligo.tez_from_literal "0mutez") (Option.get (LigoOp.Tezos.get_entrypoint_opt "%burrowTransfer" burrow_addr)) ] ~real:ops; (* deactivation/activation = identity (if conditions are met ofc). *) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:Common.tez_zero; let _ops, checker2 = Checker.entrypoint_activate_burrow (checker1, (Ligo.nat_from_literal "0n", tez)) in (* FIXME: cfmm contains a ratio, which cannot be compared for equality using (=). So, the next line can give false positives. *)
assert_equal checker0 checker2; () in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:Common.tez_zero; let (_, checker) = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal "0n", None, tok_of_denomination (Ligo.nat_from_literal "10_000_000n"))) in let burrow_id = (bob_addr, Ligo.nat_from_literal "0n") in let burrow_addr = burrow_address (Option.get (Ligo.Big_map.find_opt (bob_addr, Ligo.nat_from_literal "0n") checker.burrows)) in (* Mint as much kit as possible *) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez"); let (_ops, checker) = Checker.entrypoint_mint_kit ( checker , (Ligo.nat_from_literal "0n", kit_of_denomination (Ligo.nat_from_literal "4_285_714n")) ) in let kit = get_balance_of checker bob_addr TokenMetadata.kit_token_id in assert_nat_equal ~expected:(Ligo.nat_from_literal "4_285_714n") ~real:kit; assert_bool "should not be overburrowed right after minting" (not @@ burrow_is_overburrowed checker.parameters (Option.get (Ligo.Big_map.find_opt burrow_id checker.burrows)) ); (* Minting another kit should fail *) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez"); assert_raises (Failure (Ligo.string_of_int error_MintKitFailure)) (fun () -> Checker.entrypoint_mint_kit ( checker , (Ligo.nat_from_literal "0n", kit_of_denomination (Ligo.nat_from_literal "1n")) ) ); (* Over time the burrows with outstanding kit should be overburrowed * (NOTE: even if the index stays where it was before, but that would * take more time I guess). *) Ligo.Tezos.new_transaction ~seconds_passed:60 ~blocks_passed:1 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _ops, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_001n")) in let ops, checker = Checker.entrypoint_touch_burrow (checker, burrow_id) in assert_operation_list_equal ~expected:[] ~real:ops; assert_bool "if the index goes up, then burrows should become overburrowed" (burrow_is_overburrowed checker.parameters (Option.get (Ligo.Big_map.find_opt burrow_id checker.burrows)) ); (* If enough time passes and the index remains up, then the burrow is even liquidatable. *) Ligo.Tezos.new_transaction ~seconds_passed:(211*60) ~blocks_passed:211 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez"); let kit_before_reward = get_balance_of checker bob_addr TokenMetadata.kit_token_id in let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_200_000n")) in let kit_after_reward = get_balance_of checker bob_addr TokenMetadata.kit_token_id in let touch_reward = Ligo.sub_nat_nat kit_after_reward kit_before_reward in let ops, checker = Checker.entrypoint_touch_burrow (checker, burrow_id) in assert_operation_list_equal ~expected:[] ~real:ops; assert_int_equal ~expected:(Ligo.int_from_literal "202_000_000") (* wow, high reward, many blocks have passed.
*) ~real:touch_reward; Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let (ops, checker) = Checker.entrypoint_mark_for_liquidation (checker, burrow_id) in assert_operation_list_equal ~expected:[ LigoOp.Tezos.address_nat_transaction (alice_addr, Ligo.nat_from_literal "1_009_000n") (Ligo.tez_from_literal "0mutez") (Option.get (LigoOp.Tezos.get_entrypoint_opt "%burrowTransfer" burrow_addr)) ] ~real:ops; let slice = (Ligo.Big_map.find_opt burrow_id checker.liquidation_auctions.burrow_slices) |> Option.get |> fun i -> i.youngest_slice in (* We shouldn't be able to cancel the liquidation of this slice if the * prices don't change, even if it's not in an auction yet. *) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez"); assert_raises (Failure (Ligo.string_of_int error_UnwarrantedCancellation)) (fun () -> Checker.entrypoint_cancel_liquidation_slice (checker, slice)); (* Trying to cancel a liquidation using an invalid pointer should fail. *) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez"); assert_raises (Failure (Ligo.string_of_int error_InvalidLeafPtr)) (fun () -> let undefined_slice = LiquidationAuctionPrimitiveTypes.LeafPtr (ptr_next checker.liquidation_auctions.avl_storage.last_ptr) in Checker.entrypoint_cancel_liquidation_slice (checker, undefined_slice) ); Ligo.Tezos.new_transaction ~seconds_passed:(5*60) ~blocks_passed:5 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez"); assert_raises (Failure (Ligo.string_of_int error_NoOpenAuction)) (fun () -> Checker.view_current_liquidation_auction_details ((), checker)); let kit_before_reward = get_balance_of checker bob_addr TokenMetadata.kit_token_id in let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_200_000n")) in let kit_after_reward = get_balance_of checker bob_addr TokenMetadata.kit_token_id in let touch_reward = Ligo.sub_nat_nat kit_after_reward kit_before_reward in assert_bool "should start an auction" (Option.is_some checker.liquidation_auctions.current_auction); assert_int_equal ~expected:(Ligo.int_from_literal "500_000") ~real:touch_reward; Ligo.Tezos.new_transaction ~seconds_passed:(5*60) ~blocks_passed:5 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let kit_before_reward = get_balance_of checker alice_addr TokenMetadata.kit_token_id in let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_200_000n")) in let kit_after_reward = get_balance_of checker alice_addr TokenMetadata.kit_token_id in let touch_reward = Ligo.sub_nat_nat kit_after_reward kit_before_reward in let min_bid = Checker.view_current_liquidation_auction_details ((), checker) in let auction_id = min_bid.auction_id in assert_kit_equal ~expected:(kit_of_denomination (Ligo.nat_from_literal "2_709_183n")) ~real:min_bid.minimum_bid; (* Bid the minimum first *) let (ops, checker) = Checker.entrypoint_liquidation_auction_place_bid (checker, (auction_id, min_bid.minimum_bid)) in assert_operation_list_equal ~expected:[] ~real:ops; (* Same person increases the bid *) let (ops, checker) = Checker.entrypoint_liquidation_auction_place_bid ( checker , (auction_id, kit_of_denomination (Ligo.nat_from_literal "4_200_000n")) ) in let auction_id = match checker.liquidation_auctions.current_auction with | None -> assert_failure 
"entrypoint_liquidation_auction_place_bid should have succeeded" | Some current_auction -> current_auction.contents in assert_operation_list_equal ~expected:[] ~real:ops; assert_int_equal ~expected:(Ligo.int_from_literal "500_000") ~real:touch_reward; Ligo.Tezos.new_transaction ~seconds_passed:(30*60) ~blocks_passed:30 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let kit_before_reward = get_balance_of checker alice_addr TokenMetadata.kit_token_id in let _ops, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_200_000n")) in let kit_after_reward = get_balance_of checker alice_addr TokenMetadata.kit_token_id in let touch_reward = Ligo.sub_nat_nat kit_after_reward kit_before_reward in assert_bool "auction should be completed" (Option.is_none checker.liquidation_auctions.current_auction); assert_int_equal ~expected:(Ligo.int_from_literal "21_000_000") ~real:touch_reward; FIXME : Operations differ between the FA2 deployment and the TEZ deployment ( * Check that all the requests for burrows to send tez come _ before _ the * request to the oracle to update the index . (* Check that all the requests for burrows to send tez come _before_ the * request to the oracle to update the index. *) begin match ops with | [ Transaction (AddressNatTransactionValue _, _, _); (* send tez requests *) Transaction (NatContractTransactionValue _, _, _); (* oracle call *) call ] -> () | _ -> assert_failure ("Unexpected operations/operation order: " ^ show_operation_list ops) end; *) We do n't need to touch the slice on this test case since * Checker.entrypoint_touch_with_index already touches the oldest 5 * slices . * Checker.entrypoint_touch_with_index already touches the oldest 5 * slices. *) assert_raises (Failure (Ligo.string_of_int error_InvalidLeafPtr)) (fun () -> Checker.entrypoint_touch_liquidation_slices (checker, [slice])); assert_bool "burrow should have no liquidation slices" (Ligo.Big_map.find_opt burrow_id checker.liquidation_auctions.burrow_slices= None); let result = Option.get (Ligo.Big_map.find_opt burrow_id checker.burrows) in assert_tok_equal ~expected:tok_zero ~real:(burrow_collateral_at_auction result); Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let (ops, checker) = Checker.entrypoint_liquidation_auction_claim_win (checker, auction_id) in assert_operation_list_equal ~expected:[ (LigoOp.Tezos.fa2_transfer_transaction [ Fa2Interface.( { from_ = checker_address; txs = [ { to_ = alice_addr; token_id = TokenMetadata.tok_token_id; amount = Ligo.nat_from_literal "3_156_446n"; }; ]; } ) ] (Ligo.tez_from_literal "0mutez") (Option.get (LigoOp.Tezos.get_entrypoint_opt "%transfer" checker.external_contracts.collateral_fa2)) ); ] ~real:ops; (* This should fail; shouldn't be able to claim the win twice. *) assert_raises (Failure (Ligo.string_of_int error_InvalidAvlPtr)) (fun () -> Checker.entrypoint_liquidation_auction_claim_win (checker, auction_id)); () ); ("entrypoint_mark_for_liquidation - should not create empty slices" >:: fun _ -> (* Setup. *) Ligo.Tezos.reset (); let sender = alice_addr in let checker = empty_checker in (* Create a burrow with a very little tez in it. 
*) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:Common.tez_zero; let (_, burrow_no) as burrow_id, checker = newly_created_burrow checker "0n" (tok_of_denomination (Ligo.nat_from_literal "2_001_001n")) in
(* CALCULATIONS
   ~~~~~~~~~~~~
   Tez in the burrow is (1_001_001mutez + 1tez) so the reward is (1tez + 1_001mutez = 1_001_001).
   This means that
   - The slice we WOULD send to auctions is empty.
   - What remains in the burrow is empty, so the next liquidation WOULD create another empty slice to auctions.
*)
(* Mint as much kit as possible. *) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal "0mutez"); let (_ops, checker) = Checker.entrypoint_mint_kit (checker, (burrow_no, kit_of_denomination (Ligo.nat_from_literal "476_667n"))) in (* Let some time pass. Over time the burrows with outstanding kit should * become overburrowed, and eventually liquidatable. Note that this * could be because of the index, but also it can happen because of the * fees alone if the index remains the same. *) let blocks_passed = 211 in (* NOTE: I am a little surprised/worried about this being again 211... *) Ligo.Tezos.new_transaction ~seconds_passed:(60*blocks_passed) ~blocks_passed:blocks_passed ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _ops, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_105_283n")) in (* sup *) let _ops, checker = Checker.entrypoint_touch_burrow (checker, burrow_id) in (* Ensure that the burrow is liquidatable. *) begin match Ligo.Big_map.find_opt burrow_id checker.burrows with | None -> assert_failure "bug" | Some burrow -> assert_bool "burrow needs to be liquidatable for the test to be potent." (Burrow.burrow_is_liquidatable checker.parameters burrow); end; (* Let's mark the burrow for liquidation now (first pass: leaves it empty but active). *) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let (_ops, checker) = Checker.entrypoint_mark_for_liquidation (checker, burrow_id) in Checker.assert_checker_invariants checker; (* Ensures no empty slices in the queue. *) (* Let's mark the burrow for liquidation now (second pass: deactivates it). *) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let (_ops, checker) = Checker.entrypoint_mark_for_liquidation (checker, burrow_id) in Checker.assert_checker_invariants checker; (* Ensures no empty slices in the queue.
*) () ); ("deposit_collateral - does not fail on untouched burrows" >:: fun _ -> Ligo.Tezos.reset (); let amount = Constants.creation_deposit in (* Create a burrow *) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amount)) in Touch checker Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_000n")) in (* Try to deposit some tez to the untouched burrow *) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let _ = Checker.entrypoint_deposit_collateral (checker, (Ligo.nat_from_literal "0n", amount)) in () ); ("entrypoint_withdraw_collateral - does not fail on untouched burrows" >:: fun _ -> Ligo.Tezos.reset (); let amount = tok_add Constants.creation_deposit Constants.creation_deposit in (* Create a burrow *) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amount)) in Touch checker Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_000n")) in (* Try to withdraw some tez from the untouched burrow *) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let _ = Checker.entrypoint_withdraw_collateral (checker, (Ligo.nat_from_literal "0n", Constants.creation_deposit)) in () ); ("entrypoint_mint_kit - does not fail on untouched burrows" >:: fun _ -> Ligo.Tezos.reset (); (* Create a burrow *) let amount = tok_add Constants.creation_deposit Constants.creation_deposit in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amount)) in Touch checker Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_000n")) in (* Try to mint some kit out of the untouched burrow *) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _ = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal "0n", kit_of_denomination (Ligo.nat_from_literal "1n"))) in () ); ("entrypoint_burn_kit - does not fail on untouched burrows" >:: fun _ -> Ligo.Tezos.reset (); let amount = tok_add Constants.creation_deposit Constants.creation_deposit in (* Create a burrow *) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amount)) in Mint some kit out of the burrow Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _ops, checker = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal "0n", kit_of_denomination 
(Ligo.nat_from_literal "1n"))) in (* Touch checker *) Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_000n")) in (* Try to burn some kit into the untouched burrow *) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _ = Checker.entrypoint_burn_kit (checker, (Ligo.nat_from_literal "0n", kit_of_denomination (Ligo.nat_from_literal "1n"))) in () );
("entrypoint_activate_burrow - does not fail on untouched burrows" >:: fun _ -> Ligo.Tezos.reset (); let amount = Constants.creation_deposit in (* Create a burrow *) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amount)) in (* Deactivate the burrow *) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _ops, checker = Checker.entrypoint_deactivate_burrow (checker, (Ligo.nat_from_literal "0n", !Ligo.Tezos.sender)) in (* Touch checker *) Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_000n")) in (* Try to activate the untouched burrow *) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let _ = Checker.entrypoint_activate_burrow (checker, (Ligo.nat_from_literal "0n", amount)) in () );
("entrypoint_deactivate_burrow - does not fail on untouched burrows" >:: fun _ -> Ligo.Tezos.reset (); let amount = Constants.creation_deposit in (* Create a burrow *) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amount)) in (* Touch checker *) Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_000n")) in (* Try to deactivate the untouched burrow *) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _ = Checker.entrypoint_deactivate_burrow (checker, (Ligo.nat_from_literal "0n", !Ligo.Tezos.sender)) in () );
("entrypoint_mark_for_liquidation - does not fail on untouched burrows" >:: fun _ -> Ligo.Tezos.reset (); let amount = Constants.creation_deposit in (* Create a burrow *) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amount)) in let burrow_id = (!Ligo.Tezos.sender, Ligo.nat_from_literal "0n") in (* Touch checker *) Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_000n")) in (* Try to mark the untouched burrow for liquidation *) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0
~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); (* TODO: Would be nice to create the conditions for entrypoint_mark_for_liquidation * to really succeed instead of failing for another reason. *) assert_raises (Failure (Ligo.string_of_int error_NotLiquidationCandidate)) (fun () -> Checker.entrypoint_mark_for_liquidation (checker, burrow_id)); );
(* TODO: Add test "entrypoint_cancel_liquidation_slice - fails on untouched burrows" *)
("entrypoint_set_burrow_delegate - does not fail on untouched burrows" >:: fun _ -> (* NOTE: In a collateral=FA2 deployment this would actually fail. *) Ligo.Tezos.reset (); let amount = Constants.creation_deposit in (* Create a burrow *) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amount)) in (* Touch checker *) Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_000n")) in (* Try to set the delegate of the untouched burrow *) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _ = Checker.entrypoint_set_burrow_delegate (checker, (Ligo.nat_from_literal "0n", None)) in () );
("cfmm views" >::: let with_cfmm_setup f = fun _ -> Ligo.Tezos.reset (); let checker = empty_checker in let burrow_id = Ligo.nat_from_literal "42n" in (* Create a burrow *) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let _ops, checker = Checker.entrypoint_create_burrow (checker, (burrow_id, None, tok_of_denomination (Ligo.nat_from_literal "10_000_000n"))) in (* Mint some kit *) Ligo.Tezos.new_transaction ~seconds_passed:62 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _ops, checker = Checker.entrypoint_mint_kit (checker, (burrow_id, kit_one)) in (* Add some liquidity *) Ligo.Tezos.new_transaction ~seconds_passed:121 ~blocks_passed:2 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let ctok_to_give = Ctok.ctok_of_denomination (Ligo.nat_from_literal "400_000n") in let kit_to_give = Kit.kit_of_denomination (Ligo.nat_from_literal "400_000n") in let min_lqt_to_mint = Lqt.lqt_of_denomination (Ligo.nat_from_literal "5n") in let deadline = Ligo.add_timestamp_int !Ligo.Tezos.now (Ligo.int_from_literal "20") in let _ops, checker = Checker.entrypoint_add_liquidity (checker, (ctok_to_give, kit_to_give, min_lqt_to_mint, deadline)) in Ligo.Tezos.new_transaction ~seconds_passed:59 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _ = f checker in () in [ "view_buy_kit_min_kit_expected" >:: with_cfmm_setup (fun checker -> let ctok_to_sell = Ctok.ctok_of_denomination (Ligo.nat_from_literal "100_000n") in let min_kit_to_buy = Checker.view_buy_kit_min_kit_expected (ctok_to_sell, checker) in let deadline = Ligo.add_timestamp_int !Ligo.Tezos.now (Ligo.int_from_literal "20") in (* must succeed, otherwise view_buy_kit_min_kit_expected overapproximated *) Checker.entrypoint_buy_kit (checker, (ctok_to_sell, min_kit_to_buy, deadline))); "view_buy_kit_min_kit_expected - fail if no ctok is given" >:: with_cfmm_setup (fun checker -> assert_raises (Failure (Ligo.string_of_int error_BuyKitNoCtokGiven)) (fun () -> Checker.view_buy_kit_min_kit_expected
(Ctok.ctok_zero, checker)) ); "view_sell_kit_min_ctok_expected" >:: with_cfmm_setup (fun checker -> let kit_to_sell = Kit.kit_of_denomination (Ligo.nat_from_literal "100_000n") in let min_ctok_to_buy = Checker.view_sell_kit_min_ctok_expected (kit_to_sell, checker) in let deadline = Ligo.add_timestamp_int !Ligo.Tezos.now (Ligo.int_from_literal "20") in (* must succeed, otherwise view_sell_kit_min_ctok_expected overapproximated *) Checker.entrypoint_sell_kit (checker, (kit_to_sell, min_ctok_to_buy, deadline))); "view_sell_kit_min_ctok_expected - fail if no kit is given" >:: with_cfmm_setup (fun checker -> assert_raises (Failure (Ligo.string_of_int error_SellKitNoKitGiven)) (fun () -> Checker.view_sell_kit_min_ctok_expected (Kit.kit_zero, checker)) ); "view_add_liquidity_max_kit_deposited / view_add_liquidity_min_lqt_minted" >:: with_cfmm_setup (fun checker -> let ctok_to_sell = Ctok.ctok_of_denomination (Ligo.nat_from_literal "100_000n") in let max_kit_to_sell = Checker.view_add_liquidity_max_kit_deposited (ctok_to_sell, checker) in let min_lqt_to_buy = Checker.view_add_liquidity_min_lqt_minted (ctok_to_sell, checker) in let deadline = Ligo.add_timestamp_int !Ligo.Tezos.now (Ligo.int_from_literal "20") in (* must succeed, otherwise * view_add_liquidity_max_kit_deposited underapproximated or * view_add_liquidity_min_lqt_minted overapproximated (or both of them did) *) Checker.entrypoint_add_liquidity (checker, (ctok_to_sell, max_kit_to_sell, min_lqt_to_buy, deadline))); "view_add_liquidity_max_kit_deposited - fail if no ctok is given" >:: with_cfmm_setup (fun checker -> assert_raises (Failure (Ligo.string_of_int error_AddLiquidityNoCtokGiven)) (fun () -> Checker.view_add_liquidity_max_kit_deposited (Ctok.ctok_zero, checker)) ); "view_add_liquidity_min_lqt_minted - fail if no ctok is given" >:: with_cfmm_setup (fun checker -> assert_raises (Failure (Ligo.string_of_int error_AddLiquidityNoCtokGiven)) (fun () -> Checker.view_add_liquidity_min_lqt_minted (Ctok.ctok_zero, checker)) ); "view_remove_liquidity_min_ctok_withdrawn / view_remove_liquidity_min_kit_withdrawn" >:: with_cfmm_setup (fun checker -> let lqt_to_sell = Lqt.lqt_of_denomination (Ligo.nat_from_literal "5n") in let min_ctok_to_buy = Checker.view_remove_liquidity_min_ctok_withdrawn (lqt_to_sell, checker) in let min_kit_to_buy = Checker.view_remove_liquidity_min_kit_withdrawn (lqt_to_sell, checker) in let deadline = Ligo.add_timestamp_int !Ligo.Tezos.now (Ligo.int_from_literal "20") in (* must succeed, otherwise * view_remove_liquidity_min_ctok_withdrawn overapproximated or * view_remove_liquidity_min_kit_withdrawn overapproximated (or both of them did) *) Checker.entrypoint_remove_liquidity (checker, (lqt_to_sell, min_ctok_to_buy, min_kit_to_buy, deadline))); "view_remove_liquidity_min_ctok_withdrawn - fail if no liquidity is given" >:: with_cfmm_setup (fun checker -> assert_raises (Failure (Ligo.string_of_int error_RemoveLiquidityNoLiquidityBurned)) (fun () -> Checker.view_remove_liquidity_min_ctok_withdrawn (Lqt.lqt_zero, checker)) ); "view_remove_liquidity_min_ctok_withdrawn - too much lqt withdrawn (equal)" >:: with_cfmm_setup (fun checker -> let lqt_to_withdraw = checker.cfmm.lqt in assert_raises (Failure (Ligo.string_of_int error_RemoveLiquidityTooMuchLiquidityWithdrawn)) (fun () -> Checker.view_remove_liquidity_min_ctok_withdrawn (lqt_to_withdraw, checker)) ); "view_remove_liquidity_min_ctok_withdrawn - too much lqt withdrawn (more than)" >:: with_cfmm_setup (fun checker -> let lqt_to_withdraw = Lqt.lqt_add checker.cfmm.lqt 
(Lqt.lqt_of_denomination (Ligo.nat_from_literal "1n")) in assert_raises (Failure (Ligo.string_of_int error_RemoveLiquidityTooMuchLiquidityWithdrawn)) (fun () -> Checker.view_remove_liquidity_min_ctok_withdrawn (lqt_to_withdraw, checker)) ); "view_remove_liquidity_min_kit_withdrawn - fail if no liquidity is given" >:: with_cfmm_setup (fun checker -> assert_raises (Failure (Ligo.string_of_int error_RemoveLiquidityNoLiquidityBurned)) (fun () -> Checker.view_remove_liquidity_min_kit_withdrawn (Lqt.lqt_zero, checker)) ); "view_remove_liquidity_min_kit_withdrawn - too much lqt withdrawn (equal)" >:: with_cfmm_setup (fun checker -> let lqt_to_withdraw = checker.cfmm.lqt in assert_raises (Failure (Ligo.string_of_int error_RemoveLiquidityTooMuchLiquidityWithdrawn)) (fun () -> Checker.view_remove_liquidity_min_kit_withdrawn (lqt_to_withdraw, checker)) ); "view_remove_liquidity_min_kit_withdrawn - too much lqt withdrawn (more than)" >:: with_cfmm_setup (fun checker -> let lqt_to_withdraw = Lqt.lqt_add checker.cfmm.lqt (Lqt.lqt_of_denomination (Ligo.nat_from_literal "1n")) in assert_raises (Failure (Ligo.string_of_int error_RemoveLiquidityTooMuchLiquidityWithdrawn)) (fun () -> Checker.view_remove_liquidity_min_kit_withdrawn (lqt_to_withdraw, checker)) ); ] );
("view_burrow_max_mintable_kit - does not fail on untouched burrows" >:: fun _ -> Ligo.Tezos.reset (); let amount = Constants.creation_deposit in (* Create a burrow *) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amount)) in let burrow_id = (!Ligo.Tezos.sender, Ligo.nat_from_literal "0n") in (* Touch checker *) Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_000n")) in (* Try to view the max mintable kit from the untouched burrow *) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _ = Checker.view_burrow_max_mintable_kit (burrow_id, checker) in () );
("view_is_burrow_overburrowed - does not fail on untouched burrows" >:: fun _ -> Ligo.Tezos.reset (); let amount = Constants.creation_deposit in (* Create a burrow *) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amount)) in let burrow_id = (!Ligo.Tezos.sender, Ligo.nat_from_literal "0n") in (* Touch checker *) Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_000n")) in (* Try to view whether the untouched burrow is overburrowed *) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _ = Checker.view_is_burrow_overburrowed (burrow_id, checker) in () );
("view_is_burrow_liquidatable - does not fail on untouched burrows" >:: fun _ -> Ligo.Tezos.reset (); let amount = Constants.creation_deposit in (* Create a burrow *) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let _ops, checker = Checker.entrypoint_create_burrow
(empty_checker, (Ligo.nat_from_literal "0n", None, amount)) in let burrow_id = (!Ligo.Tezos.sender, Ligo.nat_from_literal "0n") in (* Touch checker *) Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_000n")) in (* Try to view whether the untouched burrow is liquidatable *) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _ = Checker.view_is_burrow_liquidatable (burrow_id, checker) in () );
("view_current_liquidation_auction_details - raises error when there is no current auction" >:: fun _ -> Ligo.Tezos.reset (); let checker = empty_checker in assert_raises (Failure (Ligo.string_of_int error_NoOpenAuction)) (fun _ -> Checker.view_current_liquidation_auction_details ((), checker)) );
("view_current_liquidation_auction_details - expected value for descending auction" >:: fun _ -> Ligo.Tezos.reset (); let checker = checker_with_active_auction () in let auction = Option.get checker.liquidation_auctions.current_auction in let auction_details = Checker.view_current_liquidation_auction_details ((), checker) in let expected_auction_details = { auction_id = auction.contents; collateral = tok_of_denomination (Ligo.nat_from_literal "23_669_648n"); minimum_bid = liquidation_auction_current_auction_minimum_bid auction; current_bid = None; remaining_blocks = None; remaining_seconds = None; } in assert_view_current_liquidation_auction_details_result_equal ~expected:expected_auction_details ~real:auction_details );
("view_current_liquidation_auction_details - expected value for ascending auction" >:: fun _ -> Ligo.Tezos.reset (); let checker = checker_with_active_auction () in let auction = Option.get checker.liquidation_auctions.current_auction in (* Place a bid to turn the descending auction into an ascending one *) let bidder = bob_addr in let bid_amnt = liquidation_auction_current_auction_minimum_bid auction in Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bidder ~amount:Common.tez_zero; let _, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal "1n", None, tok_of_denomination (Ligo.nat_from_literal "1_000_000_000n"))) in Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bidder ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal "1n", bid_amnt)) in Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bidder ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_liquidation_auction_place_bid (checker, (auction.contents, bid_amnt)) in Ligo.Tezos.new_transaction ~seconds_passed:500 ~blocks_passed:22 ~sender:bidder ~amount:(Ligo.tez_from_literal "0mutez"); let auction = Option.get checker.liquidation_auctions.current_auction in let auction_details = Checker.view_current_liquidation_auction_details ((), checker) in let expected_auction_details = { auction_id = auction.contents; collateral = tok_of_denomination (Ligo.nat_from_literal "23_669_648n"); minimum_bid = liquidation_auction_current_auction_minimum_bid auction; current_bid = Some LiquidationAuctionPrimitiveTypes.({address=bidder; kit=bid_amnt;}); remaining_blocks = Some (Ligo.int_from_literal "-2"); remaining_seconds = Some (Ligo.int_from_literal "700"); } in assert_view_current_liquidation_auction_details_result_equal
~expected:expected_auction_details ~real:auction_details ); ] let () = run_test_tt_main suite
(* Source (ocaml): https://raw.githubusercontent.com/tezos-checker/checker/e4bd0f16aa14e10e8a62b28e85f8c98c388a0a6a/tests/testChecker.ml *)
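(* Illustrative sanity check (not part of the original suite) of the expected
   values in the "expected value for ascending auction" test above. The figures
   below are inferred from the test itself, which advances time by 22 blocks /
   500 seconds after the bid is placed; the actual bid-window constants
   presumably live in the Constants module. Under an assumed window of 20
   blocks / 1_200 seconds measured from the last bid: *)
let _expected_remaining_blocks : int = 20 - 22         (* = -2,  matching remaining_blocks  = Some (Ligo.int_from_literal "-2")  *)
let _expected_remaining_seconds : int = 1_200 - 500    (* = 700, matching remaining_seconds = Some (Ligo.int_from_literal "700") *)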
Create a burrow Try to set the delegate of the untouched burrow Create a burrow Mint some kit Add some liquidity must succeed, otherwise view_buy_kit_min_kit_expected overapproximated must succeed, otherwise view_sell_kit_min_ctok_expected overapproximated must succeed, otherwise * view_add_liquidity_max_kit_deposited underapproximated or * view_add_liquidity_min_lqt_minted overapproximated (or both of them did) must succeed, otherwise * view_remove_liquidity_min_ctok_withdrawn overapproximated or * view_remove_liquidity_min_kit_withdrawn overapproximated (or both of them did) Create a burrow Try to view the max mintable kit from the untouched burrow Create a burrow Try to view whether the untouched burrow is overburrowed Create a burrow </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">open Ctok open Kit open Tok open Lqt open Burrow open OUnit2 open TestLib open CheckerTypes open Fa2Interface open Fa2Ledger open Fa2Implementation open Error open Ptr open LiquidationAuctionTypes open LiquidationAuction let property_test_count = 10000 let qcheck_to_ounit t = OUnit.ounit2_of_ounit1 @@ QCheck_ounit.to_ounit_test t module PtrMap = Map.Make(struct type t = ptr let compare = compare_ptr end) let checker_address = !Ligo.Tezos.self_address let empty_checker = initial_checker { ctok_fa2 = ctok_fa2_addr; ctez_cfmm = ctez_cfmm_addr; oracle = oracle_addr; collateral_fa2 = collateral_fa2_addr; } let _ = Checker.assert_checker_invariants empty_checker Enhance the initial checker state with a populated cfmm in a consistent way . let empty_checker_with_cfmm (cfmm: CfmmTypes.cfmm) = let checker_kit = kit_sub cfmm.kit (kit_of_denomination (Ligo.nat_from_literal "1n")) in let checker_liquidity = lqt_sub cfmm.lqt (lqt_of_denomination (Ligo.nat_from_literal "1n")) in let checker = { empty_checker with parameters = { empty_checker.parameters with circulating_kit = checker_kit }; cfmm = cfmm; fa2_state = let fa2_state = initial_fa2_state in let fa2_state = ledger_issue_lqt (fa2_state, !Ligo.Tezos.self_address, checker_liquidity) in let fa2_state = ledger_issue_kit (fa2_state, !Ligo.Tezos.self_address, checker_kit) in fa2_state; } in Checker.assert_checker_invariants checker; checker Produces a checker state with burrows . * Returns a list of the liquidatable burrow ids , underburrowed burrow ids , and the contract state * Returns a list of the liquidatable burrow ids, underburrowed burrow ids, and the contract state *) let checker_with_liquidatable_burrows () = let checker = empty_checker in let alice_burrow_1 = Ligo.nat_from_literal "0n" in let alice_burrow_nos = List.init 20 (fun i -> Ligo.nat_from_int64 (Int64.of_int (i+1))) in let bob_burrow_1 = Ligo.nat_from_literal "0n" in Alice burrow 1 . Will NOT be Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:2 ~sender:alice_addr ~amount:Common.tez_zero; let _, checker = Checker.entrypoint_create_burrow (checker, (alice_burrow_1, None, tok_of_denomination (Ligo.nat_from_literal "2_000_000n"))) in burrow 2 : N. 
Will be Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:3 ~sender:alice_addr ~amount:Common.tez_zero; let _, checker = Checker.entrypoint_mint_kit (checker, (alice_burrow_1, (kit_of_denomination (Ligo.nat_from_literal "100n")))) in let checker = List.fold_left ( fun checker alice_burrow_no -> Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_create_burrow (checker, (alice_burrow_no, None, tok_of_denomination (Ligo.nat_from_literal "2_000_000n"))) in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = let max_kit = (Checker.view_burrow_max_mintable_kit ((alice_addr, alice_burrow_no), checker)) in Checker.entrypoint_mint_kit (checker, (alice_burrow_no, max_kit)) in checker ) checker alice_burrow_nos in Bob burrow 1 . Will be . Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bob_addr ~amount:Common.tez_zero; let _, checker = Checker.entrypoint_create_burrow (checker, (bob_burrow_1, None, tok_of_denomination (Ligo.nat_from_literal "20_000_000n"))) in Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = let max_kit = (Checker.view_burrow_max_mintable_kit ((bob_addr, bob_burrow_1), checker)) in Checker.entrypoint_mint_kit (checker, (bob_burrow_1, max_kit)) in Increase value of kit to make some of the burrows by touching checker Ligo.Tezos.new_transaction ~seconds_passed:10_000_000 ~blocks_passed:100_000 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_100_000n")) in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_touch_burrow (checker, (alice_addr, alice_burrow_1)) in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_touch_burrow (checker, (bob_addr, bob_burrow_1)) in let checker = List.fold_left ( fun checker alice_burrow_no -> Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_touch_burrow (checker, (alice_addr, alice_burrow_no)) in checker ) checker alice_burrow_nos in assert_bool "alice_burrow_1 was liquidatable but it is expected to not be" (not (Burrow.burrow_is_liquidatable checker.parameters (Option.get (Ligo.Big_map.find_opt (alice_addr, alice_burrow_1) checker.burrows)))); assert_bool "bob_burrow_1 was not liquidatable but it is expected to be" (Burrow.burrow_is_liquidatable checker.parameters (Option.get (Ligo.Big_map.find_opt (bob_addr, bob_burrow_1) checker.burrows))); List.fold_left ( fun _ alice_burrow_no -> assert_bool ("alice_burrow_" ^ (Ligo.string_of_nat alice_burrow_no) ^ " was not liquidatable but it is expected to be") (Burrow.burrow_is_liquidatable checker.parameters (Option.get (Ligo.Big_map.find_opt (alice_addr, alice_burrow_no) checker.burrows)))) () alice_burrow_nos; Checker.assert_checker_invariants checker; let liquidatable_burrow_ids = List.append (List.map (fun x -> (alice_addr, x)) alice_burrow_nos) [(bob_addr, bob_burrow_1)] in let underburrowed_burrow_ids = [(alice_addr, alice_burrow_1)] in liquidatable_burrow_ids, 
underburrowed_burrow_ids, checker let checker_with_queued_liquidation_slices () = let liquidatable_burrow_ids, _, checker = checker_with_liquidatable_burrows () in Mark the burrows for liquidation . This will add slices to the queue . let checker, close_slice_details, other_slice_details = List.fold_left (fun (checker, close_liquidation_slices, other_liquidation_slices) burrow_id -> Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_mark_for_liquidation (checker, burrow_id) in let new_slice = Option.get (SliceList.slice_list_youngest (SliceList.slice_list_from_auction_state checker.liquidation_auctions burrow_id) checker.liquidation_auctions) in let slice_ptr = SliceList.slice_list_element_ptr new_slice in let slize_tez = (SliceList.slice_list_element_contents new_slice).tok in let is_burrow_now_closed = not (burrow_active (Option.get (Ligo.Big_map.find_opt burrow_id checker.burrows))) in let close_liquidation_slices, other_liquidation_slices = if is_burrow_now_closed then (List.append close_liquidation_slices [(burrow_id, slice_ptr, slize_tez)]), other_liquidation_slices else close_liquidation_slices, (List.append other_liquidation_slices [(burrow_id, slice_ptr, slize_tez)]) in checker, close_liquidation_slices, other_liquidation_slices ) (checker, [], []) liquidatable_burrow_ids in assert_bool "liquidation auction queue was empty, but it was expected to have some slices" (Option.is_some (Avl.avl_peek_front checker.liquidation_auctions.avl_storage checker.liquidation_auctions.queued_slices)); assert (List.length close_slice_details > 0); assert (List.length other_slice_details > 0); close_slice_details, other_slice_details, checker let checker_with_active_auction () = let _, _, checker = checker_with_queued_liquidation_slices () in Touch checker to start an auction Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_touch (checker, ()) in assert_bool "a current liquidation auction should have been started but was not" (Option.is_some checker.liquidation_auctions.current_auction); checker let checker_with_completed_auction () = let checker = checker_with_active_auction () in let auction_details = Checker.view_current_liquidation_auction_details ((), checker) in let bidder = alice_addr in let new_burrow_no = Ligo.nat_from_literal "100n" in Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bidder ~amount:Common.tez_zero; let _, checker = Checker.entrypoint_create_burrow (checker, (new_burrow_no, None, tok_of_denomination (Ligo.nat_from_literal "1_000_000_000n"))) in Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bidder ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_mint_kit (checker, (new_burrow_no, auction_details.minimum_bid)) in Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bidder ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_liquidation_auction_place_bid (checker, ((Option.get checker.liquidation_auctions.current_auction).contents, auction_details.minimum_bid)) in Touch checker to start an auction Ligo.Tezos.new_transaction ~seconds_passed:1202 ~blocks_passed:22 ~sender:bidder ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_touch (checker, ()) in assert_bool "there was not a completed liquidation auction but one 
should exist" (Option.is_some checker.liquidation_auctions.completed_auctions); bidder, checker Helper for creating new burrows and extracting their ID from the corresponding Ligo Ops let newly_created_burrow (checker: checker) (burrow_no: string) (collateral: tok) : burrow_id * checker = let _ops, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal "0n", None, collateral)) in ((!Ligo.Tezos.sender, Ligo.nat_from_literal burrow_no), checker) let get_balance_of (checker: checker) (addr: Ligo.address) (tok: fa2_token_id): Ligo.nat = let ops, _checker = Checker.strict_entrypoint_balance_of (checker, { requests = [{ owner=addr; token_id=tok }]; callback=Ligo.contract_of_address addr}) in match ops with | [ Transaction (FA2BalanceOfResponseTransactionValue [ { request = _; balance = kit } ], _, _) ] -> kit | _ -> failwith ("Unexpected fa2 response, got: " ^ show_operation_list ops) let suite = "Checker tests" >::: [ ("initial touch (noop)" >:: fun _ -> Ligo.Tezos.reset (); let checker1 = empty_checker in let ops, checker2 = Checker.touch_with_index checker1 (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "0n")) in assert_operation_list_equal ~expected:[] ~real:ops; () ); ("create_burrow - updates checker storage" >:: fun _ -> Ligo.Tezos.reset (); Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let burrow_id, checker = newly_created_burrow empty_checker "0n" (tok_of_denomination (Ligo.nat_from_literal "1_000_000n")) in assert_bool "No matching burrow found after calling create_burrow" (Option.is_some (Ligo.Big_map.find_opt burrow_id checker.burrows)); assert_bool "The burrow existed before calling create_burrow" (Option.is_none (Ligo.Big_map.find_opt burrow_id empty_checker.burrows)) ); ("create_burrow - collateral in burrow representation does not include creation deposit" >:: fun _ -> Ligo.Tezos.reset (); Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let burrow_id, checker = newly_created_burrow empty_checker "0n" Constants.creation_deposit in let expected_collateral = tok_zero in match Ligo.Big_map.find_opt burrow_id checker.burrows with | Some burrow -> assert_tok_equal ~expected:expected_collateral ~real:(burrow_collateral burrow) | None -> assert_failure "Expected a burrow representation to exist but none was found" ); ("create_burrow - fails when transaction amount is one mutez below creation deposit" >:: fun _ -> Ligo.Tezos.reset (); let amount = tok_sub Constants.creation_deposit (tok_of_denomination (Ligo.nat_from_literal "1n")) in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; assert_raises (Failure (Ligo.string_of_int error_InsufficientFunds)) (fun () -> Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amount))) ); ("create_burrow - passes when transaction amount is exactly the creation deposit" >:: fun _ -> Ligo.Tezos.reset (); Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let burrow_id, checker = newly_created_burrow empty_checker "0n" Constants.creation_deposit in match Ligo.Big_map.find_opt burrow_id checker.burrows with | Some burrow -> assert_tok_equal ~expected:tok_zero ~real:(burrow_collateral burrow) | None -> assert_failure "Expected a burrow representation to exist but none was found" ); ("deposit_collateral - owner can deposit" >:: fun _ -> Ligo.Tezos.reset (); let 
initial_deposit = tok_of_denomination (Ligo.nat_from_literal "3_000_000n") in let deposit = tok_of_denomination (Ligo.nat_from_literal "3_000_000n") in let expected_collateral = tok_add deposit (tok_sub initial_deposit Constants.creation_deposit) in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let (_, burrow_no) as burrow_id, checker = newly_created_burrow empty_checker "0n" initial_deposit in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let _, checker = Checker.entrypoint_deposit_collateral (checker, (burrow_no, deposit)) in match Ligo.Big_map.find_opt burrow_id checker.burrows with | Some burrow -> assert_tok_equal ~expected:expected_collateral ~real:(burrow_collateral burrow) | None -> assert_failure "Expected a burrow representation to exist but none was found" ); ("deposit_collateral - non-owner cannot deposit" >:: fun _ -> Ligo.Tezos.reset (); Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let _, checker = newly_created_burrow empty_checker "0n" (tok_of_denomination (Ligo.nat_from_literal "3_000_000n"))in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:Common.tez_zero; assert_raises (Failure (Ligo.string_of_int error_NonExistentBurrow)) (fun () -> Checker.entrypoint_deposit_collateral (checker, (Ligo.nat_from_literal "0n", tok_of_denomination (Ligo.nat_from_literal "1_000_000n")))) ); ("withdraw_collateral - owner can withdraw" >:: fun _ -> Ligo.Tezos.reset (); let initial_deposit = tok_of_denomination (Ligo.nat_from_literal "3_000_000n") in let withdrawal = tok_of_denomination (Ligo.nat_from_literal "1_000_000n") in let expected_collateral = tok_sub initial_deposit (tok_add Constants.creation_deposit withdrawal) in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let burrow_id, checker = newly_created_burrow empty_checker "0n" initial_deposit in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let _, checker = Checker.entrypoint_withdraw_collateral (checker, (Ligo.nat_from_literal "0n", withdrawal)) in match Ligo.Big_map.find_opt burrow_id checker.burrows with | Some burrow -> assert_tok_equal ~expected:expected_collateral ~real:(burrow_collateral burrow) | None -> assert_failure "Expected a burrow representation to exist but none was found" ); ("withdraw_collateral - non-owner cannot withdraw" >:: fun _ -> Ligo.Tezos.reset (); let initial_deposit = tok_of_denomination (Ligo.nat_from_literal "3_000_000n") in let withdrawal = tok_of_denomination (Ligo.nat_from_literal "1_000_000n") in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let _, checker = newly_created_burrow empty_checker "0n" initial_deposit in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez"); assert_raises (Failure (Ligo.string_of_int error_NonExistentBurrow)) (fun () -> Checker.entrypoint_withdraw_collateral (checker, (Ligo.nat_from_literal "0n", withdrawal))) ); ("entrypoint_activate_burrow - emits expected operations" >:: fun _ -> Ligo.Tezos.reset (); Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero; let (_, burrow_no), checker = newly_created_burrow empty_checker "0n" (tok_of_denomination 
(Ligo.nat_from_literal "100_000_000n"))in Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_deactivate_burrow (checker, (burrow_no, alice_addr)) in Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero; let ops, _ = Checker.entrypoint_activate_burrow (checker, (burrow_no, Constants.creation_deposit)) in let burrow = Option.get (Ligo.Big_map.find_opt (alice_addr, burrow_no) checker.burrows) in let expected_ops = [ (LigoOp.Tezos.fa2_transfer_transaction [ Fa2Interface.( { from_ = alice_addr; txs = [ { to_ = burrow_address burrow; token_id = TokenMetadata.tok_token_id; amount = Ligo.nat_from_literal "1_000_000n"; }; ]; } ) ] (Ligo.tez_from_literal "0mutez") (Option.get (LigoOp.Tezos.get_entrypoint_opt "%transfer" checker.external_contracts.collateral_fa2)) ); ] in assert_operation_list_equal ~expected:expected_ops ~real:ops ); ("entrypoint_add_liquidity - emits expected operations" >:: fun _ -> Ligo.Tezos.reset (); let checker = empty_checker in Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero; let _, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal "0n", None, tok_of_denomination (Ligo.nat_from_literal "100_000_000n"))) in Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal "0n", (kit_of_denomination (Ligo.nat_from_literal "10_000_000n")))) in Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let ops, _ = Checker.entrypoint_add_liquidity (checker, ( ctok_of_denomination (Ligo.nat_from_literal "5_000_000n") , kit_of_denomination (Ligo.nat_from_literal "5_000_000n") , lqt_of_denomination (Ligo.nat_from_literal "5_000_000n") , Ligo.timestamp_from_seconds_literal 999 ) ) in let expected_ops = [ (LigoOp.Tezos.fa2_transfer_transaction [ Fa2Interface.{ from_ = alice_addr; txs = [ { to_ = checker_address; token_id = TokenMetadata.ctok_token_id; amount = Ligo.nat_from_literal "5_000_000n"; } ] } ] (Ligo.tez_from_literal "0mutez") (Option.get (LigoOp.Tezos.get_entrypoint_opt "%transfer" checker.external_contracts.ctok_fa2)) ); ] in assert_operation_list_equal ~expected:expected_ops ~real:ops ); ("entrypoint_burn_kit - emits expected operations" >:: fun _ -> Ligo.Tezos.reset (); let checker = empty_checker in Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero; let _, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal "0n", None, tok_of_denomination (Ligo.nat_from_literal "100_000_000n"))) in Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal "0n", (kit_of_denomination (Ligo.nat_from_literal "10_000_000n")))) in Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let ops, _ = Checker.entrypoint_burn_kit (checker, (Ligo.nat_from_literal "0n", (kit_of_denomination (Ligo.nat_from_literal "10_000_000n")))) in assert_operation_list_equal ~expected:[] ~real:ops ); ("entrypoint_create_burrow - emits expected operations" >:: fun _ -> Ligo.Tezos.reset (); 
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero; let amnt = tok_of_denomination (Ligo.nat_from_literal "100_000_000n") in let ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amnt)) in match ops with Note : it 's not really possible to check the first parameter of the contract here which is the * function which defines the contract 's logic . * function which defines the contract's logic. *) | [ (CreateBurrowContract (_, delegate, tez, storage)) ; (Transaction (FA2TransferTransactionValue _, _, _)) as op; ] -> assert_key_hash_option_equal ~expected:None ~real:delegate; assert_tez_equal ~expected:Common.tez_zero ~real:tez; assert_equal BurrowTypes.({checker_address=checker_address; collateral_fa2=collateral_fa2_addr}) storage; let burrow = Option.get (Ligo.Big_map.find_opt (alice_addr, (Ligo.nat_from_literal "0n")) checker.burrows) in assert_operation_equal ~expected:( LigoOp.Tezos.fa2_transfer_transaction [ Fa2Interface.( { from_ = alice_addr; txs = [ { to_ = burrow_address burrow; token_id = TokenMetadata.tok_token_id; amount = tok_to_denomination_nat amnt; }; ]; } ) ] (Ligo.tez_from_literal "0mutez") (Option.get (LigoOp.Tezos.get_entrypoint_opt "%transfer" checker.external_contracts.collateral_fa2)) ) ~real:op | _ -> failwith ("Expected [CreateBurrowContract (_, _, _, _); Transaction (FA2TransferTransactionValue _, _, _)] but got " ^ show_operation_list ops) ); ("entrypoint_deactivate_burrow - emits expected operations" >:: fun _ -> Ligo.Tezos.reset (); Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero; let (_, burrow_no), checker = newly_created_burrow empty_checker "0n" (tok_of_denomination (Ligo.nat_from_literal "100_000_000n")) in Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let ops, checker = Checker.entrypoint_deactivate_burrow (checker, (burrow_no, alice_addr)) in let burrow = Option.get (Ligo.Big_map.find_opt (alice_addr, burrow_no) checker.burrows) in let expected_ops = [ (LigoOp.Tezos.address_nat_transaction (alice_addr, (Ligo.nat_from_literal "100_000_000n")) (Ligo.tez_from_literal "0mutez") (Option.get (LigoOp.Tezos.get_entrypoint_opt "%burrowTransfer" (burrow_address burrow))) ); ] in assert_operation_list_equal ~expected:expected_ops ~real:ops ); ("entrypoint_deposit_collateral - emits expected operations" >:: fun _ -> Ligo.Tezos.reset (); Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let (_, burrow_no), checker = newly_created_burrow empty_checker "0n" (tok_of_denomination (Ligo.nat_from_literal "3_000_000n")) in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let ops, checker = Checker.entrypoint_deposit_collateral (checker, (burrow_no, tok_of_denomination (Ligo.nat_from_literal "3_000_000n"))) in let burrow = Option.get (Ligo.Big_map.find_opt (alice_addr, burrow_no) checker.burrows) in let expected_ops = [ (LigoOp.Tezos.fa2_transfer_transaction [ Fa2Interface.( { from_ = alice_addr; txs = [ { to_ = burrow_address burrow; token_id = TokenMetadata.tok_token_id; amount = Ligo.nat_from_literal "3_000_000n"; }; ]; } ) ] (Ligo.tez_from_literal "0mutez") (Option.get (LigoOp.Tezos.get_entrypoint_opt "%transfer" checker.external_contracts.collateral_fa2)) ); ] in assert_operation_list_equal ~expected:expected_ops ~real:ops ); 
("entrypoint_liquidation_auction_place_bid - emits expected operations" >:: fun _ -> Ligo.Tezos.reset (); let checker = checker_with_active_auction () in let auction_details = Checker.view_current_liquidation_auction_details ((), checker) in Mint some kit to be able to bid let new_burrow_no = Ligo.nat_from_literal "100n" in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let _, checker = Checker.entrypoint_create_burrow (checker, (new_burrow_no, None, tok_of_denomination (Ligo.nat_from_literal "1_000_000_000n"))) in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_mint_kit (checker, (new_burrow_no, auction_details.minimum_bid)) in Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let ops, _checker = Checker.entrypoint_liquidation_auction_place_bid (checker, ((Option.get checker.liquidation_auctions.current_auction).contents, auction_details.minimum_bid)) in assert_operation_list_equal ~expected:[] ~real:ops ); ("entrypoint_mark_for_liquidation - emits expected operations" >:: fun _ -> Ligo.Tezos.reset (); let liquidatable_burrow_ids, _, checker = checker_with_liquidatable_burrows () in let burrow_id = List.nth liquidatable_burrow_ids 0 in let sender = bob_addr in Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:sender ~amount:(Ligo.tez_from_literal "0mutez"); let ops, _ = Checker.entrypoint_mark_for_liquidation (checker, burrow_id) in let burrow = Option.get (Ligo.Big_map.find_opt burrow_id checker.burrows) in let expected_ops = [ (LigoOp.Tezos.address_nat_transaction (sender, (Ligo.nat_from_literal "1_001_000n")) (Ligo.tez_from_literal "0mutez") (Option.get (LigoOp.Tezos.get_entrypoint_opt "%burrowTransfer" (burrow_address burrow))) ); ] in assert_operation_list_equal ~expected:expected_ops ~real:ops ); ("entrypoint_cancel_liquidation_slice - emits expected operations" >:: fun _ -> Ligo.Tezos.reset (); let _, slice_details, checker = checker_with_queued_liquidation_slices () in let ((burrow_owner, burrow_no), slice_ptr, _) = List.nth slice_details 0 in Deposit some extra collateral to one of the burrows with slices in the auction queue Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:burrow_owner ~amount:Common.tez_zero; let _, checker = Checker.entrypoint_deposit_collateral (checker, (burrow_no, tok_of_denomination (Ligo.nat_from_literal "4_000_000n"))) in Now cancel one of the burrow 's liquidation slices Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:burrow_owner ~amount:(Ligo.tez_from_literal "0mutez"); let ops, _ = Checker.entrypoint_cancel_liquidation_slice (checker, slice_ptr) in assert_operation_list_equal ~expected:[] ~real:ops ); ("entrypoint_liquidation_auction_claim_win - emits expected operations" >:: fun _ -> Ligo.Tezos.reset (); let winning_bidder, checker = checker_with_completed_auction () in let auction_ptr = (Option.get checker.liquidation_auctions.completed_auctions).oldest in let sold_tok = (Option.get (Avl.avl_root_data checker.liquidation_auctions.avl_storage auction_ptr)).sold_tok in let slice_ptrs = avl_leaves_to_list checker.liquidation_auctions.avl_storage auction_ptr in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_touch_liquidation_slices 
(checker, slice_ptrs) in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:winning_bidder ~amount:(Ligo.tez_from_literal "0mutez"); let ops, _ = Checker.entrypoint_liquidation_auction_claim_win (checker, auction_ptr) in let expected_ops = [ (LigoOp.Tezos.fa2_transfer_transaction [ Fa2Interface.( { from_ = !Ligo.Tezos.self_address; txs = [ { to_ = winning_bidder; token_id = TokenMetadata.tok_token_id; amount = tok_to_denomination_nat sold_tok; }; ]; } ) ] (Ligo.tez_from_literal "0mutez") (Option.get (LigoOp.Tezos.get_entrypoint_opt "%transfer" checker.external_contracts.collateral_fa2)) ); ] in assert_operation_list_equal ~expected:expected_ops ~real:ops ); ("entrypoint_mint_kit - emits expected operations" >:: fun _ -> Ligo.Tezos.reset (); let checker = empty_checker in Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero; let _, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal "0n", None, tok_of_denomination (Ligo.nat_from_literal "100_000_000n"))) in Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let ops, _ = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal "0n", (kit_of_denomination (Ligo.nat_from_literal "10_000_000n")))) in assert_operation_list_equal ~expected:[] ~real:ops ); ("entrypoint_set_burrow_delegate - emits expected operations" >:: fun _ -> Ligo.Tezos.reset (); Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let (_, burrow_no), checker = newly_created_burrow empty_checker "0n" (tok_of_denomination (Ligo.nat_from_literal "3_000_000n")) in Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let ops, checker = Checker.entrypoint_set_burrow_delegate (checker, (burrow_no, Some charles_key_hash)) in let burrow = Option.get (Ligo.Big_map.find_opt (alice_addr, burrow_no) checker.burrows) in let expected_ops = [ (LigoOp.Tezos.opt_key_hash_transaction (Some charles_key_hash) (Ligo.tez_from_literal "0mutez") (Option.get (LigoOp.Tezos.get_entrypoint_opt "%burrowSetDelegate" (burrow_address burrow))) ); ] in assert_operation_list_equal ~expected:expected_ops ~real:ops ); ("entrypoint_receive_price - emits expected operations" >:: fun _ -> Ligo.Tezos.reset (); let checker = empty_checker in Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:(checker.external_contracts.oracle) ~amount:(Ligo.tez_from_literal "0mutez"); let ops, _ = Checker.entrypoint_receive_price (checker, (Ligo.nat_from_literal "42n", Tok.tok_scaling_factor_nat)) in assert_operation_list_equal ~expected:[] ~real:ops ); ("entrypoint_remove_liquidity - emits expected operations" >:: fun _ -> Ligo.Tezos.reset (); let checker = empty_checker in Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero; let _, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal "0n", None, tok_of_denomination (Ligo.nat_from_literal "100_000_000n"))) in Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal "0n", (kit_of_denomination (Ligo.nat_from_literal "10_000_000n")))) in Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); 
let _, checker = Checker.entrypoint_add_liquidity (checker, ( ctok_of_denomination (Ligo.nat_from_literal "5_000_000n") , kit_of_denomination (Ligo.nat_from_literal "5_000_000n") , lqt_of_denomination (Ligo.nat_from_literal "5_000_000n") , Ligo.timestamp_from_seconds_literal 999 ) ) in Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let ops, _ = Checker.entrypoint_remove_liquidity (checker, ( lqt_of_denomination (Ligo.nat_from_literal "5_000_000n") , ctok_of_denomination (Ligo.nat_from_literal "5_000_000n") , kit_of_denomination (Ligo.nat_from_literal "5_000_000n") , Ligo.timestamp_from_seconds_literal 999 ) ) in let expected_ops = [ (LigoOp.Tezos.fa2_transfer_transaction [ Fa2Interface.{ from_ = checker_address; txs = [ { to_ = alice_addr; token_id = TokenMetadata.ctok_token_id; amount = Ligo.nat_from_literal "5_000_000n"; } ] } ] (Ligo.tez_from_literal "0mutez") (Option.get (LigoOp.Tezos.get_entrypoint_opt "%transfer" checker.external_contracts.ctok_fa2)) ); ] in assert_operation_list_equal ~expected:expected_ops ~real:ops ); FIXME : Operations differ between the FA2 deployment and the TEZ deployment ( " entrypoint_touch - emits expected operations when checker needs to be touched " > : : fun _ - > Ligo.Tezos.reset ( ) ; let checker = empty_checker in Ligo . Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender : alice_addr ~amount:(Ligo.tez_from_literal " 0mutez " ) ; let ops , _ = Checker.entrypoint_touch ( checker , ( ) ) in let expected_ops = [ ( LigoOp . Tezos.nat_contract_transaction ( Option.get ( LigoOp . Tezos.get_entrypoint_opt " % receive_price " ! . ) ) ( Ligo.tez_from_literal " 0mutez " ) ( CheckerTypes.get_oracle_entrypoint checker.external_contracts ) ) ; ( LigoOp . Tezos.nat_nat_contract_transaction ( Option.get ( LigoOp . Tezos.get_entrypoint_opt " % receive_ctez_marginal_price " ! . 
) ) ( Ligo.tez_from_literal " 0mutez " ) ( CheckerTypes.get_ctez_cfmm_price_entrypoint checker.external_contracts ) ) ; ] in assert_operation_list_equal ~expected : expected_ops ~real : ops ) ; ("entrypoint_touch - emits expected operations when checker needs to be touched" >:: fun _ -> Ligo.Tezos.reset (); let checker = empty_checker in Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let ops, _ = Checker.entrypoint_touch (checker, ()) in let expected_ops = [ (LigoOp.Tezos.nat_contract_transaction (Option.get (LigoOp.Tezos.get_entrypoint_opt "%receive_price" !Ligo.Tezos.self_address)) (Ligo.tez_from_literal "0mutez") (CheckerTypes.get_oracle_entrypoint checker.external_contracts) ); (LigoOp.Tezos.nat_nat_contract_transaction (Option.get (LigoOp.Tezos.get_entrypoint_opt "%receive_ctez_marginal_price" !Ligo.Tezos.self_address)) (Ligo.tez_from_literal "0mutez") (CheckerTypes.get_ctez_cfmm_price_entrypoint checker.external_contracts) ); ] in assert_operation_list_equal ~expected:expected_ops ~real:ops ); *) ("entrypoint_touch - emits expected operations when checker has already been touched" >:: fun _ -> Ligo.Tezos.reset (); let checker = empty_checker in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let ops, _ = Checker.entrypoint_touch (checker, ()) in assert_operation_list_equal ~expected:[] ~real:ops ); ("entrypoint_touch_liquidation_slices - emits expected operations" >:: fun _ -> Ligo.Tezos.reset (); let _, checker = checker_with_completed_auction () in let auction_ptr = (Option.get checker.liquidation_auctions.completed_auctions).oldest in let slice_ptrs = avl_leaves_to_list checker.liquidation_auctions.avl_storage auction_ptr in let slices = List.map (fun ptr -> Avl.avl_read_leaf checker.liquidation_auctions.avl_storage ptr) slice_ptrs in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let ops, _ = Checker.entrypoint_touch_liquidation_slices (checker, slice_ptrs) in Note : opening LiquidationAuctionPrimitiveTypes locally here since we have overloaded * the " contents " record accessor in LiquidationAuctionTypes * the "contents" record accessor in LiquidationAuctionTypes *) let expected_ops = let open LiquidationAuctionPrimitiveTypes in List.rev (List.map ( fun slice -> let burrow = Option.get (Ligo.Big_map.find_opt slice.contents.burrow checker.burrows) in LigoOp.Tezos.address_nat_transaction (checker_address, tok_to_denomination_nat slice.contents.tok) (Ligo.tez_from_literal "0mutez") (Option.get (LigoOp.Tezos.get_entrypoint_opt "%burrowTransfer" (burrow_address burrow))) ) slices) in assert_operation_list_equal ~expected:expected_ops ~real:ops ); ("entrypoint_touch_burrow - emits expected operations" >:: fun _ -> Ligo.Tezos.reset (); Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero; let _, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, tok_of_denomination (Ligo.nat_from_literal "100_000_000n"))) in Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez"); let ops, _ = Checker.entrypoint_touch_burrow (checker, (alice_addr, Ligo.nat_from_literal "0n")) in assert_operation_list_equal ~expected:[] ~real:ops ); ("entrypoint_withdraw_collateral - emits expected operations" >:: fun _ -> Ligo.Tezos.reset 
(); Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let (_, burrow_no), checker = newly_created_burrow empty_checker "0n" (tok_of_denomination (Ligo.nat_from_literal "3_000_000n")) in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let ops, checker = Checker.entrypoint_withdraw_collateral (checker, (Ligo.nat_from_literal "0n", tok_of_denomination (Ligo.nat_from_literal "1_000_000n"))) in let burrow = Option.get (Ligo.Big_map.find_opt (alice_addr, burrow_no) checker.burrows) in let expected_ops = [ (LigoOp.Tezos.address_nat_transaction (alice_addr, (Ligo.nat_from_literal "1_000_000n")) (Ligo.tez_from_literal "0mutez") (Option.get (LigoOp.Tezos.get_entrypoint_opt "%burrowTransfer" (burrow_address burrow))) ); ] in assert_operation_list_equal ~expected:expected_ops ~real:ops ); ("calculate_touch_reward - expected result for last_touched 2s ago" >:: fun _ -> The division in this case should return a remainder < 1/2 Ligo.Tezos.reset (); let time_delta = 2 in remainder : 12000 / 36000 let expected_reward = Ligo.int_from_literal "3333" in let last_touched = Ligo.timestamp_from_seconds_literal 0 in Ligo.Tezos.new_transaction ~seconds_passed:time_delta ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let actual_reward = kit_to_denomination_int (Checker.calculate_touch_reward last_touched) in assert_int_equal ~expected:expected_reward ~real:actual_reward; ); ("calculate_touch_reward - expected result for last_touched 3s ago" >:: fun _ -> Ligo.Tezos.reset (); let time_delta = 3 in remainder : 0 let expected_reward = Ligo.int_from_literal "5000" in let last_touched = Ligo.timestamp_from_seconds_literal 0 in Ligo.Tezos.new_transaction ~seconds_passed:time_delta ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let actual_reward = kit_to_denomination_int (Checker.calculate_touch_reward last_touched) in assert_int_equal ~expected:expected_reward ~real:actual_reward; ); ("calculate_touch_reward - expected result for last_touched 4s ago" >:: fun _ -> The division in this case should return a remainder > 1/2 Ligo.Tezos.reset (); let time_delta = 4 in remainder : 24000 / 36000 let expected_reward = Ligo.int_from_literal "6666" in let last_touched = Ligo.timestamp_from_seconds_literal 0 in Ligo.Tezos.new_transaction ~seconds_passed:time_delta ~blocks_passed:2 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let actual_reward = kit_to_denomination_int (Checker.calculate_touch_reward last_touched) in assert_int_equal ~expected:expected_reward ~real:actual_reward; ); ("burn_kit - owner can burn" >:: fun _ -> Ligo.Tezos.reset (); let sender = alice_addr in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:Common.tez_zero; let _, checker = newly_created_burrow empty_checker "0n" (tok_of_denomination (Ligo.nat_from_literal "10_000_000n")) in Mint as much kit as possible Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal "0mutez"); let (ops, checker) = Checker.entrypoint_mint_kit ( checker , (Ligo.nat_from_literal "0n", kit_of_denomination (Ligo.nat_from_literal "4_285_714n")) ) in assert_operation_list_equal ~expected:[] ~real:ops; let kit_token = kit_of_denomination (Fa2Ledger.get_fa2_ledger_value checker.fa2_state.ledger (TokenMetadata.kit_token_id, sender)) in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 
~sender:sender ~amount:(Ligo.tez_from_literal "0mutez"); let _ = Checker.entrypoint_burn_kit (checker, (Ligo.nat_from_literal "0n", kit_token)) in () ); ("burn_kit - non-owner cannot burn" >:: fun _ -> Ligo.Tezos.reset (); Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let _, checker = newly_created_burrow empty_checker "0n" (tok_of_denomination (Ligo.nat_from_literal "10_000_000n")) in Mint as much kit as possible Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let (ops, checker) = Checker.entrypoint_mint_kit ( checker , (Ligo.nat_from_literal "0n", kit_of_denomination (Ligo.nat_from_literal "4_285_714n")) ) in assert_operation_list_equal ~expected:[] ~real:ops; assert_raises (Failure (Ligo.string_of_int error_NonExistentBurrow)) (fun () -> let kit_token = kit_of_denomination (Fa2Ledger.get_fa2_ledger_value checker.fa2_state.ledger (TokenMetadata.kit_token_id, bob_addr)) in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez"); Checker.entrypoint_burn_kit (checker, (Ligo.nat_from_literal "0n", kit_token)) ); () ); ( Ligo.Tezos.reset(); qcheck_to_ounit @@ QCheck.Test.make ~name:"test_buy_kit_respects_min_kit_expected" ~count:property_test_count make_inputs_for_buy_kit_to_succeed @@ fun (cfmm, ctok_amount, min_kit_expected, deadline) -> let sender = alice_addr in let checker = empty_checker_with_cfmm cfmm in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal "0mutez"); let ops, checker = Checker.entrypoint_buy_kit (checker, (ctok_amount, min_kit_expected, deadline)) in begin match ops with | [Transaction (FA2TransferTransactionValue transfer, _, _)] -> assert_fa2_transfer_list_equal ~expected:[ Fa2Interface.{ from_ = sender; txs = [ { to_ = checker_address; token_id = TokenMetadata.ctok_token_id; amount = ctok_to_denomination_nat ctok_amount; } ] } ] ~real:transfer | _ -> failwith ("Expected [Transaction (FA2TransferTransactionValue _, _, _)] but got " ^ show_operation_list ops) end; Ligo.geq_nat_nat senders_new_kit (Ligo.add_nat_nat senders_old_kit (kit_to_denomination_nat min_kit_expected)) ); ( Ligo.Tezos.reset(); qcheck_to_ounit @@ QCheck.Test.make ~name:"test_buy_kit_preserves_kit" ~count:property_test_count make_inputs_for_buy_kit_to_succeed @@ fun (cfmm, ctok_amount, min_kit_expected, deadline) -> let checker = empty_checker_with_cfmm cfmm in let sender = alice_addr in let checker_cfmm_old_kit = kit_to_denomination_nat checker.cfmm.kit in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal "0mutez"); let ops, checker = Checker.entrypoint_buy_kit (checker, (ctok_amount, min_kit_expected, deadline)) in let checker_cfmm_new_kit = kit_to_denomination_nat checker.cfmm.kit in begin match ops with | [Transaction (FA2TransferTransactionValue transfer, _, _)] -> assert_fa2_transfer_list_equal ~expected:[ Fa2Interface.{ from_ = sender; txs = [ { to_ = checker_address; token_id = TokenMetadata.ctok_token_id; amount = ctok_to_denomination_nat ctok_amount; } ] } ] ~real:transfer | _ -> failwith ("Expected [Transaction (FA2TransferTransactionValue _, _, _)] but got " ^ show_operation_list ops) end; Ligo.eq_nat_nat (Ligo.add_nat_nat checker_cfmm_old_kit senders_old_kit) (Ligo.add_nat_nat checker_cfmm_new_kit senders_new_kit) ); ( Ligo.Tezos.reset(); qcheck_to_ounit @@ QCheck.Test.make 
~name:"test_buy_kit_preserves_tez" ~count:property_test_count make_inputs_for_buy_kit_to_succeed @@ fun (cfmm, ctok_amount, min_kit_expected, deadline) -> let checker = empty_checker_with_cfmm cfmm in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, new_checker = Checker.entrypoint_buy_kit (checker, (ctok_amount, min_kit_expected, deadline)) in ctok_add checker.cfmm.ctok ctok_amount = new_checker.cfmm.ctok ); ( Ligo.Tezos.reset(); qcheck_to_ounit @@ QCheck.Test.make ~name:"test_sell_kit_respects_min_tez_expected" ~count:property_test_count make_inputs_for_sell_kit_to_succeed @@ fun (cfmm, kit_amount, min_ctok_expected, deadline) -> let sender = alice_addr in let checker = let checker = empty_checker_with_cfmm cfmm in { checker with parameters = { checker.parameters with circulating_kit = kit_add checker.parameters.circulating_kit kit_amount }; fa2_state = ledger_issue_kit (checker.fa2_state, sender, kit_amount); } in Checker.assert_checker_invariants checker; Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal "0mutez"); let ops, _ = Checker.entrypoint_sell_kit (checker, (kit_amount, min_ctok_expected, deadline)) in let bought_muctok = match ops with | [Transaction (FA2TransferTransactionValue [{from_=from_address; txs=[tx];}], _, _)] -> begin assert_address_equal ~expected:checker_address ~real:from_address; assert_address_equal ~expected:sender ~real:tx.to_; tx.amount end | _ -> failwith ("Expected [Transaction (FA2TransferTransactionValue [{from_=_; txs=[_];}], _, _)] but got " ^ show_operation_list ops) in ctok_of_denomination bought_muctok >= min_ctok_expected ); ( Ligo.Tezos.reset(); qcheck_to_ounit @@ QCheck.Test.make ~name:"test_sell_kit_preserves_kit" ~count:property_test_count make_inputs_for_sell_kit_to_succeed @@ fun (cfmm, kit_amount, min_ctok_expected, deadline) -> let sender = alice_addr in let checker = let checker = empty_checker_with_cfmm cfmm in { checker with parameters = { checker.parameters with circulating_kit = kit_add checker.parameters.circulating_kit kit_amount }; fa2_state = ledger_issue_kit (checker.fa2_state, sender, kit_amount); } in Checker.assert_checker_invariants checker; Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal "0mutez"); let _, new_checker = Checker.entrypoint_sell_kit (checker, (kit_amount, min_ctok_expected, deadline)) in kit_add checker.cfmm.kit kit_amount = new_checker.cfmm.kit ); ( Ligo.Tezos.reset(); qcheck_to_ounit @@ QCheck.Test.make ~name:"test_sell_kit_preserves_tez" ~count:property_test_count make_inputs_for_sell_kit_to_succeed @@ fun (cfmm, kit_amount, min_ctok_expected, deadline) -> let sender = alice_addr in let checker = let checker = empty_checker_with_cfmm cfmm in { checker with parameters = { checker.parameters with circulating_kit = kit_add checker.parameters.circulating_kit kit_amount }; fa2_state = ledger_issue_kit (checker.fa2_state, sender, kit_amount); } in Checker.assert_checker_invariants checker; Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal "0mutez"); let ops, new_checker = Checker.entrypoint_sell_kit (checker, (kit_amount, min_ctok_expected, deadline)) in let bought_muctok = match ops with | [Transaction (FA2TransferTransactionValue [{from_=from_address; txs=[tx];}], _, _)] -> begin assert_address_equal ~expected:checker_address ~real:from_address; 
assert_address_equal ~expected:sender ~real:tx.to_; tx.amount end | _ -> failwith ("Expected [Transaction (FA2TransferTransactionValue [{from_=_; txs=[_];}], _, _)] but got " ^ show_operation_list ops) in ctok_add new_checker.cfmm.ctok (ctok_of_denomination bought_muctok) = checker.cfmm.ctok ); ( let cfmm_kit = Ligo.nat_from_literal ("1_000n") in let cfmm_ctok = ctok_of_denomination (Ligo.nat_from_literal ("1_000n")) in (* The maximum amount of kit that you can buy with a finite amount of tez is (1 - fee) * cfmm.kit - 1; with cfmm.kit = 1_000 and the 998/1_000 factor used below this gives 0.998 * 1_000 - 1 = 997. *) let max_buyable_kit = 997 in let arb_kit = QCheck.map (fun x -> kit_of_denomination (Ligo.nat_from_literal (string_of_int x ^ "n"))) QCheck.(1 -- max_buyable_kit) in let arb_tez = TestArbitrary.arb_small_positive_tez in qcheck_to_ounit @@ QCheck.Test.make ~name:"buy_kit - returns geq min_kit_expected kit for transactions with sufficient tez" ~count:property_test_count (QCheck.pair arb_kit arb_tez) @@ fun (min_expected_kit, additional_tez) -> Ligo.Tezos.reset(); let sender = alice_addr in (* Populate cfmm with initial liquidity *) let open Ratio in let checker = empty_checker_with_cfmm { empty_checker.cfmm with ctok = cfmm_ctok; kit = kit_of_denomination cfmm_kit; } in (* Calculate the minimum tez needed to get the min_expected kit, given the state of the cfmm defined above *) let ratio_minimum_tez = div_ratio (ratio_of_nat cfmm_kit) ( sub_ratio (div_ratio (ratio_of_nat (Ligo.nat_from_literal "998n")) (ratio_of_nat (kit_to_denomination_nat min_expected_kit))) (ratio_of_nat (Ligo.nat_from_literal "1n")) ) in let minimum_tez = Ligo.mul_nat_tez (Ligo.abs (Common.cdiv_int_int ratio_minimum_tez.num ratio_minimum_tez.den)) (Ligo.tez_from_literal "1mutez") in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal "0mutez"); let ops, checker = Checker.entrypoint_buy_kit (checker, (ctok_provided, min_expected_kit, Ligo.timestamp_from_seconds_literal 1)) in begin match ops with | [Transaction (FA2TransferTransactionValue transfer, _, _)] -> assert_fa2_transfer_list_equal ~expected:[ Fa2Interface.{ from_ = sender; txs = [ { to_ = checker_address; token_id = TokenMetadata.ctok_token_id; amount = Ctok.ctok_to_denomination_nat ctok_provided; } ] } ] ~real:transfer | _ -> failwith ("Expected [Transaction (FA2TransferTransactionValue _, _, _)] but got " ^ show_operation_list ops) end; Ligo.geq_nat_nat senders_new_kit (Ligo.add_nat_nat senders_old_kit (kit_to_denomination_nat min_expected_kit)) ); (* FIXME: DISABLING THIS UNIT TEST. Disabled this unit test which was written for the case of indexCfmm.ml. Once we have a better way of testing different concrete cfmm implementations we should be able to re-enable this. *) (* let checker = empty_checker_with_cfmm { empty_checker.cfmm with ctok = ctok_of_denomination (Ligo.nat_from_literal "2n"); kit = kit_of_denomination (Ligo.nat_from_literal "2n"); } in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let ops, checker = Checker.entrypoint_buy_kit (checker, (ctok_of_denomination (Ligo.nat_from_literal "1_000_000n"), kit_of_denomination (Ligo.nat_from_literal "1n"), Ligo.timestamp_from_seconds_literal 1)) in let kit = get_balance_of checker alice_addr TokenMetadata.kit_token_id in let expected_ops = [ (LigoOp.Tezos.fa2_transfer_transaction [ Fa2Interface.{ from_ = alice_addr; txs = [ { to_ = checker_address; token_id = TokenMetadata.ctok_token_id; amount = Ligo.nat_from_literal "1_000_000n"; } ] } ] (Ligo.tez_from_literal "0mutez") (Option.get (LigoOp.Tezos.get_entrypoint_opt "%transfer" checker.external_contracts.ctok_fa2)) ); ] in assert_nat_equal ~expected:(Ligo.nat_from_literal "1n") ~real:kit; assert_operation_list_equal ~expected:expected_ops ~real:ops ); *) (* FIXME: DISABLING THIS UNIT TEST. Disabled this unit test which was written for the case of indexCfmm.ml. Once we have a better way of testing different concrete cfmm implementations we should be able to re-enable this. *) (* ("sell_kit - returns expected tez" >:: fun _ -> Ligo.Tezos.reset (); let kit_to_sell = kit_of_denomination (Ligo.nat_from_literal "1_000_000n") in let min_ctok_expected = ctok_of_denomination (Ligo.nat_from_literal "1n") in let checker = let checker = empty_checker_with_cfmm { empty_checker.cfmm with ctok = ctok_of_denomination (Ligo.nat_from_literal "2n"); kit = kit_of_denomination (Ligo.nat_from_literal "2n"); lqt = lqt_of_denomination (Ligo.nat_from_literal "1n"); } in { checker with parameters = { checker.parameters with circulating_kit = kit_add checker.parameters.circulating_kit kit_to_sell }; fa2_state = ledger_issue_kit (checker.fa2_state, alice_addr, kit_to_sell); } in Checker.assert_checker_invariants checker; Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let ops, _ = Checker.entrypoint_sell_kit (checker, (kit_to_sell, min_ctok_expected, Ligo.timestamp_from_seconds_literal 1)) in let expected_ops = [ (LigoOp.Tezos.fa2_transfer_transaction [ Fa2Interface.{ from_ = checker_address; txs = [ { to_ = alice_addr; token_id = TokenMetadata.ctok_token_id; amount = Ligo.nat_from_literal "1n"; } ] } ] (Ligo.tez_from_literal "0mutez") (Option.get (LigoOp.Tezos.get_entrypoint_opt "%transfer" checker.external_contracts.ctok_fa2)) ); ] in assert_operation_list_equal ~expected:expected_ops ~real:ops ); *) ("remove_liquidity - returns expected kit and tez" >:: fun _ -> Ligo.Tezos.reset (); let min_kit_expected = kit_of_denomination (Ligo.nat_from_literal "1n") in let min_ctok_expected = ctok_of_denomination (Ligo.nat_from_literal "1n") in let my_liquidity_tokens = lqt_of_denomination (Ligo.nat_from_literal "1n") in let sender = alice_addr in let checker = { empty_checker with parameters = { empty_checker.parameters with circulating_kit = kit_of_denomination (Ligo.nat_from_literal "1n")}; cfmm = { empty_checker.cfmm with ctok = ctok_of_denomination (Ligo.nat_from_literal "2n"); kit = kit_of_denomination (Ligo.nat_from_literal "2n"); lqt = lqt_of_denomination (Ligo.nat_from_literal "2n"); }; fa2_state = let fa2_state = initial_fa2_state in let fa2_state = ledger_issue_lqt (fa2_state, sender, my_liquidity_tokens) in let fa2_state = ledger_issue_kit (fa2_state, !Ligo.Tezos.self_address, kit_of_denomination (Ligo.nat_from_literal "1n")) in fa2_state; } in Checker.assert_checker_invariants checker; Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal "0mutez"); let ops, checker = Checker.entrypoint_remove_liquidity (checker, (my_liquidity_tokens, min_ctok_expected, min_kit_expected, Ligo.timestamp_from_seconds_literal 1)) in let ctok = match ops with | [Transaction (FA2TransferTransactionValue [{from_=from_address; txs=[tx];}], _, _)] -> begin assert_address_equal ~expected:checker_address ~real:from_address; assert_address_equal ~expected:sender ~real:tx.to_; tx.amount end | _ -> failwith ("Expected [Transaction (FA2TransferTransactionValue [{from_=_; txs=[_];}], _, _)] but got " ^ show_operation_list ops) in let kit = get_balance_of checker sender TokenMetadata.kit_token_id in assert_nat_equal ~expected:(Ligo.nat_from_literal "1n") ~real:kit; assert_nat_equal ~expected:(Ligo.nat_from_literal "1n") ~real:ctok; () ); ("fa2 scenario" >:: fun _ -> Ligo.Tezos.reset (); let checker = empty_checker in let initial_addr = Ligo.address_of_string "INIT_ADDR" in
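(* Scenario outline: INIT_ADDR creates a burrow, mints the maximum mintable kit against it, and adds liquidity to the cfmm; small amounts of kit and lqt are then transferred to alice and bob, after which the FA2 balance, operator and transfer entrypoints are exercised for alice, bob and leena. *)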
Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:initial_addr ~amount:Common.tez_zero; let _, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal "0n", None, tok_of_denomination (Ligo.nat_from_literal "100_000_000n"))) in let max_kit = Checker.view_burrow_max_mintable_kit ((initial_addr, Ligo.nat_from_literal "0n"), checker) in Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:initial_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal "0n", max_kit)) in Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:initial_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_add_liquidity ( checker, ( ctok_of_denomination (Ligo.nat_from_literal "5_000_000n") , kit_of_denomination (Ligo.nat_from_literal "5_000_000n") , lqt_of_denomination (Ligo.nat_from_literal "5n") , Ligo.timestamp_from_seconds_literal 999 ) ) in initialize alice , and leena accounts Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:initial_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.strict_entrypoint_transfer (checker, [ { from_ = initial_addr; txs = [ { to_ = alice_addr; token_id = TokenMetadata.kit_token_id; amount = Ligo.nat_from_literal "5n" }; { to_ = bob_addr; token_id = TokenMetadata.lqt_token_id; amount = Ligo.nat_from_literal "5n" } ]; }]) in let balance chk addr tok = Checker.view_get_balance ((addr, tok), chk) in assert_nat_equal ~real:(balance checker alice_addr TokenMetadata.kit_token_id) ~expected:(Ligo.nat_from_literal "5n"); assert_nat_equal ~real:(balance checker alice_addr TokenMetadata.lqt_token_id) ~expected:(Ligo.nat_from_literal "0n"); assert_nat_equal ~real:(balance checker bob_addr TokenMetadata.kit_token_id) ~expected:(Ligo.nat_from_literal "0n"); assert_nat_equal ~real:(balance checker bob_addr TokenMetadata.lqt_token_id) ~expected:(Ligo.nat_from_literal "5n"); assert_nat_equal ~real:(balance checker leena_addr TokenMetadata.kit_token_id) ~expected:(Ligo.nat_from_literal "0n"); assert_nat_equal ~real:(balance checker leena_addr TokenMetadata.lqt_token_id) ~expected:(Ligo.nat_from_literal "0n"); Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_update_operators (checker, [ (Add_operator { owner = bob_addr; operator = leena_addr; token_id = TokenMetadata.kit_token_id })]) in assert_equal true (Checker.view_is_operator ((bob_addr, (leena_addr, TokenMetadata.kit_token_id)), checker)); assert_equal false (Checker.view_is_operator ((bob_addr, (leena_addr, TokenMetadata.lqt_token_id)), checker)); assert_equal false (Checker.view_is_operator ((leena_addr, (bob_addr, TokenMetadata.kit_token_id)), checker)); Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.strict_entrypoint_transfer (checker, [ { from_=alice_addr; txs=[{to_=bob_addr; token_id=TokenMetadata.kit_token_id;amount=Ligo.nat_from_literal "2n"}]}]) in assert_nat_equal ~real:(balance checker alice_addr TokenMetadata.kit_token_id) ~expected:(Ligo.nat_from_literal "3n"); assert_nat_equal ~real:(balance checker bob_addr TokenMetadata.kit_token_id) ~expected:(Ligo.nat_from_literal "2n"); assert_raises (Failure "FA2_INSUFFICIENT_BALANCE") (fun () -> Checker.strict_entrypoint_transfer (checker, [ { from_=alice_addr; 
txs=[{to_=bob_addr; token_id=TokenMetadata.kit_token_id; amount=Ligo.nat_from_literal "10n"}]}])); Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:leena_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.strict_entrypoint_transfer (checker, [ { from_=bob_addr; txs=[{to_=alice_addr; token_id=TokenMetadata.kit_token_id; amount=Ligo.nat_from_literal "1n"}]}]) in assert_nat_equal ~real:(balance checker alice_addr TokenMetadata.kit_token_id) ~expected:(Ligo.nat_from_literal "4n"); assert_nat_equal ~real:(balance checker bob_addr TokenMetadata.kit_token_id) ~expected:(Ligo.nat_from_literal "1n"); but leena can not even send a single kit from 's account when he 's not an operator anymore Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_update_operators (checker, [ (Remove_operator { owner = bob_addr; operator = leena_addr; token_id = TokenMetadata.kit_token_id })]) in Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:leena_addr ~amount:(Ligo.tez_from_literal "0mutez"); assert_raises (Failure "FA2_NOT_OPERATOR") (fun () -> Checker.strict_entrypoint_transfer (checker, [ { from_=bob_addr; txs=[{to_=alice_addr; token_id=TokenMetadata.kit_token_id; amount=Ligo.nat_from_literal "1n"}]}])); () ); ("view_total_supply (FA2) - initial kit supply" >:: fun _ -> Ligo.Tezos.reset (); let total_kit_amount = Checker.view_total_supply (TokenMetadata.kit_token_id, empty_checker) in assert_nat_equal ~expected:(Ligo.nat_from_literal "0n") ~real:total_kit_amount; () ); ("view_total_supply (FA2) - initial lqt supply" >:: fun _ -> Ligo.Tezos.reset (); let total_lqt_amount = Checker.view_total_supply (TokenMetadata.lqt_token_id, empty_checker) in assert_nat_equal ~expected:(Ligo.nat_from_literal "0n") ~real:total_lqt_amount; () ); ("view_total_supply (FA2) - undefined token id" >:: fun _ -> assert_raises (Failure "FA2_TOKEN_UNDEFINED") (fun () -> Checker.view_total_supply (Ligo.nat_from_literal "3n", empty_checker)) ); ("view_all_tokens (FA2)" >:: fun _ -> Ligo.Tezos.reset (); let all_tokens = Checker.view_all_tokens ((), empty_checker) in assert_nat_list_equal ~expected:[ TokenMetadata.kit_token_id; TokenMetadata.lqt_token_id ] ~real:all_tokens; () ); ("entrypoint_liquidation_auction_place_bid: should only allow the current auction" >:: fun _ -> Ligo.Tezos.reset (); let checker = { empty_checker with last_index = Some (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_000n")) } in Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_touch (checker, ()) in Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:Common.tez_zero; let _, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal "0n", None, tok_of_denomination (Ligo.nat_from_literal "200_000_000n"))) in let max_kit = Checker.view_burrow_max_mintable_kit ((alice_addr, Ligo.nat_from_literal "0n"), checker) in Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal "0n", max_kit)) in let checker = { checker with last_index = Some (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "10_000_000n")) } in let _, checker = Checker.entrypoint_touch (checker, ()) in 
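(* A long stretch of time (1_000_000 seconds) passes before the next touch, presumably so that checker's protected index can catch up with the much larger last_index set above, which is what makes the burrow a liquidation candidate further below. *)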
Ligo.Tezos.new_transaction ~seconds_passed:1_000_000 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_touch (checker, ()) in Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_touch_burrow (checker, (alice_addr, Ligo.nat_from_literal "0n")) in let _, checker = Checker.entrypoint_mark_for_liquidation (checker, (alice_addr, Ligo.nat_from_literal "0n")) in let _, checker = Checker.entrypoint_touch (checker, ()) in let res = Checker.view_current_liquidation_auction_details ((), checker) in let other_ptr = match res.auction_id with AVLPtr i -> Ptr.ptr_next i in assert_raises (Failure (Ligo.string_of_int error_InvalidLiquidationAuction)) (fun () -> Checker.entrypoint_liquidation_auction_place_bid (checker, (AVLPtr other_ptr, res.minimum_bid))); ); ("can complete a liquidation auction" >:: fun _ -> Ligo.Tezos.reset (); let checker = empty_checker in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let _, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal "0n", None, tok_of_denomination (Ligo.nat_from_literal "200_000_000n"))) in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal "0n", kit_of_denomination (Ligo.nat_from_literal "10_000_000n"))) in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _lqt_minted_ret_kit_ops, checker = Checker.entrypoint_add_liquidity ( checker , ( ctok_of_denomination (Ligo.nat_from_literal "1_000_000n") , kit_one , lqt_of_denomination (Ligo.nat_from_literal "1n") , Ligo.timestamp_from_seconds_literal 1 ) let () = Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:Common.tez_zero; let (ops, checker0) = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal "0n", None, tez)) in let burrow_addr = burrow_address (Option.get (Ligo.Big_map.find_opt (bob_addr, Ligo.nat_from_literal "0n") checker0.burrows)) in let () = match ops with | [ CreateBurrowContract (_, cb_delegate, cb_tez, cb_storage) ; (Transaction (FA2TransferTransactionValue _, _, _)) as op ; ] -> assert_key_hash_option_equal ~expected:None ~real:cb_delegate; assert_tez_equal ~expected:Common.tez_zero ~real:cb_tez; assert_equal BurrowTypes.({checker_address=checker_address; collateral_fa2=collateral_fa2_addr}) cb_storage; assert_operation_equal ~expected:( LigoOp.Tezos.fa2_transfer_transaction [ Fa2Interface.( { from_ = bob_addr; txs = [ { to_ = burrow_addr; token_id = TokenMetadata.tok_token_id; amount = tok_to_denomination_nat tez; }; ]; } ) ] (Ligo.tez_from_literal "0mutez") (Option.get (LigoOp.Tezos.get_entrypoint_opt "%transfer" checker.external_contracts.collateral_fa2)) ) ~real:op | _ -> assert_failure ("Expected [CreateBurrowContract (_, _, _, _); Transaction (FA2TransferTransactionValue _, _, _)] but got " ^ show_operation_list ops) in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez"); let (ops, checker1) = Checker.entrypoint_deactivate_burrow (checker0, (Ligo.nat_from_literal "0n", alice_addr)) in assert_operation_list_equal ~expected:[ LigoOp.Tezos.address_nat_transaction (alice_addr, tok_to_denomination_nat tez) 
(Ligo.tez_from_literal "0mutez") (Option.get (LigoOp.Tezos.get_entrypoint_opt "%burrowTransfer" burrow_addr)) ] ~real:ops; Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:Common.tez_zero; let _ops, checker2 = Checker.entrypoint_activate_burrow (checker1, (Ligo.nat_from_literal "0n", tez)) in FIXME : cfmm contains a ratio , which can not be compared for equality using ( =) . So , the next line can give false positives . assert_equal checker0 checker2; () in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:Common.tez_zero; let (_, checker) = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal "0n", None, tok_of_denomination (Ligo.nat_from_literal "10_000_000n"))) in let burrow_id = (bob_addr, Ligo.nat_from_literal "0n") in let burrow_addr = burrow_address (Option.get (Ligo.Big_map.find_opt (bob_addr, Ligo.nat_from_literal "0n") checker.burrows)) in Mint as much kit as possible Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez"); let (_ops, checker) = Checker.entrypoint_mint_kit ( checker , (Ligo.nat_from_literal "0n", kit_of_denomination (Ligo.nat_from_literal "4_285_714n")) ) in let kit = get_balance_of checker bob_addr TokenMetadata.kit_token_id in assert_nat_equal ~expected:(Ligo.nat_from_literal "4_285_714n") ~real:kit; assert_bool "should not be overburrowed right after minting" (not @@ burrow_is_overburrowed checker.parameters (Option.get (Ligo.Big_map.find_opt burrow_id checker.burrows)) ); Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez"); assert_raises (Failure (Ligo.string_of_int error_MintKitFailure)) (fun () -> Checker.entrypoint_mint_kit ( checker , (Ligo.nat_from_literal "0n", kit_of_denomination (Ligo.nat_from_literal "1n")) ) ); Ligo.Tezos.new_transaction ~seconds_passed:60 ~blocks_passed:1 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _ops, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_001n")) in let ops, checker = Checker.entrypoint_touch_burrow (checker, burrow_id) in assert_operation_list_equal ~expected:[] ~real:ops; assert_bool "if the index goes up, then burrows should become overburrowed" (burrow_is_overburrowed checker.parameters (Option.get (Ligo.Big_map.find_opt burrow_id checker.burrows)) ); Ligo.Tezos.new_transaction ~seconds_passed:(211*60) ~blocks_passed:211 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez"); let kit_before_reward = get_balance_of checker bob_addr TokenMetadata.kit_token_id in let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_200_000n")) in let kit_after_reward = get_balance_of checker bob_addr TokenMetadata.kit_token_id in let touch_reward = Ligo.sub_nat_nat kit_after_reward kit_before_reward in let ops, checker = Checker.entrypoint_touch_burrow (checker, burrow_id) in assert_operation_list_equal ~expected:[] ~real:ops; assert_int_equal ~real:touch_reward; Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let (ops, checker) = Checker.entrypoint_mark_for_liquidation (checker, burrow_id) in assert_operation_list_equal ~expected:[ LigoOp.Tezos.address_nat_transaction (alice_addr, Ligo.nat_from_literal "1_009_000n") (Ligo.tez_from_literal "0mutez") (Option.get (LigoOp.Tezos.get_entrypoint_opt "%burrowTransfer" 
burrow_addr)) ] ~real:ops; let slice = (Ligo.Big_map.find_opt burrow_id checker.liquidation_auctions.burrow_slices) |> Option.get |> fun i -> i.youngest_slice in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez"); assert_raises (Failure (Ligo.string_of_int error_UnwarrantedCancellation)) (fun () -> Checker.entrypoint_cancel_liquidation_slice (checker, slice)); Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez"); assert_raises (Failure (Ligo.string_of_int error_InvalidLeafPtr)) (fun () -> let undefined_slice = LiquidationAuctionPrimitiveTypes.LeafPtr (ptr_next checker.liquidation_auctions.avl_storage.last_ptr) in Checker.entrypoint_cancel_liquidation_slice (checker, undefined_slice) ); Ligo.Tezos.new_transaction ~seconds_passed:(5*60) ~blocks_passed:5 ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez"); assert_raises (Failure (Ligo.string_of_int error_NoOpenAuction)) (fun () -> Checker.view_current_liquidation_auction_details ((), checker)); let kit_before_reward = get_balance_of checker bob_addr TokenMetadata.kit_token_id in let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_200_000n")) in let kit_after_reward = get_balance_of checker bob_addr TokenMetadata.kit_token_id in let touch_reward = Ligo.sub_nat_nat kit_after_reward kit_before_reward in assert_bool "should start an auction" (Option.is_some checker.liquidation_auctions.current_auction); assert_int_equal ~expected:(Ligo.int_from_literal "500_000") ~real:touch_reward; Ligo.Tezos.new_transaction ~seconds_passed:(5*60) ~blocks_passed:5 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let kit_before_reward = get_balance_of checker alice_addr TokenMetadata.kit_token_id in let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_200_000n")) in let kit_after_reward = get_balance_of checker alice_addr TokenMetadata.kit_token_id in let touch_reward = Ligo.sub_nat_nat kit_after_reward kit_before_reward in let min_bid = Checker.view_current_liquidation_auction_details ((), checker) in let auction_id = min_bid.auction_id in assert_kit_equal ~expected:(kit_of_denomination (Ligo.nat_from_literal "2_709_183n")) ~real:min_bid.minimum_bid; let (ops, checker) = Checker.entrypoint_liquidation_auction_place_bid (checker, (auction_id, min_bid.minimum_bid)) in assert_operation_list_equal ~expected:[] ~real:ops; let (ops, checker) = Checker.entrypoint_liquidation_auction_place_bid ( checker , (auction_id, kit_of_denomination (Ligo.nat_from_literal "4_200_000n")) ) in let auction_id = match checker.liquidation_auctions.current_auction with | None -> assert_failure "entrypoint_liquidation_auction_place_bid should have succeeded" | Some current_auction -> current_auction.contents in assert_operation_list_equal ~expected:[] ~real:ops; assert_int_equal ~expected:(Ligo.int_from_literal "500_000") ~real:touch_reward; Ligo.Tezos.new_transaction ~seconds_passed:(30*60) ~blocks_passed:30 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let kit_before_reward = get_balance_of checker alice_addr TokenMetadata.kit_token_id in let _ops, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_200_000n")) in let kit_after_reward = get_balance_of checker alice_addr TokenMetadata.kit_token_id in let touch_reward = Ligo.sub_nat_nat kit_after_reward 
kit_before_reward in assert_bool "auction should be completed" (Option.is_none checker.liquidation_auctions.current_auction); assert_int_equal ~expected:(Ligo.int_from_literal "21_000_000") ~real:touch_reward; (* FIXME: Operations differ between the FA2 deployment and the TEZ deployment. Check that all the requests for burrows to send tez come _before_ the request to the oracle to update the index. begin match ops with | [call] -> () | _ -> assert_failure ("Unexpected operations/operation order: " ^ show_operation_list ops) end; *) (* We don't need to touch the slice on this test case since Checker.entrypoint_touch_with_index already touches the oldest 5 slices. *) assert_raises (Failure (Ligo.string_of_int error_InvalidLeafPtr)) (fun () -> Checker.entrypoint_touch_liquidation_slices (checker, [slice])); assert_bool "burrow should have no liquidation slices" (Ligo.Big_map.find_opt burrow_id checker.liquidation_auctions.burrow_slices = None); let result = Option.get (Ligo.Big_map.find_opt burrow_id checker.burrows) in assert_tok_equal ~expected:tok_zero ~real:(burrow_collateral_at_auction result); Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let (ops, checker) = Checker.entrypoint_liquidation_auction_claim_win (checker, auction_id) in assert_operation_list_equal ~expected:[ (LigoOp.Tezos.fa2_transfer_transaction [ Fa2Interface.( { from_ = checker_address; txs = [ { to_ = alice_addr; token_id = TokenMetadata.tok_token_id; amount = Ligo.nat_from_literal "3_156_446n"; }; ]; } ) ] (Ligo.tez_from_literal "0mutez") (Option.get (LigoOp.Tezos.get_entrypoint_opt "%transfer" checker.external_contracts.collateral_fa2)) ); ] ~real:ops; assert_raises (Failure (Ligo.string_of_int error_InvalidAvlPtr)) (fun () -> Checker.entrypoint_liquidation_auction_claim_win (checker, auction_id)); () ); ("entrypoint_mark_for_liquidation - should not create empty slices" >:: fun _ -> Ligo.Tezos.reset (); let sender = alice_addr in let checker = empty_checker in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:Common.tez_zero; let (_, burrow_no) as burrow_id, checker = newly_created_burrow checker "0n" (tok_of_denomination (Ligo.nat_from_literal "2_001_001n")) in (* CALCULATIONS ~~~~~~~~~~~~ Tez in the burrow is (1_001_001mutez + 1tez) so the reward is (1tez + 1_001mutez = 1_001_001). This means that - The slice we WOULD send to auctions is empty. - The burrow that remains is empty so the next liquidation WOULD create another empty slice to auctions. *) (* Mint as much kit as possible. *) Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:sender ~amount:(Ligo.tez_from_literal "0mutez"); let (_ops, checker) = Checker.entrypoint_mint_kit (checker, (burrow_no, kit_of_denomination (Ligo.nat_from_literal "476_667n"))) in (* NOTE: I am a little surprised/worried about this being again 211... *) Ligo.Tezos.new_transaction ~seconds_passed:(60*blocks_passed) ~blocks_passed:blocks_passed ~sender:bob_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _ops, checker = Checker.entrypoint_touch_burrow (checker, burrow_id) in (* Ensure that the burrow is liquidatable. *)
begin match Ligo.Big_map.find_opt burrow_id checker.burrows with | None -> assert_failure "bug" | Some burrow -> assert_bool "burrow needs to be liquidatable for the test to be potent." (Burrow.burrow_is_liquidatable checker.parameters burrow); end; Let 's mark the burrow for liquidation now ( first pass : leaves it empty but active ) . Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let (_ops, checker) = Checker.entrypoint_mark_for_liquidation (checker, burrow_id) in Let 's mark the burrow for liquidation now ( second pass : deactivates it ) . Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let (_ops, checker) = Checker.entrypoint_mark_for_liquidation (checker, burrow_id) in () ); ("deposit_collateral - does not fail on untouched burrows" >:: fun _ -> Ligo.Tezos.reset (); let amount = Constants.creation_deposit in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amount)) in Touch checker Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_000n")) in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let _ = Checker.entrypoint_deposit_collateral (checker, (Ligo.nat_from_literal "0n", amount)) in () ); ("entrypoint_withdraw_collateral - does not fail on untouched burrows" >:: fun _ -> Ligo.Tezos.reset (); let amount = tok_add Constants.creation_deposit Constants.creation_deposit in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amount)) in Touch checker Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_000n")) in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let _ = Checker.entrypoint_withdraw_collateral (checker, (Ligo.nat_from_literal "0n", Constants.creation_deposit)) in () ); ("entrypoint_mint_kit - does not fail on untouched burrows" >:: fun _ -> Ligo.Tezos.reset (); let amount = tok_add Constants.creation_deposit Constants.creation_deposit in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amount)) in Touch checker Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_000n")) in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _ = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal "0n", kit_of_denomination (Ligo.nat_from_literal "1n"))) in () ); ("entrypoint_burn_kit - does not fail on untouched burrows" >:: fun _ -> 
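(* Same pattern as the other "does not fail on untouched burrows" tests: create a burrow (here also minting a little kit), advance time and touch checker so that checker's parameters move past the state the burrow was created under, and then call the entrypoint under test on the stale, untouched burrow; it is expected to succeed. *)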
Ligo.Tezos.reset (); let amount = tok_add Constants.creation_deposit Constants.creation_deposit in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amount)) in Mint some kit out of the burrow Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _ops, checker = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal "0n", kit_of_denomination (Ligo.nat_from_literal "1n"))) in Touch checker Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_000n")) in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _ = Checker.entrypoint_burn_kit (checker, (Ligo.nat_from_literal "0n", kit_of_denomination (Ligo.nat_from_literal "1n"))) in () ); ("entrypoint_activate_burrow - does not fail on untouched burrows" >:: fun _ -> Ligo.Tezos.reset (); let amount = Constants.creation_deposit in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amount)) in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _ops, checker = Checker.entrypoint_deactivate_burrow (checker, (Ligo.nat_from_literal "0n", !Ligo.Tezos.sender)) in Touch checker Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_000n")) in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let _ = Checker.entrypoint_activate_burrow (checker, (Ligo.nat_from_literal "0n", amount)) in () ); ("entrypoint_deactivate_burrow - does not fail on untouched burrows" >:: fun _ -> Ligo.Tezos.reset (); let amount = Constants.creation_deposit in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amount)) in Touch checker Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_000n")) in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _ = Checker.entrypoint_deactivate_burrow (checker, (Ligo.nat_from_literal "0n", !Ligo.Tezos.sender)) in () ); ("entrypoint_mark_for_liquidation - does not fail on untouched burrows" >:: fun _ -> Ligo.Tezos.reset (); let amount = Constants.creation_deposit in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amount)) in let burrow_id = (!Ligo.Tezos.sender, Ligo.nat_from_literal "0n") in Touch checker Ligo.Tezos.new_transaction 
~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_000n")) in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); assert_raises (Failure (Ligo.string_of_int error_NotLiquidationCandidate)) (fun () -> Checker.entrypoint_mark_for_liquidation (checker, burrow_id)); ); ("entrypoint_set_burrow_delegate - does not fail on untouched burrows" >:: fun _ -> Ligo.Tezos.reset (); let amount = Constants.creation_deposit in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amount)) in Touch checker Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_000n")) in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _ = Checker.entrypoint_set_burrow_delegate (checker, (Ligo.nat_from_literal "0n", None)) in () ); ("cfmm views" >::: let with_cfmm_setup f = fun _ -> Ligo.Tezos.reset (); let checker = empty_checker in let burrow_id = Ligo.nat_from_literal "42n" in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let _ops, checker = Checker.entrypoint_create_burrow (checker, (burrow_id, None, tok_of_denomination (Ligo.nat_from_literal "10_000_000n"))) in Ligo.Tezos.new_transaction ~seconds_passed:62 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _ops, checker = Checker.entrypoint_mint_kit (checker, (burrow_id, kit_one)) in Ligo.Tezos.new_transaction ~seconds_passed:121 ~blocks_passed:2 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let ctok_to_give = Ctok.ctok_of_denomination (Ligo.nat_from_literal "400_000n") in let kit_to_give = Kit.kit_of_denomination (Ligo.nat_from_literal "400_000n") in let min_lqt_to_mint = Lqt.lqt_of_denomination (Ligo.nat_from_literal "5n") in let deadline = Ligo.add_timestamp_int !Ligo.Tezos.now (Ligo.int_from_literal "20") in let _ops, checker = Checker.entrypoint_add_liquidity (checker, (ctok_to_give, kit_to_give, min_lqt_to_mint, deadline)) in Ligo.Tezos.new_transaction ~seconds_passed:59 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _ = f checker in () in [ "view_buy_kit_min_kit_expected" >:: with_cfmm_setup (fun checker -> let ctok_to_sell = Ctok.ctok_of_denomination (Ligo.nat_from_literal "100_000n") in let min_kit_to_buy = Checker.view_buy_kit_min_kit_expected (ctok_to_sell, checker) in let deadline = Ligo.add_timestamp_int !Ligo.Tezos.now (Ligo.int_from_literal "20") in Checker.entrypoint_buy_kit (checker, (ctok_to_sell, min_kit_to_buy, deadline))); "view_buy_kit_min_kit_expected - fail if no ctok is given" >:: with_cfmm_setup (fun checker -> assert_raises (Failure (Ligo.string_of_int error_BuyKitNoCtokGiven)) (fun () -> Checker.view_buy_kit_min_kit_expected (Ctok.ctok_zero, checker)) ); "view_sell_kit_min_ctok_expected" >:: with_cfmm_setup (fun checker -> let kit_to_sell = Kit.kit_of_denomination (Ligo.nat_from_literal "100_000n") in let min_ctok_to_buy = Checker.view_sell_kit_min_ctok_expected (kit_to_sell, 
checker) in let deadline = Ligo.add_timestamp_int !Ligo.Tezos.now (Ligo.int_from_literal "20") in Checker.entrypoint_sell_kit (checker, (kit_to_sell, min_ctok_to_buy, deadline))); "view_sell_kit_min_ctok_expected - fail if no kit is given" >:: with_cfmm_setup (fun checker -> assert_raises (Failure (Ligo.string_of_int error_SellKitNoKitGiven)) (fun () -> Checker.view_sell_kit_min_ctok_expected (Kit.kit_zero, checker)) ); "view_add_liquidity_max_kit_deposited / view_add_liquidity_min_lqt_minted" >:: with_cfmm_setup (fun checker -> let ctok_to_sell = Ctok.ctok_of_denomination (Ligo.nat_from_literal "100_000n") in let max_kit_to_sell = Checker.view_add_liquidity_max_kit_deposited (ctok_to_sell, checker) in let min_lqt_to_buy = Checker.view_add_liquidity_min_lqt_minted (ctok_to_sell, checker) in let deadline = Ligo.add_timestamp_int !Ligo.Tezos.now (Ligo.int_from_literal "20") in Checker.entrypoint_add_liquidity (checker, (ctok_to_sell, max_kit_to_sell, min_lqt_to_buy, deadline))); "view_add_liquidity_max_kit_deposited - fail if no ctok is given" >:: with_cfmm_setup (fun checker -> assert_raises (Failure (Ligo.string_of_int error_AddLiquidityNoCtokGiven)) (fun () -> Checker.view_add_liquidity_max_kit_deposited (Ctok.ctok_zero, checker)) ); "view_add_liquidity_min_lqt_minted - fail if no ctok is given" >:: with_cfmm_setup (fun checker -> assert_raises (Failure (Ligo.string_of_int error_AddLiquidityNoCtokGiven)) (fun () -> Checker.view_add_liquidity_min_lqt_minted (Ctok.ctok_zero, checker)) ); "view_remove_liquidity_min_ctok_withdrawn / view_remove_liquidity_min_kit_withdrawn" >:: with_cfmm_setup (fun checker -> let lqt_to_sell = Lqt.lqt_of_denomination (Ligo.nat_from_literal "5n") in let min_ctok_to_buy = Checker.view_remove_liquidity_min_ctok_withdrawn (lqt_to_sell, checker) in let min_kit_to_buy = Checker.view_remove_liquidity_min_kit_withdrawn (lqt_to_sell, checker) in let deadline = Ligo.add_timestamp_int !Ligo.Tezos.now (Ligo.int_from_literal "20") in Checker.entrypoint_remove_liquidity (checker, (lqt_to_sell, min_ctok_to_buy, min_kit_to_buy, deadline))); "view_remove_liquidity_min_ctok_withdrawn - fail if no liquidity is given" >:: with_cfmm_setup (fun checker -> assert_raises (Failure (Ligo.string_of_int error_RemoveLiquidityNoLiquidityBurned)) (fun () -> Checker.view_remove_liquidity_min_ctok_withdrawn (Lqt.lqt_zero, checker)) ); "view_remove_liquidity_min_ctok_withdrawn - too much lqt withdrawn (equal)" >:: with_cfmm_setup (fun checker -> let lqt_to_withdraw = checker.cfmm.lqt in assert_raises (Failure (Ligo.string_of_int error_RemoveLiquidityTooMuchLiquidityWithdrawn)) (fun () -> Checker.view_remove_liquidity_min_ctok_withdrawn (lqt_to_withdraw, checker)) ); "view_remove_liquidity_min_ctok_withdrawn - too much lqt withdrawn (more than)" >:: with_cfmm_setup (fun checker -> let lqt_to_withdraw = Lqt.lqt_add checker.cfmm.lqt (Lqt.lqt_of_denomination (Ligo.nat_from_literal "1n")) in assert_raises (Failure (Ligo.string_of_int error_RemoveLiquidityTooMuchLiquidityWithdrawn)) (fun () -> Checker.view_remove_liquidity_min_ctok_withdrawn (lqt_to_withdraw, checker)) ); "view_remove_liquidity_min_kit_withdrawn - fail if no liquidity is given" >:: with_cfmm_setup (fun checker -> assert_raises (Failure (Ligo.string_of_int error_RemoveLiquidityNoLiquidityBurned)) (fun () -> Checker.view_remove_liquidity_min_kit_withdrawn (Lqt.lqt_zero, checker)) ); "view_remove_liquidity_min_kit_withdrawn - too much lqt withdrawn (equal)" >:: with_cfmm_setup (fun checker -> let lqt_to_withdraw = checker.cfmm.lqt in 
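(* Burning lqt equal to (or exceeding) the pool's total is rejected: this case and the two that follow all expect error_RemoveLiquidityTooMuchLiquidityWithdrawn, i.e. the amount of liquidity withdrawn must be strictly less than what the cfmm holds. *)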
assert_raises (Failure (Ligo.string_of_int error_RemoveLiquidityTooMuchLiquidityWithdrawn)) (fun () -> Checker.view_remove_liquidity_min_kit_withdrawn (lqt_to_withdraw, checker)) ); "view_remove_liquidity_min_kit_withdrawn - too much lqt withdrawn (more than)" >:: with_cfmm_setup (fun checker -> let lqt_to_withdraw = Lqt.lqt_add checker.cfmm.lqt (Lqt.lqt_of_denomination (Ligo.nat_from_literal "1n")) in assert_raises (Failure (Ligo.string_of_int error_RemoveLiquidityTooMuchLiquidityWithdrawn)) (fun () -> Checker.view_remove_liquidity_min_kit_withdrawn (lqt_to_withdraw, checker)) ); ] ); ("view_burrow_max_mintable_kit - does not fail on untouched burrows" >:: fun _ -> Ligo.Tezos.reset (); let amount = Constants.creation_deposit in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amount)) in let burrow_id = (!Ligo.Tezos.sender, Ligo.nat_from_literal "0n") in Touch checker Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_000n")) in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _ = Checker.view_burrow_max_mintable_kit (burrow_id, checker) in () ); ("view_is_burrow_overburrowed - does not fail on untouched burrows" >:: fun _ -> Ligo.Tezos.reset (); let amount = Constants.creation_deposit in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amount)) in let burrow_id = (!Ligo.Tezos.sender, Ligo.nat_from_literal "0n") in Touch checker Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_000n")) in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _ = Checker.view_is_burrow_overburrowed (burrow_id, checker) in () ); ("view_is_burrow_liquidatable - does not fail on untouched burrows" >:: fun _ -> Ligo.Tezos.reset (); let amount = Constants.creation_deposit in Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:Common.tez_zero; let _ops, checker = Checker.entrypoint_create_burrow (empty_checker, (Ligo.nat_from_literal "0n", None, amount)) in let burrow_id = (!Ligo.Tezos.sender, Ligo.nat_from_literal "0n") in Touch checker Ligo.Tezos.new_transaction ~seconds_passed:1 ~blocks_passed:1 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.touch_with_index checker (TestLib.index_from_chf_in_tok (Ligo.nat_from_literal "1_000_000n")) in Try to view whether the untouched burrow is Ligo.Tezos.new_transaction ~seconds_passed:0 ~blocks_passed:0 ~sender:alice_addr ~amount:(Ligo.tez_from_literal "0mutez"); let _ = Checker.view_is_burrow_liquidatable (burrow_id, checker) in () ); ("view_current_liquidation_auction_details - raises error when there is no current auction" >:: fun _ -> Ligo.Tezos.reset (); let checker = empty_checker in assert_raises (Failure (Ligo.string_of_int error_NoOpenAuction)) (fun _ -> 
Checker.view_current_liquidation_auction_details ((), checker)) ); ("view_current_liquidation_auction_details - expected value for descending auction" >:: fun _ -> Ligo.Tezos.reset (); let checker = checker_with_active_auction () in let auction = Option.get checker.liquidation_auctions.current_auction in let auction_details = Checker.view_current_liquidation_auction_details ((), checker) in let expected_auction_details = { auction_id = auction.contents; collateral = tok_of_denomination (Ligo.nat_from_literal "23_669_648n"); minimum_bid = liquidation_auction_current_auction_minimum_bid auction; current_bid = None; remaining_blocks = None; remaining_seconds = None; } in assert_view_current_liquidation_auction_details_result_equal ~expected:expected_auction_details ~real:auction_details ); ("view_current_liquidation_auction_details - expected value for ascending auction" >:: fun _ -> Ligo.Tezos.reset (); let checker = checker_with_active_auction () in let auction = Option.get checker.liquidation_auctions.current_auction in (* Place a bid to turn the descending auction into an ascending one *) let bidder = bob_addr in let bid_amnt = liquidation_auction_current_auction_minimum_bid auction in Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bidder ~amount:Common.tez_zero; let _, checker = Checker.entrypoint_create_burrow (checker, (Ligo.nat_from_literal "1n", None, tok_of_denomination (Ligo.nat_from_literal "1_000_000_000n"))) in Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bidder ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_mint_kit (checker, (Ligo.nat_from_literal "1n", bid_amnt)) in Ligo.Tezos.new_transaction ~seconds_passed:10 ~blocks_passed:1 ~sender:bidder ~amount:(Ligo.tez_from_literal "0mutez"); let _, checker = Checker.entrypoint_liquidation_auction_place_bid (checker, (auction.contents, bid_amnt)) in Ligo.Tezos.new_transaction ~seconds_passed:500 ~blocks_passed:22 ~sender:bidder ~amount:(Ligo.tez_from_literal "0mutez"); let auction = Option.get checker.liquidation_auctions.current_auction in let auction_details = Checker.view_current_liquidation_auction_details ((), checker) in let expected_auction_details = { auction_id = auction.contents; collateral = tok_of_denomination (Ligo.nat_from_literal "23_669_648n"); minimum_bid = liquidation_auction_current_auction_minimum_bid auction; current_bid = Some LiquidationAuctionPrimitiveTypes.({address=bidder; kit=bid_amnt;}); remaining_blocks = Some (Ligo.int_from_literal "-2"); remaining_seconds = Some (Ligo.int_from_literal "700"); } in assert_view_current_liquidation_auction_details_result_equal ~expected:expected_auction_details ~real:auction_details ); ] let () = run_test_tt_main suite
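(* A minimal sketch of how a suite like this is typically built and run with dune; the test name and library list below are assumptions for illustration, not taken from this repository's actual build files:

     (test
      (name testChecker)
      (libraries ounit2 qcheck))

   With such a stanza in place, `dune runtest` builds the executable and executes `run_test_tt_main suite`, reporting each ">::" case and each QCheck property individually. *)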
">affine-cipher.lisp</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">(defpackage :affine-cipher (:use :cl) (:export :encode :decode)) (in-package :affine-cipher) (defun encode (plaintext a b)) (defun decode (ciphertext a b)) </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/exercism/common-lisp/4bf94609c7ef0f9ca7ec0b6dca04cc10314cb598/exercises/practice/affine-cipher/affine-cipher.lisp</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">lisp</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "></span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">(defpackage :affine-cipher (:use :cl) (:export :encode :decode)) (in-package :affine-cipher) (defun encode (plaintext a b)) (defun decode (ciphertext a b)) </span></div> </div></div> </td> </tr><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610288"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">b7ec84b46d276604ff9a1cdceaf78ec6d78cc1f28205716dd22d9eda14b7ea30</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">coq/coq</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">extend.mli</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) v * Copyright INRIA , CNRS and contributors < O _ _ _ , , * ( see version control and CREDITS file for authors & dates ) \VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * (* // * This file is distributed under the terms of the *) * GNU Lesser General Public License Version 2.1 (* * (see LICENSE file for the text of the license) *) (************************************************************************) (** Entry keys for constr notations *) type side = Left | Right type production_position = | BorderProd of side * Gramlib.Gramext.g_assoc option | InternalProd type production_level = | NextLevel | NumLevel of int | DefaultLevel (** Interpreted differently at the border or inside a rule *) val production_level_eq : production_level -> production_level -> bool (** User-level types used to tell how to parse or interpret of the non-terminal *) type 'a constr_entry_key_gen = | ETIdent | ETName | ETGlobal | ETBigint | ETBinder of bool (* open list of 
binders if true, closed list of binders otherwise *) | ETConstr of Constrexpr.notation_entry * Notation_term.notation_binder_kind option * 'a | ETPattern of bool * int option (* true = strict pattern, i.e. not a single variable *) (** Entries level (left-hand side of grammar rules) *) type constr_entry_key = (production_level * production_position) constr_entry_key_gen val constr_entry_key_eq : constr_entry_key -> constr_entry_key -> bool * Entries used in productions , vernac side ( e.g. " x bigint " or " x ident " ) type simple_constr_prod_entry_key = production_level constr_entry_key_gen (** Entries used in productions (in right-hand-side of grammar rules), to parse non-terminals *) type binder_target = ForBinder | ForTerm type binder_entry_kind = ETBinderOpen | ETBinderClosed of constr_prod_entry_key option * (bool * string) list and constr_prod_entry_key = as an ident as a name ( ident or _ ) as a global reference as an ( unbounded ) integer as name , or name : type or ' pattern , possibly in closed form as or pattern , or a subentry of those as pattern as a binder ( as subpart of a constr ) as non - empty list of constr , or subentries of those as non - empty list of local binders * { 5 AST for user - provided entries } type 'a user_symbol = | Ulist1 of 'a user_symbol | Ulist1sep of 'a user_symbol * string | Ulist0 of 'a user_symbol | Ulist0sep of 'a user_symbol * string | Uopt of 'a user_symbol | Uentry of 'a | Uentryl of 'a * int type ('a,'b,'c) ty_user_symbol = | TUlist1 : ('a,'b,'c) ty_user_symbol -> ('a list,'b list,'c list) ty_user_symbol | TUlist1sep : ('a,'b,'c) ty_user_symbol * string -> ('a list,'b list,'c list) ty_user_symbol | TUlist0 : ('a,'b,'c) ty_user_symbol -> ('a list,'b list,'c list) ty_user_symbol | TUlist0sep : ('a,'b,'c) ty_user_symbol * string -> ('a list,'b list,'c list) ty_user_symbol | TUopt : ('a,'b,'c) ty_user_symbol -> ('a option, 'b option, 'c option) ty_user_symbol | TUentry : ('a, 'b, 'c) Genarg.ArgT.tag -> ('a,'b,'c) ty_user_symbol | TUentryl : ('a, 'b, 'c) Genarg.ArgT.tag * int -> ('a,'b,'c) ty_user_symbol </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/coq/coq/f66b58cc7e6a8e245b35c3858989181825c591ce/parsing/extend.mli</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">ocaml</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">********************************************************************** * The Coq Proof Assistant / The Coq Development Team // * This file is distributed under the terms of the * (see LICENSE file for the text of the license) ********************************************************************** * Entry keys for constr notations * Interpreted differently at the border or inside a rule * User-level types used to tell how to parse or interpret of the non-terminal open list of binders if true, closed list of binders otherwise true = strict pattern, i.e. 
not a single variable * Entries level (left-hand side of grammar rules) * Entries used in productions (in right-hand-side of grammar rules), to parse non-terminals </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "> v * Copyright INRIA , CNRS and contributors < O _ _ _ , , * ( see version control and CREDITS file for authors & dates ) \VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * GNU Lesser General Public License Version 2.1 type side = Left | Right type production_position = | BorderProd of side * Gramlib.Gramext.g_assoc option | InternalProd type production_level = | NextLevel | NumLevel of int val production_level_eq : production_level -> production_level -> bool type 'a constr_entry_key_gen = | ETIdent | ETName | ETGlobal | ETBigint | ETConstr of Constrexpr.notation_entry * Notation_term.notation_binder_kind option * 'a type constr_entry_key = (production_level * production_position) constr_entry_key_gen val constr_entry_key_eq : constr_entry_key -> constr_entry_key -> bool * Entries used in productions , vernac side ( e.g. " x bigint " or " x ident " ) type simple_constr_prod_entry_key = production_level constr_entry_key_gen type binder_target = ForBinder | ForTerm type binder_entry_kind = ETBinderOpen | ETBinderClosed of constr_prod_entry_key option * (bool * string) list and constr_prod_entry_key = as an ident as a name ( ident or _ ) as a global reference as an ( unbounded ) integer as name , or name : type or ' pattern , possibly in closed form as or pattern , or a subentry of those as pattern as a binder ( as subpart of a constr ) as non - empty list of constr , or subentries of those as non - empty list of local binders * { 5 AST for user - provided entries } type 'a user_symbol = | Ulist1 of 'a user_symbol | Ulist1sep of 'a user_symbol * string | Ulist0 of 'a user_symbol | Ulist0sep of 'a user_symbol * string | Uopt of 'a user_symbol | Uentry of 'a | Uentryl of 'a * int type ('a,'b,'c) ty_user_symbol = | TUlist1 : ('a,'b,'c) ty_user_symbol -> ('a list,'b list,'c list) ty_user_symbol | TUlist1sep : ('a,'b,'c) ty_user_symbol * string -> ('a list,'b list,'c list) ty_user_symbol | TUlist0 : ('a,'b,'c) ty_user_symbol -> ('a list,'b list,'c list) ty_user_symbol | TUlist0sep : ('a,'b,'c) ty_user_symbol * string -> ('a list,'b list,'c list) ty_user_symbol | TUopt : ('a,'b,'c) ty_user_symbol -> ('a option, 'b option, 'c option) ty_user_symbol | TUentry : ('a, 'b, 'c) Genarg.ArgT.tag -> ('a,'b,'c) ty_user_symbol | TUentryl : ('a, 'b, 'c) Genarg.ArgT.tag * int -> ('a,'b,'c) ty_user_symbol </span></div> </div></div> </td> </tr><tr class="group cursor-pointer space-x-4 divide-x border-b outline-offset-[-2px] odd:bg-gray-50 hover:bg-gray-100 dark:odd:bg-gray-925 dark:hover:bg-gray-850 " tabindex="0" data-row-idx="610289"><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">3bf1a899432101bb7482989a15f22c893ae938fa93801a4d53a8da84702149d2</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">emanjavacas/cosycat</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">results_frame.cljs</span></div> </div></div> </td><td 
class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">(ns cosycat.review.components.results-frame (:require [reagent.core :as reagent] [re-frame.core :as re-frame] [react-bootstrap.components :as bs] [cosycat.components :refer [error-panel throbbing-panel]] [cosycat.app-utils :refer [parse-hit-id]] [cosycat.snippet :refer [snippet-modal]] [cosycat.annotation.components.annotation-component :refer [annotation-component]])) (defn highlight-fn [{{:keys [anns]} :meta}] (fn [{id :_id}] (contains? anns id))) (defn hit-row [hit-id] (let [hit-map (re-frame/subscribe [:project-session :review :results :results-by-id hit-id]) color-map (re-frame/subscribe [:project-users-colors])] (fn [hit-id] [:div.row (if (get-in @hit-map [:meta :throbbing?]) "loading..." [annotation-component @hit-map color-map :db-path :review :corpus (get-in @hit-map [:meta :corpus]) :editable? true :highlight-fn (highlight-fn @hit-map) :show-match? false :show-hit-id? true])]))) (defn sort-by-doc [hit-ids] (sort-by #(let [{:keys [hit-start doc-id]} (parse-hit-id %)] [doc-id hit-start]) hit-ids)) (defn results-frame [] (let [results (re-frame/subscribe [:project-session :review :results :results-by-id]) throbbing? (re-frame/subscribe [:throbbing? :review-frame])] (fn [] [:div.container-fluid (cond (empty? @results) [:div.row [error-panel {:status "Ooops! Found zero annotations"}]] @throbbing? [:div.row [throbbing-panel :throbber :horizontal-loader]] :else [:div.row (doall (for [hit-id (sort-by-doc (keys @results))] ^{:key (str "review-" hit-id)} [hit-row hit-id]))]) [snippet-modal :review]]))) </span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="text-right" dir="auto"><div class="text-right text-gray-400">null</div></div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">https://raw.githubusercontent.com/emanjavacas/cosycat/a7186363d3c0bdc7b714af126feb565f98793a6e/src/cljs/cosycat/review/components/results_frame.cljs</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">clojure</span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block "></span></div> </div></div> </td><td class="min-w-fit max-w-sm break-words p-2 "><div class="line-clamp-2 "><div class="" dir="auto"> <div> <span class="block ">(ns cosycat.review.components.results-frame (:require [reagent.core :as reagent] [re-frame.core :as re-frame] [react-bootstrap.components :as bs] [cosycat.components :refer [error-panel throbbing-panel]] [cosycat.app-utils :refer [parse-hit-id]] [cosycat.snippet :refer [snippet-modal]] [cosycat.annotation.components.annotation-component :refer [annotation-component]])) (defn highlight-fn [{{:keys [anns]} :meta}] (fn [{id :_id}] (contains? anns id))) (defn hit-row [hit-id] (let [hit-map (re-frame/subscribe [:project-session :review :results :results-by-id hit-id]) color-map (re-frame/subscribe [:project-users-colors])] (fn [hit-id] [:div.row (if (get-in @hit-map [:meta :throbbing?]) "loading..." [annotation-component @hit-map color-map :db-path :review :corpus (get-in @hit-map [:meta :corpus]) :editable? true :highlight-fn (highlight-fn @hit-map) :show-match? false :show-hit-id? 
ID: cb9ed9478a4d22f62b2ca09141e35797bb1fe89e08300a8ab41d4b959ce3b598
Repo: kiselgra/c-mera
File: version.lisp
Content:
(in-package :c-mera)

(defparameter *version* (asdf:component-version (asdf:find-system :c-mera)))
(defparameter *generator* :undefined)

(defun print-version ()
  (format t "~a~%" *version*))

Source: https://raw.githubusercontent.com/kiselgra/c-mera/d06ed96d50a40a3fefe188202c8c535d6784f392/src/c-mera/version.lisp
Language: lisp

ID: a1a25142787e2a42ab680a10e6441fea2092ed0d95338d09d9706830d633b512
Repo: jeffshrager/biobike
File: doc-objects.lisp
Content:
;;; -*- Package: help; mode: lisp; base: 10; Syntax: Common-Lisp; -*-

(in-package :help)

;;; +=========================================================================+
;;; | Copyright (c) 2002 - 2006 JP , ,                                        |
;;; |                                                                         |
;;; | Permission is hereby granted, free of charge, to any person obtaining   |
;;; | a copy of this software and associated documentation files (the         |
;;; | "Software"), to deal in the Software without restriction, including     |
;;; | without limitation the rights to use, copy, modify, merge, publish,     |
;;; | distribute, sublicense, and/or sell copies of the Software, and to      |
;;; | permit persons to whom the Software is furnished to do so, subject to   |
;;; | the following conditions:                                               |
;;; |                                                                         |
;;; | The above copyright notice and this permission notice shall be included |
;;; | in all copies or substantial portions of the Software.                  |
;;; |                                                                         |
;;; | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,         |
;;; | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF      |
;;; | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  |
;;; | IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY    |
;;; | CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,    |
;;; | TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE       |
;;; | SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.                  |
;;; +=========================================================================+

;;; Authors: JP Massar, .

;;; All the various types of documentation (theoretically) available
;;; in the system.
;;; Arguably, FUNCTION-DOCUMENTATION and SYMBOL-DOC should be merged

(defparameter *documentation-types*
  '(documentation-file
    function-documentation
    glossary-entry
    ;; macro-documentation
    module
    symbol-doc
    topic
    tutorial
    ;; variable-documentation
    ))

(defparameter *doc-types-hash-types*
  '((documentation-file equal)
    (function-documentation eq)
    (glossary-entry equalp)
    ;; macro-documentation
    (module equalp)
    (symbol-doc eq)
    (topic equalp)
    (tutorial equalp)
    ;; variable-documentation
    ))

;; Where all documentation objects are stored.
;; Use FIND-DOCUMENTATION to pull something out (eval-when (:compile-toplevel :load-toplevel :execute) (defun create-doc-hash-tables () (let ((ht (make-hash-table))) (loop for (doc-type hash-test) in *doc-types-hash-types* do (setf (gethash doc-type ht) (make-hash-table :test hash-test))) ht ))) (defvar *documentation* (create-doc-hash-tables)) (defun intern-documentation (name type) (or (find-documentation name type) (setf (gethash name (gethash type *documentation*)) (make-instance type :name name)))) (defun remove-documentation (name type) (remhash name (gethash type *documentation*)) (make-instance type :name name)) (defun find-documentation (name type) (gethash name (gethash type *documentation*))) (defun clear-documentation () (setf *documentation* (create-doc-hash-tables))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;; The hierarchy of documentation classes (defclass basicdoc () ((name :initarg :name :accessor name) ;; AKA 'summary' (docstring :initform nil :initarg :docstring :accessor docstring) (referred-to-by :initform nil :accessor referred-to-by) )) (defmethod summary ((obj basicdoc)) (docstring obj)) (defmethod text ((obj basicdoc)) nil) (defmethod keywords ((obj basicdoc)) nil) (defmethod see-also ((obj basicdoc)) nil) (defmethod explicitly-documented-p ((obj basicdoc)) nil) (defmethod author ((obj basicdoc)) nil) (defmethod print-object ((obj basicdoc) stream) (format stream "<Docobj ~A (~A)>" (help:name obj) (type-of obj))) (defclass documented (basicdoc) ((text :initform nil :accessor text) (keywords :initform nil :accessor keywords) (see-also :initform nil :accessor see-also) (author :initform nil :accessor author) (explicitly-documented-p :initform nil :accessor explicitly-documented-p))) (defclass mode-documented (documented) ((display-modes :initform (list :all) :accessor display-modes ))) (defclass documentation-file (mode-documented) ((label :initform nil :accessor label) (source-file :initform nil :accessor source-file) (associated-text-file :initform nil :accessor associated-text-file ) (matches :initform nil :accessor matches) (descriptor :initform nil :accessor descriptor) )) ;; the reader methods are defined in document-function.lisp (defclass function-documentation (documented module-element) ((parameters :initform nil :writer (setf parameters)) (return-values :initform nil :writer (setf return-values)) (syntax :initform nil :writer (setf syntax)) (vpl-syntax :initform nil :writer (setf vpl-syntax)) (examples :initform nil :writer (setf examples)) (examples-package :initform nil :writer (setf examples-package)) (synonyms :initform nil :writer (setf synonyms)) (flavor :initform :defun :writer (setf flavor)) (canonical :initform nil :accessor canonical) (aliased :initform nil :accessor aliased) )) (defmethod print-object ((obj function-documentation) stream) (print-symbol-docobj obj stream "DocFunc")) (defclass glossary-entry (documented) ()) ;; If/when actually implemented, should become a subtype of DOCUMENTED (defclass macro-documentation (basicdoc) ()) (defclass module (mode-documented) ((functions :initform nil :accessor functions) (variables :initform nil :accessor variables) (macros :initform nil :accessor macros) (submodules :initform nil :accessor submodules) (toplevel? :initform t :accessor toplevel?) (alpha-listing? :initform t :accessor alpha-listing?) 
)) (defclass symbol-doc (basicdoc) ( one of : special - operator , : define - function , : macro , : function , ;; :constant, :variable, or :type (stype :initform nil :initarg :stype :accessor stype) one of : function , : variable , or : type (dtype :initform nil :initarg :dtype :accessor dtype))) (defmethod print-object ((obj symbol-doc) stream) (print-symbol-docobj obj stream "Symbol")) (defclass topic (mode-documented) ()) (defclass tutorial (mode-documented) ;; :filename -- a string, must be full pathname : file - type -- either : html or : ;; :user-mode -- a keyword or a list of keywords ;; :sort-order -- an integer ;; :description -- a string, this is really the summary : section - header -- two strings , a title , and a color : lhtml - function -- used only wth file type lhtml , must be a symbol : start - function -- used only with file type : lhtml , must be a symbol ((filename :initform nil :accessor filename) (file-type :initform nil :accessor file-type) (user-mode :initform nil :accessor user-mode) (sort-order :initform nil :accessor sort-order) (description :initform nil :accessor description) (lhtml-function :initform nil :accessor lhtml-function) (start-function :initform nil :accessor start-function) (section-header :initform nil :accessor section-header) )) ;; If/when actually implemented, should become a subtype of DOCUMENTED (defclass variable-documentation (basicdoc) ()) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;; The definitions that create the verifiers and parsers for ;;; the definition forms for each documentation object. (define-doc-definer documentation-file def-documentation-file create-documentation-file ((:summary :one-or-none ddd-string-or-nil identity help:docstring) (:keywords :list ddd-all-symbols-or-strings identity help:keywords) (:see-also :list verify-see-also-entries parse-see-also-entries help:see-also) (:author :list ddd-all-strings identity help:author) (:descriptor :one-or-none ddd-string-or-nil identity help:descriptor) )) ;; function-documentation has no define-doc-definer, its verifer and parser ;; are implemented by hand in document-function.lisp (define-doc-definer glossary-entry def-glossary-entry create-glossary-entry ((:summary :one-or-none ddd-string-or-nil identity help:docstring) (:text :non-nil-list ddd-identity identity help:text) (:keywords :list ddd-all-symbols-or-strings identity help:keywords) (:see-also :list verify-see-also-entries parse-see-also-entries help:see-also) (:author :list ddd-all-strings identity help:author) )) ;; not doing macro-documentation for now since it is not used (define-doc-definer module def-module create-module ((:summary :one-or-none ddd-string-or-nil identity help:docstring) (:text :non-nil-list ddd-identity identity help:text) (:keywords :list ddd-all-symbols-or-strings identity help:keywords) (:see-also :list verify-see-also-entries parse-see-also-entries help:see-also) (:author :list ddd-all-strings identity help:author) (:functions :list ddd-all-symbols identity help:functions) (:variables :list ddd-all-symbols identity help:variables) (:macros :list ddd-all-symbols identity help:macros) (:submodules :list ddd-all-symbols identity help:submodules) (:toplevel? :exactly-one ddd-boolean identity help:toplevel?) (:alpha-listing? :exactly-one ddd-boolean identity help:alpha-listing?) 
(:display-modes :list ddd-all-symbols identity help:display-modes) ) :after-code (setf (explicitly-documented-p obj) t)) (defmacro document-module (name &body (docstring &rest meta)) `(def-module ,(string name) (:summary ,docstring) ,@meta)) (defmacro undocument-module (name &key remove-functions) `(progn (let ((module (find-documentation ',name 'module))) (if ,remove-functions (loop for fn in (functions module) do (remove-documentation fn 'function-documentation)) (let ((uncategorized (intern-documentation 'uncategorized 'module))) (loop for fn in (functions module) for fn-doc = (find-documentation fn 'function-documentation) when fn-doc do (setf (module fn-doc) uncategorized) (push fn (functions uncategorized)))))) (remove-documentation ',name 'module))) (defun modules () (hash-table-values (gethash 'module *documentation*))) (defparameter *uncategorized-key* "UNCATEGORIZED") ;;; Setup the Uncategorized module. (let ((uncategorized (intern-documentation *uncategorized-key* 'module))) (setf (docstring uncategorized) "Documented elements not part of any other module.")) ;; No symbol-doc creator macro because symbol-doc entries are reserved for ;; those exported symbols which do not have define-function entries. These ;; symbols are to be searched out at the end of the system load and ;; at that point symbol-doc objects are created for each such symbol ;; (using the below function) (defun create-symbol-doc (symbol &key docstring dtype stype) (make-instance 'help:symbol-doc :name symbol :docstring docstring :dtype dtype :stype stype)) (defun create-symbol-doc-entries (&key (mode :external)) (declare (ignore mode)) (loop with hash = (gethash 'help:symbol-doc *documentation*) with packages-not-to-search = (remove (find-package :cl-user) cl-user::*startup-packages*) with cl-package = (find-package :common-lisp) for package in (list-all-packages) do ;; The startup packages are those that exist at the start ;; of our system load. Hence we only look for symbols in our own packages , CL , and third party stuff we load , like PPCRE (unless (and (member package packages-not-to-search) (not (eq package cl-package))) (do-external-symbols (symbol package) (when (or (eq package cl-package) (not (eq (symbol-package symbol) cl-package))) (cond ((get symbol :alias-of) (create-alias-for symbol)) (t (vwhen (docs (maybe-create-symbol-docs symbol)) (setf (gethash symbol hash) docs) )))))))) ;; create a dummy function-documentation object whose only meaningful slots ;; are explicitly-documented-p, which is given the value :alias-of to denote ;; that this is a dummy, and see-also, which contains the real function ;; that the symbol is an alias for. 
(defun create-alias-for (symbol) (let ((real-function (get symbol :alias-of)) (docobj (intern-documentation symbol 'help:function-documentation))) (setf (explicitly-documented-p docobj) :alias-of) (setf (docstring docobj) (formatn "Alias for ~A" real-function)) (setf (see-also docobj) nil) ;; (list (find-documentation real-function 'help:function-documentation)) )) ;;; Create a set of HELP:SYMBOL-DOC data structures, for a symbol (defun maybe-create-symbol-docs (symbol) (remove-if 'null (list (when (fboundp symbol) ;; Don't create an entry if the symbol is already ;; documented by DOCUMENT-FUNCTION (unless (find-documentation symbol 'help:function-documentation) (create-symbol-doc symbol :docstring (documentation symbol 'function) :stype (cond ((special-operator-p symbol) :special-operator) ((define-function-p symbol) :define-function) ((macro-function symbol) :macro) (t :function)) :dtype :function ))) (when (boundp symbol) (create-symbol-doc symbol :docstring (documentation symbol 'variable) :stype (cond ((constantp symbol) :constant) (t :variable)) :dtype :variable )) ;; Figure out if a symbol denotes a type. Not portable. ;; This type checking causes the autoloading of the stream goo in ACL. (ignore-errors (typep nil symbol) (create-symbol-doc symbol :docstring (documentation symbol 'type) :stype :type :dtype :type ))))) (define-doc-definer topic def-topic create-topic ((:summary :one-or-none ddd-string-or-nil identity help:docstring) (:text :non-nil-list ddd-identity identity help:text) (:keywords :list ddd-all-symbols-or-strings identity help:keywords) (:see-also :list verify-see-also-entries parse-see-also-entries help:see-also) (:author :list ddd-all-strings identity help:author) )) The define - doc - definer for tutorials is in live-tutorial.lisp ;; not doing variable-documentation for now since it is not used #+not-used (defmacro document-variable (name docstring) `(let ((thing (intern-documentation ',name 'variable-documentation))) (setf (explicitly-documented-p thing) t) (setf (docstring thing) ,docstring))) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; Each documentation type has an associated URL which displays ;; the documentation object identified by the parameters of the URL ;; (which are taken from the SEE-ALSO data structure). See-also ;; lists can also contain objects which are not documentation-objects per se ( e.g. , references , URLs and frames ) (wb::define-url&pkg&args help-documentation-file-url "/new-help/help-documentation-file-url" :name) ;; File documentation doesn't have its own URL because the documentation directory already has its own AllegroServe PUBLISH - DIRECTORY url (wb::define-url&pkg&args help-function-documentation-url "/new-help/help-function-documentation-url" :name :package) (wb::define-url&pkg&args help-glossary-entry-url "/new-help/help-glossary-entry-url" :name) ;; not doing macro-documentation because it's not used. (wb::define-url&pkg&args help-module-url "/new-help/help-module-url" :name) (wb::define-url&pkg&args help-symbol-doc-url "/new-help/help-symbol-doc-url" :name :package :type) (wb::define-url&pkg&args help-topic-url "/new-help/help-topic-url" :name) (wb::define-url&pkg&args help-tutorial-url "/new-help/help-tutorial-url" :name) ;; not doing variable-documentation because it's not used. ;; URLs don't have their own URL because they are already a URL! ;; Frames don't have their own URL here because one exists already. 
;;; A page which lists all the glossary entries
(wb::define-url&pkg&args help-glossary-url "/new-help/help-glossary-url")

;;; A page which lists all the pertinent modules
(wb::define-url&pkg&args help-modules-url "/help/modules")

Source: https://raw.githubusercontent.com/jeffshrager/biobike/5313ec1fe8e82c21430d645e848ecc0386436f57/BioLisp/Help/doc-objects.lisp
Language: lisp
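doc-objects.lisp above keeps one hash table per documentation type inside a master table: FIND-DOCUMENTATION looks an object up by name and type, INTERN-DOCUMENTATION returns the existing object or creates and registers a fresh one, and REMOVE-DOCUMENTATION deletes an entry. The following is a minimal sketch of that registry pattern, written in Haskell for consistency with the other added examples and using hypothetical names (Registry, internDocumentation, ...), not the BioBike API:

import qualified Data.Map.Strict as M

type DocType = String
type DocName = String

data Doc = Doc { docName :: DocName, docString :: Maybe String } deriving (Show)

-- One inner map per documentation type, mirroring the nested hash tables.
type Registry = M.Map DocType (M.Map DocName Doc)

findDocumentation :: DocName -> DocType -> Registry -> Maybe Doc
findDocumentation name ty reg = M.lookup ty reg >>= M.lookup name

-- Return the existing object, or create, register and return a fresh one.
internDocumentation :: DocName -> DocType -> Registry -> (Doc, Registry)
internDocumentation name ty reg =
  case findDocumentation name ty reg of
    Just d  -> (d, reg)
    Nothing ->
      let d = Doc name Nothing
      in (d, M.insertWith M.union ty (M.singleton name d) reg)

removeDocumentation :: DocName -> DocType -> Registry -> Registry
removeDocumentation name ty = M.adjust (M.delete name) ty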
ID: 30a50b63c3ecf654875c6c6afdd4fa9e3bf4343a85b180670388b1bef846aa42
Repo: jixiuf/helloerlang
File: emysql_app.erl
Content:
%% Copyright (c) 2009 < > < >
%%
%% Permission is hereby granted, free of charge, to any person
%% obtaining a copy of this software and associated documentation
%% files (the "Software"), to deal in the Software without
%% restriction, including without limitation the rights to use,
%% copy, modify, merge, publish, distribute, sublicense, and/or sell
%% copies of the Software, and to permit persons to whom the
%% Software is furnished to do so, subject to the following
%% conditions:
%%
%% The above copyright notice and this permission notice shall be
%% included in all copies or substantial portions of the Software.
%%
%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
%% EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
%% OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
%% NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
%% HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
%% WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
%% FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
%% OTHER DEALINGS IN THE SOFTWARE.

-module(emysql_app).
-behaviour(application).

-export([start/2, stop/1, modules/0, default_timeout/0, lock_timeout/0, pools/0]).

-include("emysql.hrl").

start(_Type, _StartArgs) ->
    case of % "%MAKETIME%" -> ok; % happens with rebar build _ -> io:format("Build time: ~p~n", ) % end,
    emysql_sup:start_link().

stop(_State) ->
    lists:foreach(
      fun(Pool) ->
              lists:foreach(
                fun emysql_conn:close_connection/1,
                lists:append(queue:to_list(Pool#pool.available),
                             gb_trees:values(Pool#pool.locked)))
      end,
      emysql_conn_mgr:pools()),
    ok.
modules() ->
    {ok, Modules} = application_controller:get_key(emysql, modules),
    Modules.

default_timeout() ->
    case application:get_env(emysql, default_timeout) of
        undefined -> ?TIMEOUT;
        {ok, Timeout} -> Timeout
    end.

lock_timeout() ->
    case application:get_env(emysql, lock_timeout) of
        undefined -> ?LOCK_TIMEOUT;
        {ok, Timeout} -> Timeout
    end.

pools() ->
    case application:get_env(emysql, pools) of
        {ok, Pools} when is_list(Pools) -> Pools;
        _ -> []
    end.

Source: https://raw.githubusercontent.com/jixiuf/helloerlang/3960eb4237b026f98edf35d6064539259a816d58/gls/sgLogServer/deps/emysql/src/emysql_app.erl
Language: erlang
ID: 880432877c4576b2b53a6ab8d82cfcf659217ae9f40897338270e33faf2d362d
Repo: kenbot/church
File: ChurchList.hs
Content:
{-# LANGUAGE RankNTypes #-}
module ChurchList where

type CList a = forall r. (a -> r -> r) -> r -> r

cNil :: CList a
cNil f nil = nil

cCons :: a -> CList a -> CList a
cCons a clist = \f b -> f a (clist f b)

cListToList :: CList a -> [a]
cListToList clist = clist (:) []

listToCList :: [a] -> CList a
listToCList [] = cNil
listToCList (a : as) = a `cCons` (listToCList as)

cListToString :: Show a => CList a -> String
cListToString = show . cListToList

Source: https://raw.githubusercontent.com/kenbot/church/a3da46b584dde00b66da14943154f225f062eb86/ChurchList.hs
Language: haskell
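A small usage sketch for the Church-encoded list API above; it assumes the ChurchList module shown here is available to import, and the values and expected outputs are illustrative only:

{-# LANGUAGE RankNTypes #-}
module Main where

import ChurchList

main :: IO ()
main = do
  let xs = 1 `cCons` (2 `cCons` (3 `cCons` cNil)) :: CList Int
  -- A Church-encoded list is its own right fold, so consuming it is just applying it.
  print (xs (+) 0)          -- 6
  print (cListToList xs)    -- [1,2,3]
  -- Round-trip through an ordinary list and render with cListToString.
  putStrLn (cListToString (listToCList [True, False]))  -- [True,False]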
ID: 4fe3c1118b59f0dea430902df4d1007ca2565110ef0da6b5820a22aaf91e766e
Repo: vvvvalvalval/mapdag
File: default.cljc
Content:
(ns mapdag.test.runtime.default
  (:require [clojure.test :refer :all]
            [mapdag.test.core]
            [mapdag.runtime.default]))

(deftest compute--examples
  (mapdag.test.core/test-implementation--examples mapdag.runtime.default/compute))

Source: https://raw.githubusercontent.com/vvvvalvalval/mapdag/c0758a7dcd986e7062d80c3dd368ea769d0d5b41/test/mapdag/test/runtime/default.cljc
Language: clojure

ID: eb9144d628de1e3551cfcd9ac43fcb375008cf9a1d19819e0bdcb413fc287fd3
Repo: alevy/postgresql-orm
File: Model_old.hs
Content:
# LANGUAGE Trustworthy # {-# LANGUAGE RankNTypes #-} # LANGUAGE DeriveGeneric # # LANGUAGE MultiParamTypeClasses # {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE DeriveDataTypeable #-} # LANGUAGE ScopedTypeVariables # # LANGUAGE FunctionalDependencies # # LANGUAGE
TypeOperators # # LANGUAGE FlexibleInstances # # LANGUAGE DefaultSignatures # module Database.PostgreSQL.ORM.LIO.Model where import qualified Data.ByteString as S import qualified Database.PostgreSQL.Simple as M import Database.PostgreSQL.Simple.Types import Database.PostgreSQL.ORM.Model (Model, GDBRef) import qualified Database.PostgreSQL.ORM.Model as M import qualified Database.PostgreSQL.ORM.DBSelect as M import qualified Database.PostgreSQL.ORM as M import Data.Typeable import LIO import LIO.DCLabel import LIO.TCB import GHC.Generics import Database.PostgreSQL.ORM.CreateTable import Data.Vector (Vector, toList) findAllP :: (Model r, ModelPolicy c r m) => Connection c -> DC [DCLabeled m] findAllP (ConnectionTCB c dcc) = do rows <- ioTCB $ M.dbSelect c selectModel mapM (labelModel dcc) rows findRow :: (Model r, ModelPolicy c r m) => Connection c -> GDBRef rt r -> DC (Maybe (DCLabeled m)) findRow (ConnectionTCB c dcc) k = do mrow <- ioTCB $ M.findRow c k case mrow of Nothing -> return Nothing Just row -> labelModel dcc row >>= \lr -> return $ Just lr data Connection c = ConnectionTCB M.Connection c class DCConnection c => ModelPolicy c a b | a -> b, b -> c, b -> a where labelModel :: c -> a -> DC (DCLabeled b) selectModel :: M.DBSelect a default selectModel :: (Model a) => M.DBSelect a selectModel = M.modelDBSelect lookupModel :: M.DBSelect a default lookupModel :: Model a => M.DBSelect a lookupModel = let primKey = M.modelQPrimaryColumn (M.modelIdentifiers :: M.ModelIdentifiers a) in M.addWhere_ (Query $ S.concat [primKey, " = ?"]) $ M.modelDBSelect class Typeable c => DCConnection c where newConnection :: DCPriv -> c connect :: forall c. DCConnection c => DC (Connection c) connect = do let tc = typeRepTyCon $ typeOf (undefined :: c) pd = concat [ tyConPackage tc , ":" , tyConModule tc , "." , tyConName tc ] cpriv = PrivTCB $ toCNF $ principal pd M.defaultConnectInfo -- { M.connectDatabase = pd } return $ ConnectionTCB conn $ newConnection cpriv --- EXAMPLE data MyConn = MyConnTCB DCPriv deriving (Typeable) instance DCConnection MyConn where newConnection = MyConnTCB data Owner = Owner { ownerId :: M.DBKey , ownerPrincipal :: String } deriving (Generic, Show) data Region = Region { regionId :: M.DBKey , regionName :: String , regionOwner :: M.DBRef Owner } deriving (Generic, Show) instance Model Region where modelInfo = M.underscoreModelInfo "region" instance Model Owner where modelInfo = M.underscoreModelInfo "region" instance ModelPolicy MyConn (Region M.:. Owner) Region where selectModel = M.addExpression "" $ M.modelDBSelect labelModel (MyConnTCB mypriv) (region M.:. 
owner) = do labelP mypriv (ownerPrincipal owner \/ mypriv %% ownerPrincipal owner \/ mypriv) region instance ModelPolicy MyConn Owner Owner where labelModel (MyConnTCB mypriv) owner = labelP mypriv (True %% mypriv) owner data Owners = Owners { ownersId :: M.DBKey, owners :: Vector String } deriving (Generic, Typeable) instance Model Owners where

Source: https://raw.githubusercontent.com/alevy/postgresql-orm/9316db2f226c512036c2b72983020f6bdefd41bd/src/Database/PostgreSQL/ORM/LIO/Model_old.hs
Language: haskell
ID: 079a9581678803d1db11a5532e99753bb7f9380076b7da3e20f4f3725a4665f2
Repo: fission-codes/fission
File: Init.hs
Content:
-- | Initialize a new Fission app in an existing directory module Fission.CLI.Handler.App.Init (appInit) where import qualified Crypto.PubKey.Ed25519 as Ed25519 import qualified Data.Yaml as YAML import qualified System.Console.ANSI as ANSI import Fission.Prelude import qualified Fission.App.Name as App import Fission.Authorization.ServerDID import Fission.Error.Types import qualified Fission.Internal.UTF8 as UTF8 import Fission.Web.Auth.Token.Types import Fission.Web.Client import Fission.CLI.Display.Text import qualified Fission.CLI.Display.Error as CLI.Error import qualified Fission.CLI.Display.Success as CLI.Success import qualified Fission.CLI.App.Environment as App.Env import qualified Fission.CLI.Prompt.BuildDir as BuildDir import Fission.CLI.Environment import Fission.CLI.WebNative.Mutation.Auth.Store as UCAN -- | Sync the current working directory to the server over IPFS appInit :: ( MonadIO m , MonadTime m , MonadLogger m , MonadEnvironment m , UCAN.MonadStore m , MonadWebClient m , ServerDID m , MonadCleanup m , m `Raises` ClientError , m `Raises` YAML.ParseException , m `Raises` NotFound FilePath , Contains (Errors m) (Errors m) , Display (OpenUnion (Errors m)) , Show (OpenUnion (Errors m)) , MonadWebAuth m Token , MonadWebAuth m Ed25519.SecretKey ) => FilePath -> Maybe FilePath -> Maybe App.Name -> m () appInit appDir mayBuildDir' mayAppName = do logDebug @Text "appInit" proof <- getRootUserProof attempt (sendAuthedRequest proof $ createApp mayAppName) >>= \case Left err -> do logDebug $ textDisplay err
Row 610296
  hash: 079a9581678803d1db11a5532e99753bb7f9380076b7da3e20f4f3725a4665f2
  repo: fission-codes/fission
  path: Init.hs
  language: haskell
  source: https://raw.githubusercontent.com/fission-codes/fission/e5a5d6f30fb4451918efba5b72787cbc7632aecf/fission-cli/library/Fission/CLI/Handler/App/Init.hs
  extracted comments: -- | Initialize a new Fission app in an existing directory
  content:

-- | Initialize a new Fission app in an existing directory
module Fission.CLI.Handler.App.Init (appInit) where

import qualified Crypto.PubKey.Ed25519 as Ed25519
import qualified Data.Yaml as YAML
import qualified System.Console.ANSI as ANSI

import Fission.Prelude

import qualified Fission.App.Name as App
import Fission.Authorization.ServerDID
import Fission.Error.Types
import qualified Fission.Internal.UTF8 as UTF8
import Fission.Web.Auth.Token.Types
import Fission.Web.Client

import Fission.CLI.Display.Text
import qualified Fission.CLI.Display.Error as CLI.Error
import qualified Fission.CLI.Display.Success as CLI.Success
import qualified Fission.CLI.App.Environment as App.Env
import qualified Fission.CLI.Prompt.BuildDir as BuildDir
import Fission.CLI.Environment
import Fission.CLI.WebNative.Mutation.Auth.Store as UCAN

-- | Sync the current working directory to the server over IPFS
appInit ::
  ( MonadIO m
  , MonadTime m
  , MonadLogger m
  , MonadEnvironment m
  , UCAN.MonadStore m
  , MonadWebClient m
  , ServerDID m
  , MonadCleanup m
  , m `Raises` ClientError
  , m `Raises` YAML.ParseException
  , m `Raises` NotFound FilePath
  , Contains (Errors m) (Errors m)
  , Display (OpenUnion (Errors m))
  , Show (OpenUnion (Errors m))
  , MonadWebAuth m Token
  , MonadWebAuth m Ed25519.SecretKey
  )
  => FilePath
  -> Maybe FilePath
  -> Maybe App.Name
  -> m ()
appInit appDir mayBuildDir' mayAppName = do
  logDebug @Text "appInit"
  proof <- getRootUserProof
  attempt (sendAuthedRequest proof $ createApp mayAppName) >>= \case
    Left err -> do
      logDebug $ textDisplay err
      CLI.Error.put err $ textDisplay err
      raise err
    Right appURL -> do
      logDebug $ "Created app " <> textDisplay appURL
      case mayBuildDir' of
        Nothing -> do
          guess <- BuildDir.prompt appDir
          App.Env.create appURL $ fromMaybe guess mayBuildDir'
        Just dir -> do
          logDebug $ "BuildDir passed from flag: " <> dir
          App.Env.create appURL dir
      CLI.Success.putOk $ "App initialized as " <> textDisplay appURL
      UTF8.putText "⏯️ Next, run "
      colourized [ANSI.SetColor ANSI.Foreground ANSI.Vivid ANSI.Blue] do
        UTF8.putText "fission app publish [--open|--watch]"
      UTF8.putText " to sync data\n"
      UTF8.putText "💁 It may take DNS time to propagate this initial setup globally. In this case, you can always view your app at "
      colourized [ANSI.SetColor ANSI.Foreground ANSI.Vivid ANSI.Blue] do
        UTF8.putText $ "/" <> textDisplay appURL <> "\n"
      return ()

Row 610297
  hash: 755f3076fcb051d464f7dacf35ce09e4f8a5bb8d808cd80d344aad7c19313bc2
  repo: dbuenzli/remat
  path: descr.ml
  language: ocaml
  source: https://raw.githubusercontent.com/dbuenzli/remat/28d572e77bbd1ad46bbfde87c0ba8bd0ab99ed28/src-remat/descr.ml
  extracted comments: Repository description Description filename lookup Description decoder Member lookup Formatting TODO better error reports, correct string extractors. # let err fmt = Printf.ksprintf (fun e -> R.error e) fmt let rec product vss = (* ordered cartesian product of lists. Variable environements
  content (as extracted; the original line breaks were not preserved):

--------------------------------------------------------------------------- Copyright 2012 . All rights reserved . Distributed under the BSD3 license , see license at the end of the file . % % NAME%% release % % --------------------------------------------------------------------------- Copyright 2012 Daniel C. Bünzli. All rights reserved. Distributed under the BSD3 license, see license at the end of the file.
%%NAME%% release %%VERSION%% ---------------------------------------------------------------------------*) open Rresult open Bos (* Repository description *) type t = { dir : Path.t; mutable repo : Ddescr.Repo.t option; mutable index_ids : D.index_id list option; indexes : (D.index_id, Ddescr.Index.t) Hashtbl.t; mutable doc_ids : D.doc_id list option; docs : (D.doc_id, Ddescr.Doc.t * Ddescr.Doc.meta) Hashtbl.t; } (* Description filename lookup *) let warn_junk_file = format_of_string "suspicious file `%a` in %s directory" let err_miss_repo p _ = R.msgf "no repository description file `%a'" Path.pp p let err_miss_dir dir p _ = R.msgf "missing %s directory `%a'" dir Path.pp p let err_miss_file k id p _ = R.msgf "%s `%s': missing description file `%a'" k id Path.pp p let lookup_file err_msg f = (OS.File.exists ~err:true f >>= fun _ -> R.ok f) |> R.reword_error_msg ~replace:true (err_msg f) let lookup_dir err_msg d = (OS.Dir.exists ~err:true d >>= fun _ -> R.ok d) |> R.reword_error_msg ~replace:true (err_msg d) let repo_file d = lookup_file err_miss_repo Path.(d.dir / "repo.json") let index_path d = Path.(d.dir / "i") let index_dir d = lookup_dir (err_miss_dir "index") (index_path d) let index_file d id = let err = err_miss_file "index" id in lookup_file err Path.(index_path d / strf "%s.json" id) let doc_path d = Path.(d.dir / "d") let doc_dir d = lookup_dir (err_miss_dir "document") (doc_path d) let doc_file d id = let err = err_miss_file "document" id in lookup_file err Path.(doc_path d / strf "%s.json" id) (* Description decoder *) let decode_file file codec = let decode ic () = let d = Jsonm.decoder (`Channel ic) in let d = Jsont.decoder ~dups:`Error ~unknown:`Error d codec in let rec loop () = match Jsont.decode d with | `Ok v -> R.ok v | `Await -> loop () | `Error (loc, e) -> let err = (Jsont.error_to_string e) in Log.show "%a:%a: %s" Path.pp file Fmt.pp_range loc err; loop () in loop () in OS.File.with_inf decode file () let create dir = OS.Dir.exists ~err:true dir >>= fun _ -> R.ok { dir; repo = None; index_ids = None; indexes = Hashtbl.create 100; doc_ids = None; docs = Hashtbl.create 1000; } let rec repo d = match d.repo with | Some r -> r | None -> (repo_file d >>= fun file -> decode_file file Ddescr.Repo.codec) |> Log.on_error_msg ~use:Jsont.(invalid_def (default Ddescr.Repo.codec)) |> fun (_, r) -> d.repo <- Some r; r let find_ids kind dir = let add_id acc p = if Path.has_ext `Json p then Path.(basename (rem_ext p)) :: acc else (Log.warn warn_junk_file Path.pp p kind; acc) in (dir >>= OS.Dir.contents >>= fun paths -> R.ok (List.fold_left add_id [] paths)) |> Log.on_error_msg ~use:[] let index_ids d = match d.index_ids with | Some ids -> ids | None -> let ids = find_ids "index" (index_dir d) in d.index_ids <- Some ids; ids let index d id = match try Some (Hashtbl.find d.indexes id) with Not_found -> None with | Some i -> i | None -> (index_file d id >>= fun file -> decode_file file Ddescr.Index.codec) |> Log.on_error_msg ~use:Jsont.(invalid_def (default Ddescr.Index.codec)) |> fun (_, i) -> Hashtbl.add d.indexes id i; i let doc_ids d = match d.doc_ids with | Some ids -> ids | None -> let ids = find_ids "document" (doc_dir d) in d.doc_ids <- Some ids; ids FIXME see if modification of jsont can avoid double parse match try Some (Hashtbl.find d.docs id) with Not_found -> None with | Some d -> d | None -> (doc_file d id >>= fun file -> decode_file file Ddescr.Doc.codec >>= fun (_, doc) -> decode_file file Jsont.json >>= fun (_, meta) -> R.ok (doc, meta)) |> Log.on_error_msg 
~use:(Jsont.(default Ddescr.Doc.codec), `O []) |> fun doc -> Hashtbl.add d.docs id doc; doc (* Member lookup *) let path_to_str ps = String.concat "." ps let value_type = function | `Null -> "null" | `Bool _ -> "boolean" | `Float _ -> "number" | `String _ -> "string" | `A _ -> "array" | `O _ -> "object" let err_find_type path seen j = R.error_msgf "path %s stops at %s: value of type %s" (path_to_str path) (path_to_str seen) (value_type j) let err_find_name path seen = R.error_msgf "path %s stops at %s: no such member." (path_to_str path) (path_to_str seen) let json_find path j = let rec loop j seen = function | [] -> R.ok j | p :: ps -> match j with | `O mems -> begin match try Some (List.assoc p mems) with Not_found -> None with | None -> err_find_name path (List.rev (p :: seen)) | Some j -> loop j (p :: seen) ps end | j -> err_find_type path (List.rev (p :: seen)) j in loop j [] path let lookup_to_str = function | `Bool b -> R.ok (strf "%b" b) | `Float f -> R.ok (strf "%g" f) | `String s -> R.ok s | `A _ | `O _ | `Null as v -> R.error_msgf "unexpected %s in member data" (value_type v) let lookup path obj = json_find path obj >>= function | `A vs -> let rec loop acc = function | v :: vs -> lookup_to_str v >>= fun s -> loop (s :: acc) vs | [] -> R.ok (List.rev acc) in loop [] vs | v -> lookup_to_str v >>= fun s -> R.ok [s] (* Formatting TODO better error reports, correct string extractors. *) let parse_fuzzy_date s = let is_digit c = (0x0030 <= c && c <= 0x0039) || c = 0x23 (* # *) in let check_digits n s = let len = String.length s in if len <> n then false else try for i = 0 to len - 1 do if not (is_digit (Char.code s.[i])) then raise Exit done; true with Exit -> false in match String.split ~sep:"-" s with | [y; m; d] when check_digits 4 y && check_digits 2 m && check_digits 2 d -> R.ok (y, Some m, Some d) | [y; m] when check_digits 4 y && check_digits 2 m -> R.ok (y, Some m, None) | [y] when check_digits 4 y -> R.ok (y, None, None) | _ -> R.error_msgf "could not parse fuzzy date (%s)" s let map_todo m = let err = R.msgf "map %s is unimplemented" m in Ok (fun s -> R.error (err, s)) (* let err fmt = Printf.ksprintf (fun e -> R.error e) fmt *) let err_map ~use fmt = Printf.ksprintf (fun e -> R.error (`Msg e, use)) fmt let map_case var kind = match kind with | "less" | "lower" | "upper" -> map_todo ("case_" ^ kind) | _ -> R.error_msgf "variable $(%s): unknown case map kind `%s`" var kind TODO implement dates correctly let map_date_y s = match parse_fuzzy_date s with | Error err -> Error (err, s) | Ok (y, _, _) -> Ok y let map_date_yy s = match parse_fuzzy_date s with | Error err -> Error (err, s) | Ok (y, _, _) -> Ok (String.sub y 2 2) let map_date_yyyy s = match parse_fuzzy_date s with | Error err -> Error (err, s) | Ok (y, _, _) -> Ok y let map_date_m s = match parse_fuzzy_date s with | Error err -> Error (err, s) | Ok (_, m, _) -> Ok (match m with None -> "#" | Some m -> m) let map_date_mm s = match parse_fuzzy_date s with | Error err -> Error (err, s) | Ok (_, m, _) -> Ok (match m with None -> "##" | Some m -> m) let map_date_d s = match parse_fuzzy_date s with | Error err -> Error (err, s) | Ok (_, _, d) -> Ok (match d with None -> "#" | Some m -> m) let map_date_dd s = match parse_fuzzy_date s with | Error err -> Error (err, s) | Ok (_, _, d) -> Ok (match d with None -> "##" | Some m -> m) let map_date var kind = match kind with | "Y" -> Ok map_date_y | "YY" -> Ok map_date_yy | "YYYY" -> Ok map_date_yyyy | "M" -> Ok map_date_m | "MM" -> Ok map_date_mm | "d" -> Ok map_date_d | "dd" -> 
Ok map_date_dd | "e" -> map_todo "date_e" | _ -> R.error_msgf "variable $(%s): unknown date map kind `%s`" var kind let map_letter var n = match R.int_of_string n with | None -> R.error_msgf "variable $(%s): unknown letter map kind `%s`" var n | Some n -> let map s = Ok (if n > String.length s then s else (String.sub s 0 n)) in Ok map let map_int var count = match R.int_of_string count with | None -> R.error_msgf "variable $(%s): unknown int map kind `%s`" var count | Some count -> let map s = let fmt count i = Printf.sprintf "%0*d" count i in try Ok (fmt count (int_of_string s)) with | Failure _ -> err_map ~use:(fmt count 0) "variable $(%s): value `%s` not an int" var s in Ok map let map_id_find var smaps id = match (String.Map.find id smaps) with | None -> R.error_msgf "variable $(%s): unknown map id `%s`" var id | Some m -> Ok m let map_id var smaps id = map_id_find var smaps id >>= fun m -> let map s = match String.Map.find s m with | Some v -> Ok v | None -> err_map ~use:s "variable $(%s): map id `%s` could not map `%s`" var id s in Ok map let pmap_id var smaps id = match map_id_find var smaps id with | Error _ as e -> e | Ok m -> let map s = match String.Map.find s m with | None -> Ok s | Some s -> Ok s in Ok map let get_map var smaps m = match String.cut ~sep:"_" (String.trim m) with | Some ("case", kind) -> map_case var kind | Some ("letter", n) -> map_letter var n | Some ("date", kind) -> map_date var kind | Some ("int", count) -> map_int var count | Some ("map", id) -> map_id var smaps m | Some ("pmap", id) -> pmap_id var smaps m | None | _ -> R.error_msgf "variable $(%s): unknown map `%s`" var m TODO splicing , de - uglify let r = match String.split ~sep:"," var_spec with | var :: maps -> let add_map acc m = match acc with | Error _ as e -> e | Ok maps -> match get_map var smaps m with | Error _ as e -> e | Ok m -> Ok (m :: maps) in begin match List.fold_left add_map (Ok []) maps with | Error err -> Error (err, "MAPERROR") | Ok maps -> Ok (String.trim var, List.rev maps) end | _ -> Error (R.msgf "var `$(%s)`: illegal format variable." var_spec, "ILLEGAL") in match r with | Error _ as e -> e | Ok (var, maps) -> match String.Map.find var env with | None | Some [] -> Error (R.msgf "var `%s`: undefined variable: `$(%s)'" var_spec var, "UNDEFINED") | Some [v] -> let apply acc m = match acc with | Error _ as e -> e | Ok s -> m s in List.fold_left apply (Ok v) maps | Some l -> Error (R.msgf "var `%s`: unspliced multiple value" var_spec, "UNSPLICED") let format ?buf fmt ~env ~smaps = failwith "TODO" let buf = match buf with Some b - > b | None - > Buffer.create 255 in let err = ref ( ` Msg " " ) in let lookup_var = match lookup_var env with | Error ( e , v ) - > err : = e ; v | Ok v - > v in Buffer.clear buf ; Buffer.add_substitute buf lookup_var fmt ; let data = Buffer.contents buf in if ! err < > ( ` Msg " " ) then Error ( ! err , data ) else Ok data let buf = match buf with Some b -> b | None -> Buffer.create 255 in let err = ref (`Msg "") in let lookup_var var_spec = match lookup_var env smaps var_spec with | Error (e, v) -> err := e; v | Ok v -> v in Buffer.clear buf; Buffer.add_substitute buf lookup_var fmt; let data = Buffer.contents buf in if !err <> (`Msg "") then Error (!err, data) else Ok data *) let formats ?buf fmt ~env ~smaps = failwith "TODO" (* let rec product vss = (* ordered cartesian product of lists. 
*) let rec push_v acc v = function | l :: lists -> push_v ((v :: l) :: acc) v lists | [] -> acc in let rec push_vs acc lists = function | v :: vs -> push_vs (push_v acc v lists) lists vs | [] -> acc in let rec loop acc = function | vs :: vss -> loop (push_vs [] (List.rev acc) (List.rev vs)) vss | [] -> acc in if vss = [] then [] else loop [[]] (List.rev vss) FIXME better error report let lookup_var env var = match try Some (List.assoc var env) with Not_found -> None with | None -> FIXME this should n't occur here Log.err "variable %s undefined" var; "UNDEFINED" | Some l -> l in let rec assigns acc = function | [] -> acc | (name, Error e) :: vars -> Log.err "var %s lookup error: %s" name e; assigns ([(name, "ERROR")] :: acc) vars | (name, Ok vs) :: vars -> assigns ((List.map (fun v -> (name, v)) vs) :: acc) vars in let vars = Ddescr.Formatter.vars fmt in let assigns = assigns [] (List.map (fun (k, l) -> k, lookup l j) vars) in let envs = product assigns in let format = Ddescr.Formatter.format fmt in let add_run b acc run = Buffer.clear b; Buffer.add_substitute b (lookup_var run) format; Buffer.contents b :: acc in let b = Buffer.create 255 in List.fold_left (add_run b) [] envs let format_str fmt j = FIXME report error in case of list ? String.concat "" (format fmt j) *) (* Variable environements *) let cache = Hashtbl.create 255 type fmt = [`Lit of string | `Var of string ] list let parse_fmt ?buf s = try let b = match buf with | None -> Buffer.create 255 | Some buf -> Buffer.clear buf; buf in let acc = ref [] in let flush b = let s = Buffer.contents b in (Buffer.clear b; s) in let flush_lit b = if Buffer.length b <> 0 then acc := `Lit (flush b) :: !acc in let state = ref `Lit in for i = 0 to String.length s - 1 do match !state with | `Lit -> begin match s.[i] with | '$' -> state := `Dollar | c -> Buffer.add_char b c end | `Dollar -> begin match s.[i] with | '$' -> state := `Lit; Buffer.add_char b '$' | '(' -> state := `Var; flush_lit b; | _ -> raise Exit end | `Var -> begin match s.[i] with | ')' -> state := `Lit; acc := (`Var (flush b)) :: !acc; | c -> Buffer.add_char b c end done; if !state <> `Lit then raise Exit else (flush_lit b; Ok (List.rev !acc)) with Exit -> Error (strf "malformed format: `%s`" s) let cache = Hashtbl.create 255 let file_scan pat = try Hashtbl.find cache pat with | Not_found -> (OS.Path.unify (Path.of_string pat) >>= fun envs -> R.ok (List.rev_map snd envs)) |> Log.on_error_msg ~use:[] |> fun envs -> Hashtbl.add cache pat envs; envs --------------------------------------------------------------------------- Copyright 2012 All rights reserved . Redistribution and use in source and binary forms , with or without modification , are permitted provided that the following conditions are met : 1 . Redistributions of source code must retain the above copyright notice , this list of conditions and the following disclaimer . 2 . Redistributions in binary form must reproduce the above copyright notice , this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution . 3 . Neither the name of nor the names of contributors may be used to endorse or promote products derived from this software without specific prior written permission . THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT LIMITED TO , THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED . 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , DATA , OR PROFITS ; OR BUSINESS INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING NEGLIGENCE OR OTHERWISE ) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE , EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE . --------------------------------------------------------------------------- Copyright 2012 Daniel C. Bünzli All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. Neither the name of Daniel C. Bünzli nor the names of contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ---------------------------------------------------------------------------*)
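Buried in the flowed text above is descr.ml's small template parser, parse_fmt, which splits a format string into literal and $(var) pieces, treats "$$" as an escaped dollar sign, and rejects any other use of '$' or an unterminated variable. A rough Haskell transcription of that state machine follows; it is my own illustration of the grammar, written under the assumption that the flattened OCaml reads as in the original file, not code from the repository.

  data Piece = Lit String | Var String deriving (Show, Eq)

  -- parseFmt mirrors descr.ml's parse_fmt: literal characters accumulate
  -- until a '$'; "$$" emits a literal '$'; "$(" opens a variable name that
  -- must be closed by ')'; anything else after '$' is a malformed format.
  parseFmt :: String -> Either String [Piece]
  parseFmt = lit ""
    where
      flushLit acc rest = if null acc then rest else Lit (reverse acc) : rest

      lit acc ('$':cs) = dollar acc cs
      lit acc (c:cs)   = lit (c : acc) cs
      lit acc []       = Right (flushLit acc [])

      dollar acc ('$':cs) = lit ('$' : acc) cs
      dollar acc ('(':cs) = var acc "" cs
      dollar _   _        = Left "malformed format"

      var acc v (')':cs) = (\ps -> flushLit acc (Var (reverse v) : ps)) <$> lit "" cs
      var acc v (c:cs)   = var acc (c : v) cs
      var _   _ []       = Left "malformed format"

For example, parseFmt "scan/$(doc)-$$.json" yields Right [Lit "scan/", Var "doc", Lit "-$.json"], which is the shape descr.ml later feeds to Buffer.add_substitute-style variable lookup.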
Row 610298
  hash: 657a6cd99033f97e945cf6b4f87a957ce5827c72d837271d7e728fce049d8792
  repo: acieroid/scala-am
  path: church-2-num-1.scm
  language: scheme
  source: https://raw.githubusercontent.com/acieroid/scala-am/13ef3befbfc664b77f31f56847c30d60f4ee7dfe/test/changesBenevolPaper/church-2-num-1.scm
  content:

(letrec ((zero (lambda (f x) x))
         (inc (lambda (n) (lambda (f x) (f (n f x)))))
         (plus (lambda (m n) (lambda (f x) (m f (n f x))))))
  ((inc (inc zero)) (lambda (x) (+ x 1)) 0))
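The benchmark above encodes the number two as a Church numeral and converts it back to a machine integer by applying the successor function twice to 0. An equivalent sketch in Haskell (my own transcription, not part of the dataset row) makes the types explicit:

  -- Church numerals as plain functions; `two` applies (+ 1) twice to 0 and
  -- so evaluates to the Int 2, mirroring the Scheme snippet above.
  zero :: (a -> a) -> a -> a
  zero _ x = x

  inc :: ((a -> a) -> a -> a) -> (a -> a) -> a -> a
  inc n f x = f (n f x)

  plus :: ((a -> a) -> a -> a) -> ((a -> a) -> a -> a) -> (a -> a) -> a -> a
  plus m n f x = m f (n f x)

  two :: Int
  two = inc (inc zero) (+ 1) 0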
Row 610299
  hash: 6febfa553216a882c43330bcab22bc2bad66244caf548a9e136e371549e11481
  repo: funcool/httpurr
  path: generators.cljc
  language: clojure
  source: https://raw.githubusercontent.com/funcool/httpurr/22fb1b921864155a6b4eff113e2456ee924dd681/test/httpurr/test/generators.cljc
  content:

(ns httpurr.test.generators
  (:require [clojure.test.check.generators :as gen]
            [httpurr.status :as http]))

(defn gen-statuses [coll]
  (gen/such-that #(not (empty? %))
                 (gen/map (gen/return :status)
                          (gen/elements coll))))

(def informational-response (gen-statuses http/informational-codes))
(def success-response (gen-statuses http/success-codes))
(def redirection-response (gen-statuses http/redirection-codes))
(def client-error-response (gen-statuses http/client-error-codes))
(def server-error-response (gen-statuses http/server-error-codes))
(def error-response
  (gen-statuses (concat http/client-error-codes http/server-error-codes)))
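gen-statuses produces test.check maps of the shape {:status code}, with the code drawn from a fixed list and such-that discarding the occasional empty map. A comparable generator in Haskell with QuickCheck, shown only as an illustrative sketch (the Response type and genStatuses name are invented here, not taken from the repository):

  import Test.QuickCheck (Gen, elements, generate)

  -- A response stub whose status code is picked from a fixed list, analogous
  -- to one of the gen-statuses generators above.
  newtype Response = Response { status :: Int } deriving Show

  genStatuses :: [Int] -> Gen Response
  genStatuses codes = Response <$> elements codes

  -- e.g. generate (genStatuses [400, 404, 500]) might print
  -- Response {status = 404}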
dark:hover:bg-gray-800" href="/datasets/dhuck/functional_code/viewer/default/train?p=6101">6,102</a> </li><li class="hidden sm:block"><a class="rounded-lg px-2.5 py-1 bg-gray-50 font-semibold ring-1 ring-inset ring-gray-200 dark:bg-gray-900 dark:text-yellow-500 dark:ring-gray-900 hover:bg-gray-50 dark:hover:bg-gray-800" href="/datasets/dhuck/functional_code/viewer/default/train?p=6102">6,103</a> </li><li class="hidden sm:block"><a class="rounded-lg px-2.5 py-1 hover:bg-gray-50 dark:hover:bg-gray-800" href="/datasets/dhuck/functional_code/viewer/default/train?p=6103">6,104</a> </li><li class="hidden sm:block"><a class="rounded-lg px-2.5 py-1 hover:bg-gray-50 dark:hover:bg-gray-800" href="/datasets/dhuck/functional_code/viewer/default/train?p=6104">6,105</a> </li><li class="hidden sm:block"><a class="rounded-lg px-2.5 py-1 pointer-events-none cursor-default" href="#">...</a> </li><li class="hidden sm:block"><a class="rounded-lg px-2.5 py-1 hover:bg-gray-50 dark:hover:bg-gray-800" href="/datasets/dhuck/functional_code/viewer/default/train?p=6117">6,118</a> </li> <li><a class="flex items-center rounded-lg px-2.5 py-1 hover:bg-gray-50 dark:hover:bg-gray-800 " href="/datasets/dhuck/functional_code/viewer/default/train?p=6103">Next <svg class="ml-1.5 transform rotate-180" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" focusable="false" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 32 32"><path d="M10 16L20 6l1.4 1.4l-8.6 8.6l8.6 8.6L20 26z" fill="currentColor"></path></svg></a></li></ul></nav></div></div> </div></div></div></div></div></div></div> <div class="hidden items-center md:flex"> <div class="mx-1 flex items-center justify-center"><div class="h-8 w-1 cursor-ew-resize rounded-full bg-gray-200 hover:bg-gray-400 dark:bg-gray-700 dark:hover:bg-gray-600 max-sm:hidden" role="separator"></div></div> <div class="flex h-full flex-col" style="height: calc(100vh - 48px)"><div class="my-4 mr-4 h-full overflow-auto rounded-lg border shadow-lg dark:border-gray-800" style="width: 480px"><div class="flex h-full flex-col"><div class="flex flex-col "> <div class="px-4 md:mt-4"><div class="mb-4 flex justify-end"> <span class="inline-block w-full flex justify-center"><span class="contents"><div class="flex w-full flex-col rounded-lg border-slate-200 bg-white p-2 shadow-md ring-1 ring-slate-200 dark:border-slate-700 dark:bg-slate-800 dark:ring-slate-700"> <div class="mt-0 flex items-start gap-1"><div class="flex items-center rounded-md bg-slate-100 p-2 dark:bg-slate-700"><svg class="size-4 text-gray-700 dark:text-gray-300" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 11 11"><path fill="currentColor" d="M4.881 4.182c0 .101-.031.2-.087.283a.5.5 0 0 1-.242.18l-.65.217a1.3 1.3 0 0 0-.484.299 1.3 1.3 0 0 0-.298.484l-.222.639a.46.46 0 0 1-.18.242.5.5 0 0 1-.288.092.5.5 0 0 1-.294-.097.5.5 0 0 1-.175-.242l-.211-.644a1.26 1.26 0 0 0-.299-.48 1.14 1.14 0 0 0-.479-.298L.328 4.64a.48.48 0 0 1-.247-.18.515.515 0 0 1 .247-.758l.644-.21a1.28 1.28 0 0 0 .788-.789l.211-.634a.5.5 0 0 1 .165-.242.5.5 0 0 1 .283-.103.5.5 0 0 1 .294.083c.086.058.152.14.19.237l.217.659a1.28 1.28 0 0 0 .788.788l.644.222a.476.476 0 0 1 .237.18.5.5 0 0 1 .092.288"></path><path fill="currentColor" d="M10.031 7.458a.5.5 0 0 1-.098.314.5.5 0 0 1-.267.196l-.881.293c-.272.09-.519.242-.721.443a1.8 1.8 0 0 0-.443.721l-.31.876a.5.5 0 0 
1-.185.263.56.56 0 0 1-.319.098.515.515 0 0 1-.515-.366l-.294-.88a1.8 1.8 0 0 0-.443-.722c-.204-.2-.45-.353-.72-.448l-.881-.288a.57.57 0 0 1-.263-.191.56.56 0 0 1-.014-.64.5.5 0 0 1 .271-.194l.886-.294A1.82 1.82 0 0 0 6.01 5.465l.293-.87a.515.515 0 0 1 .49-.377c.11 0 .219.03.314.088a.56.56 0 0 1 .206.263l.298.896a1.82 1.82 0 0 0 1.175 1.174l.875.31a.5.5 0 0 1 .263.195c.07.09.108.2.108.314"></path><path fill="currentColor" d="M7.775 1.684a.5.5 0 0 0 .088-.262.45.45 0 0 0-.088-.263.5.5 0 0 0-.21-.155L7.24.896a.5.5 0 0 1-.165-.103.5.5 0 0 1-.103-.17l-.108-.33a.5.5 0 0 0-.165-.21A.5.5 0 0 0 6.426 0a.5.5 0 0 0-.252.098.5.5 0 0 0-.145.206l-.108.32a.5.5 0 0 1-.103.17.5.5 0 0 1-.17.102L5.334 1a.45.45 0 0 0-.216.155.5.5 0 0 0-.088.262c0 .094.029.186.083.263a.5.5 0 0 0 .216.16l.32.103q.095.03.164.103a.37.37 0 0 1 .103.165l.108.319c.031.09.088.17.165.227a.56.56 0 0 0 .252.077.42.42 0 0 0 .268-.093.5.5 0 0 0 .15-.2l.113-.325a.43.43 0 0 1 .268-.268l.32-.108a.42.42 0 0 0 .215-.155"></path></svg></div> <div class="flex min-w-0 flex-1"><textarea placeholder="Ask AI to help write your query..." class="max-h-64 min-h-8 w-full resize-none overflow-y-auto border-none bg-transparent py-1 text-sm leading-6 text-slate-700 placeholder-slate-400 [scrollbar-width:thin] focus:ring-0 dark:text-slate-200 dark:placeholder-slate-400" rows="1"></textarea> </div> </div> </div></span> </span></div> <div class="relative flex flex-col rounded-md bg-gray-100 pt-2 dark:bg-gray-800/50"> <div class="flex h-64 items-center justify-center "><svg class="animate-spin text-xs" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" fill="none" focusable="false" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 12 12"><path class="opacity-75" fill-rule="evenodd" clip-rule="evenodd" d="M6 0C2.6862 0 0 2.6862 0 6H1.8C1.8 4.88609 2.2425 3.8178 3.03015 3.03015C3.8178 2.2425 4.88609 1.8 6 1.8V0ZM12 6C12 9.3138 9.3138 12 6 12V10.2C7.11391 10.2 8.1822 9.7575 8.96985 8.96985C9.7575 8.1822 10.2 7.11391 10.2 6H12Z" fill="currentColor"></path><path class="opacity-25" fill-rule="evenodd" clip-rule="evenodd" d="M3.03015 8.96985C3.8178 9.7575 4.88609 10.2 6 10.2V12C2.6862 12 0 9.3138 0 6H1.8C1.8 7.11391 2.2425 8.1822 3.03015 8.96985ZM7.60727 2.11971C7.0977 1.90864 6.55155 1.8 6 1.8V0C9.3138 0 12 2.6862 12 6H10.2C10.2 5.44845 10.0914 4.9023 9.88029 4.39273C9.66922 3.88316 9.35985 3.42016 8.96985 3.03015C8.57984 2.64015 8.11684 2.33078 7.60727 2.11971Z" fill="currentColor"></path></svg></div></div> <div class="mt-2 flex flex-col gap-2"><div class="flex items-center justify-between max-sm:text-sm"><div class="flex w-full items-center justify-between gap-4"> <span class="flex flex-shrink-0 items-center gap-1"><span class="font-semibold">Subsets and Splits</span> <span class="inline-block "><span class="contents"><svg class="text-xs text-gray-500 dark:text-gray-400" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" focusable="false" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 32 32"><path d="M17 22v-8h-4v2h2v6h-3v2h8v-2h-3z" fill="currentColor"></path><path d="M16 8a1.5 1.5 0 1 0 1.5 1.5A1.5 1.5 0 0 0 16 8z" fill="currentColor"></path><path d="M16 30a14 14 0 1 1 14-14a14 14 0 0 1-14 14zm0-26a12 12 0 1 0 12 12A12 12 0 0 0 16 4z" fill="currentColor"></path></svg></span> </span> </span> <div class="ml-4 flex flex-1 items-center justify-end gap-1"> </div></div></div> <div class="flex flex-nowrap 
gap-1 overflow-x-auto"></div></div> <button type="button" class="btn mt-2 h-10 w-full text-sm font-semibold md:text-base" ><span class="flex items-center gap-1.5"> <span>Run Query</span> <span class="shadow-xs ml-2 hidden items-center rounded-sm border bg-white px-0.5 text-xs font-medium text-gray-700 sm:inline-flex">Ctrl+↵</span></span></button></div> <div class="flex flex-col px-2 pb-4"></div></div> <div class="mt-auto pb-4"><div class="flex justify-center"><div class="w-full sm:px-4"><div class="mb-3"><ul class="flex gap-1 text-sm "><li><button class="flex items-center whitespace-nowrap rounded-lg px-2 text-gray-500 hover:bg-gray-100 hover:text-gray-700 dark:hover:bg-gray-900 dark:hover:text-gray-300">Saved Queries </button> </li><li><button class="flex items-center whitespace-nowrap rounded-lg px-2 bg-black text-white dark:bg-gray-800">Top Community Queries </button> </li></ul></div> <div class="h-48 overflow-y-auto"><div class="flex flex-col gap-2"><div class="flex h-48 flex-col items-center justify-center rounded border border-gray-200 bg-gray-50 p-4 text-center dark:border-gray-700/60 dark:bg-gray-900"><p class="mb-1 font-semibold text-gray-600 dark:text-gray-400">No community queries yet</p> <p class="max-w-xs text-xs text-gray-500 dark:text-gray-400">The top public SQL queries from the community will appear here once available.</p></div></div></div></div></div></div></div></div></div></div> </div></div></div></main> </div> <script> import("\/front\/build\/kube-bf554b6\/index.js"); window.moonSha = "kube-bf554b6\/"; window.__hf_deferred = {}; </script> <!-- Stripe --> <script> if (["hf.co", "huggingface.co"].includes(window.location.hostname)) { const script = document.createElement("script"); script.src = "https://js.stripe.com/v3/"; script.async = true; document.head.appendChild(script); } </script> </body> </html>