{ // 获取包含Hugging Face文本的span元素 const spans = link.querySelectorAll('span.whitespace-nowrap, span.hidden.whitespace-nowrap'); spans.forEach(span => { if (span.textContent && span.textContent.trim().match(/Hugging\s*Face/i)) { span.textContent = 'AI快站'; } }); }); // 替换logo图片的alt属性 document.querySelectorAll('img[alt*="Hugging"], img[alt*="Face"]').forEach(img => { if (img.alt.match(/Hugging\s*Face/i)) { img.alt = 'AI快站 logo'; } }); } // 替换导航栏中的链接 function replaceNavigationLinks() { // 已替换标记,防止重复运行 if (window._navLinksReplaced) { return; } // 已经替换过的链接集合,防止重复替换 const replacedLinks = new Set(); // 只在导航栏区域查找和替换链接 const headerArea = document.querySelector('header') || document.querySelector('nav'); if (!headerArea) { return; } // 在导航区域内查找链接 const navLinks = headerArea.querySelectorAll('a'); navLinks.forEach(link => { // 如果已经替换过,跳过 if (replacedLinks.has(link)) return; const linkText = link.textContent.trim(); const linkHref = link.getAttribute('href') || ''; // 替换Spaces链接 - 仅替换一次 if ( (linkHref.includes('/spaces') || linkHref === '/spaces' || linkText === 'Spaces' || linkText.match(/^s*Spacess*$/i)) && linkText !== 'OCR模型免费转Markdown' && linkText !== 'OCR模型免费转Markdown' ) { link.textContent = 'OCR模型免费转Markdown'; link.href = 'https://fast360.xyz'; link.setAttribute('target', '_blank'); link.setAttribute('rel', 'noopener noreferrer'); replacedLinks.add(link); } // 删除Posts链接 else if ( (linkHref.includes('/posts') || linkHref === '/posts' || linkText === 'Posts' || linkText.match(/^s*Postss*$/i)) ) { if (link.parentNode) { link.parentNode.removeChild(link); } replacedLinks.add(link); } // 替换Docs链接 - 仅替换一次 else if ( (linkHref.includes('/docs') || linkHref === '/docs' || linkText === 'Docs' || linkText.match(/^s*Docss*$/i)) && linkText !== '模型下载攻略' ) { link.textContent = '模型下载攻略'; link.href = '/'; replacedLinks.add(link); } // 删除Enterprise链接 else if ( (linkHref.includes('/enterprise') || linkHref === '/enterprise' || linkText === 'Enterprise' || linkText.match(/^s*Enterprises*$/i)) 
) { if (link.parentNode) { link.parentNode.removeChild(link); } replacedLinks.add(link); } }); // 查找可能嵌套的Spaces和Posts文本 const textNodes = []; function findTextNodes(element) { if (element.nodeType === Node.TEXT_NODE) { const text = element.textContent.trim(); if (text === 'Spaces' || text === 'Posts' || text === 'Enterprise') { textNodes.push(element); } } else { for (const child of element.childNodes) { findTextNodes(child); } } } // 只在导航区域内查找文本节点 findTextNodes(headerArea); // 替换找到的文本节点 textNodes.forEach(node => { const text = node.textContent.trim(); if (text === 'Spaces') { node.textContent = node.textContent.replace(/Spaces/g, 'OCR模型免费转Markdown'); } else if (text === 'Posts') { // 删除Posts文本节点 if (node.parentNode) { node.parentNode.removeChild(node); } } else if (text === 'Enterprise') { // 删除Enterprise文本节点 if (node.parentNode) { node.parentNode.removeChild(node); } } }); // 标记已替换完成 window._navLinksReplaced = true; } // 替换代码区域中的域名 function replaceCodeDomains() { // 特别处理span.hljs-string和span.njs-string元素 document.querySelectorAll('span.hljs-string, span.njs-string, span[class*="hljs-string"], span[class*="njs-string"]').forEach(span => { if (span.textContent && span.textContent.includes('huggingface.co')) { span.textContent = span.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 替换hljs-string类的span中的域名(移除多余的转义符号) document.querySelectorAll('span.hljs-string, span[class*="hljs-string"]').forEach(span => { if (span.textContent && span.textContent.includes('huggingface.co')) { span.textContent = span.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 替换pre和code标签中包含git clone命令的域名 document.querySelectorAll('pre, code').forEach(element => { if (element.textContent && element.textContent.includes('git clone')) { const text = element.innerHTML; if (text.includes('huggingface.co')) { element.innerHTML = text.replace(/huggingface.co/g, 'aifasthub.com'); } } }); // 处理特定的命令行示例 document.querySelectorAll('pre, code').forEach(element => { const 
text = element.innerHTML; if (text.includes('huggingface.co')) { // 针对git clone命令的专门处理 if (text.includes('git clone') || text.includes('GIT_LFS_SKIP_SMUDGE=1')) { element.innerHTML = text.replace(/huggingface.co/g, 'aifasthub.com'); } } }); // 特别处理模型下载页面上的代码片段 document.querySelectorAll('.flex.border-t, .svelte_hydrator, .inline-block').forEach(container => { const content = container.innerHTML; if (content && content.includes('huggingface.co')) { container.innerHTML = content.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 特别处理模型仓库克隆对话框中的代码片段 try { // 查找包含"Clone this model repository"标题的对话框 const cloneDialog = document.querySelector('.svelte_hydration_boundary, [data-target="MainHeader"]'); if (cloneDialog) { // 查找对话框中所有的代码片段和命令示例 const codeElements = cloneDialog.querySelectorAll('pre, code, span'); codeElements.forEach(element => { if (element.textContent && element.textContent.includes('huggingface.co')) { if (element.innerHTML.includes('huggingface.co')) { element.innerHTML = element.innerHTML.replace(/huggingface.co/g, 'aifasthub.com'); } else { element.textContent = element.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } } }); } // 更精确地定位克隆命令中的域名 document.querySelectorAll('[data-target]').forEach(container => { const codeBlocks = container.querySelectorAll('pre, code, span.hljs-string'); codeBlocks.forEach(block => { if (block.textContent && block.textContent.includes('huggingface.co')) { if (block.innerHTML.includes('huggingface.co')) { block.innerHTML = block.innerHTML.replace(/huggingface.co/g, 'aifasthub.com'); } else { block.textContent = block.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } } }); }); } catch (e) { // 错误处理但不打印日志 } } // 当DOM加载完成后执行替换 if (document.readyState === 'loading') { document.addEventListener('DOMContentLoaded', () => { replaceHeaderBranding(); replaceNavigationLinks(); replaceCodeDomains(); // 只在必要时执行替换 - 3秒后再次检查 setTimeout(() => { if (!window._navLinksReplaced) { console.log('[Client] 3秒后重新检查导航链接'); 
replaceNavigationLinks(); } }, 3000); }); } else { replaceHeaderBranding(); replaceNavigationLinks(); replaceCodeDomains(); // 只在必要时执行替换 - 3秒后再次检查 setTimeout(() => { if (!window._navLinksReplaced) { console.log('[Client] 3秒后重新检查导航链接'); replaceNavigationLinks(); } }, 3000); } // 增加一个MutationObserver来处理可能的动态元素加载 const observer = new MutationObserver(mutations => { // 检查是否导航区域有变化 const hasNavChanges = mutations.some(mutation => { // 检查是否存在header或nav元素变化 return Array.from(mutation.addedNodes).some(node => { if (node.nodeType === Node.ELEMENT_NODE) { // 检查是否是导航元素或其子元素 if (node.tagName === 'HEADER' || node.tagName === 'NAV' || node.querySelector('header, nav')) { return true; } // 检查是否在导航元素内部 let parent = node.parentElement; while (parent) { if (parent.tagName === 'HEADER' || parent.tagName === 'NAV') { return true; } parent = parent.parentElement; } } return false; }); }); // 只在导航区域有变化时执行替换 if (hasNavChanges) { // 重置替换状态,允许再次替换 window._navLinksReplaced = false; replaceHeaderBranding(); replaceNavigationLinks(); } }); // 开始观察document.body的变化,包括子节点 if (document.body) { observer.observe(document.body, { childList: true, subtree: true }); } else { document.addEventListener('DOMContentLoaded', () => { observer.observe(document.body, { childList: true, subtree: true }); }); } })(); \n\n```\n\nThis is the console log that I'm getting and having a hard time to understand \n```\ntest.html:10 Transformers.js loaded\ntest.html:17 Summarization pipeline loaded\ntest.html:21 Hi my name is SmolLm2\ntransformers@3.0.1:175 An error occurred during model execution: \"283134224\".\nA @ transformers@3.0.1:175\nawait in A\nD @ transformers@3.0.1:175\nforward @ transformers@3.0.1:175\ngenerate @ transformers@3.0.1:175\n_call @ transformers@3.0.1:187\ne @ transformers@3.0.1:214\ntestSummarization @ test.html:23\nawait in testSummarization\n(anonymous) @ test.html:32\ntransformers@3.0.1:175 Inputs given to model: {input_ids: Proxy(o), attention_mask: Proxy(o), position_ids: Proxy(o), 
past_key_values.0.key: Proxy(o), past_key_values.0.value: Proxy(o), …}\nA @ transformers@3.0.1:175\nawait in A\nD @ transformers@3.0.1:175\nforward @ transformers@3.0.1:175\ngenerate @ transformers@3.0.1:175\n_call @ transformers@3.0.1:187\ne @ transformers@3.0.1:214\ntestSummarization @ test.html:23\nawait in testSummarization\n(anonymous) @ test.html:32\ntest.html:28 Error: 283134224\ntestSummarization @ test.html:28\nawait in testSummarization\n(anonymous) @ test.html:32\n```\nAny help would be much appreciated, thanks in advance. 🤗","html":"

Hi

\n

I'm new to implementing JS and transformers.js.
I was trying to run this model on the browser using the following code

\n
<!DOCTYPE html>\n<html>\n<head>\n  <title>Test Transformers.js</title>\n  <script type=\"module\">\n    async function testSummarization() {\n      try {\n        // Load transformers.js\n        const { env, AutoTokenizer, AutoModelForCausalLM, pipeline } = await import('https://cdn.jsdelivr.net/npm/@huggingface/transformers@3.0.1');\n        console.log('Transformers.js loaded'); // Debugging statement\n        env.allowLocalModels = false\n        // Load the summarization pipeline\n        const summarizationPipeline = await pipeline('text-generation', 'HuggingFaceTB/SmolLM2-135M-Instruct', {\n dtype: 'q4f16', use_external_data_format: true,\n});\n        console.log('Summarization pipeline loaded'); // Debugging statement\n\n        // Run the summarization\n        const text = 'Hi my name is SmolLm2';\n        console.log(text);\n\n        const result = await summarizationPipeline(text, { max_length: 13, min_length: 3, length_penalty: 2.0, num_beams: 1 });\n        console.log('Summarization result:', result); // Debugging statement\n\n        console.log(result[0].summary_text);\n      } catch (error) {\n        console.error('Error:', error);\n      }\n    }\n\n    testSummarization();\n  </script>\n</head>\n<body>\n  <h1>Test Transformers.js</h1>\n</body>\n</html>\n
\n

This is the console log that I'm getting and having a hard time to understand

\n
test.html:10 Transformers.js loaded\ntest.html:17 Summarization pipeline loaded\ntest.html:21 Hi my name is SmolLm2\ntransformers@3.0.1:175 An error occurred during model execution: \"283134224\".\nA @ transformers@3.0.1:175\nawait in A\nD @ transformers@3.0.1:175\nforward @ transformers@3.0.1:175\ngenerate @ transformers@3.0.1:175\n_call @ transformers@3.0.1:187\ne @ transformers@3.0.1:214\ntestSummarization @ test.html:23\nawait in testSummarization\n(anonymous) @ test.html:32\ntransformers@3.0.1:175 Inputs given to model: {input_ids: Proxy(o), attention_mask: Proxy(o), position_ids: Proxy(o), past_key_values.0.key: Proxy(o), past_key_values.0.value: Proxy(o), …}\nA @ transformers@3.0.1:175\nawait in A\nD @ transformers@3.0.1:175\nforward @ transformers@3.0.1:175\ngenerate @ transformers@3.0.1:175\n_call @ transformers@3.0.1:187\ne @ transformers@3.0.1:214\ntestSummarization @ test.html:23\nawait in testSummarization\n(anonymous) @ test.html:32\ntest.html:28 Error: 283134224\ntestSummarization @ test.html:28\nawait in testSummarization\n(anonymous) @ test.html:32\n
\n

Any help would be much appreciated, thanks in advance. 🤗

\n","updatedAt":"2024-11-14T21:21:28.385Z","author":{"_id":"6058a23b5ab91954363a6511","avatarUrl":"https://aifasthub.com/avatars/v1/production/uploads/6058a23b5ab91954363a6511/wam6ertF3GgNdsxLXuP38.png","fullname":"Sukesh Perla","name":"hitchhiker3010","type":"user","isPro":true,"isHf":false,"isHfAdmin":false,"isMod":false,"followerCount":10}},"numEdits":1,"identifiedLanguage":{"language":"en","probability":0.5955040454864502},"editors":["hitchhiker3010"],"editorAvatarUrls":["https://aifasthub.com/avatars/v1/production/uploads/6058a23b5ab91954363a6511/wam6ertF3GgNdsxLXuP38.png"],"reactions":[],"isReport":false}},{"id":"67366d8cc4b32d3da8d56bd2","author":{"_id":"61b253b7ac5ecaae3d1efe0c","avatarUrl":"https://aifasthub.com/avatars/v1/production/uploads/61b253b7ac5ecaae3d1efe0c/hwiQ0uvz3t-L5a-NtBIO6.png","fullname":"Joshua","name":"Xenova","type":"user","isPro":false,"isHf":true,"isHfAdmin":false,"isMod":false,"followerCount":6517,"isOwner":false,"isOrgMember":true},"createdAt":"2024-11-14T21:37:16.000Z","type":"comment","data":{"edited":false,"hidden":false,"latest":{"raw":"Hi there! We're aware of the issue (CPU implementation of f16 [Skip][Simplified]LayerNormalizaion; see [here](https://github.com/microsoft/onnxruntime/issues/22704#issuecomment-2454783993)), and this will be fixed in v3.1 (coming soon).\n\nIn the meantime, you should be able to fix it by either:\n1. Using WebGPU:\n```js\nconst summarizationPipeline = await pipeline('text-generation', 'HuggingFaceTB/SmolLM2-135M-Instruct', {\n dtype: 'q4f16', device: 'webgpu',\n});\n```\n2. Using non-fp16 model:\n```js\nconst summarizationPipeline = await pipeline('text-generation', 'HuggingFaceTB/SmolLM2-135M-Instruct', {\n dtype: 'q4',\n});\n```\n","html":"

Hi there! We're aware of the issue (CPU implementation of f16 [Skip][Simplified]LayerNormalizaion; see here), and this will be fixed in v3.1 (coming soon).

\n

In the meantime, you should be able to fix it by either:

\n
    \n
  1. Using WebGPU:
  2. \n
\n
const summarizationPipeline = await pipeline('text-generation', 'HuggingFaceTB/SmolLM2-135M-Instruct', {\n dtype: 'q4f16', device: 'webgpu',\n});\n
\n
    \n
  1. Using non-fp16 model:
  2. \n
\n
const summarizationPipeline = await pipeline('text-generation', 'HuggingFaceTB/SmolLM2-135M-Instruct', {\n dtype: 'q4',\n});\n
\n","updatedAt":"2024-11-14T21:37:16.645Z","author":{"_id":"61b253b7ac5ecaae3d1efe0c","avatarUrl":"https://aifasthub.com/avatars/v1/production/uploads/61b253b7ac5ecaae3d1efe0c/hwiQ0uvz3t-L5a-NtBIO6.png","fullname":"Joshua","name":"Xenova","type":"user","isPro":false,"isHf":true,"isHfAdmin":false,"isMod":false,"followerCount":6517}},"numEdits":0,"identifiedLanguage":{"language":"en","probability":0.5769983530044556},"editors":["Xenova"],"editorAvatarUrls":["https://aifasthub.com/avatars/v1/production/uploads/61b253b7ac5ecaae3d1efe0c/hwiQ0uvz3t-L5a-NtBIO6.png"],"reactions":[{"reaction":"👍","users":["hitchhiker3010"],"count":1}],"isReport":false}},{"id":"673670dc506e0296dc8760f8","author":{"_id":"6058a23b5ab91954363a6511","avatarUrl":"https://aifasthub.com/avatars/v1/production/uploads/6058a23b5ab91954363a6511/wam6ertF3GgNdsxLXuP38.png","fullname":"Sukesh Perla","name":"hitchhiker3010","type":"user","isPro":true,"isHf":false,"isHfAdmin":false,"isMod":false,"followerCount":10,"isOwner":false,"isOrgMember":false},"createdAt":"2024-11-14T21:51:24.000Z","type":"comment","data":{"edited":false,"hidden":false,"latest":{"raw":"Thanks for the reply, I'm able to get the output.","html":"

Thanks for the reply, I'm able to get the output.

\n","updatedAt":"2024-11-14T21:51:24.102Z","author":{"_id":"6058a23b5ab91954363a6511","avatarUrl":"https://aifasthub.com/avatars/v1/production/uploads/6058a23b5ab91954363a6511/wam6ertF3GgNdsxLXuP38.png","fullname":"Sukesh Perla","name":"hitchhiker3010","type":"user","isPro":true,"isHf":false,"isHfAdmin":false,"isMod":false,"followerCount":10}},"numEdits":0,"identifiedLanguage":{"language":"en","probability":0.9636708498001099},"editors":["hitchhiker3010"],"editorAvatarUrls":["https://aifasthub.com/avatars/v1/production/uploads/6058a23b5ab91954363a6511/wam6ertF3GgNdsxLXuP38.png"],"reactions":[],"isReport":false}},{"id":"673670e17a227756b783f58e","author":{"_id":"6058a23b5ab91954363a6511","avatarUrl":"https://aifasthub.com/avatars/v1/production/uploads/6058a23b5ab91954363a6511/wam6ertF3GgNdsxLXuP38.png","fullname":"Sukesh Perla","name":"hitchhiker3010","type":"user","isPro":true,"isHf":false,"isHfAdmin":false,"isMod":false,"followerCount":10,"isOwner":false,"isOrgMember":false},"createdAt":"2024-11-14T21:51:29.000Z","type":"status-change","data":{"status":"closed"}},{"id":"673f17a30a709884ba854030","author":{"_id":"6058a23b5ab91954363a6511","avatarUrl":"https://aifasthub.com/avatars/v1/production/uploads/6058a23b5ab91954363a6511/wam6ertF3GgNdsxLXuP38.png","fullname":"Sukesh Perla","name":"hitchhiker3010","type":"user","isPro":true,"isHf":false,"isHfAdmin":false,"isMod":false,"followerCount":10,"isOwner":false,"isOrgMember":false},"createdAt":"2024-11-21T11:21:07.000Z","type":"comment","data":{"edited":false,"hidden":false,"latest":{"raw":"Hi \n\nI was trying to implement chat template and generate using the code below.\n\n```\n\n\n\n Test Transformers.js\n \n\n\n

Test Transformers.js

\n\n\n``` \n\nI'm running into following error [```Error: TypeError: Cannot read properties of null (reading 'dims')```] which I'm unable to debug [attaching the stack trace below]. I see there's dims property in the *input_text* variable\n\n```\nTransformers.js loaded\ntest_messages.html:17 Tokenizer loaded\ntransformers@3.0.2:100 2024-11-21 16:46:08.285399 [W:onnxruntime:, session_state.cc:1168 VerifyEachNodeIsAssignedToAnEp] Some nodes were not assigned to the preferred execution providers which may or may not have an negative impact on performance. e.g. ORT explicitly assigns shape related ops to CPU to improve perf.\nor @ transformers@3.0.2:100\n$func11798 @ ort-wasm-simd-threaded.jsep.wasm:0x103b449\n$func1946 @ ort-wasm-simd-threaded.jsep.wasm:0x229810\n$func836 @ ort-wasm-simd-threaded.jsep.wasm:0xc49fe\n$func11721 @ ort-wasm-simd-threaded.jsep.wasm:0x1033705\n$func630 @ ort-wasm-simd-threaded.jsep.wasm:0x8fad9\n$func88 @ ort-wasm-simd-threaded.jsep.wasm:0xd635\n$func14399 @ ort-wasm-simd-threaded.jsep.wasm:0x13b8e71\n$func123 @ ort-wasm-simd-threaded.jsep.wasm:0x15847\n$func2101 @ ort-wasm-simd-threaded.jsep.wasm:0x25f185\n$func11258 @ ort-wasm-simd-threaded.jsep.wasm:0xfed8f4\n$La @ ort-wasm-simd-threaded.jsep.wasm:0xaf6beb\nt. 
@ transformers@3.0.2:100\np._OrtCreateSession @ transformers@3.0.2:100\n(anonymous) @ transformers@3.0.2:100\ned @ transformers@3.0.2:100\nbd @ transformers@3.0.2:100\nloadModel @ transformers@3.0.2:100\ncreateInferenceSessionHandler @ transformers@3.0.2:100\ncreate @ transformers@3.0.2:100\nawait in create\ng @ transformers@3.0.2:151\n(anonymous) @ transformers@3.0.2:175\nawait in (anonymous)\nE @ transformers@3.0.2:175\nfrom_pretrained @ transformers@3.0.2:175\nawait in from_pretrained\nfrom_pretrained @ transformers@3.0.2:175\nawait in from_pretrained\ntestSummarization @ test_messages.html:19\nawait in testSummarization\n(anonymous) @ test_messages.html:44Understand this errorAI\ntransformers@3.0.2:100 2024-11-21 16:46:08.286500 [W:onnxruntime:, session_state.cc:1170 VerifyEachNodeIsAssignedToAnEp] Rerunning with verbose output on a non-minimal build will show node assignments.\nor @ transformers@3.0.2:100\n$func11798 @ ort-wasm-simd-threaded.jsep.wasm:0x103b449\n$func1946 @ ort-wasm-simd-threaded.jsep.wasm:0x229810\n$func836 @ ort-wasm-simd-threaded.jsep.wasm:0xc49fe\n$func11721 @ ort-wasm-simd-threaded.jsep.wasm:0x1033705\n$func630 @ ort-wasm-simd-threaded.jsep.wasm:0x8fad9\n$func88 @ ort-wasm-simd-threaded.jsep.wasm:0xd635\n$func14399 @ ort-wasm-simd-threaded.jsep.wasm:0x13b8e71\n$func123 @ ort-wasm-simd-threaded.jsep.wasm:0x15847\n$func2101 @ ort-wasm-simd-threaded.jsep.wasm:0x25f66a\n$func11258 @ ort-wasm-simd-threaded.jsep.wasm:0xfed8f4\n$La @ ort-wasm-simd-threaded.jsep.wasm:0xaf6beb\nt. 
@ transformers@3.0.2:100\np._OrtCreateSession @ transformers@3.0.2:100\n(anonymous) @ transformers@3.0.2:100\ned @ transformers@3.0.2:100\nbd @ transformers@3.0.2:100\nloadModel @ transformers@3.0.2:100\ncreateInferenceSessionHandler @ transformers@3.0.2:100\ncreate @ transformers@3.0.2:100\nawait in create\ng @ transformers@3.0.2:151\n(anonymous) @ transformers@3.0.2:175\nawait in (anonymous)\nE @ transformers@3.0.2:175\nfrom_pretrained @ transformers@3.0.2:175\nawait in from_pretrained\nfrom_pretrained @ transformers@3.0.2:175\nawait in from_pretrained\ntestSummarization @ test_messages.html:19\nawait in testSummarization\n(anonymous) @ test_messages.html:44Understand this errorAI\ntest_messages.html:24 Model loaded\ntest_messages.html:28 chat [{…}]0: {role: 'user', content: 'What is the capital of France.'}length: 1[[Prototype]]: Array(0)\ntest_messages.html:30 input_text Proxy(o) {ort_tensor: o}[[Handler]]: Object[[Target]]: o[[IsRevoked]]: false\ntest_messages.html:31 input_text.dims (2) [1, 37]0: 11: 37length: 2[[Prototype]]: Array(0)\ntest_messages.html:40 Error: TypeError: Cannot read properties of null (reading 'dims')\n at Function.generate (transformers@3.0.2:175:29723)\n at testSummarization (test_messages.html:34:35)\ntestSummarization @ test_messages.html:40\nawait in testSummarization\n(anonymous) @ test_messages.html:44Understand this errorAI\n```\n\nplease help.","html":"

Hi

\n

I was trying to implement chat template and generate using the code below.

\n
<!DOCTYPE html>\n<html>\n<head>\n  <title>Test Transformers.js</title>\n  <script type=\"module\">\n    async function testSummarization() {\n      try {\n        // Load transformers.js\n        const { env, AutoTokenizer, AutoModelForCausalLM, pipeline } = await import('https://cdn.jsdelivr.net/npm/@huggingface/transformers@3.0.2');\n        console.log('Transformers.js loaded'); // Debugging statement\n        env.allowLocalModels = true;\n        // env.allowRemoteModels = true;\n        env.useBrowserCache = true;\n        let model_name = 'HuggingFaceTB/SmolLM2-135M-Instruct'\n\n        let tokenizer = await AutoTokenizer.from_pretrained(model_name);\n        console.log('Tokenizer loaded'); // Debugging statement\n\n        let model = await AutoModelForCausalLM.from_pretrained(model_name, {\n          dtype: 'q4f16', device: 'webgpu'\n        });\n        //'onnx-community/Phi-3.5-mini-instruct-onnx-web', {\n            // dtype: 'q4' , use_external_data_format: true\n        console.log('Model loaded'); // Debugging statement\n\n        const chat = [{\"role\": \"user\", \"content\": \"What is the capital of France.\"}]\n\n        console.log(\"chat\", chat)\n        let input_text = await tokenizer.apply_chat_template(chat , {tokenize: true, return_tensor: true, add_generation_prompt: true});\n        console.log(\"input_text\", input_text)\n        console.log(\"input_text.dims\",input_text.dims)\n        // let inputs = tokenizer.encode(input_text, {return_tensors:true})\n        // console.log(\"inputs\", inputs)\n        let outputs = await model.generate(input_text) // , { max_new_tokens: 130, max_length: 130, min_length: 3, length_penalty: 2.0, num_beams: 1 });\n        console.log(outputs)\n        let decoded = tokenizer.decode(outputs[0], { skip_special_tokens: true });\n        console.log(decoded)\n        \n      } catch (error) {\n        console.error('Error:', error);\n      }\n    }\n\n    testSummarization();\n  
</script>\n</head>\n<body>\n  <h1>Test Transformers.js</h1>\n</body>\n</html>\n
\n

I'm running into following error [Error: TypeError: Cannot read properties of null (reading 'dims')] which I'm unable to debug [attaching the stack trace below]. I see there's dims property in the input_text variable

\n
Transformers.js loaded\ntest_messages.html:17 Tokenizer loaded\ntransformers@3.0.2:100 2024-11-21 16:46:08.285399 [W:onnxruntime:, session_state.cc:1168 VerifyEachNodeIsAssignedToAnEp] Some nodes were not assigned to the preferred execution providers which may or may not have an negative impact on performance. e.g. ORT explicitly assigns shape related ops to CPU to improve perf.\nor @ transformers@3.0.2:100\n$func11798 @ ort-wasm-simd-threaded.jsep.wasm:0x103b449\n$func1946 @ ort-wasm-simd-threaded.jsep.wasm:0x229810\n$func836 @ ort-wasm-simd-threaded.jsep.wasm:0xc49fe\n$func11721 @ ort-wasm-simd-threaded.jsep.wasm:0x1033705\n$func630 @ ort-wasm-simd-threaded.jsep.wasm:0x8fad9\n$func88 @ ort-wasm-simd-threaded.jsep.wasm:0xd635\n$func14399 @ ort-wasm-simd-threaded.jsep.wasm:0x13b8e71\n$func123 @ ort-wasm-simd-threaded.jsep.wasm:0x15847\n$func2101 @ ort-wasm-simd-threaded.jsep.wasm:0x25f185\n$func11258 @ ort-wasm-simd-threaded.jsep.wasm:0xfed8f4\n$La @ ort-wasm-simd-threaded.jsep.wasm:0xaf6beb\nt.<computed> @ transformers@3.0.2:100\np._OrtCreateSession @ transformers@3.0.2:100\n(anonymous) @ transformers@3.0.2:100\ned @ transformers@3.0.2:100\nbd @ transformers@3.0.2:100\nloadModel @ transformers@3.0.2:100\ncreateInferenceSessionHandler @ transformers@3.0.2:100\ncreate @ transformers@3.0.2:100\nawait in create\ng @ transformers@3.0.2:151\n(anonymous) @ transformers@3.0.2:175\nawait in (anonymous)\nE @ transformers@3.0.2:175\nfrom_pretrained @ transformers@3.0.2:175\nawait in from_pretrained\nfrom_pretrained @ transformers@3.0.2:175\nawait in from_pretrained\ntestSummarization @ test_messages.html:19\nawait in testSummarization\n(anonymous) @ test_messages.html:44Understand this errorAI\ntransformers@3.0.2:100 2024-11-21 16:46:08.286500 [W:onnxruntime:, session_state.cc:1170 VerifyEachNodeIsAssignedToAnEp] Rerunning with verbose output on a non-minimal build will show node assignments.\nor @ transformers@3.0.2:100\n$func11798 @ 
ort-wasm-simd-threaded.jsep.wasm:0x103b449\n$func1946 @ ort-wasm-simd-threaded.jsep.wasm:0x229810\n$func836 @ ort-wasm-simd-threaded.jsep.wasm:0xc49fe\n$func11721 @ ort-wasm-simd-threaded.jsep.wasm:0x1033705\n$func630 @ ort-wasm-simd-threaded.jsep.wasm:0x8fad9\n$func88 @ ort-wasm-simd-threaded.jsep.wasm:0xd635\n$func14399 @ ort-wasm-simd-threaded.jsep.wasm:0x13b8e71\n$func123 @ ort-wasm-simd-threaded.jsep.wasm:0x15847\n$func2101 @ ort-wasm-simd-threaded.jsep.wasm:0x25f66a\n$func11258 @ ort-wasm-simd-threaded.jsep.wasm:0xfed8f4\n$La @ ort-wasm-simd-threaded.jsep.wasm:0xaf6beb\nt.<computed> @ transformers@3.0.2:100\np._OrtCreateSession @ transformers@3.0.2:100\n(anonymous) @ transformers@3.0.2:100\ned @ transformers@3.0.2:100\nbd @ transformers@3.0.2:100\nloadModel @ transformers@3.0.2:100\ncreateInferenceSessionHandler @ transformers@3.0.2:100\ncreate @ transformers@3.0.2:100\nawait in create\ng @ transformers@3.0.2:151\n(anonymous) @ transformers@3.0.2:175\nawait in (anonymous)\nE @ transformers@3.0.2:175\nfrom_pretrained @ transformers@3.0.2:175\nawait in from_pretrained\nfrom_pretrained @ transformers@3.0.2:175\nawait in from_pretrained\ntestSummarization @ test_messages.html:19\nawait in testSummarization\n(anonymous) @ test_messages.html:44Understand this errorAI\ntest_messages.html:24 Model loaded\ntest_messages.html:28 chat [{…}]0: {role: 'user', content: 'What is the capital of France.'}length: 1[[Prototype]]: Array(0)\ntest_messages.html:30 input_text Proxy(o) {ort_tensor: o}[[Handler]]: Object[[Target]]: o[[IsRevoked]]: false\ntest_messages.html:31 input_text.dims (2) [1, 37]0: 11: 37length: 2[[Prototype]]: Array(0)\ntest_messages.html:40 Error: TypeError: Cannot read properties of null (reading 'dims')\n    at Function.generate (transformers@3.0.2:175:29723)\n    at testSummarization (test_messages.html:34:35)\ntestSummarization @ test_messages.html:40\nawait in testSummarization\n(anonymous) @ test_messages.html:44Understand this errorAI\n
\n

please help.

\n","updatedAt":"2024-11-21T11:21:07.746Z","author":{"_id":"6058a23b5ab91954363a6511","avatarUrl":"https://aifasthub.com/avatars/v1/production/uploads/6058a23b5ab91954363a6511/wam6ertF3GgNdsxLXuP38.png","fullname":"Sukesh Perla","name":"hitchhiker3010","type":"user","isPro":true,"isHf":false,"isHfAdmin":false,"isMod":false,"followerCount":10}},"numEdits":0,"identifiedLanguage":{"language":"en","probability":0.5703312158584595},"editors":["hitchhiker3010"],"editorAvatarUrls":["https://aifasthub.com/avatars/v1/production/uploads/6058a23b5ab91954363a6511/wam6ertF3GgNdsxLXuP38.png"],"reactions":[],"isReport":false}},{"id":"673f17bce75a0093041b6488","author":{"_id":"6058a23b5ab91954363a6511","avatarUrl":"https://aifasthub.com/avatars/v1/production/uploads/6058a23b5ab91954363a6511/wam6ertF3GgNdsxLXuP38.png","fullname":"Sukesh Perla","name":"hitchhiker3010","type":"user","isPro":true,"isHf":false,"isHfAdmin":false,"isMod":false,"followerCount":10,"isOwner":false,"isOrgMember":false},"createdAt":"2024-11-21T11:21:32.000Z","type":"status-change","data":{"status":"open"}},{"id":"679aba06893d74763af43f43","author":{"_id":"67969c1b0972df3a957b76b5","avatarUrl":"https://aifasthub.com/avatars/v1/production/uploads/noauth/HME_-F1hqLh-V8dogA2bN.jpeg","fullname":"Jack Seymour","name":"shoebill-droyd","type":"user","isPro":false,"isHf":false,"isHfAdmin":false,"isMod":false,"isOwner":false,"isOrgMember":false},"createdAt":"2025-01-29T23:30:14.000Z","type":"comment","data":{"edited":false,"hidden":false,"latest":{"raw":"good luck getting any help from these useless ass admins LMFAO","html":"

good luck getting any help from these useless ass admins LMFAO

\n","updatedAt":"2025-01-29T23:30:14.327Z","author":{"_id":"67969c1b0972df3a957b76b5","avatarUrl":"https://aifasthub.com/avatars/v1/production/uploads/noauth/HME_-F1hqLh-V8dogA2bN.jpeg","fullname":"Jack Seymour","name":"shoebill-droyd","type":"user","isPro":false,"isHf":false,"isHfAdmin":false,"isMod":false}},"numEdits":0,"identifiedLanguage":{"language":"en","probability":0.7221434116363525},"editors":["shoebill-droyd"],"editorAvatarUrls":["https://aifasthub.com/avatars/v1/production/uploads/noauth/HME_-F1hqLh-V8dogA2bN.jpeg"],"reactions":[],"isReport":false}}],"pinned":false,"locked":false,"collection":"discussions","isPullRequest":false,"isReport":false},"repo":{"name":"HuggingFaceTB/SmolLM2-135M-Instruct","type":"model"},"activeTab":"discussion","discussionRole":0,"watched":false,"muted":false,"repoDiscussionsLocked":false}">

Error while trying to run using transformers.js

#5
by hitchhiker3010 - opened

Hi

I'm new to implementing JS and transformers.js.
I was trying to run this model on the browser using the following code

<!DOCTYPE html>
<html>
<head>
  <title>Test Transformers.js</title>
  <script type="module">
    async function testSummarization() {
      try {
        // Load transformers.js
        const { env, AutoTokenizer, AutoModelForCausalLM, pipeline } = await import('https://cdn.jsdelivr.net/npm/@huggingface/[email protected]');
        console.log('Transformers.js loaded'); // Debugging statement
        env.allowLocalModels = false
        // Load the summarization pipeline
        const summarizationPipeline = await pipeline('text-generation', 'HuggingFaceTB/SmolLM2-135M-Instruct', {
 dtype: 'q4f16', use_external_data_format: true,
});
        console.log('Summarization pipeline loaded'); // Debugging statement

        // Run the summarization
        const text = 'Hi my name is SmolLm2';
        console.log(text);

        const result = await summarizationPipeline(text, { max_length: 13, min_length: 3, length_penalty: 2.0, num_beams: 1 });
        console.log('Summarization result:', result); // Debugging statement

        console.log(result[0].summary_text);
      } catch (error) {
        console.error('Error:', error);
      }
    }

    testSummarization();
  </script>
</head>
<body>
  <h1>Test Transformers.js</h1>
</body>
</html>

This is the console log that I'm getting and having a hard time to understand

test.html:10 Transformers.js loaded
test.html:17 Summarization pipeline loaded
test.html:21 Hi my name is SmolLm2
[email protected]:175 An error occurred during model execution: "283134224".
A @ [email protected]:175
await in A
D @ [email protected]:175
forward @ [email protected]:175
generate @ [email protected]:175
_call @ [email protected]:187
e @ [email protected]:214
testSummarization @ test.html:23
await in testSummarization
(anonymous) @ test.html:32
[email protected]:175 Inputs given to model: {input_ids: Proxy(o), attention_mask: Proxy(o), position_ids: Proxy(o), past_key_values.0.key: Proxy(o), past_key_values.0.value: Proxy(o), …}
A @ [email protected]:175
await in A
D @ [email protected]:175
forward @ [email protected]:175
generate @ [email protected]:175
_call @ [email protected]:187
e @ [email protected]:214
testSummarization @ test.html:23
await in testSummarization
(anonymous) @ test.html:32
test.html:28 Error: 283134224
testSummarization @ test.html:28
await in testSummarization
(anonymous) @ test.html:32

Any help would be much appreciated, thanks in advance. 🤗

Hugging Face Smol Models Research org

Hi there! We're aware of the issue (CPU implementation of f16 [Skip][Simplified]LayerNormalizaion; see here), and this will be fixed in v3.1 (coming soon).

In the meantime, you should be able to fix it by either:

  1. Using WebGPU:
const summarizationPipeline = await pipeline('text-generation', 'HuggingFaceTB/SmolLM2-135M-Instruct', {
 dtype: 'q4f16', device: 'webgpu',
});
  2. Using a non-fp16 model:
const summarizationPipeline = await pipeline('text-generation', 'HuggingFaceTB/SmolLM2-135M-Instruct', {
 dtype: 'q4',
});

Thanks for the reply, I'm able to get the output.

hitchhiker3010 changed discussion status to closed

Hi

I was trying to implement chat template and generate using the code below.

<!DOCTYPE html>
<html>
<head>
  <title>Test Transformers.js</title>
  <script type="module">
    // Loads SmolLM2-135M-Instruct, applies the chat template to a single user
    // message, and generates a reply with transformers.js (WebGPU backend).
    async function testSummarization() {
      try {
        // Load transformers.js from the CDN.
        const { env, AutoTokenizer, AutoModelForCausalLM, pipeline } = await import('https://cdn.jsdelivr.net/npm/@huggingface/[email protected]');
        console.log('Transformers.js loaded'); // Debugging statement
        env.allowLocalModels = true;
        // env.allowRemoteModels = true;
        env.useBrowserCache = true;
        const model_name = 'HuggingFaceTB/SmolLM2-135M-Instruct';

        const tokenizer = await AutoTokenizer.from_pretrained(model_name);
        console.log('Tokenizer loaded'); // Debugging statement

        // q4f16 requires the WebGPU device (the f16 CPU path is broken pre-v3.1).
        const model = await AutoModelForCausalLM.from_pretrained(model_name, {
          dtype: 'q4f16', device: 'webgpu'
        });
        console.log('Model loaded'); // Debugging statement

        const chat = [{"role": "user", "content": "What is the capital of France."}];
        console.log("chat", chat);

        // BUG FIX: model.generate() expects a dict with BOTH input_ids and
        // attention_mask. Passing the bare input_ids tensor leaves
        // attention_mask null inside generate(), which throws
        // "Cannot read properties of null (reading 'dims')".
        // return_dict: true makes apply_chat_template return
        // { input_ids, attention_mask } ready to spread into generate().
        const inputs = tokenizer.apply_chat_template(chat, {
          add_generation_prompt: true,
          return_dict: true,
        });
        console.log("inputs", inputs);

        const outputs = await model.generate({
          ...inputs,
          max_new_tokens: 128,
        });
        console.log(outputs);

        // batch_decode handles the (batch, seq) output tensor directly.
        const decoded = tokenizer.batch_decode(outputs, { skip_special_tokens: true });
        console.log(decoded[0]);

      } catch (error) {
        console.error('Error:', error);
      }
    }

    testSummarization();
  </script>
</head>
<body>
  <h1>Test Transformers.js</h1>
</body>
</html>

I'm running into the following error [Error: TypeError: Cannot read properties of null (reading 'dims')], which I'm unable to debug [attaching the stack trace below]. I can see there is a dims property on the input_text variable.

Transformers.js loaded
test_messages.html:17 Tokenizer loaded
[email protected]:100 2024-11-21 16:46:08.285399 [W:onnxruntime:, session_state.cc:1168 VerifyEachNodeIsAssignedToAnEp] Some nodes were not assigned to the preferred execution providers which may or may not have an negative impact on performance. e.g. ORT explicitly assigns shape related ops to CPU to improve perf.
or @ [email protected]:100
$func11798 @ ort-wasm-simd-threaded.jsep.wasm:0x103b449
$func1946 @ ort-wasm-simd-threaded.jsep.wasm:0x229810
$func836 @ ort-wasm-simd-threaded.jsep.wasm:0xc49fe
$func11721 @ ort-wasm-simd-threaded.jsep.wasm:0x1033705
$func630 @ ort-wasm-simd-threaded.jsep.wasm:0x8fad9
$func88 @ ort-wasm-simd-threaded.jsep.wasm:0xd635
$func14399 @ ort-wasm-simd-threaded.jsep.wasm:0x13b8e71
$func123 @ ort-wasm-simd-threaded.jsep.wasm:0x15847
$func2101 @ ort-wasm-simd-threaded.jsep.wasm:0x25f185
$func11258 @ ort-wasm-simd-threaded.jsep.wasm:0xfed8f4
$La @ ort-wasm-simd-threaded.jsep.wasm:0xaf6beb
t.<computed> @ [email protected]:100
p._OrtCreateSession @ [email protected]:100
(anonymous) @ [email protected]:100
ed @ [email protected]:100
bd @ [email protected]:100
loadModel @ [email protected]:100
createInferenceSessionHandler @ [email protected]:100
create @ [email protected]:100
await in create
g @ [email protected]:151
(anonymous) @ [email protected]:175
await in (anonymous)
E @ [email protected]:175
from_pretrained @ [email protected]:175
await in from_pretrained
from_pretrained @ [email protected]:175
await in from_pretrained
testSummarization @ test_messages.html:19
await in testSummarization
(anonymous) @ test_messages.html:44Understand this errorAI
[email protected]:100 2024-11-21 16:46:08.286500 [W:onnxruntime:, session_state.cc:1170 VerifyEachNodeIsAssignedToAnEp] Rerunning with verbose output on a non-minimal build will show node assignments.
or @ [email protected]:100
$func11798 @ ort-wasm-simd-threaded.jsep.wasm:0x103b449
$func1946 @ ort-wasm-simd-threaded.jsep.wasm:0x229810
$func836 @ ort-wasm-simd-threaded.jsep.wasm:0xc49fe
$func11721 @ ort-wasm-simd-threaded.jsep.wasm:0x1033705
$func630 @ ort-wasm-simd-threaded.jsep.wasm:0x8fad9
$func88 @ ort-wasm-simd-threaded.jsep.wasm:0xd635
$func14399 @ ort-wasm-simd-threaded.jsep.wasm:0x13b8e71
$func123 @ ort-wasm-simd-threaded.jsep.wasm:0x15847
$func2101 @ ort-wasm-simd-threaded.jsep.wasm:0x25f66a
$func11258 @ ort-wasm-simd-threaded.jsep.wasm:0xfed8f4
$La @ ort-wasm-simd-threaded.jsep.wasm:0xaf6beb
t.<computed> @ [email protected]:100
p._OrtCreateSession @ [email protected]:100
(anonymous) @ [email protected]:100
ed @ [email protected]:100
bd @ [email protected]:100
loadModel @ [email protected]:100
createInferenceSessionHandler @ [email protected]:100
create @ [email protected]:100
await in create
g @ [email protected]:151
(anonymous) @ [email protected]:175
await in (anonymous)
E @ [email protected]:175
from_pretrained @ [email protected]:175
await in from_pretrained
from_pretrained @ [email protected]:175
await in from_pretrained
testSummarization @ test_messages.html:19
await in testSummarization
(anonymous) @ test_messages.html:44Understand this errorAI
test_messages.html:24 Model loaded
test_messages.html:28 chat [{…}]0: {role: 'user', content: 'What is the capital of France.'}length: 1[[Prototype]]: Array(0)
test_messages.html:30 input_text Proxy(o) {ort_tensor: o}[[Handler]]: Object[[Target]]: o[[IsRevoked]]: false
test_messages.html:31 input_text.dims (2) [1, 37]0: 11: 37length: 2[[Prototype]]: Array(0)
test_messages.html:40 Error: TypeError: Cannot read properties of null (reading 'dims')
    at Function.generate ([email protected]:175:29723)
    at testSummarization (test_messages.html:34:35)
testSummarization @ test_messages.html:40
await in testSummarization
(anonymous) @ test_messages.html:44Understand this errorAI

please help.

hitchhiker3010 changed discussion status to open

good luck getting any help from these useless ass admins LMFAO

Sign up or log in to comment