Zeus Labs



We are a small but ambitious AI research group, focused on developing performant and highly capable Large Language Models that aim to excel in their domains. Our specialty lies in the exploration of cutting-edge model finetuning methods and innovative data preparation techniques.

Our Mission

At Zeus Labs, we strive to push the boundaries of AI capabilities, particularly in the realm of language models. Our goal is to create models that not only perform well but also demonstrate exceptional abilities in specific domains, contributing to the advancement of AI technology and its applications.

Our Approach

  • Cutting-edge finetuning methods for Large Language Models
  • Innovative data preparation and curation techniques
  • Focus on domain-specific excellence and versatility
  • Open collaboration and knowledge sharing within the AI community
  • Advancing LLM research through novel techniques applied by all of our members

Team

Chief ML Engineer, M.S.

@elinas - HuggingFace Profile

Senior Data Scientist, PhD

@ToastyPigeon - HuggingFace Profile

Operations Engineer

@fizz - HuggingFace Profile

Notable Achievements

  • Revival of Llama 1 33B by continuing its training on over 500M additional tokens
  • Starting from the original pretraining count of roughly 1.4T tokens, we added another 500M, and to our surprise the result surpassed expectations in both quality and output length
  • The model was trained at a 16384 context length, with an *effective* context length of around 12k due to the nature of the samples, and it excels at RP
  • Our next goal is to apply GQA to it; in the meantime, we would appreciate quanters who can help make this model runnable with less VRAM (see the sketch after this list)!
  • Development of the L3-Aethora-15B series, the first heavily finetuned 15B model focused on creative writing and general intelligence, built with a novel technique known as "zeroing layers"
  • Creation of Aether-Lite-V1.8.1, a carefully curated dataset for AI training
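
For anyone who wants to try the revived 33B on limited VRAM before dedicated quants land, here is a minimal sketch, assuming the 🤗 Transformers and bitsandbytes libraries and a CUDA GPU, of loading one of our models in 4-bit precision. The model ID is just an example taken from our profile, and this is an illustration of a low-VRAM setup rather than an official recipe.

```python
# Minimal sketch (assumptions: transformers, bitsandbytes, and accelerate installed; CUDA GPU available).
# Loads a Zeus Labs model with on-the-fly 4-bit NF4 quantization to cut VRAM usage.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig

model_id = "ZeusLabs/Chronos-Divergence-33B"  # example repo from the Zeus Labs profile

bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,                       # keep weights in 4-bit NF4
    bnb_4bit_quant_type="nf4",
    bnb_4bit_compute_dtype=torch.bfloat16,   # run matmuls in bf16 for quality
)

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    quantization_config=bnb_config,
    device_map="auto",                       # spread layers across available GPUs
)

prompt = "Once upon a time,"
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
output = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.decode(output[0], skip_special_tokens=True))
```

On-the-fly 4-bit loading like this trades a little quality for memory, so pre-made community quants (GGUF, EXL2, etc.) remain the more convenient long-term option.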

Join Us

We are currently growing and looking for passionate individuals interested in machine learning and AI research. Whether you're a seasoned researcher or an enthusiastic beginner, there's a place for you in our community.

Join our Discord to connect with like-minded individuals, share ideas, and potentially collaborate on exciting AI projects!

Join The Zeus Labs Discord

Our Work

Explore our independently developed work and collaborations on our HuggingFace profiles. We're always pushing the boundaries of what's possible with AI!

Model Quanters!

If you create quants for our models and we miss them, please open a discussion on that model repository and we will add them to the model card!
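
If you prefer to open that discussion from a script, below is a minimal sketch, assuming the huggingface_hub Python client and an access token in the HF_TOKEN environment variable; the repo, title, and text are illustrative placeholders, and posting through the website UI works just as well.

```python
# Minimal sketch (assumptions: huggingface_hub installed; HF_TOKEN environment variable set).
# Opens a discussion on a Zeus Labs model repo to point us at a new community quant.
import os
from huggingface_hub import HfApi

api = HfApi(token=os.environ["HF_TOKEN"])

discussion = api.create_discussion(
    repo_id="ZeusLabs/Chronos-Divergence-33B",  # the model your quant is based on (example)
    repo_type="model",
    title="Community quant available",          # illustrative title
    description="I published a quantized version of this model; link and details inside.",
)
print(f"Opened discussion #{discussion.num}")
```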
