{ // 获取包含Hugging Face文本的span元素 const spans = link.querySelectorAll('span.whitespace-nowrap, span.hidden.whitespace-nowrap'); spans.forEach(span => { if (span.textContent && span.textContent.trim().match(/Hugging\s*Face/i)) { span.textContent = 'AI快站'; } }); }); // 替换logo图片的alt属性 document.querySelectorAll('img[alt*="Hugging"], img[alt*="Face"]').forEach(img => { if (img.alt.match(/Hugging\s*Face/i)) { img.alt = 'AI快站 logo'; } }); } // 替换导航栏中的链接 function replaceNavigationLinks() { // 已替换标记,防止重复运行 if (window._navLinksReplaced) { return; } // 已经替换过的链接集合,防止重复替换 const replacedLinks = new Set(); // 只在导航栏区域查找和替换链接 const headerArea = document.querySelector('header') || document.querySelector('nav'); if (!headerArea) { return; } // 在导航区域内查找链接 const navLinks = headerArea.querySelectorAll('a'); navLinks.forEach(link => { // 如果已经替换过,跳过 if (replacedLinks.has(link)) return; const linkText = link.textContent.trim(); const linkHref = link.getAttribute('href') || ''; // 替换Spaces链接 - 仅替换一次 if ( (linkHref.includes('/spaces') || linkHref === '/spaces' || linkText === 'Spaces' || linkText.match(/^s*Spacess*$/i)) && linkText !== 'OCR模型免费转Markdown' && linkText !== 'OCR模型免费转Markdown' ) { link.textContent = 'OCR模型免费转Markdown'; link.href = 'https://fast360.xyz'; link.setAttribute('target', '_blank'); link.setAttribute('rel', 'noopener noreferrer'); replacedLinks.add(link); } // 删除Posts链接 else if ( (linkHref.includes('/posts') || linkHref === '/posts' || linkText === 'Posts' || linkText.match(/^s*Postss*$/i)) ) { if (link.parentNode) { link.parentNode.removeChild(link); } replacedLinks.add(link); } // 替换Docs链接 - 仅替换一次 else if ( (linkHref.includes('/docs') || linkHref === '/docs' || linkText === 'Docs' || linkText.match(/^s*Docss*$/i)) && linkText !== '模型下载攻略' ) { link.textContent = '模型下载攻略'; link.href = '/'; replacedLinks.add(link); } // 删除Enterprise链接 else if ( (linkHref.includes('/enterprise') || linkHref === '/enterprise' || linkText === 'Enterprise' || linkText.match(/^s*Enterprises*$/i)) 
) { if (link.parentNode) { link.parentNode.removeChild(link); } replacedLinks.add(link); } }); // 查找可能嵌套的Spaces和Posts文本 const textNodes = []; function findTextNodes(element) { if (element.nodeType === Node.TEXT_NODE) { const text = element.textContent.trim(); if (text === 'Spaces' || text === 'Posts' || text === 'Enterprise') { textNodes.push(element); } } else { for (const child of element.childNodes) { findTextNodes(child); } } } // 只在导航区域内查找文本节点 findTextNodes(headerArea); // 替换找到的文本节点 textNodes.forEach(node => { const text = node.textContent.trim(); if (text === 'Spaces') { node.textContent = node.textContent.replace(/Spaces/g, 'OCR模型免费转Markdown'); } else if (text === 'Posts') { // 删除Posts文本节点 if (node.parentNode) { node.parentNode.removeChild(node); } } else if (text === 'Enterprise') { // 删除Enterprise文本节点 if (node.parentNode) { node.parentNode.removeChild(node); } } }); // 标记已替换完成 window._navLinksReplaced = true; } // 替换代码区域中的域名 function replaceCodeDomains() { // 特别处理span.hljs-string和span.njs-string元素 document.querySelectorAll('span.hljs-string, span.njs-string, span[class*="hljs-string"], span[class*="njs-string"]').forEach(span => { if (span.textContent && span.textContent.includes('huggingface.co')) { span.textContent = span.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 替换hljs-string类的span中的域名(移除多余的转义符号) document.querySelectorAll('span.hljs-string, span[class*="hljs-string"]').forEach(span => { if (span.textContent && span.textContent.includes('huggingface.co')) { span.textContent = span.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 替换pre和code标签中包含git clone命令的域名 document.querySelectorAll('pre, code').forEach(element => { if (element.textContent && element.textContent.includes('git clone')) { const text = element.innerHTML; if (text.includes('huggingface.co')) { element.innerHTML = text.replace(/huggingface.co/g, 'aifasthub.com'); } } }); // 处理特定的命令行示例 document.querySelectorAll('pre, code').forEach(element => { const 
text = element.innerHTML; if (text.includes('huggingface.co')) { // 针对git clone命令的专门处理 if (text.includes('git clone') || text.includes('GIT_LFS_SKIP_SMUDGE=1')) { element.innerHTML = text.replace(/huggingface.co/g, 'aifasthub.com'); } } }); // 特别处理模型下载页面上的代码片段 document.querySelectorAll('.flex.border-t, .svelte_hydrator, .inline-block').forEach(container => { const content = container.innerHTML; if (content && content.includes('huggingface.co')) { container.innerHTML = content.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 特别处理模型仓库克隆对话框中的代码片段 try { // 查找包含"Clone this model repository"标题的对话框 const cloneDialog = document.querySelector('.svelte_hydration_boundary, [data-target="MainHeader"]'); if (cloneDialog) { // 查找对话框中所有的代码片段和命令示例 const codeElements = cloneDialog.querySelectorAll('pre, code, span'); codeElements.forEach(element => { if (element.textContent && element.textContent.includes('huggingface.co')) { if (element.innerHTML.includes('huggingface.co')) { element.innerHTML = element.innerHTML.replace(/huggingface.co/g, 'aifasthub.com'); } else { element.textContent = element.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } } }); } // 更精确地定位克隆命令中的域名 document.querySelectorAll('[data-target]').forEach(container => { const codeBlocks = container.querySelectorAll('pre, code, span.hljs-string'); codeBlocks.forEach(block => { if (block.textContent && block.textContent.includes('huggingface.co')) { if (block.innerHTML.includes('huggingface.co')) { block.innerHTML = block.innerHTML.replace(/huggingface.co/g, 'aifasthub.com'); } else { block.textContent = block.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } } }); }); } catch (e) { // 错误处理但不打印日志 } } // 当DOM加载完成后执行替换 if (document.readyState === 'loading') { document.addEventListener('DOMContentLoaded', () => { replaceHeaderBranding(); replaceNavigationLinks(); replaceCodeDomains(); // 只在必要时执行替换 - 3秒后再次检查 setTimeout(() => { if (!window._navLinksReplaced) { console.log('[Client] 3秒后重新检查导航链接'); 
replaceNavigationLinks(); } }, 3000); }); } else { replaceHeaderBranding(); replaceNavigationLinks(); replaceCodeDomains(); // 只在必要时执行替换 - 3秒后再次检查 setTimeout(() => { if (!window._navLinksReplaced) { console.log('[Client] 3秒后重新检查导航链接'); replaceNavigationLinks(); } }, 3000); } // 增加一个MutationObserver来处理可能的动态元素加载 const observer = new MutationObserver(mutations => { // 检查是否导航区域有变化 const hasNavChanges = mutations.some(mutation => { // 检查是否存在header或nav元素变化 return Array.from(mutation.addedNodes).some(node => { if (node.nodeType === Node.ELEMENT_NODE) { // 检查是否是导航元素或其子元素 if (node.tagName === 'HEADER' || node.tagName === 'NAV' || node.querySelector('header, nav')) { return true; } // 检查是否在导航元素内部 let parent = node.parentElement; while (parent) { if (parent.tagName === 'HEADER' || parent.tagName === 'NAV') { return true; } parent = parent.parentElement; } } return false; }); }); // 只在导航区域有变化时执行替换 if (hasNavChanges) { // 重置替换状态,允许再次替换 window._navLinksReplaced = false; replaceHeaderBranding(); replaceNavigationLinks(); } }); // 开始观察document.body的变化,包括子节点 if (document.body) { observer.observe(document.body, { childList: true, subtree: true }); } else { document.addEventListener('DOMContentLoaded', () => { observer.observe(document.body, { childList: true, subtree: true }); }); } })(); \"))\n self.label_2.setText(_translate(\"deDialog\", \"Beschreibung\"))\n\n\nif __name__ == \"__main__\":\n import sys\n app = QtWidgets.QApplication(sys.argv)\n deDialog = QtWidgets.QDialog()\n ui = Ui_deDialog()\n ui.setupUi(deDialog)\n deDialog.show()\n 
sys.exit(app.exec_())\n"},"avg_line_length":{"kind":"number","value":49.65625,"string":"49.65625"},"max_line_length":{"kind":"number","value":360,"string":"360"},"alphanum_fraction":{"kind":"number","value":0.7063142438,"string":"0.706314"}}},{"rowIdx":46414,"cells":{"hexsha":{"kind":"string","value":"02a2c2f99ff242e76937c1375e57dcd53493ff16"},"size":{"kind":"number","value":10924,"string":"10,924"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"extractTool/extractTool/getCSVInfo.py"},"max_stars_repo_name":{"kind":"string","value":"corneliazy/Geosoftware2"},"max_stars_repo_head_hexsha":{"kind":"string","value":"8604c79c58a61b84c602f16b5f1e74e30dfcbd0e"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"extractTool/extractTool/getCSVInfo.py"},"max_issues_repo_name":{"kind":"string","value":"corneliazy/Geosoftware2"},"max_issues_repo_head_hexsha":{"kind":"string","value":"8604c79c58a61b84c602f16b5f1e74e30dfcbd0e"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"number","value":47,"string":"47"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2018-11-13T13:55:01.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2019-09-16T13:38:11.000Z"},"max_forks_repo_path":{"kind":"string","value":"extractTool/extractTool/getCSVInfo.py"},"max_forks_repo_name":{"kind":"string","value":"corneliazy/Geosoftware2"},"max_forks_repo_head_hexsha":{"kind":"string","value":"8604c79c58a61b84c602f16b5f1e74e30dfcbd0e"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_forks_count":{"kind":"number","value":4,"string":"4"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2018-11-27T12:36:51.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2020-10-14T18:07:04.000Z"},"content":{"kind":"string","value":"import click,json, sqlite3, pygeoj, csv\nfrom osgeo import gdal, ogr, osr\nimport pandas as pd\nimport numpy as np\nimport extractTool\nfrom scipy.spatial import ConvexHull\nimport dateparser\nfrom pyproj import Proj, transform\n#import sys\n\n#import ogr2ogr\n#ogr2ogr.BASEPATH = \"/home/caro/Vorlagen/Geosoftware2/Metadatenextraktion\"\n\n\ndef getCSVbbx(filepath, detail, folder, time):\n\n \"\"\"returns the bounding Box CSV\n @see https://www.programiz.com/python-programming/reading-csv-files\n @param path Path to the file \"\"\"\n \n #format validation\n pd.read_csv(filepath)\n click.echo(\"csv\")\n CRSinfo = True\n listlat = [\"Koordinate_Hochwert\",\"lat\",\"Latitude\",\"latitude\"]\n listlon = [\"Koordinate_Rechtswert\",\"lon\",\"Longitude\",\"longitude\",\"lng\"]\n listCRS = [\"CRS\",\"crs\",\"Koordinatensystem\",\"EPSG\",\"Coordinate reference system\", \"coordinate system\"]\n listtime = [\"time\", \"timestamp\", \"date\", \"Time\", \"Jahr\", \"Datum\"]\n try:\n deli=';'\n df = pd.read_csv(filepath, delimiter=deli,engine='python')\n #tests if there is a column named Coordinatesystem or similar\n click.echo(\"hi\")\n #click.echo(df.columns.values)\n #click.echo(intersect(listCRS,df.columns.values))\n if not intersect(listCRS,df.columns.values):\n CRSinfo= False\n print(\"hu\")\n print(\"No fitting header for a reference system\")\n\n if not(((intersect(listlat,df.columns.values) and intersect(listlon,df.columns.values)))or (intersect(listtime, df.columns.values))):\n #output=\"No fitting header for latitudes or longitudes\"\n raise Exception('No fitting ')\n #print(output)\n #return output\n\n except Exception as exce:\n deli=','\n df = pd.read_csv(filepath, 
delimiter=deli,engine='python')\n #tests if there is a column named Coordinatesystem or similar\n click.echo(\"hi\")\n #click.echo(df.columns.values)\n #click.echo(intersect(listCRS,df.columns.values))\n if not intersect(listCRS,df.columns.values):\n CRSinfo= False\n \n print(\"No fitting header for a reference system2\")\n z=intersect(listtime, df.columns.values)\n print (z)\n t=intersect(listlat,df.columns.values) and intersect(listlon,df.columns.values)\n print (intersect(listlat,df.columns.values))\n print(\"_______________\")\n print(t)\n if not t:\n print(\"false\")\n\n if not(((intersect(listlat,df.columns.values) and intersect(listlon,df.columns.values)))or (intersect(listtime, df.columns.values))):\n #output=\"No fitting header for latitudes or longitudes\"\n #raise Exception('No fim')\n \n raise Exception(\"evtl kein csv oder ungueltiges Trennzeichen.\")\n #print(\"keine Koordinaten vorhanden\")\n #print(output)\n #return output\n print (exce)\n\n if detail =='bbox':\n click.echo(\"bbox\")\n # Using Pandas: http://pandas.pydata.org/pandas-docs/stable/io.html\n #if folder=='single':\n mylat=intersect(listlat,df.columns.values)\n mylon=intersect(listlon,df.columns.values)\n lats=df[mylat[0]]\n lons=df[mylon[0]]\n bbox=[min(lats),min(lons),max(lats),max(lons)]\n # CRS transformation if there is information about crs\n if(CRSinfo):\n mycrsID=intersect(listCRS,df.columns.values)\n myCRS=df[mycrsID[0]]\n lat1t,lng1t = extractTool.transformToWGS84(min(lats),min(lons), myCRS)\n lat2t,lng2t = extractTool.transformToWGS84(max(lats),max(lons), myCRS)\n bbox=[lat1t,lng1t,lat2t,lng2t]\n if folder=='single':\n print(\"----------------------------------------------------------------\")\n click.echo(\"Filepath:\")\n click.echo(filepath)\n click.echo(\"Boundingbox of the CSV object:\")\n click.echo(bbox)\n print(\"----------------------------------------------------------------\")\n extractTool.ret_value.append(bbox)\n if folder=='whole':\n 
extractTool.bboxArray.append(bbox)\n print(\"----------------------------------------------------------------\")\n click.echo(\"Filepath:\")\n click.echo(filepath)\n click.echo(\"Boundingbox of the CSV:\")\n click.echo(bbox)\n print(\"----------------------------------------------------------------\")\n else:\n if folder=='single':\n print(\"----------------------------------------------------------------\")\n click.echo(\"Filepath:\")\n click.echo(filepath)\n click.echo(\"Boundingbox of the CSV object:\")\n print(bbox)\n print(\"Missing CRS -----> Boundingbox will not be saved in zenodo.\")\n print(\"----------------------------------------------------------------\")\n extractTool.ret_value.append([None])\n if folder=='whole':\n print(\"----------------------------------------------------------------\")\n click.echo(\"Filepath:\")\n click.echo(filepath)\n click.echo(\"Boundingbox of the CSV file:\")\n click.echo(bbox)\n click.echo(\"because of a missing crs this CSV is not part of the folder calculation.\")\n print(\"----------------------------------------------------------------\")\n\n else:\n extractTool.ret_value.append([None])\n\n #returns the convex hull of the coordinates from the CSV object.\n if detail == 'convexHull':\n click.echo(\"convexHull\")\n mylat=intersect(listlat,df.columns.values)\n mylon=intersect(listlon,df.columns.values)\n lats=df[mylat[0]]\n lons=df[mylon[0]]\n coords=np.column_stack((lats, lons))\n #definition and calculation of the convex hull\n hull=ConvexHull(coords)\n hull_points=hull.vertices\n convHull=[]\n for z in hull_points:\n point=[coords[z][0], coords[z][1]]\n convHull.append(point)\n if(CRSinfo):\n mycrsID=intersect(listCRS,df.columns.values)\n myCRS=df[mycrsID[0]]\n inputProj='epsg:'\n inputProj+=str(myCRS[0])\n print(inputProj)\n inProj = Proj(init=inputProj)\n outProj = Proj(init='epsg:4326')\n for z in coords:\n z[0],z[1] = transform(inProj,outProj,z[0],z[1])\n if folder=='single':\n 
print(\"----------------------------------------------------------------\")\n click.echo(\"Filepath:\")\n click.echo(filepath)\n click.echo(\"convex Hull of the csv file: \")\n click.echo(convHull)\n print(\"----------------------------------------------------------------\")\n extractTool.ret_value.append(convHull)\n if folder=='whole':\n extractTool.bboxArray=extractTool.bboxArray+convHull\n print(\"----------------------------------------------------------------\")\n click.echo(\"Filepath:\")\n click.echo(filepath)\n click.echo(\"convex hull of the CSV:\")\n click.echo(convHull)\n print(\"----------------------------------------------------------------\")\n #return convHull\n else:\n if folder=='single':\n print(\"----------------------------------------------------------------\")\n click.echo(\"Filepath:\")\n click.echo(filepath)\n click.echo(\"Convex hull of the CSV object:\")\n print(convHull)\n print(\"Missing CRS -----> Boundingbox will not be saved in zenodo.\")\n print(\"----------------------------------------------------------------\")\n extractTool.ret_value.append([None])\n if folder=='whole':\n print(\"----------------------------------------------------------------\")\n click.echo(\"Filepath:\")\n click.echo(filepath)\n click.echo(\"Convex hull of the CSV file:\")\n click.echo(convHull)\n click.echo(\"because of a missing crs this CSV is not part of the folder calculation.\")\n print(\"----------------------------------------------------------------\")\n\n\n else:\n extractTool.ret_value.append([None])\n\n\n\n \n if (time):\n click.echo(\"hallo\")\n # Using Pandas: http://pandas.pydata.org/pandas-docs/stable/io.html\n df = pd.read_csv(filepath, sep=';|,',engine='python')\n click.echo(listtime)\n click.echo(df.columns.values)\n intersection=intersect(listtime, df.columns.values)\n click.echo(intersection)\n if not intersection:\n print(\"No fitting header for time-values\")\n extractTool.ret_value.append([None])\n # TODO: fehlerbehandlung \n #try:\n 
#for t in listtime:\n #if(x not in df.columns.values):\n #click.echo(\"This file does not include time-values\")\n #else:\n #time=df[t]\n #timeextend =[min(time), max(time)]\n #click.echo(timeextend)\n #return timeextend\n #except Exception as e:\n #click.echo (\"There is no time-value or invalid file.\")\n #return None \n else:\n \n \n time=df[intersection[0]]\n print(min(time))\n print(max(time))\n timemin=str(min(time))\n timemax=str(max(time))\n timemax_formatted=dateparser.parse(timemax)\n timemin_formatted=dateparser.parse(timemin)\n timeextend=[timemin_formatted, timemax_formatted]\n print(timeextend)\n if folder=='single':\n print(\"----------------------------------------------------------------\")\n click.echo(\"Timeextend of this CSV file:\")\n click.echo(timeextend)\n print(\"----------------------------------------------------------------\")\n extractTool.ret_value.append([timeextend])\n #return timeextend\n if folder=='whole':\n extractTool.timeextendArray.append(timeextend)\n print(\"timeextendArray:\")\n print(extractTool.timeextendArray)\n\n else:\n extractTool.ret_value.append([None])\n if folder=='single':\n print(extractTool.ret_value)\n return extractTool.ret_value\n# Hilfsfunktion fuer csv fehlerbehandlung\ndef intersect(a, b):\n return list(set(a) & 
set(b))\n"},"avg_line_length":{"kind":"number","value":42.8392156863,"string":"42.839216"},"max_line_length":{"kind":"number","value":141,"string":"141"},"alphanum_fraction":{"kind":"number","value":0.493958257,"string":"0.493958"}}},{"rowIdx":46415,"cells":{"hexsha":{"kind":"string","value":"02ac9a27a13a5b616135c44e6df047d30b680126"},"size":{"kind":"number","value":1762,"string":"1,762"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"headphonecom/headphonecom_compensation.py"},"max_stars_repo_name":{"kind":"string","value":"Banbeucmas/AutoEq"},"max_stars_repo_head_hexsha":{"kind":"string","value":"b8549b2347a19e1f127e6395147ecd6fb225a8ce"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":1,"string":"1"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2020-07-17T03:48:21.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2020-07-17T03:48:21.000Z"},"max_issues_repo_path":{"kind":"string","value":"headphonecom/headphonecom_compensation.py"},"max_issues_repo_name":{"kind":"string","value":"hyshuma/AutoEq"},"max_issues_repo_head_hexsha":{"kind":"string","value":"4c3ad3478d6b74fc794bd7d973b330d38da338b9"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"headphonecom/headphonecom_compensation.py"},"max_forks_repo_name":{"kind":"string","value":"hyshuma/AutoEq"},"max_forks_repo_head_hexsha":{"kind":"string","value":"4c3ad3478d6b74fc794bd7d973b330d38da338b9"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"# -*- coding:utf-8 -*-\n\nimport os\nfrom glob import glob\nimport matplotlib.pyplot as plt\nimport matplotlib.ticker as ticker\nimport numpy as np\nfrom frequency_response import FrequencyResponse\n\n\ndef main():\n fig, ax = plt.subplots()\n diffs = []\n # Calculate differences for all models\n for file in glob(os.path.join('compensation', 'compensated', '**', '*.csv'), recursive=True):\n file = os.path.abspath(file)\n comp = FrequencyResponse.read_from_csv(file)\n comp.interpolate()\n comp.center()\n raw_data_path = file.replace('compensated', 'raw')\n raw = FrequencyResponse.read_from_csv(raw_data_path)\n raw.interpolate()\n raw.center()\n diff = FrequencyResponse(name=comp.name, frequency=comp.frequency, raw=raw.raw-comp.raw)\n plt.plot(diff.frequency, diff.raw)\n diffs.append(diff.raw)\n\n # Average and smoothen difference\n f = FrequencyResponse.generate_frequencies()\n diffs = np.vstack(diffs)\n diff = np.mean(diffs, axis=0)\n diff = FrequencyResponse(name='Headphone.com Compensation', frequency=f, raw=diff)\n diff.smoothen_fractional_octave(window_size=1 / 9, iterations=10)\n diff.raw = diff.smoothed\n diff.smoothed = np.array([])\n\n plt.xlabel('Frequency (Hz)')\n plt.semilogx()\n plt.xlim([20, 20000])\n plt.ylabel('Amplitude (dBr)')\n plt.ylim([-15, 15])\n plt.grid(which='major')\n plt.grid(which='minor')\n plt.title('Headphone.com Compensation Function')\n ax.xaxis.set_major_formatter(ticker.StrMethodFormatter('{x:.0f}'))\n plt.show()\n\n diff.write_to_csv('headphonecom_compensation.csv')\n diff.plot_graph(show=True, f_min=10, f_max=20000, file_path='headphonecom_compensation.png')\n\n\nif __name__ == '__main__':\n 
main()\n"},"avg_line_length":{"kind":"number","value":32.6296296296,"string":"32.62963"},"max_line_length":{"kind":"number","value":97,"string":"97"},"alphanum_fraction":{"kind":"number","value":0.6799091941,"string":"0.679909"}}},{"rowIdx":46416,"cells":{"hexsha":{"kind":"string","value":"02e5cf7fe0b3e9ede5b09363a9704e7629b2454a"},"size":{"kind":"number","value":1225,"string":"1,225"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"tspdb/src/data/generateARMA.py"},"max_stars_repo_name":{"kind":"string","value":"swipswaps/tspdb"},"max_stars_repo_head_hexsha":{"kind":"string","value":"9c085cef7164c114bb0952519b9715dcfa072b34"},"max_stars_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_stars_count":{"kind":"number","value":43,"string":"43"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2019-12-10T00:05:51.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2022-03-31T21:21:20.000Z"},"max_issues_repo_path":{"kind":"string","value":"tspdb/src/data/generateARMA.py"},"max_issues_repo_name":{"kind":"string","value":"swipswaps/tspdb"},"max_issues_repo_head_hexsha":{"kind":"string","value":"9c085cef7164c114bb0952519b9715dcfa072b34"},"max_issues_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_issues_count":{"kind":"number","value":5,"string":"5"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2021-05-09T01:12:31.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2022-03-29T17:34:15.000Z"},"max_forks_repo_path":{"kind":"string","value":"tspdb/src/data/generateARMA.py"},"max_forks_repo_name":{"kind":"string","value":"swipswaps/tspdb"},"max_forks_repo_head_hexsha":{"kind":"string","value":"9c085cef7164c114bb0952519b9715dcfa072b34"},"max_forks_repo_licenses":{"kind":"list 
like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_forks_count":{"kind":"number","value":14,"string":"14"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2020-01-13T21:20:07.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2022-03-31T02:11:26.000Z"},"content":{"kind":"string","value":"######################################################\r\n#\r\n# Generate ARMA data\r\n#\r\n######################################################\r\nimport numpy as np\r\n\r\ndef generate(arLagsArray, maLagsArray, startingArray, timeSteps, noiseMean, noiseSD, tStart = 0):\r\n\r\n\tp = len(arLagsArray)\r\n\tq = len(maLagsArray)\r\n\tprevPoints = len(startingArray)\r\n\r\n\tif (p > prevPoints):\r\n\t\traise Exception('startingArray must be of length >= arLagsArray.')\r\n\r\n\tif (q > prevPoints):\r\n\t\traise Exception('startingArray must be of length >= maLagsArray.')\r\n\r\n\r\n\tmaxLags = np.max([p, q])\r\n\toutputArray = np.zeros(timeSteps + maxLags)\r\n\toutputArray[0: maxLags] = startingArray\r\n\tif len(noiseSD)> 1:\r\n\t\tnoiseSD = [1 for i in range(maxLags)] + list(noiseSD)\r\n\telse: noiseSD = noiseSD[0]\r\n\terrorArray = np.random.normal(noiseMean, noiseSD, timeSteps + maxLags)\r\n\tmeanArray = np.zeros(timeSteps + maxLags)\r\n\r\n\tfor i in range(maxLags, timeSteps):\r\n\t\tvalue = 0.0\r\n\t\tfor j in range(0, p):\r\n\t\t\tvalue += (outputArray[i-j] * arLagsArray[j])\r\n\r\n\t\tfor k in range(0, q):\r\n\t\t\tvalue += (errorArray[i-k] * maLagsArray[k])\r\n\r\n\t\toutputArray[i] = value + errorArray[i]\r\n\t\tmeanArray[i] = value\r\n\r\n\treturn (outputArray[maxLags:], meanArray[maxLags:], 
errorArray[maxLags:])"},"avg_line_length":{"kind":"number","value":29.8780487805,"string":"29.878049"},"max_line_length":{"kind":"number","value":98,"string":"98"},"alphanum_fraction":{"kind":"number","value":0.6130612245,"string":"0.613061"}}},{"rowIdx":46417,"cells":{"hexsha":{"kind":"string","value":"b8b958d45f41d796a20ee391dc6c5d61374262f8"},"size":{"kind":"number","value":1128,"string":"1,128"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"app/websocket_message.py"},"max_stars_repo_name":{"kind":"string","value":"openbikebox/websocket-client"},"max_stars_repo_head_hexsha":{"kind":"string","value":"50b61a70ffcff1acdc13ba69c017e671bd3f983f"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"app/websocket_message.py"},"max_issues_repo_name":{"kind":"string","value":"openbikebox/websocket-client"},"max_issues_repo_head_hexsha":{"kind":"string","value":"50b61a70ffcff1acdc13ba69c017e671bd3f983f"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"app/websocket_message.py"},"max_forks_repo_name":{"kind":"string","value":"openbikebox/websocket-client"},"max_forks_repo_head_hexsha":{"kind":"string","value":"50b61a70ffcff1acdc13ba69c017e671bd3f983f"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"# encoding: utf-8\n\n\"\"\"\nopenbikebox websocket-client\nCopyright (c) 2021, binary butterfly GmbH\nUse of this source code is governed by an MIT-style license that can be found in the LICENSE file.\n\"\"\"\n\nfrom .config import Config\nfrom .system import system\n\n\nclass WebsocketMessage:\n reply = None\n\n def __init__(self, message):\n self.type = message['type']\n self.state = message['state']\n self.uid = message['uid']\n self.data = message['data']\n\n if hasattr(self, 'handle%s' % self.type):\n getattr(self, 'handle%s' % self.type)()\n\n def handleRemoteChangeResourceStatus(self):\n if self.data.get('uid') not in Config.RESOURCES or self.data.get('status') not in ['open', 'closed']:\n self.reply = {'status': 'fail'}\n return\n if self.data['status'] == 'open':\n system.resources[self.data.get('uid')].open()\n else:\n system.resources[self.data.get('uid')].close()\n self.reply = {'status': 'ok'}\n\n def handleAuthorizeReply(self):\n system.take_resource(self.data['request_uid'], self.data['resource_uid'])\n"},"avg_line_length":{"kind":"number","value":30.4864864865,"string":"30.486486"},"max_line_length":{"kind":"number","value":109,"string":"109"},"alphanum_fraction":{"kind":"number","value":0.6223404255,"string":"0.62234"}}},{"rowIdx":46418,"cells":{"hexsha":{"kind":"string","value":"a7595913c31f44d9ff36696d2e655a6feb9b75ea"},"size":{"kind":"number","value":128,"string":"128"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"Backtracking/Permutations.py"},"max_stars_repo_name":{"kind":"string","value":"dileeppandey/hello-interview"},"max_stars_repo_head_hexsha":{"kind":"string","value":"78f6cf4e2da4106fd07f4bd86247026396075c69"},"max_stars_repo_licenses":{"kind":"list 
like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"Backtracking/Permutations.py"},"max_issues_repo_name":{"kind":"string","value":"dileeppandey/hello-interview"},"max_issues_repo_head_hexsha":{"kind":"string","value":"78f6cf4e2da4106fd07f4bd86247026396075c69"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"Backtracking/Permutations.py"},"max_forks_repo_name":{"kind":"string","value":"dileeppandey/hello-interview"},"max_forks_repo_head_hexsha":{"kind":"string","value":"78f6cf4e2da4106fd07f4bd86247026396075c69"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"number","value":1,"string":"1"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2020-02-12T16:57:46.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2020-02-12T16:57:46.000Z"},"content":{"kind":"string","value":"\"\"\"\nhttps://leetcode.com/problems/permutations/\n\nGiven a collection of distinct integers, return all possible 
permutations.\n\"\"\"\n"},"avg_line_length":{"kind":"number","value":21.3333333333,"string":"21.333333"},"max_line_length":{"kind":"number","value":74,"string":"74"},"alphanum_fraction":{"kind":"number","value":0.7734375,"string":"0.773438"}}},{"rowIdx":46419,"cells":{"hexsha":{"kind":"string","value":"a76f43d76ecd9b2aaef44f9c0e051042ee44134b"},"size":{"kind":"number","value":586,"string":"586"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"Tests/test_mond.py"},"max_stars_repo_name":{"kind":"string","value":"fdienesch/Solar-System"},"max_stars_repo_head_hexsha":{"kind":"string","value":"617096bcb525a19ee94ff86948b53bd8a65e4386"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"Tests/test_mond.py"},"max_issues_repo_name":{"kind":"string","value":"fdienesch/Solar-System"},"max_issues_repo_head_hexsha":{"kind":"string","value":"617096bcb525a19ee94ff86948b53bd8a65e4386"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"Tests/test_mond.py"},"max_forks_repo_name":{"kind":"string","value":"fdienesch/Solar-System"},"max_forks_repo_head_hexsha":{"kind":"string","value":"617096bcb525a19ee94ff86948b53bd8a65e4386"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"from unittest import TestCase\nfrom 
Objekte.mond import Mond\nfrom Model.solarSysModel import SolarSunModel\nfrom OpenGL.GLUT import *\n\n__author__ = 'floriandienesch'\n\n\nclass TestMond(TestCase):\n\n def setUp(self):\n glutInit(sys.argv)\n glutCreateWindow(\"Solarsystem v0.9\")\n\n self.mond = Mond()\n self.model = SolarSunModel()\n\n def test_addMond(self):\n self.assertEqual(self.mond.addMond(\"dd\", self.model.rot_mond, 0, 0, 0, 20, 20), None)\n\n def test_rotation(self):\n self.assertEqual(self.mond.rotation(self.model.rot_mond,3,3,3), [0, 3, 0])"},"avg_line_length":{"kind":"number","value":26.6363636364,"string":"26.636364"},"max_line_length":{"kind":"number","value":93,"string":"93"},"alphanum_fraction":{"kind":"number","value":0.6860068259,"string":"0.686007"}}},{"rowIdx":46420,"cells":{"hexsha":{"kind":"string","value":"022c92504f696f60497ba5cc600e99336ff1917d"},"size":{"kind":"number","value":790,"string":"790"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"Utils/py/ActionSelection/state.py"},"max_stars_repo_name":{"kind":"string","value":"tarsoly/NaoTH"},"max_stars_repo_head_hexsha":{"kind":"string","value":"dcd2b67ef6bf9953c81d3e1b26e543b5922b7d52"},"max_stars_repo_licenses":{"kind":"list like","value":["ECL-2.0","Apache-2.0"],"string":"[\n \"ECL-2.0\",\n \"Apache-2.0\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"Utils/py/ActionSelection/state.py"},"max_issues_repo_name":{"kind":"string","value":"tarsoly/NaoTH"},"max_issues_repo_head_hexsha":{"kind":"string","value":"dcd2b67ef6bf9953c81d3e1b26e543b5922b7d52"},"max_issues_repo_licenses":{"kind":"list like","value":["ECL-2.0","Apache-2.0"],"string":"[\n \"ECL-2.0\",\n 
\"Apache-2.0\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"Utils/py/ActionSelection/state.py"},"max_forks_repo_name":{"kind":"string","value":"tarsoly/NaoTH"},"max_forks_repo_head_hexsha":{"kind":"string","value":"dcd2b67ef6bf9953c81d3e1b26e543b5922b7d52"},"max_forks_repo_licenses":{"kind":"list like","value":["ECL-2.0","Apache-2.0"],"string":"[\n \"ECL-2.0\",\n \"Apache-2.0\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"import math\nfrom naoth import math2d as m2d\n\n# represents the robots view on the situation\n\n\nclass State:\n def __init__(self, x=1000, y=100):\n self.pose = m2d.Pose2D()\n self.pose.translation = m2d.Vector2(x, y)\n self.pose.rotation = math.radians(0)\n\n self.rotation_vel = 60 # degrees per sec\n self.walking_vel = 200 # mm per sec\n\n self.ball_position = m2d.Vector2(0.0, 0.0)\n\n # Possible options: normal, influence_01, generated\n self.potential_field_function = \"normal\"\n\n self.opp_robots = ([]) # is in global coordinates\n self.own_robots = ([]) # is in global coordinates\n\n def update_pos(self, glob_pos, rotation):\n self.pose.translation = glob_pos\n self.pose.rotation = 
math.radians(rotation)\n"},"avg_line_length":{"kind":"number","value":29.2592592593,"string":"29.259259"},"max_line_length":{"kind":"number","value":59,"string":"59"},"alphanum_fraction":{"kind":"number","value":0.6481012658,"string":"0.648101"}}},{"rowIdx":46421,"cells":{"hexsha":{"kind":"string","value":"5a1bf468e5992924ec181a27af9a5c4e719fcc39"},"size":{"kind":"number","value":3556,"string":"3,556"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"partner_ngos/fund_management/doctype/budget_line/budget_line.py"},"max_stars_repo_name":{"kind":"string","value":"AkramMutaher/partner_ngos"},"max_stars_repo_head_hexsha":{"kind":"string","value":"4a345fb6989ff5a21db7fca07aa4e5174dca8f59"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"partner_ngos/fund_management/doctype/budget_line/budget_line.py"},"max_issues_repo_name":{"kind":"string","value":"AkramMutaher/partner_ngos"},"max_issues_repo_head_hexsha":{"kind":"string","value":"4a345fb6989ff5a21db7fca07aa4e5174dca8f59"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"partner_ngos/fund_management/doctype/budget_line/budget_line.py"},"max_forks_repo_name":{"kind":"string","value":"AkramMutaher/partner_ngos"},"max_forks_repo_head_hexsha":{"kind":"string","value":"4a345fb6989ff5a21db7fca07aa4e5174dca8f59"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"# -*- coding: utf-8 -*-\n# Copyright (c) 2020, Akram Mutaher and contributors\n# For license information, please see license.txt\n\nfrom __future__ import unicode_literals\nfrom frappe.model.document import Document\nimport frappe\nfrom frappe import _, throw\nfrom frappe.utils import add_days, cstr, date_diff, get_link_to_form, getdate\nfrom frappe.utils.nestedset import NestedSet\nfrom frappe.desk.form.assign_to import close_all_assignments, clear\nfrom frappe.utils import (cstr,date_diff)\n\n#class CircularReferenceError(frappe.ValidationError): pass\n#class EndDateCannotBeGreaterThanProjectEndDateError(frappe.ValidationError): pass\n\nclass BudgetLine(NestedSet):\n\tdef validate(self):\n\t\tself.validate_naming()\n\n\tdef validate_naming(self):\n\t\tif not self.budget_no and self.logical_framework:\n\t\t\tif self.parent_budget_line:\n\t\t\t\tbgl = frappe.get_doc(\"Budget Line\", self.parent_budget_line)\n\t\t\t\tif not bgl.last_no: bgl.last_no=0\n\t\t\t\tself.budget_no=bgl.budget_no+\".\"+cstr(bgl.last_no+1)\n\t\t\t\tbgl.db_set(\"last_no\", bgl.last_no+1)\n\t\t\t\tbgl.notify_update()\n\t\t\telse:\t\t\t\t\n\t\t\t\tlfw = frappe.get_doc(\"Logical Framework\", self.logical_framework)\n\t\t\t\tif not lfw.last_budget_no: lfw.last_budget_no=0\n\t\t\t\tself.budget_no=cstr(lfw.last_budget_no+1)\n\t\t\t\tlfw.db_set(\"last_budget_no\", lfw.last_budget_no+1)\n\t\t\t\tlfw.notify_update()\n\n\tnsm_parent_field = 'parent_budget_line'\n\ndef populate_depends_on(self):\n\t\tif self.parent_budget_line:\n\t\t\tparent = frappe.get_doc('Budget Line', self.parent_budget_line)\n\t\t\tif not self.name in [row.budget_line for row in parent.depends_on]:\n\t\t\t\tparent.append(\"depends_on\", {\n\t\t\t\t\t\"doctype\": \"Budget Line Depends On\",\n\t\t\t\t\t\"budget_line\": 
self.name,\n\t\t\t\t\t\"subject\": self.budget_line_description\n\t\t\t\t})\n\t\t\t\tparent.save()\n\n@frappe.whitelist()\ndef check_if_child_exists(name):\n\tchild_tasks = frappe.get_all(\"Budget Line\", filters={\"parent_budget_line\": name})\n\tchild_tasks = [get_link_to_form(\"Budget Line\", budget_line.name) for budget_line in child_tasks]\n\treturn child_tasks\n\n\n \n@frappe.whitelist()\ndef get_children(doctype, parent, budget_line=None, logical_framework=None, is_root=False):\n\n\tfilters = [['docstatus', '<', '2']]\n\n\tif budget_line:\n\t\tfilters.append(['parent_budget_line', '=', budget_line])\n\telif parent and not is_root:\n\t\t# via expand child\n\t\tfilters.append(['parent_budget_line', '=', parent])\n\telse:\n\t\tfilters.append(['ifnull(`parent_budget_line`, \"\")', '=', ''])\n\n\tif logical_framework:\n\t\tfilters.append(['logical_framework', '=', logical_framework])\n\n\tbudgets = frappe.get_list(doctype, fields=[\n\t\t'name as value',\n\t\t'budget_line_description as title',\n\t\t'is_group as expandable'\n\t], filters=filters, order_by='name')\n\n\t# return budgets\n\treturn budgets\n\n@frappe.whitelist()\ndef add_node():\n\tfrom frappe.desk.treeview import make_tree_args\n\targs = frappe.form_dict\n\targs.update({\n\t\t\"name_field\": \"budget_line_description\"\n\t})\n\targs = make_tree_args(**args)\n\n\tif args.parent_budget_line == 'All Budget Line' or args.parent_budget_line == args.logical_framework:\n\t\targs.parent_budget_line = None\n\n\tfrappe.get_doc(args).insert()\n\n@frappe.whitelist()\ndef add_multiple_budget_line(data, parent):\n\tdata = json.loads(data)\n\tnew_doc = {'doctype': 'Budget Line', 'parent_budget_line': parent if parent!=\"All Budget Line\" else \"\"}\n\tnew_doc['logical_framework'] = frappe.db.get_value('Budget Line', {\"name\": parent}, 'logical_framework') or \"\"\n\n\tfor d in data:\n\t\tif not d.get(\"budget_line\"): continue\n\t\tnew_doc['budget_line_description'] = 
d.get(\"budget_line_description\")\n\t\tnew_budget_line = frappe.get_doc(new_doc)\n\t\tnew_budget_line.insert()\n\n\n"},"avg_line_length":{"kind":"number","value":32.623853211,"string":"32.623853"},"max_line_length":{"kind":"number","value":111,"string":"111"},"alphanum_fraction":{"kind":"number","value":0.7485939258,"string":"0.748594"}}},{"rowIdx":46422,"cells":{"hexsha":{"kind":"string","value":"ce56caba69cfc805f8e766d0fd8b41db839aa0be"},"size":{"kind":"number","value":17815,"string":"17,815"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"python_version/cmUI.py"},"max_stars_repo_name":{"kind":"string","value":"Hermethus/hernethus.github.io"},"max_stars_repo_head_hexsha":{"kind":"string","value":"d2577b71cedb8f60d4d9052f979f8a139d69536d"},"max_stars_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"python_version/cmUI.py"},"max_issues_repo_name":{"kind":"string","value":"Hermethus/hernethus.github.io"},"max_issues_repo_head_hexsha":{"kind":"string","value":"d2577b71cedb8f60d4d9052f979f8a139d69536d"},"max_issues_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"python_version/cmUI.py"},"max_forks_repo_name":{"kind":"string","value":"Hermethus/hernethus.github.io"},"max_forks_repo_head_hexsha":{"kind":"string","value":"d2577b71cedb8f60d4d9052f979f8a139d69536d"},"max_forks_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n 
\"Apache-2.0\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"from ciphermamager import CipherManager\nimport hashlib\nfrom tkinter import *\nfrom tkinter import messagebox, filedialog\nimport pyperclip\n\n\nclass cmUI(Tk):\n \n def __init__(self):\n super().__init__()\n self.resizable(width=False, height=False)\n self.title(\"密码管理v1.1\")\n\n self.setupUI()\n self.cm = None\n self.current_select = None\n \n def setupUI(self):\n #菜单\n self.menu = Menu(self)\n self.config(menu=self.menu)\n \n self.filemenu = Menu(self.menu, tearoff=False)\n self.filemenu.add_command(label=\"新建空白密码本\", command=self.new)\n self.filemenu.add_command(label=\"从明文新建\", command=self.new_from_file)\n self.filemenu.add_command(label=\"打开密文文件\", command=self.open_file)\n self.filemenu.add_separator()\n self.filemenu.add_command(label=\"保存为密文文件\", command=self.save, state=DISABLED)\n self.filemenu.add_command(label=\"导出明文文件\", command=self.save_as_plain, state=DISABLED)\n self.filemenu.add_separator()\n self.filemenu.add_command(label=\"关闭文件\", command=self.close_file_disable, state=DISABLED)\n \n self.passwordmenu = Menu(self.menu, tearoff=False)\n self.passwordmenu.add_command(label=\"修改密码\",\n command=self.change_password, state=DISABLED)\n \n self.helpmenu = Menu(self.menu, tearoff=False)\n self.helpmenu.add_command(label=\"帮助\", command=self.get_help)\n self.helpmenu.add_command(label=\"关于我们\", command=self.about)\n \n self.menu.add_cascade(label=\"文件\", menu=self.filemenu)\n self.menu.add_cascade(label=\"密码\", menu=self.passwordmenu)\n self.menu.add_cascade(label=\"关于\", menu=self.helpmenu)\n \n #布局\n #左半边\n self.left_frame = Frame(self, width=300, height=500, relief=GROOVE)\n self.left_frame.grid(row=0, column=0)\n \n #左半边-搜索框部分\n self.search_frame = LabelFrame(self.left_frame, text=\"搜索\", width=300, height=100)\n self.search_frame.grid(row=0, 
padx=5, pady=5)\n \n self.search = StringVar()\n self.search_entry = Entry(self.search_frame, width=20,\n textvariable=self.search, state=DISABLED)\n self.search_entry.grid(row=0, column=0, padx=5, pady=5)\n \n self.search_button = Button(self.search_frame, text=\"搜索\", \n command=self.search_item, state=DISABLED)\n self.search_button.grid(row=0, column=1, padx=5, pady=5)\n \n #左半边-用户名\n self.username_frame = LabelFrame(self.left_frame, text=\"用户名\", \n width=300, height=100)\n self.username_frame.grid(row=1, padx=5, pady=5)\n \n self.username = StringVar()\n self.username_entry = Entry(self.username_frame, width=20, \n textvariable=self.username, state=DISABLED)\n self.username_entry.grid(row=0, column=0, padx=5, pady=5)\n \n self.username_button = Button(self.username_frame, text=\"复制\", \n command=self.copy_username, state=DISABLED)\n self.username_button.grid(row=0, column=1, padx=5, pady=5)\n \n #左半边-密码\n self.password_frame = LabelFrame(self.left_frame, text=\"密码\", \n width=300, height=100)\n self.password_frame.grid(row=2, padx=5, pady=5)\n \n self.password = StringVar()\n self.password_entry = Entry(self.password_frame, width=20, show=\"*\",\n textvariable=self.password, state=DISABLED)\n self.password_entry.grid(row=0, column=0, padx=5, pady=5)\n \n self.password_button = Button(self.password_frame, text=\"复制\", \n command=self.copy_password, state=DISABLED)\n self.password_button.grid(row=0, column=1, padx=5, pady=5)\n \n #左半边-按钮\n self.button_frame = Frame(self.left_frame, width=300, height=100)\n self.button_frame.grid(row=3, padx=5, pady=5)\n \n self.add_button = Button(self.button_frame, text=\"添加新的一项\", \n command=self.add, state=DISABLED)\n self.add_button.grid(row=0, column=0, padx=5, pady=5)\n \n self.del_button = Button(self.button_frame, text=\"删除选中的项\", \n command=self.delete, state=DISABLED)\n self.del_button.grid(row=0, column=1, padx=5, pady=5)\n \n self.modify_button = Button(self.button_frame, text=\"修改选中的项\", \n command=self.modify, 
state=DISABLED)\n self.modify_button.grid(row=1, column=0, padx=5, pady=5)\n \n self.clean_button = Button(self.button_frame, text=\" 清空界面 \", \n command=self.clean_all, state=DISABLED)\n self.clean_button.grid(row=1, column=1, padx=5, pady=5)\n \n #右半边\n self.right_frame = LabelFrame(self, text=\"搜索结果\", width=300, height=500)\n self.right_frame.grid(row=0, column=1, padx=5)\n \n self.sb = Scrollbar(self)\n self.lb = Listbox(self.right_frame, height=13, \n selectmode=SINGLE, yscrollcommand=self.sb.set)\n self.lb.grid(row=0, column=0, padx=5, pady=5)\n self.lb.bind(\"\", self.get_info)\n self.sb.config(command=self.lb.yview)\n\n \n def new(self):\n self.open_path = None\n self._new_password()\n \n def new_from_file(self):\n self.open_path = filedialog.askopenfilename(\n title='打开明文文件', filetypes=[('TXT文本文档', '*.txt'), ('所有文件', '*.*')])\n if self.open_path:\n self._new_password()\n \n def _new_password(self):\n self.top = Toplevel(self)\n self.top.attributes(\"-topmost\", True)\n \n self.top_label1 = Label(self.top, text=\"请输入密码:\")\n self.top_label1.grid(row=0, column=0, padx=5, pady=5, sticky=W)\n self.top_password_entry = Entry(self.top, show=\"*\")\n self.top_password_entry.grid(row=0, column=1, padx=5, pady=5)\n \n self.top_label2 = Label(self.top, text=\"再次输入密码:\")\n self.top_label2.grid(row=1, column=0, padx=5, pady=5, sticky=W)\n self.top_password_verify = Entry(self.top, show=\"*\")\n self.top_password_verify.grid(row=1, column=1, padx=5, pady=5)\n \n self.top_button = Button(self.top, text=\" 确认 \", command=self._compare_password)\n self.top_button.grid(row=2, column=0, columnspan=2, padx=5, pady=5)\n \n def _compare_password(self):\n if self.top_password_entry.get()==self.top_password_verify.get():\n pw = self.top_password_entry.get()\n self.cm = CipherManager(password=pw, file_path=self.open_path)\n \n self.open_file_enable()\n else:\n messagebox.showerror(title='错误', message=\"两次输入密码不一样!\")\n self.top.destroy()\n \n def open_file(self):\n self.open_path 
= filedialog.askopenfilename(\n title='打开加密文件', filetypes=[('CIPHER文件', '*.cipher'), ('所有文件', '*.*')])\n if self.open_path:\n self._enter_password()\n\n def _enter_password(self):\n self.top = Toplevel(self)\n self.top.attributes(\"-topmost\", True)\n \n self.top_label1 = Label(self.top, text=\"请输入密码:\")\n self.top_label1.grid(row=0, column=0, padx=5, pady=5, sticky=W)\n self.top_password_entry = Entry(self.top, show=\"*\")\n self.top_password_entry.grid(row=0, column=1, padx=5, pady=5)\n \n self.top_button = Button(self.top, text=\" 确认 \", command=self._verify_password)\n self.top_button.grid(row=1, column=0, columnspan=2, padx=5, pady=5)\n \n def _verify_password(self):\n pw = self.top_password_entry.get()\n try:\n self.cm = CipherManager(password=pw, file_path=self.open_path, new=False)\n self.open_file_enable()\n except Exception as e:\n messagebox.showerror(title='错误', message=\"文件或密码错误!\")\n print(e)\n self.top.destroy()\n \n def get_info(self, event):\n self.current_select = self.lb.get(ACTIVE)\n username, password = self.cm.get(self.current_select)\n self.username.set(username)\n self.password.set(password)\n\n def save(self):\n path = filedialog.asksaveasfilename(\n title='保存为加密文件', filetypes=[('CIPHER文件', '*.cipher'), ('所有文件', '*.*')])\n self.cm.save(path)\n \n def save_as_plain(self):\n path = filedialog.asksaveasfilename(\n title='保存为txt文件', filetypes=[('TXT文件', '*.txt'), ('所有文件', '*.*')])\n self.cm.save_as_plain(path)\n \n def change_password(self):\n self.clean_all()\n self.top = Toplevel(self)\n self.top.attributes(\"-topmost\", True)\n \n self.top_label0 = Label(self.top, text=\"请输入旧密码:\")\n self.top_label0.grid(row=0, column=0, padx=5, pady=5, sticky=W)\n self.top_old_password_entry = Entry(self.top, show=\"*\")\n self.top_old_password_entry.grid(row=0, column=1, padx=5, pady=5)\n \n self.top_label1 = Label(self.top, text=\"请输入新密码:\")\n self.top_label1.grid(row=1, column=0, padx=5, pady=5, sticky=W)\n self.top_password_entry = Entry(self.top, 
show=\"*\")\n self.top_password_entry.grid(row=1, column=1, padx=5, pady=5)\n \n self.top_label2 = Label(self.top, text=\"再次输入密码:\")\n self.top_label2.grid(row=2, column=0, padx=5, pady=5, sticky=W)\n self.top_password_verify = Entry(self.top, show=\"*\")\n self.top_password_verify.grid(row=2, column=1, padx=5, pady=5)\n \n self.top_button = Button(self.top, text=\" 确认 \", command=self._change_password)\n self.top_button.grid(row=3, column=0, columnspan=2, padx=5, pady=5)\n \n def _change_password(self):\n m = hashlib.md5()\n m.update(bytes(self.top_old_password_entry.get(), encoding=\"utf-8\"))\n old_password = m.hexdigest()\n if old_password!=self.cm.password:\n messagebox.showerror(title='错误', message=\"密码输入错误!\")\n elif self.top_password_entry.get()!=self.top_password_verify.get():\n messagebox.showerror(title='错误', message=\"两次输入密码不一样!\")\n else:\n new_password = self.top_password_entry.get()\n self.cm.set_password(new_password)\n messagebox.showinfo(title='信息', message=\"修改成功!\")\n self.top.destroy()\n \n def about(self):\n messagebox.showinfo(title='关于',\n message=\"\"\"欢迎使用这个很简单的密码管理软件!\n==============================\n由于使用python编写,打开速度可能较慢。\n如果发现任何bug,欢迎邮件沟通。\n邮箱:dzxx_ld@sjtu.edu.cn\n\\t\\tHermethus\n\\t\\t2020/4/11\"\"\")\n \n def get_help(self):\n messagebox.showinfo(title='帮助', message=\"\"\"·进行相关操作之前,应该先读入或新建密码本文件。\n·明文读入文件格式:\n 每次读入一行,每行中账号归属、用户名、密码用空格分隔。\n 如果上述内容中包含空格的,不应用文件读入,建议手动输入。\n·查找对应归属的账户信息\n 使用搜索寻找关键词,搜索结果会列在右边的列表中。\n *(星号)是唯一通配符,匹配所有内容。\n 双击可以使选中项的用户名和密码显示在左边。\n 密码不能通过ctrl+C复制,使用时单击复制可以复制到剪贴板中。\n\"\"\")\n \n def search_item(self):\n self.lb.delete(0, END)\n self.username.set(\"\")\n self.password.set(\"\")\n self.current_select = None\n \n if self.cm and self.search.get():\n result = self.cm.search(self.search.get())\n for item in result:\n self.lb.insert(END, item)\n self.search.set(\"\")\n \n def copy_username(self):\n if self.current_select:\n pyperclip.copy(self.cm.code_dict[self.current_select][0])\n messagebox.showinfo(title='提示', 
message='用户名复制成功!')\n else:\n messagebox.showerror(title='错误', message='用户名复制不成功,请重新双击选中一项!')\n \n def copy_password(self):\n if self.current_select:\n pyperclip.copy(self.cm.code_dict[self.current_select][1])\n messagebox.showinfo(title='提示', message='密码复制成功!')\n else:\n messagebox.showerror(title='错误', message='密码复制不成功,请重新双击选中一项!')\n \n def modify(self):\n if not self.current_select:\n messagebox.showerror(title='错误', message=\"修改不成功,请重新双击选中一项!\")\n return\n \n self.top = Toplevel(self)\n self.top.attributes(\"-topmost\", True)\n \n self.top_label1 = Label(self.top, text=\"请输入用户名:\")\n self.top_label1.grid(row=1, column=0, padx=5, pady=5, sticky=W)\n self.top_username_entry = Entry(self.top)\n self.top_username_entry.grid(row=1, column=1, padx=5, pady=5)\n \n self.top_label2 = Label(self.top, text=\"请输入密码:\")\n self.top_label2.grid(row=2, column=0, padx=5, pady=5, sticky=W)\n self.top_password_entry = Entry(self.top, show=\"*\")\n self.top_password_entry.grid(row=2, column=1, padx=5, pady=5)\n \n self.top_label3 = Label(self.top, text=\"请再次输入密码:\")\n self.top_label3.grid(row=3, column=0, padx=5, pady=5, sticky=W)\n self.top_password_verify = Entry(self.top, show=\"*\")\n self.top_password_verify.grid(row=3, column=1, padx=5, pady=5)\n \n self.top_button = Button(self.top, text=\" 确认 \", command=self._modify)\n self.top_button.grid(row=4, column=0, columnspan=2, padx=5, pady=5)\n \n def _modify(self):\n if self.top_password_entry.get()!=self.top_password_verify.get():\n messagebox.showerror(title='错误', message=\"两次输入密码不一样!\")\n return\n \n self.cm.modify(self.current_select, self.top_username_entry.get(), 0)\n self.cm.modify(self.current_select, self.top_password_entry.get(), 1)\n self.clean()\n self.top.destroy()\n \n \n def add(self):\n self.top = Toplevel(self)\n self.top.attributes(\"-topmost\", True)\n \n self.top_label0 = Label(self.top, text=\"请输入账号归属:\")\n self.top_label0.grid(row=0, column=0, padx=5, pady=5, sticky=W)\n self.top_title_entry = 
Entry(self.top)\n self.top_title_entry.grid(row=0, column=1, padx=5, pady=5)\n \n self.top_label1 = Label(self.top, text=\"请输入用户名:\")\n self.top_label1.grid(row=1, column=0, padx=5, pady=5, sticky=W)\n self.top_username_entry = Entry(self.top)\n self.top_username_entry.grid(row=1, column=1, padx=5, pady=5)\n \n self.top_label2 = Label(self.top, text=\"请输入密码:\")\n self.top_label2.grid(row=2, column=0, padx=5, pady=5, sticky=W)\n self.top_password_entry = Entry(self.top, show=\"*\")\n self.top_password_entry.grid(row=2, column=1, padx=5, pady=5)\n \n self.top_label3 = Label(self.top, text=\"请再次输入密码:\")\n self.top_label3.grid(row=3, column=0, padx=5, pady=5, sticky=W)\n self.top_password_verify = Entry(self.top, show=\"*\")\n self.top_password_verify.grid(row=3, column=1, padx=5, pady=5)\n \n self.top_button = Button(self.top, text=\" 确认 \", command=self._add)\n self.top_button.grid(row=4, column=0, columnspan=2, padx=5, pady=5)\n \n def _add(self):\n if self.top_password_entry.get()!=self.top_password_verify.get():\n messagebox.showerror(title='错误', message=\"两次输入密码不一样!\")\n self.top.destroy()\n return\n \n if not self.cm.add(self.top_title_entry.get(), \n self.top_username_entry.get(), self.top_password_entry.get()):\n messagebox.showerror(title='错误', \n message='\"{}\"已存在!'.format(self.top_title_entry.get()))\n self.top.destroy()\n \n def delete(self):\n if not self.current_select:\n messagebox.showerror(title='错误', message=\"删除不成功,请重新双击选中一项!\")\n return\n victim = self.lb.get(ACTIVE)\n if victim==\"\":\n messagebox.showerror(title='错误', message=\"删除不成功,请重新双击选中一项!\")\n return\n if messagebox.askyesno(title='提示', message='确定要删除“{}”的内容吗?'.format(victim)):\n self.lb.delete(ACTIVE)\n self.cm.delete(victim)\n self.clean()\n \n def close_file_disable(self):\n self.clean_all()\n self.cm = None\n \n self.filemenu.entryconfig(\"保存为密文文件\", state=DISABLED)\n self.filemenu.entryconfig(\"导出明文文件\", state=DISABLED)\n self.filemenu.entryconfig(\"关闭文件\", state=DISABLED)\n 
self.passwordmenu.entryconfig(\"修改密码\", state=DISABLED) \n self.search_button['state'] = 'disabled'\n self.search_entry['state'] = 'disabled'\n self.username_entry['state'] = 'disabled'\n self.username_button['state'] = 'disabled'\n self.password_entry['state'] = 'disabled'\n self.password_button['state'] = 'disabled'\n self.add_button['state'] = 'disabled'\n self.del_button['state'] = 'disabled'\n self.modify_button['state'] = 'disabled'\n self.clean_button['state'] = 'disabled'\n \n def open_file_enable(self):\n self.filemenu.entryconfig(\"保存为密文文件\", state=NORMAL)\n self.filemenu.entryconfig(\"导出明文文件\", state=NORMAL)\n self.filemenu.entryconfig(\"关闭文件\", state=NORMAL)\n self.passwordmenu.entryconfig(\"修改密码\", state=NORMAL) \n self.search_button['state'] = 'normal'\n self.search_entry['state'] = 'normal'\n self.username_entry['state'] = 'normal'\n self.username_button['state'] = 'normal'\n self.password_entry['state'] = 'normal'\n self.password_button['state'] = 'normal'\n self.add_button['state'] = 'normal'\n self.del_button['state'] = 'normal'\n self.modify_button['state'] = 'normal'\n self.clean_button['state'] = 'normal'\n \n def clean_all(self):\n self.clean()\n self.lb.delete(0, END)\n \n def clean(self):\n self.username.set(\"\")\n self.password.set(\"\")\n self.search.set(\"\")\n self.current_select = None\n\n \napp = 
cmUI()\n\nmainloop()\n\n\n\n\n"},"avg_line_length":{"kind":"number","value":41.2384259259,"string":"41.238426"},"max_line_length":{"kind":"number","value":96,"string":"96"},"alphanum_fraction":{"kind":"number","value":0.5956777996,"string":"0.595678"}}},{"rowIdx":46423,"cells":{"hexsha":{"kind":"string","value":"cebde25824e3255107389e440016168cc354df6d"},"size":{"kind":"number","value":1019,"string":"1,019"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"webapp/dash_tutorial_plotly/basic_callbacks/state.py"},"max_stars_repo_name":{"kind":"string","value":"zeroam/TIL"},"max_stars_repo_head_hexsha":{"kind":"string","value":"43e3573be44c7f7aa4600ff8a34e99a65cbdc5d1"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"webapp/dash_tutorial_plotly/basic_callbacks/state.py"},"max_issues_repo_name":{"kind":"string","value":"zeroam/TIL"},"max_issues_repo_head_hexsha":{"kind":"string","value":"43e3573be44c7f7aa4600ff8a34e99a65cbdc5d1"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"webapp/dash_tutorial_plotly/basic_callbacks/state.py"},"max_forks_repo_name":{"kind":"string","value":"zeroam/TIL"},"max_forks_repo_head_hexsha":{"kind":"string","value":"43e3573be44c7f7aa4600ff8a34e99a65cbdc5d1"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"import dash\r\nimport dash_core_components as dcc\r\nimport dash_html_components as html\r\nfrom dash.dependencies import Input, Output, State\r\n\r\nexternal_stylesheets = [\"https://codepen.io/chriddyp/pen/bWLwgP.css\"]\r\n\r\napp = dash.Dash(__name__, external_stylesheets=external_stylesheets)\r\n\r\napp.layout = html.Div(\r\n [\r\n dcc.Input(id=\"input-1-state\", type=\"text\", value=\"Montreal\"),\r\n dcc.Input(id=\"input-2-state\", type=\"text\", value=\"Canada\"),\r\n html.Button(id=\"submit-button-state\", n_clicks=0, children=\"Submit\"),\r\n html.Div(id=\"number-output\"),\r\n ]\r\n)\r\n\r\n\r\n@app.callback(\r\n Output(\"number-output\", \"children\"),\r\n [Input(\"submit-button-state\", \"n_clicks\")],\r\n [State(\"input-1-state\", \"value\"),\r\n State(\"input-2-state\", \"value\")])\r\ndef update_output(n_clicks, input1, input2):\r\n return f\"\"\"\r\n The Button has been passed {n_clicks} times,\r\n Input 1 is \"{input1}\",\r\n and Input 2 is \"{input2}\"\r\n \"\"\"\r\n\r\n\r\nif __name__ == \"__main__\":\r\n 
app.run_server(debug=True)\r\n"},"avg_line_length":{"kind":"number","value":29.1142857143,"string":"29.114286"},"max_line_length":{"kind":"number","value":78,"string":"78"},"alphanum_fraction":{"kind":"number","value":0.6447497547,"string":"0.64475"}}},{"rowIdx":46424,"cells":{"hexsha":{"kind":"string","value":"ced80906b05f43b45104e9eac4f3c8d5bf66b613"},"size":{"kind":"number","value":2001,"string":"2,001"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"exercises/networking_selfpaced/networking-workshop/collections/ansible_collections/community/general/plugins/doc_fragments/alicloud.py"},"max_stars_repo_name":{"kind":"string","value":"tr3ck3r/linklight"},"max_stars_repo_head_hexsha":{"kind":"string","value":"5060f624c235ecf46cb62cefcc6bddc6bf8ca3e7"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"exercises/networking_selfpaced/networking-workshop/collections/ansible_collections/community/general/plugins/doc_fragments/alicloud.py"},"max_issues_repo_name":{"kind":"string","value":"tr3ck3r/linklight"},"max_issues_repo_head_hexsha":{"kind":"string","value":"5060f624c235ecf46cb62cefcc6bddc6bf8ca3e7"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"exercises/networking_selfpaced/networking-workshop/collections/ansible_collections/community/general/plugins/doc_fragments/alicloud.py"},"max_forks_repo_name":{"kind":"string","value":"tr3ck3r/linklight"},"max_forks_repo_head_hexsha":{"kind":"string","value":"5060f624c235ecf46cb62cefcc6bddc6bf8ca3e7"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"# -*- coding: utf-8 -*-\n\n# Copyright: (c) 2017 Alibaba Group Holding Limited. He Guimin \n# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)\n\n\nclass ModuleDocFragment(object):\n\n # Alicloud only documentation fragment\n DOCUMENTATION = r'''\noptions:\n alicloud_access_key:\n description:\n - Aliyun Cloud access key.\n - If not set then the value of environment variable C(ALICLOUD_ACCESS_KEY),\n C(ALICLOUD_ACCESS_KEY_ID) will be used instead.\n type: str\n aliases: [ access_key_id, access_key ]\n alicloud_secret_key:\n description:\n - Aliyun Cloud secret key.\n - If not set then the value of environment variable C(ALICLOUD_SECRET_KEY),\n C(ALICLOUD_SECRET_ACCESS_KEY) will be used instead.\n type: str\n aliases: [ secret_access_key, secret_key ]\n alicloud_region:\n description:\n - The Aliyun Cloud region to use.\n - If not specified then the value of environment variable\n C(ALICLOUD_REGION), C(ALICLOUD_REGION_ID) will be used instead.\n type: str\n aliases: [ region, region_id ]\n alicloud_security_token:\n description:\n - The Aliyun Cloud security token.\n - If not specified then the value of environment variable\n 
C(ALICLOUD_SECURITY_TOKEN) will be used instead.\n type: str\n aliases: [ security_token ]\nauthor:\n- He Guimin (@xiaozhu36)\nrequirements:\n- python >= 2.6\nextends_documentation_fragment:\n- community.general.alicloud\n\nnotes:\n - If parameters are not set within the module, the following\n environment variables can be used in decreasing order of precedence\n C(ALICLOUD_ACCESS_KEY) or C(ALICLOUD_ACCESS_KEY_ID),\n C(ALICLOUD_SECRET_KEY) or C(ALICLOUD_SECRET_ACCESS_KEY),\n C(ALICLOUD_REGION) or C(ALICLOUD_REGION_ID),\n C(ALICLOUD_SECURITY_TOKEN)\n - C(ALICLOUD_REGION) or C(ALICLOUD_REGION_ID) can be typically be used to specify the\n ALICLOUD region, when required, but this can also be configured in the footmark config file\n'''\n"},"avg_line_length":{"kind":"number","value":35.1052631579,"string":"35.105263"},"max_line_length":{"kind":"number","value":95,"string":"95"},"alphanum_fraction":{"kind":"number","value":0.7351324338,"string":"0.735132"}}},{"rowIdx":46425,"cells":{"hexsha":{"kind":"string","value":"0c7dfaabd2ba8694e7cfbbef5120d9931d91960f"},"size":{"kind":"number","value":219,"string":"219"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"backend/products/validators.py"},"max_stars_repo_name":{"kind":"string","value":"saulhappy/drf"},"max_stars_repo_head_hexsha":{"kind":"string","value":"5e62da54cdf0f0fead742c891d34e7eacd488a1b"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"backend/products/validators.py"},"max_issues_repo_name":{"kind":"string","value":"saulhappy/drf"},"max_issues_repo_head_hexsha":{"kind":"string","value":"5e62da54cdf0f0fead742c891d34e7eacd488a1b"},"max_issues_repo_licenses":{"kind":"list 
like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"backend/products/validators.py"},"max_forks_repo_name":{"kind":"string","value":"saulhappy/drf"},"max_forks_repo_head_hexsha":{"kind":"string","value":"5e62da54cdf0f0fead742c891d34e7eacd488a1b"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"from rest_framework import serializers\nfrom rest_framework.validators import UniqueValidator\nfrom products.models import Product\n\n\nunique_product_title = UniqueValidator(queryset=Product.objects.all(), lookup=\"iexact\")\n"},"avg_line_length":{"kind":"number","value":31.2857142857,"string":"31.285714"},"max_line_length":{"kind":"number","value":87,"string":"87"},"alphanum_fraction":{"kind":"number","value":0.8538812785,"string":"0.853881"}}},{"rowIdx":46426,"cells":{"hexsha":{"kind":"string","value":"0cd35d179f90c155940228304c32f1c763f9b3a0"},"size":{"kind":"number","value":1402,"string":"1,402"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"research/cv/ntsnet/src/lr_generator.py"},"max_stars_repo_name":{"kind":"string","value":"leelige/mindspore"},"max_stars_repo_head_hexsha":{"kind":"string","value":"5199e05ba3888963473f2b07da3f7bca5b9ef6dc"},"max_stars_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n 
\"Apache-2.0\"\n]"},"max_stars_count":{"kind":"number","value":77,"string":"77"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2021-10-15T08:32:37.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2022-03-30T13:09:11.000Z"},"max_issues_repo_path":{"kind":"string","value":"research/cv/ntsnet/src/lr_generator.py"},"max_issues_repo_name":{"kind":"string","value":"leelige/mindspore"},"max_issues_repo_head_hexsha":{"kind":"string","value":"5199e05ba3888963473f2b07da3f7bca5b9ef6dc"},"max_issues_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_issues_count":{"kind":"number","value":3,"string":"3"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2021-10-30T14:44:57.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2022-02-14T06:57:57.000Z"},"max_forks_repo_path":{"kind":"string","value":"research/cv/ntsnet/src/lr_generator.py"},"max_forks_repo_name":{"kind":"string","value":"leelige/mindspore"},"max_forks_repo_head_hexsha":{"kind":"string","value":"5199e05ba3888963473f2b07da3f7bca5b9ef6dc"},"max_forks_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_forks_count":{"kind":"number","value":24,"string":"24"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2021-10-15T08:32:45.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2022-03-24T18:45:20.000Z"},"content":{"kind":"string","value":"# Copyright 2021 Huawei Technologies Co., Ltd\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY 
KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n# ============================================================================\n\"\"\"lr generator for ntsnet\"\"\"\nimport numpy as np\n\ndef get_lr(global_step, lr_init, lr_max, warmup_epochs, total_epochs, steps_per_epoch):\n \"\"\"\n generate learning rate\n \"\"\"\n lr_each_step = []\n total_steps = steps_per_epoch * total_epochs\n warmup_steps = steps_per_epoch * warmup_epochs\n for i in range(total_steps):\n if i < warmup_steps:\n lr = lr_init + (lr_max - lr_init) * i / warmup_steps\n elif i < 100 * steps_per_epoch:\n lr = lr_max\n else:\n lr = lr_max * 0.1\n lr_each_step.append(lr)\n current_step = global_step\n lr_each_step = np.array(lr_each_step).astype(np.float32)\n learning_rate = lr_each_step[current_step:]\n return learning_rate\n"},"avg_line_length":{"kind":"number","value":37.8918918919,"string":"37.891892"},"max_line_length":{"kind":"number","value":87,"string":"87"},"alphanum_fraction":{"kind":"number","value":0.664764622,"string":"0.664765"}}},{"rowIdx":46427,"cells":{"hexsha":{"kind":"string","value":"0b6c4e1d8aa93b0743fc829e60eb6050a5cedded"},"size":{"kind":"number","value":2135,"string":"2,135"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"packages/watchmen-data-kernel/test/watchmen_data_kernel_test/test_topic_table.py"},"max_stars_repo_name":{"kind":"string","value":"Indexical-Metrics-Measure-Advisory/watchmen"},"max_stars_repo_head_hexsha":{"kind":"string","value":"c54ec54d9f91034a38e51fd339ba66453d2c7a6d"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"packages/watchmen-data-kernel/test/watchmen_data_kernel_test/test_topic_table.py"},"max_issues_repo_name":{"kind":"string","value":"Indexical-Metrics-Measure-Advisory/watchmen"},"max_issues_repo_head_hexsha":{"kind":"string","value":"c54ec54d9f91034a38e51fd339ba66453d2c7a6d"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"packages/watchmen-data-kernel/test/watchmen_data_kernel_test/test_topic_table.py"},"max_forks_repo_name":{"kind":"string","value":"Indexical-Metrics-Measure-Advisory/watchmen"},"max_forks_repo_head_hexsha":{"kind":"string","value":"c54ec54d9f91034a38e51fd339ba66453d2c7a6d"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"from unittest import TestCase\n\nfrom watchmen_auth import PrincipalService\nfrom watchmen_data_kernel.cache import CacheService\nfrom watchmen_data_kernel.service.topic_structure_helper import create_topic_structure, drop_topic_structure, \\\n\tupdate_topic_structure\nfrom watchmen_model.admin import Factor, FactorType, Topic, TopicKind, TopicType, User, UserRole\nfrom watchmen_model.system import DataSource, DataSourceType\n\n\ndef create_fake_principal_service() -> PrincipalService:\n\treturn PrincipalService(User(userId='1', tenantId='1', name='imma-admin', role=UserRole.ADMIN))\n\n\ndef prepare_topic():\n\tdata_source = DataSource(\n\t\tdataSourceId='1', 
dataSourceCode='ds1', dataSourceType=DataSourceType.MYSQL,\n\t\thost='localhost', port='3306', username='watchmen', password='watchmen', name='watchmen',\n\t\ttenantId='1')\n\tCacheService.data_source().put(data_source)\n\n\treturn Topic(\n\t\ttopicId='1', name='topic_x', type=TopicType.DISTINCT, kind=TopicKind.BUSINESS,\n\t\tfactors=[\n\t\t\tFactor(factorId='1', name='topic1_id', type=FactorType.SEQUENCE, indexGroup='u-1'),\n\t\t\tFactor(factorId='2', name='topic1_text', type=FactorType.TEXT, precision='64', indexGroup='u-1')\n\t\t],\n\t\tdataSourceId=data_source.dataSourceId,\n\t\ttenantId='1')\n\n\ndef prepare_new_topic():\n\treturn Topic(\n\t\ttopicId='1', name='topic_x', type=TopicType.DISTINCT, kind=TopicKind.BUSINESS,\n\t\tfactors=[\n\t\t\tFactor(factorId='1', name='topic1_id', type=FactorType.SEQUENCE),\n\t\t\tFactor(factorId='2', name='topic1_text', type=FactorType.TEXT, precision='64', indexGroup='u-1'),\n\t\t\tFactor(factorId='2', name='topic1_text2', type=FactorType.TEXT, precision='32', indexGroup='u-1')\n\t\t],\n\t\tdataSourceId='1',\n\t\ttenantId='1')\n\n\nclass TopicTable(TestCase):\n\t# noinspection PyMethodMayBeStatic\n\tdef test_create_topic(self):\n\t\tcreate_topic_structure(prepare_topic(), create_fake_principal_service())\n\n\t# noinspection PyMethodMayBeStatic\n\tdef test_update_topic(self):\n\t\tupdate_topic_structure(prepare_new_topic(), prepare_topic(), create_fake_principal_service())\n\n\t# noinspection PyMethodMayBeStatic\n\tdef test_drop_topic(self):\n\t\tdrop_topic_structure(prepare_topic(), 
create_fake_principal_service())\n"},"avg_line_length":{"kind":"number","value":38.125,"string":"38.125"},"max_line_length":{"kind":"number","value":111,"string":"111"},"alphanum_fraction":{"kind":"number","value":0.7789227166,"string":"0.778923"}}},{"rowIdx":46428,"cells":{"hexsha":{"kind":"string","value":"e7f170759c49af4b665b91c469c887f34601d7f7"},"size":{"kind":"number","value":2451,"string":"2,451"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"Webpage/utils/management/commands/init.py"},"max_stars_repo_name":{"kind":"string","value":"ASV-Aachen/Website"},"max_stars_repo_head_hexsha":{"kind":"string","value":"bbfc02d71dde67fdf89a4b819b795a73435da7cf"},"max_stars_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"Webpage/utils/management/commands/init.py"},"max_issues_repo_name":{"kind":"string","value":"ASV-Aachen/Website"},"max_issues_repo_head_hexsha":{"kind":"string","value":"bbfc02d71dde67fdf89a4b819b795a73435da7cf"},"max_issues_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_issues_count":{"kind":"number","value":46,"string":"46"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2022-01-08T12:03:24.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2022-03-30T08:51:05.000Z"},"max_forks_repo_path":{"kind":"string","value":"Webpage/utils/management/commands/init.py"},"max_forks_repo_name":{"kind":"string","value":"ASV-Aachen/Website"},"max_forks_repo_head_hexsha":{"kind":"string","value":"bbfc02d71dde67fdf89a4b819b795a73435da7cf"},"max_forks_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n 
\"Apache-2.0\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"from django.core.management.base import BaseCommand\n\n# from arbeitsstunden.models import *\nfrom web.models import HeadPage, infoPage, frontHeader, standartPages\n\n\nclass Command(BaseCommand):\n help = 'Creates the init Data'\n\n def add_arguments(self, parser):\n pass\n\n def handle(self, *args, **options):\n self.headerAndInfoPages()\n # self.arbeitsstundenSeason()\n self.StandartSeiten()\n\n def headerAndInfoPages(self):\n self.stdout.write(\"Starting Headers init\", ending='\\n')\n text = \"Diese Seite ist in Arbeit\"\n\n right = HeadPage(titel=\"Segeln lernen\", text=text, description=text, name=\"SL\" )\n right.save()\n temp2 = infoPage(titel=\"Segeln lernen\", text=text, headPage=right, name=\"SL\")\n temp2.save()\n\n left = HeadPage(titel=\"Der Verein\", text=text, description=text, name=\"DV\")\n left.save()\n temp2 = infoPage(titel=\"Mitglied werden\", text=text, headPage=left, name=\"MitgliedWerden\")\n temp2.save()\n temp2 = infoPage(titel=\"Personalia\", text=text, headPage=left, name=\"Personalia\")\n temp2.save()\n temp2 = infoPage(titel=\"Ausbildung\", text=text, headPage=left, name=\"Ausbildung\")\n temp2.save()\n\n temp = HeadPage(titel=\"Seeschiff\", text=text, description=text, name=\"SEE\")\n temp.save()\n temp2 = infoPage(titel=\"Aquis Granus IV\", text=text, headPage=temp, name=\"AquisGranusIV\")\n temp2.save()\n temp2 = infoPage(titel=\"Position\", text=text, headPage=temp, name=\"Position\")\n temp2.save()\n\n temp = HeadPage(titel=\"Jollenpark\", text=text, description=text, name=\"JP\")\n temp.save()\n\n temp = HeadPage(titel=\"Aktivitäten\", text=text, description=text, name=\"AK\")\n temp.save()\n temp2 = infoPage(titel=\"Regatta\", text=text, headPage=temp, name=\"Regatta\")\n temp2.save()\n\n self.stdout.write(\"Finished 
Headers\", ending='\\n')\n # FrontPage init\n temp = frontHeader(left=left, right=right)\n temp.save()\n pass\n\n\n # def arbeitsstundenSeason(self):\n # self.stdout.write(\"Season Arbeitsstunden init\", ending='\\n')\n # season = getCurrentSeason()\n\n def StandartSeiten(self):\n self.stdout.write(\"Standart Seiten init\", ending=\"\\n\")\n standartPages(titel=\"Impressum\", text=\"Diese Seite ist in Arbeit\").save()\n standartPages(titel=\"Datenschutz\", text=\"Diese Seite ist in Arbeit\").save()"},"avg_line_length":{"kind":"number","value":37.7076923077,"string":"37.707692"},"max_line_length":{"kind":"number","value":98,"string":"98"},"alphanum_fraction":{"kind":"number","value":0.6417788658,"string":"0.641779"}}},{"rowIdx":46429,"cells":{"hexsha":{"kind":"string","value":"d7e8c8632675de49b16e166f4623ba1684585fd6"},"size":{"kind":"number","value":18100,"string":"18,100"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"deutschland/smard/api/default_api.py"},"max_stars_repo_name":{"kind":"string","value":"kiranmusze/deutschland"},"max_stars_repo_head_hexsha":{"kind":"string","value":"86d8ead3f38ad88ad66bb338b9f5a8db06992344"},"max_stars_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"deutschland/smard/api/default_api.py"},"max_issues_repo_name":{"kind":"string","value":"kiranmusze/deutschland"},"max_issues_repo_head_hexsha":{"kind":"string","value":"86d8ead3f38ad88ad66bb338b9f5a8db06992344"},"max_issues_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n 
\"Apache-2.0\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"deutschland/smard/api/default_api.py"},"max_forks_repo_name":{"kind":"string","value":"kiranmusze/deutschland"},"max_forks_repo_head_hexsha":{"kind":"string","value":"86d8ead3f38ad88ad66bb338b9f5a8db06992344"},"max_forks_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"\"\"\"\n Bundesnetzagentur Strommarktdaten\n\n Bundesnetzagentur Strommarktdaten # noqa: E501\n\n The version of the OpenAPI document: 0.0.1\n Generated by: https://openapi-generator.tech\n\"\"\"\n\n\nimport re # noqa: F401\nimport sys # noqa: F401\n\nfrom deutschland.smard.api_client import ApiClient, Endpoint as _Endpoint\nfrom deutschland.smard.model_utils import ( # noqa: F401\n check_allowed_values,\n check_validations,\n date,\n datetime,\n file_type,\n none_type,\n validate_and_convert_types,\n)\nfrom deutschland.smard.model.indices import Indices\nfrom deutschland.smard.model.time_series import TimeSeries\n\n\nclass DefaultApi(object):\n \"\"\"NOTE: This class is auto generated by OpenAPI Generator\n Ref: https://openapi-generator.tech\n\n Do not edit the class manually.\n \"\"\"\n\n def __init__(self, api_client=None):\n if api_client is None:\n api_client = ApiClient()\n self.api_client = api_client\n self.filter_region_filter_copy_region_copy_resolution_timestamp_json_get_endpoint = _Endpoint(\n settings={\n \"response_type\": (TimeSeries,),\n \"auth\": [],\n \"endpoint_path\": \"/{filter}/{region}/{filterCopy}_{regionCopy}_{resolution}_{timestamp}.json\",\n \"operation_id\": 
\"filter_region_filter_copy_region_copy_resolution_timestamp_json_get\",\n \"http_method\": \"GET\",\n \"servers\": None,\n },\n params_map={\n \"all\": [\n \"filter\",\n \"filter_copy\",\n \"region\",\n \"region_copy\",\n \"resolution\",\n \"timestamp\",\n ],\n \"required\": [\n \"filter\",\n \"filter_copy\",\n \"region\",\n \"region_copy\",\n \"resolution\",\n \"timestamp\",\n ],\n \"nullable\": [],\n \"enum\": [\n \"filter\",\n \"filter_copy\",\n \"region\",\n \"region_copy\",\n \"resolution\",\n ],\n \"validation\": [],\n },\n root_map={\n \"validations\": {},\n \"allowed_values\": {\n (\"filter\",): {\n \"1223\": 1223,\n \"1224\": 1224,\n \"1225\": 1225,\n \"1226\": 1226,\n \"1227\": 1227,\n \"1228\": 1228,\n \"4066\": 4066,\n \"4067\": 4067,\n \"4068\": 4068,\n \"4069\": 4069,\n \"4070\": 4070,\n \"4071\": 4071,\n \"410\": 410,\n \"4359\": 4359,\n \"4387\": 4387,\n },\n (\"filter_copy\",): {\n \"1223\": 1223,\n \"1224\": 1224,\n \"1225\": 1225,\n \"1226\": 1226,\n \"1227\": 1227,\n \"1228\": 1228,\n \"4066\": 4066,\n \"4067\": 4067,\n \"4068\": 4068,\n \"4069\": 4069,\n \"4070\": 4070,\n \"4071\": 4071,\n \"410\": 410,\n \"4359\": 4359,\n \"4387\": 4387,\n },\n (\"region\",): {\n \"DE\": \"DE\",\n \"AT\": \"AT\",\n \"LU\": \"LU\",\n \"DE-LU\": \"DE-LU\",\n \"DE-AT-LU\": \"DE-AT-LU\",\n \"50HERTZ\": \"50Hertz\",\n \"AMPRION\": \"Amprion\",\n \"TENNET\": \"TenneT\",\n \"TRANSNETBW\": \"TransnetBW\",\n \"APG\": \"APG\",\n \"CREOS\": \"Creos\",\n },\n (\"region_copy\",): {\n \"DE\": \"DE\",\n \"AT\": \"AT\",\n \"LU\": \"LU\",\n \"DE-LU\": \"DE-LU\",\n \"DE-AT-LU\": \"DE-AT-LU\",\n \"50HERTZ\": \"50Hertz\",\n \"AMPRION\": \"Amprion\",\n \"TENNET\": \"TenneT\",\n \"TRANSNETBW\": \"TransnetBW\",\n \"APG\": \"APG\",\n \"CREOS\": \"Creos\",\n },\n (\"resolution\",): {\n \"HOUR\": \"hour\",\n \"QUARTERHOUR\": \"quarterhour\",\n \"DAY\": \"day\",\n \"WEEK\": \"week\",\n \"MONTH\": \"month\",\n \"YEAR\": \"year\",\n },\n },\n \"openapi_types\": {\n \"filter\": (int,),\n 
\"filter_copy\": (int,),\n \"region\": (str,),\n \"region_copy\": (str,),\n \"resolution\": (str,),\n \"timestamp\": (int,),\n },\n \"attribute_map\": {\n \"filter\": \"filter\",\n \"filter_copy\": \"filterCopy\",\n \"region\": \"region\",\n \"region_copy\": \"regionCopy\",\n \"resolution\": \"resolution\",\n \"timestamp\": \"timestamp\",\n },\n \"location_map\": {\n \"filter\": \"path\",\n \"filter_copy\": \"path\",\n \"region\": \"path\",\n \"region_copy\": \"path\",\n \"resolution\": \"path\",\n \"timestamp\": \"path\",\n },\n \"collection_format_map\": {},\n },\n headers_map={\n \"accept\": [\"application/json\"],\n \"content_type\": [],\n },\n api_client=api_client,\n )\n self.filter_region_index_resolution_json_get_endpoint = _Endpoint(\n settings={\n \"response_type\": (Indices,),\n \"auth\": [],\n \"endpoint_path\": \"/{filter}/{region}/index_{resolution}.json\",\n \"operation_id\": \"filter_region_index_resolution_json_get\",\n \"http_method\": \"GET\",\n \"servers\": None,\n },\n params_map={\n \"all\": [\n \"filter\",\n \"region\",\n \"resolution\",\n ],\n \"required\": [\n \"filter\",\n \"region\",\n \"resolution\",\n ],\n \"nullable\": [],\n \"enum\": [\n \"filter\",\n \"region\",\n \"resolution\",\n ],\n \"validation\": [],\n },\n root_map={\n \"validations\": {},\n \"allowed_values\": {\n (\"filter\",): {\n \"1223\": 1223,\n \"1224\": 1224,\n \"1225\": 1225,\n \"1226\": 1226,\n \"1227\": 1227,\n \"1228\": 1228,\n \"4066\": 4066,\n \"4067\": 4067,\n \"4068\": 4068,\n \"4069\": 4069,\n \"4070\": 4070,\n \"4071\": 4071,\n \"410\": 410,\n \"4359\": 4359,\n \"4387\": 4387,\n },\n (\"region\",): {\n \"DE\": \"DE\",\n \"AT\": \"AT\",\n \"LU\": \"LU\",\n \"DE-LU\": \"DE-LU\",\n \"DE-AT-LU\": \"DE-AT-LU\",\n \"50HERTZ\": \"50Hertz\",\n \"AMPRION\": \"Amprion\",\n \"TENNET\": \"TenneT\",\n \"TRANSNETBW\": \"TransnetBW\",\n \"APG\": \"APG\",\n \"CREOS\": \"Creos\",\n },\n (\"resolution\",): {\n \"HOUR\": \"hour\",\n \"QUARTERHOUR\": \"quarterhour\",\n \"DAY\": 
\"day\",\n \"WEEK\": \"week\",\n \"MONTH\": \"month\",\n \"YEAR\": \"year\",\n },\n },\n \"openapi_types\": {\n \"filter\": (int,),\n \"region\": (str,),\n \"resolution\": (str,),\n },\n \"attribute_map\": {\n \"filter\": \"filter\",\n \"region\": \"region\",\n \"resolution\": \"resolution\",\n },\n \"location_map\": {\n \"filter\": \"path\",\n \"region\": \"path\",\n \"resolution\": \"path\",\n },\n \"collection_format_map\": {},\n },\n headers_map={\n \"accept\": [\"application/json\"],\n \"content_type\": [],\n },\n api_client=api_client,\n )\n\n def filter_region_filter_copy_region_copy_resolution_timestamp_json_get(\n self,\n filter,\n filter_copy,\n region_copy,\n timestamp,\n region=\"DE\",\n resolution=\"hour\",\n **kwargs\n ):\n \"\"\"Zeitreihendaten # noqa: E501\n\n Zeitreihendaten nach Filter, Region und Auflösung ab Timestamp # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n\n >>> thread = api.filter_region_filter_copy_region_copy_resolution_timestamp_json_get(filter, filter_copy, region_copy, timestamp, region=\"DE\", resolution=\"hour\", async_req=True)\n >>> result = thread.get()\n\n Args:\n filter (int): Mögliche Filter: * `1223` - Stromerzeugung: Braunkohle * `1224` - Stromerzeugung: Kernenergie * `1225` - Stromerzeugung: Wind Offshore * `1226` - Stromerzeugung: Wasserkraft * `1227` - Stromerzeugung: Sonstige Konventionelle * `1228` - Stromerzeugung: Sonstige Erneuerbare * `4066` - Stromerzeugung: Biomasse * `4067` - Stromerzeugung: Wind Onshore * `4068` - Stromerzeugung: Photovoltaik * `4069` - Stromerzeugung: Steinkohle * `4070` - Stromerzeugung: Pumpspeicher * `4071` - Stromerzeugung: Erdgas * `410` - Stromverbrauch: Gesamt (Netzlast) * `4359` - Stromverbrauch: Residuallast * `4387` - Stromverbrauch: Pumpspeicher\n filter_copy (int): Muss dem Wert von \\\"filter\\\" entsprechen. 
(Kaputtes API-Design)\n region_copy (str): Muss dem Wert von \\\"region\\\" entsprechen. (Kaputtes API-Design)\n timestamp (int):\n region (str): Land / Regelzone / Marktgebiet: * `DE` - Land: Deutschland * `AT` - Land: Österreich * `LU` - Land: Luxemburg * `DE-LU` - Marktgebiet: DE/LU (ab 01.10.2018) * `DE-AT-LU` - Marktgebiet: DE/AT/LU (bis 30.09.2018) * `50Hertz` - Regelzone (DE): 50Hertz * `Amprion`- Regelzone (DE): Amprion * `TenneT` - Regelzone (DE): TenneT * `TransnetBW` - Regelzone (DE): TransnetBW * `APG` - Regelzone (AT): APG * `Creos` - Regelzone (LU): Creos . defaults to \"DE\", must be one of [\"DE\"]\n resolution (str): Auflösung der Daten: * `hour` - Stündlich * `quater_hour` - Viertelstündlich * `day` - Täglich * `week` - Wöchentlich * `month` - Monatlich * `year` - Jährlich . defaults to \"hour\", must be one of [\"hour\"]\n\n Keyword Args:\n _return_http_data_only (bool): response data without head status\n code and headers. Default is True.\n _preload_content (bool): if False, the urllib3.HTTPResponse object\n will be returned without reading/decoding response data.\n Default is True.\n _request_timeout (int/float/tuple): timeout setting for this request. If\n one number provided, it will be total request timeout. 
It can also\n be a pair (tuple) of (connection, read) timeouts.\n Default is None.\n _check_input_type (bool): specifies if type checking\n should be done one the data sent to the server.\n Default is True.\n _check_return_type (bool): specifies if type checking\n should be done one the data received from the server.\n Default is True.\n _host_index (int/None): specifies the index of the server\n that we want to use.\n Default is read from the configuration.\n async_req (bool): execute request asynchronously\n\n Returns:\n TimeSeries\n If the method is called asynchronously, returns the request\n thread.\n \"\"\"\n kwargs[\"async_req\"] = kwargs.get(\"async_req\", False)\n kwargs[\"_return_http_data_only\"] = kwargs.get(\"_return_http_data_only\", True)\n kwargs[\"_preload_content\"] = kwargs.get(\"_preload_content\", True)\n kwargs[\"_request_timeout\"] = kwargs.get(\"_request_timeout\", None)\n kwargs[\"_check_input_type\"] = kwargs.get(\"_check_input_type\", True)\n kwargs[\"_check_return_type\"] = kwargs.get(\"_check_return_type\", True)\n kwargs[\"_host_index\"] = kwargs.get(\"_host_index\")\n kwargs[\"filter\"] = filter\n kwargs[\"filter_copy\"] = filter_copy\n kwargs[\"region\"] = region\n kwargs[\"region_copy\"] = region_copy\n kwargs[\"resolution\"] = resolution\n kwargs[\"timestamp\"] = timestamp\n return self.filter_region_filter_copy_region_copy_resolution_timestamp_json_get_endpoint.call_with_http_info(\n **kwargs\n )\n\n def filter_region_index_resolution_json_get(\n self, filter, region=\"DE\", resolution=\"hour\", **kwargs\n ):\n \"\"\"Indizes # noqa: E501\n\n Verfügbare Timestamps für Filter, Region und Auflösung # noqa: E501\n This method makes a synchronous HTTP request by default. 
To make an\n asynchronous HTTP request, please pass async_req=True\n\n >>> thread = api.filter_region_index_resolution_json_get(filter, region=\"DE\", resolution=\"hour\", async_req=True)\n >>> result = thread.get()\n\n Args:\n filter (int): Mögliche Filter: * `1223` - Stromerzeugung: Braunkohle * `1224` - Stromerzeugung: Kernenergie * `1225` - Stromerzeugung: Wind Offshore * `1226` - Stromerzeugung: Wasserkraft * `1227` - Stromerzeugung: Sonstige Konventionelle * `1228` - Stromerzeugung: Sonstige Erneuerbare * `4066` - Stromerzeugung: Biomasse * `4067` - Stromerzeugung: Wind Onshore * `4068` - Stromerzeugung: Photovoltaik * `4069` - Stromerzeugung: Steinkohle * `4070` - Stromerzeugung: Pumpspeicher * `4071` - Stromerzeugung: Erdgas * `410` - Stromverbrauch: Gesamt (Netzlast) * `4359` - Stromverbrauch: Residuallast * `4387` - Stromverbrauch: Pumpspeicher\n region (str): Land / Regelzone / Marktgebiet: * `DE` - Land: Deutschland * `AT` - Land: Österreich * `LU` - Land: Luxemburg * `DE-LU` - Marktgebiet: DE/LU (ab 01.10.2018) * `DE-AT-LU` - Marktgebiet: DE/AT/LU (bis 30.09.2018) * `50Hertz` - Regelzone (DE): 50Hertz * `Amprion`- Regelzone (DE): Amprion * `TenneT` - Regelzone (DE): TenneT * `TransnetBW` - Regelzone (DE): TransnetBW * `APG` - Regelzone (AT): APG * `Creos` - Regelzone (LU): Creos . defaults to \"DE\", must be one of [\"DE\"]\n resolution (str): Auflösung der Daten: * `hour` - Stündlich * `quater_hour` - Viertelstündlich * `day` - Täglich * `week` - Wöchentlich * `month` - Monatlich * `year` - Jährlich . defaults to \"hour\", must be one of [\"hour\"]\n\n Keyword Args:\n _return_http_data_only (bool): response data without head status\n code and headers. Default is True.\n _preload_content (bool): if False, the urllib3.HTTPResponse object\n will be returned without reading/decoding response data.\n Default is True.\n _request_timeout (int/float/tuple): timeout setting for this request. If\n one number provided, it will be total request timeout. 
It can also\n be a pair (tuple) of (connection, read) timeouts.\n Default is None.\n _check_input_type (bool): specifies if type checking\n should be done one the data sent to the server.\n Default is True.\n _check_return_type (bool): specifies if type checking\n should be done one the data received from the server.\n Default is True.\n _host_index (int/None): specifies the index of the server\n that we want to use.\n Default is read from the configuration.\n async_req (bool): execute request asynchronously\n\n Returns:\n Indices\n If the method is called asynchronously, returns the request\n thread.\n \"\"\"\n kwargs[\"async_req\"] = kwargs.get(\"async_req\", False)\n kwargs[\"_return_http_data_only\"] = kwargs.get(\"_return_http_data_only\", True)\n kwargs[\"_preload_content\"] = kwargs.get(\"_preload_content\", True)\n kwargs[\"_request_timeout\"] = kwargs.get(\"_request_timeout\", None)\n kwargs[\"_check_input_type\"] = kwargs.get(\"_check_input_type\", True)\n kwargs[\"_check_return_type\"] = kwargs.get(\"_check_return_type\", True)\n kwargs[\"_host_index\"] = kwargs.get(\"_host_index\")\n kwargs[\"filter\"] = filter\n kwargs[\"region\"] = region\n kwargs[\"resolution\"] = resolution\n return (\n self.filter_region_index_resolution_json_get_endpoint.call_with_http_info(\n **kwargs\n )\n 
)\n"},"avg_line_length":{"kind":"number","value":45.1371571072,"string":"45.137157"},"max_line_length":{"kind":"number","value":680,"string":"680"},"alphanum_fraction":{"kind":"number","value":0.4714364641,"string":"0.471436"}}},{"rowIdx":46430,"cells":{"hexsha":{"kind":"string","value":"0be6f0d2053f21b23e3aa646fe3967b753218da4"},"size":{"kind":"number","value":289,"string":"289"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"packages/watchmen-model/src/watchmen_model/gui/last_snapshot.py"},"max_stars_repo_name":{"kind":"string","value":"Indexical-Metrics-Measure-Advisory/watchmen"},"max_stars_repo_head_hexsha":{"kind":"string","value":"c54ec54d9f91034a38e51fd339ba66453d2c7a6d"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"packages/watchmen-model/src/watchmen_model/gui/last_snapshot.py"},"max_issues_repo_name":{"kind":"string","value":"Indexical-Metrics-Measure-Advisory/watchmen"},"max_issues_repo_head_hexsha":{"kind":"string","value":"c54ec54d9f91034a38e51fd339ba66453d2c7a6d"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"packages/watchmen-model/src/watchmen_model/gui/last_snapshot.py"},"max_forks_repo_name":{"kind":"string","value":"Indexical-Metrics-Measure-Advisory/watchmen"},"max_forks_repo_head_hexsha":{"kind":"string","value":"c54ec54d9f91034a38e51fd339ba66453d2c7a6d"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"from pydantic import BaseModel\n\nfrom watchmen_model.common import DashboardId, LastVisit, UserBasedTuple\n\n\nclass LastSnapshot(UserBasedTuple, LastVisit, BaseModel):\n\tlanguage: str = None\n\tlastDashboardId: DashboardId = None\n\tadminDashboardId: DashboardId = None\n\tfavoritePin: bool = False\n"},"avg_line_length":{"kind":"number","value":26.2727272727,"string":"26.272727"},"max_line_length":{"kind":"number","value":72,"string":"72"},"alphanum_fraction":{"kind":"number","value":0.8166089965,"string":"0.816609"}}},{"rowIdx":46431,"cells":{"hexsha":{"kind":"string","value":"aca4b0f602222ac06d77437f23f2768496a50bce"},"size":{"kind":"number","value":1467,"string":"1,467"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"api/clean/get_set_speci.py"},"max_stars_repo_name":{"kind":"string","value":"Latent-Lxx/dazhou-dw"},"max_stars_repo_head_hexsha":{"kind":"string","value":"902b4b625cda4c9e4eb205017b8955b81f37a0b5"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"api/clean/get_set_speci.py"},"max_issues_repo_name":{"kind":"string","value":"Latent-Lxx/dazhou-dw"},"max_issues_repo_head_hexsha":{"kind":"string","value":"902b4b625cda4c9e4eb205017b8955b81f37a0b5"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"api/clean/get_set_speci.py"},"max_forks_repo_name":{"kind":"string","value":"Latent-Lxx/dazhou-dw"},"max_forks_repo_head_hexsha":{"kind":"string","value":"902b4b625cda4c9e4eb205017b8955b81f37a0b5"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"number","value":1,"string":"1"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2022-02-11T04:44:37.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2022-02-11T04:44:37.000Z"},"content":{"kind":"string","value":"# !/usr/bin/python3\n# -*- coding: utf-8 -*-\n# @Time : 2021/7/20 下午5:11\n# @Author : Latent\n# @Email : latentsky@gmail.com\n# @File : set_specification.py\n# @Software: PyCharm\n# @class : 获取所有的规格,然后对规格合并后形成集合\n\nfrom db.mongo_db import Mongo\n\nclass Get_Set_Sepeci(object):\n\n def __init__(self):\n tables = Mongo().mongo_get_tables()\n self.table_details = tables['table_details']\n\n # 抽取所有的 规格 放到集合中 -----> 只针对props\n def sepeci_get_props(self, startDate, endDate , keyword):\n if keyword:\n match = {\n \"$match\": {\"$and\": [{\"keyword\":keyword},\n {\"request_date\": {\"$gte\": startDate,\n \"$lte\": endDate}},\n ]\n }\n }\n else:\n match = {\n \"$match\": {\"$and\": [\n {\"request_date\": {\"$gte\": startDate,\n \"$lte\": endDate}},\n ]\n }\n }\n\n mongo_data = self.table_details.aggregate([match])\n set_sepeci = set()\n for data in mongo_data:\n try:\n props = data['props']\n if props:\n for prop in props:\n set_sepeci.add(prop['name'])\n except KeyError:\n 
pass\n\n\n\n\n\n\n\n"},"avg_line_length":{"kind":"number","value":27.1666666667,"string":"27.166667"},"max_line_length":{"kind":"number","value":72,"string":"72"},"alphanum_fraction":{"kind":"number","value":0.408997955,"string":"0.408998"}}},{"rowIdx":46432,"cells":{"hexsha":{"kind":"string","value":"96ca3fc8bf03ccd489406d8f998556c93531c837"},"size":{"kind":"number","value":2662,"string":"2,662"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"Co-Simulation/Sumo/sumo-1.7.0/tools/simpla/_reporting.py"},"max_stars_repo_name":{"kind":"string","value":"uruzahe/carla"},"max_stars_repo_head_hexsha":{"kind":"string","value":"940c2ab23cce1eda1ef66de35f66b42d40865fb1"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":4,"string":"4"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2020-11-13T02:35:56.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2021-03-29T20:15:54.000Z"},"max_issues_repo_path":{"kind":"string","value":"Co-Simulation/Sumo/sumo-1.7.0/tools/simpla/_reporting.py"},"max_issues_repo_name":{"kind":"string","value":"uruzahe/carla"},"max_issues_repo_head_hexsha":{"kind":"string","value":"940c2ab23cce1eda1ef66de35f66b42d40865fb1"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_issues_count":{"kind":"number","value":9,"string":"9"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2020-12-09T02:12:39.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2021-02-18T00:15:28.000Z"},"max_forks_repo_path":{"kind":"string","value":"Co-Simulation/Sumo/sumo-1.7.0/tools/simpla/_reporting.py"},"max_forks_repo_name":{"kind":"string","value":"uruzahe/carla"},"max_forks_repo_head_hexsha":{"kind":"string","value":"940c2ab23cce1eda1ef66de35f66b42d40865fb1"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"number","value":1,"string":"1"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2020-11-20T19:31:26.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2020-11-20T19:31:26.000Z"},"content":{"kind":"string","value":"# Eclipse SUMO, Simulation of Urban MObility; see https://eclipse.org/sumo\n# Copyright (C) 2014-2020 German Aerospace Center (DLR) and others.\n# This program and the accompanying materials are made available under the\n# terms of the Eclipse Public License 2.0 which is available at\n# https://www.eclipse.org/legal/epl-2.0/\n# This Source Code may also be made available under the following Secondary\n# Licenses when the conditions for such availability set forth in the Eclipse\n# Public License 2.0 are satisfied: GNU General Public License, version 2\n# or later which is available at\n# https://www.gnu.org/licenses/old-licenses/gpl-2.0-standalone.html\n# SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later\n\n# @file _reporting.py\n# @author Leonhard Luecken\n# @date 2017-04-09\n\nfrom collections import deque\nimport sys\nimport traci\n\nVERBOSITY = 1\nWARNING_LOG = deque()\nREPORT_LOG = deque()\n\n\ndef initDefaults():\n global VERBOSITY, MAX_LOG_SIZE, WARNING_LOG, REPORT_LOG\n # control level of verbosity\n # 0 - silent (only errors)\n # 1 - standard (errors and 
warnings)\n # 2 - log (additional status messages for platoons)\n # 3 - extended log (more status information)\n # 4 - insane (all kind of single vehicle detailed state infos)\n VERBOSITY = 1\n\n # log storage\n MAX_LOG_SIZE = 1000\n WARNING_LOG = deque()\n REPORT_LOG = deque()\n\n\n# perform default init\ninitDefaults()\n\n\ndef simTime():\n return traci.simulation.getTime()\n\n\ndef array2String(a):\n if len(a) > 0 and hasattr(a[0], \"getID\"):\n return str([e.getID() for e in a])\n return str([str(e) for e in a])\n\n\nclass Warner(object):\n\n def __init__(self, domain):\n self._domain = domain\n\n def __call__(self, msg, omitReportTime=False):\n global MAX_LOG_SIZE, WARNING_LOG\n if len(WARNING_LOG) >= MAX_LOG_SIZE:\n WARNING_LOG.popleft()\n time = str(simTime())\n rep = \"WARNING: \" + str(msg) + \" (\" + self._domain + \")\"\n if not omitReportTime:\n sys.stderr.write(time + \": \" + rep + \"\\n\")\n else:\n sys.stderr.write(rep + \"\\n\")\n WARNING_LOG.append((time, rep))\n\n\nclass Reporter(object):\n\n def __init__(self, domain):\n self._domain = domain\n\n def __call__(self, msg, omitReportTime=False):\n global MAX_LOG_SIZE, REPORT_LOG\n if len(REPORT_LOG) >= MAX_LOG_SIZE:\n REPORT_LOG.popleft()\n time = str(simTime())\n rep = str(msg) + \" (\" + self._domain + \")\"\n if not omitReportTime:\n sys.stdout.write(time + \": \" + rep + \"\\n\")\n else:\n sys.stdout.write(rep + \"\\n\")\n REPORT_LOG.append((time, 
rep))\n"},"avg_line_length":{"kind":"number","value":29.5777777778,"string":"29.577778"},"max_line_length":{"kind":"number","value":77,"string":"77"},"alphanum_fraction":{"kind":"number","value":0.6506386176,"string":"0.650639"}}},{"rowIdx":46433,"cells":{"hexsha":{"kind":"string","value":"509e06e68ee104045d2354b105ca31f67f1db0ab"},"size":{"kind":"number","value":414,"string":"414"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"exercises/fr/exc_03_16_02.py"},"max_stars_repo_name":{"kind":"string","value":"Jette16/spacy-course"},"max_stars_repo_head_hexsha":{"kind":"string","value":"32df0c8f6192de6c9daba89740a28c0537e4d6a0"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":2085,"string":"2,085"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2019-04-17T13:10:40.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2022-03-30T21:51:46.000Z"},"max_issues_repo_path":{"kind":"string","value":"exercises/fr/exc_03_16_02.py"},"max_issues_repo_name":{"kind":"string","value":"Jette16/spacy-course"},"max_issues_repo_head_hexsha":{"kind":"string","value":"32df0c8f6192de6c9daba89740a28c0537e4d6a0"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"number","value":79,"string":"79"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2019-04-18T14:42:55.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2022-03-07T08:15:43.000Z"},"max_forks_repo_path":{"kind":"string","value":"exercises/fr/exc_03_16_02.py"},"max_forks_repo_name":{"kind":"string","value":"Jette16/spacy-course"},"max_forks_repo_head_hexsha":{"kind":"string","value":"32df0c8f6192de6c9daba89740a28c0537e4d6a0"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_forks_count":{"kind":"number","value":361,"string":"361"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2019-04-17T13:34:32.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2022-03-28T04:42:45.000Z"},"content":{"kind":"string","value":"import spacy\n\nnlp = spacy.load(\"fr_core_news_sm\")\ntext = (\n \"Le groupe aéronautique Airbus construit des avions et des \"\n \"hélicoptères vendus dans le monde entier. Le siège opérationnel du \"\n \"groupe est situé en France à Toulouse dans la région Occitanie.\"\n)\n\n# Désactive le tagger et le parser\nwith ____.____(____):\n # Traite le texte\n doc = ____\n # Affiche les entités du doc\n print(____)\n"},"avg_line_length":{"kind":"number","value":25.875,"string":"25.875"},"max_line_length":{"kind":"number","value":73,"string":"73"},"alphanum_fraction":{"kind":"number","value":0.7077294686,"string":"0.707729"}}},{"rowIdx":46434,"cells":{"hexsha":{"kind":"string","value":"50acaed13f496524d7a3113eb5571513f6cad0ec"},"size":{"kind":"number","value":610,"string":"610"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"tarefas-poo/lista-02/processa-numeros/view/paineis/painel_duplica.py"},"max_stars_repo_name":{"kind":"string","value":"victoriaduarte/POO_UFSC"},"max_stars_repo_head_hexsha":{"kind":"string","value":"0c65b4f26383d1e3038d8469bd91fd2c0cb98c1a"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"tarefas-poo/lista-02/processa-numeros/view/paineis/painel_duplica.py"},"max_issues_repo_name":{"kind":"string","value":"victoriaduarte/POO_UFSC"},"max_issues_repo_head_hexsha":{"kind":"string","value":"0c65b4f26383d1e3038d8469bd91fd2c0cb98c1a"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"tarefas-poo/lista-02/processa-numeros/view/paineis/painel_duplica.py"},"max_forks_repo_name":{"kind":"string","value":"victoriaduarte/POO_UFSC"},"max_forks_repo_head_hexsha":{"kind":"string","value":"0c65b4f26383d1e3038d8469bd91fd2c0cb98c1a"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"# --------------------------\n# UFSC - CTC - INE - INE5603\n# Exercício Processa Números\n# --------------------------\n# Classe responsável por uma lista com números duplicados (2 vezes cada número).\n\nfrom view.paineis.painel_abstrato import PainelAbstrato\nfrom model.processa_numeros import duplica\n\nclass PainelDuplica(PainelAbstrato):\n def __init__(self):\n super().__init__('Duplica')\n\n def interaja(self):\n numeros = self._leiaints()\n duplicados = duplica(numeros)\n msg = 'A lista {} contém os seguintes números duplicados: {}'.format(numeros, duplicados)\n 
print(msg)\n"},"avg_line_length":{"kind":"number","value":32.1052631579,"string":"32.105263"},"max_line_length":{"kind":"number","value":97,"string":"97"},"alphanum_fraction":{"kind":"number","value":0.6442622951,"string":"0.644262"}}},{"rowIdx":46435,"cells":{"hexsha":{"kind":"string","value":"50ed8d04a738703549d91193899b488f6c9c3e3f"},"size":{"kind":"number","value":359,"string":"359"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"elements/python/6/6/soln.py"},"max_stars_repo_name":{"kind":"string","value":"mmcloughlin/problems"},"max_stars_repo_head_hexsha":{"kind":"string","value":"6095842ffe007a12ec8c2093850515aa4e046616"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":11,"string":"11"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2019-02-08T06:54:34.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2021-08-07T18:57:39.000Z"},"max_issues_repo_path":{"kind":"string","value":"elements/python/6/6/soln.py"},"max_issues_repo_name":{"kind":"string","value":"mmcloughlin/problems"},"max_issues_repo_head_hexsha":{"kind":"string","value":"6095842ffe007a12ec8c2093850515aa4e046616"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"number","value":1,"string":"1"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2019-05-21T08:14:10.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2019-05-21T08:14:10.000Z"},"max_forks_repo_path":{"kind":"string","value":"elements/python/6/6/soln.py"},"max_forks_repo_name":{"kind":"string","value":"mmcloughlin/problems"},"max_forks_repo_head_hexsha":{"kind":"string","value":"6095842ffe007a12ec8c2093850515aa4e046616"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"def dedupe(a):\n if len(a) == 0:\n return 0\n rd = 1\n wr = 1\n while rd < len(a):\n if a[rd] != a[wr-1]:\n a[wr] = a[rd]\n wr += 1\n rd += 1\n return wr\n\n\ndef test():\n a = [2,3,5,5,7,11,11,11,13]\n n = dedupe(a)\n assert a[:n] == [2,3,5,7,11,13]\n print 'pass'\n\n\nif __name__ == '__main__':\n test()\n"},"avg_line_length":{"kind":"number","value":15.6086956522,"string":"15.608696"},"max_line_length":{"kind":"number","value":35,"string":"35"},"alphanum_fraction":{"kind":"number","value":0.4066852368,"string":"0.406685"}}},{"rowIdx":46436,"cells":{"hexsha":{"kind":"string","value":"ba06c620d8cdf37df8f6e1a1146279397bd840c1"},"size":{"kind":"number","value":3459,"string":"3,459"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"Projekt/CNN-Ansatz/TEST2.py"},"max_stars_repo_name":{"kind":"string","value":"Griizz/ComputerVisionPraktikum"},"max_stars_repo_head_hexsha":{"kind":"string","value":"30276eb0b039ea42728d7433c573414d2dfe1ec2"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"Projekt/CNN-Ansatz/TEST2.py"},"max_issues_repo_name":{"kind":"string","value":"Griizz/ComputerVisionPraktikum"},"max_issues_repo_head_hexsha":{"kind":"string","value":"30276eb0b039ea42728d7433c573414d2dfe1ec2"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"Projekt/CNN-Ansatz/TEST2.py"},"max_forks_repo_name":{"kind":"string","value":"Griizz/ComputerVisionPraktikum"},"max_forks_repo_head_hexsha":{"kind":"string","value":"30276eb0b039ea42728d7433c573414d2dfe1ec2"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"number","value":3,"string":"3"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2019-11-17T01:33:07.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2020-03-26T10:13:39.000Z"},"content":{"kind":"string","value":"import os\nfrom keras.models import Sequential\nfrom keras.layers import Dense, Conv2D, MaxPooling2D, Flatten\nfrom keras.optimizers import SGD\nfrom keras.callbacks import ModelCheckpoint, ReduceLROnPlateau, EarlyStopping\nfrom tensorflow.keras.preprocessing.image import ImageDataGenerator\nimport wandb\nfrom wandb.keras import WandbCallback\n\nBATCHSIZE = 8\nLR = 0.01 / 16 * BATCHSIZE\nFILEPATH = \"./Best2.h5\"\n\nwandb.init(project=\"cv_project\")\n\ntrain_datagen = ImageDataGenerator(rescale=1. / 255, horizontal_flip=True)\nval_datagen = ImageDataGenerator(rescale=1. / 255)\ntest_datagen = ImageDataGenerator(rescale=1. 
/ 255)\n\ntrain_generator = train_datagen.flow_from_directory(\n '../DataSetNew/Training',\n target_size=(256, 256),\n color_mode=\"rgb\",\n class_mode=\"categorical\",\n shuffle=True,\n batch_size=BATCHSIZE)\n\nvalidation_generator = val_datagen.flow_from_directory(\n '../DataSetNew/Validation',\n target_size=(256, 256),\n color_mode=\"rgb\",\n class_mode=\"categorical\",\n batch_size=BATCHSIZE)\n\ntest_generator = test_datagen.flow_from_directory(\n '../DataSetNew/Test',\n target_size=(256, 256),\n color_mode=\"rgb\",\n class_mode=\"categorical\",\n batch_size=BATCHSIZE)\n\nmodel = Sequential()\n\nmodel.add(Conv2D(32, (3, 3), activation='relu', padding='same', name='conv1.1', input_shape=(256, 256, 3)))\nmodel.add(MaxPooling2D(pool_size=(2, 2)))\nmodel.add(Conv2D(64, (3, 3), activation='relu', padding='same', name='conv2.1'))\nmodel.add(MaxPooling2D(pool_size=(2, 2)))\nmodel.add(Conv2D(128, (3, 3), activation='relu', padding='same', name='conv3.1'))\nmodel.add(MaxPooling2D(pool_size=(2, 2)))\nmodel.add(Conv2D(256, (3, 3), activation='relu', padding='same', name='conv4.1'))\nmodel.add(MaxPooling2D(pool_size=(2, 2)))\nmodel.add(Conv2D(512, (3, 3), activation='relu', padding='same', name='conv5.1'))\nmodel.add(MaxPooling2D(pool_size=(2, 2)))\nmodel.add(Conv2D(1024, (3, 3), activation='relu', padding='same', name='conv6.1'))\nmodel.add(MaxPooling2D(pool_size=(2, 2)))\nmodel.add(Conv2D(2048, (3, 3), activation='relu', padding='same', name='conv7.1'))\nmodel.add(MaxPooling2D(pool_size=(2, 2)))\nmodel.add(Flatten())\nmodel.add(Dense(256, activation='relu', name='fc1', ))\nmodel.add(Dense(16, activation='softmax')) # Für jedes Label ein output\n\nmodelCheckpoint = ModelCheckpoint(FILEPATH, monitor='val_loss', verbose=0, save_best_only=True,\n save_weights_only=False, mode='auto', period=1)\n\nreduceLROnPlateau = ReduceLROnPlateau(monitor='val_loss', factor=0.25,\n patience=5, min_lr=0.0005)\n\nearlyStopping = EarlyStopping(patience=15, 
monitor='val_loss')\n\nmodel.compile(loss='categorical_crossentropy',\n optimizer=SGD(lr=LR, momentum=0.9),\n metrics=['accuracy'])\n\nmodel.fit_generator(train_generator,\n steps_per_epoch=16 * 800 // BATCHSIZE, # (Num_cat * pics_cat / batchSize)\n epochs=100,\n validation_data=validation_generator,\n validation_steps=16 * 150 // BATCHSIZE,\n callbacks=[modelCheckpoint, WandbCallback(), reduceLROnPlateau, earlyStopping])\n\n\nmodel.load_weights(FILEPATH, by_name=True)\nmodel.save(os.path.join(wandb.run.dir, \"model.h5\"))\n\ntest_accuracy = model.evaluate_generator(test_generator,\n steps=16 * 50 // BATCHSIZE) # (Num_cat * pics_cat / batchSize)\n\nwandb.run.summary[\"test_accuracy\"] = test_accuracy[1]\n"},"avg_line_length":{"kind":"number","value":38.8651685393,"string":"38.865169"},"max_line_length":{"kind":"number","value":107,"string":"107"},"alphanum_fraction":{"kind":"number","value":0.6967331599,"string":"0.696733"}}},{"rowIdx":46437,"cells":{"hexsha":{"kind":"string","value":"2ce1826b3b1a6c75aac1be79ab02727e445fadd9"},"size":{"kind":"number","value":1375,"string":"1,375"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"internal/weights.py"},"max_stars_repo_name":{"kind":"string","value":"cixel/sl-shadow-priest"},"max_stars_repo_head_hexsha":{"kind":"string","value":"3159c79fb305a226aeddfa6b884734ddfc108ccd"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_stars_count":{"kind":"number","value":13,"string":"13"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2020-04-10T17:34:49.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2022-02-13T04:04:30.000Z"},"max_issues_repo_path":{"kind":"string","value":"internal/weights.py"},"max_issues_repo_name":{"kind":"string","value":"cixel/sl-shadow-priest"},"max_issues_repo_head_hexsha":{"kind":"string","value":"3159c79fb305a226aeddfa6b884734ddfc108ccd"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"number","value":213,"string":"213"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2020-04-10T04:15:00.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2022-01-20T19:18:55.000Z"},"max_forks_repo_path":{"kind":"string","value":"internal/weights.py"},"max_forks_repo_name":{"kind":"string","value":"cixel/sl-shadow-priest"},"max_forks_repo_head_hexsha":{"kind":"string","value":"3159c79fb305a226aeddfa6b884734ddfc108ccd"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"number","value":7,"string":"7"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2020-10-08T07:22:08.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2021-07-08T21:09:33.000Z"},"content":{"kind":"string","value":"\"\"\"weight dict definitions\"\"\"\n\nweights_sanctum_of_domination = {\n 'pw_ba_1': 0.000,\n 'pw_sa_1': 0.055,\n 'pw_na_1': 0.570,\n 'lm_ba_1': 0.000,\n 'lm_sa_1': 0.050,\n 'lm_na_1': 0.205,\n 'hm_ba_1': 0.000,\n 'hm_sa_1': 0.000,\n 'hm_na_1': 0.000,\n 'pw_ba_2': 0.020,\n 'pw_sa_2': 0.020,\n 'pw_na_2': 0.050,\n 'lm_ba_2': 0.020,\n 'lm_sa_2': 0.000,\n 'lm_na_2': 0.010,\n 'hm_ba_2': 0.000,\n 'hm_sa_2': 0.000,\n 'hm_na_2': 0.000,\n}\n\nweights_castle_nathria = {\n 'pw_ba_1': 0.0200000,\n 'pw_sa_1': 0.0600000,\n 'pw_na_1': 
0.2750000,\n 'lm_ba_1': 0.0000000,\n 'lm_sa_1': 0.0050000,\n 'lm_na_1': 0.1700000,\n 'hm_ba_1': 0.0000000,\n 'hm_sa_1': 0.0000000,\n 'hm_na_1': 0.0500000,\n 'pw_ba_2': 0.0400000,\n 'pw_sa_2': 0.0400000,\n 'pw_na_2': 0.1200000,\n 'lm_ba_2': 0.0500000,\n 'lm_sa_2': 0.0800000,\n 'lm_na_2': 0.0800000,\n 'hm_ba_2': 0.0000000,\n 'hm_sa_2': 0.0000000,\n 'hm_na_2': 0.0100000,\n}\n\nweights_single = {\n 'pw_na_1': 0.73548387097,\n 'lm_na_1': 0.26451612903,\n 'hm_na_1': 0.00000000000,\n}\n\n\ndef find_weights(key):\n \"\"\"return the matching dict\"\"\"\n if key == 'weightsSingle':\n return weights_single\n if key == 'weightsCastleNathria':\n return weights_castle_nathria\n if key == 'weightsSanctumOfDomination':\n return weights_sanctum_of_domination\n return None\n"},"avg_line_length":{"kind":"number","value":22.5409836066,"string":"22.540984"},"max_line_length":{"kind":"number","value":44,"string":"44"},"alphanum_fraction":{"kind":"number","value":0.5941818182,"string":"0.594182"}}},{"rowIdx":46438,"cells":{"hexsha":{"kind":"string","value":"d774214153ece5adc56b12ff593d918fd39f157a"},"size":{"kind":"number","value":92,"string":"92"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"2015/01/measles-national-cases/graphic_config.py"},"max_stars_repo_name":{"kind":"string","value":"nprapps/graphics-archive"},"max_stars_repo_head_hexsha":{"kind":"string","value":"97b0ef326b46a959df930f5522d325e537f7a655"},"max_stars_repo_licenses":{"kind":"list like","value":["FSFAP"],"string":"[\n 
\"FSFAP\"\n]"},"max_stars_count":{"kind":"number","value":14,"string":"14"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2015-05-08T13:41:51.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2021-02-24T12:34:55.000Z"},"max_issues_repo_path":{"kind":"string","value":"2015/01/measles-national-cases/graphic_config.py"},"max_issues_repo_name":{"kind":"string","value":"nprapps/graphics-archive"},"max_issues_repo_head_hexsha":{"kind":"string","value":"97b0ef326b46a959df930f5522d325e537f7a655"},"max_issues_repo_licenses":{"kind":"list like","value":["FSFAP"],"string":"[\n \"FSFAP\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"2015/01/measles-national-cases/graphic_config.py"},"max_forks_repo_name":{"kind":"string","value":"nprapps/graphics-archive"},"max_forks_repo_head_hexsha":{"kind":"string","value":"97b0ef326b46a959df930f5522d325e537f7a655"},"max_forks_repo_licenses":{"kind":"list like","value":["FSFAP"],"string":"[\n \"FSFAP\"\n]"},"max_forks_count":{"kind":"number","value":7,"string":"7"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2015-04-04T04:45:54.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2021-02-18T11:12:48.000Z"},"content":{"kind":"string","value":"#!/usr/bin/env python\n\nCOPY_GOOGLE_DOC_KEY = 
'13YTshKL3Difi1oIKL4cM9Cf_WouGgS_FL4rdGcWmAm8'\n"},"avg_line_length":{"kind":"number","value":23,"string":"23"},"max_line_length":{"kind":"number","value":68,"string":"68"},"alphanum_fraction":{"kind":"number","value":0.847826087,"string":"0.847826"}}},{"rowIdx":46439,"cells":{"hexsha":{"kind":"string","value":"0f167163ff0689b64ce3c0277bc552535ecb9f86"},"size":{"kind":"number","value":368,"string":"368"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"cs/lambda_cs/07_computer_architecture/ls8/ls8.py"},"max_stars_repo_name":{"kind":"string","value":"tobias-fyi/vela"},"max_stars_repo_head_hexsha":{"kind":"string","value":"b0b3d3c6dc3fa397c8c7a492098a02cf75e0ff82"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"cs/lambda_cs/07_computer_architecture/ls8/ls8.py"},"max_issues_repo_name":{"kind":"string","value":"tobias-fyi/vela"},"max_issues_repo_head_hexsha":{"kind":"string","value":"b0b3d3c6dc3fa397c8c7a492098a02cf75e0ff82"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"number","value":8,"string":"8"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2020-03-24T17:47:23.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2022-03-12T00:33:21.000Z"},"max_forks_repo_path":{"kind":"string","value":"cs/lambda_cs/07_computer_architecture/ls8/ls8.py"},"max_forks_repo_name":{"kind":"string","value":"tobias-fyi/vela"},"max_forks_repo_head_hexsha":{"kind":"string","value":"b0b3d3c6dc3fa397c8c7a492098a02cf75e0ff82"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"\"\"\"Computer Architecture :: Main LS-8 runtime script\"\"\"\n\nimport sys\nfrom cpu import *\n\n# Extract file to use from command line\ntry:\n program_filepath = sys.argv[1]\nexcept IndexError:\n print(\"Please input valid filepath.\")\n sys.exit()\n\n# Instantiate CPU instance\ncpu = CPU()\n\n# Load the program into memory and run the CPU\ncpu.load(program_filepath)\ncpu.run()\n"},"avg_line_length":{"kind":"number","value":19.3684210526,"string":"19.368421"},"max_line_length":{"kind":"number","value":55,"string":"55"},"alphanum_fraction":{"kind":"number","value":0.7201086957,"string":"0.720109"}}},{"rowIdx":46440,"cells":{"hexsha":{"kind":"string","value":"adc0da5a2de20b9cfdcc88ae2630e76780c63c9b"},"size":{"kind":"number","value":5744,"string":"5,744"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"shinrl/envs/cartpole/calc.py"},"max_stars_repo_name":{"kind":"string","value":"omron-sinicx/ShinRL"},"max_stars_repo_head_hexsha":{"kind":"string","value":"09f4ae274a33d1fc1d9d542f816aef40014af6b5"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":34,"string":"34"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2021-12-09T07:12:57.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2022-03-11T08:17:20.000Z"},"max_issues_repo_path":{"kind":"string","value":"shinrl/envs/cartpole/calc.py"},"max_issues_repo_name":{"kind":"string","value":"omron-sinicx/ShinRL"},"max_issues_repo_head_hexsha":{"kind":"string","value":"09f4ae274a33d1fc1d9d542f816aef40014af6b5"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"shinrl/envs/cartpole/calc.py"},"max_forks_repo_name":{"kind":"string","value":"omron-sinicx/ShinRL"},"max_forks_repo_head_hexsha":{"kind":"string","value":"09f4ae274a33d1fc1d9d542f816aef40014af6b5"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"number","value":4,"string":"4"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2021-12-11T07:48:01.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2022-03-01T23:50:33.000Z"},"content":{"kind":"string","value":"\"\"\"\nAuthor: Toshinori Kitamura\nAffiliation: NAIST & OSX\n\"\"\"\nfrom typing import Tuple\n\nimport chex\nimport jax\nimport jax.numpy as jnp\nfrom chex import Array\n\nfrom .config import CartPoleConfig\n\n\n@jax.jit\ndef to_discrete_act(config: CartPoleConfig, c_act: float) -> int:\n \"\"\"Convert a continuous action to a discrete action.\n\n Args:\n config (CartPoleConfig)\n c_act (float): Continuous action in range [-1, 1].\n\n Returns:\n A discretized action id.\n \"\"\"\n chex.assert_type(c_act, float)\n dA = config.dA\n c_act = jnp.clip(c_act, -1.0, 1.0)\n c_step = 2 / dA\n act = jnp.floor((c_act + 1.0) / c_step + 1e-5).astype(jnp.uint32)\n return jnp.clip(act, 0, dA - 1)\n\n\n@jax.jit\ndef to_continuous_act(config: CartPoleConfig, act: int) -> float:\n \"\"\"Convert a discrete action to a continuous action.\n\n Args:\n config (CartPoleConfig)\n act (int): Discrete action in [0, ..., dA-1].\n\n Returns:\n A continuous action in range [-1.0, 1.0]\n \"\"\"\n chex.assert_type(act, int)\n dA = config.dA\n c_step = 2 / dA\n c_act = act * c_step - 1.0\n return jnp.clip(c_act, -1.0, 1.0)\n\n\n@jax.jit\ndef state_to_x_th(config: CartPoleConfig, state: int) -> Tuple[float, float]:\n 
\"\"\"Convert a state id to x, x_dot, th, th_dot\n\n Args:\n config (CartPoleConfig)\n state (int)\n\n Returns:\n x, x_dot, th, th_dot\n \"\"\"\n x_res, x_dot_res = config.x_res, config.x_dot_res\n th_res, th_dot_res = config.th_res, config.th_dot_res\n x_max, x_dot_max = config.x_max, config.x_dot_max\n th_max, th_dot_max = config.th_max, config.th_dot_max\n\n x_step = 2 * x_max / (x_res - 1)\n x_dot_step = 2 * x_dot_max / (x_dot_res - 1)\n th_step = 2 * th_max / (th_res - 1)\n th_dot_step = 2 * th_dot_max / (th_dot_res - 1)\n\n x_idx = state % x_res\n state = (state - x_idx) / x_res\n x_dot_idx = state % x_dot_res\n state = (state - x_dot_idx) / x_dot_res\n th_idx = state % th_res\n th_dot_idx = (state - th_idx) / th_res\n\n x = -x_max + x_step * x_idx\n x = jnp.clip(x, -x_max, x_max)\n x_dot = -x_dot_max + x_dot_step * x_dot_idx\n x_dot = jnp.clip(x_dot, -x_dot_max, x_dot_max)\n th = -th_max + th_step * th_idx\n th = jnp.clip(th, -th_max, th_max)\n th_dot = -th_dot_max + th_dot_step * th_dot_idx\n th_dot = jnp.clip(th_dot, -th_dot_max, th_dot_max)\n return x, x_dot, th, th_dot\n\n\n@jax.jit\ndef x_th_to_state(\n config: CartPoleConfig, x: float, x_dot: float, th: float, th_dot\n) -> float:\n \"\"\"Convert x, x_dot, th, th_dot to state id\n\n Args:\n config (CartPoleConfig)\n\n Returns:\n state id (int)\n \"\"\"\n x_res, x_dot_res = config.x_res, config.x_dot_res\n th_res, th_dot_res = config.th_res, config.th_dot_res\n x_max, x_dot_max = config.x_max, config.x_dot_max\n th_max, th_dot_max = config.th_max, config.th_dot_max\n\n x_step = 2 * x_max / (x_res - 1)\n x_dot_step = 2 * x_dot_max / (x_dot_res - 1)\n th_step = 2 * th_max / (th_res - 1)\n th_dot_step = 2 * th_dot_max / (th_dot_res - 1)\n\n x_idx = jnp.floor((x + x_max) / x_step + 1e-5)\n x_dot_idx = jnp.floor((x_dot + x_dot_max) / x_dot_step + 1e-5)\n th_idx = jnp.floor((th + th_max) / th_step + 1e-5)\n th_dot_idx = jnp.floor((th_dot + th_dot_max) / th_dot_step + 1e-5)\n state = x_idx + x_res * (x_dot_idx 
+ x_dot_res * (th_idx + th_res * th_dot_idx))\n state = jnp.clip(state, 0, x_res * x_dot_res * th_res * th_dot_res - 1)\n return state.astype(jnp.uint32)\n\n\n@jax.jit\ndef transition(config: CartPoleConfig, state: int, action: int) -> Tuple[Array, Array]:\n chex.assert_type([state, action], int)\n polemass_length = config.masspole * config.length\n total_mass = config.masspole + config.masscart\n c_act = to_continuous_act(config, action)\n force = jnp.squeeze(c_act) * config.force_mag\n\n def body_fn(_, x_th):\n x, x_dot, th, th_dot = x_th\n costh, sinth = jnp.cos(th), jnp.sin(th)\n temp = (force + polemass_length * th_dot ** 2 * sinth) / total_mass\n thetaacc = (config.gravity * sinth - costh * temp) / (\n config.length * (4.0 / 3.0 - config.masspole * costh ** 2 / total_mass)\n )\n xacc = temp - polemass_length * thetaacc * costh / total_mass\n x = x + config.tau * x_dot\n x_dot = x_dot + config.tau * xacc\n th = th + config.tau * th_dot\n th_dot = th_dot + config.tau * thetaacc\n\n x = jnp.clip(x, -config.x_max, config.x_max)\n x_dot = jnp.clip(x_dot, -config.x_dot_max, config.x_dot_max)\n th = jnp.clip(th, -config.th_max, config.th_max)\n th_dot = jnp.clip(th_dot, -config.th_dot_max, config.th_dot_max)\n return (x, x_dot, th, th_dot)\n\n x, x_dot, th, th_dot = state_to_x_th(config, state)\n out = (jnp.abs(x) >= config.x_max) + (jnp.abs(th) >= config.th_max)\n # one step is not enough when state is discretized\n x, x_dot, th, th_dot = jax.lax.fori_loop(0, 1, body_fn, (x, x_dot, th, th_dot))\n next_state = x_th_to_state(config, x, x_dot, th, th_dot)\n next_state = jax.lax.cond(\n out, lambda _: state.astype(jnp.uint32), lambda _: next_state, None\n )\n next_state = next_state.reshape(-1)\n prob = jnp.array((1.0,), dtype=float)\n return next_state, prob\n\n\n@jax.jit\ndef reward(config: CartPoleConfig, state: int, action: int) -> float:\n x, _, th, _ = state_to_x_th(config, state)\n out = (jnp.abs(x) >= config.x_max) + (jnp.abs(th) >= config.th_max)\n return 
jax.lax.cond(out, lambda _: 0.0, lambda _: 1.0, None)\n\n\n@jax.jit\ndef observation_tuple(config: CartPoleConfig, state: int) -> Array:\n \"\"\"Make the tuple observation.\"\"\"\n x, x_dot, th, th_dot = state_to_x_th(config, state)\n return jnp.array([x, x_dot, th, th_dot], dtype=float)\n"},"avg_line_length":{"kind":"number","value":33.0114942529,"string":"33.011494"},"max_line_length":{"kind":"number","value":87,"string":"87"},"alphanum_fraction":{"kind":"number","value":0.635097493,"string":"0.635097"}}},{"rowIdx":46441,"cells":{"hexsha":{"kind":"string","value":"70d4c27d0e07a286297e05297caadbbf99389707"},"size":{"kind":"number","value":793,"string":"793"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"Problems/Depth-First Search/easy/SubtreeAnotherTree/subtree_another_tree.py"},"max_stars_repo_name":{"kind":"string","value":"dolong2110/Algorithm-By-Problems-Python"},"max_stars_repo_head_hexsha":{"kind":"string","value":"31ecc7367aaabdd2b0ac0af7f63ca5796d70c730"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":1,"string":"1"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2021-08-16T14:52:05.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2021-08-16T14:52:05.000Z"},"max_issues_repo_path":{"kind":"string","value":"Problems/Depth-First Search/easy/SubtreeAnotherTree/subtree_another_tree.py"},"max_issues_repo_name":{"kind":"string","value":"dolong2110/Algorithm-By-Problems-Python"},"max_issues_repo_head_hexsha":{"kind":"string","value":"31ecc7367aaabdd2b0ac0af7f63ca5796d70c730"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"Problems/Depth-First Search/easy/SubtreeAnotherTree/subtree_another_tree.py"},"max_forks_repo_name":{"kind":"string","value":"dolong2110/Algorithm-By-Problems-Python"},"max_forks_repo_head_hexsha":{"kind":"string","value":"31ecc7367aaabdd2b0ac0af7f63ca5796d70c730"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"from typing import Optional\n\n\n# Definition for a binary tree node.\nclass TreeNode:\n def __init__(self, val=0, left=None, right=None):\n self.val = val\n self.left = left\n self.right = right\n\ndef isSubtree(self, root: Optional[TreeNode], subRoot: Optional[TreeNode]) -> bool:\n if not root:\n return False\n\n if root.val == subRoot.val and self.is_valid(root, subRoot):\n return True\n\n return self.isSubtree(root.left, subRoot) or self.isSubtree(root.right, subRoot)\n\ndef is_valid(self, root1, root2):\n if not root1 and not root2:\n return True\n\n if not root1 or not root2:\n return False\n\n if root1.val != root2.val:\n return False\n\n return self.is_valid(root1.left, root2.left) and self.is_valid(root1.right, 
root2.right)"},"avg_line_length":{"kind":"number","value":26.4333333333,"string":"26.433333"},"max_line_length":{"kind":"number","value":92,"string":"92"},"alphanum_fraction":{"kind":"number","value":0.6645649433,"string":"0.664565"}}},{"rowIdx":46442,"cells":{"hexsha":{"kind":"string","value":"cbcdf1501f1bf3b7eb5d12ee8956fcf19192e046"},"size":{"kind":"number","value":1685,"string":"1,685"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"AP5/get_results.py"},"max_stars_repo_name":{"kind":"string","value":"TensorVision/MediSeg"},"max_stars_repo_head_hexsha":{"kind":"string","value":"222fcab98d82f48f09304eda3cfbfe4d6ac825b7"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":6,"string":"6"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2016-08-15T17:57:45.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2019-03-19T05:08:29.000Z"},"max_issues_repo_path":{"kind":"string","value":"AP5/get_results.py"},"max_issues_repo_name":{"kind":"string","value":"TensorVision/MediSeg"},"max_issues_repo_head_hexsha":{"kind":"string","value":"222fcab98d82f48f09304eda3cfbfe4d6ac825b7"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"number","value":10,"string":"10"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2016-04-11T10:20:34.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2016-08-09T21:47:48.000Z"},"max_forks_repo_path":{"kind":"string","value":"AP5/get_results.py"},"max_forks_repo_name":{"kind":"string","value":"TensorVision/MediSeg"},"max_forks_repo_head_hexsha":{"kind":"string","value":"222fcab98d82f48f09304eda3cfbfe4d6ac825b7"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_forks_count":{"kind":"number","value":7,"string":"7"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2016-06-21T04:08:58.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2018-09-01T14:02:40.000Z"},"content":{"kind":"string","value":"#!/usr/bin/env python\n\n\"\"\"Get quality measures for a series of confuscation matrices.\"\"\"\n\nimport tensorvision.analyze as tva\n\ncms = [{0: {0: 37565942, 1: 1483281}, 1: {0: 809240, 1: 3149537}},\n {0: {0: 37905696, 1: 1143527}, 1: {0: 645126, 1: 3313651}},\n {0: {0: 38422970, 1: 626253}, 1: {0: 673707, 1: 3285070}},\n {0: {0: 38295979, 1: 753244}, 1: {0: 587353, 1: 3371424}},\n {0: {0: 38467952, 1: 581271}, 1: {0: 682262, 1: 3276515}},\n {0: {0: 38105025, 1: 944198}, 1: {0: 483851, 1: 3474926}},\n {0: {0: 38287640, 1: 761583}, 1: {0: 494029, 1: 3464748}},\n {0: {0: 38457492, 1: 591731}, 1: {0: 607086, 1: 3351691}},\n {0: {0: 38203714, 1: 845509}, 1: {0: 371213, 1: 3587564}},\n {0: {0: 38585168, 1: 464055}, 1: {0: 598966, 1: 3359811}},\n {0: {0: 38355948, 1: 693275}, 1: {0: 406054, 1: 3552723}},\n {0: {0: 38426068, 1: 623155}, 1: {0: 521862, 1: 3436915}},\n {0: {0: 38874301, 1: 174922}, 1: {0: 1574695, 1: 2384082}},\n {0: {0: 38480293, 1: 568930}, 1: {0: 570331, 1: 3388446}},\n {0: {0: 38405584, 1: 643639}, 1: {0: 434697, 1: 3524080}},\n {0: {0: 38587684, 1: 461539}, 1: {0: 559590, 1: 3399187}},\n {0: {0: 38354039, 1: 695184}, 1: {0: 397331, 1: 3561446}},\n {0: {0: 38601633, 1: 447590}, 1: {0: 554482, 1: 3404295}},\n {0: {0: 38428370, 1: 620853}, 1: {0: 542135, 1: 3416642}},\n {0: {0: 38497023, 1: 552200}, 1: {0: 549347, 1: 3409430}}]\n\nprint(\"F1: %s\" % [tva.get_f_score(cm) for cm in cms])\nprint(\"Accuracy: %s\" % [tva.get_accuracy(cm) for cm in cms])\nprint(\"Precision: %s\" % [tva.get_precision(cm) for cm in cms])\nprint(\"Recall: %s\" % [tva.get_recall(cm) for cm in 
cms])\n"},"avg_line_length":{"kind":"number","value":52.65625,"string":"52.65625"},"max_line_length":{"kind":"number","value":66,"string":"66"},"alphanum_fraction":{"kind":"number","value":0.543620178,"string":"0.54362"}}},{"rowIdx":46443,"cells":{"hexsha":{"kind":"string","value":"2ac86f935b8dc9358147832dd75471cda0d750a4"},"size":{"kind":"number","value":980,"string":"980"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"Algorithms/Sort_Algorithms/Bucket_Sort/Bucket_Sort.py"},"max_stars_repo_name":{"kind":"string","value":"hussamEL-Hwary/DS-Algo-Handbook"},"max_stars_repo_head_hexsha":{"kind":"string","value":"86a97d586a4ca8b17168c0a9f5a9f43f856eba58"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":18,"string":"18"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2016-11-01T04:00:36.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2021-09-13T14:26:35.000Z"},"max_issues_repo_path":{"kind":"string","value":"Algorithms/Sort_Algorithms/Bucket_Sort/Bucket_Sort.py"},"max_issues_repo_name":{"kind":"string","value":"JEERU/DS-Algo-Handbook"},"max_issues_repo_head_hexsha":{"kind":"string","value":"86a97d586a4ca8b17168c0a9f5a9f43f856eba58"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_issues_count":{"kind":"number","value":60,"string":"60"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2016-10-11T14:50:47.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2016-10-31T11:05:01.000Z"},"max_forks_repo_path":{"kind":"string","value":"Algorithms/Sort_Algorithms/Bucket_Sort/Bucket_Sort.py"},"max_forks_repo_name":{"kind":"string","value":"JEERU/DS-Algo-Handbook"},"max_forks_repo_head_hexsha":{"kind":"string","value":"86a97d586a4ca8b17168c0a9f5a9f43f856eba58"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"number","value":87,"string":"87"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2016-09-08T05:04:50.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2016-10-30T19:19:53.000Z"},"content":{"kind":"string","value":"def insertionsort( aList ):\n for i in range( 1, len( aList ) ):\n tmp = aList[i]\n k = i\n while k > 0 and tmp < aList[k - 1]:\n aList[k] = aList[k - 1]\n k -= 1\n aList[k] = tmp\n \ndef bucketsort( A ):\n # get hash codes\n code = hashing( A )\n buckets = [list() for _ in range( code[1] )]\n # distribute data into buckets: O(n)\n for i in A:\n x = re_hashing( i, code )\n buck = buckets[x]\n buck.append( i )\n\n for bucket in buckets:\n insertionsort( bucket )\n \n ndx = 0\n # merge the buckets: O(n)\n for b in range( len( buckets ) ):\n for v in buckets[b]:\n A[ndx] = v\n ndx += 1\n \nimport math\n \ndef hashing( A ):\n m = A[0]\n for i in range( 1, len( A ) ):\n if ( m < A[i] ):\n m = A[i]\n result = [m, int( math.sqrt( len( A ) ) )]\n return result\n \n \ndef re_hashing( i, code ):\n return int( i / code[0] * ( code[1] - 1 ) )\n\n\nif __name__ == '__main__':\n A = [8, 5, 3, 1, 9, 6, 0, 7, 4, 2, 5]\n bucketsort(A)\n print 
A\n"},"avg_line_length":{"kind":"number","value":20,"string":"20"},"max_line_length":{"kind":"number","value":46,"string":"46"},"alphanum_fraction":{"kind":"number","value":0.5142857143,"string":"0.514286"}}},{"rowIdx":46444,"cells":{"hexsha":{"kind":"string","value":"e211603f25587778ff697b84831aa2c4896874d5"},"size":{"kind":"number","value":1439,"string":"1,439"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"erode.py"},"max_stars_repo_name":{"kind":"string","value":"gray0018/Normal-integration-benchmark"},"max_stars_repo_head_hexsha":{"kind":"string","value":"3f4fff86e659ae2a3588c0960ebb0af39e4a1e21"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"erode.py"},"max_issues_repo_name":{"kind":"string","value":"gray0018/Normal-integration-benchmark"},"max_issues_repo_head_hexsha":{"kind":"string","value":"3f4fff86e659ae2a3588c0960ebb0af39e4a1e21"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"erode.py"},"max_forks_repo_name":{"kind":"string","value":"gray0018/Normal-integration-benchmark"},"max_forks_repo_head_hexsha":{"kind":"string","value":"3f4fff86e659ae2a3588c0960ebb0af39e4a1e21"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"import sys\nimport argparse\nimport numpy as np\nimport 
matplotlib.pyplot as plt\n\n\n# command line parser\nparser = argparse.ArgumentParser(description='Crop boundary to avoid artifacts')\nparser.add_argument('obj_name', help='the name of the object')\nparser.add_argument('--erode', type=int, default=1, help='how many times you want to erode')\n\ndef erode_mask(mask):\n new_mask = np.zeros_like(mask)\n for i in range(1, mask.shape[0]-1):\n for j in range(1, mask.shape[1]-1):\n if mask[i+1,j] and mask[i-1,j] and mask[i,j-1] and mask[i,j+1]:\n new_mask[i, j] = 1\n return new_mask.astype(np.bool_)\n\nif __name__ == '__main__':\n args = parser.parse_args()\n\n n = np.load(\"{0}/make_{0}/{0}_normal.npy\".format(args.obj_name))\n d = np.load(\"{0}/make_{0}/{0}_depth.npy\".format(args.obj_name))\n\n mask = d>-9000\n for i in range(args.erode):\n mask = erode_mask(mask)\n\n for i in range(3):\n n[...,i][~mask] = 0\n d[~mask] = np.nan\n\n\n plt.style.use(['science','no-latex'])\n\n fig, axes = plt.subplots(nrows=1, ncols=2,figsize=(10,4.5))\n axes[0].imshow(n/2+.5)\n axes[0].set_xticks([]), axes[0].set_yticks([]), axes[0].set_title(\"Normal Map\")\n\n axes[1].imshow(d)\n axes[1].set_xticks([]), axes[1].set_yticks([]), axes[1].set_title(\"Depth Map\")\n\n plt.show()\n\n np.save(\"{0}_gt_depth.npy\".format(args.obj_name), d)\n np.save(\"{0}_normal.npy\".format(args.obj_name), 
n)\n"},"avg_line_length":{"kind":"number","value":29.9791666667,"string":"29.979167"},"max_line_length":{"kind":"number","value":92,"string":"92"},"alphanum_fraction":{"kind":"number","value":0.6296038916,"string":"0.629604"}}},{"rowIdx":46445,"cells":{"hexsha":{"kind":"string","value":"357892301a0b2db171ecc7415c30c6d649a48a95"},"size":{"kind":"number","value":33338,"string":"33,338"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"Packs/NetscoutArborSightline/Integrations/NetscoutArborSightline/NetscoutArborSightline.py"},"max_stars_repo_name":{"kind":"string","value":"diCagri/content"},"max_stars_repo_head_hexsha":{"kind":"string","value":"c532c50b213e6dddb8ae6a378d6d09198e08fc9f"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":799,"string":"799"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2016-08-02T06:43:14.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2022-03-31T11:10:11.000Z"},"max_issues_repo_path":{"kind":"string","value":"Packs/NetscoutArborSightline/Integrations/NetscoutArborSightline/NetscoutArborSightline.py"},"max_issues_repo_name":{"kind":"string","value":"diCagri/content"},"max_issues_repo_head_hexsha":{"kind":"string","value":"c532c50b213e6dddb8ae6a378d6d09198e08fc9f"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_issues_count":{"kind":"number","value":9317,"string":"9,317"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2016-08-07T19:00:51.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2022-03-31T21:56:04.000Z"},"max_forks_repo_path":{"kind":"string","value":"Packs/NetscoutArborSightline/Integrations/NetscoutArborSightline/NetscoutArborSightline.py"},"max_forks_repo_name":{"kind":"string","value":"diCagri/content"},"max_forks_repo_head_hexsha":{"kind":"string","value":"c532c50b213e6dddb8ae6a378d6d09198e08fc9f"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"number","value":1297,"string":"1,297"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2016-08-04T13:59:00.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2022-03-31T23:43:06.000Z"},"content":{"kind":"string","value":"from time import sleep\n\nfrom CommonServerPython import * # noqa # pylint: disable=unused-wildcard-import\nfrom CommonServerUserPython import * # noqa\n\nfrom copy import deepcopy\nimport requests\nimport traceback\nfrom typing import Dict, Tuple\nfrom datetime import timezone\n\n# Disable insecure warnings\nrequests.packages.urllib3.disable_warnings() # pylint: disable=no-member\n\n''' CONSTANTS '''\n\nDATE_FORMAT = '%Y-%m-%dT%H:%M:%SZ' # ISO8601 format with UTC, default in XSOAR\nIMPORTANCE_DICTIONARY = {\n 'Low': '0',\n 'Medium': '1',\n 'High': '2'\n}\nONGOING_DICTIONARY = {\n 'Ongoing': 'true',\n 'Not Ongoing': 'false',\n}\nIP_DICTIONARY = {\n 'IPv4': 4,\n 'IPv6': 6\n}\n\nROUTERS_HR_HEADERS = [\n 'id',\n 'name',\n 'description',\n 'is_proxy',\n 'license_type',\n 'snmp_authprotocol',\n 'snmp_priv_protocol',\n 'snmp_security_level',\n 'snmp_version',\n]\n\nMANAGED_OBJECTS_HR_HEADERS = [\n 'id',\n 'name',\n 'tags',\n 'match_type',\n 'match_enabled',\n 'match',\n 'family',\n 'autodetected'\n]\n\n''' CLIENT CLASS 
'''\n\n\nclass NetscoutClient(BaseClient):\n \"\"\"Client class to interact with the service API\n\n This Client implements API calls, and does not contain any XSOAR logic.\n Should only do requests and return data.\n It inherits from BaseClient defined in CommonServer Python.\n Most calls use _http_request() that handles proxy, SSL verification, etc.\n \"\"\"\n\n OPERATOR_NAME_DICTIONARY = {\n # : \n 'importance': 'importance_operator',\n 'start_time': 'start_time_operator',\n 'stop_time': 'stop_time_operator',\n }\n\n RELATIONSHIP_TO_TYPE = {\n 'routers': 'router'\n }\n\n MAX_ALERTS_FOR_FIRST_FETCH = 10000\n\n def __init__(self, base_url, verify, proxy, first_fetch, headers=None, max_fetch=None, alert_class=None,\n alert_type=None, classification=None, importance=None, ongoing=None):\n self.first_fetch = first_fetch\n self.max_fetch = max_fetch\n self.alert_class = alert_class\n self.alert_type = alert_type\n self.classification = classification\n self.importance = importance\n self.ongoing = ongoing\n self.importance_operator = '>'\n\n super().__init__(base_url=base_url, verify=verify, headers=headers, proxy=proxy)\n\n def http_request(self, method: str, url_suffix: Optional[str] = None, params: Optional[dict] = None,\n json_data: Optional[dict] = None, return_empty_response: Optional[bool] = None,\n status_list_to_retry: list = None):\n\n return super()._http_request(method=method, url_suffix=url_suffix, params=params, json_data=json_data,\n error_handler=self.error_handler, return_empty_response=return_empty_response,\n status_list_to_retry=status_list_to_retry)\n\n @staticmethod\n def error_handler(res: requests.Response):\n \"\"\"\n Error handler for API calls\n Args:\n res (requests.Response): Response to handle error for\n\n \"\"\"\n try:\n # Try to parse json error response\n error_entry = res.json()\n error: str = f'Error in API call [{res.status_code}] - {res.reason}'\n if res.status_code in (400, 422, 404):\n error_list: list = []\n for err in 
error_entry.get('errors'):\n # Building the list of errors\n new_error_source = err.get('source', {}).get('pointer', '').split('/')[-1]\n new_error_details = err.get('detail')\n new_error = f'{new_error_source}: {new_error_details}' if new_error_source else new_error_details\n error_list.append(new_error)\n\n # If we manged to build a list of errors use it otherwise use basic information\n if error_list:\n error = f'{error}: \\n' + '\\n'.join(error_list)\n\n elif res.status_code in (500, 401):\n message = error_entry.get('errors', [])[0].get('message')\n if message:\n error = f'{error}\\n{message}'\n demisto.error(res.text)\n raise DemistoException(error)\n\n except ValueError:\n raise DemistoException(\n f'Could not parse error returned from Netscout Arbor Sightline server:\\n{str(res.content)}')\n\n def calculate_amount_of_incidents(self, start_time: str, params_dict: dict) -> int:\n \"\"\"\n Perform an API call with page size = 1 (perPage=1) to calculate the amount of incidents (#pages will be equal to\n #incidents).\n\n Arguments:\n start_time (str): Starting time to search by\n params_dict (dict): The params configured by the user to perform the fetch with.\n\n Returns:\n (int) The amount of pages (incidents) in total in the given query, 0 if none.\n \"\"\"\n time_attributes_dict = assign_params(start_time=start_time, start_time_operator='>')\n params_dict.update(time_attributes_dict)\n data_attribute_filter = self.build_data_attribute_filter(params_dict)\n page_size = 1\n results = self.list_alerts(page_size=page_size, search_filter=data_attribute_filter, status_list_to_retry=[500])\n last_page_link = results.get('links', {}).get('last')\n if last_page_link:\n last_page_number_matcher = re.match(r'.*&page=(\\d+)', last_page_link)\n if not last_page_number_matcher:\n raise DemistoException(\n f'Could not calculate page size, last page number was not found:\\n{last_page_link}')\n last_page_number = last_page_number_matcher.group(1)\n else:\n last_page_number = 
0\n\n return int(last_page_number)\n\n def build_relationships(self, **kwargs) -> dict:\n \"\"\"\n Builds the relationships object for creating a mitigation. An example of relationships object is:\n {\n \"mitigation_template\": {\n \"data\": {\n \"id\": \"4\", \"type\": \"mitigation_template\"\n }\n },\n \"alert\": {\n \"data\": {\n \"id\": \"101\", \"type\": \"alert\"\n \"id\": \"101\", \"type\": \"alert\"\n }\n }\n }\n Args:\n kwargs (dict): Dict containing key values parameters to be used for relationships. for example:\n {'ip_version': 4}\n\n Returns:\n (dict): Netscout relationships object\n \"\"\"\n relationships: Dict[str, Any] = {}\n for key, val in kwargs.items():\n if val:\n # In some cases the name of the relationships is not the same as the type (most cases it is)\n _type = self.RELATIONSHIP_TO_TYPE.get(key, key)\n if key == 'routers':\n relationships[key] = {\n 'data': [{\n 'type': _type,\n 'id': val[0]\n }]\n }\n else:\n relationships[key] = {\n 'data': {\n 'type': _type,\n 'id': val\n }\n }\n return relationships\n\n def build_data_attribute_filter(self, attributes_dict: dict) -> str:\n \"\"\"\n Builds data attribute filter in the NetscoutArbor form. For example: '/data/attributes/importance>1' where\n key=importance operator='>' and value=1.\n The function iterates over all arguments (besides operators listed in the OPERATOR_NAME_DICTIONARY) and chain\n together the 'key operator val' such that the argument name is 'key', its value is 'val' and operator is '=' if\n no relevant operator is present. In case of multiple parameters the attributes are separated with 'AND'.\n\n Args:\n attributes_dict (dict): Dict containing key values filter parameters. for example: {'importance': 1}\n\n Returns:\n (str): Netscout data attribute filter string. 
For example:\n /data/attributes/importance>1 AND /data/attributes/ongoing=true\n \"\"\"\n param_list = []\n operator_names = self.OPERATOR_NAME_DICTIONARY.values()\n for key, val in attributes_dict.items():\n\n # We don't create a filter for operator names\n if key not in operator_names and val:\n operator = '=' # type: str\n\n # If the current parameter supports a special operator (it appears in the OPERATOR_NAME_DICTIONARY),\n # we take the operator value using the operator name (that appears in the OPERATOR_NAME_DICTIONARY)\n if operator_name := self.OPERATOR_NAME_DICTIONARY.get(key):\n operator = attributes_dict.get(operator_name, '') if attributes_dict.get(\n operator_name) else '='\n\n param_list.append(f'/data/attributes/{key + operator + val}')\n return ' AND '.join(param_list)\n\n def fetch_incidents(self, params_dict: dict) -> Tuple[list, str]:\n \"\"\"\n Perform fetch incidents process.\n 1. We first save the current time to know what was the time at the beginning of the incidents counting process.\n 2. We calculate the amount of incidents we need to fetch by performing a query for all incident newer\n than last run (or first fetch), we do this by setting the page size to 1, which makes the amount of returned\n pages to be equal to the amount of incidents.\n 3. Then, to get the relevant incidents, we query for all incidents *older* then the time we sampled in the\n step 1, with page size equal to the amount of incidents from step 2. This ensures that the first page in\n this search will have all of the incidents created after the given start time and only them.\n 4. 
Finally out of the relevant incidents we take the older ones (from the end of the list) and set the new\n start time to the creation time of the first incidnt in the list.\n Args:\n params_dict (dict): The params configured by the user to perform the fetch with.\n Returns:\n (list, str): List of incidents to save and string representing the creation time of the latest incident to\n be saved.\n \"\"\"\n last_run = demisto.getLastRun()\n new_last_start_time = last_start_time = last_run.get('LastFetchTime', self.first_fetch)\n demisto.debug(f'Last fetch time to use is: {last_start_time}')\n\n # We calculate the page size to query, by performing an incidents query with page size = 1, the amount of\n # returned pages will equal to amount of incidents\n now = datetime.now(timezone.utc).isoformat()\n amount_of_incidents = self.calculate_amount_of_incidents(start_time=last_start_time, params_dict=params_dict)\n incidents: list = []\n\n if amount_of_incidents:\n time_attributes_dict = assign_params(start_time=now, start_time_operator='<')\n params_dict.update(time_attributes_dict)\n data_attribute_filter = self.build_data_attribute_filter(params_dict)\n demisto.debug(\n f'NetscoutArborSightline fetch params are: page_size={amount_of_incidents}, '\n f'search_filter={data_attribute_filter}')\n\n # We use the status_list_to_retry since in some rare cases the API returns 500 error on consecutive API\n # calls.\n results = self.list_alerts(page_size=amount_of_incidents, search_filter=data_attribute_filter,\n status_list_to_retry=[500])\n all_alerts = results.get('data', [])\n short_alert_list = all_alerts[-1 * self.max_fetch:]\n if short_alert_list:\n new_last_start_time = short_alert_list[0].get('attributes', {}).get('start_time')\n\n for alert in reversed(short_alert_list):\n start_time = alert.get('attributes', {}).get('start_time')\n alert_type = alert.get('attributes', {}).get('alert_type')\n incidents.append({\n 'name': f\"{alert_type}: {alert.get('id')}\",\n 'occurred': 
start_time,\n 'rawJSON': json.dumps(alert)\n })\n return incidents, new_last_start_time\n\n def fetch_incidents_loop(self) -> Tuple[list, str]:\n \"\"\"\n Calls the fetch incidents function to pull incidents with for each alert_type/alert_class separately.\n\n Returns:\n (list, str): List of incidents to save and string representing the creation time of the latest incident to\n be saved.\n \"\"\"\n incidents = []\n params_dict = assign_params(alert_class=self.alert_class, alert_type=self.alert_type,\n importance=self.importance, classification=self.classification,\n importance_operator=self.importance_operator, ongoing=self.ongoing)\n if self.alert_type:\n key = 'alert_type'\n class_type_list = self.alert_type\n\n elif self.alert_class:\n key = 'alert_class'\n class_type_list = self.alert_class\n\n if self.alert_class or self.alert_type:\n for item in class_type_list:\n params_dict[key] = item\n\n last_incidents, new_last_start_time = self.fetch_incidents(params_dict)\n incidents += last_incidents\n sleep(5)\n else:\n incidents, new_last_start_time = self.fetch_incidents(params_dict)\n\n return incidents, new_last_start_time\n\n def list_alerts(self, page: Optional[int] = None, page_size: Optional[int] = None,\n search_filter: Optional[str] = None, status_list_to_retry: list = None) -> dict:\n return self.http_request(\n method='GET',\n url_suffix='alerts',\n status_list_to_retry=status_list_to_retry,\n params=assign_params(page=page, perPage=page_size, filter=search_filter)\n )\n\n def get_alert(self, alert_id: str) -> dict:\n return self.http_request(\n method='GET',\n url_suffix=f'alerts/{alert_id}'\n )\n\n def get_annotations(self, alert_id: str) -> dict:\n return self.http_request(\n method='GET',\n url_suffix=f'alerts/{alert_id}/annotations'\n )\n\n def list_mitigations(self, mitigation_id: str, page: Optional[int] = None, page_size: Optional[int] = None) -> dict:\n return self.http_request(\n method='GET',\n url_suffix=f'mitigations/{mitigation_id}' if 
mitigation_id else 'mitigations',\n params=assign_params(page=page, perPage=page_size)\n\n )\n\n def create_mitigation(self, data: dict) -> dict:\n return self.http_request(\n method='POST',\n url_suffix='mitigations/',\n json_data=data\n )\n\n def delete_mitigation(self, mitigation_id: str):\n self.http_request(\n method='DELETE',\n url_suffix=f'mitigations/{mitigation_id}',\n return_empty_response=True\n )\n\n def mitigation_template_list(self) -> dict:\n return self.http_request(\n method='GET',\n url_suffix='mitigation_templates/'\n )\n\n def router_list(self) -> dict:\n return self.http_request(\n method='GET',\n url_suffix='routers/'\n )\n\n def managed_object_list(self, page: Optional[int] = None, page_size: Optional[int] = None) -> dict:\n return self.http_request(\n method='GET',\n url_suffix='managed_objects/',\n params=assign_params(page=page, perPage=page_size)\n )\n\n def tms_group_list(self) -> dict:\n return self.http_request(\n method='GET',\n url_suffix='tms_groups/'\n )\n\n\n''' HELPER FUNCTIONS '''\n\n\ndef clean_links(target_obj: Union[dict, list]):\n \"\"\"\n Recursively look for a all keys named 'links' and remove them from the object.\n Args:\n target_obj (dict/list): An object to remove the links key from.\n \"\"\"\n\n if isinstance(target_obj, dict):\n remove_keys(target_obj, ['links'])\n for val in target_obj.values():\n clean_links(val)\n\n if isinstance(target_obj, list):\n for i in target_obj:\n clean_links(i)\n\n\ndef validate_json_arg(json_str: str, arg_name: str) -> dict:\n \"\"\"\n Parse the json data. 
If the format is invalid an appropriate exception will be raised\n Args:\n json_str (str): The data to parse\n arg_name (str): The argument name where the data eas given (for exception purposes)\n Return:\n (dict): dict representing the given json\n \"\"\"\n try:\n sub_object = json.loads(json_str)\n return sub_object\n except Exception as err:\n raise DemistoException(\n f'The value given in the {arg_name} argument is not a valid JSON format:\\n{json_str}\\nERROR:\\n{err}')\n\n\ndef remove_keys(obj: dict, keys_to_remove: list):\n \"\"\"\n Removes the the given keys from a given dict.\n Args:\n obj (dict): The object to remove the key from.\n keys_to_remove (lst): List of keys to remove.\n \"\"\"\n for key in keys_to_remove:\n if obj.get(key):\n del obj[key]\n\n\ndef flatten_key(obj: dict, key_to_flatten: str):\n \"\"\"\n Extract the data inside a given key to the root level of the object.\n Args:\n obj (dict): The object to extract the data from.\n key_to_flatten (str): The key name to extract.\n \"\"\"\n if sub_dictionary := obj.get(key_to_flatten):\n for sub_key, sub_val in sub_dictionary.items():\n obj[sub_key] = sub_val\n del obj[key_to_flatten]\n\n\ndef build_human_readable(data: dict) -> dict:\n \"\"\"\n Removes the relationships and subobject data from the object and extracts the data inside attributes to the root\n level of the object to be displayed nicely in human readable.\n Args:\n data (dict): The data to create human readable from.\n Return:\n (dict): The same object without the relationships data and with the attributes extracted to the root level.\n \"\"\"\n hr = deepcopy(data)\n flatten_key(hr, 'attributes')\n remove_keys(hr, ['relationships', 'subobject'])\n return hr\n\n\ndef build_output(data: dict, extend_data: bool = False, key_to_flat: str = 'attributes',\n keys_to_remove: list = None) -> dict:\n keys_to_remove = ['relationships'] if not keys_to_remove else keys_to_remove\n data_copy = deepcopy(data)\n clean_links(data_copy)\n if 
key_to_flat:\n flatten_key(data_copy, key_to_flat)\n if not extend_data:\n remove_keys(data_copy, keys_to_remove)\n return data_copy\n\n\ndef cast_importance_to_minimal(importance: str) -> Optional[str]:\n \"\"\"\n If a minimal importance param was given, cast it to the corresponding minimal value to be used with the '>'\n operator.\n That is:\n High -> '2' -> '1'\n Medium -> '1' -> '0'\n Low -> '0' -> None (so it will be ignored and will not be used as an importance param)\n Args:\n importance (str): The importance to cast.\n Returns:\n (str): The value to be used withh the '>` operator.\n \"\"\"\n str_importance = IMPORTANCE_DICTIONARY.get(importance)\n if str_importance and str_importance != '0':\n return str(int(str_importance) - 1)\n else:\n return None\n\n\n''' COMMAND FUNCTIONS '''\n\n\ndef test_module(client: NetscoutClient) -> str:\n client.fetch_incidents_loop()\n return 'ok'\n\n\ndef fetch_incidents_command(client: NetscoutClient):\n incidents, last_start_time = client.fetch_incidents_loop()\n demisto.incidents(incidents)\n demisto.setLastRun({'LastFetchTime': last_start_time})\n\n\ndef list_alerts_command(client: NetscoutClient, args: dict):\n limit = arg_to_number(args.get('limit'))\n page = arg_to_number(args.get('page'))\n alert_id = args.get('alert_id')\n alert_class = args.get('alert_class')\n alert_type = args.get('alert_type')\n classification = args.get('classification')\n importance = IMPORTANCE_DICTIONARY.get(args.get('importance', ''))\n importance_operator = args.get('importance_operator')\n ongoing = args.get('ongoing') if args.get('ongoing') else None\n start_time = args.get('start_time')\n start_time_operator = args.get('start_time_operator')\n stop_time = args.get('stop_time')\n stop_time_operator = args.get('stop_time_operator')\n managed_object_id = args.get('managed_object_id')\n extend_data = argToBoolean(args.get('extend_data', False))\n if alert_id:\n raw_result = client.get_alert(alert_id)\n else:\n attributes_dict = 
assign_params(alert_id=alert_id, alert_class=alert_class, alert_type=alert_type,\n classification=classification, importance=importance,\n importance_operator=importance_operator, ongoing=ongoing, start_time=start_time,\n start_time_operator=start_time_operator, stop_time=stop_time,\n stop_time_operator=stop_time_operator)\n data_attribute_filter = client.build_data_attribute_filter(attributes_dict)\n data_relationships_filter = f'AND /data/relationships/managed_object/data/id={managed_object_id}' if \\\n managed_object_id else ''\n search_filter = data_attribute_filter + data_relationships_filter\n raw_result = client.list_alerts(page=page, page_size=limit, search_filter=search_filter)\n\n data = raw_result.get('data')\n data = data if isinstance(data, list) else [data]\n hr = [build_human_readable(data=alert) for alert in data]\n outputs = [build_output(data=alert, extend_data=extend_data) for alert in data]\n\n return CommandResults(outputs_prefix='NASightline.Alert',\n outputs_key_field='id',\n outputs=outputs,\n readable_output=tableToMarkdown('Alerts', hr),\n raw_response=raw_result)\n\n\ndef alert_annotation_list_command(client: NetscoutClient, args: dict):\n alert_id = args.get('alert_id', '')\n extend_data = argToBoolean(args.get('extend_data', False))\n raw_result = client.get_annotations(alert_id)\n data = raw_result.get('data', [])\n hr = [build_human_readable(data=annotation) for annotation in data]\n annotations = [build_output(data=annotation, extend_data=extend_data) for annotation in data]\n context = {'AlertID': alert_id, 'Annotations': annotations}\n return CommandResults(outputs_prefix='NASightline.AlertAnnotation',\n outputs_key_field='AlertID',\n outputs=context,\n readable_output=tableToMarkdown(f'Alert {alert_id} annotations', hr),\n raw_response=raw_result)\n\n\ndef mitigation_list_command(client: NetscoutClient, args: dict):\n page = arg_to_number(args.get('page'))\n limit = arg_to_number(args.get('limit'))\n mitigation_id = 
args.get('mitigation_id', '')\n extend_data = argToBoolean(args.get('extend_data', False))\n raw_result = client.list_mitigations(mitigation_id, page=page, page_size=limit)\n data = raw_result.get('data')\n data = data if isinstance(data, list) else [data]\n hr = [build_human_readable(data=mitigation) for mitigation in data]\n mitigations = [build_output(data=mitigation, keys_to_remove=['relationships', 'subobject'], extend_data=extend_data)\n for mitigation in data]\n return CommandResults(outputs_prefix='NASightline.Mitigation',\n outputs_key_field='id',\n outputs=mitigations,\n readable_output=tableToMarkdown('Mitigation list', hr),\n raw_response=raw_result)\n\n\ndef mitigation_create_command(client: NetscoutClient, args: dict):\n ip_version = IP_DICTIONARY.get(args['ip_version'])\n if not ip_version:\n raise DemistoException('ip_version value can be one of the following: '\n f'{\",\".join(list(IP_DICTIONARY.keys()))}. {args.get(\"ip_version\")} was given.')\n description = args.get('description')\n name = args.get('name')\n ongoing = args.get('ongoing', 'false')\n sub_type = args.get('sub_type')\n sub_object = validate_json_arg(args['sub_object'], 'sub_object')\n alert_id = args.get('alert_id')\n managed_object_id = args.get('managed_object_id')\n mitigation_template_id = args.get('mitigation_template_id')\n router_ids = argToList(args.get('router_ids'))\n tms_group_id = args.get('tms_group_id')\n extend_data = argToBoolean(args.get('extend_data', False))\n\n relationships = client.build_relationships(alert=alert_id, managed_object=managed_object_id,\n mitigation_template=mitigation_template_id, routers=router_ids,\n tms_group=tms_group_id)\n attributes = assign_params(description=description, ip_version=ip_version, name=name, ongoing=ongoing,\n subtype=sub_type, subobject=sub_object)\n object_data = {'relationships': relationships, 'attributes': attributes}\n raw_result = client.create_mitigation(data={'data': object_data})\n data = raw_result.get('data', 
{})\n hr = build_human_readable(data=data)\n mitigation = build_output(data=data, extend_data=extend_data)\n return CommandResults(outputs_prefix='NASightline.Mitigation',\n outputs_key_field='id',\n outputs=mitigation,\n readable_output=tableToMarkdown('Mitigation was created', hr),\n raw_response=raw_result)\n\n\ndef mitigation_delete_command(client: NetscoutClient, args: Dict[str, str]):\n mitigation_id = args.get('mitigation_id', '')\n client.delete_mitigation(mitigation_id)\n hr = f'### Mitigation {mitigation_id} was deleted'\n return CommandResults(readable_output=hr)\n\n\ndef mitigation_template_list_command(client: NetscoutClient, args: dict):\n extend_data = argToBoolean(args.get('extend_data', False))\n raw_result = client.mitigation_template_list()\n data = raw_result.get('data')\n data = data if isinstance(data, list) else [data]\n hr = [build_human_readable(data=mitigation_template) for mitigation_template in data]\n mitigation_templates = [\n build_output(data=mitigation_template, extend_data=extend_data, keys_to_remove=['relationships', 'subobject'])\n for mitigation_template in data]\n\n return CommandResults(outputs_prefix='NASightline.MitigationTemplate',\n outputs_key_field='id',\n outputs=mitigation_templates,\n readable_output=tableToMarkdown('Mitigation template list', hr, removeNull=True),\n raw_response=raw_result)\n\n\ndef router_list_command(client: NetscoutClient, args: dict):\n extend_data = argToBoolean(args.get('extend_data', False))\n raw_result = client.router_list()\n data = raw_result.get('data')\n data = data if isinstance(data, list) else [data]\n hr = [build_human_readable(router) for router in data]\n routers = [build_output(data=router, extend_data=extend_data) for router in data]\n return CommandResults(outputs_prefix='NASightline.Router',\n outputs_key_field='id',\n outputs=routers,\n readable_output=tableToMarkdown('Router list', hr, headers=ROUTERS_HR_HEADERS,\n removeNull=True),\n raw_response=raw_result)\n\n\ndef 
managed_object_list_command(client: NetscoutClient, args: dict):\n page = arg_to_number(args.get('page'))\n limit = arg_to_number(args.get('limit'))\n extend_data = argToBoolean(args.get('extend_data', False))\n raw_result = client.managed_object_list(page=page, page_size=limit)\n data = raw_result.get('data')\n data = data if isinstance(data, list) else [data]\n objects = [build_output(data=managed_object, extend_data=extend_data) for managed_object in data]\n hr = [build_human_readable(data=managed_object) for managed_object in data]\n return CommandResults(outputs_prefix='NASightline.ManagedObject',\n outputs_key_field='id',\n outputs=objects,\n readable_output=tableToMarkdown('Managed object list', hr,\n headers=MANAGED_OBJECTS_HR_HEADERS, removeNull=True),\n raw_response=raw_result)\n\n\ndef tms_group_list_command(client: NetscoutClient, args: dict):\n extend_data = argToBoolean(args.get('extend_data', False))\n raw_result = client.tms_group_list()\n data = raw_result.get('data')\n data = data if isinstance(data, list) else [data]\n hr = [build_human_readable(data=tms_group) for tms_group in data]\n groups = [build_output(data=group, extend_data=extend_data) for group in data]\n return CommandResults(outputs_prefix='NASightline.TMSGroup',\n outputs_key_field='id',\n outputs=groups,\n readable_output=tableToMarkdown('TMS group list', hr, removeNull=True),\n raw_response=raw_result)\n\n\n''' MAIN FUNCTION '''\n\n\ndef main() -> None:\n try:\n command = demisto.command()\n params = demisto.params()\n\n if not params.get('User') or not (api_token := params.get('User', {}).get('password')):\n raise DemistoException('Missing API Key. 
Fill in a valid key in the integration configuration.')\n base_url = urljoin(params['url'], 'api/sp')\n verify_certificate = not params.get('insecure', False)\n proxy = params.get('proxy', False)\n first_fetch = None\n if first_fetch_dt := arg_to_datetime(params.get('first_fetch', '3 days')):\n first_fetch = first_fetch_dt.isoformat()\n max_fetch = min(arg_to_number(params.get('max_fetch', 50)), 100)\n alert_class = argToList(params.get('alert_class'))\n alert_type = argToList(params.get('alert_type'))\n if alert_class and alert_type:\n raise DemistoException(\n 'Cannot filter alerts with both \\'Alert Class\\' and \\'Alert Type\\' configured. Either choose '\n 'the entire class you want to fetch or the specific types from within that class.')\n classification = params.get('classification')\n importance = cast_importance_to_minimal(params.get('importance'))\n ongoing = ONGOING_DICTIONARY.get(params.get('ongoing'))\n\n demisto.debug(f'Command being called is {demisto.command()}')\n\n headers: Dict = {\n 'X-Arbux-APIToken': api_token\n }\n\n client = NetscoutClient(\n base_url=base_url,\n verify=verify_certificate,\n headers=headers,\n proxy=proxy,\n first_fetch=first_fetch,\n max_fetch=max_fetch,\n alert_class=alert_class,\n alert_type=alert_type,\n classification=classification,\n importance=importance,\n ongoing=ongoing\n )\n args: dict = demisto.args()\n\n result = ''\n if command == 'test-module':\n result = test_module(client)\n elif command == 'fetch-incidents':\n fetch_incidents_command(client)\n elif command == 'na-sightline-alert-list':\n result = list_alerts_command(client, args)\n elif command == 'na-sightline-alert-annotation-list':\n result = alert_annotation_list_command(client, args)\n elif command == 'na-sightline-mitigation-list':\n result = mitigation_list_command(client, args)\n elif command == 'na-sightline-mitigation-create':\n result = mitigation_create_command(client, args)\n elif command == 'na-sightline-mitigation-delete':\n result = 
mitigation_delete_command(client, args)\n elif command == 'na-sightline-mitigation-template-list':\n result = mitigation_template_list_command(client, args)\n elif command == 'na-sightline-router-list':\n result = router_list_command(client, args)\n elif command == 'na-sightline-managed-object-list':\n result = managed_object_list_command(client, args)\n elif command == 'na-sightline-tms-group-list':\n result = tms_group_list_command(client, args)\n else:\n raise NotImplementedError(f'Command: {command} is not implemented')\n\n if result:\n return_results(result)\n\n except Exception as e:\n demisto.error(traceback.format_exc()) # print the traceback\n return_error(f'Failed to execute {command} command.\\nError:\\n{str(e)}')\n\n\n''' ENTRY POINT '''\nif __name__ in ('__main__', '__builtin__', 'builtins'):\n main()\n"},"avg_line_length":{"kind":"number","value":42.4687898089,"string":"42.46879"},"max_line_length":{"kind":"number","value":120,"string":"120"},"alphanum_fraction":{"kind":"number","value":0.6280820685,"string":"0.628082"}}},{"rowIdx":46446,"cells":{"hexsha":{"kind":"string","value":"359354cc31cb25184bb81a7d1da99be4ed8f4a87"},"size":{"kind":"number","value":714,"string":"714"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"Packs/Ransomware/Scripts/RansomwareDataEncryptionStatus/RansomwareDataEncryptionStatus.py"},"max_stars_repo_name":{"kind":"string","value":"diCagri/content"},"max_stars_repo_head_hexsha":{"kind":"string","value":"c532c50b213e6dddb8ae6a378d6d09198e08fc9f"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_stars_count":{"kind":"number","value":799,"string":"799"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2016-08-02T06:43:14.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2022-03-31T11:10:11.000Z"},"max_issues_repo_path":{"kind":"string","value":"Packs/Ransomware/Scripts/RansomwareDataEncryptionStatus/RansomwareDataEncryptionStatus.py"},"max_issues_repo_name":{"kind":"string","value":"diCagri/content"},"max_issues_repo_head_hexsha":{"kind":"string","value":"c532c50b213e6dddb8ae6a378d6d09198e08fc9f"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"number","value":9317,"string":"9,317"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2016-08-07T19:00:51.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2022-03-31T21:56:04.000Z"},"max_forks_repo_path":{"kind":"string","value":"Packs/Ransomware/Scripts/RansomwareDataEncryptionStatus/RansomwareDataEncryptionStatus.py"},"max_forks_repo_name":{"kind":"string","value":"diCagri/content"},"max_forks_repo_head_hexsha":{"kind":"string","value":"c532c50b213e6dddb8ae6a378d6d09198e08fc9f"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"number","value":1297,"string":"1,297"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2016-08-04T13:59:00.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2022-03-31T23:43:06.000Z"},"content":{"kind":"string","value":"import demistomock as demisto # noqa: F401\nfrom CommonServerPython import * # noqa: F401\n\nincident = demisto.incidents()\nquery = incident[0].get('CustomFields', {}).get('ransomwaredataencryptionstatus', \"Pending Confirmation\")\nColor = 'green'\n\nif query == \"Encrypted\":\n color = 'red'\n html = \"

Encrypted

\"\n\nelif query == \"Decrypted\":\n color = 'green'\n html = \"

Decrypted

\"\n\nelse:\n html = \"

Pending Confirmation

\"\n\n\ndemisto.results({\n 'ContentsFormat': formats['html'],\n 'Type': entryTypes['note'],\n 'Contents': html\n})\n"},"avg_line_length":{"kind":"number","value":28.56,"string":"28.56"},"max_line_length":{"kind":"number","value":105,"string":"105"},"alphanum_fraction":{"kind":"number","value":0.6680672269,"string":"0.668067"}}},{"rowIdx":46447,"cells":{"hexsha":{"kind":"string","value":"35bb0ca6184fb04174e57035d8b72fa813684d5f"},"size":{"kind":"number","value":219,"string":"219"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"pacman-termux/test/pacman/tests/query007.py"},"max_stars_repo_name":{"kind":"string","value":"Maxython/pacman-for-termux"},"max_stars_repo_head_hexsha":{"kind":"string","value":"3b208eb9274cbfc7a27fca673ea8a58f09ebad47"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":23,"string":"23"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2021-05-21T19:11:06.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2022-03-31T18:14:20.000Z"},"max_issues_repo_path":{"kind":"string","value":"source/pacman-6.0.1/test/pacman/tests/query007.py"},"max_issues_repo_name":{"kind":"string","value":"Scottx86-64/dotfiles-1"},"max_issues_repo_head_hexsha":{"kind":"string","value":"51004b1e2b032664cce6b553d2052757c286087d"},"max_issues_repo_licenses":{"kind":"list like","value":["Unlicense"],"string":"[\n 
\"Unlicense\"\n]"},"max_issues_count":{"kind":"number","value":11,"string":"11"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2021-05-21T12:08:44.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2021-12-21T08:30:08.000Z"},"max_forks_repo_path":{"kind":"string","value":"source/pacman-6.0.1/test/pacman/tests/query007.py"},"max_forks_repo_name":{"kind":"string","value":"Scottx86-64/dotfiles-1"},"max_forks_repo_head_hexsha":{"kind":"string","value":"51004b1e2b032664cce6b553d2052757c286087d"},"max_forks_repo_licenses":{"kind":"list like","value":["Unlicense"],"string":"[\n \"Unlicense\"\n]"},"max_forks_count":{"kind":"number","value":1,"string":"1"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2021-09-26T08:44:40.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2021-09-26T08:44:40.000Z"},"content":{"kind":"string","value":"self.description = \"Query ownership of file in root\"\n\nsp = pmpkg(\"dummy\")\nsp.files = [\"etc/config\"]\nself.addpkg2db(\"local\", sp)\n\nself.filesystem = [\"config\"]\n\nself.args = \"-Qo /config\"\n\nself.addrule(\"PACMAN_RETCODE=1\")\n"},"avg_line_length":{"kind":"number","value":18.25,"string":"18.25"},"max_line_length":{"kind":"number","value":52,"string":"52"},"alphanum_fraction":{"kind":"number","value":0.6894977169,"string":"0.689498"}}},{"rowIdx":46448,"cells":{"hexsha":{"kind":"string","value":"17de573493b1e9783ba83df66e0d3739657b2190"},"size":{"kind":"number","value":89,"string":"89"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"backend/apps/mapview/apps.py"},"max_stars_repo_name":{"kind":"string","value":"n-hackert/match4healthcare"},"max_stars_repo_head_hexsha":{"kind":"string","value":"761248c27b49e568c545c643a72eac9a040649d7"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_stars_count":{"kind":"number","value":3,"string":"3"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2020-03-27T20:39:31.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2020-03-31T20:24:55.000Z"},"max_issues_repo_path":{"kind":"string","value":"backend/apps/mapview/apps.py"},"max_issues_repo_name":{"kind":"string","value":"n-hackert/match4healthcare"},"max_issues_repo_head_hexsha":{"kind":"string","value":"761248c27b49e568c545c643a72eac9a040649d7"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"number","value":21,"string":"21"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2020-03-28T09:57:15.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2020-03-31T11:38:00.000Z"},"max_forks_repo_path":{"kind":"string","value":"backend/apps/mapview/apps.py"},"max_forks_repo_name":{"kind":"string","value":"n-hackert/match4healthcare"},"max_forks_repo_head_hexsha":{"kind":"string","value":"761248c27b49e568c545c643a72eac9a040649d7"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"from django.apps import AppConfig\n\n\nclass MapviewConfig(AppConfig):\n name = 
'mapview'\n"},"avg_line_length":{"kind":"number","value":14.8333333333,"string":"14.833333"},"max_line_length":{"kind":"number","value":33,"string":"33"},"alphanum_fraction":{"kind":"number","value":0.7528089888,"string":"0.752809"}}},{"rowIdx":46449,"cells":{"hexsha":{"kind":"string","value":"17ee6d9891fbf670ec801d6f147e785d49ba48ef"},"size":{"kind":"number","value":282,"string":"282"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"nettests/server/main.py"},"max_stars_repo_name":{"kind":"string","value":"Laighno/evt"},"max_stars_repo_head_hexsha":{"kind":"string","value":"90b94e831aebb62c6ad19ce59c9089e9f51cfd77"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"number","value":1411,"string":"1,411"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2018-04-23T03:57:30.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2022-02-13T10:34:22.000Z"},"max_issues_repo_path":{"kind":"string","value":"nettests/server/main.py"},"max_issues_repo_name":{"kind":"string","value":"Zhang-Zexi/evt"},"max_issues_repo_head_hexsha":{"kind":"string","value":"e90fe4dbab4b9512d120c79f33ecc62791e088bd"},"max_issues_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_issues_count":{"kind":"number","value":27,"string":"27"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2018-06-11T10:34:42.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2019-07-27T08:50:02.000Z"},"max_forks_repo_path":{"kind":"string","value":"nettests/server/main.py"},"max_forks_repo_name":{"kind":"string","value":"Zhang-Zexi/evt"},"max_forks_repo_head_hexsha":{"kind":"string","value":"e90fe4dbab4b9512d120c79f33ecc62791e088bd"},"max_forks_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n 
\"Apache-2.0\"\n]"},"max_forks_count":{"kind":"number","value":364,"string":"364"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2018-06-09T12:11:53.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2020-12-15T03:26:48.000Z"},"content":{"kind":"string","value":"import zmq\nfrom twisted.internet import reactor\n\nfrom handler import Handler\nfrom server import Server\n\nctx = zmq.Context()\nsocket = ctx.socket(zmq.REP)\nsocket.bind('tcp://*:6666')\n\nhandler = Handler()\n\nserver = Server(socket).onReadable(handler).registerOn(reactor)\n\nreactor.run()\n"},"avg_line_length":{"kind":"number","value":17.625,"string":"17.625"},"max_line_length":{"kind":"number","value":63,"string":"63"},"alphanum_fraction":{"kind":"number","value":0.7588652482,"string":"0.758865"}}},{"rowIdx":46450,"cells":{"hexsha":{"kind":"string","value":"f4c89fb117109acc7a91e4f6e11f7fcaab41eb74"},"size":{"kind":"number","value":353,"string":"353"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"accounts/urls.py"},"max_stars_repo_name":{"kind":"string","value":"JanakiRaman-2002/Arre-yaar"},"max_stars_repo_head_hexsha":{"kind":"string","value":"c0b44ca1f8884a09116241dcd0bf7cfcee3b785d"},"max_stars_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"accounts/urls.py"},"max_issues_repo_name":{"kind":"string","value":"JanakiRaman-2002/Arre-yaar"},"max_issues_repo_head_hexsha":{"kind":"string","value":"c0b44ca1f8884a09116241dcd0bf7cfcee3b785d"},"max_issues_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n 
\"Apache-2.0\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"accounts/urls.py"},"max_forks_repo_name":{"kind":"string","value":"JanakiRaman-2002/Arre-yaar"},"max_forks_repo_head_hexsha":{"kind":"string","value":"c0b44ca1f8884a09116241dcd0bf7cfcee3b785d"},"max_forks_repo_licenses":{"kind":"list like","value":["Apache-2.0"],"string":"[\n \"Apache-2.0\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"from django.shortcuts import redirect\nfrom django.urls import path\nfrom . import views\nfrom django.contrib.auth.views import LoginView,LogoutView\n\napp_name = \"accounts\"\n\nurlpatterns = [\n path('signup/',views.signup,name = 'signup'),\n path('userlogin/',views.userlogin,name = 'userlogin'),\n path('logout/',LogoutView.as_view(),name = 'logout')\n]"},"avg_line_length":{"kind":"number","value":29.4166666667,"string":"29.416667"},"max_line_length":{"kind":"number","value":58,"string":"58"},"alphanum_fraction":{"kind":"number","value":0.730878187,"string":"0.730878"}}},{"rowIdx":46451,"cells":{"hexsha":{"kind":"string","value":"872fa07d2ebdd55abe7e41cdc5b0e5aa49b123eb"},"size":{"kind":"number","value":161,"string":"161"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"exercises/de/test_03_14_02.py"},"max_stars_repo_name":{"kind":"string","value":"Jette16/spacy-course"},"max_stars_repo_head_hexsha":{"kind":"string","value":"32df0c8f6192de6c9daba89740a28c0537e4d6a0"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n 
\"MIT\"\n]"},"max_stars_count":{"kind":"number","value":2085,"string":"2,085"},"max_stars_repo_stars_event_min_datetime":{"kind":"string","value":"2019-04-17T13:10:40.000Z"},"max_stars_repo_stars_event_max_datetime":{"kind":"string","value":"2022-03-30T21:51:46.000Z"},"max_issues_repo_path":{"kind":"string","value":"exercises/de/test_03_14_02.py"},"max_issues_repo_name":{"kind":"string","value":"Jette16/spacy-course"},"max_issues_repo_head_hexsha":{"kind":"string","value":"32df0c8f6192de6c9daba89740a28c0537e4d6a0"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"number","value":79,"string":"79"},"max_issues_repo_issues_event_min_datetime":{"kind":"string","value":"2019-04-18T14:42:55.000Z"},"max_issues_repo_issues_event_max_datetime":{"kind":"string","value":"2022-03-07T08:15:43.000Z"},"max_forks_repo_path":{"kind":"string","value":"exercises/de/test_03_14_02.py"},"max_forks_repo_name":{"kind":"string","value":"Jette16/spacy-course"},"max_forks_repo_head_hexsha":{"kind":"string","value":"32df0c8f6192de6c9daba89740a28c0537e4d6a0"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"number","value":361,"string":"361"},"max_forks_repo_forks_event_min_datetime":{"kind":"string","value":"2019-04-17T13:34:32.000Z"},"max_forks_repo_forks_event_max_datetime":{"kind":"string","value":"2022-03-28T04:42:45.000Z"},"content":{"kind":"string","value":"def test():\n assert (\n \"docs = list(nlp.pipe(TEXTS))\" in __solution__\n ), \"Verwendest du nlp.pipe in einer Liste?\"\n __msg__.good(\"Gute 
Arbeit!\")\n"},"avg_line_length":{"kind":"number","value":26.8333333333,"string":"26.833333"},"max_line_length":{"kind":"number","value":54,"string":"54"},"alphanum_fraction":{"kind":"number","value":0.6149068323,"string":"0.614907"}}},{"rowIdx":46452,"cells":{"hexsha":{"kind":"string","value":"5e44f560502732f97e57d340d29c5802a56d4051"},"size":{"kind":"number","value":402109,"string":"402,109"},"ext":{"kind":"string","value":"py"},"lang":{"kind":"string","value":"Python"},"max_stars_repo_path":{"kind":"string","value":"ardundzdf.py"},"max_stars_repo_name":{"kind":"string","value":"rotdrop/Kodi-Addon-ARDundZDF"},"max_stars_repo_head_hexsha":{"kind":"string","value":"ceb7ab2b580eb68b7e3cd52318a7058f69518988"},"max_stars_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_stars_count":{"kind":"null"},"max_stars_repo_stars_event_min_datetime":{"kind":"null"},"max_stars_repo_stars_event_max_datetime":{"kind":"null"},"max_issues_repo_path":{"kind":"string","value":"ardundzdf.py"},"max_issues_repo_name":{"kind":"string","value":"rotdrop/Kodi-Addon-ARDundZDF"},"max_issues_repo_head_hexsha":{"kind":"string","value":"ceb7ab2b580eb68b7e3cd52318a7058f69518988"},"max_issues_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_issues_count":{"kind":"null"},"max_issues_repo_issues_event_min_datetime":{"kind":"null"},"max_issues_repo_issues_event_max_datetime":{"kind":"null"},"max_forks_repo_path":{"kind":"string","value":"ardundzdf.py"},"max_forks_repo_name":{"kind":"string","value":"rotdrop/Kodi-Addon-ARDundZDF"},"max_forks_repo_head_hexsha":{"kind":"string","value":"ceb7ab2b580eb68b7e3cd52318a7058f69518988"},"max_forks_repo_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"max_forks_count":{"kind":"null"},"max_forks_repo_forks_event_min_datetime":{"kind":"null"},"max_forks_repo_forks_event_max_datetime":{"kind":"null"},"content":{"kind":"string","value":"# -*- coding: utf-8 -*-\n\n# 
Python3-Kompatibilität:\nfrom __future__ import absolute_import\t\t# sucht erst top-level statt im akt. Verz. \nfrom __future__ import division\t\t\t\t# // -> int, / -> float\nfrom __future__ import print_function\t\t# PYTHON2-Statement -> Funktion\nfrom kodi_six import xbmc, xbmcaddon, xbmcplugin, xbmcgui, xbmcvfs\n\n# o. Auswirkung auf die unicode-Strings in PYTHON3:\nfrom kodi_six.utils import py2_encode, py2_decode\n\nimport os, sys, subprocess \nPYTHON2 = sys.version_info.major == 2\nPYTHON3 = sys.version_info.major == 3\nif PYTHON2:\n\tfrom urllib import quote, unquote, quote_plus, unquote_plus, urlencode, urlretrieve\n\tfrom urllib2 import Request, urlopen, URLError \n\tfrom urlparse import urljoin, urlparse, urlunparse, urlsplit, parse_qs\nelif PYTHON3:\n\tfrom urllib.parse import quote, unquote, quote_plus, unquote_plus, urlencode, urljoin, urlparse, urlunparse, urlsplit, parse_qs\n\tfrom urllib.request import Request, urlopen, urlretrieve\n\tfrom urllib.error import URLError\n\ttry:\t\t\t\t\t\t\t\t\t# https://github.com/xbmc/xbmc/pull/18345 (Matrix 19.0-alpha 2)\n\t\txbmc.translatePath = xbmcvfs.translatePath\n\texcept:\n\t\tpass\n\n\n# Python\nimport base64 \t\t\t# url-Kodierung für Kontextmenüs\nimport sys\t\t\t\t# Plattformerkennung\nimport shutil\t\t\t# Dateioperationen\nimport re\t\t\t\t# u.a. Reguläre Ausdrücke, z.B. in CalculateDuration\nimport datetime, time\nimport json\t\t\t\t# json -> Textstrings\nimport string\nimport importlib\t\t# dyn. Laden zur Laufzeit, s. router\n\n\n# ständige Addonmodule - Rest dyn. 
in router\nimport resources.lib.updater as updater\t\nfrom resources.lib.util import *\nimport resources.lib.EPG as EPG\nimport resources.lib.epgRecord as epgRecord\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\n# +++++ ARDundZDF - Addon Kodi-Version, migriert von der Plexmediaserver-Version +++++\n\n# VERSION -> addon.xml aktualisieren\nVERSION = '3.5.5'\nVDATE = '13.11.2020'\n\n#\n#\n\n# (c) 2019 by Roland Scholz, rols1@gmx.de\n# \n# Functions -> README.md\n# \n# \tLicensed under MIT License (MIT)\n# \t(previously licensed under GPL 3.0)\n# \tA copy of the License you find here:\n#\t\thttps://github.com/rols1/Kodi-Addon-ARDundZDF/blob/master/LICENSE.txt\n\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, \n# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR \n# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE \n# FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR \n# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER \n# DEALINGS IN THE SOFTWARE.\n\n\n####################################################################################################\nNAME\t\t\t= 'ARD und ZDF'\nPREFIX \t\t\t= '/video/ardundzdf'\t\t#\t\n\t\t\t\t\t\t\t\t\t\t\t\t\nPLAYLIST \t\t= 'livesenderTV.xml'\t\t# TV-Sender-Logos erstellt von: Arauco (Plex-Forum). 
\t\t\t\t\t\t\t\t\t\t\t\nFAVORITS_Pod \t= 'podcast-favorits.txt' \t# Lesezeichen für Podcast-Erweiterung \nFANART\t\t\t\t\t= 'fanart.png'\t\t# ARD + ZDF - breit\nART \t\t\t\t\t= 'art.png'\t\t\t# ARD + ZDF\nICON \t\t\t\t\t= 'icon.png'\t\t# ARD + ZDF\nICON_SEARCH \t\t\t= 'ard-suche.png'\nICON_ZDF_SEARCH \t\t= 'zdf-suche.png'\t\t\t\t\nICON_FILTER\t\t\t\t= 'icon-filter.png'\t\n\nICON_MAIN_ARD \t\t\t= 'ard-mediathek.png'\nICON_MAIN_ARD_Classic\t= 'ard-mediathek-classic.png'\nICON_MAIN_ZDF \t\t\t= 'zdf-mediathek.png'\nICON_MAIN_ZDFMOBILE\t\t= 'zdf-mobile.png'\nICON_MAIN_TVLIVE \t\t= 'tv-livestreams.png'\nICON_MAIN_RADIOLIVE \t= 'radio-livestreams.png'\nICON_MAIN_UPDATER \t\t= 'plugin-update.png'\nICON_UPDATER_NEW \t\t= 'plugin-update-new.png'\n\nICON_ARD_AZ \t\t\t= 'ard-sendungen-az.png'\nICON_ARD_VERP \t\t\t= 'ard-sendung-verpasst.png'\nICON_ARD_RUBRIKEN \t\t= 'ard-rubriken.png'\nICON_ARD_BARRIEREARM \t= 'ard-barrierearm.png'\nICON_ARD_HOERFASSUNGEN\t= 'ard-hoerfassungen.png'\nICON_ARD_BILDERSERIEN \t= 'ard-bilderserien.png'\n\nICON_ZDF_AZ \t\t\t= 'zdf-sendungen-az.png'\nICON_ZDF_VERP \t\t\t= 'zdf-sendung-verpasst.png'\nICON_ZDF_RUBRIKEN \t\t= 'zdf-rubriken.png'\nICON_ZDF_MEIST \t\t\t= 'zdf-meist-gesehen.png'\nICON_ZDF_BARRIEREARM \t= 'zdf-barrierearm.png'\nICON_ZDF_BILDERSERIEN \t= 'zdf-bilderserien.png'\n\nICON_MAIN_POD\t\t\t= 'radio-podcasts.png'\nICON_POD_AZ\t\t\t\t= 'pod-az.png'\nICON_POD_FEATURE \t\t= 'pod-feature.png'\nICON_POD_TATORT \t\t= 'pod-tatort.png'\nICON_POD_RUBRIK\t \t\t= 'pod-rubriken.png'\nICON_POD_NEU\t\t\t= 'pod-neu.png'\nICON_POD_MEIST\t\t\t= 'pod-meist.png'\nICON_POD_REFUGEE \t\t= 'pod-refugee.png'\nICON_POD_FAVORITEN\t\t= 'pod-favoriten.png'\n\nICON_MAIN_AUDIO\t\t\t= 'ard-audiothek.png'\nICON_AUDIO_LIVE\t\t\t= 'ard-audio-live.png'\nICON_AUDIO_AZ\t\t\t= 'ard-audio-az.png'\n\nICON_OK \t\t\t\t= \"icon-ok.png\"\nICON_INFO \t\t\t\t= \"icon-info.png\"\nICON_WARNING \t\t\t= \"icon-warning.png\"\nICON_NEXT \t\t\t\t= \"icon-next.png\"\nICON_CANCEL \t\t\t= 
\"icon-error.png\"\nICON_MEHR \t\t\t\t= \"icon-mehr.png\"\nICON_DOWNL \t\t\t\t= \"icon-downl.png\"\nICON_DOWNL_DIR\t\t\t= \"icon-downl-dir.png\"\nICON_DELETE \t\t\t= \"icon-delete.png\"\nICON_STAR \t\t\t\t= \"icon-star.png\"\nICON_NOTE \t\t\t\t= \"icon-note.png\"\nICON_SPEAKER \t\t\t= \"icon-speaker.png\"\nICON_TOOLS \t\t\t\t= \"icon-tools.png\"\nICON_PREFS \t\t\t\t= \"icon-preferences.png\"\n\n# Basis DIR-Icons: Tango/folder.png s. Wikipedia Tango_Desktop_Project\nICON_DIR_CURLWGET \t\t= \"Dir-curl-wget.png\"\nICON_DIR_FOLDER\t\t\t= \"Dir-folder.png\"\nICON_DIR_PRG \t\t\t= \"Dir-prg.png\"\nICON_DIR_IMG \t\t\t= \"Dir-img.png\"\nICON_DIR_TXT \t\t\t= \"Dir-text.png\"\nICON_DIR_MOVE \t\t\t= \"Dir-move.png\"\nICON_DIR_MOVE_SINGLE\t= \"Dir-move-single.png\"\nICON_DIR_MOVE_ALL \t\t= \"Dir-move-all.png\"\nICON_DIR_BACK\t \t\t= \"Dir-back.png\"\nICON_DIR_SAVE \t\t\t= \"Dir-save.png\"\n\nICON_DIR_VIDEO \t\t\t= \"Dir-video.png\"\nICON_DIR_WORK \t\t\t= \"Dir-work.png\"\nICON_MOVEDIR_DIR \t\t= \"Dir-moveDir.png\"\nICON_DIR_FAVORITS\t\t= \"Dir-favorits.png\"\n\nICON_DIR_WATCH\t\t\t= \"Dir-watch.png\"\nICON_PHOENIX\t\t\t= 'phoenix.png'\t\t\t\n\n# Github-Icons zum Nachladen aus Platzgründen\nICON_MAINXL \t= 'https://github.com/rols1/PluginPictures/blob/master/ARDundZDF/TagesschauXL/tagesschau.png?raw=true'\nGIT_CAL\t\t\t= \"https://github.com/rols1/PluginPictures/blob/master/ARDundZDF/KIKA_tivi/icon-calendar.png?raw=true\"\n\n# 01.12.2018 \tÄnderung der BASE_URL von www.ardmediathek.de zu classic.ardmediathek.de\n# 06.12.2018 \tÄnderung der BETA_BASE_URL von beta.ardmediathek.de zu www.ardmediathek.de\nBASE_URL \t\t= 'https://classic.ardmediathek.de'\nBETA_BASE_URL\t= 'https://www.ardmediathek.de'\t\t\t\t\t\t\t\t# vorher beta.ardmediathek.de\nARD_VERPASST \t= '/tv/sendungVerpasst?tag='\t\t\t\t\t\t\t\t# ergänzt mit 0, 1, 2 usw.\n# ARD_AZ \t\t\t= 'https://www.ardmediathek.de/ard/shows'\t\t\t\t# ARDneu, komplett (#, A-Z)\nARD_AZ \t\t\t= '/tv/sendungen-a-z?buchstabe='\t\t\t\t\t\t\t# 
ARD-Classic ergänzt mit 0-9, A, B, usw.\nARD_Suche \t\t= '/tv/suche?searchText=%s&words=and&source=tv&sort=date'\t# Vorgabe UND-Verknüpfung\nARD_Live \t\t= '/tv/live'\n\n\n# ARD-Podcasts\nPOD_SEARCH = '/suche?source=radio&sort=date&searchText=%s&pod=on&playtime=all&words=and&to=all='\nPOD_AZ \t\t= 'https://classic.ardmediathek.de/radio/sendungen-a-z?sendungsTyp=podcast&buchstabe=' \nPOD_RUBRIK \t= 'https://classic.ardmediathek.de/radio/Rubriken/mehr?documentId=37981136'\nPOD_FEATURE = 'https://classic.ardmediathek.de/radio/das-ARD-radiofeature/Sendung?documentId=3743362&bcastId=3743362'\nPOD_TATORT \t= 'https://classic.ardmediathek.de/radio/ARD-Radio-Tatort/Sendung?documentId=1998988&bcastId=1998988'\nPOD_NEU \t= 'https://classic.ardmediathek.de/radio/Neueste-Audios/mehr?documentId=23644358'\nPOD_MEIST \t= 'https://classic.ardmediathek.de/radio/Meistabgerufene-Audios/mehr?documentId=23644364'\nPOD_REFUGEE = 'https://www1.wdr.de/mediathek/audio/cosmo/refugee-radio/index.html'\t# geändert 28.07.2019\n\n\n# ARD Audiothek\nARD_AUDIO_BASE = 'https://www.ardaudiothek.de'\nAUDIO_HEADERS=\"{'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36', \\\n\t'Referer': '%s', 'Accept-Encoding': 'gzip, deflate, br', 'Accept': 'application/json, text/plain, */*'}\"\nAUDIOSENDER = ['br','dlf','hr','mdr','ndr','\"radio-bremen\"','rbb','sr','swr','wdr']\n\n# Relaunch der Mediathek beim ZDF ab 28.10.2016: xml-Service abgeschaltet\nZDF_BASE\t\t\t\t= 'https://www.zdf.de'\n# ZDF_Search_PATH: ganze Sendungen, sortiert nach Datum, bei Bilderserien ohne ganze Sendungen (ZDF_Search)\n#\ts. ZDF_Search + SearchARDundZDF \nZDF_SENDUNG_VERPASST \t= 'https://www.zdf.de/sendung-verpasst?airtimeDate=%s' # Datumformat 2016-10-31\nZDF_SENDUNGEN_AZ\t\t= 'https://www.zdf.de/sendungen-a-z?group=%s'\t\t\t# group-Format: a,b, ... 
0-9: group=0+-+9\nZDF_WISSEN\t\t\t\t= 'https://www.zdf.de/doku-wissen'\t\t\t\t\t\t# Basis für Ermittlung der Rubriken\nZDF_SENDUNGEN_MEIST\t\t= 'https://www.zdf.de/meist-gesehen'\nZDF_BARRIEREARM\t\t\t= 'https://www.zdf.de/barrierefreiheit-im-zdf'\n\nREPO_NAME\t\t \t= 'Kodi-Addon-ARDundZDF'\nGITHUB_REPOSITORY \t= 'rols1/' + REPO_NAME\n\nPLog('Addon: lade Code')\nPluginAbsPath = os.path.dirname(os.path.abspath(__file__))\t\t\t\t# abs. Pfad für Dateioperationen\nADDON_ID \t= 'plugin.video.ardundzdf'\nSETTINGS \t\t= xbmcaddon.Addon(id=ADDON_ID)\nADDON_NAME \t= SETTINGS.getAddonInfo('name')\nSETTINGS_LOC \t= SETTINGS.getAddonInfo('profile')\nADDON_PATH \t= SETTINGS.getAddonInfo('path')\nADDON_VERSION \t= SETTINGS.getAddonInfo('version')\nPLUGIN_URL \t\t= sys.argv[0]\nHANDLE\t\t\t= int(sys.argv[1])\n\nICON = R(ICON)\nPLog(\"ICON: \" + ICON)\nTEMP_ADDON\t\t= xbmc.translatePath(\"special://temp\")\nUSERDATA\t\t= xbmc.translatePath(\"special://userdata\")\nADDON_DATA\t\t= os.path.join(\"%sardundzdf_data\") % USERDATA\n\nif \tcheck_AddonXml('\"xbmc.python\" version=\"3.0.0\"'):\t\t\t\t\t# ADDON_DATA-Verzeichnis anpasen\n\tPLog('Matrix-Version')\n\tADDON_DATA\t= os.path.join(\"%s\", \"%s\", \"%s\") % (USERDATA, \"addon_data\", ADDON_ID)\nPLog(\"ADDON_DATA: \" + ADDON_DATA)\n\nM3U8STORE \t\t= os.path.join(ADDON_DATA, \"m3u8\") \nDICTSTORE \t\t= os.path.join(ADDON_DATA, \"Dict\") \nSLIDESTORE \t\t= os.path.join(ADDON_DATA, \"slides\") \nSUBTITLESTORE \t= os.path.join(ADDON_DATA, \"subtitles\") \nTEXTSTORE \t\t= os.path.join(ADDON_DATA, \"Inhaltstexte\")\nWATCHFILE\t\t= os.path.join(ADDON_DATA, \"merkliste.xml\") \nJOBFILE\t\t\t= os.path.join(ADDON_DATA, \"jobliste.xml\") \t\t# Jobliste für epgRecord\nMONITOR_ALIVE \t= os.path.join(ADDON_DATA, \"monitor_alive\") \t\t# Lebendsignal für JobMonitor\nPLog(SLIDESTORE); PLog(WATCHFILE); \ncheck \t\t\t= check_DataStores()\t\t\t\t\t# Check /Initialisierung / Migration \nPLog('check: ' + str(check))\n\n# die tvtoday-Seiten decken 12 
Tage ab, trotzdem EPG-Lauf alle 12 Stunden\n#\t (dto. Cachezeit für einz. EPG-Seite in EPG.EPG).\n# 26.10.2020 Update der Datei livesenderTV.xml hinzugefügt - s. thread_getepg\nif SETTINGS.getSetting('pref_epgpreload') == 'true':\t\t# EPG im Hintergrund laden?\n\tEPGACTIVE = os.path.join(DICTSTORE, 'EPGActive') \t\t# Marker thread_getepg aktiv\n\tEPGCacheTime = 43200\t\t\t\t\t\t\t\t\t# 12 STd.\n\tis_activ=False\n\tif os.path.exists(EPGACTIVE):\t\t\t\t\t\t\t# gesetzt in thread_getepg \n\t\tis_activ=True\n\t\tnow = time.time()\n\t\tmtime = os.stat(EPGACTIVE).st_mtime\n\t\tdiff = int(now) - mtime\n\t\tPLog(diff)\n\t\tif diff > EPGCacheTime:\t\t\t\t\t\t\t\t# entf. wenn älter als 1 Tag\t\n\t\t\tos.remove(EPGACTIVE)\n\t\t\tis_activ=False\n\tif is_activ == False:\t\t\t\t\t\t\t\t\t# EPG-Daten veraltet, neu holen\n\t\tfrom threading import Thread\n\t\tbg_thread = Thread(target=EPG.thread_getepg, args=(EPGACTIVE, DICTSTORE, PLAYLIST))\n\t\tbg_thread.start()\t\t\t\t\t\t\t\t\t\t\t\n\t\t\n\nMERKACTIVE = os.path.join(DICTSTORE, 'MerkActive') \t\t# Marker aktive Merkliste\nif os.path.exists(MERKACTIVE):\n\tos.remove(MERKACTIVE)\nMERKFILTER \t= os.path.join(DICTSTORE, 'Merkfilter') \n# Ort FILTER_SET wie filterfile (check_DataStores):\nFILTER_SET \t= os.path.join(ADDON_DATA, \"filter_set\")\nAKT_FILTER\t= ''\nif os.path.exists(FILTER_SET):\t\n\tAKT_FILTER\t= RLoad(FILTER_SET, abs_path=True)\nAKT_FILTER\t= AKT_FILTER.splitlines()\t\t\t\t\t# gesetzte Filter initialiseren \n\ntry:\t# 28.11.2019 exceptions.IOError möglich, Bsp. 
iOS ARM (Thumb) 32-bit\n\tfrom platform import system, architecture, machine, release, version\t# Debug\n\tOS_SYSTEM = system()\n\tOS_ARCH_BIT = architecture()[0]\n\tOS_ARCH_LINK = architecture()[1]\n\tOS_MACHINE = machine()\n\tOS_RELEASE = release()\n\tOS_VERSION = version()\n\tOS_DETECT = OS_SYSTEM + '-' + OS_ARCH_BIT + '-' + OS_ARCH_LINK\n\tOS_DETECT += ' | host: [%s][%s][%s]' %(OS_MACHINE, OS_RELEASE, OS_VERSION)\nexcept:\n\tOS_DETECT =''\n\t\nKODI_VERSION = xbmc.getInfoLabel('System.BuildVersion')\n\nPLog('Addon: ClearUp')\n# Dict: Simpler Ersatz für Dict-Modul aus Plex-Framework\nARDStartCacheTime = 300\t\t\t\t\t\t# 5 Min.\t\n \ndays = int(SETTINGS.getSetting('pref_DICT_store_days'))\nDict('ClearUp', days)\t\t\t\t# Dict bereinigen \nClearUp(M3U8STORE, days*86400)\t\t# M3U8STORE bereinigen\t\n\ndays = int(SETTINGS.getSetting('pref_UT_store_days'))\nClearUp(SUBTITLESTORE, days*86400)\t# SUBTITLESTORE bereinigen\t\ndays = int(SETTINGS.getSetting('pref_SLIDES_store_days'))\nClearUp(SLIDESTORE, days*86400)\t\t# SLIDEESTORE bereinigen\ndays = int(SETTINGS.getSetting('pref_TEXTE_store_days'))\nClearUp(TEXTSTORE, days*86400)\t\t# TEXTSTORE bereinigen\n\nif SETTINGS.getSetting('pref_epgRecord') == 'true':\n\tepgRecord.JobMain(action='init')\t\t\t\t\t\t# EPG_Record starten\n\n# Skin-Anpassung:\nskindir = xbmc.getSkinDir()\nPLog(\"skindir: %s\" % skindir)\nif 'confluence' in skindir:\t\t\t\t\t\t\t\t\t# ermöglicht Plot-Infos in Medienansicht\n\txbmcplugin.setContent(HANDLE, 'movies')\t\n\nARDSender = ['ARD-Alle:ard::ard-mediathek.png:ARD-Alle']\t# Rest in ARD_NEW\n\n#---------------------------------------------------------------- \n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ndef Main():\n\tPLog('Main:'); \n\tPLog('Addon-Version: ' + VERSION); PLog('Addon-Datum: ' + VDATE)\t\n\tPLog(OS_DETECT)\t\n\tPLog('Addon-Python-Version: %s' % sys.version)\n\tPLog('Kodi-Version: %s' % KODI_VERSION)\n\t\t\t\n\tPLog(PluginAbsPath)\t\n\n\ticon = R(ICON_MAIN_ARD)\n\tlabel \t\t= NAME\n\t\n\tli = 
xbmcgui.ListItem(\"ARD und ZDF\")\n\ttitle=\"Suche in ARD und ZDF\"\n\tif SETTINGS.getSetting('pref_use_classic') == 'true':\n\t\ttagline = 'gesucht wird in ARD Mediathek Classic und in der ZDF Mediathek '\n\t\tsumm\t= 'gesucht wird nur nach Einzelbeiträgen - Sendereihen bleiben unberücksichtigt.'\n\t\tfparams=\"&fparams={'title': '%s'}\" % quote(title)\n\t\taddDir(li=li, label=title, action=\"dirList\", dirID=\"SearchARDundZDF\", fanart=R('suche_ardundzdf.png'), \n\t\t\tthumb=R('suche_ardundzdf.png'), tagline=tagline, summary=summ, fparams=fparams)\n\telse:\n\t\ttagline = 'gesucht wird in ARD Mediathek Neu und in der ZDF Mediathek.'\n\t\tsumm\t= 'beim ZDF wird nur nach Einzelbeiträgen gesucht, bei ARD Neu auch nach Sendereihen.'\n\t\tfparams=\"&fparams={'title': '%s'}\" % quote(title)\n\t\taddDir(li=li, label=title, action=\"dirList\", dirID=\"resources.lib.ARDnew.SearchARDundZDFnew\", \n\t\t\tfanart=R('suche_ardundzdf.png'), thumb=R('suche_ardundzdf.png'), tagline=tagline, \n\t\t\tsummary=summ, fparams=fparams)\n\t\t\n\n\tif SETTINGS.getSetting('pref_use_classic') == 'true':\t# Classic-Version der ARD-Mediathek\n\t\tPLog('classic_set: ')\n\t\ttitle = \"ARD Mediathek Classic\"\n\t\ttagline = 'in den Settings sind ARD Mediathek Neu und ARD Mediathek Classic austauschbar'\n\t\tfparams=\"&fparams={'name': '%s', 'sender': '%s'}\" % (title, '')\n\t\taddDir(li=li, label=title, action=\"dirList\", dirID=\"Main_ARD\", fanart=R(FANART), \n\t\t\tthumb=R(ICON_MAIN_ARD_Classic), tagline=tagline, fparams=fparams)\n\telse:\n\t\ttitle = \"ARD Mediathek Neu\"\n\t\ttagline = 'in den Settings sind ARD Mediathek Neu und ARD Mediathek Classic austauschbar'\n\t\tsumm = u'Die barrierefreien Angebote befinden sich im Menü Start in .'\n\t\tsumm = summ + u'\\nDas Menü ist zur Zeit nur in der Classic-Version verfügbar.'\n\t\tfparams=\"&fparams={'name': '%s', 'CurSender': '%s'}\" % (title, '')\n\t\taddDir(li=li, label=title, action=\"dirList\", dirID=\"resources.lib.ARDnew.Main_NEW\", 
fanart=R(FANART), \n\t\t\tthumb=R(ICON_MAIN_ARD), tagline=tagline, summary=summ, fparams=fparams)\n\t\n\t# Retro-Version ab 12.11.2020, V3.5.4\t\t\n\ttitle = \"ARD Mediathek RETRO\"\n\terbe = u\"[COLOR darkgoldenrod]%s[/COLOR]\" % \"UNESCO Welttag des Audiovisuellen Erbes\"\n\ttag = u'Die ARD Sender öffneten zum %s ihre Archive und stellen zunehmend zeitgeschichtlich relevante Videos frei zugänglich ins Netz' % erbe\n\ttag = u\"%s\\n\\nDeutsche Geschichte und Kultur nacherleben: Mit ARD Retro können Sie in die Zeit der 1950er und frühen 1960er Jahre eintauchen. Hier stoßen Sie auf spannende, informative und auch mal kuriose Sendungen aus den Anfängen der Fernsehgeschichte des öffentlich-rechtlichen Rundfunks.\" % tag\n\ttag = u\"%s\\n\\nMehr: NDR ardretro100.html\" % tag\n\tfparams=\"&fparams={}\"\n\taddDir(li=li, label=title, action=\"dirList\", dirID=\"resources.lib.ARDnew.ARDRetro\", fanart=R(FANART), \n\t\tthumb=R('ard-mediathek-retro.png'), tagline=tag, fparams=fparams)\n\t\t\t\n\tif SETTINGS.getSetting('pref_use_zdfmobile') == 'true':\n\t\tPLog('zdfmobile_set: ')\n\t\ttagline = 'in den Settings sind ZDF Mediathek und ZDFmobile austauschbar'\n\t\tfparams=\"&fparams={}\"\n\t\taddDir(li=li, label=\"ZDFmobile\", action=\"dirList\", dirID=\"resources.lib.zdfmobile.Main_ZDFmobile\", \n\t\t\tfanart=R(FANART), thumb=R(ICON_MAIN_ZDFMOBILE), tagline=tagline, fparams=fparams)\n\telse:\n\t\ttagline = 'in den Settings sind ZDF Mediathek und ZDFmobile austauschbar'\n\t\tfparams=\"&fparams={'name': 'ZDF Mediathek'}\"\n\t\taddDir(li=li, label=\"ZDF Mediathek\", action=\"dirList\", dirID=\"Main_ZDF\", fanart=R(FANART), \n\t\t\tthumb=R(ICON_MAIN_ZDF), tagline=tagline, fparams=fparams)\n\t\t\t\n\tif SETTINGS.getSetting('pref_use_3sat') == 'true':\n\t\ttagline = 'in den Settings kann das Modul 3Sat ein- und ausgeschaltet werden'\n\t\tfparams=\"&fparams={'name': '3Sat'}\"\t\t\t\t\t\t\t\t\t# 3Sat-Modul\n\t\taddDir(li=li, label=\"3Sat Mediathek\", action=\"dirList\", 
dirID=\"resources.lib.my3Sat.Main_3Sat\", \n\t\t\tfanart=R('3sat.png'), thumb=R('3sat.png'), tagline=tagline, fparams=fparams)\n\t\t\t\n\tif SETTINGS.getSetting('pref_use_funk') == 'true':\n\t\ttagline = 'in den Settings kann das Modul FUNK ein- und ausgeschaltet werden'\n\t\tfparams=\"&fparams={}\"\t\t\t\t\t\t\t\t\t\t\t\t\t# funk-Modul\n\t\taddDir(li=li, label=\"FUNK\", action=\"dirList\", dirID=\"resources.lib.funk.Main_funk\", \n\t\t\tfanart=R('funk.png'), thumb=R('funk.png'), tagline=tagline, fparams=fparams)\n\t\t\t\n\tif SETTINGS.getSetting('pref_use_childprg') == 'true':\n\t\ttagline = 'in den Settings kann das Modul Kinderprogramme ein- und ausgeschaltet werden'\n\t\tfparams=\"&fparams={}\"\t\t\t\t\t\t\t\t\t\t\t\t\t# Kinder-Modul\n\t\taddDir(li=li, label=\"Kinderprogramme\", action=\"dirList\", dirID=\"resources.lib.childs.Main_childs\", \n\t\t\tfanart=R('childs.png'), thumb=R('childs.png'), tagline=tagline, fparams=fparams)\n\t\t\t\n\tif SETTINGS.getSetting('pref_use_XL') == 'true':\n\t\ttagline = 'in den Settings kann das Modul TagesschauXL ein- und ausgeschaltet werden'\n\t\tfparams=\"&fparams={}\"\t\t\t\t\t\t\t\t\t\t\t\t\t# TagesschauXL-Modul\n\t\taddDir(li=li, label=\"TagesschauXL\", action=\"dirList\", dirID=\"resources.lib.TagesschauXL.Main_XL\", \n\t\t\tfanart=ICON_MAINXL, thumb=ICON_MAINXL, tagline=tagline, fparams=fparams)\n\t\t\t\n\tif SETTINGS.getSetting('pref_use_phoenix') == 'true':\n\t\ttagline = 'in den Settings kann das Modul phoenix ein- und ausgeschaltet werden'\n\t\tfparams=\"&fparams={}\"\t\t\t\t\t\t\t\t\t\t\t\t\t# Phoenix-Modul\n\t\taddDir(li=li, label=\"phoenix\", action=\"dirList\", dirID=\"resources.lib.phoenix.Main_phoenix\", \n\t\t\tfanart=R(ICON_PHOENIX), thumb=R(ICON_PHOENIX), tagline=tagline, fparams=fparams)\n\t\t\t\n\tif SETTINGS.getSetting('pref_use_arte') == 'true':\n\t\ttagline = 'in den Settings kann das Modul Arte-Kategorien ein- und ausgeschaltet werden'\n\t\tsumm = 'Ein komplettes Arte-Addon befindet sich im 
Kodinerds-Repo (ARTE.TV)'\n\t\tfparams=\"&fparams={}\"\t\t\t\t\t\t\t\t\t\t\t\t\t# arte-Modul\n\t\taddDir(li=li, label=\"Arte-Kategorien\", action=\"dirList\", dirID=\"resources.lib.arte.Main_arte\", \n\t\t\tfanart=R('icon-arte_kat.png'), thumb=R('icon-arte_kat.png'), tagline=tagline,\n\t\t\tsummary=summ, fparams=fparams)\n\t\t\t\n\tlabel = 'TV-Livestreams'\n\tif SETTINGS.getSetting('pref_epgRecord') == 'true':\t\t\n\t\tlabel = 'TV-Livestreams | Sendungen aufnehmen'; \n\ttagline = 'TV-Livestreams stehen auch in ARD Mediathek Neu zur Verfügung'\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\n\tfparams=\"&fparams={'title': 'TV-Livestreams'}\"\n\taddDir(li=li, label=label, action=\"dirList\", dirID=\"SenderLiveListePre\", \n\t\tfanart=R(FANART), thumb=R(ICON_MAIN_TVLIVE), tagline=tagline, fparams=fparams)\n\t\n\t# 29.09.2019 Umstellung Livestreams auf ARD Audiothek\n\t#\terneut ab 02.11.2020 nach Wegfall web.ard.de/radio/radionet\n\t# Button für Livestreams anhängen (eigenes ListItem)\t\t# Radio-Livestreams\n\ttagline = 'die Radio-Livestreams stehen auch in der neuen ARD Audiothek zur Verfügung'\n\ttitle = 'Radio-Livestreams'\t\n\tfparams=\"&fparams={'title': '%s'}\" % (title)\t\n\taddDir(li=li, label=title, action=\"dirList\", dirID=\"AudioStartLive\", fanart=R(FANART), \n\t\tthumb=R(ICON_MAIN_RADIOLIVE), fparams=fparams)\n\t\t\n\t\t\n\tif SETTINGS.getSetting('pref_use_podcast') == 'true':\t\t# Podcasts / Audiothek\n\t\tif SETTINGS.getSetting('pref_use_audio') == 'true':\t# Audiothek\n\t\t\ttagline\t= 'ARD Audiothek - Entdecken, Themen, Livestreams'\n\t\t\tsummary = 'in den Settings sind Audiothek und Podcasts Classic austauschbar'\n\t\t\tfparams=\"&fparams={'title': 'ARD Audiothek'}\"\n\t\t\tlabel = 'ARD Audiothek - NEU'\n\t\t\taddDir(li=li, label=label, action=\"dirList\", dirID=\"AudioStart\", fanart=R(FANART), \n\t\t\t\tthumb=R(ICON_MAIN_AUDIO), summary=summary, tagline=tagline, fparams=fparams)\n\t\telse:\t\t\t\t\t\t\t\t\t\t\t\t\t# 
Podcasts\n\t\t\ttagline\t= 'ARD-Radio-Podcasts suchen, hören und herunterladen'\n\t\t\tsummary = 'in den Settings sind Audiothek und Podcasts Classic austauschbar'\n\t\t\tsummary = \"%s\\n\\n%s\" % (summary, 'Podcast-Favoriten befinden sich in der ARD Audiothek')\n\t\t\tfparams=\"&fparams={'name': 'PODCAST'}\"\n\t\t\tlabel = 'Radio-Podcasts Classic'\n\t\t\taddDir(li=li, label=label, action=\"dirList\", dirID=\"Main_POD\", fanart=R(FANART), \n\t\t\t\tthumb=R(ICON_MAIN_POD), summary=summary, tagline=tagline, fparams=fparams)\n\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# Download-/Aufnahme-Tools. zeigen\n\tif SETTINGS.getSetting('pref_use_downloads')=='true' or SETTINGS.getSetting('pref_epgRecord')=='true':\t\n\t\ttagline = 'Downloads und Aufnahmen: Verschieben, Löschen, Ansehen, Verzeichnisse bearbeiten'\n\t\tfparams=\"&fparams={}\"\n\t\taddDir(li=li, label='Download- und Aufnahme-Tools', action=\"dirList\", dirID=\"DownloadTools\", \n\t\t\tfanart=R(FANART), thumb=R(ICON_DOWNL_DIR), tagline=tagline, fparams=fparams)\t\n\t\t\t\t\n\tif SETTINGS.getSetting('pref_showFavs') == 'true':\t\t\t# Favoriten einblenden\n\t\ttagline = \"Kodi's ARDundZDF-Favoriten zeigen und aufrufen\"\n\t\tfparams=\"&fparams={'mode': 'Favs'}\"\n\t\taddDir(li=li, label='Favoriten', action=\"dirList\", dirID=\"ShowFavs\", \n\t\t\tfanart=R(FANART), thumb=R(ICON_DIR_FAVORITS), tagline=tagline, fparams=fparams)\t\n\t\t\t\t\n\tif SETTINGS.getSetting('pref_watchlist') == 'true':\t\t# Merkliste einblenden\n\t\ttagline = 'interne Merkliste des Addons'\n\t\tfparams=\"&fparams={'mode': 'Merk'}\"\n\t\taddDir(li=li, label='Merkliste', action=\"dirList\", dirID=\"ShowFavs\", \n\t\t\tfanart=R(FANART), thumb=R(ICON_DIR_WATCH), tagline=tagline, fparams=fparams)\t\t\n\t\t\t\t\t\t\t\t\n\trepo_url = 'https://github.com/{0}/releases/'.format(GITHUB_REPOSITORY)\n\tcall_update = False\n\tif SETTINGS.getSetting('pref_info_update') == 'true': # Updatehinweis beim Start des Addons \n\t\tret = 
updater.update_available(VERSION)\n\t\tif ret[0] == False:\t\t\n\t\t\tmsg1 = \"Github ist nicht erreichbar\"\n\t\t\tmsg2 = 'update_available: False'\n\t\t\tPLog(\"%s | %s\" % (msg1, msg2))\n\t\t\tMyDialog(msg1, msg2, '')\n\t\telse:\t\n\t\t\tint_lv = ret[0]\t\t\t# Version Github\n\t\t\tint_lc = ret[1]\t\t\t# Version aktuell\n\t\t\tlatest_version = ret[2]\t# Version Github, Format 1.4.1\n\t\t\t\n\t\t\tif int_lv > int_lc:\t\t\t\t\t\t\t\t# Update-Button \"installieren\" zeigen\n\t\t\t\tcall_update = True\n\t\t\t\ttitle = 'neues Update vorhanden - jetzt installieren'\n\t\t\t\tsumm = 'Addon aktuell: ' + VERSION + ', neu auf Github: ' + latest_version\n\t\t\t\t# Bsp.: https://github.com/rols1/Kodi-Addon-ARDundZDF/releases/download/0.5.4/Kodi-Addon-ARDundZDF.zip\n\t\t\t\turl = 'https://github.com/{0}/releases/download/{1}/{2}.zip'.format(GITHUB_REPOSITORY, latest_version, REPO_NAME)\n\t\t\t\tfparams=\"&fparams={'url': '%s', 'ver': '%s'}\" % (quote_plus(url), latest_version) \n\t\t\t\taddDir(li=li, label=title, action=\"dirList\", dirID=\"resources.lib.updater.update\", fanart=R(FANART), \n\t\t\t\t\tthumb=R(ICON_UPDATER_NEW), fparams=fparams, summary=summ)\n\t\t\t\n\tif call_update == False:\t\t\t\t\t\t\t# Update-Button \"Suche\" zeigen\t\n\t\ttitle = 'Addon-Update | akt. 
Version: ' + VERSION + ' vom ' + VDATE\t\n\t\tsumm='Suche nach neuen Updates starten'\n\t\ttag ='Bezugsquelle: ' + repo_url\t\t\t\n\t\tfparams=\"&fparams={'title': 'Addon-Update'}\"\n\t\taddDir(li=li, label=title, action=\"dirList\", dirID=\"SearchUpdate\", fanart=R(FANART), \n\t\t\tthumb=R(ICON_MAIN_UPDATER), fparams=fparams, summary=summ, tagline=tagline)\n\n\t# Menü Einstellungen (obsolet) ersetzt durch Info-Button\n\t#\tfreischalten nach Posting im Kodi-Forum\n\n\ttag = 'Infos zu diesem Addon'\t\t\t\t\t# Menü Info + Filter\n\tsumm= u'Ausschluss-Filter (nur für Beiträge von ARD und ZDF)'\n\tfparams=\"&fparams={}\" \n\taddDir(li=li, label='Info', action=\"dirList\", dirID=\"InfoAndFilter\", fanart=R(FANART), thumb=R(ICON_INFO), \n\t\tfparams=fparams, summary=summ, tagline=tag)\n\n\t# Updatehinweis wird beim Caching nicht aktualisiert\n\tif SETTINGS.getSetting('pref_info_update') == 'true':\n\t\txbmcplugin.endOfDirectory(HANDLE, cacheToDisc=False)\n\telse:\n\t\txbmcplugin.endOfDirectory(HANDLE, cacheToDisc=True)\n#----------------------------------------------------------------\n# Aufruf Main\n# div. 
Addon-Infos + Filter (Titel) setzen/anlegen/löschen\n# Filter-Button nur zeigen, wenn in Settings gewählt\ndef InfoAndFilter():\n\tPLog('InfoAndFilter:'); \n\tli = xbmcgui.ListItem()\n\tli = home(li, ID=NAME)\t\t\t\t# Home-Button\n\t\t\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t# Button changelog.txt\n\ttag= u'Störungsmeldungen via Kodinerds-Forum, Github-Issue oder rols1@gmx.de'\n\tsumm = u'für weitere Infos (changelog.txt) klicken'\n\tpath = os.path.join(ADDON_PATH, \"changelog.txt\") \n\ttitle = \"Änderungsliste (changelog.txt)\"\n\ttitle=py2_encode(title)\n\tfparams=\"&fparams={'path': '%s', 'title': '%s'}\" % (quote(path), quote(title))\n\taddDir(li=li, label=title, action=\"dirList\", dirID=\"ShowText\", fanart=R(FANART), thumb=R(ICON_TOOLS), \n\t\tfparams=fparams, summary=summ, tagline=tag)\t\t\n\t\t\t\t\t\t\t\n\ttitle = u\"Addon-Infos\"\t\t\t\t\t\t\t\t# Button für Addon-Infos\n\ttag = \"Infos zu Version, Cache und Dateipfaden.\" \n\tsumm = \"Bei aktiviertem Debug-Log erfolgt die Ausgabe auch dort\"\n\tsumm = \"%s (nützlich zum Kopieren der Pfade).\" % summ\n\tfparams=\"&fparams={}\" \n\taddDir(li=li, label=title, action=\"dirList\", dirID=\"AddonInfos\", fanart=R(FANART), \n\t\tthumb=R(ICON_PREFS), tagline=tag, summary=summ, fparams=fparams)\t\n\t\t\t\n\tif SETTINGS.getSetting('pref_usefilter') == 'true':\t\t\t\t\t\t\t\t\t\t\t\n\t\ttitle = u\"Filter bearbeiten \"\t\t\t\t\t# Button für Filter\n\t\ttag = \"Ausschluss-Filter bearbeiten (nur für Beiträge von ARD und ZDF)\" \n\t\tfparams=\"&fparams={}\" \n\t\taddDir(li=li, label=title, action=\"dirList\", dirID=\"FilterTools\", fanart=R(FANART), \n\t\t\tthumb=R(ICON_FILTER), tagline=tag, fparams=fparams)\t\t\n\t\n\txbmcplugin.endOfDirectory(HANDLE, cacheToDisc=True)\n#----------------------------------------------------------------\n# Aufruf InfoAndFilter\n# Menüs für FilterToolsWork \ndef FilterTools():\n\tPLog('FilterTools:'); \n\tli = xbmcgui.ListItem()\n\tli = home(li, ID=NAME)\t\t\t\t# Home-Button\n\t\t\n\tfilterfile = 
os.path.join(ADDON_DATA, \"filter.txt\") \t\t# init: check_DataStores\n\tfilter_page = RLoad(filterfile, abs_path=True)\t\t\t\t# Filterliste laden\n\n\tif filter_page == '' or len(filter_page) <= 20:\n\t\tmsg1 = \"Problem Filterliste\"\n\t\tmsg2 = 'Liste kann nicht geladen werden'\t\t\t\t# -> nur Button Hinzufügen\n\t\tPLog(msg2); PLog(filter_page)\n\t\tfilter_page=''\t\t\t\t\t\t\t\t\t\t\t# fehlerhaft=leer\n\t\ticon = R(ICON_FILTER)\n\t\txbmcgui.Dialog().notification(msg1,msg2,icon,5000)\n\t\t\n\takt_filter=''; \n\tif os.path.isfile(FILTER_SET):\n\t\tpage = RLoad(FILTER_SET, abs_path=True)\n\t\tpage = page.strip()\n\t\takt_filter = page.splitlines()\n\tPLog(akt_filter)\n\t\t\t\t\t\t\t\t\t\t\t\t\n\tsumm = u\"Ausschluss-Filter für Beiträge von ARD und ZDF.\"\n\tsumm = u\"%s\\n\\nWirkung: Einzelbeiträge, die einen gesetzten Filter in Titel, Subtitel oder Beschreibung enthalten, werden aussortiert.\" % summ \n\t\n\tif filter_page:\n\t\tif akt_filter:\n\t\t\ttitle = u\"aktuell gesetzte(n) Filter zeigen (%d)\" % len(akt_filter)\n\t\t\tfparams=\"&fparams={'action': 'show_set'}\" \n\t\t\taddDir(li=li, label=title, action=\"dirList\", dirID=\"FilterToolsWork\", fanart=R(FANART), \n\t\t\t\tthumb=R(ICON_FILTER), summary=summ, fparams=fparams)\t\t\n\n\t\ttitle = u\"alle Filterwörter zeigen\" \n\t\tfparams=\"&fparams={'action': 'show_list'}\" \n\t\taddDir(li=li, label=title, action=\"dirList\", dirID=\"FilterToolsWork\", fanart=R(FANART), \n\t\t\tthumb=R(ICON_FILTER), summary=summ, fparams=fparams)\t\t\t\t\n\t\n\t\ttitle = u\"Filter [COLOR blue]setzen (aktuell: %d)[/COLOR]\" % len(akt_filter)\n\t\ttag = u\"ein oder mehrere Filterworte [COLOR blue]setzen[/COLOR]\" \n\t\tfparams=\"&fparams={'action': 'set'}\" \n\t\taddDir(li=li, label=title, action=\"dirList\", dirID=\"FilterToolsWork\", fanart=R(FANART), \n\t\t\tthumb=R(ICON_FILTER), tagline=tag, summary=summ, fparams=fparams)\n\t\t\t\t\t\n\t\ttitle = u\"Filterwort [COLOR red]löschen[/COLOR]\"\n\t\ttag = u\"ein Filterwort aus 
der Ausschluss-Liste [COLOR red]löschen[/COLOR]\" \n\t\tfparams=\"&fparams={'action': 'delete'}\" \n\t\taddDir(li=li, label=title, action=\"dirList\", dirID=\"FilterToolsWork\", fanart=R(FANART), \n\t\t\tthumb=R(ICON_FILTER), tagline=tag, summary=summ, fparams=fparams)\t\t\n\t\t\n\ttitle = u\"Filterwort [COLOR green]hinzufügen[/COLOR]\"\n\ttag = u\"ein Filterwort der Ausschluss-Liste [COLOR green]hinzufügen[/COLOR]\" \n\tfparams=\"&fparams={'action': 'add'}\" \n\taddDir(li=li, label=title, action=\"dirList\", dirID=\"FilterToolsWork\", fanart=R(FANART), \n\t\tthumb=R(ICON_FILTER), tagline=tag, summary=summ, fparams=fparams)\t\t\n\n\txbmcplugin.endOfDirectory(HANDLE, cacheToDisc=False)\n#----------------------------------------------------------------\n# Aufruf FilterTools\n# Ausschluss-Filter Anzeigen/Setzen/Hinzufügen/Löschen\n# 13.05.2020 'Container.Refresh' muss für LibreElec + Android vor \n#\tnotification erfolgen und cacheToDisc=False - sonst wirkungslos.\n#\ndef FilterToolsWork(action):\n\tPLog('FilterToolsWork: ' + action) \n\tdialog = xbmcgui.Dialog()\n\n\tfilter_pat = \"\\n%s\\n\\n\" \t\t\t\t\t# Rahmen Filterliste\n\tfilterfile = os.path.join(ADDON_DATA, \"filter.txt\")\t\t\t# init: check_DataStores\n\tpage = RLoad(filterfile, abs_path=True)\t\t\t\t\t\t# Filterliste laden\n\tfilter_list = stringextract('', '', page)\n\tfilter_list = filter_list.splitlines()\n\tfilter_list.remove('')\t\t\t\t\t\t\t\t\t\t# aus ev. Leerz.\n\tfilter_list=sorted(filter_list, key=str.lower)\n\tPLog(filter_list)\n\t\n\tpage = RLoad(FILTER_SET, abs_path=True)\t\t\t\t\t\t# akt. 
Filter laden\n\takt_filter = page.splitlines()\n\takt_filter=sorted(akt_filter, key=str.lower)\n\tPLog(akt_filter)\t\n\n\tif action == 'show_set':\t\t\t\t\t\t\t\t\t# gesetzte Filter zeigen\n\t\ttitle = u\"aktuell gesetzte(r) Filter\"\n\t\takt_filter = \"\\n\".join(akt_filter)\n\t\tdialog.textviewer(title, akt_filter,usemono=True)\n\t\t\t\n\tif action == 'set':\n\t\tindex_list = get_list_indices(akt_filter, filter_list)\t# akt. Filter-Indices ermitteln\n\t\tPLog(index_list); \n\t\ttitle = u\"Filter setzen (grün: gesetzt)\"\n\t\tret = dialog.multiselect(title, filter_list, preselect=index_list)\n\t\tPLog(ret)\t\t\t\t\t\t\t\t\t\t\t\t# ret hier Liste\n\t\tif ret != None:\t\t\t\t\t\t\t\t\t\t# None bei Abbruch\n\t\t\tif len(ret) > 0:\n\t\t\t\titems = get_items_from_list(ret, filter_list)\t# Indices -> Filter-items\n\t\t\t\titems = \"\\n\".join(items) \n\t\t\telse:\n\t\t\t\titems = ''\n\t\t\tRSave(FILTER_SET, items)\n\t\t\tmsg1 = u\"Filter setzen\"\n\t\t\tmsg2 = u\"gesetzte Filter: %d\" % len(ret)\n\t\t\ticon = R(ICON_FILTER)\n\t\t\txbmc.executebuiltin('Container.Refresh')\n\t\t\txbmcgui.Dialog().notification(msg1,msg2,icon,5000)\n\t\t\n\tif action == 'add':\n\t\ttitle = u'Filterwort hinzufügen (Groß/klein egal)'\n\t\tret = dialog.input(title, type=xbmcgui.INPUT_ALPHANUM)\t# Eingabe Filterwort\n\t\tPLog(ret)\n\t\tif ret:\n\t\t\tret = py2_encode(up_low(ret, mode='low'))\n\t\t\tif ret in filter_list:\t\t\t\t\t\t\t\t# Check: vorhanden?\n\t\t\t\tmsg1 = \"Filterliste\"\n\t\t\t\tmsg2 = '%s existiert schon. Anzahl: %d' % (ret.strip(), len(filter_list))\t\t\n\t\t\t\ticon = R(ICON_FILTER)\n\t\t\t\txbmcgui.Dialog().notification(msg1,msg2,icon,5000)\n\t\t\telse:\t\n\t\t\t\tfilter_list.append(ret.strip())\t\t\t\t\t# Filterwort hinzufügen\n\t\t\t\tif '' in filter_list:\n\t\t\t\t\tfilter_list.remove('')\t\t\t\t\t\t# aus ev. 
Leerz.\n\t\t\t\titems = \"\\n\".join(filter_list)\n\t\t\t\titems = py2_encode(items)\n\t\t\t\tfilter_pat = filter_pat % items\t\t\t\t\t# Filter -> xml-Rahmen\n\t\t\t\tPLog(filter_pat)\n\t\t\t\terr_msg = RSave(filterfile, filter_pat)\t\t\t# speichern\n\t\t\t\tif err_msg:\n\t\t\t\t\tmsg1 = \"Fehler beim Speichern der Filterliste\" \n\t\t\t\t\tPLog(msg1)\t\n\t\t\t\t\tMyDialog(msg1, '', '')\n\t\t\t\telse:\n\t\t\t\t\tmsg1 = \"Filterliste\"\n\t\t\t\t\tmsg2 = '%s hinzugefügt. Anzahl: %d' % (ret.strip(), len(filter_list))\t\t\n\t\t\t\t\ticon = R(ICON_FILTER)\n\t\t\t\t\txbmc.executebuiltin('Container.Refresh')\t\t\t\t\t\n\t\t\t\t\txbmcgui.Dialog().notification(msg1,msg2,icon,5000)\n\t\n\tif action == 'delete':\n\t\ttitle = u\"Filterwort löschen (ev. gesetzter Filter wird mitgelöscht)\"\n\t\tret = dialog.select(title, filter_list)\t\t\t\t\t# Auswahl Filterliste\n\t\tPLog(ret)\n\t\tif ret >= 0:\n\t\t\tret = filter_list[ret]\t\t\t\t\t\t\t\t# Index -> item\n\t\t\titem = py2_encode(ret)\n\t\t\tPLog(item)\n\t\t\tis_filter=False;\n\t\t\tif item in akt_filter:\t\t\t\t\t\t\t\t# auch gesetzter Filter?\n\t\t\t\tis_filter=True\n\t\t\tmsg2 = \"[COLOR red]%s[/COLOR] ist kein gesetzter Filter.\" % ret\n\t\t\tif is_filter:\t\n\t\t\t\tmsg2 = \"gesetzter Filter [COLOR red]%s[/COLOR] wird mitgelöscht\" % ret\n\t\t\tmsg1 = \"Filterwort [COLOR red]%s[/COLOR] wirklich löschen?\" % ret \n\n\t\t\tret = MyDialog(msg1=msg1, msg2=msg2, msg3='', ok=False, cancel='Abbruch', yes='JA', heading=title)\n\t\t\tPLog(ret)\n\t\t\tif ret == 1:\n\t\t\t\tfilter_list.remove(item)\t\t\t\t\t\t# Filterwort entfernen\n\t\t\t\tfilter_len = len(filter_list)\n\t\t\t\titems = \"\\n\".join(filter_list)\n\t\t\t\titems = py2_encode(items)\n\t\t\t\tfilter_pat = filter_pat % items\t\t\t\t\t# Filter -> xml-Rahmen\n\t\t\t\tPLog(filter_pat)\n\t\t\t\terr_msg1 = RSave(filterfile, filter_pat)\t\t\t# speichern\n\t\t\t\tif is_filter:\n\t\t\t\t\takt_filter.remove(item)\n\t\t\t\t\titems = \"\\n\".join(akt_filter)\n\t\t\t\t\terr_msg2 = 
RSave(FILTER_SET, items)\t\n\n\t\t\t\tif err_msg1 or err_msg2:\n\t\t\t\t\tif err_msg1:\n\t\t\t\t\t\tmsg1 = \"Fehler beim Speichern der Filterliste\" \n\t\t\t\t\t\tPLog(msg1)\t\n\t\t\t\t\t\tMyDialog(msg1, '', '')\n\t\t\t\t\tif err_msg2:\n\t\t\t\t\t\tmsg1 = \"Fehler beim Speichern der aktuell gesetzten Filter\" \n\t\t\t\t\t\tPLog(msg1)\t\n\t\t\t\t\t\tMyDialog(msg1, '', '')\n\t\t\t\telse:\n\t\t\t\t\tmsg1 = \"Filterliste\"\n\t\t\t\t\tmsg2 = u'%s gelöscht. Anzahl: %d' % (item, filter_len)\t\t\n\t\t\t\t\ticon = R(ICON_FILTER)\n\t\t\t\t\txbmc.executebuiltin('Container.Refresh')\t\t\t\t\t\n\t\t\t\t\txbmcgui.Dialog().notification(msg1,msg2,icon,5000)\t\t\n\t\t\t\n\tif action == 'show_list':\t\t\t\t\t\t\t\t\t# Filterliste zeigen\n\t\ttitle = u\"Liste verfügbarer Filter\"\n\t\tfilter_list = \"\\n\".join(filter_list)\n\t\tdialog.textviewer(title, filter_list,usemono=True)\n\t\t\n\tif action == 'state_change':\t\t\t\t\t\t\t\t# aus Kontextmenü\n\t\tif SETTINGS.getSetting('pref_usefilter') == 'true':\n\t\t\tSETTINGS.setSetting('pref_usefilter','false')\n\t\telse:\t\t\t\t\t\t\t\t\t\t\t\n\t\t\tSETTINGS.setSetting('pref_usefilter','true')\n\t\txbmc.executebuiltin('Container.Refresh')\t\t\t\t\t\t\t\n\n#----------------------------------------------------------------\n# Aufruf InfoAndFilter\n# Addon-Infos (Pfade, Cache, ..)\n# einschl. 
# incl. log output
def AddonInfos():
	# Collect addon / system / cache / path information and show it in
	# Kodi's text viewer (monospaced), incl. the debug-log location.
	PLog('AddonInfos:')
	li = xbmcgui.ListItem()
	li = home(li, ID=NAME)				# home button
	dialog = xbmcgui.Dialog()
	t = " "		# indent for info lines - original comment said "Tab (5)";
				#	spacing may have been collapsed in transfer, TODO confirm

	a = "[COLOR red]Addon, System:[/COLOR]"
	b = "%s%s, Version %s vom %s" % (t, ADDON_ID, VERSION, VDATE)
	c = "%sGithub-Releases https://github.com/%s/releases" % (t, GITHUB_REPOSITORY)
	d = "%sOS: %s" % (t, OS_DETECT)
	e = "%sKodi-Version: %s" % (t, KODI_VERSION)
	p1 = "%s\n%s\n%s\n%s\n%s\n" % (a,b,c,d,e)

	a = "[COLOR red]Cache:[/COLOR]"
	b = "%s %s Dict" % (t, get_dir_size(DICTSTORE))
	c = "%s %s Inhaltstexte" % (t, get_dir_size(TEXTSTORE))
	d = "%s %s m3u8" % (t, get_dir_size(M3U8STORE))
	e = "%s %s Slides (Bilder)" % (t, get_dir_size(SLIDESTORE))
	f = "%s %s subtitles (Untertitel)" % (t, get_dir_size(SUBTITLESTORE))
	g = ''
	path = SETTINGS.getSetting('pref_download_path')
	PLog(path); PLog(os.path.isdir(path))
	if path and os.path.isdir(path):		# download dir is optional
		g = "%s %s Downloads\n" % (t, get_dir_size(path))
	p2 = "%s\n%s\n%s\n%s\n%s\n%s\n%s" % (a,b,c,d,e,f,g)

	a = "[COLOR red]Pfade:[/COLOR]"
	b = "%s Addon-Home: %s" % (t, PluginAbsPath)
	c = "%s Cache: %s" % (t,ADDON_DATA)
	fname = WATCHFILE
	d1 = "%s Merkliste intern: %s" % (t, WATCHFILE)
	d2 = "%s Merkliste extern: nicht aktiviert" % t
	if SETTINGS.getSetting('pref_merkextern') == 'true':	# external watch list chosen?
		fname = SETTINGS.getSetting('pref_MerkDest_path')
		d2 = "%s Merkliste extern: %s" % (t,fname)
	e = "%s Downloadverzeichnis: %s" % (t,SETTINGS.getSetting('pref_download_path'))
	f = "%s Verschiebeverzeichnis: %s" % (t,SETTINGS.getSetting('pref_VideoDest_path'))
	filterfile = os.path.join(ADDON_DATA, "filter.txt")
	g = "%s Filterliste: %s" % (t,filterfile)
	fname = SETTINGS.getSetting('pref_podcast_favorits')
	if not os.path.isfile(fname):							# idiom fix: was '== False'
		fname = os.path.join(PluginAbsPath, "resources", "podcast-favorits.txt")
	h = "%s Podcast-Favoriten:\n%s%s" % (t,t,fname)			# fname on 2nd line
	log = xbmc.translatePath("special://logpath")
	log = os.path.join(log, "kodi.log")
	i = "%s Debug-Log: %s" % (t, log)

	p3 = "%s\n%s\n%s\n%s\n%s\n%s\n%s\n%s\n%s\n%s\n" % (a,b,c,d1,d2,e,f,g,h,i)
	page = "%s\n%s\n%s" % (p1,p2,p3)
	PLog(page)
	dialog.textviewer("Addon-Infos", page,usemono=True)

#	xbmcplugin.endOfDirectory(HANDLE, cacheToDisc=True)
#----------------------------------------------------------------
# Show a text file in Kodi's text viewer.
# 20.01.2020 usemono for textviewer (Kodi v18+)
#
def ShowText(path, title):
	# path: absolute file path, title: viewer caption
	PLog('ShowText:')

	page = RLoad(path, abs_path=True)
	page = page.replace('\t', ' ')		# replace tabs with blanks
	dialog = xbmcgui.Dialog()
	dialog.textviewer(title, page,usemono=True)
	return

#----------------------------------------------------------------
# sender re-assigned in Senderwahl (Classic: disabled)
def Main_ARD(name, sender=''):
	# Build the ARD (Classic) main menu.
	PLog('Main_ARD:')
	PLog(name); PLog(sender)

	# channel choice disabled in the Classic version:
	# sender 	= ARDSender[0]			# Default 1.
Element ARD-Alle\n\t\n\tli = xbmcgui.ListItem()\n\tli = home(li, ID=NAME)\t\t\t\t# Home-Button\n\tPLog(\"li:\" + str(li))\t\t\t\t\t\t\n\t\t\t\n\ttitle=\"Suche in ARD-Mediathek\"\t\t# ARD-New verwendet die Classic-Suche\n\tfparams=\"&fparams={'title': '%s', 'query': '', 'channel': 'ARD'}\" % quote(title)\n\taddDir(li=li, label=title, action=\"dirList\", dirID=\"Search\", fanart=R(ICON_MAIN_ARD), \n\t\tthumb=R(ICON_SEARCH), fparams=fparams)\n\t\t\n\timg = R(ICON_MAIN_ARD_Classic)\n\ttitle = 'Start | Sender: alle Sender' \n\tfparams=\"&fparams={'title': '%s'}\" % (quote(title))\n\taddDir(li=li, label=title, action=\"dirList\", dirID=\"ARDStart\", fanart=img, thumb=img, \n\t\tfparams=fparams)\n\n\t# title = 'Sendung verpasst | Sender: %s' % sendername\n\ttitle = 'Sendung verpasst (alle Sender)'\n\tfparams=\"&fparams={'name': 'ARD', 'title': 'Sendung verpasst'}\"\n\taddDir(li=li, label=title, action=\"dirList\", dirID=\"VerpasstWoche\", \n\t\tfanart=R(ICON_MAIN_ARD), thumb=R(ICON_ARD_VERP), fparams=fparams)\n\t\n\ttitle = 'Sendungen A-Z (alle Sender)'\n\tfparams=\"&fparams={'name': 'Sendungen A-Z', 'ID': 'ARD'}\"\n\taddDir(li=li, label=title, action=\"dirList\", dirID=\"SendungenAZ\", \n\t\tfanart=R(ICON_MAIN_ARD), thumb=R(ICON_ARD_AZ), fparams=fparams)\n\t\t\t\t\t\n\ttitle = 'Rubriken'\n\tnext_cbKey = 'SinglePage'\t\n\turl = BASE_URL + '/tv/Rubriken/mehr?documentId=21282550'\n\tfparams=\"&fparams={'title': '%s', 'path': '%s', 'cbKey': '%s', 'mode': 'Sendereihen', 'ID': 'ARD'}\" \\\n\t\t% (quote(title), quote(url), next_cbKey)\n\taddDir(li=li, label=title, action=\"dirList\", dirID=\"PageControl\", fanart=R(ICON_ARD_RUBRIKEN) , \n\t\tthumb=R(ICON_ARD_RUBRIKEN) , fparams=fparams)\n\t\t\t\t\n\ttitle = 'ARD Sportschau'\n\tfparams=\"&fparams={'title': '%s'}\"\t% title\n\taddDir(li=li, label=title, action=\"dirList\", dirID=\"ARDSport\", \n\t\tfanart=R(\"tv-ard-sportschau.png\"), thumb=R(\"tv-ard-sportschau.png\"), 
fparams=fparams)\n\t\t\t\t\t\t\n\tfparams=\"&fparams={'name': 'Barrierearm'}\"\n\taddDir(li=li, label=\"Barrierearm\", action=\"dirList\", dirID=\"BarriereArmARD\", \n\t\tfanart=R(ICON_MAIN_ARD), thumb=R(ICON_ARD_BARRIEREARM), fparams=fparams)\n\n\t# 10.12.2018 nicht mehr verfügbar, 02.01.2018 Code in Search entfernt:\n\t#\twww.ard.de/home/ard/23116/index.html?q=Bildergalerie\n\t# 10.02.2020 Ersatz: Bildergalerien des Senders Das Erste\n\ttitle = 'Bildgalerien Das Erste'\t\n\tfparams=\"&fparams={}\" \n\taddDir(li=li, label=title, action=\"dirList\", dirID=\"BilderDasErste\", fanart=R(ICON_MAIN_ARD),\n\t\tthumb=R('ard-bilderserien.png'), fparams=fparams)\n\n\t# 25.01.2019 Senderwahl hier deaktivert - s. Modul ARDnew\n\n\txbmcplugin.endOfDirectory(HANDLE, cacheToDisc=True)\n\t\t \t\t\n#---------------------------------------------------------------- \ndef Main_ZDF(name):\n\tPLog('Main_ZDF:'); PLog(name)\n\tli = xbmcgui.ListItem()\n\tli = home(li, ID=NAME)\t\t\t\t# Home-Button\n\t\n\ttitle=\"Suche in ZDF-Mediathek\"\n\tfparams=\"&fparams={'query': '', 'title': '%s'}\" % title\n\taddDir(li=li, label=title, action=\"dirList\", dirID=\"ZDF_Search\", fanart=R(ICON_ZDF_SEARCH), \n\t\tthumb=R(ICON_ZDF_SEARCH), fparams=fparams)\n\n\ttitle = 'Startseite' \n\tfparams=\"&fparams={'title': '%s'}\" % (quote(title))\n\taddDir(li=li, label=title, action=\"dirList\", dirID=\"ZDFStart\", fanart=R(ICON_MAIN_ZDF), thumb=R(ICON_MAIN_ZDF), \n\t\tfparams=fparams)\n\n\tfparams=\"&fparams={'name': 'ZDF-Mediathek', 'title': 'Sendung verpasst'}\" \n\taddDir(li=li, label='Sendung verpasst', action=\"dirList\", dirID=\"VerpasstWoche\", fanart=R(ICON_ZDF_VERP), \n\t\tthumb=R(ICON_ZDF_VERP), fparams=fparams)\t\n\n\tfparams=\"&fparams={'name': 'Sendungen A-Z'}\"\t\t\t\t\t\t# Startseite: Alles auf einen Blick\n\taddDir(li=li, label=\"Sendungen A-Z\", action=\"dirList\", dirID=\"ZDFSendungenAZ\", fanart=R(ICON_ZDF_AZ), \n\t\tthumb=R(ICON_ZDF_AZ), fparams=fparams)\n\n\tfparams=\"&fparams={'name': 
'Rubriken'}\"\n\taddDir(li=li, label=\"Rubriken\", action=\"dirList\", dirID=\"ZDFRubriken\", fanart=R(ICON_ZDF_RUBRIKEN), \n\t\tthumb=R(ICON_ZDF_RUBRIKEN), fparams=fparams)\n\n\tfparams=\"&fparams={'name': 'Meist gesehen'}\"\t\t\t\t\t\t# Startseite: Alles auf einen Blick\n\taddDir(li=li, label=\"Meist gesehen (1 Woche)\", action=\"dirList\", dirID=\"MeistGesehen\", \n\t\tfanart=R(ICON_ZDF_MEIST), thumb=R(ICON_ZDF_MEIST), fparams=fparams)\n\t\t\n\tfparams=\"&fparams={'title': 'Sport Live im ZDF'}\"\n\taddDir(li=li, label=\"Sport Live im ZDF\", action=\"dirList\", dirID=\"ZDFSportLive\", \n\t\tfanart=R(\"zdf-sportlive.png\"), thumb=R(\"zdf-sportlive.png\"), fparams=fparams)\n\t\t\n\tfparams=\"&fparams={'title': 'Barrierearm'}\"\t\t\t\t\t\t\t# Startseite: Alles auf einen Blick\n\taddDir(li=li, label=\"Barrierearm\", action=\"dirList\", dirID=\"BarriereArm\", fanart=R(ICON_ZDF_BARRIEREARM), \n\t\tthumb=R(ICON_ZDF_BARRIEREARM), fparams=fparams)\n\n\tfparams=\"&fparams={'title': 'ZDFenglish'}\"\n\taddDir(li=li, label=\"ZDFenglish\", action=\"dirList\", dirID=\"International\", fanart=R('ZDFenglish.png'), \n\t\tthumb=R('ZDFenglish.png'), fparams=fparams)\n\n\tfparams=\"&fparams={'title': 'ZDFarabic'}\"\n\taddDir(li=li, label=\"ZDFarabic\", action=\"dirList\", dirID=\"International\", fanart=R('ZDFarabic.png'), \n\t\tthumb=R('ZDFarabic.png'), fparams=fparams)\n\n\tfparams=\"&fparams={'s_type': 'Bilderserien', 'title': 'Bilderserien', 'query': 'Bilderserien'}\"\n\taddDir(li=li, label=\"Bilderserien\", action=\"dirList\", dirID=\"ZDF_Search\", fanart=R(ICON_ZDF_BILDERSERIEN), \n\t\tthumb=R(ICON_ZDF_BILDERSERIEN), fparams=fparams)\n\n\txbmcplugin.endOfDirectory(HANDLE, cacheToDisc=True)\n\n#----------------------------------------------------------------\ndef Main_POD(name):\n\tPLog('Main_POD:')\n\tli = xbmcgui.ListItem()\n\tli = home(li, ID=NAME)\t\t\t\t# Home-Button\n\t\t\n\ttitle=\"Suche Podcasts in ARD-Mediathek\"\n\tfparams=\"&fparams={'channel': 'PODCAST', 'title': 
'%s'}\" % title\n\taddDir(li=li, label=title, action=\"dirList\", dirID=\"Search\", fanart=R(ICON_MAIN_ARD), \n\t\tthumb=R(ICON_SEARCH), fparams=fparams)\n\n\ttitle = 'Sendungen A-Z'\n\tfparams=\"&fparams={'name': '%s', 'ID': 'PODCAST'}\"\t% title\n\taddDir(li=li, label=title, action=\"dirList\", dirID=\"SendungenAZ\", fanart=R(ICON_MAIN_POD), thumb=R(ICON_ARD_AZ), \n\t\tfparams=fparams)\n\n\ttitle = 'Rubriken'\t\n\tfparams=\"&fparams={'title': '%s', 'morepath': '%s', 'next_cbKey': 'SinglePage', 'ID': 'PODCAST', 'mode': 'Sendereihen'}\" \\\n\t\t% (title,quote(POD_RUBRIK))\n\taddDir(li=li, label=title, action=\"dirList\", dirID=\"PODMore\", fanart=R(ICON_MAIN_POD), thumb=R(ICON_POD_RUBRIK), \n\t\tfparams=fparams)\n\n\ttitle=\"Radio-Feature\"\t \n\tfparams=\"&fparams={'title': '%s', 'morepath': '%s', 'next_cbKey': 'SingleSendung', 'ID': 'PODCAST', 'mode': 'Sendereihen'}\" \\\n\t\t% (title,quote(POD_FEATURE))\n\taddDir(li=li, label=title, action=\"dirList\", dirID=\"PODMore\", fanart=R(ICON_MAIN_POD), thumb=R(ICON_POD_FEATURE), \n\t\tfparams=fparams)\n\n\ttitle=\"Radio-Tatort\"\n\tfparams=\"&fparams={'title': '%s', 'morepath': '%s', 'next_cbKey': 'SingleSendung', 'ID': 'PODCAST', 'mode': 'Sendereihen'}\" \\\n\t\t% (title,quote(POD_TATORT))\n\taddDir(li=li, label=title, action=\"dirList\", dirID=\"PODMore\", fanart=R(ICON_MAIN_POD), thumb=R(ICON_POD_TATORT), \n\tfparams=fparams)\n\t\t \n\ttitle=\"Neueste Audios\"\t \n\tfparams=\"&fparams={'title': '%s', 'morepath': '%s', 'next_cbKey': 'SingleSendung', 'ID': 'PODCAST', 'mode': 'Sendereihen'}\" \\\n\t\t% (title,quote(POD_NEU))\n\taddDir(li=li, label=title, action=\"dirList\", dirID=\"PODMore\", fanart=R(ICON_MAIN_POD), thumb=R(ICON_POD_NEU), \n\t\tfparams=fparams)\n\n\ttitle=\"Meist abgerufen\"\t \n\tfparams=\"&fparams={'title': '%s', 'morepath': '%s', 'next_cbKey': 'SingleSendung', 'ID': 'PODCAST', 'mode': 'Sendereihen'}\" \\\n\t\t% (title,quote(POD_MEIST))\n\taddDir(li=li, label=title, action=\"dirList\", 
dirID=\"PODMore\", fanart=R(ICON_MAIN_POD), thumb=R(ICON_POD_MEIST), \n\t\tfparams=fparams)\n\n\ttitle=\"Refugee-Radio\"; query='Refugee Radio'\t# z.Z. Refugee Radio via Suche\n\ttag = \"Quelle Cosmo WDR:\"\n\tsumm = \"www1.wdr.de/mediathek/audio/cosmo\"\n\tfparams=\"&fparams={'title': '%s', 'query': '%s', 'channel': 'PODCAST'}\" % (query, query)\n\taddDir(li=li, label=title, action=\"dirList\", dirID=\"Search\", fanart=R(ICON_MAIN_POD), thumb=R(ICON_POD_REFUGEE), \n\t\tfparams=fparams, tagline=tag, summary=summ)\n\n\txbmcplugin.endOfDirectory(HANDLE, cacheToDisc=True)\n\n##################################### Start Audiothek ###############################################\n# Aufruf: Main\n# Buttons für Highlights, Unsere Favoriten, Sammlungen, Ausgewählte \n#\tSendungen, Meistgehört - zusätzlich Themen + LIVESTREAMS.\n# Rubriken: getrennte Auswertung in AudioStartRubriken \n# Revision: 08-10.03.2020 \n#\ndef AudioStart(title):\n\tPLog('AudioStart:')\n\tli = xbmcgui.ListItem()\n\tli = home(li, ID=NAME)\t\t\t\t\t\t# Home-Button\n\n\tpath = ARD_AUDIO_BASE\t\t\t\t\t\n\tpage, msg = get_page(path=path)\t\n\tif page == '':\t\n\t\tmsg1 = \"Fehler in AudioStart:\"\n\t\tmsg2 = msg\n\t\tMyDialog(msg1, msg2, '')\t\n\t\treturn li\n\tPLog(len(page))\t\n\t\t\t\t\t\n\ttitle=\"Suche in ARD Audiothek\"\t\t\t\t# Button Suche voranstellen\n\tfparams=\"&fparams={'title': '%s'}\" % title\n\taddDir(li=li, label=title, action=\"dirList\", dirID=\"AudioSearch\", fanart=R(ICON_MAIN_AUDIO), \n\t\tthumb=R(ICON_SEARCH), fparams=fparams)\n\n\t# Liste der Rubriken: Themen + Livestreams fest (am Ende), der Rest \n\t#\twird im Web geprüft:\n\ttitle_list = ['Highlights']\t\t\t\t\t\t\t\t# -> Audio_get_homejson\n\tif \"Sendungsauswahl Unsere Favoriten\" in page:\n\t\ttitle_list.append('Unsere Favoriten')\n\tif \"Sendungsauswahl Themen\" in page:\n\t\ttitle_list.append('Themen')\n\tif \"Sendungsauswahl Sammlungen\" in page:\n\t\ttitle_list.append('Sammlungen')\n\tif u'aria-label=\"Meistgehört\"' in 
page:\n\t\ttitle_list.append(u'Meistgehört')\t\t\t\t\t# -> Audio_get_homejson\n\tif u'Sendungsauswahl Ausgewählte Sendungen' in page:\n\t\ttitle_list.append(u'Ausgewählte Sendungen')\n\t\n\tfor title in title_list:\n\t\tPLog(title)\n\t\tfparams=\"&fparams={'title': '%s', 'ID': '%s'}\" % (title, title)\t\n\t\taddDir(li=li, label=title, action=\"dirList\", dirID=\"AudioStartThemen\", fanart=R(ICON_MAIN_AUDIO), \n\t\t\tthumb=R(ICON_DIR_FOLDER), fparams=fparams)\n\n\t# Button für Rubriken anhängen (eigenes ListItem)\t\t\t# Rubriken\n\ttitle = 'Rubriken'\n\tfparams=\"&fparams={}\" \t\n\taddDir(li=li, label=title, action=\"dirList\", dirID=\"AudioStartRubrik\", fanart=R(ICON_MAIN_AUDIO), \n\t\tthumb=R(ICON_POD_RUBRIK), fparams=fparams)\n\n\t# Button für A-Z anhängen \t\t\t\t\t\t\t\t\t# A-Z alle Sender\n\ttitle = 'Sendungen A-Z (alle Radiosender)'\n\tfparams=\"&fparams={'title': '%s'}\" % (title)\t\n\taddDir(li=li, label=title, action=\"dirList\", dirID=\"AudioStart_AZ\", fanart=R(ICON_MAIN_AUDIO), \n\t\tthumb=R(ICON_AUDIO_AZ), fparams=fparams)\n\t\n\t# Button für Sender anhängen \t\t\t\t\t\t\t\t# Sender/Sendungen (via AudioStartLive)\n\ttitle = 'Sender (Sendungen einzelner Radiosender)'\n\tfparams=\"&fparams={'title': '%s', 'programs': 'yes'}\" % (title)\t\n\taddDir(li=li, label=title, action=\"dirList\", dirID=\"AudioStartLive\", fanart=R(ICON_MAIN_AUDIO), \n\t\tthumb=R(ICON_DIR_FOLDER), fparams=fparams)\n\t\n\t# Button für funk anhängen \t\t\t\t\t\t\t\t\t# funk\n\ttitle = 'FUNK-Podcasts - Pop und Szene'\n\thref = ARD_AUDIO_BASE + '/sender/funk'\n\tfparams=\"&fparams={'url': '%s'}\" % quote(href)\t\n\taddDir(li=li, label=title, action=\"dirList\", dirID=\"Audio_get_rubrik_funk\", fanart=R(ICON_MAIN_AUDIO), \n\t\tthumb=R('funk.png'), fparams=fparams)\n\t\n\t# Button für Podcast-Favoriten anhängen \t\t\t\t\t# Podcast-Favoriten\n\ttitle=\"Podcast-Favoriten\"; \n\ttagline = u'konfigurierbar mit der Datei podcast-favorits.txt im Addon-Verzeichnis resources'\n\tsumm = 
u'Suchergebnisse der Audiothek lassen sich hinzufügen\\n'\n\tsumm = u\"%s\\nMehrfach-Downloads (komplette Liste) möglich\" % summ\n\tfparams=\"&fparams={'title': '%s'}\" % title\n\taddDir(li=li, label=title, action=\"dirList\", dirID=\"PodFavoritenListe\", fanart=R(ICON_MAIN_POD), \n\t\tthumb=R(ICON_POD_FAVORITEN), tagline=tagline, summary=summ, fparams=fparams)\n\n\t# Button für Livestreams anhängen (eigenes ListItem)\t\t# Livestreams\n\ttitle = 'Livestreams'\t\n\tfparams=\"&fparams={'title': '%s'}\" % (title)\t\n\taddDir(li=li, label=title, action=\"dirList\", dirID=\"AudioStartLive\", fanart=R(ICON_MAIN_AUDIO), \n\t\tthumb=R(ICON_AUDIO_LIVE), fparams=fparams)\n\t\n\txbmcplugin.endOfDirectory(HANDLE, cacheToDisc=True)\n\t\n#----------------------------------------------------------------\n# Die Startseite liefert html/json gemischt. mp3-Url wird im html-Bereich\n#\termittelt, bei Fehlen wird die Homepage des Beitrags weitergegeben.\n#\timg wird im json-Bereich ermittelt - bei Fehlen \"kein-Bild\".\n# Hier wird zur ID der passende page-Ausschnitt ermittelt - Auswertung in \n#\tAudio_get_rubrik oder Audio_get_sendungen (Highlights, Meistgehört)\n#\n# 05.10.2020 für Entdecken (Highlights) + Meistgehört Wechsel der Auswertung\n#\tvon Homepage zu homescreen.json (Audio_get_homejson).\n#\ndef AudioStartThemen(title, ID, page='', path=''):\t# Entdecken, Unsere Favoriten, ..\n\tPLog('AudioStartThemen: ' + ID)\n\tli = xbmcgui.ListItem()\n\t# li = home(li, ID='ARD Audiothek')\t\t\t\t# Home-Button\n\t\n\tif ID == 'Highlights' or ID == 'Meistgehört':\t# json-Auswertung (s.o.)\n\t\tpath = \"https://audiothek.ardmediathek.de/homescreen\"\t\t\t\t\n\t\tpage, msg = get_page(path=path)\t\n\t\tif page == '':\t\n\t\t\tmsg1 = \"Fehler in AudioStartThemen:\"\n\t\t\tmsg2 = msg\n\t\t\tMyDialog(msg1, msg2, '')\t\n\t\t\treturn li\n\t\tPLog(len(page))\t\n\t\t\n\t\tli = home(li, ID='ARD Audiothek')\t\t\t\t# Home-Button\n\t\tAudio_get_homejson(page, ID)\n\t\txbmcplugin.endOfDirectory(HANDLE, 
cacheToDisc=True)\n\t\t\n\t#--------------------------------------------------# Auswertung Homepage\t\n\tif not page:\n\t\tpath = ARD_AUDIO_BASE\t\t\t\t\t# Default\t\t\t\t\n\t\tpage, msg = get_page(path=path)\t\n\tif page == '':\t\n\t\tmsg1 = \"Fehler in AudioStartThemen:\"\n\t\tmsg2 = msg\n\t\tMyDialog(msg1, msg2, '')\t\n\t\treturn li\n\tPLog(len(page))\t\n\n\tif ID == 'Unsere Favoriten':\n\t\tAudio_get_rubriken(page, ID)\n\tif ID == 'Themen':\n\t\tAudio_get_rubriken(page, ID)\n\tif ID == 'Sammlungen':\n\t\tAudio_get_rubriken(page, ID)\n\tif ID == u'Ausgewählte Sendungen':\n\t\tAudio_get_rubriken(page, ID)\n\n\txbmcplugin.endOfDirectory(HANDLE, cacheToDisc=True)\n\n#----------------------------------------------------------------\ndef AudioStart_AZ(title):\t\t\n\tPLog('AudioStart_AZ:')\n\tCacheTime = 6000\t\t\t\t\t\t\t\t\t# 1 Std.\n\n\tli = xbmcgui.ListItem()\n\tli = home(li, ID='ARD Audiothek')\t\t\t\t\t# Home-Button\n\n\tpath = ARD_AUDIO_BASE + '/alphabetisch?al=a'\t# A-Z-Seite laden für Prüfung auf inaktive Buchstaben\n\tpage = Dict(\"load\", \"Audio_AZ\", CacheTime=CacheTime)\n\tif page == False:\t\t\t\t\t\t\t\t\t# Cache miss - vom Sender holen\n\t\tpage, msg = get_page(path)\t\t\n\t\tDict(\"store\", \"Audio_AZ\", page) \t\t\t\t# Seite -> Cache: aktualisieren\t\t\t\n\tif page == '':\n\t\tmsg1 = \"Fehler in AudioStart_AZ\"\n\t\tmsg2 = msg\n\t\tMyDialog(msg1, msg2, '')\n\t\treturn li\t\t\t\n\tPLog(len(page))\n\t\n\tpage = stringextract('Alle Sendungen von A bis Z durchsuchen', '', page)\n\tgridlist = blockextract('aria-label=', page) \n\tdel gridlist[0] \t\t\t\t\t\t# skip 1. 
Eintrag\n\tPLog(len(gridlist))\n\t\n\timg = R(ICON_DIR_FOLDER)\n\tfor grid in gridlist:\t\n\t\tif \"isDisabled\" in grid:\n\t\t\tcontinue\n\t\tbutton \t= stringextract('label=\"', '\"', grid)\n\t\ttitle = \"Sendungen mit \" + up_low(button)\n\t\tif button == '#':\n\t\t\ttitle = \"Sendungen mit #, 0-9\" \n\t\thref\t= ARD_AUDIO_BASE + stringextract('href=\"', '\"', grid)\n\t\t\n\t\tPLog('1Satz:');\n\t\tPLog(button); PLog(href); \n\t\tfparams=\"&fparams={'button': '%s'}\" % button\n\t\taddDir(li=li, label=title, action=\"dirList\", dirID=\"AudioStart_AZ_content\", fanart=R(ICON_MAIN_AUDIO), \n\t\t\tthumb=img, fparams=fparams)\t\t\t\t\t\t\t\t\t\t\t\t\t\t\n\n\txbmcplugin.endOfDirectory(HANDLE, cacheToDisc=True)\n\n#----------------------------------------------------------------\n# Auswertung A-Z\n# Besonderheit: der Quelltext der Leitseite enthält sämtliche Beiträge. \n#\tIm Web sorgen java-scripts für die Auswahl zum gewählten Buchstaben\n#\t(Button \"WEITERE LADEN\").\n# Der Quelltext enthält im html-Teil die Beiträge bis zum Nachlade-Button -\n#\thier unbeachtet.\n# Die Sätze im json-Teil sind inkompatibel mit den Sätzen in AudioContentJSON.\n#\tNachladebutton (java-script, ohne api-Call).\n# Auswahl der Sätze: Vergleich des 1. Buchstaben des Titels mit dem Button, \n#\tSonderbehandlung für Button # (Ascii-Wert 35 (#), 34 (\") oder 48-57 (0-9).\n#\t\t Außerdem werden führende \" durch # ersetzt (Match mit Ascii 35).\n# 11.03.2020 ab Version 2.7.3 Umstellung auf Seite ../api/podcasts und \n#\tAuswertung in AudioContentJSON (Leitseite ohne Begleitinfos). Nach-\n#\tteil: keine alph. Sortierung - Abhilfe: Sortierung in addDir, Trigger\n#\t\tsortlabel (s. 
AudioContentJSON)\n#\tBei Ausfall der api-Seite Rückfall zur Leitseite möglich.\n#\ndef AudioStart_AZ_content(button):\t\t\n\tPLog('AudioStart_AZ_content: ' + button)\n\tCacheTime = 6000\t\t\t\t\t\t\t\t\t# 1 Std.\n\n\t# path = ARD_AUDIO_BASE + '/alphabetisch?al=a'\t\t# Leitseite entf., s.o.\n\tpath = ARD_AUDIO_BASE + '/api/podcasts?limit=300' \t# enthält alle A-Z (wie Leitseite)\t\n\tpage = Dict(\"load\", \"Audio_AZ_json\", CacheTime=CacheTime)\n\tif page == False:\t\t\t\t\t\t\t\t\t# Cache miss - vom Sender holen\n\t\tpage, msg = get_page(path)\n\t\tDict(\"store\", \"Audio_AZ_json\", page) \t\t\t# Seite -> Cache: aktualisieren\t\t\t\n\tif page == '':\n\t\tmsg1 = \"Fehler in AudioStart_AZ_content\"\n\t\tmsg2 = msg\n\t\tMyDialog(msg1, msg2, '')\n\t\treturn li\t\t\t\n\tPLog(len(page))\n\t\n\ttitle='A-Z -Button %s' % button\n\treturn AudioContentJSON(title, page, AZ_button=button)\t\n\n#----------------------------------------------------------------\n# Radio-Live-Sender via Haupt-Menü \"Radio-Livestreams\"\n# 25.09.2020 wegen fehlender regionaler Sender in der Audiothek Nutzung \n#\tder Webseite web.ard.de/radio/radionet/index_infowellen.php - \n#\tRedaktion: SÜDWESTRUNDFUNK\n# \n# 26.09.2020 Anpassung für BR (neue Streamlinks),\n#\tzusätzl. Param. 
sender für Ausgabe von Einzelsendern - Nutzung durch \n#\tARDSportHoerfunkSingle\n# 02.11.2020 AudioLiveAll deaktiviert - web.ard.de/radio/radionet umge-\n#\tleitet auf Audiothek\n# def AudioLiveAll(anstalt='', sender=''):\n\n#----------------------------------------------------------------\n# Liste der Livestreams der Audiothek (nur Programmsender) - Mit- \n#\tnutzung durch Haupt-Menü \"Radio-Livestreams\" (erneut ab 02.11.2020\n#\tnach Wegfall web.ard.de/radio/radionet)\n# 09.09.2020 Mitnutzung durch AudioSenderPrograms (programs=yes)\n# 02.11.2020 Button für ARDSportHoerfunk hinzugefügt\n#\ndef AudioStartLive(title, sender='', myhome='', programs=''):\t# Sender / Livestreams \n\tPLog('AudioStartLive: ' + sender)\n\n\tli = xbmcgui.ListItem()\n\tif myhome:\n\t\tli = home(li, ID=myhome)\n\telse:\t\n\t\tli = home(li, ID='ARD Audiothek')\t\t\t# Home-Button\n\n\tpath = ARD_AUDIO_BASE + '/sender'\n\tpage, msg = get_page(path=path)\t\n\tif page == '':\t\n\t\tmsg1 = \"Fehler in AudioStartLive:\"\n\t\tmsg2 = msg\n\t\tMyDialog(msg1, msg2, '')\t\n\t\treturn li\n\tPLog(len(page))\t\n\t\n\tpos = page.find('{podcastOrganizations:')\t# json-Teil\n\tpage= page[pos:]\t\t\t\t\t\t\t# \n\tpage= page.replace('\\\\u002F', '/')\t\t\t# Pfadbehandlung gesamte Seite\n\n\tif programs == 'yes':\t\t\t\t\t\t# Sendungen der Sender listen\n\t\tAUDIOSENDER.append('funk')\n\t\t\n\tif sender == '':\n\t\tfor sender in AUDIOSENDER:\n\t\t\t# Bsp. title: data-v-f66b06a0>Theater, Film\n\t\t\tpos1 = page.find('%s:' % sender)\t# keine Blockbildung für sender möglich\n\t\t\tpos2 = page.find('}},', pos1)\n\t\t\tgrid = page[pos1:pos2]\n\t\t\t# PLog(grid)\t\t\t# bei Bedarf\n\t\t\ttitle \t= up_low(sender)\t\t\n\t\t\timg \t= stringextract('image_16x9:\"', '\"', grid)\t\t# Bild 1. 
Sender\n\t\t\timg\t\t= img.replace('{width}', '640')\t\t\t\t\n\t\t\ttitle = repl_json_chars(title)\t\t\t\t\t\t\t# für \"bremen\" erf.\n\t\t\tsender = repl_json_chars(sender)\t\t\t\t\t\t# für \"bremen\" erf.\n\t\t\t\n\t\t\tif programs == 'yes':\t\t\t\t\t\t\t\t\t# Sendungen der Sender listen\n\t\t\t\tadd = \"Programmen\"\n\t\t\telse:\n\t\t\t\tadd = \"Livestreams\"\n\t\t\ttag = \"Weiter zu den %s der Einzelsender von: %s\" % (add, title)\n\t\t\t\n\t\t\tPLog('2Satz:');\n\t\t\tPLog(title); PLog(img);\n\t\t\ttitle=py2_encode(title); sender=py2_encode(sender);\n\t\t\tfparams=\"&fparams={'title': '%s', 'sender': '%s', 'myhome': '%s', 'programs': '%s'}\" %\\\n\t\t\t\t(quote(title), quote(sender), myhome, programs)\t\n\t\t\taddDir(li=li, label=title, action=\"dirList\", dirID=\"AudioStartLive\", fanart=img, \n\t\t\t\tthumb=img, tagline=tag, fparams=fparams)\n\n\t\ttitle = u\"Die Fussball-Bundesliga im ARD-Hörfunk\"\t\t\t# Button Bundesliga ARD-Hörfunk \n\t\thref = 'https://www.sportschau.de/sportimradio/bundesligaimradio102.html'\n\t\timg = R(\"radio-livestreams.png\")\n\t\ttag = u'An Spieltagen der Fußball-Bundesliga übertragen die Landesrundanstalten ' \n\t\ttag = tag + u'im ARD-Hörfunk die Spiele live aus dem Stadion mit der berühmten ARD-Schlusskonferenz.'\n\t\ttitle=py2_encode(title); href=py2_encode(href);\timg=py2_encode(img);\n\t\tfparams=\"&fparams={'title': '%s', 'path': '%s', 'img': '%s'}\"\t% (quote(title), \n\t\t\tquote(href), quote(img))\n\t\taddDir(li=li, label=title, action=\"dirList\", dirID=\"ARDSportHoerfunk\", fanart=img, \n\t\t\tthumb=img, tagline=tag, fparams=fparams)\t\t\t\t\t\n\n\t\n\t\txbmcplugin.endOfDirectory(HANDLE, cacheToDisc=True)\n\t#-------------------------------------------------------------------\n\t\n\telse:\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# 2. Durchlauf: einzelne Streams\n\t\tmy_sender = sender\n\t\tsender = sender.replace('radio-bremen', '\"radio-bremen\"')\t# Quotes für Bremen\n\t\t# Bsp. 
title: data-v-f66b06a0>Theater, Film\n\t\tpos1 = page.find('%s:' % sender)\t# keine Blockbildung für sender möglich\n\t\tpos2 = page.find('}},', pos1)\n\t\tgrid = page[pos1:pos2]\n\t\tgridlist = blockextract('image_16x9:\"https', grid)\t\n\t\tPLog(len(gridlist))\n\t\tfor rec in gridlist:\n\t\t\ttitle \t= stringextract('title:\"', '\"', rec)\t# Anfang Satz\n\t\t\tif title == '':\n\t\t\t\tcontinue\n\t\t\t\n\t\t\timg \t= stringextract('image_16x9:\"', '\"', rec)\t\t\n\t\t\timg\t\t= img.replace('{width}', '640')\t\n\t\t\tdescr \t= stringextract('synopsis:\"', '\",', rec)\t\n\t\t\t\n\t\t\ttitle=py2_decode(title)\n\t\t\t# Zusammensetzung Streamlink plus Entf. Sonderzeichen:\n\t\t\turl \t= u\"{0}/{1}/{2}\".format(path, my_sender, title)\t# nicht website_url verwenden\n\t\t\turl\t\t= url.lower()\n\t\t\turl= url.replace(' ', '-')\t\t\t# Webseiten-URL: Blanks -> -\n\t\t\turl= url.replace(',', '-')\t\t\t# dto Komma -> -\n\t\t\turl= url.replace(u'ü', '-')\t\t\t# MDR THÜRINGEN\n\t\t\turl= (url.replace(\"b'\", '').replace(\"'\", '')) # Byte-Mark entfernen\n\t\t\t\n\t\t\tif my_sender == 'funk':\n\t\t\t\turl = \"https://www.ardaudiothek.de/sender/funk/funk\"\t\t# Korrektur für funk\n\t\t\t\n\t\t\ttitle = repl_json_chars(title)\n\t\t\tdescr = repl_json_chars(descr)\t\n\t\t\tsumm_par = descr\n\t\t\t\n\t\t\tif programs:\n\t\t\t\ttag = \"Weiter zu den Programmen von: %s\" % title\t\n\t\t\telse:\n\t\t\t\ttag = \"Weiter zum Livestream von: %s\" % title\n\t\t\t\n\t\t\tdestDir = \"AudioLiveSingle\"\t\t\n\t\t\tif programs == 'yes':\t\t\t\t\t\t# Sendungen der Sender listen\n\t\t\t\tdestDir = \"AudioSenderPrograms\"\n\t\t\tPLog('3Satz:');\n\t\t\tPLog(destDir); PLog(title); PLog(img); PLog(url); PLog(descr);\n\t\t\ttitle=py2_encode(title); summ_par=py2_encode(summ_par);\n\t\t\tfparams=\"&fparams={'url': '%s', 'title': '%s', 'thumb': '%s', 'Plot': '%s'}\" % (quote(url), \n\t\t\t\tquote(title), quote(img), quote(summ_par))\n\t\t\taddDir(li=li, label=title, action=\"dirList\", dirID=\"%s\" % 
destDir, fanart=img, thumb=img, \n\t\t\t\tfparams=fparams, tagline=tag, summary=descr, mediatype='music')\t\n\t\t\t\t\t\n\t\txbmcplugin.endOfDirectory(HANDLE, cacheToDisc=True)\t\t\t\t\t\n\t\t\n#----------------------------------------------------------------\n# hier wird der Streamlink von der Website der Audiothek im json-Teil\n#\termitelt.\n#\ndef AudioLiveSingle(url, title, thumb, Plot):\t\t# startet einzelnen Livestream für AudioStartLive\n\tPLog('AudioLiveSingle:')\n\n\tpage, msg = get_page(path=url)\t\n\tif page == '':\t\n\t\tmsg1 = \"Fehler in AudioLiveSingle:\"\n\t\tmsg2 = msg\n\t\tMyDialog(msg1, msg2, '')\t\n\t\treturn li\n\tPLog(len(page))\t\n\t\n\turl = stringextract('playback_url:\"', '\"', page)\n\turl= url.replace('\\\\u002F', '/')\n\tPLog(url)\n\tif 'playback_url:\"' not in page:\t\t\t\t\t# Bsp.: MDR Wissen\n\t\tmsg1 = u\"kein Livestream gefunden für: %s\" % title\n\t\tMyDialog(msg1, '', '')\t\n\t\treturn\n\t\t\t\n\tPlayAudio(url, title, thumb, Plot, url_template='1') # direkt\t\n\t\n\treturn\t\n\t\n#----------------------------------------------------------------\n# listet Sendungen einzelner Radiosender\n#\tSendungen sind bereits alph. sortiert\n#\tAuswertung Sendung -> Audio_get_rubrik\n# 09.09.2020 img-Reihenfolge OK\n# \ndef AudioSenderPrograms(url, title, thumb, Plot):\n\tPLog('AudioSenderPrograms:')\n\n\tli = xbmcgui.ListItem()\n\tli = home(li, ID='ARD Audiothek')\t\t# Home-Button, nach ev. Ausleitung (Doppel vermeiden)\t\n\n\tpage, msg = get_page(path=url)\t\n\tif page == '':\t\n\t\tmsg1 = \"Fehler in AudioSenderPrograms:\"\n\t\tmsg2 = msg\n\t\tMyDialog(msg1, msg2, '')\t\n\t\treturn li\n\tPLog(len(page))\t\n\t\n\tgridlist = blockextract('class=\"headlines\">', page)\n\tPLog(len(gridlist))\n\tfor grid in gridlist:\n\t\tcategory = stringextract('category\">', '
', grid)
        category = mystrip(category); category = unescape(category)
        # NOTE(review): several stringextract() end markers below are empty
        #   strings - the original HTML tag markers appear to have been
        #   stripped during extraction. Restore them from the upstream
        #   source before relying on this block.
        anzahl = stringextract('episode-count">', '', grid)
        anzahl = mystrip(anzahl)
        href = ARD_AUDIO_BASE + stringextract('href="', '"', grid)
        # FIXME(review): 4-arg stringextract call and 'descr' is used below
        #   without ever being assigned - a line seems lost here. Verify
        #   against the original file.
        title = stringextract('main-title">', '', '', grid)
        descr = mystrip(descr); descr = unescape(descr)
        img = img_via_audio_href(href=href, page=page)          # fetch img from the json part

        tag = category
        tag = u"%s\n[B]Folgeseiten[/B] | %s" % (tag, anzahl)

        PLog('14Satz:');
        PLog(title); PLog(img); PLog(href); PLog(descr); PLog(anzahl);
        title=py2_encode(title); href=py2_encode(href);
        fparams="&fparams={'url': '%s', 'title': '%s'}" % (quote(href), quote(title))
        addDir(li=li, label=title, action="dirList", dirID="Audio_get_rubrik", fanart=img, thumb=img, fparams=fparams, 
            summary=descr, tagline=tag)

    xbmcplugin.endOfDirectory(HANDLE, cacheToDisc=True)

#----------------------------------------------------------------
# Caller: AudioStart
# mixed html/json - categories of the start page / menu bar (../kategorie)
# without path: load the overview
# usetitle triggers an api call in Audio_get_rubrik with the title
#   instead of the category number
def AudioStartRubrik(path=''):
    PLog('AudioStartRubrik:')

    li = xbmcgui.ListItem()
    li = home(li, ID='ARD Audiothek')           # Home button

    if path == '':
        path = ARD_AUDIO_BASE + '/kategorie'
    page, msg = get_page(path=path)
    if page == '':
        msg1 = "Fehler in AudioStartRubrik:"
        msg2 = msg
        MyDialog(msg1, msg2, '')
        return li
    PLog(len(page))

    # FIXME(review): the lines below are clearly corrupted - page.find('', page)
    #   passes a string as start index, and the middle of AudioStartRubrik plus
    #   the head of Audio_get_rubriken appear to be missing (the two functions
    #   are fused together here). Recover from the upstream source.
    pos = page.find('', page)
        gridlist = blockextract('class="swiper-slide"', stage)
    else:
        path = path + '/alle'
        page, msg = get_page(path=path)
        if page == '':
            msg1 = "Fehler in Audio_get_rubriken:"
            msg2 = msg
            MyDialog(msg1, msg2, '')
            return li


    PLog(len(page))
    PLog(len(gridlist))
    # strings stripped from the aria-label before it is used as title:
    repl_list_title = ["Sendung: ", ", Sender: 
Sammlung"]

    cnt=0
    for grid in gridlist:
        title 	= stringextract('aria-label="', '"', grid)
        for repl in repl_list_title:
            title = title.replace(repl, '')
        title 	= unescape(title.strip())
        href		= ARD_AUDIO_BASE + stringextract('href="', '"', grid) # homepage of the items

        img = img_via_web(href)                                 # not unambiguous in the json part

        # NOTE(review): empty start marker below - the original HTML marker
        #   was stripped during extraction; verify against upstream source.
        img_alt = stringextract('', grid)
        img_alt = stringextract('title="', '"', img_alt)
        tag=''
        if img_alt:
            tag = img_alt

        anzahl 	= stringextract('class="station"', '', grid) # not like the json count, see above
        anzahl	= rm_datav(anzahl)

        descr	= "[B]Folgeseiten[/B] | %s" % anzahl
        descr = repl_json_chars(descr)
        summ_par= descr
        title = unescape(title); title = repl_json_chars(title)

        PLog('5Satz:');
        PLog(title); PLog(img); PLog(href); PLog(descr); PLog(anzahl);
        title=py2_encode(title); href=py2_encode(href);
        fparams="&fparams={'url': '%s', 'title': '%s'}" % (quote(href), quote(title))
        addDir(li=li, label=title, action="dirList", dirID="Audio_get_rubrik", fanart=img, thumb=img, fparams=fparams, 
            summary=descr, tagline=tag)
        cnt=cnt+1

    xbmcplugin.endOfDirectory(HANDLE, cacheToDisc=True)

#----------------------------------------------------------------
# Evaluation of a single category.
# Callers: Audio_get_rubriken, AudioStartRubrik, Audio_get_rubrik_funk.
# Evaluation in Audio_get_sendungen only when usetitle is not set
#   (a real category) and the page contains no "load more" button.
# Usually an api call (header is set in AudioContentJSON) - requires
#   that the page contains reload buttons (Weitere Laden, ALLE EPISODEN);
#   with usetitle the api call uses the title (quote_plus), otherwise
#   the url id number.
# No "more" buttons here (the web pages lack a working page-number
#   scheme, unlike
# (cont.) the json pages that follow).
# Program titles in the json content may differ from or be missing on
#   the website; conversely the category description may be missing
#   on the web page.
#
def Audio_get_rubrik(url, title, usetitle='', ID=''):       # extracts the single items of one category
    PLog('Audio_get_rubrik: ' + title)
    PLog(url); PLog(usetitle) 
    li = xbmcgui.ListItem()

    path = url + '/alle'
    page, msg = get_page(path)
    if page == '':
        msg1 = "Fehler in Audio_get_rubrik:"
        msg2 = msg
        MyDialog(msg1, msg2, '')
        return li

    # "Weitere laden", "ALLE EPISODEN" or usetitle -> API call
    if '"Weitere Laden"' in page or '"ALLE EPISODEN"' in page or usetitle:
        # headers for the api call are set in AudioContentJSON
        PLog('Rubrik_mit_api_call')
        if usetitle:
            # quoting done here - skipped in get_page via safe=False
            url_id=quote(py2_encode(ID))
            PLog("ID: %s, url_id: %s" % (ID, url_id))
            # api call with ID (title) instead of url_id (real categories);
            #   without page number! (wrong results otherwise), still complete:
            path = ARD_AUDIO_BASE + "/api/podcasts?category=%s" % (url_id)
            skip=''                                             # items multiple + single
        else:
            pagenr = 1
            url_id = url.split('/')[-1]
            path = ARD_AUDIO_BASE + "/api/podcasts/%s/episodes?items_per_page=24&page=%d" % (url_id, pagenr)
            skip='1'

        AudioContentJSON(title, page='', path=path, skip=skip)  # single items only

    else:
        # no api call found - evaluate the current html/json page
        PLog('Rubrik_ohne_api_call')
        ID = 'Audio_get_rubrik'
        gridlist = blockextract('class="podcast-title"', page)
        PLog(len(gridlist))
        li = Audio_get_sendungen(li, gridlist, page, ID) # fetch single items

    xbmcplugin.endOfDirectory(HANDLE, cacheToDisc=True)

#----------------------------------------------------------------
# funk overview (html/json).
# Caller: AudioStart
# The web identifiers differ from the remaining
# (cont.) html pages - Audio_get_rubriken + Audio_get_sendungen are
#   not usable here.
# The hrefs match the rest (url_id at the end) - single items are
#   evaluated in Audio_get_rubrik.
#
def Audio_get_rubrik_funk(url):         # overview of the funk podcasts
    PLog('Audio_get_rubrik_funk:')

    li = xbmcgui.ListItem()
    li = home(li, ID='ARD Audiothek')   # Home button

    path = url + '/funk'
    page, msg = get_page(path)
    if page == '':
        msg1 = "Fehler in Audio_get_rubrik_funk:"
        msg2 = msg
        MyDialog(msg1, msg2, '')
        return li

    gridlist = blockextract('class="podcast-teaser-complex', page)
    PLog(len(gridlist))
    cnt=0; img_list=[]                              # img_list for duplicate check
    for grid in gridlist:
        title 	= stringextract('class="main-title">', '<', grid)
        stitle 	= stringextract('class="category">', '<', grid)
        anzahl 	= stringextract('class="episode-count">', '<', grid)
        descr 	= stringextract('"podcast-summary">', '<', grid)
        descr = unescape(descr)
        title = title.strip(); stitle = stitle.strip(); anzahl = anzahl.strip(); 
        title = repl_json_chars(title) 

        tag = u"[B]Folgeseiten[/B] | %s" % (anzahl)

        href = stringextract('href="', '"', grid)
        PLog(href)
        href = ARD_AUDIO_BASE + href

        # img_via_audio_href OK only here for the overview, not for the
        #   follow-up pages of Rubrik_ohne_api_call; follow-up pages of
        #   Rubrik_mit_api_call are OK
        img=''
        img = img_via_audio_href(href=href, page=page)          # fetch img from the json part
        if img == '':
            img = R('icon-bild-fehlt.png')

        PLog('10Satz:');
        PLog(title); PLog(img); PLog(href); PLog(descr); PLog(anzahl);
        title=py2_encode(title); href=py2_encode(href);
        fparams="&fparams={'url': '%s', 'title': '%s'}" % (quote(href), quote(title))
        addDir(li=li, label=title, action="dirList", dirID="Audio_get_rubrik", fanart=img, thumb=img, fparams=fparams, 
            summary=descr, tagline=tag)

    fparams="&fparams={}"                           # add funk-module button
    addDir(li=li, label="zum FUNK-Modul", action="dirList", dirID="resources.lib.funk.Main_funk", 
        fanart=R('funk.png'), thumb=R('funk.png'), fparams=fparams)


    xbmcplugin.endOfDirectory(HANDLE, cacheToDisc=True)

#----------------------------------------------------------------
# Caller: Audio_get_rubrik ('Rubrik_ohne_api_call')
# gridlist:     blocks from a page excerpt (Highlights + Meistgehört
#               of the start page)
# page:         the complete page, needed for the img search (mixed html/json)
# img_via_audio_href is used here with param search_back (offset of 1 otherwise)
#
def Audio_get_sendungen(li, gridlist, page, ID):    # extracts single items
    PLog('Audio_get_sendungen: ' + ID)
    PLog(len(gridlist))

    li = home(li, ID='ARD Audiothek')               # Home button

    cnt=0; img_list=[]                              # img_list for duplicate check
    for grid in gridlist:
        # PLog(grid) # Debug
        descr2	= ''; title='';	stitle=''
        title 	= stringextract('podcast-title"', '<', grid)    # default, added 01.05.2020
        stitle 	= stringextract('episode-title"', '<', grid)
        title = rm_datav(title); stitle = rm_datav(stitle);
        descr = stitle                                          # fallback descr
        if stitle:
            title = "%s | %s" % (title, stitle)

        label_list = blockextract('aria-label="', grid)         # title/stitle changed 30.03.2020
        if ID == u'Audio_get_rubrik':
            stitle	= title
            title 	= descr
        if ID == 'Highlights':
            title 	= stringextract('aria-label="', '"', label_list[1])
            stitle 	= stringextract('aria-label="', '"', label_list[0])
        if ID == u'Meistgehört':
            title 	= stringextract('aria-label="', '"', label_list[0])
            stitle 	= stringextract('aria-label="', '"', label_list[1])
        title 	= unescape(title); stitle = unescape(stitle)

        PLog("title: " + title); 
        if ' | ' in title:                                  # split title
            title = title.split('|')                        # more than one | possible
            descr2 = title[-1]                              # fallback descr
            title.remove(descr2)                            # remove it
            if len(title) > 0:
                title = " | ".join(title)
            else:
                title = title[0]

        mp3_url	= stringextract('share-menu-button', 'aria-label', grid)    # mp3 url partly missing
        mp3_url	= stringextract('href="', '"', mp3_url)         # mp3 file
        href 	= stringextract('podcast-title"', 'aria-label', grid)               # default
        if ID == u"Meistgehört":
            href 	= stringextract('class="podcast-title"', 'aria-label', grid)    # homepage of item
        href 	= stringextract('href="', '"', href)
        href	= ARD_AUDIO_BASE + href

        img=''
        if ID == 'Highlights' or ID == u'Meistgehört':                          # fetch img from target page
            img = img_via_web(href)                                             # not unambiguous in json
        else:
            img = img_via_audio_href(href=href, page=page, search_back=True)    # img from the json part

        if not descr:
            descr 	= stringextract('href"', '"', grid)
        if not descr:
            descr = descr2
        dauer	= stringextract('duration"', '', grid)
        dauer	= cleanhtml(dauer); dauer = mystrip(dauer)
        pos		= dauer.find('>'); dauer = dauer[pos+1:]    # remove: data-v-7c906280>


        if dauer:
            descr	= "[B]Audiobeitrag[/B] | %s\n\n%s" % (dauer, descr)
        else:
            descr	= "[B]Audiobeitrag[/B]\n\n%s" % (descr)
        if stitle:
            # fixed: was 'if stitle.strip() in descr == False:' - Python chains
            #   that as (stitle in descr) and (descr == False), which is always
            #   False, so the subtitle was never prepended. Intended: avoid
            #   duplicating the subtitle in descr.
            if stitle.strip() not in descr:                 # avoid doubling
                descr	= "%s\n%s" % (stitle, descr)

        descr	= unescape(descr); descr = repl_json_chars(descr)
        summ_par= descr.replace('\n', '||')
        title = repl_json_chars(title)

        PLog('6Satz:');
        PLog(title); PLog(stitle); PLog(img); PLog(href); PLog(mp3_url);
        title=py2_encode(title); mp3_url=py2_encode(mp3_url);
        img=py2_encode(img); summ_par=py2_encode(summ_par);
        fparams="&fparams={'url': '%s', 'title': '%s', 'thumb': '%s', 'Plot': '%s'}" % (quote(mp3_url), 
            quote(title), quote(img), quote_plus(summ_par))
        if mp3_url:
            addDir(li=li, label=title, action="dirList", dirID="AudioPlayMP3", fanart=img, thumb=img, 
                fparams=fparams, summary=descr)
        else:
            title=py2_encode(title); href=py2_encode(href);
            img=py2_encode(img); summ_par=py2_encode(summ_par);
            fparams="&fparams={'url': '%s', 'title': '%s', 'thumb': '%s', 'Plot': '%s'}" % (quote(href), 
                quote(title), quote(img), quote_plus(summ_par))
            addDir(li=li, label=title, action="dirList", dirID="AudioSingle", fanart=img, thumb=img, 
                fparams=fparams, summary=descr, mediatype='music')

        cnt=cnt+1
    return li
#-----------------------------
# Removes the data-v marker + LF in title and subtitle,
#   e.g.: episode-title" data-v-132985da> ...
# Callers: Audio_get_sendungen, Audio_get_rubriken
#
def rm_datav(line):
    PLog("rm_datav:")

    line=cleanhtml(line)
    pos	= line.find('>')
    if pos >= 0:
        line = line[pos+1:]
        line = line.strip()
    return line
#----------------------------------------------------------------
# AudioSingle directly plays the topic mp3 of its homepage - the
#   function is a fallback for items (e.g.
# (cont.) start page) for which no other mp3 source was found.
# The mp3 source is passed through to AudioPlayMP3 together with
#   the parameters.
#
def AudioSingle(url, title, thumb, Plot):
    PLog('AudioSingle:')
    page, msg = get_page(path=url)
    if page == '':
        msg1 = "Fehler in AudioSingle:"
        msg2 = msg
        MyDialog(msg1, msg2, '')
        return
    PLog(len(page))

    # NOTE(review): assumes a '.mp3' and a preceding 'https' exist in the
    #   page; if rfind misses, the negative indices silently produce a
    #   bogus url - confirm the page always contains the topic mp3.
    pos1 = page.rfind('.mp3')           # topic mp3 is last in the json part
    page = page[:pos1]
    pos2 = page.rfind('https')
    PLog('pos1 %d, pos2 %d' % (pos1, pos2))
    l = page[pos2:] + '.mp3'

    url = l.replace('\\u002F', '/')     # unescape json-quoted slashes
    PLog(url[:100])

    AudioPlayMP3(url, title, thumb, Plot) # directly
    return

#-----------------------------
# img_via_audio_href: determines the img in the json part of the web
#   page (ARD Audiothek) via the last part of the href url,
#   e.g. ../-diversen-peinlichkeiten/63782268
# Because of the url quoting (u002F) the full url cannot be used.
# For some pages (Audio_get_rubrik -> Audio_get_sendungen) the search
#   must run backwards, e.g.
# (cont.) Unsere Favoriten/True Crime (offset of 1 otherwise) -
#   trigger: search_back.
# img_via_web + thread_img_via_web serve incompatible pages.
# 19.08.2020 reworked for funk items
#
def img_via_audio_href(href, page, ID='', search_back=''):
    PLog("img_via_audio_href: " + href)
    PLog(ID)

    # last url segment, prefixed to make it unique inside the json part
    url_part = "u002F%s" % href.split('/')[-1]
    PLog(url_part)

    pos1 = page.find(url_part)
    if search_back:                     # search backwards
        pos2 = page.rfind('img.ardmediathek.de', pos1)
    else:
        pos2 = page.find('img.ardmediathek.de', pos1)
    PLog('pos1 %d, pos2 %d' % (pos1, pos2))

    tail = page[pos2:]
    tail = tail.replace('\/', '/')      # PY2/PY3 migration: unescape slashes
    PLog(tail[:100])

    img = ''
    if 'img.ardmediathek.de' in tail:   # image_16x9 is sometimes missing
        img_path = stringextract('img.ardmediathek.de', '"', tail)
        if img_path:
            img = 'https://img.ardmediathek.de' + img_path
    img = img.replace('{width}', '640')
    img = img.replace('\\u002F', '/')   # PY2/PY3 migration: unescape slashes

    if img == '':
        img = R('icon-bild-fehlt.png')  # fallback for a missing image

    return img

#----------------------------------------------------------------
# 21.08.2020 reworked for a background process
# Callers: Audio_get_rubriken (Highlights, Meistgehört),
#   Audio_get_sendungen
# Determines the img on web page href (usually mixed html/json) and
#   stores it in the background (thread_img_via_web). The web page
#   itself is discarded (not needed).
# The last part of href (usually the item id) serves as file name.
# Storage: cache folder Bildersammlungen/Audiothek.
# No fallback img, so the thread need not be awaited.
#   Caution: deviating imgs are possible on the target page as well.
# 21.08.2020 switch to thread: store the image instead of the web page
#   (performance) - a previously stored web page is removed
#
def img_via_web(href):
    PLog('img_via_web:')

    img=''
    ID = href.split('/')[-1]
    fpath = os.path.join(SLIDESTORE, 'Audiothek')   # image directory
    if not os.path.isdir(fpath):
        try: 
            os.mkdir(fpath)
            PLog('erzeugt: %s' % fpath)
        except OSError: 
            msg1 = 'Bildverzeichnis konnte nicht erzeugt werden:'
            msg2 = fpath
            PLog(msg1); PLog(msg2)
            MyDialog(msg1, msg2, '')
            return img                              # fixed: was 'return li' - li is undefined here (NameError)

    fpath = os.path.join(fpath, '%s.png' % ID)      # extend path by image name
    fname = '%s.png' % ID
    oldfpath = os.path.join(TEXTSTORE, ID)          # old web page (before the switch)
    PLog('fpath: ' + fpath)
    if os.path.exists(fpath) and os.stat(fpath).st_size == 0: # empty? = faulty -> remove
        PLog('fpath_leer: %s' % fpath)
        os.remove(fpath)
    if os.path.exists(oldfpath):                    # clean up
        os.remove(oldfpath)

    if os.path.exists(fpath):                       # local image already exists
        PLog('lade_img_lokal') 
        img = fpath
    else:
        # fetch + store in the background; caller gets '' for now
        from threading import Thread
        icon = R(ICON_MAIN_AUDIO) 
        bg_thread = Thread(target=thread_img_via_web, args=(href, fpath, fname, icon))
        bg_thread.start()

    #if img == '':                                  # no fallback with thread
    #	img = R('icon-bild-fehlt.png')
    return img

#----------------------------------------------------------------
# Background routine for img_via_web.
# Extracts the img from web page url and stores it in path.
#
def thread_img_via_web(url, path, fname, icon):
    PLog("thread_img_via_web:")

    try:
        page, msg = get_page(path=url)
        if page:                                    # fetch the image
            img_web = stringextract('property="og:image" content="', '"', page) # image address
            urlretrieve(img_web, path)
            msg1 = "Lade Bild"
            msg2 = fname
            xbmcgui.Dialog().notification(msg1,msg2,icon,1000, sound=False)
        else:
            PLog(msg)                               # no dialog here (background thread)
    except Exception as exception:
        # fixed log label: was "thread_getsinglepic:" (copy/paste from
        #   another function), which made tracing errors misleading
        PLog("thread_img_via_web:" + str(exception))

    return

#----------------------------------------------------------------
# AudioSearch uses an api call -> pages in json format, img mapping
#   possible via img_via_audio_href.
# Evaluation in AudioContentJSON (li handling + "more" button there).
# Caution: do not use cacheToDisc in endOfDirectory,
#   cacheToDisc=False jumps back into get_query on return.
# AudioContentJSON lists follow-up pages first, then single items.
#
def AudioSearch(title, query=''):
    PLog('AudioSearch:')
    # default items_per_page: 8, here 24 - no effect so far
    base = u'https://www.ardaudiothek.de/api/search/%s?items_per_page=24&page=1' 

    if 	query == '':
        query = get_query(channel='ARD Audiothek')
    PLog(query)
    if query is None or query.strip() == '':        # idiom: 'is None' instead of '== None'
        return ""

    query=py2_encode(query)     # encode for quote
    query = query.strip()
    query_org = query

    path = base % quote(query)
    return AudioContentJSON(title=query, path=path, ID='AudioSearch|%s' % query_org)    # without skip param

#----------------------------------------------------------------
# Lists programs with follow-up items (first) and / or single items
#   in json format.
# The result pages contain a mix of single items and links to
#   follow-up pages (xml urls).
# The caller provides page in json format (e.g. api call in AudioSearch)
#   or sends only the api call in path (e.g. Audio_get_rubrik).
# Callers: AudioStart_AZ_content, Audio_get_rubrik (Rubrik_mit_api_call),
#   AudioSearch, AudioContentXML (redirect).
# Call formats:
#   1. "/api/podcasts/%s/episodes?items_per_page=24&page=%d" % (url_id, pagenr)
#   2. "/api/podcasts?category=%s" % (url_id)  - url_id: title from categories
#
# 11.03.2020 additional evaluation of the A-Z pages incl. sorting
#   (Kodi sorts umlauts correctly, the web does not),
#   blocks '"category":', caller: AudioStart_AZ_content
#
# 31.07.2020 the feed_url to xml contents stopped working
#   (..synd_rss?offset..) - the host address is wrong - replacement
#   see url_xml + json redirect in AudioContentXML
#   20.08.2020 the addresses work again, but we now stay with the
#   json redirect
#
# 16.08.2020 "number of episodes" removed again for follow-up items -
#   the values are wrong, except items A-Z.
#
# skip: 1 = skip multi-item entries, 2 = skip single items
# without skip: AudioSearch, AudioStart_AZ_content
#
def AudioContentJSON(title, page='', path='', AZ_button='', ID='', skip=''):
    PLog('AudioContentJSON: ' + title)
    PLog(skip)
    title_org = title

    li = xbmcgui.ListItem()
    if AZ_button:
        sortlabel='1'                                       # sorting required
    else:
        li = home(li, ID='ARD Audiothek')                   # Home button
        sortlabel=''                                        # default: no sorting

    path_org=''
    if path:                                                # see "more" button
        headers=AUDIO_HEADERS % ARD_AUDIO_BASE
        page, msg = get_page(path=path, header=headers, do_safe=False)  # skip quoting in get_page
        if page == '':
            msg1 = "Fehler in AudioContentJSON:"
            msg2 = msg
            MyDialog(msg1, msg2, '')
            return li
        PLog(len(page))
        path_org = 	path
        page = page.replace('\\"', '*')                     # remove quoted marks

    cnt=0
    gridlist = blockextract('podcast":{"category":', page)  # 1. programs / categories
    if len(gridlist) == 0:
        gridlist = blockextract('"category":', page)        # real categories, A-Z podcasts
    if skip == '1':                                         # skip multi-item entries, e.g. AudioContentXML
        gridlist = []
        PLog('skip_1_mehrfach')
    PLog(len(gridlist))


    href_pre=[]; mehrfach=0
    for rec in gridlist:
        rec		= rec.replace('\\"', '')
        rubrik 	= stringextract('category":"', '"', rec) 
        descr 	= stringextract('description":"', '"', rec)
        clip 	= stringextract('clipTitle":"', '"', rec)   # teaser (not the 1st item) for follow-up pages
        href	= stringextract('link":"', '"', rec)
        if href == '':
            href	= stringextract('url":"', '"', rec)
        if href in href_pre:                                # duplicate?
            continue
        href_pre.append(href)

        anzahl	= stringextract('_elements":', ',', rec)    # int, count is wrong (as is "total")
        sender	= stringextract('station":"', '"', rec) 
        title	= stringextract('title":"', '"', rec) 
        url_xml	= stringextract('feed_url":"', '"', rec) 
        url_xml = url_xml.replace('api-origin.ardaudiothek', 'audiothek.ardmediathek') # see above
        img 	= stringextract('image_16x9":"', '"', rec)
        img		= img.replace('{width}', '640')


        # caller AudioStart_AZ_content:
        if AZ_button:                                       # match button A-Z and #,0-9
            b = up_low(title)[0]
            if AZ_button == '#':                            # match: #,0-9
                try:
                    b_val = ord(b)                          # values / characters see above
                    #PLog("b_val: %d" % b_val)
                    #PLog("title: %s" % title)
                except:
                    PLog("title: %s" % title)
                    PLog("title[0]: %s" % title[0])
                    b_val = 0
                # 195: Ü, Ö, Ä (identical 1st byte of the 2-byte value in Unicode)
                # 64:  @ (e.g. @mediasres)
                if (b_val < 48 or b_val > 57) and b_val != 35 and b_val != 195 and b_val != 64:
                    continue
            else:
                AZ_button = py2_encode(AZ_button)
                if b != up_low(AZ_button):
                    continue
            descr	= u"[B]Folgeseiten[/B] | %s Episoden | %s\n\n%s" % (anzahl, sender, descr)

        else:                                               # title/descr <> A-Z
            if title == rubrik:                             # often doubled
                title	= "%s | %s" % (rubrik, sender)
            else:
                title	= "%s | %s | %s" % (rubrik, sender, title)
            descr	= u"[B]Folgeseiten[/B] | %s\n\n%s" % (sender, descr)
            if clip:                                        # append teaser
                descr	= u"[B]Teaser:[/B] %s\n\n%s" % (clip, descr)
            title = repl_json_chars(title)
            descr = repl_json_chars(descr)

        PLog('7Satz:');
        PLog(rubrik); PLog(title); PLog(img); PLog(href); PLog(url_xml);
        # NOTE(review): url_xml is overwritten with href here, so the html
        #   link (not the feed url) is passed as 'path' - presumably the
        #   deliberate json redirect mentioned in the header (20.08.2020);
        #   confirm before "fixing".
        title=py2_encode(title); url_xml=py2_encode(href);

        fparams="&fparams={'path': '%s', 'title': '%s', 'url_html': '%s'}" %\
            (quote(url_xml), quote(title), quote(href))
        addDir(li=li, label=title, action="dirList", dirID="AudioContentXML", fanart=img, thumb=img, 
            fparams=fparams, summary=descr, sortlabel=sortlabel)
        cnt=cnt+1
        mehrfach=mehrfach+1

    gridlist = blockextract('"duration":', page)            # 2. single items
Einzelbeiträge\n\tif skip == '2':\t\t\t\t\t\t\t\t\t\t\t# skip single-Beiträge\n\t\tPLog('skip_2_single')\n\t\tgridlist = []\n\t\t\n\tPLog(len(gridlist))\n\tfor rec in gridlist:\n\t\trec\t\t= rec.replace('\\\\\"', '')\n\t\trubrik \t= stringextract('category\":\"', '\"', rec) \n\t\t# dauer \t= stringextract('duration\":\"', '\"', rec)\n\t\tdauer \t= stringextract('clipLength\":\"', '\"', rec)\n\t\tif dauer == '':\t\t\t\t\t\t\t\t\t\t# mp3 fehlt, kein single-Beitrag\n\t\t\tcontinue\n\t\tdauer = seconds_translate(dauer)\n\t\tdescr \t= stringextract('description\":\"', '\"', rec)\n\t\turl\t= stringextract('playback_url\":\"', '\"', rec) \n\t\tcount\t= stringextract('_elements\":', ',', rec) \t# int\n\t\tsender\t= stringextract('station\":\"', '\"', rec) \n\t\ttitle\t= stringextract('clipTitle\":\"', '\"', rec) \n\t\thref\t= stringextract('link\":\"', '\"', rec) \t\t# Link zur Website\n\t\timg \t= stringextract('image_16x9\":\"', '\"', rec)\n\t\timg\t\t= img.replace('{width}', '640')\n\t\t\n\t\ttitle = repl_json_chars(title)\n\t\tdescr = repl_json_chars(descr)\t\t\n\t\t\n\t\t# title\t= \"%s | %s\" % (rubrik, title)\t\t\t\t# rubrik kann hier fehlen\n\t\tdescr\t= u\"[B]Audiobeitrag[/B] | Dauer %s | %s\\n\\n%s\" % (dauer, sender, descr) \n\t\tsumm_par= descr.replace('\\n', '||')\n\t\n\t\tPLog('8Satz:');\n\t\tPLog(dauer); PLog(rubrik); PLog(title); PLog(img); PLog(url)\n\t\ttitle=py2_encode(title); img=py2_encode(img); summ_par=py2_encode(summ_par);\n\t\tfparams=\"&fparams={'url': '%s', 'title': '%s', 'thumb': '%s', 'Plot': '%s'}\" % (quote(url), \n\t\t\tquote(title), quote(img), quote_plus(summ_par))\n\t\taddDir(li=li, label=title, action=\"dirList\", dirID=\"AudioPlayMP3\", fanart=img, thumb=img, fparams=fparams, \n\t\t\tsummary=descr)\n\t\tcnt=cnt+1\n\n\tPLog(cnt)\t\n\tif cnt == 0:\t\t\t\t\t\t\t\t\t\t\t# ohne Ergebnis raus\n\t\tmsg1 = 'nichts (mehr) gefunden zu >%s<' % title_org\n\t\tMyDialog(msg1, '', '')\n\t\txbmcplugin.endOfDirectory(HANDLE)\n\t\t\n\tPLog(\"cnt: %d\" % 
cnt)\n\t# pages in der json-Seite bezieht sich auf den letzten Beitrag, nicht\n\t#\tauf das Thema dieser Seite! Auch \"total\" falsch - keine Seitensteuerung \n\t#\tgefunden - Calls bis Leerseite\n\t# Anzahl Episoden in Aufrufer stimmt aber mit Anzahl Einzelbeiträge überein\n\t# api-Calls o. pagenr möglich: ../api/podcasts?category=Corona (usetitle aus\n\t#\tAudioStartRubrik)\n\t# cnt < 24: keine weiteren Sätze mehr\n\tif path_org and '&page=' in path_org and cnt == 24:\t\t# Mehr-Button\n\t\tPLog(path_org)\n\t\tpagenr = re.search('&page=(.d?)', path_org).group(1)\n\t\tpagenr = int(pagenr)\n\t\tnextpage = pagenr + 1\n\t\tPLog('nextpage: %d' % nextpage)\n\t\tnext_path = path_org.replace('&page=%d' % pagenr, '&page=%d' % nextpage)\n\t\tPLog('nextpage: %d, next_path: %s' % (nextpage, next_path))\t\n\t\ttag = \"weiter zu Seite %d\" % nextpage\n\t\t\n\t\ttitle_org=py2_encode(title_org); next_path=py2_encode(next_path)\n\t\tfparams=\"&fparams={'title': '%s', 'path': '%s', 'ID': '%s', 'skip': '%s'}\" %\\\n\t\t\t(quote(title_org), quote(next_path), ID, skip)\n\t\taddDir(li=li, label=title_org, action=\"dirList\", dirID=\"AudioContentJSON\", \n\t\t\tfanart=R(ICON_MEHR), thumb=R(ICON_MEHR), tagline=tag, fparams=fparams)\t\n\t\t\t\n\t\t\n\t#-------------------\t\t\t\t\t\t\t\t\t# Button bei Suche anhängen\n\tfav_path = SETTINGS.getSetting('pref_podcast_favorits')\n\tif fav_path == 'podcast-favorits.txt' or fav_path == '':# im Verz. 
        # default favorites file lives in the resources directory
        fav_path = R(fav_path)
        fname = os.path.basename(fav_path)
    else:
        fav_path = os.path.abspath(fav_path)
        fname = "%s..%s" % (os.path.dirname(fav_path[:10]), os.path.basename(fav_path))
    PLog("fav_path: " + fav_path)
    PLog(os.path.isfile(fav_path)); PLog('AudioSearch' in ID)

    if os.path.isfile(fav_path) and 'AudioSearch' in ID:    # button: append to podcast-favorits.txt
        # pagenr 	= path.split('=')[-1]                   # page nr in path - not needed
        query 	= ID.split('|')[1]                          # search input
        query	= "Suchergebnis: %s" % query
        title 	= u"Suchergebnis den Podcast-Favoriten hinzufügen"
        tag 	= query
        summ 	= "Ablage: %s" % fname
        fparams="&fparams={'title': '%s', 'path': '%s', 'fav_path': '%s', 'mehrfach': '%s'}" % \
            (quote(query), quote(path_org), quote(fav_path), mehrfach)
        addDir(li=li, label=title, action="dirList", dirID="resources.lib.Podcontent.PodAddFavs", 
            fanart=R(ICON_STAR), thumb=R(ICON_STAR), fparams=fparams, summary=summ, 
            tagline=tag)

    xbmcplugin.endOfDirectory(HANDLE, cacheToDisc=True)

#----------------------------------------------------------------
# Lists programs and / or single items in xml format.
# The result pages contain a mix of single items and links to
#   follow-up pages.
# The caller passes path, not page as AudioSearch does for AudioContentJSON.
# Date conversion problem see transl_pubDate - the GMT date stays
#   unchanged here.
# The item duration is missing in the xml format.
# Limit of 100 per list since pages with over 1000 items exist,
#   e.g. Hörspiel artmix.galerie 461,
#        Wissen radioWissen 2106, Wissen SWR2 2488
#   caching not required here (xml, usually
1 Bild/Liste)\n# 16.08.2020: divert to the json page (url_html) if the xml page has no content;\n#	url_html -> api call via url_id; skip duplicate records in\n#	AudioContentJSON.	\n# NOTE(review): several stringextract/blockextract marker arguments below are empty ('') -\n#	the original tag delimiters appear to have been stripped during extraction; restore from upstream source.\n# \ndef AudioContentXML(title, path, offset='', url_html=''):				\n	PLog('AudioContentXML: ' + title)\n	title_org = title\n	max_len = 100											# 100 entries / page\n	if offset:\n		offset = int(offset)\n	else:\n		offset = 0\n	PLog(\"offset: %d\" % offset)\n	\n	li = xbmcgui.ListItem()\n	\n	page, msg = get_page(path=path)	\n	if page == '':\n		msg1 = \"Fehler in AudioContentXML:\"\n		msg2 = msg\n		MyDialog(msg1, msg2, '')	\n		return li\n	PLog(len(page))				\n	\n	img_list = blockextract('', page)				# img of the umbrella record\n	img=''\n	if len(img_list) == 1:	\n		img	= stringextract('', '', page)\n		img	= stringextract('', '', img)\n	\n	cnt=0\n	gridlist = blockextract('', page)	\n	PLog(len(gridlist))\n	if len(gridlist) == 0:									# fallback: divert to json page\n		if url_html:\n			PLog('Ausleitung_json_Seite')\n			pagenr = 1\n			url_id = url_html.split('/')[-1]\n			path = ARD_AUDIO_BASE + \"/api/podcasts/%s/episodes?items_per_page=24&page=%d\" % (url_id, pagenr)	\n			AudioContentJSON(title, page='', path=path, skip='1')	# skip duplicate records	\n			xbmcplugin.endOfDirectory(HANDLE, cacheToDisc=True)\n			return\n	\n	len_all = len(gridlist)\n	if offset and offset <= len(gridlist):\n		gridlist = gridlist[offset:]\n	PLog(len(gridlist))\n	\n	li = home(li, ID='ARD Audiothek')		# Home button, after possible diversion (avoid duplicates)	\n	\n	for rec in gridlist:\n		title	= stringextract('', '', rec) \n		url	= stringextract('url=\"', '\"', rec) 				# mp3\n		link	= stringextract('', '', rec) 	# website\n		descr	= stringextract('', '', rec) \n		datum	= stringextract('', '', rec) \n		# datum	= transl_pubDate(datum)						# see
 transl_pubDate\n		sender	= stringextract('', '', rec) 	\n		\n		title = unescape(title); title = repl_json_chars(title); \n		descr = unescape(descr); descr = repl_json_chars(descr); \n		descr	= \"Sender: %s | gesendet: %s\\n\\n%s\" % (sender, datum, descr)	\n		summ_par= descr.replace('\\n', '||')\n	\n		PLog('9Satz:');\n		PLog(title); PLog(url); PLog(link); PLog(datum);\n		title=py2_encode(title); url=py2_encode(url);img=py2_encode(img); \n		summ_par=py2_encode(summ_par);\n		fparams=\"&fparams={'url': '%s', 'title': '%s', 'thumb': '%s', 'Plot': '%s'}\" % (quote(url), \n			quote(title), quote(img), quote_plus(summ_par))\n		addDir(li=li, label=title, action=\"dirList\", dirID=\"AudioPlayMP3\", fanart=img, thumb=img, fparams=fparams, \n			summary=descr)\n			\n		cnt=cnt+1\n		if cnt >= max_len:									# page limit reached: add \"Mehr..\" (more) button\n			summ = u\"gezeigt %d, gesamt %d\" % (cnt+offset,len_all)\n			title_org=py2_encode(title_org); \n			# NOTE(review): next offset offset+cnt+1 looks like it skips one record (vs offset+cnt) - verify\n			fparams=\"&fparams={'title': '%s', 'path': '%s', 'offset': '%s'}\" % (quote(title_org), \n				quote(path), offset+cnt+1)\n			addDir(li=li, label=\"Mehr..\", action=\"dirList\", dirID=\"AudioContentXML\", fanart=R(ICON_MEHR), \n				thumb=R(ICON_MEHR), fparams=fparams, tagline=title_org, summary=summ)\n			xbmcplugin.endOfDirectory(HANDLE, cacheToDisc=True)\n			# NOTE(review): no break here - the loop keeps iterating past max_len and may re-add the button; confirm intended\n		\n	PLog(cnt)	\n	if cnt == 0:\n		msg1 = 'keine Audios gefunden zu >%s<' % title\n		MyDialog(msg1, '', '')\n			\n	xbmcplugin.endOfDirectory(HANDLE, cacheToDisc=True)\n	\n#----------------------------------------------------------------\n# Output of a single audio item.\n# If pref_use_downloads is enabled, 2 buttons are created\n#	(play + download).\n# If pref_use_downloads is disabled, control passes directly to\n#	PlayAudio.\n#\ndef AudioPlayMP3(url, title, thumb, Plot):\n	PLog('AudioPlayMP3: ' + title)\n	\n	if SETTINGS.getSetting('pref_use_downloads') == 'false':\n		PLog('starte PlayAudio direkt')\n		PlayAudio(url, title, thumb, Plot) 
# PlayAudio directly\n	return\n	\n	li = xbmcgui.ListItem()\n	li = home(li, ID='ARD Audiothek')		# Home button\n		\n	summary = Plot.replace('||', '\\n')			# display\n	 \n	PLog(title); PLog(url); PLog(Plot);\n	title=py2_encode(title); url=py2_encode(url);\n	thumb=py2_encode(thumb); Plot=py2_encode(Plot);\n	fparams=\"&fparams={'url': '%s', 'title': '%s', 'thumb': '%s', 'Plot': '%s'}\" % (quote(url), \n		quote(title), quote(thumb), quote_plus(Plot))\n	addDir(li=li, label=title, action=\"dirList\", dirID=\"PlayAudio\", fanart=thumb, thumb=thumb, fparams=fparams, \n		summary=summary, mediatype='music')\n	\n	download_list = []					# 2-part list for download: 'title # url'\n	download_list.append(\"%s#%s\" % (title, url))\n	PLog(download_list)\n	title_org=title; tagline_org=''; summary_org=Plot\n	li = test_downloads(li,download_list,title_org,summary_org,tagline_org,thumb,high=-1) # download button\n		\n	xbmcplugin.endOfDirectory(HANDLE)\n	\n##################################### End Audiothek ################################################\n\ndef ARDSport(title):\n	PLog('ARDSport:'); \n	title_org = title\n\n	li = xbmcgui.ListItem()\n	li = home(li, ID='ARD')						# Home button\n\n	SBASE = 'https://www.sportschau.de'\n	path = 'https://www.sportschau.de/index.html'	 		# start page		\n	page, msg = get_page(path=path)		\n	if page == '':\n		msg1 = 'Seite kann nicht geladen werden.'\n		msg2 = msg\n		MyDialog(msg1, msg2, '')\n		return li \n	PLog(len(page))	\n	\n	title = \"Live\"								# addition: Live (missing in tabpanel)\n	href = 'https://www.sportschau.de/ticker/index.html'\n	img = R(ICON_DIR_FOLDER)\n	# summ = \"Livestreams nur hier im Menü [B]Live[/B] oder unten bei den Direktlinks unterhalb der Moderatoren\"\n	tagline = 'aktuelle Liveberichte (Video, Audio)'\n	title=py2_encode(title); href=py2_encode(href); \n	href=py2_encode(href); img=py2_encode(img);\n	fparams=\"&fparams={'title': '%s', 
'path': '%s', 'img': '%s'}\"\t% (quote(title), \n\t\tquote(href), quote(img))\n\taddDir(li=li, label=title, action=\"dirList\", dirID=\"ARDSportPanel\", fanart=img, \n\t\tthumb=img, tagline=tagline, fparams=fparams)\t\t\t\n\n\ttabpanel = stringextract('