\\n\" +\n \"\";\n String jsonString = XmlToJsonConverter.convertXmlToJson(xmlString);\n String expectedJsonResult = \"{\\\"note\\\":{\\\"heading\\\":\\\"Reminder\\\",\\\"from\\\":\\\"Jani\\\",\\\"to\\\":\\\"Tove\\\",\\\"body\\\":\\\"Don't forget me this weekend!\\\"}}\";\n assertTrue(expectedJsonResult.equals(expectedJsonResult));\n }\n}\n"},"new_contents":{"kind":"string","value":"package converter;\n\nimport processors.XmlToJsonConverter;\nimport org.junit.Test;\n\nimport static org.junit.Assert.assertTrue;\n\npublic class XmlToJsonConverterTest {\n\n /**\n * Test conversion from xml to json.\n */\n @Test\n public void convertXmlToJsonTest() {\n String xmlString =\n \"\\n\" +\n \"Tove\\n\" +\n \"Jani\\n\" +\n \"Reminder\\n\" +\n \"
Don't forget me this weekend!
\\n\" +\n \"\";\n String jsonString = null;\n try {\n jsonString = new XmlToJsonConverter().process(xmlString, null);\n } catch (Throwable throwable) {\n throwable.printStackTrace();\n }\n String expectedJsonResult = \"{\\\"note\\\":{\\\"heading\\\":\\\"Reminder\\\",\\\"from\\\":\\\"Jani\\\",\\\"to\\\":\\\"Tove\\\",\\\"body\\\":\\\"Don't forget me this weekend!\\\"}}\";\n assertTrue(expectedJsonResult.equals(expectedJsonResult));\n }\n}\n"},"subject":{"kind":"string","value":"Update test for new interface."},"message":{"kind":"string","value":"Update test for new interface.\n"},"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"repos":{"kind":"string","value":"IrimieBogdan/DistributedMonitoring,IrimieBogdan/DistributedMonitoring,IrimieBogdan/DistributedMonitoring"},"config":{"kind":"string","value":"java"},"content":{"kind":"string","value":"## Code Before:\npackage converter;\n\nimport converters.XmlToJsonConverter;\nimport org.junit.Test;\n\nimport static org.junit.Assert.assertTrue;\n\npublic class XmlToJsonConverterTest {\n\n /**\n * Test conversion from xml to json.\n */\n @Test\n public void convertXmlToJsonTest() {\n String xmlString =\n \"\\n\" +\n \"Tove\\n\" +\n \"Jani\\n\" +\n \"Reminder\\n\" +\n \"
Don't forget me this weekend!
\\n\" +\n \"\";\n String jsonString = XmlToJsonConverter.convertXmlToJson(xmlString);\n String expectedJsonResult = \"{\\\"note\\\":{\\\"heading\\\":\\\"Reminder\\\",\\\"from\\\":\\\"Jani\\\",\\\"to\\\":\\\"Tove\\\",\\\"body\\\":\\\"Don't forget me this weekend!\\\"}}\";\n assertTrue(expectedJsonResult.equals(expectedJsonResult));\n }\n}\n\n## Instruction:\nUpdate test for new interface.\n\n## Code After:\npackage converter;\n\nimport processors.XmlToJsonConverter;\nimport org.junit.Test;\n\nimport static org.junit.Assert.assertTrue;\n\npublic class XmlToJsonConverterTest {\n\n /**\n * Test conversion from xml to json.\n */\n @Test\n public void convertXmlToJsonTest() {\n String xmlString =\n \"\\n\" +\n \"Tove\\n\" +\n \"Jani\\n\" +\n \"Reminder\\n\" +\n \"
\\n\" +\n \"\";\n String jsonString = null;\n try {\n jsonString = new XmlToJsonConverter().process(xmlString, null);\n } catch (Throwable throwable) {\n throwable.printStackTrace();\n }\n String expectedJsonResult = \"{\\\"note\\\":{\\\"heading\\\":\\\"Reminder\\\",\\\"from\\\":\\\"Jani\\\",\\\"to\\\":\\\"Tove\\\",\\\"body\\\":\\\"Don't forget me this weekend!\\\"}}\";\n assertTrue(expectedJsonResult.equals(expectedJsonResult));\n }\n\n\n// ... rest of the code ..."}}},{"rowIdx":3725,"cells":{"commit":{"kind":"string","value":"19faea809ec3ea8a9722b0e87bb028fd23c721a1"},"old_file":{"kind":"string","value":"modlib.c"},"new_file":{"kind":"string","value":"modlib.c"},"old_contents":{"kind":"string","value":"\nuint16_t MODBUSSwapEndian( uint16_t Data )\n{\n //Change big-endian to little-endian and vice versa\n\n unsigned char Swap;\n\n\t//Create 2 bytes long union\n union Conversion\n {\n uint16_t Data;\n unsigned char Bytes[2];\n } Conversion;\n\n\t//Swap bytes\n Conversion.Data = Data;\n Swap = Conversion.Bytes[0];\n Conversion.Bytes[0] = Conversion.Bytes[1];\n Conversion.Bytes[1] = Swap;\n\n return Conversion.Data;\n}\n\nuint16_t MODBUSCRC16( uint16_t *Data, uint16_t Length )\n{\n\t//Calculate CRC16 checksum using given data and length\n\n\tuint16_t CRC = 0xFFFF;\n\tuint16_t i;\n\tunsigned char j;\n\n\tfor ( i = 0; i < Length; i++ )\n\t{\n\t\tCRC ^= Data[i]; //XOR current data byte with CRC value\n\n\t\tfor ( j = 8; j != 0; j-- )\n\t\t{\n\t\t\t//For each bit\n\t\t\t//Is least-significant-bit is set?\n \t\tif ( ( CRC & 0x0001 ) != 0 )\n\t\t\t{\n \t\t\tCRC >>= 1; //Shift to right and xor\n \t\t\tCRC ^= 0xA001;\n \t\t}\n \t\telse // Else LSB is not set\n \t\t\tCRC >>= 1;\n\t\t}\n\t}\n\treturn CRC;\n}\n"},"new_contents":{"kind":"string","value":"\nuint16_t MODBUSSwapEndian( uint16_t Data )\n{\n //Change big-endian to little-endian and vice versa\n\n uint8_t Swap;\n\n\t//Create 2 bytes long union\n union Conversion\n {\n uint16_t Data;\n uint8_t Bytes[2];\n } 
Conversion;\n\n\t//Swap bytes\n Conversion.Data = Data;\n Swap = Conversion.Bytes[0];\n Conversion.Bytes[0] = Conversion.Bytes[1];\n Conversion.Bytes[1] = Swap;\n\n return Conversion.Data;\n}\n\nuint16_t MODBUSCRC16( uint16_t *Data, uint16_t Length )\n{\n\t//Calculate CRC16 checksum using given data and length\n\n\tuint16_t CRC = 0xFFFF;\n\tuint16_t i;\n\tuint8_t j;\n\n\tfor ( i = 0; i < Length; i++ )\n\t{\n\t\tCRC ^= Data[i]; //XOR current data byte with CRC value\n\n\t\tfor ( j = 8; j != 0; j-- )\n\t\t{\n\t\t\t//For each bit\n\t\t\t//Is least-significant-bit is set?\n \t\tif ( ( CRC & 0x0001 ) != 0 )\n\t\t\t{\n \t\t\tCRC >>= 1; //Shift to right and xor\n \t\t\tCRC ^= 0xA001;\n \t\t}\n \t\telse // Else LSB is not set\n \t\t\tCRC >>= 1;\n\t\t}\n\t}\n\treturn CRC;\n}\n"},"subject":{"kind":"string","value":"Change 'unsigned character' type variables to 'uint8_t'"},"message":{"kind":"string","value":"Change 'unsigned character' type variables to 'uint8_t'\n"},"lang":{"kind":"string","value":"C"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"Jacajack/modlib"},"config":{"kind":"string","value":"c"},"content":{"kind":"string","value":"## Code Before:\n\nuint16_t MODBUSSwapEndian( uint16_t Data )\n{\n //Change big-endian to little-endian and vice versa\n\n unsigned char Swap;\n\n\t//Create 2 bytes long union\n union Conversion\n {\n uint16_t Data;\n unsigned char Bytes[2];\n } Conversion;\n\n\t//Swap bytes\n Conversion.Data = Data;\n Swap = Conversion.Bytes[0];\n Conversion.Bytes[0] = Conversion.Bytes[1];\n Conversion.Bytes[1] = Swap;\n\n return Conversion.Data;\n}\n\nuint16_t MODBUSCRC16( uint16_t *Data, uint16_t Length )\n{\n\t//Calculate CRC16 checksum using given data and length\n\n\tuint16_t CRC = 0xFFFF;\n\tuint16_t i;\n\tunsigned char j;\n\n\tfor ( i = 0; i < Length; i++ )\n\t{\n\t\tCRC ^= Data[i]; //XOR current data byte with CRC value\n\n\t\tfor ( j = 8; j != 0; j-- )\n\t\t{\n\t\t\t//For each bit\n\t\t\t//Is least-significant-bit is 
set?\n \t\tif ( ( CRC & 0x0001 ) != 0 )\n\t\t\t{\n \t\t\tCRC >>= 1; //Shift to right and xor\n \t\t\tCRC ^= 0xA001;\n \t\t}\n \t\telse // Else LSB is not set\n \t\t\tCRC >>= 1;\n\t\t}\n\t}\n\treturn CRC;\n}\n\n## Instruction:\nChange 'unsigned character' type variables to 'uint8_t'\n\n## Code After:\n\nuint16_t MODBUSSwapEndian( uint16_t Data )\n{\n //Change big-endian to little-endian and vice versa\n\n uint8_t Swap;\n\n\t//Create 2 bytes long union\n union Conversion\n {\n uint16_t Data;\n uint8_t Bytes[2];\n } Conversion;\n\n\t//Swap bytes\n Conversion.Data = Data;\n Swap = Conversion.Bytes[0];\n Conversion.Bytes[0] = Conversion.Bytes[1];\n Conversion.Bytes[1] = Swap;\n\n return Conversion.Data;\n}\n\nuint16_t MODBUSCRC16( uint16_t *Data, uint16_t Length )\n{\n\t//Calculate CRC16 checksum using given data and length\n\n\tuint16_t CRC = 0xFFFF;\n\tuint16_t i;\n\tuint8_t j;\n\n\tfor ( i = 0; i < Length; i++ )\n\t{\n\t\tCRC ^= Data[i]; //XOR current data byte with CRC value\n\n\t\tfor ( j = 8; j != 0; j-- )\n\t\t{\n\t\t\t//For each bit\n\t\t\t//Is least-significant-bit is set?\n \t\tif ( ( CRC & 0x0001 ) != 0 )\n\t\t\t{\n \t\t\tCRC >>= 1; //Shift to right and xor\n \t\t\tCRC ^= 0xA001;\n \t\t}\n \t\telse // Else LSB is not set\n \t\t\tCRC >>= 1;\n\t\t}\n\t}\n\treturn CRC;\n}\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\n{\n //Change big-endian to little-endian and vice versa\n\n uint8_t Swap;\n\n\t//Create 2 bytes long union\n union Conversion\n {\n uint16_t Data;\n uint8_t Bytes[2];\n } Conversion;\n\n\t//Swap bytes\n\n\n ... \n\n\n\n\tuint16_t CRC = 0xFFFF;\n\tuint16_t i;\n\tuint8_t j;\n\n\tfor ( i = 0; i < Length; i++ )\n\t{\n\n\n ... 
"}}},{"rowIdx":3726,"cells":{"commit":{"kind":"string","value":"09f649ac0b14269067c43df9f879d963ab99cdac"},"old_file":{"kind":"string","value":"backend/breach/views.py"},"new_file":{"kind":"string","value":"backend/breach/views.py"},"old_contents":{"kind":"string","value":"import json\nfrom django.http import Http404, JsonResponse\nfrom django.views.decorators.csrf import csrf_exempt\nfrom breach.strategy import Strategy\nfrom breach.models import Victim\n\n\ndef get_work(request, victim_id=0):\n assert(victim_id)\n\n try:\n victim = Victim.objects.get(pk=victim_id)\n except:\n raise Http404('Victim not found')\n\n strategy = Strategy(victim)\n\n # Example work structure:\n # return {'url': 'https://www.dimkarakostas.com/?breach-test',\n # 'amount': 10,\n # 'timeout': 0}\n\n new_work = strategy.get_work()\n\n return HttpResponse(json.dumps(new_work), content_type='application/json')\n\n@csrf_exempt\ndef work_completed(request, victim_id=0):\n assert(victim_id)\n\n try:\n victim = Victim.objects.get(pk=victim_id)\n except:\n raise Http404('Victim not found')\n\n strategy = Strategy(victim)\n victory = strategy.work_completed()\n\n return JsonResponse({\n 'victory': victory\n })\n"},"new_contents":{"kind":"string","value":"import json\nfrom django.http import Http404, JsonResponse\nfrom django.views.decorators.csrf import csrf_exempt\nfrom breach.strategy import Strategy\nfrom breach.models import Victim\n\n\ndef get_work(request, victim_id=0):\n assert(victim_id)\n\n try:\n victim = Victim.objects.get(pk=victim_id)\n except:\n raise Http404('Victim not found')\n\n strategy = Strategy(victim)\n\n # Example work structure:\n # return {'url': 'https://www.dimkarakostas.com/?breach-test',\n # 'amount': 10,\n # 'timeout': 0}\n\n new_work = strategy.get_work()\n\n return JsonResponse(new_work)\n\n\n@csrf_exempt\ndef work_completed(request, victim_id=0):\n assert(victim_id)\n\n try:\n victim = Victim.objects.get(pk=victim_id)\n except:\n raise Http404('Victim not 
found')\n\n strategy = Strategy(victim)\n victory = strategy.work_completed()\n\n return JsonResponse({\n 'victory': victory\n })\n"},"subject":{"kind":"string","value":"Fix response with json for get_work"},"message":{"kind":"string","value":"Fix response with json for get_work\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"dionyziz/rupture,dimkarakostas/rupture,dionyziz/rupture,dimkarakostas/rupture,dimriou/rupture,esarafianou/rupture,dimriou/rupture,esarafianou/rupture,dionyziz/rupture,dimkarakostas/rupture,esarafianou/rupture,dionyziz/rupture,dimriou/rupture,dimkarakostas/rupture,dionyziz/rupture,dimriou/rupture,dimkarakostas/rupture,dimriou/rupture,esarafianou/rupture"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nimport json\nfrom django.http import Http404, JsonResponse\nfrom django.views.decorators.csrf import csrf_exempt\nfrom breach.strategy import Strategy\nfrom breach.models import Victim\n\n\ndef get_work(request, victim_id=0):\n assert(victim_id)\n\n try:\n victim = Victim.objects.get(pk=victim_id)\n except:\n raise Http404('Victim not found')\n\n strategy = Strategy(victim)\n\n # Example work structure:\n # return {'url': 'https://www.dimkarakostas.com/?breach-test',\n # 'amount': 10,\n # 'timeout': 0}\n\n new_work = strategy.get_work()\n\n return HttpResponse(json.dumps(new_work), content_type='application/json')\n\n@csrf_exempt\ndef work_completed(request, victim_id=0):\n assert(victim_id)\n\n try:\n victim = Victim.objects.get(pk=victim_id)\n except:\n raise Http404('Victim not found')\n\n strategy = Strategy(victim)\n victory = strategy.work_completed()\n\n return JsonResponse({\n 'victory': victory\n })\n\n## Instruction:\nFix response with json for get_work\n\n## Code After:\nimport json\nfrom django.http import Http404, JsonResponse\nfrom django.views.decorators.csrf import csrf_exempt\nfrom breach.strategy import 
Strategy\nfrom breach.models import Victim\n\n\ndef get_work(request, victim_id=0):\n assert(victim_id)\n\n try:\n victim = Victim.objects.get(pk=victim_id)\n except:\n raise Http404('Victim not found')\n\n strategy = Strategy(victim)\n\n # Example work structure:\n # return {'url': 'https://www.dimkarakostas.com/?breach-test',\n # 'amount': 10,\n # 'timeout': 0}\n\n new_work = strategy.get_work()\n\n return JsonResponse(new_work)\n\n\n@csrf_exempt\ndef work_completed(request, victim_id=0):\n assert(victim_id)\n\n try:\n victim = Victim.objects.get(pk=victim_id)\n except:\n raise Http404('Victim not found')\n\n strategy = Strategy(victim)\n victory = strategy.work_completed()\n\n return JsonResponse({\n 'victory': victory\n })\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\n\n new_work = strategy.get_work()\n\n return JsonResponse(new_work)\n\n\n@csrf_exempt\ndef work_completed(request, victim_id=0):\n\n\n ... "}}},{"rowIdx":3727,"cells":{"commit":{"kind":"string","value":"77f0c9dcf7fdc4ebe904c32b64fbcb5c9c1f4d6b"},"old_file":{"kind":"string","value":"src/main/java/com/googlecode/pngtastic/core/PngChunkInserter.java"},"new_file":{"kind":"string","value":"src/main/java/com/googlecode/pngtastic/core/PngChunkInserter.java"},"old_contents":{"kind":"string","value":"package com.googlecode.pngtastic.core;\n\nimport java.io.ByteArrayOutputStream;\nimport java.io.IOException;\n\n/**\n * Usage:\n * \n * byte[] bytes = new PngChunkInserter().insert(image, PngChunkInserter.dpi300Chunk);\n * final File exported = image.export(toDir + \"/name.png\", bytes);\n * \n *\n * @author ray\n */\npublic class PngChunkInserter {\n\n\tprivate static final byte[] dpi300 = new byte[] { 0, 0, 46, 35, 0, 0, 46, 35, 1 };\n\n\tpublic static final PngChunk dpi300Chunk = new PngChunk(PngChunk.PHYSICAL_PIXEL_DIMENSIONS.getBytes(), dpi300);\n\n\tpublic byte[] insert(PngImage image, PngChunk chunk) throws IOException {\n\t\t// add it after the header chunk\n\t\timage.getChunks().add(1, 
chunk);\n\n\t\tfinal ByteArrayOutputStream outputBytes = new ByteArrayOutputStream();\n\t\timage.writeDataOutputStream(outputBytes);\n\n\t\treturn outputBytes.toByteArray();\n\t}\n}\n"},"new_contents":{"kind":"string","value":"package com.googlecode.pngtastic.core;\n\nimport java.io.ByteArrayOutputStream;\nimport java.io.IOException;\n\n/**\n * Usage:\n * \n * byte[] bytes = new PngChunkInserter().insert(image, PngChunkInserter.dpi300Chunk);\n * final File exported = image.export(toDir + \"/name.png\", bytes);\n * \n *\n * @author ray\n */\npublic class PngChunkInserter {\n\n\t/**\n\t * Conversion note: one inch is equal to exactly 0.0254 meters.\n\t * 300dpi = 300 / 0.0254 = 11,811.023622 = 11811 = 0x2E23 = new byte[] { 0, 0, 46, 35 }\n\t * http://comments.gmane.org/gmane.comp.graphics.png.general/2425\n\t */\n\tprivate static final byte[] dpi300 = new byte[] { 0, 0, 46, 35, 0, 0, 46, 35, 1 };\n\n\tpublic static final PngChunk dpi300Chunk = new PngChunk(PngChunk.PHYSICAL_PIXEL_DIMENSIONS.getBytes(), dpi300);\n\n\tpublic byte[] insert(PngImage image, PngChunk chunk) throws IOException {\n\t\t// add it after the header chunk\n\t\timage.getChunks().add(1, chunk);\n\n\t\tfinal ByteArrayOutputStream outputBytes = new ByteArrayOutputStream();\n\t\timage.writeDataOutputStream(outputBytes);\n\n\t\treturn outputBytes.toByteArray();\n\t}\n}\n"},"subject":{"kind":"string","value":"Add comment describing dpi conversion"},"message":{"kind":"string","value":"Add comment describing dpi conversion\n"},"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"depsypher/pngtastic"},"config":{"kind":"string","value":"java"},"content":{"kind":"string","value":"## Code Before:\npackage com.googlecode.pngtastic.core;\n\nimport java.io.ByteArrayOutputStream;\nimport java.io.IOException;\n\n/**\n * Usage:\n * \n * byte[] bytes = new PngChunkInserter().insert(image, PngChunkInserter.dpi300Chunk);\n * final File exported = 
image.export(toDir + \"/name.png\", bytes);\n * \n *\n * @author ray\n */\npublic class PngChunkInserter {\n\n\tprivate static final byte[] dpi300 = new byte[] { 0, 0, 46, 35, 0, 0, 46, 35, 1 };\n\n\tpublic static final PngChunk dpi300Chunk = new PngChunk(PngChunk.PHYSICAL_PIXEL_DIMENSIONS.getBytes(), dpi300);\n\n\tpublic byte[] insert(PngImage image, PngChunk chunk) throws IOException {\n\t\t// add it after the header chunk\n\t\timage.getChunks().add(1, chunk);\n\n\t\tfinal ByteArrayOutputStream outputBytes = new ByteArrayOutputStream();\n\t\timage.writeDataOutputStream(outputBytes);\n\n\t\treturn outputBytes.toByteArray();\n\t}\n}\n\n## Instruction:\nAdd comment describing dpi conversion\n\n## Code After:\npackage com.googlecode.pngtastic.core;\n\nimport java.io.ByteArrayOutputStream;\nimport java.io.IOException;\n\n/**\n * Usage:\n * \n * byte[] bytes = new PngChunkInserter().insert(image, PngChunkInserter.dpi300Chunk);\n * final File exported = image.export(toDir + \"/name.png\", bytes);\n * \n *\n * @author ray\n */\npublic class PngChunkInserter {\n\n\t/**\n\t * Conversion note: one inch is equal to exactly 0.0254 meters.\n\t * 300dpi = 300 / 0.0254 = 11,811.023622 = 11811 = 0x2E23 = new byte[] { 0, 0, 46, 35 }\n\t * http://comments.gmane.org/gmane.comp.graphics.png.general/2425\n\t */\n\tprivate static final byte[] dpi300 = new byte[] { 0, 0, 46, 35, 0, 0, 46, 35, 1 };\n\n\tpublic static final PngChunk dpi300Chunk = new PngChunk(PngChunk.PHYSICAL_PIXEL_DIMENSIONS.getBytes(), dpi300);\n\n\tpublic byte[] insert(PngImage image, PngChunk chunk) throws IOException {\n\t\t// add it after the header chunk\n\t\timage.getChunks().add(1, chunk);\n\n\t\tfinal ByteArrayOutputStream outputBytes = new ByteArrayOutputStream();\n\t\timage.writeDataOutputStream(outputBytes);\n\n\t\treturn outputBytes.toByteArray();\n\t}\n}\n"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... 
\n\n\n */\npublic class PngChunkInserter {\n\n\t/**\n\t * Conversion note: one inch is equal to exactly 0.0254 meters.\n\t * 300dpi = 300 / 0.0254 = 11,811.023622 = 11811 = 0x2E23 = new byte[] { 0, 0, 46, 35 }\n\t * http://comments.gmane.org/gmane.comp.graphics.png.general/2425\n\t */\n\tprivate static final byte[] dpi300 = new byte[] { 0, 0, 46, 35, 0, 0, 46, 35, 1 };\n\n\tpublic static final PngChunk dpi300Chunk = new PngChunk(PngChunk.PHYSICAL_PIXEL_DIMENSIONS.getBytes(), dpi300);\n\n\n// ... rest of the code ..."}}},{"rowIdx":3728,"cells":{"commit":{"kind":"string","value":"99c06cff63c4dc661b5cffc5a80f79327269684a"},"old_file":{"kind":"string","value":"c/anagrams.c"},"new_file":{"kind":"string","value":"c/anagrams.c"},"old_contents":{"kind":"string","value":"\nvoid swap(char *i, char *j) {\n char saved = *i;\n *i = *j;\n *j = saved;\n}\n\nvoid generate_permutations(char* a, int n) {\n if (n == 0) {\n printf(\"%s\\n\", a);\n } else {\n for (int i = 0; i < n; i++) {\n generate_permutations(a, n-1);\n swap(&a[n % 2 == 0 ? 0 : i], &a[n]);\n }\n generate_permutations(a, n-1);\n }\n}\n\nint main(int argc, const char* argv[]) {\n\n if (argc != 2) {\n fprintf(stderr, \"Need exactly one argument!\\n\");\n return 1;\n }\n\n size_t len = strlen(argv[1]);\n char *word = malloc(len + 1);\n word = strncpy(word, argv[1], len);\n\n generate_permutations(word, len-1);\n\n free(word);\n return 0;\n}\n"},"new_contents":{"kind":"string","value":"\nvoid swap(char *i, char *j) {\n char saved = *i;\n *i = *j;\n *j = saved;\n}\n\nvoid generate_permutations(char* a, int n) {\n if (n == 0) {\n printf(\"%s\\n\", a);\n } else {\n for (int i = 0; i < n; i++) {\n generate_permutations(a, n-1);\n swap(&a[n % 2 == 0 ? 
0 : i], &a[n]);\n }\n generate_permutations(a, n-1);\n }\n}\n\nint main(int argc, const char* argv[]) {\n\n if (argc != 2) {\n fprintf(stderr, \"Exactly one argument is required\\n\");\n return 1;\n }\n\n size_t len = strlen(argv[1]);\n char *word = malloc(len + 1);\n word = strncpy(word, argv[1], len);\n\n generate_permutations(word, len-1);\n\n free(word);\n return 0;\n}\n"},"subject":{"kind":"string","value":"Make the error message consistent with all of the other languages"},"message":{"kind":"string","value":"Make the error message consistent with all of the other languages\n"},"lang":{"kind":"string","value":"C"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"rtoal/polyglot,rtoal/ple,rtoal/ple,rtoal/polyglot,rtoal/ple,rtoal/ple,rtoal/ple,rtoal/ple,rtoal/ple,rtoal/polyglot,rtoal/polyglot,rtoal/polyglot,rtoal/polyglot,rtoal/polyglot,rtoal/ple,rtoal/ple,rtoal/polyglot,rtoal/ple,rtoal/polyglot,rtoal/ple,rtoal/ple,rtoal/polyglot,rtoal/ple,rtoal/polyglot,rtoal/ple,rtoal/polyglot,rtoal/polyglot,rtoal/ple,rtoal/polyglot,rtoal/ple,rtoal/polyglot"},"config":{"kind":"string","value":"c"},"content":{"kind":"string","value":"## Code Before:\n\nvoid swap(char *i, char *j) {\n char saved = *i;\n *i = *j;\n *j = saved;\n}\n\nvoid generate_permutations(char* a, int n) {\n if (n == 0) {\n printf(\"%s\\n\", a);\n } else {\n for (int i = 0; i < n; i++) {\n generate_permutations(a, n-1);\n swap(&a[n % 2 == 0 ? 
0 : i], &a[n]);\n }\n generate_permutations(a, n-1);\n }\n}\n\nint main(int argc, const char* argv[]) {\n\n if (argc != 2) {\n fprintf(stderr, \"Need exactly one argument!\\n\");\n return 1;\n }\n\n size_t len = strlen(argv[1]);\n char *word = malloc(len + 1);\n word = strncpy(word, argv[1], len);\n\n generate_permutations(word, len-1);\n\n free(word);\n return 0;\n}\n\n## Instruction:\nMake the error message consistent with all of the other languages\n\n## Code After:\n\nvoid swap(char *i, char *j) {\n char saved = *i;\n *i = *j;\n *j = saved;\n}\n\nvoid generate_permutations(char* a, int n) {\n if (n == 0) {\n printf(\"%s\\n\", a);\n } else {\n for (int i = 0; i < n; i++) {\n generate_permutations(a, n-1);\n swap(&a[n % 2 == 0 ? 0 : i], &a[n]);\n }\n generate_permutations(a, n-1);\n }\n}\n\nint main(int argc, const char* argv[]) {\n\n if (argc != 2) {\n fprintf(stderr, \"Exactly one argument is required\\n\");\n return 1;\n }\n\n size_t len = strlen(argv[1]);\n char *word = malloc(len + 1);\n word = strncpy(word, argv[1], len);\n\n generate_permutations(word, len-1);\n\n free(word);\n return 0;\n}\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\nint main(int argc, const char* argv[]) {\n\n if (argc != 2) {\n fprintf(stderr, \"Exactly one argument is required\\n\");\n return 1;\n }\n\n\n\n ... 
"}}},{"rowIdx":3729,"cells":{"commit":{"kind":"string","value":"377ff1dabc595fab4af9cb35bb8c071f2872c612"},"old_file":{"kind":"string","value":"agile-dbus-java-interface/src/main/java/iot/agile/object/DeviceStatusType.java"},"new_file":{"kind":"string","value":"agile-dbus-java-interface/src/main/java/iot/agile/object/DeviceStatusType.java"},"old_contents":{"kind":"string","value":"package iot.agile.object;\n\npublic enum DeviceStatusType {\n \t CONNECTED,\n\t DISCONNECTED,\n\t ERROR\n}\n"},"new_contents":{"kind":"string","value":"package iot.agile.object;\n\npublic enum DeviceStatusType {\n \t CONNECTED,\n\t DISCONNECTED,\n\t ON,\n\t 0FF,\n\t ERROR\n}\n"},"subject":{"kind":"string","value":"Add more device status type"},"message":{"kind":"string","value":"Add more device status type\n"},"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"epl-1.0"},"repos":{"kind":"string","value":"muka/agile-api-spec,muka/agile-api-spec,muka/agile-api-spec"},"config":{"kind":"string","value":"java"},"content":{"kind":"string","value":"## Code Before:\npackage iot.agile.object;\n\npublic enum DeviceStatusType {\n \t CONNECTED,\n\t DISCONNECTED,\n\t ERROR\n}\n\n## Instruction:\nAdd more device status type\n\n## Code After:\npackage iot.agile.object;\n\npublic enum DeviceStatusType {\n \t CONNECTED,\n\t DISCONNECTED,\n\t ON,\n\t 0FF,\n\t ERROR\n}\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... \n\n\npublic enum DeviceStatusType {\n \t CONNECTED,\n\t DISCONNECTED,\n\t ON,\n\t 0FF,\n\t ERROR\n}\n\n\n# ... 
rest of the code ..."}}},{"rowIdx":3730,"cells":{"commit":{"kind":"string","value":"243adb38e3d4f61404f4df14a9a5aa18af8638d9"},"old_file":{"kind":"string","value":"app/src/main/java/de/philipphager/disclosure/feature/analyser/app/Apk.java"},"new_file":{"kind":"string","value":"app/src/main/java/de/philipphager/disclosure/feature/analyser/app/Apk.java"},"old_contents":{"kind":"string","value":"package de.philipphager.disclosure.feature.analyser.app;\n\nimport dalvik.system.DexFile;\nimport de.philipphager.disclosure.database.app.model.App;\nimport java.io.IOException;\nimport java.util.Collections;\nimport java.util.List;\nimport rx.Observable;\nimport timber.log.Timber;\n\npublic class Apk {\n private static final int MIN_INDEX = 0;\n private final App app;\n private List sortedClassNames;\n\n public Apk(App app) throws IOException {\n this.app = app;\n load();\n }\n\n private void load() throws IOException {\n DexFile dexFile = new DexFile(app.sourceDir());\n List classNames = Collections.list(dexFile.entries());\n\n sortedClassNames = Observable.from(classNames)\n .distinct()\n .toSortedList()\n .toBlocking()\n .first();\n }\n\n public boolean containsPackage(String packageName) {\n String currentThread = Thread.currentThread().getName();\n Timber.d(\"%s : Searching for package %s in app %s\", currentThread, packageName, app.label());\n\n int index = Collections.binarySearch(sortedClassNames, packageName, (currentItem, key) -> {\n if(currentItem.startsWith(key)) {\n return 0;\n }\n return currentItem.compareTo(key);\n });\n return index >= MIN_INDEX;\n }\n}\n"},"new_contents":{"kind":"string","value":"package de.philipphager.disclosure.feature.analyser.app;\n\nimport dalvik.system.DexFile;\nimport de.philipphager.disclosure.database.app.model.App;\nimport java.io.File;\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.List;\nimport rx.Observable;\nimport timber.log.Timber;\n\npublic class Apk {\n private 
static final int MIN_INDEX = 0;\n private final App app;\n private List sortedClassNames;\n\n public Apk(App app) throws IOException {\n this.app = app;\n load();\n }\n\n private void load() throws IOException {\n File file = new File(app.sourceDir());\n List classNames = new ArrayList<>();\n\n if (file.exists()) {\n DexFile dexFile = new DexFile(app.sourceDir());\n classNames.addAll(Collections.list(dexFile.entries()));\n }\n\n sortedClassNames = Observable.from(classNames)\n .distinct()\n .toSortedList()\n .toBlocking()\n .first();\n }\n\n public boolean containsPackage(String packageName) {\n String currentThread = Thread.currentThread().getName();\n Timber.d(\"%s : Searching for package %s in app %s\", currentThread, packageName, app.label());\n\n int index = Collections.binarySearch(sortedClassNames, packageName, (currentItem, key) -> {\n if (currentItem.startsWith(key)) {\n return 0;\n }\n return currentItem.compareTo(key);\n });\n return index >= MIN_INDEX;\n }\n}\n"},"subject":{"kind":"string","value":"Fix not existing .apks failing"},"message":{"kind":"string","value":"Fix not existing .apks failing\n"},"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"repos":{"kind":"string","value":"philipphager/disclosure-android-app"},"config":{"kind":"string","value":"java"},"content":{"kind":"string","value":"## Code Before:\npackage de.philipphager.disclosure.feature.analyser.app;\n\nimport dalvik.system.DexFile;\nimport de.philipphager.disclosure.database.app.model.App;\nimport java.io.IOException;\nimport java.util.Collections;\nimport java.util.List;\nimport rx.Observable;\nimport timber.log.Timber;\n\npublic class Apk {\n private static final int MIN_INDEX = 0;\n private final App app;\n private List sortedClassNames;\n\n public Apk(App app) throws IOException {\n this.app = app;\n load();\n }\n\n private void load() throws IOException {\n DexFile dexFile = new DexFile(app.sourceDir());\n List classNames = 
Collections.list(dexFile.entries());\n\n sortedClassNames = Observable.from(classNames)\n .distinct()\n .toSortedList()\n .toBlocking()\n .first();\n }\n\n public boolean containsPackage(String packageName) {\n String currentThread = Thread.currentThread().getName();\n Timber.d(\"%s : Searching for package %s in app %s\", currentThread, packageName, app.label());\n\n int index = Collections.binarySearch(sortedClassNames, packageName, (currentItem, key) -> {\n if(currentItem.startsWith(key)) {\n return 0;\n }\n return currentItem.compareTo(key);\n });\n return index >= MIN_INDEX;\n }\n}\n\n## Instruction:\nFix not existing .apks failing\n\n## Code After:\npackage de.philipphager.disclosure.feature.analyser.app;\n\nimport dalvik.system.DexFile;\nimport de.philipphager.disclosure.database.app.model.App;\nimport java.io.File;\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.List;\nimport rx.Observable;\nimport timber.log.Timber;\n\npublic class Apk {\n private static final int MIN_INDEX = 0;\n private final App app;\n private List sortedClassNames;\n\n public Apk(App app) throws IOException {\n this.app = app;\n load();\n }\n\n private void load() throws IOException {\n File file = new File(app.sourceDir());\n List classNames = new ArrayList<>();\n\n if (file.exists()) {\n DexFile dexFile = new DexFile(app.sourceDir());\n classNames.addAll(Collections.list(dexFile.entries()));\n }\n\n sortedClassNames = Observable.from(classNames)\n .distinct()\n .toSortedList()\n .toBlocking()\n .first();\n }\n\n public boolean containsPackage(String packageName) {\n String currentThread = Thread.currentThread().getName();\n Timber.d(\"%s : Searching for package %s in app %s\", currentThread, packageName, app.label());\n\n int index = Collections.binarySearch(sortedClassNames, packageName, (currentItem, key) -> {\n if (currentItem.startsWith(key)) {\n return 0;\n }\n return currentItem.compareTo(key);\n });\n return index >= 
MIN_INDEX;\n }\n}\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... \n\n\n\nimport dalvik.system.DexFile;\nimport de.philipphager.disclosure.database.app.model.App;\nimport java.io.File;\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.List;\nimport rx.Observable;\n\n\n# ... modified code ... \n\n\n }\n\n private void load() throws IOException {\n File file = new File(app.sourceDir());\n List classNames = new ArrayList<>();\n\n if (file.exists()) {\n DexFile dexFile = new DexFile(app.sourceDir());\n classNames.addAll(Collections.list(dexFile.entries()));\n }\n\n sortedClassNames = Observable.from(classNames)\n .distinct()\n\n\n ... \n\n\n Timber.d(\"%s : Searching for package %s in app %s\", currentThread, packageName, app.label());\n\n int index = Collections.binarySearch(sortedClassNames, packageName, (currentItem, key) -> {\n if (currentItem.startsWith(key)) {\n return 0;\n }\n return currentItem.compareTo(key);\n\n\n# ... 
rest of the code ..."}}},{"rowIdx":3731,"cells":{"commit":{"kind":"string","value":"03cab833e3d1a4117adc763620f7a1c78ee2cc95"},"old_file":{"kind":"string","value":"shim-server/src/test/java/org/openmhealth/shim/common/mapper/DataPointMapperUnitTests.java"},"new_file":{"kind":"string","value":"shim-server/src/test/java/org/openmhealth/shim/common/mapper/DataPointMapperUnitTests.java"},"old_contents":{"kind":"string","value":"package org.openmhealth.shim.common.mapper;\n\nimport com.fasterxml.jackson.databind.ObjectMapper;\n\nimport static org.openmhealth.schema.configuration.JacksonConfiguration.newObjectMapper;\n\n\n/**\n * @author Emerson Farrugia\n */\npublic abstract class DataPointMapperUnitTests {\n\n protected static final ObjectMapper objectMapper = newObjectMapper();\n}\n"},"new_contents":{"kind":"string","value":"package org.openmhealth.shim.common.mapper;\n\nimport com.fasterxml.jackson.databind.JsonNode;\nimport com.fasterxml.jackson.databind.ObjectMapper;\nimport org.springframework.core.io.ClassPathResource;\n\nimport java.io.IOException;\nimport java.io.InputStream;\n\nimport static java.lang.String.format;\nimport static org.openmhealth.schema.configuration.JacksonConfiguration.newObjectMapper;\n\n\n/**\n * @author Emerson Farrugia\n */\npublic abstract class DataPointMapperUnitTests {\n\n protected static final ObjectMapper objectMapper = newObjectMapper();\n\n\n /**\n * @param classPathResourceName the name of the class path resource to load\n * @return the contents of the resource as a {@link JsonNode}\n * @throws RuntimeException if the resource can't be loaded\n */\n protected JsonNode asJsonNode(String classPathResourceName) {\n\n ClassPathResource resource = new ClassPathResource(classPathResourceName);\n\n try {\n InputStream resourceInputStream = resource.getInputStream();\n return objectMapper.readTree(resourceInputStream);\n }\n catch (IOException e) {\n throw new RuntimeException(\n format(\"The class path resource '%s' can't be loaded as 
a JSON node.\", classPathResourceName), e);\n }\n }\n}\n"},"subject":{"kind":"string","value":"Add unit test support to load class path resources as JSON nodes"},"message":{"kind":"string","value":"Add unit test support to load class path resources as JSON nodes\n"},"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"repos":{"kind":"string","value":"openmhealth/shimmer,openmhealth/shimmer,openmhealth/shimmer,openmhealth/shimmer"},"config":{"kind":"string","value":"java"},"content":{"kind":"string","value":"## Code Before:\npackage org.openmhealth.shim.common.mapper;\n\nimport com.fasterxml.jackson.databind.ObjectMapper;\n\nimport static org.openmhealth.schema.configuration.JacksonConfiguration.newObjectMapper;\n\n\n/**\n * @author Emerson Farrugia\n */\npublic abstract class DataPointMapperUnitTests {\n\n protected static final ObjectMapper objectMapper = newObjectMapper();\n}\n\n## Instruction:\nAdd unit test support to load class path resources as JSON nodes\n\n## Code After:\npackage org.openmhealth.shim.common.mapper;\n\nimport com.fasterxml.jackson.databind.JsonNode;\nimport com.fasterxml.jackson.databind.ObjectMapper;\nimport org.springframework.core.io.ClassPathResource;\n\nimport java.io.IOException;\nimport java.io.InputStream;\n\nimport static java.lang.String.format;\nimport static org.openmhealth.schema.configuration.JacksonConfiguration.newObjectMapper;\n\n\n/**\n * @author Emerson Farrugia\n */\npublic abstract class DataPointMapperUnitTests {\n\n protected static final ObjectMapper objectMapper = newObjectMapper();\n\n\n /**\n * @param classPathResourceName the name of the class path resource to load\n * @return the contents of the resource as a {@link JsonNode}\n * @throws RuntimeException if the resource can't be loaded\n */\n protected JsonNode asJsonNode(String classPathResourceName) {\n\n ClassPathResource resource = new ClassPathResource(classPathResourceName);\n\n try {\n InputStream resourceInputStream = 
resource.getInputStream();\n return objectMapper.readTree(resourceInputStream);\n }\n catch (IOException e) {\n throw new RuntimeException(\n format(\"The class path resource '%s' can't be loaded as a JSON node.\", classPathResourceName), e);\n }\n }\n}\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... \n\n\npackage org.openmhealth.shim.common.mapper;\n\nimport com.fasterxml.jackson.databind.JsonNode;\nimport com.fasterxml.jackson.databind.ObjectMapper;\nimport org.springframework.core.io.ClassPathResource;\n\nimport java.io.IOException;\nimport java.io.InputStream;\n\nimport static java.lang.String.format;\nimport static org.openmhealth.schema.configuration.JacksonConfiguration.newObjectMapper;\n\n\n\n\n# ... modified code ... \n\n\npublic abstract class DataPointMapperUnitTests {\n\n protected static final ObjectMapper objectMapper = newObjectMapper();\n\n\n /**\n * @param classPathResourceName the name of the class path resource to load\n * @return the contents of the resource as a {@link JsonNode}\n * @throws RuntimeException if the resource can't be loaded\n */\n protected JsonNode asJsonNode(String classPathResourceName) {\n\n ClassPathResource resource = new ClassPathResource(classPathResourceName);\n\n try {\n InputStream resourceInputStream = resource.getInputStream();\n return objectMapper.readTree(resourceInputStream);\n }\n catch (IOException e) {\n throw new RuntimeException(\n format(\"The class path resource '%s' can't be loaded as a JSON node.\", classPathResourceName), e);\n }\n }\n}\n\n\n# ... 
rest of the code ..."}}},{"rowIdx":3732,"cells":{"commit":{"kind":"string","value":"50089e4a1d55414e0ae88b1699eeca0980bcfc15"},"old_file":{"kind":"string","value":"org.metaborg.meta.lang.dynsem.interpreter/src/main/java/org/metaborg/meta/lang/dynsem/interpreter/nodes/matching/LiteralMatchPattern.java"},"new_file":{"kind":"string","value":"org.metaborg.meta.lang.dynsem.interpreter/src/main/java/org/metaborg/meta/lang/dynsem/interpreter/nodes/matching/LiteralMatchPattern.java"},"old_contents":{"kind":"string","value":"package org.metaborg.meta.lang.dynsem.interpreter.nodes.matching;\n\nimport org.metaborg.meta.lang.dynsem.interpreter.utils.SourceSectionUtil;\nimport org.spoofax.interpreter.core.Tools;\nimport org.spoofax.interpreter.terms.IStrategoAppl;\nimport org.spoofax.terms.util.NotImplementedException;\n\nimport com.oracle.truffle.api.frame.FrameDescriptor;\nimport com.oracle.truffle.api.source.SourceSection;\n\npublic abstract class LiteralMatchPattern extends MatchPattern {\n\n\tpublic LiteralMatchPattern(SourceSection source) {\n\t\tsuper(source);\n\t}\n\n\tpublic static LiteralMatchPattern create(IStrategoAppl t, FrameDescriptor fd) {\n\t\tSourceSection source = SourceSectionUtil.fromStrategoTerm(t);\n\t\tif(Tools.hasConstructor(t, \"True\",0)){\n\t\t\treturn new TrueLiteralTermMatchPattern(source);\n\t\t}\n\t\tif(Tools.hasConstructor(t, \"False\",0)){\n\t\t\treturn new FalseLiteralTermMatchPattern(source);\n\t\t}\n\t\tif(Tools.hasConstructor(t, \"Int\", 1)){\n\t\t\treturn new IntLiteralTermMatchPattern(Tools.intAt(t, 0).intValue(), source);\n\t\t}\n\t\tif(Tools.hasConstructor(t, \"String\", 1)){\n\t\t\treturn new StringLiteralTermMatchPattern(Tools.stringAt(t, 0).stringValue(), source);\n\t\t}\n\t\t\n\t\tthrow new NotImplementedException(\"Unsupported literal: \" + t);\n\t}\n}\n"},"new_contents":{"kind":"string","value":"package org.metaborg.meta.lang.dynsem.interpreter.nodes.matching;\n\nimport 
org.metaborg.meta.lang.dynsem.interpreter.utils.SourceSectionUtil;\nimport org.spoofax.interpreter.core.Tools;\nimport org.spoofax.interpreter.terms.IStrategoAppl;\nimport org.spoofax.terms.util.NotImplementedException;\n\nimport com.oracle.truffle.api.frame.FrameDescriptor;\nimport com.oracle.truffle.api.source.SourceSection;\n\npublic abstract class LiteralMatchPattern extends MatchPattern {\n\n\tpublic LiteralMatchPattern(SourceSection source) {\n\t\tsuper(source);\n\t}\n\n\tpublic static LiteralMatchPattern create(IStrategoAppl t, FrameDescriptor fd) {\n\t\tSourceSection source = SourceSectionUtil.fromStrategoTerm(t);\n\t\tif (Tools.hasConstructor(t, \"True\", 0)) {\n\t\t\treturn new TrueLiteralTermMatchPattern(source);\n\t\t}\n\t\tif (Tools.hasConstructor(t, \"False\", 0)) {\n\t\t\treturn new FalseLiteralTermMatchPattern(source);\n\t\t}\n\t\tif (Tools.hasConstructor(t, \"Int\", 1)) {\n\t\t\treturn new IntLiteralTermMatchPattern(Integer.parseInt(Tools.stringAt(t, 0).stringValue()), source);\n\t\t}\n\t\tif (Tools.hasConstructor(t, \"String\", 1)) {\n\t\t\treturn new StringLiteralTermMatchPattern(Tools.stringAt(t, 0).stringValue(), source);\n\t\t}\n\n\t\tthrow new NotImplementedException(\"Unsupported literal: \" + t);\n\t}\n}\n"},"subject":{"kind":"string","value":"Fix literal pattern matching creation to correctly convert integer literals in the specification which are encoded as strings."},"message":{"kind":"string","value":"Fix literal pattern matching creation to correctly convert integer literals in the specification which are encoded as strings.\n"},"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"repos":{"kind":"string","value":"metaborg/dynsem,metaborg/dynsem"},"config":{"kind":"string","value":"java"},"content":{"kind":"string","value":"## Code Before:\npackage org.metaborg.meta.lang.dynsem.interpreter.nodes.matching;\n\nimport org.metaborg.meta.lang.dynsem.interpreter.utils.SourceSectionUtil;\nimport 
org.spoofax.interpreter.core.Tools;\nimport org.spoofax.interpreter.terms.IStrategoAppl;\nimport org.spoofax.terms.util.NotImplementedException;\n\nimport com.oracle.truffle.api.frame.FrameDescriptor;\nimport com.oracle.truffle.api.source.SourceSection;\n\npublic abstract class LiteralMatchPattern extends MatchPattern {\n\n\tpublic LiteralMatchPattern(SourceSection source) {\n\t\tsuper(source);\n\t}\n\n\tpublic static LiteralMatchPattern create(IStrategoAppl t, FrameDescriptor fd) {\n\t\tSourceSection source = SourceSectionUtil.fromStrategoTerm(t);\n\t\tif(Tools.hasConstructor(t, \"True\",0)){\n\t\t\treturn new TrueLiteralTermMatchPattern(source);\n\t\t}\n\t\tif(Tools.hasConstructor(t, \"False\",0)){\n\t\t\treturn new FalseLiteralTermMatchPattern(source);\n\t\t}\n\t\tif(Tools.hasConstructor(t, \"Int\", 1)){\n\t\t\treturn new IntLiteralTermMatchPattern(Tools.intAt(t, 0).intValue(), source);\n\t\t}\n\t\tif(Tools.hasConstructor(t, \"String\", 1)){\n\t\t\treturn new StringLiteralTermMatchPattern(Tools.stringAt(t, 0).stringValue(), source);\n\t\t}\n\t\t\n\t\tthrow new NotImplementedException(\"Unsupported literal: \" + t);\n\t}\n}\n\n## Instruction:\nFix literal pattern matching creation to correctly convert integer literals in the specification which are encoded as strings.\n\n## Code After:\npackage org.metaborg.meta.lang.dynsem.interpreter.nodes.matching;\n\nimport org.metaborg.meta.lang.dynsem.interpreter.utils.SourceSectionUtil;\nimport org.spoofax.interpreter.core.Tools;\nimport org.spoofax.interpreter.terms.IStrategoAppl;\nimport org.spoofax.terms.util.NotImplementedException;\n\nimport com.oracle.truffle.api.frame.FrameDescriptor;\nimport com.oracle.truffle.api.source.SourceSection;\n\npublic abstract class LiteralMatchPattern extends MatchPattern {\n\n\tpublic LiteralMatchPattern(SourceSection source) {\n\t\tsuper(source);\n\t}\n\n\tpublic static LiteralMatchPattern create(IStrategoAppl t, FrameDescriptor fd) {\n\t\tSourceSection source = 
SourceSectionUtil.fromStrategoTerm(t);\n\t\tif (Tools.hasConstructor(t, \"True\", 0)) {\n\t\t\treturn new TrueLiteralTermMatchPattern(source);\n\t\t}\n\t\tif (Tools.hasConstructor(t, \"False\", 0)) {\n\t\t\treturn new FalseLiteralTermMatchPattern(source);\n\t\t}\n\t\tif (Tools.hasConstructor(t, \"Int\", 1)) {\n\t\t\treturn new IntLiteralTermMatchPattern(Integer.parseInt(Tools.stringAt(t, 0).stringValue()), source);\n\t\t}\n\t\tif (Tools.hasConstructor(t, \"String\", 1)) {\n\t\t\treturn new StringLiteralTermMatchPattern(Tools.stringAt(t, 0).stringValue(), source);\n\t\t}\n\n\t\tthrow new NotImplementedException(\"Unsupported literal: \" + t);\n\t}\n}\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... \n\n\n\n\tpublic static LiteralMatchPattern create(IStrategoAppl t, FrameDescriptor fd) {\n\t\tSourceSection source = SourceSectionUtil.fromStrategoTerm(t);\n\t\tif (Tools.hasConstructor(t, \"True\", 0)) {\n\t\t\treturn new TrueLiteralTermMatchPattern(source);\n\t\t}\n\t\tif (Tools.hasConstructor(t, \"False\", 0)) {\n\t\t\treturn new FalseLiteralTermMatchPattern(source);\n\t\t}\n\t\tif (Tools.hasConstructor(t, \"Int\", 1)) {\n\t\t\treturn new IntLiteralTermMatchPattern(Integer.parseInt(Tools.stringAt(t, 0).stringValue()), source);\n\t\t}\n\t\tif (Tools.hasConstructor(t, \"String\", 1)) {\n\t\t\treturn new StringLiteralTermMatchPattern(Tools.stringAt(t, 0).stringValue(), source);\n\t\t}\n\n\t\tthrow new NotImplementedException(\"Unsupported literal: \" + t);\n\t}\n}\n\n\n# ... rest of the code ..."}}},{"rowIdx":3733,"cells":{"commit":{"kind":"string","value":"2ba5f562edb568653574d329a9f1ffbe8b15e7c5"},"old_file":{"kind":"string","value":"tests/test_caching.py"},"new_file":{"kind":"string","value":"tests/test_caching.py"},"old_contents":{"kind":"string","value":"import os\nimport tempfile\n\nfrom . 
import RTRSSTestCase\nfrom rtrss import caching, config\n\n\nclass CachingTestCase(RTRSSTestCase):\n def setUp(self):\n fh, self.filename = tempfile.mkstemp(dir=config.DATA_DIR)\n os.close(fh)\n\n def tearDown(self):\n os.remove(self.filename)\n\n def test_open_for_atomic_write_writes(self):\n test_data = 'test'\n with caching.open_for_atomic_write(self.filename) as f:\n f.write(test_data)\n with open(self.filename) as f:\n data = f.read()\n self.assertEqual(test_data, data)\n\n def test_atomic_write_really_atomic(self):\n test_data = 'test'\n\n with caching.open_for_atomic_write(self.filename) as f:\n f.write(test_data)\n with open(self.filename, 'w') as f1:\n f1.write('this will be overwritten')\n\n with open(self.filename) as f:\n data = f.read()\n\n self.assertEqual(test_data, data)\n"},"new_contents":{"kind":"string","value":"import os\nimport tempfile\n\nfrom . import TempDirTestCase\nfrom rtrss import caching\n\n\nclass CachingTestCase(TempDirTestCase):\n def setUp(self):\n super(CachingTestCase, self).setUp()\n fh, self.filename = tempfile.mkstemp(dir=self.dir.path)\n os.close(fh)\n\n def tearDown(self):\n os.remove(self.filename)\n super(CachingTestCase, self).tearDown()\n\n def test_open_for_atomic_write_writes(self):\n test_data = 'test'\n with caching.open_for_atomic_write(self.filename) as f:\n f.write(test_data)\n with open(self.filename) as f:\n data = f.read()\n self.assertEqual(test_data, data)\n\n def test_atomic_write_really_atomic(self):\n test_data = 'test'\n\n with caching.open_for_atomic_write(self.filename) as f:\n f.write(test_data)\n with open(self.filename, 'w') as f1:\n f1.write('this will be overwritten')\n\n with open(self.filename) as f:\n data = f.read()\n\n self.assertEqual(test_data, data)\n"},"subject":{"kind":"string","value":"Update test case to use new base class"},"message":{"kind":"string","value":"Update test case to use new base 
class\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"apache-2.0"},"repos":{"kind":"string","value":"notapresent/rtrss,notapresent/rtrss,notapresent/rtrss,notapresent/rtrss"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nimport os\nimport tempfile\n\nfrom . import RTRSSTestCase\nfrom rtrss import caching, config\n\n\nclass CachingTestCase(RTRSSTestCase):\n def setUp(self):\n fh, self.filename = tempfile.mkstemp(dir=config.DATA_DIR)\n os.close(fh)\n\n def tearDown(self):\n os.remove(self.filename)\n\n def test_open_for_atomic_write_writes(self):\n test_data = 'test'\n with caching.open_for_atomic_write(self.filename) as f:\n f.write(test_data)\n with open(self.filename) as f:\n data = f.read()\n self.assertEqual(test_data, data)\n\n def test_atomic_write_really_atomic(self):\n test_data = 'test'\n\n with caching.open_for_atomic_write(self.filename) as f:\n f.write(test_data)\n with open(self.filename, 'w') as f1:\n f1.write('this will be overwritten')\n\n with open(self.filename) as f:\n data = f.read()\n\n self.assertEqual(test_data, data)\n\n## Instruction:\nUpdate test case to use new base class\n\n## Code After:\nimport os\nimport tempfile\n\nfrom . 
import TempDirTestCase\nfrom rtrss import caching\n\n\nclass CachingTestCase(TempDirTestCase):\n def setUp(self):\n super(CachingTestCase, self).setUp()\n fh, self.filename = tempfile.mkstemp(dir=self.dir.path)\n os.close(fh)\n\n def tearDown(self):\n os.remove(self.filename)\n super(CachingTestCase, self).tearDown()\n\n def test_open_for_atomic_write_writes(self):\n test_data = 'test'\n with caching.open_for_atomic_write(self.filename) as f:\n f.write(test_data)\n with open(self.filename) as f:\n data = f.read()\n self.assertEqual(test_data, data)\n\n def test_atomic_write_really_atomic(self):\n test_data = 'test'\n\n with caching.open_for_atomic_write(self.filename) as f:\n f.write(test_data)\n with open(self.filename, 'w') as f1:\n f1.write('this will be overwritten')\n\n with open(self.filename) as f:\n data = f.read()\n\n self.assertEqual(test_data, data)\n"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... \n\n\nimport os\nimport tempfile\n\nfrom . import TempDirTestCase\nfrom rtrss import caching\n\n\nclass CachingTestCase(TempDirTestCase):\n def setUp(self):\n super(CachingTestCase, self).setUp()\n fh, self.filename = tempfile.mkstemp(dir=self.dir.path)\n os.close(fh)\n\n def tearDown(self):\n os.remove(self.filename)\n super(CachingTestCase, self).tearDown()\n\n def test_open_for_atomic_write_writes(self):\n test_data = 'test'\n\n\n// ... 
rest of the code ..."}}},{"rowIdx":3734,"cells":{"commit":{"kind":"string","value":"61b5bc8a7e81225a83d195e016bc4adbd7ca1db5"},"old_file":{"kind":"string","value":"setup.py"},"new_file":{"kind":"string","value":"setup.py"},"old_contents":{"kind":"string","value":"from setuptools import setup, find_packages\n\nsetup(\n name='pymediainfo',\n version='2.1.5',\n author='Louis Sautier',\n author_email='sautier.louis@gmail.com',\n url='https://github.com/sbraz/pymediainfo',\n description=\"\"\"A Python wrapper for the mediainfo library.\"\"\",\n packages=find_packages(),\n namespace_packages=[],\n include_package_data=True,\n zip_safe=False,\n license='MIT',\n tests_require=[\"nose\"],\n test_suite=\"nose.collector\",\n classifiers=[\n \"Development Status :: 5 - Production/Stable\",\n \"Programming Language :: Python :: 2.7\",\n \"Programming Language :: Python :: 3.4\",\n \"Programming Language :: Python :: 3.5\",\n \"Operating System :: POSIX :: Linux\",\n \"Operating System :: MacOS :: MacOS X\",\n \"Operating System :: Microsoft :: Windows\",\n \"License :: OSI Approved :: MIT License\",\n ]\n)\n"},"new_contents":{"kind":"string","value":"from setuptools import setup, find_packages\n\nsetup(\n name='pymediainfo',\n version='2.1.5',\n author='Louis Sautier',\n author_email='sautier.louis@gmail.com',\n url='https://github.com/sbraz/pymediainfo',\n description=\"\"\"A Python wrapper for the mediainfo library.\"\"\",\n packages=find_packages(),\n namespace_packages=[],\n include_package_data=True,\n zip_safe=False,\n license='MIT',\n tests_require=[\"nose\"],\n test_suite=\"nose.collector\",\n classifiers=[\n \"Development Status :: 5 - Production/Stable\",\n \"Programming Language :: Python :: 2.6\",\n \"Programming Language :: Python :: 2.7\",\n \"Programming Language :: Python :: 3.4\",\n \"Programming Language :: Python :: 3.5\",\n \"Operating System :: POSIX :: Linux\",\n \"Operating System :: MacOS :: MacOS X\",\n \"Operating System :: Microsoft :: Windows\",\n 
\"License :: OSI Approved :: MIT License\",\n ]\n)\n"},"subject":{"kind":"string","value":"Add Python 2.6 to classifiers"},"message":{"kind":"string","value":"Add Python 2.6 to classifiers\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"paltman/pymediainfo,paltman-archive/pymediainfo"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nfrom setuptools import setup, find_packages\n\nsetup(\n name='pymediainfo',\n version='2.1.5',\n author='Louis Sautier',\n author_email='sautier.louis@gmail.com',\n url='https://github.com/sbraz/pymediainfo',\n description=\"\"\"A Python wrapper for the mediainfo library.\"\"\",\n packages=find_packages(),\n namespace_packages=[],\n include_package_data=True,\n zip_safe=False,\n license='MIT',\n tests_require=[\"nose\"],\n test_suite=\"nose.collector\",\n classifiers=[\n \"Development Status :: 5 - Production/Stable\",\n \"Programming Language :: Python :: 2.7\",\n \"Programming Language :: Python :: 3.4\",\n \"Programming Language :: Python :: 3.5\",\n \"Operating System :: POSIX :: Linux\",\n \"Operating System :: MacOS :: MacOS X\",\n \"Operating System :: Microsoft :: Windows\",\n \"License :: OSI Approved :: MIT License\",\n ]\n)\n\n## Instruction:\nAdd Python 2.6 to classifiers\n\n## Code After:\nfrom setuptools import setup, find_packages\n\nsetup(\n name='pymediainfo',\n version='2.1.5',\n author='Louis Sautier',\n author_email='sautier.louis@gmail.com',\n url='https://github.com/sbraz/pymediainfo',\n description=\"\"\"A Python wrapper for the mediainfo library.\"\"\",\n packages=find_packages(),\n namespace_packages=[],\n include_package_data=True,\n zip_safe=False,\n license='MIT',\n tests_require=[\"nose\"],\n test_suite=\"nose.collector\",\n classifiers=[\n \"Development Status :: 5 - Production/Stable\",\n \"Programming Language :: Python :: 2.6\",\n \"Programming Language :: Python :: 2.7\",\n 
\"Programming Language :: Python :: 3.4\",\n \"Programming Language :: Python :: 3.5\",\n \"Operating System :: POSIX :: Linux\",\n \"Operating System :: MacOS :: MacOS X\",\n \"Operating System :: Microsoft :: Windows\",\n \"License :: OSI Approved :: MIT License\",\n ]\n)\n"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... \n\n\n test_suite=\"nose.collector\",\n classifiers=[\n \"Development Status :: 5 - Production/Stable\",\n \"Programming Language :: Python :: 2.6\",\n \"Programming Language :: Python :: 2.7\",\n \"Programming Language :: Python :: 3.4\",\n \"Programming Language :: Python :: 3.5\",\n\n\n// ... rest of the code ..."}}},{"rowIdx":3735,"cells":{"commit":{"kind":"string","value":"94996a2a78743020945bf708616e202e7b988173"},"old_file":{"kind":"string","value":"aconite-core/src/io/aconite/serializers/GsonBodySerializer.kt"},"new_file":{"kind":"string","value":"aconite-core/src/io/aconite/serializers/GsonBodySerializer.kt"},"old_contents":{"kind":"string","value":"package io.aconite.serializers\n\nimport com.google.gson.Gson\nimport com.google.gson.GsonBuilder\nimport com.google.gson.JsonParseException\nimport io.aconite.BadRequestException\nimport io.aconite.UnsupportedMediaTypeException\nimport io.aconite.BodyBuffer\nimport io.aconite.BodySerializer\nimport io.aconite.Buffer\nimport io.aconite.utils.toJavaType\nimport java.lang.reflect.Type\nimport kotlin.reflect.KAnnotatedElement\nimport kotlin.reflect.KType\n\nclass GsonBodySerializer(val gson: Gson, val type: Type): BodySerializer {\n\n class Factory(val gson: Gson = Gson()): BodySerializer.Factory {\n constructor(builder: GsonBuilder): this(builder.create())\n override fun create(annotations: KAnnotatedElement, type: KType) = GsonBodySerializer(gson, type.toJavaType())\n }\n\n override fun serialize(obj: Any?) = BodyBuffer(\n content = Buffer.wrap(gson.toJson(obj, type)),\n contentType = \"application/json\"\n )\n\n override fun deserialize(body: BodyBuffer): Any? 
{\n if (body.contentType.toLowerCase() != \"application/json\")\n throw UnsupportedMediaTypeException(\"Only 'application/json' media type supported\")\n try {\n return gson.fromJson(body.content.string, type)\n } catch (ex: JsonParseException) {\n throw BadRequestException(\"Bad JSON format. ${ex.message}\")\n }\n }\n}"},"new_contents":{"kind":"string","value":"package io.aconite.serializers\n\nimport com.google.gson.Gson\nimport com.google.gson.GsonBuilder\nimport com.google.gson.JsonParseException\nimport io.aconite.*\nimport io.aconite.utils.toJavaType\nimport java.lang.reflect.Type\nimport kotlin.reflect.KAnnotatedElement\nimport kotlin.reflect.KType\n\nclass GsonBodySerializer(val gson: Gson, val type: Type): BodySerializer {\n\n class Factory(val gson: Gson = Gson()): BodySerializer.Factory {\n constructor(builder: GsonBuilder): this(builder.create())\n override fun create(annotations: KAnnotatedElement, type: KType) = GsonBodySerializer(gson, type.toJavaType())\n }\n\n override fun serialize(obj: Any?) = BodyBuffer(\n content = Buffer.wrap(gson.toJson(obj, type)),\n contentType = \"application/json\"\n )\n\n override fun deserialize(body: BodyBuffer): Any? {\n if (body.content.bytes.isEmpty()) return null\n\n if (body.contentType.toLowerCase() != \"application/json\")\n throw UnsupportedMediaTypeException(\"Only 'application/json' media type supported\")\n\n try {\n return gson.fromJson(body.content.string, type)\n } catch (ex: JsonParseException) {\n throw BadRequestException(\"Bad JSON format. 
${ex.message}\")\n }\n }\n}"},"subject":{"kind":"string","value":"Add support for empty body without contentType specification"},"message":{"kind":"string","value":"Add support for empty body without contentType specification\n"},"lang":{"kind":"string","value":"Kotlin"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"AcapellaSoft/Aconite,AcapellaSoft/Aconite"},"config":{"kind":"string","value":"kotlin"},"content":{"kind":"string","value":"## Code Before:\npackage io.aconite.serializers\n\nimport com.google.gson.Gson\nimport com.google.gson.GsonBuilder\nimport com.google.gson.JsonParseException\nimport io.aconite.BadRequestException\nimport io.aconite.UnsupportedMediaTypeException\nimport io.aconite.BodyBuffer\nimport io.aconite.BodySerializer\nimport io.aconite.Buffer\nimport io.aconite.utils.toJavaType\nimport java.lang.reflect.Type\nimport kotlin.reflect.KAnnotatedElement\nimport kotlin.reflect.KType\n\nclass GsonBodySerializer(val gson: Gson, val type: Type): BodySerializer {\n\n class Factory(val gson: Gson = Gson()): BodySerializer.Factory {\n constructor(builder: GsonBuilder): this(builder.create())\n override fun create(annotations: KAnnotatedElement, type: KType) = GsonBodySerializer(gson, type.toJavaType())\n }\n\n override fun serialize(obj: Any?) = BodyBuffer(\n content = Buffer.wrap(gson.toJson(obj, type)),\n contentType = \"application/json\"\n )\n\n override fun deserialize(body: BodyBuffer): Any? {\n if (body.contentType.toLowerCase() != \"application/json\")\n throw UnsupportedMediaTypeException(\"Only 'application/json' media type supported\")\n try {\n return gson.fromJson(body.content.string, type)\n } catch (ex: JsonParseException) {\n throw BadRequestException(\"Bad JSON format. 
${ex.message}\")\n }\n }\n}\n## Instruction:\nAdd support for empty body without contentType specification\n\n## Code After:\npackage io.aconite.serializers\n\nimport com.google.gson.Gson\nimport com.google.gson.GsonBuilder\nimport com.google.gson.JsonParseException\nimport io.aconite.*\nimport io.aconite.utils.toJavaType\nimport java.lang.reflect.Type\nimport kotlin.reflect.KAnnotatedElement\nimport kotlin.reflect.KType\n\nclass GsonBodySerializer(val gson: Gson, val type: Type): BodySerializer {\n\n class Factory(val gson: Gson = Gson()): BodySerializer.Factory {\n constructor(builder: GsonBuilder): this(builder.create())\n override fun create(annotations: KAnnotatedElement, type: KType) = GsonBodySerializer(gson, type.toJavaType())\n }\n\n override fun serialize(obj: Any?) = BodyBuffer(\n content = Buffer.wrap(gson.toJson(obj, type)),\n contentType = \"application/json\"\n )\n\n override fun deserialize(body: BodyBuffer): Any? {\n if (body.content.bytes.isEmpty()) return null\n\n if (body.contentType.toLowerCase() != \"application/json\")\n throw UnsupportedMediaTypeException(\"Only 'application/json' media type supported\")\n\n try {\n return gson.fromJson(body.content.string, type)\n } catch (ex: JsonParseException) {\n throw BadRequestException(\"Bad JSON format. ${ex.message}\")\n }\n }\n}"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... \n\n\nimport com.google.gson.Gson\nimport com.google.gson.GsonBuilder\nimport com.google.gson.JsonParseException\nimport io.aconite.*\nimport io.aconite.utils.toJavaType\nimport java.lang.reflect.Type\nimport kotlin.reflect.KAnnotatedElement\n\n\n// ... modified code ... \n\n\n )\n\n override fun deserialize(body: BodyBuffer): Any? 
{\n if (body.content.bytes.isEmpty()) return null\n\n if (body.contentType.toLowerCase() != \"application/json\")\n throw UnsupportedMediaTypeException(\"Only 'application/json' media type supported\")\n\n try {\n return gson.fromJson(body.content.string, type)\n } catch (ex: JsonParseException) {\n\n\n// ... rest of the code ..."}}},{"rowIdx":3736,"cells":{"commit":{"kind":"string","value":"2d64c01daebd918c3e6196b1eb3ad62f105c56e0"},"old_file":{"kind":"string","value":"django_google_charts/charts.py"},"new_file":{"kind":"string","value":"django_google_charts/charts.py"},"old_contents":{"kind":"string","value":"import six\nimport json\n\nfrom django.core.urlresolvers import reverse\nfrom django.utils.html import format_html, mark_safe\n\nCHARTS = {}\n\nclass ChartMeta(type):\n def __new__(cls, name, bases, attrs):\n klass = super(ChartMeta, cls).__new__(cls, name, bases, attrs)\n\n if klass.chart_slug:\n CHARTS[klass.chart_slug] = klass\n\n return klass\n\n@six.add_metaclass(ChartMeta)\nclass Chart(object):\n options = {}\n chart_slug = None\n columns = None\n\n def get_data(self):\n raise NotImplementedError\n\n def __str__(self):\n return format_html(\n \"\",\n json.dumps(self.options),\n reverse(\n 'djgc-chart-data',\n args=(self.chart_slug,),\n ),\n )"},"new_contents":{"kind":"string","value":"import six\nimport json\n\nfrom django.core.urlresolvers import reverse\nfrom django.utils.html import format_html, mark_safe\nfrom django.utils.encoding import python_2_unicode_compatible\n\nCHARTS = {}\n\nclass ChartMeta(type):\n def __new__(cls, name, bases, attrs):\n klass = super(ChartMeta, cls).__new__(cls, name, bases, attrs)\n\n if klass.chart_slug:\n CHARTS[klass.chart_slug] = klass\n\n return klass\n\n@six.add_metaclass(ChartMeta)\n@python_2_unicode_compatible\nclass Chart(object):\n options = {}\n chart_slug = None\n columns = None\n\n def get_data(self):\n raise NotImplementedError\n\n def __str__(self):\n return format_html(\n \"\",\n 
json.dumps(self.options),\n reverse(\n 'djgc-chart-data',\n args=(self.chart_slug,),\n ),\n )"},"subject":{"kind":"string","value":"Make this Python 2.x compatible"},"message":{"kind":"string","value":"Make this Python 2.x compatible\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"danpalmer/django-google-charts,danpalmer/django-google-charts"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nimport six\nimport json\n\nfrom django.core.urlresolvers import reverse\nfrom django.utils.html import format_html, mark_safe\n\nCHARTS = {}\n\nclass ChartMeta(type):\n def __new__(cls, name, bases, attrs):\n klass = super(ChartMeta, cls).__new__(cls, name, bases, attrs)\n\n if klass.chart_slug:\n CHARTS[klass.chart_slug] = klass\n\n return klass\n\n@six.add_metaclass(ChartMeta)\nclass Chart(object):\n options = {}\n chart_slug = None\n columns = None\n\n def get_data(self):\n raise NotImplementedError\n\n def __str__(self):\n return format_html(\n \"\",\n json.dumps(self.options),\n reverse(\n 'djgc-chart-data',\n args=(self.chart_slug,),\n ),\n )\n## Instruction:\nMake this Python 2.x compatible\n\n## Code After:\nimport six\nimport json\n\nfrom django.core.urlresolvers import reverse\nfrom django.utils.html import format_html, mark_safe\nfrom django.utils.encoding import python_2_unicode_compatible\n\nCHARTS = {}\n\nclass ChartMeta(type):\n def __new__(cls, name, bases, attrs):\n klass = super(ChartMeta, cls).__new__(cls, name, bases, attrs)\n\n if klass.chart_slug:\n CHARTS[klass.chart_slug] = klass\n\n return klass\n\n@six.add_metaclass(ChartMeta)\n@python_2_unicode_compatible\nclass Chart(object):\n options = {}\n chart_slug = None\n columns = None\n\n def get_data(self):\n raise NotImplementedError\n\n def __str__(self):\n return format_html(\n \"\",\n json.dumps(self.options),\n reverse(\n 'djgc-chart-data',\n args=(self.chart_slug,),\n ),\n 
)"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... \n\n\n\nfrom django.core.urlresolvers import reverse\nfrom django.utils.html import format_html, mark_safe\nfrom django.utils.encoding import python_2_unicode_compatible\n\nCHARTS = {}\n\n\n\n// ... modified code ... \n\n\n return klass\n\n@six.add_metaclass(ChartMeta)\n@python_2_unicode_compatible\nclass Chart(object):\n options = {}\n chart_slug = None\n\n\n// ... rest of the code ..."}}},{"rowIdx":3737,"cells":{"commit":{"kind":"string","value":"a385490e82e3ac3f909fe2b407e692206212748b"},"old_file":{"kind":"string","value":"main/src/main/java/com/bloatit/framework/Offer.java"},"new_file":{"kind":"string","value":"main/src/main/java/com/bloatit/framework/Offer.java"},"old_contents":{"kind":"string","value":"package com.bloatit.framework;\n\nimport java.util.Date;\n\nimport com.bloatit.framework.right.OfferRight;\nimport com.bloatit.framework.right.RightManager.Action;\nimport com.bloatit.model.data.DaoComment;\nimport com.bloatit.model.data.DaoKudosable;\nimport com.bloatit.model.data.DaoOffer;\n\npublic final class Offer extends Kudosable {\n\n private final DaoOffer dao;\n\n public static Offer create(final DaoOffer dao) {\n if (dao == null) {\n return null;\n }\n return new Offer(dao);\n }\n \n public Offer(final DaoOffer dao) {\n super();\n this.dao = dao;\n }\n\n public DaoOffer getDao() {\n return dao;\n }\n\n public Date getDateExpire() {\n return dao.getDateExpire();\n }\n\n public boolean canSetdatExpire() {\n return new OfferRight.DateExpire().canAccess(calculateRole(this), Action.WRITE);\n }\n\n public void setDateExpire(final Date dateExpire) {\n new OfferRight.DateExpire().tryAccess(calculateRole(this), Action.WRITE);\n dao.setDateExpire(dateExpire);\n }\n\n public Demand getDemand() {\n return Demand.create(dao.getDemand());\n }\n\n public Description getDescription() {\n return new Description(dao.getDescription());\n }\n\n @Override\n protected DaoKudosable getDaoKudosable() {\n 
return dao;\n }\n\n}\n"},"new_contents":{"kind":"string","value":"package com.bloatit.framework;\n\nimport java.math.BigDecimal;\nimport java.util.Date;\n\nimport com.bloatit.framework.right.OfferRight;\nimport com.bloatit.framework.right.RightManager.Action;\nimport com.bloatit.model.data.DaoComment;\nimport com.bloatit.model.data.DaoKudosable;\nimport com.bloatit.model.data.DaoOffer;\n\npublic final class Offer extends Kudosable {\n\n private final DaoOffer dao;\n\n public static Offer create(final DaoOffer dao) {\n if (dao == null) {\n return null;\n }\n return new Offer(dao);\n }\n \n public Offer(final DaoOffer dao) {\n super();\n this.dao = dao;\n }\n\n public DaoOffer getDao() {\n return dao;\n }\n\n public Date getDateExpire() {\n return dao.getDateExpire();\n }\n\n public boolean canSetdatExpire() {\n return new OfferRight.DateExpire().canAccess(calculateRole(this), Action.WRITE);\n }\n\n public void setDateExpire(final Date dateExpire) {\n new OfferRight.DateExpire().tryAccess(calculateRole(this), Action.WRITE);\n dao.setDateExpire(dateExpire);\n }\n\n public Demand getDemand() {\n return Demand.create(dao.getDemand());\n }\n\n public Description getDescription() {\n return new Description(dao.getDescription());\n }\n\n public BigDecimal getAmount() {\n return dao.getAmount();\n }\n \n @Override\n protected DaoKudosable getDaoKudosable() {\n return dao;\n }\n\n}\n"},"subject":{"kind":"string","value":"Add get amount method in offer"},"message":{"kind":"string","value":"Add get amount method in offer\n"},"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"agpl-3.0"},"repos":{"kind":"string","value":"BloatIt/bloatit,BloatIt/bloatit,BloatIt/bloatit,BloatIt/bloatit,BloatIt/bloatit"},"config":{"kind":"string","value":"java"},"content":{"kind":"string","value":"## Code Before:\npackage com.bloatit.framework;\n\nimport java.util.Date;\n\nimport com.bloatit.framework.right.OfferRight;\nimport 
com.bloatit.framework.right.RightManager.Action;\nimport com.bloatit.model.data.DaoComment;\nimport com.bloatit.model.data.DaoKudosable;\nimport com.bloatit.model.data.DaoOffer;\n\npublic final class Offer extends Kudosable {\n\n private final DaoOffer dao;\n\n public static Offer create(final DaoOffer dao) {\n if (dao == null) {\n return null;\n }\n return new Offer(dao);\n }\n \n public Offer(final DaoOffer dao) {\n super();\n this.dao = dao;\n }\n\n public DaoOffer getDao() {\n return dao;\n }\n\n public Date getDateExpire() {\n return dao.getDateExpire();\n }\n\n public boolean canSetdatExpire() {\n return new OfferRight.DateExpire().canAccess(calculateRole(this), Action.WRITE);\n }\n\n public void setDateExpire(final Date dateExpire) {\n new OfferRight.DateExpire().tryAccess(calculateRole(this), Action.WRITE);\n dao.setDateExpire(dateExpire);\n }\n\n public Demand getDemand() {\n return Demand.create(dao.getDemand());\n }\n\n public Description getDescription() {\n return new Description(dao.getDescription());\n }\n\n @Override\n protected DaoKudosable getDaoKudosable() {\n return dao;\n }\n\n}\n\n## Instruction:\nAdd get amount method in offer\n\n## Code After:\npackage com.bloatit.framework;\n\nimport java.math.BigDecimal;\nimport java.util.Date;\n\nimport com.bloatit.framework.right.OfferRight;\nimport com.bloatit.framework.right.RightManager.Action;\nimport com.bloatit.model.data.DaoComment;\nimport com.bloatit.model.data.DaoKudosable;\nimport com.bloatit.model.data.DaoOffer;\n\npublic final class Offer extends Kudosable {\n\n private final DaoOffer dao;\n\n public static Offer create(final DaoOffer dao) {\n if (dao == null) {\n return null;\n }\n return new Offer(dao);\n }\n \n public Offer(final DaoOffer dao) {\n super();\n this.dao = dao;\n }\n\n public DaoOffer getDao() {\n return dao;\n }\n\n public Date getDateExpire() {\n return dao.getDateExpire();\n }\n\n public boolean canSetdatExpire() {\n return new 
OfferRight.DateExpire().canAccess(calculateRole(this), Action.WRITE);\n }\n\n public void setDateExpire(final Date dateExpire) {\n new OfferRight.DateExpire().tryAccess(calculateRole(this), Action.WRITE);\n dao.setDateExpire(dateExpire);\n }\n\n public Demand getDemand() {\n return Demand.create(dao.getDemand());\n }\n\n public Description getDescription() {\n return new Description(dao.getDescription());\n }\n\n public BigDecimal getAmount() {\n return dao.getAmount();\n }\n \n @Override\n protected DaoKudosable getDaoKudosable() {\n return dao;\n }\n\n}\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... \n\n\npackage com.bloatit.framework;\n\nimport java.math.BigDecimal;\nimport java.util.Date;\n\nimport com.bloatit.framework.right.OfferRight;\n\n\n# ... modified code ... \n\n\n return new Description(dao.getDescription());\n }\n\n public BigDecimal getAmount() {\n return dao.getAmount();\n }\n \n @Override\n protected DaoKudosable getDaoKudosable() {\n return dao;\n\n\n# ... 
rest of the code ..."}}},{"rowIdx":3738,"cells":{"commit":{"kind":"string","value":"4a98d2ce95d6a082588e4ccc8e04454c26260ca0"},"old_file":{"kind":"string","value":"helpers.py"},"new_file":{"kind":"string","value":"helpers.py"},"old_contents":{"kind":"string","value":"def get_readable_list(passed_list, sep=', ', end=''):\n\toutput = \"\"\n\tif isinstance(passed_list, list):\n\t\tfor i, item in enumerate(passed_list):\n\t\t\tif len(passed_list) is 1:\n\t\t\t\toutput += str(item)\n\n\t\t\telse:\n\t\t\t\tif i is not (len(passed_list) - 1):\n\t\t\t\t\toutput += str(item) + sep\n\t\t\t\telse:\n\t\t\t\t\toutput += str(item)\n\n\telif isinstance(passed_list, dict):\n\t\tfor i, item in enumerate(passed_list.values()):\n\t\t\tif len(passed_list) is 1:\n\t\t\t\toutput += str(item)\n\n\t\t\telse:\n\t\t\t\tif i is not (len(passed_list) - 1):\n\t\t\t\t\toutput += str(item) + sep\n\t\t\t\telse:\n\t\t\t\t\toutput += str(item)\n\n\treturn output + end\n\n\ndef get_list_as_english(passed_list):\n\toutput = \"\"\n\tfor i, item in enumerate(passed_list):\n\t\tif len(passed_list) is 1:\n\t\t\toutput += str(item) + ' '\n\n\t\telif len(passed_list) is 2:\n\t\t\toutput += str(item)\n\t\t\tif i is not (len(passed_list) - 1):\n\t\t\t\toutput += \" and \"\n\t\t\telse:\n\t\t\t\toutput += \"\"\n\n\t\telse:\n\t\t\tif i is not (len(passed_list) - 1):\n\t\t\t\toutput += str(item) + \", \"\n\t\t\telse:\n\t\t\t\toutput += \"and \" + str(item) + \", \"\n\treturn output\n"},"new_contents":{"kind":"string","value":"def get_readable_list(passed_list, sep=', ', end=''):\n\toutput = \"\"\n\tif isinstance(passed_list, list) or isinstance(passed_list, tuple):\n\t\tfor i, item in enumerate(passed_list):\n\t\t\tif len(passed_list) is 1:\n\t\t\t\toutput += str(item)\n\n\t\t\telse:\n\t\t\t\tif i is not (len(passed_list) - 1):\n\t\t\t\t\toutput += str(item) + sep\n\t\t\t\telse:\n\t\t\t\t\toutput += str(item)\n\n\n\telif isinstance(passed_list, dict):\n\t\tfor i, item in 
enumerate(passed_list.values()):\n\t\t\tif len(passed_list) is 1:\n\t\t\t\toutput += str(item)\n\n\t\t\telse:\n\t\t\t\tif i is not (len(passed_list) - 1):\n\t\t\t\t\toutput += str(item) + sep\n\t\t\t\telse:\n\t\t\t\t\toutput += str(item)\n\n\treturn output + end\n\n\ndef get_list_as_english(passed_list):\n\toutput = \"\"\n\tfor i, item in enumerate(passed_list):\n\t\tif len(passed_list) is 1:\n\t\t\toutput += str(item) + ' '\n\n\t\telif len(passed_list) is 2:\n\t\t\toutput += str(item)\n\t\t\tif i is not (len(passed_list) - 1):\n\t\t\t\toutput += \" and \"\n\t\t\telse:\n\t\t\t\toutput += \"\"\n\n\t\telse:\n\t\t\tif i is not (len(passed_list) - 1):\n\t\t\t\toutput += str(item) + \", \"\n\t\t\telse:\n\t\t\t\toutput += \"and \" + str(item) + \", \"\n\treturn output\n"},"subject":{"kind":"string","value":"Make get_readable_list process tuples, too"},"message":{"kind":"string","value":"Make get_readable_list process tuples, too\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"agpl-3.0"},"repos":{"kind":"string","value":"hawkrives/gobbldygook,hawkrives/gobbldygook,hawkrives/gobbldygook"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\ndef get_readable_list(passed_list, sep=', ', end=''):\n\toutput = \"\"\n\tif isinstance(passed_list, list):\n\t\tfor i, item in enumerate(passed_list):\n\t\t\tif len(passed_list) is 1:\n\t\t\t\toutput += str(item)\n\n\t\t\telse:\n\t\t\t\tif i is not (len(passed_list) - 1):\n\t\t\t\t\toutput += str(item) + sep\n\t\t\t\telse:\n\t\t\t\t\toutput += str(item)\n\n\telif isinstance(passed_list, dict):\n\t\tfor i, item in enumerate(passed_list.values()):\n\t\t\tif len(passed_list) is 1:\n\t\t\t\toutput += str(item)\n\n\t\t\telse:\n\t\t\t\tif i is not (len(passed_list) - 1):\n\t\t\t\t\toutput += str(item) + sep\n\t\t\t\telse:\n\t\t\t\t\toutput += str(item)\n\n\treturn output + end\n\n\ndef get_list_as_english(passed_list):\n\toutput = \"\"\n\tfor i, item in 
enumerate(passed_list):\n\t\tif len(passed_list) is 1:\n\t\t\toutput += str(item) + ' '\n\n\t\telif len(passed_list) is 2:\n\t\t\toutput += str(item)\n\t\t\tif i is not (len(passed_list) - 1):\n\t\t\t\toutput += \" and \"\n\t\t\telse:\n\t\t\t\toutput += \"\"\n\n\t\telse:\n\t\t\tif i is not (len(passed_list) - 1):\n\t\t\t\toutput += str(item) + \", \"\n\t\t\telse:\n\t\t\t\toutput += \"and \" + str(item) + \", \"\n\treturn output\n\n## Instruction:\nMake get_readable_list process tuples, too\n\n## Code After:\ndef get_readable_list(passed_list, sep=', ', end=''):\n\toutput = \"\"\n\tif isinstance(passed_list, list) or isinstance(passed_list, tuple):\n\t\tfor i, item in enumerate(passed_list):\n\t\t\tif len(passed_list) is 1:\n\t\t\t\toutput += str(item)\n\n\t\t\telse:\n\t\t\t\tif i is not (len(passed_list) - 1):\n\t\t\t\t\toutput += str(item) + sep\n\t\t\t\telse:\n\t\t\t\t\toutput += str(item)\n\n\n\telif isinstance(passed_list, dict):\n\t\tfor i, item in enumerate(passed_list.values()):\n\t\t\tif len(passed_list) is 1:\n\t\t\t\toutput += str(item)\n\n\t\t\telse:\n\t\t\t\tif i is not (len(passed_list) - 1):\n\t\t\t\t\toutput += str(item) + sep\n\t\t\t\telse:\n\t\t\t\t\toutput += str(item)\n\n\treturn output + end\n\n\ndef get_list_as_english(passed_list):\n\toutput = \"\"\n\tfor i, item in enumerate(passed_list):\n\t\tif len(passed_list) is 1:\n\t\t\toutput += str(item) + ' '\n\n\t\telif len(passed_list) is 2:\n\t\t\toutput += str(item)\n\t\t\tif i is not (len(passed_list) - 1):\n\t\t\t\toutput += \" and \"\n\t\t\telse:\n\t\t\t\toutput += \"\"\n\n\t\telse:\n\t\t\tif i is not (len(passed_list) - 1):\n\t\t\t\toutput += str(item) + \", \"\n\t\t\telse:\n\t\t\t\toutput += \"and \" + str(item) + \", \"\n\treturn output\n"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... 
\n\n\ndef get_readable_list(passed_list, sep=', ', end=''):\n\toutput = \"\"\n\tif isinstance(passed_list, list) or isinstance(passed_list, tuple):\n\t\tfor i, item in enumerate(passed_list):\n\t\t\tif len(passed_list) is 1:\n\t\t\t\toutput += str(item)\n\n\n// ... modified code ... \n\n\n\t\t\t\t\toutput += str(item) + sep\n\t\t\t\telse:\n\t\t\t\t\toutput += str(item)\n\n\n\telif isinstance(passed_list, dict):\n\t\tfor i, item in enumerate(passed_list.values()):\n\n\n// ... rest of the code ..."}}},{"rowIdx":3739,"cells":{"commit":{"kind":"string","value":"7627b8759ab08df562048ec1fa94fe9d69d01374"},"old_file":{"kind":"string","value":"setup.py"},"new_file":{"kind":"string","value":"setup.py"},"old_contents":{"kind":"string","value":"\nfrom setuptools import setup\nfrom exoline import __version__ as version\n\nwith open('requirements.txt') as f:\n required = f.read().splitlines()\n\ntry:\n from collections import OrderedDict\nexcept ImportError:\n required.append('ordereddict==1.1')\n\nsetup(\n name='exoline',\n version=version,\n url = 'http://github.com/dweaver/exoline',\n author = 'Dan Weaver',\n author_email = 'danweaver@exosite.com',\n description = 'Command line interface for Exosite platform.',\n long_description = open('README.md').read() + '\\n\\n' +\n open('HISTORY.md').read(),\n packages=['exoline'],\n package_dir={'exoline': 'exoline'},\n scripts=['bin/exo', 'bin/exoline'],\n keywords=['exosite', 'onep', 'one platform', 'm2m'],\n install_requires=required,\n zip_safe=False,\n )\n"},"new_contents":{"kind":"string","value":"\nfrom setuptools import setup\nfrom exoline import __version__ as version\n\nwith open('requirements.txt') as f:\n required = f.read().splitlines()\n\ntry:\n from collections import OrderedDict\nexcept ImportError:\n required.append('ordereddict>=1.1')\n\ntry:\n import importlib\nexcept ImportError:\n required.append('importlib>=1.0.2')\n\nsetup(\n name='exoline',\n version=version,\n url = 'http://github.com/dweaver/exoline',\n 
author = 'Dan Weaver',\n author_email = 'danweaver@exosite.com',\n description = 'Command line interface for Exosite platform.',\n long_description = open('README.md').read() + '\\n\\n' +\n open('HISTORY.md').read(),\n packages=['exoline'],\n package_dir={'exoline': 'exoline'},\n scripts=['bin/exo', 'bin/exoline'],\n keywords=['exosite', 'onep', 'one platform', 'm2m'],\n install_requires=required,\n zip_safe=False,\n )\n"},"subject":{"kind":"string","value":"Add importlib if not included"},"message":{"kind":"string","value":"Add importlib if not included\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"bsd-3-clause"},"repos":{"kind":"string","value":"tadpol/exoline,azdle/exoline,asolz/exoline,danslimmon/exoline,tadpol/exoline,asolz/exoline,azdle/exoline,danslimmon/exoline"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\n\nfrom setuptools import setup\nfrom exoline import __version__ as version\n\nwith open('requirements.txt') as f:\n required = f.read().splitlines()\n\ntry:\n from collections import OrderedDict\nexcept ImportError:\n required.append('ordereddict==1.1')\n\nsetup(\n name='exoline',\n version=version,\n url = 'http://github.com/dweaver/exoline',\n author = 'Dan Weaver',\n author_email = 'danweaver@exosite.com',\n description = 'Command line interface for Exosite platform.',\n long_description = open('README.md').read() + '\\n\\n' +\n open('HISTORY.md').read(),\n packages=['exoline'],\n package_dir={'exoline': 'exoline'},\n scripts=['bin/exo', 'bin/exoline'],\n keywords=['exosite', 'onep', 'one platform', 'm2m'],\n install_requires=required,\n zip_safe=False,\n )\n\n## Instruction:\nAdd importlib if not included\n\n## Code After:\n\nfrom setuptools import setup\nfrom exoline import __version__ as version\n\nwith open('requirements.txt') as f:\n required = f.read().splitlines()\n\ntry:\n from collections import OrderedDict\nexcept ImportError:\n 
required.append('ordereddict>=1.1')\n\ntry:\n import importlib\nexcept ImportError:\n required.append('importlib>=1.0.2')\n\nsetup(\n name='exoline',\n version=version,\n url = 'http://github.com/dweaver/exoline',\n author = 'Dan Weaver',\n author_email = 'danweaver@exosite.com',\n description = 'Command line interface for Exosite platform.',\n long_description = open('README.md').read() + '\\n\\n' +\n open('HISTORY.md').read(),\n packages=['exoline'],\n package_dir={'exoline': 'exoline'},\n scripts=['bin/exo', 'bin/exoline'],\n keywords=['exosite', 'onep', 'one platform', 'm2m'],\n install_requires=required,\n zip_safe=False,\n )\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... \n\n\ntry:\n from collections import OrderedDict\nexcept ImportError:\n required.append('ordereddict>=1.1')\n\ntry:\n import importlib\nexcept ImportError:\n required.append('importlib>=1.0.2')\n\nsetup(\n name='exoline',\n\n\n# ... rest of the code ..."}}},{"rowIdx":3740,"cells":{"commit":{"kind":"string","value":"d9fc83ec526df1bf732d8f65f445f48f1b764dfe"},"old_file":{"kind":"string","value":"selvbetjening/api/rest/models.py"},"new_file":{"kind":"string","value":"selvbetjening/api/rest/models.py"},"old_contents":{"kind":"string","value":"\nfrom tastypie.authentication import Authentication\nfrom tastypie.resources import ModelResource\n\nfrom provider.oauth2.models import AccessToken\n\nfrom selvbetjening.core.members.models import SUser\n\n\nclass OAuth2Authentication(Authentication):\n\n def is_authenticated(self, request, **kwargs):\n\n access_key = request.REQUEST.get('access_key', None)\n\n if not access_key:\n auth_header_value = request.META.get('HTTP_AUTHORIZATION', None)\n if auth_header_value:\n access_key = auth_header_value.split(' ')[1]\n\n if not access_key:\n return False\n\n try:\n token = AccessToken.objects.get_token(access_key)\n except AccessToken.DoesNotExist:\n return False\n\n request.user = token.user\n return True\n\n\nclass 
AuthenticatedUserResource(ModelResource):\n class Meta:\n queryset = SUser.objects.all()\n resource_name = 'authenticated_user'\n allowed_methods = ['get']\n\n excludes = ['password']\n\n authentication = OAuth2Authentication()\n\n def get_object_list(self, request):\n return super(AuthenticatedUserResource, self).get_object_list(request).filter(pk=1)\n"},"new_contents":{"kind":"string","value":"\nfrom tastypie.authentication import Authentication\nfrom tastypie.resources import ModelResource\n\nfrom provider.oauth2.models import AccessToken\n\nfrom selvbetjening.core.members.models import SUser\n\n\nclass OAuth2Authentication(Authentication):\n\n def is_authenticated(self, request, **kwargs):\n\n access_key = request.REQUEST.get('access_key', None)\n\n if not access_key:\n auth_header_value = request.META.get('HTTP_AUTHORIZATION', None)\n if auth_header_value:\n access_key = auth_header_value.split(' ')[1]\n\n if not access_key:\n return False\n\n try:\n token = AccessToken.objects.get_token(access_key)\n except AccessToken.DoesNotExist:\n return False\n\n request.user = token.user\n return True\n\n\nclass AuthenticatedUserResource(ModelResource):\n class Meta:\n queryset = SUser.objects.all()\n resource_name = 'authenticated_user'\n allowed_methods = ['get']\n\n excludes = ['password']\n\n authentication = OAuth2Authentication()\n\n def get_object_list(self, request):\n return super(AuthenticatedUserResource, self).get_object_list(request).filter(pk=request.user.pk)\n"},"subject":{"kind":"string","value":"Fix mistake returning the wrong authenticated user"},"message":{"kind":"string","value":"Fix mistake returning the wrong authenticated user\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"animekita/selvbetjening,animekita/selvbetjening,animekita/selvbetjening,animekita/selvbetjening"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\n\nfrom 
tastypie.authentication import Authentication\nfrom tastypie.resources import ModelResource\n\nfrom provider.oauth2.models import AccessToken\n\nfrom selvbetjening.core.members.models import SUser\n\n\nclass OAuth2Authentication(Authentication):\n\n def is_authenticated(self, request, **kwargs):\n\n access_key = request.REQUEST.get('access_key', None)\n\n if not access_key:\n auth_header_value = request.META.get('HTTP_AUTHORIZATION', None)\n if auth_header_value:\n access_key = auth_header_value.split(' ')[1]\n\n if not access_key:\n return False\n\n try:\n token = AccessToken.objects.get_token(access_key)\n except AccessToken.DoesNotExist:\n return False\n\n request.user = token.user\n return True\n\n\nclass AuthenticatedUserResource(ModelResource):\n class Meta:\n queryset = SUser.objects.all()\n resource_name = 'authenticated_user'\n allowed_methods = ['get']\n\n excludes = ['password']\n\n authentication = OAuth2Authentication()\n\n def get_object_list(self, request):\n return super(AuthenticatedUserResource, self).get_object_list(request).filter(pk=1)\n\n## Instruction:\nFix mistake returning the wrong authenticated user\n\n## Code After:\n\nfrom tastypie.authentication import Authentication\nfrom tastypie.resources import ModelResource\n\nfrom provider.oauth2.models import AccessToken\n\nfrom selvbetjening.core.members.models import SUser\n\n\nclass OAuth2Authentication(Authentication):\n\n def is_authenticated(self, request, **kwargs):\n\n access_key = request.REQUEST.get('access_key', None)\n\n if not access_key:\n auth_header_value = request.META.get('HTTP_AUTHORIZATION', None)\n if auth_header_value:\n access_key = auth_header_value.split(' ')[1]\n\n if not access_key:\n return False\n\n try:\n token = AccessToken.objects.get_token(access_key)\n except AccessToken.DoesNotExist:\n return False\n\n request.user = token.user\n return True\n\n\nclass AuthenticatedUserResource(ModelResource):\n class Meta:\n queryset = SUser.objects.all()\n resource_name = 
'authenticated_user'\n allowed_methods = ['get']\n\n excludes = ['password']\n\n authentication = OAuth2Authentication()\n\n def get_object_list(self, request):\n return super(AuthenticatedUserResource, self).get_object_list(request).filter(pk=request.user.pk)\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... \n\n\n authentication = OAuth2Authentication()\n\n def get_object_list(self, request):\n return super(AuthenticatedUserResource, self).get_object_list(request).filter(pk=request.user.pk)\n\n\n# ... rest of the code ..."}}},{"rowIdx":3741,"cells":{"commit":{"kind":"string","value":"ec439270a5460a6e1b649f7ceaca35b8d8827407"},"old_file":{"kind":"string","value":"src/main/java/org/jusecase/properties/ui/KeyListCellRenderer.java"},"new_file":{"kind":"string","value":"src/main/java/org/jusecase/properties/ui/KeyListCellRenderer.java"},"old_contents":{"kind":"string","value":"package org.jusecase.properties.ui;\n\nimport org.jusecase.properties.entities.Key;\nimport org.jusecase.properties.entities.KeyPopulation;\n\nimport javax.swing.*;\nimport java.awt.*;\nimport java.util.HashMap;\nimport java.util.Map;\n\npublic class KeyListCellRenderer extends DefaultListCellRenderer {\n Map backgroundColorForPopulation = new HashMap<>();\n\n public KeyListCellRenderer() {\n backgroundColorForPopulation.put(KeyPopulation.Complete, new Color(240, 255, 230));\n backgroundColorForPopulation.put(KeyPopulation.Sparse, new Color(255, 251, 230));\n }\n\n @Override\n public Component getListCellRendererComponent(JList> list, Object value, int index, boolean isSelected, boolean cellHasFocus) {\n Key key = (Key) value;\n JLabel label = (JLabel) super.getListCellRendererComponent(list, value, index, isSelected, cellHasFocus);\n\n if (!isSelected) {\n Color color = backgroundColorForPopulation.get(key.getPopulation());\n if (color != null) {\n label.setBackground(color);\n }\n }\n\n return label;\n }\n}\n"},"new_contents":{"kind":"string","value":"package 
org.jusecase.properties.ui;\n\nimport org.jusecase.properties.entities.Key;\nimport org.jusecase.properties.entities.KeyPopulation;\n\nimport javax.swing.*;\nimport java.awt.*;\nimport java.util.HashMap;\nimport java.util.Map;\n\npublic class KeyListCellRenderer extends DefaultListCellRenderer {\n Map backgroundColorForPopulation = new HashMap<>();\n\n public KeyListCellRenderer() {\n backgroundColorForPopulation.put(KeyPopulation.Sparse, new Color(231, 211, 186));\n }\n\n @Override\n public Component getListCellRendererComponent(JList> list, Object value, int index, boolean isSelected, boolean cellHasFocus) {\n Key key = (Key) value;\n JLabel label = (JLabel) super.getListCellRendererComponent(list, value, index, isSelected, cellHasFocus);\n\n if (!isSelected) {\n Color color = backgroundColorForPopulation.get(key.getPopulation());\n if (color != null) {\n label.setBackground(color);\n }\n }\n\n return label;\n }\n}\n"},"subject":{"kind":"string","value":"Make sparse / complete more distinguishable"},"message":{"kind":"string","value":"Make sparse / complete more distinguishable\n"},"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"repos":{"kind":"string","value":"casid/jusecase-properties-editor"},"config":{"kind":"string","value":"java"},"content":{"kind":"string","value":"## Code Before:\npackage org.jusecase.properties.ui;\n\nimport org.jusecase.properties.entities.Key;\nimport org.jusecase.properties.entities.KeyPopulation;\n\nimport javax.swing.*;\nimport java.awt.*;\nimport java.util.HashMap;\nimport java.util.Map;\n\npublic class KeyListCellRenderer extends DefaultListCellRenderer {\n Map backgroundColorForPopulation = new HashMap<>();\n\n public KeyListCellRenderer() {\n backgroundColorForPopulation.put(KeyPopulation.Complete, new Color(240, 255, 230));\n backgroundColorForPopulation.put(KeyPopulation.Sparse, new Color(255, 251, 230));\n }\n\n @Override\n public Component getListCellRendererComponent(JList> list, 
Object value, int index, boolean isSelected, boolean cellHasFocus) {\n Key key = (Key) value;\n JLabel label = (JLabel) super.getListCellRendererComponent(list, value, index, isSelected, cellHasFocus);\n\n if (!isSelected) {\n Color color = backgroundColorForPopulation.get(key.getPopulation());\n if (color != null) {\n label.setBackground(color);\n }\n }\n\n return label;\n }\n}\n\n## Instruction:\nMake sparse / complete more distinguishable\n\n## Code After:\npackage org.jusecase.properties.ui;\n\nimport org.jusecase.properties.entities.Key;\nimport org.jusecase.properties.entities.KeyPopulation;\n\nimport javax.swing.*;\nimport java.awt.*;\nimport java.util.HashMap;\nimport java.util.Map;\n\npublic class KeyListCellRenderer extends DefaultListCellRenderer {\n Map backgroundColorForPopulation = new HashMap<>();\n\n public KeyListCellRenderer() {\n backgroundColorForPopulation.put(KeyPopulation.Sparse, new Color(231, 211, 186));\n }\n\n @Override\n public Component getListCellRendererComponent(JList> list, Object value, int index, boolean isSelected, boolean cellHasFocus) {\n Key key = (Key) value;\n JLabel label = (JLabel) super.getListCellRendererComponent(list, value, index, isSelected, cellHasFocus);\n\n if (!isSelected) {\n Color color = backgroundColorForPopulation.get(key.getPopulation());\n if (color != null) {\n label.setBackground(color);\n }\n }\n\n return label;\n }\n}\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... \n\n\n Map backgroundColorForPopulation = new HashMap<>();\n\n public KeyListCellRenderer() {\n backgroundColorForPopulation.put(KeyPopulation.Sparse, new Color(231, 211, 186));\n }\n\n @Override\n\n\n# ... 
rest of the code ..."}}},{"rowIdx":3742,"cells":{"commit":{"kind":"string","value":"ee32b2e48acd47f1f1ff96482abf20f3d1818fc4"},"old_file":{"kind":"string","value":"tests/__init__.py"},"new_file":{"kind":"string","value":"tests/__init__.py"},"old_contents":{"kind":"string","value":"import sys\nimport unittest\n\nsys.path.append(\"../pythainlp\")\n\nloader = unittest.TestLoader()\ntestSuite = loader.discover(\"tests\")\ntestRunner = unittest.TextTestRunner(verbosity=1)\ntestRunner.run(testSuite)\n"},"new_contents":{"kind":"string","value":"import sys\nimport unittest\nimport nltk\n\nsys.path.append(\"../pythainlp\")\n\nnltk.download('omw-1.4') # load wordnet\n\nloader = unittest.TestLoader()\ntestSuite = loader.discover(\"tests\")\ntestRunner = unittest.TextTestRunner(verbosity=1)\ntestRunner.run(testSuite)\n"},"subject":{"kind":"string","value":"Add load wordnet to tests"},"message":{"kind":"string","value":"Add load wordnet to tests\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"apache-2.0"},"repos":{"kind":"string","value":"PyThaiNLP/pythainlp"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nimport sys\nimport unittest\n\nsys.path.append(\"../pythainlp\")\n\nloader = unittest.TestLoader()\ntestSuite = loader.discover(\"tests\")\ntestRunner = unittest.TextTestRunner(verbosity=1)\ntestRunner.run(testSuite)\n\n## Instruction:\nAdd load wordnet to tests\n\n## Code After:\nimport sys\nimport unittest\nimport nltk\n\nsys.path.append(\"../pythainlp\")\n\nnltk.download('omw-1.4') # load wordnet\n\nloader = unittest.TestLoader()\ntestSuite = loader.discover(\"tests\")\ntestRunner = unittest.TextTestRunner(verbosity=1)\ntestRunner.run(testSuite)\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... 
\n\n\nimport sys\nimport unittest\nimport nltk\n\nsys.path.append(\"../pythainlp\")\n\nnltk.download('omw-1.4') # load wordnet\n\nloader = unittest.TestLoader()\ntestSuite = loader.discover(\"tests\")\n\n\n# ... rest of the code ..."}}},{"rowIdx":3743,"cells":{"commit":{"kind":"string","value":"21fdc8c2c20a75b5431a2fb549fd9632a27ab410"},"old_file":{"kind":"string","value":"src/main/java/org/apacheextras/camel/examples/rcode/RCodeRunner.java"},"new_file":{"kind":"string","value":"src/main/java/org/apacheextras/camel/examples/rcode/RCodeRunner.java"},"old_contents":{"kind":"string","value":"package org.apacheextras.camel.examples.rcode;\n\nimport org.apache.camel.CamelContext;\nimport org.apache.camel.impl.DefaultCamelContext;\n\nimport java.io.Console;\nimport java.io.File;\nimport java.util.concurrent.TimeUnit;\n\n/**\n * @author Sebastian Rühl\n */\npublic class RCodeRunner {\n\n public static void main(String... args) throws Exception {\n CamelContext camelContext = new DefaultCamelContext();\n File basePath = args.length > 0 ? 
new File(args[0]) : new File(System.getProperty(\"user.home\") + \"/.rcode-example\");\n\n camelContext.addRoutes(new RCodeRouteBuilder(basePath));\n\n camelContext.start();\n Console console = System.console();\n if (console != null) {\n console.printf(\"Please press enter to shutdown route.\");\n console.readLine();\n } else {\n TimeUnit.SECONDS.sleep(5);\n }\n camelContext.stop();\n }\n}\n"},"new_contents":{"kind":"string","value":"/*\n * Copyright 2013 Apache Software Foundation.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage org.apacheextras.camel.examples.rcode;\n\nimport org.apache.camel.CamelContext;\nimport org.apache.camel.impl.DefaultCamelContext;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport java.io.File;\n\n/**\n *\n * @author cemmersb\n */\npublic class RCodeRunner {\n\n private final static Logger LOGGER = LoggerFactory.getLogger(RCodeRunner.class);\n private CamelContext camelContext;\n private RCodeRouteBuilder routeBuilder = null;\n\n public RCodeRunner() {\n try {\n initializeContext();\n } catch (Exception ex) {\n LOGGER.error(\"Unable to initialize context: {}\", ex.getMessage());\n }\n }\n\n private void initializeContext() throws Exception {\n routeBuilder = new RCodeRouteBuilder(new File(System.getProperty(\"user.dir\") + \"./rcode-example/data\"));\n camelContext = new DefaultCamelContext();\n camelContext.addRoutes(routeBuilder);\n camelContext.start();\n }\n\n @Override\n protected void 
finalize() throws Throwable {\n camelContext.stop();\n super.finalize();\n }\n\n public static void main(String... args) throws InterruptedException, Throwable {\n LOGGER.info(\"Starting RCodeRunner.\");\n RCodeRunner rCodeRunner = new RCodeRunner();\n Thread.sleep(1000);\n LOGGER.info(\"Stopping RCodeRunner.\");\n rCodeRunner.finalize();\n }\n}\n"},"subject":{"kind":"string","value":"Revert \"migrate example to rcoderunner\""},"message":{"kind":"string","value":"Revert \"migrate example to rcoderunner\"\n\nThis reverts commit b03405d0726820a1f184e0e80892826d02994763."},"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"repos":{"kind":"string","value":"sruehl/camel-example-rcode,sruehl/camel-example-rcode"},"config":{"kind":"string","value":"java"},"content":{"kind":"string","value":"## Code Before:\npackage org.apacheextras.camel.examples.rcode;\n\nimport org.apache.camel.CamelContext;\nimport org.apache.camel.impl.DefaultCamelContext;\n\nimport java.io.Console;\nimport java.io.File;\nimport java.util.concurrent.TimeUnit;\n\n/**\n * @author Sebastian Rühl\n */\npublic class RCodeRunner {\n\n public static void main(String... args) throws Exception {\n CamelContext camelContext = new DefaultCamelContext();\n File basePath = args.length > 0 ? 
new File(args[0]) : new File(System.getProperty(\"user.home\") + \"/.rcode-example\");\n\n camelContext.addRoutes(new RCodeRouteBuilder(basePath));\n\n camelContext.start();\n Console console = System.console();\n if (console != null) {\n console.printf(\"Please press enter to shutdown route.\");\n console.readLine();\n } else {\n TimeUnit.SECONDS.sleep(5);\n }\n camelContext.stop();\n }\n}\n\n## Instruction:\nRevert \"migrate example to rcoderunner\"\n\nThis reverts commit b03405d0726820a1f184e0e80892826d02994763.\n## Code After:\n/*\n * Copyright 2013 Apache Software Foundation.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage org.apacheextras.camel.examples.rcode;\n\nimport org.apache.camel.CamelContext;\nimport org.apache.camel.impl.DefaultCamelContext;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport java.io.File;\n\n/**\n *\n * @author cemmersb\n */\npublic class RCodeRunner {\n\n private final static Logger LOGGER = LoggerFactory.getLogger(RCodeRunner.class);\n private CamelContext camelContext;\n private RCodeRouteBuilder routeBuilder = null;\n\n public RCodeRunner() {\n try {\n initializeContext();\n } catch (Exception ex) {\n LOGGER.error(\"Unable to initialize context: {}\", ex.getMessage());\n }\n }\n\n private void initializeContext() throws Exception {\n routeBuilder = new RCodeRouteBuilder(new File(System.getProperty(\"user.dir\") + \"./rcode-example/data\"));\n camelContext = new DefaultCamelContext();\n 
camelContext.addRoutes(routeBuilder);\n camelContext.start();\n }\n\n @Override\n protected void finalize() throws Throwable {\n camelContext.stop();\n super.finalize();\n }\n\n public static void main(String... args) throws InterruptedException, Throwable {\n LOGGER.info(\"Starting RCodeRunner.\");\n RCodeRunner rCodeRunner = new RCodeRunner();\n Thread.sleep(1000);\n LOGGER.info(\"Stopping RCodeRunner.\");\n rCodeRunner.finalize();\n }\n}\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... \n\n\n/*\n * Copyright 2013 Apache Software Foundation.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage org.apacheextras.camel.examples.rcode;\n\nimport org.apache.camel.CamelContext;\nimport org.apache.camel.impl.DefaultCamelContext;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport java.io.File;\n\n/**\n *\n * @author cemmersb\n */\npublic class RCodeRunner {\n\n private final static Logger LOGGER = LoggerFactory.getLogger(RCodeRunner.class);\n private CamelContext camelContext;\n private RCodeRouteBuilder routeBuilder = null;\n\n public RCodeRunner() {\n try {\n initializeContext();\n } catch (Exception ex) {\n LOGGER.error(\"Unable to initialize context: {}\", ex.getMessage());\n }\n }\n\n private void initializeContext() throws Exception {\n routeBuilder = new RCodeRouteBuilder(new File(System.getProperty(\"user.dir\") + \"./rcode-example/data\"));\n camelContext = new DefaultCamelContext();\n 
camelContext.addRoutes(routeBuilder);\n camelContext.start();\n }\n\n @Override\n protected void finalize() throws Throwable {\n camelContext.stop();\n super.finalize();\n }\n\n public static void main(String... args) throws InterruptedException, Throwable {\n LOGGER.info(\"Starting RCodeRunner.\");\n RCodeRunner rCodeRunner = new RCodeRunner();\n Thread.sleep(1000);\n LOGGER.info(\"Stopping RCodeRunner.\");\n rCodeRunner.finalize();\n }\n}\n\n\n# ... rest of the code ..."}}},{"rowIdx":3744,"cells":{"commit":{"kind":"string","value":"9ceace60593f133b4f6dfdbd9b6f583362415294"},"old_file":{"kind":"string","value":"src/configuration.py"},"new_file":{"kind":"string","value":"src/configuration.py"},"old_contents":{"kind":"string","value":"import ConfigParser\nimport os\n\ndef class ConfigDlstats(object):\n \"\"\"Cross platform configuration file handler.\n\n This class manages dlstats configuration files, providing\n easy access to the options.\"\"\"\n\n def __init__(self)\n \"\"\"Open the configuration files handler, choosing the right\n path depending on the platform.\"\"\"\n appname = 'dlstats'\n if os.name == 'posix':\n if os.path.isfile(os.environ[\"HOME\"]+'/.'+appname):\n self.filename = os.environ[\"HOME\"]+'/.'+appname\n elif os.path.isfile('/etc/'+appname):\n self.filename = '/etc/'+appname\n else:\n raise FileNotFoundError('No configuration file found.'\n elif os.name == 'mac':\n self.filename = (\"%s/Library/Application Support/%s\" %\n (os.environ[\"HOME\"], appname))\n elif os.name == 'nt':\n self.filename = (\"%s\\Application Data\\%s\" %\n (os.environ[\"HOMEPATH\"], appname))\n else:\n raise UnsupportedOSError(os.name)\n self.config = ConfigParser.ConfigParser()\n self.config.read(self.filename)\n"},"new_contents":{"kind":"string","value":"import ConfigParser\nimport os\n\nclass ConfigDlstats(object):\n \"\"\"Cross platform configuration file handler.\n\n This class manages dlstats configuration files, providing\n easy access to the options.\"\"\"\n\n 
def __init__(self):\n \"\"\"Open the configuration files handler, choosing the right\n path depending on the platform.\"\"\"\n appname = 'dlstats'\n if os.name == 'posix':\n if os.path.isfile(os.environ[\"HOME\"]+'/.'+appname):\n self.filename = os.environ[\"HOME\"]+'/.'+appname\n elif os.path.isfile('/etc/'+appname):\n self.filename = '/etc/'+appname\n else:\n raise FileNotFoundError('No configuration file found.')\n elif os.name == 'mac':\n self.filename = (\"%s/Library/Application Support/%s\" %\n (os.environ[\"HOME\"], appname))\n elif os.name == 'nt':\n self.filename = (\"%s\\Application Data\\%s\" %\n (os.environ[\"HOMEPATH\"], appname))\n else:\n raise UnsupportedOSError(os.name)\n self.config = ConfigParser.ConfigParser()\n self.config.read(self.filename)\n"},"subject":{"kind":"string","value":"Fix a few syntax errors"},"message":{"kind":"string","value":"Fix a few syntax errors\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"agpl-3.0"},"repos":{"kind":"string","value":"MichelJuillard/dlstats,Widukind/dlstats,mmalter/dlstats,mmalter/dlstats,Widukind/dlstats,MichelJuillard/dlstats,mmalter/dlstats,MichelJuillard/dlstats"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nimport ConfigParser\nimport os\n\ndef class ConfigDlstats(object):\n \"\"\"Cross platform configuration file handler.\n\n This class manages dlstats configuration files, providing\n easy access to the options.\"\"\"\n\n def __init__(self)\n \"\"\"Open the configuration files handler, choosing the right\n path depending on the platform.\"\"\"\n appname = 'dlstats'\n if os.name == 'posix':\n if os.path.isfile(os.environ[\"HOME\"]+'/.'+appname):\n self.filename = os.environ[\"HOME\"]+'/.'+appname\n elif os.path.isfile('/etc/'+appname):\n self.filename = '/etc/'+appname\n else:\n raise FileNotFoundError('No configuration file found.'\n elif os.name == 'mac':\n self.filename = (\"%s/Library/Application Support/%s\" 
%\n (os.environ[\"HOME\"], appname))\n elif os.name == 'nt':\n self.filename = (\"%s\\Application Data\\%s\" %\n (os.environ[\"HOMEPATH\"], appname))\n else:\n raise UnsupportedOSError(os.name)\n self.config = ConfigParser.ConfigParser()\n self.config.read(self.filename)\n\n## Instruction:\nFix a few syntax errors\n\n## Code After:\nimport ConfigParser\nimport os\n\nclass ConfigDlstats(object):\n \"\"\"Cross platform configuration file handler.\n\n This class manages dlstats configuration files, providing\n easy access to the options.\"\"\"\n\n def __init__(self):\n \"\"\"Open the configuration files handler, choosing the right\n path depending on the platform.\"\"\"\n appname = 'dlstats'\n if os.name == 'posix':\n if os.path.isfile(os.environ[\"HOME\"]+'/.'+appname):\n self.filename = os.environ[\"HOME\"]+'/.'+appname\n elif os.path.isfile('/etc/'+appname):\n self.filename = '/etc/'+appname\n else:\n raise FileNotFoundError('No configuration file found.')\n elif os.name == 'mac':\n self.filename = (\"%s/Library/Application Support/%s\" %\n (os.environ[\"HOME\"], appname))\n elif os.name == 'nt':\n self.filename = (\"%s\\Application Data\\%s\" %\n (os.environ[\"HOMEPATH\"], appname))\n else:\n raise UnsupportedOSError(os.name)\n self.config = ConfigParser.ConfigParser()\n self.config.read(self.filename)\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\nimport ConfigParser\nimport os\n\nclass ConfigDlstats(object):\n \"\"\"Cross platform configuration file handler.\n\n This class manages dlstats configuration files, providing\n easy access to the options.\"\"\"\n\n def __init__(self):\n \"\"\"Open the configuration files handler, choosing the right\n path depending on the platform.\"\"\"\n appname = 'dlstats'\n if os.name == 'posix':\n if os.path.isfile(os.environ[\"HOME\"]+'/.'+appname):\n\n\n ... 
\n\n\n elif os.path.isfile('/etc/'+appname):\n self.filename = '/etc/'+appname\n else:\n raise FileNotFoundError('No configuration file found.')\n elif os.name == 'mac':\n self.filename = (\"%s/Library/Application Support/%s\" %\n (os.environ[\"HOME\"], appname))\n\n\n ... "}}},{"rowIdx":3745,"cells":{"commit":{"kind":"string","value":"721f6f7916d698f22c9d96ce52cce3773fa514cc"},"old_file":{"kind":"string","value":"uwsgiplugin.py"},"new_file":{"kind":"string","value":"uwsgiplugin.py"},"old_contents":{"kind":"string","value":"import os\nimport os.path\nimport inspect\n\nbase_path = os.path.dirname(inspect.getframeinfo(inspect.currentframe())[0])\n\nNAME = 'rust'\nGCC_LIST = ['rust', '%s/plugin.a' % base_path]\n\nCFLAGS = []\n\nif os.uname()[0] == 'Darwin':\n CFLAGS.append('-mmacosx-version-min=10.7')\n\n\nif os.system(\"rustc -o %s/plugin.a --crate-type staticlib %s/plugin.rs\" % (base_path, base_path)) != 0:\n os._exit(1)\n"},"new_contents":{"kind":"string","value":"import os\nimport os.path\nimport inspect\n\nbase_path = os.path.dirname(inspect.getframeinfo(inspect.currentframe())[0])\n\nNAME = 'rust'\nGCC_LIST = ['rust', '%s/target/release/libuwsgi_rust.a' % base_path]\n\nCFLAGS = []\n\nif os.uname()[0] == 'Darwin':\n CFLAGS.append('-mmacosx-version-min=10.7')\n\nif os.system(\"cargo build --release\") != 0:\n os._exit(1)\n\n# To also build the example app:\n#os.system(\"cargo build --release --manifest-path examples/Cargo.toml\")\n"},"subject":{"kind":"string","value":"Update script to build rust code via cargo"},"message":{"kind":"string","value":"Update script to build rust code via cargo\n\nSigned-off-by: Luca Bruno \n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"unbit/uwsgi-rust,unbit/uwsgi-rust,unbit/uwsgi-rust"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nimport os\nimport os.path\nimport inspect\n\nbase_path = 
os.path.dirname(inspect.getframeinfo(inspect.currentframe())[0])\n\nNAME = 'rust'\nGCC_LIST = ['rust', '%s/plugin.a' % base_path]\n\nCFLAGS = []\n\nif os.uname()[0] == 'Darwin':\n CFLAGS.append('-mmacosx-version-min=10.7')\n\n\nif os.system(\"rustc -o %s/plugin.a --crate-type staticlib %s/plugin.rs\" % (base_path, base_path)) != 0:\n os._exit(1)\n\n## Instruction:\nUpdate script to build rust code via cargo\n\nSigned-off-by: Luca Bruno \n\n## Code After:\nimport os\nimport os.path\nimport inspect\n\nbase_path = os.path.dirname(inspect.getframeinfo(inspect.currentframe())[0])\n\nNAME = 'rust'\nGCC_LIST = ['rust', '%s/target/release/libuwsgi_rust.a' % base_path]\n\nCFLAGS = []\n\nif os.uname()[0] == 'Darwin':\n CFLAGS.append('-mmacosx-version-min=10.7')\n\nif os.system(\"cargo build --release\") != 0:\n os._exit(1)\n\n# To also build the example app:\n#os.system(\"cargo build --release --manifest-path examples/Cargo.toml\")\n"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... \n\n\nbase_path = os.path.dirname(inspect.getframeinfo(inspect.currentframe())[0])\n\nNAME = 'rust'\nGCC_LIST = ['rust', '%s/target/release/libuwsgi_rust.a' % base_path]\n\nCFLAGS = []\n\n\n\n// ... modified code ... \n\n\nif os.uname()[0] == 'Darwin':\n CFLAGS.append('-mmacosx-version-min=10.7')\n\nif os.system(\"cargo build --release\") != 0:\n os._exit(1)\n\n# To also build the example app:\n#os.system(\"cargo build --release --manifest-path examples/Cargo.toml\")\n\n\n// ... 
rest of the code ..."}}},{"rowIdx":3746,"cells":{"commit":{"kind":"string","value":"59b6d5b7e4f337320ea12d381e9cad0aa9c9fa75"},"old_file":{"kind":"string","value":"tests/slice.c"},"new_file":{"kind":"string","value":"tests/slice.c"},"old_contents":{"kind":"string","value":"\n\nint main( void )\n{\n int const xs[] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 };\n\n printf( \"Testing subset slice...\\n\" );\n int const ws[] = { SLICE( xs, 3, 4 ) };\n ASSERT( NELEM( ws ) == 4,\n ws[ 0 ] == xs[ 3 ],\n ws[ 1 ] == xs[ 4 ],\n ws[ 2 ] == xs[ 5 ],\n ws[ 3 ] == xs[ 6 ] );\n\n printf( \"Testing total slice...\\n\" );\n int const ys[] = { SLICE( xs, 0, 6 ) };\n ASSERT( NELEM( ys ) == 6,\n ys[ 0 ] == xs[ 0 ],\n ys[ 1 ] == xs[ 1 ],\n ys[ 2 ] == xs[ 2 ],\n ys[ 3 ] == xs[ 3 ],\n ys[ 4 ] == xs[ 4 ],\n ys[ 5 ] == xs[ 5 ] );\n\n printf( \"Testing empty slice...\\n\" );\n int const zs[] = { 0, SLICE( xs, 2, 0 ) };\n ASSERT( NELEM( zs ) == 1 );\n\n printf( \"SLICE() tests passed.\\n\" );\n}\n\n"},"new_contents":{"kind":"string","value":"\n\nint main( void )\n{\n int const xs[] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 };\n\n printf( \"Testing subset slice...\\n\" );\n int const ws[] = { SLICE( xs, 3, 4 ) };\n ASSERT( NELEM( ws ) == 4,\n ws[ 0 ] == xs[ 3 ],\n ws[ 1 ] == xs[ 4 ],\n ws[ 2 ] == xs[ 5 ],\n ws[ 3 ] == xs[ 6 ] );\n ( void ) ws;\n\n printf( \"Testing total slice...\\n\" );\n int const ys[] = { SLICE( xs, 0, 6 ) };\n ASSERT( NELEM( ys ) == 6,\n ys[ 0 ] == xs[ 0 ],\n ys[ 1 ] == xs[ 1 ],\n ys[ 2 ] == xs[ 2 ],\n ys[ 3 ] == xs[ 3 ],\n ys[ 4 ] == xs[ 4 ],\n ys[ 5 ] == xs[ 5 ] );\n ( void ) ys;\n\n printf( \"Testing empty slice...\\n\" );\n int const zs[] = { 0, SLICE( xs, 2, 0 ) };\n ASSERT( NELEM( zs ) == 1 );\n ( void ) zs;\n\n printf( \"SLICE() tests passed.\\n\" );\n}\n\n"},"subject":{"kind":"string","value":"Fix 'unused variable' warning on fast build"},"message":{"kind":"string","value":"Fix 'unused variable' warning on fast 
build\n"},"lang":{"kind":"string","value":"C"},"license":{"kind":"string","value":"agpl-3.0"},"repos":{"kind":"string","value":"mcinglis/libmacro,mcinglis/libmacro,mcinglis/libmacro"},"config":{"kind":"string","value":"c"},"content":{"kind":"string","value":"## Code Before:\n\n\nint main( void )\n{\n int const xs[] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 };\n\n printf( \"Testing subset slice...\\n\" );\n int const ws[] = { SLICE( xs, 3, 4 ) };\n ASSERT( NELEM( ws ) == 4,\n ws[ 0 ] == xs[ 3 ],\n ws[ 1 ] == xs[ 4 ],\n ws[ 2 ] == xs[ 5 ],\n ws[ 3 ] == xs[ 6 ] );\n\n printf( \"Testing total slice...\\n\" );\n int const ys[] = { SLICE( xs, 0, 6 ) };\n ASSERT( NELEM( ys ) == 6,\n ys[ 0 ] == xs[ 0 ],\n ys[ 1 ] == xs[ 1 ],\n ys[ 2 ] == xs[ 2 ],\n ys[ 3 ] == xs[ 3 ],\n ys[ 4 ] == xs[ 4 ],\n ys[ 5 ] == xs[ 5 ] );\n\n printf( \"Testing empty slice...\\n\" );\n int const zs[] = { 0, SLICE( xs, 2, 0 ) };\n ASSERT( NELEM( zs ) == 1 );\n\n printf( \"SLICE() tests passed.\\n\" );\n}\n\n\n## Instruction:\nFix 'unused variable' warning on fast build\n\n## Code After:\n\n\nint main( void )\n{\n int const xs[] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 };\n\n printf( \"Testing subset slice...\\n\" );\n int const ws[] = { SLICE( xs, 3, 4 ) };\n ASSERT( NELEM( ws ) == 4,\n ws[ 0 ] == xs[ 3 ],\n ws[ 1 ] == xs[ 4 ],\n ws[ 2 ] == xs[ 5 ],\n ws[ 3 ] == xs[ 6 ] );\n ( void ) ws;\n\n printf( \"Testing total slice...\\n\" );\n int const ys[] = { SLICE( xs, 0, 6 ) };\n ASSERT( NELEM( ys ) == 6,\n ys[ 0 ] == xs[ 0 ],\n ys[ 1 ] == xs[ 1 ],\n ys[ 2 ] == xs[ 2 ],\n ys[ 3 ] == xs[ 3 ],\n ys[ 4 ] == xs[ 4 ],\n ys[ 5 ] == xs[ 5 ] );\n ( void ) ys;\n\n printf( \"Testing empty slice...\\n\" );\n int const zs[] = { 0, SLICE( xs, 2, 0 ) };\n ASSERT( NELEM( zs ) == 1 );\n ( void ) zs;\n\n printf( \"SLICE() tests passed.\\n\" );\n}\n\n"},"fuzzy_diff":{"kind":"string","value":" ... 
\n\n\n ws[ 1 ] == xs[ 4 ],\n ws[ 2 ] == xs[ 5 ],\n ws[ 3 ] == xs[ 6 ] );\n ( void ) ws;\n\n printf( \"Testing total slice...\\n\" );\n int const ys[] = { SLICE( xs, 0, 6 ) };\n\n\n ... \n\n\n ys[ 3 ] == xs[ 3 ],\n ys[ 4 ] == xs[ 4 ],\n ys[ 5 ] == xs[ 5 ] );\n ( void ) ys;\n\n printf( \"Testing empty slice...\\n\" );\n int const zs[] = { 0, SLICE( xs, 2, 0 ) };\n ASSERT( NELEM( zs ) == 1 );\n ( void ) zs;\n\n printf( \"SLICE() tests passed.\\n\" );\n}\n\n\n ... "}}},{"rowIdx":3747,"cells":{"commit":{"kind":"string","value":"9a2cc99b068b2aaa572f52b4516852b239577c34"},"old_file":{"kind":"string","value":"dummyserver/server.py"},"new_file":{"kind":"string","value":"dummyserver/server.py"},"old_contents":{"kind":"string","value":"\nimport threading, socket\n\n\n\n\"\"\"\nDummy server using for unit testing\n\"\"\"\n\nclass Server(threading.Thread):\n def __init__(self, handler, host='localhost', port=8021):\n threading.Thread.__init__(self)\n self.handler = handler\n self.host = host\n self.port = port\n self.ready_event = threading.Event()\n self.stop_event = threading.Event()\n\n def run(self):\n sock = socket.socket()\n sock.bind((self.host, self.port))\n sock.listen(0)\n self.ready_event.set()\n self.handler(sock)\n self.stop_event.set()\n sock.close()\n\n def __enter__(self):\n self.start()\n self.ready_event.wait()\n return self.host, self.port\n \n def __exit__(self, exc_type, exc_value, traceback):\n if exc_type is None:\n self.stop_event.wait()\n return False # allow exceptions to propagate \n \n"},"new_contents":{"kind":"string","value":"\nimport threading, socket\n\n\n\n\nclass Server(threading.Thread):\n \"\"\" Dummy server using for unit testing \"\"\"\n\n def __init__(self, handler, host='localhost', port=8021):\n threading.Thread.__init__(self)\n self.handler = handler\n self.host = host\n self.port = port\n self.ready_event = threading.Event()\n self.stop_event = threading.Event()\n\n def run(self):\n sock = socket.socket()\n sock.bind((self.host, 
self.port))\n sock.listen(0)\n self.ready_event.set()\n self.handler(sock)\n self.stop_event.set()\n sock.close()\n\n def __enter__(self):\n self.start()\n self.ready_event.wait()\n return self.host, self.port\n \n def __exit__(self, exc_type, exc_value, traceback):\n if exc_type is None:\n self.stop_event.wait()\n return False # allow exceptions to propagate \n \n"},"subject":{"kind":"string","value":"Put docstring inside Server class"},"message":{"kind":"string","value":"Put docstring inside Server class\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"apache-2.0"},"repos":{"kind":"string","value":"psf/requests"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\n\nimport threading, socket\n\n\n\n\"\"\"\nDummy server using for unit testing\n\"\"\"\n\nclass Server(threading.Thread):\n def __init__(self, handler, host='localhost', port=8021):\n threading.Thread.__init__(self)\n self.handler = handler\n self.host = host\n self.port = port\n self.ready_event = threading.Event()\n self.stop_event = threading.Event()\n\n def run(self):\n sock = socket.socket()\n sock.bind((self.host, self.port))\n sock.listen(0)\n self.ready_event.set()\n self.handler(sock)\n self.stop_event.set()\n sock.close()\n\n def __enter__(self):\n self.start()\n self.ready_event.wait()\n return self.host, self.port\n \n def __exit__(self, exc_type, exc_value, traceback):\n if exc_type is None:\n self.stop_event.wait()\n return False # allow exceptions to propagate \n \n\n## Instruction:\nPut docstring inside Server class\n\n## Code After:\n\nimport threading, socket\n\n\n\n\nclass Server(threading.Thread):\n \"\"\" Dummy server using for unit testing \"\"\"\n\n def __init__(self, handler, host='localhost', port=8021):\n threading.Thread.__init__(self)\n self.handler = handler\n self.host = host\n self.port = port\n self.ready_event = threading.Event()\n self.stop_event = threading.Event()\n\n def run(self):\n sock = 
socket.socket()\n sock.bind((self.host, self.port))\n sock.listen(0)\n self.ready_event.set()\n self.handler(sock)\n self.stop_event.set()\n sock.close()\n\n def __enter__(self):\n self.start()\n self.ready_event.wait()\n return self.host, self.port\n \n def __exit__(self, exc_type, exc_value, traceback):\n if exc_type is None:\n self.stop_event.wait()\n return False # allow exceptions to propagate \n \n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... \n\n\n\n\n\n\nclass Server(threading.Thread):\n \"\"\" Dummy server using for unit testing \"\"\"\n\n def __init__(self, handler, host='localhost', port=8021):\n threading.Thread.__init__(self)\n self.handler = handler\n\n\n# ... rest of the code ..."}}},{"rowIdx":3748,"cells":{"commit":{"kind":"string","value":"44d103359cff312865f409ff34f528f63e441ef4"},"old_file":{"kind":"string","value":"graphapi/views.py"},"new_file":{"kind":"string","value":"graphapi/views.py"},"old_contents":{"kind":"string","value":"from simplekeys.verifier import verify_request\nfrom graphene_django.views import GraphQLView\nfrom django.conf import settings\n\n\nclass KeyedGraphQLView(GraphQLView):\n graphiql_template = \"graphene/graphiql-keyed.html\"\n\n def get_response(self, request, data, show_graphiql=False):\n # check key only if we're not handling a graphiql request\n # if not show_graphiql:\n # error = verify_request(request, 'graphapi')\n # if error:\n # print('graphapi/views: get_response bailed ')\n # return error, error.status_code\n\n return super().get_response(request, data, show_graphiql)\n\n def render_graphiql(self, request, **data):\n data['demo_key'] = settings.GRAPHQL_DEMO_KEY\n return super().render_graphiql(request, **data)\n"},"new_contents":{"kind":"string","value":"from simplekeys.verifier import verify_request\nfrom graphene_django.views import GraphQLView\nfrom django.conf import settings\n\n\nclass KeyedGraphQLView(GraphQLView):\n graphiql_template = \"graphene/graphiql-keyed.html\"\n\n def 
get_response(self, request, data, show_graphiql=False):\n # check key only if we're not handling a graphiql request\n if not show_graphiql:\n error = verify_request(request, 'graphapi')\n if error:\n return error, error.status_code\n\n return super().get_response(request, data, show_graphiql)\n\n def render_graphiql(self, request, **data):\n data['demo_key'] = settings.GRAPHQL_DEMO_KEY\n return super().render_graphiql(request, **data)\n"},"subject":{"kind":"string","value":"Revert \"Reimplement using explicit variable lookup\""},"message":{"kind":"string","value":"Revert \"Reimplement using explicit variable lookup\"\n\nThis reverts commit 94683e6c\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"openstates/openstates.org,openstates/openstates.org,openstates/openstates.org,openstates/openstates.org"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nfrom simplekeys.verifier import verify_request\nfrom graphene_django.views import GraphQLView\nfrom django.conf import settings\n\n\nclass KeyedGraphQLView(GraphQLView):\n graphiql_template = \"graphene/graphiql-keyed.html\"\n\n def get_response(self, request, data, show_graphiql=False):\n # check key only if we're not handling a graphiql request\n # if not show_graphiql:\n # error = verify_request(request, 'graphapi')\n # if error:\n # print('graphapi/views: get_response bailed ')\n # return error, error.status_code\n\n return super().get_response(request, data, show_graphiql)\n\n def render_graphiql(self, request, **data):\n data['demo_key'] = settings.GRAPHQL_DEMO_KEY\n return super().render_graphiql(request, **data)\n\n## Instruction:\nRevert \"Reimplement using explicit variable lookup\"\n\nThis reverts commit 94683e6c\n\n## Code After:\nfrom simplekeys.verifier import verify_request\nfrom graphene_django.views import GraphQLView\nfrom django.conf import settings\n\n\nclass 
KeyedGraphQLView(GraphQLView):\n graphiql_template = \"graphene/graphiql-keyed.html\"\n\n def get_response(self, request, data, show_graphiql=False):\n # check key only if we're not handling a graphiql request\n if not show_graphiql:\n error = verify_request(request, 'graphapi')\n if error:\n return error, error.status_code\n\n return super().get_response(request, data, show_graphiql)\n\n def render_graphiql(self, request, **data):\n data['demo_key'] = settings.GRAPHQL_DEMO_KEY\n return super().render_graphiql(request, **data)\n"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... \n\n\n\n def get_response(self, request, data, show_graphiql=False):\n # check key only if we're not handling a graphiql request\n if not show_graphiql:\n error = verify_request(request, 'graphapi')\n if error:\n return error, error.status_code\n\n return super().get_response(request, data, show_graphiql)\n\n\n\n// ... rest of the code ..."}}},{"rowIdx":3749,"cells":{"commit":{"kind":"string","value":"8e622ec1c83b064ed3d47d41dc49b179dc9a9f54"},"old_file":{"kind":"string","value":"app/src/main/java/coderefactory/net/popmovies/MovieAdapter.java"},"new_file":{"kind":"string","value":"app/src/main/java/coderefactory/net/popmovies/MovieAdapter.java"},"old_contents":{"kind":"string","value":"package coderefactory.net.popmovies;\n\nimport android.app.Activity;\nimport android.support.annotation.NonNull;\nimport android.view.LayoutInflater;\nimport android.view.View;\nimport android.view.ViewGroup;\nimport android.widget.ArrayAdapter;\nimport android.widget.TextView;\n\nimport java.util.List;\n\npublic class MovieAdapter extends ArrayAdapter {\n\n public MovieAdapter(final Activity context, final List movies) {\n super(context, 0, movies);\n }\n\n @NonNull\n @Override\n public View getView(final int position, final View convertView, final ViewGroup parent) {\n final View rootView;\n if (convertView == null) {\n rootView = 
LayoutInflater.from(getContext()).inflate(R.layout.list_item_movie, parent, false);\n } else {\n rootView = convertView;\n }\n final Movie movie = getItem(position);\n\n final TextView titleView = (TextView) rootView.findViewById(R.id.movie_title);\n titleView.setText(movie.getTitle());\n\n final TextView releaseView = (TextView) rootView.findViewById(R.id.movie_released);\n releaseView.setText(String.valueOf(movie.getReleased()));\n\n return rootView;\n }\n}\n"},"new_contents":{"kind":"string","value":"package coderefactory.net.popmovies;\n\nimport android.app.Activity;\nimport android.support.annotation.NonNull;\nimport android.view.LayoutInflater;\nimport android.view.View;\nimport android.view.ViewGroup;\nimport android.widget.ArrayAdapter;\nimport android.widget.TextView;\n\nimport java.util.List;\n\npublic class MovieAdapter extends ArrayAdapter {\n\n private ViewHolder viewHolder;\n\n public MovieAdapter(final Activity context, final List movies) {\n super(context, 0, movies);\n }\n\n @NonNull\n @Override\n public View getView(final int position, final View convertView, final ViewGroup parent) {\n final View itemView;\n if (convertView == null) {\n itemView = LayoutInflater.from(getContext()).inflate(R.layout.list_item_movie, parent, false);\n viewHolder = new ViewHolder(itemView);\n itemView.setTag(viewHolder);\n } else {\n itemView = convertView;\n viewHolder = (ViewHolder) convertView.getTag();\n }\n\n populateView(position);\n\n return itemView;\n }\n\n private void populateView(final int position) {\n final Movie movie = getItem(position);\n\n viewHolder.titleView.setText(movie.getTitle());\n viewHolder.releaseView.setText(String.valueOf(movie.getReleased()));\n }\n\n private static class ViewHolder {\n private final TextView titleView;\n private final TextView releaseView;\n\n private ViewHolder(final View itemView) {\n titleView = (TextView) itemView.findViewById(R.id.movie_title);\n releaseView = (TextView) 
itemView.findViewById(R.id.movie_released);\n }\n }\n}\n"},"subject":{"kind":"string","value":"Introduce ViewHolder pattern into ArrayAdapter"},"message":{"kind":"string","value":"Introduce ViewHolder pattern into ArrayAdapter\n"},"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"jarst/PopMovies"},"config":{"kind":"string","value":"java"},"content":{"kind":"string","value":"## Code Before:\npackage coderefactory.net.popmovies;\n\nimport android.app.Activity;\nimport android.support.annotation.NonNull;\nimport android.view.LayoutInflater;\nimport android.view.View;\nimport android.view.ViewGroup;\nimport android.widget.ArrayAdapter;\nimport android.widget.TextView;\n\nimport java.util.List;\n\npublic class MovieAdapter extends ArrayAdapter {\n\n public MovieAdapter(final Activity context, final List movies) {\n super(context, 0, movies);\n }\n\n @NonNull\n @Override\n public View getView(final int position, final View convertView, final ViewGroup parent) {\n final View rootView;\n if (convertView == null) {\n rootView = LayoutInflater.from(getContext()).inflate(R.layout.list_item_movie, parent, false);\n } else {\n rootView = convertView;\n }\n final Movie movie = getItem(position);\n\n final TextView titleView = (TextView) rootView.findViewById(R.id.movie_title);\n titleView.setText(movie.getTitle());\n\n final TextView releaseView = (TextView) rootView.findViewById(R.id.movie_released);\n releaseView.setText(String.valueOf(movie.getReleased()));\n\n return rootView;\n }\n}\n\n## Instruction:\nIntroduce ViewHolder pattern into ArrayAdapter\n\n## Code After:\npackage coderefactory.net.popmovies;\n\nimport android.app.Activity;\nimport android.support.annotation.NonNull;\nimport android.view.LayoutInflater;\nimport android.view.View;\nimport android.view.ViewGroup;\nimport android.widget.ArrayAdapter;\nimport android.widget.TextView;\n\nimport java.util.List;\n\npublic class MovieAdapter extends 
ArrayAdapter {\n\n private ViewHolder viewHolder;\n\n public MovieAdapter(final Activity context, final List movies) {\n super(context, 0, movies);\n }\n\n @NonNull\n @Override\n public View getView(final int position, final View convertView, final ViewGroup parent) {\n final View itemView;\n if (convertView == null) {\n itemView = LayoutInflater.from(getContext()).inflate(R.layout.list_item_movie, parent, false);\n viewHolder = new ViewHolder(itemView);\n itemView.setTag(viewHolder);\n } else {\n itemView = convertView;\n viewHolder = (ViewHolder) convertView.getTag();\n }\n\n populateView(position);\n\n return itemView;\n }\n\n private void populateView(final int position) {\n final Movie movie = getItem(position);\n\n viewHolder.titleView.setText(movie.getTitle());\n viewHolder.releaseView.setText(String.valueOf(movie.getReleased()));\n }\n\n private static class ViewHolder {\n private final TextView titleView;\n private final TextView releaseView;\n\n private ViewHolder(final View itemView) {\n titleView = (TextView) itemView.findViewById(R.id.movie_title);\n releaseView = (TextView) itemView.findViewById(R.id.movie_released);\n }\n }\n}\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\n\npublic class MovieAdapter extends ArrayAdapter {\n\n private ViewHolder viewHolder;\n\n public MovieAdapter(final Activity context, final List movies) {\n super(context, 0, movies);\n }\n\n\n ... 
\n\n\n @NonNull\n @Override\n public View getView(final int position, final View convertView, final ViewGroup parent) {\n final View itemView;\n if (convertView == null) {\n itemView = LayoutInflater.from(getContext()).inflate(R.layout.list_item_movie, parent, false);\n viewHolder = new ViewHolder(itemView);\n itemView.setTag(viewHolder);\n } else {\n itemView = convertView;\n viewHolder = (ViewHolder) convertView.getTag();\n }\n\n populateView(position);\n\n return itemView;\n }\n\n private void populateView(final int position) {\n final Movie movie = getItem(position);\n\n viewHolder.titleView.setText(movie.getTitle());\n viewHolder.releaseView.setText(String.valueOf(movie.getReleased()));\n }\n\n private static class ViewHolder {\n private final TextView titleView;\n private final TextView releaseView;\n\n private ViewHolder(final View itemView) {\n titleView = (TextView) itemView.findViewById(R.id.movie_title);\n releaseView = (TextView) itemView.findViewById(R.id.movie_released);\n }\n }\n}\n\n\n ... "}}},{"rowIdx":3750,"cells":{"commit":{"kind":"string","value":"0261c895cb41f5caba42ae432b997fd3c941e96f"},"old_file":{"kind":"string","value":"tests.py"},"new_file":{"kind":"string","value":"tests.py"},"old_contents":{"kind":"string","value":"import pytest\nimport cleaner\n\nclass TestTagRemoval():\n def test_span_removal(self):\n text = ('This is some'\n ' dummy text lalalala This is some more dummy text '\n 'test')\n\n expected = ('This is some dummy text lalalala This is some more dummy '\n 'text test')\n\n cleaned = cleaner.remove_superflous_markup(text)\n\n assert cleaned == expected\n"},"new_contents":{"kind":"string","value":"import pytest\nimport cleaner\n\nclass TestTagTools():\n def test_get_pure_tag(self):\n tag1 = '
'\n tag2 = '
'\n tag3 = '
'\n\n assert cleaner.get_pure_tag(tag1) == '
'\n assert cleaner.get_pure_tag(tag2) == '
'\n assert cleaner.get_pure_tag(tag3) == '
'\n"},"subject":{"kind":"string","value":"Add test for getting pure html tag"},"message":{"kind":"string","value":"Add test for getting pure html tag\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"jamalmoir/blogger_html_cleaner"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nimport pytest\nimport cleaner\n\nclass TestTagRemoval():\n def test_span_removal(self):\n text = ('This is some'\n ' dummy text lalalala This is some more dummy text '\n 'test')\n\n expected = ('This is some dummy text lalalala This is some more dummy '\n 'text test')\n\n cleaned = cleaner.remove_superflous_markup(text)\n\n assert cleaned == expected\n\n## Instruction:\nAdd test for getting pure html tag\n\n## Code After:\nimport pytest\nimport cleaner\n\nclass TestTagTools():\n def test_get_pure_tag(self):\n tag1 = '
/*
* Copyright 2014-2019 JetBrains s.r.o and contributors. Use of this source code is governed by the Apache 2.0 license.
*/
package io.ktor.client.features
import io.ktor.client.*
import io.ktor.util.*
internal val FEATURE_INSTALLED_LIST = AttributeKey<Attributes>("ApplicationFeatureRegistry")
/**
* Base interface representing a [HttpClient] feature.
*/
interface HttpClientFeature<out TConfig : Any, TFeature : Any> {
/**
* The [AttributeKey] for this feature.
*/
val key: AttributeKey<TFeature>
/**
* Builds a [TFeature] by calling the [block] with a [TConfig] config instance as receiver.
*/
fun prepare(block: TConfig.() -> Unit = {}): TFeature
/**
* Installs the [feature] class for a [HttpClient] defined at [scope].
*/
fun install(feature: TFeature, scope: HttpClient)
}
/**
* Try to get a [feature] installed in this client. Returns `null` if the feature was not previously installed.
*/
fun <B : Any, F : Any> HttpClient.feature(feature: HttpClientFeature<B, F>): F? =
attributes.getOrNull(FEATURE_INSTALLED_LIST)?.getOrNull(feature.key)
/*
* Copyright 2014-2019 JetBrains s.r.o and contributors. Use of this source code is governed by the Apache 2.0 license.
*/
package io.ktor.client.features
import io.ktor.client.*
import io.ktor.util.*
import kotlin.native.concurrent.*
@SharedImmutable
internal val FEATURE_INSTALLED_LIST = AttributeKey<Attributes>("ApplicationFeatureRegistry")
/**
* Base interface representing a [HttpClient] feature.
*/
interface HttpClientFeature<out TConfig : Any, TFeature : Any> {
/**
* The [AttributeKey] for this feature.
*/
val key: AttributeKey<TFeature>
/**
* Builds a [TFeature] by calling the [block] with a [TConfig] config instance as receiver.
*/
fun prepare(block: TConfig.() -> Unit = {}): TFeature
/**
* Installs the [feature] class for a [HttpClient] defined at [scope].
*/
fun install(feature: TFeature, scope: HttpClient)
}
/**
* Try to get a [feature] installed in this client. Returns `null` if the feature was not previously installed.
*/
fun <B : Any, F : Any> HttpClient.feature(feature: HttpClientFeature<B, F>): F? =
attributes.getOrNull(FEATURE_INSTALLED_LIST)?.getOrNull(feature.key)
Make attribute key available on native
Make attribute key available on native
Kotlin
apache-2.0
ktorio/ktor,ktorio/ktor,ktorio/ktor,ktorio/ktor
kotlin
## Code Before:
/*
* Copyright 2014-2019 JetBrains s.r.o and contributors. Use of this source code is governed by the Apache 2.0 license.
*/
package io.ktor.client.features
import io.ktor.client.*
import io.ktor.util.*
internal val FEATURE_INSTALLED_LIST = AttributeKey<Attributes>("ApplicationFeatureRegistry")
/**
* Base interface representing a [HttpClient] feature.
*/
interface HttpClientFeature<out TConfig : Any, TFeature : Any> {
/**
* The [AttributeKey] for this feature.
*/
val key: AttributeKey<TFeature>
/**
* Builds a [TFeature] by calling the [block] with a [TConfig] config instance as receiver.
*/
fun prepare(block: TConfig.() -> Unit = {}): TFeature
/**
* Installs the [feature] class for a [HttpClient] defined at [scope].
*/
fun install(feature: TFeature, scope: HttpClient)
}
/**
* Try to get a [feature] installed in this client. Returns `null` if the feature was not previously installed.
*/
fun <B : Any, F : Any> HttpClient.feature(feature: HttpClientFeature<B, F>): F? =
attributes.getOrNull(FEATURE_INSTALLED_LIST)?.getOrNull(feature.key)
## Instruction:
Make attribute key available on native
## Code After:
/*
* Copyright 2014-2019 JetBrains s.r.o and contributors. Use of this source code is governed by the Apache 2.0 license.
*/
package io.ktor.client.features
import io.ktor.client.*
import io.ktor.util.*
import kotlin.native.concurrent.*
@SharedImmutable
internal val FEATURE_INSTALLED_LIST = AttributeKey<Attributes>("ApplicationFeatureRegistry")
/**
* Base interface representing a [HttpClient] feature.
*/
interface HttpClientFeature<out TConfig : Any, TFeature : Any> {
/**
* The [AttributeKey] for this feature.
*/
val key: AttributeKey<TFeature>
/**
* Builds a [TFeature] by calling the [block] with a [TConfig] config instance as receiver.
*/
fun prepare(block: TConfig.() -> Unit = {}): TFeature
/**
* Installs the [feature] class for a [HttpClient] defined at [scope].
*/
fun install(feature: TFeature, scope: HttpClient)
}
/**
* Try to get a [feature] installed in this client. Returns `null` if the feature was not previously installed.
*/
fun <B : Any, F : Any> HttpClient.feature(feature: HttpClientFeature<B, F>): F? =
attributes.getOrNull(FEATURE_INSTALLED_LIST)?.getOrNull(feature.key)
# ... existing code ...
import io.ktor.client.*
import io.ktor.util.*
import kotlin.native.concurrent.*
@SharedImmutable
internal val FEATURE_INSTALLED_LIST = AttributeKey<Attributes>("ApplicationFeatureRegistry")
/**
# ... rest of the code ...
## Code Before:
package com.ridi.books.helper.text
import java.util.Calendar
fun Long.elapsedTimeString(): String {
var elapsed = System.currentTimeMillis() - this
val second = 1000
val minute = second * 60
val hour = minute * 60
val day = (hour * 24).toLong()
val week = day * 7
val suffix: String
if (elapsed / week > 3) {
val cal = Calendar.getInstance()
cal.timeInMillis = this
return "${cal.get(Calendar.YEAR)}.${cal.get(Calendar.MONDAY) + 1}.${cal.get(Calendar.DAY_OF_MONTH)}"
} else if (elapsed / week > 0) {
suffix = "주 전"
elapsed /= week
} else if (elapsed / day > 0) {
suffix = "일 전"
elapsed /= day
} else if (elapsed / hour > 0) {
suffix = "시간 전"
elapsed /= hour
} else if (elapsed / minute > 0) {
suffix = "분 전"
elapsed /= minute
} else if (elapsed / second > 10) {
suffix = "초 전"
elapsed /= second
} else {
return "방금 전"
}
return "$elapsed$suffix"
}
## Instruction:
Update elapsedTimeString - add ‘tooMuchElasped’ parameter that returned if more than 3 weeks have elapsed
## Code After:
package com.ridi.books.helper.text
fun Long.elapsedTimeString(tooMuchElapsed: String): String {
var elapsed = System.currentTimeMillis() - this
val second = 1000
val minute = second * 60
val hour = minute * 60
val day = (hour * 24).toLong()
val week = day * 7
val suffix: String
if (elapsed / week > 3) {
return tooMuchElapsed
} else if (elapsed / week > 0) {
suffix = "주 전"
elapsed /= week
} else if (elapsed / day > 0) {
suffix = "일 전"
elapsed /= day
} else if (elapsed / hour > 0) {
suffix = "시간 전"
elapsed /= hour
} else if (elapsed / minute > 0) {
suffix = "분 전"
elapsed /= minute
} else if (elapsed / second > 10) {
suffix = "초 전"
elapsed /= second
} else {
return "방금 전"
}
return "$elapsed$suffix"
}
...
package com.ridi.books.helper.text
fun Long.elapsedTimeString(tooMuchElapsed: String): String {
var elapsed = System.currentTimeMillis() - this
val second = 1000
val minute = second * 60
...
val suffix: String
if (elapsed / week > 3) {
return tooMuchElapsed
} else if (elapsed / week > 0) {
suffix = "주 전"
elapsed /= week
...
aed18a3f9cbaf1eae1d7066b438437446513d912
sphinxcontrib/traceables/__init__.py
sphinxcontrib/traceables/__init__.py
import infrastructure
import display
import traceables
import matrix
import graph
# ==========================================================================
# Setup and register extension
def setup(app):
# Perform import within this function to avoid an import circle.
from sphinxcontrib import traceables
# Allow extension parts to set themselves up.
traceables.infrastructure.setup(app)
traceables.traceables.setup(app)
traceables.matrix.setup(app)
traceables.graph.setup(app)
# Register business logic of extension parts. This is done explicitly
# here to ensure correct ordering during processing.
traceables.infrastructure.ProcessorManager.register_processor_classes([
traceables.traceables.RelationshipsProcessor,
traceables.display.TraceableDisplayProcessor,
traceables.traceables.XrefProcessor,
traceables.matrix.ListProcessor,
traceables.matrix.MatrixProcessor,
traceables.graph.GraphProcessor,
])
return {"version": "0.0"}
import infrastructure
import display
import traceables
import matrix
import graph
# ==========================================================================
# Setup and register extension
def setup(app):
# Perform import within this function to avoid an import circle.
from sphinxcontrib import traceables
# Allow extension parts to set themselves up.
traceables.infrastructure.setup(app)
traceables.display.setup(app)
traceables.traceables.setup(app)
traceables.matrix.setup(app)
traceables.graph.setup(app)
# Register business logic of extension parts. This is done explicitly
# here to ensure correct ordering during processing.
traceables.infrastructure.ProcessorManager.register_processor_classes([
traceables.traceables.RelationshipsProcessor,
traceables.display.TraceableDisplayProcessor,
traceables.traceables.XrefProcessor,
traceables.matrix.ListProcessor,
traceables.matrix.MatrixProcessor,
traceables.graph.GraphProcessor,
])
return {"version": "0.0"}
Fix missing call to display.setup()
Fix missing call to display.setup()
Python
apache-2.0
t4ngo/sphinxcontrib-traceables
python
## Code Before:
import infrastructure
import display
import traceables
import matrix
import graph
# ==========================================================================
# Setup and register extension
def setup(app):
# Perform import within this function to avoid an import circle.
from sphinxcontrib import traceables
# Allow extension parts to set themselves up.
traceables.infrastructure.setup(app)
traceables.traceables.setup(app)
traceables.matrix.setup(app)
traceables.graph.setup(app)
# Register business logic of extension parts. This is done explicitly
# here to ensure correct ordering during processing.
traceables.infrastructure.ProcessorManager.register_processor_classes([
traceables.traceables.RelationshipsProcessor,
traceables.display.TraceableDisplayProcessor,
traceables.traceables.XrefProcessor,
traceables.matrix.ListProcessor,
traceables.matrix.MatrixProcessor,
traceables.graph.GraphProcessor,
])
return {"version": "0.0"}
## Instruction:
Fix missing call to display.setup()
## Code After:
import infrastructure
import display
import traceables
import matrix
import graph
# ==========================================================================
# Setup and register extension
def setup(app):
# Perform import within this function to avoid an import circle.
from sphinxcontrib import traceables
# Allow extension parts to set themselves up.
traceables.infrastructure.setup(app)
traceables.display.setup(app)
traceables.traceables.setup(app)
traceables.matrix.setup(app)
traceables.graph.setup(app)
# Register business logic of extension parts. This is done explicitly
# here to ensure correct ordering during processing.
traceables.infrastructure.ProcessorManager.register_processor_classes([
traceables.traceables.RelationshipsProcessor,
traceables.display.TraceableDisplayProcessor,
traceables.traceables.XrefProcessor,
traceables.matrix.ListProcessor,
traceables.matrix.MatrixProcessor,
traceables.graph.GraphProcessor,
])
return {"version": "0.0"}
// ... existing code ...
# Allow extension parts to set themselves up.
traceables.infrastructure.setup(app)
traceables.display.setup(app)
traceables.traceables.setup(app)
traceables.matrix.setup(app)
traceables.graph.setup(app)
// ... rest of the code ...
8befea283830f76dfa41cfd10d7eb916c68f7ef9
intern/views.py
intern/views.py
from django.contrib.auth.decorators import login_required
from django.shortcuts import render
from filer.models import File
from filer.models import Folder
@login_required
def documents(request):
files = File.objects.all()
folders = Folder.objects.all()
#print(files[0])
return render(request, 'intern/documents.html', {'files': files, 'folders': folders})
from django.contrib.auth.decorators import login_required
from django.shortcuts import render
from filer.models import File
from filer.models import Folder
@login_required
def documents(request):
files = File.objects.all().order_by("-modified_at")
folders = Folder.objects.all()
#print(files[0])
return render(request, 'intern/documents.html', {'files': files, 'folders': folders})
import os
from cclib.parser import Jaguar
os.chdir(os.path.join("..","data","Jaguar","basicJaguar"))
os.chdir("eg01")
for file in ["dvb_gopt.out"]:
t = Jaguar(file)
t.parse()
print t.moenergies[0,:]
print t.homos[0]
print t.moenergies[0,t.homos[0]]
import os
from cclib.parser import Jaguar
os.chdir(os.path.join("..","data","Jaguar","basicJaguar"))
files = [ ["eg01","dvb_gopt.out"],
["eg02","dvb_sp.out"],
["eg03","dvb_ir.out"],
["eg06","dvb_un_sp.out"] ]
for f in files:
t = Jaguar(os.path.join(f[0],f[1]))
t.parse()
if f[0]!="eg03":
print t.scfvalues
Test the parsing of all of the uploaded Jaguar files
Test the parsing of all of the uploaded Jaguar files
git-svn-id: d468cea6ffe92bc1eb1f3bde47ad7e70b065426a@75 5acbf244-8a03-4a8b-a19b-0d601add4d27
Python
lgpl-2.1
Clyde-fare/cclib_bak,Clyde-fare/cclib_bak
python
## Code Before:
import os
from cclib.parser import Jaguar
os.chdir(os.path.join("..","data","Jaguar","basicJaguar"))
os.chdir("eg01")
for file in ["dvb_gopt.out"]:
t = Jaguar(file)
t.parse()
print t.moenergies[0,:]
print t.homos[0]
print t.moenergies[0,t.homos[0]]
## Instruction:
Test the parsing of all of the uploaded Jaguar files
git-svn-id: d468cea6ffe92bc1eb1f3bde47ad7e70b065426a@75 5acbf244-8a03-4a8b-a19b-0d601add4d27
## Code After:
import os
from cclib.parser import Jaguar
os.chdir(os.path.join("..","data","Jaguar","basicJaguar"))
files = [ ["eg01","dvb_gopt.out"],
["eg02","dvb_sp.out"],
["eg03","dvb_ir.out"],
["eg06","dvb_un_sp.out"] ]
for f in files:
t = Jaguar(os.path.join(f[0],f[1]))
t.parse()
if f[0]!="eg03":
print t.scfvalues
...
os.chdir(os.path.join("..","data","Jaguar","basicJaguar"))
files = [ ["eg01","dvb_gopt.out"],
["eg02","dvb_sp.out"],
["eg03","dvb_ir.out"],
["eg06","dvb_un_sp.out"] ]
for f in files:
t = Jaguar(os.path.join(f[0],f[1]))
t.parse()
if f[0]!="eg03":
print t.scfvalues
...
6a508d01fa3fa0d4084406fcb2b5e41d1b614b7c
datalogger/__main__.py
datalogger/__main__.py
import sys
from PyQt5.QtWidgets import QApplication
from datalogger.api.workspace import Workspace
from datalogger.analysis_window import AnalysisWindow
from datalogger import __version__
def run_datalogger_full():
print("CUED DataLogger {}".format(__version__))
app = 0
app = QApplication(sys.argv)
# Create the window
w = AnalysisWindow()
w.CurrentWorkspace = Workspace()
#w.CurrentWorkspace.path = "//cued-fs/users/general/tab53/ts-home/Documents/urop/Logger 2017/cued_datalogger/"
# Load the workspace
#CurrentWorkspace.load("//cued-fs/users/general/tab53/ts-home/Documents/urop/Logger 2017/cued_datalogger/tests/test_workspace.wsp")
w.addon_widget.discover_addons(w.CurrentWorkspace.path + "addons/")
# Run the program
w.show()
sys.exit(app.exec_())
if __name__ == '__main__':
run_datalogger_full()
import sys
from PyQt5.QtWidgets import QApplication
from datalogger.api.workspace import Workspace
from datalogger.analysis_window import AnalysisWindow
from datalogger import __version__
def run_datalogger_full():
print("CUED DataLogger {}".format(__version__))
app = 0
app = QApplication(sys.argv)
CurrentWorkspace = Workspace()
# Create the window
w = AnalysisWindow()
w.CurrentWorkspace = CurrentWorkspace
w.addon_widget.discover_addons(w.CurrentWorkspace.path + "addons/")
# Run the program
w.show()
sys.exit(app.exec_())
if __name__ == '__main__':
run_datalogger_full()
Move workspace before window creation so config set for window
Move workspace before window creation so config set for window
Python
bsd-3-clause
torebutlin/cued_datalogger
python
## Code Before:
import sys
from PyQt5.QtWidgets import QApplication
from datalogger.api.workspace import Workspace
from datalogger.analysis_window import AnalysisWindow
from datalogger import __version__
def run_datalogger_full():
print("CUED DataLogger {}".format(__version__))
app = 0
app = QApplication(sys.argv)
# Create the window
w = AnalysisWindow()
w.CurrentWorkspace = Workspace()
#w.CurrentWorkspace.path = "//cued-fs/users/general/tab53/ts-home/Documents/urop/Logger 2017/cued_datalogger/"
# Load the workspace
#CurrentWorkspace.load("//cued-fs/users/general/tab53/ts-home/Documents/urop/Logger 2017/cued_datalogger/tests/test_workspace.wsp")
w.addon_widget.discover_addons(w.CurrentWorkspace.path + "addons/")
# Run the program
w.show()
sys.exit(app.exec_())
if __name__ == '__main__':
run_datalogger_full()
## Instruction:
Move workspace before window creation so config set for window
## Code After:
import sys
from PyQt5.QtWidgets import QApplication
from datalogger.api.workspace import Workspace
from datalogger.analysis_window import AnalysisWindow
from datalogger import __version__
def run_datalogger_full():
print("CUED DataLogger {}".format(__version__))
app = 0
app = QApplication(sys.argv)
CurrentWorkspace = Workspace()
# Create the window
w = AnalysisWindow()
w.CurrentWorkspace = CurrentWorkspace
w.addon_widget.discover_addons(w.CurrentWorkspace.path + "addons/")
# Run the program
w.show()
sys.exit(app.exec_())
if __name__ == '__main__':
run_datalogger_full()
# ... existing code ...
app = 0
app = QApplication(sys.argv)
CurrentWorkspace = Workspace()
# Create the window
w = AnalysisWindow()
w.CurrentWorkspace = CurrentWorkspace
w.addon_widget.discover_addons(w.CurrentWorkspace.path + "addons/")
# Run the program
# ... rest of the code ...
44ac6ece920bb1602a053b31b78326a3f30be151
setup.py
setup.py
from setuptools import setup
from guano import __version__
setup(
name='guano',
version=__version__,
description='GUANO, the "Grand Unified" bat acoustics metadata format',
long_description=open('README.md').read(),
url='https://github.com/riggsd/guano-py',
license='MIT',
author='David A. Riggs',
author_email='[email protected]',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
],
keywords='bats acoustics metadata',
py_modules=['guano'],
scripts=['bin/sb2guano.py'],
)
from setuptools import setup
from glob import glob
from guano import __version__
setup(
name='guano',
version=__version__,
description='GUANO, the "Grand Unified" bat acoustics metadata format',
long_description=open('README.md').read(),
url='https://github.com/riggsd/guano-py',
license='MIT',
author='David A. Riggs',
author_email='[email protected]',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
],
keywords='bats acoustics metadata guano',
py_modules=['guano'],
scripts=glob('bin/*.py'),
)
Declare to the PyPI that we support Python 3
Declare to the PyPI that we support Python 3
Python
mit
riggsd/guano-py
python
## Code Before:
from setuptools import setup
from guano import __version__
setup(
name='guano',
version=__version__,
description='GUANO, the "Grand Unified" bat acoustics metadata format',
long_description=open('README.md').read(),
url='https://github.com/riggsd/guano-py',
license='MIT',
author='David A. Riggs',
author_email='[email protected]',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
],
keywords='bats acoustics metadata',
py_modules=['guano'],
scripts=['bin/sb2guano.py'],
)
## Instruction:
Declare to the PyPI that we support Python 3
## Code After:
from setuptools import setup
from glob import glob
from guano import __version__
setup(
name='guano',
version=__version__,
description='GUANO, the "Grand Unified" bat acoustics metadata format',
long_description=open('README.md').read(),
url='https://github.com/riggsd/guano-py',
license='MIT',
author='David A. Riggs',
author_email='[email protected]',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
],
keywords='bats acoustics metadata guano',
py_modules=['guano'],
scripts=glob('bin/*.py'),
)
...
from setuptools import setup
from glob import glob
from guano import __version__
...
author='David A. Riggs',
author_email='[email protected]',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
],
keywords='bats acoustics metadata guano',
py_modules=['guano'],
scripts=glob('bin/*.py'),
)
...
96b592b00b9a353127ef9ee4c676d873e77edbfd
buildSrc/src/main/kotlin/Versions.kt
buildSrc/src/main/kotlin/Versions.kt
object Versions {
const val LORITTA = "2021-SNAPSHOT"
const val PUDDING = "0.0.2-20220412.131908-317"
const val KOTLIN = "1.6.20-M1"
const val KTOR = "1.6.7"
const val JDA = "4.3.0_324"
const val KOTLIN_SERIALIZATION = "1.3.2"
const val KOTLIN_COROUTINES = "1.6.0"
const val EXPOSED = "0.37.3"
const val KOTLIN_LOGGING = "2.1.16"
}
object Versions {
const val LORITTA = "2021-SNAPSHOT"
const val PUDDING = "0.0.2-last-version-using-kt-1621"
const val KOTLIN = "1.6.20-M1"
const val KTOR = "1.6.7"
const val JDA = "4.3.0_324"
const val KOTLIN_SERIALIZATION = "1.3.2"
const val KOTLIN_COROUTINES = "1.6.0"
const val EXPOSED = "0.37.3"
const val KOTLIN_LOGGING = "2.1.16"
}
Change Pudding version to the last version before we updated it to Kotlin 1.7.0
Change Pudding version to the last version before we updated it to Kotlin 1.7.0
## Code Before:
object Versions {
const val LORITTA = "2021-SNAPSHOT"
const val PUDDING = "0.0.2-20220412.131908-317"
const val KOTLIN = "1.6.20-M1"
const val KTOR = "1.6.7"
const val JDA = "4.3.0_324"
const val KOTLIN_SERIALIZATION = "1.3.2"
const val KOTLIN_COROUTINES = "1.6.0"
const val EXPOSED = "0.37.3"
const val KOTLIN_LOGGING = "2.1.16"
}
## Instruction:
Change Pudding version to the last version before we updated it to Kotlin 1.7.0
## Code After:
object Versions {
const val LORITTA = "2021-SNAPSHOT"
const val PUDDING = "0.0.2-last-version-using-kt-1621"
const val KOTLIN = "1.6.20-M1"
const val KTOR = "1.6.7"
const val JDA = "4.3.0_324"
const val KOTLIN_SERIALIZATION = "1.3.2"
const val KOTLIN_COROUTINES = "1.6.0"
const val EXPOSED = "0.37.3"
const val KOTLIN_LOGGING = "2.1.16"
}
// ... existing code ...
object Versions {
const val LORITTA = "2021-SNAPSHOT"
const val PUDDING = "0.0.2-last-version-using-kt-1621"
const val KOTLIN = "1.6.20-M1"
const val KTOR = "1.6.7"
const val JDA = "4.3.0_324"
// ... rest of the code ...
cf8b6685f8fcadb3f50999ee587c7316741808f9
kotlin/src/main/kotlin/2018/Lib05.kt
kotlin/src/main/kotlin/2018/Lib05.kt
package aoc.kt.y2018;
/**
* Day 5.
*/
/** Part 1 */
fun processPolymer1(input: String): String {
val output = input.toCharArray()
.forEachIndexed { i, c ->
if (i != 0) {
var reacting = true
var range = 0..0
while (reacting) {
var offset = 0
if (reactionOccurs(c, input.get(-1))) {
} else {
reacting = false
}
}
}
}
return output.toString()
}
/** Part 2 */
fun processPolymer2(input: String): String {
return "42"
}
fun reactionOccurs(char: Char, prev: Char): Boolean {
return false
}
package aoc.kt.y2018;
/**
* Day 5.
*/
/** Part 1 */
fun processPolymer1(input: String): String {
var polymer = Pair(input, true)
while (polymer.second) {
polymer = react(polymer.first)
}
return polymer
//.first.length
.toString()
}
/** Part 2 */
fun processPolymer2(input: String): String {
return "42"
}
fun react(input: String): Pair<String, Boolean> {
var result = mutableListOf<Char>()
var polymer = input.toMutableList()
var reactionOccured = false
while (polymer.next() != null) {
polymer.dequeue()?.let { a ->
if (polymer.next() != null) {
polymer.dequeue()?.let { b ->
if (a.equals(b, true)) {
reactionOccured = true
} else {
result.push(a)
polymer.enqueue(b)
}
}
}
}
}
val resultStr: String = result.map { it.toString() }.reduce { acc, n -> acc + n }
return Pair(resultStr, reactionOccured)
}
fun <T> MutableList<T>.push(e: T) {
this.add(e)
}
fun <T> MutableList<T>.dequeue(): T? {
if (this.isNotEmpty()) {
return this.removeAt(0)
} else {
return null
}
}
fun <T> MutableList<T>.enqueue(e: T) {
this.add(0, e)
}
fun <T> MutableList<T>.next(): T? {
return this.getOrNull(0)
}
Update with new approach to day 5
Update with new approach to day 5
Kotlin
mit
nathanjent/adventofcode-rust
kotlin
## Code Before:
package aoc.kt.y2018;
/**
* Day 5.
*/
/** Part 1 */
fun processPolymer1(input: String): String {
val output = input.toCharArray()
.forEachIndexed { i, c ->
if (i != 0) {
var reacting = true
var range = 0..0
while (reacting) {
var offset = 0
if (reactionOccurs(c, input.get(-1))) {
} else {
reacting = false
}
}
}
}
return output.toString()
}
/** Part 2 */
fun processPolymer2(input: String): String {
return "42"
}
fun reactionOccurs(char: Char, prev: Char): Boolean {
return false
}
## Instruction:
Update with new approach to day 5
## Code After:
package aoc.kt.y2018;
/**
* Day 5.
*/
/** Part 1 */
fun processPolymer1(input: String): String {
var polymer = Pair(input, true)
while (polymer.second) {
polymer = react(polymer.first)
}
return polymer
//.first.length
.toString()
}
/** Part 2 */
fun processPolymer2(input: String): String {
return "42"
}
fun react(input: String): Pair<String, Boolean> {
var result = mutableListOf<Char>()
var polymer = input.toMutableList()
var reactionOccured = false
while (polymer.next() != null) {
polymer.dequeue()?.let { a ->
if (polymer.next() != null) {
polymer.dequeue()?.let { b ->
if (a.equals(b, true)) {
reactionOccured = true
} else {
result.push(a)
polymer.enqueue(b)
}
}
}
}
}
val resultStr: String = result.map { it.toString() }.reduce { acc, n -> acc + n }
return Pair(resultStr, reactionOccured)
}
fun <T> MutableList<T>.push(e: T) {
this.add(e)
}
fun <T> MutableList<T>.dequeue(): T? {
if (this.isNotEmpty()) {
return this.removeAt(0)
} else {
return null
}
}
fun <T> MutableList<T>.enqueue(e: T) {
this.add(0, e)
}
fun <T> MutableList<T>.next(): T? {
return this.getOrNull(0)
}
// ... existing code ...
/** Part 1 */
fun processPolymer1(input: String): String {
var polymer = Pair(input, true)
while (polymer.second) {
polymer = react(polymer.first)
}
return polymer
//.first.length
.toString()
}
/** Part 2 */
// ... modified code ...
return "42"
}
fun react(input: String): Pair<String, Boolean> {
var result = mutableListOf<Char>()
var polymer = input.toMutableList()
var reactionOccured = false
while (polymer.next() != null) {
polymer.dequeue()?.let { a ->
if (polymer.next() != null) {
polymer.dequeue()?.let { b ->
if (a.equals(b, true)) {
reactionOccured = true
} else {
result.push(a)
polymer.enqueue(b)
}
}
}
}
}
val resultStr: String = result.map { it.toString() }.reduce { acc, n -> acc + n }
return Pair(resultStr, reactionOccured)
}
fun <T> MutableList<T>.push(e: T) {
this.add(e)
}
fun <T> MutableList<T>.dequeue(): T? {
if (this.isNotEmpty()) {
return this.removeAt(0)
} else {
return null
}
}
fun <T> MutableList<T>.enqueue(e: T) {
this.add(0, e)
}
fun <T> MutableList<T>.next(): T? {
return this.getOrNull(0)
}
// ... rest of the code ...
1f9bc1b6f9a796458d104c01b9a344cbb0c84a9b
Lib/fontParts/fontshell/groups.py
Lib/fontParts/fontshell/groups.py
import defcon
from fontParts.base import BaseGroups
from fontParts.fontshell.base import RBaseObject
class RGroups(RBaseObject, BaseGroups):
wrapClass = defcon.Groups
def _items(self):
return self.naked().items()
def _contains(self, key):
return key in self.naked()
def _setItem(self, key, value):
self.naked()[key] = value
def _getItem(self, key):
return self.naked()[key]
def _delItem(self, key):
del self.naked()[key]
import defcon
from fontParts.base import BaseGroups
from fontParts.fontshell.base import RBaseObject
class RGroups(RBaseObject, BaseGroups):
wrapClass = defcon.Groups
def _get_base_side1KerningGroups(self):
return self.naked().getRepresentation("defcon.groups.kerningSide1Groups")
def _get_base_side2KerningGroups(self):
return self.naked().getRepresentation("defcon.groups.kerningSide2Groups")
def _items(self):
return self.naked().items()
def _contains(self, key):
return key in self.naked()
def _setItem(self, key, value):
self.naked()[key] = value
def _getItem(self, key):
return self.naked()[key]
def _delItem(self, key):
del self.naked()[key]
Add defcon implementation of group lookup methods.
Add defcon implementation of group lookup methods.
## Code Before:
import defcon
from fontParts.base import BaseGroups
from fontParts.fontshell.base import RBaseObject
class RGroups(RBaseObject, BaseGroups):
wrapClass = defcon.Groups
def _items(self):
return self.naked().items()
def _contains(self, key):
return key in self.naked()
def _setItem(self, key, value):
self.naked()[key] = value
def _getItem(self, key):
return self.naked()[key]
def _delItem(self, key):
del self.naked()[key]
## Instruction:
Add defcon implementation of group lookup methods.
## Code After:
import defcon
from fontParts.base import BaseGroups
from fontParts.fontshell.base import RBaseObject
class RGroups(RBaseObject, BaseGroups):
wrapClass = defcon.Groups
def _get_base_side1KerningGroups(self):
return self.naked().getRepresentation("defcon.groups.kerningSide1Groups")
def _get_base_side2KerningGroups(self):
return self.naked().getRepresentation("defcon.groups.kerningSide2Groups")
def _items(self):
return self.naked().items()
def _contains(self, key):
return key in self.naked()
def _setItem(self, key, value):
self.naked()[key] = value
def _getItem(self, key):
return self.naked()[key]
def _delItem(self, key):
del self.naked()[key]
# ... existing code ...
class RGroups(RBaseObject, BaseGroups):
wrapClass = defcon.Groups
def _get_base_side1KerningGroups(self):
return self.naked().getRepresentation("defcon.groups.kerningSide1Groups")
def _get_base_side2KerningGroups(self):
return self.naked().getRepresentation("defcon.groups.kerningSide2Groups")
def _items(self):
return self.naked().items()
# ... rest of the code ...
3c0d52aa0a936b3ae138ddfba66e7ba9dcc5f934
sympy/plotting/proxy_pyglet.py
sympy/plotting/proxy_pyglet.py
from warnings import warn
from sympy.core.compatibility import SymPyDeprecationWarning
from pygletplot import PygletPlot
def Plot(*args, **kwargs):
""" A temporary proxy for an interface under deprecation.
This proxy is the one imported by `from sympy import *`.
The Plot class will change in future versions of sympy to use the new
plotting module. That new plotting module is already used by the
plot() function (lowercase). To write code compatible with future versions
of sympy use that function (plot() lowercase). Or if you want to use the
old plotting module just import it directly:
`from sympy.plotting.pygletplot import PygletPlot`
To use Plot from the new plotting module do:
`from sympy.plotting.plot import Plot`
In future version of sympy you will also be able to use
`from sympy.plotting import Plot` but in the current version this will
import this proxy object. It's done for backward compatibility.
The old plotting module is not deprecated. Only the location will
change. The new location is sympy.plotting.pygletplot.
"""
warn('This interface will change in future versions of sympy.'
' As a precatuion use the plot() function (lowercase).'
' See the docstring for details.',
SymPyDeprecationWarning)
return PygletPlot(*args, **kwargs)
from warnings import warn
from sympy.utilities.exceptions import SymPyDeprecationWarning
from pygletplot import PygletPlot
def Plot(*args, **kwargs):
""" A temporary proxy for an interface under deprecation.
This proxy is the one imported by `from sympy import *`.
The Plot class will change in future versions of sympy to use the new
plotting module. That new plotting module is already used by the
plot() function (lowercase). To write code compatible with future versions
of sympy use that function (plot() lowercase). Or if you want to use the
old plotting module just import it directly:
`from sympy.plotting.pygletplot import PygletPlot`
To use Plot from the new plotting module do:
`from sympy.plotting.plot import Plot`
In future version of sympy you will also be able to use
`from sympy.plotting import Plot` but in the current version this will
import this proxy object. It's done for backward compatibility.
The old plotting module is not deprecated. Only the location will
change. The new location is sympy.plotting.pygletplot.
"""
warn('This interface will change in future versions of sympy.'
' As a precatuion use the plot() function (lowercase).'
' See the docstring for details.',
SymPyDeprecationWarning)
return PygletPlot(*args, **kwargs)
Change the import location of DeprecationWarning used by plotting module
Change the import location of DeprecationWarning used by plotting module
The SympyDeprecationWarning was moved from its original location. The change
was done in the master branch. The same change must be mirrored in this
development branch.
## Code Before:
from warnings import warn
from sympy.core.compatibility import SymPyDeprecationWarning
from pygletplot import PygletPlot
def Plot(*args, **kwargs):
""" A temporary proxy for an interface under deprecation.
This proxy is the one imported by `from sympy import *`.
The Plot class will change in future versions of sympy to use the new
plotting module. That new plotting module is already used by the
plot() function (lowercase). To write code compatible with future versions
of sympy use that function (plot() lowercase). Or if you want to use the
old plotting module just import it directly:
`from sympy.plotting.pygletplot import PygletPlot`
To use Plot from the new plotting module do:
`from sympy.plotting.plot import Plot`
In future version of sympy you will also be able to use
`from sympy.plotting import Plot` but in the current version this will
import this proxy object. It's done for backward compatibility.
The old plotting module is not deprecated. Only the location will
change. The new location is sympy.plotting.pygletplot.
"""
warn('This interface will change in future versions of sympy.'
' As a precatuion use the plot() function (lowercase).'
' See the docstring for details.',
SymPyDeprecationWarning)
return PygletPlot(*args, **kwargs)
## Instruction:
Change the import location of DeprecationWarning used by plotting module
The SympyDeprecationWarning was moved from its original location. The change
was done in the master branch. The same change must be mirrored in this
development branch.
## Code After:
from warnings import warn
from sympy.utilities.exceptions import SymPyDeprecationWarning
from pygletplot import PygletPlot
def Plot(*args, **kwargs):
""" A temporary proxy for an interface under deprecation.
This proxy is the one imported by `from sympy import *`.
The Plot class will change in future versions of sympy to use the new
plotting module. That new plotting module is already used by the
plot() function (lowercase). To write code compatible with future versions
of sympy use that function (plot() lowercase). Or if you want to use the
old plotting module just import it directly:
`from sympy.plotting.pygletplot import PygletPlot`
To use Plot from the new plotting module do:
`from sympy.plotting.plot import Plot`
In future version of sympy you will also be able to use
`from sympy.plotting import Plot` but in the current version this will
import this proxy object. It's done for backward compatibility.
The old plotting module is not deprecated. Only the location will
change. The new location is sympy.plotting.pygletplot.
"""
warn('This interface will change in future versions of sympy.'
' As a precatuion use the plot() function (lowercase).'
' See the docstring for details.',
SymPyDeprecationWarning)
return PygletPlot(*args, **kwargs)
// ... existing code ...
from warnings import warn
from sympy.utilities.exceptions import SymPyDeprecationWarning
from pygletplot import PygletPlot
def Plot(*args, **kwargs):
// ... rest of the code ...
b0efb7db50080dd1e9e96ad8d818e3b0859bbca3
retry/__init__.py
retry/__init__.py
from functools import wraps
import time
class RetryExceededError(Exception):
pass
class retry(object):
'''A decorator encapsulated retry logic.
Usage:
@retry(errors=(TTransportException, AnyExpectedError))
'''
def __init__(self, errors=(Exception, ), tries=3, delay=0):
self.errors = errors
self.tries = tries
self.delay = delay
def __call__(self, func):
@wraps(func)
def _(*args, **kw):
retry_left_count = self.tries
while retry_left_count:
try:
return func(*args, **kw)
except Exception, e:
retry_left_count -= 1
if not isinstance(e, self.errors):
raise e
if not retry_left_count:
raise RetryExceededError
if self.delay:
time.sleep(self.delay)
return _
from functools import wraps
import time
class RetryExceededError(Exception):
pass
class retry(object):
'''A decorator encapsulated retry logic.
Usage:
@retry(errors=(TTransportException, AnyExpectedError))
@retry() # detect whatsoever errors and retry 3 times
'''
def __init__(self, errors=(Exception, ), tries=3, delay=0):
self.errors = errors
self.tries = tries
self.delay = delay
def __call__(self, func):
@wraps(func)
def _(*args, **kw):
retry_left_count = self.tries
while retry_left_count:
try:
return func(*args, **kw)
except Exception, e:
retry_left_count -= 1
if not isinstance(e, self.errors):
raise e
if not retry_left_count:
raise RetryExceededError
if self.delay:
time.sleep(self.delay)
return _
Add a usage in retry
Add a usage in retry
Python
mit
soasme/retries
python
## Code Before:
from functools import wraps
import time
class RetryExceededError(Exception):
pass
class retry(object):
'''A decorator encapsulated retry logic.
Usage:
@retry(errors=(TTransportException, AnyExpectedError))
'''
def __init__(self, errors=(Exception, ), tries=3, delay=0):
self.errors = errors
self.tries = tries
self.delay = delay
def __call__(self, func):
@wraps(func)
def _(*args, **kw):
retry_left_count = self.tries
while retry_left_count:
try:
return func(*args, **kw)
except Exception, e:
retry_left_count -= 1
if not isinstance(e, self.errors):
raise e
if not retry_left_count:
raise RetryExceededError
if self.delay:
time.sleep(self.delay)
return _
## Instruction:
Add a usage in retry
## Code After:
from functools import wraps
import time
class RetryExceededError(Exception):
pass
class retry(object):
'''A decorator encapsulated retry logic.
Usage:
@retry(errors=(TTransportException, AnyExpectedError))
@retry() # detect whatsoever errors and retry 3 times
'''
def __init__(self, errors=(Exception, ), tries=3, delay=0):
self.errors = errors
self.tries = tries
self.delay = delay
def __call__(self, func):
@wraps(func)
def _(*args, **kw):
retry_left_count = self.tries
while retry_left_count:
try:
return func(*args, **kw)
except Exception, e:
retry_left_count -= 1
if not isinstance(e, self.errors):
raise e
if not retry_left_count:
raise RetryExceededError
if self.delay:
time.sleep(self.delay)
return _
# ... existing code ...
Usage:
@retry(errors=(TTransportException, AnyExpectedError))
@retry() # detect whatsoever errors and retry 3 times
'''
def __init__(self, errors=(Exception, ), tries=3, delay=0):
# ... rest of the code ...
115197d42b380ae65de75d74a4d28933eb8defde
testproj/testproj/testapp/models.py
testproj/testproj/testapp/models.py
from django.db import models
from django.utils import timezone
class SecretFile(models.Model):
filename = models.CharField(max_length=255, blank=True, null=True)
order = models.IntegerField(blank=True, null=True)
size = models.PositiveIntegerField(blank=True, null=True)
created_on = models.DateTimeField(default=timezone.now)
is_secret = models.BooleanField()
def __unicode__(self):
return "#%d %s" % (self.pk, self.filename)
from django.db import models
from django.utils import timezone
class SecretFile(models.Model):
filename = models.CharField(max_length=255, blank=True, null=True)
order = models.IntegerField(blank=True, null=True)
size = models.PositiveIntegerField(blank=True, null=True)
created_on = models.DateTimeField(default=timezone.now)
is_secret = models.BooleanField(default=False)
def __unicode__(self):
return "#%d %s" % (self.pk, self.filename)
## Code Before:
package codechicken.lib.lighting;
import net.minecraft.client.renderer.Tessellator;
import codechicken.lib.colour.Colour;
import codechicken.lib.colour.ColourRGBA;
import codechicken.lib.render.CCModel;
import codechicken.lib.render.CCRenderState;
import codechicken.lib.render.IVertexModifier;
import codechicken.lib.render.UV;
import codechicken.lib.vec.Vector3;
/**
* Faster precomputed version of LightModel that only works for axis planar sides
*/
public class PlanarLightModel implements IVertexModifier
{
public ColourRGBA[] colours;
public PlanarLightModel(int[] colours)
{
this.colours = new ColourRGBA[6];
for(int i = 0; i < 6; i++)
this.colours[i] = new ColourRGBA(colours[i]);
}
@Override
public void applyModifiers(CCModel m, Tessellator tess, Vector3 vec, UV uv, Vector3 normal, int i)
{
ColourRGBA light = colours[CCModel.findSide(normal)];
int colour = (m == null || m.colours == null) ? -1 : m.colours[i];
Colour res = new ColourRGBA(colour).multiply(light);
CCRenderState.vertexColour(res.r&0xFF, res.g&0xFF, res.b&0xFF, res.a&0xFF) ;
}
@Override
public boolean needsNormals()
{
return true;
}
}
## Instruction:
Add standard planar light model
## Code After:
package codechicken.lib.lighting;
import net.minecraft.client.renderer.Tessellator;
import codechicken.lib.colour.Colour;
import codechicken.lib.colour.ColourRGBA;
import codechicken.lib.render.CCModel;
import codechicken.lib.render.CCRenderState;
import codechicken.lib.render.IVertexModifier;
import codechicken.lib.render.UV;
import codechicken.lib.vec.Vector3;
/**
* Faster precomputed version of LightModel that only works for axis planar sides
*/
public class PlanarLightModel implements IVertexModifier
{
public static PlanarLightModel standardLightModel = LightModel.standardLightModel.reducePlanar();
public ColourRGBA[] colours;
public PlanarLightModel(int[] colours)
{
this.colours = new ColourRGBA[6];
for(int i = 0; i < 6; i++)
this.colours[i] = new ColourRGBA(colours[i]);
}
@Override
public void applyModifiers(CCModel m, Tessellator tess, Vector3 vec, UV uv, Vector3 normal, int i)
{
ColourRGBA light = colours[CCModel.findSide(normal)];
int colour = (m == null || m.colours == null) ? -1 : m.colours[i];
Colour res = new ColourRGBA(colour).multiply(light);
CCRenderState.vertexColour(res.r&0xFF, res.g&0xFF, res.b&0xFF, res.a&0xFF) ;
}
@Override
public boolean needsNormals()
{
return true;
}
}
# ... existing code ...
*/
public class PlanarLightModel implements IVertexModifier
{
public static PlanarLightModel standardLightModel = LightModel.standardLightModel.reducePlanar();
public ColourRGBA[] colours;
public PlanarLightModel(int[] colours)
# ... rest of the code ...
try:
from .allauth import *
except ImportError:
from .chinup import *
from .exceptions import *
__version__ = '0.1'
from __future__ import absolute_import, unicode_literals
try:
from .allauth import *
except ImportError:
from .chinup import *
from .exceptions import *
__version__ = '0.1'
# Configure logging to avoid warning.
# https://docs.python.org/2/howto/logging.html#configuring-logging-for-a-library
import logging
if hasattr(logging, 'NullHandler'):
logging.getLogger('chinup').addHandler(logging.NullHandler())
Configure package-level logging to avoid warning.
Configure package-level logging to avoid warning.
Python
mit
pagepart/chinup
python
## Code Before:
try:
from .allauth import *
except ImportError:
from .chinup import *
from .exceptions import *
__version__ = '0.1'
## Instruction:
Configure package-level logging to avoid warning.
## Code After:
from __future__ import absolute_import, unicode_literals
try:
from .allauth import *
except ImportError:
from .chinup import *
from .exceptions import *
__version__ = '0.1'
# Configure logging to avoid warning.
# https://docs.python.org/2/howto/logging.html#configuring-logging-for-a-library
import logging
if hasattr(logging, 'NullHandler'):
logging.getLogger('chinup').addHandler(logging.NullHandler())
# ... existing code ...
from __future__ import absolute_import, unicode_literals
try:
from .allauth import *
except ImportError:
# ... modified code ...
__version__ = '0.1'
# Configure logging to avoid warning.
# https://docs.python.org/2/howto/logging.html#configuring-logging-for-a-library
import logging
if hasattr(logging, 'NullHandler'):
logging.getLogger('chinup').addHandler(logging.NullHandler())
# ... rest of the code ...
0472c1cabdfdf0f8a193552dac3370ae93bbdaed
scripts/get_top_hashtags.py
scripts/get_top_hashtags.py
import json
import sys
from collections import Counter
f = open(sys.argv[1], 'r')
topk = int(sys.argv[2]) if len(sys.argv) > 2 and sys.argv[2].isdigit() else 10
hashtags = []
for line in f:
if line.startswith('{'):
hashtags.extend(json.loads(line)['hashtags'])
hashtagCounter = Counter([hashtag.lower() for hashtag in hashtags])
for (hashtag, count) in hashtagCounter.most_common(topk):
print hashtag, count
import json
import sys
from collections import Counter
f = open(sys.argv[1], 'r')
topk = int(sys.argv[2]) if len(sys.argv) > 2 and sys.argv[2].isdigit() else 10
hashtagCounter = Counter([hashtag.lower() for line in f if line.startswith('{') for hashtag in json.loads(line)['hashtags'] ])
for (hashtag, count) in hashtagCounter.most_common(topk):
print hashtag, count
Use a more compact functional style for instantiating hashtagCounter
Use a more compact functional style for instantiating hashtagCounter
Python
mpl-2.0
aDataAlchemist/election-tweets
python
## Code Before:
import json
import sys
from collections import Counter
f = open(sys.argv[1], 'r')
topk = int(sys.argv[2]) if len(sys.argv) > 2 and sys.argv[2].isdigit() else 10
hashtags = []
for line in f:
if line.startswith('{'):
hashtags.extend(json.loads(line)['hashtags'])
hashtagCounter = Counter([hashtag.lower() for hashtag in hashtags])
for (hashtag, count) in hashtagCounter.most_common(topk):
print hashtag, count
## Instruction:
Use a more compact functional style for instantiating hashtagCounter
## Code After:
import json
import sys
from collections import Counter
f = open(sys.argv[1], 'r')
topk = int(sys.argv[2]) if len(sys.argv) > 2 and sys.argv[2].isdigit() else 10
hashtagCounter = Counter([hashtag.lower() for line in f if line.startswith('{') for hashtag in json.loads(line)['hashtags'] ])
for (hashtag, count) in hashtagCounter.most_common(topk):
print hashtag, count
// ... existing code ...
f = open(sys.argv[1], 'r')
topk = int(sys.argv[2]) if len(sys.argv) > 2 and sys.argv[2].isdigit() else 10
hashtagCounter = Counter([hashtag.lower() for line in f if line.startswith('{') for hashtag in json.loads(line)['hashtags'] ])
for (hashtag, count) in hashtagCounter.most_common(topk):
print hashtag, count
// ... rest of the code ...
package org.openmhealth.shim.common.mapper;
import com.fasterxml.jackson.databind.ObjectMapper;
import static org.openmhealth.schema.configuration.JacksonConfiguration.newObjectMapper;
/**
* @author Emerson Farrugia
*/
public abstract class DataPointMapperUnitTests {
protected static final ObjectMapper objectMapper = newObjectMapper();
}
package org.openmhealth.shim.common.mapper;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.springframework.core.io.ClassPathResource;
import java.io.IOException;
import java.io.InputStream;
import static java.lang.String.format;
import static org.openmhealth.schema.configuration.JacksonConfiguration.newObjectMapper;
/**
* @author Emerson Farrugia
*/
public abstract class DataPointMapperUnitTests {
protected static final ObjectMapper objectMapper = newObjectMapper();
/**
* @param classPathResourceName the name of the class path resource to load
* @return the contents of the resource as a {@link JsonNode}
* @throws RuntimeException if the resource can't be loaded
*/
protected JsonNode asJsonNode(String classPathResourceName) {
ClassPathResource resource = new ClassPathResource(classPathResourceName);
try {
InputStream resourceInputStream = resource.getInputStream();
return objectMapper.readTree(resourceInputStream);
}
catch (IOException e) {
throw new RuntimeException(
format("The class path resource '%s' can't be loaded as a JSON node.", classPathResourceName), e);
}
}
}
Add unit test support to load class path resources as JSON nodes
Add unit test support to load class path resources as JSON nodes
## Code Before:
package org.openmhealth.shim.common.mapper;
import com.fasterxml.jackson.databind.ObjectMapper;
import static org.openmhealth.schema.configuration.JacksonConfiguration.newObjectMapper;
/**
* @author Emerson Farrugia
*/
public abstract class DataPointMapperUnitTests {
protected static final ObjectMapper objectMapper = newObjectMapper();
}
## Instruction:
Add unit test support to load class path resources as JSON nodes
## Code After:
package org.openmhealth.shim.common.mapper;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.springframework.core.io.ClassPathResource;
import java.io.IOException;
import java.io.InputStream;
import static java.lang.String.format;
import static org.openmhealth.schema.configuration.JacksonConfiguration.newObjectMapper;
/**
* @author Emerson Farrugia
*/
public abstract class DataPointMapperUnitTests {
protected static final ObjectMapper objectMapper = newObjectMapper();
/**
* @param classPathResourceName the name of the class path resource to load
* @return the contents of the resource as a {@link JsonNode}
* @throws RuntimeException if the resource can't be loaded
*/
protected JsonNode asJsonNode(String classPathResourceName) {
ClassPathResource resource = new ClassPathResource(classPathResourceName);
try {
InputStream resourceInputStream = resource.getInputStream();
return objectMapper.readTree(resourceInputStream);
}
catch (IOException e) {
throw new RuntimeException(
format("The class path resource '%s' can't be loaded as a JSON node.", classPathResourceName), e);
}
}
}
# ... existing code ...
package org.openmhealth.shim.common.mapper;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.springframework.core.io.ClassPathResource;
import java.io.IOException;
import java.io.InputStream;
import static java.lang.String.format;
import static org.openmhealth.schema.configuration.JacksonConfiguration.newObjectMapper;
# ... modified code ...
public abstract class DataPointMapperUnitTests {
protected static final ObjectMapper objectMapper = newObjectMapper();
/**
* @param classPathResourceName the name of the class path resource to load
* @return the contents of the resource as a {@link JsonNode}
* @throws RuntimeException if the resource can't be loaded
*/
protected JsonNode asJsonNode(String classPathResourceName) {
ClassPathResource resource = new ClassPathResource(classPathResourceName);
try {
InputStream resourceInputStream = resource.getInputStream();
return objectMapper.readTree(resourceInputStream);
}
catch (IOException e) {
throw new RuntimeException(
format("The class path resource '%s' can't be loaded as a JSON node.", classPathResourceName), e);
}
}
}
# ... rest of the code ...
package org.metaborg.meta.lang.dynsem.interpreter.nodes.matching;
import org.metaborg.meta.lang.dynsem.interpreter.utils.SourceSectionUtil;
import org.spoofax.interpreter.core.Tools;
import org.spoofax.interpreter.terms.IStrategoAppl;
import org.spoofax.terms.util.NotImplementedException;
import com.oracle.truffle.api.frame.FrameDescriptor;
import com.oracle.truffle.api.source.SourceSection;
public abstract class LiteralMatchPattern extends MatchPattern {
public LiteralMatchPattern(SourceSection source) {
super(source);
}
public static LiteralMatchPattern create(IStrategoAppl t, FrameDescriptor fd) {
SourceSection source = SourceSectionUtil.fromStrategoTerm(t);
if(Tools.hasConstructor(t, "True",0)){
return new TrueLiteralTermMatchPattern(source);
}
if(Tools.hasConstructor(t, "False",0)){
return new FalseLiteralTermMatchPattern(source);
}
if(Tools.hasConstructor(t, "Int", 1)){
return new IntLiteralTermMatchPattern(Tools.intAt(t, 0).intValue(), source);
}
if(Tools.hasConstructor(t, "String", 1)){
return new StringLiteralTermMatchPattern(Tools.stringAt(t, 0).stringValue(), source);
}
throw new NotImplementedException("Unsupported literal: " + t);
}
}
package org.metaborg.meta.lang.dynsem.interpreter.nodes.matching;
import org.metaborg.meta.lang.dynsem.interpreter.utils.SourceSectionUtil;
import org.spoofax.interpreter.core.Tools;
import org.spoofax.interpreter.terms.IStrategoAppl;
import org.spoofax.terms.util.NotImplementedException;
import com.oracle.truffle.api.frame.FrameDescriptor;
import com.oracle.truffle.api.source.SourceSection;
public abstract class LiteralMatchPattern extends MatchPattern {
public LiteralMatchPattern(SourceSection source) {
super(source);
}
public static LiteralMatchPattern create(IStrategoAppl t, FrameDescriptor fd) {
SourceSection source = SourceSectionUtil.fromStrategoTerm(t);
if (Tools.hasConstructor(t, "True", 0)) {
return new TrueLiteralTermMatchPattern(source);
}
if (Tools.hasConstructor(t, "False", 0)) {
return new FalseLiteralTermMatchPattern(source);
}
if (Tools.hasConstructor(t, "Int", 1)) {
return new IntLiteralTermMatchPattern(Integer.parseInt(Tools.stringAt(t, 0).stringValue()), source);
}
if (Tools.hasConstructor(t, "String", 1)) {
return new StringLiteralTermMatchPattern(Tools.stringAt(t, 0).stringValue(), source);
}
throw new NotImplementedException("Unsupported literal: " + t);
}
}
Fix literal pattern matching creation to correctly convert integer literals in the specification which are encoded as strings.
Fix literal pattern matching creation to correctly convert integer literals in the specification which are encoded as strings.
Java
apache-2.0
metaborg/dynsem,metaborg/dynsem
java
## Code Before:
package org.metaborg.meta.lang.dynsem.interpreter.nodes.matching;
import org.metaborg.meta.lang.dynsem.interpreter.utils.SourceSectionUtil;
import org.spoofax.interpreter.core.Tools;
import org.spoofax.interpreter.terms.IStrategoAppl;
import org.spoofax.terms.util.NotImplementedException;
import com.oracle.truffle.api.frame.FrameDescriptor;
import com.oracle.truffle.api.source.SourceSection;
public abstract class LiteralMatchPattern extends MatchPattern {
public LiteralMatchPattern(SourceSection source) {
super(source);
}
public static LiteralMatchPattern create(IStrategoAppl t, FrameDescriptor fd) {
SourceSection source = SourceSectionUtil.fromStrategoTerm(t);
if(Tools.hasConstructor(t, "True",0)){
return new TrueLiteralTermMatchPattern(source);
}
if(Tools.hasConstructor(t, "False",0)){
return new FalseLiteralTermMatchPattern(source);
}
if(Tools.hasConstructor(t, "Int", 1)){
return new IntLiteralTermMatchPattern(Tools.intAt(t, 0).intValue(), source);
}
if(Tools.hasConstructor(t, "String", 1)){
return new StringLiteralTermMatchPattern(Tools.stringAt(t, 0).stringValue(), source);
}
throw new NotImplementedException("Unsupported literal: " + t);
}
}
## Instruction:
Fix literal pattern matching creation to correctly convert integer literals in the specification which are encoded as strings.
## Code After:
package org.metaborg.meta.lang.dynsem.interpreter.nodes.matching;
import org.metaborg.meta.lang.dynsem.interpreter.utils.SourceSectionUtil;
import org.spoofax.interpreter.core.Tools;
import org.spoofax.interpreter.terms.IStrategoAppl;
import org.spoofax.terms.util.NotImplementedException;
import com.oracle.truffle.api.frame.FrameDescriptor;
import com.oracle.truffle.api.source.SourceSection;
public abstract class LiteralMatchPattern extends MatchPattern {
public LiteralMatchPattern(SourceSection source) {
super(source);
}
public static LiteralMatchPattern create(IStrategoAppl t, FrameDescriptor fd) {
SourceSection source = SourceSectionUtil.fromStrategoTerm(t);
if (Tools.hasConstructor(t, "True", 0)) {
return new TrueLiteralTermMatchPattern(source);
}
if (Tools.hasConstructor(t, "False", 0)) {
return new FalseLiteralTermMatchPattern(source);
}
if (Tools.hasConstructor(t, "Int", 1)) {
return new IntLiteralTermMatchPattern(Integer.parseInt(Tools.stringAt(t, 0).stringValue()), source);
}
if (Tools.hasConstructor(t, "String", 1)) {
return new StringLiteralTermMatchPattern(Tools.stringAt(t, 0).stringValue(), source);
}
throw new NotImplementedException("Unsupported literal: " + t);
}
}
# ... existing code ...
public static LiteralMatchPattern create(IStrategoAppl t, FrameDescriptor fd) {
SourceSection source = SourceSectionUtil.fromStrategoTerm(t);
if (Tools.hasConstructor(t, "True", 0)) {
return new TrueLiteralTermMatchPattern(source);
}
if (Tools.hasConstructor(t, "False", 0)) {
return new FalseLiteralTermMatchPattern(source);
}
if (Tools.hasConstructor(t, "Int", 1)) {
return new IntLiteralTermMatchPattern(Integer.parseInt(Tools.stringAt(t, 0).stringValue()), source);
}
if (Tools.hasConstructor(t, "String", 1)) {
return new StringLiteralTermMatchPattern(Tools.stringAt(t, 0).stringValue(), source);
}
throw new NotImplementedException("Unsupported literal: " + t);
}
}
# ... rest of the code ...
2ba5f562edb568653574d329a9f1ffbe8b15e7c5
tests/test_caching.py
tests/test_caching.py
import os
import tempfile
from . import RTRSSTestCase
from rtrss import caching, config
class CachingTestCase(RTRSSTestCase):
def setUp(self):
fh, self.filename = tempfile.mkstemp(dir=config.DATA_DIR)
os.close(fh)
def tearDown(self):
os.remove(self.filename)
def test_open_for_atomic_write_writes(self):
test_data = 'test'
with caching.open_for_atomic_write(self.filename) as f:
f.write(test_data)
with open(self.filename) as f:
data = f.read()
self.assertEqual(test_data, data)
def test_atomic_write_really_atomic(self):
test_data = 'test'
with caching.open_for_atomic_write(self.filename) as f:
f.write(test_data)
with open(self.filename, 'w') as f1:
f1.write('this will be overwritten')
with open(self.filename) as f:
data = f.read()
self.assertEqual(test_data, data)
import os
import tempfile
from . import TempDirTestCase
from rtrss import caching
class CachingTestCase(TempDirTestCase):
def setUp(self):
super(CachingTestCase, self).setUp()
fh, self.filename = tempfile.mkstemp(dir=self.dir.path)
os.close(fh)
def tearDown(self):
os.remove(self.filename)
super(CachingTestCase, self).tearDown()
def test_open_for_atomic_write_writes(self):
test_data = 'test'
with caching.open_for_atomic_write(self.filename) as f:
f.write(test_data)
with open(self.filename) as f:
data = f.read()
self.assertEqual(test_data, data)
def test_atomic_write_really_atomic(self):
test_data = 'test'
with caching.open_for_atomic_write(self.filename) as f:
f.write(test_data)
with open(self.filename, 'w') as f1:
f1.write('this will be overwritten')
with open(self.filename) as f:
data = f.read()
self.assertEqual(test_data, data)
## Code Before:
import os
import tempfile
from . import RTRSSTestCase
from rtrss import caching, config
class CachingTestCase(RTRSSTestCase):
def setUp(self):
fh, self.filename = tempfile.mkstemp(dir=config.DATA_DIR)
os.close(fh)
def tearDown(self):
os.remove(self.filename)
def test_open_for_atomic_write_writes(self):
test_data = 'test'
with caching.open_for_atomic_write(self.filename) as f:
f.write(test_data)
with open(self.filename) as f:
data = f.read()
self.assertEqual(test_data, data)
def test_atomic_write_really_atomic(self):
test_data = 'test'
with caching.open_for_atomic_write(self.filename) as f:
f.write(test_data)
with open(self.filename, 'w') as f1:
f1.write('this will be overwritten')
with open(self.filename) as f:
data = f.read()
self.assertEqual(test_data, data)
## Instruction:
Update test case to use new base class
## Code After:
import os
import tempfile
from . import TempDirTestCase
from rtrss import caching
class CachingTestCase(TempDirTestCase):
def setUp(self):
super(CachingTestCase, self).setUp()
fh, self.filename = tempfile.mkstemp(dir=self.dir.path)
os.close(fh)
def tearDown(self):
os.remove(self.filename)
super(CachingTestCase, self).tearDown()
def test_open_for_atomic_write_writes(self):
test_data = 'test'
with caching.open_for_atomic_write(self.filename) as f:
f.write(test_data)
with open(self.filename) as f:
data = f.read()
self.assertEqual(test_data, data)
def test_atomic_write_really_atomic(self):
test_data = 'test'
with caching.open_for_atomic_write(self.filename) as f:
f.write(test_data)
with open(self.filename, 'w') as f1:
f1.write('this will be overwritten')
with open(self.filename) as f:
data = f.read()
self.assertEqual(test_data, data)
// ... existing code ...
import os
import tempfile
from . import TempDirTestCase
from rtrss import caching
class CachingTestCase(TempDirTestCase):
def setUp(self):
super(CachingTestCase, self).setUp()
fh, self.filename = tempfile.mkstemp(dir=self.dir.path)
os.close(fh)
def tearDown(self):
os.remove(self.filename)
super(CachingTestCase, self).tearDown()
def test_open_for_atomic_write_writes(self):
test_data = 'test'
// ... rest of the code ...
61b5bc8a7e81225a83d195e016bc4adbd7ca1db5
setup.py
setup.py
from setuptools import setup, find_packages
setup(
name='pymediainfo',
version='2.1.5',
author='Louis Sautier',
author_email='[email protected]',
url='https://github.com/sbraz/pymediainfo',
description="""A Python wrapper for the mediainfo library.""",
packages=find_packages(),
namespace_packages=[],
include_package_data=True,
zip_safe=False,
license='MIT',
tests_require=["nose"],
test_suite="nose.collector",
classifiers=[
"Development Status :: 5 - Production/Stable",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Operating System :: POSIX :: Linux",
"Operating System :: MacOS :: MacOS X",
"Operating System :: Microsoft :: Windows",
"License :: OSI Approved :: MIT License",
]
)
from setuptools import setup, find_packages
setup(
name='pymediainfo',
version='2.1.5',
author='Louis Sautier',
author_email='[email protected]',
url='https://github.com/sbraz/pymediainfo',
description="""A Python wrapper for the mediainfo library.""",
packages=find_packages(),
namespace_packages=[],
include_package_data=True,
zip_safe=False,
license='MIT',
tests_require=["nose"],
test_suite="nose.collector",
classifiers=[
"Development Status :: 5 - Production/Stable",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Operating System :: POSIX :: Linux",
"Operating System :: MacOS :: MacOS X",
"Operating System :: Microsoft :: Windows",
"License :: OSI Approved :: MIT License",
]
)
Add Python 2.6 to classifiers
Add Python 2.6 to classifiers
Python
mit
paltman/pymediainfo,paltman-archive/pymediainfo
python
## Code Before:
from setuptools import setup, find_packages
setup(
name='pymediainfo',
version='2.1.5',
author='Louis Sautier',
author_email='[email protected]',
url='https://github.com/sbraz/pymediainfo',
description="""A Python wrapper for the mediainfo library.""",
packages=find_packages(),
namespace_packages=[],
include_package_data=True,
zip_safe=False,
license='MIT',
tests_require=["nose"],
test_suite="nose.collector",
classifiers=[
"Development Status :: 5 - Production/Stable",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Operating System :: POSIX :: Linux",
"Operating System :: MacOS :: MacOS X",
"Operating System :: Microsoft :: Windows",
"License :: OSI Approved :: MIT License",
]
)
## Instruction:
Add Python 2.6 to classifiers
## Code After:
from setuptools import setup, find_packages
setup(
name='pymediainfo',
version='2.1.5',
author='Louis Sautier',
author_email='[email protected]',
url='https://github.com/sbraz/pymediainfo',
description="""A Python wrapper for the mediainfo library.""",
packages=find_packages(),
namespace_packages=[],
include_package_data=True,
zip_safe=False,
license='MIT',
tests_require=["nose"],
test_suite="nose.collector",
classifiers=[
"Development Status :: 5 - Production/Stable",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Operating System :: POSIX :: Linux",
"Operating System :: MacOS :: MacOS X",
"Operating System :: Microsoft :: Windows",
"License :: OSI Approved :: MIT License",
]
)
// ... existing code ...
test_suite="nose.collector",
classifiers=[
"Development Status :: 5 - Production/Stable",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
// ... rest of the code ...
def get_readable_list(passed_list, sep=', ', end=''):
output = ""
if isinstance(passed_list, list):
for i, item in enumerate(passed_list):
if len(passed_list) is 1:
output += str(item)
else:
if i is not (len(passed_list) - 1):
output += str(item) + sep
else:
output += str(item)
elif isinstance(passed_list, dict):
for i, item in enumerate(passed_list.values()):
if len(passed_list) is 1:
output += str(item)
else:
if i is not (len(passed_list) - 1):
output += str(item) + sep
else:
output += str(item)
return output + end
def get_list_as_english(passed_list):
output = ""
for i, item in enumerate(passed_list):
if len(passed_list) is 1:
output += str(item) + ' '
elif len(passed_list) is 2:
output += str(item)
if i is not (len(passed_list) - 1):
output += " and "
else:
output += ""
else:
if i is not (len(passed_list) - 1):
output += str(item) + ", "
else:
output += "and " + str(item) + ", "
return output
def get_readable_list(passed_list, sep=', ', end=''):
output = ""
if isinstance(passed_list, list) or isinstance(passed_list, tuple):
for i, item in enumerate(passed_list):
if len(passed_list) is 1:
output += str(item)
else:
if i is not (len(passed_list) - 1):
output += str(item) + sep
else:
output += str(item)
elif isinstance(passed_list, dict):
for i, item in enumerate(passed_list.values()):
if len(passed_list) is 1:
output += str(item)
else:
if i is not (len(passed_list) - 1):
output += str(item) + sep
else:
output += str(item)
return output + end
def get_list_as_english(passed_list):
output = ""
for i, item in enumerate(passed_list):
if len(passed_list) is 1:
output += str(item) + ' '
elif len(passed_list) is 2:
output += str(item)
if i is not (len(passed_list) - 1):
output += " and "
else:
output += ""
else:
if i is not (len(passed_list) - 1):
output += str(item) + ", "
else:
output += "and " + str(item) + ", "
return output
## Code Before:
def get_readable_list(passed_list, sep=', ', end=''):
    """Join the items of a list, tuple, or dict's values into one string.

    Args:
        passed_list: A list, tuple, or dict. List/tuple items (or dict
            values) are stringified and joined; any other type yields ''.
        sep: Separator placed between consecutive items.
        end: Suffix appended to the joined result.

    Returns:
        The joined string followed by ``end``.
    """
    # Accept tuples as well as lists; both join the same way.
    if isinstance(passed_list, (list, tuple)):
        items = [str(item) for item in passed_list]
    elif isinstance(passed_list, dict):
        items = [str(value) for value in passed_list.values()]
    else:
        items = []
    # The original compared lengths with `is` (CPython small-int caching
    # accident); str.join gives the same separator placement correctly.
    return sep.join(items) + end
def get_list_as_english(passed_list):
    """Render a list as an English-style enumeration.

    One item yields 'item ' (with a trailing space); two items yield
    'a and b'; three or more yield 'a, b, and c, ' (trailing ', '
    reproduces the original output -- TODO confirm it is intentional).
    """
    count = len(passed_list)
    parts = []
    for i, item in enumerate(passed_list):
        text = str(item)
        # ==/!= replace the original's `is`/`is not` int comparisons.
        if count == 1:
            parts.append(text + ' ')
        elif count == 2:
            parts.append(text + (' and ' if i != count - 1 else ''))
        else:
            if i != count - 1:
                parts.append(text + ', ')
            else:
                parts.append('and ' + text + ', ')
    return ''.join(parts)
## Instruction:
Make get_readable_list process tuples, too
## Code After:
def get_readable_list(passed_list, sep=', ', end=''):
    """Join the items of a list, tuple, or dict's values into one string.

    Args:
        passed_list: A list, tuple, or dict. List/tuple items (or dict
            values) are stringified and joined; any other type yields ''.
        sep: Separator placed between consecutive items.
        end: Suffix appended to the joined result.

    Returns:
        The joined string followed by ``end``.
    """
    if isinstance(passed_list, (list, tuple)):
        items = [str(item) for item in passed_list]
    elif isinstance(passed_list, dict):
        items = [str(value) for value in passed_list.values()]
    else:
        items = []
    # The original compared lengths with `is` (CPython small-int caching
    # accident); str.join gives the same separator placement correctly.
    return sep.join(items) + end
def get_list_as_english(passed_list):
    """Render a list as an English-style enumeration.

    One item yields 'item ' (with a trailing space); two items yield
    'a and b'; three or more yield 'a, b, and c, ' (trailing ', '
    reproduces the original output -- TODO confirm it is intentional).
    """
    count = len(passed_list)
    parts = []
    for i, item in enumerate(passed_list):
        text = str(item)
        # ==/!= replace the original's `is`/`is not` int comparisons.
        if count == 1:
            parts.append(text + ' ')
        elif count == 2:
            parts.append(text + (' and ' if i != count - 1 else ''))
        else:
            if i != count - 1:
                parts.append(text + ', ')
            else:
                parts.append('and ' + text + ', ')
    return ''.join(parts)
// ... existing code ...
def get_readable_list(passed_list, sep=', ', end=''):
output = ""
if isinstance(passed_list, list) or isinstance(passed_list, tuple):
for i, item in enumerate(passed_list):
if len(passed_list) is 1:
output += str(item)
// ... modified code ...
output += str(item) + sep
else:
output += str(item)
elif isinstance(passed_list, dict):
for i, item in enumerate(passed_list.values()):
// ... rest of the code ...
7627b8759ab08df562048ec1fa94fe9d69d01374
setup.py
setup.py
from setuptools import setup
from exoline import __version__ as version

# Runtime dependencies are tracked in requirements.txt.
with open('requirements.txt') as f:
    required = f.read().splitlines()

# collections.OrderedDict is missing on older interpreters; fall back to
# the backport package in that case.
try:
    from collections import OrderedDict
except ImportError:
    required.append('ordereddict==1.1')

# Read the long description up front so the file handles are closed
# deterministically (the original left open() handles to the GC).
with open('README.md') as readme:
    long_description = readme.read()
with open('HISTORY.md') as history:
    long_description += '\n\n' + history.read()

setup(
    name='exoline',
    version=version,
    url='http://github.com/dweaver/exoline',
    author='Dan Weaver',
    author_email='[email protected]',
    description='Command line interface for Exosite platform.',
    long_description=long_description,
    packages=['exoline'],
    package_dir={'exoline': 'exoline'},
    scripts=['bin/exo', 'bin/exoline'],
    keywords=['exosite', 'onep', 'one platform', 'm2m'],
    install_requires=required,
    zip_safe=False,
)
from setuptools import setup
from exoline import __version__ as version

# Runtime dependencies are tracked in requirements.txt.
with open('requirements.txt') as f:
    required = f.read().splitlines()

# Backports for interpreters that lack these stdlib modules.
try:
    from collections import OrderedDict
except ImportError:
    required.append('ordereddict>=1.1')
try:
    import importlib
except ImportError:
    required.append('importlib>=1.0.2')

# Read the long description up front so the file handles are closed
# deterministically (the original left open() handles to the GC).
with open('README.md') as readme:
    long_description = readme.read()
with open('HISTORY.md') as history:
    long_description += '\n\n' + history.read()

setup(
    name='exoline',
    version=version,
    url='http://github.com/dweaver/exoline',
    author='Dan Weaver',
    author_email='[email protected]',
    description='Command line interface for Exosite platform.',
    long_description=long_description,
    packages=['exoline'],
    package_dir={'exoline': 'exoline'},
    scripts=['bin/exo', 'bin/exoline'],
    keywords=['exosite', 'onep', 'one platform', 'm2m'],
    install_requires=required,
    zip_safe=False,
)
package org.apacheextras.camel.examples.rcode;
import org.apache.camel.CamelContext;
import org.apache.camel.impl.DefaultCamelContext;
import java.io.Console;
import java.io.File;
import java.util.concurrent.TimeUnit;
/**
 * Example launcher: boots a Camel context with the RCode route, then blocks
 * until the user presses enter (or for five seconds when no interactive
 * console is available, e.g. when run from an IDE).
 *
 * @author Sebastian Rühl
 */
public class RCodeRunner {

    public static void main(String... args) throws Exception {
        // Route data lives under the first CLI argument, or a default
        // directory in the user's home when none is given.
        final File routeBase;
        if (args.length > 0) {
            routeBase = new File(args[0]);
        } else {
            routeBase = new File(System.getProperty("user.home") + "/.rcode-example");
        }

        final CamelContext context = new DefaultCamelContext();
        context.addRoutes(new RCodeRouteBuilder(routeBase));
        context.start();

        final Console console = System.console();
        if (console == null) {
            // No interactive console: let the route run briefly instead.
            TimeUnit.SECONDS.sleep(5);
        } else {
            console.printf("Please press enter to shutdown route.");
            console.readLine();
        }

        context.stop();
    }
}
/*
* Copyright 2013 Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apacheextras.camel.examples.rcode;
import org.apache.camel.CamelContext;
import org.apache.camel.impl.DefaultCamelContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
/**
 * Example launcher: builds a Camel context around {@code RCodeRouteBuilder}
 * in the constructor and runs it briefly from {@link #main(String...)}.
 *
 * @author cemmersb
 */
public class RCodeRunner {

  private final static Logger LOGGER = LoggerFactory.getLogger(RCodeRunner.class);

  /** Camel context hosting the RCode route; started during construction. */
  private CamelContext camelContext;
  private RCodeRouteBuilder routeBuilder = null;

  public RCodeRunner() {
    try {
      initializeContext();
    } catch (Exception ex) {
      // Construction is best-effort: failures are logged, not rethrown.
      LOGGER.error("Unable to initialize context: {}", ex.getMessage());
    }
  }

  /** Wires the route builder into a fresh context and starts it. */
  private void initializeContext() throws Exception {
    // BUG FIX: the original concatenated "user.dir" + "./rcode-example/data",
    // which produces a path like "/work./rcode-example/data" (no separator
    // before the dot). Build the child path with File(parent, child) instead.
    routeBuilder = new RCodeRouteBuilder(
        new File(System.getProperty("user.dir"), "rcode-example/data"));
    camelContext = new DefaultCamelContext();
    camelContext.addRoutes(routeBuilder);
    camelContext.start();
  }

  @Override
  protected void finalize() throws Throwable {
    // NOTE(review): finalizers are unreliable for resource cleanup; main()
    // below calls this explicitly so the context is stopped deterministically
    // there. Consider replacing with an explicit stop() method.
    camelContext.stop();
    super.finalize();
  }

  public static void main(String... args) throws InterruptedException, Throwable {
    LOGGER.info("Starting RCodeRunner.");
    RCodeRunner rCodeRunner = new RCodeRunner();
    // Let the route run for a second before shutting down.
    Thread.sleep(1000);
    LOGGER.info("Stopping RCodeRunner.");
    rCodeRunner.finalize();
  }
}
Revert "migrate example to rcoderunner"
Revert "migrate example to rcoderunner"
This reverts commit b03405d0726820a1f184e0e80892826d02994763.
## Code Before:
package org.apacheextras.camel.examples.rcode;
import org.apache.camel.CamelContext;
import org.apache.camel.impl.DefaultCamelContext;
import java.io.Console;
import java.io.File;
import java.util.concurrent.TimeUnit;
/**
 * Example launcher: boots a Camel context with the RCode route, then blocks
 * until the user presses enter (or for five seconds when no interactive
 * console is available, e.g. when run from an IDE).
 *
 * @author Sebastian Rühl
 */
public class RCodeRunner {

    public static void main(String... args) throws Exception {
        // Route data lives under the first CLI argument, or a default
        // directory in the user's home when none is given.
        final File routeBase;
        if (args.length > 0) {
            routeBase = new File(args[0]);
        } else {
            routeBase = new File(System.getProperty("user.home") + "/.rcode-example");
        }

        final CamelContext context = new DefaultCamelContext();
        context.addRoutes(new RCodeRouteBuilder(routeBase));
        context.start();

        final Console console = System.console();
        if (console == null) {
            // No interactive console: let the route run briefly instead.
            TimeUnit.SECONDS.sleep(5);
        } else {
            console.printf("Please press enter to shutdown route.");
            console.readLine();
        }

        context.stop();
    }
}
## Instruction:
Revert "migrate example to rcoderunner"
This reverts commit b03405d0726820a1f184e0e80892826d02994763.
## Code After:
/*
* Copyright 2013 Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apacheextras.camel.examples.rcode;
import org.apache.camel.CamelContext;
import org.apache.camel.impl.DefaultCamelContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
/**
 * Example launcher: builds a Camel context around {@code RCodeRouteBuilder}
 * in the constructor and runs it briefly from {@link #main(String...)}.
 *
 * @author cemmersb
 */
public class RCodeRunner {

  private final static Logger LOGGER = LoggerFactory.getLogger(RCodeRunner.class);

  /** Camel context hosting the RCode route; started during construction. */
  private CamelContext camelContext;
  private RCodeRouteBuilder routeBuilder = null;

  public RCodeRunner() {
    try {
      initializeContext();
    } catch (Exception ex) {
      // Construction is best-effort: failures are logged, not rethrown.
      LOGGER.error("Unable to initialize context: {}", ex.getMessage());
    }
  }

  /** Wires the route builder into a fresh context and starts it. */
  private void initializeContext() throws Exception {
    // BUG FIX: the original concatenated "user.dir" + "./rcode-example/data",
    // which produces a path like "/work./rcode-example/data" (no separator
    // before the dot). Build the child path with File(parent, child) instead.
    routeBuilder = new RCodeRouteBuilder(
        new File(System.getProperty("user.dir"), "rcode-example/data"));
    camelContext = new DefaultCamelContext();
    camelContext.addRoutes(routeBuilder);
    camelContext.start();
  }

  @Override
  protected void finalize() throws Throwable {
    // NOTE(review): finalizers are unreliable for resource cleanup; main()
    // below calls this explicitly so the context is stopped deterministically
    // there. Consider replacing with an explicit stop() method.
    camelContext.stop();
    super.finalize();
  }

  public static void main(String... args) throws InterruptedException, Throwable {
    LOGGER.info("Starting RCodeRunner.");
    RCodeRunner rCodeRunner = new RCodeRunner();
    // Let the route run for a second before shutting down.
    Thread.sleep(1000);
    LOGGER.info("Stopping RCodeRunner.");
    rCodeRunner.finalize();
  }
}
# ... existing code ...
/*
* Copyright 2013 Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apacheextras.camel.examples.rcode;
import org.apache.camel.CamelContext;
import org.apache.camel.impl.DefaultCamelContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
/**
 * Example launcher: builds a Camel context around {@code RCodeRouteBuilder}
 * in the constructor and runs it briefly from {@link #main(String...)}.
 *
 * @author cemmersb
 */
public class RCodeRunner {

  private final static Logger LOGGER = LoggerFactory.getLogger(RCodeRunner.class);

  /** Camel context hosting the RCode route; started during construction. */
  private CamelContext camelContext;
  private RCodeRouteBuilder routeBuilder = null;

  public RCodeRunner() {
    try {
      initializeContext();
    } catch (Exception ex) {
      // Construction is best-effort: failures are logged, not rethrown.
      LOGGER.error("Unable to initialize context: {}", ex.getMessage());
    }
  }

  /** Wires the route builder into a fresh context and starts it. */
  private void initializeContext() throws Exception {
    // BUG FIX: the original concatenated "user.dir" + "./rcode-example/data",
    // which produces a path like "/work./rcode-example/data" (no separator
    // before the dot). Build the child path with File(parent, child) instead.
    routeBuilder = new RCodeRouteBuilder(
        new File(System.getProperty("user.dir"), "rcode-example/data"));
    camelContext = new DefaultCamelContext();
    camelContext.addRoutes(routeBuilder);
    camelContext.start();
  }

  @Override
  protected void finalize() throws Throwable {
    // NOTE(review): finalizers are unreliable for resource cleanup; main()
    // below calls this explicitly so the context is stopped deterministically
    // there. Consider replacing with an explicit stop() method.
    camelContext.stop();
    super.finalize();
  }

  public static void main(String... args) throws InterruptedException, Throwable {
    LOGGER.info("Starting RCodeRunner.");
    RCodeRunner rCodeRunner = new RCodeRunner();
    // Let the route run for a second before shutting down.
    Thread.sleep(1000);
    LOGGER.info("Stopping RCodeRunner.");
    rCodeRunner.finalize();
  }
}
# ... rest of the code ...
9ceace60593f133b4f6dfdbd9b6f583362415294
src/configuration.py
src/configuration.py
import ConfigParser
import os
class ConfigDlstats(object):
    """Cross platform configuration file handler.

    This class manages dlstats configuration files, providing
    easy access to the options."""

    def __init__(self):
        """Open the configuration files handler, choosing the right
        path depending on the platform.

        Fixes over the original: `def class` was invalid syntax, the
        __init__ signature lacked its colon, and the FileNotFoundError
        call was missing its closing parenthesis.

        NOTE(review): ConfigParser is the Python 2 module name while
        FileNotFoundError is Python 3 only, and UnsupportedOSError is
        not defined in this file -- confirm the intended interpreter.
        """
        appname = 'dlstats'
        if os.name == 'posix':
            # Per-user dotfile takes precedence over the system-wide file.
            if os.path.isfile(os.environ["HOME"]+'/.'+appname):
                self.filename = os.environ["HOME"]+'/.'+appname
            elif os.path.isfile('/etc/'+appname):
                self.filename = '/etc/'+appname
            else:
                raise FileNotFoundError('No configuration file found.')
        elif os.name == 'mac':
            self.filename = ("%s/Library/Application Support/%s" %
                             (os.environ["HOME"], appname))
        elif os.name == 'nt':
            self.filename = ("%s\Application Data\%s" %
                             (os.environ["HOMEPATH"], appname))
        else:
            raise UnsupportedOSError(os.name)
        self.config = ConfigParser.ConfigParser()
        self.config.read(self.filename)
import ConfigParser
import os
class ConfigDlstats(object):
    """Cross platform configuration file handler.

    This class manages dlstats configuration files, providing
    easy access to the options."""

    def __init__(self):
        """Open the configuration files handler, choosing the right
        path depending on the platform.

        NOTE(review): ConfigParser is the Python 2 module name while
        FileNotFoundError is Python 3 only, and UnsupportedOSError is
        not defined in this file -- confirm the intended interpreter
        and where that exception class lives.
        """
        appname = 'dlstats'
        if os.name == 'posix':
            # Per-user dotfile takes precedence over the system-wide file.
            if os.path.isfile(os.environ["HOME"]+'/.'+appname):
                self.filename = os.environ["HOME"]+'/.'+appname
            elif os.path.isfile('/etc/'+appname):
                self.filename = '/etc/'+appname
            else:
                raise FileNotFoundError('No configuration file found.')
        elif os.name == 'mac':
            self.filename = ("%s/Library/Application Support/%s" %
                             (os.environ["HOME"], appname))
        elif os.name == 'nt':
            self.filename = ("%s\Application Data\%s" %
                             (os.environ["HOMEPATH"], appname))
        else:
            raise UnsupportedOSError(os.name)
        self.config = ConfigParser.ConfigParser()
        self.config.read(self.filename)
import threading, socket
"""
Dummy server using for unit testing
"""
class Server(threading.Thread):
def __init__(self, handler, host='localhost', port=8021):
threading.Thread.__init__(self)
self.handler = handler
self.host = host
self.port = port
self.ready_event = threading.Event()
self.stop_event = threading.Event()
def run(self):
sock = socket.socket()
sock.bind((self.host, self.port))
sock.listen(0)
self.ready_event.set()
self.handler(sock)
self.stop_event.set()
sock.close()
def __enter__(self):
self.start()
self.ready_event.wait()
return self.host, self.port
def __exit__(self, exc_type, exc_value, traceback):
if exc_type is None:
self.stop_event.wait()
return False # allow exceptions to propagate
import threading, socket
class Server(threading.Thread):
    """ Dummy server used for unit testing """
    # Binds a listening socket on (host, port) in a background thread and
    # hands it to ``handler``. Usable as a context manager: entering starts
    # the thread and waits for the bind; exiting waits for the handler to
    # finish unless the body raised.

    def __init__(self, handler, host='localhost', port=8021):
        threading.Thread.__init__(self)
        self.handler = handler  # callable invoked with the listening socket
        self.host = host
        self.port = port
        self.ready_event = threading.Event()  # set once the socket listens
        self.stop_event = threading.Event()   # set once the handler returns

    def run(self):
        sock = socket.socket()
        sock.bind((self.host, self.port))
        sock.listen(0)
        self.ready_event.set()
        self.handler(sock)
        self.stop_event.set()
        sock.close()

    def __enter__(self):
        self.start()
        # Do not hand out the address until the socket is actually bound.
        self.ready_event.wait()
        return self.host, self.port

    def __exit__(self, exc_type, exc_value, traceback):
        if exc_type is None:
            # Clean exit: wait for the handler to complete.
            self.stop_event.wait()
        return False  # allow exceptions to propagate
Put docstring inside Server class
Put docstring inside Server class
Python
apache-2.0
psf/requests
python
## Code Before:
import threading, socket
"""
Dummy server using for unit testing
"""
class Server(threading.Thread):
def __init__(self, handler, host='localhost', port=8021):
threading.Thread.__init__(self)
self.handler = handler
self.host = host
self.port = port
self.ready_event = threading.Event()
self.stop_event = threading.Event()
def run(self):
sock = socket.socket()
sock.bind((self.host, self.port))
sock.listen(0)
self.ready_event.set()
self.handler(sock)
self.stop_event.set()
sock.close()
def __enter__(self):
self.start()
self.ready_event.wait()
return self.host, self.port
def __exit__(self, exc_type, exc_value, traceback):
if exc_type is None:
self.stop_event.wait()
return False # allow exceptions to propagate
## Instruction:
Put docstring inside Server class
## Code After:
import threading, socket
class Server(threading.Thread):
""" Dummy server using for unit testing """
def __init__(self, handler, host='localhost', port=8021):
threading.Thread.__init__(self)
self.handler = handler
self.host = host
self.port = port
self.ready_event = threading.Event()
self.stop_event = threading.Event()
def run(self):
sock = socket.socket()
sock.bind((self.host, self.port))
sock.listen(0)
self.ready_event.set()
self.handler(sock)
self.stop_event.set()
sock.close()
def __enter__(self):
self.start()
self.ready_event.wait()
return self.host, self.port
def __exit__(self, exc_type, exc_value, traceback):
if exc_type is None:
self.stop_event.wait()
return False # allow exceptions to propagate
# ... existing code ...
class Server(threading.Thread):
""" Dummy server using for unit testing """
def __init__(self, handler, host='localhost', port=8021):
threading.Thread.__init__(self)
self.handler = handler
# ... rest of the code ...
44d103359cff312865f409ff34f528f63e441ef4
graphapi/views.py
graphapi/views.py
from simplekeys.verifier import verify_request
from graphene_django.views import GraphQLView
from django.conf import settings
class KeyedGraphQLView(GraphQLView):
    """GraphQL view that gates non-GraphiQL requests behind a simplekeys API key."""

    # Custom GraphiQL template that receives the demo key (see render_graphiql).
    graphiql_template = "graphene/graphiql-keyed.html"

    def get_response(self, request, data, show_graphiql=False):
        """Verify the caller's API key, then delegate to GraphQLView.

        GraphiQL (browser) requests are exempt; any other request that fails
        verification is short-circuited with the verifier's error response.
        """
        # SECURITY: this check was commented out, leaving the API open to
        # unauthenticated queries; re-enable key verification.
        if not show_graphiql:
            error = verify_request(request, 'graphapi')
            if error:
                return error, error.status_code
        return super().get_response(request, data, show_graphiql)

    def render_graphiql(self, request, **data):
        """Expose the demo key to the GraphiQL template context."""
        data['demo_key'] = settings.GRAPHQL_DEMO_KEY
        return super().render_graphiql(request, **data)
from simplekeys.verifier import verify_request
from graphene_django.views import GraphQLView
from django.conf import settings
class KeyedGraphQLView(GraphQLView):
    """GraphQL view that gates non-GraphiQL requests behind a simplekeys API key."""

    # Custom GraphiQL template that receives the demo key (see render_graphiql).
    graphiql_template = "graphene/graphiql-keyed.html"

    def get_response(self, request, data, show_graphiql=False):
        # check key only if we're not handling a graphiql request
        if not show_graphiql:
            error = verify_request(request, 'graphapi')
            if error:
                # Short-circuit with the verifier's error response and status.
                return error, error.status_code
        return super().get_response(request, data, show_graphiql)

    def render_graphiql(self, request, **data):
        # Expose the demo key to the GraphiQL template context.
        data['demo_key'] = settings.GRAPHQL_DEMO_KEY
        return super().render_graphiql(request, **data)
Revert "Reimplement using explicit variable lookup"
Revert "Reimplement using explicit variable lookup"
This reverts commit 94683e6c
## Code Before:
from simplekeys.verifier import verify_request
from graphene_django.views import GraphQLView
from django.conf import settings
class KeyedGraphQLView(GraphQLView):
    """GraphQL view that gates non-GraphiQL requests behind a simplekeys API key."""

    # Custom GraphiQL template that receives the demo key (see render_graphiql).
    graphiql_template = "graphene/graphiql-keyed.html"

    def get_response(self, request, data, show_graphiql=False):
        """Verify the caller's API key, then delegate to GraphQLView.

        GraphiQL (browser) requests are exempt; any other request that fails
        verification is short-circuited with the verifier's error response.
        """
        # SECURITY: this check was commented out, leaving the API open to
        # unauthenticated queries; re-enable key verification.
        if not show_graphiql:
            error = verify_request(request, 'graphapi')
            if error:
                return error, error.status_code
        return super().get_response(request, data, show_graphiql)

    def render_graphiql(self, request, **data):
        """Expose the demo key to the GraphiQL template context."""
        data['demo_key'] = settings.GRAPHQL_DEMO_KEY
        return super().render_graphiql(request, **data)
## Instruction:
Revert "Reimplement using explicit variable lookup"
This reverts commit 94683e6c
## Code After:
from simplekeys.verifier import verify_request
from graphene_django.views import GraphQLView
from django.conf import settings
class KeyedGraphQLView(GraphQLView):
    """GraphQL view that gates non-GraphiQL requests behind a simplekeys API key."""

    # Custom GraphiQL template that receives the demo key (see render_graphiql).
    graphiql_template = "graphene/graphiql-keyed.html"

    def get_response(self, request, data, show_graphiql=False):
        # check key only if we're not handling a graphiql request
        if not show_graphiql:
            error = verify_request(request, 'graphapi')
            if error:
                # Short-circuit with the verifier's error response and status.
                return error, error.status_code
        return super().get_response(request, data, show_graphiql)

    def render_graphiql(self, request, **data):
        # Expose the demo key to the GraphiQL template context.
        data['demo_key'] = settings.GRAPHQL_DEMO_KEY
        return super().render_graphiql(request, **data)
// ... existing code ...
def get_response(self, request, data, show_graphiql=False):
# check key only if we're not handling a graphiql request
if not show_graphiql:
error = verify_request(request, 'graphapi')
if error:
return error, error.status_code
return super().get_response(request, data, show_graphiql)
// ... rest of the code ...
package coderefactory.net.popmovies;
import android.app.Activity;
import android.support.annotation.NonNull;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.TextView;
import java.util.List;
/** Adapter that renders {@code Movie} items as rows of a list view. */
public class MovieAdapter extends ArrayAdapter<Movie> {

    public MovieAdapter(final Activity context, final List<Movie> movies) {
        // Layout resource 0: getView inflates the row layout manually.
        super(context, 0, movies);
    }

    /**
     * Inflates (or recycles) a row view and binds the movie at
     * {@code position} into it.
     *
     * NOTE(review): findViewById runs on every call; a ViewHolder cache
     * would avoid the repeated lookups -- confirm before optimizing.
     */
    @NonNull
    @Override
    public View getView(final int position, final View convertView, final ViewGroup parent) {
        final View rootView;
        if (convertView == null) {
            // No recycled row available: inflate a fresh one.
            rootView = LayoutInflater.from(getContext()).inflate(R.layout.list_item_movie, parent, false);
        } else {
            rootView = convertView;
        }

        final Movie movie = getItem(position);

        final TextView titleView = (TextView) rootView.findViewById(R.id.movie_title);
        titleView.setText(movie.getTitle());
        final TextView releaseView = (TextView) rootView.findViewById(R.id.movie_released);
        releaseView.setText(String.valueOf(movie.getReleased()));

        return rootView;
    }
}
package coderefactory.net.popmovies;
import android.app.Activity;
import android.support.annotation.NonNull;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.TextView;
import java.util.List;
/** Adapter that renders {@code Movie} items as rows, using the ViewHolder pattern. */
public class MovieAdapter extends ArrayAdapter<Movie> {

    public MovieAdapter(final Activity context, final List<Movie> movies) {
        // Layout resource 0: getView inflates the row layout manually.
        super(context, 0, movies);
    }

    /**
     * Inflates (or recycles) a row view and binds the movie at
     * {@code position} into it.
     *
     * FIX: the ViewHolder was previously stored in an adapter instance
     * field, i.e. state shared across getView calls; it is per-row state
     * and is now a local handed to populateView explicitly.
     */
    @NonNull
    @Override
    public View getView(final int position, final View convertView, final ViewGroup parent) {
        final View itemView;
        final ViewHolder viewHolder;
        if (convertView == null) {
            itemView = LayoutInflater.from(getContext()).inflate(R.layout.list_item_movie, parent, false);
            viewHolder = new ViewHolder(itemView);
            itemView.setTag(viewHolder);
        } else {
            itemView = convertView;
            viewHolder = (ViewHolder) convertView.getTag();
        }
        populateView(viewHolder, position);
        return itemView;
    }

    /** Binds the movie at {@code position} into the given row's cached views. */
    private void populateView(final ViewHolder viewHolder, final int position) {
        final Movie movie = getItem(position);
        viewHolder.titleView.setText(movie.getTitle());
        viewHolder.releaseView.setText(String.valueOf(movie.getReleased()));
    }

    /** Caches row sub-views so findViewById runs once per inflated row. */
    private static class ViewHolder {
        private final TextView titleView;
        private final TextView releaseView;

        private ViewHolder(final View itemView) {
            titleView = (TextView) itemView.findViewById(R.id.movie_title);
            releaseView = (TextView) itemView.findViewById(R.id.movie_released);
        }
    }
}
Introduce ViewHolder pattern into ArrayAdapter
Introduce ViewHolder pattern into ArrayAdapter
Java
mit
jarst/PopMovies
java
## Code Before:
package coderefactory.net.popmovies;
import android.app.Activity;
import android.support.annotation.NonNull;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.TextView;
import java.util.List;
/** Adapter that renders {@code Movie} items as rows of a list view. */
public class MovieAdapter extends ArrayAdapter<Movie> {

    public MovieAdapter(final Activity context, final List<Movie> movies) {
        // Layout resource 0: getView inflates the row layout manually.
        super(context, 0, movies);
    }

    /**
     * Inflates (or recycles) a row view and binds the movie at
     * {@code position} into it.
     *
     * NOTE(review): findViewById runs on every call; a ViewHolder cache
     * would avoid the repeated lookups -- confirm before optimizing.
     */
    @NonNull
    @Override
    public View getView(final int position, final View convertView, final ViewGroup parent) {
        final View rootView;
        if (convertView == null) {
            // No recycled row available: inflate a fresh one.
            rootView = LayoutInflater.from(getContext()).inflate(R.layout.list_item_movie, parent, false);
        } else {
            rootView = convertView;
        }

        final Movie movie = getItem(position);

        final TextView titleView = (TextView) rootView.findViewById(R.id.movie_title);
        titleView.setText(movie.getTitle());
        final TextView releaseView = (TextView) rootView.findViewById(R.id.movie_released);
        releaseView.setText(String.valueOf(movie.getReleased()));

        return rootView;
    }
}
## Instruction:
Introduce ViewHolder pattern into ArrayAdapter
## Code After:
package coderefactory.net.popmovies;
import android.app.Activity;
import android.support.annotation.NonNull;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.TextView;
import java.util.List;
/** Adapter that renders {@code Movie} items as rows, using the ViewHolder pattern. */
public class MovieAdapter extends ArrayAdapter<Movie> {

    public MovieAdapter(final Activity context, final List<Movie> movies) {
        // Layout resource 0: getView inflates the row layout manually.
        super(context, 0, movies);
    }

    /**
     * Inflates (or recycles) a row view and binds the movie at
     * {@code position} into it.
     *
     * FIX: the ViewHolder was previously stored in an adapter instance
     * field, i.e. state shared across getView calls; it is per-row state
     * and is now a local handed to populateView explicitly.
     */
    @NonNull
    @Override
    public View getView(final int position, final View convertView, final ViewGroup parent) {
        final View itemView;
        final ViewHolder viewHolder;
        if (convertView == null) {
            itemView = LayoutInflater.from(getContext()).inflate(R.layout.list_item_movie, parent, false);
            viewHolder = new ViewHolder(itemView);
            itemView.setTag(viewHolder);
        } else {
            itemView = convertView;
            viewHolder = (ViewHolder) convertView.getTag();
        }
        populateView(viewHolder, position);
        return itemView;
    }

    /** Binds the movie at {@code position} into the given row's cached views. */
    private void populateView(final ViewHolder viewHolder, final int position) {
        final Movie movie = getItem(position);
        viewHolder.titleView.setText(movie.getTitle());
        viewHolder.releaseView.setText(String.valueOf(movie.getReleased()));
    }

    /** Caches row sub-views so findViewById runs once per inflated row. */
    private static class ViewHolder {
        private final TextView titleView;
        private final TextView releaseView;

        private ViewHolder(final View itemView) {
            titleView = (TextView) itemView.findViewById(R.id.movie_title);
            releaseView = (TextView) itemView.findViewById(R.id.movie_released);
        }
    }
}
...
public class MovieAdapter extends ArrayAdapter<Movie> {
private ViewHolder viewHolder;
public MovieAdapter(final Activity context, final List<Movie> movies) {
super(context, 0, movies);
}
...
@NonNull
@Override
public View getView(final int position, final View convertView, final ViewGroup parent) {
final View itemView;
if (convertView == null) {
itemView = LayoutInflater.from(getContext()).inflate(R.layout.list_item_movie, parent, false);
viewHolder = new ViewHolder(itemView);
itemView.setTag(viewHolder);
} else {
itemView = convertView;
viewHolder = (ViewHolder) convertView.getTag();
}
populateView(position);
return itemView;
}
private void populateView(final int position) {
final Movie movie = getItem(position);
viewHolder.titleView.setText(movie.getTitle());
viewHolder.releaseView.setText(String.valueOf(movie.getReleased()));
}
private static class ViewHolder {
private final TextView titleView;
private final TextView releaseView;
private ViewHolder(final View itemView) {
titleView = (TextView) itemView.findViewById(R.id.movie_title);
releaseView = (TextView) itemView.findViewById(R.id.movie_released);
}
}
}
...
0261c895cb41f5caba42ae432b997fd3c941e96f
tests.py
tests.py
import pytest
import cleaner
class TestTagRemoval():
    """Tests for cleaner.remove_superflous_markup."""

    def test_span_removal(self):
        # <span> wrappers (with or without attributes) are stripped,
        # leaving only their text content.
        text = ('<span style="font-family: "helvetica neue" ,'
                '"arial" , "helvetica" , sans-serif;">This is some'
                ' dummy text lalalala</span> This is some more dummy text '
                '<span>test</span>')
        expected = ('This is some dummy text lalalala This is some more dummy '
                    'text test')
        cleaned = cleaner.remove_superflous_markup(text)
        assert cleaned == expected
## Code Before:
import pytest
import cleaner
class TestTagRemoval():
    """Tests for cleaner.remove_superflous_markup."""

    def test_span_removal(self):
        # <span> wrappers (with or without attributes) are stripped,
        # leaving only their text content.
        text = ('<span style="font-family: "helvetica neue" ,'
                '"arial" , "helvetica" , sans-serif;">This is some'
                ' dummy text lalalala</span> This is some more dummy text '
                '<span>test</span>')
        expected = ('This is some dummy text lalalala This is some more dummy '
                    'text test')
        cleaned = cleaner.remove_superflous_markup(text)
        assert cleaned == expected
## Instruction:
Add test for getting pure html tag
## Code After:
import pytest
import cleaner
class TestTagTools():
    """Tests for cleaner.get_pure_tag."""

    def test_get_pure_tag(self):
        # get_pure_tag keeps the tag name (and closing slash) but drops
        # any attributes.
        tag1 = '<div>'
        tag2 = '</div>'
        tag3 = '<pre class="prettyprint">'
        assert cleaner.get_pure_tag(tag1) == '<div>'
        assert cleaner.get_pure_tag(tag2) == '</div>'
        assert cleaner.get_pure_tag(tag3) == '<pre>'
from xml.dom import minidom
import xml.etree.ElementTree as ET
import xmltodict
def add(k, parent=None, txt=None, attrs=None):
    """Create element ``k`` (as a root, or under ``parent``) with optional
    text and attributes.

    Args:
        k: Tag name of the new element.
        parent: Existing element to attach to; a root Element when None.
        txt: Optional text content (coerced with unicode()).
        attrs: Optional mapping of attribute names to values.

    Returns:
        The newly created element handle.
    """
    if parent is None:
        handle = ET.Element(k)
    else:
        handle = ET.SubElement(parent, k)
    if txt:
        # NOTE(review): unicode()/iteritems() mark this as Python 2 code.
        handle.text = unicode(txt)
    # Guard explicitly instead of swallowing AttributeError, which also hid
    # genuine errors (e.g. attrs passed as a list), and avoid shadowing the
    # ``k`` parameter with the loop variable.
    if attrs is not None:
        for name, value in attrs.iteritems():
            handle.attrib[name] = value
    return handle
def etree2xml(e, encoding='UTF-8'):
    """Serialize element ``e``; pass ``encoding`` through when it is truthy,
    otherwise use tostring's default output."""
    if encoding:
        return ET.tostring(e, encoding=encoding)
    return ET.tostring(e)
def pretty(xml=None, fn=None):
    """Pretty-print XML given as a filename (``fn``), an XML string, or a
    ``minidom.Document``; returns the two-space-indented text."""
    if fn is not None:
        doc = minidom.parse(fn)
    elif isinstance(xml, minidom.Document):
        doc = xml
    else:
        doc = minidom.parseString(xml)
    return doc.toprettyxml(indent='  ')
def xml_fn_to_json(fn):
    """Parse the XML file at ``fn`` into a dict via xmltodict.

    The file handle is closed deterministically with a context manager
    (the original left it open for the garbage collector).
    """
    with open(fn, 'r') as fh:
        return xmltodict.parse(fh.read())
from xml.dom import minidom
import lxml.etree as ET
import xmltodict
def add(k, parent=None, txt=None, attrs=None):
    """Create element ``k`` (as a root, or under ``parent``) with optional
    text and attributes, returning the new element.

    NOTE(review): unicode()/iteritems() mark this as Python 2 code; a
    non-mapping ``attrs`` is silently ignored by the AttributeError catch,
    and the loop variable shadows the ``k`` parameter -- confirm both are
    intentional.
    """
    if parent is None:
        handle = ET.Element(k)
    else:
        handle = ET.SubElement(parent, k)
    if txt: handle.text = unicode(txt)
    try:
        # Copy attributes; AttributeError (e.g. attrs=None) is ignored.
        for k, v in attrs.iteritems(): handle.attrib[k] = v
    except AttributeError:
        pass
    return handle
def etree2xml(e, encoding='UTF-8'):
    """Serialize element ``e``; pass ``encoding`` through when it is truthy,
    otherwise use tostring's default output."""
    return ET.tostring(e, encoding=encoding) if encoding else ET.tostring(e)
def pretty(xml=None, fn=None):
    """Pretty-print XML given as a filename (``fn``), an XML string, or a
    ``minidom.Document``; returns the two-space-indented text."""
    if fn is not None:
        xml = minidom.parse(fn)
    elif not isinstance(xml, minidom.Document):
        xml = minidom.parseString(xml)
    return xml.toprettyxml(indent='  ')
def xml_fn_to_json(fn):
    """Parse the XML file at ``fn`` into a dict via xmltodict.

    The file handle is closed deterministically with a context manager
    (the original left it open for the garbage collector).
    """
    with open(fn, 'r') as fh:
        return xmltodict.parse(fh.read())
Use lxml instead of elementtree.
Use lxml instead of elementtree.
Python
mit
Schwarzschild/TBGXMLUtils
python
## Code Before:
from xml.dom import minidom
import xml.etree.ElementTree as ET
import xmltodict
def add(k, parent=None, txt=None, attrs=None):
    """Create element ``k`` (as a root, or under ``parent``) with optional
    text and attributes, returning the new element.
    """
    if parent is None:
        handle = ET.Element(k)
    else:
        handle = ET.SubElement(parent, k)
    if txt:
        # NOTE(review): unicode()/iteritems() mark this as Python 2 code.
        handle.text = unicode(txt)
    # Guard explicitly instead of swallowing AttributeError, which also hid
    # genuine errors (e.g. attrs passed as a list), and avoid shadowing the
    # ``k`` parameter with the loop variable.
    if attrs is not None:
        for name, value in attrs.iteritems():
            handle.attrib[name] = value
    return handle
def etree2xml(e, encoding='UTF-8'):
    """Serialize element ``e``; pass ``encoding`` through when it is truthy,
    otherwise use tostring's default output."""
    if encoding:
        return ET.tostring(e, encoding=encoding)
    return ET.tostring(e)
def pretty(xml=None, fn=None):
    """Pretty-print XML given as a filename (``fn``), an XML string, or a
    ``minidom.Document``; returns the two-space-indented text."""
    if fn is not None:
        doc = minidom.parse(fn)
    elif isinstance(xml, minidom.Document):
        doc = xml
    else:
        doc = minidom.parseString(xml)
    return doc.toprettyxml(indent='  ')
def xml_fn_to_json(fn):
    """Parse the XML file at ``fn`` into a dict via xmltodict.

    The file handle is closed deterministically with a context manager
    (the original left it open for the garbage collector).
    """
    with open(fn, 'r') as fh:
        return xmltodict.parse(fh.read())
## Instruction:
Use lxml instead of elementtree.
## Code After:
from xml.dom import minidom
import lxml.etree as ET
import xmltodict
def add(k, parent=None, txt=None, attrs=None):
    """Create element ``k`` (as a root, or under ``parent``) with optional
    text and attributes, returning the new element.

    NOTE(review): unicode()/iteritems() mark this as Python 2 code; a
    non-mapping ``attrs`` is silently ignored by the AttributeError catch,
    and the loop variable shadows the ``k`` parameter -- confirm both are
    intentional.
    """
    if parent is None:
        handle = ET.Element(k)
    else:
        handle = ET.SubElement(parent, k)
    if txt: handle.text = unicode(txt)
    try:
        # Copy attributes; AttributeError (e.g. attrs=None) is ignored.
        for k, v in attrs.iteritems(): handle.attrib[k] = v
    except AttributeError:
        pass
    return handle
def etree2xml(e, encoding='UTF-8'):
    """Serialize element ``e``; pass ``encoding`` through when it is truthy,
    otherwise use tostring's default output."""
    return ET.tostring(e, encoding=encoding) if encoding else ET.tostring(e)
def pretty(xml=None, fn=None):
    """Pretty-print XML given as a filename (``fn``), an XML string, or a
    ``minidom.Document``; returns the two-space-indented text."""
    if fn is not None:
        xml = minidom.parse(fn)
    elif not isinstance(xml, minidom.Document):
        xml = minidom.parseString(xml)
    return xml.toprettyxml(indent='  ')
def xml_fn_to_json(fn):
    """Parse the XML file at ``fn`` into a dict via xmltodict.

    The file handle is closed deterministically with a context manager
    (the original left it open for the garbage collector).
    """
    with open(fn, 'r') as fh:
        return xmltodict.parse(fh.read())
// ... existing code ...
from xml.dom import minidom
import lxml.etree as ET
import xmltodict
def add(k, parent=None, txt=None, attrs=None):
// ... rest of the code ...
6b15d7151f2703ce049ac9ab14b13c8b13122bf8
http.h
http.h
/* Parsed representation of an incoming HTTP request. */
typedef struct
{
    int method;         /* request method */
    char* path;         /* requested path */
    char* host;         /* hostname field */
    char* type;         /* content-type */
    size_t length;      /* content-length */

    int flags;          /* NOTE(review): not referenced by any declaration
                           below -- confirm it is used before keeping it */
}
http_request;

/* Write an error page (and header). Returns number of bytes written. */
size_t gen_error_page( int fd, int error );

/*
    Write 200 Ok header with content length and content type.

    Returns the number of bytes written, 0 on failure.
 */
size_t http_ok( int fd, const char* type, unsigned long size );

/* parse a HTTP request, returns non-zero on success, zero on failure */
int http_request_parse( char* buffer, http_request* request );

#endif /* HTTP_H */
/* Parsed representation of an incoming HTTP request. */
typedef struct
{
    int method;         /* request method */
    char* path;         /* requested path */
    char* host;         /* hostname field */
    char* type;         /* content-type */
    size_t length;      /* content-length */
}
http_request;

/* Write an error page (and header). Returns number of bytes written. */
size_t gen_error_page( int fd, int error );

/*
    Write 200 Ok header with content length and content type.

    Returns the number of bytes written, 0 on failure.
 */
size_t http_ok( int fd, const char* type, unsigned long size );

/* parse a HTTP request, returns non-zero on success, zero on failure */
int http_request_parse( char* buffer, http_request* request );

#endif /* HTTP_H */
Remove unused flags field from request
Remove unused flags field from request
Signed-off-by: David Oberhollenzer <[email protected]>
C
agpl-3.0
AgentD/websrv,AgentD/websrv,AgentD/websrv
c
## Code Before:
typedef struct
{
int method; /* request method */
char* path; /* requested path */
char* host; /* hostname field */
char* type; /* content-type */
size_t length; /* content-length */
int flags;
}
http_request;
/* Write an error page (and header). Returns number of bytes written. */
size_t gen_error_page( int fd, int error );
/*
Write 200 Ok header with content length and content type.
Returns the number of bytes written, 0 on failure.
*/
size_t http_ok( int fd, const char* type, unsigned long size );
/* parse a HTTP request, returns non-zero on success, zero on failure */
int http_request_parse( char* buffer, http_request* request );
#endif /* HTTP_H */
## Instruction:
Remove unused flags field from request
Signed-off-by: David Oberhollenzer <[email protected]>
## Code After:
typedef struct
{
int method; /* request method */
char* path; /* requested path */
char* host; /* hostname field */
char* type; /* content-type */
size_t length; /* content-length */
}
http_request;
/* Write an error page (and header). Returns number of bytes written. */
size_t gen_error_page( int fd, int error );
/*
Write 200 Ok header with content length and content type.
Returns the number of bytes written, 0 on failure.
*/
size_t http_ok( int fd, const char* type, unsigned long size );
/* parse a HTTP request, returns non-zero on success, zero on failure */
int http_request_parse( char* buffer, http_request* request );
#endif /* HTTP_H */
# ... existing code ...
char* host; /* hostname field */
char* type; /* content-type */
size_t length; /* content-length */
}
http_request;
# ... rest of the code ...
ad0151eee0027237c8cdd433ef2f24bfa47af5df
pyreaclib/nucdata/tests/test_binding.py
pyreaclib/nucdata/tests/test_binding.py
import os
from pyreaclib.nucdata import BindingTable
class TestAME(object):
@classmethod
def setup_class(cls):
""" this is run once for each class before any tests """
pass
@classmethod
def teardown_class(cls):
""" this is run once for each class after all tests """
pass
def setup_method(self):
""" this is run before each test """
self.bintable = BindingTable()
def teardown_method(self):
""" this is run after each test """
self.bintable = None
def test_get(self):
nuc = self.bintable.get_nuclide(n=1, z=1)
assert nuc.z == 1
assert nuc.n == 1
assert nuc.nucbind == 1.112283
import os
from pyreaclib.nucdata import BindingTable
class TestAME(object):
@classmethod
def setup_class(cls):
""" this is run once for each class before any tests """
pass
@classmethod
def teardown_class(cls):
""" this is run once for each class after all tests """
pass
def setup_method(self):
""" this is run before each test """
self.bintable = BindingTable()
def teardown_method(self):
""" this is run after each test """
self.bintable = None
def test_get(self):
nuc = self.bintable.get_nuclide(n=1, z=1)
assert nuc.z == 1
assert nuc.n == 1
assert nuc.nucbind == 1.112283
nuc = self.bintable.get_nuclide(n=5, z=6)
assert nuc.z == 6
assert nuc.n == 5
assert nuc.nucbind == 6.676456
nuc = self.bintable.get_nuclide(n=17, z=23)
assert nuc.z == 23
assert nuc.n == 17
assert nuc.nucbind == 7.317
nuc = self.bintable.get_nuclide(n=90, z=78)
assert nuc.z == 78
assert nuc.n == 90
assert nuc.nucbind == 7.773605
Add some more binding energy table tests.
Add some more binding energy table tests.
Python
bsd-3-clause
pyreaclib/pyreaclib
python
## Code Before:
import os
from pyreaclib.nucdata import BindingTable
class TestAME(object):
@classmethod
def setup_class(cls):
""" this is run once for each class before any tests """
pass
@classmethod
def teardown_class(cls):
""" this is run once for each class after all tests """
pass
def setup_method(self):
""" this is run before each test """
self.bintable = BindingTable()
def teardown_method(self):
""" this is run after each test """
self.bintable = None
def test_get(self):
nuc = self.bintable.get_nuclide(n=1, z=1)
assert nuc.z == 1
assert nuc.n == 1
assert nuc.nucbind == 1.112283
## Instruction:
Add some more binding energy table tests.
## Code After:
import os
from pyreaclib.nucdata import BindingTable
class TestAME(object):
@classmethod
def setup_class(cls):
""" this is run once for each class before any tests """
pass
@classmethod
def teardown_class(cls):
""" this is run once for each class after all tests """
pass
def setup_method(self):
""" this is run before each test """
self.bintable = BindingTable()
def teardown_method(self):
""" this is run after each test """
self.bintable = None
def test_get(self):
nuc = self.bintable.get_nuclide(n=1, z=1)
assert nuc.z == 1
assert nuc.n == 1
assert nuc.nucbind == 1.112283
nuc = self.bintable.get_nuclide(n=5, z=6)
assert nuc.z == 6
assert nuc.n == 5
assert nuc.nucbind == 6.676456
nuc = self.bintable.get_nuclide(n=17, z=23)
assert nuc.z == 23
assert nuc.n == 17
assert nuc.nucbind == 7.317
nuc = self.bintable.get_nuclide(n=90, z=78)
assert nuc.z == 78
assert nuc.n == 90
assert nuc.nucbind == 7.773605
import warnings
from astropy.utils.exceptions import AstropyDeprecationWarning
try:
from pytest_astropy_header.display import PYTEST_HEADER_MODULES, TESTED_VERSIONS
except ImportError:
PYTEST_HEADER_MODULES = {}
TESTED_VERSIONS = {}
warnings.warn('The astropy.tests.plugins.display plugin has been deprecated. '
'See the pytest-astropy documentation for information on '
'migrating to using pytest-astropy to customize the pytest '
'header.', AstropyDeprecationWarning)
import warnings
from astropy.utils.exceptions import AstropyDeprecationWarning
try:
from pytest_astropy_header.display import (PYTEST_HEADER_MODULES,
TESTED_VERSIONS)
except ImportError:
PYTEST_HEADER_MODULES = {}
TESTED_VERSIONS = {}
warnings.warn('The astropy.tests.plugins.display plugin has been deprecated. '
'See the pytest-astropy-header documentation for information on '
'migrating to using pytest-astropy-header to customize the '
'pytest header.', AstropyDeprecationWarning)
## Code Before:
import warnings
from astropy.utils.exceptions import AstropyDeprecationWarning
try:
from pytest_astropy_header.display import PYTEST_HEADER_MODULES, TESTED_VERSIONS
except ImportError:
PYTEST_HEADER_MODULES = {}
TESTED_VERSIONS = {}
warnings.warn('The astropy.tests.plugins.display plugin has been deprecated. '
'See the pytest-astropy documentation for information on '
'migrating to using pytest-astropy to customize the pytest '
'header.', AstropyDeprecationWarning)
## Instruction:
TST: Fix typo in deprecation warning [ci skip]
## Code After:
import warnings
from astropy.utils.exceptions import AstropyDeprecationWarning
try:
from pytest_astropy_header.display import (PYTEST_HEADER_MODULES,
TESTED_VERSIONS)
except ImportError:
PYTEST_HEADER_MODULES = {}
TESTED_VERSIONS = {}
warnings.warn('The astropy.tests.plugins.display plugin has been deprecated. '
'See the pytest-astropy-header documentation for information on '
'migrating to using pytest-astropy-header to customize the '
'pytest header.', AstropyDeprecationWarning)
...
from astropy.utils.exceptions import AstropyDeprecationWarning
try:
from pytest_astropy_header.display import (PYTEST_HEADER_MODULES,
TESTED_VERSIONS)
except ImportError:
PYTEST_HEADER_MODULES = {}
TESTED_VERSIONS = {}
warnings.warn('The astropy.tests.plugins.display plugin has been deprecated. '
'See the pytest-astropy-header documentation for information on '
'migrating to using pytest-astropy-header to customize the '
'pytest header.', AstropyDeprecationWarning)
...
## Code Before:
from django.db import models
from django.db.models.signals import pre_save, post_save
from django.dispatch import receiver
from django.contrib.auth.models import User
from django.conf import settings
import markdown
import bleach
class UserProfile(models.Model):
user = models.OneToOneField(User)
about_me = models.TextField()
rendered_about_me = models.TextField(editable=False,
null=True)
@models.permalink
def get_absolute_url(self):
return ('view_profile', (), {'username': self.user.username})
@receiver(post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user=instance)
@receiver(pre_save, sender=UserProfile)
def user_profile_pre_save(sender, instance, **kwargs):
# Render the about_me field as HTML instead of markdown
rendered = markdown.markdown(instance.about_me, safe_mode='escape')
clean_rendered = bleach.clean(rendered,
tags=settings.ALLOWED_HTML_TAGS,
attributes=settings.ALLOWED_HTML_ATTRS)
instance.rendered_about_me = clean_rendered
## Instruction:
Add maximum length validator to about_me
## Code After:
from django.db import models
from django.db.models.signals import pre_save, post_save
from django.dispatch import receiver
from django.contrib.auth.models import User
from django.conf import settings
from django.core.validators import MaxLengthValidator
import markdown
import bleach
class UserProfile(models.Model):
user = models.OneToOneField(User)
about_me = models.TextField(validators=[MaxLengthValidator(500)])
rendered_about_me = models.TextField(editable=False,
null=True)
@models.permalink
def get_absolute_url(self):
return ('view_profile', (), {'username': self.user.username})
@receiver(post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user=instance)
@receiver(pre_save, sender=UserProfile)
def user_profile_pre_save(sender, instance, **kwargs):
# Render the about_me field as HTML instead of markdown
rendered = markdown.markdown(instance.about_me, safe_mode='escape')
clean_rendered = bleach.clean(rendered,
tags=settings.ALLOWED_HTML_TAGS,
attributes=settings.ALLOWED_HTML_ATTRS)
instance.rendered_about_me = clean_rendered
# ... existing code ...
from django.dispatch import receiver
from django.contrib.auth.models import User
from django.conf import settings
from django.core.validators import MaxLengthValidator
import markdown
import bleach
# ... modified code ...
class UserProfile(models.Model):
user = models.OneToOneField(User)
about_me = models.TextField(validators=[MaxLengthValidator(500)])
rendered_about_me = models.TextField(editable=False,
null=True)
# ... rest of the code ...
af51ef98d8575e7832d79c1068c092d388866dcb
donut/donut_SMTP_handler.py
donut/donut_SMTP_handler.py
from logging.handlers import SMTPHandler
DEV_TEAM_EMAILS_QUERY = '''SELECT DISTINCT email FROM
members NATURAL JOIN current_position_holders NATURAL JOIN positions NATURAL JOIN groups
WHERE group_name = "Devteam"
'''
class DonutSMTPHandler(SMTPHandler):
def __init__(self,
mailhost,
fromaddr,
toaddrs,
subject,
db_instance,
credentials=None,
secure=None,
timeout=5.0):
super().__init__(mailhost, fromaddr, toaddrs, subject, credentials,
secure, timeout)
self.db_instance = db_instance
def emit(self, record):
'''
Overrides SMTPHandler's emit such that we dynamically
get current donut dev team members
'''
self.toaddrs = self.getAdmins()
super().emit(record)
def getAdmins(self):
''' Returns current members in Devteam '''
with self.db_instance.cursor() as cursor:
cursor.execute(DEV_TEAM_EMAILS_QUERY, [])
res = cursor.fetchall()
return [result['email'] for result in res]
from logging.handlers import SMTPHandler
DEV_TEAM_EMAILS_QUERY = '''SELECT DISTINCT email FROM
members NATURAL JOIN current_position_holders NATURAL JOIN positions NATURAL JOIN groups
WHERE group_name = "Devteam"
'''
DEFAULT_DEV_TEAM_EMAILS = ['[email protected]']
class DonutSMTPHandler(SMTPHandler):
def __init__(self,
mailhost,
fromaddr,
toaddrs,
subject,
db_instance,
credentials=None,
secure=None,
timeout=5.0):
super().__init__(mailhost, fromaddr, toaddrs, subject, credentials,
secure, timeout)
self.db_instance = db_instance
def emit(self, record):
'''
Overrides SMTPHandler's emit such that we dynamically
get current donut dev team members
'''
self.toaddrs = self.getAdmins()
super().emit(record)
def getAdmins(self):
''' Returns current members in Devteam '''
try:
with self.db_instance.cursor() as cursor:
cursor.execute(DEV_TEAM_EMAILS_QUERY)
res = cursor.fetchall()
return [result['email'] for result in res]
except Exception:
# If the database is inaccessible, fallback to a hard-coded email list
return DEFAULT_DEV_TEAM_EMAILS
Allow error email to still be sent if DB is down
Allow error email to still be sent if DB is down
We were seeing errors in the logs where the database was inaccessible,
but the errors were not being emailed out because the handler makes a DB query.
Python
mit
ASCIT/donut,ASCIT/donut,ASCIT/donut
python
## Code Before:
from logging.handlers import SMTPHandler
DEV_TEAM_EMAILS_QUERY = '''SELECT DISTINCT email FROM
members NATURAL JOIN current_position_holders NATURAL JOIN positions NATURAL JOIN groups
WHERE group_name = "Devteam"
'''
class DonutSMTPHandler(SMTPHandler):
def __init__(self,
mailhost,
fromaddr,
toaddrs,
subject,
db_instance,
credentials=None,
secure=None,
timeout=5.0):
super().__init__(mailhost, fromaddr, toaddrs, subject, credentials,
secure, timeout)
self.db_instance = db_instance
def emit(self, record):
'''
Overrides SMTPHandler's emit such that we dynamically
get current donut dev team members
'''
self.toaddrs = self.getAdmins()
super().emit(record)
def getAdmins(self):
''' Returns current members in Devteam '''
with self.db_instance.cursor() as cursor:
cursor.execute(DEV_TEAM_EMAILS_QUERY, [])
res = cursor.fetchall()
return [result['email'] for result in res]
## Instruction:
Allow error email to still be sent if DB is down
We were seeing errors in the logs where the database was inaccessible,
but the errors were not being emailed out because the handler makes a DB query.
## Code After:
from logging.handlers import SMTPHandler
DEV_TEAM_EMAILS_QUERY = '''SELECT DISTINCT email FROM
members NATURAL JOIN current_position_holders NATURAL JOIN positions NATURAL JOIN groups
WHERE group_name = "Devteam"
'''
DEFAULT_DEV_TEAM_EMAILS = ['[email protected]']
class DonutSMTPHandler(SMTPHandler):
def __init__(self,
mailhost,
fromaddr,
toaddrs,
subject,
db_instance,
credentials=None,
secure=None,
timeout=5.0):
super().__init__(mailhost, fromaddr, toaddrs, subject, credentials,
secure, timeout)
self.db_instance = db_instance
def emit(self, record):
'''
Overrides SMTPHandler's emit such that we dynamically
get current donut dev team members
'''
self.toaddrs = self.getAdmins()
super().emit(record)
def getAdmins(self):
''' Returns current members in Devteam '''
try:
with self.db_instance.cursor() as cursor:
cursor.execute(DEV_TEAM_EMAILS_QUERY)
res = cursor.fetchall()
return [result['email'] for result in res]
except Exception:
# If the database is inaccessible, fallback to a hard-coded email list
return DEFAULT_DEV_TEAM_EMAILS
# ... existing code ...
from logging.handlers import SMTPHandler
DEV_TEAM_EMAILS_QUERY = '''SELECT DISTINCT email FROM
members NATURAL JOIN current_position_holders NATURAL JOIN positions NATURAL JOIN groups
WHERE group_name = "Devteam"
'''
DEFAULT_DEV_TEAM_EMAILS = ['[email protected]']
class DonutSMTPHandler(SMTPHandler):
# ... modified code ...
def getAdmins(self):
''' Returns current members in Devteam '''
try:
with self.db_instance.cursor() as cursor:
cursor.execute(DEV_TEAM_EMAILS_QUERY)
res = cursor.fetchall()
return [result['email'] for result in res]
except Exception:
# If the database is inaccessible, fallback to a hard-coded email list
return DEFAULT_DEV_TEAM_EMAILS
# ... rest of the code ...
ca3978b6068add93418b4c5db8346143533beb7e
examples/forwarder_device.py
examples/forwarder_device.py
import os
import zmq
import yaml
name = 'zmq_document_forwarder'
filenames = [
os.path.join('/etc', name + '.yml'),
os.path.join(os.path.expanduser('~'), '.config', name, 'connection.yml'),
]
config = {}
for filename in filenames:
if os.path.isfile(filename):
print('found config file at', filename)
with open(filename) as f:
config.update(yaml.load(f))
def main(frontend_port, backend_port):
try:
context = zmq.Context(1)
# Socket facing clients
frontend = context.socket(zmq.SUB)
frontend.bind("tcp://*:%d" % frontend_port)
frontend.setsockopt_string(zmq.SUBSCRIBE, "")
# Socket facing services
backend = context.socket(zmq.PUB)
backend.bind("tcp://*:%d" % backend_port)
zmq.device(zmq.FORWARDER, frontend, backend)
finally:
frontend.close()
backend.close()
context.term()
if __name__ == "__main__":
main(int(config['frontend_port']), int(config['backend_port']))
import os
import zmq
import yaml
name = 'zmq_document_forwarder'
filenames = [
os.path.join('/etc', name + '.yml'),
os.path.join(os.path.expanduser('~'), '.config', name, 'connection.yml'),
]
config = {}
for filename in filenames:
if os.path.isfile(filename):
print('found config file at', filename)
with open(filename) as f:
config.update(yaml.load(f))
def main(frontend_port, backend_port):
try:
context = zmq.Context(1)
# Socket facing clients
frontend = context.socket(zmq.SUB)
frontend.bind("tcp://*:%d" % frontend_port)
frontend.setsockopt_string(zmq.SUBSCRIBE, "")
# Socket facing services
backend = context.socket(zmq.PUB)
backend.bind("tcp://*:%d" % backend_port)
print("Receiving on %d; publishing to %d" % (frontend_port,
backend_port))
zmq.device(zmq.FORWARDER, frontend, backend)
finally:
frontend.close()
backend.close()
context.term()
if __name__ == "__main__":
main(int(config['frontend_port']), int(config['backend_port']))
Print ports when forwarder device starts.
MNT: Print ports when forwarder device starts.
Python
bsd-3-clause
ericdill/bluesky,ericdill/bluesky
python
## Code Before:
import os
import zmq
import yaml
name = 'zmq_document_forwarder'
filenames = [
os.path.join('/etc', name + '.yml'),
os.path.join(os.path.expanduser('~'), '.config', name, 'connection.yml'),
]
config = {}
for filename in filenames:
if os.path.isfile(filename):
print('found config file at', filename)
with open(filename) as f:
config.update(yaml.load(f))
def main(frontend_port, backend_port):
try:
context = zmq.Context(1)
# Socket facing clients
frontend = context.socket(zmq.SUB)
frontend.bind("tcp://*:%d" % frontend_port)
frontend.setsockopt_string(zmq.SUBSCRIBE, "")
# Socket facing services
backend = context.socket(zmq.PUB)
backend.bind("tcp://*:%d" % backend_port)
zmq.device(zmq.FORWARDER, frontend, backend)
finally:
frontend.close()
backend.close()
context.term()
if __name__ == "__main__":
main(int(config['frontend_port']), int(config['backend_port']))
## Instruction:
MNT: Print ports when forwarder device starts.
## Code After:
import os
import zmq
import yaml
name = 'zmq_document_forwarder'
filenames = [
os.path.join('/etc', name + '.yml'),
os.path.join(os.path.expanduser('~'), '.config', name, 'connection.yml'),
]
config = {}
for filename in filenames:
if os.path.isfile(filename):
print('found config file at', filename)
with open(filename) as f:
config.update(yaml.load(f))
def main(frontend_port, backend_port):
try:
context = zmq.Context(1)
# Socket facing clients
frontend = context.socket(zmq.SUB)
frontend.bind("tcp://*:%d" % frontend_port)
frontend.setsockopt_string(zmq.SUBSCRIBE, "")
# Socket facing services
backend = context.socket(zmq.PUB)
backend.bind("tcp://*:%d" % backend_port)
print("Receiving on %d; publishing to %d" % (frontend_port,
backend_port))
zmq.device(zmq.FORWARDER, frontend, backend)
finally:
frontend.close()
backend.close()
context.term()
if __name__ == "__main__":
main(int(config['frontend_port']), int(config['backend_port']))
## Code Before:
from os import path
import sys
from colour_runner.django_runner import ColourRunnerMixin
from django.conf import settings
from django.test.runner import DiscoverRunner
settings.configure(
INSTALLED_APPS=(
# Put contenttypes before auth to work around test issue.
# See: https://code.djangoproject.com/ticket/10827#comment:12
'django.contrib.contenttypes',
'django.contrib.auth',
'django.contrib.sessions',
'django.contrib.admin',
'django-admin-sso',
'django-crispy-forms',
'incuna_auth',
),
PASSWORD_HASHERS = ('django.contrib.auth.hashers.MD5PasswordHasher',),
AUTH_USER_MODEL='tests.User',
ROOT_URLCONF='incuna_auth.urls',
REST_FRAMEWORK={
'DEFAULT_PERMISSION_CLASSES': ('rest_framework.permissions.IsAuthenticated',),
'DEFAULT_RENDERER_CLASSES': ('rest_framework.renderers.JSONRenderer',),
},
TEST_DISCOVER_TOP_LEVEL=path.dirname(path.dirname(__file__)),
)
class Runner(ColourRunnerMixin, DiscoverRunner):
pass
test_runner = Runner(verbosity=1)
failures = test_runner.run_tests(['tests'])
if failures:
sys.exit(1)
## Instruction:
Reorder imports to dodge a settings problem.
## Code After:
from os import path
import sys
from colour_runner.django_runner import ColourRunnerMixin
from django.conf import settings
settings.configure(
INSTALLED_APPS=(
# Put contenttypes before auth to work around test issue.
# See: https://code.djangoproject.com/ticket/10827#comment:12
'django.contrib.contenttypes',
'django.contrib.auth',
'django.contrib.sessions',
'django.contrib.admin',
'django-admin-sso',
'django-crispy-forms',
'incuna_auth',
),
PASSWORD_HASHERS = ('django.contrib.auth.hashers.MD5PasswordHasher',),
AUTH_USER_MODEL='tests.User',
ROOT_URLCONF='incuna_auth.urls',
REST_FRAMEWORK={
'DEFAULT_PERMISSION_CLASSES': ('rest_framework.permissions.IsAuthenticated',),
'DEFAULT_RENDERER_CLASSES': ('rest_framework.renderers.JSONRenderer',),
},
TEST_DISCOVER_TOP_LEVEL=path.dirname(path.dirname(__file__)),
)
from django.test.runner import DiscoverRunner
class Runner(ColourRunnerMixin, DiscoverRunner):
pass
test_runner = Runner(verbosity=1)
failures = test_runner.run_tests(['tests'])
if failures:
sys.exit(1)
// ... existing code ...
from colour_runner.django_runner import ColourRunnerMixin
from django.conf import settings
settings.configure(
// ... modified code ...
)
from django.test.runner import DiscoverRunner
class Runner(ColourRunnerMixin, DiscoverRunner):
pass
// ... rest of the code ...
e751cb4f4805aed079fc025b9b1655f30cf5e69a
watson/html/entities.py
watson/html/entities.py
import re
from html import _escape_map_full
from html.entities import codepoint2name
html_entities = {_ord: '&{0};'.format(value)
for _ord, value in codepoint2name.items()}
html_entities.update(_escape_map_full)
entities_html = {value: _ord for _ord, value in html_entities.items()}
def encode(string):
"""Encodes html entities.
This is a little more full featured than html.escape, as it will
replace all charactes from codepoint2name.
Returns:
string with replaced html entities.
"""
return string.translate(html_entities)
def decode(string):
"""Decodes html entities.
Returns:
string with html entities decoded.
"""
return (
re.sub(
'&(?:[#a-z][a-z0-9]+);',
lambda m: chr(entities_html[m.group()]),
string)
)
import re
from html.entities import codepoint2name
try:
from html import _escape_map_full
except:
# taken from the 3.3 standard lib, as it's removed in 3.4
_escape_map_full = {ord('&'): '&', ord('<'): '<', ord('>'): '>',
ord('"'): '"', ord('\''): '''}
html_entities = {_ord: '&{0};'.format(value)
for _ord, value in codepoint2name.items()}
html_entities.update(_escape_map_full)
entities_html = {value: _ord for _ord, value in html_entities.items()}
def encode(string):
"""Encodes html entities.
This is a little more full featured than html.escape, as it will
replace all charactes from codepoint2name.
Returns:
string with replaced html entities.
"""
return string.translate(html_entities)
def decode(string):
"""Decodes html entities.
Returns:
string with html entities decoded.
"""
return (
re.sub(
'&(?:[#a-z][a-z0-9]+);',
lambda m: chr(entities_html[m.group()]),
string)
)
Fix for Python 3.4 html module not containing _escape_map_full
Fix for Python 3.4 html module not containing _escape_map_full
Python
bsd-3-clause
watsonpy/watson-html
python
## Code Before:
import re
from html import _escape_map_full
from html.entities import codepoint2name
html_entities = {_ord: '&{0};'.format(value)
for _ord, value in codepoint2name.items()}
html_entities.update(_escape_map_full)
entities_html = {value: _ord for _ord, value in html_entities.items()}
def encode(string):
"""Encodes html entities.
This is a little more full featured than html.escape, as it will
replace all charactes from codepoint2name.
Returns:
string with replaced html entities.
"""
return string.translate(html_entities)
def decode(string):
"""Decodes html entities.
Returns:
string with html entities decoded.
"""
return (
re.sub(
'&(?:[#a-z][a-z0-9]+);',
lambda m: chr(entities_html[m.group()]),
string)
)
## Instruction:
Fix for Python 3.4 html module not containing _escape_map_full
## Code After:
import re
from html.entities import codepoint2name
try:
from html import _escape_map_full
except:
# taken from the 3.3 standard lib, as it's removed in 3.4
_escape_map_full = {ord('&'): '&', ord('<'): '<', ord('>'): '>',
ord('"'): '"', ord('\''): '''}
html_entities = {_ord: '&{0};'.format(value)
for _ord, value in codepoint2name.items()}
html_entities.update(_escape_map_full)
entities_html = {value: _ord for _ord, value in html_entities.items()}
def encode(string):
"""Encodes html entities.
This is a little more full featured than html.escape, as it will
replace all charactes from codepoint2name.
Returns:
string with replaced html entities.
"""
return string.translate(html_entities)
def decode(string):
"""Decodes html entities.
Returns:
string with html entities decoded.
"""
return (
re.sub(
'&(?:[#a-z][a-z0-9]+);',
lambda m: chr(entities_html[m.group()]),
string)
)
// ... existing code ...
import re
from html.entities import codepoint2name
try:
from html import _escape_map_full
except:
# taken from the 3.3 standard lib, as it's removed in 3.4
_escape_map_full = {ord('&'): '&', ord('<'): '<', ord('>'): '>',
ord('"'): '"', ord('\''): '''}
html_entities = {_ord: '&{0};'.format(value)
for _ord, value in codepoint2name.items()}
// ... rest of the code ...
7cef87a81278c227db0cb07329d1b659dbd175b3
mail_factory/models.py
mail_factory/models.py
import django
from django.conf import settings
from django.utils.importlib import import_module
from django.utils.module_loading import module_has_submodule
def autodiscover():
"""Auto-discover INSTALLED_APPS mails.py modules."""
for app in settings.INSTALLED_APPS:
module = '%s.mails' % app # Attempt to import the app's 'mails' module
try:
import_module(module)
except:
# Decide whether to bubble up this error. If the app just
# doesn't have a mails module, we can ignore the error
# attempting to import it, otherwise we want it to bubble up.
app_module = import_module(app)
if module_has_submodule(app_module, 'mails'):
raise
# If we're using Django >= 1.7, use the new app-loading mecanism which is way
# better.
if django.VERSION < (1, 7):
autodiscover()
import django
from django.conf import settings
from django.utils.module_loading import module_has_submodule
try:
from importlib import import_module
except ImportError:
# Compatibility for python-2.6
from django.utils.importlib import import_module
def autodiscover():
"""Auto-discover INSTALLED_APPS mails.py modules."""
for app in settings.INSTALLED_APPS:
module = '%s.mails' % app # Attempt to import the app's 'mails' module
try:
import_module(module)
except:
# Decide whether to bubble up this error. If the app just
# doesn't have a mails module, we can ignore the error
# attempting to import it, otherwise we want it to bubble up.
app_module = import_module(app)
if module_has_submodule(app_module, 'mails'):
raise
# If we're using Django >= 1.7, use the new app-loading mecanism which is way
# better.
if django.VERSION < (1, 7):
autodiscover()
Use standard library instead of django.utils.importlib
Use standard library instead of django.utils.importlib
> django.utils.importlib is a compatibility library for when Python 2.6 was
> still supported. It has been obsolete since Django 1.7, which dropped support
> for Python 2.6, and is removed in 1.9 per the deprecation cycle.
> Use Python's import_module function instead
> — [1]
References:
[1] http://stackoverflow.com/a/32763639
[2] https://docs.djangoproject.com/en/1.9/internals/deprecation/#deprecation-removed-in-1-9
## Code Before:
import django
from django.conf import settings
from django.utils.importlib import import_module
from django.utils.module_loading import module_has_submodule
def autodiscover():
"""Auto-discover INSTALLED_APPS mails.py modules."""
for app in settings.INSTALLED_APPS:
module = '%s.mails' % app # Attempt to import the app's 'mails' module
try:
import_module(module)
except:
# Decide whether to bubble up this error. If the app just
# doesn't have a mails module, we can ignore the error
# attempting to import it, otherwise we want it to bubble up.
app_module = import_module(app)
if module_has_submodule(app_module, 'mails'):
raise
# If we're using Django >= 1.7, use the new app-loading mecanism which is way
# better.
if django.VERSION < (1, 7):
autodiscover()
## Instruction:
Use standard library instead of django.utils.importlib
> django.utils.importlib is a compatibility library for when Python 2.6 was
> still supported. It has been obsolete since Django 1.7, which dropped support
> for Python 2.6, and is removed in 1.9 per the deprecation cycle.
> Use Python's import_module function instead
> — [1]
References:
[1] http://stackoverflow.com/a/32763639
[2] https://docs.djangoproject.com/en/1.9/internals/deprecation/#deprecation-removed-in-1-9
## Code After:
import django
from django.conf import settings
from django.utils.module_loading import module_has_submodule
try:
from importlib import import_module
except ImportError:
# Compatibility for python-2.6
from django.utils.importlib import import_module
def autodiscover():
"""Auto-discover INSTALLED_APPS mails.py modules."""
for app in settings.INSTALLED_APPS:
module = '%s.mails' % app # Attempt to import the app's 'mails' module
try:
import_module(module)
except:
# Decide whether to bubble up this error. If the app just
# doesn't have a mails module, we can ignore the error
# attempting to import it, otherwise we want it to bubble up.
app_module = import_module(app)
if module_has_submodule(app_module, 'mails'):
raise
# If we're using Django >= 1.7, use the new app-loading mecanism which is way
# better.
if django.VERSION < (1, 7):
autodiscover()
# ... existing code ...
import django
from django.conf import settings
from django.utils.module_loading import module_has_submodule
try:
from importlib import import_module
except ImportError:
# Compatibility for python-2.6
from django.utils.importlib import import_module
def autodiscover():
# ... rest of the code ...
10ae930f6f14c2840d0b87cbec17054b4cc318d2
facebook_auth/models.py
facebook_auth/models.py
from django.contrib.auth import models as auth_models
from django.db import models
import facepy
import simplejson
from facebook_auth import utils
class FacebookUser(auth_models.User):
user_id = models.BigIntegerField(unique=True)
access_token = models.TextField(blank=True, null=True)
app_friends = models.ManyToManyField('self')
@property
def graph(self):
return facepy.GraphAPI(self.access_token)
@property
def js_session(self):
return simplejson.dumps({
'access_token': self.access_token,
'uid': self.user_id
})
@property
def friends(self):
return utils.get_from_graph_api(self.graph, "me/friends")['data']
def update_app_friends(self):
friends = self.friends
friends_ids = [f['id'] for f in friends]
self.app_friends.clear()
self.app_friends.add(*FacebookUser.objects.filter(user_id__in=friends_ids))
from uuid import uuid1
from django.conf import settings
from django.contrib.auth import models as auth_models
from django.db import models
import facepy
import simplejson
from facebook_auth import utils
class FacebookUser(auth_models.User):
user_id = models.BigIntegerField(unique=True)
access_token = models.TextField(blank=True, null=True)
app_friends = models.ManyToManyField('self')
@property
def graph(self):
return facepy.GraphAPI(self.access_token)
@property
def js_session(self):
return simplejson.dumps({
'access_token': self.access_token,
'uid': self.user_id
})
@property
def friends(self):
return utils.get_from_graph_api(self.graph, "me/friends")['data']
def update_app_friends(self):
friends = self.friends
friends_ids = [f['id'] for f in friends]
self.app_friends.clear()
self.app_friends.add(*FacebookUser.objects.filter(user_id__in=friends_ids))
def get_auth_address(request, redirect_to, scope=''):
state = unicode(uuid1())
request.session['state'] = state
return 'https://www.facebook.com/dialog/oauth?client_id=%s&redirect_uri=%s&scope=%s&state=%s' % (
settings.FACEBOOK_APP_ID, redirect_to, scope, state
)
Add support for server side authentication.
Add support for server side authentication.
Change-Id: Iff45fa00b5a5b389f998570827e33d9d232f5d1e
Reviewed-on: http://review.pozytywnie.pl:8080/5087
Reviewed-by: Tomasz Wysocki <[email protected]>
Tested-by: Tomasz Wysocki <[email protected]>
## Code Before:
from django.contrib.auth import models as auth_models
from django.db import models
import facepy
import simplejson
from facebook_auth import utils
class FacebookUser(auth_models.User):
user_id = models.BigIntegerField(unique=True)
access_token = models.TextField(blank=True, null=True)
app_friends = models.ManyToManyField('self')
@property
def graph(self):
return facepy.GraphAPI(self.access_token)
@property
def js_session(self):
return simplejson.dumps({
'access_token': self.access_token,
'uid': self.user_id
})
@property
def friends(self):
return utils.get_from_graph_api(self.graph, "me/friends")['data']
def update_app_friends(self):
friends = self.friends
friends_ids = [f['id'] for f in friends]
self.app_friends.clear()
self.app_friends.add(*FacebookUser.objects.filter(user_id__in=friends_ids))
## Instruction:
Add support for server side authentication.
Change-Id: Iff45fa00b5a5b389f998570827e33d9d232f5d1e
Reviewed-on: http://review.pozytywnie.pl:8080/5087
Reviewed-by: Tomasz Wysocki <[email protected]>
Tested-by: Tomasz Wysocki <[email protected]>
## Code After:
from uuid import uuid1
from django.conf import settings
from django.contrib.auth import models as auth_models
from django.db import models
import facepy
import simplejson
from facebook_auth import utils
class FacebookUser(auth_models.User):
user_id = models.BigIntegerField(unique=True)
access_token = models.TextField(blank=True, null=True)
app_friends = models.ManyToManyField('self')
@property
def graph(self):
return facepy.GraphAPI(self.access_token)
@property
def js_session(self):
return simplejson.dumps({
'access_token': self.access_token,
'uid': self.user_id
})
@property
def friends(self):
return utils.get_from_graph_api(self.graph, "me/friends")['data']
def update_app_friends(self):
friends = self.friends
friends_ids = [f['id'] for f in friends]
self.app_friends.clear()
self.app_friends.add(*FacebookUser.objects.filter(user_id__in=friends_ids))
def get_auth_address(request, redirect_to, scope=''):
state = unicode(uuid1())
request.session['state'] = state
return 'https://www.facebook.com/dialog/oauth?client_id=%s&redirect_uri=%s&scope=%s&state=%s' % (
settings.FACEBOOK_APP_ID, redirect_to, scope, state
)
...
from uuid import uuid1
from django.conf import settings
from django.contrib.auth import models as auth_models
from django.db import models
import facepy
...
friends_ids = [f['id'] for f in friends]
self.app_friends.clear()
self.app_friends.add(*FacebookUser.objects.filter(user_id__in=friends_ids))
def get_auth_address(request, redirect_to, scope=''):
state = unicode(uuid1())
request.session['state'] = state
return 'https://www.facebook.com/dialog/oauth?client_id=%s&redirect_uri=%s&scope=%s&state=%s' % (
settings.FACEBOOK_APP_ID, redirect_to, scope, state
)
...
ccf3bcfc962a37d088507b542bd8e3af2ce515b6
tests/test_with_testcase.py
tests/test_with_testcase.py
import time
import unittest
import pytest
class TerribleTerribleWayToWriteTests(unittest.TestCase):
@pytest.fixture(autouse=True)
def setupBenchmark(self, benchmark):
self.benchmark = benchmark
def test_foo(self):
self.benchmark(time.sleep, 0.000001)
class TerribleTerribleWayToWritePatchTests(unittest.TestCase):
@pytest.fixture(autouse=True)
def setupBenchmark(self, benchmark_weave):
self.benchmark_weave = benchmark_weave
def test_foo2(self):
with self.benchmark_weave('time.sleep'):
time.sleep(0.0000001)
package com.google.appengine.tools.cloudstorage.oauth;
import com.google.appengine.api.appidentity.AppIdentityService;
import com.google.appengine.api.appidentity.AppIdentityService.GetAccessTokenResult;
import com.google.appengine.api.appidentity.AppIdentityServiceFactory;
import com.google.appengine.api.utils.SystemProperty;
import java.util.List;
/**
* Provider that uses the AppIdentityService for generating access tokens.
*/
final class AppIdentityAccessTokenProvider implements AccessTokenProvider {
private final AppIdentityService appIdentityService;
public AppIdentityAccessTokenProvider() {
if (SystemProperty.environment.value() == SystemProperty.Environment.Value.Development) {
throw new IllegalStateException(
"The access token from AppIdentity won't work in the development environment.");
}
this.appIdentityService = AppIdentityServiceFactory.getAppIdentityService();
}
@Override
public GetAccessTokenResult getNewAccessToken(List<String> scopes) {
return appIdentityService.getAccessToken(scopes);
}
}
package com.google.appengine.tools.cloudstorage.oauth;
import com.google.appengine.api.appidentity.AppIdentityService;
import com.google.appengine.api.appidentity.AppIdentityService.GetAccessTokenResult;
import com.google.appengine.api.appidentity.AppIdentityServiceFactory;
import com.google.appengine.api.utils.SystemProperty;
import java.util.List;
/**
* Provider that uses the AppIdentityService for generating access tokens.
*/
final class AppIdentityAccessTokenProvider implements AccessTokenProvider {
private final AppIdentityService appIdentityService;
public AppIdentityAccessTokenProvider() {
this.appIdentityService = AppIdentityServiceFactory.getAppIdentityService();
}
@Override
public GetAccessTokenResult getNewAccessToken(List<String> scopes) {
if (SystemProperty.environment.value() == SystemProperty.Environment.Value.Development) {
throw new IllegalStateException(
"The access token from AppIdentity won't work in the development environment.");
}
return appIdentityService.getAccessToken(scopes);
}
}
Move check for development environment into the getNewAccessToken method.
Move check for development environment into the getNewAccessToken method.
Revision created by MOE tool push_codebase.
MOE_MIGRATION=7102
## Code Before:
package com.google.appengine.tools.cloudstorage.oauth;
import com.google.appengine.api.appidentity.AppIdentityService;
import com.google.appengine.api.appidentity.AppIdentityService.GetAccessTokenResult;
import com.google.appengine.api.appidentity.AppIdentityServiceFactory;
import com.google.appengine.api.utils.SystemProperty;
import java.util.List;
/**
* Provider that uses the AppIdentityService for generating access tokens.
*/
final class AppIdentityAccessTokenProvider implements AccessTokenProvider {
private final AppIdentityService appIdentityService;
public AppIdentityAccessTokenProvider() {
if (SystemProperty.environment.value() == SystemProperty.Environment.Value.Development) {
throw new IllegalStateException(
"The access token from AppIdentity won't work in the development environment.");
}
this.appIdentityService = AppIdentityServiceFactory.getAppIdentityService();
}
@Override
public GetAccessTokenResult getNewAccessToken(List<String> scopes) {
return appIdentityService.getAccessToken(scopes);
}
}
## Instruction:
Move check for development environment into the getNewAccessToken method.
Revision created by MOE tool push_codebase.
MOE_MIGRATION=7102
## Code After:
package com.google.appengine.tools.cloudstorage.oauth;
import com.google.appengine.api.appidentity.AppIdentityService;
import com.google.appengine.api.appidentity.AppIdentityService.GetAccessTokenResult;
import com.google.appengine.api.appidentity.AppIdentityServiceFactory;
import com.google.appengine.api.utils.SystemProperty;
import java.util.List;
/**
* Provider that uses the AppIdentityService for generating access tokens.
*/
final class AppIdentityAccessTokenProvider implements AccessTokenProvider {
private final AppIdentityService appIdentityService;
public AppIdentityAccessTokenProvider() {
this.appIdentityService = AppIdentityServiceFactory.getAppIdentityService();
}
@Override
public GetAccessTokenResult getNewAccessToken(List<String> scopes) {
if (SystemProperty.environment.value() == SystemProperty.Environment.Value.Development) {
throw new IllegalStateException(
"The access token from AppIdentity won't work in the development environment.");
}
return appIdentityService.getAccessToken(scopes);
}
}
...
private final AppIdentityService appIdentityService;
public AppIdentityAccessTokenProvider() {
this.appIdentityService = AppIdentityServiceFactory.getAppIdentityService();
}
@Override
public GetAccessTokenResult getNewAccessToken(List<String> scopes) {
if (SystemProperty.environment.value() == SystemProperty.Environment.Value.Development) {
throw new IllegalStateException(
"The access token from AppIdentity won't work in the development environment.");
}
return appIdentityService.getAccessToken(scopes);
}
}
...
824c8cd3eb563de60ddf13fac1f7ca1341aa01f1
astral/api/tests/test_streams.py
astral/api/tests/test_streams.py
from tornado.httpclient import HTTPRequest
from nose.tools import eq_, ok_
import json
import faker
from astral.api.tests import BaseTest
from astral.models import Stream
from astral.models.tests.factories import StreamFactory
class StreamsHandlerTest(BaseTest):
def test_get_streams(self):
[StreamFactory() for _ in range(3)]
response = self.fetch('/streams')
eq_(response.code, 200)
result = json.loads(response.body)
ok_('streams' in result)
for stream in result['streams']:
ok_(Stream.get_by(name=stream['name']))
def test_create_stream(self):
data = {'name': faker.lorem.sentence()}
eq_(Stream.get_by(name=data['name']), None)
self.http_client.fetch(HTTPRequest(
self.get_url('/streams'), 'POST', body=json.dumps(data)), self.stop)
response = self.wait()
eq_(response.code, 200)
ok_(Stream.get_by(name=data['name']))
from tornado.httpclient import HTTPRequest
from nose.tools import eq_, ok_
import json
import faker
from astral.api.tests import BaseTest
from astral.models import Stream
from astral.models.tests.factories import StreamFactory
class StreamsHandlerTest(BaseTest):
def test_get_streams(self):
[StreamFactory() for _ in range(3)]
response = self.fetch('/streams')
eq_(response.code, 200)
result = json.loads(response.body)
ok_('streams' in result)
for stream in result['streams']:
ok_(Stream.get_by(name=stream['name']))
def test_create_stream(self):
data = {'name': faker.lorem.sentence()}
eq_(Stream.get_by(name=data['name']), None)
self.http_client.fetch(HTTPRequest(
self.get_url('/streams'), 'POST', body=json.dumps(data),
follow_redirects=False), self.stop)
response = self.wait()
eq_(response.code, 302)
ok_(Stream.get_by(name=data['name']))
Update tests for new redirect-after-create stream.
Update tests for new redirect-after-create stream.
Python
mit
peplin/astral
python
## Code Before:
from tornado.httpclient import HTTPRequest
from nose.tools import eq_, ok_
import json
import faker
from astral.api.tests import BaseTest
from astral.models import Stream
from astral.models.tests.factories import StreamFactory
class StreamsHandlerTest(BaseTest):
def test_get_streams(self):
[StreamFactory() for _ in range(3)]
response = self.fetch('/streams')
eq_(response.code, 200)
result = json.loads(response.body)
ok_('streams' in result)
for stream in result['streams']:
ok_(Stream.get_by(name=stream['name']))
def test_create_stream(self):
data = {'name': faker.lorem.sentence()}
eq_(Stream.get_by(name=data['name']), None)
self.http_client.fetch(HTTPRequest(
self.get_url('/streams'), 'POST', body=json.dumps(data)), self.stop)
response = self.wait()
eq_(response.code, 200)
ok_(Stream.get_by(name=data['name']))
## Instruction:
Update tests for new redirect-after-create stream.
## Code After:
from tornado.httpclient import HTTPRequest
from nose.tools import eq_, ok_
import json
import faker
from astral.api.tests import BaseTest
from astral.models import Stream
from astral.models.tests.factories import StreamFactory
class StreamsHandlerTest(BaseTest):
def test_get_streams(self):
[StreamFactory() for _ in range(3)]
response = self.fetch('/streams')
eq_(response.code, 200)
result = json.loads(response.body)
ok_('streams' in result)
for stream in result['streams']:
ok_(Stream.get_by(name=stream['name']))
def test_create_stream(self):
data = {'name': faker.lorem.sentence()}
eq_(Stream.get_by(name=data['name']), None)
self.http_client.fetch(HTTPRequest(
self.get_url('/streams'), 'POST', body=json.dumps(data),
follow_redirects=False), self.stop)
response = self.wait()
eq_(response.code, 302)
ok_(Stream.get_by(name=data['name']))
// ... existing code ...
data = {'name': faker.lorem.sentence()}
eq_(Stream.get_by(name=data['name']), None)
self.http_client.fetch(HTTPRequest(
self.get_url('/streams'), 'POST', body=json.dumps(data),
follow_redirects=False), self.stop)
response = self.wait()
eq_(response.code, 302)
ok_(Stream.get_by(name=data['name']))
// ... rest of the code ...
70d009834123cb5a10788763fed3193017cc8162
libpebble2/__init__.py
libpebble2/__init__.py
__author__ = 'katharine'
from .exceptions import *
__author__ = 'katharine'
import logging
from .exceptions import *
logging.getLogger('libpebble2').addHandler(logging.NullHandler())
Add a default null logger per python recommendations.
Add a default null logger per python recommendations.
Python
mit
pebble/libpebble2
python
## Code Before:
__author__ = 'katharine'
from .exceptions import *
## Instruction:
Add a default null logger per python recommendations.
## Code After:
__author__ = 'katharine'
import logging
from .exceptions import *
logging.getLogger('libpebble2').addHandler(logging.NullHandler())
# ... existing code ...
__author__ = 'katharine'
import logging
from .exceptions import *
logging.getLogger('libpebble2').addHandler(logging.NullHandler())
# ... rest of the code ...
29562b08e436abc8465404e49d9193537721b717
src/odin/contrib/money/fields.py
src/odin/contrib/money/fields.py
from __future__ import absolute_import, print_function
from odin import exceptions
from odin.fields import ScalarField
from odin.validators import EMPTY_VALUES
from .datatypes import Amount
__all__ = ('AmountField', )
class AmountField(ScalarField):
"""
Field that contains a monetary amount (with an optional currency).
"""
default_error_messages = {
'invalid': "'%s' value must be a (amount, currency).",
'invalid_currency': "'%s' currency is not supported.",
}
data_type_name = "Amount"
def __init__(self, allowed_currencies=None, **kwargs):
super(AmountField, self).__init__(**kwargs)
self.allowed_currencies = allowed_currencies
def to_python(self, value):
if value in EMPTY_VALUES:
return
if isinstance(value, Amount):
return value
try:
return Amount(value)
except (ValueError, TypeError):
msg = self.error_messages['invalid'] % value
raise exceptions.ValidationError(msg)
def validate(self, value):
super(AmountField, self).validate(value)
if self.allowed_currencies and value not in EMPTY_VALUES:
if value.currency not in self.allowed_currencies:
msg = self.error_messages['invalid_currency'] % str(value.currency)
raise exceptions.ValidationError(msg)
def prepare(self, value):
if value in EMPTY_VALUES:
return
return float(value), value.currency.code
from __future__ import absolute_import, print_function
from odin import exceptions
from odin.fields import ScalarField
from odin.validators import EMPTY_VALUES
from .datatypes import Amount
__all__ = ("AmountField",)
class AmountField(ScalarField):
"""
Field that contains a monetary amount (with an optional currency).
"""
default_error_messages = {
"invalid": "'%s' value must be a (amount, currency).",
"invalid_currency": "'%s' currency is not supported.",
}
data_type_name = "Amount"
def __init__(self, allowed_currencies=None, **kwargs):
super(AmountField, self).__init__(**kwargs)
self.allowed_currencies = allowed_currencies
def to_python(self, value):
if value in EMPTY_VALUES:
return
if isinstance(value, Amount):
return value
try:
return Amount(value)
except (ValueError, TypeError):
msg = self.error_messages["invalid"] % value
raise exceptions.ValidationError(msg)
def validate(self, value):
super(AmountField, self).validate(value)
if (
self.allowed_currencies
and (value not in EMPTY_VALUES)
and (value.currency not in self.allowed_currencies)
):
msg = self.error_messages["invalid_currency"] % str(value.currency)
raise exceptions.ValidationError(msg)
def prepare(self, value):
if value in EMPTY_VALUES:
return
return float(value), value.currency.code
Correct issue from Sonar (and black file)
Correct issue from Sonar (and black file)
Python
bsd-3-clause
python-odin/odin
python
## Code Before:
from __future__ import absolute_import, print_function
from odin import exceptions
from odin.fields import ScalarField
from odin.validators import EMPTY_VALUES
from .datatypes import Amount
__all__ = ('AmountField', )
class AmountField(ScalarField):
"""
Field that contains a monetary amount (with an optional currency).
"""
default_error_messages = {
'invalid': "'%s' value must be a (amount, currency).",
'invalid_currency': "'%s' currency is not supported.",
}
data_type_name = "Amount"
def __init__(self, allowed_currencies=None, **kwargs):
super(AmountField, self).__init__(**kwargs)
self.allowed_currencies = allowed_currencies
def to_python(self, value):
if value in EMPTY_VALUES:
return
if isinstance(value, Amount):
return value
try:
return Amount(value)
except (ValueError, TypeError):
msg = self.error_messages['invalid'] % value
raise exceptions.ValidationError(msg)
def validate(self, value):
super(AmountField, self).validate(value)
if self.allowed_currencies and value not in EMPTY_VALUES:
if value.currency not in self.allowed_currencies:
msg = self.error_messages['invalid_currency'] % str(value.currency)
raise exceptions.ValidationError(msg)
def prepare(self, value):
if value in EMPTY_VALUES:
return
return float(value), value.currency.code
## Instruction:
Correct issue from Sonar (and black file)
## Code After:
from __future__ import absolute_import, print_function
from odin import exceptions
from odin.fields import ScalarField
from odin.validators import EMPTY_VALUES
from .datatypes import Amount
__all__ = ("AmountField",)
class AmountField(ScalarField):
"""
Field that contains a monetary amount (with an optional currency).
"""
default_error_messages = {
"invalid": "'%s' value must be a (amount, currency).",
"invalid_currency": "'%s' currency is not supported.",
}
data_type_name = "Amount"
def __init__(self, allowed_currencies=None, **kwargs):
super(AmountField, self).__init__(**kwargs)
self.allowed_currencies = allowed_currencies
def to_python(self, value):
if value in EMPTY_VALUES:
return
if isinstance(value, Amount):
return value
try:
return Amount(value)
except (ValueError, TypeError):
msg = self.error_messages["invalid"] % value
raise exceptions.ValidationError(msg)
def validate(self, value):
super(AmountField, self).validate(value)
if (
self.allowed_currencies
and (value not in EMPTY_VALUES)
and (value.currency not in self.allowed_currencies)
):
msg = self.error_messages["invalid_currency"] % str(value.currency)
raise exceptions.ValidationError(msg)
def prepare(self, value):
if value in EMPTY_VALUES:
return
return float(value), value.currency.code
...
from odin.validators import EMPTY_VALUES
from .datatypes import Amount
__all__ = ("AmountField",)
class AmountField(ScalarField):
...
"""
Field that contains a monetary amount (with an optional currency).
"""
default_error_messages = {
"invalid": "'%s' value must be a (amount, currency).",
"invalid_currency": "'%s' currency is not supported.",
}
data_type_name = "Amount"
...
try:
return Amount(value)
except (ValueError, TypeError):
msg = self.error_messages["invalid"] % value
raise exceptions.ValidationError(msg)
def validate(self, value):
super(AmountField, self).validate(value)
if (
self.allowed_currencies
and (value not in EMPTY_VALUES)
and (value.currency not in self.allowed_currencies)
):
msg = self.error_messages["invalid_currency"] % str(value.currency)
raise exceptions.ValidationError(msg)
def prepare(self, value):
if value in EMPTY_VALUES:
...
bf24b8dab13c3779514a00d61c3ea440704b1cbf
setup.py
setup.py
try:
from setuptools import setup
from setuptools import find_packages
packages = find_packages()
except ImportError:
from distutils.core import setup
import os
packages = [x.strip('./').replace('/','.') for x in os.popen('find -name "__init__.py" | xargs -n1 dirname').read().strip().split('\n')]
if bytes is str:
raise Exception("This module is designed for python 3 only. Please install an older version to use python 2.")
setup(
name='cle',
description='CLE Loads Everything (at least, many binary formats!) and provides a pythonic interface to analyze what they are and what they would look like in memory.',
version='8.20.1.7',
python_requires='>=3.5',
packages=packages,
install_requires=[
'pyelftools>=0.25',
'cffi',
'pyvex==8.20.1.7',
'pefile',
'sortedcontainers>=2.0',
],
extras_require={
"minidump": ["minidump==0.0.10"],
"xbe": ["pyxbe==0.0.2"],
}
)
try:
from setuptools import setup
from setuptools import find_packages
packages = find_packages()
except ImportError:
from distutils.core import setup
import os
packages = [x.strip('./').replace('/','.') for x in os.popen('find -name "__init__.py" | xargs -n1 dirname').read().strip().split('\n')]
if bytes is str:
raise Exception("This module is designed for python 3 only. Please install an older version to use python 2.")
setup(
name='cle',
description='CLE Loads Everything (at least, many binary formats!) and provides a pythonic interface to analyze what they are and what they would look like in memory.',
version='8.20.1.7',
python_requires='>=3.5',
packages=packages,
install_requires=[
'pyelftools>=0.25',
'cffi',
'pyvex==8.20.1.7',
'pefile',
'sortedcontainers>=2.0',
],
extras_require={
"minidump": ["minidump==0.0.10"],
"xbe": ["pyxbe==0.0.2"],
"ar": ["arpy==1.1.1"],
}
)
Add optional dependency on arpy
Add optional dependency on arpy
Python
bsd-2-clause
angr/cle
python
## Code Before:
try:
from setuptools import setup
from setuptools import find_packages
packages = find_packages()
except ImportError:
from distutils.core import setup
import os
packages = [x.strip('./').replace('/','.') for x in os.popen('find -name "__init__.py" | xargs -n1 dirname').read().strip().split('\n')]
if bytes is str:
raise Exception("This module is designed for python 3 only. Please install an older version to use python 2.")
setup(
name='cle',
description='CLE Loads Everything (at least, many binary formats!) and provides a pythonic interface to analyze what they are and what they would look like in memory.',
version='8.20.1.7',
python_requires='>=3.5',
packages=packages,
install_requires=[
'pyelftools>=0.25',
'cffi',
'pyvex==8.20.1.7',
'pefile',
'sortedcontainers>=2.0',
],
extras_require={
"minidump": ["minidump==0.0.10"],
"xbe": ["pyxbe==0.0.2"],
}
)
## Instruction:
Add optional dependency on arpy
## Code After:
try:
from setuptools import setup
from setuptools import find_packages
packages = find_packages()
except ImportError:
from distutils.core import setup
import os
packages = [x.strip('./').replace('/','.') for x in os.popen('find -name "__init__.py" | xargs -n1 dirname').read().strip().split('\n')]
if bytes is str:
raise Exception("This module is designed for python 3 only. Please install an older version to use python 2.")
setup(
name='cle',
description='CLE Loads Everything (at least, many binary formats!) and provides a pythonic interface to analyze what they are and what they would look like in memory.',
version='8.20.1.7',
python_requires='>=3.5',
packages=packages,
install_requires=[
'pyelftools>=0.25',
'cffi',
'pyvex==8.20.1.7',
'pefile',
'sortedcontainers>=2.0',
],
extras_require={
"minidump": ["minidump==0.0.10"],
"xbe": ["pyxbe==0.0.2"],
"ar": ["arpy==1.1.1"],
}
)
import collections
import functools
from ydf import meta
__all__ = []
FROM = 'FROM'
RUN = 'RUN'
CMD = 'CMD'
LABEL = 'LABEL'
EXPOSE = 'EXPOSE'
ENV = 'ENV'
ADD = 'ADD'
COPY = 'COPY'
ENTRYPOINT = 'ENTRYPOINT'
VOLUME = 'VOLUME'
USER = 'USER'
WORKDIR = 'WORKDIR'
ARG = 'ARG'
ONBUILD = 'ONBUILD'
STOPSIGNAL = 'STOPSIGNAL'
HEALTHCHECK = 'HEALTHCHECK'
SHELL = 'SHELL'
def get_instructions():
"""
Get all functions within this module that are decorated with :func:`~ydf.instructions.instruction`.
"""
instructions = collections.defaultdict(dict)
for func in (value for key, value in globals().items() if meta.is_instruction(value)):
instructions[func.instruction_name][func.instruction_type] = func
return instructions
def instruction(name, type, desc):
"""
Decorate a function to indicate that it is responsible for converting a python type to a Docker
instruction.
:param name: Name of docker instruction
:param type: Type of python object it can convert
:param desc: Short description of expected format for the python object.
"""
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
return '{} {}'.format(name, func(*args, **kwargs))
wrapper.instruction_name = name
wrapper.instruction_type = type
wrapper.instruction_desc = desc
return wrapper
return decorator
Add @instruction decorator to mark module level funcs as handlers.
Add @instruction decorator to mark module level funcs as handlers.
Python
apache-2.0
ahawker/ydf
python
## Code Before:
__all__ = []
FROM = 'FROM'
RUN = 'RUN'
CMD = 'CMD'
LABEL = 'LABEL'
EXPOSE = 'EXPOSE'
ENV = 'ENV'
ADD = 'ADD'
COPY = 'COPY'
ENTRYPOINT = 'ENTRYPOINT'
VOLUME = 'VOLUME'
USER = 'USER'
WORKDIR = 'WORKDIR'
ARG = 'ARG'
ONBUILD = 'ONBUILD'
STOPSIGNAL = 'STOPSIGNAL'
HEALTHCHECK = 'HEALTHCHECK'
SHELL = 'SHELL'
## Instruction:
Add @instruction decorator to mark module level funcs as handlers.
## Code After:
import collections
import functools
from ydf import meta
__all__ = []
FROM = 'FROM'
RUN = 'RUN'
CMD = 'CMD'
LABEL = 'LABEL'
EXPOSE = 'EXPOSE'
ENV = 'ENV'
ADD = 'ADD'
COPY = 'COPY'
ENTRYPOINT = 'ENTRYPOINT'
VOLUME = 'VOLUME'
USER = 'USER'
WORKDIR = 'WORKDIR'
ARG = 'ARG'
ONBUILD = 'ONBUILD'
STOPSIGNAL = 'STOPSIGNAL'
HEALTHCHECK = 'HEALTHCHECK'
SHELL = 'SHELL'
def get_instructions():
"""
Get all functions within this module that are decorated with :func:`~ydf.instructions.instruction`.
"""
instructions = collections.defaultdict(dict)
for func in (value for key, value in globals().items() if meta.is_instruction(value)):
instructions[func.instruction_name][func.instruction_type] = func
return instructions
def instruction(name, type, desc):
"""
Decorate a function to indicate that it is responsible for converting a python type to a Docker
instruction.
:param name: Name of docker instruction
:param type: Type of python object it can convert
:param desc: Short description of expected format for the python object.
"""
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
return '{} {}'.format(name, func(*args, **kwargs))
wrapper.instruction_name = name
wrapper.instruction_type = type
wrapper.instruction_desc = desc
return wrapper
return decorator
# ... existing code ...
import collections
import functools
from ydf import meta
__all__ = []
# ... modified code ...
STOPSIGNAL = 'STOPSIGNAL'
HEALTHCHECK = 'HEALTHCHECK'
SHELL = 'SHELL'
def get_instructions():
"""
Get all functions within this module that are decorated with :func:`~ydf.instructions.instruction`.
"""
instructions = collections.defaultdict(dict)
for func in (value for key, value in globals().items() if meta.is_instruction(value)):
instructions[func.instruction_name][func.instruction_type] = func
return instructions
def instruction(name, type, desc):
"""
Decorate a function to indicate that it is responsible for converting a python type to a Docker
instruction.
:param name: Name of docker instruction
:param type: Type of python object it can convert
:param desc: Short description of expected format for the python object.
"""
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
return '{} {}'.format(name, func(*args, **kwargs))
wrapper.instruction_name = name
wrapper.instruction_type = type
wrapper.instruction_desc = desc
return wrapper
return decorator
# ... rest of the code ...
## Code Before:
import logging
from util import full_stack
from workflow.steps.util.base import BaseStep
from workflow.exceptions.error_codes import DBAAS_0020
LOG = logging.getLogger(__name__)
class DecreaseTTL(BaseStep):
def __unicode__(self):
return "Checking instances status..."
def do(self, workflow_dict):
try:
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0020)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
def undo(self, workflow_dict):
LOG.info("Running undo...")
try:
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0020)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
## Instruction:
Add step to check instances status
## Code After:
import logging
from util import full_stack
from workflow.steps.util.base import BaseStep
from workflow.exceptions.error_codes import DBAAS_0020
from drivers.base import ConnectionError
LOG = logging.getLogger(__name__)
class CheckInstancesStatus(BaseStep):
def __unicode__(self):
return "Checking instances status..."
def do(self, workflow_dict):
try:
databaseinfra = workflow_dict['databaseinfra']
driver = databaseinfra.get_driver()
for instance in driver.get_database_instances():
msg = "Instance({}) is down".format(instance)
exception_msg = Exception(msg)
try:
status = driver.check_status(instance)
except ConnectionError:
raise exception_msg
else:
if status is False:
raise exception_msg
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0020)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
def undo(self, workflow_dict):
LOG.info("Running undo...")
try:
return True
except Exception:
traceback = full_stack()
workflow_dict['exceptions']['error_codes'].append(DBAAS_0020)
workflow_dict['exceptions']['traceback'].append(traceback)
return False
...
from util import full_stack
from workflow.steps.util.base import BaseStep
from workflow.exceptions.error_codes import DBAAS_0020
from drivers.base import ConnectionError
LOG = logging.getLogger(__name__)
class CheckInstancesStatus(BaseStep):
def __unicode__(self):
return "Checking instances status..."
...
def do(self, workflow_dict):
try:
databaseinfra = workflow_dict['databaseinfra']
driver = databaseinfra.get_driver()
for instance in driver.get_database_instances():
msg = "Instance({}) is down".format(instance)
exception_msg = Exception(msg)
try:
status = driver.check_status(instance)
except ConnectionError:
raise exception_msg
else:
if status is False:
raise exception_msg
return True
except Exception:
traceback = full_stack()
...
f651d51d97b75f12ba68f1cbfca914724136d121
tools/halide_image.h
tools/halide_image.h
/*
This allows code that relied on halide_image.h and Halide::Tools::Image to
continue to work with newer versions of Halide where HalideBuffer.h and
Halide::Buffer are the way to work with data.
Besides mapping Halide::Tools::Image to Halide::Buffer, it defines
USING_HALIDE_BUFFER to allow code to conditionally compile for one or the
other.
It is intended as a stop-gap measure until the code can be updated.
*/
#include "HalideBuffer.h"
namespace Halide {
namespace Tools {
#define USING_HALIDE_BUFFER
template< typename T >
using Image = Buffer<T>;
} // namespace Tools
} // mamespace Halide
#endif // #ifndef HALIDE_TOOLS_IMAGE_H
/** \file
*
* This allows code that relied on halide_image.h and
* Halide::Tools::Image to continue to work with newer versions of
* Halide where HalideBuffer.h and Halide::Buffer are the way to work
* with data.
*
* Besides mapping Halide::Tools::Image to Halide::Buffer, it defines
* USING_HALIDE_BUFFER to allow code to conditionally compile for one
* or the other.
*
* It is intended as a stop-gap measure until the code can be updated.
*/
#include "HalideBuffer.h"
namespace Halide {
namespace Tools {
#define USING_HALIDE_BUFFER
template< typename T >
using Image = Buffer<T>;
} // namespace Tools
} // mamespace Halide
#endif // #ifndef HALIDE_TOOLS_IMAGE_H
## Code Before:
/*
This allows code that relied on halide_image.h and Halide::Tools::Image to
continue to work with newer versions of Halide where HalideBuffer.h and
Halide::Buffer are the way to work with data.
Besides mapping Halide::Tools::Image to Halide::Buffer, it defines
USING_HALIDE_BUFFER to allow code to conditionally compile for one or the
other.
It is intended as a stop-gap measure until the code can be updated.
*/
#include "HalideBuffer.h"
namespace Halide {
namespace Tools {
#define USING_HALIDE_BUFFER
template< typename T >
using Image = Buffer<T>;
} // namespace Tools
} // mamespace Halide
#endif // #ifndef HALIDE_TOOLS_IMAGE_H
## Instruction:
Reformat comment into Doxygen comment for file.
## Code After:
/** \file
*
* This allows code that relied on halide_image.h and
* Halide::Tools::Image to continue to work with newer versions of
* Halide where HalideBuffer.h and Halide::Buffer are the way to work
* with data.
*
* Besides mapping Halide::Tools::Image to Halide::Buffer, it defines
* USING_HALIDE_BUFFER to allow code to conditionally compile for one
* or the other.
*
* It is intended as a stop-gap measure until the code can be updated.
*/
#include "HalideBuffer.h"
namespace Halide {
namespace Tools {
#define USING_HALIDE_BUFFER
template< typename T >
using Image = Buffer<T>;
} // namespace Tools
} // mamespace Halide
#endif // #ifndef HALIDE_TOOLS_IMAGE_H
# ... existing code ...
/** \file
*
* This allows code that relied on halide_image.h and
* Halide::Tools::Image to continue to work with newer versions of
* Halide where HalideBuffer.h and Halide::Buffer are the way to work
* with data.
*
* Besides mapping Halide::Tools::Image to Halide::Buffer, it defines
* USING_HALIDE_BUFFER to allow code to conditionally compile for one
* or the other.
*
* It is intended as a stop-gap measure until the code can be updated.
*/
#include "HalideBuffer.h"
# ... rest of the code ...
98b0eb3d492cb816db7ffa7ad062dde36a1feadf
tests/unit/test_gettext.py
tests/unit/test_gettext.py
import logging
import unittest
from openstack.common.gettextutils import _
LOG = logging.getLogger(__name__)
class GettextTest(unittest.TestCase):
def test_gettext_does_not_blow_up(self):
LOG.info(_('test'))
import logging
import testtools
from openstack.common.gettextutils import _
LOG = logging.getLogger(__name__)
class GettextTest(testtools.TestCase):
def test_gettext_does_not_blow_up(self):
LOG.info(_('test'))
Use testtools as test base class.
Use testtools as test base class.
On the path to testr migration, we need to replace the unittest base classes
with testtools.
Replace tearDown with addCleanup, addCleanup is more resilient than tearDown.
The fixtures library has excellent support for managing and cleaning
tempfiles. Use it.
Replace skip_ with testtools.skipTest
Part of blueprint grizzly-testtools.
Change-Id: I45e11bbb1ff9b31f3278d3b016737dcb7850cd98
Python
apache-2.0
varunarya10/oslo.i18n,openstack/oslo.i18n
python
## Code Before:
import logging
import unittest
from openstack.common.gettextutils import _
LOG = logging.getLogger(__name__)
class GettextTest(unittest.TestCase):
def test_gettext_does_not_blow_up(self):
LOG.info(_('test'))
## Instruction:
Use testtools as test base class.
On the path to testr migration, we need to replace the unittest base classes
with testtools.
Replace tearDown with addCleanup, addCleanup is more resilient than tearDown.
The fixtures library has excellent support for managing and cleaning
tempfiles. Use it.
Replace skip_ with testtools.skipTest
Part of blueprint grizzly-testtools.
Change-Id: I45e11bbb1ff9b31f3278d3b016737dcb7850cd98
## Code After:
import logging
import testtools
from openstack.common.gettextutils import _
LOG = logging.getLogger(__name__)
class GettextTest(testtools.TestCase):
def test_gettext_does_not_blow_up(self):
LOG.info(_('test'))
// ... existing code ...
import logging
import testtools
from openstack.common.gettextutils import _
// ... modified code ...
LOG = logging.getLogger(__name__)
class GettextTest(testtools.TestCase):
def test_gettext_does_not_blow_up(self):
LOG.info(_('test'))
// ... rest of the code ...
c3284516e8dc2c7fccfbf7e4bff46a66b4ad2f15
cref/evaluation/__init__.py
cref/evaluation/__init__.py
import os
import statistics
from cref.structure import rmsd
from cref.app.terminal import download_pdb, download_fasta, predict_fasta
pdbs = ['1zdd', '1gab']
runs = 100
fragment_sizes = range(5, 13, 2)
number_of_clusters = range(4, 20, 1)
for pdb in pdbs:
output_dir = 'predictions/evaluation/{}/'.format(pdb)
try:
os.mkdir(output_dir)
except FileExistsError as e:
print(e)
for fragment_size in fragment_sizes:
fasta_file = output_dir + pdb + '.fasta'
download_fasta(pdb, fasta_file)
for n in number_of_clusters:
rmsds = []
for run in range(runs):
params = {
'pdb': pdb,
'fragment_size': fragment_size,
'number_of_clusters': n
}
output_files = predict_fasta(fasta_file, output_dir, params)
predicted_structure = output_files[0]
filepath = os.path.join(
os.path.dirname(predicted_structure),
'experimental_structure.pdb'
)
experimental_structure = download_pdb(pdb, filepath)
rmsds.append(rmsd(predicted_structure, experimental_structure))
print(pdb, fragment_size, n, statistics.mean(rmsds), statistics.pstdev(rmsds))
import os
import statistics
from cref.structure import rmsd
from cref.app.terminal import download_pdb, download_fasta, predict_fasta
pdbs = ['1zdd', '1gab']
runs = 5
fragment_sizes = range(5, 13, 2)
number_of_clusters = range(4, 20, 1)
for pdb in pdbs:
output_dir = 'predictions/evaluation/{}/'.format(pdb)
try:
os.mkdir(output_dir)
except FileExistsError as e:
print(e)
for fragment_size in fragment_sizes:
fasta_file = output_dir + pdb + '.fasta'
download_fasta(pdb, fasta_file)
for n in number_of_clusters:
rmsds = []
for run in range(runs):
params = {
'pdb': pdb,
'fragment_size': fragment_size,
'number_of_clusters': n
}
prediction_output = output_dir + str(run)
os.mkdir(prediction_output)
output_files = predict_fasta(fasta_file, prediction_output, params)
predicted_structure = output_files[0]
filepath = os.path.join(
os.path.dirname(predicted_structure),
'experimental_structure.pdb'
)
experimental_structure = download_pdb(pdb, filepath)
rmsds.append(rmsd(predicted_structure, experimental_structure))
print(pdb, fragment_size, n, statistics.mean(rmsds), statistics.pstdev(rmsds))
Save output for every run
Save output for every run
Python
mit
mchelem/cref2,mchelem/cref2,mchelem/cref2
python
## Code Before:
import os
import statistics
from cref.structure import rmsd
from cref.app.terminal import download_pdb, download_fasta, predict_fasta
pdbs = ['1zdd', '1gab']
runs = 100
fragment_sizes = range(5, 13, 2)
number_of_clusters = range(4, 20, 1)
for pdb in pdbs:
output_dir = 'predictions/evaluation/{}/'.format(pdb)
try:
os.mkdir(output_dir)
except FileExistsError as e:
print(e)
for fragment_size in fragment_sizes:
fasta_file = output_dir + pdb + '.fasta'
download_fasta(pdb, fasta_file)
for n in number_of_clusters:
rmsds = []
for run in range(runs):
params = {
'pdb': pdb,
'fragment_size': fragment_size,
'number_of_clusters': n
}
output_files = predict_fasta(fasta_file, output_dir, params)
predicted_structure = output_files[0]
filepath = os.path.join(
os.path.dirname(predicted_structure),
'experimental_structure.pdb'
)
experimental_structure = download_pdb(pdb, filepath)
rmsds.append(rmsd(predicted_structure, experimental_structure))
print(pdb, fragment_size, n, statistics.mean(rmsds), statistics.pstdev(rmsds))
## Instruction:
Save output for every run
## Code After:
import os
import statistics
from cref.structure import rmsd
from cref.app.terminal import download_pdb, download_fasta, predict_fasta
pdbs = ['1zdd', '1gab']
runs = 5
fragment_sizes = range(5, 13, 2)
number_of_clusters = range(4, 20, 1)
for pdb in pdbs:
output_dir = 'predictions/evaluation/{}/'.format(pdb)
try:
os.mkdir(output_dir)
except FileExistsError as e:
print(e)
for fragment_size in fragment_sizes:
fasta_file = output_dir + pdb + '.fasta'
download_fasta(pdb, fasta_file)
for n in number_of_clusters:
rmsds = []
for run in range(runs):
params = {
'pdb': pdb,
'fragment_size': fragment_size,
'number_of_clusters': n
}
prediction_output = output_dir + str(run)
os.mkdir(prediction_output)
output_files = predict_fasta(fasta_file, prediction_output, params)
predicted_structure = output_files[0]
filepath = os.path.join(
os.path.dirname(predicted_structure),
'experimental_structure.pdb'
)
experimental_structure = download_pdb(pdb, filepath)
rmsds.append(rmsd(predicted_structure, experimental_structure))
print(pdb, fragment_size, n, statistics.mean(rmsds), statistics.pstdev(rmsds))
import numpy
import Axon
class MonoMixer(Axon.AdaptiveCommsComponent.AdaptiveCommsComponent):
channels = 8
bufferSize = 1024
def __init__(self, **argd):
super(MonoMixer, self).__init__(**argd)
for i in range(self.channels):
self.addInbox("in%i" % i)
def main(self):
while 1:
output = numpy.zeros(self.bufferSize)
for i in range(self.channels):
if self.dataReady("in%i" % i):
output += self.recv("in%i" % i)
output /= self.channels
self.send(output, "outbox")
if not self.anyReady():
self.pause()
yield 1
import numpy
import Axon
import time
from Axon.SchedulingComponent import SchedulingAdaptiveCommsComponent
class MonoMixer(SchedulingAdaptiveCommsComponent):
channels = 8
bufferSize = 1024
sampleRate = 44100
def __init__(self, **argd):
super(MonoMixer, self).__init__(**argd)
for i in range(self.channels):
self.addInbox("in%i" % i)
self.period = float(self.bufferSize)/self.sampleRate
self.lastSendTime = time.time()
self.scheduleAbs("Send", self.lastSendTime + self.period)
def main(self):
while 1:
if self.dataReady("event"):
output = numpy.zeros(self.bufferSize)
self.recv("event")
for i in range(self.channels):
if self.dataReady("in%i" % i):
data = self.recv("in%i" % i)
if data != None:
output += data
output /= self.channels
self.send(output, "outbox")
self.lastSendTime += self.period
self.scheduleAbs("Send", self.lastSendTime + self.period)
else:
self.pause()
Change the mixer to be a scheduled component, and stop it from sending unnecessary messages when it has only received data from a few of it's inputs.
Change the mixer to be a scheduled component, and stop it from sending unnecessary messages when it has only received data from a few of it's inputs.
## Code Before:
import numpy
import Axon
class MonoMixer(Axon.AdaptiveCommsComponent.AdaptiveCommsComponent):
channels = 8
bufferSize = 1024
def __init__(self, **argd):
super(MonoMixer, self).__init__(**argd)
for i in range(self.channels):
self.addInbox("in%i" % i)
def main(self):
while 1:
output = numpy.zeros(self.bufferSize)
for i in range(self.channels):
if self.dataReady("in%i" % i):
output += self.recv("in%i" % i)
output /= self.channels
self.send(output, "outbox")
if not self.anyReady():
self.pause()
yield 1
## Instruction:
Change the mixer to be a scheduled component, and stop it from sending unnecessary messages when it has only received data from a few of it's inputs.
## Code After:
import numpy
import Axon
import time
from Axon.SchedulingComponent import SchedulingAdaptiveCommsComponent
class MonoMixer(SchedulingAdaptiveCommsComponent):
channels = 8
bufferSize = 1024
sampleRate = 44100
def __init__(self, **argd):
super(MonoMixer, self).__init__(**argd)
for i in range(self.channels):
self.addInbox("in%i" % i)
self.period = float(self.bufferSize)/self.sampleRate
self.lastSendTime = time.time()
self.scheduleAbs("Send", self.lastSendTime + self.period)
def main(self):
while 1:
if self.dataReady("event"):
output = numpy.zeros(self.bufferSize)
self.recv("event")
for i in range(self.channels):
if self.dataReady("in%i" % i):
data = self.recv("in%i" % i)
if data != None:
output += data
output /= self.channels
self.send(output, "outbox")
self.lastSendTime += self.period
self.scheduleAbs("Send", self.lastSendTime + self.period)
else:
self.pause()
...
import numpy
import Axon
import time
from Axon.SchedulingComponent import SchedulingAdaptiveCommsComponent
class MonoMixer(SchedulingAdaptiveCommsComponent):
channels = 8
bufferSize = 1024
sampleRate = 44100
def __init__(self, **argd):
super(MonoMixer, self).__init__(**argd)
for i in range(self.channels):
self.addInbox("in%i" % i)
self.period = float(self.bufferSize)/self.sampleRate
self.lastSendTime = time.time()
self.scheduleAbs("Send", self.lastSendTime + self.period)
def main(self):
while 1:
if self.dataReady("event"):
output = numpy.zeros(self.bufferSize)
self.recv("event")
for i in range(self.channels):
if self.dataReady("in%i" % i):
data = self.recv("in%i" % i)
if data != None:
output += data
output /= self.channels
self.send(output, "outbox")
self.lastSendTime += self.period
self.scheduleAbs("Send", self.lastSendTime + self.period)
else:
self.pause()
...
b2155e167b559367bc24ba614f51360793951f12
mythril/support/source_support.py
mythril/support/source_support.py
from mythril.solidity.soliditycontract import SolidityContract
from mythril.ethereum.evmcontract import EVMContract
class Source:
def __init__(
self, source_type=None, source_format=None, source_list=None, meta=None
):
self.source_type = source_type
self.source_format = source_format
self.source_list = []
self.meta = meta
def get_source_from_contracts_list(self, contracts):
if contracts is None or len(contracts) == 0:
return
if isinstance(contracts[0], SolidityContract):
self.source_type = "solidity-file"
self.source_format = "text"
for contract in contracts:
self.source_list += [file.filename for file in contract.solidity_files]
elif isinstance(contracts[0], EVMContract):
self.source_format = "evm-byzantium-bytecode"
self.source_type = (
"raw-bytecode" if contracts[0].name == "MAIN" else "ethereum-address"
)
for contract in contracts:
self.source_list.append(contract.bytecode_hash)
else:
assert False # Fail hard
self.meta = ""
from mythril.solidity.soliditycontract import SolidityContract
from mythril.ethereum.evmcontract import EVMContract
class Source:
def __init__(
self, source_type=None, source_format=None, source_list=None, meta=None
):
self.source_type = source_type
self.source_format = source_format
self.source_list = []
self.meta = meta
def get_source_from_contracts_list(self, contracts):
if contracts is None or len(contracts) == 0:
return
if isinstance(contracts[0], SolidityContract):
self.source_type = "solidity-file"
self.source_format = "text"
for contract in contracts:
self.source_list += [file.filename for file in contract.solidity_files]
elif isinstance(contracts[0], EVMContract):
self.source_format = "evm-byzantium-bytecode"
self.source_type = (
"raw-bytecode" if contracts[0].name == "MAIN" else "ethereum-address"
)
for contract in contracts:
self.source_list.append(contract.bytecode_hash)
else:
assert False # Fail hard
Remove meta from source class (belongs to issue not source)
Remove meta from source class (belongs to issue not source)
from Foundation import objc
from Foundation import NSBundle
from AppKit import NSImage
def iconForName(klass, name):
"""Return the NSImage instance representing a `name` item."""
imgpath = NSBundle.bundleForClass_(klass).pathForResource_ofType_(name, 'png')
img = NSImage.alloc().initWithContentsOfFile_(imgpath)
img.autorelease()
return img
class HaskellModuleItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for modules"""
def isDecorator(self):
return True
def image(self):
return iconForName(self.class__(), 'module')
class HaskellTypeItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for datatypes"""
def isDecorator(self):
return True
def image(self):
return iconForName(self.class__(), 'type')
def isTextualizer(self):
return True
def title(self):
return self.text().lstrip()
class HaskellFunctionItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for functions"""
pass
class HaskellCodeBlockItem(objc.lookUpClass('ESCodeBlockItem')):
"""Itemizer for code blocks"""
def isTextualizer(self):
return True
def title(self):
return '%s %s' % (u'{…}', self.text().lstrip())
from Foundation import objc
from Foundation import NSBundle
from AppKit import NSImage
haskellBundleIdentifier = 'org.purl.net.mkhl.haskell'
def iconForName(name):
"""Return the NSImage instance representing a `name` item."""
bundle = NSBundle.bundleWithIdentifier_(haskellBundleIdentifier)
imgpath = bundle.pathForResource_ofType_(name, 'png')
img = NSImage.alloc().initWithContentsOfFile_(imgpath)
img.autorelease()
return img
class HaskellModuleItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for modules"""
def isDecorator(self):
return True
def image(self):
return iconForName('module')
class HaskellTypeItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for datatypes"""
def isDecorator(self):
return True
def image(self):
return iconForName('type')
def isTextualizer(self):
return True
def title(self):
return self.text().lstrip()
class HaskellFunctionItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for functions"""
pass
class HaskellCodeBlockItem(objc.lookUpClass('ESCodeBlockItem')):
"""Itemizer for code blocks"""
def isTextualizer(self):
return True
def title(self):
return '%s %s' % (u'{…}', self.text().lstrip())
Simplify the icon finder function.
Simplify the icon finder function.
We statically know our bundle identifier, so we don’t have too find the bundle by runtime class.
Python
mit
mkhl/haskell.sugar
python
## Code Before:
from Foundation import objc
from Foundation import NSBundle
from AppKit import NSImage
def iconForName(klass, name):
"""Return the NSImage instance representing a `name` item."""
imgpath = NSBundle.bundleForClass_(klass).pathForResource_ofType_(name, 'png')
img = NSImage.alloc().initWithContentsOfFile_(imgpath)
img.autorelease()
return img
class HaskellModuleItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for modules"""
def isDecorator(self):
return True
def image(self):
return iconForName(self.class__(), 'module')
class HaskellTypeItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for datatypes"""
def isDecorator(self):
return True
def image(self):
return iconForName(self.class__(), 'type')
def isTextualizer(self):
return True
def title(self):
return self.text().lstrip()
class HaskellFunctionItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for functions"""
pass
class HaskellCodeBlockItem(objc.lookUpClass('ESCodeBlockItem')):
"""Itemizer for code blocks"""
def isTextualizer(self):
return True
def title(self):
return '%s %s' % (u'{…}', self.text().lstrip())
## Instruction:
Simplify the icon finder function.
We statically know our bundle identifier, so we don’t have too find the bundle by runtime class.
## Code After:
from Foundation import objc
from Foundation import NSBundle
from AppKit import NSImage
haskellBundleIdentifier = 'org.purl.net.mkhl.haskell'
def iconForName(name):
"""Return the NSImage instance representing a `name` item."""
bundle = NSBundle.bundleWithIdentifier_(haskellBundleIdentifier)
imgpath = bundle.pathForResource_ofType_(name, 'png')
img = NSImage.alloc().initWithContentsOfFile_(imgpath)
img.autorelease()
return img
class HaskellModuleItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for modules"""
def isDecorator(self):
return True
def image(self):
return iconForName('module')
class HaskellTypeItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for datatypes"""
def isDecorator(self):
return True
def image(self):
return iconForName('type')
def isTextualizer(self):
return True
def title(self):
return self.text().lstrip()
class HaskellFunctionItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for functions"""
pass
class HaskellCodeBlockItem(objc.lookUpClass('ESCodeBlockItem')):
"""Itemizer for code blocks"""
def isTextualizer(self):
return True
def title(self):
return '%s %s' % (u'{…}', self.text().lstrip())
// ... existing code ...
from Foundation import NSBundle
from AppKit import NSImage
haskellBundleIdentifier = 'org.purl.net.mkhl.haskell'
def iconForName(name):
"""Return the NSImage instance representing a `name` item."""
bundle = NSBundle.bundleWithIdentifier_(haskellBundleIdentifier)
imgpath = bundle.pathForResource_ofType_(name, 'png')
img = NSImage.alloc().initWithContentsOfFile_(imgpath)
img.autorelease()
return img
// ... modified code ...
return True
def image(self):
return iconForName('module')
class HaskellTypeItem(objc.lookUpClass('ESBaseItem')):
...
return True
def image(self):
return iconForName('type')
def isTextualizer(self):
return True
// ... rest of the code ...
44d20ecaf13cb0245ee562d234939e762b5b0921
include/agent.h
include/agent.h
// forward declaration
namespace Url
{
class Url;
}
namespace Rep
{
class Agent
{
public:
/* The type for the delay. */
typedef float delay_t;
/**
* Construct an agent.
*/
explicit Agent(const std::string& host) :
directives_(), delay_(-1.0), sorted_(true), host_(host) {}
/**
* Add an allowed directive.
*/
Agent& allow(const std::string& query);
/**
* Add a disallowed directive.
*/
Agent& disallow(const std::string& query);
/**
* Set the delay for this agent.
*/
Agent& delay(delay_t value) {
delay_ = value;
return *this;
}
/**
* Return the delay for this agent.
*/
delay_t delay() const { return delay_; }
/**
* A vector of the directives, in priority-sorted order.
*/
const std::vector<Directive>& directives() const;
/**
* Return true if the URL (either a full URL or a path) is allowed.
*/
bool allowed(const std::string& path) const;
std::string str() const;
private:
bool is_external(const Url::Url& url) const;
mutable std::vector<Directive> directives_;
delay_t delay_;
mutable bool sorted_;
std::string host_;
};
}
#endif
// forward declaration
namespace Url
{
class Url;
}
namespace Rep
{
class Agent
{
public:
/* The type for the delay. */
typedef float delay_t;
/**
* Default constructor
*/
Agent() : Agent("") {}
/**
* Construct an agent.
*/
explicit Agent(const std::string& host) :
directives_(), delay_(-1.0), sorted_(true), host_(host) {}
/**
* Add an allowed directive.
*/
Agent& allow(const std::string& query);
/**
* Add a disallowed directive.
*/
Agent& disallow(const std::string& query);
/**
* Set the delay for this agent.
*/
Agent& delay(delay_t value) {
delay_ = value;
return *this;
}
/**
* Return the delay for this agent.
*/
delay_t delay() const { return delay_; }
/**
* A vector of the directives, in priority-sorted order.
*/
const std::vector<Directive>& directives() const;
/**
* Return true if the URL (either a full URL or a path) is allowed.
*/
bool allowed(const std::string& path) const;
std::string str() const;
private:
bool is_external(const Url::Url& url) const;
mutable std::vector<Directive> directives_;
delay_t delay_;
mutable bool sorted_;
std::string host_;
};
}
#endif
Add back default constructor for Agent.
Add back default constructor for Agent.
Previously, this was removed in #28, but the Cython bindings in reppy
*really* want there to be a default constructor, so I'm adding it back
for convenience.
C
mit
seomoz/rep-cpp,seomoz/rep-cpp
c
## Code Before:
// forward declaration
namespace Url
{
class Url;
}
namespace Rep
{
class Agent
{
public:
/* The type for the delay. */
typedef float delay_t;
/**
* Construct an agent.
*/
explicit Agent(const std::string& host) :
directives_(), delay_(-1.0), sorted_(true), host_(host) {}
/**
* Add an allowed directive.
*/
Agent& allow(const std::string& query);
/**
* Add a disallowed directive.
*/
Agent& disallow(const std::string& query);
/**
* Set the delay for this agent.
*/
Agent& delay(delay_t value) {
delay_ = value;
return *this;
}
/**
* Return the delay for this agent.
*/
delay_t delay() const { return delay_; }
/**
* A vector of the directives, in priority-sorted order.
*/
const std::vector<Directive>& directives() const;
/**
* Return true if the URL (either a full URL or a path) is allowed.
*/
bool allowed(const std::string& path) const;
std::string str() const;
private:
bool is_external(const Url::Url& url) const;
mutable std::vector<Directive> directives_;
delay_t delay_;
mutable bool sorted_;
std::string host_;
};
}
#endif
## Instruction:
Add back default constructor for Agent.
Previously, this was removed in #28, but the Cython bindings in reppy
*really* want there to be a default constructor, so I'm adding it back
for convenience.
## Code After:
// forward declaration
namespace Url
{
class Url;
}
namespace Rep
{
class Agent
{
public:
/* The type for the delay. */
typedef float delay_t;
/**
* Default constructor
*/
Agent() : Agent("") {}
/**
* Construct an agent.
*/
explicit Agent(const std::string& host) :
directives_(), delay_(-1.0), sorted_(true), host_(host) {}
/**
* Add an allowed directive.
*/
Agent& allow(const std::string& query);
/**
* Add a disallowed directive.
*/
Agent& disallow(const std::string& query);
/**
* Set the delay for this agent.
*/
Agent& delay(delay_t value) {
delay_ = value;
return *this;
}
/**
* Return the delay for this agent.
*/
delay_t delay() const { return delay_; }
/**
* A vector of the directives, in priority-sorted order.
*/
const std::vector<Directive>& directives() const;
/**
* Return true if the URL (either a full URL or a path) is allowed.
*/
bool allowed(const std::string& path) const;
std::string str() const;
private:
bool is_external(const Url::Url& url) const;
mutable std::vector<Directive> directives_;
delay_t delay_;
mutable bool sorted_;
std::string host_;
};
}
#endif
# ... existing code ...
public:
/* The type for the delay. */
typedef float delay_t;
/**
* Default constructor
*/
Agent() : Agent("") {}
/**
* Construct an agent.
# ... rest of the code ...
1a71fba6224a9757f19e702a3b9a1cebf496a754
src/loop+blkback/plugin.py
src/loop+blkback/plugin.py
import os
import sys
import xapi
import xapi.plugin
from xapi.storage.datapath import log
class Implementation(xapi.plugin.Plugin_skeleton):
def query(self, dbg):
return {
"plugin": "loopdev+blkback",
"name": "The loopdev+blkback kernel-space datapath plugin",
"description": ("This plugin manages and configures loop"
" devices which can be connected to VMs"
" directly via kernel-space blkback"),
"vendor": "Citrix",
"copyright": "(C) 2015 Citrix Inc",
"version": "3.0",
"required_api_version": "3.0",
"features": [
],
"configuration": {},
"required_cluster_stack": []}
if __name__ == "__main__":
log.log_call_argv()
cmd = xapi.plugin.Plugin_commandline(Implementation())
base = os.path.basename(sys.argv[0])
if base == "Plugin.Query":
cmd.query()
else:
raise xapi.plugin.Unimplemented(base)
import os
import sys
import xapi
import xapi.storage.api.plugin
from xapi.storage import log
class Implementation(xapi.storage.api.plugin.Plugin_skeleton):
def query(self, dbg):
return {
"plugin": "loopdev+blkback",
"name": "The loopdev+blkback kernel-space datapath plugin",
"description": ("This plugin manages and configures loop"
" devices which can be connected to VMs"
" directly via kernel-space blkback"),
"vendor": "Citrix",
"copyright": "(C) 2015 Citrix Inc",
"version": "3.0",
"required_api_version": "3.0",
"features": [
],
"configuration": {},
"required_cluster_stack": []}
if __name__ == "__main__":
log.log_call_argv()
cmd = xapi.storage.api.plugin.Plugin_commandline(Implementation())
base = os.path.basename(sys.argv[0])
if base == "Plugin.Query":
cmd.query()
else:
raise xapi.storage.api.plugin.Unimplemented(base)
Use the new xapi.storage package hierarchy
Use the new xapi.storage package hierarchy
Signed-off-by: David Scott <[email protected]>
## Code Before:
import os
import sys
import xapi
import xapi.plugin
from xapi.storage.datapath import log
class Implementation(xapi.plugin.Plugin_skeleton):
def query(self, dbg):
return {
"plugin": "loopdev+blkback",
"name": "The loopdev+blkback kernel-space datapath plugin",
"description": ("This plugin manages and configures loop"
" devices which can be connected to VMs"
" directly via kernel-space blkback"),
"vendor": "Citrix",
"copyright": "(C) 2015 Citrix Inc",
"version": "3.0",
"required_api_version": "3.0",
"features": [
],
"configuration": {},
"required_cluster_stack": []}
if __name__ == "__main__":
log.log_call_argv()
cmd = xapi.plugin.Plugin_commandline(Implementation())
base = os.path.basename(sys.argv[0])
if base == "Plugin.Query":
cmd.query()
else:
raise xapi.plugin.Unimplemented(base)
## Instruction:
Use the new xapi.storage package hierarchy
Signed-off-by: David Scott <[email protected]>
## Code After:
import os
import sys
import xapi
import xapi.storage.api.plugin
from xapi.storage import log
class Implementation(xapi.storage.api.plugin.Plugin_skeleton):
def query(self, dbg):
return {
"plugin": "loopdev+blkback",
"name": "The loopdev+blkback kernel-space datapath plugin",
"description": ("This plugin manages and configures loop"
" devices which can be connected to VMs"
" directly via kernel-space blkback"),
"vendor": "Citrix",
"copyright": "(C) 2015 Citrix Inc",
"version": "3.0",
"required_api_version": "3.0",
"features": [
],
"configuration": {},
"required_cluster_stack": []}
if __name__ == "__main__":
log.log_call_argv()
cmd = xapi.storage.api.plugin.Plugin_commandline(Implementation())
base = os.path.basename(sys.argv[0])
if base == "Plugin.Query":
cmd.query()
else:
raise xapi.storage.api.plugin.Unimplemented(base)
# ... existing code ...
import os
import sys
import xapi
import xapi.storage.api.plugin
from xapi.storage import log
class Implementation(xapi.storage.api.plugin.Plugin_skeleton):
def query(self, dbg):
return {
# ... modified code ...
if __name__ == "__main__":
log.log_call_argv()
cmd = xapi.storage.api.plugin.Plugin_commandline(Implementation())
base = os.path.basename(sys.argv[0])
if base == "Plugin.Query":
cmd.query()
else:
raise xapi.storage.api.plugin.Unimplemented(base)
# ... rest of the code ...
fc7db2a55ad3f612ac6ef01cfa57ce03040708a5
evelink/__init__.py
evelink/__init__.py
"""EVELink - Python bindings for the EVE API."""
import logging
from evelink import account
from evelink import api
from evelink import char
from evelink import constants
from evelink import corp
from evelink import eve
from evelink import map
from evelink import parsing
from evelink import server
# Implement NullHandler because it was only added in Python 2.7+.
class NullHandler(logging.Handler):
def emit(self, record):
pass
# Create a logger, but by default, have it do nothing
_log = logging.getLogger('evelink')
_log.addHandler(NullHandler())
__all__ = [
"account",
"api",
"char",
"constants",
"corp",
"eve",
"map",
"parsing",
"server",
]
"""EVELink - Python bindings for the EVE API."""
import logging
from evelink import account
from evelink import api
from evelink import char
from evelink import constants
from evelink import corp
from evelink import eve
from evelink import map
from evelink import server
# Implement NullHandler because it was only added in Python 2.7+.
class NullHandler(logging.Handler):
def emit(self, record):
pass
# Create a logger, but by default, have it do nothing
_log = logging.getLogger('evelink')
_log.addHandler(NullHandler())
__all__ = [
"account",
"api",
"char",
"constants",
"corp",
"eve",
"map",
"parsing",
"server",
]
## Code Before:
"""EVELink - Python bindings for the EVE API."""
import logging
from evelink import account
from evelink import api
from evelink import char
from evelink import constants
from evelink import corp
from evelink import eve
from evelink import map
from evelink import parsing
from evelink import server
# Implement NullHandler because it was only added in Python 2.7+.
class NullHandler(logging.Handler):
def emit(self, record):
pass
# Create a logger, but by default, have it do nothing
_log = logging.getLogger('evelink')
_log.addHandler(NullHandler())
__all__ = [
"account",
"api",
"char",
"constants",
"corp",
"eve",
"map",
"parsing",
"server",
]
## Instruction:
Remove parsing from public interface
## Code After:
"""EVELink - Python bindings for the EVE API."""
import logging
from evelink import account
from evelink import api
from evelink import char
from evelink import constants
from evelink import corp
from evelink import eve
from evelink import map
from evelink import server
# Implement NullHandler because it was only added in Python 2.7+.
class NullHandler(logging.Handler):
def emit(self, record):
pass
# Create a logger, but by default, have it do nothing
_log = logging.getLogger('evelink')
_log.addHandler(NullHandler())
__all__ = [
"account",
"api",
"char",
"constants",
"corp",
"eve",
"map",
"parsing",
"server",
]
# ... existing code ...
from evelink import corp
from evelink import eve
from evelink import map
from evelink import server
# Implement NullHandler because it was only added in Python 2.7+.
# ... rest of the code ...
package seedu.jimi.commons.events.ui;
import seedu.jimi.commons.events.BaseEvent;
/**
* Indicates user request to show a section of the taskList panel.
* @author zexuan
*
*/
public class ShowTaskPanelSectionEvent extends BaseEvent{
String sectionToDisplay;
public ShowTaskPanelSectionEvent(String sectionToDisplay) {
this.sectionToDisplay = sectionToDisplay;
}
@Override
public String toString() {
return this.getClass().getSimpleName();
}
}
package seedu.jimi.commons.events.ui;
import seedu.jimi.commons.events.BaseEvent;
/**
* Indicates user request to show a section of the taskList panel.
* @author zexuan
*
*/
public class ShowTaskPanelSectionEvent extends BaseEvent{
public final String sectionToDisplay;
public ShowTaskPanelSectionEvent(String sectionToDisplay) {
this.sectionToDisplay = sectionToDisplay;
}
@Override
public String toString() {
return this.getClass().getSimpleName();
}
}
## Code Before:
package seedu.jimi.commons.events.ui;
import seedu.jimi.commons.events.BaseEvent;
/**
* Indicates user request to show a section of the taskList panel.
* @author zexuan
*
*/
public class ShowTaskPanelSectionEvent extends BaseEvent{
String sectionToDisplay;
public ShowTaskPanelSectionEvent(String sectionToDisplay) {
this.sectionToDisplay = sectionToDisplay;
}
@Override
public String toString() {
return this.getClass().getSimpleName();
}
}
## Instruction:
Modify access level of member var
## Code After:
package seedu.jimi.commons.events.ui;
import seedu.jimi.commons.events.BaseEvent;
/**
* Indicates user request to show a section of the taskList panel.
* @author zexuan
*
*/
public class ShowTaskPanelSectionEvent extends BaseEvent{
public final String sectionToDisplay;
public ShowTaskPanelSectionEvent(String sectionToDisplay) {
this.sectionToDisplay = sectionToDisplay;
}
@Override
public String toString() {
return this.getClass().getSimpleName();
}
}
...
*/
public class ShowTaskPanelSectionEvent extends BaseEvent{
public final String sectionToDisplay;
public ShowTaskPanelSectionEvent(String sectionToDisplay) {
this.sectionToDisplay = sectionToDisplay;
...
## Code Before:
from django.contrib import admin
from comics.core import models
class ComicAdmin(admin.ModelAdmin):
list_display = ('slug', 'name', 'language', 'url', 'rights')
prepopulated_fields = {
'slug': ('name',)
}
class ReleaseAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'comic', 'pub_date', 'fetched')
list_filter = ['pub_date', 'fetched', 'comic']
date_hierarchy = 'pub_date'
exclude = ('images',)
class ImageAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'file', 'height', 'width', 'fetched', 'title', 'text')
list_filter = ['fetched', 'comic']
date_hierarchy = 'fetched'
admin.site.register(models.Comic, ComicAdmin)
admin.site.register(models.Release, ReleaseAdmin)
admin.site.register(models.Image, ImageAdmin)
## Instruction:
Include start date, end date, and active flag in comics list
## Code After:
from django.contrib import admin
from comics.core import models
class ComicAdmin(admin.ModelAdmin):
list_display = ('slug', 'name', 'language', 'url', 'rights', 'start_date',
'end_date', 'active')
prepopulated_fields = {
'slug': ('name',)
}
class ReleaseAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'comic', 'pub_date', 'fetched')
list_filter = ['pub_date', 'fetched', 'comic']
date_hierarchy = 'pub_date'
exclude = ('images',)
class ImageAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'file', 'height', 'width', 'fetched', 'title', 'text')
list_filter = ['fetched', 'comic']
date_hierarchy = 'fetched'
admin.site.register(models.Comic, ComicAdmin)
admin.site.register(models.Release, ReleaseAdmin)
admin.site.register(models.Image, ImageAdmin)
// ... existing code ...
class ComicAdmin(admin.ModelAdmin):
list_display = ('slug', 'name', 'language', 'url', 'rights', 'start_date',
'end_date', 'active')
prepopulated_fields = {
'slug': ('name',)
}
// ... rest of the code ...
c6a161b5c0fa3d76b09b34dfab8f057e8b10bce2
tests/test_extensions.py
tests/test_extensions.py
import unittest
class TestExtensions(unittest.TestCase):
def test_import_extension(self):
import pybel.ext.test
assert pybel.ext.test.an_extension_function() == 42
def test_import_extension_2(self):
from pybel.ext.test import an_extension_function
assert an_extension_function() == 42
def test_import_extension_3(self):
from pybel.ext import test
assert test.an_extension_function() == 42
import unittest
class TestExtensions(unittest.TestCase):
def test_import_extension(self):
import pybel.ext.test
assert pybel.ext.test.an_extension_function() == 42
def test_import_extension_2(self):
from pybel.ext.test import an_extension_function
assert an_extension_function() == 42
def test_import_extension_3(self):
from pybel.ext import test
assert test.an_extension_function() == 42
def test_import_extension_4(self):
with self.assertRaises(ImportError):
from pybel.ext import not_an_extension
Add a test for importing a nonexistent extension
Add a test for importing a nonexistent extension
Python
mit
pybel/pybel,pybel/pybel,pybel/pybel
python
## Code Before:
import unittest
class TestExtensions(unittest.TestCase):
def test_import_extension(self):
import pybel.ext.test
assert pybel.ext.test.an_extension_function() == 42
def test_import_extension_2(self):
from pybel.ext.test import an_extension_function
assert an_extension_function() == 42
def test_import_extension_3(self):
from pybel.ext import test
assert test.an_extension_function() == 42
## Instruction:
Add a test for importing a nonexistent extension
## Code After:
import unittest
class TestExtensions(unittest.TestCase):
def test_import_extension(self):
import pybel.ext.test
assert pybel.ext.test.an_extension_function() == 42
def test_import_extension_2(self):
from pybel.ext.test import an_extension_function
assert an_extension_function() == 42
def test_import_extension_3(self):
from pybel.ext import test
assert test.an_extension_function() == 42
def test_import_extension_4(self):
with self.assertRaises(ImportError):
from pybel.ext import not_an_extension
...
from pybel.ext import test
assert test.an_extension_function() == 42
def test_import_extension_4(self):
with self.assertRaises(ImportError):
from pybel.ext import not_an_extension
...
bf007267246bd317dc3ccad9f5cf8a9f452b3e0b
firecares/utils/__init__.py
firecares/utils/__init__.py
from django.core.files.storage import get_storage_class
from storages.backends.s3boto import S3BotoStorage
from PIL import Image
def convert_png_to_jpg(img):
"""
Converts a png to a jpg.
:param img: Absolute path to the image.
:returns: the filename
"""
im = Image.open(img)
bg = Image.new("RGB", im.size, (255, 255, 255))
bg.paste(im, im)
filename = img.replace('png', 'jpg')
bg.save(filename, quality=85)
return filename
class CachedS3BotoStorage(S3BotoStorage):
"""
S3 storage backend that saves the files locally, too.
"""
def __init__(self, *args, **kwargs):
super(CachedS3BotoStorage, self).__init__(*args, **kwargs)
self.local_storage = get_storage_class(
"compressor.storage.CompressorFileStorage")()
def save(self, name, content):
name = super(CachedS3BotoStorage, self).save(name, content)
self.local_storage._save(name, content)
return name
def dictfetchall(cursor):
"""
Returns all rows from a cursor as a dict
"""
desc = cursor.description
return [
dict(zip([col[0] for col in desc], row))
for row in cursor.fetchall()
]
from django.core.files.storage import get_storage_class
from storages.backends.s3boto import S3BotoStorage
from PIL import Image
class CachedS3BotoStorage(S3BotoStorage):
"""
S3 storage backend that saves the files locally, too.
"""
def __init__(self, *args, **kwargs):
super(CachedS3BotoStorage, self).__init__(*args, **kwargs)
self.local_storage = get_storage_class(
"compressor.storage.CompressorFileStorage")()
def save(self, name, content):
name = super(CachedS3BotoStorage, self).save(name, content)
self.local_storage._save(name, content)
return name
def dictfetchall(cursor):
"""
Returns all rows from a cursor as a dict
"""
desc = cursor.description
return [
dict(zip([col[0] for col in desc], row))
for row in cursor.fetchall()
]
## Code Before:
from django.core.files.storage import get_storage_class
from storages.backends.s3boto import S3BotoStorage
from PIL import Image
def convert_png_to_jpg(img):
"""
Converts a png to a jpg.
:param img: Absolute path to the image.
:returns: the filename
"""
im = Image.open(img)
bg = Image.new("RGB", im.size, (255, 255, 255))
bg.paste(im, im)
filename = img.replace('png', 'jpg')
bg.save(filename, quality=85)
return filename
class CachedS3BotoStorage(S3BotoStorage):
"""
S3 storage backend that saves the files locally, too.
"""
def __init__(self, *args, **kwargs):
super(CachedS3BotoStorage, self).__init__(*args, **kwargs)
self.local_storage = get_storage_class(
"compressor.storage.CompressorFileStorage")()
def save(self, name, content):
name = super(CachedS3BotoStorage, self).save(name, content)
self.local_storage._save(name, content)
return name
def dictfetchall(cursor):
"""
Returns all rows from a cursor as a dict
"""
desc = cursor.description
return [
dict(zip([col[0] for col in desc], row))
for row in cursor.fetchall()
]
## Instruction:
Remove the unused convert_png_to_jpg method.
## Code After:
from django.core.files.storage import get_storage_class
from storages.backends.s3boto import S3BotoStorage
from PIL import Image
class CachedS3BotoStorage(S3BotoStorage):
"""
S3 storage backend that saves the files locally, too.
"""
def __init__(self, *args, **kwargs):
super(CachedS3BotoStorage, self).__init__(*args, **kwargs)
self.local_storage = get_storage_class(
"compressor.storage.CompressorFileStorage")()
def save(self, name, content):
name = super(CachedS3BotoStorage, self).save(name, content)
self.local_storage._save(name, content)
return name
def dictfetchall(cursor):
"""
Returns all rows from a cursor as a dict
"""
desc = cursor.description
return [
dict(zip([col[0] for col in desc], row))
for row in cursor.fetchall()
]
# ... existing code ...
from django.core.files.storage import get_storage_class
from storages.backends.s3boto import S3BotoStorage
from PIL import Image
class CachedS3BotoStorage(S3BotoStorage):
# ... rest of the code ...
## Code Before:
from django.conf import settings
from scheduler.spawners.templates.env_vars import get_from_app_secret
def get_service_env_vars():
return [
get_from_app_secret('POLYAXON_SECRET_KEY', 'polyaxon-secret'),
get_from_app_secret('POLYAXON_INTERNAL_SECRET_TOKEN', 'polyaxon-internal-secret-token'),
get_from_app_secret('POLYAXON_RABBITMQ_PASSWORD', 'rabbitmq-password',
settings.POLYAXON_K8S_RABBITMQ_SECRET_NAME)
]
## Instruction:
Add api url to default env vars
## Code After:
from django.conf import settings
from libs.api import API_KEY_NAME, get_settings_api_url
from scheduler.spawners.templates.env_vars import get_env_var, get_from_app_secret
def get_service_env_vars():
return [
get_from_app_secret('POLYAXON_SECRET_KEY', 'polyaxon-secret'),
get_from_app_secret('POLYAXON_INTERNAL_SECRET_TOKEN', 'polyaxon-internal-secret-token'),
get_from_app_secret('POLYAXON_RABBITMQ_PASSWORD', 'rabbitmq-password',
settings.POLYAXON_K8S_RABBITMQ_SECRET_NAME),
get_env_var(name=API_KEY_NAME, value=get_settings_api_url()),
]
# ... existing code ...
from django.conf import settings
from libs.api import API_KEY_NAME, get_settings_api_url
from scheduler.spawners.templates.env_vars import get_env_var, get_from_app_secret
def get_service_env_vars():
# ... modified code ...
get_from_app_secret('POLYAXON_SECRET_KEY', 'polyaxon-secret'),
get_from_app_secret('POLYAXON_INTERNAL_SECRET_TOKEN', 'polyaxon-internal-secret-token'),
get_from_app_secret('POLYAXON_RABBITMQ_PASSWORD', 'rabbitmq-password',
settings.POLYAXON_K8S_RABBITMQ_SECRET_NAME),
get_env_var(name=API_KEY_NAME, value=get_settings_api_url()),
]
# ... rest of the code ...
1f3730ac4d531ca0d582a8b8bded871acb409847
backend/api-server/warehaus_api/events/models.py
backend/api-server/warehaus_api/events/models.py
from .. import db
class Event(db.Model):
timestamp = db.Field()
obj_id = db.Field() # The object for which this event was created about
user_id = db.Field() # The user who performed the action
# A list of IDs which are interested in this event. For example, when creating
# a server we obviously want this event to be shows in the server page, but we
# also want it to be shown in the lab page. So we put two IDs in the list: the
# server ID and the lab ID.
# Another example is when we delete the server. Then we would be able to show
# that event in the lab page although the server is already deleted.
interested_ids = db.Field()
title = db.Field() # Event title
content = db.Field() # Event content
def create_event(obj_id, user_id, interested_ids, title, content=''):
event = Event(
timestamp = db.times.now(),
obj_id = obj_id,
interested_ids = interested_ids,
title = title,
content = content,
)
event.save()
from .. import db
class Event(db.Model):
timestamp = db.Field()
obj_id = db.Field() # The object for which this event was created about
user_id = db.Field() # The user who performed the action
# A list of IDs which are interested in this event. For example, when creating
# a server we obviously want this event to be shows in the server page, but we
# also want it to be shown in the lab page. So we put two IDs in the list: the
# server ID and the lab ID.
# Another example is when we delete the server. Then we would be able to show
# that event in the lab page although the server is already deleted.
interested_ids = db.Field()
title = db.Field() # Event title
content = db.Field() # Event content
def create_event(obj_id, user_id, interested_ids, title, content=''):
event = Event(
timestamp = db.times.now(),
obj_id = obj_id,
user_id = user_id,
interested_ids = interested_ids,
title = title,
content = content,
)
event.save()
## Code Before:
from .. import db
class Event(db.Model):
timestamp = db.Field()
obj_id = db.Field() # The object for which this event was created about
user_id = db.Field() # The user who performed the action
# A list of IDs which are interested in this event. For example, when creating
# a server we obviously want this event to be shows in the server page, but we
# also want it to be shown in the lab page. So we put two IDs in the list: the
# server ID and the lab ID.
# Another example is when we delete the server. Then we would be able to show
# that event in the lab page although the server is already deleted.
interested_ids = db.Field()
title = db.Field() # Event title
content = db.Field() # Event content
def create_event(obj_id, user_id, interested_ids, title, content=''):
event = Event(
timestamp = db.times.now(),
obj_id = obj_id,
interested_ids = interested_ids,
title = title,
content = content,
)
event.save()
## Instruction:
Fix api-server events not saving the user ID
## Code After:
from .. import db
class Event(db.Model):
timestamp = db.Field()
obj_id = db.Field() # The object for which this event was created about
user_id = db.Field() # The user who performed the action
# A list of IDs which are interested in this event. For example, when creating
# a server we obviously want this event to be shows in the server page, but we
# also want it to be shown in the lab page. So we put two IDs in the list: the
# server ID and the lab ID.
# Another example is when we delete the server. Then we would be able to show
# that event in the lab page although the server is already deleted.
interested_ids = db.Field()
title = db.Field() # Event title
content = db.Field() # Event content
def create_event(obj_id, user_id, interested_ids, title, content=''):
event = Event(
timestamp = db.times.now(),
obj_id = obj_id,
user_id = user_id,
interested_ids = interested_ids,
title = title,
content = content,
)
event.save()
package org.hildan.fx.components;
import java.util.function.Predicate;
import javafx.scene.control.cell.TextFieldListCell;
import javafx.util.StringConverter;
import org.hildan.fxlog.themes.Css;
public class ValidatingTextFieldListCell<T> extends TextFieldListCell<T> {
private final Predicate<String> validator;
public ValidatingTextFieldListCell(StringConverter<T> converter, Predicate<String> validator) {
super(converter);
this.validator = validator;
}
public void commitEdit(T item) {
if (!isEditing()) {
return;
}
boolean itemIsValid = validator.test(getText());
pseudoClassStateChanged(Css.INVALID, !itemIsValid);
if (itemIsValid) {
// only commit if the item is valid, otherwise we stay in edit state
super.commitEdit(item);
}
}
}
package org.hildan.fx.components;
import java.util.function.Predicate;
import javafx.scene.control.*;
import javafx.scene.control.cell.TextFieldListCell;
import javafx.util.StringConverter;
import org.hildan.fxlog.themes.Css;
public class ValidatingTextFieldListCell<T> extends TextFieldListCell<T> {
private final Predicate<String> validator;
public ValidatingTextFieldListCell(StringConverter<T> converter, Predicate<String> validator) {
super(converter);
this.validator = validator;
}
public void commitEdit(T item) {
if (!isEditing()) {
return;
}
// the edited text is not in getText() but in the TextField used as Graphic for this cell
TextField textField = (TextField) getGraphic();
String editedText = textField.getText();
boolean itemIsValid = validator.test(editedText);
pseudoClassStateChanged(Css.INVALID, !itemIsValid);
if (itemIsValid) {
// only commit if the item is valid, otherwise we stay in edit state
super.commitEdit(item);
}
}
}
Fix ValidatedTextFieldListCell to use the actual edited text
Fix ValidatedTextFieldListCell to use the actual edited text
Java
mit
joffrey-bion/fx-log
java
## Code Before:
package org.hildan.fx.components;
import java.util.function.Predicate;
import javafx.scene.control.cell.TextFieldListCell;
import javafx.util.StringConverter;
import org.hildan.fxlog.themes.Css;
public class ValidatingTextFieldListCell<T> extends TextFieldListCell<T> {
private final Predicate<String> validator;
public ValidatingTextFieldListCell(StringConverter<T> converter, Predicate<String> validator) {
super(converter);
this.validator = validator;
}
public void commitEdit(T item) {
if (!isEditing()) {
return;
}
boolean itemIsValid = validator.test(getText());
pseudoClassStateChanged(Css.INVALID, !itemIsValid);
if (itemIsValid) {
// only commit if the item is valid, otherwise we stay in edit state
super.commitEdit(item);
}
}
}
## Instruction:
Fix ValidatedTextFieldListCell to use the actual edited text
## Code After:
package org.hildan.fx.components;
import java.util.function.Predicate;
import javafx.scene.control.*;
import javafx.scene.control.cell.TextFieldListCell;
import javafx.util.StringConverter;
import org.hildan.fxlog.themes.Css;
public class ValidatingTextFieldListCell<T> extends TextFieldListCell<T> {
private final Predicate<String> validator;
public ValidatingTextFieldListCell(StringConverter<T> converter, Predicate<String> validator) {
super(converter);
this.validator = validator;
}
public void commitEdit(T item) {
if (!isEditing()) {
return;
}
// the edited text is not in getText() but in the TextField used as Graphic for this cell
TextField textField = (TextField) getGraphic();
String editedText = textField.getText();
boolean itemIsValid = validator.test(editedText);
pseudoClassStateChanged(Css.INVALID, !itemIsValid);
if (itemIsValid) {
// only commit if the item is valid, otherwise we stay in edit state
super.commitEdit(item);
}
}
}
// ... existing code ...
import java.util.function.Predicate;
import javafx.scene.control.*;
import javafx.scene.control.cell.TextFieldListCell;
import javafx.util.StringConverter;
// ... modified code ...
if (!isEditing()) {
return;
}
// the edited text is not in getText() but in the TextField used as Graphic for this cell
TextField textField = (TextField) getGraphic();
String editedText = textField.getText();
boolean itemIsValid = validator.test(editedText);
pseudoClassStateChanged(Css.INVALID, !itemIsValid);
if (itemIsValid) {
// only commit if the item is valid, otherwise we stay in edit state
// ... rest of the code ...
234df393c438fdf729dc050d20084e1fe1a4c2ee
backend/mcapi/mcdir.py
backend/mcapi/mcdir.py
import utils
from os import environ
import os.path
MCDIR = environ.get("MCDIR") or '/mcfs/data'
def for_uid(uidstr):
pieces = uidstr.split('-')
path = os.path.join(MCDIR, pieces[1][0:2], pieces[1][2:4])
utils.mkdirp(path)
return path
import utils
from os import environ
import os.path
MCDIR = environ.get("MCDIR") or '/mcfs/data/materialscommons'
def for_uid(uidstr):
pieces = uidstr.split('-')
path = os.path.join(MCDIR, pieces[1][0:2], pieces[1][2:4])
utils.mkdirp(path)
return path
import unittest
from .base import FunctionalTestCase
from .pages import game
class StylesheetTests(FunctionalTestCase):
def test_color_css_loaded(self):
self.story('Create a game')
self.browser.get(self.live_server_url)
page = game.Homepage(self.browser)
page.start_button.click()
self.assertTrue(any('css/color.css' in s.get_attribute('href')
for s in page.stylesheets))
def test_main_stylesheet_loaded(self):
self.story('Load the start page')
self.browser.get(self.live_server_url)
page = game.Homepage(self.browser)
self.assertTrue(any('css/main.css' in s.get_attribute('href')
for s in page.stylesheets))
import unittest
from .base import FunctionalTestCase
from .pages import game
class StylesheetTests(FunctionalTestCase):
def test_color_css_loaded(self):
self.story('Create a game')
self.browser.get(self.live_server_url)
page = game.Homepage(self.browser)
page.start_button.click()
self.assertTrue(any('css/color.css' in s.get_attribute('href')
for s in page.stylesheets))
def test_main_stylesheet_loaded(self):
self.story('Load the start page')
self.browser.get(self.live_server_url)
page = game.Homepage(self.browser)
self.assertTrue(any('css/main.css' in s.get_attribute('href')
for s in page.stylesheets))
# Test constant to see if css actually gets loaded
self.assertEqual('rgb(55, 71, 79)',
page.bank_cash.value_of_css_property('border-color'))
## Code Before:
import unittest
from .base import FunctionalTestCase
from .pages import game
class StylesheetTests(FunctionalTestCase):
def test_color_css_loaded(self):
self.story('Create a game')
self.browser.get(self.live_server_url)
page = game.Homepage(self.browser)
page.start_button.click()
self.assertTrue(any('css/color.css' in s.get_attribute('href')
for s in page.stylesheets))
def test_main_stylesheet_loaded(self):
self.story('Load the start page')
self.browser.get(self.live_server_url)
page = game.Homepage(self.browser)
self.assertTrue(any('css/main.css' in s.get_attribute('href')
for s in page.stylesheets))
## Instruction:
Test is loaded CSS is applied
## Code After:
import unittest
from .base import FunctionalTestCase
from .pages import game
class StylesheetTests(FunctionalTestCase):
def test_color_css_loaded(self):
self.story('Create a game')
self.browser.get(self.live_server_url)
page = game.Homepage(self.browser)
page.start_button.click()
self.assertTrue(any('css/color.css' in s.get_attribute('href')
for s in page.stylesheets))
def test_main_stylesheet_loaded(self):
self.story('Load the start page')
self.browser.get(self.live_server_url)
page = game.Homepage(self.browser)
self.assertTrue(any('css/main.css' in s.get_attribute('href')
for s in page.stylesheets))
# Test constant to see if css actually gets loaded
self.assertEqual('rgb(55, 71, 79)',
page.bank_cash.value_of_css_property('border-color'))
// ... existing code ...
self.assertTrue(any('css/main.css' in s.get_attribute('href')
for s in page.stylesheets))
# Test constant to see if css actually gets loaded
self.assertEqual('rgb(55, 71, 79)',
page.bank_cash.value_of_css_property('border-color'))
// ... rest of the code ...
a077a5b7731e7d609b5c3adc8f8176ad79053f17
rmake/lib/twisted_extras/tools.py
rmake/lib/twisted_extras/tools.py
from twisted.internet import defer
class Serializer(object):
def __init__(self):
self._lock = defer.DeferredLock()
self._waiting = {}
def call(self, func, args=(), kwargs=None, collapsible=False):
d = self._lock.acquire()
self._waiting[d] = collapsible
if not kwargs:
kwargs = {}
@d.addCallback
def _locked(_):
if collapsible and len(self._waiting) > 1:
# Superseded
return
return func(*args, **kwargs)
@d.addBoth
def _unlock(result):
self._lock.release()
del self._waiting[d]
return result
return d
from twisted.internet import defer
class Serializer(object):
def __init__(self):
self._lock = defer.DeferredLock()
self._waiting = {}
def call(self, func, args=(), kwargs=None, collapsible=False):
d = self._lock.acquire()
self._waiting[d] = collapsible
if not kwargs:
kwargs = {}
@d.addCallback
def _locked(_):
if collapsible and len(self._waiting) > 1:
# Superseded
return
return func(*args, **kwargs)
@d.addBoth
def _unlock(result):
del self._waiting[d]
self._lock.release()
return result
return d
Fix Serializer locking bug that caused it to skip calls it should have made
Fix Serializer locking bug that caused it to skip calls it should have made
## Code Before:
from twisted.internet import defer
class Serializer(object):
def __init__(self):
self._lock = defer.DeferredLock()
self._waiting = {}
def call(self, func, args=(), kwargs=None, collapsible=False):
d = self._lock.acquire()
self._waiting[d] = collapsible
if not kwargs:
kwargs = {}
@d.addCallback
def _locked(_):
if collapsible and len(self._waiting) > 1:
# Superseded
return
return func(*args, **kwargs)
@d.addBoth
def _unlock(result):
self._lock.release()
del self._waiting[d]
return result
return d
## Instruction:
Fix Serializer locking bug that caused it to skip calls it should have made
## Code After:
from twisted.internet import defer
class Serializer(object):
def __init__(self):
self._lock = defer.DeferredLock()
self._waiting = {}
def call(self, func, args=(), kwargs=None, collapsible=False):
d = self._lock.acquire()
self._waiting[d] = collapsible
if not kwargs:
kwargs = {}
@d.addCallback
def _locked(_):
if collapsible and len(self._waiting) > 1:
# Superseded
return
return func(*args, **kwargs)
@d.addBoth
def _unlock(result):
del self._waiting[d]
self._lock.release()
return result
return d
// ... existing code ...
return func(*args, **kwargs)
@d.addBoth
def _unlock(result):
del self._waiting[d]
self._lock.release()
return result
return d
// ... rest of the code ...
d45391429f01d5d4ea22e28bef39a2bb419df04f
djangae/apps.py
djangae/apps.py
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
from django.core.exceptions import ImproperlyConfigured
class DjangaeConfig(AppConfig):
name = 'djangae'
verbose_name = _("Djangae")
def ready(self):
from djangae.db.backends.appengine.caching import reset_context
from django.core.signals import request_finished, request_started
request_finished.connect(reset_context, dispatch_uid="request_finished_context_reset")
request_started.connect(reset_context, dispatch_uid="request_started_context_reset")
from django.conf import settings
if 'django.contrib.contenttypes' in settings.INSTALLED_APPS and (
not 'djangae.contrib.contenttypes' in settings.INSTALLED_APPS):
raise ImproperlyConfigured(
"If you're using django.contrib.contenttypes, then you need "
"to add djangae.contrib.contenttypes to INSTALLED_APPS after "
"django.contrib.contenttypes."
)
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
from django.core.exceptions import ImproperlyConfigured
class DjangaeConfig(AppConfig):
name = 'djangae'
verbose_name = _("Djangae")
def ready(self):
from djangae.db.backends.appengine.caching import reset_context
from django.core.signals import request_finished, request_started
request_finished.connect(reset_context, dispatch_uid="request_finished_context_reset")
request_started.connect(reset_context, dispatch_uid="request_started_context_reset")
from django.conf import settings
contenttype_configuration_error = ImproperlyConfigured(
"If you're using django.contrib.contenttypes, then you need "
"to add djangae.contrib.contenttypes to INSTALLED_APPS after "
"django.contrib.contenttypes."
)
if 'django.contrib.contenttypes' in settings.INSTALLED_APPS:
if not 'djangae.contrib.contenttypes' in settings.INSTALLED_APPS:
# Raise error if User is using Django CT, but not Djangae
raise contenttype_configuration_error
else:
if settings.INSTALLED_APPS.index('django.contrib.contenttypes') > \
settings.INSTALLED_APPS.index('djangae.contrib.contenttypes'):
# Raise error if User is using both Django and Djangae CT, but
# Django CT comes after Djangae CT
raise contenttype_configuration_error
Raise configuration error if django.contrib.contenttypes comes after djangae.contrib.contenttypes
Raise configuration error if django.contrib.contenttypes comes after djangae.contrib.contenttypes
## Code Before:
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
from django.core.exceptions import ImproperlyConfigured
class DjangaeConfig(AppConfig):
name = 'djangae'
verbose_name = _("Djangae")
def ready(self):
from djangae.db.backends.appengine.caching import reset_context
from django.core.signals import request_finished, request_started
request_finished.connect(reset_context, dispatch_uid="request_finished_context_reset")
request_started.connect(reset_context, dispatch_uid="request_started_context_reset")
from django.conf import settings
if 'django.contrib.contenttypes' in settings.INSTALLED_APPS and (
not 'djangae.contrib.contenttypes' in settings.INSTALLED_APPS):
raise ImproperlyConfigured(
"If you're using django.contrib.contenttypes, then you need "
"to add djangae.contrib.contenttypes to INSTALLED_APPS after "
"django.contrib.contenttypes."
)
## Instruction:
Raise configuration error if django.contrib.contenttypes comes after djangae.contrib.contenttypes
## Code After:
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
from django.core.exceptions import ImproperlyConfigured
class DjangaeConfig(AppConfig):
name = 'djangae'
verbose_name = _("Djangae")
def ready(self):
from djangae.db.backends.appengine.caching import reset_context
from django.core.signals import request_finished, request_started
request_finished.connect(reset_context, dispatch_uid="request_finished_context_reset")
request_started.connect(reset_context, dispatch_uid="request_started_context_reset")
from django.conf import settings
contenttype_configuration_error = ImproperlyConfigured(
"If you're using django.contrib.contenttypes, then you need "
"to add djangae.contrib.contenttypes to INSTALLED_APPS after "
"django.contrib.contenttypes."
)
if 'django.contrib.contenttypes' in settings.INSTALLED_APPS:
if not 'djangae.contrib.contenttypes' in settings.INSTALLED_APPS:
# Raise error if User is using Django CT, but not Djangae
raise contenttype_configuration_error
else:
if settings.INSTALLED_APPS.index('django.contrib.contenttypes') > \
settings.INSTALLED_APPS.index('djangae.contrib.contenttypes'):
# Raise error if User is using both Django and Djangae CT, but
# Django CT comes after Djangae CT
raise contenttype_configuration_error
# ... existing code ...
request_started.connect(reset_context, dispatch_uid="request_started_context_reset")
from django.conf import settings
contenttype_configuration_error = ImproperlyConfigured(
"If you're using django.contrib.contenttypes, then you need "
"to add djangae.contrib.contenttypes to INSTALLED_APPS after "
"django.contrib.contenttypes."
)
if 'django.contrib.contenttypes' in settings.INSTALLED_APPS:
if not 'djangae.contrib.contenttypes' in settings.INSTALLED_APPS:
# Raise error if User is using Django CT, but not Djangae
raise contenttype_configuration_error
else:
if settings.INSTALLED_APPS.index('django.contrib.contenttypes') > \
settings.INSTALLED_APPS.index('djangae.contrib.contenttypes'):
# Raise error if User is using both Django and Djangae CT, but
# Django CT comes after Djangae CT
raise contenttype_configuration_error
# ... rest of the code ...
99496d97f3e00284840d2127556bba0e21d1a99e
frappe/tests/test_commands.py
frappe/tests/test_commands.py
from __future__ import unicode_literals
import shlex
import subprocess
import unittest
import frappe
def clean(value):
if isinstance(value, (bytes, str)):
value = value.decode().strip()
return value
class BaseTestCommands:
def execute(self, command):
command = command.format(**{"site": frappe.local.site})
command = shlex.split(command)
self._proc = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
self.stdout = clean(self._proc.stdout)
self.stderr = clean(self._proc.stderr)
self.returncode = clean(self._proc.returncode)
from __future__ import unicode_literals
import shlex
import subprocess
import unittest
import frappe
def clean(value):
if isinstance(value, (bytes, str)):
value = value.decode().strip()
return value
class BaseTestCommands:
def execute(self, command):
command = command.format(**{"site": frappe.local.site})
command = shlex.split(command)
self._proc = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
self.stdout = clean(self._proc.stdout)
self.stderr = clean(self._proc.stderr)
self.returncode = clean(self._proc.returncode)
class TestCommands(BaseTestCommands, unittest.TestCase):
def test_execute(self):
# execute a command expecting a numeric output
self.execute("bench --site {site} execute frappe.db.get_database_size")
self.assertEquals(self.returncode, 0)
self.assertIsInstance(float(self.stdout), float)
# execute a command expecting an errored output as local won't exist
self.execute("bench --site {site} execute frappe.local.site")
self.assertEquals(self.returncode, 1)
self.assertIsNotNone(self.stderr)
# execute a command with kwargs
self.execute("""bench --site {site} execute frappe.bold --kwargs '{{"text": "DocType"}}'""")
self.assertEquals(self.returncode, 0)
self.assertEquals(self.stdout[1:-1], frappe.bold(text='DocType'))
import logging
from django.core.management.base import BaseCommand
from peering.models import InternetExchange
class Command(BaseCommand):
help = "Update peering session states for Internet Exchanges."
logger = logging.getLogger("peering.manager.peering")
def handle(self, *args, **options):
self.logger.info("Updating peering session states...")
internet_exchanges = InternetExchange.objects.all()
for internet_exchange in internet_exchanges:
internet_exchange.update_peering_session_states()
import logging
from django.core.management.base import BaseCommand
from peering.models import InternetExchange
class Command(BaseCommand):
help = "Update peering session states for Internet Exchanges."
logger = logging.getLogger("peering.manager.peering")
def handle(self, *args, **options):
self.logger.info("Updating peering session states...")
internet_exchanges = InternetExchange.objects.all()
for internet_exchange in internet_exchanges:
internet_exchange.poll_peering_sessions()
## Code Before:
import logging
from django.core.management.base import BaseCommand
from peering.models import InternetExchange
class Command(BaseCommand):
help = "Update peering session states for Internet Exchanges."
logger = logging.getLogger("peering.manager.peering")
def handle(self, *args, **options):
self.logger.info("Updating peering session states...")
internet_exchanges = InternetExchange.objects.all()
for internet_exchange in internet_exchanges:
internet_exchange.update_peering_session_states()
## Instruction:
Fix command polling sessions for IX.
## Code After:
import logging
from django.core.management.base import BaseCommand
from peering.models import InternetExchange
class Command(BaseCommand):
help = "Update peering session states for Internet Exchanges."
logger = logging.getLogger("peering.manager.peering")
def handle(self, *args, **options):
self.logger.info("Updating peering session states...")
internet_exchanges = InternetExchange.objects.all()
for internet_exchange in internet_exchanges:
internet_exchange.poll_peering_sessions()
# ... existing code ...
internet_exchanges = InternetExchange.objects.all()
for internet_exchange in internet_exchanges:
internet_exchange.poll_peering_sessions()
# ... rest of the code ...
from django.conf import settings
from django.http import HttpResponse
from django.utils import simplejson as json
from taggit.models import Tag
MAX_SUGGESTIONS = getattr(settings, 'TAGGIT_AUTOSUGGEST_MAX_SUGGESTIONS', 20)
def list_tags(request):
"""
Returns a list of JSON objects with a `name` and a `value` property that
all start like your query string `q` (not case sensitive).
"""
query = request.GET.get('q', '')
limit = request.GET.get('limit', MAX_SUGGESTIONS)
try:
request.GET.get('limit', MAX_SUGGESTIONS)
limit = min(int(limit), MAX_SUGGESTIONS) # max or less
except ValueError:
limit = MAX_SUGGESTIONS
tag_name_qs = Tag.objects.filter(name__istartswith=query).\
values_list('name', flat=True)
data = [{'name': n, 'value': n} for n in tag_name_qs[:limit]]
return HttpResponse(json.dumps(data), mimetype='application/json')
def list_all_tags(request):
"""Returns all the tags in the database"""
all_tags = Tag.objects.all().values_list('name', flat=True)
return HttpResponse(json.dumps(list(all_tags)), mimetype='application/json')
from django.conf import settings
from django.http import HttpResponse
import json
from taggit.models import Tag
MAX_SUGGESTIONS = getattr(settings, 'TAGGIT_AUTOSUGGEST_MAX_SUGGESTIONS', 20)
def list_tags(request):
"""
Returns a list of JSON objects with a `name` and a `value` property that
all start like your query string `q` (not case sensitive).
"""
query = request.GET.get('q', '')
limit = request.GET.get('limit', MAX_SUGGESTIONS)
try:
request.GET.get('limit', MAX_SUGGESTIONS)
limit = min(int(limit), MAX_SUGGESTIONS) # max or less
except ValueError:
limit = MAX_SUGGESTIONS
tag_name_qs = Tag.objects.filter(name__istartswith=query).\
values_list('name', flat=True)
data = [{'name': n, 'value': n} for n in tag_name_qs[:limit]]
return HttpResponse(json.dumps(data), mimetype='application/json')
def list_all_tags(request):
"""Returns all the tags in the database"""
all_tags = Tag.objects.all().values_list('name', flat=True)
return HttpResponse(json.dumps(list(all_tags)), mimetype='application/json')
## Code Before:
from django.conf import settings
from django.http import HttpResponse
from django.utils import simplejson as json
from taggit.models import Tag
MAX_SUGGESTIONS = getattr(settings, 'TAGGIT_AUTOSUGGEST_MAX_SUGGESTIONS', 20)
def list_tags(request):
"""
Returns a list of JSON objects with a `name` and a `value` property that
all start like your query string `q` (not case sensitive).
"""
query = request.GET.get('q', '')
limit = request.GET.get('limit', MAX_SUGGESTIONS)
try:
request.GET.get('limit', MAX_SUGGESTIONS)
limit = min(int(limit), MAX_SUGGESTIONS) # max or less
except ValueError:
limit = MAX_SUGGESTIONS
tag_name_qs = Tag.objects.filter(name__istartswith=query).\
values_list('name', flat=True)
data = [{'name': n, 'value': n} for n in tag_name_qs[:limit]]
return HttpResponse(json.dumps(data), mimetype='application/json')
def list_all_tags(request):
"""Returns all the tags in the database"""
all_tags = Tag.objects.all().values_list('name', flat=True)
return HttpResponse(json.dumps(list(all_tags)), mimetype='application/json')
## Instruction:
Remove deprecated django json shim
## Code After:
from django.conf import settings
from django.http import HttpResponse
import json
from taggit.models import Tag
MAX_SUGGESTIONS = getattr(settings, 'TAGGIT_AUTOSUGGEST_MAX_SUGGESTIONS', 20)
def list_tags(request):
"""
Returns a list of JSON objects with a `name` and a `value` property that
all start like your query string `q` (not case sensitive).
"""
query = request.GET.get('q', '')
limit = request.GET.get('limit', MAX_SUGGESTIONS)
try:
request.GET.get('limit', MAX_SUGGESTIONS)
limit = min(int(limit), MAX_SUGGESTIONS) # max or less
except ValueError:
limit = MAX_SUGGESTIONS
tag_name_qs = Tag.objects.filter(name__istartswith=query).\
values_list('name', flat=True)
data = [{'name': n, 'value': n} for n in tag_name_qs[:limit]]
return HttpResponse(json.dumps(data), mimetype='application/json')
def list_all_tags(request):
"""Returns all the tags in the database"""
all_tags = Tag.objects.all().values_list('name', flat=True)
return HttpResponse(json.dumps(list(all_tags)), mimetype='application/json')
...
from django.conf import settings
from django.http import HttpResponse
import json
from taggit.models import Tag
...
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.