__id__
int64 3.09k
19,722B
| blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 2
256
| content_id
stringlengths 40
40
| detected_licenses
list | license_type
stringclasses 3
values | repo_name
stringlengths 5
109
| repo_url
stringlengths 24
128
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringlengths 4
42
| visit_date
timestamp[ns] | revision_date
timestamp[ns] | committer_date
timestamp[ns] | github_id
int64 6.65k
581M
⌀ | star_events_count
int64 0
1.17k
| fork_events_count
int64 0
154
| gha_license_id
stringclasses 16
values | gha_fork
bool 2
classes | gha_event_created_at
timestamp[ns] | gha_created_at
timestamp[ns] | gha_updated_at
timestamp[ns] | gha_pushed_at
timestamp[ns] | gha_size
int64 0
5.76M
⌀ | gha_stargazers_count
int32 0
407
⌀ | gha_forks_count
int32 0
119
⌀ | gha_open_issues_count
int32 0
640
⌀ | gha_language
stringlengths 1
16
⌀ | gha_archived
bool 2
classes | gha_disabled
bool 1
class | content
stringlengths 9
4.53M
| src_encoding
stringclasses 18
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | year
int64 1.97k
2.01k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
12,627,203,872,661 |
4634d78c07d0b5fd6589501f060dc905063f9c10
|
9108c7b68d8ac1cf7c8a0e7136183a580c11a0d4
|
/data_wrangling/cps_wrangling/analysis/tests/test_add_to_panel.py
|
458e7120431d9c2f7058107a0c03cffc0693cd8a
|
[] |
no_license
|
zaknbur/dnwr-zlb
|
https://github.com/zaknbur/dnwr-zlb
|
d3f47883d1828a24595fff853d9d67c00a650f54
|
c9847fd58e1cdded11b52cbc81dd481d9483b996
|
refs/heads/master
| 2021-05-27T22:12:17.456128 | 2014-05-21T15:47:17 | 2014-05-21T15:47:17 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import os
import shutil
import tempfile
import unittest

import numpy as np
import pandas as pd
import pandas.util.testing as tm
from numpy import nan

from data_wrangling.cps_wrangling.analysis import add_to_panel
class TestAddHistory(unittest.TestCase):
    """Tests for ``add_to_panel`` labor-status helpers.

    Covers ``_add_employment_status_last_period`` (history of a given
    status one "wave" back) and ``_add_flows_panel`` (two-letter
    employment-flow codes such as 'ee', 'eu', 'ue'), on both the full
    8-period panel and a truncated 5-period slice.
    """

    def setUp(self):
        # Create the HDFStore inside a per-test temporary directory instead
        # of a fixed 'test_store.h5' in the CWD: a fixed path collides under
        # parallel test runs and a crashed run leaves a stale file behind.
        # NOTE(review): the store itself is never read by the tests below —
        # it is only opened here and closed in tearDown; presumably it
        # exercises the HDF5 environment the module under test expects.
        self._tmpdir = tempfile.mkdtemp()
        self.test = pd.HDFStore(os.path.join(self._tmpdir, 'test_store.h5'))
        # Rows (after .T) are individuals 'a'..'i'; columns are periods 1..8.
        # Status codes: 1 = employed, 3 = unemployed, 5/6 = not in labor
        # force (inferred from the 'e'/'u'/'n' flow codes asserted below —
        # confirm against add_to_panel).
        status_frame = pd.DataFrame({'a': [1, 1, 1, 1, 1, 1, 1, 1],
                                     'b': [1, 2, 1, 2, 1, 1, 1, 3],
                                     'c': [3, 1, 1, 1, 3, 3, 3, 1],
                                     'd': [3, 1, 1, 3, 1, 1, 1, 5],
                                     'e': [5, 1, 1, 1, 3, 3, 3, 3],
                                     'f': [5, 1, 1, 5, 5, 5, 1, 1],
                                     'g': [5, 1, 1, 3, 1, 1, 6, 1],
                                     'h': [1, 1, 1, 3, 1, 1, 3, 1],
                                     'i': [1, 1, 1, 5, 1, 1, 1, 2]
                                     }, index=range(1, 9)).T
        self.wp = pd.Panel({'labor_status': status_frame})

    def test_history(self):
        # "Was unemployed last period": 1 = yes, 0 = no, -1 = undefined
        # (no comparable observation one wave back).
        wp = self.wp.copy()
        result = add_to_panel._add_employment_status_last_period(wp, 'unemployed',
                                                                inplace=False)
        expected = pd.DataFrame([np.nan]).reindex_like(wp['labor_status'])
        expected.loc['a', [4, 8]] = 0
        expected.loc['b', 4] = 0
        expected.loc['c', [4, 8]] = 1
        # expected.loc['e', 4] = np.nan  # doesn't match kind
        # expected.loc['f', 8] = np.nan
        # expected.loc['g', 8] = np.nan
        expected.loc['h', 8] = 1
        expected.loc['i', 8] = 0
        expected = expected.fillna(-1)
        expected = expected.astype('int64')
        tm.assert_frame_equal(result, expected)

    def test_status(self):
        # Period-to-period flow codes over the full 8 periods; the first
        # period has no predecessor, hence NaN in column 1.
        result = add_to_panel._add_flows_panel(self.wp, inplace=False)
        expected = pd.DataFrame({'a': [nan, 'ee', 'ee', 'ee', 'ee', 'ee', 'ee', 'ee'],
                                 'b': [nan, 'ee', 'ee', 'ee', 'ee', 'ee', 'ee', 'eu'],
                                 'c': [nan, 'ue', 'ee', 'ee', 'eu', 'uu', 'uu', 'ue'],
                                 'd': [nan, 'ue', 'ee', 'eu', 'ue', 'ee', 'ee', 'en'],
                                 'e': [nan, 'ne', 'ee', 'ee', 'eu', 'uu', 'uu', 'uu'],
                                 'f': [nan, 'ne', 'ee', 'en', 'nn', 'nn', 'ne', 'ee'],
                                 'g': [nan, 'ne', 'ee', 'eu', 'ue', 'ee', 'en', 'ne'],
                                 'h': [nan, 'ee', 'ee', 'eu', 'ue', 'ee', 'eu', 'ue'],
                                 'i': [nan, 'ee', 'ee', 'en', 'ne', 'ee', 'ee', 'ee']
                                 }, index=range(1, 9)).T
        expected = expected.convert_objects(convert_numeric=True)
        tm.assert_frame_equal(result, expected)

    def test_status_partial(self):
        # Same flow computation, restricted to the first 5 periods.
        wp = self.wp.loc[:, :, (1, 2, 3, 4, 5)]
        result = add_to_panel._add_flows_panel(wp, inplace=False)
        expected = pd.DataFrame({'a': [nan, 'ee', 'ee', 'ee', 'ee'],
                                 'b': [nan, 'ee', 'ee', 'ee', 'ee'],
                                 'c': [nan, 'ue', 'ee', 'ee', 'eu'],
                                 'd': [nan, 'ue', 'ee', 'eu', 'ue'],
                                 'e': [nan, 'ne', 'ee', 'ee', 'eu'],
                                 'f': [nan, 'ne', 'ee', 'en', 'nn'],
                                 'g': [nan, 'ne', 'ee', 'eu', 'ue'],
                                 'h': [nan, 'ee', 'ee', 'eu', 'ue'],
                                 'i': [nan, 'ee', 'ee', 'en', 'ne']
                                 }, index=range(1, 6)).T
        expected = expected.convert_objects(convert_numeric=True)
        tm.assert_frame_equal(result, expected)

    def test_history_partial(self):
        # History on the 5-period slice: only period 4 has a comparable
        # observation one wave back, so period 8's entries disappear.
        wp = self.wp.copy().loc[:, :, (1, 2, 3, 4, 5)]
        result = add_to_panel._add_employment_status_last_period(wp, 'unemployed',
                                                                inplace=False)
        expected = pd.DataFrame([np.nan]).reindex_like(wp['labor_status'])
        expected.loc['a', 4] = 0
        expected.loc['b', 4] = 0
        expected.loc['c', 4] = 1
        expected = expected.fillna(-1)
        expected = expected.astype('int64')
        tm.assert_frame_equal(result, expected)

    def tearDown(self):
        # Close the store, then remove the whole temp directory.
        # ignore_errors=True keeps teardown from masking a failure in the
        # test itself if setUp only partially completed.
        self.test.close()
        shutil.rmtree(self._tmpdir, ignore_errors=True)
|
UTF-8
|
Python
| false | false | 2,014 |
11,192,684,800,258 |
d4fad3ee6b0f876967a15ee668d84ff0006c52e7
|
5286255a93db21ea9defc1f8f6fc71990c3c2fa9
|
/samples/python/montecarlo/L1OffSet_V14_04_00/.svn/text-base/GJets_TuneZ2_200_HT_inf_7TeV_madgraph_Summer11_PU_S4_START42_V11_v1_V15_04_00_jetCorrections_L1Offset_L2Relative_L3Absolute_jetCollections_ak5calo_ak5pf_hbheNoiseFilterDefaultIsoReq_1.py.svn-base
|
40b67b1716bec3f335dea430d4042aaa8465291f
|
[] |
no_license
|
brynmathias/AnalysisV2
|
https://github.com/brynmathias/AnalysisV2
|
1367767dbf22eef6924700c4b0a00581ea8ed965
|
ee17c019bb04243876a51c7ef7719cc58a52adea
|
refs/heads/master
| 2021-01-01T19:20:27.277628 | 2012-04-17T13:34:26 | 2012-04-17T13:34:26 | 2,600,415 | 0 | 2 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from icf.core import PSet
GJets_TuneZ2_200_HT_inf_7TeV_madgraph_Summer11_PU_S4_START42_V11_v1_V15_04_00_jetCorrections_L1Offset_L2Relative_L3Absolute_jetCollections_ak5calo_ak5pf_hbheNoiseFilterDefaultIsoReq_1=PSet(
Name="GJets_TuneZ2_200_HT_inf_7TeV_madgraph_Summer11_PU_S4_START42_V11_v1_V15_04_00_jetCorrections_L1Offset_L2Relative_L3Absolute_jetCollections_ak5calo_ak5pf_hbheNoiseFilterDefaultIsoReq_1",
Format=("ICF",3),
File=[
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_100_1_7PV.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_101_1_TXs.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_102_1_gH9.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_103_1_wOq.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_104_1_Rcf.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_105_1_KxJ.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_106_1_69I.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_107_1_dFs.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_108_1_9E4.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_109_2_stM.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_10_1_4O3.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_110_1_2Vq.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_111_1_FvK.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_112_1_mij.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_113_1_HQL.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_114_1_zv8.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_115_1_a1e.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_116_1_qvM.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_117_1_pF2.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_118_1_1iO.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_119_1_Oyr.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_11_1_6uF.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_120_1_Rk4.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_121_2_ylO.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_122_1_qSx.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_123_1_kbr.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_124_1_3qp.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_125_1_mjs.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_126_1_BIY.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_127_1_5z4.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_128_1_ru3.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_129_1_6cb.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_12_1_cel.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_130_1_qFC.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_131_1_WHJ.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_132_1_Bfo.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_133_1_SCI.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_134_1_Gzc.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_135_1_vVo.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_136_1_lne.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_137_1_9GV.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_138_1_eBp.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_139_1_Qni.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_13_1_1RG.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_140_1_QyV.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_141_1_RoN.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_142_1_9gu.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_143_2_Pf3.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_144_1_YjG.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_145_1_kF8.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_146_1_9nR.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_147_1_jQr.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_148_1_4zR.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_149_1_TxB.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_14_1_n8J.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_150_1_ZqN.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_151_1_7uE.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_152_1_rgy.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_153_1_KOd.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_154_1_DOD.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_155_1_Jp2.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_156_1_1zG.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_157_1_ur7.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_158_1_sG4.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_159_1_abW.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_15_1_PQn.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_160_1_3TM.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_161_1_XZB.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_162_1_3Bm.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_163_1_FQc.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_164_1_4uh.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_165_1_ERW.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_166_1_dKh.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_167_1_VVd.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_168_1_9Kl.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_169_1_dvI.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_16_1_utr.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_170_2_9qe.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_171_1_lH8.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_172_2_6aq.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_173_1_Ffb.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_174_1_INa.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_175_1_hfd.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_176_1_DaN.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_177_1_cfa.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_178_1_8uc.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_179_1_DcL.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_17_1_CfP.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_180_1_lJu.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_181_1_ssr.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_182_1_TFL.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_183_1_4SR.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_184_1_gRl.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_185_1_mtQ.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_186_1_Ik1.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_187_1_vhE.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_188_1_zHZ.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_189_1_PRB.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_18_1_GOk.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_190_1_7JS.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_191_1_y2c.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_192_1_FNz.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_193_1_sna.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_194_1_bZA.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_195_1_aj3.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_196_1_Qey.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_197_1_KYD.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_198_1_DG7.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_199_1_8q4.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_19_1_rU2.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_1_1_T5I.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_200_1_uNz.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_201_1_WMK.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_202_1_X7i.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_203_1_x5H.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_204_1_ZBJ.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_205_1_aaF.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_206_1_EtE.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_207_1_zvy.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_208_1_k4K.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_209_1_AyT.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_20_1_77F.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_210_1_5K3.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_211_1_Izm.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_212_1_NJo.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_213_1_3Yi.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_214_1_OYx.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_215_1_kBX.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_216_1_bUF.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_217_1_Rf5.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_218_1_ZWU.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_219_1_ugt.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_21_1_b0p.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_220_1_diM.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_221_1_rK7.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_222_1_Khc.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_223_1_E6y.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_224_1_eOt.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_225_1_DbE.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_226_1_ciu.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_227_1_Ub5.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_228_1_lBP.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_229_1_6ct.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_22_1_oSd.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_230_1_LYv.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_231_1_qTh.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_232_1_HVP.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_233_1_c57.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_234_1_aNp.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_235_1_ofB.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_236_1_wgD.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_237_1_TGO.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_238_1_uZ5.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_239_1_fzQ.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_23_1_PnL.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_240_1_Y6K.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_241_1_qla.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_242_1_1KT.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_243_1_cs7.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_244_1_3S7.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_245_1_fiM.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_246_1_xyD.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_247_1_51c.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_248_1_TrF.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_249_1_9Mg.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_24_1_f30.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_250_1_SX2.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_251_1_vMu.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_252_1_IBN.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_253_1_NLY.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_254_1_ujK.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_255_1_hZa.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_256_1_8Gl.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_257_1_DjV.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_258_1_1C1.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_259_1_cB3.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_25_2_b8O.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_260_1_OiF.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_261_1_fOR.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_262_1_VIS.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_263_1_ZeX.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_264_1_Evw.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_265_1_GBo.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_266_1_QlM.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_267_1_VH3.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_268_1_liY.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_269_1_wI8.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_26_1_pYi.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_270_1_i0T.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_271_1_1hH.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_272_1_NgQ.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_273_1_WA7.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_274_1_kqs.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_275_1_bpf.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_276_1_zvr.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_277_1_dsD.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_278_1_ZBO.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_279_1_cSC.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_27_1_iNT.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_280_1_rGQ.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_281_1_dYv.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_282_1_5q8.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_283_1_lM4.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_284_1_4ds.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_285_1_LQD.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_286_1_Gnf.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_287_2_1WZ.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_288_1_bj8.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_289_2_Rb6.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_28_1_idI.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_290_1_7Bw.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_291_1_SDJ.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_292_2_PX9.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_293_2_iJW.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_294_1_kuD.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_295_1_J8P.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_296_1_ozJ.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_297_1_A1b.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_298_2_dhK.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_299_1_sr2.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_29_1_gYg.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_2_1_jn9.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_300_1_mF8.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_301_1_GGj.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_302_1_eVt.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_303_1_EhX.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_304_1_RaL.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_305_1_e87.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_306_1_25B.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_307_1_W7J.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_308_1_pkN.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_309_1_zy8.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_30_1_Yil.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_310_1_eAh.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_311_1_ASn.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_312_1_sgB.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_313_1_usa.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_314_1_CqN.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_315_1_Zjk.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_316_1_mQ1.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_317_1_4w0.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_318_1_udn.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_319_1_F8u.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_31_1_6F0.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_320_1_fq1.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_321_1_eo4.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_322_1_opW.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_323_1_ZRS.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_324_1_i5Q.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_325_1_mjl.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_326_1_1y1.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_327_1_3MB.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_328_1_3lL.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_329_2_pLd.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_32_1_9s6.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_330_1_aYR.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_331_1_zF2.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_332_1_6vb.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_333_1_poo.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_334_1_vaT.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_335_1_DYh.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_336_1_lUq.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_337_1_JL9.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_338_2_98X.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_339_1_BgL.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_33_2_Pr6.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_340_1_5s0.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_341_2_83A.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_342_1_gu2.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_343_2_5t6.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_344_1_Syr.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_345_1_kSN.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_346_1_ZBv.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_347_1_e9Y.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_348_1_qId.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_349_1_6Zx.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_34_1_WQ1.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_350_1_4Vv.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_351_1_zuV.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_352_1_8Xx.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_353_1_wNd.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_354_1_L8H.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_355_1_geA.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_356_2_OaK.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_357_1_aln.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_358_1_TPw.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_359_2_eQ9.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_35_1_Ejj.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_360_2_UDT.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_361_2_2Sb.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_362_1_gGQ.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_363_1_axL.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_364_2_UNf.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_365_2_ahI.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_366_1_ABn.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_367_1_x3x.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_368_2_1xn.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_369_1_weW.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_36_1_BQ0.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_370_1_0IG.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_371_1_Uwl.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_372_1_enH.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_373_2_MUH.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_374_1_El6.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_375_1_uDt.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_376_1_ZsW.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_377_1_hve.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_378_1_sYk.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_379_1_Jrj.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_37_1_sqE.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_380_1_C3k.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_381_1_GOy.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_382_1_JfY.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_383_1_s30.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_384_1_rh2.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_385_1_mgx.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_386_1_g0P.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_387_1_xdS.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_388_1_xpI.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_389_1_vLT.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_38_1_jGN.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_390_1_zNZ.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_391_1_rNu.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_392_1_btT.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_393_1_6VY.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_394_1_Y8f.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_395_1_MU0.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_396_1_jiW.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_397_1_AiR.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_398_1_MMB.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_399_1_Lp1.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_39_1_2oL.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_3_1_QTQ.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_400_1_mTY.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_401_1_5O3.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_402_1_QWa.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_403_1_2Rz.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_404_1_f2w.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_405_1_dGo.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_406_1_eWI.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_407_1_l5h.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_408_1_3ED.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_409_1_1F7.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_40_1_P7o.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_410_1_OKe.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_411_1_yXT.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_412_1_Je1.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_413_1_r56.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_414_1_yXv.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_415_1_Dho.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_416_1_VdY.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_417_1_jI9.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_418_1_Bkk.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_419_1_VId.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_41_1_mCn.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_420_1_8Pm.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_421_1_mIB.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_422_1_MQF.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_423_1_m7H.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_424_1_nKn.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_425_1_Ybq.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_426_1_ETT.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_427_2_R8k.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_428_1_xVK.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_429_1_FuT.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_42_1_x05.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_430_1_cSH.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_431_2_3nU.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_432_2_eVa.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_433_1_IWt.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_434_1_TnH.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_435_1_eVU.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_436_1_F3U.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_437_1_Pkq.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_438_1_O19.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_439_1_3qg.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_43_1_sM8.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_440_1_X2E.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_441_1_ELa.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_442_1_WAT.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_443_1_prG.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_444_1_RrP.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_445_2_y0b.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_446_1_lIg.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_447_1_RsK.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_448_1_FmI.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_449_1_rsO.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_44_1_ryw.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_450_1_B8A.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_451_1_jeW.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_452_1_xh8.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_453_1_Q1h.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_454_1_32Z.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_455_1_76r.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_456_1_cww.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_457_1_kLk.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_458_1_bZA.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_459_1_NOo.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_45_1_RVz.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_460_1_DJf.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_461_1_lGo.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_462_1_ojF.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_463_1_55k.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_464_1_DbX.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_465_1_y8R.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_466_1_7Zw.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_467_1_8tI.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_468_1_vW4.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_469_1_vub.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_46_1_BtY.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_470_1_Lsz.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_471_1_hXI.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_472_1_woW.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_473_1_bMt.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_47_1_6yQ.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_48_1_gqX.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_49_1_sSr.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_4_1_kzn.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_50_1_3N4.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_51_1_Rc7.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_52_1_0Os.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_53_1_UrN.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_54_2_BsQ.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_55_1_aKn.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_56_1_2Kn.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_57_1_oVV.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_58_1_bbe.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_59_1_74B.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_5_1_LeT.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_60_1_fQK.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_61_1_qBZ.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_62_1_pic.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_63_1_I5Y.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_64_1_mYU.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_65_2_31B.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_66_1_QeA.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_67_1_H1k.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_68_1_xtT.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_69_1_yeU.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_6_1_5W2.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_70_1_a6M.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_71_1_oGq.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_72_1_4Mx.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_73_1_5xK.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_74_1_4bK.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_75_1_KqE.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_76_1_a9C.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_77_1_TSe.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_78_1_1hr.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_79_1_K7V.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_7_1_kSa.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_80_1_w9e.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_81_1_akC.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_82_1_gZV.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_83_1_OZc.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_84_1_O1T.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_85_1_RTP.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_86_1_xYD.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_87_1_UbG.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_88_1_Ov2.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_89_1_R8M.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_8_1_LwG.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_90_1_Rp8.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_91_1_455.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_92_1_ctE.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_93_1_vDu.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_94_1_CPC.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_95_1_Pwj.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_96_1_GlL.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_97_1_Q3y.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_98_1_YDZ.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_99_1_H3U.root" ,
"root://xrootd.grid.hep.ph.ic.ac.uk//store/user/elaird/ICF/automated/2012_02_04_18_27_43/GJets_TuneZ2_200_HT_inf_7TeV-madgraph.Summer11-PU_S4_START42_V11-v1.AODSIM/SusyCAF_Tree_9_1_8Ip.root" ,
],
Weight=1.0,
)
|
UTF-8
|
Python
| false | false | 2,012 |
12,592,844,148,769 |
17cd39421d7c34f1d5658b51a045093d37612b8e
|
27d01d04f68e62ae1d43f1af37f616568b0e61b0
|
/problem_38.py
|
ca6d56fe0512b59a36c925fe87ce7faaac8a2659
|
[] |
no_license
|
dagar/project_euler
|
https://github.com/dagar/project_euler
|
9b38a1d9c5b0e9a514ca20d559f8bcb8842ccdec
|
6fc67548bcba9396a3008ea10e577048c919e9e6
|
refs/heads/master
| 2020-12-24T15:58:32.041013 | 2014-10-13T04:10:29 | 2014-10-13T04:10:29 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#! /usr/bin/env python
"""
problem 38: largest 1 to 9 pandigital 9-digit number that can be formed
as the concatenated product of an integer with (1,2, ... , n) where n > 1?
"""
def ispandigital(n):
	"""Return True if *n* is a 1-to-9 pandigital 9-digit number.

	A pandigital number in this sense uses each of the digits 1..9
	exactly once (zero is excluded).
	"""
	digits = str(n)
	# Exactly nine characters whose set covers {1..9} implies each
	# digit appears exactly once -- replaces the per-character scan.
	return len(digits) == 9 and set(digits) == set("123456789")
def concatenated_product(number, n):
	"""Concatenate number*1, number*2, ..., number*n and return the result as an int."""
	pieces = (str(number * multiplier) for multiplier in range(1, n + 1))
	return int("".join(pieces))
# Search for the largest 1-9 pandigital concatenated product.
# Upper bound 100000: a 5-digit base times (1, 2) already yields at
# least 10 concatenated digits, so larger bases can never be pandigital.
# NOTE(review): this file is Python 2 (statement-form `print` below).
largest_concatenated_product = 0
for number in range(100000):
	for n in range(1, 10):
		cp = concatenated_product(number, n)
		if ispandigital(cp):
			if cp > largest_concatenated_product:
				largest_concatenated_product = cp
				print "new largest: product of", number, "and", tuple(i for i in range(1, n+1)), "=", cp
|
UTF-8
|
Python
| false | false | 2,014 |
10,866,267,309,305 |
67208e89703f3c023ea72895b24d8224c501dcaf
|
d12291a4328e9034af2d74dd242dd8a0447d46e5
|
/src/local_options.py
|
b279e3d876bffe1ddbeda4090bf6ee103bfee39e
|
[] |
no_license
|
michalneoral/clopema_collect_model_data
|
https://github.com/michalneoral/clopema_collect_model_data
|
f62210b3fecc37f91b77c63628576d130090bfd3
|
a254d0fe431c276b31a7290912613e0ca47d6363
|
refs/heads/master
| 2016-09-06T07:35:35.731270 | 2014-02-06T22:18:10 | 2014-02-06T22:18:10 | 14,369,165 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""Local options"""
"""Folder where is a clopema_collect_model_data"""
pcglocate='/home/neosh/ros_catkin_ws/src/clopema_collect_model_data/'
"""Folder where will be saved a collected data"""
savefolder='/media/neosh/5FB92F7D501A5B3A/Clopema/Pokusy/'
|
UTF-8
|
Python
| false | false | 2,014 |
10,118,942,954,733 |
46c38c34714502ef04a7980f9623d54490b21f8a
|
39c97a9dc0f35621860e4bc6c7c9bb05b3930df3
|
/fixFlashCards.py
|
910b24adeb116cfa351a85eefff6c5cedfbe4e09
|
[] |
no_license
|
danrasmuson/Course_Smart_Flashcards
|
https://github.com/danrasmuson/Course_Smart_Flashcards
|
85d8a5d887cbc9a4ecb6fcd08a6965870ce79ce3
|
bafe8da6c23c4b51632f601b05e1d1f90ce9a624
|
refs/heads/master
| 2022-04-30T20:59:12.390382 | 2013-11-09T19:20:16 | 2013-11-09T19:20:16 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import re
from tkinter import Tk
def lengthOfTerm(term):
    """Return the word count encoded by *term*'s trailing marker word.

    The last whitespace-separated word of *term* is expected to look
    like "OneWord" .. "FiveWord"; stripping "Word" and lowercasing
    leaves a number-word that maps to 1..5.  Returns 0 when the last
    word is not a recognized marker.
    """
    # Table lookup replaces the original if/elif ladder.
    word_values = {"one": 1, "two": 2, "three": 3, "four": 4, "five": 5}
    marker = term.split()[-1].replace("Word", "").lower()
    return word_values.get(marker, 0)
# Get clipboard data via a hidden Tk root window.
r = Tk()
r.withdraw()
result = r.selection_get(selection = "CLIPBOARD")
# Replace every non-ASCII character with the letter "f" so later
# string operations see plain ASCII.
clipBoard = re.sub(r'[^\x00-\x7F]',"f",result)
# Split into individual flashcard terms (blank-line separated) and
# rebuild them as CSV-ish "term,definition" lines.
fullTermList = clipBoard.split("\n\n")
toClipboardStr = ""
for term in fullTermList:
    #remove commas for csv
    if len(term) > 1:
        # Marker word (e.g. "OneWord") tells how many words form the
        # term itself; 0 means no marker, pass the chunk through as-is.
        length = lengthOfTerm(term) #Astronomy - the study of stars
        if length != 0:
            term = term.replace(",","")
            term = term.replace("\n"," ") #remove lines
            term = term.replace("-","") #remove lines joins
            term = term.replace("  "," ") #collapse double spaces left by the joins above
            # Convert the first `length` spaces to commas, then turn all
            # but the last back into spaces -- the net effect is a single
            # comma after the term's last word.  Order-sensitive; do not
            # reorder these replace calls.
            term = term.replace(" ",",",length).replace(","," ",length-1) #puts the comma in
            term = term.replace(", ",",") #remove space straddling the comma
            term = " ".join(term.split()[:-1]) #drop last word (OneWord)
            toClipboardStr += term+"\n"
        else:
            toClipboardStr += term
# Put the reformatted text back on the clipboard and tear down Tk.
r.clipboard_clear()
r.clipboard_append(toClipboardStr)
r.destroy()
|
UTF-8
|
Python
| false | false | 2,013 |
4,011,499,496,446 |
7cf29e50878617c8110305e1fa136855340e521e
|
e73a32283194e334a9d0d7e4abea131b15a25aed
|
/Modules/Init/Modules/_LastDrafts/01d_Representer/Representer.py
|
872c35f569df91022494c678ff3df85df525a71a
|
[] |
no_license
|
Ledoux/ProfessionalSYS
|
https://github.com/Ledoux/ProfessionalSYS
|
300c45d524f6eeea98a6b9d77c2b47f49b36663b
|
2cb9a3efc1dcc38353a239e624737bdbf4edcfa9
|
refs/heads/master
| 2017-01-04T12:13:28.294791 | 2014-12-02T10:17:09 | 2014-12-02T10:17:09 | 24,479,051 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#<ImportModules>
import collections
import copy
import inspect
import numpy
import ShareYourSystem as SYS
#</ImportModules>
#<DefineLocals>
# Module-level rendering configuration shared by all representation helpers.
# NOTE(review): the exact widths of the indent strings below may have been
# wider in the original source; confirm before relying on output layout.
BasingLocalTypeString="Inspecter"
# Resolve the base class dynamically from the SYS package by its type string.
BasedLocalClass=getattr(SYS,SYS.getClassStringWithTypeString(BasingLocalTypeString))
RepresentingDictIndentString=" "
RepresentingListIndentString=" "
RepresentingIndentString=" /"
RepresentingEofString="\n"
# When True, the id() of objects is appended to their pointer representation.
RepresentingIdBool=True
# Prefix prepended to every emitted line (mutated globally by callers).
RepresentedAlineaString=""
#</DefineLocals>
#<DefineFunctions>
def getRepresentedNumpyArray(_NumpyArray):
    """Return a compact textual form of a numpy array.

    Small arrays (1-D with fewer than 3 elements, or N-D whose second
    dimension is below 3) are shown verbatim via str(); anything larger is
    summarised as "<numpy.ndarray shape [...]>".
    """
    shape = list(numpy.shape(_NumpyArray))
    is_small = (
        (len(shape) == 1 and shape[0] < 3)
        or (len(shape) > 1 and shape[1] < 3)
    )
    if is_small:
        return str(_NumpyArray)
    return "<numpy.ndarray shape " + str(shape) + ">"
def getRepresentedPointerStringWithVariable(_Variable):
    """Return a short pointer-style representation of *_Variable*.

    Format: "<name (ClassName), id>" when RepresentingIdBool is set,
    "<name (ClassName) >" otherwise, prefixed with the module-global
    RepresentedAlineaString.
    """
    # BUG FIX: the original called hasattr(_Variable, __name__) -- passing
    # the module-level __name__ string instead of the literal '__name__' --
    # so the object's own name was never displayed.
    name_string = _Variable.__name__ if hasattr(_Variable, '__name__') else ""
    if RepresentingIdBool:
        return RepresentedAlineaString + "<" + name_string + " (" + \
            _Variable.__class__.__name__ + "), " + str(id(_Variable)) + ">"
    else:
        return RepresentedAlineaString + "<" + name_string + " (" + \
            _Variable.__class__.__name__ + ")" + " >"
def getRepresentedStringWithDictatedVariable(
    _DictatedVariable,**_KwargVariablesDict
):
    # Render a dict (or OrderedDict) as a multi-line, indentation-nested
    # string. Depth is tracked via _KwargVariablesDict['RepresentedDeepInt'].
    # NOTE(review): Python 2 only -- relies on `unicode`, `iteritems` and
    # list-returning `map`.
    #Set in the _KwargVariablesDict
    if 'RepresentedDeepInt' not in _KwargVariablesDict:
        _KwargVariablesDict['RepresentedDeepInt']=0
    #Debug
    '''
    print('Representer l.59 : getRepresentedStringWithDictatedVariable')
    print('_KwargVariablesDict is ',str(_KwargVariablesDict))
    print('')
    '''
    #Global
    global RepresentedAlineaString
    #Define the LocalRepresentedAlineaString
    LocalRepresentedAlineaString=RepresentedAlineaString+"".join(
        [RepresentingIndentString]*(_KwargVariablesDict['RepresentedDeepInt']))
    #Init the RepresentedDictString
    RepresentedDictString="\n"+LocalRepresentedAlineaString+"{ "
    #Scan the Items (integrativ loop)
    if type(_DictatedVariable)==collections.OrderedDict:
        # OrderedDicts keep their insertion order.
        TuplesList=_DictatedVariable.items()
    else:
        # Plain dicts are rendered in deterministic key-sorted order.
        TuplesList=sorted(_DictatedVariable.iteritems(), key=lambda key_value: key_value[0])
    #Integrativ loop for seriaizing the items
    for KeyString,ValueVariable in TuplesList:
        #Set the begin of the line
        RepresentedDictString+="\n"+LocalRepresentedAlineaString+RepresentingDictIndentString
        #Force the cast into string
        if type(KeyString) not in [unicode,str]:
            KeyString=str(KeyString)
        #Get the WordStringsList
        WordStringsList=SYS.getWordStringsListWithString(KeyString)
        #Init the RepresentedValueVariableString
        RepresentedValueVariableString="None"
        if len(WordStringsList)>0:
            #Value is displayed
            if SYS.getWordStringsListWithString(KeyString)[-1]=="Pointer":
                #Pointer Case
                RepresentedValueVariableString=getRepresentedPointerStringWithVariable(
                    ValueVariable
                )
            elif ''.join(SYS.getWordStringsListWithString(KeyString)[-2:])=="PointersList":
                #Pointer Case
                # NOTE(review): getRepresentedPointerStringWithVariable only
                # accepts one positional argument; passing **_KwargVariablesDict
                # here looks like a latent TypeError -- confirm.
                RepresentedValueVariableString=str(
                    map(
                        lambda ListedVariable:
                        getRepresentedPointerStringWithVariable(
                            ListedVariable,
                            **_KwargVariablesDict
                        ),
                        ValueVariable
                    )
                )
        #Special Suffix Cases
        Type=type(ValueVariable)
        TypeString=Type.__name__
        if TypeString=="instancemethod":
            # Bound methods are summarised, not recursed into.
            RepresentedValueVariableString="<"+ValueVariable.__name__+" "+TypeString+">"
        elif Type==numpy.ndarray:
            RepresentedValueVariableString=getRepresentedNumpyArray(ValueVariable)
        elif RepresentedValueVariableString=="None":
            #Other Cases
            RepresentedValueVariableString=getRepresentedStringWithVariable(
                ValueVariable,**_KwargVariablesDict)
        #Key and Value Case
        RepresentedDictString+="'"+KeyString+"' : "+RepresentedValueVariableString
    #Add a last line
    RepresentedDictString+="\n"+LocalRepresentedAlineaString+"}"
    #return the DictString
    return RepresentedDictString
def getRepresentedStringWithListedVariable(_ListedVariable,**_KwargVariablesDict):
    # Render a list or tuple as a multi-line "index : value" string, with
    # [] brackets for lists and () for tuples.
    #Global
    global RepresentedAlineaString
    #Set in the _KwargVariablesDict
    if 'RepresentedDeepInt' not in _KwargVariablesDict:
        _KwargVariablesDict['RepresentedDeepInt']=0
    #Debug
    '''
    print('Representer l.166 : getRepresentedStringWithListedVariable')
    print('_KwargVariablesDict is ',str(_KwargVariablesDict))
    print('_ListedVariable is '+str(_ListedVariable))
    print('')
    '''
    #Init the RepresentedDictString
    if type(_ListedVariable)==list:
        BeginBracketString='['
        EndBracketString=']'
    else:
        # Tuples (the only other caller-provided type) use parentheses.
        BeginBracketString='('
        EndBracketString=')'
    #Define the LocalRepresentedAlineaString
    LocalRepresentedAlineaString=RepresentedAlineaString+"".join(
        [RepresentingIndentString]*(_KwargVariablesDict['RepresentedDeepInt']))
    #Do the first Jump
    RepresentedListString="\n"+LocalRepresentedAlineaString+BeginBracketString
    #Scan the Items (integrativ loop)
    for ListedVariableInt,ListedVariable in enumerate(_ListedVariable):
        #Set the begin of the line
        RepresentedListString+="\n"+LocalRepresentedAlineaString+RepresentingListIndentString
        #Instance method case
        if type(ListedVariable).__name__=="instancemethod":
            RepresentedValueVariableString="instancemethod"
        else:
            #Other Cases
            RepresentedValueVariableString=getRepresentedStringWithVariable(
                ListedVariable,**_KwargVariablesDict)
        #Key and Value Case
        RepresentedListString+=str(ListedVariableInt)+" : "+RepresentedValueVariableString
    #Add a last line
    RepresentedListString+="\n"+LocalRepresentedAlineaString+EndBracketString
    #return the DictString
    return RepresentedListString
def getRepresentedStringWithVariable(_Variable,**_KwargVariablesDict):
    # Central dispatch: pick a representation strategy by the runtime type
    # of _Variable (dict, list/tuple, bound method, string, representer
    # object, or fallback repr). NOTE(review): Python 2 only (`unicode`).
    #Set in the _KwargVariablesDict
    if 'RepresentedDeepInt' not in _KwargVariablesDict:
        _KwargVariablesDict['RepresentedDeepInt']=0
    #Debug
    '''
    print('Representer l.213 : getRepresentedStringWithVariable')
    print('_KwargVariablesDict is ',str(_KwargVariablesDict))
    print('_Variable is '+str(_Variable))
    print("hasattr(_Variable,'__repr__') is "+str(hasattr(_Variable,"__repr__")))
    if hasattr(_Variable,"__repr__"):
        print('hasattr(_Variable.__class__,"InspectedOrderedDict") is '+str(
            hasattr(_Variable.__class__,"InspectedOrderedDict")))
        if hasattr(_Variable.__class__,"InspectedOrderedDict"):
            print("_Variable.__class__.InspectedOrderedDict['__repr__']['KwargVariablesDictKeyString'] is "+str(
                _Variable.__class__.InspectedOrderedDict['__repr__']['KwargVariablesDictKeyString']))
            print(_Variable.__class__.InspectedOrderedDict['__repr__']['KwargVariablesDictKeyString'])
    print('')
    '''
    #Dict types print
    if type(_Variable) in [dict,collections.OrderedDict]:
        #Increment the deep
        _KwargVariablesDict['RepresentedDeepInt']+=1
        #Debug
        '''
        print('This is a dictated type so get a represent like a dict')
        print('')
        '''
        #Return
        return getRepresentedStringWithDictatedVariable(_Variable,**_KwargVariablesDict)
    #List types print
    elif type(_Variable) in [list,tuple]:
        #Debug
        '''
        print('This is a listed type so get a represent like a list')
        print('')
        '''
        #Check if it is a List of Object or Python Types
        # Scalar-only sequences are shown inline via repr(); anything that
        # contains non-scalar items gets the multi-line list rendering.
        if all(
            map(
                lambda ListedVariable:
                type(ListedVariable) in [float,int,str,unicode,SYS.sys.modules['numpy'].float64],
                _Variable
            )
        )==False:
            #Increment the deep
            _KwargVariablesDict['RepresentedDeepInt']+=1
            #Debug
            '''
            print('Print a represented version of the list')
            print('')
            '''
            #Return
            return getRepresentedStringWithListedVariable(_Variable,**_KwargVariablesDict)
        else:
            #Debug
            '''
            print('Here just print the list directly')
            print('')
            '''
            #Return
            return RepresentedAlineaString+repr(
                _Variable).replace("\n","\n"+RepresentedAlineaString)
    #Instance print
    elif type(_Variable).__name__=="instancemethod":
        return RepresentedAlineaString+"instancemethod"
    #String types
    elif type(_Variable) in SYS.StringTypesList:
        #Debug
        '''
        print('This is a string type so get a represent like a string')
        print('')
        '''
        #Return
        return RepresentedAlineaString+_Variable.replace("\n","\n"+RepresentedAlineaString)
    #Other
    elif hasattr(_Variable,"__repr__") and hasattr(
        _Variable.__class__,"InspectedOrderedDict") and _Variable.__class__.InspectedOrderedDict[
        '__repr__']['KwargVariablesDictKeyString']!="":
        #Debug
        '''
        print('This is a representer so call the repr of it with the _KwargVariablesDict')
        print('')
        '''
        #Return the repr of the _Variable but shifted with the RepresentedAlineaString
        # Classes decorated by RepresenterClass accept the depth kwargs.
        return _Variable.__repr__(**_KwargVariablesDict)
    else:
        #Debug
        '''
        print('This is not identified so call the repr of it')
        print('')
        '''
        #Return a repr of the _Variable but shifted with the RepresentedAlineaString
        return RepresentedAlineaString+repr(_Variable).replace("\n","\n"+RepresentedAlineaString)
def _print(_Variable):
    # Convenience wrapper: print the represented form of _Variable.
    print(represent(_Variable))

def represent(_Variable):
    # Public entry point: return the represented string for any variable.
    return getRepresentedStringWithVariable(_Variable)
#</DefineFunctions>
#<DefineClass>
class RepresenterClass(BasedLocalClass):
    # Class decorator: when called on a target class it inspects the class
    # source for a <DefineSpecificDict> section, records which attributes
    # are class-specific vs inherited, and injects a __repr__ that renders
    # "<Spe>" (specific) and "<New>" (instance-only) attributes through the
    # module helpers above. NOTE(review): Python 2 era code (list-returning
    # map/filter); indentation below reconstructed from a mangled dump.

    def __init__(self,**_KwargVariablesDict):
        #<DefineSpecificDict>
        #</DefineSpecificDict>
        #Call the parent init method
        BasedLocalClass.__init__(self,**_KwargVariablesDict)

    def __call__(self,_Class):
        # Decorate _Class: let the base do its bookkeeping, attach the
        # representation machinery, and return the (mutated) class.
        #Call the parent init method
        BasedLocalClass.__call__(self,_Class)
        #Represent
        self.represent(self.ClassingClass)
        #Return
        return self.ClassingClass

    def represent(self,_Class):
        # One-time per-class setup, guarded by a 'Represented<Type>Bool'
        # flag stored on the class itself.
        #Set in the class the represented key strings if not already
        RepresentedBoolKeyString='Represented'+self.ClassedString+'Bool'
        if hasattr(_Class,RepresentedBoolKeyString
            )==False or getattr(_Class,RepresentedBoolKeyString)==False:
            #Look for specific Dict
            _Class.RepresentedSpecificKeyStringsList=[]
            #Define the RepresentedSourceString
            RepresentedSourceString=inspect.getsource(_Class)
            #Check that there is a '<DefineSpecificDict>' part
            _Class.RepresentedSpecificKeyStringsList=[]
            _Class.RepresentedNotGettingStringsList=[]
            if self.ClassedString!='Representer' and '<DefineSpecificDict>' in RepresentedSourceString and '</DefineSpecificDict>' in RepresentedSourceString:
                #Define the DefineSpecificDictString
                DefineSpecificDictString=SYS.getStringsListWithBeginStringAndEndStringAndStringsIntAndString(
                    '<DefineSpecificDict>',
                    '</DefineSpecificDict>',
                    1,
                    RepresentedSourceString
                )[0]
                #print(DefineSpecificDictString)
                #Check that there is at least one definition
                if 'self.' in DefineSpecificDictString:
                    # Attribute names are scraped from "self.Name=..." lines
                    # inside the DefineSpecificDict source section.
                    _Class.RepresentedSpecificKeyStringsList=map(
                        lambda _KeyString:
                        _KeyString.split('=')[0],
                        SYS.filter_(
                            lambda _ExpressionString:
                            "=" in _ExpressionString and _ExpressionString[0] not in ['\n'],
                            DefineSpecificDictString.split('self.')
                        )
                    )
                    # Attributes tagged <NotRepresented> are excluded from repr.
                    _Class.RepresentedNotGettingStringsList=map(
                        lambda _KeyString:
                        _KeyString.split('=')[0],
                        SYS.filter_(
                            lambda _ExpressionString:
                            "<NotRepresented>" in _ExpressionString and "=" in _ExpressionString and _ExpressionString[0] not in ['\n'],
                            DefineSpecificDictString.split('self.')
                        )
                    )
            elif self.ClassedString=='Representer':
                # Bootstrap case: hard-coded lists for this module's own class.
                _Class.RepresentedSpecificKeyStringsList=[
                    'RepresentedNotGettingVariablesList',
                    'RepresentingKeyVariablesList',
                    'RepresentedTuplesList'
                ]
                _Class.RepresentedNotGettingStringsList=[
                    'RepresentedNotGettingVariablesList',
                    'RepresentingKeyVariablesList',
                    'RepresentedTuplesList'
                ]
            #Get the BasedKeyStringsList
            # Collect the specific key strings declared by all base classes.
            _Class.RepresentedBasedKeyStringsList=list(SYS.grab(
                _Class,
                '__bases__',
                'RepresentedSpecificKeyStringsList'
            ))
            #Debug
            '''
            print(
                _Class.__name__,
                #Class.__mro__,
                #Class.RepresentedNotGettingStringsList,
                list(_Class.RepresentedBasedKeyStringsList)
            )
            '''
            #Set in the class
            setattr(_Class,RepresentedBoolKeyString,True)
            #Define the representing methods
            # NOTE(review): placement of this nested def inside the guard
            # `if` is reconstructed -- confirm against the original file.
            def represent(_InstanceVariable,**_KwargVariablesDict):
                # Injected __repr__: show the instance's specific and new
                # attributes, prefixed by a pointer line.
                #Refresh the attributes
                _InstanceVariable.RepresentedTuplesList=_InstanceVariable.__dict__.items()
                #Remove the class NotRepresented attributes
                _InstanceVariable.RepresentedTuplesList=filter(
                    lambda _RepresentedTuple:
                    _RepresentedTuple[0] not in _Class.RepresentedNotGettingStringsList,
                    _InstanceVariable.RepresentedTuplesList
                )
                #Remove the instance NotRepresented attributes
                #_InstanceVariable.RepresentedTuplesList=filter(
                #   lambda _RepresentedTuple:
                #   _RepresentedTuple[0] not in _InstanceVariable.RepresentedNotGettingVariablesList,
                #   _InstanceVariable.RepresentedTuplesList
                #   )
                #First keeps only the Specific and New attributes
                _InstanceVariable.RepresentedTuplesList=map(
                    lambda _RepresentedTuple:
                    ("<Spe>"+_RepresentedTuple[0],_RepresentedTuple[1]),
                    filter(
                        lambda _Tuple:
                        _Tuple[0] in _Class.RepresentedSpecificKeyStringsList,
                        _InstanceVariable.RepresentedTuplesList
                    )
                )+map(
                    lambda _NewTuple:
                    ("<New>"+_NewTuple[0],_NewTuple[1]),
                    filter(
                        lambda _Tuple:
                        _Tuple[0] not in _Class.RepresentedBasedKeyStringsList
                        +_Class.RepresentedSpecificKeyStringsList,
                        _InstanceVariable.RepresentedTuplesList
                    )
                )
                #Add some forced Values with the instance RepresentingKeyVariables
                #_InstanceVariable.RepresentedTuplesList+=map(
                #   lambda _KeyVariable:
                #   ("<NotSpe>"+str(_KeyVariable),_InstanceVariable[_KeyVariable]),
                #   _InstanceVariable.RepresentingKeyVariablesList
                #   )
                #Simplify the numpy variables repr
                '''
                _InstanceVariable.RepresentedTuplesList=map(
                    lambda _RepresentedTuple:
                    _RepresentedTuple
                    if type(_RepresentedTuple[1]) not in [numpy.ndarray]
                    else (
                        _RepresentedTuple[0],
                        SYS.getRepresentedNumpyArray(_RepresentedTuple[1])
                    ),
                    _InstanceVariable.RepresentedTuplesList
                )
                '''
                #return the representedVariable
                return getRepresentedPointerStringWithVariable(_InstanceVariable
                )+getRepresentedStringWithVariable(
                    dict(_InstanceVariable.RepresentedTuplesList),**_KwargVariablesDict)
            #Bound and set in the InspectedOrderedDict
            _Class.__repr__=represent
            _Class.InspectedOrderedDict['__repr__']=SYS.Inspecter.getInspectedOrderedDictWithMethod(
                _Class.__repr__)
#</DefineClass>
|
UTF-8
|
Python
| false | false | 2,014 |
14,688,788,185,077 |
16a38fb883ffdc94900b8972ed97c312ce313857
|
c110bbd14907eb152529b8d825dab9f07bab438c
|
/src/SceneGraph/stat_mesh_node.py
|
750e8870e7cee94931745285cfde1e2809547c28
|
[] |
no_license
|
TN1ck/robocup-ss13
|
https://github.com/TN1ck/robocup-ss13
|
24ed50b4efc3ca0108fc5c6756c50b0c88c30d53
|
9ffecd7c4a91e27bde313b72593ec4770d4a4a73
|
refs/heads/master
| 2021-01-20T23:24:02.377387 | 2013-07-22T08:08:33 | 2013-07-22T08:08:33 | 9,834,007 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from tree_node import Tree_Node
class Stat_Mesh_Node(Tree_Node):
    """Scene-graph node for a static mesh (e.g. a Nao robot or the ball).

    Stores the model file to load, its scale, the optional visibility and
    transparency flags (None when absent from the sceneGraph message) and
    the list of materials used by the associated .obj file.
    """

    def __init__(self, id, load, sSc, visible, transparent, reset):
        super(Stat_Mesh_Node, self).__init__(id)
        self.__load = load                # path to the .obj model file
        self.__sSc = sSc                  # scale of the object (three values)
        self.__visible = visible          # setVisible, or None if unspecified
        self.__transparent = transparent  # setTransparent, or None if unspecified
        self.__reset = reset              # materials of the .obj file

    # --- accessors --------------------------------------------------------

    def get_load(self):
        return self.__load

    def set_load(self, load):
        self.__load = load

    def get_sSc(self):
        return self.__sSc

    def set_sSc(self, sSc):
        self.__sSc = sSc

    def get_visible(self):
        return self.__visible

    def set_visible(self, visible):
        self.__visible = visible

    def get_transparent(self):
        return self.__transparent

    def set_transparent(self, transparent):
        self.__transparent = transparent

    def get_reset(self):
        return self.__reset

    def set_reset(self, reset):
        self.__reset = reset

    def update(self, load, sSc, visible, transparent, reset):
        """Refresh every stored field from a new sceneGraph message."""
        self.set_load(load)
        self.set_sSc(sSc)
        self.set_visible(visible)
        self.set_transparent(transparent)
        self.set_reset(reset)
|
UTF-8
|
Python
| false | false | 2,013 |
12,317,966,234,391 |
8a3889f293457dd215b7b8f673f5edb0dc81c311
|
ac2a085b5fefff1021ea7a6573e3d73866f6dd28
|
/c0ntr0l/c0ntr0l.py
|
224520a0261c947983af96733599e93840048f59
|
[] |
no_license
|
a1k0n/x0xb0x
|
https://github.com/a1k0n/x0xb0x
|
0a4bf39446623e176d9d5511f8e5828df2af7bce
|
b4e279e2a3432d7606cbf0eb6914f1ed157fdba3
|
refs/heads/master
| 2021-01-24T08:46:24.507955 | 2006-02-07T00:48:40 | 2016-09-23T23:13:54 | 69,067,016 | 9 | 7 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/bin/env python
#
# Copyright (c) 2002-2004. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
#----------------------------------------------------------------------------
# Name: main.py
# Purpose:
#
# Author: Michael Broxton
#
# Created: A long time ago, in a galaxy far, far away...
# Copyright: (c) 2005
#----------------------------------------------------------------------------
## import all of the wxPython GUI package
from wxPython.wx import *
from Globals import *
import model
import controller
import view
#---------------------------------------------------------------------------
# Every wxWindows application must have a class derived from wxApp. The wxApp
# class sets up the basic event handling system (which is behind the scenes --
# you never really see it, but you can access it through calls to the wxApp object).
# Once everything for the wxApp has been properly initialized, the OnInit() function
# is called, where your code takes over to spawn windows in the GUI and initialize app
# logic.
#
# This particular application is built around the
# Model-View-Controller abstraction for graphical user interfaces.
# This means that in addition to creating objects for the graphical
# user interface and the software itself, this class acts as a
# controller that is the intermediary -- the abstraction barrier --
# between software and the graphical interface.
#
# Thus, beyond the initialization routine ( OnInit() ) that gets everything
# going, this class will have two types of methods: 1) Methods that are called
# when events occur in the GUI that need to interact with software components
# in the back end (actions), and 2) Methods that are called by the software back end
# to update the state of the GUI (outlets).
#---------------------------------------------------------------------------
class x0xc0ntr0l_App(wxApp):
    # Application class for the x0xb0x control program. Acts as the glue
    # that builds the MVC triad (controller, model, view) at startup and
    # tears it down on exit.
    #
    # =================== Initialization =========================
    # wxPython calls this method to initialize the application.
    # This is where the controller object creates the model and
    # view objects.
    #
    def OnInit(self):
        # Create the controller, then the model and the view.
        c = controller.Controller(self)
        # Create the data model. this should take care of serial ports
        # and application logic.
        m = model.Model(c)
        # Create the View class and the GUI.
        v = view.View(c)
        # Wire the triad together BEFORE initializing either side, so each
        # initialize() can reach its counterparts through the controller.
        c.setView(v)
        c.setModel(m)
        m.initialize()
        v.initialize()
        # Keep references so OnExit can destroy them later.
        self.m = m
        self.c = c
        self.v = v
        # Return a success flag
        # NOTE(review): `true` is the old wxPython alias brought in by the
        # star import above, not a Python builtin.
        return true

    def OnExit(self):
        # Save the configuration to file and exit.
        # Destroy view first, then model, then controller.
        self.v.destroy()
        self.m.destroy()
        self.c.destroy()
#---------------------------------------------------------------------------
# Script entry point: build the application and enter the wx event loop.
x0x_app = x0xc0ntr0l_App(0) # Create an instance of the application class
x0x_app.MainLoop()     # Tell it to start processing events
#----------------------------------------------------------------------------
|
UTF-8
|
Python
| false | false | 2,006 |
15,006,615,778,497 |
c0072b562daf0ae57f70b813c7fe2defea937fa8
|
a704892d86252dde1bc0ff885ea5e7d23b45ce84
|
/addons-extra/c2c_budget/wizard/advanced_search.py
|
2cad0df86b598d2c2545300f9df475f0587431d5
|
[] |
no_license
|
oneyoung/openerp
|
https://github.com/oneyoung/openerp
|
5685bf8cce09131afe9b9b270f6cfadf2e66015e
|
7ee9ec9f8236fe7c52243b5550fc87e74a1ca9d5
|
refs/heads/master
| 2016-03-31T18:22:41.917881 | 2013-05-24T06:10:53 | 2013-05-24T06:10:53 | 9,902,716 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c) Camptocamp SA - http://www.camptocamp.com
# Author: Arnaud WÃŒst
#
# This file is part of the c2c_budget module
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsability of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# garantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
import wizard
import netsvc
import pooler
from tools.misc import UpdateableStr
# Mutable holder for the wizard form XML: the form is (re)generated at
# runtime by _build_form, so `states` below must reference a container whose
# contents can change after registration.
arch = UpdateableStr()

# Static head/tail of the wizard form; the abstraction layer contributes the
# middle part at runtime.
_form_header = """<?xml version="1.0"?>
<form string="Budget lines search" height="200" width="800">
<separator string="Choose periods (empty for all)" colspan="2"/>
<separator string="Choose items (empty for all)" colspan="2"/>
<field name="periods" nolabel="1" colspan="2" width="400" height="150"/>
<field name="items" nolabel="1" colspan="2" width="400"/>"""

_form_footer = """</form>"""

# Base field definitions; _build_form merges in the abstraction's fields.
_fields = {
    'periods': {'string':'Periods', 'type':'many2many', 'relation':'account.period'},
    'items': {'string':'Budget Items', 'type':'many2many', 'relation':'c2c_budget.item'},
}
class wiz_advanced_search(wizard.interface):
    """ this wizard provide a advanced search form for budget lines """
    # NOTE(review): old OpenERP (pre-OpenObject 6) wizard API, Python 2.

    def _build_form(self, cr, uid, data, context):
        """complete the form with abstracted parts from c2c_budget.wizard_abstraction """
        wiz_abstract_obj = pooler.get_pool(cr.dbname).get('c2c_budget.wizard_abstraction')
        #complete the form with the abstraction
        # Mutates the module-level UpdateableStr in place so `states` sees it.
        arch.string = _form_header + wiz_abstract_obj.advanced_search_get_form(cr, uid, data,context) + _form_footer
        #complete the fields with the abstraction
        fields = wiz_abstract_obj.advanced_search_get_fields(cr, uid, data,context)
        for f in fields:
            _fields[f] = fields[f]
        return {}

    def _get_budget_lines(self, cr, uid, data, context):
        """ retrieve lines to work on """
        # Builds an act_window action listing the budget lines matching the
        # selected periods / items / versions / analytic accounts, plus a
        # domain string so the tree view pre-fills new lines consistently.
        line_obj = pooler.get_pool(cr.dbname).get('c2c_budget.line')
        item_obj = pooler.get_pool(cr.dbname).get('c2c_budget.item')
        anal_account_obj = pooler.get_pool(cr.dbname).get('account.analytic.account')
        # many2many wizard values arrive as [(6, 0, [ids])]; [0][2] is the id list.
        period_ids = data['form']['periods'][0][2]
        item_ids = item_obj.get_sub_items(cr, data['form']['items'][0][2])
        version_ids = data['form']['versions'][0][2]
        anal_account_ids = anal_account_obj.get_children_flat_list(cr, uid, data['form']['analytic_accounts'][0][2])
        if data['form']['empty_aa_too']:
            # Also match lines with no analytic account at all.
            anal_account_ids.append(False)
        #build the search criteria list
        criteria = []
        if len(item_ids) > 0:
            criteria.append(('budget_item_id', 'in', item_ids))
        if len(period_ids) > 0:
            criteria.append(('period_id', 'in', period_ids))
        if len(version_ids) > 0:
            criteria.append(('budget_version_id', 'in', version_ids))
        if len(anal_account_ids) > 0:
            criteria.append(('analytic_account_id', 'in', anal_account_ids))
        line_ids = line_obj.search(cr, uid, criteria)
        # Construct domain: if there is only one item selected,
        # put it in the domain to improve input of lines (what is in the domain will be auto-selected)
        domain=[]
        if len(item_ids)==1:
            domain.append("('budget_item_id','=',%d)"%item_ids[0])
        elif len(item_ids) > 1:
            domain.append("('budget_item_id','in',["+','.join(map(str,item_ids))+"])")
        if len(period_ids)==1:
            domain.append("('period_id','=',%d)"%period_ids[0])
        elif len(period_ids) > 1:
            domain.append("('period_id','in',["+','.join(map(str,period_ids))+"])")
        if len(version_ids)==1:
            domain.append("('budget_version_id','=',%d)"%version_ids[0])
        elif len(version_ids) > 1:
            domain.append("('budget_version_id','in',["+','.join(map(str,version_ids))+"])")
        if len(anal_account_ids)==1:
            domain.append("('analytic_account_id','=',%d)"%anal_account_ids[0])
        elif len(anal_account_ids) > 1:
            domain.append("('analytic_account_id','in',["+','.join(map(str,anal_account_ids))+"])")
        domain = "[%s]"%','.join(map(str,domain))
        result = {
            'domain': domain,
            'name': 'Selected Budget Lines',
            'view_type': 'form',
            'view_mode': 'tree,form',
            'res_model': 'c2c_budget.line',
            'view_id': False,
            'type': 'ir.actions.act_window',
            'res_id':line_ids,
        }
        return result

    # Wizard state machine: build the form, then either cancel or open the
    # act_window produced by _get_budget_lines.
    states = {
        'init' : {
            'actions':[_build_form],
            'result' : {'type':'form', 'arch':arch, 'fields':_fields, 'state': [('end','Cancel'),('open','Show lines')]},
        },
        'open' : {
            'actions' : [],
            'result' : {'type':'action', 'action':_get_budget_lines, 'state':'end'},
        },
    }

# Register the wizard under its service name.
wiz_advanced_search('budget.advanced_search')
|
UTF-8
|
Python
| false | false | 2,013 |
6,287,832,124,779 |
81f1d13972d3a8d39b8e885e17ee8aa918110397
|
b7537cbad920aa90d7d774964c065ef3762721c5
|
/tools/download_datastore.py
|
df6b196f2947775c1b617e0dc92f7e8d85239d53
|
[] |
no_license
|
jcrocholl/minderbot
|
https://github.com/jcrocholl/minderbot
|
ef6216a2b2d9a76bdf4f3984b94a9f95df709819
|
b584f6f26c419011517f0914c0ff943d7f4cb2cb
|
refs/heads/master
| 2020-07-09T08:42:26.025781 | 2009-10-18T22:43:42 | 2009-10-18T22:43:42 | 328,870 | 3 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
import os, sys
# Django apps whose data is dumped by default.
APPS = 'auth feedback suggestions tags'.split()

# Shell command template; %(app)s is interpolated per app in main(). The
# join/split dance collapses the multi-line literal to a single line.
DUMP_COMMAND = ' '.join("""
./manage.py dumpdata
--format=json --indent=2 --remote %(app)s
> fixtures/%(app)s.json
""".split())
def system(command):
    # Run a shell command, echoing it first; abort the whole script with the
    # command's exit code on failure. NOTE: Python 2 print statements.
    print command
    status = os.system(command)
    if status:
        print "failed with exit code %d" % status
        sys.exit(status)
def main(argv):
    # Dump either the default APPS or the app names given on the command line.
    apps = APPS
    if len(argv) > 1:
        apps = argv[1:]
    for app in apps:
        # DUMP_COMMAND pulls %(app)s from the local namespace via locals().
        system(DUMP_COMMAND % locals())

if __name__ == '__main__':
    main(sys.argv)
|
UTF-8
|
Python
| false | false | 2,009 |
18,777,597,033,062 |
73920f2b1270f30fccc6902d0b7953912810b416
|
db59db9e0571314ee0dfaeb5f9f25527bd61e81e
|
/projeto/estabelecimento/management/commands/balancear_promotes.py
|
7ecd5ab304e3f9eb1bcbc03a97382e48c2ba78ec
|
[] |
no_license
|
nandel/locais
|
https://github.com/nandel/locais
|
8cb0e1d4761513106de30a13fdee002f389ec3bb
|
e36eb4375d2cc17ad72d78c6967821ce2219a812
|
refs/heads/master
| 2020-03-05T14:15:37.041670 | 2013-12-19T23:50:30 | 2013-12-19T23:50:30 | 14,248,392 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
from django.core.management.base import NoArgsCommand
from estabelecimento.models import Estabelecimento, Promote
class Command(NoArgsCommand):
    """Management command: rebalance the `promotes` score of every approved
    Estabelecimento relative to the current maximum."""

    def handle_noargs(self, **options):
        approved = Estabelecimento.objects.aprovados()
        # Highest promote count over ALL establishments (renamed to avoid
        # shadowing the `max` builtin as the original did).
        top_promotes = Estabelecimento.objects.order_by('-promotes')[0:1].get().promotes
        # Realizamos o balanceamento: scale each score by count/(max+1),
        # keeping the original fixed-point (*100 ... /100) arithmetic.
        count = approved.count()
        for estabelecimento in approved:
            estabelecimento.promotes = (count * ((estabelecimento.promotes * 100 + 1) / (top_promotes + 1))) / 100
            estabelecimento.save()
        self.stdout.write('Promotes de todos Estabelecimentos balanceados com fator ' + str(count))
|
UTF-8
|
Python
| false | false | 2,013 |
6,665,789,275,332 |
e1bc7c699b9f827549f6866c59cfd73a5e98478d
|
0f832da4019852c6b4a17df2a1a0c70aab88ee82
|
/ZenPlayer/playing.py
|
302b12b2ef08afc493173b6ae7daad2124498d92
|
[
"MIT"
] |
permissive
|
metaxnet/kivybits
|
https://github.com/metaxnet/kivybits
|
bc31553dc30b3c7a72b1496f840692efc18d238a
|
5749a5a7edca01f1ead4cc7a7e875a9cce14ffe3
|
refs/heads/master
| 2021-01-15T22:52:27.385639 | 2014-07-11T14:03:58 | 2014-07-11T14:03:58 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from kivy.lang import Builder
from kivy.properties import ObjectProperty
from playlist import PlayList, PlayListScreen
from kivy.clock import Clock
from filebrowser import ZenFileBrowser
from kivy.utils import platform
if platform == 'linux': # Enable Mp3
from audioplayer import SoundLoader
else:
from kivy.core.audio import SoundLoader
from kivy.logger import Logger
from kivy.storage.jsonstore import JsonStore
from kivy.uix.screenmanager import Screen
class Controller(object):
    """
    Controls the playing of audio and coordinates the updating of the playlist
    and screen displays
    """
    # Persisted master volume. Default 100 -- NOTE(review): the volume
    # slider in the kv layout runs 0..1, so the scales look inconsistent;
    # confirm against how PlayingScreen applies this value.
    volume = 100

    def __init__(self):
        # Load the playlist and any saved state from the JSON store.
        self.playlist = PlayList()
        self._store = JsonStore("zenplayer.json")
        self.playlist.load(self._store)
        if self._store.exists('state'):
            state = self._store.get("state")
            if "volume" in state.keys():
                self.volume = state["volume"]

    # --- thin delegations to the playlist ---------------------------------

    def get_current_art(self):
        return self.playlist.get_current_art()

    def get_current_info(self):
        return self.playlist.get_current_info()

    def get_current_file(self):
        return self.playlist.get_current_file()

    def move_next(self):
        self.playlist.move_next()

    def move_previous(self):
        self.playlist.move_previous()

    def save(self):
        # Persist both the playlist and the current volume.
        self.playlist.save(self._store)
        self._store.put("state", volume=self.volume)
Builder.load_string('''
<PlayingScreen>:
# Define the buttons
but_previous: previous
but_stop: stop
but_playpause: playpause
but_next: next
volume_slider: volume
progress_slider: progress
info_label1: info_label1
info_label2: info_label2
info_label3: info_label3
time_label: time_label
album_image: album_image
BoxLayout:
BoxLayout:
orientation: "vertical"
size_hint_x: 0.1
padding: 10
spacing: 10
Slider:
id: progress
size_hint_y: 0.9
orientation: "vertical"
max: 1
#on_value: root.set_volume()
Image:
size_hint_y: 0.075
source: 'images/progress.png'
BoxLayout:
# Center column
size_hint_x: 0.8
orientation: "vertical"
padding: 10, 10, 10, 10
BoxLayout:
size_hint_y: 0.05
Image:
source: 'images/add.png'
on_touch_down: self.collide_point(*args[1].pos) and root.show_filebrowser()
Image:
source: 'images/zencode.jpg'
Image:
source: 'images/playlist.png'
on_touch_down: self.collide_point(*args[1].pos) and root.show_playlist()
Label:
id: info_label1
size_hint_y: 0.05
Label:
id: info_label2
size_hint_y: 0.05
Label:
id: info_label3
size_hint_y: 0.05
BoxLayout:
size_hint_y: 0.65
padding: 10, 10, 10, 10
Image:
id: album_image
source: "images/zencode.jpg"
Label:
id: time_label
size_hint_y: 0.05
BoxLayout:
size_hint_y: 0.075
orientation: "horizontal"
MediaButton:
id: previous
source: 'images/previous.png'
on_click: root.play_previous()
MediaButton:
id: stop
source: 'images/stop.png'
on_click: root.stop()
MediaButton:
id: playpause
source: 'images/play.png'
on_click: root.playpause()
MediaButton:
id: next
source: 'images/next.png'
on_click: root.play_next()
BoxLayout:
# Right sidebar
orientation: "vertical"
size_hint_x: 0.1
padding: 10
spacing: 10
Slider:
id: volume
size_hint_y: 0.9
orientation: "vertical"
value: 0.5
max: 1
on_value: root.set_volume()
Image:
size_hint_y: 0.075
source: 'images/speaker.png'
''')
class PlayingScreen(Screen):
"""
The main screen that shows whats currently playing.

Widgets are bound via the kv rule above; playback state lives in
``self.sound`` (a kivy Sound or None) and playlist state in ``self.ctrl``.
"""
#TODO : Document properties once stable
album_image = ObjectProperty()
sound = None
advance = True # This flag indicates whether to advance to the next track
# once the currently playing one had ended
but_previous = ObjectProperty()
but_stop = ObjectProperty()
but_playpause = ObjectProperty()
but_next = ObjectProperty()
# NOTE(review): kv binds info_label1/2/3, not info_label — this property
# appears unused; confirm before removing.
info_label = ObjectProperty()
volume_slider = ObjectProperty()
progress_slider = ObjectProperty()
time_label = ObjectProperty()
# NOTE: class attribute, so all PlayingScreen instances share one Controller.
ctrl = Controller()
def __init__(self, sm, **kwargs):
# sm: the ScreenManager used for switching to playlist/filebrowser screens.
self.sm = sm
super(PlayingScreen, self).__init__(**kwargs)
# NOTE(review): under Python 2, 1/25 is integer division == 0, which Kivy
# interprets as "every frame" — confirm whether 25 fps (1/25.) was intended.
Clock.schedule_interval(self._update_progress, 1/25)
self.volume_slider.value = self.ctrl.volume
def init_display(self):
""" Initialize the display """
self.album_image.source = self.ctrl.get_current_art()
info = self.ctrl.get_current_info()
if info:
self.info_label1.text = info["artist"]
self.info_label2.text = info["album"]
self.info_label3.text = info["file"]
def playpause(self):
""" Start playing any audio if nothing is playing """
self.advance = True
if not self.sound:
# Nothing loaded: load and start the current track, if any.
audiof = self.ctrl.get_current_file()
if audiof:
Logger.info("main.py: playing " + audiof)
self.sound = SoundLoader.load(audiof)
self.sound.bind(on_stop=self._on_sound_stop)
self.sound.play()
self.init_display()
self.but_playpause.source = "images/pause.png"
self.sound.volume = self.volume_slider.value
Logger.info("main.py: Sounds is a " + str(self.sound))
elif self.sound.state == "play":
# Currently playing: pause (stop) and flip the button icon.
self.sound.stop()
self.but_playpause.source = "images/play.png"
else:
# Loaded but stopped: resume.
self.sound.play()
self.but_playpause.source = "images/pause.png"
self.sound.volume = self.volume_slider.value
def play_next(self):
""" Play the next track. """
Logger.info("main.py: PlayingScreen.play_next")
if self.sound:
self.stop()
self.sound = None
self.ctrl.move_next()
if self.ctrl.get_current_file():
self.init_display()
self.playpause()
def play_previous(self):
""" Play the previous track. """
if self.sound:
self.stop()
self.sound = None
self.ctrl.move_previous()
if self.ctrl.get_current_file():
self.init_display()
self.playpause()
def stop(self):
""" Stop any playing audio """
# Clearing advance prevents _on_sound_stop from auto-playing the next track.
self.advance = False
if self.sound:
self.sound.stop()
self.but_playpause.source = "images/play.png"
self.sound = None
def save(self):
""" Save the current playlist state """
self.ctrl.save()
def set_volume(self):
""" Set the volume of the currently playing track if there is one. """
if self.sound:
self.sound.volume = self.volume_slider.value
def show_playlist(self):
""" Switch to the playlist screen """
if "playlist" not in self.sm.screen_names:
self.sm.add_widget(PlayListScreen(self.sm,
self.ctrl.playlist,
name="playlist"))
self.sm.current = "playlist"
def show_filebrowser(self):
""" Switch to the file browser screen """
if "filebrowser" not in self.sm.screen_names:
self.sm.add_widget(ZenFileBrowser(self.sm,
self.ctrl.playlist,
name="filebrowser"))
self.sm.current = "filebrowser"
def _on_sound_stop(self, *args):
# Called by Kivy when the sound finishes; auto-advance unless stop() ran.
Logger.info("main.py: sound has stopped. args=" + str(args))
self.sound = None
if self.advance:
self.ctrl.move_next()
self.init_display()
self.playpause()
def _update_progress(self, dt):
""" Update the progressbar and the elapsed/total time label. """
if self.sound:
length = self.sound._get_length()
if length > 0:
pos = self.sound.get_pos()
self.progress_slider.value = pos / length
self.time_label.text = "{0}m {1:02d}s / {2}m {3:02d}s".format(
int(pos / 60),
int(pos % 60),
int(length / 60),
int(length % 60))
|
UTF-8
|
Python
| false | false | 2,014 |
9,234,179,723,322 |
c62ff1093b521149f44ab5477d06ed04f42eca67
|
9382cfe86a25aeff1f17b500f400c988450cd802
|
/screenshotrequests.py
|
5b8f635033f494c62575cf81f19427433b15eb43
|
[] |
no_license
|
vexis211/WebScreenShotter
|
https://github.com/vexis211/WebScreenShotter
|
c03b1c348e740dff6ba3edee3bbdd97fe494eec8
|
dce0c3e8c657412f89de67a6f941bf37ed5aee2f
|
refs/heads/master
| 2021-01-13T02:36:42.221099 | 2014-05-20T16:30:35 | 2014-05-20T16:30:35 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# coding=utf-8
import sys
sys.path.insert(0, 'libs')
from services.screenshots import ScreenShotRequestManager
from django.core.validators import URLValidator
from templating import BaseHandler
__author__ = 'Jan Skalicky <[email protected]>'
class RequestsHandler(BaseHandler):
    """Render the screenshot-requests overview page."""

    def get(self):
        """Show the approximate number of pending screenshot requests.

        Uses the cheap approximate count rather than fetching the request
        entities themselves. (Dead commented-out code that fetched the full
        request list was removed.)
        """
        approx_request_count = ScreenShotRequestManager.get_approx_requests_count()
        template_values = {
            'approx_request_count': approx_request_count,
        }
        self.render_response('page_requests.html', template_values)
class CreateRequestHandler(BaseHandler):
    """Serve the "create screenshot request" form and handle its submission."""

    def get(self):
        # Render the blank form.
        self.render_response('page_create_request.html')

    def post(self):
        """Validate the submitted URL; queue a request or re-show the form."""
        site_uri = self.request.get('site_uri')
        if self.is_form_valid(site_uri):
            ScreenShotRequestManager.create(site_uri)
            # Valid URL: queue the request and return to the request list.
            self.redirect('/Request')
        else:
            template_values = {
                'site_uri': site_uri,
                'site_uri_errors': 'This is not valid URL! Please check and try to submit again.'
            }
            # Re-render the form with the validation error message.
            self.render_response('page_create_request.html', template_values)

    @staticmethod
    def is_form_valid(site_uri):
        """Return True when *site_uri* parses as a valid URL."""
        val = URLValidator(verify_exists=False)
        try:
            val(site_uri)
        except Exception:
            # BUG FIX: was a bare `except:`, which also swallowed SystemExit
            # and KeyboardInterrupt. URLValidator signals failure by raising
            # django.core.exceptions.ValidationError; catching Exception keeps
            # the original "any failure means invalid" behaviour without
            # trapping interpreter-level exits.
            return False
        return True
|
UTF-8
|
Python
| false | false | 2,014 |
4,363,686,820,156 |
51fdec3abe5475b45c97e9db2eb164aca7f8e261
|
aa509dd780cce2f827d63922c745512e79927986
|
/sitemaps.py
|
1d0b83aaf64f16582f98070e5d744fa1480dc872
|
[] |
no_license
|
patrickbeeson/knoxd
|
https://github.com/patrickbeeson/knoxd
|
05e047c9510581b30b8e856d864a6b8938552121
|
01f083dc723339b5a9389418d458a6ab7fa180fa
|
refs/heads/master
| 2021-01-22T20:45:30.697641 | 2013-11-22T20:55:38 | 2013-11-22T20:55:38 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.contrib.sitemaps import Sitemap
from knoxd.apps.aggregator.models import Feed
import datetime


class AggregatorSitemap(Sitemap):
    """Sitemap listing every aggregator feed."""
    changefreq = 'hourly'
    priority = 0.5

    def items(self):
        # All feeds appear in the sitemap.
        return Feed.objects.all()

    def lastmod(self, obj):
        return obj.date_modified

    def location(self, obj):
        # BUG FIX: the original read
        #   return "/categories/%s/%s" % obj.category.slug, obj.id
        # which binds as (("/categories/%s/%s" % obj.category.slug), obj.id):
        # a tuple return, and the %-format raises TypeError because two
        # placeholders receive a single argument. Both values must be passed
        # to the format operator as one tuple.
        return "/categories/%s/%s" % (obj.category.slug, obj.id)
|
UTF-8
|
Python
| false | false | 2,013 |
841,813,623,222 |
adef90e5eb0bddc35c8c3537914fc591a19bbce0
|
358dbae855f7179c5bba0685f1ed059a34c8bed1
|
/converter.py
|
0896c492021df9e0a276e89455a31ca6421bebe8
|
[] |
no_license
|
m-mehta/p2g2mp4
|
https://github.com/m-mehta/p2g2mp4
|
96f7f95f7571c86f870bb0bec3a65943fefd5bee
|
0fe0488f8d202bb9435c2db67b6d1f10e6e2e031
|
refs/heads/master
| 2020-06-05T13:36:40.143702 | 2013-04-13T00:19:41 | 2013-04-13T00:19:41 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
"""
This utility allows for the conversion of a P2G archive into an mp4 video file.
Two methods of usage:
1 - >python converter.py target_directory
where target directory is an unziped P2G archive
2 - >python converter.py
In usage 2, a prompt will allow the user to browse for the zip file
Script is only designed for use on Mac OS X.
Developed by Manan Mehta ([email protected])
"""
import os
import re
import sys
import tkFileDialog
def getFirstBlock(xml, type, cast = str):
    """Return the contents of the first <type>...</type> block in *xml*.

    The text between the first opening and first closing tag is stripped of
    surrounding whitespace and passed through *cast* (default: str).
    Raises IndexError when the tag does not occur in *xml*.
    """
    opentag = '<%s>' % type
    closetag = '</%s>' % type
    # BUG FIX: on Python 3, zip() returns an iterator which is not
    # subscriptable; the original indexed it directly (zip(...)[0]), which
    # only worked on Python 2. Materialize the pairs before indexing.
    intervals = list(zip([m.end() for m in re.finditer(opentag, xml)],
                         [m.start() for m in re.finditer(closetag, xml)]))
    start, end = intervals[0]
    return cast(xml[start:end].strip())
def getAllBlocks(xml, type, cast = str):
    """Return every <type>...</type> block in *xml*, stripped and cast.

    The i-th opening tag is paired with the i-th closing tag, mirroring the
    pairing behaviour of the original zip-based implementation.
    """
    opentag = '<%s>' % type
    closetag = '</%s>' % type
    starts = [m.end() for m in re.finditer(opentag, xml)]
    ends = [m.start() for m in re.finditer(closetag, xml)]
    blocks = []
    for begin, finish in zip(starts, ends):
        blocks.append(cast(xml[begin:finish].strip()))
    return blocks
def make_filelist(num, d):
    """Return the *num* full-size slide image paths under directory *d*.

    Slides are numbered 1..num and follow the fixed P2G naming pattern
    ``slide_NNNN_full.jpg`` (zero-padded to four digits).
    """
    return ["%sslide_%04d_full.jpg" % (d, index) for index in range(1, num + 1)]
def ms_to_ts(ms):
    """Format a millisecond count as a seconds string with 3 decimals."""
    seconds = ms / 1000.0
    return "%.3f" % seconds
def times_to_lengths(times, total_length):
    """Convert absolute slide-change times (ms) into per-slide durations.

    Returns ``(initial_delay, durations)`` as "%.3f"-formatted second
    strings: the delay before the first slide change, then the on-screen
    time of each slide (the last slide runs until *total_length*).
    Like the original, an empty *times* list raises IndexError.
    """
    durations = []
    previous = 0
    for change_time in times:
        durations.append(change_time - previous)
        previous = change_time
    # Last slide lasts until the end of the recording.
    durations.append(total_length - times[-1])
    initial_delay = durations.pop(0)
    return ms_to_ts(initial_delay), [ms_to_ts(d) for d in durations]
def concat_jpgs(filenames):
"""Concatenate the per-slide mp4 clips into video.mp4 via ffmpeg concat.

Writes an ffmpeg concat manifest (filelist.txt) referencing the .mp4
counterpart of each .jpg in *filenames*, runs ffmpeg, then deletes the
intermediate clips and the manifest via shell commands.
"""
fid = open('filelist.txt','w')
for file in filenames:
# Swap the .jpg extension for .mp4 (clips produced by make_mp4_from_jpg).
nfile = file[0:-3] + 'mp4'
s = "file \'%s\'\n" % nfile
fid.write(s)
fid.close()
os.system("./ffmpeg -f concat -i filelist.txt -c copy video.mp4")
# Clean up the intermediate per-slide clips and the manifest.
for file in filenames:
nfile = file[0:-3] + 'mp4'
cmd = 'rm \'%s\'' % nfile
os.system(cmd)
os.system("rm filelist.txt")
def make_mp4_from_jpg(filename, length):
"""Render a still .jpg into an mp4 clip of duration *length* seconds (ffmpeg)."""
outname = filename[0:-3]+'mp4'
cmd = "./ffmpeg -loop 1 -f image2 -i \'%s\' -an -vcodec libx264 -pix_fmt yuv420p -r 5 -t %s -y \'%s\'" % (filename,length,outname)
os.system(cmd)
def make_audio(video):
"""Extract the audio track from *video* into audio.wma without re-encoding."""
cmd = "./ffmpeg -i \'%s\' -vn -acodec copy audio.wma" % video
os.system(cmd)
def make_full_mp4(filenames, lengths, video, delay, outname):
"""Build the final lecture video *outname*.

Renders each slide image into a clip of its duration, concatenates them,
extracts the audio from *video*, and muxes audio (offset by *delay*
seconds) with the slide video. Intermediate files are removed afterwards.
"""
for i in range(len(filenames)):
make_mp4_from_jpg(filenames[i],lengths[i])
concat_jpgs(filenames)
make_audio(video)
cmd = "./ffmpeg -i video.mp4 -itsoffset %s -i audio.wma -vcodec copy -strict -2 \'%s\'" % (delay,outname)
print cmd
os.system(cmd)
os.system("rm video.mp4")
os.system("rm audio.wma")
def main():
"""Convert a P2G archive to an mp4.

Usage 1: ``python converter.py target_directory`` (already-unzipped archive).
Usage 2: no argument — a Tk file dialog asks for the .zip, which is unzipped
next to itself. Slide-change times and video metadata are read from the
Mediasite XML manifest, then the video is assembled with make_full_mp4.
"""
basedir = './'
file_dir = 'Content/'
xml_file = 'MediasitePresentation_60.xml'
outname = 'lecture.mp4'
if len(sys.argv)==2:
basedir = sys.argv[1]
outname = basedir+outname
else:
# Interactive mode: pick the .zip and extract it beside itself.
fullpath = tkFileDialog.askopenfilename()
basedir = os.path.splitext(fullpath)[0]+'/'
outname = os.path.splitext(fullpath)[0]+'.mp4'
cmd = 'unzip \"%s\" -d \"%s\"' % (fullpath,basedir)
os.system(cmd)
file_dir = basedir+file_dir
xml_file = basedir+xml_file
fid = open(xml_file)
xml = fid.read()
fid.close()
# Slide-change timestamps (ms) and the on-demand video metadata.
times = getAllBlocks(getFirstBlock(xml, 'Slides'),'Time',int)
video_data = getFirstBlock(xml, 'OnDemandContentList')
video_file = getFirstBlock(video_data, 'FileName')
total_length = getFirstBlock(video_data,'Length',int)
video_file = file_dir+video_file
file_list = make_filelist(len(times),file_dir)
delay,length_list = times_to_lengths(times,total_length)
make_full_mp4(file_list,length_list,video_file,delay, outname)
# Script entry point.
if __name__ == '__main__':
main()
|
UTF-8
|
Python
| false | false | 2,013 |
9,878,424,829,355 |
c2e4cf475b4a33955e12961ffcf8695a690478f5
|
ceebd0cc47512abf28250bf867d459d3e4009f80
|
/figure5.py
|
7818fea9fd64b584c72cf94b93626f3f38643aaa
|
[] |
no_license
|
AUESG/european_grid_ext
|
https://github.com/AUESG/european_grid_ext
|
2cfdf40feb9f45a450797a34d8048ac39a385caf
|
cd38b48f828517e546924af130cc0ecf97d3c589
|
refs/heads/master
| 2019-02-01T07:15:29.259225 | 2013-11-27T17:01:20 | 2013-11-27T17:01:20 | 13,417,060 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import multiprocessing as mp
import numpy as np
import magnus_figutils as fig
# Worker pool shared by the parameter sweeps below (8 parallel processes).
pool = mp.Pool(8)
# Scale factors for the "A" interpolation rule.
scalefactorsA = [0.5, 1, 2, 4, 6, 8, 10, 12, 14]
#scalefactorsA = np.linspace(0,1,11) # for use with the alternative A rule
# that is downscaling the unsconst.
# flow.
pool.map(fig.solve_lin_interpol, scalefactorsA)
# The B-rule and quantile sweeps are currently disabled.
scalefactorsB = np.linspace(0, 2.5, 10)
#pool.map(fig.solve_linquant_interpol, scalefactorsB)
quantiles = [0.5, 0.8, 0.9, 0.95, 0.97, 0.99, 0.995, 0.999, 0.9995, 0.9999, 1]
#pool.map(fig.solve_quant_interpol, quantiles)
|
UTF-8
|
Python
| false | false | 2,013 |
12,240,656,813,575 |
dad674de9560e407edeabab57d743797c8d3aa1e
|
b503faaf9100bba54c173393330628c334fc0050
|
/Assignment-3/assignment3.py
|
46756c0e7a4e6fccbe432c9b029549b5e6cb3ea5
|
[] |
no_license
|
sumanth1308/AllPythonAssignment-2013
|
https://github.com/sumanth1308/AllPythonAssignment-2013
|
9d13beb70614850f799aee1b282f43b8dd478348
|
a03940321d4027fe8ad75db434bdc63cff7fd0ff
|
refs/heads/master
| 2016-09-06T10:50:40.410778 | 2013-06-04T07:00:30 | 2013-06-04T07:00:30 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
#This module solves the third python assignment of building a web crawler
from urllib import *
from urllib2 import *
from pymongo import Connection
import pymongo
import argparse
#import rest
import re
import urlparse
import time
# Module-level state shared between init() (argparse results), crawl() and the
# __main__ loop below.
url = ""
f = ""
request = None
dbname = ""
coll = ""
verbose = ""
host = ""
crawl_count = 0
# Template of the fields scraped per phone (see makeList for the actual keys).
r = {"ModelNumber":"","Manufacturer":"","OperatingSystem":"","Talktime":"","Touch":"","SecondaryCamera":"","GPS":"","Thickness":""}
#Modelnumber
#Manufacturer
#Operating system
#Talktime
#Touch
#Secondary camera
class myLogger():
    """Minimal append-only file logger.

    Opens *f_name* in append mode on construction; each write() appends one
    newline-terminated line. close() must be called to flush and release
    the underlying file handle.
    """
    f_name = ""
    f_handler = None

    def __init__(self, f_name):
        self.f_name = f_name
        self.f_handler = open(self.f_name, "a")

    def write(self, line):
        # One log entry per call, newline-terminated.
        self.f_handler.write("%s\n" % line)

    def close(self):
        self.f_handler.close()
def getPage(url):
"""Fetch *url* and return the open urllib2 response object.

Network failures and malformed URLs are re-raised as plain Exception with
a user-facing message (caught and printed by the __main__ block).
"""
try:
page = None
request = Request(url)
page = urlopen(request)
return page
except URLError as e:
raise Exception("error: Please connect to a network and try again")
except IOError as e:
raise Exception("error: Invalid URL")
def makeList(page):
"""Scrape one GSMArena phone page into a one-element list of dicts.

*page* is an open urllib2 response. The page HTML is split into lines and
each spec is located by the exact HTML of its label row (the *_l strings
below); the value is the line immediately after it, with tags stripped.
Missing specs become "N/A". Returns [dict] with keys: model_number,
manufacturer, operating_system, talktime (int hours), touch (bool),
secondary_camera (bool), gps (bool), thickness (float mm).
"""
pageString = page.read()
k=0
model_manufacturer = ""
model = ""
manufacturer = ""
os_type = ""
android_os = False
ios = False
meego_os = False
blackberry_os = False
talktime = ""
touch = ""
secondary_camera = ""
gps = ""
thickness = ""
temp = []
pageList = pageString.split("\n")
#print pageList
# Exact label-row HTML used to locate each spec via list.index().
os_l = "<td class=\"ttl\"><a href=\"glossary.php3?term=os\">OS</a></td>\r"
touch_l = "<td class=\"ttl\"><a href=\"glossary.php3?term=display-type\">Type</a></td>\r"
talktime_l = "<td class=\"ttl\"><a href=\"glossary.php3?term=talk-time\">Talk time</a></td>\r"
secondary_l = "<td class=\"ttl\"><a href=\"glossary.php3?term=video-call\">Secondary</a></td>\r"
gps_l = "<td class=\"ttl\"><a href=\"glossary.php3?term=gps\">GPS</a></td>\r"
thickness_l = "<td class=\"ttl\"><a href=# onClick=\"helpW('h_dimens.htm');\">Dimensions</a></td>\r"
# --- Operating system: normalize to a known family name. ---
try:
os_type = pageList[pageList.index(os_l)+1] #retrieveing the os type
os_type = re.sub('<[^<]+?>', '', os_type)
os_type = os_type[:-1]
if re.findall("android", os_type.lower()) != []:
os_type = "android"
elif re.findall("blackberry", os_type.lower()) != []:
os_type = "BBOS"
elif re.findall("ios", os_type.lower()) != []:
os_type = "ios"
elif re.findall("symbian", os_type.lower()) != []:
os_type = "symbian"
elif re.findall("windows", os_type.lower()) != []:
os_type = "windows phone"
elif re.findall("bada", os_type.lower()) != []:
os_type = "bada"
elif re.findall("meego", os_type.lower()) != []:
os_type = "meego"
elif re.findall("linux", os_type.lower()) != []:
os_type = "linux"
except ValueError as e:
os_type = "N/A"
# --- Talk time: extract the hour figure, trying each "/"-separated part. ---
try:
talktime = pageList[pageList.index(talktime_l)+1] #retrieveing the talktime
talktime = re.sub('<[^>]+?>', '', talktime)
talktime = talktime[:-1]
talktime = talktime.split("/")
if talktime == [''] or talktime == ['No official data']:
talktime = "N/A"
else:
m = re.findall('\d+\sh', talktime[0].lower())
if m != []:
try:
talktime = int(m[0][:-2])
except ValueError as e:
try:
m = re.findall('\d+\sh', talktime[1].lower())
if m != []:
talktime = int(m[0][:-2])
except ValueError as e:
talktime = "N/A"
else:
try:
m = re.findall('\d+\sh', talktime[1].lower())
if m != []:
talktime = int(m[0][:-2])
except ValueError as e:
talktime = "N/A"
except ValueError as e:
talktime = "N/A"
# --- Touchscreen: True when the display type mentions "touch". ---
try:
touch = pageList[pageList.index(touch_l)+1]
touch = re.sub('<[^>]+?>', '', touch)
if re.findall("touch",touch.lower()) != []:
touch = True
else:
touch = False
except ValueError as e:
touch = "N/A"
# --- GPS: True when the GPS row says "yes". ---
try:
gps = pageList[pageList.index(gps_l)+1]
gps = re.sub('<[^>]+?>', '', gps)
if re.findall("yes", gps.lower()) != []:
gps = True
else:
gps = False
except ValueError as e:
gps = "N/A"
# NOTE(review): line 96 is assumed to hold "<Manufacturer> <Model>" —
# fragile positional scrape; confirm against the live page layout.
model_manufacturer = re.sub('<[^<]+?>', '',pageList[96])
temp = model_manufacturer.split(" ",1)
manufacturer = temp[0]
model = temp[1][:-1]
# --- Front camera: True when the Secondary row says "yes". ---
try:
secondary_camera = pageList[pageList.index(secondary_l)+1]
secondary_camera = re.sub('<[^>]+?>', '', secondary_camera)
if re.findall("yes",secondary_camera.lower()) != []:
secondary_camera = True
else:
secondary_camera = False
except ValueError as e:
secondary_camera = "N/A"
# --- Thickness: third dimension ("H x W x T mm (...)") as float mm. ---
try:
thickness = pageList[pageList.index(thickness_l)+1]
thickness = re.sub('<[^>]+?>', '', thickness)
k = thickness.split("(")
if len(k) == 2:
k = k[0].split("x")[2]
k = re.findall("(.+\smm)",k)[0]
k = k[:-3]
thickness = float(k)
else:
thickness = "N/A"
except ValueError as e:
thickness = "N/A"
if talktime == ['']:
talktime = 'N/A'
model = model.lower()
manufacturer = manufacturer.lower()
os_type = os_type.lower()
rs = r = {"model_number":model,"manufacturer":manufacturer,"operating_system":os_type,"talktime":talktime,"touch":touch,"secondary_camera":secondary_camera,"gps":gps,"thickness":thickness}
return [rs]
def printrs(rs):
"""Print a scraped record followed by a visual separator (verbose mode)."""
print rs
print "----------------------------------------------------------------------------------------"
#---------------------------------------------------------------------------
def insertdb(data):
"""
Insert *data* (a list of dicts) into MongoDB and return the new ids.

Connects to `host`:27017 and uses the module-level `dbname`/`coll` set by
init(). Prints the inserted data when verbose mode is on.
"""
global host
connection = Connection(host, 27017)
db = connection[dbname]
# NOTE(review): `collection` is computed but never used; `db.coll` below
# accesses a collection literally named "coll", not db[coll] — likely a bug,
# but displaydb() reads from the same place so the two remain consistent.
collection = db[coll]
post_id=[]
posts = db.coll
post_id.append(posts.insert(data))
if verbose:
print 'inserted:',data
connection.close()
return post_id
def displaydb():
"""
Print every document stored by insertdb() as a fixed-width table.

The first document's keys form the header row; returns 'no data' when the
collection is empty.
"""
global host
connection = Connection(host, 27017)
db = connection[dbname]
collection = db[coll]
# NOTE(review): reads db.coll (collection named "coll"), matching insertdb().
posts = db.coll
if posts.count():
# Header: keys of the first document (skipping Mongo's _id).
for post in posts.find():
for key in post:
if key != '_id':
print '%22s' %(str(key)),
print
break;
# Body: one row per document.
for post in posts.find():
for key,val in post.iteritems():
if key != '_id':
print '%22s' % (str(val)),
print
else:
return 'no data'
connection.close()
#-----------------------------------------------------------------------------
def init():
"""Parse command-line flags into the module-level globals.

-f URL-list file (required), -v verbose, -d db name, -c collection name,
-n mongo host. Raises Exception when no URL file is given.
"""
parser = argparse.ArgumentParser()
global verbose
global f
global dbname
global coll
global host
parser.add_argument('-f', action='store',dest='file',type=str,default="http://",help="File containing list of urls")
parser.add_argument('-v', action='store_true',dest='Verbose',default=False,help="Toggle verbose mode, default to off")
parser.add_argument('-d', action='store',dest='dbname',type=str,default="dev_db",help="Mongo DB, databse name")
parser.add_argument('-c', action='store',dest='coll',type=str,default="all",help="Mongo DB, collection name")
parser.add_argument('-n', action='store',dest='host',type=str,default='localhost',help="Hostname on which the mongo db server is running")
parser.add_argument('--version', action='version',version="K-crawler version 1.0")
results = parser.parse_args()
if results.file:
f = results.file
else:
raise Exception("URL file not specified")
host = results.host
verbose = results.Verbose
if results.dbname:
dbname = results.dbname
if results.coll:
coll = results.coll
def crawl():
"""Scrape the module-level `url` if it is a gsmarena.com page.

Downloads the page, extracts the phone record via makeList(), stores it
with insertdb(), and prints it in verbose mode. Non-gsmarena URLs are
rejected with a hint message.
"""
rs = None
page = None
# Reduce the URL to scheme+host to check the supported site.
hostname = urlparse.urlunparse(urlparse.urlparse(url)[:2] + ("",) * 4)
if hostname == "http://www.gsmarena.com":
page = getPage(url)
rs = makeList(page)
if verbose:
printrs(rs)
insertdb(rs)
else:
print "Only specific pages from http://gsmarena.com are supported\nexample: http://www.gsmarena.com/samsung_galaxy_s_iv-5125.php"
# Entry point: parse flags, then crawl every URL listed in the -f file,
# logging progress to crawl_log.txt. Ctrl-C finalizes the log before exiting.
if __name__ == "__main__":
try:
crawl_count = 0
init()
log = myLogger("crawl_log.txt")
start_time = time.time()
log.write("crawler started "+time.ctime())
print "crawler started "+time.ctime()
handler = open(f,"r")
for line in handler:
url = line
log.write("Currently crawling through:"+url[:-1]+time.ctime())
if verbose:
print "Current page = ", url
crawl()
crawl_count += 1
end_time = time.time()
log.write("crawler terminated,"+time.ctime()+" total no. of pages crawled "+str(crawl_count)+" total time taken "+str(end_time - start_time))
print "crawler terminated,"+time.ctime()+" total no. of pages crawled "+str(crawl_count)+" total time taken "+str(end_time - start_time)
'''if verbose:
displaydb()'''
#db = dbHelper("dbname","coll")
#insertCollection(rs,db)
#retrieveCollection(db)
handler.close()
log.close()
except IOError as e:
print "error: "+f+" no such file exists"
except KeyboardInterrupt as e:
# Graceful Ctrl-C: write the summary line before exiting.
print "\nProgram interrupted"
end_time = time.time()
log.write("crawler terminated,"+time.ctime()+" total no. of pages crawled "+str(crawl_count)+" total time taken "+str(end_time - start_time))
log.close()
print "crawler terminated,"+time.ctime()+" total no. of pages crawled "+str(crawl_count)+" total time taken "+str(end_time - start_time)
'''if verbose:
displaydb()'''
except Exception as e:
print e #, type(e)
|
UTF-8
|
Python
| false | false | 2,013 |
10,926,396,821,818 |
3e5facffb2613db3f3b92249caf4e16398240db3
|
81bb8d72ba11401fee576e90bc6399ea0f50cd76
|
/repo/src/shareddatamiddleware.py
|
25beedd5ff936d6ff4abe32d62fe2e67fa3e45a4
|
[
"AGPL-3.0-only"
] |
non_permissive
|
skarphed/skarphed
|
https://github.com/skarphed/skarphed
|
e75d745cbd8f1cfc00b6902f73366f2c5f270e85
|
9c4925a5ca3c19955d411c4313491bc8bead20e5
|
refs/heads/master
| 2021-01-19T11:14:35.813766 | 2014-04-30T18:23:08 | 2014-04-30T18:23:08 | 6,285,242 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#-*- coding: utf-8 -*-
###########################################################
# © 2011 Daniel 'grindhold' Brendle and Team
#
# This file is part of Skarphed.
#
# Skarphed is free software: you can redistribute it and/or
# modify it under the terms of the GNU Affero General Public License
# as published by the Free Software Foundation, either
# version 3 of the License, or (at your option) any later
# version.
#
# Skarphed is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with Skarphed.
# If not, see http://www.gnu.org/licenses/.
###########################################################
from logger import logger
from mimetypes import guess_type
class SharedDataMiddleware(object):
"""
A WSGI middleware that provides static content.

GET requests whose path starts with /<location>/ are served from a fixed
prefix on disk; everything else is passed to the wrapped application.
"""
def __init__(self, wrap_app, location):
"""
Initializes this SharedDataMiddleware with a wrapped application and
the location of the static content (e. g. 'static').
"""
self._location = location
self._wrap_app = wrap_app
def __call__(self, environ, start_response):
"""
If the wsgi PATH_INFO starts with the static contents location, it will be returned.
Otherwise the wrapped application will be called.
"""
if environ['REQUEST_METHOD'] == 'GET' and environ['PATH_INFO'].startswith('/%s/' % self._location):
logger.info('GET from %s: %s' % (environ.get('REMOTE_ADDR', 'unknown'), environ['PATH_INFO']))
# Static files live under this fixed install prefix.
# NOTE(review): PATH_INFO is joined without sanitization — a "../"
# path could escape the prefix; confirm upstream normalizes paths.
prefix = "/usr/share/skdrepo/"
path = prefix + environ['PATH_INFO'][1:]
try:
f = open(path, 'r')
data = f.read()
f.close()
# Content-Type is guessed from the file extension.
(mime, encoding) = guess_type(path)
status = '200 OK'
response_headers = [('Content-Type', mime)]
response_body = [data]
except IOError, e:
logger.warning('failed to open file: %s' % path)
status = '404 Not Found'
response_headers = [('Content-Type', 'text/plain')]
response_body = ['404 Not Found - \'%s\'' % path]
start_response(status, response_headers)
logger.debug('response to %s: %s, %s' % (environ['REMOTE_ADDR'], status, str(response_headers)))
return response_body
else:
# Not a static-content request: delegate to the wrapped app.
return self._wrap_app(environ, start_response)
|
UTF-8
|
Python
| false | false | 2,014 |
3,461,743,688,186 |
ea1eebe9e2f30068b2ab9faa3999924d24a6e29f
|
427542e51907eee4355d5a175f674ba8cc0e8b70
|
/CS188.x/project3/Test.py
|
5b5b764aff94b0de6efa6366fe88d4560daca04b
|
[] |
no_license
|
huiwq1990/CourseHomework
|
https://github.com/huiwq1990/CourseHomework
|
66e4bfc7cce48a2dc1108cdf1931a3ccbc231757
|
877ba58bbb880f4e96641d158e85aa986ebd3ac8
|
refs/heads/master
| 2020-02-05T08:50:55.726441 | 2013-12-17T09:58:57 | 2013-12-17T09:58:57 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import util
# Scratch script exercising util.Counter. Note that `a = b` rebinds the name:
# the final print reads b's counter, where 'test' is absent (Counter-style
# containers typically default missing keys — presumably to 0; verify in util).
a = util.Counter()
a['test'] = 2
print a['test']
b = util.Counter()
b['ss'] = 2
a = b
print a['test']
|
UTF-8
|
Python
| false | false | 2,013 |
13,709,535,631,744 |
92f8b440f748ca456e3e8a77ebb471fc35839de9
|
077163df079b390ad1d3698fc463fcb8efc2a7ad
|
/tests/test_driver.py
|
c8f449a5cb97aa69795658b9397455b215f50e43
|
[
"Apache-2.0"
] |
permissive
|
anukat2015/dycapo
|
https://github.com/anukat2015/dycapo
|
abc6f387e38aac95b5b1e5fd11daf88f86cab136
|
298cd787211ee917e66301f4a25405ea7b0cf0dd
|
refs/heads/master
| 2021-01-10T23:56:22.023636 | 2011-04-19T16:02:57 | 2011-04-19T16:02:57 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
Copyright 2010 Daniel Graziotin <[email protected]>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import classes
import utils
import copy
import settings
import response_codes
class TestDriver():
"""Integration tests for the Dycapo driver workflow (py.test xunit style).

Uses two live API clients (a driver and a rider) against DYCAPO_URL;
fixed GeoRSS coordinates below describe a short trip in Bolzano.
"""
def setup_class(self):
# One driver and one rider client shared by all tests in the class.
self.driver = classes.Driver(settings.DRIVER_USERNAME,settings.DRIVER_PASSWORD,settings.DYCAPO_URL)
self.rider = classes.Rider(settings.RIDER_USERNAME,settings.RIDER_PASSWORD,settings.DYCAPO_URL)
self.driver_position = '46.490200 11.342294'
self.driver_destination = '46.500740 11.345073'
self.rider_position = '46.494957 11.340239'
self.rider_destination = '46.500891 11.344306'
def teardown_class(self):
# Optionally close the trip created by test_insert_trip.
if settings.FINISH_TRIP_AFTER_TESTS:
self.driver.finish_trip(self.driver.trip)
def setup_method(self,method):
# Reset both parties' locations before every test.
self.driver.location = classes.Location(georss_point=self.driver_position)
self.driver.destination = classes.Location(georss_point=self.driver_destination,point='dest')
self.rider.location = classes.Location(georss_point=self.rider_position)
self.rider.destination = classes.Location(georss_point=self.rider_destination,point='dest')
def test_position(self):
"""Position updates round-trip through the API and can be reverted."""
old_position = self.driver.location
new_position = classes.Location(georss_point='46.000 11.000')
self.driver.update_position(location=new_position)
response = self.driver.get_position()
assert response['value']['georss_point'] != old_position.georss_point
assert response['value']['georss_point'] == new_position.georss_point
self.driver.update_position(location=old_position)
response = self.driver.get_position()
assert response['value']['georss_point'] == old_position.georss_point
assert response['value']['georss_point'] != new_position.georss_point
def test_insert_trip(self):
"""Inserting a trip stores the destination and returns ALL_OK."""
response = self.driver.insert_trip_exp()
assert [location for location in response['value']['locations'] if location['point']=='dest'][0]['georss_point'] == self.driver_destination
assert response['code']==response_codes.ALL_OK
# Saved for the later tests and teardown.
self.driver.trip = response['value']
def test_start_trip(self):
response = self.driver.start_trip()
assert response['code'] == response_codes.ALL_OK
def test_check_ride_requests(self):
# Fresh trip: at most one pending request expected.
response = self.driver.check_ride_requests()
assert len(response['value']) <= 1
def test_accept_ride_request(self):
# rider1 never requested a ride, so the server reports NOT_FOUND.
response = self.driver.accept_ride_request({'username':'rider1'})
assert response['code'] == response_codes.NOT_FOUND
|
UTF-8
|
Python
| false | false | 2,011 |
3,341,484,587,981 |
dc807709825be63b8fc2c9b7ced1b458ab634bf7
|
136aa0127d985b7dca7fabe14c73e2f28f09f4a4
|
/plugin.video.israelive/default.py
|
1479d4ddbb9c2a9bdf851a12118cc3df8fd0ad79
|
[] |
no_license
|
eli022/xbmc-israel
|
https://github.com/eli022/xbmc-israel
|
7de7daa81450a382f37f9349ac6bd4f2a2d2d93b
|
090cf2107b4d51bd596ce054174387aba5bdadfd
|
refs/heads/master
| 2021-01-18T01:16:32.207301 | 2014-09-10T19:08:55 | 2014-09-10T19:08:55 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
#code by Avigdor
import urllib, sys, xbmcplugin ,xbmcgui, xbmcaddon, xbmc, os, json
AddonID = 'plugin.video.israelive'
Addon = xbmcaddon.Addon(AddonID)
localizedString = Addon.getLocalizedString
icon = Addon.getAddonInfo('icon')
AddonLogosDir = os.path.join(xbmc.translatePath("special://home/addons/").decode("utf-8"), AddonID, 'resources', 'logos')
libDir = os.path.join(xbmc.translatePath("special://home/addons/").decode("utf-8"), AddonID, 'resources', 'lib')
sys.path.insert(0, libDir)
import common, myFilmon, myIPTVSimple
def Categories():
"""Build the top-level directory: settings entry plus one folder per
marked channel list (the "israel" list is expanded inline)."""
addDir("[COLOR yellow][{0}][/COLOR]".format(localizedString(20101).encode('utf-8')), "settings", 10, os.path.join(AddonLogosDir, "settings.jpg"))
# Known list names; index into this maps to localized string ids 30101+.
lists = ["israel", "news", "music", "radio", "localRadio", "france", "russia", "others"]
markedLists = common.GetMarkedLists()
for listName in markedLists:
if listName == "israel":
Category(listName)
else:
addDir("[COLOR blue][{0}][/COLOR]".format(localizedString(30101 + lists.index(listName)).encode('utf-8')) , listName, 1, os.path.join(AddonLogosDir, "{0}.png".format(listName)))
def Category(categoryName):
"""List the playable channels of one category (mode 3 = filmon, 2 = direct)."""
logosDir = os.path.join(xbmc.translatePath("special://userdata/addon_data").decode("utf-8"), AddonID, 'logos')
list = common.ReadChannelsList(categoryName)
common.updateLogos(list)
for channel in list:
logoFile = os.path.join(logosDir, "{0}.png".format(channel["logo"]))
mode = 3 if channel["type"]== "filmon" else 2
addDir(channel["display_name"].encode("utf-8"), channel["url"], mode, logoFile, isFolder=False)
def SettingsCat():
"""Settings submenu: open addon settings (mode 11) or refresh IPTV (mode 12)."""
addDir(localizedString(20102).encode('utf-8'), 'settings', 11, os.path.join(AddonLogosDir, "settings.jpg"), isFolder=False)
addDir(localizedString(20103).encode('utf-8'), 'settings', 12, os.path.join(AddonLogosDir, "settings.jpg"), isFolder=False)
def RefreshIPTVlinks():
"""Regenerate the IPTV Simple playlist; restart PVR only on success."""
if myIPTVSimple.RefreshIPTVlinks():
xbmc.executebuiltin('StartPVRManager')
def PlayUrl(name, url, iconimage=None):
"""Resolve a playable list item for XBMC from a direct stream url."""
listitem = xbmcgui.ListItem(path=url, thumbnailImage=iconimage)
listitem.setInfo(type="Video", infoLabels={ "Title": name })
xbmcplugin.setResolvedUrl(int(sys.argv[1]), True, listitem)
def PlayFilmon(chNum):
"""Resolve and play a Filmon channel by its channel number; no-op on failure."""
direct, channelName, programmeName, iconimage = myFilmon.GetChannelStream(chNum)
if direct == None:
return
PlayUrl(programmeName, direct, iconimage)
def addDir(name, url, mode, iconimage, description="", isFolder=True):
"""Append one entry to the XBMC directory listing.

All arguments are round-tripped through the plugin URL query string and
read back by get_params(). Modes 2 and 3 mark the item playable.
"""
u=sys.argv[0]+"?url="+urllib.quote_plus(url)+"&mode="+str(mode)+"&name="+urllib.quote_plus(name)+"&iconimage="+urllib.quote_plus(iconimage)+"&description="+urllib.quote_plus(description)
liz = xbmcgui.ListItem(name, iconImage="DefaultFolder.png", thumbnailImage=iconimage)
liz.setInfo( type="Video", infoLabels={ "Title": name, "Plot": description} )
if (mode == 2 or mode == 3):
liz.setProperty('IsPlayable', 'true')
xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=u, listitem=liz, isFolder=isFolder)
def get_params():
    """Parse the plugin query string (sys.argv[2]) into a dict.

    Keys are lowercased; a trailing '/' (appended by some XBMC versions) is
    stripped before parsing. Returns an empty list when no parameters were
    passed — callers only probe the result inside try/except, so the
    historical empty-list default is preserved for compatibility.
    """
    param = []
    paramstring = sys.argv[2]
    if len(paramstring) >= 2:
        params = sys.argv[2]
        # BUG FIX: the original stripped the trailing slash *after* computing
        # cleanedparams (so the strip had no effect) and sliced off two
        # characters (len(params)-2) instead of one.
        if params[-1] == '/':
            params = params[:-1]
        cleanedparams = params.replace('?', '')
        pairsofparams = cleanedparams.split('&')
        param = {}
        for pair in pairsofparams:
            splitparams = pair.split('=')
            if len(splitparams) == 2:
                param[splitparams[0].lower()] = splitparams[1]
    return param
# Plugin entry: decode the query-string arguments and dispatch on `mode`.
params=get_params()
url=None
name=None
mode=None
iconimage=None
description=None
# Each parameter is optional; missing keys simply leave the default None.
try:
url = urllib.unquote_plus(params["url"])
except:
pass
try:
name = urllib.unquote_plus(params["name"])
except:
pass
try:
iconimage = urllib.unquote_plus(params["iconimage"])
except:
pass
try:
mode = int(params["mode"])
except:
pass
try:
description = urllib.unquote_plus(params["description"])
except:
pass
# Dispatch table: no mode/url -> root menu; 1 category; 2 direct play;
# 3 filmon play; 10-12 settings; 20 refresh-and-exit (skips endOfDirectory).
if mode == None or url == None or len(url) < 1:
Categories()
elif mode == 1:
Category(url)
elif mode == 2:
PlayUrl(name, url, iconimage)
elif mode == 3:
PlayFilmon(url)
elif mode== 10:
SettingsCat()
elif mode == 11:
Addon.openSettings()
elif mode == 12:
RefreshIPTVlinks()
elif mode == 20:
RefreshIPTVlinks()
sys.exit()
xbmcplugin.endOfDirectory(int(sys.argv[1]))
|
UTF-8
|
Python
| false | false | 2,014 |
16,630,113,393,480 |
76480c2a0a31e378d9611906b2ddf32f0fd758a1
|
a3ba25f2426ef7d5343a9e6bdd9cc47beb63b3df
|
/tivi/admin.py
|
17761bab89bbb55d8ef492d8bba69d7977ff2d1d
|
[] |
no_license
|
kimenye/hellodjango
|
https://github.com/kimenye/hellodjango
|
c929933c90bd44ecd7baf66c403aa30539026e00
|
2ace36fff9c9c1a25bf2daa7b8dfe4980e010842
|
refs/heads/master
| 2020-04-15T02:22:56.163091 | 2013-03-21T12:38:34 | 2013-03-21T12:38:34 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from tivi.models import Show
from django.contrib import admin
class ShowAdmin(admin.ModelAdmin):
    # Columns displayed in the admin change-list for Show.
    list_display = ('name', 'description')
# Register Show with the custom admin options above.
admin.site.register(Show, ShowAdmin)
|
UTF-8
|
Python
| false | false | 2,013 |
4,896,262,735,641 |
85e0d72e25957c3b56930b265ac45cb6a23f6d1a
|
bfb457c63878d583e8a6d6f39b520ce25b92448a
|
/sap/administracion/models.py
|
00790e16394b621b52c449574f588051ff65fedd
|
[] |
no_license
|
9andresc/proyecto-sap
|
https://github.com/9andresc/proyecto-sap
|
4b73568ea60234e6448ea8a31973b8bf0103165c
|
605596bcf4b5c7b2c0118ca22e8469b4bd7d2532
|
refs/heads/master
| 2021-05-27T16:50:21.004722 | 2014-07-05T02:34:05 | 2014-07-05T02:34:05 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.db import models
from django.contrib.auth.models import User
class Permiso(models.Model):
    """
    ::

        Describes the structure of a Permission (Permiso) instance.
        Attributes:

            nombre: name of the permission.
    """
    nombre = models.CharField(max_length=50, blank=False)

    def __unicode__(self):
        return self.nombre

    class Meta:
        ordering = ["nombre"]
class Rol(models.Model):
    """
    ::

        Describes the structure of a Role (Rol) instance.
        Attributes:

            nombre: name of the role.
            descripcion: brief description of the role.
            permisos: permissions held by the role.
    """
    nombre = models.CharField(max_length=50, blank=False)
    descripcion = models.TextField(blank=True)
    permisos = models.ManyToManyField(Permiso, blank=False)

    def __unicode__(self):
        return self.nombre

    class Meta:
        ordering = ["nombre"]
# (code, label) choices for TipoAtributo.tipo_dato. The labels are stored
# display values -- do not translate them.
TIPO_DATO = (
    (0, "Numerico"),
    (1, "Fecha"),
    (2, "Texto grande"),
    (3, "Texto chico"),
    (4, "Logico"),
    (5, "Archivo"),
)
class TipoAtributo(models.Model):
    """
    ::

        Describes the structure of an Attribute Type (TipoAtributo) instance.
        Attributes:

            nombre: name of the attribute type.
            tipo_dato: the data type it corresponds to (see TIPO_DATO).
            descripcion: brief description of the attribute type.
            num_longitud: number of non-decimal digits for a Numeric value.
            num_max: maximum value allowed for a Numeric value.
            num_min: minimum value allowed for a Numeric value.
            num_precision: decimal precision of a Numeric value.
            textg_longitud: length of a "Texto grande" (large text) value.
            textch_longitud: length of a "Texto chico" (small text) value.
            obligatorio: boolean indicating whether the attribute is required.
    """
    nombre = models.CharField(max_length=50, blank=False)
    tipo_dato = models.IntegerField(max_length=30,choices= TIPO_DATO, default=0)
    descripcion = models.TextField(blank=True)
    num_longitud = models.IntegerField(null=True, blank=True)
    num_precision = models.IntegerField(null=True, blank=True)
    num_max = models.CharField(max_length=20, null=True, blank=True)
    num_min = models.CharField(max_length=20, null=True, blank=True)
    patron_precision = models.CharField(max_length=15, null=True, blank=True)
    textg_longitud = models.IntegerField(null=True, blank=True)
    textch_longitud = models.IntegerField(null=True, blank=True)
    obligatorio = models.BooleanField(default=False, blank=True)

    def __unicode__(self):
        return self.nombre

    class Meta:
        ordering = ["nombre"]
# (code, label) choices for Proyecto.estado (project lifecycle state).
ESTADOS_PROYECTO = (
    (0, "Inactivo"),
    (1, "En Curso"),
    (2, "Finalizado"),
)
class Proyecto(models.Model):
    """
    ::

        Describes the structure of a Project (Proyecto) instance.
        Attributes:

            nombre: name of the project.
            descripcion: brief description of the project.
            fecha_inicio: start date of the project.
            estado: current state of the project (see ESTADOS_PROYECTO).
            presupuesto: total budget of the project.
            complejidad: complexity level of the project.
            usuario_lider: user leading the project.
            usuarios: users participating in the project.
            comite_de_cambios: committee that approves/rejects change requests.
            roles: roles associated with the project.
            fases: phases associated with the project (reverse FK from Fase).
    """
    nombre = models.CharField(max_length=50, blank=False)
    descripcion = models.TextField(blank=True)
    fecha_inicio = models.DateField()
    estado = models.IntegerField(max_length=30, choices=ESTADOS_PROYECTO, default=0)
    presupuesto = models.FloatField(null=True, blank=True, default=0)
    complejidad = models.IntegerField(null=True, blank=True, default=0)
    usuario_lider = models.ForeignKey(User, related_name='proyectos', null=True, blank=True)
    usuarios = models.ManyToManyField(User, related_name='usuarios_proyecto', blank=True)
    comite_de_cambios = models.ManyToManyField(User, related_name='comite_de_cambios_proyecto', blank=True)
    roles = models.ManyToManyField(Rol, related_name='roles_proyecto', null=True, blank=True)

    def __unicode__(self):
        return self.nombre

    class Meta:
        ordering = ["nombre"]
class Fase(models.Model):
    """
    ::

        Describes the structure of a Phase (Fase) instance.
        Attributes:

            nombre: name of the phase.
            descripcion: brief description of the phase.
            estado: current state of the phase (see ESTADOS_FASE).
            num_secuencia: numeric order of the phase within a project.
            fecha_inicio: start date of the phase.
            duracion: duration of the phase.
            roles: roles associated with the phase.
            proyecto: the project this phase belongs to.
    """
    # (code, label) choices for Fase.estado.
    ESTADOS_FASE = (
        (0, "Inactivo"),
        (1, "En curso"),
        (2, "Finalizada"),
    )
    nombre = models.CharField(max_length=50, blank=False)
    descripcion = models.TextField(blank=True)
    estado = models.IntegerField(max_length=1, choices=ESTADOS_FASE, default=0)
    num_secuencia = models.IntegerField(max_length=30, null=True)
    fecha_inicio = models.DateField(null=True)
    duracion = models.IntegerField(null=True, blank=True, default=0)
    roles = models.ManyToManyField(Rol, null=True, blank=True)
    proyecto = models.ForeignKey(Proyecto, related_name="fases", null=True, blank=True)

    def __unicode__(self):
        return self.nombre

    class Meta:
        ordering = ["num_secuencia"]
# (code, label) choices for the user's account state.
ESTADOS_USUARIO = (
    (0, "Activo"),
    (1, "Inactivo"),
)
# Monkey-patch extra profile fields onto django.contrib.auth's User model.
User.add_to_class('estado', models.IntegerField(max_length=30, choices=ESTADOS_USUARIO, default=0))
User.add_to_class('telefono', models.CharField(max_length=100, blank=True))
User.add_to_class('direccion', models.CharField(max_length=100, blank=True))
User.add_to_class('roles', models.ManyToManyField(Rol, null=True, blank=True))
|
UTF-8
|
Python
| false | false | 2,014 |
19,473,381,757,920 |
bece98ed6457b81fc4f3b6f9a4b9f3680deb153c
|
432eee18cc45ab88534a235298a7648c8fa41f56
|
/cbserver/process.py
|
dc42149aa8425cce40d4b82cf7df720d8841e2d6
|
[] |
no_license
|
nithintumma/cbserver
|
https://github.com/nithintumma/cbserver
|
728c5727e154b24229bbce8f2f300b13cd11dbf9
|
889c747a308573c79aebf46a62fd531431cccc92
|
refs/heads/master
| 2021-01-18T14:00:13.480051 | 2013-08-04T13:07:50 | 2013-08-04T13:07:50 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import string
import random
import mimetypes
import math
from pymongo import MongoClient
from pymongo import DESCENDING
import pymongo as pm
from bson.objectid import ObjectId
# MongoDB handles: one local client plus the collections used by each stage
# of the rating pipeline (names describe how the code below uses them).
client = MongoClient()
db = client.data
answer_queue = db.answer_queue          # raw head-to-head answers waiting to be scored
rec_collection = db.recs                # per-user Elo-style product ratings
dif_collection = db.difs                # accumulated pairwise rating differences
products_to_process = db.toprocess      # (user, product) items queued for pair processing
calc_recs = db.calcrecs                 # derived "calculated" rating vectors
product_collection = db.processedproducts  # products seen on both sides of a pair
# Identifiers for the two sides of an Elo match-up.
PLAYER_A = 1
PLAYER_B = 2
def processAnswerQueue():
answer_records = answer_queue.find()
for answer in answer_records:
print "For Facebook Id"
print answer["forFacebookId"]
rec = rec_collection.find_one({"userId": answer["forFacebookId"]})
user_id = answer["forFacebookId"]
wrong_product = answer["wrongProduct"]
chosen_product = answer["chosenProduct"]
if rec:
try:
winning_score = rec[answer["chosenProduct"]]
except:
wining_score = 1600
try:
losing_score = rec[answer['wrongProduct']]
except:
losing_score = 1600
new_chosen_score, new_wrong_score = calculate_elo_rank(winning_score, losing_score)
rec_collection.update({"_id": ObjectId(rec['_id'])}, {'$set': {answer['chosenProduct']: new_chosen_score, answer['wrongProduct']: new_wrong_score}})
print "Found rec for user"
else:
new_chosen_score, new_wrong_score = calculate_elo_rank()
print new_chosen_score
print new_wrong_score
rec_collection.insert({'userId': answer["forFacebookId"], str(answer["wrongProduct"]): new_wrong_score, str(answer["chosenProduct"]): new_chosen_score})
print "inserted new rec to database"
products_to_process.insert({"product": chosen_product, "userId": user_id})
products_to_process.insert({"product": wrong_product, "userId": user_id})
answer_queue.remove({"_id": ObjectId(answer["_id"])})
def processProductQueue():
    """Fold each queued (user, product) item into the pairwise-difference table.

    For every item, accumulate rating differences against every other product
    the same user has rated, then dequeue the item. Pair documents are keyed
    with the numerically smaller product id as product1.
    """
    product_records = products_to_process.find()
    for product_r in product_records:
        user_id = product_r["userId"]
        product_id = product_r["product"]
        user_rec = rec_collection.find_one({"userId": user_id})
        if (not user_rec):
            # NOTE(review): bails out of the whole queue, not just this item.
            print "Not a user rec"
            return
        for product, rating in user_rec.iteritems():
            # Skip bookkeeping keys and the product being processed itself.
            if (not (product == "userId" or product == "_id" or product == product_id)):
                product_1 = product_id
                product_2 = product
                if (int(product_id) > int(product)):
                    product_1 = product
                    product_2 = product_id
                dif = user_rec[product_1] - user_rec[product_2]
                # Upsert: bump the observation count and the running difference.
                dif_collection.update({"product1": product_1, "product2": product_2}, {"$inc": {"freq": 1, "dif": dif}}, True)
        products_to_process.remove({"_id": ObjectId(product_r["_id"])})
def updateUniqueProducts():
product_list_1 = dif_collection.distinct("product1")
product_list_2 = dif_collection.distinct("product2")
products = list(set(product_list_1).intersection(set(product_list_2)))
products = [{"product": id} for id in products]
print products
if (products.count > 0):
product_collection.remove()
product_collection.insert(products)
def generateCalculatedRatings(user_id):
to_update = {}
for product_r in product_collection.find():
product_id = product_r["product"]
# what to do here
user_rec = rec_collection.find_one({"userId": user_id})
if (not user_rec):
return
calc_rating = 0
calc_weight = 0
for product, rating in user_rec.iteritems():
if (not (product == "userId" or product == "_id")):
# determine if the new product was in the
#calc_rating += (rating + dif_collection.find_one()
product_1 = product_id
product_2 = product
if (int(product_id) > int(product)):
product_1 = product
product_2 = product_id
dif_doc = dif_collection.find_one({"product1": product_1, "product2": product_2})
if(dif_doc):
calc_rating += (rating + dif_doc["dif"]) * dif_doc["freq"]
calc_weight += dif_doc["freq"]
# update user calc_rec vector for given product
to_update[product_id] = calc_rating/calc_weight
print to_update
calc_recs.update({"userId": user_id}, {"$set": to_update}, True)
def calculate_elo_rank(player_a_rank=1600, player_b_rank=1600, winner=PLAYER_A, penalize_loser=True):
    """Return updated (player_a_rank, player_b_rank) after one Elo match-up.

    The winner gains K * (1 - expected_score); when penalize_loser is True
    the loser loses the same number of points (floored at 1). K shrinks as
    the winner's rating grows (32 / 24 / 16, the standard Elo tiers).
    """
    if winner is PLAYER_A:
        winner_rank, loser_rank = player_a_rank, player_b_rank
    else:
        winner_rank, loser_rank = player_b_rank, player_a_rank
    rank_diff = winner_rank - loser_rank
    # BUG FIX: under Python 2, '/ 400' on an int rank_diff is floor division,
    # which collapses every 0..399-point gap to an exponent of 0 or -1 and
    # breaks the expected-score curve. Divide by a float instead.
    exp = (rank_diff * -1) / 400.0
    odds = 1 / (1 + math.pow(10, exp))
    if winner_rank < 2100:
        k = 32
    elif winner_rank >= 2100 and winner_rank < 2400:
        k = 24
    else:
        k = 16
    new_winner_rank = round(winner_rank + (k * (1 - odds)))
    if penalize_loser:
        new_rank_diff = new_winner_rank - winner_rank
        new_loser_rank = loser_rank - new_rank_diff
    else:
        new_loser_rank = loser_rank
    # Ratings never drop below 1.
    if new_loser_rank < 1:
        new_loser_rank = 1
    if winner is PLAYER_A:
        return (new_winner_rank, new_loser_rank)
    return (new_loser_rank, new_winner_rank)
#main code
#processAnswerQueue()
#processProductQueue()
#updateUniqueProducts()
# NOTE(review): ad-hoc driver -- recomputes calculated ratings for a single
# hard-coded Facebook user id; the other pipeline stages are commented out.
generateCalculatedRatings("551733910")
|
UTF-8
|
Python
| false | false | 2,013 |
13,460,427,537,397 |
21d167f0a1ae4243317cfdec2e592b9d6aac631d
|
a5711461920334d441f6bcc9da0c4f5c42b7518c
|
/poker.py
|
0564ddadfee656270869795cd7ac0a0cb7a32b58
|
[] |
no_license
|
shunfu/enova-poker
|
https://github.com/shunfu/enova-poker
|
4714cb5f5ed512d63bad0b8215327dcfb13fa2f5
|
9efd79ceef4d927dad148fb52d640ce60b681ca9
|
refs/heads/master
| 2021-01-18T14:14:57.977053 | 2013-10-18T04:37:07 | 2013-10-18T04:37:07 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import requests
import time
import json
valuecode = {'A':14,'K':13,'Q':12,'J':11, 'T':10, '9':9, '8':8, '7':7, '6':6, '5':5, '4':4, '3':3, '2':2}
def poker(PLAYER_KEY):
    """Poll the game server once per second and act whenever it is our turn.

    Heuristic strategy: stay in with pairs, three-to-a-flush or four-in-a-row
    draws (looser requirements pre-flop, tighter later), and fold when the
    call price is too large a fraction of our stack.
    """
    # Infinite Loop
    while True:
        # Your client should sleep 1 second.
        # If you send too many requests at once, we will start throttling your requests.
        # This will cause you to reach the timeout limit which will cause your player to FOLD.
        time.sleep(1)
        # GET request.
        # Ask the server "What is going on?"
        # Gets JSON in response
        turn_data = game_state(PLAYER_KEY)
        # Logic!
        # Check if it's your turn
        if turn_data["your_turn"]:
            card1 = turn_data['hand'][0]
            card2 = turn_data['hand'][1]
            current_money = turn_data['stack']
            return_action = {}
            turn = turn_data["betting_phase"]
            #deal phase
            if turn == "deal":
                # Pre-flop: fold if calling costs more than half the stack.
                if turn_data["call_amount"] > turn_data["stack"]/2:
                    return_action = {'action_name': "fold"}
                elif ( prob_deal(card1, card2) or
                       duplicates(turn_data['hand'], []) or
                       mini_flush(turn_data['hand'], []) or
                       mini_straight(turn_data['hand'], []) ):
                    return_action = {'action_name': "call"}
                else:
                    return_action = {'action_name': "fold"}
            # flop phases
            if turn == "flop":
                # Post-flop: fold if calling costs more than 2/3 of the stack.
                if turn_data["call_amount"] > turn_data["stack"]/1.5:
                    return_action = {'action_name': "fold"}
                elif ( duplicates(turn_data['hand'], turn_data['community_cards']) or
                       mini_flush(turn_data['hand'], turn_data['community_cards']) or
                       mini_straight(turn_data['hand'], turn_data['community_cards']) ):
                    return_action = {'action_name': "call"}
                else:
                    return_action = {'action_name': "fold"}
            # other phases
            if turn == "turn" or turn == "river" or turn == "showdown":
                # Late streets: require a pair or a strong (4-card) draw.
                if ( duplicates(turn_data['hand'], turn_data['community_cards']) or
                     close_flush(turn_data['hand'], turn_data['community_cards']) or
                     close_straight(turn_data['hand'], turn_data['community_cards']) ):
                    return_action = {'action_name': "call"}
                else:
                    return_action = {'action_name': "fold"}
            # POST a request to the server
            response = player_action(PLAYER_KEY, return_action)
            # turn_data['community_cards']
def prob_deal(card1, card2):
    """True when the two hole cards are suited or within four ranks of each other."""
    rank_gap = abs(valuecode[card1[0]] - valuecode[card2[0]])
    suited = card1[1] == card2[1]
    return rank_gap < 5 or suited
def mini_flush(our_hand, community_cards):
    """True when three or more of the visible cards share a suit."""
    suits = [card[1] for card in our_hand + community_cards]
    return any(suits.count(suit) > 2 for suit in suits)
def mini_straight(our_hand, community_cards):
    """True when the visible cards contain four consecutive distinct ranks."""
    ranks = sorted(set(valuecode[card[0]] for card in our_hand + community_cards))
    # Ranks are distinct and sorted, so a span of exactly 3 across four
    # entries means they are consecutive.
    return any(ranks[i + 3] - ranks[i] == 3 for i in range(len(ranks) - 3))
def close_flush(our_hand, community_cards):
    """True when four or more of the visible cards share a suit."""
    suits = [card[1] for card in our_hand + community_cards]
    return any(suits.count(suit) > 3 for suit in suits)
def close_straight(our_hand, community_cards):
    """True when the visible cards contain five consecutive distinct ranks."""
    ranks = sorted(set(valuecode[card[0]] for card in our_hand + community_cards))
    # Distinct sorted ranks: a span of exactly 4 across five entries is a run.
    return any(ranks[i + 4] - ranks[i] == 4 for i in range(len(ranks) - 4))
def duplicates(our_hand, community_cards):
    """True when any rank appears more than once among the visible cards."""
    ranks = [card[0] for card in our_hand + community_cards]
    return any(ranks.count(rank) > 1 for rank in ranks)
"""
GETs are made to the following URL:
http://nolimitcodeem.com/sandbox/players/SANDBOX_KEY
POSTs are made to the following URL:
http://nolimitcodeem.com/sandbox/players/SANDBOX_KEY/action
To simulate how the game would look after the initial deal, use deal-phase-key as SANDBOX_KEY.
To simulate how the game would look after the flop, use flop-phase-key as SANDBOX_KEY.
To simulate how the game would look in the turn phase, use turn-phase-key as SANDBOX_KEY.
To simulate how the game would look in the final, river phase, use river-phase-key as SANDBOX_KEY.
"""
# GET
def game_state(key):
    """GET the current game state for this player key.

    Returns the parsed JSON body (hand, community cards, betting phase,
    call amount, stack, your_turn flag, ...).
    """
    # do a get request to http://nolimitcodeem.com/api/players/:key
    # get a Response object
    # return json
    # r = requests.get('http://nolimitcodeem.com/sandbox/players/deal-phase-key')
    r = requests.get('http://nolimitcodeem.com/api/players/{}'.format(key))
    return r.json()
# POST
def player_action(key, json_params):
    """POST our chosen action (e.g. {'action_name': 'call'}) for this turn.

    Returns the parsed JSON response from the game server.
    """
    # do a post request to http://nolimitcodeem.com/api/players/:key/action
    # get a Response object
    # return json
    headers = {'Content-type': 'application/json'}
    # r = requests.post('http://nolimitcodeem.com/sandbox/players/deal-phase-key/action',
    r = requests.post('http://nolimitcodeem.com/api/players/{}/action'.format(key),
                      data=json.dumps(json_params),
                      headers=headers)
    return r.json()
def main():
    """Entry point: run the polling loop with our tournament player key."""
    # the key is generated when we register for the tournament
    our_key = '11f55134-1e72-4111-9dc6-38824de1a002'
    if our_key:
        poker(our_key)
    else:
        print "No key entered!"

if __name__ == '__main__':
    main()
|
UTF-8
|
Python
| false | false | 2,013 |
11,398,843,249,857 |
6e66e29c39865df971b41e368bcb1f7198ea8c3b
|
384d0be5ac54b306b945cf38c10d9b0a44c975ea
|
/stack/nova/nova/scheduler/least_cost.py
|
903d786cd37a05334bb0ceced26650317bf421c4
|
[
"Apache-2.0"
] |
permissive
|
ashokcse/openstack-bill
|
https://github.com/ashokcse/openstack-bill
|
05ae313637b3cfecba946d2a9b32e8c7609fc721
|
1a3d7575d4b341f64fa1764ed47e47a7504a9bcc
|
refs/heads/master
| 2021-01-18T14:05:24.696165 | 2012-09-12T11:29:20 | 2012-09-12T11:29:20 | 5,424,267 | 5 | 2 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Copyright (c) 2011 Openstack, LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Least Cost Scheduler is a mechanism for choosing which host machines to
provision a set of resources to. The input of the least-cost-scheduler is a
set of objective-functions, called the 'cost-functions', a weight for each
cost-function, and a list of candidate hosts (gathered via FilterHosts).
The cost-function and weights are tabulated, and the host with the least cost
is then selected for provisioning.
"""
import collections
from nova import flags
from nova import log as logging
from nova.scheduler import base_scheduler
from nova import utils
from nova import exception
LOG = logging.getLogger('nova.scheduler.least_cost')
FLAGS = flags.FLAGS
flags.DEFINE_list('least_cost_scheduler_cost_functions',
['nova.scheduler.least_cost.noop_cost_fn'],
'Which cost functions the LeastCostScheduler should use.')
# TODO(sirp): Once we have enough of these rules, we can break them out into a
# cost_functions.py file (perhaps in a least_cost_scheduler directory)
flags.DEFINE_integer('noop_cost_fn_weight', 1,
'How much weight to give the noop cost function')
flags.DEFINE_integer('compute_fill_first_cost_fn_weight', 1,
'How much weight to give the fill-first cost function')
def noop_cost_fn(host):
    """Constant cost function: every host gets a pre-weight score of 1."""
    return 1
def compute_fill_first_cost_fn(host):
    """Score a host by its free RAM so fuller hosts (lower score) win.

    filter_hosts has already excluded hosts without enough RAM, so
    preferring low free memory is safe.
    """
    _hostname, service = host
    compute_caps = service.get("compute", {})
    return compute_caps.get("host_memory_free", 0)
def normalize_list(L):
    """Scale L by its maximum so every element satisfies 0 <= e <= 1.

    Empty lists and lists whose maximum is not positive are returned
    unchanged.
    """
    if not L:
        return L
    peak = max(L)
    if peak <= 0:
        return L
    return [float(item) / peak for item in L]
def weighted_sum(domain, weighted_fns, normalize=True):
    """Score each element of *domain* by a weighted sum of cost functions.

    weighted_fns is a list of (weight, fn) pairs. Each fn is applied to
    every element; its scores are optionally normalized to [0, 1] (so the
    weights stay meaningful regardless of each function's range) and then
    scaled by the weight. Returns the per-element totals in domain order;
    pair them with hosts via zip(scores, hosts).
    """
    # per_elem maps element index -> list of weighted scores, one per fn.
    per_elem = collections.defaultdict(list)
    for weight, fn in weighted_fns:
        raw_scores = [fn(elem) for elem in domain]
        scores = normalize_list(raw_scores) if normalize else raw_scores
        for idx, value in enumerate(scores):
            per_elem[idx].append(value * weight)
    # Total each element's contributions, in element order.
    return [sum(per_elem[idx]) for idx in sorted(per_elem)]
class LeastCostScheduler(base_scheduler.BaseScheduler):
    """Scheduler that weighs hosts by a weighted sum of flag-configured cost
    functions; the lowest-cost host wins."""

    def __init__(self, *args, **kwargs):
        # Per-topic cache of resolved (weight, cost_fn) pairs.
        self.cost_fns_cache = {}
        super(LeastCostScheduler, self).__init__(*args, **kwargs)

    def get_cost_fns(self, topic):
        """Returns a list of tuples containing weights and cost functions to
        use for weighing hosts
        """
        if topic in self.cost_fns_cache:
            return self.cost_fns_cache[topic]
        cost_fns = []
        for cost_fn_str in FLAGS.least_cost_scheduler_cost_functions:
            if '.' in cost_fn_str:
                short_name = cost_fn_str.split('.')[-1]
            else:
                # Bare names are resolved relative to this class's module.
                short_name = cost_fn_str
                cost_fn_str = "%s.%s.%s" % (
                        __name__, self.__class__.__name__, short_name)
            # Only keep functions for this topic, plus topic-agnostic noops.
            if not (short_name.startswith('%s_' % topic) or
                    short_name.startswith('noop')):
                continue
            try:
                # NOTE(sirp): import_class is somewhat misnamed since it can
                # any callable from a module
                cost_fn = utils.import_class(cost_fn_str)
            except exception.ClassNotFound:
                raise exception.SchedulerCostFunctionNotFound(
                        cost_fn_str=cost_fn_str)
            try:
                # Each cost function's weight comes from a '<name>_weight' flag.
                flag_name = "%s_weight" % cost_fn.__name__
                weight = getattr(FLAGS, flag_name)
            except AttributeError:
                raise exception.SchedulerWeightFlagNotFound(
                        flag_name=flag_name)
            cost_fns.append((weight, cost_fn))
        self.cost_fns_cache[topic] = cost_fns
        return cost_fns

    def weigh_hosts(self, topic, request_spec, hosts):
        """Returns a list of dictionaries of form:
           [ {weight: weight, hostname: hostname, capabilities: capabs} ]
        """
        cost_fns = self.get_cost_fns(topic)
        costs = weighted_sum(domain=hosts, weighted_fns=cost_fns)
        weighted = []
        weight_log = []
        for cost, (hostname, service) in zip(costs, hosts):
            caps = service[topic]
            weight_log.append("%s: %s" % (hostname, "%.2f" % cost))
            weight_dict = dict(weight=cost, hostname=hostname,
                               capabilities=caps)
            weighted.append(weight_dict)
        LOG.debug(_("Weighted Costs => %s") % weight_log)
        return weighted
|
UTF-8
|
Python
| false | false | 2,012 |
4,277,787,442,139 |
2577fae975d20ffb52fb56c953e903f6a7731e86
|
c5568423763a87d9e6540a6b887a10e85a7e3a9e
|
/render_2013_actual.py
|
e37e44747150a11fd0435f75653a9ff1195d6f53
|
[] |
no_license
|
samanthanakata/redistricter
|
https://github.com/samanthanakata/redistricter
|
18274fee5e684a0e510a2dbd5a6aba4febc75c06
|
66847e370c486513f1680b85aea35c31a693560f
|
refs/heads/master
| 2020-12-31T00:18:29.678211 | 2013-05-27T21:24:51 | 2013-05-27T21:24:51 | 48,717,575 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
"""Writes out per-state map images to current directory.
Usage:
render_2013_actual.py --datadir=/Volumes/bulktogo/redata/2010 --districtdir=/Volumes/bulktogo/redata/2010/2013_actual/cd113
"""
# drend --csv-solution=09_CT_CD113.txt -P=ct.pb --mppb=CT.mppb --pngout=ct113.png
import csv
import logging
import optparse
import os
import re
import subprocess
import sys
from analyze_submissions import getStatesCsvSources, processActualsSource
from newerthan import newerthan, any_newerthan
import states
def csvToSimpleCsv(csvpath, outpath):
    """Convert a block-assignment CSV whose district labels may be arbitrary
    strings ('00A', '01A', 'MISC', ...) into one with simple 1-based numeric
    district numbers.

    The mapping is deterministic: distinct labels are sorted and numbered in
    order. (Python 2: files are opened in binary mode for the csv module.)
    """
    fin = open(csvpath, 'rb')
    reader = csv.reader(fin)
    row = reader.next()
    # expect header row either:
    # BLOCKID,CD113
    # BLOCKID,DISTRICT,NAME
    assert row[0] == 'BLOCKID'
    assert ((row[1] == 'CD113') or (row[1] == 'DISTRICT'))
    unmapped = []
    districts = set()
    for row in reader:
        unmapped.append( (row[0], row[1]) )
        districts.add(row[1])
    fin.close()
    dl = list(districts)
    dl.sort()
    dmap = {}
    for i, dname in enumerate(dl):
        dmap[dname] = i + 1  # 1 based csv file district numbering
    fout = open(outpath, 'wb')
    writer = csv.writer(fout)
    for blockid, dname in unmapped:
        writer.writerow( (blockid, dmap[dname]) )
    fout.close()
# Cached absolute path to the 'drend' rendering binary (lazily resolved).
_drendpath = None


def drendpath():
    """Return the path of the 'drend' binary next to this script, cached.

    Exits the process with status 1 when the binary is missing.
    """
    global _drendpath
    if _drendpath is None:
        # BUG FIX: 'srcdir_' was only ever defined as a local inside main(),
        # so reading it here raised NameError. Compute the script directory
        # directly instead.
        srcdir = os.path.dirname(os.path.abspath(__file__))
        _drendpath = os.path.join(srcdir, 'drend')
        if not os.path.exists(_drendpath):
            # BUG FIX: was logging the function object ('drendpath') instead
            # of the missing path.
            logging.error('no drend binary at %r', _drendpath)
            sys.exit(1)
    return _drendpath
def main():
    """Resolve each state's actual-district CSV and render its map image."""
    # NOTE(review): srcdir_ is computed here but never used within main().
    srcdir_ = os.path.dirname(os.path.abspath(__file__))
    op = optparse.OptionParser()
    op.add_option('--datadir', dest='datadir')
    op.add_option('--districtdir', dest='distdir', help='where the district CSV files live')
    options, args = op.parse_args()
    # Map state code -> source CSV; anyError flags any unusable state.
    stDistFiles, anyError = getStatesCsvSources(options.distdir)
    #for k,v in stDistFiles.iteritems():
    #  print '%s\t%s' % (k, v)
    if anyError:
        sys.exit(1)
    for stu, sourceCsvFname in stDistFiles.iteritems():
        print stu
        stl = stu.lower()
        # common datadir inputs
        stdir = os.path.join(options.datadir, stu)
        pb = os.path.join(stdir, stl + '.pb')
        mppb = os.path.join(stdir, stu + '.mppb')
        zipname = os.path.join(stdir, 'zips', stl + '2010.pl.zip')
        processActualsSource(options.distdir, stu, sourceCsvFname, pb, mppb, zipname)

if __name__ == '__main__':
    main()
|
UTF-8
|
Python
| false | false | 2,013 |
10,651,518,922,363 |
7bc9944f56949c2d03e7ddb5f18c564ef65806e4
|
69d02f27b624d732451d8be3fdfb8ec5dbfaf3d8
|
/doj/backends/zxjdbc/sybase/operations.py
|
2dc5eb400f0e95d63d9ed910816f3f51b190d4c7
|
[
"Unlicense"
] |
permissive
|
VanyaDNDZ/doj-sybase
|
https://github.com/VanyaDNDZ/doj-sybase
|
e5a3f4fee0c2294bbe0f134e48bb35285fa19f22
|
c438eb9b731e8180c070f4b6d641f9d220bbe132
|
refs/heads/master
| 2016-09-05T14:05:46.821095 | 2014-03-11T09:09:29 | 2014-03-11T09:09:29 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.db.backends import BaseDatabaseOperations
import datetime
import time
from doj.backends.zxjdbc.sybase import query
class DatabaseOperations(BaseDatabaseOperations):
    """SQL-generation overrides for the Sybase / SQL Server zxJDBC backend."""
    # Define the parts of an ODBC date string
    # so we can do substring operations to match
    compiler_module = "doj.backends.zxjdbc.sybase.compiler"
    DATE_PARTS = {'year': (1, 4),
                  'month': (6, 2),
                  'day': (9, 2)}

    def regex_lookup(self, lookup_type):
        """
        Returns the string to use in a query when performing regular expression
        lookups (using "regex" or "iregex"). The resulting string should
        contain a '%s' placeholder for the column being searched against.

        If the feature is not supported (or part of it is not supported), a
        NotImplementedError exception can be raised.
        """
        # Delegates matching to a user-installed dbo.regex() SQL function;
        # the doubled '%%s' survive the interpolation below as literal '%s'
        # placeholders for Django's later parameter substitution.
        if lookup_type == 'regex':
            ignore_case = 0
        else:
            ignore_case = 1
        return "dbo.regex(%%s, %%s, %s) = 1" % ignore_case

    def start_transaction_sql(self):
        """
        Returns the SQL statement required to start a transaction.
        """
        return "BEGIN TRANSACTION"

    def date_extract_sql(self, lookup_type, field_name):
        # Pull year/month/day out of the ODBC date string by substring
        # position (see DATE_PARTS), then cast the digits to INT.
        start, end = self.DATE_PARTS[lookup_type]
        return "CONVERT(INT, SUBSTRING(%s, %s, %s))" % (self.quote_name(field_name), start, end)

    def _unquote_fieldname(self, fieldname):
        '''
        Try to unquote the fieldname so that SQL Server doesn't assign a
        weird semi-random name to the converted column.

        We *only* return the column name part though - we drop the table name.

        This method is really only used by the date_trunc_sql method and isn't meant
        for any other use.
        '''
        assert fieldname.startswith('[') and fieldname.endswith(']')
        short_name = fieldname.split('.')[-1][1:-1]
        return short_name

    def date_trunc_sql(self, lookup_type, field_name):
        # Truncate a datetime to a year/month/day boundary by rebuilding a
        # 'YYYY/MM/01'-style string and converting it back to datetime.
        quoted_field_name = self.quote_name(field_name)
        short_name = self.quote_name(self._unquote_fieldname(quoted_field_name))
        sql_dict = {'name': quoted_field_name, 'short_name': short_name}
        if lookup_type == 'year':
            return "CONVERT(datetime, CONVERT(varchar, DATEPART(year, %(name)s)) + '/01/01') AS %(short_name)s" % sql_dict
        if lookup_type == 'month':
            return "CONVERT(datetime, CONVERT(varchar, DATEPART(year, %(name)s)) + '/' + CONVERT(varchar, DATEPART(month, %(name)s)) + '/01') AS %(short_name)s" % \
                sql_dict
        if lookup_type == 'day':
            return "CONVERT(datetime, CONVERT(varchar(12), %(name)s)) AS %(short_name)s" % sql_dict

    def last_insert_id(self, cursor, table_name, pk_name):
        # @@IDENTITY is the last identity value generated on this connection.
        cursor.execute("SELECT @@IDENTITY as PK")
        rez = cursor.fetchone()[0]
        return rez

    def fetch_returned_insert_id(self, cursor):
        # Reach through the zxJDBC wrapper to the underlying JDBC statement's
        # generated-keys result set.
        resultset = cursor.cursor.statement.__statement__.getReturnResultSet()
        resultset.next()
        return int(resultset.getObject(1).toString())

    def no_limit_value(self):
        # This backend expresses "no limit" by omitting the clause entirely.
        return None

    def prep_for_like_query(self, x):
        """Prepares a value for use in a LIKE query."""
        # Escape backslashes and the LIKE wildcards % _ [ ] in the literal.
        from django.utils.encoding import smart_unicode
        return (
            smart_unicode(x).\
            replace("\\", "\\\\").\
            replace("%", "\%").\
            replace("_", "\_").\
            replace("[", "\[").\
            replace("]", "\]")
        )

    def query_class(self, DefaultQueryClass):
        # Wrap Django's default Query class with backend-specific behavior.
        return query.query_class(DefaultQueryClass)

    def quote_name(self, name):
        # Quote identifiers with [brackets]; leave already-quoted names and
        # CONVERT()-wrapped expressions untouched.
        if 'CONVERT(' in name:
            # SQL Server has a fragile parser. If we'v already applied CONVERT on a column, treat this
            # column as pre-quoted. No - it doesn't make any sense. Don't think too hard about this.
            return name
        if name.startswith('[') and name.endswith(']'):
            return name  # already quoted
        return '[%s]' % name

    def random_function_sql(self):
        return 'RAND()'

    def sql_flush(self, style, tables, sequences):
        """
        Returns a list of SQL statements required to remove all data from
        the given database tables (without actually removing the tables
        themselves).

        The `style` argument is a Style object as returned by either
        color_style() or no_style() in django.core.management.color.

        Originally taken from django-pyodbc project.
        """
        if not tables:
            return list()
        qn = self.quote_name
        # Cannot use TRUNCATE on tables that are referenced by a FOREIGN KEY; use DELETE instead.
        # (which is slow)
        from django.db import connection
        cursor = connection.cursor()
        # Try to minimize the risks of the braindeaded inconsistency in
        # DBCC CHEKIDENT(table, RESEED, n) behavior.
        seqs = []
        for seq in sequences:
            cursor.execute("SELECT COUNT(*) FROM %s" % qn(seq["table"]))
            rowcnt = cursor.fetchone()[0]
            elem = dict()
            # Reseed start depends on whether the table currently has rows.
            if rowcnt:
                elem['start_id'] = 0
            else:
                elem['start_id'] = 1
            elem.update(seq)
            seqs.append(elem)
        # Collect every FK constraint so they can be disabled around the
        # DELETEs and re-enabled afterwards.
        cursor.execute("SELECT TABLE_NAME, CONSTRAINT_NAME FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS")
        fks = cursor.fetchall()
        sql_list = list()
        # Turn off constraints.
        sql_list.extend(['ALTER TABLE %s NOCHECK CONSTRAINT %s;' % (
            qn(fk[0]), qn(fk[1])) for fk in fks])
        # Delete data from tables.
        sql_list.extend(['%s %s %s;' % (
            style.SQL_KEYWORD('DELETE'),
            style.SQL_KEYWORD('FROM'),
            style.SQL_FIELD(qn(t))
            ) for t in tables])
        # Reset the counters on each table.
        sql_list.extend(['%s %s (%s, %s, %s) %s %s;' % (
            style.SQL_KEYWORD('DBCC'),
            style.SQL_KEYWORD('CHECKIDENT'),
            style.SQL_FIELD(qn(seq["table"])),
            style.SQL_KEYWORD('RESEED'),
            style.SQL_FIELD('%d' % seq['start_id']),
            style.SQL_KEYWORD('WITH'),
            style.SQL_KEYWORD('NO_INFOMSGS'),
            ) for seq in seqs])
        # Turn constraints back on.
        sql_list.extend(['ALTER TABLE %s CHECK CONSTRAINT %s;' % (
            qn(fk[0]), qn(fk[1])) for fk in fks])
        return sql_list

    def tablespace_sql(self, tablespace, inline=False):
        return "ON %s" % self.quote_name(tablespace)

    def value_to_db_datetime(self, value):
        if value is None:
            return None
        # SQL Server 2005 doesn't support microseconds
        return value.replace(microsecond=0)

    def value_to_db_time(self, value):
        # MS SQL 2005 doesn't support microseconds
        # ...but it also doesn't really suport bare times
        if value is None:
            return None
        return value.replace(microsecond=0)

    def value_to_db_decimal(self, value, max_digits, decimal_places):
        if value is None or value == '':
            return None
        return value  # Should be a decimal type (or string)

    def year_lookup_bounds(self, value):
        """
        Returns a two-elements list with the lower and upper bound to be used
        with a BETWEEN operator to query a field value using a year lookup

        `value` is an int, containing the looked-up year.
        """
        first = '%s-01-01 00:00:00'
        second = '%s-12-31 23:59:59'
        return [first % value, second % value]

    def field_cast_sql(self, db_type):
        """
        Given a column type (e.g. 'BLOB', 'VARCHAR'), returns the SQL necessary
        to cast it before using it in a WHERE statement. Note that the
        resulting string should contain a '%s' placeholder for the column being
        searched against.
        """
        return '%s'
        # NOTE(review): everything below the early return above is
        # unreachable -- the datetime-specific CONVERTs appear to have been
        # deliberately disabled.
        if db_type is None:
            return '%s'
        if 'DATETIME' == db_type.upper():
            # We need to convert date and datetime columns into
            # ODBC canonical format.
            # See: http://msdn.microsoft.com/en-us/library/ms187928.aspx
            return "CONVERT(varchar, %s, 120)"
        elif 'SMALLDATETIME' == db_type.upper():
            return "SUBSTRING(CONVERT(varchar, %s, 120), 1, 10)"
        return '%s'
|
UTF-8
|
Python
| false | false | 2,014 |
9,122,510,580,776 |
4055174164963038e94b79bd62e8bb986f0173eb
|
df10a8fc261c7ed3fe88ed217e405335df3c14b1
|
/two/data/stats3.py
|
c7dc4c1af10079fb84cd53528e37896244fcd733
|
[] |
no_license
|
bgianfo/pc
|
https://github.com/bgianfo/pc
|
2949303c7120336fc38080ae1466b948f93442fd
|
e8fb8c5832838b291cb327ba92b0f011f28720da
|
refs/heads/master
| 2021-01-19T06:34:42.201463 | 2011-02-21T04:49:28 | 2011-02-21T04:49:28 | 1,148,295 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
def speedup( seq, par ):
return seq/par
def eff( speed, k ):
return speed/k
def esdf( seq, par, k ):
return (k*par-seq)/(k*seq-seq)
seqen = 486774.0
od = 408043.0
td = 210629.0
fd = 131618.0
ed = 66904.0
print "test"
print "#1 %s %s " % ( speedup(seqen,od), eff(speedup(seqen,od),1) )
print "#2 %s %s %s" % ( speedup(seqen,td), eff(speedup(seqen,td),2), esdf(seqen,td,2) )
print "#4 %s %s %s" % ( speedup(seqen,fd), eff(speedup(seqen,fd),4), esdf(seqen,fd,4) )
print "#8 %s %s %s" % ( speedup(seqen,ed), eff(speedup(seqen,ed),8), esdf(seqen,ed,8) )
|
UTF-8
|
Python
| false | false | 2,011 |
9,887,014,739,400 |
2b23761606e44f8752984f41e270b2d24ae91e6b
|
174aa0025a4f69e0de93774b5ced3b01f4d4b58b
|
/23.py
|
34b667ddf423c2db935afa872e230d90c397fb28
|
[] |
no_license
|
nicgirault/euler
|
https://github.com/nicgirault/euler
|
2fc11fac7d65ec6194c8aa832d7e90dfeb221f21
|
a06c11fc70cdac192bb98b028a15935bec89cb74
|
refs/heads/master
| 2020-05-18T15:49:49.391616 | 2014-08-02T14:53:53 | 2014-08-02T14:53:53 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#s = {}
#for x in range(28123)[2:]:
# div = []
# for d in range(x)[1:]:
# if x%d == 0 and d != x:
# div.append(d)
# s[x] = sum(div)
#
#ab = []
#for x in s.keys():
# if s[x] < 28123 and s[x]>1 and s[x] > x:
# ab.append(x)
#print ab
ab = [12, 18, 20, 24, 30, 36, 40, 42, 48, 54, 56, 60, 66, 70, 72, 78, 80, 84, 88, 90, 96, 100, 102, 104, 108, 112, 114, 120, 126, 132, 138, 140, 144, 150, 156, 160, 162, 168, 174, 176, 180, 186, 192, 196, 198, 200, 204, 208, 210, 216, 220, 222, 224, 228, 234, 240, 246, 252, 258, 260, 264, 270, 272, 276, 280, 282, 288, 294, 300, 304, 306, 308, 312, 318, 320, 324, 330, 336, 340, 342, 348, 350, 352, 354, 360, 364, 366, 368, 372, 378, 380, 384, 390, 392, 396, 400, 402, 408, 414, 416, 420, 426, 432, 438, 440, 444, 448, 450, 456, 460, 462, 464, 468, 474, 476, 480, 486, 490, 492, 498, 500, 504, 510, 516, 520, 522, 528, 532, 534, 540, 544, 546, 550, 552, 558, 560, 564, 570, 572, 576, 580, 582, 588, 594, 600, 606, 608, 612, 616, 618, 620, 624, 630, 636, 640, 642, 644, 648, 650, 654, 660, 666, 672, 678, 680, 684, 690, 696, 700, 702, 704, 708, 714, 720, 726, 728, 732, 736, 738, 740, 744, 748, 750, 756, 760, 762, 768, 770, 774, 780, 784, 786, 792, 798, 800, 804, 810, 812, 816, 820, 822, 828, 832, 834, 836, 840, 846, 852, 858, 860, 864, 868, 870, 876, 880, 882, 888, 894, 896, 900, 906, 910, 912, 918, 920, 924, 928, 930, 936, 940, 942, 945, 948, 952, 954, 960, 966, 968, 972, 978, 980, 984, 990, 992, 996, 1000, 1002, 1008, 1014, 1020, 1026, 1032, 1036, 1038, 1040, 1044, 1050, 1056, 1060, 1062, 1064, 1068, 1074, 1080, 1086, 1088, 1092, 1098, 1100, 1104, 1110, 1116, 1120, 1122, 1128, 1134, 1140, 1144, 1146, 1148, 1152, 1158, 1160, 1164, 1170, 1176, 1180, 1182, 1184, 1188, 1190, 1194, 1200, 1204, 1206, 1212, 1216, 1218, 1220, 1224, 1230, 1232, 1236, 1240, 1242, 1248, 1254, 1260, 1266, 1272, 1278, 1280, 1284, 1288, 1290, 1296, 1300, 1302, 1308, 1312, 1314, 1316, 1320, 1326, 1330, 1332, 1338, 1340, 1344, 1350, 1352, 1356, 1360, 1362, 1368, 1372, 1374, 1376, 1380, 1386, 1392, 1398, 1400, 1404, 1408, 1410, 1416, 1420, 1422, 1428, 1430, 1434, 1440, 1446, 1452, 1456, 1458, 1460, 1464, 1470, 1472, 1476, 1480, 1482, 1484, 1488, 1494, 1496, 1500, 1504, 1506, 1512, 1518, 1520, 1524, 1530, 
1536, 1540, 1542, 1548, 1554, 1560, 1566, 1568, 1572, 1575, 1578, 1580, 1584, 1590, 1596, 1600, 1602, 1608, 1610, 1614, 1620, 1624, 1626, 1632, 1638, 1640, 1644, 1650, 1652, 1656, 1660, 1662, 1664, 1668, 1672, 1674, 1680, 1686, 1692, 1696, 1698, 1700, 1704, 1708, 1710, 1716, 1720, 1722, 1728, 1734, 1736, 1740, 1746, 1750, 1752, 1758, 1760, 1764, 1768, 1770, 1776, 1780, 1782, 1788, 1792, 1794, 1800, 1806, 1812, 1818, 1820, 1824, 1830, 1836, 1840, 1842, 1848, 1854, 1856, 1860, 1866, 1870, 1872, 1876, 1878, 1880, 1884, 1888, 1890, 1896, 1900, 1902, 1904, 1908, 1914, 1920, 1926, 1932, 1936, 1938, 1940, 1944, 1950, 1952, 1956, 1960, 1962, 1968, 1974, 1976, 1980, 1984, 1986, 1988, 1992, 1998, 2000, 2002, 2004, 2010, 2016, 2020, 2022, 2024, 2028, 2030, 2034, 2040, 2044, 2046, 2052, 2058, 2060, 2064, 2070, 2072, 2076, 2080, 2082, 2088, 2090, 2094, 2100, 2106, 2112, 2118, 2120, 2124, 2128, 2130, 2136, 2140, 2142, 2148, 2154, 2156, 2160, 2166, 2170, 2172, 2176, 2178, 2180, 2184, 2190, 2196, 2200, 2202, 2205, 2208, 2210, 2212, 2214, 2220, 2226, 2232, 2238, 2240, 2244, 2250, 2256, 2260, 2262, 2268, 2274, 2280, 2286, 2288, 2292, 2296, 2298, 2300, 2304, 2310, 2316, 2320, 2322, 2324, 2328, 2334, 2340, 2346, 2352, 2358, 2360, 2364, 2368, 2370, 2376, 2380, 2382, 2388, 2392, 2394, 2400, 2406, 2408, 2412, 2418, 2420, 2424, 2430, 2432, 2436, 2440, 2442, 2448, 2450, 2454, 2460, 2464, 2466, 2470, 2472, 2478, 2480, 2484, 2490, 2492, 2496, 2500, 2502, 2508, 2514, 2520, 2526, 2530, 2532, 2538, 2540, 2544, 2548, 2550, 2552, 2556, 2560, 2562, 2568, 2574, 2576, 2580, 2584, 2586, 2590, 2592, 2598, 2600, 2604, 2610, 2616, 2620, 2622, 2624, 2628, 2632, 2634, 2640, 2646, 2652, 2658, 2660, 2664, 2670, 2676, 2680, 2682, 2688, 2694, 2700, 2704, 2706, 2712, 2716, 2718, 2720, 2724, 2728, 2730, 2736, 2740, 2742, 2744, 2748, 2750, 2752, 2754, 2760, 2766, 2772, 2778, 2780, 2784, 2790, 2796, 2800, 2802, 2808, 2814, 2816, 2820, 2826, 2828, 2832, 2835, 2838, 2840, 2844, 2850, 2856, 2860, 2862, 2868, 2870, 
2874, 2880, 2884, 2886, 2892, 2898, 2900, 2904, 2910, 2912, 2916, 2920, 2922, 2928, 2934, 2940, 2944, 2946, 2952, 2958, 2960, 2964, 2968, 2970, 2976, 2980, 2982, 2988, 2990, 2992, 2994, 2996, 3000, 3006, 3008, 3010, 3012, 3016, 3018, 3020, 3024, 3030, 3036, 3040, 3042, 3048, 3052, 3054, 3060, 3066, 3072, 3078, 3080, 3084, 3090, 3096, 3100, 3102, 3108, 3114, 3120, 3126, 3128, 3132, 3136, 3138, 3140, 3144, 3150, 3156, 3160, 3162, 3164, 3168, 3174, 3180, 3186, 3190, 3192, 3198, 3200, 3204, 3210, 3216, 3220, 3222, 3224, 3228, 3230, 3234, 3240, 3246, 3248, 3250, 3252, 3256, 3258, 3260, 3264, 3270, 3276, 3280, 3282, 3288, 3290, 3294, 3300, 3304, 3306, 3312, 3318, 3320, 3324, 3328, 3330, 3332, 3336, 3340, 3342, 3344, 3348, 3354, 3360, 3366, 3372, 3378, 3380, 3384, 3388, 3390, 3392, 3396, 3400, 3402, 3408, 3410, 3414, 3416, 3420, 3426, 3430, 3432, 3438, 3440, 3444, 3450, 3456, 3460, 3462, 3465, 3468, 3472, 3474, 3480, 3486, 3492, 3496, 3498, 3500, 3504, 3510, 3516, 3520, 3522, 3528, 3534, 3536, 3540, 3546, 3552, 3556, 3558, 3560, 3564, 3570, 3576, 3580, 3582, 3584, 3588, 3594, 3600, 3606, 3608, 3612, 3618, 3620, 3624, 3630, 3636, 3640, 3642, 3648, 3654, 3660, 3666, 3668, 3672, 3678, 3680, 3684, 3690, 3696, 3700, 3702, 3708, 3710, 3712, 3714, 3720, 3724, 3726, 3732, 3738, 3740, 3744, 3750, 3752, 3756, 3760, 3762, 3768, 3770, 3774, 3776, 3780, 3784, 3786, 3792, 3798, 3800, 3804, 3808, 3810, 3816, 3820, 3822, 3828, 3834, 3836, 3840, 3846, 3848, 3850, 3852, 3858, 3860, 3864, 3870, 3872, 3876, 3880, 3882, 3888, 3892, 3894, 3900, 3904, 3906, 3912, 3918, 3920, 3924, 3930, 3936, 3940, 3942, 3944, 3948, 3952, 3954, 3960, 3966, 3968, 3972, 3976, 3978, 3980, 3984, 3990, 3996, 4000, 4002, 4004, 4008, 4014, 4020, 4026, 4030, 4032, 4038, 4040, 4044, 4048, 4050, 4056, 4060, 4062, 4068, 4070, 4074, 4080, 4086, 4088, 4092, 4095, 4098, 4100, 4104, 4110, 4116, 4120, 4122, 4128, 4130, 4134, 4136, 4140, 4144, 4146, 4152, 4158, 4160, 4164, 4170, 4172, 4176, 4180, 4182, 4188, 4194, 4200, 4206, 
4212, 4216, 4218, 4220, 4224, 4228, 4230, 4236, 4240, 4242, 4248, 4254, 4256, 4260, 4264, 4266, 4270, 4272, 4278, 4280, 4284, 4288, 4290, 4296, 4300, 4302, 4308, 4312, 4314, 4320, 4326, 4332, 4338, 4340, 4344, 4350, 4352, 4356, 4360, 4362, 4368, 4374, 4380, 4386, 4392, 4396, 4398, 4400, 4404, 4408, 4410, 4416, 4420, 4422, 4424, 4428, 4434, 4440, 4446, 4452, 4458, 4460, 4464, 4470, 4472, 4476, 4480, 4482, 4488, 4494, 4500, 4506, 4508, 4510, 4512, 4518, 4520, 4524, 4530, 4536, 4540, 4542, 4544, 4548, 4550, 4554, 4560, 4564, 4566, 4572, 4576, 4578, 4580, 4584, 4590, 4592, 4596, 4600, 4602, 4608, 4614, 4620, 4624, 4626, 4632, 4638, 4640, 4644, 4648, 4650, 4656, 4660, 4662, 4664, 4668, 4672, 4674, 4676, 4680, 4686, 4690, 4692, 4698, 4700, 4704, 4710, 4712, 4716, 4720, 4722, 4725, 4728, 4730, 4732, 4734, 4736, 4740, 4746, 4752, 4758, 4760, 4764, 4770, 4776, 4780, 4782, 4784, 4788, 4794, 4800, 4806, 4812, 4816, 4818, 4820, 4824, 4830, 4836, 4840, 4842, 4844, 4848, 4854, 4860, 4864, 4866, 4872, 4878, 4880, 4884, 4888, 4890, 4896, 4900, 4902, 4908, 4914, 4920, 4926, 4928, 4932, 4938, 4940, 4944, 4950, 4956, 4960, 4962, 4968, 4970, 4974, 4980, 4984, 4986, 4992, 4998, 5000, 5004, 5010, 5012, 5016, 5020, 5022, 5028, 5032, 5034, 5040, 5046, 5052, 5056, 5058, 5060, 5064, 5068, 5070, 5076, 5080, 5082, 5088, 5094, 5096, 5100, 5104, 5106, 5110, 5112, 5118, 5120, 5124, 5130, 5136, 5140, 5142, 5148, 5152, 5154, 5160, 5166, 5168, 5170, 5172, 5178, 5180, 5184, 5190, 5192, 5196, 5200, 5202, 5208, 5214, 5220, 5226, 5232, 5236, 5238, 5240, 5244, 5248, 5250, 5256, 5260, 5262, 5264, 5268, 5274, 5280, 5286, 5292, 5298, 5300, 5304, 5310, 5312, 5316, 5320, 5322, 5328, 5334, 5336, 5340, 5346, 5348, 5352, 5355, 5358, 5360, 5364, 5368, 5370, 5376, 5380, 5382, 5388, 5390, 5394, 5400, 5404, 5406, 5408, 5412, 5418, 5420, 5424, 5430, 5432, 5436, 5440, 5442, 5448, 5454, 5456, 5460, 5466, 5472, 5478, 5480, 5484, 5488, 5490, 5496, 5500, 5502, 5504, 5508, 5512, 5514, 5516, 5520, 5526, 5530, 5532, 5538, 
5540, 5544, 5550, 5556, 5560, 5562, 5568, 5572, 5574, 5576, 5580, 5586, 5592, 5598, 5600, 5604, 5610, 5616, 5620, 5622, 5624, 5628, 5632, 5634, 5640, 5646, 5652, 5656, 5658, 5660, 5664, 5670, 5676, 5680, 5682, 5684, 5688, 5694, 5696, 5700, 5704, 5706, 5712, 5718, 5720, 5724, 5730, 5736, 5740, 5742, 5748, 5754, 5760, 5766, 5768, 5772, 5775, 5776, 5778, 5780, 5784, 5790, 5796, 5800, 5802, 5808, 5810, 5814, 5820, 5824, 5826, 5830, 5832, 5838, 5840, 5844, 5848, 5850, 5852, 5856, 5860, 5862, 5868, 5874, 5880, 5886, 5888, 5892, 5896, 5898, 5900, 5904, 5908, 5910, 5916, 5920, 5922, 5928, 5934, 5936, 5940, 5946, 5950, 5952, 5958, 5960, 5964, 5970, 5976, 5980, 5982, 5984, 5985, 5988, 5992, 5994, 6000, 6006, 6012, 6016, 6018, 6020, 6024, 6030, 6032, 6036, 6040, 6042, 6048, 6050, 6054, 6060, 6066, 6072, 6076, 6078, 6080, 6084, 6090, 6096, 6100, 6102, 6104, 6108, 6114, 6120, 6126, 6132, 6136, 6138, 6140, 6144, 6150, 6156, 6160, 6162, 6168, 6174, 6180, 6186, 6188, 6192, 6198, 6200, 6204, 6208, 6210, 6216, 6220, 6222, 6228, 6230, 6232, 6234, 6240, 6244, 6246, 6248, 6252, 6256, 6258, 6260, 6264, 6270, 6272, 6276, 6280, 6282, 6288, 6292, 6294, 6300, 6306, 6312, 6318, 6320, 6324, 6328, 6330, 6336, 6340, 6342, 6344, 6348, 6354, 6356, 6360, 6366, 6370, 6372, 6378, 6380, 6384, 6390, 6392, 6396, 6400, 6402, 6408, 6412, 6414, 6420, 6424, 6426, 6432, 6435, 6438, 6440, 6444, 6448, 6450, 6456, 6460, 6462, 6464, 6468, 6474, 6480, 6486, 6492, 6496, 6498, 6500, 6504, 6510, 6512, 6516, 6520, 6522, 6524, 6528, 6534, 6536, 6540, 6546, 6552, 6558, 6560, 6564, 6570, 6576, 6580, 6582, 6588, 6592, 6594, 6600, 6606, 6608, 6612, 6615, 6618, 6620, 6624, 6630, 6636, 6640, 6642, 6648, 6650, 6654, 6656, 6660, 6664, 6666, 6672, 6678, 6680, 6684, 6688, 6690, 6692, 6696, 6700, 6702, 6708, 6714, 6720, 6726, 6732, 6738, 6740, 6744, 6748, 6750, 6756, 6760, 6762, 6768, 6774, 6776, 6780, 6784, 6786, 6790, 6792, 6798, 6800, 6804, 6808, 6810, 6816, 6820, 6822, 6825, 6828, 6832, 6834, 6840, 6846, 6848, 6852, 6858, 
6860, 6864, 6870, 6876, 6880, 6882, 6888, 6894, 6900, 6906, 6912, 6916, 6918, 6920, 6924, 6930, 6936, 6940, 6942, 6944, 6948, 6952, 6954, 6960, 6966, 6968, 6972, 6976, 6978, 6980, 6984, 6990, 6992, 6996, 7000, 7002, 7008, 7014, 7020, 7026, 7028, 7032, 7038, 7040, 7044, 7050, 7056, 7060, 7062, 7068, 7070, 7072, 7074, 7080, 7084, 7086, 7092, 7098, 7100, 7104, 7110, 7112, 7116, 7120, 7122, 7128, 7134, 7140, 7144, 7146, 7150, 7152, 7158, 7160, 7164, 7168, 7170, 7176, 7180, 7182, 7188, 7192, 7194, 7196, 7200, 7206, 7208, 7210, 7212, 7216, 7218, 7220, 7224, 7230, 7232, 7236, 7240, 7242, 7245, 7248, 7252, 7254, 7260, 7266, 7272, 7278, 7280, 7284, 7290, 7296, 7300, 7302, 7304, 7308, 7314, 7320, 7326, 7332, 7336, 7338, 7340, 7344, 7350, 7356, 7360, 7362, 7364, 7368, 7374, 7380, 7384, 7386, 7392, 7398, 7400, 7404, 7410, 7416, 7420, 7422, 7424, 7425, 7428, 7434, 7436, 7440, 7446, 7448, 7452, 7458, 7460, 7464, 7470, 7476, 7480, 7482, 7488, 7490, 7494, 7500, 7504, 7506, 7512, 7518, 7520, 7524, 7530, 7532, 7536, 7540, 7542, 7544, 7548, 7552, 7554, 7560, 7566, 7568, 7572, 7578, 7580, 7584, 7588, 7590, 7592, 7596, 7600, 7602, 7608, 7614, 7616, 7620, 7626, 7630, 7632, 7638, 7640, 7644, 7650, 7656, 7660, 7662, 7668, 7672, 7674, 7680, 7686, 7692, 7696, 7698, 7700, 7704, 7710, 7716, 7720, 7722, 7728, 7734, 7740, 7744, 7746, 7752, 7756, 7758, 7760, 7764, 7770, 7776, 7780, 7782, 7784, 7788, 7794, 7800, 7806, 7808, 7812, 7818, 7820, 7824, 7830, 7832, 7836, 7840, 7842, 7848, 7854, 7860, 7866, 7868, 7872, 7875, 7878, 7880, 7884, 7888, 7890, 7896, 7900, 7902, 7904, 7908, 7910, 7912, 7914, 7920, 7924, 7926, 7932, 7936, 7938, 7940, 7944, 7950, 7952, 7956, 7960, 7962, 7968, 7974, 7980, 7986, 7992, 7998, 8000, 8004, 8008, 8010, 8016, 8020, 8022, 8024, 8028, 8034, 8036, 8040, 8046, 8050, 8052, 8056, 8058, 8060, 8064, 8070, 8076, 8080, 8082, 8085, 8088, 8092, 8094, 8096, 8100, 8106, 8112, 8118, 8120, 8124, 8130, 8136, 8140, 8142, 8148, 8154, 8160, 8166, 8172, 8176, 8178, 8180, 8184, 8190, 8196, 
8200, 8202, 8204, 8208, 8214, 8216, 8220, 8226, 8228, 8232, 8238, 8240, 8244, 8250, 8256, 8260, 8262, 8268, 8272, 8274, 8280, 8286, 8288, 8292, 8296, 8298, 8300, 8304, 8310, 8316, 8320, 8322, 8328, 8330, 8334, 8340, 8344, 8346, 8352, 8358, 8360, 8364, 8370, 8372, 8376, 8380, 8382, 8388, 8394, 8400, 8406, 8412, 8415, 8418, 8420, 8424, 8428, 8430, 8432, 8436, 8440, 8442, 8448, 8450, 8454, 8456, 8460, 8464, 8466, 8470, 8472, 8478, 8480, 8484, 8490, 8496, 8500, 8502, 8505, 8508, 8512, 8514, 8520, 8526, 8528, 8532, 8536, 8538, 8540, 8544, 8550, 8556, 8560, 8562, 8568, 8574, 8576, 8580, 8586, 8592, 8596, 8598, 8600, 8604, 8610, 8616, 8620, 8622, 8624, 8628, 8632, 8634, 8640, 8646, 8652, 8658, 8660, 8664, 8670, 8676, 8680, 8682, 8688, 8694, 8700, 8704, 8706, 8708, 8712, 8718, 8720, 8724, 8730, 8736, 8740, 8742, 8748, 8750, 8754, 8760, 8764, 8766, 8772, 8778, 8780, 8784, 8790, 8792, 8796, 8800, 8802, 8808, 8814, 8816, 8820, 8826, 8832, 8838, 8840, 8844, 8848, 8850, 8856, 8860, 8862, 8868, 8874, 8876, 8880, 8886, 8888, 8890, 8892, 8898, 8900, 8904, 8910, 8916, 8920, 8922, 8925, 8928, 8932, 8934, 8940, 8944, 8946, 8952, 8958, 8960, 8964, 8968, 8970, 8976, 8980, 8982, 8988, 8994, 9000, 9006, 9012, 9016, 9018, 9020, 9024, 9030, 9036, 9040, 9042, 9044, 9048, 9054, 9060, 9064, 9066, 9072, 9078, 9080, 9084, 9088, 9090, 9096, 9100, 9102, 9108, 9112, 9114, 9120, 9126, 9128, 9132, 9135, 9138, 9140, 9144, 9150, 9152, 9156, 9160, 9162, 9168, 9170, 9174, 9180, 9184, 9186, 9192, 9196, 9198, 9200, 9204, 9210, 9212, 9216, 9220, 9222, 9228, 9234, 9240, 9246, 9248, 9252, 9256, 9258, 9260, 9264, 9268, 9270, 9272, 9276, 9280, 9282, 9288, 9294, 9296, 9300, 9306, 9310, 9312, 9318, 9320, 9324, 9328, 9330, 9336, 9340, 9342, 9344, 9348, 9350, 9352, 9354, 9360, 9366, 9372, 9378, 9380, 9384, 9390, 9396, 9400, 9402, 9408, 9414, 9416, 9420, 9424, 9426, 9432, 9436, 9438, 9440, 9444, 9450, 9456, 9460, 9462, 9464, 9468, 9472, 9474, 9480, 9486, 9492, 9498, 9500, 9504, 9510, 9516, 9520, 9522, 9528, 9534, 
9540, 9546, 9548, 9552, 9555, 9558, 9560, 9564, 9568, 9570, 9576, 9580, 9582, 9588, 9590, 9592, 9594, 9600, 9604, 9606, 9612, 9618, 9620, 9624, 9630, 9632, 9636, 9640, 9642, 9648, 9654, 9656, 9660, 9666, 9672, 9678, 9680, 9684, 9688, 9690, 9696, 9700, 9702, 9708, 9714, 9716, 9720, 9724, 9726, 9728, 9730, 9732, 9738, 9740, 9744, 9750, 9756, 9760, 9762, 9765, 9768, 9772, 9774, 9776, 9780, 9786, 9792, 9798, 9800, 9804, 9810, 9816, 9820, 9822, 9828, 9834, 9840, 9846, 9852, 9856, 9858, 9860, 9864, 9870, 9876, 9880, 9882, 9884, 9888, 9894, 9900, 9906, 9912, 9918, 9920, 9924, 9928, 9930, 9936, 9940, 9942, 9944, 9948, 9954, 9960, 9966, 9968, 9972, 9978, 9980, 9984, 9990, 9996, 10000, 10002, 10008, 10010, 10014, 10020, 10024, 10026, 10032, 10038, 10040, 10044, 10050, 10052, 10056, 10060, 10062, 10064, 10068, 10074, 10086, 10088, 10092, 10098, 10100, 10104, 10108, 10110, 10112, 10116, 10120, 10122, 10128, 10134, 10136, 10140, 10146, 10150, 10152, 10158, 10160, 10164, 10170, 10176, 10180, 10182, 10184, 10188, 10192, 10194, 10200, 10206, 10208, 10212, 10218, 10220, 10224, 10230, 10236, 10240, 10242, 10248, 10254, 10260, 10266, 10272, 10276, 10278, 10280, 10284, 10290, 10296, 10300, 10302, 10304, 10308, 10314, 10320, 10326, 10332, 10336, 10338, 10340, 10344, 10350, 10356, 10360, 10362, 10368, 10374, 10380, 10384, 10386, 10388, 10392, 10395, 10398, 10400, 10404, 10410, 10416, 10420, 10422, 10428, 10430, 10434, 10440, 10444, 10446, 10450, 10452, 10458, 10460, 10464, 10470, 10472, 10476, 10480, 10482, 10488, 10494, 10496, 10500, 10504, 10506, 10512, 10518, 10520, 10524, 10528, 10530, 10536, 10540, 10542, 10548, 10554, 10556, 10560, 10566, 10570, 10572, 10578, 10580, 10584, 10590, 10596, 10600, 10602, 10608, 10612, 10614, 10620, 10624, 10626, 10632, 10638, 10640, 10644, 10648, 10650, 10656, 10660, 10662, 10668, 10672, 10674, 10680, 10686, 10692, 10696, 10698, 10700, 10704, 10710, 10712, 10716, 10720, 10722, 10724, 10728, 10734, 10736, 10740, 10744, 10746, 10752, 10758, 10760, 
10764, 10770, 10776, 10780, 10782, 10788, 10792, 10794, 10800, 10806, 10808, 10812, 10816, 10818, 10820, 10824, 10830, 10836, 10840, 10842, 10848, 10850, 10854, 10860, 10864, 10866, 10868, 10872, 10878, 10880, 10884, 10890, 10892, 10896, 10900, 10902, 10908, 10912, 10914, 10926, 10932, 10938, 10940, 10944, 10948, 10950, 10956, 10960, 10962, 10968, 10974, 10976, 10980, 10986, 10990, 10992, 10998, 11000, 11004, 11008, 11010, 11016, 11020, 11022, 11024, 11025, 11028, 11032, 11034, 11040, 11046, 11050, 11052, 11058, 11060, 11064, 11070, 11076, 11080, 11082, 11088, 11094, 11096, 11100, 11106, 11112, 11116, 11118, 11120, 11124, 11128, 11130, 11132, 11136, 11140, 11142, 11144, 11148, 11152, 11154, 11160, 11166, 11172, 11176, 11178, 11180, 11184, 11190, 11196, 11200, 11202, 11208, 11214, 11220, 11226, 11228, 11232, 11238, 11240, 11244, 11248, 11250, 11256, 11260, 11262, 11264, 11268, 11270, 11274, 11280, 11284, 11286, 11288, 11292, 11298, 11300, 11304, 11310, 11312, 11316, 11320, 11322, 11328, 11334, 11336, 11346, 11352, 11358, 11360, 11364, 11368, 11370, 11376, 11380, 11382, 11388, 11392, 11394, 11396, 11400, 11406, 11408, 11410, 11412, 11418, 11420, 11424, 11430, 11436, 11440, 11442, 11448, 11452, 11454, 11460, 11466, 11472, 11478, 11480, 11484, 11490, 11492, 11496, 11500, 11502, 11508, 11514, 11526, 11528, 11532, 11536, 11538, 11540, 11544, 11550, 11552, 11556, 11560, 11562, 11564, 11568, 11574, 11580, 11586, 11592, 11598, 11600, 11604, 11610, 11616, 11620, 11622, 11628, 11634, 11640, 11646, 11648, 11652, 11655, 11658, 11660, 11664, 11670, 11676, 11680, 11682, 11688, 11690, 11694, 11696, 11700, 11704, 11706, 11712, 11718, 11720, 11724, 11730, 11732, 11736, 11740, 11742, 11748, 11752, 11754, 11766, 11772, 11776, 11778, 11780, 11784, 11788, 11790, 11792, 11796, 11800, 11802, 11808, 11814, 11816, 11820, 11826, 11830, 11832, 11838, 11840, 11844, 11850, 11856, 11860, 11862, 11868, 11872, 11874, 11886, 11892, 11898, 11900, 11904, 11910, 11916, 11920, 11922, 11928, 11934, 
11940, 11946, 11952, 11956, 11958, 11960, 11964, 11968, 11970, 11976, 11980, 11982, 11984, 11988, 11994, 12000, 12006, 12012, 12018, 12020, 12024, 12030, 12032, 12036, 12040, 12042, 12048, 12054, 12056, 12060, 12064, 12066, 12068, 12072, 12078, 12080, 12084, 12090, 12100, 12102, 12104, 12108, 12110, 12114, 12120, 12124, 12126, 12132, 12138, 12140, 12144, 12150, 12152, 12156, 12160, 12162, 12168, 12174, 12186, 12192, 12198, 12200, 12204, 12208, 12210, 12216, 12220, 12222, 12228, 12232, 12234, 12236, 12246, 12250, 12252, 12258, 12260, 12264, 12270, 12272, 12276, 12280, 12282, 12285, 12288, 12292, 12294, 12300, 12306, 12312, 12318, 12320, 12324, 12330, 12336, 12340, 12342, 12348, 12350, 12354, 12360, 12366, 12372, 12376, 12378, 12380, 12384, 12390, 12396, 12400, 12402, 12404, 12408, 12414, 12416, 12420, 12426, 12432, 12438, 12440, 12444, 12450, 12456, 12460, 12462, 12464, 12468, 12474, 12486, 12488, 12492, 12496, 12498, 12500, 12504, 12510, 12512, 12516, 12520, 12522, 12528, 12530, 12534, 12540, 12544, 12546, 12552, 12558, 12560, 12564, 12570, 12572, 12576, 12580, 12582, 12584, 12588, 12594, 12606, 12612, 12618, 12620, 12624, 12628, 12630, 12636, 12640, 12642, 12648, 12650, 12654, 12656, 12660, 12666, 12670, 12672, 12678, 12680, 12684, 12688, 12690, 12696, 12700, 12702, 12705, 12708, 12712, 12714, 12716, 12720, 12726, 12732, 12738, 12740, 12744, 12750, 12756, 12760, 12762, 12768, 12774, 12780, 12784, 12786, 12792, 12796, 12798, 12800, 12804, 12810, 12816, 12820, 12822, 12824, 12828, 12834, 12840, 12846, 12848, 12852, 12858, 12860, 12864, 12870, 12876, 12880, 12882, 12888, 12894, 12896, 12900, 12906, 12908, 12912, 12915, 12918, 12920, 12924, 12928, 12930, 12936, 12940, 12942, 12948, 12950, 12954, 12964, 12966, 12972, 12978, 12980, 12984, 12990, 12992, 12996, 13000, 13002, 13008, 13014, 13024, 13026, 13032, 13038, 13040, 13044, 13048, 13050, 13056, 13060, 13062, 13068, 13072, 13074, 13076, 13080, 13086, 13090, 13092, 13098, 13100, 13110, 13112, 13116, 13120, 13122, 
13128, 13132, 13134, 13140, 13146, 13152, 13156, 13158, 13160, 13164, 13170, 13176, 13180, 13182, 13184, 13188, 13192, 13194, 13206, 13208, 13212, 13216, 13218, 13220, 13224, 13230, 13236, 13240, 13242, 13244, 13248, 13254, 13266, 13272, 13278, 13280, 13284, 13288, 13290, 13296, 13300, 13302, 13308, 13312, 13314, 13326, 13328, 13332, 13338, 13340, 13344, 13350, 13356, 13360, 13362, 13368, 13370, 13374, 13376, 13380, 13384, 13386, 13392, 13398, 13400, 13404, 13410, 13412, 13416, 13420, 13422, 13428, 13434, 13446, 13452, 13456, 13458, 13460, 13468, 13470, 13476, 13480, 13482, 13488, 13494, 13496, 13506, 13510, 13512, 13518, 13520, 13524, 13530, 13536, 13540, 13542, 13545, 13548, 13552, 13554, 13560, 13566, 13568, 13572, 13578, 13580, 13584, 13590, 13596, 13600, 13602, 13614, 13616, 13620, 13624, 13626, 13632, 13636, 13638, 13640, 13644, 13650, 13656, 13660, 13662, 13664, 13668, 13674, 13686, 13692, 13696, 13698, 13700, 13704, 13710, 13716, 13720, 13722, 13734, 13736, 13740, 13746, 13748, 13750, 13752, 13758, 13760, 13764, 13770, 13776, 13780, 13782, 13788, 13790, 13794, 13804, 13806, 13812, 13816, 13818, 13820, 13824, 13830, 13832, 13836, 13840, 13842, 13848, 13854, 13866, 13872, 13878, 13880, 13884, 13888, 13890, 13896, 13900, 13902, 13904, 13908, 13914, 13916, 13926, 13930, 13932, 13936, 13938, 13940, 13944, 13950, 13952, 13956, 13960, 13962, 13968, 13972, 13974, 13980, 13984, 13986, 13992, 13998, 14000, 14004, 14008, 14010, 14014, 14016, 14020, 14022, 14028, 14034, 14046, 14052, 14056, 14058, 14060, 14064, 14070, 14076, 14080, 14082, 14084, 14088, 14094, 14100, 14106, 14118, 14120, 14124, 14130, 14136, 14140, 14142, 14144, 14148, 14154, 14166, 14168, 14172, 14175, 14178, 14180, 14184, 14190, 14196, 14200, 14202, 14208, 14210, 14212, 14214, 14224, 14226, 14232, 14238, 14240, 14244, 14248, 14250, 14252, 14260, 14262, 14268, 14274, 14286, 14288, 14292, 14298, 14300, 14304, 14308, 14310, 14316, 14320, 14322, 14328, 14334, 14336, 14340, 14344, 14346, 14350, 14352, 
14358, 14360, 14370, 14376, 14380, 14382, 14384, 14388, 14392, 14394, 14406, 14412, 14416, 14418, 14420, 14424, 14430, 14432, 14436, 14440, 14442, 14454, 14456, 14460, 14464, 14466, 14472, 14476, 14478, 14480, 14484, 14496, 14500, 14502, 14504, 14508, 14514, 14526, 14532, 14538, 14540, 14544, 14550, 14552, 14556, 14560, 14562, 14568, 14574, 14586, 14588, 14592, 14598, 14600, 14604, 14608, 14610, 14620, 14622, 14628, 14630, 14634, 14644, 14646, 14652, 14658, 14660, 14664, 14670, 14672, 14676, 14680, 14682, 14694, 14696, 14706, 14712, 14718, 14720, 14724, 14728, 14730, 14736, 14740, 14742, 14748, 14754, 14756, 14766, 14768, 14770, 14772, 14778, 14780, 14790, 14796, 14800, 14802, 14805, 14808, 14812, 14814, 14824, 14826, 14832, 14838, 14840, 14844, 14848, 14856, 14860, 14862, 14872, 14874, 14886, 14892, 14896, 14898, 14900, 14910, 14916, 14920, 14922, 14924, 14928, 14934, 14946, 14950, 14958, 14960, 14964, 14970, 14980, 14982, 14988, 14994, 15006, 15008, 15012, 15015, 15018, 15020, 15024, 15028, 15030, 15036, 15040, 15042, 15050, 15054, 15060, 15064, 15066, 15072, 15078, 15080, 15084, 15088, 15090, 15092, 15096, 15100, 15102, 15104, 15108, 15114, 15126, 15132, 15136, 15138, 15140, 15144, 15148, 15150, 15156, 15160, 15162, 15168, 15174, 15176, 15184, 15186, 15190, 15192, 15198, 15200, 15204, 15210, 15216, 15220, 15222, 15224, 15228, 15232, 15234, 15246, 15252, 15258, 15260, 15270, 15276, 15280, 15282, 15294, 15306, 15316, 15318, 15320, 15324, 15330, 15336, 15340, 15342, 15344, 15348, 15354, 15366, 15368, 15376, 15378, 15380, 15384, 15392, 15396, 15402, 15408, 15414, 15420, 15426, 15428, 15432, 15435, 15438, 15440, 15450, 15460, 15462, 15468, 15470, 15474, 15484, 15486, 15488, 15492, 15496, 15498, 15500, 15510, 15512, 15516, 15520, 15522, 15528, 15534, 15546, 15558, 15560, 15564, 15568, 15570, 15576, 15580, 15582, 15588, 15594, 15596, 15606, 15610, 15612, 15616, 15618, 15620, 15630, 15636, 15640, 15642, 15648, 15652, 15654, 15664, 15666, 15672, 15678, 15680, 15684, 
15690, 15700, 15702, 15704, 15714, 15726, 15730, 15732, 15736, 15738, 15740, 15744, 15752, 15756, 15760, 15762, 15764, 15774, 15776, 15786, 15798, 15800, 15804, 15808, 15810, 15816, 15820, 15822, 15824, 15828, 15834, 15846, 15848, 15852, 15858, 15860, 15864, 15870, 15872, 15880, 15882, 15884, 15888, 15890, 15894, 15904, 15906, 15918, 15920, 15924, 15928, 15930, 15932, 15936, 15940, 15942, 15948, 15950, 15954, 15966, 15972, 15978, 15980, 15988, 15990, 15996, 16000, 16002, 16008, 16014, 16016, 16026, 16030, 16032, 16038, 16040, 16044, 16048, 16050, 16056, 16060, 16062, 16065, 16068, 16072, 16074, 16086, 16092, 16098, 16100, 16110, 16112, 16116, 16120, 16122, 16134, 16146, 16150, 16152, 16156, 16158, 16160, 16164, 16176, 16180, 16182, 16184, 16188, 16192, 16194, 16206, 16212, 16218, 16220, 16230, 16242, 16248, 16250, 16254, 16256, 16266, 16268, 16278, 16280, 16284, 16290, 16300, 16302, 16308, 16310, 16314, 16324, 16326, 16328, 16332, 16338, 16340, 16344, 16350, 16352, 16356, 16360, 16362, 16374, 16386, 16392, 16398, 16400, 16404, 16408, 16410, 16420, 16422, 16428, 16432, 16434, 16436, 16446, 16450, 16452, 16456, 16458, 16460, 16476, 16480, 16482, 16492, 16494, 16506, 16518, 16520, 16530, 16540, 16542, 16544, 16548, 16554, 16566, 16572, 16576, 16578, 16580, 16584, 16588, 16592, 16596, 16600, 16602, 16604, 16608, 16614, 16626, 16638, 16640, 16644, 16656, 16660, 16662, 16668, 16674, 16686, 16688, 16692, 16695, 16698, 16700, 16710, 16716, 16720, 16722, 16730, 16734, 16744, 16746, 16752, 16758, 16760, 16764, 16768, 16770, 16772, 16780, 16782, 16788, 16794, 16796, 16806, 16808, 16812, 16818, 16820, 16824, 16828, 16836, 16840, 16842, 16854, 16856, 16864, 16866, 16870, 16878, 16880, 16890, 16900, 16902, 16908, 16912, 16914, 16926, 16928, 16932, 16938, 16940, 16944, 16950, 16952, 16956, 16960, 16962, 16974, 16984, 16986, 16996, 16998, 17000, 17004, 17016, 17020, 17022, 17024, 17034, 17046, 17050, 17056, 17058, 17060, 17070, 17072, 17076, 17080, 17082, 17094, 17106, 17108, 
17118, 17120, 17124, 17130, 17140, 17142, 17148, 17150, 17152, 17154, 17164, 17166, 17168, 17178, 17180, 17190, 17192, 17196, 17200, 17202, 17204, 17214, 17226, 17232, 17238, 17240, 17244, 17248, 17250, 17256, 17260, 17262, 17264, 17268, 17272, 17274, 17276, 17286, 17290, 17292, 17296, 17298, 17300, 17310, 17320, 17322, 17325, 17332, 17334, 17336, 17346, 17358, 17364, 17368, 17370, 17380, 17382, 17394, 17406, 17408, 17412, 17416, 17418, 17420, 17436, 17440, 17442, 17444, 17448, 17454, 17466, 17478, 17480, 17484, 17500, 17502, 17508, 17512, 17514, 17526, 17528, 17532, 17536, 17538, 17540, 17560, 17562, 17570, 17574, 17576, 17584, 17586, 17592, 17598, 17610, 17612, 17616, 17620, 17622, 17628, 17632, 17634, 17646, 17652, 17658, 17660, 17668, 17676, 17682, 17694, 17696, 17706, 17710, 17718, 17720, 17732, 17736, 17740, 17742, 17752, 17754, 17772, 17776, 17778, 17780, 17790, 17792, 17796, 17800, 17802, 17814, 17816, 17826, 17832, 17836, 17838, 17840, 17844, 17860, 17862, 17864, 17868, 17874, 17886, 17888, 17898, 17900, 17916, 17922, 17934, 17936, 17946, 17948, 17955, 17958, 17960, 17964, 17970, 17980, 17982, 17988, 17990, 17992, 17994, 18004, 18006, 18012, 18020, 18024, 18030, 18032, 18040, 18042, 18054, 18066, 18078, 18080, 18088, 18100, 18102, 18108, 18114, 18116, 18126, 18128, 18130, 18132, 18138, 18140, 18156, 18160, 18162, 18168, 18172, 18174, 18176, 18186, 18198, 18204, 18210, 18220, 18222, 18224, 18234, 18246, 18256, 18258, 18260, 18264, 18276, 18280, 18282, 18284, 18294, 18304, 18306, 18318, 18320, 18324, 18326, 18340, 18342, 18352, 18354, 18366, 18368, 18372, 18378, 18380, 18390, 18392, 18402, 18410, 18414, 18424, 18426, 18438, 18440, 18444, 18452, 18456, 18460, 18462, 18474, 18486, 18492, 18496, 18498, 18500, 18508, 18510, 18512, 18516, 18520, 18534, 18536, 18544, 18546, 18550, 18552, 18558, 18560, 18568, 18570, 18580, 18582, 18585, 18588, 18590, 18592, 18594, 18606, 18616, 18618, 18636, 18640, 18642, 18654, 18656, 18666, 18676, 18678, 18680, 18688, 18702, 
18704, 18708, 18714, 18726, 18738, 18740, 18750, 18762, 18774, 18786, 18788, 18798, 18800, 18804, 18820, 18822, 18824, 18830, 18832, 18834, 18844, 18846, 18848, 18850, 18852, 18858, 18860, 18872, 18880, 18882, 18894, 18906, 18918, 18924, 18928, 18930, 18940, 18944, 18948, 18954, 18956, 18966, 18970, 18978, 18980, 18996, 19000, 19002, 19012, 19014, 19024, 19038, 19060, 19062, 19072, 19074, 19086, 19092, 19096, 19098, 19100, 19120, 19122, 19124, 19134, 19136, 19146, 19158, 19160, 19164, 19180, 19182, 19184, 19194, 19206, 19208, 19212, 19215, 19218, 19220, 19228, 19230, 19242, 19250, 19254, 19264, 19266, 19280, 19284, 19290, 19292, 19300, 19302, 19305, 19308, 19312, 19314, 19326, 19328, 19338, 19340, 19348, 19356, 19362, 19374, 19376, 19386, 19390, 19398, 19400, 19410, 19420, 19422, 19428, 19432, 19434, 19446, 19448, 19452, 19456, 19458, 19460, 19480, 19482, 19494, 19504, 19506, 19516, 19518, 19520, 19524, 19540, 19542, 19544, 19550, 19552, 19554, 19566, 19578, 19580, 19590, 19614, 19624, 19626, 19628, 19635, 19638, 19640, 19644, 19660, 19662, 19670, 19674, 19684, 19686, 19698, 19700, 19722, 19746, 19758, 19768, 19770, 19780, 19794, 19796, 19806, 19810, 19818, 19820, 19830, 19842, 19845, 19852, 19854, 19856, 19864, 19878, 19884, 19888, 19900, 19902, 19914, 19926, 19936, 19938, 19940, 19952, 19956, 19960, 19962, 19964, 19974, 19976, 19986, 19998, 20004, 20022, 20028, 20046, 20048, 20058, 20060, 20072, 20080, 20082, 20090, 20094, 20096, 20104, 20106, 20118, 20120, 20128, 20132, 20140, 20142, 20150, 20152, 20154, 20166, 20172, 20176, 20178, 20180, 20188, 20200, 20214, 20216, 20224, 20226, 20230, 20238, 20260, 20262, 20272, 20274, 20298, 20316, 20320, 20322, 20332, 20334, 20336, 20346, 20350, 20356, 20360, 20364, 20368, 20380, 20382, 20388, 20406, 20416, 20418, 20420, 20442, 20454, 20466, 20468, 20475, 20478, 20482, 20488, 20500, 20502, 20504, 20508, 20510, 20514, 20524, 20526, 20540, 20552, 20560, 20562, 20570, 20574, 20586, 20598, 20600, 20620, 20622, 20634, 20636, 
20646, 20650, 20652, 20658, 20660, 20672, 20676, 20682, 20692, 20694, 20696, 20718, 20740, 20742, 20754, 20766, 20768, 20776, 20778, 20780, 20796, 20802, 20804, 20814, 20838, 20840, 20862, 20864, 20886, 20888, 20892, 20898, 20920, 20922, 20930, 20934, 20946, 20958, 20964, 20972, 20980, 20982, 20992, 20994, 21006, 21008, 21018, 21020, 21028, 21032, 21036, 21040, 21054, 21056, 21066, 21070, 21078, 21100, 21102, 21105, 21126, 21138, 21162, 21164, 21174, 21196, 21198, 21208, 21220, 21222, 21224, 21234, 21246, 21248, 21258, 21260, 21282, 21296, 21306, 21308, 21328, 21340, 21342, 21344, 21350, 21354, 21364, 21366, 21376, 21378, 21380, 21392, 21402, 21414, 21424, 21426, 21438, 21448, 21460, 21472, 21474, 21476, 21486, 21488, 21490, 21498, 21500, 21522, 21532, 21534, 21558, 21580, 21584, 21594, 21606, 21616, 21618, 21620, 21632, 21640, 21642, 21644, 21654, 21666, 21678, 21692, 21702, 21712, 21728, 21735, 21738, 21740, 21770, 21774, 21784, 21786, 21808, 21812, 21820, 21822, 21824, 21834, 21846, 21850, 21858, 21860, 21868, 21880, 21894, 21906, 21910, 21918, 21940, 21944, 21945, 21954, 22002, 22014, 22016, 22022, 22026, 22036, 22038, 22048, 22060, 22062, 22064, 22074, 22086, 22088, 22098, 22122, 22144, 22146, 22148, 22158, 22180, 22182, 22190, 22192, 22194, 22204, 22206, 22232, 22242, 22254, 22256, 22264, 22266, 22275, 22278, 22280, 22288, 22300, 22304, 22314, 22316, 22326, 22340, 22352, 22362, 22365, 22372, 22398, 22420, 22422, 22434, 22448, 22456, 22458, 22460, 22482, 22484, 22494, 22496, 22528, 22550, 22566, 22576, 22578, 22580, 22602, 22614, 22616, 22626, 22652, 22662, 22672, 22674, 22686, 22700, 22708, 22724, 22734, 22746, 22758, 22782, 22784, 22794, 22816, 22818, 22854, 22866, 22876, 22900, 22902, 22904, 22912, 22926, 22938, 22962, 22984, 22986, 22988, 22990, 22995, 22998, 23020, 23022, 23030, 23034, 23044, 23046, 23056, 23060, 23082, 23094, 23104, 23106, 23118, 23144, 23154, 23156, 23168, 23170, 23178, 23188, 23192, 23202, 23205, 23212, 23214, 23238, 23260, 23262, 
23286, 23298, 23300, 23312, 23324, 23334, 23346, 23358, 23392, 23406, 23418, 23420, 23442, 23450, 23452, 23454, 23464, 23466, 23492, 23502, 23504, 23514, 23526, 23538, 23548, 23552, 23574, 23576, 23584, 23586, 23590, 23608, 23620, 23622, 23625, 23650, 23658, 23672, 23682, 23718, 23740, 23742, 23754, 23766, 23778, 23802, 23816, 23828, 23838, 23848, 23860, 23862, 23874, 23884, 23886, 23934, 23946, 23996, 24006, 24010, 24016, 24018, 24020, 24042, 24050, 24052, 24054, 24064, 24078, 24112, 24114, 24126, 24136, 24162, 24164, 24186, 24198, 24208, 24232, 24244, 24248, 24260, 24272, 24290, 24294, 24306, 24340, 24342, 24366, 24376, 24378, 24430, 24438, 24448, 24460, 24464, 24474, 24522, 24546, 24556, 24558, 24580, 24582, 24592, 24594, 24596, 24620, 24650, 24656, 24666, 24668, 24702, 24704, 24710, 24724, 24728, 24740, 24762, 24774, 24798, 24832, 24834, 24836, 24846, 24856, 24885, 24892, 24904, 24918, 24928, 24942, 24954, 24978, 24980, 25026, 25062, 25064, 25086, 25098, 25122, 25130, 25134, 25180, 25206, 25216, 25232, 25245, 25266, 25302, 25314, 25338, 25374, 25386, 25396, 25446, 25456, 25458, 25472, 25482, 25508, 25515, 25518, 25554, 25566, 25626, 25636, 25638, 25676, 25690, 25698, 25732, 25734, 25782, 25784, 25842, 25850, 25854, 25856, 25916, 25926, 25935, 25962, 25986, 26012, 26022, 26026, 26034, 26058, 26094, 26104, 26110, 26128, 26142, 26145, 26178, 26236, 26238, 26288, 26322, 26346, 26348, 26350, 26368, 26382, 26394, 26454, 26516, 26526, 26538, 26646, 26650, 26682, 26684, 26706, 26728, 26742, 26778, 26886, 26898, 26912, 26934, 26958, 27008, 27042, 27076, 27078, 27102, 27114, 27136, 27138, 27188, 27282, 27294, 27352, 27356, 27366, 27392, 27402, 27498, 27524, 27546, 27582, 27618, 27726, 27748, 27822, 27834, 27858, 27894, 27906, 27916, 27942, 27978, 28038, 28074]
#for x in range(28123):
# s_ab = False
# for i in range(len(ab)):
# if ab[i] > x:
# tmp_ab = ab[:i]
# break
# for i in range(len(tmp_ab)):
# for j in range(i+1):
# if tmp_ab[i]+tmp_ab[j] == x:
# s_ab = True
# if s_ab == False:
# print x
n = []
while len(ab) > 0:
i = ab.pop(0)
if i+i<28123:
n.append(i+i)
for j in ab:
if i+j<28123:
n.append(i+j)
print 'hello'
a = set(n)
b = set(range(28123))
res = b - a
print sum(res)
|
UTF-8
|
Python
| false | false | 2,014 |
16,501,264,361,534 |
87f33483a674942d704eca887a1791d74a8b86a5
|
74bd766716118dbfdd6fbca900812066f8fe76c3
|
/factory.py
|
33791b48353d5fb91eeaad4275fef0f6ad968fa1
|
[] |
no_license
|
JonHurst/fcomng
|
https://github.com/JonHurst/fcomng
|
67d300993d78df235c62a111ece5f2387813ad51
|
6f0e37cc56d2ab9c9db377c4376a98a1b18e5d04
|
refs/heads/master
| 2021-01-01T16:13:25.468284 | 2012-02-18T17:17:54 | 2012-02-18T17:17:54 | 1,748,823 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
# coding=utf-8
from globals import *
import xml.etree.cElementTree as et
import subprocess
import re
import tempfile
import meta
import hashlib
import shutil
class FCOMFactory:
    """Builds a static xhtml FCOM site from FCOMMeta data.

    Output consists of chunked content pages, index (node) pages, a
    revision list page and a fleet javascript file; xsltproc performs the
    XML-to-xhtml transforms and CGM illustrations are mapped to pre-built
    PNGs from an image library.
    """
    def __init__(self, fcm):
        # NOTE(review): `g_paths` (and `os`, used below) appear to come from
        # `from globals import *` at the top of the file -- confirm.
        global g_paths
        self.errorlog = open("build-error.log", "w")
        # closure suitable as the repl argument of re.sub over <img> tags
        self.cgmtopng = self._manufacture_cgmtopng()
        self.fcm = fcm #instance of FCOMMeta
        # g_paths.control ends ...YYYYMMDD.xxx; reformat the date as YYYY-MM-DD
        versiondate = g_paths.control[-12:-4]
        self.versionstring = versiondate[:4] + "-" + versiondate[4:6] + "-" + versiondate[6:]
        self.revisions = []
        # section depth at which the document tree is chunked into pages
        self.chunk_depth = 4

    def build_fcom(self):
        """Build the whole site: fleet js, index pages, content pages, revision list."""
        self.write_fleet_js()
        content_pages = []
        for ident in self.fcm.get_root_nodes():
            self._recursive_process_section(ident, content_pages)
        self.make_node_page(None)#make contents page
        # each content page gets its neighbours for prev/next navigation
        for make_page_args in zip(content_pages,
                                  [None] + content_pages[:-1],
                                  content_pages[1:] + [None]):
            self.make_page(*make_page_args)
        self.make_revision_list() # this must be done last - self.revisions is filled in by make_page

    def _recursive_process_section(self, ident, content_pages):
        """Collect page-sized section idents into CONTENT_PAGES, emitting an
        index page for every section above the chunking depth."""
        if (self.fcm.get_section_depth(ident) == self.chunk_depth or
                self.fcm.get_type(self.fcm.get_children(ident)[0]) != meta.TYPE_SECTION):
            content_pages.append(ident)
        else:
            self.make_node_page(ident)
            for ident in self.fcm.get_children(ident):
                self._recursive_process_section(ident, content_pages)

    def _process_links(self, page_string):
        """Rewrite the intermediate <a class="duref" href="NNN"> links in
        PAGE_STRING into real xhtml links; bad references are logged and
        replaced by a visible error marker."""
        page_parts = re.split('<a class="duref" href="(\d+)">', page_string)
        # captured idents occupy the odd indices of the split result
        duref_index = 1
        while duref_index < len(page_parts):
            ident = page_parts[duref_index]
            if not self.fcm.is_valid(ident):
                print >> self.errorlog, "Reference to unknown DU", page_parts[duref_index], "whilst processing", ident
                page_parts[duref_index] = "<a class='duref' href='#'>!!!DU REFERENCE ERROR:%s!!!" % ident
            else:
                href = self._make_href(ident)
                parent = self.fcm.get_parent(ident)
                # a DU inside a group is titled after the group
                if self.fcm.get_type(parent) == meta.TYPE_GROUP:
                    ident = parent
                labels = self._make_title(ident).split(":", 1)
                if len(labels) == 2:
                    anchor_string = "<span class='sectionref'>%s</span>%s" % tuple(labels)
                else:
                    anchor_string = labels[0]
                # if the original anchor already had text, keep it instead
                if page_parts[duref_index + 1][:2] != "</":
                    anchor_string = ""
                page_parts[duref_index] = 'Refer to <a class="duref" href="%s">%s' % (
                    href,
                    anchor_string)
            duref_index += 2
        return "".join(page_parts)

    def _recursive_build_node(self, tb, ident, **other):
        """Append the intermediate-XML element(s) for IDENT (section, group
        or du_container) to TreeBuilder TB, recursing into children.
        OTHER carries the per-page 'jsarray' and 'revs' accumulators."""
        node_type = self.fcm.get_type(ident)
        if node_type == meta.TYPE_SECTION:
            section_attribs = {"sid": self._make_html_identifier(ident),
                               "title": self._make_title(ident)}
            tb.start("section", section_attribs)
            if self.fcm.get_type(self.fcm.get_children(ident)[0]) == meta.TYPE_SECTION:
                #this causes the sections to be layed out flat rather than in a hierarchy
                tb.end("section")
                for c in self.fcm.get_children(ident):
                    self._recursive_build_node(tb, c, **other)
            else:
                for c in self.fcm.get_children(ident):
                    self._recursive_build_node(tb, c, **other)
                tb.end("section")
        elif node_type == meta.TYPE_GROUP:
            group_attribs = {"id": self._make_html_identifier(ident),
                             "title": self.fcm.get_title(ident)}
            tb.start("group", group_attribs)
            for c in self.fcm.get_children(ident):
                self._recursive_build_node(tb, c, **other)
            tb.end("group")
        elif node_type == meta.TYPE_DUCONTAINER:
            du_container_attrib = {"id": self._make_html_identifier(ident),
                                   "title": self.fcm.get_title(ident)}
            overriding_tdu = self.fcm.get_overriding(ident)
            if overriding_tdu:
                du_container_attrib["overridden_by"] = self.fcm.get_parent(overriding_tdu)
            tb.start("du_container", du_container_attrib)
            jsarray = other['jsarray']
            jsarray.append([])  # one folding entry per container
            for c in self.fcm.get_children(ident):
                self._process_du(tb, c, **other)
            # drop the entry again if no DU contributed applicability data
            if jsarray[-1] == []: del jsarray[-1]
            tb.end("du_container")

    def _process_du(self, tb, ident, **other):
        """Create DU in TreeBuilder TB.

        IDENT is the duid of the du to build.  Appends applicability data to
        other['jsarray'] and revision paths to other['revs'].
        """
        filename = self.fcm.get_filename(ident)
        if filename: filename = g_paths.dus + filename
        du_attrib = {"title": self.fcm.get_title(ident),
                     "href": filename,
                     "id": self._make_html_identifier(ident),
                     "revdate": self.fcm.get_revdate(ident)}
        code = self.fcm.get_revision_code(ident)
        if code: du_attrib["revcode"] = code
        if self.fcm.is_tdu(ident): du_attrib["tdu"] = "tdu"
        tb.start("du", du_attrib)
        applies = self.fcm.applies(ident)
        if applies:
            tb.start("applies", {})
            tb.data(self.fcm.applies_string(applies))
            tb.end("applies")
            # truncated applicability string feeds the folding javascript
            other['jsarray'][-1].append([ident, applies, self.fcm.applies_string(applies)[:100]])
        tb.end("du")
        other['revs'].extend(self.fcm.get_du_revs(ident))

    def make_page(self, sid, prevsid, nextsid):
        """Build and write one content page for section SID, with prev/next
        navigation to PREVSID/NEXTSID (either may be None)."""
        global g_paths
        filename = self._make_href(sid)
        print "Creating:", filename
        tb = et.TreeBuilder()
        revs = []     # revision paths collected by _process_du
        jsarray = []  # folding data collected by _recursive_build_node
        page_attributes = {"title": self._make_title(sid, True),
                           "version": self.versionstring}
        if prevsid:
            page_attributes["prev"] = self._make_href(prevsid)
            page_attributes["prevtitle"] = self._make_title(prevsid)
        if nextsid:
            page_attributes["next"] = self._make_href(nextsid)
            page_attributes["nexttitle"] = self._make_title(nextsid)
        tb.start("page", page_attributes)
        self._recursive_build_node(tb, sid, jsarray=jsarray, revs=revs)
        tb.end("page")
        stylesheet_name = g_paths.xsldir + "page.xsl"
        tf = None
        if revs:
            # revised text needs a derived stylesheet that highlights it
            tf = self._make_temporary_stylesheet(stylesheet_name, revs)
            stylesheet_name = tf.name
        page_string = subprocess.Popen(["xsltproc", "--nonet", "--novalid", stylesheet_name, "-"],
                                       stdin=subprocess.PIPE, stdout=subprocess.PIPE
                                       ).communicate(et.tostring(tb.close(), "utf-8"))[0]
        if tf: os.unlink(tf.name)
        #create javascript variables for controlling folding
        page_string = page_string.replace(
            "<!--jsvariable-->",
            "var folding = " + str(jsarray) + ";")
        #replace xml links with xhtml links
        page_string = self._process_links(page_string)
        #insert link bar
        page_string = page_string.replace("<!--linkbar-->", self._build_linkbar(sid))
        #convert cgm images to png images
        page_string = re.sub("<img[^>]*></img>", self.cgmtopng, page_string)
        #write the file
        of = open(g_paths.html_output + filename, "w")
        of.write(page_string)

    def _make_temporary_stylesheet(self, stylesheet_name, revs):
        """Write a temporary stylesheet importing STYLESHEET_NAME that marks
        the elements matched by the revision paths in REVS as revised text.
        Caller is responsible for unlinking the returned file."""
        stylesheet = """\
<?xml version="1.0"?>
<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
<xsl:import href="%s"/>
<xsl:template match="%s"><xsl:call-template name="revised_text"/></xsl:template>
</xsl:stylesheet>""" % (stylesheet_name, " | ".join([r[2:] for r in revs]))
        tf = tempfile.NamedTemporaryFile(delete=False)
        tf.write(stylesheet)
        tf.close()
        return tf

    def _recursive_add_index_section(self, ident, tb):
        """Add index entries for IDENT to TreeBuilder TB: a nested <section>
        while above the chunking depth, otherwise a leaf <page> link."""
        if (self.fcm.get_section_depth(ident) < self.chunk_depth and
                self.fcm.get_type(self.fcm.get_children(ident)[0]) == meta.TYPE_SECTION):
            tb.start("section", {"title": self.fcm.get_title(ident),
                                 "ident": ".".join(self.fcm.get_pslcode(ident)),
                                 "href": self._make_href(ident)})
            for s in self.fcm.get_children(ident):
                self._recursive_add_index_section(s, tb)
            tb.end("section")
        else:
            tb.start("page", {"href": self._make_href(ident)})
            tb.data(self._make_title(ident))
            tb.end("page")

    def make_node_page(self, ident):
        """Write an index page for section IDENT; IDENT of None produces the
        top level contents page (index.html)."""
        global g_paths
        tb = et.TreeBuilder()
        if ident:
            index_attribs = {"title": self._make_title(ident, True),
                             "ident": ".".join(self.fcm.get_pslcode(ident)),
                             "version": self.versionstring}
            filename = self._make_href(ident)
        else: #contents page
            index_attribs = {"title": "Contents",
                             "version": self.versionstring}
            filename = "index.html"
        tb.start("index", index_attribs)
        if not ident:#inject revisions list link into index.html
            tb.start("page", {"href": "revisions.html"})
            tb.data("Revision list")
            tb.end("page")
        for i in self.fcm.get_children(ident):
            self._recursive_add_index_section(i, tb)
        tb.end("index")
        page_string = subprocess.Popen(["xsltproc", "--nonet", "--novalid", g_paths.xsldir + "index.xsl", "-"],
                                       stdin=subprocess.PIPE, stdout=subprocess.PIPE
                                       ).communicate(et.tostring(tb.close(), "utf-8"))[0]
        page_string = page_string.replace("<!--linkbar-->", self._build_linkbar(ident))
        print "Creating", filename
        of = open(g_paths.html_output + filename, "w")
        of.write(page_string)

    def _build_linkbar(self, ident):
        """Return the xhtml breadcrumb/link bar for IDENT as a utf-8 string.
        IDENT of "REV" gives a bar with no breadcrumb ancestors."""
        title_crop = 30  # max characters of each breadcrumb title
        tb = et.TreeBuilder()
        tb.start("div", {"class": "linkbar"})
        tb.start("p", {})
        if ident: #contents page passes in empty list
            tb.start("a", {"title": "Contents",
                           "href": "index.html"})
            tb.data("Contents")
            tb.end("a")
            if ident == "REV":
                ident_list = []
            else:
                ident_list = self.fcm.get_ancestors(ident)
            for i in ident_list:
                tb.data(" >> ")
                title = self._make_title(i)
                tb.start("a", {"title": title,
                               "href": self._make_href(i)})
                tb.data(title[:title_crop])
                if len(title) > title_crop:
                    tb.data("...")
                tb.end("a")
        else:
            tb.data(u" ")  # keep the paragraph non-empty on the contents page
        tb.end("p")
        tb.start("div", {"class": "otherlinks"})
        tb.start("p", {})
        tb.data(u"| ")
        tb.start("a", {"href": "search.html"})
        tb.data("Search")
        tb.end("a")
        tb.end("p")
        tb.end("div")
        tb.end("div")
        return et.tostring(tb.close(), "utf-8")

    def write_fleet_js(self):
        """Write fleet.js: a javascript object literal mapping fleet data
        from FCOMMeta.get_fleet()."""
        global g_paths
        open(os.path.join(g_paths.js_output, "fleet.js"), "w").write(
            ("var fleet = { \n" +
             ",".join(["'%s':'%s'" % X for X in self.fcm.get_fleet()]) +
             "};\n"))

    def _recursive_add_revision_node(self, tb, ident):
        """Add <rev> entries for every revised DU beneath IDENT to TB,
        grouped under a <section> per leaf section."""
        if self.fcm.get_type(ident) == meta.TYPE_DU:
            code = self.fcm.get_revision_code(ident)
            if code:
                code = code[-1:] # strip E from 2 letter codes
                revs = self.fcm.get_du_revs(ident)
                #only add (R) to revision list if there are some real revision paths in dumdata
                if code != "R" or revs:
                    tb.start("rev", {"code": code,
                                     "duid": ident,
                                     "href": self._make_href(self.fcm.get_parent(ident)),#href is for container
                                     "title": self.fcm.get_title(ident)})
                    for hl in self.fcm.get_du_highlights(ident):
                        # NOTE(review): TreeBuilder.start normally takes an
                        # attrib dict; confirm tb.start("hl") works on this
                        # cElementTree version.
                        tb.start("hl")
                        tb.data(hl)
                        tb.end("hl")
                    tb.end("rev")
        elif (self.fcm.get_type(ident) == meta.TYPE_SECTION and
                self.fcm.get_type(self.fcm.get_children(ident)[0]) != meta.TYPE_SECTION):
            tb.start("section", {"title": self._make_title(ident, True)})
            for c in self.fcm.get_children(ident):
                self._recursive_add_revision_node(tb, c)
            tb.end("section")
        else:
            for c in self.fcm.get_children(ident):
                self._recursive_add_revision_node(tb, c)

    def make_revision_list(self):
        """Write revisions.html listing all revised DUs in document order."""
        global g_paths
        print "Writing revision list"
        tb = et.TreeBuilder()
        tb.start("revisions", {"title": "Revision list",
                               "version": self.versionstring})
        self._recursive_add_revision_node(tb, None)
        tb.end("revisions")
        page_string = subprocess.Popen(["xsltproc", "--nonet", "--novalid", g_paths.xsldir + "revisions.xsl", "-"],
                                       stdin=subprocess.PIPE, stdout=subprocess.PIPE
                                       ).communicate(et.tostring(tb.close(), "utf-8"))[0]
        page_string = page_string.replace("<!--linkbar-->", self._build_linkbar("REV"))
        of = open(g_paths.html_output + "revisions.html", "w")
        of.write(page_string)

    def _make_href(self, ident):
        """Convert IDENT to an href.

        If the ident is of a section that references a page or node
        page, returns the relative filename (e.g. 'DSC.20.html').
        Otherwise returns a link with a hash part
        (e.g. 'GEN.html#duid00014071') which can be used in an <a> tag
        to jump to the section"""
        section = ident
        if self.fcm.get_type(ident) != meta.TYPE_SECTION:
            section = self.fcm.get_parent_section(ident)
        # sections deeper than the chunking depth live inside their ancestor's page
        if self.fcm.get_section_depth(section) > self.chunk_depth:
            section = self.fcm.get_ancestors(section)[self.chunk_depth - 1]
        href = ".".join(self.fcm.get_pslcode(section)) + ".html"
        if ident != section:
            href += "#" + self._make_html_identifier(ident)
        return href

    def _make_html_identifier(self, ident):
        """Creates an identifier suitable for an html id attribute
        from IDENT"""
        node_type = self.fcm.get_type(ident)
        prefixes = {meta.TYPE_DU: "duid",
                    meta.TYPE_DUCONTAINER: "duid",
                    meta.TYPE_GROUP: "gid",
                    meta.TYPE_SECTION: "sid"}
        if node_type == meta.TYPE_SECTION:
            ident = ".".join(self.fcm.get_pslcode(ident))
        return prefixes[node_type] + ident

    def _make_title(self, ident, all_sections=False):
        """Return "PSL: title" for IDENT; with ALL_SECTIONS, return
        "[PSL] title : title : ..." through all ancestor sections."""
        sections = [X for X in self.fcm.get_ancestors(ident) + [ident]
                    if self.fcm.get_type(X) == meta.TYPE_SECTION]
        prefix = ".".join(self.fcm.get_pslcode(sections[-1]))
        if all_sections:
            titleparts = [self.fcm.get_title(X) for X in sections]
            return "[%s] %s" % (prefix, " : ".join(titleparts))
        else:
            return "%s: %s" % (prefix, self.fcm.get_title(ident))

    def _manufacture_cgmtopng(self):
        """Return a closure usable with re.sub that rewrites a matched cgm
        <img> tag into a png <img> (plus optional zoom link) using the
        pre-built image library's image-list.xml."""
        global g_paths
        ili = et.ElementTree(None, g_paths.image_library + "image-list.xml")
        cgm_index = {}
        for el in ili.findall("cgmfile"):
            cgm_index[el.get("href")] = el
        def cgmtopng(matchobj):
            #matchobj for re <img[^>]*></img>
            tag = matchobj.group(0)
            cgm_filename = os.path.basename(re.search('src="([^"]*)"', tag).group(1))
            #check it is a file we can work with
            if cgm_filename[-3:] != "cgm": return tag #not a cgm
            if not cgm_index.has_key(cgm_filename): #is a cgm, but not in library
                print >> self.errorlog, "Warning:", cgm_filename, "not in library"
                return tag
            #pass through class attribute (e.g. class="symbol")
            class_attrib_mo = re.search('class="[^"]*"', tag)
            class_attrib = ""
            if class_attrib_mo:
                class_attrib = class_attrib_mo.group()
            cgm_element = cgm_index[cgm_filename]
            #check md5sum of cgm file
            md5sum = hashlib.md5(file(g_paths.illustrations + cgm_filename).read()).hexdigest()
            if md5sum != cgm_element.get("md5"):
                print >> self.errorlog, "Warning:", cgm_filename, "has incorrect checksum"
            png_elements = cgm_element.findall("pngfile")
            png, pngzoom = None, None
            for p in png_elements:
                if p.get("role") == "xhtml":
                    png = p
                elif p.get("role") == "xhtml.zoom":
                    pngzoom = p
            if png != None:
                png_filename = png.get("href")
                # copy library png to the output tree on first use
                if not os.path.exists(g_paths.image_output + png_filename):
                    shutil.copyfile(g_paths.image_library + png_filename, g_paths.image_output + png_filename)
                width, height = png.get("size").split("x")
                tag = "<img %s src='../images/%s' width='%s' height='%s' alt='png'/>" % (
                    class_attrib,
                    png_filename,
                    width,
                    height)
                if pngzoom != None:
                    pngzoom_filename = pngzoom.get("href")
                    if not os.path.exists(g_paths.image_output + pngzoom_filename):
                        shutil.copyfile(g_paths.image_library + pngzoom_filename, g_paths.image_output + pngzoom_filename)
                    tag += '<p><a class="zoom" href="../images/' + pngzoom_filename + '">Zoom</a></p>'
            return tag
        return cgmtopng
if __name__ == "__main__":
global g_paths
import sys
import meta
if len(sys.argv) != 2:
print "Usage: ", sys.argv[0], "start_file"
sys.exit(1)
g_paths.initialise(*sys.argv + ["."])
fcm = meta.FCOMMeta(True)
f = FCOMFactory(fcm)
print f._make_href("00014071")
print f._make_href("NG01223")
print f._make_href(('GEN',))
print f._make_href(('DSC','21','20'))
|
UTF-8
|
Python
| false | false | 2,012 |
3,564,822,896,422 |
8d52f97595959b862222e69327561f6f702f7296
|
cf1b17cf23518ac7a004bdda196a5edd3768c0b1
|
/polls/urls.py
|
cc0ceb8a758c258b041a6cade3445979e42f21e1
|
[] |
no_license
|
brainwater/django-tutorial
|
https://github.com/brainwater/django-tutorial
|
ecca6fe68052f43c257e3a6b61abc9fdde21fa85
|
86ab2b0d560981aa5aa325c9a125823f40961d59
|
refs/heads/master
| 2020-04-06T19:04:16.173530 | 2013-11-14T13:28:56 | 2013-11-14T13:28:56 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.conf.urls import patterns, url
from polls import views
# URL routes for the polls app.  The generic class-based views take a
# `pk` keyword; the function views take `poll_id`.
urlpatterns = patterns('',
    url(r'^$', views.IndexView.as_view(), name='index'),
    url(r'^(?P<pk>\d+)/$', views.DetailView.as_view(), name='detail'),
    url(r'^(?P<pk>\d+)/results/$', views.ResultsView.as_view(), name='results'),
    url(r'^(?P<poll_id>\d+)/vote/$', views.vote, name='vote'),
    url(r'^(?P<poll_id>\d+)/like/$', views.like, name='like'),
    url(r'^(?P<poll_id>\d+)/likes/$', views.likes, name='likes'),
    url(r'^(?P<poll_id>\d+)/popularity/$', views.popularity, name='popularity'),
    # syndication feed of the most popular polls
    url(r'^popular$', views.MostPopularFeed(), name='popular'),
)
|
UTF-8
|
Python
| false | false | 2,013 |
4,320,737,125,370 |
4c9c0f686451ddfbdf2ca70e7e15b5ed976c45fa
|
e95f9bd4346957ee083328ba245700bea56983d9
|
/acgweb/controller/duty.py
|
10143f0415b492308ecf9816920a96e0ce3ab916
|
[] |
no_license
|
camelshang/acgweb
|
https://github.com/camelshang/acgweb
|
0499feff84cbbc88eaec046cc0bf88112e0924e3
|
7db1e9d62e3c2da1a4c88e24b3db59e984ce2897
|
refs/heads/master
| 2017-10-07T12:13:12.243312 | 2014-09-14T14:30:04 | 2014-09-14T14:30:04 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# coding: utf-8
import time
from flask import render_template, json, abort, flash, make_response
from acgweb import app, db
from acgweb.model.activity import Activity
from acgweb.model.duty import Duty
from acgweb.form.duty import DutyForm
from decorated_function import *
import acgweb.const as CONST
@app.route('/dutylist-p<int:pagenum>')
@app.route('/dutylist')
@login_required
def dutylist(pagenum=1):
    """Page: paginated list of finished activities, mobile or desktop layout."""
    finished = Activity.query.filter(Activity.end_time != 0)
    activity_count = finished.count()
    activity_list = (finished.order_by('start_time DESC')
                             .limit(CONST.duty_per_page)
                             .offset(CONST.duty_per_page * (pagenum - 1)))
    # mobile clients get a dedicated template; everything else is identical
    template = 'duty/dutylist_mobile.html' if viewtype() == 1 else 'duty/dutylist.html'
    return render_template(template,
                           activity_list=activity_list,
                           page_count=(activity_count - 1) / CONST.duty_per_page + 1,
                           page_current=pagenum)
@app.route('/api/dutylist')
#@login_required
def dutylistapi():
    """JSON API: the 50 most recent finished activities with their duty logs.

    The response carries a wildcard CORS header so external pages can read it.
    """
    activity_list = Activity.query.filter(Activity.end_time != 0).order_by('start_time DESC').limit(50)
    res = []
    for activity in activity_list:
        entry = {
            'id': activity.id,
            'title': activity.title,
            'start_time': activity.start_time,
            'work_start_time': activity.work_start_time,
            'venue': activity.venue,
            'end_time': activity.end_time,
            'logs': [],
        }
        for duty in activity.duties:
            for log in duty.getlogs():
                entry['logs'].append({'uid': duty.member.uid,
                                      'name': duty.member.name,
                                      'type': log['type'],
                                      'content': log['content']})
        res.append(entry)
    resp = make_response(json.dumps(res))
    resp.headers['Access-Control-Allow-Origin'] = '*'
    return resp
@app.route('/dutymanage-p<int:pagenum>')
@app.route('/dutymanage')
@login_required
def dutymanage(pagenum=1):
    """Page: paginated management view of duties starting within the last
    three days or later."""
    now = time.localtime()
    midnight = int(time.time()) - now.tm_hour * 3600 - now.tm_min * 60 - now.tm_sec
    cutoff = midnight - 3 * 86400
    recent = Duty.query.join(Activity).filter(Activity.start_time >= cutoff)
    duty_count = recent.count()
    duty_list = (recent.order_by(Activity.start_time)
                       .limit(CONST.dutylist_per_page)
                       .offset(CONST.dutylist_per_page * (pagenum - 1)))
    return render_template('duty/dutymanage.html',
                           duty_list=duty_list,
                           page_count=(duty_count - 1) / CONST.dutylist_per_page + 1,
                           page_current=pagenum)
@app.route('/dutyedit-<int:duty_id>', methods=['GET', 'POST'])
@login_required
def dutyedit(duty_id):
    """Edit one duty record: GET renders the form, POST saves changes.
    Only arrangement monitors may view or save (403 otherwise)."""
    if request.method == 'POST':
        form = DutyForm(request.form)
        # the record to update comes from the form's own id field, not the URL
        duty = Duty.query.get(form.id.data)
        if form.validate_on_submit():
            if not form.errors:
                pass#form.username.errors.append('帐号已存在')
        #print form.errors
        if not form.errors:
            if not session.get('is_arra_monitor'):
                abort(403)
            duty.aid = form.aid.data
            duty.uid = form.uid.data
            duty.status = form.status.data
            #duty.process=form.process.data
            #duty.log=form.log.data
            db.session.add(duty)
            db.session.commit()
            flash({'type': 'success', 'content': '保存成功!'})
            # redirect-after-post back to the edit page
            return redirect(url_for('dutyedit', duty_id=duty_id))
        # validation failed: re-render the form with its errors
        return render_template('duty/dutyedit.html', form=form, duty=duty)
    else:
        duty = Duty.query.get_or_404(duty_id)
        if not session.get('is_arra_monitor'):
            abort(403)
        #print duty
        form = DutyForm(obj=duty)
        #print form
        return render_template('duty/dutyedit.html', form=form, duty=duty)
@app.route('/dutydelete-<int:duty_id>')
@login_required
def dutydelete(duty_id):
    """Delete one duty record; restricted to arrangement monitors (403
    otherwise).

    Uses get_or_404 (consistent with dutyedit's GET path) so an unknown
    duty_id yields a 404 instead of a server error from deleting None.
    The permission check runs before the database fetch.
    """
    if not session.get('is_arra_monitor'):
        abort(403)
    duty = Duty.query.get_or_404(duty_id)
    db.session.delete(duty)
    db.session.commit()
    flash({'type': 'success', 'content': '删除成功!'})
    return redirect('/dutymanage')
|
UTF-8
|
Python
| false | false | 2,014 |
11,407,433,155,998 |
8c99d28308590f5f3fc778ec39bd6515becd4491
|
39c0559311cb3e1396b32abdb590a39f800defcd
|
/fbpy/views.py
|
dbcdad1abfd285a2a512308456ee3f417ae72860
|
[] |
no_license
|
mnive93/django-facebookconnect
|
https://github.com/mnive93/django-facebookconnect
|
127b63f7eff14adca7e5fe091bb121c9a900d745
|
ff93b338231fcf7e87d63b8190db169e34a44d9f
|
refs/heads/master
| 2021-01-23T15:03:22.272873 | 2013-06-01T11:18:04 | 2013-06-01T11:18:04 | 10,418,696 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from pyfb import Pyfb
from django.http import HttpResponse, HttpResponseRedirect
from settings import FACEBOOK_APP_ID, FACEBOOK_APP_SECRET, FACEBOOK_REDIRECT_URL
from django.shortcuts import render_to_response
def index(request):
    """Render the landing page, exposing the Facebook app id to the template."""
    context = {"FACEBOOK_APP_ID": FACEBOOK_APP_ID}
    return render_to_response("index.html", context)
def facebook_login(request):
    """Redirect the visitor to Facebook's OAuth authorization dialog."""
    fb = Pyfb(FACEBOOK_APP_ID)
    auth_url = fb.get_auth_code_url(redirect_uri=FACEBOOK_REDIRECT_URL)
    return HttpResponseRedirect(auth_url)
def facebook_login_success(request):
    """OAuth callback: swap the auth code for an access token and greet
    the logged-in user with their name and email."""
    fb = Pyfb(FACEBOOK_APP_ID)
    auth_code = request.GET.get('code')
    fb.get_access_token(FACEBOOK_APP_SECRET, auth_code, redirect_uri=FACEBOOK_REDIRECT_URL)
    me = fb.get_myself()
    welcome = "Welcome %s. Your Facebook login has been completed successfully! Is this your email address? %s"
    return HttpResponse(welcome % (me.name, me.email))
|
UTF-8
|
Python
| false | false | 2,013 |
4,956,392,289,835 |
e241a8b0bb4769c397f080409839f04922dbc799
|
6741609c1bb79f914ce26410d769ce9136f6ed77
|
/emulator.py
|
8647707a6d0bdb3d9da25e5b8049283ce73f008b
|
[] |
no_license
|
cyboflash/pychip8
|
https://github.com/cyboflash/pychip8
|
77a36e8ce104073d3e158f3c13f839d4a12d3834
|
dd106f3f04f3b19e029c4329b4d2bcd91742a047
|
refs/heads/master
| 2021-01-21T12:07:26.859507 | 2012-12-19T15:25:08 | 2012-12-19T15:25:08 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import chip8
import pygame
import random
def main():
    """Run the CHIP-8 emulator: load a ROM, then drive a pygame loop that
    steps the CPU, feeds key events to it, and redraws when asked."""
    chip = chip8.Cpu()
    chip.load_app('pong2.c8')
    # I - Initialize.
    pygame.init()
    # D - Display.  640x320 window = 10x scale of the 64x32 CHIP-8 screen.
    display = pygame.display.set_mode((640, 320))
    # E - Entities.
    background = pygame.Surface(display.get_size())
    background.fill((0, 0, 0))
    background = background.convert()
    all_sprites = pygame.sprite.Group()
    # One sprite per CHIP-8 pixel, sharing the cpu's gfx buffer.
    # NOTE(review): `Block` is not defined or imported in this file --
    # confirm where it is expected to come from, else this raises NameError.
    for row in range(chip.rows):
        for col in range(chip.cols):
            all_sprites.add(Block(row, col, chip.gfx))
    # A - Action.
    clock = pygame.time.Clock()
    keep_going = True
    # A - Assign values.
    # L - Loop.
    while keep_going:
        # T - Timing.
        clock.tick(60) # 60 Frames per second.
        chip.emulate_cycle()
        # E - Events.
        # NOTE(review): `press_key` is also undefined in this file -- verify.
        for event in pygame.event.get():
            if pygame.QUIT == event.type:
                keep_going = False
            elif pygame.KEYDOWN == event.type:
                press_key(event.key, chip.keyboard, True)
            elif pygame.KEYUP == event.type:
                press_key(event.key, chip.keyboard, False)
        # R - Refresh display.
        if chip.draw_flag:
            all_sprites.clear(display, background)
            all_sprites.update()
            all_sprites.draw(display)
            pygame.display.flip()

if '__main__' == __name__:
    main()
|
UTF-8
|
Python
| false | false | 2,012 |
3,453,153,717,616 |
77e956070b3ee8872b976b01aee88265ac96ee90
|
898e45d3e49c210ba0ab27860b65aa9ce3dec4bc
|
/assignment1/frequency.py
|
8ebe18feb7349af0ffaaa8ee1bef5eca2a41ca59
|
[
"ODbL-1.0"
] |
non_permissive
|
jmarrietar/datasci_course_materials
|
https://github.com/jmarrietar/datasci_course_materials
|
50c95debb090c8403c10deca36a0b79160da59e0
|
6d8fef0e52974f8dd688400c3175142e00349b5f
|
refs/heads/master
| 2020-02-01T22:48:29.663983 | 2014-07-30T19:22:24 | 2014-07-30T19:22:24 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import sys
import json
def main():
json_data=open(sys.argv[1])
dict = {} #Crear nuevo directorio de palabras.
for line in json_data:
data = json.loads(str(line))
if "text" in data:
texto=data["text"]
encoded_texto=texto.encode('utf-8')
Lista_con=encoded_texto.split()
for palabra in Lista_con:
if palabra in dict:
dict[palabra]= 1+ dict[palabra]
else:
dict[palabra]= 1
for key in dict:
print key + ' '+ str(dict[key])
if __name__ == '__main__':
main()
|
UTF-8
|
Python
| false | false | 2,014 |
14,980,845,958,094 |
a1adf2a770a2c93257af40adc4dee45fb71d0c6b
|
7e1febab83c66467e61645babccaa0a889fcefa3
|
/005/005.py
|
f3c272e9e9586785c72ba5eb5b9026d40091b381
|
[
"BSD-3-Clause"
] |
permissive
|
jaredks/euler
|
https://github.com/jaredks/euler
|
17521b28487590bcc811c3fec97ac0a8720f02c3
|
b90f0d0c8308b26fd282b9807bfd0d6ed0dd88f4
|
refs/heads/master
| 2021-01-15T14:23:27.162299 | 2013-09-04T19:06:50 | 2013-09-04T19:06:50 | 12,514,250 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from euler import lcm
print lcm(*range(1, 21))
|
UTF-8
|
Python
| false | false | 2,013 |
9,397,388,475,116 |
df9b01c97f97b9a34e8524d7246a0171a3b099e9
|
5f4496d372702dba052fd360f1c96b6c62249344
|
/anynumbervotingsystem.py
|
fa6a8d6f502fa4df074d9277c910e7e9d486928d
|
[] |
no_license
|
brycefurlong/anyNumberVotingSystem
|
https://github.com/brycefurlong/anyNumberVotingSystem
|
a5bd5596f9338336405b218118b35e2de6ae033d
|
84fca7ddf3ff0cad5654cbd06bf6dcd460493462
|
refs/heads/master
| 2018-01-07T16:59:36.888529 | 2014-09-03T01:29:27 | 2014-09-03T01:29:27 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# any number voting system
from __future__ import division
import random
from operator import itemgetter
import multiprocessing
from itertools import permutations, product
import numpy
import scipy.stats
from scipy import optimize
from multiprocessing import Pool, cpu_count
from functools import partial
def mean_confidence_interval(data, mean=None, confidence=0.95):
    """Return (mean, lower, upper): a Student-t confidence interval for DATA.

    data       -- sequence of numbers.
    mean       -- optional precomputed mean; computed from DATA when None.
    confidence -- two-sided confidence level (default 0.95).
    """
    a = 1.0 * numpy.array(data)
    n = len(a)
    # bug fix: `if not mean:` recomputed the mean whenever the caller
    # passed mean=0; test for None explicitly instead
    if mean is None:
        mean = numpy.mean(a)
    se = scipy.stats.sem(a)
    # half-width from the t distribution with n-1 degrees of freedom
    h = se * scipy.stats.t.ppf((1 + confidence) / 2., n - 1)
    return mean, mean - h, mean + h
# Module-wide logger that survives use from worker processes.
logger = multiprocessing.log_to_stderr()
# Memoizes normalized ballots computed by process_ballots_ANVSP / get_random_ballot.
ANVSPCache = {}
# Default tuning parameters for the ANVSP normalization:
# p/p2/p3 are the powers of the three deviation terms, s/s2 the mixing weights.
p = 1.5
p2 = 1.8
p3 = 1.8
s = 0.45
s2 = 0.65
class RunningAverage():
    """Incrementally maintained weighted arithmetic mean.

    Calling the instance folds a new value into the average; comparison
    operators order instances by their current average.
    """
    def __init__(self):
        self.count = 0       # total weight folded in so far
        self.average = 0.0   # current weighted mean
        self.epsilon = None  # absolute change caused by the most recent update
    def __call__(self, n, weight=1):
        """Fold value N (with optional WEIGHT) into the average and return it."""
        previous_average = self.average
        self.average = (self.average * self.count + n * weight) / float(self.count + weight)
        self.count += weight
        self.epsilon = abs(self.average - previous_average)
        return self.average
    def __str__(self):
        return ''.join(['Average: ', str(self.average), ' Count: ', str(self.count)])
    def __gt__(self, ra2):
        return self.average > ra2.average
    def __ge__(self, ra2):
        return self.average >= ra2.average
    def __lt__(self, ra2):
        return self.average < ra2.average
    def __le__(self, ra2):
        return self.average <= ra2.average
    def __eq__(self, ra2):
        return self.average == ra2.average
    def __ne__(self, ra2):
        # bug fix: under Python 2, defining __eq__ alone leaves `!=`
        # comparing by identity; define the complement explicitly
        return self.average != ra2.average
def get_random_ballot(number_of_candidates, voting_method):
    """Return a shuffled random ballot containing the extremes 0 and 100
    plus random scores, priming ANVSPCache with VOTING_METHOD's result."""
    ballot = [0, 100]
    for _ in range(number_of_candidates - 2):
        ballot.append(random.randint(0, 100))
    random.shuffle(ballot)
    global ANVSPCache
    ANVSPCache[tuple(ballot)] = voting_method([ballot], len(ballot))
    return ballot
# process ballots using Range Voting System
def process_ballots_range(ballots, breadth):
results = []
for i in range(0, breadth):
column = [ballot[i] for ballot in ballots]
results.append(sum(column) / len(column))
return results
# process ballots using Any Number Voting System
def process_ballots_ANVS(ballots, breadth):
for i in range(0, len(ballots)):
midrange = (max(ballots[i]) + min(ballots[i])) / 2
divisor = 0
for score in ballots[i]:
divisor = divisor + abs(score - midrange)
if divisor != 0:
#scale the ballot
ballots[i] = [score / divisor for score in ballots[i]]
results = []
for i in range(0, breadth):
column = [ballot[i] for ballot in ballots]
results.append(sum(column) / len(column))
return results
# process ballots using Any Number Voting System -- distance from mean
def process_ballots_ANVSM(ballots, breadth):
    """ANVS variant that normalizes each ballot by the total absolute
    distance from its mean (instead of its midrange), in place, and then
    averages per candidate."""
    for idx, ballot in enumerate(ballots):
        mean = sum(ballot) / len(ballot)
        spread = sum(abs(score - mean) for score in ballot)
        if spread != 0:
            # scale the ballot
            ballots[idx] = [score / spread for score in ballot]
    return [sum(ballot[i] for ballot in ballots) / len(ballots)
            for i in range(breadth)]
# process ballots using Any Number Voting System Plus
class process_ballots_ANVSP():
    """Callable ANVSP tabulator.

    Each ballot is divided by a blend of three generalized deviations
    (exponents p/p2/p3, blend weights s/s2) before column-averaging.
    Normalized ballots are cached per call (ballot_cache) and across calls
    (module-level ANVSPCache).

    NOTE(review): the __init__ defaults capture the module-level constants
    p, p2, p3, s, s2 at class-definition time.
    """
    def __init__(self, p=p, p2=p2, s=s, p3=p3, s2=s2):
        self.p = p
        self.p2 = p2
        self.s = s
        self.p3 = p3
        self.s2 = s2
    def __call__(self, ballots, breadth, verbose=False):
        """Tabulate *ballots* (mutated in place) into per-candidate means."""
        global ANVSPCache
        # Bind parameters to locals for the formulas below.
        p = self.p
        p2 = self.p2
        s = self.s
        p3 = self.p3
        s2 = self.s2
        ballot_cache = {}
        for i in range(0, len(ballots)):
            # Reuse an already-normalized form of an identical ballot if seen.
            if tuple(ballots[i]) in ballot_cache:
                ballots[i] = ballot_cache[tuple(ballots[i])]
            elif tuple(ballots[i]) in ANVSPCache:
                ballots[i] = ANVSPCache[tuple(ballots[i])]
            else:
                mean = sum(ballots[i]) / len(ballots[i])
                # deviation is standard deviation with custom power/root
                deviation = pow(sum([abs(x - mean) ** p for x in ballots[i]]) / len(ballots[i]), 1 / p)
                # Analogous deviations measured from the ballot max and min.
                max_deviation = pow(sum([(max(ballots[i]) - x) ** p2 for x in ballots[i]]) / len(ballots[i]), 1 / p2)
                min_deviation = pow(sum([(x - min(ballots[i])) ** p3 for x in ballots[i]]) / len(ballots[i]), 1 / p3)
                # Weighted geometric blend of the three deviations.
                divisor = (deviation ** s) * (max_deviation ** s2) * (min_deviation ** (1 - s - s2))
                if divisor != 0:
                    #scale the ballot
                    temp = [score / divisor for score in ballots[i]]
                    ballot_cache[tuple(ballots[i])] = temp
                    if verbose:
                        print ballots[i], '=>', temp
                    ballots[i] = temp
        # Column-average the (normalized) ballots per candidate.
        results = []
        for i in range(0, breadth):
            column = [ballot[i] for ballot in ballots]
            results.append(sum(column) / len(column))
        if verbose:
            print results
        return results
def have_election(voting_function, ballots, candidates):
    """Tabulate *ballots* with *voting_function* and return
    (score, candidate) pairs sorted by score, highest first.

    Returns None (after printing a warning) when any ballot's length
    doesn't match the number of candidates.
    """
    breadth = len(candidates)
    for ballot in ballots:
        if len(ballot) != breadth:
            print 'INVALID BALLOT: ', ballot
            return
    results = voting_function(ballots, breadth)
    score_candidate_pairs = [(results[i], candidates[i]) for i in range(0, breadth)]
    # sort by score descending
    score_candidate_pairs = sorted(score_candidate_pairs, key=itemgetter(0), reverse=True)
    return score_candidate_pairs
def vote_permuations(minimum, maximum, step=1):
    """Yield every ordering of [minimum, middle, maximum] for each middle
    value on the given grid (duplicates included when values coincide)."""
    for middle in range(minimum, maximum + 1, step):
        for ordering in permutations([minimum, middle, maximum]):
            yield ordering
def enumerate_to_best_strategy(middle_utility, voting_func=process_ballots_ANVSP(), number_candidates=3,
                               number_opponents=2,
                               min_vote=0, max_vote=100, step=10):
    """Deterministic alternative to the Monte-Carlo search: exhaustively
    enumerate opponent ballots (on a *step*-spaced grid) and return the
    middle-candidate vote that maximizes the voter's expected utility.
    Quicker to reason about, though the coarse grid likely sacrifices
    precision for speed.
    """
    print 'testing', middle_utility
    candidates = [str(i) for i in range(number_candidates)]
    # Voter's true utilities: worst, middle, best candidate.
    utilities = [min_vote, middle_utility, max_vote]
    utility_map = dict(zip(candidates, utilities))
    best_utility = min(utilities)
    best_vote = None
    # Try every possible vote for the middle candidate.
    for middle_vote in range(min_vote, max_vote + 1):
        avg_utility = RunningAverage()
        # Enumerate all combinations of opponent ballots on the grid.
        for item in product(vote_permuations(min_vote, max_vote, step), repeat=number_opponents):
            ballots = list(item)
            ballots.append([min_vote, middle_vote, max_vote])
            scpairs = have_election(voting_func, ballots, candidates)
            # Utility realized is that of the winning candidate.
            utility = utility_map[scpairs[0][1]]
            avg_utility(utility)
        if avg_utility.average > best_utility:
            best_utility = avg_utility.average
            best_vote = middle_vote
    print middle_utility, ':', best_vote
    return best_vote
# Run this multiple times using different ballots (same everything else) to compare voting strategies empirically
def get_average_utility(voting_function, iterations, strategy_ballots, utility_map, candidates, number_opponents,
opponents_same=False, random_ballot_func=get_random_ballot):
"""
Get average utility of a number of competing strategies while facing randomly voting opponents. The random opponent
votes are the same across each voting strategy (like duplicate bridge). Thsi can be used to empirically determine
the best strategy among the ones passed in.
:param voting_function:
:param iterations:
:param strategy_ballots:
These represent the different possible votes the protagonist could cast.
They will fight the same opponent votes.
:param utility_map:
:param candidates:
:param number_opponents:
:param opponents_same:
:param random_ballot_func:
:return: A list of average utilities corresponding to the list of strategy_ballots inputted.
"""
breadth = len(candidates)
averages = [RunningAverage() for _ in strategy_ballots]
for _ in range(iterations):
opponent_ballots = []
if not opponents_same:
for _2 in range(number_opponents):
opponent_ballots.append(random_ballot_func(breadth, voting_function))
else:
opponent_ballot = random_ballot_func(breadth, voting_function)
for _2 in range(number_opponents):
opponent_ballots.append(opponent_ballot)
for i, ballot in enumerate(strategy_ballots):
scpairs = have_election(voting_function, opponent_ballots + [ballot], candidates)
utility = utility_map[scpairs[0][1]]
averages[i](utility)
return [x.average for x in averages]
def get_optimal_strategy(middle_utility, voting_func=process_ballots_ANVSP(), number_candidates=3,
number_opponents=2, min_vote=0, max_vote=100, step=1, iterations_per_round=50,
starting_confidence_interval=0.90, max_rounds=2000, verbose=True):
"""
:param middle_utility: Value between min_vote and max_vote representing the second largest utility
:param voting_func:
:param number_candidates: Currently only works with 3
:param number_opponents:
:param min_vote:
:param max_vote:
:param step:
:param starting_confidence_interval: When the higher mean confidence interval of a strategy falls behind the leading average,
the strategy gets dropped, since it is highly unlikely to make a comeback
:return: returns an error value representing how far the voting function is from ideal
"""
class Strategy:
def __init__(self, strategy_ballot):
self.ballot = strategy_ballot
self.data = []
self._ra = RunningAverage()
def add_value(self, value):
self.data.append(value)
self._ra(value)
return self._ra.average
def get_average(self):
return self._ra.average
def viable(self, confidence, highest_average):
if len(self.data) < 3:
return True
high_confidence_interval = mean_confidence_interval(self.data, self.get_average(), confidence)[2]
return high_confidence_interval > highest_average
def __cmp__(self, other):
return cmp(self.get_average(), other.get_average())
def __str__(self):
return str(self.ballot) + ' | ' + str(self.get_average())
def __hash__(self):
return hash(self.ballot)
candidates = [str(i) for i in range(number_candidates)]
utilities = [min_vote, middle_utility, max_vote]
utility_map = dict(zip(candidates, utilities))
strategy_ballots = [(0, j, 100) for j in range(min_vote, max_vote+1, step)]
strategies = [Strategy(ballot) for ballot in strategy_ballots]
# averages = [RunningAverage() for _ in strategy_ballots]
# data = [[] for _ in strategy_ballots]
rounds = 0
confidence_drop_per_round = starting_confidence_interval / max_rounds
while len(strategy_ballots) > 1 and rounds < max_rounds:
current_confidence = starting_confidence_interval - confidence_drop_per_round*rounds
values = get_average_utility(voting_func, iterations_per_round, [s.ballot for s in strategies], utility_map, candidates, number_opponents)
for i, value in enumerate(values):
strategies[i].add_value(value)
if round > 0:
highest_average = max(strategies).get_average()
to_drop = {strategy for strategy in strategies if not strategy.viable(current_confidence, highest_average) and strategy.ballot[1] != middle_utility}
strategies[:] = [strategy for strategy in strategies if strategy not in to_drop]
if verbose:
for strategy in to_drop:
print 'dropped', strategy.ballot
rounds += 1
print 'OPTIMAL STRATEGY FOUND:', [str(strategy) for strategy in strategies]
if len(strategies) == 1:
assert strategies[0].ballot[1] == middle_utility
return 0
else:
honest_strat = next(x for x in strategies if x.ballot[1] == middle_utility)
strategies.remove(honest_strat)
other_strat = strategies[0]
distance = abs(honest_strat.ballot[1] - other_strat.ballot[1])
utility_difference = other_strat.get_average() - honest_strat.get_average()
if utility_difference < 0:
print 'weird thing'
return 0
else:
return (distance ** 0.5) * (utility_difference ** 0.5)
def optimize_me(params):
    """Objective for the basinhopping search: mean strategy-distortion error
    of ANVSP configured with (p, p2, p3, s, s2), over a grid of middle
    utilities, evaluated in parallel worker processes."""
    p, p2, p3, s, s2 = params
    mid_utilities = [0, 9, 18, 27, 36, 45, 54, 63, 72, 81, 90, 99]
    workers = cpu_count() - 1 if cpu_count() > 2 else 2
    pool = Pool(processes=workers)
    error_func = partial(get_optimal_strategy,
                         voting_func=process_ballots_ANVSP(p=p, p2=p2, s=s, p3=p3, s2=s2),
                         verbose=False)
    return numpy.mean(pool.map(error_func, mid_utilities))
if __name__ == '__main__':
    import timeit
    # Initial guess for the ANVSP parameters being optimized.
    p = 1.5
    p2 = 1.8
    p3 = 1.8
    s = 0.45
    s2 = 0.65
    class MyTakeStep(object):
        """Custom basinhopping step: larger random moves for the three
        exponent parameters than for the two blend weights."""
        def __init__(self, stepsize=0.5):
            self.stepsize = stepsize
        def __call__(self, x):
            s = self.stepsize
            x[0:3] += numpy.random.uniform(-2.5*s, 2.5*s)
            x[3:] += numpy.random.uniform(-s, s)
            return x
    class MyBounds(object):
        """Accept-test for basinhopping: reject candidates outside the box."""
        def __init__(self, bounds):
            self.bounds = bounds
        def __call__(self, **kwargs):
            x = kwargs["x_new"]
            for i in range(len(x)):
                if x[i] < self.bounds[i][0] or x[i] > self.bounds[i][1]:
                    return False
            return True
    start_time = timeit.default_timer()
    # ret_val = get_optimal_strategy(50)
    mytakestep = MyTakeStep()
    # Bounds per parameter: exponents non-negative, weights unconstrained in sign.
    mybounds = MyBounds(((0, 10000), (0, 10000), (0, 10000), (-10000, 10000), (-10000, 10000)))
    ret_val = optimize.basinhopping(optimize_me, [p, p2, p3, s, s2], accept_test=mybounds, take_step=mytakestep)
    elapsed = timeit.default_timer() - start_time
    print 'elapsed', elapsed
    print str(ret_val)
|
UTF-8
|
Python
| false | false | 2,014 |
18,803,366,830,308 |
cd2eef1ffeca0f127dcfd94bd459c55fdc412600
|
92a9306148db664edc2151b00f9e180f35d00a01
|
/burndown.py
|
daf49392204950ead48b22b6a744a3c79ea8ea8e
|
[] |
no_license
|
qmagico/assembla-burndown
|
https://github.com/qmagico/assembla-burndown
|
095a84dc58e0a87b19bbdcf0a198943e66cab8c5
|
e685312d832129cb772ae68a48fe139828297261
|
refs/heads/master
| 2020-04-05T22:56:26.472723 | 2013-09-16T18:05:40 | 2013-09-16T18:05:40 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# coding: utf-8
# Export an Assembla milestone's tickets and logged tasks to burndown.csv.
import os
from datetime import datetime
from assembla.api import API
# Credentials come from the environment; API calls fail later if unset.
api = API(
    key=os.getenv('ASSEMBLA_KEY'),
    secret=os.getenv('ASSEMBLA_SECRET')
)
# Work inside the first space returned for this account.
aulasdovaca = api.spaces()[0]
api.space = aulasdovaca
milestones = api.milestones(space_id=aulasdovaca.id)
milestone = None
# Pick the first milestone with planner_type == 2.
# NOTE(review): the meaning of 2 is assumed from usage here -- confirm
# against the Assembla API documentation.
for ms in milestones:
    if ms.planner_type == 2:
        milestone = ms
        break
if milestone is None:
    raise TypeError('There is no default milestone!')
# burndown {
tickets = api.tickets(
    space_id=aulasdovaca.id,
    milestone_id=milestone.id
)
burndown_header = [
    '"Ticket number"',
    '"Completed date"',
    '"Sprint estimate"',
    '"Total working hours"',
    '"Team"',
    '"Assigned to"',
    '"Ticket extra"',
    '"Total estimate"',
    '"Finalizar no sprint"',
    '"Status"\n'
]
burndown_body = []
for ticket in tickets:
    assigned_to = '-'
    if ticket.assigned_to_id is not None:
        # Drop non-ASCII characters from user names for the CSV output.
        assigned_to = api.user(id=ticket.assigned_to_id).name.encode('ascii', 'ignore')
    burndown_body.append([
        str(ticket.number),
        str(ticket.completed_date or '-'),
        str(ticket.total_estimate),
        str(ticket.total_working_hours),
        '"%s"' % ticket.custom_fields['Equipe'],
        '"%s"' % assigned_to,
        '"%s"' % ticket.custom_fields['Ticket extra?'],
        '"%s"' % ticket.custom_fields['Estimativa inicial para acabar'],
        '"%s"' % ticket.custom_fields['Finaliza no sprint?'],
        '"%s"\n' % ticket.status
    ])
# sorting by team
burndown_body.sort(key=lambda x: x[4])
# }
# tasks/worked hours {
tasks_header = [
    'Ticket',
    'User',
    'Worked hours',
    'Created at'
]
# Report window: milestone creation through now.
_from = milestone.created_at
to = datetime.now()
tasks = api.tasks(
    _from=_from.strftime('%d-%m-%Y'),
    to=to.strftime('%d-%m-%Y')
)
tasks_body = []
for task in tasks:
    ticket_number = '-'
    if task.ticket_id:
        ticket_number = api.ticket(space_id=aulasdovaca.id, id=task.ticket_id).number
    tasks_body.append([
        str(ticket_number),
        api.user(id=task.user_id).name.encode('ascii', 'ignore'),
        str(task.hours),
        str(task.created_at) + '\n'
    ])
# }
# writing file
with open('burndown.csv', 'w') as out:
    out.write(','.join(burndown_header))
    for line in burndown_body:
        out.write(','.join([l.encode('ascii', 'ignore') for l in line]))
    out.write('\n\n')
    out.write(','.join(tasks_header) + '\n')
    for line in tasks_body:
        out.write(','.join([l.encode('ascii', 'ignore') for l in line]))
|
UTF-8
|
Python
| false | false | 2,013 |
16,724,602,664,608 |
5dd0b13dabe06f87e8b0901a12da030af195e596
|
f2b001035b39513574fdb00833ac7f8a9b5a701c
|
/Stripper.py
|
3141c3a9cc3f30e524a2b857ce230b12e6da7f66
|
[] |
no_license
|
eWizardII/homobabel
|
https://github.com/eWizardII/homobabel
|
ad3716579a0002ae9e3fc3c0e2b0abee2fccc037
|
8887e5f2bff072796cdbf1628a849d0d0a09be24
|
refs/heads/master
| 2021-01-18T13:45:45.706399 | 2011-01-10T13:47:57 | 2011-01-10T13:47:57 | 1,128,035 | 3 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import time
import json
import string
import unicodedata
import os
def filetxt():
    """Scan cached Twitter JSON files, build per-word statistics, and write
    a MATLAB-friendly table (matlab.txt) plus a Pajek-style word graph
    (output.txt)."""
    # Per-word accumulators.
    word_freq = {}
    word_occ = {}
    word_supp = {}
    word_stat = {}
    word_ver = {}
    word_users_behind_word = {}
    # Edge endpoints (word-index pairs) for the adjacency output.
    lvl1 = []
    lvl2 = []
    total_t = 0
    users = 0
    ver_v = []
    ret_v = []
    text = []
    foll = []
    stat = []
    word_list = []
    path="C:\Twitter\json_"
    dirList=os.listdir(path)
    print "Directory Listing Complete"
    errors = 0
    for fname in dirList:
        # Open File
        fullfname = "C:/Twitter/json_/" + str(fname)
        try:
            with open(str(fullfname), "r") as f:
                text_f = json.load(f)
            stat_v = text_f['0']['statuses_count']
            foll_v = text_f['0']['followers_count']
            # Only count users with more than 5 statuses and 10 followers.
            if (stat_v > 5) and (foll_v > 10):
                users = users + 1
                for i in range(len(text_f)):
                    text.append(text_f[str(i)]['text'])
                    # The API reports very high retweet counts as '100+'.
                    if ((text_f[str(i)]['retweet_count'] == '100+') == 1):
                        ret_v.append('100')
                    else:
                        ret_v.append(text_f[str(i)]['retweet_count'])
                    if ((text_f[str(i)]['verified'] == 'true') == 1):
                        ver_v.append('1')
                    else:
                        ver_v.append('0')
                    stat.append(stat_v)
                    foll.append(foll_v)
                    total_t = total_t + 1
            else:
                pass
        except:
            # NOTE(review): 'errors += errors' never increments (stays 0);
            # this almost certainly should be 'errors += 1'.  The bare
            # except also hides the cause of any failure.
            errors += errors
            pass
    print "Running Filter"
    # Filter
    occ = 0
    ver = 0
    import string
    for i in range(len(text)):
        s = text[i] # Sample string
        ret_t = ret_v[i]
        # "Occurrences": RT markers + mentions + retweet count.
        occ_t = s.count('RT') + s.count('@') + int(ret_t)
        supp_t = foll[i]
        stat_t = stat[i]
        ver_t = int(ver_v[i])
        ver += ver_t
        occ += occ_t
        s = s.encode('utf-8')
        # NOTE(review): 'out' is computed but never used afterwards.
        out = s.translate(string.maketrans("",""), string.punctuation)
        # Create Wordlist/Dictionary
        word_lists = text[i].lower().split(None)
        for word in word_lists:
            # Frequency of Word
            word_freq[word] = word_freq.get(word, 0) + 1
            # Support for Word
            word_occ[word] = word_occ.get(word, 0) + occ_t
            # Followers using this Word
            word_supp[word] = word_supp.get(word, 0) + supp_t
            # Statuses containing this Word
            word_stat[word] = word_stat.get(word, 0) + stat_t
            # Verified users of Word
            word_ver[word] = word_ver.get(word,0) + int(ver_t)
            # Users who are using this Word
            word_users_behind_word[word] = word_users_behind_word.get(word, 0) + 1
    print "Running Analysis"
    # Map each distinct word to a 1-based index (as a string) by round-
    # tripping the range through str() -- fragile but functional.
    keys = word_freq.keys()
    numbo = range(1,len(keys)+1)
    NList = str(numbo).strip('[]')
    WList = list(keys)
    NList = NList.split(", ")
    W2N = dict(zip(WList, NList))
    for i in range(len(text)):
        word_list = text[i].lower().split(None)
        for k in range (0,len(word_list)):
            word_list[k] = W2N[word_list[k]]
        # NOTE(review): this inner loop reuses (shadows) the outer index 'i'.
        for i in range (0,len(word_list)-1):
            lvl1.append(word_list[i])
            lvl2.append(word_list[i+1])
    ## Write data for analysis
    print "Saving Analysis"
    mo = open("matlab.txt", "wb")
    for row in word_occ:
        mo.write(str(row.encode('utf_8'))+" "+str(word_freq[row])+" "+str(word_occ[row])+" "+str(word_supp[row])+" "+str(word_stat[row])+" "+str(word_ver[row])+" "+str(word_users_behind_word[row])+'\r\n')
    mo.close()
    # Write all to File
    fo = open("output.txt", "wb")
    # Print Vertices
    fo.write('*Vertices ' + str(len(keys)) + '\r\n')
    for i in range (0,len(keys)):
        j = i + 1
        fo.write(str(j)+" "+"\""+keys[i].encode('utf-8')+"\""+'\r\n')
    print "Done Vertices!"
    # Generate and Print the Edges
    fo.write('*Edges ' + '\r\n')
    for i in range (0,len(lvl1)-1):
        fo.write(lvl1[i]+" "+lvl2[i+1]+ '\r\n')
    print "Done Edges!"
    fo.close()
    print "***Resuts***"
    print "Total Tweets: " + str(total_t)
    print "Total Users : " + str(users)
    print "Total Occur : " + str(occ)
    print "Total Verify: " + str(ver)
    print "Total Errors: " + str(errors)
    print "Done!"
def time_code(arg):
    '''Run the zero-argument callable *arg* once and print how long it took.'''
    # NOTE(review): time.clock() is wall-clock on Windows, CPU time on Unix
    # (and removed in Python 3.8).
    start = time.clock()
    arg()
    end = time.clock()
    print 'Code time %.6f seconds' % (end - start)
# Entry point: time the full extraction/analysis pipeline.
if __name__ == '__main__':
    time_code(filetxt)
|
UTF-8
|
Python
| false | false | 2,011 |
3,418,793,984,580 |
0904380e896810a8241b9de42d235072559541d9
|
b71a976753d02456b401cedae1d5662ecb684e7e
|
/python3/welcome.py
|
5cfc334b93a084c82215e68ef54da52ca0436f79
|
[] |
no_license
|
pythonkc/python3-sandbox
|
https://github.com/pythonkc/python3-sandbox
|
de9b97eb8b64fe58be840775d73c1709d5646a2b
|
2e24365e4a2236e15f84e7f0491c847caffaafb6
|
refs/heads/master
| 2020-05-28T10:32:22.333310 | 2014-12-09T20:32:42 | 2014-12-09T20:32:42 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import datetime
def greetings():
    """Print a welcome banner followed by today's date."""
    today = datetime.datetime.now()
    print('Welcome to your new Python 3 sandbox.')
    print('Today is {dt:%A} {dt.day} {dt:%B} {dt.year}.'.format(dt=today))
# Run the greeting only when executed as a script, not on import.
if __name__ == '__main__':
    greetings()
|
UTF-8
|
Python
| false | false | 2,014 |
13,700,945,688,060 |
550ade176be119240781483126f1bb927e8dce02
|
577f03954ec69ed82eaea32c62c8eba9ba6a01c1
|
/py/testdir_multi_jvm/test_GBM_poker_1m.py
|
997ea94cdf28faf6a4575fd46faf75df546e9746
|
[
"Apache-2.0"
] |
permissive
|
ledell/h2o
|
https://github.com/ledell/h2o
|
21032d784a1a4bb3fe8b67c9299f49c25da8146e
|
34e271760b70fe6f384e106d84f18c7f0adb8210
|
refs/heads/master
| 2020-02-26T13:53:01.395087 | 2014-12-29T04:14:29 | 2014-12-29T04:14:29 | 24,823,632 | 1 | 2 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import unittest, random, sys, time, getpass
sys.path.extend(['.','..','../..','py'])
# FIX! add cases with shuffled data!
import h2o, h2o_cmd, h2o_gbm
import h2o_browse as h2b, h2o_import as h2i, h2o_exec as h2e, h2o_jobs as h2j
# Gate for the optional plotting step at the end of the test (kept off).
DO_PLOT_IF_KEVIN = False
class Basic(unittest.TestCase):
    """Benchmark-style integration test: parse the poker-hand dataset on a
    single large-heap H2O node, then train GBM models of increasing depth,
    logging parse/train timings to the benchmark log."""
    def tearDown(self):
        # Fail the test if H2O logged any errors during the run.
        h2o.check_sandbox_for_errors()
    @classmethod
    def setUpClass(cls):
        global SEED, tryHeap
        tryHeap = 28
        SEED = h2o.setup_random_seed()
        # One JVM with a 28GB heap, benchmark logging enabled.
        h2o.init(1, enable_benchmark_log=True, java_heap_GB=tryHeap)
    @classmethod
    def tearDownClass(cls):
        h2o.tear_down_cloud()
    def test_GBM_poker_1m(self):
        for trial in range(2):
            # PARSE train****************************************
            start = time.time()
            xList = []
            eList = []
            fList = []
            modelKey = 'GBMModelKey'
            timeoutSecs = 900
            # Parse (train)****************************************
            csvPathname = 'poker/poker-hand-testing.data'
            hex_key = 'poker-hand-testing.data.hex'
            parseTrainResult = h2i.import_parse(bucket='smalldata', path=csvPathname, schema='put',
                hex_key=hex_key, timeoutSecs=timeoutSecs, doSummary=False)
            elapsed = time.time() - start
            print "train parse end on ", csvPathname, 'took', elapsed, 'seconds',\
                "%d pct. of timeout" % ((elapsed*100)/timeoutSecs)
            print "train parse result:", parseTrainResult['destination_key']
            # Logging to a benchmark file
            algo = "Parse"
            l = '{:d} jvms, {:d}GB heap, {:s} {:s} {:6.2f} secs'.format(
                len(h2o.nodes), h2o.nodes[0].java_heap_GB, algo, csvPathname, elapsed)
            print l
            h2o.cloudPerfH2O.message(l)
            inspect = h2o_cmd.runInspect(key=parseTrainResult['destination_key'])
            print "\n" + csvPathname, \
                " numRows:", "{:,}".format(inspect['numRows']), \
                " numCols:", "{:,}".format(inspect['numCols'])
            numRows = inspect['numRows']
            numCols = inspect['numCols']
            ### h2o_cmd.runSummary(key=parsTraineResult['destination_key'])
            # GBM(train iterate)****************************************
            ntrees = 2
            # Train once per depth; response is the last column.
            for max_depth in [5,10,20]:
                params = {
                    'learn_rate': .1,
                    'nbins': 10,
                    'ntrees': ntrees,
                    'max_depth': max_depth,
                    'min_rows': 10,
                    'response': numCols-1,
                    'ignored_cols_by_name': None,
                }
                print "Using these parameters for GBM: ", params
                kwargs = params.copy()
                trainStart = time.time()
                gbmTrainResult = h2o_cmd.runGBM(parseResult=parseTrainResult,
                    timeoutSecs=timeoutSecs, destination_key=modelKey, **kwargs)
                trainElapsed = time.time() - trainStart
                print "GBM training completed in", trainElapsed, "seconds. On dataset: ", csvPathname
                # Logging to a benchmark file
                algo = "GBM " + " ntrees=" + str(ntrees) + " max_depth=" + str(max_depth)
                l = '{:d} jvms, {:d}GB heap, {:s} {:s} {:6.2f} secs'.format(
                    len(h2o.nodes), h2o.nodes[0].java_heap_GB, algo, csvPathname, trainElapsed)
                print l
                h2o.cloudPerfH2O.message(l)
                gbmTrainView = h2o_cmd.runGBMView(model_key=modelKey)
                # errrs from end of list? is that the last tree?
                errsLast = gbmTrainView['gbm_model']['errs'][-1]
                print "GBM 'errsLast'", errsLast
                cm = gbmTrainView['gbm_model']['cms'][-1]['_arr'] # use the last one
                pctWrongTrain = h2o_gbm.pp_cm_summary(cm);
                print "Last line of this cm might be NAs, not CM"
                print "\nTrain\n==========\n"
                print h2o_gbm.pp_cm(cm)
                # xList.append(ntrees)
                xList.append(max_depth)
                eList.append(pctWrongTrain)
                fList.append(trainElapsed)
            # just plot the last one
            if DO_PLOT_IF_KEVIN:
                xLabel = 'max_depth'
                eLabel = 'pctWrong'
                fLabel = 'trainElapsed'
                eListTitle = ""
                fListTitle = ""
                h2o_gbm.plotLists(xList, xLabel, eListTitle, eList, eLabel, fListTitle, fList, fLabel)
# H2O's unittest entry point (handles cloud setup/teardown flags).
if __name__ == '__main__':
    h2o.unit_main()
|
UTF-8
|
Python
| false | false | 2,014 |
19,335,942,808,802 |
063e74498178df29b2a91fd6c52c29caa03cebc6
|
d62d209a3a62b2bbc4e6c57116dd87daf3c8f637
|
/tests/test_render.py
|
9910ba8a62054417bac9e6b5a288ef0b0a63f510
|
[
"MIT"
] |
permissive
|
daqing15/WeRoBot
|
https://github.com/daqing15/WeRoBot
|
8e5ed1a2901cb0bacf7af283e5ac1e78abd91d67
|
996928b3b5f66280250f2ad507910cf109f199d6
|
refs/heads/master
| 2021-01-17T10:59:55.547281 | 2013-02-03T16:59:11 | 2013-02-03T16:59:11 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
import werobot.reply
import werobot.test
from werobot.utils import to_unicode
def test_text_render():
    """A rendered TextReply must match the expected XML template, compared
    with all spaces and newlines stripped out."""
    def squash(text):
        return text.strip().replace(" ", "").replace("\n", "")
    incoming = werobot.test.make_text_message('test')
    reply = werobot.reply.TextReply(incoming, content='hello', time=1359803261)
    expected = """
    <xml>
    <ToUserName><![CDATA[test]]></ToUserName>
    <FromUserName><![CDATA[test]]></FromUserName>
    <CreateTime>1359803261</CreateTime>
    <MsgType><![CDATA[text]]></MsgType>
    <Content><![CDATA[hello]]></Content>
    <FuncFlag>0</FuncFlag>
    </xml>
    """
    assert squash(reply.render()) == to_unicode(squash(expected))
def test_create_reply():
    """create_reply should build a truthy reply object from plain text."""
    incoming = werobot.test.make_text_message('test')
    assert werobot.reply.create_reply('hi', incoming)
|
UTF-8
|
Python
| false | false | 2,013 |
15,599,321,244,060 |
0cd58211e6d6ed479171452b003d39d6800f9a5a
|
91a80a0e05155c248fa4efd6357f82a62160b457
|
/hello16.py
|
9d3010fe93c11321aec9f6debc9806c715c81d1f
|
[] |
no_license
|
ClockWorks001/pyworks
|
https://github.com/ClockWorks001/pyworks
|
b8edd28bebf53c989f2f20f1c9baeafcd687a120
|
6bef0298e9f8533559831665d36a836c49b083a0
|
refs/heads/master
| 2020-06-06T03:26:18.976722 | 2014-08-15T09:10:41 | 2014-08-15T09:10:41 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# coding: UTF-8
# for loop
# NOTE: the variable `sum` shadows the builtin of the same name throughout
# this demo script.
sales = [13, 23, 31, 238]
sum = 0
for sale in sales:
    sum += sale
    # Running total after each sale.  NOTE(review): indentation was
    # reconstructed; this print may originally have sat outside the loop.
    print(sum)
print(sum)
sum = 0
for i in range(10):
    print(i)
    sum = sum + i
else:
    # for/else: runs because the loop completed without break.
    print(sum)
# continue
sum = 0
for i in range(10):
    if i == 3:
        continue
    print(i)
    sum = sum + i
else:
    # continue skips one iteration but does not suppress the else clause.
    print(sum)
# break
sum = 0
for i in range(10):
    if i == 3:
        break
    print(i)
    sum = sum + i
else:
    # Skipped: the loop exited via break.
    print(sum)
|
UTF-8
|
Python
| false | false | 2,014 |
17,188,459,143,124 |
bd4806008b387d3409585c4057821ed0a1bd4edc
|
8439c9f73924a1e251f12401f8e8816e59d9c605
|
/arma/core.py
|
84e93586fc0cb415d4cdfd7a5e5ef128fb2e5949
|
[] |
no_license
|
kiomega/adum
|
https://github.com/kiomega/adum
|
bb02a8b1546a6d470c79dd0561280cc21fcede43
|
0a90d9c73d68499e3cce39edc8d3cb3c50f733cf
|
refs/heads/master
| 2019-04-07T18:10:10.342015 | 2014-11-10T05:37:23 | 2014-11-10T05:37:23 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import ldap3
from django.conf import settings
from arma.models import User, Group
class Core(object):
    """LDAP/Active Directory access layer: binds to the server configured in
    Django settings, builds search filters, and maps directory entries to
    the local User/Group models."""
    server = None
    connection = None
    last_result = {}
    # Attributes matched by free-text search.
    SEARCH_ATTRS = (
        'cn',
        'sAMAccountName',
        'givenName',
        'sn',
        'displayName',
        'mail',
    )
    USER_SEARCH_CLASSES = (
        'person',
        'organizationalPerson',
        'user',
    )
    GROUP_SEARCH_CLASSES = (
        'group',
    )
    # objectClass -> local model used to wrap a result entry.
    CLASS_MAP = {
        'group': Group,
        'person': User,
        'organizationalPerson': User,
        'user': User
    }
    def __init__(self):
        # Server connection details come from Django settings; SSL and port
        # are optional with sensible defaults.
        use_ssl = getattr(settings, 'LDAP_BIND_SSL', False)
        self.server = ldap3.Server(settings.LDAP_BIND_SERVER, use_ssl=use_ssl)
        self.server.port = getattr(settings, 'LDAP_BIND_PORT', 389)
    def bind(self, user=settings.LDAP_BIND_USER, password=settings.LDAP_BIND_PASSWORD):
        # NOTE(review): the default credentials are evaluated once at import
        # time; later changes to settings won't be picked up.
        self.connection = ldap3.Connection(
            self.server,
            auto_bind=True,
            client_strategy=ldap3.STRATEGY_SYNC,
            user=user,
            password=password
        )
    def unbind(self):
        # Close the LDAP connection opened by bind().
        self.connection.unbind()
    def add_object(self, classes, attrs, dn=None):
        """Add an entry under the configured base DN (dn, when given, is a
        relative DN prefixed to the base)."""
        base_dn = settings.LDAP_BASE_DN
        if dn:
            dn = "%s,%s" % (dn, base_dn)
        else:
            dn = base_dn
        return self.connection.add(dn=dn, object_class=classes, attributes=attrs)
    def internal_search(self, filter, dn=None, attrs=ldap3.ALL_ATTRIBUTES):
        """Run an LDAP search; always fetches objectClass so results can be
        mapped to models. Stores the raw result in last_result."""
        base_dn = settings.LDAP_BASE_DN
        if dn:
            dn = "%s,%s" % (dn, base_dn)
        else:
            dn = base_dn
        if attrs is not ldap3.ALL_ATTRIBUTES and 'objectClass' not in attrs:
            attrs = list(attrs)
            attrs.append('objectClass')
        self.connection.search(dn, filter, attributes=attrs)
        self.last_result = self.connection.result
        return self.connection.response
    def get_search_filter(self, s, user=False, group=False, open_attr=None):
        """Build an LDAP filter string: substring match of *s* across
        SEARCH_ATTRS (or an exact match on open_attr), optionally AND-ed
        with an objectClass restriction for users and/or groups.

        NOTE(review): *s* is interpolated unescaped into the filter; if it
        can come from untrusted input, escape it per RFC 4515.
        """
        out = ""
        if user or group:
            out += "(&"
        if s:
            out += "(|"
            if not open_attr:
                for attr in self.SEARCH_ATTRS:
                    out += "(%s=*%s*)" % (attr, s)
                out += ")"
            else:
                out += "(%s=%s)" % (open_attr, s)
                out += ")"
        if user or group:
            out += "(|"
            if user:
                for klass in self.USER_SEARCH_CLASSES:
                    out += "(objectClass=%s)" % klass
            if group:
                for klass in self.GROUP_SEARCH_CLASSES:
                    out += "(objectClass=%s)" % klass
            out += ")"
        if user or group:
            out += ")"
        return out
    def open_object_from_result(self, result):
        """Wrap a raw search entry in the model matching its objectClass."""
        object = None
        for klass, model in self.CLASS_MAP.items():
            if klass in result['attributes']['objectClass']:
                object = model
                break
        if not object:
            raise Exception("Unable to determine model type for object with DN: %s" % result['dn'])
        out = object(result['dn'], result['attributes'], self)
        return out
    @classmethod
    def search(cls, s):
        """Convenience entry point: bind, search users and groups for *s*,
        and return model-wrapped results."""
        self = cls()
        self.bind()
        filter = self.get_search_filter(s, user=True, group=True)
        results = self.internal_search(filter)
        out = []
        for result in results:
            out.append(self.open_object_from_result(result))
        return out
class Template(object):
    """Empty placeholder class; carries no behavior yet."""
    def __init__(self):
        pass
|
UTF-8
|
Python
| false | false | 2,014 |
12,472,585,051,827 |
745f78b971fb2bcfdccc93e2bc294877e0a4a909
|
263c0c0d5a21b3cb9bd2ee8db4c1c89f2ff9e2a3
|
/etw/descriptors/field.py
|
18b34ade4927c08b040cc1945b1d6f88a34bb82e
|
[] |
no_license
|
sebmarchand/pyetw
|
https://github.com/sebmarchand/pyetw
|
a6531ed1e8ffa06728c5786780e0e32a47660b42
|
302431b0eecc7698f1b7641ed8cf39f8769beb4b
|
refs/heads/master
| 2021-01-18T21:43:30.346870 | 2011-12-08T16:51:11 | 2011-12-08T16:51:11 | 39,590,995 | 14 | 9 | null | false | 2016-04-06T09:17:14 | 2015-07-23T20:33:57 | 2015-07-23T20:34:31 | 2016-04-06T09:16:56 | 168 | 0 | 1 | 0 |
Python
| null | null |
#!/usr/bin/python2.6
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Function definitions for callable field types.
The functions defined here are meant to be used as callable field types
used with event descriptor field definitions. EventClass subclasses are
defined like:
class MyEventClass(EventClass):
_fields_ = [('IntField', field.Int32),
('StringField', field.String)]
When a log event is parsed, the EventClass is created, and each callable
field type is invoked. The return value is assigned to the EventClass using
the name provided in the _fields_ list.
To add a new field type, define a function that takes these arguments:
session: The _TraceLogSession instance the event arrived on.
This has a session-related properties and functionality, such as
the is_64_bit_log property and the SessionTimeToTime member that
will convert a time stamp in the session's units to a python time.
reader: An instance of the BinaryBufferReader class to read from.
and returns a mixed value. If the BinaryBufferReader doesn't already have
a function to read a certain type, it will need to be added as well.
"""
def Boolean(unused_session, reader):
    """Read a boolean field from the event buffer."""
    return reader.ReadBoolean()


def Int8(unused_session, reader):
    """Read a signed 8-bit integer field."""
    return reader.ReadInt8()


def UInt8(unused_session, reader):
    """Read an unsigned 8-bit integer field."""
    return reader.ReadUInt8()


def Int16(unused_session, reader):
    """Read a signed 16-bit integer field."""
    return reader.ReadInt16()


def UInt16(unused_session, reader):
    """Read an unsigned 16-bit integer field."""
    return reader.ReadUInt16()


def Int32(unused_session, reader):
    """Read a signed 32-bit integer field."""
    return reader.ReadInt32()


def UInt32(unused_session, reader):
    """Read an unsigned 32-bit integer field."""
    return reader.ReadUInt32()


def Int64(unused_session, reader):
    """Read a signed 64-bit integer field."""
    return reader.ReadInt64()


def UInt64(unused_session, reader):
    """Read an unsigned 64-bit integer field."""
    return reader.ReadUInt64()


def Pointer(session, reader):
    """Read a pointer-sized integer: 8 bytes on a 64-bit log, else 4."""
    if session.is_64_bit_log:
        return reader.ReadUInt64()
    else:
        return reader.ReadUInt32()


def String(unused_session, reader):
    """Read a narrow string field."""
    return reader.ReadString()


def WString(unused_session, reader):
    """Read a wide (UTF-16) string field."""
    return reader.ReadWString()


def Sid(session, reader):
    """Read a security identifier; its layout depends on log bitness."""
    return reader.ReadSid(session.is_64_bit_log)


def WmiTime(session, reader):
    """Read a 64-bit session timestamp and convert it to Python time."""
    return session.SessionTimeToTime(reader.ReadUInt64())
|
UTF-8
|
Python
| false | false | 2,011 |
4,037,269,289,343 |
1e0e2ec86f7ecc32b61d9fa7c1c6b8f4a63acb4b
|
2e8c92ae8f46904401adfbfb6d3cd459d64ff2d0
|
/zadanie3/prog/unittests/lu_test.py
|
0fe049017dc1cb6c858080998773a2ae0063b27d
|
[] |
no_license
|
krzychu/Pracownia-z-numerk-w
|
https://github.com/krzychu/Pracownia-z-numerk-w
|
9aac495519141c55de59fe9be89889193d66e78c
|
02006ee1f28e5f84eb7712084694d6ee083f6a82
|
refs/heads/master
| 2020-04-13T09:25:34.406456 | 2012-01-14T22:42:51 | 2012-01-14T22:42:51 | 2,521,389 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import unittest
from pymatrix import Matrix
class TestLUDecomposition(unittest.TestCase):
    """LU-decomposition tests for pymatrix.Matrix: a hand-checked 4x4 case,
    a 50x50 Hilbert matrix, and a product of random triangular factors."""
    def testKnown(self):
        # Hand-computed factorization of a small Vandermonde-like matrix.
        m = Matrix([[1,2,3,4], [1,4,9,16], [1,8,27,64], [1,16,81,256]])
        l = Matrix([[1,0,0,0], [1,1,0,0], [1,3,1,0], [1,7,6,1]])
        u = Matrix([[1,2,3,4], [0,2,6,12], [0,0,6,24], [0 ,0,0,24]])
        (x,y) = m.lu()
        self.assertTrue(x.isAlmostEqual(l))
        self.assertTrue(y.isAlmostEqual(u))
        # The factors must reproduce the original matrix.
        self.assertTrue(m.isAlmostEqual(x*y))
    def testHilbert(self):
        # Hilbert matrices are ill-conditioned; L*U should still reproduce m.
        m = Matrix(50)
        m.hilbert()
        (l,u) = m.lu()
        self.assertTrue(m.isAlmostEqual(l*u))
    def testRandom(self):
        # Build m with a known LU structure from random triangular factors.
        l = Matrix(5)
        l.randomLowerTriangular(5)
        u = Matrix(5)
        u.randomUpperTriangular(5)
        m = l*u
        (x,y) = m.lu()
        self.assertTrue(m.isAlmostEqual(x*y))
# Run the suite only when executed as a script.  The unconditional
# unittest.main() call previously ran (and exited) on import as well.
if __name__ == '__main__':
    unittest.main()
|
UTF-8
|
Python
| false | false | 2,012 |
18,597,208,422,381 |
06749c7888f03a7794cddeb87de5cb81dc6a28b8
|
69e41359e2f01ffb12e243970a59e6fcc986e09a
|
/solved/Euler327.py
|
03549d82587ddba90f86bad6c73352cca61b0472
|
[] |
no_license
|
pfhayes/euler
|
https://github.com/pfhayes/euler
|
0d4383f9cfa36890bdaf95bfdae553707c6cbc86
|
56f053afffb91262c7c48463700cab4fe6581813
|
refs/heads/master
| 2016-09-05T13:18:46.089574 | 2011-12-21T05:26:41 | 2011-12-21T05:26:41 | 1,786,274 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# M(C,R) is the minimum number of cards to get through R rooms holding C cards.
# Observe M(x,y) = y + 1 for x >= y
# And, M(x,y+1) = the number of cards required to get M(x,y) cards through one
# door
# And, it takes x cards to deposit x - 2 cards past the first door
M = [[0 for x in range(31)] for x in range(41)]
for x in xrange(3,41) :
for y in xrange(1,31) :
if x >= y + 1 :
M[x][y] = y + 1
else :
num_to_get_in_box = M[x][y-1]
if x == 3 :
full_loads = num_to_get_in_box - 2
extra = 3
elif num_to_get_in_box % (x - 2) == 1 :
full_loads = num_to_get_in_box / (x-2)
extra = 0
elif num_to_get_in_box % (x - 2) == 0 :
full_loads = num_to_get_in_box / (x-2) - 1
extra = M[x][y-1] - full_loads * (x - 2) + 1
else :
full_loads = num_to_get_in_box / (x-2)
extra = M[x][y-1] - full_loads * (x - 2) + 1
if x == 3 and y == 3 :
print num_to_get_in_box, full_loads, extra
M[x][y] = full_loads * x + extra
print "M(3,1) =", M[3][1]
print "M(3,2) =", M[3][2]
print "M(3,3) =", M[3][3]
print "M(3,4) =", M[3][4]
print "M(4,6) =", M[3][6]
print "M(3,6) =", M[3][6]
print "M(4,6) =", M[3][6]
print "sum M(x,10) =", sum([M[x][10] for x in range(3,11)])
print "sum M(x,30) =", sum([M[x][30] for x in range(3,41)])
|
UTF-8
|
Python
| false | false | 2,011 |
5,617,817,267,875 |
746269388756cfddc975ae901af4ccc843f17efd
|
6ff1d8a316040e37a1d4667c48c50525c25c0c22
|
/pyload/plugins/internal/SimpleCrypter.py
|
d935bf1da13914200e433700bab66b198eba1a50
|
[
"LicenseRef-scancode-free-unknown",
"LicenseRef-scancode-other-copyleft",
"AGPL-3.0-only",
"GPL-1.0-or-later",
"AGPL-3.0-or-later"
] |
non_permissive
|
xzwr/pyload
|
https://github.com/xzwr/pyload
|
66da72bff5e07c829b460f9de78d6c84aba2c0db
|
fdabb02b229f52fd3f98efb678df278eee2a28b6
|
refs/heads/master
| 2020-02-25T11:22:50.365475 | 2013-07-12T19:58:37 | 2013-07-12T19:58:37 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
"""
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License,
or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, see <http://www.gnu.org/licenses/>.
@author: zoidberg
"""
import re
from module.plugins.Crypter import Crypter
from module.utils import html_unescape
class SimpleCrypter(Crypter):
    r"""Base class for simple link-list crypter plugins.

    Subclasses are expected to define:
        LINK_PATTERN: group(1) must be a download link
            example: <div class="link"><a href="(http://speedload.org/\w+)
        TITLE_PATTERN: (optional) the group named 'title' is the package title
            example: <title>Files of: (?P<title>[^<]+) folder</title>
    """
    __name__ = "SimpleCrypter"
    __version__ = "0.04"
    __pattern__ = None
    __type__ = "crypter"
    __description__ = """Base crypter plugin"""
    __author_name__ = ("stickell", "zoidberg")
    __author_mail__ = ("[email protected]", "[email protected]")

    def decrypt(self, pyfile):
        # Fetch the page, derive the package title, then scrape every link.
        self.html = self.load(pyfile.url, decode=True)
        package_name, folder_name = self.getPackageNameAndFolder()
        package_links = re.findall(self.LINK_PATTERN, self.html)
        self.logDebug('Package has %d links' % len(package_links))
        if not package_links:
            self.fail('Could not extract any links')
        else:
            self.packages = [(package_name, package_links, folder_name)]

    def getPackageNameAndFolder(self):
        # Prefer a title scraped via TITLE_PATTERN when the subclass
        # defines one; otherwise fall back to the pyfile package defaults.
        if hasattr(self, 'TITLE_PATTERN'):
            match = re.search(self.TITLE_PATTERN, self.html)
            if match:
                name = folder = html_unescape(match.group('title').strip())
                self.logDebug("Found name [%s] and folder [%s] in package info" % (name, folder))
                return name, folder
        name = self.pyfile.package().name
        folder = self.pyfile.package().folder
        self.logDebug("Package info not found, defaulting to pyfile name [%s] and folder [%s]" % (name, folder))
        return name, folder
|
UTF-8
|
Python
| false | false | 2,013 |
10,788,957,885,767 |
7764c5a5ecd3dda4c1fcae6a08d2743c9bf4a841
|
d6c0b5311e8c7a9849bd4ce8ad383536c05957ac
|
/service/io360/includes/helpers.py
|
3ee9a01455d61fb82a219fe38c3a456fce26ae37
|
[] |
no_license
|
lzimm/360io
|
https://github.com/lzimm/360io
|
29182dcc56ef832d379cfb42ac7e30b3cfc55e38
|
e34e329da7aefc8f38b18719b69482cba62b3704
|
refs/heads/master
| 2021-01-23T21:34:14.922298 | 2011-04-02T05:54:25 | 2011-04-02T05:54:25 | 1,558,482 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from io360.settings import SECRET_KEY
from hack.web2.http_headers import Cookie
import hashlib
def updateUserCookie(req, user, **kwargs):
    """Refresh the authentication cookies on *req* for *user*.

    When *user* is falsy the auth/user_id/user_name cookies are set to
    empty strings, effectively logging the client out.  Any extra keyword
    arguments are appended as additional cookies verbatim.

    The 'auth' cookie is an MD5 digest binding the server secret, the
    client address and the user id together.
    NOTE(review): plain MD5 is weak for an auth token -- consider an HMAC.
    """
    if user:
        userdata = hashlib.md5(SECRET_KEY + req.remoteAddr.host + str(user['id'])).hexdigest()
        id = user['id']
        name = user['name']
    else:
        # No user: blank out all identifying cookies.
        userdata = ''
        id = ''
        name = ''
    req.cookies.append(Cookie('auth', userdata, path='/'))
    req.cookies.append(Cookie('user_id', id, path='/'))
    req.cookies.append(Cookie('user_name', name, path='/'))
    for k, v in kwargs.iteritems():
        req.cookies.append(Cookie(k, v, path='/'))
|
UTF-8
|
Python
| false | false | 2,011 |
18,691,697,681,179 |
421f58a93e479c974bebf498cca465ce3fd3daa2
|
0d2e427b18823275fc24046b58fede72f4a45641
|
/CS101/exp-5.1.py
|
88e1320d6c7bbd104315aca5dea258892db944d8
|
[] |
no_license
|
paulorv/udacity
|
https://github.com/paulorv/udacity
|
af2807c6c7f1c52f979ef52bddb7a47e50400f66
|
cde44c3fe609ff006ba8ff0d12b3cd2a6e709280
|
refs/heads/master
| 2020-04-09T06:27:49.064431 | 2013-04-18T23:07:23 | 2013-04-18T23:07:23 | 4,510,033 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Write a procedure, shift, which takes as its input a lowercase letter,
# a-z and returns the next letter in the alphabet after it, with 'a'
# following 'z'.
def shift(letter):
    """Return the letter following *letter* in the alphabet, with 'z'
    wrapping around to 'a'.  Expects a lowercase letter a-z."""
    candidate = chr(ord(letter) + 1)
    if candidate > 'z':
        # Ran off the end of the alphabet: wrap back by 26 letters.
        candidate = chr(ord(candidate) - 26)
    return candidate
# Quick manual checks; the expected output is shown in the comments.
print shift('a')
#>>> b
print shift('n')
#>>> o
print shift('z')
#>>> a
|
UTF-8
|
Python
| false | false | 2,013 |
15,418,932,613,742 |
5764ad5ed2b60acc7e8e2db2c55e3f16be7814dd
|
070f699f435bf666f0d2f1960119924068fad0b3
|
/projects/jzbot2-python/fact-parser/src/factparser/parser.py
|
c948a133379daa2e1a4f5971d4a24f59c5c15809
|
[] |
no_license
|
javawizard/afn
|
https://github.com/javawizard/afn
|
049e510069b2eaf39ac10560f8c706ff5aa74277
|
d9d95e24673794a20bb8138ce44d5bac236e07ed
|
refs/heads/master
| 2016-09-06T06:53:28.872368 | 2013-09-19T08:12:52 | 2013-09-19T08:12:52 | 3,390,850 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# A parser/interpreter for the Fact programming language. This parser
# is a hand-coded predictive recursive descent parser.
import charstack
import exceptions
import entities
import sinks
def parse(text):
    """Parse Fact source *text* and return its entity tree.

    The text is wrapped in an {identity|...} call so the parser always
    sees exactly one top-level function reference.
    """
    stack = charstack.CharStack("{identity|" + text + "}")
    entity = parseFunction(stack)
    # TODO: we need to do some checking here to make sure they
    # didn't include a premature function close or supply an
    # out-of-function "|" character
    return entity
def parseFunction(stack):
    """Parse one {...} function reference from *stack*.

    Returns an entities.FunctionReference whose argument sequence holds
    one Sequence per |-separated argument.  Raises ParseException when
    the opening brace is missing or the call is never closed.
    """
    if(stack.next() != "{"):
        raise exceptions.ParseException("Start of function reference must be an open brace but is not")
    argumentSequence = entities.Sequence()
    currentArgument = entities.Sequence()
    argumentSequence.add(currentArgument)
    # Literal run currently being accumulated, or None when the last
    # token parsed was not literal text.
    currentLiteral = None
    while stack.more():
        c = stack.next()
        if c == "\n" or c == "\r":
            # Newlines are ignored entirely.
            continue
        elif c == "\\":
            # Escape sequence: start a literal run if needed.
            if currentLiteral == None :
                currentLiteral = entities.Literal()
                currentArgument.add(currentLiteral)
            theChar = getEscapedChar(stack.next())
            if theChar == "[":
                # "\[...]": embed a bracketed run of raw characters.
                v = stack.next()
                while v != "]":
                    currentLiteral.append(v)
                    v = stack.next()
            elif theChar != "\x00":
                # "\x" and "\ " decode to NUL and are dropped.
                currentLiteral.append(theChar)
        elif c == "%":
            # %name%: a variable reference; %% (blank name) is a no-op.
            currentLiteral = None
            varName = ""
            c = stack.next()
            while c != "%":
                varName = varName + c
                c = stack.next()
            if varName.strip(): # True == Non-Whitespace
                currentArgument.append(entities.VarReference(varName))
        elif c == "{":
            # Nested function call: push the brace back and recurse.
            currentLiteral = None
            stack.back()
            newRef = parseFunction(stack)
            currentArgument.add(newRef)
        elif c == "|":
            # Argument separator: begin a new argument sequence.
            currentLiteral = None
            # TODO: if the current argument has only one child, replace
            # it with its child. This could improve performance a bit.
            currentArgument = entities.Sequence()
            argumentSequence.add(currentArgument)
        elif c == "}":
            # End of this function reference.
            currentLiteral = None
            # TODO: same TODO item about 5 lines up applies here.
            newRef = entities.FunctionReference(argumentSequence)
            return newRef
        else:
            # Plain character: extend (or start) the current literal run.
            if(currentLiteral == None):
                currentLiteral = entities.Literal()
                currentArgument.add(currentLiteral)
            currentLiteral.append(c)
    raise exceptions.ParseException('Function call not closed (IE you have more "{" than you have "}"')
def getEscapedChar(char):
    """Translate a Fact escape character into the character it denotes.

    Unrecognized characters pass through unchanged; "x" and space map to
    NUL, which the caller treats as "emit nothing".
    """
    replacements = {
        "n": "\n",      # newline
        "r": "\r",      # carriage return
        "p": "\x0f",    # formatting control codes
        "b": "\x02",
        "u": "\x1f",
        "i": "\x16",
        "c": "\x03",
        "x": "\x00",    # explicit "no character"
        " ": "\x00",
    }
    return replacements.get(char, char)
class FactContext:
    # Evaluation state for one Fact interpreter run.
    # Variables shared across every FactContext instance.
    globalVars = {}
    def __init__(self):
        # Per-context variable bindings and named subroutines.
        self.localVars = {}
        self.subroutines = {}
        # Whether the output should be treated as an "action"
        # (presumably an IRC /me-style message -- verify against callers).
        self.action = False
|
UTF-8
|
Python
| false | false | 2,013 |
15,118,284,893,848 |
c65c0308d092c69ff37ecc49ec53dc0243dbf4f3
|
8a6a51fce49e76d6c81a7d43fece582af6a615cc
|
/pynes/tests/commandline_test.py
|
b487a069e466183fcbecb0e7d30f8a68b669c23b
|
[
"BSD-3-Clause"
] |
permissive
|
gabriel-ozeas/pyNES
|
https://github.com/gabriel-ozeas/pyNES
|
927f75ef576aacbf5727494e342787654a7e9e16
|
bd3912b1441dfaeb81cb23b99ea8e9174941adf3
|
refs/heads/master
| 2021-01-16T19:44:29.225957 | 2012-11-15T14:43:00 | 2012-11-15T14:43:00 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
import unittest
import os
from pynes import main
from mock import patch, Mock
import shutil
class CommandLineTest(unittest.TestCase):
    """Verify that the pynes command line dispatches to the right backend."""
    @patch('pynes.compiler.compile')
    def test_asm(self, compiler):
        # The 'asm' subcommand must route to the assembler compiler.
        main("pynes asm fixtures/movingsprite/movingsprite.asm".split())
        self.assertTrue(compiler.called)
    @patch('pynes.composer.compose')
    def test_py(self, composer):
        # The 'py' subcommand must route to the Python composer.
        main("pynes py pynes/examples/movingsprite.py".split())
        self.assertTrue(composer.called)
|
UTF-8
|
Python
| false | false | 2,012 |
7,473,243,147,223 |
ca9af1a423702c756daa60219d9215a5ad6ff92a
|
7766621cf84f687d42dd050842712b9756e3a18d
|
/settings.py
|
d7bd61f5e731d2dfb9fde1aa6e49d1158a321097
|
[] |
no_license
|
caesarnine/MrMarkov
|
https://github.com/caesarnine/MrMarkov
|
aa4fbe69a39725ffe1aefd2447a533b988350f0e
|
3ff90dc58da64d8f1e70d634f44a73fb00ab87e4
|
refs/heads/master
| 2016-09-06T10:13:59.933003 | 2014-11-21T04:32:14 | 2014-11-21T04:32:14 | 26,945,399 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import os
# Let's just use the local mongod instance. Edit as needed.
# Please note that MONGO_HOST and MONGO_PORT could very well be left
# out as they already default to a bare bones local 'mongod' instance.
# Heroku sets PORT, so its presence distinguishes the hosted deployment
# from a local development machine.
if os.environ.get('PORT'):
    # We're hosted on Heroku! Use the MongoHQ sandbox as our backend.
    # BUG FIX: the hosted values were left blank (MONGO_PORT had no value
    # at all, a syntax error).  Pull them from the environment instead so
    # no credentials live in source control.
    MONGO_HOST = os.environ.get('MONGO_HOST', '')
    MONGO_PORT = int(os.environ.get('MONGO_PORT', 27017))
    MONGO_USERNAME = os.environ.get('MONGO_USERNAME', '')
    MONGO_PASSWORD = os.environ.get('MONGO_PASSWORD', '')
    MONGO_DBNAME = os.environ.get('MONGO_DBNAME', '')
    SERVER_NAME = os.environ.get('SERVER_NAME', '')
else:
    # Running on local machine. Let's just use the local mongod instance.
    # Please note that MONGO_HOST and MONGO_PORT could very well be left
    # out as they already default to a bare bones local 'mongod' instance.
    MONGO_HOST = 'localhost'
    MONGO_PORT = 27017
    MONGO_USERNAME = 'username'
    MONGO_PASSWORD = 'password'
    MONGO_DBNAME = 'apitest'
# Include the generated text in every response payload.
EXTRA_RESPONSE_FIELDS = ['generated_text']
# Enable reads (GET), inserts (POST) and DELETE for resources/collections
# (if you omit this line, the API will default to ['GET'] and provide
# read-only access to the endpoint).
RESOURCE_METHODS = ['GET', 'POST']
# Enable reads (GET), edits (PATCH), replacements (PUT) and deletes of
# individual items (defaults to read-only item access).
ITEM_METHODS = ['GET']
schema = {
    # Schema definition, based on Cerberus grammar. Check the Cerberus project
    # (https://github.com/nicolaiarocci/cerberus) for details.
    'original_text': {
        'type': 'string',
        'minlength': 1,
        'maxlength': 3000,
    },
    # Filled in by the server; clients never submit this field directly.
    'generated_text': {
        'type': 'string',
    },
}
text = {
    # 'title' tag used in item links. Defaults to the resource title minus
    # the final, plural 's' (works fine in most cases but not for 'people')
    'item_title': 'text',
    # We choose to override global cache-control directives for this resource.
    'cache_control': 'max-age=10,must-revalidate',
    'cache_expires': 10,
    # most global settings can be overridden at resource level
    'resource_methods': ['GET', 'POST'],
    'schema': schema
}
# Map of endpoint name -> resource definition served by the API.
DOMAIN = {
    'text': text,
}
|
UTF-8
|
Python
| false | false | 2,014 |
1,571,958,042,464 |
f66ba4eeeb5a8b7021ce66a3e901d07e3044d7a9
|
3ea1319988aca7d54c98dfb09a117ce59739173c
|
/server/file_manager_test.py
|
5e77af3a147d4b1557acb0419d53bca047072f35
|
[] |
no_license
|
xunjieli/malicious
|
https://github.com/xunjieli/malicious
|
f7c23b3c18d1df5ab4e237d42e28ac208dfe500c
|
0ede0593ed8a8634ae55b099638b6b000e51336f
|
refs/heads/master
| 2021-03-12T23:53:10.848919 | 2013-12-13T18:53:03 | 2013-12-13T18:53:04 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from file_manager import *
from ..common import metadata
from ..common import crypto
import unittest
# test data
# Four fake users, each with an encryption and a signature key pair,
# plus one shared symmetric file key and a file signature key pair.
user_ids = ['testa', 'testb', 'testc', 'testd']
user_enc_keys = [crypto.generate_user_encryption_keypair() for _ in range(4)]
user_sign_keys = [crypto.generate_user_signature_keypair() for _ in range(4)]
file_key = crypto.generate_symmetric_key()
file_sig_key = crypto.generate_file_signature_keypair()
def public_part(key):
    """Strip a key pair down to its public half (the first two elements)."""
    first, second = key[0], key[1]
    return (first, second)
class TestFileManager(unittest.TestCase):
    """Integration tests for file_manager create/read/modify operations.

    Every test operates on file id 1 owned by user 0, shared read-only
    with user 1 and read-write with user 2.

    Refactor notes: the identical metafile/create boilerplate repeated in
    every test now lives in _make_metafile/_create_file.  The negative
    tests previously wrapped assertTrue(False) in a bare try/except, so
    the except clause swallowed the assertion failure and the tests could
    never fail; they now use assertRaises.
    """

    # Payload written into every test file.
    DATAFILE = 'this is datafile'

    def setUp(self):
        # Remove any file left behind by a previous (failed) run.
        for i in range(0, 3):
            if file_exist(1, user_ids[i]):
                remove_file(1, user_ids[i])

    def _make_metafile(self, collaborators=None):
        """Encode a metadata blob owned by user 0.

        By default user 1 is a read-only collaborator and user 2 a
        read-write collaborator; pass an explicit list to override.
        """
        owner = (user_ids[0], public_part(user_enc_keys[0]))
        if collaborators is None:
            collaborators = [
                (user_ids[1], False, public_part(user_enc_keys[1])),
                (user_ids[2], True, public_part(user_enc_keys[2])),
            ]
        return metadata.metadata_encode('test_file', False, file_key,
                                        file_sig_key, user_sign_keys[0],
                                        owner, collaborators)

    def _create_file(self):
        """Create the standard test file as user 0 and assert success."""
        self.assertTrue(create_file(1, user_ids[0], self._make_metafile(),
                                    self.DATAFILE))

    def test_create_file(self):
        self._create_file()

    def test_create_file_fail(self):
        # A non-owner must not be able to create the file.
        metafile = self._make_metafile()
        self.assertRaises(Exception, create_file,
                          1, user_ids[1], metafile, self.DATAFILE)

    def test_create_file_fail_file_exist(self):
        # Creating the same file id twice must fail.
        self._create_file()
        self.assertRaises(Exception, create_file,
                          1, user_ids[0], self._make_metafile(), self.DATAFILE)

    def test_modify_datafile(self):
        # The owner may rewrite the data file.
        self._create_file()
        modify_datafile(1, user_ids[0], user_ids[0], "new string")
        with open(datafile_name(1, user_ids[0]), 'r+') as f:
            self.assertEqual(f.read(), "new string")

    def test_modify_datafile_collaborator(self):
        # User 2 has write permission, so the modification must succeed.
        self._create_file()
        modify_datafile(1, user_ids[2], user_ids[0], "new string")
        with open(datafile_name(1, user_ids[0]), 'r+') as f:
            self.assertEqual(f.read(), "new string")

    def test_modify_datafile_fail(self):
        # User 1 only has read permission.
        self._create_file()
        self.assertRaises(Exception, modify_datafile,
                          1, user_ids[1], user_ids[0], "new string")

    def test_modify_metadata(self):
        # Replacing the metadata with a version that drops user 2 must
        # remove that user from the stored metadata.
        self._create_file()
        new_metafile = self._make_metafile(
            [(user_ids[1], False, public_part(user_enc_keys[1]))])
        modify_metadata(1, user_ids[0], new_metafile)
        with open(metafile_name(1, user_ids[0]), 'r+') as f:
            users = metadata.extract_users_from_metadata(f.read())
            self.assertTrue(not users.has_key(user_ids[2]))

    def test_read_metadata(self):
        self._create_file()
        with open(metafile_name(1, user_ids[0]), 'r+') as f:
            self.assertEqual(read_metadata(1, user_ids[0], user_ids[0]),
                             f.read())

    def test_read_metadata_collaborator(self):
        self._create_file()
        with open(metafile_name(1, user_ids[0]), 'r+') as f:
            self.assertEqual(read_metadata(1, user_ids[1], user_ids[0]),
                             f.read())

    def test_read_datafile(self):
        self._create_file()
        with open(datafile_name(1, user_ids[0]), 'r+') as f:
            self.assertEqual(read_datafile(1, user_ids[0], user_ids[0]),
                             f.read())

    def test_read_datafile_collaborator(self):
        self._create_file()
        with open(datafile_name(1, user_ids[0]), 'r+') as f:
            self.assertEqual(read_datafile(1, user_ids[1], user_ids[0]),
                             f.read())

    def test_can_write_datafile(self):
        # Owner and read-write collaborator may write; read-only may not.
        self._create_file()
        self.assertTrue(can_write_datafile(1, user_ids[0], user_ids[0]))
        self.assertTrue(not can_write_datafile(1, user_ids[1], user_ids[0]))
        self.assertTrue(can_write_datafile(1, user_ids[2], user_ids[0]))

if __name__ == '__main__':
    unittest.main()
|
UTF-8
|
Python
| false | false | 2,013 |
14,611,478,786,960 |
624e7c2734132e6c506f5fd99a9f58500984adc4
|
4623ed30d4f9e07ae17e465dd0a677ef88995a53
|
/gnss_sdr/findPreambles.py
|
320e1042d639469f5fbec503130fde194ba9ea8c
|
[] |
no_license
|
CGCooke/softgnss_python
|
https://github.com/CGCooke/softgnss_python
|
b456ea908319c8f450f2955f97827407fbe7a4a7
|
0b042bb220c412339e13d11cd3921df79420ad7e
|
refs/heads/master
| 2021-04-18T19:02:42.823056 | 2012-08-22T21:15:39 | 2012-08-22T21:15:39 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
#--------------------------------------------------------------------------
# SoftGNSS v3.0
#
# Copyright (C) Darius Plausinaitis and Dennis M. Akos
# Written by Darius Plausinaitis and Dennis M. Akos
# Converted to Python by Colin Beighley
#--------------------------------------------------------------------------
#This program is free software; you can redistribute it and/or
#modify it under the terms of the GNU General Public License
#as published by the Free Software Foundation; either version 2
#of the License, or (at your option) any later version.
#
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU General Public License for more details.
#
#You should have received a copy of the GNU General Public License
#along with this program; if not, write to the Free Software
#Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
#USA.
#--------------------------------------------------------------------------
import numpy as np
from navPartyChk import navPartyChk
import corrs2bits
def findPreambles(trackResults, settings):
    """Locate the start of the first complete subframe on each channel.

    Cross-correlates each tracking channel's hard-limited prompt
    correlator output with the upsampled 50 Hz GPS preamble pattern,
    then validates candidate hits by parity-checking the TLM and HOW
    words of two preambles found 6000 ms apart.

    Returns (firstSubFrame, activeChnList): firstSubFrame[ch] is the
    millisecond index of the first validated preamble (or '-' when none
    was found) and activeChnList holds the channels that passed.
    """
    #Preamble search can be delayed to a later point in the track
    #results to avoid noise due to tracking loop transients
    searchStartOffset = 0
    #Initialize the firstSubFrame array
    firstSubFrame = ['-' for i in range(len(trackResults))]
    #Generate the preamble pattern (50 Hz)
    preamble_bits = [1, -1, -1, -1, 1, -1, 1, 1]
    #Upsample to 1KHz (tracking loop speed)
    preamble_ms = np.repeat(preamble_bits, 20)
    #Make a list of channels excluding the channels that are not tracking
    activeChnList = []
    for i in range(len(trackResults)):
        if (trackResults[i].status == 'T'):
            activeChnList.append(i)
    #Iterate over a reversed *copy* so channels can be removed from
    #activeChnList while looping.
    for channelNr in activeChnList[-1:-len(activeChnList)-1:-1]:
        bits = trackResults[channelNr].I_P[
            np.r_[0 + searchStartOffset:len(trackResults[channelNr].I_P)]]
        #Hard limit the prompt correlation output to +1/-1
        #if bits[n] = 0, then sign(bits[n]) = 0, but we don't care about
        #that case because we can't assume bit parity from a correlation of 0
        bits = np.sign(bits)
        #Correlate preamble with the tracking output - convolution reverses vector that is convolved with
        tlmXcorrResult = np.correlate(bits, preamble_ms, mode='valid')
        index = None
        index2 = None
        #40 = 2 * 20 ms/bit = 40 to give enough room to get parity bits from previous subframe
        #1200 = 2*30 * 20 ms/bit: to give room to get (TLM & HOW), and to account for
        #'valid' not searching last 159 shifts of upsampled preamble with bits
        convSearchRange = np.r_[40:len(bits)-1200]
        #Find where preamble starts
        #153 from (8 preamble bits * 20 bits/ms) 160 minus a
        #little slop for slight misalignment
        index = np.nonzero(np.array(np.greater(np.abs(
            tlmXcorrResult[convSearchRange]), 153), dtype=int))[0] + searchStartOffset + 40
        #Analyze detected preambles
        for i in range(len(index)-1):
            #Find distance in time between this occurance and the other 'preambles'
            #If 6000ms (6s), then validate the parities of two words to verify
            index2 = index - index[i]
            #If we find preambles 6000 ms apart, check the preambles of the TLM and HOW
            if any(np.equal(index2, 6000)):
                tlmstar = corrs2bits.signed(trackResults[channelNr].I_P[index[i]-40:index[i]])
                tlm = corrs2bits.signed(trackResults[channelNr].I_P[index[i]:index[i] + 20*30])
                howstar = corrs2bits.signed(trackResults[channelNr].I_P[index[i]+20*30-40:index[i]+20*30])
                how = corrs2bits.signed(trackResults[channelNr].I_P[index[i] + 20*30:index[i]+20*60])
                #Check the parity of the TLM and HOW words
                if (navPartyChk(tlmstar, tlm) != 0) and (navPartyChk(howstar, how) != 0):
                    #Parity was okay. Record the preamble start position. Skip the rest of
                    #the preamble pattern checking for this channel and process next channel.
                    firstSubFrame[channelNr] = index[i]
                    break
        #Reject channels for which we can't find and validate preamble
        if firstSubFrame[channelNr] == '-':
            # BUG FIX: pop(channelNr) treated the channel *number* as a list
            # index, which removes the wrong element whenever some channels
            # are not tracking; remove() deletes by value instead.
            activeChnList.remove(channelNr)
    return (firstSubFrame, activeChnList)
|
UTF-8
|
Python
| false | false | 2,012 |
9,225,589,798,420 |
98e3c22b4112127f58aed9b3706cefa1838ec39b
|
53330d6da944fc5bc8dffdad8b9341e87792d757
|
/trunk/frameworkedGL/admin/cg.py
|
3db2e6a30cc5d63baa40f6224d0c5333efbf95ce
|
[] |
no_license
|
BackupTheBerlios/sfox-svn
|
https://github.com/BackupTheBerlios/sfox-svn
|
dfc241dd53689d5e2534b190e78795e7ea7f95a7
|
398038d130dbb319619c76add72e89e14a566bae
|
refs/heads/master
| 2020-04-28T12:42:44.341704 | 2009-08-30T19:30:50 | 2009-08-30T19:30:50 | 40,819,555 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Inspired by:
# Julien Antille, Thomas Nagy, 2005
# BSD license (see COPYING)
def exists(env):
    """SCons tool hook: report whether this tool can be loaded.

    BUG FIX: previously returned the undefined name `true`, which raised
    NameError; the Python boolean constant is `True`.
    """
    return True
def generate(env):
    """ Detect the cg library (nvidia) """
    # SCons tool hook: probe for Cg once, cache the result in
    # CACHEDIR/cg.cache.py, and add the Cg link libraries to env.
    from SCons.Options import Options
    cachefile = env['CACHEDIR']+'/cg.cache.py'
    opts = Options(cachefile)
    opts.AddOptions(
        ( 'HASCG', 'has the cg library' ),
    )
    opts.Update(env)
    # Re-run the configure check when explicitly requested or when no
    # cached answer exists yet.
    if 'configure' in env['TARGS'] or not env.has_key('HASCG'):
        import SCons.SConf
        conf = SCons.SConf.SConf( env )
        if not conf.CheckCHeader('Cg/cg.h'):
            # Hard requirement: abort the build with install instructions.
            print 'We really need the cg library !'
            print 'Get ftp://download.nvidia.com/developer/cg/Cg_1.3/Linux/Cg-1.3.0501-0700.i386.tar.gz and unpack it in your root directory'
            import sys
            sys.exit(1)
        env['HASCG']=1
        env = conf.Finish()
        opts.Save(cachefile, env)
    env.AppendUnique(LIBS = ['Cg', 'CgGL', 'GL'])
|
UTF-8
|
Python
| false | false | 2,009 |
11,785,390,263,683 |
0fcb5f03d5df5b68529be987b1b1bacc3372d76f
|
85fb5be577b98316a64ff0a693ea28863ec95327
|
/models/db.py
|
e9a354c21808d11ef8b5c3e735bea84220c86a5a
|
[
"LicenseRef-scancode-unknown-license-reference",
"GPL-1.0-or-later",
"GPL-3.0-only",
"LicenseRef-scancode-free-unknown",
"LicenseRef-scancode-public-domain"
] |
non_permissive
|
span007/paideia
|
https://github.com/span007/paideia
|
8611edf1b227d90dd7a856feab805e47c5f77779
|
f6a10afd461608764bdd2068299b25783d90c9e8
|
refs/heads/master
| 2021-01-18T10:45:45.900803 | 2014-06-17T17:11:54 | 2014-06-17T17:11:54 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
# this file is released under public domain and you can use without limitations
if 0:
from gluon import DAL, URL, Field, SQLFORM
import logging
from pytz import common_timezones
from pytz import timezone
from gluon.tools import Recaptcha, Mail, Auth, Crud, Service, PluginManager
from gluon.tools import IS_IN_SET
from gluon.globals import current
import datetime
response = current.response
request = current.request
now = datetime.datetime.utcnow()
if request.is_local: # disable in production enviroment
from gluon.custom_import import track_changes
track_changes(True)
#-------------------------------------------------------------
# Recognize when running in test environment
#-------------------------------------------------------------
# This section adapted from https://github.com/viniciusban/web2py.test
# note: with Ubuntu, put test db on ramdisk with /dev/shm directory.
temp_dir = '/dev/shm/' + request.application
# temp_dir = '/tmp'
def _i_am_running_under_test():
'''Check if Web2py is running under a test environment.
'''
test_running = False
if request.is_local:
# IMPORTANT: the temp_filename variable must be the same as the one set
# on your tests/conftest.py file.
temp_filename = '%s/tests_%s.tmp' % (temp_dir, request.application)
import glob
if glob.glob(temp_filename):
test_running = True
return test_running
#-------------------------------------------------------------
# define database storage
#-------------------------------------------------------------
if _i_am_running_under_test():
db = DAL('sqlite://storage.sqlite', pool_size=1) # check_reserved=['all']
#db = DAL('sqlite://test_storage.sqlite', folder=temp_dir)
else:
# TODO: check these sqlite settings
# check_reserved makes sure no column names conflict with back-end db's
db = DAL('sqlite://storage.sqlite', pool_size=1, lazy_tables=True,
check_reserved=['sqlite', 'mysql'])
#-------------------------------------------------------------
# Set up logging
#-------------------------------------------------------------
logger = logging.getLogger('web2py.app.paideia')
logger.setLevel(logging.DEBUG)
#-------------------------------------------------------------
# Generic views
#-------------------------------------------------------------
# by default give a view/generic.extension to all actions from localhost
# none otherwise. a pattern can be 'controller/function.extension'
response.generic_patterns = ['*'] if request.is_local else []
#-------------------------------------------------------------
# set up services
#-------------------------------------------------------------
crud = Crud(db) # for CRUD helpers using auth
service = Service() # for json, xml, jsonrpc, xmlrpc, amfrpc
plugins = PluginManager() # for configuring plugins
current.db = db # to access db from modules
#-------------------------------------------------------------
# get private data from secure file
#-------------------------------------------------------------
keydata = {}
with open('applications/paideia/private/app.keys', 'r') as keyfile:
for line in keyfile:
k, v = line.split()
keydata[k] = v
#-------------------------------------------------------------
#configure authorization
#-------------------------------------------------------------
auth = Auth(db, hmac_key=Auth.get_or_create_key()) # authent/authorization
#-------------------------------------------------------------
# place auth in current so it can be imported by modules
#-------------------------------------------------------------
current.auth = auth
#-------------------------------------------------------------
#misc auth settings
#-------------------------------------------------------------
auth.settings.create_user_groups = False
auth.settings.label_separator = ''
#-------------------------------------------------------------
# Customizing auth tables
#-------------------------------------------------------------
#adding custom field for user time zone
auth.settings.extra_fields['auth_user'] = [
Field('time_zone',
'string',
default='America/Toronto',
requires=IS_IN_SET((common_timezones)),
widget=SQLFORM.widgets.options.widget
),
Field.Virtual('tz_obj',
lambda row: timezone(row.auth_user.time_zone.replace('|', ''))
if (hasattr(row.auth_user, 'time_zone') and
row.auth_user.time_zone)
else 'America/Toronto'
),
Field('uuid', length=64, default=lambda:str(uuid.uuid4())),
Field('modified_on', 'datetime', default=request.now)
]
#adding custom field for class info in groups
auth.settings.extra_fields['auth_group'] = [
Field('institution', 'string', default='Tyndale Seminary'),
Field('academic_year', 'integer', default=now.year), # was year
Field('term', 'string'),
Field('course_section', 'string'),
Field('course_instructor', 'reference auth_user', default=auth.user_id),
Field('start_date', 'datetime'),
Field('end_date', 'datetime'),
Field('paths_per_day', 'integer', default=40),
Field('days_per_week', 'integer', default=5),
Field('uuid', length=64, default=lambda:str(uuid.uuid4())),
Field('modified_on', 'datetime', default=request.now)
]
auth.settings.extra_fields['auth_membership'] = [
Field('uuid', length=64, default=lambda:str(uuid.uuid4())),
Field('modified_on', 'datetime', default=request.now)
]
auth.settings.extra_fields['auth_permission'] = [
Field('uuid', length=64, default=lambda:str(uuid.uuid4())),
Field('modified_on', 'datetime', default=request.now)
]
auth.settings.extra_fields['auth_event'] = [
Field('uuid', length=64, default=lambda:str(uuid.uuid4())),
Field('modified_on', 'datetime', default=request.now)
]
auth.settings.extra_fields['auth_cas'] = [
Field('modified_on', 'datetime', default=request.now)
]
auth.define_tables() # creates all needed tables
db.auth_user._format = lambda row: '{}:{}, {}'.format(row.id, row.last_name,
row.first_name)
#-------------------------------------------------------------
# Mail config
#-------------------------------------------------------------
mail = Mail()
mail.settings.server = keydata['email_sender'] # 'logging' # SMTP server
mail.settings.sender = keydata['email_address'] # email
mail.settings.login = keydata['email_pass'] # credentials or None
current.mail = mail
auth.settings.mailer = mail # for user email verification
auth.settings.registration_requires_verification = False
auth.settings.registration_requires_approval = False
auth.messages.verify_email = 'Click on the link http://' \
+ request.env.http_host + URL('default', 'user', args=['verify_email']) \
+ '/%(key)s to verify your email'
auth.settings.reset_password_requires_verification = True
auth.messages.reset_password = 'Click on the link http://' \
+ request.env.http_host + URL('default', 'user', args=['reset_password'])\
+ '/%(key)s to reset your password'
#-------------------------------------------------------------
# enable recaptcha anti-spam for selected actions
#-------------------------------------------------------------
auth.settings.login_captcha = None
# TODO: turn these back on!!!!
auth.settings.register_captcha = Recaptcha(request,
keydata['captcha_public_key'], keydata['captcha_private_key'])
auth.settings.retrieve_username_captcha = Recaptcha(request,
keydata['captcha_public_key'], keydata['captcha_private_key'])
auth.settings.retrieve_password_captcha = Recaptcha(request,
keydata['captcha_public_key'], keydata['captcha_private_key'])
#-------------------------------------------------------------
# crud settings
#-------------------------------------------------------------
crud.settings.auth = auth # =auth to enforce authorization on crud
|
UTF-8
|
Python
| false | false | 2,014 |
4,389,456,622,076 |
1cfa2ad0ac7bc9b86d65d30f3db1eace2e50e853
|
e950c3ea3f818a3fd4b307f1229634e892e46f14
|
/src/warmup_project/scripts/wallfollow.py
|
e80a344d58837c1c47ba2d12b2f9fce55f1bdc4c
|
[] |
no_license
|
rachelboy/comprobo2014
|
https://github.com/rachelboy/comprobo2014
|
579e634ef842142b2d3493a32276244347451ed9
|
2281209aac5a730770ce5308542063a2cb5ea25b
|
refs/heads/master
| 2021-01-17T23:34:03.314985 | 2014-09-25T21:19:58 | 2014-09-25T21:19:58 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# Software License Agreement (BSD License)
#
# Copyright (c) 2008, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# Revision $Id$
## Simple talker demo that listens to std_msgs/Strings published
## to the 'chatter' topic
import rospy
from sensor_msgs.msg import LaserScan
from geometry_msgs.msg import Twist, Vector3
import cv2
from numpy import argmin
from scipy.stats import mode
class WallFollower():
    """ROS node implementing a three-state wall follower.

    States: "approach" (drive toward the nearest obstacle), "align" (rotate
    until the wall sits at ~90 degrees / 1.57 rad), "wall follow" (drive
    along it, steering to hold the target distance).  Laser scans arrive on
    the 'scan' topic; velocity commands are published on 'cmd_vel'.
    """

    def __init__(self, target=.6, buf_big=.3, buf_small=.05):
        # target: desired wall distance; buf_big / buf_small: loose and tight
        # tolerances for state transitions (same units as the scan ranges,
        # presumably metres -- TODO confirm).
        self.target = target
        self.buf_big = buf_big
        self.buf_small = buf_small
        self.state = "approach"
        # Debounce counter: only leave "wall follow" after 2 bad readings.
        self.delay_counter = 0
        self.turn = 0.0
        self.speed = 0.0
        rospy.init_node('approach_wall', anonymous=True)
        self.pub = rospy.Publisher('cmd_vel', Twist, queue_size=10)
        self.sub = rospy.Subscriber('scan', LaserScan, self.scan_received)

    def switch_state(self,state):
        # Change state and stop the robot until the next scan is processed.
        self.state=state
        self.turn, self.speed = 0.0,0.0

    def scan_received(self,data):
        """Laser scan callback: update self.turn / self.speed per state."""
        # Keep only plausible readings (0 < range < 4) paired with angles.
        vals = [(data.ranges[i],data.angle_min+(data.angle_increment*i)) for i in range(len(data.ranges)) if data.ranges[i]>0 and data.ranges[i]<4]
        if len(vals)>5:
            rs,angs = zip(*vals)
            # The closest valid reading is taken to be the wall.
            i = argmin(rs)
            distance_to_wall = rs[i]
            angle_to_wall = angs[i]
            print distance_to_wall, angle_to_wall
            if self.state == "wall follow":
                print "wall follow"
                if abs(distance_to_wall-self.target)>self.buf_big:
                    # Too far off: re-approach after 2 consecutive bad scans.
                    self.delay_counter = self.delay_counter+1
                    if self.delay_counter >= 2:
                        self.delay_counter = 0
                        self.switch_state("approach")
                else:
                    self.delay_counter = 0
                    # Steer so the wall stays at 1.57 rad to the side; slow
                    # down proportionally when turning hard.
                    self.turn = angle_to_wall-1.57
                    self.speed = .05*(4.8-self.turn)
            elif self.state == "align":
                print "align"
                if abs(angle_to_wall-1.57) < .2:
                    # Roughly parallel to the wall; start following it.
                    self.switch_state("wall follow")
                else:
                    # Rotate in place toward the parallel orientation.
                    self.turn = .7*(angle_to_wall-1.57)
                    self.speed = 0.0
            elif self.state == "approach":
                if abs(distance_to_wall-self.target)<self.buf_small:
                    self.switch_state("align")
                elif angle_to_wall<.2 or angle_to_wall>6.1:
                    # Wall is (nearly) dead ahead: drive straight, speed
                    # proportional to the remaining distance error.
                    print "moving"
                    self.turn = 0.0
                    self.speed = .4*(distance_to_wall-self.target)
                else:
                    # Turn in place until the wall is in front of the robot.
                    print "turning"
                    self.turn = .15*(-angle_to_wall+3.142)
                    self.speed = 0.0

    def set_target_distance(self,new_distance):
        """ call back function for the OpenCv Slider to set the target distance """
        # Slider works in centimetres; internal target is in range units.
        self.target = new_distance/100.0

    def wall_withslider(self):
        """ Main run loop for wall with slider """
        cv2.namedWindow('UI')
        cv2.createTrackbar('distance', 'UI', int(self.target*100), 300, self.set_target_distance)
        r = rospy.Rate(10)
        # Publish the latest computed command at 10 Hz; scan_received keeps
        # self.speed / self.turn up to date asynchronously.
        while not(rospy.is_shutdown()):
            msg = Twist()
            msg.linear.x = .3*self.speed
            #print "speed", self.speed
            msg.angular.z = self.turn
            #print "turn", self.turn
            self.pub.publish(msg)
            cv2.waitKey(10)
            r.sleep()
if __name__ == '__main__':
    # Run the wall follower with an OpenCV slider for the target distance.
    follower = WallFollower()
    follower.wall_withslider()
|
UTF-8
|
Python
| false | false | 2,014 |
15,229,954,063,439 |
76b13349308c6bc01e08b7e791766691292f46bb
|
07c31afef0103d2d79c335094182028e45eaa545
|
/wikidatabot.py
|
272956d6b3d51632e26ba4d086cd091cfba6d992
|
[] |
no_license
|
thieol/pywikidata
|
https://github.com/thieol/pywikidata
|
0d0f039e2c027be27454b9c4782ea36e5dfc7fa5
|
b12eb758aafe718c9911bf89eb7a00a830e51722
|
refs/heads/master
| 2021-01-16T22:54:42.778365 | 2014-05-04T20:03:14 | 2014-05-04T20:03:14 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import wikidata
import errors
class wikidatabot(object):
    """Helper class to use in bots.  Some checks are done before updates."""

    def __init__(self):
        # Working language code; must be set via set_lang() before updates.
        self.lang = None

    def set_lang(self, lang):
        """Sets the language in which you want to work."""
        self.lang = lang

    def get_lang(self):
        """Gets the language in which you want to work."""
        return self.lang

    def _normalize_id(self, item):
        """Strip a leading 'q'/'Q' from the item's id, in place."""
        if str(item.id)[0:1].lower() == "q":
            item.id = str(item.id)[1:]

    def getItem(self, id):
        """Gets an item by its id (number or beginning with q)."""
        item = wikidata.api.getItemById(id)
        # BUG FIX: the original fetched the item a second time and returned
        # that, discarding the id normalization done on the first copy.
        self._normalize_id(item)
        return item

    def updateDescription(self, id, lib, comment="", force=False):
        """Update a description.

        If a description already exists nothing is done unless force=True.
        A comment is appended to the edit summary.

        :raises errors.InvalidBotOperation: when the description is already
            set and force is False.
        """
        item = wikidata.api.getItemById(id)
        if self.lang in item.descriptions:
            if len(item.descriptions[self.lang]) > 0:
                if not force:
                    raise errors.InvalidBotOperation("description is not empty")
        self._normalize_id(item)
        item.descriptions[self.lang] = lib
        sumary = comment + "Bot adding [" + self.lang + "] description ->" + lib
        wikidata.api.save(item, sumary)

    def updateLabel(self, id, lib, comment="", force=False):
        """Update a label.

        If a label already exists nothing is done unless force=True.
        A comment is appended to the edit summary.

        :raises errors.InvalidBotOperation: when the label is already set
            and force is False.
        """
        item = wikidata.api.getItemById(id)
        if self.lang in item.labels:
            if len(item.labels[self.lang]) > 0:
                if not force:
                    raise errors.InvalidBotOperation("label is not empty")
        self._normalize_id(item)
        item.labels[self.lang] = lib
        # BUG FIX: the summary previously said "description" for label edits.
        sumary = comment + "Bot adding [" + self.lang + "] label ->" + lib
        wikidata.api.save(item, sumary)
|
UTF-8
|
Python
| false | false | 2,014 |
18,013,092,866,789 |
610b9dca827118af4a860b079b909e1147f6a1de
|
afd6f816b1543feea24380b563b472aa1f053d6e
|
/brainx/version.py
|
762844b9d3554eec1dadf5a4768c633c79226d54
|
[
"BSD-3-Clause"
] |
permissive
|
jrcohen02/brainx_archive2
|
https://github.com/jrcohen02/brainx_archive2
|
c3cd9c56930920a967e7ab4684f00c24181ae218
|
a78dd69daa26d797888b796122b74b4e9dd6b548
|
refs/heads/master
| 2021-01-19T03:11:36.289916 | 2014-10-15T14:17:54 | 2014-10-15T14:17:54 | 11,833,553 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""brainx version/release information"""
# Format expected by setup.py and doc/source/conf.py: string of form "X.Y.Z"
_version_major = 0
_version_minor = 1
_version_micro = '' # use '' for first of series, number for 1 and above
_version_extra = 'dev'
#_version_extra = '' # Uncomment this for full releases
# Construct full version string from these.
_ver = [_version_major, _version_minor]
if _version_micro:
_ver.append(_version_micro)
if _version_extra:
_ver.append(_version_extra)
__version__ = '.'.join(map(str, _ver))
classifiers = ["Development Status :: 3 - Alpha",
"Environment :: Console",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Scientific/Engineering"]
description = "Brainx: timeseries analysis for neuroscience data"
# Note: this long_description is actually a copy/paste from the top-level
# README.txt, so that it shows up nicely on PyPI. So please remember to edit
# it only in one place and sync it correctly.
long_description = """
================================================
Brainx: network analysis for neuroimaging data
================================================
Brainx provides a set of tools, based on the NetworkX graph theory package, for
the analysis of graph properties of neuroimaging data.
Installation
============
For a normal installation, simply type::
python setup.py install [other options here]
To install using setuptools support, use::
python setup_egg.py install [other options here]
For example, to install using a development-mode setup in your personal user
directory, use::
python setup_egg.py develop --prefix=$HOME/.local
License information
===================
Brainx is licensed under the terms of the new BSD license. See the file
"LICENSE" for information on the history of this software, terms & conditions
for usage, and a DISCLAIMER OF ALL WARRANTIES.
"""
# Other constants for distutils setup() call
name = "brainx"
maintainer = "Nipy Developers"
maintainer_email = "[email protected]"
url = "http://nipy.org/brainx"
download_url = "http://github.com/nipy/brainx/downloads"
license = "Simplified BSD"
author = "Brainx developers"
author_email = "[email protected]"
platforms = "OS Independent"
version = __version__
packages = ['brainx',
'brainx.tests',
]
package_data = {"brainx": ["LICENSE"]}
requires = ["numpy", "matplotlib", "scipy", "networkx"]
|
UTF-8
|
Python
| false | false | 2,014 |
5,626,407,164,836 |
2a21ecbb212d292f64ab0f4d6bbf86b2816e0879
|
5dd698ebbb97c068414c1263d013ad2f83ba33dd
|
/pynotifyd/config.py
|
d958b6788a655fa3f26e18219c9c19ed951c1f40
|
[] |
no_license
|
CygnusNetworks/pynotifyd
|
https://github.com/CygnusNetworks/pynotifyd
|
e38861d3d3b1299d4ffd89e094534173be0a7d9c
|
02a3992b1b29db9d4db869b7119e4497f5d1f188
|
refs/heads/master
| 2020-12-24T15:49:21.219546 | 2014-05-21T10:06:39 | 2014-05-21T10:06:39 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import configobj
import email.utils
import socket
import validate
# Optional dependency: use the phonenumbers package for strict validation
# when it is available, otherwise fall back to a basic "+digits" check.
HAS_PHONENUMBERS=False
try:
	import phonenumbers
	HAS_PHONENUMBERS=True
except ImportError:
	pass
import errors
# configspec for configobj validation: required [general] keys plus
# free-form [contacts] and [providers] sections (each provider must at
# least name a driver).
config_spec = configobj.ConfigObj("""
[general]
queuedir = string(min=1)
retry = list(min=1)
[contacts]
[[__many__]]
[providers]
[[__many__]]
driver = string(min=1)
""".splitlines(), interpolation=False, list_values=False)
def get_the_item(obj, key):
	"""
	Yield obj[key] if it exists and nothing otherwise.
	"""
	try:
		value = obj[key]
	except KeyError:
		return
	yield value
def validate_contact(contact):
	"""
	Validate the optional "number", "jabber" and "email" fields of one
	contact section, raising on the first invalid value.

	@type contact: {str: str}
	@raises PyNotifyDConfigurationError:
	"""
	# Basic constraint checking on phone number, if phonenumbers is not used
	for number in get_the_item(contact, "number"):
		if not HAS_PHONENUMBERS:
			# Fallback check: international format "+<digits>" only.
			if not number.startswith("+"):
				raise errors.PyNotifyDConfigurationError("phone number must start with a plus sign")
			if not number[1:].isdigit():
				raise errors.PyNotifyDConfigurationError("non-digits found in phone number")
		else:
			try:
				# TODO: add region support
				_ = phonenumbers.parse(number, None)
			except Exception, msg:
				raise errors.PyNotifyDConfigurationError("phonenumber cannot be parsed with exception %s" % msg)
	for jabber in get_the_item(contact, "jabber"):
		if '@' not in jabber:
			raise errors.PyNotifyDConfigurationError("a jabberid has to contain an @ sign")
	for addr in get_the_item(contact, "email"):
		# parseaddr returns ("realname", "addr"); an empty addr means invalid.
		if len(email.utils.parseaddr(addr)[1])==0:
			raise errors.PyNotifyDConfigurationError("email address %s is invalid in contact %s" % (addr, contact))
def read_config(filename):
	"""
	Read, validate and return the pynotifyd configuration file.

	@type filename: str
	@rtype: configobj.ConfigObj
	@raises PyNotifyDConfigurationError:
	"""
	spec = config_spec.copy()
	# Expose the local fqdn for $hostname template interpolation.
	spec["hostname"] = "string(default=%r)" % socket.getfqdn()
	try:
		config = configobj.ConfigObj(filename, interpolation="template", configspec=spec, file_error=True)
	except IOError, msg:
		raise errors.PyNotifyDConfigurationError("Failed to read configuration file named %r with IOError: %s" % (filename, msg))
	except OSError, msg:
		raise errors.PyNotifyDConfigurationError("Failed to read configuration file named %r with OSError: %s" % (filename, msg))
	# general verification
	for section_list, key, error in configobj.flatten_errors(config, config.validate(validate.Validator())):
		raise errors.PyNotifyDConfigurationError("Failed to validate %s in section %s with error %s" % (key, ", ".join(section_list), error))
	# check contacts
	for contactname, contact in config["contacts"].items():
		if not isinstance(contact, dict):
			raise errors.PyNotifyDConfigurationError("non-section found in section contacts")
		try:
			validate_contact(contact)
		except errors.PyNotifyDConfigurationError, err:
			raise errors.PyNotifyDConfigurationError("%s in contact %s" % (err.message, contactname))
	# check retry logic: each entry is a delay (digits), "GIVEUP",
	# or the name of a configured provider.
	for provider in config["general"]["retry"]:
		if provider.isdigit() or provider == "GIVEUP":
			continue
		if provider in config["providers"]:
			continue
		raise errors.PyNotifyDConfigurationError("provider %s not found" % provider)
	return config
|
UTF-8
|
Python
| false | false | 2,014 |
1,348,619,731,926 |
9684937808899657d3021f2262c7be0b4d3b4066
|
9b7ca56662f2abdbcb3a8d49c47abd176336509a
|
/prob2.py
|
67663528180fbe1f5e6450a697b342ef04302b8d
|
[] |
no_license
|
mwhara/project-euler-answers
|
https://github.com/mwhara/project-euler-answers
|
bf2f8d6c38e593d3706bbbab5c01d5cca0233d9c
|
0b2ba6c46a17d5ead0aa75556c4dc306ec4963d6
|
refs/heads/master
| 2020-06-04T06:05:29.868095 | 2013-07-10T02:37:09 | 2013-07-10T02:37:09 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Project Euler problem 2: sum of the even Fibonacci numbers not
# exceeding four million.
top_limit = 4000000
even_sum = 0
previous, current = 1, 2
while current <= top_limit:
    if current % 2 == 0:
        even_sum += current
    previous, current = current, current + previous
print(even_sum)
|
UTF-8
|
Python
| false | false | 2,013 |
13,846,974,601,972 |
95cdb782e64e0717f3f982fdc64b1932938b50e6
|
aeab8e0d446d6d09e874f7a308de5535cb0877bd
|
/client/deps/dejagnu/dejagnu.py
|
8bfe7b891d6efbab1cbadb62566f8c36dfb638f3
|
[
"LGPL-3.0-only",
"GPL-2.0-only"
] |
non_permissive
|
yochow/autotest
|
https://github.com/yochow/autotest
|
66ea5da532bdac5553f15edab7da28b79e00a70c
|
e2249d16115e9f8e497019fe41c62ca810fa9c22
|
refs/heads/master
| 2021-01-19T17:44:04.355440 | 2009-07-28T02:52:30 | 2009-07-28T02:52:30 | 262,592 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
import os
import common
from autotest_lib.client.bin import utils
version = 1
def setup(tarball, topdir):
    """Unpack *tarball* into <topdir>/src, then configure, build and
    install dejagnu under <topdir>/dejagnu."""
    build_dir = os.path.join(topdir, 'src')
    utils.extract_tarball_to_dir(tarball, 'src')
    os.chdir(build_dir)
    for command in ('./configure --prefix=%s/dejagnu' % topdir,
                    'make',
                    'make install'):
        utils.system(command)
    # Return to the top directory once the build is done.
    os.chdir(topdir)
pwd = os.getcwd()
# http://ftp.gnu.org/pub/gnu/dejagnu/dejagnu-1.4.4.tar.gz
# The source tarball is expected to sit next to this script.
tarball = os.path.join(pwd, 'dejagnu-1.4.4.tar.bz2')
# Rebuild (via setup) only when `version` changes.
utils.update_version(pwd+'/src', False, version, setup, tarball, pwd)
|
UTF-8
|
Python
| false | false | 2,009 |
14,499,809,608,407 |
4bed726ac7e95a0c058d234ba7203ca25c3bfd6d
|
0274ea3a72981b0d820fede4127aa15c37c45f15
|
/wallhack/erasm/data/ExtractAuthors.py
|
dc244d122d05cb9f0e807340584a9879e57aff01
|
[
"GPL-3.0-only",
"GPL-1.0-or-later",
"LicenseRef-scancode-warranty-disclaimer",
"GPL-3.0-or-later",
"LGPL-2.0-or-later",
"LGPL-2.1-or-later"
] |
non_permissive
|
pierrebo/wallhack
|
https://github.com/pierrebo/wallhack
|
9f94cf00ecf70b8f88930b5a7b072a8ca54f21be
|
e3ec9a9901034c433bf50f531ecc81da5c95082b
|
refs/heads/master
| 2021-01-22T17:57:33.852980 | 2014-01-04T17:48:44 | 2014-01-04T17:48:44 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
We will analyse the article metadata file and extract co-authors.
"""
from apgl.util.PathDefaults import PathDefaults
import os
import logging
import sys
import itertools
import json
from apgl.util.ProfileUtils import ProfileUtils
import numpy
import scipy
import scipy.io
logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
def saveAuthors():
    """Parse the article-metadata dump and write the coauthor edge list.

    Reads one JSON object per line, assigns each distinct author name an
    integer id, and writes one "id1, id2" line per coauthor pair (duplicate
    edges are kept) to <output>/edges.txt.
    """
    path = "/local/dhanjalc/dataDump-28-11-12/"
    fileName = path + "articleMetadata500000"

    # Fall back to the shared data directory when the local dump is absent.
    if not os.path.exists(fileName):
        path = PathDefaults.getDataDir() + "erasm/"
        fileName = path + "articleMetadata1000000"
    logging.debug("Loading article metadata from " + fileName)

    vertexIdDict = {}      # author name -> integer vertex id
    vertexIdSet = set([])  # fast membership test for names seen so far
    vertexIdList = []
    edgeSet = set([])
    edgeArray = []
    lineInd = 0
    emptyAuthors = 0
    edgeFileName = PathDefaults.getOutputDir() + "edges.txt"

    # `with` guarantees both files are closed even if parsing fails
    # (previously fileObj was never closed).
    with open(fileName, 'r') as fileObj, open(edgeFileName, "w") as edgesFile:
        lineBuffer = ""
        for line in fileObj:
            if lineInd % 1000 == 0:
                print("Line " + str(lineInd) + " Author " + str(len(vertexIdSet)) + " empty author strings " + str(emptyAuthors))
                # Flush the buffered edges once every 1000 input lines.
                if len(lineBuffer) != 0:
                    edgesFile.write(lineBuffer)
                    lineBuffer = ""
            articleMetaData = json.loads(line)
            if "authors" in articleMetaData:
                authors = articleMetaData["authors"]
                del articleMetaData
                coauthorList = []
                for author in authors:
                    authorString = "".join([author["forename"], " ", author["surname"]])
                    authorString = authorString.strip()
                    if len(authorString) != 0:
                        # First sighting of this author: assign the next id.
                        if authorString not in vertexIdSet:
                            vertexIdDict[authorString] = len(vertexIdSet)
                            vertexIdSet.add(authorString)
                        coauthorList.append(authorString)
                        del authorString
                    else:
                        emptyAuthors += 1
                iterator = itertools.combinations(coauthorList, 2)
                del coauthorList
                for vId1, vId2 in iterator:
                    # Note that we will have duplicate edges
                    lineBuffer += str(vertexIdDict[vId1]) + ", " + str(vertexIdDict[vId2]) + "\n"
            lineInd += 1

        # BUG FIX: the tail of the buffer (up to 999 lines of edges) was
        # previously discarded because it was only flushed every 1000 lines.
        if lineBuffer:
            edgesFile.write(lineBuffer)

    print(sys.getsizeof(vertexIdDict))
    print(sys.getsizeof(vertexIdSet))
    print(sys.getsizeof(vertexIdList))
    print(sys.getsizeof(edgeSet))
    print(sys.getsizeof(edgeArray))
    logging.debug("Saved edges as " + edgeFileName)
def saveRatingMatrix():
    """
    Take the coauthor graph above and make vertices indexed from 0 then save
    as matrix market format.
    """
    edgeFileName = PathDefaults.getOutputDir() + "erasm/edges2.txt"
    logging.debug("Reading edge list")
    edges = numpy.loadtxt(edgeFileName, delimiter=",", dtype=numpy.int)
    logging.debug("Total number of edges: " + str(edges.shape[0]))
    # Re-map arbitrary vertex ids onto a contiguous 0..n-1 range.
    vertexIdDict = {}
    vertexIdSet = set([])
    i = 0
    for edge in edges:
        if edge[0] not in vertexIdSet:
            vertexIdDict[edge[0]] = i
            vertexIdSet.add(edge[0])
            i += 1
        if edge[1] not in vertexIdSet:
            vertexIdDict[edge[1]] = i
            vertexIdSet.add(edge[1])
            i += 1
    n = len(vertexIdDict)
    # Symmetric weighted adjacency matrix; lil_matrix is cheap to build
    # incrementally, converted to csr below for slicing.
    R = scipy.sparse.lil_matrix((n, n))
    logging.debug("Creating sparse matrix")
    for edge in edges:
        R[vertexIdDict[edge[0]], vertexIdDict[edge[1]]] += 1
        R[vertexIdDict[edge[1]], vertexIdDict[edge[0]]] += 1
    logging.debug("Created matrix " + str(R.shape) + " with " + str(R.getnnz()) + " non zeros")
    R = R.tocsr()
    minCoauthors = 20
    logging.debug("Removing vertices with <" + str(minCoauthors) + " coauthors")
    nonzeros = R.nonzero()
    # NOTE(review): arange is sized by the number of nonzeros while the
    # bincount mask is sized by (max row index + 1); these lengths can
    # differ, which looks like a latent bug -- verify before reuse.
    inds = numpy.arange(nonzeros[0].shape[0])[numpy.bincount(nonzeros[0]) >= minCoauthors]
    R = R[inds, :][:, inds]
    logging.debug("Matrix has shape " + str(R.shape) + " with " + str(R.getnnz()) + " non zeros")
    matrixFileName = PathDefaults.getOutputDir() + "erasm/R"
    scipy.io.mmwrite(matrixFileName, R)
    logging.debug("Wrote matrix to file " + matrixFileName)
#saveAuthors()  # step 1: extract the raw edge list (run once beforehand)
saveRatingMatrix()  # step 2: build and save the coauthor matrix
|
UTF-8
|
Python
| false | false | 2,014 |
6,871,947,709,511 |
14a3c0fe81760e1c3d4a504ccd196a34794f7bf5
|
0fd670e636a6cab725aaba0ddc5e48f8e2e566d6
|
/test_flow.py
|
82ca0d639b92bdb59f94d07e94d5429073cd2e29
|
[] |
no_license
|
szymanskirafal/frozentest
|
https://github.com/szymanskirafal/frozentest
|
1dba29cef577f7071dab2d8c2369aa173234f32a
|
c4cc8eee66e31a4ee8beda4b0a897ad507059419
|
refs/heads/master
| 2015-08-18T11:47:37.614814 | 2014-12-29T20:34:28 | 2014-12-29T20:34:28 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# test_flow
# niesamowite
# H looks at the screen of his smartphone and sees:
# name of each gas station and below
# stripes of different heights
# name of kind of fuel below each stripe
# amount of each kind of fuel below each name
# to see name of each gas station we have to create these names
# so we need to create a model with text attribute
# and then each of these should have value
# have to create model ON
# it has to have its volume, e.g. 10000
# the name of ON and the volume has to be shown below name of station 1
# other kinds of fuel should be created in a similar way
# it has to be a part of station 1
|
UTF-8
|
Python
| false | false | 2,014 |
1,795,296,341,426 |
701eb9320af95e7e412b0280897acf9272985762
|
95cd4eefe2df04ea4353414282196c17737b101e
|
/text/exceptions.py
|
4f9e66142af84c8dc6ebf64ddaa744493668fdd5
|
[
"MIT",
"Beerware",
"Apache-2.0"
] |
permissive
|
nicolargo/TextBlob
|
https://github.com/nicolargo/TextBlob
|
c700813a52ba5792e5fec54b6f058ca4a9efa9dc
|
1444008a36dbb5ebcb5c00e4e9d24ee003e2d88a
|
refs/heads/master
| 2023-08-24T22:19:22.395604 | 2013-08-14T13:59:33 | 2013-08-14T13:59:33 | 12,116,960 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
# Default user-facing message explaining how to fetch the missing data.
MISSING_CORPUS_MESSAGE = """
Looks like you are missing some required data for this feature.
To download the necessary data, simply run
curl https://raw.github.com/sloria/TextBlob/master/download_corpora.py | python
Or use the NLTK downloader to download the missing data: http://nltk.org/data.html
If this doesn't fix the problem, file an issue at https://github.com/sloria/TextBlob/issues.
"""


class MissingCorpusException(Exception):
    '''Raised when a feature needs a corpus or trained model that is not
    installed on the user's system.  Carries download instructions by
    default.
    '''
    def __init__(self, message=MISSING_CORPUS_MESSAGE, *args, **kwargs):
        # Delegate straight to Exception; only the default message differs.
        Exception.__init__(self, message, *args, **kwargs)
|
UTF-8
|
Python
| false | false | 2,013 |
3,513,283,271,913 |
30c18890abde95c27762501bf690bdf3419af443
|
61d841de16edc2aa2b7b489fac292a2e532d754d
|
/view/indexView.py
|
18fefa471d3f7699bcb412ec1c66558090efadf7
|
[] |
no_license
|
kom551/bctg
|
https://github.com/kom551/bctg
|
f7b62fb6f9ef75fe0151fc6ae68b1a0c83bf6e1a
|
a023a35ea1b509dff1795f1d79eca8514ecce3d5
|
refs/heads/master
| 2016-09-06T08:04:49.159821 | 2013-03-26T13:05:07 | 2013-03-26T13:05:07 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-#
'''
Created on 2012-1-17
@author: kom
'''
import web
import os
from common import *
class IndexHandler:
    """Front-page controller."""
    def GET(self):
        """Render the index page; log the referral email when one is given."""
        data = web.input()
        # `dict.has_key` was removed in Python 3; `in` works on both 2 and 3.
        if "email" in data:
            logger.info("访问来源,首页,email:%s" %(data["email"]))
        return render('index.html',title=u"首页")
class ErrorHandler:
    """Generic error-page controller."""
    def GET(self):
        # Render the shared message template with an error title.
        return render('message.html',title=u"错误")
class notpayHandle:
    """Shown when the user has not paid or the paid period has expired."""
    def GET(self):
        # The message contains a purchase link (Taobao) rendered as HTML.
        return render('message.html',result = u"错误",msg = u"您的尚未付费成为正式用户或者期限已过,请购买正式用户资格,<a href='http://item.taobao.com/item.htm?spm=0.0.0.0.6B2Mxv&id=17691717465' target='_black'>点我购买</a>")
class cancelHandle:
    """Unsubscribe-confirmation controller."""
    def GET(self):
        """Log the unsubscribe referral (if any) and show a confirmation."""
        data = web.input()
        # `dict.has_key` was removed in Python 3; `in` works on both 2 and 3.
        if "email" in data:
            logger.info("访问来源,取消邮件,email:%s" %(data["email"]))
        return render('message.html',title=u"取消",msg=u"取消成功,如果想了解本站的其他信息,请查看<a href='/'>首页</a>")
|
UTF-8
|
Python
| false | false | 2,013 |
14,491,219,686,792 |
cefea3c7577cf25bdb8262b46bb885dd6fc0424f
|
8250dc408468b298c5ab1df57de434093a0a4985
|
/src/Imp/server/__init__.py
|
3bdc0b23e84e68d7d0f3e27e368656792d4f0c2f
|
[
"Apache-2.0"
] |
permissive
|
hdeweirdt/imp
|
https://github.com/hdeweirdt/imp
|
5643fc389b0bbc6ea5a58ee812bc3066301a5f9a
|
fced3e4ae17bc2f209c1a42528facaf68056256f
|
refs/heads/master
| 2021-01-21T09:43:27.907131 | 2014-01-06T13:39:01 | 2014-01-06T13:39:01 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""
Copyright 2013 KU Leuven Research and Development - iMinds - Distrinet
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Administrative Contact: [email protected]
Technical Contact: [email protected]
"""
from Imp.server.persistence import Fact, Agent, Resource, Version, DataStore
from Imp.resources import Id
from Imp.resources import Resource as R
from Imp.loader import CodeLoader
from . import persistence
from amqplib import client_0_8 as amqp
import sys, logging, os, time, re, json, threading, base64, datetime
import tornado.ioloop
import tornado.web
from tornado.web import StaticFileHandler, HTTPError
LOGGER = logging.getLogger(__name__)
class ImpServer(object):
"""
A server that handles fileservering, storing facts and handling
resource updates.
"""
def __init__(self, config):
self._config = config
self._loader = CodeLoader(self._config["server"]["code_dir"])
self._logger = logging.getLogger(__class__.__name__)
loglevel = self._config["server"]["loglevel"]
numeric_level = getattr(logging, loglevel.upper(), None)
if not isinstance(numeric_level, int):
raise ValueError('Invalid log level: %s' % loglevel)
logging.basicConfig(filename=self._config["server"]["logfile"],
filemode='w', level=numeric_level)
self._logger.debug("Init server")
self._stomp = None
self._fact_poll = {}
# open the fact store
ds = DataStore.instance()
ds.open(self._config["server"]["database"])
def fact_timeout_check(self):
"""
Check if a fact is about to timeout
"""
LOGGER.debug("Query for expired facts")
expired_resources = Fact.renew_facts(timeout = 60)
for res_id in expired_resources:
LOGGER.debug("Facts of resource %s expired, poll new facts" % res_id)
self.poll_facts(str(res_id))
return
def run(self):
"""
This method does the actual work
"""
# start a new fileserver thread
self._stomp = MQServer(self._config)
self._stomp.start()
settings = {
"static_path" : os.path.join(os.path.dirname(__file__), "static"),
"template_path" : os.path.join(os.path.dirname(__file__), "template"),
"globals" : {
},
}
application = tornado.web.Application([
(r"/", MainHandler, dict(server = self)),
(r"/agent/(.*)", AgentHandler, dict(server = self)),
(r"/resource/(.*)", ResourceHandler, dict(server = self)),
(r"/fact/(.*)", FactHandler, dict(server = self)),
(r"/file/(.*)", FileHandler, {"path" : self._config["server"]["storage"]}),
(r"/stat", StatHandler, dict(server = self)),
(r"/resources/update/(.*)", ResourceUpdateHandler, dict(server = self)),
(r"/code/(.*)", CodeHandler, dict(server = self)),
#(r"/state/(.*)", PersistenceHandler, dict(server = self)),
#(r"/agentstate/(.*)", StateUpdateHandler, dict(server = self)),
], **settings)
application.listen(8888)
# check if facts are about to expire
periodic = tornado.ioloop.PeriodicCallback(self.fact_timeout_check, 30000)
periodic.start()
# start the ioloop
try:
tornado.ioloop.IOLoop.instance().start()
except KeyboardInterrupt:
self._stomp.stop()
tornado.ioloop.IOLoop.instance().stop()
sys.exit(0)
def poll_facts(self, resource_id):
    """
    Send out a request on the message bus asking the owning agent for
    fresh facts about *resource_id*.

    Rate limited: at most one request per resource per 60 seconds.
    """
    # debounce: skip if we already asked for these facts less than 60s ago
    if resource_id in self._fact_poll and (self._fact_poll[resource_id] + 60) > time.time():
        return
    res_id = Id.parse_id(resource_id)
    # fetch the latest deployed version of the resource
    resource = DataStore.instance().get(Resource, resource_id)
    if resource is None:
        return
    # robustness: a resource without any versions has nothing to poll with
    if not resource.versions:
        return
    # BUG FIX: the original called sorted(versions) and discarded the
    # result, then indexed the *unsorted* list -- use max() to pick the
    # newest version number.
    version = max(v.version for v in resource.versions)
    data = {}
    for v in resource.versions:
        if version == v.version:
            data = v.data
            break
    request = {"id" : resource_id, "resource": data, "operation" : "FACTS"}
    topic = 'resources.%s.%s' % (res_id.agent_name, res_id.entity_type)
    msg = amqp.Message(json.dumps(request))
    msg.content_type = "application/json"
    # BUG FIX: attribute was misspelled "relpy_to", so the AMQP reply_to
    # property was never set and agents could not answer on our queue.
    msg.reply_to = self._stomp._queue_name
    self._stomp._channel.basic_publish(msg, exchange = self._stomp._exchange_name,
                                       routing_key = topic)
    # remember when we last polled, for the debounce above
    self._fact_poll[resource_id] = time.time()
class CodeHandler(tornado.web.RequestHandler):
    """
    A handler for submitting code: deploys a module version on the server
    and broadcasts it to all agents over the bus.
    """
    def initialize(self, server):
        self._server = server

    def post(self, version):
        # body is a JSON mapping of module name -> source
        modules = json.loads(self.request.body.decode("utf-8"))
        # NOTE(review): self._server._loader is not assigned in the visible
        # part of the Server class -- presumably set elsewhere; confirm.
        self._server._loader.deploy_version(int(version), modules, persist = True)
        self._server._stomp.update_modules(int(version), modules)
class MainHandler(tornado.web.RequestHandler):
    """Render the index page with access to the datastore and agent model."""
    def initialize(self, server):
        self._server = server

    def get(self):
        self.render("index.html", db = DataStore.instance(), Agent = persistence.Agent)
class AgentHandler(tornado.web.RequestHandler):
    """Render the detail page of a single agent."""
    def initialize(self, server):
        self._server = server

    def get(self, agent_id):
        agent = DataStore.instance().get(Agent, agent_id)
        self.render("agent.html", agent_id = agent_id, agent = agent)
class ResourceHandler(tornado.web.RequestHandler):
    """Render the detail page of a single resource, including the content
    of its attached file (if any) from the server's file storage."""

    def initialize(self, server):
        self._server = server

    def get(self, resource_id):
        ds = DataStore.instance()
        resource = ds.get(Resource, resource_id)
        # BUG FIX: the original called sorted(versions) and discarded the
        # result, then indexed the *unsorted* list -- use max() to pick
        # the newest version number.
        version = max(v.version for v in resource.versions)
        data = {}
        for v in resource.versions:
            if version == v.version:
                data = v.data
                break
        # if this version references a stored file (by content hash),
        # load its content for display
        content = ""
        if "hash" in data:
            file_path = os.path.join(self._server._config["server"]["storage"], data["hash"])
            if os.path.exists(file_path):
                with open(file_path, "rb+") as fd:
                    content = fd.read()
        self.render("resource.html", resource_id = resource_id, resource = resource, data = data, content = content)
class FactHandler(tornado.web.RequestHandler):
    """Serve a single fact value; when the fact is missing or stale,
    trigger a re-poll of the agent and answer 404."""

    def initialize(self, server):
        self._server = server

    def get(self, fact_name):
        # resource the fact belongs to, passed as ?id=<resource id>
        resource_id = self.get_argument("id", None, True)
        fact, timeout = Fact.get(resource_id, fact_name)
        if fact is not None and not timeout:
            self.write(fact.value)
        if timeout:
            # BUG FIX: the original swapped the format arguments, logging
            # "Fact <resource_id> about <fact_name>".
            self._server._logger.info("Fact %s about %s has timed out, an update is requested" % (fact_name, resource_id))
        if fact is None or timeout:
            # ask the agent for fresh facts; report "not found" for now
            self._server.poll_facts(resource_id)
            self.set_status(404)
class FileHandler(StaticFileHandler):
    """Serve files from storage (GET via StaticFileHandler) and accept
    content-addressed uploads via PUT."""
    def put(self, hash_id):
        path = os.path.join(self.root, hash_id)
        # content-addressed storage: an existing hash is never overwritten
        if os.path.exists(path):
            raise HTTPError(500, "File already exists.")
        with open(path, "wb+") as fd:
            fd.write(self.request.body)
class StatHandler(tornado.web.RequestHandler):
    """Report which of a posted list of file hashes are absent from the
    server's storage directory (JSON list in, JSON list out)."""

    def initialize(self, server):
        self._server = server

    def post(self):
        wanted = json.loads(self.request.body.decode("utf-8"))
        storage = self._server._config["server"]["storage"]
        # answer with the subset of names that do not exist on disk
        missing = [name for name in wanted
                   if not os.path.exists(os.path.join(storage, name))]
        self.write(json.dumps(missing))
class ResourceUpdateHandler(tornado.web.RequestHandler):
    """Accept a batch of resource updates (PUT) and forward each onto the
    message bus inside a single update transaction."""

    def initialize(self, server):
        self._server = server

    def put(self, version):
        """
        Upload of a batch of resource updates
        """
        payload = self.request.body
        # only JSON payloads are supported; the binary encoding was never
        # finished
        if "Content-type" in self.request.headers and self.request.headers["Content-type"] == "application/ubjson":
            #resources = bson.parse_bytes(payload)
            raise Exception("BSON not supported")
        else:
            resources = json.loads(payload.decode("utf-8"))
        try:
            self._server._stomp.start_update_transaction()
            for resource in resources:
                self._server._stomp.update_resource(resource)
            self._server._stomp.commit_update_transaction()
        # BUG FIX: was a bare "except:", which also swallows SystemExit
        # and KeyboardInterrupt -- narrow to Exception.
        except Exception:
            LOGGER.exception("An exception occured while processing resource updates")
            self._server._stomp.cancel_update_transaction()
# class StateUpdateHandler(tornado.web.RequestHandler):
# """
# Allows agents to fetch the state of their host
# """
# def initialize(self, server):
# self._server = server
#
# def get(self, hostname):
# ds = DataStore.instance()
# query = session.query(Version).order_by(Version.version.desc()).limit(1)
# last_version = 0
#
# try:
# last_version = query.one().version
# except (NoResultFound):
# self.set_status(404)
# return
#
# query = session.query(Version).filter(and_(Version.agent_name == hostname, Version.version == last_version))
#
# try:
# data = [json.loads(res.data.decode("utf-8")) for res in query.all()]
# self.write(json.dumps(data))
# except (NoResultFound):
# self.set_status(404)
class MQServer(threading.Thread):
    """
    A STOMP based fileserver for IMP.

    Worker thread that consumes the AMQP bus: stores incoming facts,
    records agent heartbeats, marks versions as updated, and publishes
    module/resource updates back onto the bus.
    """
    def __init__(self, config):
        threading.Thread.__init__(self)
        self._conn = None
        self._channel = None
        self._config = config
        self._logger = logging.getLogger(__class__.__name__)
        # configure logging from the [server] section; invalid level names
        # fail fast
        loglevel = self._config["server"]["loglevel"]
        numeric_level = getattr(logging, loglevel.upper(), None)
        if not isinstance(numeric_level, int):
            raise ValueError('Invalid log level: %s' % loglevel)
        logging.basicConfig(filename=self._config["server"]["logfile"],
                            filemode='w', level=numeric_level)
        self._logger.debug("Init fileserver")
        self._run = True
        # overwritten from config in _connect(); "imp" is only a default
        self._exchange_name = "imp"
        self._queue_name = ""

    def stop(self):
        """Signal the consume loop to exit and close the AMQP channel."""
        self._run = False
        if self._conn is not None and self._channel is not None:
            self._channel.close()
            self._conn.close()

    def _connect(self):
        """
        Connect to AMQP and subscribe to the control, updated and
        resources.* topics on an exclusive, auto-named queue.
        """
        self._conn = amqp.Connection(host = self._config["communication"]["host"],
                                     userid = self._config["communication"]["user"],
                                     password = self._config["communication"]["password"],
                                     virtual_host = "/")
        self._exchange_name = self._config["communication"]["exchange"]
        self._channel = self._conn.channel()
        self._channel.exchange_declare(exchange = self._exchange_name, type = "topic")
        # exclusive declare returns (queue_name, message_count, consumer_count)
        result = self._channel.queue_declare(exclusive = True)
        queue_name = result[0]
        self._queue_name = queue_name
        self._channel.queue_bind(exchange = self._exchange_name, queue = queue_name,
                                 routing_key="control")
        self._channel.queue_bind(exchange = self._exchange_name, queue = queue_name,
                                 routing_key="updated")
        self._channel.queue_bind(exchange = self._exchange_name, queue = queue_name,
                                 routing_key="resources.*.*")
        self._channel.basic_consume(queue = queue_name, callback=self.on_message, no_ack=True)

    def run(self):
        """
        This method does the actual work: ensure the storage directory
        exists, connect, and pump messages until stop() is called.
        """
        # init storage
        if not os.path.exists(self._config["server"]["storage"]):
            os.mkdir(self._config["server"]["storage"])
        # connect
        self._connect()
        while self._channel.callbacks and self._run:
            try:
                self._channel.wait()
            except Exception:
                # keep the consumer alive on handler errors; stop() exits
                # the loop via self._run
                self._logger.exception("Received exception in MQ handler")

    def on_message(self, msg):
        """
        Dispatch an incoming bus message on its "operation" field.
        """
        message = json.loads(msg.body)
        if "operation" in message:
            operation = message["operation"]
        else:
            # messages without an operation are silently ignored
            return
        if operation == "FACTS_REPLY":
            if "code" in message and message["code"] != 200:
                self._logger.error("Received a 404 message on a facts reply. " + str(message))
                return
            if "facts" in message:
                facts = message['facts']
                # "facts" maps resource id -> {fact name: value}; note the
                # loop variable deliberately rebinds "facts" per subject
                for subject,facts in message['facts'].items():
                    self._logger.info("Received facts from %s" % subject)
                    for fact in facts:
                        value = facts[fact]
                        # non-string values are stored JSON-encoded
                        if not isinstance(value, str):
                            value = json.dumps(value)
                        fact_obj = Fact()
                        fact_obj.value_time = time.time()
                        fact_obj.resource_id = Id.parse_id(subject)
                        fact_obj.name = fact
                        fact_obj.value = value
                        fact_obj.entity_type = fact_obj.resource_id.entity_type
                        fact_obj.save()
            else:
                self._logger.error("No facts in message: " + str(message))
        elif operation == "PONG":
            # heartbeat: register every hostname the agent manages
            if "hostname" not in message:
                self._logger.error("Invalid PONG heartbeat received")
                return
            for host in message["hostname"]:
                h = Agent(host)
                h.save()
        elif operation == "UPDATED":
            # an agent acknowledged deployment of a resource version
            if "id" not in message or "version" not in message:
                self._logger.error("Invalid UPDATED operation")
                return
            version = DataStore.instance().get(Version, message['id'])
            if version is not None:
                version.mark_updated()
                version.save()
        elif operation == "UPDATE":
            # ignore our own broadcasts
            pass
        elif operation == "FACTS":
            pass
        else:
            self._logger.debug("Received message with unknown operation. operation = %s" % str(operation))

    def get(self, file_id, content = True):
        """
        Get the file with the given id. Returns None if the file does not
        exist; returns True instead of the bytes when content is False.
        """
        path = os.path.join(self._config["server"]["storage"], file_id)
        if not os.path.exists(path):
            return None
        if not content:
            return True
        with open(path, "rb") as fd:
            data = fd.read()
        return data

    def start_update_transaction(self):
        """
        Start an update transaction (currently a no-op; AMQP tx disabled).
        """
        #self._channel.tx_select()

    def commit_update_transaction(self):
        """
        Commit the update transaction (currently a no-op).
        """
        #self._channel.tx_commit()

    def cancel_update_transaction(self):
        """
        Cancel the update transaction (currently a no-op).
        """
        #self._channel.tx_rollback()

    def update_modules(self, version, modules):
        """
        Broadcast module source code to all agents on the control topic.
        """
        payload = {"operation" : "MODULE_UPDATE", "version" : version, "modules" : modules}
        msg = amqp.Message(json.dumps(payload))
        msg.content_type = "application/json"
        self._channel.basic_publish(msg, exchange = self._exchange_name, routing_key = "control")

    def update_resource(self, resource_data):
        """
        Update a resource: persist the new version (creating the Resource
        and Agent records if missing) and broadcast it on the bus.
        """
        ds = DataStore.instance()
        resource = R.deserialize(resource_data)
        version = Version(resource.id)
        version.data = resource_data
        version.save()
        if not ds.contains(Resource, version.resource_id):
            res = Resource(version.resource_id)
            res.save()
            if not ds.contains(Agent, res.agent_name):
                agent = Agent(res.agent_name)
                agent.save()
        # broadcast to the agent that owns this resource type
        topic = "%s.%s" % (resource.id.agent_name, resource.id.entity_type)
        msg = amqp.Message(json.dumps({"operation" : "UPDATE", "resource": resource_data}))
        msg.content_type = "application/json"
        self._channel.basic_publish(msg, exchange = self._exchange_name,
                                    routing_key = "resources.%s" % topic)
|
UTF-8
|
Python
| false | false | 2,014 |
13,151,189,901,703 |
9ca63230abd8d3b307f5b82b001f13f175899d6c
|
14eb0accb65f50adb811aa00a93dc07bb6687568
|
/ligue1/models.py
|
3a5d51d67021f468e3b9098d1567fa2ac5807563
|
[] |
no_license
|
PrFalken/realadder
|
https://github.com/PrFalken/realadder
|
f3cc2548a8ce328f035bcf75018357570947376a
|
cde9bf1cc98a5873dedd287358a0c973eaffdd23
|
refs/heads/master
| 2020-05-20T04:18:11.424177 | 2013-01-29T20:56:04 | 2013-01-29T21:21:05 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.db import models
class Club(models.Model):
name = models.CharField(max_length=100)
budget = models.FloatField()
points = models.IntegerField()
real_points = models.FloatField()
def __unicode__(self):
return self.name
|
UTF-8
|
Python
| false | false | 2,013 |
515,396,122,518 |
f3733b8f5bace77be94a0f74445297483de7eb3e
|
7cbf2dc04f0d2b620b46f4c04873ee6054c0069d
|
/modules/core/test/test_ev.py
|
f6f074d5fba4152e4224871e77c44dc926fc132a
|
[
"GPL-1.0-or-later",
"LGPL-2.0-or-later",
"LGPL-2.1-or-later",
"GPL-3.0-only",
"LGPL-2.1-only"
] |
non_permissive
|
andreyto/imp-fork-proddl
|
https://github.com/andreyto/imp-fork-proddl
|
330185e12c9a209ee53c12369e6ada6219bd0b52
|
348723bad73f72e227d37962703c14853aa0577f
|
refs/heads/master
| 2020-02-01T05:33:46.125115 | 2012-10-08T18:30:04 | 2012-10-08T18:30:04 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import IMP
import IMP.test
import sys
import IMP.core
import IMP.container
import os
import time
class Tests(IMP.test.TestCase):
    """Tests for sampled density maps"""
    # NOTE: Python 2 code (print statements) throughout.
    def test_filters(self):
        """Test filters on excluded volume"""
        m = IMP.Model()
        m.set_log_level(IMP.SILENT)
        print "pdb"
        # load particles from the test PDB; keep the first ten, remove the
        # rest from the model
        h0s=IMP._create_particles_from_pdb(self.get_input_file_name("1z5s_A.pdb"),
                           m)
        l0= h0s[0:10]
        rm= h0s[10:]
        for r in rm:
            m.remove_particle(r)
        del h0s
        del rm
        print "ev"
        #set the restraint
        c0= IMP.container.ListSingletonContainer(l0)
        r=IMP.core.ExcludedVolumeRestraint(l0, 1, 0)
        print "cpc"
        # equivalent formulation: soft-sphere pair score over close pairs
        cpc= IMP.container.ClosePairContainer(l0, 0, 0)
        cr= IMP.container.PairsRestraint(IMP.core.SoftSpherePairScore(1), cpc)
        c1= IMP.container.ListSingletonContainer(l0)
        dg= IMP.get_dependency_graph(m)
        IMP.base.show_graphviz(dg)
        idx= IMP.get_vertex_index(dg)
        ss= IMP.get_required_score_states(cr, [], dg, idx)
        print "ss", ss
        crsf= cr.create_scoring_function()
        print crsf.get_score_states()
        print r.evaluate(False)
        m.set_log_level(IMP.VERBOSE)
        print cr.evaluate(False)
        pp= cpc.get_particle_pairs()
        print pp
        # both formulations of excluded volume must agree on the score
        self.assertAlmostEqual(r.evaluate(False), cr.evaluate(False),
                               delta=.1)
if __name__ == '__main__':
IMP.test.main()
|
UTF-8
|
Python
| false | false | 2,012 |
3,401,614,104,788 |
cafcf02945ba3c5c859b33e1f6a9b4789ee5a41f
|
3d73284170b92f2cfac43a377d1d484f7f23b9c9
|
/trunk/monografia/test_normal.py
|
33e7ad6ea884b0dcc135035f2ec1c4e7c467109c
|
[] |
no_license
|
douglaz/dionisio
|
https://github.com/douglaz/dionisio
|
17b90c40acf72ed8c6a1625ab40d210e76607c99
|
0cac79cabbd2fdf2f058de5233423d35f1d90dd5
|
refs/heads/master
| 2021-01-15T23:02:40.274893 | 2013-06-30T06:08:40 | 2013-06-30T06:08:40 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
#-*- coding: utf-8 -*-
import sqlite3
from scipy.stats import *
from numpy import *
def print_per_line(f, l, n=10):
    """Write the items of *l* to file object *f*, tab separated, with a
    newline replacing the tab after every n-th item."""
    for index, item in enumerate(l):
        f.write('%s' % item)
        separator = '\n' if (index + 1) % n == 0 else '\t'
        f.write(separator)
def tests(title, s1, s2):
    """Run four two-sample significance tests on s1 vs s2 and print a LaTeX
    table (caption *title*) with each statistic and p-value.

    NOTE: Python 2 code (print statement)."""
    tipos_testes = [
        ('T-Student (t)', ttest_ind(s1, s2)),
        # mannwhitneyu returns a one-sided p-value; double it for two-sided
        ('Mann-Whitney rank test (u)', (mannwhitneyu(s1, s2)[0], mannwhitneyu(s1, s2)[1]*2)),
        ('Wilcoxon rank-sum test (z)', ranksums(s1, s2)),
        ('Kruskal-Wallis H-test (H)', kruskal(array(s1), array(s2)))
    ]
    out = r"""
\begin{table}
\centering
\begin{tabular}{|r|c|c|}
\hline
\textbf{Tipo do teste} & \textbf{Valor da estatística} & \textbf{\textit{p}} \\
\hline
"""
    # one table row per test
    for teste, (est, p) in tipos_testes:
        out += r"%s & %s & %s \\" % (teste, est, p)
        out += "\n"
        out += r"\hline "
        out += "\n"
    out += """
\end{tabular}
\caption{\it %s}
\end{table}
""" % title
    print out
    #n = min(len(s1), len(s2))
    #print 'wilcoxon:', wilcoxon(s1[:n], s2[:n])[1]
def main():
    """Load rating samples from the experiment SQLite DB, dump each group
    to a text file, and run the hypothesis tests.

    NOTE: Python 2 code; the DB path is relative to this script."""
    con = sqlite3.connect('../prototipo/experimento/db/production.sqlite3')
    c = con.cursor()
    #c.execute('select r.stars from ratings r, users u where u.id = r.user_id and u.stage_number >=6')
    #print [ v[0] for v in c.fetchall()]
    #print normaltest([ v[0] for v in c.fetchall()])
    # ratings of items recommended by users in the same group ("friends")
    c.execute('select r.stars from ratings r, user_recommendations ur, users u where r.product_id = ur.product_id and r.user_id = ur.target_id and u.id = r.user_id and ur.sender_id in (select distinct _u.id from users _u where _u.group_id = u.group_id and _u.id <> u.id)')
    amigos = [ v[0] for v in c.fetchall() ]
    print_per_line(file('amigos.txt', 'w+'), amigos)
    #print shapiro(amigos)
    # ratings of items recommended by users in other groups ("strangers")
    c.execute('select r.stars from ratings r, user_recommendations ur, users u where r.product_id = ur.product_id and r.user_id = ur.target_id and u.id = r.user_id and ur.sender_id in (select distinct _u.id from users _u where _u.group_id <> u.group_id and _u.id <> u.id)')
    nao_amigos = [ v[0] for v in c.fetchall() ]
    print_per_line(file('desconhecidos.txt', 'w+'), nao_amigos)
    # all direct user-to-user recommendations
    c.execute('select r.stars from ratings r, user_recommendations ur, users u where r.product_id = ur.product_id and r.user_id = ur.target_id and u.id = r.user_id')
    diretas = [ v[0] for v in c.fetchall() ]
    print_per_line(file('diretas.txt', 'w+'), diretas)
    # system recommendations, grouped by recommender algorithm
    c.execute('select r.stars, sr.algorithm from ratings r, system_recommendations sr where r.product_id = sr.product_id and r.user_id = sr.user_id')
    graph = {}
    for rating, algoritmo in c.fetchall():
        graph.setdefault(algoritmo, [])
        graph[algoritmo].append(rating)
    for algoritmo, values in graph.items():
        print_per_line(file('%s.txt' % algoritmo, 'w+'), values)
    tests('Teste de que Amigos recomendam melhor do que Desconhecidos (H1)',
          amigos, nao_amigos)
    tests('Teste de que recomendações Diretas são melhores que RBC (H1)',
          graph['trust'], diretas)
    tests('Teste de que RBC é melhor que RBP (H1)',
          graph['trust'], graph['profile'])
    tests('Teste de que RBI é melhor que RBC (H1)',
          graph['trust'], graph['item'])
main()
|
UTF-8
|
Python
| false | false | 2,013 |
5,351,529,280,483 |
5d6c4ee167634ba7b6f30a1762fe094c91689487
|
5585c96c54e7936d56a9dd4598d802a709d5a59a
|
/parse-wards-kml.py
|
99c759ea3b9ba83fee96afc30d2f651d43e9911c
|
[] |
no_license
|
anandology/aapvol-signup
|
https://github.com/anandology/aapvol-signup
|
09bdd38502ccfd83f4af8a1f9b13292c6510c962
|
372d2d7c227d7bbe29978fc290869fe4419b1098
|
refs/heads/master
| 2020-06-04T05:19:03.714737 | 2014-04-02T16:44:47 | 2014-04-02T16:44:47 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""Script to parse bangalore ward boundaries from KML file available at
http://openbangalore.org/.
"""
from bs4 import BeautifulSoup
import re
import web
import json
import csv
soup = BeautifulSoup(open("static/bruhath_bangalore_mahanagara_palike.kml").read(), "xml")
ward_info = dict((row[0], row) for row in csv.reader(open("static/wards.tsv"), delimiter="\t"))
def parse_coordinates(coordinates):
    """Flatten a KML coordinate string ("lon,lat,0 lon,lat,0 ...") into a
    single space separated list of values."""
    flattened = coordinates.strip().replace(",0 ", ",")
    return " ".join(flattened.split(","))
def parse_ward(e):
    """Build a ward record (name, AC/PC info, path, boundary coordinates)
    from one KML Placemark-like element *e*.

    Relies on the module-level ``ward_info`` table loaded from wards.tsv."""
    name = e.find("name").get_text()
    description = e.find("description").get_text()
    # the description begins with "WARD_NO = <number>"
    ward_no = re.match("WARD_NO = (\d+)", description).group(1)
    # ward codes in wards.tsv are zero-padded: W001, W002, ...
    code = 'W{0:03d}'.format(int(ward_no))
    info = ward_info[code]
    ac = info[2]
    pc = info[3]
    # e.g. ac "150 - Foo" -> path "KA/150/W042"
    path = "KA/{}/{}".format(ac.split("-")[0].strip(), code)
    return {
        "ward": code + " - " + name,
        "ac": ac,
        "pc": pc,
        "path": path,
        "c": parse_coordinates(e.find("coordinates").get_text())
    }
# Script body: parse every ward Folder from the KML and emit a JavaScript
# data file on stdout (Python 2 print statements).
elems = soup.find("Folder").find_all("Folder")
wards = [parse_ward(e) for e in elems]
print "//"
print "// Bangalore Ward Boundaries"
print "// Generated using ward maps KML file from openbangalore.org"
print "//"
print "var wards = " + json.dumps(wards, separators=(',', ':')) + ";"
# client-side post-processing: split the flat coordinate string back into
# [x, y] pairs (emitted verbatim as JavaScript)
print """for (var i=0; i<wards.length; i++) {
var tokens = wards[i].c.split(" ");
wards[i].coordinates = [];
wards[i].c = null;
for (var j=0; j<tokens.length; j++)
wards[i].coordinates.push([tokens[j], tokens[j+1]]);
}"""
|
UTF-8
|
Python
| false | false | 2,014 |
8,297,876,860,184 |
2b31f9a9ee57d1bce24039c81921a5689389d74c
|
b52a82534aa65272a07d8d3863f7f4cf3a8f5c83
|
/regexp/characters.py
|
4e887378f2c8b62b4071d6cfd388da19333c254f
|
[
"Zlib"
] |
permissive
|
dchilot/pystew
|
https://github.com/dchilot/pystew
|
0443cacef0b421839a11e4a5b60066b0fe64d4b8
|
f7b56c1c3d32af567b0033239b1daf00e3eb3e7e
|
refs/heads/master
| 2021-01-10T21:29:27.162587 | 2014-08-29T17:24:28 | 2014-08-29T17:24:28 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
"""Describes a character as to be used in a regulare expression."""
#Copyright (c) 2010 'pystew developpers'
#
#This software is provided 'as-is', without any express or implied
#warranty. In no event will the authors be held liable for any damages
#arising from the use of this software.
#
#Permission is granted to anyone to use this software for any purpose,
#including commercial applications, and to alter it and redistribute it
#freely, subject to the following restrictions:
#
# 1. The origin of this software must not be misrepresented; you must not
# claim that you wrote the original software. If you use this software
# in a product, an acknowledgment in the product documentation would be
# appreciated but is not required.
#
# 2. Altered source versions must be plainly marked as such, and must not be
# misrepresented as being the original software.
#
# 3. This notice may not be removed or altered from any source
# distribution.
#
#$Rev:: $: Revision of last commit
#$Author:: $: Author of last commit
#$Date:: $: Date of last commit
import logging
import copy
import sre_parse
def escape_char(char, raw=False):
    """`char`: character to escape if needed.
    Return *char* ready for insertion into a regular expression: special
    characters (and '-' / ']') get a backslash prefix unless *raw* is set;
    None maps to the empty string."""
    if char is None:
        return ''
    needs_escape = (char in sre_parse.SPECIAL_CHARS) or (char in "-]")
    if needs_escape and not raw:
        return "\\" + char
    return char
class Char:
    """Wrap a single character and classify it into the category used when
    inferring regular expressions.

    >>> Char('a').get_category()
    'LOWER_CASE_LETTER'
    >>> Char('G').get_category()
    'UPPER_CASE_LETTER'
    >>> Char(' ').get_category()
    'category_space'
    >>> Char('\\t').get_category()
    'category_space'
    >>> Char('?').get_category()
    'SPECIAL'
    >>> Char('_').get_category()
    'OTHER_WORD'
    >>> Char(None).get_category()
    'NONE'
    >>> (Char('4').get_category() == Char('7').get_category())
    True
    >>> (Char('A').get_category() == Char('G').get_category())
    True
    >>> (Char('f').get_category() == Char('i').get_category())
    True
    >>> (Char('+').get_category() == Char('?').get_category())
    True
    >>> (Char('-').get_category() == Char('~').get_category())
    True
    >>> # '_' is in its own category ('OTHER_WORD')
    >>> (Char('_').get_category() != Char('\\\\').get_category())
    True
    """
    # category constants; digits/spaces reuse sre_parse's category names
    SPECIAL = 'SPECIAL'
    UNKNOWN = 'UNKNOWN'
    LOWER_CASE_LETTER = 'LOWER_CASE_LETTER'
    UPPER_CASE_LETTER = 'UPPER_CASE_LETTER'
    OTHER_WORD = 'OTHER_WORD'
    NONE = 'NONE'
    WORD = 'WORD'

    def __init__(self, char, regexp_type = "strict"):
        self._regexp_type = regexp_type
        from sre_parse import SPECIAL_CHARS, CATEGORY_DIGIT
        from sre_parse import WHITESPACE, CATEGORY_SPACE
        # classification order matters: None, '_', digits, specials,
        # whitespace, letters, then everything else
        if (char is None):
            self._category = Char.NONE
        elif ('_' == char):
            self._category = Char.OTHER_WORD
        elif (char in sre_parse.DIGITS):
            self._category = CATEGORY_DIGIT
        elif (char in SPECIAL_CHARS):
            self._category = Char.SPECIAL
        elif (char in WHITESPACE):
            self._category = CATEGORY_SPACE
        elif (char.isalpha()):
            if (char.islower()):
                self._category = Char.LOWER_CASE_LETTER
            else:
                self._category = Char.UPPER_CASE_LETTER
        else:
            self._category = Char.UNKNOWN
        self._char = char
        logging.debug("char = " + str(char))
        logging.debug("self._category = " + self._category)

    # NOTE(review): missing a "self" parameter (or @staticmethod) -- calling
    # this on an instance would raise TypeError; it appears unused here.
    def get_all_categories():
        """Returns all possible categories for a character."""
        return [sre_parse.CATEGORY_DIGIT, Char.SPECIAL,
                sre_parse.CATEGORY_SPACE, Char.LOWER_CASE_LETTER,
                Char.UPPER_CASE_LETTER, Char.UNKNOWN]

    def get_category(self):
        """Returns the category of this character."""
        return self._category

    def get_meta_category(self):
        """Returns the meta category of this character (#WORD if this is
        a word (as defined in #CATEGORRY_WORDS, self._category otherwise)."""
        # CATEGORY_WORDS is a module-level constant defined after the class
        if (self._category in CATEGORY_WORDS):
            return Char.WORD
        else:
            return self._category

    def get_char(self):
        """Returns the wrapped character."""
        return self._char

    def get_string(self, raw=False):
        """Returns a representation of the character that can be included in
        a regular expression."""
        return escape_char(self._char, raw=raw)

    def get_is_ordered(self):
        """Returns True if and only if the character is a digit or a letter
        (which are ordered)."""
        return (self._category in [sre_parse.CATEGORY_DIGIT,
                                   Char.LOWER_CASE_LETTER, Char.UPPER_CASE_LETTER])

    # comparisons delegate to the wrapped character; None (no character)
    # sorts as greater than everything
    def __eq__(self, other_char):
        if (other_char is None):
            return False
        return (self._char == other_char.get_char())

    def __ne__(self, other_char):
        if (other_char is None):
            return True
        return (self._char != other_char.get_char())

    def __lt__(self, other_char):
        if (other_char is None):
            return True
        return (self._char < other_char.get_char())

    def __gt__(self, other_char):
        if (other_char is None):
            return False
        return (self._char > other_char.get_char())
def get_category_as_letter(category):
    """Map a character category to its one-letter mnemonic (None when the
    category is not recognised)."""
    mnemonics = {
        Char.SPECIAL: '$',
        Char.UNKNOWN: '?',
        Char.LOWER_CASE_LETTER: 'l',
        Char.UPPER_CASE_LETTER: 'U',
        Char.OTHER_WORD: 'o',
        Char.NONE: 'n',
        Char.WORD: 'w',
        sre_parse.CATEGORY_DIGIT: '0',
        sre_parse.CATEGORY_SPACE: ' ',
    }
    return mnemonics.get(category)
# constants
DIGITS = Char('0').get_category()
"""Sample for digits category."""
LOWER_CASE_LETTERS = Char('a').get_category()
"""Sample for lower case letters category."""
UPPER_CASE_LETTERS = Char('A').get_category()
"""Sample for upper case letters category."""
SPACES = Char(' ').get_category()
"""Sample for spaces category."""
OTHERS = Char('-').get_category()
"""Sample for 'characters not in any other category' category."""
OTHER_WORDS = Char('_').get_category()
"""Sample for 'character(s) that are included in the \w class but are not in
any other category' category."""
SPECIALS = Char('?').get_category()
"""Sample for 'characters that need to be escaped because they have a special
meaning' category."""
EMPTY = Char(None)
"""The empty character."""
CATEGORY_WORDS = set(
[DIGITS, LOWER_CASE_LETTERS, UPPER_CASE_LETTERS, OTHER_WORDS])
CATEGORY_HAVE_CLASS = copy.deepcopy(CATEGORY_WORDS)
CATEGORY_HAVE_CLASS.add(SPACES)
"""
>>> DIGITS in CATEGORY_HAVE_CLASS
True
>>> LOWER_CASE_LETTERS in CATEGORY_HAVE_CLASS
True
>>> UPPER_CASE_LETTERS in CATEGORY_HAVE_CLASS
True
>>> OTHER_WORDS in CATEGORY_HAVE_CLASS
True
>>> for char in [SPACES, OTHERS, SPECIALS]:
>>> char.get_category() in CATEGORY_HAVE_CLASS
True
True
True
"""
def get_category_as_range(category):
    """Return a pair (range_string, is_range) describing *category* for use
    inside a regular-expression character class; raise for unknown input."""
    ranges = {
        Char.SPECIAL: ("\\" + "\\".join(sre_parse.SPECIAL_CHARS), True),
        Char.LOWER_CASE_LETTER: ('a-z', True),
        Char.UPPER_CASE_LETTER: ('A-Z', True),
        Char.OTHER_WORD: ('_', False),
        Char.WORD: ('\w', True),
        sre_parse.CATEGORY_DIGIT: ('\d', True),
        sre_parse.CATEGORY_SPACE: ('\s', True),
        Char.UNKNOWN: ('', False),
    }
    if category not in ranges:
        raise Exception("Category '%s' not handled!" % category)
    return ranges[category]
def get_categories_as_range(categories):
    """Concatenate the range strings of *categories* (in sorted order) and
    report whether any contributed a true range."""
    parts = [get_category_as_range(category) for category in sorted(categories)]
    combined = "".join(text for text, _ in parts)
    any_range = any(flag for _, flag in parts)
    return combined, any_range
if (__name__ == "__main__"):
import doctest
doctest.testmod()
|
UTF-8
|
Python
| false | false | 2,014 |
8,495,445,358,963 |
faca87e5213be5efc71e9bdc615a83d286dc587f
|
91fae51b15abf5a8a8a9e9678ce3049dff1befbf
|
/gstock/vente/tests.py
|
8fb3428fc530155d9d3142e9fd933f952acc95af
|
[] |
no_license
|
fadiga/site_gestion_stock
|
https://github.com/fadiga/site_gestion_stock
|
e67d1429966af103e755a97a743b3c63b9fc616c
|
5027957f8e0f6c1f4fbc52e4e91c9c4ae58dd4cd
|
refs/heads/master
| 2021-01-13T01:40:38.479289 | 2012-01-10T17:45:35 | 2012-01-10T17:53:06 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# -*- coding= UTF-8 -*-
"""
This file demonstrates two different styles of tests (one doctest and one
unittest). These will both pass when you run "manage.py test".
Replace these with more appropriate tests for your application.
"""
from django.test.client import Client
from django.contrib.auth import (authenticate, login as django_login,
logout as django_logout)
from django.test import TestCase
from django.core.urlresolvers import reverse
class SimpleTest(TestCase):
    """
    Tests of the ``vente`` application views.
    """
    fixtures = ['test_data']

    def test_login(self):
        """The admin user from the fixture can authenticate."""
        self.logged_admin = Client()
        logged_admin = self.logged_admin.login(username='admin',\
            password='admin')
        self.assertTrue(logged_admin)

    def test_dashboard(self):
        """ Je teste la view dashboard"""
        # BUG FIX: the original line was a syntax error --
        # ``reverse(v-dashboard)`` left the URL name unquoted and the
        # ``get(`` call unclosed.
        response = self.client.get(reverse('v-dashboard'))
        self.failUnlessEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'vente/dashboard.html')

    def test_basic_addition(self):
        """
        Tests that 1 + 1 always equals 2.
        """
        self.failUnlessEqual(1 + 1, 2)
#~ __test__ = {"doctest": """
#~ Another way to test that 1 + 1 is equal to 2.
#~
#~ >>> 1 + 1 == 2
#~ True
#~ """}
|
UTF-8
|
Python
| false | false | 2,012 |
17,471,926,987,113 |
2ec8f3e1016a51185c92c90f25a40feee32cf4f1
|
a664deb98a55a216a531fc004fc962ddca7f2aa2
|
/sizescale.py
|
a88d1874d365b7e9b2f5adcb6cb0138b174ef0c3
|
[] |
no_license
|
danellecline/mbari-underwater-size-scale
|
https://github.com/danellecline/mbari-underwater-size-scale
|
ad589f483afc1deae5cdcdeddf98200176e244c9
|
950b539e1d50b6f377bb54c0cf389562ce10f73f
|
refs/heads/master
| 2020-02-05T09:30:17.439939 | 2014-08-06T20:45:09 | 2014-08-06T20:45:09 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
import xml.etree.cElementTree as ET
import math
import pymssql
import pdb
import sys
import numpy as np
import pandas as pd
import csv
import random as rnd
import cv2
import video
from operator import attrgetter
from os import getenv
from datetime import datetime, timedelta
import argparse
from argparse import RawTextHelpFormatter
from numpy import *
from siftmatching import *
from scipy.cluster.vq import kmeans,vq
MAXFRAMES=800
class SizeScale():
class EventObject():
def __init__(self, id, image, bbox, frame, speed, time):
    """Track one detected object (*id*) across video frames; seeded with
    its first observation via add(). NOTE: Python 2 code."""
    self.id = id
    self.height = []
    self.width = []
    # per-frame data keyed by frame number
    self.bbox = {}
    self.frame = []
    self.kp = {}
    self.time_epoch = []
    self.field_width = []
    self.vehicle_speed = []
    # last two images seen, used for SIFT keypoint matching in add()
    self.current_image = None
    self.last_image = None
    self.add(image, bbox, frame, speed, time)

def resetImage(self):
    # drop both cached images so the next add() cannot match keypoints
    print 'Resetting image for id ' + str(self.id)
    self.current_image = None
    self.last_image = None

def setCurrentImage(self, image):
    # shift current -> last, store the new frame crop
    self.last_image = self.current_image
    self.current_image = image
    print 'setting current image for event ' + str(self.id)
    print 'current image'
    print self.current_image
    print 'last image'
    print self.last_image

def getLastImage(self):
    return self.last_image

def getId(self):
    return self.id
def init(self):
    """Allocate NaN-filled per-frame arrays once all frames are known."""
    numFrames = len(self.frame)
    self.field_width = np.zeros(numFrames)*np.nan
    self.width = np.zeros(numFrames)*np.nan
    self.height = np.zeros(numFrames)*np.nan

def getStats(self):
    """Return (means, stds, frame count, mid-track timestamp) over the
    tracked measurements."""
    i = len(self.time_epoch)
    # skip over the first time since we won't have a field width for that
    # since the size scale algorithm requires two points
    #time = pd.to_datetime(self.time_epoch[1:i],unit='s')
    df= pd.DataFrame({ 'VEHICLE_SPEED' : self.vehicle_speed, 'FIELD_WIDTH' : self.field_width, 'HEIGHT' : self.height, 'WIDTH' : self.width })
    return df.mean(),df.std(), len(self.frame), self.time_epoch[i/2]

def getBbox(self, frame):
    return self.bbox[frame]

def getKeypoints(self, frame):
    return self.kp[frame]

def add(self, image, bbox, frame, speed, time):
    """Record one observation; when a previous image exists, match SIFT
    keypoints between the two crops and store them per frame."""
    self.bbox[frame] = bbox
    self.last_image = self.current_image
    self.current_image = image
    self.time_epoch.append(time)
    self.frame.append(frame)
    self.vehicle_speed.append(speed)
    if self.last_image != None:
        # findKeyPoints comes from siftmatching (star import above)
        imgkp, templatekp = findKeyPoints(self.current_image, self.last_image, distance=2)
        self.kp[frame] = imgkp
        self.kp[frame-1] = templatekp
        print self.kp
    else:
        self.kp[frame] = None

def getFrame(self, index):
    return self.frame[index]

def withinRange(self, start, end):
    # True when [start, end] lies inside this track's frame span
    if start >= self.startFrame() and end <= self.endFrame():
        return True
    return False

def startFrame(self):
    return self.frame[0]

def endFrame(self):
    return self.frame[-1]

def numFrames(self):
    return len(self.frame)

# NOTE(review): self.X / self.Y are never assigned anywhere in the visible
# code -- calling these would raise AttributeError; confirm against the
# full file.
def getX(self, frame):
    return self.X[frame]

def getY(self, frame):
    return self.Y[frame]
def calc_field_width(self, bestcmDict, width, height, Wi, Hi, fps, focalLength):
    """For every consecutive frame pair of this track, estimate the camera
    field width (and from it the object's real height/width) from matched
    keypoint displacement, vehicle speed and the optical-flow center of
    movement. Results are averaged per pair into self.field_width /
    self.height / self.width. NOTE: Python 2 code."""
    d = focalLength
    n = self.numFrames()
    for i in range(0, n-1):
        sf = self.frame[i]
        ef = self.frame[i+1]
        # arbitrarily choose ending frame center of movement as calculated from optical flow
        centroidX = bestcmDict[ef][0]
        centroidY = bestcmDict[ef][1]
        centerX = Wi/2
        centerY = Hi/2
        # convert to image coordinate system with origin at the center
        Xcm = centroidX * ( Wi / width)
        Ycm = centroidY * ( Hi / height)
        # bounding boxes of start and end frame (lower-left / upper-right)
        bbox = self.getBbox(sf)
        llx1 = bbox[0]
        lly1 = bbox[1]
        urx1 = bbox[2]
        ury1 = bbox[3]
        bbox = self.getBbox(ef)
        llx2 = bbox[0]
        lly2 = bbox[1]
        urx2 = bbox[2]
        ury2 = bbox[3]
        bboxHeight1 = abs(lly1-ury1)
        bboxWidth1 = abs(llx1-urx1)
        bboxHeight2 = abs(lly2-ury2)
        bboxWidth2 = abs(llx2-urx2)
        # vehicle speed, averaged over the two frames
        speed1 = self.vehicle_speed[i]
        speed2 = self.vehicle_speed[i+1]
        S = (speed1 + speed2)/2
        # only calculate field width for frames we could track keypoints
        frame = self.getFrame(i)
        kp = self.kp[frame]
        field_width = []
        actual_height = []
        actual_width = []
        try:
            if kp is not None and len(kp) > 1:
                for point in kp:
                    # keypoint position in absolute image coordinates
                    Xsf = llx1 + point.pt[0]
                    Ysf = lly1 + point.pt[1]
                    Xef = llx2 + point.pt[0]
                    Yef = lly2 + point.pt[1]
                    #X1 = self.X[sf] * (Wi / width)
                    #X2 = self.X[ef] * (Wi / width)
                    #Y1 = self.Y[sf] * (Hi / height)
                    #Y2 = self.Y[ef] * (Hi / height)
                    X1 = Xsf * (Wi / width)
                    X2 = Xef * (Wi / width)
                    Y1 = Ysf * (Hi / height)
                    Y2 = Yef * (Hi / height)
                    # distance traveled using average ? speed - averaging
                    # probably doesn't change this much
                    # as speed is only recorded every second and the
                    # frame rate is much faster
                    deltaDp = ((ef - sf) * S) / fps
                    # distance from image center to center of movement
                    r = math.sqrt((Xcm - centerX)**2 + (Ycm - centerY)**2)
                    # distance from object to the center of movement
                    w = math.sqrt((X1 - Xcm)**2 + (Y1 - Ycm)**2)
                    # distance traveled by the object beween its two positions
                    deltaw = math.sqrt((X1 - X2)**2 + (Y1 - Y2)**2)
                    # calculate field width
                    fw = (Wi * w /(math.sqrt(r**2 + d**2) * deltaw)) * deltaDp
                    print 'event ' + str(self.id) + ' frame: ' + str(ef) + '[' + str(Xsf) + ',' + str(Ysf) + ']' + '[' + str(Xef) + ',' + str(Yef) + ']' + '[' + str(centroidX) + ',' + str(centroidY) + ']' + ' fw : ' + str(fw) + ' speed: ' + str(S) + ' deltaW: ' + str(deltaw) + ' deltaDp: ' + str(deltaDp)
                    # object's real size in both frames, from bbox / field width
                    actual_height.append(bboxHeight1/fw)
                    actual_width.append(bboxWidth1/fw)
                    actual_height.append(bboxHeight2/fw)
                    actual_width.append(bboxWidth2/fw)
                    field_width.append(fw)
                    field_width.append(fw)
        except Exception, e:
            # on any failure (e.g. deltaw == 0) record NaN placeholders;
            # NOTE: these are the *strings* 'NaN', coerced by pandas below
            print e
            field_width.append('NaN')
            actual_height.append('NaN')
            actual_width.append('NaN')
        # get the mean from all the keypoint based field widths
        df= pd.DataFrame({ 'FIELD_WIDTH' : field_width, 'HEIGHT' : actual_height, 'WIDTH' : actual_width })
        df.describe()
        mean = df.mean()
        # store the pair's averages against both frames of the pair
        self.field_width[i] = mean[0]
        self.field_width[i+1] = mean[0]
        self.height[i] = mean[1]
        self.width[i] = mean[2]
        self.height[i+1] = mean[1]
        self.width[i+1] = mean[2]
# line segment intersection using vectors
# see Computer Graphics by F.S. Hill
def perp(self, a ) :
b = empty_like(a)
b[0] = -a[1]
b[1] = a[0]
return b
# line segment a given by endpoints a1, a2
# line segment b given by endpoints b1, b2
def seg_intersect(self, a1, a2, b1, b2) :
da = a2-a1
db = b2-b1
dp = a1-b1
dap = self.perp(da)
denom = dot( dap, db)
num = dot( dap, dp )
intersect = (num / denom)*db + b1
return intersect
def calc_cell_width(self, focalLength, zoomAngle):
d = focalLength
alpha = zoomAngle
Wi = 2 * focalLength * math.tan(zoomAngle/2 * (math.pi / 180) )
return Wi
    def display_output(self, videoFile, evtDict):
        """Play back *videoFile* overlaying tracked event boxes and keypoints.

        Visual debugging only: draws each event's bounding box, its id and
        matched keypoints, showing a half-size 'results' window plus one
        window per keypoint-match composite.
        """
        colors = [(255,127,0),(0,255,255),(0,255,0),(255,255,255)]
        videoSrc = None
        frame = 0
        try:
            videoSrc = cv2.VideoCapture(videoFile)
            while(videoSrc.isOpened()):
                ret, fr = videoSrc.read()
                frameImg = fr#fr.copy()#fr
                if frame == MAXFRAMES:
                    break
                for id in iter(evtDict.keys()):
                    event = evtDict[id]
                    if event.withinRange(frame-1,frame):
                        bbox = event.getBbox(frame)
                        llx = bbox[0]
                        lly = bbox[1]
                        urx = bbox[2]
                        ury = bbox[3]
                        X = llx + abs(llx-urx)/2
                        Y = lly + abs(lly-ury)/2
                        bboxHeight = abs(lly-ury)
                        bboxWidth = abs(llx-urx)
                        #draw rectangle in top left and bottom right coordinates
                        bry = lly
                        brx = urx
                        tly = ury
                        tlx = llx
                        subimg = frameImg[tly:tly+bboxHeight,tlx:tlx+bboxWidth]
                        event.setCurrentImage(subimg)
                        template = event.getLastImage()
                        # NOTE(review): `!= None` on a numpy image performs an
                        # element-wise comparison and raises ValueError when
                        # its truth value is taken; `is not None` is the safe
                        # test here -- confirm and fix.
                        if template != None:
                            # find strongest SIFT keypoint in the subimage
                            #gray = cv2.cvtColor(subimg,cv2.COLOR_BGR2GRAY)
                            #sift = cv2.SIFT()
                            #kp = sift.detect(gray,None)
                            #if len(kp) > 0:
                            #    maxkp = max(kp, key=attrgetter("response"))
                            #    cv2.circle(frameImg,(tlx+int(round(maxkp.pt[0])),tly+int(round(maxkp.pt[1]))), 10, (255,0,0),-1)
                            cv2.rectangle(frameImg,(tlx,tly),(brx,bry),(0,255,0),3)
                            font = cv2.FONT_HERSHEY_SIMPLEX
                            cv2.putText(frameImg,str(id)+'evt ',(tlx,tly), font, 2,(255,255,255),2)
                            llxx = bbox[0]
                            llyy = bbox[3]
                            # build a side-by-side composite of template and
                            # current crop to draw keypoint match lines on
                            h1, w1 = subimg.shape[:2]
                            h2, w2 = template.shape[:2]
                            nWidth = w1+w2
                            nHeight = max(h1, h2)
                            hdif = max((h1-h2)/2,0)
                            newimg = np.zeros((nHeight, nWidth, 3), np.uint8)
                            newimg[hdif:hdif+h2, :w2] = template
                            newimg[:h1, w2:w1+w2] = subimg
                            tkp = event.getKeypoints(frame-1)
                            skp = event.getKeypoints(frame)
                            if skp is not None and tkp is not None:
                                maxlen = min(len(skp), len(tkp))
                                for i in range(maxlen):
                                    pt_a = (int(tkp[i].pt[0]), int(tkp[i].pt[1]+hdif))
                                    pt_b = (int(skp[i].pt[0]+w2), int(skp[i].pt[1]))
                                    pt_at = (llx+int(tkp[i].pt[0]), lly+int(tkp[i].pt[1]))
                                    pt_bt = (llxx+int(skp[i].pt[0]), llyy+int(skp[i].pt[1]))
                                    cv2.line(newimg, pt_a, pt_b, colors[i])
                                    cv2.circle(frameImg, pt_at, 5, colors[i])
                                    cv2.circle(frameImg, pt_bt, 5, colors[i])
                                # NOTE(review): uses the loop variable `i`
                                # after the loop, so the window title is the
                                # last keypoint index -- confirm intended.
                                cv2.imshow('keypoint tracking ' + str(i), newimg)
                resizedImg = cv2.resize(frameImg, (0,0), fx=0.5, fy=0.5)
                cv2.imshow('results', resizedImg)
                cv2.waitKey(500)
                frame = frame + 1
        except Exception, e:
            # Python-2 except syntax; broad catch ends playback on any error.
            print e
        if videoSrc is not None:
            videoSrc.release()
    def load_data2(self, videoFile, inputFile):
        """Load AVEDac events plus per-event image crops from the source video.

        Parses the events XML (*inputFile*), reads one video frame per
        FrameEventSet, crops each event's bounding box from the frame and
        builds/extends EventObject entries keyed by object id.
        Returns (frame_width, frame_height, event_dict).

        NOTE(review): reads `startDate`, `fps` and `MAXFRAMES` from module
        scope (set in the __main__ block) rather than taking them as
        parameters -- confirm this coupling is intentional.
        """
        videoSrc = None
        width = 0
        height = 0
        evtDict = {}
        try:
            videoSrc = cv2.VideoCapture(videoFile)
            tree = ET.ElementTree(file=inputFile)
            root = tree.getroot()
            startTc = root.attrib['StartTimecode']
            endTc = root.attrib['EndTimecode']
            for meta in root.iter('SourceMetadata'):
                width = int(meta.attrib['FrameWidth'])
                height = int(meta.attrib['FrameHeight'])
            durationSecs = self.timecode2secs(endTc, fps) - self.timecode2secs(startTc, fps)
            # Query speed by time and get dictionary for fast lookup
            acmDict, sampleSecs = self.query_speed_by_time(startDate, durationSecs)
            # NOTE(review): Exception is constructed but never raised here.
            if len(sampleSecs) == 0:
                Exception('Error - cannot get speed data beginning with starting date ' + str(startDate))
            for event in root.iter('FrameEventSet'):
                ret, fr = videoSrc.read()
                frameImg = fr.copy()
                tc = event.attrib['TimeCode']
                frame = int(event.attrib['FrameNumber'])
                # wall-clock time of this frame, derived from frame number
                offsetSecs = float(float(frame)/float(fps))
                d = startDate + timedelta(seconds=offsetSecs)
                targetSecs = float(d.strftime('%s.%f'))
                # Get the speed and how far the lookup was in seconds from the target
                # TODO: what if thes are too far off ?
                if len(acmDict) > 0:
                    speed, offsetTargetSecs = self.speed_nearest_to_time(targetSecs, sampleSecs, acmDict)
                else:
                    speed = 0
                for eventObj in event.iter('EventObject'):
                    id = int(eventObj.attrib['ObjectID'])
                    X = int(eventObj.attrib['CurrX'])
                    Y = int(eventObj.attrib['CurrY'])
                    for bbox in eventObj.iter('BoundingBox'):
                        urx = int(bbox.attrib['UpperRightX'])
                        ury = int(bbox.attrib['UpperRightY'])
                        llx = int(bbox.attrib['LowerLeftX'])
                        lly = int(bbox.attrib['LowerLeftY'])
                        bbox = llx, lly, urx, ury
                        bboxHeight = abs(lly-ury)
                        bboxWidth = abs(llx-urx)
                        #draw rectangle in top left an bottom right coordinates
                        bry = lly
                        brx = urx
                        tly = ury
                        tlx = llx
                        # crop the event region out of the decoded frame
                        subimg = frameImg[tly:tly+bboxHeight,tlx:tlx+bboxWidth]
                        # add new Event object keyed by objectIDl
                        if not evtDict.has_key(id):
                            print 'Adding new object with id ' + str(id)
                            evtDict[id] = self.EventObject(id, subimg, bbox, frame, speed, targetSecs)
                        else:
                            print 'Appending to existing event ' + str(id)
                            evtDict[id].add(subimg, bbox, frame, speed, targetSecs)
                if frame == MAXFRAMES:
                    break
        except Exception, e:
            print e
        if videoSrc is not None:
            videoSrc.release()
        return width, height, evtDict
    def run_size_scale(self, outputFile, evtDict, bestcmDict, fps, Wi, Hi, focalLength, zoomAngle, width, height):
        """Compute per-event size statistics and write them to *outputFile* (CSV).

        Events with fewer than two observations are skipped (a size estimate
        needs a frame pair).  Failures on individual events are printed and
        the remaining events are still processed.
        """
        numKeys = len(evtDict.keys())
        dataframe = None
        columns = ['EVENT_ID','EVENT_TIME','EVENT_LENGTH','FIELD_WIDTH_MEAN (cm)',\
                   'FIELD_WIDTH_STD','VEHICLE_SPEED_MEAN (cm/sec)','VEHICLE_SPEED_STD',\
                   'HEIGHT_MEAN (cm)', 'HEIGHT_STD', 'WIDTH_MEAN (cm)', 'WIDTH_STD']
        df = pd.DataFrame(columns = columns)
        for id in iter(evtDict.keys()):
            event = evtDict[id]
            event.init()
            # can only calculate size if have at least two points
            if event.numFrames() > 1:
                try:
                    event.calc_field_width(bestcmDict, width, height, Wi, Hi, fps, focalLength)
                    mean, std, length, time = event.getStats()
                    # one CSV row per event; values rounded to 3 decimals
                    df = df.append(
                        {'EVENT_ID' : id,
                         'EVENT_TIME' : str(time),
                         'EVENT_LENGTH' : str(length),
                         'FIELD_WIDTH_MEAN (cm)' : round(mean[0],3),
                         'FIELD_WIDTH_STD' : round(std[0],3),
                         'VEHICLE_SPEED_MEAN (cm/sec)' : round(mean[2],3),
                         'VEHICLE_SPEED_STD' : round(std[2],3),
                         'HEIGHT_MEAN (cm)' : round(mean[1],3),
                         'HEIGHT_STD' : round(std[1],3),
                         'WIDTH_MEAN (cm)' : round(mean[3],3),
                         'WIDTH_STD' : round(std[3],3) }, ignore_index=True)
                except Exception, e:
                    print e
        # NOTE(review): describe() result is discarded -- likely leftover
        # debugging; DataFrame.append is also deprecated in modern pandas.
        df.describe()
        df.to_csv(outputFile,index=False)
    def find_cm_from_opticalflow(self, vectorFile, Wi, Hi, width, height):
        """Find the per-frame center of movement from an optical-flow vector CSV.

        Each CSV row holds tab-delimited vectors (X1 Y1 X2 Y2).  Pairs of
        vectors are sampled at random, extended to full lines via a linear
        fit, and intersected; the intersections are clustered with 1-means
        to pick a center.  Returns {frame_number: array([x, y])}, defaulting
        to the previous frame's center (or the origin) when nothing is found.
        """
        bestcmDict = {}
        framenum = 0
        # parse and find optical flow centers in the vector file
        with open(vectorFile, "rb") as csvfile:
            reader = csv.reader(csvfile)
            for row in reader:
                intersections = []
                points = []
                # find vectors strings delimited with two tabs
                vstring = ' '.join(row).split('\t\t')
                # each pair of points represents a vector e.g. X1,Y1,X2,Y2
                for v in vstring:
                    bestcm = None
                    pt = v.split()
                    if len(pt) > 0:
                        a = array( [ int(pt[0]), int(pt[1]), int(pt[2]) , int(pt[3]) ] )
                        points.append(a)
                lenpts = len(points)
                if lenpts > 1:
                    # randomly search through points to find where intersections
                    # might occur
                    for i in range(0,lenpts):
                        # choose two random vectors and find intersection
                        j = rnd.sample(range(0,lenpts), 2)
                        intersect = []
                        try:
                            i1 = j[0]
                            i2 = j[1]
                            # fit a line through each vector and extend it to
                            # [0, 2*width] before intersecting
                            m, b = np.polyfit([points[i1][0],points[i1][2]],[points[i1][1],points[i1][3]], 1)
                            a1 = array( [0, b] )
                            a2 = array( [2*width, m*2*width + b] )
                            m, b = np.polyfit([points[i2][0],points[i2][2]],[points[i2][1],points[i2][3]], 1)
                            b1 = array( [0, b] )
                            b2 = array( [2*width, m*2*width + b] )
                            intersect = self.seg_intersect(a1,a2,b1,b2)
                            if intersect is not None and intersect[0] > 0 and intersect[1] > 0:
                                intersections.append(intersect)
                            #print a1, a2, b1, b2, intersect
                        except Exception, e:
                            continue
                # find intersections clustering around a central point using simple k-means
                bestcm = None
                if intersections is not None :
                    if len(intersections) > 1:
                        k = 1
                        centroid,_ = kmeans(vstack(intersections), k)
                        cX = centroid[0][0]
                        cY = centroid[0][1]
                        if ( cX > 0 and cX < width and cY > 0 and cY < height ):
                            bestcm = [cX, cY]
                    else:
                        # if only one intersection, just choose the only one we have
                        if len(intersections) == 1 :
                            centroid = intersections[0]
                            # NOTE(review): BUG -- cX/cY are not reassigned in
                            # this branch, so the values (and bestcm) come
                            # from a previous frame, or raise NameError on the
                            # first frame.  Should be cX, cY = centroid[0],
                            # centroid[1].
                            if ( cX > 0 and cX < width and cY > 0 and cY < height ):
                                bestcm = [cX, cY]
                # default to origin if nothing found, or the center of movement from the previous frame
                if bestcm is not None:
                    bestcmDict[framenum] = array(bestcm)
                else:
                    if len(bestcmDict) > 0:
                        bestcmDict[framenum] = bestcmDict[framenum-1]
                    else:
                        bestcmDict[framenum] = array([0,0])
                framenum = framenum + 1
                if framenum == MAXFRAMES:
                    return bestcmDict
        return bestcmDict
    def load_data(self, inputFile, startDate, fps):
        '''
        Parse and load AVEDac data from XML file into EventObject dictionary keyed by Object IDs.
        Query database for speeds and find nearest match to event times.
        Returns (frame_width, frame_height, event_dict); on error the values
        parsed so far are returned after printing the exception.
        '''
        evtDict = {}
        acmDict = {}
        sampleSecs = {}
        tree = []
        width = 0
        height = 0
        try:
            tree = ET.ElementTree(file=inputFile)
            root = tree.getroot()
            startTc = root.attrib['StartTimecode']
            endTc = root.attrib['EndTimecode']
            for meta in root.iter('SourceMetadata'):
                width = int(meta.attrib['FrameWidth'])
                height = int(meta.attrib['FrameHeight'])
            durationSecs = self.timecode2secs(endTc, fps) - self.timecode2secs(startTc, fps)
            # Query speed by time and get dictionary for fast lookup
            acmDict, sampleSecs = self.query_speed_by_time(startDate, durationSecs)
            # NOTE(review): Exception is constructed but never raised here.
            if len(sampleSecs) == 0:
                Exception('Error - cannot get speed data beginning with starting date ' + str(startDate))
            for event in root.iter('FrameEventSet'):
                tc = event.attrib['TimeCode']
                frame = int(event.attrib['FrameNumber'])
                # wall-clock time of this frame, derived from frame number
                offsetSecs = float(float(frame)/float(fps))
                d = startDate + timedelta(seconds=offsetSecs)
                targetSecs = float(d.strftime('%s.%f'))
                for eventObj in event.iter('EventObject'):
                    id = int(eventObj.attrib['ObjectID'])
                    X = int(eventObj.attrib['CurrX'])
                    Y = int(eventObj.attrib['CurrY'])
                    for bbox in eventObj.iter('BoundingBox'):
                        urx = int(bbox.attrib['UpperRightX'])
                        ury = int(bbox.attrib['UpperRightY'])
                        lrx = int(bbox.attrib['LowerLeftX'])
                        lry = int(bbox.attrib['LowerLeftY'])
                        # bbox center overrides the CurrX/CurrY attributes
                        X = lrx + abs(lrx-urx)/2
                        Y = lry + abs(lry-ury)/2
                    # Get the speed and how far the lookup was in seconds from the target
                    # TODO: what if thes are too far off ?
                    speed, offsetTargetSecs = self.speed_nearest_to_time(targetSecs, sampleSecs, acmDict)
                    ####speed = 0
                    # add new Event object keyed by objectID
                    if not evtDict.has_key(id):
                        print 'Adding new object with id ' + str(id)
                        evtDict[id] = self.EventObject(id, X, Y, frame, speed, targetSecs)
                    else:
                        print 'Appending to existing event ' + str(id)
                        evtDict[id].add(X, Y, frame, speed, targetSecs)
            return width, height, evtDict
        except Exception, e:
            print 'Exception !'
            print e
            return width, height, evtDict
def timecode2secs(self, tc, fps):
t = tc.split(':')
hours = int(t[0])
mins = int(t[1])
secs = int(t[2])
frames = int(t[3])
seconds = 3600*hours + 60*mins + secs + frames/fps
return seconds
    def query_speed_by_time(self, startDate, durationSecs):
        """Query AUV speed samples around [startDate, startDate + durationSecs].

        Connects to the Midwater_Transects SQL Server database (credentials
        from ACMDB_* environment variables) and returns two dicts:
        acmDict maps epoch-seconds string -> absolute speed (cm/sec), and
        sampleSecs maps a running index -> the same epoch-seconds string
        (used for nearest-time search).  Both are empty on failure.
        """
        acmserver = getenv("ACMDB_SERVER")
        acmuser = getenv("ACMDB_USERNAME")
        acmpassword = getenv("ACMDB_PASSWORD")
        acmDict = {}
        sampleSecs = {}
        try:
            con = pymssql.connect(server=acmserver,user=acmuser,password=acmpassword,database='Midwater_Transects')
            cursor = con.cursor(as_dict = True)
            # Query in 15 second window to allow for inaccuracies in time recording
            window = timedelta(seconds=15)
            duration = timedelta(seconds=durationSecs)
            start = startDate - window
            end = startDate + duration + window
            query = "SELECT sample_date, absolute_speed_cm_sec FROM acm WHERE sample_date BETWEEN '%s' AND '%s'" \
                    % (start.strftime('%Y-%m-%d %H:%M:%S'),end.strftime('%Y-%m-%d %H:%M:%S'))
            try:
                cursor.execute(query)
                # Create look-up table for speeds by times and times array for nearest search
                # do these need to be separate ?
                i = 0
                for row in cursor :
                    seconds = row['sample_date'].strftime('%s.%f')
                    acmDict[seconds] = row['absolute_speed_cm_sec']
                    sampleSecs[i] = seconds
                    i=i+1
                # NOTE(review): con.close() only runs on the success path;
                # a failed query leaks the connection.
                con.close()
            except Exception, e:
                print 'Could not query for speed'
        except Exception, e:
            print 'Could not query for speed'
        return acmDict,sampleSecs
def speed_nearest_to_time(self, targetSecs, sampleSecs, acmDict):
# find closest to target
tindex=min(sampleSecs, key=lambda x:abs(float(sampleSecs[x])-float(targetSecs)))
seconds = sampleSecs[tindex]
diff = abs(float(targetSecs) - float(seconds))
return acmDict[seconds], diff
    def process_command_line(self):
        """Parse command-line arguments into self.args and record the raw line.

        Side effects: sets self.args (argparse.Namespace) and
        self.commandline (the reconstructed invocation string).
        Returns the configured ArgumentParser so callers can print help.
        """
        examples = 'Examples:' + '\n\n'
        examples += sys.argv[0] + " --startdate '2011-02-01 00:00:00' --input V3603_HD1_300m_transect_H264.events.xml --output V3603_HD1_300m_transect_H264_sizescale.csv --opticalflow V3603_HD1_300m_transect_H264_vector.csv \n"
        parser = argparse.ArgumentParser(formatter_class=RawTextHelpFormatter,
                                         description='Read AVEDa event data from XML file, query corresponding AUV speeds from database, them save to spreadsheet for further analysis ',
                                         epilog=examples)
        parser.add_argument('--videoFile', action='store', help='video file', default='.',required=True)
        parser.add_argument('--input', action='store', help='AVEDac event.xml filename', default='.',required=True)
        parser.add_argument('--output', action='store', help='csv file to store results', default='.',required=True)
        parser.add_argument('--opticalFlowFile', action='store', help='optical flow vector file', default='.',required=True)
        parser.add_argument('--startdate', action='store', dest='startdate',help='starting date for events recorded in the filename',default='.',required=True)
        parser.add_argument('-v', '--verbose', action='store_true', help='Turn on verbose output')
        self.args = parser.parse_args()
        self.commandline = ""
        for item in sys.argv:
            if item == '':
                # Preserve empty string specifications in the command line
                self.commandline += "''" + ' '
            else:
                self.commandline += item + ' '
        return parser
if __name__ == '__main__':
    # Script entry point: parse args, load events + speeds, compute sizes,
    # write the CSV and show the visual debugging playback.
    ss = SizeScale()
    parser = ss.process_command_line()
    # camera / playback constants
    fps = 30
    focalLength = 0.177
    zoomAngle = 80 # degrees
    Wi = .88 # image cell width in cm
    Hi = .66 # image cell height in cm
    # NOTE(review): this overwrites the focalLength assigned above -- one of
    # the two values is dead; confirm which is correct.
    focalLength = 0.75 #0.524
    evtDict = {}
    try:
        if len(ss.args.input) > 0 and len(ss.args.output) > 0 and len(ss.args.startdate) and len(ss.args.opticalFlowFile) > 0 and len(ss.args.videoFile) > 0:
            startDate = datetime.strptime(ss.args.startdate,'%Y-%m-%d %H:%M:%S')
            #width, height, evtDict = ss.load_data(ss.args.input, startDate, fps)
            width, height, evtDict = ss.load_data2(ss.args.videoFile, ss.args.input)
            bestcmDict = ss.find_cm_from_opticalflow(ss.args.opticalFlowFile, Wi, Hi, width, height)
            ss.run_size_scale(ss.args.output, evtDict, bestcmDict, fps, Wi, Hi, focalLength, zoomAngle, width, height)
            ss.display_output(ss.args.videoFile, evtDict)
        else:
            parser.print_help()
    except Exception, e:
        print e
    print 'Done !'
|
UTF-8
|
Python
| false | false | 2,014 |
5,617,817,223,219 |
086cd83542db5421f85f0d36c66e9e8156919350
|
06997d23750aed03f0e305dad7aebe99411bddcb
|
/porcupine/config/pubdirs.py
|
a88768b52ba7a0488a063388c867dd653702eb66
|
[
"LGPL-2.1-only"
] |
non_permissive
|
tkouts/porcupine-legacy
|
https://github.com/tkouts/porcupine-legacy
|
1626d0f9c0f3c516895d37620e23fbd120861eda
|
901ec4523b500966a62940455b49ce3afd290d04
|
refs/heads/master
| 2021-01-18T21:06:24.683014 | 2012-10-23T09:56:13 | 2012-10-23T09:56:13 | 172,221 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#==============================================================================
# Copyright (c) 2005-2011, Tassos Koutsovassilis
#
# This file is part of Porcupine.
# Porcupine is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 2.1 of the License, or
# (at your option) any later version.
# Porcupine is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public License
# along with Porcupine; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#==============================================================================
"Parser module for the server's published directories"
import re
import os.path
from xml.dom import minidom
from porcupine.core.compat import basestring
from porcupine.utils import misc
class Registration(object):
    """A single published-path registration parsed from a directory's config.xml."""
    __slots__ = ('path', 'context', 'type', 'encoding', 'filters', 'max_age')

    def __init__(self, path, identifier, enc, filters, max_age):
        self.path = path
        self.context = identifier
        # type 1 == Porcupine Server Page (.psp), 0 == any other resource.
        # endswith() replaces the previous slice comparison identifier[-4:].
        self.type = 1 if identifier.endswith('.psp') else 0
        self.encoding = enc
        self.filters = filters
        self.max_age = int(max_age)

    def get_filter_by_type(self, type):
        """Return the first (class, params) filter tuple whose class is *type*.

        *type* may be given as the class itself or as a dotted name, which
        is resolved via misc.get_rto_by_name.  Raises IndexError when no
        matching filter is registered.
        """
        if isinstance(type, basestring):
            type = misc.get_rto_by_name(type)
        # renamed local so the builtin `filter` is not shadowed
        matching = [f for f in self.filters if f[0] == type]
        return matching[0]
class Dir(object):
    """A published directory: parses its config.xml into path registrations.

    Exact-path contexts go into an ordered config list; regex ('match')
    contexts go into a separate match list.  Lookups are memoized per
    (path, method, browser, lang) key.
    """
    def __init__(self, dirNode):
        self.path = dirNode.getAttribute('path')
        self.__config = []
        self.__matchlist = []
        self.__cache = {}
        configXML = minidom.parse(self.path + '/config.xml')
        contextList = configXML.getElementsByTagName('context')
        # construct action list
        for context_node in contextList:
            sPath = context_node.getAttribute('path')
            sMatch = context_node.getAttribute('match') or None
            sMethod = context_node.getAttribute('method')
            sBrowser = context_node.getAttribute('client')
            sLang = context_node.getAttribute('lang')
            sAction = context_node.getAttribute('action') or ''
            # NOTE(review): .encode() yields bytes on Python 3; presumably
            # fine under the compat layer this project targets -- confirm.
            encoding = (context_node.getAttribute('encoding').
                        encode('iso-8859-1') or None)
            max_age = context_node.getAttribute('max-age') or 0
            if sMatch is not None:
                # regex contexts: the Registration is built lazily at lookup
                # time, once the $N placeholders are substituted
                self.__matchlist.append((
                    (sMatch, sMethod, sBrowser, sLang),
                    (None,
                     self.path + '/' + sAction,
                     encoding,
                     self.__get_filters_list(context_node),
                     max_age)))
            else:
                self.__config.append((
                    (sPath, sMethod, sBrowser, sLang),
                    Registration(self.path + '/' + sPath,
                                 self.path + '/' + sAction,
                                 encoding,
                                 self.__get_filters_list(context_node),
                                 max_age)))
        configXML.unlink()

    def __get_filters_list(self, context_node):
        """Collect the <filter> children as ((class, attr_dict), ...) tuples."""
        filterList = context_node.getElementsByTagName('filter')
        filters = []
        for filterNode in filterList:
            type = filterNode.getAttribute('type')
            filter = [misc.get_rto_by_name(type), {}]
            for attr in filterNode.attributes.keys():
                filter[1][str(attr)] = filterNode.getAttribute(attr)
            filters.append(tuple(filter))
        return tuple(filters)

    def get_registration(self, sPath, sHttpMethod='GET', sBrowser='.*',
                         sLang='.*'):
        """Resolve *sPath* to a Registration (or None), consulting the cache.

        Exact-path entries are tried first, then regex entries; negative
        results are cached as well.
        """
        cache_key = (sPath, sHttpMethod, sBrowser, sLang)
        if cache_key in self.__cache:
            return self.__cache[cache_key]
        else:
            for paramList in self.__config:
                Path, HttpMethod, Browser, Lang = paramList[0]
                if (Path == sPath and re.match(HttpMethod, sHttpMethod)
                        and re.search(Browser, sBrowser)
                        and re.match(Lang, sLang)):
                    registration = paramList[1]
                    self.__cache[cache_key] = registration
                    return registration
            for paramList in self.__matchlist:
                Match, HttpMethod, Browser, Lang = paramList[0]
                match = re.match(Match, sPath)
                if (match and re.match(HttpMethod, sHttpMethod)
                        and re.search(Browser, sBrowser)
                        and re.match(Lang, sLang)):
                    registration_params = paramList[1]
                    action = registration_params[1]
                    # substitute $1..$9 in the action with the regex groups
                    def repl(mo):
                        ind = int(mo.group(0)[-1])
                        s = match.group(ind)
                        return s
                    # NOTE(review): non-raw pattern '\$\d' -- works, but a raw
                    # string r'\$\d' avoids invalid-escape warnings.
                    action = re.sub('\$\d', repl, action)
                    if (os.path.isfile(action)):
                        registration = Registration(registration_params[0],
                                                    action,
                                                    *registration_params[2:])
                        self.__cache[cache_key] = registration
                        return registration
            # cache the miss so repeated lookups stay cheap
            self.__cache[cache_key] = None
            return None
# Module-level registry of published directories, built at import time from
# conf/pubdir.xml.  NOTE(review): importing this module therefore performs
# filesystem I/O relative to the current working directory.
dirs = {}
configDom = minidom.parse('conf/pubdir.xml')
for dirNode in configDom.getElementsByTagName('dir'):
    dir = Dir(dirNode)
    dirs[dirNode.getAttribute('name')] = dir
configDom.unlink()
del configDom
|
UTF-8
|
Python
| false | false | 2,012 |
17,669,495,475,945 |
8d8a8788f62ec31fd13b4d835785e8b33861eebc
|
65207131701167c1f5c76b1c8d2df3626659dea9
|
/tests/__init__.py
|
143e2bf7845954e19315d4e638662f33d0965bf0
|
[
"MIT"
] |
permissive
|
clayg/redisco
|
https://github.com/clayg/redisco
|
d96bc692d74aa93738b03f5cd288736f1e2b4360
|
f57501adb53e44a892e72d325521b62f01a4ae4a
|
refs/heads/master
| 2021-01-18T18:33:03.516123 | 2010-08-24T15:52:05 | 2010-08-24T15:52:05 | 808,410 | 2 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import os
import unittest
from connection import ConnectionTestCase
from containers import (SetTestCase, ListTestCase, TypedListTestCase,
SortedSetTestCase, HashTestCase)
from models import (ModelTestCase, DateFieldTestCase, FloatFieldTestCase,
BooleanFieldTestCase, ListFieldTestCase, ReferenceFieldTestCase,
DateTimeFieldTestCase, CounterFieldTestCase, CharFieldTestCase,
MutexTestCase,)
import redisco
# Test-run configuration: which redis database/port the suite connects to.
# The chosen database is flushed by the tests -- never point this at real data.
REDIS_DB = int(os.environ.get('REDIS_DB', 10)) # WARNING TESTS FLUSHDB!!!
REDIS_PORT = int(os.environ.get('REDIS_PORT', 6380))
redisco.connection_setup(host="localhost", port=REDIS_PORT, db=REDIS_DB)
# Convenience suite for running just the typed-list tests.
typed_list_suite = unittest.TestLoader().loadTestsFromTestCase(TypedListTestCase)
def all_tests():
    """Assemble every redisco test case into a single unittest suite."""
    cases = (
        ConnectionTestCase,
        SetTestCase,
        ListTestCase,
        TypedListTestCase,
        SortedSetTestCase,
        ModelTestCase,
        DateFieldTestCase,
        FloatFieldTestCase,
        BooleanFieldTestCase,
        ListFieldTestCase,
        ReferenceFieldTestCase,
        DateTimeFieldTestCase,
        CounterFieldTestCase,
        MutexTestCase,
        HashTestCase,
        CharFieldTestCase,
    )
    suite = unittest.TestSuite()
    for case in cases:
        suite.addTest(unittest.makeSuite(case))
    return suite
|
UTF-8
|
Python
| false | false | 2,010 |
609,885,375,955 |
b728539c6ec3d239e4397815c1f8f17b948776fd
|
668c6e59ea9e9d9110d8b0bee8a2867924483ea3
|
/june/app.py
|
dfad3cf64f5ecc9c3452d9e66267bdc225ddb445
|
[
"BSD-3-Clause"
] |
permissive
|
jun0205/june
|
https://github.com/jun0205/june
|
faa4194619050e886bf54f099f3ba77490f74470
|
809cc17ed01e0d2c080254ee4ea012c362a25d93
|
refs/heads/master
| 2020-12-25T16:02:39.861403 | 2012-07-07T16:21:13 | 2012-07-07T16:21:13 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# Module setup: locate the project, make `june` importable in development,
# and register every configuration option before the app is created.
import os
PROJDIR = os.path.abspath(os.path.dirname(__file__))
ROOTDIR = os.path.split(PROJDIR)[0]
try:
    import june
    print('Start june version: %s' % june.__version__)
except ImportError:
    # running from a source checkout: add the repo root to sys.path
    import site
    site.addsitedir(ROOTDIR)
    print('Development of june')
from tornado.options import options
from july.util import reset_option
from july.app import JulyApplication
from july.web import init_options, run_server
reset_option('debug', True, type=bool)
reset_option('autoescape', None)
# site config
reset_option('sitename', 'June', type=str)
reset_option('version', '0.9.0', type=str)
reset_option('siteurl', 'http://lepture.com/project/june/')
reset_option('sitefeed', '/feed')
reset_option('static_path', os.path.join(PROJDIR, '_static'))
reset_option('static_url_prefix', '/static/', type=str)
reset_option('template_path', os.path.join(PROJDIR, "_templates"))
reset_option('locale_path', os.path.join(PROJDIR, '_locale'))
reset_option('login_url', '/account/signin', type=str)
# factor config (weights used for topic/user reputation scoring)
reset_option('reply_factor_for_topic', 600, type=int)
reset_option('reply_time_factor', 1000, type=int)
reset_option('up_factor_for_topic', 1500, type=int)
reset_option('up_factor_for_user', 1, type=int)
reset_option('down_factor_for_topic', 800, type=int)
reset_option('down_factor_for_user', 1, type=int)
reset_option('accept_reply_factor_for_user', 1, type=int)
reset_option('up_max_for_user', 10, type=int)
reset_option('down_max_for_user', 4, type=int)
reset_option('vote_max_for_user', 4, type=int)
reset_option('promote_topic_cost', 100, type=int)
# third party support config
reset_option('gravatar_base_url', "http://www.gravatar.com/avatar/")
reset_option('gravatar_extra', '')
reset_option('recaptcha_key', '')
reset_option('recaptcha_secret', '')
reset_option('recaptcha_theme', 'clean')
reset_option('emoji_url', '')
reset_option('ga', '')  # google analytics
reset_option('gcse', '')  # google custom search
# image backend
reset_option('image_backend', 'june.front.backends.LocalBackend')
def create_application():
    """Build the JulyApplication: settings, sub-apps, handlers, context, filters.

    Reads all configuration from tornado `options` (populated by the
    reset_option calls at module import).  Returns the ready application.
    """
    settings = dict(
        debug=options.debug,
        autoescape=options.autoescape,
        cookie_secret=options.cookie_secret,
        xsrf_cookies=True,
        login_url=options.login_url,
        template_path=options.template_path,
        static_path=options.static_path,
        static_url_prefix=options.static_url_prefix,
    )
    #: init application
    application = JulyApplication(**settings)
    #: register account app
    application.register_app(
        'june.account.handlers.app',
        url_prefix='/account'
    )
    application.register_app('june.account.service.app', url_prefix='/social')
    application.add_handler(
        ('/members', 'june.account.handlers.MembersHandler')
    )
    application.add_handler(
        ('/city/(.*)', 'june.account.handlers.CityMembersHandler')
    )
    #: register node app
    application.register_app('june.node.handlers.app', url_prefix='/node')
    from june.node.handlers import NodeListHandler
    application.add_handler(('/nodes', NodeListHandler))
    #: register topic app
    application.register_app('june.topic.handlers.app', url_prefix='/topic')
    from june.topic.handlers import CreateNodeTopicHandler
    from june.topic.handlers import ReplyHandler
    application.add_handler(('/node/(\w+)/create', CreateNodeTopicHandler))
    application.add_handler(('/reply/(\d+)', ReplyHandler))
    #: register dashboard app
    application.register_app(
        'june.dashboard.handlers.app',
        url_prefix='/dashboard'
    )
    #: register mail service
    application.register_app('july.ext.mail.handlers.app', url_prefix='/mail')
    #: register front app
    application.register_app('june.front.handlers.app', url_prefix='')
    #: register feedback app
    # application.register_app('june.feedback.handlers.app')
    # expose selected option values to every template
    for key in ['sitename', 'siteurl', 'sitefeed', 'version', 'ga', 'gcse']:
        application.register_context(key, options[key].value())
    import datetime
    application.register_context('now', datetime.datetime.utcnow)
    application.register_context('debug', options.debug)
    # template filters used by the June templates
    from june.filters import markdown, xmldatetime, localtime, timesince
    from june.filters import topiclink, normal_markdown
    application.register_filter('markdown', markdown)
    application.register_filter('normal_markdown', normal_markdown)
    application.register_filter('xmldatetime', xmldatetime)
    application.register_filter('localtime', localtime)
    application.register_filter('timesince', timesince)
    application.register_filter('topiclink', topiclink)
    return application
def main():
    """Initialize command-line options, build the June app, and serve it."""
    init_options()
    run_server(create_application())


if __name__ == "__main__":
    main()
|
UTF-8
|
Python
| false | false | 2,012 |
1,597,727,878,098 |
374bac1a9ab8eb10be5d232ea3df0d1ec8d68fdf
|
db516edf68e125d9a21e4dbe8c4d8b4c8d0a0f54
|
/inject.py
|
29ea713a71c89aaf0b09ca68ccb75704f2d7a770
|
[] |
no_license
|
cozybit/sd8787-test
|
https://github.com/cozybit/sd8787-test
|
05c55d34d39e152611953a209fa7bcc013a32b9c
|
5fa5dac82b5ab44454e7200f2131baed30e121fb
|
refs/heads/master
| 2021-01-22T07:03:59.985266 | 2013-11-20T21:45:16 | 2013-11-22T01:37:10 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/python
import pcap
import time
import sys
# Inject forged 802.11 beacon frames (SSID 'abcd') on the interface named in
# argv[1], ten per second, until interrupted.
iface = sys.argv[1]
pc = pcap.pcap(name=iface, promisc=True, immediate=True)
# radiotap header prepended to every injected frame
rtap = [
    0x00, 0x00,  # version
    0x0b, 0x00,  # header
    # bitmap: rate, dbm tx power, antenna
    0x04, 0x0c, 0x00, 0x00,
    0x02,  # rate x 2
    0x0c,  # tx power - 12 dBm
    0x01   # antenna
]
# beacon frame
frame = [
    0x80, 0x00,  # fc
    0x00, 0x00,  # duration
    0xff, 0xff, 0xff, 0xff, 0xff, 0xff,  # da
    0x13, 0x22, 0x33, 0x44, 0x55, 0x66,  # sa
    0x13, 0x22, 0x33, 0x44, 0x55, 0x66,  # bssid/ta
    0x10, 0x00,  # seq ctrl
    0x60, 0xd9, 0xe4, 0xd2, 0xb4, 0x04,
    0x00, 0x00,  # tsf
    0x64, 0x00,  # intvl
    0x31, 0x04,  # capa
    0x00, 0x04, 0x61, 0x62, 0x63, 0x64  # ssid = 'abcd'
]
seq = 0
# byte offset of the sequence-control field within `frame`
SEQ_OFF = 22
while True:
    # bump the 12-bit sequence number and pack it into the two seq-ctrl bytes
    seq += 1
    frame[SEQ_OFF] = (seq & 0x0f) << 4
    frame[SEQ_OFF + 1] = (seq >> 4) & 0xff
    pkt = ''.join([chr(x) for x in rtap] + [chr(x) for x in frame])
    print 'about to inject...'
    pc.inject(pkt, len(pkt))
    print 'result %s' % pc.geterr()
    time.sleep(.1)
|
UTF-8
|
Python
| false | false | 2,013 |
8,194,797,606,547 |
68081c6592e5eabb4b19c306ccaa126c313aa911
|
46bd46eb2f04ba2d4b84117c10fde1876eb8f31b
|
/main.py
|
9f0697cafa85e2bb46f268e7f831bb137e1162eb
|
[] |
no_license
|
JamesMura/gdgugmap
|
https://github.com/JamesMura/gdgugmap
|
0931e9c78e0a77cebc7c306b4479fe34ce131d80
|
2173a84e835e69da152879b538037d4743ddac91
|
refs/heads/master
| 2021-01-10T19:48:49.582647 | 2012-09-04T08:19:35 | 2012-09-04T08:19:35 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
from google.appengine.ext.webapp import template
import webapp2
import os
class GDGRequestHandler(webapp2.RequestHandler):
    """Base handler that renders Django templates from the templates/ dir."""

    def render(self, name, **data):
        """Render templates/<name>.html with *data* as the template context.

        The previous `if not data: data = {}` guard was dead code: **data
        always binds a dict (possibly empty), never None.
        """
        path = os.path.join(os.path.dirname(__file__), 'templates', name + '.html')
        self.response.out.write(template.render(path, data))
class MainHandler(GDGRequestHandler):
    """Handler for the site root."""
    def get(self):
        # GET / -> render templates/index.html
        self.render('index')
# WSGI entry point: route "/" to MainHandler.
app = webapp2.WSGIApplication([('/', MainHandler)],
                              debug=True)
|
UTF-8
|
Python
| false | false | 2,012 |
16,982,300,688,712 |
3c9b3f41a23eca39115b4f7fe2735a34e751a210
|
b6eb15539bf84c58fc0db8a473ac23caf0c0c7d3
|
/tests/test_swedish_to_english.py
|
ea600bed2d1cad10aa3c28d62d0680d0afc48c98
|
[] |
no_license
|
frecon/sprakt-proj
|
https://github.com/frecon/sprakt-proj
|
19b3e80a09550fed2f4e97100b336eae8f2e74ee
|
c0d40711fa160862ab9659c9cf8e00fc244b9470
|
refs/heads/master
| 2021-01-16T18:29:15.021940 | 2014-12-21T14:47:25 | 2014-12-21T14:47:25 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# coding: utf-8
import unittest
from collections import Counter
from translate.swedish_to_english import (
to_english,
load_dictionary,
english_words,
get_most_probable,
load_bigrams,
translate,
get_inflections,
translate_greedy,
possible_words,
translate_sum,
translate_min,
translate_min_new,
)
class TestData(unittest.TestCase):
    """End-to-end tests for the Swedish->English translation pipeline.

    NOTE(review): the expected values are tied to the bundled dictionary
    files ("folkets_sv_en_public.xml" / "folkets_en_sv_public.xml") and
    the bigram corpus; they will drift if those data files change.
    """

    @classmethod
    def setUpClass(cls):
        # Load the (expensive) dictionary and bigram model once per class.
        cls.dictionary = load_dictionary("folkets_sv_en_public.xml")
        cls.bigrams = load_bigrams()

    def test_to_english_feel(self):
        actual = to_english(u'mår', self.dictionary)
        expected = {'feel'}
        self.assertEqual(expected, actual)

    def test_to_english_tree(self):
        actual = to_english(u'träd', self.dictionary)
        expected = {'tree'}
        self.assertEqual(expected, actual)

    def test_to_english_allihopa(self):
        actual = to_english('allihopa', self.dictionary)
        expected = {'all', 'one and all'}
        self.assertEqual(expected, actual)

    def test_to_english_swedish_word_returns_list_of_english_words(self):
        actual = to_english('hej', self.dictionary)
        expected = {'hello', 'hallo', 'hey', 'hi'}
        self.assertEqual(expected, actual)

    def test_on(self):
        actual = to_english(u'på', self.dictionary)
        expected = {'at etc', 'on', 'in', 'during', 'of', 'at', 'after', 'in'}
        self.assertEqual(expected, actual)

    def test_ett(self):
        actual = to_english(u'ett', self.dictionary)
        expected = {'a', 'an', 'one'}
        self.assertEqual(expected, actual)

    def test_dig(self):
        actual = to_english(u'dig', self.dictionary)
        expected = {'you'}
        self.assertEqual(expected, actual)

    def test_direct_translate(self):
        # Word-for-word lookup: one candidate set per Swedish token.
        swedish_sentence = u'Hej på dig.'
        actual = english_words(swedish_sentence, self.dictionary)
        expected = [{'hello', 'hallo', 'hey', 'hi'},
                    {'at etc', 'on', 'in', 'during', 'of', 'at', 'after', 'in'},
                    {'you'}]
        self.assertEqual(expected, actual)

    def test_get_most_probable_hi(self):
        fr = '<s>'
        to = ["hello", "hallo", "hi", "hey"]
        actual = get_most_probable(fr, to, self.bigrams)
        expected = 'hi'
        self.assertEqual(expected, actual)

    def test_get_most_probable_a(self):
        fr = 'that'
        to = ["a", "one"]
        actual = get_most_probable(fr, to, self.bigrams)
        expected = 'a'
        self.assertEqual(expected, actual)

    def test_get_most_probable_in(self):
        fr = 'active'
        to = ['on', 'and', 'in', 'yes', 'frej']
        actual = get_most_probable(fr, to, self.bigrams)
        expected = 'in'
        self.assertEqual(expected, actual)

    def test_translate_hi_all(self):
        swedish_sentence = u'Hej allihopa.'
        actual = translate(swedish_sentence, self.dictionary, self.bigrams)
        expected = 'hi all'
        self.assertEqual(expected, actual)

    def test_translate_how_are_you(self):
        swedish_sentence = u'Hur mår ni.'
        actual = translate(swedish_sentence, self.dictionary, self.bigrams)
        expected = 'how '
        # XXX should fail due to semantical difference between SWE-ENG
        self.assertEqual(expected, actual)

    def test_translate_how_are_you_two(self):
        swedish_sentence = u'Är detta ett steg?'
        actual = translate_greedy(swedish_sentence, self.dictionary, self.bigrams)
        expected = 'is this one step'
        self.assertEqual(expected, actual)

    def test_translate_how_are_you_tree(self):
        swedish_sentence = u'Är detta ett träd?'
        actual = translate_greedy(swedish_sentence, self.dictionary, self.bigrams)
        expected = 'is this one tree'
        self.assertEqual(expected, actual)

    def test_xpath(self):
        actual = get_inflections(u'är', self.dictionary)
        expected = {'am/are/is'}
        # XXX should fail due to semantical difference between SWE-ENG
        self.assertEqual(expected, actual)

    def test_get_most_probable_a2(self):
        fr = 'this'
        to = ["a", "one"]
        actual = get_most_probable(fr, to, self.bigrams)
        expected = 'one'
        self.assertEqual(expected, actual)

    def test_possible_words(self):
        # Successor candidates of "<s>" restricted to the first token's
        # candidate set, with their bigram counts.
        last_word = "<s>"
        swedish_sentence = u'Är detta ett träd?'
        words = english_words(swedish_sentence, self.dictionary)
        actual = possible_words(last_word, words[0], self.bigrams)
        expected = Counter({'is': 53121629, 'are': 26543233, 'am': 1596465})
        self.assertEqual(expected, actual)

    def test_translate_sum_is_this_a_tree(self):
        swedish_sentence = u'Är detta ett träd?'
        actual = translate_sum(swedish_sentence, self.dictionary, self.bigrams)
        expected = 'is this one tree'
        self.assertEqual(expected, actual)

    def test_translate_min_is_this_a_tree(self):
        swedish_sentence = u'Är detta ett träd?'
        actual = translate_min(swedish_sentence, self.dictionary, self.bigrams)
        expected = 'is this a tree'
        self.assertEqual(expected, actual)

    def test_translate_min_new(self):
        swedish_sentence = u'Jag hatar dig'
        actual = translate_min_new(swedish_sentence, self.dictionary, load_dictionary("folkets_en_sv_public.xml"), self.bigrams)
        expected = 'i hate you'
        self.assertEqual(expected, actual)
|
UTF-8
|
Python
| false | false | 2,014 |
5,755,256,179,375 |
c8db9bb5bb316233f7f975c94811ca3c775425ce
|
0495e5acc5cd58b1a38748868215b9369ab8bfbb
|
/2astWKwithpics/snake/snake.py
|
8a172242cb1bf3f5c2fe74494b8dc1e8493a186f
|
[] |
no_license
|
MacMeDan/astroids
|
https://github.com/MacMeDan/astroids
|
537bdc459ad2bb5153f96c5ef5411e68b70ad104
|
5ccbd882f542e04dc104dfa7d20815ff9ece1feb
|
refs/heads/master
| 2021-01-17T19:52:58.860197 | 2013-12-15T02:18:35 | 2013-12-15T02:18:35 | 15,190,353 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import pygame
import random
class Snake:
    """A toroidal-grid snake for a pygame game.

    The playfield is ``pixel_width x pixel_height`` pixels divided into
    square cells of ``pixels_per_cell``; the snake starts at the centre,
    pointing up, and wraps around the edges. Running into itself recolours
    the snake randomly instead of ending the game.
    """

    def __init__(self, length, color, pixel_width, pixel_height, pixels_per_cell):
        self.width = pixel_width / pixels_per_cell
        self.height = pixel_height / pixels_per_cell
        self.pixels_per_cell = pixels_per_cell
        self.length = length
        self.color = color
        # Initial heading: up.
        self.x_rate = 0
        self.y_rate = -1
        # Body cells, head first, extending downward from the centre.
        col = self.width / 2
        row = self.height / 2
        self.body = [(col, row + offset) for offset in range(self.length)]

    def paint(self, surface):
        """Draw every body segment as a filled circle on *surface*."""
        radius = self.pixels_per_cell / 2
        for (cx, cy) in self.body:
            px = cx * self.pixels_per_cell + self.pixels_per_cell / 2
            py = cy * self.pixels_per_cell + self.pixels_per_cell / 2
            pygame.draw.circle(surface, self.color, (px, py), radius)

    def move(self):
        """Advance one cell in the current direction, wrapping at edges."""
        head_x, head_y = self.body[0]
        head_x += self.x_rate
        head_y += self.y_rate
        # Wrap around the grid edges.
        if head_y < 0:
            head_y = self.height - 1
        if head_x < 0:
            head_x = self.width - 1
        if head_y > self.height - 1:
            head_y = 0
        if head_x > self.width - 1:
            head_x = 0
        # On self-collision, pick a new random colour (checked against the
        # body *before* the tail is dropped, as in the original rules).
        if any(segment == (head_x, head_y) for segment in self.body):
            self.color = (random.randrange(0, 255),
                          random.randrange(0, 255),
                          random.randrange(0, 255))
        self.body.pop()
        self.body.insert(0, (head_x, head_y))

    def up(self):
        self.x_rate = 0
        self.y_rate = -1

    def left(self):
        self.x_rate = -1
        self.y_rate = 0

    def right(self):
        self.x_rate = 1
        self.y_rate = 0

    def down(self):
        self.x_rate = 0
        self.y_rate = 1
|
UTF-8
|
Python
| false | false | 2,013 |
3,298,534,921,035 |
2d6c62d4dccd1b3d61980e818d6160f7eb8d0e8d
|
736a4ecf21c57b16748694907e7c215ffcdf75d4
|
/NLTK/ch02/corpus.py
|
914fbbda18540de0b47fb704d0b9167126915a09
|
[] |
no_license
|
jhb86253817/nltk-practice
|
https://github.com/jhb86253817/nltk-practice
|
9d7b78a69226cad47d25762e240d098d991bdce3
|
1f5cfdb11161e85d4e21729091cc9425597eb7d5
|
refs/heads/master
| 2021-01-23T17:30:39.961810 | 2014-09-18T14:25:46 | 2014-09-18T14:25:46 | 24,191,086 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# NLTK book ch.2 exercises (Python 2 script).
# Import the Gutenberg corpus and wrap one text as an nltk.Text object.
import nltk
nltk.corpus.gutenberg.fileids()
emma = nltk.corpus.gutenberg.words('austen-emma.txt')
emma = nltk.Text(emma)
emma.concordance("morning")
# Compute, per Gutenberg text: average word length, average sentence
# length, and lexical diversity (words per distinct vocabulary item).
from nltk.corpus import gutenberg
for fileid in gutenberg.fileids():
    num_chars = len(gutenberg.raw(fileid))
    num_words = len(gutenberg.words(fileid))
    num_sents = len(gutenberg.sents(fileid))
    num_vocab = len(set([w.lower() for w in gutenberg.words(fileid)]))
    print int(num_chars/num_words), int(num_words/num_sents), int(num_words/num_vocab), fileid
# Frequency of modal verbs in the Brown corpus "news" category.
from nltk.corpus import brown
news_text = brown.words(categories='news')
fdist = nltk.FreqDist([w.lower() for w in news_text])
modals = ['can', 'could', 'may', 'might', 'must', 'will']
for m in modals:
    # Trailing comma keeps all counts on one line (Python 2 print).
    print m + ':', fdist[m],
#generate random texts using bigram
def generate_model(cfdist, word, num=15):
    """Greedily generate *num* words from a conditional frequency dist.

    Prints the current word, then moves to its most likely successor.
    Deterministic, so it can get stuck cycling between two words.
    (Python 2: the trailing comma suppresses the newline.)
    """
    for i in range(num):
        print word,
        word = cfdist[word].max()
# Build a bigram model over the KJV Genesis text and generate a short
# passage starting from the seed word "living".
text = nltk.corpus.genesis.words('english-kjv.txt')
bigrams = nltk.bigrams(text)
cfd = nltk.ConditionalFreqDist(bigrams)
generate_model(cfd, "living")
#calculate the fraction of the words that are not stopwords
from nltk.corpus import stopwords
def content_fraction(text):
    """Return the fraction of tokens in *text* that are NOT English stopwords.

    BUG FIX: the original assigned to a local variable named ``stopwords``,
    shadowing the imported ``nltk.corpus.stopwords`` module and raising
    UnboundLocalError on every call. It also used integer division, which
    floors the fraction to 0 on Python 2; force true division instead.
    """
    stop_words = stopwords.words('english')
    content = [w for w in text if w.lower() not in stop_words]
    return float(len(content)) / len(text)
|
UTF-8
|
Python
| false | false | 2,014 |
9,569,187,180,864 |
03bdc51a2f686b57a63907b2d2403716ea51b812
|
de922dec0fc3d56f2be0671ac543c79c325b21d7
|
/__init__.py
|
60760cc4eb2901fd5b45e1d72e74e3c8494bc63a
|
[] |
no_license
|
tarzanjw/pyramid_antiflood
|
https://github.com/tarzanjw/pyramid_antiflood
|
5fa5e87564d3ce0589183ea1e920edc560901aa1
|
87dcc27c782a9be25764f6b10b9307aab15a6b6e
|
refs/heads/master
| 2020-05-19T18:59:47.648945 | 2013-10-11T08:27:34 | 2013-10-11T08:27:34 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
__author__ = 'tarzan'
from redis import Redis
_fb1 = 3
_fb2 = 5
_default_limit = 5
_redis = None
def _fibo_timeout_calc(count):
    """
    Default timeout calculator for the Counter class: the *count*-th
    timeout grows like a Fibonacci sequence seeded with the module-level
    ``_fb1`` and ``_fb2`` values.
    """
    if count == 1:
        return _fb1
    if count == 2:
        return _fb2
    # Iterate the recurrence up to the requested term.
    prev, cur = _fb2, _fb1 + _fb2
    for _ in range(3, count + 1):
        prev, cur = cur, prev + cur
    return prev
def initialize_from_settings(settings):
    """
    Initialize the module-level Redis connection and tuning constants from
    a Pyramid-style settings dict.

    Keys prefixed with ``antiflood.`` belong to this module; within those,
    keys prefixed with ``redis.`` are forwarded to the Redis constructor.

    BUG FIX: the original declared ``global _fb0`` (a name used nowhere
    else) instead of ``_fb1``/``_fb2``, and looked the un-prefixed keys up
    in ``settings`` (whose keys still carry the ``antiflood.`` prefix), so
    configured overrides were never applied. Read from the stripped
    ``conf`` dict and rebind the real globals instead.
    """
    global _redis, _fb1, _fb2, _default_limit
    _conf_prefix = 'antiflood.'
    conf = {k[len(_conf_prefix):]: v
            for k, v in settings.iteritems()
            if k.startswith(_conf_prefix)}
    _redis_prefix = 'redis.'
    redis_conf = {k[len(_redis_prefix):]: v
                  for k, v in conf.iteritems()
                  if k.startswith(_redis_prefix)}
    _redis = Redis(**redis_conf)
    _fb1 = conf.get('fb1', _fb1)
    _fb2 = conf.get('fb2', _fb2)
    _default_limit = conf.get('limit', _default_limit)
def includeme(config):
    """Pyramid inclusion hook (``config.include(...)``): bootstrap this
    module from the application's registry settings."""
    initialize_from_settings(config.registry.settings)
from counter import Counter, LimitionReachedError
|
UTF-8
|
Python
| false | false | 2,013 |
10,015,863,763,163 |
0ef3b755d2fa37338764d57a12cd588eb78d7eb2
|
4b95fab9a26be8284320bf35babcdc6d117e980b
|
/collective/facets/testing.py
|
2eef647f889295b8483c20a5d47b8ad372801ad9
|
[] |
no_license
|
collective/collective.facets
|
https://github.com/collective/collective.facets
|
e6a3ed41fbc6b7cf72a2a850e14131c589164723
|
c93ef03f6931a8e25d467acef562470368a65abc
|
refs/heads/master
| 2023-03-22T13:53:56.852468 | 2014-03-12T02:33:31 | 2014-03-12T02:33:31 | 5,254,685 | 1 | 0 | null | false | 2012-08-07T00:24:58 | 2012-08-01T03:48:14 | 2012-08-07T00:24:56 | 2012-08-07T00:24:56 | 196 | null | 1 | 0 |
Python
| null | null |
import doctest
from zope.configuration import xmlconfig
from plone.app.testing import PloneSandboxLayer
from plone.app.testing import applyProfile
from plone.app.testing.layers import FunctionalTesting
from plone.app.testing.layers import IntegrationTesting
import collective.facets
# Feature-detect Plone 4.3: plone.app.collection only ships with it.
try:
    import plone.app.collection
    PLONE43 = True
except ImportError:
    # Was a bare `except:`, which would also swallow KeyboardInterrupt /
    # SystemExit; only a missing module means "not Plone 4.3".
    PLONE43 = False
class PloneAppCollectionLayer(PloneSandboxLayer):
    """Plone sandbox test layer: loads the ZCML and installs the GS
    profiles for collective.facets (plus plone.app.collection on 4.3)."""

    def setUpZope(self, app, configurationContext):
        # Register ZCML; plone.app.collection only exists on Plone 4.3.
        if PLONE43:
            xmlconfig.file('configure.zcml', plone.app.collection,
                           context=configurationContext)
        xmlconfig.file('configure.zcml', collective.facets,
                       context=configurationContext)

    def setUpPloneSite(self, portal):
        # Install the GenericSetup profiles into the sandbox site.
        if PLONE43:
            applyProfile(portal, 'plone.app.collection:default')
        applyProfile(portal, 'collective.facets:default')
# Shared fixture plus the integration/functional layer wrappers used by
# this package's tests, and the default doctest option flags.
PLONEAPPCOLLECTION_FIXTURE = PloneAppCollectionLayer()
PLONEAPPCOLLECTION_INTEGRATION_TESTING = IntegrationTesting(\
    bases=(PLONEAPPCOLLECTION_FIXTURE,),
    name="PloneAppCollectionLayer:Integration")
PLONEAPPCOLLECTION_FUNCTIONAL_TESTING = FunctionalTesting(\
    bases=(PLONEAPPCOLLECTION_FIXTURE,),
    name="PloneAppCollectionLayer:Functional")
optionflags = (doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE)
|
UTF-8
|
Python
| false | false | 2,014 |
1,348,619,780,718 |
42b4d6bbe8d490e44749512022e7f7267076a220
|
ecde4736e27b275b37bf1ccd7f375e31c1faf733
|
/2006old/test_curl/t.py
|
094f2607831d20a1a64425770d400fdd340136f7
|
[] |
no_license
|
bahamut8348/xkcode
|
https://github.com/bahamut8348/xkcode
|
1d85ef9b806c13af7193c9fd2281c99f174357a3
|
41665e5601d6d555ae2633ac0aa8cc1108a6b6bf
|
refs/heads/master
| 2016-09-10T17:29:15.351377 | 2013-05-17T02:37:30 | 2013-05-17T02:37:30 | 34,992,677 | 1 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import pycurl
# Smoke-test pycurl (Python 2 script): print the library version, fetch
# http://163.com following redirects, then print the final status and URL.
print pycurl.version
c = pycurl.Curl()
c.setopt(pycurl.URL, "http://163.com")
c.setopt(pycurl.FOLLOWLOCATION, 1)  # follow 3xx redirects
c.perform()
print c.getinfo(pycurl.HTTP_CODE), c.getinfo(pycurl.EFFECTIVE_URL)
import time
# Keep the process alive (e.g. to inspect connections externally).
time.sleep(100)
|
UTF-8
|
Python
| false | false | 2,013 |
13,056,700,629,134 |
32c9b634ec85f7549aee102830fd52ee7e3ccda1
|
a8695116681afcf327da2e0d2ed24d94aa1a8aed
|
/xue/auditlock/utils.py
|
573b1f96b3ad24fc6a68b29d4d18924bc73b406c
|
[
"BSD-3-Clause"
] |
permissive
|
team-xue/xue
|
https://github.com/team-xue/xue
|
c049afc13755cd2bd3de1413ca7d22e98320fc8e
|
e6bd9539803a2bf902f48b65a9df86356b5d46b2
|
refs/heads/master
| 2020-05-20T09:28:46.021908 | 2014-10-03T16:39:27 | 2014-10-03T16:39:27 | 7,713,996 | 1 | 0 | null | false | 2013-11-03T09:36:10 | 2013-01-20T07:49:04 | 2013-11-03T09:36:10 | 2013-11-03T09:36:10 | 3,216 | null | 2 | 0 |
Python
| null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, division
__all__ = [
'is_locked',
'get_lock_status',
'set_lock_status',
]
from django.utils.translation import ugettext_lazy as _u
_ = lambda x: x
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
from .models import LockedStatus
_get_for_model = ContentType.objects.get_for_model
_status_get = LockedStatus.objects.get
def get_status_object_for(obj):
    """Fetch the LockedStatus row attached to *obj* via the generic FK.

    Raises ``LockedStatus.DoesNotExist`` when *obj* has no status row.
    """
    content_type = _get_for_model(obj)
    return _status_get(content_type__pk=content_type.id, object_id=obj.id)
def get_lock_status(obj):
    """Return ``(status, reason)`` for *obj*; ``(False, '')`` when *obj*
    has no LockedStatus row at all."""
    try:
        record = get_status_object_for(obj)
    except LockedStatus.DoesNotExist:
        return False, ''
    else:
        return record.status, record.reason
def is_locked(obj):
    """True when *obj* currently carries a lock (reason is ignored)."""
    return get_lock_status(obj)[0]
def set_lock_status(obj, status, reason=None):
    """Create or update the LockedStatus row for *obj*.

    The stored reason is only overwritten when *reason* is given.
    """
    try:
        record = get_status_object_for(obj)
    except LockedStatus.DoesNotExist:
        record = LockedStatus(rel_object=obj)
    record.status = status
    if reason is not None:
        record.reason = reason
    record.save()
# vim:set ai et ts=4 sw=4 sts=4 fenc=utf-8:
|
UTF-8
|
Python
| false | false | 2,014 |
8,899,172,284,964 |
7022a0ea4833a5882478211c652939729878f719
|
79faeb2d8b1b3aa0eee8e2a38b718536eabb1f73
|
/AbogadosDesktop/src/gui/GestorCitas.py
|
7c9093bd2ef106e7546d96a10610e34ba1f432d6
|
[] |
no_license
|
ehmsoft/Desktop
|
https://github.com/ehmsoft/Desktop
|
cbacbd89607fa32646b38f896abebb273bd91785
|
afd6324df46433c2ef797785fce61cd1a35f0d58
|
refs/heads/master
| 2020-05-04T21:13:01.157782 | 2012-11-14T17:21:39 | 2012-11-14T17:21:39 | 2,143,422 | 3 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: utf-8 -*-
'''
Created on 30/03/2012
@author: harold
'''
from core.Singleton import Singleton
from PySide import QtGui, QtCore
from persistence.Persistence import Persistence
from datetime import datetime
from datetime import timedelta
from core.Preferencias import Preferencias
from gui.Preferencias_GUI import Preferencias_GUI
from core.GestorCorreo import Correo
class GestorCitas(object):
    """Singleton appointment (cita) manager.

    Loads upcoming appointments from persistence and schedules one
    single-shot QTimer per appointment; when a timer fires the user is
    notified by email, tray balloon and/or modal dialog according to the
    saved preferences.
    """
    __metaclass__ = Singleton  # Python 2 metaclass hook: one shared instance

    def __init__(self, tray, parent=None):
        self.tray = tray        # system-tray icon used for balloon messages
        self.parent = parent    # Qt parent for timers and dialogs
        self.citas = []         # pending appointments, soonest first
        self.timer = []         # one single-shot QTimer per pending cita
        self.callback = None    # optional hook invoked when an alarm fires

    def actualizarCitas(self):
        # Full reload: cancel all scheduled alarms, then re-read storage.
        self.__detenerCitas()
        self.__cargarCitas()

    def registrarCallBack(self, callback):
        self.callback = callback

    def retirarCallBack(self):
        self.callback = None

    def __detenerCitas(self):
        # Stop every pending timer and forget the scheduled appointments.
        for t in self.timer:
            t.stop()
        del self.timer[:]
        del self.citas[:]

    def __compararCitas(self, a, b):
        # Python 2 cmp function: orders citas by their alarm-due instant
        # (fecha - anticipacion), expressed in whole seconds.
        return int(((a.getFecha() - timedelta(0, a.getAnticipacion())) - (b.getFecha() - timedelta(0, b.getAnticipacion()))).total_seconds())

    def __cargarCitas(self):
        """Load calendar appointments and start one single-shot timer per
        future appointment whose alarm is enabled."""
        try:
            p = Persistence()
            citas = p.consultarCitasCalendario()
            citas = sorted(citas, self.__compararCitas)
            for cita in citas:
                # Only schedule alarms whose trigger time is still ahead.
                if cita.isAlarma() and cita.getFecha() - timedelta(0, cita.getAnticipacion()) > datetime.today():
                    timer = QtCore.QTimer(self.parent)
                    timer.setSingleShot(True)
                    timer.timeout.connect(self.__seCumpleCita)
                    delta = cita.getFecha() - datetime.today()
                    # Milliseconds until (due time - anticipation).
                    tiempo = (delta.total_seconds() - cita.getAnticipacion()) * 1000
                    self.citas.append(cita)
                    timer.start(tiempo)
                    self.timer.append(timer)
        except Exception as e:
            print e

    def __seCumpleCita(self):
        """Timer slot: the soonest appointment is due; notify the user
        according to the preference bitmask."""
        if len(self.citas):
            # Timers fire in schedule order, so the first cita is the one.
            cita = self.citas.pop(0)
            if self.callback:
                self.callback()
            preferencias = Preferencias()
            tipoAlarma = preferencias.getTipoAlarma()
            # Email notification (bitmask flag).
            if tipoAlarma & Preferencias_GUI.MENSAJE_CORREO == Preferencias_GUI.MENSAJE_CORREO:
                try:
                    correo = Correo(self.parent)
                    correo.cita = cita
                    correo.correo = preferencias.getCorreoNotificacion()
                    correo.start()
                except Exception as e:
                    print e.message
                    QtGui.QMessageBox.information(self.parent, 'Error', u"Error al enviar correo electrónico de notificación de una cita. Por favor verifique su conexión a internet e intente de nuevo. Si el problema persiste por favor comuníquese con nuestro personal de soporte técnico: [email protected]")
            # Tray balloon notification.
            if tipoAlarma & Preferencias_GUI.MENSAJE_ICONO == Preferencias_GUI.MENSAJE_ICONO:
                self.tray.showMessage(u'Notificación de cita' + cita.getDescripcion(), unicode(cita)) #TODO: Verificar unicode
            # Modal warning dialog.
            if tipoAlarma & Preferencias_GUI.MENSAJE_EMERGENTE == Preferencias_GUI.MENSAJE_EMERGENTE:
                message = QtGui.QMessageBox()
                message.setIcon(QtGui.QMessageBox.Warning)
                message.setText("Se cumple la cita:\n" + unicode(cita))#TODO: Verificar unicode
                message.exec_()
|
UTF-8
|
Python
| false | false | 2,012 |
2,516,850,852,051 |
e904aa5a554f26dc83955891db6dd15de8ac220a
|
aa8c274b0f9eb9cd7380a2692ef287af33b03b8d
|
/palo/base.py
|
0bbed25c0c4ae75d3b1a1295eeec5b21e638976a
|
[] |
no_license
|
abraneo/python-palo
|
https://github.com/abraneo/python-palo
|
f87455b2e35efbf07cc60dd1558d49691b5cf0f2
|
616c74a0740cef30325a1952e6c6b915dc724240
|
refs/heads/master
| 2021-01-23T00:14:53.343419 | 2012-02-13T12:54:15 | 2012-02-13T12:54:15 | 3,247,360 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import csv
import logging
import urllib
import requests
from StringIO import StringIO
LOG_FILENAME = 'palo-request.log'
logging.basicConfig(filename=LOG_FILENAME, level=logging.DEBUG)
class BasePaloObject(object):
    """Base class for Palo OLAP API wrappers: builds the request URL,
    performs the HTTP GET and parses the semicolon-separated response."""

    def execute(self, server, port, action, **params):
        """Call *action* on the Palo server and return a csv.reader over
        the response body; raises on any non-200 status."""
        url = "http://%(server)s:%(port)s%(action)s?%(params)s" % {
            'server': server,
            'port': port,
            'action': action,
            'params': urllib.urlencode(params)}
        logging.info("REQUESTING INFO: %s" % url)
        response = requests.get(url)
        payload = response.text
        logging.info("RESPONSE: %s" % payload)
        if response.status_code != requests.codes.ok:
            raise Exception("Error: %s returned (%d): %s" % (url, response.status_code, payload))
        return csv.reader(StringIO(payload), delimiter=';')
|
UTF-8
|
Python
| false | false | 2,012 |
7,705,171,373,409 |
a1cbe86d796856bafccafb23ff8593eb53501e39
|
1563a18d5c12ee5c1e0bc671cc225195ceea3983
|
/challenge2.py
|
150e3543c60f1c4441f51bdcc26ffd18e778edb4
|
[] |
no_license
|
tonyskapunk/devops00
|
https://github.com/tonyskapunk/devops00
|
4842348030871e5096e3fda0a19290a5af1d895c
|
1c14cee2b61f3563a18c0786efe2f0ee2311a0e5
|
refs/heads/master
| 2021-01-23T13:58:21.976235 | 2013-10-21T22:01:31 | 2013-10-21T22:01:31 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
import argparse
import auth
import sys
import time
from pyrax import cloudservers as cs
from challenge1 import *
if __name__ == "__main__":
    # CLI (Python 2): "clone" a cloud server by creating a throwaway
    # instance, snapshotting it, deleting it, and building the named
    # server from the snapshot. Helpers come from challenge1.
    parser = argparse.ArgumentParser(description='Cloud Server Clone.')
    parser.add_argument('-F', '--flavor-list', action='store_true',
                        help='Prints the available Server Flavors/Sizes.')
    parser.add_argument('-f', '--flavor', metavar='SIZE', default=512, type=int,
                        help='Size to use on servers. Default 512.')
    parser.add_argument('-I', '--image-list', action='store_true',
                        help='Prints the available Server Images.')
    parser.add_argument('-i', '--image', metavar='IMG_NAME', default='arch',
                        help='Name of Image to use(1st match). Default Arch.')
    parser.add_argument('-s', '--server-name', metavar='NAME',
                        help='Server Name for the *cloned* Cloud Server.')
    args = parser.parse_args()
    # Listing modes print and exit immediately.
    if args.flavor_list:
        flavorList()
        sys.exit(0)
    if args.image_list:
        imageList()
        sys.exit(0)
    # Validate inputs before creating any billable resources.
    if not isValidImage(args.image):
        print "Invalid Image: %s" % (args.image)
        sys.exit(1)
    if not isValidFlavor(args.flavor):
        print "Invalid Flavor: %s" % (args.flavor)
        sys.exit(1)
    print "Serving your request, please wait..."
    flavor = getFlavor(args.flavor)
    image = getImage(args.image)
    sname = 'x' + randomStr(8)
    img_name = randomStr(8)
    servers = createServers(1, sname, image, flavor)
    s = servers[0]
    img_id = s.create_image(img_name)
    # Poll until the snapshot finishes, then drop the scratch server.
    while True:
        s.get()
        # While building this attr is set to 'image_snapshot'
        if s.__getattr__('OS-EXT-STS:task_state'):
            time.sleep(5)
        else:
            print "Deleting original server."
            s.delete()
            break
    print "Creating a clone server from image created"
    cimage = getImage(img_name)
    createServers(1, args.server_name, cimage, flavor)
    sys.exit(0)
|
UTF-8
|
Python
| false | false | 2,013 |
15,762,530,011,330 |
3633c1f17e44fd29b5681a9384249254dd76118d
|
1ece649b981aa27583190248b6f188fb791fbf6f
|
/src/mobyle2/core/tests/utils.py
|
5e59a319f07c691fe658a6b3a820734ff6ffdedf
|
[] |
no_license
|
longnd/mobyle2.core
|
https://github.com/longnd/mobyle2.core
|
be8d659fbb369a39e24052b13d0b5049a3f43223
|
05ff25182bbd058f25e6357c54d47f9adf69e711
|
refs/heads/master
| 2017-11-02T13:42:49.071450 | 2012-02-03T15:53:12 | 2012-02-03T15:53:12 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__docformat__ = 'restructuredtext en'
import os
from ConfigParser import ConfigParser
import random
import socket
import unittest
import threading
from paste.httpserver import serve
from paste.deploy import loadapp
from sqlalchemy import create_engine
from mobyle2.core.models import DBSession
from mobyle2.core.models import root
from mobyle2.core.models.init import initialize_sql
from mobyle2.core.utils import _
from pyramid.i18n import get_localizer
from paste.fixture import TestApp
D = os.path.dirname
J = os.path.join
HERE_DIR = D(D(D(D(D(D(D(__file__)))))))
CONF_DIR = J(D(D(D(D(D(D(D(__file__))))))), 'etc', 'wsgi')
CONFIG = os.path.join(CONF_DIR, 'instance.ini')
socket.setdefaulttimeout(1)
__wsgiapp__ = None
__session__ = None
__server_infos__ = {}
__app_infos__ = {}
def get_port():
    """Return a TCP port in [20000, 40000) that nothing is listening on.

    Tries up to 100 random ports; a port counts as free when connect()
    to localhost fails. Raises RuntimeError when none is found.

    BUG FIX: the original used the Python-2-only statement form
    ``raise RuntimeError, "..."``; the call form below is valid on both
    Python 2 and 3.
    """
    for i in range(100):
        port = random.randrange(20000, 40000)
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            try:
                s.connect(('localhost', port))
            except socket.error:
                # Nothing accepted the connection: the port is free.
                return port
        finally:
            s.close()
    raise RuntimeError("Can't find port")
def server_close(self):
    """
    Finish pending requests and shutdown the server.

    NOTE(review): bound onto the paste httpserver instance at runtime
    (see PyramidFunctionnalLayer.setUp) as its close method, hence the
    explicit *self* parameter.
    """
    self.running = False
    self.socket.close()
    self.thread_pool.shutdown(1)
def get_app():
    """Return the WSGI application, loading it from instance.ini on the
    first call and caching it at module level afterwards.

    BUG FIX: the original re-imported ``__wsgiapp__`` from this very
    module into a *local* name and assigned to that, so the loaded app
    was never cached and every call re-ran loadapp(); declare the module
    global instead.
    """
    global __wsgiapp__
    if not __wsgiapp__:
        __wsgiapp__ = loadapp('config:instance.ini', relative_to=CONF_DIR)
    return __wsgiapp__
def get_running_port():
    """Port the functional test server is configured to listen on."""
    return get_server_infos()['port']
def get_app_infos():
    """Read the [app:projectapp] section of instance.ini, cached in the
    module-level ``__app_infos__`` dict.

    The SQLAlchemy URL is overridden to point at the in-memory test DB.
    NOTE(review): relies on ConfigParser's private ``_sections`` attribute.
    """
    if not __app_infos__.keys():
        infos = ConfigParser()
        infos.read(CONFIG)
        __app_infos__.update(infos._sections['app:projectapp'])
        __app_infos__['sqlalchemy.url'] = get_sa_url()
    return __app_infos__
def get_server_infos():
    """Read the [server:main] section of instance.ini (cached in the
    module-level ``__server_infos__`` dict) and pick a random free port
    for this test run."""
    if not __server_infos__.keys():
        infos = ConfigParser()
        infos.read(CONFIG)
        __server_infos__.update(infos._sections['server:main'])
        __server_infos__['port'] = get_port()
    return __server_infos__
def get_sa_url():
    """SQLAlchemy URL of the throwaway in-memory SQLite test database."""
    return 'sqlite://'
def get_session():
    """Return the shared SQLAlchemy session, creating the in-memory schema
    on the first call and caching the session at module level.

    BUG FIX: same broken pattern as get_app() — the original imported
    ``__session__`` into a local name, so the schema was re-initialized
    and a new session created on every call; use ``global`` instead.
    """
    global __session__
    if not __session__:
        initialize_sql(create_engine(get_sa_url()))
        __session__ = DBSession()
    return __session__
class PyramidLayer:
    """Test layer (zope.testrunner-style classmethod hooks): sets up the
    shared SQLAlchemy session and a Pyramid app once per layer."""

    def setUp(self, *args, **kwargs):
        """
        Globals registered here:
            - session: the SQLAlchemy session
            - config/app: the Pyramid testing config and its WSGI app
        """
        # SQLAlchemy initialization (in-memory schema on first use).
        self.session = get_session()
        self.config = testing.setUp(settings=get_app_infos())
        self.config.include('mobyle2.core.webserver')
        self.app = self.config.make_wsgi_app()
    setUp = classmethod(setUp)  # pre-decorator-syntax classmethod binding

    def tearDown(self):
        # Dispose the engine's connection pool.
        self.session.get_bind().dispose()
    tearDown = classmethod(tearDown)
from pyramid import testing
class PyramidTestCase(unittest.TestCase):
    """Base TestCase: fresh Pyramid testing config per test plus a paste
    TestApp wrapping the single shared WSGI application."""

    def setUp(self, *args, **kwargs):
        """
        Globals registered here:
            - wsgiapp: TestApp around the shared Pyramid WSGI application
        """
        # Share just one app instance across all tests.
        self.config = testing.setUp(settings=get_app_infos())
        self.config.include('mobyle2.core.webserver')
        self.wsgiapp = TestApp(get_app())

    def tearDown(self):
        pass
class PyramidFunctionnalLayer(PyramidLayer):
    """Functional test layer: serves the shared WSGI app in a background
    thread via paste's httpserver so tests can make real HTTP requests."""

    def setUp(self, *args, **kwargs):
        """
        Globals registered here:
            - server: the (non-blocking) paste WSGI server
            - t: the thread running the server's main loop
        """
        # Share just one app instance across all tests.
        self.wsgiapp = get_app()
        self.sinfos = get_server_infos()
        self.server = serve(self.wsgiapp,
                            self.sinfos['host'],
                            self.sinfos['port'],
                            socket_timeout=1,
                            start_loop=False,
                            )

        def mainloop():
            # Patch in a close() that also drains the thread pool
            # (module-level server_close above).
            self.server.server_close = server_close
            self.server.serve_forever()

        self.t = threading.Thread(target=mainloop)
        self.t.setDaemon(False)
        self.t.start()
    setUp = classmethod(setUp)

    def tearDown(self):
        # server_close is a plain function patched on: pass the server.
        self.server.server_close(self.server)
        self.t.join()
    tearDown = classmethod(tearDown)
class DummyRequest(testing.DummyRequest):
    """pyramid.testing DummyRequest bound to this application's traversal
    root, with a convenience message-translation helper."""

    def __init__(self, *args, **kw):
        testing.DummyRequest.__init__(self, *args, **kw)
        self.root = root.root_factory(self)

    def translate(self, string):
        # Localize through the translator registered on this request.
        localizer = get_localizer(self)
        return localizer.translate(_(string))
# vim:set et sts=4 ts=4 tw=80:
|
UTF-8
|
Python
| false | false | 2,012 |
16,784,732,193,228 |
6164eacebe6603a31ed661d3a52e143569413930
|
98c6ea9c884152e8340605a706efefbea6170be5
|
/examples/data/Assignment_8/gmdnko003/question2.py
|
2e6dcadd7f0eeab55f27ed7251aad48ba5ea3d2b
|
[] |
no_license
|
MrHamdulay/csc3-capstone
|
https://github.com/MrHamdulay/csc3-capstone
|
479d659e1dcd28040e83ebd9e3374d0ccc0c6817
|
6f0fa0fa1555ceb1b0fb33f25e9694e68b6a53d2
|
refs/heads/master
| 2021-03-12T21:55:57.781339 | 2014-09-22T02:22:22 | 2014-09-22T02:22:22 | 22,372,174 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
'''Program using recursion to count the number of paired characters in message
nkosi gumede
7 may 2014'''
listed=[]
def paired_strings(message):
    """Recursively count adjacent equal-character pairs in *message* and
    print the total as ``Number of pairs: N``.

    A matched pair consumes both characters, so "aaa" contains one pair.

    BUG FIX: the original accumulated matches in the module-level
    ``listed`` list, which was never cleared, so any call after the first
    printed a running total instead of the count for its own message.
    """
    def _pairs(msg):
        # Base case: fewer than two characters left -> no more pairs.
        if len(msg) <= 1:
            return 0
        if msg[0] == msg[1]:
            # Consume the pair and keep counting after it.
            return 1 + _pairs(msg[2:])
        # No pair at the front: slide one character forward.
        return _pairs(msg[1:])

    print("Number of pairs:", _pairs(message))
if __name__=='__main__':
    # Interactive entry point: read a message and report its pair count.
    x=input("Enter a message:\n")
    paired_strings(x)
|
UTF-8
|
Python
| false | false | 2,014 |
11,811,160,093,387 |
73c0842a4588e26f4dbad72ff5859c0c208138b8
|
a68469d1b6f64b203e687a3565fef6ef11a6b1e3
|
/dyn.py
|
168badfcea2cc6aa27cc5ed4e0468de419e946b0
|
[
"GPL-1.0-or-later",
"GPL-3.0-only"
] |
non_permissive
|
safeapp/safeapp.github.io
|
https://github.com/safeapp/safeapp.github.io
|
1f8f8e83363a7d182f440ac1ed83d5e85133f681
|
3d10d6544dbdd8a1cc549de606dcbb509e3da30e
|
refs/heads/master
| 2020-04-06T04:27:45.649553 | 2014-09-04T23:30:59 | 2014-09-04T23:30:59 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Optionally monkey-patch the stdlib with gevent's cooperative versions;
# the app still runs (with plain threads) when gevent is not installed.
try:
    import gevent.monkey
except ImportError:
    pass
else:
    gevent.monkey.patch_all()
import hashlib
import socket
import struct
import sys
import threading
import time
import sqlite3
from datetime import timedelta
from functools import update_wrapper
from flask import (Flask, request, jsonify, redirect,
make_response, request, current_app, abort,
render_template)
PROBLEM_FEEDBACK = "problem"
WOULD_USE_AGAIN_FEEDBACK = "would_use_again"
WRITTEN_FEEDBACK = "written"
# from http://flask.pocoo.org/snippets/56/
def crossdomain(origin=None, methods=None, headers=None,
                max_age=21600, attach_to_all=True,
                automatic_options=True):
    """Decorator factory that adds CORS (Access-Control-*) headers to a
    Flask view. Taken from http://flask.pocoo.org/snippets/56/.

    origin/methods/headers may be a string or an iterable of strings;
    max_age may be an int (seconds) or a timedelta. With
    automatic_options, OPTIONS preflights get Flask's default response
    plus the CORS headers.
    """
    # Normalize list-ish arguments into the comma-joined header form.
    if methods is not None:
        methods = ', '.join(sorted(x.upper() for x in methods))
    if headers is not None and not isinstance(headers, basestring):
        headers = ', '.join(x.upper() for x in headers)
    if not isinstance(origin, basestring):
        origin = ', '.join(origin)
    if isinstance(max_age, timedelta):
        max_age = max_age.total_seconds()

    def get_methods():
        # Prefer the explicit list; otherwise mirror Flask's automatic
        # OPTIONS response ("allow" header).
        if methods is not None:
            return methods
        options_resp = current_app.make_default_options_response()
        return options_resp.headers['allow']

    def decorator(f):
        def wrapped_function(*args, **kwargs):
            if automatic_options and request.method == 'OPTIONS':
                resp = current_app.make_default_options_response()
            else:
                resp = make_response(f(*args, **kwargs))
            if not attach_to_all and request.method != 'OPTIONS':
                return resp
            h = resp.headers
            h['Access-Control-Allow-Origin'] = origin
            h['Access-Control-Allow-Methods'] = get_methods()
            h['Access-Control-Max-Age'] = str(max_age)
            if headers is not None:
                h['Access-Control-Allow-Headers'] = headers
            return resp
        f.provide_automatic_options = False
        return update_wrapper(wrapped_function, f)
    return decorator
def make_app(debug=False):
kw = dict(static_folder=".",
static_url_path="/static") if debug else dict()
app = Flask(__name__, **kw)
_db_tls = threading.local()
def get_db_conn():
if not hasattr(_db_tls, "db_conn"):
_db_tls.db_conn = sqlite3.connect("submit_feedback.db")
return _db_tls.db_conn;
# init db
with get_db_conn() as conn:
conn.execute("create table if not exists would_use_again (client_id integer primary key, yesses integer, noes integer, ts integer)")
conn.execute("create table if not exists written (client_id integer primary key, written text, ts integer)")
conn.execute("create table if not exists exception_client_ids (client_id integer primary key, count integer, ts integer)")
conn.execute("create table if not exists exceptions_normalizations (id integer primary key, value text)")
conn.execute("create table if not exists exceptions (id integer primary key autoincrement, where_id integer, arch_id integer, version_id integer, target_platform_id integer, platform_id integer, value_sizes_id integer, what_id integer, exception_type_id integer, offset_stack_trace_id integer, module_id integer, client_id integer, ts integer)")
def get_client_id():
# a "client id" is a cryptographically hashed ip
# this is so we can differentiate clients without storing their
# actual origins
if 'X-Real-Ip' in request.headers:
ip = request.headers['X-Real-Ip']
else:
try:
ip = request.environ['REMOTE_ADDR']
except KeyError:
ip = socket.gethostbyname(request.environ['REMOTE_HOST'])
packed = socket.inet_aton(ip)
# okay now hash the packed ip
packed_hash = hashlib.sha256(packed).digest()[:len(packed)]
unpacked = struct.unpack("!I", packed_hash)[0]
return unpacked
@app.route('/submit_feedback', methods=["GET", "POST", "OPTIONS"])
@crossdomain(origin="http://www.getsafe.org")
def submit_feedback():
client_id = get_client_id()
json = request.get_json()
if json is None:
d = (request.form
if request.method == "POST" else
request.args)
feedback_type = d['feedback_type']
answer = d['answer']
redirect_to = d.get("redirect_to")
else:
feedback_type = json['feedback_type'];
answer = json['answer']
redirect_to = json.get("redirect_to")
if feedback_type == PROBLEM_FEEDBACK:
if answer not in ["yes", "no"]:
raise Exception("bad answer type")
if answer == "yes":
redirect_to = "report_problem"
elif feedback_type == WOULD_USE_AGAIN_FEEDBACK:
if request.method != "POST":
raise Exception("bad method")
if answer not in ["yes", "no"]:
raise Exception("bad answer type")
answer = answer == 'yes'
with get_db_conn() as conn:
conn.execute(u"""
insert or replace into would_use_again (client_id, yesses, noes, ts)
values (:client_id,
:incyes + coalesce((select yesses from would_use_again where client_id = :client_id), 0),
:incno + coalesce((select noes from would_use_again where client_id = :client_id), 0),
strftime('%s','now'))
""",
dict(client_id=client_id,
incyes=int(answer),
incno=int(not answer)))
elif feedback_type == WRITTEN_FEEDBACK:
if request.method != "POST":
raise Exception("bad method")
if not isinstance(answer, unicode):
raise Exception("bad answer type: %r", repr(answer))
with get_db_conn() as conn:
conn.execute(u"""
insert or replace into written (client_id, written, ts)
values
(:client_id,
:written,
strftime('%s', 'now'))
""",
dict(client_id=client_id,
written=answer))
else:
raise Exception("not supported")
if redirect_to is None:
return jsonify(error=False)
else:
return redirect(redirect_to)
def get_number_of_exceptions_for_client_id(cursor, client_id):
    """Return how many exception reports *client_id* has filed so far (0 if none)."""
    cursor.execute(
        "select coalesce((select count from exception_client_ids where client_id = ?), 0)",
        (client_id,))
    (count,) = cursor.fetchone()
    return count
def get_exceptions_normalization_id_from_string(cursor, text):
    """Return the id of *text* in exceptions_normalizations, inserting it if new."""
    cursor.execute("select id from exceptions_normalizations where value = ?",
                   (text,))
    row = cursor.fetchone()
    if row is not None:
        # Already normalized: reuse the existing id.
        return row[0]
    cursor.execute("insert into exceptions_normalizations (value) values (?)",
                   (text,))
    return cursor.lastrowid
# Columns of the ``exceptions`` table whose values are stored normalized:
# each name here has a matching "<name>_id" column that references a row in
# the exceptions_normalizations table.
EXCEPTIONS_COLUMNS = ["where", "arch", "version", "target_platform",
                      "platform", "value_sizes", "what", "exception_type",
                      "offset_stack_trace", "module"]
# Abuse guard: maximum number of exception reports accepted per client id.
EXCEPTION_LIMIT_PER_CLIENT = 100
@app.route('/exceptions', methods=["POST", "OPTIONS"])
@crossdomain(origin="http://www.getsafe.org")
def report_exception():
    """Store a client-submitted exception report; respond 201 with its location."""
    client_id = get_client_id()
    args = request.form
    with get_db_conn() as conn:
        cursor = conn.cursor()
        # Abuse protection: refuse clients that already filed too many reports.
        if get_number_of_exceptions_for_client_id(cursor, client_id) > EXCEPTION_LIMIT_PER_CLIENT:
            return jsonify(error=dict(message="Too many exceptions"))
        insert_values = {"client_id": client_id,
                         "ts": int(time.time())}
        # TODO: consider doing this in bulk if database performance starts degrading
        # Normalize each submitted column value to its id; missing columns
        # are stored as NULL.
        for column_name in EXCEPTIONS_COLUMNS:
            insert_values[column_name + "_id"] = (None
                                                  if column_name not in args else
                                                  get_exceptions_normalization_id_from_string(cursor, args[column_name]))
        # Build the INSERT dynamically from whatever keys were collected above.
        sql_stmt = "insert into exceptions ("
        sql_stmt += ', '.join(column_name for column_name in insert_values)
        sql_stmt += ") values ("
        sql_stmt += ', '.join(":" + column_name for column_name in insert_values)
        sql_stmt += ")"
        # insert exception into database
        cursor.execute(sql_stmt, insert_values)
        exception_id = cursor.lastrowid
        # log client_id for abuse protection
        cursor.execute("""
        insert or replace into exception_client_ids (client_id, count, ts)
        values (:client_id,
                1 + coalesce((select count from exception_client_ids where client_id = :client_id), 0),
                strftime('%s','now'))
        """,
                       dict(client_id=client_id))
    resp = make_response(jsonify(error=False,
                                 exception_id=exception_id),
                         # created
                         201)
    resp.headers['Location'] = '/exceptions/%d' % exception_id
    return resp
def denormalize_exception_row(cursor, orig_row):
    """Expand an ``exceptions`` row by resolving its *_id columns to their strings.

    Returns a dict containing all of *orig_row*'s keys plus, for every
    non-NULL "<name>_id" column, a "<name>" key holding the looked-up value.
    """
    # Map each referenced normalization id back to the column it belongs to.
    id_to_column = {}
    for column in EXCEPTIONS_COLUMNS:
        normalization_id = orig_row[column + "_id"]
        if normalization_id is not None:
            id_to_column[normalization_id] = column
    cursor.row_factory = sqlite3.Row
    cursor.execute("select id, value from exceptions_normalizations where id in ("+
                   ','.join(['?'] * len(id_to_column)) +
                   ")",
                   list(id_to_column))
    denormalized = {}
    for row in cursor:
        denormalized[id_to_column[row['id']]] = row['value']
    # Copy every original key on top so the raw columns remain available.
    for key in orig_row.keys():
        denormalized[key] = orig_row[key]
    return denormalized
def get_exception(cursor, exception_id):
    """Fetch exception *exception_id* as a denormalized dict; abort 404 if absent."""
    cursor.row_factory = sqlite3.Row
    cursor.execute("select * from exceptions where id = ?",
                   (exception_id,))
    orig_row = cursor.fetchone()
    if orig_row is None:
        abort(404)
    return denormalize_exception_row(cursor, orig_row)
@app.route('/exceptions_list', methods=["GET"])
def exceptions_list():
    """Render a paginated HTML listing of reported exceptions."""
    args = request.args
    conn = get_db_conn()
    cursor = conn.cursor()
    if 'version' in args:
        # Optional filter: only exceptions reported by a given client version.
        stmt = "select * from exceptions where version_id = (select id from exceptions_normalizations where value = ?) order by id asc"
        stmt_args = [(args['version'],)]
    else:
        stmt = "select * from exceptions order by id asc"
        stmt_args = []
    PAGE_SIZE = 50
    # 1-based page number from the query string; reject non-positive pages.
    page = int(args.get('page', 1))
    if page < 1: abort(400)
    stmt += " limit %d offset %d" % (PAGE_SIZE, (page - 1) * PAGE_SIZE)
    cursor.execute(stmt, *stmt_args)
    cursor.row_factory = sqlite3.Row
    # Second cursor so denormalization lookups don't disturb the row iteration.
    cursor2 = conn.cursor()
    exceptions = []
    for row in cursor:
        exception = denormalize_exception_row(cursor2, row)
        exception['datetime'] = time.asctime(time.gmtime(exception['ts']))
        exceptions.append(exception)
    return render_template('exceptions_list.html',
                           page_size=PAGE_SIZE,
                           page=page,
                           exceptions=exceptions)
@app.route('/exceptions/<int:exception_id>', methods=["GET"])
def print_exception(exception_id):
    """Dump one exception report in a shell-sourceable KEY="value" format."""
    conn = get_db_conn()
    cursor = conn.cursor()
    exception = get_exception(cursor, exception_id)
    # currently we only respond in a format that's easy to read
    # for shell scripts
    def escape_quote(a):
        # Escape backslashes first, then double quotes, for shell double-quoting.
        return a.replace("\\", "\\\\").replace("\"", "\\\"")
    out = ""
    for column_name in EXCEPTIONS_COLUMNS:
        if column_name not in exception: continue
        out += column_name.upper() + "=\"" + escape_quote(exception[column_name]) + "\"\n"
    out += "TIMESTAMP=" + str(exception['ts']) + "\n"
    resp = make_response(out)
    resp.headers['Content-Type'] = 'text/x-shellscript'
    return resp
if debug:
app.debug = True
return app
def run_app(app, host, port):
    """Serve *app* on host:port, preferring gevent's WSGI server when installed."""
    try:
        from gevent.wsgi import WSGIServer
    except ImportError:
        WSGIServer = None
    if WSGIServer is None:
        # gevent not available: fall back to the framework's built-in server.
        app.run(host=host, port=port)
    else:
        WSGIServer((host, port), app).serve_forever()
def main():
    """Build the application and serve it on localhost; return the exit code."""
    debug = False
    # Different port in debug mode so a dev instance can coexist with production.
    if debug:
        port = 8080
    else:
        port = 9345
    application = make_app(debug=debug)
    run_app(application, 'localhost', port)
    return 0
# Script entry point: exit the process with main()'s return code.
if __name__ == "__main__":
    sys.exit(main())
|
UTF-8
|
Python
| false | false | 2,014 |
11,957,188,967,228 |
069dd6b4b5a7ac083036273e6b4c645361db0d15
|
ecadf491f7a9976a291ef9c7fedf4a37f9055335
|
/xpcode/AST/correctos/suma.xp.tac
|
350e6adcffd5499193ee79a1344bc5f70167532f
|
[] |
no_license
|
jaescalante02/Xplode
|
https://github.com/jaescalante02/Xplode
|
40a7d18db6039c4eac5128f8db30da9d859a69ea
|
35e80a0724b1f1f06167125c9c6c253d8e494d51
|
refs/heads/master
| 2021-01-20T10:06:25.754440 | 2014-11-29T21:22:35 | 2014-11-29T21:22:35 | 21,241,878 | 1 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
#../xpcode/correctos/suma.xp
1:
INIT:
|
UTF-8
|
Python
| false | false | 2,014 |
18,313,740,570,237 |
43da6d68e2db0967b87cb5c46cb111da75c450fa
|
f2144b578293f866a8a8b7b3ff891ed07d6b47fa
|
/download_generator.py
|
4d5a4d2a9d12de271c4a63abb7da0bce401c6809
|
[] |
no_license
|
SimonCB765/LeafGAE
|
https://github.com/SimonCB765/LeafGAE
|
06cac0a90b1a00041c0507fd07de410f03b8d3e6
|
7e18a6b76324bfae0c3065ffb8e9baeeb8775202
|
refs/heads/master
| 2020-05-17T20:57:29.963579 | 2014-06-17T09:11:48 | 2014-06-17T09:11:48 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import os
import models
import logging
def main(PDBDownloadID, userInputChains):
    """Generate the chain and similarity download data for one PDBDownload request.

    Reads Data/Chains.tsv and the files under Data/Similarity (relative to this
    module), keeps the chains the user asked for plus the similarities between
    their representative groups, and stores the results on the PDBDownload
    entity.  Always returns the empty string.

    :param PDBDownloadID: datastore id of the models.PDBDownload entity to fill.
    :param userInputChains: collection of chain identifiers requested by the user.
    """
    # Setup logging for exceptions.
    logging.getLogger().setLevel(logging.DEBUG)

    # Initialised before the try block so the except handler can always report
    # how far processing got, even when a failure happens early (the original
    # code could raise NameError inside its own error handler).
    outputChains = []
    outputSimilarities = set([])
    header = ''
    try:
        # Establish the location of the data files containing the chain and
        # similarity information.
        projectDirectory = os.path.dirname(os.path.realpath(__file__))
        PDBData = os.path.join(projectDirectory, 'Data')
        chainData = os.path.join(PDBData, 'Chains.tsv')
        similarityData = os.path.join(PDBData, 'Similarity')
        similarityFiles = [os.path.join(similarityData, i) for i in os.listdir(similarityData)]

        # Extract the chains the user wants, remembering which representative
        # groups they belong to so the similarity files can be filtered below.
        representativeGroupsOfInterest = set([])
        with open(chainData, 'r') as readChainData:
            outputChains.append(readChainData.readline())  # Keep the header line.
            for line in readChainData:
                chunks = line.strip().split('\t')
                chain = chunks[0]
                if chain in userInputChains:
                    outputChains.append(line)
                    representativeGroupsOfInterest.add(chunks[6])

        # Extract the similarities where both representative groups are of interest.
        for similarityFile in similarityFiles:
            with open(similarityFile, 'r') as readSimilarities:
                header = readSimilarities.readline()  # Strip (but remember) the header.
                for line in readSimilarities:
                    chunks = line.strip().split('\t')
                    representativeGroupA = chunks[0]
                    representativeGroupB = chunks[1]
                    # NOTE(review): chunks[2] holds a similarity score that the
                    # original code parsed but never compared to any threshold;
                    # membership of both groups is the only criterion applied.
                    if (representativeGroupA in representativeGroupsOfInterest and
                            representativeGroupB in representativeGroupsOfInterest):
                        outputSimilarities.add(line)

        # Get the user request entity and record the results.
        PDBDownload = models.PDBDownload.get_by_id(PDBDownloadID)
        PDBDownload.chains = ''.join(outputChains)
        PDBDownload.similarities = header + ''.join(outputSimilarities)
        PDBDownload.finished = True
        PDBDownload.put()
    except Exception:
        # Exception (not a bare except) so SystemExit/KeyboardInterrupt still
        # propagate.  len(outputChains) - 1 excludes the header line; the
        # similarity set contains no header, so it is counted as-is (the
        # original subtracted 1 here too, under-reporting by one).
        logging.exception('PDB download {0} broke down. It contained {1} chains and {2} similarities.'.format(
            PDBDownloadID, len(outputChains) - 1, len(outputSimilarities)))
    return ''
|
UTF-8
|
Python
| false | false | 2,014 |
12,103,217,862,427 |
27daf21d053bcbbb34c7f3a72df4441574684666
|
a1a1dadd9db6bdf4639603d6697987368a197177
|
/__init__.py
|
1bdd0771a46248d3b83aaa1bdc6a408877ddba9e
|
[
"GPL-3.0-or-later"
] |
non_permissive
|
arcanosam/mutiraopython_vpython
|
https://github.com/arcanosam/mutiraopython_vpython
|
a1969a41f36424547faec52bd88cb1c00fe9fd5b
|
45912dd2dcc042718d36f9422fa6e29c7244160b
|
refs/heads/master
| 2020-05-14T15:19:12.942532 | 2014-05-23T23:41:28 | 2014-05-23T23:41:28 | 10,089,527 | 1 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# Package author metadata.
__author__ = 'SamuelTeixeira'
|
UTF-8
|
Python
| false | false | 2,014 |
15,771,119,953,824 |
2b6d9471689e35ab09014160d9accc63bf7fa748
|
aee62ef916fcc365f53b33b206613b0aa93de88d
|
/Network20q/src/chapter1/program.py
|
4bc2e349ae1d0c090a10f69b7cddba423f024890
|
[] |
no_license
|
Thomas1002/NetworkedLife
|
https://github.com/Thomas1002/NetworkedLife
|
89de6f85edd79ec21b83e5c7809f282d3dc6744e
|
a7594d45781f0513b908d2c5f68680cdc273e0f6
|
refs/heads/master
| 2016-09-05T12:11:36.625641 | 2013-03-14T11:07:20 | 2013-03-14T11:07:20 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
import calc as ca
import plotter as p
class Program:
    """Chapter-1 driver: runs the power/SIR calculation and optionally plots it."""

    def run(self, channelGains, power, noise, gamma):
        # Run the calculation for 30 steps (presumably iterations of the
        # algorithm -- confirm against calc.Calc.result).
        calc = ca.Calc(channelGains, noise, gamma)
        result = calc.result(power, 30)
        # Python 2 print statement: show the final entry of the first result series.
        print result[0][-1]
        plotter = p.Plotter()
        #plotter.plot(result[0], 'mW', 0, 2)
        #plotter.plot(result[1], 'SIR', 0.5, 3)
|
UTF-8
|
Python
| false | false | 2,013 |
19,181,323,959,029 |
c807876f84ad24a71444cb6d30bccb94fb5b4931
|
a67d4ab71a85eccebf4289ae4e29215a2cc3545e
|
/participants/views.py
|
1522f302eccc681f24299142ffe73d39505c8e50
|
[] |
no_license
|
jcar787/ice-bucket-django
|
https://github.com/jcar787/ice-bucket-django
|
613b45a0df71fbcea60fc12123a537e837ca1cd5
|
b12dd6d3862295c2be8d9d9d0a181a97f134e514
|
refs/heads/master
| 2021-01-22T15:00:17.866222 | 2014-08-28T19:01:05 | 2014-08-28T19:01:05 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from django.forms.util import ErrorList
from django.shortcuts import render, get_object_or_404
from django.views.generic import DetailView, ListView, TemplateView
from django.views.generic.edit import FormView
from .forms import ParticipantForm
from .models import Participant
from datetime import datetime
# Create your views here.
class ParticipantFormView(FormView):
    """Accepts participant submissions and validates the YouTube video URL."""
    template_name = 'participant_form.html'
    form_class = ParticipantForm

    def form_invalid(self, form):
        return super(ParticipantFormView, self).form_invalid(form)

    def form_valid(self, form):
        participant = form.save(commit=False)
        # Reduce a full YouTube URL ("...?v=<id>") to just the video id.
        tmp = participant.youtube_video.split('?')
        if len(tmp) > 1 and tmp[1][0] == 'v':
            # NOTE(review): this keeps everything after the first "=", so extra
            # query parameters (e.g. "&t=10") would end up inside the stored id
            # -- confirm whether such URLs can occur.
            participant.youtube_video = tmp[1].split('=')[1]
        else:
            # Not recognisably a YouTube watch URL: attach a field error.
            errors = form._errors.setdefault("youtube_video", ErrorList())
            errors.append(u"Not a youtube video")
            return self.form_invalid(form)
        participant.save()
        # Redirect to the per-participant success page after saving.
        self.success_url = '/success/{0}'.format(participant.id)
        return super(ParticipantFormView, self).form_valid(form)
class ParticipantDetailView(DetailView):
    """Detail page for a single participant."""
    template_name = 'participant_detail.html'
    model = Participant

    def get_context_data(self, **kwargs):
        # NOTE(review): this override adds nothing beyond the parent
        # implementation and could be removed.
        return super(ParticipantDetailView, self).get_context_data(**kwargs)
class ParticipantListView(ListView):
    """Paginated listing of participants, nine per page."""
    model = Participant
    template_name = 'participant_list.html'
    paginate_by = 9
class Template404View(TemplateView):
    """Renders the static 404 page."""
    template_name = '404.html'
|
UTF-8
|
Python
| false | false | 2,014 |
10,496,900,117,988 |
98842cc0c2193e9460b9de1353e557555d95c5d6
|
99b6b239e955b06ad01974e2bbc785da065e5dd5
|
/candy/src/widgets/reflection.py
|
becf056f04fd751ce18b6b3ff9f690ee70663dda
|
[
"LGPL-2.1-only"
] |
non_permissive
|
wangkai2014/kaa
|
https://github.com/wangkai2014/kaa
|
a6dc26b06755e0021821ab59044370d2fd8f14e6
|
3a9e75dc033e82ac7fff6716d32b0423dbcf2922
|
refs/heads/master
| 2020-04-06T04:41:24.067800 | 2011-01-25T20:33:16 | 2011-01-25T20:33:16 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
# -*- coding: iso-8859-1 -*-
# -----------------------------------------------------------------------------
# reflection.py - Reflection Widget
# -----------------------------------------------------------------------------
# $Id$
#
# -----------------------------------------------------------------------------
# kaa-candy - Third generation Canvas System using Clutter as backend
# Copyright (C) 2008-2009 Dirk Meyer, Jason Tackaberry
#
# First Version: Dirk Meyer <[email protected]>
# Maintainer: Dirk Meyer <[email protected]>
#
# Please see the file AUTHORS for a complete list of authors.
#
# This library is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License version
# 2.1 as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
#
# -----------------------------------------------------------------------------
__all__ = [ 'Reflection', 'ReflectionModifier' ]
# python imports
import logging
# kaa imports
from kaa.utils import property
# kaa.candy imports
from .. import Modifier
from .. import backend
from container import Group
# get logging object
log = logging.getLogger('kaa.candy')
class Reflection(Group):
    """
    Widget containing a widget and its reflection actor.

    The group takes over the source widget's position; the source itself is
    re-parented into the group at (0, 0) and a ReflectTexture actor is drawn
    beneath it on first render.
    """
    def __init__(self, widget, opacity):
        """
        Create new group of widget and reflection.

        @param widget: source widget (will be added to the group)
        @param opacity: opacity of the reflection.
        """
        super(Reflection, self).__init__((widget.x, widget.y))
        self.context_sensitive = widget.context_sensitive
        self.source = widget
        # The group adopts the source's position; the source moves to (0, 0)
        # inside the group.
        self.x, self.y = self.source.x, self.source.y
        self.source.x = self.source.y = 0
        self.add(self.source)
        self._reflection_opacity = opacity
        # The backend reflection actor is created lazily in _clutter_render.
        self._reflection_obj = None
        self._dynamic_size = self.source._dynamic_size

    def _clutter_render(self):
        """
        Render the widget
        """
        super(Reflection, self)._clutter_render()
        if 'size' in self._sync_properties:
            # Propagate a size change down to the source widget.
            self.source.width = self.width
            self.source.height = self.height
        if not self._reflection_obj:
            # First render: create the reflection actor for the source's
            # backend object and attach it to this group's backend object.
            self._reflection_obj = backend.ReflectTexture(self.source._obj, 0)
            self._reflection_obj.show()
            self._reflection_obj.set_opacity(self._reflection_opacity)
            self._obj.add(self._reflection_obj)

    def _clutter_sync_layout(self):
        """
        Layout the widget
        """
        # Get the source's size and set the anchor_point. We MUST do
        # this before calling super's sync function to avoid
        # triggering a re-layout when setting the anchor_point.
        width, height = self.source._obj.get_size()
        self.anchor_point = width/2, height
        super(Reflection, self)._clutter_sync_layout()
        # get source's position to set the reflection
        x, y = self.source._obj.get_position()
        # The reflection mirrors the lower half of the source.
        self._reflection_obj.set_property('reflection-height', height / 2)
        if self.subpixel_precision:
            # FIXME: this code does not respect subpixel_precision because it
            # uses the int values from the source
            self._reflection_obj.set_anchor_pointu(*self.source._obj.get_anchor_pointu())
            self._reflection_obj.set_positionu(x, y + height)
            self._reflection_obj.set_sizeu(width, height)
        else:
            # Place the reflection directly below the source with the same size.
            self._reflection_obj.set_anchor_point(*self.source._obj.get_anchor_point())
            self._reflection_obj.set_position(int(x), int(y + height))
            self._reflection_obj.set_size(int(width), int(height))

    def _candy_context_prepare(self, context):
        """
        Check if the widget is capable of the given context based on its
        dependencies.

        @param context: context dict
        @returns: False if the widget can not handle the context or True
        """
        # This widget does only depend indirect on a context. The real widget
        # inside may depend on a context and the reflection depends on the
        # widget. So we just use the widget _candy_context_prepare function here.
        return self.source._candy_context_prepare(context)

    def _candy_context_sync(self, context):
        """
        Set a new context.

        @param context: context dict
        """
        # This widget does only depend indirect on a context. The real widget
        # inside may depend on a context and the reflection depends on the
        # widget. So we just use the widget _candy_context_sync function here.
        return self.source._candy_context_sync(context)

    # width/height/intrinsic_size simply delegate to the source widget so the
    # group always matches its size.
    @property
    def width(self):
        return self.source.width

    @width.setter
    def width(self, width):
        self.source.width = width
        self._dynamic_size = self.source._dynamic_size

    @property
    def height(self):
        return self.source.height

    @height.setter
    def height(self, height):
        self.source.height = height
        self._dynamic_size = self.source._dynamic_size

    @property
    def intrinsic_size(self):
        return self.source.intrinsic_size
class ReflectionModifier(Modifier):
    """Widget modifier that wraps a widget in a Reflection group."""

    candyxml_name = 'reflection'

    def __init__(self, opacity=50):
        """Remember the opacity later applied to the reflection texture."""
        self._opacity = opacity

    def modify(self, widget):
        """Wrap *widget* in a Reflection group and return that group."""
        reflection = Reflection(widget, self._opacity)
        return reflection

    @classmethod
    def candyxml_create(cls, element):
        """
        Build the modifier from a candyxml element. Any texture based
        widget can be used as base. Example::

          <image width='100' height='100'>
              <reflection opacity='50'/>
          </image>
        """
        opacity = int(element.opacity or 50)
        return cls(opacity=opacity)
|
UTF-8
|
Python
| false | false | 2,011 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.