diff --git a/Phys/B2KShh/doc/README b/Phys/B2KShh/doc/README new file mode 100644 index 0000000..74f414c --- /dev/null +++ b/Phys/B2KShh/doc/README @@ -0,0 +1,6 @@ + +The files in the 'misc' directory are required by some of the job submission scripts. + +The .ganga.py should be placed in your home directory or, if you already have a .ganga.py, the contents of the file in this package should be added into your existing file. +The other two files should be placed in a directory which is in your path. + diff --git a/Phys/B2KShh/doc/release.notes b/Phys/B2KShh/doc/release.notes new file mode 100644 index 0000000..f330d12 --- /dev/null +++ b/Phys/B2KShh/doc/release.notes @@ -0,0 +1,12 @@ +!----------------------------------------------------------------------------- +! Package : Phys/B2KShh +! Responsible : Thomas Edward Latham +! Purpose : Collection of Bender algorithms and scripts + Ganga job +! scripts for B2KShh ntuple production +!----------------------------------------------------------------------------- + +! 2012-10-31 - Aurelien Martens + - Automatic determination of the Bender version (package has to be located in a Bender_vXXX area) + +! 
2012-09-18 - Thomas Edward Latham + - Import first version to svn diff --git a/Phys/B2KShh/job/resubmit.py b/Phys/B2KShh/job/resubmit.py new file mode 100644 index 0000000..b432b25 --- /dev/null +++ b/Phys/B2KShh/job/resubmit.py @@ -0,0 +1,26 @@ +gridProxy.renew() + +#myjobs = jobs.select(243, 260) +myjobs = jobs.select(261, 264) +stuckSubjobs = 0 +failedSubjobs = 0 +totalSubJobs = 0 +#sitesToBan = ['LCG.GRIDKA.de', 'LCG.IN2P3.fr', 'LCG.PIC.es', 'LCG.NIKHEF.nl', 'LCG.SARA.nl', 'LCG.RAL.uk'] +#sitesToBan = ['LCG.GRIDKA.de', 'LCG.IN2P3.fr', 'LCG.NIKHEF.nl'] +sitesToBan = [''] +#sitesToBan = ['LCG.NIKHEF.nl', 'LCG.IN2P3.fr', 'LCG.CSCS.ch', 'LCG.GRIDKA.de', 'LCG.CNAF.it', 'LCG.RAL.uk', 'LCG.PIC.es'] + +for (index, j) in enumerate(myjobs): + for sj in j.subjobs: + totalSubJobs += 1 + if (sj.status == "new"): + queues.add(sj.submit) + stuckSubjobs += 1 + if (sj.status == "failed"): + #sj.backend.reset() + #sj.force_status('failed', force=True) + sj.backend.settings['BannedSites'] = sitesToBan + queues.add(sj.resubmit) + failedSubjobs += 1 + +print '%s new subjobs submitted, %s failed subjobs resubmitted.' 
% (stuckSubjobs,failedSubjobs) \ No newline at end of file diff --git a/Phys/B2KShh/job/runganga_bender_Lb2V0hh_signalMC.py b/Phys/B2KShh/job/runganga_bender_Lb2V0hh_signalMC.py new file mode 100644 index 0000000..907703f --- /dev/null +++ b/Phys/B2KShh/job/runganga_bender_Lb2V0hh_signalMC.py @@ -0,0 +1,206 @@ + +eventTypeDict = { + 'Lb2pipiLambda0-sqDalitz-DecProdCut' : 15104125 + , 'Lb2KpiLambda0-sqDalitz-DecProdCut' : 15304111 + , 'Lb2KKLambda0-sqDalitz-DecProdCut' : 15104132 + , 'Lb2ppLambda0-sqDalitz-DecProdCut' : 15104142 +} + +# *************************************************************************************** # +# pick magnet polarities, stripping versions and an event type from the known types above # +# *************************************************************************************** # +#magtypes = [ 'MagDown', 'MagUp' ] +#strippings = [ 'Stripping20r1', 'Stripping20', 'Stripping20r1p2', 'Stripping20r0p2' ] +#eventtypes = eventTypeDict.keys() +magtypes = [ 'MagUp' ] +strippings = [ 'Stripping20r1p2' ] +eventtypes = [ 'Lb2pipiLambda0-sqDalitz-DecProdCut' ] +# *************************************************************************************** # + +import os +import re + +knownMagTypes = [ 'MagDown', 'MagUp' ] + +known2011StrippingVersions = [ 'Stripping20r1', 'Stripping20r1p2' ] +known2012StrippingVersions = [ 'Stripping20', 'Stripping20r0p2' ] + +nativeStrippingVersion = {} +nativeStrippingVersion['2011'] = 'Stripping20r1' +nativeStrippingVersion['2012'] = 'Stripping20' + +beamEnergy = {} +beamEnergy['2011'] = '3500' +beamEnergy['2012'] = '4000' + +nuValue = {} +nuValue['2011'] = '2' +nuValue['2012'] = '2.5' + +tck = {} +tck['2011'] = '0x40760037' +tck['2012'] = '0x409f0045' + +path = os.getcwd() +pathList = path.split('/') + +match1 = re.search( '/Bender_(v\d+r\d+[p(?=\d)]?[(?<=p)\d]*)/', path ) +match2 = re.search( 'cmtuser', path ) + +if not path.endswith('job') or 'B2KShh' not in pathList or not match1 or not match2 : + e = 
Exception('You do not appear to be in the \'job\' directory of the B2KShh package within a Bender project') + raise e + +benderVersion = match1.groups()[0] + +modulePath = path.replace('job','options') + +userReleaseArea = path[:match2.end()] + + +for eventtype in eventtypes : + + print 'Creating job(s) for event type: ' + eventtype + + if eventtype not in eventTypeDict.keys() : + e = Exception('Unknown event type: ' + eventtype ) + raise e + + evtID = eventTypeDict[ eventtype ] + + bkinfo = getBKInfo( evtID ) + bkpaths = bkinfo.keys() + + mode = eventtype.split('-')[0] + btype = mode.split('2')[0] + tracks = re.findall( 'K(?!S)|pi|p(?!i)', mode ) + track1 = tracks[0] + track2 = tracks[1] + match = re.search( 'KS|KS0|Lambda|Lambda0', mode ) + v0type = match.group(0) + if not v0type.endswith('0') : + v0type = v0type+'0' + + for stripping in strippings : + + print 'With stripping: ' + stripping + + datatype = '' + if stripping in known2011StrippingVersions: + datatype = '2011' + elif stripping in known2012StrippingVersions: + datatype = '2012' + else : + e = Exception('Unsupported Stripping version: ' + stripping) + raise e + + restrip = False + if stripping != nativeStrippingVersion[datatype] : + restrip = True + + for magtype in magtypes : + + print 'With magnet setting: ' + magtype + + if magtype not in knownMagTypes : + e = Exception('Unsupported magnet setting: ' + magtype) + raise e + + for pythiaversion in [ 6, 8 ] : + + print 'With Pythia version: %d' % pythiaversion + + datasets = [] + dddbtags = set() + condtags = set() + for simversion in [ 'Sim08a', 'Sim08b' ] : + bkpath = '/MC/%s/Beam%sGeV-%s-%s-Nu%s-Pythia%d/%s/Digi13/Trig%s/Reco14a/%sNoPrescalingFlagged/%d/ALLSTREAMS.DST' % ( datatype, beamEnergy[datatype], datatype, magtype, nuValue[datatype], pythiaversion, simversion, tck[datatype], nativeStrippingVersion[datatype], evtID ) + if bkpath not in bkpaths : + continue + print 'Trying BK path: ' + bkpath, + bkq = BKQuery( type='Path', dqflag='OK', 
path=bkpath ) + dstmp = bkq.getDataset() + if len(dstmp) != 0 : + print ' - found dataset' + datasets.append( dstmp ) + dddbtags.add( bkinfo[bkpath][0] ) + condtags.add( bkinfo[bkpath][1] ) + else : + print ' - nothing found' + + if len( datasets ) == 0 : + print 'Could not find any valid data!! Skipping this configuration!!' + continue + elif len(datasets) == 1 : + ds = datasets[0] + elif len(datasets) == 2 : + ds = datasets[0].union( datasets[1] ) + else : + e = Exception('Found more than two datasets!') + raise e + + if len( ds.files ) == 0 : + e = Exception('Zero files in this dataset!') + raise e + + if len( dddbtags ) != 1 or len( condtags ) != 1 : + e = Exception('Found multiple DB tags') + raise e + + # uncomment this if you want to run a quick test on the CERN batch + #reduced_ds = LHCbDataset() + #for file in ds.files : + #sites = file.getReplicas().keys() + #for site in sites : + #if 'CERN' in site : + #reduced_ds.files.append( file ) + #break + #if len(reduced_ds.files) > 0 : + #break + + params = {} + params = {} + params['btype'] = btype + params['track1'] = track1 + params['track2'] = track2 + params['v0type'] = v0type + params['whichMC'] = datatype + params['stripping'] = stripping + params['dddbtag'] = dddbtags.pop() + params['conddbtag'] = condtags.pop() + + moduleFile = modulePath+'/bender_Lb2V0hh_signalMC.py' + if restrip : + moduleFile = modulePath+'/bender_Lb2V0hh_stripping_signalMC.py' + + b = Bender(version=benderVersion) + b.user_release_area = userReleaseArea + b.module = File(moduleFile) + b.params = params + + j=Job() + j.name = mode+'_MC_'+datatype+'_'+magtype+'_'+stripping+'_Pythia'+str(pythiaversion) + j.application = b + j.backend = Dirac() + j.inputdata = ds + + # uncomment this if you want to run a quick test on the CERN batch + #j.backend = LSF( queue = '8nh' ) + #j.inputdata = reduced_ds + + # NB remember to change the tuple name in the Bender script to match this! 
+ tupleFile = mode+'-MC-'+datatype+'-'+magtype+'-'+stripping+'-withMCtruth.root' + + # can pick if you want the ntuple returned to you immediately (SandboxFile) or stored on the Grid (DiracFile) + #j.outputfiles = [SandboxFile(tupleFile)] + j.outputfiles = [DiracFile(tupleFile)] + + # can tweak the Dirac options if you like + #j.backend.settings['CPUTime'] = 10000 + #j.backend.settings['Destination'] = 'LCG.CERN.ch' + + # can change here the number of files you want to run over per job + j.splitter = SplitByFiles( filesPerJob = 5 ) + + #j.submit() + diff --git a/Phys/B2KShh/job/runganga_bender_bkgdMC.py b/Phys/B2KShh/job/runganga_bender_bkgdMC.py new file mode 100644 index 0000000..b4425ee --- /dev/null +++ b/Phys/B2KShh/job/runganga_bender_bkgdMC.py @@ -0,0 +1,194 @@ + +eventTypeDict = { + 'Lb2Lcpi-KS0p' : 15100009 +# , 'Bd2Kst0rho0-KS0pi0pipi' : 11100001 +# , 'Bd2pipiKS0gamma' : 11100003 +# , 'Bd2Kst0phi-KS0pi0KK' : 11100006 +# , 'Bd2etapKS0-pipigamma' : 11104301 +# , 'Bd2etapKS0-etapipi' : 11104501 +# , 'Bd2etaKS0-pipipi0' : 11104511 +# , 'Bu2Kstpipi-KS0pi' : 12100002 +# , 'Bu2Kstphi-KS0piKK' : 12100004 +# , 'Bu2D0K-KS0pipi' : 12165102 +# , 'Bu2D0pi-KS0pipi' : 12165122 +# , 'Bu2D0pi-KS0KK' : 12165131 +# , 'Bu2D0K-KS0KK' : 12165141 +# , 'Bu2Dst0K-D0pi0-KS0pipi' : 12165501 +# , 'Bu2Dst0pi-D0pi0-KS0pipi' : 12165511 +# , 'Bs2Kst0phi-KS0pi0KK' : 13100008 +# , 'Bs2KstKst-KS0piKpi0' : 13106201 +# , 'Lb2Dsp-KS0K' : 15100010 +} + +# *************************************************************************************** # +# pick magnet polarities, stripping versions and an event type from the known types above # +# *************************************************************************************** # +magtypes = [ 'MagDown', 'MagUp' ] +strippings = [ 'Stripping20r1', 'Stripping20' ] +#eventtypes = eventTypeDict.keys() +eventtypes = [ 'Lb2Lcpi-KS0p' ] +# *************************************************************************************** # + +import os +import re + 
+knownMagTypes = [ 'MagDown', 'MagUp' ] + +known2011StrippingVersions = [ 'Stripping20r1', 'Stripping20r1p1' ] +known2012StrippingVersions = [ 'Stripping20', 'Stripping20r0p1' ] + +nativeStrippingVersion = {} +nativeStrippingVersion['2011'] = 'Stripping20r1' +nativeStrippingVersion['2012'] = 'Stripping20' + +path = os.getcwd() +pathList = path.split('/') + +match1 = re.search( '/Bender_(v\d+r\d+[p(?=\d)]?[(?<=p)\d]*)/', path ) +match2 = re.search( 'cmtuser', path ) + +if not path.endswith('job') or 'B2KShh' not in pathList or not match1 or not match2 : + e = Exception('You do not appear to be in the \'job\' directory of the B2KShh package within a Bender project') + raise e + +benderVersion = match1.groups()[0] + +modulePath = path.replace('job','options') + +userReleaseArea = path[:match2.end()] + + +for eventtype in eventtypes : + + print 'Creating job(s) for event type: ' + eventtype + + if eventtype not in eventTypeDict.keys() : + e = Exception('Unknown event type: ' + eventtype ) + raise e + + evtID = eventTypeDict[ eventtype ] + + bkinfo = getBKInfo( evtID ) + bkpaths = bkinfo.keys() + + for stripping in strippings : + + print 'With stripping: ' + stripping + + datatype = '' + if stripping in known2011StrippingVersions: + datatype = '2011' + elif stripping in known2012StrippingVersions: + datatype = '2012' + else : + e = Exception('Unsupported Stripping version: ' + stripping) + raise e + + restrip = False + if stripping != nativeStrippingVersion[datatype] : + restrip = True + + for magtype in magtypes : + + print 'With magnet setting: ' + magtype + + if magtype not in knownMagTypes : + e = Exception('Unsupported magnet setting: ' + magtype) + raise e + + for pythiaversion in [ 6, 8 ] : + + print 'With Pythia version: %d' % pythiaversion + + datasets = [] + dddbtags = set() + condtags = set() + + for bkpath in bkpaths : + if ('Sim08' not in bkpath) or ('/MC/'+datatype not in bkpath) or (magtype not in bkpath) or ('Pythia'+str(pythiaversion) not in bkpath) : + 
continue + print 'Trying BK path: ' + bkpath, + bkq = BKQuery( type='Path', dqflag='OK', path=bkpath ) + dstmp = bkq.getDataset() + if len(dstmp) != 0 : + print ' - found dataset' + datasets.append( dstmp ) + dddbtags.add( bkinfo[bkpath][0] ) + condtags.add( bkinfo[bkpath][1] ) + else : + print ' - nothing found' + + if len( datasets ) == 0 : + print 'Could not find any valid data!! Skipping this configuration!!' + continue + elif len(datasets) == 1 : + ds = datasets[0] + elif len(datasets) == 2 : + ds = datasets[0].union( datasets[1] ) + else : + e = Exception('Found more than two datasets!') + raise e + + if len( ds.files ) == 0 : + e = Exception('Zero files in this dataset!') + raise e + + if len( dddbtags ) != 1 or len( condtags ) != 1 : + e = Exception('Found multiple DB tags') + raise e + + # uncomment this if you want to run a quick test on the CERN batch + #reduced_ds = LHCbDataset() + #for file in ds.files : + #sites = file.getReplicas().keys() + #for site in sites : + #if 'CERN' in site : + #reduced_ds.files.append( file ) + #break + #if len(reduced_ds.files) > 0 : + #break + + params = {} + params = {} + params['mode'] = eventtype + params['whichMC'] = datatype + params['stripping'] = stripping + params['dddbtag'] = dddbtags.pop() + params['conddbtag'] = condtags.pop() + + moduleFile = modulePath+'/bender_B2KShh_bkgdMC.py' + if restrip : + moduleFile = modulePath+'/bender_B2KShh_stripping_bkgdMC.py' + + b = Bender(version=benderVersion) + b.user_release_area = userReleaseArea + b.module = File(moduleFile) + b.params = params + + j=Job() + j.name = eventtype+'_MC_'+datatype+'_'+magtype+'_'+stripping+'_Pythia'+str(pythiaversion) + j.application = b + j.backend = Dirac() + j.inputdata = ds + + # uncomment this if you want to run a quick test on the CERN batch + #j.backend = LSF( queue = '8nh' ) + #j.inputdata = reduced_ds + + # NB remember to change the tuple name in the Bender script to match this! 
+ tupleFile = eventtype+'-MC-'+datatype+'-'+magtype+'-'+stripping+'.root' + + # can pick if you want the ntuple returned to you immediately (SandboxFile) or stored on the Grid (DiracFile) + #j.outputfiles = [SandboxFile(tupleFile)] + j.outputfiles = [DiracFile(tupleFile)] + + # can tweak the Dirac options if you like + #j.backend.settings['CPUTime'] = 10000 + #j.backend.settings['Destination'] = 'LCG.CERN.ch' + + # can change here the number of files you want to run over per job + j.splitter = SplitByFiles( filesPerJob = 5 ) + + #j.submit() + queues.add(j.submit) + diff --git a/Phys/B2KShh/job/runganga_bender_data.py b/Phys/B2KShh/job/runganga_bender_data.py new file mode 100644 index 0000000..912681a --- /dev/null +++ b/Phys/B2KShh/job/runganga_bender_data.py @@ -0,0 +1,113 @@ + +# ************************************************************************ # +# pick magnet polarities and stripping versions from the known types below # +# ************************************************************************ # +magtypes = [ 'MagDown', 'MagUp' ] +strippings = [ 'Stripping20r1', 'Stripping20' ] +# ************************************************************************ # + +import os +import re + +knownMagTypes = [ 'MagDown', 'MagUp' ] +known2011StrippingVersions = [ 'Stripping20r1', 'Stripping20r1p1' ] +known2012StrippingVersions = [ 'Stripping20', 'Stripping20r0p1' ] + +beamEnergy = {} +beamEnergy['2011'] = '3500' +beamEnergy['2012'] = '4000' + +recoVersion = {} +recoVersion['Stripping20'] = '14' + +path = os.getcwd() +pathList = path.split('/') + +match1 = re.search( '/Bender_(v\d+r\d+[p(?=\d)]?[(?<=p)\d]*)/', path ) +match2 = re.search( 'cmtuser', path ) + +if not path.endswith('job') or 'B2KShh' not in pathList or not match1 or not match2 : + e = Exception('You do not appear to be in the \'job\' directory of the B2KShh package within a Bender project') + raise e + +benderVersion = match1.groups()[0] + +modulePath = path.replace('job','options') +moduleFile = 
modulePath+'/bender_B2KShh_data.py' + +userReleaseArea = path[:match2.end()] + + +for stripping in strippings : + + datatype = '' + if stripping in known2011StrippingVersions: + datatype = '2011' + elif stripping in known2012StrippingVersions: + datatype = '2012' + else : + e = Exception('Unsupported Stripping version: ' + stripping) + raise e + + print 'Creating job(s) for stripping: ' + stripping + + for magtype in magtypes : + + if magtype not in knownMagTypes : + e = Exception('Unsupported magnet setting: ' + magtype) + raise e + + print 'With magnet setting: ' + magtype + + bkpath = '/LHCb/Collision'+datatype[2:]+'/Beam'+beamEnergy[datatype]+'GeV-VeloClosed-'+magtype+'/Real Data/Reco'+recoVersion[stripping[:11]]+'/'+stripping+'/90000000/BHADRONCOMPLETEEVENT.DST' + print 'Using BK path: ' + bkpath + bkq = BKQuery( type='Path', dqflag='OK', path=bkpath ) + ds = bkq.getDataset() + + # uncomment this if you want to run a quick test on the CERN batch + #reduced_ds = LHCbDataset() + #for file in ds.files : + #sites = file.getReplicas().keys() + #for site in sites : + #if 'CERN' in site : + #reduced_ds.files.append( file ) + #break + #if len(reduced_ds.files) > 0 : + #break + + params = {} + params['magtype'] = magtype + params['stripping'] = stripping + + b = Bender(version=benderVersion) + b.user_release_area = userReleaseArea + b.module = File(moduleFile) + b.params = params + + j=Job() + j.name = 'Collision'+datatype[2:]+'_'+magtype+'_'+stripping + j.application = b + j.backend = Dirac() + j.inputdata = ds + + # uncomment this if you want to run a quick test on the CERN batch + #j.backend = LSF( queue = '8nh' ) + #j.inputdata = reduced_ds + + # NB remember to change the tuple name in the Bender script to match this! 
+ tupleFile = 'B2KShh-Collision'+datatype[2:]+'-'+magtype+'-'+stripping+'.root' + + # can pick if you want the ntuple returned to you immediately (SandboxFile) or stored on the Grid (DiracFile) + #j.outputfiles = [SandboxFile(tupleFile)] + j.outputfiles = [DiracFile(tupleFile)] + + # can tweak the Dirac options if you like + #j.backend.settings['CPUTime'] = 10000 + #j.backend.settings['Destination'] = 'LCG.CERN.ch' + + # can change here the number of files you want to run over per job + j.splitter = SplitByFiles( filesPerJob = 25, maxFiles = -1 ) + + #j.submit() + queues.add(j.submit) + diff --git a/Phys/B2KShh/job/runganga_bender_data_WS.py b/Phys/B2KShh/job/runganga_bender_data_WS.py new file mode 100644 index 0000000..a9a4a5a --- /dev/null +++ b/Phys/B2KShh/job/runganga_bender_data_WS.py @@ -0,0 +1,96 @@ + +# ********************** +# pick a magnet polarity and stripping version +# ********************** +magtype = '' +# stripping revision allowed listed in known201(1,2)StrippingVersions below +whichStripping = '' +# ********************** + +# automatically configured +whichData = '' + +known2011StrippingVersions = [ 'Stripping20r1' ] +known2012StrippingVersions = [ 'Stripping20' ] + +if whichStripping in known2011StrippingVersions: + whichData = '2011' +elif whichStripping in known2012StrippingVersions: + whichData = '2012' +else : + e = Exception('Unknown Stripping version ' + whichStripping) + raise e + +knownDataTypes = [ '2011' ,'2012' ] +if whichData not in knownDataTypes : + e = Exception('Unknown data type') + raise e + +knownStrippingVersions = {} +knownStrippingVersions['2011'] = known2011StrippingVersions +knownStrippingVersions['2012'] = known2012StrippingVersions +if whichStripping not in knownStrippingVersions[whichData] : + e = Exception('Unknown Stripping version for data type '+whichData) + raise e + +beamEnergy = {} +beamEnergy['2011'] = '3500' +beamEnergy['2012'] = '4000' + +recoVersion = {} +recoVersion['Stripping20'] = '14' + +bkpath = 
'/LHCb/Collision'+whichData[2:]+'/Beam'+beamEnergy[whichData]+'GeV-VeloClosed-'+magtype+'/Real Data/Reco'+recoVersion[whichStripping[:11]]+'/'+whichStripping+'/90000000/BHADRON.MDST' +bkq = BKQuery( type='Path', dqflag='OK', path=bkpath ) +ds = bkq.getDataset() + +# uncomment this if you want to run a quick test on the CERN batch +#reduced_ds = LHCbDataset() +#for file in ds.files : + #sites = file.getReplicas().keys() + #for site in sites : + #if 'CERN' in site : + #reduced_ds.files.append( file ) + #break + #if len(reduced_ds.files) > 0 : + #break + + +moduleFile = 'bender_B2KShh_data_WS_'+whichStripping+'_'+magtype+'.py' +import os +nameList = os.getcwd().split('/') +for name in nameList: + if 'Bender' in name: + versionName = name.partition('_')[2] +b = Bender(version=versionName) +b.user_release_area = '$HOME/cmtuser' +b.module = File(moduleFile) +b.platform = 'x86_64-slc5-gcc46-opt' + +j=Job() +j.name = 'Collision'+whichData[2:]+'_'+magtype+'_'+whichStripping+'_WS' +j.application = b +j.backend = Dirac() +j.inputdata = ds + +# uncomment this if you want to run a quick test on the CERN batch +#j.backend = LSF( queue = '8nh' ) +#j.inputdata = reduced_ds + +# NB remember to change the tuple name in the Bender script to match this! 
+tupleFile = 'B2KShh-Collision'+whichData[2:]+'-'+magtype+'-'+whichStripping+'_WS.root' + +# can pick if you want the ntuple in your outputsandbox or outputdata +#j.outputsandbox = [tupleFile] +j.outputfiles = [SandboxFile(tupleFile)] + +# can tweak the Dirac options if you like +#j.backend.settings['CPUTime'] = 10000 +#j.backend.settings['Destination'] = 'LCG.CERN.ch' + +# can change here the number of files you want to run over per job +# NB need recent version of Ganga to use this splitter with Dirac backend - older versions need DiracSplitter +j.splitter = SplitByFiles( filesPerJob = 25, maxFiles = -1 ) + +j.submit() + diff --git a/Phys/B2KShh/job/runganga_bender_data_strip21.py b/Phys/B2KShh/job/runganga_bender_data_strip21.py new file mode 100644 index 0000000..988ac4b --- /dev/null +++ b/Phys/B2KShh/job/runganga_bender_data_strip21.py @@ -0,0 +1,112 @@ + +# ************************************************************************ # +# pick magnet polarities and stripping versions from the known types below # +# ************************************************************************ # +magtypes = [ 'MagDown', 'MagUp' ] +strippings = [ 'Stripping21r1', 'Stripping21' ] +# ************************************************************************ # + +import os +import re + +knownMagTypes = [ 'MagDown', 'MagUp' ] +known2011StrippingVersions = [ 'Stripping21r1' ] +known2012StrippingVersions = [ 'Stripping21' ] + +beamEnergy = {} +beamEnergy['2011'] = '3500' +beamEnergy['2012'] = '4000' + +recoVersion = {} +recoVersion['Stripping21'] = '14' + +path = os.getcwd() +pathList = path.split('/') + +match1 = re.search( '/Bender_(v\d+r\d+[p(?=\d)]?[(?<=p)\d]*)/', path ) +match2 = re.search( 'cmtuser', path ) + +if not path.endswith('job') or 'B2KShh' not in pathList or not match1 or not match2 : + e = Exception('You do not appear to be in the \'job\' directory of the B2KShh package within a Bender project') + raise e + +benderVersion = match1.groups()[0] + +modulePath = 
path.replace('job','options') +moduleFile = modulePath+'/bender_B2KShh_data_strip21.py' + +userReleaseArea = path[:match2.end()] + + +for stripping in strippings : + + datatype = '' + if stripping in known2011StrippingVersions: + datatype = '2011' + elif stripping in known2012StrippingVersions: + datatype = '2012' + else : + e = Exception('Unsupported Stripping version: ' + stripping) + raise e + + print 'Creating job(s) for stripping: ' + stripping + + for magtype in magtypes : + + if magtype not in knownMagTypes : + e = Exception('Unsupported magnet setting: ' + magtype) + raise e + + print 'With magnet setting: ' + magtype + + bkpath = '/LHCb/Collision'+datatype[2:]+'/Beam'+beamEnergy[datatype]+'GeV-VeloClosed-'+magtype+'/Real Data/Reco'+recoVersion[stripping[:11]]+'/'+stripping+'/90000000/BHADRON.MDST' + print 'Using BK path: ' + bkpath + bkq = BKQuery( type='Path', dqflag='OK', path=bkpath ) + ds = bkq.getDataset() + + # uncomment this if you want to run a quick test on the CERN batch + #reduced_ds = LHCbDataset() + #for file in ds.files : + #sites = file.getReplicas().keys() + #for site in sites : + #if 'CERN' in site : + #reduced_ds.files.append( file ) + #break + #if len(reduced_ds.files) > 0 : + #break + + params = {} + params['magtype'] = magtype + params['stripping'] = stripping + + b = Bender(version=benderVersion) + b.user_release_area = userReleaseArea + b.module = File(moduleFile) + b.params = params + + j=Job() + j.name = 'Collision'+datatype[2:]+'_'+magtype+'_'+stripping + j.application = b + j.backend = Dirac() + j.inputdata = ds + + # uncomment this if you want to run a quick test on the CERN batch + #j.backend = LSF( queue = '8nh' ) + #j.inputdata = reduced_ds + + # NB remember to change the tuple name in the Bender script to match this! 
+ tupleFile = 'B2KShh-Collision'+datatype[2:]+'-'+magtype+'-'+stripping+'.root' + + # can pick if you want the ntuple returned to you immediately (SandboxFile) or stored on the Grid (DiracFile) + #j.outputfiles = [SandboxFile(tupleFile)] + j.outputfiles = [DiracFile(tupleFile)] + + # can tweak the Dirac options if you like + #j.backend.settings['CPUTime'] = 10000 + #j.backend.settings['Destination'] = 'LCG.CERN.ch' + + # can change here the number of files you want to run over per job + j.splitter = SplitByFiles( filesPerJob = 25, maxFiles = -1 ) + + #j.submit() + diff --git a/Phys/B2KShh/job/runganga_bender_mc11a_genMC.py b/Phys/B2KShh/job/runganga_bender_mc11a_genMC.py new file mode 100644 index 0000000..d4cb2ed --- /dev/null +++ b/Phys/B2KShh/job/runganga_bender_mc11a_genMC.py @@ -0,0 +1,74 @@ + +eventTypeDict = { + 'Bd2pipiKS-sqDalitz' : 11104122 + , 'Bd2KpiKS-sqDalitz' : 11304162 + , 'Bd2KKKS-sqDalitz' : 11104112 + , 'Bs2pipiKS-sqDalitz' : 13104122 + , 'Bs2KpiKS-sqDalitz' : 13304102 + , 'Bs2KKKS-sqDalitz' : 13104132 +} + +# ************************************* +# pick a magnet polarity and event type +# ************************************* +mode = '' +magtype = '' +# ************************************* +# ************************************* +# ************************************* + +whichMC = 'MC11a' +evtID = eventTypeDict[ mode ] +bkpath = '/MC/'+whichMC+'/Beam3500GeV-2011-'+magtype+'-Fix1-EmNoCuts/Gen05d/'+str(evtID)+'/XGEN' +bkq = BKQuery( type='Path', dqflag='OK', path=bkpath ) +ds = bkq.getDataset() + +# uncomment this if you want to run a quick test on the CERN batch +#reduced_ds = LHCbDataset() +#for file in ds.files : + #sites = file.getReplicas().keys() + #for site in sites : + #if 'CERN' in site : + #reduced_ds.files.append( file ) + #break + #if len(reduced_ds.files) > 0 : + #break + + +moduleFile = 'bender_B2KShh_generatorMC_'+magtype+'.py' +import os +nameList = os.getcwd().split('/') +for name in nameList: + if 'Bender' in name: + 
versionName = name.partition('_')[2] +b = Bender(version=versionName) +b.user_release_area = '$HOME/cmtuser' +b.module = File(moduleFile) + +j=Job() +j.name = mode+'_'+whichMC+'_'+magtype +j.application = b +j.backend = Dirac() +j.inputdata = ds + +# uncomment this if you want to run a quick test on the CERN batch +#j.backend = LSF( queue = '8nh' ) +#j.inputdata = reduced_ds + +# NB remember to change the tuple name in the Bender script to match this! +tupleFile = mode+'-'+whichMC[0:4]+'-'+magtype+'-genMCTruth.root' + +# can pick if you want the ntuple in your outputsandbox or outputdata +#j.outputsandbox = [tupleFile] +j.outputdata = [tupleFile] + +# can tweak the Dirac options if you like +#j.backend.settings['CPUTime'] = 10000 +#j.backend.settings['Destination'] = 'LCG.CERN.ch' + +# can change here the number of files you want to run over per job +# NB need recent version of Ganga to use this splitter with Dirac backend - older versions need DiracSplitter +j.splitter = SplitByFiles( filesPerJob = 10 ) + +#j.submit() + diff --git a/Phys/B2KShh/job/runganga_bender_signalMC.py b/Phys/B2KShh/job/runganga_bender_signalMC.py new file mode 100644 index 0000000..f5b8b02 --- /dev/null +++ b/Phys/B2KShh/job/runganga_bender_signalMC.py @@ -0,0 +1,207 @@ + +eventTypeDict = { + 'Lb2ppiKS0-sqDalitz-DecProdCut' : 15104115 + , 'Lb2pKKS0-sqDalitz-DecProdCut' : 15104105 + , 'Xib02ppiKS0-sqDalitz-DecProdCut' : 16104143 + , 'Xib02pKKS0-sqDalitz-DecProdCut' : 16104144 +} + +# *************************************************************************************** # +# pick magnet polarities, stripping versions and an event type from the known types above # +# *************************************************************************************** # +magtypes = [ 'MagDown', 'MagUp' ] +strippings = [ 'Stripping20r1', 'Stripping20' ] +#eventtypes = [ 'Bd2pipiKS0-sqDalitz-DecProdCut', 'Bd2KpiKS0-sqDalitz-DecProdCut', 'Bd2KKKS0-sqDalitz-DecProdCut' ] +eventtypes = eventTypeDict.keys() +# 
*************************************************************************************** # + +import os +import re + +knownMagTypes = [ 'MagDown', 'MagUp' ] + +known2011StrippingVersions = [ 'Stripping20r1', 'Stripping20r1p1' ] +known2012StrippingVersions = [ 'Stripping20', 'Stripping20r0p1' ] + +nativeStrippingVersion = {} +nativeStrippingVersion['2011'] = 'Stripping20r1' +nativeStrippingVersion['2012'] = 'Stripping20' + +beamEnergy = {} +beamEnergy['2011'] = '3500' +beamEnergy['2012'] = '4000' + +nuValue = {} +nuValue['2011'] = '2' +nuValue['2012'] = '2.5' + +tck = {} +tck['2011'] = '0x40760037' +tck['2012'] = '0x409f0045' + +path = os.getcwd() +pathList = path.split('/') + +match1 = re.search( '/Bender_(v\d+r\d+[p(?=\d)]?[(?<=p)\d]*)/', path ) +match2 = re.search( 'cmtuser', path ) + +if not path.endswith('job') or 'B2KShh' not in pathList or not match1 or not match2 : + e = Exception('You do not appear to be in the \'job\' directory of the B2KShh package within a Bender project') + raise e + +benderVersion = match1.groups()[0] + +modulePath = path.replace('job','options') + +userReleaseArea = path[:match2.end()] + + +for eventtype in eventtypes : + + print 'Creating job(s) for event type: ' + eventtype + + if eventtype not in eventTypeDict.keys() : + e = Exception('Unknown event type: ' + eventtype ) + raise e + + evtID = eventTypeDict[ eventtype ] + + bkinfo = getBKInfo( evtID ) + bkpaths = bkinfo.keys() +# + #print bkinfo + + mode = eventtype.split('-')[0] + btype = mode.split('2')[0] + tracks = re.findall( 'K(?!S)|pi|p(?!i)', mode ) + track1 = tracks[0] + track2 = tracks[1] + match = re.search( 'KS|KS0|Lambda|Lambda0', mode ) + v0type = match.group(0) + if not v0type.endswith('0') : + v0type = v0type+'0' + + for stripping in strippings : + + print 'With stripping: ' + stripping + + datatype = '' + if stripping in known2011StrippingVersions: + datatype = '2011' + elif stripping in known2012StrippingVersions: + datatype = '2012' + else : + e = 
Exception('Unsupported Stripping version: ' + stripping) + raise e + + restrip = False + if stripping != nativeStrippingVersion[datatype] : + restrip = True + + for magtype in magtypes : + + print 'With magnet setting: ' + magtype + + if magtype not in knownMagTypes : + e = Exception('Unsupported magnet setting: ' + magtype) + raise e + + for pythiaversion in [ 6, 8 ] : + + print 'With Pythia version: %d' % pythiaversion + + datasets = [] + dddbtags = set() + condtags = set() + for simversion in [ 'Sim08a', 'Sim08b', 'Sim08c', 'Sim08d', 'Sim08e', 'Sim08f', 'Sim08g', 'Sim08h' ] : + bkpath = '/MC/%s/Beam%sGeV-%s-%s-Nu%s-Pythia%d/%s/Digi13/Trig%s/Reco14a/%sNoPrescalingFlagged/%d/ALLSTREAMS.DST' % ( datatype, beamEnergy[datatype], datatype, magtype, nuValue[datatype], pythiaversion, simversion, tck[datatype], nativeStrippingVersion[datatype], evtID ) + if bkpath not in bkpaths : + #print bkpath + continue + print 'Trying BK path: ' + bkpath, + bkq = BKQuery( type='Path', dqflag='OK', path=bkpath ) + dstmp = bkq.getDataset() + if len(dstmp) != 0 : + print ' - found dataset' + datasets.append( dstmp ) + dddbtags.add( bkinfo[bkpath][0] ) + condtags.add( bkinfo[bkpath][1] ) + else : + print ' - nothing found' + + if len( datasets ) == 0 : + print 'Could not find any valid data!! Skipping this configuration!!' 
+ continue + elif len(datasets) == 1 : + ds = datasets[0] + elif len(datasets) == 2 : + ds = datasets[0].union( datasets[1] ) + else : + e = Exception('Found more than two datasets!') + raise e + + if len( ds.files ) == 0 : + e = Exception('Zero files in this dataset!') + raise e + + if len( dddbtags ) != 1 or len( condtags ) != 1 : + e = Exception('Found multiple DB tags') + raise e + + # uncomment this if you want to run a quick test on the CERN batch + #reduced_ds = LHCbDataset() + #for file in ds.files : + #sites = file.getReplicas().keys() + #for site in sites : + #if 'CERN' in site : + #reduced_ds.files.append( file ) + #break + #if len(reduced_ds.files) > 0 : + #break + + params = {} + params = {} + params['btype'] = btype + params['track1'] = track1 + params['track2'] = track2 + params['v0type'] = v0type + params['whichMC'] = datatype + params['stripping'] = stripping + params['dddbtag'] = dddbtags.pop() + params['conddbtag'] = condtags.pop() + + moduleFile = modulePath+'/bender_B2KShh_signalMC.py' + if restrip : + moduleFile = modulePath+'/bender_B2KShh_stripping_signalMC.py' + + b = Bender(version=benderVersion) + b.user_release_area = userReleaseArea + b.module = File(moduleFile) + b.params = params + + j=Job() + j.name = mode+'_MC_'+datatype+'_'+magtype+'_'+stripping+'_Pythia'+str(pythiaversion) + j.application = b + j.backend = Dirac() + j.inputdata = ds + + # uncomment this if you want to run a quick test on the CERN batch + #j.backend = LSF( queue = '8nh' ) + #j.inputdata = reduced_ds + + # NB remember to change the tuple name in the Bender script to match this! 
+ tupleFile = mode+'-MC-'+datatype+'-'+magtype+'-'+stripping+'-withMCtruth.root' + + # can pick if you want the ntuple returned to you immediately (SandboxFile) or stored on the Grid (DiracFile) + #j.outputfiles = [SandboxFile(tupleFile)] + j.outputfiles = [DiracFile(tupleFile)] + + # can tweak the Dirac options if you like + #j.backend.settings['CPUTime'] = 10000 + #j.backend.settings['Destination'] = 'LCG.CERN.ch' + + # can change here the number of files you want to run over per job + j.splitter = SplitByFiles( filesPerJob = 5 ) + + j.submit() + diff --git a/Phys/B2KShh/job/runganga_bender_signalMC_S21.py b/Phys/B2KShh/job/runganga_bender_signalMC_S21.py new file mode 100644 index 0000000..d260768 --- /dev/null +++ b/Phys/B2KShh/job/runganga_bender_signalMC_S21.py @@ -0,0 +1,194 @@ + +eventTypeDict = { + 'Bd2pipiKS0-sqDalitz-DecProdCut' : 11104124 + , 'Bd2KpiKS0-sqDalitz-DecProdCut' : 11304161 + , 'Bd2KKKS0-sqDalitz-DecProdCut' : 11104114 + , 'Bs2pipiKS0-sqDalitz-DecProdCut' : 13104124 + , 'Bs2KpiKS0-sqDalitz-DecProdCut' : 13304104 + , 'Bs2KKKS0-sqDalitz-DecProdCut' : 13104134 +} + +# *************************************************************************************** # +# pick magnet polarities, stripping versions and an event type from the known types above # +# *************************************************************************************** # +magtypes = [ 'MagDown', 'MagUp' ] +strippings = [ 'Stripping21r1', 'Stripping21' ] +eventtypes = eventTypeDict.keys() +# *************************************************************************************** # + +import os +import re + +knownMagTypes = [ 'MagDown', 'MagUp' ] + +known2011StrippingVersions = [ 'Stripping21r1' ] +known2012StrippingVersions = [ 'Stripping21' ] + +nativeStrippingVersion = {} +nativeStrippingVersion['2011'] = 'Stripping20r1' +nativeStrippingVersion['2012'] = 'Stripping20' + +beamEnergy = {} +beamEnergy['2011'] = '3500' +beamEnergy['2012'] = '4000' + +nuValue = {} +nuValue['2011'] = 
'2' +nuValue['2012'] = '2.5' + +tck = {} +tck['2011'] = '0x40760037' +tck['2012'] = '0x409f0045' + +#baseDir = '/data/lhcb/phsdba/B2KShh/Sim08-MC/Signal/S21-DSTs' +baseDir = '/eos/lhcb/wg/BnoC/B2KShh/S21-SignalMC-DSTs' + +path = os.getcwd() +pathList = path.split('/') + +match1 = re.search( '/Bender_(v\d+r\d+[p(?=\d)]?[(?<=p)\d]*)/', path ) +match2 = re.search( 'cmtuser', path ) + +if not path.endswith('job') or 'B2KShh' not in pathList or not match1 or not match2 : + e = Exception('You do not appear to be in the \'job\' directory of the B2KShh package within a Bender project') + raise e + +benderVersion = match1.groups()[0] + +modulePath = path.replace('job','options') + +userReleaseArea = path[:match2.end()] + + +for eventtype in eventtypes : + + print 'Creating job(s) for event type: ' + eventtype + + if eventtype not in eventTypeDict.keys() : + e = Exception('Unknown event type: ' + eventtype ) + raise e + + evtID = eventTypeDict[ eventtype ] + + bkinfo = getBKInfo( evtID ) + bkpaths = bkinfo.keys() + + mode = eventtype.split('-')[0] + btype = mode.split('2')[0] + tracks = re.findall( 'K(?!S)|pi|p(?!i)', mode ) + track1 = tracks[0] + track2 = tracks[1] + match = re.search( 'KS|KS0|Lambda|Lambda0', mode ) + v0type = match.group(0) + if not v0type.endswith('0') : + v0type = v0type+'0' + + for stripping in strippings : + + print 'With stripping: ' + stripping + + datatype = '' + if stripping in known2011StrippingVersions: + datatype = '2011' + elif stripping in known2012StrippingVersions: + datatype = '2012' + else : + e = Exception('Unsupported Stripping version: ' + stripping) + raise e + + for magtype in magtypes : + + print 'With magnet setting: ' + magtype + + if magtype not in knownMagTypes : + e = Exception('Unsupported magnet setting: ' + magtype) + raise e + + for pythiaversion in [ 6, 8 ] : + + print 'With Pythia version: %d' % pythiaversion + + # still need to search the bookkeeping to check that this is a valid configuration and to get the DB tags + 
datasets = [] + dddbtags = set() + condtags = set() + for simversion in [ 'Sim08a', 'Sim08b' ] : + bkpath = '/MC/%s/Beam%sGeV-%s-%s-Nu%s-Pythia%d/%s/Digi13/Trig%s/Reco14a/%sNoPrescalingFlagged/%d/ALLSTREAMS.DST' % ( datatype, beamEnergy[datatype], datatype, magtype, nuValue[datatype], pythiaversion, simversion, tck[datatype], nativeStrippingVersion[datatype], evtID ) + if bkpath not in bkpaths : + continue + print 'Trying BK path: ' + bkpath, + bkq = BKQuery( type='Path', dqflag='OK', path=bkpath ) + dstmp = bkq.getDataset() + if len(dstmp) != 0 : + print ' - found dataset' + datasets.append( dstmp ) + dddbtags.add( bkinfo[bkpath][0] ) + condtags.add( bkinfo[bkpath][1] ) + else : + print ' - nothing found' + + if len( datasets ) == 0 : + print 'Could not find any valid data!! Skipping this configuration!!' + continue + elif len(datasets) == 1 : + ds = datasets[0] + elif len(datasets) == 2 : + ds = datasets[0].union( datasets[1] ) + else : + e = Exception('Found more than two datasets!') + raise e + + if len( ds.files ) == 0 : + e = Exception('Zero files in this dataset!') + raise e + + if len( dddbtags ) != 1 or len( condtags ) != 1 : + e = Exception('Found multiple DB tags') + raise e + + s21_ds = LHCbDataset() + for file in ds.files : + s21_file_name = baseDir + '/' + '%s_MC_%s_%s_Pythia%d' % ( mode, datatype, magtype, pythiaversion ) + '/' + os.path.basename( file.name ) + s21_file = PhysicalFile( s21_file_name ) + s21_ds.files.append( s21_file ) + + params = {} + params = {} + params['btype'] = btype + params['track1'] = track1 + params['track2'] = track2 + params['v0type'] = v0type + params['whichMC'] = datatype + params['stripping'] = stripping + params['dddbtag'] = dddbtags.pop() + params['conddbtag'] = condtags.pop() + + moduleFile = modulePath+'/bender_B2KShh_signalMC_S21.py' + + b = Bender(version=benderVersion) + b.user_release_area = userReleaseArea + b.module = File(moduleFile) + b.params = params + + j=Job() + j.name = 
mode+'_MC_'+datatype+'_'+magtype+'_'+stripping+'_Pythia'+str(pythiaversion) + j.application = b + j.backend = LSF() + j.inputdata = s21_ds + + # NB remember to change the tuple name in the Bender script to match this! + tupleFile = mode+'-MC-'+datatype+'-'+magtype+'-'+stripping+'-withMCtruth.root' + + # can pick if you want the ntuple returned to you immediately (LocalFile) or stored on the Grid (DiracFile) + j.outputfiles = [LocalFile(tupleFile)] + #j.outputfiles = [DiracFile(tupleFile)] + + # can tweak the Dirac options if you like + #j.backend.settings['CPUTime'] = 10000 + #j.backend.settings['Destination'] = 'LCG.CERN.ch' + + # can change here the number of files you want to run over per job + #j.splitter = SplitByFiles( filesPerJob = 5 ) + + #j.submit() + diff --git a/Phys/B2KShh/job/runganga_bender_validation_data.py b/Phys/B2KShh/job/runganga_bender_validation_data.py new file mode 100644 index 0000000..5d95007 --- /dev/null +++ b/Phys/B2KShh/job/runganga_bender_validation_data.py @@ -0,0 +1,33 @@ + +bkpath = '/validation/Collision12/Beam4000GeV-VeloClosed-MagDown/Real Data/Reco14/Stripping20/90000000/BHADRONCOMPLETEEVENT.DST' +bkq = BKQuery( type='Path', path=bkpath ) +ds = bkq.getDataset() + +moduleFile = '$HOME/cmtuser/Bender_v21r2/Phys/B2KShh/options/bender_B2KShh_data.py' +b = Bender(version="v21r2") +b.user_release_area = '$HOME/cmtuser' +b.module = File(moduleFile) + +j=Job() +j.name = 'Stripping20-Validation' +j.application = b +j.backend = Dirac() +j.inputdata = ds + +# make sure this matches what's in the Bender options! 
+tupleFile = 'B2KShh-Collision12-MagDown-Stripping20.root' + +# can pick if you want the ntuple in your outputsandbox or outputdata +#j.outputsandbox = [tupleFile] +j.outputdata = [tupleFile] + +# can tweak the Dirac options if you like +#j.backend.settings['CPUTime'] = 10000 +#j.backend.settings['Destination'] = 'LCG.CERN.ch' + +# can change here the number of files you want to run over per job +# NB need recent version of Ganga to use this splitter with Dirac backend - older versions need DiracSplitter +j.splitter = SplitByFiles( filesPerJob = 5 ) + +j.submit() + diff --git a/Phys/B2KShh/misc/dirac-bookkeeping-get-prodinfo-eventtype.py b/Phys/B2KShh/misc/dirac-bookkeeping-get-prodinfo-eventtype.py new file mode 100755 index 0000000..c21c3c1 --- /dev/null +++ b/Phys/B2KShh/misc/dirac-bookkeeping-get-prodinfo-eventtype.py @@ -0,0 +1,139 @@ +#!/usr/bin/env python +""" +get productions information for eventtype + + +""" + +import DIRAC +from DIRAC.Core.Base import Script + +Script.registerSwitch( '', 'FileType=', 'FileType to search [ALLSTREAMS.DST]' ) + +Script.setUsageMessage( '\n'.join( [ __doc__.split( '\n' )[1], + 'Usage:', + ' %s [option] eventType ' % Script.scriptName ] ) ) +fileType = 'ALLSTREAMS.DST' +Script.parseCommandLine( ignoreErrors = True ) + +for switch in Script.getUnprocessedSwitches(): + if switch[0] == "FileType" : + fileType = str(switch[1]) + +eventType = Script.getPositionalArgs()[0] + + +from DIRAC import gLogger, gConfig, S_OK, S_ERROR +from LHCbDIRAC.BookkeepingSystem.Client.BookkeepingClient import BookkeepingClient +from DIRAC.Core.DISET.RPCClient import RPCClient +from LHCbDIRAC.TransformationSystem.Client.TransformationClient import TransformationClient + +import pprint + +class Production(): + """ + Simple helper emulating old Production Client + """ + + + def __init__( self ): + """Instantiates the Workflow object and some default parameters. 
+ """ + + self.transClient = TransformationClient() + + def getParameters( self, prodID, pname = '' ): + """Get a production parameter or all of them if no parameter name specified. + """ + + result = self.transClient.getTransformation( int( prodID ), True ) + if not result['OK']: + gLogger.error( result ) + return S_ERROR( 'Could not retrieve parameters for production %s' % prodID ) + + if not result['Value']: + gLogger.info( result ) + return S_ERROR( 'No additional parameters available for production %s' % prodID ) + + if pname: + if result['Value'].has_key( pname ): + return S_OK( result['Value'][pname] ) + else: + gLogger.verbose( result ) + return S_ERROR( 'Production %s does not have parameter %s' % ( prodID, pname ) ) + + return result + + +bk = RPCClient( 'Bookkeeping/BookkeepingManager' ) +pr = RPCClient( 'ProductionManagement/ProductionRequest' ) + +res = bk.getProductionSummaryFromView({'EventType':eventType,'Visible':True}) +if not res['OK']: + print res['Message'] + DIRAC.exit() +prods = res['Value'] + +bkClient = BookkeepingClient() +prClient = Production() + +prodIDs = [ prod['Production'] for prod in prods] +prodIDs.sort() + +for prodID in prodIDs: + res = bkClient.getProductionInformations(prodID) + nof = 0 + noe = 0 + if res['OK']: + value = res['Value'] + path = value['Path'].split("\n")[1] + for nf in value['Number of files']: + if nf[1] == fileType: + nof = nf[0] + for ne in value['Number of events']: + if ne[0] == fileType: + noe = ne[1] + #print nof, noe, + + DDDBTag = "" + CondDBTag = "" + res = prClient.getParameters(prodID,'DDDBTag') + if res['OK']: + DDDBTag = res['Value'] + res = prClient.getParameters(prodID,'CondDBTag') + if res['OK']: + CondDBTag = res['Value'], + if not (DDDBTag and CondDBTag): + res = prClient.getParameters(prodID,'BKInputQuery') + if res['OK']: + simprodID = eval(res['Value']).get('ProductionID',0) + else: + res = pr.getProductionList() + if res['OK']: + simprodID = res['Value'][0] + if simprodID: + res = 
prClient.getParameters(simprodID,'DDDBTag') + if res['OK']: + DDDBTag = res['Value'] + res = prClient.getParameters(simprodID,'CondDBTag') + if res['OK']: + CondDBTag = res['Value'] + + if not (DDDBTag and CondDBTag): + res = prClient.getParameters(simprodID,'BKProcessingPass') + if res['OK']: + value = res['Value'] + dBKpp = eval(value) + step0 = dBKpp['Step0'] + if not DDDBTag: + DDDBTag = step0['DDDb'] + if not CondDBTag: + CondDBTag = step0['CondDb'] + if isinstance(CondDBTag,(tuple,list)): + CondDBTag = CondDBTag[0] + if isinstance(DDDBTag,(tuple,list)): + DDDBTag = DDDBTag[0] + #print DDDBTag,CondDBTag + + #print DDDBTag,CondDBTag + print ( path, DDDBTag, CondDBTag, nof, noe) diff --git a/Phys/B2KShh/misc/get_bookkeeping_info b/Phys/B2KShh/misc/get_bookkeeping_info new file mode 100755 index 0000000..8b454d5 --- /dev/null +++ b/Phys/B2KShh/misc/get_bookkeeping_info @@ -0,0 +1,13 @@ +#!/bin/zsh -f +# ============================================================================= +## @file get_grid_url +# simple script to get GRID URL for certains LFN(s) +# ============================================================================= + +. `which SetupProject.sh` LHCbDirac >&! 
/dev/null + +dirac-bookkeeping-get-prodinfo-eventtype.py $* + +# ============================================================================= +# The END +# ============================================================================= diff --git a/Phys/B2KShh/options/ReStripMC-S21/DV-RedoCaloPID-Stripping21.py b/Phys/B2KShh/options/ReStripMC-S21/DV-RedoCaloPID-Stripping21.py new file mode 100644 index 0000000..13025b8 --- /dev/null +++ b/Phys/B2KShh/options/ReStripMC-S21/DV-RedoCaloPID-Stripping21.py @@ -0,0 +1,68 @@ +#================================================================ +# Rerun PID Reco + Remake ProtoParticles +#================================================================ +from Configurables import ( DaVinci, RecSysConf, GaudiSequencer, + ProcessPhase, PhysConf ) + +from STTools import STOfflineConf +STOfflineConf.DefaultConf().configureTools() + +# Create the top level Conf object and set some general options from DV +rConf = RecSysConf("RecSysConf") +DaVinci().setOtherProps(rConf,["Simulation","DataType"]) + +# Only run PID + Protoparticles +rConf.RecoSequence = ["CALO","PROTO"] +rConf.SkipTracking = True +PhysConf().CaloReProcessing = True + +# list of algs to prepend to DV +palgs = [ ] + +# Create the Reco process phase +reco = ProcessPhase("Reco") +palgs += [reco] + +# Re-pack the new CALO output +from Configurables import CaloDstPackConf +caloPackSeq = GaudiSequencer("CaloPacking") +caloPack = CaloDstPackConf ( Enable = True ) +caloPack.Sequence = caloPackSeq +caloPack.AlwaysCreate = True +caloPack.EnableChecks = False +caloPack.ClearRegistry = False +palgs += [caloPackSeq] + +# Pack the new ProtoParticles +from Configurables import PackProtoParticle +packChargedPs = PackProtoParticle( name = "PackChargedProtos", + AlwaysCreateOutput = True, + ClearRegistry = False, + InputName = "/Event/Rec/ProtoP/Charged", + OutputName = "/Event/pRec/ProtoP/Charged", + EnableCheck = False ) +packNeutralPs = PackProtoParticle( name = "PackNeutralProtos", + 
AlwaysCreateOutput = True, + ClearRegistry = False, + InputName = "/Event/Rec/ProtoP/Neutrals", + OutputName = "/Event/pRec/ProtoP/Neutrals", + EnableCheck = False ) +palgs += [packChargedPs,packNeutralPs] + +# Add to the start of the DV main sequence +#DaVinci().prependToMainSequence(palgs) +from Configurables import GaudiSequencer +init = GaudiSequencer("PhysInitSeq") +init.Members+= palgs + +# Disable on-demand unpacking of locations created by the above +from Gaudi.Configuration import appendPostConfigAction +def removeUnpacking(): + from Configurables import DataOnDemandSvc, CaloProcessor + DataOnDemandSvc().AlgMap.pop( "/Event/Rec/ProtoP/Neutrals", None ) + DataOnDemandSvc().AlgMap.pop( "/Event/Rec/ProtoP/Charged", None ) + DataOnDemandSvc().AlgMap.pop( "/Event/Rec/Calo/Electrons", None ) + DataOnDemandSvc().AlgMap.pop( "/Event/Rec/Calo/Photons", None ) + DataOnDemandSvc().AlgMap.pop( "/Event/Rec/Calo/MergedPi0s", None ) + DataOnDemandSvc().AlgMap.pop( "/Event/Rec/Calo/SplitPhotons", None ) +appendPostConfigAction( removeUnpacking ) diff --git a/Phys/B2KShh/options/ReStripMC-S21/DV-Stripping21-Stripping-MC.py b/Phys/B2KShh/options/ReStripMC-S21/DV-Stripping21-Stripping-MC.py new file mode 100644 index 0000000..c68a23b --- /dev/null +++ b/Phys/B2KShh/options/ReStripMC-S21/DV-Stripping21-Stripping-MC.py @@ -0,0 +1,121 @@ +""" +Options for building Stripping21 on MC, +""" +#stripping version +stripping='stripping21' + +#use CommonParticlesArchive +from CommonParticlesArchive import CommonParticlesArchiveConf +CommonParticlesArchiveConf().redirect(stripping) + +from Gaudi.Configuration import * +MessageSvc().Format = "% F%30W%S%7W%R%T %0W%M" + +# Tighten Trk Chi2 to <3 +from CommonParticles.Utils import DefaultTrackingCuts +DefaultTrackingCuts().Cuts = { "Chi2Cut" : [ 0, 3 ], + "CloneDistCut" : [5000, 9e+99 ] } + +# +#Raw event juggler to split Other/RawEvent into Velo/RawEvent and Tracker/RawEvent +# +from Configurables import RawEventJuggler +juggler = 
RawEventJuggler( DataOnDemand=True, Input=2.0, Output=4.0 ) + +# +# Build the streams and stripping object +# +from StrippingConf.Configuration import StrippingConf, StrippingStream +from StrippingSettings.Utils import strippingConfiguration +from StrippingArchive.Utils import buildStreams, cloneLinesFromStream +from StrippingArchive import strippingArchive + +#get the configuration dictionary from the database +config = strippingConfiguration(stripping) +#get the line builders from the archive +archive = strippingArchive(stripping) + +streams = buildStreams(stripping = config, archive = archive) + + +# +# Merge into one stream +# +AllStreams = StrippingStream("AllStreams21") + +for stream in streams : + if stream.name() == 'Bhadron' : + lines = [] + for line in stream.lines : + if 'B2KShh' in line.name() : + lines.append(line) + AllStreams.appendLines( lines ) + +sc = StrippingConf( Streams = [ AllStreams ], + MaxCandidates = 2000, + TESPrefix = 'Strip21' + ) + +# so that we get only events selected by our stripping lines written out +AllStreams.sequence().IgnoreFilterPassed = False + +# +# Configuration of SelDSTWriter +# +enablePacking = True + +from DSTWriters.microdstelements import * +from DSTWriters.Configuration import (SelDSTWriter, + stripDSTStreamConf, + stripDSTElements + ) + +stripPrefixes = ['Strip','Strip21'] + +SelDSTWriterElements = { + 'default' : stripDSTElements(pack=enablePacking,stripPrefix=stripPrefixes) + } + +SelDSTWriterConf = { + 'default' : stripDSTStreamConf(pack=enablePacking,selectiveRawEvent=False,stripPrefix=stripPrefixes) + } + +#Items that might get lost when running the CALO+PROTO ReProcessing in DV +caloProtoReprocessLocs = [ "/Event/pRec/ProtoP#99", "/Event/pRec/Calo#99" ] + +# Make sure they are present on full DST streams +SelDSTWriterConf['default'].extraItems += caloProtoReprocessLocs + +dstWriter = SelDSTWriter( "MyDSTWriter", + StreamConf = SelDSTWriterConf, + MicroDSTElements = SelDSTWriterElements, + OutputFileSuffix 
='000000', + SelectionSequences = sc.activeStreams() + ) + +# Add stripping TCK +from Configurables import StrippingTCK +stck = StrippingTCK(HDRLocation = '/Event/Strip21/Phys/DecReports', TCK=0x36112100) + + +# +# DaVinci Configuration +# +from Configurables import DaVinci +DaVinci().Simulation = True +DaVinci().EvtMax = 1000 # Number of events +DaVinci().HistogramFile = "DVHistos.root" +DaVinci().appendToMainSequence( [ sc.sequence() ] ) +DaVinci().appendToMainSequence( [ stck ] ) +DaVinci().appendToMainSequence( [ dstWriter.sequence() ] ) +DaVinci().ProductionType = "Stripping" +DaVinci().DataType = "2012" +DaVinci().InputType = "DST" +#DaVinci().DDDBtag = 'dddb-20140729' +#DaVinci().CondDBtag = 'sim-20140730-vc-md100' + +# Change the column size of Timing table +from Configurables import TimingAuditor, SequencerTimerTool +TimingAuditor().addTool(SequencerTimerTool,name="TIMER") +TimingAuditor().TIMER.NameSize = 60 + diff --git a/Phys/B2KShh/options/ReStripMC-S21/DV-Stripping21r1-Stripping-MC.py b/Phys/B2KShh/options/ReStripMC-S21/DV-Stripping21r1-Stripping-MC.py new file mode 100644 index 0000000..8aab058 --- /dev/null +++ b/Phys/B2KShh/options/ReStripMC-S21/DV-Stripping21r1-Stripping-MC.py @@ -0,0 +1,121 @@ +""" +Options for building Stripping21 on MC, +""" +#stripping version +stripping='stripping21r1' + +#use CommonParticlesArchive +from CommonParticlesArchive import CommonParticlesArchiveConf +CommonParticlesArchiveConf().redirect(stripping) + +from Gaudi.Configuration import * +MessageSvc().Format = "% F%30W%S%7W%R%T %0W%M" + +# Tighten Trk Chi2 to <3 +from CommonParticles.Utils import DefaultTrackingCuts +DefaultTrackingCuts().Cuts = { "Chi2Cut" : [ 0, 3 ], + "CloneDistCut" : [5000, 9e+99 ] } + +# +#Raw event juggler to split Other/RawEvent into Velo/RawEvent and Tracker/RawEvent +# +from Configurables import RawEventJuggler +juggler = RawEventJuggler( DataOnDemand=True, Input=2.0, Output=4.0 ) + +# +# Build the streams and stripping object +# +from 
StrippingConf.Configuration import StrippingConf, StrippingStream +from StrippingSettings.Utils import strippingConfiguration +from StrippingArchive.Utils import buildStreams, cloneLinesFromStream +from StrippingArchive import strippingArchive + +#get the configuration dictionary from the database +config = strippingConfiguration(stripping) +#get the line builders from the archive +archive = strippingArchive(stripping) + +streams = buildStreams(stripping = config, archive = archive) + + +# +# Merge into one stream +# +AllStreams = StrippingStream("AllStreams21") + +for stream in streams : + if stream.name() == 'Bhadron' : + lines = [] + for line in stream.lines : + if 'B2KShh' in line.name() : + lines.append(line) + AllStreams.appendLines( lines ) + +sc = StrippingConf( Streams = [ AllStreams ], + MaxCandidates = 2000, + TESPrefix = 'Strip21' + ) + +# so that we get only events selected by our stripping lines written out +AllStreams.sequence().IgnoreFilterPassed = False + +# +# Configuration of SelDSTWriter +# +enablePacking = True + +from DSTWriters.microdstelements import * +from DSTWriters.Configuration import (SelDSTWriter, + stripDSTStreamConf, + stripDSTElements + ) + +stripPrefixes = ['Strip','Strip21'] + +SelDSTWriterElements = { + 'default' : stripDSTElements(pack=enablePacking,stripPrefix=stripPrefixes) + } + +SelDSTWriterConf = { + 'default' : stripDSTStreamConf(pack=enablePacking,selectiveRawEvent=False,stripPrefix=stripPrefixes) + } + +#Items that might get lost when running the CALO+PROTO ReProcessing in DV +caloProtoReprocessLocs = [ "/Event/pRec/ProtoP#99", "/Event/pRec/Calo#99" ] + +# Make sure they are present on full DST streams +SelDSTWriterConf['default'].extraItems += caloProtoReprocessLocs + +dstWriter = SelDSTWriter( "MyDSTWriter", + StreamConf = SelDSTWriterConf, + MicroDSTElements = SelDSTWriterElements, + OutputFileSuffix ='000000', + SelectionSequences = sc.activeStreams() + ) + +# Add stripping TCK +from Configurables import 
StrippingTCK +stck = StrippingTCK(HDRLocation = '/Event/Strip21/Phys/DecReports', TCK=0x36112110) + + +# +# DaVinci Configuration +# +from Configurables import DaVinci +DaVinci().Simulation = True +DaVinci().EvtMax = 1000 # Number of events +DaVinci().HistogramFile = "DVHistos.root" +DaVinci().appendToMainSequence( [ sc.sequence() ] ) +DaVinci().appendToMainSequence( [ stck ] ) +DaVinci().appendToMainSequence( [ dstWriter.sequence() ] ) +DaVinci().ProductionType = "Stripping" +DaVinci().DataType = "2011" +DaVinci().InputType = "DST" +#DaVinci().DDDBtag = 'dddb-20140729' +#DaVinci().CondDBtag = 'sim-20140730-vc-md100' + +# Change the column size of Timing table +from Configurables import TimingAuditor, SequencerTimerTool +TimingAuditor().addTool(SequencerTimerTool,name="TIMER") +TimingAuditor().TIMER.NameSize = 60 + diff --git a/Phys/B2KShh/options/ReStripMC-S21/README b/Phys/B2KShh/options/ReStripMC-S21/README new file mode 100644 index 0000000..d5840ef --- /dev/null +++ b/Phys/B2KShh/options/ReStripMC-S21/README @@ -0,0 +1,4 @@ +These are DaVinci options to be run to restrip the signal MC with Stripping21. +All events that pass the B2KShh stripping are saved to DST. +These DSTs can then be processed using the Bender options bender_B2KShh_signalMC_S21.py found in the parent options directory. 
+ diff --git a/Phys/B2KShh/options/ReStripMC-S21/runganga.py b/Phys/B2KShh/options/ReStripMC-S21/runganga.py new file mode 100644 index 0000000..52e735c --- /dev/null +++ b/Phys/B2KShh/options/ReStripMC-S21/runganga.py @@ -0,0 +1,171 @@ + +eventTypeDict = { + 'Bd2pipiKS0-sqDalitz-DecProdCut' : 11104124 + , 'Bd2KpiKS0-sqDalitz-DecProdCut' : 11304161 + , 'Bd2KKKS0-sqDalitz-DecProdCut' : 11104114 + , 'Bs2pipiKS0-sqDalitz-DecProdCut' : 13104124 + , 'Bs2KpiKS0-sqDalitz-DecProdCut' : 13304104 + , 'Bs2KKKS0-sqDalitz-DecProdCut' : 13104134 +} + +# *************************************************************************************** # +# pick magnet polarities, stripping versions and an event type from the known types above # +# *************************************************************************************** # +magtypes = [ 'MagDown', 'MagUp' ] +years = [ '2011', '2012' ] +#years = [ '2011' ] +eventtypes = eventTypeDict.keys() +#eventtypes = [ 'Bd2pipiKS0-sqDalitz-DecProdCut' ] +# *************************************************************************************** # + +import os +import re + +knownMagTypes = [ 'MagDown', 'MagUp' ] + +nativeStrippingVersion = {} +nativeStrippingVersion['2011'] = 'Stripping20r1' +nativeStrippingVersion['2012'] = 'Stripping20' + +beamEnergy = {} +beamEnergy['2011'] = '3500' +beamEnergy['2012'] = '4000' + +nuValue = {} +nuValue['2011'] = '2' +nuValue['2012'] = '2.5' + +tck = {} +tck['2011'] = '0x40760037' +tck['2012'] = '0x409f0045' + +currentdir = os.getcwd() +daVinciVersion = 'v36r1p2' + +for eventtype in eventtypes : + + print 'Creating job(s) for event type: ' + eventtype + + if eventtype not in eventTypeDict.keys() : + e = Exception('Unknown event type: ' + eventtype ) + raise e + + evtID = eventTypeDict[ eventtype ] + + bkinfo = getBKInfo( evtID ) + bkpaths = bkinfo.keys() + + mode = eventtype.split('-')[0] + btype = mode.split('2')[0] + tracks = re.findall( 'K(?!S)|pi|p(?!i)', mode ) + track1 = tracks[0] + track2 = tracks[1] 
+ match = re.search( 'KS|KS0|Lambda|Lambda0', mode ) + v0type = match.group(0) + if not v0type.endswith('0') : + v0type = v0type+'0' + + for year in years : + + print 'For year: ' + year + + for magtype in magtypes : + + print 'With magnet setting: ' + magtype + + if magtype not in knownMagTypes : + e = Exception('Unsupported magnet setting: ' + magtype) + raise e + + for pythiaversion in [ 6, 8 ] : + + print 'With Pythia version: %d' % pythiaversion + + datasets = [] + dddbtags = set() + condtags = set() + for simversion in [ 'Sim08a', 'Sim08b' ] : + bkpath = '/MC/%s/Beam%sGeV-%s-%s-Nu%s-Pythia%d/%s/Digi13/Trig%s/Reco14a/%sNoPrescalingFlagged/%d/ALLSTREAMS.DST' % ( year, beamEnergy[year], year, magtype, nuValue[year], pythiaversion, simversion, tck[year], nativeStrippingVersion[year], evtID ) + if bkpath not in bkpaths : + continue + print 'Trying BK path: ' + bkpath, + bkq = BKQuery( type='Path', dqflag='OK', path=bkpath ) + dstmp = bkq.getDataset() + if len(dstmp) != 0 : + print ' - found dataset' + datasets.append( dstmp ) + dddbtags.add( bkinfo[bkpath][0] ) + condtags.add( bkinfo[bkpath][1] ) + else : + print ' - nothing found' + + if len( datasets ) == 0 : + print 'Could not find any valid data!! Skipping this configuration!!' 
+ continue + elif len(datasets) == 1 : + ds = datasets[0] + elif len(datasets) == 2 : + ds = datasets[0].union( datasets[1] ) + else : + e = Exception('Found more than two datasets!') + raise e + + if len( ds.files ) == 0 : + e = Exception('Zero files in this dataset!') + raise e + + if len( dddbtags ) != 1 or len( condtags ) != 1 : + e = Exception('Found multiple DB tags') + raise e + + # uncomment this if you want to run a quick test on the CERN batch + #reduced_ds = LHCbDataset() + #for file in ds.files : + #sites = file.getReplicas().keys() + #for site in sites : + #if 'CERN' in site : + #reduced_ds.files.append( file ) + #break + #if len(reduced_ds.files) > 0 : + #break + + if year == '2011' : + optsFiles = [ currentdir+'/DV-Stripping21r1-Stripping-MC.py', + currentdir+'/DV-RedoCaloPID-Stripping21.py' ] + elif year == '2012' : + optsFiles = [ currentdir+'/DV-Stripping21-Stripping-MC.py', + currentdir+'/DV-RedoCaloPID-Stripping21.py' ] + else : + e = Exception('Unrecognised year, what is going on?') + raise e + + dv = DaVinci(version=daVinciVersion) + dv.optsfile = optsFiles + dv.extraopts = """ +DaVinci().DDDBtag = '%s' +DaVinci().CondDBtag = '%s' + """ % ( dddbtags.pop(), condtags.pop() ) + + j=Job() + j.name = mode+'_MC_'+year+'_'+magtype+'_Pythia'+str(pythiaversion) + j.application = dv + j.backend = Dirac() + j.inputdata = ds + + # uncomment this if you want to run a quick test on the CERN batch + #j.backend = LSF( queue = '8nh' ) + #j.inputdata = reduced_ds + + # can pick if you want the ntuple returned to you immediately (SandboxFile) or stored on the Grid (DiracFile) + j.outputfiles = [LocalFile('000000.AllStreams21.dst')] + #j.outputfiles = [DiracFile('000000.AllStreams21.dst')] + + # can tweak the Dirac options if you like + #j.backend.settings['CPUTime'] = 10000 + #j.backend.settings['Destination'] = 'LCG.CERN.ch' + + # can change here the number of files you want to run over per job + j.splitter = SplitByFiles( filesPerJob = 1 ) + + #j.submit() + 
diff --git a/Phys/B2KShh/options/bender_B2KShh_Jpsipipi_MCTruth.py b/Phys/B2KShh/options/bender_B2KShh_Jpsipipi_MCTruth.py new file mode 100644 index 0000000..46f8b5f --- /dev/null +++ b/Phys/B2KShh/options/bender_B2KShh_Jpsipipi_MCTruth.py @@ -0,0 +1,90 @@ +#!/usr/bin/env python + +""" +Bender module to run the following sequence over B2KShh signal MC samples: +- run an algorithm to store the MC truth DP position (and other info) for all generated events +""" + +from Bender.Main import * + +def configure( datafiles, catalogues = [] ) : + + #======= B->KShh Configuration and Setup =========# + mode = 'Bs2Jpsipipi' + whichMC = 'MC11' + magtype = 'MagDown' + isXGen = False + #=================================================# + + knownMCTypes = [ 'MC10', 'MC11' ] + + if whichMC not in knownMCTypes : + e = Exception('Unknown MC version') + raise e + + dddbTag = {} + dddbTag['MC10'] = 'head-20101206' + dddbTag['MC11'] = 'head-20111102' + + conddbTag = {} + conddbTag['MC10'] = {} + conddbTag['MC10']['MagUp'] = 'sim-20101210-vc-mu100' + conddbTag['MC10']['MagDown'] = 'sim-20101210-vc-md100' + conddbTag['MC11'] = {} + conddbTag['MC11']['MagUp'] = 'sim-20111111-vc-mu100' + conddbTag['MC11']['MagDown'] = 'sim-20111111-vc-md100' + + from Configurables import DaVinci + + daVinci = DaVinci() + daVinci.DataType = '20'+whichMC[2:] + daVinci.Simulation = True + daVinci.Lumi = False + daVinci.DDDBtag = dddbTag[whichMC] + daVinci.CondDBtag = conddbTag[whichMC][magtype] + daVinci.InputType = "DST" + daVinci.TupleFile = mode+'-'+whichMC+'-'+magtype+'-MCTruth.root' + daVinci.EvtMax = -1 + + setData( datafiles, catalogues ) + + gaudi = appMgr() + + from B2KShh.DecayParticle import DecayParticle + muplus = DecayParticle( -13, 0, False, 'mup' ) + muminus = DecayParticle( 13, 0, False, 'mum' ) + piplus = DecayParticle( 211, 0, False, 'pip' ) + piminus = DecayParticle( -211, 0, False, 'pim' ) + jpsi = DecayParticle( 443, 2, False, 'Jpsi' ) + jpsi.addDaughter( muplus ) + jpsi.addDaughter( 
muminus ) + bdecay = DecayParticle( 531, 3, False, 'B' ) + bdecay.addDaughter( jpsi ) + bdecay.addDaughter( piplus ) + bdecay.addDaughter( piminus ) + + from B2KShh.GenericMCTruthAlgo import GenericMCTruth + + algGenMCTrue = GenericMCTruth( mode, bdecay, isXGen ) + + userSeq = gaudi.algorithm('GaudiSequencer/DaVinciUserSequence' , True ) + userSeq.Members += [ algGenMCTrue.name() ] + + return SUCCESS + +############# + +if '__main__' == __name__ : + + datafiles = [ +# "DATAFILE='PFN:/data/lhcb/phsdba/B2KShh/DST/MC11/Bd2KSpipi-MagUp/00015443_00000019_1.allstreams.dst' TYP='POOL_ROOTTREE' OPT='READ'" +# "DATAFILE='PFN:/afs/cern.ch/user/a/amartens/workspace/public/00020550_00000054_1.xgen' TYP='POOL_ROOTTREE' OPT='READ'" + "DATAFILE='PFN:/afs/cern.ch/work/t/tlatham/test-DSTs/MC11a/13144031-MagDown/00018977_00000021_1.allstreams.dst' TYP='POOL_ROOTTREE' OPT='READ'" + ] + + + configure(datafiles) + run(-1) + +############# + diff --git a/Phys/B2KShh/options/bender_B2KShh_Jpsippbar_MCtruth.py b/Phys/B2KShh/options/bender_B2KShh_Jpsippbar_MCtruth.py new file mode 100644 index 0000000..0ee5a96 --- /dev/null +++ b/Phys/B2KShh/options/bender_B2KShh_Jpsippbar_MCtruth.py @@ -0,0 +1,89 @@ +#!/usr/bin/env python + +""" +Bender module to run the following sequence over B2KShh signal MC samples: +- run an algorithm to store the MC truth DP position (and other info) for all generated events +""" + +from Bender.Main import * + +def configure( datafiles, catalogues = [] ) : + + #======= B->KShh Configuration and Setup =========# + mode = 'Bd2Jpsippbar' + whichMC = 'MC11' + magtype = 'MagDown' + isXGen = False + #=================================================# + + knownMCTypes = [ 'MC10', 'MC11' ] + + if whichMC not in knownMCTypes : + e = Exception('Unknown MC version') + raise e + + dddbTag = {} + dddbTag['MC10'] = 'head-20101206' + dddbTag['MC11'] = 'head-20111102' + + conddbTag = {} + conddbTag['MC10'] = {} + conddbTag['MC10']['MagUp'] = 'sim-20101210-vc-mu100' + 
conddbTag['MC10']['MagDown'] = 'sim-20101210-vc-md100' + conddbTag['MC11'] = {} + conddbTag['MC11']['MagUp'] = 'sim-20111111-vc-mu100' + conddbTag['MC11']['MagDown'] = 'sim-20111111-vc-md100' + + from Configurables import DaVinci + + daVinci = DaVinci() + daVinci.DataType = '20'+whichMC[2:] + daVinci.Simulation = True + daVinci.Lumi = False + daVinci.DDDBtag = dddbTag[whichMC] + daVinci.CondDBtag = conddbTag[whichMC][magtype] + daVinci.InputType = "DST" + daVinci.TupleFile = mode+'-'+whichMC+'-'+magtype+'-MCTruth.root' + daVinci.EvtMax = -1 + + setData( datafiles, catalogues ) + + gaudi = appMgr() + + from B2KShh.DecayParticle import DecayParticle + muplus = DecayParticle( -13, 0, False, 'mup' ) + muminus = DecayParticle( 13, 0, False, 'mum' ) + proton = DecayParticle( 2212, 0, False, 'p' ) + antiproton = DecayParticle( -2212, 0, False, 'pbar' ) + jpsi = DecayParticle( 443, 2, False, 'Jpsi' ) + jpsi.addDaughter( muplus ) + jpsi.addDaughter( muminus ) + bdecay = DecayParticle( 511, 3, False, 'B' ) + bdecay.addDaughter( jpsi ) + bdecay.addDaughter( proton ) + bdecay.addDaughter( antiproton ) + + from B2KShh.GenericMCTruthAlgo import GenericMCTruth + + algGenMCTrue = GenericMCTruth( mode, bdecay, isXGen ) + + userSeq = gaudi.algorithm('GaudiSequencer/DaVinciUserSequence' , True ) + userSeq.Members += [ algGenMCTrue.name() ] + + return SUCCESS + +############# + +if '__main__' == __name__ : + + datafiles = [ +# "DATAFILE='PFN:/data/lhcb/phsdba/B2KShh/DST/MC11/Bd2KSpipi-MagUp/00015443_00000019_1.allstreams.dst' TYP='POOL_ROOTTREE' OPT='READ'" +# "DATAFILE='PFN:/afs/cern.ch/user/a/amartens/workspace/public/00020550_00000054_1.xgen' TYP='POOL_ROOTTREE' OPT='READ'" + "DATAFILE='PFN:/afs/cern.ch/work/t/tlatham/test-DSTs/MC11a/11144120-MagDown/00018971_00000015_1.allstreams.dst' TYP='POOL_ROOTTREE' OPT='READ'" + ] + + configure(datafiles) + run(-1) + +############# + diff --git a/Phys/B2KShh/options/bender_B2KShh_Jpsippbarpi_MCtruth.py 
b/Phys/B2KShh/options/bender_B2KShh_Jpsippbarpi_MCtruth.py new file mode 100644 index 0000000..104f521 --- /dev/null +++ b/Phys/B2KShh/options/bender_B2KShh_Jpsippbarpi_MCtruth.py @@ -0,0 +1,92 @@ +#!/usr/bin/env python + +""" +Bender module to run the following sequence over B2KShh signal MC samples: +- run an algorithm to store the MC truth DP position (and other info) for all generated events +""" + +from Bender.Main import * + +def configure( datafiles, catalogues = [] ) : + + #======= B->KShh Configuration and Setup =========# + mode = 'Bu2Jpsippbarpi' + whichMC = 'MC11' + magtype = 'MagDown' + isXGen = False + #=================================================# + + knownMCTypes = [ 'MC10', 'MC11' ] + + if whichMC not in knownMCTypes : + e = Exception('Unknown MC version') + raise e + + dddbTag = {} + dddbTag['MC10'] = 'head-20101206' + dddbTag['MC11'] = 'head-20111102' + + conddbTag = {} + conddbTag['MC10'] = {} + conddbTag['MC10']['MagUp'] = 'sim-20101210-vc-mu100' + conddbTag['MC10']['MagDown'] = 'sim-20101210-vc-md100' + conddbTag['MC11'] = {} + conddbTag['MC11']['MagUp'] = 'sim-20111111-vc-mu100' + conddbTag['MC11']['MagDown'] = 'sim-20111111-vc-md100' + + from Configurables import DaVinci + + daVinci = DaVinci() + daVinci.DataType = '20'+whichMC[2:] + daVinci.Simulation = True + daVinci.Lumi = False + daVinci.DDDBtag = dddbTag[whichMC] + daVinci.CondDBtag = conddbTag[whichMC][magtype] + daVinci.InputType = "DST" + daVinci.TupleFile = mode+'-'+whichMC+'-'+magtype+'-MCTruth.root' + daVinci.EvtMax = -1 + + setData( datafiles, catalogues ) + + gaudi = appMgr() + + from B2KShh.DecayParticle import DecayParticle + muplus = DecayParticle( -13, 0, False, 'mup' ) + muminus = DecayParticle( 13, 0, False, 'mum' ) + proton = DecayParticle( 2212, 0, False, 'p' ) + antiproton = DecayParticle( -2212, 0, False, 'pbar' ) + pion = DecayParticle( 211, 0, True, 'pi' ) + jpsi = DecayParticle( 443, 2, False, 'Jpsi' ) + jpsi.addDaughter( muplus ) + jpsi.addDaughter( muminus ) 
+ bdecay = DecayParticle( 521, 4, False, 'B' ) + bdecay.addDaughter( jpsi ) + bdecay.addDaughter( proton ) + bdecay.addDaughter( antiproton ) + bdecay.addDaughter( pion ) + + from B2KShh.GenericMCTruthAlgo import GenericMCTruth + + algGenMCTrue = GenericMCTruth( mode, bdecay, isXGen ) + + userSeq = gaudi.algorithm('GaudiSequencer/DaVinciUserSequence' , True ) + userSeq.Members += [ algGenMCTrue.name() ] + + return SUCCESS + +############# + +if '__main__' == __name__ : + + datafiles = [ +# "DATAFILE='PFN:/data/lhcb/phsdba/B2KShh/DST/MC11/Bd2KSpipi-MagUp/00015443_00000019_1.allstreams.dst' TYP='POOL_ROOTTREE' OPT='READ'" +# "DATAFILE='PFN:/afs/cern.ch/user/a/amartens/workspace/public/00020550_00000054_1.xgen' TYP='POOL_ROOTTREE' OPT='READ'" + "DATAFILE='PFN:/afs/cern.ch/work/t/tlatham/test-DSTs/MC11a/12145041-MagDown/00018974_00000005_1.allstreams.dst' TYP='POOL_ROOTTREE' OPT='READ'" + + ] + + configure(datafiles) + run(-1) + +############# + diff --git a/Phys/B2KShh/options/bender_B2KShh_MCtruth.py b/Phys/B2KShh/options/bender_B2KShh_MCtruth.py new file mode 100644 index 0000000..d1d11cd --- /dev/null +++ b/Phys/B2KShh/options/bender_B2KShh_MCtruth.py @@ -0,0 +1,80 @@ +#!/usr/bin/env python + +""" +Bender module to run the following sequence over B2KShh signal MC samples: +- run an algorithm to store the MC truth DP position (and other info) for all generated events +""" + +from Bender.Main import * + +def configure( datafiles, catalogues = [] ) : + + #======= B->KShh Configuration and Setup =========# + btype = 'Lb' + track1 = 'p' + track2 = 'pi' + v0type = 'KS' + mode = btype+'2'+track1+track2+v0type + whichMC = 'MC11a' + magtype = 'MagDown' + isXGen = False + #=================================================# + + knownMCTypes = [ 'MC10', 'MC11a' ] + + if whichMC not in knownMCTypes : + e = Exception('Unknown MC version') + raise e + + dddbTag = {} + dddbTag['MC10'] = 'head-20101206' + dddbTag['MC11a'] = 'head-20111102' + + conddbTag = {} + conddbTag['MC10'] = 
{} + conddbTag['MC10']['MagUp'] = 'sim-20101210-vc-mu100' + conddbTag['MC10']['MagDown'] = 'sim-20101210-vc-md100' + conddbTag['MC11a'] = {} + conddbTag['MC11a']['MagUp'] = 'sim-20111111-vc-mu100' + conddbTag['MC11a']['MagDown'] = 'sim-20111111-vc-md100' + + from Configurables import DaVinci + + daVinci = DaVinci() + daVinci.DataType = '20'+whichMC[2:4] + daVinci.Simulation = True + daVinci.Lumi = False + daVinci.DDDBtag = dddbTag[whichMC] + daVinci.CondDBtag = conddbTag[whichMC][magtype] + daVinci.InputType = "DST" + daVinci.TupleFile = mode+'-'+whichMC+'-'+magtype+'-MCTruth.root' + daVinci.EvtMax = -1 + + setData( datafiles, catalogues ) + + gaudi = appMgr() + + from B2KShh.MCTruthAlgo import B2KShhMCTruth + + algGenMCTrue = B2KShhMCTruth( mode, btype, track1, track2, v0type, isXGen ) + + userSeq = gaudi.algorithm('GaudiSequencer/DaVinciUserSequence' , True ) + userSeq.Members += [ algGenMCTrue.name() ] + + return SUCCESS + +############# + +if '__main__' == __name__ : + + datafiles = [ +# "DATAFILE='PFN:/data/lhcb/phsdba/B2KShh/DST/MC11/Bd2KSpipi-MagUp/00015443_00000019_1.allstreams.dst' TYP='POOL_ROOTTREE' OPT='READ'" +# "DATAFILE='PFN:/afs/cern.ch/user/a/amartens/workspace/public/00020550_00000054_1.xgen' TYP='POOL_ROOTTREE' OPT='READ'" + "DATAFILE='PFN:/afs/cern.ch/work/t/tlatham/test-DSTs/MC11a/00021044_00000001_1.allstreams.dst' TYP='POOL_ROOTTREE' OPT='READ'" + ] + + configure(datafiles) + run(-1) + +############# + diff --git a/Phys/B2KShh/options/bender_B2KShh_MCtruthResonance.py b/Phys/B2KShh/options/bender_B2KShh_MCtruthResonance.py new file mode 100644 index 0000000..28821f8 --- /dev/null +++ b/Phys/B2KShh/options/bender_B2KShh_MCtruthResonance.py @@ -0,0 +1,78 @@ +#!/usr/bin/env python + +""" +Bender module to run the following sequence over B2KShh signal MC samples: +- run an algorithm to store the MC truth DP position (and other info) for all generated events +""" + +from Bender.Main import * + +def configure( datafiles, catalogues = [] ) : + + 
#======= B->KShh Configuration and Setup =========# + btype = 'Bs' + restype = 'Kst0' + bachtype = 'KS0' + resdaug1 = 'K+' + resdaug2 = 'pi-' + mode = btype+'2'+restype+bachtype + whichMC = 'MC11a' + magtype = 'MagDown' + #=================================================# + + knownMCTypes = [ 'MC10', 'MC11a' ] + + if whichMC not in knownMCTypes : + e = Exception('Unknown MC version') + raise e + + dddbTag = {} + dddbTag['MC10'] = 'head-20101206' + dddbTag['MC11a'] = 'head-20111102' + + conddbTag = {} + conddbTag['MC10'] = {} + conddbTag['MC10']['MagUp'] = 'sim-20101210-vc-mu100' + conddbTag['MC10']['MagDown'] = 'sim-20101210-vc-md100' + conddbTag['MC11a'] = {} + conddbTag['MC11a']['MagUp'] = 'sim-20111111-vc-mu100' + conddbTag['MC11a']['MagDown'] = 'sim-20111111-vc-md100' + + from Configurables import DaVinci + + daVinci = DaVinci() + daVinci.DataType = '20'+whichMC[2:4] + daVinci.Simulation = True + daVinci.Lumi = False + daVinci.DDDBtag = dddbTag[whichMC] + daVinci.CondDBtag = conddbTag[whichMC][magtype] + daVinci.InputType = "DST" + daVinci.TupleFile = mode+'-'+whichMC+'-'+magtype+'-MCTruth.root' + daVinci.EvtMax = -1 + + setData( datafiles, catalogues ) + + gaudi = appMgr() + + from B2KShh.MCTruthAlgoResonance import B2KShhMCTruthResonance + + algGenMCTrue = B2KShhMCTruthResonance( mode, btype, restype, bachtype, resdaug1, resdaug2 ) + + userSeq = gaudi.algorithm('GaudiSequencer/DaVinciUserSequence' , True ) + userSeq.Members += [ algGenMCTrue.name() ] + + return SUCCESS + +############# + +if '__main__' == __name__ : + + datafiles = [ + 'PFN:/data/lhcb/phsdba/B2KShh/DST/MC11/Bs2Kst0KS-MagDown/00016206_00000001_1.allstreams.dst' + ] + + configure(datafiles) + run(-1) + +############# + diff --git a/Phys/B2KShh/options/bender_B2KShh_bkgdMC.py b/Phys/B2KShh/options/bender_B2KShh_bkgdMC.py new file mode 100644 index 0000000..656ff0e --- /dev/null +++ b/Phys/B2KShh/options/bender_B2KShh_bkgdMC.py @@ -0,0 +1,130 @@ +#!/usr/bin/env python + +""" +Bender module to 
run the following sequence over B2KShh background MC samples: +- run an algorithm to store the reco and MC truth-matched info for all Stripped candidates +""" + +from Bender.Main import * + +def configure( datafiles, catalogues = [], params = {}, castor = False ) : + + #======= B->KShh Configuration and Setup =========# + mode = params.get( 'mode', 'Bd2etapKS' ) + whichMC = params.get( 'whichMC', '2011' ) + whichStripping = params.get( 'stripping', 'Stripping20r1' ) + printFreq = params.get( 'printFreq', 1000 ) + dddbtag = params.get( 'dddbtag', '' ) + conddbtag = params.get( 'conddbtag', '' ) + extended_hypos = params.get( 'extended_hypos', True ) + #=================================================# + + reco_daughters = [ 'pi', 'pi', 'KS0' ] + + knownMCTypes = [ '2011', '2012' ] + + if whichMC not in knownMCTypes : + e = Exception('Unsupported MC version') + raise e + + nativeStrippingVersion = {} + nativeStrippingVersion['2011'] = 'Stripping20r1' + nativeStrippingVersion['2012'] = 'Stripping20' + + if whichStripping != nativeStrippingVersion[ whichMC ] : + e = Exception('Requested stripping version %s is not the native version for this MC %s, you need to use the version of the script that will first re-strip the MC.' 
% (whichStripping, whichMC) ) + raise e + + inputLocationDD = '/Event/AllStreams/Phys/B2KShhDDLine/Particles' + inputLocationLL = '/Event/AllStreams/Phys/B2KShhLLLine/Particles' + + from PhysConf.Filters import LoKi_Filters + + filterVoid = "EXISTS('/Event/Strip/Phys/DecReports')" + filterStrip = "HLT_PASS_RE('StrippingB2KShh[DL]{2}LineDecision')" + filterAlg = LoKi_Filters( VOID_Code = filterVoid, STRIP_Code = filterStrip ) + filters = filterAlg.filters('Filters') + filters.reverse() + + from Configurables import DaVinci + + daVinci = DaVinci() + daVinci.DataType = whichMC + daVinci.Simulation = True + daVinci.Lumi = False + daVinci.InputType = "DST" + daVinci.EventPreFilters = filters + daVinci.EvtMax = -1 + daVinci.PrintFreq = printFreq + + ## try to get the tags from Rec/Header + if dddbtag != '' and conddbtag != '' : + daVinci.DDDBtag = dddbtag + daVinci.CondDBtag = conddbtag + else : + from BenderTools.GetDBtags import getDBTags + tags = getDBTags ( datafiles[0] , castor ) + + logger.info ( 'Extract tags from DATA : %s' % tags ) + if tags.has_key ( 'DDDB' ) and tags ['DDDB'] : + daVinci.DDDBtag = tags['DDDB' ] + logger.info ( 'Set DDDB %s ' % daVinci.DDDBtag ) + if tags.has_key ( 'CONDDB' ) and tags ['CONDDB'] : + daVinci.CondDBtag = tags['CONDDB'] + logger.info ( 'Set CONDDB %s ' % daVinci.CondDBtag ) + if tags.has_key ( 'SIMCOND' ) and tags ['SIMCOND'] : + daVinci.CondDBtag = tags['SIMCOND'] + logger.info ( 'Set SIMCOND %s ' % daVinci.CondDBtag ) + + magtype = "MagUp" + if "md" in daVinci.CondDBtag : + magtype = "MagDown" + + daVinci.TupleFile = mode+'-MC-'+whichMC+'-'+magtype+'-'+whichStripping+'.root' + + setData( datafiles, catalogues ) + + gaudi = appMgr() + + from B2KShh.RecoAlgo import B2KShhReco + + algB2KShhDD = B2KShhReco( + 'B2KShhDD', + reco_daughters, + extended_hypos, False, True, False, + PP2MCs = [ 'Relations/Rec/ProtoP/Charged'] , + Inputs = [ inputLocationDD ] + ) + algB2KShhLL = B2KShhReco( + 'B2KShhLL', + reco_daughters, + extended_hypos, 
False, True, False, + PP2MCs = [ 'Relations/Rec/ProtoP/Charged'] , + Inputs = [ inputLocationLL ] + ) + + userSeq = gaudi.algorithm('GaudiSequencer/DaVinciUserSequence' , True ) + userSeq.Members += [ algB2KShhDD.name() ] + userSeq.Members += [ algB2KShhLL.name() ] + + return SUCCESS + +############# + +if '__main__' == __name__ : + + datafiles = [ + 'PFN:/data/lhcb/phsdba/B2KShh/DST/MC/2011/Bd2etapKS-MagUp-Pythia8/00030589_00000001_1.allstreams.dst' + ] + + pars = {} + pars[ 'mode' ] = 'Bd2etapKS' + pars[ 'whichMC' ] = '2011' + pars[ 'stripping' ] = 'Stripping20r1' + + configure( datafiles, params = pars, castor=False ) + + run(-1) + +############# + diff --git a/Phys/B2KShh/options/bender_B2KShh_data.py b/Phys/B2KShh/options/bender_B2KShh_data.py new file mode 100644 index 0000000..3b2e687 --- /dev/null +++ b/Phys/B2KShh/options/bender_B2KShh_data.py @@ -0,0 +1,127 @@ +#!/usr/bin/env python + +""" +Bender module to run the following sequence over B2KShh stripped data samples: +- run an algorithm to store the reco info for all Stripped candidates +""" + +from Bender.Main import * +from Gaudi.Configuration import * + +def configure( datafiles, catalogues = [], params = {}, castor = False ) : + + #======= B->KShh Configuration and Setup =========# + magtype = params.get( 'magtype', 'MagDown' ) + whichStripping = params.get( 'stripping', 'Stripping20' ) + extended_hypos = params.get( 'extended_hypos', True ) + #=================================================# + + knownMagTypes = [ 'MagDown', 'MagUp' ] + + if magtype not in knownMagTypes : + e = Exception('Unsupported magnet setting: ' + magtype) + raise e + + known2011StrippingVersions = [ 'Stripping20r1', 'Stripping20r1p1' ] + known2012StrippingVersions = [ 'Stripping20', 'Stripping20r0p1' ] + + whichData = '' + if whichStripping in known2011StrippingVersions: + whichData = '2011' + elif whichStripping in known2012StrippingVersions: + whichData = '2012' + else : + e = Exception('Unsupported Stripping version: ' + 
whichStripping) + raise e + + from Configurables import DaVinci + + daVinci = DaVinci() + daVinci.DataType = whichData + daVinci.Simulation = False + daVinci.Lumi = True + daVinci.InputType = "DST" + daVinci.TupleFile = 'B2KShh-Collision'+whichData[2:]+'-'+magtype+'-'+whichStripping+'.root' + daVinci.EvtMax = -1 + daVinci.PrintFreq = 100000 + + from Configurables import CondDB + CondDB().LatestGlobalTagByDataType = whichData + + setData( datafiles, catalogues ) + + stream = {} + stream['Stripping20'] = 'BhadronCompleteEvent' + stream['Stripping20r1'] = 'BhadronCompleteEvent' + stream['Stripping20r0p1'] = 'BhadronCompleteEvent' + stream['Stripping20r1p1'] = 'BhadronCompleteEvent' + + teslocation = '/Event/'+stream[whichStripping]+'/' + + filterStrip = "HLT_PASS_RE('.*B2KShh.*')" + + inputLocationDD = 'Phys/B2KShhDDLine/Particles' + inputLocationLL = 'Phys/B2KShhLLLine/Particles' + + h1type = 'pi' + h2type = 'pi' + + if whichStripping == 'Stripping20r1p1' or whichStripping == 'Stripping20r0p1' : + h1type = 'K' + filterStrip = "HLT_PASS_RE('StrippingB2KSKpi[DL]{2}LineDecision')" + inputLocationDD = 'Phys/B2KSKpiDDLine/Particles' + inputLocationLL = 'Phys/B2KSKpiLLLine/Particles' + + from PhysConf.Filters import LoKi_Filters + + filterVoid = "EXISTS('/Event/Strip/Phys/DecReports')" + filterAlg = LoKi_Filters( VOID_Code = filterVoid, STRIP_Code = filterStrip ) + filters = filterAlg.filters('Filters') + filters.reverse() + daVinci.EventPreFilters = filters + + reco_daughters = [ h1type, h2type, 'KS0' ] + + from B2KShh.RecoAlgo import B2KShhReco + + gaudi = appMgr() + + algB2KShhDD = B2KShhReco( + 'B2KShhDD', + reco_daughters, + extended_hypos, False, False, False, + Inputs = [ teslocation+inputLocationDD ] + ) + algB2KShhLL = B2KShhReco( + 'B2KShhLL', + reco_daughters, + extended_hypos, False, False, False, + Inputs = [ teslocation+inputLocationLL ] + ) + + userSeq = gaudi.algorithm('GaudiSequencer/DaVinciUserSequence' , True ) + + userSeq.Members += [ algB2KShhDD.name() 
] + userSeq.Members += [ algB2KShhLL.name() ] + + return SUCCESS + +############# + +if '__main__' == __name__ : + + datafiles = [ + '/data/lhcb/phsdba/B2KShh/DST/Data/2012-Stripping20-MagDown/00020198_00000002_1.bhadroncompleteevent.dst' + #'/data/lhcb/phsdba/B2KShh/DST/Data/2012-Stripping20r0p1-MagDown/00024183_00000017_1.bhadroncompleteevent.dst' + ] + + pars = {} + pars[ 'stripping' ] = 'Stripping20' + pars[ 'magtype' ] = 'MagDown' + + configure( datafiles, params = pars, castor=False ) + + run(-1) + +############# + diff --git a/Phys/B2KShh/options/bender_B2KShh_data_WS.py b/Phys/B2KShh/options/bender_B2KShh_data_WS.py new file mode 100644 index 0000000..d248ae3 --- /dev/null +++ b/Phys/B2KShh/options/bender_B2KShh_data_WS.py @@ -0,0 +1,134 @@ +#!/usr/bin/env python + +""" +Bender module to run the following sequence over B2KShh stripped data samples: +- run an algorithm to store the reco info for all Stripped candidates +""" + +from Bender.Main import * +from Gaudi.Configuration import * + +def configure( datafiles, catalogues = [], params = {}, castor = False ) : + + #======= B->KShh Configuration and Setup =========# + magtype = params.get( 'magtype', 'MagDown' ) + whichStripping = params.get( 'stripping', 'Stripping20' ) + extended_hypos = params.get( 'extended_hypos', False ) + #=================================================# + + # no need to configure this anymore, it should be taken from the Stripping revision number + whichData = '' + + known2011StrippingVersions = [ 'Stripping20r1', 'Stripping20r1p1' ] + known2012StrippingVersions = [ 'Stripping20', 'Stripping20r0p1' ] + + if whichStripping in known2011StrippingVersions: + whichData = '2011' + elif whichStripping in known2012StrippingVersions: + whichData = '2012' + else : + e = Exception('Unknown Stripping version ' + whichStripping) + raise e + + from Configurables import DaVinci + + daVinci = DaVinci() + daVinci.DataType = whichData + daVinci.Simulation = False + daVinci.Lumi = True + 
daVinci.InputType = "MDST" + daVinci.TupleFile = 'B2KShh-Collision'+whichData[2:]+'-'+magtype+'-'+whichStripping+'_WS.root' + daVinci.EvtMax = -1 + daVinci.PrintFreq = 100000 + + from Configurables import CondDB + CondDB().LatestGlobalTagByDataType = whichData + + setData( datafiles, catalogues ) + + stream = {} + stream['Stripping20'] = 'Bhadron' + stream['Stripping20r1'] = 'Bhadron' + stream['Stripping20r0p1'] = 'Bhadron' + stream['Stripping20r1p1'] = 'Bhadron' + + teslocation = '/Event/'+stream[whichStripping] + daVinci.RootInTES = teslocation + + filterStrip = "HLT_PASS_RE('StrippingB2KShh[DL]{2}SameSignLineDecision')" + + inputLocationDD = 'Phys/B2KShhDDSameSignLine/Particles' + inputLocationLL = 'Phys/B2KShhLLSameSignLine/Particles' + + h1type = 'pi' + h2type = 'pi' + + if whichStripping == 'Stripping20r1p1' or whichStripping == 'Stripping20r0p1' : + h1type = 'K' + filterStrip = "HLT_PASS_RE('StrippingB2KSKpi[DL]{2}SameSignLineDecision')" + inputLocationDD = 'Phys/B2KSKpiDDSameSignLine/Particles' + inputLocationLL = 'Phys/B2KSKpiLLSameSignLine/Particles' + + from PhysConf.Filters import LoKi_Filters + + filterVoid = "EXISTS('/Event/Strip/Phys/DecReports')" + filterAlg = LoKi_Filters( VOID_Code = filterVoid, STRIP_Code = filterStrip ) + filters = filterAlg.filters('Filters') + filters.reverse() + + daVinci.EventPreFilters = filters + + reco_daughters = [ h1type, h2type, 'KS0' ] + + from PhysConf.MicroDST import uDstConf + uDstConf ( teslocation ) + + from B2KShh.RecoAlgo import B2KShhReco + + gaudi = appMgr() + + algB2KShhDD = B2KShhReco( + 'B2KShhDD', + reco_daughters, + extended_hypos, True, False, False, + Inputs = [ inputLocationDD ], + RootInTES = teslocation + ) + algB2KShhLL = B2KShhReco( + 'B2KShhLL', + reco_daughters, + extended_hypos, True, False, False, + Inputs = [ inputLocationLL ], + RootInTES = teslocation + ) + + userSeq = gaudi.algorithm('GaudiSequencer/DaVinciUserSequence' , True ) + + userSeq.Members += [ algB2KShhDD.name() ] + 
userSeq.Members += [ algB2KShhLL.name() ] + + return SUCCESS + +############# + +if '__main__' == __name__ : + + datafiles = [ + 'PFN:/data/lhcb/phsdba/B2KShh/DST/Data/2012-Stripping20-MagDown/00020198_00000067_1.bhadron.mdst', + 'PFN:/data/lhcb/phsdba/B2KShh/DST/Data/2012-Stripping20-MagDown/00020198_00000224_1.bhadron.mdst' + + #'PFN:/data/lhcb/phsdba/B2KShh/DST/Data/2012-Stripping20r0p1-MagDown/00024183_00000046_1.bhadron.mdst', + #'PFN:/data/lhcb/phsdba/B2KShh/DST/Data/2012-Stripping20r0p1-MagDown/00024183_00000062_1.bhadron.mdst' + ] + + pars = {} + pars[ 'stripping' ] = 'Stripping20' + #pars[ 'stripping' ] = 'Stripping20r0p1' + pars[ 'magtype' ] = 'MagDown' + + configure( datafiles, params = pars, castor=False ) + + run(-1) + +############# + diff --git a/Phys/B2KShh/options/bender_B2KShh_data_strip20.py b/Phys/B2KShh/options/bender_B2KShh_data_strip20.py new file mode 100644 index 0000000..90ea379 --- /dev/null +++ b/Phys/B2KShh/options/bender_B2KShh_data_strip20.py @@ -0,0 +1,130 @@ +#!/usr/bin/env python + +""" +Bender module to run the following sequence over B2KShh stripped data samples: +- run an algorithm to store the reco info for all Stripped candidates +""" + +from Bender.Main import * +from Gaudi.Configuration import * + +def configure( datafiles, catalogues = [], params = {}, castor = False ) : + + #======= B->KShh Configuration and Setup =========# + magtype = params.get( 'magtype', 'MagDown' ) + whichStripping = params.get( 'stripping', 'Stripping20' ) + extended_hypos = params.get( 'extended_hypos', True ) + #=================================================# + + knownMagTypes = [ 'MagDown', 'MagUp' ] + + if magtype not in knownMagTypes : + e = Exception('Unsupported magnet setting: ' + magtype) + raise e + + known2011StrippingVersions = [ 'Stripping20r1', 'Stripping20r1p1' ] + known2012StrippingVersions = [ 'Stripping20', 'Stripping20r0p1' ] + + whichData = '' + if whichStripping in known2011StrippingVersions: + whichData = '2011' + elif 
whichStripping in known2012StrippingVersions: + whichData = '2012' + else : + e = Exception('Unsupported Stripping version: ' + whichStripping) + raise e + + from Configurables import DaVinci + + daVinci = DaVinci() + daVinci.DataType = whichData + daVinci.Simulation = False + daVinci.Lumi = True + daVinci.InputType = "DST" + daVinci.TupleFile = 'B2KShh-Collision'+whichData[2:]+'-'+magtype+'-'+whichStripping+'.root' + daVinci.EvtMax = -1 + daVinci.PrintFreq = 100000 + + from Configurables import CondDB + CondDB().LatestGlobalTagByDataType = whichData + + setData( datafiles, catalogues ) + + stream = {} + stream['Stripping20'] = 'BhadronCompleteEvent' + stream['Stripping20r1'] = 'BhadronCompleteEvent' + stream['Stripping20r0p1'] = 'BhadronCompleteEvent' + stream['Stripping20r1p1'] = 'BhadronCompleteEvent' + + teslocation = '/Event/'+stream[whichStripping]+'/' + + filterStrip = "HLT_PASS_RE('.*B2KShh.*')" + + inputLocationDD = 'Phys/B2KShhDDLine/Particles' + inputLocationLL = 'Phys/B2KShhLLLine/Particles' + + h1type = 'pi' + h2type = 'pi' + + if whichStripping == 'Stripping20r1p1' or whichStripping == 'Stripping20r0p1' : + h1type = 'K' + filterStrip = "HLT_PASS_RE('StrippingB2KSKpi[DL]{2}LineDecision')" + inputLocationDD = 'Phys/B2KSKpiDDLine/Particles' + inputLocationLL = 'Phys/B2KSKpiLLLine/Particles' + + from PhysConf.Filters import LoKi_Filters + + filterVoid = "EXISTS('/Event/Strip/Phys/DecReports')" + filterAlg = LoKi_Filters( VOID_Code = filterVoid, STRIP_Code = filterStrip ) + filters = filterAlg.filters('Filters') + filters.reverse() + daVinci.EventPreFilters = filters + + reco_daughters = [ h1type, h2type, 'KS0' ] + + from B2KShh.RecoAlgo import B2KShhReco + + gaudi = appMgr() + + algB2KShhDD = B2KShhReco( + 'B2KShhDD', + reco_daughters, + extended_hypos, False, False, False, + Inputs = [ teslocation+inputLocationDD ] + ) + algB2KShhLL = B2KShhReco( + 'B2KShhLL', + reco_daughters, + extended_hypos, False, False, False, + Inputs = [ 
teslocation+inputLocationLL ] + ) + + userSeq = gaudi.algorithm('GaudiSequencer/DaVinciUserSequence' , True ) + + userSeq.Members += [ algB2KShhDD.name() ] + userSeq.Members += [ algB2KShhLL.name() ] + + return SUCCESS + +############# + +if '__main__' == __name__ : + + datafiles = [ + '/disk/users/elena/B2KShh/ntuples/tests/00022761_00007763_1.bhadroncompleteevent.dst' + #'/data/lhcb/phsdba/B2KShh/DST/Data/2012-Stripping20-MagDown/00020198_00000002_1.bhadroncompleteevent.dst' + #'/data/lhcb/phsdba/B2KShh/DST/Data/2012-Stripping20r0p1-MagDown/00024183_00000017_1.bhadroncompleteevent.dst' + ] + + pars = {} + # pars[ 'stripping' ] = 'Stripping20' + # pars[ 'magtype' ] = 'MagDown' + pars[ 'stripping' ] = 'Stripping20r1' + pars[ 'magtype' ] = 'MagUp' + + configure( datafiles, params = pars, castor=False ) + + run(300) + +############# + diff --git a/Phys/B2KShh/options/bender_B2KShh_data_strip21.py b/Phys/B2KShh/options/bender_B2KShh_data_strip21.py new file mode 100644 index 0000000..d90a82a --- /dev/null +++ b/Phys/B2KShh/options/bender_B2KShh_data_strip21.py @@ -0,0 +1,126 @@ +#!/usr/bin/env python + +""" +Bender module to run the following sequence over B2KShh stripped data samples: +- run an algorithm to store the reco info for all Stripped candidates +""" + +from Bender.Main import * +from Gaudi.Configuration import * + +def configure( datafiles, catalogues = [], params = {}, castor = False ) : + + #======= B->KShh Configuration and Setup =========# + magtype = params.get( 'magtype', 'MagDown' ) + whichStripping = params.get( 'stripping', 'Stripping21' ) + extended_hypos = params.get( 'extended_hypos', False ) + #=================================================# + + knownMagTypes = [ 'MagDown', 'MagUp' ] + + if magtype not in knownMagTypes : + e = Exception('Unsupported magnet setting: ' + magtype) + raise e + + known2011StrippingVersions = [ 'Stripping21r1' ] + known2012StrippingVersions = [ 'Stripping21' ] + + whichData = '' + if whichStripping in 
known2011StrippingVersions: + whichData = '2011' + elif whichStripping in known2012StrippingVersions: + whichData = '2012' + else : + e = Exception('Unsupported Stripping version: ' + whichStripping) + raise e + + from Configurables import DaVinci + + daVinci = DaVinci() + daVinci.DataType = whichData + daVinci.Simulation = False + daVinci.Lumi = True + daVinci.InputType = "MDST" + daVinci.TupleFile = 'B2KShh-Collision'+whichData[2:]+'-'+magtype+'-'+whichStripping+'.root' + daVinci.EvtMax = -1 + daVinci.PrintFreq = 100000 + + from Configurables import CondDB + CondDB().LatestGlobalTagByDataType = whichData + + setData( datafiles, catalogues ) + + stream = {} + stream['Stripping21'] = 'Bhadron' + stream['Stripping21r1'] = 'Bhadron' + + teslocation = '/Event/'+stream[whichStripping]+'/' + daVinci.RootInTES = teslocation + + filterStrip = "HLT_PASS_RE('.*B2KShh.*')" + + h1type = 'pi' + h2type = 'pi' + + algNamesAndLocations = {} + if whichData == '2011' : + algNamesAndLocations['B2KShh_DD_2011'] = 'Phys/B2KShh_MVAFilter_DD_2011_OS/Particles' + algNamesAndLocations['B2KShh_LL_2011'] = 'Phys/B2KShh_MVAFilter_LL_2011_OS/Particles' + else : + algNamesAndLocations['B2KShh_DD_2012a'] = 'Phys/B2KShh_MVAFilter_DD_2012a_OS/Particles' + algNamesAndLocations['B2KShh_LL_2012a'] = 'Phys/B2KShh_MVAFilter_LL_2012a_OS/Particles' + algNamesAndLocations['B2KShh_DD_2012b'] = 'Phys/B2KShh_MVAFilter_DD_2012b_OS/Particles' + algNamesAndLocations['B2KShh_LL_2012b'] = 'Phys/B2KShh_MVAFilter_LL_2012b_OS/Particles' + + from PhysConf.Filters import LoKi_Filters + + filterVoid = "EXISTS('/Event/Strip/Phys/DecReports')" + filterAlg = LoKi_Filters( VOID_Code = filterVoid, STRIP_Code = filterStrip ) + filters = filterAlg.filters('Filters') + filters.reverse() + daVinci.EventPreFilters = filters + + reco_daughters = [ h1type, h2type, 'KS0' ] + + from B2KShh.RecoAlgo import B2KShhReco + + gaudi = appMgr() + + algs = [] + + for algName,inputLoc in algNamesAndLocations.items() : + alg = B2KShhReco( 
algName, + reco_daughters, + extended_hypos, False, False, False, + RootInTES = teslocation, + Inputs = [ inputLoc ] + ) + algs.append( alg ) + + userSeq = gaudi.algorithm('GaudiSequencer/DaVinciUserSequence' , True ) + + for alg in algs : + userSeq.Members += [ alg.name() ] + + return SUCCESS + +############# + +if '__main__' == __name__ : + + datafiles = [ + #'/data/lhcb/phsdba/B2KShh/DST/Data/2012-Stripping20-MagDown/00020198_00000002_1.bhadroncompleteevent.dst' + #'/data/lhcb/phsdba/B2KShh/DST/Data/2012-Stripping20r0p1-MagDown/00024183_00000017_1.bhadroncompleteevent.dst' + '/data/lhcb/phsdba/B2KShh/DST/Data/2012-Stripping21/00041836_00000642_1.bhadron.mdst' + ] + + pars = {} + pars[ 'stripping' ] = 'Stripping21' + pars[ 'magtype' ] = 'MagDown' + + configure( datafiles, params = pars, castor=False ) + + run(-1) + +############# + diff --git a/Phys/B2KShh/options/bender_B2KShh_signalMC.py b/Phys/B2KShh/options/bender_B2KShh_signalMC.py new file mode 100644 index 0000000..dfd43c9 --- /dev/null +++ b/Phys/B2KShh/options/bender_B2KShh_signalMC.py @@ -0,0 +1,148 @@ +#!/usr/bin/env python + +""" +Bender module to run the following sequence over B2KShh signal MC samples: +- run an algorithm to store the MC truth DP position (and other info) for all generated events +- run an algorithm to store the reco and MC truth-matched info for all Stripped candidates +""" + +from Bender.Main import * + +def configure( datafiles, catalogues = [], params = {}, castor = False ) : + + #======= B->KShh Configuration and Setup =========# + btype = params.get( 'btype', 'Bd' ) + track1 = params.get( 'track1', 'K' ) + track2 = params.get( 'track2', 'K' ) + v0type = params.get( 'v0type', 'KS0' ) + whichMC = params.get( 'whichMC', '2012' ) + whichStripping = params.get( 'stripping', 'Stripping20' ) + isXGen = params.get( 'isXGen', False ) + printFreq = params.get( 'printFreq', 1000 ) + dddbtag = params.get( 'dddbtag', '' ) + conddbtag = params.get( 'conddbtag', '' ) + extended_hypos = 
params.get( 'extended_hypos', True ) + #=================================================# + + mode = btype+'2'+track1+track2+v0type + mc_daughters = [ track1, track2, v0type ] + + # decay descriptors can be provided if running over resonant signal MC, e.g. B0 -> K*+ pi-; K*+ -> KS0 pi+ + mc_decay_descriptors = [] + + reco_daughters = [ 'pi', 'pi', 'KS0' ] + + knownMCTypes = [ '2011', '2012' ] + + if whichMC not in knownMCTypes : + e = Exception('Unsupported MC version') + raise e + + nativeStrippingVersion = {} + nativeStrippingVersion['2011'] = 'Stripping20r1' + nativeStrippingVersion['2012'] = 'Stripping20' + + if whichStripping != nativeStrippingVersion[ whichMC ] : + e = Exception('Requested stripping version %s is not the native version for this MC %s, you need to use the version of the script that will first re-strip the MC.' % (whichStripping, whichMC) ) + raise e + + inputLocationDD = '/Event/AllStreams/Phys/B2KShhDDLine/Particles' + inputLocationLL = '/Event/AllStreams/Phys/B2KShhLLLine/Particles' + + + # Configuration of DaVinci + from Configurables import DaVinci + + daVinci = DaVinci() + daVinci.DataType = whichMC + daVinci.Simulation = True + daVinci.Lumi = False + daVinci.InputType = "DST" + daVinci.EvtMax = -1 + daVinci.PrintFreq = printFreq + + ## try to get the tags from Rec/Header + if dddbtag != '' and conddbtag != '' : + daVinci.DDDBtag = dddbtag + daVinci.CondDBtag = conddbtag + else : + from BenderTools.GetDBtags import getDBTags + tags = getDBTags ( datafiles[0] , castor ) + + logger.info ( 'Extract tags from DATA : %s' % tags ) + if tags.has_key ( 'DDDB' ) and tags ['DDDB'] : + daVinci.DDDBtag = tags['DDDB' ] + logger.info ( 'Set DDDB %s ' % daVinci.DDDBtag ) + if tags.has_key ( 'CONDDB' ) and tags ['CONDDB'] : + daVinci.CondDBtag = tags['CONDDB'] + logger.info ( 'Set CONDDB %s ' % daVinci.CondDBtag ) + if tags.has_key ( 'SIMCOND' ) and tags ['SIMCOND'] : + daVinci.CondDBtag = tags['SIMCOND'] + logger.info ( 'Set SIMCOND %s ' % 
daVinci.CondDBtag ) + + magtype = "MagUp" + if "md" in daVinci.CondDBtag : + magtype = "MagDown" + + daVinci.TupleFile = mode+'-MC-'+whichMC+'-'+magtype+'-'+whichStripping+'-withMCtruth.root' + + setData( datafiles, catalogues ) + + gaudi = appMgr() + + from B2KShh.MCTruthAlgo import B2KShhMCTruth + from B2KShh.RecoAlgo import B2KShhReco + + algGenMCTruth = B2KShhMCTruth( mode, btype, track1, track2, v0type, isXGen ) + + algB2KShhDD = B2KShhReco( + 'B2KShhDD', + reco_daughters, + extended_hypos, False, True, True, + mc_daughters, + mc_decay_descriptors, + PP2MCs = [ 'Relations/Rec/ProtoP/Charged'] , + Inputs = [ inputLocationDD ] + ) + algB2KShhLL = B2KShhReco( + 'B2KShhLL', + reco_daughters, + extended_hypos, False, True, True, + mc_daughters, + mc_decay_descriptors, + PP2MCs = [ 'Relations/Rec/ProtoP/Charged'] , + Inputs = [ inputLocationLL ] + ) + + userSeq = gaudi.algorithm('GaudiSequencer/DaVinciUserSequence' , True ) + userSeq.Members += [ algGenMCTruth.name() ] + userSeq.Members += [ algB2KShhDD.name() ] + userSeq.Members += [ algB2KShhLL.name() ] + + return SUCCESS + +############# + +if '__main__' == __name__ : + + datafiles = [ + #'/data/lhcb/phsdba/B2KShh/DST/MC/2012/Bd2KSKK-SqDalitz-MagDown-Pythia8/00029527_00000114_1.allstreams.dst' + '/disk/users/elena/B2KShh/ntuples/tests/00031273_00000013_1.allstreams.dst' + ] + + pars = {} + pars[ 'btype' ] = 'Xib0' # 'Bd' + pars[ 'track1' ] = 'p' # 'K' + pars[ 'track2' ] = 'pi' # 'K' + pars[ 'v0type' ] = 'KS0' + pars[ 'whichMC' ] = '2011' # '2012' + pars[ 'stripping' ] = 'Stripping20r1' # 'Stripping20' + pars[ 'dddbtag' ] = 'dddb-20130929' # 'Sim08-20130503-1' + pars[ 'conddbtag' ] = 'sim-20130522-vc-md100' # 'Sim08-20130503-1-vc-md100' + + configure( datafiles, params = pars, castor=False ) + + run(300) + +############# + diff --git a/Phys/B2KShh/options/bender_B2KShh_signalMC_S21.py b/Phys/B2KShh/options/bender_B2KShh_signalMC_S21.py new file mode 100644 index 0000000..7db6921 --- /dev/null +++ 
b/Phys/B2KShh/options/bender_B2KShh_signalMC_S21.py @@ -0,0 +1,150 @@ +#!/usr/bin/env python + +""" +Bender module to run the following sequence over B2KShh signal MC samples: +- run an algorithm to store the MC truth DP position (and other info) for all generated events +- run an algorithm to store the reco and MC truth-matched info for all Stripped candidates +""" + +from Bender.Main import * + +def configure( datafiles, catalogues = [], params = {}, castor = False ) : + + #======= B->KShh Configuration and Setup =========# + btype = params.get( 'btype', 'Bd' ) + track1 = params.get( 'track1', 'K' ) + track2 = params.get( 'track2', 'K' ) + v0type = params.get( 'v0type', 'KS0' ) + whichMC = params.get( 'whichMC', '2012' ) + whichStripping = params.get( 'stripping', 'Stripping21' ) + isXGen = params.get( 'isXGen', False ) + printFreq = params.get( 'printFreq', 1000 ) + dddbtag = params.get( 'dddbtag', '' ) + conddbtag = params.get( 'conddbtag', '' ) + extended_hypos = params.get( 'extended_hypos', False ) + #=================================================# + + mode = btype+'2'+track1+track2+v0type + mc_daughters = [ track1, track2, v0type ] + + # decay descriptors can be provided if running over resonant signal MC, e.g. B0 -> K*+ pi-; K*+ -> KS0 pi+ + mc_decay_descriptors = [] + + reco_daughters = [ 'pi', 'pi', 'KS0' ] + + knownMCTypes = [ '2011', '2012' ] + + if whichMC not in knownMCTypes : + e = Exception('Unsupported MC version') + raise e + + nativeStrippingVersion = {} + nativeStrippingVersion['2011'] = 'Stripping21r1' + nativeStrippingVersion['2012'] = 'Stripping21' + + if whichStripping != nativeStrippingVersion[ whichMC ] : + e = Exception('Requested stripping version %s is not the native version for this MC %s, you need to use the version of the script that will first re-strip the MC.' 
% (whichStripping, whichMC) ) + raise e + + algNamesAndLocations = {} + if whichMC == '2011' : + algNamesAndLocations['B2KShh_DD_2011'] = '/Event/AllStreams21/Phys/B2KShh_MVAFilter_DD_2011_OS/Particles' + algNamesAndLocations['B2KShh_LL_2011'] = '/Event/AllStreams21/Phys/B2KShh_MVAFilter_LL_2011_OS/Particles' + else : + algNamesAndLocations['B2KShh_DD_2012a'] = '/Event/AllStreams21/Phys/B2KShh_MVAFilter_DD_2012a_OS/Particles' + algNamesAndLocations['B2KShh_LL_2012a'] = '/Event/AllStreams21/Phys/B2KShh_MVAFilter_LL_2012a_OS/Particles' + algNamesAndLocations['B2KShh_DD_2012b'] = '/Event/AllStreams21/Phys/B2KShh_MVAFilter_DD_2012b_OS/Particles' + algNamesAndLocations['B2KShh_LL_2012b'] = '/Event/AllStreams21/Phys/B2KShh_MVAFilter_LL_2012b_OS/Particles' + + + # Configuration of DaVinci + from Configurables import DaVinci + + daVinci = DaVinci() + daVinci.DataType = whichMC + daVinci.Simulation = True + daVinci.Lumi = False + daVinci.InputType = "DST" + daVinci.EvtMax = -1 + daVinci.PrintFreq = printFreq + + ## try to get the tags from Rec/Header + if dddbtag != '' and conddbtag != '' : + daVinci.DDDBtag = dddbtag + daVinci.CondDBtag = conddbtag + else : + from BenderTools.GetDBtags import getDBTags + tags = getDBTags ( datafiles[0] , castor ) + + logger.info ( 'Extract tags from DATA : %s' % tags ) + if tags.has_key ( 'DDDB' ) and tags ['DDDB'] : + daVinci.DDDBtag = tags['DDDB' ] + logger.info ( 'Set DDDB %s ' % daVinci.DDDBtag ) + if tags.has_key ( 'CONDDB' ) and tags ['CONDDB'] : + daVinci.CondDBtag = tags['CONDDB'] + logger.info ( 'Set CONDDB %s ' % daVinci.CondDBtag ) + if tags.has_key ( 'SIMCOND' ) and tags ['SIMCOND'] : + daVinci.CondDBtag = tags['SIMCOND'] + logger.info ( 'Set SIMCOND %s ' % daVinci.CondDBtag ) + + magtype = "MagUp" + if "md" in daVinci.CondDBtag : + magtype = "MagDown" + + daVinci.TupleFile = mode+'-MC-'+whichMC+'-'+magtype+'-'+whichStripping+'-withMCtruth.root' + + setData( datafiles, catalogues ) + + gaudi = appMgr() + + from 
B2KShh.MCTruthAlgo import B2KShhMCTruth + from B2KShh.RecoAlgo import B2KShhReco + + algGenMCTruth = B2KShhMCTruth( mode, btype, track1, track2, v0type, isXGen ) + + reco_algs = [] + + for algName,inputLoc in algNamesAndLocations.items() : + alg = B2KShhReco( algName, + reco_daughters, + extended_hypos, False, True, True, + mc_daughters, + mc_decay_descriptors, + PP2MCs = [ 'Relations/Rec/ProtoP/Charged'] , + Inputs = [ inputLoc ] + ) + reco_algs.append( alg ) + + userSeq = gaudi.algorithm('GaudiSequencer/DaVinciUserSequence' , True ) + + userSeq.Members += [ algGenMCTruth.name() ] + for alg in reco_algs : + userSeq.Members += [ alg.name() ] + + return SUCCESS + +############# + +if '__main__' == __name__ : + + datafiles = [ '/data/lhcb/phsdba/gangadir-KShh-Stripping21/workspace/phsdba/LocalXML/DSTs/Bd2KKKS0_MC_2012_MagDown_Pythia8/00029527_%08d_1.allstreams.dst' % i for i in range(1,117) ] + + for file in datafiles : + print file + + pars = {} + pars[ 'btype' ] = 'Bd' + pars[ 'track1' ] = 'K' + pars[ 'track2' ] = 'K' + pars[ 'v0type' ] = 'KS0' + pars[ 'whichMC' ] = '2012' + pars[ 'stripping' ] = 'Stripping21' + pars[ 'dddbtag' ] = 'Sim08-20130503-1' + pars[ 'conddbtag' ] = 'Sim08-20130503-1-vc-md100' + + configure( datafiles, params = pars, castor=False ) + + run(-1) + +############# + diff --git a/Phys/B2KShh/options/bender_B2KShh_stripping_bkgdMC.py b/Phys/B2KShh/options/bender_B2KShh_stripping_bkgdMC.py new file mode 100644 index 0000000..867edbb --- /dev/null +++ b/Phys/B2KShh/options/bender_B2KShh_stripping_bkgdMC.py @@ -0,0 +1,196 @@ +#!/usr/bin/env python + +""" +Bender module to run the following sequence over B2KShh background MC samples: +- run a particular Stripping version extracted from the StrippingArchive or from a local config in StrippingSelections and StrippingSettings +- run an algorithm to store the reco and MC truth-matched info for all Stripped candidates +""" + +from Bender.Main import * + +def configure( datafiles, catalogues = [], params = 
{}, castor = False ) : + + #======= B->KShh Configuration and Setup =========# + mode = params.get( 'mode', 'Bd2etapKS' ) + whichMC = params.get( 'whichMC', '2011' ) + whichStripping = params.get( 'stripping', 'Stripping20r1' ) + stripFromArchiveOrLocal = params.get( 'stripFromArchiveOrLocal', 'Archive' ) + saveDst = params.get( 'saveDst', False ) + printFreq = params.get( 'printFreq', 1000 ) + dddbtag = params.get( 'dddbtag', '' ) + conddbtag = params.get( 'conddbtag', '' ) + extended_hypos = params.get( 'extended_hypos', False ) + #=================================================# + + knownMCTypes = [ '2011', '2012' ] + + if whichMC not in knownMCTypes : + e = Exception('Unsupported MC version') + raise e + + knownStrippings = [ 'Stripping20', 'Stripping20r1', 'Stripping20r0p1', 'Stripping20r1p1' ] + + if whichStripping not in knownStrippings : + e = Exception('Unsupported stripping version') + raise e + + strippingYear = {} + strippingYear['Stripping20'] = '2012' + strippingYear['Stripping20r1'] = '2011' + strippingYear['Stripping20r0p1'] = '2012' + strippingYear['Stripping20r1p1'] = '2011' + + if whichMC != strippingYear[whichStripping] : + e = Exception('Mismatch between stripping version and year') + raise e + + strippingNames = {} + strippingNames['Stripping20'] = 'B2KShh' + strippingNames['Stripping20r1'] = 'B2KShh' + strippingNames['Stripping20r0p1'] = 'B2KSKpi' + strippingNames['Stripping20r1p1'] = 'B2KSKpi' + + strippingName = strippingNames[whichStripping] + + inputLocationDD = 'Phys/'+strippingName+'DDLine/Particles' + inputLocationLL = 'Phys/'+strippingName+'LLLine/Particles' + + reco_daughters = [ 'pi', 'pi', 'KS0' ] + if strippingName == 'B2KSKpi' : + reco_daughters = [ 'K', 'pi', 'KS0' ] + + # Configuration of Stripping + if stripFromArchiveOrLocal == 'Archive' : + from StrippingArchive import Utils + elif stripFromArchiveOrLocal == 'Local' : + from StrippingSelections import Utils + else : + e = Exception('Unknown Stripping config location') + 
raise e + lb = Utils.lineBuilder(whichStripping,strippingName) + + from StrippingConf.StrippingStream import StrippingStream + stream1 = StrippingStream("Bhadron") + stream1.appendLines( lb.lines() ) + + from StrippingConf.Configuration import StrippingConf + # was trying new approach with the EventNodeKiller - but it didn't seem to work :( + sc = StrippingConf( HDRLocation = "ReStrip", Streams = [ stream1 ] ) + #sc = StrippingConf( Streams = [ stream1 ] ) + + for stream in sc.activeStreams() : + for line in stream.lines : + print line.name(),' output=',line.outputLocation() + + from Configurables import StrippingReport + sr = StrippingReport(Selections = sc.selections()) + sr.ReportFrequency = printFreq + + # Configuration of the EventNodeKiller to remove the old stripping reports + #from Configurables import EventNodeKiller + #eventNodeKiller = EventNodeKiller('Stripkiller') + #eventNodeKiller.Nodes = [ '/Event/AllStreams', '/Event/Strip' ] + + # Configuration of DaVinci + from Configurables import DaVinci + + daVinci = DaVinci() + daVinci.DataType = whichMC + daVinci.Simulation = True + daVinci.Lumi = False + daVinci.InputType = "DST" + daVinci.EvtMax = -1 + daVinci.PrintFreq = printFreq + #daVinci.appendToMainSequence( [ eventNodeKiller ] ) + + ## try to get the tags from Rec/Header + if dddbtag != '' and conddbtag != '' : + daVinci.DDDBtag = dddbtag + daVinci.CondDBtag = conddbtag + else : + from BenderTools.GetDBtags import getDBTags + tags = getDBTags ( datafiles[0] , castor ) + + logger.info ( 'Extract tags from DATA : %s' % tags ) + if tags.has_key ( 'DDDB' ) and tags ['DDDB'] : + daVinci.DDDBtag = tags['DDDB' ] + logger.info ( 'Set DDDB %s ' % daVinci.DDDBtag ) + if tags.has_key ( 'CONDDB' ) and tags ['CONDDB'] : + daVinci.CondDBtag = tags['CONDDB'] + logger.info ( 'Set CONDDB %s ' % daVinci.CondDBtag ) + if tags.has_key ( 'SIMCOND' ) and tags ['SIMCOND'] : + daVinci.CondDBtag = tags['SIMCOND'] + logger.info ( 'Set SIMCOND %s ' % daVinci.CondDBtag ) + + 
magtype = "MagUp" + if "md" in daVinci.CondDBtag : + magtype = "MagDown" + + daVinci.TupleFile = mode+'-MC-'+whichMC+'-'+magtype+'-'+whichStripping+'.root' + + # Configuration of SelDSTWriter + try : + from DSTWriters.__dev__.Configuration import SelDSTWriter + except ImportError : + from DSTWriters.Configuration import SelDSTWriter + + suffix = mode+'-MC-'+whichMC+'-'+magtype+'-'+whichStripping + + dstWriter = SelDSTWriter( "MyDSTWriter", + OutputFileSuffix = suffix, + SelectionSequences = sc.activeStreams() + ) + + seq = sc.sequence() + + setData( datafiles, catalogues ) + + gaudi = appMgr() + + from B2KShh.RecoAlgo import B2KShhReco + + algB2KShhDD = B2KShhReco( + 'B2KShhDD', + reco_daughters, + extended_hypos, False, True, False, + PP2MCs = [ 'Relations/Rec/ProtoP/Charged'] , + Inputs = [ inputLocationDD ] + ) + algB2KShhLL = B2KShhReco( + 'B2KShhLL', + reco_daughters, + extended_hypos, False, True, False, + PP2MCs = [ 'Relations/Rec/ProtoP/Charged'] , + Inputs = [ inputLocationLL ] + ) + + userSeq = gaudi.algorithm('GaudiSequencer/DaVinciUserSequence' , True ) + #userSeq.Members += [ "EventNodeKiller/Stripkiller" ] + userSeq.Members += [ "GaudiSequencer/StrippingGlobal" ] + userSeq.Members += [ sr.name() ] + userSeq.Members += [ algB2KShhDD.name() ] + userSeq.Members += [ algB2KShhLL.name() ] + if saveDst: + userSeq.Members += [ 'GaudiSequencer/MyDSTWriterMainSeq' ] + + return SUCCESS + +############# + +if '__main__' == __name__ : + + datafiles = [ + 'PFN:/data/lhcb/phsdba/B2KShh/DST/MC/2011/Bd2etapKS-MagUp-Pythia8/00030589_00000001_1.allstreams.dst' + ] + + pars = {} + pars[ 'mode' ] = 'Bd2etapKS' + pars[ 'whichMC' ] = '2011' + pars[ 'stripping' ] = 'Stripping20r1p1' + + configure( datafiles, params = pars, castor=False ) + + run(-1) + +############# + diff --git a/Phys/B2KShh/options/bender_B2KShh_stripping_signalMC.py b/Phys/B2KShh/options/bender_B2KShh_stripping_signalMC.py new file mode 100644 index 0000000..33a0cb3 --- /dev/null +++ 
b/Phys/B2KShh/options/bender_B2KShh_stripping_signalMC.py @@ -0,0 +1,220 @@ +#!/usr/bin/env python + +""" +Bender module to run the following sequence over B2KShh signal MC samples: +- run an algorithm to store the MC truth DP position (and other info) for all generated events +- run a particular Stripping version extracted from the StrippingArchive or from a local config in StrippingSelections and StrippingSettings +- run an algorithm to store the reco and MC truth-matched info for all Stripped candidates +""" + +from Bender.Main import * + +def configure( datafiles, catalogues = [], params = {}, castor = False ) : + + #======= B->KShh Configuration and Setup =========# + btype = params.get( 'btype', 'Bd' ) + track1 = params.get( 'track1', 'K' ) + track2 = params.get( 'track2', 'K' ) + v0type = params.get( 'v0type', 'KS0' ) + whichMC = params.get( 'whichMC', '2011' ) + whichStripping = params.get( 'stripping', 'Stripping20r1' ) + isXGen = params.get( 'isXGen', False ) + modesuffix = params.get( 'modesuffix', '' ) + stripFromArchiveOrLocal = params.get( 'stripFromArchiveOrLocal', 'Archive' ) + saveDst = params.get( 'saveDst', False ) + printFreq = params.get( 'printFreq', 1000 ) + dddbtag = params.get( 'dddbtag', '' ) + conddbtag = params.get( 'conddbtag', '' ) + extended_hypos = params.get( 'extended_hypos', False ) + #=================================================# + + modeName = btype+'2'+track1+track2+v0type + mode = modeName + modesuffix + mc_daughters = [ track1, track2, v0type ] + + # decay descriptors can be provided if running over resonant signal MC, e.g. 
B0 -> K*+ pi-; K*+ -> KS0 pi+ + mc_decay_descriptors = [] + + knownMCTypes = [ '2011', '2012' ] + + if whichMC not in knownMCTypes : + e = Exception('Unsupported MC version') + raise e + + knownStrippings = [ 'Stripping20', 'Stripping20r1', 'Stripping20r0p1', 'Stripping20r1p1' ] + + if whichStripping not in knownStrippings : + e = Exception('Unsupported stripping version') + raise e + + strippingYear = {} + strippingYear['Stripping20'] = '2012' + strippingYear['Stripping20r1'] = '2011' + strippingYear['Stripping20r0p1'] = '2012' + strippingYear['Stripping20r1p1'] = '2011' + + if whichMC != strippingYear[whichStripping] : + e = Exception('Mismatch between stripping version and year') + raise e + + strippingNames = {} + strippingNames['Stripping20'] = 'B2KShh' + strippingNames['Stripping20r1'] = 'B2KShh' + strippingNames['Stripping20r0p1'] = 'B2KSKpi' + strippingNames['Stripping20r1p1'] = 'B2KSKpi' + + strippingName = strippingNames[whichStripping] + + inputLocationDD = 'Phys/'+strippingName+'DDLine/Particles' + inputLocationLL = 'Phys/'+strippingName+'LLLine/Particles' + + reco_daughters = [ 'pi', 'pi', 'KS0' ] + if strippingName == 'B2KSKpi' : + reco_daughters = [ 'K', 'pi', 'KS0' ] + + # Configuration of Stripping + if stripFromArchiveOrLocal == 'Archive' : + from StrippingArchive import Utils + elif stripFromArchiveOrLocal == 'Local' : + from StrippingSelections import Utils + else : + e = Exception('Unknown Stripping config location') + raise e + lb = Utils.lineBuilder(whichStripping,strippingName) + + from StrippingConf.StrippingStream import StrippingStream + stream1 = StrippingStream("Bhadron") + stream1.appendLines( lb.lines() ) + + from StrippingConf.Configuration import StrippingConf + # was trying new approach with the EventNodeKiller - but it didn't seem to work :( + sc = StrippingConf( HDRLocation = "ReStrip", Streams = [ stream1 ] ) + #sc = StrippingConf( Streams = [ stream1 ] ) + + for stream in sc.activeStreams() : + for line in stream.lines : + 
print line.name(),' output=',line.outputLocation() + + from Configurables import StrippingReport + sr = StrippingReport(Selections = sc.selections()) + sr.ReportFrequency = printFreq + + # Configuration of the EventNodeKiller to remove the old stripping reports + #from Configurables import EventNodeKiller + #eventNodeKiller = EventNodeKiller('Stripkiller') + #eventNodeKiller.Nodes = [ '/Event/AllStreams', '/Event/Strip' ] + + # Configuration of DaVinci + from Configurables import DaVinci + + daVinci = DaVinci() + daVinci.DataType = whichMC + daVinci.Simulation = True + daVinci.Lumi = False + daVinci.InputType = "DST" + daVinci.EvtMax = -1 + daVinci.PrintFreq = printFreq + #daVinci.appendToMainSequence( [ eventNodeKiller ] ) + + ## try to get the tags from Rec/Header + if dddbtag != '' and conddbtag != '' : + daVinci.DDDBtag = dddbtag + daVinci.CondDBtag = conddbtag + else : + from BenderTools.GetDBtags import getDBTags + tags = getDBTags ( datafiles[0] , castor ) + + logger.info ( 'Extract tags from DATA : %s' % tags ) + if tags.has_key ( 'DDDB' ) and tags ['DDDB'] : + daVinci.DDDBtag = tags['DDDB' ] + logger.info ( 'Set DDDB %s ' % daVinci.DDDBtag ) + if tags.has_key ( 'CONDDB' ) and tags ['CONDDB'] : + daVinci.CondDBtag = tags['CONDDB'] + logger.info ( 'Set CONDDB %s ' % daVinci.CondDBtag ) + if tags.has_key ( 'SIMCOND' ) and tags ['SIMCOND'] : + daVinci.CondDBtag = tags['SIMCOND'] + logger.info ( 'Set SIMCOND %s ' % daVinci.CondDBtag ) + + magtype = "MagUp" + if "md" in daVinci.CondDBtag : + magtype = "MagDown" + + daVinci.TupleFile = mode+'-MC-'+whichMC+'-'+magtype+'-'+whichStripping+'-withMCtruth.root' + + # Configuration of SelDSTWriter + try : + from DSTWriters.__dev__.Configuration import SelDSTWriter + except ImportError : + from DSTWriters.Configuration import SelDSTWriter + + suffix = mode+'-MC-'+whichMC+'-'+magtype+'-'+whichStripping + + dstWriter = SelDSTWriter( "MyDSTWriter", + OutputFileSuffix = suffix, + SelectionSequences = sc.activeStreams() + ) + 
+ seq = sc.sequence() + + setData( datafiles, catalogues ) + + gaudi = appMgr() + + from B2KShh.MCTruthAlgo import B2KShhMCTruth + from B2KShh.RecoAlgo import B2KShhReco + + algGenMCTruth = B2KShhMCTruth( modeName, btype, track1, track2, v0type, isXGen) + + algB2KShhDD = B2KShhReco( + 'B2KShhDD', + reco_daughters, + extended_hypos, False, True, True, + mc_daughters, + mc_decay_descriptors, + PP2MCs = [ 'Relations/Rec/ProtoP/Charged'] , + Inputs = [ inputLocationDD ] + ) + algB2KShhLL = B2KShhReco( + 'B2KShhLL', + reco_daughters, + extended_hypos, False, True, True, + mc_daughters, + mc_decay_descriptors, + PP2MCs = [ 'Relations/Rec/ProtoP/Charged'] , + Inputs = [ inputLocationLL ] + ) + + userSeq = gaudi.algorithm('GaudiSequencer/DaVinciUserSequence' , True ) + #userSeq.Members += [ "EventNodeKiller/Stripkiller" ] + userSeq.Members += [ algGenMCTruth.name() ] + userSeq.Members += [ "GaudiSequencer/StrippingGlobal" ] + userSeq.Members += [ sr.name() ] + userSeq.Members += [ algB2KShhDD.name() ] + userSeq.Members += [ algB2KShhLL.name() ] + if saveDst: + userSeq.Members += [ 'GaudiSequencer/MyDSTWriterMainSeq' ] + + return SUCCESS + +############# + +if '__main__' == __name__ : + + datafiles = [ + 'PFN:/data/lhcb/phsdba/B2KShh/DST/MC/2011/Bd2KSKK-MagDown-Pythia6/00029722_00000001_1.allstreams.dst' + ] + + pars = {} + pars[ 'btype' ] = 'Bd' + pars[ 'track1' ] = 'K' + pars[ 'track2' ] = 'K' + pars[ 'v0type' ] = 'KS0' + pars[ 'whichMC' ] = '2011' + pars[ 'stripping' ] = 'Stripping20r1p1' + + configure( datafiles, params = pars, castor=False ) + + run(-1) + +############# + diff --git a/Phys/B2KShh/options/bender_Lb2V0hh_data.py b/Phys/B2KShh/options/bender_Lb2V0hh_data.py new file mode 100644 index 0000000..4cda203 --- /dev/null +++ b/Phys/B2KShh/options/bender_Lb2V0hh_data.py @@ -0,0 +1,125 @@ +#!/usr/bin/env python + +""" +Bender module to run the following sequence over B2KShh stripped data samples: +- run an algorithm to store the reco info for all Stripped 
from Bender.Main import *
from Gaudi.Configuration import *

def configure( datafiles, catalogues = [], params = {}, castor = False ) :

    #======= B->KShh Configuration and Setup =========#
    magtype = params.get( 'magtype', 'MagDown' )
    whichStripping = params.get( 'stripping', 'Stripping20' )
    extended_hypos = params.get( 'extended_hypos', False )
    #=================================================#

    knownMagTypes = [ 'MagDown', 'MagUp' ]

    if magtype not in knownMagTypes :
        e = Exception('Unsupported magnet setting: ' + magtype)
        raise e

    known2011StrippingVersions = [ 'Stripping20r1', 'Stripping20r1p2' ]
    known2012StrippingVersions = [ 'Stripping20', 'Stripping20r0p2' ]

    whichData = ''
    if whichStripping in known2011StrippingVersions:
        whichData = '2011'
    elif whichStripping in known2012StrippingVersions:
        whichData = '2012'
    else :
        e = Exception('Unsupported Stripping version: ' + whichStripping)
        raise e

    from Configurables import DaVinci

    daVinci = DaVinci()
    daVinci.DataType = whichData
    daVinci.Simulation = False
    daVinci.Lumi = True
    daVinci.InputType = "MDST"
    daVinci.TupleFile = 'Lb2Lhh-Collision'+whichData[2:]+'-'+magtype+'-'+whichStripping+'.root'
    daVinci.EvtMax = -1
    daVinci.PrintFreq = 10000

    from Configurables import CondDB
    CondDB().LatestGlobalTagByDataType = whichData

    setData( datafiles, catalogues )

    stream = {}
    stream['Stripping20'] = 'Bhadron'
    stream['Stripping20r1'] = 'Bhadron'
    stream['Stripping20r0p2'] = 'Bhadron'
    stream['Stripping20r1p2'] = 'Bhadron'

    teslocation = '/Event/'+stream[whichStripping]+'/'
    daVinci.RootInTES = teslocation

    filterStrip = "HLT_PASS_RE('StrippingLb2V0hh[DL]{2}LineDecision')"

    inputLocationDD = 'Phys/Lb2V0hhDDLine/Particles'
    inputLocationLL = 'Phys/Lb2V0hhLLLine/Particles'

    h1type = 'pi'
    h2type = 'pi'

    from PhysConf.Filters import LoKi_Filters

    filterVoid = "EXISTS('/Event/Strip/Phys/DecReports')"
    filterAlg = LoKi_Filters( VOID_Code = filterVoid, STRIP_Code = filterStrip )
    filters = filterAlg.filters('Filters')
    filters.reverse()
    daVinci.EventPreFilters = filters

    reco_daughters = [ h1type, h2type, 'Lambda0' ]

    from B2KShh.RecoAlgo import B2KShhReco

    gaudi = appMgr()

    algLb2LhhDD = B2KShhReco(
        'Lb2LhhDD',
        reco_daughters,
        extended_hypos, False, False, False,
        RootInTES = teslocation,
        Inputs = [ inputLocationDD ]
        )
    algLb2LhhLL = B2KShhReco(
        'Lb2LhhLL',
        reco_daughters,
        extended_hypos, False, False, False,
        RootInTES = teslocation,
        Inputs = [ inputLocationLL ]
        )

    userSeq = gaudi.algorithm('GaudiSequencer/DaVinciUserSequence' , True )

    userSeq.Members += [ algLb2LhhDD.name() ]
    userSeq.Members += [ algLb2LhhLL.name() ]

    return SUCCESS

#############

if '__main__' == __name__ :

    datafiles = [
        #'PFN:/data/lhcb/phsdba/B2KShh/DST/Data/2012-Stripping20-MagDown/00020198_00000224_1.bhadron.mdst'
        'PFN:/data/lhcb/phsdba/B2KShh/DST/Data/2012-Stripping20r0p2-MagDown/00030264_00004002_1.bhadron.mdst'
        ]

    pars = {}
    #pars[ 'stripping' ] = 'Stripping20'
    pars[ 'stripping' ] = 'Stripping20r0p2'
    pars[ 'magtype' ] = 'MagDown'

    configure( datafiles, params = pars, castor=False )

    run(-1)

#############

# ===========================================================================
# File boundary (from patch header):
#   Phys/B2KShh/options/bender_Lb2V0hh_signalMC.py   (new file)
# ===========================================================================

#!/usr/bin/env python

"""
Bender module to run the following sequence over Lb2V0hh signal MC samples:
- run an algorithm to store the MC truth DP position (and other info) for all generated events
- run an algorithm to store the reco and MC truth-matched info for all Stripped candidates
"""

from Bender.Main import *

def configure( datafiles, catalogues = [], params = {}, castor = False ) :

    #======= B->KShh Configuration and Setup =========#
    btype = params.get( 'btype', 'Lb' )
'btype', 'Lb' ) + track1 = params.get( 'track1', 'pi' ) + track2 = params.get( 'track2', 'pi' ) + v0type = params.get( 'v0type', 'Lz' ) + whichMC = params.get( 'whichMC', '2011' ) + whichStripping = params.get( 'stripping', 'Stripping20r1' ) + isXGen = params.get( 'isXGen', False ) + printFreq = params.get( 'printFreq', 1000 ) + dddbtag = params.get( 'dddbtag', '' ) + conddbtag = params.get( 'conddbtag', '' ) + extended_hypos = params.get( 'extended_hypos', False ) + #=================================================# + + mode = btype+'2'+track1+track2+v0type + mc_daughters = [ track1, track2, v0type ] + + # decay descriptors can be provided if running over resonant signal MC, e.g. B0 -> K*+ pi-; K*+ -> KS0 pi+ + mc_decay_descriptors = [] + + reco_daughters = [ 'pi', 'pi', 'Lambda0' ] + + knownMCTypes = [ '2011', '2012' ] + + if whichMC not in knownMCTypes : + e = Exception('Unsupported MC version') + raise e + + nativeStrippingVersion = {} + nativeStrippingVersion['2011'] = 'Stripping20r1' + nativeStrippingVersion['2012'] = 'Stripping20' + + if whichStripping != nativeStrippingVersion[ whichMC ] : + e = Exception('Requested stripping version %s is not the native version for this MC %s, you need to use the version of the script that will first re-strip the MC.' 
% (whichStripping, whichMC) ) + raise e + + inputLocationDD = '/Event/AllStreams/Phys/Lb2V0hhDDLine/Particles' + inputLocationLL = '/Event/AllStreams/Phys/Lb2V0hhLLLine/Particles' + + + # Configuration of DaVinci + from Configurables import DaVinci + + daVinci = DaVinci() + daVinci.DataType = whichMC + daVinci.Simulation = True + daVinci.Lumi = False + daVinci.InputType = "DST" + daVinci.EvtMax = -1 + daVinci.PrintFreq = printFreq + + ## try to get the tags from Rec/Header + if dddbtag != '' and conddbtag != '' : + daVinci.DDDBtag = dddbtag + daVinci.CondDBtag = conddbtag + else : + from BenderTools.GetDBtags import getDBTags + tags = getDBTags ( datafiles[0] , castor ) + + logger.info ( 'Extract tags from DATA : %s' % tags ) + if tags.has_key ( 'DDDB' ) and tags ['DDDB'] : + daVinci.DDDBtag = tags['DDDB' ] + logger.info ( 'Set DDDB %s ' % daVinci.DDDBtag ) + if tags.has_key ( 'CONDDB' ) and tags ['CONDDB'] : + daVinci.CondDBtag = tags['CONDDB'] + logger.info ( 'Set CONDDB %s ' % daVinci.CondDBtag ) + if tags.has_key ( 'SIMCOND' ) and tags ['SIMCOND'] : + daVinci.CondDBtag = tags['SIMCOND'] + logger.info ( 'Set SIMCOND %s ' % daVinci.CondDBtag ) + + magtype = "MagUp" + if "md" in daVinci.CondDBtag : + magtype = "MagDown" + + daVinci.TupleFile = mode+'-MC-'+whichMC+'-'+magtype+'-'+whichStripping+'-withMCtruth.root' + + setData( datafiles, catalogues ) + + gaudi = appMgr() + + from B2KShh.MCTruthAlgo import B2KShhMCTruth + from B2KShh.RecoAlgo import B2KShhReco + + algGenMCTruth = B2KShhMCTruth( mode, btype, track1, track2, v0type, isXGen ) + + algLb2V0hhDD = B2KShhReco( + 'Lb2LzhhDD', + reco_daughters, + extended_hypos, False, True, True, + mc_daughters, + mc_decay_descriptors, + PP2MCs = [ 'Relations/Rec/ProtoP/Charged'] , + Inputs = [ inputLocationDD ] + ) + algLb2V0hhLL = B2KShhReco( + 'Lb2LzhhLL', + reco_daughters, + extended_hypos, False, True, True, + mc_daughters, + mc_decay_descriptors, + PP2MCs = [ 'Relations/Rec/ProtoP/Charged'] , + Inputs = [ 
inputLocationLL ] + ) + + userSeq = gaudi.algorithm('GaudiSequencer/DaVinciUserSequence' , True ) + userSeq.Members += [ algGenMCTruth.name() ] + userSeq.Members += [ algLb2V0hhDD.name() ] + userSeq.Members += [ algLb2V0hhLL.name() ] + + return SUCCESS + +############# + +if '__main__' == __name__ : + + datafiles = [ + 'PFN:/data/lhcb/phsdba/Lb2Lhh/DST/MC2011/Lb2Lambdapipi-MagDown-Pythia6/00028874_00000010_1.allstreams.dst' + ] + + pars = {} + pars[ 'btype' ] = 'Lb' + pars[ 'track1' ] = 'pi' + pars[ 'track2' ] = 'pi' + pars[ 'v0type' ] = 'Lambda0' + pars[ 'whichMC' ] = '2011' + pars[ 'stripping' ] = 'Stripping20r1' + + configure( datafiles, params = pars, castor=False ) + + run(-1) + +############# + diff --git a/Phys/B2KShh/options/bender_Lb2V0hh_stripping_signalMC.py b/Phys/B2KShh/options/bender_Lb2V0hh_stripping_signalMC.py new file mode 100644 index 0000000..bf7b96f --- /dev/null +++ b/Phys/B2KShh/options/bender_Lb2V0hh_stripping_signalMC.py @@ -0,0 +1,218 @@ +#!/usr/bin/env python + +""" +Bender module to run the following sequence over B2KShh signal MC samples: +- run an algorithm to store the MC truth DP position (and other info) for all generated events +- run a particular Stripping version extracted from the StrippingArchive or from a local config in StrippingSelections and StrippingSettings +- run an algorithm to store the reco and MC truth-matched info for all Stripped candidates +""" + +from Bender.Main import * + +def configure( datafiles, catalogues = [], params = {}, castor = False ) : + + #======= B->KShh Configuration and Setup =========# + btype = params.get( 'btype', 'Lb' ) + track1 = params.get( 'track1', 'pi' ) + track2 = params.get( 'track2', 'pi' ) + v0type = params.get( 'v0type', 'Lz' ) + whichMC = params.get( 'whichMC', '2011' ) + whichStripping = params.get( 'stripping', 'Stripping20r1p2' ) + isXGen = params.get( 'isXGen', False ) + modesuffix = params.get( 'modesuffix', '' ) + stripFromArchiveOrLocal = params.get( 
'stripFromArchiveOrLocal', 'Archive' ) + saveDst = params.get( 'saveDst', False ) + printFreq = params.get( 'printFreq', 1000 ) + dddbtag = params.get( 'dddbtag', '' ) + conddbtag = params.get( 'conddbtag', '' ) + extended_hypos = params.get( 'extended_hypos', False ) + #=================================================# + + modeName = btype+'2'+track1+track2+v0type + mode = modeName + modesuffix + mc_daughters = [ track1, track2, v0type ] + + # decay descriptors can be provided if running over resonant signal MC, e.g. B0 -> K*+ pi-; K*+ -> KS0 pi+ + mc_decay_descriptors = [] + + knownMCTypes = [ '2011', '2012' ] + + if whichMC not in knownMCTypes : + e = Exception('Unsupported MC version') + raise e + + knownStrippings = [ 'Stripping20', 'Stripping20r1', 'Stripping20r0p2', 'Stripping20r1p2' ] + + if whichStripping not in knownStrippings : + e = Exception('Unsupported stripping version') + raise e + + strippingYear = {} + strippingYear['Stripping20'] = '2012' + strippingYear['Stripping20r1'] = '2011' + strippingYear['Stripping20r0p2'] = '2012' + strippingYear['Stripping20r1p2'] = '2011' + + if whichMC != strippingYear[whichStripping] : + e = Exception('Mismatch between stripping version and year') + raise e + + strippingNames = {} + strippingNames['Stripping20'] = 'Lb2V0hh' + strippingNames['Stripping20r1'] = 'Lb2V0hh' + strippingNames['Stripping20r0p2'] = 'Lb2V0h' + strippingNames['Stripping20r1p2'] = 'Lb2V0h' + + strippingName = strippingNames[whichStripping] + + inputLocationDD = 'Phys/Lb2V0hhDDLine/Particles' + inputLocationLL = 'Phys/Lb2V0hhLLLine/Particles' + + reco_daughters = [ 'pi', 'pi', 'Lambda0' ] + + # Configuration of Stripping + if stripFromArchiveOrLocal == 'Archive' : + from StrippingArchive import Utils + elif stripFromArchiveOrLocal == 'Local' : + from StrippingSelections import Utils + else : + e = Exception('Unknown Stripping config location') + raise e + lb = Utils.lineBuilder(whichStripping,strippingName) + + from 
StrippingConf.StrippingStream import StrippingStream + stream1 = StrippingStream("Bhadron") + stream1.appendLines( lb.lines() ) + + from StrippingConf.Configuration import StrippingConf + # was trying new approach with the EventNodeKiller - but it didn't seem to work :( + sc = StrippingConf( HDRLocation = "ReStrip", Streams = [ stream1 ] ) + #sc = StrippingConf( Streams = [ stream1 ] ) + + for stream in sc.activeStreams() : + for line in stream.lines : + print line.name(),' output=',line.outputLocation() + + from Configurables import StrippingReport + sr = StrippingReport(Selections = sc.selections()) + sr.ReportFrequency = printFreq + + # Configuration of the EventNodeKiller to remove the old stripping reports + #from Configurables import EventNodeKiller + #eventNodeKiller = EventNodeKiller('Stripkiller') + #eventNodeKiller.Nodes = [ '/Event/AllStreams', '/Event/Strip' ] + + # Configuration of DaVinci + from Configurables import DaVinci + + daVinci = DaVinci() + daVinci.DataType = whichMC + daVinci.Simulation = True + daVinci.Lumi = False + daVinci.InputType = "DST" + daVinci.EvtMax = -1 + daVinci.PrintFreq = printFreq + #daVinci.appendToMainSequence( [ eventNodeKiller ] ) + + ## try to get the tags from Rec/Header + if dddbtag != '' and conddbtag != '' : + daVinci.DDDBtag = dddbtag + daVinci.CondDBtag = conddbtag + else : + from BenderTools.GetDBtags import getDBTags + tags = getDBTags ( datafiles[0] , castor ) + + logger.info ( 'Extract tags from DATA : %s' % tags ) + if tags.has_key ( 'DDDB' ) and tags ['DDDB'] : + daVinci.DDDBtag = tags['DDDB' ] + logger.info ( 'Set DDDB %s ' % daVinci.DDDBtag ) + if tags.has_key ( 'CONDDB' ) and tags ['CONDDB'] : + daVinci.CondDBtag = tags['CONDDB'] + logger.info ( 'Set CONDDB %s ' % daVinci.CondDBtag ) + if tags.has_key ( 'SIMCOND' ) and tags ['SIMCOND'] : + daVinci.CondDBtag = tags['SIMCOND'] + logger.info ( 'Set SIMCOND %s ' % daVinci.CondDBtag ) + + magtype = "MagUp" + if "md" in daVinci.CondDBtag : + magtype = "MagDown" 
+ + daVinci.TupleFile = mode+'-MC-'+whichMC+'-'+magtype+'-'+whichStripping+'-withMCtruth.root' + + # Configuration of SelDSTWriter + try : + from DSTWriters.__dev__.Configuration import SelDSTWriter + except ImportError : + from DSTWriters.Configuration import SelDSTWriter + + suffix = mode+'-MC-'+whichMC+'-'+magtype+'-'+whichStripping + + dstWriter = SelDSTWriter( "MyDSTWriter", + OutputFileSuffix = suffix, + SelectionSequences = sc.activeStreams() + ) + + seq = sc.sequence() + + setData( datafiles, catalogues ) + + gaudi = appMgr() + + from B2KShh.MCTruthAlgo import B2KShhMCTruth + from B2KShh.RecoAlgo import B2KShhReco + + algGenMCTruth = B2KShhMCTruth( modeName, btype, track1, track2, v0type, isXGen) + + algLb2V0hhDD = B2KShhReco( + 'Lb2LzhhDD', + reco_daughters, + extended_hypos, False, True, True, + mc_daughters, + mc_decay_descriptors, + PP2MCs = [ 'Relations/Rec/ProtoP/Charged'] , + Inputs = [ inputLocationDD ] + ) + algLb2V0hhLL = B2KShhReco( + 'Lb2LzhhLL', + reco_daughters, + extended_hypos, False, True, True, + mc_daughters, + mc_decay_descriptors, + PP2MCs = [ 'Relations/Rec/ProtoP/Charged'] , + Inputs = [ inputLocationLL ] + ) + + userSeq = gaudi.algorithm('GaudiSequencer/DaVinciUserSequence' , True ) + #userSeq.Members += [ "EventNodeKiller/Stripkiller" ] + userSeq.Members += [ algGenMCTruth.name() ] + userSeq.Members += [ "GaudiSequencer/StrippingGlobal" ] + userSeq.Members += [ sr.name() ] + userSeq.Members += [ algLb2V0hhDD.name() ] + userSeq.Members += [ algLb2V0hhLL.name() ] + if saveDst: + userSeq.Members += [ 'GaudiSequencer/MyDSTWriterMainSeq' ] + + return SUCCESS + +############# + +if '__main__' == __name__ : + + datafiles = [ + 'PFN:/data/lhcb/phsdba/Lb2Lhh/DST/MC2011/Lb2Lambdapipi-MagDown-Pythia6/00028874_00000010_1.allstreams.dst' + ] + + pars = {} + pars[ 'btype' ] = 'Lb' + pars[ 'track1' ] = 'pi' + pars[ 'track2' ] = 'pi' + pars[ 'v0type' ] = 'Lambda0' + pars[ 'whichMC' ] = '2011' + pars[ 'stripping' ] = 'Stripping20r1p2' + + configure( 
               datafiles, params = pars, castor=False )

    run(-1)

#############

diff --git a/Phys/B2KShh/python/B2KShh/DecayParticle.py b/Phys/B2KShh/python/B2KShh/DecayParticle.py
new file mode 100644
index 0000000..d1ee24d
--- /dev/null
+++ b/Phys/B2KShh/python/B2KShh/DecayParticle.py

class DecayParticle(object) :
    """
    Lightweight description of one particle in a decay chain, used by
    GenericMCTruth to build decay descriptors and ntuple branch names.
    """

    def __init__( self, pdgCode, ndaug_expected, pluscc, branch_name ) :
        """
        pdgCode        - PDG ID of this particle
        ndaug_expected - number of daughters that will be attached via addDaughter
        pluscc         - whether the charge-conjugate mode should also be considered
        branch_name    - name used for this particle's ntuple branches
        """

        self.pdgCode = pdgCode
        self._ndaug_expected = ndaug_expected
        self.daughters = []
        self.pluscc = pluscc
        self.branch_name = branch_name
        self.special = False


    def addDaughter( self, daughter ) :
        """
        Append a daughter DecayParticle; complains (and ignores the call)
        once the expected number of daughters has been reached.
        """

        if len( self.daughters ) == self._ndaug_expected :
            print "Problem adding daughter - already have expected number"
            return

        self.daughters.append( daughter )

    def specialDescriptor( self, special = True ) :
        """
        Mark this particle as needing the 'special' (explicit particle and
        conjugate) form of the decay descriptor rather than the []CC form.
        """
        self.special = special


diff --git a/Phys/B2KShh/python/B2KShh/GenericMCTruthAlgo.py b/Phys/B2KShh/python/B2KShh/GenericMCTruthAlgo.py
new file mode 100644
index 0000000..9e66bf0
--- /dev/null
+++ b/Phys/B2KShh/python/B2KShh/GenericMCTruthAlgo.py
#!/usr/bin/env python

from Bender.Main import *
from Bender.MainMC import *
from ROOT import TLorentzVector


# Begin GenericMCTruth Class Definition

class GenericMCTruth(AlgoMC) :

    """
    Algorithm to perform ntupling of MC truth information.
    """

    def __init__( self, name, decayHead, isXGen, **kwargs ) :
        # decayHead is a DecayParticle describing the head of the decay chain;
        # isXGen is True when running over generator-level (xgen) samples
        super(GenericMCTruth,self).__init__( name, **kwargs )

        self.decay_head = decayHead
        self.xgen = isXGen


    def form_decay_descriptor( self ) :
        """
        Write the decay descriptor for the decay mode of the MC being run over.
        Also builds the maps from PDG code to ntuple branch name for the
        daughters and granddaughters (including charge conjugates where
        requested via pluscc).
        """

        self.par_branch_name = self.decay_head.branch_name
        self.daug_branch_names = {}
        self.gdaug_branch_names = {}
        # NOTE(review): the nesting of the pluscc branches below was
        # reconstructed from a whitespace-mangled source - confirm against
        # the original file before relying on the exact conjugate handling.
        for daug in self.decay_head.daughters :
            self.daug_branch_names[ daug.pdgCode ] = daug.branch_name
            self.gdaug_branch_names[ daug.pdgCode ] = {}
            if daug.pluscc :
                self.daug_branch_names[ -1*daug.pdgCode ] = daug.branch_name
                self.gdaug_branch_names[ -1*daug.pdgCode ] = {}
            for gdaug in daug.daughters :
                self.gdaug_branch_names[ daug.pdgCode ][ gdaug.pdgCode ] = gdaug.branch_name
                if gdaug.pluscc :
                    self.gdaug_branch_names[ daug.pdgCode ][ -1*gdaug.pdgCode ] = gdaug.branch_name
                if daug.pluscc :
                    self.gdaug_branch_names[ -1*daug.pdgCode ][ gdaug.pdgCode ] = gdaug.branch_name
                    if gdaug.pluscc :
                        self.gdaug_branch_names[ -1*daug.pdgCode ][ -1*gdaug.pdgCode ] = gdaug.branch_name

        par_id = LHCb.ParticleID( self.decay_head.pdgCode )
        par_name = self.partpropsvc.find( par_id ).name()
        par_conj_name = self.partpropsvc.find( par_id ).anti().name()

        if self.decay_head.special :
            # explicit particle/conjugate form: [( P => d1 d2 ... ), ( P~ => d1~ d2~ ... )]
            if par_name == par_conj_name :
                ddhead = par_name+' => '
            else :
                ddhead = '( '+par_name+' | '+par_conj_name+' ) => '
            ddpart1 = '( '+ddhead
            ddpart2 = '( '+ddhead
            for daug in self.decay_head.daughters :
                daug_id = LHCb.ParticleID( daug.pdgCode )
                daug_name = self.partpropsvc.find( daug_id ).name()
                daug_conj_name = self.partpropsvc.find( daug_id ).anti().name()
                ddpart1 += ' '+daug_name
                ddpart2 += ' '+daug_conj_name
            ddpart1 += ' )'
            ddpart2 += ' )'
            self.decay_descriptor = '['+ddpart1+', '+ddpart2+']'
        else :
            # standard form: [ P => d1 d2 ... ]CC
            self.decay_descriptor = '[ '+par_name+' =>'
            for daug in self.decay_head.daughters :
                daug_id = LHCb.ParticleID( daug.pdgCode )
                daug_name = self.partpropsvc.find( daug_id ).name()
                self.decay_descriptor += ' '+daug_name
            self.decay_descriptor += ' ]CC'

        self.Info( 'Will use the decay descriptor '+self.decay_descriptor )


    def initialize( self ) :

        sc = AlgoMC.initialize( self )
        if sc.isFailure() :
            return sc

        # get the particle property service
        self.partpropsvc = self.ppSvc()

        # form the decay descriptor
        self.form_decay_descriptor()

        # set up the reconstructible/reconstructed tools
        # (not meaningful for generator-level samples)
        if not self.xgen :
            self.recible = self.tool( cpp.IMCReconstructible, 'MCReconstructible' )
            self.rected = self.tool( cpp.IMCReconstructed, 'MCReconstructed' )

        return SUCCESS


    def reco_status_tuple( self, tuple, mcparticle, name ) :
        """
        Store the reconstructible/reconstructed status of an MC particle
        (-1 placeholders when the particle is missing or running on xgen)
        """

        if not mcparticle or self.xgen :
            tuple.column_int( name + '_Reconstructible', -1 )
            tuple.column_int( name + '_Reconstructed', -1 )
            return

        cat_ible = self.recible.reconstructible( mcparticle )
        cat_ted = self.rected.reconstructed( mcparticle )

        tuple.column_int( name + '_Reconstructible', int(cat_ible) )
        tuple.column_int( name + '_Reconstructed', int(cat_ted) )


    def mc_p4_tuple( self, tuple, mcparticle, name ) :
        """
        Store the id, charge, 4-momentum, mass, p_t and eta of an MC particle
        (placeholder values of -1/-1.1 are written when the particle is missing)
        """

        if not mcparticle :
            tuple.column_int( name + '_TRUEID', -1 )
            tuple.column_int( name + '_TRUEQ', -1 )
            tuple.column_double( name + '_TRUEP', -1.1 )
            tuple.column_double( name + '_TRUEPE', -1.1 )
            tuple.column_double( name + '_TRUEPX', -1.1 )
            tuple.column_double( name + '_TRUEPY', -1.1 )
            tuple.column_double( name + '_TRUEPZ', -1.1 )
            tuple.column_double( name + '_TRUEPT', -1.1 )
            tuple.column_double( name + '_TRUEETA', -1.1 )
            tuple.column_double( name + '_TRUEPHI', -1.1 )
            tuple.column_double( name + '_TRUETHETA', -1.1 )
            tuple.column_double( name + '_TRUEM', -1.1 )
            tuple.column_int( name + '_OSCIL', -1 )
            return

        tuple.column_int( name + '_TRUEID', int(MCID(mcparticle)) )
        tuple.column_int( name + '_TRUEQ', int(MC3Q(mcparticle)/3) )
        tuple.column_double( name + '_TRUEP', MCP(mcparticle) )
        tuple.column_double( name + '_TRUEPE', MCE(mcparticle) )
        tuple.column_double( name + '_TRUEPX', MCPX(mcparticle) )
        tuple.column_double( name + '_TRUEPY', MCPY(mcparticle) )
        tuple.column_double( name + '_TRUEPZ', MCPZ(mcparticle) )
        tuple.column_double( name + '_TRUEPT', MCPT(mcparticle) )
        tuple.column_double( name + '_TRUEETA', MCETA(mcparticle) )
        tuple.column_double( name + '_TRUEPHI', MCPHI(mcparticle) )
        tuple.column_double( name + '_TRUETHETA', MCTHETA(mcparticle) )
        tuple.column_double( name + '_TRUEM', MCM(mcparticle) )
        tuple.column_int( name + '_OSCIL', int(MCOSCILLATED(mcparticle)) )

    def mc_vtx_tuple( self, tuple, mcparticle, name ) :
        """
        Store vertex and lifetime info for the MC particle
        (placeholder values of -1.1 when the particle is missing)
        """

        if not mcparticle :
            tuple.column_double( name + '_TRUEORIGINVERTEX_X', -1.1 )
            tuple.column_double( name + '_TRUEORIGINVERTEX_Y', -1.1 )
            tuple.column_double( name + '_TRUEORIGINVERTEX_Z', -1.1 )
            tuple.column_double( name + '_TRUECTAU' , -1.1 )
            return

        tuple.column_double( name + '_TRUEORIGINVERTEX_X', MCVFASPF(MCVX)(mcparticle) )
        tuple.column_double( name + '_TRUEORIGINVERTEX_Y', MCVFASPF(MCVY)(mcparticle) )
        tuple.column_double( name + '_TRUEORIGINVERTEX_Z', MCVFASPF(MCVZ)(mcparticle) )
        tuple.column_double( name + '_TRUECTAU' , MCCTAU(mcparticle) )


    def analyse( self ) :
        """
        The method called in the event loop
        """

        # select MC particles
        cands = self.mcselect('cands', self.decay_descriptor)
        ncands = cands.size()

        if 0 == ncands :
            self.Warning( 'No MC candidates found in this event', SUCCESS )
            return SUCCESS

        # get the event header
        if not self.xgen:
            evthdr = self.get( '/Event/Rec/Header' )
        else :
            evthdr = self.get( '/Event/Gen/Header' )

        # create the ntuple
        tuple = self.nTuple( 'tupleMCTruth' )

        # loop through the candidates
        for cand in cands :

            # fill event information
            tuple.column_int( 'runNumber', evthdr.runNumber() )
            tuple.column_int( 'evtNumber', evthdr.evtNumber() )
            tuple.column_int( 'nCands', ncands )

            # get the ID and hence name of the parent
            candID = cand.particleID().pid()
            candname = self.par_branch_name

            # store parent information
            self.mc_p4_tuple( tuple, cand, candname )
            self.mc_vtx_tuple( tuple, cand, candname )
            self.reco_status_tuple( tuple, cand, candname )

            # loop through the daughters and store their information
            found_gdaug_names = []
            for daug in cand.children( True ) :

                daugID = daug.particleID().pid()
                if daugID not in self.daug_branch_names :
                    continue

                daugname = self.daug_branch_names[ daugID ]

                self.mc_p4_tuple( tuple, daug, daugname )
                self.mc_vtx_tuple( tuple, daug, daugname )
                self.reco_status_tuple( tuple, daug, daugname )

                tuple.column_int( daugname+'_NDAUG', daug.nChildren() )

                for gdaug in daug.children( True ) :

                    gdaugID = gdaug.particleID().pid()
                    if gdaugID not in self.gdaug_branch_names[ daugID ] :
                        continue

                    gdaugname = self.gdaug_branch_names[ daugID ][ gdaugID ]
                    found_gdaug_names.append( gdaugname )

                    self.mc_p4_tuple( tuple, gdaug, gdaugname )
                    self.mc_vtx_tuple( tuple, gdaug, gdaugname )
                    self.reco_status_tuple( tuple, gdaug, gdaugname )

            # write placeholder entries for any expected granddaughter
            # branches that were not matched in this candidate
            gdaugnames = []
            for daug in cand.children( True ) :

                daugID = daug.particleID().pid()
                if daugID not in self.daug_branch_names :
                    continue

                for name in self.gdaug_branch_names[ daugID ].values() :
                    gdaugnames.append( name )

            for name in found_gdaug_names :
                gdaugnames.remove( name )

            for name in gdaugnames :
                self.mc_p4_tuple( tuple, None, name )
                self.mc_vtx_tuple( tuple, None, name )
                self.reco_status_tuple( tuple, None, name )

            tuple.write()


        return SUCCESS

# End of GenericMCTruth Class Definition


diff --git a/Phys/B2KShh/python/B2KShh/MCTruthAlgo.py b/Phys/B2KShh/python/B2KShh/MCTruthAlgo.py
new file mode 100644
index 0000000..8ad77ac
--- /dev/null
+++ b/Phys/B2KShh/python/B2KShh/MCTruthAlgo.py
#!/usr/bin/env python

from Bender.Main import *
from Bender.MainMC import *
from ROOT import ( TLorentzVector, TMath )
from B2KShh.ThreeBodyKinematics import ThreeBodyKinematics
ThreeBodyKinematics


# Begin B2KShhMCTruth Class Definition

class B2KShhMCTruth(AlgoMC) :

    """
    Algorithm to perform ntupling of MC truth information for the B2KShh analyses.
    """

    def __init__( self, name, Btype, h1type, h2type, V0type, isXGen, **kwargs ) :
        super(B2KShhMCTruth,self).__init__( name, **kwargs )
        self.set_types( Btype, h1type, h2type, V0type, isXGen )

    def set_types( self, Btype, h1type, h2type, V0type, isXGen ):
        """
        Define the decay mode of the MC being run over:
        Btype = B0, B_s0 or Lambda_b0
        h1type = pi, K or p
        h2type = pi, K or p
        V0type = KS0 or Lambda0
        isXGen = true if xgen is supposed to be analyzed
        """

        self.xgen = isXGen
        # PDG code -> ntuple branch name maps for the decay products
        self.names = {}
        self.gdaugnames = {}


        # determine the parent type
        self.parentID = None

        if Btype == 'Bd' or Btype == 'B0' :
            self.parentID = LHCb.ParticleID(511)
            self.names[ 511] = 'Bd'
            self.names[-511] = 'Bd'

        elif Btype == 'Bs' or Btype == 'B_s0' :
            self.parentID = LHCb.ParticleID(531)
            self.names[ 531] = 'Bs'
            self.names[-531] = 'Bs'

        elif Btype == 'Lb' or Btype == 'Lambda_b0' :
            self.parentID = LHCb.ParticleID(5122)
            self.names[ 5122] = 'Lb'
            self.names[-5122] = 'Lb'

        elif Btype == 'Xib0' or Btype == 'Xi_b0' :
            self.parentID = LHCb.ParticleID(5232)
            self.names[ 5232] = 'Xib0'
            self.names[-5232] = 'Xib0'

        else :
            self.Warning( 'B type ('+Btype+') not recognised, setting to B0.', SUCCESS )
            self.parentID = LHCb.ParticleID(511)
            self.names[ 511] = 'Bd'
            self.names[-511] = 'Bd'

        # determine the V0 type
        self.daug3ID = None
        self.daug3ConjID = None

        if V0type == 'KS' or V0type == 'KS0' :
            # KS0 is self-conjugate
            self.daug3ID = LHCb.ParticleID(310)
            self.daug3ConjID = LHCb.ParticleID(310)
            self.names[310] = 'KS'
            self.gdaugnames[ 211] = 'KSpip'
            self.gdaugnames[-211] = 'KSpim'

        elif V0type == 'L' or V0type == 'Lambda0' :
            # baryonic parents pair with Lambda0, mesonic parents with Lambda~0
            if self.parentID.pid() == 5122 or self.parentID.pid() == 5232 :
                self.daug3ID = LHCb.ParticleID(3122)
                self.daug3ConjID = LHCb.ParticleID(-3122)
            else :
                self.daug3ID = LHCb.ParticleID(-3122)
                self.daug3ConjID = LHCb.ParticleID(3122)
            self.names[ 3122] = 'Lz'
            self.names[-3122] = 'Lz'
            self.gdaugnames[ 2212] = 'Lzp'
            self.gdaugnames[-2212] = 'Lzp'
            self.gdaugnames[ 211] = 'Lzpi'
            self.gdaugnames[-211] = 'Lzpi'

        else :
            self.Warning( 'V0 type ('+V0type+') not recognised, setting to KS0.', SUCCESS )
            self.daug3ID = LHCb.ParticleID(310)
            self.daug3ConjID = LHCb.ParticleID(310)
            self.names[310] = 'KS'
            self.gdaugnames[ 211] = 'KSpip'
            self.gdaugnames[-211] = 'KSpim'

        # determine the charged daughter types
        self.daug1ID = None
        self.daug2ID = None

        if h1type == 'pi' and h2type == 'pi':
            self.daug1ID = LHCb.ParticleID( 211)
            self.daug2ID = LHCb.ParticleID(-211)
            self.names[ 211] = 'h1'
            self.names[-211] = 'h2'

        elif h1type == 'K' and h2type == 'K':
            self.daug1ID = LHCb.ParticleID( 321)
            self.daug2ID = LHCb.ParticleID(-321)
            self.names[ 321] = 'h1'
            self.names[-321] = 'h2'

        elif h1type == 'p' and h2type == 'p':
            self.daug1ID = LHCb.ParticleID( 2212)
            self.daug2ID = LHCb.ParticleID(-2212)
            self.names[ 2212] = 'h1'
            self.names[-2212] = 'h2'

        elif h1type == 'K' and h2type == 'pi':
            # mixed hypotheses: both charges of each species map to the same branch
            self.daug1ID = LHCb.ParticleID( 321)
            self.daug2ID = LHCb.ParticleID(-211)
            self.names[ 321] = 'h1'
            self.names[-321] = 'h1'
            self.names[ 211] = 'h2'
            self.names[-211] = 'h2'

        elif h1type == 'p' and h2type == 'pi':
            self.daug1ID = LHCb.ParticleID(2212)
            self.daug2ID = LHCb.ParticleID(-211)
            self.names[ 2212] = 'h1'
            self.names[-2212] = 'h1'
            self.names[ 211] = 'h2'
            self.names[-211] = 'h2'

        elif h1type == 'p' and h2type == 'K':
            self.daug1ID = LHCb.ParticleID(2212)
            self.daug2ID = LHCb.ParticleID(-321)
            self.names[ 2212] = 'h1'
            self.names[-2212] = 'h1'
            self.names[ 321] = 'h2'
            self.names[-321] = 'h2'

        else :
            self.Warning( 'hh types ('+h1type+' and '+h2type+') not recognised, setting to pions.' , SUCCESS )
            self.daug1ID = LHCb.ParticleID( 211)
            self.daug2ID = LHCb.ParticleID(-211)
            self.names[ 211] = 'h1'
            self.names[-211] = 'h2'



    def check_types( self ) :
        """
        Sanity-check the configured decay: baryon number conservation and
        that the parent is heavy enough for the three daughters.  On success
        also constructs the ThreeBodyKinematics helper for DP calculations.
        """

        initB = 0
        if self.parentID.isBaryon() :
            initB = 1

        finalB = 0
        for id in ( self.daug1ID, self.daug2ID, self.daug3ID ) :
            if id.isBaryon() :
                if id.pid() > 0 :
                    finalB += 1
                else :
                    finalB -= 1

        if initB != finalB :
            self.Error( 'Initial and final state baryon numbers do not match, '+str(initB)+' != '+str(finalB) )
            return FAILURE

        daug1Mass = self.partpropsvc.find( self.daug1ID ).mass()
        daug2Mass = self.partpropsvc.find( self.daug2ID ).mass()
        daug3Mass = self.partpropsvc.find( self.daug3ID ).mass()
        parentMass = self.partpropsvc.find( self.parentID ).mass()

        if parentMass < ( daug1Mass + daug2Mass + daug3Mass ) :
            return FAILURE

        self.kinematics = ThreeBodyKinematics( daug1Mass, daug2Mass, daug3Mass, parentMass )

        return SUCCESS


    def form_decay_descriptor( self ) :
        """
        Build the MC decay descriptor string from the configured particle IDs.
        """

        par_name = self.partpropsvc.find( self.parentID ).name()
        daug1_name = self.partpropsvc.find( self.daug1ID ).name()
        daug2_name = self.partpropsvc.find( self.daug2ID ).name()
        daug3_name = self.partpropsvc.find( self.daug3ID ).name()

        par_conj_name = self.partpropsvc.find( self.parentID ).anti().name()
        daug1_conj_name = self.partpropsvc.find( self.daug1ID ).anti().name()
        daug2_conj_name = self.partpropsvc.find( self.daug2ID ).anti().name()
        daug3_conj_name = self.partpropsvc.find( self.daug3ID ).anti().name()

        if self.parentID.isBaryon() or self.daug1ID.abspid() == self.daug2ID.abspid() :
            self.decay_descriptor = '[ '+par_name+' => '+daug1_name+' '+daug2_name+' '+daug3_name+' ]CC'
        else :
            ddhead = '( '+par_name+' | '+par_conj_name+' ) => '
            ddpart1 = '( '+ddhead+' '+daug1_name+' '+daug2_name+' '+daug3_name+' )'
            ddpart2 = '( '+ddhead+' '+daug1_conj_name+' '+daug2_conj_name+' '+daug3_conj_name+' )'
            self.decay_descriptor = '['+ddpart1+', '+ddpart2+']'

        self.Info( 'Will use the decay descriptor '+self.decay_descriptor )


    def initialize( self ) :

        sc = AlgoMC.initialize( self )
        if sc.isFailure() :
            return sc

        # get the particle property service
        self.partpropsvc = self.ppSvc()

        # check the validity of the decay
        sc = self.check_types()
        if sc.isFailure() :
            return sc

        # form the decay descriptor
        self.form_decay_descriptor()

        # set up the reconstructible/reconstructed tools
        # (not meaningful for generator-level samples)
        if not self.xgen :
            self.recible = self.tool( cpp.IMCReconstructible, 'MCReconstructible' )
            self.rected = self.tool( cpp.IMCReconstructed, 'MCReconstructed' )

        return SUCCESS


    def reco_status_tuple( self, tuple, mcparticle, name ) :
        """
        Store the reconstructible/reconstructed status of an MC particle
        (-1 placeholders when the particle is missing or running on xgen)
        """

        if not mcparticle or self.xgen :
            tuple.column_int( name + '_Reconstructible', -1 )
            tuple.column_int( name + '_Reconstructed', -1 )
            return

        cat_ible = self.recible.reconstructible( mcparticle )
        cat_ted = self.rected.reconstructed( mcparticle )

        tuple.column_int( name + '_Reconstructible', int(cat_ible) )
        tuple.column_int( name + '_Reconstructed', int(cat_ted) )


    def mc_p4_tuple( self, tuple, mcparticle, name ) :
        """
        Store the id, charge, 4-momentum, mass, p_t and eta of an MC particle
        (placeholder values of -1/-1.1 are written when the particle is missing)
        """

        if not mcparticle :
            tuple.column_int( name + '_TRUEID', -1 )
            tuple.column_int( name + '_TRUEQ', -1 )
            tuple.column_double( name + '_TRUEP', -1.1 )
            tuple.column_double( name + '_TRUEPE', -1.1 )
            tuple.column_double( name + '_TRUEPX', -1.1 )
            tuple.column_double( name + '_TRUEPY', -1.1 )
            tuple.column_double( name + '_TRUEPZ', -1.1 )
            tuple.column_double( name + '_TRUEPT', -1.1 )
            tuple.column_double( name + '_TRUEETA', -1.1 )
            tuple.column_double( name + '_TRUEPHI', -1.1 )
            tuple.column_double( name + '_TRUETHETA', -1.1 )
            tuple.column_double( name + '_TRUEM', -1.1 )
            tuple.column_int( name + '_OSCIL', -1 )
            return

        tuple.column_int( name + '_TRUEID', int(MCID(mcparticle)) )
        tuple.column_int( name + '_TRUEQ', int(MC3Q(mcparticle)/3) )
        tuple.column_double( name + '_TRUEP', MCP(mcparticle) )
        tuple.column_double( name + '_TRUEPE', MCE(mcparticle) )
        tuple.column_double( name + '_TRUEPX', MCPX(mcparticle) )
        tuple.column_double( name + '_TRUEPY', MCPY(mcparticle) )
        tuple.column_double( name + '_TRUEPZ', MCPZ(mcparticle) )
        tuple.column_double( name + '_TRUEPT', MCPT(mcparticle) )
        tuple.column_double( name + '_TRUEETA', MCETA(mcparticle) )
        tuple.column_double( name + '_TRUEPHI', MCPHI(mcparticle) )
        tuple.column_double( name + '_TRUETHETA', MCTHETA(mcparticle) )
        tuple.column_double( name + '_TRUEM', MCM(mcparticle) )
        tuple.column_int( name + '_OSCIL', int(MCOSCILLATED(mcparticle)) )


    def mc_vtx_tuple( self, tuple, mcparticle, name ) :
        """
        Store vertex and lifetime info for the MC particle
        (placeholder values of -1.1 when the particle is missing)
        """

        if not mcparticle :
            tuple.column_double( name + '_TRUEORIGINVERTEX_X', -1.1 )
            tuple.column_double( name + '_TRUEORIGINVERTEX_Y', -1.1 )
            tuple.column_double( name + '_TRUEORIGINVERTEX_Z', -1.1 )
            tuple.column_double( name + '_TRUECTAU' , -1.1 )
            return

        tuple.column_double( name + '_TRUEORIGINVERTEX_X', MCVFASPF(MCVX)(mcparticle) )
        tuple.column_double( name + '_TRUEORIGINVERTEX_Y', MCVFASPF(MCVY)(mcparticle) )
        tuple.column_double( name + '_TRUEORIGINVERTEX_Z', MCVFASPF(MCVZ)(mcparticle) )
        tuple.column_double( name + '_TRUECTAU' , MCCTAU(mcparticle) )


    def mc_dp_tuple( self, tuple, mcparent ) :
        """
        Store the MC truth DP co-ordinates
        """

        # loop through the daughters and store their 4-momenta treating the
        # expected 3 daughters and PHOTOS photons separately

        daug_id = [ 0, 0, 0 ]
        daug_p4 = [ TLorentzVector(), TLorentzVector(), TLorentzVector() ]
        gamma_p4 = []

        parID = mcparent.particleID().pid()

        for daug in mcparent.children( True ) :

            daugID = daug.particleID().pid()

            px = MCPX( daug )
            py = MCPY( daug )
            pz = MCPZ( daug )
            pe = MCE ( daug )

            p4 = TLorentzVector( px, py, pz, pe )

            if daugID == 22 :
                # PHOTOS radiative photon - assign to nearest daughter later
                gamma_p4.append( p4 )
            elif self.daug1ID.abspid() == self.daug2ID.abspid() :
                # like-species h1 h2 - must match by signed PDG ID
                if daugID == self.daug1ID.pid() :
                    daug_p4[0] = p4
                    daug_id[0] = daugID
                elif daugID == self.daug2ID.pid() :
                    daug_p4[1] = p4
                    daug_id[1] = daugID
                elif daugID == self.daug3ID.pid() or daugID == self.daug3ConjID.pid() :
                    daug_p4[2] = p4
                    daug_id[2] = daugID
                else :
                    self.Error( 'Daughter does not match any of the expected types' )
                    e = Exception('Unknown daughter type: '+str(daugID))
                    raise e
            else :
                # unlike-species h1 h2 - match by absolute PDG ID
                if abs(daugID) == self.daug1ID.abspid() :
                    daug_p4[0] = p4
                    daug_id[0] = daugID
                elif abs(daugID) == self.daug2ID.abspid() :
                    daug_p4[1] = p4
                    daug_id[1] = daugID
                elif abs(daugID) == self.daug3ID.abspid() :
                    daug_p4[2] = p4
                    daug_id[2] = daugID
                else :
                    self.Error( 'Daughter does not match any of the expected types' )
                    e = Exception('Unknown daughter type: '+str(daugID))
                    raise e

        ngamma = len(gamma_p4)
        tuple.column_int( 'nPHOTOS', ngamma )

        if 0 != ngamma :

            # add each radiative photon back onto the angularly closest daughter
            for gamma in gamma_p4 :
                minangle = 1000.0
                mindaug = -1
                for daug in daug_p4 :
                    angle = gamma.Angle( daug.Vect() )
                    if abs(angle) < minangle :
                        minangle = angle
                        mindaug = daug_p4.index(daug)

                daug_p4[ mindaug ] += gamma


        p12 = daug_p4[0] + daug_p4[1]
        p13 = daug_p4[0] + daug_p4[2]
        p23 = daug_p4[1] + daug_p4[2]

        for i in range(3) :
            daug_name = self.names[ daug_id[i] ]
            tuple.column_double( daug_name + '_CORRPE', daug_p4[i].E() )
            tuple.column_double( daug_name + '_CORRPX', daug_p4[i].Px() )
            tuple.column_double( daug_name + '_CORRPY', daug_p4[i].Py() )
            tuple.column_double( daug_name + '_CORRPZ', daug_p4[i].Pz() )

        m12Sq = p12.M2()
        m13Sq = p13.M2()
        m23Sq = p23.M2()

        mPrime = -1.1
        thPrime = -1.1

        if self.kinematics.withinDPLimits( m13Sq, m23Sq ) :
            self.kinematics.updateKinematics( m13Sq, m23Sq )
            mPrime = self.kinematics.mPrime
            thPrime = self.kinematics.thPrime

        # flag unphysical invariant masses with the -1.1 placeholder
        if m12Sq<0 : m12Sq = -1.1
        if m12Sq>100e6 : m12Sq = -1.1
        if m13Sq<0 : m13Sq = -1.1
        if m13Sq>100e6 : m13Sq = -1.1
        if m23Sq<0 : m23Sq = -1.1
        if m23Sq>100e6 : m23Sq = -1.1

        tuple.column_double( 'm12Sq_MC', m12Sq )
        tuple.column_double( 'm13Sq_MC', m13Sq )
        tuple.column_double( 'm23Sq_MC', m23Sq )

        tuple.column_double( 'mPrime_MC', mPrime )
        tuple.column_double( 'thPrime_MC', thPrime )


    def analyse( self ) :
        """
        The method called in the event loop
        """

        # select MC particles
        cands = self.mcselect('cands', self.decay_descriptor)
        ncands = cands.size()

        if 0 == ncands :
            self.Warning( 'No MC candidates found in this event', SUCCESS )
            return SUCCESS

        # get the event header
        if not self.xgen:
            evthdr = self.get( '/Event/Rec/Header' )
        else :
            evthdr = self.get( '/Event/Gen/Header' )

        # create the ntuple
        tuple = self.nTuple( 'tupleMCTruth' )

        # loop through the candidates
        for cand in cands :

            # fill event information
            tuple.column_int( 'runNumber', evthdr.runNumber() )
            tuple.column_int( 'evtNumber', evthdr.evtNumber() )
            tuple.column_int( 'nCands', ncands )

            # get the ID and hence name of the parent
            candID = cand.particleID().pid()
            candname = self.names[ candID ]

            # store parent information
            self.mc_p4_tuple( tuple, cand, candname )
            self.mc_vtx_tuple( tuple, cand, candname )
            self.reco_status_tuple( tuple, cand, candname )

            # store DP information
            self.mc_dp_tuple( tuple, cand )

            # loop through the daughters and store their information
            for daug in cand.children( True ) :

                daugID = daug.particleID().pid()
                if 22 == daugID :
                    continue
                daugname = self.names[ daugID ]

                self.mc_p4_tuple( tuple, daug, daugname )
                self.mc_vtx_tuple( tuple, daug, daugname )
                self.reco_status_tuple( tuple, daug, daugname )

                # if this is the V0 then also find and store information for its daughters
                if abs(daugID) == self.daug3ID.abspid() :

                    tuple.column_int( daugname+'_NDAUG', daug.nChildren() )

                    gdaug1_name = ''
                    gdaug2_name = ''

                    for gdaug in daug.children( True ) :
                        gdaugID = gdaug.particleID().pid()
                        if gdaugID not in self.gdaugnames :
                            continue

                        gdaugname = self.gdaugnames[gdaugID]
                        if gdaug1_name == '' :
                            gdaug1_name = gdaugname
                        elif gdaug2_name == '' :
                            gdaug2_name = gdaugname
                        else :
                            self.Warning('Unexpected extra daughter of '+daugname, SUCCESS)
                            continue

                        self.mc_p4_tuple( tuple, gdaug, gdaugname )
                        self.mc_vtx_tuple( tuple, gdaug, gdaugname )
                        self.reco_status_tuple( tuple, gdaug, gdaugname )

                    # placeholder entries for any expected V0 daughter branches not found
                    gdaugnames = self.gdaugnames.values()
                    if gdaug1_name in gdaugnames :
                        gdaugnames.remove( gdaug1_name )
                    if gdaug2_name in gdaugnames :
                        gdaugnames.remove( gdaug2_name )

                    for name in gdaugnames :
                        self.mc_p4_tuple( tuple, None, name )
                        self.mc_vtx_tuple( tuple, None, name )
                        self.reco_status_tuple( tuple, None, name )

            tuple.write()


        return SUCCESS

# End of B2KShhMCTruth Class Definition


diff --git a/Phys/B2KShh/python/B2KShh/MCTruthAlgoResonance.py b/Phys/B2KShh/python/B2KShh/MCTruthAlgoResonance.py
new file mode 100644
index 0000000..d498c8b
--- /dev/null
+++ b/Phys/B2KShh/python/B2KShh/MCTruthAlgoResonance.py
#!/usr/bin/env python

from Bender.Main import *
from Bender.MainMC import *
from ROOT import TLorentzVector
from B2KShh.ThreeBodyKinematics import ThreeBodyKinematics


# Begin B2KShhMCTruthResonance Class Definition

class B2KShhMCTruthResonance(AlgoMC) :

    """
    Algorithm to perform ntupling of MC truth information for the B2KShh analyses.
+ """ + + def __init__( self, name, Btype, resType, bachType, resDaug1type, resDaug2type, **kwargs ) : + super(B2KShhMCTruthResonance,self).__init__( name, **kwargs ) + self.set_types( Btype, resType, bachType, resDaug1type, resDaug2type ) + + def set_types( self, Btype, resType, bachType, resDaug1type, resDaug2type ) : + """ + Define the decay mode of the MC being run over: + Btype = Lambda_b0, B0 or B_s0 + resType = phi, Kst0, Kst~0, Kst+, Kst-, rho0, f0 + bachType = pi+, pi-, K+, K-, p+, p~-, KS0, Lambda0, Lambda~0 + resDaug1type = pi+, pi-, K+, K-, p+, p~-, KS0, Lambda0, Lambda~0 + resDaug2type = pi+, pi-, K+, K-, p+, p~-, KS0, Lambda0, Lambda~0 + One (and only one) of bachType, resDaug1type and resDaug2type must be a KS0/Lambda0/Lambda~0. + """ + + self.parentID = None + self.daug1ID = None + self.daug2ID = None + self.daug3ID = None + + self.resonanceID = None + self.resDaug1ID = None + self.resDaug2ID = None + self.bachelorID = None + + self.names = {} + self.gdaugnames = {} + + self.selfconj = True + + # determine the B type + if Btype == 'Bd' or Btype == 'B0' : + self.parentID = LHCb.ParticleID(511) + self.names[ 511] = 'Bd' + self.names[-511] = 'Bd' + elif Btype == 'Bs' or Btype == 'B_s0' : + self.parentID = LHCb.ParticleID(531) + self.names[ 531] = 'Bs' + self.names[-531] = 'Bs' + elif Btype == 'Lb' or Btype == 'Lambda_b0' : + self.parentID = LHCb.ParticleID(5122) + self.names[ 5122] = 'Lb' + self.names[-5122] = 'Lb' + self.selfconj = False + elif Btype == 'Xib0' or Btype == 'Xi_b0' : + self.parentID = LHCb.ParticleID(5232) + self.names[ 5232] = 'Xib0' + self.names[-5232] = 'Xib0' + self.selfconj = False + else : + self.Warning( 'B type ('+Btype+') not recognised, setting to B0.' 
, SUCCESS ) + self.parentID = LHCb.ParticleID(511) + self.names[ 511] = 'Bd' + self.names[-511] = 'Bd' + + # determine the V0 type + daug_list = [] + daug_list.append( resDaug1type ) + daug_list.append( resDaug2type ) + daug_list.append( bachType ) + + nKS = daug_list.count( 'KS0' ) + nLz = daug_list.count( 'Lambda0' ) + nLzb = daug_list.count( 'Lambda~0' ) + if 1 != ( nKS + nLz + nLzb ) : + self.Error( 'Wrong number of V0 particles' ) + return + + if nKS == 1 : + self.daug3ID = LHCb.ParticleID(310) + self.names[310] = 'KS' + self.gdaugnames[ 211] = 'KSpip' + self.gdaugnames[-211] = 'KSpim' + daug_list.remove( 'KS0' ) + elif nLz == 1 : + self.daug3ID = LHCb.ParticleID(3122) + self.names[ 3122] = 'Lz' + self.names[-3122] = 'Lz' + self.gdaugnames[ 2212] = 'Lzp' + self.gdaugnames[-2212] = 'Lzp' + self.gdaugnames[ 211] = 'Lzpi' + self.gdaugnames[-211] = 'Lzpi' + self.selfconj = False + daug_list.remove( 'Lambda0' ) + else : + self.daug3ID = LHCb.ParticleID(-3122) + self.names[ 3122] = 'Lz' + self.names[-3122] = 'Lz' + self.gdaugnames[ 2212] = 'Lzp' + self.gdaugnames[-2212] = 'Lzp' + self.gdaugnames[ 211] = 'Lzpi' + self.gdaugnames[-211] = 'Lzpi' + self.selfconj = False + daug_list.remove( 'Lambda~0' ) + + # determine the charge daughter types + daug_list[0] = daug_list[0].replace('+','') + daug_list[0] = daug_list[0].replace('-','') + daug_list[0] = daug_list[0].replace('~','') + daug_list[1] = daug_list[1].replace('+','') + daug_list[1] = daug_list[1].replace('-','') + daug_list[1] = daug_list[1].replace('~','') + + if daug_list[0] == 'pi' and daug_list[1] == 'pi': + self.daug1ID = LHCb.ParticleID( 211) + self.daug2ID = LHCb.ParticleID(-211) + self.names[ 211] = 'h1' + self.names[-211] = 'h2' + + elif daug_list[0] == 'K' and daug_list[1] == 'K': + self.daug1ID = LHCb.ParticleID( 321) + self.daug2ID = LHCb.ParticleID(-321) + self.names[ 321] = 'h1' + self.names[-321] = 'h2' + + if daug_list[0] == 'p' and daug_list[1] == 'p': + self.daug1ID = LHCb.ParticleID( 2212) + 
self.daug2ID = LHCb.ParticleID(-2212) + self.names[ 2212] = 'h1' + self.names[-2212] = 'h2' + + elif (daug_list[0] == 'K' and daug_list[1] == 'pi') or (daug_list[0] == 'pi' and daug_list[1] == 'K'): + self.daug1ID = LHCb.ParticleID( 321) + self.daug2ID = LHCb.ParticleID(-211) + self.names[ 321] = 'h1' + self.names[-321] = 'h1' + self.names[ 211] = 'h2' + self.names[-211] = 'h2' + self.selfconj = False + + elif (daug_list[0] == 'p' and daug_list[1] == 'pi') or (daug_list[0] == 'pi' and daug_list[1] == 'p'): + self.daug1ID = LHCb.ParticleID(2212) + self.daug2ID = LHCb.ParticleID(-211) + self.names[ 2212] = 'h1' + self.names[-2212] = 'h1' + self.names[ 211] = 'h2' + self.names[-211] = 'h2' + self.selfconj = False + + elif (daug_list[0] == 'p' and daug_list[1] == 'K') or (daug_list[0] == 'K' and daug_list[1] == 'p'): + self.daug1ID = LHCb.ParticleID(2212) + self.daug2ID = LHCb.ParticleID(-321) + self.names[ 2212] = 'h1' + self.names[-2212] = 'h1' + self.names[ 321] = 'h2' + self.names[-321] = 'h2' + self.selfconj = False + + else : + self.Warning( 'hh types ('+daug_list[0]+' and '+daug_list[1]+') not recognised, setting to pions.' , SUCCESS ) + self.daug1ID = LHCb.ParticleID( 211) + self.daug2ID = LHCb.ParticleID(-211) + self.names[ 211] = 'h1' + self.names[-211] = 'h2' + + # determine the resonance type + if resType == 'phi' : + self.resonanceID = LHCb.ParticleID(333) + elif resType == 'Kst0' : + self.resonanceID = LHCb.ParticleID(313) + elif resType == 'Kst~0' : + self.resonanceID = LHCb.ParticleID(-313) + elif resType == 'Kst+' : + self.resonanceID = LHCb.ParticleID(323) + elif resType == 'Kst-' : + self.resonanceID = LHCb.ParticleID(-323) + elif resType == 'rho0' : + self.resonanceID = LHCb.ParticleID(113) + elif resType == 'f0' : + self.resonanceID = LHCb.ParticleID(9010221) + else : + self.Warning( 'Resonance type ('+resType+') not recognised, setting to rho0.' 
, SUCCESS ) + self.resonanceID = LHCb.ParticleID(113) + + # determine the resonance daughter types + if resDaug1type == 'pi+' : + self.resDaug1ID = LHCb.ParticleID(211) + elif resDaug1type == 'pi-' : + self.resDaug1ID = LHCb.ParticleID(-211) + elif resDaug1type == 'K+' : + self.resDaug1ID = LHCb.ParticleID(321) + elif resDaug1type == 'K-' : + self.resDaug1ID = LHCb.ParticleID(-321) + elif resDaug1type == 'p+' : + self.resDaug1ID = LHCb.ParticleID(2212) + elif resDaug1type == 'p~-' : + self.resDaug1ID = LHCb.ParticleID(-2212) + elif resDaug1type == 'KS0' : + self.resDaug1ID = LHCb.ParticleID(310) + elif resDaug1type == 'Lambda0' : + self.resDaug1ID = LHCb.ParticleID(3122) + elif resDaug1type == 'Lambda~0' : + self.resDaug1ID = LHCb.ParticleID(-3122) + else : + self.Warning( 'Resonance daughter 1 type ('+resDaug1type+') not recognised, setting to pi+.' , SUCCESS ) + self.resDaug1ID = LHCb.ParticleID(211) + + if resDaug2type == 'pi+' : + self.resDaug2ID = LHCb.ParticleID(211) + elif resDaug2type == 'pi-' : + self.resDaug2ID = LHCb.ParticleID(-211) + elif resDaug2type == 'K+' : + self.resDaug2ID = LHCb.ParticleID(321) + elif resDaug2type == 'K-' : + self.resDaug2ID = LHCb.ParticleID(-321) + elif resDaug2type == 'p+' : + self.resDaug2ID = LHCb.ParticleID(2212) + elif resDaug2type == 'p~-' : + self.resDaug2ID = LHCb.ParticleID(-2212) + elif resDaug2type == 'KS0' : + self.resDaug2ID = LHCb.ParticleID(310) + elif resDaug2type == 'Lambda0' : + self.resDaug2ID = LHCb.ParticleID(3122) + elif resDaug2type == 'Lambda~0' : + self.resDaug2ID = LHCb.ParticleID(-3122) + else : + self.Warning( 'Resonance daughter 2 type ('+resDaug2type+') not recognised, setting to pi-.' 
, SUCCESS ) + self.resDaug2ID = LHCb.ParticleID(-211) + + # determine the bachelor type + if bachType == 'pi+' : + self.bachelorID = LHCb.ParticleID(211) + elif bachType == 'pi-' : + self.bachelorID = LHCb.ParticleID(-211) + elif bachType == 'K+' : + self.bachelorID = LHCb.ParticleID(321) + elif bachType == 'K-' : + self.bachelorID = LHCb.ParticleID(-321) + elif bachType == 'p+' : + self.bachelorID = LHCb.ParticleID(2212) + elif bachType == 'p~-' : + self.bachelorID = LHCb.ParticleID(-2212) + elif bachType == 'KS0' : + self.bachelorID = LHCb.ParticleID(310) + elif bachType == 'Lambda0' : + self.bachelorID = LHCb.ParticleID(3122) + elif bachType == 'Lambda~0' : + self.bachelorID = LHCb.ParticleID(-3122) + else : + self.Warning( 'Bachelor type ('+bachType+') not recognised, setting to KS0.' , SUCCESS ) + self.bachelorID = LHCb.ParticleID(310) + + + + def check_types( self ) : + + initB = 0 + if self.parentID.isBaryon() : + initB = 1 + + finalB = 0 + for id in ( self.daug1ID, self.daug2ID, self.daug3ID ) : + if id.isBaryon() : + if id.pid() > 0 : + finalB += 1 + else : + finalB -= 1 + + if initB != finalB : + self.Error( 'Initial and final state baryon numbers do not match, '+str(initB)+' != '+str(finalB) ) + return FAILURE + + daug1Mass = self.partpropsvc.find( self.daug1ID ).mass() + daug2Mass = self.partpropsvc.find( self.daug2ID ).mass() + daug3Mass = self.partpropsvc.find( self.daug3ID ).mass() + parentMass = self.partpropsvc.find( self.parentID ).mass() + + if parentMass < ( daug1Mass + daug2Mass + daug3Mass ) : + return FAILURE + + self.kinematics = ThreeBodyKinematics( daug1Mass, daug2Mass, daug3Mass, parentMass ) + + return SUCCESS + + + def form_decay_descriptor( self ) : + + parName = self.partpropsvc.find( self.parentID ).name() + resName = self.partpropsvc.find( self.resonanceID ).name() + resDaug1Name = self.partpropsvc.find( self.resDaug1ID ).name() + resDaug2Name = self.partpropsvc.find( self.resDaug2ID ).name() + bachName = self.partpropsvc.find( 
self.bachelorID ).name() + + parConjName = self.partpropsvc.find( self.parentID ).antiParticle().name() + resConjName = self.partpropsvc.find( self.resonanceID ).antiParticle().name() + resDaug1ConjName = self.partpropsvc.find( self.resDaug1ID ).antiParticle().name() + resDaug2ConjName = self.partpropsvc.find( self.resDaug2ID ).antiParticle().name() + bachConjName = self.partpropsvc.find( self.bachelorID ).antiParticle().name() + + self.decay_descriptor = '[ ( ' + self.decay_descriptor += parName + self.decay_descriptor += ' => ( ' + self.decay_descriptor += resName + self.decay_descriptor += ' => ' + self.decay_descriptor += resDaug1Name + self.decay_descriptor += ' ' + self.decay_descriptor += resDaug2Name + self.decay_descriptor += ' ) ' + self.decay_descriptor += bachName + self.decay_descriptor += ' ) , ( ' + self.decay_descriptor += parName + self.decay_descriptor += ' => ( ' + self.decay_descriptor += resConjName + self.decay_descriptor += ' => ' + self.decay_descriptor += resDaug1ConjName + self.decay_descriptor += ' ' + self.decay_descriptor += resDaug2ConjName + self.decay_descriptor += ' ) ' + self.decay_descriptor += bachConjName + self.decay_descriptor += ' ) , ( ' + self.decay_descriptor += parConjName + self.decay_descriptor += ' => ( ' + self.decay_descriptor += resName + self.decay_descriptor += ' => ' + self.decay_descriptor += resDaug1Name + self.decay_descriptor += ' ' + self.decay_descriptor += resDaug2Name + self.decay_descriptor += ' ) ' + self.decay_descriptor += bachName + self.decay_descriptor += ' ) , ( ' + self.decay_descriptor += parConjName + self.decay_descriptor += ' => ( ' + self.decay_descriptor += resConjName + self.decay_descriptor += ' => ' + self.decay_descriptor += resDaug1ConjName + self.decay_descriptor += ' ' + self.decay_descriptor += resDaug2ConjName + self.decay_descriptor += ' ) ' + self.decay_descriptor += bachConjName + self.decay_descriptor += ' ) ]' + + self.Info( 'Will use the decay descriptor 
'+self.decay_descriptor ) + + + + def initialize( self ) : + + sc = AlgoMC.initialize( self ) + if sc.isFailure() : + return sc + + # set up the kinematics object based on the decay + self.partpropsvc = self.ppSvc() + + # check the validity of the decay + sc = self.check_types() + if sc.isFailure() : + return sc + + # form the decay descriptor + self.form_decay_descriptor() + + # set up the reconstrucible/reconstructed tools + self.recible = self.tool( cpp.IMCReconstructible, 'MCReconstructible' ) + self.rected = self.tool( cpp.IMCReconstructed, 'MCReconstructed' ) + + return SUCCESS + + + + def reco_status_tuple( self, tuple, mcparticle, name ) : + """ + Store the reconstructible/reconstructed status of an MC particle + """ + + if not mcparticle : + tuple.column_int( name + '_Reconstructible', -1 ) + tuple.column_int( name + '_Reconstructed', -1 ) + return + + cat_ible = self.recible.reconstructible( mcparticle ) + cat_ted = self.rected.reconstructed( mcparticle ) + + tuple.column_int( name + '_Reconstructible', int(cat_ible) ) + tuple.column_int( name + '_Reconstructed', int(cat_ted) ) + + + def mc_p4_tuple( self, tuple, mcparticle, name ) : + """ + Store the id, charge, 4-momentum, mass, p_t and eta of an MC particle + """ + + if not mcparticle : + tuple.column_int( name + '_TRUEID', -1 ) + tuple.column_int( name + '_TRUEQ', -1 ) + tuple.column_double( name + '_TRUEP', -1.1 ) + tuple.column_double( name + '_TRUEPE', -1.1 ) + tuple.column_double( name + '_TRUEPX', -1.1 ) + tuple.column_double( name + '_TRUEPY', -1.1 ) + tuple.column_double( name + '_TRUEPZ', -1.1 ) + tuple.column_double( name + '_TRUEPT', -1.1 ) + tuple.column_double( name + '_TRUEETA', -1.1 ) + tuple.column_double( name + '_TRUEPHI', -1.1 ) + tuple.column_double( name + '_TRUETHETA', -1.1 ) + tuple.column_double( name + '_TRUEM', -1.1 ) + tuple.column_int( name + '_OSCIL', -1 ) + return + + tuple.column_int( name + '_TRUEID', int(MCID(mcparticle)) ) + tuple.column_int( name + '_TRUEQ', 
int(MC3Q(mcparticle)/3) ) + tuple.column_double( name + '_TRUEP', MCP(mcparticle) ) + tuple.column_double( name + '_TRUEPE', MCE(mcparticle) ) + tuple.column_double( name + '_TRUEPX', MCPX(mcparticle) ) + tuple.column_double( name + '_TRUEPY', MCPY(mcparticle) ) + tuple.column_double( name + '_TRUEPZ', MCPZ(mcparticle) ) + tuple.column_double( name + '_TRUEPT', MCPT(mcparticle) ) + tuple.column_double( name + '_TRUEETA', MCETA(mcparticle) ) + tuple.column_double( name + '_TRUEPHI', MCPHI(mcparticle) ) + tuple.column_double( name + '_TRUETHETA', MCTHETA(mcparticle) ) + tuple.column_double( name + '_TRUEM', MCM(mcparticle) ) + tuple.column_int( name + '_OSCIL', int(MCOSCILLATED(mcparticle)) ) + + + def mc_vtx_tuple( self, tuple, mcparticle, name ) : + """ + Store vertex and lifetime info for the MC particle + """ + + if not mcparticle : + tuple.column_double( name + '_TRUEORIGINVERTEX_X', -1.1 ) + tuple.column_double( name + '_TRUEORIGINVERTEX_Y', -1.1 ) + tuple.column_double( name + '_TRUEORIGINVERTEX_Z', -1.1 ) + tuple.column_double( name + '_TRUECTAU' , -1.1 ) + return + + tuple.column_double( name + '_TRUEORIGINVERTEX_X', MCVFASPF(MCVX)(mcparticle) ) + tuple.column_double( name + '_TRUEORIGINVERTEX_Y', MCVFASPF(MCVY)(mcparticle) ) + tuple.column_double( name + '_TRUEORIGINVERTEX_Z', MCVFASPF(MCVZ)(mcparticle) ) + tuple.column_double( name + '_TRUECTAU' , MCCTAU(mcparticle) ) + + + def mc_dp_tuple( self, tuple, mcparent ) : + """ + Store the MC truth DP co-ordinates + """ + + # loop through the B daughters and store their 4-momenta treating the + # expected 3 daughters and PHOTOS photons separately + + bach_p4 = None + bach_id = -1 + resdaug_p4 = [] + resdaug_id = [] + gammaB_p4 = [] + gammaR_p4 = [] + + parID = mcparent.particleID().pid() + + for daug in mcparent.children( True ) : + + daugID = daug.particleID().pid() + + px = MCPX( daug ) + py = MCPY( daug ) + pz = MCPZ( daug ) + pe = MCE ( daug ) + + p4 = TLorentzVector( px, py, pz, pe ) + + if 22 == daugID : + 
gammaB_p4.append( p4 ) + + elif daugID not in self.names : + + for resdaug in daug.children( True ) : + + resdaugID = resdaug.particleID().pid() + + px = MCPX( resdaug ) + py = MCPY( resdaug ) + pz = MCPZ( resdaug ) + pe = MCE ( resdaug ) + + p4 = TLorentzVector( px, py, pz, pe ) + + if 22 == resdaugID : + gammaR_p4.append( p4 ) + else : + resdaug_p4.append( p4 ) + resdaug_id.append( resdaugID ) + + else : + bach_p4 = p4 + bach_id = daugID + + ngammaB = len(gammaB_p4) + ngammaR = len(gammaR_p4) + tuple.column_int( 'nPHOTOS', ngammaB+ngammaR ) + + for gamma in gammaB_p4 : + bach_p4 += gamma + + for gamma in gammaR_p4 : + minangle = 1000.0 + mindaug = -1 + for daug in resdaug_p4 : + angle = gamma.Angle( daug.Vect() ) + if abs(angle) < minangle : + minangle = angle + mindaug = resdaug_p4.index(daug) + + resdaug_p4[ mindaug ] += gamma + + + unsorted_daug_p4 = resdaug_p4 + unsorted_daug_p4.append( bach_p4 ) + unsorted_daug_id = resdaug_id + unsorted_daug_id.append( bach_id ) + + daug_id = [ 0, 0, 0 ] + daug_p4 = [ TLorentzVector(), TLorentzVector(), TLorentzVector() ] + + for i in range(3) : + id = unsorted_daug_id[i] + if self.names[ id ] == 'h1' : + daug_p4[0] = unsorted_daug_p4[i] + daug_id[0] = id + elif self.names[ id ] == 'h2' : + daug_p4[1] = unsorted_daug_p4[i] + daug_id[1] = id + else : + daug_p4[2] = unsorted_daug_p4[i] + daug_id[2] = id + + p12 = daug_p4[0] + daug_p4[1] + p13 = daug_p4[0] + daug_p4[2] + p23 = daug_p4[1] + daug_p4[2] + + for i in range(3) : + daug_name = self.names[ daug_id[i] ] + tuple.column_double( daug_name + '_CORRPE', daug_p4[i].E() ) + tuple.column_double( daug_name + '_CORRPX', daug_p4[i].Px() ) + tuple.column_double( daug_name + '_CORRPY', daug_p4[i].Py() ) + tuple.column_double( daug_name + '_CORRPZ', daug_p4[i].Pz() ) + + m12Sq = p12.M2() + m13Sq = p13.M2() + m23Sq = p23.M2() + + mPrime = -1.1 + thPrime = -1.1 + + if self.kinematics.withinDPLimits( m13Sq, m23Sq ) : + self.kinematics.updateKinematics( m13Sq, m23Sq ) + mPrime = 
self.kinematics.mPrime + thPrime = self.kinematics.thPrime + + if m12Sq<0 : m12Sq = -1.1 + if m12Sq>100e6 : m12Sq = -1.1 + if m13Sq<0 : m13Sq = -1.1 + if m13Sq>100e6 : m13Sq = -1.1 + if m23Sq<0 : m23Sq = -1.1 + if m23Sq>100e6 : m23Sq = -1.1 + + tuple.column_double( 'm12Sq_MC', m12Sq ) + tuple.column_double( 'm13Sq_MC', m13Sq ) + tuple.column_double( 'm23Sq_MC', m23Sq ) + + tuple.column_double( 'mPrime_MC', mPrime ) + tuple.column_double( 'thPrime_MC', thPrime ) + + + def analyse( self ) : + """ + The method called in the event loop + """ + + # select MC particles + cands = self.mcselect('cands', self.decay_descriptor) + ncands = cands.size() + + if 0 == ncands : + self.Warning( 'No MC B candidates found in this event', SUCCESS ) + return SUCCESS + + # get the event header + evthdr = self.get( '/Event/Rec/Header' ) + + # create the ntuple + tuple = self.nTuple( 'tupleMCTruth' ) + + # loop through the candidates + for cand in cands : + + # fill event information + tuple.column_int( 'runNumber', evthdr.runNumber() ) + tuple.column_int( 'evtNumber', evthdr.evtNumber() ) + tuple.column_int( 'nCands', ncands ) + + # get the ID and hance name of the parent + candID = cand.particleID().pid() + candname = self.names[ candID ] + + # store parent information + self.mc_p4_tuple( tuple, cand, candname ) + self.mc_vtx_tuple( tuple, cand, candname ) + self.reco_status_tuple( tuple, cand, candname ) + + # store DP information + self.mc_dp_tuple( tuple, cand ) + + # find the 3 final-state particles + daug_list = [] + for daug in cand.children( True ) : + daugID = daug.particleID().pid() + if 22 == daugID : + continue + elif daugID not in self.names : + for resdaug in daug.children( True ) : + resdaugID = resdaug.particleID().pid() + if 22 == resdaugID : + continue + daug_list.append( resdaug ) + else : + daug_list.append( daug ) + + # loop through the final state particles and store their information + for daug in daug_list : + + daugID = daug.particleID().pid() + daugname = 
self.names[ daugID ] + + self.mc_p4_tuple( tuple, daug, daugname ) + self.mc_vtx_tuple( tuple, daug, daugname ) + self.reco_status_tuple( tuple, daug, daugname ) + + # if this is the V0 then also find and store information for it's daughters + if abs(daugID) == self.daug3ID.abspid() : + + tuple.column_int( daugname+'_NDAUG', daug.nChildren() ) + + gdaug1_name = '' + gdaug2_name = '' + + for gdaug in daug.children( True ) : + gdaugID = gdaug.particleID().pid() + if gdaugID not in self.gdaugnames : + continue + + gdaugname = self.gdaugnames[gdaugID] + if gdaug1_name == '' : + gdaug1_name = gdaugname + elif gdaug2_name == '' : + gdaug2_name = gdaugname + else : + self.Warning('Unexpected extra daughter of '+daugname, SUCCESS) + continue + + self.mc_p4_tuple( tuple, gdaug, gdaugname ) + self.mc_vtx_tuple( tuple, gdaug, gdaugname ) + self.reco_status_tuple( tuple, gdaug, gdaugname ) + + gdaugnames = self.gdaugnames.values() + if gdaug1_name in gdaugnames : + gdaugnames.remove( gdaug1_name ) + if gdaug2_name in gdaugnames : + gdaugnames.remove( gdaug2_name ) + + for name in gdaugnames : + self.mc_p4_tuple( tuple, None, name ) + self.mc_vtx_tuple( tuple, None, name ) + self.reco_status_tuple( tuple, None, name ) + + tuple.write() + + + return SUCCESS + +# End of B2KShhMCTruthResonance Class Definition + diff --git a/Phys/B2KShh/python/B2KShh/RecoAlgo.py b/Phys/B2KShh/python/B2KShh/RecoAlgo.py new file mode 100644 index 0000000..7265b6a --- /dev/null +++ b/Phys/B2KShh/python/B2KShh/RecoAlgo.py @@ -0,0 +1,1989 @@ +#!/usr/bin/env python + +from Bender.Main import * +from Bender.MainMC import * +import math +from ROOT import ( Double, TMath ) +from B2KShh.ThreeBodyKinematics import ThreeBodyKinematics + +# Begin B2KShhReco Class Definition + +class B2KShhReco(AlgoMC) : + + """ + Algorithm to perform ntupling (and potentially offline selection) for + B2KShh analyses. + Reads stripped candidates from the DST and stores useful info in an ntuple. 
+ Each instance of this algorithm creates a single ntuple, which contains + information on all (considered) final states, but only for KS's composed of + either DD, LD or LL tracks. So one would need 3 instances of this + algorithm in a job to create ntuples for all possible KS categories. + """ + + def __init__( self, name, reco_daughters, use_extended_hypothesis_set = False, wrongsign = False, simulation = False, signalmc = False, mc_daughters = [], mc_decay_descriptors = [], **kwargs ) : + super(B2KShhReco,self).__init__( name, **kwargs ) + + self.simplenames = {} + self.simplenames[511] = 'Bd' + self.simplenames[531] = 'Bs' + self.simplenames[5122] = 'Lb' + self.simplenames[5232] = 'Xib' + self.simplenames[310] = 'KS' + self.simplenames[321] = 'K' + self.simplenames[-321] = 'K' + self.simplenames[211] = 'pi' + self.simplenames[-211] = 'pi' + self.simplenames[2212] = 'p' + self.simplenames[-2212] = 'p' + self.simplenames[3122] = 'Lz' + self.simplenames[-3122] = 'Lz' + + self.simulation = simulation + self.signalmc = False + if self.simulation : + self.signalmc = signalmc + + self.wrongsign = wrongsign + + self.input_mc_dds = mc_decay_descriptors + + self.set_daug_types( reco_daughters, use_extended_hypothesis_set ) + + self.set_mc_daug_types( mc_daughters ) + + + def set_mc_daug_types( self, mc_daughters ) : + """ + Define the MC-truth decay mode + """ + + self.mcgdaugnames = {} + + self.mcdaug1ID = None + self.mcdaug2ID = None + self.mcdaug3ID = None + self.mcdaug3ConjID = None + + if not self.signalmc : + return + + if len(mc_daughters) != 3 : + self.Warning( 'MC daughters not supplied, will assume they are the same as the reco daughters!', SUCCESS ) + self.mcgdaugnames = dict( self.gdaugnames ) + self.mcdaug1ID = LHCb.ParticleID(self.daug1ID) + self.mcdaug2ID = LHCb.ParticleID(self.daug2ID) + self.mcdaug3ID = LHCb.ParticleID(self.daug3ID) + self.mcdaug3ConjID = LHCb.ParticleID(self.daug3ConjID) + return + + # determine the V0 type + if mc_daughters[2] == 'KS' 
or mc_daughters[2] == 'KS0' : + self.mcdaug3ID = LHCb.ParticleID(310) + self.mcdaug3ConjID = LHCb.ParticleID(310) + self.mcgdaugnames[ 211] = 'KSpip' + self.mcgdaugnames[-211] = 'KSpim' + + elif mc_daughters[2] == 'L' or mc_daughters[2] == 'Lambda0' : + self.mcdaug3ID = LHCb.ParticleID( 3122) + self.mcdaug3ConjID = LHCb.ParticleID(-3122) + self.mcgdaugnames[ 2212] = 'Lzp' + self.mcgdaugnames[-2212] = 'Lzp' + self.mcgdaugnames[ 211] = 'Lzpi' + self.mcgdaugnames[-211] = 'Lzpi' + + else : + self.Warning( 'MC V0 type ('+mc_daughters[2]+') not recognised, setting to KS0.', SUCCESS ) + self.mcdaug3ID = LHCb.ParticleID(310) + self.mcdaug3ConjID = LHCb.ParticleID(310) + self.mcgdaugnames[ 211] = 'KSpip' + self.mcgdaugnames[-211] = 'KSpim' + + # determine the charged daughter types + if mc_daughters[0] == 'pi' and mc_daughters[1] == 'pi': + self.mcdaug1ID = LHCb.ParticleID( 211) + self.mcdaug2ID = LHCb.ParticleID(-211) + + elif mc_daughters[0] == 'K' and mc_daughters[1] == 'K': + self.mcdaug1ID = LHCb.ParticleID( 321) + self.mcdaug2ID = LHCb.ParticleID(-321) + + elif mc_daughters[0] == 'p' and mc_daughters[1] == 'p': + self.mcdaug1ID = LHCb.ParticleID( 2212) + self.mcdaug2ID = LHCb.ParticleID(-2212) + + elif mc_daughters[0] == 'K' and mc_daughters[1] == 'pi': + self.mcdaug1ID = LHCb.ParticleID( 321) + self.mcdaug2ID = LHCb.ParticleID(-211) + + elif mc_daughters[0] == 'p' and mc_daughters[1] == 'pi': + self.mcdaug1ID = LHCb.ParticleID(2212) + self.mcdaug2ID = LHCb.ParticleID(-211) + + elif mc_daughters[0] == 'p' and mc_daughters[1] == 'K': + self.mcdaug1ID = LHCb.ParticleID(2212) + self.mcdaug2ID = LHCb.ParticleID(-321) + + else : + self.Warning( 'MC hh types ('+mc_daughters[0]+' and '+mc_daughters[1]+') not recognised, setting to pions.' 
, SUCCESS ) + self.mcdaug1ID = LHCb.ParticleID( 211) + self.mcdaug2ID = LHCb.ParticleID(-211) + + + def set_daug_types( self, daughters, use_extended_hypothesis_set ) : + """ + Define the decay mode to be selected + """ + + self.gdaugnames = {} + + self.selfconj = True + + self.daug1ID = None + self.daug2ID = None + self.daug3ID = None + self.daug3ConjID = None + + self.hypotheses = [] + + if len(daughters) != 3 : + self.Error( 'Expected 3 reco daughters but have %d. Cannot continue.' % len(daughters), FAILURE ) + + if self.wrongsign : + self.hypotheses = [ (211,211), (321,211), (321,321) ] + if use_extended_hypothesis_set : + extra_hypotheses = [ (2212,2212), (2212,211), (2212,321) ] + self.hypotheses.extend( extra_hypotheses ) + else : + self.hypotheses = [ (211,-211), (321,-211), (211,-321), (321,-321) ] + if use_extended_hypothesis_set : + extra_hypotheses = [ (2212,-2212), (2212,-211), (211,-2212), (2212,-321), (321,-2212) ] + self.hypotheses.extend( extra_hypotheses ) + + # determine the V0 type + if daughters[2] == 'KS' or daughters[2] == 'KS0' : + self.daug3ID = LHCb.ParticleID(310) + self.daug3ConjID = LHCb.ParticleID(310) + self.gdaugnames[ 211] = 'KSpip' + self.gdaugnames[-211] = 'KSpim' + self.v0name = 'KS' + + elif daughters[2] == 'L' or daughters[2] == 'Lambda0' : + self.daug3ID = LHCb.ParticleID( 3122) + self.daug3ConjID = LHCb.ParticleID(-3122) + self.gdaugnames[ 2212] = 'Lzp' + self.gdaugnames[-2212] = 'Lzp' + self.gdaugnames[ 211] = 'Lzpi' + self.gdaugnames[-211] = 'Lzpi' + self.v0name = 'Lz' + self.selfconj = False + + else : + self.Warning( 'V0 type ('+daughters[2]+') not recognised, setting to KS0.', SUCCESS ) + self.daug3ID = LHCb.ParticleID(310) + self.daug3ConjID = LHCb.ParticleID(310) + self.gdaugnames[ 211] = 'KSpip' + self.gdaugnames[-211] = 'KSpim' + self.v0name = 'KS' + + # determine the charged daughter types + if self.wrongsign : + if daughters[0] == 'pi' and daughters[1] == 'pi': + self.daug1ID = LHCb.ParticleID( 211 ) + self.daug2ID 
= LHCb.ParticleID( 211 ) + + elif daughters[0] == 'K' and daughters[1] == 'K': + self.daug1ID = LHCb.ParticleID( 321 ) + self.daug2ID = LHCb.ParticleID( 321 ) + + elif daughters[0] == 'p' and daughters[1] == 'p': + self.daug1ID = LHCb.ParticleID( 2212) + self.daug2ID = LHCb.ParticleID( 2212) + + elif daughters[0] == 'K' and daughters[1] == 'pi': + self.daug1ID = LHCb.ParticleID( 321) + self.daug2ID = LHCb.ParticleID( 211) + self.selfconj = False + + elif daughters[0] == 'p' and daughters[1] == 'pi': + self.daug1ID = LHCb.ParticleID(2212) + self.daug2ID = LHCb.ParticleID(211) + self.selfconj = False + + elif daughters[0] == 'p' and daughters[1] == 'K': + self.daug1ID = LHCb.ParticleID(2212) + self.daug2ID = LHCb.ParticleID(321) + self.selfconj = False + + else : + self.Warning( 'hh types ('+daughters[0]+' and '+daughters[1]+') not recognised, setting to pions.' , SUCCESS ) + self.daug1ID = LHCb.ParticleID( 211) + self.daug2ID = LHCb.ParticleID(211) + + else : + if daughters[0] == 'pi' and daughters[1] == 'pi': + self.daug1ID = LHCb.ParticleID( 211) + self.daug2ID = LHCb.ParticleID(-211) + + elif daughters[0] == 'K' and daughters[1] == 'K': + self.daug1ID = LHCb.ParticleID( 321) + self.daug2ID = LHCb.ParticleID(-321) + + elif daughters[0] == 'p' and daughters[1] == 'p': + self.daug1ID = LHCb.ParticleID( 2212) + self.daug2ID = LHCb.ParticleID(-2212) + + elif daughters[0] == 'K' and daughters[1] == 'pi': + self.daug1ID = LHCb.ParticleID( 321) + self.daug2ID = LHCb.ParticleID(-211) + self.selfconj = False + + elif daughters[0] == 'p' and daughters[1] == 'pi': + self.daug1ID = LHCb.ParticleID(2212) + self.daug2ID = LHCb.ParticleID(-211) + self.selfconj = False + + elif daughters[0] == 'p' and daughters[1] == 'K': + self.daug1ID = LHCb.ParticleID(2212) + self.daug2ID = LHCb.ParticleID(-321) + self.selfconj = False + + else : + self.Warning( 'hh types ('+daughters[0]+' and '+daughters[1]+') not recognised, setting to pions.' 
, SUCCESS ) + self.daug1ID = LHCb.ParticleID( 211) + self.daug2ID = LHCb.ParticleID(-211) + + + def check_types( self ) : + + okB = [ -1, 0, 1 ] + finalB = 0 + for id in ( self.daug1ID, self.daug2ID, self.daug3ID ) : + if id.isBaryon() : + if id.pid() > 0 : + finalB += 1 + else : + finalB -= 1 + + if finalB not in okB : + self.Error( 'Final state baryon number is unexpected value: '+str(finalB) ) + return FAILURE + + # TODO any other checks we can do here? + + return SUCCESS + + + def form_mc_decay_descriptors( self ) : + """ + Create the decay descriptors for reading the MC particles from the MC truth location. + This is created based on the MC daughter types or uses the ones provided (if available). + """ + + if not self.signalmc : + return + + if len(self.input_mc_dds) == 4 : + # use the descriptors provided by the user + self.mc_decay_descriptor_Xb = self.input_mc_dds[0] + self.mc_decay_descriptor_hp = self.input_mc_dds[1] + self.mc_decay_descriptor_hm = self.input_mc_dds[2] + self.mc_decay_descriptor_V0 = self.input_mc_dds[3] + + else : + daug1_name = self.partpropsvc.find( self.mcdaug1ID ).name() + daug2_name = self.partpropsvc.find( self.mcdaug2ID ).name() + daug3_name = self.partpropsvc.find( self.mcdaug3ID ).name() + + daug1_conj_name = self.partpropsvc.find( self.mcdaug1ID ).anti().name() + daug2_conj_name = self.partpropsvc.find( self.mcdaug2ID ).anti().name() + daug3_conj_name = self.partpropsvc.find( self.mcdaug3ID ).anti().name() + + if self.mcdaug1ID.abspid() == self.mcdaug2ID.abspid() and self.mcdaug3ID == self.ksID and not self.wrongsign: + + self.mc_decay_descriptor_Xb = '( Xb & X0 ) => %s %s %s' % (daug1_name, daug2_name, daug3_name) + self.mc_decay_descriptor_hp = '( Xb & X0 ) => ^%s %s %s' % (daug1_name, daug2_name, daug3_name) + self.mc_decay_descriptor_hm = '( Xb & X0 ) => %s ^%s %s' % (daug1_name, daug2_name, daug3_name) + self.mc_decay_descriptor_V0 = '( Xb & X0 ) => %s %s ^%s' % (daug1_name, daug2_name, daug3_name) + + else : + ddpart1 = 
'( ( Xb & X0 ) => %s %s %s )' % (daug1_name, daug2_name, daug3_name) + ddpart2 = '( ( Xb & X0 ) => %s %s %s )' % (daug1_conj_name, daug2_conj_name, daug3_conj_name) + self.mc_decay_descriptor_Xb = '[ %s, %s ]' % (ddpart1, ddpart2) + + ddpart1 = '( ( Xb & X0 ) => ^%s %s %s )' % (daug1_name, daug2_name, daug3_name) + ddpart2 = '( ( Xb & X0 ) => %s ^%s %s )' % (daug1_conj_name, daug2_conj_name, daug3_conj_name) + self.mc_decay_descriptor_hp = '[ %s, %s ]' % (ddpart1, ddpart2) + + ddpart1 = '( ( Xb & X0 ) => %s ^%s %s )' % (daug1_name, daug2_name, daug3_name) + ddpart2 = '( ( Xb & X0 ) => ^%s %s %s )' % (daug1_conj_name, daug2_conj_name, daug3_conj_name) + self.mc_decay_descriptor_hm = '[ %s, %s ]' % (ddpart1, ddpart2) + + ddpart1 = '( ( Xb & X0 ) => %s %s ^%s )' % (daug1_name, daug2_name, daug3_name) + ddpart2 = '( ( Xb & X0 ) => %s %s ^%s )' % (daug1_conj_name, daug2_conj_name, daug3_conj_name) + self.mc_decay_descriptor_V0 = '[ %s, %s ]' % (ddpart1, ddpart2) + + self.Info( 'Will use the MC decay descriptors:' ) + self.Info( self.mc_decay_descriptor_Xb ) + self.Info( self.mc_decay_descriptor_hp ) + self.Info( self.mc_decay_descriptor_hm ) + self.Info( self.mc_decay_descriptor_V0 ) + + + def form_reco_decay_descriptor( self ) : + """ + Create the decay descriptor for reading the reconstructed particles from the stripping location. + This is created based on the daughter types. 
+ """ + + daug1_name = self.partpropsvc.find( self.daug1ID ).name() + daug2_name = self.partpropsvc.find( self.daug2ID ).name() + daug3_name = self.partpropsvc.find( self.daug3ID ).name() + + if self.daug1ID.abspid() == self.daug2ID.abspid() and self.daug3ID == self.ksID and not self.wrongsign: + self.decay_descriptor = '( Xb & X0 ) -> %s %s %s' % (daug1_name, daug2_name, daug3_name) + + else : + self.decay_descriptor = '[ ( Xb & X0 ) -> %s %s %s ]CC' % (daug1_name, daug2_name, daug3_name) + + self.Info( 'Will use the decay descriptor '+self.decay_descriptor ) + + + def initialize( self ) : + + sc = AlgoMC.initialize( self ) + if sc.isFailure() : + return sc + + sc = self.check_tes_locations() + if sc.isFailure() : + return sc + + self.tistostool = self.tool( cpp.ITriggerTisTos, 'TriggerTisTos' ) + self.l0tistostool = self.tool( cpp.ITriggerTisTos, 'L0TriggerTisTos' ) + + self.bkgtools = [] + if self.simulation : + # the first of these can't be used in Bender <= v18r3 + self.bkgtools.append( self.tool( cpp.IBackgroundCategory, 'BackgroundCategoryViaRelations' ) ) + self.bkgtools.append( self.tool( cpp.IBackgroundCategory, 'BackgroundCategory' ) ) + + self.lifetimetool = self.tool( cpp.ILifetimeFitter, 'PropertimeFitter' ) + + self.stateprovider = self.tool( cpp.ITrackStateProvider, 'TrackStateProvider' ) + + self.partpropsvc = self.ppSvc() + + self.part2calo = self.tool( cpp.IPart2Calo, 'Part2Calo' ) + + # check the validity of the decay + sc = self.check_types() + if sc.isFailure() : + return sc + + # set up the DP kinematics objects + self.piID = LHCb.ParticleID(211) + self.kID = LHCb.ParticleID(321) + self.pID = LHCb.ParticleID(2212) + self.lID = LHCb.ParticleID(3122) + self.ksID = LHCb.ParticleID(310) + self.bdID = LHCb.ParticleID(511) + self.bsID = LHCb.ParticleID(531) + self.lbID = LHCb.ParticleID(5122) + self.xibID = LHCb.ParticleID(5232) + + piMass = self.partpropsvc.find( self.piID ).mass() + kMass = self.partpropsvc.find( self.kID ).mass() + pMass = 
self.partpropsvc.find( self.pID ).mass() + lMass = self.partpropsvc.find( self.lID ).mass() + ksMass = self.partpropsvc.find( self.ksID ).mass() + bdMass = self.partpropsvc.find( self.bdID ).mass() + bsMass = self.partpropsvc.find( self.bsID ).mass() + lbMass = self.partpropsvc.find( self.lbID ).mass() + xibMass = self.partpropsvc.find( self.xibID ).mass() + + self.kinematics = {} + + self.kinematics[511] = {} + self.kinematics[511][310] = {} + self.kinematics[511][310][211] = {} + self.kinematics[511][310][321] = {} + self.kinematics[511][310][2212] = {} + self.kinematics[511][310][211][211] = ThreeBodyKinematics( piMass, piMass, ksMass, bdMass ) + self.kinematics[511][310][211][321] = ThreeBodyKinematics( piMass, kMass, ksMass, bdMass ) + self.kinematics[511][310][321][211] = ThreeBodyKinematics( kMass, piMass, ksMass, bdMass ) + self.kinematics[511][310][321][321] = ThreeBodyKinematics( kMass, kMass, ksMass, bdMass ) + self.kinematics[511][310][2212][2212] = ThreeBodyKinematics( pMass, pMass, ksMass, bdMass ) + self.kinematics[511][3122] = {} + self.kinematics[511][3122][211] = {} + self.kinematics[511][3122][321] = {} + self.kinematics[511][3122][2212] = {} + self.kinematics[511][3122][211][2212] = ThreeBodyKinematics( piMass, pMass, lMass, bdMass ) + self.kinematics[511][3122][321][2212] = ThreeBodyKinematics( kMass, pMass, lMass, bdMass ) + self.kinematics[511][3122][2212][211] = ThreeBodyKinematics( pMass, piMass, lMass, bdMass ) + self.kinematics[511][3122][2212][321] = ThreeBodyKinematics( pMass, kMass, lMass, bdMass ) + + self.kinematics[531] = {} + self.kinematics[531][310] = {} + self.kinematics[531][310][211] = {} + self.kinematics[531][310][321] = {} + self.kinematics[531][310][2212] = {} + self.kinematics[531][310][211][211] = ThreeBodyKinematics( piMass, piMass, ksMass, bsMass ) + self.kinematics[531][310][211][321] = ThreeBodyKinematics( piMass, kMass, ksMass, bsMass ) + self.kinematics[531][310][321][211] = ThreeBodyKinematics( kMass, piMass, 
ksMass, bsMass ) + self.kinematics[531][310][321][321] = ThreeBodyKinematics( kMass, kMass, ksMass, bsMass ) + self.kinematics[531][310][2212][2212] = ThreeBodyKinematics( pMass, pMass, ksMass, bsMass ) + self.kinematics[531][3122] = {} + self.kinematics[531][3122][211] = {} + self.kinematics[531][3122][321] = {} + self.kinematics[531][3122][2212] = {} + self.kinematics[531][3122][211][2212] = ThreeBodyKinematics( piMass, pMass, lMass, bsMass ) + self.kinematics[531][3122][321][2212] = ThreeBodyKinematics( kMass, pMass, lMass, bsMass ) + self.kinematics[531][3122][2212][211] = ThreeBodyKinematics( pMass, piMass, lMass, bsMass ) + self.kinematics[531][3122][2212][321] = ThreeBodyKinematics( pMass, kMass, lMass, bsMass ) + + self.kinematics[5122] = {} + self.kinematics[5122][3122] = {} + self.kinematics[5122][3122][211] = {} + self.kinematics[5122][3122][321] = {} + self.kinematics[5122][3122][2212] = {} + self.kinematics[5122][3122][211][211] = ThreeBodyKinematics( piMass, piMass, lMass, lbMass ) + self.kinematics[5122][3122][211][321] = ThreeBodyKinematics( piMass, kMass, lMass, lbMass ) + self.kinematics[5122][3122][321][211] = ThreeBodyKinematics( kMass, piMass, lMass, lbMass ) + self.kinematics[5122][3122][321][321] = ThreeBodyKinematics( kMass, kMass, lMass, lbMass ) + self.kinematics[5122][3122][2212][2212] = ThreeBodyKinematics( pMass, pMass, lMass, lbMass ) + self.kinematics[5122][310] = {} + self.kinematics[5122][310][211] = {} + self.kinematics[5122][310][321] = {} + self.kinematics[5122][310][2212] = {} + self.kinematics[5122][310][211][2212] = ThreeBodyKinematics( piMass, pMass, ksMass, lbMass ) + self.kinematics[5122][310][321][2212] = ThreeBodyKinematics( kMass, pMass, ksMass, lbMass ) + self.kinematics[5122][310][2212][211] = ThreeBodyKinematics( pMass, piMass, ksMass, lbMass ) + self.kinematics[5122][310][2212][321] = ThreeBodyKinematics( pMass, kMass, ksMass, lbMass ) + + self.kinematics[5232] = {} + self.kinematics[5232][3122] = {} + 
self.kinematics[5232][3122][211] = {} + self.kinematics[5232][3122][321] = {} + self.kinematics[5232][3122][2212] = {} + self.kinematics[5232][3122][211][211] = ThreeBodyKinematics( piMass, piMass, lMass, xibMass ) + self.kinematics[5232][3122][211][321] = ThreeBodyKinematics( piMass, kMass, lMass, xibMass ) + self.kinematics[5232][3122][321][211] = ThreeBodyKinematics( kMass, piMass, lMass, xibMass ) + self.kinematics[5232][3122][321][321] = ThreeBodyKinematics( kMass, kMass, lMass, xibMass ) + self.kinematics[5232][3122][2212][2212] = ThreeBodyKinematics( pMass, pMass, lMass, xibMass ) + self.kinematics[5232][310] = {} + self.kinematics[5232][310][211] = {} + self.kinematics[5232][310][321] = {} + self.kinematics[5232][310][2212] = {} + self.kinematics[5232][310][211][2212] = ThreeBodyKinematics( piMass, pMass, ksMass, xibMass ) + self.kinematics[5232][310][321][2212] = ThreeBodyKinematics( kMass, pMass, ksMass, xibMass ) + self.kinematics[5232][310][2212][211] = ThreeBodyKinematics( pMass, piMass, ksMass, xibMass ) + self.kinematics[5232][310][2212][321] = ThreeBodyKinematics( pMass, kMass, ksMass, xibMass ) + + + # form the decay descriptors + self.form_reco_decay_descriptor() + self.form_mc_decay_descriptors() + + return SUCCESS + + + def find_kinematics( self, parentID, h1ID, h2ID, v0ID ) : + + try : + kine = self.kinematics[abs(parentID)][abs(v0ID)][abs(h1ID)][abs(h2ID)] + except Exception, e : + self.Error( 'Problem retrieving kinematics object for %d -> %d %d %d' % ( parentID, h1ID, h2ID, v0ID ) ) + raise e + + return kine + + + def check_tes_locations( self ) : + """ + Called by initialize to check that the TES locations specified are as expected. 
+ """ + + if 1 != len( self.Inputs ) : + return self.Error( 'Expected 1 TES location in Inputs but found %d' % len( self.Inputs ) ) + + loc = self.Inputs[0] + if loc.find('DD') > -1 : + if self.name().find('DD') < 0 : + return self.Error( 'TES location contains "DD" but my name does not' ) + elif loc.find('LL') > -1 : + if self.name().find('LL') < 0 : + return self.Error( 'TES location contains "LL" but my name does not' ) + elif loc.find('LD') > -1 : + if self.name().find('LD') < 0 : + return self.Error( 'TES location contains "LD" but my name does not' ) + else : + return self.Error( 'TES location does not contain "LL", "LD" or "DD"' ) + + return SUCCESS + + + def p4_tuple( self, tuple, particle, name, dtf_params = None ) : + """ + Store the id, charge, 4-momentum, mass, p_t and eta of a particle. + """ + + p4 = particle.momentum() + pid = particle.particleID() + + tuple.column_int( name + '_KEY', particle.key() ) + tuple.column_int( name + '_ID', pid.pid() ) + tuple.column_int( name + '_Q', particle.charge() ) + tuple.column_double( name + '_P', p4.P() ) + tuple.column_double( name + '_PE', p4.E() ) + tuple.column_double( name + '_PX', p4.Px() ) + tuple.column_double( name + '_PY', p4.Py() ) + tuple.column_double( name + '_PZ', p4.Pz() ) + tuple.column_double( name + '_PT', p4.Pt() ) + tuple.column_double( name + '_ETA', p4.Eta() ) + tuple.column_double( name + '_PHI', p4.Phi() ) + tuple.column_double( name + '_THETA', p4.Theta() ) + tuple.column_double( name + '_M', p4.M() ) + tuple.column_double( name + '_MM', particle.measuredMass() ) + tuple.column_double( name + '_MMERR', particle.measuredMassErr() ) + if dtf_params : + p4 = dtf_params.momentum() + tuple.column_double( name + '_DTFM', p4.m().value() ) + tuple.column_double( name + '_DTFMERR', p4.m().error() ) + + + def mc_p4_tuple( self, tuple, mcparticles, mcmatched, name, signal_matched ) : + """ + Store the id, charge, 4-momentum, mass, p_t and eta of an MC particle + """ + + extra_name = '' + if 
signal_matched : + extra_name = '_SIG' + + if mcparticles.empty() or mcmatched==0 : + tuple.column_int( name + extra_name + '_TRUEID', -1 ) + tuple.column_int( name + extra_name + '_MOTHER_TRUEID', -1 ) + tuple.column_int( name + extra_name + '_GDMOTHER_TRUEID', -1 ) + tuple.column_int( name + extra_name + '_GTGDMOTHER_TRUEID', -1 ) + tuple.column_int( name + extra_name + '_MOTHER_KEY', -1 ) + tuple.column_int( name + extra_name + '_GDMOTHER_KEY', -1 ) + tuple.column_int( name + extra_name + '_GTGDMOTHER_KEY', -1 ) + tuple.column_int( name + extra_name + '_TRUEQ', -1 ) + tuple.column_double( name + extra_name + '_TRUEP', -1.1 ) + tuple.column_double( name + extra_name + '_TRUEPE', -1.1 ) + tuple.column_double( name + extra_name + '_TRUEPX', -1.1 ) + tuple.column_double( name + extra_name + '_TRUEPY', -1.1 ) + tuple.column_double( name + extra_name + '_TRUEPZ', -1.1 ) + tuple.column_double( name + extra_name + '_TRUEPT', -1.1 ) + tuple.column_double( name + extra_name + '_TRUEETA', -1.1 ) + tuple.column_double( name + extra_name + '_TRUEPHI', -1.1 ) + tuple.column_double( name + extra_name + '_TRUETHETA', -1.1 ) + tuple.column_double( name + extra_name + '_TRUEM', -1.1 ) + tuple.column_int( name + extra_name + '_OSCIL', -1 ) + else : + mcparticle = mcparticles[0] + motherid = -1 + gdmotherid = -1 + gtgdmotherid = -1 + motherkey = -1 + gdmotherkey = -1 + gtgdmotherkey = -1 + mcmother = mcparticle.mother() + if mcmother : + motherid = int(MCID(mcmother)) + motherkey = mcmother.key() + mcgrandmother = mcmother.mother() + if mcgrandmother : + gdmotherid = int(MCID(mcgrandmother)) + gdmotherkey = mcgrandmother.key() + mcgreatgrandmother = mcgrandmother.mother() + if mcgreatgrandmother : + gtgdmotherid = int(MCID(mcgreatgrandmother)) + gtgdmotherkey = mcgreatgrandmother.key() + tuple.column_int( name + extra_name + '_TRUEID', int(MCID(mcparticle)) ) + tuple.column_int( name + extra_name + '_MOTHER_TRUEID', motherid ) + tuple.column_int( name + extra_name + 
'_GDMOTHER_TRUEID', gdmotherid ) + tuple.column_int( name + extra_name + '_GTGDMOTHER_TRUEID', gtgdmotherid ) + tuple.column_int( name + extra_name + '_MOTHER_KEY', motherkey ) + tuple.column_int( name + extra_name + '_GDMOTHER_KEY', gdmotherkey ) + tuple.column_int( name + extra_name + '_GTGDMOTHER_KEY', gtgdmotherkey ) + tuple.column_int( name + extra_name + '_TRUEQ', int(MC3Q(mcparticle)/3) ) + tuple.column_double( name + extra_name + '_TRUEP', MCP(mcparticle) ) + tuple.column_double( name + extra_name + '_TRUEPE', MCE(mcparticle) ) + tuple.column_double( name + extra_name + '_TRUEPX', MCPX(mcparticle) ) + tuple.column_double( name + extra_name + '_TRUEPY', MCPY(mcparticle) ) + tuple.column_double( name + extra_name + '_TRUEPZ', MCPZ(mcparticle) ) + tuple.column_double( name + extra_name + '_TRUEPT', MCPT(mcparticle) ) + tuple.column_double( name + extra_name + '_TRUEETA', MCETA(mcparticle) ) + tuple.column_double( name + extra_name + '_TRUEPHI', MCPHI(mcparticle) ) + tuple.column_double( name + extra_name + '_TRUETHETA', MCTHETA(mcparticle) ) + tuple.column_double( name + extra_name + '_TRUEM', MCM(mcparticle) ) + tuple.column_int( name + extra_name + '_OSCIL', int(MCOSCILLATED(mcparticle)) ) + + + def ip_tuple( self, tuple, particle, name ) : + """ + Store the impact parameter info for the particle + """ + + primaries = self.vselect('PV', PRIMARY ) + bestPV = self.bestVertex( particle ) + + minipfun = MINIP( primaries, self.geo() ) + minipchi2fun = MINIPCHI2( primaries, self.geo() ) + ipbpvfun = IP( bestPV, self.geo() ) + ipchi2bpvfun = IPCHI2( bestPV, self.geo() ) + + tuple.column_double( name + '_MINIP', minipfun(particle) ) + tuple.column_double( name + '_MINIPCHI2', minipchi2fun(particle) ) + tuple.column_double( name + '_IP_OWNPV', ipbpvfun(particle) ) + tuple.column_double( name + '_IPCHI2_OWNPV', ipchi2bpvfun(particle) ) + + + bpvpos = bestPV.position() + tuple.column_double( name + '_OWNPV_X', bpvpos.x() ) + tuple.column_double( name + '_OWNPV_Y', 
bpvpos.y() ) + tuple.column_double( name + '_OWNPV_Z', bpvpos.z() ) + + covMatrix = bestPV.covMatrix() + tuple.column_double( name + '_OWNPV_XERR', TMath.Sqrt( covMatrix(0,0) ) ) + tuple.column_double( name + '_OWNPV_YERR', TMath.Sqrt( covMatrix(1,1) ) ) + tuple.column_double( name + '_OWNPV_ZERR', TMath.Sqrt( covMatrix(2,2) ) ) + + chi2 = bestPV.chi2() + ndof = bestPV.nDoF() + chi2ndof = bestPV.chi2PerDoF() + tuple.column_int( name + '_OWNPV_NDOF', ndof ) + tuple.column_double( name + '_OWNPV_CHI2', chi2 ) + tuple.column_double( name + '_OWNPV_CHI2NDOF', chi2ndof ) + tuple.column_double( name + '_OWNPV_PROB', TMath.Prob( chi2, ndof ) ) + + ntrk = bestPV.tracks().size() + tuple.column_int( name + '_OWNPV_NTRACKS', ntrk ) + + + def vtx_tuple( self, tuple, bcand, v0cand ) : + """ + Store vertex info for the B and KS + """ + + bcand_name = 'B' + v0cand_name = self.simplenames[ v0cand.particleID().pid() ] + + for particle in [bcand, v0cand] : + if particle is bcand : + name = bcand_name + else : + name = v0cand_name + + vertex = particle.endVertex() + + vtxpos = vertex.position() + tuple.column_double( name + '_ENDVERTEX_X', vtxpos.x() ) + tuple.column_double( name + '_ENDVERTEX_Y', vtxpos.y() ) + tuple.column_double( name + '_ENDVERTEX_Z', vtxpos.z() ) + + covMatrix = vertex.covMatrix() + tuple.column_double( name + '_ENDVERTEX_XERR', TMath.Sqrt( covMatrix(0,0) ) ) + tuple.column_double( name + '_ENDVERTEX_YERR', TMath.Sqrt( covMatrix(1,1) ) ) + tuple.column_double( name + '_ENDVERTEX_ZERR', TMath.Sqrt( covMatrix(2,2) ) ) + + chi2 = vertex.chi2() + ndof = vertex.nDoF() + chi2ndof = vertex.chi2PerDoF() + tuple.column_int( name + '_ENDVERTEX_NDOF', ndof ) + tuple.column_double( name + '_ENDVERTEX_CHI2', chi2 ) + tuple.column_double( name + '_ENDVERTEX_CHI2NDOF', chi2ndof ) + tuple.column_double( name + '_ENDVERTEX_PROB', TMath.Prob( chi2, ndof ) ) + + primaries = self.vselect('PV', PRIMARY ) + minvdfun = MINVVD( primaries ) + minvdchi2fun = MINVVDCHI2( primaries ) + + 
tuple.column_double( name + '_MINVD', minvdfun(particle) ) + tuple.column_double( name + '_MINVDCHI2', minvdchi2fun(particle) ) + + bestPV = self.bestVertex( particle ) + vdfun = VD( bestPV ) + vdchi2fun = VDCHI2( bestPV ) + dirafun = DIRA( bestPV ) + + tuple.column_double( name + '_VD_OWNPV', vdfun(particle) ) + tuple.column_double( name + '_VDCHI2_OWNPV', vdchi2fun(particle) ) + tuple.column_double( name + '_DIRA_OWNPV', dirafun(particle) ) + + b_vtx = bcand.endVertex() + vdfun = VD(b_vtx) + vdchi2fun = VDCHI2(b_vtx) + tuple.column_double( v0cand_name+'_VTX_SEP', vdfun(v0cand) ) + tuple.column_double( v0cand_name+'_VTX_SEPCHI2', vdchi2fun(v0cand) ) + + + def mc_vtx_tuple( self, tuple, mcparticles, mcmatched, name, signal_matched ) : + """ + Store vertex and lifetime info for the MC particle + """ + + extra_name = '' + if signal_matched : + extra_name = '_SIG' + + if mcparticles.empty() or mcmatched==0 : + tuple.column_double( name + extra_name + '_TRUEORIGINVERTEX_X', -1.1 ) + tuple.column_double( name + extra_name + '_TRUEORIGINVERTEX_Y', -1.1 ) + tuple.column_double( name + extra_name + '_TRUEORIGINVERTEX_Z', -1.1 ) + tuple.column_double( name + extra_name + '_TRUECTAU' , -1.1 ) + else : + mcparticle = mcparticles[0] + tuple.column_double( name + extra_name + '_TRUEORIGINVERTEX_X', MCVFASPF(MCVX)(mcparticle) ) + tuple.column_double( name + extra_name + '_TRUEORIGINVERTEX_Y', MCVFASPF(MCVY)(mcparticle) ) + tuple.column_double( name + extra_name + '_TRUEORIGINVERTEX_Z', MCVFASPF(MCVZ)(mcparticle) ) + tuple.column_double( name + extra_name + '_TRUECTAU' , MCCTAU(mcparticle) ) + + + def trk_tuple( self, tuple, particle, name ) : + """ + Store track-related quantities: + - track chi2 per dof + - PID quantities: PIDk, PIDp, PIDmu and ISMUON + """ + + # track quantities + tuple.column_double( name + '_TRACK_CHI2NDOF', TRCHI2DOF(particle) ) + tuple.column_double( name + '_TRACK_PCHI2', TRPCHI2(particle) ) + tuple.column_double( name + '_TRACK_GHOSTPROB', 
TRGHOSTPROB(particle) ) + + # PID quantities + tuple.column_double( name + '_PIDe', PIDe(particle) ) + tuple.column_double( name + '_PIDmu', PIDmu(particle) ) + tuple.column_double( name + '_PIDpi', PIDpi(particle) ) + tuple.column_double( name + '_PIDK', PIDK(particle) ) + tuple.column_double( name + '_PIDp', PIDp(particle) ) + tuple.column_double( name + '_PROBNNe', PROBNNe(particle) ) + tuple.column_double( name + '_PROBNNmu', PROBNNmu(particle) ) + tuple.column_double( name + '_PROBNNpi', PROBNNpi(particle) ) + tuple.column_double( name + '_PROBNNK', PROBNNK(particle) ) + tuple.column_double( name + '_PROBNNp', PROBNNp(particle) ) + tuple.column_double( name + '_PROBNNghost', PROBNNghost(particle) ) + + hasmuon = -1 + ismuon = -1 + hasproto = HASPROTO(particle) + if hasproto : + hasmuon = HASMUON(particle) + if hasmuon : + ismuon = ISMUON(particle) + tuple.column_int( name + '_hasProto', int(hasproto) ) + tuple.column_int( name + '_hasMuon', int(hasmuon) ) + tuple.column_int( name + '_isMuon', int(ismuon) ) + + # HCAL info for L0 Hadron correction + self.part2calo.match( particle, '/dd/Structure/LHCb/DownstreamRegion/Hcal' ) + + caloX = self.part2calo.caloState().x() + caloY = self.part2calo.caloState().y() + caloZ = self.part2calo.caloState().z() + caloP = self.part2calo.caloState().p() + caloM = particle.measuredMass() + + trackET = TMath.Sqrt( caloP*caloP + caloM*caloM ) * TMath.Sqrt( caloX*caloX + caloY*caloY ) / TMath.Sqrt( caloX*caloX + caloY*caloY + caloZ*caloZ ) + + region = self.isinside_HCAL( caloX, caloY ) + + tuple.column_double( name + '_L0Calo_HCAL_realET', trackET ) + tuple.column_double( name + '_L0Calo_HCAL_xProjection', caloX ) + tuple.column_double( name + '_L0Calo_HCAL_yProjection', caloY ) + tuple.column_int( name + '_L0Calo_HCAL_region', region ) + + + def isinside_HCAL( self, caloX, caloY ) : + """ + Determine whether the particle is in the Inner, Outer part of the HCAL + or outside it completely + """ + + inside = True + inner = False + 
outer = False + + HCAL_CellSize_Inner = 131.3 + HCAL_CellSize_Outer = 262.6 + HCAL_xMax_Inner = 2101 + HCAL_yMax_Inner = 1838 + HCAL_xMax_Outer = 4202 + HCAL_yMax_Outer = 3414 + + # projection inside calo + if TMath.Abs( caloX ) < HCAL_xMax_Outer and TMath.Abs( caloY ) < HCAL_yMax_Outer : + # projection inside inner calo (else is outer calo) + if TMath.Abs( caloX ) < HCAL_xMax_Inner and TMath.Abs( caloY ) < HCAL_yMax_Inner : + # projections outside the beampipe (in x) + if TMath.Abs( caloX ) > 2*HCAL_CellSize_Inner : + inner = True + elif TMath.Abs( caloY ) > 2*HCAL_CellSize_Inner : + inner = True + else : + inside = False + else : + outer = True + else : + inside = False + + if not inside : + return -1 + elif inner : + return 1 + elif outer : + return 0 + else : + return -999 + + + def pid_swap( self, bcand, bpid, h1pid, h2pid ) : + """ + Change the ID of the B candidate and the IDs of its charged daughters. + The particle charges will be unchanged, i.e. supplied IDs will be + treated as unsigned values. + """ + + # first find the "original" IDs + orig_b_pid = bcand.particleID().pid() + orig_daug_pids = [] + for daug in bcand.children() : + daug_pid = daug.particleID().pid() + orig_daug_pids.append( daug_pid ) + + # now swap the ID of the B + bpid = abs(bpid) + if bcand.particleID().pid() < 0 : + bpid = -bpid + bcand.setParticleID( LHCb.ParticleID(bpid) ) + + # swap the charged daughter IDs as well + h1pid = abs(h1pid) + h2pid = -abs(h2pid) + + for daug in bcand.children() : + daug_pid = daug.particleID() + + if daug_pid == self.daug3ID or daug_pid == self.daug3ConjID : + continue + + if daug.charge() > 0 : + daug.setParticleID( LHCb.ParticleID(h1pid) ) + else : + daug.setParticleID( LHCb.ParticleID(h2pid) ) + + return (orig_b_pid, orig_daug_pids) + + + def pid_swap_back( self, bcand, orig_bpid, orig_daug_pids ) : + """ + Change the IDs of the B candidate and its charged daughters back to + their original values. 
+ """ + + bcand.setParticleID( LHCb.ParticleID( orig_bpid ) ) + + index = 0 + for daug in bcand.children() : + daug.setParticleID( LHCb.ParticleID( orig_daug_pids[index] ) ) + index += 1 + + + def dtf_p4_tuple( self, dpinfo, bcand ) : + """ + Store the results of a vertex fit where the masses of various particles + in the decay tree are constrained to their PDG values. + Values stored are: + - information on the success (or otherwise) of the fit + - the vertex chi2, ndof and probability + - the B lifetime, associated uncertainty and significance + - the 4-momentum of the B and its 3 daughters + - the invariant mass-squared pairs m12Sq, m13Sq, m23Sq + - the square DP co-ordinates mPrime and thPrime + """ + + # get the name of the V0 + v0name = self.v0name + + # perform the fit adding constraints on both the parent and V0 masses + pv = self.bestVertex( bcand ) + fitter = cpp.DecayTreeFitter.Fitter( bcand, pv, self.stateprovider ) + fitter.setMassConstraint( bcand ) + for daug in bcand.daughters(): + daug_pid = daug.particleID() + if daug_pid == self.daug3ID or daug_pid == self.daug3ConjID : + fitter.setMassConstraint( daug ) + fitter.fit() + + bp4 = Gaudi.Math.LorentzVectorWithError() + bctau = Gaudi.Math.ValueWithError() + + daug_id = [ 0, 0, 0 ] + daug_p4 = [ Gaudi.Math.LorentzVectorWithError(), Gaudi.Math.LorentzVectorWithError(), Gaudi.Math.LorentzVectorWithError() ] + daug_names = [ 'h1', 'h2', v0name ] + + m12Sq = -1.1 + m13Sq = -1.1 + m23Sq = -1.1 + + mPrime = -1.1 + thPrime = -1.1 + + # check if the fit succeeded + if fitter.status() == 0 : + + # store information on the B candidate + bparams = fitter.fitParams( bcand ) + bp4 = bparams.momentum() + bctau = bparams.ctau() + + bcandID = bcand.particleID().pid() + + # loop through 3 B daughters and store their 4-momenta + if not self.wrongsign : + h1cand, h2cand, v0cand = self.find_daughters( bcand ) + else : + h1cand, h2cand, v0cand = self.find_daughters_wrongsign( bcand ) + + daughters = [ h1cand, h2cand, 
v0cand ] + + for i in range(3) : + params = fitter.fitParams( daughters[i] ) + daug_p4[i] = params.momentum() + daug_id[i] = daughters[i].particleID().abspid() + + p12 = daug_p4[0] + daug_p4[1] + p13 = daug_p4[0] + daug_p4[2] + p23 = daug_p4[1] + daug_p4[2] + + m12Sq = p12.M2() + m13Sq = p13.M2() + m23Sq = p23.M2() + + kine = self.find_kinematics( bcandID, daug_id[0], daug_id[1], daug_id[2] ) + if kine.withinDPLimits( m13Sq, m23Sq ) : + kine.updateKinematics( m13Sq, m23Sq ) + mPrime = kine.mPrime + thPrime = kine.thPrime + + if m12Sq<0 : m12Sq=-1.1 + if m12Sq>100e6 : m12Sq=-1.1 + if m13Sq<0 : m13Sq=-1.1 + if m13Sq>100e6 : m13Sq=-1.1 + if m23Sq<0 : m23Sq=-1.1 + if m23Sq>100e6 : m23Sq=-1.1 + + # store the fit results (sucess or otherwise, lifetime, 4-momenta and DP co-ordinates) + dpinfo.store_fit_results( fitter, bctau, bp4, daug_p4, m12Sq, m13Sq, m23Sq, mPrime, thPrime ) + + + def calc_DP_info( self, bcand ) : + """ + Calculate the DP position and the daughter 4-momenta with the + B-candidate mass constrained to either the nominal Bd, Bs or Lb mass + and under various mass assumptions for the daughters + """ + + self.dpinfos = [] + + for b_id in [ 511, 531, 5122, 5232 ] : + for h1_id in [ 211, 321, 2212 ] : + for h2_id in [ 211, 321, 2212 ] : + + # only process baryon-number conserving modes + if ((b_id == self.xibID.abspid() and self.daug3ID == self.ksID) + or (b_id == self.lbID.abspid() and self.daug3ID == self.ksID) + or ((b_id == self.bdID.abspid() or b_id == self.bsID.abspid()) and self.daug3ID == self.lID)) : + + if (h1_id == 2212 and h2_id == 2212) or (h1_id != 2212 and h2_id != 2212) : + continue + elif (h1_id == 2212 and h2_id != 2212) or (h1_id != 2212 and h2_id == 2212) : + continue + + # construct the suffix to be of the form, e.g. 
Bs2KpiKS + suffix = self.simplenames[b_id]+'2'+self.simplenames[h1_id]+self.simplenames[h2_id]+self.simplenames[self.daug3ID.abspid()] + + # do the PID swap + orig_b_pid, orig_daug_pids = self.pid_swap( bcand, b_id, h1_id, h2_id ) + + # create the object to store the information + dpinfo = DalitzInfo( self.v0name, suffix ) + + # do the fits and store the information + self.dtf_p4_tuple( dpinfo, bcand ) + + # do the swap back to the original IDs + self.pid_swap_back( bcand, orig_b_pid, orig_daug_pids ) + + # append the DP info object to the list + self.dpinfos.append( dpinfo ) + + + def store_DP_info( self, tuple ) : + """ + Store the pre-calculated DP information in the supplied tuple + """ + + for info in self.dpinfos : + info.fill_tuple( tuple ) + + + def mc_dp_tuple( self, tuple, mcparents ) : + """ + Store the MC truth DP co-ordinates + """ + + # loop through the B daughters and store their 4-momenta treating the + # expected 3 daughters and PHOTOS photons separately + + daug_names = [ 'h1', 'h2', self.v0name ] + + # if there is no MC info + if mcparents.empty() : + tuple.column_int( 'nPHOTOS', -1 ) + for daug_name in daug_names : + tuple.column_double( daug_name + '_CORRPE', -1.1 ) + tuple.column_double( daug_name + '_CORRPX', -1.1 ) + tuple.column_double( daug_name + '_CORRPY', -1.1 ) + tuple.column_double( daug_name + '_CORRPZ', -1.1 ) + tuple.column_double( 'm12Sq_MC', -1.1 ) + tuple.column_double( 'm13Sq_MC', -1.1 ) + tuple.column_double( 'm23Sq_MC', -1.1 ) + tuple.column_double( 'mPrime_MC', -1.1 ) + tuple.column_double( 'thPrime_MC', -1.1 ) + return + + # otherwise take the first entry + # TODO - can we do something more clever here? 
+ mcparent = mcparents[0] + + mcparent_id = mcparent.particleID().pid() + + daug_id = [ 0, 0, 0 ] + daug_p4 = [ Gaudi.LorentzVector(), Gaudi.LorentzVector(), Gaudi.LorentzVector() ] + gamma_p4 = [] + + # python list of children, may initially contain resonances so it is + # recursively searched through to find the correct final state particles. + children = self.mc_find_daug( [ daug for daug in mcparent.children( True ) ] ) + + # check that we are left with 3 non-photon daughters + ndaug = 0 + for daug in children : + if daug.particleID().pid() != 22 : + ndaug += 1 + if ndaug != 3 : + self.Error( 'Parent particle does not have 3 non-photon final-state daughters' ) + e = Exception('Unexpected number of final-state particles') + raise e + + for daug in children : + + daugID = daug.particleID() + + px = MCPX( daug ) + py = MCPY( daug ) + pz = MCPZ( daug ) + pe = MCE ( daug ) + + p4 = Gaudi.LorentzVector( px, py, pz, pe ) + + if 22 == daugID.pid() : + gamma_p4.append( p4 ) + elif self.mcdaug1ID.abspid() == self.mcdaug2ID.abspid() : + if daugID == self.mcdaug3ID or daugID == self.mcdaug3ConjID : + daug_id[2] = daugID.pid() + daug_p4[2] = p4 + elif daugID == self.mcdaug1ID : + daug_id[0] = daugID.pid() + daug_p4[0] = p4 + else : + daug_id[1] = daugID.pid() + daug_p4[1] = p4 + else : + if daugID == self.mcdaug3ID or daugID == self.mcdaug3ConjID : + daug_id[2] = daugID.pid() + daug_p4[2] = p4 + elif daugID.abspid() == self.mcdaug1ID.abspid() : + daug_id[0] = daugID.pid() + daug_p4[0] = p4 + else : + daug_id[1] = daugID.pid() + daug_p4[1] = p4 + + ngamma = len(gamma_p4) + tuple.column_int( 'nPHOTOS', ngamma ) + + if 0 != ngamma : + + for gamma in gamma_p4 : + minangle = 1000.0 + mindaug = -1 + for daug in daug_p4 : + gammaPHat = gamma.Vect().Unit() + daugPHat = daug.Vect().Unit() + angle = TMath.ACos( gammaPHat.Dot( daugPHat ) ) + if abs(angle) < minangle : + minangle = angle + mindaug = daug_p4.index(daug) + + daug_p4[ mindaug ] += gamma + + + p12 = daug_p4[0] + daug_p4[1] 
+ p13 = daug_p4[0] + daug_p4[2] + p23 = daug_p4[1] + daug_p4[2] + + for i in range(3) : + tuple.column_double( daug_names[i] + '_CORRPE', daug_p4[i].E() ) + tuple.column_double( daug_names[i] + '_CORRPX', daug_p4[i].Px() ) + tuple.column_double( daug_names[i] + '_CORRPY', daug_p4[i].Py() ) + tuple.column_double( daug_names[i] + '_CORRPZ', daug_p4[i].Pz() ) + + m12Sq = p12.M2() + m13Sq = p13.M2() + m23Sq = p23.M2() + + mPrime = -1.1 + thPrime = -1.1 + + + kine = self.find_kinematics( mcparent_id, daug_id[0], daug_id[1], daug_id[2] ) + if kine.withinDPLimits( m13Sq, m23Sq ) : + kine.updateKinematics( m13Sq, m23Sq ) + mPrime = kine.mPrime + thPrime = kine.thPrime + + if m12Sq<0 : m12Sq=-1.1 + if m12Sq>100e6 : m12Sq=-1.1 + if m13Sq<0 : m13Sq=-1.1 + if m13Sq>100e6 : m13Sq=-1.1 + if m23Sq<0 : m23Sq=-1.1 + if m23Sq>100e6 : m23Sq=-1.1 + + tuple.column_double( 'm12Sq_MC', m12Sq ) + tuple.column_double( 'm13Sq_MC', m13Sq ) + tuple.column_double( 'm23Sq_MC', m23Sq ) + + tuple.column_double( 'mPrime_MC', mPrime ) + tuple.column_double( 'thPrime_MC', thPrime ) + + + def mc_find_daug(self, children): + lower_children = [] + for daug in children: + daugID = daug.particleID() + if 22 == daugID.pid() : + lower_children.append(daug) + continue + elif daugID.abspid() in [ self.mcdaug1ID.abspid(), self.mcdaug2ID.abspid(), self.mcdaug3ID.abspid() ] : + lower_children.append(daug) + continue + else : + # get the daughters of the unknown particle + daug_children = [ lower_daug for lower_daug in daug.children( True ) ] + # check that there are daughters (resonance) and not just unknown + if len(daug_children)>0: + # apply same recursive function to daughter's children + lower_children.extend( self.mc_find_daug( daug_children ) ) + continue + else : + self.Error( 'Daughter does not match any of the expected daughter types and has no children: '+str(daugID.pid()) ) + e = Exception('Unknown daughter type') + raise e + return lower_children + + + def trig_tuple( self, tuple, bcand ) : + + # 
Find the TCK + location = 'Hlt/DecReports' + if self.RootInTES != '' : + location = self.RootInTES + location + hdr = self.get(location) + tck = -1 + if hdr : + tck = hdr.configuredTCK() + tuple.column_int("tck", tck ) + + # Setup the TISTOS tools + self.l0tistostool.setOfflineInput( bcand ) + self.tistostool.setOfflineInput( bcand ) + + # Get the L0 decisions, plus TIS & TOS info + + l0triggers = self.l0tistostool.triggerSelectionNames('L0.*Decision') + self.l0tistostool.setTriggerInput( 'L0.*Decision' ) + l0 = self.l0tistostool.tisTosTobTrigger() + l0dec = l0.decision() + l0tis = l0.tis() + l0tos = l0.tos() + tuple.column_int("L0Global_Dec", l0dec ) + tuple.column_int("L0Global_TIS", l0tis ) + tuple.column_int("L0Global_TOS", l0tos ) + + l0declist = self.l0tistostool.triggerSelectionNames( self.tistostool.kTrueRequired, self.tistostool.kAnything, self.tistostool.kAnything ) + l0tislist = self.l0tistostool.triggerSelectionNames( self.tistostool.kTrueRequired, self.tistostool.kTrueRequired, self.tistostool.kAnything ) + l0toslist = self.l0tistostool.triggerSelectionNames( self.tistostool.kTrueRequired, self.tistostool.kAnything, self.tistostool.kTrueRequired ) + + # set the list of L0 decisions to be stored + l0tuplelist = ['L0DiMuonDecision', + 'L0MuonDecision', + 'L0ElectronDecision', + 'L0ElectronHiDecision', + 'L0PhotonDecision', + 'L0HadronDecision'] + + for line in l0triggers : + if line in l0tuplelist : + l0dec = 0 + l0tis = 0 + l0tos = 0 + if line in l0declist : l0dec = 1 + if line in l0tislist : l0tis = 1 + if line in l0toslist : l0tos = 1 + tuple.column_int(line+"_Dec", l0dec ) + tuple.column_int(line+"_TIS", l0tis ) + tuple.column_int(line+"_TOS", l0tos ) + + # Get the HLT decisions, plus TIS & TOS info + + hlt1triggers = self.tistostool.triggerSelectionNames('Hlt1.*Decision') + self.tistostool.setTriggerInput( 'Hlt1.*Decision' ) + hlt1 = self.tistostool.tisTosTobTrigger() + hlt1dec = hlt1.decision() + hlt1tis = hlt1.tis() + hlt1tos = hlt1.tos() + 
tuple.column_int("Hlt1Global_Dec", hlt1dec ) + tuple.column_int("Hlt1Global_TIS", hlt1tis ) + tuple.column_int("Hlt1Global_TOS", hlt1tos ) + + hlt1declist = self.tistostool.triggerSelectionNames( self.tistostool.kTrueRequired, self.tistostool.kAnything, self.tistostool.kAnything ) + hlt1tislist = self.tistostool.triggerSelectionNames( self.tistostool.kTrueRequired, self.tistostool.kTrueRequired, self.tistostool.kAnything ) + hlt1toslist = self.tistostool.triggerSelectionNames( self.tistostool.kTrueRequired, self.tistostool.kAnything, self.tistostool.kTrueRequired ) + + #set the list of hlt1 decision to be stored + hlt1tuplelist = ['Hlt1TrackAllL0Decision', + 'Hlt1TrackMuonDecision', + 'Hlt1TrackPhotonDecision'] + + for line in hlt1triggers : + if line in hlt1tuplelist : + hlt1dec = 0 + hlt1tis = 0 + hlt1tos = 0 + if line in hlt1declist : hlt1dec = 1 + if line in hlt1tislist : hlt1tis = 1 + if line in hlt1toslist : hlt1tos = 1 + tuple.column_int(line+"_Dec", hlt1dec ) + tuple.column_int(line+"_TIS", hlt1tis ) + tuple.column_int(line+"_TOS", hlt1tos ) + + hlt2triggers = self.tistostool.triggerSelectionNames('Hlt2.*Decision') + self.tistostool.setTriggerInput( 'Hlt2.*Decision' ) + hlt2 = self.tistostool.tisTosTobTrigger() + hlt2dec = hlt2.decision() + hlt2tis = hlt2.tis() + hlt2tos = hlt2.tos() + tuple.column_int("Hlt2Global_Dec", hlt2dec ) + tuple.column_int("Hlt2Global_TIS", hlt2tis ) + tuple.column_int("Hlt2Global_TOS", hlt2tos ) + + hlt2declist = self.tistostool.triggerSelectionNames( self.tistostool.kTrueRequired, self.tistostool.kAnything, self.tistostool.kAnything ) + hlt2tislist = self.tistostool.triggerSelectionNames( self.tistostool.kTrueRequired, self.tistostool.kTrueRequired, self.tistostool.kAnything ) + hlt2toslist = self.tistostool.triggerSelectionNames( self.tistostool.kTrueRequired, self.tistostool.kAnything, self.tistostool.kTrueRequired ) + + # set the list of hlt2 decision to be stored + hlt2tuplelist = ['Hlt2Topo2BodyBBDTDecision', + 
                         'Hlt2Topo3BodyBBDTDecision',
                         'Hlt2Topo4BodyBBDTDecision',
                         'Hlt2Topo2BodySimpleDecision',
                         'Hlt2Topo3BodySimpleDecision',
                         'Hlt2Topo4BodySimpleDecision',
                         'Hlt2B2HHDecision',
                         'Hlt2B2HHPi0_MergedDecision']

        # store Dec/TIS/TOS flags for each HLT2 line of interest
        for line in hlt2triggers :
            if line in hlt2tuplelist :
                hlt2dec = 0
                hlt2tis = 0
                hlt2tos = 0
                if line in hlt2declist : hlt2dec = 1
                if line in hlt2tislist : hlt2tis = 1
                if line in hlt2toslist : hlt2tos = 1
                tuple.column_int(line+"_Dec", hlt2dec )
                tuple.column_int(line+"_TIS", hlt2tis )
                tuple.column_int(line+"_TOS", hlt2tos )


    def store_DTF_fit_results( self, tuple, fitter, suffix ) :
        """
        Fill the supplied tuple with results from the supplied DTF object

        tuple  : the ntuple being filled
        fitter : a DecayTreeFitter.Fitter that has already had fit() called
        suffix : string appended to each column name; if it contains 'PV'
                 the candidate was fitted with a PV constraint and the
                 ctau columns are also filled
        """

        # store info on the success (or otherwise) of the fit
        tuple.column_int( 'B_DTF_STATUS_'+suffix, fitter.status() )
        tuple.column_int( 'B_DTF_ERRCODE_'+suffix, fitter.errCode() )
        tuple.column_int( 'B_DTF_NITER_'+suffix, fitter.nIter() )

        if fitter.status() != 0 :
            # fit failed - fill dummy values so the columns always exist
            # NOTE(review): this branch fills B_M_<suffix> but not
            # B_MERR_<suffix>, while the success branch fills both -
            # confirm the Tuple implementation tolerates the asymmetry
            tuple.column_int( 'B_ENDVERTEX_NDOF_'+suffix, -1 )
            tuple.column_double( 'B_ENDVERTEX_CHI2_'+suffix, -1.1 )
            tuple.column_double( 'B_ENDVERTEX_PROB_'+suffix, -1.1 )
            tuple.column_double( 'B_M_'+suffix, -1.1 )
            if 'PV' in suffix :
                tuple.column_double( 'B_CTAU_'+suffix, -1.1 )
                tuple.column_double( 'B_CTAUERR_'+suffix, -1.1 )
                tuple.column_double( 'B_CTAUSIG_'+suffix, -1.1 )

        else :
            # fit succeeded - store chi2, ndof and the associated probability
            chi2 = fitter.chiSquare()
            ndof = fitter.nDof()
            prob = TMath.Prob( chi2, ndof )

            tuple.column_int( 'B_ENDVERTEX_NDOF_'+suffix, ndof )
            tuple.column_double( 'B_ENDVERTEX_CHI2_'+suffix, chi2 )
            tuple.column_double( 'B_ENDVERTEX_PROB_'+suffix, prob )

            # refitted B four-momentum and mass
            bcand = fitter.particle()
            bparams = fitter.fitParams( bcand )

            bp4 = bparams.momentum()
            tuple.column_double( 'B_M_'+suffix, bp4.m().value() )
            tuple.column_double( 'B_MERR_'+suffix, bp4.m().error() )

            # ctau is only defined when a PV constraint was applied
            if 'PV' in suffix :
                bctau = bparams.ctau()
                tuple.column_double( 'B_CTAU_'+suffix, bctau.value() )
                tuple.column_double( 'B_CTAUERR_'+suffix, bctau.error() )
                tuple.column_double( 'B_CTAUSIG_'+suffix, bctau.value()/bctau.error() )


    def extra_B_vars_tuple( self, tuple, bcand ) :
        """
        Store the value of the B mass and lifetime (where appropriate)
        resulting from fits with different sets of constraints:
        - V0 mass constraint
        - PV constraint
        - V0 mass and PV constraints
        Also store the vertex chi2, ndof and probability of the fit.
        """

        # TupleToolPropertime method
        bestPV = self.bestVertex( bcand )
        tau = Double(0.0)
        tau_err = Double(0.0)
        tau_chisq = Double(0.0)
        self.lifetimetool.fit( bestPV, bcand, tau, tau_err, tau_chisq )
        # convert to ctau; presumably ns -> mm via c - TODO confirm units
        tau *= TMath.C()/1e6
        tau_err *= TMath.C()/1e6
        tuple.column_double( 'B_TAU_TT', tau )
        tuple.column_double( 'B_TAUERR_TT', tau_err )

        # V0 mass constraint: constrain any daughter matching the V0 PID
        fitterV0 = cpp.DecayTreeFitter.Fitter( bcand, self.stateprovider )
        for daug in bcand.daughters():
            daug_pid = daug.particleID()
            if daug_pid == self.daug3ID or daug_pid == self.daug3ConjID :
                fitterV0.setMassConstraint( daug )
        fitterV0.fit()

        self.store_DTF_fit_results( tuple, fitterV0, self.v0name )

        # PV constraint
        fitterPV = cpp.DecayTreeFitter.Fitter( bcand, bestPV, self.stateprovider )
        fitterPV.fit()

        self.store_DTF_fit_results( tuple, fitterPV, 'PV' )

        # V0 mass and PV constraints
        fitterV0PV = cpp.DecayTreeFitter.Fitter( bcand, bestPV, self.stateprovider )
        for daug in bcand.daughters():
            daug_pid = daug.particleID()
            if daug_pid == self.daug3ID or daug_pid == self.daug3ConjID :
                fitterV0PV.setMassConstraint( daug )
        fitterV0PV.fit()

        self.store_DTF_fit_results( tuple, fitterV0PV, 'PV'+self.v0name )


    def bkg_category_tuple( self, tuple, cand, name ) :
        """
        Reproduce information stored by TupleToolMCBackgroundInfo

        Queries each configured background-categorisation tool in turn,
        stopping at the first that returns a category in [0, 1000].
        Stores -1 when no tool gives a category in that range.
        """

        category = -1
        for tool in self.bkgtools :
            category = tool.category( cand )
            if category > -1 and category < 1001 :
                break

        # values above 1000 are treated as "no category"
        if category > 1000 :
            category = -1

        tuple.column_int( name + '_BKGCAT', category )


    def vtx_isolation_tuple( self, tuple, bcand ) :
        """
        Reproduce information stored by TupleToolVtxIsoln
        """

        # 20/04/2015 - updated to access information stored by Stripping21 only
        #            - uses RELINFO functor to retrieve the stored information
        #            - have cone variables for deltaR = 1.0, 1.5 and 1.7
        #            - have vertex isolation information

        # get the TES location of our input particles and remove the trailing 'Particles'
        teslocation = self.RootInTES + self.Inputs[0] + '/'

        angles_location_names = [ 'P2ConeVar10', 'P2ConeVar', 'P2ConeVar17' ]
        angles_variable_names = [ 'CONEANGLE', 'CONEMULT', 'CONEPTASYM' ]
        angles = []

        # the stored CONEANGLE value is used only to label the columns,
        # e.g. B_STRIP_CONEMULT_1_0 for deltaR = 1.0
        for name in angles_location_names :
            angle = '%.1f' % RELINFO( teslocation+name, 'CONEANGLE', -1.1 )(bcand)
            mult = int(RELINFO( teslocation+name, 'CONEMULT', -1.1 )(bcand))
            ptasym = RELINFO( teslocation+name, 'CONEPTASYM', -1.1 )(bcand)
            tuple.column_int( 'B_STRIP_CONEMULT_'+angle.replace('.','_'), mult )
            tuple.column_double( 'B_STRIP_PTASYM_'+angle.replace('.','_'), ptasym )

        vtxiso_location_name = 'VtxIsolationVar'
        vtxiso_variable_names = [ 'VTXISONUMVTX', 'VTXISODCHI2ONETRACK', 'VTXISODCHI2MASSONETRACK', 'VTXISODCHI2TWOTRACK', 'VTXISODCHI2MASSTWOTRACK' ]

        # -1.1 is the RELINFO default when the information is absent
        for name in vtxiso_variable_names :
            value = RELINFO( teslocation+vtxiso_location_name, name, -1.1 )(bcand)
            tuple.column_double( 'B_STRIP_'+name, value )


    def mc_match_signal( self, tuple, mcB, mchp, mchm, mcv0, hpcand, hmcand, v0cand ) :
        """
        Do the MC matching for signal MC

        mcB/mchp/mchm/mcv0 : MC particle ranges selected from the signal
                             decay descriptors
        hpcand/hmcand/v0cand : the reconstructed daughters to be matched
        """

        hpname = 'h1'
        hmname = 'h2'
        v0name = self.v0name

        mcmatchhp = 0
        mcmatchhm = 0
        mcmatchv0 = 0

        # only attempt the match if the signal B was found in the MC event
        if not mcB.empty() :
            mcMatcher = self.mcTruth()

            hpFromMC = MCTRUTH( mcMatcher, mchp )
            hmFromMC = MCTRUTH( mcMatcher, mchm )
            v0FromMC = MCTRUTH( mcMatcher, mcv0 )

            if hpFromMC(hpcand) :
                mcmatchhp = 1
            if hmFromMC(hmcand) :
                mcmatchhm = 1
            if v0FromMC(v0cand) :
                mcmatchv0 = 1

        tuple.column_int( hpname+'_SIG_mcMatch', mcmatchhp )
        tuple.column_int( hmname+'_SIG_mcMatch', mcmatchhm )
        tuple.column_int( v0name+'_SIG_mcMatch', mcmatchv0 )

        # the B is considered matched only when all three daughters match
        mcmatched = 0
        if mcmatchhp and mcmatchhm and mcmatchv0 :
            mcmatched = 1
        tuple.column_int( 'B_SIG_mcMatch', mcmatched )

        # store the MC truth info about the B and its daughters
        self.mc_p4_tuple( tuple, mcB, mcmatched, 'B', True )
        self.mc_vtx_tuple( tuple, mcB, mcmatched, 'B', True )
        self.mc_p4_tuple( tuple, mchp, mcmatchhp, hpname, True )
        self.mc_p4_tuple( tuple, mchm, mcmatchhm, hmname, True )
        self.mc_p4_tuple( tuple, mcv0, mcmatchv0, v0name, True )
        self.mc_vtx_tuple( tuple, mcv0, mcmatchv0, v0name, True )


    def mc_match_general( self, tuple, hpcand, hmcand, v0cand ) :
        """
        Do the generalised MC matching

        Unlike mc_match_signal this does not require the signal decay
        chain: each reconstructed daughter is matched against any MC
        particle of a plausible species.
        """

        hpname = 'h1'
        hmname = 'h2'
        v0name = self.v0name

        mcMatcher = self.mcTruth()
        hpfun = RCTRUTH( hpcand, mcMatcher )
        hmfun = RCTRUTH( hmcand, mcMatcher )
        v0fun = RCTRUTH( v0cand, mcMatcher )

        # charged daughters may match any stable charged species;
        # the V0 may additionally match a true KS0 or Lambda0
        mchp = self.mcselect( 'hpmatches', hpfun & ( ('pi+' == MCABSID) | ('K+' == MCABSID) | ('p+' == MCABSID) | ('mu+' == MCABSID) | ('e+' == MCABSID) ) )
        mchm = self.mcselect( 'hmmatches', hmfun & ( ('pi+' == MCABSID) | ('K+' == MCABSID) | ('p+' == MCABSID) | ('mu+' == MCABSID) | ('e+' == MCABSID) ) )
        mcv0 = self.mcselect( 'v0matches', v0fun & ( ('KS0' == MCABSID) | ('Lambda0' == MCABSID) | ('pi+' == MCABSID) | ('K+' == MCABSID) | ('p+' == MCABSID) | ('mu+' == MCABSID) | ('e+' == MCABSID) ) )

        mcmatchhp = 0
        mcmatchhm = 0
        mcmatchv0 = 0
        if len(mchp) > 0 :
            mcmatchhp = 1
        if len(mchm) > 0 :
            mcmatchhm = 1
        if len(mcv0) > 0 :
            mcmatchv0 = 1
        tuple.column_int( hpname+'_mcMatch', mcmatchhp )
        tuple.column_int( hmname+'_mcMatch', mcmatchhm )
        tuple.column_int( v0name+'_mcMatch', mcmatchv0 )

        # store the MC truth info about the reconstructed daughters
        self.mc_p4_tuple( tuple, mchp, mcmatchhp, hpname, False )
        self.mc_p4_tuple( tuple, mchm, mcmatchhm, hmname, False )
        self.mc_p4_tuple( tuple, mcv0, mcmatchv0, v0name, False )
        self.mc_vtx_tuple( tuple, mcv0, mcmatchv0, v0name, False )


    def unique_cands( self, cands ) :
        """
        Find unique candidates in a list

        Two candidates are duplicates when they are built from the same
        set of tracks.  Each candidate is fingerprinted by the sorted
        list of its daughters' TES keys; V0 grand-daughter keys are
        negated so that a track used directly cannot collide with the
        same key used inside a V0.
        """

        unique_cands = []
        evt_keys = []
        for cand in cands :

            cand_keys = []
            for daug in cand.children() :
                daug_pid = daug.particleID()
                if daug_pid == self.daug3ID or daug_pid == self.daug3ConjID :
                    for gdaug in daug.children() :
                        cand_keys.append( -gdaug.key() )
                else :
                    cand_keys.append( daug.key() )

            cand_keys.sort()

            # keep only the first candidate with a given fingerprint
            if cand_keys in evt_keys :
                continue
            else :
                unique_cands.append( cand )
                evt_keys.append( cand_keys )

        return unique_cands


    def get_ntracks( self ) :
        """
        Extracts the number of Best and Long tracks in the event

        Returns (nbest, nlong, nspdhits, nrich1hits, nrich2hits).
        Primary source is /Event/Rec/Summary; if that is absent the
        Best track container is counted directly (hit counts then stay
        zero).  Deliberately best-effort: all values default to 0.
        """

        nbest = 0
        nlong = 0
        nspdhits = 0
        nrich1hits = 0
        nrich2hits = 0

        # NOTE(review): the bare excepts swallow everything, including
        # interrupts - narrowing to Exception would be safer
        try :
            summary = self.get('/Event/Rec/Summary')
            nbest = summary.info(summary.nTracks,0)
            nlong = summary.info(summary.nLongTracks,0)
            nspdhits = summary.info(summary.nSPDhits,0)
            nrich1hits = summary.info(summary.nRich1Hits,0)
            nrich2hits = summary.info(summary.nRich2Hits,0)
        except :
            try :
                # fall back to counting the Best container; type 3 is Long
                tracks = self.get( 'Rec/Track/Best' )
                nlong = 0
                nbest = len(tracks)
                for trk in tracks :
                    if 3 == trk.type() :
                        nlong += 1
            except :
                self.Error( 'Information about number of tracks not found neither in /Event/Rec/Summary nor in Rec/Track/Best', SUCCESS )

        return nbest, nlong, nspdhits, nrich1hits, nrich2hits


    def find_daughters( self, bcand ) :
        """
        Identify the 3 daughters of the given parent

        Returns (h1cand, h2cand, v0cand): the V0 is identified by PID,
        h1 is the positively charged track, h2 the other.  Any slot may
        be None if the expected daughter is absent.
        """

        h1cand = None
        h2cand = None
        v0cand = None

        for daug in bcand.children() :
            daug_pid = daug.particleID()

            if daug_pid == self.daug3ID or daug_pid == self.daug3ConjID :
                v0cand = daug
            elif daug.charge() > 0 :
                h1cand = daug
            else :
                h2cand = daug

        return h1cand, h2cand, v0cand


    def find_daughters_wrongsign( self, bcand ) :
        """
        Identify the 3 daughters of the given parent (same-sign version)

        Both tracks have the same charge, so h1/h2 are assigned simply
        by encounter order rather than by sign.
        """

        h1cand = None
        h2cand = None
        v0cand = None

        gotFirstCand = False
        for daug in bcand.children() :
            daug_pid = daug.particleID()

            if daug_pid == self.daug3ID or daug_pid == self.daug3ConjID :
                v0cand = daug
            elif not gotFirstCand :
                gotFirstCand = True
                h1cand = daug
            else :
                h2cand = daug


        return h1cand, h2cand, v0cand


    def calc_randno( self, runNum, evtNum ) :
        """
        Calculate the "random" numbers used to identify events for the BDT training

        Deterministic hash of (run, event) into [0, 1) so the same event
        always lands in the same training/testing split.
        """

        myRandom1 = (( 134*evtNum + runNum ) % 531241)/531241.0
        myRandom2 = (( 134*runNum + evtNum ) % 531241)/531241.0

        return myRandom1, myRandom2


    def analyse( self ) :
        """
        The method called in the event loop

        Selects unique B candidates, loops over all daughter mass
        hypotheses, refits each candidate with DecayTreeFitter, applies
        the B mass window and V0/B vertex z-separation cuts, and fills
        one ntuple per hypothesis combination.
        """

        # select the candidates from the input
        all_cands = self.select( 'candidates', self.decay_descriptor )
        cands = self.unique_cands( all_cands )
        nCands = len(cands)

        if 0 == nCands :
            return SUCCESS

        # select MC particles
        if self.signalmc :
            mcB = self.mcselect('mcB', self.mc_decay_descriptor_Xb)
            mchp = self.mcselect('mchp', self.mc_decay_descriptor_hp)
            mchm = self.mcselect('mchm', self.mc_decay_descriptor_hm)
            mcv0 = self.mcselect('mcv0', self.mc_decay_descriptor_V0)

        # get the event header
        evthdr = self.get( '/Event/Rec/Header' )
        runNum = evthdr.runNumber()
        evtNum = evthdr.evtNumber()
        myRdm1, myRdm2 = self.calc_randno( runNum, evtNum )

        # get the track info
        nbest, nlong, nspdhits, nrich1hits, nrich2hits = self.get_ntracks()

        # loop through the candidates
        iCand = 0

        for bcand in cands :

            # check that the candidate has an associated PV
            # NOTE(review): this 'continue' also skips the iCand
            # increment at the bottom of the loop - confirm intended
            bestPV = self.bestVertex( bcand )
            if not bestPV :
                self.Warning( 'Candidate %d in event %d has no associated PV!' % (iCand,evtNum), SUCCESS )
                continue

            # calculate all the DP information (common to all ntuples)
            self.calc_DP_info( bcand )

            # get the daughters of this candidate
            if not self.wrongsign :
                orig_h1cand, orig_h2cand, orig_v0cand = self.find_daughters( bcand )
            else :
                orig_h1cand, orig_h2cand, orig_v0cand = self.find_daughters_wrongsign( bcand )

            # re-fit the candidate under the various daughter mass hypotheses
            for hypo in self.hypotheses :

                # do the PID-swap (in place on the shared candidate)
                orig_h1cand.setParticleID( LHCb.ParticleID( hypo[0] ) )
                orig_h2cand.setParticleID( LHCb.ParticleID( hypo[1] ) )

                # perform the refit
                fitter = cpp.DecayTreeFitter.Fitter( bcand, self.stateprovider )
                fitter.fit()

                # check that the fit succeeded
                if fitter.status() != 0 :
                    continue

                # retrieve the refitted candidate and its parameters
                newtree = fitter.getFittedTree()
                newbcand = newtree.head()
                bparams = fitter.fitParams( bcand )

                # check that the refitted candidate still has an associated PV
                newBestPV = self.bestVertex( newbcand )
                if not newBestPV :
                    self.Warning( 'Refitted version of candidate %d in event %d has no associated PV!' % (iCand,evtNum), SUCCESS )
                    continue

                # apply cut on the B-candidate mass (calculated under the new daughter hypothesis)
                newbmass = bparams.momentum().m()
                if newbmass.value() < 5000.0 or newbmass.value() > 5800.0 :
                    continue

                # get the B daughters
                if not self.wrongsign :
                    h1cand, h2cand, v0cand = self.find_daughters( newbcand )
                else :
                    h1cand, h2cand, v0cand = self.find_daughters_wrongsign( newbcand )

                # apply the cut on the z-separation of the V0 and B vtx
                bvtx = newbcand.endVertex()
                v0vtx = v0cand.endVertex()
                vtx_z_sep = v0vtx.position().z() - bvtx.position().z()
                if vtx_z_sep < 0 :
                    continue

                # get the right ntuple (one per hypothesis combination)
                tuple_name = 'B2%s%s%s' % ( self.simplenames[hypo[0]], self.simplenames[hypo[1]], self.v0name )
                tuple = self.nTuple( tuple_name )

                # store the event level info
                tuple.column_int( 'runNumber', runNum )
                tuple.column_int( 'evtNumber', evtNum )
                tuple.column_int( 'nCands', nCands )
                tuple.column_int( 'iCand', iCand )
                tuple.column_int( 'BestTracks', nbest )
                tuple.column_int( 'LongTracks', nlong )
                tuple.column_int( 'SpdHits', nspdhits )
                tuple.column_int( 'Rich1Hits', nrich1hits )
                tuple.column_int( 'Rich2Hits', nrich2hits )

                tuple.column_double( 'myRandom1', myRdm1 )
                tuple.column_double( 'myRandom2', myRdm2 )

                # store info for the B
                self.p4_tuple( tuple, newbcand, 'B', bparams )
                self.ip_tuple( tuple, newbcand, 'B' )
                self.extra_B_vars_tuple( tuple, newbcand )

                # store trigger info
                self.trig_tuple( tuple, newbcand )

                # store vertex info for the B and KS
                self.vtx_tuple( tuple, newbcand, v0cand )

                # store vertex isolation info for B
                if not self.wrongsign :
                    self.vtx_isolation_tuple( tuple, bcand )

                if self.simulation :

                    if self.signalmc :
                        # store the MC-truth DP info
                        self.mc_dp_tuple( tuple, mcB )

                        # do the MC matching to the signal MC decay
                        self.mc_match_signal( tuple, mcB, mchp, mchm, mcv0, h1cand, h2cand, v0cand )

                    # do the general MC matching
                    self.mc_match_general( tuple, h1cand, h2cand, v0cand )

                    # store the MC background category info
                    self.bkg_category_tuple( tuple, newbcand, 'B' )
                    self.bkg_category_tuple( tuple, v0cand, self.v0name )


                # store information on the daughters
                self.p4_tuple( tuple, h1cand, 'h1' )
                self.ip_tuple( tuple, h1cand, 'h1' )
                self.trk_tuple( tuple, h1cand, 'h1' )

                self.p4_tuple( tuple, h2cand, 'h2' )
                self.ip_tuple( tuple, h2cand, 'h2' )
                self.trk_tuple( tuple, h2cand, 'h2' )

                self.p4_tuple( tuple, v0cand, self.v0name )
                self.ip_tuple( tuple, v0cand, self.v0name )

                # and the V0 daughters
                for gdaug in v0cand.children() :
                    gdaugID = gdaug.particleID().pid()
                    gdaugname = self.gdaugnames[ gdaugID ]
                    self.p4_tuple( tuple, gdaug, gdaugname )
                    self.trk_tuple( tuple, gdaug, gdaugname )

                # store all the pre-calculated DP information
                self.store_DP_info( tuple )

                # fill the ntuple
                tuple.write()

            # do the PID-swap back so the stored candidate is unmodified
            orig_h1cand.setParticleID( LHCb.ParticleID( self.daug1ID ) )
            orig_h2cand.setParticleID( LHCb.ParticleID( self.daug2ID ) )

            iCand += 1

        return SUCCESS

# End of B2KShhReco Class Definition


# Begin DalitzInfo Class Definition

class DalitzInfo(object) :

    """
    Helper class to store Dalitz-plot fit information and fill a tuple

    Caches the output of one DecayTreeFitter configuration (identified
    by 'suffix') so it can later be written as a set of ntuple columns.
    """

    def __init__( self, v0name, suffix ) :
        """
        Constructor

        v0name : name used for the V0 daughter's columns (e.g. 'KS')
        suffix : appended to every column name to identify the fit
        """

        self.v0name = v0name
        self.suffix = suffix
        self.init_values()


    def init_values( self ) :
        """
        Set default values for all variables

        -1 / -1.1 / None are the "fit failed or not run" sentinels that
        fill_tuple() writes when no results were stored.
        """

        self.status = -1
        self.errcode = -1
        self.niter = -1

        self.ndof = -1
        self.chi2 = -1.1
        self.prob = -1.1

        self.b_ctau = -1.1
        self.b_ctauerr = -1.1
        self.b_ctausig = -1.1

        self.b_p4 = None
        self.daug_p4 = {}
        self.daug_p4['h1'] = None
        self.daug_p4['h2'] = None
        self.daug_p4[self.v0name] = None

        self.m12Sq = -1.1
        self.m13Sq = -1.1
        self.m23Sq = -1.1

        self.mPrime = -1.1
        self.thPrime = -1.1


    def store_fit_results( self, fitter, bctau, bp4, daug_p4, m12Sq, m13Sq, m23Sq, mPrime, thPrime ) :
        """
        Extract the fit information from the fitter and store it

        daug_p4 is indexed [h1, h2, V0].  If the fit failed only the
        status/error/niter fields are stored; everything else keeps its
        default sentinel value.
        """

        self.init_values()

        self.status = fitter.status()
        self.errcode = fitter.errCode()
        self.niter = fitter.nIter()

        if self.status != 0 :
            return

        self.chi2 = fitter.chiSquare()
        self.ndof = fitter.nDof()
        self.prob = TMath.Prob( self.chi2, self.ndof )

        self.b_ctau = bctau.value()
        self.b_ctauerr = bctau.error()
        self.b_ctausig = self.b_ctau/self.b_ctauerr

        # copy the 4-momenta so later changes to the fitter do not affect us
        self.b_p4 = Gaudi.Math.LorentzVectorWithError(bp4)

        self.daug_p4['h1'] = Gaudi.Math.LorentzVectorWithError(daug_p4[0])
        self.daug_p4['h2'] = Gaudi.Math.LorentzVectorWithError(daug_p4[1])
        self.daug_p4[self.v0name] = Gaudi.Math.LorentzVectorWithError(daug_p4[2])

        self.m12Sq = m12Sq
        self.m13Sq = m13Sq
        self.m23Sq = m23Sq

        self.mPrime = mPrime
        self.thPrime = thPrime


    def fill_tuple( self, tuple ) :
        """
        Fill the supplied ntuple with the information
        """

        # store info on the success (or otherwise) of the mass constrained fit
        tuple.column_int( 'B_DTF_STATUS_'+self.suffix, self.status )
        tuple.column_int( 'B_DTF_ERRCODE_'+self.suffix, self.errcode )
        tuple.column_int( 'B_DTF_NITER_'+self.suffix, self.niter )

        # also store the fit chisq, ndof and associated probability
        tuple.column_int( 'B_ENDVERTEX_NDOF_'+self.suffix, self.ndof )
        tuple.column_double( 'B_ENDVERTEX_CHI2_'+self.suffix, self.chi2 )
        tuple.column_double( 'B_ENDVERTEX_PROB_'+self.suffix, self.prob )

        # store information on the B candidate
        tuple.column_double( 'B_CTAU_'+self.suffix, self.b_ctau )
        tuple.column_double( 'B_CTAUERR_'+self.suffix, self.b_ctauerr )
        tuple.column_double( 'B_CTAUSIG_'+self.suffix, self.b_ctausig )

        if self.b_p4 :
            tuple.column_double( 'B_M_'+self.suffix, self.b_p4.m().value() )
            tuple.column_double( 'B_MERR_'+self.suffix, self.b_p4.m().error() )
            tuple.column_double( 'B_PX_'+self.suffix, self.b_p4.Px() )
            tuple.column_double( 'B_PY_'+self.suffix, self.b_p4.Py() )
            tuple.column_double( 'B_PZ_'+self.suffix, self.b_p4.Pz() )
            tuple.column_double( 'B_PE_'+self.suffix, self.b_p4.E() )
        else :
            tuple.column_double( 'B_M_'+self.suffix, -1.1 )
            tuple.column_double( 'B_MERR_'+self.suffix, -1.1 )
            tuple.column_double( 'B_PX_'+self.suffix, -1.1 )
            tuple.column_double( 'B_PY_'+self.suffix, -1.1 )
            tuple.column_double( 'B_PZ_'+self.suffix, -1.1 )
            tuple.column_double( 'B_PE_'+self.suffix, -1.1 )

        # loop through 3 B daughters and store their 4-momenta
        for daugname in self.daug_p4.keys() :
            p4 = self.daug_p4[ daugname ]
            if p4 :
                tuple.column_double( daugname+'_M_'+self.suffix, p4.M() )
                tuple.column_double( daugname+'_PX_'+self.suffix, p4.Px() )
                tuple.column_double( daugname+'_PY_'+self.suffix, p4.Py() )
                tuple.column_double( daugname+'_PZ_'+self.suffix, p4.Pz() )
                tuple.column_double( daugname+'_PE_'+self.suffix, p4.E() )
            else :
                tuple.column_double( daugname+'_M_'+self.suffix, -1.1 )
                tuple.column_double( daugname+'_PX_'+self.suffix, -1.1 )
                tuple.column_double( daugname+'_PY_'+self.suffix, -1.1 )
                tuple.column_double( daugname+'_PZ_'+self.suffix, -1.1 )
                tuple.column_double( daugname+'_PE_'+self.suffix, -1.1 )

        # store the DP co-ordinates
        tuple.column_double( 'm12Sq_'+self.suffix, self.m12Sq )
        tuple.column_double( 'm13Sq_'+self.suffix, self.m13Sq )
        tuple.column_double( 'm23Sq_'+self.suffix, self.m23Sq )

        # store the square DP co-ordinates
        tuple.column_double( 'mPrime_'+self.suffix, self.mPrime )
        tuple.column_double( 'thPrime_'+self.suffix, self.thPrime )


# End DalitzInfo Class Definition

diff --git a/Phys/B2KShh/python/B2KShh/ThreeBodyKinematics.py b/Phys/B2KShh/python/B2KShh/ThreeBodyKinematics.py
new file mode 100644
index 0000000..8e4a582
--- /dev/null
+++ b/Phys/B2KShh/python/B2KShh/ThreeBodyKinematics.py
@@ -0,0 +1,203 @@
#!/usr/bin/env python

import math
from ROOT import ( TMath, TRandom3 )

# Begin ThreeBodyKinematics Class
class ThreeBodyKinematics(object) :
    """
    3-body kinematics

    Kinematics of a parent of mass mParent decaying to three daughters
    of masses m1, m2, m3.  Internal lists are indexed by the SPECTATOR
    particle:
        _mij[0] / _mijSq[0] -> m23 (particle 1 is the spectator)
        _mij[1] / _mijSq[1] -> m13 (particle 2 is the spectator)
        _mij[2] / _mijSq[2] -> m12 (particle 3 is the spectator)
    _cij[k] holds the corresponding helicity-angle cosine.
    Supports the conventional Dalitz plot (m13Sq, m23Sq) and the
    "square" Dalitz plot (mPrime, thPrime) parameterisations.
    """

    def __init__( self, m1, m2, m3, mParent ) :
        # Cache the masses and all derived per-pair limits so that the
        # per-point updates below involve no recomputation.
        self._mP = mParent
        self._m = [ m1, m2, m3 ]

        self._mPSq = mParent*mParent
        self._mSq = [ m1*m1, m2*m2, m3*m3 ]

        self._mDTot = m1 + m2 + m3
        self._mSqDTot = m1*m1 + m2*m2 + m3*m3

        # pair-mass limits: min is the sum of the two pair masses
        # (mDTot - spectator), max leaves the spectator at rest
        self._mMin = [ self._mDTot - self._m[i] for i in range(3) ]
        self._mMax = [ self._mP - self._m[i] for i in range(3) ]
        self._mDiff = [ self._mMax[i] - self._mMin[i] for i in range(3) ]

        self._mSqMin = [ self._mMin[i] * self._mMin[i] for i in range(3) ]
        self._mSqMax = [ self._mMax[i] * self._mMax[i] for i in range(3) ]
        self._mSqDiff = [ self._mSqMax[i] - self._mSqMin[i] for i in range(3) ]

        # current point on the Dalitz plot (set by the update methods)
        self._mij = [ 0.0, 0.0, 0.0 ]
        self._mijSq = [ 0.0, 0.0, 0.0 ]
        self._cij = [ 0.0, 0.0, 0.0 ]

        self._mPrime = 0.0
        self._thPrime = 0.0

        # fixed seed => reproducible phase-space generation
        self._random = TRandom3(1234)

    @property
    def m12Sq( self ) :
        # invariant mass squared of pair (1,2)
        return self._mijSq[2]

    @property
    def m13Sq( self ) :
        # invariant mass squared of pair (1,3)
        return self._mijSq[1]

    @property
    def m23Sq( self ) :
        # invariant mass squared of pair (2,3)
        return self._mijSq[0]

    @property
    def mPrime( self ) :
        # square-DP mass co-ordinate, in [0, 1]
        return self._mPrime

    @property
    def thPrime( self ) :
        # square-DP angle co-ordinate, in [0, 1]
        return self._thPrime

    def genFlatPhaseSpace( self ) :
        """
        Set the current point to one generated uniformly in (m13Sq, m23Sq)
        inside the Dalitz-plot boundary (accept/reject over the enclosing
        rectangle).
        """
        self.updateKinematics( *self._genFlatPoint() )
        while not self._withinDPLimits() :
            self.updateKinematics( *self._genFlatPoint() )

    def _genFlatPoint( self ) :
        # uniform point in the rectangle enclosing the Dalitz plot
        m13Sq = self._mSqMin[1] + self._random.Rndm()*self._mSqDiff[1]
        m23Sq = self._mSqMin[0] + self._random.Rndm()*self._mSqDiff[0]
        return m13Sq, m23Sq

    def updateKinematics( self, m13Sq, m23Sq ) :
        """
        Set the current point from conventional DP co-ordinates and
        recompute all derived quantities (m12Sq, helicities, square-DP).
        """
        self._updateMassSquares( m13Sq , m23Sq )
        self._calcHelicities()
        self._calcSqDPVars()

    def updateKinematicsSqDP( self, mPrime, thPrime ) :
        """
        Set the current point from square-DP co-ordinates and recompute
        the pair masses and helicity cosines.
        """
        self._updateMassSquaresSqDP( mPrime , thPrime )
        self._calcHelicities()

    def _withinDPLimits( self ) :
        # check the currently stored point
        return self.withinDPLimits( self._mijSq[1], self._mijSq[0] )

    def withinDPLimits( self, m13Sq, m23Sq ) :
        """
        Return True if the point (m13Sq, m23Sq) lies inside the
        kinematically allowed Dalitz-plot region.
        """
        # quick rejection against the global pair-mass ranges
        if m13Sq < self._mSqMin[1] or m13Sq > self._mSqMax[1] :
            return False

        if m23Sq < self._mSqMin[0] or m23Sq > self._mSqMax[0] :
            return False

        # at this m13, compute the local m23Sq range from the energies
        # and momenta of particles 2 and 3 in the (1,3) rest frame
        m13 = TMath.Sqrt( m13Sq )

        e3CMS13 = ( m13Sq - self._mSq[0] + self._mSq[2] ) / (2.0*m13)
        p3CMS13 = self._pCalc( e3CMS13, self._mSq[2] )

        e2CMS13 = ( self._mPSq - m13Sq - self._mSq[1] ) / (2.0*m13)
        p2CMS13 = self._pCalc( e2CMS13, self._mSq[1] )

        term1 = 2.0 * e2CMS13 * e3CMS13 + self._mSq[1] + self._mSq[2]
        term2 = 2.0 * p2CMS13 * p3CMS13

        m23SqLocMin = term1 - term2
        m23SqLocMax = term1 + term2

        withinDP = False
        if m23Sq > m23SqLocMin and m23Sq < m23SqLocMax :
            withinDP = True

        return withinDP

    def calcSqDPJacobian( self ) :
        """
        Return the Jacobian |d(m13Sq, m23Sq)/d(mPrime, thPrime)| at the
        current point, for converting densities between the conventional
        and square Dalitz plots.
        """
        e1CMS12 = ( self._mijSq[2] - self._mSq[1] + self._mSq[0] ) / (2.0*self._mij[2])
        e3CMS12 = ( self._mPSq - self._mijSq[2] - self._mSq[2] ) / (2.0*self._mij[2])

        p1CMS12 = self._pCalc( e1CMS12, self._mSq[0] )
        p3CMS12 = self._pCalc( e3CMS12, self._mSq[2] )

        # dm12/dmPrime and dcos(theta12)/dthPrime
        deriv1 = TMath.PiOver2() * self._mDiff[2] * TMath.Sin( TMath.Pi() * self._mPrime )
        deriv2 = TMath.Pi() * TMath.Sin( TMath.Pi() * self._thPrime )

        jacobian = 4.0 * p1CMS12 * p3CMS12 * self._mij[2] * deriv1 * deriv2

        return jacobian

    def _calcSqDPVars( self ) :
        # map m12 into [-1, 1] then through acos/pi to get mPrime in [0, 1];
        # thPrime comes directly from the (1,2) helicity cosine
        value = 2.0*(self._mij[2] - self._mMin[2])/self._mDiff[2] - 1.0
        self._mPrime = TMath.InvPi() * TMath.ACos( value )
        self._thPrime = TMath.InvPi() * TMath.ACos( self._cij[2] )

    def _updateMassSquares( self, m13Sq, m23Sq ) :
        # the third pair mass follows from 4-momentum conservation
        m12Sq = self._calcThirdMassSq( m13Sq, m23Sq )

        self._mijSq[1] = m13Sq
        self._mij[1] = TMath.Sqrt( m13Sq )

        self._mijSq[0] = m23Sq
        self._mij[0] = TMath.Sqrt( m23Sq )

        self._mijSq[2] = m12Sq
        self._mij[2] = TMath.Sqrt( m12Sq )

    def _updateMassSquaresSqDP( self, mPrime, thPrime ) :
        # invert the square-DP mapping: recover m12 and cos(theta12)
        self._mPrime = mPrime
        self._thPrime = thPrime

        m12 = 0.5 * self._mDiff[2] * ( 1.0 + TMath.Cos( TMath.Pi() * mPrime ) ) + self._mMin[2]
        c12 = TMath.Cos( TMath.Pi() * thPrime )

        self._updateMassSquares_SqDP12( m12, c12 )

    def _updateMassSquares_SqDP12( self, m12, c12 ) :
        # given m12 and the (1,2) helicity cosine, derive m13Sq and m23Sq
        self._mij[2] = m12
        self._mijSq[2] = m12*m12
        self._cij[2] = c12

        self._mijSq[1] = self._mFromC( 0, 1, 2 )
        self._mij[1] = TMath.Sqrt( self._mijSq[1] )

        self._mijSq[0] = self._calcThirdMassSq( self._mijSq[2], self._mijSq[1] )
        self._mij[0] = TMath.Sqrt( self._mijSq[0] )

    def _calcHelicities( self ) :
        # helicity-angle cosine for each pair from the current masses
        self._cij[2] = self._cFromM( 0, 1, 2 )
        self._cij[0] = self._cFromM( 1, 2, 0 )
        self._cij[1] = self._cFromM( 2, 0, 1 )

    def _cFromM( self, i, j, k ) :
        """
        Helicity cosine of pair (i,j) (spectator k) from the current
        invariant masses, evaluated in the (i,j) rest frame.
        """
        eiCMSij = self._eiCMSij( i, j, k )
        ekCMSij = self._ekCMSij( i, j, k )

        qi = self._pCalc( eiCMSij, self._mSq[i] )
        qk = self._pCalc( ekCMSij, self._mSq[k] )

        cFromM = -1.0 * ( self._mijSq[j] - self._mSq[i] - self._mSq[k] - 2.0*eiCMSij*ekCMSij ) / (2.0*qi*qk)

        # sign flip for the i == 1 pairing; presumably a convention
        # choice for the helicity-angle orientation - TODO confirm
        if i == 1 :
            cFromM = -1.0 * cFromM

        return cFromM

    def _mFromC( self, i, j, k ) :
        """
        Inverse of _cFromM: invariant mass squared of pair (i,k) from
        the stored helicity cosine _cij[k].
        """
        eiCMSij = self._eiCMSij( i, j, k )
        ekCMSij = self._ekCMSij( i, j, k )

        qi = self._pCalc( eiCMSij, self._mSq[i] )
        qk = self._pCalc( ekCMSij, self._mSq[k] )

        mFromC = self._mSq[i] + self._mSq[k] + 2.0*eiCMSij*ekCMSij - 2.0*qi*qk*self._cij[k]

        return mFromC

    def _eiCMSij( self, i, j, k ) :
        # energy of daughter i in the rest frame of pair (i,j)
        return ( self._mijSq[k] - self._mSq[j] + self._mSq[i] ) / (2.0 * self._mij[k])

    def _ekCMSij( self, i, j, k ) :
        # energy of the spectator k in the rest frame of pair (i,j)
        return ( self._mPSq - self._mijSq[k] - self._mSq[k] ) / (2.0 * self._mij[k])

    def _calcThirdMassSq( self, firstMassSq, secondMassSq ) :
        # m12Sq + m13Sq + m23Sq = mPSq + m1Sq + m2Sq + m3Sq
        return self._mPSq + self._mSqDTot - firstMassSq - secondMassSq

    def _pCalc( self, energy, massSq ) :
        # momentum from energy and mass squared; NaN if unphysical input
        return TMath.Sqrt( energy*energy - massSq )

# End of ThreeBodyKinematics Class Definition