diff --git a/PklAlgebra.py b/PklAlgebra.py new file mode 100644 index 0000000..8797d05 --- /dev/null +++ b/PklAlgebra.py @@ -0,0 +1,104 @@ +import os +import inspect +from pprint import pprint +from itertools import product +import math +import numpy as n +import sys +import pickle +from datetime import datetime +from suppl.Structure import * +from config import pkl_address +import ROOT as R +from ROOT import gStyle +import parser +gStyle.SetOptStat(False) +#from ROOT import RooFit as RF + + +def PklAlgebra(dataset_1, dataset_2,formula, variable, pkl_address=pkl_address): + text_formula="_"+formula.replace("/","_over_").replace("*","_times_").replace("+","_plus_").replace("-","_minus_")+"_" + + code = parser.expr(formula).compile() + + + with open(dataset_1, 'r') as basket: + ds_1 = pickle.load(basket) + with open(dataset_2, 'r') as basket: + ds_2 = pickle.load(basket) + + tot_coll={} + if ds_1.keys()!= ds_2.keys(): + print "ERROR: Different binnings of datasets. Please use .pkls with collections, which have the same time binning" + return False + for run_bin in ds_1.keys(): + if ds_1[run_bin]['data'].keys()!=ds_2[run_bin]['data'].keys(): + print "ERROR: Different types of detectors in datasets. Please use .pkls with collections, which describe the same detectors" + return False + if ds_1[run_bin]['comment']: + tot_coll[run_bin]={'run_start':ds_1[run_bin]['run_start'], + 'run_stop':ds_1[run_bin]['run_stop'], + 'comment':ds_1[run_bin]['comment'], + 'data':{}} + else: + tot_coll[run_bin]={'run_start':ds_1[run_bin]['run_start'], + 'run_stop':ds_1[run_bin]['run_stop'], + 'data':{}} + + for st_id in ds_1[run_bin]['data']: + if ds_1[run_bin]['data'][st_id].keys()!=ds_2[run_bin]['data'][st_id].keys(): + print "ERROR: Different structure of information in datasets. 
Please use .pkls which correspond to the same operation mode" + return False + tot_coll[run_bin]['data'][st_id]={} + if variable == "all": + for val in ds_1[run_bin]['data'][st_id]: + a = ds_1[run_bin]['data'][st_id][val] + b = ds_2[run_bin]['data'][st_id][val] + try: + tot_coll[run_bin]['data'][st_id][val]=eval(code) + #print "a = "+str(a)+", b = "+str(b)+"; "+formula+" = "+str(eval(code)) + except: + print "Failed to calculate formula for "+val+" for the sector "+str(st_id) + tot_coll[run_bin]['data'][st_id][val]=0 + elif variable not in ds_1[run_bin]['data'][st_id]: + print "Requested variable is not in dataset. These dataset contain following variables:" + for val in ds_1[run_bin]['data'][st_id]: + print val + print "Exiting." + return False + else: + a = ds_1[run_bin]['data'][st_id][variable] + b = ds_2[run_bin]['data'][st_id][variable] + try: + tot_coll[run_bin]['data'][st_id][variable]=eval(code) + #print "a = "+str(a)+", b = "+str(b)+"; "+formula+" = "+str(eval(code)) + except: + print "Failed to calculate formula for "+variable+" for the sector "+str(st_id) + tot_coll[run_bin]['data'][st_id][variable]=0 + + with open(pkl_address+text_formula+'with_a_as_'+dataset_1.split('/')[-1].replace(".pkl","")+'_and_b_as_'+dataset_2.split('/')[-1].replace(".pkl","")+'.pkl', 'wb') as basket: + pickle.dump(tot_coll, basket) + + return True + + +if __name__ == "__main__": + #local_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))) + #python ~/tuptohist/tuptohist/PklAlgebra.py ../2012/AllRuns/Pkls/ITHitMonitor_AllRuns_12.pkl ../2015/AllRuns/Pkls/ITHitMonitor_AllRuns_15.pkl abs\(b\)-abs\(a\) mean + if len(sys.argv)==4: + formula = sys.argv[3] + ds_1 = sys.argv[1] + ds_2 = sys.argv[2] + print "Evaluating formula "+formula + PklAlgebra(ds_1, ds_2, formula, "all") + elif len(sys.argv)==5: + formula = sys.argv[3] + ds_1 = sys.argv[1] + ds_2 = sys.argv[2] + variable = sys.argv[4] + print "Evaluating formula "+formula + PklAlgebra(ds_1, ds_2, formula, 
variable) + + else: + syntax_explanation("PklAlgebra.py") + diff --git a/PklToHist.py b/PklToHist.py new file mode 100644 index 0000000..ccec1be --- /dev/null +++ b/PklToHist.py @@ -0,0 +1,149 @@ +import os +import inspect +from pprint import pprint +from itertools import product +import math +import numpy as n +import sys +import pickle +from datetime import datetime +from suppl.Structure import * +from drawing.CreateTTHist import CreateTTHist +from drawing.CreateITHist import CreateITHist +from drawing.Create_Maps import * +from config import binning +from config import histogram_address +from config import plot_address +import ROOT as R +from ROOT import gStyle +gStyle.SetOptStat(False) +#from ROOT import RooFit as RF + + +def PklToHist(data, operation_mode, histogram_address=histogram_address, plot_address=plot_address, dump_summary = False): + if (operation_mode=='1'): + print "Opening file "+data+" (it may take several minutes)" + start = datetime.now() + with open(data, 'r') as basket: + coll_ITHitMonitor = pickle.load(basket) + print "File oppened (took "+str(datetime.now()-start)+" ) \nCreating 2D plots" + for run_bin in coll_ITHitMonitor: + try: + suffix =coll_ITHitMonitor[run_bin]["comment"].replace(" ","_") + except: + suffix =str(coll_ITHitMonitor[run_bin]["run_start"])+"__"+str(coll_ITHitMonitor[run_bin]["run_stop"]) + CreateITHist(coll_ITHitMonitor[run_bin]["data"],variable="mean", mode="Value",suffix = suffix, address=plot_address) + CreateITHist(coll_ITHitMonitor[run_bin]["data"],variable="width", mode="Value",suffix = suffix, address=plot_address) + print "2D plots created"#, writing histograms to .root file" + #write_histogram(coll_ITHitMonitor, "Monitor", histogram_address+"ITHitMonitor") + print "Creating trends" + create_monitor_trends(coll_ITHitMonitor, "IT", histogram_address+"Trends_ITHitMonitor") + if dump_summary: + ITResolution = [] + IT_Map_dict = IT_Map() + IT_ids_Map_dict = IT_ids_Map() + for st_id in coll_ITHitMonitor[0]['data']: + 
ITResolution.append({'name':IT_Map_dict[st_id],'id':IT_ids_Map_dict[st_id],'resolution':coll_ITHitMonitor[0]['data'][st_id]['width'], 'mean':coll_ITHitMonitor[0]['data'][st_id]['mean'], 'err_width:':coll_ITHitMonitor[0]['data'][st_id]['err_width']}) + with open('Resolution_Map_IT.pkl', 'wb') as basket: + pickle.dump(ITResolution, basket) + + + elif (operation_mode=='2'): + print "Opening file "+data+" (it may take several minutes)" + start = datetime.now() + with open(data, 'r') as basket: + coll_TTHitMonitor = pickle.load(basket) + print "File oppened (took "+str(datetime.now()-start)+" ) \nCreating 2D plots" + for run_bin in coll_TTHitMonitor: + try: + suffix = coll_TTHitMonitor[run_bin]['comment'].replace(" ","_") + except: + suffix = str(coll_TTHitMonitor[run_bin]["run_start"])+"__"+str(coll_TTHitMonitor[run_bin]["run_stop"]) + CreateTTHist(coll_TTHitMonitor[run_bin]["data"],variable="mean", mode="Value",suffix= suffix,address=plot_address) + CreateTTHist(coll_TTHitMonitor[run_bin]["data"],variable="width", mode="Value",suffix=suffix,address=plot_address) + print "2D plots created"#, writing histograms to .root file" + #write_histogram(coll_TTHitMonitor, "Monitor",histogram_address+"TTHitMonitor") + print "Creating trends" + create_monitor_trends(coll_TTHitMonitor, "TT",histogram_address+"Trends_TTHitMonitor") + if dump_summary: + TTResolution = [] + TT_Map_dict = TT_Map() + TT_ids_Map_dict = TT_ids_Map() + for st_id in coll_TTHitMonitor[0]['data']: + TTResolution.append({'name':TT_Map_dict[st_id],'id':TT_ids_Map_dict[st_id],'resolution':coll_TTHitMonitor[0]['data'][st_id]['width'], 'mean':coll_TTHitMonitor[0]['data'][st_id]['mean'], 'err_width:':coll_TTHitMonitor[0]['data'][st_id]['err_width']}) + with open('Resolution_Map_TT.pkl', 'wb') as basket: + pickle.dump(TTResolution, basket) + + elif (operation_mode=='3'): + print "Opening file "+data+" (it may take several minutes)" + start = datetime.now() + with open(data, 'r') as basket: + coll_TTHitEfficiency = 
pickle.load(basket) + print "File oppened (took "+str(datetime.now()-start)+" ) \nCreating 2D plots" + for run_bin in coll_TTHitEfficiency: + try: + suffix = coll_TTHitEfficiency[run_bin]["comment"] + except: + suffix = str(coll_TTHitEfficiency[run_bin]["run_start"])+"__"+str(coll_TTHitEfficiency[run_bin]["run_stop"]) + CreateTTHist(coll_TTHitEfficiency[run_bin]["data"],variable = "efficiency", mode="Value",suffix=suffix,address=plot_address) + print "2D plots created"#, writing histograms to .root file" + #write_histogram(coll_TTHitEfficiency, "Efficiency",histogram_address+"TTHitEfficiency") + print "Creating trends" + create_efficiency_trends(coll_TTHitEfficiency, "TT",histogram_address+"Trends_TTHitEfficiency") + if dump_summary: + TTEfficiency = [] + TT_Map_dict = TT_Map() + TT_ids_Map_dict = TT_ids_Map() + for st_id in coll_TTHitEfficiency[0]['data']: + TTEfficiency.append({'name':TT_Map_dict[st_id],'id':TT_ids_Map_dict[st_id],'efficiency':coll_TTHitEfficiency[0]['data'][st_id]['efficiency'], 'err_efficiency':coll_TTHitEfficiency[0]['data'][st_id]['err_efficiency']}) + with open('Efficiency_Map_TT.pkl', 'wb') as basket: + pickle.dump(TTEfficiency, basket) + + + elif (operation_mode=='4'): + print "Opening file "+data+" (it may take several minutes)" + start = datetime.now() + with open(data, 'r') as basket: + coll_ITHitEfficiency = pickle.load(basket) + print "File oppened (took "+str(datetime.now()-start)+" ) \nCreating 2D plots" + for run_bin in coll_ITHitEfficiency: + try: + suffix = coll_ITHitEfficiency[run_bin]["comment"] + except: + suffix = str(coll_ITHitEfficiency[run_bin]["run_start"])+"__"+str(coll_ITHitEfficiency[run_bin]["run_stop"]) + CreateITHist(coll_ITHitEfficiency[run_bin]["data"],variable = "efficiency", mode="Value",suffix=suffix, address=plot_address) + print "2D plots created"#, writing histograms to .root file" + #write_histogram(coll_ITHitEfficiency, "Efficiency",histogram_address+"ITHitEfficiency") + print "Creating trends" + 
create_efficiency_trends(coll_ITHitEfficiency, "IT",histogram_address+"Trends_ITHitEfficiency") + if dump_summary: + ITEfficiency = [] + IT_Map_dict = IT_Map() + IT_ids_Map_dict = IT_ids_Map() + for st_id in coll_ITHitEfficiency[0]['data']: + ITEfficiency.append({'name':IT_Map_dict[st_id],'id':IT_ids_Map_dict[st_id],'efficiency':coll_ITHitEfficiency[0]['data'][st_id]['efficiency'], 'err_efficiency':coll_ITHitEfficiency[0]['data'][st_id]['err_efficiency']}) + with open('Efficiency_Map_IT.pkl', 'wb') as basket: + pickle.dump(ITEfficiency, basket) + + + else: + print "To run script, choose the mode you want to run:" + print "1 - IT Hit Monitor" + print "2 - TT Hit Monitor" + print "3 - TT Hit Efficiency" + print "4 - IT Hit Efficiency" + + return True + + +if __name__ == "__main__": + #local_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))) + if len(sys.argv)==3: + data = sys.argv[1] + PklToHist(data,sys.argv[2]) + elif len(sys.argv)==4: + data = sys.argv[1] + PklToHist(data = data, operation_mode = sys.argv[2], dump_summary = True) + else: + syntax_explanation("PklToHist.py") + diff --git a/README.md b/README.md index f5e673e..fcdbea9 100644 --- a/README.md +++ b/README.md @@ -1,2 +1,4 @@ STPerformancePaper =============== + +- Includes script to produce plot for the paper, based on tuptuhist framework \ No newline at end of file diff --git a/SimpleStats.py b/SimpleStats.py new file mode 100644 index 0000000..f8a9b21 --- /dev/null +++ b/SimpleStats.py @@ -0,0 +1,118 @@ +import sys +import ROOT as R +from ROOT import gStyle +import os +from suppl.Structure import * +from drawing.Create_Maps import TT_Map as TT_Map_func +from drawing.Create_Maps import IT_Map as IT_Map_func +gStyle.SetOptStat(False) +#from ROOT import RooFit as RF + + +def simple_analysis(data): + #Make distribution of per-sector parameters weighted with the nEvents and define mean and rms + try: + from config import dead_sectors + except: + dead_sectors = [] + TT_Map = 
TT_Map_func() + IT_Map = IT_Map_func() + + f_input = R.TFile(data) + tree_ITHitMonitor = f_input.ITHitMonitor + t_ITHitMonitor = tree_ITHitMonitor.Get("TrackMonTuple") + h_ITHitMonitor_u = R.TH2F("h_ITHitMonitor_u","h_ITHitMonitor_u", max(IT_Map.keys())+1, -0.5, max(IT_Map.keys())+0.5, 200, -0.3,0.3) + h_ITHitMonitor_r = R.TH2F("h_ITHitMonitor_r","h_ITHitMonitor_r", max(IT_Map.keys())+1, -0.5, max(IT_Map.keys())+0.5, 200, -0.3,0.3) + t_ITHitMonitor.Project("h_ITHitMonitor_u", "hit_residual*hit_errMeasure*hit_errMeasure/hit_errResidual/hit_errResidual:clusterSTchanMapID") + t_ITHitMonitor.Project("h_ITHitMonitor_r", "hit_residual*hit_errMeasure/hit_errResidual:clusterSTchanMapID") + it_mean_abswidth=0 + h_it_mean_abswidth = R.TH1F("h_it_mean_abswidth","h_it_mean_abswidth", 100, 0., 0.03) + it_mean_res=0 + h_it_mean_res = R.TH1F("h_it_mean_res","h_it_mean_res", 100, 0.03, 0.06) + it_nEntries=0 + for i in IT_Map.keys(): + if not IT_Map[i] in dead_sectors: + #print "Processing "+IT_Map[i] + h_hist_u = h_ITHitMonitor_u.ProjectionY(IT_Map[i], i+1, i+1) + h_hist_r = h_ITHitMonitor_r.ProjectionY(IT_Map[i], i+1, i+1) + it_mean_abswidth += abs(h_hist_u.GetMean())*h_hist_u.GetEntries() + h_it_mean_abswidth.Fill(abs(h_hist_u.GetMean()), h_hist_u.GetEntries()) + it_mean_res += h_hist_r.GetRMS()*h_hist_r.GetEntries() + h_it_mean_res.Fill(h_hist_r.GetRMS(), h_hist_r.GetEntries()) + it_nEntries += h_hist_r.GetEntries() + del h_hist_u + del h_hist_r + it_mean_abswidth = float(it_mean_abswidth)/it_nEntries + it_mean_res = float(it_mean_res)/it_nEntries + + + tree_TTHitMonitor = f_input.TTHitMonitor + t_TTHitMonitor = tree_TTHitMonitor.Get("TrackMonTuple") + h_TTHitMonitor_u = R.TH2F("h_TTHitMonitor_u","h_TTHitMonitor_u", max(TT_Map.keys())+1, -0.5, max(TT_Map.keys())+0.5, 200, -0.3,0.3) + h_TTHitMonitor_r = R.TH2F("h_TTHitMonitor_r","h_TTHitMonitor_r", max(TT_Map.keys())+1, -0.5, max(TT_Map.keys())+0.5, 200, -0.3,0.3) + t_TTHitMonitor.Project("h_TTHitMonitor_u", 
"hit_residual*hit_errMeasure*hit_errMeasure/hit_errResidual/hit_errResidual:clusterSTchanMapID") + t_TTHitMonitor.Project("h_TTHitMonitor_r", "hit_residual*hit_errMeasure/hit_errResidual:clusterSTchanMapID") + tt_mean_abswidth=0 + h_tt_mean_abswidth = R.TH1F("h_tt_mean_abswidth","h_tt_mean_abswidth", 100, 0., 0.03) + tt_mean_res=0 + h_tt_mean_res = R.TH1F("h_tt_mean_res","h_tt_mean_res", 100, 0.03, 0.06) + tt_nEntries=0 + for i in TT_Map.keys(): + if not IT_Map[i] in dead_sectors: + #print "Processing "+TT_Map[i] + h_hist_u = h_TTHitMonitor_u.ProjectionY(TT_Map[i], i+1, i+1) + h_hist_r = h_TTHitMonitor_r.ProjectionY(TT_Map[i], i+1, i+1) + tt_mean_abswidth += abs(h_hist_u.GetMean())*h_hist_u.GetEntries() + h_tt_mean_abswidth.Fill(abs(h_hist_u.GetMean()), h_hist_u.GetEntries()) + tt_mean_res += h_hist_r.GetRMS()*h_hist_r.GetEntries() + h_tt_mean_res.Fill(h_hist_r.GetRMS(), h_hist_r.GetEntries()) + tt_nEntries += h_hist_r.GetEntries() + del h_hist_u + del h_hist_r + tt_mean_abswidth = float(tt_mean_abswidth)/tt_nEntries + tt_mean_res = float(tt_mean_res)/tt_nEntries + + tree_TTHitEfficiency = f_input.TTHitEfficiency + t_TTHitEfficiency = tree_TTHitEfficiency.Get("TrackMonTuple") + h_TTHitEfficiency = R.TH1F("h_TTHitEfficiency","h_TTHitEfficiency",100, 0.,1.1) + t_TTHitEfficiency.Project("h_TTHitEfficiency","isFound") + tt_eff = h_TTHitEfficiency.GetMean() + tot_tt_eff = h_TTHitEfficiency.GetEntries() + nf_tt_eff = tot_tt_eff*(1-tt_eff) + f_tt_eff = tot_tt_eff*tt_eff + err_tt_eff = (f_tt_eff*nf_tt_eff/tot_tt_eff**3)**0.5 + + tree_ITHitEfficiency = f_input.ITHitEfficiency + t_ITHitEfficiency = tree_ITHitEfficiency.Get("TrackMonTuple") + h_ITHitEfficiency = R.TH1F("h_ITHitEfficiency","h_ITHitEfficiency",100, 0.,1.1) + t_ITHitEfficiency.Project("h_ITHitEfficiency","isFound") + it_eff = h_ITHitEfficiency.GetMean() + tot_it_eff = h_ITHitEfficiency.GetEntries() + nf_it_eff = tot_it_eff*(1-it_eff) + f_it_eff = tot_it_eff*it_eff + err_it_eff = 
(f_it_eff*nf_it_eff/tot_it_eff**3)**0.5 + + print "Uncertainty \"From Hist\" is obtained as RMS of variable distribution weighted with number of events.\ + Each entry correspond to the sector, value is value of the preformance variable, wwight is a number of events." + #print "(Normal) Weighted resolution IT: "+str(it_mean_res) + print "(From Hist) Weighted resolution IT: "+str(h_it_mean_res.GetMean()) +" +/- "+str(h_it_mean_res.GetRMS()) + #print "(Normal) Weighted abs. bias IT: "+str(it_mean_abswidth) + print "(From Hist) Weighted abs. bias IT: "+str(h_it_mean_abswidth.GetMean()) +" +/- "+str(h_it_mean_abswidth.GetRMS()) + print "(Normal) Weighted efficiency IT: "+str(it_eff) + " +/- "+str(err_it_eff) + #print "(Normal) Weighted resolution TT: "+str(tt_mean_res) + print "(From Hist) Weighted resolution TT: "+str(h_tt_mean_res.GetMean()) +" +/- "+str(h_tt_mean_res.GetRMS()) + #print "(Normal) Weighted abs. bias TT: "+str(tt_mean_abswidth) + print "(From Hist) Weighted abs. bias TT: "+str(h_tt_mean_abswidth.GetMean()) +" +/- "+str(h_tt_mean_abswidth.GetRMS()) + print "(Normal) Weighted efficiency TT: "+str(tt_eff) + " +/- "+str(err_tt_eff) + + + return True + +if __name__ == "__main__": + if len(sys.argv)==2: + data = sys.argv[1] + simple_analysis(data) + else: + print "Indicate tuple to analyse" + #syntax_explanation("SimpleStats.py") + diff --git a/SingleTrend.py b/SingleTrend.py new file mode 100644 index 0000000..7649c43 --- /dev/null +++ b/SingleTrend.py @@ -0,0 +1,38 @@ +import os +import inspect +from pprint import pprint +from itertools import product +import math +import numpy as n +import sys +import pickle +from datetime import datetime +from suppl.Structure import * +from config import binning +from config import histogram_address +from config import plot_address +import ROOT as R +from ROOT import gStyle +gStyle.SetOptStat(False) +#from ROOT import RooFit as RF + + +def SingleTrend(data, sector, histogram_address=histogram_address, 
plot_address=plot_address): + with open(data, 'r') as basket: + coll = pickle.load(basket) + for run in coll: + if "mean" in coll[run]["data"]: + create_single_monitor_trend(coll, sector, plot_address) + else: + create_single_efficiency_trend(coll, sector, plot_address) + break + return True + +if __name__ == "__main__": + #local_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))) + if len(sys.argv)==3: + data = sys.argv[1] + SingleTrend(data,sys.argv[2]) + else: + syntax_explanation("SingleTrend.py") + diff --git a/TupToHist.py b/TupToHist.py new file mode 100644 index 0000000..0a9f214 --- /dev/null +++ b/TupToHist.py @@ -0,0 +1,162 @@ +import os +import inspect +from pprint import pprint +from itertools import product +import math +import numpy as n +import sys +import pickle +from datetime import datetime +from suppl.Structure import * +from drawing.CreateTTHist import CreateTTHist +from drawing.CreateITHist import CreateITHist +from drawing.Create_Maps import TT_Map as TT_Map_func +from drawing.Create_Maps import IT_Map as IT_Map_func +from config import binning +from config import bin_name +from config import perform_window_eff_study +from config import Number_Of_Events +from config import histogram_address +from config import plot_address +from config import pkl_address +from config import extra_name +from config import final_eff_window +import ROOT as R +from ROOT import gStyle +gStyle.SetOptStat(False) +#from ROOT import RooFit as RF + +binning +bin_name + +def TupToHist(data, oparation_mode, Number_Of_Events=Number_Of_Events, pkl_address=pkl_address, histogram_address=histogram_address, plot_address=plot_address): + global perform_window_eff_study + + IT_Map=IT_Map_func() + TT_Map=TT_Map_func() + f_input = R.TFile(data) + start = datetime.now() + + if (oparation_mode=='1'): ############## TT ############## + tree_TTHitEfficiency = f_input.TTHitEfficiency + t_TTHitEfficiency = tree_TTHitEfficiency.Get("Tracks") #TrackMonTuple + 
coll_TTHitEfficiency = create_coll(det = "TT", mode = "Efficiency") + print "TT Efficiency" + for i, s in enumerate(t_TTHitEfficiency): + if i%100==0: + cli_progress_test(i, t_TTHitEfficiency.GetEntries(), start) + if Number_Of_Events>0: + if i>Number_Of_Events: continue + + + for run_bin in coll_TTHitEfficiency: + #if ((s.RunNumber>=coll_TTHitEfficiency[run_bin]["run_start"]) and (s.RunNumber<=coll_TTHitEfficiency[run_bin]["run_stop"])): + if ( + ( ("BinByStrip" in bin_name) and ((abs(s.clusterStrip)>=coll_TTHitEfficiency[run_bin]["Strip_start"]) and (abs(s.clusterStrip)<=coll_TTHitEfficiency[run_bin]["Strip_stop"])) ) + or + ( ("BinByY" in bin_name) and ((abs(s.expected_y)>=coll_TTHitEfficiency[run_bin]["Y_start"]) and (abs(s.expected_y)<=coll_TTHitEfficiency[run_bin]["Y_stop"])) ) + or + ( ("BinByPt" in bin_name) and ((s.pt>=coll_TTHitEfficiency[run_bin]["pT_start"]) and (s.pt<=coll_TTHitEfficiency[run_bin]["pT_stop"])) ) + or + ( ("AllRuns" in bin_name or "BinByMonth" in bin_name or "BinByRunNumber" in bin_name) and ((s.RunNumber>=coll_TTHitEfficiency[run_bin]["run_start"]) and (s.RunNumber<=coll_TTHitEfficiency[run_bin]["run_stop"])) ) + ) : + if s.isExpected: # expected hit + coll_TTHitEfficiency[run_bin]["data"][s.clusterSTchanMapID]["nbExpected"]+=s.isExpected + if s.isFound: + coll_TTHitEfficiency[run_bin]["data"][s.clusterSTchanMapID]["residual"].Fill(s.hit_residual) + if abs(s.hit_residual) < final_eff_window: + coll_TTHitEfficiency[run_bin]["data"][s.clusterSTchanMapID]["nbFound"]+=s.isFound + coll_TTHitEfficiency[run_bin]["data"][s.clusterSTchanMapID]["clusterSize"].Fill(s.clusterSize) + coll_TTHitEfficiency[run_bin]["data"][s.clusterSTchanMapID]["SNratio"].Fill(s.clusterCharge/s.clusterNoise) + + else : # hit is noise + coll_TTHitEfficiency[run_bin]["data"][s.clusterSTchanMapID]["residual_noise"].Fill(s.hit_residual) + if abs(s.hit_residual) < final_eff_window: + coll_TTHitEfficiency[run_bin]["data"][s.clusterSTchanMapID]["nbFoundNoise"]+=s.isFound + 
+ + coll_TTHitEfficiency = find_efficiency(coll_TTHitEfficiency) + coll_TTHitEfficiency = find_noise_fraction(coll_TTHitEfficiency) + if perform_window_eff_study: + coll_TTHitEfficiency = window_eff_study(coll_TTHitEfficiency) + write_window_eff_study(coll_TTHitEfficiency, "Efficiency", histogram_address+"TTHitEfficiency"+"_"+bin_name+"_efficiency_study_") + write_histogram(coll_TTHitEfficiency, "Efficiency",histogram_address+"TTHitEfficiency"+"__"+bin_name+"_") + coll_TTHitEfficiency = make_coll_lite(coll = coll_TTHitEfficiency, det = "TT", mode = "Efficiency") + for run_bin in coll_TTHitEfficiency: + try: + suffix = coll_TTHitEfficiency[run_bin]["comment"] + except: + suffix = str(coll_TTHitEfficiency[run_bin]["run_start"])+"__"+str(coll_TTHitEfficiency[run_bin]["run_stop"]) + # These are the plots with the map of the TT/IT + if "AllRuns" in bin_name: + CreateTTHist(coll_TTHitEfficiency[run_bin]["data"],variable ="occupancy", mode="Value",suffix=suffix,address=plot_address) + CreateTTHist(coll_TTHitEfficiency[run_bin]["data"],variable ="efficiency", mode="Value",suffix=suffix,address=plot_address) + CreateTTHist(coll_TTHitEfficiency[run_bin]["data"],variable = "mean", mode="Value",suffix= suffix,address=plot_address) + CreateTTHist(coll_TTHitEfficiency[run_bin]["data"],variable = "width", mode="Value",suffix=suffix,address=plot_address) + CreateTTHist(coll_TTHitEfficiency[run_bin]["data"],variable = "clusterSize_mean", mode="Value",suffix=suffix,address=plot_address) + CreateTTHist(coll_TTHitEfficiency[run_bin]["data"],variable = "SNratio_max", mode="Value",suffix=suffix,address=plot_address) + CreateTTHist(coll_TTHitEfficiency[run_bin]["data"],variable = "noise_fraction", mode="Value",suffix=suffix,address=plot_address) + + with open(pkl_address+'TTHitEfficiency_'+bin_name+extra_name+'.pkl', 'wb') as basket: + pickle.dump(coll_TTHitEfficiency, basket) + create_efficiency_trends(coll_TTHitEfficiency, 
"TT",histogram_address+"Trends_TTHitEfficiency"+"__"+bin_name+"_") + + elif (oparation_mode=='2'): ############## IT ############## + tree_ITHitEfficiency = f_input.ITHitEfficiency + t_ITHitEfficiency = tree_ITHitEfficiency.Get("Tracks") #TrackMonTuple + coll_ITHitEfficiency = create_coll(det = "IT", mode = "Efficiency") + print "IT Efficiency" + for i, s in enumerate(t_ITHitEfficiency): + if i%100==0: + cli_progress_test(i, t_ITHitEfficiency.GetEntries(), start) + if Number_Of_Events>0: + if i>Number_Of_Events: continue + + for run_bin in coll_ITHitEfficiency: + #if ((s.RunNumber>=coll_ITHitEfficiency[run_bin]["run_start"]) and (s.RunNumber<=coll_ITHitEfficiency[run_bin]["run_stop"])): + if ( + (("BinByStrip" in bin_name) and ((abs(s.clusterStrip)>=coll_ITHitEfficiency[run_bin]["Strip_start"]) and (abs(s.clusterStrip)<=coll_ITHitEfficiency[run_bin]["Strip_stop"])) ) + or + (("BinByY" in bin_name) and ((abs(s.expected_y)>=coll_ITHitEfficiency[run_bin]["Y_start"]) and (abs(s.expected_y)<=coll_ITHitEfficiency[run_bin]["Y_stop"])) ) + or + (("BinByPt" in bin_name) and ((s.pt>=coll_ITHitEfficiency[run_bin]["pT_start"]) and (s.pt<=coll_ITHitEfficiency[run_bin]["pT_stop"]))) + or + (("AllRuns" in bin_name or "BinByMonth" in bin_name or "BinByRunNumber" in bin_name) and ((s.RunNumber>=coll_ITHitEfficiency[run_bin]["run_start"]) and (s.RunNumber<=coll_ITHitEfficiency[run_bin]["run_stop"]))) + ) : + if s.isFound: + coll_ITHitEfficiency[run_bin]["data"][s.clusterSTchanMapID]["residual"].Fill(s.hit_residual) + #coll_ITHitEfficiency[run_bin]["data"][s.clusterSTchanMapID]["nbFound"]+=s.isFound + if abs(s.hit_residual) < final_eff_window: + coll_TTHitEfficiency[run_bin]["data"][s.clusterSTchanMapID]["nbFound"]+=s.isFound + coll_ITHitEfficiency[run_bin]["data"][s.clusterSTchanMapID]["nbExpected"]+=s.isExpected + + coll_ITHitEfficiency = find_efficiency(coll_ITHitEfficiency) + if perform_window_eff_study: + coll_ITHitEfficiency = window_eff_study(coll_ITHitEfficiency) + 
write_window_eff_study(coll_ITHitEfficiency, "Efficiency", histogram_address+"ITHitEfficiency"+"_"+bin_name+"_efficiency_study_") + write_histogram(coll_ITHitEfficiency, "Efficiency",histogram_address+"ITHitEfficiency"+"__"+bin_name+"_") + coll_ITHitEfficiency = make_coll_lite(coll = coll_ITHitEfficiency, det = "IT", mode = "Efficiency") + for run_bin in coll_ITHitEfficiency: + try: + suffix = coll_ITHitEfficiency[run_bin]["comment"] + except: + suffix = str(coll_ITHitEfficiency[run_bin]["run_start"])+"__"+str(coll_ITHitEfficiency[run_bin]["run_stop"]) + CreateITHist(coll_ITHitEfficiency[run_bin]["data"],variable="efficiency", mode="Value",suffix=suffix, address=plot_address) + CreateITHist(coll_ITHitEfficiency[run_bin]["data"],variable = "mean", mode="Value",suffix= suffix,address=plot_address) + CreateITHist(coll_ITHitEfficiency[run_bin]["data"],variable = "width", mode="Value",suffix=suffix,address=plot_address) + with open(pkl_address+'ITHitEfficiency_'+bin_name+extra_name+'.pkl', 'wb') as basket: + pickle.dump(coll_ITHitEfficiency, basket) + create_efficiency_trends(coll_ITHitEfficiency, "IT",histogram_address+"Trends_ITHitEfficiency"+"__"+bin_name+"_") + + else: + syntax_explanation("TupToHist.py") + + return True + +if __name__ == "__main__": + if len(sys.argv)==3: + data = sys.argv[1] + TupToHist(data,sys.argv[2]) + else: + syntax_explanation("TupToHist.py") + diff --git a/binnings/BinByAlignmentVersions_2015.py b/binnings/BinByAlignmentVersions_2015.py new file mode 100644 index 0000000..8e60b4b --- /dev/null +++ b/binnings/BinByAlignmentVersions_2015.py @@ -0,0 +1,85 @@ +bin_name = "BinByAlignment_2015" +binning = [{"year_start":2015, + "month_start":6, + "day_start":3, + "hour_start":9, + "minute_start":30, + "year_stop":2015, + "month_stop":06, + "day_stop":12, + "hour_stop":11, + "minute_stop":50, + "run_start":0, + "run_stop":154624}, + {"year_start":2015, + "month_start":6, + "day_start":12, + "hour_start":11, + "minute_start":50, + "year_stop":2015, 
+ "month_stop":06, + "day_stop":13, + "hour_stop":12, + "minute_stop":25, + "run_start":154625, + "run_stop":154742} , + {"year_start":2015, + "month_start":6, + "day_start":13, + "hour_start":12, + "minute_start":25, + "year_stop":2015, + "month_stop":07, + "day_stop":07, + "hour_stop":13, + "minute_stop":01, + "run_start":154791, + "run_stop":157174} , + {"year_start":2015, + "month_start":7, + "day_start":07, + "hour_start":13, + "minute_start":01, + "year_stop":2015, + "month_stop":8, + "day_stop":14, + "hour_stop":17, + "minute_stop":20, + "run_start":157180, + "run_stop":160155} , + {"year_start":2015, + "month_start":8, + "day_start":14, + "hour_start":17, + "minute_start":20, + "year_stop":2015, + "month_stop":8, + "day_stop":19, + "hour_stop":18, + "minute_stop":20, + "run_start":160156, + "run_stop":160606} , + {"year_start":2015, + "month_start":8, + "day_start":19, + "hour_start":18, + "minute_start":20, + "year_stop":2015, + "month_stop":9, + "day_stop":8, + "hour_stop":9, + "minute_stop":59, + "run_start":160716, + "run_stop":162412} , + {"year_start":2015, + "month_start":9, + "day_start":8, + "hour_start":9, + "minute_start":59, + "year_stop":2016, + "month_stop":12, + "day_stop":31, + "hour_stop":24, + "minute_stop":59, + "run_start":162413, + "run_stop":999999} ] \ No newline at end of file diff --git a/binnings/BinByMonth_2012.py b/binnings/BinByMonth_2012.py new file mode 100644 index 0000000..41611f2 --- /dev/null +++ b/binnings/BinByMonth_2012.py @@ -0,0 +1,47 @@ +bin_name = "BinByMonth_2012" +bin_vector = range(0,10) #vector containing bins edge (size equal to the number of bins+1), used for plotting range histos +binning=[ +#Apr 12 +{'comment': "Apr_12", +'run_start':111138, +'run_stop':114118}, + +#May 12 +{'comment': "May_12", +'run_start':114119, +'run_stop':117191}, + +#Jun 12 +{'comment': "Jun_12", +'run_start':117192, +'run_stop':119955}, + +#Jul 12 +{'comment': "Jul_12", +'run_start':119956, +'run_stop':124406}, + +#Aug 12 
+{'comment': "Aug_12", +'run_start':124407, +'run_stop':126931}, + +#Sep 12 +{'comment': "Sep_12", +'run_start':126932, +'run_stop':129692}, + +#Oct 12 +{'comment': "Oct_12", +'run_start':129693, +'run_stop':131359}, + +#Nov 12 +{'comment': "Nov_12", +'run_start':131360, +'run_stop':133470}, + +#Dec 12 +{'comment': "Dec_12", +'run_start':133471, +'run_stop':133800}] # 133488 diff --git a/binnings/BinByMonth_2015.py b/binnings/BinByMonth_2015.py new file mode 100644 index 0000000..3191c30 --- /dev/null +++ b/binnings/BinByMonth_2015.py @@ -0,0 +1,29 @@ +bin_name = "BinByMonth_2015" +bin_vector = range(0,4) #vector containing bins edge (size equal to the number of bins+1), used for plotting range histos +binning=[ +#Aug 15 +#{'comment': "Aug_15", +#'run_start':159969, +#'run_stop':161204}, +#Sep 15 +{'comment': "Sep_15", +'run_start':162247, +'run_stop':164462 +}, +#Oct 15 +{'comment': "Oct_15", +'run_start':164524, +'run_stop':166906 +}, +#Nov 15 +{'comment': "Nov_15", +'run_start':166950, +'run_stop': 168872 +} +#, +#Dec 15 +#{'comment': "Dec_15", +#'run_start':168892, +#'run_stop':169617 +#} +] diff --git a/binnings/BinByPt_2012.py b/binnings/BinByPt_2012.py new file mode 100644 index 0000000..b529124 --- /dev/null +++ b/binnings/BinByPt_2012.py @@ -0,0 +1,43 @@ +bin_name = "BinByPt_2012" +bin_vector = [400, 1100, 1500, 1800, 2100, 2500, 3000, 4000, 10000] # last bin up to 60000, 10000 is used only for nicer plot range +binning=[ +#bin1 +{'comment': "pT_400_1100_12", +'pT_start':400, + 'pT_stop':1100}, + +#bin2 +{'comment': "pT_1100_1500_12", +'pT_start':1100, + 'pT_stop':1500}, + +#bin3 +{'comment': "pT_1500_1800_12", +'pT_start':1500, + 'pT_stop':1800}, + +#bin4 +{'comment': "pT_1800_2100_12", +'pT_start':1800, + 'pT_stop':2100}, + +#bin5 +{'comment': "pT_2100_2500_12", +'pT_start':2100, + 'pT_stop':2500}, + +#bin6 +{'comment': "pT_2500_3000_12", +'pT_start':2500, + 'pT_stop':3000}, + +#bin7 +{'comment': "pT_3000_4000_12", +'pT_start':3000, + 'pT_stop':4000}, + 
+#bin8 +{'comment': "pT_4000_60000_12", +'pT_start':4000, + 'pT_stop':60000}, +] diff --git a/binnings/BinByPt_2015.py b/binnings/BinByPt_2015.py new file mode 100644 index 0000000..0dcb7f6 --- /dev/null +++ b/binnings/BinByPt_2015.py @@ -0,0 +1,43 @@ +bin_name = "BinByPt_2015" +bin_vector = [400, 1100, 1500, 1800, 2100, 2500, 3000, 4000, 10000] # last bin up to 80000, 10000 is used only for nicer plot range +binning=[ +#bin1 +{'comment': "pT_400_1100_15", +'pT_start':400, + 'pT_stop':1100}, + +#bin2 +{'comment': "pT_1100_1500_15", +'pT_start':1100, + 'pT_stop':1500}, + +#bin3 +{'comment': "pT_1500_1800_15", +'pT_start':1500, + 'pT_stop':1800}, + +#bin4 +{'comment': "pT_1800_2100_15", +'pT_start':1800, + 'pT_stop':2100}, + +#bin5 +{'comment': "pT_2100_2500_15", +'pT_start':2100, + 'pT_stop':2500}, + +#bin6 +{'comment': "pT_2500_3000_15", +'pT_start':2500, + 'pT_stop':3000}, + +#bin7 +{'comment': "pT_3000_4000_15", +'pT_start':3000, + 'pT_stop':4000}, + +#bin8 +{'comment': "pT_4000_80000_15", +'pT_start':4000, + 'pT_stop':80000}, +] diff --git a/binnings/BinByRunNumber.py b/binnings/BinByRunNumber.py new file mode 100644 index 0000000..0e9fcd5 --- /dev/null +++ b/binnings/BinByRunNumber.py @@ -0,0 +1,31 @@ +bin_name = "BinByRunNumber" +bin_vector = range(0,6) + +binning=[ + +#bin1 +{'comment': "Run_125980", # 2012 +'run_start':125980, + 'run_stop':125980}, + +#bin2 +{'comment': "Run_162247", +'run_start':162247, + 'run_stop':162247}, + +#bin3 +{'comment': "Run_166277", +'run_start':166277, + 'run_stop':166277}, + +#bin4 +{'comment': "Run_166719", +'run_start':166719, + 'run_stop':166719}, + +#bin5 +{'comment': "Run_167136", +'run_start':167136, + 'run_stop':167136}, + +] diff --git a/binnings/BinByStrip_2017.py b/binnings/BinByStrip_2017.py new file mode 100644 index 0000000..4f15618 --- /dev/null +++ b/binnings/BinByStrip_2017.py @@ -0,0 +1,166 @@ +bin_name = "BinByStrip_2017" +bin_vector = [0, 16, 32, 48, 64, 80, 96, 112, 128, 144, 160, 176, 192, 208, 224, 240, 
256, 272, 288, 304, 320, 336, 352, 368, 384, 400, 416, 432, 448, 464, 480, 496, 512 ] # abs() + +binning=[ + +#bin1 +{'comment': "Strip_0_16_17", +'Strip_start':0, + 'Strip_stop':15}, + +#bin2 +{'comment': "Strip_16_32_17", +'Strip_start':16, + 'Strip_stop':31}, + +#bin +{'comment': "Strip_32_48_17", +'Strip_start':32, + 'Strip_stop':47}, + +#bin +{'comment': "Strip_48_64_17", +'Strip_start':48, + 'Strip_stop':63}, + +#bin +{'comment': "Strip_64_80_17", +'Strip_start':64, + 'Strip_stop':79}, + +#bin +{'comment': "Strip_80_96_17", +'Strip_start':80, + 'Strip_stop':95}, + +#bin +{'comment': "Strip_96_112_17", +'Strip_start':96, + 'Strip_stop':111}, + +#bin +{'comment': "Strip_112_128_17", +'Strip_start':112, + 'Strip_stop':127}, + +#bin +{'comment': "Strip_128_144_17", +'Strip_start':128, + 'Strip_stop':143}, + +#bin +{'comment': "Strip_144_160_17", +'Strip_start':144, + 'Strip_stop':159}, + +#bin +{'comment': "Strip_160_176_17", +'Strip_start':160, + 'Strip_stop':175}, + +#bin +{'comment': "Strip_176_192_17", +'Strip_start':176, + 'Strip_stop':191}, + +#bin +{'comment': "Strip_192_208_17", +'Strip_start':192, + 'Strip_stop':207}, + +#bin +{'comment': "Strip_208_224_17", +'Strip_start':208, + 'Strip_stop':223}, + +#bin +{'comment': "Strip_224_240_17", +'Strip_start':224, + 'Strip_stop':239}, + +#bin +{'comment': "Strip_240_256_17", +'Strip_start':240, + 'Strip_stop':255}, + +#bin +{'comment': "Strip_256_272_17", +'Strip_start':256, + 'Strip_stop':271}, + +#bin +{'comment': "Strip_272_288_17", +'Strip_start':272, + 'Strip_stop':287}, + +#bin +{'comment': "Strip_288_304_17", +'Strip_start':288, + 'Strip_stop':303}, + +#bin +{'comment': "Strip_304_320_17", +'Strip_start':304, + 'Strip_stop':319}, + +#bin +{'comment': "Strip_320_336_17", +'Strip_start':320, + 'Strip_stop':335}, + +#bin +{'comment': "Strip_336_352_17", +'Strip_start':336, + 'Strip_stop':351}, + +#bin +{'comment': "Strip_352_368_17", +'Strip_start':352, + 'Strip_stop':367}, + +#bin +{'comment': 
"Strip_368_384_17", +'Strip_start':368, + 'Strip_stop':383}, + +#bin +{'comment': "Strip_384_400_17", +'Strip_start':384, + 'Strip_stop':399}, + +#bin +{'comment': "Strip_400_416_17", +'Strip_start':400, + 'Strip_stop':415}, + +#bin +{'comment': "Strip_416_432_17", +'Strip_start':416, + 'Strip_stop':431}, + +#bin +{'comment': "Strip_432_448_17", +'Strip_start':432, + 'Strip_stop':447}, + +#bin +{'comment': "Strip_448_464_17", +'Strip_start':448, + 'Strip_stop':463}, + +#bin +{'comment': "Strip_464_480_17", +'Strip_start':464, + 'Strip_stop':479}, + +#bin +{'comment': "Strip_480_496_17", +'Strip_start':480, + 'Strip_stop':495}, + +#bin +{'comment': "Strip_496_512_17", +'Strip_start':496, + 'Strip_stop':512} + +] diff --git a/binnings/BinByY_2012.py b/binnings/BinByY_2012.py new file mode 100644 index 0000000..eba08c8 --- /dev/null +++ b/binnings/BinByY_2012.py @@ -0,0 +1,96 @@ +bin_name = "BinByY_2012" +bin_vector = [0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 150, 200, 250, 300, 350, 400, 500, 700] # abs() + +binning=[ + +#bin1 +{'comment': "Y_0_10_12", +'Y_start':0, + 'Y_stop':10}, + +#bin2 +{'comment': "Y_10_20_12", +'Y_start':10, + 'Y_stop':20}, + +#bin3 +{'comment': "Y_20_30_12", +'Y_start':20, + 'Y_stop':30}, + +#bin4 +{'comment': "Y_30_40_12", +'Y_start':30, + 'Y_stop':40}, + +#bin5 +{'comment': "Y_40_50_12", +'Y_start':40, + 'Y_stop':50}, + +#bin6 +{'comment': "Y_50_60_12", +'Y_start':50, + 'Y_stop':60}, + +#bin7 +{'comment': "Y_60_70_12", +'Y_start':60, + 'Y_stop':70}, + +#bin8 +{'comment': "Y_70_80_12", +'Y_start':70, + 'Y_stop':80}, + +#bin9 +{'comment': "Y_80_90_12", +'Y_start':80, + 'Y_stop':90}, + +#bin10 +{'comment': "Y_90_100_12", +'Y_start':90, + 'Y_stop':100}, + +#bin11 +{'comment': "Y_100_150_12", +'Y_start':100, + 'Y_stop':150}, + +#bin12 +{'comment': "Y_150_200_12", +'Y_start':150, + 'Y_stop':200}, + +#bin13 +{'comment': "Y_200_250_12", +'Y_start':200, + 'Y_stop':250}, + +#bin14 +{'comment': "Y_250_300_12", +'Y_start':250, + 'Y_stop':300}, + 
+#bin15 +{'comment': "Y_300_350_12", +'Y_start':300, + 'Y_stop':350}, + +#bin16 +{'comment': "Y_350_400_12", +'Y_start':350, + 'Y_stop':400}, + +#bin17 +{'comment': "Y_400_500_12", +'Y_start':400, + 'Y_stop':500}, + +#bin18 +{'comment': "Y_500_700_12", +'Y_start':500, + 'Y_stop':700}, + +] diff --git a/binnings/BinByY_2015.py b/binnings/BinByY_2015.py new file mode 100644 index 0000000..096b9b2 --- /dev/null +++ b/binnings/BinByY_2015.py @@ -0,0 +1,96 @@ +bin_name = "BinByY_2015" +bin_vector = [0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 150, 200, 250, 300, 350, 400, 500, 700] # abs() + +binning=[ + +#bin1 +{'comment': "Y_0_10_15", +'Y_start':0, + 'Y_stop':10}, + +#bin2 +{'comment': "Y_10_20_15", +'Y_start':10, + 'Y_stop':20}, + +#bin3 +{'comment': "Y_20_30_15", +'Y_start':20, + 'Y_stop':30}, + +#bin4 +{'comment': "Y_30_40_15", +'Y_start':30, + 'Y_stop':40}, + +#bin5 +{'comment': "Y_40_50_15", +'Y_start':40, + 'Y_stop':50}, + +#bin6 +{'comment': "Y_50_60_15", +'Y_start':50, + 'Y_stop':60}, + +#bin7 +{'comment': "Y_60_70_15", +'Y_start':60, + 'Y_stop':70}, + +#bin8 +{'comment': "Y_70_80_15", +'Y_start':70, + 'Y_stop':80}, + +#bin9 +{'comment': "Y_80_90_15", +'Y_start':80, + 'Y_stop':90}, + +#bin10 +{'comment': "Y_90_100_15", +'Y_start':90, + 'Y_stop':100}, + +#bin11 +{'comment': "Y_100_150_15", +'Y_start':100, + 'Y_stop':150}, + +#bin12 +{'comment': "Y_150_200_15", +'Y_start':150, + 'Y_stop':200}, + +#bin13 +{'comment': "Y_200_250_15", +'Y_start':200, + 'Y_stop':250}, + +#bin14 +{'comment': "Y_250_300_15", +'Y_start':250, + 'Y_stop':300}, + +#bin15 +{'comment': "Y_300_350_15", +'Y_start':300, + 'Y_stop':350}, + +#bin16 +{'comment': "Y_350_400_15", +'Y_start':350, + 'Y_stop':400}, + +#bin17 +{'comment': "Y_400_500_15", +'Y_start':400, + 'Y_stop':500}, + +#bin18 +{'comment': "Y_500_700_15", +'Y_start':500, + 'Y_stop':700}, + +] diff --git a/binnings/BinByY_2017.py b/binnings/BinByY_2017.py new file mode 100644 index 0000000..8fde384 --- /dev/null +++ 
b/binnings/BinByY_2017.py @@ -0,0 +1,96 @@ +bin_name = "BinByY_2017" +bin_vector = [0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 150, 200, 250, 300, 350, 400, 500, 700] # abs() + +binning=[ + +#bin1 +{'comment': "Y_0_10_17", +'Y_start':0, + 'Y_stop':10}, + +#bin2 +{'comment': "Y_10_20_17", +'Y_start':10, + 'Y_stop':20}, + +#bin3 +{'comment': "Y_20_30_17", +'Y_start':20, + 'Y_stop':30}, + +#bin4 +{'comment': "Y_30_40_17", +'Y_start':30, + 'Y_stop':40}, + +#bin5 +{'comment': "Y_40_50_17", +'Y_start':40, + 'Y_stop':50}, + +#bin6 +{'comment': "Y_50_60_17", +'Y_start':50, + 'Y_stop':60}, + +#bin7 +{'comment': "Y_60_70_17", +'Y_start':60, + 'Y_stop':70}, + +#bin8 +{'comment': "Y_70_80_17", +'Y_start':70, + 'Y_stop':80}, + +#bin9 +{'comment': "Y_80_90_17", +'Y_start':80, + 'Y_stop':90}, + +#bin10 +{'comment': "Y_90_100_17", +'Y_start':90, + 'Y_stop':100}, + +#bin11 +{'comment': "Y_100_150_17", +'Y_start':100, + 'Y_stop':150}, + +#bin12 +{'comment': "Y_150_200_17", +'Y_start':150, + 'Y_stop':200}, + +#bin13 +{'comment': "Y_200_250_17", +'Y_start':200, + 'Y_stop':250}, + +#bin14 +{'comment': "Y_250_300_17", +'Y_start':250, + 'Y_stop':300}, + +#bin15 +{'comment': "Y_300_350_17", +'Y_start':300, + 'Y_stop':350}, + +#bin16 +{'comment': "Y_350_400_17", +'Y_start':350, + 'Y_stop':400}, + +#bin17 +{'comment': "Y_400_500_17", +'Y_start':400, + 'Y_stop':500}, + +#bin18 +{'comment': "Y_500_700_17", +'Y_start':500, + 'Y_stop':700}, + +] diff --git a/binnings/NoBinning.py b/binnings/NoBinning.py new file mode 100644 index 0000000..e274182 --- /dev/null +++ b/binnings/NoBinning.py @@ -0,0 +1,7 @@ +bin_name = "AllRuns" +binning=[ +{ +'comment':"All_runs", +'run_start':0, +'run_stop':9999999} +] \ No newline at end of file diff --git a/binnings/NoBinning_2012.py b/binnings/NoBinning_2012.py new file mode 100644 index 0000000..454f835 --- /dev/null +++ b/binnings/NoBinning_2012.py @@ -0,0 +1,8 @@ +bin_name = "AllRuns_12" +bin_vector = range(0,2) #vector containing bins edge (size equal to 
the number of bins+1), used for plotting range histos +binning=[ +{ +'comment':"All_runs_12", +'run_start':0, +'run_stop':9999999} +] diff --git a/binnings/NoBinning_2015.py b/binnings/NoBinning_2015.py new file mode 100644 index 0000000..b08b305 --- /dev/null +++ b/binnings/NoBinning_2015.py @@ -0,0 +1,8 @@ +bin_name = "AllRuns_15" +bin_vector = range(0,2) #vector containing bins edge (size equal to the number of bins+1), used for plotting range histos +binning=[ +{ +'comment':"All_runs_15", +'run_start':0, +'run_stop':9999999} +] diff --git a/binnings/NoBinning_2016.py b/binnings/NoBinning_2016.py new file mode 100644 index 0000000..38441ad --- /dev/null +++ b/binnings/NoBinning_2016.py @@ -0,0 +1,8 @@ +bin_name = "AllRuns_16" +bin_vector = range(0,2) #vector containing bins edge (size equal to the number of bins+1), used for plotting range histos +binning=[ +{ +'comment':"All_runs_16", +'run_start':0, +'run_stop':9999999} +] diff --git a/binnings/NoBinning_2017.py b/binnings/NoBinning_2017.py new file mode 100644 index 0000000..1a9be54 --- /dev/null +++ b/binnings/NoBinning_2017.py @@ -0,0 +1,8 @@ +bin_name = "AllRuns_17" +bin_vector = range(0,2) #vector containing bins edge (size equal to the number of bins+1), used for plotting range histos +binning=[ +{ +'comment':"All_runs_17", +'run_start':0, +'run_stop':9999999} +] diff --git a/binnings/__init__.py b/binnings/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/binnings/__init__.py @@ -0,0 +1 @@ + diff --git a/config.py b/config.py new file mode 100644 index 0000000..7b004e9 --- /dev/null +++ b/config.py @@ -0,0 +1,120 @@ +""" +Here you may configure behaviour of the package. 
+""" +######### +#General# +######### +from binnings.NoBinning_2017 import binning #Choose a time binning +from binnings.NoBinning_2017 import bin_name #Choose a time binning +from binnings.NoBinning_2017 import bin_vector #Choose a time binning +#from binnings.NoBinning_2016 import binning #Choose a time binning +#from binnings.NoBinning_2016 import bin_name #Choose a time binning +#from binnings.NoBinning_2016 import bin_vector #Choose a time binning +#from binnings.NoBinning_2015 import binning #Choose a time binning +#from binnings.NoBinning_2015 import bin_name #Choose a time binning +#from binnings.NoBinning_2015 import bin_vector #Choose a time binning +#from binnings.NoBinning_2012 import binning #Choose a time binning +#from binnings.NoBinning_2012 import bin_name #Choose a time binning +#from binnings.NoBinning_2012 import bin_vector #Choose a time binning + +#from binnings.BinByMonth_2012 import binning #Choose a time binning +#from binnings.BinByMonth_2012 import bin_name #Choose a time binning +#from binnings.BinByMonth_2012 import bin_vector #Choose a time binning +#from binnings.BinByMonth_2015 import binning #Choose a time binning +#from binnings.BinByMonth_2015 import bin_name #Choose a time binning +#from binnings.BinByMonth_2015 import bin_vector #Choose a time binning + +#from binnings.BinByPt_2012 import binning #Choose a pT binning +#from binnings.BinByPt_2012 import bin_name #Choose a pT binning +#from binnings.BinByPt_2012 import bin_vector #Choose a pT binning +#from binnings.BinByPt_2015 import binning #Choose a pT binning +#from binnings.BinByPt_2015 import bin_name #Choose a pT binning +#from binnings.BinByPt_2015 import bin_vector #Choose a pT binning + +#from binnings.BinByStrip_2017 import binning #Choose a Strip binning +#from binnings.BinByStrip_2017 import bin_name #Choose a Strip binning +#from binnings.BinByStrip_2017 import bin_vector #Choose a Strip binning + +#from binnings.BinByY_2017 import binning #Choose a Y binning +#from 
binnings.BinByY_2017 import bin_name #Choose a Y binning +#from binnings.BinByY_2017 import bin_vector #Choose a Y binning +#from binnings.BinByY_2015 import binning #Choose a Y binning +#from binnings.BinByY_2015 import bin_name #Choose a Y binning +#from binnings.BinByY_2015 import bin_vector #Choose a Y binning +#from binnings.BinByY_2012 import binning #Choose a Y binning +#from binnings.BinByY_2012 import bin_name #Choose a Y binning +#from binnings.BinByY_2012 import bin_vector #Choose a Y binning + +#from binnings.BinByRunNumber import binning #Choose a pT binning +#from binnings.BinByRunNumber import bin_name #Choose a pT binning +#from binnings.BinByRunNumber import bin_vector #Choose a pT binning + +#from binnings.BinByAlignmentVersions_2015 import binning #Choose a time binning +#from binnings.BinByAlignmentVersions_2015 import bin_name #Choose a time binning + + +Number_Of_Events = 1000 #-1 #Choose a number of tracks to proceed from Tuple. Negative values == all tracks +pkl_address ="Pkls/" #Address of dumping .pkl with dictionaries +histogram_address = "Histos/" #Address for storing histograms +plot_address = "Plots/" #Adress for storing plots +residual_limit = 0.5 #limit of residual histograms, in mm. +residual_nBins = 100 #number of bins in residual histograms +SNratio_limit = 250 #limit of S/N ratio histograms +SNratio_nBins = 250 #number of bins in S/N ratio histograms +dead_sectors = ['IT1BottomX2Sector7', 'IT3TopX1Sector7'] + +################## +#Histogram ranges# +################## +UsePredefinedRanges = True +ITMeanRange = [-0.06, 0.06] +ITWidthRange = [0.044, 0.058] +#ITEffRange = [0.995, 1.0] +ITEffRange = [0.95, 1.0] + +#TTMeanRange = [-0.06, 0.06] +TTMeanRange = [-0.14, 0.14] +#TTMeanRange = [-0.04, 0.04] +#TTWidthRange = [0.02, 0.12] +TTWidthRange = [0.03, 0.10] +#TTEffRange = [0.98, 1.0] +TTEffRange = [0.97, 1.0] + +TTclusterSizeRange = [1., 2.] +TTSNratioRange = [5.,20.] 
+TTnoiseFractionRange = [0.,0.004] # [0.,0.02] +#TTnoiseFractionRange = [0.,0.005] +TToccupancyRange = [0.,250000.] + + +################## +#Histogram Titles# +################## +UsePredefinedTitles = True +IncudeMissingSectorsToSummary = False +nBins_in_summary = 30 + +ITMeanTitle = "Residual bias distribution [mm], 2017" +ITWidthTitle = "Residual width distribution [mm], 2017" +ITEffTitle = "Hit detection efficiency distribution, 2017" + +TTMeanTitle = "Residual bias distribution [mm], 2017" +TTWidthTitle = "Residual width distribution [mm], 2017" +TTEffTitle = "Hit detection efficiency distribution, 2017" + +TTclusterSizeTitle = "Cluster size, 2017" +TTSNratioTitle = "S/N ratio, 2017" +TTnoiseFractionTitle = "noise fraction, 2017" +TToccupancyTitle = "Occupancy, 2017" + + +################################################### +#Dependence of efficiency from search window study# +################################################### +perform_window_eff_study = True +#efficiency_windows = [0.01, 0.025, 0.05, 0.075, 0.1, 0.2, 0.3, 0.4] +efficiency_windows = [0.01, 0.02, 0.03, 0.04, 0.05, 0.06, 0.07, 0.08, 0.09, 0.1, 0.11, 0.12, 0.13, 0.14, 0.15, 0.16, 0.17, 0.18, 0.19, 0.2, 0.21, 0.22, 0.23, 0.24, 0.25, 0.26, 0.27, 0.28, 0.29, 0.3, 0.31, 0.32, 0.33, 0.34, 0.35, 0.36, 0.37, 0.38, 0.39, 0.4 ] + +final_eff_window = 0.24 # # this window is used to calculat the efficiency for the final 2D plots od the detector + +extra_name = "_194066" #"_166903"(2015) #"_180318"(2016) #"_194066"(2017) # _197925 #_194248 diff --git a/config_MC.py b/config_MC.py new file mode 100644 index 0000000..5ba3b20 --- /dev/null +++ b/config_MC.py @@ -0,0 +1,55 @@ +""" +Here you may configure behaviour of the package. 
+""" +######### +#General# +######### +#from binnings.NoBinning_2015 import binning #Choose a time binning +#from binnings.NoBinning_2015 import bin_name #Choose a time binning +from binnings.NoBinning_2012 import binning #Choose a time binning +from binnings.NoBinning_2012 import bin_name #Choose a time binning +#from binnings.BinByMonth_2012 import binning #Choose a time binning +#from binnings.BinByMonth_2012 import bin_name #Choose a time binning +#from binnings.BinByMonth_2015 import binning #Choose a time binning +#from binnings.BinByMonth_2015 import bin_name #Choose a time binning +#from binnings.BinByAlignmentVersions_2015 import binning #Choose a time binning +#from binnings.BinByAlignmentVersions_2015 import bin_name #Choose a time binning + + +Number_Of_Events = -1 #Choose a number of tracks to proceed from Tuple. Negative values == all tracks +pkl_address ="Pkls/" #Address of dumping .pkl with dictionaries +histogram_address = "Histos/" #Address for storing histograms +plot_address = "Plots/" #Adress for storing plots +residual_limit = 0.5 #limit of residual histograms, in mm. 
+residual_nBins = 100 #number of bins in residual histograms + +################## +#Histogram ranges# +################## +UsePredefinedRanges = True +ITMeanRange = [-0.03, 0.03] +ITWidthRange = [0.02, 0.06] +ITEffRange = [0.995, 1.0] + +TTMeanRange = [-0.03, 0.03] +TTWidthRange = [0.02, 0.06] +TTEffRange = [0.98, 1.0] + +################## +#Histogram Titles# +################## +UsePredefinedTitles = True +ITMeanTitle = "Residual bias distribution [mm], 2012" +ITWidthTitle = "Residual width distribution [mm], 2012" +ITEffTitle = "Hit detection efficiency distribtion, 2012" + +TTMeanTitle = "Residual bias distribution [mm], 2012" +TTWidthTitle = "Residual width distribution [mm], 2012" +TTEffTitle = "Hit detection efficiency distribtion, 2012" + + +################################################### +#Dependence of efficiency from search window study# +################################################### +perform_window_eff_study = True +efficiency_windows = [0.01, 0.025, 0.05, 0.075, 0.1, 0.2, 0.3] \ No newline at end of file diff --git a/suppl/Structure.py b/suppl/Structure.py new file mode 100644 index 0000000..fd4aaba --- /dev/null +++ b/suppl/Structure.py @@ -0,0 +1,485 @@ +""" +Here are contained supplimentary functions for Tuple to Histogram and Pkl to Histogram transformation. +Normally, Tuples are stored as a python dictionaries (see create_coll, create_monitor_ind and create_efficiency_ind) +Two type of dictioanries is considered: Efficiency-like and Monitor-like +These dictionaries contains histograms, which a later stored in a .root file (In format which is recognized by interactie ST monitor) +Basing on time binning, trend histograms are also created. (They are also saved in ST-monitor-friendly .root files) +ST map plots are created by funcitons from CreateTTHist and CreateITHist files. 
+""" + +import os +import inspect +from pprint import pprint +from itertools import product +import math +import numpy as n +import sys +from config import binning +from config import bin_name +from config import bin_vector +from config import residual_limit +from config import perform_window_eff_study +from config import efficiency_windows +from config import residual_nBins +from config import SNratio_limit +from config import SNratio_nBins + +from config import extra_name + +from datetime import datetime +from drawing.Create_Maps import TT_Map as TT_Map_func +from drawing.Create_Maps import IT_Map as IT_Map_func + +from array import array + +import ROOT as R +from ROOT import gStyle +from ROOT import gROOT +gStyle.SetOptStat(False) + + +def syntax_explanation(script): + if script == "SingleTrend.py": + print "Incorrect syntax. Please run with:" + print "python "+script+" " + print "Plese use sector names like 'TTaXRegionBSector9'" + elif script == "PklAlgebra.py": + print "Incorrect syntax. Please run with:" + print "python "+script+" " + print "For example:" + print "python "+script+" ds1.pkl ds2.pkl a+b efficiency" + print "This command will create a new collection containing sum of efficiencies." + print "If you will not specify variable, all variables in collection will be calculated." + print "Please use .pkls with collections, which have the same time binning" + print "Please use .pkls with collections, which describe the same detectors" + print "Please use .pkls which correspond to the same operation mode" + print "This function create a new pkl file containng dictionary based on dictionaries from inputs." + print "Both input pkls should have the same structure." + print "Resulting pkl will also have the same structure." + print "The values of the resulting dictionary elements will be found by evaluation of a given formula, with 'a' replaced with value from ds1, and 'b' replaced with value from ds2." 
+ print "If it is impossible to evaluate a formula, the value in resulting dictionary will be set to 0 and warning will be printed." + else: + print "Incorrect syntax. Please run with:" + print "python "+script+" " + print " here:" + print "1 - TT Hit Efficiency" + print "2 - IT Hit Efficiency" + return True + +def cli_progress_test(i, end_val, start, bar_length=20): + percent = float(i) / end_val + hashes = '#' * int(round(percent * bar_length)) + spaces = ' ' * (bar_length - len(hashes)) + sys.stdout.write("\rPercent: [{0}] {1}% ({2}/{3}), {4}".format(hashes + spaces, int(round(percent * 100)),i, end_val, datetime.now()-start)) + sys.stdout.flush() + + +def run_binning(): + #{:{"run_start":, + # "run_stop":} + #} + global binning + run_schema = {} + for pb in binning: + if ("BinByMonth" in bin_name or "AllRuns" in bin_name or "BinByRunNumber" in bin_name): + run_schema[pb["run_start"]]={"run_start":pb["run_start"],"run_stop":pb["run_stop"]} + try: + run_schema[pb["run_start"]]["comment"]=pb["comment"] + except: + print "Unable to find alias for bin range. Use numbers instead" + pass + if "BinByPt" in bin_name: + run_schema[pb["pT_start"]]={"pT_start":pb["pT_start"],"pT_stop":pb["pT_stop"]} + try: + run_schema[pb["pT_start"]]["comment"]=pb["comment"] + except: + print "Unable to find alias for bin range. Use numbers instead" + pass + if "BinByY" in bin_name: + run_schema[pb["Y_start"]]={"Y_start":pb["Y_start"],"Y_stop":pb["Y_stop"]} + try: + run_schema[pb["Y_start"]]["comment"]=pb["comment"] + except: + print "Unable to find alias for bin range. Use numbers instead" + pass + if "BinByStrip" in bin_name: + run_schema[pb["Strip_start"]]={"Strip_start":pb["Strip_start"],"Strip_stop":pb["Strip_stop"]} + try: + run_schema[pb["Strip_start"]]["comment"]=pb["comment"] + except: + print "Unable to find alias for bin range. 
Use numbers instead" + pass + + return run_schema + + +#R.TH1F("hist_s","Name",100, -3,3) + + +def create_efficiency_ind(st_name,run_range): + #Information which should be filled for individual sector for Efficiency mode + #To produce .root files only! + global residual_limit + global residual_nBins + global perform_window_eff_study + global efficiency_windows + efficiency_ind = {"nbFound":0, + "nbExpected":0, + "efficiency":0, + "err_efficiency":0, + "nbFoundNoise":0, + "nbExpectedNoise":0, + "noise_fraction":0, + "noise_fraction_err":0, + "clusterSize":R.TH1F("clusterSize"+run_range+"_"+st_name,"Cluster size;;Number of events", 5, 0.5, 5.5), + "SNratio":R.TH1F("SNratio"+run_range+"_"+st_name,"S/N ratio;;Number of events", SNratio_nBins, 0., SNratio_limit), + "residual":R.TH1F("residualE"+run_range+"_"+st_name,"Residual;[mm];Number of events", residual_nBins, -residual_limit, residual_limit), + "residual_noise":R.TH1F("residualNoise"+run_range+"_"+st_name,"Residual(noise);[mm];Number of events", residual_nBins, -residual_limit, residual_limit), + "efficiency_hist":R.TH1F("efficiency"+run_range+"_"+st_name,"Efficiency",1, 0.,1.), + "noise_fraction_hist":R.TH1F("noise_fraction"+run_range+"_"+st_name,"Noise fraction",1, 0.,1.) + } + if perform_window_eff_study: + # efficiency_ind["window_dependence"] = R.TH1F("wind_dep_"+run_range+"_"+st_name,"Efficiency as a function of search window;[mm];Efficiency",len(efficiency_windows), 0.,1.) + efficiency_ind["window_dependence"] = R.TH1F("wind_dep_"+run_range+"_"+st_name,"Efficiency as a function of search window;[mm];Efficiency",40,0,0.4) + efficiency_ind["window_dependence_noise"] = R.TH1F("wind_dep_noise_"+run_range+"_"+st_name,"Noise fraction as a function of search window;[mm];Noise fraction",40,0,0.4) + return efficiency_ind + + +def create_efficiency_lite(efficiency_ind): + #Information which should be filled for individual sector for Efficiency mode + #To produce .pkls and build trends. 
+ efficiency_lite = { + "occupancy":efficiency_ind["nbFound"], + "efficiency":efficiency_ind["efficiency"], + "err_efficiency":efficiency_ind["err_efficiency"], + "mean":efficiency_ind["residual"].GetMean(), + "err_mean":efficiency_ind["residual"].GetMeanError(), + "width":efficiency_ind["residual"].GetRMS(), + "err_width":efficiency_ind["residual"].GetRMSError(), + "clusterSize_mean":efficiency_ind["clusterSize"].GetMean(), + "SNratio_max":efficiency_ind["SNratio"].GetBinCenter( efficiency_ind["SNratio"].GetMaximumBin()), + "noise_fraction":efficiency_ind["noise_fraction"], + "noise_fraction_err":efficiency_ind["noise_fraction_err"], + } + return efficiency_lite + + +def create_coll(det="IT", mode="Monitor"): + #To be used for creation of histograms + #{:{ + # "run_start":, + # "run_stop" :, + # "data" :{ + # :{}, + # :{}, + # :{}... + #} } } + coll = run_binning() + if det == "IT": + ST_Map = IT_Map_func() + else: + ST_Map = TT_Map_func() + for run_bin in coll: + try: + run_range=coll[run_bin]["comment"] + except: + run_range="::::"+str(coll[run_bin]["run_start"])+"::"+str(coll[run_bin]["run_stop"])+"::::" + coll[run_bin]["data"]={} + for st_id in ST_Map: + if mode == "Monitor": + coll[run_bin]["data"][st_id]=create_monitor_ind(ST_Map[st_id],run_range) + else: + coll[run_bin]["data"][st_id]=create_efficiency_ind(ST_Map[st_id],run_range) + return coll + +def make_coll_lite(coll, det="IT", mode="Monitor"): + #To be stored in .pkl, to create trends + #{:{ + # "run_start":, + # "run_stop" :, + # "data" :{ + # :{}, + # :{}, + # :{}... 
+ #} } } + lite_coll = run_binning() + if det == "IT": + ST_Map = IT_Map_func() + else: + ST_Map = TT_Map_func() + for run_bin in coll: + try: + run_range=coll[run_bin]["comment"] + except: + run_range="::::"+str(coll[run_bin]["run_start"])+"::"+str(coll[run_bin]["run_stop"])+"::::" + lite_coll[run_bin]["data"]={} + for st_id in ST_Map: + if mode == "Monitor": + lite_coll[run_bin]["data"][st_id]=create_monitor_lite(coll[run_bin]["data"][st_id]) + else: + lite_coll[run_bin]["data"][st_id]=create_efficiency_lite(coll[run_bin]["data"][st_id]) + return lite_coll + +##### Efficiency: nbFound/nbExpected already removed hits from noise #### +def find_efficiency(coll): + for run_bin in coll: + for st_ID in coll[run_bin]["data"]: + nbf = coll[run_bin]["data"][st_ID]["nbFound"] + nbe = coll[run_bin]["data"][st_ID]["nbExpected"] + if nbe == 0: + coll[run_bin]["data"][st_ID]["efficiency"]= 0 + coll[run_bin]["data"][st_ID]["err_efficiency"] = 0 + coll[run_bin]["data"][st_ID]["efficiency_hist"].SetBinContent(1, 0) + coll[run_bin]["data"][st_ID]["efficiency_hist"].SetBinError(1, 0) + continue + coll[run_bin]["data"][st_ID]["efficiency"]=nbf/nbe + coll[run_bin]["data"][st_ID]["err_efficiency"] = nbf**0.5*(nbe-nbf)**0.5*nbe**(-1.5) + coll[run_bin]["data"][st_ID]["efficiency_hist"].SetBinContent(1, coll[run_bin]["data"][st_ID]["efficiency"]) + coll[run_bin]["data"][st_ID]["efficiency_hist"].SetBinError(1, coll[run_bin]["data"][st_ID]["err_efficiency"]) + return coll + +def find_noise_fraction(coll): + for run_bin in coll: + for st_ID in coll[run_bin]["data"]: + nbf = coll[run_bin]["data"][st_ID]["nbFoundNoise"] + nbe = coll[run_bin]["data"][st_ID]["nbExpected"] + if nbe == 0: + coll[run_bin]["data"][st_ID]["noise_fraction"]= 0 + coll[run_bin]["data"][st_ID]["noise_fraction_err"] = 0 + coll[run_bin]["data"][st_ID]["noise_fraction_hist"].SetBinContent(1, 0) + coll[run_bin]["data"][st_ID]["noise_fraction_hist"].SetBinError(1, 0) + continue + 
coll[run_bin]["data"][st_ID]["noise_fraction"]=nbf/nbe + coll[run_bin]["data"][st_ID]["noise_fraction_err"] = nbf**0.5*(nbe-nbf)**0.5*nbe**(-1.5) + coll[run_bin]["data"][st_ID]["noise_fraction_hist"].SetBinContent(1, coll[run_bin]["data"][st_ID]["noise_fraction"]) + coll[run_bin]["data"][st_ID]["noise_fraction_hist"].SetBinError(1, coll[run_bin]["data"][st_ID]["noise_fraction_err"]) + return coll + + +def bins_from_window(window): + global residual_limit + global residual_nBins + bin_width = 2.*float(residual_limit)/float(residual_nBins) + bin_low = residual_nBins/2 - int(window/bin_width) + bin_hi = residual_nBins-bin_low+1 + return [bin_low, bin_hi] + + +def window_eff_study(coll): + global efficiency_windows + for run_bin in coll: + for st_ID in coll[run_bin]["data"]: + nbe = coll[run_bin]["data"][st_ID]["nbExpected"] + if nbe == 0: + continue +# coll[run_bin]["data"][st_ID]["window_dependence"].GetXaxis().SetNdivisions(-414) + for i, window in enumerate(sorted(efficiency_windows)): + nbf = coll[run_bin]["data"][st_ID]["residual"].Integral(bins_from_window(window)[0], bins_from_window(window)[1]) + nbfN = coll[run_bin]["data"][st_ID]["residual_noise"].Integral(bins_from_window(window)[0], bins_from_window(window)[1]) + coll[run_bin]["data"][st_ID]["window_dependence"].SetBinContent(i+1, nbf/nbe) + coll[run_bin]["data"][st_ID]["window_dependence_noise"].SetBinContent(i+1, nbfN/nbe) + if nbf!=0: + coll[run_bin]["data"][st_ID]["window_dependence"].SetBinError(i+1, nbf**0.5*(nbe-nbf)**0.5*nbe**(-1.5)) + else: + coll[run_bin]["data"][st_ID]["window_dependence"].SetBinError(i+1, 0) + if nbfN!=0: + coll[run_bin]["data"][st_ID]["window_dependence_noise"].SetBinError(i+1, nbfN**0.5*(nbe-nbfN)**0.5*nbe**(-1.5)) + else: + coll[run_bin]["data"][st_ID]["window_dependence_noise"].SetBinError(i+1, 0) +# coll[run_bin]["data"][st_ID]["window_dependence"].GetXaxis().SetBinLabel(i+1,str(window)) + return coll + +def write_histogram(coll, mode, name): + f = 
R.TFile(name+"histos"+extra_name+".root","recreate") + for run_bin in coll: + try: + cdtof = f.mkdir(coll[run_bin]["comment"]) + except: + cdtof = f.mkdir(str(coll[run_bin]["run_start"])+"-"+str(coll[run_bin]["run_stop"])) + cdtof.cd() + for st_id in coll[run_bin]["data"]: + if mode == "Monitor": + coll[run_bin]["data"][st_id]["residual"].Write() + coll[run_bin]["data"][st_id]["unbiased_residual"].Write() + coll[run_bin]["data"][st_id]["rms_unbiased_residual"].Write() + else: + coll[run_bin]["data"][st_id]["residual"].Write() + coll[run_bin]["data"][st_id]["efficiency_hist"].Write() + coll[run_bin]["data"][st_id]["residual_noise"].Write() + coll[run_bin]["data"][st_id]["clusterSize"].Write() + coll[run_bin]["data"][st_id]["SNratio"].Write() + coll[run_bin]["data"][st_id]["noise_fraction_hist"].Write() + f.Close() + return True + +def write_window_eff_study(coll, mode, name): + f = R.TFile(name+"histos"+extra_name+".root","recreate") + for run_bin in coll: + try: + cdtof = f.mkdir(coll[run_bin]["comment"]) + except: + cdtof = f.mkdir(str(coll[run_bin]["run_start"])+"-"+str(coll[run_bin]["run_stop"])) + cdtof.cd() + for st_id in coll[run_bin]["data"]: + coll[run_bin]["data"][st_id]["window_dependence"].Write() + coll[run_bin]["data"][st_id]["window_dependence_noise"].Write() + f.Close() + return True + + + +def create_efficiency_trends(lite_coll, det, name): + + f = R.TFile(name+"histos"+extra_name+".root","recreate") + if det == "IT": + ST_Map = IT_Map_func() + else: + ST_Map = TT_Map_func() + for st_id in ST_Map: + efficiency = R.TH1F("eff:trend_"+ST_Map[st_id],"Changes of hit efficiency;;Efficiency",len(bin_vector)-1, array('d',bin_vector)) + residual_mean = R.TH1F("bias:trend_"+ST_Map[st_id],"Changes of the bias;;Bias, [mm]",len(bin_vector)-1, array('d',bin_vector)) + residual_width = R.TH1F("width:trend_"+ST_Map[st_id],"Changes of the hit resolution (width of residual);;Resolution, [mm]",len(bin_vector)-1, array('d',bin_vector)) + clusterSize_mean = 
R.TH1F("clusterSize_mean:trend_"+ST_Map[st_id],"Changes of cluster size;;Cluster size",len(bin_vector)-1, array('d',bin_vector)) + SNratio_max = R.TH1F("SNratio_max:trend_"+ST_Map[st_id],"Changes of S/N ratio;;S/N ratio",len(bin_vector)-1, array('d',bin_vector)) + noise_fraction = R.TH1F("noise_fraction:trend_"+ST_Map[st_id],"Changes of noise fraction;;Noise fraction",len(bin_vector)-1, array('d',bin_vector)) + + for i, run_bin in enumerate(sorted(lite_coll.keys())): + + # Efficiency + efficiency.SetBinContent(i+1, lite_coll[run_bin]["data"][st_id]["efficiency"]) + efficiency.SetBinError(i+1, lite_coll[run_bin]["data"][st_id]["err_efficiency"]) + if ("AllRuns" in bin_name) or ("BinByMonth" in bin_name) or ("BinByRunNumber" in bin_name): # don't want bin label for pT bins + try: + efficiency.GetXaxis().SetBinLabel(i+1,lite_coll[run_bin]["comment"]) + except: + efficiency.GetXaxis().SetBinLabel(i+1,str(lite_coll[run_bin]["run_start"])+"-"+str(lite_coll[run_bin]["run_stop"])) + + # Mean + residual_mean.SetBinContent(i+1, lite_coll[run_bin]["data"][st_id]["mean"]) + residual_mean.SetBinError(i+1, lite_coll[run_bin]["data"][st_id]["err_mean"]) + if ("AllRuns" in bin_name) or ("BinByMonth" in bin_name) or ("BinByRunNumber" in bin_name): # don't want bin label for pT histos + try: + residual_mean.GetXaxis().SetBinLabel(i+1,lite_coll[run_bin]["comment"]) + except: + residual_mean.GetXaxis().SetBinLabel(i+1,str(lite_coll[run_bin]["run_start"])+"-"+str(lite_coll[run_bin]["run_stop"])) + + # Width + residual_width.SetBinContent(i+1, lite_coll[run_bin]["data"][st_id]["width"]) + residual_width.SetBinError(i+1, lite_coll[run_bin]["data"][st_id]["err_width"]) + if ("AllRuns" in bin_name) or ("BinByMonth" in bin_name) or ("BinByRunNumber" in bin_name): # don't want bin label for pT histos + try: + residual_width.GetXaxis().SetBinLabel(i+1,lite_coll[run_bin]["comment"]) + except: + 
residual_width.GetXaxis().SetBinLabel(i+1,str(lite_coll[run_bin]["run_start"])+"-"+str(lite_coll[run_bin]["run_stop"])) + + # clusterSize + clusterSize_mean.SetBinContent(i+1, lite_coll[run_bin]["data"][st_id]["clusterSize_mean"]) + if ("AllRuns" in bin_name) or ("BinByMonth" in bin_name) or ("BinByRunNumber" in bin_name): # don't want bin label for pT histos + try: + clusterSize_mean.GetXaxis().SetBinLabel(i+1,lite_coll[run_bin]["comment"]) + except: + clusterSize_mean.GetXaxis().SetBinLabel(i+1,str(lite_coll[run_bin]["run_start"])+"-"+str(lite_coll[run_bin]["run_stop"])) + + # SNratio + SNratio_max.SetBinContent(i+1, lite_coll[run_bin]["data"][st_id]["SNratio_max"]) + if ("AllRuns" in bin_name) or ("BinByMonth" in bin_name) or ("BinByRunNumber" in bin_name): # don't want bin label for pT histos + try: + SNratio_max.GetXaxis().SetBinLabel(i+1,lite_coll[run_bin]["comment"]) + except: + SNratio_max.GetXaxis().SetBinLabel(i+1,str(lite_coll[run_bin]["run_start"])+"-"+str(lite_coll[run_bin]["run_stop"])) + + # noise_fraction + noise_fraction.SetBinContent(i+1, lite_coll[run_bin]["data"][st_id]["noise_fraction"]) + noise_fraction.SetBinError(i+1, lite_coll[run_bin]["data"][st_id]["noise_fraction_err"]) + if ("AllRuns" in bin_name) or ("BinByMonth" in bin_name) or ("BinByRunNumber" in bin_name): # don't want bin label for pT histos + try: + noise_fraction.GetXaxis().SetBinLabel(i+1,lite_coll[run_bin]["comment"]) + except: + noise_fraction.GetXaxis().SetBinLabel(i+1,str(lite_coll[run_bin]["run_start"])+"-"+str(lite_coll[run_bin]["run_stop"])) + + + + if ("AllRuns" in bin_name) or ("BinByMonth" in bin_name) or ("BinByRunNumber" in bin_name): # natural division for pT binning histo + efficiency.GetXaxis().SetNdivisions(-414) + residual_mean.GetXaxis().SetNdivisions(-414) + residual_width.GetXaxis().SetNdivisions(-414) + clusterSize_mean.GetXaxis().SetNdivisions(-414) + SNratio_max.GetXaxis().SetNdivisions(-414) + noise_fraction.GetXaxis().SetNdivisions(-414) + 
efficiency.Write() + residual_mean.Write() + residual_width.Write() + clusterSize_mean.Write() + SNratio_max.Write() + noise_fraction.Write() + + f.Close() + print "Residual & efficiency trends created at "+name+"histos.root" + return True + + + +###### *********** I did not modified this for the last version with noise ********** #### +def create_single_efficiency_trend(lite_coll, sector, plot_address): + #Checl if sector name is correct + gROOT.SetStyle("Modern") + gROOT.ForceStyle() + gROOT.ProcessLine(".x lhcbStyle.C") + gStyle.SetPadLeftMargin(0.2) + gROOT.ForceStyle() + setcor_is_found = False + if "IT" in sector: + ST_Map = IT_Map_func() + else: + ST_Map = TT_Map_func() + for i in ST_Map: + if ST_Map[i]==sector: + st_id = i + break + if not st_id: + print "Wrong sector. Plese use sector names like 'TTaXRegionBSector9'" + return False + + #Check if efficiency error is in collection. + #Check if sollection has information for given sector + erreff_in_collection = False + for bin in lite_coll: + if st_id in lite_coll[bin]["data"]: + if "err_efficiency" in lite_coll[bin]["data"][st_id]: + setcor_is_found = True + erreff_in_collection = True + break + if not setcor_is_found: + print "Trends are empty, please check that you use correct dataset for chosen sector" + return False + + efficiency = R.TH1F(sector,"Changes of hit efficiency of "+sector+";;Efficiency",len(lite_coll),0,1) + for i, run_bin in enumerate(sorted(lite_coll.keys())): + if st_id in lite_coll[run_bin]["data"]: + efficiency.SetBinContent(i+1, lite_coll[run_bin]["data"][st_id]["efficiency"]) + if erreff_in_collection: + efficiency.SetBinError(i+1, lite_coll[run_bin]["data"][st_id]["err_efficiency"]) + efficiency.GetXaxis().SetNdivisions(-414) + try: + efficiency.GetXaxis().SetBinLabel(i+1,lite_coll[run_bin]["comment"]) + except: + efficiency.GetXaxis().SetBinLabel(i+1,str(lite_coll[run_bin]["run_start"])+"-"+str(lite_coll[run_bin]["run_stop"])) + + efficiency.GetYaxis().SetTitleOffset(1.2) + c1 = 
R.TCanvas("c1","c1",600,600) + efficiency.Draw() + c1.SaveAs(plot_address+"Trend_Efficiency_Sector_"+sector+extra_name+".pdf") + c1.SaveAs(plot_address+"Trend_Efficiency_Sector_"+sector+extra_name+".C") + gROOT.SetStyle("Modern") + gROOT.ForceStyle() + + return True + + +if __name__ == "__main__": + print "Here are contained supplimentary functions for Tuple to Histogram transformation." + print "Normally, Tuples are stored as a python dictionaries (see create_coll, create_monitor_ind and create_efficiency_ind)" + print "Two type of dictioanries is considered: Efficiency-like and Monitor-like" + print "These dictionaries contains histograms, which a later stored in a .root file (In format which is recognized by interactie ST monitor)" + print "Basing on time binning, trend histograms are also created. (They are also saved in ST-monitor-friendly .root files)" + print "ST map plots are created by funcitons from CreateTTHist and CreateITHist files."