diff --git a/.gitignore b/.gitignore index bc4fbd3..90766d6 100644 --- a/.gitignore +++ b/.gitignore @@ -1,8 +1,10 @@ *.pyc *.txt *.DS_Store +*.zip plots/ +ana_overview_*/ *.swp *.pdf *.png diff --git a/NN/cfg/train_all.cfg.py b/NN/cfg/train_all_3ycmbd.cfg.py similarity index 97% rename from NN/cfg/train_all.cfg.py rename to NN/cfg/train_all_3ycmbd.cfg.py index 297290c..da70df9 100644 --- a/NN/cfg/train_all.cfg.py +++ b/NN/cfg/train_all_3ycmbd.cfg.py @@ -5,11 +5,9 @@ from collections import OrderedDict from os import environ as env -ch = 'mmm' -set_paths(ch, 2018) +set_paths('mmm', 2018) #FIXME this is just a dummy extra_selections = [ - 'hnl_2d_disp_sig>20', 'hnl_pt_12>15', 'sv_cos>0.99', 'sv_prob>0.001', @@ -87,6 +85,7 @@ 'hnl_m_12' , 'sv_prob' , 'hnl_2d_disp', + 'year' , 'channel' ,], composed_features = composed_features, @@ -104,7 +103,6 @@ selection_mc_eem = selection_eem + [cuts_eem.selections['is_prompt_lepton']], selection_tight = cuts_mmm.selections_pd['tight'], - lumi = 59700., epochs = 40, ) diff --git a/NN/cfg/train_all_CR_MRloose.cfg.py b/NN/cfg/train_all_CR_MRloose.cfg.py new file mode 100644 index 0000000..82a5662 --- /dev/null +++ b/NN/cfg/train_all_CR_MRloose.cfg.py @@ -0,0 +1,120 @@ +import numpy as np +from NN.nn_parametric_trainer import Trainer +from plotter.selections import Selections +from plotter.utils import set_paths +from collections import OrderedDict +from os import environ as env + +year = 2016 + +set_paths('mmm', year) #FIXME channel is just a dummy + +extra_selections = [ + 'hnl_pt_12>15', + 'sv_cos>0.90', ## MARTINA FOR STATS + # 'sv_prob>0.001', ## get rid of this for more stats, by MARTINA ... 
+] + +cuts_mmm = Selections('mmm') +selection_mmm = [ + cuts_mmm.selections['pt_iso'], + cuts_mmm.selections['baseline'], + cuts_mmm.selections['vetoes_12_OS'], + cuts_mmm.selections['vetoes_01_OS'], + cuts_mmm.selections['vetoes_02_OS'], + cuts_mmm.selections['sideband'], +] + extra_selections + +cuts_mem = Selections('mem') +selection_mem = [ + cuts_mem.selections['pt_iso'], + cuts_mem.selections['baseline'], + cuts_mem.selections['sideband'], + cuts_mem.selections['vetoes_02_OS'], +] + extra_selections + +cuts_eee = Selections('eee') +selection_eee = [ + cuts_eee.selections['pt_iso'], + cuts_eee.selections['baseline'], + cuts_eee.selections['vetoes_12_OS'], + cuts_eee.selections['vetoes_01_OS'], + cuts_eee.selections['vetoes_02_OS'], + cuts_eee.selections['sideband'], +] + extra_selections + +cuts_eem = Selections('eem') +selection_eem = [ + cuts_eem.selections['pt_iso'], + cuts_eem.selections['baseline'], + cuts_eem.selections['sideband'], + cuts_eem.selections['vetoes_01_OS'], +] + extra_selections + +if year == 2017: + selection_eee.append('l0_pt > 35') + selection_eem.append('l0_pt > 35') +if year == 2018: + selection_eee.append('l0_pt > 32') + selection_eem.append('l0_pt > 32') + +composed_features = OrderedDict() + +composed_features['abs_l0_eta' ] = lambda df : np.abs(df.l0_eta) +composed_features['abs_l1_eta' ] = lambda df : np.abs(df.l1_eta) +composed_features['abs_l2_eta' ] = lambda df : np.abs(df.l2_eta) +composed_features['log_abs_l0_dxy'] = lambda df : np.log10(np.abs(df.l0_dxy)) +composed_features['log_abs_l0_dz' ] = lambda df : np.log10(np.abs(df.l0_dz )) +composed_features['log_abs_l1_dxy'] = lambda df : np.log10(np.abs(df.l1_dxy)) +composed_features['log_abs_l1_dz' ] = lambda df : np.log10(np.abs(df.l1_dz )) +composed_features['log_abs_l2_dxy'] = lambda df : np.log10(np.abs(df.l2_dxy)) +composed_features['log_abs_l2_dz' ] = lambda df : np.log10(np.abs(df.l2_dz )) +composed_features['abs_q_02' ] = lambda df : np.abs(df.hnl_q_02) 
+composed_features['abs_q_01' ] = lambda df : np.abs(df.hnl_q_01) + +# https://stackoverflow.com/questions/20528328/numpy-logical-or-for-more-than-two-arguments +# save a label to distinguish different channels +# 1 = mmm +# 2 = mem_os +# 3 = mem_ss +# 4 = eee +# 5 = eem_os +# 6 = eem_ss +# composed_features['channel' ] = lambda df : 1 * (np.abs(df.l0_pdgid)==13 and np.abs(df.l1_pdgid)==13 and np.abs(df.l2_pdgid)==13) + 2 * (np.abs(df.l0_pdgid)==13 and np.abs(df.l1_pdgid)==11 and np.abs(df.l2_pdgid)==13 and df.hnl_q_02!=0) + 3 * (np.abs(df.l0_pdgid)==13 and np.abs(df.l1_pdgid)==11 and np.abs(df.l2_pdgid)==13 and df.hnl_q_02==0) + 4 * (np.abs(df.l0_pdgid)==11 and np.abs(df.l1_pdgid)==11 and np.abs(df.l2_pdgid)==11) + 5 * (np.abs(df.l0_pdgid)==11 and np.abs(df.l1_pdgid)==11 and np.abs(df.l2_pdgid)==13 and df.hnl_q_02!=0) + 6 * (np.abs(df.l0_pdgid)==11 and np.abs(df.l1_pdgid)==11 and np.abs(df.l2_pdgid)==13 and df.hnl_q_02==0) + +trainer = Trainer (channel = 'all_channels', + base_dir = env['NTUPLE_DIR'], + #post_fix = 'HNLTreeProducer_%s/tree.root' %ch, + post_fix = 'HNLTreeProducer/tree.root', + + years = [year] , + features = ['l0_pt' , + 'l1_pt' , + 'l2_pt' , + 'hnl_dr_12' , + 'hnl_m_12' , + 'sv_prob' , + 'hnl_2d_disp', + 'channel' ,], + + composed_features = composed_features, + + selection_data_mmm = selection_mmm, + selection_mc_mmm = selection_mmm + [cuts_mmm.selections['is_prompt_lepton']], + + selection_data_mem = selection_mem, + selection_mc_mem = selection_mem + [cuts_mem.selections['is_prompt_lepton']], + + selection_data_eee = selection_eee, + selection_mc_eee = selection_eee + [cuts_eee.selections['is_prompt_lepton']], + + selection_data_eem = selection_eem, + selection_mc_eem = selection_eem + [cuts_eem.selections['is_prompt_lepton']], + + selection_tight = cuts_mmm.selections_pd['tight'], + + epochs = 20, + ) + +if __name__ == '__main__': + trainer.train() diff --git a/NN/cfg/train_all_CR_bj.cfg.py b/NN/cfg/train_all_CR_bj.cfg.py new file mode 100644 
index 0000000..a6d0da3 --- /dev/null +++ b/NN/cfg/train_all_CR_bj.cfg.py @@ -0,0 +1,120 @@ +import numpy as np +from NN.nn_parametric_trainer import Trainer +from plotter.selections import Selections +from plotter.utils import set_paths +from collections import OrderedDict +from os import environ as env + +year = 2017 + +set_paths('mmm', year) #FIXME channel is just a dummy + +extra_selections = [ + 'hnl_pt_12>15', + 'sv_cos>0.90', ## MARTINA FOR STATS + # 'sv_prob>0.001', ## get rid of this for more stats, by MARTINA ... +] + +cuts_mmm = Selections('mmm') +selection_mmm = [ + cuts_mmm.selections['pt_iso'], + cuts_mmm.selections['baseline'], + cuts_mmm.selections['vetoes_12_OS'], + cuts_mmm.selections['vetoes_01_OS'], + cuts_mmm.selections['vetoes_02_OS'], + cuts_mmm.selections['CR_bj'], +] + extra_selections + +cuts_mem = Selections('mem') +selection_mem = [ + cuts_mem.selections['pt_iso'], + cuts_mem.selections['baseline'], + cuts_mem.selections['CR_bj'], + cuts_mem.selections['vetoes_02_OS'], +] + extra_selections + +cuts_eee = Selections('eee') +selection_eee = [ + cuts_eee.selections['pt_iso'], + cuts_eee.selections['baseline'], + cuts_eee.selections['vetoes_12_OS'], + cuts_eee.selections['vetoes_01_OS'], + cuts_eee.selections['vetoes_02_OS'], + cuts_eee.selections['CR_bj'], +] + extra_selections + +cuts_eem = Selections('eem') +selection_eem = [ + cuts_eem.selections['pt_iso'], + cuts_eem.selections['baseline'], + cuts_eem.selections['CR_bj'], + cuts_eem.selections['vetoes_01_OS'], +] + extra_selections + +if year == 2017: + selection_eee.append('l0_pt > 35') + selection_eem.append('l0_pt > 35') +if year == 2018: + selection_eee.append('l0_pt > 32') + selection_eem.append('l0_pt > 32') + +composed_features = OrderedDict() + +composed_features['abs_l0_eta' ] = lambda df : np.abs(df.l0_eta) +composed_features['abs_l1_eta' ] = lambda df : np.abs(df.l1_eta) +composed_features['abs_l2_eta' ] = lambda df : np.abs(df.l2_eta) +composed_features['log_abs_l0_dxy'] = 
lambda df : np.log10(np.abs(df.l0_dxy)) +composed_features['log_abs_l0_dz' ] = lambda df : np.log10(np.abs(df.l0_dz )) +composed_features['log_abs_l1_dxy'] = lambda df : np.log10(np.abs(df.l1_dxy)) +composed_features['log_abs_l1_dz' ] = lambda df : np.log10(np.abs(df.l1_dz )) +composed_features['log_abs_l2_dxy'] = lambda df : np.log10(np.abs(df.l2_dxy)) +composed_features['log_abs_l2_dz' ] = lambda df : np.log10(np.abs(df.l2_dz )) +composed_features['abs_q_02' ] = lambda df : np.abs(df.hnl_q_02) +composed_features['abs_q_01' ] = lambda df : np.abs(df.hnl_q_01) + +# https://stackoverflow.com/questions/20528328/numpy-logical-or-for-more-than-two-arguments +# save a label to distinguish different channels +# 1 = mmm +# 2 = mem_os +# 3 = mem_ss +# 4 = eee +# 5 = eem_os +# 6 = eem_ss +# composed_features['channel' ] = lambda df : 1 * (np.abs(df.l0_pdgid)==13 and np.abs(df.l1_pdgid)==13 and np.abs(df.l2_pdgid)==13) + 2 * (np.abs(df.l0_pdgid)==13 and np.abs(df.l1_pdgid)==11 and np.abs(df.l2_pdgid)==13 and df.hnl_q_02!=0) + 3 * (np.abs(df.l0_pdgid)==13 and np.abs(df.l1_pdgid)==11 and np.abs(df.l2_pdgid)==13 and df.hnl_q_02==0) + 4 * (np.abs(df.l0_pdgid)==11 and np.abs(df.l1_pdgid)==11 and np.abs(df.l2_pdgid)==11) + 5 * (np.abs(df.l0_pdgid)==11 and np.abs(df.l1_pdgid)==11 and np.abs(df.l2_pdgid)==13 and df.hnl_q_02!=0) + 6 * (np.abs(df.l0_pdgid)==11 and np.abs(df.l1_pdgid)==11 and np.abs(df.l2_pdgid)==13 and df.hnl_q_02==0) + +trainer = Trainer (channel = 'all_channels', + base_dir = env['NTUPLE_DIR'], + #post_fix = 'HNLTreeProducer_%s/tree.root' %ch, + post_fix = 'HNLTreeProducer/tree.root', + + years = [year] , + features = ['l0_pt' , + 'l1_pt' , + 'l2_pt' , + 'hnl_dr_12' , + 'hnl_m_12' , + 'sv_prob' , + 'hnl_2d_disp', + 'channel' ,], + + composed_features = composed_features, + + selection_data_mmm = selection_mmm, + selection_mc_mmm = selection_mmm + [cuts_mmm.selections['is_prompt_lepton']], + + selection_data_mem = selection_mem, + selection_mc_mem = selection_mem + 
[cuts_mem.selections['is_prompt_lepton']], + + selection_data_eee = selection_eee, + selection_mc_eee = selection_eee + [cuts_eee.selections['is_prompt_lepton']], + + selection_data_eem = selection_eem, + selection_mc_eem = selection_eem + [cuts_eem.selections['is_prompt_lepton']], + + selection_tight = cuts_mmm.selections_pd['tight'], + + epochs = 20, + ) + +if __name__ == '__main__': + trainer.train() diff --git a/NN/cfg/train_all_MR.cfg.py b/NN/cfg/train_all_MR.cfg.py new file mode 100644 index 0000000..5d5c311 --- /dev/null +++ b/NN/cfg/train_all_MR.cfg.py @@ -0,0 +1,129 @@ +import numpy as np +from NN.nn_parametric_trainer import Trainer +from plotter.selections import Selections +from plotter.utils import set_paths +from collections import OrderedDict +from os import environ as env + +year = 2018 + +set_paths('mmm', year) #FIXME channel is just a dummy + +region_label = 'w_disp_sig' + +sbtrct_prmpt = False +if sbtrct_prmpt: region_label += '_train_w_sbtr' +if not sbtrct_prmpt: region_label += '_train_WO_sbtr' + +extra_selections = [ + 'hnl_pt_12>15', + 'sv_cos>0.99', + 'sv_prob>0.001', + 'hnl_2d_disp_sig > 20', +] + +cuts_mmm = Selections('mmm') +selection_mmm = [ + cuts_mmm.selections['pt_iso'], + cuts_mmm.selections['baseline'], + cuts_mmm.selections['vetoes_12_OS'], + cuts_mmm.selections['vetoes_01_OS'], + cuts_mmm.selections['vetoes_02_OS'], + cuts_mmm.selections['sideband'], +] + extra_selections + +cuts_mem = Selections('mem') +selection_mem = [ + cuts_mem.selections['pt_iso'], + cuts_mem.selections['baseline'], + cuts_mem.selections['sideband'], + cuts_mem.selections['vetoes_02_OS'], +] + extra_selections + +cuts_eee = Selections('eee') +selection_eee = [ + cuts_eee.selections['pt_iso'], + cuts_eee.selections['baseline'], + cuts_eee.selections['vetoes_12_OS'], + cuts_eee.selections['vetoes_01_OS'], + cuts_eee.selections['vetoes_02_OS'], + cuts_eee.selections['sideband'], +] + extra_selections + +cuts_eem = Selections('eem') +selection_eem = [ + 
cuts_eem.selections['pt_iso'], + cuts_eem.selections['baseline'], + cuts_eem.selections['sideband'], + cuts_eem.selections['vetoes_01_OS'], +] + extra_selections + +if year == 2017: + selection_eee.append('l0_pt > 35') + selection_eem.append('l0_pt > 35') +if year == 2018: + selection_eee.append('l0_pt > 32') + selection_eem.append('l0_pt > 32') + +composed_features = OrderedDict() + +composed_features['abs_l0_eta' ] = lambda df : np.abs(df.l0_eta) +composed_features['abs_l1_eta' ] = lambda df : np.abs(df.l1_eta) +composed_features['abs_l2_eta' ] = lambda df : np.abs(df.l2_eta) +composed_features['log_abs_l0_dxy'] = lambda df : np.log10(np.abs(df.l0_dxy)) +composed_features['log_abs_l0_dz' ] = lambda df : np.log10(np.abs(df.l0_dz )) +composed_features['log_abs_l1_dxy'] = lambda df : np.log10(np.abs(df.l1_dxy)) +composed_features['log_abs_l1_dz' ] = lambda df : np.log10(np.abs(df.l1_dz )) +composed_features['log_abs_l2_dxy'] = lambda df : np.log10(np.abs(df.l2_dxy)) +composed_features['log_abs_l2_dz' ] = lambda df : np.log10(np.abs(df.l2_dz )) +composed_features['abs_q_02' ] = lambda df : np.abs(df.hnl_q_02) +composed_features['abs_q_01' ] = lambda df : np.abs(df.hnl_q_01) + +# https://stackoverflow.com/questions/20528328/numpy-logical-or-for-more-than-two-arguments +# save a label to distinguish different channels +# 1 = mmm +# 2 = mem_os +# 3 = mem_ss +# 4 = eee +# 5 = eem_os +# 6 = eem_ss +# composed_features['channel' ] = lambda df : 1 * (np.abs(df.l0_pdgid)==13 and np.abs(df.l1_pdgid)==13 and np.abs(df.l2_pdgid)==13) + 2 * (np.abs(df.l0_pdgid)==13 and np.abs(df.l1_pdgid)==11 and np.abs(df.l2_pdgid)==13 and df.hnl_q_02!=0) + 3 * (np.abs(df.l0_pdgid)==13 and np.abs(df.l1_pdgid)==11 and np.abs(df.l2_pdgid)==13 and df.hnl_q_02==0) + 4 * (np.abs(df.l0_pdgid)==11 and np.abs(df.l1_pdgid)==11 and np.abs(df.l2_pdgid)==11) + 5 * (np.abs(df.l0_pdgid)==11 and np.abs(df.l1_pdgid)==11 and np.abs(df.l2_pdgid)==13 and df.hnl_q_02!=0) + 6 * (np.abs(df.l0_pdgid)==11 and 
np.abs(df.l1_pdgid)==11 and np.abs(df.l2_pdgid)==13 and df.hnl_q_02==0) + +trainer = Trainer (channel = 'all_channels', + base_dir = env['NTUPLE_DIR'], + #post_fix = 'HNLTreeProducer_%s/tree.root' %ch, + post_fix = 'HNLTreeProducer/tree.root', + + region_label = region_label , + sbtrct_prmpt = sbtrct_prmpt , + years = [year] , + features = ['l0_pt' , + 'l1_pt' , + 'l2_pt' , + 'hnl_dr_12' , + 'hnl_m_12' , + 'sv_prob' , + 'hnl_2d_disp', + 'channel' ,], + + composed_features = composed_features, + + selection_data_mmm = selection_mmm, + selection_mc_mmm = selection_mmm + [cuts_mmm.selections['is_prompt_lepton']], + + selection_data_mem = selection_mem, + selection_mc_mem = selection_mem + [cuts_mem.selections['is_prompt_lepton']], + + selection_data_eee = selection_eee, + selection_mc_eee = selection_eee + [cuts_eee.selections['is_prompt_lepton']], + + selection_data_eem = selection_eem, + selection_mc_eem = selection_eem + [cuts_eem.selections['is_prompt_lepton']], + + selection_tight = cuts_mmm.selections_pd['tight'], + + epochs = 20, + ) + +if __name__ == '__main__': + trainer.train() diff --git a/NN/cfg/train_mmm.cfg.py b/NN/cfg/train_mmm.cfg.py index 0c874a5..530e52e 100644 --- a/NN/cfg/train_mmm.cfg.py +++ b/NN/cfg/train_mmm.cfg.py @@ -9,9 +9,36 @@ set_paths(ch, 2018) cuts = Selections(ch) +## LLP talk: LNT/T study for features +# trainer_MR = Trainer (channel = ch, + # base_dir = env['BASE_DIR'], + # post_fix = 'HNLTreeProducer_%s/tree.root' %ch, + + # features = ['l0_pt' , + # 'l1_pt' , + # 'l2_pt' , + # 'hnl_dr_12' , + # 'hnl_m_12' , + # 'sv_prob' , + # 'hnl_2d_disp' , + # ], + + # selection_data = ' & '.join([ cuts.selections['pt_iso'], cuts.selections['SR_sb_w_dxy'], cuts.selections['vetoes_12_OS'], cuts.selections['vetoes_01_OS'], + # cuts.selections['vetoes_02_OS'], ]), + + # selection_mc = ' & '.join([ cuts.selections['pt_iso'], cuts.selections['SR_sb_w_dxy'], cuts.selections['vetoes_12_OS'], cuts.selections['vetoes_01_OS'], + # 
cuts.selections['vetoes_02_OS'], cuts.selections['is_prompt_lepton'] ]), + + # selection_tight = cuts.selections_pd['tight'], + # lumi = 59700. + # ) + +# trainer_MR.train() + selection = [ cuts.selections['pt_iso'], - cuts.selections['baseline'], + # cuts.selections['baseline'], + cuts.selections['baseline_no_dxy'], ##VS 11/21/19 cuts.selections['vetoes_12_OS'], cuts.selections['vetoes_01_OS'], cuts.selections['vetoes_02_OS'], @@ -52,5 +79,6 @@ lumi = 59700. ) +# trainer.train() if __name__ == '__main__': trainer.train() diff --git a/NN/nn_parametric_trainer.py b/NN/nn_parametric_trainer.py index 8c38dcf..b154959 100644 --- a/NN/nn_parametric_trainer.py +++ b/NN/nn_parametric_trainer.py @@ -12,6 +12,7 @@ https://stackoverflow.com/questions/39691902/ordering-of-batch-normalization-and-dropout https://stats.stackexchange.com/questions/292278/can-one-theoretically-train-a-neural-network-with-fewer-training-samples-than ''' +from os import environ as env import root_pandas from time import time @@ -45,14 +46,12 @@ # fix random seed for reproducibility (FIXME! not really used by Keras) np.random.seed(1986) -# luminosity -lumi = 59700. 
- class Trainer(object): def __init__( self , channel , features , + years , composed_features , base_dir , post_fix , @@ -65,13 +64,17 @@ def __init__( selection_data_eem , selection_mc_eem , selection_tight , - lumi , + region_label = '' , + sbtrct_prmpt = True, epochs=1000 , early_stopping=True): self.channel = channel.split('_')[0] + self.region_label = region_label + self.sbtrct_prmpt = sbtrct_prmpt self.channel_extra = channel.split('_')[1] if len(channel.split('_'))>1 else '' self.features = features + self.years = years self.composed_features = composed_features self.base_dir = base_dir self.post_fix = post_fix @@ -85,62 +88,92 @@ def __init__( self.selection_mc_eem = ' & '.join(selection_mc_eem) self.selection_tight = selection_tight self.selection_lnt = 'not (%s)' %self.selection_tight - self.lumi = lumi self.epochs = epochs self.early_stopping = early_stopping def train(self): - net_dir_name = self.channel+'_'+self.channel_extra if len(self.channel_extra) else self.channel - net_dir = nn_dir(net_dir_name) - print('============> starting reading the trees') - print ('Net will be stored in: ', net_dir) - now = time() - # FIXME! temporary hack - data = get_data_samples('mmm', '/Users/manzoni/Documents/HNL/ntuples/2018/mmm', self.post_fix, self.selection_data_mmm) - data += get_data_samples('mem', '/Users/manzoni/Documents/HNL/ntuples/2018/mem', self.post_fix, self.selection_data_mem) - data += get_data_samples('eee', '/Users/manzoni/Documents/HNL/ntuples/2018/eee', self.post_fix, self.selection_data_eee) - data += get_data_samples('eem', '/Users/manzoni/Documents/HNL/ntuples/2018/eem', self.post_fix, self.selection_data_eem) - # FIXME! 
temporary hack - mc = get_mc_samples('mmm', '/Users/manzoni/Documents/HNL/ntuples/2018/bkg', 'HNLTreeProducer_mmm/tree.root', self.selection_mc_mmm) - mc += get_mc_samples('mem', '/Users/manzoni/Documents/HNL/ntuples/2018/bkg', 'HNLTreeProducer_mem/tree.root', self.selection_mc_mem) - mc += get_mc_samples('eee', '/Users/manzoni/Documents/HNL/ntuples/2018/bkg', 'HNLTreeProducer_eee/tree.root', self.selection_mc_eee) - mc += get_mc_samples('eem', '/Users/manzoni/Documents/HNL/ntuples/2018/bkg', 'HNLTreeProducer_eem/tree.root', self.selection_mc_eem) - - print('============> it took %.2f seconds' %(time() - now)) - - data_df = pd.concat([idt.df for idt in data], sort=False) - mc_df = pd.concat([imc.df for imc in mc], sort=False) - - # initial weights - data_df['weight'] = 1. - data_df['isdata'] = 0 - data_df['ismc'] = 0 - - passing_data = data_df.query(self.selection_tight) - failing_data = data_df.query(self.selection_lnt) - - for i, imc in enumerate(mc): - - imc.df['weight'] = -1. * self.lumi * imc.lumi_scaling * imc.df.lhe_weight - imc.df['isdata'] = 0 - imc.df['ismc'] = i+1 - - imc.df_tight = imc.df.query(self.selection_tight) - imc.df_lnt = imc.df.query(self.selection_lnt) - - passing_mc = pd.concat([imc.df_tight for imc in mc], sort=False) - failing_mc = pd.concat([imc.df_lnt for imc in mc], sort=False) - - passing = pd.concat ([passing_data, passing_mc], sort=False) - failing = pd.concat ([failing_data, failing_mc], sort=False) - - # targets - passing['target'] = np.ones (passing.shape[0]).astype(np.int) - failing['target'] = np.zeros(failing.shape[0]).astype(np.int) - - # concatenate the events and shuffle - main_df = pd.concat([passing, failing], sort=False) + net_dir_name = self.channel+'_'+str(self.years)+'_'+self.channel_extra if len(self.channel_extra) else self.channel + net_dir = nn_dir(net_dir_name, self.region_label) + + main_dfs = OrderedDict() + for year in self.years: + print('============> year = {year}; starting reading the 
trees'.format(year=year)) + print ('Net will be stored in: ', net_dir) + now = time() + + if year != 2018: + data = get_data_samples('mmm', env['NTUPLE_BASE_DIR'] + '{year}/data'.format(year=year), 'HNLTreeProducer_mmm/tree.root', self.selection_data_mmm, year) + data += get_data_samples('mem', env['NTUPLE_BASE_DIR'] + '{year}/data'.format(year=year), 'HNLTreeProducer_mem/tree.root', self.selection_data_mem, year) + data += get_data_samples('eee', env['NTUPLE_BASE_DIR'] + '{year}/data'.format(year=year), 'HNLTreeProducer_eee/tree.root', self.selection_data_eee, year) + data += get_data_samples('eem', env['NTUPLE_BASE_DIR'] + '{year}/data'.format(year=year), 'HNLTreeProducer_eem/tree.root', self.selection_data_eem, year) + + if year == 2018: + data = get_data_samples('mmm', env['NTUPLE_BASE_DIR'] + '{year}/mmm18'.format(year=year), self.post_fix, self.selection_data_mmm, year) + data += get_data_samples('mem', env['NTUPLE_BASE_DIR'] + '{year}/mem18'.format(year=year), self.post_fix, self.selection_data_mem, year) + data += get_data_samples('eee', env['NTUPLE_BASE_DIR'] + '{year}/eee18'.format(year=year), self.post_fix, self.selection_data_eee, year) + data += get_data_samples('eem', env['NTUPLE_BASE_DIR'] + '{year}/eem18'.format(year=year), self.post_fix, self.selection_data_eem, year) + + mc = get_mc_samples('mmm', env['NTUPLE_BASE_DIR'] + '{year}/mc'.format(year=year), 'HNLTreeProducer_mmm/tree.root', self.selection_mc_mmm, year) + mc += get_mc_samples('mem', env['NTUPLE_BASE_DIR'] + '{year}/mc'.format(year=year), 'HNLTreeProducer_mem/tree.root', self.selection_mc_mem, year) + mc += get_mc_samples('eee', env['NTUPLE_BASE_DIR'] + '{year}/mc'.format(year=year), 'HNLTreeProducer_eee/tree.root', self.selection_mc_eee, year) + mc += get_mc_samples('eem', env['NTUPLE_BASE_DIR'] + '{year}/mc'.format(year=year), 'HNLTreeProducer_eem/tree.root', self.selection_mc_eem, year) + + print('============> it took %.2f seconds' %(time() - now)) + + data_df = pd.concat([idt.df for 
idt in data], sort=False) + mc_df = pd.concat([imc.df for imc in mc], sort=False) + + # initial weights + data_df['weight'] = 1. + data_df['isdata'] = 0 + data_df['ismc'] = 0 + data_df['year'] = year + + passing_data = data_df.query(self.selection_tight) + failing_data = data_df.query(self.selection_lnt) + + lumi = -99 + if year == 2018: lumi = 59700 + elif year == 2017: lumi = 41530 + elif year == 2016: lumi = 35987 + assert lumi > 0, 'Lumi ERROR' + + for i, imc in enumerate(mc): + + imc.df['weight'] = -1. * lumi * imc.lumi_scaling * imc.df.lhe_weight + imc.df['isdata'] = 0 + imc.df['ismc'] = i+1 + imc.df['year'] = year + + imc.df_tight = imc.df.query(self.selection_tight) + imc.df_lnt = imc.df.query(self.selection_lnt) + + passing_mc = pd.concat([imc.df_tight for imc in mc], sort=False) + failing_mc = pd.concat([imc.df_lnt for imc in mc], sort=False) + + if self.sbtrct_prmpt: + print ('\n\tWARNING: SUBTRACT PROMPT IS ON\n') + passing = pd.concat ([passing_data, passing_mc], sort=False) + failing = pd.concat ([failing_data, failing_mc], sort=False) + + if not self.sbtrct_prmpt: + print ('\n\tWARNING: SUBTRACT PROMPT IS OFF\n') + passing = passing_data + failing = failing_data + + # targets + passing['target'] = np.ones (passing.shape[0]).astype(np.int) + failing['target'] = np.zeros(failing.shape[0]).astype(np.int) + + # concatenate the events and shuffle + main_dfs[year] = pd.concat([passing, failing], sort=False) + + df_list = [] + for yr in main_dfs.keys(): df_list.append(main_dfs[yr]) + + # main_df = pd.concat([main_dfs[2016], main_dfs[2017], main_dfs[2018]], sort=False) + main_df = pd.concat(df_list, sort=False) for k, v in self.composed_features.items(): main_df[k] = v(main_df) @@ -324,4 +357,3 @@ def train(self): # save ntuple main_df.to_root(net_dir + 'output_ntuple_weighted.root', key='tree', store_index=False) - diff --git a/NN/nn_parametric_trainer_per_year.py b/NN/nn_parametric_trainer_per_year.py new file mode 100644 index 0000000..33d4693 --- 
/dev/null +++ b/NN/nn_parametric_trainer_per_year.py @@ -0,0 +1,343 @@ +''' +Resources: + +Build TensorFlow with native CPU instructions (make it faster) +https://gist.github.com/winnerineast/05f63146e4b1e81ae08d14da2b38b11f + +https://en.wikipedia.org/wiki/Universal_approximation_theorem +http://neuralnetworksanddeeplearning.com/chap4.html +https://github.com/thomberg1/UniversalFunctionApproximation +https://cms-nanoaod-integration.web.cern.ch/integration/master-102X/mc102X_doc.html +https://ml-cheatsheet.readthedocs.io/en/latest/loss_functions.html +https://stackoverflow.com/questions/39691902/ordering-of-batch-normalization-and-dropout +https://stats.stackexchange.com/questions/292278/can-one-theoretically-train-a-neural-network-with-fewer-training-samples-than +''' +from os import environ as env +import root_pandas + +from time import time +import pickle +import numpy as np +import pandas as pd +import matplotlib.pyplot as plt + +from itertools import product +from collections import OrderedDict + +from root_numpy import root2array + +from keras.models import Sequential, Model +from keras.layers import Dense, Input, Dropout, BatchNormalization +from keras.utils import plot_model +from keras.callbacks import EarlyStopping, Callback, ReduceLROnPlateau, ModelCheckpoint +from keras import backend as K +from keras.activations import softmax +from keras.constraints import unit_norm +from keras.utils import to_categorical +from keras.optimizers import SGD, Adam + +from sklearn.model_selection import train_test_split +from sklearn.metrics import roc_curve, roc_auc_score + +from plotter.sample import get_data_samples, get_mc_samples, get_signal_samples +from plotter.utils import nn_dir +from plotter.selections import Selections + +# fix random seed for reproducibility (FIXME! 
not really used by Keras) +np.random.seed(1986) + +class Trainer(object): + def __init__( + self , + channel , + year , + features , + composed_features , + base_dir , + post_fix , + selection_data_mmm , + selection_mc_mmm , + selection_data_mem , + selection_mc_mem , + selection_data_eee , + selection_mc_eee , + selection_data_eem , + selection_mc_eem , + selection_tight , + epochs=1000 , + early_stopping=True): + + self.channel = channel.split('_')[0] + self.year = year + self.channel_extra = channel.split('_')[1] if len(channel.split('_'))>1 else '' + self.features = features + self.composed_features = composed_features + self.base_dir = base_dir + self.post_fix = post_fix + self.selection_data_mmm = ' & '.join(selection_data_mmm) + self.selection_mc_mmm = ' & '.join(selection_mc_mmm) + self.selection_data_mem = ' & '.join(selection_data_mem) + self.selection_mc_mem = ' & '.join(selection_mc_mem) + self.selection_data_eee = ' & '.join(selection_data_eee) + self.selection_mc_eee = ' & '.join(selection_mc_eee) + self.selection_data_eem = ' & '.join(selection_data_eem) + self.selection_mc_eem = ' & '.join(selection_mc_eem) + self.selection_tight = selection_tight + self.selection_lnt = 'not (%s)' %self.selection_tight + self.epochs = epochs + self.early_stopping = early_stopping + + self.lumi = -99 + if self.year == 2018: self.lumi = 59700. + elif self.year == 2017: self.lumi = 41500. + elif self.year == 2016: self.lumi = 35900. 
+ assert self.lumi > 0, 'LUMI ERROR' + + def train(self): + + net_dir_name = self.channel+'_'+str(self.year)+'_'+self.channel_extra if len(self.channel_extra) else self.channel + net_dir = nn_dir(net_dir_name) + print('============> year = {year}; starting reading the trees'.format(year=self.year)) + print ('Net will be stored in: ', net_dir) + now = time() + + if self.year != 2018: + data = get_data_samples('mmm', env['NTUPLE_BASE_DIR'] + '{year}/data'.format(year=self.year), 'HNLTreeProducer_mmm/tree.root', self.selection_data_mmm, self.year) + data += get_data_samples('mem', env['NTUPLE_BASE_DIR'] + '{year}/data'.format(year=self.year), 'HNLTreeProducer_mem/tree.root', self.selection_data_mem, self.year) + data += get_data_samples('eee', env['NTUPLE_BASE_DIR'] + '{year}/data'.format(year=self.year), 'HNLTreeProducer_eee/tree.root', self.selection_data_eee, self.year) + data += get_data_samples('eem', env['NTUPLE_BASE_DIR'] + '{year}/data'.format(year=self.year), 'HNLTreeProducer_eem/tree.root', self.selection_data_eem, self.year) + + if self.year == 2018: + data = get_data_samples('mmm', env['NTUPLE_BASE_DIR'] + '{year}/mmm18'.format(year=self.year), self.post_fix, self.selection_data_mmm, self.year) + data += get_data_samples('mem', env['NTUPLE_BASE_DIR'] + '{year}/mem18'.format(year=self.year), self.post_fix, self.selection_data_mem, self.year) + data += get_data_samples('eee', env['NTUPLE_BASE_DIR'] + '{year}/eee18'.format(year=self.year), self.post_fix, self.selection_data_eee, self.year) + data += get_data_samples('eem', env['NTUPLE_BASE_DIR'] + '{year}/eem18'.format(year=self.year), self.post_fix, self.selection_data_eem, self.year) + + mc = get_mc_samples('mmm', env['NTUPLE_BASE_DIR'] + '{year}/mc'.format(year=self.year), 'HNLTreeProducer_mmm/tree.root', self.selection_mc_mmm, self.year) + mc += get_mc_samples('mem', env['NTUPLE_BASE_DIR'] + '{year}/mc'.format(year=self.year), 'HNLTreeProducer_mem/tree.root', self.selection_mc_mem, self.year) + mc += 
get_mc_samples('eee', env['NTUPLE_BASE_DIR'] + '{year}/mc'.format(year=self.year), 'HNLTreeProducer_eee/tree.root', self.selection_mc_eee, self.year) + mc += get_mc_samples('eem', env['NTUPLE_BASE_DIR'] + '{year}/mc'.format(year=self.year), 'HNLTreeProducer_eem/tree.root', self.selection_mc_eem, self.year) + + print('============> it took %.2f seconds' %(time() - now)) + + data_df = pd.concat([idt.df for idt in data], sort=False) + mc_df = pd.concat([imc.df for imc in mc], sort=False) + + # initial weights + data_df['weight'] = 1. + data_df['isdata'] = 0 + data_df['ismc'] = 0 + + passing_data = data_df.query(self.selection_tight) + failing_data = data_df.query(self.selection_lnt) + # print('lnt', self.selection_lnt) + # print('tight', self.selection_tight) + # from pdb import set_trace; set_trace() + + for i, imc in enumerate(mc): + + imc.df['weight'] = -1. * self.lumi * imc.lumi_scaling * imc.df.lhe_weight + imc.df['isdata'] = 0 + imc.df['ismc'] = i+1 + + imc.df_tight = imc.df.query(self.selection_tight) + imc.df_lnt = imc.df.query(self.selection_lnt) + + passing_mc = pd.concat([imc.df_tight for imc in mc], sort=False) + failing_mc = pd.concat([imc.df_lnt for imc in mc], sort=False) + + passing = pd.concat ([passing_data, passing_mc], sort=False) + failing = pd.concat ([failing_data, failing_mc], sort=False) + + # targets + passing['target'] = np.ones (passing.shape[0]).astype(np.int) + failing['target'] = np.zeros(failing.shape[0]).astype(np.int) + + # concatenate the events and shuffle + main_df = pd.concat([passing, failing], sort=False) + # from pdb import set_trace; set_trace() + + for k, v in self.composed_features.items(): + main_df[k] = v(main_df) + self.features.append(k) + + # reindex to avoid duplicated indices, useful for batches + # https://stackoverflow.com/questions/27236275/what-does-valueerror-cannot-reindex-from-a-duplicate-axis)%20-mean + main_df.index = np.array(range(len(main_df))) + main_df = main_df.sample(frac=1, replace=False, 
random_state=1986) # shuffle + + # X and Y + X = pd.DataFrame(main_df, columns=list(set(self.features))) + # X = pd.DataFrame(main_df, columns=list(set(branches+features+['isnonprompt', 'ismain_df', 'ismc']))) + Y = pd.DataFrame(main_df, columns=['target']) + + # activation = 'tanh' + activation = 'selu' + # activation = 'sigmoid' + # activation = 'relu' + # activation = 'LeakyReLU' #?????? + + # define the net + input = Input((len(self.features),)) + layer = Dense(2048, activation=activation , name='dense1', kernel_constraint=unit_norm())(input) + layer = Dropout(0.5, name='dropout1')(layer) + layer = BatchNormalization()(layer) + # layer = Dense(256, activation=activation , name='dense2', kernel_constraint=unit_norm())(layer) + # layer = Dropout(0.4, name='dropout2')(layer) + # layer = BatchNormalization()(layer) + # layer = Dense(16, activation=activation , name='dense3', kernel_constraint=unit_norm())(layer) + # layer = Dropout(0.4, name='dropout3')(layer) + # layer = BatchNormalization()(layer) + # layer = Dense(16, activation=activation , name='dense4', kernel_constraint=unit_norm())(layer) + # layer = Dropout(0.4, name='dropout4')(layer) + # layer = BatchNormalization()(layer) + layer = Dense(32, activation=activation , name='dense5', kernel_constraint=unit_norm())(layer) + layer = Dropout(0.4, name='dropout5')(layer) + layer = BatchNormalization()(layer) + output = Dense( 1, activation='sigmoid', name='output', )(layer) + + # Define outputs of your model + model = Model(input, output) + + # choose your optimizer + # opt = SGD(lr=0.0001, momentum=0.8) + # opt = Adam(lr=0.001, decay=0.1, beta_1=0.9, beta_2=0.999, amsgrad=False) + opt = Adam(lr=0.01, decay=0.05, beta_1=0.9, beta_2=0.999, amsgrad=True) + # opt = 'Adam' + + # compile and choose your loss function (binary cross entropy for a 1-0 classification problem) + model.compile(optimizer=opt, loss='binary_crossentropy', metrics=['mae', 'acc']) + + # print net summary + print(model.summary()) + + # plot the 
models + # https://keras.io/visualization/ + plot_model(model, show_shapes=True, show_layer_names=True, to_file=net_dir+'model.png') + + # normalize inputs FIXME! do it, but do it wisely + # https://scikit-learn.org/stable/auto_examples/preprocessing/plot_all_scaling.html#sphx-glr-auto-examples-preprocessing-plot-all-scaling-py + # on QuantileTransformer + # Note that this transform is non-linear. It may distort linear + # correlations between variables measured at the same scale but renders + # variables measured at different scales more directly comparable. + # from sklearn.preprocessing import QuantileTransformer + # qt = QuantileTransformer(output_distribution='normal', random_state=1986) + # fit and FREEZE the transformation paramaters. + # Need to save these to be applied exactly as are when predicting on a different main_dfset + # qt.fit(X[features]) + # now transform + # xx = qt.transform(X[features]) + + # alternative way to scale the inputs + # https://main_dfscienceplus.com/keras-regression-based-neural-networks/ + + from sklearn.preprocessing import RobustScaler + qt = RobustScaler() + qt.fit(X[self.features]) + xx = qt.transform(X[self.features]) + + # save the frozen transformer + pickle.dump( qt, open( net_dir + 'input_tranformation_weighted.pck', 'wb' ) ) + + # save the exact list of features + pickle.dump( self.features, open( net_dir + 'input_features.pck', 'wb' ) ) + + # early stopping + # monitor = 'val_acc' + monitor = 'val_loss' + # monitor = 'val_mae' + es = EarlyStopping(monitor=monitor, mode='auto', verbose=1, patience=50, restore_best_weights=True) + + # reduce learning rate when at plateau, fine search the minimum + reduce_lr = ReduceLROnPlateau(monitor=monitor, mode='auto', factor=0.2, patience=5, min_lr=0.00001, cooldown=10, verbose=True) + + # save the model every now and then + filepath = net_dir + 'saved-model-{epoch:04d}_val_loss_{val_loss:.4f}_val_acc_{val_acc:.4f}.h5' + save_model = ModelCheckpoint(filepath, monitor='val_acc', 
verbose=1, save_best_only=True, save_weights_only=False, mode='auto', period=1) + + # weight the events according to their displacement (favour high displacement) + weight = np.array(main_df.weight * np.power(X['hnl_2d_disp'], 0.25)) + + # train only the classifier. beta is set at 0 and the discriminator is not trained + callbacks = [reduce_lr, save_model] + if self.early_stopping: + callbacks.append(es) + history = model.fit(xx, Y, epochs=self.epochs, validation_split=0.5, callbacks=callbacks, batch_size=32, verbose=True, sample_weight=weight) + + # plot loss function trends for train and validation sample + plt.clf() + plt.title('loss') + plt.plot(history.history['loss'], label='train') + plt.plot(history.history['val_loss'], label='test') + plt.legend() + # plt.yscale('log') + center = min(history.history['val_loss'] + history.history['loss']) + plt.ylim((center*0.98, center*1.5)) + plt.grid(True) + plt.savefig(net_dir + 'loss_function_history_weighted.pdf') + plt.clf() + + # plot accuracy trends for train and validation sample + plt.title('accuracy') + plt.plot(history.history['acc'], label='train') + plt.plot(history.history['val_acc'], label='test') + plt.legend() + center = max(history.history['val_acc'] + history.history['acc']) + plt.ylim((center*0.85, center*1.02)) + # plt.yscale('log') + plt.grid(True) + plt.savefig(net_dir + 'accuracy_history_weighted.pdf') + plt.clf() + + # plot accuracy trends for train and validation sample + plt.title('mean absolute error') + plt.plot(history.history['mae'], label='train') + plt.plot(history.history['val_mae'], label='test') + plt.legend() + center = min(history.history['val_mae'] + history.history['mae']) + plt.ylim((center*0.98, center*1.5)) + # plt.yscale('log') + plt.grid(True) + plt.savefig(net_dir + 'mean_absolute_error_history_weighted.pdf') + plt.clf() + + # calculate predictions on the main_df sample + print('predicting on', main_df.shape[0], 'events') + x = pd.DataFrame(main_df, columns=self.features) + # 
y = model.predict(x) + # load the transformation with the correct parameters! + qt = pickle.load(open( net_dir + 'input_tranformation_weighted.pck', 'rb' )) + xx = qt.transform(x[self.features]) + y = model.predict(xx) + + # impose norm conservation if you want probabilities + # compute the overall rescaling factor scale + scale = 1. + # scale = np.sum(passing['target']) / np.sum(y) + + # add the score to the main_df sample + main_df.insert(len(main_df.columns), 'fr', scale * y) + + # let sklearn do the heavy lifting and compute the ROC curves for you + fpr, tpr, wps = roc_curve(main_df.target, main_df.fr) + plt.plot(fpr, tpr) + xy = [i*j for i,j in product([10.**i for i in range(-8, 0)], [1,2,4,8])]+[1] + plt.plot(xy, xy, color='grey', linestyle='--') + plt.yscale('linear') + plt.savefig(net_dir + 'roc_weighted.pdf') + + # save model and weights + model.save(net_dir + 'net_model_weighted.h5') + # model.save_weights('net_model_weights.h5') + + # rename branches, if you want + # main_df.rename( + # index=str, + # columns={'cand_refit_mass12': 'mass12',}, + # inplace=True) + + # save ntuple + main_df.to_root(net_dir + 'output_ntuple_weighted.root', key='tree', store_index=False) + + diff --git a/NN/nn_trainer.py b/NN/nn_trainer.py index bf763f7..b8f38da 100644 --- a/NN/nn_trainer.py +++ b/NN/nn_trainer.py @@ -89,7 +89,8 @@ def train(self): # mc = get_mc_samples (self.channel, '/Users/manzoni/Documents/HNL/ntuples/2018/bkg', 'HNLTreeProducer_eee/tree.root', self.selection_mc) # mc = get_mc_samples (self.channel, '/Users/manzoni/Documents/HNL/ntuples/2018/bkg', 'HNLTreeProducer_mmm/tree.root', self.selection_mc) # mc = get_mc_samples (self.channel, '/Users/manzoni/Documents/HNL/ntuples/2018/bkg', 'HNLTreeProducer_mem/tree.root', self.selection_mc) - mc = get_mc_samples (self.channel, '/Users/manzoni/Documents/HNL/ntuples/2018/bkg', 'HNLTreeProducer_eem/tree.root', self.selection_mc) + # mc = get_mc_samples (self.channel, '/Users/manzoni/Documents/HNL/ntuples/2018/bkg', 
'HNLTreeProducer_eem/tree.root', self.selection_mc) + mc = get_mc_samples (self.channel, '/Users/cesareborgia/cernbox/ntuples/2018/all_channels/', 'HNLTreeProducer_%s/tree.root' %self.channel, self.selection_mc) print('============> it took %.2f seconds' %(time() - now)) diff --git a/NN/trainings/all_2016_channels_200117_12h_55m/input_features.pck b/NN/trainings/all_2016_channels_200117_12h_55m/input_features.pck new file mode 100644 index 0000000..df7f74d Binary files /dev/null and b/NN/trainings/all_2016_channels_200117_12h_55m/input_features.pck differ diff --git a/NN/trainings/all_2016_channels_200117_12h_55m/input_tranformation_weighted.pck b/NN/trainings/all_2016_channels_200117_12h_55m/input_tranformation_weighted.pck new file mode 100644 index 0000000..690c5bf Binary files /dev/null and b/NN/trainings/all_2016_channels_200117_12h_55m/input_tranformation_weighted.pck differ diff --git a/NN/trainings/all_2016_channels_200117_12h_55m/net_model_weighted.h5 b/NN/trainings/all_2016_channels_200117_12h_55m/net_model_weighted.h5 new file mode 100644 index 0000000..143c231 Binary files /dev/null and b/NN/trainings/all_2016_channels_200117_12h_55m/net_model_weighted.h5 differ diff --git a/NN/trainings/all_2016_channels_200210_10h_30m/input_features.pck b/NN/trainings/all_2016_channels_200210_10h_30m/input_features.pck new file mode 100644 index 0000000..df7f74d Binary files /dev/null and b/NN/trainings/all_2016_channels_200210_10h_30m/input_features.pck differ diff --git a/NN/trainings/all_2016_channels_200210_10h_30m/input_tranformation_weighted.pck b/NN/trainings/all_2016_channels_200210_10h_30m/input_tranformation_weighted.pck new file mode 100644 index 0000000..690c5bf Binary files /dev/null and b/NN/trainings/all_2016_channels_200210_10h_30m/input_tranformation_weighted.pck differ diff --git a/NN/trainings/all_2016_channels_200210_10h_30m/net_model_weighted.h5 b/NN/trainings/all_2016_channels_200210_10h_30m/net_model_weighted.h5 new file mode 100644 index 
0000000..63a51cd Binary files /dev/null and b/NN/trainings/all_2016_channels_200210_10h_30m/net_model_weighted.h5 differ diff --git a/NN/trainings/all_2016_channels_200212_16h_25m/input_features.pck b/NN/trainings/all_2016_channels_200212_16h_25m/input_features.pck new file mode 100644 index 0000000..df7f74d Binary files /dev/null and b/NN/trainings/all_2016_channels_200212_16h_25m/input_features.pck differ diff --git a/NN/trainings/all_2016_channels_200212_16h_25m/input_tranformation_weighted.pck b/NN/trainings/all_2016_channels_200212_16h_25m/input_tranformation_weighted.pck new file mode 100644 index 0000000..f86132e Binary files /dev/null and b/NN/trainings/all_2016_channels_200212_16h_25m/input_tranformation_weighted.pck differ diff --git a/NN/trainings/all_2016_channels_200212_16h_25m/net_model_weighted.h5 b/NN/trainings/all_2016_channels_200212_16h_25m/net_model_weighted.h5 new file mode 100644 index 0000000..d779f54 Binary files /dev/null and b/NN/trainings/all_2016_channels_200212_16h_25m/net_model_weighted.h5 differ diff --git a/NN/trainings/all_2016_channels_200213_11h_23m/input_features.pck b/NN/trainings/all_2016_channels_200213_11h_23m/input_features.pck new file mode 100644 index 0000000..df7f74d Binary files /dev/null and b/NN/trainings/all_2016_channels_200213_11h_23m/input_features.pck differ diff --git a/NN/trainings/all_2016_channels_200213_11h_23m/input_tranformation_weighted.pck b/NN/trainings/all_2016_channels_200213_11h_23m/input_tranformation_weighted.pck new file mode 100644 index 0000000..f8595c6 Binary files /dev/null and b/NN/trainings/all_2016_channels_200213_11h_23m/input_tranformation_weighted.pck differ diff --git a/NN/trainings/all_2016_channels_200213_11h_23m/net_model_weighted.h5 b/NN/trainings/all_2016_channels_200213_11h_23m/net_model_weighted.h5 new file mode 100644 index 0000000..97dd0b1 Binary files /dev/null and b/NN/trainings/all_2016_channels_200213_11h_23m/net_model_weighted.h5 differ diff --git 
a/NN/trainings/all_2017_channels_200116_15h_15m/input_features.pck b/NN/trainings/all_2017_channels_200116_15h_15m/input_features.pck new file mode 100644 index 0000000..df7f74d Binary files /dev/null and b/NN/trainings/all_2017_channels_200116_15h_15m/input_features.pck differ diff --git a/NN/trainings/all_2017_channels_200116_15h_15m/input_tranformation_weighted.pck b/NN/trainings/all_2017_channels_200116_15h_15m/input_tranformation_weighted.pck new file mode 100644 index 0000000..3390953 Binary files /dev/null and b/NN/trainings/all_2017_channels_200116_15h_15m/input_tranformation_weighted.pck differ diff --git a/NN/trainings/all_2017_channels_200116_15h_15m/net_model_weighted.h5 b/NN/trainings/all_2017_channels_200116_15h_15m/net_model_weighted.h5 new file mode 100644 index 0000000..c921923 Binary files /dev/null and b/NN/trainings/all_2017_channels_200116_15h_15m/net_model_weighted.h5 differ diff --git a/NN/trainings/all_2017_channels_200117_12h_44m/input_features.pck b/NN/trainings/all_2017_channels_200117_12h_44m/input_features.pck new file mode 100644 index 0000000..df7f74d Binary files /dev/null and b/NN/trainings/all_2017_channels_200117_12h_44m/input_features.pck differ diff --git a/NN/trainings/all_2017_channels_200117_12h_44m/input_tranformation_weighted.pck b/NN/trainings/all_2017_channels_200117_12h_44m/input_tranformation_weighted.pck new file mode 100644 index 0000000..3390953 Binary files /dev/null and b/NN/trainings/all_2017_channels_200117_12h_44m/input_tranformation_weighted.pck differ diff --git a/NN/trainings/all_2017_channels_200117_12h_44m/net_model_weighted.h5 b/NN/trainings/all_2017_channels_200117_12h_44m/net_model_weighted.h5 new file mode 100644 index 0000000..6d42ec7 Binary files /dev/null and b/NN/trainings/all_2017_channels_200117_12h_44m/net_model_weighted.h5 differ diff --git a/NN/trainings/all_2017_channels_200210_10h_38m/input_features.pck b/NN/trainings/all_2017_channels_200210_10h_38m/input_features.pck new file mode 100644 
index 0000000..df7f74d Binary files /dev/null and b/NN/trainings/all_2017_channels_200210_10h_38m/input_features.pck differ diff --git a/NN/trainings/all_2017_channels_200210_10h_38m/input_tranformation_weighted.pck b/NN/trainings/all_2017_channels_200210_10h_38m/input_tranformation_weighted.pck new file mode 100644 index 0000000..3390953 Binary files /dev/null and b/NN/trainings/all_2017_channels_200210_10h_38m/input_tranformation_weighted.pck differ diff --git a/NN/trainings/all_2017_channels_200210_10h_38m/net_model_weighted.h5 b/NN/trainings/all_2017_channels_200210_10h_38m/net_model_weighted.h5 new file mode 100644 index 0000000..d4ef8df Binary files /dev/null and b/NN/trainings/all_2017_channels_200210_10h_38m/net_model_weighted.h5 differ diff --git a/NN/trainings/all_2017_channels_200212_16h_14m/input_features.pck b/NN/trainings/all_2017_channels_200212_16h_14m/input_features.pck new file mode 100644 index 0000000..df7f74d Binary files /dev/null and b/NN/trainings/all_2017_channels_200212_16h_14m/input_features.pck differ diff --git a/NN/trainings/all_2017_channels_200212_16h_14m/input_tranformation_weighted.pck b/NN/trainings/all_2017_channels_200212_16h_14m/input_tranformation_weighted.pck new file mode 100644 index 0000000..06f25b0 Binary files /dev/null and b/NN/trainings/all_2017_channels_200212_16h_14m/input_tranformation_weighted.pck differ diff --git a/NN/trainings/all_2017_channels_200212_16h_14m/net_model_weighted.h5 b/NN/trainings/all_2017_channels_200212_16h_14m/net_model_weighted.h5 new file mode 100644 index 0000000..2a76e4c Binary files /dev/null and b/NN/trainings/all_2017_channels_200212_16h_14m/net_model_weighted.h5 differ diff --git a/NN/trainings/all_2017_channels_200213_11h_48m/input_features.pck b/NN/trainings/all_2017_channels_200213_11h_48m/input_features.pck new file mode 100644 index 0000000..df7f74d Binary files /dev/null and b/NN/trainings/all_2017_channels_200213_11h_48m/input_features.pck differ diff --git 
a/NN/trainings/all_2017_channels_200213_11h_48m/input_tranformation_weighted.pck b/NN/trainings/all_2017_channels_200213_11h_48m/input_tranformation_weighted.pck new file mode 100644 index 0000000..8a21847 Binary files /dev/null and b/NN/trainings/all_2017_channels_200213_11h_48m/input_tranformation_weighted.pck differ diff --git a/NN/trainings/all_2017_channels_200213_11h_48m/net_model_weighted.h5 b/NN/trainings/all_2017_channels_200213_11h_48m/net_model_weighted.h5 new file mode 100644 index 0000000..6e8e8bd Binary files /dev/null and b/NN/trainings/all_2017_channels_200213_11h_48m/net_model_weighted.h5 differ diff --git a/NN/trainings/all_2018_channels_200117_11h_43m/input_features.pck b/NN/trainings/all_2018_channels_200117_11h_43m/input_features.pck new file mode 100644 index 0000000..df7f74d Binary files /dev/null and b/NN/trainings/all_2018_channels_200117_11h_43m/input_features.pck differ diff --git a/NN/trainings/all_2018_channels_200117_11h_43m/input_tranformation_weighted.pck b/NN/trainings/all_2018_channels_200117_11h_43m/input_tranformation_weighted.pck new file mode 100644 index 0000000..a634422 Binary files /dev/null and b/NN/trainings/all_2018_channels_200117_11h_43m/input_tranformation_weighted.pck differ diff --git a/NN/trainings/all_2018_channels_200117_11h_43m/net_model_weighted.h5 b/NN/trainings/all_2018_channels_200117_11h_43m/net_model_weighted.h5 new file mode 100644 index 0000000..a798b1f Binary files /dev/null and b/NN/trainings/all_2018_channels_200117_11h_43m/net_model_weighted.h5 differ diff --git a/NN/trainings/all_2018_channels_200210_10h_44m/input_features.pck b/NN/trainings/all_2018_channels_200210_10h_44m/input_features.pck new file mode 100644 index 0000000..df7f74d Binary files /dev/null and b/NN/trainings/all_2018_channels_200210_10h_44m/input_features.pck differ diff --git a/NN/trainings/all_2018_channels_200210_10h_44m/input_tranformation_weighted.pck 
b/NN/trainings/all_2018_channels_200210_10h_44m/input_tranformation_weighted.pck new file mode 100644 index 0000000..a634422 Binary files /dev/null and b/NN/trainings/all_2018_channels_200210_10h_44m/input_tranformation_weighted.pck differ diff --git a/NN/trainings/all_2018_channels_200210_10h_44m/net_model_weighted.h5 b/NN/trainings/all_2018_channels_200210_10h_44m/net_model_weighted.h5 new file mode 100644 index 0000000..e4cb4d6 Binary files /dev/null and b/NN/trainings/all_2018_channels_200210_10h_44m/net_model_weighted.h5 differ diff --git a/NN/trainings/all_2018_channels_200212_15h_39m/input_features.pck b/NN/trainings/all_2018_channels_200212_15h_39m/input_features.pck new file mode 100644 index 0000000..df7f74d Binary files /dev/null and b/NN/trainings/all_2018_channels_200212_15h_39m/input_features.pck differ diff --git a/NN/trainings/all_2018_channels_200212_15h_39m/input_tranformation_weighted.pck b/NN/trainings/all_2018_channels_200212_15h_39m/input_tranformation_weighted.pck new file mode 100644 index 0000000..977c79f Binary files /dev/null and b/NN/trainings/all_2018_channels_200212_15h_39m/input_tranformation_weighted.pck differ diff --git a/NN/trainings/all_2018_channels_200212_15h_39m/net_model_weighted.h5 b/NN/trainings/all_2018_channels_200212_15h_39m/net_model_weighted.h5 new file mode 100644 index 0000000..5ce2406 Binary files /dev/null and b/NN/trainings/all_2018_channels_200212_15h_39m/net_model_weighted.h5 differ diff --git a/NN/trainings/all_[2016]_channels_200306_14h_57m_CR_bj_with_svProb0p001/input_features.pck b/NN/trainings/all_[2016]_channels_200306_14h_57m_CR_bj_with_svProb0p001/input_features.pck new file mode 100644 index 0000000..df7f74d Binary files /dev/null and b/NN/trainings/all_[2016]_channels_200306_14h_57m_CR_bj_with_svProb0p001/input_features.pck differ diff --git a/NN/trainings/all_[2016]_channels_200306_14h_57m_CR_bj_with_svProb0p001/input_tranformation_weighted.pck 
b/NN/trainings/all_[2016]_channels_200306_14h_57m_CR_bj_with_svProb0p001/input_tranformation_weighted.pck new file mode 100644 index 0000000..72211e3 Binary files /dev/null and b/NN/trainings/all_[2016]_channels_200306_14h_57m_CR_bj_with_svProb0p001/input_tranformation_weighted.pck differ diff --git a/NN/trainings/all_[2016]_channels_200306_14h_57m_CR_bj_with_svProb0p001/net_model_weighted.h5 b/NN/trainings/all_[2016]_channels_200306_14h_57m_CR_bj_with_svProb0p001/net_model_weighted.h5 new file mode 100644 index 0000000..8825f3c Binary files /dev/null and b/NN/trainings/all_[2016]_channels_200306_14h_57m_CR_bj_with_svProb0p001/net_model_weighted.h5 differ diff --git a/NN/trainings/all_[2016]_channels_200309_11h_31m_CR_MRloose_with_svProb0p001/input_features.pck b/NN/trainings/all_[2016]_channels_200309_11h_31m_CR_MRloose_with_svProb0p001/input_features.pck new file mode 100644 index 0000000..df7f74d Binary files /dev/null and b/NN/trainings/all_[2016]_channels_200309_11h_31m_CR_MRloose_with_svProb0p001/input_features.pck differ diff --git a/NN/trainings/all_[2016]_channels_200309_11h_31m_CR_MRloose_with_svProb0p001/input_tranformation_weighted.pck b/NN/trainings/all_[2016]_channels_200309_11h_31m_CR_MRloose_with_svProb0p001/input_tranformation_weighted.pck new file mode 100644 index 0000000..c8f8652 Binary files /dev/null and b/NN/trainings/all_[2016]_channels_200309_11h_31m_CR_MRloose_with_svProb0p001/input_tranformation_weighted.pck differ diff --git a/NN/trainings/all_[2016]_channels_200309_11h_31m_CR_MRloose_with_svProb0p001/net_model_weighted.h5 b/NN/trainings/all_[2016]_channels_200309_11h_31m_CR_MRloose_with_svProb0p001/net_model_weighted.h5 new file mode 100644 index 0000000..4d53558 Binary files /dev/null and b/NN/trainings/all_[2016]_channels_200309_11h_31m_CR_MRloose_with_svProb0p001/net_model_weighted.h5 differ diff --git a/NN/trainings/all_[2016]_channels_200309_15h_34m_CR_bj_no_svProb/input_features.pck 
b/NN/trainings/all_[2016]_channels_200309_15h_34m_CR_bj_no_svProb/input_features.pck new file mode 100644 index 0000000..df7f74d Binary files /dev/null and b/NN/trainings/all_[2016]_channels_200309_15h_34m_CR_bj_no_svProb/input_features.pck differ diff --git a/NN/trainings/all_[2016]_channels_200309_15h_34m_CR_bj_no_svProb/input_tranformation_weighted.pck b/NN/trainings/all_[2016]_channels_200309_15h_34m_CR_bj_no_svProb/input_tranformation_weighted.pck new file mode 100644 index 0000000..625c31f Binary files /dev/null and b/NN/trainings/all_[2016]_channels_200309_15h_34m_CR_bj_no_svProb/input_tranformation_weighted.pck differ diff --git a/NN/trainings/all_[2016]_channels_200309_15h_34m_CR_bj_no_svProb/net_model_weighted.h5 b/NN/trainings/all_[2016]_channels_200309_15h_34m_CR_bj_no_svProb/net_model_weighted.h5 new file mode 100644 index 0000000..94f506a Binary files /dev/null and b/NN/trainings/all_[2016]_channels_200309_15h_34m_CR_bj_no_svProb/net_model_weighted.h5 differ diff --git a/NN/trainings/all_[2016]_channels_200309_16h_11m_CR_MRloose_no_svProb/input_features.pck b/NN/trainings/all_[2016]_channels_200309_16h_11m_CR_MRloose_no_svProb/input_features.pck new file mode 100644 index 0000000..df7f74d Binary files /dev/null and b/NN/trainings/all_[2016]_channels_200309_16h_11m_CR_MRloose_no_svProb/input_features.pck differ diff --git a/NN/trainings/all_[2016]_channels_200309_16h_11m_CR_MRloose_no_svProb/input_tranformation_weighted.pck b/NN/trainings/all_[2016]_channels_200309_16h_11m_CR_MRloose_no_svProb/input_tranformation_weighted.pck new file mode 100644 index 0000000..dab1464 Binary files /dev/null and b/NN/trainings/all_[2016]_channels_200309_16h_11m_CR_MRloose_no_svProb/input_tranformation_weighted.pck differ diff --git a/NN/trainings/all_[2016]_channels_200309_16h_11m_CR_MRloose_no_svProb/net_model_weighted.h5 b/NN/trainings/all_[2016]_channels_200309_16h_11m_CR_MRloose_no_svProb/net_model_weighted.h5 new file mode 100644 index 0000000..7b03784 Binary 
files /dev/null and b/NN/trainings/all_[2016]_channels_200309_16h_11m_CR_MRloose_no_svProb/net_model_weighted.h5 differ diff --git a/NN/trainings/all_[2016]_channels_200324_11h_50m_MR_no_disp_sig_latest/input_features.pck b/NN/trainings/all_[2016]_channels_200324_11h_50m_MR_no_disp_sig_latest/input_features.pck new file mode 100644 index 0000000..df7f74d Binary files /dev/null and b/NN/trainings/all_[2016]_channels_200324_11h_50m_MR_no_disp_sig_latest/input_features.pck differ diff --git a/NN/trainings/all_[2016]_channels_200324_11h_50m_MR_no_disp_sig_latest/input_tranformation_weighted.pck b/NN/trainings/all_[2016]_channels_200324_11h_50m_MR_no_disp_sig_latest/input_tranformation_weighted.pck new file mode 100644 index 0000000..419c4bd Binary files /dev/null and b/NN/trainings/all_[2016]_channels_200324_11h_50m_MR_no_disp_sig_latest/input_tranformation_weighted.pck differ diff --git a/NN/trainings/all_[2016]_channels_200324_11h_50m_MR_no_disp_sig_latest/net_model_weighted.h5 b/NN/trainings/all_[2016]_channels_200324_11h_50m_MR_no_disp_sig_latest/net_model_weighted.h5 new file mode 100644 index 0000000..80a7f64 Binary files /dev/null and b/NN/trainings/all_[2016]_channels_200324_11h_50m_MR_no_disp_sig_latest/net_model_weighted.h5 differ diff --git a/NN/trainings/all_[2016]_channels_200325_12h_23m_no_sbtr/input_features.pck b/NN/trainings/all_[2016]_channels_200325_12h_23m_no_sbtr/input_features.pck new file mode 100644 index 0000000..df7f74d Binary files /dev/null and b/NN/trainings/all_[2016]_channels_200325_12h_23m_no_sbtr/input_features.pck differ diff --git a/NN/trainings/all_[2016]_channels_200325_12h_23m_no_sbtr/input_tranformation_weighted.pck b/NN/trainings/all_[2016]_channels_200325_12h_23m_no_sbtr/input_tranformation_weighted.pck new file mode 100644 index 0000000..a2d79bd Binary files /dev/null and b/NN/trainings/all_[2016]_channels_200325_12h_23m_no_sbtr/input_tranformation_weighted.pck differ diff --git 
a/NN/trainings/all_[2016]_channels_200325_12h_23m_no_sbtr/net_model_weighted.h5 b/NN/trainings/all_[2016]_channels_200325_12h_23m_no_sbtr/net_model_weighted.h5 new file mode 100644 index 0000000..475ef72 Binary files /dev/null and b/NN/trainings/all_[2016]_channels_200325_12h_23m_no_sbtr/net_model_weighted.h5 differ diff --git a/NN/trainings/all_[2016]_channels_200326_9h_37m_w_disp_sig_train_WO_sbtr/input_features.pck b/NN/trainings/all_[2016]_channels_200326_9h_37m_w_disp_sig_train_WO_sbtr/input_features.pck new file mode 100644 index 0000000..df7f74d Binary files /dev/null and b/NN/trainings/all_[2016]_channels_200326_9h_37m_w_disp_sig_train_WO_sbtr/input_features.pck differ diff --git a/NN/trainings/all_[2016]_channels_200326_9h_37m_w_disp_sig_train_WO_sbtr/input_tranformation_weighted.pck b/NN/trainings/all_[2016]_channels_200326_9h_37m_w_disp_sig_train_WO_sbtr/input_tranformation_weighted.pck new file mode 100644 index 0000000..c2ef339 Binary files /dev/null and b/NN/trainings/all_[2016]_channels_200326_9h_37m_w_disp_sig_train_WO_sbtr/input_tranformation_weighted.pck differ diff --git a/NN/trainings/all_[2016]_channels_200326_9h_37m_w_disp_sig_train_WO_sbtr/net_model_weighted.h5 b/NN/trainings/all_[2016]_channels_200326_9h_37m_w_disp_sig_train_WO_sbtr/net_model_weighted.h5 new file mode 100644 index 0000000..4d66449 Binary files /dev/null and b/NN/trainings/all_[2016]_channels_200326_9h_37m_w_disp_sig_train_WO_sbtr/net_model_weighted.h5 differ diff --git a/NN/trainings/all_[2017]_channels_200306_14h_56m_CR_bj_with_svProb0p001/input_features.pck b/NN/trainings/all_[2017]_channels_200306_14h_56m_CR_bj_with_svProb0p001/input_features.pck new file mode 100644 index 0000000..df7f74d Binary files /dev/null and b/NN/trainings/all_[2017]_channels_200306_14h_56m_CR_bj_with_svProb0p001/input_features.pck differ diff --git a/NN/trainings/all_[2017]_channels_200306_14h_56m_CR_bj_with_svProb0p001/input_tranformation_weighted.pck 
b/NN/trainings/all_[2017]_channels_200306_14h_56m_CR_bj_with_svProb0p001/input_tranformation_weighted.pck new file mode 100644 index 0000000..8074541 Binary files /dev/null and b/NN/trainings/all_[2017]_channels_200306_14h_56m_CR_bj_with_svProb0p001/input_tranformation_weighted.pck differ diff --git a/NN/trainings/all_[2017]_channels_200306_14h_56m_CR_bj_with_svProb0p001/net_model_weighted.h5 b/NN/trainings/all_[2017]_channels_200306_14h_56m_CR_bj_with_svProb0p001/net_model_weighted.h5 new file mode 100644 index 0000000..06cfba5 Binary files /dev/null and b/NN/trainings/all_[2017]_channels_200306_14h_56m_CR_bj_with_svProb0p001/net_model_weighted.h5 differ diff --git a/NN/trainings/all_[2017]_channels_200309_13h_3m_CR_MRloose_with_svProb0p001/input_features.pck b/NN/trainings/all_[2017]_channels_200309_13h_3m_CR_MRloose_with_svProb0p001/input_features.pck new file mode 100644 index 0000000..df7f74d Binary files /dev/null and b/NN/trainings/all_[2017]_channels_200309_13h_3m_CR_MRloose_with_svProb0p001/input_features.pck differ diff --git a/NN/trainings/all_[2017]_channels_200309_13h_3m_CR_MRloose_with_svProb0p001/input_tranformation_weighted.pck b/NN/trainings/all_[2017]_channels_200309_13h_3m_CR_MRloose_with_svProb0p001/input_tranformation_weighted.pck new file mode 100644 index 0000000..1d4b05b Binary files /dev/null and b/NN/trainings/all_[2017]_channels_200309_13h_3m_CR_MRloose_with_svProb0p001/input_tranformation_weighted.pck differ diff --git a/NN/trainings/all_[2017]_channels_200309_13h_3m_CR_MRloose_with_svProb0p001/net_model_weighted.h5 b/NN/trainings/all_[2017]_channels_200309_13h_3m_CR_MRloose_with_svProb0p001/net_model_weighted.h5 new file mode 100644 index 0000000..0158efa Binary files /dev/null and b/NN/trainings/all_[2017]_channels_200309_13h_3m_CR_MRloose_with_svProb0p001/net_model_weighted.h5 differ diff --git a/NN/trainings/all_[2017]_channels_200309_15h_53m_CR_MRloose_no_svProb/input_features.pck 
b/NN/trainings/all_[2017]_channels_200309_15h_53m_CR_MRloose_no_svProb/input_features.pck new file mode 100644 index 0000000..df7f74d Binary files /dev/null and b/NN/trainings/all_[2017]_channels_200309_15h_53m_CR_MRloose_no_svProb/input_features.pck differ diff --git a/NN/trainings/all_[2017]_channels_200309_15h_53m_CR_MRloose_no_svProb/input_tranformation_weighted.pck b/NN/trainings/all_[2017]_channels_200309_15h_53m_CR_MRloose_no_svProb/input_tranformation_weighted.pck new file mode 100644 index 0000000..04a332c Binary files /dev/null and b/NN/trainings/all_[2017]_channels_200309_15h_53m_CR_MRloose_no_svProb/input_tranformation_weighted.pck differ diff --git a/NN/trainings/all_[2017]_channels_200309_15h_53m_CR_MRloose_no_svProb/net_model_weighted.h5 b/NN/trainings/all_[2017]_channels_200309_15h_53m_CR_MRloose_no_svProb/net_model_weighted.h5 new file mode 100644 index 0000000..e6e53fc Binary files /dev/null and b/NN/trainings/all_[2017]_channels_200309_15h_53m_CR_MRloose_no_svProb/net_model_weighted.h5 differ diff --git a/NN/trainings/all_[2017]_channels_200309_16h_11m_CR_bj_no_svProb/input_features.pck b/NN/trainings/all_[2017]_channels_200309_16h_11m_CR_bj_no_svProb/input_features.pck new file mode 100644 index 0000000..df7f74d Binary files /dev/null and b/NN/trainings/all_[2017]_channels_200309_16h_11m_CR_bj_no_svProb/input_features.pck differ diff --git a/NN/trainings/all_[2017]_channels_200309_16h_11m_CR_bj_no_svProb/input_tranformation_weighted.pck b/NN/trainings/all_[2017]_channels_200309_16h_11m_CR_bj_no_svProb/input_tranformation_weighted.pck new file mode 100644 index 0000000..5b07642 Binary files /dev/null and b/NN/trainings/all_[2017]_channels_200309_16h_11m_CR_bj_no_svProb/input_tranformation_weighted.pck differ diff --git a/NN/trainings/all_[2017]_channels_200309_16h_11m_CR_bj_no_svProb/net_model_weighted.h5 b/NN/trainings/all_[2017]_channels_200309_16h_11m_CR_bj_no_svProb/net_model_weighted.h5 new file mode 100644 index 0000000..db92628 Binary 
files /dev/null and b/NN/trainings/all_[2017]_channels_200309_16h_11m_CR_bj_no_svProb/net_model_weighted.h5 differ diff --git a/NN/trainings/all_[2017]_channels_200324_11h_53m_MR_no_disp_sig_latest/input_features.pck b/NN/trainings/all_[2017]_channels_200324_11h_53m_MR_no_disp_sig_latest/input_features.pck new file mode 100644 index 0000000..df7f74d Binary files /dev/null and b/NN/trainings/all_[2017]_channels_200324_11h_53m_MR_no_disp_sig_latest/input_features.pck differ diff --git a/NN/trainings/all_[2017]_channels_200324_11h_53m_MR_no_disp_sig_latest/input_tranformation_weighted.pck b/NN/trainings/all_[2017]_channels_200324_11h_53m_MR_no_disp_sig_latest/input_tranformation_weighted.pck new file mode 100644 index 0000000..99b7f96 Binary files /dev/null and b/NN/trainings/all_[2017]_channels_200324_11h_53m_MR_no_disp_sig_latest/input_tranformation_weighted.pck differ diff --git a/NN/trainings/all_[2017]_channels_200324_11h_53m_MR_no_disp_sig_latest/net_model_weighted.h5 b/NN/trainings/all_[2017]_channels_200324_11h_53m_MR_no_disp_sig_latest/net_model_weighted.h5 new file mode 100644 index 0000000..b8e97fd Binary files /dev/null and b/NN/trainings/all_[2017]_channels_200324_11h_53m_MR_no_disp_sig_latest/net_model_weighted.h5 differ diff --git a/NN/trainings/all_[2017]_channels_200325_12h_30m_no_sbtr/input_features.pck b/NN/trainings/all_[2017]_channels_200325_12h_30m_no_sbtr/input_features.pck new file mode 100644 index 0000000..df7f74d Binary files /dev/null and b/NN/trainings/all_[2017]_channels_200325_12h_30m_no_sbtr/input_features.pck differ diff --git a/NN/trainings/all_[2017]_channels_200325_12h_30m_no_sbtr/input_tranformation_weighted.pck b/NN/trainings/all_[2017]_channels_200325_12h_30m_no_sbtr/input_tranformation_weighted.pck new file mode 100644 index 0000000..3bc0c8f Binary files /dev/null and b/NN/trainings/all_[2017]_channels_200325_12h_30m_no_sbtr/input_tranformation_weighted.pck differ diff --git 
a/NN/trainings/all_[2017]_channels_200325_12h_30m_no_sbtr/net_model_weighted.h5 b/NN/trainings/all_[2017]_channels_200325_12h_30m_no_sbtr/net_model_weighted.h5 new file mode 100644 index 0000000..6eced9e Binary files /dev/null and b/NN/trainings/all_[2017]_channels_200325_12h_30m_no_sbtr/net_model_weighted.h5 differ diff --git a/NN/trainings/all_[2017]_channels_200326_9h_51m_w_disp_sig_train_WO_sbtr/input_features.pck b/NN/trainings/all_[2017]_channels_200326_9h_51m_w_disp_sig_train_WO_sbtr/input_features.pck new file mode 100644 index 0000000..df7f74d Binary files /dev/null and b/NN/trainings/all_[2017]_channels_200326_9h_51m_w_disp_sig_train_WO_sbtr/input_features.pck differ diff --git a/NN/trainings/all_[2017]_channels_200326_9h_51m_w_disp_sig_train_WO_sbtr/input_tranformation_weighted.pck b/NN/trainings/all_[2017]_channels_200326_9h_51m_w_disp_sig_train_WO_sbtr/input_tranformation_weighted.pck new file mode 100644 index 0000000..5fbdfe6 Binary files /dev/null and b/NN/trainings/all_[2017]_channels_200326_9h_51m_w_disp_sig_train_WO_sbtr/input_tranformation_weighted.pck differ diff --git a/NN/trainings/all_[2017]_channels_200326_9h_51m_w_disp_sig_train_WO_sbtr/net_model_weighted.h5 b/NN/trainings/all_[2017]_channels_200326_9h_51m_w_disp_sig_train_WO_sbtr/net_model_weighted.h5 new file mode 100644 index 0000000..853ba64 Binary files /dev/null and b/NN/trainings/all_[2017]_channels_200326_9h_51m_w_disp_sig_train_WO_sbtr/net_model_weighted.h5 differ diff --git a/NN/trainings/all_[2018]_channels_200306_14h_57m_CR_bj_with_svProb0p001/input_features.pck b/NN/trainings/all_[2018]_channels_200306_14h_57m_CR_bj_with_svProb0p001/input_features.pck new file mode 100644 index 0000000..df7f74d Binary files /dev/null and b/NN/trainings/all_[2018]_channels_200306_14h_57m_CR_bj_with_svProb0p001/input_features.pck differ diff --git a/NN/trainings/all_[2018]_channels_200306_14h_57m_CR_bj_with_svProb0p001/input_tranformation_weighted.pck 
b/NN/trainings/all_[2018]_channels_200306_14h_57m_CR_bj_with_svProb0p001/input_tranformation_weighted.pck new file mode 100644 index 0000000..6676a4e Binary files /dev/null and b/NN/trainings/all_[2018]_channels_200306_14h_57m_CR_bj_with_svProb0p001/input_tranformation_weighted.pck differ diff --git a/NN/trainings/all_[2018]_channels_200306_14h_57m_CR_bj_with_svProb0p001/net_model_weighted.h5 b/NN/trainings/all_[2018]_channels_200306_14h_57m_CR_bj_with_svProb0p001/net_model_weighted.h5 new file mode 100644 index 0000000..053c8cc Binary files /dev/null and b/NN/trainings/all_[2018]_channels_200306_14h_57m_CR_bj_with_svProb0p001/net_model_weighted.h5 differ diff --git a/NN/trainings/all_[2018]_channels_200309_15h_33m_CR_MRloose_no_svProb/input_features.pck b/NN/trainings/all_[2018]_channels_200309_15h_33m_CR_MRloose_no_svProb/input_features.pck new file mode 100644 index 0000000..df7f74d Binary files /dev/null and b/NN/trainings/all_[2018]_channels_200309_15h_33m_CR_MRloose_no_svProb/input_features.pck differ diff --git a/NN/trainings/all_[2018]_channels_200309_15h_33m_CR_MRloose_no_svProb/input_tranformation_weighted.pck b/NN/trainings/all_[2018]_channels_200309_15h_33m_CR_MRloose_no_svProb/input_tranformation_weighted.pck new file mode 100644 index 0000000..9f1e1fe Binary files /dev/null and b/NN/trainings/all_[2018]_channels_200309_15h_33m_CR_MRloose_no_svProb/input_tranformation_weighted.pck differ diff --git a/NN/trainings/all_[2018]_channels_200309_15h_33m_CR_MRloose_no_svProb/net_model_weighted.h5 b/NN/trainings/all_[2018]_channels_200309_15h_33m_CR_MRloose_no_svProb/net_model_weighted.h5 new file mode 100644 index 0000000..20440ff Binary files /dev/null and b/NN/trainings/all_[2018]_channels_200309_15h_33m_CR_MRloose_no_svProb/net_model_weighted.h5 differ diff --git a/NN/trainings/all_[2018]_channels_200309_15h_4m_CR_MRloose_with_svProb0p001/input_features.pck b/NN/trainings/all_[2018]_channels_200309_15h_4m_CR_MRloose_with_svProb0p001/input_features.pck new 
file mode 100644 index 0000000..df7f74d Binary files /dev/null and b/NN/trainings/all_[2018]_channels_200309_15h_4m_CR_MRloose_with_svProb0p001/input_features.pck differ diff --git a/NN/trainings/all_[2018]_channels_200309_15h_4m_CR_MRloose_with_svProb0p001/input_tranformation_weighted.pck b/NN/trainings/all_[2018]_channels_200309_15h_4m_CR_MRloose_with_svProb0p001/input_tranformation_weighted.pck new file mode 100644 index 0000000..42a3f7b Binary files /dev/null and b/NN/trainings/all_[2018]_channels_200309_15h_4m_CR_MRloose_with_svProb0p001/input_tranformation_weighted.pck differ diff --git a/NN/trainings/all_[2018]_channels_200309_15h_4m_CR_MRloose_with_svProb0p001/net_model_weighted.h5 b/NN/trainings/all_[2018]_channels_200309_15h_4m_CR_MRloose_with_svProb0p001/net_model_weighted.h5 new file mode 100644 index 0000000..60b0b7a Binary files /dev/null and b/NN/trainings/all_[2018]_channels_200309_15h_4m_CR_MRloose_with_svProb0p001/net_model_weighted.h5 differ diff --git a/NN/trainings/all_[2018]_channels_200309_15h_53m_CR_bj_no_svProb/input_features.pck b/NN/trainings/all_[2018]_channels_200309_15h_53m_CR_bj_no_svProb/input_features.pck new file mode 100644 index 0000000..df7f74d Binary files /dev/null and b/NN/trainings/all_[2018]_channels_200309_15h_53m_CR_bj_no_svProb/input_features.pck differ diff --git a/NN/trainings/all_[2018]_channels_200309_15h_53m_CR_bj_no_svProb/input_tranformation_weighted.pck b/NN/trainings/all_[2018]_channels_200309_15h_53m_CR_bj_no_svProb/input_tranformation_weighted.pck new file mode 100644 index 0000000..c2b073a Binary files /dev/null and b/NN/trainings/all_[2018]_channels_200309_15h_53m_CR_bj_no_svProb/input_tranformation_weighted.pck differ diff --git a/NN/trainings/all_[2018]_channels_200309_15h_53m_CR_bj_no_svProb/net_model_weighted.h5 b/NN/trainings/all_[2018]_channels_200309_15h_53m_CR_bj_no_svProb/net_model_weighted.h5 new file mode 100644 index 0000000..f28e79b Binary files /dev/null and 
b/NN/trainings/all_[2018]_channels_200309_15h_53m_CR_bj_no_svProb/net_model_weighted.h5 differ diff --git a/NN/trainings/all_[2018]_channels_200324_11h_55m_MR_no_disp_sig_latest/input_features.pck b/NN/trainings/all_[2018]_channels_200324_11h_55m_MR_no_disp_sig_latest/input_features.pck new file mode 100644 index 0000000..df7f74d Binary files /dev/null and b/NN/trainings/all_[2018]_channels_200324_11h_55m_MR_no_disp_sig_latest/input_features.pck differ diff --git a/NN/trainings/all_[2018]_channels_200324_11h_55m_MR_no_disp_sig_latest/input_tranformation_weighted.pck b/NN/trainings/all_[2018]_channels_200324_11h_55m_MR_no_disp_sig_latest/input_tranformation_weighted.pck new file mode 100644 index 0000000..c0e9a0f Binary files /dev/null and b/NN/trainings/all_[2018]_channels_200324_11h_55m_MR_no_disp_sig_latest/input_tranformation_weighted.pck differ diff --git a/NN/trainings/all_[2018]_channels_200324_11h_55m_MR_no_disp_sig_latest/net_model_weighted.h5 b/NN/trainings/all_[2018]_channels_200324_11h_55m_MR_no_disp_sig_latest/net_model_weighted.h5 new file mode 100644 index 0000000..4cb2956 Binary files /dev/null and b/NN/trainings/all_[2018]_channels_200324_11h_55m_MR_no_disp_sig_latest/net_model_weighted.h5 differ diff --git a/NN/trainings/all_[2018]_channels_200325_12h_37m_no_sbtr/input_features.pck b/NN/trainings/all_[2018]_channels_200325_12h_37m_no_sbtr/input_features.pck new file mode 100644 index 0000000..df7f74d Binary files /dev/null and b/NN/trainings/all_[2018]_channels_200325_12h_37m_no_sbtr/input_features.pck differ diff --git a/NN/trainings/all_[2018]_channels_200325_12h_37m_no_sbtr/input_tranformation_weighted.pck b/NN/trainings/all_[2018]_channels_200325_12h_37m_no_sbtr/input_tranformation_weighted.pck new file mode 100644 index 0000000..fba9408 Binary files /dev/null and b/NN/trainings/all_[2018]_channels_200325_12h_37m_no_sbtr/input_tranformation_weighted.pck differ diff --git 
a/NN/trainings/all_[2018]_channels_200325_12h_37m_no_sbtr/net_model_weighted.h5 b/NN/trainings/all_[2018]_channels_200325_12h_37m_no_sbtr/net_model_weighted.h5 new file mode 100644 index 0000000..cba0c16 Binary files /dev/null and b/NN/trainings/all_[2018]_channels_200325_12h_37m_no_sbtr/net_model_weighted.h5 differ diff --git a/NN/trainings/all_[2018]_channels_200326_10h_20m_w_disp_sig_train_WO_sbtr/input_features.pck b/NN/trainings/all_[2018]_channels_200326_10h_20m_w_disp_sig_train_WO_sbtr/input_features.pck new file mode 100644 index 0000000..df7f74d Binary files /dev/null and b/NN/trainings/all_[2018]_channels_200326_10h_20m_w_disp_sig_train_WO_sbtr/input_features.pck differ diff --git a/NN/trainings/all_[2018]_channels_200326_10h_20m_w_disp_sig_train_WO_sbtr/input_tranformation_weighted.pck b/NN/trainings/all_[2018]_channels_200326_10h_20m_w_disp_sig_train_WO_sbtr/input_tranformation_weighted.pck new file mode 100644 index 0000000..b69643f Binary files /dev/null and b/NN/trainings/all_[2018]_channels_200326_10h_20m_w_disp_sig_train_WO_sbtr/input_tranformation_weighted.pck differ diff --git a/NN/trainings/all_[2018]_channels_200326_10h_20m_w_disp_sig_train_WO_sbtr/net_model_weighted.h5 b/NN/trainings/all_[2018]_channels_200326_10h_20m_w_disp_sig_train_WO_sbtr/net_model_weighted.h5 new file mode 100644 index 0000000..f279e04 Binary files /dev/null and b/NN/trainings/all_[2018]_channels_200326_10h_20m_w_disp_sig_train_WO_sbtr/net_model_weighted.h5 differ diff --git a/NN/trainings/all_channels_200116_14h_51m/input_features.pck b/NN/trainings/all_channels_200116_14h_51m/input_features.pck new file mode 100644 index 0000000..df7f74d Binary files /dev/null and b/NN/trainings/all_channels_200116_14h_51m/input_features.pck differ diff --git a/NN/trainings/all_channels_200116_14h_51m/input_tranformation_weighted.pck b/NN/trainings/all_channels_200116_14h_51m/input_tranformation_weighted.pck new file mode 100644 index 0000000..570f35c Binary files /dev/null and 
b/NN/trainings/all_channels_200116_14h_51m/input_tranformation_weighted.pck differ diff --git a/NN/trainings/all_channels_200116_14h_51m/net_model_weighted.h5 b/NN/trainings/all_channels_200116_14h_51m/net_model_weighted.h5 new file mode 100644 index 0000000..6c9e0a8 Binary files /dev/null and b/NN/trainings/all_channels_200116_14h_51m/net_model_weighted.h5 differ diff --git a/NN/trainings/all_channels_200116_15h_5m/input_features.pck b/NN/trainings/all_channels_200116_15h_5m/input_features.pck new file mode 100644 index 0000000..df7f74d Binary files /dev/null and b/NN/trainings/all_channels_200116_15h_5m/input_features.pck differ diff --git a/NN/trainings/all_channels_200116_15h_5m/input_tranformation_weighted.pck b/NN/trainings/all_channels_200116_15h_5m/input_tranformation_weighted.pck new file mode 100644 index 0000000..48c9a43 Binary files /dev/null and b/NN/trainings/all_channels_200116_15h_5m/input_tranformation_weighted.pck differ diff --git a/NN/trainings/all_channels_200116_15h_5m/net_model_weighted.h5 b/NN/trainings/all_channels_200116_15h_5m/net_model_weighted.h5 new file mode 100644 index 0000000..cc55bc0 Binary files /dev/null and b/NN/trainings/all_channels_200116_15h_5m/net_model_weighted.h5 differ diff --git a/NN/trainings/all_channels_200211_16h_13m/input_features.pck b/NN/trainings/all_channels_200211_16h_13m/input_features.pck new file mode 100644 index 0000000..5df6a8a Binary files /dev/null and b/NN/trainings/all_channels_200211_16h_13m/input_features.pck differ diff --git a/NN/trainings/all_channels_200211_16h_13m/input_tranformation_weighted.pck b/NN/trainings/all_channels_200211_16h_13m/input_tranformation_weighted.pck new file mode 100644 index 0000000..c6b9d39 Binary files /dev/null and b/NN/trainings/all_channels_200211_16h_13m/input_tranformation_weighted.pck differ diff --git a/NN/trainings/all_channels_200211_16h_13m/net_model_weighted.h5 b/NN/trainings/all_channels_200211_16h_13m/net_model_weighted.h5 new file mode 100644 index 
0000000..e6275c8 Binary files /dev/null and b/NN/trainings/all_channels_200211_16h_13m/net_model_weighted.h5 differ diff --git a/NN/trainings/all_channels_200211_17h_21m/input_features.pck b/NN/trainings/all_channels_200211_17h_21m/input_features.pck new file mode 100644 index 0000000..df7f74d Binary files /dev/null and b/NN/trainings/all_channels_200211_17h_21m/input_features.pck differ diff --git a/NN/trainings/all_channels_200211_17h_21m/input_tranformation_weighted.pck b/NN/trainings/all_channels_200211_17h_21m/input_tranformation_weighted.pck new file mode 100644 index 0000000..c04d268 Binary files /dev/null and b/NN/trainings/all_channels_200211_17h_21m/input_tranformation_weighted.pck differ diff --git a/NN/trainings/all_channels_200211_17h_21m/net_model_weighted.h5 b/NN/trainings/all_channels_200211_17h_21m/net_model_weighted.h5 new file mode 100644 index 0000000..d2a0b05 Binary files /dev/null and b/NN/trainings/all_channels_200211_17h_21m/net_model_weighted.h5 differ diff --git a/NN/trainings/all_channels_200212_11h_21m/input_features.pck b/NN/trainings/all_channels_200212_11h_21m/input_features.pck new file mode 100644 index 0000000..2008b6d Binary files /dev/null and b/NN/trainings/all_channels_200212_11h_21m/input_features.pck differ diff --git a/NN/trainings/all_channels_200212_11h_21m/input_tranformation_weighted.pck b/NN/trainings/all_channels_200212_11h_21m/input_tranformation_weighted.pck new file mode 100644 index 0000000..d0876de Binary files /dev/null and b/NN/trainings/all_channels_200212_11h_21m/input_tranformation_weighted.pck differ diff --git a/NN/trainings/all_channels_200212_11h_21m/net_model_weighted.h5 b/NN/trainings/all_channels_200212_11h_21m/net_model_weighted.h5 new file mode 100644 index 0000000..b3cfd27 Binary files /dev/null and b/NN/trainings/all_channels_200212_11h_21m/net_model_weighted.h5 differ diff --git a/NN/trainings/all_channels_200212_11h_51m/input_features.pck 
b/NN/trainings/all_channels_200212_11h_51m/input_features.pck new file mode 100644 index 0000000..2008b6d Binary files /dev/null and b/NN/trainings/all_channels_200212_11h_51m/input_features.pck differ diff --git a/NN/trainings/all_channels_200212_11h_51m/input_tranformation_weighted.pck b/NN/trainings/all_channels_200212_11h_51m/input_tranformation_weighted.pck new file mode 100644 index 0000000..281e25c Binary files /dev/null and b/NN/trainings/all_channels_200212_11h_51m/input_tranformation_weighted.pck differ diff --git a/NN/trainings/all_channels_200212_11h_51m/net_model_weighted.h5 b/NN/trainings/all_channels_200212_11h_51m/net_model_weighted.h5 new file mode 100644 index 0000000..34420a8 Binary files /dev/null and b/NN/trainings/all_channels_200212_11h_51m/net_model_weighted.h5 differ diff --git a/NN/trainings/all_channels_200212_14h_18m/input_features.pck b/NN/trainings/all_channels_200212_14h_18m/input_features.pck new file mode 100644 index 0000000..2008b6d Binary files /dev/null and b/NN/trainings/all_channels_200212_14h_18m/input_features.pck differ diff --git a/NN/trainings/all_channels_200212_14h_18m/input_tranformation_weighted.pck b/NN/trainings/all_channels_200212_14h_18m/input_tranformation_weighted.pck new file mode 100644 index 0000000..d0876de Binary files /dev/null and b/NN/trainings/all_channels_200212_14h_18m/input_tranformation_weighted.pck differ diff --git a/NN/trainings/all_channels_200212_14h_18m/net_model_weighted.h5 b/NN/trainings/all_channels_200212_14h_18m/net_model_weighted.h5 new file mode 100644 index 0000000..8c57b21 Binary files /dev/null and b/NN/trainings/all_channels_200212_14h_18m/net_model_weighted.h5 differ diff --git a/NN/trainings/all_channels_200212_14h_45m/input_features.pck b/NN/trainings/all_channels_200212_14h_45m/input_features.pck new file mode 100644 index 0000000..2008b6d Binary files /dev/null and b/NN/trainings/all_channels_200212_14h_45m/input_features.pck differ diff --git 
a/NN/trainings/all_channels_200212_14h_45m/input_tranformation_weighted.pck b/NN/trainings/all_channels_200212_14h_45m/input_tranformation_weighted.pck new file mode 100644 index 0000000..281e25c Binary files /dev/null and b/NN/trainings/all_channels_200212_14h_45m/input_tranformation_weighted.pck differ diff --git a/NN/trainings/all_channels_200212_14h_45m/net_model_weighted.h5 b/NN/trainings/all_channels_200212_14h_45m/net_model_weighted.h5 new file mode 100644 index 0000000..341d1fc Binary files /dev/null and b/NN/trainings/all_channels_200212_14h_45m/net_model_weighted.h5 differ diff --git a/NN/trainings/all_channels_200213_14h_26m/input_features.pck b/NN/trainings/all_channels_200213_14h_26m/input_features.pck new file mode 100644 index 0000000..0f3ecf7 Binary files /dev/null and b/NN/trainings/all_channels_200213_14h_26m/input_features.pck differ diff --git a/NN/trainings/all_channels_200213_14h_26m/input_tranformation_weighted.pck b/NN/trainings/all_channels_200213_14h_26m/input_tranformation_weighted.pck new file mode 100644 index 0000000..dc94c5d Binary files /dev/null and b/NN/trainings/all_channels_200213_14h_26m/input_tranformation_weighted.pck differ diff --git a/NN/trainings/all_channels_200213_14h_26m/net_model_weighted.h5 b/NN/trainings/all_channels_200213_14h_26m/net_model_weighted.h5 new file mode 100644 index 0000000..44aad0a Binary files /dev/null and b/NN/trainings/all_channels_200213_14h_26m/net_model_weighted.h5 differ diff --git a/NN/trainings/all_channels_200213_14h_48m/input_features.pck b/NN/trainings/all_channels_200213_14h_48m/input_features.pck new file mode 100644 index 0000000..0f3ecf7 Binary files /dev/null and b/NN/trainings/all_channels_200213_14h_48m/input_features.pck differ diff --git a/NN/trainings/all_channels_200213_14h_48m/input_tranformation_weighted.pck b/NN/trainings/all_channels_200213_14h_48m/input_tranformation_weighted.pck new file mode 100644 index 0000000..b8a7385 Binary files /dev/null and 
b/NN/trainings/all_channels_200213_14h_48m/input_tranformation_weighted.pck differ diff --git a/NN/trainings/all_channels_200213_14h_48m/net_model_weighted.h5 b/NN/trainings/all_channels_200213_14h_48m/net_model_weighted.h5 new file mode 100644 index 0000000..c18b82c Binary files /dev/null and b/NN/trainings/all_channels_200213_14h_48m/net_model_weighted.h5 differ diff --git a/NN/trainings/all_channels_200213_14h_55m/input_features.pck b/NN/trainings/all_channels_200213_14h_55m/input_features.pck new file mode 100644 index 0000000..0f3ecf7 Binary files /dev/null and b/NN/trainings/all_channels_200213_14h_55m/input_features.pck differ diff --git a/NN/trainings/all_channels_200213_14h_55m/input_tranformation_weighted.pck b/NN/trainings/all_channels_200213_14h_55m/input_tranformation_weighted.pck new file mode 100644 index 0000000..600591c Binary files /dev/null and b/NN/trainings/all_channels_200213_14h_55m/input_tranformation_weighted.pck differ diff --git a/NN/trainings/all_channels_200213_14h_55m/net_model_weighted.h5 b/NN/trainings/all_channels_200213_14h_55m/net_model_weighted.h5 new file mode 100644 index 0000000..b67e43f Binary files /dev/null and b/NN/trainings/all_channels_200213_14h_55m/net_model_weighted.h5 differ diff --git a/NN/trainings/all_channels_200213_15h_12m/input_features.pck b/NN/trainings/all_channels_200213_15h_12m/input_features.pck new file mode 100644 index 0000000..0f3ecf7 Binary files /dev/null and b/NN/trainings/all_channels_200213_15h_12m/input_features.pck differ diff --git a/NN/trainings/all_channels_200213_15h_12m/input_tranformation_weighted.pck b/NN/trainings/all_channels_200213_15h_12m/input_tranformation_weighted.pck new file mode 100644 index 0000000..d0b2f1d Binary files /dev/null and b/NN/trainings/all_channels_200213_15h_12m/input_tranformation_weighted.pck differ diff --git a/NN/trainings/all_channels_200213_15h_12m/net_model_weighted.h5 b/NN/trainings/all_channels_200213_15h_12m/net_model_weighted.h5 new file mode 100644 
index 0000000..ee77992 Binary files /dev/null and b/NN/trainings/all_channels_200213_15h_12m/net_model_weighted.h5 differ diff --git a/NN/trainings/all_channels_200213_15h_24m/input_features.pck b/NN/trainings/all_channels_200213_15h_24m/input_features.pck new file mode 100644 index 0000000..0f3ecf7 Binary files /dev/null and b/NN/trainings/all_channels_200213_15h_24m/input_features.pck differ diff --git a/NN/trainings/all_channels_200213_15h_24m/input_tranformation_weighted.pck b/NN/trainings/all_channels_200213_15h_24m/input_tranformation_weighted.pck new file mode 100644 index 0000000..3bc234c Binary files /dev/null and b/NN/trainings/all_channels_200213_15h_24m/input_tranformation_weighted.pck differ diff --git a/NN/trainings/all_channels_200213_15h_24m/net_model_weighted.h5 b/NN/trainings/all_channels_200213_15h_24m/net_model_weighted.h5 new file mode 100644 index 0000000..561f083 Binary files /dev/null and b/NN/trainings/all_channels_200213_15h_24m/net_model_weighted.h5 differ diff --git a/NN/trainings/all_channels_200213_15h_38m/input_features.pck b/NN/trainings/all_channels_200213_15h_38m/input_features.pck new file mode 100644 index 0000000..0f3ecf7 Binary files /dev/null and b/NN/trainings/all_channels_200213_15h_38m/input_features.pck differ diff --git a/NN/trainings/all_channels_200213_15h_38m/input_tranformation_weighted.pck b/NN/trainings/all_channels_200213_15h_38m/input_tranformation_weighted.pck new file mode 100644 index 0000000..8c3c77d Binary files /dev/null and b/NN/trainings/all_channels_200213_15h_38m/input_tranformation_weighted.pck differ diff --git a/NN/trainings/all_channels_200213_15h_38m/net_model_weighted.h5 b/NN/trainings/all_channels_200213_15h_38m/net_model_weighted.h5 new file mode 100644 index 0000000..a6aafa5 Binary files /dev/null and b/NN/trainings/all_channels_200213_15h_38m/net_model_weighted.h5 differ diff --git a/NN/trainings/all_channels_200213_17h_25m/input_features.pck 
b/NN/trainings/all_channels_200213_17h_25m/input_features.pck new file mode 100644 index 0000000..0f3ecf7 Binary files /dev/null and b/NN/trainings/all_channels_200213_17h_25m/input_features.pck differ diff --git a/NN/trainings/all_channels_200213_17h_25m/input_tranformation_weighted.pck b/NN/trainings/all_channels_200213_17h_25m/input_tranformation_weighted.pck new file mode 100644 index 0000000..3de2b56 Binary files /dev/null and b/NN/trainings/all_channels_200213_17h_25m/input_tranformation_weighted.pck differ diff --git a/NN/trainings/all_channels_200213_17h_25m/net_model_weighted.h5 b/NN/trainings/all_channels_200213_17h_25m/net_model_weighted.h5 new file mode 100644 index 0000000..48b70ad Binary files /dev/null and b/NN/trainings/all_channels_200213_17h_25m/net_model_weighted.h5 differ diff --git a/NN/trainings/all_channels_200214_10h_11m/input_features.pck b/NN/trainings/all_channels_200214_10h_11m/input_features.pck new file mode 100644 index 0000000..0f3ecf7 Binary files /dev/null and b/NN/trainings/all_channels_200214_10h_11m/input_features.pck differ diff --git a/NN/trainings/all_channels_200214_10h_11m/input_tranformation_weighted.pck b/NN/trainings/all_channels_200214_10h_11m/input_tranformation_weighted.pck new file mode 100644 index 0000000..7ca98c4 Binary files /dev/null and b/NN/trainings/all_channels_200214_10h_11m/input_tranformation_weighted.pck differ diff --git a/NN/trainings/all_channels_200214_10h_11m/net_model_weighted.h5 b/NN/trainings/all_channels_200214_10h_11m/net_model_weighted.h5 new file mode 100644 index 0000000..4396f0e Binary files /dev/null and b/NN/trainings/all_channels_200214_10h_11m/net_model_weighted.h5 differ diff --git a/NN/eee/191115_0h_3m/input_features.pck b/NN/trainings/eee/191115_0h_3m/input_features.pck similarity index 100% rename from NN/eee/191115_0h_3m/input_features.pck rename to NN/trainings/eee/191115_0h_3m/input_features.pck diff --git a/NN/eee/191115_0h_3m/input_tranformation_weighted.pck 
b/NN/trainings/eee/191115_0h_3m/input_tranformation_weighted.pck similarity index 100% rename from NN/eee/191115_0h_3m/input_tranformation_weighted.pck rename to NN/trainings/eee/191115_0h_3m/input_tranformation_weighted.pck diff --git a/NN/eee/191115_0h_3m/net_model_weighted.h5 b/NN/trainings/eee/191115_0h_3m/net_model_weighted.h5 similarity index 100% rename from NN/eee/191115_0h_3m/net_model_weighted.h5 rename to NN/trainings/eee/191115_0h_3m/net_model_weighted.h5 diff --git a/NN/eee/input_features.pck b/NN/trainings/eee/input_features.pck similarity index 100% rename from NN/eee/input_features.pck rename to NN/trainings/eee/input_features.pck diff --git a/NN/eee/input_tranformation_weighted.pck b/NN/trainings/eee/input_tranformation_weighted.pck similarity index 100% rename from NN/eee/input_tranformation_weighted.pck rename to NN/trainings/eee/input_tranformation_weighted.pck diff --git a/NN/eee/net_model_weighted.h5 b/NN/trainings/eee/net_model_weighted.h5 similarity index 100% rename from NN/eee/net_model_weighted.h5 rename to NN/trainings/eee/net_model_weighted.h5 diff --git a/NN/trainings/fixed_from_R/all_channels_191126_9h_45m/input_features.pck b/NN/trainings/fixed_from_R/all_channels_191126_9h_45m/input_features.pck new file mode 100644 index 0000000..df7f74d Binary files /dev/null and b/NN/trainings/fixed_from_R/all_channels_191126_9h_45m/input_features.pck differ diff --git a/NN/trainings/fixed_from_R/all_channels_191126_9h_45m/input_tranformation_weighted.pck b/NN/trainings/fixed_from_R/all_channels_191126_9h_45m/input_tranformation_weighted.pck new file mode 100644 index 0000000..959ba21 Binary files /dev/null and b/NN/trainings/fixed_from_R/all_channels_191126_9h_45m/input_tranformation_weighted.pck differ diff --git a/NN/trainings/fixed_from_R/all_channels_191126_9h_45m/net_model_weighted.h5 b/NN/trainings/fixed_from_R/all_channels_191126_9h_45m/net_model_weighted.h5 new file mode 100644 index 0000000..a21803f Binary files /dev/null and 
b/NN/trainings/fixed_from_R/all_channels_191126_9h_45m/net_model_weighted.h5 differ diff --git a/NN/mmm/12Nov19_v0/input_features.pck b/NN/trainings/mmm/12Nov19_v0/input_features.pck similarity index 100% rename from NN/mmm/12Nov19_v0/input_features.pck rename to NN/trainings/mmm/12Nov19_v0/input_features.pck diff --git a/NN/mmm/12Nov19_v0/input_tranformation_weighted.pck b/NN/trainings/mmm/12Nov19_v0/input_tranformation_weighted.pck similarity index 100% rename from NN/mmm/12Nov19_v0/input_tranformation_weighted.pck rename to NN/trainings/mmm/12Nov19_v0/input_tranformation_weighted.pck diff --git a/NN/mmm/12Nov19_v0/net_model_weighted.h5 b/NN/trainings/mmm/12Nov19_v0/net_model_weighted.h5 similarity index 100% rename from NN/mmm/12Nov19_v0/net_model_weighted.h5 rename to NN/trainings/mmm/12Nov19_v0/net_model_weighted.h5 diff --git a/NN/mmm/191114_23h_55m/input_features.pck b/NN/trainings/mmm/191114_23h_55m/input_features.pck similarity index 100% rename from NN/mmm/191114_23h_55m/input_features.pck rename to NN/trainings/mmm/191114_23h_55m/input_features.pck diff --git a/NN/mmm/191114_23h_55m/input_tranformation_weighted.pck b/NN/trainings/mmm/191114_23h_55m/input_tranformation_weighted.pck similarity index 100% rename from NN/mmm/191114_23h_55m/input_tranformation_weighted.pck rename to NN/trainings/mmm/191114_23h_55m/input_tranformation_weighted.pck diff --git a/NN/mmm/191115_0h_1m/input_features.pck b/NN/trainings/mmm/191115_0h_1m/input_features.pck similarity index 100% rename from NN/mmm/191115_0h_1m/input_features.pck rename to NN/trainings/mmm/191115_0h_1m/input_features.pck diff --git a/NN/mmm/191115_0h_1m/input_tranformation_weighted.pck b/NN/trainings/mmm/191115_0h_1m/input_tranformation_weighted.pck similarity index 100% rename from NN/mmm/191115_0h_1m/input_tranformation_weighted.pck rename to NN/trainings/mmm/191115_0h_1m/input_tranformation_weighted.pck diff --git a/NN/mmm/191115_0h_1m/net_model_weighted.h5 
b/NN/trainings/mmm/191115_0h_1m/net_model_weighted.h5 similarity index 100% rename from NN/mmm/191115_0h_1m/net_model_weighted.h5 rename to NN/trainings/mmm/191115_0h_1m/net_model_weighted.h5 diff --git a/NN/trainings/mmm/191118_14h_45m/input_features.pck b/NN/trainings/mmm/191118_14h_45m/input_features.pck new file mode 100644 index 0000000..45cc3ea Binary files /dev/null and b/NN/trainings/mmm/191118_14h_45m/input_features.pck differ diff --git a/NN/trainings/mmm/191118_14h_45m/input_tranformation_weighted.pck b/NN/trainings/mmm/191118_14h_45m/input_tranformation_weighted.pck new file mode 100644 index 0000000..54aa4ab Binary files /dev/null and b/NN/trainings/mmm/191118_14h_45m/input_tranformation_weighted.pck differ diff --git a/NN/trainings/mmm/191118_14h_45m/net_model_weighted.h5 b/NN/trainings/mmm/191118_14h_45m/net_model_weighted.h5 new file mode 100644 index 0000000..6be9370 Binary files /dev/null and b/NN/trainings/mmm/191118_14h_45m/net_model_weighted.h5 differ diff --git a/NN/trainings/mmm_191121_16h_25m/input_features.pck b/NN/trainings/mmm_191121_16h_25m/input_features.pck new file mode 100644 index 0000000..45cc3ea Binary files /dev/null and b/NN/trainings/mmm_191121_16h_25m/input_features.pck differ diff --git a/NN/trainings/mmm_191121_16h_25m/input_tranformation_weighted.pck b/NN/trainings/mmm_191121_16h_25m/input_tranformation_weighted.pck new file mode 100644 index 0000000..449eaab Binary files /dev/null and b/NN/trainings/mmm_191121_16h_25m/input_tranformation_weighted.pck differ diff --git a/NN/trainings/mmm_191121_16h_25m/net_model_weighted.h5 b/NN/trainings/mmm_191121_16h_25m/net_model_weighted.h5 new file mode 100644 index 0000000..6b21cbf Binary files /dev/null and b/NN/trainings/mmm_191121_16h_25m/net_model_weighted.h5 differ diff --git a/NN/trainings/mmm_191121_16h_52m/input_features.pck b/NN/trainings/mmm_191121_16h_52m/input_features.pck new file mode 100644 index 0000000..fc456b4 Binary files /dev/null and 
b/NN/trainings/mmm_191121_16h_52m/input_features.pck differ diff --git a/NN/trainings/mmm_191121_16h_52m/input_tranformation_weighted.pck b/NN/trainings/mmm_191121_16h_52m/input_tranformation_weighted.pck new file mode 100644 index 0000000..c1008d0 Binary files /dev/null and b/NN/trainings/mmm_191121_16h_52m/input_tranformation_weighted.pck differ diff --git a/README.md b/README.md index b6c0a5e..9f2d04e 100644 --- a/README.md +++ b/README.md @@ -3,253 +3,13 @@ tested in the following environment, set up with `conda` ``` -conda create -n alt_root python=3.7 root -c conda-forge -conda activate alt_root -conda config --env --add channels conda-forge -conda install -n alt_root root_numpy -conda install -n alt_root tensorflow -c conda-forge -conda install -n alt_root keras -c conda-forge -conda install -n alt_root matplotlib -conda install -n alt_root scikit-learn +conda env --name plotter create -f plotter/conda_env/alt_root_conda_environment.yml +python -m pip install rootpy --user +conda activate plotter python -m pip install rootpy --user python -m pip install modin --user ``` -and `conda info` returns - - -``` - - active environment : alt_root - active env location : /Users/manzoni/opt/anaconda2/envs/alt_root - shell level : 2 - user config file : /Users/manzoni/.condarc - populated config files : /Users/manzoni/.condarc - /Users/manzoni/opt/anaconda2/envs/alt_root/.condarc - conda version : 4.7.12 - conda-build version : 3.18.11 - python version : 2.7.15.final.0 - virtual packages : - base environment : /Users/manzoni/opt/anaconda2 (writable) - channel URLs : https://conda.anaconda.org/conda-forge/osx-64 - https://conda.anaconda.org/conda-forge/noarch - https://repo.anaconda.com/pkgs/main/osx-64 - https://repo.anaconda.com/pkgs/main/noarch - https://repo.anaconda.com/pkgs/r/osx-64 - https://repo.anaconda.com/pkgs/r/noarch - https://conda.anaconda.org/nlesc/osx-64 - https://conda.anaconda.org/nlesc/noarch - package cache : /Users/manzoni/opt/anaconda2/pkgs - 
/Users/manzoni/.conda/pkgs - envs directories : /Users/manzoni/opt/anaconda2/envs - /Users/manzoni/.conda/envs - platform : osx-64 - user-agent : conda/4.7.12 requests/2.22.0 CPython/2.7.15 Darwin/19.0.0 OSX/10.15.1 - UID:GID : 503:20 - netrc file : None - offline mode : False -``` - -list of packages - -``` -# packages in environment at /Users/manzoni/opt/anaconda2/envs/alt_root: -# -# Name Version Build Channel -_tflow_select 2.3.0 mkl -absl-py 0.8.1 py37_0 conda-forge -afterimage 1.21 h044d061_1002 conda-forge -appnope 0.1.0 py37_1000 conda-forge -astor 0.7.1 py_0 conda-forge -attrs 19.3.0 py_0 conda-forge -awkward 0.12.14 py_0 conda-forge -backcall 0.1.0 py_0 conda-forge -binutils 1.0.1 0 conda-forge -bleach 3.1.0 py_0 conda-forge -bzip2 1.0.8 h01d97ff_1 conda-forge -c-ares 1.15.0 h01d97ff_1001 conda-forge -c-compiler 1.0.1 h1de35cc_0 conda-forge -ca-certificates 2019.9.11 hecc5488_0 conda-forge -cachetools 3.1.1 py_0 conda-forge -cairo 1.16.0 he1c11cd_1002 conda-forge -cctools 895 1 -certifi 2019.9.11 py37_0 conda-forge -cffi 1.13.2 py37h33e799b_0 conda-forge -cfitsio 3.470 h389770f_2 conda-forge -chardet 3.0.4 py37_1003 conda-forge -clang 4.0.1 1 -clang_osx-64 4.0.1 h1ce6c1d_17 conda-forge -clangxx 4.0.1 1 -clangxx_osx-64 4.0.1 h22b1bf0_17 conda-forge -compiler-rt 4.0.1 hcfea43d_1 -compilers 1.0.1 0 conda-forge -cryptography 2.8 py37hafa8578_0 conda-forge -curl 7.65.3 h22ea746_0 conda-forge -cxx-compiler 1.0.1 h04f5b5a_0 conda-forge -cycler 0.10.0 py_2 conda-forge -davix 0.7.5 h7232a33_0 conda-forge -decorator 4.4.1 py_0 conda-forge -defusedxml 0.6.0 py_0 conda-forge -entrypoints 0.3 py37_1000 conda-forge -fftw 3.3.8 mpi_mpich_h6e18f22_1009 conda-forge -fontconfig 2.13.1 h6b1039f_1001 conda-forge -fortran-compiler 1.0.1 h4f947d3_0 conda-forge -freetype 2.10.0 h24853df_1 conda-forge -fribidi 1.0.5 h01d97ff_1002 conda-forge -gast 0.2.2 py_0 conda-forge -gdk-pixbuf 2.36.12 h284f8de_1003 conda-forge -gettext 0.19.8.1 h46ab8bc_1002 conda-forge -gfortran_osx-64 
4.8.5 h22b1bf0_8 conda-forge -giflib 5.1.7 h01d97ff_1 conda-forge -glew 2.0.0 h0a44026_1002 conda-forge -glib 2.58.3 h9d45998_1002 conda-forge -gobject-introspection 1.58.2 py37h93883a9_1002 conda-forge -google-pasta 0.1.8 py_0 conda-forge -graphite2 1.3.13 h2098e52_1000 conda-forge -graphviz 2.40.1 h69955ae_1 conda-forge -grpcio 1.23.0 py37h6ef0057_0 conda-forge -gsl 2.5 ha2d443c_1 conda-forge -h5py 2.10.0 nompi_py37h106b333_100 conda-forge -harfbuzz 2.4.0 hd8d2a14_3 conda-forge -hdf5 1.10.5 nompi_h0cbb7df_1103 conda-forge -icu 64.2 h6de7cb9_1 conda-forge -idna 2.8 py37_1000 conda-forge -importlib_metadata 0.23 py37_0 conda-forge -ipykernel 5.1.3 py37h5ca1d4c_0 conda-forge -ipyparallel 6.2.4 py37_0 conda-forge -ipython 7.9.0 py37h5ca1d4c_0 conda-forge -ipython_genutils 0.2.0 py_1 conda-forge -jedi 0.15.1 py37_0 conda-forge -jinja2 2.10.3 py_0 conda-forge -joblib 0.14.0 py_0 conda-forge -jpeg 9c h1de35cc_1001 conda-forge -jsonschema 3.1.1 py37_0 conda-forge -jupyter_client 5.3.3 py37_1 conda-forge -jupyter_core 4.5.0 py_0 conda-forge -keras 2.3.1 py37_0 conda-forge -keras-applications 1.0.8 py_1 conda-forge -keras-preprocessing 1.1.0 py_0 conda-forge -kiwisolver 1.1.0 py37h770b8ee_0 conda-forge -krb5 1.16.3 hcfa6398_1001 conda-forge -ld64 274.2 1 -libblas 3.8.0 14_openblas conda-forge -libcblas 3.8.0 14_openblas conda-forge -libcroco 0.6.13 hc484408_0 conda-forge -libcurl 7.65.3 h16faf7d_0 conda-forge -libcxx 4.0.1 hcfea43d_1 conda-forge -libcxxabi 4.0.1 hcfea43d_1 -libedit 3.1.20170329 hcfe32e1_1001 conda-forge -libffi 3.2.1 h6de7cb9_1006 conda-forge -libgfortran 3.0.1 0 conda-forge -libgpuarray 0.7.6 h1de35cc_1003 conda-forge -libiconv 1.15 h01d97ff_1005 conda-forge -liblapack 3.8.0 14_openblas conda-forge -libopenblas 0.3.7 hd44dcd8_1 conda-forge -libpng 1.6.37 h2573ce8_0 conda-forge -libprotobuf 3.9.2 hd9629dc_0 -librsvg 2.44.15 h90c2430_0 conda-forge -libsodium 1.0.17 h01d97ff_0 conda-forge -libssh2 1.8.2 hcdc9a53_2 conda-forge -libtiff 4.0.10 hd08fb8f_1003 
conda-forge -libxml2 2.9.10 h53d96d6_0 conda-forge -llvm 4.0.1 1 -llvm-lto-tapi 4.0.1 1 conda-forge -llvm-openmp 9.0.0 h40edb58_0 conda-forge -lz4 2.2.1 py37he1520b0_0 conda-forge -lz4-c 1.8.3 h6de7cb9_1001 conda-forge -mako 1.1.0 py_0 conda-forge -markdown 3.1.1 py_0 conda-forge -markupsafe 1.1.1 py37h0b31af3_0 conda-forge -matplotlib 2.2.4 py37_1 conda-forge -matplotlib-base 2.2.4 py37h31f9439_1 conda-forge -metakernel 0.24.3 py_0 conda-forge -mistune 0.8.4 py37h0b31af3_1000 conda-forge -more-itertools 7.2.0 py_0 conda-forge -mpi 1.0 mpich conda-forge -mpich 3.2.1 ha90c164_1014 conda-forge -nbconvert 5.6.1 py37_0 conda-forge -nbformat 4.4.0 py_1 conda-forge -ncurses 6.1 h0a44026_1002 conda-forge -notebook 6.0.1 py37_0 conda-forge -numpy 1.17.3 py37hde6bac1_0 conda-forge -openssl 1.1.1d h0b31af3_0 conda-forge -opt_einsum 3.1.0 py_0 conda-forge -pandas 0.25.1 py37h86efe34_0 conda-forge -pandoc 2.7.3 0 conda-forge -pandocfilters 1.4.2 py_1 conda-forge -pango 1.42.4 h6691c8e_1 conda-forge -parso 0.5.1 py_0 conda-forge -pcre 8.43 h0a44026_0 -pexpect 4.7.0 py37_0 conda-forge -pickleshare 0.7.5 py37_1000 conda-forge -pip 19.3.1 py37_0 conda-forge -pixman 0.38.0 h01d97ff_1003 conda-forge -portalocker 1.5.1 py37_0 conda-forge -prometheus_client 0.7.1 py_0 conda-forge -prompt_toolkit 2.0.10 py_0 conda-forge -protobuf 3.9.2 py37h0a44026_0 -ptyprocess 0.6.0 py_1001 conda-forge -pycparser 2.19 py37_1 conda-forge -pydot 1.4.1 py37_1001 conda-forge -pygments 2.4.2 py_0 conda-forge -pygpu 0.7.6 py37h3b54f70_1000 conda-forge -pyopenssl 19.0.0 py37_0 conda-forge -pyparsing 2.4.5 py_0 conda-forge -pyrsistent 0.15.5 py37h0b31af3_0 conda-forge -pysocks 1.7.1 py37_0 conda-forge -pythia8 8.240 py37h6de7cb9_2 conda-forge -python 3.7.3 h93065d6_1 conda-forge -python-dateutil 2.8.1 py_0 conda-forge -python-xxhash 1.4.1 py37h0b31af3_0 conda-forge -pytz 2019.3 py_0 conda-forge -pyyaml 5.1.2 py37h0b31af3_0 conda-forge -pyzmq 18.1.0 py37hee98d25_0 conda-forge -qt 5.9.7 h8cf7e54_3 conda-forge 
-readline 8.0 hcfe32e1_0 conda-forge -requests 2.22.0 py37_1 conda-forge -root 6.18.00 py37h500fca7_17 conda-forge -root_numpy 4.8.0 py37haf112f3_2 conda-forge -root_pandas 0.7.0 py_0 conda-forge -scikit-learn 0.21.3 py37hd4ffd6c_0 conda-forge -scipy 1.3.1 py37hab3da7d_2 conda-forge -send2trash 1.5.0 py_0 conda-forge -setuptools 41.6.0 py37_1 conda-forge -six 1.13.0 py37_0 conda-forge -sqlite 3.30.1 h93121df_0 conda-forge -tbb 2019.8 h04f5b5a_0 -tbb-devel 2019.8 h04f5b5a_0 -tensorboard 2.0.0 pyhb230dea_0 -tensorflow 2.0.0 mkl_py37hda344b4_0 -tensorflow-base 2.0.0 mkl_py37h66b1bf0_0 -tensorflow-estimator 2.0.0 pyh2649769_0 -termcolor 1.1.0 py_2 conda-forge -terminado 0.8.2 py37_0 conda-forge -testpath 0.4.4 py_0 conda-forge -theano 1.0.4 py37h0a44026_1000 conda-forge -tk 8.6.9 h2573ce8_1003 conda-forge -tornado 6.0.3 py37h0b31af3_0 conda-forge -traitlets 4.3.3 py37_0 conda-forge -uproot 3.10.10 py37_0 conda-forge -uproot-base 3.10.10 py37_0 conda-forge -uproot-methods 0.7.1 py_0 conda-forge -urllib3 1.25.7 py37_0 conda-forge -vdt 0.4.3 h6de7cb9_0 conda-forge -wcwidth 0.1.7 py_1 conda-forge -webencodings 0.5.1 py_1 conda-forge -werkzeug 0.16.0 py_0 conda-forge -wheel 0.33.6 py37_0 conda-forge -wrapt 1.11.2 py37h0b31af3_0 conda-forge -xrootd 4.9.1 py37h02158b6_1 conda-forge -xz 5.2.4 h1de35cc_1001 conda-forge -yaml 0.1.7 h1de35cc_1001 conda-forge -zeromq 4.3.2 h6de7cb9_2 conda-forge -zipp 0.6.0 py_0 conda-forge -zlib 1.2.11 h0b31af3_1006 conda-forge -zstd 1.4.0 ha9f0a20_0 conda-forge -``` - - - # Limits ``` diff --git a/conda_env/alt_root_conda_environment.yml b/conda_env/alt_root_conda_environment.yml index 6058e46..34c622a 100644 --- a/conda_env/alt_root_conda_environment.yml +++ b/conda_env/alt_root_conda_environment.yml @@ -65,7 +65,7 @@ dependencies: - importlib_metadata=0.23=py37_0 - ipykernel=5.1.3=py37h5ca1d4c_0 - ipyparallel=6.2.4=py37_0 - - ipython=7.9.0=py37h5ca1d4c_0 + - ipython=7.9.0 - ipython_genutils=0.2.0=py_1 - jedi=0.15.1=py37_0 - jinja2=2.10.3=py_0 
diff --git a/limits/produce_limits.py b/limits/produce_limits.py index 2ef289c..3044404 100644 --- a/limits/produce_limits.py +++ b/limits/produce_limits.py @@ -12,15 +12,18 @@ from decimal import Decimal import matplotlib.pyplot as plt from intersection import intersection +from getpass import getuser as user import pickle -all_datacards = glob('datacards_mmm/datacard*hnl*.txt') +if user() == 'manzoni' or user() == 'manzonir': all_datacards = glob('datacards_mmm/datacard*hnl*.txt') +if user() == 'cesareborgia' or user() == 'vstampf': all_datacards = glob('datacard*hnl*.txt') all_datacards.sort() signal_type = 'majorana' method = 'asymptotic' # 'toys' variable = 'hnl_m_12' -categories_to_combine = OrderedDict(zip(['lxy_lt_0p5', 'lxy_0p5_to_2p0', 'lxy_mt_2p0'], ['disp1', 'disp2', 'disp3'])) +# categories_to_combine = OrderedDict(zip(['lxy_lt_0p5', 'lxy_0p5_to_2p0', 'lxy_mt_2p0'], ['disp1', 'disp2', 'disp3'])) +categories_to_combine = OrderedDict(zip(['lxy_lt_0p5', 'lxy_0p5_to_1p5', 'lxy_1p5_to_4p0', 'lxy_mt_4p0'], ['disp1', 'disp2', 'disp3', 'disp4'])) run_blind = True flavour = r'$|V|^2_{\mu}$' @@ -35,7 +38,8 @@ continue # string mangling - name = idc.split('/')[1].split('.')[0] + if user() == 'manzoni' or user() == 'manzonir': name = idc.split('/')[1].split('.')[0] + if user() == 'cesareborgia' or user() == 'vstampf': name = idc.split('.')[0] signal_name = re.findall(r'hnl_m_\d+_v2_\d+p\d+Em\d+', name)[0] signal_mass = float(re.findall(r'\d+', re.findall(r'hnl_m_\d+_', signal_name)[0])[0]) signal_coupling_raw = re.findall(r'\d+', re.findall(r'_\d+p\d+Em\d+', signal_name)[0]) @@ -65,26 +69,37 @@ for coupling in couplings.keys(): print '\tcoupling =', coupling datacards_to_combine = couplings[coupling] - # gonna combine the cards - command = 'combineCards.py' - for cat, idc in product(categories_to_combine, datacards_to_combine): - if cat in idc: - command += ' %s=%s ' %(categories_to_combine[cat],idc) - command += ' > datacard_combined_tmp.txt' - - print 
'\t\t',command - os.system(command) - - command = 'combine -M AsymptoticLimits datacard_combined_tmp.txt' - if run_blind: - command += ' --run blind' - - print '\t\t',command - results = subprocess.check_output(command.split()) - + + # check if file is already there result_file_name = ('result_m_%d_v2_%.1E.txt' %(mass, Decimal(coupling))).replace('-', 'm') - with open(result_file_name, 'w') as ff: - print >> ff, results + exists_result = os.path.isfile(result_file_name) + + if not exists_result: # TODO and not flag force redo results + # gonna combine the cards + command = 'combineCards.py' + for cat, idc in product(categories_to_combine, datacards_to_combine): + if cat in idc: + command += ' %s=%s ' %(categories_to_combine[cat],idc) + command += ' > datacard_combined_tmp.txt' + + print '\t\t',command + os.system(command) + + command = 'combine -M AsymptoticLimits datacard_combined_tmp.txt' + if run_blind: + command += ' --run blind' + + print '\t\t',command + results = subprocess.check_output(command.split()) + + # result_file_name = ('result_m_%d_v2_%.1E.txt' %(mass, Decimal(coupling))).replace('-', 'm') + with open(result_file_name, 'w') as ff: + print >> ff, results + + # else read from result_file + if exists_result: + result_file = open(result_file_name) + results = result_file.read() new_obs = None new_minus_two = None @@ -173,31 +188,23 @@ if not run_blind: limits2D[mass]['obs'] = x_obs -# import sys ; sys.exit(0) - ########################################################################################## ########################################################################################## -masses_central = [] -masses_one_sigma = [] masses_two_sigma = [] +masses_one_sigma = [] +masses_central = [] minus_two = [] minus_one = [] central = [] -plus_two = [] +plus_one = [] plus_two = [] # go through the different mass points first left to right to catch the lower exclusion bound # then right to left to catch the upper exclusion bound -for mass in 
sorted(limits2D.keys()): - minus_two.append( min(limits2D[mass]['exp_minus_two']) ) ; masses_minus_two.append(mass) - minus_one.append( min(limits2D[mass]['exp_minus_one']) ) ; masses_minus_one.append(mass) - central .append( min(limits2D[mass]['exp_central' ]) ) ; masses_central .append(mass) - plus_two .append( min(limits2D[mass]['exp_plus_one' ]) ) ; masses_plus_two .append(mass) - plus_two .append( min(limits2D[mass]['exp_plus_two' ]) ) ; masses_plus_two .append(mass) - + for mass in sorted(limits2D.keys(), reverse=True): if len(limits2D[mass]['exp_central' ])>1: @@ -217,7 +224,6 @@ # plot the 2D limits plt.clf() - plt.fill_between(masses_two_sigma, minus_two, plus_two, color='gold' , label=r'$\pm 2 \sigma$') plt.fill_between(masses_one_sigma, minus_one, plus_one, color='forestgreen', label=r'$\pm 1 \sigma$') plt.plot (masses_central , central , color='red' , label='central expected', linewidth=2) @@ -228,7 +234,7 @@ plt.ticklabel_format(axis='y', style='sci', scilimits=(0,0)) #plt.tight_layout() plt.yscale('log') -plt.xscale('lin') +# plt.xscale('lin') plt.savefig('2d_hnl_limit.pdf') diff --git a/llp_plots.py b/llp_plots.py new file mode 100644 index 0000000..c8ae03a --- /dev/null +++ b/llp_plots.py @@ -0,0 +1,371 @@ +import ROOT as rt +from rootpy.plotting import Canvas, Pad +from ROOT import RDataFrame as rdf +from plotter.selections import Selections as sel +from plotter.cmsstyle import CMS_lumi +import numpy as np + +def show_logo_in_prog(): + logo = rt.TLatex() + logo.SetNDC() + logo.SetTextAlign(11) + logo.SetTextFont(61) + logo.SetTextSize(0.039) + logo.DrawLatex(0.15,0.88,'CMS') + + preliminary = rt.TLatex() + preliminary.SetNDC() + preliminary.SetTextAlign(11) + preliminary.SetTextFont(52) + preliminary.SetTextSize(0.033) + preliminary.DrawLatex(0.243,0.88,'Work In Progress') + +def show_lumi(title): + latex = rt.TLatex() + latex.SetNDC() + latex.SetTextAlign(31) + latex.SetTextFont(42) + latex.SetTextSize(0.031) + latex.DrawLatex(0.85,0.88,title) + 
+rt.ROOT.EnableImplicitMT() + +rt.gStyle.SetOptStat(0) + +cuts = sel('mmm') + +tch = rt.TChain('tree') + +tch.Add('/Users/cesareborgia/cernbox/ntuples/2018/mmm/Single_mu_2018A/HNLTreeProducer/tree.root') +tch.Add('/Users/cesareborgia/cernbox/ntuples/2018/mmm/Single_mu_2018B/HNLTreeProducer/tree.root') +tch.Add('/Users/cesareborgia/cernbox/ntuples/2018/mmm/Single_mu_2018C/HNLTreeProducer/tree.root') +tch.Add('/Users/cesareborgia/cernbox/ntuples/2018/mmm/Single_mu_2018D/HNLTreeProducer/tree.root') + +df_w = rdf('tree', 'NN/mmm/191118_14h_45m/output_ntuple_weighted.root') + +df = rdf(tch) + +# b_dxy = np.logspace(-2, 1, 10) +# b_dxy = np.logspace(-2, 1, 25) +b_dxy = np.logspace(-2, 1, 26) +b_disp = np.logspace(-2, 1.5, 20) +# b_disp = np.logspace(-2, 1.5, 21) +b_dr12 = np.linspace(0, 1., 21) +# b_dr12 = np.linspace(0, 1., 20) +# b_m12 = np.linspace(0, 12, 24) +b_m12 = np.linspace(0, 12, 12) +# b_m12 = np.linspace(0, 6, 18) +b_sv_cos = np.linspace(0.5, 1.2, 21) +b_fr = np.linspace(0, 1, 15) +b_fr = np.logspace(-2.5, 0, 20) + +# cut = ' & '.join( [ cuts.selections['SR_sb_no_dxy'], cuts.selections['pt_iso'] ] ) ## v0: no vetoes --> doesn't work that well + +cut = ' & '.join( [ cuts.selections['SR_sb_no_dxy'], cuts.selections['pt_iso'], ## v1: add vetoes + cuts.selections['vetoes_12_OS'], cuts.selections['vetoes_01_OS'], cuts.selections['vetoes_02_OS'] ] ) + +# cut = ' & '.join( [ cuts.selections['SR_sb_w_dxy'], cuts.selections['pt_iso'], ## v2: add dxy + # cuts.selections['vetoes_12_OS'], cuts.selections['vetoes_01_OS'], cuts.selections['vetoes_02_OS'] ] ) + +# cut = ' & '.join( [ cuts.selections['baseline'], cuts.selections['pt_iso'], ## v3: SR, as before + # cuts.selections['vetoes_12_OS'], cuts.selections['vetoes_01_OS'], cuts.selections['vetoes_02_OS'] ] ) + +df_0 = df.Filter(cut) + +df_0 = df_0.Define('abs_l1_dxy', 'abs(l1_dxy)') +df_0 = df_0.Define('abs_l2_dxy', 'abs(l2_dxy)') + +df_lnt = df_0.Filter( '! 
(' +cuts.selections['tight'] + ' )' ) +df_t = df_0.Filter(cuts.selections['tight']) +# from pdb import set_trace as st; st() + +dxy = False +disp = False +dr = False +sv = False +m12 = True + +disp_w = False +dxy_w = False +dr_w = False +sv_w = False +m12_w = False + +# histos +if dxy: + ph_1dxy_t = df_t.Histo1D(('abs_l1_dxy_t', 'abs_l1_dxy_t', len(b_dxy) -1, b_dxy), 'abs_l1_dxy') + ph_2dxy_t = df_t.Histo1D(('abs_l2_dxy_t', 'abs_l2_dxy_t', len(b_dxy) -1, b_dxy), 'abs_l2_dxy') + + ph_1dxy_lnt = df_lnt.Histo1D(('abs_l1_dxy_lnt', 'abs_l1_dxy_lnt', len(b_dxy) -1, b_dxy), 'abs_l1_dxy') + ph_2dxy_lnt = df_lnt.Histo1D(('abs_l2_dxy_lnt', 'abs_l2_dxy_lnt', len(b_dxy) -1, b_dxy), 'abs_l2_dxy') + + h_1dxy_t = ph_1dxy_t.GetPtr() + h_2dxy_t = ph_2dxy_t.GetPtr() + + h_1dxy_lnt = ph_1dxy_lnt.GetPtr() + h_2dxy_lnt = ph_2dxy_lnt.GetPtr() + + h_1dxy_t .Add(h_2dxy_t) + h_1dxy_lnt.Add(h_2dxy_lnt) + + h_1dxy_lnt.SetTitle('; d_{xy} (cm); %') + h_1dxy_lnt.SetName('loose-not-tight') + h_1dxy_lnt.SetLineColor(rt.kCyan+1) + h_1dxy_lnt.SetLineWidth(2) + + h_1dxy_t .SetTitle('tight') + h_1dxy_t .SetLineColor(rt.kBlue+2) + h_1dxy_t .SetLineWidth(2) + + +if disp: + ph_disp_t = df_t.Histo1D(('2d_disp_t', '2d_disp_t', len(b_disp) -1, b_disp), 'hnl_2d_disp') + + ph_disp_lnt = df_lnt.Histo1D(('2d_disp_lnt', '2d_disp_lnt', len(b_disp) -1, b_disp), 'hnl_2d_disp') + + h_disp_t = ph_disp_t.GetPtr() + + h_disp_lnt = ph_disp_lnt.GetPtr() + + h_disp_lnt.SetTitle('; L_{xy} (cm); %') + h_disp_lnt.SetName('loose-not-tight') + h_disp_lnt.SetLineColor(rt.kCyan+1) + h_disp_lnt.SetLineWidth(2) + + h_disp_t .SetTitle('tight') + h_disp_t .SetLineColor(rt.kBlue+2) + h_disp_t .SetLineWidth(2) + + +if dr: + ph_dr12_t = df_t.Histo1D(('dr12_t', 'dr12_t', len(b_dr12) -1, b_dr12), 'hnl_dr_12') + + ph_dr12_lnt = df_lnt.Histo1D(('dr12_lnt', 'dr12_lnt', len(b_dr12) -1, b_dr12), 'hnl_dr_12') + + h_dr12_t = ph_dr12_t.GetPtr() + + h_dr12_lnt = ph_dr12_lnt.GetPtr() + + h_dr12_lnt.SetTitle('; #Delta R_{23}; %') + 
h_dr12_lnt.SetName('loose-not-tight') + h_dr12_lnt.SetLineColor(rt.kCyan+1) + h_dr12_lnt.SetLineWidth(2) + + h_dr12_t .SetTitle('tight') + h_dr12_t .SetLineColor(rt.kBlue+2) + h_dr12_t .SetLineWidth(2) + +if sv: + ph_sv_cos_t = df_t.Histo1D(('sv_cos_t', 'sv_cos_t', len(b_sv_cos) -1, b_sv_cos), 'sv_cos') + + ph_sv_cos_lnt = df_lnt.Histo1D(('sv_cos_lnt', 'sv_cos_lnt', len(b_sv_cos) -1, b_sv_cos), 'sv_cos') + + h_sv_cos_t = ph_sv_cos_t.GetPtr() + + h_sv_cos_lnt = ph_sv_cos_lnt.GetPtr() + + h_sv_cos_lnt.SetTitle('; sv_cos; %') + h_sv_cos_lnt.SetName('loose-not-tight') + h_sv_cos_lnt.SetLineColor(rt.kCyan+1) + h_sv_cos_lnt.SetLineWidth(2) + + h_sv_cos_t .SetTitle('tight') + h_sv_cos_t .SetLineColor(rt.kBlue+2) + h_sv_cos_t .SetLineWidth(2) + +if m12: + ph_m12_t = df_t.Histo1D(('m12_t', 'm12_t', len(b_m12) -1, b_m12), 'hnl_m_12') + + ph_m12_lnt = df_lnt.Histo1D(('m12_lnt', 'm12_lnt', len(b_m12) -1, b_m12), 'hnl_m_12') + + h_m12_t = ph_m12_t.GetPtr() + + h_m12_lnt = ph_m12_lnt.GetPtr() + + h_m12_lnt.SetTitle('; m_{23} (GeV); %') + h_m12_lnt.SetName('loose-not-tight') + h_m12_lnt.SetLineColor(rt.kCyan+1) + h_m12_lnt.SetLineWidth(2) + + h_m12_t .SetTitle('; m_{23} (GeV); %') + h_m12_t .SetName('tight') + h_m12_t .SetLineColor(rt.kBlue+2) + h_m12_t .SetLineWidth(2) + + + +if dxy: + can = Canvas(width=700, height=700) ; can.Draw() + can.cd() ; main_pad = Pad(0. , 0. , 1., 1. 
) ; main_pad .Draw() + main_pad.SetTicks(True) + main_pad.SetTopMargin(0.15) + main_pad.SetBottomMargin(0.15) + main_pad.SetLeftMargin(0.15) + main_pad.SetRightMargin(0.15) + main_pad.cd() + h_1dxy_lnt.GetXaxis().SetNoExponent() + h_1dxy_lnt.GetXaxis().SetMoreLogLabels() + h_1dxy_lnt.DrawNormalized('histe') + h_1dxy_t.DrawNormalized('histesame') + main_pad.BuildLegend(0.62, 0.6,0.82,0.7) + show_logo_in_prog() + show_lumi('2018, L = 59.7 fb^{-1}, 13 TeV') + main_pad.SetLogx() + can.Modified(); can.Update() + can.SaveAs('plots/llp/dxy.pdf') + can.SaveAs('plots/llp/dxy.root') + +if disp: + can = Canvas(width=700, height=700) ; can.Draw() + can.cd() ; main_pad = Pad(0. , 0. , 1., 1. ) ; main_pad .Draw() + main_pad.SetTicks(True) + main_pad.SetTopMargin(0.15) + main_pad.SetBottomMargin(0.15) + main_pad.SetLeftMargin(0.15) + main_pad.SetRightMargin(0.15) + main_pad.cd() + h_disp_lnt.GetXaxis().SetNoExponent() + h_disp_lnt.GetXaxis().SetMoreLogLabels() + h_disp_lnt.DrawNormalized('histe') + h_disp_t.DrawNormalized('histesame') + main_pad.BuildLegend(0.62, 0.6,0.82,0.7) + show_logo_in_prog() + show_lumi('2018, L = 59.7 fb^{-1}, 13 TeV') + main_pad.SetLogx() + can.Modified(); can.Update() + can.SaveAs('plots/llp/disp.pdf') + can.SaveAs('plots/llp/disp.root') + +if dr: + can = Canvas(width=700, height=700) ; can.Draw() + can.cd() ; main_pad = Pad(0. , 0. , 1., 1. ) ; main_pad .Draw() + main_pad.SetTicks(True) + main_pad.SetTopMargin(0.15) + main_pad.SetBottomMargin(0.15) + main_pad.SetLeftMargin(0.15) + main_pad.SetRightMargin(0.15) + main_pad.cd() + h_dr12_lnt.DrawNormalized('histe') + h_dr12_t.DrawNormalized('histesame') + main_pad.BuildLegend(0.62, 0.6,0.82,0.7) + show_logo_in_prog() + show_lumi('2018, L = 59.7 fb^{-1}, 13 TeV') + can.Modified(); can.Update() + can.SaveAs('plots/llp/dr12.pdf') + can.SaveAs('plots/llp/dr12.root') + +if sv: + can = Canvas(width=700, height=700) ; can.Draw() + can.cd() ; main_pad = Pad(0. , 0. , 1., 1. 
) ; main_pad .Draw() + main_pad.SetTicks(True) + main_pad.SetTopMargin(0.15) + main_pad.SetBottomMargin(0.15) + main_pad.SetLeftMargin(0.15) + main_pad.SetRightMargin(0.15) + main_pad.cd() + h_sv_cos_lnt.DrawNormalized('histe') + h_sv_cos_t.DrawNormalized('histesame') + main_pad.BuildLegend(0.62, 0.6,0.82,0.7) + show_logo_in_prog() + show_lumi('2018, L = 59.7 fb^{-1}, 13 TeV') + can.Modified(); can.Update() + can.SaveAs('plots/llp/sv_cos.pdf') + can.SaveAs('plots/llp/sv_cos.root') + +if m12: + can = Canvas(width=700, height=700) ; can.Draw() + can.cd() ; main_pad = Pad(0. , 0. , 1., 1. ) ; main_pad .Draw() + main_pad.SetTicks(True) + main_pad.SetTopMargin(0.15) + main_pad.SetBottomMargin(0.15) + main_pad.SetLeftMargin(0.15) + main_pad.SetRightMargin(0.15) + main_pad.cd() + h_m12_lnt.DrawNormalized('histe') + h_m12_t.DrawNormalized('histesame') + main_pad.BuildLegend(0.62, 0.6,0.82,0.7) + show_logo_in_prog() + show_lumi('2018, L = 59.7 fb^{-1}, 13 TeV') + can.Modified(); can.Update() + can.SaveAs('plots/llp/m12.pdf') + can.SaveAs('plots/llp/m12.root') + + +if disp_w: + + ph_disp_fr = df_w.Histo2D(('disp_fr', 'disp_fr', len(b_disp)-1, b_disp, len(b_fr)-1, b_fr), 'hnl_2d_disp', 'fr') + + h_disp_fr = ph_disp_fr.GetPtr() + + h_disp_fr.SetTitle('; L_{xy} (cm); FR; %') + + can = Canvas(width=700, height=700) ; can.Draw() + can.cd() ; main_pad = Pad(0. , 0. , 1., 1. 
) ; main_pad .Draw() + main_pad.SetTicks(True) + main_pad.SetTopMargin(0.15) + main_pad.SetBottomMargin(0.15) + main_pad.SetLeftMargin(0.15) + main_pad.SetRightMargin(0.15) + main_pad.cd() + h_disp_fr.DrawNormalized('colz') + h_disp_fr.GetXaxis().SetNoExponent() + h_disp_fr.GetXaxis().SetMoreLogLabels() + main_pad.SetLogx(); main_pad.SetLogy() + show_logo_in_prog() + show_lumi('2018, L = 59.7 fb^{-1}, 13 TeV') + can.Modified(); can.Update() + can.SaveAs('plots/llp/disp_fr.pdf') + can.SaveAs('plots/llp/disp_fr.root') + +if dr_w: + + ph_dr12_fr = df_w.Histo2D(('dr12_fr', 'dr12_fr', len(b_dr12)-1, b_dr12, len(b_fr)-1, b_fr), 'hnl_dr_12', 'fr') + + h_dr12_fr = ph_dr12_fr.GetPtr() + + h_dr12_fr.SetTitle('; #Delta R_{23}; FR; %') + + can = Canvas(width=700, height=700) ; can.Draw() + can.cd() ; main_pad = Pad(0. , 0. , 1., 1. ) ; main_pad .Draw() + main_pad.SetTicks(True) + main_pad.SetTopMargin(0.15) + main_pad.SetBottomMargin(0.15) + main_pad.SetLeftMargin(0.15) + main_pad.SetRightMargin(0.15) + main_pad.cd() + h_dr12_fr.DrawNormalized('colz') + h_dr12_fr.GetXaxis().SetNoExponent() + h_dr12_fr.GetXaxis().SetMoreLogLabels() + main_pad.SetLogy() + show_logo_in_prog() + show_lumi('2018, L = 59.7 fb^{-1}, 13 TeV') + can.Modified(); can.Update() + can.SaveAs('plots/llp/dr12_fr.pdf') + can.SaveAs('plots/llp/dr12_fr.root') + +if m12_w: + + ph_m12_fr = df_w.Histo2D(('m12_fr', 'm12_fr', len(b_m12)-1, b_m12, len(b_fr)-1, b_fr), 'hnl_m_12', 'fr') + + h_m12_fr = ph_m12_fr.GetPtr() + + h_m12_fr.SetTitle('; m_{23} (GeV); FR; %') + + can = Canvas(width=700, height=700) ; can.Draw() + can.cd() ; main_pad = Pad(0. , 0. , 1., 1. 
) ; main_pad .Draw() + main_pad.SetTicks(True) + main_pad.SetTopMargin(0.15) + main_pad.SetBottomMargin(0.15) + main_pad.SetLeftMargin(0.15) + main_pad.SetRightMargin(0.15) + main_pad.cd() + h_m12_fr.DrawNormalized('colz') + h_m12_fr.GetXaxis().SetNoExponent() + h_m12_fr.GetXaxis().SetMoreLogLabels() + main_pad.SetLogy() + show_logo_in_prog() + show_lumi('2018, L = 59.7 fb^{-1}, 13 TeV') + can.Modified(); can.Update() + can.SaveAs('plots/llp/m12_fr.pdf') + can.SaveAs('plots/llp/m12_fr.root') + diff --git a/overview_plots_AN.py b/overview_plots_AN.py new file mode 100644 index 0000000..eec0251 --- /dev/null +++ b/overview_plots_AN.py @@ -0,0 +1,561 @@ +from collections import OrderedDict +from glob import glob +import ROOT as rt +from pdb import set_trace +from rootpy.plotting import Hist, HistStack, Canvas, Pad, Legend +from rootpy.plotting.utils import draw +import numpy as np +from plotter.cmsstyle import CMS_lumi + +# f_in ='/Users/cesareborgia/cernbox/hnl/2018/sig/HN3L_M_4_V_0p00290516780927_mu_massiveAndCKM_LO/HNLTreeProducer_mmm/tree.root' +# tf = rt.TFile(f_in) +# t = tf.Get('tree') + +out_dir = '/Users/cesareborgia/Dropbox/documents/physics/phd/plots/' + +rt.gStyle.SetOptStat(0) + +years = ['2016', '2017', '2018'] +chs = ['mmm', 'mem_os', 'mem_ss', 'eee', 'eem_os', 'eem_ss'] +disps = ['lt_0p5', '0p5_to_1p5','1p5_to_4p0', 'mt_4p0'] +bins = ['lo', 'hi'] + +data = 'data_obs' +prmp = 'prompt' +nonp = 'nonprompt' +sig8 = 'hnl_m_8_v2_2p3Em06_majorana' +sig10 = 'hnl_m_10_v2_5p7Em07_majorana' +sigs = [sig8, sig10] + +folders = [] + +# '''# 26Mar20 #''' +# R = 'MRtrain_train_w_dispSig_WO_sbtr_plot_WO_sbtr_w_dispSig' +# folders = glob('/Users/cesareborgia/Dropbox/documents/physics/phd/plots/*/*/200326_*_MRtrain_train_w_dispSig_WO_sbtr_plot_WO_sbtr_w_dispSig/datacards/') + +# '''# 26Mar20 #''' +# R = 'MR_train_WO_sbtr_w_dispSig_plot_WO_sbtr_w_dispSig' +# folders = 
glob('/Users/cesareborgia/Dropbox/documents/physics/phd/plots/*/*/200326_*_MR_train_WO_sbtr_w_dispSig_plot_WO_sbtr_w_dispSig/datacards/') + +# '''# 25Mar20 #''' +# R = 'MR_train_w_sbtr_WO_dispSig_plot_WO_sbtr_w_dispSig' # is really with disp sig for plotting, this is checked +# folders = glob('/Users/cesareborgia/Dropbox/documents/physics/phd/plots/*/*/200325_*_MR_train_w_sbtr_plot_WO_sbtr/datacards/') + +'''# 25Mar20 #''' +R = 'MRtrain_train_w_sbtr_WO_dispSig_plot_w_sbtr_WO_dispSig' +folders = glob('/Users/cesareborgia/Dropbox/documents/physics/phd/plots/*/*/200325_*_training_MR/datacards/') + +# '''# 24Mar20 #''' +# R = 'MR_with_disp_sig_24Mar20' +# folders = [ +# '/Users/cesareborgia/Dropbox/documents/physics/phd/plots/2016/mmm/200324_13h_9m/datacards/', +# '/Users/cesareborgia/Dropbox/documents/physics/phd/plots/2016/mem_os/200324_13h_15m/datacards/', +# '/Users/cesareborgia/Dropbox/documents/physics/phd/plots/2016/mem_ss/200324_13h_25m/datacards/', +# '/Users/cesareborgia/Dropbox/documents/physics/phd/plots/2016/eee/200324_13h_47m/datacards/', +# '/Users/cesareborgia/Dropbox/documents/physics/phd/plots/2016/eem_os/200324_13h_34m/datacards/', +# '/Users/cesareborgia/Dropbox/documents/physics/phd/plots/2016/eem_ss/200324_13h_40m/datacards/', + +# '/Users/cesareborgia/Dropbox/documents/physics/phd/plots/2017/mmm/200324_13h_11m/datacards/', +# '/Users/cesareborgia/Dropbox/documents/physics/phd/plots/2017/mem_os/200324_13h_18m/datacards/', +# '/Users/cesareborgia/Dropbox/documents/physics/phd/plots/2017/mem_ss/200324_13h_28m/datacards/', +# '/Users/cesareborgia/Dropbox/documents/physics/phd/plots/2017/eee/200324_13h_49m/datacards/', +# '/Users/cesareborgia/Dropbox/documents/physics/phd/plots/2017/eem_os/200324_13h_36m/datacards/', +# '/Users/cesareborgia/Dropbox/documents/physics/phd/plots/2017/eem_ss/200324_13h_42m/datacards/', + +# '/Users/cesareborgia/Dropbox/documents/physics/phd/plots/2018/mmm/200324_13h_13m/datacards/', +# 
'/Users/cesareborgia/Dropbox/documents/physics/phd/plots/2018/mem_os/200324_13h_22m/datacards/', +# '/Users/cesareborgia/Dropbox/documents/physics/phd/plots/2018/mem_ss/200324_13h_31m/datacards/', +# '/Users/cesareborgia/Dropbox/documents/physics/phd/plots/2018/eee/200324_13h_52m/datacards/', +# '/Users/cesareborgia/Dropbox/documents/physics/phd/plots/2018/eem_os/200324_13h_38m/datacards/', +# '/Users/cesareborgia/Dropbox/documents/physics/phd/plots/2018/eem_ss/200324_13h_45m/datacards/',] + +# '''# 24Mar20 #''' +# R = 'MR_WO_disp_sig_24Mar20' +# folders = [ +# '/Users/cesareborgia/Dropbox/documents/physics/phd/plots/2016/mmm/200324_12h_4m/datacards/', +# '/Users/cesareborgia/Dropbox/documents/physics/phd/plots/2016/mem_os/200324_12h_11m/datacards/', +# '/Users/cesareborgia/Dropbox/documents/physics/phd/plots/2016/mem_ss/200324_12h_20m/datacards/', +# '/Users/cesareborgia/Dropbox/documents/physics/phd/plots/2016/eee/200324_12h_50m/datacards/', +# '/Users/cesareborgia/Dropbox/documents/physics/phd/plots/2016/eem_os/200324_12h_31m/datacards/', +# '/Users/cesareborgia/Dropbox/documents/physics/phd/plots/2016/eem_ss/200324_12h_41m/datacards/', + +# '/Users/cesareborgia/Dropbox/documents/physics/phd/plots/2017/mmm/200324_12h_6m/datacards/', +# '/Users/cesareborgia/Dropbox/documents/physics/phd/plots/2017/mem_os/200324_12h_14m/datacards/', +# '/Users/cesareborgia/Dropbox/documents/physics/phd/plots/2017/mem_ss/200324_12h_23m/datacards/', +# '/Users/cesareborgia/Dropbox/documents/physics/phd/plots/2017/eee/200324_12h_52m/datacards/', +# '/Users/cesareborgia/Dropbox/documents/physics/phd/plots/2017/eem_os/200324_12h_35m/datacards/', +# '/Users/cesareborgia/Dropbox/documents/physics/phd/plots/2017/eem_ss/200324_12h_44m/datacards/', + +# '/Users/cesareborgia/Dropbox/documents/physics/phd/plots/2018/mmm/200324_12h_9m/datacards/', +# '/Users/cesareborgia/Dropbox/documents/physics/phd/plots/2018/mem_os/200324_12h_17m/datacards/', +# 
'/Users/cesareborgia/Dropbox/documents/physics/phd/plots/2018/mem_ss/200324_12h_28m/datacards/', +# '/Users/cesareborgia/Dropbox/documents/physics/phd/plots/2018/eee/200324_12h_55m/datacards/', +# '/Users/cesareborgia/Dropbox/documents/physics/phd/plots/2018/eem_os/200324_12h_38m/datacards/', +# '/Users/cesareborgia/Dropbox/documents/physics/phd/plots/2018/eem_ss/200324_12h_47m/datacards/',] + +# R = 'CR_MRloose_no_svProb' +# folders = ['/Users/cesareborgia/cernbox/plots/plotter/2016/mmm/200310_13h_57m_CR_MRloose_no_svProb/datacards/', + # '/Users/cesareborgia/cernbox/plots/plotter/2016/mem_os/200310_14h_6m_CR_MRloose_no_svProb/datacards/', + # '/Users/cesareborgia/cernbox/plots/plotter/2016/mem_ss/200310_14h_20m_MRloose_no_svProb/datacards/', + # '/Users/cesareborgia/cernbox/plots/plotter/2016/eem_os/200310_14h_35m_MRloose_no_svProb/datacards/', + # '/Users/cesareborgia/cernbox/plots/plotter/2016/eem_ss/200310_14h_47m_MRloose_no_svProb/datacards/', + # '/Users/cesareborgia/cernbox/plots/plotter/2016/eee/200310_14h_58m_MRloose_no_svProb/datacards/', + + # '/Users/cesareborgia/cernbox/plots/plotter/2017/mmm/200310_13h_59m_CR_MRloose_no_svProb/datacards/', + # '/Users/cesareborgia/cernbox/plots/plotter/2017/mem_os/200310_14h_10m_CR_MRloose_no_svProb/datacards/', + # '/Users/cesareborgia/cernbox/plots/plotter/2017/mem_ss/200310_14h_25m_MRloose_no_svProb/datacards/', + # '/Users/cesareborgia/cernbox/plots/plotter/2017/eem_os/200310_14h_39m_MRloose_no_svProb/datacards/', + # '/Users/cesareborgia/cernbox/plots/plotter/2017/eem_ss/200310_14h_51m_MRloose_no_svProb/datacards/', + # '/Users/cesareborgia/cernbox/plots/plotter/2017/eee/200310_15h_2m_MRloose_no_svProb/datacards/', + + # '/Users/cesareborgia/cernbox/plots/plotter/2018/mmm/200310_14h_2m_CR_MRloose_no_svProb/datacards/', + # '/Users/cesareborgia/cernbox/plots/plotter/2018/mem_os/200310_14h_15m_CR_MRloose_no_svProb/datacards/', + # 
'/Users/cesareborgia/cernbox/plots/plotter/2018/mem_ss/200310_14h_31m_MRloose_no_svProb/datacards/', + # '/Users/cesareborgia/cernbox/plots/plotter/2018/eem_os/200310_14h_42m_MRloose_no_svProb/datacards/', + # '/Users/cesareborgia/cernbox/plots/plotter/2018/eem_ss/200310_14h_55m_MRloose_no_svProb/datacards/', + # '/Users/cesareborgia/cernbox/plots/plotter/2018/eee/200310_15h_6m_MRloose_no_svProb/datacards/', + # ] + +# R = 'CR_bj_no_svProb' +# folders = ['/Users/cesareborgia/cernbox/plots/plotter/2016/mem_ss/200310_14h_4m_CR_bj_no_svProb/datacards/', + # '/Users/cesareborgia/cernbox/plots/plotter/2016/mem_os/200310_13h_50m_CR_bj_no_svProb/datacards/', + # '/Users/cesareborgia/cernbox/plots/plotter/2016/mmm/200310_13h_44m_CR_bj_no_svProb/datacards/', + # '/Users/cesareborgia/cernbox/plots/plotter/2016/eem_os/200310_14h_19m_CR_bj_no_svProb/datacards/', + # '/Users/cesareborgia/cernbox/plots/plotter/2016/eem_ss/200310_14h_30m_CR_bj_no_svProb/datacards/', + # '/Users/cesareborgia/cernbox/plots/plotter/2016/eee/200310_14h_41m_CR_bj_no_svProb/datacards/', + + # '/Users/cesareborgia/cernbox/plots/plotter/2017/mmm/200310_13h_46m_CR_bj_no_svProb/datacards/', + # '/Users/cesareborgia/cernbox/plots/plotter/2017/mem_os/200310_13h_54m_CR_bj_no_svProb/datacards/', + # '/Users/cesareborgia/cernbox/plots/plotter/2017/mem_ss/200310_14h_9m_CR_bj_no_svProb/datacards/', + # '/Users/cesareborgia/cernbox/plots/plotter/2017/eem_os/200310_14h_22m_CR_bj_no_svProb/datacards/', + # '/Users/cesareborgia/cernbox/plots/plotter/2017/eem_ss/200310_14h_34m_CR_bj_no_svProb/datacards/', + # '/Users/cesareborgia/cernbox/plots/plotter/2017/eee/200310_14h_47m_CR_bj_no_svProb/datacards/', + + # '/Users/cesareborgia/cernbox/plots/plotter/2018/mmm/200310_13h_48m_CR_bj_no_svProb/datacards/', + # '/Users/cesareborgia/cernbox/plots/plotter/2018/mem_os/200310_13h_58m_CR_bj_no_svProb/datacards/', + # '/Users/cesareborgia/cernbox/plots/plotter/2018/mem_ss/200310_14h_14m_CR_bj_no_svProb/datacards/', + # 
'/Users/cesareborgia/cernbox/plots/plotter/2018/eem_os/200310_14h_26m_CR_bj_no_svProb/datacards/', + # '/Users/cesareborgia/cernbox/plots/plotter/2018/eem_ss/200310_14h_38m_CR_bj_no_svProb/datacards/', + # '/Users/cesareborgia/cernbox/plots/plotter/2018/eee/200310_14h_52m_CR_bj_no_svProb/datacards/', + # ] + +# '''# 25Feb20 #''' +# R = 'MR_with_disp_sig' # +# folders = glob('/Users/cesareborgia/cernbox/plots/plotter/2018/*/200225_16h_*/datacards/') # 2018 WITH disp_sig +# folders += glob('/Users/cesareborgia/cernbox/plots/plotter/2017/*/200225_15h_*/datacards/') # 2017 WITH disp_sig (training: /all_channels_200213_15h_24m/) +# folders += glob('/Users/cesareborgia/cernbox/plots/plotter/2016/*/200225_16h_7m/datacards/') # 2016 WITH disp_sig +# folders += glob('/Users/cesareborgia/cernbox/plots/plotter/2016/*/200225_16h_8m/datacards/') # 2016 WITH disp_sig +# folders += glob('/Users/cesareborgia/cernbox/plots/plotter/2016/*/200225_16h_1*/datacards/') # 2016 WITH disp_sig + +# '''# 25Feb20 #''' +# R = 'MR_WO_disp_sig' +# folders = glob('/Users/cesareborgia/cernbox/plots/plotter/2018/*/200225_15h_*/datacards/') # 2018 W/O disp_sig +# folders += glob('/Users/cesareborgia/cernbox/plots/plotter/2017/*/200225_14h_*/datacards/') # 2017 W/O disp_sig +# folders += glob('/Users/cesareborgia/cernbox/plots/plotter/2016/*/200225_16h_2*/datacards/') # 2016 W/O disp_sig +# folders += glob('/Users/cesareborgia/cernbox/plots/plotter/2016/*/200225_16h_3*/datacards/') # 2016 W/O disp_sig + +signal = None +if 'CR' in R or 'MR' in R: + signal = False +if 'SR' in R: + signal = True +assert signal != None, 'signal not defined' + +files = OrderedDict() + +for f in folders: + f_yr = f.split('/')[-5] + f_ch = f.split('/')[-4] + if 'txt' in f: f = f.replace(f.split('/')[-1],'') + try: files[f_yr][f_ch] = f + except: + files[f_yr] = OrderedDict() + files[f_yr][f_ch] = f + +yields = OrderedDict() +yields_err = OrderedDict() + +for yr in years: + yields[yr] = OrderedDict() + yields_err[yr] = 
OrderedDict() + for ch in chs: + yields[yr][ch] = OrderedDict() + yields_err[yr][ch] = OrderedDict() + for disp in disps: + yields[yr][ch][disp] = OrderedDict() + yields_err[yr][ch][disp] = OrderedDict() + for sig in sigs: + yields[yr][ch][disp][sig] = OrderedDict() + yields_err[yr][ch][disp][sig] = OrderedDict() + + +for yr in files.keys(): + for ch in files[yr].keys(): + f_in = OrderedDict() + for disp in disps: + f_in[disp] = rt.TFile(files[yr][ch] + 'datacard_hnl_m_12_lxy_{disp}.root'.format(disp = disp)) + for sig in sigs: + # if ch[0] == 'm' and '2p3' in sig: continue #muon channels don't have the m=8,v2=2.3e-6 signal + h_sig = f_in[disp].Get(sig) + h_nonp = f_in[disp].Get(nonp) + h_prmp = f_in[disp].Get(prmp) + h_data = f_in[disp].Get(data) + + if signal: + try: h_sig.GetBinContent(1) + except: + continue + set_trace() + + yields[yr][ch][disp][sig] = OrderedDict() + yields_err[yr][ch][disp][sig] = OrderedDict() + yields[yr][ch][disp][data] = OrderedDict() + yields_err[yr][ch][disp][data] = OrderedDict() + yields[yr][ch][disp][nonp] = OrderedDict() + yields_err[yr][ch][disp][nonp] = OrderedDict() + yields[yr][ch][disp][prmp] = OrderedDict() + yields_err[yr][ch][disp][prmp] = OrderedDict() + + if signal: + yields[yr][ch][disp][sig]['lo'] = h_sig.GetBinContent(1) + yields[yr][ch][disp][sig]['hi'] = h_sig.GetBinContent(2) + yields_err[yr][ch][disp][sig]['lo'] = h_sig.GetBinError(1) + yields_err[yr][ch][disp][sig]['hi'] = h_sig.GetBinError(2) + + if not signal: + yields[yr][ch][disp][data]['lo'] = h_data.GetBinContent(1) + yields[yr][ch][disp][data]['hi'] = h_data.GetBinContent(2) + yields_err[yr][ch][disp][data]['lo'] = h_data.GetBinError(1) + yields_err[yr][ch][disp][data]['hi'] = h_data.GetBinError(2) + + yields[yr][ch][disp][nonp]['lo'] = h_nonp.GetBinContent(1) + yields[yr][ch][disp][nonp]['hi'] = h_nonp.GetBinContent(2) + yields_err[yr][ch][disp][nonp]['lo'] = h_nonp.GetBinError(1) + yields_err[yr][ch][disp][nonp]['hi'] = h_nonp.GetBinError(2) + + 
yields[yr][ch][disp][prmp]['lo'] = h_prmp.GetBinContent(1) + yields[yr][ch][disp][prmp]['hi'] = h_prmp.GetBinContent(2) + yields_err[yr][ch][disp][prmp]['lo'] = h_prmp.GetBinError(1) + yields_err[yr][ch][disp][prmp]['hi'] = h_prmp.GetBinError(2) + +bins = np.arange(0.,25,1) + +# fill data: xxx:lo,hi; xxy_os:lo,hi; xxy_ss:lo,hi +# SetBinX start with 1! (0 is underflow) +for yr in years: + + h_data = Hist(bins, title='data', markersize=1, legendstyle='LEP', name='data') + h_nonp = Hist(bins, title='non-prompt', markersize=0, legendstyle='F', name='non-prompt') + h_prmp = Hist(bins, title='prompt', markersize=0, legendstyle='F', name='prompt') + + h_prmp.fillstyle = 'solid' + h_prmp.fillcolor = 'steelblue' + h_prmp.linecolor = 'steelblue' + h_prmp.linewidth = 0 + + h_nonp.fillstyle = 'solid' + h_nonp.fillcolor = 'skyblue' + h_nonp.linecolor = 'skyblue' + h_nonp.linewidth = 0 + + for l0 in ['e','mu']: + if l0 == 'e': chs = ['eee', 'eem_os', 'eem_ss'] + if l0 == 'mu': chs = ['mmm', 'mem_os', 'mem_ss'] + i = 0 + for ch in chs: + for m_bin in ['lo','hi']: + for disp in disps: + h_data.SetBinContent(i+1, yields[yr][ch][disp][data][m_bin]) + h_prmp.SetBinContent(i+1, yields[yr][ch][disp][prmp][m_bin]) + h_nonp.SetBinContent(i+1, yields[yr][ch][disp][nonp][m_bin]) + + h_data.SetBinError(i+1, yields_err[yr][ch][disp][data][m_bin]) + h_prmp.SetBinError(i+1, yields_err[yr][ch][disp][prmp][m_bin]) + h_nonp.SetBinError(i+1, yields_err[yr][ch][disp][nonp][m_bin]) + + i+=1 + + stack = HistStack([h_nonp, h_prmp], drawstyle='HIST', title='') + + labels = ['0-0.5','0.5-1.5','1.5-4','>4','0-0.5','0.5-1.5','1.5-4','>4', + '0-0.5','0.5-1.5','1.5-4','>4','0-0.5','0.5-1.5','1.5-4','>4', + '0-0.5','0.5-1.5','1.5-4','>4','0-0.5','0.5-1.5','1.5-4','>4'] + + hist_error = stack.sum #sum([all_exp_prompt, all_exp_nonprompt]) + hist_error.drawstyle = 'E2' + hist_error.fillstyle = '/' + hist_error.color = 'gray' + hist_error.title = 'stat. unc.' 
+ hist_error.legendstyle = 'F' + + ratio_exp_error = Hist(bins) + ratio_data = Hist(bins) + for ibin in hist_error.bins_range(): + ratio_exp_error.set_bin_content(ibin, 1.) + ratio_exp_error.set_bin_error (ibin, hist_error.get_bin_error(ibin) / hist_error.get_bin_content(ibin) if hist_error.get_bin_content(ibin)!=0. else 0.) + ratio_data.set_bin_content (ibin, h_data.get_bin_content(ibin) / hist_error.get_bin_content(ibin) if hist_error.get_bin_content(ibin)!=0. else 0.) + ratio_data.set_bin_error (ibin, h_data.get_bin_error(ibin) / hist_error.get_bin_content(ibin) if hist_error.get_bin_content(ibin)!=0. else 0.) + + ratio_data.drawstyle = 'EP' + ratio_data.title = '' + + ratio_exp_error.drawstyle = 'E2' + ratio_exp_error.markersize = 0 + ratio_exp_error.title = '' + ratio_exp_error.fillstyle = '/' + ratio_exp_error.color = 'gray' + + can = Canvas(width=700,height=700) + + can.cd(); pad_plt = Pad(0.,0.25,1.,1.); pad_plt.Draw() + can.cd(); pad_tio = Pad(0.,0.,1.,0.25); pad_tio.Draw() + + pad_plt.SetTicks(True) + pad_plt.SetBottomMargin(0.) + pad_plt.SetLeftMargin(0.15) + pad_plt.SetRightMargin(0.15) + + pad_tio.SetLeftMargin(0.15) + pad_tio.SetRightMargin(0.15) + pad_tio.SetTopMargin(0.) 
+ pad_tio.SetGridy() + pad_tio.SetBottomMargin(0.3) + + h_data.GetXaxis().LabelsOption('h') + h_data.GetXaxis().SetTitle ('#Delta (PV-SV)_2D (cm)') + h_data.GetXaxis().SetTitleSize(0.06) + h_data.GetXaxis().SetTitleOffset(1.1) + h_data.GetXaxis().SetTitleFont(132) + +# bin labels + for i, lbl in enumerate(labels): + h_data.GetXaxis().SetBinLabel(i+1, lbl) + + things_to_plot = [stack, h_data, hist_error] + + yaxis_max = 1.45 * max([ithing.max() for ithing in things_to_plot]) + + for ithing in things_to_plot: + ithing.SetMaximum(yaxis_max) + + draw(things_to_plot, xtitle='', ytitle='Counts', pad=pad_plt, logy=False) + + high_flav = yaxis_max / 1.125 + high_mll = high_flav * 0.85 + left_mll = 0.97 + + pad_plt.cd() + + line0 = rt.TLine(8, 0, 8, high_flav) + line0.SetLineWidth(2) + line0.Draw('same') + + line1 = rt.TLine(16, 0, 16, high_flav) + line1.SetLineWidth(2) + line1.Draw('same') + + line2 = rt.TLine(4, high_mll*1.025, 4, 0.1) + line2.SetLineStyle(2) + line2.SetLineWidth(1) + line2.Draw('same') + + line3 = rt.TLine(12, high_mll*1.025, 12, 0.1) + line3.SetLineStyle(2) + line3.SetLineWidth(1) + line3.Draw('same') + + line4 = rt.TLine(20, high_mll*1.025, 20, 0.1) + line4.SetLineStyle(2) + line4.SetLineWidth(1) + line4.Draw('same') + + tex0 = rt.TLatex(0.8748578,17546.74,'') + tex0 = rt.TLatex(left_mll, high_mll, 'M_{ll} < 4 GeV') + tex0.SetTextSize(0.02) + tex0.SetTextFont(42) + tex0.SetLineWidth(2) + tex0.Draw('same') + + tex1 = rt.TLatex(left_mll+4, high_mll, 'M_{ll} > 4 GeV') + tex1.SetTextSize(0.02) + tex1.SetTextFont(42) + tex1.SetLineWidth(2) + tex1.Draw('same') + + tex2 = rt.TLatex(left_mll+8, high_mll, 'M_{ll} < 4 GeV') + tex2.SetTextSize(0.02) + tex2.SetTextFont(42) + tex2.SetLineWidth(2) + tex2.Draw('same') + + tex2 = rt.TLatex(left_mll+12, high_mll, 'M_{ll} > 4 GeV') + tex2.SetTextSize(0.02) + tex2.SetTextFont(42) + tex2.SetLineWidth(2) + tex2.Draw('same') + + tex3 = rt.TLatex(left_mll+16, high_mll, 'M_{ll} < 4 GeV') + tex3.SetTextSize(0.02) + 
tex3.SetTextFont(42) + tex3.SetLineWidth(2) + tex3.Draw('same') + + tex4 = rt.TLatex(left_mll+20, high_mll, 'M_{ll} > 4 GeV') + tex4.SetTextSize(0.02) + tex4.SetTextFont(42) + tex4.SetLineWidth(2) + tex4.Draw('same') + + mmm = '\mu\mu\mu' + eee = 'eee' + mem_os = '\mu^{\pm}\mu^{\mp}e' + mem_ss = '\mu^{\pm}\mu^{\pm}e' + eem_os = 'e^{\pm}e^{\mp}\mu' + eem_ss = 'e^{\pm}e^{\pm}\mu' + + if l0 == 'e': + state0 = eee; state1 = eem_os; state2 = eem_ss + if l0 == 'mu': + state0 = mmm; state1 = mem_os; state2 = mem_ss + + state_0 = rt.TLatex(left_mll+2.05, high_flav*.925, state0) + state_0.SetTextFont(43) + state_0.SetTextSize(25) + state_0.Draw('same') + + state_1 = rt.TLatex(left_mll+9.5, high_flav*.925, state1) + state_1.SetTextFont(43) + state_1.SetTextSize(25) + state_1.Draw('same') + + state_2 = rt.TLatex(left_mll+17.5, high_flav*.925, state2) + state_2.SetTextFont(43) + state_2.SetTextSize(25) + state_2.Draw('same') + + year = int(yr) + if year == 2016: + lumi_text = "2016, L = 35.87 fb^{-1}" + elif year == 2017: + lumi_text = "2017, L = 41.53 fb^{-1}" + elif year == 2018: + lumi_text = "2018, L = 59.74 fb^{-1}" + CMS_lumi(pad_plt, 4, 0, lumi_13TeV = lumi_text) + + legend = Legend([h_data, stack, hist_error], pad=pad_plt, leftmargin=0., rightmargin=0., topmargin=0., textfont=42, textsize=0.03, entrysep=0.01, entryheight=0.04) + legend.SetBorderSize(0) + legend.x1 = 0.25 + legend.y1 = 0.8 + legend.x2 = 0.85 + legend.y2 = 0.85 + legend.SetFillColor(0) + legend.SetNColumns(4) + legend.Draw('same') + + for ithing in [ratio_data, ratio_exp_error]: + ithing.xaxis.set_label_size(ithing.xaxis.get_label_size() * 3.) # the scale should match that of the main/ratio pad size ratio + ithing.yaxis.set_label_size(ithing.yaxis.get_label_size() * 3.) # the scale should match that of the main/ratio pad size ratio + ithing.xaxis.set_title_size(ithing.xaxis.get_title_size() * 3.) 
# the scale should match that of the main/ratio pad size ratio + ithing.yaxis.set_title_size(ithing.yaxis.get_title_size() * 3.) # the scale should match that of the main/ratio pad size ratio + ithing.yaxis.set_ndivisions(405) + ithing.yaxis.set_title_offset(0.4) + # ithing.GetXaxis().LabelsOption('vu') + ithing.GetXaxis().LabelsOption('h') + + for i, lbl in enumerate(labels): + ithing.GetXaxis().SetBinLabel(i+1, lbl) + + draw([ratio_data, ratio_exp_error], xtitle='#Delta_{2D}(PV, SV) (cm)', ytitle='obs/exp', pad=pad_tio, logy=False, ylimits=(0.5, 1.5)) + + line = rt.TLine(min(bins), 1., max(bins), 1.) + line.SetLineColor(rt.kBlack) + line.SetLineWidth(1) + pad_tio.cd() + line.Draw('same') + + can.Modified(); can.Update() + can.SaveAs(out_dir + 'all_%s_ch_%s_%s.pdf' %(l0, yr, R)) + can.SaveAs(out_dir + 'all_%s_ch_%s_%s.root' %(l0, yr, R)) + + +''' +## 2016 ## +mmm +- mmm/200306_15h_8m_CR_bj_with_svProb0p001 +- mmm/200310_13h_44m_CR_bj_no_svProb +- mmm/200310_13h_57m_CR_MRloose_no_svProb +mem_os +- mem_os/200306_15h_13m_CR_bj_with_svProb0p001 +- mem_os/200310_13h_50m_CR_bj_no_svProb +- mem_os/200310_14h_6m_CR_MRloose_no_svProb +mem_ss +- mem_ss/200306_15h_23m_CR_bj_with_svProb0p001 +- mem_ss/200310_14h_4m_CR_bj_no_svProb +- mem_ss/200310_14h_20m_MRloose_no_svProb +eem_os +- eem_os/200306_15h_33m_CR_bj_with_svProb0p001 +- eem_os/200310_14h_19m_CR_bj_no_svProb +- eem_os/200310_14h_35m_MRloose_no_svProb +eem_ss +- eem_ss/200306_15h_40m_CR_bj_with_svProb0p001 +- eem_ss/200310_14h_30m_CR_bj_no_svProb +- eem_ss/200310_14h_47m_MRloose_no_svProb +eee +- eee/200306_15h_47m_CR_bj_with_svProb0p001 +- eee/200310_14h_41m_CR_bj_no_svProb +- eee/200310_14h_58m_MRloose_no_svProb +########## + +## 2017 ## +mmm +- mmm/200306_15h_10m_CR_bj_with_svProb0p001 +- mmm/200310_13h_46m_CR_bj_no_svProb +- mmm/200310_13h_59m_CR_MRloose_no_svProb +mem_os +- mem_os/200306_15h_16m_CR_bj_with_svProb0p001 +- mem_os/200310_13h_54m_CR_bj_no_svProb +- mem_os/200310_14h_10m_CR_MRloose_no_svProb +mem_ss 
+- mem_ss/200306_15h_26m_CR_bj_with_svProb0p001 +- mem_ss/200310_14h_9m_CR_bj_no_svProb +- mem_ss/200310_14h_25m _MRloose_no_svProb +eem_os +- eem_os/200306_15h_35m_CR_bj_with_svProb0p001 +- eem_os/200310_14h_22m_CR_bj_no_svProb +- eem_os/200310_14h_39m_MRloose_no_svProb +eem_ss +- eem_ss/200306_15h_42m_CR_bj_with_svProb0p001 +- eem_ss/200310_14h_34m_CR_bj_no_svProb +- eem_ss/200310_14h_51m_MRloose_no_svProb +eee +- eee/200306_15h_50m_CR_bj_with_svProb0p001 +- eee/200310_14h_47m_CR_bj_no_svProb +- eee/200310_15h_2m_MRloose_no_svProb +########## + +## 2018 ## +mmm +- mmm/200306_15h_12m_CR_bj_with_svProb0p001 +- mmm/200310_13h_48m_CR_bj_no_svProb +- mmm/200310_14h_2m_CR_MRloose_no_svProb +mem_os +- mem_os/200306_15h_20m _CR_bj_with_svProb0p001 +- mem_os/200310_13h_58m _CR_bj_no_svProb +- mem_os/200310_14h_15m_CR_MRloose_no_svProb +mem_ss +- mem_ss/200306_15h_29m_CR_bj_with_svProb0p001 +- mem_ss/200310_14h_14m_CR_bj_no_svProb +- mem_ss/200310_14h_31m_MRloose_no_svProb +eem_os +- eem_os/200306_15h_37m_CR_bj_with_svProb0p001 +- eem_os/200310_14h_26m_CR_bj_no_svProb +- eem_os/200310_14h_42m_MRloose_no_svProb +eem_ss +- eem_ss/200306_15h_45m_CR_bj_with_svProb0p001 +- eem_ss/200310_14h_38m_CR_bj_no_svProb +- eem_ss/200310_14h_55m_MRloose_no_svProb +eee +- eee/200306_15h_53m_CR_bj_with_svProb0p001 +- eee/200310_14h_52m_CR_bj_no_svProb +- eee/200310_15h_6m_MRloose_no_svProb +''' diff --git a/overview_plots_AN_from_M.py b/overview_plots_AN_from_M.py new file mode 100644 index 0000000..3b8c4aa --- /dev/null +++ b/overview_plots_AN_from_M.py @@ -0,0 +1,257 @@ +#from martina + + +TH1D *HistDiv(TH1D *h1, TH1D *h2, const bool abs) + TH1D *h1c = (TH1D*) h1.Clone() + TH1D *h2c = (TH1D*) h2.Clone() + if(!abs) + h1c.Scale(1/h1c.Integral(), 'width') + h2c.Scale(1/h2c.Integral(), 'width') + + h1c.Divide(h2c) + + +void yieldOrder(TH1D**& hists, unsigned* histInd, const unsigned nHist) + unsigned ordered[nHist] + for(unsigned h = 0 h < nHist ++h) ordered[h] = 999 + for(unsigned h = 0 h < 
nHist ++h) + #unsigned maxH = 999 + double maxYield = -9999. + for(unsigned k = 0 k maxYield) + maxYield = yield + #maxH = k + + + + #ordered[h] = maxH + ordered[h] = h + + + TH1D* histC[nHist] + for(unsigned h = 0 h < nHist ++h) + histC[h] = (TH1D*) hists[ordered[h]].Clone() + histInd[h] = ordered[h] + + for(unsigned h = 0 h < nHist ++h) + hists[h] = (TH1D*) histC[h].Clone() + + + + + #Order background histograms in terms of yields + unsigned histI[nHist] + yieldOrder(bkg, histI, nHist) + #Calculate total Bkg yields + TH1D* bkgTot = (TH1D*) bkg[0].Clone() + for(unsigned int i = 1 i < nHist ++i) + bkgTot.Add(bkg[i]) + +# create stack containing all backgrounds +bkgStack = rt.THStack('bkgStack', 'bkgStack') +for(int effsam = nHist -1 effsam > -1 --effsam): + bkg[effsam].SetLineColor(colors[effsam]) + bkg[effsam].SetMarkerColor(colors[effsam]) + bkg[effsam].SetLineWidth(2) + + bkg[effsam].SetFillColor(colors[effsam]) + bkg[effsam].SetLineWidth(1) + bkg[effsam].SetLineColor(colors[effsam]) + #if (names[histI[effsam] + 1 + nSig] == 'nonprompt DF' ) bkg[effsam].SetFillStyle(3020) + bkgStack.Add(bkg[effsam], 'f') + + +# legend for data and all backgrounds +legend = rt.TLegend(0.16,0.75,0.92,0.87)#,NULL,'brNDC') +legend.SetFillStyle(0) + + const int signal_out= 14 + int list_signal_out[signal_out] = [1,3,4,5,6,7,9,11,13,14,15,16,17,19] + + for(int effsam = nHist - 1 effsam > -1 --effsam) +legend.SetTextFont(42) +legend.AddEntry(bkg[effsam], 'nonprompt') +legend.AddEntry(bkg[effsam], 'prompt') +legend.SetNColumns(4) + + +labels_sr=['0-0.5','0.5-1.5','1.5-4','>4','0-0.5','0.5-1.5','1.5-4','>4', + '0-0.5','0.5-1.5','1.5-4','>4','0-0.5','0.5-1.5','1.5-4','>4', + '0-0.5','0.5-1.5','1.5-4','>4','0-0.5','0.5-1.5','1.5-4','>4'] + + + +data.SetStats(0) +data.GetXaxis().LabelsOption('vu') +data.GetXaxis().SetTitle ('#Delta (PV-SV)_2D (cm)') +data.GetXaxis().SetTitleSize(0.06) +data.GetXaxis().SetTitleOffset(1.1) +data.GetXaxis().SetTitleFont(132) + +# bin labels +for (int i =0 i<24 
i++) + data.GetXaxis().SetBinLabel(i+1, labels_sr[i]) + +data.GetXaxis().SetLabelSize(0.045) +data.GetXaxis().SetLabelOffset(0.005) +data.GetXaxis().SetLabelSize(0.045) +data.GetXaxis().SetLabelOffset(0.005) + + + +# canvas and pads +width = 800 +height = 500 + +c = rt.TCanvas(name_histo,'',width*(1-xPad),height) #1000/500 +c.cd() + +# data and MC yields in first pad +p1 = rt.TPad(name_histo,'',0,xPad,1,1) +p1.Draw() +p1.cd() +p1.SetTopMargin(0.1) #0.1*(width*(1-xPad)/650) FIXME CHANGE THIS BACK +p1.SetBottomMargin(0.) +bkgTot.SetFillStyle(3005) +bkgTot.SetFillColor(kGray+2) +bkgTot.SetMarkerStyle(1) +data.SetMinimum(0.1) +bkgTot.SetMinimum(0.1) +bkgStack.SetMinimum(0.1) + +data.GetXaxis().SetTitleFont(42) +data.GetYaxis().SetTitleFont(42) +data.GetXaxis().SetLabelSize(0.1) +data.GetXaxis().SetTitleSize(0.07) +data.GetYaxis().SetLabelSize(0.04) +data.GetYaxis().SetTitleSize(0.07) + +data.SetMaximum(data.GetBinContent(data.GetMaximumBin())*1.5) +data.GetYaxis().SetRangeUser(0.1,data.GetBinContent(data.GetMaximumBin())*1.5) + + +data.SetMarkerStyle(20) +data.SetMarkerColor(1) +data.SetLineColor(1) +data.Draw('pe') #The range used is now that of the data histogra +bkgStack.Draw('hist same') +data.Draw('pe same') +legend.AddEntry(data, 'data') +legend.Draw('same') +bkgTot.Draw('e2same') + + +high_flav=(data.GetBinContent(data.GetMaximumBin())*1.2)*2 +high_mll=data.GetBinContent(data.GetMaximumBin())*1.05 +left_mll=0.97 + + +line0 = rt.TLine(8.5,0.07,8.5, high_flav/2) +line0.SetLineWidth(2) +line0.Draw() + +line1 = rt.TLine(16.5,0.07,16.5, high_flav/2) +line1.SetLineWidth(2) +line1.Draw() + +line2 = rt.TLine(4.5,high_mll,4.5,0.1) +#ci = TColor::GetColor('#ff6600') +line2.SetLineStyle(2) +line2.SetLineWidth(1) +line2.Draw() +line3 = rt.TLine(12.5,high_mll,12.5,0.1) +#ci = TColor::GetColor('#ff6600') +line3.SetLineStyle(2) +line3.SetLineWidth(1) +line3.Draw() +line4 = rt.TLine(20.5,high_mll,20.5,0.1) +#ci = TColor::GetColor('#ff6600') +line4.SetLineStyle(2) 
+line4.SetLineWidth(1) +line4.Draw() + + +tex0 = rt.TLatex(0.8748578,17546.74,'') +tex0 = rt.TLatex(left_mll,high_mll,'M_ll < 4 GeV') +tex0.SetTextSize(0.03) +tex0.SetTextFont(42) +tex0.SetLineWidth(2) +tex0.Draw() +tex1 = rt.TLatex(left_mll+4,high_mll,'M_ll > 4 GeV') +tex1.SetTextSize(0.03) +tex1.SetTextFont(42) +tex1.SetLineWidth(2) +tex1.Draw() + +tex2 = rt.TLatex(left_mll+8,high_mll,'M_ll < 4 GeV') +tex2.SetTextSize(0.03) +tex2.SetTextFont(42) +tex2.SetLineWidth(2) +tex2.Draw() +tex2 = rt.TLatex(left_mll+12,high_mll,'M_ll > 4 GeV') +tex2.SetTextSize(0.03) +tex2.SetTextFont(42) +tex2.SetLineWidth(2) +tex2.Draw() + +tex3 = rt.TLatex(left_mll+16,high_mll,'M_ll < 4 GeV') +tex3.SetTextSize(0.03) +tex3.SetTextFont(42) +tex3.SetLineWidth(2) +tex3.Draw() +tex4 = rt.TLatex(left_mll+20,high_mll,'M_ll > 4 GeV') +tex4.SetTextSize(0.03) +tex4.SetTextFont(42) +tex4.SetLineWidth(2) +tex4.Draw() + + +# mu channels + +tex = rt.TLatex(3.857013,high_flav/2,'#mu#mu#mu') +tex.SetTextColor(1) +tex.SetTextSize(0.06) +tex.SetLineWidth(2) +tex.Draw() +tex = rt.TLatex(10.857013,high_flav/2,'#mu^#pm#mu^#mpe') +tex.SetTextColor(1) +tex.SetTextSize(0.06) +tex.SetLineWidth(2) +tex.Draw() +tex = rt.TLatex(17.857013,high_flav/2,'#mu^#pm#mu^#pme') +tex.SetTextColor(1) +tex.SetTextSize(0.06) +tex.SetLineWidth(2) +tex.Draw() + +# e channels +tex = rt.TLatex(3.857013,high_flav/2,'eee') +tex.SetTextColor(1) +tex.SetTextSize(0.06) +tex.SetLineWidth(2) +tex.Draw() +tex = rt.TLatex(10,high_flav/2,'e^#pme^#mp#mu') +tex.SetTextColor(1) +tex.SetTextSize(0.06) +tex.SetLineWidth(2) +tex.Draw() +tex = rt.TLatex(17.857013,high_flav/2,'e^#pme^#pm#mu') +tex.SetTextColor(1) +tex.SetTextSize(0.06) +tex.SetLineWidth(2) +tex.Draw() + + + + + + diff --git a/plotter/cfg/plot_all_CR_MRloose.cfg.py b/plotter/cfg/plot_all_CR_MRloose.cfg.py new file mode 100644 index 0000000..8178784 --- /dev/null +++ b/plotter/cfg/plot_all_CR_MRloose.cfg.py @@ -0,0 +1,226 @@ +from os import environ as env +from collections import 
OrderedDict +from plotter.plotter import Plotter +from plotter.selections import Selections +from plotter.utils import set_paths, save_plotter_and_selections +from re import sub + +cuts = OrderedDict() +selection = OrderedDict() + +for ch in ['mmm', 'mem', 'eem', 'eee']: + cuts[ch] = Selections(ch) + +selection['eee'] = [ + cuts['eee'].selections['pt_iso'], + cuts['eee'].selections['baseline'], + cuts['eee'].selections['vetoes_12_OS'], + cuts['eee'].selections['vetoes_01_OS'], + cuts['eee'].selections['vetoes_02_OS'], + cuts['eee'].selections['sideband'], + + 'l1_pt>7', + 'l2_pt>7', +# 'hnl_2d_disp_sig>20', + 'hnl_pt_12>15', + 'sv_cos>0.90', + # 'sv_prob>0.001', + 'l0_pt>32', + 'abs(l1_dz)<10', + 'abs(l2_dz)<10', + 'l0_reliso_rho_03<0.1', +] + +selection['eem_os'] = [ + cuts['eem'].selections['pt_iso'], + cuts['eem'].selections['baseline'], + cuts['eem'].selections['vetoes_01_OS'], + 'l0_q!=l1_q', + cuts['eem'].selections['sideband'], + + 'l1_pt>7', +# 'hnl_2d_disp_sig>20', + 'hnl_pt_12>15', + 'sv_cos>0.90', + # 'sv_prob>0.001', + 'l0_reliso_rho_03<0.1', + 'l0_pt>32', + 'abs(l1_dz)<10', + 'abs(l2_dz)<10', +] + +selection['eem_ss'] = [ + cuts['eem'].selections['pt_iso'], + cuts['eem'].selections['baseline'], + 'l0_q==l1_q', + cuts['eem'].selections['sideband'], + + 'l1_pt>7', +# 'hnl_2d_disp_sig>20', + 'hnl_pt_12>15', + 'sv_cos>0.90', + # 'sv_prob>0.001', + 'l0_reliso_rho_03<0.1', + 'l0_pt>32', + 'abs(l1_dz)<10', + 'abs(l2_dz)<10', +] + +selection['mem_os'] = [ + cuts['mem'].selections['pt_iso'], + cuts['mem'].selections['baseline'], + cuts['mem'].selections['vetoes_02_OS'], + 'l0_q!=l2_q', + cuts['mem'].selections['sideband'], + + 'l1_pt>7', +# 'hnl_2d_disp_sig>20', + 'hnl_pt_12>15', + 'sv_cos>0.90', + # 'sv_prob>0.001', + 'l0_reliso_rho_03<0.1', + 'l0_pt>25', + 'abs(l1_dz)<10', + 'abs(l2_dz)<10', +] + +selection['mem_ss'] = [ + cuts['mem'].selections['pt_iso'], + cuts['mem'].selections['baseline'], + 'l0_q==l2_q', + cuts['mem'].selections['sideband'], + + 
'l1_pt>7', +# 'hnl_2d_disp_sig>20', + 'hnl_pt_12>15', + 'sv_cos>0.90', + # 'sv_prob>0.001', + 'l0_reliso_rho_03<0.1', + 'l0_pt>25', + 'abs(l1_dz)<10', + 'abs(l2_dz)<10', +] + +selection['mmm'] = [ + cuts['mmm'].selections['pt_iso'], + cuts['mmm'].selections['baseline'], + cuts['mmm'].selections['vetoes_12_OS'], + cuts['mmm'].selections['vetoes_01_OS'], + cuts['mmm'].selections['vetoes_02_OS'], + cuts['mmm'].selections['sideband'], + + 'hnl_pt_12>15', + 'sv_cos>0.90', + # 'sv_prob>0.001', + 'l0_reliso_rho_03<0.1', + 'l0_pt>25', + 'abs(l1_dz)<10', + 'abs(l2_dz)<10', +] + +# extra selection to be applied on variables that don't exist +# in the root tree but they're created for the pandas dataset +# pandas_selection = '(hnl_2d_disp_sig_alt > 20 & sv_covxx > 0 & sv_covyy > 0 & sv_covzz > 0)' # workaround bug w/ negativ sv_cov_ii entries +# pandas_selection = '(hnl_2d_disp_sig_alt > 20) * (sv_covxx > 0 & sv_covyy > 0 & sv_covzz > 0)' # workaround bug w/ negativ sv_cov_ii entries +pandas_selection = '' +# pandas_selection = 'hnl_2d_disp_sig_alt>20' + + +if __name__ == '__main__': + for ch in ['mmm', 'mem_os', 'mem_ss', 'eem_os', 'eem_ss', 'eee']: + # for ch in ['mem_os', 'mem_ss', 'eem_os', 'eem_ss', 'eee']: + + selection_mc = selection[ch] + [cuts[ch[:3]].selections['is_prompt_lepton']] + selection_tight = cuts[ch[:3]].selections_pd['tight'] + + set_paths(ch, 2016) + if ch[0] == 'e': selection_tight = sub('l0_pt > 3.', 'l0_pt > 30', selection_tight) + plotter16 = Plotter( + channel = ch, + year = 2016, + base_dir = env['NTUPLE_DIR'], + post_fix = 'HNLTreeProducer/tree.root', # 'HNLTreeProducer_%s/tree.root' %ch, + selection_data = selection[ch], + selection_mc = selection_mc, + selection_tight = selection_tight, + pandas_selection = pandas_selection, + lumi = 35987., + + # model = env['NN_DIR'] + '/all_[2016]_channels_200309_11h_31m_CR_MRloose_with_svProb0p001/net_model_weighted.h5', # 3yr cmbd, w/o disp_sig + # transformation = env['NN_DIR'] + 
'/all_[2016]_channels_200309_11h_31m_CR_MRloose_with_svProb0p001/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_[2016]_channels_200309_11h_31m_CR_MRloose_with_svProb0p001/input_features.pck', + + model = env['NN_DIR'] + '/all_[2016]_channels_200309_16h_11m_CR_MRloose_no_svProb/net_model_weighted.h5', # 3yr cmbd, w/o disp_sig + transformation = env['NN_DIR'] + '/all_[2016]_channels_200309_16h_11m_CR_MRloose_no_svProb/input_tranformation_weighted.pck', + features = env['NN_DIR'] + '/all_[2016]_channels_200309_16h_11m_CR_MRloose_no_svProb/input_features.pck', + + process_signals = False, # switch off for control regions + mini_signals = False, # process only the signals that you'll plot + plot_signals = False, + blinded = False, + datacards = ['hnl_m_12_lxy_lt_0p5', 'hnl_m_12_lxy_0p5_to_1p5', 'hnl_m_12_lxy_1p5_to_4p0', 'hnl_m_12_lxy_mt_4p0'], # FIXME! improve this to accept wildcards / regex + ) + plotter16.plot() + save_plotter_and_selections(plotter16, selection[ch], selection_mc, selection_tight) + + set_paths(ch, 2017) + if ch[0] == 'e': selection_tight = sub('l0_pt > 3.', 'l0_pt > 35', selection_tight) + plotter17 = Plotter ( + channel = ch, + year = 2017, + base_dir = env['NTUPLE_DIR'], + post_fix = 'HNLTreeProducer/tree.root', # 'HNLTreeProducer_%s/tree.root' %ch, + selection_data = selection[ch], + selection_mc = selection_mc, + selection_tight = selection_tight, + pandas_selection = pandas_selection, + lumi = 41530., + + # model = env['NN_DIR'] + '/all_[2017]_channels_200309_13h_3m_CR_MRloose_with_svProb0p001/net_model_weighted.h5', # 2017 solo + # transformation = env['NN_DIR'] + '/all_[2017]_channels_200309_13h_3m_CR_MRloose_with_svProb0p001/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_[2017]_channels_200309_13h_3m_CR_MRloose_with_svProb0p001/input_features.pck', + + model = env['NN_DIR'] + '/all_[2017]_channels_200309_15h_53m_CR_MRloose_no_svProb/net_model_weighted.h5', # 2017 solo + transformation = 
env['NN_DIR'] + '/all_[2017]_channels_200309_15h_53m_CR_MRloose_no_svProb/input_tranformation_weighted.pck', + features = env['NN_DIR'] + '/all_[2017]_channels_200309_15h_53m_CR_MRloose_no_svProb/input_features.pck', + + process_signals = False, # switch off for control regions + mini_signals = False, # process only the signals that you'll plot + plot_signals = False, + blinded = False, + datacards = ['hnl_m_12_lxy_lt_0p5', 'hnl_m_12_lxy_0p5_to_1p5', 'hnl_m_12_lxy_1p5_to_4p0', 'hnl_m_12_lxy_mt_4p0'], # FIXME! improve this to accept wildcards / regex + ) + plotter17.plot() + save_plotter_and_selections(plotter17, selection[ch], selection_mc, selection_tight) + + set_paths(ch, 2018) + if ch[0] == 'e': selection_tight = sub('l0_pt > 3.', 'l0_pt > 32', selection_tight) + plotter18 = Plotter ( + channel = ch, + year = 2018, + base_dir = env['NTUPLE_DIR'], + post_fix = 'HNLTreeProducer/tree.root', # 'HNLTreeProducer_%s/tree.root' %ch, + selection_data = selection[ch], + selection_mc = selection_mc, + selection_tight = selection_tight, + pandas_selection = pandas_selection, + lumi = 59700., + + # model = env['NN_DIR'] + '/all_[2018]_channels_200309_15h_4m_CR_MRloose_with_svProb0p001/net_model_weighted.h5', # 2018 training w/o disp_sig cut + # transformation = env['NN_DIR'] + '/all_[2018]_channels_200309_15h_4m_CR_MRloose_with_svProb0p001/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_[2018]_channels_200309_15h_4m_CR_MRloose_with_svProb0p001/input_features.pck', + + model = env['NN_DIR'] + '/all_[2018]_channels_200309_15h_33m_CR_MRloose_no_svProb/net_model_weighted.h5', # 2018 training w/o disp_sig cut + transformation = env['NN_DIR'] + '/all_[2018]_channels_200309_15h_33m_CR_MRloose_no_svProb/input_tranformation_weighted.pck', + features = env['NN_DIR'] + '/all_[2018]_channels_200309_15h_33m_CR_MRloose_no_svProb/input_features.pck', + + process_signals = False, # switch off for control regions + mini_signals = False, # process only the signals 
that you'll plot + plot_signals = False, + blinded = False, + datacards = ['hnl_m_12_lxy_lt_0p5', 'hnl_m_12_lxy_0p5_to_1p5', 'hnl_m_12_lxy_1p5_to_4p0', 'hnl_m_12_lxy_mt_4p0'], # FIXME! improve this to accept wildcards / regex + ) + plotter18.plot() + save_plotter_and_selections(plotter18, selection[ch], selection_mc, selection_tight) + + pass diff --git a/plotter/cfg/plot_all_CR_MRnodxy.cfg.py b/plotter/cfg/plot_all_CR_MRnodxy.cfg.py new file mode 100644 index 0000000..b5bd877 --- /dev/null +++ b/plotter/cfg/plot_all_CR_MRnodxy.cfg.py @@ -0,0 +1,288 @@ +from os import environ as env +from collections import OrderedDict +from plotter.plotter import Plotter +from plotter.selections import Selections +from plotter.utils import set_paths, save_plotter_and_selections + +cuts = OrderedDict() +selection = OrderedDict() + +for ch in ['mmm', 'mem', 'eem', 'eee']: + cuts[ch] = Selections(ch) + +selection['eee'] = [ + cuts['eee'].selections['pt_iso'], + cuts['eee'].selections['baseline'], + cuts['eee'].selections['vetoes_12_OS'], + cuts['eee'].selections['vetoes_01_OS'], + cuts['eee'].selections['vetoes_02_OS'], + cuts['eee'].selections['sideband'], + + 'l1_pt>7', + 'l2_pt>7', +# 'hnl_2d_disp_sig>20', + 'hnl_pt_12>15', + 'sv_cos>0.99', + 'sv_prob>0.001', + 'l0_pt>32', + 'abs(l1_dz)<10', + 'abs(l2_dz)<10', + 'l0_reliso_rho_03<0.1', +] + +selection['eem_os'] = [ + cuts['eem'].selections['pt_iso'], + cuts['eem'].selections['baseline'], + cuts['eem'].selections['vetoes_01_OS'], + 'l0_q!=l1_q', + cuts['eem'].selections['sideband'], + + 'l1_pt>7', +# 'hnl_2d_disp_sig>20', + 'hnl_pt_12>15', + 'sv_cos>0.99', + 'sv_prob>0.001', + 'l0_reliso_rho_03<0.1', + 'l0_pt>32', + 'abs(l1_dz)<10', + 'abs(l2_dz)<10', +] + +selection['eem_ss'] = [ + cuts['eem'].selections['pt_iso'], + cuts['eem'].selections['baseline'], + 'l0_q==l1_q', + cuts['eem'].selections['sideband'], + + 'l1_pt>7', +# 'hnl_2d_disp_sig>20', + 'hnl_pt_12>15', + 'sv_cos>0.99', + 'sv_prob>0.001', + 'l0_reliso_rho_03<0.1', + 
'l0_pt>32', + 'abs(l1_dz)<10', + 'abs(l2_dz)<10', +] + +selection['mem_os'] = [ + cuts['mem'].selections['pt_iso'], + cuts['mem'].selections['baseline'], + cuts['mem'].selections['vetoes_02_OS'], + 'l0_q!=l2_q', + cuts['mem'].selections['sideband'], + + 'l1_pt>7', +# 'hnl_2d_disp_sig>20', + 'hnl_pt_12>15', + 'sv_cos>0.99', + 'sv_prob>0.001', + 'l0_reliso_rho_03<0.1', + 'l0_pt>25', + 'abs(l1_dz)<10', + 'abs(l2_dz)<10', +] + +selection['mem_ss'] = [ + cuts['mem'].selections['pt_iso'], + cuts['mem'].selections['baseline'], + 'l0_q==l2_q', + cuts['mem'].selections['sideband'], + + 'l1_pt>7', +# 'hnl_2d_disp_sig>20', + 'hnl_pt_12>15', + 'sv_cos>0.99', + 'sv_prob>0.001', + 'l0_reliso_rho_03<0.1', + 'l0_pt>25', + 'abs(l1_dz)<10', + 'abs(l2_dz)<10', +] + +selection['mmm'] = [ + cuts['mmm'].selections['pt_iso'], + cuts['mmm'].selections['baseline'], + cuts['mmm'].selections['vetoes_12_OS'], + cuts['mmm'].selections['vetoes_01_OS'], + cuts['mmm'].selections['vetoes_02_OS'], + cuts['mmm'].selections['sideband'], + + 'hnl_pt_12>15', + 'sv_cos>0.99', + 'sv_prob>0.001', + 'l0_reliso_rho_03<0.1', + 'l0_pt>25', + 'abs(l1_dz)<10', + 'abs(l2_dz)<10', +] + +# extra selection to be applied on variables that don't exist +# in the root tree but they're created for the pandas dataset +# pandas_selection = '(hnl_2d_disp_sig_alt > 20 & sv_covxx > 0 & sv_covyy > 0 & sv_covzz > 0)' # workaround bug w/ negativ sv_cov_ii entries +# pandas_selection = '(hnl_2d_disp_sig_alt > 20) * (sv_covxx > 0 & sv_covyy > 0 & sv_covzz > 0)' # workaround bug w/ negativ sv_cov_ii entries +pandas_selection = '' +# pandas_selection = 'hnl_2d_disp_sig_alt>20' + + +if __name__ == '__main__': + for ch in ['mmm', 'mem_os', 'mem_ss', 'eem_os', 'eem_ss', 'eee']: + # for ch in ['mem_os', 'mem_ss', 'eem_os', 'eem_ss', 'eee']: + + selection_mc = selection[ch] + [cuts[ch[:3]].selections['is_prompt_lepton']] + selection_tight = cuts[ch[:3]].selections_pd['tight'] + + set_paths(ch, 2016) + if ch[0] == 'e': selection_tight = 
sub('l0_pt > 3.', 'l0_pt > 30', selection_tight) + plotter16 = Plotter( + channel = ch, + year = 2016, + base_dir = env['NTUPLE_DIR'], + post_fix = 'HNLTreeProducer/tree.root', # 'HNLTreeProducer_%s/tree.root' %ch, + selection_data = selection[ch], + selection_mc = selection_mc, + selection_tight = selection_tight, + pandas_selection = pandas_selection, + lumi = 35987., + + # model = env['NN_DIR'] + '/all_2016_channels_200117_12h_55m/net_model_weighted.h5', + # transformation = env['NN_DIR'] + '/all_2016_channels_200117_12h_55m/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_2016_channels_200117_12h_55m/input_features.pck', + + # model = env['NN_DIR'] + '/all_2016_channels_200210_10h_30m/net_model_weighted.h5', + # transformation = env['NN_DIR'] + '/all_2016_channels_200210_10h_30m/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_2016_channels_200210_10h_30m/input_features.pck', + + # model = env['NN_DIR'] + '/all_2016_channels_200213_11h_23m/net_model_weighted.h5', # 2016 training w/o disp_sig cut, from me + # transformation = env['NN_DIR'] + '/all_2016_channels_200213_11h_23m/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_2016_channels_200213_11h_23m/input_features.pck', + + # model = env['NN_DIR'] + '/all_channels_200213_15h_24m/net_model_weighted.h5', # 2017 solo, w/o disp_sig + # transformation = env['NN_DIR'] + '/all_channels_200213_15h_24m/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_channels_200213_15h_24m/input_features.pck', + + # model = env['NN_DIR'] + '/all_2018_channels_200212_15h_39m/net_model_weighted.h5', # 2018 (!) 
training w/o disp_sig cut, from me, X-CHECK + # transformation = env['NN_DIR'] + '/all_2018_channels_200212_15h_39m/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_2018_channels_200212_15h_39m/input_features.pck', + + # model = env['NN_DIR'] + '/all_channels_200213_17h_25m/net_model_weighted.h5', # 2yr combd (16, 17), w/o disp_sig + # transformation = env['NN_DIR'] + '/all_channels_200213_17h_25m/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_channels_200213_17h_25m/input_features.pck', + + # model = env['NN_DIR'] + '/all_channels_200214_10h_11m/net_model_weighted.h5', # 2yr combd (16, 18), w/o disp_sig + # transformation = env['NN_DIR'] + '/all_channels_200214_10h_11m/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_channels_200214_10h_11m/input_features.pck', + + model = env['NN_DIR'] + '/all_channels_200213_15h_38m/net_model_weighted.h5', # 3yr cmbd, w/o disp_sig + transformation = env['NN_DIR'] + '/all_channels_200213_15h_38m/input_tranformation_weighted.pck', + features = env['NN_DIR'] + '/all_channels_200213_15h_38m/input_features.pck', + + process_signals = False, # switch off for control regions + mini_signals = False, # process only the signals that you'll plot + plot_signals = False, + blinded = False, + datacards = ['hnl_m_12_lxy_lt_0p5', 'hnl_m_12_lxy_0p5_to_1p5', 'hnl_m_12_lxy_1p5_to_4p0', 'hnl_m_12_lxy_mt_4p0'], # FIXME! 
improve this to accept wildcards / regex + ) + plotter16.plot() + save_plotter_and_selections(plotter16, selection[ch], selection_mc, selection_tight) + + set_paths(ch, 2017) + if ch[0] == 'e': selection_tight = sub('l0_pt > 3.', 'l0_pt > 35', selection_tight) + plotter17 = Plotter ( + channel = ch, + year = 2017, + base_dir = env['NTUPLE_DIR'], + post_fix = 'HNLTreeProducer/tree.root', # 'HNLTreeProducer_%s/tree.root' %ch, + selection_data = selection[ch], + selection_mc = selection_mc, + selection_tight = selection_tight, + pandas_selection = pandas_selection, + lumi = 41530., + + # model = env['NN_DIR'] + '/all_2017_channels_200117_12h_44m/net_model_weighted.h5', + # transformation = env['NN_DIR'] + '/all_2017_channels_200117_12h_44m/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_2017_channels_200117_12h_44m/input_features.pck', + + # model = env['NN_DIR'] + '/all_2017_channels_200210_10h_38m/net_model_weighted.h5', + # transformation = env['NN_DIR'] + '/all_2017_channels_200210_10h_38m/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_2017_channels_200210_10h_38m/input_features.pck', + + # model = env['NN_DIR'] + '/all_2017_channels_200213_11h_48m/net_model_weighted.h5', # 2017 training w/o disp_sig cut, from me + # transformation = env['NN_DIR'] + '/all_2017_channels_200213_11h_48m/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_2017_channels_200213_11h_48m/input_features.pck', + + # model = env['NN_DIR'] + '/all_channels_200213_15h_38m/net_model_weighted.h5', # 3yr cmbd + # transformation = env['NN_DIR'] + '/all_channels_200213_15h_38m/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_channels_200213_15h_38m/input_features.pck', + + # model = env['NN_DIR'] + '/all_2018_channels_200212_15h_39m/net_model_weighted.h5', # 2018 (!) 
training w/o disp_sig cut, from me, X-CHECK + # transformation = env['NN_DIR'] + '/all_2018_channels_200212_15h_39m/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_2018_channels_200212_15h_39m/input_features.pck', + + # model = env['NN_DIR'] + '/all_channels_200213_17h_25m/net_model_weighted.h5', # 2yr combd (16, 17) + # transformation = env['NN_DIR'] + '/all_channels_200213_17h_25m/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_channels_200213_17h_25m/input_features.pck', + + model = env['NN_DIR'] + '/all_channels_200213_15h_24m/net_model_weighted.h5', # 2017 solo + transformation = env['NN_DIR'] + '/all_channels_200213_15h_24m/input_tranformation_weighted.pck', + features = env['NN_DIR'] + '/all_channels_200213_15h_24m/input_features.pck', + + process_signals = False, # switch off for control regions + mini_signals = False, # process only the signals that you'll plot + plot_signals = False, + blinded = False, + datacards = ['hnl_m_12_lxy_lt_0p5', 'hnl_m_12_lxy_0p5_to_1p5', 'hnl_m_12_lxy_1p5_to_4p0', 'hnl_m_12_lxy_mt_4p0'], # FIXME! 
improve this to accept wildcards / regex + ) + plotter17.plot() + if ch[0] == 'e': selection_tight = sub('l0_pt > 3.', 'l0_pt > 32', selection_tight) + save_plotter_and_selections(plotter17, selection[ch], selection_mc, selection_tight) + + set_paths(ch, 2018) + plotter18 = Plotter ( + channel = ch, + year = 2018, + base_dir = env['NTUPLE_DIR'], + post_fix = 'HNLTreeProducer/tree.root', # 'HNLTreeProducer_%s/tree.root' %ch, + selection_data = selection[ch], + selection_mc = selection_mc, + selection_tight = selection_tight, + pandas_selection = pandas_selection, + lumi = 59700., + # model = env['NN_DIR'] + '/all_2018_channels_200117_11h_43m/net_model_weighted.h5', # plots from 1/17/20 + # transformation = env['NN_DIR'] + '/all_2018_channels_200117_11h_43m/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_2018_channels_200117_11h_43m/input_features.pck', + + # model = env['NN_DIR'] + '/fixed_from_R/all_channels_191126_9h_45m/net_model_weighted.h5', + # transformation = env['NN_DIR'] + '/fixed_from_R/all_channels_191126_9h_45m/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/fixed_from_R/all_channels_191126_9h_45m/input_features.pck', + + # model = env['NN_DIR'] + '/all_2018_channels_200210_10h_44m/net_model_weighted.h5', # plots from 1/17/20 + # transformation = env['NN_DIR'] + '/all_2018_channels_200210_10h_44m/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_2018_channels_200210_10h_44m/input_features.pck', + + # model = env['NN_DIR'] + '/all_channels_200213_15h_38m/net_model_weighted.h5', # 3yr combd + # transformation = env['NN_DIR'] + '/all_channels_200213_15h_38m/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_channels_200213_15h_38m/input_features.pck', + + # model = env['NN_DIR'] + '/all_2018_channels_200212_15h_39m/net_model_weighted.h5', # 2018 training w/o disp_sig cut + # transformation = env['NN_DIR'] + 
'/all_2018_channels_200212_15h_39m/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_2018_channels_200212_15h_39m/input_features.pck', + + # model = env['NN_DIR'] + '/all_2017_channels_200212_16h_14m/net_model_weighted.h5', # 2017 (!) training w/o disp_sig cut, from me, X-CHECK + # transformation = env['NN_DIR'] + '/all_2017_channels_200212_16h_14m/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_2017_channels_200212_16h_14m/input_features.pck', + + model = env['NN_DIR'] + '/all_channels_200213_14h_55m/net_model_weighted.h5', # 2018 training w/o disp_sig cut + transformation = env['NN_DIR'] + '/all_channels_200213_14h_55m/input_tranformation_weighted.pck', + features = env['NN_DIR'] + '/all_channels_200213_14h_55m/input_features.pck', + + process_signals = False, # switch off for control regions + mini_signals = False, # process only the signals that you'll plot + plot_signals = False, + blinded = False, + datacards = ['hnl_m_12_lxy_lt_0p5', 'hnl_m_12_lxy_0p5_to_1p5', 'hnl_m_12_lxy_1p5_to_4p0', 'hnl_m_12_lxy_mt_4p0'], # FIXME! 
improve this to accept wildcards / regex + ) + plotter18.plot() + save_plotter_and_selections(plotter18, selection[ch], selection_mc, selection_tight) + + pass diff --git a/plotter/cfg/plot_all_CR_bj.cfg.py b/plotter/cfg/plot_all_CR_bj.cfg.py new file mode 100644 index 0000000..f7243a9 --- /dev/null +++ b/plotter/cfg/plot_all_CR_bj.cfg.py @@ -0,0 +1,226 @@ +from os import environ as env +from collections import OrderedDict +from plotter.plotter import Plotter +from plotter.selections import Selections +from plotter.utils import set_paths, save_plotter_and_selections +from re import sub + +cuts = OrderedDict() +selection = OrderedDict() + +for ch in ['mmm', 'mem', 'eem', 'eee']: + cuts[ch] = Selections(ch) + +selection['eee'] = [ + cuts['eee'].selections['pt_iso'], + cuts['eee'].selections['baseline'], + cuts['eee'].selections['vetoes_12_OS'], + cuts['eee'].selections['vetoes_01_OS'], + cuts['eee'].selections['vetoes_02_OS'], + cuts['eee'].selections['CR_bj'], + + 'l1_pt>7', + 'l2_pt>7', +# 'hnl_2d_disp_sig>20', + 'hnl_pt_12>15', + 'sv_cos>0.90', + # 'sv_prob>0.001', + 'l0_pt>32', + 'abs(l1_dz)<10', + 'abs(l2_dz)<10', + 'l0_reliso_rho_03<0.1', +] + +selection['eem_os'] = [ + cuts['eem'].selections['pt_iso'], + cuts['eem'].selections['baseline'], + cuts['eem'].selections['vetoes_01_OS'], + 'l0_q!=l1_q', + cuts['eem'].selections['CR_bj'], + + 'l1_pt>7', +# 'hnl_2d_disp_sig>20', + 'hnl_pt_12>15', + 'sv_cos>0.90', + # 'sv_prob>0.001', + 'l0_reliso_rho_03<0.1', + 'l0_pt>32', + 'abs(l1_dz)<10', + 'abs(l2_dz)<10', +] + +selection['eem_ss'] = [ + cuts['eem'].selections['pt_iso'], + cuts['eem'].selections['baseline'], + 'l0_q==l1_q', + cuts['eem'].selections['CR_bj'], + + 'l1_pt>7', +# 'hnl_2d_disp_sig>20', + 'hnl_pt_12>15', + 'sv_cos>0.90', + # 'sv_prob>0.001', + 'l0_reliso_rho_03<0.1', + 'l0_pt>32', + 'abs(l1_dz)<10', + 'abs(l2_dz)<10', +] + +selection['mem_os'] = [ + cuts['mem'].selections['pt_iso'], + cuts['mem'].selections['baseline'], + 
cuts['mem'].selections['vetoes_02_OS'], + 'l0_q!=l2_q', + cuts['mem'].selections['CR_bj'], + + 'l1_pt>7', +# 'hnl_2d_disp_sig>20', + 'hnl_pt_12>15', + 'sv_cos>0.90', + # 'sv_prob>0.001', + 'l0_reliso_rho_03<0.1', + 'l0_pt>25', + 'abs(l1_dz)<10', + 'abs(l2_dz)<10', +] + +selection['mem_ss'] = [ + cuts['mem'].selections['pt_iso'], + cuts['mem'].selections['baseline'], + 'l0_q==l2_q', + cuts['mem'].selections['CR_bj'], + + 'l1_pt>7', +# 'hnl_2d_disp_sig>20', + 'hnl_pt_12>15', + 'sv_cos>0.90', + # 'sv_prob>0.001', + 'l0_reliso_rho_03<0.1', + 'l0_pt>25', + 'abs(l1_dz)<10', + 'abs(l2_dz)<10', +] + +selection['mmm'] = [ + cuts['mmm'].selections['pt_iso'], + cuts['mmm'].selections['baseline'], + cuts['mmm'].selections['vetoes_12_OS'], + cuts['mmm'].selections['vetoes_01_OS'], + cuts['mmm'].selections['vetoes_02_OS'], + cuts['mmm'].selections['CR_bj'], + + 'hnl_pt_12>15', + 'sv_cos>0.90', + # 'sv_prob>0.001', + 'l0_reliso_rho_03<0.1', + 'l0_pt>25', + 'abs(l1_dz)<10', + 'abs(l2_dz)<10', +] + +# extra selection to be applied on variables that don't exist +# in the root tree but they're created for the pandas dataset +# pandas_selection = '(hnl_2d_disp_sig_alt > 20 & sv_covxx > 0 & sv_covyy > 0 & sv_covzz > 0)' # workaround bug w/ negativ sv_cov_ii entries +# pandas_selection = '(hnl_2d_disp_sig_alt > 20) * (sv_covxx > 0 & sv_covyy > 0 & sv_covzz > 0)' # workaround bug w/ negativ sv_cov_ii entries +pandas_selection = '' +# pandas_selection = 'hnl_2d_disp_sig_alt>20' + + +if __name__ == '__main__': + for ch in ['mmm', 'mem_os', 'mem_ss', 'eem_os', 'eem_ss', 'eee']: + # for ch in ['mem_os', 'mem_ss', 'eem_os', 'eem_ss', 'eee']: + + selection_mc = selection[ch] + [cuts[ch[:3]].selections['is_prompt_lepton']] + selection_tight = cuts[ch[:3]].selections_pd['tight'] + + set_paths(ch, 2016) + if ch[0] == 'e': selection_tight = sub('l0_pt > 3.', 'l0_pt > 30', selection_tight) + plotter16 = Plotter( + channel = ch, + year = 2016, + base_dir = env['NTUPLE_DIR'], + post_fix = 
'HNLTreeProducer/tree.root', # 'HNLTreeProducer_%s/tree.root' %ch, + selection_data = selection[ch], + selection_mc = selection_mc, + selection_tight = selection_tight, + pandas_selection = pandas_selection, + lumi = 35987., + + # model = env['NN_DIR'] + '/all_[2016]_channels_200306_14h_57m_CR_bj_with_svProb0p001/net_model_weighted.h5', # 3yr cmbd, w/o disp_sig + # transformation = env['NN_DIR'] + '/all_[2016]_channels_200306_14h_57m_CR_bj_with_svProb0p001/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_[2016]_channels_200306_14h_57m_CR_bj_with_svProb0p001/input_features.pck', + + model = env['NN_DIR'] + '/all_[2016]_channels_200309_15h_34m_CR_bj_no_svProb/net_model_weighted.h5', # 3yr cmbd, w/o disp_sig + transformation = env['NN_DIR'] + '/all_[2016]_channels_200309_15h_34m_CR_bj_no_svProb/input_tranformation_weighted.pck', + features = env['NN_DIR'] + '/all_[2016]_channels_200309_15h_34m_CR_bj_no_svProb/input_features.pck', + + process_signals = False, # switch off for control regions + mini_signals = False, # process only the signals that you'll plot + plot_signals = False, + blinded = False, + datacards = ['hnl_m_12_lxy_lt_0p5', 'hnl_m_12_lxy_0p5_to_1p5', 'hnl_m_12_lxy_1p5_to_4p0', 'hnl_m_12_lxy_mt_4p0'], # FIXME! 
improve this to accept wildcards / regex + ) + plotter16.plot() + save_plotter_and_selections(plotter16, selection[ch], selection_mc, selection_tight) + + set_paths(ch, 2017) + if ch[0] == 'e': selection_tight = sub('l0_pt > 3.', 'l0_pt > 35', selection_tight) + plotter17 = Plotter ( + channel = ch, + year = 2017, + base_dir = env['NTUPLE_DIR'], + post_fix = 'HNLTreeProducer/tree.root', # 'HNLTreeProducer_%s/tree.root' %ch, + selection_data = selection[ch], + selection_mc = selection_mc, + selection_tight = selection_tight, + pandas_selection = pandas_selection, + lumi = 41530., + + # model = env['NN_DIR'] + '/all_[2017]_channels_200306_14h_56m_CR_bj_with_svProb0p001/net_model_weighted.h5', # 2017 solo + # transformation = env['NN_DIR'] + '/all_[2017]_channels_200306_14h_56m_CR_bj_with_svProb0p001/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_[2017]_channels_200306_14h_56m_CR_bj_with_svProb0p001/input_features.pck', + + model = env['NN_DIR'] + '/all_[2017]_channels_200309_16h_11m_CR_bj_no_svProb/net_model_weighted.h5', # 2017 solo + transformation = env['NN_DIR'] + '/all_[2017]_channels_200309_16h_11m_CR_bj_no_svProb/input_tranformation_weighted.pck', + features = env['NN_DIR'] + '/all_[2017]_channels_200309_16h_11m_CR_bj_no_svProb/input_features.pck', + + process_signals = False, # switch off for control regions + mini_signals = False, # process only the signals that you'll plot + plot_signals = False, + blinded = False, + datacards = ['hnl_m_12_lxy_lt_0p5', 'hnl_m_12_lxy_0p5_to_1p5', 'hnl_m_12_lxy_1p5_to_4p0', 'hnl_m_12_lxy_mt_4p0'], # FIXME! 
improve this to accept wildcards / regex + ) + plotter17.plot() + save_plotter_and_selections(plotter17, selection[ch], selection_mc, selection_tight) + + set_paths(ch, 2018) + if ch[0] == 'e': selection_tight = sub('l0_pt > 3.', 'l0_pt > 32', selection_tight) + plotter18 = Plotter ( + channel = ch, + year = 2018, + base_dir = env['NTUPLE_DIR'], + post_fix = 'HNLTreeProducer/tree.root', # 'HNLTreeProducer_%s/tree.root' %ch, + selection_data = selection[ch], + selection_mc = selection_mc, + selection_tight = selection_tight, + pandas_selection = pandas_selection, + lumi = 59700., + + # model = env['NN_DIR'] + '/all_[2018]_channels_200306_14h_57m_CR_bj_with_svProb0p001/net_model_weighted.h5', # 2018 training w/o disp_sig cut + # transformation = env['NN_DIR'] + '/all_[2018]_channels_200306_14h_57m_CR_bj_with_svProb0p001/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_[2018]_channels_200306_14h_57m_CR_bj_with_svProb0p001/input_features.pck', + + model = env['NN_DIR'] + '/all_[2018]_channels_200309_15h_53m_CR_bj_no_svProb/net_model_weighted.h5', # 2018 training w/o disp_sig cut + transformation = env['NN_DIR'] + '/all_[2018]_channels_200309_15h_53m_CR_bj_no_svProb/input_tranformation_weighted.pck', + features = env['NN_DIR'] + '/all_[2018]_channels_200309_15h_53m_CR_bj_no_svProb/input_features.pck', + + process_signals = False, # switch off for control regions + mini_signals = False, # process only the signals that you'll plot + plot_signals = False, + blinded = False, + datacards = ['hnl_m_12_lxy_lt_0p5', 'hnl_m_12_lxy_0p5_to_1p5', 'hnl_m_12_lxy_1p5_to_4p0', 'hnl_m_12_lxy_mt_4p0'], # FIXME! 
improve this to accept wildcards / regex + ) + plotter18.plot() + save_plotter_and_selections(plotter18, selection[ch], selection_mc, selection_tight) + + pass diff --git a/plotter/cfg/plot_all_MR.cfg.py b/plotter/cfg/plot_all_MR.cfg.py new file mode 100644 index 0000000..2274509 --- /dev/null +++ b/plotter/cfg/plot_all_MR.cfg.py @@ -0,0 +1,326 @@ +from os import environ as env +from collections import OrderedDict +from plotter.plotter import Plotter +from plotter.selections import Selections +from plotter.utils import set_paths, save_plotter_and_selections +from re import sub + +region_label = 'MR_train_WO_sbtr_w_dispSig' + +sbtrct_prmpt = False +if sbtrct_prmpt: region_label += '_plot_w_sbtr' +if not sbtrct_prmpt: region_label += '_plot_WO_sbtr' + +disp_sig = True +if disp_sig: region_label += '_w_dispSig' +if not disp_sig: region_label += '_WO_dispSig' + +cuts = OrderedDict() +selection = OrderedDict() + +for ch in ['mmm', 'mem', 'eem', 'eee']: + cuts[ch] = Selections(ch) + +selection['eee'] = [ + cuts['eee'].selections['pt_iso'], + cuts['eee'].selections['baseline'], + cuts['eee'].selections['vetoes_12_OS'], + cuts['eee'].selections['vetoes_01_OS'], + cuts['eee'].selections['vetoes_02_OS'], + cuts['eee'].selections['sideband'], + + 'l1_pt>7', + 'l2_pt>7', + 'hnl_pt_12>15', + 'sv_cos>0.99', + 'sv_prob>0.001', + 'l0_pt>32', + 'abs(l1_dz)<10', + 'abs(l2_dz)<10', + 'l0_reliso_rho_03<0.1', +] + +selection['eem_os'] = [ + cuts['eem'].selections['pt_iso'], + cuts['eem'].selections['baseline'], + cuts['eem'].selections['vetoes_01_OS'], + 'l0_q!=l1_q', + cuts['eem'].selections['sideband'], + + 'l1_pt>7', + 'hnl_pt_12>15', + 'sv_cos>0.99', + 'sv_prob>0.001', + 'l0_reliso_rho_03<0.1', + 'l0_pt>32', + 'abs(l1_dz)<10', + 'abs(l2_dz)<10', +] + +selection['eem_ss'] = [ + cuts['eem'].selections['pt_iso'], + cuts['eem'].selections['baseline'], + 'l0_q==l1_q', + cuts['eem'].selections['sideband'], + + 'l1_pt>7', + 'hnl_pt_12>15', + 'sv_cos>0.99', + 'sv_prob>0.001', + 
'l0_reliso_rho_03<0.1', + 'l0_pt>32', + 'abs(l1_dz)<10', + 'abs(l2_dz)<10', +] + +selection['mem_os'] = [ + cuts['mem'].selections['pt_iso'], + cuts['mem'].selections['baseline'], + cuts['mem'].selections['vetoes_02_OS'], + 'l0_q!=l2_q', + cuts['mem'].selections['sideband'], + + 'l1_pt>7', + 'hnl_pt_12>15', + 'sv_cos>0.99', + 'sv_prob>0.001', + 'l0_reliso_rho_03<0.1', + 'l0_pt>25', + 'abs(l1_dz)<10', + 'abs(l2_dz)<10', +] + +selection['mem_ss'] = [ + cuts['mem'].selections['pt_iso'], + cuts['mem'].selections['baseline'], + 'l0_q==l2_q', + cuts['mem'].selections['sideband'], + + 'l1_pt>7', + 'hnl_pt_12>15', + 'sv_cos>0.99', + 'sv_prob>0.001', + 'l0_reliso_rho_03<0.1', + 'l0_pt>25', + 'abs(l1_dz)<10', + 'abs(l2_dz)<10', +] + +selection['mmm'] = [ + cuts['mmm'].selections['pt_iso'], + cuts['mmm'].selections['baseline'], + cuts['mmm'].selections['vetoes_12_OS'], + cuts['mmm'].selections['vetoes_01_OS'], + cuts['mmm'].selections['vetoes_02_OS'], + cuts['mmm'].selections['sideband'], + + 'hnl_pt_12>15', + 'sv_cos>0.99', + 'sv_prob>0.001', + 'l0_reliso_rho_03<0.1', + 'l0_pt>25', + 'abs(l1_dz)<10', + 'abs(l2_dz)<10', +] + +# extra selection to be applied on variables that don't exist +# in the root tree but they're created for the pandas dataset +# pandas_selection = '(hnl_2d_disp_sig_alt > 20 & sv_covxx > 0 & sv_covyy > 0 & sv_covzz > 0)' # workaround bug w/ negativ sv_cov_ii entries +# pandas_selection = '(hnl_2d_disp_sig_alt > 20) * (sv_covxx > 0 & sv_covyy > 0 & sv_covzz > 0)' # workaround bug w/ negativ sv_cov_ii entries +if not disp_sig: + pandas_selection = '' +if disp_sig: + pandas_selection = 'hnl_2d_disp_sig > 20' +# pandas_selection = 'hnl_2d_disp_sig_alt>20' + + +if __name__ == '__main__': + for ch in ['mmm']:#, 'mem_os', 'mem_ss', 'eem_os', 'eem_ss', 'eee']: + + selection_mc = selection[ch] + [cuts[ch[:3]].selections['is_prompt_lepton']] + selection_tight = cuts[ch[:3]].selections_pd['tight'] + + set_paths(ch, 2016) + if ch[0] == 'e': selection_tight = 
sub('l0_pt > 3.', 'l0_pt > 30', selection_tight) + plotter16 = Plotter( + region_label = region_label, + channel = ch, + year = 2016, + base_dir = env['NTUPLE_DIR'], + post_fix = 'HNLTreeProducer/tree.root', # 'HNLTreeProducer_%s/tree.root' %ch, + selection_data = selection[ch], + selection_mc = selection_mc, + selection_tight = selection_tight, + pandas_selection = pandas_selection, + lumi = 35987., + sbtrct_prmpt = sbtrct_prmpt, + + # model = env['NN_DIR'] + '/all_2016_channels_200117_12h_55m/net_model_weighted.h5', + # transformation = env['NN_DIR'] + '/all_2016_channels_200117_12h_55m/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_2016_channels_200117_12h_55m/input_features.pck', + + # model = env['NN_DIR'] + '/all_2016_channels_200210_10h_30m/net_model_weighted.h5', + # transformation = env['NN_DIR'] + '/all_2016_channels_200210_10h_30m/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_2016_channels_200210_10h_30m/input_features.pck', + + # model = env['NN_DIR'] + '/all_2016_channels_200213_11h_23m/net_model_weighted.h5', # 2016 training w/o disp_sig cut, from me + # transformation = env['NN_DIR'] + '/all_2016_channels_200213_11h_23m/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_2016_channels_200213_11h_23m/input_features.pck', + + # model = env['NN_DIR'] + '/all_channels_200213_15h_24m/net_model_weighted.h5', # 2017 solo, w/o disp_sig + # transformation = env['NN_DIR'] + '/all_channels_200213_15h_24m/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_channels_200213_15h_24m/input_features.pck', + + # model = env['NN_DIR'] + '/all_2018_channels_200212_15h_39m/net_model_weighted.h5', # 2018 (!) 
training w/o disp_sig cut, from me, X-CHECK + # transformation = env['NN_DIR'] + '/all_2018_channels_200212_15h_39m/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_2018_channels_200212_15h_39m/input_features.pck', + + # model = env['NN_DIR'] + '/all_channels_200213_17h_25m/net_model_weighted.h5', # 2yr combd (16, 17), w/o disp_sig + # transformation = env['NN_DIR'] + '/all_channels_200213_17h_25m/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_channels_200213_17h_25m/input_features.pck', + + # model = env['NN_DIR'] + '/all_channels_200214_10h_11m/net_model_weighted.h5', # 2yr combd (16, 18), w/o disp_sig + # transformation = env['NN_DIR'] + '/all_channels_200214_10h_11m/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_channels_200214_10h_11m/input_features.pck', + + # model = env['NN_DIR'] + '/all_channels_200213_15h_38m/net_model_weighted.h5', # 3yr cmbd, w/o disp_sig + # transformation = env['NN_DIR'] + '/all_channels_200213_15h_38m/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_channels_200213_15h_38m/input_features.pck', + + # model = env['NN_DIR'] + '/all_[2016]_channels_200324_11h_50m_MR_no_disp_sig_latest/net_model_weighted.h5', # 2016, w/o disp_sig + # transformation = env['NN_DIR'] + '/all_[2016]_channels_200324_11h_50m_MR_no_disp_sig_latest/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_[2016]_channels_200324_11h_50m_MR_no_disp_sig_latest/input_features.pck', + + model = env['NN_DIR'] + '/all_[2016]_channels_200326_9h_37m_w_disp_sig_train_WO_sbtr/net_model_weighted.h5', # 2016, with disp_sig > 20 + transformation = env['NN_DIR'] + '/all_[2016]_channels_200326_9h_37m_w_disp_sig_train_WO_sbtr/input_tranformation_weighted.pck', + features = env['NN_DIR'] + '/all_[2016]_channels_200326_9h_37m_w_disp_sig_train_WO_sbtr/input_features.pck', + + process_signals = False, # switch off for control regions + mini_signals = False, # process only the 
signals that you'll plot + plot_signals = False, + blinded = False, + datacards = ['hnl_m_12_lxy_lt_0p5', 'hnl_m_12_lxy_0p5_to_1p5', 'hnl_m_12_lxy_1p5_to_4p0', 'hnl_m_12_lxy_mt_4p0'], # FIXME! improve this to accept wildcards / regex + ) + # plotter16.plot() + # save_plotter_and_selections(plotter16, selection[ch], selection_mc, selection_tight) + + set_paths(ch, 2017) + if ch[0] == 'e': selection_tight = sub('l0_pt > 3.', 'l0_pt > 35', selection_tight) + plotter17 = Plotter ( + region_label = region_label, + channel = ch, + year = 2017, + base_dir = env['NTUPLE_DIR'], + post_fix = 'HNLTreeProducer/tree.root', # 'HNLTreeProducer_%s/tree.root' %ch, + selection_data = selection[ch], + selection_mc = selection_mc, + selection_tight = selection_tight, + pandas_selection = pandas_selection, + lumi = 41530., + sbtrct_prmpt = sbtrct_prmpt, + + # model = env['NN_DIR'] + '/all_2017_channels_200117_12h_44m/net_model_weighted.h5', + # transformation = env['NN_DIR'] + '/all_2017_channels_200117_12h_44m/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_2017_channels_200117_12h_44m/input_features.pck', + + # model = env['NN_DIR'] + '/all_2017_channels_200210_10h_38m/net_model_weighted.h5', + # transformation = env['NN_DIR'] + '/all_2017_channels_200210_10h_38m/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_2017_channels_200210_10h_38m/input_features.pck', + + # model = env['NN_DIR'] + '/all_2017_channels_200213_11h_48m/net_model_weighted.h5', # 2017 training w/o disp_sig cut, from me + # transformation = env['NN_DIR'] + '/all_2017_channels_200213_11h_48m/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_2017_channels_200213_11h_48m/input_features.pck', + + # model = env['NN_DIR'] + '/all_channels_200213_15h_38m/net_model_weighted.h5', # 3yr cmbd + # transformation = env['NN_DIR'] + '/all_channels_200213_15h_38m/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + 
'/all_channels_200213_15h_38m/input_features.pck', + + # model = env['NN_DIR'] + '/all_2018_channels_200212_15h_39m/net_model_weighted.h5', # 2018 (!) training w/o disp_sig cut, from me, X-CHECK + # transformation = env['NN_DIR'] + '/all_2018_channels_200212_15h_39m/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_2018_channels_200212_15h_39m/input_features.pck', + + # model = env['NN_DIR'] + '/all_channels_200213_17h_25m/net_model_weighted.h5', # 2yr combd (16, 17) + # transformation = env['NN_DIR'] + '/all_channels_200213_17h_25m/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_channels_200213_17h_25m/input_features.pck', + + # model = env['NN_DIR'] + '/all_channels_200213_15h_24m/net_model_weighted.h5', # 2017 solo + # transformation = env['NN_DIR'] + '/all_channels_200213_15h_24m/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_channels_200213_15h_24m/input_features.pck', + + # model = env['NN_DIR'] + '/all_[2017]_channels_200324_11h_53m_MR_no_disp_sig_latest/net_model_weighted.h5', # 2017, w/o disp_sig + # transformation = env['NN_DIR'] + '/all_[2017]_channels_200324_11h_53m_MR_no_disp_sig_latest/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_[2017]_channels_200324_11h_53m_MR_no_disp_sig_latest/input_features.pck', + + model = env['NN_DIR'] + '/all_[2017]_channels_200326_9h_51m_w_disp_sig_train_WO_sbtr/net_model_weighted.h5', # 2017, with disp_sig > 20 + transformation = env['NN_DIR'] + '/all_[2017]_channels_200326_9h_51m_w_disp_sig_train_WO_sbtr/input_tranformation_weighted.pck', + features = env['NN_DIR'] + '/all_[2017]_channels_200326_9h_51m_w_disp_sig_train_WO_sbtr/input_features.pck', + + process_signals = False, # switch off for control regions + mini_signals = False, # process only the signals that you'll plot + plot_signals = False, + blinded = False, + datacards = ['hnl_m_12_lxy_lt_0p5', 'hnl_m_12_lxy_0p5_to_1p5', 'hnl_m_12_lxy_1p5_to_4p0', 
'hnl_m_12_lxy_mt_4p0'], # FIXME! improve this to accept wildcards / regex + ) + plotter17.plot() + if ch[0] == 'e': selection_tight = sub('l0_pt > 3.', 'l0_pt > 32', selection_tight) + save_plotter_and_selections(plotter17, selection[ch], selection_mc, selection_tight) + + set_paths(ch, 2018) + plotter18 = Plotter ( + region_label = region_label, + channel = ch, + year = 2018, + base_dir = env['NTUPLE_DIR'], + post_fix = 'HNLTreeProducer/tree.root', # 'HNLTreeProducer_%s/tree.root' %ch, + selection_data = selection[ch], + selection_mc = selection_mc, + selection_tight = selection_tight, + pandas_selection = pandas_selection, + lumi = 59700., + sbtrct_prmpt = sbtrct_prmpt, + # model = env['NN_DIR'] + '/all_2018_channels_200117_11h_43m/net_model_weighted.h5', # plots from 1/17/20 + # transformation = env['NN_DIR'] + '/all_2018_channels_200117_11h_43m/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_2018_channels_200117_11h_43m/input_features.pck', + + # model = env['NN_DIR'] + '/fixed_from_R/all_channels_191126_9h_45m/net_model_weighted.h5', + # transformation = env['NN_DIR'] + '/fixed_from_R/all_channels_191126_9h_45m/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/fixed_from_R/all_channels_191126_9h_45m/input_features.pck', + + # model = env['NN_DIR'] + '/all_2018_channels_200210_10h_44m/net_model_weighted.h5', # plots from 1/17/20 + # transformation = env['NN_DIR'] + '/all_2018_channels_200210_10h_44m/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_2018_channels_200210_10h_44m/input_features.pck', + + # model = env['NN_DIR'] + '/all_channels_200213_15h_38m/net_model_weighted.h5', # 3yr combd + # transformation = env['NN_DIR'] + '/all_channels_200213_15h_38m/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_channels_200213_15h_38m/input_features.pck', + + # model = env['NN_DIR'] + '/all_2018_channels_200212_15h_39m/net_model_weighted.h5', # 2018 training w/o disp_sig cut + # 
transformation = env['NN_DIR'] + '/all_2018_channels_200212_15h_39m/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_2018_channels_200212_15h_39m/input_features.pck', + + # model = env['NN_DIR'] + '/all_2017_channels_200212_16h_14m/net_model_weighted.h5', # 2017 (!) training w/o disp_sig cut, from me, X-CHECK + # transformation = env['NN_DIR'] + '/all_2017_channels_200212_16h_14m/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_2017_channels_200212_16h_14m/input_features.pck', + + # model = env['NN_DIR'] + '/all_channels_200213_14h_55m/net_model_weighted.h5', # 2018 training w/o disp_sig cut + # transformation = env['NN_DIR'] + '/all_channels_200213_14h_55m/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_channels_200213_14h_55m/input_features.pck', + + # model = env['NN_DIR'] + '/all_[2018]_channels_200324_11h_55m_MR_no_disp_sig_latest/net_model_weighted.h5', # 2018, w/o disp_sig + # transformation = env['NN_DIR'] + '/all_[2018]_channels_200324_11h_55m_MR_no_disp_sig_latest/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_[2018]_channels_200324_11h_55m_MR_no_disp_sig_latest/input_features.pck', + + model = env['NN_DIR'] + '/all_[2018]_channels_200326_10h_20m_w_disp_sig_train_WO_sbtr/net_model_weighted.h5', # 2018, with disp_sig > 20 + transformation = env['NN_DIR'] + '/all_[2018]_channels_200326_10h_20m_w_disp_sig_train_WO_sbtr/input_tranformation_weighted.pck', + features = env['NN_DIR'] + '/all_[2018]_channels_200326_10h_20m_w_disp_sig_train_WO_sbtr/input_features.pck', + + process_signals = False, # switch off for control regions + mini_signals = False, # process only the signals that you'll plot + plot_signals = False, + blinded = False, + datacards = ['hnl_m_12_lxy_lt_0p5', 'hnl_m_12_lxy_0p5_to_1p5', 'hnl_m_12_lxy_1p5_to_4p0', 'hnl_m_12_lxy_mt_4p0'], # FIXME! 
improve this to accept wildcards / regex + ) + # plotter18.plot() + # save_plotter_and_selections(plotter18, selection[ch], selection_mc, selection_tight) + + pass diff --git a/plotter/cfg/plot_all_SR.cfg.py b/plotter/cfg/plot_all_SR.cfg.py new file mode 100644 index 0000000..9a7d8ad --- /dev/null +++ b/plotter/cfg/plot_all_SR.cfg.py @@ -0,0 +1,221 @@ +from os import environ as env +from collections import OrderedDict +from plotter.plotter import Plotter +from plotter.selections import Selections +from plotter.utils import set_paths, save_plotter_and_selections +from re import sub + +cuts = OrderedDict() +selection = OrderedDict() + +for ch in ['mmm', 'mem', 'eem', 'eee']: + cuts[ch] = Selections(ch) + +selection['eee'] = [ + cuts['eee'].selections['pt_iso'], + cuts['eee'].selections['baseline'], + cuts['eee'].selections['vetoes_12_OS'], + cuts['eee'].selections['vetoes_01_OS'], + cuts['eee'].selections['vetoes_02_OS'], + cuts['eee'].selections['signal_region'], + + 'l1_pt>7', + 'l2_pt>7', + 'hnl_pt_12>15', + 'sv_cos>0.99', + 'sv_prob>0.001', + 'l0_pt>32', + 'abs(l1_dz)<10', + 'abs(l2_dz)<10', + 'l0_reliso_rho_03<0.1', +] + +selection['eem_os'] = [ + cuts['eem'].selections['pt_iso'], + cuts['eem'].selections['baseline'], + cuts['eem'].selections['vetoes_01_OS'], + 'l0_q!=l1_q', + cuts['eem'].selections['signal_region'], + + 'l1_pt>7', + 'hnl_pt_12>15', + 'sv_cos>0.99', + 'sv_prob>0.001', + 'l0_reliso_rho_03<0.1', + 'l0_pt>32', + 'abs(l1_dz)<10', + 'abs(l2_dz)<10', +] + +selection['eem_ss'] = [ + cuts['eem'].selections['pt_iso'], + cuts['eem'].selections['baseline'], + 'l0_q==l1_q', + cuts['eem'].selections['signal_region'], + + 'l1_pt>7', + 'hnl_pt_12>15', + 'sv_cos>0.99', + 'sv_prob>0.001', + 'l0_reliso_rho_03<0.1', + 'l0_pt>32', + 'abs(l1_dz)<10', + 'abs(l2_dz)<10', +] + +selection['mem_os'] = [ + cuts['mem'].selections['pt_iso'], + cuts['mem'].selections['baseline'], + cuts['mem'].selections['vetoes_02_OS'], + 'l0_q!=l2_q', + 
cuts['mem'].selections['signal_region'], + + 'l1_pt>7', + 'hnl_pt_12>15', + 'sv_cos>0.99', + 'sv_prob>0.001', + 'l0_reliso_rho_03<0.1', + 'l0_pt>25', + 'abs(l1_dz)<10', + 'abs(l2_dz)<10', +] + +selection['mem_ss'] = [ + cuts['mem'].selections['pt_iso'], + cuts['mem'].selections['baseline'], + 'l0_q==l2_q', + cuts['mem'].selections['signal_region'], + + 'l1_pt>7', + 'hnl_pt_12>15', + 'sv_cos>0.99', + 'sv_prob>0.001', + 'l0_reliso_rho_03<0.1', + 'l0_pt>25', + 'abs(l1_dz)<10', + 'abs(l2_dz)<10', +] + +selection['mmm'] = [ + cuts['mmm'].selections['pt_iso'], + cuts['mmm'].selections['baseline'], + cuts['mmm'].selections['vetoes_12_OS'], + cuts['mmm'].selections['vetoes_01_OS'], + cuts['mmm'].selections['vetoes_02_OS'], + cuts['mmm'].selections['signal_region'], + + 'hnl_pt_12>15', + 'sv_cos>0.99', + 'sv_prob>0.001', + 'l0_reliso_rho_03<0.1', + 'l0_pt>25', + 'abs(l1_dz)<10', + 'abs(l2_dz)<10', +] + +# extra selection to be applied on variables that don't exist +# in the root tree but they're created for the pandas dataset +# pandas_selection = '' +pandas_selection = 'hnl_2d_disp_sig > 20' +# pandas_selection = 'hnl_2d_disp_sig_alt>20' +# pandas_selection = '(hnl_2d_disp_sig_alt > 20 & sv_covxx > 0 & sv_covyy > 0 & sv_covzz > 0)' # workaround bug w/ negativ sv_cov_ii entries +# pandas_selection = '(hnl_2d_disp_sig_alt > 20) * (sv_covxx > 0 & sv_covyy > 0 & sv_covzz > 0)' # workaround bug w/ negativ sv_cov_ii entries + + +if __name__ == '__main__': + for ch in ['mmm', 'mem_os', 'mem_ss', 'eem_os', 'eem_ss', 'eee']: + + selection_mc = selection[ch] + [cuts[ch[:3]].selections['is_prompt_lepton']] + selection_tight = cuts[ch[:3]].selections_pd['tight'] + + set_paths(ch, 2016) + if ch[0] == 'e': selection_tight = sub('l0_pt > 3.', 'l0_pt > 30', selection_tight) + plotter16 = Plotter( + channel = ch, + year = 2016, + base_dir = env['NTUPLE_DIR'], + post_fix = 'HNLTreeProducer/tree.root', # 'HNLTreeProducer_%s/tree.root' %ch, + selection_data = selection[ch], + selection_mc = 
selection_mc, + selection_tight = selection_tight, + pandas_selection = pandas_selection, + lumi = 35987., + + # model = env['NN_DIR'] + '/all_channels_200213_15h_38m/net_model_weighted.h5', # 3yr cmbd, w/o disp_sig + # transformation = env['NN_DIR'] + '/all_channels_200213_15h_38m/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_channels_200213_15h_38m/input_features.pck', + + model = env['NN_DIR'] + '/all_[2016]_channels_200324_11h_50m_MR_no_disp_sig_latest/net_model_weighted.h5', # 2016, w/o disp_sig + transformation = env['NN_DIR'] + '/all_[2016]_channels_200324_11h_50m_MR_no_disp_sig_latest/input_tranformation_weighted.pck', + features = env['NN_DIR'] + '/all_[2016]_channels_200324_11h_50m_MR_no_disp_sig_latest/input_features.pck', + + process_signals = True, # switch off for control regions + mini_signals = True, # process only the signals that you'll plot + plot_signals = True, + blinded = True, + datacards = ['hnl_m_12_lxy_lt_0p5', 'hnl_m_12_lxy_0p5_to_1p5', 'hnl_m_12_lxy_1p5_to_4p0', 'hnl_m_12_lxy_mt_4p0'], # FIXME! 
improve this to accept wildcards / regex + ) + plotter16.plot() + save_plotter_and_selections(plotter16, selection[ch], selection_mc, selection_tight) + + set_paths(ch, 2017) + if ch[0] == 'e': selection_tight = sub('l0_pt > 3.', 'l0_pt > 35', selection_tight) + plotter17 = Plotter ( + channel = ch, + year = 2017, + base_dir = env['NTUPLE_DIR'], + post_fix = 'HNLTreeProducer/tree.root', # 'HNLTreeProducer_%s/tree.root' %ch, + selection_data = selection[ch], + selection_mc = selection_mc, + selection_tight = selection_tight, + pandas_selection = pandas_selection, + lumi = 41530., + + # model = env['NN_DIR'] + '/all_channels_200213_15h_24m/net_model_weighted.h5', # 2017 solo, w/o disp_sig cut + # transformation = env['NN_DIR'] + '/all_channels_200213_15h_24m/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_channels_200213_15h_24m/input_features.pck', + + model = env['NN_DIR'] + '/all_[2017]_channels_200324_11h_53m_MR_no_disp_sig_latest/net_model_weighted.h5', # 2017, w/o disp_sig + transformation = env['NN_DIR'] + '/all_[2017]_channels_200324_11h_53m_MR_no_disp_sig_latest/input_tranformation_weighted.pck', + features = env['NN_DIR'] + '/all_[2017]_channels_200324_11h_53m_MR_no_disp_sig_latest/input_features.pck', + + process_signals = True, # switch off for control regions + mini_signals = True, # process only the signals that you'll plot + plot_signals = True, + blinded = True, + datacards = ['hnl_m_12_lxy_lt_0p5', 'hnl_m_12_lxy_0p5_to_1p5', 'hnl_m_12_lxy_1p5_to_4p0', 'hnl_m_12_lxy_mt_4p0'], # FIXME! 
improve this to accept wildcards / regex + ) + plotter17.plot() + save_plotter_and_selections(plotter17, selection[ch], selection_mc, selection_tight) + + set_paths(ch, 2018) + if ch[0] == 'e': selection_tight = sub('l0_pt > 3.', 'l0_pt > 32', selection_tight) + plotter18 = Plotter ( + channel = ch, + year = 2018, + base_dir = env['NTUPLE_DIR'], + post_fix = 'HNLTreeProducer/tree.root', # 'HNLTreeProducer_%s/tree.root' %ch, + selection_data = selection[ch], + selection_mc = selection_mc, + selection_tight = selection_tight, + pandas_selection = pandas_selection, + lumi = 59700., + + # model = env['NN_DIR'] + '/all_channels_200213_14h_55m/net_model_weighted.h5', # 2018 training w/o disp_sig cut + # transformation = env['NN_DIR'] + '/all_channels_200213_14h_55m/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_channels_200213_14h_55m/input_features.pck', + + model = env['NN_DIR'] + '/all_[2018]_channels_200324_11h_55m_MR_no_disp_sig_latest/net_model_weighted.h5', # 2018, w/o disp_sig + transformation = env['NN_DIR'] + '/all_[2018]_channels_200324_11h_55m_MR_no_disp_sig_latest/input_tranformation_weighted.pck', + features = env['NN_DIR'] + '/all_[2018]_channels_200324_11h_55m_MR_no_disp_sig_latest/input_features.pck', + + process_signals = True, # switch off for control regions + mini_signals = True, # process only the signals that you'll plot + plot_signals = True, + blinded = True, + datacards = ['hnl_m_12_lxy_lt_0p5', 'hnl_m_12_lxy_0p5_to_1p5', 'hnl_m_12_lxy_1p5_to_4p0', 'hnl_m_12_lxy_mt_4p0'], # FIXME! 
improve this to accept wildcards / regex + ) + plotter18.plot() + save_plotter_and_selections(plotter18, selection[ch], selection_mc, selection_tight) + + pass diff --git a/plotter/cfg/plot_all_trainMR.cfg.py b/plotter/cfg/plot_all_trainMR.cfg.py new file mode 100644 index 0000000..b48fae7 --- /dev/null +++ b/plotter/cfg/plot_all_trainMR.cfg.py @@ -0,0 +1,185 @@ +from os import environ as env +from collections import OrderedDict +from plotter.plotter import Plotter +from plotter.selections import Selections +from plotter.utils import set_paths, save_plotter_and_selections +from re import sub + +region_label = 'MRtrain_train_w_dispSig_WO_sbtr' + +sbtrct_prmpt = False +if sbtrct_prmpt: region_label += '_plot_w_sbtr' +if not sbtrct_prmpt: region_label += '_plot_WO_sbtr' + +disp_sig = True +if disp_sig: region_label += '_w_dispSig' +if not disp_sig: region_label += '_WO_dispSig' + +cuts = OrderedDict() +selection = OrderedDict() + +extra_selections = [ + 'hnl_pt_12>15', + 'sv_cos>0.99', + 'sv_prob>0.001', +] +if disp_sig: extra_selections.append('hnl_2d_disp_sig > 20') + +cuts['mmm'] = Selections('mmm') +selection['mmm'] = [ + cuts['mmm'].selections['pt_iso'], + cuts['mmm'].selections['baseline'], + cuts['mmm'].selections['vetoes_12_OS'], + cuts['mmm'].selections['vetoes_01_OS'], + cuts['mmm'].selections['vetoes_02_OS'], + cuts['mmm'].selections['sideband'], +] + extra_selections + +cuts['mem'] = Selections('mem') +selection['mem_os'] = [ + cuts['mem'].selections['pt_iso'], + cuts['mem'].selections['baseline'], + cuts['mem'].selections['sideband'], + 'l0_q!=l2_q', + cuts['mem'].selections['vetoes_02_OS'], +] + extra_selections + +selection['mem_ss'] = [ + cuts['mem'].selections['pt_iso'], + cuts['mem'].selections['baseline'], + 'l0_q==l2_q', + cuts['mem'].selections['sideband'], +] + extra_selections + +cuts['eee'] = Selections('eee') +selection['eee'] = [ + cuts['eee'].selections['pt_iso'], + cuts['eee'].selections['baseline'], + 
cuts['eee'].selections['vetoes_12_OS'], + cuts['eee'].selections['vetoes_01_OS'], + cuts['eee'].selections['vetoes_02_OS'], + cuts['eee'].selections['sideband'], +] + extra_selections + +cuts['eem'] = Selections('eem') +selection['eem_os'] = [ + cuts['eem'].selections['pt_iso'], + cuts['eem'].selections['baseline'], + 'l0_q!=l1_q', + cuts['eem'].selections['sideband'], + cuts['eem'].selections['vetoes_01_OS'], +] + extra_selections + +selection['eem_ss'] = [ + cuts['eem'].selections['pt_iso'], + cuts['eem'].selections['baseline'], + 'l0_q==l1_q', + cuts['eem'].selections['sideband'], +] + extra_selections + +pandas_selection = '' # NO disp_sig + +if __name__ == '__main__': + for ch in ['mmm']:#'mem_os', 'mem_ss', 'eem_os', 'eem_ss', 'eee']: + + selection_mc = selection[ch] + [cuts[ch[:3]].selections['is_prompt_lepton']] + selection_tight = cuts[ch[:3]].selections_pd['tight'] + + set_paths(ch, 2016) + if ch[0] == 'e': selection_tight = sub('l0_pt > 3.', 'l0_pt > 30', selection_tight) + plotter16 = Plotter( + sbtrct_prmpt = sbtrct_prmpt, + region_label = region_label, + channel = ch, + year = 2016, + base_dir = env['NTUPLE_DIR'], + post_fix = 'HNLTreeProducer/tree.root', # 'HNLTreeProducer_%s/tree.root' %ch, + selection_data = selection[ch], + selection_mc = selection_mc, + selection_tight = selection_tight, + pandas_selection = pandas_selection, + lumi = 35987., + + # model = env['NN_DIR'] + '/all_[2016]_channels_200324_11h_50m_MR_no_disp_sig_latest/net_model_weighted.h5', # 2016, w/o disp_sig + # transformation = env['NN_DIR'] + '/all_[2016]_channels_200324_11h_50m_MR_no_disp_sig_latest/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_[2016]_channels_200324_11h_50m_MR_no_disp_sig_latest/input_features.pck', + + model = env['NN_DIR'] + '/all_[2016]_channels_200326_9h_37m_w_disp_sig_train_WO_sbtr/net_model_weighted.h5', # 2016, with disp_sig > 20 + transformation = env['NN_DIR'] + 
'/all_[2016]_channels_200326_9h_37m_w_disp_sig_train_WO_sbtr/input_tranformation_weighted.pck', + features = env['NN_DIR'] + '/all_[2016]_channels_200326_9h_37m_w_disp_sig_train_WO_sbtr/input_features.pck', + + process_signals = False, # switch off for control regions + mini_signals = False, # process only the signals that you'll plot + plot_signals = False, + blinded = False, + datacards = ['hnl_m_12_lxy_lt_0p5', 'hnl_m_12_lxy_0p5_to_1p5', 'hnl_m_12_lxy_1p5_to_4p0', 'hnl_m_12_lxy_mt_4p0'], # FIXME! improve this to accept wildcards / regex + ) + plotter16.plot() + save_plotter_and_selections(plotter16, selection[ch], selection_mc, selection_tight) + + set_paths(ch, 2017) + if ch[0] == 'e': selection_tight = sub('l0_pt > 3.', 'l0_pt > 35', selection_tight) + plotter17 = Plotter ( + sbtrct_prmpt = sbtrct_prmpt, + region_label = region_label, + channel = ch, + year = 2017, + base_dir = env['NTUPLE_DIR'], + post_fix = 'HNLTreeProducer/tree.root', # 'HNLTreeProducer_%s/tree.root' %ch, + selection_data = selection[ch], + selection_mc = selection_mc, + selection_tight = selection_tight, + pandas_selection = pandas_selection, + lumi = 41530., + + # model = env['NN_DIR'] + '/all_[2017]_channels_200324_11h_53m_MR_no_disp_sig_latest/net_model_weighted.h5', # 2017, w/o disp_sig + # transformation = env['NN_DIR'] + '/all_[2017]_channels_200324_11h_53m_MR_no_disp_sig_latest/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_[2017]_channels_200324_11h_53m_MR_no_disp_sig_latest/input_features.pck', + + model = env['NN_DIR'] + '/all_[2017]_channels_200326_9h_51m_w_disp_sig_train_WO_sbtr/net_model_weighted.h5', # 2017, with disp_sig > 20 + transformation = env['NN_DIR'] + '/all_[2017]_channels_200326_9h_51m_w_disp_sig_train_WO_sbtr/input_tranformation_weighted.pck', + features = env['NN_DIR'] + '/all_[2017]_channels_200326_9h_51m_w_disp_sig_train_WO_sbtr/input_features.pck', + + process_signals = False, # switch off for control regions + mini_signals = False, # 
process only the signals that you'll plot + plot_signals = False, + blinded = False, + datacards = ['hnl_m_12_lxy_lt_0p5', 'hnl_m_12_lxy_0p5_to_1p5', 'hnl_m_12_lxy_1p5_to_4p0', 'hnl_m_12_lxy_mt_4p0'], # FIXME! improve this to accept wildcards / regex + ) + plotter17.plot() + if ch[0] == 'e': selection_tight = sub('l0_pt > 3.', 'l0_pt > 32', selection_tight) + save_plotter_and_selections(plotter17, selection[ch], selection_mc, selection_tight) + + set_paths(ch, 2018) + plotter18 = Plotter ( + sbtrct_prmpt = sbtrct_prmpt, + region_label = region_label, + channel = ch, + year = 2018, + base_dir = env['NTUPLE_DIR'], + post_fix = 'HNLTreeProducer/tree.root', # 'HNLTreeProducer_%s/tree.root' %ch, + selection_data = selection[ch], + selection_mc = selection_mc, + selection_tight = selection_tight, + pandas_selection = pandas_selection, + + lumi = 59700., + + # model = env['NN_DIR'] + '/all_[2018]_channels_200324_11h_55m_MR_no_disp_sig_latest/net_model_weighted.h5', # 2018, w/o disp_sig + # transformation = env['NN_DIR'] + '/all_[2018]_channels_200324_11h_55m_MR_no_disp_sig_latest/input_tranformation_weighted.pck', + # features = env['NN_DIR'] + '/all_[2018]_channels_200324_11h_55m_MR_no_disp_sig_latest/input_features.pck', + + model = env['NN_DIR'] + '/all_[2018]_channels_200326_10h_20m_w_disp_sig_train_WO_sbtr/net_model_weighted.h5', # 2018, with disp_sig > 20 + transformation = env['NN_DIR'] + '/all_[2018]_channels_200326_10h_20m_w_disp_sig_train_WO_sbtr/input_tranformation_weighted.pck', + features = env['NN_DIR'] + '/all_[2018]_channels_200326_10h_20m_w_disp_sig_train_WO_sbtr/input_features.pck', + + process_signals = False, # switch off for control regions + mini_signals = False, # process only the signals that you'll plot + plot_signals = False, + blinded = False, + datacards = ['hnl_m_12_lxy_lt_0p5', 'hnl_m_12_lxy_0p5_to_1p5', 'hnl_m_12_lxy_1p5_to_4p0', 'hnl_m_12_lxy_mt_4p0'], # FIXME! 
improve this to accept wildcards / regex + ) + plotter18.plot() + save_plotter_and_selections(plotter18, selection[ch], selection_mc, selection_tight) + + pass diff --git a/plotter/cfg/plot_eee.cfg.py b/plotter/cfg/plot_eee.cfg.py deleted file mode 100644 index b656507..0000000 --- a/plotter/cfg/plot_eee.cfg.py +++ /dev/null @@ -1,70 +0,0 @@ -from os import environ as env -from plotter.plotter import Plotter -from plotter.selections import Selections -from plotter.utils import set_paths, save_plotter_and_selections - -ch = 'eee' - -set_paths(ch, 2018) -cuts = Selections(ch) - -selection = [ - cuts.selections['pt_iso'], - cuts.selections['baseline'], - cuts.selections['vetoes_12_OS'], - cuts.selections['vetoes_01_OS'], - cuts.selections['vetoes_02_OS'], - cuts.selections['signal_region'], -# cuts.selections['sideband'], - - - 'l1_pt>7', - 'l2_pt>7', -# 'hnl_2d_disp_sig>20', - 'hnl_pt_12>15', - 'sv_cos>0.99', - 'sv_prob>0.001', - 'l0_pt>32', - 'abs(l1_dz)<10', - 'abs(l2_dz)<10', - 'l0_reliso_rho_03<0.1', -] - -# extra selection to be applied on variables that don't exist -# in the root tree but they're created for the pandas dataset -pandas_selection = 'hnl_2d_disp_sig_alt>20' - -selection_mc = selection + [cuts.selections['is_prompt_lepton']] -selection_tight = cuts.selections_pd['tight'] - -plotter = Plotter (channel = ch, - base_dir = env['NTUPLE_DIR'], - post_fix = 'HNLTreeProducer/tree.root', # 'HNLTreeProducer_%s/tree.root' %ch, - selection_data = selection, - selection_mc = selection_mc, - selection_tight = selection_tight, - pandas_selection = pandas_selection, - lumi = 59700., -# model = env['NN_DIR'] + 'trainings/eee_191119_19h_50m/net_model_weighted.h5', -# transformation = env['NN_DIR'] + 'trainings/eee_191119_19h_50m/input_tranformation_weighted.pck', -# features = env['NN_DIR'] + 'trainings/eee_191119_19h_50m/input_features.pck', -# model = env['NN_DIR'] + '/all_channels_191122_11h_32m/net_model_weighted.h5', -# transformation = env['NN_DIR'] + 
'/all_channels_191122_11h_32m/input_tranformation_weighted.pck', -# features = env['NN_DIR'] + '/all_channels_191122_11h_32m/input_features.pck', - model = env['NN_DIR'] + '/all_channels_191126_9h_45m/net_model_weighted.h5', - transformation = env['NN_DIR'] + '/all_channels_191126_9h_45m/input_tranformation_weighted.pck', - features = env['NN_DIR'] + '/all_channels_191126_9h_45m/input_features.pck', - process_signals = True, - mini_signals = True, # process only the signals that you'll plot - plot_signals = True, - blinded = True, -# datacards = ['hnl_m_12_lxy_0p5_to_2p0', 'hnl_m_12_lxy_lt_0p5', 'hnl_m_12_lxy_mt_2p0'], # FIXME! improve this to accept wildcards / regex - datacards = ['hnl_m_12_lxy_lt_0p5', 'hnl_m_12_lxy_0p5_to_1p5', 'hnl_m_12_lxy_1p5_to_4p0', 'hnl_m_12_lxy_mt_4p0'], # FIXME! improve this to accept wildcards / regex - ) - -if __name__ == '__main__': - plotter.plot() - # save the plotter and all - save_plotter_and_selections(plotter, selection, selection_mc, selection_tight) - pass - diff --git a/plotter/cfg/plot_eem_os.cfg.py b/plotter/cfg/plot_eem_os.cfg.py deleted file mode 100644 index 815650f..0000000 --- a/plotter/cfg/plot_eem_os.cfg.py +++ /dev/null @@ -1,73 +0,0 @@ -from os import environ as env -from plotter.plotter import Plotter -from plotter.selections import Selections -from plotter.utils import set_paths, save_plotter_and_selections - -ch = 'eem' - -set_paths(ch, 2018) -cuts = Selections(ch) - -selection = [ - cuts.selections['pt_iso'], - cuts.selections['baseline'], - cuts.selections['vetoes_01_OS'], - 'l0_q!=l1_q', -# cuts.selections['sideband'], - cuts.selections['signal_region'], - - 'l1_pt>7', -# 'hnl_2d_disp_sig>20', - 'hnl_pt_12>15', - 'sv_cos>0.99', - 'sv_prob>0.001', - 'l0_reliso_rho_03<0.1', - 'l0_pt>32', - 'abs(l1_dz)<10', - 'abs(l2_dz)<10', -] - -# extra selection to be applied on variables that don't exist -# in the root tree but they're created for the pandas dataset -pandas_selection = 'hnl_2d_disp_sig_alt>20' - 
-selection_mc = selection + [cuts.selections['is_prompt_lepton']] -selection_tight = cuts.selections_pd['tight'] - -plotter = Plotter (channel = ch+'_os', - base_dir = env['NTUPLE_DIR'], - post_fix = 'HNLTreeProducer/tree.root', # 'HNLTreeProducer_%s/tree.root' %ch, - selection_data = selection, - selection_mc = selection_mc, - selection_tight = selection_tight, - pandas_selection = pandas_selection, - lumi = 59700., -# model = env['NN_DIR'] + '/eem_191119_19h_46m/net_model_weighted.h5', -# transformation = env['NN_DIR'] + '/eem_191119_19h_46m/input_tranformation_weighted.pck', -# features = env['NN_DIR'] + '/eem_191119_19h_46m/input_features.pck', -# model = env['NN_DIR'] + '/eem_191119_20h_14m/net_model_weighted.h5', -# transformation = env['NN_DIR'] + '/eem_191119_20h_14m/input_tranformation_weighted.pck', -# features = env['NN_DIR'] + '/eem_191119_20h_14m/input_features.pck', -# model = env['NN_DIR'] + '/eem_191119_20h_20m/net_model_weighted.h5', -# transformation = env['NN_DIR'] + '/eem_191119_20h_20m/input_tranformation_weighted.pck', -# features = env['NN_DIR'] + '/eem_191119_20h_20m/input_features.pck', - model = env['NN_DIR'] + 'trainings/eem_191119_22h_30m/net_model_weighted.h5', - transformation = env['NN_DIR'] + 'trainings/eem_191119_22h_30m/input_tranformation_weighted.pck', - features = env['NN_DIR'] + 'trainings/eem_191119_22h_30m/input_features.pck', - process_signals = True, - mini_signals = True, # process only the signals that you'll plot - plot_signals = True, - blinded = True, -# datacards = ['hnl_m_12_lxy_0p5_to_2p0', 'hnl_m_12_lxy_lt_0p5', 'hnl_m_12_lxy_mt_2p0'], # FIXME! improve this to accept wildcards / regex - datacards = ['hnl_m_12_lxy_lt_0p5', 'hnl_m_12_lxy_0p5_to_1p5', 'hnl_m_12_lxy_1p5_to_4p0', 'hnl_m_12_lxy_mt_4p0'], # FIXME! 
improve this to accept wildcards / regex - ) - -if __name__ == '__main__': - - plotter.plot() - - # save the plotter and all - save_plotter_and_selections(plotter, selection, selection_mc, selection_tight) - - pass - \ No newline at end of file diff --git a/plotter/cfg/plot_eem_ss.cfg.py b/plotter/cfg/plot_eem_ss.cfg.py deleted file mode 100644 index d9e5a90..0000000 --- a/plotter/cfg/plot_eem_ss.cfg.py +++ /dev/null @@ -1,66 +0,0 @@ -from os import environ as env -from plotter.plotter import Plotter -from plotter.selections import Selections -from plotter.utils import set_paths, save_plotter_and_selections - -ch = 'eem' - -set_paths(ch, 2018) -cuts = Selections(ch) - -selection = [ - cuts.selections['pt_iso'], - cuts.selections['baseline'], - 'l0_q==l1_q', -# cuts.selections['sideband'], - cuts.selections['signal_region'], - - 'l1_pt>7', -# 'hnl_2d_disp_sig>20', - 'hnl_pt_12>15', - 'sv_cos>0.99', - 'sv_prob>0.001', - 'l0_reliso_rho_03<0.1', - 'l0_pt>32', - 'abs(l1_dz)<10', - 'abs(l2_dz)<10', -] - -# extra selection to be applied on variables that don't exist -# in the root tree but they're created for the pandas dataset -pandas_selection = 'hnl_2d_disp_sig_alt>20' - -selection_mc = selection + [cuts.selections['is_prompt_lepton']] -selection_tight = cuts.selections_pd['tight'] - -plotter = Plotter (channel = ch+'_ss', - base_dir = env['NTUPLE_DIR'], - post_fix = 'HNLTreeProducer/tree.root', # 'HNLTreeProducer_%s/tree.root' %ch, - selection_data = selection, - selection_mc = selection_mc, - selection_tight = selection_tight, - pandas_selection = pandas_selection, - lumi = 59700., -# model = env['NN_DIR'] + '/eem_191119_19h_46m/net_model_weighted.h5', -# transformation = env['NN_DIR'] + '/eem_191119_19h_46m/input_tranformation_weighted.pck', -# features = env['NN_DIR'] + '/eem_191119_19h_46m/input_features.pck', - model = env['NN_DIR'] + 'trainings/eem_191119_22h_30m/net_model_weighted.h5', - transformation = env['NN_DIR'] + 
'trainings/eem_191119_22h_30m/input_tranformation_weighted.pck', - features = env['NN_DIR'] + 'trainings/eem_191119_22h_30m/input_features.pck', - process_signals = True, - mini_signals = True, # process only the signals that you'll plot - plot_signals = True, - blinded = True, -# datacards = ['hnl_m_12_lxy_0p5_to_2p0', 'hnl_m_12_lxy_lt_0p5', 'hnl_m_12_lxy_mt_2p0'], # FIXME! improve this to accept wildcards / regex - datacards = ['hnl_m_12_lxy_lt_0p5', 'hnl_m_12_lxy_0p5_to_1p5', 'hnl_m_12_lxy_1p5_to_4p0', 'hnl_m_12_lxy_mt_4p0'], # FIXME! improve this to accept wildcards / regex - ) - -if __name__ == '__main__': - - plotter.plot() - - # save the plotter and all - save_plotter_and_selections(plotter, selection, selection_mc, selection_tight) - - pass - \ No newline at end of file diff --git a/plotter/cfg/plot_mem_os.cfg.py b/plotter/cfg/plot_mem_os.cfg.py deleted file mode 100644 index 329bd2b..0000000 --- a/plotter/cfg/plot_mem_os.cfg.py +++ /dev/null @@ -1,63 +0,0 @@ -from os import environ as env -from plotter.plotter import Plotter -from plotter.selections import Selections -from plotter.utils import set_paths, save_plotter_and_selections - -ch = 'mem' - -set_paths(ch, 2018) -cuts = Selections(ch) - -selection = [ - cuts.selections['pt_iso'], - cuts.selections['baseline'], - cuts.selections['vetoes_02_OS'], - 'l0_q!=l2_q', -# cuts.selections['sideband'], - cuts.selections['signal_region'], - - 'l1_pt>7', -# 'hnl_2d_disp_sig>20', - 'hnl_pt_12>15', - 'sv_cos>0.99', - 'sv_prob>0.001', - 'l0_reliso_rho_03<0.1', - 'l0_pt>25', - 'abs(l1_dz)<10', - 'abs(l2_dz)<10', -] - -# extra selection to be applied on variables that don't exist -# in the root tree but they're created for the pandas dataset -pandas_selection = 'hnl_2d_disp_sig_alt>20' - -selection_mc = selection + [cuts.selections['is_prompt_lepton']] -selection_tight = cuts.selections_pd['tight'] - -plotter = Plotter (channel = ch+'_os', - base_dir = env['NTUPLE_DIR'], - post_fix = 'HNLTreeProducer/tree.root', # 
'HNLTreeProducer_%s/tree.root' %ch, - selection_data = selection, - selection_mc = selection_mc, - selection_tight = selection_tight, - pandas_selection = pandas_selection, - lumi = 59700., - model = env['NN_DIR'] + '/trainings/mem_191119_19h_6m/net_model_weighted.h5', - transformation = env['NN_DIR'] + '/trainings/mem_191119_19h_6m/input_tranformation_weighted.pck', - features = env['NN_DIR'] + '/trainings/mem_191119_19h_6m/input_features.pck', - process_signals = True, - mini_signals = True, # process only the signals that you'll plot - plot_signals = True, - blinded = True, -# datacards = ['hnl_m_12_lxy_0p5_to_2p0', 'hnl_m_12_lxy_lt_0p5', 'hnl_m_12_lxy_mt_2p0'], # FIXME! improve this to accept wildcards / regex - datacards = ['hnl_m_12_lxy_lt_0p5', 'hnl_m_12_lxy_0p5_to_1p5', 'hnl_m_12_lxy_1p5_to_4p0', 'hnl_m_12_lxy_mt_4p0'], # FIXME! improve this to accept wildcards / regex - ) - -if __name__ == '__main__': - - plotter.plot() - - # save the plotter and all - save_plotter_and_selections(plotter, selection, selection_mc, selection_tight) - - pass diff --git a/plotter/cfg/plot_mem_ss.cfg.py b/plotter/cfg/plot_mem_ss.cfg.py deleted file mode 100644 index 18829f9..0000000 --- a/plotter/cfg/plot_mem_ss.cfg.py +++ /dev/null @@ -1,62 +0,0 @@ -from os import environ as env -from plotter.plotter import Plotter -from plotter.selections import Selections -from plotter.utils import set_paths, save_plotter_and_selections - -ch = 'mem' - -set_paths(ch, 2018) -cuts = Selections(ch) - -selection = [ - cuts.selections['pt_iso'], - cuts.selections['baseline'], - 'l0_q==l2_q', -# cuts.selections['sideband'], - cuts.selections['signal_region'], - - 'l1_pt>7', -# 'hnl_2d_disp_sig>20', - 'hnl_pt_12>15', - 'sv_cos>0.99', - 'sv_prob>0.001', - 'l0_reliso_rho_03<0.1', - 'l0_pt>25', - 'abs(l1_dz)<10', - 'abs(l2_dz)<10', -] - -# extra selection to be applied on variables that don't exist -# in the root tree but they're created for the pandas dataset -pandas_selection = 
'hnl_2d_disp_sig_alt>20' - -selection_mc = selection + [cuts.selections['is_prompt_lepton']] -selection_tight = cuts.selections_pd['tight'] - -plotter = Plotter (channel = ch+'_ss', - base_dir = env['NTUPLE_DIR'], - post_fix = 'HNLTreeProducer/tree.root', # 'HNLTreeProducer_%s/tree.root' %ch, - selection_data = selection, - selection_mc = selection_mc, - selection_tight = selection_tight, - pandas_selection = pandas_selection, - lumi = 59700., - model = env['NN_DIR'] + '/trainings/mem_191119_19h_6m/net_model_weighted.h5', - transformation = env['NN_DIR'] + '/trainings/mem_191119_19h_6m/input_tranformation_weighted.pck', - features = env['NN_DIR'] + '/trainings/mem_191119_19h_6m/input_features.pck', - process_signals = True, - mini_signals = True, # process only the signals that you'll plot - plot_signals = True, - blinded = True, -# datacards = ['hnl_m_12_lxy_0p5_to_2p0', 'hnl_m_12_lxy_lt_0p5', 'hnl_m_12_lxy_mt_2p0'], # FIXME! improve this to accept wildcards / regex - datacards = ['hnl_m_12_lxy_lt_0p5', 'hnl_m_12_lxy_0p5_to_1p5', 'hnl_m_12_lxy_1p5_to_4p0', 'hnl_m_12_lxy_mt_4p0'], # FIXME! 
improve this to accept wildcards / regex - ) - -if __name__ == '__main__': - - plotter.plot() - - # save the plotter and all - save_plotter_and_selections(plotter, selection, selection_mc, selection_tight) - - pass diff --git a/plotter/cfg/plot_mmm.cfg.py b/plotter/cfg/plot_mmm.cfg.py deleted file mode 100644 index b9ab31e..0000000 --- a/plotter/cfg/plot_mmm.cfg.py +++ /dev/null @@ -1,63 +0,0 @@ -from os import environ as env -from plotter.plotter import Plotter -from plotter.selections import Selections -from plotter.utils import set_paths, save_plotter_and_selections - -ch = 'mmm' - -set_paths(ch, 2018) -cuts = Selections(ch) - -selection = [ - cuts.selections['pt_iso'], - cuts.selections['baseline'], - cuts.selections['vetoes_12_OS'], - cuts.selections['vetoes_01_OS'], - cuts.selections['vetoes_02_OS'], - cuts.selections['signal_region'], -# cuts.selections['sideband'], - -# 'hnl_2d_disp_sig>20', - 'hnl_pt_12>15', - 'sv_cos>0.99', - 'sv_prob>0.001', - 'l0_reliso_rho_03<0.1', - 'l0_pt>25', - 'abs(l1_dz)<10', - 'abs(l2_dz)<10', -] - -# extra selection to be applied on variables that don't exist -# in the root tree but they're created for the pandas dataset -pandas_selection = 'hnl_2d_disp_sig_alt>20' - -selection_mc = selection + [cuts.selections['is_prompt_lepton']] -selection_tight = cuts.selections_pd['tight'] - -plotter = Plotter (channel = ch, - base_dir = env['NTUPLE_DIR'], - post_fix = 'HNLTreeProducer/tree.root', # 'HNLTreeProducer_%s/tree.root' %ch, - selection_data = selection, - selection_mc = selection_mc, - selection_tight = selection_tight, - pandas_selection = pandas_selection, - lumi = 59700., -# model = env['NN_DIR'] + '/trainings/mmm/12Nov19_v0/net_model_weighted.h5', -# transformation = env['NN_DIR'] + '/trainings/mmm/12Nov19_v0/input_tranformation_weighted.pck', -# features = env['NN_DIR'] + '/trainings/mmm/12Nov19_v0/input_features.pck', - model = env['NN_DIR'] + '/all_channels_191126_9h_45m/net_model_weighted.h5', - transformation = 
env['NN_DIR'] + '/all_channels_191126_9h_45m/input_tranformation_weighted.pck', - features = env['NN_DIR'] + '/all_channels_191126_9h_45m/input_features.pck', - process_signals = True, # switch off for control regions - mini_signals = True, # process only the signals that you'll plot - plot_signals = True, - blinded = True, -# datacards = ['hnl_m_12_lxy_0p5_to_2p0', 'hnl_m_12_lxy_lt_0p5', 'hnl_m_12_lxy_mt_2p0'], # FIXME! improve this to accept wildcards / regex - datacards = ['hnl_m_12_lxy_lt_0p5', 'hnl_m_12_lxy_0p5_to_1p5', 'hnl_m_12_lxy_1p5_to_4p0', 'hnl_m_12_lxy_mt_4p0'], # FIXME! improve this to accept wildcards / regex - ) - -if __name__ == '__main__': - plotter.plot() - # save the plotter and all - save_plotter_and_selections(plotter, selection, selection_mc, selection_tight) - pass diff --git a/plotter/plotter.py b/plotter/plotter.py index 2fdfac9..1649bc9 100644 --- a/plotter/plotter.py +++ b/plotter/plotter.py @@ -5,6 +5,7 @@ import numpy as np import pandas as pd from os import makedirs +from os import environ as env from time import time from collections import OrderedDict from plotter.evaluate_nn import Evaluator @@ -29,6 +30,7 @@ class Plotter(object): def __init__(self , channel , + year , base_dir , post_fix , selection_data , @@ -42,11 +44,16 @@ def __init__(self , process_signals , plot_signals , blinded , + sbtrct_prmpt = True, + region_label= '', datacards=[] , mini_signals=False, do_ratio=True): + self.region_label = region_label + self.sbtrct_prmpt = sbtrct_prmpt self.channel = channel.split('_')[0] + self.year = year self.full_channel = channel self.base_dir = base_dir self.post_fix = post_fix @@ -100,6 +107,15 @@ def create_canvas(self, ratio=True): self.main_pad.SetLeftMargin(0.15) self.main_pad.SetRightMargin(0.15) + def create_kanvas(self): + self.kanvas = Canvas(width=700, height=700) ; self.kanvas.Draw() + self.kanvas.cd() ; self.kpad = Pad(0. , 0. , 1., 1. 
) ; self.kpad .Draw() + self.kpad.SetTicks(True) + self.kpad.SetTopMargin(0.15) + self.kpad.SetBottomMargin(0.15) + self.kpad.SetLeftMargin(0.15) + self.kpad.SetRightMargin(0.15) + def create_datacards(self, data, bkgs, signals, label, protect_empty_bins=['nonprompt']): ''' FIXME! For now this is specific to the data-driven case @@ -176,7 +192,7 @@ def create_datacards(self, data, bkgs, signals, label, protect_empty_bins=['nonp def plot(self): evaluator = Evaluator(self.model, self.transformation, self.features) - self.plt_dir = plot_dir() + self.plt_dir = plot_dir(self.region_label) # NN evaluator print('============> starting reading the trees') @@ -184,44 +200,64 @@ def plot(self): now = time() signal = [] if self.process_signals: - # FIXME! -# signal = get_signal_samples(self.channel, self.base_dir, self.post_fix, self.selection_data) -# signal = get_signal_samples(self.channel, '/Users/manzoni/Documents/HNL/ntuples/2018/sig', 'HNLTreeProducer_mmm/tree.root', self.selection_data, mini=self.mini_signals) -# signal = get_signal_samples(self.channel, '/Users/manzoni/Documents/HNL/ntuples/2018/sig', 'HNLTreeProducer_mem/tree.root', self.selection_data, mini=self.mini_signals) - signal = get_signal_samples(self.channel, '/Users/manzoni/Documents/HNL/ntuples/2018/sig', 'HNLTreeProducer_eem/tree.root', self.selection_data, mini=self.mini_signals) -# signal = get_signal_samples(self.channel, '/Users/manzoni/Documents/HNL/ntuples/2018/sig', 'HNLTreeProducer_eee/tree.root', self.selection_data, mini=self.mini_signals) + signal = get_signal_samples(self.channel, env['NTUPLE_BASE_DIR'] + '{year}/sig'.format(year=self.year), 'HNLTreeProducer_%s/tree.root'%self.channel, self.selection_data, mini=self.mini_signals, year=self.year) else: signal = [] - data = get_data_samples (self.channel, self.base_dir, self.post_fix, self.selection_data) - # FIXME! 
-# mc = get_mc_samples (self.channel, self.base_dir, self.post_fix, self.selection_mc) -# mc = get_mc_samples (self.channel, '/Users/manzoni/Documents/HNL/ntuples/2018/bkg', 'HNLTreeProducer_mmm/tree.root', self.selection_mc) -# mc = get_mc_samples (self.channel, '/Users/manzoni/Documents/HNL/ntuples/2018/bkg', 'HNLTreeProducer_mem/tree.root', self.selection_mc) - mc = get_mc_samples (self.channel, '/Users/manzoni/Documents/HNL/ntuples/2018/bkg', 'HNLTreeProducer_eem/tree.root', self.selection_mc) -# mc = get_mc_samples (self.channel, '/Users/manzoni/Documents/HNL/ntuples/2018/bkg', 'HNLTreeProducer_eee/tree.root', self.selection_mc) + + if self.year != 2018: + data = get_data_samples(self.channel, env['NTUPLE_BASE_DIR'] + '{year}/data'.format(year=self.year), 'HNLTreeProducer_%s/tree.root'%self.channel, self.selection_data, self.year) + if self.year == 2018: + data = get_data_samples(self.channel, env['NTUPLE_BASE_DIR'] + '{year}/{ch}18'.format(year=self.year, ch=self.channel), self.post_fix, self.selection_data, self.year) + + mc = get_mc_samples (self.channel, env['NTUPLE_BASE_DIR'] + '{year}/mc'.format(year=self.year), 'HNLTreeProducer_%s/tree.root'%self.channel, self.selection_mc, self.year) print('============> it took %.2f seconds' %(time() - now)) + + dbg = False + + # apply an extra selection to the pandas dataframes + if len(self.pandas_selection): + for isample in (mc+data+signal): + isample.df = isample.df.query(self.pandas_selection) # evaluate FR for isample in (mc+data): #+signal): + isample.df['year'] = self.year isample.df['fr'] = evaluator.evaluate(isample.df) # already corrected, ready to be applied in lnt-not-tight isample.df['fr_corr'] = isample.df['fr'] / (1. 
- isample.df['fr']) - - # apply an extra selection to the pandas dataframes - if len(self.pandas_selection): - for isample in (mc+data+signal): - isample.df = isample.df.query(self.pandas_selection) # split the dataframe in tight and lnt-not-tight (called simply lnt for short) for isample in (mc+data+signal): + # extra variables + isample.df['abs_l1_dxy'] = np.abs(isample.df['l1_dxy']) + isample.df['abs_l2_dxy'] = np.abs(isample.df['l2_dxy']) + #defining tight/lnt isample.df_tight = isample.df.query(self.selection_tight) - isample.df_lnt = isample.df.query(self.selection_lnt) + isample.df_lnt = isample.df.query(self.selection_lnt) + + if self.process_signals and dbg: + jmc = signal[1] # = 4 GeV Mass + # jmc = signal[3] # = 10 GeV Mass + weights_TEST = self.total_weight_calculator(jmc.df_tight, ['weight', 'lhe_weight']+jmc.extra_signal_weights, [self.lumi, jmc.lumi_scaling]) + weights_onlyLHE_TEST = self.total_weight_calculator(jmc.df_tight, ['lhe_weight']+jmc.extra_signal_weights, [self.lumi, jmc.lumi_scaling]) + check_yields = 0 + check_yields_onlyLHE = 0 + for i in weights_TEST: + check_yields += i + for lhi in weights_onlyLHE_TEST: + check_yields_onlyLHE += lhi + print (self.year, self.channel, 'events:', jmc.df_tight.shape[0], ' ## yield =', check_yields, '(all weights)') + print (self.lumi, jmc.lumi_scaling, check_yields_onlyLHE, '(only lhe)') + if dbg == True: + return 0 # sort depending on their position in the stack mc.sort(key = lambda x : x.position_in_stack) # now we plot self.create_canvas(self.do_ratio) + # create checkpad + self.create_kanvas() @@ -235,6 +271,7 @@ def plot(self): stack_prompt = [] stack_nonprompt = [] + stack_nonprompt_check = [] for imc in mc: @@ -246,24 +283,48 @@ def plot(self): mc_df_lnt = imc.df_lnt histo_tight = Hist(bins, title=imc.label, markersize=0, legendstyle='F', name=imc.datacard_name+'#'+label) + weights = self.total_weight_calculator(mc_df_tight, ['weight', 
'lhe_weight']+imc.extra_signal_weights, [self.lumi, imc.lumi_scaling]) + # print ('WARNING, ONLY LHE WEIGHTS'); weights = self.total_weight_calculator(mc_df_tight, ['lhe_weight']+imc.extra_signal_weights, [self.lumi, imc.lumi_scaling]) histo_tight.fill_array(mc_df_tight[variable], weights=weights) + # print ('WARNING, ONLY EVENT NUMBERS, UNSCALED'); histo_tight.fill_array(mc_df_tight[variable]) + histo_tight.fillstyle = 'solid' histo_tight.fillcolor = 'steelblue' + histo_tight.linecolor = 'steelblue' histo_tight.linewidth = 0 stack_prompt.append(histo_tight) - histo_lnt = Hist(bins, title=imc.label, markersize=0, legendstyle='F') - weights = self.total_weight_calculator(mc_df_lnt, ['weight', 'lhe_weight', 'fr_corr']+imc.extra_signal_weights, [-1., self.lumi, imc.lumi_scaling]) - histo_lnt.fill_array(mc_df_lnt[variable], weights=weights) + if self.sbtrct_prmpt: + histo_lnt = Hist(bins, title=imc.label, markersize=0, legendstyle='F') - histo_lnt.fillstyle = 'solid' - histo_lnt.fillcolor = 'skyblue' - histo_lnt.linewidth = 0 + weights = self.total_weight_calculator(mc_df_lnt, ['weight', 'lhe_weight', 'fr_corr']+imc.extra_signal_weights, [-1., self.lumi, imc.lumi_scaling]) + # print ('WARNING, ONLY LHE WEIGHTS'); weights = self.total_weight_calculator(mc_df_lnt, ['lhe_weight', 'fr_corr']+imc.extra_signal_weights, [-1., self.lumi, imc.lumi_scaling]) + histo_lnt.fill_array(mc_df_lnt[variable], weights=weights) + + histo_lnt.fillstyle = 'solid' + histo_lnt.linecolor = 'skyblue' + histo_lnt.fillcolor = 'skyblue' + histo_lnt.linewidth = 0 + + stack_nonprompt.append(histo_lnt) + + # Sanity check + weights_noFR = self.total_weight_calculator(mc_df_lnt, ['weight', 'lhe_weight']+imc.extra_signal_weights, [self.lumi, imc.lumi_scaling]) + histo_lnt_check = Hist(bins, title=imc.label+'check', markersize=0, legendstyle='F') + histo_lnt_check.fill_array(mc_df_lnt[variable], weights=weights_noFR) + + histo_lnt_check.fillstyle = 'solid' + histo_lnt_check.linecolor = 'skyblue' + 
histo_lnt_check.fillcolor = 'skyblue' + histo_lnt_check.linewidth = 0 + + stack_nonprompt_check.append(histo_lnt_check) + + # print ('WARNING, ONLY EVENT NUMBERS, UNSCALED'); histo_lnt.fill_array(mc_df_lnt[variable]) - stack_nonprompt.append(histo_lnt) ###################################################################################### # plot the signals @@ -280,8 +341,13 @@ def plot(self): isig_df_tight = isig.df_tight histo_tight = Hist(bins, title=isig.label, markersize=0, legendstyle='L', name=isig.datacard_name+'#'+label) # the "#" thing is a trick to give hists unique name, else ROOT complains + weights = self.total_weight_calculator(isig_df_tight, ['weight', 'lhe_weight']+isig.extra_signal_weights, [self.lumi, isig.lumi_scaling]) + # print ('WARNING, ONLY LHE WEIGHTS'); weights = self.total_weight_calculator(isig_df_tight, ['lhe_weight']+isig.extra_signal_weights, [self.lumi, isig.lumi_scaling]) histo_tight.fill_array(isig_df_tight[variable], weights=weights) + + # print ('WARNING, ONLY EVENT NUMBERS, UNSCALED'); histo_tight.fill_array(isig_df_tight[variable]) + histo_tight.color = isig.colour histo_tight.fillstyle = 'hollow' histo_tight.linewidth = 2 @@ -297,6 +363,7 @@ def plot(self): data_prompt = [] data_nonprompt = [] + data_nonprompt_check = [] for idata in data: @@ -314,19 +381,53 @@ def plot(self): histo_lnt = Hist(bins, title=idata.label, markersize=0, legendstyle='F') histo_lnt.fill_array(idata_df_lnt[variable], weights=idata_df_lnt.fr_corr) + + histo_lnt.fillstyle = 'solid' + histo_lnt.linecolor = 'skyblue' + histo_lnt.fillcolor = 'skyblue' + histo_lnt.linewidth = 0 data_nonprompt.append(histo_lnt) + # check LNT + histo_lnt_check = Hist(bins, title=idata.label+'check', markersize=0, legendstyle='F') + histo_lnt_check.fill_array(idata_df_lnt[variable]) + data_nonprompt_check.append(histo_lnt_check) + + histo_lnt_check.fillstyle = 'solid' + histo_lnt_check.fillcolor = 'firebrick' + histo_lnt_check.linecolor = 'firebrick' + 
histo_lnt_check.linewidth = 0 + # put the prompt backgrounds together all_exp_prompt = sum(stack_prompt) all_exp_prompt.title = 'prompt' # put the nonprompt backgrounds together - all_exp_nonprompt = sum(stack_nonprompt+data_nonprompt) + if self.sbtrct_prmpt: + print ('\n\tWARNING: SUBTRACT PROMPT IS ON\n') + all_exp_nonprompt = sum(stack_nonprompt+data_nonprompt) + + if not self.sbtrct_prmpt: + print ('\n\tWARNING: SUBTRACT PROMPT IS OFF\n') + all_exp_nonprompt = sum(data_nonprompt) + all_exp_nonprompt.title = 'nonprompt' + all_exp_nonprompt_mc_check = sum(stack_nonprompt_check) + all_exp_nonprompt_mc_check.title = 'nonprompt_mc_check' + + # check_stack + all_exp_nonprompt_data_check = sum(data_nonprompt_check) + all_exp_nonprompt_data_check.title = 'data_nonprompt_check' + + # all_exp_nonprompt_check = sum(stack_nonprompt_check + data_nonprompt_check) + # all_exp_nonprompt_check.title = 'all_LNT_check' + + # create the stacks stack = HistStack([all_exp_prompt, all_exp_nonprompt], drawstyle='HIST', title='') + stack_check = HistStack([all_exp_nonprompt_data_check, all_exp_nonprompt_mc_check], drawstyle='HISTE', title='') # stat uncertainty hist_error = stack.sum #sum([all_exp_prompt, all_exp_nonprompt]) @@ -397,6 +498,33 @@ def plot(self): # for ii in things_to_plot: print(islogy, ii.GetMinimum(), ii.GetMaximum()) draw(things_to_plot, xtitle=xlabel, ytitle=ylabel, pad=self.main_pad, logy=islogy) + + check_LNT = True + all_exp_nonprompt_data_check .linewidth = 2 + all_exp_nonprompt_mc_check .linewidth = 2 + if check_LNT and islogy == False: + self.kpad.cd() + all_obs_prompt.SetMarkerSize(0) + all_obs_prompt.SetTitle('%s; %s; %s' %(label, xlabel, ylabel)) + stack_check.SetTitle('%s; %s; %s' %(label, xlabel, ylabel)) + stack_check.Draw('histe') + all_obs_prompt.Draw('histesame') + legend_check = Legend([all_obs_prompt, stack_check], pad=self.kpad, leftmargin=0., rightmargin=0., topmargin=0., textfont=42, textsize=0.02, entrysep=0.01, entryheight=0.03) + 
legend_check.SetBorderSize(0) + legend_check.x1 = 0.35 + legend_check.y1 = 0.69 + legend_check.x2 = 0.68 + legend_check.y2 = 0.82 + legend_check.SetFillColor(0) + self.kanvas.cd() + legend_check.Draw('same') + self.kpad.cd() + stack_check.Draw('histesame') + all_obs_prompt.Draw('histesame') + + self.main_pad.cd() + # TODO instead do hist.DrawNormalized('ep'), 'epsame' ... + # draw(all_exp_nonprompt, xtitle=xlabel, ytitle=ylabel, pad=self.kpad, logy=islogy) # update the stack yaxis range *after* is drawn. # It will be picked up by canvas.Update() @@ -425,6 +553,9 @@ def plot(self): ratio_exp_error.color = 'gray' for ithing in [ratio_data, ratio_exp_error]: + if ivar.set_log_x: + ithing.xaxis.set_no_exponent() + ithing.xaxis.set_more_log_labels() ithing.xaxis.set_label_size(ithing.xaxis.get_label_size() * 3.) # the scale should match that of the main/ratio pad size ratio ithing.yaxis.set_label_size(ithing.yaxis.get_label_size() * 3.) # the scale should match that of the main/ratio pad size ratio ithing.xaxis.set_title_size(ithing.xaxis.get_title_size() * 3.) # the scale should match that of the main/ratio pad size ratio @@ -444,28 +575,42 @@ def plot(self): self.ratio_pad.cd() line.Draw('same') - self.canvas.cd() - # FIXME! add SS and OS channels - if self.full_channel == 'mmm': channel = '\mu\mu\mu' - elif self.full_channel == 'eee': channel = 'eee' - elif self.full_channel == 'mem_os': channel = '\mu^{\pm}\mu^{\mp}e' - elif self.full_channel == 'mem_ss': channel = '\mu^{\pm}\mu^{\pm}e' - elif self.full_channel == 'eem_os': channel = 'e^{\pm}e^{\mp}\mu' - elif self.full_channel == 'eem_ss': channel = 'e^{\pm}e^{\pm}\mu' - else: assert False, 'ERROR: Channel not valid.' 
- finalstate = ROOT.TLatex(0.68, 0.68, channel) - finalstate.SetTextFont(43) - finalstate.SetTextSize(25) - finalstate.SetNDC() - finalstate.Draw('same') + for can in [self.canvas, self.kanvas]: + can.cd() + if self.full_channel == 'mmm': channel = '\mu\mu\mu' + elif self.full_channel == 'eee': channel = 'eee' + elif self.full_channel == 'mem_os': channel = '\mu^{\pm}\mu^{\mp}e' + elif self.full_channel == 'mem_ss': channel = '\mu^{\pm}\mu^{\pm}e' + elif self.full_channel == 'eem_os': channel = 'e^{\pm}e^{\mp}\mu' + elif self.full_channel == 'eem_ss': channel = 'e^{\pm}e^{\pm}\mu' + else: assert False, 'ERROR: Channel not valid.' + finalstate = ROOT.TLatex(0.68, 0.68, channel) + finalstate.SetTextFont(43) + finalstate.SetTextSize(25) + finalstate.SetNDC() + finalstate.Draw('same') + self.canvas.cd() legend.Draw('same') if self.plot_signals: legend_signals.Draw('same') - CMS_lumi(self.main_pad, 4, 0) - self.canvas.Modified() - self.canvas.Update() + if self.year == 2016: + lumi_text = "2016, L = 35.87 fb^{-1}" + elif self.year == 2017: + lumi_text = "2017, L = 41.53 fb^{-1}" + elif self.year == 2018: + lumi_text = "2018, L = 59.74 fb^{-1}" + CMS_lumi(self.main_pad, 4, 0, lumi_13TeV = lumi_text) + # CMS_lumi(self.kpad, 4, 0, lumi_13TeV = lumi_text) + if ivar.set_log_x: + self.main_pad .SetLogx() + self.kpad .SetLogx() + self.ratio_pad.SetLogx() + self.canvas.Modified(); self.canvas.Update() self.canvas.SaveAs(self.plt_dir + '%s%s.pdf' %(label, '_log' if islogy else '_lin')) + if check_LNT and islogy == False: + self.kanvas.Modified(); self.kanvas.Update() + self.kanvas.SaveAs(self.plt_dir + '%s%s_check_LNT-T.pdf' %(label, '_log' if islogy else '_lin')) # save only the datacards you want, don't flood everything if len(self.datacards) and label not in self.datacards: diff --git a/plotter/sample.py b/plotter/sample.py index 5b7fdb5..44fd4ba 100644 --- a/plotter/sample.py +++ b/plotter/sample.py @@ -188,47 +188,92 @@ def __init__(self, -def get_data_samples(channel, 
basedir, postfix, selection): +def get_data_samples(channel, basedir, postfix, selection, year=2018): if channel [0] == 'm': lep = 'mu' elif channel [0] == 'e': lep = 'ele' + assert year in [2016, 2017, 2018], 'Year does not exist' assert lep == 'ele' or lep == 'mu', 'Lepton flavor error' - data = [ - Sample('Single_{lep}_2018A'.format(lep=lep), channel, '2018A', selection, 'data_obs', 'black', 9999, basedir, postfix, True, False, False, 1., 1.), - Sample('Single_{lep}_2018B'.format(lep=lep), channel, '2018B', selection, 'data_obs', 'black', 9999, basedir, postfix, True, False, False, 1., 1.), - Sample('Single_{lep}_2018C'.format(lep=lep), channel, '2018C', selection, 'data_obs', 'black', 9999, basedir, postfix, True, False, False, 1., 1.), - Sample('Single_{lep}_2018D'.format(lep=lep), channel, '2018D', selection, 'data_obs', 'black', 9999, basedir, postfix, True, False, False, 1., 1.), - ] + if year == 2016: + data = [ + # Sample('Single_{lep}_2016B'.format(lep=lep), channel, '2016B', selection, 'data_obs', 'black', 9999, basedir, postfix, True, False, False, 1., 1.) # no events processed ? 
TODO, + Sample('Single_{lep}_2016C'.format(lep=lep), channel, '2016C', selection, 'data_obs', 'black', 9999, basedir, postfix, True, False, False, 1., 1.), + Sample('Single_{lep}_2016D'.format(lep=lep), channel, '2016D', selection, 'data_obs', 'black', 9999, basedir, postfix, True, False, False, 1., 1.), + Sample('Single_{lep}_2016E'.format(lep=lep), channel, '2016E', selection, 'data_obs', 'black', 9999, basedir, postfix, True, False, False, 1., 1.), + Sample('Single_{lep}_2016F'.format(lep=lep), channel, '2016F', selection, 'data_obs', 'black', 9999, basedir, postfix, True, False, False, 1., 1.), + Sample('Single_{lep}_2016G'.format(lep=lep), channel, '2016G', selection, 'data_obs', 'black', 9999, basedir, postfix, True, False, False, 1., 1.), + Sample('Single_{lep}_2016H'.format(lep=lep), channel, '2016H', selection, 'data_obs', 'black', 9999, basedir, postfix, True, False, False, 1., 1.), + ] + elif year == 2017: + data = [ + Sample('Single_{lep}_2017B'.format(lep=lep), channel, '2017B', selection, 'data_obs', 'black', 9999, basedir, postfix, True, False, False, 1., 1.), + Sample('Single_{lep}_2017C'.format(lep=lep), channel, '2017C', selection, 'data_obs', 'black', 9999, basedir, postfix, True, False, False, 1., 1.), + Sample('Single_{lep}_2017D'.format(lep=lep), channel, '2017D', selection, 'data_obs', 'black', 9999, basedir, postfix, True, False, False, 1., 1.), + Sample('Single_{lep}_2017E'.format(lep=lep), channel, '2017E', selection, 'data_obs', 'black', 9999, basedir, postfix, True, False, False, 1., 1.), + Sample('Single_{lep}_2017F'.format(lep=lep), channel, '2017F', selection, 'data_obs', 'black', 9999, basedir, postfix, True, False, False, 1., 1.), + ] + elif year == 2018: + data = [ + Sample('Single_{lep}_2018A'.format(lep=lep), channel, '2018A', selection, 'data_obs', 'black', 9999, basedir, postfix, True, False, False, 1., 1.), + Sample('Single_{lep}_2018B'.format(lep=lep), channel, '2018B', selection, 'data_obs', 'black', 9999, basedir, postfix, 
True, False, False, 1., 1.), + Sample('Single_{lep}_2018C'.format(lep=lep), channel, '2018C', selection, 'data_obs', 'black', 9999, basedir, postfix, True, False, False, 1., 1.), + Sample('Single_{lep}_2018D'.format(lep=lep), channel, '2018D', selection, 'data_obs', 'black', 9999, basedir, postfix, True, False, False, 1., 1.), + ] return data -def get_mc_samples(channel, basedir, postfix, selection): - mc = [ - Sample('DYJetsToLL_M50_ext', channel, r'DY$\to\ell\ell$', selection, 'DY', 'gold' ,10, basedir, postfix, False, True, False, 1., 6077.22), - Sample('TTJets_ext' , channel, r'$t\bar{t}$' , selection, 'TT', 'slateblue', 0, basedir, postfix, False, True, False, 1., 831.76), - Sample('WW' , channel, 'WW' , selection, 'WW', 'blue' , 5, basedir, postfix, False, True, False, 1., 75.88), - Sample('WZ' , channel, 'WZ' , selection, 'WZ', 'blue' , 5, basedir, postfix, False, True, False, 1., 27.6 ), - Sample('ZZ' , channel, 'ZZ' , selection, 'ZZ', 'blue' , 5, basedir, postfix, False, True, False, 1., 12.14), - ] +def get_mc_samples(channel, basedir, postfix, selection, year=2018): + assert year in [2016, 2017, 2018], 'Year does not exist' + if year == 2016: + mc = [ + Sample('DYJetsToLL_M50' , channel, r'DY$\to\ell\ell$', selection, 'DY', 'gold' ,10, basedir, postfix, False, True, False, 1., 6077.22), + Sample('TTJets' , channel, r'$t\bar{t}$' , selection, 'TT', 'slateblue', 0, basedir, postfix, False, True, False, 1., 831.76), + Sample('WW' , channel, 'WW' , selection, 'WW', 'blue' , 5, basedir, postfix, False, True, False, 1., 75.88), + Sample('WZ' , channel, 'WZ' , selection, 'WZ', 'blue' , 5, basedir, postfix, False, True, False, 1., 27.6 ), + Sample('ZZTo4L' , channel, 'ZZ' , selection, 'ZZ', 'blue' , 5, basedir, postfix, False, True, False, 1., 12.14), #FIXME check xsec here! 
+ ] + elif year == 2017: + mc = [ + Sample('DYJetsToLL_M50_ext', channel, r'DY$\to\ell\ell$', selection, 'DY', 'gold' ,10, basedir, postfix, False, True, False, 1., 6077.22), + Sample('TTJets' , channel, r'$t\bar{t}$' , selection, 'TT', 'slateblue', 0, basedir, postfix, False, True, False, 1., 831.76), + Sample('WW' , channel, 'WW' , selection, 'WW', 'blue' , 5, basedir, postfix, False, True, False, 1., 75.88), + Sample('WZ' , channel, 'WZ' , selection, 'WZ', 'blue' , 5, basedir, postfix, False, True, False, 1., 27.6 ), + Sample('ZZ' , channel, 'ZZ' , selection, 'ZZ', 'blue' , 5, basedir, postfix, False, True, False, 1., 12.14), + ] + elif year == 2018: + mc = [ + Sample('DYJetsToLL_M50_ext', channel, r'DY$\to\ell\ell$', selection, 'DY', 'gold' ,10, basedir, postfix, False, True, False, 1., 6077.22), + Sample('TTJets_ext' , channel, r'$t\bar{t}$' , selection, 'TT', 'slateblue', 0, basedir, postfix, False, True, False, 1., 831.76), + Sample('WW' , channel, 'WW' , selection, 'WW', 'blue' , 5, basedir, postfix, False, True, False, 1., 75.88), + Sample('WZ' , channel, 'WZ' , selection, 'WZ', 'blue' , 5, basedir, postfix, False, True, False, 1., 27.6 ), + Sample('ZZ' , channel, 'ZZ' , selection, 'ZZ', 'blue' , 5, basedir, postfix, False, True, False, 1., 12.14), + ] return mc -def get_signal_samples(channel, basedir, postfix, selection, mini=False): +def get_signal_samples(channel, basedir, postfix, selection, mini=False, year=2018): assert channel[0] == 'e' or channel[0] == 'm', 'Lepton flavor error' if channel [0] == 'm': if mini: + # from pdb import set_trace; set_trace() signal = [ ########## M = 2 - Sample('HN3L_M_2_V_0p0110905365064_mu_massiveAndCKM_LO' , channel, '#splitline{m=2 GeV |V|^{2}=1.2 10^{-4}}{Majorana}' , selection, 'hnl_m_2_v2_1p2Em04_majorana' , 'forestgreen',10, basedir, postfix, False, True, False, 1., 0.5278 , toplot=True ), - ########## M = 5 - Sample('HN3L_M_5_V_0p00145602197786_mu_massiveAndCKM_LO' , channel, '#splitline{m=5 GeV |V|^{2}=2.1 
10^{-6}}{Majorana}' , selection, 'hnl_m_5_v2_2p1Em06_majorana' , 'chocolate' ,10, basedir, postfix, False, True, False, 1., 0.008434 , toplot=True ), + #FIXME THIS ONE DOESN"T WORK FOR 2016! (no skim report found) + # Sample('HN3L_M_2_V_0p0110905365064_mu_massiveAndCKM_LO' , channel, '#splitline{m=2 GeV |V|^{2}=1.2 10^{-4}}{Majorana}' , selection, 'hnl_m_2_v2_1p2Em04_majorana' , 'forestgreen',10, basedir, postfix, False, True, False, 1., 0.5278 , toplot=True ) if year != 2016 else 'DEL', + # Sample('HN3L_M_4_V_0p00290516780927_mu_massiveAndCKM_LO' , channel, '#splitline{m=4 GeV, |V|^{2}=8.4 10^{-6}}{Majorana}' , selection, 'hnl_m_4_v2_8p4Em06_majorana' , 'indigo' ,10, basedir, postfix, False, True, False, 1., 0.03356 , toplot=True) if year != 2016 else 'DEL', + # ########## M = 5 + # Sample('HN3L_M_5_V_0p00145602197786_mu_massiveAndCKM_LO' , channel, '#splitline{m=5 GeV |V|^{2}=2.1 10^{-6}}{Majorana}' , selection, 'hnl_m_5_v2_2p1Em06_majorana' , 'chocolate' ,10, basedir, postfix, False, True, False, 1., 0.008434 , toplot=True ), ########## M = 10 - Sample('HN3L_M_10_V_0p001_mu_massiveAndCKM_LO' , channel, '#splitline{m=10 GeV |V|^{2}=1.0 10^{-6}}{Majorana}', selection, 'hnl_m_10_v2_1p0Em06_majorana', 'teal' ,10, basedir, postfix, False, True, False, 1., 0.004121 , toplot=True ), + # Sample('HN3L_M_10_V_0p000756967634711_mu_massiveAndCKM_LO', channel, '#splitline{m=10 GeV, |V|^{2}=5.7 10^{-7}}{Majorana}', selection, 'hnl_m_10_v2_5p7Em07_majorana', 'teal' ,10, basedir, postfix, False, True, False, 1., 0.002362 , toplot=True), # AGREED UPON WITH MARTINA FOR THE SYNC + # Sample('HN3L_M_10_V_0p001_mu_massiveAndCKM_LO' , channel, '#splitline{m=10 GeV |V|^{2}=1.0 10^{-6}}{Majorana}', selection, 'hnl_m_10_v2_1p0Em06_majorana', 'teal' ,10, basedir, postfix, False, True, False, 1., 0.004121 , toplot=True ), + Sample('HN3L_M_1_V_0p0949736805647_mu_massiveAndCKM_LO' , channel, '#splitline{m=1 GeV, |V|^{2}=9.0 10^{-3}}{Majorana}' , selection, 'hnl_m_1_v2_9p0Em03_majorana' , 
'darkorange' ,10, basedir, postfix, False, True, False, 1., 38.67 , toplot=True), ## added 24Mar20; SR plots for AN + Sample('HN3L_M_2_V_0p0248394846967_mu_massiveAndCKM_LO' , channel, '#splitline{m=2 GeV, |V|^{2}=6.2 10^{-4}}{Majorana}' , selection, 'hnl_m_2_v2_6p2Em04_majorana' , 'forestgreen',10, basedir, postfix, False, True, False, 1., 2.647 , toplot=True), ## added 24Mar20; SR plots for AN + Sample('HN3L_M_8_V_0p00151327459504_mu_massiveAndCKM_LO' , channel, '#splitline{m=8 GeV, |V|^{2}=2.3 10^{-6}}{Majorana}' , selection, 'hnl_m_8_v2_2p3Em06_majorana' , 'darkgray' ,10, basedir, postfix, False, True, False, 1., 9.383e-03, toplot=True), ## added 24Mar20; SR plots for AN ] else: signal = [ Sample('HN3L_M_1_V_0p0949736805647_mu_massiveAndCKM_LO' , channel, '#splitline{m=1 GeV, |V|^{2}=9.0 10^{-3}}{Majorana}' , selection, 'hnl_m_1_v2_9p0Em03_majorana' , 'darkorange' ,10, basedir, postfix, False, True, False, 1., 38.67 , toplot=False, is_generator=True), - Sample('HN3L_M_2_V_0p0110905365064_mu_massiveAndCKM_LO' , channel, '#splitline{m=2 GeV, |V|^{2}=1.2 10^{-4}}{Majorana}' , selection, 'hnl_m_2_v2_1p2Em04_majorana' , 'forestgreen',10, basedir, postfix, False, True, False, 1., 0.5278 , toplot=True , is_generator=True), + Sample('HN3L_M_2_V_0p0110905365064_mu_massiveAndCKM_LO' , channel, '#splitline{m=2 GeV, |V|^{2}=1.2 10^{-4}}{Majorana}' , selection, 'hnl_m_2_v2_1p2Em04_majorana' , 'forestgreen',10, basedir, postfix, False, True, False, 1., 0.5278 , toplot=True , is_generator=True) if year != 2016 else 'DEL', Sample('HN3L_M_2_V_0p0248394846967_mu_massiveAndCKM_LO' , channel, '#splitline{m=2 GeV, |V|^{2}=6.2 10^{-4}}{Majorana}' , selection, 'hnl_m_2_v2_6p2Em04_majorana' , 'forestgreen',10, basedir, postfix, False, True, False, 1., 2.647 , toplot=False), Sample('HN3L_M_3_V_0p00707813534767_mu_massiveAndCKM_LO' , channel, '#splitline{m=3 GeV, |V|^{2}=5.0 10^{-5}}{Majorana}' , selection, 'hnl_m_3_v2_5p0Em05_majorana' , 'firebrick' ,10, basedir, postfix, False, True, 
False, 1., 0.2014 , toplot=False, is_generator=True), - Sample('HN3L_M_4_V_0p00290516780927_mu_massiveAndCKM_LO' , channel, '#splitline{m=4 GeV, |V|^{2}=8.4 10^{-6}}{Majorana}' , selection, 'hnl_m_4_v2_8p4Em06_majorana' , 'indigo' ,10, basedir, postfix, False, True, False, 1., 0.0335 , toplot=False, is_generator=True), + Sample('HN3L_M_4_V_0p00290516780927_mu_massiveAndCKM_LO' , channel, '#splitline{m=4 GeV, |V|^{2}=8.4 10^{-6}}{Majorana}' , selection, 'hnl_m_4_v2_8p4Em06_majorana' , 'indigo' ,10, basedir, postfix, False, True, False, 1., 0.0335 , toplot=False, is_generator=True) if year != 2016 else 'DEL', Sample('HN3L_M_5_V_0p000316227766017_mu_massiveAndCKM_LO' , channel, '#splitline{m=5 GeV, |V|^{2}=1.0 10^{-7}}{Majorana}' , selection, 'hnl_m_5_v2_1p0Em07_majorana' , 'chocolate' ,10, basedir, postfix, False, True, False, 1., 0.0003981, toplot=False), Sample('HN3L_M_5_V_0p000547722557505_mu_massiveAndCKM_LO' , channel, '#splitline{m=5 GeV, |V|^{2}=3.0 10^{-7}}{Majorana}' , selection, 'hnl_m_5_v2_3p0Em07_majorana' , 'chocolate' ,10, basedir, postfix, False, True, False, 1., 0.001194 , toplot=False), Sample('HN3L_M_5_V_0p00145602197786_mu_massiveAndCKM_LO' , channel, '#splitline{m=5 GeV, |V|^{2}=2.1 10^{-6}}{Majorana}' , selection, 'hnl_m_5_v2_2p1Em06_majorana' , 'chocolate' ,10, basedir, postfix, False, True, False, 1., 0.008434 , toplot=True , is_generator=True), @@ -238,25 +283,32 @@ def get_signal_samples(channel, basedir, postfix, selection, mini=False): Sample('HN3L_M_10_V_0p000756967634711_mu_massiveAndCKM_LO', channel, '#splitline{m=10 GeV, |V|^{2}=5.7 10^{-7}}{Majorana}', selection, 'hnl_m_10_v2_5p7Em07_majorana', 'teal' ,10, basedir, postfix, False, True, False, 1., 0.002362 , toplot=False), Sample('HN3L_M_10_V_0p001_mu_massiveAndCKM_LO' , channel, '#splitline{m=10 GeV, |V|^{2}=1.0 10^{-6}}{Majorana}', selection, 'hnl_m_10_v2_1p0Em06_majorana', 'teal' ,10, basedir, postfix, False, True, False, 1., 0.004121 , toplot=True , is_generator=True), ] + elif 
channel [0] == 'e': if mini: signal = [ - Sample('HN3L_M_2_V_0p0248394846967_e_massiveAndCKM_LO' , channel, '#splitline{m=2 GeV, |V|^{2}=6.2 10^{-4}}{Majorana}' , selection, 'hnl_m_2_v2_6p2Em04_majorana' , 'forestgreen',10, basedir, postfix, False, True, False, 1., 2.648 , toplot=True , is_generator=False), - Sample('HN3L_M_8_V_0p00151327459504_e_massiveAndCKM_LO' , channel, '#splitline{m=8 GeV, |V|^{2}=2.3 10^{-6}}{Majorana}' , selection, 'hnl_m_8_v2_2p3Em06_majorana' , 'darkgray' ,10, basedir, postfix, False, True, False, 1., 9.383e-03, toplot=True , is_generator=False), + Sample('HN3L_M_1_V_0p0949736805647_e_massiveAndCKM_LO' , channel, '#splitline{m=1 GeV, |V|^{2}=9.0 10^{-3}}{Majorana}' , selection, 'hnl_m_1_v2_9p0Em03_majorana' , 'darkorange' ,10, basedir, postfix, False, True, False, 1., 38.23 , toplot=True), ## added 24Mar20; SR plots for AN + Sample('HN3L_M_2_V_0p0248394846967_e_massiveAndCKM_LO' , channel, '#splitline{m=2 GeV, |V|^{2}=6.2 10^{-4}}{Majorana}' , selection, 'hnl_m_2_v2_6p2Em04_majorana' , 'forestgreen',10, basedir, postfix, False, True, False, 1., 2.648 , toplot=True), ## kept 24Mar20; SR plots for AN + # Sample('HN3L_M_4_V_0p00290516780927_e_massiveAndCKM_LO' , channel, '#splitline{m=4 GeV, |V|^{2}=8.4 10^{-6}}{Majorana}' , selection, 'hnl_m_4_v2_8p4Em06_majorana' , 'indigo' ,10, basedir, postfix, False, True, False, 1., 3.365e-02, toplot=True), + Sample('HN3L_M_8_V_0p00151327459504_e_massiveAndCKM_LO' , channel, '#splitline{m=8 GeV, |V|^{2}=2.3 10^{-6}}{Majorana}' , selection, 'hnl_m_8_v2_2p3Em06_majorana' , 'darkgray' ,10, basedir, postfix, False, True, False, 1., 9.383e-03, toplot=True), ## kept 24Mar20; SR plots for AN \\## AGREED UPON WITH MARTINA FOR THE SYNC (feb?) 
+ # Sample('HN3L_M_10_V_0p000756967634711_e_massiveAndCKM_LO', channel, '#splitline{m=10 GeV, |V|^{2}=5.7 10^{-7}}{Majorana}', selection, 'hnl_m_10_v2_5p7Em07_majorana', 'teal' ,10, basedir, postfix, False, True, False, 1., 2.366e-03, toplot=True), # AGREED UPON WITH MARTINA FOR THE SYNC ] else: signal = [ - Sample('HN3L_M_1_V_0p212367605816_e_massiveAndCKM_LO' , channel, '#splitline{m=1 GeV, |V|^{2}=4.5 10^{-2}}{Majorana}' , selection, 'hnl_m_1_v2_4p5Em02_majorana' , 'darkorange' ,10, basedir, postfix, False, True, False, 1., 191.1 , toplot=False, is_generator=True), + Sample('HN3L_M_1_V_0p0949736805647_e_massiveAndCKM_LO' , channel, '#splitline{m=1 GeV, |V|^{2}=9.0 10^{-3}}{Majorana}' , selection, 'hnl_m_1_v2_9p0Em03_majorana' , 'darkorange' ,10, basedir, postfix, False, True, False, 1., 38.23 , toplot=False, is_generator=True), + Sample('HN3L_M_1_V_0p212367605816_e_massiveAndCKM_LO' , channel, '#splitline{m=1 GeV, |V|^{2}=4.5 10^{-2}}{Majorana}' , selection, 'hnl_m_1_v2_4p5Em02_majorana' , 'darkorange' ,10, basedir, postfix, False, True, False, 1., 191.1 , toplot=False, is_generator=True) if year != 2016 else 'DEL', Sample('HN3L_M_2_V_0p0248394846967_e_massiveAndCKM_LO' , channel, '#splitline{m=2 GeV, |V|^{2}=6.2 10^{-4}}{Majorana}' , selection, 'hnl_m_2_v2_6p2Em04_majorana' , 'forestgreen',10, basedir, postfix, False, True, False, 1., 2.648 , toplot=True , is_generator=True), Sample('HN3L_M_3_V_0p00707813534767_e_massiveAndCKM_LO' , channel, '#splitline{m=3 GeV, |V|^{2}=5.1 10^{-5}}{Majorana}' , selection, 'hnl_m_3_v2_5p1Em05_majorana' , 'firebrick' ,10, basedir, postfix, False, True, False, 1., 0.2022 , toplot=False, is_generator=True), Sample('HN3L_M_4_V_0p00290516780927_e_massiveAndCKM_LO' , channel, '#splitline{m=4 GeV, |V|^{2}=8.4 10^{-6}}{Majorana}' , selection, 'hnl_m_4_v2_8p4Em06_majorana' , 'indigo' ,10, basedir, postfix, False, True, False, 1., 3.365e-02, toplot=False, is_generator=True), Sample('HN3L_M_5_V_0p00145602197786_e_massiveAndCKM_LO' , 
channel, '#splitline{m=5 GeV, |V|^{2}=2.1 10^{-6}}{Majorana}' , selection, 'hnl_m_5_v2_2p1Em06_majorana' , 'chocolate' ,10, basedir, postfix, False, True, False, 1., 8.479e-03, toplot=False, is_generator=True), Sample('HN3L_M_6_V_0p00202484567313_e_massiveAndCKM_LO' , channel, '#splitline{m=6 GeV, |V|^{2}=4.1 10^{-6}}{Majorana}' , selection, 'hnl_m_6_v2_4p1Em06_majorana' , 'olive' ,10, basedir, postfix, False, True, False, 1., 1.655e-02, toplot=False, is_generator=True), -# Sample('HN3L_M_7_V_0p0316227766017_e_massiveAndCKM_LO' , channel, '#splitline{m=7 GeV, |V|^{2}=1.0 10^{-4}}{Majorana}' , selection, 'hnl_m_7_v2_1p0Em04_majorana' , 'darkgray' ,10, basedir, postfix, False, True, False, 1., 4.088 , toplot=False, is_generator=True), + Sample('HN3L_M_7_V_0p0316227766017_e_massiveAndCKM_LO' , channel, '#splitline{m=7 GeV, |V|^{2}=1.0 10^{-4}}{Majorana}' , selection, 'hnl_m_7_v2_1p0Em04_majorana' , 'darkgray' ,10, basedir, postfix, False, True, False, 1., 4.088 , toplot=False, is_generator=True) if year != 2016 else 'DEL', Sample('HN3L_M_8_V_0p00151327459504_e_massiveAndCKM_LO' , channel, '#splitline{m=8 GeV, |V|^{2}=2.3 10^{-6}}{Majorana}' , selection, 'hnl_m_8_v2_2p3Em06_majorana' , 'darkgray' ,10, basedir, postfix, False, True, False, 1., 9.383e-03, toplot=True , is_generator=True), - Sample('HN3L_M_10_V_0p000756967634711_e_massiveAndCKM_LO', channel, '#splitline{m=10 GeV, |V|^{2}=5.7 10^{-7}}{Majorana}', selection, 'hnl_m_10_v2_5p7Em07_majorana', 'teal' ,10, basedir, postfix, False, True, False, 1., 2.366e-03, toplot=False, is_generator=True), + Sample('HN3L_M_10_V_0p000756967634711_e_massiveAndCKM_LO', channel, '#splitline{m=10 GeV, |V|^{2}=5.7 10^{-7}}{Majorana}', selection, 'hnl_m_10_v2_5p7Em07_majorana', 'teal' ,10, basedir, postfix, False, True, False, 1., 2.366e-03, toplot=False, is_generator=True), # AGREED UPON WITH MARTINA FOR THE SYNC ] + + signal = [sig for sig in signal if sig != 'DEL'] # QUICKFIX SO THAT THINGS DON'T CRASH IF ANYTHING IS MISSING # 
generate reweighted samples generators = [isample for isample in signal if isample.is_generator] diff --git a/plotter/selections.py b/plotter/selections.py index 12f001e..3a0810f 100644 --- a/plotter/selections.py +++ b/plotter/selections.py @@ -9,17 +9,23 @@ def __init__(self, channel): self.selections = OrderedDict() if self.channel == 'mmm': - self.selections['pt_iso'] = ' & '.join(['l0_pt > 25' , - 'l2_pt > 5' , - 'l1_pt > 5' , - 'l0_id_m == 1' , - 'l1_Medium == 1', - 'l2_Medium == 1',]) + self.selections['pt_iso'] = ' & '.join(['l0_pt > 25' , + 'l2_pt > 5' , + 'l1_pt > 5' , + 'abs(l0_eta) < 2.4' , + 'abs(l1_eta) < 2.4' , + 'abs(l2_eta) < 2.4' , + 'l0_id_m == 1' , + 'l1_Medium == 1' , + 'l2_Medium == 1' ,]) if self.channel == 'mem': self.selections['pt_iso'] = ' & '.join(['l0_pt > 25' , 'l2_pt > 5' , 'l1_pt > 5' , + 'abs(l0_eta) < 2.4' , + 'abs(l1_eta) < 2.5' , + 'abs(l2_eta) < 2.4' , 'l0_id_m == 1' , 'l1_LooseNoIso == 1' , 'l2_Medium == 1' ,]) @@ -28,6 +34,9 @@ def __init__(self, channel): self.selections['pt_iso'] = ' & '.join(['l0_pt > 30' , 'l2_pt > 5' , 'l1_pt > 5' , + 'abs(l0_eta) < 2.5' , + 'abs(l1_eta) < 2.5' , + 'abs(l2_eta) < 2.4' , # 'l0_eid_mva_iso_wp90 == 1' , # martina uses no iso 'l0_eid_mva_noniso_wp90 == 1', 'l1_LooseNoIso == 1' , @@ -37,6 +46,9 @@ def __init__(self, channel): self.selections['pt_iso'] = ' & '.join(['l0_pt > 30' , 'l2_pt > 5' , 'l1_pt > 5' , + 'abs(l0_eta) < 2.5' , + 'abs(l1_eta) < 2.5' , + 'abs(l2_eta) < 2.5' , # 'l0_eid_mva_iso_wp90 == 1' , # martina uses no iso 'l0_eid_mva_noniso_wp90 == 1', 'l1_LooseNoIso == 1' , @@ -45,20 +57,16 @@ def __init__(self, channel): assert self.selections['pt_iso'], 'Error: No channel specific selection applied!' self.selections['baseline'] = ' & '.join([ - 'abs(l0_eta) < 2.4' , 'abs(l0_dxy) < 0.05' , 'abs(l0_dz) < 0.1' , 'l0_reliso_rho_03 < 0.1', - 'abs(l1_eta) < 2.4' , 'l1_reliso_rho_03 < 10' , - 'abs(l2_eta) < 2.4' , 'l2_reliso_rho_03 < 10' , 'hnl_q_12 == 0' , - 'nbj == 0' , 'hnl_dr_12 < 1.' 
, 'hnl_m_12 < 12' , @@ -71,9 +79,32 @@ def __init__(self, channel): 'abs(l2_dxy) > 0.01' , ]) - self.selections['sideband'] = '!(hnl_w_vis_m > 50. & hnl_w_vis_m < 80.)' # THIS IS IMPORTANT! + self.selections['baseline_no_dxy'] = ' & '.join([ + 'abs(l0_dxy) < 0.05' , + 'abs(l0_dz) < 0.1' , + 'l0_reliso_rho_03 < 0.1' , + + 'l1_reliso_rho_03 < 10' , + + 'l2_reliso_rho_03 < 10' , + + 'hnl_q_12 == 0' , + + 'hnl_dr_12 < 1.' , + + 'hnl_m_12 < 12' , + 'sv_cos > 0.9' , + + 'abs(hnl_dphi_01)>1.' , + 'abs(hnl_dphi_02)>1.' , # dphi a la facon belgique + + ]) + + self.selections['CR_bj'] = 'nbj != 0' # THIS IS IMPORTANT! - self.selections['signal_region'] = '(hnl_w_vis_m > 50. & hnl_w_vis_m < 80.)' # THIS IS IMPORTANT! + self.selections['sideband'] = '!(hnl_w_vis_m > 50. & hnl_w_vis_m < 80.) & nbj == 0' # THIS IS IMPORTANT! + + self.selections['signal_region'] = '(hnl_w_vis_m > 50. & hnl_w_vis_m < 80.) & nbj == 0' # THIS IS IMPORTANT! # self.selections['vetoes_12_OS'] = ' & '.join([ # # vetoes 12 (always OS anyways) @@ -122,10 +153,11 @@ def __init__(self, channel): 'l2_reliso_rho_03 < 0.2', ]) - self.selections['is_prompt_lepton'] = ' & '.join([ - '(l1_gen_match_isPrompt==1 | l1_gen_match_pdgid==22)', - '(l2_gen_match_isPrompt==1 | l2_gen_match_pdgid==22)', - ]) + # self.selections['is_prompt_lepton'] = ' & '.join([ + # '(l1_gen_match_isPrompt==1 | l1_gen_match_pdgid==22)', + # '(l2_gen_match_isPrompt==1 | l2_gen_match_pdgid==22)', + # ]) + self.selections['is_prompt_lepton'] = '(l1_gen_match_isPrompt==1 | l1_gen_match_pdgid==22 | l2_gen_match_isPrompt==1 | l2_gen_match_pdgid==22)' self.selections['zmm'] = ' & '.join([ 'l0_pt > 40' , @@ -145,6 +177,60 @@ def __init__(self, channel): 'hnl_q_01==0' , ]) + ### FOR LLP TALK + self.selections['SR_sb_no_dxy'] = ' & '.join([ + 'abs(l0_eta) < 2.4' , + 'abs(l0_dxy) < 0.05' , + 'abs(l0_dz) < 0.2' , + 'l0_reliso_rho_03 < 0.2' , + + 'abs(l1_eta) < 2.4' , + 'l1_reliso_rho_03 < 10' , + + 'abs(l2_eta) < 2.4' , + 'l2_reliso_rho_03 < 10' , + + 
'hnl_q_12 == 0' , + + 'nbj == 0' , + '(hnl_w_vis_m < 50. | hnl_w_vis_m > 80.)' , + 'hnl_dr_12 < 1.' , + + 'hnl_m_12 < 12' , + 'sv_cos > 0.' , + + 'abs(hnl_dphi_01)>1' , + 'abs(hnl_dphi_02)>1.' , # dphi a la facon belgique + ]) + + self.selections['SR_sb_w_dxy'] = ' & '.join([ + 'abs(l0_eta) < 2.4' , + 'abs(l0_dxy) < 0.05' , + 'abs(l0_dz) < 0.2' , + 'l0_reliso_rho_03 < 0.2' , + + 'abs(l1_eta) < 2.4' , + 'l1_reliso_rho_03 < 10' , + + 'abs(l2_eta) < 2.4' , + 'l2_reliso_rho_03 < 10' , + + 'hnl_q_12 == 0' , + + 'nbj == 0' , + '(hnl_w_vis_m < 50. | hnl_w_vis_m > 80.)' , + 'hnl_dr_12 < 1.' , + + 'hnl_m_12 < 12' , + 'sv_cos > 0.' , + + 'abs(hnl_dphi_01)>1' , + 'abs(hnl_dphi_02)>1.' , # dphi a la facon belgique + + 'abs(l1_dxy) > 0.01' , + 'abs(l2_dxy) > 0.01' , + ]) + # convert to pandas readable queries self.selections_pd = OrderedDict() for k, v in self.selections.items(): diff --git a/plotter/signals/check_sigs.py b/plotter/signals/check_sigs.py new file mode 100644 index 0000000..acafd43 --- /dev/null +++ b/plotter/signals/check_sigs.py @@ -0,0 +1,33 @@ + +fs = [ +'/Users/cesareborgia/cernbox/hnl/2016/sig/HN3L_M_10_V_0p000756967634711_e_massiveAndCKM_LO/HNLTreeProducer_eee/tree.root', +'/Users/cesareborgia/cernbox/hnl/2016/sig/HN3L_M_10_V_0p000756967634711_e_massiveAndCKM_LO/HNLTreeProducer_eem/tree.root', +'/Users/cesareborgia/cernbox/hnl/2016/sig/HN3L_M_10_V_0p000756967634711_e_massiveAndCKM_LO/HNLTreeProducer_mem/tree.root', +'/Users/cesareborgia/cernbox/hnl/2016/sig/HN3L_M_10_V_0p000756967634711_e_massiveAndCKM_LO/HNLTreeProducer_mmm/tree.root', +'/Users/cesareborgia/cernbox/hnl/2016/sig/HN3L_M_10_V_0p000756967634711_mu_massiveAndCKM_LO/HNLTreeProducer_eee/tree.root', +'/Users/cesareborgia/cernbox/hnl/2016/sig/HN3L_M_10_V_0p000756967634711_mu_massiveAndCKM_LO/HNLTreeProducer_eem/tree.root', +'/Users/cesareborgia/cernbox/hnl/2016/sig/HN3L_M_10_V_0p000756967634711_mu_massiveAndCKM_LO/HNLTreeProducer_mem/tree.root', 
+'/Users/cesareborgia/cernbox/hnl/2016/sig/HN3L_M_10_V_0p000756967634711_mu_massiveAndCKM_LO/HNLTreeProducer_mmm/tree.root', +'/Users/cesareborgia/cernbox/hnl/2017/sig/HN3L_M_10_V_0p000756967634711_e_massiveAndCKM_LO/HNLTreeProducer_eee/tree.root', +'/Users/cesareborgia/cernbox/hnl/2017/sig/HN3L_M_10_V_0p000756967634711_e_massiveAndCKM_LO/HNLTreeProducer_eem/tree.root', +'/Users/cesareborgia/cernbox/hnl/2017/sig/HN3L_M_10_V_0p000756967634711_e_massiveAndCKM_LO/HNLTreeProducer_mem/tree.root', +'/Users/cesareborgia/cernbox/hnl/2017/sig/HN3L_M_10_V_0p000756967634711_e_massiveAndCKM_LO/HNLTreeProducer_mmm/tree.root', +'/Users/cesareborgia/cernbox/hnl/2017/sig/HN3L_M_10_V_0p000756967634711_mu_massiveAndCKM_LO/HNLTreeProducer_eee/tree.root', +'/Users/cesareborgia/cernbox/hnl/2017/sig/HN3L_M_10_V_0p000756967634711_mu_massiveAndCKM_LO/HNLTreeProducer_eem/tree.root', +'/Users/cesareborgia/cernbox/hnl/2017/sig/HN3L_M_10_V_0p000756967634711_mu_massiveAndCKM_LO/HNLTreeProducer_mem/tree.root', +'/Users/cesareborgia/cernbox/hnl/2017/sig/HN3L_M_10_V_0p000756967634711_mu_massiveAndCKM_LO/HNLTreeProducer_mmm/tree.root', +'/Users/cesareborgia/cernbox/hnl/2018/sig/HN3L_M_10_V_0p000756967634711_e_massiveAndCKM_LO/HNLTreeProducer_eee/tree.root', +'/Users/cesareborgia/cernbox/hnl/2018/sig/HN3L_M_10_V_0p000756967634711_e_massiveAndCKM_LO/HNLTreeProducer_eem/tree.root', +'/Users/cesareborgia/cernbox/hnl/2018/sig/HN3L_M_10_V_0p000756967634711_e_massiveAndCKM_LO/HNLTreeProducer_mem/tree.root', +'/Users/cesareborgia/cernbox/hnl/2018/sig/HN3L_M_10_V_0p000756967634711_e_massiveAndCKM_LO/HNLTreeProducer_mmm/tree.root', +'/Users/cesareborgia/cernbox/hnl/2018/sig/HN3L_M_10_V_0p000756967634711_mu_massiveAndCKM_LO/HNLTreeProducer_eee/tree.root', +'/Users/cesareborgia/cernbox/hnl/2018/sig/HN3L_M_10_V_0p000756967634711_mu_massiveAndCKM_LO/HNLTreeProducer_eem/tree.root', +'/Users/cesareborgia/cernbox/hnl/2018/sig/HN3L_M_10_V_0p000756967634711_mu_massiveAndCKM_LO/HNLTreeProducer_mem/tree.root', 
+'/Users/cesareborgia/cernbox/hnl/2018/sig/HN3L_M_10_V_0p000756967634711_mu_massiveAndCKM_LO/HNLTreeProducer_mmm/tree.root',] + +import ROOT as rt + +for f in fs: + tf = rt.TFile(f) + tr = tf.Get('tree') + print(tr.GetEntries(), f) diff --git a/plotter/utils.py b/plotter/utils.py index 561cc92..bdac9ff 100644 --- a/plotter/utils.py +++ b/plotter/utils.py @@ -6,17 +6,30 @@ import pickle def set_paths(channel, year): + + env['NTUPLE_DIR'] = '' + assert channel in ['mmm', 'mem', 'mem_os', 'mem_ss', 'eem', 'eem_os', 'eem_ss', 'eee'], 'ERROR: Channel not valid.' - assert year in [2017, 2018], 'ERROR: Year not valid.' + assert year in [2016, 2017, 2018], 'ERROR: Year not valid.' if user() == 'manzoni': - env['NTUPLE_DIR'] = '/Users/manzoni/Documents/HNL/ntuples/%d/%s' %(year, channel.split('_')[0]) - env['PLOT_DIR'] = '/Users/manzoni/Documents/HNL/plotter/plots_%d_%s/' %(year, channel) - env['NN_DIR'] = '/Users/manzoni/Documents/HNL/NN/' + env['NTUPLE_BASE_DIR'] = '/Users/manzoni/Documents/HNL/ntuples/' + env['PLOT_DIR'] = '/Users/manzoni/Documents/HNL/plotter/plots_%d_%s/' %(year, channel) + env['NN_DIR'] = '/Users/manzoni/Documents/HNL/NN/' if user() == 'cesareborgia': - env['NTUPLE_DIR'] = '/Users/cesareborgia/cernbox/ntuples/%d/' %year - env['PLOT_DIR'] = '/Users/cesareborgia/cernbox/plots/plotter/%s/' %channel - env['NN_DIR'] = '/Users/cesareborgia/HNL/NN/%s/' %channel + env['NTUPLE_BASE_DIR'] = '/Users/cesareborgia/cernbox/hnl/' + env['PLOT_DIR'] = '/Users/cesareborgia/Dropbox/documents/physics/phd/plots/%d/%s/' %(year, channel) + env['NN_DIR'] = '/Users/cesareborgia/HNL/plotter/NN/trainings/' + + if user() == '': + env['NTUPLE_BASE_DIR'] = '/Users//hnl/' + env['PLOT_DIR'] = '/Users//%d/%s/' %(year, channel) + env['NN_DIR'] = '/Users//plotter/NN/trainings/' + + if user() == '': + env['NTUPLE_BASE_DIR'] = '/Users//hnl/' + env['PLOT_DIR'] = '/Users//%d/%s/' %(year, channel) + env['NN_DIR'] = '/Users//plotter/NN/trainings/' def get_time_str(): today = datetime.now() 
@@ -26,13 +39,15 @@ def get_time_str(): time_str = date + '_' + hour + 'h_' + minit + 'm/' return time_str -def plot_dir(): +def plot_dir(region_label=''): plot_dir = env['PLOT_DIR'] + get_time_str() + if len(region_label): plot_dir = plot_dir[:-1] + '_' + region_label + '/' if not ensure_path(plot_dir): makedirs(plot_dir) #mkdir(plot_dir) return plot_dir -def nn_dir(channel): +def nn_dir(channel,region_label): nn_dir = env['NN_DIR'] + channel + '_' + get_time_str() + if len(region_label): nn_dir = nn_dir[:-1] + '_' + region_label + '/' if not ensure_path(nn_dir): makedirs(nn_dir) return nn_dir diff --git a/plotter/variables.py b/plotter/variables.py index 80ac8ff..c5aedbd 100644 --- a/plotter/variables.py +++ b/plotter/variables.py @@ -1,7 +1,7 @@ import numpy as np class Variable(object): - def __init__(self, var, bins, xlabel, ylabel, label=None, extra_label=None, extra_selection=None): + def __init__(self, var, bins, xlabel, ylabel, label=None, extra_label=None, extra_selection=None, set_log_x=False): self.var = var self.bins = bins self.xlabel = xlabel @@ -9,6 +9,7 @@ def __init__(self, var, bins, xlabel, ylabel, label=None, extra_label=None, extr self.extra_label = extra_label self.extra_selection = extra_selection self.label = self.var if label is None else self.label + self.set_log_x = set_log_x if self.extra_label is not None: self.label = '_'.join([self.label, self.extra_label]) @@ -86,6 +87,12 @@ def __init__(self, var, bins, xlabel, ylabel, label=None, extra_label=None, extr Variable('hnl_m_12', m12_bins_martina, 'm_{23} (GeV)', 'events', extra_selection='hnl_2d_disp>0.5 & hnl_2d_disp<=1.5', extra_label='lxy_0p5_to_1p5'), Variable('hnl_m_12', m12_bins_martina, 'm_{23} (GeV)', 'events', extra_selection='hnl_2d_disp>1.5 & hnl_2d_disp<=4.0', extra_label='lxy_1p5_to_4p0'), Variable('hnl_m_12', m12_bins_martina, 'm_{23} (GeV)', 'events', extra_selection='hnl_2d_disp>4.0' , extra_label='lxy_mt_4p0' ), + + # check with more mass bins! 
+ # Variable('hnl_m_12', m12_bins_displaced_1_alt, 'm_{23} (GeV)', 'events', extra_selection='hnl_2d_disp<=0.5' , extra_label='lxy_lt_0p5' ), + # Variable('hnl_m_12', m12_bins_displaced_1_alt, 'm_{23} (GeV)', 'events', extra_selection='hnl_2d_disp>0.5 & hnl_2d_disp<=1.5', extra_label='lxy_0p5_to_1p5'), + # Variable('hnl_m_12', m12_bins_displaced_1_alt, 'm_{23} (GeV)', 'events', extra_selection='hnl_2d_disp>1.5 & hnl_2d_disp<=4.0', extra_label='lxy_1p5_to_4p0'), + # Variable('hnl_m_12', m12_bins_martina, 'm_{23} (GeV)', 'events', extra_selection='hnl_2d_disp>4.0' , extra_label='lxy_mt_4p0' ), Variable('hnl_2d_disp' , np.linspace( 0 , 30 , 25 + 1) , 'L_{xy} (cm)' , 'events'), Variable('hnl_2d_disp' , np.linspace( 0 , 10 , 25 + 1) , 'L_{xy} (cm)' , 'events', extra_label='hnl_2d_disp_narrow'), @@ -140,10 +147,19 @@ def __init__(self, var, bins, xlabel, ylabel, label=None, extra_label=None, extr Variable('l1_reliso_rho_03', np.linspace( 0, 0.2, 25 + 1), 'l_{2} #rho-corrected I^{rel}' , 'events', extra_label='l1_reliso_rho_03_zoom'), Variable('l2_reliso_rho_03', np.linspace( 0, 0.2, 25 + 1), 'l_{3} #rho-corrected I^{rel}' , 'events', extra_label='l2_reliso_rho_03_zoom'), - Variable('fr' , np.linspace( 0 , 1, 30 + 1) , 'fake rate' , 'events'), - Variable('fr' , np.linspace( 0 , 1, 20 + 1) , 'fake rate' , 'events', extra_label='fr_coarse'), - Variable('fr' , np.linspace( 0 , 1, 15 + 1) , 'fake rate' , 'events', extra_label='fr_very_coarse'), - Variable('fr' , np.linspace( 0 , 1, 10 + 1) , 'fake rate' , 'events', extra_label='fr_very_very_coarse'), -] + # Variable('fr' , np.linspace( 0 , 1, 30 + 1) , 'fake rate' , 'events'), + # Variable('fr' , np.linspace( 0 , 1, 20 + 1) , 'fake rate' , 'events', extra_label='fr_coarse'), + # Variable('fr' , np.linspace( 0 , 1, 15 + 1) , 'fake rate' , 'events', extra_label='fr_very_coarse'), + # Variable('fr' , np.linspace( 0 , 1, 10 + 1) , 'fake rate' , 'events', extra_label='fr_very_very_coarse'), + # NN features: +] +# variables = [ + # 
Variable('hnl_m_12', m12_bins_displaced_1, 'm_{23} (GeV)', 'events'), + # Variable('hnl_2d_disp' , np.linspace( 0 , 10 , 25 + 1) , 'L_{xy} (cm)' , 'events', extra_label='hnl_2d_disp_narrow'), + # # Variable('hnl_w_vis_m' , np.linspace( 0 , 150 , 40 + 1) , 'm_{3l}' , 'events'), + # # Variable('sv_cos' , np.linspace( 0.9 , 1 , 30 + 1) , '\cos\alpha' , 'events'), + # # Variable('sv_prob' , np.linspace( 0 , 1 , 30 + 1) , 'SV probability' , 'events'), + # # Variable('hnl_pt_12', np.linspace( 10, 60, 20 + 1), 'p_{T}^{23} (GeV)', 'events'), +# ] diff --git a/source_env.sh b/source_env.sh index 7d051ea..b59b08e 100644 --- a/source_env.sh +++ b/source_env.sh @@ -4,6 +4,7 @@ if test -e "$BASE_DIR"; then fi BASE_DIR=$PWD + echo "setting BASE_DIR to" $BASE_DIR export BASE_DIR=$BASE_DIR # base for the code diff --git a/sync/parse_text.py b/sync/parse_text.py new file mode 100644 index 0000000..56d430b --- /dev/null +++ b/sync/parse_text.py @@ -0,0 +1,33 @@ +# parse text + +import subprocess +from re import sub +from pdb import set_trace + +proc = subprocess.Popen(['python','sync_4Mar20.py'],stdout=subprocess.PIPE) + +lines = [] +while True: + lyne = proc.stdout.readline() + line = lyne.strip(); line = str(line) + line = sub("'",'',line) + line = sub('b','',line) + line = line[11:] + # set_trace() + line = sub('\*', '', line) + for i in range(10): + line = sub(' ', ' ', line) + if len(line) and line[0] == ' ': line = line[1:] + line = line.strip() + # set_trace() + if line == '' or 'event' in line: continue + if 'entries' in line or 'entry' in line: break + lines.append(line) + print(line) + if not lyne: break + +with open ('sync_6Mar20_mem_DY17.txt', 'w') as out_file: + for line in lines: + out_file.write(line + '\n') + +out_file.close() diff --git a/sync/sync_13Dec19.py b/sync/sync_13Dec19.py new file mode 100644 index 0000000..7799c17 --- /dev/null +++ b/sync/sync_13Dec19.py @@ -0,0 +1,311 @@ +import ROOT as rt + +fin_mmm = 
'/eos/home-v/vstampf/HN3L_M_4_V_0p00290516780927_mu_massiveAndCKM_LO/HNLTreeProducer_mmm/tree.root' +fin_mem = '/eos/home-v/vstampf/HN3L_M_4_V_0p00290516780927_mu_massiveAndCKM_LO/HNLTreeProducer_mem/tree.root' + +tf_in_mmm = rt.TFile(fin_mmm) +tf_in_mem = rt.TFile(fin_mem) + +tree_mmm = tf_in_mmm.Get('tree') +tree_mem = tf_in_mem.Get('tree') + +tree_mmm.SetScanField(0) +tree_mem.SetScanField(0) + +# tree.Scan("run:lumi:event:l0_pdgid:l0_pt:l1_pdgid:l1_pt:abs(l1_dxy):l2_pdgid:l2_pt:abs(l2_dxy):puppimet_pt:pfmet_pt:pass_mmm:pass_mem:pass_eem:pass_eee", "l0_id_m & l1_reliso_rho_03 < 1.2 & l2_reliso_rho_03 < 1.2", "col=3:4:5:3:9.4f:3:9.4f:9.4f:3:9.4f:9.4f:9.4f:9.4f:1:1:1:1") + +# print '\n\nNOW MMM\n\n' + +# cut_string_mmm = 'l0_pt > 28 & l1_id_m & l2_id_m & l1_reliso_rho_03 < 2 & l2_reliso_rho_03 < 2 & l1_pt > 5 & l2_pt > 5' +# cut_string_mem = 'l0_pt > 28 & l1_LooseNoIso & l2_id_m & l1_reliso_rho_03 < 2 & l2_reliso_rho_03 < 2 & l1_pt > 5 & l2_pt > 5' + +# tree_mmm.Scan("run:lumi:event:l0_pdgid:l0_pt:l1_pdgid:l1_pt:abs(l1_dxy):l2_pdgid:l2_pt:abs(l2_dxy):pass_mmm:pass_mem:pass_eem:pass_eee", + # cut_string_mmm, + # # "col=3:4:5:3:9.4f:3:9.4f:9.4f:3:9.4f:9.4f:1:1:1:1") + # "precision=4 col=:::::::::::1:1:1:1") + +# print '\n\nNOW MEM\n\n' + + +# tree_mem.Scan("run:lumi:event:l0_pdgid:l0_pt:l1_pdgid:l1_pt:abs(l1_dxy):l2_pdgid:l2_pt:abs(l2_dxy):pass_mmm:pass_mem:pass_eem:pass_eee", + # cut_string_mem, + # # "col=3:4:5:3:9.4f:3:9.4f:9.4f:3:9.4f:9.4f:1:1:1:1") + # "precision=4 col=:::::::::::1:1:1:1") + +# VS: 13/12/19 + + +# print '\n\nNOW MMM\n\n' + +selection_data = [ + 'l0_pt > 25 & l2_pt > 5 & l1_pt > 5 & l0_id_m == 1 & l1_Medium == 1 & l2_Medium == 1', + + 'abs(l0_eta) < 2.4 & abs(l0_dxy) < 0.05 & abs(l0_dz) < 0.2 & l0_reliso_rho_03 < 0.2 & abs(l1_eta) < 2.4 & l1_reliso_rho_03 < 10', + 'abs(l2_eta) < 2.4 & l2_reliso_rho_03 < 10 & hnl_q_12 == 0 & nbj == 0 & hnl_dr_12 < 1. & hnl_m_12 < 12 & sv_cos > 0.9 & abs(hnl_dphi_01)>1', + 'abs(hnl_dphi_02)>1. 
& abs(l1_dxy) > 0.01 & abs(l2_dxy) > 0.01', + + 'abs(hnl_m_12-3.0969) > 0.08 & abs(hnl_m_12-3.6861) > 0.08 & abs(hnl_m_12-0.7827) > 0.08 & abs(hnl_m_12-1.0190) > 0.08', + + '!(hnl_q_01==0 & abs(hnl_m_01-91.1876) < 10) & !(hnl_q_01==0 & abs(hnl_m_01- 9.4603) < 0.08) & !(hnl_q_01==0 & abs(hnl_m_01-10.0233) < 0.08)', + '!(hnl_q_01==0 & abs(hnl_m_01-10.3552) < 0.08) & !(hnl_q_01==0 & abs(hnl_m_01-3.0969) < 0.08) & !(hnl_q_01==0 & abs(hnl_m_01-3.6861) < 0.08)', + '!(hnl_q_01==0 & abs(hnl_m_01-0.7827) < 0.08) & !(hnl_q_01==0 & abs(hnl_m_01-1.0190) < 0.08)', + + '!(hnl_q_02==0 & abs(hnl_m_02-91.1876) < 10) & !(hnl_q_02==0 & abs(hnl_m_02- 9.4603) < 0.08) & !(hnl_q_02==0 & abs(hnl_m_02-10.0233) < 0.08)', + '!(hnl_q_02==0 & abs(hnl_m_02-10.3552) < 0.08) & !(hnl_q_02==0 & abs(hnl_m_02-3.0969) < 0.08) & !(hnl_q_02==0 & abs(hnl_m_02-3.6861) < 0.08)', + '!(hnl_q_02==0 & abs(hnl_m_02-0.7827) < 0.08) & !(hnl_q_02==0 & abs(hnl_m_02-1.0190) < 0.08)', + + '(hnl_w_vis_m > 50. & hnl_w_vis_m < 80.)', + + 'hnl_2d_disp_sig>20', + + 'hnl_pt_12>15', + + 'sv_cos>0.99', + + 'sv_prob>0.001', + + 'abs(l1_dz) < 10 & abs(l2_dz) < 10', + + 'l0_reliso_rho_03<0.1', + + # selection_tight + 'l1_reliso_rho_03 < 0.2 & l2_reliso_rho_03 < 0.2' +] + +selection_data = ' & '.join(selection_data) + + +# tree_mmm.Scan("run:lumi:event:l0_pdgid:l0_pt:l1_pdgid:l1_pt:abs(l1_dxy):l2_pdgid:l2_pt:abs(l2_dxy):pass_mmm:pass_mem:pass_eem:pass_eee", + # selection_data, + # # "col=3:4:5:3:9.4f:3:9.4f:9.4f:3:9.4f:9.4f:1:1:1:1") + # "precision=4 col=:::::::::::1:1:1:1") + + +to_check= [ +'run==1 & lumi==1 & event==510', +'run==6 & lumi==1 & event==65', +'run==9 & lumi==1 & event==186', +'run==9 & lumi==1 & event==625', +'run==11 & lumi==1 & event==838', +'run==13 & lumi==1 & event==231', +'run==13 & lumi==1 & event==775', +'run==13 & lumi==1 & event==999', +'run==14 & lumi==1 & event==86', +'run==16 & lumi==1 & event==219', +'run==16 & lumi==1 & event==686', +'run==17 & lumi==1 & event==666', +'run==18 & lumi==1 & 
event==371', +'run==18 & lumi==1 & event==622', +'run==21 & lumi==1 & event==892', +'run==23 & lumi==1 & event==978', +'run==24 & lumi==1 & event==42', +'run==25 & lumi==1 & event==530', +'run==25 & lumi==1 & event==827', +'run==27 & lumi==1 & event==298', +'run==30 & lumi==1 & event==754', +'run==32 & lumi==1 & event==275', +'run==32 & lumi==1 & event==327', +'run==33 & lumi==1 & event==722', +'run==34 & lumi==1 & event==966', +'run==37 & lumi==1 & event==606', +'run==37 & lumi==1 & event==876', +'run==39 & lumi==1 & event==403', +'run==40 & lumi==1 & event==222', +'run==40 & lumi==1 & event==299', +'run==41 & lumi==1 & event==570', +'run==41 & lumi==1 & event==728', +'run==41 & lumi==1 & event==887', +'run==42 & lumi==1 & event==590', +'run==47 & lumi==1 & event==478', +'run==47 & lumi==1 & event==629', +'run==51 & lumi==1 & event==295', +'run==51 & lumi==1 & event==725', +'run==52 & lumi==1 & event==10', +'run==54 & lumi==1 & event==336', +'run==54 & lumi==1 & event==444', +'run==55 & lumi==1 & event==241', +'run==56 & lumi==1 & event==27', +'run==56 & lumi==1 & event==862', +'run==58 & lumi==1 & event==303', +'run==58 & lumi==1 & event==895', +'run==60 & lumi==1 & event==734', +'run==62 & lumi==1 & event==954', +'run==63 & lumi==1 & event==398', +'run==63 & lumi==1 & event==939', +'run==64 & lumi==1 & event==239', +'run==64 & lumi==1 & event==296', +'run==65 & lumi==1 & event==130', +'run==67 & lumi==1 & event==284', +'run==67 & lumi==1 & event==585', +'run==69 & lumi==1 & event==271', +'run==70 & lumi==1 & event==532', +'run==71 & lumi==1 & event==347', +'run==73 & lumi==1 & event==298', +'run==74 & lumi==1 & event==33', +'run==75 & lumi==1 & event==566', +'run==76 & lumi==1 & event==242', +'run==77 & lumi==1 & event==37', +'run==77 & lumi==1 & event==378', +'run==77 & lumi==1 & event==565', +'run==78 & lumi==1 & event==580', +'run==79 & lumi==1 & event==14', +'run==82 & lumi==1 & event==122', +'run==82 & lumi==1 & event==234', +'run==86 & lumi==1 & 
event==747', +'run==88 & lumi==1 & event==88', +'run==91 & lumi==1 & event==34', +'run==92 & lumi==1 & event==81', +'run==92 & lumi==1 & event==251', +'run==92 & lumi==1 & event==800', +'run==92 & lumi==1 & event==819', +'run==92 & lumi==1 & event==839', +'run==93 & lumi==1 & event==157', +'run==93 & lumi==1 & event==534', +'run==94 & lumi==1 & event==467', +'run==95 & lumi==1 & event==893', +'run==96 & lumi==1 & event==466', +'run==98 & lumi==1 & event==69', +'run==99 & lumi==1 & event==233', +'run==100 & lumi==1 & event==362', +'run==101 & lumi==1 & event==650', +'run==103 & lumi==1 & event==388', +'run==105 & lumi==1 & event==280', +'run==106 & lumi==1 & event==507', +'run==107 & lumi==1 & event==56', +'run==107 & lumi==1 & event==106', +'run==107 & lumi==1 & event==392', +'run==109 & lumi==1 & event==505', +'run==110 & lumi==1 & event==831', +'run==112 & lumi==1 & event==935', +'run==115 & lumi==1 & event==793', +'run==116 & lumi==1 & event==450', +'run==117 & lumi==1 & event==518', +'run==117 & lumi==1 & event==977', +'run==117 & lumi==1 & event==986', +'run==119 & lumi==1 & event==962', +'run==119 & lumi==1 & event==967', +'run==120 & lumi==1 & event==78', +'run==120 & lumi==1 & event==179', +'run==121 & lumi==1 & event==729', +'run==124 & lumi==1 & event==238', +'run==124 & lumi==1 & event==391', +'run==130 & lumi==1 & event==863', +'run==131 & lumi==1 & event==258', +'run==131 & lumi==1 & event==960', +'run==133 & lumi==1 & event==542', +'run==136 & lumi==1 & event==383', +'run==136 & lumi==1 & event==859', +'run==136 & lumi==1 & event==933', +'run==137 & lumi==1 & event==790', +'run==138 & lumi==1 & event==760', +'run==139 & lumi==1 & event==718', +'run==143 & lumi==1 & event==278', +'run==144 & lumi==1 & event==482', +'run==144 & lumi==1 & event==653', +'run==144 & lumi==1 & event==755', +'run==144 & lumi==1 & event==978', +'run==145 & lumi==1 & event==910', +'run==146 & lumi==1 & event==320', +'run==147 & lumi==1 & event==148', +'run==147 & lumi==1 & 
event==856', +'run==148 & lumi==1 & event==110', +'run==148 & lumi==1 & event==160', +'run==149 & lumi==1 & event==988', +'run==150 & lumi==1 & event==984', +'run==151 & lumi==1 & event==188', +'run==153 & lumi==1 & event==287', +'run==154 & lumi==1 & event==283', +'run==154 & lumi==1 & event==453', +'run==155 & lumi==1 & event==749', +'run==156 & lumi==1 & event==56', +'run==157 & lumi==1 & event==112', +'run==157 & lumi==1 & event==165', +'run==157 & lumi==1 & event==555', +'run==157 & lumi==1 & event==916', +'run==162 & lumi==1 & event==37', +'run==162 & lumi==1 & event==440', +'run==163 & lumi==1 & event==865', +'run==165 & lumi==1 & event==933', +'run==166 & lumi==1 & event==22', +'run==167 & lumi==1 & event==931', +'run==170 & lumi==1 & event==62', +'run==170 & lumi==1 & event==734', +'run==172 & lumi==1 & event==82', +'run==172 & lumi==1 & event==758', +'run==172 & lumi==1 & event==879', +'run==177 & lumi==1 & event==694', +'run==181 & lumi==1 & event==513', +'run==188 & lumi==1 & event==362', +'run==189 & lumi==1 & event==270', +'run==190 & lumi==1 & event==43', +'run==192 & lumi==1 & event==246', +'run==194 & lumi==1 & event==38', +'run==195 & lumi==1 & event==125', +'run==198 & lumi==1 & event==480', +'run==198 & lumi==1 & event==816', +'run==200 & lumi==1 & event==39', +'run==200 & lumi==1 & event==834', +'run==205 & lumi==1 & event==153', +'run==208 & lumi==1 & event==60', +'run==208 & lumi==1 & event==529', +'run==208 & lumi==1 & event==697', +'run==210 & lumi==1 & event==807', +'run==210 & lumi==1 & event==901', +'run==211 & lumi==1 & event==383', +'run==212 & lumi==1 & event==946', +'run==214 & lumi==1 & event==848', +'run==217 & lumi==1 & event==213', +'run==217 & lumi==1 & event==685', +'run==224 & lumi==1 & event==246', +'run==224 & lumi==1 & event==943', +'run==225 & lumi==1 & event==77', +'run==225 & lumi==1 & event==850', +'run==227 & lumi==1 & event==671', +'run==228 & lumi==1 & event==128', +'run==228 & lumi==1 & event==289', +'run==228 & 
lumi==1 & event==929', +'run==230 & lumi==1 & event==943', +'run==231 & lumi==1 & event==133', +'run==231 & lumi==1 & event==880', +'run==233 & lumi==1 & event==78', +'run==234 & lumi==1 & event==617', +'run==234 & lumi==1 & event==884', +'run==235 & lumi==1 & event==488', +'run==235 & lumi==1 & event==675', +'run==237 & lumi==1 & event==517', +'run==239 & lumi==1 & event==485', +'run==241 & lumi==1 & event==674', +'run==242 & lumi==1 & event==891', +'run==243 & lumi==1 & event==621', +'run==243 & lumi==1 & event==914', +'run==244 & lumi==1 & event==43', +'run==244 & lumi==1 & event==253', +'run==244 & lumi==1 & event==628', +'run==245 & lumi==1 & event==739', +'run==246 & lumi==1 & event==301', +'run==246 & lumi==1 & event==573', +'run==246 & lumi==1 & event==974', +'run==247 & lumi==1 & event==289', +'run==248 & lumi==1 & event==893', +'run==249 & lumi==1 & event==765', +'run==250 & lumi==1 & event==1', +'run==250 & lumi==1 & event==435', +] + +from collections import OrderedDict +check = OrderedDict() + +count_0 =0 +count_1 =0 + +for iev in to_check: + check [iev] = tree_mmm.GetEntries(iev) + print(iev, check[iev]) + if check[iev] == 0: count_0 += 1 + if check[iev] == 1: count_1 += 1 + +print(count_0, count_1) + + + + diff --git a/sync/sync_17Feb20.py b/sync/sync_17Feb20.py new file mode 100644 index 0000000..dc1101e --- /dev/null +++ b/sync/sync_17Feb20.py @@ -0,0 +1,135 @@ +import ROOT as rt +import subprocess + +fin_mmm = '/eos/home-v/vstampf/HN3L_M_4_V_0p00290516780927_mu_massiveAndCKM_LO/HNLTreeProducer_mmm/tree.root' +#fin_mem = '/eos/home-v/vstampf/HN3L_M_4_V_0p00290516780927_mu_massiveAndCKM_LO/HNLTreeProducer_mem/tree.root' +fin_mem = '/Users/cesareborgia/cernbox/hnl/2017/sig/HN3L_M_4_V_0p00290516780927_mu_massiveAndCKM_LO/HNLTreeProducer_mem/tree.root' + +#tf_in_mmm = rt.TFile(fin_mmm) +tf_in_mem = rt.TFile(fin_mem) + +#tree_mmm = tf_in_mmm.Get('tree') +tree_mem = tf_in_mem.Get('tree') + +#tree_mmm.SetScanField(0) +tree_mem.SetScanField(0) + + 
+selection_data_mem_ss = [ + 'l0_pt > 25 & l2_pt > 5 & l1_pt > 5 & l0_id_m == 1 & l1_LooseNoIso == 1 & l2_Medium == 1', + 'abs(l0_eta) < 2.4 & abs(l0_dxy) < 0.05 & abs(l0_dz) < 0.1 & l0_reliso_rho_03 < 0.1 & abs(l1_eta) < 2.4 & l1_reliso_rho_03 < 10 & abs(l2_eta) < 2.4 & l2_reliso_rho_03 < 10 & hnl_q_12 == 0 & nbj == 0 & hnl_dr_12 < 1. & hnl_m_12 < 12 & sv_cos > 0.9 & abs(hnl_dphi_01)>1. & abs(hnl_dphi_02)>1. & abs(l1_dxy) > 0.01 & abs(l2_dxy) > 0.01', + 'l0_q==l2_q', + '(hnl_w_vis_m > 50. & hnl_w_vis_m < 80.)', + 'l1_pt>7', + 'hnl_pt_12>15', + 'sv_cos>0.99', + 'sv_prob>0.001', + 'l0_reliso_rho_03<0.1', + 'l0_pt>25', + 'abs(l1_dz)<10', + 'abs(l2_dz)<10', + 'l1_reliso_rho_03 < 0.2 & l2_reliso_rho_03 < 0.2' +] + +selection_data_mem_ss_full = ' & '.join(selection_data_mem_ss) + +# tree_mem.Scan("run:lumi:event:l0_pdgid:l0_pt:l2_pdgid:l2_pt:abs(l2_dxy):l1_pdgid:l1_pt:abs(l1_dxy):hnl_m_12:hnl_2d_disp", + # selection_data_mem_ss_full, + # "precision=4 ") + + +to_check_martina_doesnt_have = [ +'run==101 & lumi==1 & event==255', +'run==120 & lumi==1 & event==195', +'run==134 & lumi==1 & event==790', +'run==14 & lumi==1 & event==561', +'run==140 & lumi==1 & event==331', +'run==153 & lumi==1 & event==481', +'run==164 & lumi==1 & event==162', +'run==171 & lumi==1 & event==737', +'run==191 & lumi==1 & event==773', +'run==199 & lumi==1 & event==475', +'run==200 & lumi==1 & event==442', +'run==204 & lumi==1 & event==384', +'run==214 & lumi==1 & event==651', +'run==223 & lumi==1 & event==753', +'run==23 & lumi==1 & event==458', +'run==245 & lumi==1 & event==286', +'run==249 & lumi==1 & event==237', +'run==30 & lumi==1 & event==770', +'run==44 & lumi==1 & event==914', +'run==7 & lumi==1 & event==962', +'run==87 & lumi==1 & event==932', +] + +to_check_i_dont_have = [ +# 'run == 10 & lumi==1 & event==792', +# 'run == 13 & lumi==1 & event==758', +# 'run == 21 & lumi==1 & event==866', +# 'run == 22 & lumi==1 & event==794', +'run == 27 & lumi==1 & event==537', +'run == 3 & lumi==1 & 
event==779', +# 'run == 47 & lumi==1 & event==766', +'run == 58 & lumi==1 & event==550', +'run == 58 & lumi==1 & event==993', +# 'run == 7 & lumi==1 & event==363', +# 'run == 73 & lumi==1 & event==430', +# 'run == 8 & lumi==1 & event==559', +# 'run == 84 & lumi==1 & event==826', +# 'run == 84 & lumi==1 & event==924', +# 'run == 86 & lumi==1 & event==541', +# 'run == 88 & lumi==1 & event==714', +# 'run == 89 & lumi==1 & event==593', +# 'run == 91 & lumi==1 & event==288', +# 'run == 98 & lumi==1 & event==738', +# 'run == 112 & lumi==1 & event==297', +'run == 116 & lumi==1 & event==326', +# 'run == 141 & lumi==1 & event==484', +# 'run == 142 & lumi==1 & event==53', +# 'run == 144 & lumi==1 & event==411', +# 'run == 153 & lumi==1 & event==173', +# 'run == 156 & lumi==1 & event==139', +# 'run == 169 & lumi==1 & event==314', +# 'run == 170 & lumi==1 & event==935', +# 'run == 182 & lumi==1 & event==687', +# 'run == 189 & lumi==1 & event==880', +# 'run == 191 & lumi==1 & event==543', +# 'run == 196 & lumi==1 & event==538', +# 'run == 205 & lumi==1 & event==747', +# 'run == 206 & lumi==1 & event==666', +# 'run == 207 & lumi==1 & event==395', +# 'run == 207 & lumi==1 & event==519', +# 'run == 212 & lumi==1 & event==936', +# 'run == 217 & lumi==1 & event==373', +# 'run == 220 & lumi==1 & event==873', +# 'run == 222 & lumi==1 & event==647', +# 'run == 225 & lumi==1 & event==711', +# 'run == 232 & lumi==1 & event==188', +# 'run == 241 & lumi==1 & event==383', +# 'run == 241 & lumi==1 & event==633', +] + +from collections import OrderedDict +check = OrderedDict() + +# for i in selection_data_mem_ss + ['l1_reliso_rho_03 < 0.2', 'l2_reliso_rho_03 < 0.2']: + # count_not_found =0 + # count_exists =0 + # for iev in to_check: + # check [iev] = tree_mem.GetEntries(iev) # all found + # check [iev] = tree_mem.GetEntries(iev + ' & ' + i) # all found + # print(iev, check[iev]) + # if check[iev] == 0: count_not_found += 1 + # if check[iev] == 1: count_exists += 1 + + # print(i) + # 
print('not found: {nf}, found: {ex} \n'.format(nf=count_not_found, ex=count_exists)) + + +for iev in to_check_i_dont_have: + tree_mem.Scan("run:lumi:event:l0_pdgid:l0_pt:l2_pdgid:l2_pt:abs(l2_dxy):l2_reliso_rho_03:l1_pdgid:l1_pt:abs(l1_dxy):l1_reliso_rho_03:hnl_m_12:hnl_2d_disp", + iev + ' & ' + selection_data_mem_ss_full, + "precision=4 ") + # print(iev, check[iev]) + + diff --git a/sync/sync_2Mar20.py b/sync/sync_2Mar20.py new file mode 100644 index 0000000..fbae5b6 --- /dev/null +++ b/sync/sync_2Mar20.py @@ -0,0 +1,163 @@ +import ROOT as rt + +fin_eem = '/Users/cesareborgia/cernbox/hnl/2017/sig/HN3L_M_4_V_0p00290516780927_e_massiveAndCKM_LO/HNLTreeProducer_eem/tree.root' + +# fin_eem = '/Users/cesareborgia/cernbox/hnl/2017/sig/HN3L_M_10_V_0p000756967634711_e_massiveAndCKM_LO/HNLTreeProducer_eem/tree.root' + + +tf_in_eem = rt.TFile(fin_eem) + +tree_eem = tf_in_eem.Get('tree') + +tree_eem.SetScanField(0) + +selection_data_eem_os = [ + 'l0_pt > 30 & l2_pt > 5 & l1_pt > 5 & l0_eid_mva_noniso_wp90 == 1 & l1_LooseNoIso == 1 & l2_Medium == 1', + 'abs(l0_eta) < 2.4 & abs(l0_dxy) < 0.05 & abs(l0_dz) < 0.1 & l0_reliso_rho_03 < 0.1 & abs(l1_eta) < 2.4 & l1_reliso_rho_03 < 10 & abs(l2_eta) < 2.4 & l2_reliso_rho_03 < 10 & hnl_q_12 == 0 & nbj == 0 & hnl_dr_12 < 1. & hnl_m_12 < 12 & sv_cos > 0.9 & abs(hnl_dphi_01)>1. & abs(hnl_dphi_02)>1. & abs(l1_dxy) > 0.01 & abs(l2_dxy) > 0.01', + '!(hnl_q_01==0 & abs(hnl_m_01-91.1876) < 10) & !(hnl_q_01==0 & abs(hnl_m_01- 9.4603) < 0.08) & !(hnl_q_01==0 & abs(hnl_m_01-10.0233) < 0.08) & !(hnl_q_01==0 & abs(hnl_m_01-10.3552) < 0.08) & !(hnl_q_01==0 & abs(hnl_m_01-3.0969) < 0.08) & !(hnl_q_01==0 & abs(hnl_m_01-3.6861) < 0.08) & !(hnl_q_01==0 & abs(hnl_m_01-0.7827) < 0.08) & !(hnl_q_01==0 & abs(hnl_m_01-1.0190) < 0.08)', + 'l0_q!=l1_q', + '(hnl_w_vis_m > 50. & hnl_w_vis_m < 80.)', + 'l1_pt>7', + 'hnl_pt_12>15', + 'sv_cos>0.99', + 'sv_prob>0.001', + 'l0_reliso_rho_03<0.1', + 'l0_pt>32', # 2018!! 
+ 'abs(l1_dz)<10', + 'abs(l2_dz)<10', + 'l1_reliso_rho_03 < 0.2 & l2_reliso_rho_03 < 0.2' + ] + +selection_data_eem_os_full = ' & '.join(selection_data_eem_os) + +tree_eem.Scan("run:lumi:event:l0_pdgid:l0_pt:l1_pdgid:l1_pt:abs(l1_dxy):l2_pdgid:l2_pt:abs(l2_dxy):hnl_m_12:hnl_2d_disp", + selection_data_eem_os_full, + "precision=4 ") + + +to_check_martina_doesnt_have = [ +'run==10 & lumi == 1 & event == 740', +'run==102 & lumi == 1 & event == 470', +'run==103 & lumi == 1 & event == 261', +'run==103 & lumi == 1 & event == 830', +'run==103 & lumi == 1 & event == 939', +'run==104 & lumi == 1 & event == 194', +'run==106 & lumi == 1 & event == 790', +'run==110 & lumi == 1 & event == 853', +'run==113 & lumi == 1 & event == 354', +'run==114 & lumi == 1 & event == 503', +'run==120 & lumi == 1 & event == 751', +'run==121 & lumi == 1 & event == 708', +'run==126 & lumi == 1 & event == 925', +'run==128 & lumi == 1 & event == 556', +'run==129 & lumi == 1 & event == 759', +'run==137 & lumi == 1 & event == 212', +'run==137 & lumi == 1 & event == 461', +'run==138 & lumi == 1 & event == 872', +'run==139 & lumi == 1 & event == 838', +'run==141 & lumi == 1 & event == 183', +'run==147 & lumi == 1 & event == 629', +'run==152 & lumi == 1 & event == 383', +'run==157 & lumi == 1 & event == 444', +'run==161 & lumi == 1 & event == 194', +'run==168 & lumi == 1 & event == 230', +'run==17 & lumi == 1 & event == 49', +'run==175 & lumi == 1 & event == 919', +'run==182 & lumi == 1 & event == 880', +'run==19 & lumi == 1 & event == 180', +'run==195 & lumi == 1 & event == 455', +'run==202 & lumi == 1 & event == 251', +'run==202 & lumi == 1 & event == 462', +'run==205 & lumi == 1 & event == 715', +'run==220 & lumi == 1 & event == 707', +'run==223 & lumi == 1 & event == 282', +'run==223 & lumi == 1 & event == 363', +'run==223 & lumi == 1 & event == 437', +'run==231 & lumi == 1 & event == 304', +'run==233 & lumi == 1 & event == 196', +'run==243 & lumi == 1 & event == 96', +'run==249 & lumi == 1 & event == 878', +'run==3 & lumi == 1 & event == 
624', +'run==30 & lumi == 1 & event == 304', +'run==33 & lumi == 1 & event == 823', +'run==37 & lumi == 1 & event == 889', +'run==39 & lumi == 1 & event == 285', +'run==4 & lumi == 1 & event == 155', +'run==4 & lumi == 1 & event == 452', +'run==43 & lumi == 1 & event == 741', +'run==5 & lumi == 1 & event == 859', +'run==50 & lumi == 1 & event == 9', +'run==60 & lumi == 1 & event == 314', +'run==70 & lumi == 1 & event == 634', +'run==72 & lumi == 1 & event == 353', +'run==73 & lumi == 1 & event == 313', +'run==78 & lumi == 1 & event == 441', +'run==80 & lumi == 1 & event == 133', +'run==87 & lumi == 1 & event == 269', +'run==98 & lumi == 1 & event == 731', +] + +to_check_i_dont_have = [ +'run==101 & lumi == 1 & event == 387', +'run==101 & lumi == 1 & event == 881', +'run==109 & lumi == 1 & event == 915', +'run==11 & lumi == 1 & event == 252', +'run==117 & lumi == 1 & event == 959', +'run==12 & lumi == 1 & event == 122', +'run==133 & lumi == 1 & event == 27', +'run==133 & lumi == 1 & event == 767', +'run==136 & lumi == 1 & event == 810', +'run==139 & lumi == 1 & event == 908', +'run==152 & lumi == 1 & event == 493', +'run==154 & lumi == 1 & event == 60', +'run==155 & lumi == 1 & event == 13', +'run==168 & lumi == 1 & event == 95', +'run==171 & lumi == 1 & event == 649', +'run==18 & lumi == 1 & event == 381', +'run==180 & lumi == 1 & event == 989', +'run==187 & lumi == 1 & event == 168', +'run==188 & lumi == 1 & event == 801', +'run==196 & lumi == 1 & event == 564', +'run==203 & lumi == 1 & event == 58', +'run==209 & lumi == 1 & event == 484', +'run==210 & lumi == 1 & event == 470', +'run==217 & lumi == 1 & event == 761', +'run==244 & lumi == 1 & event == 676', +'run==244 & lumi == 1 & event == 751', +'run==59 & lumi == 1 & event == 874', +'run==71 & lumi == 1 & event == 41', +'run==86 & lumi == 1 & event == 710', +'run==96 & lumi == 1 & event == 234', +'run==98 & lumi == 1 & event == 115', +] + + + + +from collections import OrderedDict +check = OrderedDict() + +# 
for i in selection_data_eem_os + ['l1_reliso_rho_03 < 0.2', 'l2_reliso_rho_03 < 0.2', 'l1_LooseNoIso']: +for i in ['l0_pt > 30', 'l2_pt > 5', 'l1_pt > 5', 'l0_eid_mva_noniso_wp90 == 1', 'l1_LooseNoIso == 1', 'l2_Medium == 1' ,' l1_LooseNoIso']: + count_not_found =0 + count_exists =0 + # i += '| l0_reliso_rho_03 > 0.1' + # i += '& l1_LooseNoIso' + for iev in to_check_i_dont_have: + check [iev] = tree_eem.GetEntries(iev) # all found + check [iev] = tree_eem.GetEntries(iev + ' & ' + i) # all found + print(iev, check[iev]) + if check[iev] == 0: count_not_found += 1 + if check[iev] == 1: count_exists += 1 + + print(i) + print('not found: {nf}, found: {ex} \n'.format(nf=count_not_found, ex=count_exists)) + + +# for iev in to_check_i_dont_have: + # tree_mem.Scan("run:lumi:event:l0_pdgid:l0_pt:l2_pdgid:l2_pt:abs(l2_dxy):l2_reliso_rho_03:l1_pdgid:l1_pt:abs(l1_dxy):l1_reliso_rho_03:hnl_m_12:hnl_2d_disp", + # iev + ' & ' + selection_data_mem_ss_full, + # "precision=4 ") + # print(iev, check[iev]) + + diff --git a/sync/sync_3Mar20.py b/sync/sync_3Mar20.py new file mode 100644 index 0000000..5e2569f --- /dev/null +++ b/sync/sync_3Mar20.py @@ -0,0 +1,128 @@ +import ROOT as rt + +fin_eem = '/Users/cesareborgia/cernbox/hnl/2017/sig/HN3L_M_4_V_0p00290516780927_e_massiveAndCKM_LO/HNLTreeProducer_eem/tree.root' + +# fin_eem = '/Users/cesareborgia/cernbox/hnl/2017/sig/HN3L_M_10_V_0p000756967634711_e_massiveAndCKM_LO/HNLTreeProducer_eem/tree.root' + + +tf_in_eem = rt.TFile(fin_eem) + +tree_eem = tf_in_eem.Get('tree') + +tree_eem.SetScanField(0) + + +selection_data_eem_os = [ + 'l0_pt > 30 & l2_pt > 5 & l1_pt > 5 & l0_eid_mva_noniso_wp90 == 1 & l1_LooseNoIso == 1 & l2_Medium == 1', + 'abs(l0_eta) < 2.4 & abs(l0_dxy) < 0.05 & abs(l0_dz) < 0.1 & l0_reliso_rho_03 < 0.1 & abs(l1_eta) < 2.4 & l1_reliso_rho_03 < 10 & abs(l2_eta) < 2.4 & l2_reliso_rho_03 < 10 & hnl_q_12 == 0 & nbj == 0 & hnl_dr_12 < 1. & hnl_m_12 < 12 & sv_cos > 0.9 & abs(hnl_dphi_01)>1. & abs(hnl_dphi_02)>1. 
& abs(l1_dxy) > 0.01 & abs(l2_dxy) > 0.01', + '!(hnl_q_01==0 & abs(hnl_m_01-91.1876) < 10) & !(hnl_q_01==0 & abs(hnl_m_01- 9.4603) < 0.08) & !(hnl_q_01==0 & abs(hnl_m_01-10.0233) < 0.08) & !(hnl_q_01==0 & abs(hnl_m_01-10.3552) < 0.08) & !(hnl_q_01==0 & abs(hnl_m_01-3.0969) < 0.08) & !(hnl_q_01==0 & abs(hnl_m_01-3.6861) < 0.08) & !(hnl_q_01==0 & abs(hnl_m_01-0.7827) < 0.08) & !(hnl_q_01==0 & abs(hnl_m_01-1.0190) < 0.08)', + 'l0_q!=l1_q', + '(hnl_w_vis_m > 50. & hnl_w_vis_m < 80.)', + 'l1_pt>7', + 'hnl_pt_12>15', + 'sv_cos>0.99', + 'sv_prob>0.001', + 'l0_reliso_rho_03<0.1', + 'l0_pt>35', # 2017!! + 'abs(l1_dz)<10', + 'abs(l2_dz)<10', + 'l1_reliso_rho_03 < 0.2 & l2_reliso_rho_03 < 0.2' + ] + +selection_data_eem_os_full = ' & '.join(selection_data_eem_os) + +# tree_eem.Scan("run:lumi:event:l0_pdgid:l0_pt:l1_pdgid:l1_pt:abs(l1_dxy):l2_pdgid:l2_pt:abs(l2_dxy):hnl_m_12:hnl_2d_disp", + # selection_data_eem_os_full, + # "precision=4 ") + + +to_check_martina_doesnt_have = [ +'run == 10 & lumi == 1 & event == 740', +'run == 103 & lumi == 1 & event == 261', +'run == 104 & lumi == 1 & event == 194', +'run == 114 & lumi == 1 & event == 503', +'run == 120 & lumi == 1 & event == 751', +'run == 126 & lumi == 1 & event == 925', +'run == 128 & lumi == 1 & event == 556', +'run == 137 & lumi == 1 & event == 212', +'run == 139 & lumi == 1 & event == 838', +'run == 152 & lumi == 1 & event == 383', +'run == 168 & lumi == 1 & event == 230', +'run == 19 & lumi == 1 & event == 180', +'run == 205 & lumi == 1 & event == 715', +'run == 30 & lumi == 1 & event == 304', +'run == 37 & lumi == 1 & event == 889', +'run == 4 & lumi == 1 & event == 452', +'run == 70 & lumi == 1 & event == 634', +'run == 78 & lumi == 1 & event == 441', +'run == 80 & lumi == 1 & event == 133', +'run == 98 & lumi == 1 & event == 731', +] + +to_check_i_dont_have = [ +'run == 101 & lumi == 1 & event == 387', +'run == 101 & lumi == 1 & event == 881', +'run == 109 & lumi == 1 & event == 915', +'run == 11 & lumi == 1 & event 
== 252', +'run == 117 & lumi == 1 & event == 959', +'run == 12 & lumi == 1 & event == 122', +'run == 133 & lumi == 1 & event == 27', +'run == 133 & lumi == 1 & event == 767', +'run == 136 & lumi == 1 & event == 810', +'run == 139 & lumi == 1 & event == 908', +'run == 152 & lumi == 1 & event == 493', +'run == 154 & lumi == 1 & event == 60', +'run == 155 & lumi == 1 & event == 13', +'run == 168 & lumi == 1 & event == 95', +'run == 171 & lumi == 1 & event == 649', +'run == 18 & lumi == 1 & event == 381', +'run == 180 & lumi == 1 & event == 989', +'run == 187 & lumi == 1 & event == 168', +'run == 188 & lumi == 1 & event == 801', +'run == 196 & lumi == 1 & event == 564', +'run == 203 & lumi == 1 & event == 58', +'run == 209 & lumi == 1 & event == 484', +'run == 210 & lumi == 1 & event == 470', +'run == 217 & lumi == 1 & event == 761', +'run == 244 & lumi == 1 & event == 676', +'run == 244 & lumi == 1 & event == 751', +'run == 59 & lumi == 1 & event == 874', +'run == 71 & lumi == 1 & event == 41', +'run == 86 & lumi == 1 & event == 710', +'run == 96 & lumi == 1 & event == 234', +'run == 98 & lumi == 1 & event == 115', +] + + + +from collections import OrderedDict +check = OrderedDict() + +# for i in selection_data_eem_os + ['l1_reliso_rho_03 < 0.2', 'l2_reliso_rho_03 < 0.2', 'l1_LooseNoIso']: +for i in ['l0_pt > 30', 'l2_pt > 5', 'l1_pt > 5', 'l0_eid_mva_noniso_wp90 == 1', 'l1_LooseNoIso == 1', 'l2_Medium == 1' ,' l1_LooseNoIso']: + count_not_found =0 + count_exists =0 + # i += '| l0_reliso_rho_03 > 0.1' + # i += '& l1_LooseNoIso' + for iev in to_check_i_dont_have: + check [iev] = tree_eem.GetEntries(iev) # all found + check [iev] = tree_eem.GetEntries(iev + ' & ' + i) # all found + # print(iev, check[iev]) + if check[iev] == 0: count_not_found += 1 + if check[iev] == 1: count_exists += 1 + + print(i) + print('not found: {nf}, found: {ex} \n'.format(nf=count_not_found, ex=count_exists)) + + +double_cross_check_sv_cos_and_sv_prob = [ +'run == 104 & lumi == 1 & event == 
194', +'run == 30 & lumi == 1 & event == 304', +] + +for iev in double_cross_check_sv_cos_and_sv_prob: + tree_eem.Scan("run:lumi:event:l0_pdgid:l0_pt:l1_pdgid:l1_pt:abs(l1_dxy):l1_reliso_rho_03:l2_pdgid:l2_pt:abs(l2_dxy):l2_reliso_rho_03:hnl_m_12:hnl_2d_disp:sv_cos:sv_prob", + iev + ' & ' + selection_data_eem_os_full, + "precision=4 ") + + diff --git a/sync/sync_4Mar20.py b/sync/sync_4Mar20.py new file mode 100644 index 0000000..3077b40 --- /dev/null +++ b/sync/sync_4Mar20.py @@ -0,0 +1,163 @@ +import ROOT as rt + +fin_mem = '/Users/cesareborgia/cernbox/hnl/2017/mc/DYJetsToLL_M50_ext/HNLTreeProducer_mem/tree.root' +fin_eee = '/Users/cesareborgia/cernbox/hnl/2017/mc/DYJetsToLL_M50_ext/HNLTreeProducer_eee/tree.root' + + +tf_in_mem = rt.TFile(fin_mem) +tf_in_eee = rt.TFile(fin_eee) + +tree_mem = tf_in_mem.Get('tree') +tree_eee = tf_in_eee.Get('tree') + +tree_mem.SetScanField(0) +tree_eee.SetScanField(0) + + +selection_data_eee = [ + 'l0_pt > 30 & l2_pt > 5 & l1_pt > 5 & l0_eid_mva_noniso_wp90 == 1 & l1_LooseNoIso == 1 & l2_LooseNoIso == 1', + 'abs(l0_eta) < 2.4 & abs(l0_dxy) < 0.05 & abs(l0_dz) < 0.1 & l0_reliso_rho_03 < 0.1 & abs(l1_eta) < 2.4 & l1_reliso_rho_03 < 10 & abs(l2_eta) < 2.4 & l2_reliso_rho_03 < 10 & hnl_q_12 == 0 & nbj == 0 & hnl_dr_12 < 1. & hnl_m_12 < 12 & sv_cos > 0.9 & abs(hnl_dphi_01)>1. & abs(hnl_dphi_02)>1. 
& abs(l1_dxy) > 0.01 & abs(l2_dxy) > 0.01', + '!(hnl_2d_disp<1.5 & abs(hnl_m_12-3.0969) < 0.08) & !(hnl_2d_disp<1.5 & abs(hnl_m_12-3.6861) < 0.08) & !(hnl_2d_disp<1.5 & abs(hnl_m_12-0.7827) < 0.08) & !(hnl_2d_disp<1.5 & abs(hnl_m_12-1.0190) < 0.08)', + '!(hnl_q_01==0 & abs(hnl_m_01-91.1876) < 10) & !(hnl_q_01==0 & abs(hnl_m_01- 9.4603) < 0.08) & !(hnl_q_01==0 & abs(hnl_m_01-10.0233) < 0.08) & !(hnl_q_01==0 & abs(hnl_m_01-10.3552) < 0.08) & !(hnl_q_01==0 & abs(hnl_m_01-3.0969) < 0.08) & !(hnl_q_01==0 & abs(hnl_m_01-3.6861) < 0.08) & !(hnl_q_01==0 & abs(hnl_m_01-0.7827) < 0.08) & !(hnl_q_01==0 & abs(hnl_m_01-1.0190) < 0.08)', + '!(hnl_q_02==0 & abs(hnl_m_02-91.1876) < 10) & !(hnl_q_02==0 & abs(hnl_m_02- 9.4603) < 0.08) & !(hnl_q_02==0 & abs(hnl_m_02-10.0233) < 0.08) & !(hnl_q_02==0 & abs(hnl_m_02-10.3552) < 0.08) & !(hnl_q_02==0 & abs(hnl_m_02-3.0969) < 0.08) & !(hnl_q_02==0 & abs(hnl_m_02-3.6861) < 0.08) & !(hnl_q_02==0 & abs(hnl_m_02-0.7827) < 0.08) & !(hnl_q_02==0 & abs(hnl_m_02-1.0190) < 0.08)', + '!(hnl_w_vis_m > 50. & hnl_w_vis_m < 80.)', + 'l1_pt>7', + 'l2_pt>7', + 'hnl_pt_12>15', + 'sv_cos>0.99', + 'sv_prob>0.001', + 'l0_pt>35', # 2017!! + 'abs(l1_dz)<10', + 'abs(l2_dz)<10', + 'l0_reliso_rho_03<0.1', + 'l1_reliso_rho_03 < 0.2', + 'l2_reliso_rho_03 < 0.2', + '(l1_gen_match_isPrompt==1 | l1_gen_match_pdgid==22 | l2_gen_match_isPrompt==1 | l2_gen_match_pdgid==22)', +] + +selection_data_mme_os = [ + 'l0_pt > 25 & l2_pt > 5 & l1_pt > 5 & l0_id_m == 1 & l1_LooseNoIso == 1 & l2_Medium == 1', + 'abs(l0_eta) < 2.4 & abs(l0_dxy) < 0.05 & abs(l0_dz) < 0.1 & l0_reliso_rho_03 < 0.1 & abs(l1_eta) < 2.5 & l1_reliso_rho_03 < 10 & abs(l2_eta) < 2.4 & l2_reliso_rho_03 < 10 & hnl_q_12 == 0 & nbj == 0 & hnl_dr_12 < 1. & hnl_m_12 < 12 & sv_cos > 0.9 & abs(hnl_dphi_01)>1. & abs(hnl_dphi_02)>1. 
& abs(l1_dxy) > 0.01 & abs(l2_dxy) > 0.01', + '!(hnl_q_02==0 & abs(hnl_m_02-91.1876) < 10) & !(hnl_q_02==0 & abs(hnl_m_02- 9.4603) < 0.08) & !(hnl_q_02==0 & abs(hnl_m_02-10.0233) < 0.08) & !(hnl_q_02==0 & abs(hnl_m_02-10.3552) < 0.08) & !(hnl_q_02==0 & abs(hnl_m_02-3.0969) < 0.08) & !(hnl_q_02==0 & abs(hnl_m_02-3.6861) < 0.08) & !(hnl_q_02==0 & abs(hnl_m_02-0.7827) < 0.08) & !(hnl_q_02==0 & abs(hnl_m_02-1.0190) < 0.08)', + 'l0_q!=l2_q', + '!(hnl_w_vis_m > 50. & hnl_w_vis_m < 80.)', + 'l1_pt>7', + 'hnl_pt_12>15', + 'sv_cos>0.99', + 'sv_prob>0.001', + 'l0_reliso_rho_03<0.1', + 'l0_pt>25', + 'abs(l1_dz)<10', + 'abs(l2_dz)<10', + 'l1_reliso_rho_03 < 0.2', + 'l2_reliso_rho_03 < 0.2', + '(l1_gen_match_isPrompt==1 | l1_gen_match_pdgid==22 | l2_gen_match_isPrompt==1 | l2_gen_match_pdgid==22)', +] + +selection_data_eee_full = ' & '.join(selection_data_eee) +selection_data_mme_os_full = ' & '.join(selection_data_mme_os) + +# tree_eee.Scan("run:lumi:event:l0_pdgid:l0_pt:l1_pdgid:l1_pt:abs(l1_dxy):l2_pdgid:l2_pt:abs(l2_dxy):hnl_m_12:hnl_2d_disp", + # selection_data_eee_full, + # "precision=4 col=1:6:9.0f:4") + +# print('='*30) + +# tree_mem.Scan("run:lumi:event:l0_pdgid:l0_pt:l1_pdgid:l1_pt:abs(l1_dxy):l2_pdgid:l2_pt:abs(l2_dxy):hnl_m_12:hnl_2d_disp", + # selection_data_mme_os_full, + # "precision=4 col=1:6:9.0f:4") + +to_check_martina_doesnt_have = [ +# eee +'run == 1 & lumi == 119098 & event == 269424234', +'run == 1 & lumi == 59071 & event == 133630051', +'run == 1 & lumi == 62631 & event == 141683112', +# mem_os +# 'run == 1 & lumi == 142236 & event == 321765876', +# 'run == 1 & lumi == 222567 & event == 503490272', +# 'run == 1 & lumi == 52077 & event == 117809463', +] + +to_check_i_dont_have = [ +# eee +'run == 1 & lumi == 122708 & event == 277589688', +'run == 1 & lumi == 127797 & event == 289102176', +'run == 1 & lumi == 135493 & event == 306512460', +'run == 1 & lumi == 13791 & event == 31198175', +'run == 1 & lumi == 138743 & event == 313863816', +'run == 1 & lumi == 
148808 & event == 336634066', +'run == 1 & lumi == 165037 & event == 373346804', +'run == 1 & lumi == 181798 & event == 411263972', +'run == 1 & lumi == 186659 & event == 422259793', +'run == 1 & lumi == 206262 & event == 466604276', +'run == 1 & lumi == 209608 & event == 474175765', +'run == 1 & lumi == 214894 & event == 486133246', +'run == 1 & lumi == 226689 & event == 512816380', +'run == 1 & lumi == 29497 & event == 66727256', +'run == 1 & lumi == 38304 & event == 86651108', +'run == 1 & lumi == 43407 & event == 98194032', +'run == 1 & lumi == 46261 & event == 104651064', +'run == 1 & lumi == 97356 & event == 220237943', +### not in ntuple +'run == 1 & lumi == 18187 & event == 31690514', +'run == 1 & lumi == 21955 & event == 38256226', +'run == 1 & lumi == 41908 & event == 73024119', +'run == 1 & lumi == 130611 & event == 295467446', +'run == 1 & lumi == 133014 & event == 300904752', +# mem_os +# 'run == 1 & lumi == 186067 & event == 420918977', +# 'run == 1 & lumi == 85866 & event == 194246274', +] + + + +from collections import OrderedDict +check = OrderedDict() + +# for i in selection_data_eee + ['l0_eid_mva_noniso_wp90 == 1', 'l1_LooseNoIso == 1', 'l2_LooseNoIso == 1', '1 == 1']: +# for i in selection_data_eee + ['sv_cos > 0.99 & l0_reliso_rho_03 < 0.1', '1 == 1']: +# for i in [#'sv_cos > 0.99 & l0_reliso_rho_03 < 0.1 & (l1_gen_match_isPrompt==1 | l1_gen_match_pdgid==22 | l2_gen_match_isPrompt==1 | l2_gen_match_pdgid==22) & nbj==0 & l2_reliso_rho_03 < 0.2', + # 'nbj == 0', + # 'sv_cos > 0.9', +# for i in [#'abs(l0_eta) < 2.5', + # 'abs(l0_dxy) < 0.05 ', + # ' abs(l0_dz) < 0.1 ', + # ' abs(l1_eta) < 2.5 ', + # ' l1_reliso_rho_03 < 10 ', + # ' abs(l2_eta) < 2.5 ', + # ' l2_reliso_rho_03 < 10 ', + # ' hnl_q_12 == 0 ', + # ' hnl_dr_12 < 1. ', + # ' hnl_m_12 < 12 ', + # ' abs(hnl_dphi_01)>1. ', + # ' abs(hnl_dphi_02)>1. 
', + # ' abs(l1_dxy) > 0.01 ', + # ' abs(l2_dxy) > 0.01', + # 'abs(l0_eta) < 2.4 & abs(l1_eta) < 2.4 & abs(l2_eta) < 2.4 ', +for i in ['1 == 1']: + count_not_found =0 + count_exists =0 + # i += '| l0_reliso_rho_03 > 0.1' + # i += '& l1_LooseNoIso' + for iev in to_check_i_dont_have: + check [iev] = tree_eee.GetEntries(iev) # all found + check [iev] = tree_eee.GetEntries(iev + ' & ' + i) # all found + if check[iev] == 0: print(iev, check[iev]) + if check[iev] == 0: count_not_found += 1 + if check[iev] == 1: count_exists += 1 + + print(i) + print('not found: {nf}, found: {ex} \n'.format(nf=count_not_found, ex=count_exists)) + + +# double_cross_check_sv_cos_and_sv_prob = [ +# 'run == 104 & lumi == 1 & event == 194', +# 'run == 30 & lumi == 1 & event == 304', +# ] + +# for iev in double_cross_check_sv_cos_and_sv_prob: + # tree_eem.Scan("run:lumi:event:l0_pdgid:l0_pt:l1_pdgid:l1_pt:abs(l1_dxy):l1_reliso_rho_03:l2_pdgid:l2_pt:abs(l2_dxy):l2_reliso_rho_03:hnl_m_12:hnl_2d_disp:sv_cos:sv_prob", + # iev + ' & ' + selection_data_eem_os_full, + # "precision=4 ") + + diff --git a/sync/sync_yields.py b/sync/sync_yields.py new file mode 100644 index 0000000..1bcab2f --- /dev/null +++ b/sync/sync_yields.py @@ -0,0 +1,174 @@ +from collections import OrderedDict +from glob import glob +import ROOT as rt +from pdb import set_trace + + +years = ['2016', '2017', '2018'] +chs = ['eee', 'eem_os', 'eem_ss', 'mem_ss', 'mem_os', 'mmm'] +disps = ['lt_0p5', '0p5_to_1p5','1p5_to_4p0', 'mt_4p0'] +bins = ['lo', 'hi'] + +data = 'data_obs' +prmp = 'prompt' +nonp = 'nonprompt' +sig8 = 'hnl_m_8_v2_2p3Em06_majorana' +sig10 = 'hnl_m_10_v2_5p7Em07_majorana' +sigs = [sig8, sig10] +# sigs = [sig10] # only one sig for both e/m coupling, after discussion with martina on 2/12 + +# folders = glob('/Users/cesareborgia/cernbox/plots/plotter/*/*/*/datacards/') # year/channel/date_of_prod +# folders = glob('/Users/cesareborgia/cernbox/plots/plotter/*/*/200122_*/datacards/') # year/channel/date_of_prod +# folders 
= glob('/Users/cesareborgia/cernbox/plots/plotter/*/*/200214*/datacards/datacard_hnl_m_12_lxy_mt_4p0_hnl_m_10_v2_5p7Em07_majorana.txt') # year/channel/date_of_prod +# folders = glob('/Users/cesareborgia/cernbox/plots/plotter/2017/*/200224_15h_17m/datacards/') # year/channel/date_of_prod + +folders = glob('/Users/cesareborgia/cernbox/plots/plotter/2018/*/200225_16h_*/datacards/') # 2018 WITH disp_sig +folders = glob('/Users/cesareborgia/cernbox/plots/plotter/2018/*/200225_15h_*/datacards/') # 2018 W/O disp_sig + +folders = glob('/Users/cesareborgia/cernbox/plots/plotter/2017/*/200225_15h_*/datacards/') # 2017 WITH disp_sig +folders = glob('/Users/cesareborgia/cernbox/plots/plotter/2017/*/200225_14h_*/datacards/') # 2017 W/O disp_sig + +# 2016 WITH disp_sig +folders = glob('/Users/cesareborgia/cernbox/plots/plotter/2016/*/200225_16h_7m/datacards/')\ + + glob('/Users/cesareborgia/cernbox/plots/plotter/2016/*/200225_16h_8m/datacards/')\ + + glob('/Users/cesareborgia/cernbox/plots/plotter/2016/*/200225_16h_1*/datacards/') + +# 2016 W/O disp_sig +# folders = glob('/Users/cesareborgia/cernbox/plots/plotter/2016/*/200225_16h_2*/datacards/')\ + # + glob('/Users/cesareborgia/cernbox/plots/plotter/2016/*/200225_16h_3*/datacards/') + +signal = False + +files = OrderedDict() + +for f in folders: + f_yr = f.split('/')[-5] + f_ch = f.split('/')[-4] + if 'txt' in f: f = f.replace(f.split('/')[-1],'') + try: files[f_yr][f_ch] = f + except: + files[f_yr] = OrderedDict() + files[f_yr][f_ch] = f + +yields = OrderedDict() + +for yr in years: + yields[yr] = OrderedDict() + for ch in chs: + yields[yr][ch] = OrderedDict() + for disp in disps: + yields[yr][ch][disp] = OrderedDict() + for sig in sigs: + yields[yr][ch][disp][sig] = OrderedDict() + + +for yr in files.keys(): + for ch in files[yr].keys(): + f_in = OrderedDict() + for disp in disps: + f_in[disp] = rt.TFile(files[yr][ch] + 'datacard_hnl_m_12_lxy_{disp}.root'.format(disp = disp)) + for sig in sigs: + # if ch[0] == 'm' and '2p3' 
in sig: continue #muon channels don't have the m=8,v2=2.3e-6 signal + h_sig = f_in[disp].Get(sig) + h_nonp = f_in[disp].Get(nonp) + h_prmp = f_in[disp].Get(prmp) + h_data = f_in[disp].Get(data) + + if signal: + try: h_sig.GetBinContent(1) + except: + continue + set_trace() + + yields[yr][ch][disp][sig] = OrderedDict() + yields[yr][ch][disp][data] = OrderedDict() + yields[yr][ch][disp][nonp] = OrderedDict() + yields[yr][ch][disp][prmp] = OrderedDict() + + if signal: + yields[yr][ch][disp][sig]['lo'] = h_sig.GetBinContent(1) + yields[yr][ch][disp][sig]['hi'] = h_sig.GetBinContent(2) + + if not signal: + yields[yr][ch][disp][data]['lo'] = h_data.GetBinContent(1) + yields[yr][ch][disp][data]['hi'] = h_data.GetBinContent(2) + + yields[yr][ch][disp][nonp]['lo'] = h_nonp.GetBinContent(1) + yields[yr][ch][disp][nonp]['hi'] = h_nonp.GetBinContent(2) + + yields[yr][ch][disp][prmp]['lo'] = h_prmp.GetBinContent(1) + yields[yr][ch][disp][prmp]['hi'] = h_prmp.GetBinContent(2) + + +out_folder = '/Users/cesareborgia/HNL/plotter/sync/' + +with open(out_folder + 'sync_yields.txt', 'w') as f: + for yr in files.keys(): + for ch in files[yr].keys(): + f.write('\n\t\tyear={yr}\n'.format(yr=yr)) + + f.write('\t\tchannel={ch}\n'.format(ch=ch)) + + f.write('\t\t\tdisp\t\t{disp0}\t\t\t\t{disp1}\t\t\t{disp2}\t\t\t{disp3}\n'.format(disp0=disps[0],disp1=disps[1],disp2=disps[2],disp3=disps[3])) + + f.write('\t\t\tbin\t\t0 < m < 4\t4 < m < 12') + f.write('\t0 < m < 4\t4 < m < 12') + f.write('\t0 < m < 4\t4 < m < 12') + f.write('\t0 < m < 4\t4 < m < 12') + f.write('\n') + + f.write('\t\t\tnon-prompt\t{d0_mlo:.3f}\t\t{d0_mhi:.3f}\t\t{d1_mlo:.3f}\t\t{d1_mhi:.3f}'.format(d0_mlo=yields[yr][ch][disps[0]][nonp]['lo'], d0_mhi=yields[yr][ch][disps[0]][nonp]['hi'], + d1_mlo=yields[yr][ch][disps[1]][nonp]['lo'], d1_mhi=yields[yr][ch][disps[1]][nonp]['hi'])) + + f.write('\t\t{d2_mlo:.3f}\t\t{d2_mhi:.3f}\t\t{d3_mlo:.3f}\t\t{d3_mhi:.3f}'.format(d2_mlo=yields[yr][ch][disps[2]][nonp]['lo'], 
d2_mhi=yields[yr][ch][disps[2]][nonp]['hi'], + d3_mlo=yields[yr][ch][disps[3]][nonp]['lo'], d3_mhi=yields[yr][ch][disps[3]][nonp]['hi'])) + + f.write('\n') + + f.write('\t\t\tprompt\t\t{d0_mlo:.3f}\t\t{d0_mhi:.3f}\t\t{d1_mlo:.3f}\t\t{d1_mhi:.3f}'.format(d0_mlo=yields[yr][ch][disps[0]][prmp]['lo'], d0_mhi=yields[yr][ch][disps[0]][prmp]['hi'], + d1_mlo=yields[yr][ch][disps[1]][prmp]['lo'], d1_mhi=yields[yr][ch][disps[1]][prmp]['hi'])) + + f.write('\t\t{d2_mlo:.3f}\t\t{d2_mhi:.3f}\t\t{d3_mlo:.3f}\t\t{d3_mhi:.3f}'.format(d2_mlo=yields[yr][ch][disps[2]][prmp]['lo'], d2_mhi=yields[yr][ch][disps[2]][prmp]['hi'], + d3_mlo=yields[yr][ch][disps[3]][prmp]['lo'], d3_mhi=yields[yr][ch][disps[3]][prmp]['hi'])) + + f.write('\n') + + if signal: + for sig in sigs: + if ch[0] == 'm' and '2p3' in sig: continue #muon channels don't have the m=8,v2=2.3e-6 signal + f.write('\t{s}\t{d0_mlo:.3f}\t\t{d0_mhi:.3f}\t\t{d1_mlo:.3f}\t\t{d1_mhi:.3f}'.format(s=sig, d0_mlo=yields[yr][ch][disps[0]][sig]['lo'], d0_mhi=yields[yr][ch][disps[0]][sig]['hi'], + d1_mlo=yields[yr][ch][disps[1]][sig]['lo'], d1_mhi=yields[yr][ch][disps[1]][sig]['hi'])) + + f.write('\t\t{d2_mlo:.3f}\t\t{d2_mhi:.3f}\t\t{d3_mlo:.3f}\t\t{d3_mhi:.3f}'.format(d2_mlo=yields[yr][ch][disps[2]][sig]['lo'], d2_mhi=yields[yr][ch][disps[2]][sig]['hi'], + d3_mlo=yields[yr][ch][disps[3]][sig]['lo'], d3_mhi=yields[yr][ch][disps[3]][sig]['hi'])) + f.write('\n') + + if not signal: + # data + f.write('\t\t\tdata\t\t{d0_mlo:.3f}\t\t{d0_mhi:.3f}\t\t{d1_mlo:.3f}\t\t{d1_mhi:.3f}'.format(d0_mlo=yields[yr][ch][disps[0]][data]['lo'], d0_mhi=yields[yr][ch][disps[0]][data]['hi'], + d1_mlo=yields[yr][ch][disps[1]][data]['lo'], d1_mhi=yields[yr][ch][disps[1]][data]['hi'])) + + f.write('\t\t{d2_mlo:.3f}\t\t{d2_mhi:.3f}\t\t{d3_mlo:.3f}\t\t{d3_mhi:.3f}'.format(d2_mlo=yields[yr][ch][disps[2]][data]['lo'], d2_mhi=yields[yr][ch][disps[2]][data]['hi'], + d3_mlo=yields[yr][ch][disps[3]][data]['lo'], d3_mhi=yields[yr][ch][disps[3]][data]['hi'])) + + f.write('\n') 
+ + # RATIOS + f.write('\t\t\tdt/bkg\t\t{d0_mlo:.3f}\t\t{d0_mhi:.3f}\t\t{d1_mlo:.3f}\t\t{d1_mhi:.3f}'.format( + d0_mlo=yields[yr][ch][disps[0]][data]['lo'] / (yields[yr][ch][disps[0]][prmp]['lo'] + yields[yr][ch][disps[0]][nonp]['lo']), + d0_mhi=yields[yr][ch][disps[0]][data]['hi'] / (yields[yr][ch][disps[0]][prmp]['hi'] + yields[yr][ch][disps[0]][nonp]['hi']), + d1_mlo=yields[yr][ch][disps[1]][data]['lo'] / (yields[yr][ch][disps[1]][prmp]['lo'] + yields[yr][ch][disps[1]][nonp]['lo']), + d1_mhi=yields[yr][ch][disps[1]][data]['hi'] / (yields[yr][ch][disps[1]][prmp]['hi'] + yields[yr][ch][disps[1]][nonp]['hi']))) + + f.write('\t\t{d2_mlo:.3f}\t\t{d2_mhi:.3f}\t\t{d3_mlo:.3f}\t\t{d3_mhi:.3f}'.format( + d2_mlo=yields[yr][ch][disps[2]][data]['lo'] / (yields[yr][ch][disps[2]][prmp]['lo'] + yields[yr][ch][disps[2]][nonp]['lo']), + d2_mhi=yields[yr][ch][disps[2]][data]['hi'] / (yields[yr][ch][disps[2]][prmp]['hi'] + yields[yr][ch][disps[2]][nonp]['hi']), + d3_mlo=yields[yr][ch][disps[3]][data]['lo'] / (yields[yr][ch][disps[3]][prmp]['lo'] + yields[yr][ch][disps[3]][nonp]['lo']), + d3_mhi=yields[yr][ch][disps[3]][data]['hi'] / (yields[yr][ch][disps[3]][prmp]['hi'] + yields[yr][ch][disps[3]][nonp]['hi']))) + + f.write('\n') + # ERROR + + + f.write('\n\n') +f.close()