forked from sagittaeri/htt
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathoptimize-kinematic-cuts
More file actions
executable file
·186 lines (154 loc) · 6.5 KB
/
optimize-kinematic-cuts
File metadata and controls
executable file
·186 lines (154 loc) · 6.5 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
#!/usr/bin/env python
import logging
import math
import multiprocessing
import os
import time
from multiprocessing import Process

from rootpy import ROOT
from rootpy.plotting import Graph, Canvas, Hist, Legend
from rootpy.stats.histfactory import (
    Data, Sample, Channel, make_measurement, make_workspace)
from rootpy.plotting.style.atlas.labels import ATLAS_label

from mva import CONST_PARAMS, CACHE_DIR
from mva.categories import Category_VBF, Category_Boosted
from mva.samples import QCD, Ztautau
from mva.analysis import Analysis
from mva.defaults import TARGET_REGION
from statstools.fixups import fix_measurement
from statstools import get_significance_workspace
from statstools.jobs import run_pool
from statstools.plotting import pvalue_plot
log = logging.getLogger(os.path.basename(__file__))
gaussian_cdf_c = ROOT.Math.gaussian_cdf_c
class SigProcess(Process):
    """Worker process that evaluates ``get_sig`` in a child process.

    The call specification captured at construction time is forwarded to
    ``get_sig`` when the process runs; the scalar result travels back to
    the parent through a one-item queue and is read via ``output``.
    """

    def __init__(self, *args, **kwargs):
        super(SigProcess, self).__init__()
        # Stash the call spec for run(); names chosen to avoid clashing
        # with multiprocessing.Process's own _args/_kwargs internals.
        self._call_args = args
        self._call_kwargs = kwargs
        self.result = multiprocessing.Queue()

    @property
    def output(self):
        # Blocks until run() has put the value; read it at most once,
        # since get() consumes the queue entry.
        return self.result.get()

    def run(self):
        self.result.put(get_sig(*self._call_args, **self._call_kwargs))
def map_pool(process, args, n_jobs=-1, **kwargs):
    """Fan a list of argument tuples out over worker processes.

    One *process* instance is created per tuple in *args* (each tuple is
    unpacked as positional arguments, *kwargs* is shared by all workers),
    the pool is run, and each worker's ``output`` is collected in the
    same order the arguments were given.
    """
    workers = []
    for arg_tuple in args:
        workers.append(process(*arg_tuple, **kwargs))
    run_pool(workers, n_jobs=n_jobs)
    return [worker.output for worker in workers]
def get_workspace(scores, binning,
                  category,
                  mass=125,
                  systematics=False):
    """Build a single-channel HistFactory workspace from classifier scores.

    ``scores`` must expose ``bkg_scores`` (iterable of (sample, scores_dict)
    pairs) and ``all_sig_scores`` (dict keyed by mass point) — presumably the
    object returned by ``Analysis.get_scores``; verify against the caller.
    The background histograms are summed to fabricate the "data" histogram,
    so the resulting workspace is an Asimov-style construction with no real
    observed data.
    """
    # One histogram template with the BDT-score binning shared by all samples.
    hist_template = Hist(binning)
    background = []
    for sample, scores_dict in scores.bkg_scores:
        background.append(sample.get_histfactory_sample(
            hist_template, None, category, TARGET_REGION, scores=scores_dict,
            systematics=systematics))
    signal = []
    # Only the signal scores for the requested mass point enter the channel.
    for sample, scores_dict in scores.all_sig_scores[mass]:
        signal.append(sample.get_histfactory_sample(
            hist_template, None, category, TARGET_REGION, scores=scores_dict,
            systematics=systematics))
    # TODO: why is the clone needed?
    # NOTE(review): summing the background histograms to stand in for data;
    # the Clone(shallow=True) presumably prevents the sum from mutating the
    # samples' own histograms — confirm against rootpy semantics.
    data_hist = sum([b.hist.Clone(shallow=True) for b in background])
    data_hist.name = 'Data'
    data = Data('Data', data_hist)
    channel = Channel(category.name, signal + background, data)
    measurement = make_measurement(
        'MVA', channel,
        POI='SigXsecOverSM',
        const_params=CONST_PARAMS)
    # Project-level cleanup of the measurement (statstools.fixups).
    fix_measurement(measurement)
    return make_workspace(measurement, silence=True)
def get_sig(category,
            cuts,
            mass=125,
            systematics=False):
    """Return the expected significance for *category* with *cuts* applied.

    Builds a fresh 2012 Analysis (workspace norms and QCD shape systematic
    disabled), scores events with the pre-trained BDT, assembles a
    HistFactory workspace and reads the significance out of the workspace
    scan histogram. Returns 0 when the fit yields NaN so that threshold
    scans degrade gracefully instead of propagating NaN into the plots.
    """
    analysis = Analysis(2012,
                        systematics=systematics,
                        qcd_workspace_norm=False,
                        ztt_workspace_norm=False,
                        qcd_shape_systematic=False)
    analysis.normalize(category)
    # NOTE(review): the classifier is always loaded for Category_VBF even
    # when *category* differs — confirm this is intended before scanning
    # non-VBF categories.
    clf = analysis.get_clf(
        Category_VBF,
        load=True,
        mass=mass,
        transform=True)
    scores = analysis.get_scores(
        clf, category, TARGET_REGION,
        mode='workspace', cuts=cuts,
        masses=[mass], systematics=systematics)
    # Overflow pushed far out (1E5) so the last bin absorbs everything above.
    binning = clf.binning(analysis.year, overflow=1E5)
    ws = get_workspace(scores, binning,
                       category,
                       mass=mass,
                       systematics=systematics)
    hist = get_significance_workspace(ws)
    sig = hist[2].value
    # Guard against a failed fit (was the `sig != sig` NaN trick).
    return 0 if math.isnan(sig) else sig
if __name__ == '__main__':
    # pip install --user tabulate
    from tabulate import tabulate
    from rootpy.extern.argparse import ArgumentParser

    parser = ArgumentParser()
    parser.add_argument('--year', type=int, choices=[2011, 2012], default=2012)
    parser.add_argument('--categories', nargs='*')
    parser.add_argument('--systematics', action='store_true', default=False)
    parser.add_argument('--mass', type=int, default=125, choices=range(100, 155, 5))
    parser.add_argument('--procs', type=int, default=-1)
    args = parser.parse_args()
    # (dead locals `year = args.year` and `mass = args.mass` removed;
    # everything below reads args directly)

    # pT thresholds to scan, in GeV, 2 GeV steps: 20 points per variable.
    lead_tau_cuts = range(35, 75, 2)
    sublead_tau_cuts = range(25, 65, 2)
    lead_jet_cuts = range(50, 90, 2)
    sublead_jet_cuts = range(30, 70, 2)
    # Cut strings use MeV (ntuple units), hence the *1e3.
    cuts_l = ['tau1_pt > {0}'.format(cut_gev*1e3) for cut_gev in lead_tau_cuts]
    cuts_sl = ['tau2_pt > {0}'.format(cut_gev*1e3) for cut_gev in sublead_tau_cuts]
    cuts_j_l = ['jet1_pt > {0}'.format(cut_gev*1e3) for cut_gev in lead_jet_cuts]
    cuts_j_sl = ['jet2_pt > {0}'.format(cut_gev*1e3) for cut_gev in sublead_jet_cuts]

    # One significance scan per kinematic variable, parallelized over procs.
    sigs_t_l = map_pool(SigProcess, [(Category_VBF, cut) for cut in cuts_l],
        mass=args.mass, systematics=args.systematics, n_jobs=args.procs)
    sigs_t_sl = map_pool(SigProcess, [(Category_VBF, cut) for cut in cuts_sl],
        mass=args.mass, systematics=args.systematics, n_jobs=args.procs)
    sigs_j_l = map_pool(SigProcess, [(Category_VBF, cut) for cut in cuts_j_l],
        mass=args.mass, systematics=args.systematics, n_jobs=args.procs)
    sigs_j_sl = map_pool(SigProcess, [(Category_VBF, cut) for cut in cuts_j_sl],
        mass=args.mass, systematics=args.systematics, n_jobs=args.procs)
    log.info(sigs_t_l)
    log.info(sigs_t_sl)
    log.info(sigs_j_l)
    log.info(sigs_j_sl)

    # Convert significances (sigma) to one-sided p-values.
    pvals_t_l = [gaussian_cdf_c(sig) for sig in sigs_t_l]
    pvals_t_sl = [gaussian_cdf_c(sig) for sig in sigs_t_sl]
    pvals_j_l = [gaussian_cdf_c(sig) for sig in sigs_j_l]
    pvals_j_sl = [gaussian_cdf_c(sig) for sig in sigs_j_sl]
    log.info(pvals_t_l)
    log.info(pvals_t_sl)
    log.info(pvals_j_l)
    log.info(pvals_j_sl)

    # x-axis is the scan step index (each scan has 20 thresholds).
    thres = range(0, 20)
    c = Canvas()
    _, graphs = pvalue_plot(thres, [pvals_t_l, pvals_t_sl, pvals_j_l, pvals_j_sl],
        pad=c, xtitle='threshold step',
        yrange=(gaussian_cdf_c(2.5), 50),
        linecolor=['blue', 'red', 'green', 'purple'])
    labels = []
    labels.append('scan p_{T}(#tau_{1}): 35 - 75 GeV (2 GeV)')
    labels.append('scan p_{T}(#tau_{2}): 25 - 65 GeV (2 GeV)')
    labels.append('scan p_{T}(j_{1}): 50 - 90 GeV (2 GeV)')
    labels.append('scan p_{T}(j_{2}): 30 - 70 GeV (2 GeV)')
    for graph, label in zip(graphs, labels):
        graph.title = label
        graph.legendstyle = 'L'
    leg = Legend(graphs,
                 leftmargin=0.1,
                 rightmargin=0.5,
                 header=Category_VBF.label,
                 textsize=20)
    leg.Draw('same')
    ATLAS_label(0.2, 0.88, text="Internal", sqrts=8, pad=c, sep=0.12)
    c.SaveAs('toto.png')