Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,8 @@
"scetlib_dyturbo_LatticeNP_CT18Z_N3p0LL_N2LO_pdfas": {"pdf": "ct18z"},
"scetlib_dyturbo_LatticeNP_CT18Z_N3p1LL_N2LO_pdfas": {"pdf": "ct18z"},
"scetlib_dyturbo_LatticeNP_CT18Z_N4p0LL_N2LO_pdfas": {"pdf": "ct18z"},
"scetlib_nnlojet_LatticeNPCoarse_CT18Z_N3p1LL_N3LO_pdfas": {"pdf": "ct18z"},
"scetlib_nnlojet_LatticeNPCoarse_CT18Z_N4p0LL_N3LO_pdfas": {"pdf": "ct18z"},
"scetlib_nnlojet_CT18Z_N3p1LL_N3LO_pdfas": {"pdf": "ct18z"},
"scetlib_nnlojet_CT18Z_N4p0LL_N3LO_pdfas": {"pdf": "ct18z"},
"scetlib_nnlojet_MSHT20an3lo_N4p0LL_N3LO_pdfas": {"pdf": "msht20an3lo"},
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,8 @@
"scetlib_dyturbo_LatticeNP_CT18Z_N3p0LL_N2LO_pdfvars": {"pdf": "ct18z"},
"scetlib_dyturbo_LatticeNP_CT18Z_N3p1LL_N2LO_pdfvars": {"pdf": "ct18z"},
"scetlib_dyturbo_LatticeNP_CT18Z_N4p0LL_N2LO_pdfvars": {"pdf": "ct18z"},
"scetlib_nnlojet_LatticeNPCoarse_CT18Z_N3p1LL_N3LO_pdfvars": {"pdf": "ct18z"},
"scetlib_nnlojet_LatticeNPCoarse_CT18Z_N4p0LL_N3LO_pdfvars": {"pdf": "ct18z"},
"scetlib_dyturbo_LatticeNP_CT18_N3p0LL_N2LO_pdfvars": {"pdf": "ct18"},
"scetlib_dyturbo_LatticeNP_HERAPDF20_N3p0LL_N2LO_pdfvars": {
"pdf": "herapdf20 herapdf20ext"
Expand Down
26 changes: 23 additions & 3 deletions scripts/corrections/make_rescaled_theory_corr.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,25 @@ def corr_name(corrf):
return match[1]


def find_corr_hist_name(corr_dict, proc, corrf, suffix):
    """Return the key in ``corr_dict[proc]`` for file *corrf* ending in *suffix*.

    Two spellings are probed: the base correction name directly concatenated
    with the suffix, and the base name (trailing underscores stripped) joined
    to the suffix with a single underscore.  Raises KeyError listing the
    attempted names and any existing keys with the requested suffix when
    neither spelling is present.
    """
    base = corr_name(corrf)
    # dict.fromkeys de-duplicates while keeping the probe order stable
    candidates = list(
        dict.fromkeys([f"{base}{suffix}", f"{base.rstrip('_')}_{suffix}"])
    )

    hists = corr_dict[proc]
    found = next((c for c in candidates if c in hists), None)
    if found is not None:
        return found

    available = [k for k in hists.keys() if k.endswith(suffix)]
    raise KeyError(
        f"Could not find histogram with suffix '{suffix}' for file {corrf}. "
        f"Tried {candidates}. Available matches: {available}"
    )


def parse_args():
parser = parsing.base_parser()
parser.add_argument(
Expand Down Expand Up @@ -63,10 +82,11 @@ def main():

proc = "Z" if "CorrZ" in args.refCorr else "W"

refcorr = ref[proc][corr_name(args.refCorr) + "_minnlo_ratio"]
refcorr_name = find_corr_hist_name(ref, proc, args.refCorr, "minnlo_ratio")
refcorr = ref[proc][refcorr_name]

rescale_corr_name = (
f"{corr_name(args.rescaleCorr)}_minnlo_ratio"
find_corr_hist_name(rescale, proc, args.rescaleCorr, "minnlo_ratio")
if "dataPtll" not in args.rescaleCorr
else "MC_data_ratio"
)
Expand All @@ -82,7 +102,7 @@ def main():
# Broadcast syst axis
new_corr[...] = (refcorr.view().T * central_val_corr.T).T

new_name = args.newCorrName + "_minnlo_ratio"
new_name = f"{args.newCorrName.rstrip('_')}_minnlo_ratio"
if "dataPtll" in args.newCorrName:
new_name = "MC_data_ratio"
# Avoid overwriting
Expand Down
7 changes: 7 additions & 0 deletions scripts/rabbit/setupRabbit.py
Original file line number Diff line number Diff line change
Expand Up @@ -724,6 +724,12 @@ def make_parser(parser=None, argv=None):
type=float,
help="Scale the minnlo qcd scale uncertainties by this factor",
)
parser.add_argument(
"--scaleNPLambda4",
default=1.0,
type=float,
help="Scale the nonperturbative lambda4 uncertainty by this factor",
)
parser.add_argument(
"--symmetrizeTheoryUnc",
default="quadratic",
Expand Down Expand Up @@ -1772,6 +1778,7 @@ def setup(
pdf_from_corr=args.pdfUncFromCorr,
as_from_corr=not args.asUncFromUncorr,
scale_pdf_unc=args.scalePdf,
scale_np_lambda4=args.scaleNPLambda4,
samples=theorySystSamples,
minnlo_unc=args.minnloScaleUnc,
minnlo_scale=args.scaleMinnloScale,
Expand Down
11 changes: 9 additions & 2 deletions wremnants/postprocessing/rabbit_theory_helper.py
Original file line number Diff line number Diff line change
Expand Up @@ -81,6 +81,7 @@ def __init__(self, label, datagroups, args, hasNonsigSamples=False):
self.np_model = "Delta_Lambda"
self.pdf_from_corr = False
self.scale_pdf_unc = -1.0
self.scale_np_lambda4 = 1.0
self.mirror_tnp = True
self.minnlo_unc = "byHelicityPt"
self.helicity_fit_unc = False
Expand Down Expand Up @@ -112,6 +113,7 @@ def configure(
pdf_operation=None,
samples=[],
scale_pdf_unc=-1.0,
scale_np_lambda4=1.0,
minnlo_unc="byHelicityPt",
minnlo_scale=1.0,
from_hels=False,
Expand All @@ -131,6 +133,7 @@ def configure(
self.as_from_corr = pdf_from_corr or as_from_corr
self.pdf_operation = pdf_operation
self.scale_pdf_unc = scale_pdf_unc
self.scale_np_lambda4 = scale_np_lambda4
self.samples = samples
self.helicity_fit_unc = False
self.minnlo_scale = minnlo_scale
Expand Down Expand Up @@ -824,7 +827,7 @@ def add_correlated_np_uncertainties(self):
np_map = {
"lambda2": ["0.0", "0.5"],
"delta_lambda2": ["-0.02", "0.02"],
"lambda4": ["0.01", "0.16"],
"lambda4": ["0.01", "0.12"],
}
elif "Lambda" in self.np_model:
np_map = {
Expand Down Expand Up @@ -860,6 +863,7 @@ def operation(h, entries):
for nuisance, vals in np_map.items():
entries = [nuisance + v for v in vals]
rename = f"scetlibNP{nuisance}"
scale = self.scale_np_lambda4 if nuisance.lower() == "lambda4" else 1.0
# operation = lambda h : h[{self.syst_ax : entries}]
self.datagroups.addSystematic(
self.corr_hist_name,
Expand All @@ -870,6 +874,7 @@ def operation(h, entries):
preOp=operation,
preOpArgs=dict(entries=entries),
outNames=[f"{rename}Down", f"{rename}Up"],
scale=scale,
name=rename,
)

Expand All @@ -878,7 +883,7 @@ def add_uncorrelated_np_uncertainties(self):
np_map = {
"lambda2": ["0.0", "0.5"],
"delta_lambda2": ["-0.02", "0.02"],
"lambda4": ["0.01", "0.16"],
"lambda4": ["0.01", "0.12"],
}
elif "Lambda" in self.np_model:
np_map = {
Expand Down Expand Up @@ -945,6 +950,7 @@ def add_uncorrelated_np_uncertainties(self):
for nuisance, vals in np_map.items():
entries = [nuisance + v for v in vals]
rename = f"scetlibNP{label}{nuisance}"
scale = self.scale_np_lambda4 if nuisance.lower() == "lambda4" else 1.0
self.datagroups.addSystematic(
self.np_hist_name,
processes=[sample_group],
Expand All @@ -965,6 +971,7 @@ def add_uncorrelated_np_uncertainties(self):
(entries[0], f"{rename}Down"),
],
skipEntries=[{self.syst_ax: ["central", "pdf0"]}],
scale=scale,
name=rename,
)

Expand Down
13 changes: 13 additions & 0 deletions wremnants/production/theory_corrections.py
Original file line number Diff line number Diff line change
Expand Up @@ -95,6 +95,12 @@ def make_theory_corr_weight_info(pdf, *, alphas=False, renorm=False):
"scetlib_dyturbo_LatticeNP_CT18Z_N4p0LL_N2LO_pdfas": make_theory_corr_weight_info(
"ct18z", alphas=True, renorm=True
),
"scetlib_nnlojet_LatticeNPCoarse_CT18Z_N3p1LL_N3LO_pdfas": make_theory_corr_weight_info(
"ct18z", alphas=True, renorm=True
),
"scetlib_nnlojet_LatticeNPCoarse_CT18Z_N4p0LL_N3LO_pdfas": make_theory_corr_weight_info(
"ct18z", alphas=True, renorm=True
),
"scetlib_dyturbo_LatticeNP_CT18_N3p0LL_N2LO_pdfas": make_theory_corr_weight_info(
"ct18", alphas=True, renorm=True
),
Expand Down Expand Up @@ -128,6 +134,12 @@ def make_theory_corr_weight_info(pdf, *, alphas=False, renorm=False):
"scetlib_dyturbo_LatticeNP_CT18Z_N4p0LL_N2LO_pdfvars": make_theory_corr_weight_info(
"ct18z"
),
"scetlib_nnlojet_LatticeNPCoarse_CT18Z_N3p1LL_N3LO_pdfvars": make_theory_corr_weight_info(
"ct18z"
),
"scetlib_nnlojet_LatticeNPCoarse_CT18Z_N4p0LL_N3LO_pdfvars": make_theory_corr_weight_info(
"ct18z"
),
"scetlib_dyturbo_LatticeNP_CT18_N3p0LL_N2LO_pdfvars": make_theory_corr_weight_info(
"ct18"
),
Expand Down Expand Up @@ -199,6 +211,7 @@ def load_corr_helpers(
(generator == generators[0])
and ("nnlojet" in generator.lower())
and ("pdfas" not in generator.lower())
and ("pdfvars" not in generator.lower())
):
logger.info(
f"Adding statistical uncertainties for correction {generator}"
Expand Down
23 changes: 21 additions & 2 deletions wremnants/utilities/io_tools/input_tools.py
Original file line number Diff line number Diff line change
Expand Up @@ -275,13 +275,32 @@ def read_nnlojet_file(
return h * 1e-3


def read_nnlojet_ybin(refname, ybins, charge=None):
def resolve_nnlojet_ybin_filename(refname, ybins):
    """Resolve the on-disk filename for an NNLOJET rapidity-bin data file.

    NNLOJET files encode the y-bin bounds in the name as
    ``<refname>__<lo>__<hi>.dat`` with the decimal point replaced by ``p``
    (e.g. ``1.5`` -> ``1p5``).  Integer-valued bounds may be written either
    with an explicit ``p0`` (``2p0``) or bare (``2``), so every combination
    of the two spellings is probed and the first existing file is returned.
    If no candidate exists, the fully formatted (``p0``) name is returned so
    downstream code fails with the conventional filename in its message.
    """
    format_decimal = lambda x: (
        # Quarter-step bounds (x.25/x.75) keep two decimals, others one.
        "0" if x == 0 else f"{round(x, 1+(x % 1 in [0.25, 0.75]))}".replace(".", "p")
    )

    def alternate_decimal(value):
        # Bare-integer spelling: strip a trailing "p0" entirely ("2p0" -> "2").
        # Fix: the previous formatted[:-1] dropped only the "0", leaving a
        # dangling "p" ("2p") that can never match a real filename.
        formatted = format_decimal(value)
        return formatted[:-2] if formatted.endswith("p0") else formatted

    bounds = tuple(format_decimal(y) for y in ybins)
    candidates = [
        f"{refname}__{bounds[0]}__{bounds[1]}.dat",
        f"{refname}__{alternate_decimal(ybins[0])}__{bounds[1]}.dat",
        f"{refname}__{bounds[0]}__{alternate_decimal(ybins[1])}.dat",
        f"{refname}__{alternate_decimal(ybins[0])}__{alternate_decimal(ybins[1])}.dat",
    ]
    # dict.fromkeys de-duplicates while preserving probe priority.
    for candidate in dict.fromkeys(candidates):
        if os.path.isfile(candidate):
            return candidate
    return candidates[0]


def read_nnlojet_ybin(refname, ybins, charge=None):
    """Load a single NNLOJET rapidity bin as a histogram carrying a "Y" axis."""
    y_axis = hist.axis.Variable(ybins, name="Y")
    filename = resolve_nnlojet_ybin_filename(refname, ybins)
    return read_nnlojet_file(filename, other_axes=[y_axis], charge=charge)
)
Expand Down