Skip to content

Commit 130be3f

Browse files
authored
Merge branch 'master' into fix-zero-scaled-grids
2 parents 1b27a25 + d60c14e commit 130be3f

9 files changed

Lines changed: 198 additions & 115 deletions

File tree

CHANGELOG.md

Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,19 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
1414
offers this functionality via `pineappl write --repair` and it can also be
1515
accessed via Python
1616

17+
### Fixed
18+
19+
- added a missing implementation for a branch in `Grid::merge` that was
20+
triggered when exporting some PineAPPL grids generated from the 'pinejet'
21+
group
22+
- fixed wrong coupling orders when exporting to APPLgrid. This happened when
23+
the PineAPPL grid stored its perturbative orders in any sequence other than 'LO', 'NLO',
24+
'NNLO'
25+
- fixed a bug that caused exported grids to fail the comparison check when the
26+
convolution functions were proton-anti-proton; APPLgrid doesn't store the
27+
types of convolution functions, so we simply convert the grid to use only
28+
proton PDFs
29+
1730
## [1.1.0] - 08/07/2025
1831

1932
### Added

maintainer/test-cli-export.sh

Lines changed: 66 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,66 @@
1+
#!/bin/bash -x

# Download a set of PineAPPL grids from Ploughshare, export each one to the
# APPLgrid format and re-import it, to exercise the CLI round-trip conversion.

set -euo pipefail

grids=(
    ## Ploughshare

    # Group: pinejet
    "https://ploughshare.web.cern.ch/ploughshare/db/pinejet/pinejet-atlas-wm-arxiv-1109.5141/pinejet-atlas-wm-arxiv-1109.5141.tgz"
    "https://ploughshare.web.cern.ch/ploughshare/db/pinejet/pinejet-atlas-wm-arxiv-1603.09222/pinejet-atlas-wm-arxiv-1603.09222.tgz"
    "https://ploughshare.web.cern.ch/ploughshare/db/pinejet/pinejet-atlas-wm-arxiv-1612.03016/pinejet-atlas-wm-arxiv-1612.03016.tgz"
    "https://ploughshare.web.cern.ch/ploughshare/db/pinejet/pinejet-atlas-wp-arxiv-1109.5141/pinejet-atlas-wp-arxiv-1109.5141.tgz"
    "https://ploughshare.web.cern.ch/ploughshare/db/pinejet/pinejet-atlas-wp-arxiv-1603.09222/pinejet-atlas-wp-arxiv-1603.09222.tgz"
    "https://ploughshare.web.cern.ch/ploughshare/db/pinejet/pinejet-atlas-wp-arxiv-1612.03016/pinejet-atlas-wp-arxiv-1612.03016.tgz"
    "https://ploughshare.web.cern.ch/ploughshare/db/pinejet/pinejet-atlas-z0-arxiv-1109.5141/pinejet-atlas-z0-arxiv-1109.5141.tgz"
    "https://ploughshare.web.cern.ch/ploughshare/db/pinejet/pinejet-atlas-z0-arxiv-1305.4192/pinejet-atlas-z0-arxiv-1305.4192.tgz"
    "https://ploughshare.web.cern.ch/ploughshare/db/pinejet/pinejet-atlas-z0-arxiv-1404.1212/pinejet-atlas-z0-arxiv-1404.1212.tgz"
    "https://ploughshare.web.cern.ch/ploughshare/db/pinejet/pinejet-atlas-z0-arxiv-1603.09222/pinejet-atlas-z0-arxiv-1603.09222.tgz"
    "https://ploughshare.web.cern.ch/ploughshare/db/pinejet/pinejet-atlas-z0-arxiv-1606.01736/pinejet-atlas-z0-arxiv-1606.01736.tgz"
    "https://ploughshare.web.cern.ch/ploughshare/db/pinejet/pinejet-atlas-z0-arxiv-1612.03016/pinejet-atlas-z0-arxiv-1612.03016.tgz"
    "https://ploughshare.web.cern.ch/ploughshare/db/pinejet/pinejet-atlas-z0-arxiv-1710.05167/pinejet-atlas-z0-arxiv-1710.05167.tgz"
    "https://ploughshare.web.cern.ch/ploughshare/db/pinejet/pinejet-cdf-z0-arxiv-0908.3914/pinejet-cdf-z0-arxiv-0908.3914.tgz"
    "https://ploughshare.web.cern.ch/ploughshare/db/pinejet/pinejet-cms-wm-arxiv-1206.2598/pinejet-cms-wm-arxiv-1206.2598.tgz"
    "https://ploughshare.web.cern.ch/ploughshare/db/pinejet/pinejet-cms-wm-arxiv-1312.6283/pinejet-cms-wm-arxiv-1312.6283.tgz"
    "https://ploughshare.web.cern.ch/ploughshare/db/pinejet/pinejet-cms-wm-arxiv-1603.01803/pinejet-cms-wm-arxiv-1603.01803.tgz"
    "https://ploughshare.web.cern.ch/ploughshare/db/pinejet/pinejet-cms-wp-arxiv-1206.2598/pinejet-cms-wp-arxiv-1206.2598.tgz"
    "https://ploughshare.web.cern.ch/ploughshare/db/pinejet/pinejet-cms-wp-arxiv-1312.6283/pinejet-cms-wp-arxiv-1312.6283.tgz"
    "https://ploughshare.web.cern.ch/ploughshare/db/pinejet/pinejet-cms-wp-arxiv-1603.01803/pinejet-cms-wp-arxiv-1603.01803.tgz"
    #"https://ploughshare.web.cern.ch/ploughshare/db/pinejet/pinejet-cms-z0-arxiv-1310.7291/pinejet-cms-z0-arxiv-1310.7291.tgz" # fails due to static-scale optimization
    "https://ploughshare.web.cern.ch/ploughshare/db/pinejet/pinejet-d0-wm-arxiv-1309.2591/pinejet-d0-wm-arxiv-1309.2591.tgz"
    "https://ploughshare.web.cern.ch/ploughshare/db/pinejet/pinejet-d0-wp-arxiv-1309.2591/pinejet-d0-wp-arxiv-1309.2591.tgz"
    "https://ploughshare.web.cern.ch/ploughshare/db/pinejet/pinejet-d0-z0-arxiv-0702025/pinejet-d0-z0-arxiv-0702025.tgz"
    "https://ploughshare.web.cern.ch/ploughshare/db/pinejet/pinejet-lhcb-wm-arxiv-1505.07024/pinejet-lhcb-wm-arxiv-1505.07024.tgz"
    "https://ploughshare.web.cern.ch/ploughshare/db/pinejet/pinejet-lhcb-wm-arxiv-1511.08039/pinejet-lhcb-wm-arxiv-1511.08039.tgz"
    "https://ploughshare.web.cern.ch/ploughshare/db/pinejet/pinejet-lhcb-wp-arxiv-1505.07024/pinejet-lhcb-wp-arxiv-1505.07024.tgz"
    "https://ploughshare.web.cern.ch/ploughshare/db/pinejet/pinejet-lhcb-wp-arxiv-1511.08039/pinejet-lhcb-wp-arxiv-1511.08039.tgz"
    "https://ploughshare.web.cern.ch/ploughshare/db/pinejet/pinejet-lhcb-z0-arxiv-1505.07024/pinejet-lhcb-z0-arxiv-1505.07024.tgz"
    "https://ploughshare.web.cern.ch/ploughshare/db/pinejet/pinejet-lhcb-z0-arxiv-1511.08039/pinejet-lhcb-z0-arxiv-1511.08039.tgz"
    "https://ploughshare.web.cern.ch/ploughshare/db/pinejet/pinejet-lhcb-z0-arxiv-1607.06495/pinejet-lhcb-z0-arxiv-1607.06495.tgz"
)

tmp=$(mktemp -d)
cd "${tmp}"

# NOTE: SIGKILL cannot be trapped; clean up on normal exit and on
# interruption/termination instead. Single quotes are fine because `tmp` is
# never reassigned.
trap 'cd && rm -rf "${tmp}"' EXIT INT TERM

# quote the array expansion to avoid word splitting of the URLs
for url in "${grids[@]}"; do
    archive=${url##*/}
    # `--no-clobber` makes repeated runs reuse the cached download; `|| true`
    # keeps `set -e` from aborting when the file already exists
    wget --no-verbose --no-clobber "${url}" -O /tmp/"${archive}" || true
    mkdir subdir
    tar xzf /tmp/"${archive}" -C subdir

    # NUL-delimited find output is safe for paths containing whitespace;
    # `! -name '.*'` skips hidden files (previously checked via basename)
    while IFS= read -r -d '' grid; do
        converted_grid="${grid}".appl
        reimported_grid="${grid}".new.pineappl.lz4
        pineappl export --accuracy 1e-12 "${grid}" "${converted_grid}" NNPDF31_nnlo_as_0118_luxqed
        pineappl import --accuracy 1e-12 "${converted_grid}" "${reimported_grid}" NNPDF31_nnlo_as_0118_luxqed
        du -h "${converted_grid}" "${reimported_grid}"
    done < <(find subdir -name '*.lz4' ! -name '.*' -print0)

    rm -r subdir
done

pineappl/src/evolution.rs

Lines changed: 20 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -446,23 +446,27 @@ pub(crate) fn evolve_slice(
446446
}
447447

448448
for (pids1, factor) in channel1.entry() {
449-
for (fk_table, ops) in
450-
channels0
451-
.iter()
452-
.zip(tables.iter_mut())
453-
.filter_map(|(pids0, fk_table)| {
454-
izip!(pids0, pids1, &pids01, &eko_slices)
455-
.map(|(&pid0, &pid1, pids, slices)| {
456-
pids.iter().zip(slices).find_map(|(&(p0, p1), op)| {
457-
((p0 == pid0) && (p1 == pid1)).then_some(op)
458-
})
459-
})
460-
// TODO: avoid using `collect`
461-
.collect::<Option<Vec<_>>>()
462-
.map(|ops| (fk_table, ops))
449+
// find the tuple of EKOs that evolve the current channel entry of the grid into
450+
// every channel of the FK-table
451+
let tmp = channels0.iter().map(|pids0| {
452+
izip!(pids0, pids1, &pids01, &eko_slices)
453+
.map(|(&pid0, &pid1, pids, slices)| {
454+
// for each convolution ...
455+
pids.iter().zip(slices).find_map(|(&(p0, p1), op)| {
456+
// find the EKO that matches both the FK-table and the grid PID
457+
((p0 == pid0) && (p1 == pid1)).then_some(op)
458+
})
463459
})
464-
{
465-
general_tensor_mul(*factor, array.view(), &ops, fk_table.view_mut());
460+
// if an EKO isn't found, it's zero and therefore the whole FK-table
461+
// channel contribution will be zero
462+
.collect::<Option<Box<[_]>>>()
463+
});
464+
465+
for (fk_table, ops) in tables.iter_mut().zip(tmp) {
466+
// if there's one zero EKO, the entire tuple is `None`
467+
if let Some(ops) = ops {
468+
general_tensor_mul(*factor, array.view(), &ops, fk_table.view_mut());
469+
}
466470
}
467471
}
468472
}

pineappl/src/grid.rs

Lines changed: 2 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -964,17 +964,8 @@ impl Grid {
964964
.multi_slice_mut((s![.., .., index], s![.., .., other_index]));
965965

966966
for (lhs, rhs) in a.iter_mut().zip(b.iter_mut()) {
967-
if !rhs.is_empty() {
968-
if lhs.is_empty() {
969-
// we can't merge into an EmptySubgridV1
970-
*lhs = mem::replace(rhs, EmptySubgridV1.into());
971-
// transpose `lhs`
972-
todo!();
973-
} else {
974-
lhs.merge(rhs, Some((a_subgrid, b_subgrid)));
975-
*rhs = EmptySubgridV1.into();
976-
}
977-
}
967+
lhs.merge(rhs, Some((a_subgrid, b_subgrid)));
968+
*rhs = EmptySubgridV1.into();
978969
}
979970
}
980971
}

pineappl/src/subgrid.rs

Lines changed: 11 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -418,15 +418,21 @@ impl SubgridEnum {
418418
if other.is_empty() {
419419
return;
420420
}
421+
421422
if let Self::EmptySubgridV1(_) = self {
422-
if transpose.is_none() {
423-
*self = other.clone();
423+
// change the type of `self` to the type of `other`
424+
*self = other.clone();
425+
426+
if transpose.is_some() {
427+
// TODO: emptying `self` could be done more efficiently, we're probably storing a
428+
// lot of zeros here
429+
self.scale(0.0);
424430
} else {
425-
todo!();
431+
return;
426432
}
427-
} else {
428-
self.merge_impl(other, transpose);
429433
}
434+
435+
self.merge_impl(other, transpose);
430436
}
431437
}
432438

pineappl_applgrid/build.rs

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -24,8 +24,10 @@ fn main() {
2424
)
2525
.unwrap();
2626

27+
// 1.6.{39,40} weren't released
2728
let tested_versions = [
28-
"1.6.27", "1.6.28", "1.6.29", "1.6.30", "1.6.31", "1.6.32", "1.6.35", "1.6.36",
29+
"1.6.27", "1.6.28", "1.6.29", "1.6.30", "1.6.31", "1.6.32", "1.6.35", "1.6.36", "1.6.37",
30+
"1.6.38", "1.6.41", "1.6.42", "1.6.43", "1.6.44",
2931
];
3032

3133
if !tested_versions

pineappl_cli/src/export/applgrid.rs

Lines changed: 31 additions & 30 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@ use cxx::{let_cxx_string, UniquePtr};
33
use float_cmp::approx_eq;
44
use lhapdf::Pdf;
55
use ndarray::{s, Axis};
6-
use pineappl::boc::{Kinematics, Order};
6+
use pineappl::boc::{Channel, Kinematics, Order};
77
use pineappl::grid::Grid;
88
use pineappl::interpolation::{Interp, InterpMeth, Map, ReweightMeth};
99
use pineappl::pids::PidBasis;
@@ -111,8 +111,17 @@ pub fn convert_into_applgrid(
111111
bail!("grid has non-consecutive bin limits, which APPLgrid does not support");
112112
}
113113

114-
if grid.convolutions().len() > 2 {
115-
bail!("APPLgrid does not support grids with more than two convolutions");
114+
match grid.convolutions() {
115+
[_] => {}
116+
[a, b] => {
117+
if (a != b) && (a.cc() == *b) {
118+
// use charge conjugate to map hadron-anti-hadron grids to use the same single
119+
// convolution function
120+
let index = usize::from(a.pid() < 0);
121+
grid.charge_conjugate(index);
122+
}
123+
}
124+
_ => bail!("APPLgrid does not support grids with more than two convolutions"),
116125
}
117126

118127
// APPLgrid only understands PDG PIDs
@@ -121,7 +130,7 @@ pub fn convert_into_applgrid(
121130
let non_trivial_factors = grid
122131
.channels()
123132
.iter()
124-
.flat_map(|channel| channel.entry())
133+
.flat_map(Channel::entry)
125134
.any(|&(_, factor)| !approx_eq!(f64, factor, 1.0, ulps = 4));
126135

127136
// APPLgrid doesn't support non-trivial factors
@@ -156,9 +165,16 @@ pub fn convert_into_applgrid(
156165
.collect();
157166

158167
// `id` must end with '.config' for APPLgrid to know its type is `lumi_pdf`
159-
let id = "PineAPPL-Lumi.config";
168+
let id = format!(
169+
"{}.config",
170+
output
171+
.file_stem()
172+
// UNWRAP: because we write to that file in the end, there always must be a file name
173+
.unwrap()
174+
.to_string_lossy()
175+
);
160176
// this object is managed by APPLgrid internally
161-
ffi::make_lumi_pdf(id, &combinations).into_raw();
177+
ffi::make_lumi_pdf(&id, &combinations).into_raw();
162178

163179
let limits: Vec<_> = grid
164180
.bwfl()
@@ -200,17 +216,17 @@ pub fn convert_into_applgrid(
200216
- lo_alphas;
201217

202218
let mut applgrid =
203-
ffi::make_empty_grid(&limits, id, lo_alphas.into(), loops.into(), "f2", "h0");
219+
ffi::make_empty_grid(&limits, &id, lo_alphas.into(), loops.into(), "f2", "h0");
204220

205-
let has_pdf1 = !grid.convolutions().is_empty();
206-
let has_pdf2 = grid.convolutions().get(1).is_some();
221+
// APPLgrid has either two or one convolution(s)
222+
let convolutions = grid.convolutions().len();
207223

208-
for (appl_order, order) in order_mask
224+
for order in order_mask
209225
.iter()
210226
.enumerate()
211227
.filter_map(|(index, keep)| keep.then_some(index))
212-
.enumerate()
213228
{
229+
let appl_order = grid.orders()[order].alphas - lo_alphas;
214230
let factor = TAU.powi(grid.orders()[order].alphas.into());
215231

216232
for (bin, subgrids) in grid
@@ -275,9 +291,7 @@ pub fn convert_into_applgrid(
275291
})
276292
.collect::<Result<_>>()?;
277293

278-
// in the DIS case APPLgrid always uses the first x dimension
279-
280-
let (x1_grid, x2_grid) = if has_pdf1 && has_pdf2 {
294+
let (x1_grid, x2_grid) = if convolutions == 2 {
281295
(
282296
grid.kinematics()
283297
.iter()
@@ -298,26 +312,13 @@ pub fn convert_into_applgrid(
298312
// TODO: convert this into an error
299313
.unwrap(),
300314
)
301-
} else if has_pdf1 {
302-
(
303-
grid.kinematics()
304-
.iter()
305-
.zip(subgrid.node_values())
306-
.find_map(|(kin, node_values)| {
307-
matches!(kin, &Kinematics::X(idx) if idx == 0)
308-
.then_some(node_values)
309-
})
310-
// TODO: convert this into an error
311-
.unwrap(),
312-
Vec::new(),
313-
)
314315
} else {
315316
(
316317
grid.kinematics()
317318
.iter()
318319
.zip(subgrid.node_values())
319320
.find_map(|(kin, node_values)| {
320-
matches!(kin, &Kinematics::X(idx) if idx == 1)
321+
matches!(kin, &Kinematics::X(idx) if idx == 0)
321322
.then_some(node_values)
322323
})
323324
// TODO: convert this into an error
@@ -376,7 +377,7 @@ pub fn convert_into_applgrid(
376377
weightgrid.as_mut(),
377378
appl_q2_idx,
378379
appl_x1_idx[indices[1]],
379-
if has_pdf1 && has_pdf2 {
380+
if convolutions == 2 {
380381
appl_x2_idx[indices[2]]
381382
} else {
382383
0
@@ -394,7 +395,7 @@ pub fn convert_into_applgrid(
394395
unsafe {
395396
applgrid.pin_mut().add_igrid(
396397
bin.try_into().unwrap(),
397-
appl_order.try_into().unwrap(),
398+
appl_order.into(),
398399
igrid.into_raw(),
399400
);
400401
}

0 commit comments

Comments
 (0)