Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
`pineappl_grid_evolve_info`, and `pineappl_grid_evolve` to evolve grids
- C API: added `pineappl_fktable_optimize` to optimize FK Table-like objects
given an optimization assumption
- added methods `Grid::merge_channel_factors` and `Channel::factor`

## [1.0.0] - 10/06/2025

Expand Down
38 changes: 38 additions & 0 deletions pineappl/src/boc.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1086,6 +1086,44 @@ impl Channel {
}
})
}

/// Finds the factor with the smallest absolute value in the channel and
/// divides all coefficients by this value.
///
/// # Returns
///
/// A tuple containing:
/// - the factored-out coefficient
/// - a new `Channel` with all coefficients divided by the factored value
///
/// # Panics
///
/// Panics if this `Channel` contains no entries; a properly constructed
/// `Channel` always has at least one entry.
#[must_use]
pub fn factor(&self) -> (f64, Self) {
    let factor = self
        .entry
        .iter()
        .map(|(_, f)| *f)
        // `total_cmp` imposes the IEEE 754 `totalOrder` on `f64`, so the
        // comparison cannot fail — even NaN coefficients no longer panic
        .min_by(|a, b| a.abs().total_cmp(&b.abs()))
        // UNWRAP: every `Channel` has at least one entry
        .unwrap();

    let new_channel = Self::new(
        self.entry
            .iter()
            .cloned()
            .map(|(e, f)| (e, f / factor))
            .collect(),
    );

    (factor, new_channel)
}
}

impl FromStr for Channel {
Expand Down
12 changes: 10 additions & 2 deletions pineappl/src/fk_table.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,8 @@
use super::boc::{Channel, Kinematics, Order};
use super::convolutions::ConvolutionCache;
use super::error::{Error, Result};
use super::grid::Grid;
use super::pids::OptRules;
use super::grid::{Grid, GridOptFlags};
use super::pids::{OptRules, PidBasis};
use super::subgrid::{self, EmptySubgridV1, Subgrid};
use ndarray::{s, ArrayD};
use std::collections::BTreeMap;
Expand Down Expand Up @@ -249,6 +249,14 @@ impl FkTable {
)
}

/// Rotate the FK Table into the specified basis.
///
/// Besides the basis rotation itself this also splits the channels, merges
/// the channel factors into the subgrids and re-optimizes the grid with all
/// optimization flags enabled.
pub fn rotate_pid_basis(&mut self, pid_basis: PidBasis) {
    self.grid.rotate_pid_basis(pid_basis);
    // NOTE(review): the order of the following calls looks intentional —
    // channels are split before their factors are merged away; confirm that
    // `merge_channel_factors` relies on single-entry channels
    self.grid.split_channels();
    self.grid.merge_channel_factors();
    // shrink the internal data structures again after the rotation
    self.grid.optimize_using(GridOptFlags::all());
}

/// Optimize the size of this FK-table by throwing away heavy quark flavors assumed to be zero
/// at the FK-table's scales and calling [`Grid::optimize`].
pub fn optimize(&mut self, assumptions: FkAssumptions) {
Expand Down
44 changes: 43 additions & 1 deletion pineappl/src/grid.rs
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,8 @@ use std::{iter, mem};
const BIN_AXIS: Axis = Axis(1);

// const ORDER_AXIS: Axis = Axis(0);
// const CHANNEL_AXIS: Axis = Axis(2);

const CHANNEL_AXIS: Axis = Axis(2);

#[derive(Clone, Deserialize, Serialize)]
struct Mmv4;
Expand Down Expand Up @@ -1482,6 +1483,21 @@ impl Grid {
})
.collect();
}

/// Merges the factors of the channels into the subgrids to normalize channel coefficients.
///
/// This method factors out the smallest absolute coefficient from each channel using
/// [`boc::Channel::factor`] and then scales the corresponding subgrids by these factors.
pub fn merge_channel_factors(&mut self) {
let (factors, new_channels): (Vec<_>, Vec<_>) =
self.channels().iter().map(Channel::factor).unzip();

for (mut subgrids_bo, &factor) in self.subgrids.axis_iter_mut(CHANNEL_AXIS).zip(&factors) {
subgrids_bo.map_inplace(|subgrid| subgrid.scale(factor));
}

self.channels = new_channels;
}
}

#[cfg(test)]
Expand Down Expand Up @@ -1820,6 +1836,32 @@ mod tests {
assert_eq!(grid.orders().len(), 1);
}

#[test]
fn grid_merge_channel_factors() {
    let mut grid = Grid::new(
        BinsWithFillLimits::from_fill_limits([0.0, 1.0].to_vec()).unwrap(),
        vec![Order::new(0, 2, 0, 0, 0)],
        vec![Channel::new(vec![(vec![1, -1], 0.5), (vec![2, -2], 2.5)])],
        PidBasis::Pdg,
        vec![Conv::new(ConvType::UnpolPDF, 2212); 2],
        v0::default_interps(false, 2),
        vec![Kinematics::Scale(0), Kinematics::X(0), Kinematics::X(1)],
        Scales {
            ren: ScaleFuncForm::Scale(0),
            fac: ScaleFuncForm::Scale(0),
            frg: ScaleFuncForm::NoScale,
        },
    );

    grid.merge_channel_factors();

    // BUGFIX: the previous version built an `all(...)` expression but never
    // asserted its result, so the test could not fail; it also checked that
    // *all* coefficients become unity, which is wrong — only the smallest
    // absolute coefficient (0.5) is factored out, leaving 0.5/0.5 = 1.0 and
    // 2.5/0.5 = 5.0
    let coefficients: Vec<f64> = grid.channels()[0]
        .entry()
        .iter()
        .map(|(_, coeff)| *coeff)
        .collect();
    assert_eq!(coefficients, [1.0, 5.0]);
}

#[test]
fn grid_convolutions() {
let mut grid = Grid::new(
Expand Down
22 changes: 19 additions & 3 deletions pineappl_cli/src/write.rs
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@ enum OpsArg {
DeleteOrders(Vec<RangeInclusive<usize>>),
DivBinNormDims(Vec<usize>),
MergeBins(Vec<RangeInclusive<usize>>),
MergeChannelFactors(bool),
MulBinNorm(f64),
Optimize(bool),
OptimizeFkTable(FkAssumptions),
Expand Down Expand Up @@ -84,7 +85,7 @@ impl FromArgMatches for MoreArgs {
});
}
}
"optimize" | "split_channels" | "upgrade" => {
"merge_channel_factors" | "optimize" | "split_channels" | "upgrade" => {
let arguments: Vec<Vec<_>> = matches
.remove_occurrences(&id)
.unwrap()
Expand All @@ -95,6 +96,7 @@ impl FromArgMatches for MoreArgs {
for (index, arg) in indices.into_iter().zip(arguments.into_iter()) {
assert_eq!(arg.len(), 1);
args[index] = Some(match id.as_str() {
"merge_channel_factors" => OpsArg::MergeChannelFactors(arg[0]),
"optimize" => OpsArg::Optimize(arg[0]),
"split_channels" => OpsArg::SplitChannels(arg[0]),
"upgrade" => OpsArg::Upgrade(arg[0]),
Expand Down Expand Up @@ -346,6 +348,17 @@ impl Args for MoreArgs {
.value_name("BIN1-BIN2,...")
.value_parser(helpers::parse_integer_range),
)
.arg(
Arg::new("merge_channel_factors")
.action(ArgAction::Append)
.default_missing_value("true")
.help("Merge channel factors into the grid")
.long("merge-channel-factors")
.num_args(0..=1)
.require_equals(true)
.value_name("ON")
.value_parser(clap::value_parser!(bool)),
)
.arg(
Arg::new("mul_bin_norm")
.action(ArgAction::Append)
Expand Down Expand Up @@ -551,6 +564,7 @@ impl Subcommand for Opts {
grid.merge_bins(range)?;
}
}
OpsArg::MergeChannelFactors(true) => grid.merge_channel_factors(),
OpsArg::MulBinNorm(factor) => {
grid.set_bwfl(
BinsWithFillLimits::new(
Expand Down Expand Up @@ -603,8 +617,10 @@ impl Subcommand for Opts {
}
OpsArg::SplitChannels(true) => grid.split_channels(),
OpsArg::Upgrade(true) => grid.upgrade(),
OpsArg::Optimize(false) | OpsArg::SplitChannels(false) | OpsArg::Upgrade(false) => {
}
OpsArg::MergeChannelFactors(false)
| OpsArg::Optimize(false)
| OpsArg::SplitChannels(false)
| OpsArg::Upgrade(false) => {}
}
}

Expand Down
1 change: 1 addition & 0 deletions pineappl_cli/tests/write.rs
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ Options:
--delete-key <KEY> Delete an internal key-value pair
--div-bin-norm-dims <DIM1,...> Divide each bin normalizations by the bin lengths for the given dimensions
--merge-bins <BIN1-BIN2,...> Merge specific bins together
--merge-channel-factors[=<ON>] Merge channel factors into the grid [possible values: true, false]
--mul-bin-norm <NORM> Multiply all bin normalizations with the given factor
--optimize[=<ENABLE>] Optimize internal data structure to minimize memory and disk usage [possible values: true, false]
--optimize-fk-table <OPTIMI> Optimize internal data structure of an FkTable to minimize memory and disk usage [possible values: Nf6Ind, Nf6Sym, Nf5Ind, Nf5Sym, Nf4Ind, Nf4Sym, Nf3Ind, Nf3Sym]
Expand Down
8 changes: 2 additions & 6 deletions pineappl_py/src/fk_table.rs
Original file line number Diff line number Diff line change
Expand Up @@ -224,12 +224,8 @@ impl PyFkTable {
/// ----------
/// pid_basis: PyPidBasis
/// PID basis of the resulting FK Table
pub fn rotate_pid_basis(&mut self, pid_basis: PyPidBasis) -> PyGrid {
let mut grid_mut = self.fk_table.grid().clone();
grid_mut.rotate_pid_basis(pid_basis.into());
PyGrid {
grid: grid_mut.clone(),
}
pub fn rotate_pid_basis(&mut self, pid_basis: PyPidBasis) {
    // delegate to the Rust implementation, which rotates the FK table in
    // place instead of returning a rotated copy
    self.fk_table.rotate_pid_basis(pid_basis.into());
}

/// Write to file.
Expand Down
5 changes: 5 additions & 0 deletions pineappl_py/src/grid.rs
Original file line number Diff line number Diff line change
Expand Up @@ -712,6 +712,11 @@ impl PyGrid {
self.grid.rotate_pid_basis(pid_basis.into());
}

/// Merge the factors of all the channels.
///
/// Delegates to `Grid::merge_channel_factors`, which factors out the
/// smallest absolute coefficient of each channel and scales the
/// corresponding subgrids so that the predictions stay unchanged.
pub fn merge_channel_factors(&mut self) {
    self.grid.merge_channel_factors();
}

/// Scale all subgrids.
///
/// Parameters
Expand Down
10 changes: 7 additions & 3 deletions pineappl_py/tests/test_boc.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,9 +59,13 @@ def _generated_bwfl_fields(n_bins: int, n_dimensions: int) -> BwflFields:

class TestChannel:
    def test_init(self):
        """Construct channels and check their round-trip via ``into_array``."""
        # a channel holding a single partonic combination
        single = Channel([([2, -2], 0.5)])
        assert isinstance(single, Channel)
        assert single.into_array() == [([2, -2], 0.5)]

        # a channel combining several partonic entries
        multi = Channel([([2, -2], 0.5), ([3, -3], 1.5)])
        assert isinstance(multi, Channel)
        assert multi.into_array() == [([2, -2], 0.5), ([3, -3], 1.5)]


class TestKinematics:
Expand Down
45 changes: 37 additions & 8 deletions pineappl_py/tests/test_fk_table.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@
"""

import numpy as np
import tempfile

from pineappl.boc import Channel, Order
from pineappl.convolutions import Conv, ConvType
Expand All @@ -15,7 +14,7 @@


class TestFkTable:
def test_convolve(self, fake_grids):
def test_convolve(self, fake_grids, tmp_path):
# Define convolution types and the initial state hadrons
# We consider an initial state Polarized Proton
h = ConvType(polarized=True, time_like=False)
Expand Down Expand Up @@ -65,9 +64,9 @@ def test_convolve(self, fake_grids):
)

# Test writing/dumping the FK table into disk
with tempfile.TemporaryDirectory() as tmpdir:
fk.write(f"{tmpdir}/toy_fktable.pineappl")
fk.write_lz4(f"{tmpdir}/toy_fktable.pineappl.lz4")
path = f"{tmp_path}/toy_fktable.pineappl"
fk.write(path)
fk.write_lz4(path)

def test_fktable(
self,
Expand Down Expand Up @@ -113,8 +112,38 @@ def test_fktable(

# Check that FK table is in the Evolution basis and rotate into PDG
assert fk.pid_basis == PidBasis.Evol
new_fk = fk.rotate_pid_basis(PidBasis.Pdg)
assert new_fk.pid_basis == PidBasis.Pdg
fk.rotate_pid_basis(PidBasis.Pdg)
assert fk.pid_basis == PidBasis.Pdg

def test_fktable_rotations(
    self,
    pdf,
    download_objects,
    tmp_path,
    fkname: str = "FKTABLE_CMSTTBARTOT8TEV-TOPDIFF8TEVTOT.pineappl.lz4",
):
    """Check that rotating an FK table into the PDG basis leaves the
    convolution results unchanged and that the rotated table can be
    written to and read back from disk.
    """
    expected_results = [3.72524538e04]  # Numbers computed using `v0.8.6`

    fk_table = download_objects(f"{fkname}")
    fk = FkTable.read(fk_table)

    # rotate in the PDG basis and check that all the factors are unity
    fk.rotate_pid_basis(PidBasis.Pdg)
    assert fk.pid_basis == PidBasis.Pdg

    # check that the convolutions are still the same
    np.testing.assert_allclose(
        fk.convolve(
            pdg_convs=fk.convolutions,
            xfxs=[pdf.unpolarized_pdf, pdf.unpolarized_pdf],
        ),
        expected_results,
    )

    # check that the FK table can be loaded properly
    path = f"{tmp_path}/rotated_fktable.pineappl.lz4"
    fk.write_lz4(path)
    _ = FkTable.read(path)

def test_unpolarized_convolution(
self,
Expand All @@ -125,7 +154,7 @@ def test_unpolarized_convolution(
"""Check the convolution of an actual FK table that involves two
symmetrical unpolarized protons:
"""
expected_results = [3.72524538e04]
expected_results = [3.72524538e04] # Numbers computed using `v0.8.6`
fk_table = download_objects(f"{fkname}")
fk = FkTable.read(fk_table)

Expand Down
Loading