diff --git a/JugFast/CMakeLists.txt b/JugFast/CMakeLists.txt
index 5b0852454e4e9d7425e94b8846ca32d7aa695ea2..baa77ac23dd98f0ef69d3406f7bb106302390571 100644
--- a/JugFast/CMakeLists.txt
+++ b/JugFast/CMakeLists.txt
@@ -9,10 +9,12 @@ find_package(DD4hep COMPONENTS DDG4 DDG4IO DDRec REQUIRED)
 
 gaudi_add_module(JugFastPlugins
   SOURCES
+  src/components/ClusterMerger.cpp
   src/components/MatchClusters.cpp
   src/components/MC2SmearedParticle.cpp
   src/components/ParticlesWithTruthPID.cpp
   src/components/SmearedFarForwardParticles.cpp
+  src/components/TruthEnergyPositionClusterMerger.cpp
   LINK
   Gaudi::GaudiAlgLib Gaudi::GaudiKernel
   JugBase
diff --git a/JugFast/src/components/ClusterMerger.cpp b/JugFast/src/components/ClusterMerger.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..74cfa9f9b585196158fde2f1606102722b1b6bcb
--- /dev/null
+++ b/JugFast/src/components/ClusterMerger.cpp
@@ -0,0 +1,164 @@
+#include <limits>
+#include <numbers>
+
+#include <fmt/format.h>
+// Gaudi
+#include "Gaudi/Property.h"
+#include "GaudiAlg/GaudiAlgorithm.h"
+#include "GaudiAlg/GaudiTool.h"
+#include "GaudiAlg/Transformer.h"
+#include "GaudiKernel/PhysicalConstants.h"
+
+#include "JugBase/DataHandle.h"
+#include "JugBase/UniqueID.h"
+
+// Event Model related classes
+#include "dd4pod/Geant4ParticleCollection.h"
+#include "eicd/ClusterCollection.h"
+#include "eicd/MergedClusterRelationsCollection.h"
+
+using namespace Gaudi::Units;
+
+namespace Jug::Fast {
+
+/** Simple algorithm to merge clusters originating from the same primary particle,
+ * based on the MC truth.
+ *
+ * \ingroup fast
+ */
+class ClusterMerger : public GaudiAlgorithm, public AlgorithmIDMixin<> {
+public:
+  // Input
+  DataHandle<eic::ClusterCollection> m_inputClusters{"InputClusters", Gaudi::DataHandle::Reader, this};
+  // Output
+  DataHandle<eic::ClusterCollection> m_outputClusters{"OutputClusters", Gaudi::DataHandle::Writer, this};
+  DataHandle<eic::MergedClusterRelationsCollection> m_relations{"OutputClusterRelations", Gaudi::DataHandle::Writer,
+                                                                this};
+public:
+  ClusterMerger(const std::string& name, ISvcLocator* svcLoc)
+      : GaudiAlgorithm(name, svcLoc), AlgorithmIDMixin(name, info()) {
+    declareProperty("inputClusters", m_inputClusters, "Input cluster collection");
+    declareProperty("outputClusters", m_outputClusters, "Merged output cluster collection");
+    declareProperty("outputRelations", m_relations, "Relations between merged and input clusters");
+  }
+
+  StatusCode initialize() override { return StatusCode::SUCCESS; }
+
+  StatusCode execute() override {
+    if (msgLevel(MSG::DEBUG)) {
+      debug() << "Merging clusters that belong to the same primary particle" << endmsg;
+    }
+    // input
+    const auto& split = *(m_inputClusters.get());
+    // output
+    auto& merged = *(m_outputClusters.createAndPut());
+    auto& relations = *(m_relations.createAndPut());
+
+    if (!split.size()) {
+      if (msgLevel(MSG::DEBUG)) {
+        debug() << "Nothing to do for this event, returning..." << endmsg;
+      }
+      return StatusCode::SUCCESS;
+    }
+
+    if (msgLevel(MSG::DEBUG)) {
+      debug() << "Step 0/1: Getting indexed list of clusters..."
+              << endmsg;
+    }
+    // get an indexed map of all vectors of clusters, indexed by mcID
+    auto clusterMap = indexedClusterLists(split);
+
+    // loop over all clusters and merge those that share an mcID
+    if (msgLevel(MSG::DEBUG)) {
+      debug() << "Step 1/1: Merging clusters where needed" << endmsg;
+    }
+    // index for newly created merged clusters
+    int32_t idx = 0;
+    for (const auto& [mcID, clusters] : clusterMap) {
+      if (msgLevel(MSG::DEBUG)) {
+        debug() << " --> Processing " << clusters.size() << " clusters for mcID " << mcID << endmsg;
+      }
+      if (clusters.size() == 1) {
+        const auto& clus = clusters[0];
+        if (msgLevel(MSG::DEBUG)) {
+          debug() << " --> Only a single cluster " << clus.ID() << ", energy: " << clus.energy()
+                  << " for this particle, copying" << endmsg;
+        }
+        merged.push_back(clus.clone());
+        auto rel = relations.create();
+        rel.clusterID(clus.ID());
+        rel.size(1);
+        rel.parent()[0] = clus.ID();
+      } else {
+        auto new_clus = merged.create();
+        new_clus.ID({idx++, algorithmID()});
+        auto rel = relations.create();
+        rel.clusterID(new_clus.ID());
+        rel.size(clusters.size());
+        // calculate aggregate info
+        float energy = 0;
+        float energyError = 0;
+        float time = 0;
+        int nhits = 0;
+        eic::VectorXYZ position;
+        float radius = 0;
+        float skewness = 0;
+        size_t cnt = 0;
+        for (const auto& clus : clusters) {
+          if (msgLevel(MSG::DEBUG)) {
+            debug() << " --> Adding cluster " << clus.ID() << ", energy: " << clus.energy() << endmsg;
+          }
+          energy += clus.energy();
+          energyError += clus.energyError() * clus.energyError();
+          time += clus.time() * clus.energy();
+          nhits += clus.nhits();
+          position = position.add(clus.position().scale(clus.energy())); // energy-weighted position sum
+          radius += clus.radius() * clus.radius(); // @TODO does this make sense?
+          skewness += 0;                           // @TODO
+          if (cnt < 4) {
+            rel.parent()[cnt] = clus.ID();
+          }
+          ++cnt;
+        }
+        new_clus.energy(energy);
+        new_clus.energyError(sqrt(energyError));
+        new_clus.time(time / energy);
+        new_clus.nhits(nhits);
+        new_clus.position(position.scale(1 / energy));
+        new_clus.radius(sqrt(radius));
+        new_clus.skewness(skewness);
+        if (msgLevel(MSG::DEBUG)) {
+          debug() << " --> Merged cluster " << new_clus.ID() << ", energy: " << new_clus.energy() << endmsg;
+        }
+      }
+    }
+
+    // That's all!
+    return StatusCode::SUCCESS;
+  }
+
+  // get a map of mcID --> std::vector<cluster> for clusters that belong together
+  std::map<eic::Index, std::vector<eic::ConstCluster>> indexedClusterLists(const eic::ClusterCollection& clusters) const {
+    std::map<eic::Index, std::vector<eic::ConstCluster>> matched = {};
+    for (const auto& cluster : clusters) {
+      if (msgLevel(MSG::VERBOSE)) {
+        verbose() << " --> Found cluster: " << cluster.ID() << " with mcID " << cluster.mcID() << " and energy "
+                  << cluster.energy() << endmsg;
+      }
+      if (!cluster.mcID()) {
+        if (msgLevel(MSG::VERBOSE)) {
+          verbose() << " --> WARNING: no valid MC truth link found, skipping cluster..."
+                    << endmsg;
+        }
+        continue;
+      }
+      if (!matched.count(cluster.mcID())) {
+        matched[cluster.mcID()] = {};
+      }
+      matched[cluster.mcID()].push_back(cluster);
+    }
+    return matched;
+  }
+};
+DECLARE_COMPONENT(ClusterMerger)
+
+} // namespace Jug::Fast
diff --git a/JugFast/src/components/MatchClusters.cpp b/JugFast/src/components/MatchClusters.cpp
index feec1fedbb371f4e85e8c1dafb90b0cae3c03593..d65f9e35adbf4ad9f19c4c4aa8f29f2a1bbc2eea 100644
--- a/JugFast/src/components/MatchClusters.cpp
+++ b/JugFast/src/components/MatchClusters.cpp
@@ -128,6 +128,7 @@ public:
         debug() << " --> Processing unmatched ECAL cluster (" << eclus.ID() << "), energy: " << eclus.energy()
                 << endmsg;
       }
+      // get mass/PID from mcparticles; assume a photon (zero mass) in case the associated particle is charged.
       const auto& mc = mcparticles[mcID.value];
       const double mass = (!mc.charge()) ? mc.mass() : 0;
 
@@ -186,6 +187,12 @@ private:
        verbose() << " --> Found cluster: " << cluster.ID() << " with mcID " << cluster.mcID() << " and energy "
                  << cluster.energy() << endmsg;
      }
+     if (!cluster.mcID()) {
+       if (msgLevel(MSG::VERBOSE)) {
+         verbose() << " --> WARNING: no valid MC truth link found, skipping cluster..." << endmsg;
+       }
+       continue;
+     }
      const bool duplicate = matched.count(cluster.mcID());
      if (duplicate) {
        if (msgLevel(MSG::VERBOSE)) {
diff --git a/JugFast/src/components/TruthEnergyPositionClusterMerger.cpp b/JugFast/src/components/TruthEnergyPositionClusterMerger.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..13459b7796a7a5bf32aa540f55ae5bd83967524c
--- /dev/null
+++ b/JugFast/src/components/TruthEnergyPositionClusterMerger.cpp
@@ -0,0 +1,193 @@
+#include <limits>
+#include <numbers>
+
+#include <fmt/format.h>
+// Gaudi
+#include "Gaudi/Property.h"
+#include "GaudiAlg/GaudiAlgorithm.h"
+#include "GaudiAlg/GaudiTool.h"
+#include "GaudiAlg/Transformer.h"
+#include "GaudiKernel/PhysicalConstants.h"
+
+#include "JugBase/DataHandle.h"
+#include "JugBase/UniqueID.h"
+
+// Event Model related classes
+#include "dd4pod/Geant4ParticleCollection.h"
+#include "eicd/ClusterCollection.h"
+#include "eicd/MergedClusterRelationsCollection.h"
+
+using namespace Gaudi::Units;
+
+namespace Jug::Fast {
+
+/** Simple algorithm to merge the energy measurement from one cluster collection with the
+ * position measurement from a second collection (in case matching clusters are found).
+ * If no match is found, the unmatched cluster is propagated as-is.
+ *
+ * Matching is based on the MC truth information of the clusters.
+ *
+ * \ingroup fast
+ */
+class TruthEnergyPositionClusterMerger : public GaudiAlgorithm, public AlgorithmIDMixin<> {
+public:
+  // Input
+  DataHandle<dd4pod::Geant4ParticleCollection> m_inputMCParticles{"mcparticles", Gaudi::DataHandle::Reader, this};
+  DataHandle<eic::ClusterCollection> m_energyClusters{"EnergyClusters", Gaudi::DataHandle::Reader, this};
+  DataHandle<eic::ClusterCollection> m_positionClusters{"PositionClusters", Gaudi::DataHandle::Reader, this};
+  // Output
+  DataHandle<eic::ClusterCollection> m_outputClusters{"OutputClusters", Gaudi::DataHandle::Writer, this};
+  DataHandle<eic::MergedClusterRelationsCollection> m_relations{"OutputClusterRelations", Gaudi::DataHandle::Writer,
+                                                                this};
+public:
+  TruthEnergyPositionClusterMerger(const std::string& name, ISvcLocator* svcLoc)
+      : GaudiAlgorithm(name, svcLoc), AlgorithmIDMixin(name, info()) {
+    declareProperty("inputMCParticles", m_inputMCParticles, "Input MC particle collection");
+    declareProperty("inputEnergyClusters", m_energyClusters, "Cluster collection with good energy precision");
+    declareProperty("inputPositionClusters", m_positionClusters, "Cluster collection with good position precision");
+    declareProperty("outputClusters", m_outputClusters, "Merged output cluster collection");
+    declareProperty("outputRelations", m_relations, "Relations between merged and parent clusters");
+  }
+
+  StatusCode initialize() override { return StatusCode::SUCCESS; }
+
+  StatusCode execute() override {
+    if (msgLevel(MSG::DEBUG)) {
+      debug() << "Merging energy and position clusters for new event" << endmsg;
+    }
+    // input
+    const auto& mcparticles = *(m_inputMCParticles.get());
+    const auto& energy_clus = *(m_energyClusters.get());
+    const auto& pos_clus = *(m_positionClusters.get());
+    // output
+    auto& merged = *(m_outputClusters.createAndPut());
+    auto& relations = *(m_relations.createAndPut());
+
+    if (!energy_clus.size() && !pos_clus.size()) {
+      if (msgLevel(MSG::DEBUG)) {
+        debug() << "Nothing to do for this event, returning..." << endmsg;
+      }
+      return StatusCode::SUCCESS;
+    }
+
+    if (msgLevel(MSG::DEBUG)) {
+      debug() << "Step 0/2: Getting indexed list of clusters..." << endmsg;
+    }
+    // get an indexed map of all clusters
+    auto energyMap = indexedClusters(energy_clus);
+    auto posMap = indexedClusters(pos_clus);
+
+    // loop over all position clusters and match with energy clusters
+    if (msgLevel(MSG::DEBUG)) {
+      debug() << "Step 1/2: Matching all position clusters to the available energy clusters..."
+              << endmsg;
+    }
+    // index for newly created matched clusters
+    int32_t idx = 0;
+    for (const auto& [mcID, pclus] : posMap) {
+      if (msgLevel(MSG::DEBUG)) {
+        debug() << " --> Processing position cluster " << pclus.ID() << ", mcID: " << mcID << ", energy: " << pclus.energy()
+                << endmsg;
+      }
+      if (energyMap.count(mcID)) {
+        const auto& eclus = energyMap[mcID];
+        auto new_clus = merged.create();
+        new_clus.ID({idx++, algorithmID()});
+        new_clus.energy(eclus.energy());
+        new_clus.energyError(eclus.energyError());
+        new_clus.time(pclus.time());
+        new_clus.nhits(pclus.nhits() + eclus.nhits());
+        new_clus.position(pclus.position());
+        new_clus.positionError(pclus.positionError());
+        new_clus.radius(pclus.radius());
+        new_clus.skewness(pclus.skewness());
+        new_clus.mcID(mcID);
+        auto rel = relations.create();
+        rel.clusterID(new_clus.ID());
+        rel.size(2);
+        rel.parent()[0] = pclus.ID();
+        rel.parent()[1] = eclus.ID();
+        if (msgLevel(MSG::DEBUG)) {
+          debug() << " --> Found matching energy cluster " << eclus.ID() << ", energy: " << eclus.energy() << endmsg;
+          debug() << " --> Created new combined cluster " << new_clus.ID() << ", energy: " << new_clus.energy() << endmsg;
+        }
+        // erase the energy cluster from the map, so we can account for all
+        // remaining clusters at the end
+        energyMap.erase(mcID);
+      } else {
+        if (msgLevel(MSG::DEBUG)) {
+          debug() << " --> No matching energy cluster found, copying over position cluster" << endmsg;
+        }
+        merged.push_back(pclus.clone());
+        auto rel = relations.create();
+        rel.clusterID(pclus.ID());
+        rel.size(1);
+        rel.parent()[0] = pclus.ID();
+      }
+    }
+    // Collect remaining energy clusters. Use the MC truth position for these clusters, as
+    // they should really have a match in the position clusters (and if they don't, it is
+    // due to a clustering error).
+    if (msgLevel(MSG::DEBUG)) {
+      debug() << "Step 2/2: Collecting remaining energy clusters..." << endmsg;
+    }
+    for (const auto& [mcID, eclus] : energyMap) {
+      const auto& mc = mcparticles[mcID.value];
+      auto new_clus = merged.create();
+      new_clus.ID({idx++, algorithmID()});
+      new_clus.energy(eclus.energy());
+      new_clus.energyError(eclus.energyError());
+      new_clus.time(eclus.time());
+      new_clus.nhits(eclus.nhits());
+      // use a nominal radius of 110cm, and the theta and phi of the start vertex
+      new_clus.position(eic::VectorPolar{110, mc.ps().theta(), mc.ps().phi()});
+      new_clus.radius(eclus.radius());
+      new_clus.skewness(eclus.skewness());
+      new_clus.mcID(mcID);
+      auto rel = relations.create();
+      rel.clusterID(new_clus.ID());
+      rel.size(1);
+      rel.parent()[0] = eclus.ID();
+      if (msgLevel(MSG::DEBUG)) {
+        debug() << " --> Processing energy cluster " << eclus.ID() << ", mcID: " << mcID << ", energy: " << eclus.energy()
+                << endmsg;
+        debug() << " --> Created new 'combined' cluster " << new_clus.ID() << ", energy: " << new_clus.energy() << endmsg;
+      }
+    }
+
+    // That's all!
+    return StatusCode::SUCCESS;
+  }
+
+  // get a map of mcID --> cluster
+  // input: a single cluster collection
+  std::map<eic::Index, eic::ConstCluster> indexedClusters(const eic::ClusterCollection& clusters) const {
+    std::map<eic::Index, eic::ConstCluster> matched = {};
+    for (const auto& cluster : clusters) {
+      if (msgLevel(MSG::VERBOSE)) {
+        verbose() << " --> Found cluster: " << cluster.ID() << " with mcID " << cluster.mcID() << " and energy "
+                  << cluster.energy() << endmsg;
+      }
+      if (!cluster.mcID()) {
+        if (msgLevel(MSG::VERBOSE)) {
+          verbose() << " --> WARNING: no valid MC truth link found, skipping cluster..."
+                    << endmsg;
+        }
+        continue;
+      }
+      const bool duplicate = matched.count(cluster.mcID());
+      if (duplicate) {
+        if (msgLevel(MSG::VERBOSE)) {
+          verbose() << " --> WARNING: this is a duplicate mcID, keeping the higher energy cluster" << endmsg;
+        }
+        if (cluster.energy() < matched[cluster.mcID()].energy()) {
+          continue;
+        }
+      }
+      matched[cluster.mcID()] = cluster;
+    }
+    return matched;
+  }
+};
+DECLARE_COMPONENT(TruthEnergyPositionClusterMerger)
+
+} // namespace Jug::Fast