Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found
Select Git revision
  • 3-clustering-algorithm-for-calorimeter
  • 36-need-jug-run-executable-to-launch-full-reconstruction
  • 37-eta-and-phi-definition-in-merger
  • 73-add-rich-irt-algorithm
  • 79-testing-ci-for-alexander
  • 88-use-gsl-owner
  • 95-jugfast-matchclusters-needs-updating-for-edm4hep-migration
  • 96-implement-simtrackerhitsmerger-to-overlay-merge-background-events
  • 99-trackprojector-generalize-by-passing-target-surface-as-option
  • BHCalReco
  • Mriganka-master-patch-97470
  • acts-20
  • acts-seeding-21
  • acts_debug
  • algorithms
  • algorithms-integration-calorimeter-hit-digi
  • algorithms-service-proto
  • ayk-01
  • bebop_test
  • billlee77-master-patch-93524
  • brynnamoran-master-patch-77949
  • calorimeter-hit-digi
  • calorimeter-hit-digi-with-random-svc
  • debug_tracking
  • eicrecon-migration
  • feat-add-ActsSvc
  • feat-context-service-overhaul
  • fitting
  • imaging_cal_ML
  • irt-hepmc-jugpid
  • irt-init
  • irt-init-rebased
  • irt-init-v01
  • less-jug-xl-master
  • less_verbose_acts
  • lkosarzew-master-patch-18213
  • main
  • master
  • neutral_momentum
  • new_versions_fix
  • pid
  • pileup-tools
  • podio-data-svc-collections
  • re-update_ci
  • robin-ImagingDataShaper
  • silicon-tracker-digi
  • sim-tracker-hits-collector
  • source_link_debug
  • standalone
  • swapneshkhade-master-patch-93112
  • tensorflow
  • test-main
  • test-unstable-container
  • tflite-proof-of-concept
  • tof-pid-plugin
  • topo_cluster
  • track-param-imaging-cluster-init-phi-shift-to-ecal
  • track-projector-firstSmallerThanZ
  • track_projection
  • tracking_update
  • trackprojection_writeout
  • trajectory_info
  • update_imcal_alg
  • use-algorithms-library
  • use-eicrecon-algorithms
  • use-upstream-edm4hep
  • use_v3_image
  • vanekjan-master-patch-35348
  • wdconinc-main-patch-17306
  • wdconinc-main-patch-77189
  • wdconinc-main-patch-83553
  • wdconinc-main-patch-85492
  • wdconinc-master-patch-91743
  • 0.1
  • stable-1.2
  • v0.1
  • v0.3.1
  • v0.3.2
  • v0.3.3
  • v0.4.0
  • v0.4.1
  • v1.2.0
  • v1.5.0
  • v1.6.0
  • v1.8.0
  • v10.0.0
  • v10.0.1
  • v10.1.0
  • v11.0.0
  • v12.0.0
  • v13.0.0
  • v14.0.0
  • v14.0.1
  • v14.0.2
  • v14.0.3
  • v14.1.0
  • v14.2.0
  • v14.2.1
  • v14.2.2
  • v14.3.0
  • v15.0.0
  • v15.0.1
  • v15.0.2
  • v2.0.0
  • v3.0.0
  • v3.1.0
  • v3.2.0
  • v3.3.0
  • v3.3.1
  • v3.4.0
  • v3.5.0
  • v3.6.0
  • v4.0.0
  • v4.1.0
  • v4.2.0
  • v4.3.0
  • v4.4.0
  • v5.0.0
  • v6.0.0
  • v6.1.0
  • v7.0.0
  • v8.0.0
  • v8.0.1
  • v8.0.2
  • v9.0.0
  • v9.1.0
  • v9.2.0
  • v9.3.0
  • v9.4.0
129 results

Target

Select target project
  • EIC/juggler
  • nbrei/juggler
  • wdconinc/juggler
  • ylai/juggler
4 results
Select Git revision
  • 11-generic-3d-clustering-of-hits
  • 26-implement-3d-clustering-algorithm-for-sampling-calorimeter
  • 3-clustering-algorithm-for-calorimeter
  • downstream_pipelines
  • fitting
  • master
  • new_versions_fix
  • pages
  • pid
  • test-unstable
  • 0.1
  • stable-1.2
  • v0.1
  • v0.3.1
  • v0.3.2
  • v0.3.3
  • v0.4.0
  • v0.4.1
  • v1.2.0
19 results
Show changes
Commits on Source (506)
Showing
with 1772 additions and 197 deletions
--- ---
Language: Cpp BasedOnStyle: LLVM
BasedOnStyle: Chromium BreakConstructorInitializersBeforeComma: true
AccessModifierOffset: -2 ConstructorInitializerAllOnOneLineOrOnePerLine: true
AlignAfterOpenBracket: Align
AlignConsecutiveAssignments: true
AlignConsecutiveDeclarations: true
AlignEscapedNewlines: Right
AlignOperands: true
AlignTrailingComments: true
AllowAllParametersOfDeclarationOnNextLine: true
AllowShortBlocksOnASingleLine: false
AllowShortCaseLabelsOnASingleLine: false
AllowShortFunctionsOnASingleLine: All
AllowShortIfStatementsOnASingleLine: false
AllowShortLoopsOnASingleLine: false
AlwaysBreakAfterDefinitionReturnType: None
AlwaysBreakAfterReturnType: None
AlwaysBreakBeforeMultilineStrings: false
AlwaysBreakTemplateDeclarations: true
BinPackArguments: true
BinPackParameters: true
BraceWrapping:
AfterClass: false
AfterControlStatement: false
AfterEnum: false
AfterFunction: true
AfterNamespace: false
AfterObjCDeclaration: false
AfterStruct: false
AfterUnion: false
AfterExternBlock: false
BeforeCatch: false
BeforeElse: false
IndentBraces: false
SplitEmptyFunction: true
SplitEmptyRecord: true
SplitEmptyNamespace: true
BreakBeforeBinaryOperators: None
BreakBeforeBraces: Custom
BreakBeforeInheritanceComma: false
BreakBeforeTernaryOperators: true
BreakConstructorInitializersBeforeComma: false
BreakConstructorInitializers: BeforeColon
BreakAfterJavaFieldAnnotations: false
BreakStringLiterals: true
ColumnLimit: 120
CommentPragmas: '^ IWYU pragma:'
CompactNamespaces: false
ConstructorInitializerAllOnOneLineOrOnePerLine: false
ConstructorInitializerIndentWidth: 4
ContinuationIndentWidth: 4
Cpp11BracedListStyle: true
DerivePointerAlignment: false Standard: Cpp11
DisableFormat: false #SpaceBeforeParens: ControlStatements
ExperimentalAutoDetectBinPacking: false SpaceAfterControlStatementKeyword: true
FixNamespaceComments: true PointerBindsToType: true
ForEachMacros:
- foreach
- Q_FOREACH
- BOOST_FOREACH
IncludeBlocks: Preserve
IncludeCategories:
- Regex: '^"(llvm|llvm-c|clang|clang-c)/'
Priority: 2
- Regex: '^(<|"(gtest|gmock|isl|json)/)'
Priority: 3
- Regex: '.*'
Priority: 1
IncludeIsMainRegex: '(Test)?$'
IndentCaseLabels: false
IndentPPDirectives: None
IndentWidth: 2
IndentWrappedFunctionNames: false
JavaScriptQuotes: Leave
JavaScriptWrapImports: true
KeepEmptyLinesAtTheStartOfBlocks: true
MacroBlockBegin: ''
MacroBlockEnd: ''
MaxEmptyLinesToKeep: 1
NamespaceIndentation: All
ObjCBlockIndentWidth: 2
ObjCSpaceAfterProperty: false
ObjCSpaceBeforeProtocolList: true
PenaltyBreakAssignment: 2
PenaltyBreakBeforeFirstCallParameter: 19
PenaltyBreakComment: 300
PenaltyBreakFirstLessLess: 120
PenaltyBreakString: 1000
PenaltyExcessCharacter: 1000000
PenaltyReturnTypeOnItsOwnLine: 60
PointerAlignment: Left
ReflowComments: true
SortIncludes: true
SortUsingDeclarations: true
SpaceAfterCStyleCast: false
SpaceAfterTemplateKeyword: true
SpaceBeforeAssignmentOperators: true
SpaceBeforeParens: ControlStatements
#SpaceBeforeRangeBasedForLoopColon: true
SpaceInEmptyParentheses: false
SpacesBeforeTrailingComments: 1
SpacesInAngles: false
SpacesInContainerLiterals: true
SpacesInCStyleCastParentheses: false
SpacesInParentheses: false
SpacesInSquareBrackets: false
Standard: Cpp11
TabWidth: 8
UseTab: Never
ColumnLimit: 100
NamespaceIndentation: Inner
AlignConsecutiveAssignments: true
... ...
HeaderFilterRegex: ''
Checks: '
bugprone-*,
concurrency-*,
cppcoreguidelines-*,
modernize-*,
portability-*,
readability-*,
-modernize-use-trailing-return-type,
-modernize-avoid-c-arrays,
-modernize-use-nodiscard,
-readability-magic-numbers,
-cppcoreguidelines-avoid-magic-numbers
'
FormatStyle: file
name: linux-eic-shell
on: [push, pull_request]
jobs:
build-test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: cvmfs-contrib/github-action-cvmfs@v2
- uses: eic/run-cvmfs-osg-eic-shell@main
with:
platform-release: "jug_xl:nightly"
run: |
PREFIX=${PWD}/install
# install this repo
cmake -B build -S . -DCMAKE_INSTALL_PREFIX=${PREFIX}
cmake --build build -- install
- uses: actions/upload-artifact@v3
with:
name: install-eic-shell
path: install/
if-no-files-found: error
- uses: actions/upload-artifact@v3
with:
name: build-eic-shell
path: build/
if-no-files-found: error
clang-tidy:
runs-on: ubuntu-latest
needs: build-test
steps:
- uses: actions/checkout@v2
- uses: cvmfs-contrib/github-action-cvmfs@v2
- uses: actions/download-artifact@v3
with:
name: build-eic-shell
path: build/
- uses: eic/run-cvmfs-osg-eic-shell@main
with:
platform-release: "jug_xl:nightly"
run: |
run-clang-tidy -p build -export-fixes clang_tidy_fixes.yml -extra-arg='-std=c++20'
- uses: actions/upload-artifact@v3
with:
name: clang-tidy-fixes.yml
path: clang_tidy_fixes.yml
...@@ -18,11 +18,12 @@ DEBUG*/* ...@@ -18,11 +18,12 @@ DEBUG*/*
BUILD*/*
RELEASE*/*
TEST*/*
doc/docs/*
# cmake # cmake
CMakeCache.txt
CMakeFiles
Makefile #Makefile
cmake_install.cmake
install_manifest.txt
...@@ -39,3 +40,6 @@ __pycache__/ ...@@ -39,3 +40,6 @@ __pycache__/
# ROOT file
*.root
*.sif
**old
install/*
image: eicweb.phy.anl.gov:4567/containers/eic_container/eic:latest image: eicweb.phy.anl.gov:4567/containers/image_recipes/ubuntu_dind:latest
default: variables:
artifacts: VERSION: "${CI_COMMIT_REF_NAME}"
paths:
- build/
stages: stages:
- build - build
- run - analysis
- config
- docker ## build new version of juggler
- deploy ## repo-local singularity image for development work
workflow:
## Only rebuild on MRs and on commits to the main, as in other cases
## we should use the tagged jug_xl releases from eic_container.
rules:
- if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
- if: '$CI_COMMIT_BRANCH == "main"'
compile: default:
## plan:
## Workflows:
## - main --> config + docker (eic_container) + singularity (this repo)
## --> trigger eic_container master
## - MR --> config + docker (eic_container) + singularity (this repo)
## - upstream trigger from eic_container (nightly) --> run main
##
## Container images tags
## - main --> nightly on eicweb & DH, and export to eic_container
## - MR --> unstable-mr-XXX (on eicweb only, untag at end of pipeline)
juggler:local:
image: eicweb.phy.anl.gov:4567/containers/eic_container/jug_xl:nightly
stage: build stage: build
tags: parallel:
- silicon matrix:
before_script: - CMAKE_CXX_STANDARD:
- pwd && ls -lrth - 20
script:
- |
cmake -Bbuild -S. -DCMAKE_CXX_STANDARD=${CMAKE_CXX_STANDARD} -DCMAKE_INSTALL_PREFIX=/usr/local
cmake --build build -j20
artifacts:
expire_in: 1 hour
paths:
- build/
analysis:clang-tidy:
image: eicweb.phy.anl.gov:4567/containers/eic_container/jug_xl:nightly
stage: analysis
needs:
- juggler:local
script: script:
- export homedir=$(pwd) && pwd && cd /tmp && git clone --depth=1 https://eicweb.phy.anl.gov/EIC/NPDet.git && mkdir build && cd build && cmake ../NPDet/. && make -j20 install - |
- cd /tmp && git clone --depth=1 https://eicweb.phy.anl.gov/EIC/eicd.git && mkdir eicd_build && cd eicd_build && cmake ../eicd/. && make -j20 install run-clang-tidy -p build -j20 -export-fixes clang_tidy_fixes.yml -extra-arg='-std=c++20'
- cd $homedir && ls -lrth && mkdir build && cd build && cmake .. && make -j10 artifacts:
expire_in: 1 week
run_example: paths:
stage: run - clang_tidy_fixes.yml
tags: allow_failure: true
- silicon
version:
stage: config
rules:
- if: '$CI_SERVER_HOST == "eicweb.phy.anl.gov"'
script: script:
- ./build/run gaudirun.py Examples/options/hello_world.py - |
if [ "x${CI_PIPELINE_SOURCE}" == "xmerge_request_event" ]; then
VERSION="unstable-mr-${CI_MERGE_REQUEST_PROJECT_ID}-${CI_MERGE_REQUEST_IID}"
fi
echo "VERSION=$VERSION" >> build.env
cat build.env
artifacts:
reports:
dotenv: build.env
eic_container:
stage: deploy
needs:
- version
variables:
VERSION: "${VERSION}"
JUGGLER_VERSION: "${CI_COMMIT_REF_NAME}"
trigger:
project: containers/eic_container
strategy: depend
allow_failure: false
run_example2: pages:
image: eicweb.phy.anl.gov:4567/eic/npdet/npdet:latest image: eicweb.phy.anl.gov:4567/containers/eic_container/alpine
stage: run stage: deploy
tags: rules:
- silicon - if: '$CI_SERVER_HOST == "gitlab.phy.anl.gov" && $CI_COMMIT_BRANCH == "main"'
script: script:
- ./build/run gaudirun.py JugBase/tests/options/simple_reader.py - apk update && apk add doxygen graphviz ttf-ubuntu-font-family
- cd doc && doxygen Doxyfile && cd ..
- mkdir -p public && cp -r doc/docs/html/* public/.
artifacts:
paths:
- public
cmake_minimum_required(VERSION 3.2.0) # SPDX-License-Identifier: LGPL-3.0-or-later
# Copyright (C) 2022 Wouter Deconinck, Whitney Armstrong
set(CMAKE_PREFIX_PATH $ENV{HOME}/lib CMAKE_PREFIX_PATH)
find_package(NPDet REQUIRED) cmake_minimum_required(VERSION 3.19)
#---------------------------------------------------------------
# to use ROOT targets, find_package ROOT must come before find_package Gaudi # CMP0074: find_package() uses <PackageName>_ROOT variables
# (for now) # see issue Gaudi#103 cmake_policy(SET CMP0074 NEW)
set(PODIO $ENV{PODIO})
set(CMAKE_MODULE_PATH CMAKE_MODULE_PATH PODIO) project(Juggler VERSION 4.3.0)
find_package(podio 0.11.0 REQUIRED)
include_directories(${podio_INCLUDE_DIR}) option(JUGGLER_BUILD_TRACKING "Build tracking algorithms" TRUE)
find_package(ROOT COMPONENTS MathCore GenVector Geom REQUIRED)
find_package(DD4hep COMPONENTS DDG4 DDG4IO DDRec REQUIRED) set(CMAKE_CXX_STANDARD 20 CACHE STRING "")
find_package(Geant4) if(NOT CMAKE_CXX_STANDARD MATCHES "20")
message(FATAL_ERROR "Unsupported C++ standard: ${CMAKE_CXX_STANDARD}")
endif()
find_package(GaudiProject) set(CMAKE_CXX_STANDARD_REQUIRED ON)
set(CMAKE_CXX_EXTENSIONS OFF)
gaudi_project(Juggler v1r0
USE Gaudi v33r1) # Export compile commands as json for run-clang-tidy
set(CMAKE_EXPORT_COMPILE_COMMANDS ON)
# Also use clang-tidy integration in CMake
option(ENABLE_CLANG_TIDY "Enable clang-tidy integration in cmake" OFF)
if(ENABLE_CLANG_TIDY)
find_program(CLANG_TIDY_EXE NAMES "clang-tidy")
if (CLANG_TIDY_EXE)
message(STATUS "clang-tidy found: ${CLANG_TIDY_EXE}")
set(CMAKE_CXX_CLANG_TIDY "${CLANG_TIDY_EXE}" CACHE STRING "" FORCE)
else()
set(CMAKE_CXX_CLANG_TIDY "" CACHE STRING "" FORCE)
endif()
endif()
# Set default build type
set(default_build_type "Release")
if(EXISTS "${CMAKE_SOURCE_DIR}/.git")
set(default_build_type "RelWithDebInfo")
endif()
if(NOT CMAKE_BUILD_TYPE AND NOT CMAKE_CONFIGURATION_TYPES)
message(STATUS "Setting build type to '${default_build_type}' as none was specified.")
set(CMAKE_BUILD_TYPE "${default_build_type}" CACHE
STRING "Choose the type of build." FORCE)
# Set the possible values of build type for cmake-gui
set_property(CACHE CMAKE_BUILD_TYPE PROPERTY STRINGS
"Debug" "Release" "MinSizeRel" "RelWithDebInfo")
endif()
# Set all warnings
if(NOT CMAKE_BUILD_TYPE MATCHES Release)
add_compile_options(-Wall -Wextra -Werror -Wno-error=deprecated-declarations)
endif()
find_package(Microsoft.GSL CONFIG)
find_package(EDM4EIC REQUIRED)
find_package(EDM4HEP 0.4.1 REQUIRED)
find_package(podio 0.16.3)
if(NOT podio_FOUND)
find_package(podio 1.0 REQUIRED)
endif()
add_definitions("-Dpodio_VERSION_MAJOR=${podio_VERSION_MAJOR}")
add_definitions("-Dpodio_VERSION_MINOR=${podio_VERSION_MINOR}")
add_definitions("-Dpodio_VERSION_PATCH=${podio_VERSION_PATCH}")
find_package(ROOT COMPONENTS Core RIO Tree MathCore GenVector Geom REQUIRED)
find_package(DD4hep COMPONENTS DDRec REQUIRED)
if(JUGGLER_BUILD_TRACKING)
find_package(Acts REQUIRED COMPONENTS Core PluginTGeo PluginDD4hep PluginJson)
set(Acts_VERSION_MIN "20.2.0")
set(Acts_VERSION "${Acts_VERSION_MAJOR}.${Acts_VERSION_MINOR}.${Acts_VERSION_PATCH}")
if(${Acts_VERSION} VERSION_LESS ${Acts_VERSION_MIN}
AND NOT "${Acts_VERSION}" STREQUAL "9.9.9")
message(FATAL_ERROR "Acts version ${Acts_VERSION_MIN} or higher required, but ${Acts_VERSION} found")
endif()
add_definitions("-DActs_VERSION_MAJOR=${Acts_VERSION_MAJOR}")
add_definitions("-DActs_VERSION_MINOR=${Acts_VERSION_MINOR}")
add_definitions("-DActs_VERSION_PATCH=${Acts_VERSION_PATCH}")
# Get ActsCore path for ActsExamples include
get_target_property(ActsCore_LOCATION ActsCore LOCATION)
get_filename_component(ActsCore_PATH ${ActsCore_LOCATION} DIRECTORY)
endif()
## Dependencies
find_package(algorithms)
find_package(Gaudi)
find_package(k4FWCore)
## Components
add_subdirectory(JugBase)
add_subdirectory(JugAlgo)
add_subdirectory(JugDigi)
add_subdirectory(JugFast)
add_subdirectory(JugPID)
add_subdirectory(JugReco)
if(JUGGLER_BUILD_TRACKING)
add_subdirectory(JugTrack)
endif()
## CMake config
gaudi_install(CMAKE)
# create and install Juggler.xenv file as it still has a use-case
# TODO: update workflow to not need xenv files anymore
include(cmake/xenv.cmake)
import os
import ROOT
from Gaudi.Configuration import *
from GaudiKernel import SystemOfUnits as units
from GaudiKernel.DataObjectHandleBase import DataObjectHandleBase
from Configurables import ApplicationMgr, EICDataSvc, PodioOutput, GeoSvc
from Configurables import PodioInput
from Configurables import Jug__Base__InputCopier_dd4pod__Geant4ParticleCollection_dd4pod__Geant4ParticleCollection_ as MCCopier
from Configurables import Jug__Base__InputCopier_dd4pod__CalorimeterHitCollection_dd4pod__CalorimeterHitCollection_ as CalCopier
from Configurables import Jug__Digi__CalorimeterHitDigi as CalorimeterHitDigi
from Configurables import Jug__Reco__ImagingPixelReco as ImagingPixelReco
from Configurables import Jug__Reco__ImagingTopoCluster as ImagingTopoCluster
from Configurables import Jug__Reco__ImagingClusterReco as ImagingClusterReco
# input arguments through environment variables
# CB_EMCAL_* variables steer the barrel imaging EM-cal reconstruction job:
#   sampling fraction, input/output files, detector compact file, event count
kwargs = dict()
kwargs['sf'] = float(os.environ.get('CB_EMCAL_SAMP_FRAC', '1.0'))
kwargs['input'] = os.environ.get('CB_EMCAL_SIM_FILE', '../topside/barrel_pion0_5GeV.root')
kwargs['output'] = os.environ.get('CB_EMCAL_REC_FILE', 'barrel_pion0_5GeV_cluster.root')
kwargs['compact'] = os.environ.get('CB_EMCAL_COMPACT_PATH', '../topside/test.xml')
kwargs['nev'] = int(os.environ.get('CB_EMCAL_NUMEV', 10000))
# nev < 1 means "process all events": read the event count from the input file
if kwargs['nev'] < 1:
    f = ROOT.TFile(kwargs['input'])
    kwargs['nev'] = f.events.GetEntries()
print(kwargs)
# get sampling fraction from system environment variable, 1.0 by default
# NOTE(review): this duplicates kwargs['sf'] read above — consider using
# kwargs['sf'] directly; confirm before changing since both are live names
sf = float(os.environ.get('CB_EMCAL_SAMP_FRAC', '1.0'))
# geometry and I/O services: geometry from the compact file(s), event data
# from the simulated input file(s), output to a podio ROOT file
geo_service = GeoSvc("GeoSvc", detectors=kwargs['compact'].split(','))
podioevent = EICDataSvc("EventDataSvc", inputs=kwargs['input'].split(','), OutputLevel=DEBUG)
out = PodioOutput("out", filename=kwargs['output'])
podioinput = PodioInput("PodioReader", collections=["mcparticles", "EcalBarrelHits"], OutputLevel=DEBUG)
# copy input collections so they are also available in the output file
copier = MCCopier("MCCopier",
                  inputCollection="mcparticles",
                  outputCollection="mcparticles2",
                  OutputLevel=DEBUG)
calcopier = CalCopier("CalCopier",
                      inputCollection="EcalBarrelHits",
                      outputCollection="EcalBarrelHits2",
                      OutputLevel=DEBUG)
# digitization: EcalBarrelHits -> DigiEcalBarrelHits
imcaldigi = CalorimeterHitDigi("imcal_digi",
                               inputHitCollection="EcalBarrelHits",
                               outputHitCollection="DigiEcalBarrelHits",
                               inputEnergyUnit=units.GeV,
                               inputTimeUnit=units.ns,
                               energyResolutions=[0., 0.02, 0.],
                               dynamicRangeADC=3*units.MeV,
                               pedestalSigma=40,
                               OutputLevel=DEBUG)
# pixel reconstruction: DigiEcalBarrelHits -> RecoEcalBarrelHits
# (dynamicRangeADC/pedestalSigma must match the digitizer above)
imcalreco = ImagingPixelReco("imcal_reco",
                             inputHitCollection="DigiEcalBarrelHits",
                             outputHitCollection="RecoEcalBarrelHits",
                             dynamicRangeADC=3.*units.MeV,
                             pedestalSigma=40,
                             readoutClass="EcalBarrelHits",
                             layerField="layer",
                             sectorField="module")
# topological clustering: RecoEcalBarrelHits -> EcalBarrelClusters
imcalcluster = ImagingTopoCluster(inputHitCollection="RecoEcalBarrelHits",
                                  outputClusterCollection="EcalBarrelClusters",
                                  localRanges=[2.*units.mm, 2*units.mm],
                                  adjLayerRanges=[10*units.mrad, 10*units.mrad],
                                  adjLayerDiff=2,
                                  adjSectorDist=3.*units.cm)
# cluster-level reconstruction: EcalBarrelClusters -> EcalBarrelClustersLayers
clusterreco = ImagingClusterReco(inputClusterCollection="EcalBarrelClusters",
                                 outputLayerCollection="EcalBarrelClustersLayers",
                                 samplingFraction=sf,
                                 OutputLevel=DEBUG)
# keep every collection in the output file
out.outputCommands = ["keep *"]
# wire the algorithm chain; TopAlg order defines execution order
ApplicationMgr(
    TopAlg=[podioinput, copier, calcopier, imcaldigi, imcalreco, imcalcluster, clusterreco, out],
    EvtSel='NONE',
    EvtMax=kwargs['nev'],
    ExtSvc=[podioevent],
    OutputLevel=DEBUG
)
import os
from Gaudi.Configuration import *
from GaudiKernel import SystemOfUnits as units
from GaudiKernel.DataObjectHandleBase import DataObjectHandleBase
from Configurables import ApplicationMgr, EICDataSvc, PodioOutput, GeoSvc
from Configurables import PodioInput
from Configurables import Jug__Digi__EcalTungstenSamplingDigi as EcalTungstenSamplingDigi
from Configurables import Jug__Reco__EcalTungstenSamplingReco as EcalTungstenSamplingReco
from Configurables import Jug__Reco__CalorimeterHitsMerger as CalorimeterHitsMerger
from Configurables import Jug__Reco__CalorimeterIslandCluster as IslandCluster
from Configurables import Jug__Reco__ClusterRecoCoG as RecoCoG
# get sampling fraction from system environment variable, 1.0 by default
sf = float(os.environ.get('CB_EMCAL_SAMP_FRAC', '1.0'))
# geometry and I/O services (paths are relative to the working directory)
geo_service = GeoSvc("GeoSvc", detectors=["../athena/athena.xml"])
podioevent = EICDataSvc("EventDataSvc", inputs=["../reconstruction_benchmarks/sim_barrel_clusters.root"], OutputLevel=DEBUG)
podioinput = PodioInput("PodioReader", collections=["mcparticles", "EcalBarrelHits"], OutputLevel=DEBUG)
# digitization: EcalBarrelHits -> DigiEcalBarrelHits
emcaldigi = EcalTungstenSamplingDigi("ecal_digi",
                                     inputHitCollection="EcalBarrelHits",
                                     outputHitCollection="DigiEcalBarrelHits",
                                     inputEnergyUnit=units.GeV,
                                     inputTimeUnit=units.ns,
                                     energyResolutions=[0., 0.02, 0.],
                                     dynamicRangeADC=700*units.keV,
                                     pedestalSigma=40,
                                     OutputLevel=DEBUG)
# reconstruction: DigiEcalBarrelHits -> RecoEcalBarrelHits
# (dynamicRangeADC/pedestalSigma must match the digitizer above)
emcalreco = EcalTungstenSamplingReco("ecal_reco",
                                     inputHitCollection="DigiEcalBarrelHits",
                                     outputHitCollection="RecoEcalBarrelHits",
                                     dynamicRangeADC=700*units.keV,
                                     pedestalSigma=40,
                                     OutputLevel=DEBUG)
# readout id definition for barrel ecal
# <id>system:8,barrel:3,module:4,layer:10,slice:5,x:32:-16,y:-16</id>
# sum layers/slices, masking (8+3+4, 8+3+4+5+10-1)
xymerger = CalorimeterHitsMerger("ecal_xy_merger",
                                 fields=["layer", "slice"],
                                 inputHitCollection="RecoEcalBarrelHits",
                                 outputHitCollection="RecoEcalBarrelHitsXY")
# sum modules, masking (8+3+4+5+10, 8+3+4+5+10+32-1)
zmerger = CalorimeterHitsMerger("ecal_z_merger",
                                fields=["x", "y"],
                                inputHitCollection="RecoEcalBarrelHits",
                                outputHitCollection="RecoEcalBarrelHitsZ")
# island clustering on the layer/slice-merged (XY) view
emcalcluster = IslandCluster(inputHitCollection="RecoEcalBarrelHitsXY",
                             outputClusterCollection="EcalBarrelClusters",
                             minClusterCenterEdep=0.5*units.MeV,
                             splitCluster=False,
                             groupRanges=[5.*units.cm, 5*units.cm, 5.*units.cm])
# cluster center-of-gravity reconstruction with log weighting
clusterreco = RecoCoG(clusterCollection="EcalBarrelClusters", logWeightBase=6.2, samplingFraction=sf, OutputLevel=DEBUG)
# write every collection to the output file
out = PodioOutput("out", filename="barrel_cluster.root")
out.outputCommands = ["keep *"]
# wire the algorithm chain; TopAlg order defines execution order
ApplicationMgr(
    TopAlg = [podioinput, emcaldigi, emcalreco, xymerger, zmerger, emcalcluster, clusterreco, out],
    EvtSel = 'NONE',
    EvtMax = 5,
    ExtSvc = [podioevent],
    OutputLevel=DEBUG
)
'''
A script to visualize the cluster
It reads the output from the Juggler component ImagingClusterReco, which is supposed to be clusters of hits after
digitization, reconstruction, and clustering
Author: Chao Peng (ANL)
Date: 04/30/2021
'''
import os
import numpy as np
import pandas as pd
import argparse
import matplotlib
from matplotlib import cm
from matplotlib import pyplot as plt
from matplotlib.ticker import MultipleLocator
from matplotlib.collections import PatchCollection
from matplotlib.patches import Rectangle
from mpl_toolkits.axes_grid1 import make_axes_locatable
from utils import *
import sys
# draw cluster in a 3d axis, expect a numpy array of (nhits, 4) shape with each row contains (x, y, z, E)
# note z and x axes are switched
def draw_hits3d(axis, data, cmap, units=('mm', 'mm', 'mm', 'MeV'), fontsize=24, **kwargs):
    """Scatter-plot calorimeter hits in 3D, colored by energy deposit.

    Parameters:
        axis: a matplotlib 3D axis to draw on (modified in place).
        data: numpy array of shape (nhits, 4); each row is (x, y, z, E).
        cmap: matplotlib colormap used for the energy colors/colorbar.
        units: labels for the (x, y, z, E) axes/colorbar.
        fontsize: base font size for labels and ticks.
        kwargs: forwarded to axis.scatter (e.g. marker size `s`).

    Returns the same axis. Note the z and x axes are swapped on purpose.
    """
    x, y, z, edep = np.transpose(data)
    # normalize energy to get colors in [0, 1]
    # FIX: the numerator must be parenthesized; the original
    # "edep - min(edep) / (max(edep) - min(edep))" divided only min(edep)
    # due to operator precedence, so colors were never properly normalized
    cvals = (edep - min(edep)) / (max(edep) - min(edep))
    # uniform-energy cluster: max == min gives 0/0 = nan; draw at full scale
    cvals[np.isnan(cvals)] = 1.0
    colors = cmap(cvals)
    # hits (z plotted on the horizontal axis, x on the vertical)
    axis.scatter(z, y, x, c=colors, marker='o', **kwargs)
    axis.tick_params(labelsize=fontsize)
    axis.set_zlabel('x ({})'.format(units[2]), fontsize=fontsize + 2, labelpad=fontsize)
    axis.set_ylabel('y ({})'.format(units[1]), fontsize=fontsize + 2, labelpad=fontsize)
    axis.set_xlabel('z ({})'.format(units[0]), fontsize=fontsize + 2, labelpad=fontsize)
    # colorbar spanning the raw (un-normalized) energy range
    cb = plt.colorbar(cm.ScalarMappable(norm=matplotlib.colors.Normalize(vmin=min(edep), vmax=max(edep)), cmap=cmap),
                      ax=axis, shrink=0.85)
    cb.ax.tick_params(labelsize=fontsize)
    cb.ax.get_yaxis().labelpad = fontsize
    cb.ax.set_ylabel('Energy Deposit ({})'.format(units[3]), rotation=90, fontsize=fontsize + 4)
    return axis
# draw a cylinder in 3d axes
# note z and x axes are switched
def draw_cylinder3d(axis, r, z, order=['x', 'y', 'z'], rsteps=500, zsteps=500, **kwargs):
    """Draw a translucent cylinder of radius ``r`` and half-length ``z``
    on a 3D axis, as two half-shell surfaces (y >= 0 and y <= 0).
    Extra keyword arguments are forwarded to ``axis.plot_surface``.
    Returns the same axis; note the z and x axes are swapped on purpose.
    """
    xs = np.linspace(-r, r, rsteps)
    zs = np.linspace(-z, z, zsteps)
    grid_x, grid_z = np.meshgrid(xs, zs)
    # y on the cylinder surface: y^2 = r^2 - x^2
    grid_y = np.sqrt(r**2 - grid_x**2)
    for half_shell in (grid_y, -grid_y):
        axis.plot_surface(grid_z, half_shell, grid_x, alpha=0.1, **kwargs)
    return axis
# fit the track of cluster and draw the fit
def draw_track_fit(axis, dfh, length=200, stop_layer=8, scat_kw=dict(), line_kw=dict()):
    """Fit a straight line through the per-layer mean hit positions
    (layers up to ``stop_layer``) and draw it as a dotted black line of
    half-length ``length`` centered on the centroid. Returns the axis.
    """
    selected = dfh[dfh['layer'] <= stop_layer]
    # one (z, y, x) point per layer: the mean hit position of that layer
    layer_means = selected.groupby('layer')[['z', 'y','x']].mean().values
    centroid = layer_means.mean(axis=0)
    # principal direction of the centered points via SVD
    _, _, basis = np.linalg.svd(layer_means - centroid)
    endpoints = basis[0] * np.mgrid[-length:length:2j][:, np.newaxis]
    endpoints += centroid
    axis.plot3D(*endpoints.T, 'k:')
    return axis
# color map
def draw_heatmap(axis, x, y, weights, bins=1000, cmin=0., cmap=plt.get_cmap('rainbow'), pc_kw=dict()):
    """Histogram (x, y) with the given weights and draw each bin above
    ``cmin`` as a colored rectangle on ``axis``. Colors span the full
    histogram range. Returns ``(axis, ScalarMappable)`` — the mappable
    can be handed to ``plt.colorbar``.
    """
    hist, xedges, yedges = np.histogram2d(x, y, weights=weights, bins=bins)
    cell_w = np.mean(np.diff(xedges))
    cell_h = np.mean(np.diff(yedges))
    lo, hi = hist.min(), hist.max()
    patches, colors = [], []
    # one rectangle per bin above threshold, colored by normalized weight
    for i in np.arange(len(xedges) - 1):
        for j in np.arange(len(yedges) - 1):
            value = hist[i][j]
            if value > cmin:
                patches.append(Rectangle((xedges[i], yedges[j]), cell_w, cell_h))
                colors.append(cmap((value - lo) / (hi - lo)))
    axis.add_collection(PatchCollection(patches, facecolor=colors, **pc_kw))
    axis.set_xlim(xedges[0], xedges[-1])
    axis.set_ylim(yedges[0], yedges[-1])
    return axis, cm.ScalarMappable(norm=matplotlib.colors.Normalize(vmin=lo, vmax=hi), cmap=cmap)
# execute this script
if __name__ == '__main__':
    # command-line interface; all plots are written under --outdir
    parser = argparse.ArgumentParser(description='Visualize the cluster from analysis')
    parser.add_argument('file', type=str, help='path to root file')
    parser.add_argument('-e', type=int, default=0, dest='iev', help='event number to plot')
    parser.add_argument('-c', type=int, default=0, dest='icl', help='cluster number to plot (0: all, -1: no cluster)')
    parser.add_argument('-s', type=int, default=8, dest='stop', help='stop layer for track fit')
    parser.add_argument('-o', type=str, default='./plots', dest='outdir', help='output directory')
    parser.add_argument('--compact', type=str, default='', dest='compact', help='compact file')
    parser.add_argument('-m', '--macros', type=str, default='rootlogon.C', dest='macros',
                        help='root macros to load (accept multiple paths separated by \",\")')
    parser.add_argument('-b', '--branch-name', type=str, default='RecoEcalBarrelHits', dest='branch',
                        help='branch name in the root file (outputLayerCollection from ImagingClusterReco)')
    parser.add_argument('--topo-size', type=float, default=2.0, dest='topo_size',
                        help='bin size for projection plot (mrad)')
    parser.add_argument('--topo-range', type=float, default=50.0, dest='topo_range',
                        help='half range for projection plot (mrad)')
    parser.add_argument('--zoom-factor', type=float, default=1.0, dest='zoom_factor',
                        help='factor for zoom-in')
    args = parser.parse_args()

    # we can read these values from xml file
    # (compact_constants comes from 'from utils import *')
    desc = compact_constants(args.compact, [
        'cb_ECal_RMin',
        'cb_ECal_ReadoutLayerThickness',
        'cb_ECal_ReadoutLayerNumber',
        'cb_ECal_Length'
    ])
    if not len(desc):
        # or define Ecal shapes (fallback defaults, in mm)
        rmin, thickness, length = 890, 20*(10. + 1.65), 860*2+500
    else:
        # convert cm to mm
        rmin = desc[0]*10.
        thickness = desc[1]*desc[2]*10.
        length = desc[3]*10.

    # read data (hit-level dataframe from the requested event/branch)
    load_root_macros(args.macros)
    df = get_hits_data(args.file, args.iev, branch=args.branch)
    if args.icl != 0:
        df = df[df['cluster'] == args.icl]
    if not len(df):
        print("Error: do not find any hits for cluster {:d} in event {:d}".format(args.icl, args.iev))
        exit(-1)
    # convert to polar coordinates (mrad), and stack all r values
    df['r'] = np.sqrt(df['x'].values**2 + df['y'].values**2 + df['z'].values**2)
    df['phi'] = np.arctan2(df['y'].values, df['x'].values)*1000.
    df['theta'] = np.arccos(df['z'].values/df['r'].values)*1000.
    df['eta'] = -np.log(np.tan(df['theta'].values/1000./2.))

    # truth: first MC particle of the event
    # NOTE(review): ROOT is presumably brought in via 'from utils import *'
    # — confirm, as it is not imported explicitly in this script
    dfmcp = get_mcp_simple(args.file, args.iev, 'mcparticles2').iloc[0]
    pdgbase = ROOT.TDatabasePDG()
    inpart = pdgbase.GetParticle(int(dfmcp['pid']))
    print("Incoming particle = {}, pdgcode = {}, charge = {}, mass = {}"\
          .format(inpart.GetName(), inpart.PdgCode(), inpart.Charge(), inpart.Mass()))
    # neutral particle, no need to consider magnetic field
    if np.isclose(inpart.Charge(), 0., rtol=1e-5):
        vec = dfmcp[['px', 'py', 'pz']].values
    # charge particle, use the cluster center
    else:
        flayer = df[df['layer'] == df['layer'].min()]
        vec = flayer[['x', 'y', 'z']].mean().values
    vec = vec/np.linalg.norm(vec)

    # particle line from (0, 0, 0) to the inner Ecal surface
    # NOTE(review): 'length' is reassigned here, overwriting the Ecal length
    # set above; the plots below use this new value — confirm intended
    length = rmin/np.sqrt(vec[0]**2 + vec[1]**2)
    pline = np.transpose(vec*np.mgrid[0:length:2j][:, np.newaxis])
    cmap = truncate_colormap(plt.get_cmap('jet'), 0.1, 0.9)
    os.makedirs(args.outdir, exist_ok=True)

    # cluster plot (full detector view)
    fig = plt.figure(figsize=(15, 12), dpi=160)
    ax = fig.add_subplot(111, projection='3d')
    # draw particle line
    ax.plot(*pline[[2, 1]], '--', zs=pline[0], color='green')
    # draw hits
    draw_hits3d(ax, df[['x', 'y', 'z', 'edep']].values, cmap, s=5.0)
    # inner and outer Ecal surfaces
    draw_cylinder3d(ax, rmin, length, rstride=10, cstride=10, color='royalblue')
    draw_cylinder3d(ax, rmin + thickness, length, rstride=10, cstride=10, color='forestgreen')
    ax.set_zlim(-(rmin + thickness), rmin + thickness)
    ax.set_ylim(-(rmin + thickness), rmin + thickness)
    ax.set_xlim(-length, length)
    fig.tight_layout()
    fig.savefig(os.path.join(args.outdir, 'e{}_cluster.png'.format(args.iev)))

    # zoomed-in cluster plot (window around the particle impact point)
    fig = plt.figure(figsize=(15, 12), dpi=160)
    ax = fig.add_subplot(111, projection='3d')
    # draw particle line
    ax.plot(*pline[[2, 1]], '--', zs=pline[0], color='green')
    # draw hits
    draw_hits3d(ax, df[['x', 'y', 'z', 'edep']].values, cmap, s=20.0)
    # draw_track_fit(ax, df, stop_layer=args.stop,
    #                scat_kw=dict(color='k', s=50.0), line_kw=dict(linestyle=':', color='k', lw=3))
    # view range
    center = (length + thickness/2.)*vec
    ranges = np.vstack([center - thickness/args.zoom_factor, center + thickness/args.zoom_factor]).T
    ax.set_zlim(*ranges[0])
    ax.set_ylim(*ranges[1])
    ax.set_xlim(*ranges[2])
    fig.tight_layout()
    fig.savefig(os.path.join(args.outdir, 'e{}_cluster_zoom.png'.format(args.iev)))

    # projection plot (eta-phi heatmap around the particle direction)
    # convert to mrad
    vecp = np.asarray([np.arccos(vec[2]), np.arctan2(vec[1], vec[0])])*1000.
    phi_rg = np.asarray([vecp[1] - args.topo_range, vecp[1] + args.topo_range])
    th_rg = np.asarray([vecp[0] - args.topo_range, vecp[0] + args.topo_range])
    eta_rg = np.resize(-np.log(np.tan(vecp[0]/1000./2.)), 2) + np.asarray([-args.topo_range, args.topo_range])/1000.
    fig, axs = plt.subplots(1, 2, figsize=(13, 12), dpi=160, gridspec_kw={'wspace':0., 'width_ratios': [12, 1]})
    ax, sm = draw_heatmap(axs[0], df['eta'].values, df['phi'].values, weights=df['edep'].values,
                          bins=(np.arange(*eta_rg, step=args.topo_size/1000.), np.arange(*phi_rg, step=args.topo_size)),
                          cmap=cmap, cmin=0., pc_kw=dict(alpha=0.8, edgecolor='k'))
    ax.set_ylabel(r'$\phi$ (mrad)', fontsize=32)
    ax.set_xlabel(r'$\eta$', fontsize=32)
    ax.tick_params(labelsize=28)
    ax.xaxis.set_minor_locator(MultipleLocator(5))
    ax.yaxis.set_minor_locator(MultipleLocator(5))
    ax.grid(linestyle=':', which='both')
    ax.set_axisbelow(True)
    # colorbar from the mappable returned by draw_heatmap
    cb = plt.colorbar(sm, cax=axs[1], shrink=0.85, aspect=1.2*20)
    cb.ax.tick_params(labelsize=28)
    cb.ax.get_yaxis().labelpad = 10
    cb.ax.set_ylabel('Energy Deposit (MeV)', rotation=90, fontsize=32)
    fig.savefig(os.path.join(args.outdir, 'e{}_topo.png'.format(args.iev)))
'''
A script to visualize the cluster layer-by-layer
It reads the output from the Juggler component ImagingClusterReco, which is supposed to be clusters of hits after
digitization, reconstruction, and clustering
Author: Chao Peng (ANL)
Date: 04/30/2021
'''
import os
import numpy as np
import pandas as pd
import argparse
import matplotlib
from matplotlib import cm
from matplotlib import pyplot as plt
from matplotlib.ticker import MultipleLocator
from matplotlib.collections import PatchCollection
from matplotlib.patches import Rectangle
from mpl_toolkits.axes_grid1 import make_axes_locatable
from matplotlib.backends.backend_pdf import PdfPages
from utils import *
import sys
import imageio
# color map
def draw_heatmap(axis, x, y, weights, bins=1000, vmin=None, vmax=None, cmap=plt.get_cmap('rainbow'), pc_kw=dict()):
    """Draw a weighted 2D histogram of (x, y) as colored rectangles on axis.

    Bins the data with np.histogram2d and draws one Rectangle per bin whose
    content is strictly above vmin.  Returns (axis, ScalarMappable); the
    mappable can be fed to plt.colorbar for a matching color scale.
    """
    w, xedg, yedg = np.histogram2d(x, y, weights=weights, bins=bins)
    xsz = np.mean(np.diff(xedg))
    ysz = np.mean(np.diff(yedg))
    # fix: compare against None with 'is' -- '== None' is unidiomatic and can
    # misbehave for array-like arguments that overload __eq__
    if vmin is None:
        vmin = w.min()
    if vmax is None:
        vmax = w.max()
    recs, clrs = [], []
    for i in np.arange(len(xedg) - 1):
        for j in np.arange(len(yedg) - 1):
            # only draw bins strictly above the lower color cut
            if w[i][j] > vmin:
                recs.append(Rectangle((xedg[i], yedg[j]), xsz, ysz))
                clrs.append(cmap((w[i][j] - vmin) / (vmax - vmin)))
    axis.add_collection(PatchCollection(recs, facecolor=clrs, **pc_kw))
    axis.set_xlim(xedg[0], xedg[-1])
    axis.set_ylim(yedg[0], yedg[-1])
    return axis, cm.ScalarMappable(norm=matplotlib.colors.Normalize(vmin=vmin, vmax=vmax), cmap=cmap)
# execute this script
if __name__ == '__main__':
    # Command-line driver: visualize one event's (optionally one cluster's)
    # hits layer by layer, saving per-layer pages to a PDF and an animated GIF.
    parser = argparse.ArgumentParser(description='Visualize the cluster (layer-wise) from analysis')
    parser.add_argument('file', type=str, help='path to root file')
    parser.add_argument('-e', type=int, default=0, dest='iev', help='event number to plot')
    parser.add_argument('-c', type=int, default=0, dest='icl', help='cluster number to plot (0: all, -1: no cluster)')
    parser.add_argument('-s', type=int, default=8, dest='stop', help='stop layer for track fit')
    parser.add_argument('-o', type=str, default='./plots', dest='outdir', help='output directory')
    parser.add_argument('-d', type=float, default=1.0, dest='dura', help='duration of gif')
    parser.add_argument('--compact', type=str, default='', dest='compact', help='compact file')
    parser.add_argument('-m', '--macros', type=str, default='rootlogon.C', dest='macros',
                        help='root macros to load (accept multiple paths separated by \",\")')
    parser.add_argument('-b', '--branch-name', type=str, default='RecoEcalBarrelHits', dest='branch',
                        help='branch name in the root file (outputLayerCollection from ImagingClusterReco)')
    parser.add_argument('--topo-size', type=float, default=2.0, dest='topo_size',
                        help='bin size for projection plot (mrad)')
    parser.add_argument('--topo-range', type=float, default=50.0, dest='topo_range',
                        help='half range for projection plot (mrad)')
    args = parser.parse_args()

    # we can read these values from xml file
    desc = compact_constants(args.compact, [
        'cb_ECal_RMin',
        'cb_ECal_ReadoutLayerThickness',
        'cb_ECal_ReadoutLayerNumber',
        'cb_ECal_Length'
    ])
    if not len(desc):
        # or define Ecal shapes
        rmin, thickness, length = 890, 20*(10. + 1.65), 860*2+500
    else:
        # convert cm to mm
        rmin = desc[0]*10.
        thickness = desc[1]*desc[2]*10.
        length = desc[3]*10.

    # read data
    load_root_macros(args.macros)
    df = get_hits_data(args.file, args.iev, args.branch)
    if args.icl != 0:
        df = df[df['cluster'] == args.icl]
    if not len(df):
        print("Error: do not find any hits for cluster {:d} in event {:d}".format(args.icl, args.iev))
        exit(-1)
    # convert to polar coordinates (mrad), and stack all r values
    df['r'] = np.sqrt(df['x'].values**2 + df['y'].values**2 + df['z'].values**2)
    df['phi'] = np.arctan2(df['y'].values, df['x'].values)*1000.
    df['theta'] = np.arccos(df['z'].values/df['r'].values)*1000.
    df['eta'] = -np.log(np.tan(df['theta'].values/1000./2.))

    # truth
    dfmcp = get_mcp_simple(args.file, args.iev, 'mcparticles2').iloc[0]
    pdgbase = ROOT.TDatabasePDG()
    inpart = pdgbase.GetParticle(int(dfmcp['pid']))
    print("Incoming particle = {}, pdgcode = {}, charge = {}, mass = {}"\
          .format(inpart.GetName(), inpart.PdgCode(), inpart.Charge(), inpart.Mass()))
    # neutral particle, no need to consider magnetic field
    if np.isclose(inpart.Charge(), 0., rtol=1e-5):
        vec = dfmcp[['px', 'py', 'pz']].values
    # charge particle, use the cluster center
    else:
        flayer = df[df['layer'] == df['layer'].min()]
        vec = flayer[['x', 'y', 'z']].mean().values
    vec = vec/np.linalg.norm(vec)

    # particle line from (0, 0, 0) to the inner Ecal surface
    # NOTE(review): this rebinds 'length' (previously the Ecal length above) --
    # confirm the Ecal length is not needed past this point
    length = rmin/np.sqrt(vec[0]**2 + vec[1]**2)
    pline = np.transpose(vec*np.mgrid[0:length:2j][:, np.newaxis])
    cmap = truncate_colormap(plt.get_cmap('jet'), 0.1, 0.9)

    # convert truth direction to (theta, phi) in mrad and build plot windows
    vecp = np.asarray([np.arccos(vec[2]), np.arctan2(vec[1], vec[0])])*1000.
    phi_rg = np.asarray([vecp[1] - args.topo_range, vecp[1] + args.topo_range])
    th_rg = np.asarray([vecp[0] - args.topo_range, vecp[0] + args.topo_range])
    eta_rg = np.resize(-np.log(np.tan(vecp[0]/1000./2.)), 2) + np.asarray([-args.topo_range, args.topo_range])/1000.

    os.makedirs(args.outdir, exist_ok=True)
    # cluster plot by layers (rebinned)
    pdf = PdfPages(os.path.join(args.outdir, 'e{}_c{}_layers.pdf'.format(args.iev, args.icl)))
    bpos, bprops = (0.5, 0.95), dict(boxstyle='round', facecolor='wheat', alpha=0.2)
    frames = []
    for i in np.arange(1, df['layer'].max() + 1, dtype=int):
        data = df[df['layer'] == i]
        fig, axs = plt.subplots(1, 2, figsize=(17, 16), dpi=160, gridspec_kw={'wspace':0., 'width_ratios': [16, 1]})
        ax, sm = draw_heatmap(axs[0], data['eta'].values, data['phi'].values, weights=data['edep'].values,
                              bins=(np.arange(*eta_rg, step=args.topo_size/1000.), np.arange(*phi_rg, step=args.topo_size)),
                              cmap=cmap, vmin=0.0, vmax=1.0, pc_kw=dict(alpha=0.8, edgecolor='k'))
        ax.set_ylabel(r'$\phi$ (mrad)', fontsize=28)
        ax.set_xlabel(r'$\eta$', fontsize=28)
        ax.tick_params(labelsize=24)
        ax.xaxis.set_minor_locator(MultipleLocator(5))
        ax.yaxis.set_minor_locator(MultipleLocator(5))
        ax.grid(linestyle=':', which='both')
        ax.set_axisbelow(True)
        ax.text(*bpos, 'Layer {:d}'.format(i), transform=ax.transAxes,
                fontsize=26, va='top', ha='center', bbox=bprops)
        cb = plt.colorbar(sm, cax=axs[1], shrink=0.85)
        cb.ax.tick_params(labelsize=24)
        cb.ax.get_yaxis().labelpad = 24
        cb.ax.set_ylabel('Energy Deposit (MeV)', rotation=90, fontsize=28)
        pdf.savefig(fig)
        # gif frames: round-trip through a temp png for imageio
        fig.savefig('ltmp.png')
        plt.close(fig)
        frames.append(imageio.imread('ltmp.png'))
    pdf.close()
    os.remove('ltmp.png')
    # build GIF
    imageio.mimsave(os.path.join(args.outdir, 'e{:d}_c{:d}_layers.gif'.format(args.iev, args.icl)),
                    frames, 'GIF', duration=args.dura)

    # cluster plot by layers (scatters)
    pdf = PdfPages(os.path.join(args.outdir, 'e{}_c{}_layers2.pdf'.format(args.iev, args.icl)))
    bpos, bprops = (0.5, 0.95), dict(boxstyle='round', facecolor='wheat', alpha=0.2)
    frames = []
    for i in np.arange(1, df['layer'].max() + 1, dtype=int):
        data = df[df['layer'] == i]
        fig, axs = plt.subplots(1, 2, figsize=(17, 16), dpi=160, gridspec_kw={'wspace':0., 'width_ratios': [16, 1]})
        ax = axs[0]
        colors = cmap(data['edep']/1.0)
        ax.scatter(data['eta'].values, data['phi'].values, c=colors, marker='s', s=15.0)
        ax.set_xlim(*eta_rg)
        ax.set_ylim(*phi_rg)
        ax.set_ylabel(r'$\phi$ (mrad)', fontsize=28)
        ax.set_xlabel(r'$\eta$', fontsize=28)
        ax.tick_params(labelsize=24)
        ax.xaxis.set_minor_locator(MultipleLocator(5))
        ax.yaxis.set_minor_locator(MultipleLocator(5))
        ax.grid(linestyle=':', which='both')
        ax.set_axisbelow(True)
        ax.text(*bpos, 'Layer {:d}'.format(i), transform=ax.transAxes,
                fontsize=26, va='top', ha='center', bbox=bprops)
        # NOTE(review): 'sm' here is the ScalarMappable left over from the last
        # iteration of the heatmap loop above -- the colorbar scale may not
        # match this scatter plot; confirm intent
        cb = plt.colorbar(sm, cax=axs[1], shrink=0.85)
        cb.ax.tick_params(labelsize=24)
        cb.ax.get_yaxis().labelpad = 24
        cb.ax.set_ylabel('Energy Deposit (MeV)', rotation=90, fontsize=28)
        pdf.savefig(fig)
        # gif frames
        fig.savefig('ltmp.png')
        plt.close(fig)
        frames.append(imageio.imread('ltmp.png'))
    pdf.close()
    os.remove('ltmp.png')
    # build GIF
    imageio.mimsave(os.path.join(args.outdir, 'e{:d}_c{:d}_layers2.gif'.format(args.iev, args.icl)),
                    frames, 'GIF', duration=args.dura)
'''
A script to generate the energy profile (layer-wise)
It reads the output from the Juggler component ImagingClusterReco, which is supposed to be clusters of hits after
digitization, reconstruction, and clustering
Author: Chao Peng (ANL)
Date: 04/30/2021
'''
import os
import numpy as np
import pandas as pd
import ROOT
from ROOT import gROOT, gInterpreter
import argparse
import matplotlib
from matplotlib import pyplot as plt
from matplotlib.ticker import MultipleLocator, MaxNLocator
import sys
from utils import *
def find_start_layer(grp, min_edep=0.5):
    """First layer at which the cumulative energy deposit exceeds min_edep.

    grp is a per-event group with 'layer' and 'edep' columns.  Returns 20
    (beyond the last layer) when the threshold is never reached.
    """
    layer_ids, layer_edeps = grp.sort_values('layer')[['layer', 'edep']].values.T
    cumulative = np.cumsum(layer_edeps)
    above = cumulative > min_edep
    return min(layer_ids[above]) if above.any() else 20
# execute this script
if __name__ == '__main__':
    # Command-line driver: build layer-wise energy-deposit profiles from the
    # clustered layer records and optionally append them to a csv for reuse.
    parser = argparse.ArgumentParser(description='Generate energy profiles')
    parser.add_argument('file', type=str, help='path to root file')
    parser.add_argument('-o', '--plot-dir', type=str, default='./plots', dest='outdir', help='output directory')
    parser.add_argument('-t', '--type', type=str, default='unknown', dest='type', help='profile type (used in save)')
    parser.add_argument('-e', '--energy', type=float, default=5., dest='energy', help='incident particle energy (GeV)')
    parser.add_argument('-s', '--save', type=str, default='', dest='save', help='path to save profile')
    parser.add_argument('-c', '--color', type=str, default='royalblue', dest='color', help='colors for bar plots')
    parser.add_argument('-b', '--branch-name', type=str, default='EcalBarrelClustersLayers', dest='branch',
                        help='branch name in the root file (outputLayerCollection from ImagingClusterReco)')
    parser.add_argument('-m', '--macros', type=str, default='rootlogon.C', dest='macros',
                        help='root macros to load (accept multiple paths separated by \",\")')
    args = parser.parse_args()
    load_root_macros(args.macros)

    # prepare data
    dfe = get_layers_data(args.file, branch=args.branch)
    dfe.loc[:, 'total_edep'] = dfe.groupby(['event', 'cluster'])['edep'].transform('sum')
    # dfe.loc[:, 'efrac'] = dfe['edep']/dfe['total_edep']
    # energy fraction relative to the incident energy (GeV -> MeV)
    dfe.loc[:, 'efrac'] = dfe['edep']/(args.energy*1000.)
    # keep only the leading cluster of each event
    dfe = dfe[dfe['cluster'] == 0]
    os.makedirs(args.outdir, exist_ok=True)

    # per-event start layer: first layer whose cumulative deposit passes 1 MeV
    slayer = dfe.groupby('event').apply(lambda x: find_start_layer(x, 1.0)).astype(int)
    dfe = dfe.merge(slayer.to_frame(name='slayer'), on='event')
    # dfe.loc[:, 'eff_layer'] = dfe['layer'] - dfe['slayer']
    # prof = dfe[dfe['eff_layer'] > 0].groupby('eff_layer')['edep'].describe()
    prof = dfe.groupby('layer')['efrac'].describe()
    # print(prof['mean'])

    # plot profiles
    bpos, bprops = (0.5, 0.95), dict(boxstyle='round', facecolor='wheat', alpha=0.3)
    # start-layer distribution, normalized to the number of events
    fig, ax = plt.subplots(figsize=(16, 9), dpi=160)
    nev = len(dfe['event'].unique())
    ax.hist(dfe.groupby('event')['slayer'].min(), weights=[1/float(nev)]*nev,
            ec='black', bins=np.arange(0.5, 10.5, step=1.0))
    ax.xaxis.set_major_locator(MaxNLocator(integer=True))
    ax.xaxis.set_minor_locator(MultipleLocator(1))
    ax.yaxis.set_minor_locator(MultipleLocator(1))
    ax.grid(linestyle=':', which='both')
    ax.tick_params(labelsize=24)
    ax.set_xlabel('Start Layer', fontsize=26)
    ax.set_ylabel('Normalized Counts', fontsize=26)
    # NOTE(review): 'Mininum' is a typo in the user-facing label (Minimum)
    ax.text(*bpos, 'Mininum Edep\n' + '{:.1f} MeV'.format(1.0),
            transform=ax.transAxes, fontsize=26, verticalalignment='top', bbox=bprops)
    fig.savefig(os.path.join(args.outdir, 'edep_start_{}_{}.png'.format(args.type, int(args.energy))))

    # mean layer-wise energy fraction with a one-sigma band
    fig, ax = plt.subplots(figsize=(16, 9), dpi=160)
    ax.plot(prof.index, prof['mean'].values*100., color=args.color, lw=2)
    # ax.fill_between(prof.index, prof['25%'], prof['75%'], color=args.color, alpha=0.3)
    ax.fill_between(prof.index, (prof['mean'] - prof['std'])*100., (prof['mean'] + prof['std'])*100.,
                    color=args.color, alpha=0.3)
    ax.xaxis.set_major_locator(MaxNLocator(integer=True))
    ax.xaxis.set_minor_locator(MultipleLocator(1))
    ax.yaxis.set_minor_locator(MultipleLocator(1))
    ax.grid(linestyle=':', which='both')
    ax.tick_params(labelsize=24)
    ax.set_xlabel('Layer', fontsize=26)
    ax.set_ylabel('Energy Deposit Percentage', fontsize=26)
    fig.savefig(os.path.join(args.outdir, 'efrac_{}_{}.png'.format(args.type, int(args.energy))))

    # edep fraction by layers: one histogram panel per selected layer
    layers = np.asarray([
        [1, 5, 8,],
        [10, 15, 20],
    ])
    layers_bins = np.linspace(0, 0.5, 50)
    fig, ax = plt.subplots(*layers.shape, figsize=(16, 9), dpi=160, sharex='col', sharey='all',
                           gridspec_kw=dict(hspace=0.05, wspace=0.05))
    for ax, layer in zip(ax.flat, layers.flatten()):
        data = dfe[dfe['layer'] == layer]
        ax.hist(data['efrac'].values*100., weights=[1/float(len(data))]*len(data), bins=layers_bins,
                ec='black', color=args.color)
        ax.tick_params(labelsize=24)
        ax.xaxis.set_minor_locator(MultipleLocator(1))
        ax.grid(linestyle=':', which='both')
        # ax.set_xlabel('Energy Deposit (MeV)', fontsize=26)
        # ax.set_ylabel('Normalized Counts', fontsize=26)
        mean, std = data.describe().loc[['mean', 'std'], 'edep'].values
        label = 'Layer {}'.format(layer)
        # + '\n' + r'$\mu = {:.3f}$ MeV'.format(mean)
        # + '\n' + r'$\sigma = {:.3f}$ MeV'.format(std)
        ax.text(*bpos, label, transform=ax.transAxes, fontsize=26, verticalalignment='top', bbox=bprops)
        ax.set_axisbelow(True)
        ax.set_yscale('log')
    fig.text(0.5, 0.02, 'Energy Deposit Percentage', fontsize=26, ha='center')
    fig.text(0.02, 0.5, 'Normalized Counts', fontsize=26, va='center', rotation=90)
    fig.savefig(os.path.join(args.outdir, 'efrac_layers_{}_{}.png'.format(args.type, int(args.energy))))

    # optionally append this profile to a csv (tagged with energy and type)
    # for later use by the e-pi separation script
    if args.save:
        prof.loc[:, 'energy'] = args.energy*1000.
        prof.loc[:, 'type'] = args.type
        if os.path.exists(args.save):
            prev = pd.read_csv(args.save).set_index('layer', drop=True)
            prof = pd.concat([prof, prev])
        prof.to_csv(args.save)
'''
A script to use the energy profile for e-pi separation
It reads the output from the Juggler component ImagingClusterReco, which is supposed to be clusters of hits after
digitization, reconstruction, and clustering
Author: Chao Peng (ANL)
Date: 04/30/2021
'''
import os
import numpy as np
import pandas as pd
import ROOT
from ROOT import gROOT, gInterpreter
import argparse
import matplotlib
from matplotlib import pyplot as plt
from matplotlib.ticker import MultipleLocator, MaxNLocator
import sys
from utils import *
def prepare_data(path, **kwargs):
    """Read layer records from a root file and attach per-cluster totals.

    Adds 'total_edep' (sum of 'edep' over each (event, cluster)) and 'efrac'
    (each layer's share of its cluster total).
    """
    df = get_layers_data(path, **kwargs)
    cluster_total = df.groupby(['event', 'cluster'])['edep'].transform('sum')
    df.loc[:, 'total_edep'] = cluster_total
    df.loc[:, 'efrac'] = df['edep']/df['total_edep']
    return df
def calc_chi2(grp, pr, lmin=5, lmax=12):
    """Reduced chi-square of a cluster's layer deposits against a profile.

    grp holds one cluster's 'layer'/'edep' rows; pr is a profile indexed by
    layer with 'mean'/'std' fractions and an 'energy' column (MeV) used as the
    scale.  Only layers in [lmin, lmax] contribute.
    """
    in_range = grp[(grp['layer'] >= lmin) & (grp['layer'] <= lmax)]
    scale = pr['energy'].mean()
    exp_mean, exp_std = pr.loc[in_range['layer'], ['mean', 'std']].values.T*scale
    measured = in_range['edep'].values
    return np.sqrt(np.sum(((measured - exp_mean)/exp_std)**2)/float(len(measured)))
# execute this script
if __name__ == '__main__':
    # Command-line driver: compare electron/pion energy profiles and histogram
    # the chi2 of each cluster against the electron reference profile.
    parser = argparse.ArgumentParser(description='epi_separation')
    parser.add_argument('efile', type=str, help='path to root file (electrons)')
    parser.add_argument('pifile', type=str, help='path to root file (pions)')
    parser.add_argument('--prof', type=str, default='profile.csv', help='path to electron profile')
    parser.add_argument('--plot-dir', type=str, default='./plots', dest='outdir', help='output directory')
    parser.add_argument('-b', '--branch-name', type=str, default='EcalBarrelClustersLayers', dest='branch',
                        help='branch name in the root file (outputLayerCollection from ImagingClusterReco)')
    parser.add_argument('-m', '--macros', type=str, default='rootlogon.C', dest='macros',
                        help='root macros to load (accept multiple paths separated by \",\")')
    args = parser.parse_args()
    os.makedirs(args.outdir, exist_ok=True)
    load_root_macros(args.macros)

    # prepare data
    dfe = prepare_data(args.efile, branch=args.branch)
    dfpi = prepare_data(args.pifile, branch=args.branch)

    colors = ['royalblue', 'indianred', 'limegreen']
    # the reference profile may be split over several csv files
    prof = pd.concat([pd.read_csv(p.strip()) for p in args.prof.split(',')]).reset_index(drop=True)

    # profile comparison: mean +/- std band for each particle type
    fig, ax = plt.subplots(figsize=(16, 9), dpi=160)
    for title, color in zip(sorted(prof['type'].unique()), colors):
        pr = prof[prof['type'] == title]
        ax.plot(pr.index, pr['mean'], color=color, lw=2)
        ax.fill_between(pr.index, (pr['mean'] - pr['std']), (pr['mean'] + pr['std']),
                        color=color, alpha=0.3, label=title)
    ax.xaxis.set_major_locator(MaxNLocator(integer=True))
    ax.xaxis.set_minor_locator(MultipleLocator(1))
    ax.yaxis.set_minor_locator(MultipleLocator(1))
    ax.grid(linestyle=':', which='both')
    ax.tick_params(labelsize=24)
    ax.set_xlabel('Layer', fontsize=26)
    ax.set_ylabel('Energy Deposit Percentage', fontsize=26)
    ax.legend(fontsize=26, loc='upper left')
    fig.savefig(os.path.join(args.outdir, 'compare_prof.png'))

    # check profile: chi2 of every cluster against the electron profile
    layer_range = (4, 12)
    pre = prof[prof['type'].str.lower() == 'electron']
    fig, ax = plt.subplots(figsize=(16, 9), dpi=160)
    chi2 = dfe.groupby(['event', 'cluster']).apply(lambda x: calc_chi2(x, pre, *layer_range))
    # fix: the weights list needs one entry per value -- multiplying a list by
    # a pandas Series ('*chi2') raised at runtime; '*len(chi2)' normalizes the
    # histogram to unit total
    ax.hist(chi2, bins=np.linspace(0, 5, 200), weights=[1/float(len(chi2))]*len(chi2),
            ec='royalblue', color='royalblue', alpha=0.5, label='electrons')
    chi2 = dfpi.groupby(['event', 'cluster']).apply(lambda x: calc_chi2(x, pre, *layer_range))
    # print(chi2[chi2 < 0.7])
    ax.hist(chi2, bins=np.linspace(0, 5, 200), weights=[1/float(len(chi2))]*len(chi2),
            ec='indianred', color='indianred', alpha=0.5, label='pions')
    ax.grid(linestyle=':', which='major')
    ax.set_axisbelow(True)
    ax.tick_params(labelsize=24)
    ax.set_xlabel(r'$\chi^2$', fontsize=26)
    ax.set_ylabel('Normalized Counts', fontsize=26)
    # ax.set_yscale('log')
    # ax.set_ylim(1e-3, 1.)
    ax.legend(title=r'$\chi^2$ for $E_{{dep}}$ in layer {:d} - {:d}'.format(*layer_range), title_fontsize=28, fontsize=26)
    fig.savefig(os.path.join(args.outdir, 'efrac_chi2.png'))
'''
A utility script to help the analysis of imaging calorimeter data
Author: Chao Peng (ANL)
Date: 04/30/2021
'''
import os
import ROOT
import numpy as np
import pandas as pd
import matplotlib
import DDG4
from ROOT import gROOT, gInterpreter
# helper function to truncate color map (for a better view from the rainbow colormap)
# helper function to truncate color map (for a better view from the rainbow colormap)
def truncate_colormap(cmap, minval=0.0, maxval=1.0, n=100):
    """Return a new colormap restricted to the [minval, maxval] slice of cmap,
    sampled at n points."""
    samples = cmap(np.linspace(minval, maxval, n))
    label = 'trunc({n},{a:.2f},{b:.2f})'.format(n=cmap.name, a=minval, b=maxval)
    return matplotlib.colors.LinearSegmentedColormap.from_list(label, samples)
# load root macros, input is an argument string
# load root macros, input is an argument string
def load_root_macros(arg_macros):
    """Load every existing ROOT macro from a comma-separated list of paths;
    missing paths are reported and skipped."""
    for raw_path in arg_macros.split(','):
        macro = raw_path.strip()
        if not os.path.exists(macro):
            print('\"{}\" does not exist, skip loading it.'.format(macro))
            continue
        gROOT.Macro(macro)
# read mc particles from root file
# read mc particles from root file
def get_mcp_data(path, evnums=None, branch='mcparticles2'):
    """Read all MC particles of the requested events into a
    DataFrame(event, px, py, pz, pid, status)."""
    rfile = ROOT.TFile(path)
    tree = rfile.events
    nentries = tree.GetEntries()
    if evnums is None:
        evnums = np.arange(nentries)
    elif isinstance(evnums, int):
        evnums = [evnums]
    # pre-allocated buffer: up to 5000 particles per requested event
    buf = np.zeros(shape=(5000*len(evnums), 6))
    nfilled = 0
    for iev in evnums:
        if iev >= nentries:
            print('Error: event {:d} is out of range (0 - {:d})'.format(iev, nentries - 1))
            continue
        tree.GetEntry(iev)
        # extract full mc particle data
        for part in getattr(tree, branch):
            buf[nfilled] = (iev, part.psx, part.psy, part.psz, part.pdgID, part.status)
            nfilled += 1
    return pd.DataFrame(data=buf[:nfilled], columns=['event', 'px', 'py', 'pz', 'pid', 'status'])
# read mc particles from root file
# read mc particles from root file
def get_mcp_simple(path, evnums=None, branch='mcparticles2'):
    """Read a single MC particle per requested event into a
    DataFrame(event, px, py, pz, pid, status)."""
    rfile = ROOT.TFile(path)
    tree = rfile.events
    nentries = tree.GetEntries()
    if evnums is None:
        evnums = np.arange(nentries)
    elif isinstance(evnums, int):
        evnums = [evnums]
    # one row per requested event
    buf = np.zeros(shape=(len(evnums), 6))
    nfilled = 0
    for iev in evnums:
        if iev >= nentries:
            print('Error: event {:d} is out of range (0 - {:d})'.format(iev, nentries - 1))
            continue
        tree.GetEntry(iev)
        # NOTE(review): always takes entry [2] of the branch -- presumably the
        # incident primary in this sample layout; confirm against the producer
        part = getattr(tree, branch)[2]
        buf[nfilled] = (iev, part.psx, part.psy, part.psz, part.pdgID, part.status)
        nfilled += 1
    return pd.DataFrame(data=buf[:nfilled], columns=['event', 'px', 'py', 'pz', 'pid', 'status'])
# read hits data from root file
# read hits data from root file
def get_hits_data(path, evnums=None, branch='RecoEcalBarrelHits'):
    """Read reconstructed hits into a
    DataFrame(event, cluster, layer, x, y, z, edep)."""
    rfile = ROOT.TFile(path)
    tree = rfile.events
    nentries = tree.GetEntries()
    if evnums is None:
        evnums = np.arange(nentries)
    elif isinstance(evnums, int):
        evnums = [evnums]
    # pre-allocated buffer: up to 2000 hits per requested event
    buf = np.zeros(shape=(2000*len(evnums), 7))
    nfilled = 0
    for iev in evnums:
        if iev >= nentries:
            print('Error: event {:d} is out of range (0 - {:d})'.format(iev, nentries - 1))
            continue
        tree.GetEntry(iev)
        for hit in getattr(tree, branch):
            buf[nfilled] = (iev, hit.clusterID, hit.layerID, hit.position.x, hit.position.y, hit.position.z, hit.edep)
            nfilled += 1
    return pd.DataFrame(data=buf[:nfilled], columns=['event', 'cluster', 'layer', 'x', 'y', 'z', 'edep'])
# read layers data from root file
# read layers data from root file
def get_layers_data(path, evnums=None, branch="EcalBarrelClustersLayers"):
    """Read cluster-layer records into a
    DataFrame(event, cluster, layer, x, y, z, edep)."""
    rfile = ROOT.TFile(path)
    tree = rfile.events
    nentries = tree.GetEntries()
    if evnums is None:
        evnums = np.arange(nentries)
    elif isinstance(evnums, int):
        evnums = [evnums]
    # pre-allocated buffer: up to 2000 layer records per requested event
    buf = np.zeros(shape=(2000*len(evnums), 7))
    nfilled = 0
    for iev in evnums:
        if iev >= nentries:
            print('Error: event {:d} is out of range (0 - {:d})'.format(iev, nentries - 1))
            continue
        tree.GetEntry(iev)
        for layer in getattr(tree, branch):
            buf[nfilled] = (iev, layer.clusterID, layer.layerID,
                            layer.position.x, layer.position.y, layer.position.z, layer.edep)
            nfilled += 1
    return pd.DataFrame(data=buf[:nfilled], columns=['event', 'cluster', 'layer', 'x', 'y', 'z', 'edep'])
# read clusters data from root file
# read clusters data from root file
def get_clusters_data(path, evnums=None, branch='EcalBarrelClustersReco'):
    """Read reconstructed clusters into a
    DataFrame(event, cluster, nhits, edep, cl_theta, cl_phi)."""
    rfile = ROOT.TFile(path)
    tree = rfile.events
    nentries = tree.GetEntries()
    if evnums is None:
        evnums = np.arange(nentries)
    elif isinstance(evnums, int):
        evnums = [evnums]
    # pre-allocated buffer: up to 20 clusters per requested event
    buf = np.zeros(shape=(20*len(evnums), 6))
    nfilled = 0
    for iev in evnums:
        if iev >= nentries:
            print('Error: event {:d} is out of range (0 - {:d})'.format(iev, nentries - 1))
            continue
        tree.GetEntry(iev)
        for icl, cl in enumerate(getattr(tree, branch)):
            buf[nfilled] = (iev, icl, cl.nhits, cl.edep, cl.cl_theta, cl.cl_phi)
            nfilled += 1
    return pd.DataFrame(data=buf[:nfilled], columns=['event', 'cluster', 'nhits', 'edep', 'cl_theta', 'cl_phi'])
def compact_constants(path, names):
    """Read named constants from a DD4hep compact (xml) geometry file.

    Returns a list of float values, one per entry in names, or an empty list
    when the file does not exist or any constant cannot be read.
    """
    if not os.path.exists(path):
        print('Cannot find compact file \"{}\".'.format(path))
        return []
    kernel = DDG4.Kernel()
    description = kernel.detectorDescription()
    kernel.loadGeometry("file:{}".format(path))
    try:
        vals = [description.constantAsDouble(n) for n in names]
    # fix: a bare 'except:' also swallows SystemExit/KeyboardInterrupt;
    # only convert genuine lookup/conversion errors into an empty result
    except Exception:
        print('Fail to extract values from {}, return empty.'.format(names))
        vals = []
    kernel.terminate()
    return vals
# SPDX-License-Identifier: LGPL-3.0-or-later
# Copyright (C) 2022 Sylvester Joosten

################################################################################
# Package: JugAlgo
################################################################################

# Header-only library exposing the Jug::Algo Gaudi bindings for `algorithms`
gaudi_add_header_only_library(JugAlgo
  LINK
  Gaudi::GaudiKernel
  algorithms::core
  JugBase
)

# Gaudi component module built from all sources under src/components;
# CONFIGURE_DEPENDS re-runs the glob at build time so new files are noticed
file(GLOB JugAlgoPlugins_sources CONFIGURE_DEPENDS src/components/*.cpp)
gaudi_add_module(JugAlgoPlugins
  SOURCES
  ${JugAlgoPlugins_sources}
  LINK
  Gaudi::GaudiKernel
  JugBase
  JugAlgo
  algorithms::core
)

target_include_directories(JugAlgoPlugins PUBLIC
  $<BUILD_INTERFACE:${CMAKE_CURRENT_LIST_DIR}>
  $<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>
)

# Gaudi-generated component boilerplate triggers -Wsuggest-override
target_compile_options(JugAlgoPlugins PRIVATE -Wno-suggest-override)

install(TARGETS JugAlgo JugAlgoPlugins
  EXPORT JugAlgoTargets
  RUNTIME DESTINATION "${CMAKE_INSTALL_BINDIR}" COMPONENT bin
  LIBRARY DESTINATION "${CMAKE_INSTALL_LIBDIR}" COMPONENT shlib
  COMPONENT dev)
# NOTE(review): the trailing bare 'COMPONENT dev' is not attached to an
# artifact kind (RUNTIME/LIBRARY/ARCHIVE/...) -- confirm whether it was meant
# as e.g. 'ARCHIVE DESTINATION ... COMPONENT dev'

if(BUILD_TESTING)
  enable_testing()
endif()
Juggler bindings for the `algorithms` library.
// SPDX-License-Identifier: LGPL-3.0-or-later
// Copyright (C) 2022 Sylvester Joosten
#include <string>
#include <type_traits>
#include <algorithms/algorithm.h>
#include <algorithms/type_traits.h>
#include <Gaudi/Algorithm.h>
#include <GaudiKernel/Service.h>
#include <JugAlgo/IAlgoServiceSvc.h>
#include <JugAlgo/detail/DataProxy.h>
// Boilerplate: declare a Gaudi component `ns::algorithm` wrapping the
// framework-independent implementation `impl` via Jug::Algo::Algorithm.
// The generated struct only forwards the standard Gaudi (name, service
// locator) constructor; DECLARE_COMPONENT registers it with Gaudi.
#define JUGALGO_DEFINE_ALGORITHM(algorithm, impl, ns)                                              \
  using algorithm##Base = Jug::Algo::Algorithm<impl>;                                              \
  namespace ns {                                                                                   \
  struct algorithm : algorithm##Base {                                                             \
    algorithm(const std::string& name, ISvcLocator* svcLoc) : algorithm##Base(name, svcLoc) {}     \
  };                                                                                               \
  DECLARE_COMPONENT(algorithm)                                                                     \
  }
namespace Jug::Algo {

// Gaudi adapter for a framework-independent algorithms::Algorithm
// implementation (AlgoImpl): mirrors the underlying algorithm's properties as
// Gaudi properties and binds its input/output collections to the event store
// through detail::DataProxy.
template <class AlgoImpl> class Algorithm : public Gaudi::Algorithm {
public:
  using algo_type = AlgoImpl;
  using input_type = typename algo_type::input_type;
  using output_type = typename algo_type::output_type;
  using Input = typename algo_type::Input;
  using Output = typename algo_type::Output;

  Algorithm(const std::string& name, ISvcLocator* svcLoc)
      : Gaudi::Algorithm(name, svcLoc)
      , m_algo{name}
      , m_output{this, m_algo.outputNames()}
      , m_input{this, m_algo.inputNames()} {
    // Gaudi properties must exist before Gaudi configures this component
    defineProperties();
  }

  StatusCode initialize() override {
    debug() << "Initializing " << name() << endmsg;

    // Algorithms uses exceptions, Gaudi uses StatusCode --> catch and propagate
    try {
      // Grab the AlgoServiceSvc (sets up the shared algorithms services)
      m_algo_svc = service("AlgoServiceSvc");
      if (!m_algo_svc) {
        error() << "Unable to get an instance of the AlgoServiceSvc" << endmsg;
        return StatusCode::FAILURE;
      }

      // Forward the log level of this algorithm
      const algorithms::LogLevel level{
          static_cast<algorithms::LogLevel>(msgLevel() > 0 ? msgLevel() - 1 : 0)};
      debug() << "Setting the logger level to " << algorithms::logLevelName(level) << endmsg;
      m_algo.level(level);

      // Init our data structures (creates the underlying data handles)
      debug() << "Initializing data structures" << endmsg;
      m_input.init();
      m_output.init();

      // configure properties (copy Gaudi property values into the algorithm)
      debug() << "Configuring properties" << endmsg;
      initProperties();

      // validate properties
      debug() << "Validating properties" << endmsg;
      m_algo.validate();

      // call the internal algorithm init
      debug() << "Initializing underlying algorithm " << m_algo.name() << endmsg;
      m_algo.init();
    } catch (const std::exception& e) {
      fatal() << e.what() << endmsg;
      return StatusCode::FAILURE;
    }

    return StatusCode::SUCCESS;
  }

  // Run the wrapped algorithm on the current event's inputs, writing results
  // through the output proxy; exceptions become StatusCode::FAILURE.
  StatusCode execute(const EventContext&) const override {
    try {
      m_algo.process(m_input.get(), m_output.get());
    } catch (const std::exception& e) {
      error() << e.what() << endmsg;
      return StatusCode::FAILURE;
    }
    return StatusCode::SUCCESS;
  }

protected:
  // Thin forwards to the underlying algorithm's property interface
  template <typename T> void setAlgoProp(std::string_view name, T&& value) {
    m_algo.template setProperty<T>(name, value);
  }
  template <typename T> T getAlgoProp(std::string name) const {
    return m_algo.template getProperty<T>(name);
  }
  bool hasAlgoProp(std::string_view name) const { return m_algo.hasProperty(name); }

private:
  // to be called during construction (before init()); creates one Gaudi
  // property per algorithm property so they are job-option configurable
  void defineProperties() {
    for (const auto& [key, prop] : m_algo.getProperties()) {
      std::visit(
          [this, key = key](auto&& val) {
            using T = std::decay_t<decltype(val)>;
            this->m_props.emplace(
                key, std::make_unique<Gaudi::Property<T>>(this, std::string(key), val));
          },
          prop.get());
    }
  }

  // to be called during init() --> will actually set the underlying algo properties
  void initProperties() {
    for (const auto& [key, prop] : m_algo.getProperties()) {
      std::visit(
          [this, key = key](auto&& val) {
            using T = std::decay_t<decltype(val)>;
            // the variant alternative tells us the concrete Gaudi::Property type
            const auto* gaudi_prop = static_cast<Gaudi::Property<T>*>(this->m_props.at(key).get());
            const auto prop_val = gaudi_prop->value();
            this->m_algo.setProperty(key, prop_val);
          },
          prop.get());
    }
  }

  algo_type m_algo;
  SmartIF<IAlgoServiceSvc> m_algo_svc;
  detail::DataProxy<output_type> m_output;
  detail::DataProxy<input_type> m_input;
  std::map<std::string_view, std::unique_ptr<Gaudi::Details::PropertyBase>> m_props;
};

} // namespace Jug::Algo
// SPDX-License-Identifier: LGPL-3.0-or-later
// Copyright (C) 2022 Sylvester Joosten
#pragma once

#include <GaudiKernel/IService.h>

// Juggler bindings for all required algorithms services
// Will setup all necessary services (using the algorithms::ServiceSvc)
//
// Marker interface: all the work happens during Gaudi service initialization,
// so the interface adds nothing beyond the IService contract.
class GAUDI_API IAlgoServiceSvc : virtual public IService {
public:
  DeclareInterfaceID(IAlgoServiceSvc, 1, 0);
  virtual ~IAlgoServiceSvc() {}
  // No actual API needed, as this service will do all the magic behind the screens
};
#pragma once
#include <string>
#include <tuple>
#include <vector>
#include <algorithms/algorithm.h>
#include <algorithms/type_traits.h>
#include <Gaudi/Algorithm.h>
#include <k4FWCore/DataHandle.h>
namespace Jug::Algo::detail {
// Whether a data element reads from (kInput) or writes to (kOutput) the event store
enum class DataMode : unsigned { kInput, kOutput };
// Generate properties for each of the data arguments
//
// Binds one algorithm data member of type T to the Gaudi event store: the
// collection name becomes a Gaudi string property, and init() creates the
// matching DataHandle (reader or writer, selected by kMode).
template <class T, DataMode kMode> class DataElement {
public:
  using value_type = std::conditional_t<kMode == DataMode::kInput, algorithms::input_type_t<T>,
                                        algorithms::output_type_t<T>>;
  using data_type = algorithms::data_type_t<T>;
  constexpr static const bool kIsOptional = algorithms::is_optional_v<T>;
  template <class Owner>
  DataElement(Owner* owner, std::string_view name)
      : m_data_name{std::make_unique<Gaudi::Property<std::string>>(owner, std::string(name), "")}
      , m_owner{owner} {}
  // Create the handle once the collection-name property has been configured.
  // An empty name is only acceptable for optional members.
  void init() {
    if (m_handle) {
      // treat error: already initialized
    }
    if (!m_data_name->empty()) {
      m_handle = std::make_unique<DataHandle<data_type>>(
          *m_data_name,
          ((kMode == DataMode::kInput) ? Gaudi::DataHandle::Reader : Gaudi::DataHandle::Writer),
          m_owner);
    } else if (!algorithms::is_optional_v<T>) {
      // treat error: member not optional but no collection name given
    }
  }
  // Fetch (input) or create-and-register (output) the collection; optional
  // members without a configured handle yield nullptr.
  value_type get() const {
    if constexpr (kIsOptional) {
      if (!m_handle) {
        return nullptr;
      }
    }
    if constexpr (kMode == DataMode::kInput) {
      return m_handle->get();
    } else {
      return m_handle->createAndPut();
    }
  }

private:
  std::unique_ptr<Gaudi::Property<std::string>>
      m_data_name; // This needs to be a pointer, else things go wrong once we go through
                   // createElements - probably something about passing the Property through an
                   // rvalue (or copy) constructor
  std::unique_ptr<DataHandle<data_type>> m_handle;
  gsl::not_null<Gaudi::Algorithm*> m_owner;
};
// Specialization for vectors
template <class T, class A, DataMode kMode> class DataElement<std::vector<T, A>, kMode> {
public:
using value_type = std::conditional_t<kMode == DataMode::kInput, algorithms::input_type_t<T>,
algorithms::output_type_t<T>>;
using data_type = algorithms::data_type_t<T>;
template <class Owner>
DataElement(Owner* owner, std::string_view name)
: m_data_names{std::make_unique<Gaudi::Property<std::vector<std::string>>>(
owner, std::string(name), {})}
, m_owner{owner} {}
void init() {
if (!m_handles.empty()) {
// treat error: already initialized
}
if (!m_data_names->empty()) {
for (const auto& name : *m_data_names) {
if (!name.empty()) {
m_handles.emplace_back(std::make_unique<DataHandle<data_type>>(
name,
(kMode == DataMode::kInput ? Gaudi::DataHandle::Reader : Gaudi::DataHandle::Writer),
m_owner));
} else {
// treat error: empty name
}
}
} else {
// OK if nothing given here, no error
}
}
std::vector<value_type> get() const {
std::vector<value_type> ret;
for (auto& handle : m_handles) {
if constexpr (kMode == DataMode::kInput) {
ret.emplace_back(handle->get());
} else {
ret.emplace_back(handle->createAndPut());
}
}
return ret;
}
private:
std::unique_ptr<Gaudi::Property<std::vector<std::string>>> m_data_names;
std::vector<std::unique_ptr<DataHandle<T>>> m_handles;
gsl::not_null<Gaudi::Algorithm*> m_owner;
};
// Build one DataElement per member of the algorithm's data tuple, pairing each
// tuple element type with its configured collection name.
template <DataMode kMode, class Owner, class NamesArray, class Tuple, size_t... I>
auto createElements(Owner* owner, const NamesArray& names, const Tuple&, std::index_sequence<I...>)
    -> std::tuple<DataElement<std::tuple_element_t<I, Tuple>, kMode>...> {
  return {DataElement<std::tuple_element_t<I, Tuple>, kMode>(owner, std::get<I>(names))...};
}

// Call ::get() on each element of the HandleTuple, and return the result in the format of
// ReturnTuple
template <class ReturnTuple, class HandleTuple, size_t... I>
ReturnTuple getElements(HandleTuple& handles, std::index_sequence<I...>) {
  return {std::get<I>(handles).get()...};
}
// Create data handle structure for all members
//
// Aggregates a DataElement for every member of an algorithm's Input or Output
// struct (Data); kMode is deduced from whether Data is an input type.
template <class Data> class DataProxy {
public:
  static constexpr DataMode kMode =
      (algorithms::is_input_v<Data> ? DataMode::kInput : DataMode::kOutput);
  using value_type = typename Data::value_type;
  using data_type = typename Data::data_type;
  constexpr static size_t kSize = Data::kSize;
  using names_type = typename Data::key_type;
  // tuple of DataElement<...>, one per member, deduced from createElements
  using elements_type =
      decltype(createElements<kMode>(std::declval<Gaudi::Algorithm*>(), names_type(), data_type(),
                                     std::make_index_sequence<kSize>()));
  template <class Owner>
  DataProxy(Owner* owner, const names_type& names)
      : m_elements{
            createElements<kMode>(owner, names, data_type(), std::make_index_sequence<kSize>())} {}
  // Initialize every underlying DataElement (creates the data handles)
  void init() {
    std::apply([](auto&&... el) { (el.init(), ...); }, m_elements);
  }
  // Gather all members' collections into the algorithm's value_type tuple
  value_type get() const {
    return getElements<value_type>(m_elements, std::make_index_sequence<kSize>());
  }

private:
  elements_type m_elements;
};
} // namespace Jug::Algo::detail
// SPDX-License-Identifier: LGPL-3.0-or-later
// Copyright (C) 2022 Sylvester Joosten
#include <string>
#include <GaudiKernel/Service.h>
#include <JugAlgo/IAlgoServiceSvc.h>
#include <k4Interface/IGeoSvc.h>
#include <algorithms/geo.h>
#include <algorithms/logger.h>
#include <algorithms/random.h>
#include <algorithms/service.h>
// too many services? :P
// too many services? :P
//
// Gaudi service that bridges framework services to the framework-independent
// algorithms::ServiceSvc: during initialize() it registers initializers for
// the algorithms logger, geometry, and random services.
class AlgoServiceSvc : public extends<Service, IAlgoServiceSvc> {
public:
  AlgoServiceSvc(const std::string& name, ISvcLocator* svc) : base_class(name, svc) {}
  virtual ~AlgoServiceSvc() = default;
  // Registers all algorithms:: service initializers and validates the setup.
  virtual StatusCode initialize() final;
  // No teardown needed.
  virtual StatusCode finalize() final { return StatusCode::SUCCESS; }

private:
  // Gaudi geometry service; its detector is handed to algorithms::GeoSvc.
  SmartIF<IGeoSvc> m_geoSvc;
  // Seed forwarded to algorithms::RandomSvc (defaults to 1).
  Gaudi::Property<size_t> m_randomSeed{this, "randomSeed", 1};
};
DECLARE_COMPONENT(AlgoServiceSvc)
// Implementation
//
// Wires the Gaudi framework into the framework-independent algorithms
// services: the algorithms::LogSvc is routed through the Gaudi message
// stream, algorithms::GeoSvc receives the DD4hep detector from the Gaudi
// GeoSvc, and algorithms::RandomSvc is seeded from the `randomSeed` property.
// Returns FAILURE if the base Service fails to initialize, if GeoSvc cannot
// be located, or if the algorithms::ServiceSvc setup throws.
StatusCode AlgoServiceSvc::initialize() {
  StatusCode sc = Service::initialize();
  if (!sc.isSuccess()) {
    fatal() << "Error initializing AlgoServiceSvc" << endmsg;
    return sc;
  }
  try {
    auto& serviceSvc = algorithms::ServiceSvc::instance();

    info() << "ServiceSvc declared " << serviceSvc.services().size() << " services" << endmsg;

    // Now register custom initializers
    // Starting with the logger: map our Gaudi output level to the algorithms
    // level (Gaudi levels are offset by one) and forward every algorithms log
    // message to the matching Gaudi stream.
    const algorithms::LogLevel level{
        static_cast<algorithms::LogLevel>(msgLevel() > 0 ? msgLevel() - 1 : 0)};
    info() << "Setting up algorithms::LogSvc with default level "
           << algorithms::logLevelName(level) << endmsg;
    serviceSvc.setInit<algorithms::LogSvc>([this, level](auto&& logger) {
      this->info() << "Initializing the algorithms::LogSvc using the Gaudi logger" << endmsg;
      logger.defaultLevel(level);
      logger.init(
          [this](const algorithms::LogLevel l, std::string_view caller, std::string_view msg) {
            const std::string text = fmt::format("[{}] {}", caller, msg);
            if (l == algorithms::LogLevel::kCritical) {
              this->fatal() << text << endmsg;
            } else if (l == algorithms::LogLevel::kError) {
              this->error() << text << endmsg;
            } else if (l == algorithms::LogLevel::kWarning) {
              this->warning() << text << endmsg;
            } else if (l == algorithms::LogLevel::kInfo) {
              this->info() << text << endmsg;
            } else if (l == algorithms::LogLevel::kDebug) {
              this->debug() << text << endmsg;
            } else if (l == algorithms::LogLevel::kTrace) {
              this->verbose() << text << endmsg;
            }
          });
    });

    // geo Service: hand the DD4hep detector from the Gaudi GeoSvc to the
    // algorithms::GeoSvc.
    info() << "Setting up algorithms::GeoSvc" << endmsg;
    m_geoSvc = service("GeoSvc");
    if (!m_geoSvc) {
      error() << "Unable to locate Geometry Service. "
              << "Make sure you have GeoSvc in the right order in the configuration." << endmsg;
      return StatusCode::FAILURE;
    }
    serviceSvc.setInit<algorithms::GeoSvc>([this](auto&& g) {
      // BUGFIX: message previously said "RandomSvc" — this initializes GeoSvc.
      this->info() << "Initializing algorithms::GeoSvc with the Juggler GeoSvc" << endmsg;
      g.init(m_geoSvc->getDetector());
    });

    // setup random service, seeded from the `randomSeed` property
    info() << "Setting up algorithms::RandomSvc\n"
           << " --> using internal STL 64-bit MT engine\n"
           << " --> seed set to " << m_randomSeed << endmsg; // BUGFIX: space before seed value
    serviceSvc.setInit<algorithms::RandomSvc>([this](auto&& r) {
      this->info() << "Initializing the algorithms::RandomSvc" << endmsg;
      r.setProperty("seed", m_randomSeed);
      r.init();
    });

    info() << "Initializing services" << endmsg;
    // first set own log level to verbose so we actually display everything that is requested
    // (this overrides what was initially set through the OutputLevel property)
    updateMsgStreamOutputLevel(MSG::VERBOSE);

    // Validate our service setup (this will throw if we encounter issues)
    serviceSvc.init();

  } catch (const std::exception& e) {
    fatal() << e.what() << endmsg;
    return StatusCode::FAILURE;
  }

  info() << "AlgoServiceSvc initialized successfully" << endmsg;
  return StatusCode::SUCCESS;
}
# SPDX-License-Identifier: LGPL-3.0-or-later
# Copyright (C) 2022 Wouter Deconinck, Whitney Armstrong

################################################################################
# Package: JugBase
################################################################################
# NOTE(review): this span was a garbled side-by-side revision diff; the text
# below is the reconstructed target-revision file — verify against the repo.

# JugBase itself is header-only; consumers inherit its link interface.
gaudi_add_header_only_library(JugBase
  LINK
  Gaudi::GaudiKernel
  podio::podioRootIO
  ROOT::Core ROOT::RIO ROOT::Tree
  DD4hep::DDRec
  ActsCore ActsPluginDD4hep
  k4FWCore::k4FWCore
)

# Gaudi component module bundling all JugBase plugins. CONFIGURE_DEPENDS
# re-checks the glob at build time so newly added components are picked up
# without a manual re-configure.
file(GLOB JugBasePlugins_sources CONFIGURE_DEPENDS src/components/*.cpp)
gaudi_add_module(JugBasePlugins
  SOURCES
  ${JugBasePlugins_sources}
  LINK
  Gaudi::GaudiKernel
  ROOT::Core ROOT::RIO ROOT::Tree
  JugBase
  EDM4HEP::edm4hep
  DD4hep::DDRec
  ActsCore ActsPluginDD4hep ActsPluginJson
  EDM4EIC::edm4eic
)

# Headers come from the source tree while building and from the install
# prefix once installed.
target_include_directories(JugBasePlugins PUBLIC
  $<BUILD_INTERFACE:${CMAKE_CURRENT_LIST_DIR}>
  $<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>
)

# Suppresses -Wsuggest-override for this module only; presumably needed for
# Gaudi-generated overriding members — TODO confirm.
target_compile_options(JugBasePlugins PRIVATE -Wno-suggest-override)

install(TARGETS JugBase JugBasePlugins
  EXPORT JugBaseTargets
  RUNTIME DESTINATION "${CMAKE_INSTALL_BINDIR}" COMPONENT bin
  LIBRARY DESTINATION "${CMAKE_INSTALL_LIBDIR}" COMPONENT shlib
  COMPONENT dev)

if(BUILD_TESTING)
  enable_testing()
endif()

# Legacy gaudi_add_test registrations, kept (disabled) until they are ported
# to plain add_test:
#add_test(NAME ProduceForReadTest
#  WORKING_DIRECTORY ${PROJECT_SOURCE_DIR}
#  COMMAND ${CMAKE_BINARY_DIR}/run ${PROJECT_SOURCE_DIR}/JugBase/scripts/gaudirun tests/options/simple_producer.py)
#add_test(NAME ReadTest
#  WORKING_DIRECTORY ${PROJECT_SOURCE_DIR}
#  COMMAND ${CMAKE_BINARY_DIR}/run ${PROJECT_SOURCE_DIR}/JugBase/scripts/gaudirun tests/options/simple_reader.py)
#add_test(NAME ReadGeoTest
#  WORKING_DIRECTORY ${PROJECT_SOURCE_DIR}
#  COMMAND ${CMAKE_BINARY_DIR}/run ${PROJECT_SOURCE_DIR}/JugBase/scripts/gaudirun tests/options/reader_with_geosvc.py)
#add_test(NAME CheckReadCollectionSize
#  ENVIRONMENT PYTHONPATH+=${PODIO_PYTHON_DIR}
#  WORKING_DIRECTORY ${PROJECT_SOURCE_DIR}
#  COMMAND python JugBase/tests/scripts/check_coll_after_read.py
#  DEPENDS ReadTest)