Commit 5cf02d63 authored by Whitney Armstrong

Using small number of s3 data events for benchmark

parent b64d74d1
1 merge request: !69 "Using small number of s3 data events for benchmark"
@@ -47,9 +47,20 @@ common:detector:
     - mkdir -p config
     - print_env.sh
+.phy_benchmark:
+  needs:
+    - ["common:detector"]
+  before_script:
+    - source .local/bin/env.sh
+    - ls -lrtha
+    - ln -s "${LOCAL_DATA_PATH}/sim_output" sim_output
+    - ln -s "${LOCAL_DATA_PATH}/datasets/data" data
+    - ls -lrtha
+    - bash bin/get_calibrations
 include:
   - local: 'benchmarks/dis/config.yml'
-  - local: 'benchmarks/dvmp/config.yml'
+  #- local: 'benchmarks/dvmp/config.yml'
   - local: 'benchmarks/dvcs/config.yml'
 summary:
...
 dvcs:process:
   stage: process
-  timeout: 2 hour
+  extends: .phy_benchmark
+  tags:
+    - s3
   needs: ["common:detector"]
   script:
+    - bash benchmarks/dvcs/dvcs.sh --data-init
     - bash benchmarks/dvcs/dvcs.sh
 dvcs:results:
   stage: collect
   needs: ["dvcs:process"]
   script:
-    - pip install junitparser
+    - ls -lrth
+    #pip install junitparser
     #- python dvcs/scripts/merge_results.py
 #!/bin/bash
+function print_the_help {
+  echo "USAGE: ${0} [--data-init] "
+  echo "  OPTIONS: "
+  exit
+}
+DATA_INIT=
+REC_ONLY=
+ANALYSIS_ONLY=
+POSITIONAL=()
+while [[ $# -gt 0 ]]
+do
+  key="$1"
+  case $key in
+    -h|--help)
+      shift # past argument
+      print_the_help
+      ;;
+    --data-init)
+      DATA_INIT=1
+      shift # past value
+      ;;
+    *)    # unknown option
+      #POSITIONAL+=("$1") # save it in an array for later
+      echo "unknown option $1"
+      print_the_help
+      shift # past argument
+      ;;
+  esac
+done
+set -- "${POSITIONAL[@]}" # restore positional parameters
 # these variables might not need exported.
-export JUGGLER_FILE_NAME_TAG="dvcs"
-export JUGGLER_SIM_FILE="sim_${JUGGLER_FILE_NAME_TAG}.root"
-export JUGGLER_REC_FILE="rec_${JUGGLER_FILE_NAME_TAG}.root"
+export FILE_NAME_TAG="dvcs"
+export JUGGLER_SIM_FILE="sim_${FILE_NAME_TAG}.root"
+export JUGGLER_REC_FILE="rec_${FILE_NAME_TAG}.root"
 echo "JUGGLER_N_EVENTS = ${JUGGLER_N_EVENTS}"
 echo "JUGGLER_DETECTOR = ${JUGGLER_DETECTOR}"
-echo "JUGGLER_FILE_NAME_TAG = ${JUGGLER_FILE_NAME_TAG}"
+echo "FILE_NAME_TAG = ${FILE_NAME_TAG}"
+print_env.sh
 ## To run the reconstruction, we need the following global variables:
 ## - JUGGLER_INSTALL_PREFIX: Install prefix for Juggler (simu/recon)
 ## - JUGGLER_DETECTOR: the detector package we want to use for this benchmark
 ## - DETECTOR_PATH: full path to the detector definitions
-curl -o test_proton_dvcs_eic.hepmc "https://eicweb.phy.anl.gov/api/v4/projects/345/jobs/artifacts/master/raw/data/test_proton_dvcs_eic.hepmc?job=compile"
-if [[ "$?" -ne "0" ]] ; then
-  echo "Failed to download hepmc file"
-  exit 1
+if [[ -n "${DATA_INIT}" ]] ; then
+  mc -C . config host add S3 https://dtn01.sdcc.bnl.gov:9000 $S3_ACCESS_KEY $S3_SECRET_KEY
+  mc -C . cat --insecure S3/eictest/ATHENA/EVGEN/DVCS/DVCS_10x100_2M/DVCS.1.hepmc | head -n 1004 > "${LOCAL_DATA_PATH}/dvcs_test.hepmc"
+  if [[ "$?" -ne "0" ]] ; then
+    echo "Failed to download hepmc file"
+    exit 1
+  fi
+  exit
 fi
+export JUGGLER_N_EVENTS=10
+#curl -o test_proton_dvcs_eic.hepmc "https://eicweb.phy.anl.gov/api/v4/projects/345/jobs/artifacts/master/raw/data/test_proton_dvcs_eic.hepmc?job=compile"
 ## run geant4 simulations
 npsim --runType batch \
@@ -30,7 +70,7 @@ npsim --runType batch \
     -v ERROR \
     --numberOfEvents ${JUGGLER_N_EVENTS} \
     --compactFile ${DETECTOR_PATH}/${JUGGLER_DETECTOR}.xml \
-    --inputFiles test_proton_dvcs_eic.hepmc \
+    --inputFiles "${LOCAL_DATA_PATH}/dvcs_test.hepmc" \
     --outputFile ${JUGGLER_SIM_FILE}
 if [[ "$?" -ne "0" ]] ; then
   echo "ERROR running npsim"
@@ -46,6 +86,7 @@ if [[ "$?" -ne "0" ]] ; then
 fi
 mkdir -p results/dvcs
+rootls -t ${JUGGLER_SIM_FILE}
 root -b -q "benchmarks/dvcs/scripts/dvcs_tests.cxx(\"${JUGGLER_REC_FILE}\")"
 if [[ "$?" -ne "0" ]] ; then
...
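As a point of reference, here is a minimal sketch of how this script might be driven outside of CI, assuming the globals listed in the comment block above (JUGGLER_N_EVENTS, JUGGLER_DETECTOR, DETECTOR_PATH) plus the LOCAL_DATA_PATH and S3 credentials used by the new --data-init step. Every value shown is an illustrative placeholder, not the CI configuration.

# Hypothetical local driver for benchmarks/dvcs/dvcs.sh; all values below are placeholders.
export JUGGLER_DETECTOR=topside              # detector package name (illustrative)
export DETECTOR_PATH=/opt/detector           # full path to the detector definitions (illustrative)
export JUGGLER_N_EVENTS=10                   # small event count, matching the CI change
export LOCAL_DATA_PATH=$PWD/local_data       # scratch area for the hepmc slice
export S3_ACCESS_KEY=...                     # read credentials for the BNL S3 endpoint
export S3_SECRET_KEY=...
mkdir -p "${LOCAL_DATA_PATH}"
bash benchmarks/dvcs/dvcs.sh --data-init     # stream the first ~1000 lines of the DVCS hepmc file from S3, then exit
bash benchmarks/dvcs/dvcs.sh                 # run npsim and the downstream analysis on the local slice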
void dvcs_ps_gen() {
  double E_p = 100.0;
  double M_p = 0.938;
  double E_e = 5.0;
  TLorentzVector target(0.0, 0.0, std::sqrt(E_p*E_p - M_p*M_p), E_p);
  TLorentzVector beam(0.0, 0.0, E_e, std::sqrt(E_e*E_e + 0.000511*0.000511));
  TLorentzVector W = beam + target;
  // (Momentum and energy units are GeV/c and GeV)
  Double_t masses[3] = { 0.000511, 0.938, 0.0 };
  TGenPhaseSpace event;
  if (!event.SetDecay(W, 3, masses)) {
    std::cout << "error: TGenPhaseSpace::SetDecay failed (decay not kinematically allowed)\n";
  }
  TH2F* h2 = new TH2F("h2", "h2; Q^{2} ; t", 100, 0, 5, 100, -0.120, 0);
  for (Int_t n = 0; n < 1000000; n++) {
    Double_t weight = event.Generate();
    TLorentzVector* pElectron = event.GetDecay(0);
    TLorentzVector* pProton   = event.GetDecay(1);
    TLorentzVector* pGamma    = event.GetDecay(2);
    TLorentzVector pq    = beam - *pElectron;
    TLorentzVector Delta = target - *pProton;
    h2->Fill(-1.0*(pq.M2()), Delta.M2(), weight);
    //std::cout << -1.0*(pq.M2()) << " , " << Delta.M2() << " , " << weight << "\n";
  }
  h2->Draw("colz");
}
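The macro above generates 5 GeV electron on 100 GeV proton phase-space events with TGenPhaseSpace and fills a Q^{2} vs t histogram. One way to run it, assuming it lives next to dvcs_tests.cxx as benchmarks/dvcs/scripts/dvcs_ps_gen.cxx (an assumed path):

# Assumed file location; run interactively so the Q^{2} vs t histogram stays on screen.
root -l benchmarks/dvcs/scripts/dvcs_ps_gen.cxx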
@@ -106,7 +106,7 @@ void dvcs_tests(const char* fname = "rec_dvcs.root"){
   auto df0 = df.Define("isThrown", "mcparticles2.genStatus == 1")
                .Define("thrownParticles", "mcparticles2[isThrown]")
                .Define("thrownP", fourvec, {"thrownParticles"})
-               .Define("dumRec", dumfourvec, {"DummyReconstructedParticles"})
+               .Define("dumRec", dumfourvec, {"ReconstructedParticles"})
                .Define("dumNPart", "dumRec.size()")
                .Define("p_thrown", momentum, {"thrownP"})
                .Define("nTracks", "outputTrackParameters.size()")
...
#!/usr/bin/env python3
import json
from junitparser import TestCase, TestSuite, JUnitXml, Skipped, Error, IntAttr, FloatAttr
# Create the new element by subclassing Element or one of its child classes,
# and add custom attributes to it.
#class MyTestCase(TestCase):
# foo = Attr()
# Add the custom attribute
#TestCase.id = IntAttr('id')
TestCase.efficiency = FloatAttr('efficiency')
#TestCase.custom = Attr('custom')
#case = TestCase()
#case.id = 123
#case.rate = 0.95
#case.custom = 'foobar'
# After looking at two different python libraries (junit-xml and junitparser)
# junitparser looks the most robust
# https://github.com/weiwei/junitparser
def merge_results():
    results = None
    with open("results/dvcs/dvcs_tests.json", "r") as f:
        results = json.load(f)

    # Create suite and add cases
    suite = TestSuite('dvcs')
    suite.add_property('energy', '10-on-100')

    for tname, tres in results.items():
        for ttype, tval in tres.items():
            # Create cases
            case1 = TestCase(tname)
            case1.time = 1.0
            case1.efficiency = tval
            case1.classname = ttype
            suite.add_testcase(case1)

    xml = JUnitXml()
    xml.add_testsuite(suite)
    xml.write('results/dvcs/dvcs_report.xml', pretty=True)
#test code for junit-xml:
#from junit_xml import TestSuite, TestCase
# test_cases = []
# print(test_name)
# print(test_res)
# for test_type, test_val in test_res.items():
# test_cases.append(TestCase(test_name, "dvcs.dvcs_tests.{}".format(test_type), 10, str(test_val), 'I am stderr!'))
# ts = TestSuite("my test suite", test_cases)
# # pretty printing is on by default but can be disabled using prettyprint=False
# print(TestSuite.to_xml_string([ts]))
# # you can also write the XML to a file and not pretty print it
# with open('results/dvcs/dvcs_report.xml', 'w') as f:
# TestSuite.to_file(f, [ts], prettyprint=True)
if __name__ == "__main__":
    # execute only if run as a script
    merge_results()
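A sketch of how this helper could be exercised by hand, following the (currently commented-out) CI lines above; it assumes results/dvcs/dvcs_tests.json has already been produced by the analysis step and that the script sits at dvcs/scripts/merge_results.py, as referenced in the config:

pip install junitparser
# expects results/dvcs/dvcs_tests.json from the dvcs analysis step
python dvcs/scripts/merge_results.py
# the merged JUnit report is written to results/dvcs/dvcs_report.xml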
@@ -82,7 +82,7 @@ int vm_invar(const std::string& config_name)
   //====================================================================
   // Define analysis flow
-  auto d_im = d.Define("p_rec_sorted", momenta_sort_rec, {"DummyReconstructedParticles"})
+  auto d_im = d.Define("p_rec_sorted", momenta_sort_rec, {"ReconstructedParticles"})
                .Define("p_sim_sorted", momenta_sort_sim, {"mcparticles2"})
                .Define("N", "p_rec_sorted.size()")
                .Define("invariant_quantities_rec", util::calc_inv_quant, {"p_rec_sorted"})
...
@@ -96,7 +96,7 @@ int vm_mass(const std::string& config_name)
   // common_bench::PrintGeant4(mcparticles2);
   // Define analysis flow
-  auto d_im = d.Define("p_rec", common_bench::momenta_RC, {"DummyReconstructedParticles"}) //using dummy rc
+  auto d_im = d.Define("p_rec", common_bench::momenta_RC, {"ReconstructedParticles"}) //using dummy rc
               .Define("N", "p_rec.size()")
               .Define("p_sim", common_bench::momenta_from_simulation, {"mcparticles2"})
               .Define("decay_pair_rec", find_decay_pair, {"p_rec"})
...
 dvmp:generate:
   needs: ["common:detector"]
   image: eicweb.phy.anl.gov:4567/monte_carlo/lager/lager:unstable
+  extends: .phy_benchmark
   stage: generate
   timeout: 1 hours
   script:
@@ -12,6 +13,7 @@ dvmp:generate:
 dvmp:process:
   stage: process
+  extends: .phy_benchmark
   needs: ["common:detector", "dvmp:generate"]
   timeout: 2 hour
   script:
...
@@ -85,8 +85,8 @@ echo "Running the digitization and reconstruction"
 export JUGGLER_SIM_FILE=${SIM_FILE}
 export JUGGLER_REC_FILE=${REC_FILE}
 xenv -x ${JUGGLER_INSTALL_PREFIX}/Juggler.xenv \
-  gaudirun.py options/reconstruction.py \
-  2>&1 > ${REC_LOG}
+  gaudirun.py options/reconstruction.py
+  ## 2>&1 > ${REC_LOG}
 ## on-error, first retry running juggler again as there is still a random
 ## crash we need to address FIXME
 if [ "$?" -ne "0" ] ; then
...
#!/bin/bash
branch=${1:-master}
detector_benchmarks=https://eicweb.phy.anl.gov/EIC/benchmarks/detector_benchmarks/-/jobs/artifacts/${branch}/raw/
mkdir -p config
for i in results/emcal_barrel_calibration.json ; do
  curl --fail -sL ${detector_benchmarks}/${i}?job=deploy_results --output config/$(basename ${i})
done
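Usage sketch for the helper above: the .phy_benchmark before_script calls it with no arguments, so the branch defaults to master; a branch name can be passed explicitly (the name below is illustrative).

bash bin/get_calibrations             # fetch calibrations from the master detector_benchmarks artifacts
bash bin/get_calibrations some-branch # or from another detector_benchmarks branch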
import os

from Gaudi.Configuration import *
from Configurables import ApplicationMgr, EICDataSvc, PodioOutput, GeoSvc
from GaudiKernel import SystemOfUnits as units

detector_name = "topside"
if "JUGGLER_DETECTOR" in os.environ :
    detector_name = str(os.environ["JUGGLER_DETECTOR"])

# todo add checks
input_sim_file = str(os.environ["JUGGLER_SIM_FILE"])
output_rec_file = str(os.environ["JUGGLER_REC_FILE"])
n_events = str(os.environ["JUGGLER_N_EVENTS"])

detector_path = detector_name
if "DETECTOR_PATH" in os.environ :
    detector_path = str(os.environ["DETECTOR_PATH"])
geo_service = GeoSvc("GeoSvc",
detectors=["{}/{}.xml".format(detector_path, detector_name)])
podioevent = EICDataSvc("EventDataSvc", inputs=[input_sim_file], OutputLevel=DEBUG)
from Configurables import PodioInput
from Configurables import Jug__Base__InputCopier_dd4pod__Geant4ParticleCollection_dd4pod__Geant4ParticleCollection_ as MCCopier
from Configurables import Jug__Base__InputCopier_dd4pod__CalorimeterHitCollection_dd4pod__CalorimeterHitCollection_ as CalCopier
from Configurables import Jug__Base__InputCopier_dd4pod__TrackerHitCollection_dd4pod__TrackerHitCollection_ as TrkCopier
from Configurables import Jug__Digi__ExampleCaloDigi as ExampleCaloDigi
from Configurables import Jug__Digi__UFSDTrackerDigi as UFSDTrackerDigi
from Configurables import Jug__Digi__EMCalorimeterDigi as EMCalorimeterDigi
from Configurables import Jug__Base__MC2DummyParticle as MC2DummyParticle
from Configurables import Jug__Reco__TrackerHitReconstruction as TrackerHitReconstruction
from Configurables import Jug__Reco__TrackerSourceLinker as TrackerSourceLinker
from Configurables import Jug__Reco__Tracker2SourceLinker as Tracker2SourceLinker
#from Configurables import Jug__Reco__TrackerSourcesLinker as TrackerSourcesLinker
#from Configurables import Jug__Reco__TrackingHitsSourceLinker as TrackingHitsSourceLinker
from Configurables import Jug__Reco__TrackParamTruthInit as TrackParamTruthInit
from Configurables import Jug__Reco__TrackParamClusterInit as TrackParamClusterInit
from Configurables import Jug__Reco__TrackParamVertexClusterInit as TrackParamVertexClusterInit
from Configurables import Jug__Reco__TrackFindingAlgorithm as TrackFindingAlgorithm
from Configurables import Jug__Reco__ParticlesFromTrackFit as ParticlesFromTrackFit
from Configurables import Jug__Reco__EMCalReconstruction as EMCalReconstruction
from Configurables import Jug__Reco__SimpleClustering as SimpleClustering
podioinput = PodioInput("PodioReader",
collections=["mcparticles","TrackerEndcapHits","TrackerBarrelHits","EcalBarrelHits"])#, OutputLevel=DEBUG)
#"SiVertexBarrelHits",
dummy = MC2DummyParticle("MC2Dummy",
inputCollection="mcparticles",
outputCollection="DummyReconstructedParticles",
smearing = 0.1)
## copiers to get around input --> output copy bug. Note the "2" appended to the output collection.
copier = MCCopier("MCCopier",
inputCollection="mcparticles",
outputCollection="mcparticles2")
trkcopier = TrkCopier("TrkCopier",
inputCollection="TrackerBarrelHits",
outputCollection="TrackerBarrelHits2")
ecal_digi = EMCalorimeterDigi("ecal_digi",
inputHitCollection="EcalBarrelHits",
outputHitCollection="RawEcalBarrelHits")
ufsd_digi = UFSDTrackerDigi("ufsd_digi",
inputHitCollection="TrackerBarrelHits",
outputHitCollection="TrackerBarrelRawHits",
timeResolution=8)
ufsd_digi2 = UFSDTrackerDigi("ufsd_digi2",
inputHitCollection="TrackerEndcapHits",
outputHitCollection="TrackerEndcapRawHits",
timeResolution=8)
#vtx_digi = UFSDTrackerDigi("vtx_digi",
# inputHitCollection="SiVertexBarrelHits",
# outputHitCollection="SiVertexBarrelRawHits",
# timeResolution=8)
ecal_reco = EMCalReconstruction("ecal_reco",
inputHitCollection=ecal_digi.outputHitCollection,
outputHitCollection="RecEcalBarrelHits",
minModuleEdep=0.0*units.MeV,
OutputLevel=DEBUG)
simple_cluster = SimpleClustering("simple_cluster",
inputHitCollection=ecal_reco.outputHitCollection,
outputClusterCollection="SimpleClusters",
outputProtoClusterCollection="SimpleProtoClusters",
minModuleEdep=1.0*units.MeV,
maxDistance=50.0*units.cm,
OutputLevel=DEBUG)
trk_barrel_reco = TrackerHitReconstruction("trk_barrel_reco",
inputHitCollection="TrackerBarrelRawHits",
outputHitCollection="TrackerBarrelRecHits")
trk_endcap_reco = TrackerHitReconstruction("trk_endcap_reco",
inputHitCollection="TrackerEndcapRawHits",
outputHitCollection="TrackerEndcapRecHits")
#vtx_barrel_reco = TrackerHitReconstruction("vtx_barrel_reco",
# inputHitCollection = vtx_digi.outputHitCollection,
# outputHitCollection="VertexBarrelRecHits")
# Source linker
sourcelinker = TrackerSourceLinker("trk_srclinker",
inputHitCollection="TrackerBarrelRecHits",
outputSourceLinks="BarrelTrackSourceLinks",
OutputLevel=DEBUG)
trk_hits_srclnkr = Tracker2SourceLinker("trk_hits_srclnkr",
TrackerBarrelHits="TrackerBarrelRecHits",
TrackerEndcapHits="TrackerEndcapRecHits",
outputMeasurements="lnker2Measurements",
outputSourceLinks="lnker2Links",
allTrackerHits="linker2AllHits",
OutputLevel=DEBUG)
## Track param init
truth_trk_init = TrackParamTruthInit("truth_trk_init",
inputMCParticles="mcparticles",
outputInitialTrackParameters="InitTrackParams",
OutputLevel=DEBUG)
clust_trk_init = TrackParamClusterInit("clust_trk_init",
inputClusters="SimpleClusters",
outputInitialTrackParameters="InitTrackParamsFromClusters",
OutputLevel=DEBUG)
#vtxcluster_trk_init = TrackParamVertexClusterInit("vtxcluster_trk_init",
# inputVertexHits="VertexBarrelRecHits",
# inputClusters="SimpleClusters",
# outputInitialTrackParameters="InitTrackParamsFromVtxClusters",
# maxHitRadius=40.0*units.mm,
# OutputLevel=DEBUG)
# Tracking algorithms
trk_find_alg = TrackFindingAlgorithm("trk_find_alg",
inputSourceLinks = sourcelinker.outputSourceLinks,
inputMeasurements = sourcelinker.outputMeasurements,
inputInitialTrackParameters= "InitTrackParams",#"InitTrackParamsFromClusters",
outputTrajectories="trajectories",
OutputLevel=DEBUG)
parts_from_fit = ParticlesFromTrackFit("parts_from_fit",
inputTrajectories="trajectories",
outputParticles="ReconstructedParticles",
outputTrackParameters="outputTrackParameters",
OutputLevel=DEBUG)
trk_find_alg1 = TrackFindingAlgorithm("trk_find_alg1",
inputSourceLinks = trk_hits_srclnkr.outputSourceLinks,
inputMeasurements = trk_hits_srclnkr.outputMeasurements,
inputInitialTrackParameters= "InitTrackParamsFromClusters",
outputTrajectories="trajectories1",
OutputLevel=DEBUG)
parts_from_fit1 = ParticlesFromTrackFit("parts_from_fit1",
inputTrajectories="trajectories1",
outputParticles="ReconstructedParticles1",
outputTrackParameters="outputTrackParameters1",
OutputLevel=DEBUG)
trk_find_alg2 = TrackFindingAlgorithm("trk_find_alg2",
inputSourceLinks = trk_hits_srclnkr.outputSourceLinks,
inputMeasurements = trk_hits_srclnkr.outputMeasurements,
inputInitialTrackParameters= "InitTrackParams",#"InitTrackParamsFromClusters",
#inputInitialTrackParameters= "InitTrackParamsFromVtxClusters",
outputTrajectories="trajectories2",
OutputLevel=DEBUG)
parts_from_fit2 = ParticlesFromTrackFit("parts_from_fit2",
inputTrajectories="trajectories2",
outputParticles="ReconstructedParticles2",
outputTrackParameters="outputTrackParameters2",
OutputLevel=DEBUG)
#types = []
## this printout is useful to check that the type information is passed to python correctly
#print("---------------------------------------\n")
#print("---\n# List of input and output types by class")
#for configurable in sorted([ PodioInput, EICDataSvc, PodioOutput,
# TrackerHitReconstruction,ExampleCaloDigi,
# UFSDTrackerDigi, TrackerSourceLinker,
# PodioOutput],
# key=lambda c: c.getType()):
# print("\"{}\":".format(configurable.getType()))
# props = configurable.getDefaultProperties()
# for propname, prop in sorted(props.items()):
# print(" prop name: {}".format(propname))
# if isinstance(prop, DataHandleBase):
# types.append(prop.type())
# print(" {}: \"{}\"".format(propname, prop.type()))
#print("---")
out = PodioOutput("out", filename=output_rec_file)
out.outputCommands = ["keep *",
"drop BarrelTrackSourceLinks",
"drop InitTrackParams",
"drop trajectories",
"drop outputSourceLinks",
"drop outputInitialTrackParameters",
"drop mcparticles"
]
ApplicationMgr(
    TopAlg = [podioinput,
              dummy,
              copier, trkcopier,
              ecal_digi, ufsd_digi2, ufsd_digi, #vtx_digi,
              ecal_reco,
              simple_cluster,
              trk_barrel_reco,
              trk_endcap_reco,
              #vtx_barrel_reco,
              sourcelinker, trk_hits_srclnkr,
              clust_trk_init,
              truth_trk_init,
              #vtxcluster_trk_init,
              trk_find_alg, parts_from_fit,
              trk_find_alg1, parts_from_fit1,
              trk_find_alg2, parts_from_fit2,
              out
              ],
    EvtSel = 'NONE',
    EvtMax = n_events,
    ExtSvc = [podioevent, geo_service],
    OutputLevel=DEBUG
    )
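For context, a minimal sketch of how an options file like this one is typically invoked in these benchmarks, following the xenv/gaudirun.py pattern shown in the dvmp script diff above; the exported values are illustrative placeholders consistent with the dvcs naming used earlier.

# Illustrative invocation; real values come from the CI environment.
export JUGGLER_DETECTOR=topside              # matches the default detector_name above
export DETECTOR_PATH=/opt/detector           # placeholder path to the compact files
export JUGGLER_N_EVENTS=10
export JUGGLER_SIM_FILE=sim_dvcs.root        # npsim output read by EICDataSvc
export JUGGLER_REC_FILE=rec_dvcs.root        # written by PodioOutput("out")
xenv -x ${JUGGLER_INSTALL_PREFIX}/Juggler.xenv \
  gaudirun.py options/reconstruction.py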