Commit c9f703df authored by Whitney Armstrong

Set artifact upload limit based on filesize

parent 74409176
1 merge request: !43 Set artifact upload limit based on filesize
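For context, each script touched below applies the same pattern: measure the size of the reconstructed ROOT file with stat and copy it into the results/ artifact directory only when it is under 10 MB, and only for small test runs (JUGGLER_N_EVENTS < 500). A minimal sketch of that guard, assuming GNU coreutils stat and the JUGGLER_* variables provided by the CI job; the copy_if_small helper is hypothetical, the scripts below inline the check instead:

#!/bin/bash
# Hypothetical helper mirroring the guard added in this commit.
# Assumes GNU coreutils `stat` and the JUGGLER_* CI variables.
copy_if_small() {
  local src="$1" dest_dir="$2"
  local max_bytes=10000000            # artifacts must stay under ~10 MB to upload
  local size
  size=$(stat --format=%s "${src}")   # file size in bytes
  if [[ "${size}" -lt "${max_bytes}" ]] ; then
    cp "${src}" "${dest_dir}/."
  else
    echo "skipping artifact copy: ${src} is ${size} bytes"
  fi
}

# Usage, mirroring the scripts in this commit: only small test runs attempt
# the copy at all, and the size check then enforces the upload limit.
if [[ "${JUGGLER_N_EVENTS}" -lt "500" ]] ; then
  copy_if_small "${JUGGLER_DETECTOR}/${JUGGLER_REC_FILE}" results
fi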
@@ -56,5 +56,10 @@ mkdir -p results/clustering
root -b -q "clustering/scripts/barrel_clusters.cxx(\"${JUGGLER_DETECTOR}/${JUGGLER_REC_FILE}\")"
cp topside/${JUGGLER_REC_FILE} results/clustering/.
root_filesize=$(stat --format=%s "${JUGGLER_DETECTOR}/${JUGGLER_REC_FILE}")
if [[ "${JUGGLER_N_EVENTS}" -lt "500" ]] ; then
# file must be less than 10 MB to upload
if [[ "${root_filesize}" -lt "10000000" ]] ; then
cp ${JUGGLER_DETECTOR}/${JUGGLER_REC_FILE} results/clustering/.
fi
fi
@@ -110,13 +110,13 @@ void barrel_clusters(const char* in_fname = "topside/rec_barrel_clusters.root")
auto h_theta_thrown = d0.Histo1D({"h_theta_thrown", "; #theta", 100, 30.0, 180.0}, "thrownTheta");
auto h_momentum_thrown = d0.Histo1D({"h_momentum_thrown", "; E [GeV]", 100, 0.0, 30.0}, "thrownMomentum");
auto h_nclusters = d0.Histo1D({"h_nclusters", "; N clusters", 6, 0, 6}, "nclusters");
auto h_Ecluster = d0.Histo1D({"h_Ecluster", "; cluster E [GeV]", 100, 0, 1}, "Ecluster");
auto h_Ecluster1 = d1.Histo1D({"h_Ecluster1", "One cluster events; cluster E [GeV]", 100, 0, 30}, "Ecluster");
auto h_momentum_thrown1 = d1.Histo1D({"h_momentum_thrown", "; E [GeV]", 100, 0.0, 30.0},"thrownMomentum");
auto h_Ecluster = d0.Histo1D({"h_Ecluster", "; cluster E [GeV]", 100, 0.1, 30}, "Ecluster");
auto h_Ecluster1 = d1.Histo1D({"h_Ecluster1", "One cluster events; cluster E [GeV]", 100, 0.1, 30}, "Ecluster");
auto h_momentum_thrown1 = d1.Histo1D({"h_momentum_thrown", "; E [GeV]", 100, 0.1, 30.0},"thrownMomentum");
auto h_delta_E = d0.Histo1D({"h_delta_E", "; #Delta E [GeV]", 100, -3, 3}, "delta_E");
auto h_delta_E_over_E =
d0.Histo1D({"h_delta_E_over_E", "; #frac{E_{thrown}-E_{cluster}}{E_{thrown}} ", 100, -1, 1}, "delta_E_over_E");
d0.Histo1D({"h_delta_E_over_E", "; #frac{E_{thrown}-E_{cluster}}{E_{thrown}} ", 100, -0.5, 0.5}, "delta_E_over_E");
//auto h_Ecluster2 = d1.Filter("thrownMomentum > 4").Histo1D({"h_Ecluster1", "One cluster events; cluster E [GeV]",100, 0,30},"Ecluster");
//auto h_momentum_thrown2 = d1.Filter("thrownMomentum > 4").Histo1D({"h_momentum_thrown", "; E [GeV]", 100, 0.0, 30.0},"thrownMomentum");
......
@@ -107,8 +107,11 @@ fi
#paste results/sim_${JUGGLER_FILE_NAME_TAG}.txt results/rec_${JUGGLER_FILE_NAME_TAG}.txt > results/eng_${JUGGLER_FILE_NAME_TAG}.txt
#root -b -q "ecal/scripts/read_eng.C(\"results/eng_${JUGGLER_FILE_NAME_TAG}.root\", \"results/eng_${JUGGLER_FILE_NAME_TAG}.txt\")"
#root -b -q "ecal/scripts/cal_eng_res.C(\"results/eng_${JUGGLER_FILE_NAME_TAG}.root\")"
root_filesize=$(stat --format=%s "${JUGGLER_DETECTOR}/${JUGGLER_REC_FILE}")
if [[ "${JUGGLER_N_EVENTS}" -lt "500" ]] ; then
cp ${JUGGLER_DETECTOR}/${JUGGLER_REC_FILE} results/.
# file must be less than 10 MB to upload
if [[ "${root_filesize}" -lt "10000000" ]] ; then
cp ${JUGGLER_DETECTOR}/${JUGGLER_REC_FILE} results/.
fi
fi
@@ -84,7 +84,11 @@ if [[ "$?" -ne "0" ]] ; then
exit 1
fi
root_filesize=$(stat --format=%s "${JUGGLER_DETECTOR}/${JUGGLER_REC_FILE}")
if [[ "${JUGGLER_N_EVENTS}" -lt "500" ]] ; then
cp ${JUGGLER_DETECTOR}/${JUGGLER_REC_FILE} results/.
# file must be less than 10 MB to upload
if [[ "${root_filesize}" -lt "10000000" ]] ; then
cp ${JUGGLER_DETECTOR}/${JUGGLER_REC_FILE} results/.
fi
fi
@@ -51,4 +51,11 @@ ls -l
popd
# @TODO add analysis scripts
#root_filesize=$(stat --format=%s "${JUGGLER_DETECTOR}/${JUGGLER_REC_FILE}")
#if [[ "${JUGGLER_N_EVENTS}" -lt "500" ]] ; then
# # file must be less than 10 MB to upload
# if [[ "${root_filesize}" -lt "10000000" ]] ; then
# cp ${JUGGLER_DETECTOR}/${JUGGLER_REC_FILE} results/.
# fi
#fi
@@ -75,7 +75,11 @@ if [[ "$?" -ne "0" ]] ; then
exit 1
fi
root_filesize=$(stat --format=%s "${JUGGLER_DETECTOR}/${JUGGLER_REC_FILE}")
if [[ "${JUGGLER_N_EVENTS}" -lt "500" ]] ; then
cp ${JUGGLER_DETECTOR}/${JUGGLER_REC_FILE} results/.
# file must be less than 10 MB to upload
if [[ "${root_filesize}" -lt "10000000" ]] ; then
cp ${JUGGLER_DETECTOR}/${JUGGLER_REC_FILE} results/.
fi
fi
@@ -9,37 +9,6 @@
using ROOT::RDataFrame;
using namespace ROOT::VecOps;
//namespace edm4hep {
//
//std::vector<float> pt (std::vector<MCParticleData> const& in){
// std::vector<float> result;
// for (size_t i = 0; i < in.size(); ++i) {
// result.push_back(std::sqrt(in[i].momentum.x * in[i].momentum.x + in[i].momentum.y * in[i].momentum.y));
// }
// return result;
//}
//
//std::vector<float> eta(std::vector<MCParticleData> const& in){
// std::vector<float> result;
// ROOT::Math::PxPyPzMVector lv;
// for (size_t i = 0; i < in.size(); ++i) {
// lv.SetCoordinates(in[i].momentum.x, in[i].momentum.y, in[i].momentum.z, in[i].mass);
// result.push_back(lv.Eta());
// }
// return result;
//}
//
//std::vector<float> cos_theta(std::vector<MCParticleData> const& in){
// std::vector<float> result;
// ROOT::Math::PxPyPzMVector lv;
// for (size_t i = 0; i < in.size(); ++i) {
// lv.SetCoordinates(in[i].momentum.x, in[i].momentum.y, in[i].momentum.z, in[i].mass);
// result.push_back(cos(lv.Theta()));
// }
// return result;
//}
//
//}
auto p_track = [](std::vector<eic::TrackParametersData> const& in) {
std::vector<double> result;
for (size_t i = 0; i < in.size(); ++i) {
......