diff --git a/.containers/Dockerfile.in b/.containers/Dockerfile.in
new file mode 100644
index 0000000000000000000000000000000000000000..06567d32b0985cde7c1d20a331ba75bda4ab6522
--- /dev/null
+++ b/.containers/Dockerfile.in
@@ -0,0 +1,22 @@
+FROM eicweb.phy.anl.gov:4567/containers/image_recipes/root_spack:@CONTAINER_TAG@
+
+LABEL maintainer "Sylvester Joosten <sjoosten@anl.gov>"
+
+RUN  cd /tmp \
+  && git clone https://eicweb.phy.anl.gov/jlab/hallc/analyzer_software/hallac_evio.git \
+  && mkdir hallac_evio/build && cd hallac_evio/build  \
+  && cmake ../.  && make -j20 && make install \
+  && cd /tmp && rm -rf hallac_evio \
+  && git clone https://eicweb.phy.anl.gov/jlab/hallc/analyzer_software/analyzer.git \
+  && mkdir analyzer/build && cd analyzer/build   && git pull && git checkout v1.8.3 \
+  && cmake ../.  && make -j20 VERBOSE=1 && make install \
+  && cd /tmp  && rm -rf analyzer
+
+## 2 layers so we can isolate hcana from the rest of the stack
+
+RUN cd /tmp \
+  && git clone https://eicweb.phy.anl.gov/jlab/hallc/analyzer_software/hcana.git \
+  && mkdir hcana/build && cd hcana/build  \
+  && git pull \
+  && cmake ../.  && make -j20 VERBOSE=1 && make install \
+  && cd /tmp && rm -rf hcana 
diff --git a/.containers/Makefile b/.containers/Makefile
new file mode 100644
index 0000000000000000000000000000000000000000..d5e8233fff63af2932d816d30bf0cf9c43862c80
--- /dev/null
+++ b/.containers/Makefile
@@ -0,0 +1,64 @@
+# import config.
+# You can change the default config with `make cnf="config_special.env" build`
+cnf ?= config.env
+include $(cnf)
+# exports variables in config.env as environment variables
+export $(shell sed 's/=.*//' $(cnf))
+
+SHELL = bash
+
+# read the version from the top-level VERSION file (via version.sh)
+VERSION=$(shell bash version.sh)
+TAG_VERSION=$(VERSION)
+LONG_TAG=$(VERSION)-$(PUBLISH_TAG)
+
+# help will output the help for each task
+# thanks to https://marmelab.com/blog/2016/02/29/auto-documented-makefile.html
+.PHONY: help
+
+help: ## This help.
+	@awk 'BEGIN {FS = ":.*?## "} /^[a-zA-Z_-]+:.*?## / {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' $(MAKEFILE_LIST)
+
+.DEFAULT_GOAL := help
+
+# ==========================================================================
+#
+build: ## build the image
+	docker build -t $(APP_NAME):$(LONG_TAG) .
+
+build-nc: ## Build the container without caching (from scratch)
+	docker build --no-cache -t $(APP_NAME):$(LONG_TAG) .
+
+# ==========================================================================
+#
+login: ## Login to the GitLab container registry using docker
+	@docker login -u ${CI_REGISTRY_USER} -p ${CI_REGISTRY_PASSWORD} ${CI_REGISTRY}
+	echo "Login COMPLETE"
+
+# ==========================================================================
+#
+
+publish: login ## Publish a tagged container to the registry
+	@echo 'publish $(PUBLISH_TAG) to $(REG_NAME)/$(GL_REG_GROUP)/$(APP_NAME)'
+	docker tag $(APP_NAME):$(LONG_TAG) $(REG_NAME)/$(GL_REG_GROUP)/$(APP_NAME):$(PUBLISH_TAG)
+	docker push $(REG_NAME)/$(GL_REG_GROUP)/$(APP_NAME):$(PUBLISH_TAG)
+	docker rmi  $(REG_NAME)/$(GL_REG_GROUP)/$(APP_NAME):$(PUBLISH_TAG)
+
+publish-version: login ## Publish the `{version}` tagged container to the registry
+	@echo 'publish $(VERSION) to $(REG_NAME)/$(GL_REG_GROUP)/$(APP_NAME):$(VERSION)'
+	docker tag $(APP_NAME):$(LONG_TAG) $(REG_NAME)/$(GL_REG_GROUP)/$(APP_NAME):$(VERSION)
+	docker push $(REG_NAME)/$(GL_REG_GROUP)/$(APP_NAME):$(VERSION)
+	docker rmi  $(REG_NAME)/$(GL_REG_GROUP)/$(APP_NAME):$(VERSION)
+
+# ==========================================================================
+# remove container from registry
+unpublish: login
+	@echo 'removing $(PUBLISH_TAG)'
+	curl --request DELETE --header "PRIVATE-TOKEN: $(CI_JOB_TOKEN)" "$(REG_API_URL)/$(PUBLISH_TAG)"
+
+# ==========================================================================
+# cleanup docker registry on system used by runner
+cleanup:
+	@echo 'removing $(REG_NAME):$(LONG_TAG)'
+	docker rmi $(REG_NAME):$(LONG_TAG)
+
diff --git a/.containers/config.env b/.containers/config.env
new file mode 100644
index 0000000000000000000000000000000000000000..79a52b2395efce1644765abfad03b063e2c4aa74
--- /dev/null
+++ b/.containers/config.env
@@ -0,0 +1,17 @@
+# Container registry and image naming configuration
+
+APP_NAME     = hcana
+REPO_NAME    = hcana
+
+REG_HOST  ?= eicweb.phy.anl.gov
+REG_NAME  ?= eicweb.phy.anl.gov:4567
+REG_PORT  ?= 4567
+REG_URL   ?= https://$(REG_HOST)
+
+GL_GROUP     = jlab/hallc/analyzer_software
+GL_REG_GROUP = jlab/hallc/analyzer_software/hcana
+GL_REG_NAME  = $(REG_NAME)
+
+PUBLISH_TAG = $(HCANA_TAG)
+
+REG_API_URL = $(REG_URL)/api/v4/projects/$(CI_PROJECT_ID)/registry/repositories/49/tags
diff --git a/containers/docker/Makefile b/.containers/docker/Makefile
similarity index 100%
rename from containers/docker/Makefile
rename to .containers/docker/Makefile
diff --git a/.containers/hcana.def.in b/.containers/hcana.def.in
new file mode 100644
index 0000000000000000000000000000000000000000..12ab7e6377bcc4c41f9911595fe6bd51436efa0c
--- /dev/null
+++ b/.containers/hcana.def.in
@@ -0,0 +1,14 @@
+Bootstrap: docker
+From: eicweb.phy.anl.gov:4567/jlab/hallc/analyzer_software/hcana:@HCANA_TAG@
+
+%help
+  singularity container for hcana development
+
+%labels
+  Maintainer "Whitney Armstrong, Sylvester Joosten"
+  Version v1-dev
+
+%post -c /bin/bash
+  echo "  -------------------------------------------------"
+  echo "  ===> Image setup complete"
+  echo "  -------------------------------------------------"
diff --git a/containers/singularity/Singularity.broadwell b/.containers/singularity/Singularity.broadwell
similarity index 100%
rename from containers/singularity/Singularity.broadwell
rename to .containers/singularity/Singularity.broadwell
diff --git a/.containers/version.sh b/.containers/version.sh
new file mode 100644
index 0000000000000000000000000000000000000000..93ef8dc29dfe97a476861893b492f43f49272866
--- /dev/null
+++ b/.containers/version.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+
+cat ../VERSION
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 0aaf4ed384f2b72d30361ea0bd8f11481cc309b2..e2978a196d1e894661c130e22d987988ca0ce226 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,84 +1,113 @@
-image: eicweb.phy.anl.gov:4567/containers/image_recipes/root_base:latest
+variables:
+  CONTAINER_TAG: "1.4.0"
+
+default: 
+  image: eicweb.phy.anl.gov:4567/containers/image_recipes/ubuntu_dind:latest
+  tags:
+    - silicon
 
 stages:
+  - config
   - build
-  - build_docker
-  - build_sing_img
-  - data_replays
-  - data_tests
+  - push
+  - deploy
+  - cleanup
 
-hcana_build:
-  stage: build
-  tags: 
-    - sodium
-  script:
-    -  ls -lrth /usr/local/bin
-    - bash bin/compile
-
-hcana_docker:
-  stage: build_docker  
-  only:
-     - tags
-  tags: 
-     - eic0 docker
-  script:
-     - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
-     - cd containers/docker && make release
+workflow:
+  rules:
+     - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+     - if: '$CI_COMMIT_BRANCH == "master"'
+     - if: '$CI_COMMIT_TAG'
 
-hcana_singularity:
-  tags: 
-     - singularity
-  stage: build_sing_img
-  when: manual
-  dependencies:
-     - hcana_docker
+env:
+  stage: config 
   script:
-     - /bin/bash .gitlabci/setup.sh
-     - mkdir -p build
-     - pwd
-     - cp containers/singularity/Singularity Singularity.hcana
-     - cp Singularity.hcana build/.
-     - /bin/bash .gitlabci/build.sh Singularity.hcana
-     - cp Singularity.hcana.simg build/.
+    - export HCANA_TAG="latest"
+    - |
+      if [ "x${CI_PIPELINE_SOURCE}" == "xmerge_request_event" ]; then
+        export HCANA_TAG="testing-mr-${CI_MERGE_REQUEST_IID}"
+      fi
+    - echo "CI HCANA_TAG for this pipeline set to ${HCANA_TAG}"
+    - echo "HCANA_TAG=$HCANA_TAG" >> hcana.env
   artifacts:
-      paths:
-        - build/Singularity.hcana
-        - build/Singularity.hcana.simg
+    reports:
+      dotenv: hcana.env
 
-elastic_replay:
-  when: manual
-  tags: 
-     - eic0 docker
-  stage: data_replays
-  dependencies: 
-     - hcana_singularity
+docker:
+  stage: build
+  needs:
+    - env
   script:
-     - bash tests/replay_elastic_data.sh
-  artifacts:
-     paths: 
-       - ROOTfiles/*
-       - build/Singularity.hcana
-       - build/Singularity.hcana.simg
-
+    - echo $HCANA_TAG
+    - ./.gitlabci/configure.sh .containers/Dockerfile.in
+    - cd .containers
+    - make build-nc
 
-elastic_test1:
-  when: manual
-  tags: 
-     - eic0 docker
-  stage: data_tests
-  dependencies: 
-     - elastic_replay
+## for now only publish the stable versions to the registry.
+## if we change this down the line for some CI-time testing
+## make sure to also enable the cleanup job
+publish:
+  stage: push
+  rules:
+    - if: '$CI_COMMIT_BRANCH == "master"'
+      when: manual
+    - if: '$CI_COMMIT_TAG'
+      when: manual
+  needs:
+    - env
+    - docker
   script:
-     - bash tests/elastic_test.sh
+    - cd .containers
+    - |
+      if [ "x$CI_COMMIT_TAG" != "x" ]; then
+        make publish publish-version
+      else
+        make publish
+      fi
+  retry:
+    max: 2
+    when:
+      - runner_system_failure
+      - stuck_or_timeout_failure
 
-elastic_test2:
-  when: manual
-  tags: 
-     - eic0 docker
-  stage: data_tests
-  dependencies: 
-     - elastic_replay
+.singularity:
+  stage: deploy
+  needs: 
+    - env
+    - publish
   script:
-     - bash tests/elastic_test2.sh
+    - ./.gitlabci/configure.sh .containers/hcana.def.in
+    - mkdir -p build
+    - mv .containers/hcana.def build
+    - singularity build build/hcana.sif build/hcana.def
+  retry:
+    max: 2
+    when:
+      - runner_system_failure
+      - stuck_or_timeout_failure
 
+singularity:latest:
+  extends: .singularity
+  rules:
+    - if: '$CI_COMMIT_BRANCH == "master"'
+      when: manual
+    - if: '$CI_COMMIT_TAG'
+      when: manual
+  artifacts:
+    expire_in: 90 days
+    paths:
+      - build/hcana.sif
+      - build/hcana.def
 
+purge_image:
+  stage: cleanup
+  needs:
+    - env
+    - publish
+  rules:
+    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
+      when: never
+  script:
+    - cd .containers
+    - make cleanup || true
+    - make unpublish || true
diff --git a/.gitlabci/build.sh b/.gitlabci/build.sh
old mode 100644
new mode 100755
diff --git a/.gitlabci/configure.sh b/.gitlabci/configure.sh
new file mode 100755
index 0000000000000000000000000000000000000000..c59d8a41a734e19c5ba32cc1217f12e281e5a56a
--- /dev/null
+++ b/.gitlabci/configure.sh
@@ -0,0 +1,26 @@
+#!/bin/bash
+
+## Configure a CI file based on a template file (first and only
+## argument to this script).
+
+## Known variables that will be substituted:
+## @CONTAINER_TAG@          - docker tag for the ROOT container
+## @HCANA_TAG@         - output tag for the hcana version
+## @HCANA_BRANCH@      - hcana git branch for build
+
+TEMPLATE_FILE=$1
+OUTPUT_FILE=${TEMPLATE_FILE%.*}
+HCANA_BRANCH="master"
+
+if [ -n "${CI_MERGE_REQUEST_SOURCE_BRANCH_NAME}" ] ; then
+    HCANA_BRANCH=$CI_MERGE_REQUEST_SOURCE_BRANCH_NAME
+fi
+
+echo "Configuring CI file: ${TEMPLATE_FILE}"
+echo "Output will be written to: ${OUTPUT_FILE}"
+
+sed "s/@CONTAINER_TAG@/$CONTAINER_TAG/g" $TEMPLATE_FILE | \
+    sed "s/@HCANA_TAG@/$HCANA_TAG/g" | \
+    sed "s/@HCANA_BRANCH@/$HCANA_BRANCH/g" > ${OUTPUT_FILE}
+
+echo "Done"
diff --git a/.gitlabci/setup.sh b/.gitlabci/setup.sh
deleted file mode 100644
index 5c9bbe517eb1dc83dc33b142507764e10fd555d1..0000000000000000000000000000000000000000
--- a/.gitlabci/setup.sh
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/bin/bash
-
-apt-get update && apt-get install -y wget git \
-                                          build-essential \
-                                          squashfs-tools \
-                                          libtool \
-                                          autotools-dev \
-                                          libarchive-dev \
-                                          automake \
-                                          autoconf \
-                                          uuid-dev \
-                                          python3   \
-                                          libssl-dev
-
-
-sed -i -e 's/^Defaults\tsecure_path.*$//' /etc/sudoers
-
-# Check Python
-echo "Python Version:"
-which python
-python --version
-pip install sregistry[all]
-sregistry version
-
-echo "sregistry Version:"
-
-# Install Singularity
-
-cd /tmp && \
-    git clone -b vault/release-2.5 https://www.github.com/sylabs/singularity.git
-    cd singularity && \
-    ./autogen.sh && \
-    ./configure --prefix=/usr/local && \
-    make -j8 && make install
diff --git a/.gitlabci/sregistry-gitlab.png b/.gitlabci/sregistry-gitlab.png
deleted file mode 100644
index a14e917a20fb32011329af6bc6abd04012acd0d6..0000000000000000000000000000000000000000
Binary files a/.gitlabci/sregistry-gitlab.png and /dev/null differ
diff --git a/.gitlabci/sregistry-gitlab.xcf b/.gitlabci/sregistry-gitlab.xcf
deleted file mode 100644
index f2742162bfc602f92d3d9c5cd573f29e334e042d..0000000000000000000000000000000000000000
Binary files a/.gitlabci/sregistry-gitlab.xcf and /dev/null differ
diff --git a/VERSION b/VERSION
new file mode 100644
index 0000000000000000000000000000000000000000..88c5fb891dcf1d1647d2b84bac0630cf9570d213
--- /dev/null
+++ b/VERSION
@@ -0,0 +1 @@
+1.4.0
diff --git a/bin/compile b/bin/compile
deleted file mode 100755
index ea6387a783b1d0248b1b727ff622ab78247c79cd..0000000000000000000000000000000000000000
--- a/bin/compile
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/bin/bash
-#set -o nounset
-set -o errexit
-startdir=$(pwd)
-export PYTHONPATH=/usr/local/lib:$PYTHONPATH  
-export LD_LIBRARY_PATH=/usr/local/lib:$LD_LIBRARY_PATH  
-export PATH=/usr/local/bin:$PATH  
-source /usr/local/bin/thisroot.sh 
-cd /tmp 
-git clone https://github.com/fmtlib/fmt.git && cd fmt 
-git checkout 6.2.1 && mkdir /tmp/build && cd /tmp/build 
-cmake -DBUILD_SHARED_LIBS=TRUE ../fmt 
-make -j20 install 
-cd /tmp && rm -r /tmp/build && rm -r /tmp/fmt 
-cd /tmp  
-git clone https://eicweb.phy.anl.gov/jlab/hallc/analyzer_software/hallac_evio.git 
-mkdir hallac_evio/build && cd hallac_evio/build  
-cmake ../.  && make -j20 && make install 
-cd /tmp && rm -rf hallac_evio 
-cd /tmp  
-export PYTHONPATH=/usr/local/lib:$PYTHONPATH  
-export LD_LIBRARY_PATH=/usr/local/lib:$LD_LIBRARY_PATH  
-export PATH=/usr/local/bin:$PATH  
-git clone --depth 1 https://eicweb.phy.anl.gov/jlab/hallc/analyzer_software/analyzer.git 
-mkdir analyzer/build && cd analyzer/build   
-cmake ../.  && make -j20 VERBOSE=1 && make install 
-cd /tmp  && rm -rf analyzer 
-export PYTHONPATH=/usr/local/lib:$PYTHONPATH  
-export LD_LIBRARY_PATH=/usr/local/lib:$LD_LIBRARY_PATH  
-export PATH=/usr/local/bin:$PATH  
-source /usr/local/bin/thisroot.sh 
-cd $startdir
-mkdir build && cd build  
-cmake ../.  && make -j20 VERBOSE=1 && make install  
-
diff --git a/containers/docker/Dockerfile b/containers/docker/Dockerfile
deleted file mode 100644
index f3a412a9af41a6cb8daffc94fea5cd0826f923d3..0000000000000000000000000000000000000000
--- a/containers/docker/Dockerfile
+++ /dev/null
@@ -1,53 +0,0 @@
-FROM eicweb.phy.anl.gov:4567/containers/image_recipes/root_base:latest                                                                                                                                       
-
-LABEL maintainer "Whitney Armstrong <warmstrong@anl.gov>"
-#
-
-RUN   ls -lrth /usr/local/lib/lib*.so \
-  && export PYTHONPATH=/usr/local/lib:$PYTHONPATH  \
-  && export LD_LIBRARY_PATH=/usr/local/lib:$LD_LIBRARY_PATH  \
-  && export PATH=/usr/local/bin:$PATH  \
-  && source /usr/local/bin/thisroot.sh \
-  && cd /tmp \
-  && git clone https://github.com/fmtlib/fmt.git && cd fmt \
-  && git checkout 6.2.1 && mkdir /tmp/build && cd /tmp/build \
-  && cmake -DBUILD_SHARED_LIBS=TRUE ../fmt \
-  &&  make -j20 install \
-  && cd /tmp && rm -r /tmp/build && rm -r /tmp/fmt \
-  && cd /tmp  \
-  && git clone https://eicweb.phy.anl.gov/jlab/hallc/analyzer_software/hallac_evio.git \
-  && mkdir hallac_evio/build && cd hallac_evio/build  \
-  && cmake ../.  && make -j20 && make install \
-  && cd /tmp && rm -rf hallac_evio \
-  && cd /tmp  \
-  && export PYTHONPATH=/usr/local/lib:$PYTHONPATH  \
-  && export LD_LIBRARY_PATH=/usr/local/lib:$LD_LIBRARY_PATH  \
-  && export PATH=/usr/local/bin:$PATH  \
-  && git clone https://eicweb.phy.anl.gov/jlab/hallc/analyzer_software/analyzer.git \
-  && mkdir analyzer/build && cd analyzer/build   && git pull && git checkout v1.8.2 \
-  && cmake ../.  && make -j20 VERBOSE=1 && make install \
-  && cd /tmp  && rm -rf analyzer \
-  && export PYTHONPATH=/usr/local/lib:$PYTHONPATH  \
-  && export LD_LIBRARY_PATH=/usr/local/lib:$LD_LIBRARY_PATH  \
-  && export PATH=/usr/local/bin:$PATH  \
-  && source /usr/local/bin/thisroot.sh \
-  && git clone https://eicweb.phy.anl.gov/jlab/hallc/analyzer_software/hcana.git \
-  && mkdir hcana/build && cd hcana/build  \
-  && git pull \
-  && cmake ../.  && make -j20 VERBOSE=1 && make install \ 
-  && cd /tmp && rm -rf hcana 
-
-
-#-DCMAKE_CXX_FLAGS=" -march=haswell -O3 -mfma -malign-data=cacheline -finline-functions "
-#&& wget -O- https://root.cern.ch/download/root_v6.14.06.source.tar.gz | tar -zxvf - \
-#&& mv root-6.14.06 root_master \
-#RUN which c++ && ls -lrth /usr/bin/c++ && cd /tmp/builds/root_build && make -j38 VERBOSE=1 && make install \
-#      && cd /tmp && rm -rf /tmp/root_master && rm -rf /tmp/builds/root_build 
-
-#RUN useradd -ms /bin/bash -d /opt/user user
-#USER user
-#WORKDIR /opt/bubble_user
-
-##CMD ["-c" ]
-#ENTRYPOINT ["/bin/bash"]
-
diff --git a/containers/docker/Dockerfile.broadwell b/containers/docker/Dockerfile.broadwell
deleted file mode 100644
index 9185d4528387f17217baf733244bdc6bf2e03a54..0000000000000000000000000000000000000000
--- a/containers/docker/Dockerfile.broadwell
+++ /dev/null
@@ -1,58 +0,0 @@
-# ROOT base
-#
-# A container for the latest root
-#
-FROM  whit/image_recipes/ubuntu_base:latest
-LABEL maintainer "Whitney Armstrong <warmstrong@anl.gov>"
-#
-
-RUN cd /tmp \
-      && wget http://bitbucket.org/eigen/eigen/get/3.3.4.tar.bz2 \
-      && tar -xvf 3.3.4.tar.bz2 \
-      && cd eigen-* \
-      && mkdir build && cd build \
-      && cmake ../. -DCMAKE_CXX_FLAGS=" -march=haswell -O3 -mfma -malign-data=cacheline -finline-functions " \
-      && make -j10 > /tmp/eigen_build.log && make install
-
-RUN cd /tmp \
-      && git clone --depth=1 https://gitlab.cern.ch/CLHEP/CLHEP.git \
-      && mkdir -p builds/clhep_build \
-      && cd  builds/clhep_build \
-      && cmake /tmp/CLHEP/.  -DCMAKE_CXX_FLAGS=" -march=haswell -O3 -mfma -malign-data=cacheline -finline-functions "\
-      && make -j38 install > /tmp/clhep_build.log \
-      && cd /tmp && rm -rf /tmp/CLHEP && rm -rf /tmp/builds/clhep_build
-
-RUN cd /tmp \
-&& git clone https://github.com/VcDevel/Vc.git \
-&& cd  Vc \
-&& git submodule update --init \
-&& mkdir build && cd build \
-&& cmake -DCMAKE_INSTALL_PREFIX=/usr/local -DBUILD_TESTING=OFF -DTARGET_ARCHITECTURE=broadwell ../.  \
-&& make -j30 > /tmp/vc_build.log  \
-&& make install
-
-# Build root from the repo master
-RUN cd /tmp \
-      && pwd \
-      && git clone --depth=1 https://github.com/root-project/root.git root_master \
-      && cd /tmp && mkdir -p builds/root_build \
-      && cd builds/root_build \
-      && cmake ../../root_master/. -Droot7:BOOL=ON -Dcxx17:BOOL=ON -Dfortran:BOOL=ON \
-             -Dgdml:BOOL=ON -Dmathmore:BOOL=ON -Dminuit2:BOOL=ON  -Dbuiltin_vdt:BOOL=ON -Dbuiltin_veccore:BOOL=ON \
-             -Dvc:BOOL=ON -Dbuiltin_vecgeom:BOOL=ON  -Dunuran:BOOL=ON  \
-      && cd /tmp/builds/root_build && make -j38 > /tmp/root_build.log && make install \
-      && cd /tmp && rm -rf /tmp/root_master && rm -rf /tmp/builds/root_build 
-             
-#-DCMAKE_CXX_FLAGS=" -march=haswell -O3 -mfma -malign-data=cacheline -finline-functions "
-#&& wget -O- https://root.cern.ch/download/root_v6.14.06.source.tar.gz | tar -zxvf - \
-#&& mv root-6.14.06 root_master \
-#RUN which c++ && ls -lrth /usr/bin/c++ && cd /tmp/builds/root_build && make -j38 VERBOSE=1 && make install \
-#      && cd /tmp && rm -rf /tmp/root_master && rm -rf /tmp/builds/root_build 
-
-#RUN useradd -ms /bin/bash -d /opt/user user
-#USER user
-#WORKDIR /opt/bubble_user
-
-##CMD ["-c" ]
-#ENTRYPOINT ["/bin/bash"]
-
diff --git a/containers/docker/README.md b/containers/docker/README.md
deleted file mode 100644
index d6337961f1b7287f56f0b92aa3c87994f04abd55..0000000000000000000000000000000000000000
--- a/containers/docker/README.md
+++ /dev/null
@@ -1,18 +0,0 @@
-Hall A/C Software
-=================
-
-
-Images for running containers for all aspects of the SANE experimental 
-analysis.
-
-The starting point is a pre-built image for the ROOT libraries. (ubuntu + ROOT)
-
-Main software libraries:
-
- - `evio`: Built from https://github.com/whit2333/hallac_evio
- - `analyzer`: Hall A analyzer (podd)from https://github.com/whit2333/analyzer
- - `hcana`: Hall C analyzer from  https://github.com/whit2333/hcana
-
-These are all built using the super build project `cool_halls` (https://github.com/whit2333/cool_halls)
-
-
diff --git a/containers/docker/config.env b/containers/docker/config.env
deleted file mode 100644
index 631951825ad49e88b62bd773e249dd89b9c01ac0..0000000000000000000000000000000000000000
--- a/containers/docker/config.env
+++ /dev/null
@@ -1,23 +0,0 @@
-# Port to run the container 
-PORT=4000
-
-REG_TOKEN ?= ${CI_IMAGE_BUILD_PAT}
-REG_USER  ?= whit
-REG_NAME  ?= eicweb.phy.anl.gov:4567
-REG_HOST  ?= eicweb.phy.anl.gov:4567
-
-# name of alternate build: 
-# Dockerfile.$(ALT_NAME) --> $(APP_NAME)_${ALT_NAME}
-ALT_NAME ?= broadwell
-
-APP_NAME     = hcana
-REPO_NAME    = hcana
-DH_ORG       = hallac
-GL_GROUP     = jlab/hallc/analyzer_software
-GL_REG_GROUP = jlab/hallc/analyzer_software
-GL_REG_NAME  = hcana
-REPO         = hcana
-TAG_VERSION  = latest
-
-
-
diff --git a/containers/docker/deploy.env b/containers/docker/deploy.env
deleted file mode 100644
index 909aa72672001a5758a7f8c08619c7aa489defa1..0000000000000000000000000000000000000000
--- a/containers/docker/deploy.env
+++ /dev/null
@@ -1,5 +0,0 @@
-# You have to define the values in {}
-#DOCKER_REPO={account-nr}.dkr.ecr.{region}.amazonaws.com
-## optional aws-cli options
-#AWS_CLI_PROFILE={aws-cli-profile}
-#AWS_CLI_REGION={aws-cli-region}
diff --git a/containers/docker/usage.sh b/containers/docker/usage.sh
deleted file mode 100644
index 0b72b4786a77c190f0247a7f566fc50f93c914d6..0000000000000000000000000000000000000000
--- a/containers/docker/usage.sh
+++ /dev/null
@@ -1,26 +0,0 @@
-# INSTALL
-# - copy the files deploy.env, config.env, version.sh and Makefile to your repo
-# - replace the vars in deploy.env
-# - define the version script
-
-# Build the container
-make build
-
-# Build and publish the container
-make release
-
-# Publish a container to AWS-ECR.
-# This includes the login to the repo
-make publish
-
-# Run the container
-make run
-
-# Build an run the container
-make up
-
-# Stop the running container
-make stop
-
-# Build the container with differnt config and deploy file
-make cnf=another_config.env dpl=another_deploy.env build
\ No newline at end of file
diff --git a/containers/docker/version.sh b/containers/docker/version.sh
deleted file mode 100644
index 504899f94c6af4dcb28d4b7e6017c47614478b23..0000000000000000000000000000000000000000
--- a/containers/docker/version.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/bash
-
-echo "0.1"
diff --git a/containers/singularity/Singularity b/containers/singularity/Singularity
deleted file mode 100644
index e57ff64c3eeaacf29147da31449d3b5ca9ba7ca3..0000000000000000000000000000000000000000
--- a/containers/singularity/Singularity
+++ /dev/null
@@ -1,125 +0,0 @@
-Bootstrap: docker
-From: eicweb.phy.anl.gov:4567/jlab/hallc/analyzer_software/hcana:latest
-
-%help
-  Hall A/C container.
-  Tools:
-     - evio        : EVIO DAQ data format  
-     - analyzer    : Hall A analyzer (podd) 
-     - hcana       : Hall C analyzer (hcana)
-     - root        : root version used for the analyzer
-     - rootls, rootbrowse, root_config
-
-%labels
-  Maintainer "Whitney Armstrong, Sylvester Joosten"
-  Version v1.0
-
-%setup -c /bin/bash
-  export SINGULARITY_SHELL=/bin/bash
-
-%environment -c /bin/bash
-  export PYTHONPATH=/usr/local/lib:${PYTHONPATH}
-  export PATH=/usr/local/bin:${PATH}
-  export LD_LIBRARY_PATH=/usr/local/lib:$LD_LIBRARY_PATH
-  export ROOT_INCLUDE_PATH=/usr/local/include:/usr/local/include/podd:/usr/local/include/hcana
-
-%post -c /bin/bash
-  echo "Hello from post"
-  echo "Install additional software here"
-  source /usr/local/bin/thisroot.sh
-  ## libformat and nlohmann json used heavily in new generation replay scripts
-  ## libformat
-  #cd /tmp && git clone https://github.com/fmtlib/fmt.git && cd fmt && \
-  #  git checkout 5.3.0 && mkdir /tmp/build && cd /tmp/build && \
-  #  cmake -DBUILD_SHARED_LIBS=TRUE ../fmt &&
-  #  make -j4 install && cd /tmp && rm -r /tmp/build && rm -r /tmp/fmt
-  ### json
-
-# =======================
-# global
-# =======================
-
-%runscript
-  echo "Launching a shell in the Hall A/C singularity container
-  exec bash
-
-
-# =======================
-# root
-# =======================
-%apprun root
-  root "$@"
-
-%appenv root
-  export PYTHONPATH=/usr/local/lib:${PYTHONPATH}
-  export PATH=/usr/local/bin:${PATH}
-  export LD_LIBRARY_PATH=/usr/local/lib:$LD_LIBRARY_PATH
-  export ROOT_INCLUDE_PATH=/usr/local/include/podd:/usr/local/include/hcana
-
-# =======================
-# analyzer
-# =======================
-%apprun analyzer
-  analyzer "$@"
-
-%appenv analyzer
-  export PYTHONPATH=/usr/local/lib:${PYTHONPATH}
-  export PATH=/usr/local/bin:${PATH}
-  export LD_LIBRARY_PATH=/usr/local/lib:$LD_LIBRARY_PATH
-  export ROOT_INCLUDE_PATH=/usr/local/include/podd:/usr/local/include/hcana
-
-# =======================
-# hcana
-# =======================
-%apphelp hcana
-  Run the Hall-C analyzer with same root-style arguments.
-
-%apprun hcana
-  source /usr/local/bin/thisroot.sh
-  hcana "$@"
-
-%appenv hcana
-  export DB_DIR=DBASE
-  export PYTHONPATH=/usr/local/lib:${PYTHONPATH}
-  export PATH=/usr/local/bin:${PATH}
-  export LD_LIBRARY_PATH=/usr/local/lib:$LD_LIBRARY_PATH
-  export ROOTSYS=/usr/local
-  export ROOT_INCLUDE_PATH=/usr/local/include
-  export ROOT_INCLUDE_PATH=/usr/local/include:/usr/local/include/podd:/usr/local/include/hcana
-
-# =======================
-# root-config
-# =======================
-%apprun root_config
-  root-config "$@"
-
-%appenv root_config
-  export PYTHONPATH=/usr/local/lib:${PYTHONPATH}
-  export PATH=/usr/local/bin:${PATH}
-  export LD_LIBRARY_PATH=/usr/local/lib:$LD_LIBRARY_PATH
-  export ROOT_INCLUDE_PATH=/usr/local/include/podd:/usr/local/include/hcana
-
-
-# =======================
-# rootbrowse
-# =======================
-%apprun rootbrowse
-  rootbrowse "$@"
-
-%appenv rootbrowse
-  export PYTHONPATH=/usr/local/lib:${PYTHONPATH}
-  export PATH=/usr/local/bin:${PATH}
-  export LD_LIBRARY_PATH=/usr/local/lib:$LD_LIBRARY_PATH
-  export ROOT_INCLUDE_PATH=/usr/local/include/podd:/usr/local/include/hcana
-
-# =======================
-# rootls
-# =======================
-%apprun rootls
-  rootls "$@"
-
-%appenv rootls
-  export PYTHONPATH=/usr/local/lib:${PYTHONPATH}
-  export PATH=/usr/local/bin:${PATH}
-  export LD_LIBRARY_PATH=/usr/local/lib:$LD_LIBRARY_PATH
-  export ROOT_INCLUDE_PATH=/usr/local/include/podd:/usr/local/include/hcana
diff --git a/setup.fish b/setup.fish
deleted file mode 100644
index 0ad82c6e2a1a2ea8126a90270c85c74fa50f7be0..0000000000000000000000000000000000000000
--- a/setup.fish
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/usr/bin/fish
-
-function update_path -d "Remove argv[2]argv[3] from argv[1] if argv[2], and prepend argv[4]"
-   # Assert that we got enough arguments
-   if test (count $argv) -ne 4
-      echo "update_path: needs 4 arguments but have " (count $argv)
-      return 1
-   end
-
-   set var $argv[1]
-
-   set newpath $argv[4]
-   for el in $$var
-      if test "$argv[2]" = ""; or not test "$el" = "$argv[2]$argv[3]"
-         set newpath $newpath $el
-      end
-   end
-
-   set -xg $var $newpath
-end
-
-
-if set -q HCANALYZER
-  set old_hcanalyzer $HCANALYZER
-end
-
-if set -q ANALYZER
-  set old_analyzer $ANALYZER
-end
-set -x HCANALYZER (realpath (dirname (status -f)))
-set -x ANALYZER $HCANALYZER/podd
-
-update_path LD_LIBRARY_PATH "$old_hcanalyzer" "" $HCANALYZER
-update_path LD_LIBRARY_PATH "$old_analyzer" "$HCANALYZER" $ANALYZER
-update_path DYLD_LIBRARY_PATH "$old_hcanalyzer" "" $HCANALYZER
-update_path DYLD_LIBRARY_PATH "$old_analyzer" "$HCANALYZER" $ANALYZER
-
diff --git a/tests/README.md b/tests/README.md
deleted file mode 100644
index 6a0afb3ebdafedc26379432edcae7ee373367894..0000000000000000000000000000000000000000
--- a/tests/README.md
+++ /dev/null
@@ -1,13 +0,0 @@
-Hcana tests
-===========
-
-## Current tests:
-
-- ep elastic tests
-
-## Tests to add:
-
-- Start time check
-- Focal plane times test
-- "Tracking efficiency" test
-- Invariant mass check (jpsi)
diff --git a/tests/elastic_coin_replay.cxx b/tests/elastic_coin_replay.cxx
deleted file mode 100644
index 7011fa8a309be4d7572aaf931f7ac7df1496ea50..0000000000000000000000000000000000000000
--- a/tests/elastic_coin_replay.cxx
+++ /dev/null
@@ -1,296 +0,0 @@
-#include <fmt/core.h>
-#include <fmt/ostream.h>
-#include <vector>
-
-#include "TString.h"
-
-R__LOAD_LIBRARY(libHallC.so)
-#include "hcana/HallC_Data.h"
-#include "DecData.h"
-//R__LOAD_LIBRARY(libScandalizer.so)
-//#include "monitor/DetectorDisplay.h"
-//#include "monitor/DisplayPlots.h"
-//#include "monitor/MonitoringDisplay.h"
-//#include "scandalizer/PostProcessors.h"
-//#include "scandalizer/ScriptHelpers.h"
-//
-//#include "THaPostProcess.h"
-//#include "monitor/ExperimentMonitor.h"
-//#include "scandalizer/PostProcessors.h"
-#include "THcAnalyzer.h"
-#include "THaCut.h"
-#include "THcGlobals.h"
-#include "THcHallCSpectrometer.h"
-#include "THcDetectorMap.h"
-#include "THcCherenkov.h"
-#include "THcDC.h"
-#include "THcHodoscope.h"
-#include "THcParmList.h"
-#include "THaGoldenTrack.h"
-#include "THcHodoEff.h"
-#include "THcScalerEvtHandler.h"
-#include "THcShower.h"
-#include "THcReactionPoint.h"
-#include "THcExtTarCor.h"
-#include "THcRasteredBeam.h"
-#include "THcRun.h"
-#include "THcCoinTime.h"
-#include "THcConfigEvtHandler.h"
-#include "THcTrigDet.h"
-#include "THcTrigApp.h"
-#include "THcSecondaryKine.h"
-#include "THcAerogel.h"
-#include "THcPrimaryKine.h"
-#include "THaReactionPoint.h"
-
-
-
-void elastic_coin_replay(Int_t RunNumber = 0, Int_t MaxEvent = -1) {
-  using namespace std;
-
-  // Get RunNumber and MaxEvent if not provided.
-  if( RunNumber<=0 ) {
-    std::exit(-1);
-  }
-
-  // Create file name patterns.
-  const char* RunFileNamePattern = "coin_all_%05d.dat";
-  vector<TString> pathList;
-  pathList.push_back(".");
-  pathList.push_back("./raw");
-  pathList.push_back("./raw/../raw.copiedtotape");
-  pathList.push_back("./cache");
-
-  //const char* RunFileNamePattern = "raw/coin_all_%05d.dat";
-  const char* ROOTFileNamePattern = "ROOTfiles/coin_replay_production_%d_%d.root";
-  
-  // Load global parameters
-  gHcParms->Define("gen_run_number", "Run Number", RunNumber);
-  gHcParms->AddString("g_ctp_database_filename", "DBASE/COIN/standard.database");
-  gHcParms->Load(gHcParms->GetString("g_ctp_database_filename"), RunNumber);
-  gHcParms->Load(gHcParms->GetString("g_ctp_parm_filename"));
-  gHcParms->Load(gHcParms->GetString("g_ctp_kinematics_filename"), RunNumber);
-  // Load params for COIN trigger configuration
-  gHcParms->Load("PARAM/TRIG/tcoin.param");
-  // Load fadc debug parameters
-  gHcParms->Load("PARAM/HMS/GEN/h_fadc_debug.param");
-  gHcParms->Load("PARAM/SHMS/GEN/p_fadc_debug.param");
-
-  // const char* CurrentFileNamePattern = "low_curr_bcm/bcmcurrent_%d.param";
-  // gHcParms->Load(Form(CurrentFileNamePattern, RunNumber));
-
-  // Load the Hall C detector map
-  gHcDetectorMap = new THcDetectorMap();
-  gHcDetectorMap->Load("MAPS/COIN/DETEC/coin.map");
-
-     // Dec data
-   gHaApps->Add(new Podd::DecData("D","Decoder raw data"));
-  //=:=:=:=
-  // SHMS 
-  //=:=:=:=
-
-  // Set up the equipment to be analyzed.
-  THcHallCSpectrometer* SHMS = new THcHallCSpectrometer("P", "SHMS");
-  SHMS->SetEvtType(1);
-  SHMS->AddEvtType(4);
-  SHMS->AddEvtType(5);
-  SHMS->AddEvtType(6);
-  SHMS->AddEvtType(7);
-  gHaApps->Add(SHMS);
-  // Add Noble Gas Cherenkov to SHMS apparatus
-  THcCherenkov* pngcer = new THcCherenkov("ngcer", "Noble Gas Cherenkov");
-  SHMS->AddDetector(pngcer);
-  // Add drift chambers to SHMS apparatus
-  THcDC* pdc = new THcDC("dc", "Drift Chambers");
-  SHMS->AddDetector(pdc);
-  // Add hodoscope to SHMS apparatus
-  THcHodoscope* phod = new THcHodoscope("hod", "Hodoscope");
-  SHMS->AddDetector(phod);
-  // Add Heavy Gas Cherenkov to SHMS apparatus
-  THcCherenkov* phgcer = new THcCherenkov("hgcer", "Heavy Gas Cherenkov");
-  SHMS->AddDetector(phgcer);
-  // Add Aerogel Cherenkov to SHMS apparatus
-  THcAerogel* paero = new THcAerogel("aero", "Aerogel");
-  SHMS->AddDetector(paero);
-  // Add calorimeter to SHMS apparatus
-  THcShower* pcal = new THcShower("cal", "Calorimeter");
-  SHMS->AddDetector(pcal);
-
-  // THcBCMCurrent* hbc = new THcBCMCurrent("H.bcm", "BCM current check");
-  // gHaPhysics->Add(hbc);
-
-  // Add rastered beam apparatus
-  THaApparatus* pbeam = new THcRasteredBeam("P.rb", "Rastered Beamline");
-  gHaApps->Add(pbeam);
-  // Add physics modules
-  // Calculate reaction point
-  THcReactionPoint* prp = new THcReactionPoint("P.react", "SHMS reaction point", "P", "P.rb");
-  gHaPhysics->Add(prp);
-  // Calculate extended target corrections
-  THcExtTarCor* pext = new THcExtTarCor("P.extcor", "HMS extended target corrections", "P", "P.react");
-  gHaPhysics->Add(pext);
-  // Calculate golden track quantites
-  THaGoldenTrack* pgtr = new THaGoldenTrack("P.gtr", "SHMS Golden Track", "P");
-  gHaPhysics->Add(pgtr);
-  // Calculate the hodoscope efficiencies
-  THcHodoEff* peff = new THcHodoEff("phodeff", "SHMS hodo efficiency", "P.hod");
-  gHaPhysics->Add(peff);   
-
-  // Add event handler for scaler events
-  THcScalerEvtHandler* pscaler = new THcScalerEvtHandler("P", "Hall C scaler event type 1");
-  pscaler->AddEvtType(1);
-  pscaler->AddEvtType(4);
-  pscaler->AddEvtType(5);
-  pscaler->AddEvtType(6);
-  pscaler->AddEvtType(7);
-  pscaler->AddEvtType(129);
-  pscaler->SetDelayedType(129);
-  pscaler->SetUseFirstEvent(kTRUE);
-  gHaEvtHandlers->Add(pscaler);
-
-  //=:=:=
-  // HMS 
-  //=:=:=
-
-  // Set up the equipment to be analyzed.
-  THcHallCSpectrometer* HMS = new THcHallCSpectrometer("H", "HMS");
-  HMS->SetEvtType(2);
-  HMS->AddEvtType(4);
-  HMS->AddEvtType(5);
-  HMS->AddEvtType(6);
-  HMS->AddEvtType(7);
-  gHaApps->Add(HMS);
-  // Add drift chambers to HMS apparatus
-  THcDC* hdc = new THcDC("dc", "Drift Chambers");
-  HMS->AddDetector(hdc);
-  // Add hodoscope to HMS apparatus
-  THcHodoscope* hhod = new THcHodoscope("hod", "Hodoscope");
-  HMS->AddDetector(hhod);
-  // Add Cherenkov to HMS apparatus
-  THcCherenkov* hcer = new THcCherenkov("cer", "Heavy Gas Cherenkov");
-  HMS->AddDetector(hcer);
-  // Add Aerogel Cherenkov to HMS apparatus
-  // THcAerogel* haero = new THcAerogel("aero", "Aerogel");
-  // HMS->AddDetector(haero);
-  // Add calorimeter to HMS apparatus
-  THcShower* hcal = new THcShower("cal", "Calorimeter");
-  HMS->AddDetector(hcal);
-
-  // Add rastered beam apparatus
-  THaApparatus* hbeam = new THcRasteredBeam("H.rb", "Rastered Beamline");
-  gHaApps->Add(hbeam);  
-  // Add physics modules
-  // Calculate reaction point
-  THcReactionPoint* hrp = new THcReactionPoint("H.react", "HMS reaction point", "H", "H.rb");
-  gHaPhysics->Add(hrp);
-  // Calculate extended target corrections
-  THcExtTarCor* hext = new THcExtTarCor("H.extcor", "HMS extended target corrections", "H", "H.react");
-  gHaPhysics->Add(hext);
-  // Calculate golden track quantities
-  THaGoldenTrack* hgtr = new THaGoldenTrack("H.gtr", "HMS Golden Track", "H");
-  gHaPhysics->Add(hgtr);
-  // Calculate the hodoscope efficiencies
-  THcHodoEff* heff = new THcHodoEff("hhodeff", "HMS hodo efficiency", "H.hod");
-  gHaPhysics->Add(heff);
-
-  // Add event handler for scaler events
-  THcScalerEvtHandler *hscaler = new THcScalerEvtHandler("H", "Hall C scaler event type 4");  
-  hscaler->AddEvtType(2);
-  hscaler->AddEvtType(4);
-  hscaler->AddEvtType(5);
-  hscaler->AddEvtType(6);
-  hscaler->AddEvtType(7);
-  hscaler->AddEvtType(129);
-  hscaler->SetDelayedType(129);
-  hscaler->SetUseFirstEvent(kTRUE);
-  gHaEvtHandlers->Add(hscaler);
-
-  //=:=:=:=:=:=:=:=:=:=:=:=:=:=:=:=:=
-  // Kinematics Modules
-  //=:=:=:=:=:=:=:=:=:=:=:=:=:=:=:=:=
-
-  // Add Physics Module to calculate primary (scattered electrons) beam kinematics
-  THcPrimaryKine* hkin_primary = new THcPrimaryKine("H.kin.primary", "HMS Single Arm Kinematics", "H", "H.rb");
-  gHaPhysics->Add(hkin_primary);
-  // Add Physics Module to calculate secondary (scattered hadrons) beam kinematics
-  THcSecondaryKine* pkin_secondary = new THcSecondaryKine("P.kin.secondary", "SHMS Single Arm Kinematics", "P", "H.kin.primary");
-  gHaPhysics->Add(pkin_secondary);
-  
-  //=:=:=:=:=:=:=:=:=:=:=:=:=:=:=:=:=
-  // Global Objects & Event Handlers
-  //=:=:=:=:=:=:=:=:=:=:=:=:=:=:=:=:=
-
-  // Add trigger apparatus
-  THaApparatus* TRG = new THcTrigApp("T", "TRG");
-  gHaApps->Add(TRG);
-  // Add trigger detector to trigger apparatus
-  THcTrigDet* coin = new THcTrigDet("coin", "Coincidence Trigger Information");
-  // Suppress missing reference time warnings for these event types
-  coin->SetEvtType(1);
-  coin->AddEvtType(2);
-  TRG->AddDetector(coin); 
-
-  
-  //Add coin physics module THcCoinTime::THcCoinTime (const char *name, const char* description, const char* hadArmName, 
-  // const char* elecArmName, const char* coinname) :
-  THcCoinTime* coinTime = new THcCoinTime("CTime", "Coincidende Time Determination", "P", "H", "T.coin");
-  gHaPhysics->Add(coinTime);
-
-  // Add event handler for prestart event 125.
-  THcConfigEvtHandler* ev125 = new THcConfigEvtHandler("HC", "Config Event type 125");
-  gHaEvtHandlers->Add(ev125);
-  // Add event handler for EPICS events
-  THaEpicsEvtHandler* hcepics = new THaEpicsEvtHandler("epics", "HC EPICS event type 180");
-  gHaEvtHandlers->Add(hcepics);
- 
-  // Set up the analyzer - we use the standard one,
-  // but this could be an experiment-specific one as well.
-  // The Analyzer controls the reading of the data, executes
-  // tests/cuts, loops over Acpparatus's and PhysicsModules,
-  // and executes the output routines.
-  THcAnalyzer* analyzer = new THcAnalyzer;
-
-  // A simple event class to be output to the resulting tree.
-  // Creating your own descendant of THaEvent is one way of
-  // defining and controlling the output.
-  THaEvent* event = new THaEvent;
-
-  // Define the run(s) that we want to analyze.
-  // We just set up one, but this could be many.
-  THcRun* run = new THcRun( pathList, Form(RunFileNamePattern, RunNumber) );
-
-  // Set to read in Hall C run database parameters
-  run->SetRunParamClass("THcRunParameters");
-  
-  // Eventually need to learn to skip over, or properly analyze the pedestal events
-  run->SetEventRange(1, MaxEvent); // Physics Event number, does not include scaler or control events.
-  run->SetNscan(1);
-  run->SetDataRequired(0x7);
-  run->Print();
-
-  // Define the analysis parameters
-  TString ROOTFileName = Form(ROOTFileNamePattern, RunNumber, MaxEvent);
-  analyzer->SetCountMode(2);  // 0 = counter is # of physics triggers
-                              // 1 = counter is # of all decode reads
-                              // 2 = counter is event number
-
-  analyzer->SetEvent(event);
-  // Set EPICS event type
-  analyzer->SetEpicsEvtType(180);
-  // Define crate map
-  analyzer->SetCrateMapFileName("MAPS/db_cratemap.dat");
-  // Define output ROOT file
-  analyzer->SetOutFile(ROOTFileName.Data());
-  // Define DEF-file+
-  analyzer->SetOdefFile("DEF-files/COIN/PRODUCTION/coin_production_hElec_pProt.def");
-  // Define cuts file
-  analyzer->SetCutFile("DEF-files/COIN/PRODUCTION/CUTS/coin_production_cuts.def");  // optional
-  // File to record accounting information for cuts
-  analyzer->SetSummaryFile(Form("REPORT_OUTPUT/COIN/PRODUCTION/summary_production_%d_%d.report", RunNumber, MaxEvent));  // optional
-  // Start the actual analysis.
-  analyzer->Process(run);
-  // Create report file from template
-  analyzer->PrintReport("TEMPLATES/COIN/PRODUCTION/coin_production.template",
-  			Form("REPORT_OUTPUT/COIN/PRODUCTION/replay_coin_production_%d_%d.report", RunNumber, MaxEvent));  // optional
-
-}
diff --git a/tests/elastic_test.cxx b/tests/elastic_test.cxx
deleted file mode 100644
index 4c9ad79f09c3cfefec9d399f8fd68d6718ee3551..0000000000000000000000000000000000000000
--- a/tests/elastic_test.cxx
+++ /dev/null
@@ -1,446 +0,0 @@
-#include "nlohmann/json.hpp"
-#include <cmath>
-#include <iostream>
-
-#include "ROOT/RDataFrame.hxx"
-#include "ROOT/RVec.hxx"
-
-#include "Math/Vector3D.h"
-#include "Math/Vector4D.h"
-#include "Math/VectorUtil.h"
-#include "TCanvas.h"
-#include "TLatex.h"
-#include "TStyle.h"
-#include "TSystem.h"
-R__LOAD_LIBRARY(libMathMore.so)
-R__LOAD_LIBRARY(libGenVector.so)
-
-R__LOAD_LIBRARY(libfmt.so)
-#include "fmt/core.h"
-
-#include "THStack.h"
-
-#ifdef __cpp_lib_filesystem
-#include <filesystem>
-namespace fs = std::filesystem;
-#else
-#include <experimental/filesystem>
-namespace fs = std::experimental::filesystem;
-#endif
-
-using RDFNode = decltype(ROOT::RDataFrame{0}.Filter(""));
-using Histo1DProxy =
-    decltype(ROOT::RDataFrame{0}.Histo1D(ROOT::RDF::TH1DModel{"", "", 128u, 0., 0.}, ""));
-
-struct RDFInfo {
-  RDFNode&          df;
-  const std::string title;
-  RDFInfo(RDFNode& df, std::string_view title) : df{df}, title{title} {}
-};
-
-constexpr const double M_P     = .938272;
-constexpr const double M_P2    = M_P * M_P;
-constexpr const double M_pion  = 0.139;
-constexpr const double M_pion2 = M_pion * M_pion;
-constexpr const double M_e     = .000511;
-
-// =================================================================================
-// Cuts
-// =================================================================================
-std::string goodTrackSHMS = "P.gtr.dp > -10 && P.gtr.dp < 22";
-std::string goodTrackHMS  = "H.gtr.dp > -8 && H.gtr.dp < 8";
-
-std::string piCutSHMS = "P.cal.etottracknorm<1.0";
-//std::string piCutSHMS = "P.aero.npeSum > 1.0 && P.cal.eprtracknorm < 0.2 && P.cal.etottracknorm<1.0";
-
-std::string eCutHMS = "H.cal.etottracknorm > 0.50 && H.cal.etottracknorm < 2. && "
-                      "H.cer.npeSum > 1.";
-
-std::string epiCut = "P.aero.npeSum > 1.0 && P.cal.eprtracknorm < 0.2 && "
-                     "H.cer.npeSum > 1.0 && H.cal.etottracknorm > 0.6 && "
-                     "H.cal.etottracknorm < 2.0 && P.cal.etottracknorm<1.0";
-
-using Pvec3D = ROOT::Math::XYZVector;
-using Pvec4D = ROOT::Math::PxPyPzMVector;
-
-// =================================================================================
-// reconstruction
-// =================================================================================
-auto p_pion = [](double px, double py, double pz) {
-  return Pvec4D{px * 0.996, py * 0.996, pz * 0.996, M_e};
-};
-auto p_electron = [](double px, double py, double pz) {
-  return Pvec4D{px * 0.994, py * 0.994, pz * 0.994, M_e};
-};
-auto t = [](const double Egamma, Pvec4D& jpsi) {
-  Pvec4D beam{0, 0, Egamma, 0};
-  return (beam - jpsi).M2();
-};
-
-bool root_file_exists(std::string rootfile) {
-  bool found_good_file = false;
-  if (!gSystem->AccessPathName(rootfile.c_str())) {
-    TFile file(rootfile.c_str());
-    if (file.IsZombie()) {
-      std::cout << rootfile << " is a zombie.\n";
-      std::cout
-          << " Did your replay finish?  Check that the it is done before running this script.\n";
-      return false;
-      // return;
-    } else {
-      std::cout << " using : " << rootfile << "\n";
-      return true;
-    }
-  }
-  return false;
-}
-
-void elastic_test(int RunNumber = 6012, int nevents = 50000, int prompt = 0, int update = 0,
-                        int default_count_goal = 10000, int redo_timing = 0) {
-
-  // ===============================================================================================
-  // Initialization
-  // ===============================================================================================
-  using nlohmann::json;
-  json j;
-  {
-    std::ifstream json_input_file("db2/run_list_coin.json");
-    try {
-      json_input_file >> j;
-    } catch (json::parse_error) {
-      std::cerr << "error: json file, db2/run_list.json, is incomplete or has broken syntax.\n";
-      std::quick_exit(-127);
-    }
-  }
-
-  auto runnum_str = std::to_string(RunNumber);
-  if (j.find(runnum_str) == j.end()) {
-    std::cout << "Run " << RunNumber << " not found in db2/run_list_coin.json\n";
-    std::cout << "Check that run number and replay exists. \n";
-    std::cout << "If problem persists please contact Sylvester (217-848-0565)\n";
-  }
-  double P0_shms_setting = j[runnum_str]["spectrometers"]["shms_momentum"].get<double>();
-  double P0_shms         = std::abs(P0_shms_setting);
-
-  bool found_good_file = false;
-
-  std::string rootfile =
-      fmt::format("full_online/coin_replay_production_{}_{}.root", RunNumber, nevents);
-  found_good_file = root_file_exists(rootfile.c_str());
-  if (!found_good_file) {
-    rootfile =
-        fmt::format("ROOTfiles_volatile/coin_replay_production_{}_{}.root", RunNumber, nevents);
-    found_good_file = root_file_exists(rootfile.c_str());
-  }
-  if (!found_good_file) {
-    rootfile = fmt::format("ROOTfiles_csv/coin_replay_production_{}_{}.root", RunNumber, nevents);
-    found_good_file = root_file_exists(rootfile.c_str());
-  }
-  if (!found_good_file) {
-    rootfile = fmt::format("ROOTfiles/coin_replay_production_{}_{}.root", RunNumber, nevents);
-    found_good_file = root_file_exists(rootfile.c_str());
-  }
-  if (!found_good_file) {
-    std::cout << " Error: suitable root file not found\n";
-    return;
-  }
-
-  // ===============================================================================================
-  // Dataframe
-  // ===============================================================================================
-
-  ROOT::EnableImplicitMT(24);
-
-  //---------------------------------------------------------------------------
-  // Detector tree
-  ROOT::RDataFrame d("T", rootfile);
-
-  //// SHMS Scaler tree
-  //ROOT::RDataFrame d_sh("TSP", rootfile);
-  //// int N_scaler_events = *(d_sh.Count());
-
-  auto d_coin = d.Filter("fEvtHdr.fEvtType == 4");
-
-  // Good track cuts
-  auto dHMSGoodTrack  = d_coin.Filter(goodTrackHMS);
-  auto dSHMSGoodTrack = d_coin.Filter(goodTrackSHMS);
-  auto dCOINGoodTrack = dHMSGoodTrack.Filter(goodTrackSHMS)
-                            .Define("p_electron", p_electron, {"H.gtr.px", "H.gtr.py", "H.gtr.pz"})
-                            .Define("p_pion", p_pion, {"P.gtr.px", "P.gtr.py", "P.gtr.pz"});
-  // PID cuts
-  auto dHMSEl  = dHMSGoodTrack.Filter(eCutHMS);
-  auto dSHMSEl = dSHMSGoodTrack.Filter(piCutSHMS);
-  auto dCOINEl = dCOINGoodTrack.Filter(eCutHMS + " && " + piCutSHMS);
-                     //.Filter(
-                     //    [=](double npe, double dp) {
-                     //      double p_track = P0_shms * (100.0 + dp) / 100.0;
-                     //      // no cerenkov cut needed when momentum is below 2.8 GeV/c
-                     //      if (p_track < 2.8) {
-                     //        return true;
-                     //      }
-                     //      return npe > 1.0;
-                     //    },
-                     //    {"P.hgcer.npeSum", "P.gtr.dp"});
-
-  // Timing cuts
-  // Find the timing peak
-  // Find the coin peak
-  double coin_peak_center = 0;
-  if (redo_timing) {
-    auto h_coin_time =
-        dCOINEl.Histo1D({"coin_time", "coin_time", 8000, 0, 1000}, "CTime.ePositronCoinTime_ROC2");
-    h_coin_time->DrawClone();
-    int coin_peak_bin = h_coin_time->GetMaximumBin();
-    coin_peak_center  = h_coin_time->GetBinCenter(coin_peak_bin);
-    std::cout << "COINCIDENCE time peak found at: " << coin_peak_center << std::endl;
-  } else {
-    //coin_peak_center = 43.0; // run 7240-7241
-    coin_peak_center = 23.0; // run 6012
-    std::cout << "COINCIDENCE time peak: using pre-calculated value at: " << coin_peak_center
-              << std::endl;
-    ;
-  }
-  // timing cut lambdas
-  // TODO: evaluate timing cut and offset for random background
-  auto timing_cut = [=](double coin_time) { return std::abs(coin_time - coin_peak_center) < 2.; };
-  // anti-timing set to 5x width of regular
-  auto anti_timing_cut = [=](double coin_time) {
-    return std::abs(coin_time - coin_peak_center - 28.) < 10.;
-  };
-
-  // timing counts
-  auto dHMSElInTime  = dHMSEl.Filter(timing_cut, {"CTime.ePositronCoinTime_ROC2"});
-  auto dHMSElRandom  = dHMSEl.Filter(anti_timing_cut, {"CTime.ePositronCoinTime_ROC2"});
-  auto dSHMSElInTime = dSHMSEl.Filter(timing_cut, {"CTime.ePositronCoinTime_ROC2"});
-  auto dSHMSElRandom = dSHMSEl.Filter(anti_timing_cut, {"CTime.ePositronCoinTime_ROC2"});
-
-  auto dCOINElInTime = dCOINEl.Filter(timing_cut, {"CTime.ePiCoinTime_ROC2"});
-  auto dCOINElRandom = dCOINEl.Filter(anti_timing_cut, {"CTime.ePiCoinTime_ROC2"});
-
-  // Output root file
-  //auto out_file =
-  //    new TFile(fmt::format("monitoring/{}/good_csv_counter.root", RunNumber).c_str(), "UPDATE");
-  //out_file->cd();
-
-  // =========================================================================================
-  // Histograms
-  // =========================================================================================
-  // 2D correlations
-  auto hTheta2DNoCuts = d_coin.Histo2D(
-      {"theta2D", "No cuts;#theta_{SHMS};#theta_{HMS};#counts", 50, -.1, .1, 50, -.1, .1},
-      "P.gtr.th", "H.gtr.th");
-  auto hTheta2DTracking = dCOINGoodTrack.Histo2D(
-      {"theta2D", "Cuts: tracking;#theta_{SHMS};#theta_{HMS};#counts", 50, -.1, .1, 50, -.1, .1},
-      "P.gtr.th", "H.gtr.th");
-  auto hTheta2DPID =
-      dCOINEl.Histo2D({"theta2D", "Cuts: tracking+PID;#theta_{SHMS};#theta_{HMS};#counts", 50, -.1,
-                       .1, 50, -.1, .1},
-                      "P.gtr.th", "H.gtr.th");
-  auto hTheta2DTiming =
-      dCOINElInTime.Histo2D({"theta2D", "Cuts: tracking+PID;#theta_{SHMS};#theta_{HMS};#counts", 50,
-                             -.1, .1, 50, -.1, .1},
-                            "P.gtr.th", "H.gtr.th");
-  // timing
-  auto hCoinTimeNoCuts =
-      d_coin.Histo1D({"coin_time.NoCuts", "No Cuts;coin_time;counts", 8000, 0, 1000},
-                     "CTime.ePositronCoinTime_ROC2");
-  auto hCoinTimeTracking = dCOINGoodTrack.Histo1D(
-      {"coin_time.Tracking", "Cuts: Tracking;coin_time;counts", 8000, 0, 1000},
-      "CTime.ePositronCoinTime_ROC2");
-  auto hCoinTimePID =
-      dCOINEl.Histo1D({"coin_time.PID", "Cuts: Tracking+PID;coin_time;counts", 8000, 0, 1000},
-                      "CTime.ePositronCoinTime_ROC2");
-  auto hCoinTimeTiming = dCOINElInTime.Histo1D(
-      {"coin_time.Timing", "Cuts: Tracking+PID+Timing;coin_time;counts", 8000, 0, 1000},
-      "CTime.ePositronCoinTime_ROC2");
-
-  auto hRandCoinTimePID = dCOINElRandom.Histo1D(
-      {"rand_coin_time.PID", "Cuts: Tracking+PID;coin_time;counts", 8000, 0, 1000},
-      "CTime.ePositronCoinTime_ROC2");
-
-  // P.gtr.dp
-  auto hPdpNoCuts =
-      d_coin.Histo1D({"P.gtr.dp.NoCuts", "No Cuts;#deltap [%];counts", 200, -30, 40}, "P.gtr.dp");
-  auto hPdpTracking = dSHMSGoodTrack.Histo1D(
-      {"P.gtr.dp.Tracking", "Cuts: Tracking;#deltap [%];counts", 200, -30, 40}, "P.gtr.dp");
-  auto hPdpPID = dSHMSEl.Histo1D(
-      {"P.gtr.dp.PID", "Cuts: Tracking+PID;#deltap [%];counts", 200, -30, 40}, "P.gtr.dp");
-  auto hPdpTiming = dSHMSElInTime.Histo1D(
-      {"P.gtr.dp.Timing", "Cuts: Tracking+PID+Timing;#deltap [%];counts", 200, -30, 40},
-      "P.gtr.dp");
-  // P.gtr.th
-  auto hPthNoCuts = d_coin.Histo1D(
-      {"P.gtr.th.NoCuts", "No Cuts;#theta_{SHMS};counts", 200, -0.1, 0.1}, "P.gtr.th");
-  auto hPthTracking = dSHMSGoodTrack.Histo1D(
-      {"P.gtr.th.Tracking", "Cuts: Tracking;#theta_{SHMS};counts", 200, -0.1, 0.1}, "P.gtr.th");
-  auto hPthPID = dSHMSEl.Histo1D(
-      {"P.gtr.th.PID", "Cuts: Tracking+PID;#theta_{SHMS};counts", 200, -0.1, 0.1}, "P.gtr.th");
-  auto hPthTiming = dSHMSElInTime.Histo1D(
-      {"P.gtr.th.Timing", "Cuts: Tracking+PID+Timing;#theta_{SHMS};counts", 200, -0.1, 0.1},
-      "P.gtr.th");
-  // P.gtr.ph
-  auto hPphNoCuts =
-      d_coin.Histo1D({"P.gtr.ph.NoCuts", "No Cuts;#phi_{SHMS};counts", 200, -0.1, 0.1}, "P.gtr.ph");
-  auto hPphTracking = dSHMSGoodTrack.Histo1D(
-      {"P.gtr.ph.Tracking", "Cuts: Tracking;#phi_{SHMS};counts", 200, -0.1, 0.1}, "P.gtr.ph");
-  auto hPphPID = dSHMSEl.Histo1D(
-      {"P.gtr.ph.PID", "Cuts: Tracking+PID;#phi_{SHMS};counts", 200, -0.1, 0.1}, "P.gtr.ph");
-  auto hPphTiming = dSHMSElInTime.Histo1D(
-      {"P.gtr.ph.Timing", "Cuts: Tracking+PID+Timing;#phi_{SHMS};counts", 200, -0.1, 0.1},
-      "P.gtr.ph");
-  // P.gtr.y
-  auto hPyNoCuts =
-      d_coin.Histo1D({"P.gtr.y.NoCuts", "No Cuts;ytar;counts", 200, -10., 10.}, "P.gtr.y");
-  auto hPyTracking = dSHMSGoodTrack.Histo1D(
-      {"P.gtr.y.Tracking", "Cuts: Tracking;ytar;counts", 200, -10., 10.}, "P.gtr.y");
-  auto hPyPID =
-      dSHMSEl.Histo1D({"P.gtr.y.PID", "Cuts: Tracking+PID;ytar;counts", 200, -10., 10.}, "P.gtr.y");
-  auto hPyTiming = dSHMSElInTime.Histo1D(
-      {"P.gtr.y.Timing", "Cuts: Tracking+PID+Timing;ytar;counts", 200, -10., 10.}, "P.gtr.y");
-  // P.cal.etottracknorm
-  auto hPcalEPNoCuts =
-      d_coin.Histo1D({"P.cal.etottracknorm.NoCuts", "No Cuts;SHMS E/P;counts", 200, -.5, 1.5},
-                     "P.cal.etottracknorm");
-  auto hPcalEPTracking = dSHMSGoodTrack.Histo1D(
-      {"P.cal.etottracknorm.Tracking", "Cuts: Tracking;SHMS E/P;counts", 200, -.5, 1.5},
-      "P.cal.etottracknorm");
-  auto hPcalEPPID = dSHMSEl.Histo1D(
-      {"P.cal.etottracknorm.PID", "Cuts: Tracking+PID;SHMS E/P;counts", 200, -.5, 1.5},
-      "P.cal.etottracknorm");
-  auto hPcalEPAll = dCOINElInTime.Histo1D(
-      {"P.cal.etottracknorm.All", "Cuts: Tracking+PID+Coincidence;SHMS E/P;counts", 200, -.5, 1.5},
-      "P.cal.etottracknorm");
-  // P.ngcer.npeSum
-  auto hPcerNpheNoCuts = d_coin.Histo1D(
-      {"P.ngcer.npeSum.NoCuts", "No Cuts;SHMS NGC #phe;counts", 200, -5, 76}, "P.ngcer.npeSum");
-  auto hPcerNpheTracking = dSHMSGoodTrack.Histo1D(
-      {"P.ngcer.npeSum.Tracking", "Cuts: Tracking;SHMS NGC #phe;counts", 200, -5, 76},
-      "P.ngcer.npeSum");
-  auto hPcerNphePID = dSHMSEl.Histo1D(
-      {"P.ngcer.npeSum.PID", "Cuts: Tracking+PID;SHMS NGC #phe;counts", 200, -5, 76},
-      "P.ngcer.npeSum");
-  auto hPcerNpheAll = dCOINElInTime.Histo1D(
-      {"P.ngcer.npeSum.All", "Cuts: Tracking+PID+Coincidence;SHMS NGC #phe;counts", 200, -5, 76},
-      "P.ngcer.npeSum");
-  // P.hgcer.npeSum
-  auto hPhgcerNpheNoCuts = d_coin.Histo1D(
-      {"P.hgcer.npeSum.NoCuts", "No Cuts;SHMS HGC #phe;counts", 200, -5, 76}, "P.hgcer.npeSum");
-  auto hPhgcerNpheTracking = dSHMSGoodTrack.Histo1D(
-      {"P.hgcer.npeSum.Tracking", "Cuts: Tracking;SHMS HGC #phe;counts", 200, -5, 76},
-      "P.hgcer.npeSum");
-  auto hPhgcerNphePID = dSHMSEl.Histo1D(
-      {"P.hgcer.npeSum.PID", "Cuts: Tracking+PID;SHMS HGC #phe;counts", 200, -5, 76},
-      "P.hgcer.npeSum");
-  auto hPhgcerNpheAll = dCOINElInTime.Histo1D(
-      {"P.hgcer.npeSum.All", "Cuts: Tracking+PID+Coincidence;SHMS HGC #phe;counts", 200, -5, 76},
-      "P.hgcer.npeSum");
-  // H.cal.etottracknorm
-  auto hHcalEPNoCuts =
-      d_coin.Histo1D({"H.cal.etottracknorm.NoCuts", "No Cuts;HMS E/P;counts", 200, -.5, 1.5},
-                     "H.cal.etottracknorm");
-  auto hHcalEPTracking = dHMSGoodTrack.Histo1D(
-      {"H.cal.etottracknorm.Tracking", "Cuts: Tracking;HMS E/P;counts", 200, -.5, 1.5},
-      "H.cal.etottracknorm");
-  auto hHcalEPPID = dHMSEl.Histo1D(
-      {"H.cal.etottracknorm.PID", "Cuts: Tracking+PID;HMS E/P;counts", 200, -.5, 1.5},
-      "H.cal.etottracknorm");
-  auto hHcalEPAll = dCOINElInTime.Histo1D(
-      {"H.cal.etottracknorm.All", "Cuts: Tracking+PID+Coincidence;HMS E/P;counts", 200, -.5, 1.5},
-      "H.cal.etottracknorm");
-  // H.cer.npeSum
-  auto hHcerNpheNoCuts = d_coin.Histo1D(
-      {"H.cer.npeSum.NoCuts", "No Cuts;HMS Cer #phe;counts", 200, -1, 15}, "H.cer.npeSum");
-  auto hHcerNpheTracking = dHMSGoodTrack.Histo1D(
-      {"H.cer.npeSum.Tracking", "Cuts: Tracking;HMS Cer #phe;counts", 200, -1, 15}, "H.cer.npeSum");
-  auto hHcerNphePID = dHMSEl.Histo1D(
-      {"H.cer.npeSum.PID", "Cuts: Tracking+PID+Coincidence;HMS Cer #phe;counts", 200, -1, 15},
-      "H.cer.npeSum");
-  auto hHcerNpheAll = dCOINElInTime.Histo1D(
-      {"H.cer.npeSum.PID", "Cuts: Tracking+PID+Coincidence;HMS Cer #phe;counts", 200, -1, 15},
-      "H.cer.npeSum");
-  // H.gtr.dp
-  auto hHdpNoCuts =
-      d_coin.Histo1D({"H.gtr.dp.NoCuts", "No Cuts;#deltap [%];counts", 200, -30, 40}, "H.gtr.dp");
-  auto hHdpTracking = dHMSGoodTrack.Histo1D(
-      {"H.gtr.dp.Tracking", "Cuts: Tracking;#deltap [%];counts", 200, -30, 40}, "H.gtr.dp");
-  auto hHdpPID = dHMSEl.Histo1D(
-      {"H.gtr.dp.PID", "Cuts: Tracking+PID;#deltap [%];counts", 200, -30, 40}, "H.gtr.dp");
-  auto hHdpTiming = dHMSElInTime.Histo1D(
-      {"H.gtr.dp.Timing", "Cuts: Tracking+PID+Timing;#deltap [%];counts", 200, -30, 40},
-      "H.gtr.dp");
-  // H.gtr.th
-  auto hHthNoCuts = d_coin.Histo1D(
-      {"H.gtr.th.NoCuts", "No Cuts;#theta_{HMS};counts", 200, -0.1, 0.1}, "H.gtr.th");
-  auto hHthTracking = dHMSGoodTrack.Histo1D(
-      {"H.gtr.th.Tracking", "Cuts: Tracking;#theta_{HMS};counts", 200, -0.1, 0.1}, "H.gtr.th");
-  auto hHthPID = dHMSEl.Histo1D(
-      {"H.gtr.th.PID", "Cuts: Tracking+PID;#theta_{HMS};counts", 200, -0.1, 0.1}, "H.gtr.th");
-  auto hHthTiming = dHMSElInTime.Histo1D(
-      {"H.gtr.th.Timing", "Cuts: Tracking+PID+Timing;#theta_{HMS};counts", 200, -0.1, 0.1},
-      "H.gtr.th");
-  // H.gtr.ph
-  auto hHphNoCuts =
-      d_coin.Histo1D({"H.gtr.ph.NoCuts", "No Cuts;#phi_{HMS};counts", 200, -0.1, 0.1}, "H.gtr.ph");
-  auto hHphTracking = dHMSGoodTrack.Histo1D(
-      {"H.gtr.ph.Tracking", "Cuts: Tracking;#phi_{HMS};counts", 200, -0.1, 0.1}, "H.gtr.ph");
-  auto hHphPID = dHMSEl.Histo1D(
-      {"H.gtr.ph.PID", "Cuts: Tracking+PID;#phi_{HMS};counts", 200, -0.1, 0.1}, "H.gtr.ph");
-  auto hHphTiming = dHMSElInTime.Histo1D(
-      {"H.gtr.ph.Timing", "Cuts: Tracking+PID+Timing;#phi_{HMS};counts", 200, -0.1, 0.1},
-      "H.gtr.ph");
-  // H.gtr.y
-  auto hHyNoCuts =
-      d_coin.Histo1D({"H.gtr.y.NoCuts", "No Cuts;ytar;counts", 200, -10., 10.}, "H.gtr.y");
-  auto hHyTracking = dHMSGoodTrack.Histo1D(
-      {"H.gtr.y.Tracking", "Cuts: Tracking;ytar;counts", 200, -10., 10.}, "H.gtr.y");
-  auto hHyPID =
-      dHMSEl.Histo1D({"H.gtr.y.PID", "Cuts: Tracking+PID;ytar;counts", 200, -10., 10.}, "H.gtr.y");
-  auto hHyTiming = dHMSElInTime.Histo1D(
-      {"H.gtr.y.Timing", "Cuts: Tracking+PID+Timing;ytar;counts", 200, -10., 10.}, "H.gtr.y");
-
-  // scalers
-  //auto total_charge        = d_sh.Max("P.BCM4B.scalerChargeCut");
-  //auto shms_el_real_scaler = d_sh.Max("P.pEL_REAL.scaler");
-  //auto hms_el_real_scaler  = d_sh.Max("P.hEL_REAL.scaler");
-  //auto time_1MHz           = d_sh.Max("P.1MHz.scalerTime");
-  //auto time_1MHz_cut       = d_sh.Max("P.1MHz.scalerTimeCut");
-
-  auto yield_all = d.Count();
-  // 5 timing cut widths worth of random backgrounds
-  auto yield_coin          = d_coin.Count();
-  auto yield_HMSGoodTrack  = dHMSGoodTrack.Count();
-  auto yield_SHMSGoodTrack = dSHMSGoodTrack.Count();
-  auto yield_COINGoodTrack = dCOINGoodTrack.Count();
-  auto yield_HMSEl         = dHMSEl.Count();
-  auto yield_SHMSEl        = dSHMSEl.Count();
-  auto yield_COINEl        = dCOINEl.Count();
-  auto yield_HMSElInTime   = dHMSElInTime.Count();
-  auto yield_HMSElRandom   = dHMSElRandom.Count();
-  auto yield_SHMSElInTime  = dSHMSElInTime.Count();
-  auto yield_SHMSElRandom  = dSHMSElRandom.Count();
-  auto yield_COINElInTime  = dCOINElInTime.Count();
-  auto yield_COINElRandom  = dCOINElRandom.Count();
-  auto yield_coin_raw      = dCOINElInTime.Count();
-  auto yield_coin_random   = dCOINElRandom.Count();
-
-
-  // -------------------------------------
-  // End lazy eval
-  // -------------------------------------
-  auto n_coin_good  = *yield_coin_raw - *yield_coin_random / 5.;
-  auto n_HMSElGood  = *yield_HMSElInTime - *yield_HMSElRandom / 5;
-  auto n_SHMSElGood = *yield_SHMSElInTime - *yield_SHMSElRandom / 5;
-  auto n_COINElGood = *yield_COINElInTime - *yield_COINElRandom / 5;
-
-  hPdpNoCuts->DrawCopy();
-  //std::cout << "  coin COUNTS : " << *(d_coin.Count()) << "\n";
-  //std::cout << " yield_HMSEl : " << *(yield_HMSEl) << "\n";
-  std::cout << " yield_COINEl : " << *(yield_COINEl) << "\n";
-  //std::cout << "  ALL COUNTS : " << *yield_all << "\n";
-  //std::cout << " GOOD COUNTS : " << n_COINElGood << "\n";
-  //
-  if( 4 != (*(yield_COINEl)) ){
-    std::exit(-1);
-  }
-  std::exit(0);
-}
diff --git a/tests/elastic_test.sh b/tests/elastic_test.sh
deleted file mode 100644
index 5d137e59c5413ab3ab02a67612ca8466182b04f9..0000000000000000000000000000000000000000
--- a/tests/elastic_test.sh
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/bin/bash
-
-#echo "This is the elastic testing..."
-#echo " "
-#echo "There are currently 0 tests to run!"
-#which hcana
-#
-#ls -lrth
-#ls -lrth build
-#
-#git clone git@eicweb.phy.anl.gov:jlab/hallc/exp/CSV/hallc_replay_csv.git
-#git clone git@eicweb.phy.anl.gov:jlab/hallc/exp/CSV/online_csv.git
-#cd online_csv 
-#ln -s ../hallc_reaply_csv/PARAM
-## and the reset
-#
-#mkdir raw 
-#pushd raw
-#  wget coin.dat
-#popd
-
-
-singularity help build/Singularity.hcana.simg
-
-
-singularity exec build/Singularity.hcana.simg hcana tests/elastic_test.cxx
diff --git a/tests/elastic_test2.sh b/tests/elastic_test2.sh
deleted file mode 100644
index 36da2058d88749b20c9af12ce265f23befd157c2..0000000000000000000000000000000000000000
--- a/tests/elastic_test2.sh
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/bin/bash
-
-echo "This is the elastic testing..."
-echo " "
-echo "There are currently 0 tests to run!"
-which hcana
-
-ls -lrth
-ls -lrth build
-
-git clone git@eicweb.phy.anl.gov:jlab/hallc/exp/CSV/hallc_replay_csv.git
-git clone git@eicweb.phy.anl.gov:jlab/hallc/exp/CSV/online_csv.git
-
-cd online_csv 
-ln -s ../hallc_reaply_csv/PARAM
-# and the reset
-
-mkdir raw 
-pushd raw
-  wget coin.dat
-popd
-
-
-singularity help build/Singularity.hcana.simg
-
-singularity exec build/Singularity.hcana.simg which hcana
-
-singularity exec build/Singularity.hcana.simg hcana tests/my_root_script.cxx
-
-echo " WOOOO"
diff --git a/tests/my_root_script.cxx b/tests/my_root_script.cxx
deleted file mode 100644
index 0d3686284d799790b779b7724cdbca81d3a5c129..0000000000000000000000000000000000000000
--- a/tests/my_root_script.cxx
+++ /dev/null
@@ -1,20 +0,0 @@
-void my_root_script() {
-
-
-
-  std::cout << "Hello from my_root_script.cxx!\n";
-
-  std::cout << "This should be run with singularity\n";
-  double pi = 3.14;
-
-  auto pi_equals_3 = (3 == pi);
-  std::cout <<  " pi_equals_3 = " << pi_equals_3 << "\n";
-
-  if( pi_equals_3) {
-    std::cout << "what the hell?\n";
-    std::exit( 0 );
-  }
-  /* else */
-  
-  std::exit( -1 );
-}
diff --git a/tests/replay_elastic_data.sh b/tests/replay_elastic_data.sh
deleted file mode 100644
index 48b147f737be5ce6b44ed32e39f3505b377dccb8..0000000000000000000000000000000000000000
--- a/tests/replay_elastic_data.sh
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/bin/bash
-
-echo "This is the elastic testing..."
-echo " "
-echo "There are currently 0 tests to run!"
-which hcana
-
-ls -lrth
-ls -lrth build
-mkdir -p ROOTfiles
-git clone https://eicweb.phy.anl.gov/whit/ci_test_data.git 
-git clone https://eicweb.phy.anl.gov/jlab/hallc/exp/CSV/hallc_replay_csv.git
-git clone https://eicweb.phy.anl.gov/jlab/hallc/exp/CSV/online_csv.git
-
-cd online_csv 
-mkdir -p logs
-ln -s ../hallc_replay_csv/PARAM
-ln -s ../hallc_replay_csv/DBASE
-ln -s ../hallc_replay_csv/CALIBRATION
-ln -s ../hallc_replay_csv/DEF-files
-ln -s ../hallc_replay_csv/MAPS
-ln -s ../hallc_replay_csv/SCRIPTS
-ln -s ../hallc_replay_csv/DATFILES
-ln -s ../ci_test_data/raw
-ln -s ../ROOTfiles
-# and the reset
-
-ls -lrth
-ls -lrth raw/
-ls -lrth ROOTfiles/
-pwd
-# run replay script
-df -h
-
-singularity exec ../build/Singularity.hcana.simg hcana -b -q "../tests/elastic_coin_replay.cxx+(6012,50000)"
-
-echo "hcana calls... the coin replay script and outputs blah.root"
-