Commit 496e5344 authored by Sylvester Joosten
Fix build

parent 4d68a321
1 merge request: !17 Fix build
Showing 1629 additions and 0 deletions
#!/bin/bash
iatest=$(expr index "$-" i)
#######################################################
# SOURCED ALIASES AND SCRIPTS BY zachbrowne.me
#######################################################
# Source global definitions
if [ -f /etc/bashrc ]; then
. /etc/bashrc
fi
# Enable bash programmable completion features in interactive shells
if [ -f /usr/share/bash-completion/bash_completion ]; then
. /usr/share/bash-completion/bash_completion
elif [ -f /etc/bash_completion ]; then
. /etc/bash_completion
fi
#######################################################
# EXPORTS
#######################################################
# Disable the bell
if [[ $iatest -gt 0 ]]; then bind "set bell-style visible"; fi
# Expand the history size
export HISTFILESIZE=10000
export HISTSIZE=500
# Don't put duplicate lines in the history and do not add lines that start with a space
export HISTCONTROL=erasedups:ignoredups:ignorespace
# Check the window size after each command and, if necessary, update the values of LINES and COLUMNS
shopt -s checkwinsize
# Causes bash to append to history instead of overwriting it so if you start a new terminal, you have old session history
shopt -s histappend
PROMPT_COMMAND='history -a'
# Allow ctrl-S for history navigation (with ctrl-R)
stty -ixon
# Ignore case on auto-completion
# Note: bind used instead of sticking these in .inputrc
if [[ $iatest -gt 0 ]]; then bind "set completion-ignore-case on"; fi
# Show auto-completion list automatically, without double tab
if [[ $iatest -gt 0 ]]; then bind "set show-all-if-ambiguous on"; fi
# Set the default editor
export EDITOR=vim
export VISUAL=vim
alias pico='edit'
alias spico='sedit'
alias nano='edit'
alias snano='sedit'
# To have colors for ls and all grep commands such as grep, egrep and zgrep
export CLICOLOR=1
export LS_COLORS='no=00:fi=00:di=00;34:ln=01;36:pi=40;33:so=01;35:do=01;35:bd=40;33;01:cd=40;33;01:or=40;31;01:ex=01;32:*.tar=01;31:*.tgz=01;31:*.arj=01;31:*.taz=01;31:*.lzh=01;31:*.zip=01;31:*.z=01;31:*.Z=01;31:*.gz=01;31:*.bz2=01;31:*.deb=01;31:*.rpm=01;31:*.jar=01;31:*.jpg=01;35:*.jpeg=01;35:*.gif=01;35:*.bmp=01;35:*.pbm=01;35:*.pgm=01;35:*.ppm=01;35:*.tga=01;35:*.xbm=01;35:*.xpm=01;35:*.tif=01;35:*.tiff=01;35:*.png=01;35:*.mov=01;35:*.mpg=01;35:*.mpeg=01;35:*.avi=01;35:*.fli=01;35:*.gl=01;35:*.dl=01;35:*.xcf=01;35:*.xwd=01;35:*.ogg=01;35:*.mp3=01;35:*.wav=01;35:*.xml=00;31:'
#export GREP_OPTIONS='--color=auto' #deprecated
#alias grep="/usr/bin/grep $GREP_OPTIONS"
#unset GREP_OPTIONS
# Color for manpages in less makes manpages a little easier to read
export LESS_TERMCAP_mb=$'\E[01;31m'
export LESS_TERMCAP_md=$'\E[01;31m'
export LESS_TERMCAP_me=$'\E[0m'
export LESS_TERMCAP_se=$'\E[0m'
export LESS_TERMCAP_so=$'\E[01;44;33m'
export LESS_TERMCAP_ue=$'\E[0m'
export LESS_TERMCAP_us=$'\E[01;32m'
#######################################################
# GENERAL ALIASES
#######################################################
# Aliases for modified commands
alias cp='cp -i'
alias mv='mv -i'
alias rm='rm -iv'
alias mkdir='mkdir -p'
alias ps='ps auxf'
alias ping='ping -c 10'
alias less='less -R'
alias cls='clear'
alias apt-get='apt-get'
alias multitail='multitail --no-repeat -c'
alias freshclam='freshclam'
alias vi='vim'
alias vis='vim "+set si"'
# Change directory aliases
alias home='cd ~'
alias cd..='cd ..'
alias ..='cd ..'
alias ...='cd ../..'
alias ....='cd ../../..'
alias .....='cd ../../../..'
# cd into the old directory
alias bd='cd "$OLDPWD"'
# Remove a directory and all files
alias rmd='/bin/rm --recursive --force --verbose '
# Aliases for multiple directory listing commands
alias la='ls -Alh' # show hidden files
alias ls='ls -aFh --color=always' # add colors and file type extensions
alias lx='ls -lXBh' # sort by extension
alias lk='ls -lSrh' # sort by size
alias lc='ls -lcrh' # sort by change time
alias lu='ls -lurh' # sort by access time
alias lr='ls -lRh' # recursive ls
alias lt='ls -ltrh' # sort by date
alias lm='ls -alh |more' # pipe through 'more'
alias lw='ls -xAh' # wide listing format
alias ll='ls -Fls' # long listing format
alias labc='ls -lap' #alphabetical sort
alias lf="ls -l | egrep -v '^d'" # files only
alias ldir="ls -l | egrep '^d'" # directories only
# Aliases for chmod commands
alias mx='chmod a+x'
alias 000='chmod -R 000'
alias 644='chmod -R 644'
alias 666='chmod -R 666'
alias 755='chmod -R 755'
alias 777='chmod -R 777'
# Search command line history
alias h="history | grep "
# Search running processes
alias p="ps aux | grep "
alias topcpu="/bin/ps -eo pcpu,pid,user,args | sort -k 1 -r | head -10"
# Search files in the current folder
alias f="find . | grep "
# Count all files (recursively) in the current folder
alias countfiles="for t in files links directories; do echo \`find . -type \${t:0:1} | wc -l\` \$t; done 2> /dev/null"
# To see if a command is aliased, a file, or a built-in command
alias checkcommand="type -t"
# Show all logs in /var/log
alias logs="sudo find /var/log -type f -exec file {} \; | grep 'text' | cut -d' ' -f1 | sed -e's/:$//g' | grep -v '[0-9]$' | xargs tail -f"
# SHA1
alias sha1='openssl sha1'
#######################################################
# SPECIAL FUNCTIONS
#######################################################
# Extracts any archive(s) (if unp isn't installed)
extract () {
for archive in "$@"; do
if [ -f "$archive" ] ; then
case "$archive" in
*.tar.bz2) tar xvjf "$archive" ;;
*.tar.gz) tar xvzf "$archive" ;;
*.bz2) bunzip2 "$archive" ;;
*.rar) rar x "$archive" ;;
*.gz) gunzip "$archive" ;;
*.tar) tar xvf "$archive" ;;
*.tbz2) tar xvjf "$archive" ;;
*.tgz) tar xvzf "$archive" ;;
*.zip) unzip "$archive" ;;
*.Z) uncompress "$archive" ;;
*.7z) 7z x "$archive" ;;
*) echo "don't know how to extract '$archive'..." ;;
esac
else
echo "'$archive' is not a valid file!"
fi
done
}
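# Example usage (hypothetical archive names): extract results.tar.gz logs.zip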
# Searches for text in all files in the current folder
ftext ()
{
# -i case-insensitive
# -I ignore binary files
# -H causes filename to be printed
# -r recursive search
# -n causes line number to be printed
# optional: -F treat search term as a literal, not a regular expression
# optional: -l only print filenames and not the matching lines ex. grep -irl "$1" *
grep -iIHrn --color=always "$1" . | less -r
}
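# Example usage (hypothetical search term): ftext "TODO"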
# Copy file with a progress bar
cpp()
{
set -e
strace -q -ewrite cp -- "${1}" "${2}" 2>&1 \
| awk '{
count += $NF
if (count % 10 == 0) {
percent = count / total_size * 100
printf "%3d%% [", percent
for (i=0;i<=percent;i++)
printf "="
printf ">"
for (i=percent;i<100;i++)
printf " "
printf "]\r"
}
}
END { print "" }' total_size=$(stat -c '%s' "${1}") count=0
}
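# Example usage (hypothetical file names): cpp big_file.iso /tmp/big_file.iso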
# Copy and go to the directory
cpg ()
{
if [ -d "$2" ];then
cp $1 $2 && cd $2
else
cp $1 $2
fi
}
# Move and go to the directory
mvg ()
{
if [ -d "$2" ];then
mv $1 $2 && cd $2
else
mv $1 $2
fi
}
# Create and go to the directory
mkdirg ()
{
mkdir -p $1
cd $1
}
# Goes up a specified number of directories (e.g. up 4)
up ()
{
local d=""
limit=$1
for ((i=1 ; i <= limit ; i++))
do
d=$d/..
done
d=$(echo $d | sed 's/^\///')
if [ -z "$d" ]; then
d=..
fi
cd $d
}
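# Example: 'up 3' is equivalent to 'cd ../../..'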
# Automatically do an ls after each cd
# cd ()
# {
# if [ -n "$1" ]; then
# builtin cd "$@" && ls
# else
# builtin cd ~ && ls
# fi
# }
# Returns the last 2 fields of the working directory
pwdtail ()
{
pwd|awk -F/ '{nlast = NF -1;print $nlast"/"$NF}'
}
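# Example: in /home/user/project/src, pwdtail prints "project/src"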
# Show the current version of the operating system
ver ()
{
local dtype
dtype=$(distribution)
if [ $dtype == "redhat" ]; then
if [ -s /etc/redhat-release ]; then
cat /etc/redhat-release && uname -a
else
cat /etc/issue && uname -a
fi
elif [ $dtype == "suse" ]; then
cat /etc/SuSE-release
elif [ $dtype == "debian" ]; then
lsb_release -a
# sudo cat /etc/issue && sudo cat /etc/issue.net && sudo cat /etc/lsb_release && sudo cat /etc/os-release # Linux Mint option 2
elif [ $dtype == "gentoo" ]; then
cat /etc/gentoo-release
elif [ $dtype == "mandriva" ]; then
cat /etc/mandriva-release
elif [ $dtype == "slackware" ]; then
cat /etc/slackware-version
else
if [ -s /etc/issue ]; then
cat /etc/issue
else
echo "Error: Unknown distribution"
return 1
fi
fi
}
# For some reason, rot13 pops up everywhere
rot13 () {
if [ $# -eq 0 ]; then
tr '[a-m][n-z][A-M][N-Z]' '[n-z][a-m][N-Z][A-M]'
else
echo $* | tr '[a-m][n-z][A-M][N-Z]' '[n-z][a-m][N-Z][A-M]'
fi
}
# Trim leading and trailing spaces (for scripts)
trim()
{
local var=$@
var="${var#"${var%%[![:space:]]*}"}" # remove leading whitespace characters
var="${var%"${var##*[![:space:]]}"}" # remove trailing whitespace characters
echo -n "$var"
}
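# Example (hypothetical input): trim "   hello world  " prints "hello world" (no trailing newline)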
#######################################################
# Set the ultimate amazing command prompt
#######################################################
alias cpu="grep 'cpu ' /proc/stat | awk '{usage=(\$2+\$4)*100/(\$2+\$4+\$5)} END {print usage}' | awk '{printf(\"%.1f\n\", \$1)}'"
function __setprompt
{
local LAST_COMMAND=$? # Must come first!
# Define colors
local LIGHTGRAY="\033[0;37m"
local WHITE="\033[1;37m"
local BLACK="\033[0;30m"
local DARKGRAY="\033[1;30m"
local RED="\033[0;31m"
local LIGHTRED="\033[1;31m"
local GREEN="\033[0;32m"
local LIGHTGREEN="\033[1;32m"
local BROWN="\033[0;33m"
local YELLOW="\033[1;33m"
local BLUE="\033[0;34m"
local LIGHTBLUE="\033[1;34m"
local MAGENTA="\033[0;35m"
local LIGHTMAGENTA="\033[1;35m"
local CYAN="\033[0;36m"
local LIGHTCYAN="\033[1;36m"
local NOCOLOR="\033[0m"
# Show error exit code if there is one
if [[ $LAST_COMMAND != 0 ]]; then
# PS1="\[${RED}\](\[${LIGHTRED}\]ERROR\[${RED}\])-(\[${LIGHTRED}\]Exit Code \[${WHITE}\]${LAST_COMMAND}\[${RED}\])-(\[${LIGHTRED}\]"
PS1="\[${DARKGRAY}\](\[${LIGHTRED}\]ERROR\[${DARKGRAY}\])-(\[${RED}\]Exit Code \[${LIGHTRED}\]${LAST_COMMAND}\[${DARKGRAY}\])-(\[${RED}\]"
if [[ $LAST_COMMAND == 1 ]]; then
PS1+="General error"
elif [ $LAST_COMMAND == 2 ]; then
PS1+="Missing keyword, command, or permission problem"
elif [ $LAST_COMMAND == 126 ]; then
PS1+="Permission problem or command is not an executable"
elif [ $LAST_COMMAND == 127 ]; then
PS1+="Command not found"
elif [ $LAST_COMMAND == 128 ]; then
PS1+="Invalid argument to exit"
elif [ $LAST_COMMAND == 129 ]; then
PS1+="Fatal error signal 1"
elif [ $LAST_COMMAND == 130 ]; then
PS1+="Script terminated by Control-C"
elif [ $LAST_COMMAND == 131 ]; then
PS1+="Fatal error signal 3"
elif [ $LAST_COMMAND == 132 ]; then
PS1+="Fatal error signal 4"
elif [ $LAST_COMMAND == 133 ]; then
PS1+="Fatal error signal 5"
elif [ $LAST_COMMAND == 134 ]; then
PS1+="Fatal error signal 6"
elif [ $LAST_COMMAND == 135 ]; then
PS1+="Fatal error signal 7"
elif [ $LAST_COMMAND == 136 ]; then
PS1+="Fatal error signal 8"
elif [ $LAST_COMMAND == 137 ]; then
PS1+="Fatal error signal 9"
elif [ $LAST_COMMAND -gt 255 ]; then
PS1+="Exit status out of range"
else
PS1+="Unknown error code"
fi
PS1+="\[${DARKGRAY}\])\[${NOCOLOR}\]\n"
else
PS1=""
fi
# Date
PS1+="\[${DARKGRAY}\](\[${CYAN}\]\$(date +%a) $(date +%b-'%-m')" # Date
PS1+="${BLUE} $(date +'%-I':%M:%S%P)\[${DARKGRAY}\])-" # Time
# CPU
PS1+="(\[${MAGENTA}\]CPU $(cpu)%"
# Jobs
PS1+="\[${DARKGRAY}\]:\[${MAGENTA}\]\j"
# Network Connections (for a server - comment out for non-server)
PS1+="\[${DARKGRAY}\]:\[${MAGENTA}\]Net $(awk 'END {print NR}' /proc/net/tcp)"
PS1+="\[${DARKGRAY}\])-"
# User and server
local SSH_IP=`echo $SSH_CLIENT | awk '{ print $1 }'`
local SSH2_IP=`echo $SSH2_CLIENT | awk '{ print $1 }'`
if [ $SSH2_IP ] || [ $SSH_IP ] ; then
PS1+="(\[${RED}\]\u@\h"
else
PS1+="(\[${RED}\]\u"
fi
# Current directory
PS1+="\[${DARKGRAY}\]:\[${BROWN}\]\w\[${DARKGRAY}\])-"
# Total size of files in current directory
PS1+="(\[${GREEN}\]$(/bin/ls -lah | /bin/grep -m 1 total | /bin/sed 's/total //')\[${DARKGRAY}\]:"
# Number of files
PS1+="\[${GREEN}\]\$(/bin/ls -A -1 | /usr/bin/wc -l)\[${DARKGRAY}\])"
# Skip to the next line
PS1+="\n"
# singularity?
if [ -e /environment ] ; then
PS1+="singularity"
fi
if [[ $EUID -ne 0 ]]; then
PS1+="\[${GREEN}\]>\[${NOCOLOR}\] " # Normal user
else
PS1+="\[${RED}\]>\[${NOCOLOR}\] " # Root user
fi
# PS2 is used to continue a command using the \ character
PS2="\[${DARKGRAY}\]>\[${NOCOLOR}\] "
# PS3 is used to enter a number choice in a script
PS3='Please enter a number from above list: '
# PS4 is used for tracing a script in debug mode
PS4='\[${DARKGRAY}\]+\[${NOCOLOR}\] '
}
PROMPT_COMMAND='__setprompt'
#!/usr/bin/env bash
## Force environment to be clean
. /etc/lager-env.sh
exec "$@"
#!/bin/bash
for i in /etc/profile.d/*.sh; do
. $i
done
export PS1='lager-shell> \[\033[01;32m\]\u@\h\[\033[00m\]:\[\033[01;34m\]\w\[\033[00m\]\$ '
export LS_COLORS='rs=0:di=01;34:ln=01;36:mh=00:pi=40;33'
## redefine ls, less, and grep as functions, as this is something we
## can import into our plain bash --norc --noprofile session
## (aliases cannot be transferred to a child shell)
ls () {
/bin/ls --color=auto "$@"
}
less () {
/usr/bin/less -R "$@"
}
grep () {
/bin/grep --color=auto "$@"
}
MYSHELL=$(ps -p $$ | awk '{print($4);}' | tail -n1)
## only export the functions for bash, as this does not work
## in all shells and we only care about bash here. Note that
## the singularity startup runs in plain sh which requires the
## if statement
if [ "$MYSHELL" = "bash" ]; then
export -f ls
export -f less
export -f grep
fi
unset MYSHELL
#!/bin/bash
cat /etc/lager_info
#!/usr/bin/env bash
piped_args=()
if [ -p /dev/stdin ]; then
# If we want to read the input line by line
while IFS= read -r line; do
piped_args+=("${line}")
done
fi
## Properly setup environment
. /etc/lager-env.sh
### Fire off bash
if [ "${#piped_args[@]}" != "0" ]; then
printf "%s\n" "${piped_args[@]}" | bash -s -- --norc --noprofile
elif [ $# -gt 0 ]; then
exec bash -c "$@"
else
exec bash --norc --noprofile
fi
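## Usage sketch for this entrypoint (the invocation name is hypothetical):
##   entrypoint.sh                          -> interactive 'bash --norc --noprofile'
##   entrypoint.sh 'gcc --version'          -> runs the given command string via 'bash -c'
##   echo 'gcc --version' | entrypoint.sh   -> commands are read line by line from stdin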
This diff is collapsed.
#!/bin/bash
## Force environment to be clean
export LD_LIBRARY_PATH="/lib/x86_64-linux-gnu:/usr/local/lib64:/usr/local/lib"
export PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
[ ! -z "$CC" ] && unset CC
[ ! -z "$CXX" ] && unset CXX
[ ! -z "$JUPYTER_CONFIG_DIR" ] && unset JUPYTER_CONFIG_DIR
[ ! -z "$JUPYTER_PATH" ] && unset JUPYTER_PATH
[ ! -z "$CLING_STANDARD_PCH" ] && unset CLING_STANDARD_PCH
[ ! -z "$USER_PATH" ] && unset USER_PATH
[ ! -z "$SHLIB_PATH" ] && unset SHLIB_PATH
[ ! -z "$LIBPATH" ] && unset LIBPATH
[ ! -z "$CMAKE_PREFIX_PATH" ] && unset CMAKE_PREFIX_PATH
[ ! -z "$SOFTWARE_HOME" ] && unset SOFTWARE_HOME
[ ! -z "$ROOTSYS" ] && unset ROOTSYS
#!/bin/bash
## nothing here currently
#!/bin/bash
. /etc/lager-env.sh
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# Custom Spack Repository
Extra spack repository with EIC-related packages and overrides.
## How to load this repository
To load the repository, clone it and then register it with spack:
```bash
git clone https://eicweb.phy.anl.gov/containers/eic_container.git
spack repo add eic_container/spack
```
Then use spack as you normally would.
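To verify the repository was picked up, you can list the registered repositories and inspect one of the packages it provides (a minimal sketch; output depends on your local spack installation):
```bash
# the eic_container repository should appear in this list
spack repo list
# show information on a package provided by this repository
spack info dawn
```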
## Packages
* New packages
  - `dawn`: A tool to visualize detector geometries.
  - `dawncut`: A tool to edit detector visualizations.
* Package overrides
  - `acts`: Patch a bug affecting simple disk geometries.
  - `dd4hep`: Fix the package hash, which is incorrect in the upstream Spack recipe.
  - `fmt`: Modify compiler flags to build the shared library version.
  - `madx`: Add the madx package.
  - `mesa`: Fix an issue in the Meson build step.
  - `podio`: Add v0.13.1 and patch the CMake setup to allow builds under /tmp/root, as needed by Spack.
  - `qt`: Add gcc10.patch to fix issues compiling Qt with GCC 10.
  - `root`: Re-enable the http module, as it builds fine on modern Linux systems and we use it heavily.
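Once the repository is registered, its new packages and overrides install like any other Spack package. A minimal sketch (exact specs and variants depend on your build environment):
```bash
spack install dawn
spack install acts +dd4hep +tgeo +identification
```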
diff --git a/Core/src/Geometry/CylinderVolumeBuilder.cpp b/Core/src/Geometry/CylinderVolumeBuilder.cpp
index eba6f8617..950c5286b 100644
--- a/Core/src/Geometry/CylinderVolumeBuilder.cpp
+++ b/Core/src/Geometry/CylinderVolumeBuilder.cpp
@@ -542,6 +542,7 @@ Acts::VolumeConfig Acts::CylinderVolumeBuilder::analyzeContent(
double zMaxD = center.z() + 0.5 * thickness;
lConfig.rMin =
std::min(lConfig.rMin, rMinD - m_cfg.layerEnvelopeR.first);
+ lConfig.rMin = std::max(0.0, lConfig.rMin);
lConfig.rMax =
std::max(lConfig.rMax, rMaxD + m_cfg.layerEnvelopeR.second);
lConfig.zMin = std::min(lConfig.zMin, zMinD - m_cfg.layerEnvelopeZ);
@@ -576,4 +577,4 @@ Acts::VolumeConfig Acts::CylinderVolumeBuilder::analyzeContent(
// and return what you have
return lConfig;
-}
\ No newline at end of file
+}
diff --git a/Plugins/DD4hep/src/DD4hepLayerBuilder.cpp b/Plugins/DD4hep/src/DD4hepLayerBuilder.cpp
index 4d1695abf..455481538 100644
--- a/Plugins/DD4hep/src/DD4hepLayerBuilder.cpp
+++ b/Plugins/DD4hep/src/DD4hepLayerBuilder.cpp
@@ -128,6 +128,7 @@ const Acts::LayerVector Acts::DD4hepLayerBuilder::endcapLayers(
std::abs(zMax - pl.max(Acts::binZ))};
pl.envelope[Acts::binR] = {std::abs(rMin - pl.min(Acts::binR)),
std::abs(rMax - pl.max(Acts::binR))};
+ pl.extent.ranges[Acts::binR] = {rMin, rMax};
}
} else {
throw std::logic_error(
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Acts(CMakePackage, CudaPackage):
"""
A Common Tracking Software (Acts)
This project contains an experiment-independent set of track reconstruction
tools. The main philosophy is to provide high-level track reconstruction
modules that can be used for any tracking detector. The description of the
tracking detector's geometry is optimized for efficient navigation and
quick extrapolation of tracks. Converters for several common geometry
description languages exist. Having a highly performant, yet largely
customizable implementation of track reconstruction algorithms was a
primary objective for the design of this toolset. Additionally, the
applicability to real-life HEP experiments plays a major role in the
development process. Apart from algorithmic code, this project also
provides an event data model for the description of track parameters and
measurements.
Key features of this project include: tracking geometry description which
can be constructed from TGeo, DD4Hep, or GDML input, simple and efficient
event data model, performant and highly flexible algorithms for track
propagation and fitting, basic seed finding algorithms.
"""
homepage = "http://acts.web.cern.ch/ACTS/"
git = "https://github.com/acts-project/acts.git"
maintainers = ['HadrienG2']
tags = ['hep']
# Supported Acts versions
version('master', branch='master')
version('8.03.0p1', commit='601c0a18b6738cae81c3e23422cfeb3ec7bddce9')
version('8.03.0', commit='601c0a18b6738cae81c3e23422cfeb3ec7bddce9')
version('8.02.0p1', commit='f25cf639915fc2ac65b03882ad3eb11fb037ed00')
version('8.02.0', commit='f25cf639915fc2ac65b03882ad3eb11fb037ed00')
version('8.01.0', commit='ccc8c77bbc011f3adc020c565a509815be0ea029')
version('8.00.0', commit='50c972823144c007b406ae12d7ca25a1e0c35532')
version('7.00.0', commit='e663df7ab023bdb5ef206202efc2e54ccb71d416')
version('6.00.0', commit='a5cf04acd4b1a2c625e0826189109472a3392558')
version('5.00.0', commit='df77b91a7d37b8db6ed028a4d737014b5ad86bb7')
version('4.01.0', commit='c383bf434ef69939b47e840e0eac0ba632e6af9f')
version('4.00.0', commit='ed64b4b88d366b63adc4a8d1afe5bc97aa5751eb')
version('3.00.0', commit='e20260fccb469f4253519d3f0ddb3191b7046db3')
version('2.00.0', commit='8708eae2b2ccdf57ab7b451cfbba413daa1fc43c')
version('1.02.1', commit='f6ebeb9a28297ba8c54fd08b700057dd4ff2a311')
version('1.02.0', commit='e69b95acc9a264e63aded7d1714632066e090542')
version('1.01.0', commit='836fddd02c3eff33825833ff97d6abda5b5c20a0')
version('1.00.0', commit='ec9ce0bcdc837f568d42a12ddf3fc9c80db62f5d')
version('0.32.0', commit='a4cedab7e727e1327f2835db29d147cc86b21054')
version('0.31.0', commit='cfbd901555579a2f32f4efe2b76a7048442b42c3')
version('0.30.0', commit='a71ef0a9c742731611645214079884585a92b15e')
version('0.29.0', commit='33aa3e701728112e8908223c4a7fd521907c8ea4')
version('0.28.0', commit='55626b7401eeb93fc562e79bcf385f0ad0ac48bf')
version('0.27.1', commit='8ba3010a532137bc0ab6cf83a38b483cef646a01')
version('0.27.0', commit='f7b1a1c27d5a95d08bb67236ad0e117fcd1c679f')
version('0.26.0', commit='cf542b108b31fcc349fc18fb0466f889e4e42aa6')
version('0.25.2', commit='76bf1f3e4be51d4d27126b473a2caa8d8a72b320')
version('0.25.1', commit='6e8a1ea6d2c7385a78e3e190efb2a8a0c1fa957f')
version('0.25.0', commit='0aca171951a214299e8ff573682b1c5ecec63d42')
version('0.24.0', commit='ef4699c8500bfea59a5fe88bed67fde2f00f0adf')
version('0.23.0', commit='dc443dd7e663bc4d7fb3c1e3f1f75aaf57ffd4e4')
version('0.22.1', commit='ca1b8b1645db6b552f44c48d2ff34c8c29618f3a')
version('0.22.0', commit='2c8228f5843685fc0ae69a8b95dd8fc001139efb')
version('0.21.0', commit='10b719e68ddaca15b28ac25b3daddce8c0d3368d')
version('0.20.0', commit='1d37a849a9c318e8ca4fa541ef8433c1f004637b')
version('0.19.0', commit='408335636486c421c6222a64372250ef12544df6')
version('0.18.0', commit='d58a68cf75b52a5e0f563bc237f09250aa9da80c')
version('0.17.0', commit='0789f654ff484b013fd27e5023cf342785ea8d97')
version('0.16.0', commit='b3d965fe0b8ae335909d79114ef261c6b996773a')
version('0.15.0', commit='267c28f69c561e64369661a6235b03b5a610d6da')
version('0.14.0', commit='38d678fcb205b77d60326eae913fbb1b054acea1')
version('0.13.0', commit='b33f7270ddbbb33050b7ec60b4fa255dc2bfdc88')
version('0.12.1', commit='a8b3d36e7c6cb86487637589e0eff7bbe626054a')
version('0.12.0', commit='f9cda77299606d78c889fb1db2576c1971a271c4')
version('0.11.1', commit='c21196cd6c3ecc6da0f14d0a9ef227a274be584b')
version('0.11.0', commit='22bcea1f19adb0021ca61b843b95cfd2462dd31d')
version('0.10.5', commit='b6f7234ca8f18ee11e57709d019c14bf41cf9b19')
version('0.10.4', commit='42cbc359c209f5cf386e620b5a497192c024655e')
version('0.10.3', commit='a3bb86b79a65b3d2ceb962b60411fd0df4cf274c')
version('0.10.2', commit='64cbf28c862d8b0f95232b00c0e8c38949d5015d')
version('0.10.1', commit='0692dcf7824efbc504fb16f7aa00a50df395adbc')
version('0.10.0', commit='30ef843cb00427f9959b7de4d1b9843413a13f02')
version('0.09.5', commit='12b11fe8b0d428ccb8e92dda7dc809198f828672')
version('0.09.4', commit='e5dd9fbe179201e70347d1a3b9fa1899c226798f')
version('0.09.3', commit='a8f31303ee8720ed2946bfe2d59e81d0f70e307e')
version('0.09.2', commit='4e1f7fa73ffe07457080d787e206bf6466fe1680')
version('0.09.1', commit='69c451035516cb683b8f7bc0bab1a25893e9113d')
version('0.09.0', commit='004888b0a412f5bbaeef2ffaaeaf2aa182511494')
version('0.08.2', commit='c5d7568714e69e7344582b93b8d24e45d6b81bf9')
version('0.08.1', commit='289bdcc320f0b3ff1d792e29e462ec2d3ea15df6')
version('0.08.0', commit='99eedb38f305e3a1cd99d9b4473241b7cd641fa9')
# Variants that affect the core Acts library
variant('benchmarks', default=False, description='Build the performance benchmarks')
variant('examples', default=False, description='Build the examples')
variant('integration_tests', default=False, description='Build the integration tests')
variant('unit_tests', default=False, description='Build the unit tests')
variant('log_failure_threshold', default='MAX', description='Log level above which examples should auto-crash')
# Variants that enable / disable Acts plugins
variant('autodiff', default=False, description='Build the auto-differentiation plugin')
variant('dd4hep', default=False, description='Build the DD4hep plugin')
variant('digitization', default=False, description='Build the geometric digitization plugin')
variant('fatras', default=False, description='Build the FAst TRAcking Simulation package')
variant('fatras_geant4', default=False, description='Build Geant4 Fatras package')
variant('identification', default=False, description='Build the Identification plugin')
variant('json', default=False, description='Build the Json plugin')
variant('legacy', default=False, description='Build the Legacy package')
# FIXME: Cannot build ONNX plugin as Spack doesn't have an ONNX runtime
# FIXME: Cannot build SyCL plugin yet as Spack doesn't have SyCL support
variant('tgeo', default=False, description='Build the TGeo plugin')
# Variants that only affect Acts examples for now
variant('geant4', default=False, description='Build the Geant4-based examples')
variant('hepmc3', default=False, description='Build the HepMC3-based examples')
variant('pythia8', default=False, description='Build the Pythia8-based examples')
## Temporary patch for ACTS to address
## https://github.com/acts-project/acts/issues/822
patch('acts-822.patch', when='@8.02.0p1')
patch('acts-822.patch', when='@8.03.0p1')
# Build dependencies
# FIXME: Use spack's autodiff package once there is one
depends_on('boost @1.62:1.69.99 +program_options +test', when='@:0.10.3')
depends_on('boost @1.71: +filesystem +program_options +test', when='@0.10.4:')
depends_on('cmake @3.14:', type='build')
depends_on('dd4hep @1.11:', when='+dd4hep')
depends_on('dd4hep @1.11: +geant4', when='+dd4hep +geant4')
depends_on('eigen @3.3.7:', type='build')
depends_on('geant4', when='+fatras_geant4')
depends_on('geant4', when='+geant4')
depends_on('hepmc3 @3.2.1:', when='+hepmc3')
depends_on('heppdt', when='+hepmc3 @:4.0')
depends_on('intel-tbb @2020.1:', when='+examples')
depends_on('nlohmann-json @3.9.1:', when='@0.14: +json')
depends_on('pythia8', when='+pythia8')
depends_on('root @6.10: cxxstd=14', when='+tgeo @:0.8.0')
depends_on('root @6.20: cxxstd=17', when='+tgeo @0.8.1:')
# Some variant combinations do not make sense
conflicts('+autodiff', when='@:1.01')
conflicts('+benchmarks', when='@:0.15')
conflicts('+dd4hep', when='-tgeo')
conflicts('+examples', when='@:0.22')
conflicts('+examples', when='-digitization')
conflicts('+examples', when='-fatras')
conflicts('+examples', when='-identification')
conflicts('+examples', when='-json')
conflicts('+examples', when='-tgeo')
conflicts('+fatras', when='@:0.15')
conflicts('+geant4', when='@:0.22')
conflicts('+geant4', when='-examples')
conflicts('+hepmc3', when='@:0.22')
conflicts('+hepmc3', when='-examples')
conflicts('+pythia8', when='@:0.22')
conflicts('+pythia8', when='-examples')
conflicts('+tgeo', when='-identification')
conflicts('%gcc@:7', when='@0.23:')
def cmake_args(self):
spec = self.spec
def cmake_variant(cmake_label, spack_variant):
enabled = spec.satisfies('+' + spack_variant)
return "-DACTS_BUILD_{0}={1}".format(cmake_label, enabled)
def example_cmake_variant(cmake_label, spack_variant):
enabled = spec.satisfies('+examples +' + spack_variant)
return "-DACTS_BUILD_EXAMPLES_{0}={1}".format(cmake_label, enabled)
def plugin_label(plugin_name):
if spec.satisfies('@0.33:'):
return "PLUGIN_" + plugin_name
else:
return plugin_name + "_PLUGIN"
def plugin_cmake_variant(plugin_name, spack_variant):
return cmake_variant(plugin_label(plugin_name), spack_variant)
integration_tests_label = "INTEGRATIONTESTS"
unit_tests_label = "UNITTESTS"
legacy_plugin_label = "LEGACY_PLUGIN"
if spec.satisfies('@:0.15'):
integration_tests_label = "INTEGRATION_TESTS"
unit_tests_label = "TESTS"
if spec.satisfies('@:0.32'):
legacy_plugin_label = "LEGACY"
args = [
plugin_cmake_variant("AUTODIFF", "autodiff"),
cmake_variant("BENCHMARKS", "benchmarks"),
plugin_cmake_variant("CUDA", "cuda"),
plugin_cmake_variant("DD4HEP", "dd4hep"),
plugin_cmake_variant("DIGITIZATION", "digitization"),
cmake_variant("EXAMPLES", "examples"),
example_cmake_variant("DD4HEP", "dd4hep"),
example_cmake_variant("GEANT4", "geant4"),
example_cmake_variant("HEPMC3", "hepmc3"),
example_cmake_variant("PYTHIA8", "pythia8"),
cmake_variant("FATRAS", "fatras"),
cmake_variant("FATRAS_GEANT4", "fatras_geant4"),
plugin_cmake_variant("IDENTIFICATION", "identification"),
cmake_variant(integration_tests_label, "integration_tests"),
plugin_cmake_variant("JSON", "json"),
cmake_variant(unit_tests_label, "unit_tests"),
cmake_variant(legacy_plugin_label, "legacy"),
plugin_cmake_variant("TGEO", "tgeo")
]
log_failure_threshold = spec.variants['log_failure_threshold'].value
args.append("-DACTS_LOG_FAILURE_THRESHOLD={0}".format(log_failure_threshold))
cuda_arch = spec.variants['cuda_arch'].value
if cuda_arch != 'none':
args.append('-DCUDA_FLAGS=-arch=sm_{0}'.format(cuda_arch[0]))
if 'root' in spec:
cxxstd = spec['root'].variants['cxxstd'].value
args.append("-DCMAKE_CXX_STANDARD={0}".format(cxxstd))
# FIXME: Once we can use spack's autodiff package, set
# ACTS_USE_SYSTEM_AUTODIFF too.
if spec.satisfies('@0.33: +json'):
args.append("-DACTS_USE_SYSTEM_NLOHMANN_JSON=ON")
elif spec.satisfies('@0.14.0: +json'):
args.append("-DACTS_USE_BUNDLED_NLOHMANN_JSON=OFF")
return args
diff --git a/DAWN_GUI_menu.header2 b/DAWN_GUI_menu.header2
index ef35492..7fa067b 100644
--- a/DAWN_GUI_menu.header2
+++ b/DAWN_GUI_menu.header2
@@ -1 +1 @@
-exec wish "$0" "$@"
+exec wish "$0" ${1+"$@"}
diff --git a/Makefile b/Makefile
index 96e42e7..c9bb589 100644
--- a/Makefile
+++ b/Makefile
@@ -267,6 +267,7 @@ clean:
### install
install:
+ mkdir -p $(INSTALL_DIR)
cp -p ./$(EXEC) $(INSTALL_DIR)/$(EXEC)
cp -p ./$(GUI_MENU) $(INSTALL_DIR)/$(GUI_MENU)
cp -p ./$(EXEC)unixd $(INSTALL_DIR)/$(EXEC)unixd
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
import os
class Dawn(MakefilePackage):
""" Fukui Renderer DAWN (Drawer for Academic WritiNgs)
is a renderer which reads 3D geometrical data and visualizes it.
It is a vectorized 3D PostScript processor with analytical
hidden line/surface removal.
It aims at precise technical drawing of complex geometries.
It performs geometrical hidden line/surface removal and calculates
all visible parts of the 3D data before drawing. This drawing
algorithm produces device-independent, high-quality technical
vector graphics.
3D data files should be generated separately, by a modeler or by
hand. If you use DAWN as a visualizer of,
say, a simulator of physical experiments,
the simulator application itself is recognized as a modeler.
An important feature of DAWN is that
it has been developed to visualize 3D data generated by
a high-energy experimental detector simulator "GEANT4"
( http://geant4.web.cern.ch/geant4 , http://geant4.kek.jp ).
Note: we set 'gv' as default pdf/ps viewer, assuming this is
installed on your system.
"""
# Note: Dawn homepage not available anymore...
homepage = "https://geant4.kek.jp/~tanaka"
#url = "http://geant4.kek.jp/~tanaka/src/dawn_3_91a.tgz"
url = "http://10.10.241.24/software/dawn_3_91a.tgz"
maintainers = ['sly2j']
version('3_91a', sha256='81d855ead1117681b188242dd0be3a24e005d9bd4063fd2bda9a7a794ebcf5f4')
depends_on('tcl')
depends_on('tk')
## Patch to ensure wish is called correctly
patch('exec.patch')
patch('install.patch')
def edit(self, spec, prefix):
makefile = FileFilter("Makefile")
makefile.filter('CC= .*', 'CC = ' + env['CC'])
makefile.filter('CXX = .*', 'CXX = ' + env['CXX'])
makefile.filter('INSTALL_DIR = .*', 'INSTALL_DIR = {}/bin'.format(prefix))
os.environ['DAWN_PS_PREVIEWER'] = 'gv'
diff --git a/Makefile b/Makefile
index 1dda775..e8a567f 100644
--- a/Makefile
+++ b/Makefile
@@ -1,6 +1,7 @@
include ./Makefile.architecture
TARGET = dawncut
+INSTALL_DIR ?= /usr/local/bin
SRC_DIR = src
@@ -44,3 +45,7 @@ clean :
$(RM) $(TARGET) core
$(RM) *~ \#*
+### install
+install:
+ mkdir -p $(INSTALL_DIR)
+ cp -p ./$(TARGET) $(INSTALL_DIR)/$(TARGET)
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
import os
class Dawncut(MakefilePackage):
"""DAWNCUT is a tool to generate a 3D scene data clipped with an arbitrary plane.
It reads a source DAWN-format file and outputs a new DAWN-format data,
describing a plane-clipped 3D scene. The output DAWN-format data can be
visualized with Fukui Renderer DAWN.
"""
# dawn webpage not available anymore
homepage = "https://geant4.kek.jp/~tanaka"
url = "http://10.10.241.24/software/dawncut_1_54a.tar.gz"
maintainers = ['sly2j']
version('1_54a',
sha256='17d7ccd2ff863e2f3700cc3e751cfca37a1425abfa0edc3b8f6497d8746ddcf4')
# FIXME: Add dependencies if required.
# depends_on('foo')
## Patch to add install directive to Makefile
patch('install.patch')
def edit(self, spec, prefix):
makefile = FileFilter("Makefile")
makefile.filter('CC= .*', 'CC = ' + env['CC'])
makefile.filter('CXX = .*', 'CXX = ' + env['CXX'])
os.environ['INSTALL_DIR'] = '{}/bin'.format(prefix)
diff --git a/DDG4/src/Geant4IsotropeGenerator.cpp b/DDG4/src/Geant4IsotropeGenerator.cpp
index 54588aaf..f63b29b4 100644
--- a/DDG4/src/Geant4IsotropeGenerator.cpp
+++ b/DDG4/src/Geant4IsotropeGenerator.cpp
@@ -48,7 +48,6 @@ void Geant4IsotropeGenerator::getParticleDirectionUniform(int, ROOT::Math::XYZVe
double x3 = std::cos(theta);
direction.SetXYZ(x1,x2,x3);
- momentum = rnd.rndm()*momentum;
}
/// Particle distribution ~ cos(theta)
@@ -63,7 +62,6 @@ void Geant4IsotropeGenerator::getParticleDirectionCosTheta(int, ROOT::Math::XYZV
double x3 = cos_theta;
direction.SetXYZ(x1,x2,x3);
- momentum = rnd.rndm()*momentum;
}
/// Particle distribution flat in eta (pseudo rapidity)
@@ -84,7 +82,6 @@ void Geant4IsotropeGenerator::getParticleDirectionEta(int, ROOT::Math::XYZVector
double x3 = std::sinh(eta);
double r = std::sqrt(1.0+x3*x3);
direction.SetXYZ(x1/r,x2/r,x3/r);
- momentum = rnd.rndm()*momentum;
}
/// e+e- --> ffbar particle distribution ~ 1 + cos^2(theta)
@@ -110,7 +107,6 @@ void Geant4IsotropeGenerator::getParticleDirectionFFbar(int, ROOT::Math::XYZVect
double x2 = std::sin(theta)*std::sin(phi);
double x3 = std::cos(theta);
direction.SetXYZ(x1,x2,x3);
- momentum = rnd.rndm()*momentum;
return;
}
}
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 618812f7..8316ba67 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -46,6 +46,7 @@ ENDIF()
#############################################################
ENABLE_LANGUAGE(CXX)
+ENABLE_LANGUAGE(C)
# Set C++ standard
set(CMAKE_CXX_STANDARD 14 CACHE STRING "C++ standard used for compiling")
diff --git a/DDCore/src/plugins/Compact2Objects.cpp b/DDCore/src/plugins/Compact2Objects.cpp
index fab6267f..581f535f 100644
--- a/DDCore/src/plugins/Compact2Objects.cpp
+++ b/DDCore/src/plugins/Compact2Objects.cpp
@@ -805,21 +805,48 @@ template <> void Converter<PropertyTable>::operator()(xml_h e) const {
}
#endif
-/** Convert compact visualization attribute to Detector visualization attribute
+/** Convert compact visualization attribute to Detector visualization attribute.
*
* <vis name="SiVertexBarrelModuleVis"
* alpha="1.0" r="1.0" g="0.75" b="0.76"
* drawingStyle="wireframe"
* showDaughters="false"
* visible="true"/>
+ *
+ * Optionally inherit an already defined VisAttr and override other properties.
+ *
+ * <vis name="SiVertexEndcapModuleVis"
+ * ref="SiVertexBarrelModuleVis"
+ * alpha="0.5"/>
*/
template <> void Converter<VisAttr>::operator()(xml_h e) const {
VisAttr attr(e.attr<string>(_U(name)));
+ float alpha = 1.0;
+ float red = 1.0;
+ float green = 1.0;
+ float blue = 1.0;
+ bool use_ref = false;
+ if(e.hasAttr(_U(ref))) {
+ use_ref = true;
+ auto refName = e.attr<string>(_U(ref));
+ const auto refAttr = description.visAttributes(refName);
+ if(!refAttr.isValid() ) {
+ throw runtime_error("reference VisAttr " + refName + " does not exist");
+ }
+ // Just copying things manually.
+ // I think a handle's copy constructor/assignment would reuse the underlying pointer... maybe?
+ refAttr.argb(alpha,red,green,blue);
+ attr.setColor(alpha,red,green,blue);
+ attr.setDrawingStyle( refAttr.drawingStyle());
+ attr.setLineStyle( refAttr.lineStyle());
+ attr.setShowDaughters(refAttr.showDaughters());
+ attr.setVisible(refAttr.visible());
+ }
xml_dim_t dim(e);
- float alpha = dim.alpha(1.0);
- float red = dim.r(1.0);
- float green = dim.g(1.0);
- float blue = dim.b(1.0);
+ alpha = dim.alpha(alpha);
+ red = dim.r(red );
+ green = dim.g(green);
+ blue = dim.b(blue );
printout(s_debug.visattr ? ALWAYS : DEBUG, "Compact",
"++ Converting VisAttr structure: %-16s. Alpha=%.2f R=%.3f G=%.3f B=%.3f",
@@ -835,7 +862,8 @@ template <> void Converter<VisAttr>::operator()(xml_h e) const {
attr.setLineStyle(VisAttr::DASHED);
}
else {
- attr.setLineStyle(VisAttr::SOLID);
+ if (!use_ref)
+ attr.setLineStyle(VisAttr::SOLID);
}
if (e.hasAttr(_U(drawingStyle))) {
string ds = e.attr<string>(_U(drawingStyle));
@@ -845,12 +873,15 @@ template <> void Converter<VisAttr>::operator()(xml_h e) const {
attr.setDrawingStyle(VisAttr::SOLID);
}
else {
- attr.setDrawingStyle(VisAttr::SOLID);
+ if (!use_ref)
+ attr.setDrawingStyle(VisAttr::SOLID);
}
if (e.hasAttr(_U(showDaughters)))
attr.setShowDaughters(e.attr<bool>(_U(showDaughters)));
- else
- attr.setShowDaughters(true);
+ else {
+ if (!use_ref)
+ attr.setShowDaughters(true);
+ }
description.addVisAttribute(attr);
}
diff --git a/DDG4/edm4hep/Geant4Output2EDM4hep.cpp b/DDG4/edm4hep/Geant4Output2EDM4hep.cpp
index 555e4e52..504fb8e4 100644
--- a/DDG4/edm4hep/Geant4Output2EDM4hep.cpp
+++ b/DDG4/edm4hep/Geant4Output2EDM4hep.cpp
@@ -20,6 +20,7 @@
#include "DDG4/Geant4HitCollection.h"
#include "DDG4/Geant4OutputAction.h"
#include "DDG4/Geant4SensDetAction.h"
+#include "DDG4/Geant4DataConversion.h"
#include "DDG4/EventParameters.h"
// Geant4 headers
@@ -563,16 +564,23 @@ void Geant4Output2EDM4hep::createCollections(OutputContext<G4Event>& ctxt){
continue ;
}
+ Geant4Sensitive* sd = coll->sensitive();
+ string sd_enc = dd4hep::sim::Geant4ConversionHelper::encoding(sd->sensitiveDetector());
+
if( typeid( Geant4Tracker::Hit ) == coll->type().type() ){
- m_store->create<edm4hep::SimTrackerHitCollection>(colName);
+ auto& sthc = m_store->create<edm4hep::SimTrackerHitCollection>(colName);
m_file->registerForWrite(colName);
+ auto& sthc_md = m_store->getCollectionMetaData( sthc.getID() );
+ sthc_md.setValue("CellIDEncodingString", sd_enc);
printout(DEBUG,"Geant4Output2EDM4hep","+++ created collection %s",colName.c_str() );
}
else if( typeid( Geant4Calorimeter::Hit ) == coll->type().type() ){
- m_store->create<edm4hep::SimCalorimeterHitCollection>(colName);
+ auto& schc = m_store->create<edm4hep::SimCalorimeterHitCollection>(colName);
m_file->registerForWrite(colName);
+ auto& schc_md = m_store->getCollectionMetaData( schc.getID() );
+ schc_md.setValue("CellIDEncodingString", sd_enc);
printout(DEBUG,"Geant4Output2EDM4hep","+++ created collection %s",colName.c_str() );
colName += "Contributions" ;
diff --git a/DDG4/src/Geant4ShapeConverter.cpp b/DDG4/src/Geant4ShapeConverter.cpp
index 2a79a69a..6f4af522 100644
--- a/DDG4/src/Geant4ShapeConverter.cpp
+++ b/DDG4/src/Geant4ShapeConverter.cpp
@@ -205,8 +205,9 @@ namespace dd4hep {
template <> G4VSolid* convertShape<TGeoSphere>(const TGeoShape* shape) {
const TGeoSphere* sh = (const TGeoSphere*) shape;
- return new G4Sphere(sh->GetName(), sh->GetRmin() * CM_2_MM, sh->GetRmax() * CM_2_MM, sh->GetPhi1() * DEGREE_2_RAD,
- sh->GetPhi2() * DEGREE_2_RAD, sh->GetTheta1() * DEGREE_2_RAD, sh->GetTheta2() * DEGREE_2_RAD);
+ return new G4Sphere(sh->GetName(), sh->GetRmin() * CM_2_MM, sh->GetRmax() * CM_2_MM,
+ sh->GetPhi1() * DEGREE_2_RAD, (sh->GetPhi2()-sh->GetPhi1()) * DEGREE_2_RAD,
+ sh->GetTheta1() * DEGREE_2_RAD, (sh->GetTheta2()- sh->GetTheta1()) * DEGREE_2_RAD);
}
template <> G4VSolid* convertShape<TGeoTorus>(const TGeoShape* shape) {