Skip to content
Snippets Groups Projects
Commit a1e03a90 authored by Chao Peng's avatar Chao Peng
Browse files

Adjust the pedestal fitting in analysis code, add a script to plot event from root file

parent 1be90eed
No related branches found
No related tags found
No related merge requests found
import ROOT
import os
import random
import numpy as np
import pandas as pd
import argparse
from matplotlib import pyplot as plt
from scipy import signal
from collections.abc import Iterable
# fit pedestal
def fit_pedestal(bk, outlier_thres=2.0, min_lengths=10):
    """Estimate the pedestal (baseline) mean and error of the samples in bk.

    Iteratively rejects samples farther than outlier_thres standard
    deviations from the current mean, recursing until either no outliers
    remain or fewer than min_lengths samples would survive the cut.

    Parameters
    ----------
    bk : array-like of number
        Background (non-peak) samples.
    outlier_thres : float
        Rejection threshold in units of the current standard deviation.
    min_lengths : int
        Minimum number of surviving samples required to keep refining.

    Returns
    -------
    (float, float)
        Pedestal mean and its error estimate.
    """
    bk = np.asarray(bk)
    # nothing to fit: avoid nan from np.average on an empty array
    if bk.size == 0:
        return 0.0, 0.0
    ped_mean = np.average(bk)
    # too few samples for a meaningful spread: use a nominal 2% error
    if bk.size < 5:
        return ped_mean, 0.02*ped_mean
    ped_err = np.std(bk)
    # exclude outliers and refit, but only while enough samples remain
    errs = np.abs(bk - ped_mean)/ped_err
    outliers = errs > outlier_thres
    if np.count_nonzero(outliers) > 0 and np.count_nonzero(~outliers) > min_lengths:
        return fit_pedestal(bk[~outliers], outlier_thres, min_lengths)
    return ped_mean, ped_err
def get_peak(vals, thres=8.0, twindow=(0, 192), zero_thres=5.0):
    """Locate the most prominent valid peak in a waveform.

    Peaks are found with scipy's prominence-based search, the pedestal is
    fitted over all samples lying outside every peak's base interval, and
    candidate peaks are filtered by time window, significance above the
    pedestal, and the depth of neighboring valleys.

    Returns (peak_height, peak_position, pedestal_mean, pedestal_error);
    the height is measured above the fitted pedestal.
    """
    if len(vals) < 1:
        return 0, 0, 0, 0
    # prominence-based peak search on the raw samples
    peaks, props = signal.find_peaks(vals, prominence=thres)
    # fit the pedestal on samples outside every peak's [left, right] base span
    intervals = pd.IntervalIndex.from_arrays(props['left_bases'], props['right_bases'])
    mask = [not intervals.contains(i).any() for i in np.arange(len(vals))]
    ped_mean, ped_err = fit_pedestal(np.array(vals)[mask])
    # keep the highest peak that survives all checks
    best_height, best_pos = 0, 0
    for p, lo, hi in zip(peaks, props['left_bases'], props['right_bases']):
        # must lie inside the requested time window
        if not (twindow[0] <= p <= twindow[1]):
            continue
        # must stand significantly above the pedestal
        height = vals[p] - ped_mean
        if height < zero_thres*ped_err:
            continue
        # reject peaks dominated by a deeper valley nearby (undershoot)
        span = hi - lo
        left, right = max(0, lo - span), min(len(vals), hi + span)
        valley = min(vals[left:right]) - ped_mean
        if valley < 0 and abs(valley) >= height:
            continue
        if height > best_height:
            best_height, best_pos = height, p
    return best_height, best_pos, ped_mean, ped_err
# command-line interface
parser = argparse.ArgumentParser('Draw events from a root file')
parser.add_argument('root_file', help='a root file of waveform data')
parser.add_argument('output_dir', help='Output directory')
parser.add_argument('-e', dest='event', help='specify an event number to plot', type=int)
parser.add_argument('-n', dest='nevents', help='number of random events to plot', type=int, default=10)
parser.add_argument('--time-min', dest='tmin', help='lower limit of the time window, default 0', type=int, default=0)
parser.add_argument('--time-max', dest='tmax', help='upper limit of the time window, default 192 (max)', type=int, default=192)
args = parser.parse_args()

# open the input file and grab the event tree
f = ROOT.TFile.Open(args.root_file, 'read')
tree = f.EvTree

# figure layout: one 4x4 grid of subplots, one per channel
figsize = (16, 16)
nrows, ncols = 4, 4

# channel names laid out row-major to match the subplot grid
ch = ['Cer{}{}_5'.format(row, col) for row in range(1, 5) for col in range(1, 5)]

# PMT group of each channel: one group ('A'..'D') per grid row
pmt = [grp for grp in 'ABCD' for _ in range(4)]

# background (color, alpha) used to shade subplots by PMT group
pmt_colors = {
    grp: (color, 0.05)
    for grp, color in zip('ABCD', ('lightcoral', 'lightgreen', 'moccasin', 'lightskyblue'))
}
def branch_to_array1d(br, t):
    """Wrap a ROOT branch buffer as a 1-D numpy array of dtype t, without copying."""
    n = len(br)
    return np.ndarray(shape=(n,), dtype=t, buffer=br)
# get event list to plot: either the single event requested with -e,
# or a random selection of nevents entries from the tree
# NOTE: must test "is not None" -- a plain truthiness check would
# silently ignore a requested event number of 0
if args.event is not None:
    evs = [args.event]
else:
    evs = random.choices(np.arange(0, tree.GetEntries()), k=args.nevents)
# plot: one figure (4x4 channel grid) per selected event
for iev in evs:
    tree.GetEntry(iev)
    # gather raw waveforms and reconstructed peak properties per channel
    samples = dict()
    pprops = dict()
    for c in ch:
        samples[c] = branch_to_array1d(getattr(tree, c + '_raw'), np.int32)
        # (peak heights, peak times, pedestal mean, pedestal error)
        pprops[c] = (
            branch_to_array1d(getattr(tree, c + '_Ppeak'), np.float32),
            branch_to_array1d(getattr(tree, c + '_Ptime'), np.float32),
            getattr(tree, c + '_ped_mean'),
            getattr(tree, c + '_ped_err')
        )
    fig, axs = plt.subplots(nrows, ncols, figsize=figsize)
    # legend handles collected from the last populated subplot;
    # initialized so the legend call is safe even if all channels are empty
    lines, labels = [], []
    for (i, ax) in enumerate(axs.flat):
        # channel label and PMT-group background shading
        box = dict(boxstyle='round', facecolor='white', alpha=0.5)
        ax.text(0.68, 0.83, ch[i], transform=ax.transAxes, fontsize=16, verticalalignment='top', bbox=box)
        ax.patch.set_facecolor(pmt_colors[pmt[i]][0])
        ax.patch.set_alpha(pmt_colors[pmt[i]][1])
        sp = samples[ch[i]]
        if len(sp) > 0:
            pp = pprops[ch[i]]
            ax.plot(np.arange(len(sp)), sp, label='Waveform Data')
            # pedestal mean line and +/- error band across the full x range
            ax.axhline(y=pp[2], color='red', label='Pedestal Mean')
            xr = ax.get_xlim()
            ax.fill_between(xr, pp[2]-pp[3], pp[2]+pp[3], color='red', alpha=0.25, label='Pedestal Error')
            ax.set_xlim(xr[0], xr[1])
            # time window boundaries
            ax.axvline(x=args.tmin, color='k', linestyle=':', label='Time Window')
            ax.axvline(x=args.tmax, color='k', linestyle=':')
            # mark reconstructed peaks falling inside the time window
            for peak, pos in zip(pp[0], pp[1]):
                if pos <= args.tmax and pos >= args.tmin:
                    ax.plot(pos, peak + pp[2], 'kv', label='Peak')
            lines, labels = ax.get_legend_handles_labels()
    fig.tight_layout(rect=(0.03, 0.03, 0.99, 0.95))
    fig.text(0.5, 0.02, 'Sample', ha='center', fontsize=18)
    fig.text(0.02, 0.5, 'ADC Value', ha='center', rotation=90, fontsize=18)
    fig.legend(lines, labels, loc = 'upper center', ncol=5, fontsize=18)
    fig.savefig(os.path.join(args.output_dir, 'sample_ev{}.png'.format(iev)))
    # release the figure so memory does not accumulate over many events
    plt.close(fig)
File moved
File moved
File moved
File moved
File moved
File moved
...@@ -82,15 +82,15 @@ res = pd.DataFrame(columns=[c + '_peak' for c in list(ch)] + [c + '_pos' for c i ...@@ -82,15 +82,15 @@ res = pd.DataFrame(columns=[c + '_peak' for c in list(ch)] + [c + '_pos' for c i
count = 0 count = 0
for iev, ev in enumerate(f.EvTree): for iev, ev in enumerate(f.EvTree):
if iev > 10000:
break
if iev % 100 == 0: if iev % 100 == 0:
print('processed {}'.format(iev), end='\r') print('processed {}'.format(iev), end='\r')
evpeaks, evposes = [], []
for c, t in ch.items(): for c, t in ch.items():
branch = ev.__getattr__(c + '_raw') branch = ev.__getattr__(c + '_raw')
samples = np.ndarray((len(branch), ), dtype=np.int32, buffer=branch) samples = np.ndarray((len(branch), ), dtype=np.int32, buffer=branch)
peak, pos, _ , _ = get_peak(samples, args.pthres, (args.tmin, args.tmax)) peak, pos, _ , _ = get_peak(samples, args.pthres, (args.tmin, args.tmax))
res.loc[iev, c + '_peak'] = peak evpeaks.append(peak)
res.loc[iev, c + '_pos'] = pos evposes.append(pos)
res.loc[iev, :] = evpeaks + evposes
res.to_csv(args.output_file) res.to_csv(args.output_file)
...@@ -51,7 +51,7 @@ def get_peak(vals, thres=8.0, twindow=(0, 192), zero_thres=5.0): ...@@ -51,7 +51,7 @@ def get_peak(vals, thres=8.0, twindow=(0, 192), zero_thres=5.0):
return peak, pos, ped_mean, ped_err return peak, pos, ped_mean, ped_err
parser = argparse.ArgumentParser('Raw waveform analysis') parser = argparse.ArgumentParser('Draw events from raw waveform analysis')
parser.add_argument('root_file', help='a root file of waveform data') parser.add_argument('root_file', help='a root file of waveform data')
parser.add_argument('output_dir', help='Output directory') parser.add_argument('output_dir', help='Output directory')
......
...@@ -205,6 +205,7 @@ std::vector<Module> read_modules(const std::string &path) ...@@ -205,6 +205,7 @@ std::vector<Module> read_modules(const std::string &path)
res.emplace_back(mod); res.emplace_back(mod);
} }
/*
// print out all channels // print out all channels
std::cout << "Read-in " << res.size() << " modules from \"" << path << "\"." << std::endl; std::cout << "Read-in " << res.size() << " modules from \"" << path << "\"." << std::endl;
for (auto &mod : res) { for (auto &mod : res) {
...@@ -219,6 +220,7 @@ std::vector<Module> read_modules(const std::string &path) ...@@ -219,6 +220,7 @@ std::vector<Module> read_modules(const std::string &path)
<< std::endl; << std::endl;
} }
} }
*/
return res; return res;
} }
...@@ -234,58 +236,102 @@ struct BranchData ...@@ -234,58 +236,102 @@ struct BranchData
float ped_mean, ped_err, best_peak; float ped_mean, ped_err, best_peak;
}; };
#define BUF_SIZE 1000
static double buffer[BUF_SIZE], wfbuf[BUF_SIZE], bkbuf[BUF_SIZE];
void refine_pedestal(const std::vector<uint32_t> &raw, float &ped_mean, float &ped_err)
{
int count = 0;
float mean = 0.;
for (auto &val : raw) {
if (std::abs(val - ped_mean) < 2.0 * ped_err) {
buffer[count] = val;
mean += val;
count ++;
}
}
if (count == 0) {
return;
}
mean /= count;
float change = std::abs(ped_mean - mean);
// no outliers
if (change < 0.05 * ped_err) {
return;
}
// recursively refine
float err = 0.;
for (int i = 0; i < count; ++i) {
err += (buffer[i] - mean)*(buffer[i] - mean);
}
ped_mean = mean;
ped_err = std::sqrt(err/count);
refine_pedestal(raw, ped_mean, ped_err);
}
void find_pedestal(const std::vector<uint32_t> &raw, float &ped_mean, float &ped_err)
{
ped_mean = 0;
ped_err = 0;
for (auto &val : raw) {
ped_mean += val;
}
ped_mean /= raw.size();
for (auto &val : raw) {
ped_err += (val - ped_mean)*(val - ped_mean);
}
ped_err = std::sqrt(ped_err/raw.size());
refine_pedestal(raw, ped_mean, ped_err);
}
void waveform_analysis(const std::vector<uint32_t> &raw, BranchData &res) void waveform_analysis(const std::vector<uint32_t> &raw, BranchData &res)
{ {
int len = raw.size(); // no need to analyze
res.nraw = len; if (raw.empty()) {
// sanity check
if (len == 0) {
return; return;
} }
double wf[len], bk[len]; // fill in the raw data
for (int i = 0; i < len; ++i) { res.nraw = raw.size();
for (size_t i = 0; i < raw.size(); ++i) {
res.raw[i] = raw[i]; res.raw[i] = raw[i];
bk[i] = raw[i];
wf[i] = raw[i];
} }
// background analysis // get pedestals
TSpectrum s; find_pedestal(raw, res.ped_mean, res.ped_err);
s.Background(bk, len, 6, TSpectrum::kBackDecreasingWindow,
TSpectrum::kBackOrder2, kFALSE,
TSpectrum::kBackSmoothing3, kFALSE);
// pedestal mean
for (int i = 0; i < len; ++i) { res.ped_mean += bk[i]; }
res.ped_mean /= len;
// pedestal standard deviation // fill in spectrum buffer
for (int i = 0; i < len; ++i) { res.ped_err += std::pow(bk[i] - res.ped_mean, 2); } for (size_t i = 0; i < raw.size(); ++i) {
res.ped_err = std::sqrt(res.ped_err/len); wfbuf[i] = raw[i] - res.ped_mean;
}
// find the peaks // find peaks
for (int i = 0; i < len; ++i) { wf[i] -= bk[i]; } TSpectrum s;
int npeaks = s.SearchHighRes(wf, bk, len, 5.0, 5.0, false, 3, true, 3); int npeaks = s.SearchHighRes(wfbuf, bkbuf, res.nraw, 5.0, 5.0, false, 6, false, 3);
// add peak, timing and integral // fill branch data
double *pos = s.GetPositionX(); double *pos = s.GetPositionX();
res.npul = 0; res.npul = 0;
res.best_peak = 0; res.best_peak = 0;
for (int i = 0; i < npeaks; ++i) { for (int i = 0; i < npeaks; ++i) {
int j = pos[i]; int j = pos[i] - 1;
if (wf[j] < 5.*res.ped_err) { continue; } if (wfbuf[j] < 5.*res.ped_err) { continue; }
res.time[res.npul] = j; res.time[res.npul] = j;
res.peak[res.npul] = wf[j]; res.peak[res.npul] = wfbuf[j];
res.integral[res.npul] = wf[j]; res.integral[res.npul] = wfbuf[j];
j = pos[i] - 1; j = pos[i] - 1;
while ( (j > 0) && (wf[j] > 5.0*res.ped_err) ) { res.integral[res.npul] += wf[j--]; } while ( (j > 0) && (wfbuf[j] > 3.0*res.ped_err) ) { res.integral[res.npul] += wfbuf[j--]; }
j = pos[i] + 1; j = pos[i] + 1;
while ( (j < len) && (wf[j] > 5.0*res.ped_err) ) { res.integral[res.npul] += wf[j++]; } while ( (j < res.nraw) && (wfbuf[j] > 3.0*res.ped_err) ) { res.integral[res.npul] += wfbuf[j++]; }
res.npul ++; res.npul ++;
if (j < 30 && j < 10 && wf[j] > res.best_peak) { if (j < 40 && j < 10 && wfbuf[j] > res.best_peak) {
res.best_peak = wf[j]; res.best_peak = wfbuf[j];
} }
} }
} }
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment