Commit 28425f73 authored by Steffen Hallmann

script to generate a run summary table

parent 2adc644e
import libconf, io # for reading the config files
import uproot
import numpy as np
from astropy.time import Time
import astropy
import pandas as pd
pd.set_option('display.max_rows', 1000)
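# (raise the pandas display limit so the full summary table can be printed at the end)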
from glob import glob
from pathlib import Path
# toplevel path of transferred data
DATA_PATH="/pnfs/ifh.de/acs/radio/diskonly/data/inbox/"
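# expected layout under DATA_PATH (inferred from the path handling below):
#   station<ID>/run<N>/cfg/acq.cfg, station<ID>/run<N>/combined.root, station<ID>/run<N>/headers.root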
summary = []
for filename_cfg in Path(DATA_PATH).rglob("acq.cfg"):
    # get the filenames right
    filename_cfg = str(filename_cfg)
    filename_combined = filename_cfg.replace("cfg/acq.cfg", 'combined.root')
    filename_hdr = filename_cfg.replace("cfg/acq.cfg", 'headers.root')

    ### info from config file
    with io.open(str(filename_cfg)) as config_file:
        config = libconf.load(config_file)
    has_pps = config['radiant']['trigger']['pps']['enabled']
    has_soft = config['radiant']['trigger']['soft']['enabled']
    has_rf0 = config['radiant']['trigger']['RF0']['enabled']
    has_rf1 = config['radiant']['trigger']['RF1']['enabled']
    # soft trigger rate is only approximate (the soft trigger interval is ~1.05x a PPS second)
    soft_trigger_rate = 1./config['radiant']['trigger']['soft']['interval']
    try:
        comment = config['output']['comment']
    except:
        # some early runs have a config that does not contain a comment field
        comment = "MISSING"

    # extract station/run from filename
    split_fn = filename_cfg.replace(DATA_PATH, "").split("/")
    station_cfg = int(split_fn[0].replace("station", ""))
    run_cfg = int(split_fn[1].replace("run", ""))

    ### info from combined files
    try:
        f = uproot.open(filename_combined)
    except:
        print('SKIPPING FILE (not there):', filename_combined, "config comment:", comment)
        continue
    n_combined = 0
    station = None
    run = None
    if 'combined' not in f:
        print('SKIPPING FILE (no tree):', filename_combined, "config comment:", comment)
        continue
    if f['combined'].num_entries == 0:
        print('SKIPPING FILE (empty tree):', filename_combined, "config comment:", comment)
        continue
    f = f['combined']
    # get station number from first event
    station = int(np.array(f['header/station_number'])[0])
    # get run number from first event
    run = int(np.array(f['header/run_number'])[0])
    n_combined = f.num_entries

    ### get full event info from headers file
    try:
        f = uproot.open(filename_hdr)
    except:
        print('SKIPPING FILE:', filename_hdr, "config comment:", comment)
        continue
    if 'hdr' not in f:
        print('SKIPPING FILE:', filename_hdr, "config comment:", comment)
        continue
    if f['hdr'].num_entries == 0:
        print('SKIPPING FILE:', filename_hdr, "config comment:", comment)
        continue
    # get station number from first event
    station_hdr = np.array(f['hdr/station_number'])[0]
    # get run number from first event
    run_hdr = np.array(f['hdr/run_number'])[0]
    # get event times in file
    readout_times_hdr = np.array(f['hdr/readout_time'])
    readout_times_hdr.sort()
    times_first_last = np.array([readout_times_hdr[0], readout_times_hdr[-1]])
    times_first_last = np.array(astropy.time.Time(times_first_last, format='unix'))
    time_first_hdr = times_first_last[0].iso
    time_last_hdr = times_first_last[-1].iso
    dt_hdr = (times_first_last[-1].utc - times_first_last[0].utc)
    n_header = f['hdr'].num_entries
    transfer_fraction = n_combined/float(n_header)
    summary.append([station_cfg, run_cfg, station, run, time_first_hdr, time_last_hdr,
                    round(dt_hdr.sec/60, 2), round(transfer_fraction, 2),
                    round(len(readout_times_hdr)/dt_hdr.sec, 2), n_header, n_combined,
                    has_rf0, has_rf1, has_pps, has_soft, soft_trigger_rate, comment,
                    filename_combined.replace(DATA_PATH, "")])
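# assemble the per-run rows into a DataFrame; the column names below follow the order used in summary.append()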
df = pd.DataFrame(summary)
df.columns=["station_cfg", "run_cfg", "station", "run", "time first event", "time last event", "duration [min]", "transfer subsampling", "trigger rate [Hz]", "n_events (greenland)", "n_events (transferred)", "has_rf0 (surface)", "has_rf1 (deep)", "has_pps (PPS signal)", "has_soft (forced)", "soft_rate [Hz]", "comment","path"]
df = df.sort_values(["station_cfg", "run_cfg"])
df = df.reset_index(drop=True)
df.drop(columns=["station_cfg", "run_cfg"], inplace=True)
df.to_csv("rnog_run_summary/rnog_run_summary.csv", index=False)
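# note: the local output directory rnog_run_summary/ is assumed to already exist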
from pretty_html_table import build_table
html_table_blue_light = build_table(df,
                                    'blue_light',
                                    font_size='small',
                                    font_family='Arial, sans-serif',
                                    text_align='right',
                                    width='auto',
                                    index=True,
                                    even_color='black',
                                    even_bg_color='white')
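# build_table returns the styled table as an HTML string, which is embedded into the page below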
# Save to html file
from datetime import datetime
# datetime object containing current date and time
now = datetime.now()
# dd/mm/YY H:M:S
dt_string = now.strftime("%d/%m/%Y %H:%M:%S")
start = """<html>
<body>
<H1>Summary of data runs transferred from Greenland:</H1>
<p>Table generated: %s</p>
<p>Download table as:&emsp;<a href="rnog_run_summary.csv" download="rnog_run_summary.csv">csv file (.csv)</a>&emsp;<a href="rnog_run_summary.txt" download="rnog_run_summary.txt">pre-formatted text (.txt)</a><br />""" %(dt_string)
end = """ </body>
</html>"""
with open('rnog_run_summary/rnog_run_summary.html', 'w') as f:
    f.write(start)
    f.write(html_table_blue_light)
    f.write(end)
with open('rnog_run_summary/rnog_run_summary.txt', 'w') as outfile:
    df.to_string(outfile, index=False)
print(df)
cd /afs/ifh.de/user/s/shallman
source .zprofile
which python
/afs/ifh.de/group/radio/software/anaconda_p3/bin/python /afs/ifh.de/user/s/shallman/list_transferred_rnog_data.py
cp /afs/ifh.de/user/s/shallman/rnog_run_summary/* ~/public/www/.