diff --git a/.travis.yml b/.travis.yml index 7d593b4..45c9670 100644 --- a/.travis.yml +++ b/.travis.yml @@ -20,10 +20,18 @@ install: - export PATH="$HOME/miniconda/bin:$PATH" - conda update --yes conda - conda info -a - - conda create --yes -n test-environment python="$TRAVIS_PYTHON_VERSION" bitarray numpy pytables pyyaml tqdm nose + - conda create --yes -n test-environment python="$TRAVIS_PYTHON_VERSION" bitarray matplotlib numba numpy pyqt pytables pyyaml qtpy tqdm nose - source activate test-environment + - pip install xvfbwrapper # fake x server for Qt gui tests + # Install basil + - pip install 'basil-daq>=3.0.0,<4.0.0' + # Install pybar_mimosa26_interpreter + - pip install git+https://github.com/SiLab-Bonn/pyBAR_mimosa26_interpreter@master + # Install online_monitor + - pip install git+https://github.com/SiLab-Bonn/online_monitor@development - pip install -e . - conda list + - pip list script: - nosetests diff --git a/README.md b/README.md index 4b66e89..088c308 100644 --- a/README.md +++ b/README.md @@ -38,12 +38,22 @@ Python 2.7 or Python 3 or higher must be used. 
There are many ways to install Py Install additional required packages: ```bash -conda install bitarray numpy pytables pyyaml tqdm +conda install bitarray matplotlib numba numpy pyqt pytables pyyaml qtpy tqdm ``` -Install [Basil](https://github.com/SiLab-Bonn/basil) (>=2.4.12,<3.0.0): +Install [Basil](https://github.com/SiLab-Bonn/basil) (>=3.0.0,<4.0.0): ```bash -pip install 'basil_daq>=2.4.12,<3.0.0' +pip install 'basil_daq>=3.0.0,<4.0.0' +``` + +Install [Mimosa26 Interpreter](https://github.com/SiLab-Bonn/pyBAR_mimosa26_interpreter): +```bash +pip install git+https://github.com/SiLab-Bonn/pyBAR_mimosa26_interpreter@master +``` + +Install [Online Monitor](https://github.com/SiLab-Bonn/online_monitor): +```bash +pip install 'online_monitor>=0.4.2,<0.5' ``` Finally, install pymosa via: diff --git a/data/telescope_data.h5 b/data/telescope_data.h5 new file mode 100644 index 0000000..9e5c56d Binary files /dev/null and b/data/telescope_data.h5 differ diff --git a/pymosa/m26.py b/pymosa/m26.py index 35979ae..bbc76ea 100644 --- a/pymosa/m26.py +++ b/pymosa/m26.py @@ -23,6 +23,7 @@ logger = logging.getLogger(__name__) logger.setLevel(logging.INFO) +FORMAT = '%(asctime)s [%(name)-17s] - %(levelname)-7s %(message)s' class m26(object): @@ -111,7 +112,7 @@ def check_jtag(irs, IR): self.dut['JTAG'].scan_ir([BitLogic(IR[ir])] * 6) ret[ir] = self.dut['JTAG'].scan_dr([self.dut[ir][:]])[0] # check registers - for k, v in ret.iteritems(): + for k, v in ret.items(): if k == "CTRL_8b10b_REG1_ALL": pass elif k == "BSR_ALL": @@ -205,7 +206,7 @@ def scan(self): while not self.stop_scan: sleep(1.0) if not got_data: - if self.m26_readout.data_words_per_second() > 0: + if self.m26_readout.data_words_per_second()[0] > 0: got_data = True logging.info('Taking data...') if self.max_triggers: @@ -261,6 +262,7 @@ def start(self): # set up logger self.fh = logging.FileHandler(self.run_filename + '.log') self.fh.setLevel(logging.DEBUG) + self.fh.setFormatter(logging.Formatter(FORMAT)) self.logger 
= logging.getLogger() self.logger.addHandler(self.fh) @@ -396,7 +398,7 @@ def main(): # Open Mimosa26 std. configuration pymosa_path = os.path.dirname(pymosa.__file__) with open(os.path.join(pymosa_path, 'm26_configuration.yaml'), 'r') as f: - config = yaml.load(f) + config = yaml.safe_load(f) # Set config from arguments if args.filename is not None: diff --git a/pymosa/m26_configuration.yaml b/pymosa/m26_configuration.yaml index e85864f..90ab856 100644 --- a/pymosa/m26_configuration.yaml +++ b/pymosa/m26_configuration.yaml @@ -6,7 +6,7 @@ m26_jtag_configuration : True # Send Mimosa26 configuration via JTAG, default: no_data_timeout : 30 # No data timeout after which the scan will be aborted, in seconds; if 0, the timeout is disabled scan_timeout : 0 # Timeout after which the scan will be stopped, in seconds; if 0, the timeout is disabled; use Ctrl-C to stop run max_triggers : 0 # Maximum number of triggers; if 0, there is no limit on the number of triggers; use Ctrl-C to stop run -#send_data : 'tcp://127.0.0.1:4500' # TCP address to which the telescope data is send; to allow incoming connections on all interfaces use 0.0.0.0 +send_data : 'tcp://127.0.0.1:8500' # TCP address to which the telescope data is send; to allow incoming connections on all interfaces use 0.0.0.0 #output_folder: telescope_data # Name of the subfolder which will be created in order to store the telescope data #filename: run_1 # Filename of the telescope data file diff --git a/pymosa/m26_raw_data.py b/pymosa/m26_raw_data.py index 0c9fdea..090dee2 100644 --- a/pymosa/m26_raw_data.py +++ b/pymosa/m26_raw_data.py @@ -277,7 +277,7 @@ def save_conf(): scan_param_table = h5_file.create_table(configuration_group, name=configuation_name, description=NameValue, title=configuation_name) row_scan_param = scan_param_table.row - for key, value in dict.iteritems(configuration): + for key, value in dict.items(configuration): row_scan_param['name'] = key row_scan_param['value'] = str(value) 
row_scan_param.append() diff --git a/pymosa/m26_readout.py b/pymosa/m26_readout.py index 6af7fce..4bf0670 100644 --- a/pymosa/m26_readout.py +++ b/pymosa/m26_readout.py @@ -1,7 +1,6 @@ import logging import datetime from time import sleep, time, mktime -from itertools import izip from threading import Thread, Event, Lock, Condition from collections import deque, Iterable import sys @@ -12,6 +11,12 @@ data_iterable = ("data", "timestamp_start", "timestamp_stop", "error") +# Python 2/3 compability +try: + basestring # noqa +except NameError: + basestring = str # noqa + def get_float_time(): '''returns time as double precision floats - Time64 in pytables - mapping to and from python datetime's @@ -206,7 +211,7 @@ def wait_for_thread_timeout(thread, fifo, timeout): thread.join(timeout=timeout) if thread.is_alive(): raise StopTimeout('Stopping %s readout thread timed out after %0.1fs' % (fifo, timeout)) - except StopTimeout, e: + except StopTimeout as e: self.force_stop[fifo].set() if self.errback: self.errback(sys.exc_info()) @@ -329,7 +334,7 @@ def worker(self, fifo): if data_tuple is None: # if None then exit break else: - for index, (filter_func, converter_func, fifo_select) in enumerate(izip(self.filter_func, self.converter_func, self.fifo_select)): + for index, (filter_func, converter_func, fifo_select) in enumerate(zip(self.filter_func, self.converter_func, self.fifo_select)): if fifo_select is None or fifo_select == fifo: # filter and do the conversion converted_data_tuple = convert_data_iterable((data_tuple,), filter_func=filter_func, converter_func=converter_func)[0] @@ -359,7 +364,7 @@ def writer(self, index, no_data_timeout=None): while True: try: if no_data_timeout: - for m26_id, time_last_data_m26 in time_last_data.iteritems(): + for m26_id, time_last_data_m26 in time_last_data.items(): if time_last_data_m26 + no_data_timeout < time(): raise NoDataTimeout('Received no data for %0.1f second(s) from Mimosa26 plane with ID %d' % (no_data_timeout, m26_id)) if 
time_last_data_all + no_data_timeout < time(): diff --git a/pymosa/noise_occupancy_scan.py b/pymosa/noise_occupancy_scan.py new file mode 100644 index 0000000..f2a7b74 --- /dev/null +++ b/pymosa/noise_occupancy_scan.py @@ -0,0 +1,130 @@ +import logging +import os +from time import sleep + +import yaml +import numpy as np +from tqdm import tqdm +from matplotlib.backends.backend_pdf import PdfPages + +from pymosa.m26 import m26 +from pymosa import online as oa +from pymosa.m26_raw_data import open_raw_data_file, send_meta_data +from pymosa import plotting as plotting + + +class NoiseOccScan(m26): + '''Noise Occupancy Scan + + This script measures the noise occupancy. + ''' + + def print_log_status(self): + m26_rx_names = [rx.name for rx in self.dut.get_modules('m26_rx')] + logging.info('Mimosa26 RX channel: %s', " | ".join([name.rjust(3) for name in m26_rx_names])) + for i, region in enumerate(['A', 'B', 'C', 'D']): + logging.info('Fake hit rate %s: %s', region, " | ".join([format(count, '.1e').rjust(max(3, len(m26_rx_names[index]))) for index, count in enumerate(self.fake_hit_rate_meas[:len(m26_rx_names), i])])) + logging.info('Noise occupancy: %s', " | ".join([repr(count).rjust(max(3, len(m26_rx_names[index]))) for index, count in enumerate(self.hit_occ_map[:, :, :len(m26_rx_names)].sum(axis=(0, 1)))])) + + def take_data(self, update_rate=1): + with self.readout(): + logging.info('Taking data...') + self.pbar = tqdm(total=self.scan_timeout, ncols=80) + for _ in range(int(self.scan_timeout / update_rate)): + sleep(update_rate) + try: + self.pbar.update(update_rate) + except ValueError: + pass + + self.pbar.close() + + # Get hit occupancy for every plane using fast online analysis + hit_occ_map = self.hist_occ.get() + + return hit_occ_map + + def init(self, init_conf=None, configure_m26=True): + # set name of scan + init_conf["run_id"] = 'noise_occupancy_scan' + + super(NoiseOccScan, self).init(init_conf=init_conf, configure_m26=configure_m26) + self.hist_occ = 
oa.OccupancyHistogramming() + + self.scan_timeout = self.telescope_conf.get('scan_timeout', 5) # time for which noise occupancy is measured in seconds + + def open_file(self): + self.raw_data_file = open_raw_data_file(filename=self.run_filename, + mode='w', + title=os.path.basename(self.run_filename), + socket_address=self.send_data) + if self.raw_data_file.socket: + # send reset to indicate a new scan for the online monitor + send_meta_data(self.raw_data_file.socket, None, name='Reset') + + def handle_data(self, data, new_file=False, flush=True): + '''Handling of raw data and meta data during readout. + ''' + for data_tuple in data: + if data_tuple is None: + continue + self.raw_data_file.append(data_iterable=data_tuple, scan_parameters=None, new_file=new_file, flush=flush) + # Add every raw data chunk to online analysis + for data in data_tuple: + self.hist_occ.add(raw_data=data[0]) + + def scan(self): + # Define columns which belong to regions A, B, C, D + m26_regions = [(0, 288), (288, 576), (576, 864), (864, 1151)] + self.fake_hit_rate_meas = np.zeros(shape=(6, 4)) + + # Take data and get noise occupancy + self.hit_occ_map = self.take_data() + for plane in range(6): + for region in range(4): + self.fake_hit_rate_meas[plane, region] = self.hit_occ_map[m26_regions[region][0]:m26_regions[region][1], :, plane].sum() / 576. / 288. 
/ self.scan_timeout / 1e6 * 115.2 + + self.hist_occ.stop.set()  # stop analysis process + + # Log status (fake hit rate, noise occupancy, threshold setting) + self.print_log_status() + + def analyze(self): + output_file = self.run_filename + '_interpreted.pdf' + logging.info('Plotting results into {0}'.format(output_file)) + with PdfPages(output_file) as output_pdf: + for plane in range(6): + plotting.plot_occupancy(hist=np.ma.masked_where(self.hit_occ_map[:, :, plane] == 0, self.hit_occ_map[:, :, plane]).T, + title='Occupancy for plane %i' % plane, + output_pdf=output_pdf) + + +if __name__ == "__main__": + try: + from pymosa import __version__ as pymosa_version + except ImportError: + try: + with open(os.path.join(os.path.split(os.path.split(os.path.abspath(__file__))[0])[0], 'VERSION')) as version_file: + pymosa_version = version_file.read().strip() + except IOError: + raise + pymosa_version = "(local)" + + import argparse + parser = argparse.ArgumentParser(description='Noise occupancy scan for pymosa %s\nExample: python noise_occupancy_scan.py' % pymosa_version, formatter_class=argparse.RawTextHelpFormatter) + parser.add_argument('-t', '--scan_timeout', type=int, metavar='', action='store', help='Scan timeout, time in which noise occupancy is integrated, in seconds') + args = parser.parse_args() + + with open('./m26_configuration.yaml', 'r') as f: + config = yaml.safe_load(f) + + if args.scan_timeout is not None: + config["scan_timeout"] = args.scan_timeout + + noise_occ_scan = NoiseOccScan()  # None: use default hardware configuration + # Initialize telescope hardware and set up parameters + noise_occ_scan.init(init_conf=config) + # Start telescope readout + noise_occ_scan.start() + # Close the resources + noise_occ_scan.close() diff --git a/pymosa/noise_tuning.py b/pymosa/noise_tuning.py new file mode 100644 index 0000000..109b3c5 --- /dev/null +++ b/pymosa/noise_tuning.py @@ -0,0 +1,244 @@ +import logging +import os +from time import sleep + +import 
yaml +import numpy as np +from tqdm import tqdm +from matplotlib.backends.backend_pdf import PdfPages + +from basil.utils.BitLogic import BitLogic + +from pymosa.m26 import m26 +from pymosa import online as oa +from pymosa.m26_raw_data import open_raw_data_file, send_meta_data +from pymosa import plotting as plotting + + +class NoiseOccTuning(m26): + '''Noise Occupancy Tuning + + This script finds the lowest possible threshold setting for a specified fake hit rate (per pixel, per readout frame [115.2 us]) + by setting for each Mimosa26 plane the corresponding threshold of the four regions A, B, C, D. + ''' + + def store_configuration(self, config_file=None): + if config_file is None: + config_file = './m26_config/m26_noise_tuning.yaml' + logging.info('Dumping configuration to {0:s}'.format(config_file)) + with open(config_file, mode='w') as f: + yaml.dump(self.dut.get_configuration(), f) + + def print_log_status(self): + m26_rx_names = [rx.name for rx in self.dut.get_modules('m26_rx')] + logging.info('Mimosa26 RX channel: %s', " | ".join([name.rjust(3) for name in m26_rx_names])) + for i, region in enumerate(['A', 'B', 'C', 'D']): + logging.info('Threshold setting %s: %s', region, " | ".join([repr(count).rjust(max(3, len(m26_rx_names[index]))) for index, count in enumerate(self.thr[:len(m26_rx_names), i])])) + for i, region in enumerate(['A', 'B', 'C', 'D']): + logging.info('Fake hit rate %s: %s', region, " | ".join([format(count, '.1e').rjust(max(3, len(m26_rx_names[index]))) for index, count in enumerate(self.fake_hit_rate_meas[:len(m26_rx_names), i])])) + logging.info('Noise occupancy: %s', " | ".join([repr(count).rjust(max(3, len(m26_rx_names[index]))) for index, count in enumerate(self.hit_occ_map[:, :, :len(m26_rx_names)].sum(axis=(0, 1)))])) + + def set_threshold(self, thr_a=None, thr_b=None, thr_c=None, thr_d=None, thr_global=None): + ''' + Sets and writes thresholds to Mimosa26. 
It can be written a global threshold (IVDREF2) or + a local threshold (IVDREF1A - D) for four regions (A, B, C, D) of the chip. + + Note: + - Threshold configuration belongs to BIAS_DAC_ALL register (6 x 152 bits for 6 planes, MSB: plane 6, LSB: plane 1) + - Local threshold: IVDREF1A - D stored in bits 104-112, 96-104, 88-96, 80-88 of 152 bit word + - Global threshold: IVDREF2 stored in bits 112-120 of 152 bit word + ''' + self.dut['JTAG'].reset() + # Convert binary string to array in order to modify it + bias_dac_all = np.array(list(map(int, self.dut['BIAS_DAC_ALL'][:]))) + # Set local thresholds A - D. MSB: plane 6, LSB: plane 1 + if thr_a is not None: + for i, thr in enumerate(thr_a): + bias_dac_all[(5 - i) * 152 + 104:(5 - i) * 152 + 112] = np.array(list(map(int, format(thr, '008b')[::-1]))) + if thr_b is not None: + for i, thr in enumerate(thr_b): + bias_dac_all[(5 - i) * 152 + 96:(5 - i) * 152 + 104] = np.array(list(map(int, format(thr, '008b')[::-1]))) + if thr_c is not None: + for i, thr in enumerate(thr_c): + bias_dac_all[(5 - i) * 152 + 88:(5 - i) * 152 + 96] = np.array(list(map(int, format(thr, '008b')[::-1]))) + if thr_d is not None: + for i, thr in enumerate(thr_d): + bias_dac_all[(5 - i) * 152 + 80:(5 - i) * 152 + 88] = np.array(list(map(int, format(thr, '008b')[::-1]))) + # Set global threshold + if thr_global is not None: + for i, thr in enumerate(thr_global): + bias_dac_all[(5 - i) * 152 + 112:(5 - i) * 152 + 120] = np.array(list(map(int, format(thr, '008b')[::-1]))) + + # Set configuration + self.dut['BIAS_DAC_ALL'][:] = ''.join(map(str, bias_dac_all[::-1])) + # Write register + self.dut['JTAG'].scan_ir([BitLogic('01111')] * 6) + self.dut['JTAG'].scan_dr([self.dut['BIAS_DAC_ALL'][:]])[0] + + def deactivate_column(self, disable_columns): + ''' + Deactivate specific columns of Mimosa26. 
+ MSB: plane 6, LSB: plane 1 + ''' + self.dut['JTAG'].reset() + # Convert binary string to array in order to modify it + dis_discri_all_old = np.array(list(map(int, self.dut['DIS_DISCRI_ALL'][:]))) + dis_discri_all = np.logical_or(dis_discri_all_old, disable_columns).astype(int) + # Set configuration + self.dut['DIS_DISCRI_ALL'][:] = ''.join(map(str, dis_discri_all[::-1])) + # Write register + self.dut['JTAG'].scan_ir([BitLogic('10001')] * 6) + self.dut['JTAG'].scan_dr([self.dut['DIS_DISCRI_ALL'][:]])[0] + + def take_data(self, update_rate=1): + with self.readout(): + logging.info('Taking data...') + self.pbar = tqdm(total=self.scan_timeout, ncols=80) + for _ in range(int(self.scan_timeout / update_rate)): + sleep(update_rate) + try: + self.pbar.update(update_rate) + except ValueError: + pass + + self.pbar.close() + + # Get hit occupancy for every plane using fast online analysis + hit_occ_map = self.hist_occ.get() + + return hit_occ_map + + def init(self, init_conf=None, configure_m26=True): + # set name of scan + init_conf["run_id"] = 'noise_occupancy_tuning' + + super(NoiseOccTuning, self).init(init_conf=init_conf, configure_m26=configure_m26) + self.hist_occ = oa.OccupancyHistogramming() + + self.scan_timeout = self.telescope_conf.get('scan_timeout', 5)  # time for which noise occupancy is measured in seconds + self.fake_hit_rate = self.telescope_conf.get('fake_hit_rate', 1e-6)  # average fake hits per pixel per 115.2 us + self.thr_start = self.telescope_conf.get('thr_start', 255)  # start value from which threshold is lowered, same value for all regions A, B, C, D + self.thr_step = self.telescope_conf.get('thr_step', 2)  # step size for lowering threshold, same value for all regions A, B, C, D + + def open_file(self): + self.raw_data_file = open_raw_data_file(filename=self.run_filename, + mode='w', + title=os.path.basename(self.run_filename), + socket_address=self.send_data) + if self.raw_data_file.socket: + # send reset to indicate a new scan for the online monitor + 
send_meta_data(self.raw_data_file.socket, None, name='Reset') + + def handle_data(self, data, new_file=False, flush=True): + '''Handling of raw data and meta data during readout. + ''' + for data_tuple in data: + if data_tuple is None: + continue + self.raw_data_file.append(data_iterable=data_tuple, scan_parameters=None, new_file=new_file, flush=flush) + # Add every raw data chunk to online analysis + for data in data_tuple: + self.hist_occ.add(raw_data=data[0]) + + def scan(self): + logging.info('Allowed fake hit rate (per pixel / 115.2 us): {0:.1e}'.format(self.fake_hit_rate)) + logging.info('Starting from threshold setting {0} in steps of {1}'.format(self.thr_start, self.thr_step)) + + # Define columns which belong to regions A, B, C, D + m26_regions = [(0, 288), (288, 576), (576, 864), (864, 1151)] + + # Find lowest threshold setting until max fake hit rate is reached. + proceed = np.ones(shape=(6, 4), dtype=np.bool) # Indicator if fake hit rate is reached (6 planes, 4 regions) + self.fake_hit_rate_meas = np.full(shape=proceed.shape, fill_value=np.nan) + thr_start = np.full(shape=proceed.shape, fill_value=self.thr_start) + self.thr = thr_start + init = True + + while np.any(proceed): + # Set threshold for all planes + self.set_threshold(thr_a=self.thr[:, 0], thr_b=self.thr[:, 1], thr_c=self.thr[:, 2], thr_d=self.thr[:, 3]) + # Take data and get hit occupancy + self.hit_occ_map = self.take_data() + + # Calculate fake hit rate + for region in range(4): + occs = self.hit_occ_map[slice(m26_regions[region][0], m26_regions[region][1]), :, :].sum(axis=(0, 1)) + self.fake_hit_rate_meas[np.nonzero(occs), region] = occs[np.nonzero(occs)] / 576. / 288. 
/ self.scan_timeout / 1e6 * 115.2 + + # Log status (fake hit rate, noise occupancy, threshold setting) + self.print_log_status() + # Check if threshold needs to be lowered or increased + for plane in range(6): + for region in range(4): + if proceed[plane, region]: + if (self.fake_hit_rate_meas[plane, region] < self.fake_hit_rate) or np.isnan(self.fake_hit_rate_meas[plane, region]): + self.thr[plane, region] -= self.thr_step + else: + if self.thr[plane, region] + self.thr_step <= 255: + self.thr[plane, region] += self.thr_step + else: + self.thr[plane, region] = 255 + proceed[plane, region] = False + + # Append measured fake hit rate for later result plot + if init: + self.fake_hit_rate_meas_all = self.fake_hit_rate_meas[np.newaxis, :, :] + init = False + else: + self.fake_hit_rate_meas_all = np.concatenate([self.fake_hit_rate_meas_all, self.fake_hit_rate_meas[np.newaxis, :, :]], axis=0) + + for plane in range(6): + logging.info('Fake hit rate limit is reached. Final thresholds are {0}'.format([val for val in self.thr[plane, :]])) + + self.hist_occ.stop.set()  # stop analysis process + + # Store configuration to file + self.store_configuration() + + def analyze(self): + output_file = self.run_filename + '_interpreted.pdf' + logging.info('Plotting results into {0:s}'.format(output_file)) + with PdfPages(output_file) as output_pdf: + plotting.plot_noise_tuning_result(fake_hit_rate=self.fake_hit_rate_meas_all, + fake_hit_rate_spec=self.fake_hit_rate, + output_pdf=output_pdf) + + +if __name__ == "__main__": + try: + from pymosa import __version__ as pymosa_version + except ImportError: + try: + with open(os.path.join(os.path.split(os.path.split(os.path.abspath(__file__))[0])[0], 'VERSION')) as version_file: + pymosa_version = version_file.read().strip() + except IOError: + raise + pymosa_version = "(local)" + + import argparse + parser = argparse.ArgumentParser(description='Noise occupancy tuning for pymosa %s\nExample: python noise_tuning.py' % pymosa_version, 
formatter_class=argparse.RawTextHelpFormatter) + parser.add_argument('-t', '--scan_timeout', type=int, metavar='', action='store', help='Scan timeout, time in which noise occupancy is integrated, in seconds') + parser.add_argument('-r', '--fake_hit_rate', type=float, metavar='', action='store', help='Allowed (average) fake hit rate, per pixel per readout frame (115.2 us)') + parser.add_argument('-s', '--thr_start', type=int, metavar='', action='store', help='Start value from which threshold is lowered, same value for all regions A, B, C, D') + parser.add_argument('-w', '--thr_step', type=int, metavar='', action='store', help='Step width for lowering threshold, same value for all regions A, B, C, D') + args = parser.parse_args() + + with open('./m26_configuration.yaml', 'r') as f: + config = yaml.safe_load(f) + + if args.scan_timeout is not None: + config["scan_timeout"] = args.scan_timeout + if args.fake_hit_rate is not None: + config["fake_hit_rate"] = args.fake_hit_rate + if args.thr_start is not None: + config["thr_start"] = args.thr_start + if args.thr_step is not None: + config["thr_step"] = args.thr_step + + noise_tuning = NoiseOccTuning()  # None: use default hardware configuration + # Initialize telescope hardware and set up parameters + noise_tuning.init(init_conf=config) + # Start telescope readout + noise_tuning.start() + # Close the resources + noise_tuning.close() diff --git a/pymosa/online.py b/pymosa/online.py new file mode 100644 index 0000000..a312ffa --- /dev/null +++ b/pymosa/online.py @@ -0,0 +1,341 @@ +# +# ------------------------------------------------------------ +# Copyright (c) All rights reserved +# SiLab, Institute of Physics, University of Bonn +# ------------------------------------------------------------ +# + +''' + Online data analysis functions +''' + +import ctypes +import logging +import multiprocessing +import time +import queue + +import numpy as np +from numba import njit + +logger = 
logging.getLogger('OnlineAnalysis') + + +# Error codes +TRIGGER_NUMBER_ERROR = 0x00000001 # Trigger number has not increased by one +NO_TRIGGER_WORD_ERROR = 0x00000002 # Event has no trigger word associated +TRIGGER_TIMESTAMP_OVERFLOW = 0x00000004 # Indicating the overflow of the trigger timestamp +TRIGGER_NUMBER_OVERFLOW = 0x00000008 # Indicating the overflow of the trigger number +DATA_ERROR = 0x00000010 # Indicating any occurrence of data errors in the Momosa26 protocol (e.g., invalid column/row, invalid data length, data loss) +TIMESTAMP_OVERFLOW = 0x00000020 # Indicating the overflow of the Mimosa26 timestamp +FRAME_ID_OVERFLOW = 0x00000040 # Indicating the overflow of the Mimosa26 frame ID +OVERFLOW_FLAG = 0x00000080 # Indicating the occurrence of the overflow flag for a particular Mimosa26 row + + +# Mimosa26 raw data +@njit +def is_mimosa_data(word): # Check for Mimosa data word + return (0xff000000 & word) == 0x20000000 + + +@njit +def get_plane_number(word): # There are 6 planes in the stream, starting from 1; return plane number + return (word >> 20) & 0xf + + +# Frame header +@njit +def is_frame_header(word): # Check if frame header high word (frame start flag is set by R/0) + return (0x00010000 & word) == 0x00010000 + + +@njit +def is_data_loss(word): # Indicates data loss + return (0x00020000 & word) == 0x00020000 + + +@njit +def get_m26_timestamp_low(word): # Timestamp of Mimosa26 data from frame header low (generated by R/0) + return 0x0000ffff & word + + +@njit +def get_m26_timestamp_high(word): # Timestamp of Mimosa26 data from frame header high (generated by R/0) + return (0x0000ffff & word) << 16 + + +@njit +def is_frame_header0(word): # Check if frame header0 word + return (0x0000ffff & word) == 0x00005555 + + +@njit +def is_frame_header1(word, plane): # Check if frame header1 word for the actual plane + return (0x0000ffff & word) == (0x00005550 | plane) + + +# Frame counter +@njit +def get_frame_id_low(word): # Get the frame id from the frame 
id low word + return 0x0000ffff & word + + +@njit +def get_frame_id_high(word): # Get the frame id from the frame id high word + return (0x0000ffff & word) << 16 + + +# Data length +@njit +def get_frame_length(word): # Get length of Mimosa26 frame + return (0x0000ffff & word) + + +# Status / line word +@njit +def get_n_words(word): # Return the number of data words for the actual row + return 0x0000000f & word + + +@njit +def get_row(word): # Extract row from Mimosa26 hit word + return (0x00007ff0 & word) >> 4 + + +@njit +def has_overflow(word): + return (0x00008000 & word) != 0 + + +# State word +@njit +def get_n_hits(word): # Returns the number of hits given by actual column word + return 0x00000003 & word + + +@njit +def get_column(word): # Extract column from Mimosa26 hit word + return (0x00001ffc & word) >> 2 + + +# Frame trailer +@njit +def is_frame_trailer0(word): # Check if frame trailer0 word + return (0x0000ffff & word) == 0xaa50 + + +@njit +def is_frame_trailer1(word, plane): # Check if frame trailer1 word for the actual plane + return (0x0000ffff & word) == (0xaa50 | plane) + + +@njit +def histogram(raw_data, occ_hist, m26_frame_ids, m26_frame_length, m26_data_loss, m26_word_index, m26_timestamps, last_m26_timestamps, m26_n_words, m26_rows, m26_frame_status, last_completed_m26_frame_ids): + ''' Raw data to 2D occupancy histogram ''' + + # Loop over the raw data words + for raw_data_word in raw_data: + if is_mimosa_data(raw_data_word): # Check if word is from Mimosa26. + # Check to which plane the data belongs + plane_id = get_plane_number(raw_data_word) - 1 # The actual_plane if the actual word belongs to (0 to 5) + # In the following, interpretation of the raw data words of the actual plane + # Check for data loss bit set by the M26 RX FSM + if is_data_loss(raw_data_word): + # Setting the data loss flag to true. + # The data loss bit is set by the M26 RX FSM. 
+ # The bit is set only once after each data loss, i.e., + # the first data word after the lost data words. + m26_data_loss[plane_id] = True + if is_frame_header(raw_data_word): # New frame for actual plane, M26 timestamp (LSB), frame header0 + # Get Mimosa26 timestamp from raw data word (LSB) + last_m26_timestamps[plane_id] = m26_timestamps[plane_id] + m26_timestamps[plane_id] = (m26_timestamps[plane_id] & 0x7fffffffffff0000) | get_m26_timestamp_low(raw_data_word) + m26_word_index[plane_id] = 0 + # Reset parameters after header + m26_frame_length[plane_id] = 0 + m26_n_words[plane_id] = 0 + m26_data_loss[plane_id] = False + m26_frame_status[plane_id] = 0 + elif m26_data_loss[plane_id] is True: # Trash data + # Nothing to do, do not trust data + continue + else: # Interpreting M26 raw data + m26_word_index[plane_id] += 1 + if m26_word_index[plane_id] == 1: # Mimosa26 timestamp, M26 timestamp (MSB), frame header1 + # Check for 32bit timestamp overflow + if m26_timestamps[plane_id] >= 0 and get_m26_timestamp_high(raw_data_word) < (m26_timestamps[plane_id] & 0x00000000ffff0000): + m26_frame_status[plane_id] |= TIMESTAMP_OVERFLOW + m26_timestamps[plane_id] = np.int64(2**32) + m26_timestamps[plane_id] + # Get Mimosa26 timestamp from raw data word (MSB) + m26_timestamps[plane_id] = get_m26_timestamp_high(raw_data_word) | (m26_timestamps[plane_id] & 0x7fffffff0000ffff) + elif m26_word_index[plane_id] == 2: # Mimosa26 frame ID + # Get Mimosa26 frame ID from raw data word (LSB) + m26_frame_ids[plane_id] = (m26_frame_ids[plane_id] & 0x7fffffffffff0000) | get_frame_id_low(raw_data_word) + elif m26_word_index[plane_id] == 3: # Mimosa26 frame ID + # Check for 32bit frame ID overflow + if m26_frame_ids[plane_id] >= 0 and get_frame_id_high(raw_data_word) < (m26_frame_ids[plane_id] & 0x00000000ffff0000): + m26_frame_status[plane_id] |= FRAME_ID_OVERFLOW + m26_frame_ids[plane_id] = np.int64(2**32) + m26_frame_ids[plane_id] + # Get Mimosa26 frame ID from raw data word (MSB) + 
m26_frame_ids[plane_id] = get_frame_id_high(raw_data_word) | (m26_frame_ids[plane_id] & 0x7fffffff0000ffff) + elif m26_word_index[plane_id] == 4: # Mimosa26 frame length + m26_frame_length[plane_id] = get_frame_length(raw_data_word) + if m26_frame_length[plane_id] > 570: # Defined in the Mimosa26 protocol, no more than 570 "useful" data words + m26_data_loss[plane_id] = True + continue + elif m26_word_index[plane_id] == 5: # Mimosa26 frame length, a second time + if m26_frame_length[plane_id] != get_frame_length(raw_data_word): # DO0 & DO1 should always have the same data length + m26_data_loss[plane_id] = True + continue + else: + m26_frame_length[plane_id] += get_frame_length(raw_data_word) + elif m26_word_index[plane_id] == 5 + m26_frame_length[plane_id] + 1: # Frame trailer0 + if not is_frame_trailer0(raw_data_word): + m26_data_loss[plane_id] = True + continue + elif m26_word_index[plane_id] == 5 + m26_frame_length[plane_id] + 2: # Frame trailer1 + if not is_frame_trailer1(raw_data_word, plane=plane_id + 1): + m26_data_loss[plane_id] = True + continue + else: + last_completed_m26_frame_ids[plane_id] = m26_frame_ids[plane_id] + elif m26_word_index[plane_id] > 5 + m26_frame_length[plane_id] + 2: # Ignore any occurrence of additional raw data words + m26_data_loss[plane_id] = True + continue + else: # Column / Row words (actual data word with hits) + if m26_n_words[plane_id] == 0: # First word contains the row info and the number of data words for this row + if m26_word_index[plane_id] == 5 + m26_frame_length[plane_id]: # Always even amount of words or this fill word is used + # Ignore this fill word + continue + else: + m26_n_words[plane_id] = get_n_words(raw_data_word) + m26_rows[plane_id] = get_row(raw_data_word) # Get row from data word + if m26_rows[plane_id] >= 576: # Row overflow + m26_data_loss[plane_id] = True + continue + if has_overflow(raw_data_word): + m26_frame_status[plane_id] |= OVERFLOW_FLAG # set overflow bit + else: + m26_frame_status[plane_id] 
& ~OVERFLOW_FLAG # unset overflow bit + else: + m26_n_words[plane_id] = m26_n_words[plane_id] - 1 # Count down the words + n_hits = get_n_hits(raw_data_word) + column = get_column(raw_data_word) # Get column from data word + if column >= 1152: # Column overflow + m26_data_loss[plane_id] = True + continue + for k in range(n_hits + 1): + if column + k >= 1152: + m26_data_loss[plane_id] = True + break + # Fill occupancy hist + occ_hist[column + k, m26_rows[plane_id], plane_id] += 1 + else: # Raw data word is TLU/trigger word + pass # Not needed here + + return m26_frame_ids, m26_frame_length, m26_data_loss, m26_word_index, m26_timestamps, last_m26_timestamps, m26_n_words, m26_rows, m26_frame_status, last_completed_m26_frame_ids + + +class OccupancyHistogramming(object): + ''' Fast histogramming of raw data to a 2D hit histogramm + + No event building and seperate process for speed up + ''' + _queue_timeout = 0.01 # max blocking time to delete object [s] + _raw_data_queue = multiprocessing.Queue() + stop = multiprocessing.Event() + lock = multiprocessing.Lock() + + def __init__(self): + # Create shared memory 32 bit unsigned int numpy array + shared_array_base = multiprocessing.Array(ctypes.c_uint, 1152 * 576 * 6) + shared_array = np.ctypeslib.as_array(shared_array_base.get_obj()) + self.occ_hist = shared_array.reshape(1152, 576, 6) + + self.p = multiprocessing.Process(target=self.worker, + args=(self._raw_data_queue, shared_array_base, + self.lock, self.stop, )) + self.p.start() + + def add(self, raw_data): + ''' Add raw data to be histogrammed ''' + self._raw_data_queue.put(raw_data) + + def reset(self, wait=True, timeout=0.5): + ''' Reset histogram ''' + if not wait: + if self._raw_data_queue.qsize() != 0: + logger.warning('Resetting histogram while adding data') + else: + n_time = 0 + while self._raw_data_queue.qsize() != 0: + time.sleep(0.01) + n_time += 1 + if n_time * 0.01 > timeout: + logger.warning('Resetting histogram while adding data') + break + with 
self.lock: + # No overwrite with a new zero array due to shared memory + for col in range(1152): + for row in range(576): + for plane in range(6): + self.occ_hist[col, row, plane] = 0 + + def get(self, wait=True, timeout=0.5, reset=True): + ''' Get the result histogram ''' + if not wait: + if self._raw_data_queue.qsize() != 0: + logger.warning('Getting histogram while adding data') + else: + n_time = 0 + while self._raw_data_queue.qsize() != 0: + time.sleep(0.01) + n_time += 1 + if n_time * 0.01 > timeout: + logger.warning('Getting histogram while adding data') + break + with self.lock: + if reset: + occ_hist = self.occ_hist.copy() + # No overwrite with a new zero array due to shared memory + for col in range(1152): + for row in range(576): + for plane in range(6): + self.occ_hist[col, row, plane] = 0 + return occ_hist + else: + return self.occ_hist + + def worker(self, raw_data_queue, shared_array_base, lock, stop): + ''' Histogramming in seperate process ''' + occ_hist = np.ctypeslib.as_array(shared_array_base.get_obj()).reshape(1152, 576, 6) + # Raw data interpreter + # Per frame variables + m26_frame_ids = np.zeros(shape=(6, ), dtype=np.int64) # The Mimosa26 frame ID of the actual frame + m26_frame_length = np.zeros(shape=(6, ), dtype=np.uint32) # The number of "useful" data words for the actual frame + m26_data_loss = np.ones((6, ), dtype=np.bool) # The data loss status for the actual frame + m26_word_index = np.zeros(shape=(6, ), dtype=np.uint32) # The word index per device of the actual frame + m26_timestamps = np.zeros(shape=(6, ), dtype=np.int64) # The timestamp for each plane (in units of 40 MHz) + last_m26_timestamps = np.zeros(shape=(6, ), dtype=np.int64) + m26_n_words = np.zeros(shape=(6, ), dtype=np.uint32) # The number of words containing column / row info + m26_rows = np.zeros(shape=(6, ), dtype=np.uint32) # The actual readout row (rolling shutter) + m26_frame_status = np.zeros(shape=(6, ), dtype=np.uint32) # The status flags for the actual frames + 
last_completed_m26_frame_ids = np.full(shape=6, dtype=np.int64, fill_value=-1) # The status if the frame is complete for the actual frame + while not stop.is_set(): + try: + raw_data = raw_data_queue.get(timeout=self._queue_timeout) + with lock: + m26_frame_ids, m26_frame_length, m26_data_loss, m26_word_index, m26_timestamps, last_m26_timestamps, m26_n_words, m26_rows, m26_frame_status, last_completed_m26_frame_ids = histogram( + raw_data, occ_hist, m26_frame_ids, m26_frame_length, m26_data_loss, m26_word_index, m26_timestamps, last_m26_timestamps, m26_n_words, m26_rows, m26_frame_status, last_completed_m26_frame_ids) + except queue.Empty: + continue + + def __del__(self): + self._raw_data_queue.close() + self._raw_data_queue.join_thread() # Needed otherwise IOError: [Errno 232] The pipe is being closed + self.stop.set() + self.p.join() + + +if __name__ == "__main__": + pass diff --git a/pymosa/online_monitor/__init__.py b/pymosa/online_monitor/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/pymosa/online_monitor/configuration.yaml b/pymosa/online_monitor/configuration.yaml new file mode 100644 index 0000000..05212f2 --- /dev/null +++ b/pymosa/online_monitor/configuration.yaml @@ -0,0 +1,74 @@ +# producer_sim : +# PYMOSA_Producer : +# backend : tcp://127.0.0.1:8500 +# delay : 0.1 +# kind : pymosa_producer_sim +# data_file: /home/silab/git/pymosa/data/telescope_data.h5 + +converter : + PYMOSA_Interpreter : + kind : pymosa_converter + frontend : tcp://127.0.0.1:8500 + backend : tcp://127.0.0.1:8700 + # analyze_m26_header_ids : [1, 2, 3, 4, 5, 6] # Specify which M26 planes should be interpreted. Default is all planes. 
+ # Pybar_Interpreter : + # kind : pybar_fei4 + # frontend : tcp://127.0.0.1:9600 + # backend : tcp://127.0.0.1:9700 + PYMOSA_Histogrammer : + kind : pymosa_histogrammer + frontend : tcp://127.0.0.1:8700 + backend : tcp://127.0.0.1:8800 + noisy_threshold : 100 +# Pybar_Histogrammer : +# kind : pybar_fei4_histogrammer +# frontend : tcp://127.0.0.1:9700 +# backend : tcp://127.0.0.1:9800 + HIT_Correlator : + kind : hit_correlator_converter + frontend : + - tcp://127.0.0.1:8700 + - tcp://127.0.0.1:9700 + backend : tcp://127.0.0.1:8900 + correlation_planes: + - name : Mimosa26 Plane 1 + dut_type : M26 + address : tcp://127.0.0.1:8700 + id : 0 + - name : Mimosa26 Plane 2 + dut_type : M26 + address : tcp://127.0.0.1:8700 + id : 1 + - name : Mimosa26 Plane 3 + dut_type : M26 + address : tcp://127.0.0.1:8700 + id : 2 + - name : Mimosa26 Plane 4 + dut_type : M26 + address : tcp://127.0.0.1:8700 + id : 3 +# - name : FE-I4 plane +# dut_type : FE-I4 +# address : tcp://127.0.0.1:9700 + +receiver : + PYMOSA_Receiver : + kind : pymosa_receiver + frontend : tcp://127.0.0.1:8800 +# Pybar_Receiver : +# kind : pybar_fei4 +# frontend : tcp://127.0.0.1:9800 + HIT_Correlator : + kind : hit_correlator_receiver + frontend : tcp://127.0.0.1:8900 + correlation_planes: + - name : Mimosa26 Plane 1 + dut_type : M26 + - name : Mimosa26 Plane 2 + dut_type : M26 + - name : Mimosa26 Plane 3 + dut_type : M26 + - name : Mimosa26 Plane 4 + dut_type : M26 +# - name : FE-I4 plane +# dut_type : FE-I4 diff --git a/pymosa/online_monitor/correlation_duts.yaml b/pymosa/online_monitor/correlation_duts.yaml new file mode 100644 index 0000000..7de3e3b --- /dev/null +++ b/pymosa/online_monitor/correlation_duts.yaml @@ -0,0 +1,17 @@ +M26 : + n_columns : 1152 + column_size : 18.4 + n_rows : 576 + row_size : 18.4 + +FE-I4 : + n_columns : 80 + column_size : 250 + n_rows : 336 + row_size : 50 + +RD53 : + n_columns : 400 + column_size : 50 + n_rows : 192 + row_size : 40 diff --git 
a/pymosa/online_monitor/hit_correlator_converter.py b/pymosa/online_monitor/hit_correlator_converter.py new file mode 100644 index 0000000..9ef6c7f --- /dev/null +++ b/pymosa/online_monitor/hit_correlator_converter.py @@ -0,0 +1,277 @@ +import os + +import yaml +import gc +import numpy as np +from numba import njit +from zmq.utils import jsonapi + +from online_monitor.converter.transceiver import Transceiver +from online_monitor.utils import utils + + +@njit +def correlate_position_on_event_number(ref_event_numbers, dut_event_numbers, ref_x_indices, ref_y_indices, dut_x_indices, dut_y_indices, x_corr_histo, y_corr_histo, transpose=False): + """Correlating the hit/cluster positions on event basis including all permutations. + The hit/cluster positions are used to fill the X and Y correlation histograms. + + Does the same as the merge of the pandas package: + df = data_1.merge(data_2, how='left', on='event_number') + df.dropna(inplace=True) + correlation_column = np.hist2d(df[column_mean_dut_0], df[column_mean_dut_x]) + correlation_row = np.hist2d(df[row_mean_dut_0], df[row_mean_dut_x]) + The following code is > 10x faster than the above code. + + Parameters + ---------- + ref_event_numbers: array + Event number array of the reference DUT. + dut_event_numbers: array + Event number array of the second DUT. + ref_x_indices: array + X position indices of the reference DUT. + ref_y_indices: array + Y position indices of the reference DUT. + dut_x_indices: array + X position indices of the second DUT. + dut_y_indices: array + Y position indices of the second DUT. + x_corr_histo: array + X correlation array (2D). + y_corr_histo: array + Y correlation array (2D). + transpose: boolean + If True transpose x/y of reference DUT. Default is False.
+ """ + dut_index = 0 + + # Loop to determine the needed result array size.astype(np.uint32) + for ref_index in range(ref_event_numbers.shape[0]): + while dut_index < dut_event_numbers.shape[0] and dut_event_numbers[dut_index] < ref_event_numbers[ref_index]: # Catch up with outer loop + dut_index += 1 + + for curr_dut_index in range(dut_index, dut_event_numbers.shape[0]): + if ref_event_numbers[ref_index] == dut_event_numbers[curr_dut_index]: + if transpose: + x_index_ref = ref_y_indices[ref_index] + y_index_ref = ref_x_indices[ref_index] + else: + x_index_ref = ref_x_indices[ref_index] + y_index_ref = ref_y_indices[ref_index] + x_index_dut = dut_x_indices[curr_dut_index] + y_index_dut = dut_y_indices[curr_dut_index] + + # Add correlation to histogram + x_corr_histo[x_index_ref, x_index_dut] += 1 + y_corr_histo[y_index_ref, y_index_dut] += 1 + else: + break + + +class HitCorrelator(Transceiver): + def setup_transceiver(self): + self.set_bidirectional_communication() # We want to be able to change the histogrammmer settings + + def setup_interpretation(self): + self.active_tab = None # Stores name of active tab in online monitor + self.hit_corr_tab = 'HIT_Correlator' # name of correlator tab, has to match with name specified in configuration.yaml for online monitor + self.start_signal = 1 # will be set in handle_command function; correlation starts if this is set to 0 + self.active_dut1 = 0 + self.active_dut2 = 0 + self.fps = 0 + self.updateTime = 0 + self.remove_background = False # Remove noisy background + self.remove_background_checkbox = 0 + self.remove_background_percentage = 99.0 + self.transpose = False # If True transpose column and row of reference DUT (first DUT) + self.transpose_checkbox = 0 + # Data buffer and correlation histogramms + self.max_buffer_size = 1000000 + self.data_buffer_dtype = np.dtype([('event_number', np.uint64), + ('column', np.uint16), + ('row', np.uint16)]) + self.corr_data_buffer = [np.zeros(shape=(self.max_buffer_size,), 
dtype=self.data_buffer_dtype), + np.zeros(shape=(self.max_buffer_size,), dtype=self.data_buffer_dtype)] # Correlation data buffer. Contains events of DUTs which should be correlated. + self.corr_data_buffer_index = [0, 0] # fill index of correlation data buffer + self.column_corr_hist = 0 # Correlation histogram for columns + self.row_corr_hist = 0 # Correlation histogram for rows + self.corr_data_buffer_filled = False # Flag indicating if correlation data buffer is filled. + + # Load correlation DUT types + config = os.path.join(os.path.dirname(__file__), 'correlation_duts.yaml') + with open(config) as f: + self.correlator_config = yaml.safe_load(f) + + def deserialize_data(self, data): # According to pyBAR data serilization + datar, meta = utils.simple_dec(data) + if 'hits' in meta: + meta['hits'] = datar + return meta + + def interpret_data(self, data): + # Since correlation is CPU intensive process, do correlation only if Correlator Tab is active + if self.active_tab != self.hit_corr_tab: + return + + # Each DUT specified in configuration will get a unique index. 
Store index of selected DUTs + self.active_duts = [self.active_dut1, self.active_dut2] + + # Do correlation only if start button is pressed, stop correlation if stop button is pressed + if self.start_signal != 0: + return + + # Show readout rate in GUI + if 'meta_data' in data[0][1]: + meta_data = data[0][1]['meta_data'] + now = float(meta_data['timestamp_stop']) + if now != self.updateTime: # FIXME: sometimes = ZeroDivisionError: because of https://github.com/SiLab-Bonn/pyBAR/issues/48 + recent_fps = 1.0 / (now - self.updateTime) # FIXME: does not show real rate, shows rate data was recorded with + self.updateTime = now + self.fps = self.fps * 0.7 + recent_fps * 0.3 + meta_data.update({'fps': self.fps}) + return [data[0][1]] + + # Loop over incoming data + for actual_dut_data in data: + # Skip meta data + if 'meta_data' in actual_dut_data[1]: + continue + if actual_dut_data[1]['hits'].shape[0] == 0: # empty array is skipped + continue + + # Separate hits by identifier and fill correlation buffers. + for i, device in enumerate(self.config['correlation_planes']): + # Check if tcp address of incoming data matches with specified tcp address. + if actual_dut_data[0] == device['address']: + # If more than one plane from the same address, use additional field 'id' to separate data of same address. + if 'id' in device: + sel = (actual_dut_data[1]['hits']['plane'] == device['id'] + 1) + actual_dut_hit_data = actual_dut_data[1]['hits'][sel] + else: + actual_dut_hit_data = actual_dut_data[1]['hits'] + # Append only hit data for duts which need to be correlated to correlation buffer. + if i in self.active_duts: + dut_index = self.active_duts.index(i) + actual_buffer_index = self.corr_data_buffer_index[dut_index] + actual_dut_correlation_data = actual_dut_hit_data[['event_number', 'column', 'row']] + # Check if correlation data can still be filled into buffer. If not replace oldest events. 
+ if actual_buffer_index + actual_dut_correlation_data.shape[0] > self.corr_data_buffer[dut_index].shape[0] - 1: + # Calculate size upto which buffer can be filled. Other data is stored at beginning of buffer. + max_data_buffer_index = self.corr_data_buffer[dut_index].shape[0] - actual_buffer_index + self.corr_data_buffer[dut_index][actual_buffer_index:] = actual_dut_correlation_data[:max_data_buffer_index] + self.corr_data_buffer[dut_index][:actual_dut_correlation_data.shape[0] - max_data_buffer_index] = actual_dut_correlation_data[max_data_buffer_index:] + self.corr_data_buffer_index[dut_index] = actual_dut_correlation_data.shape[0] - max_data_buffer_index + self.corr_data_buffer_filled = True + else: # Enough space left in buffer. Fill buffer with actual data. + self.corr_data_buffer[dut_index][actual_buffer_index:actual_buffer_index + actual_dut_correlation_data.shape[0]] = actual_dut_correlation_data + self.corr_data_buffer_index[dut_index] += actual_dut_correlation_data.shape[0] + + if self.corr_data_buffer_filled: + ref_event_numbers = self.corr_data_buffer[0]['event_number'] + dut_event_numbers = self.corr_data_buffer[1]['event_number'] + ref_x_indices = self.corr_data_buffer[0]['column'] + ref_y_indices = self.corr_data_buffer[0]['row'] + dut_x_indices = self.corr_data_buffer[1]['column'] + dut_y_indices = self.corr_data_buffer[1]['row'] + else: + ref_event_numbers = self.corr_data_buffer[0]['event_number'][:self.corr_data_buffer_index[0]] + dut_event_numbers = self.corr_data_buffer[1]['event_number'][:self.corr_data_buffer_index[1]] + ref_x_indices = self.corr_data_buffer[0]['column'][:self.corr_data_buffer_index[0]] + ref_y_indices = self.corr_data_buffer[0]['row'][:self.corr_data_buffer_index[0]] + dut_x_indices = self.corr_data_buffer[1]['column'][:self.corr_data_buffer_index[1]] + dut_y_indices = self.corr_data_buffer[1]['row'][:self.corr_data_buffer_index[1]] + + # Check if buffers are not empty + if self.corr_data_buffer_index[0] == 0 or 
self.corr_data_buffer_index[1] == 0: + return + + # Main correlation function + correlate_position_on_event_number(ref_event_numbers=ref_event_numbers, + dut_event_numbers=dut_event_numbers, + ref_x_indices=ref_x_indices, + ref_y_indices=ref_y_indices, + dut_x_indices=dut_x_indices, + dut_y_indices=dut_y_indices, + x_corr_histo=self.column_corr_hist, + y_corr_histo=self.row_corr_hist, + transpose=self.transpose) + + # Remove background function in order to exclude noisy pixels + def remove_background(cols_corr, rows_corr, percentage): + cols_corr[cols_corr < np.percentile(cols_corr, percentage)] = 0 + rows_corr[rows_corr < np.percentile(rows_corr, percentage)] = 0 + + if self.remove_background: + remove_background(self.column_corr_hist, self.row_corr_hist, self.remove_background_percentage) + + return [{'column': self.column_corr_hist, 'row': self.row_corr_hist}] + + def serialize_data(self, data): + return jsonapi.dumps(data, cls=utils.NumpyEncoder) + + def handle_command(self, command): + # Reset histogramms and data buffer, call garbage collector + def reset(): + self.column_corr_hist = np.zeros_like(self.column_corr_hist) + self.row_corr_hist = np.zeros_like(self.row_corr_hist) + self.corr_data_buffer = [np.zeros(shape=(self.max_buffer_size,), dtype=self.data_buffer_dtype), + np.zeros(shape=(self.max_buffer_size,), dtype=self.data_buffer_dtype)] + self.corr_data_buffer_index = [0, 0] + self.corr_data_buffer_filled = False + gc.collect() # garbage collector is called to free unused memory + + # Determine the needed histogramm size according to selected DUTs + def create_corr_hist(ref, dut, transpose): + n_cols_ref = self.correlator_config[self.config['correlation_planes'][ref]['dut_type']]['n_columns'] + n_rows_ref = self.correlator_config[self.config['correlation_planes'][ref]['dut_type']]['n_rows'] + n_cols_dut = self.correlator_config[self.config['correlation_planes'][dut]['dut_type']]['n_columns'] + n_rows_dut = 
self.correlator_config[self.config['correlation_planes'][dut]['dut_type']]['n_rows'] + if transpose: + self.column_corr_hist = np.zeros(shape=(n_rows_ref, n_cols_dut), dtype=np.uint32) + self.row_corr_hist = np.zeros(shape=(n_cols_ref, n_rows_dut), dtype=np.uint32) + else: + self.column_corr_hist = np.zeros(shape=(n_cols_ref, n_cols_dut), dtype=np.uint32) + self.row_corr_hist = np.zeros(shape=(n_rows_ref, n_rows_dut), dtype=np.uint32) + reset() + + # Commands + if command[0] == 'RESET': + reset() + elif 'combobox1' in command[0]: + # Get active DUT from combobox selection + self.active_dut1 = int(command[0].split()[1]) + create_corr_hist(self.active_dut1, self.active_dut2, self.transpose) + elif 'combobox2' in command[0]: + # Get active DUT from combobox selection + self.active_dut2 = int(command[0].split()[1]) + create_corr_hist(self.active_dut1, self.active_dut2, self.transpose) + elif 'START' in command[0]: + # Get status of start button. Only do correlation if start buton is pressed + self.start_signal = int(command[0].split()[1]) + create_corr_hist(self.active_dut1, self.active_dut2, self.transpose) + elif 'ACTIVETAB' in command[0]: + # Get active tab. Only do correlation if active tab is correlator tab + self.active_tab = str(command[0].split()[1]) + elif 'STOP' in command[0]: + # Get status of stop button. 
If pressed, stop correlation + self.start_signal = int(command[0].split()[1]) + 1 + reset() + elif 'BACKGROUND' in command[0]: + self.remove_background_checkbox = int(command[0].split()[1]) + if self.remove_background_checkbox == 0: + self.remove_background = False + reset() + elif self.remove_background_checkbox == 2: + self.remove_background = True + elif 'PERCENTAGE' in command[0]: + self.remove_background_percentage = float(command[0].split()[1]) + if self.remove_background: + reset() + elif 'TRANSPOSE' in command[0]: + self.transpose_checkbox = int(command[0].split()[1]) + if self.active_dut1 == 0 or self.active_dut2 == 0: + if self.transpose_checkbox == 0: + self.transpose = False + elif self.transpose_checkbox == 2: + self.transpose = True + create_corr_hist(self.active_dut1, self.active_dut2, self.transpose) diff --git a/pymosa/online_monitor/hit_correlator_receiver.py b/pymosa/online_monitor/hit_correlator_receiver.py new file mode 100644 index 0000000..dbe9b5a --- /dev/null +++ b/pymosa/online_monitor/hit_correlator_receiver.py @@ -0,0 +1,208 @@ +import os + +import yaml +import numpy as np +from PyQt5 import Qt +import pyqtgraph as pg +from pyqtgraph.Qt import QtGui +from pyqtgraph.dockarea import DockArea, Dock +from zmq.utils import jsonapi + +from online_monitor.utils import utils +from online_monitor.receiver.receiver import Receiver + + +class HitCorrelator(Receiver): + + def setup_receiver(self): + self.set_bidirectional_communication() # We want to change converter settings + # Load correlation DUT types + config = os.path.join(os.path.dirname(__file__), 'correlation_duts.yaml') + with open(config) as f: + self.correlator_config = yaml.safe_load(f) + + def setup_widgets(self, parent, name): + self.occupancy_images_columns = {} + self.occupancy_images_rows = {} + + dut_names = [] + for device in self.config['correlation_planes']: + dut_names.append(device['name']) + + dock_area = DockArea() + parent.addTab(dock_area, name) + # Send active tab index 
to converter so that it only does something when user is looking at corresponding receiver + parent.currentChanged.connect(lambda value: self.send_command('ACTIVETAB %s' % str(parent.tabText(value)))) + + dock_status = Dock("Status") + dock_status.setMinimumSize(400, 90) + dock_status.setMaximumHeight(110) + dock_select_duts = Dock("Select DUT's") + dock_select_duts.setMinimumSize(400, 90) + dock_select_duts.setMaximumHeight(110) + dock_corr_column = Dock('Column Correlation') + dock_corr_column.setMinimumSize(400, 400) + dock_corr_row = Dock('Row Correlation') + dock_corr_row.setMinimumSize(400, 400) + + cb = QtGui.QWidget() + layout0 = QtGui.QGridLayout() + cb.setLayout(layout0) + self.combobox1 = Qt.QComboBox() + self.combobox1.addItems(dut_names) + self.combobox1.setMinimumSize(100, 50) + self.combobox1.setMaximumSize(200, 50) + self.combobox2 = Qt.QComboBox() + self.combobox2.addItems(dut_names) + self.combobox2.setMinimumSize(100, 50) + self.combobox2.setMaximumSize(200, 50) + self.select_label = QtGui.QLabel('Correlate:') + self.select_label1 = QtGui.QLabel(' to ') + self.start_button = QtGui.QPushButton('Start') + self.stop_button = QtGui.QPushButton('Stop') + self.start_button.setMinimumSize(75, 38) + self.start_button.setMaximumSize(150, 38) + self.stop_button.setMinimumSize(75, 38) + self.stop_button.setMaximumSize(150, 38) + layout0.setHorizontalSpacing(25) + layout0.addWidget(self.select_label, 0, 0, 0, 1) + layout0.addWidget(self.combobox1, 0, 1, 0, 1) + layout0.addWidget(self.select_label1, 0, 2, 0, 1) + layout0.addWidget(self.combobox2, 0, 3, 0, 1) + layout0.addWidget(self.start_button, 0, 4, 0, 1) + layout0.addWidget(self.stop_button, 0, 5, 0, 1) + dock_select_duts.addWidget(cb) + self.combobox1.activated.connect(lambda value: self.send_command('combobox1 %d' % value)) + self.combobox2.activated.connect(lambda value: self.send_command('combobox2 %d' % value)) + self.start_button.clicked.connect(lambda value: self.send_command('START %d' % value)) + 
self.stop_button.clicked.connect(lambda value: self.send_command('STOP %d' % value)) + + cw = QtGui.QWidget() + layout = QtGui.QGridLayout() + cw.setLayout(layout) + reset_button = QtGui.QPushButton('Reset') + reset_button.setMinimumSize(100, 30) + reset_button.setMaximumSize(300, 30) + layout.setHorizontalSpacing(25) + layout.addWidget(reset_button, 0, 1, 0, 1) + remove_background_checkbox = QtGui.QCheckBox('Remove background:') + layout.addWidget(remove_background_checkbox, 0, 2, 1, 1) + remove_background_spinbox = QtGui.QDoubleSpinBox() + remove_background_spinbox.setRange(0.0, 100.0) + remove_background_spinbox.setValue(99.0) + remove_background_spinbox.setSingleStep(1.0) + remove_background_spinbox.setDecimals(1) + remove_background_spinbox.setPrefix('< ') + remove_background_spinbox.setSuffix(' % maximum occupancy') + layout.addWidget(remove_background_spinbox, 0, 3, 1, 1) + self.transpose_checkbox = QtGui.QCheckBox('Transpose columns and rows (of ref. plane)') + layout.addWidget(self.transpose_checkbox, 1, 3, 1, 1) + self.convert_checkbox = QtGui.QCheckBox('Axes in ' + u'\u03BC' + 'm') + layout.addWidget(self.convert_checkbox, 1, 2, 1, 1) + self.rate_label = QtGui.QLabel("Readout Rate: Hz") + layout.addWidget(self.rate_label, 0, 4, 1, 1) + dock_status.addWidget(cw) + reset_button.clicked.connect(lambda: self.send_command('RESET')) + self.transpose_checkbox.stateChanged.connect(lambda value: self.send_command('TRANSPOSE %d' % value)) + remove_background_checkbox.stateChanged.connect(lambda value: self.send_command('BACKGROUND %d' % value)) + remove_background_spinbox.valueChanged.connect(lambda value: self.send_command('PERCENTAGE %f' % value)) + # Add plot docks for column correlation + occupancy_graphics1 = pg.GraphicsLayoutWidget() + occupancy_graphics1.show() + view1 = occupancy_graphics1.addViewBox() + occupancy_img_col = pg.ImageItem(border='w') + poss = np.array([0.0, 0.01, 0.5, 1.0]) + color = np.array([[1.0, 1.0, 1.0, 1.0], [0.267004, 0.004874, 
0.329415, 1.0], [0.127568, 0.566949, 0.550556, 1.0], [0.993248, 0.906157, 0.143936, 1.0]]) # Zero is white + mapp = pg.ColorMap(poss, color) + lutt = mapp.getLookupTable(0.0, 1.0, 100) + occupancy_img_col.setLookupTable(lutt, update=True) + self.plot1 = pg.PlotWidget(viewBox=view1) + self.plot1.getAxis('bottom').setLabel(text='Columns') + self.plot1.getAxis('left').setLabel(text='Columns') + self.plot1.addItem(occupancy_img_col) + dock_corr_column.addWidget(self.plot1) + self.occupancy_images_columns = occupancy_img_col + # Add plot docks for row correlation + occupancy_graphics2 = pg.GraphicsLayoutWidget() + occupancy_graphics2.show() + view2 = occupancy_graphics2.addViewBox() + occupancy_img_rows = pg.ImageItem(border='w') + occupancy_img_rows.setLookupTable(lutt, update=True) + self.plot2 = pg.PlotWidget(viewBox=view2) + self.plot2.getAxis('bottom').setLabel(text='Rows') + self.plot2.getAxis('left').setLabel(text='Rows') + self.plot2.addItem(occupancy_img_rows) + dock_corr_row.addWidget(self.plot2) + self.occupancy_images_rows = occupancy_img_rows + dock_area.addDock(dock_status, 'top') + dock_area.addDock(dock_select_duts, 'left') + dock_area.addDock(dock_corr_column, 'bottom') + dock_area.addDock(dock_corr_row, 'right', dock_corr_column) + + def scale_and_label_axes(scale_state, dut1, dut2, transpose_state): + ''' Rescale axis and change labels (according to tranpose and scale option). 
+ ''' + dut1_name = self.config['correlation_planes'][dut1]['name'] + dut2_name = self.config['correlation_planes'][dut2]['name'] + if scale_state == 0: # Column/Row scaling + self.plot1.getAxis('bottom').setScale(1.0) + self.plot1.getAxis('left').setScale(1.0) + self.plot2.getAxis('bottom').setScale(1.0) + self.plot2.getAxis('left').setScale(1.0) + self.plot1.getAxis('bottom').setTickSpacing() + self.plot1.getAxis('left').setTickSpacing() + self.plot2.getAxis('bottom').setTickSpacing() + self.plot2.getAxis('left').setTickSpacing() + if transpose_state == 0: # False + self.plot1.getAxis('bottom').setLabel(text=dut1_name + ' Columns') + self.plot2.getAxis('bottom').setLabel(text=dut1_name + ' Rows') + elif transpose_state == 2: # True + self.plot1.getAxis('bottom').setLabel(text=dut1_name + ' Rows') + self.plot2.getAxis('bottom').setLabel(text=dut1_name + ' Columns') + + self.plot1.getAxis('left').setLabel(text=dut2_name + ' Columns') + self.plot2.getAxis('left').setLabel(text=dut2_name + ' Rows') + + elif scale_state == 2: # um scaling + col_size_dut_1 = self.correlator_config[self.config['correlation_planes'][dut1]['dut_type']]['column_size'] + row_size_dut_1 = self.correlator_config[self.config['correlation_planes'][dut1]['dut_type']]['row_size'] + col_size_dut_2 = self.correlator_config[self.config['correlation_planes'][dut1]['dut_type']]['column_size'] + row_size_dut_2 = self.correlator_config[self.config['correlation_planes'][dut1]['dut_type']]['row_size'] + if transpose_state == 0: # False + self.plot1.getAxis('bottom').setScale(row_size_dut_1) + self.plot2.getAxis('bottom').setScale(col_size_dut_1) + self.plot1.getAxis('left').setScale(col_size_dut_2) + self.plot2.getAxis('left').setScale(row_size_dut_2) + self.plot1.getAxis('bottom').setLabel(text=dut1_name + ' Columns / ' + u'\u03BC' + 'm') + self.plot2.getAxis('bottom').setLabel(text=dut1_name + ' Rows / ' + u'\u03BC' + 'm') + self.plot1.getAxis('left').setLabel(text=dut2_name + ' Columns / ' + u'\u03BC' 
+ 'm') + self.plot2.getAxis('left').setLabel(text=dut2_name + ' Rows / ' + u'\u03BC' + 'm') + elif transpose_state == 2: # True + self.plot1.getAxis('bottom').setScale(col_size_dut_1) + self.plot2.getAxis('bottom').setScale(row_size_dut_1) + self.plot1.getAxis('left').setScale(col_size_dut_2) + self.plot2.getAxis('left').setScale(row_size_dut_2) + self.plot1.getAxis('bottom').setLabel(text=dut1_name + ' Rows / ' + u'\u03BC' + 'm') + self.plot2.getAxis('bottom').setLabel(text=dut1_name + ' Columns / ' + u'\u03BC' + 'm') + self.plot1.getAxis('left').setLabel(text=dut2_name + ' Columns / ' + u'\u03BC' + 'm') + self.plot2.getAxis('left').setLabel(text=dut2_name + ' Rows / ' + u'\u03BC' + 'm') + + self.convert_checkbox.stateChanged.connect(lambda value: scale_and_label_axes(value, self.combobox1.currentIndex(), self.combobox2.currentIndex(), self.transpose_checkbox.checkState())) + self.combobox1.activated.connect(lambda value: scale_and_label_axes(self.convert_checkbox.checkState(), value, self.combobox2.currentIndex(), self.transpose_checkbox.checkState())) + self.combobox2.activated.connect(lambda value: scale_and_label_axes(self.convert_checkbox.checkState(), self.combobox1.currentIndex(), value, self.transpose_checkbox.checkState())) + self.transpose_checkbox.stateChanged.connect(lambda value: scale_and_label_axes(self.convert_checkbox.checkState(), self.combobox1.currentIndex(), self.combobox2.currentIndex(), value)) + + def deserialize_data(self, data): + return jsonapi.loads(data, object_hook=utils.json_numpy_obj_hook) + + def handle_data(self, data): + if 'meta_data' not in data: + for key in data: + if 'column' == key: + self.occupancy_images_columns.setImage(data[key][:, :], autoDownsample=True) + self.plot1.setTitle('Column Correlation, Sum: %i' % data[key][:, :].sum()) + if 'row' == key: + self.occupancy_images_rows.setImage(data[key][:, :], autoDownsample=True) + self.plot2.setTitle('Row Correlation, Sum: %i' % data[key][:, :].sum()) + else: + 
self.rate_label.setText('Readout Rate: %d Hz' % data['meta_data']['fps']) diff --git a/pymosa/online_monitor/pymosa_converter.py b/pymosa/online_monitor/pymosa_converter.py new file mode 100644 index 0000000..b6fc9d2 --- /dev/null +++ b/pymosa/online_monitor/pymosa_converter.py @@ -0,0 +1,56 @@ +import numpy as np +from zmq.utils import jsonapi + +from online_monitor.converter.transceiver import Transceiver +from online_monitor.utils import utils +from pyBAR_mimosa26_interpreter import raw_data_interpreter + + +class PymosaMimosa26(Transceiver): + + def setup_interpretation(self): + analyze_m26_header_ids = self.config.get('analyze_m26_header_ids', [1, 2, 3, 4, 5, 6]) + self._raw_data_interpreter = raw_data_interpreter.RawDataInterpreter(analyze_m26_header_ids=analyze_m26_header_ids) + self.n_hits = 0 + self.n_events = 0 + + def deserialize_data(self, data): # According to pyBAR data serilization + try: + self.meta_data = jsonapi.loads(data) + except ValueError: + try: + dtype = self.meta_data.pop('dtype') + shape = self.meta_data.pop('shape') + if self.meta_data: + try: + raw_data_array = np.frombuffer(data, dtype=dtype).reshape(shape) + return raw_data_array + except (KeyError, ValueError): # KeyError happens if meta data read is omitted; ValueError if np.frombuffer fails due to wrong shape + return None + except AttributeError: # Happens if first data is not meta data + return None + return {'meta_data': self.meta_data} + + def interpret_data(self, data): + if isinstance(data[0][1], dict): # Meta data is omitted, only raw data is interpreted + # Add info to meta data + data[0][1]['meta_data'].update({'n_hits': self.n_hits, 'n_events': self.n_events}) + return [data[0][1]] + hits = self._raw_data_interpreter.interpret_raw_data(raw_data=data[0][1]) + + interpreted_data = { + 'hits': hits + } + + self.n_hits = hits.shape[0] + self.n_events = np.unique(hits['event_number']).shape[0] + + return [interpreted_data] + + def serialize_data(self, data): + if 'hits' in 
data: + hits_data = data['hits'] + data['hits'] = None + return utils.simple_enc(hits_data, data) + else: + return utils.simple_enc(None, data) diff --git a/pymosa/online_monitor/pymosa_histogrammer.py b/pymosa/online_monitor/pymosa_histogrammer.py new file mode 100644 index 0000000..93aae7c --- /dev/null +++ b/pymosa/online_monitor/pymosa_histogrammer.py @@ -0,0 +1,134 @@ +''' Histograms the Mimosa26 hit table''' + +import numpy as np +from numba import njit + +# Online monitor imports +from online_monitor.converter.transceiver import Transceiver +from online_monitor.utils import utils + + +@njit +def create_occupancy_hist(hist, hits): + for hit_index in range(hits.shape[0]): + col = hits[hit_index]['column'] + row = hits[hit_index]['row'] + plane_id = hits[hit_index]['plane'] + hist[plane_id - 1, col, row] += 1 + + +@njit +def create_event_status_hist(hist, hits): + for hit_index in range(hits.shape[0]): + event_status = hits[hit_index]['event_status'] + plane = hits[hit_index]['plane'] + for i in range(32): + if event_status & (1 << i): + hist[plane - 1][i] += 1 + + +class PymosaMimosa26Histogrammer(Transceiver): + + def setup_transceiver(self): + self.set_bidirectional_communication() # We want to be able to change the histogrammmer settings + + def setup_interpretation(self): + self.occupancy_arrays = np.zeros(shape=(6, 1152, 576), dtype=np.int32) + self.event_status_hist = np.zeros(shape=(6, 32), dtype=np.int32) + # Variables + self.n_readouts = 0 + self.readout = 0 + self.ts_last_readout = 0 # timestamp of last readout + self.fps = 0 # data frames per second + self.hps = 0 # hits per second + self.eps = 0 # events per second + self.plot_delay = 0 + self.total_hits = 0 + self.total_events = 0 + self.updateTime = 0 + self.mask_noisy_pixel = False + + def deserialize_data(self, data): + # return jsonapi.loads(data, object_hook=utils.json_numpy_obj_hook) + datar, meta = utils.simple_dec(data) + if 'hits' in meta: + meta['hits'] = datar + return meta + + def 
interpret_data(self, data): + if 'meta_data' in data[0][1]: + meta_data = data[0][1]['meta_data'] + total_hits_now = meta_data['n_hits'] + self.hits_last_readout = total_hits_now + total_events_now = meta_data['n_events'] + self.events_last_readout = total_events_now + ts_now = float(meta_data['timestamp_stop']) + # Calculate readout per second with smoothing + recent_fps = 1.0 / (ts_now - self.ts_last_readout) + self.fps = self.fps * 0.95 + recent_fps * 0.05 + # Calulate hits per second with smoothing + recent_hps = self.hits_last_readout * recent_fps + self.hps = self.hps * 0.95 + recent_hps * 0.05 + # Calulate hits per second with smoothing + recent_eps = self.events_last_readout * recent_fps + self.eps = self.eps * 0.95 + recent_eps * 0.05 + + self.ts_last_readout = ts_now + self.total_hits += total_hits_now + self.total_events += total_events_now + + meta_data.update({'fps': self.fps, 'hps': self.hps, 'total_hits': self.total_hits, 'eps': self.eps, 'total_events': self.total_events}) + return [data[0][1]] + + self.readout += 1 + + if self.n_readouts != 0: + if self.readout % self.n_readouts == 0: + self.occupancy_arrays = np.zeros(shape=(6, 1152, 576), dtype=np.int32) # Reset occ hists + self.event_status_hist = np.zeros(shape=(6, 32), dtype=np.int32) # Reset event status hists + self.readouts = 0 + hits = data[0][1]['hits'] + + if hits.shape[0] == 0: # Empty array + return + + # Create histograms + create_occupancy_hist(self.occupancy_arrays, hits) + create_event_status_hist(self.event_status_hist, hits) + + # Mask Noisy pixels + if self.mask_noisy_pixel: + for plane in range(6): + self.occupancy_arrays[plane, self.occupancy_arrays[plane, :, :] > self.config['noisy_threshold']] = 0 + + histogrammed_data = { + 'occupancies': self.occupancy_arrays, + 'event_status': self.event_status_hist + } + + return [histogrammed_data] + + def serialize_data(self, data): + # return jsonapi.dumps(data, cls=utils.NumpyEncoder) + if 'occupancies' in data: + hits_data = 
data['occupancies'] + data['occupancies'] = None + return utils.simple_enc(hits_data, data) + else: + return utils.simple_enc(None, data) + + def handle_command(self, command): + if command[0] == 'RESET': + self.occupancy_arrays = np.zeros(shape=(6, 1152, 576), dtype=np.int32) # Reset occ hists + self.event_status_hist = np.zeros(shape=(6, 32), dtype=np.int32) # Reset event status hists + self.total_hits = 0 + self.total_events = 0 + elif 'MASK' in command[0]: + if '0' in command[0]: + self.mask_noisy_pixel = False + else: + self.mask_noisy_pixel = True + else: + self.n_readouts = int(command[0]) + self.occupancy_arrays = np.zeros(shape=(6, 1152, 576), dtype=np.int32) # Reset occ hists + self.event_status_hist = np.zeros(shape=(6, 32), dtype=np.int32) # Reset event status hists diff --git a/pymosa/online_monitor/pymosa_producer_sim.py b/pymosa/online_monitor/pymosa_producer_sim.py new file mode 100644 index 0000000..aa54b02 --- /dev/null +++ b/pymosa/online_monitor/pymosa_producer_sim.py @@ -0,0 +1,91 @@ +''' This is a producer faking data coming from Pymosa by taking real data and sending these in chunks''' + +import logging +import time + +import numpy as np +import tables as tb +import zmq + +from online_monitor.utils.producer_sim import ProducerSim + + +class Pymosa(ProducerSim): + + def setup_producer_device(self): + ProducerSim.setup_producer_device(self) + self.in_file_h5 = tb.open_file(self.config['data_file'], mode="r") + self.meta_data = self.in_file_h5.root.meta_data[:] + self.raw_data = self.in_file_h5.root.raw_data + self.n_readouts = self.meta_data.shape[0] + self.total_data = 0 # amount of replayed data in MB + self.time_start = time.time() # calculate duration of replay + self.time_end = 0 # calculate duration of replay + + try: + self.scan_parameter_name = self.in_file_h5.root.scan_parameters.dtype.names + self.scan_parameters = self.in_file_h5.root.scan_parameters[:] + except tb.NoSuchNodeError: + self.scan_parameter_name = 'No parameter' + 
self.scan_parameters = None + + self.readout_word_indeces = np.column_stack((self.meta_data['index_start'], self.meta_data['index_stop'])) + self.actual_readout = 0 + self.last_readout_time = None + + def get_data(self): # Return the data of one readout + if self.actual_readout < self.n_readouts: + index_start, index_stop = self.readout_word_indeces[self.actual_readout] + data = [] + data.append(self.raw_data[index_start:index_stop]) + data.extend((float(self.meta_data[self.actual_readout]['timestamp_start']), + float(self.meta_data[self.actual_readout]['timestamp_stop']), + int(self.meta_data[self.actual_readout]['error']))) + + # FIXME: Simple syncronization to replay with similar timing, does not really work + now = time.time() + if self.last_readout_time is not None: + delay = now - self.last_readout_time + additional_delay = self.meta_data[self.actual_readout]['timestamp_stop'] - self.meta_data[self.actual_readout]['timestamp_start'] - delay + if additional_delay > 0: + time.sleep(additional_delay) + self.last_readout_time = now + + if self.scan_parameters is not None: + return data, {str(self.scan_parameter_name): int(self.scan_parameters[self.actual_readout][0])} + else: + return data, {'No parameter': 0} + + def send_data(self): + '''Sends the data of every read out (raw data and meta data) via ZeroMQ to a specified socket + ''' + time.sleep(float(self.config['delay'])) # Delay is given in seconds + + try: + data, scan_parameters = self.get_data() # Get data of actual readout + except TypeError: # Data is fully replayes + self.time_end = time.time() + logging.warning('%s producer: No data to replay anymore! 
Data send in %.2f s is %.2f MB' % (self.name, self.time_end - self.time_start, self.total_data / (1024.0 ** 2))) # show amount of sent data after replay ended + time.sleep(10) + return + + self.actual_readout += 1 + + data_meta_data = dict( + name='ReadoutData', + dtype=str(data[0].dtype), + shape=data[0].shape, + timestamp_start=data[1], # float + timestamp_stop=data[2], # float + readout_error=data[3], # int + scan_parameters=scan_parameters # dict + ) + try: + self.total_data += data[0].nbytes # sum up sent data packages + self.sender.send_json(data_meta_data, flags=zmq.SNDMORE | zmq.NOBLOCK) + self.sender.send(data[0], flags=zmq.NOBLOCK) # PyZMQ supports sending numpy arrays without copying any data + except zmq.Again: + pass + + def __del__(self): + self.in_file_h5.close() diff --git a/pymosa/online_monitor/pymosa_receiver.py b/pymosa/online_monitor/pymosa_receiver.py new file mode 100644 index 0000000..00996fd --- /dev/null +++ b/pymosa/online_monitor/pymosa_receiver.py @@ -0,0 +1,169 @@ +import time + +import numpy as np +from PyQt5 import Qt +import pyqtgraph as pg +from pyqtgraph.Qt import QtGui +import pyqtgraph.ptime as ptime +from pyqtgraph.dockarea import DockArea, Dock + +from online_monitor.receiver.receiver import Receiver +from online_monitor.utils import utils + + +class PymosaMimosa26(Receiver): + + def setup_receiver(self): + self.set_bidirectional_communication() # We want to change converter settings + + def setup_widgets(self, parent, name): + dock_area = DockArea() + parent.addTab(dock_area, name) + # Occupancy Docks + self.occupancy_images = [] + self.event_status_plots = [] + # Plots with axis stored in here + self.plots = [] + self.event_status_widgets = [] + poss = np.array([0.0, 0.01, 0.5, 1.0]) + color = np.array([[1.0, 1.0, 1.0, 1.0], [0.267004, 0.004874, 0.329415, 1.0], [0.127568, 0.566949, 0.550556, 1.0], [0.993248, 0.906157, 0.143936, 1.0]]) # Zero is white + mapp = pg.ColorMap(poss, color) + lutt = mapp.getLookupTable(0.0, 1.0, 
100) + + self.occ_hist_sum = np.zeros(shape=(6,)) + for plane in range(3): # Loop over 3 * 2 plot widgets + # Dock left + dock_occcupancy = Dock("Occupancy plane %d" % (2 * plane + 1), size=(100, 150)) + dock_event_status = Dock("Event status plane %d" % (2 * plane + 1), size=(100, 50)) + if plane > 0: + dock_area.addDock(dock_occcupancy, 'bottom') + else: + dock_area.addDock(dock_occcupancy, 'left') + dock_area.addDock(dock_event_status, 'right', dock_occcupancy) + occupancy_graphics = pg.GraphicsLayoutWidget() # Plot docks + occupancy_graphics.show() + view = occupancy_graphics.addViewBox() + self.occupancy_images.append(pg.ImageItem(border='w')) + view.addItem(self.occupancy_images[2 * plane]) + self.occupancy_images[2 * plane].setLookupTable(lutt, update=True) + self.plots.append(pg.PlotWidget(viewBox=view, labels={'bottom': 'Column', 'left': 'Row'}, title='Occupancy Map, Sum: %i' % self.occ_hist_sum[2 * plane])) + self.plots[2 * plane].addItem(self.occupancy_images[2 * plane]) + dock_occcupancy.addWidget(self.plots[2 * plane]) + +# event_status_widget = pg.PlotWidget() +# self.event_status_plots.append(event_status_widget.plot(np.linspace(-0.5, 15.5, 17), np.zeros((16)), stepMode=True)) +# event_status_widget.showGrid(y=True) +# dock_event_status.addWidget(event_status_widget) + + self.event_status_widgets.append(pg.PlotWidget()) + self.event_status_plots.append(self.event_status_widgets[2 * plane].plot(np.linspace(-0.5, 15.5, 17), np.zeros((16)), stepMode=True)) + self.event_status_widgets[2 * plane].showGrid(y=True) + dock_event_status.addWidget(self.event_status_widgets[2 * plane]) + + # Dock right + dock_occcupancy_2 = Dock("Occupancy plane %d" % (2 * plane + 2), size=(100, 150)) + dock_event_status_2 = Dock("Event status plane %d" % (2 * plane + 2), size=(100, 50)) + dock_area.addDock(dock_occcupancy_2, 'right', dock_event_status) + dock_area.addDock(dock_event_status_2, 'right', dock_occcupancy_2) + occupancy_graphics = pg.GraphicsLayoutWidget() # Plot 
docks + occupancy_graphics.show() + view = occupancy_graphics.addViewBox() + self.occupancy_images.append(pg.ImageItem(border='w')) + view.addItem(self.occupancy_images[2 * plane + 1]) + self.occupancy_images[2 * plane + 1].setLookupTable(lutt, update=True) + self.plots.append(pg.PlotWidget(viewBox=view, labels={'bottom': 'Column', 'left': 'Row'}, title='Occupancy Map, Sum: %i' % self.occ_hist_sum[2 * plane + 1])) + self.plots[2 * plane + 1].addItem(self.occupancy_images[2 * plane + 1]) + dock_occcupancy_2.addWidget(self.plots[2 * plane + 1]) + + self.event_status_widgets.append(pg.PlotWidget()) + self.event_status_plots.append(self.event_status_widgets[2 * plane + 1].plot(np.linspace(-0.5, 15.5, 17), np.zeros((16)), stepMode=True)) + self.event_status_widgets[2 * plane + 1].showGrid(y=True) + # self.event_status_widgets[2 * plane + 1].setLogMode(y=True) + dock_event_status_2.addWidget(self.event_status_widgets[2 * plane + 1]) + + dock_status = Dock("Status", size=(800, 40)) + dock_area.addDock(dock_status, 'top') + + # Status dock on top + cw = QtGui.QWidget() + cw.setStyleSheet("QWidget {background-color:white}") + layout = QtGui.QGridLayout() + cw.setLayout(layout) + self.rate_label = QtGui.QLabel("Readout Rate\n0 Hz") + self.hit_rate_label = QtGui.QLabel("Hit Rate\n0 Hz") + self.event_rate_label = QtGui.QLabel("Event Rate\n0 Hz") + self.timestamp_label = QtGui.QLabel("Data Timestamp\n") + self.plot_delay_label = QtGui.QLabel("Plot Delay\n") + self.scan_parameter_label = QtGui.QLabel("Scan Parameters\n") + self.spin_box = Qt.QSpinBox(value=0) + self.spin_box.setMaximum(1000000) + self.spin_box.setSuffix(" Readouts") + self.reset_button = QtGui.QPushButton('Reset') + self.noisy_checkbox = QtGui.QCheckBox('Mask noisy pixels') + self.convert_checkbox = QtGui.QCheckBox('Axes in ' + u'\u03BC' + 'm') + layout.addWidget(self.timestamp_label, 0, 0, 0, 1) + layout.addWidget(self.plot_delay_label, 0, 1, 0, 1) + layout.addWidget(self.rate_label, 0, 2, 0, 1) + 
layout.addWidget(self.hit_rate_label, 0, 3, 0, 1) + layout.addWidget(self.event_rate_label, 0, 4, 0, 1) + layout.addWidget(self.scan_parameter_label, 0, 5, 0, 1) + layout.addWidget(self.spin_box, 0, 6, 0, 1) + layout.addWidget(self.noisy_checkbox, 0, 7, 0, 1) + layout.addWidget(self.convert_checkbox, 0, 8, 0, 1) + layout.addWidget(self.reset_button, 0, 9, 0, 1) + dock_status.addWidget(cw) + + # Connect widgets + self.reset_button.clicked.connect(lambda: self.send_command('RESET')) + self.spin_box.valueChanged.connect(lambda value: self.send_command(str(value))) + self.noisy_checkbox.stateChanged.connect(lambda value: self.send_command('MASK %d' % value)) + + # Change axis scaling + def scale_axes(scale_state): + for plot in self.plots: + if scale_state == 0: + plot.getAxis('bottom').setScale(1.0) + plot.getAxis('left').setScale(1.0) + plot.getAxis('bottom').setLabel('Columns') + plot.getAxis('left').setLabel('Rows') + elif scale_state == 2: + plot.getAxis('bottom').setScale(18.4) + plot.getAxis('left').setScale(18.4) + plot.getAxis('bottom').setLabel('Columns / ' + u'\u03BC' + 'm') + plot.getAxis('left').setLabel('Rows / ' + u'\u03BC' + 'm') + + self.convert_checkbox.stateChanged.connect(lambda value: scale_axes(value)) + self.plot_delay = 0 + + def deserialize_data(self, data): + + datar, meta = utils.simple_dec(data) + if 'occupancies' in meta: + meta['occupancies'] = datar + return meta + + def handle_data(self, data): + def update_rate(fps, hps, recent_total_hits, eps, recent_total_events): + self.rate_label.setText("Readout Rate\n%d Hz" % fps) + if self.spin_box.value() == 0: # show number of hits, all hits are integrated + self.hit_rate_label.setText("Total Hits\n%d" % int(recent_total_hits)) + else: + self.hit_rate_label.setText("Hit Rate\n%d Hz" % int(hps)) + if self.spin_box.value() == 0: # show number of events + self.event_rate_label.setText("Total Events\n%d" % int(recent_total_events)) + else: + self.event_rate_label.setText("Event Rate\n%d Hz" % 
int(eps)) + + if 'meta_data' not in data: + for plane, plot in enumerate(self.plots): + self.occupancy_images[plane].setImage(data['occupancies'][plane], autoDownsample=True) + self.occ_hist_sum[plane] = data['occupancies'][plane].sum() + self.event_status_plots[plane].setData(x=np.linspace(-0.5, 31.5, 33), y=data['event_status'][plane], stepMode=True, fillLevel=0, brush=(0, 0, 255, 150)) + plot.setTitle('Occupancy Map, Sum: %i' % self.occ_hist_sum[plane]) + else: + update_rate(data['meta_data']['fps'], data['meta_data']['hps'], data['meta_data']['total_hits'], data['meta_data']['eps'], data['meta_data']['total_events']) + self.timestamp_label.setText("Data Timestamp\n%s" % time.asctime(time.localtime(data['meta_data']['timestamp_stop']))) + self.scan_parameter_label.setText("Scan Parameters\n%s" % ', '.join('%s: %s' % (str(key), str(val)) for key, val in data['meta_data']['scan_parameters'].items())) + now = ptime.time() + self.plot_delay = self.plot_delay * 0.9 + (now - data['meta_data']['timestamp_stop']) * 0.1 + self.plot_delay_label.setText("Plot Delay\n%s" % 'not realtime' if abs(self.plot_delay) > 5 else "%1.2f ms" % (self.plot_delay * 1.e3)) diff --git a/pymosa/online_monitor/pymosa_sender.py b/pymosa/online_monitor/pymosa_sender.py new file mode 100644 index 0000000..c48c8a9 --- /dev/null +++ b/pymosa/online_monitor/pymosa_sender.py @@ -0,0 +1,57 @@ +import logging + +import zmq + + +def init(socket_address="tcp://127.0.0.1:5500"): + logging.info('Creating ZMQ context') + context = zmq.Context() + logging.info('Creating socket connection to server %s', socket_address) + socket = context.socket(zmq.PUB) # publisher socket + socket.bind(socket_address) + # send reset to indicate a new scan + send_meta_data(socket, None, name='Reset') + return socket + + +def send_meta_data(socket, conf, name): + '''Sends the config via ZeroMQ to a specified socket. Is called at the beginning of a run and when the config changes. Conf can be any config dictionary. 
+ ''' + meta_data = dict( + name=name, + conf=conf + ) + try: + socket.send_json(meta_data, flags=zmq.NOBLOCK) + except zmq.Again: + pass + + +def send_data(socket, data, len_raw_data, scan_parameters={}, name='ReadoutData'): + '''Sends the data of every read out (raw data and meta data) via ZeroMQ to a specified socket + ''' + if not scan_parameters: + scan_parameters = {} + data_meta_data = dict( + name=name, + dtype=str(data[0].dtype), + shape=data[0].shape, + data_length=len_raw_data, # data length + timestamp_start=data[1], # float + timestamp_stop=data[2], # float + readout_error=data[3], # int + skipped_triggers=data[4], # skipped trigger counter + scan_parameters=scan_parameters # dict + ) + try: + socket.send_json(data_meta_data, flags=zmq.SNDMORE | zmq.NOBLOCK) + # PyZMQ supports sending numpy arrays without copying any data + socket.send(data[0], flags=zmq.NOBLOCK) + except zmq.Again: + pass + + +def close(socket): + if socket is not None: + logging.info('Closing socket connection') + socket.close() # close here, do not wait for garbage collector diff --git a/pymosa/online_monitor/start_pymosa_online_monitor.py b/pymosa/online_monitor/start_pymosa_online_monitor.py new file mode 100644 index 0000000..e4959dd --- /dev/null +++ b/pymosa/online_monitor/start_pymosa_online_monitor.py @@ -0,0 +1,80 @@ +#!/usr/bin/env python +''' Entry point to simplify the usage from command line for +the online_monitor with pymosa plugins. Not really needed +start_online_monitor config.yaml would also work... +''' +import logging +import sys +import os +import subprocess + +import psutil +from PyQt5 import Qt + +from online_monitor.OnlineMonitor import OnlineMonitorApplication +from online_monitor.utils import utils + + +def kill(proc): + ''' Kill process by id, including subprocesses. 
+ + Works for Linux and Windows + ''' + process = psutil.Process(proc.pid) + for child_proc in process.children(recursive=True): + child_proc.kill() + process.kill() + + +def run_script_in_shell(script, arguments, command=None): + if os.name == 'nt': + creationflags = subprocess.CREATE_NEW_PROCESS_GROUP + else: + creationflags = 0 + return subprocess.Popen("%s %s %s" % ('python' if not command else command, + script, arguments), shell=True, + creationflags=creationflags) + + +def main(): + if sys.argv[1:]: + args = utils.parse_arguments() + else: # no config yaml provided -> start online monitor with std. settings + class Dummy(object): + def __init__(self): + folder = os.path.dirname(os.path.realpath(__file__)) + self.config_file = os.path.join(folder, r'configuration.yaml') + self.log = 'INFO' + args = Dummy() + logging.info('No configuration file provided! Use std. settings!') + + utils.setup_logging(args.log) + + # Start the producer + producer_manager_process = run_script_in_shell('', + args.config_file, + 'start_producer_sim') + # Start the converter + converter_manager_process = run_script_in_shell('', + args.config_file, + 'start_converter') + +# Helper function to run code after OnlineMonitor Application exit + def appExec(): + app.exec_() + # Stop other processes + try: + kill(converter_manager_process) + kill(producer_manager_process) + # If the process was never started it cannot be killed + except psutil.NoSuchProcess: + pass + # Start the online monitor + app = Qt.QApplication(sys.argv) + win = OnlineMonitorApplication(args.config_file) + win.show() + sys.exit(appExec()) + + +if __name__ == '__main__': + main() diff --git a/pymosa/plotting.py b/pymosa/plotting.py new file mode 100644 index 0000000..a533edc --- /dev/null +++ b/pymosa/plotting.py @@ -0,0 +1,70 @@ +import numpy as np +from matplotlib import colors, cm +from matplotlib.figure import Figure +from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas +from 
mpl_toolkits.axes_grid1 import make_axes_locatable + + +def plot_occupancy(hist, title='Occupancy', z_label='# of hits', z_min=None, z_max=None, output_pdf=None): + if z_max == 'median': + z_max = 2 * np.ma.median(hist) + elif z_max == 'maximum': + z_max = np.ma.max(hist) + elif z_max is None: + z_max = np.percentile(hist, q=90) + if np.any(hist > z_max): + z_max = 1.1 * z_max + if z_max < 1 or hist.all() is np.ma.masked: + z_max = 1.0 + + if z_min is None: + z_min = np.ma.min(hist) + if z_min == z_max or hist.all() is np.ma.masked: + z_min = 0 + + fig = Figure() + FigureCanvas(fig) + ax = fig.add_subplot(111) + + ax.set_adjustable('box') + extent = [0.5, 1152.5, 576.5, 0.5] + bounds = np.linspace(start=z_min, stop=z_max + 1, num=255, endpoint=True) + cmap = cm.get_cmap('plasma') + cmap.set_bad('w') + cmap.set_over('r') # Make noisy pixels red + norm = colors.BoundaryNorm(bounds, cmap.N) + + im = ax.imshow(hist, interpolation='none', aspect='equal', cmap=cmap, norm=norm, extent=extent) # TODO: use pcolor or pcolormesh + ax.set_ylim((576.5, 0.5)) + ax.set_xlim((0.5, 1152.5)) + ax.set_title(title + r' ($\Sigma$ = {0})'.format((0 if hist.all() is np.ma.masked else np.ma.sum(hist)))) + ax.set_xlabel('Column') + ax.set_ylabel('Row') + + divider = make_axes_locatable(ax) + pad = 0.6 + + cax = divider.append_axes("bottom", size="5%", pad=pad) + cb = fig.colorbar(im, cax=cax, ticks=np.linspace(start=z_min, stop=z_max, num=10, endpoint=True), orientation='horizontal') + cax.set_xticklabels([int(round(float(x.get_text().replace('\u2212', '-').encode('utf8')))) for x in cax.xaxis.get_majorticklabels()]) + cb.set_label(z_label) + output_pdf.savefig(fig, bbox_inches='tight') + + +def plot_noise_tuning_result(fake_hit_rate, fake_hit_rate_spec=None, output_pdf=None): + cmap = cm.get_cmap('tab20c') + colors = [cmap.colors[0], cmap.colors[1], cmap.colors[2], cmap.colors[3]] + markers = ['o', 's', 'p', 'P', '^', '*'] + fig = Figure() + FigureCanvas(fig) + ax = fig.add_subplot(111) + 
for plane in range(6): + for region in range(4): + ax.plot(fake_hit_rate[:, plane, region], ls='--', marker=markers[plane], color=colors[region]) + if fake_hit_rate_spec is not None: + ax.axhline(fake_hit_rate_spec, color='grey', ls='--', lw=1.5) + ax.set_yscale('log') + ax.set_ylabel('Average fake hit rate / pixel / 115.2 us') + ax.set_xlabel('Iteration') + ax.grid() + output_pdf.savefig(fig, bbox_inches='tight') diff --git a/pymosa/tests/test_monitor.py b/pymosa/tests/test_monitor.py new file mode 100644 index 0000000..deb49b0 --- /dev/null +++ b/pymosa/tests/test_monitor.py @@ -0,0 +1,206 @@ +''' Script to check the pymosa modules for the online monitor + + Simulation producer, interpreter converter and receiver. +''' + +import os +import sys +import unittest +import yaml +import time +import psutil +from PyQt5.QtWidgets import QApplication + +from online_monitor import OnlineMonitor + +import pymosa +from pymosa.online_monitor import start_pymosa_online_monitor + +pymosa_path = os.path.dirname(pymosa.__file__) +data_folder = os.path.abspath(os.path.join(pymosa_path, '..', 'data')) + + +# Create online monitor yaml config with pymosa monitor entities +def create_config_yaml(): + conf = {} + # Add producer + devices = {} + devices['PYMOSA_Producer'] = {'backend': 'tcp://127.0.0.1:8500', + 'kind': 'pymosa_producer_sim', + 'delay': 0.1, + 'data_file': os.path.join(data_folder, 'telescope_data.h5') + } + conf['producer_sim'] = devices + # Add converter + devices = {} + devices['PYMOSA_Interpreter'] = {'kind': 'pymosa_converter', + 'frontend': 'tcp://127.0.0.1:8500', + 'backend': 'tcp://127.0.0.1:8700' + } + devices['PYMOSA_Histogrammer'] = {'kind': 'pymosa_histogrammer', + 'frontend': 'tcp://127.0.0.1:8700', + 'backend': 'tcp://127.0.0.1:8800' + } + devices['HIT_Correlator'] = {'kind': 'hit_correlator_converter', + 'frontend': 'tcp://127.0.0.1:8700', + 'backend': 'tcp://127.0.0.1:8900', + 'correlation_planes': [{'name': 'Mimosa26 Plane 1', 'dut_type': 'M26', 
'address': 'tcp://127.0.0.1:8700', 'id': 0}, + {'name': 'Mimosa26 Plane 2', 'dut_type': 'M26', 'address': 'tcp://127.0.0.1:8700', 'id': 1}] + } + + conf['converter'] = devices + # Add receiver + devices = {} + devices['PYMOSA_Receiver'] = {'kind': 'pymosa_receiver', + 'frontend': 'tcp://127.0.0.1:8800' + } + devices['HIT_Correlator'] = {'kind': 'hit_correlator_receiver', + 'frontend': 'tcp://127.0.0.1:8900', + 'correlation_planes': [{'name': 'Mimosa26 Plane 1', 'dut_type': 'M26'}, + {'name': 'Mimosa26 Plane 2', 'dut_type': 'M26'}] + } + conf['receiver'] = devices + return yaml.dump(conf, default_flow_style=False) + + +def get_python_processes(): # return the number of python processes + n_python = 0 + for proc in psutil.process_iter(): + try: + if 'python' in proc.name(): + n_python += 1 + except psutil.AccessDenied: + pass + return n_python + + +class TestOnlineMonitor(unittest.TestCase): + + @classmethod + def setUpClass(cls): + with open('tmp_cfg.yml', 'w') as outfile: + config_file = create_config_yaml() + outfile.write(config_file) + # Linux CIs run usually headless, thus virtual x server is needed for gui testing + if os.getenv('CI', False): + # raise unittest.SkipTest("CERN CI runner with Miniconda python docker has segfault in these tests.") + from xvfbwrapper import Xvfb + cls.vdisplay = Xvfb() + cls.vdisplay.start() + # Start the simulation producer to create some fake data + cls.prod_sim_proc = start_pymosa_online_monitor.run_script_in_shell('', 'tmp_cfg.yml', 'start_producer_sim') + # Start converter + cls.conv_manager_proc = start_pymosa_online_monitor.run_script_in_shell('', 'tmp_cfg.yml', command='start_converter') + # Create Gui + time.sleep(2) + cls.app = QApplication(sys.argv) + cls.online_monitor = OnlineMonitor.OnlineMonitorApplication('tmp_cfg.yml') + time.sleep(2) + + @classmethod + def tearDownClass(cls): # Remove created files + time.sleep(1) + start_pymosa_online_monitor.kill(cls.prod_sim_proc) + 
start_pymosa_online_monitor.kill(cls.conv_manager_proc) + time.sleep(1) + os.remove('tmp_cfg.yml') + cls.online_monitor.close() + time.sleep(1) + + def test_data_chain(self): + ''' Checks for received data for the 2 receivers + + This effectively checks the full chain: + producer --> converter --> receiver + ''' + + # Qt evsent loop does not run in tests, thus we have to trigger the + # event queue manually + self.app.processEvents() + # Check all receivers present + self.assertEqual(len(self.online_monitor.receivers), 2, 'Number of receivers wrong') + self.app.processEvents() # Clear event queue + + # Case 1: Activate status widget, no data should be received + self.online_monitor.tab_widget.setCurrentIndex(0) + self.app.processEvents() + time.sleep(5) + self.app.processEvents() + time.sleep(5) + # Data structure to check for no data since receiver widget + # is not active + data_recv_0 = [] + self.app.processEvents() + for receiver in self.online_monitor.receivers: + if receiver.name == 'PYMOSA_Receiver': + # Check histogram for each plane + for k in range(6): + data_recv_0.append(receiver.occupancy_images[k].getHistogram(bins=100, step=100)) + + # Case 2: Activate DUT widget, receiver 1 should show data + self.online_monitor.tab_widget.setCurrentIndex(2) # Yaml dumps dict in alphabetical order. + self.app.processEvents() + time.sleep(5) + self.app.processEvents() + time.sleep(5) + # Data structure to check for data since receiver widget + # is active + data_recv_1 = [] + for receiver in self.online_monitor.receivers: + if receiver.name == 'PYMOSA_Receiver': + # Check histogram for each plane + for k in range(6): + data_recv_1.append(receiver.occupancy_images[k].getHistogram(bins=100, step=100)) + + # Case 3: Activate correlator tab, receiver 2 should have no data since start button not pressed + self.online_monitor.tab_widget.setCurrentIndex(1) # Yaml dumps dict in alphabetical order. 
+ self.app.processEvents() + time.sleep(5) + self.app.processEvents() + time.sleep(5) + data_recv_2 = [] + for receiver in self.online_monitor.receivers: + if receiver.name == 'HIT_Correlator': + data_recv_2.append(receiver.occupancy_images_rows.getHistogram(bins=100, step=100)) + + # Case 4: Activate correlator tab, receiver 2 should show data since start button is pressed + self.online_monitor.tab_widget.setCurrentIndex(1) # Yaml dumps dict in alphabetical order. + self.app.processEvents() + time.sleep(5) + self.app.processEvents() + time.sleep(5) + data_recv_3 = [] + for receiver in self.online_monitor.receivers: + if receiver.name == 'HIT_Correlator': + receiver.send_command('combobox1 0') # Select DUT + self.app.processEvents() + time.sleep(2) + self.app.processEvents() + time.sleep(2) + receiver.send_command('combobox2 1') # Select DUT + self.app.processEvents() + time.sleep(2) + self.app.processEvents() + time.sleep(2) + receiver.send_command('START 0') # send command in order to start correlation + self.app.processEvents() + time.sleep(2) + self.app.processEvents() + time.sleep(2) + data_recv_3.append(receiver.occupancy_images_rows.getHistogram(bins=100, step=100)) + + self.assertListEqual(data_recv_0, [(None, None), (None, None), (None, None), (None, None), (None, None), (None, None)]) + for k in range(6): + self.assertTrue(data_recv_1[k][0] is not None) + self.assertListEqual(data_recv_2, [(None, None)]) + self.assertTrue(data_recv_3[0][0] is not None) + + # Test the UI + def test_ui(self): + # 2 receiver + status widget expected + self.assertEqual(self.online_monitor.tab_widget.count(), 3, 'Number of tab widgets wrong') + + +if __name__ == '__main__': + suite = unittest.TestLoader().loadTestsFromTestCase(TestOnlineMonitor) + unittest.TextTestRunner(verbosity=2).run(suite) diff --git a/requirements.txt b/requirements.txt index bbf77fe..fd478d6 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,12 @@ # Core functionality 
-basil_daq>=2.4.12,<3.0.0 +basil-daq>=3.0.0,<4.0.0 bitarray>=0.8.1 +matplotlib +numba numpy -tables +online_monitor>=0.4.2,<0.5 +# pyqt -> this line is intentionally commented out pyyaml +qtpy +tables tqdm diff --git a/setup.py b/setup.py index 0d9af6b..fbb79fd 100644 --- a/setup.py +++ b/setup.py @@ -1,7 +1,7 @@ #!/usr/bin/env python from setuptools import setup, find_packages # This setup relies on setuptools since distutils is insufficient and badly hacked code +import pkg_resources -version = '0.0.1' author = 'Ivan Caicedo, Yannick Dieter, Toko Hirono, Jens Janssen, David-Leon Pohl' author_email = 'caicedo@physik.uni-bonn.de, dieter@physik.uni-bonn.de, hirono@physik.uni-bonn.de, janssen@physik.uni-bonn.de, pohl@physik.uni-bonn.de' @@ -24,13 +24,29 @@ maintainer_email=author_email, install_requires=install_requires, packages=find_packages(), - setup_requires=['setuptools'], + setup_requires=['setuptools', 'online_monitor>=0.4.2<0.5'], include_package_data=True, # accept all data files and directories matched by MANIFEST.in or found in source control keywords=['silicon', 'detector', 'telescope', 'Mimosa26', 'EUDET'], platforms='any', entry_points={ 'console_scripts': [ - 'pymosa = pymosa.m26:main' + 'pymosa = pymosa.m26:main', + 'pymosa_monitor = pymosa.online_monitor.start_pymosa_online_monitor:main', ] }, ) + +# FIXME: bad practice to put code into setup.py +# Add the online_monitor Pymosa plugins +try: + import os + import pymosa + from online_monitor.utils import settings + # Get the absoulte path of this package + package_path = os.path.dirname(pymosa.__file__) + # Add online_monitor plugin folder to entity search paths + settings.add_producer_sim_path(os.path.join(package_path, 'online_monitor')) + settings.add_converter_path(os.path.join(package_path, 'online_monitor')) + settings.add_receiver_path(os.path.join(package_path, 'online_monitor')) +except (ImportError, pkg_resources.DistributionNotFound): + pass