diff --git a/gstlal-calibration/bin/gstlal_compute_strain b/gstlal-calibration/bin/gstlal_compute_strain
index 3f5b0c763405a34c82aa412db633782cbe5158c2..04594dabf775cb4e83dad7b7b9b880f8c8136cfd 100755
--- a/gstlal-calibration/bin/gstlal_compute_strain
+++ b/gstlal-calibration/bin/gstlal_compute_strain
@@ -310,6 +310,8 @@ fs_var = float(TDCFConfigs["fsvar"])
 srcQinv_min = float(TDCFConfigs["srcqinvmin"])
 srcQinv_max = float(TDCFConfigs["srcqinvmax"])

+latency_test_filename_suffix = DebuggingConfigs["latencytestfilenamesuffix"] if "latencytestfilenamesuffix" in DebuggingConfigs else ""
+
 # Set up smoothing, averaging and integration sample sizes for kappa calulations
 integration_samples = demodulation_filter_time * compute_factors_sr
 factors_average_samples = int(TDCFConfigs["tdcfaveragingtime"]) * compute_factors_sr
@@ -369,7 +371,7 @@ verbose = Config.getboolean("DebuggingConfigurations", "verbose")
 file_check_sum = Config.getboolean("InputConfigurations", "filechecksum")
 skip_bad_files = Config.getboolean("InputConfigurations", "skipbadfiles")
 # Booleans for calibration tests
-test_latency = Config.getboolean("DebuggingConfigurations", "testlatency") if "testlatency" in DebuggingConfigs else False
+test_latency = True if len(latency_test_filename_suffix) else Config.getboolean("DebuggingConfigurations", "testlatency") if "testlatency" in DebuggingConfigs else False
 test_filters = Config.getboolean("DebuggingConfigurations", "testfilters") if "testfilters" in DebuggingConfigs else False

 #
@@ -1008,7 +1010,7 @@ elif InputConfigs["datasource"] == "frames": # Data is to be read from frame fil

	src = pipeparts.mklalcachesrc(pipeline, location = options.frame_cache, cache_dsc_regex = instrument)
	if test_latency:
-		src = pipeparts.mkgeneric(pipeline, src, "splitcounter", filename = "gstlal_compute_strain_timestamps_in.txt")
+		src = pipeparts.mkgeneric(pipeline, src, "splitcounter", filename = "gstlal_compute_strain_timestamps_in%s.txt" % latency_test_filename_suffix)

	#
	# Hook up the relevant channels to the demuxer
@@ -2914,7 +2916,7 @@ else:

	mux = pipeparts.mkprogressreport(pipeline, mux, "progress_sink_%s" % instrument)
	if test_latency:
-		mux = pipeparts.mkgeneric(pipeline, mux, "splitcounter", filename = "gstlal_compute_strain_timestamps_out.txt")
+		mux = pipeparts.mkgeneric(pipeline, mux, "splitcounter", filename = "gstlal_compute_strain_timestamps_out%s.txt" % latency_test_filename_suffix)

	if OutputConfigs["datasink"] == "lvshm":
		pipeparts.mkgeneric(pipeline, mux, "gds_lvshmsink", sync=False, async=False, shm_name = OutputConfigs["outputshmpartition"], num_buffers = int(OutputConfigs["numbuffers"]), blocksize = int(OutputConfigs["framesize"])*options.frame_duration*options.frames_per_file, buffer_mode = int(OutputConfigs["buffermode"]))
diff --git a/gstlal-calibration/tests/check_calibration/Makefile b/gstlal-calibration/tests/check_calibration/Makefile
index 2d3783a476912fc411de213bd834fc4cbf5ae370..425033f5e84322e7adcd8eb1faa3ff4db860a7c9 100644
--- a/gstlal-calibration/tests/check_calibration/Makefile
+++ b/gstlal-calibration/tests/check_calibration/Makefile
@@ -4,9 +4,9 @@
 #################################

 # which interferometer (H or L)
-IFO = H
+IFO = L
 # determines where to look for filters files (e.g., O1, O2, O3, ER10, ER13, ER14, PreER10, PreER13, PreER14)
-OBSRUN = O2
+OBSRUN = ER14

 START = $(shell echo 1186099200 - 2260 | bc)
 #1229094912
@@ -16,7 +16,7 @@ END = $(shell echo 1186177024 + 2260 | bc)
 #1229099008
 #1225968448
 #1185771520
-SHMRUNTIME = 400
+SHMRUNTIME = 36000
 # How much time does the calibration need to settle at the start and end?
 PLOT_WARMUP_TIME = 2260
 PLOT_COOLDOWN_TIME = 2260
@@ -29,8 +29,10 @@ DCSFCCCONFIGS = ../../config_files/O2/H1/tests/H1DCS_FreqIndepAndFccCorrections_
 GDSTESTCONFIGS = ../../config_files/PreER13/H1/H1GDS_TEST_1225558818.ini
 DCSTESTCONFIGS = ../../config_files/O2/H1/tests/H1DCS_AllCorrections_Cleaning_TEST.ini
 GDSSHMCONFIGS = Filters/ER14/GDSFilters/H1GDS_1234630818_latency_test.ini
+GDSOLDCONFIGS = Filters/ER14/GDSFilters/L1GDS_1235491416_old.ini
+GDSBETTERCONFIGS = Filters/ER14/GDSFilters/L1GDS_1235491416_better.ini

-all: noise_subtraction_ASD_DCS noise_subtraction_tf_DCS lines_ratio_DCS
+all: latency_tests

 ###############################################
 ### These commands should change less often ###
@@ -96,6 +98,14 @@ $(IFO)1_hoft_GDS_SHM_frames.cache: filters framesdir
	-GST_DEBUG=3 timeout $(SHMRUNTIME) gstlal_compute_strain --output-path Frames/$(OBSRUN)/$(IFO)1/GDS/ --frame-duration=1 --frames-per-file=1 --wings=0 --config-file $(GDSSHMCONFIGS)
	ls Frames/$(OBSRUN)/$(IFO)1/GDS/$(IFO)-$(IFO)1GDS_SHM*.gwf | lalapps_path2cache > $@

+$(IFO)1_hoft_GDS_OLD_frames.cache: filters
+	-GST_DEBUG=3 timeout $(SHMRUNTIME) gstlal_compute_strain --output-path Frames/$(OBSRUN)/$(IFO)1/GDS/ --frame-duration=1 --frames-per-file=1 --wings=0 --config-file $(GDSOLDCONFIGS)
+	ls Frames/$(OBSRUN)/$(IFO)1/GDS/$(IFO)-$(IFO)1GDS_OLD*.gwf | lalapps_path2cache > $@
+
+$(IFO)1_hoft_GDS_BETTER_frames.cache: filters
+	-GST_DEBUG=3 timeout $(SHMRUNTIME) gstlal_compute_strain --output-path Frames/$(OBSRUN)/$(IFO)1/GDS/ --frame-duration=1 --frames-per-file=1 --wings=0 --config-file $(GDSBETTERCONFIGS)
+	ls Frames/$(OBSRUN)/$(IFO)1/GDS/$(IFO)-$(IFO)1GDS_BETTER*.gwf | lalapps_path2cache > $@
+
 GDS_pcal2darm_plots: $(IFO)1_easy_raw_frames.cache $(IFO)1_hoft_GDS_frames.cache
	python pcal2darm_timeseries.py --gps-start-time $(PLOT_START) --gps-end-time $(PLOT_END) --ifo $(IFO)1 --raw-frame-cache $(IFO)1_easy_raw_frames.cache --gstlal-frame-cache-list $(IFO)1_hoft_GDS_frames.cache --config-file '$(GDSCONFIGS)' --pcal-channel-name CAL-PCALY_TX_PD_OUT_DQ --gstlal-channel-list GDS-CALIB_STRAIN --labels GDS-CALIB_STRAIN
@@ -117,6 +127,10 @@ filters_tf_DCS: $(IFO)1_hoft_DCS_frames.cache

 latency_test: $(IFO)1_hoft_GDS_SHM_frames.cache
	python latency_plot.py --intime-file gstlal_compute_strain_timestamps_in.txt --outtime-file gstlal_compute_strain_timestamps_out.txt --plot-filename-prefix $(IFO)1GDS_latency --plot-title '$(IFO)1 Calibration Latency vs Time'

+latency_tests:
+	#$(IFO)1_hoft_GDS_OLD_frames.cache $(IFO)1_hoft_GDS_BETTER_frames.cache
+	python latency_plot.py --intime-file-list 'gstlal_compute_strain_timestamps_in_resample.txt,gstlal_compute_strain_timestamps_in_old.txt,gstlal_compute_strain_timestamps_in_better.txt' --outtime-file-list 'gstlal_compute_strain_timestamps_out_resample.txt,gstlal_compute_strain_timestamps_out_old.txt,gstlal_compute_strain_timestamps_out_better.txt' --plot-filename-prefix $(IFO)1GDS_latency --labels 'Including resampling latency,6s Tukey-windowed actuation filter,3.5s Kaiser-windowed actuation filter'
+
 CALCS_GDS_ASD: $(IFO)1_hoft_GDS_frames.cache $(IFO)1_easy_raw_frames.cache
	./ASD_comparison_plots --ifo $(IFO)1 --gps-start-time $(PLOT_START) --gps-end-time $(PLOT_END) --raw-frame-cache $(IFO)1_easy_raw_frames.cache --hoft-frame-cache $(IFO)1_hoft_GDS_frames.cache
diff --git a/gstlal-calibration/tests/check_calibration/latency_plot.py b/gstlal-calibration/tests/check_calibration/latency_plot.py
index f5de5d67cc0309ecc183c3967656b48df2ad8cbe..920fc3395fb7a21286e87547cb082a42b2939073 100644
--- a/gstlal-calibration/tests/check_calibration/latency_plot.py
+++ b/gstlal-calibration/tests/check_calibration/latency_plot.py
@@ -5,65 +5,76 @@ from math import pi
 import datetime
 import time
 import matplotlib
+matplotlib.rcParams['font.family'] = 'Times New Roman'
+matplotlib.rcParams['font.size'] = 16
+matplotlib.rcParams['legend.fontsize'] = 14
+matplotlib.rcParams['mathtext.default'] = 'regular'
 matplotlib.use('Agg')
+import matplotlib.pyplot as plt
+from matplotlib import rc
+rc('text', usetex = True)
 import glob
 from optparse import OptionParser, Option
 import matplotlib.pyplot as plt

 parser = OptionParser()
-parser.add_option("--intime-file", metavar = "file", type = str, help = "File that contains data timestamps and real time of input data")
-parser.add_option("--outtime-file", metavar = "file", type = str, help = "File that contains data timestamps and real time of output data")
-parser.add_option("--plot-title", metavar = "name", type = str, help = "Title of the plot")
+parser.add_option("--intime-file-list", metavar = "list", type = str, help = "Comma-separated list of files that contain data timestamps and real times of input data")
+parser.add_option("--outtime-file-list", metavar = "list", type = str, help = "Comma-separated list of files that contain data timestamps and real times of output data")
+parser.add_option("--labels", metavar = "list", type = str, default = "", help = "Comma-separated list of plot legends for the data sets (default is no legend)")
+parser.add_option("--plot-title", metavar = "name", type = str, default = "", help = "Title of the plot (default is no title)")
 parser.add_option("--plot-filename-prefix", metavar = "file", type = str, default = "", help = "Start of the name of the file containing the plot. GPS start time, duration of plot, and .pdf are added")

 options, filenames = parser.parse_args()

-intimes = numpy.loadtxt(options.intime_file)
-outtimes = numpy.loadtxt(options.outtime_file)
+labels = options.labels.split(',')
+
+# Get the list of files with the timestamp data
+intime_file_list = options.intime_file_list.split(',')
+outtime_file_list = options.outtime_file_list.split(',')
+if len(intime_file_list) != len(outtime_file_list):
+	raise ValueError("intime-file-list and outtime-file-list must be the same length")

-in_dt = intimes[1][0] - intimes[0][0]
-out_dt = outtimes[1][0] - outtimes[0][0]
+# Organize the times into lists
+intimes = []
+outtimes = []
+for i in range(0, len(intime_file_list)):
+	intimes.append(numpy.loadtxt(intime_file_list[i]))
+	outtimes.append(numpy.loadtxt(outtime_file_list[i]))
+
+# Find the least common multiple sample period.  This assumes that all input
+# sample periods are the same and all output sample periods are the same.
+in_dt = intimes[0][1][0] - intimes[0][0][0]
+out_dt = outtimes[0][1][0] - outtimes[0][0][0]
 long_dt = max(in_dt, out_dt)
 short_dt = min(in_dt, out_dt)
 common_dt = long_dt
 while common_dt % short_dt:
	common_dt = common_dt + long_dt

-in_step = int(common_dt / in_dt)
-out_step = int(common_dt / out_dt)
-
-first_index_in = 0
-first_index_out = 0
-last_index_in = len(intimes) - 1
-last_index_out = len(outtimes) - 1
-
-while intimes[first_index_in][0] % common_dt:
-	first_index_in = first_index_in + 1
-while outtimes[first_index_out][0] % common_dt:
-	first_index_out = first_index_out + 1
-while intimes[last_index_in][0] % common_dt:
-	last_index_in = last_index_in - 1
-while outtimes[last_index_out][0] % common_dt:
-	last_index_out = last_index_out - 1
-
-while intimes[first_index_in][0] < outtimes[first_index_out][0]:
-	first_index_in = first_index_in + in_step
-while outtimes[first_index_out][0] < intimes[first_index_in][0]:
-	first_index_out = first_index_out + out_step
-while intimes[last_index_in][0] > outtimes[last_index_out][0]:
-	last_index_in = last_index_in - in_step
-while outtimes[last_index_out][0] > intimes[last_index_in][0]:
-	last_index_out = last_index_out - out_step
-
-t_start = intimes[first_index_in][0]
-dur = intimes[last_index_in][0] - t_start
-gps_time = []
-latency = []
-for i in range(0, 1 + (last_index_in - first_index_in) / in_step):
-	gps_time.append(intimes[first_index_in + i * in_step][0] - t_start)
-	latency.append(outtimes[first_index_out + i * out_step][1] - intimes[first_index_in + i * in_step][1])
+# Number of samples of input and output corresponding to the least common multiple sample period
+in_step = int(round(common_dt / in_dt))
+out_step = int(round(common_dt / out_dt))
+
+# Find a start time that is not before any of the data sets start and an end time that is not after any of the data sets end
+t_start = intimes[0][0][0]
+t_end = intimes[0][-1][0]
+for i in range(0, len(intimes)):
+	t_start = t_start if t_start > intimes[i][0][0] else intimes[i][0][0]
+	t_start = t_start if t_start > outtimes[i][0][0] else outtimes[i][0][0]
+	t_end = t_end if t_end < intimes[i][-1][0] else intimes[i][-1][0]
+	t_end = t_end if t_end < outtimes[i][-1][0] else outtimes[i][-1][0]
+
+# Make sure the start and end times are multiples of the chosen sample period
+if t_start % common_dt:
+	t_start = t_start + common_dt - t_start % common_dt
+if t_end % common_dt:
+	t_end = t_end - t_end % common_dt

+# Make a time vector
+dur = t_end - t_start
+gps_time = numpy.arange(0, dur + common_dt / 2, common_dt)
+
+# Decide what unit of time to use
 t_unit = 'seconds'
 if gps_time[len(gps_time) - 1] > 100:
	for i in range(0, len(gps_time)):
@@ -78,13 +89,45 @@ if gps_time[len(gps_time) - 1] > 100:
		gps_time[i] = gps_time[i] / 24.0
	t_unit = 'days'

+# Collect latency data in a list
+latency = []
+for i in range(0, len(intimes)):
+	latency.append([])
+	intimes_start_index = int(round((t_start - intimes[i][0][0]) / in_dt))
+	outtimes_start_index = int(round((t_start - outtimes[i][0][0]) / out_dt))
+	for j in range(0, len(gps_time)):
+		latency[i].append(outtimes[i][outtimes_start_index + j * out_step][1] - intimes[i][intimes_start_index + j * in_step][1])
+
+# Make the plot
+colors = ['purple', 'b', 'c', 'g', 'y', 'r']
+markersize = 200.0 / numpy.sqrt(len(intimes[0]))
+markersize = min(markersize, 10.0)
+markersize = max(markersize, 0.2)
 plt.figure(figsize = (10, 5))
-plt.plot(gps_time, latency, 'r.')
-plt.title(options.plot_title)
+if len(labels[0]):
+	plt.plot(gps_time, latency[0], colors[0], linestyle = 'None', marker = '.', markersize = markersize, label = labels[0])
+else:
+	plt.plot(gps_time, latency[0], colors[0], linestyle = 'None', marker = '.', markersize = markersize)
+if len(options.plot_title):
+	plt.title(options.plot_title)
 plt.ylabel('Latency [s]')
 plt.xlabel('Time in %s since %s UTC' % (t_unit, time.strftime("%b %d %Y %H:%M:%S", time.gmtime(t_start + 315964782))))
-plt.ylim(0, 8)
-plt.grid(True)
+plt.ylim(0, 15)
+plt.grid(True, which = "both", linestyle = ':', linewidth = 0.3, color = 'black')
+if len(labels[0]):
+	leg = plt.legend(fancybox = True, loc = 'upper right')
+	leg.get_frame().set_alpha(0.8)
+
+for i in range(1, len(intimes)):
+	if len(labels) > 1:
+		plt.plot(gps_time, latency[i], colors[i % len(colors)], linestyle = 'None', marker = '.', markersize = markersize, label = labels[i])
+	else:
+		plt.plot(gps_time, latency[i], colors[i % len(colors)], linestyle = 'None', marker = '.', markersize = markersize)
+	if len(options.labels):
+		leg = plt.legend(fancybox = True, loc = 'upper right')
+		leg.get_frame().set_alpha(0.8)
+
+# Save the plot to a file
 plt.savefig('%s_%d-%d.png' % (options.plot_filename_prefix, int(t_start), int(dur)))
 plt.savefig('%s_%d-%d.pdf' % (options.plot_filename_prefix, int(t_start), int(dur)))
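
For reference, a minimal standalone sketch (not part of the patch) of the timestamp-alignment logic the reworked latency_plot.py applies to each pair of splitcounter files: take the least common multiple of the input and output sample periods, step through both files on that common grid from a shared start time, and difference the wall-clock columns. The arrays below are made-up stand-ins for the gstlal_compute_strain_timestamps_in*/out* files (column 0 = GPS time of the buffer, column 1 = wall-clock time it was seen), and the variable names only loosely mirror the script.

import numpy

# Hypothetical data: input buffers every 1 s, output frames every 4 s,
# with a constant 5.5 s of added latency on the output side.
intimes = numpy.array([[1235000000 + i, 1551000000 + i + 4.0] for i in range(64)])
outtimes = numpy.array([[1235000000 + 4 * i, 1551000000 + 4 * i + 9.5] for i in range(16)])

# Sample periods of the two streams and their least common multiple
in_dt = intimes[1][0] - intimes[0][0]
out_dt = outtimes[1][0] - outtimes[0][0]
long_dt, short_dt = max(in_dt, out_dt), min(in_dt, out_dt)
common_dt = long_dt
while common_dt % short_dt:
	common_dt += long_dt

# Strides through each file corresponding to one common_dt
in_step = int(round(common_dt / in_dt))
out_step = int(round(common_dt / out_dt))

# Common start time, rounded up to a multiple of common_dt
t_start = max(intimes[0][0], outtimes[0][0])
if t_start % common_dt:
	t_start += common_dt - t_start % common_dt

# Latency at each common timestamp = wall-clock time out minus wall-clock time in
in_i = int(round((t_start - intimes[0][0]) / in_dt))
out_i = int(round((t_start - outtimes[0][0]) / out_dt))
n = min((len(intimes) - in_i) // in_step, (len(outtimes) - out_i) // out_step)
latency = [outtimes[out_i + j * out_step][1] - intimes[in_i + j * in_step][1] for j in range(n)]
print(latency[:5])  # -> [5.5, 5.5, 5.5, 5.5, 5.5]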