Updates to known pulsar search pipeline for O2 analysis

Matthew Pitkin requested to merge matthew-pitkin/lalsuite:update_knope into master

I have made some minor updates to the known pulsar search pipeline for the O2 analysis. In particular, I have modified the pipeline so that it can run over data from multiple observing runs, for each of which there may be different frame file types, channel names, and/or segment type requirements.
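To make the pairing explicit, here is a schematic sketch (not the pipeline's own code; the variable names are illustrative and the values are taken from the example configuration below): for each detector, the i-th entries of the start time, end time, frame type, and channel lists all refer to the same observing run.

# schematic sketch only: for each detector the i-th entries of the
# start/end times, frame types, and channels describe the same run
starttimes = {'H1': [1132441600, 1186642720]}
endtimes = {'H1': [1133046400, 1187247520]}
frametypes = {'H1': ['H1_HOFT_C00', 'H1_HOFT_C01']}
channels = {'H1': ['H1:GDS-CALIB_STRAIN', 'H1:DCS-CALIB_STRAIN_C01']}

for ifo in starttimes:
    for st, et, ft, ch in zip(starttimes[ifo], endtimes[ifo],
                              frametypes[ifo], channels[ifo]):
        print('%s: GPS %d-%d uses frame type %s and channel %s'
              % (ifo, st, et, ft, ch))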

An example of the pipeline configuration file for running on a couple of CW hardware injections over both O1 and O2 is given below. This has been tested using this branch and produces the expected results.

# Configuration file for testing combined O1 and O2 analysis pipeline

; general inputs for the whole analysis
[analysis]
# a list of the interferometers
ifos = ['H1', 'L1']

# two sets of start times, one for O1 and one for O2
starttime = {'H1': [1132441600, 1186642720], 'L1': [1132441600, 1186642720]}

# two sets of end times (one week later in both cases)
endtime = {'H1': [1133046400, 1187247520], 'L1': [1133046400, 1187247520]}

# choose whether to use lalapps_heterodyne_pulsar (heterodyne) or lalapps_SplInter (splinter); the value is case insensitive
preprocessing_engine = heterodyne

# a flag to set if only wanting to do the data preprocessing (heterodyne or splinter) and not parameter estimation
preprocessing_only = False

# a flag to set if only wanting to do the postprocessing (parameter estimation and webpage creation)
postprocessing_only = False

# flag to set whether to run the analysis for individual detectors only
incoherent_only = False

# flag to set whether to only run the coherent multi-detector analysis
coherent_only = False

# set the number of background odds ratio studies when doing parameter estimation
num_background = 0

# a list of multiplicative factors of the pulsar's rotation frequency to analyse
freq_factors = [2.0]

# the path for the timing and solar system ephemeris files
ephem_path = /home/mpitkin/lscsoft/share/lalpulsar

# the directory in which the DAGs are created and run from
run_dir = /home/mpitkin/analyses/O1O2/dags

# set this flag to automatically submit the Condor DAG created by the script
submit_dag = False

# this flag sets whether to run in autonomous mode
autonomous = False

# a base directory (for each detector) for preprocessing outputs
preprocessing_base_dir = {'H1': '/home/mpitkin/analyses/O1O2/H1', 'L1': '/home/mpitkin/analyses/O1O2/L1'}

# path to directory containing pulsar parameter (.par) files
pulsar_param_dir = /home/mpitkin/analyses/O1O2/pulsars

# path to log files
log_dir = /home/mpitkin/analyses/O1O2/log

# set to true if running on software/hardware injections
injections = True

# file to output a pickled version of the KnownPulsarPipelineDAG class
pickle_file = /home/mpitkin/analyses/O1O2/run.pkl

# name of the DAG file for the analysis
dag_name = O1O2run

# email address for job completion notification (if no email is given then no notifications will be sent)
email = pitkin@gmail.com


; Condor information
[condor]
# Condor accounting group
accounting_group = ligo.dev.o2.cw.targeted.bayesian

# Condor accounting group user
accounting_group_user = matthew.pitkin

# the data find executable
datafind = /bin/gw_data_find

; inputs for running a data find job
[datafind]

# a dictionary of frame types to be returned; one for each run and each detector
type = {'H1': ['H1_HOFT_C00', 'H1_HOFT_C01'], 'L1': ['L1_HOFT_C00', 'L1_HOFT_C01']}

# a string to match in the URL paths returned
match = localhost

; inputs for running a science segment finding job
[segmentfind]
# path to segment database query script
segfind = 

# path to ligolw_print
ligolw_print = /bin/ligolw_print

# URL of segment database server
server = https://segments.ligo.org

# a dictionary of the required segment types; one for each run and each detector
segmenttype = {'H1': ['H1:DMT-ANALYSIS_READY:1', 'H1:DMT-SCIENCE'], 'L1': ['L1:DMT-ANALYSIS_READY:1', 'L1:DMT-SCIENCE']}

# a dictionary of segment types to exclude
excludetype =

; inputs for running the lalapps_heterodyne_pulsar code
[heterodyne]
# condor universe
universe = vanilla

# path to lalapps_heterodyne_pulsar
heterodyne_exec = /home/mpitkin/lscsoft/bin/lalapps_heterodyne_pulsar

# low-pass filter (9th order Butterworth) knee frequency
filter_knee = 0.25

# the frame data sample rate (for the coarse heterodyne)
coarse_sample_rate = 16384

# the re-sampling rate for the coarse heterodyne
coarse_resample_rate = 1

# a dictionary of frame channel names; one for each run and each detector
channels = {'H1': ['H1:GDS-CALIB_STRAIN', 'H1:DCS-CALIB_STRAIN_C01'], 'L1': ['L1:GDS-CALIB_STRAIN', 'L1:DCS-CALIB_STRAIN_C01']}

# the fine heterodyne re-sampling rate (the sample rate is taken from coarse_resample_rate)
fine_resample_rate = 1/60

# the standard deviation threshold for removing outliers
stddev_thresh = 3.5

# set to output the coarse heterodyne data in binary files
binary_output = True

# gzip the coarse output files rather than outputting as binary
gzip_coarse_output = False

# gzip the fine output files
gzip_fine_output = True

; inputs for running the parameter estimation code lalapps_pulsar_parameter_estimation_nested
[pe]
# condor universe
universe = vanilla

# path to the parameter estimation executable
pe_exec = /home/mpitkin/lscsoft/bin/lalapps_pulsar_parameter_estimation_nested

# the base output directory for the nested samples
pe_output_dir = /home/mpitkin/analyses/O1O2/nested_samples

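# a dictionary of prior settings (distribution type and range) for each parameter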
prior_options = {'PHI0': {'priortype': 'uniform', 'ranges': [0., 3.1415926535897931]}, 'COSIOTA': {'priortype': 'uniform', 'ranges': [-1, 1]}, 'PSI': {'priortype': 'uniform', 'ranges': [-0.7853981633974483, 0.7853981633974483]}, 'H0': {'priortype': 'uniform', 'ranges': [0, 1e-20]}}

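# flag to set whether to derive the amplitude prior from the files and values given below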
derive_amplitude_prior = False

amplitude_prior_file = 

amplitude_prior_asds = 

; S6/VSR4 observation times (in days)
amplitude_prior_obstimes = 

amplitude_prior_type = 

amplitude_prior_model_type = 

# go through the pulsar parameter file and use the errors to set Gaussian priors in the prior file
use_parameter_errors = False

# the number of parallel runs for a given pulsar
n_runs = 4

# the number of live points for each run
n_live = 2048

# the number of MCMC samples for initial shuffling of the prior points
n_mcmc_initial = 0

# the tolerance (stopping criterion) for the runs
tolerance = 0.1

# flag to set whether running with non-GR parameterisation
non_gr = False

# flag to say whether to use the 'waveform' or 'source' parameterisation
model_type = source

# flag to set whether using a Gaussian likelihood, or the default Student's t-likelihood
gaussian_like = False

# flag to set whether the model under consideration is a biaxial model
biaxial = False

# path to lalapps_nest2pos for nested sample -> posterior conversion
n2p_exec = /home/mpitkin/lscsoft/bin/lalapps_nest2pos

# the base output directory for posteriors
n2p_output_dir = /home/mpitkin/analyses/O1O2/posterior_samples

# flag to set whether to clean (i.e. remove) all the nested sample files (keeping the posteriors)
clean_nest_samples = True

# set to true if wanting the output to use the l=m=2 gravitational wave phase as the initial phase, rather than the default rotational phase
use_gw_phase = True

# use Reduced Order Quadrature (ROQ) to speed up the code
use_roq = False

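# ROQ settings (left blank here as ROQ is not being used)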
roq_ntraining = 

roq_chunkmax = 

roq_tolerance =

roq_uniform = 

; inputs for creating results pages
[results_page]
# condor universe
universe = local

# results page creation executable
results_exec = /home/mpitkin/lscsoft/bin/lalapps_knope_result_page

# results collation executable
collate_exec = /home/mpitkin/lscsoft/bin/lalapps_knope_collate_results

# the output base web directory for the results
web_dir = /home/mpitkin/public_html/analyses/O1O2

# the equivalent output base URL for the above path
base_url = https://ldas-jobs.ligo.caltech.edu/~mpitkin/analyses/O1O2

# the upper limit credible interval to use (defaults to 95%)
upper_limit = 95

# value on which to sort the results table
sort_value = name

# direction in which to sort the results table
sort_direction = ascending

# list of upper limits to show in the results table
results = ['h0ul', 'ell', 'sdrat', 'q22', 'bsn', 'bci', 'bcin']

# list of source values to output
parameters = ['f0rot', 'f1rot', 'ra', 'dec', 'dist', 'sdlim']
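For reference, the dictionary- and list-valued options above are written as Python literals, so they can be recovered with a standard config reader along these lines (a minimal sketch, assuming the example above is saved as config.ini; this is not the pipeline's own parser):

import ast
import configparser  # ConfigParser in the Python 2 era the pipeline targets

config = configparser.ConfigParser()
config.read('config.ini')  # hypothetical filename for the example above

# dict/list-valued options are Python literals, so ast.literal_eval
# converts them back into Python objects
starttimes = ast.literal_eval(config.get('analysis', 'starttime'))
frametypes = ast.literal_eval(config.get('datafind', 'type'))

for ifo in starttimes:
    print(ifo, starttimes[ifo], frametypes[ifo])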
