Commit 5ca9aa4c authored by Vivien Raymond

Tried to redo commit bf503acc9036f43e9bdf5a1b86bd655ea6ad6855, fixing a build failure that did not appear on CIT
Original: ad7c970f39391be78375a41d5d20f45264626d09
parent 583dc393
@@ -253,6 +253,18 @@ LALInferenceRunState *initialize(ProcessParamsTable *commandLine)
{
  LALInferenceRunState *irs=NULL;
  LALInferenceIFOData *ifoPtr, *ifoListStart;
  unsigned int n_basis, n_samples, time_steps;
  n_basis = 965; //TODO: have it read from file or from command line.
  ProcessParamsTable *ppt=NULL;
  FILE *tempfp;
  if(LALInferenceGetProcParamVal(commandLine,"--roqtime_steps")){
    ppt=LALInferenceGetProcParamVal(commandLine,"--roqtime_steps");
    tempfp = fopen(ppt->value,"r");
    /* the sizes file holds three unsigned integers, one per line */
    fscanf(tempfp, "%u", &time_steps);
    fscanf(tempfp, "%u", &n_basis);
    fscanf(tempfp, "%u", &n_samples);
    fclose(tempfp);
  }
  MPI_Comm_rank(MPI_COMM_WORLD, &MPIrank);
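
The three fscanf calls above define a tiny file protocol for --roqtime_steps: three unsigned integers, one per line, in the order time_steps, n_basis, n_samples. That matches the np.savetxt call at the end of lalapps_compute_roq_weights further down. A minimal sketch of the same read on the Python side (the file name is assumed):

    # one '%u' per line: time_steps (len(tcs)), n_basis (B.shape[0]), n_samples (B.shape[1])
    with open('roq_sizes.dat') as f:
        time_steps, n_basis, n_samples = [int(f.readline()) for _ in range(3)]
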
@@ -273,6 +285,10 @@ LALInferenceRunState *initialize(ProcessParamsTable *commandLine)
  LALInferenceInjectInspiralSignal(irs->data,commandLine);
  fprintf(stdout, " ==== LALInferenceInjectInspiralSignal(): finished. ====\n");
  fprintf(stdout, " ==== LALInferenceSetupROQ(): started. ====\n");
  LALInferenceSetupROQ(irs->data,commandLine);
  fprintf(stdout, " ==== LALInferenceSetupROQ(): finished. ====\n");
  ifoPtr = irs->data;
  ifoListStart = irs->data;
  while (ifoPtr != NULL) {
@@ -288,6 +304,12 @@ LALInferenceRunState *initialize(ProcessParamsTable *commandLine)
        ifoPtr->timeModelhCross=ifoPtrCompare->timeModelhCross;
        ifoPtr->freqModelhCross=ifoPtrCompare->freqModelhCross;
        ifoPtr->modelParams=ifoPtrCompare->modelParams;
        if (ifoPtr->roqData){
          /* reuse the ROQ model vectors of the IFO with the same sample rate */
          ifoPtr->roqData->hplus = ifoPtrCompare->roqData->hplus;
          ifoPtr->roqData->hcross = ifoPtrCompare->roqData->hcross;
          ifoPtr->roqData->hstrain = ifoPtrCompare->roqData->hstrain;
          ifoPtr->roqData->amp_squared = ifoPtrCompare->roqData->amp_squared;
        }
        foundIFOwithSameSampleRate=1;
        break;
      }
@@ -319,6 +341,12 @@ LALInferenceRunState *initialize(ProcessParamsTable *commandLine)
                                               &lalDimensionlessUnit,
                                               ifoPtr->freqData->data->length);
      ifoPtr->modelParams = XLALCalloc(1, sizeof(LALInferenceVariables));
      if (ifoPtr->roqData){
        /* one complex weight vector per basis element for h+, hx and the full strain */
        ifoPtr->roqData->hplus = gsl_vector_complex_calloc(n_basis);
        ifoPtr->roqData->hcross = gsl_vector_complex_calloc(n_basis);
        ifoPtr->roqData->hstrain = gsl_vector_complex_calloc(n_basis);
        ifoPtr->roqData->amp_squared = XLALCalloc(1, sizeof(REAL8));
      }
    }
    ifoPtr = ifoPtr->next;
  }
......
@@ -497,6 +497,11 @@ void PTMCMCAlgorithm(struct tagLALInferenceRunState *runState)
    LALInferenceSetVariable(runState->currentParams, "distance", &d);
  } else {
    nullLikelihood = 0.0;
    LALInferenceIFOData *headData = runState->data;
    while (headData != NULL) {
      headData->nullloglikelihood = 0.0;
      headData = headData->next;
    }
  }
//null log likelihood logic doesn't work with noise parameters
......
@@ -70,7 +70,8 @@ pybin_SCRIPTS = \
	lalinference_pipe \
	lalapps_nest2pos \
	lalapps_merge_nested_sampling_runs \
	lalinference_pp_pipe
	lalinference_pp_pipe \
	lalapps_compute_roq_weights
pkgpython_PYTHON = \
	nest_utils.py \
	combine_evidence.py \
......
import numpy as np
import sys
from math import ceil
from optparse import OptionParser
import os
data_dir = './'
# parse the command line
parser = OptionParser(usage="usage: %prog [options]",
                      version="%prog")
parser.add_option("-d", "--data", type='string',
                  action="append",
                  dest="data_file",
                  help="data file")
parser.add_option("-p", "--psd", type='string',
                  action="append",
                  dest="psd_file",
                  help="psd file")
parser.add_option("-t", "--time_prior", type=float,
                  action="store",  # optional because action defaults to "store"
                  dest="dt",
                  help="width of time prior")
parser.add_option("-i", "--ifo", type='string',
                  action="append",
                  dest="IFOs",
                  help="list of ifos")
parser.add_option("-s", "--seglen", type=float,
                  action="store",
                  dest="seglen",
                  help="segment length")
parser.add_option("-f", "--fLow", type=float,
                  action="store",
                  dest="fLow",
                  help="low frequency cut off")
parser.add_option("-T", "--delta_tc", type=float,
                  action="store",
                  dest="delta_tc",
                  help="width of tc subdomain")
parser.add_option("-B", "--basis-set", type='string',
                  action="store",
                  dest="b_matrix_path",
                  help="B matrix")
parser.add_option("-o", "--out", type='string',
                  action="store",
                  dest="outpath",
                  default="./",
                  help="output path")
(options, args) = parser.parse_args()
B = np.load(options.b_matrix_path)
def BuildWeights(data, B, deltaF):
    '''For a whitened data array and a reduced basis, compute the ROQ weights.
    B: (reduced basis element)*invV (the inverse Vandermonde matrix)
    data: data set, already divided by the detector noise PSD
          (so the PSD is not a separate argument; shapes must match B's columns)
    deltaF: integration element df
    '''
    weights = np.dot(B, data.conjugate()) * deltaF * 4.
    return weights.T
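
In other words, for each basis element k the weight is w_k = 4*deltaF*sum_f B[k,f]*conj(d[f]). A tiny usage sketch with made-up shapes (illustrative values only; it reuses the script's numpy import):

    _nb, _ns = 10, 4096                                  # hypothetical n_basis, n_samples
    _B = np.random.randn(_nb, _ns) + 1j*np.random.randn(_nb, _ns)
    _d = np.random.randn(_ns) + 1j*np.random.randn(_ns)  # stands in for data already divided by the PSD
    _w = BuildWeights(_d, _B, 1./32.)
    print _w.shape                                       # -> (10,): one complex weight per basis element
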
##################################
relative_tc_shift = options.seglen - 2.
# loop over ifos
i = 0
for ifo in options.IFOs:
    dat_file = np.column_stack( np.loadtxt(options.data_file[i]) )
    data = dat_file[1] + 1j*dat_file[2]
    fseries = dat_file[0]
    deltaF = fseries[1] - fseries[0]
    fseries = fseries[int(options.fLow/deltaF):len(fseries)]
    data = data[int(options.fLow/deltaF):len(data)]
    psdfile = np.column_stack( np.loadtxt(options.psd_file[i]) )
    psd = psdfile[1]
    psd = psd[int(options.fLow/deltaF):len(psd)]
    data /= psd
    assert len(data) == len(psd) == B.shape[1]
    # zero out any NaNs left in the whitened data
    for k in range(len(data)):
        if np.isnan(data[k].real):
            data[k] = 0+0j
    tc_shifted_data = []  # array to be filled with data, shifted by discrete time tc
    # array of relative time shifts to be applied to the data
    tcs = np.linspace(relative_tc_shift - options.dt - 0.022,
                      relative_tc_shift + options.dt + 0.022,
                      int(ceil(2.*(options.dt+0.022) / options.delta_tc)))
    print "time steps = "+str(len(tcs))
    for j in range(len(tcs)):
        tc = tcs[j]
        # shifting by tc in time is multiplication by exp(2*pi*i*f*tc) in frequency
        exp_2pi_i_tc = np.array(np.exp(1j*2.*np.pi*fseries*tc))
        data_tc = np.array(data*exp_2pi_i_tc)
        tc_shifted_data.append(data_tc)
    tc_shifted_data = np.array(tc_shifted_data).T
    #*************************************************************************** #
    weights_path = os.path.join(options.outpath, "weights_%s.dat"%ifo)
    weights_file = open(weights_path, "wb")
    print "Computing weights for "+ifo
    weights = BuildWeights(tc_shifted_data, B, deltaF)
    print "Weights have been computed for "+ifo
    (weights.T).tofile(weights_file)
    weights_file.close()
    i += 1
size_file_path = os.path.join(options.outpath, "roq_sizes.dat")
np.savetxt(size_file_path, np.array((len(tcs), B.shape[0], B.shape[1])), fmt='%u')
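
Put together, the options above imply an invocation along these lines (paths and values are hypothetical; the -d, -p and -o arguments mirror what add_rom_weights_node passes in the pipeline code below, and -d/-p/-i can be repeated once per detector):

    lalapps_compute_roq_weights -B /path/to/B.npy -s 32 -f 40 -t 0.1 -T 0.001 \
        -i H1 -d H1-freqData.dat -p H1-PSD.dat -o ./ROQdata/0
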
# DAG generation code for running LALInference pipeline
# (C) 2012 John Veitch
# (C) 2012 John Veitch, Vivien Raymond
from lalapps import lalinference_pipe_utils as pipe_utils
from lalapps import inspiralutils
@@ -15,6 +15,7 @@ the config.ini file.
The user can specify either an injection file to analyse, with the --inj option,
a list of SnglInspiralTable or CoincInspiralTable triggers with the --<x>-triggers options,
a GraceDB ID with the --gid option,
or an ASCII list of GPS times with the --gps-time-file option.
If none of the above options are given, the pipeline will analyse the entire
......
#flow DAG Class definitions for LALInference Pipeline
# (C) 2012 John Veitch, Kiersten Ruisard, Kan Wang
# (C) 2012 John Veitch, Vivien Raymond, Kiersten Ruisard, Kan Wang
import itertools
import glue
@@ -406,6 +407,7 @@ class LALInferencePipelineDAG(pipeline.CondorDAG):
        else: self.veto_categories=[]
        for ifo in self.ifos:
            self.segments[ifo]=[]
        self.romweightsnodes={}
        self.dq={}
        self.frtypes=ast.literal_eval(cp.get('datafind','types'))
        self.channels=ast.literal_eval(cp.get('data','channels'))
@@ -416,9 +418,16 @@ class LALInferencePipelineDAG(pipeline.CondorDAG):
        else:
            self.dataseed=None
        # Set up necessary job files.
        self.prenodes={}
        self.datafind_job = pipeline.LSCDataFindJob(self.cachepath,self.logpath,self.config,dax=self.is_dax())
        self.datafind_job.add_opt('url-type','file')
        self.datafind_job.set_sub_file(os.path.abspath(os.path.join(self.basepath,'datafind.sub')))
        self.preengine_job = EngineJob(self.config, os.path.join(self.basepath,'prelalinference.sub'),self.logpath,ispreengine=True,dax=self.is_dax())
        self.preengine_job.set_grid_site('local')
        self.preengine_job.set_universe('vanilla')
        if cp.has_option('lalinference','roq'):
            self.romweights_job = ROMJob(self.config,os.path.join(self.basepath,'romweights.sub'),self.logpath,dax=self.is_dax())
            self.romweights_job.set_grid_site('local')
        # Need to create a job file for each IFO combination
        self.engine_jobs={}
        ifocombos=[]
@@ -630,7 +639,7 @@ class LALInferencePipelineDAG(pipeline.CondorDAG):
            gid=self.config.get('input','gid')
            flow=40.0
            if self.config.has_option('lalinference','flow'):
                flow=min(ast.literal_eval(self.config.get('lalinference','flow')))
                flow=min(ast.literal_eval(self.config.get('lalinference','flow')).values())
            events = readLValert(gid=gid,flow=flow,gracedb=self.config.get('condor','gracedb'))
        # pipedown-database
        else: gid=None
@@ -853,14 +862,31 @@ class LALInferencePipelineDAG(pipeline.CondorDAG):
        if len(ifos)==0:
            print 'No data found for time %f - %f, skipping'%(segstart,segend)
            return
        romweightsnode={}
        prenode=self.EngineNode(self.preengine_job)
        node=self.EngineNode(self.engine_jobs[tuple(ifos)])
        roqeventpath=os.path.join(self.preengine_job.roqpath,str(event.event_id))
        if self.config.has_option('lalinference','roq'):
            mkdirs(roqeventpath)
        node.set_trig_time(end_time)
        node.set_seed(random.randint(1,2**31))
        if event.srate: node.set_srate(event.srate)
        if event.trigSNR: node.set_trigSNR(event.trigSNR)
        prenode.set_trig_time(end_time)
        randomseed=random.randint(1,2**31)
        node.set_seed(randomseed)
        prenode.set_seed(randomseed)
        srate=0
        if event.srate:
            srate=event.srate
        if self.config.has_option('lalinference','srate'):
            srate=ast.literal_eval(self.config.get('lalinference','srate'))
        if srate != 0:
            node.set_srate(srate)
            prenode.set_srate(srate)
        if event.trigSNR:
            node.set_trigSNR(event.trigSNR)
        if self.dataseed:
            node.set_dataseed(self.dataseed+event.event_id)
            prenode.set_dataseed(self.dataseed+event.event_id)
        gotdata=0
        for ifo in ifos:
            if event.timeslides.has_key(ifo):
@@ -870,44 +896,88 @@ class LALInferencePipelineDAG(pipeline.CondorDAG):
            for seg in self.segments[ifo]:
                if segstart >= seg.start() and segend < seg.end():
                    if not self.config.has_option('lalinference','fake-cache'):
                        if self.config.has_option('lalinference','roq'):
                            prenode.add_ifo_data(ifo,seg,self.channels[ifo],timeslide=slide)
                        gotdata+=node.add_ifo_data(ifo,seg,self.channels[ifo],timeslide=slide)
                    else:
                        fakecachefiles=ast.literal_eval(self.config.get('lalinference','fake-cache'))
                        if self.config.has_option('lalinference','roq'):
                            prenode.add_fake_ifo_data(ifo,seg,fakecachefiles[ifo],timeslide=slide)
                        gotdata+=node.add_fake_ifo_data(ifo,seg,fakecachefiles[ifo],timeslide=slide)
        if self.config.has_option('lalinference','psd-xmlfile'):
            psdpath=os.path.realpath(self.config.get('lalinference','psd-xmlfile'))
            node.psds=get_xml_psds(psdpath,ifos,os.path.join(self.basepath,'PSDs'),end_time=end_time)
            if len(ifos)==0: node.ifos=node.cachefiles.keys()
            else: node.ifos=ifos
            node.psds=get_xml_psds(psdpath,ifos,os.path.join(self.basepath,'PSDs'),end_time=end_time)
            prenode.psds=get_xml_psds(psdpath,ifos,os.path.join(self.basepath,'PSDs'),end_time=end_time)
            if len(ifos)==0:
                node.ifos=node.cachefiles.keys()
                prenode.ifos=prenode.cachefiles.keys()
            else:
                node.ifos=ifos
                prenode.ifos=ifos
            gotdata=1
        if self.config.has_option('input','gid'):
            if os.path.isfile(os.path.join(self.basepath,'psd.xml.gz')):
                psdpath=os.path.join(self.basepath,'psd.xml.gz')
                node.psds=get_xml_psds(psdpath,ifos,os.path.join(self.basepath,'PSDs'),end_time=None)
                node.psds=get_xml_psds(psdpath,ifos,os.path.join(self.basepath,'PSDs'),end_time=None)
                prenode.psds=get_xml_psds(psdpath,ifos,os.path.join(self.basepath,'PSDs'),end_time=None)
        if self.config.has_option('lalinference','flow'):
            node.flows=ast.literal_eval(self.config.get('lalinference','flow'))
            prenode.flows=ast.literal_eval(self.config.get('lalinference','flow'))
        if event.fhigh:
            for ifo in ifos:
                node.fhighs[ifo]=str(event.fhigh)
        if self.config.has_option('lalinference','ER2-cache'):
            node.cachefiles=ast.literal_eval(self.config.get('lalinference','ER2-cache'))
            node.channels=ast.literal_eval(self.config.get('data','channels'))
            node.psds=ast.literal_eval(self.config.get('lalinference','psds'))
            for ifo in ifos:
                node.add_input_file(os.path.join(self.basepath,node.cachefiles[ifo]))
                node.cachefiles[ifo]=os.path.join(self.basepath,node.cachefiles[ifo])
                node.add_input_file(os.path.join(self.basepath,node.psds[ifo]))
                node.psds[ifo]=os.path.join(self.basepath,node.psds[ifo])
            if len(ifos)==0: node.ifos=node.cachefiles.keys()
            else: node.ifos=ifos
            node.timeslides=dict([ (ifo,0) for ifo in node.ifos])
            gotdata=1
        else:
            # Add the nodes it depends on
            for seg in node.scisegs.values():
                dfnode=seg.get_df_node()
                if dfnode is not None and dfnode not in self.get_nodes() and not self.config.has_option('lalinference','fake-cache'):
                    self.add_node(dfnode)
                prenode.fhighs[ifo]=str(event.fhigh)
        if self.config.has_option('lalinference','fhigh'):
            node.fhighs=ast.literal_eval(self.config.get('lalinference','fhigh'))
            prenode.fhighs=ast.literal_eval(self.config.get('lalinference','fhigh'))
        prenode.set_max_psdlength(self.config.getint('input','max-psd-length'))
        prenode.set_padding(self.config.getint('input','padding'))
        #prenode[ifo].set_output_file('/dev/null')
        prenode.add_var_arg('--Niter 1')
        prenode.add_var_arg('--data-dump '+roqeventpath)
        if self.config.has_option('lalinference','seglen'):
            prenode.set_seglen(self.config.getint('lalinference','seglen'))
        elif self.config.has_option('engine','seglen'):
            prenode.set_seglen(self.config.getint('engine','seglen'))
        else:
            prenode.set_seglen(event.duration)
        # Add the nodes it depends on
        for ifokey, seg in node.scisegs.items():
            dfnode=seg.get_df_node()
            if 1==1:
                #if dfnode is not None and dfnode not in self.get_nodes():
                if self.config.has_option('lalinference','roq'):
                    #if gotdata and seg.id() not in self.prenodes.keys():
                    if gotdata and event.event_id not in self.prenodes.keys():
                        if prenode not in self.get_nodes():
                            self.add_node(prenode)
                        for ifo in ifos:
                            romweightsnode[ifo]=self.add_rom_weights_node(ifo,prenode)
                            #self.add_node(romweightsnode[ifo])
                            if self.config.has_option('input','injection-file'):
                                freqDataFile=os.path.join(roqeventpath,ifo+'-freqDataWithInjection.dat')
                            else:
                                freqDataFile=os.path.join(roqeventpath,ifo+'-freqData.dat')
                            prenode.add_output_file(freqDataFile)
                            prenode.add_output_file(os.path.join(roqeventpath,ifo+'-PSD.dat'))
                            romweightsnode[ifo].add_var_arg('-d '+freqDataFile)
                            romweightsnode[ifo].add_input_file(freqDataFile)
                            romweightsnode[ifo].add_var_arg('-p '+os.path.join(roqeventpath,ifo+'-PSD.dat'))
                            romweightsnode[ifo].add_input_file(os.path.join(roqeventpath,ifo+'-PSD.dat'))
                            romweightsnode[ifo].add_var_arg('-o '+roqeventpath)
                            romweightsnode[ifo].add_output_file(os.path.join(roqeventpath,'weights_'+ifo+'.dat'))
                        #self.prenodes[seg.id()]=(prenode,romweightsnode)
                        self.prenodes[event.event_id]=(prenode,romweightsnode)
            if self.config.has_option('lalinference','roq'):
                #node.add_parent(self.prenodes[seg.id()][1][ifokey])
                node.add_parent(self.prenodes[event.event_id][1][ifokey])
            if dfnode is not None and dfnode not in self.get_nodes():
                if not self.config.has_option('lalinference','fake-cache'):
                    self.add_node(dfnode)
        if gotdata:
            self.add_node(node)
        else:
@@ -919,6 +989,7 @@ class LALInferencePipelineDAG(pipeline.CondorDAG):
        # Add control options
        if self.config.has_option('input','injection-file'):
            node.set_injection(self.config.get('input','injection-file'),event.event_id)
            prenode.set_injection(self.config.get('input','injection-file'),event.event_id)
        if self.config.has_option('lalinference','seglen'):
            node.set_seglen(self.config.getint('lalinference','seglen'))
        elif self.config.has_option('engine','seglen'):
@@ -934,6 +1005,20 @@ class LALInferencePipelineDAG(pipeline.CondorDAG):
        out_dir=os.path.join(self.basepath,'engine')
        mkdirs(out_dir)
        node.set_output_file(os.path.join(out_dir,node.engine+'-'+str(event.event_id)+'-'+node.get_ifos()+'-'+str(node.get_trig_time())+'-'+str(node.id)))
        if self.config.has_option('lalinference','roq'):
            for ifo in ifos:
                node.add_var_arg('--'+ifo+'-roqweights '+os.path.join(roqeventpath,'weights_'+ifo+'.dat'))
                node.add_input_file(os.path.join(roqeventpath,'weights_'+ifo+'.dat'))
            node.add_var_arg('--roqtime_steps '+os.path.join(roqeventpath,'roq_sizes.dat'))
            node.add_input_file(os.path.join(roqeventpath,'roq_sizes.dat'))
            if self.config.has_option('paths','rom_nodes'):
                nodes_path=self.config.get('paths','rom_nodes')
                node.add_var_arg('--roqnodes '+nodes_path)
                node.add_input_file(nodes_path)
            else:
                print 'No nodes specified for ROM likelihood'
                return None
        for (opt,arg) in event.engine_opts.items():
            node.add_var_opt(opt,arg)
        return node
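
For reference, a minimal, hypothetical ini fragment covering the options this ROQ branch reads (section and option names are taken from the code above; the values are placeholders):

    [lalinference]
    # any value works; the code only checks has_option('lalinference','roq')
    roq = True

    [paths]
    # forwarded to the engine as --roqnodes
    rom_nodes = /path/to/roq_nodes.dat

Without rom_nodes the method prints 'No nodes specified for ROM likelihood' and returns None.
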
@@ -958,38 +1043,65 @@ class LALInferencePipelineDAG(pipeline.CondorDAG):
        self.add_node(node)
        return node

    def add_rom_weights_node(self,ifo,parent=None):
        #try:
        #node=self.romweightsnodes[ifo]
        #except KeyError:
        # NB: parent (the pre-engine node) is effectively required here,
        # since its seglen and flows[ifo] configure the weights job.
        node=ROMNode(self.romweights_job,ifo,parent.seglen,parent.flows[ifo])
        self.romweightsnodes[ifo]=node
        if parent is not None:
            node.add_parent(parent)
        self.add_node(node)
        return node
class EngineJob(pipeline.CondorDAGJob,pipeline.AnalysisJob):
    def __init__(self,cp,submitFile,logdir,dax=False,site=None):
    def __init__(self,cp,submitFile,logdir,ispreengine=False,dax=False,site=None):
        self.ispreengine=ispreengine
        self.engine=cp.get('analysis','engine')
        basepath=cp.get('paths','basedir')
        snrpath=os.path.join(basepath,'SNR')
        self.snrpath=snrpath
        mkdirs(snrpath)
        if self.engine=='lalinferencemcmc':
            exe=cp.get('condor','mpirun')
            self.binary=cp.get('condor',self.engine)
            #universe="parallel"
            universe="vanilla"
            self.write_sub_file=self.__write_sub_file_mcmc_mpi
        elif self.engine=='lalinferencebambimpi':
            exe=cp.get('condor','mpirun')
            self.binary=cp.get('condor','lalinferencebambi')
            universe="vanilla"
            self.write_sub_file=self.__write_sub_file_mcmc_mpi
        elif self.engine=='lalinferencenest':
            exe=cp.get('condor',self.engine)
            if site is not None and site!='local':
                universe='vanilla'
            else:
                # Run in the vanilla universe when using resume
                if cp.has_option('engine','resume'):
                    universe='vanilla'
        if ispreengine is True:
            roqpath=os.path.join(basepath,'ROQdata')
            self.roqpath=roqpath
            mkdirs(roqpath)
            self.engine='lalinferencemcmc'
            exe=cp.get('condor',self.engine)
            if cp.has_option('engine','site'):
                if site is not None and site!='local':
                    universe='vanilla'
                else:
                    universe="standard"
            else:
                universe='standard'
            # the pre-engine data-dump run is always forced into the vanilla universe
            universe='vanilla'
        else:
            print 'LALInferencePipe: Unknown engine node type %s!'%(self.engine)
            sys.exit(1)
        if self.engine=='lalinferencemcmc':
            exe=cp.get('condor','mpirun')
            self.binary=cp.get('condor',self.engine)
            #universe="parallel"
            universe="vanilla"
            self.write_sub_file=self.__write_sub_file_mcmc_mpi
        elif self.engine=='lalinferencebambimpi':
            exe=cp.get('condor','mpirun')
            self.binary=cp.get('condor','lalinferencebambi')
            universe="vanilla"
            self.write_sub_file=self.__write_sub_file_mcmc_mpi
        elif self.engine=='lalinferencenest':
            exe=cp.get('condor',self.engine)
            if site is not None and site!='local':
                universe='vanilla'
            else:
                # Run in the vanilla universe when using resume
                if cp.has_option('engine','resume'):
                    universe='vanilla'
                else:
                    universe='standard'
        else:
            print 'LALInferencePipe: Unknown engine node type %s!'%(self.engine)
            sys.exit(1)
        pipeline.CondorDAGJob.__init__(self,universe,exe)
        pipeline.AnalysisJob.__init__(self,cp,dax=dax)
        # Set grid site if needed
@@ -1001,8 +1113,12 @@ class EngineJob(pipeline.CondorDAGJob,pipeline.AnalysisJob):
        self.set_sub_file(os.path.abspath(submitFile))
        if self.engine=='lalinferencemcmc' or self.engine=='lalinferencebambimpi':
            #openmpipath=cp.get('condor','openmpi')
            self.machine_count=cp.get('mpi','machine-count')
            self.machine_memory=cp.get('mpi','machine-memory')
            if ispreengine is False:
                self.machine_count=cp.get('mpi','machine-count')
                self.machine_memory=cp.get('mpi','machine-memory')
            else:
                # the pre-engine run only needs a single MPI slot
                self.machine_count=str(1)
                self.machine_memory=cp.get('mpi','machine-memory')
            #self.add_condor_cmd('machine_count',machine_count)
            #self.add_condor_cmd('environment','CONDOR_MPI_PATH=%s'%(openmpipath))
            try:
@@ -1019,9 +1135,11 @@ class EngineJob(pipeline.CondorDAGJob,pipeline.AnalysisJob):
            self.add_condor_cmd('+'+cp.get('condor','queue'),'True')
            self.add_condor_cmd('Requirements','(TARGET.'+cp.get('condor','queue')+' =?= True)')
        if cp.has_section(self.engine):
            self.add_ini_opts(cp,self.engine)
            if ispreengine is False:
                self.add_ini_opts(cp,self.engine)
        if cp.has_section('engine'):
            self.add_ini_opts(cp,'engine')