Commit d651dcd7 authored by Salvatore Vitale's avatar Salvatore Vitale
Browse files

LALInference to store command line in metadata and print it in postprocessing

parent b952629b
Pipeline #105834 passed with stages
in 129 minutes and 11 seconds
......@@ -1093,7 +1093,6 @@ LALInferenceModel *LALInferenceInitCBCModel(LALInferenceRunState *state) {
} else {
LALInferenceAddVariable(model->params, "flow", &fLow, LALINFERENCE_REAL8_t, LALINFERENCE_PARAM_FIXED);
}
/* Set up the variable parameters */
/********************* TBL: Adding noise-fitting parameters *********************/
......
......@@ -1113,6 +1113,11 @@ void LALInferenceNestedSamplingAlgorithm(LALInferenceRunState *runState)
XLALH5FileAddScalarAttribute(groupPtr, "log_max_likelihood", &logLmax , LAL_D_TYPE_CODE);
XLALH5FileAddScalarAttribute(groupPtr, "number_live_points", &Nlive, LAL_U4_TYPE_CODE);
XLALH5FileAddScalarAttribute(groupPtr, "log_prior_volume", &logvolume, LAL_D_TYPE_CODE);
/* Add the whole command line to the output headers */
char *cl=NULL;
cl=LALInferencePrintCommandLine(runState->commandLine);
XLALH5FileAddStringAttribute(groupPtr,"CommandLine",cl);
/* Get the cpu usage */
struct tms tms_buffer;
if(times(&tms_buffer))
......
......@@ -1559,6 +1559,9 @@ void LALInferenceWriteMCMCSamples(LALInferenceRunState *runState) {
LALInferenceH5VariablesArrayToDataset(group, output_array, N_output_array, thread->name);
}
}
char *cl=NULL;
cl=LALInferencePrintCommandLine(runState->commandLine);
XLALH5FileAddStringAttribute(group,"CommandLine",cl);
XLALH5FileClose(group);
XLALH5FileClose(output);
LALInferencePrintCheckpointFileInfo(runState->outFileName);
......
......@@ -213,7 +213,6 @@ def weight_and_combine(pos_chains, verbose=False):
metadata[run_level]['log_evidence'] = final_log_evidence
metadata[run_level]['log_noise_evidence'] = final_log_noise_evidence
metadata[run_level]['log_max_likelihood'] = final_posterior['logl'].max()
# This has already been burned-in and downsampled,
# remove the cycle column to stop cbcBayesPosProc
# from doing it again.
......@@ -281,5 +280,14 @@ if __name__ == '__main__':
for colname in final_posterior.columns:
metadata[path_to_samples].pop(colname, None)
# For metadata stored as a list of strings, keep only the first entry
# (numeric averaging is disabled below — see the commented-out lines).
for level in metadata:
for key in metadata[level]:
#if isinstance(metadata[level][key], list) and all(isinstance(x, (int,float)) for x in metadata[level][key]):
# metadata[level][key] = mean(metadata[level][key])
if isinstance(metadata[level][key], list) and all(isinstance(x, (str)) for x in metadata[level][key]):
print("Warning: only printing the first of the %d entries found for metadata %s/%s. You can find the whole list in the headers of individual hdf5 output files\n"%(len(metadata[level][key]),level,key))
metadata[level][key] = metadata[level][key][0]
write_samples(final_posterior, opts.pos,
path=path_to_samples, metadata=metadata)
......@@ -56,7 +56,9 @@ def read_nested_from_hdf5(nested_path_list, strict_versions=True):
for key in metadata[level]:
if isinstance(metadata[level][key], list) and all(isinstance(x, (int,float)) for x in metadata[level][key]):
metadata[level][key] = mean(metadata[level][key])
elif isinstance(metadata[level][key], list) and all(isinstance(x, (str)) for x in metadata[level][key]):
print("Warning: only printing the first of the %d entries found for metadata %s/%s. You can find the whole list in the headers of individual hdf5 output files\n"%(len(metadata[level][key]),level,key))
metadata[level][key] = metadata[level][key][0]
log_noise_evidence = reduce(logaddexp, log_noise_evidences) - log(len(log_noise_evidences))
log_max_likelihood = max(log_max_likelihoods)
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment