Commit e1fa1afb authored by Anchal Gupta's avatar Anchal Gupta

Using 37C cavity temperature.

parent e947e5ee
@@ -621,8 +621,8 @@
"outputs": [],
"source": [
"lowNoiseMeasLog = 'lowNoiseMeasWithSlope.pkl'\n",
"SavedPSDsDir = eu('~/Git/cit_ctnlab/ctn_noisebudget/Data/SavedPSDs_20200805/')\n",
"SavedPSDs = eu('~/Git/cit_ctnlab/ctn_noisebudget/Data/SavedPSDs_20200805/SavedPSDs_20200805_192122.csv')"
"SavedPSDsDir = eu('~/Git/cit_ctnlab/ctn_noisebudget/Data/SavedPSDs_20201119/')\n",
"SavedPSDs = eu('~/Git/cit_ctnlab/ctn_noisebudget/Data/SavedPSDs_20201119/SavedPSDs_20201125_144250.csv')"
]
},
{
@@ -1238,7 +1238,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.3"
"version": "3.7.6"
}
},
"nbformat": 4,
......
%% Cell type:code id: tags:
``` python
import matplotlib.pyplot as plt # For plotting
from matplotlib.backends.backend_pdf import PdfPages  # For saving figures to a single pdf
from matplotlib import cm
figlist = []
#*******************************************************************************************************
#Setting RC Parameters for figure size and fontsizes
import matplotlib.pylab as pylab
params = {'figure.figsize': (16, 12),
          'text.usetex': False,
'font.family': 'serif',
'font.serif': 'Georgia',
'font.size': 20,
'xtick.direction': 'in',
'ytick.direction': 'in',
'xtick.labelsize': 'xx-large',
'ytick.labelsize': 'xx-large',
'axes.labelsize': 'xx-large',
'axes.titlesize':'medium',
'axes.grid.axis': 'both',
'axes.grid.which': 'both',
'axes.grid': True,
'grid.color': 'xkcd:cement',
'grid.alpha': 0.3,
'lines.markersize': 12,
'lines.linewidth': 2.0,
'legend.borderpad': 0.2,
'legend.fancybox': True,
'legend.fontsize': 'large',
'legend.framealpha': 0.8,
'legend.handletextpad': 0.5,
'legend.labelspacing': 0.33,
'legend.loc': 'best',
'savefig.dpi': 140,
'savefig.bbox': 'tight',
'pdf.compression': 9}
pylab.rcParams.update(params)
#********************************************************************************************************
from noiseBudgetModule import noiseBudget
import numpy as np
from uncertainties import ufloat as uf
from uncertainties import unumpy as unp
import scipy.constants as scc
from scipy import signal
from scipy.stats import skewnorm
from scipy.optimize import curve_fit
import os
from os.path import expanduser as eu
import time
from collections.abc import Iterable
from IPython.display import clear_output
import sys
import yaml
import pickle
```
%% Cell type:markdown id: tags:
---
---
## Prior Distribution
%% Cell type:code id: tags:
``` python
# Normally distributed priors based on Penn et al. measurements;
# each returns the log of an (unnormalized) Gaussian.
def priorNphiB(phiB):
    priorMean = 5.33e-4
    priorStd = 2.7e-4
    return -0.5*((priorMean - phiB)/priorStd)**2
def priorNphiS(phiS):
    priorMean = 2.6e-7
    priorStd = 2.6e-7
    return -0.5*((priorMean - phiS)/priorStd)**2
# Uniformly distributed prior
def priorUphiB(phiB):
if phiB>0 and phiB<200e-5:
P_B = 0
else:
P_B = -np.inf
return P_B
def priorUphiS(phiS):
if phiS>0 and phiS<94e-7:
P_S = 0
else:
P_S = -np.inf
return P_S
def priorUphiBslope(phiBslope):
if phiBslope>-0.5 and phiBslope<1.5:
P_phiBslope = 0
else:
P_phiBslope = -np.inf
return P_phiBslope
def logprior(BulkLA, BulkLAslope):
    # Grid of log-prior values: Gaussian in bulk loss angle, uniform in slope
    logpriorDist = np.zeros((len(BulkLAslope), len(BulkLA)))
for ii in range(len(BulkLAslope)):
for jj in range(len(BulkLA)):
logpriorDist[ii][jj] = priorNphiB(BulkLA[jj]) + priorUphiBslope(BulkLAslope[ii])
return logpriorDist
```
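%% Cell type:markdown id: tags:
As a quick sanity check (not part of the original analysis), the log-prior can be evaluated on a coarse hypothetical grid; rows index the slope, columns the bulk loss angle:
%% Cell type:code id: tags:
``` python
# Coarse test grid (assumed values matching the sweep ranges used further below)
BulkLAtest = np.arange(4e-4, 9e-4, 5e-5)
BulkLAslopeTest = np.arange(0.0, 1, 0.1)
lp = logprior(BulkLAtest, BulkLAslopeTest)
print(lp.shape)  # (len(BulkLAslopeTest), len(BulkLAtest))
```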
%% Cell type:markdown id: tags:
---
## Likelihood Distribution
%% Cell type:markdown id: tags:
### Time Series to PSD distribution conversion
%% Cell type:code id: tags:
``` python
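# Pipeline: slice the beatnote time series into overlapping segments, take a
# median-averaged Welch PSD of each segment, rebin in frequency to reduce
# bin-to-bin correlations, and keep only the band of interest.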
def tsDatatoWelchArray(tsData, noverlap=None, timeSeg=5, rebinSize=5, skip=3,
lowerCutoffFreq=70, upperCutoffFreq=600,
average='median'):
timeSeries = tsData[:, 0]
noiseSig = tsData[:, 1]
SampleRate = 1/(timeSeries[1] - timeSeries[0])
nperseg = int(timeSeg * SampleRate)
#Sort data into rows with number in each row = to nperseg
# length of columns
col_len=0
end_edge=nperseg
if noverlap is None:
noverlap = nperseg//2
while(end_edge<len(noiseSig)):
end_edge=end_edge+nperseg-noverlap
col_len=col_len+1
# make array of row length nperseg and column length however much data
# will fit fully in row
sorted_data = np.zeros((col_len,nperseg))
# make array for PSD will have same col len but
# row len = max freq/freq_spacing + 1
# or (samplerate/2) /(samplerate/nperseg) + 1 which = nperseg/2 +1
welch_array = np.zeros((col_len, nperseg//2 + 1))
#Move through signal data array populating array
next_start=0
for ii in range(col_len):
sorted_data[ii, :] = noiseSig[next_start: next_start+nperseg]
next_start = next_start+nperseg-noverlap
# Do row-wise welch
for k in range(col_len):
ff, welch_array[k, :] = signal.welch(sorted_data[k, :], SampleRate,
window = 'hann',
nperseg = nperseg,
nfft = None, detrend = False,
return_onesided = True,
scaling = 'density', axis = -1,
average = 'median')
# Rebin to remove correlations
redff = np.zeros(len(ff[skip:])//rebinSize)
redWelchArr = np.zeros((col_len, len(redff)))
for ii in range(len(redff)):
startInd = skip + rebinSize * ii
endInd = skip + rebinSize * (ii + 1)
redff[ii] = np.mean(ff[startInd:endInd])
for k in range(col_len):
if average=='mean':
redWelchArr[k, ii] = np.mean(welch_array[k, startInd:endInd])
elif average=='median':
redWelchArr[k, ii] = np.median(welch_array[k, startInd:endInd])
# Reduce PSD to frequencies of interest
redWelchArr = redWelchArr[:, redff>lowerCutoffFreq]
redff = redff[redff>lowerCutoffFreq]
redWelchArr = redWelchArr[:, redff<upperCutoffFreq]
redff = redff[redff<upperCutoffFreq]
return redff, redWelchArr
def logWAtoPSDDist(ff, logWelchArray, nbins=10):
psd_dist = []
for ii, f in enumerate(ff):
logWA = logWelchArray[:, ii]
hist, bin_edges = np.histogram(logWA, bins=nbins)
hist = hist/np.max(hist)
bin_centers = 0.5*(bin_edges[1:] + bin_edges[0:-1])
Q1 = np.percentile(logWA, 25)
Q2 = np.percentile(logWA, 50)
Q3 = np.percentile(logWA, 75)
        skewEst = (Q1 + Q3 - 2*Q2)/(Q3 - Q1)  # Bowley's quartile skewness as the seed
popt, pcov = curve_fit(skewgaus, bin_centers, hist, p0=[np.mean(logWA),np.std(logWA), skewEst, 1],
maxfev=10000)
psd_dist += [[f, popt, logWA, hist, bin_centers]]
return psd_dist
def tsDatatoPSDDist(tsData, noverlap=None, timeSeg=5, rebinSize=5, skip=3,
lowerCutoffFreq=70, upperCutoffFreq=600, nbins=10,
average='median'):
ff, WelchArray = tsDatatoWelchArray(tsData, noverlap=noverlap,
timeSeg=timeSeg, rebinSize=rebinSize,
skip=skip, lowerCutoffFreq=lowerCutoffFreq,
upperCutoffFreq=upperCutoffFreq,
average=average)
return logWAtoPSDDist(ff, np.log(WelchArray), nbins)
def tsFiletoPSDDist(tsfile, noverlap=None, timeSeg=5, rebinSize=5, skip=3,
lowerCutoffFreq=70, upperCutoffFreq=600, nbins=10,
average='median', redo=False):
if isinstance(tsfile, np.ndarray):
tsData = tsfile
elif tsfile.find('TimeSeries') != -1:
tsfile = eu(tsfile)
psdDistFile = tsfile.replace('TimeSeries', 'PSDDist')
if os.path.exists(psdDistFile) and not redo:
print("Found calculated PSD Distribution. Reading that...")
psdDistData = np.loadtxt(psdDistFile)
psd_dist = []
for ii in range(np.shape(psdDistData)[0]):
psd_dist += [[psdDistData[ii, 0], list(psdDistData[ii, 1:]) ]]
return psd_dist
print('Reading time series data...')
tsData = np.loadtxt(tsfile)
print('Done.')
else:
raise RuntimeError('Not a time series file')
print('Fitting PSD Data Distribution...')
psd_dist_1 = tsDatatoPSDDist(tsData, noverlap=noverlap, timeSeg=timeSeg,
lowerCutoffFreq=lowerCutoffFreq, upperCutoffFreq=101,
rebinSize=rebinSize, skip=skip, nbins=nbins,
average=average)
psd_dist_2 = tsDatatoPSDDist(tsData, noverlap=noverlap, timeSeg=timeSeg/10,
lowerCutoffFreq=101, upperCutoffFreq=upperCutoffFreq,
rebinSize=rebinSize, skip=skip, nbins=nbins,
average=average)
print('Done')
psd_dist = psd_dist_1 + psd_dist_2
if isinstance(tsfile, str):
if not os.path.exists(psdDistFile):
print('Storing PSD Distribution Data for later use...')
psdDistData = np.zeros((len(psd_dist), 5))
for ii in range(len(psd_dist)):
psdDistData[ii, 0] = psd_dist[ii][0]
psdDistData[ii, 1] = psd_dist[ii][1][0]
psdDistData[ii, 2] = psd_dist[ii][1][1]
psdDistData[ii, 3] = psd_dist[ii][1][2]
psdDistData[ii, 4] = psd_dist[ii][1][3]
np.savetxt(psdDistFile, psdDistData)
return psd_dist
```
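%% Cell type:markdown id: tags:
A minimal smoke test of the segmentation and rebinning on synthetic white noise (assumed 16384 Hz sampling; not lab data):
%% Cell type:code id: tags:
``` python
# Hypothetical check: 60 s of unit-variance white noise in the lab data format
fs = 16384
tt = np.arange(0, 60, 1/fs)
fakeTs = np.column_stack((tt, np.random.randn(len(tt))))
ffTest, WAtest = tsDatatoWelchArray(fakeTs)
print(ffTest.shape, WAtest.shape)  # one PSD row per 5 s segment, 70-600 Hz band
```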
%% Cell type:markdown id: tags:
### Supporting functions for likelihood calculation
%% Cell type:code id: tags:
``` python
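# Per-layer, per-unit-loss-angle factors of the coating Brownian noise PSD;
# BulkContSlope/ShearContSlope below weight these by the layer factors
# q_Bk/q_Sk and convert to beatnote frequency noise via fConv.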
# Bulk
def S_Bk_red(freq, nosbud):
S_Bk_num = (4*scc.Boltzmann*nosbud.temp*nosbud.coatStack.WaveLength
* (1 - nosbud.coatStack.Poisson
- 2*nosbud.coatStack.Poisson**2))
S_Bk_den = (3*np.pi*freq*nosbud.coatStack.Young
* ((1 - nosbud.coatStack.Poisson)**2)
* nosbud.coatStack.Aeff)
# Leaving out substrate from return value
return (S_Bk_num/S_Bk_den)[0:-1]
# Shear
def S_Sk_red(freq, nosbud):
S_Sk_num = (4*scc.Boltzmann*nosbud.temp*nosbud.coatStack.WaveLength
* (1 - nosbud.coatStack.Poisson
- 2*nosbud.coatStack.Poisson**2))
S_Sk_den = (3*np.pi*freq*nosbud.coatStack.Young
* ((1 - nosbud.coatStack.Poisson)**2)
* nosbud.coatStack.Aeff)
# Leaving out substrate from return value
return (S_Sk_num/S_Sk_den)[0:-1]
def BulkContSlope(freq, nosbud):
if isinstance(freq, Iterable):
return unp.nominal_values(np.array([BulkContSlope(f, nosbud) for f in freq]))
return nosbud.nom*(nosbud.fConv**2)*np.sum(nosbud.q_Bk*S_Bk_red(freq, nosbud))
def ShearContSlope(freq, nosbud):
if isinstance(freq, Iterable):
return unp.nominal_values(np.array([ShearContSlope(f, nosbud) for f in freq]))
return nosbud.nom*(nosbud.fConv**2)*np.sum(nosbud.q_Sk*S_Sk_red(freq, nosbud))
```
%% Cell type:code id: tags:
``` python
def Srest(freq, nosbud):
rest = ['coatTO', 'subBr', 'subTE', 'pdhShot',
'pllOsc', 'pllReadout', 'seismic', 'photoThermal','resNPRO']
S_rest = np.zeros_like(freq)
for psd in rest:
PSDest = unp.nominal_values(nosbud.PSDList[psd][0])
S_rest = S_rest + np.interp(freq, nosbud.freq, PSDest)
return S_rest
```
%% Cell type:code id: tags:
``` python
def rem60HzHarm(psd_dist, remNeighbors=False):
ff = np.array([ele[0] for ele in psd_dist])
remInd = []
# Remove any repeated index
for ii in range(1, len(ff)):
if ff[ii] == ff[ii-1]:
remInd = list(set(remInd + [ii]))
# Remove bad region known to have peaks due to RIN
for ii in range(1, len(ff)):
if ff[ii] > 260 and ff[ii] < 290:
remInd = list(set(remInd + [ii]))
# Remove 60 Hz harmonics and neighbours
sixtyHarm = np.arange(60, 1000, 60)
for har in sixtyHarm:
if har > ff.min() and har < ff.max():
if np.min(np.abs(ff - har)) < 1:
closestInd = np.argmin(np.abs(ff - har))
remInd = list(set(remInd + [closestInd]))
if closestInd < len(ff)-1:
remInd = list(set(remInd + [closestInd + 1]))
if closestInd > 0:
remInd = list(set(remInd + [closestInd - 1]))
for ind in sorted(remInd, reverse=True):
del psd_dist[ind]
return psd_dist
def skewgaus(x, x0, sigma, skewness, prefac):
return prefac*skewnorm.pdf(x, skewness, loc=x0, scale=sigma)
def loggaus(x, mean, std):
return -0.5 * ((x - mean) / std)**2
```
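%% Cell type:markdown id: tags:
A small illustration (assumed values, not measured data) of the skew-Gaussian histogram fit applied to each frequency bin's log-PSD samples:
%% Cell type:code id: tags:
``` python
# Draw a skewed sample standing in for one bin's log-PSD values across segments
sample = skewnorm.rvs(4, loc=-2.0, scale=0.3, size=2000)
hist, bin_edges = np.histogram(sample, bins=10)
hist = hist/np.max(hist)
bin_centers = 0.5*(bin_edges[1:] + bin_edges[:-1])
popt, pcov = curve_fit(skewgaus, bin_centers, hist,
                       p0=[sample.mean(), sample.std(), 1, 1], maxfev=10000)
print(popt)  # fitted [x0, sigma, skewness, prefac]
```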
%% Cell type:markdown id: tags:
### Log-likelihood calculation
%% Cell type:code id: tags:
``` python
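# Log-likelihood over the (BulkLA, BulkLAslope) grid: at each grid point the
# model log-PSD (rest of the noise budget + bulk term with a power-law
# frequency slope + fixed shear term) is scored against the per-bin
# skew-Gaussian fits of the measured log-PSD.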
def loglikelihood(BulkLA, BulkLAslope, ShearLA, BNfile, nosbud, useBNfileOnly=False, overallCompPerc=None):
X, Y = np.meshgrid(BulkLA, BulkLAslope)
loglkhDist = np.zeros_like(X)
tsfile = BNfile.replace('Spectrum', 'TimeSeries')
psdDistFile = BNfile.replace('Spectrum', 'PSDDist')
if ((os.path.exists(tsfile) or os.path.exists(psdDistFile)) and not useBNfileOnly):
psd_dist = tsFiletoPSDDist(tsfile)
psd_dist = rem60HzHarm(psd_dist)
ff = np.array([ele[0] for ele in psd_dist])
S_rest = Srest(ff, nosbud)
X_B = BulkContSlope(ff, nosbud)
X_S = ShearContSlope(ff, nosbud)
ct = 0
perc = 0
for ii in range(len(BulkLAslope)):
for jj in range(len(BulkLA)):
estArr = np.zeros_like(ff)
tempLogLkh = np.zeros_like(ff)
for kk, element in enumerate(psd_dist):
phiB = BulkLA[jj] * ((ff[kk]/100)**(BulkLAslope[ii]))
estArr[kk] = np.log(S_rest[kk] + X_B[kk] * phiB + X_S[kk] * ShearLA)
tempLogLkh[kk] = np.log(skewgaus(estArr[kk], *element[1]))
infPresent = (np.sum(tempLogLkh) == -np.inf)
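                # Patch -inf bins (zero fitted likelihood) by averaging neighbouring bins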
while infPresent:
for kk in range(len(ff)):
if tempLogLkh[kk] == -np.inf:
if kk > 0 and kk < len(ff) - 1:
tempLogLkh[kk] = 0.5 * (tempLogLkh[kk-1] + tempLogLkh[kk+1])
elif kk == 0:
tempLogLkh[kk] = tempLogLkh[kk+1]
elif kk == len(ff) - 1:
tempLogLkh[kk] = tempLogLkh[kk-1]
infPresent = (np.sum(tempLogLkh) == -np.inf)
loglkhDist[ii, jj] = np.sum(tempLogLkh)
ct = ct + 1
lastperc = perc
perc = np.round(ct*100/len(BulkLAslope)/len(BulkLA))
if perc != lastperc:
clear_output()
if overallCompPerc is not None:
print('Overall Progress: {}% Completed'.format(str(np.round(overallCompPerc))))
print('This calculation: {}% Completed'.format(perc))
sys.stdout.flush()
return loglkhDist
else:
print('Using Beatnote Spectrum file...')
data = np.loadtxt(BNfile)
ff = data[:, 0]
measLogPSD = np.log(data[:, 1]**2)
measLogPSDstd = (np.log(data[:, 3]**2) - np.log(data[:, 2]**2))/2
ff, measLogPSD, measLogPSDstd = cleanData(ff, measLogPSD, measLogPSDstd)
S_rest = Srest(ff, nosbud)
X_B = BulkContSlope(ff, nosbud)
X_S = ShearContSlope(ff, nosbud)
ct = 0
perc = 0
        for ii in range(len(BulkLAslope)):
            for jj in range(len(BulkLA)):
                estArr = np.zeros_like(ff)
                for kk in range(len(ff)):
                    phiB = BulkLA[jj] * ((ff[kk]/100)**(BulkLAslope[ii]))
                    estArr[kk] = np.log(S_rest[kk] + X_B[kk] * phiB + X_S[kk] * ShearLA)
                    # loggaus already returns a log-probability; add it directly
                    loglkhDist[ii, jj] += loggaus(estArr[kk], measLogPSD[kk], measLogPSDstd[kk])
ct = ct + 1
lastperc = perc
perc = np.round(ct*100/len(BulkLAslope)/len(BulkLA))
if perc != lastperc:
clear_output()
if overallCompPerc is not None:
print('Overall Progress: {}% Completed'.format(str(np.round(overallCompPerc))))
print('This calculation: {}% Completed'.format(perc))
sys.stdout.flush()
return loglkhDist
```
%% Cell type:markdown id: tags:
## Sweep through the measured data
Calculate the integrated noise for each dataset from the median beatnote spectrum computed during the measurement.
Take the top 10 lowest-noise measurements, run the full Bayesian analysis on each, and choose the measurement that gives the lowest most-likely bulk loss angle. A compact sketch of the ranking step appears after the helper functions below.
%% Cell type:code id: tags:
``` python
def cleanData(ff, beat, beatstd, lowerFreqCutOff=70, upperFreqCutOff=600):
    '''
    Restrict to the [lowerFreqCutOff, upperFreqCutOff] band, then remove
    repeated bins, the 260-290 Hz region known to have RIN peaks, and
    60 Hz harmonics along with their neighbouring bins.
    '''
beat = beat[ff < upperFreqCutOff]
beatstd = beatstd[ff < upperFreqCutOff]
ff = ff[ff < upperFreqCutOff]
beat = beat[ff > lowerFreqCutOff]
beatstd = beatstd[ff > lowerFreqCutOff]
ff = ff[ff > lowerFreqCutOff]
remInd = []
# Remove any repeated index
for ii in range(1, len(ff)):
if ff[ii] == ff[ii-1]:
remInd = list(set(remInd + [ii]))
# Remove bad region
for ii in range(1, len(ff)):
if ff[ii] > 260 and ff[ii] < 290:
remInd = list(set(remInd + [ii]))
# Remove 60 Hz harmonics and neighbours
sixtyHarm = np.arange(60, 1000, 60)
for har in sixtyHarm:
if har > ff.min() and har < ff.max():
closestInd = np.argmin(np.abs(ff - har))
remInd = list(set(remInd + [closestInd]))
if closestInd < len(ff)-1:
remInd = list(set(remInd + [closestInd + 1]))
if closestInd > 0:
remInd = list(set(remInd + [closestInd - 1]))
return np.delete(ff, remInd), np.delete(beat, remInd), np.delete(beatstd, remInd)
def updateFromTransRIN(nosbud, BNfile):
transRINfile = BNfile.replace('Spectrum', 'TransRIN')
if os.path.exists(transRINfile):
RINdata = np.loadtxt(transRINfile)
with open(transRINfile, 'r') as f:
header = f.readline()
temp = header[header.find('North DC Val:')+13:].replace(' ', '')
NDC = float(temp.split('Volts')[0])
temp = header[header.find('South DC Val:')+13:].replace(' ', '')
SDC = float(temp.split('Volts')[0])
cpb = int((np.shape(RINdata)[1]-1)/2)
RINdata[:, 1:cpb+1] = RINdata[:, 1:cpb+1]/NDC
RINdata[:, cpb+1:] = RINdata[:, cpb+1:]/SDC
NFin = uf(16700, 1400)
SFin = uf(15100, 340)
PincN = 3.9 * NDC * 1e-3 # Using calibration from 3/20/20
PincS = 4.005 * SDC * 1e-3 # Using calibration from 3/16/20
PcircN = PincN * NFin / np.pi
PcircS = PincS * SFin / np.pi
nosbud.updatePhotoThermalNoise(RINdata, uf(6, 1) * 1e-6, Pcirc=[PcircN, PcircS])
nosbud.updatePDHShotNoise([PincN, PincS])
return nosbud
```
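%% Cell type:markdown id: tags:
A compact sketch of the ranking step described above (`rankSpectra` is a hypothetical helper built on `cleanData`; the actual directory scan is in the commented-out cell below):
%% Cell type:code id: tags:
``` python
# Rank Spectrum files by integrated cleaned beatnote noise (sketch)
def rankSpectra(fileList):
    ranking = {}
    for fn in fileList:
        data = np.loadtxt(fn)
        ffc, beatc, _ = cleanData(data[:, 0], data[:, 1], data[:, 1])
        intNoise = np.sum(beatc)
        ranking[fn] = intNoise if intNoise > 0 else np.inf
    return sorted(ranking, key=ranking.get)
```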
%% Cell type:code id: tags:
``` python
allMeasurements = {}
''' Uncomment if you have all files and wish to scan them all
dataDir = ['~/Git/cit_ctnlab/ctn_labdata/data/20200313_SuperBNMeasurement/Data/',
'~/Git/cit_ctnlab/ctn_labdata/data/20200420_SuperBNMeasurement/Data/',
'~/Git/cit_ctnlab/ctn_labdata/data/20200511_SuperBNMeasurement/Data/',
'~/Git/cit_ctnlab/ctn_labdata/data/20200521_SuperBNMeasurement/Data/',
'~/Git/cit_ctnlab/ctn_labdata/data/20200523_SuperBNMeasurementSR/Data/',
'~/Git/cit_ctnlab/ctn_labdata/data/20200528_SuperBNMeasurementSR/Data/',
'~/Git/cit_ctnlab/ctn_labdata/data/20200602_SuperBNMeasurementSR/Data/',
'~/Git/cit_ctnlab/ctn_labdata/data/20200610_SuperBNMeasurementSR/']
#'~/Git/cit_ctnlab/ctn_noisebudget/ScienceRun/20200522/',
#'~/Git/cit_ctnlab/ctn_noisebudget/ScienceRun/20200527/',
#'~/Git/cit_ctnlab/ctn_noisebudget/Data/dailyBeatNoteData/']
subDir = []
for dr in dataDir:
for fn in os.listdir(eu(dr)):
subdr = os.path.join(eu(dr), fn)
if os.path.isdir(subdr) and fn.find('Data')==0:
subDir += [subdr]
dataDir += subDir
dataDir = [eu(dr) for dr in dataDir]
for direc in dataDir:
fl = [fn for fn in os.listdir(direc) if fn.find('Spectrum')!=-1]
for fn in fl:
data = np.loadtxt(os.path.join(direc, fn))
ffclean, beatclean, xx = cleanData(data[:, 0], data[:, 1], data[:, 1])
intNoise = np.sum(beatclean)
if intNoise == 0:
intNoise = np.inf
allMeasurements[os.path.join(direc, fn)] = intNoise
'''
```
%% Cell type:code id: tags:
``` python
# Keep the measurements with the lowest integrated noise
lowestNoiseFiles = sorted(allMeasurements, key=allMeasurements.get)[:60]
for ii, fn in enumerate(lowestNoiseFiles):
fn = '~' + fn[fn.find('/Git'):]
lowestNoiseFiles[ii] = fn
print('----------------------------------------------------------------------------')
print('Rank:', ii+1)
print(os.path.basename(fn))
tsFiletoPSDDist(fn.replace('Spectrum', 'TimeSeries'))
```
%% Cell type:code id: tags:
``` python
lowNoiseMeasLog = 'lowNoiseMeasWithSlope.pkl'
SavedPSDsDir = eu('~/Git/cit_ctnlab/ctn_noisebudget/Data/SavedPSDs_20201119/')
SavedPSDs = eu('~/Git/cit_ctnlab/ctn_noisebudget/Data/SavedPSDs_20201119/SavedPSDs_20201125_144250.csv')
```
%% Cell type:code id: tags:
``` python
cwd = eu('~/Git/cit_ctnlab/ctn_noisebudget/BayesianAnalysis')
os.chdir(SavedPSDsDir)
nosbud = noiseBudget(params='CTN_Noise_Budget_Diff_Loss_Angles.yml')
os.chdir(cwd)
nosbud.calculateCoatingBrownianNoise();
```
%% Cell type:code id: tags:
``` python
def updateLog(lowNoiseMeasLog=lowNoiseMeasLog, nosbud=nosbud, SavedPSDs=SavedPSDs):
with open(lowNoiseMeasLog, 'rb') as p:
lowNoiseMeas = pickle.load(p)
minmlBLA = np.inf
BulkLA = lowNoiseMeas['BulkLA']
BulkLAslope = lowNoiseMeas['BulkLAslope']
ShearLA = lowNoiseMeas['ShearLA']
priorDist = logprior(BulkLA, BulkLAslope)
for fnind, fn in enumerate(lowNoiseMeas):
overallCompPerc = 100 * fnind / len(lowNoiseMeas)
if fn.find('Spectrum') != -1:
nosbud.loadPSD(SavedPSDs,
overridePresentFreq=True, overridePresentPSD=True)
nosbud = updateFromTransRIN(nosbud, eu(fn))
nosbud.calculateTotalEstNoise()
lkhDist = loglikelihood(BulkLA, BulkLAslope, ShearLA, eu(fn), nosbud, overallCompPerc=overallCompPerc)
bayProbDist = priorDist + lkhDist
mlBLAslopeind, mlBLAind = np.unravel_index(np.argmax(bayProbDist), np.shape(bayProbDist))
mlBLA = BulkLA[mlBLAind]
mlBLAslope = BulkLAslope[mlBLAslopeind]
totArea = np.sum(np.exp(bayProbDist[mlBLAslopeind, :]))
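            # Widen a symmetric window about the peak until it encloses 90% of this slice's probability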
for ii in range(1, len(BulkLA)//2):
if np.sum(np.exp(bayProbDist[mlBLAslopeind, mlBLAind-ii:mlBLAind+ii])) > 0.9*totArea:
BLAci90ll = BulkLA[mlBLAind - ii + 1]
BLAci90ul = BulkLA[mlBLAind + ii - 1]
BLAci90pm = (BLAci90ul - BLAci90ll)/2
break
totArea = np.sum(np.exp(bayProbDist[:, mlBLAind]))
for ii in range(1, len(BulkLAslope)//2):
if np.sum(np.exp(bayProbDist[mlBLAslopeind-ii:mlBLAslopeind+ii, mlBLAind])) > 0.9*totArea:
if mlBLAslopeind - ii + 1 < 0:
BLAslopeci90ul = BulkLAslope[mlBLAslopeind + ii - 1]
BLAslopeci90pm = BLAslopeci90ul - mlBLAslope
                    elif mlBLAslopeind + ii - 1 >= len(BulkLAslope):
BLAslopeci90ll = BulkLAslope[mlBLAslopeind - ii + 1]
BLAslopeci90pm = mlBLAslope - BLAslopeci90ll
else:
BLAslopeci90ll = BulkLAslope[mlBLAslopeind - ii + 1]
BLAslopeci90ul = BulkLAslope[mlBLAslopeind + ii - 1]
BLAslopeci90pm = (BLAslopeci90ul - BLAslopeci90ll)/2
break
lowNoiseMeas[fn] = {'mlBLA': mlBLA,
'BLAci90pm': BLAci90pm,
'mlBLAslope': mlBLAslope,
'BLAslopeci90pm': BLAslopeci90pm,
'lkhDist': lkhDist}
if mlBLA < minmlBLA:
BNfile = fn
minmlBLA = mlBLA
with open(lowNoiseMeasLog, 'wb') as logFile:
pickle.dump(lowNoiseMeas, logFile)
```
%% Cell type:code id: tags:
``` python
def updateLog2(lowNoiseMeasLog=lowNoiseMeasLog, nosbud=nosbud, SavedPSDs=SavedPSDs):
# ONLY UPDATE PRIOR
with open(lowNoiseMeasLog, 'rb') as p:
lowNoiseMeas = pickle.load(p)
minmlBLA = np.inf
BulkLA = lowNoiseMeas['BulkLA']
BulkLAslope = lowNoiseMeas['BulkLAslope']
ShearLA = lowNoiseMeas['ShearLA']
priorDist = logprior(BulkLA, BulkLAslope)
lowNoiseMeas['priorDist'] = priorDist
for fnind, fn in enumerate(lowNoiseMeas):
overallCompPerc = 100 * fnind / len(lowNoiseMeas)
if fn.find('Spectrum') != -1:
#nosbud.loadPSD(SavedPSDs,
# overridePresentFreq=True, overridePresentPSD=True)
#nosbud = updateFromTransRIN(nosbud, eu(fn))
#nosbud.calculateTotalEstNoise()
lkhDist = lowNoiseMeas[fn]['lkhDist']
#lkhDist = loglikelihood(BulkLA, BulkLAslope, ShearLA, eu(fn), nosbud, overallCompPerc=overallCompPerc)
bayProbDist = priorDist + lkhDist
mlBLAslopeind, mlBLAind = np.unravel_index(np.argmax(bayProbDist), np.shape(bayProbDist))
mlBLA = BulkLA[mlBLAind]
mlBLAslope = BulkLAslope[mlBLAslopeind]
totArea = np.sum(np.exp(bayProbDist[mlBLAslopeind, :]))
for ii in range(1, len(BulkLA)//2):
if np.sum(np.exp(bayProbDist[mlBLAslopeind, mlBLAind-ii:mlBLAind+ii])) > 0.9*totArea:
BLAci90ll = BulkLA[mlBLAind - ii + 1]
BLAci90ul = BulkLA[mlBLAind + ii - 1]
BLAci90pm = (BLAci90ul - BLAci90ll)/2
break
totArea = np.sum(np.exp(bayProbDist[:, mlBLAind]))
for ii in range(1, len(BulkLAslope)//2):
if np.sum(np.exp(bayProbDist[mlBLAslopeind-ii:mlBLAslopeind+ii, mlBLAind])) > 0.9*totArea:
if mlBLAslopeind - ii + 1 < 0:
BLAslopeci90ul = BulkLAslope[mlBLAslopeind + ii - 1]
BLAslopeci90pm = BLAslopeci90ul - mlBLAslope
                    elif mlBLAslopeind + ii - 1 >= len(BulkLAslope):
BLAslopeci90ll = BulkLAslope[mlBLAslopeind - ii + 1]
BLAslopeci90pm = mlBLAslope - BLAslopeci90ll
else:
BLAslopeci90ll = BulkLAslope[mlBLAslopeind - ii + 1]
BLAslopeci90ul = BulkLAslope[mlBLAslopeind + ii - 1]
BLAslopeci90pm = (BLAslopeci90ul - BLAslopeci90ll)/2
break
lowNoiseMeas[fn] = {'mlBLA': mlBLA,
'BLAci90pm': BLAci90pm,
'mlBLAslope': mlBLAslope,
'BLAslopeci90pm': BLAslopeci90pm,
'lkhDist': lkhDist}
if mlBLA < minmlBLA:
BNfile = fn
minmlBLA = mlBLA
with open(lowNoiseMeasLog, 'wb') as logFile:
pickle.dump(lowNoiseMeas, logFile)
```
%% Cell type:code id: tags:
``` python
try:
with open(lowNoiseMeasLog, 'rb') as p:
lowNoiseMeas = pickle.load(p)
except BaseException:
BulkLA = np.arange(4e-4, 9e-4, 1e-6)
ShearLA = 5.2e-7
BulkLAslope = np.arange(0.0, 1, 0.01)
priorDist = logprior(BulkLA, BulkLAslope)
lowNoiseMeas = {'BulkLA': BulkLA,
'ShearLA': ShearLA,
'BulkLAslope': BulkLAslope,
'priorDist': priorDist}
try:
minmlBLA = lowNoiseMeas['minmlBLA']
except BaseException:
minmlBLA = np.inf
BulkLA = lowNoiseMeas['BulkLA']
BulkLAslope = lowNoiseMeas['BulkLAslope']
ShearLA = lowNoiseMeas['ShearLA']
priorDist = lowNoiseMeas['priorDist']
try:
for fnind, fn in enumerate(lowestNoiseFiles):
        overallCompPerc = 100 * fnind / len(lowestNoiseFiles)
if fn not in lowNoiseMeas:
nosbud.loadPSD(SavedPSDs,
overridePresentFreq=True, overridePresentPSD=True)
nosbud = updateFromTransRIN(nosbud, eu(fn))
nosbud.calculateTotalEstNoise()
lkhDist = loglikelihood(BulkLA, BulkLAslope, ShearLA, eu(fn), nosbud, overallCompPerc=overallCompPerc)
bayProbDist = priorDist + lkhDist
mlBLAslopeind, mlBLAind = np.unravel_index(np.argmax(bayProbDist), np.shape(bayProbDist))
mlBLA = BulkLA[mlBLAind]
mlBLAslope = BulkLAslope[mlBLAslopeind]
totArea = np.sum(np.exp(bayProbDist[mlBLAslopeind, :]))
for ii in range(1, len(BulkLA)//2):
if np.sum(np.exp(bayProbDist[mlBLAslopeind, mlBLAind-ii:mlBLAind+ii])) > 0.9*totArea:
BLAci90ll = BulkLA[mlBLAind - ii + 1]
BLAci90ul = BulkLA[mlBLAind + ii - 1]
BLAci90pm = (BLAci90ul - BLAci90ll)/2
break
totArea = np.sum(np.exp(bayProbDist[:, mlBLAind]))
for ii in range(1, len(BulkLAslope)//2):
if np.sum(np.exp(bayProbDist[mlBLAslopeind-ii:mlBLAslopeind+ii, mlBLAind])) > 0.9*totArea:
if mlBLAslopeind - ii + 1 < 0:
BLAslopeci90ul = BulkLAslope[mlBLAslopeind + ii - 1]
BLAslopeci90pm = BLAslopeci90ul - mlBLAslope
                    elif mlBLAslopeind + ii - 1 >= len(BulkLAslope):
BLAslopeci90ll = BulkLAslope[mlBLAslopeind - ii + 1]
BLAslopeci90pm = mlBLAslope - BLAslopeci90ll
else:
BLAslopeci90ll = BulkLAslope[mlBLAslopeind - ii + 1]
BLAslopeci90ul = BulkLAslope[mlBLAslopeind + ii - 1]
BLAslopeci90pm = (BLAslopeci90ul - BLAslopeci90ll)/2
break
lowNoiseMeas[fn] = {'mlBLA': mlBLA,
'BLAci90pm': BLAci90pm,
'mlBLAslope': mlBLAslope,
'BLAslopeci90pm': BLAslopeci90pm,
'lkhDist': lkhDist}
if mlBLA < minmlBLA:
BNfile = fn
minmlBLA = mlBLA
with open(lowNoiseMeasLog, 'wb') as logFile:
pickle.dump(lowNoiseMeas, logFile)
except BaseException:
pass
```
%% Cell type:code id: tags:
``` python
BNfile = lowNoiseMeas['BNfile']
print('Best measurement found:')
print(os.path.basename(BNfile))
mlBLA = lowNoiseMeas[BNfile]['mlBLA']
BLAci90pm = lowNoiseMeas[BNfile]['BLAci90pm']
mlBLAslope = lowNoiseMeas[BNfile]['mlBLAslope']
BLAslopeci90pm = lowNoiseMeas[BNfile]['BLAslopeci90pm']
print('Estimated Bulk Loss Angle {:.2e} +- {:.2e} radians.'.format(mlBLA, BLAci90pm))
print('Estimated Bulk Loss Angle Slope {:.2e} +- {:.2e}.'.format(mlBLAslope, BLAslopeci90pm))
lkhDist = lowNoiseMeas[BNfile]['lkhDist']
bayProbDist = priorDist + lkhDist
```
%% Cell type:code id: tags:
``` python
BLAslopestr = (r'\Phi_\text{B} = ' + r'({:.2f} \pm {:.2f})'.format(mlBLA*1e4, BLAci90pm*1e4)
               + r'(\frac{f}{100\,\text{Hz}})^{' + r'{:.2f} \pm {:.2f}'.format(mlBLAslope, BLAslopeci90pm)
               + r'} \times 10^{-4}')
print(BLAslopestr)
#with open(eu('~/Git/cit_ctnlab/ctn_paper/data/Bulk_Loss_Fit_With_Slope_Value_String.tex'), 'w') as f:
# f.writelines(BLAslopestr)
```
%% Cell type:code id: tags:
``` python
def bayesFactor(model1='lowNoiseMeas.pkl', model2='lowNoiseMeasWithSlope.pkl'):
with open(model1, 'rb') as p:
model1 = pickle.load(p)
minmlBLA = np.inf
for key, value in model1.items():
if key.find('.txt')!=-1:
if value['mlBLA'] < minmlBLA:
minmlBLA = value['mlBLA']
minKey = key
dBLA = model1['BulkLA'][1] - model1['BulkLA'][0]
evidence1 = np.sum(np.exp(model1['priorDist'] + model1[minKey]['lkhDist']) * dBLA)
print('Evidence for model 1 is {:.2e}'.format(evidence1))
with open(model2, 'rb') as p:
model2 = pickle.load(p)
minmlBLA = np.inf
for key, value in model2.items():
if key.find('.txt')!=-1:
if value['mlBLA'] < minmlBLA:
minmlBLA = value['mlBLA']
minKey = key
dBLA = model2['BulkLA'][1] - model2['BulkLA'][0]
dBLAslope = model2['BulkLAslope'][1] - model2['BulkLAslope'][0]
evidence2 = np.sum(np.exp(model2['priorDist'] + model2[minKey]['lkhDist']) * dBLA * dBLAslope)
print('Evidence for model 2 is {:.2e}'.format(evidence2))
    print('Bayes factor for model2:model1 is {:.2e}'.format(evidence2/evidence1))
bayesFactor()
```
%% Cell type:code id: tags:
``` python
fig = plt.figure(figsize=[16,12])
ax = fig.gca()
X, Y = np.meshgrid(BulkLA * 1e4, BulkLAslope)
Z = np.exp(priorDist)
pcm = ax.pcolormesh(X, Y, Z,
#norm=colors.LogNorm(vmin=Z.min(), vmax=Z.max()),
cmap=cm.coolwarm)
ax.set_xlabel('Bulk Loss Angle (' + r'$\times 10^{-4}$' + ' rad)', linespacing=4)
ax.set_ylabel('Bulk Loss Angle Slope', linespacing=4)
ax.ticklabel_format(axis='x', style='sci', scilimits=(0,0))
ax.ticklabel_format(axis='y', style='sci', scilimits=(0,0))
#ax.set_xlim((0, 3e-4))
#ax.set_yli
ax.set_title('Prior probability distribution of Bulk Loss Angle and its power law slope\n')
# Add a color bar which maps values to colors.
fig.colorbar(pcm, shrink=0.5, aspect=5, label=r'$p(\phi_B,\phi_{Bslope})$')
figlist = [fig]
```
%% Cell type:code id: tags:
``` python
fig = plt.figure(figsize=[16,12])
ax = fig.gca()
X, Y = np.meshgrid(BulkLA * 1e4, BulkLAslope)
Z = np.exp(lkhDist)
Z = Z/np.max(Z)
pcm = ax.pcolormesh(X, Y, Z,
#norm=colors.LogNorm(vmin=Z.min(), vmax=Z.max()),
cmap=cm.coolwarm)
ax.set_xlabel('Bulk Loss Angle (' + r'$\times 10^{-4}$' + ' rad)', linespacing=4)
ax.set_ylabel('Bulk Loss Angle Slope', linespacing=4)
ax.ticklabel_format(axis='x', style='sci', scilimits=(0,0))
ax.ticklabel_format(axis='y', style='sci', scilimits=(0,0))
# Add a color bar which maps values to colors.
fig.colorbar(pcm, shrink=0.5, aspect=5, label=r'Norm. $\mathcal{L}(\phi_B,\phi_{Bslope}|S^{BN}_{meas})$')
```
%% Cell type:code id: tags:
``` python
#fig.savefig(eu('~/Git/cit_ctnlab/ctn_paper/figures/LikelihoodProbDist_Slope.pdf'),
# facecolor=fig.get_facecolor(),
# bbox_inches='tight')
```
%% Cell type:code id: tags:
``` python
ax.set_title('Likelihood Distribution of Bulk Loss Angle and its power law slope\n')
figlist +=[fig]
```
%% Cell type:code id: tags:
``` python
fig = plt.figure(figsize=[16,12])
ax = fig.gca()
X, Y = np.meshgrid(BulkLA * 1e4, BulkLAslope)
Z = np.exp(bayProbDist)
Z = Z / np.max(Z)
pcm = ax.pcolormesh(X, Y, Z,
#norm=colors.LogNorm(vmin=Z.min(), vmax=Z.max()),
cmap=cm.coolwarm)
ax.set_xlabel('Bulk Loss Angle (' + r'$\times 10^{-4}$' + ' rad)', linespacing=4)
ax.set_ylabel('Bulk Loss Angle Slope', linespacing=4)
ax.ticklabel_format(axis='x', style='sci', scilimits=(0,0))
ax.ticklabel_format(axis='y', style='sci', scilimits=(0,0))
ax.set_title('Normalized Bayesian Probability Distribution of Bulk Loss Angle and its power law slope\n')
# Add a color bar which maps values to colors.
fig.colorbar(pcm, shrink=0.5, aspect=5, label=r'Norm. $\mathcal{B}(\phi_B,\phi_{Bslope}|S^{BN}_{meas})$')
figlist +=[fig]
```
%% Cell type:code id: tags:
``` python
bayProbSlopeData = eu('~/Git/cit_ctnlab/ctn_paper/'
'/data/bayProbSlopeData.pkl')
#with open(bayProbSlopeData, 'wb') as results:
# pickle.dump({'BulkLA': BulkLA,
# 'BulkLAslope': BulkLAslope,
# 'logBayProbDist': bayProbDist}, results)
```
%% Cell type:code id: tags:
``` python
nosbud.loadPSD(SavedPSDs,
overridePresentFreq=True, overridePresentPSD=True)
nosbud.loadASD(eu(BNfile),
label='Measured Beatnote Spectrum',
key='beat')
nosbud = updateFromTransRIN(nosbud, eu(BNfile))
nosbud.calculateTotalEstNoise()
```
%% Cell type:code id: tags:
``` python
ff = nosbud.PSDList['coatBr'][1]
X_B = BulkContSlope(ff, nosbud)
X_S = ShearContSlope(ff, nosbud)
nosbud.PSDList['coatBr'][0] = X_B * mlBLA * ((ff/100)**(mlBLAslope)) + X_S * ShearLA
```
%% Cell type:code id: tags:
``` python
plotList = ['coatBr', 'coatTO', 'subBr', 'subTE',
'pdhShot', 'pllOsc', 'pllReadout', 'seismic',
'photoThermal', 'resNPRO', 'total', 'beat']
nosbud.PSDList['pllOsc'][2] = 'DPLL Frequency Noise'
fig = nosbud.plotPSD(plotList=plotList,
savePlot=False,
doTotal=True)
fn = BNfile.replace('.txt', '')
tstamp = fn[fn.find('Spectrum_')+9:]
tstruc = time.strptime(tstamp, '%Y%m%d_%H%M%S')
BNdate = time.strftime('%b %d, %Y', tstruc)
ax = fig.gca()
ax.set_xlim([0.05, 5e3])
ax.set_title('')
```
%% Cell type:code id: tags:
``` python
NoiseBudgetResultsData = eu('~/Git/cit_ctnlab/ctn_paper/'
'/data/NoiseBudgetResultsData_Slope.pkl')
#with open(NoiseBudgetResultsData, 'wb') as results:
# pickle.dump(nosbud.PSDList, results, protocol=2)
```
%% Cell type:markdown id: tags:
### Comparison with conventional formulas:
[Nakagawa et al.](http://doi.org/10.1103/PhysRevD.65.102001) (2002) expression (Eq. 18):
$$S_x^{\text{(cBr)}}(f) = \frac{4 k_\text{B} T}{\pi^2 f} \frac{(1+\sigma_\text{s})(1-2\sigma_\text{s})d}{w^2 E_\text{s}}\phi_\text{c}$$
[Harry et al.](https://iopscience.iop.org/article/10.1088/0264-9381/19/5/305/meta) (2002) expression (Eq. 21, assuming $\phi_\perp = \phi_\parallel$):
$$
S_x^{\text{(cBr)}}(f) = \frac{2 k_\text{B} T}{\pi^2 f} \frac{d \phi_\text{c}}{w^2 E_\text{s}^2 E_\text{c}(1-\sigma_\text{c}^2)}
\left[E_\text{c}^2 (1+\sigma_\text{s})^2 (1-2\sigma_\text{s})^2
+ E_\text{s}^2 (1+\sigma_\text{c})^2(1-2\sigma_\text{c})\right]
$$
[Yam et al.](http://doi.org/10.1103/PhysRevD.91.042002) expression (Eq. 1), currently in use in gwinc:
$$
S_x^{\text{(cBr)}}(f) = \frac{4 k_\text{B} T}{2 \pi^2 w^2 f}
\frac{1 - \sigma_\text{s} - 2 \sigma_\text{s}^2}{E_\text{s}}
\sum_j b_j d_j \Phi_{\text{M}j}
$$
where $b_j$ is a unitless weighting factor given by:
$$
b_j = \frac{1}{1 - \sigma_j}
\left[
\left(
1 - n_j \frac{\partial \phi_\text{c}}{\partial \phi_j}
\right)^2
\frac{E_\text{s}}{E_j}
+ \frac{(1 - \sigma_\text{s} - 2 \sigma_\text{s}^2)^2}
{(1 + \sigma_j)^2 (1 - 2 \sigma_j)}
\frac{E_j}{E_\text{s}}
\right]
$$
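%% Cell type:code id: tags:
``` python
# Hedged numerical sketch of the Nakagawa et al. expression above, with
# illustrative values (all assumed for orientation, not the actual cavity stack)
kBTsketch = scc.Boltzmann * 310.15  # ~37 C cavity temperature
f0 = 100                            # Hz
sigmaS = 0.17                       # substrate Poisson ratio (assumed)
dCoat = 4.5e-6                      # total coating thickness in m (assumed)
wBeamSketch = 180e-6                # beam spot radius in m (assumed)
Esub = 72e9                         # substrate Young's modulus in Pa (assumed)
phiC = 4e-4                         # effective coating loss angle (assumed)
SxCBr = (4*kBTsketch/(np.pi**2 * f0)
         * (1 + sigmaS)*(1 - 2*sigmaS)*dCoat/(wBeamSketch**2 * Esub)
         * phiC)
print('S_x(100 Hz) ~ {:.2e} m^2/Hz'.format(SxCBr))
```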
%% Cell type:markdown id: tags:
We will calculate the conventional coating Brownian noise PSD per unit effective coating loss angle at 100 Hz.
%% Cell type:code id: tags:
``` python
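# Evaluate the Yam et al. (gwinc) expression per unit loss angle at 100 Hz
# (index 279 of nosbud.freq); dividing the inferred coating Brownian PSD by it
# gives an effective coating loss angle comparable to conventional results.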
dLogRho_dPhik = np.array([val.n for val in nosbud.coatStack.delLogRho_delPhik])
dcdp = -0.5*np.imag(dLogRho_dPhik)[:-1]
pratN = nosbud.coatStack.Poisson[:-1]
pratsub = nosbud.sub.Poisson
nN = nosbud.coatStack.RefInd[:-1]
yN = nosbud.coatStack.Young[:-1]
Ysub = nosbud.sub.Young
dGeo = nosbud.coatStack.PhyThick[:-1]
wBeam = nosbud.spotSize
kBT = scc.Boltzmann*nosbud.temp
brLayer = ( 1/(1-pratN)
* ( (1-nN*dcdp/2)**2 * (1-2*pratN)*(1+pratN)*Ysub / ((1-2*pratsub)*(1+pratsub)*yN)
+ (1-2*pratsub)*(1+pratsub)*yN / ((1+pratN)*Ysub) ) )
gwincCoatBrZperLA = ((4 * kBT / (np.pi * wBeam**2 * 2 * np.pi * nosbud.freq[279]))
* (1 - pratsub - 2 * pratsub**2) / Ysub
* np.sum(dGeo * brLayer)
* 4 * nosbud.fConv**2 )
effCLA = nosbud.PSDList['coatBr'][0][279] / gwincCoatBrZperLA
effCLApm = effCLA * BLAci90pm / mlBLA
```
%% Cell type:code id: tags:
``` python
print('Estimated Effective Coating Loss Angle '
'{:.2e} +- {:.2e} radians'.format(effCLA, effCLApm))
```
%% Cell type:code id: tags:
``` python
#fig.savefig(eu('~/Git/cit_ctnlab/ctn_paper/figures/CTN_Noise_Budget_Slope.pdf'),
# facecolor=fig.get_facecolor(),
# bbox_inches='tight')
```
%% Cell type:code id: tags:
``` python
ax.set_title('CTN Noise Budget, '
+ BNdate
+ '\n'
+ r'$\Phi_{B}$'
+ ' = ({:.2f} '.format(mlBLA*1e4)
+ r'$\pm$' + ' {:.2f}) '.format(BLAci90pm*1e4)
+ r'$\times \left(\frac{f}{100\,Hz}\right)^{'
+ '{:.2f} '.format(mlBLAslope)
+ r'\pm'
+ ' {:.2f} '.format(BLAslopeci90pm)
+ r'}$'
+ r'$ \times 10^{-4}$ '
+ ' radians; '
+ r'$\Phi_{S} = 5.2 \times 10^{-7}$' + ' radians')
fig.savefig('CTN_Best_Measurement_Result_With_Slope.pdf',
facecolor=fig.get_facecolor(),
bbox_inches='tight')
fig.savefig('CTN_Best_Measurement_Result_With_Slope.png',
facecolor=fig.get_facecolor(),
bbox_inches='tight')
figlist += [fig]
```
%% Cell type:code id: tags:
``` python
pp = PdfPages('CTN_Bayesian_Inference_Final_Analysis_with_Slope.pdf')
for fig in figlist:
pp.savefig(fig, bbox_inches='tight')
pp.close()
```
......