Commit e46bf5b8 authored by LeGuen Yann
New cleaner and commented version of the analysis code

parent 62b6830a
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
import numpy as np
from contourpy import contour_generator
import itertools as itt
import functools as ft
from types import MethodType
from collections import namedtuple,OrderedDict
import os
import sys
from scipy.optimize import curve_fit,least_squares
from scipy.interpolate import RegularGridInterpolator
from sklearn.cluster import OPTICS, Birch
from sklearn.neighbors import LocalOutlierFactor
from scipy.ndimage import gaussian_filter
from PIL import Image
import cv2
import tifffile as TIFF
import json
from tabulate import tabulate
import time
from tqdm import tqdm
scriptFolder = os.path.dirname(os.path.abspath(__file__))
#%% base variable
mainDir = scriptFolder # automatically takes the folder where the script is located as the main folder (change here if you don't want to use the current folder)
dirList = [ # list of sub-folders where the images are stored
r"\25-02-12 945_1c\1400nm\QWP0",
r"\25-02-12 945_1c\1400nm\QWP45",
r"\25-02-12 945_1c\1400nm\QWP315",
]
saveDir = mainDir + "/output/" # output folder; by default an "output" folder is created in the main folder
if not os.path.exists(saveDir): # create the output folder if it doesn't exist
os.makedirs(saveDir)
binFactor = 4 # binning factor of the imported image set (must be the same for all images)
pxSizeX = 0.3255E-6 * binFactor # m/px
pxSizeY = 0.2865E-6 * binFactor # m/px
repRate = 100e3 #Hz
LOWSTATE = "P+" # initial state with lowest grey level
HIGHSTATE = "P-" # initial state with highest grey level
TAG_INDEX = 699 # tag index used to retrieve the metadata stored in the tiff images (Note: only valid for images generated by this setup, not implemented yet on other setups)
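# Note (assumption): the pulse energy is computed later as Ep = Power/1e3/repRate [J], which assumes the
# 'Pump power' metadata is recorded in mW; adjust the 1e3 factor if the metadata uses another unit.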
#%% def funct
def getImageTagsDict(FilePath):
Tags = []
with TIFF.TiffFile(FilePath) as tif:
for page in tif.pages:
Tags.append({tag['Decription']:{k:tag[k] for k in ['Short','units','value']} for tag in json.loads(page.tags[TAG_INDEX].value)})
return Tags
def getImageTagsList(FilePath):
Tags = []
with TIFF.TiffFile(FilePath) as tif:
for page in tif.pages:
Tags.append(json.loads(page.tags[TAG_INDEX].value))
return Tags
def openTiffImagesAndPropreties(FilePath):
images=[]
propLists=[]
with TIFF.TiffFile(FilePath) as tif:
for page in tif.pages:
images.append(page.asarray())
propLists.append(json.loads(page.tags[TAG_INDEX].value))
return images,propLists
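# The TAG_INDEX tag of each TIFF page is expected to hold a JSON list of parameter entries of the form
# [{"Decription": "Pump power", "Short": "P", "units": "mW", "value": 10.0}, ...]
# (these are the keys used below; the example values are illustrative only).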
#def mapDictList(f,d):
# return {k: [f(k,a) for a in v] for k, v in d.items()}
def mapDictList(f, d):
return {
k: [f(k, a) for a in v]
for k, v in tqdm(d.items(), desc="Processing Dictionary", mininterval=1, ncols=80)}
# for k, v in d.items()}
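# Illustrative (hypothetical) usage: compute the mean grey level of every image while keeping the keys
# meanLevels = mapDictList(lambda prop, im: float(np.mean(im)), rawImages)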
def cropIm(im,ROI):
return im[int(ROI[1]):int(ROI[1]+ROI[3]), int(ROI[0]):int(ROI[0]+ROI[2])]
def plotIm(im,fig,ax,title=None,aspectRatio=None,colorBar=None):
imx = ax.imshow(im,aspect=aspectRatio,cmap="bwr",vmin=0,vmax=1)
ax.set_title(title)
if colorBar==True:
fig.colorbar(imx)
def plotDictListIm(d,keyFilter=None,aspectRatio=None,sortKey=None):
keyList = filter(keyFilter,d.keys())
if sortKey!=None:
keyList = list(sorted(keyList,key=sortKey))
for prop in keyList:
for im in d[prop]:
fig, ax = plt.subplots()
plotIm(im,fig,ax,title=prop,aspectRatio=aspectRatio,colorBar=True)
plt.show()
def plotDictListValue(d,figax=None,keyFilter=None,Xaxis=None,logScale=False,label=None,title=None):
keyList = list(filter(keyFilter,d.keys()))
if Xaxis!=None:
keyList = sorted(keyList,key=lambda x:getattr(x, Xaxis))
keyLookUp = []
X,Y = [],[]
for i,prop in enumerate(keyList):
for j,val in enumerate(d[prop]):
if Xaxis!=None:
X.append(getattr(prop, Xaxis))
else:
X.append(i)
Y.append(val)
keyLookUp.append((prop,j))
if figax==None:
figax = plt.subplots()
figax[1].plot(X,Y,'o',markersize=2,picker=5,label=label)
if logScale:
figax[1].set_yscale('log')
figax[1].set_title(title)
if label!=None:
figax[1].legend(bbox_to_anchor=(1.05, 1.0), loc='upper left')
figax[0].tight_layout()
return figax[0], figax[1], keyLookUp
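# Note: keyLookUp maps the index of each plotted point back to (dict key, index in the value list), and
# picker=5 makes the points pickable with a 5-point tolerance. A minimal (hypothetical) sketch of how a
# pick handler could use it, assuming someDict is one of the dictionaries built below:
# fig, ax, keyLookUp = plotDictListValue(someDict, Xaxis="Power")
# def onPick(event):
#     for ind in event.ind:
#         print(keyLookUp[ind]) # (dict key, index) of the picked point
# fig.canvas.mpl_connect('pick_event', onPick)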
def plotContourList(cs,fig,ax,title=None,color=None):
if color==None:
color="k"
for c in cs:
ax.plot(c[:,0],c[:,1],c=color)
ax.set_title(title)
def plotDictListContour(d,keyFilter=None,sortKey=None):
keyList = list(filter(keyFilter,d.keys()))
colorCycle = plt.rcParams['axes.prop_cycle']()
color = lambda x:next(colorCycle)["color"]
if sortKey!=None:
keyList = list(sorted(keyList,key=sortKey))
cmap = plt.get_cmap('viridis') # use pyplot.get_cmap; matplotlib.cm.get_cmap was removed in recent matplotlib versions
cmin,cmax=min(map(sortKey,keyList)),max(map(sortKey,keyList))
color = lambda x:cmap((sortKey(x)-cmin)/(cmax-cmin))
fig, ax = plt.subplots()
for prop in keyList:
plotContourList(d[prop],fig,ax,color=color(prop))
ax.invert_yaxis()
ax.axis('scaled')
return fig, ax, keyList
def area(vs): # unsigned area of the polygon defined by the vertex list vs (shoelace formula)
a = 0
x0,y0 = vs[0]
for [x1,y1] in vs[1:]:
dx = x1-x0
dy = y1-y0
a += 0.5*(y0*dx - x0*dy)
x0 = x1
y0 = y1
return abs(a)
def plotCircles(fig,ax,center,radius,color="k"):
circle = plt.Circle(center, radius, color=color, fill=False)
ax.add_patch(circle)
return fig, ax
def plotDictListImageCircles(dIm,dCircles,keyFilter=None,sortKey=None):
keyList = list(filter(keyFilter,dCircles.keys()))
if sortKey!=None:
keyList = list(sorted(keyList,key=sortKey))
for prop in keyList:
for i,im in enumerate(dIm[prop]):
fig, ax = plt.subplots()
plotIm(im,fig,ax,title=prop,colorBar=True)
center1,radius1,center2,radius2 = dCircles[prop][i]
print(center1,radius1,center2,radius2)
plotCircles(fig,ax,center1,radius1,color="k")
plotCircles(fig,ax,center2,radius2,color="k")
plt.show()
#%% open image and background
propDef = { # descriptor of properties to import from the metadata. parameter name to be imported : (name, type) in the metadata
"Power" : ('Pump power',float),
"QWP" : ('Pump QWP angle',float),
# "Compressor" : ('Compressor',int),
"Initial_State" : ('Initial state',str),
"Number_pulse" : ('Number of pulse',int),
"Wavelength" : ('Pump wavelength',int)
}
Params = namedtuple('Params', list(propDef.keys())) # create the Params class from the propDef descriptor to store the images' properties
Params.__repr__ = lambda self:'Params:\n'+tabulate(self._asdict().items()) # change the Params string representation to be easier to read (comment it out if you don't like it)
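# With this descriptor, a dictionary key built below looks like (illustrative values only, e.g. the power is hypothetical):
# Params(Power=10.0, QWP=45.0, Initial_State='P+', Number_pulse=1000, Wavelength=1400)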
communParam = None
rawImages = {} # dictionary storing the parameters as the key (namedtuple) and a list of images as the value
bkgImage = {} # dictionary storing the parameters as the key (namedtuple) and a list of backgrounds as the value
for subDir in dirList:
imageDir = mainDir + subDir
for filename in filter(lambda x:x.endswith('.tiff'),os.listdir(imageDir)): # iterate over the filenames ending in .tiff in the current directory imageDir
images,propLists = openTiffImagesAndPropreties(os.path.join(imageDir,filename)) # open images and properties from the file
for im,propTag in zip(images,propLists): # iterate over the stack of images and properties (for multi-page tiff)
communParam = [a if (communParam == None or a in communParam) else {**a, "value":"\x1b[35m\x1b[1mvariable\x1b[0m"} for a in propTag] # compare the metadata to communParam in order to gather common parameters and flag variable ones
prop = Params(**{k:v[1](next(x for x in propTag if x['Decription']==v[0])["value"])for k,v in propDef.items()}) # create the properties namedtuple storing the parameters of the image, to be used as a key for the dictionaries
if "Bkg" in filename: # separate background images (must have Bkg in their name)
if prop not in bkgImage: # check if the key already exists in the bkgImage dictionary
bkgImage[prop] = [] # if not, create a new entry with an empty list
bkgImage[prop].append(im) # append the background image to the bkgImage dictionary
else: # separate images from backgrounds
if prop not in rawImages: # check if the key already exists in the rawImages dictionary
rawImages[prop] = [] # if not, create a new entry with an empty list
rawImages[prop].append(im) # append the image to the rawImages dictionary
communParam = [{**a,"Param":"\x1B[31m\x1b[1mX\x1b[0m"} if a['Decription'] in [p[0] for p in propDef.values()] else {**a,"Param":""} for a in communParam] # add a red cross next to the parameters selected in the propDef descriptor
print(tabulate([OrderedDict([(k,p[k]) for k in ['Decription','Short','value','units','Param']]) for p in communParam], # print the information about the loaded data from the metadata
colalign=('right','left','left','left','center'),
tablefmt="rst",
headers="keys"))
#%% get bkg Intensity
# get an ROI from which the illumination levels are adjusted
cv2.namedWindow("output", cv2.WINDOW_NORMAL) # create a cv2 window
ROIBoxInt = cv2.selectROI("output",cv2.cvtColor((rawImages[sorted(rawImages.keys())[-1]][0]/np.max(rawImages[sorted(rawImages.keys())[-1]][0])).astype('float32'),cv2.COLOR_GRAY2RGB)) # display the image with the highest power and prompt for a ROI of an empty area
cv2.destroyAllWindows() # destroy the cv2 window
# function to get the mean grey level in the previously selected ROI
AveIntenFunc = lambda prop,im : np.mean(cropIm(im.astype('float32'),ROIBoxInt))
# gets the mean illumination in the ROI for each image and background
rawImagesIntesity = mapDictList(AveIntenFunc,rawImages)
bkgImageIntesity = mapDictList(AveIntenFunc,bkgImage)
#%% Correct bkg Intensity
# get the illumination levels for the P+ and P- states as the average over all corresponding images' ROI
posBkgLevel = np.mean(ft.reduce(lambda a,b:a+b[1],filter(lambda x:x[0].Initial_State=="P+",itt.chain(rawImagesIntesity.items(),bkgImageIntesity.items())),[]))
negBkgLevel = np.mean(ft.reduce(lambda a,b:a+b[1],filter(lambda x:x[0].Initial_State=="P-",itt.chain(rawImagesIntesity.items(),bkgImageIntesity.items())),[]))
# function that corrects an image's illumination level to the previously calculated one
fixIntenFunc = lambda prop,im : (posBkgLevel if prop.Initial_State == "P+" else negBkgLevel) * im.astype('float32')/np.mean(cropIm(im.astype('float32'),ROIBoxInt))
# Correct each image's illumination level to the previously calculated one
rawImagesCorected = mapDictList(fixIntenFunc,rawImages)
bkgImageCorected = mapDictList(fixIntenFunc,bkgImage)
# delete raw data to save memory (comment out if you need to use them later)
del(rawImages)
del(bkgImage)
#%% norm images
imageDictIn = rawImagesCorected # select the data set to use for normalisation (rawImages or rawImagesCorected)
bkgDictIn = bkgImageCorected # select the bkg set to use for normalisation (bkgImage or bkgImageCorected)
# function to normalise an image by subtracting the corresponding averaged background,
# then dividing by the difference between the averaged backgrounds of the two opposite initial states,
# that difference being passed through a gaussian filter (sigma = 5).
# The resulting image is then rescaled by (pxSizeX/pxSizeY) in the x direction to compensate for the probe angle
normFunc = lambda prop,im : cv2.resize((prop.Initial_State==HIGHSTATE) + (im.astype('float32') - np.average(bkgDictIn[prop],axis=0).astype('float32'))\
/gaussian_filter(np.average(bkgDictIn[prop._replace(Initial_State=HIGHSTATE)],axis=0).astype('float32') - np.average(bkgDictIn[prop._replace(Initial_State=LOWSTATE)],axis=0).astype('float32'),5),
dsize=(0,0),fx=pxSizeX/pxSizeY,fy=1)
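# In other words, with <B_low>/<B_high> the averaged backgrounds of the two initial states and G a gaussian blur (sigma = 5):
#   I_norm = (Initial_State == HIGHSTATE) + (I - <B_initial>) / G(<B_high> - <B_low>)
# so that unswitched pixels sit near the initial-state level (0 for LOWSTATE, 1 for HIGHSTATE)
# and fully switched pixels sit near the opposite level.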
# normalise the whole data set
normImages = mapDictList(normFunc,imageDictIn)
# Select this data set for the next steps
imageDictIn = normImages
#%% display images
# keyFilterFunc = lambda x:x.Initial_State=="P+" and x.QWP==45.0 and x.Number_pulse==1000 # Filter images to display based on their properties
# sortKey = lambda x:x.Power # Sort displayed images based on their properties
# plotDictListIm(normImages,keyFilter=keyFilterFunc,sortKey=sortKey)
#%% crop set
# get an ROI for cropping
cv2.namedWindow("output", cv2.WINDOW_NORMAL) # create a cv2 window
ROIBox = cv2.selectROI("output",cv2.cvtColor(normImages[sorted(normImages.keys())[-1]][1],cv2.COLOR_GRAY2RGB)) # display the image with the highest power and prompt for a ROI for cropping
cv2.destroyAllWindows() # destroy the cv2 window
# Crop the image set
cropImages = mapDictList(lambda prop,im:cropIm(im,ROIBox),normImages)
# delete raw data to save memory (comment out if you need to use them later)
del(rawImagesCorected)
del(bkgImageCorected)
del(normImages)
# Select this data set for the next steps (Note: if this step is skipped, normImages will be used for the next steps)
imageDictIn = cropImages
#%% plot histograms + integrated threshold counts
# sigmaGauss = 1 # sigma to use for the gaussian filter
# groups = ["QWP","Number_pulse","Power"] # group data based on the propreties
# # groups = ["Compressor","QWP","Number_pulse","Power] # group data based on the propreties
# subFilterFunc = lambda x:x[0]==45.0 # Filter data to display based on group parameter list
# keyfunc = lambda x:[getattr(x, key) for key in groups] # get keys of the grouped data
# sortedKeys = list(sorted(imageDictIn.keys(),key=keyfunc)) # sort dict keys !! necessary for itt.groupby
# for k, g in itt.groupby(sortedKeys, keyfunc): # group data with common keyfunc output (more info : https://docs.python.org/3/library/itertools.html#itertools.groupby)
# if subFilterFunc(k): # apply filter
# g = list(sorted(g)) # convert and sort from grouper type to list (otherwise the generator will exhaust itself after the first pass)
# figax = plt.subplots(2,1,sharex=True,figsize=(10,10)) # new plot with 2 axis
# figax[1][0].hist(np.ravel(gaussian_filter(imageDictIn[g[0]],sigmaGauss,axes=(1,2))),100,label=g[0].Initial_State,histtype="step") # plot the histogram of all pixels of the group of images in the LOWSTATE after applying a gaussian filter
# figax[1][0].hist(np.ravel(gaussian_filter(imageDictIn[g[1]],sigmaGauss,axes=(1,2))),100,label=g[1].Initial_State,histtype="step") # plot the histogram of all pixels of the group of images in the HIGHSTATE after applying a gaussian filter
# figax[1][0].set_yscale('log')
# figax[1][0].legend()
# figax[1][0].set_title(' '.join([str(a)+" = "+str(b) for a,b in zip(groups,k)])) # format the name from the group key
# figax[1][0].axvline(0)
# figax[1][0].axvline(1,color="orange")
# X = np.linspace(0, 1,100) # list of thresholds
# Y = [np.sum(threshold<np.ravel(gaussian_filter(imageDictIn[g[0]],sigmaGauss,axes=(1,2)))) for threshold in X] # for each threshold count the number of pixels above the threshold for the LOWSTATE after applying a gaussian filter
# figax[1][1].plot(X,Y,label=g[0].Initial_State)
# Y = [np.sum(threshold>np.ravel(gaussian_filter(imageDictIn[g[1]],sigmaGauss,axes=(1,2)))) for threshold in X] # for each threshold count the number of pixels below the threshold for the HIGHSTATE after applying a gaussian filter
# figax[1][1].plot(X,Y,label=g[1].Initial_State)
# figax[1][1].set_yscale('log')
# figax[1][1].legend()
# figax[1][1].axvline(0)
# figax[1][1].axvline(1,color="orange")
# plt.show()
#%% plot histogram of an empty spot for the detection thresholds
sigmaGauss = 1 # sigma to use for the gaussian filter
# get an ROI of an empty spot whose histogram is used to select the thresholds
cv2.namedWindow("HistROI", cv2.WINDOW_NORMAL) # create a cv2 window
ROIBoxInt = cv2.selectROI("HistROI",cv2.cvtColor((imageDictIn[sorted(imageDictIn.keys())[-1]][0]/np.max(imageDictIn[sorted(imageDictIn.keys())[-1]][0])).astype('float32'),cv2.COLOR_GRAY2RGB)) # display the image with the highest power and prompt for a ROI of an empty area
cv2.destroyAllWindows() # destroy the cv2 window
# plot the histogram of the ROI of the images after applying a gaussian filter
plt.hist(np.ravel([cropIm(gaussian_filter(im,sigmaGauss),ROIBoxInt) for imList in cropImages.values() for im in imList]),200,histtype="step")
plt.gca().set_yscale('log')
plt.show()
#%% get circles
sigmaGauss = 1 # sigma to use for the gaussian filter
INNERCERCLE = 1 # FIXED CONSTANT, for better legibility of which circle is selected via radiusSelect
OUTERCERCLE = 3 # FIXED CONSTANT, for better legibility of which circle is selected via radiusSelect
# threshold selection for the LOW and HIGH states, chosen from the previous histogram such that the threshold is just above/below the background noise
ThresholdLow = 0.2 # LOWSTATE (0.0) < ThresholdLow < || selected range || < HIGHSTATE (1.0)
ThresholdHigh = 0.8 # LOWSTATE (0.0) < || selected range || < ThresholdHigh < HIGHSTATE (1.0)
# function to get the enclosing and inscribed circles of the switched region
def getCircles(prop,im):
# get contours based on the initial state and corresponding threshold
cs = cv2.findContours(((gaussian_filter(im,1)>ThresholdLow)*(prop.Initial_State==LOWSTATE) + (gaussian_filter(im,1)<ThresholdHigh)*(prop.Initial_State==HIGHSTATE)).astype("uint8"), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
# check if the returned contours are empty
if len(cs[0]) > 0:
X = np.concatenate(cs[0])[:,0,:] # concatenate all points of all contours
# check if there is more than one point
if len(X) > 1:
clf = LocalOutlierFactor() # outlier detector initialisation. To be optimised !!!
y_pred = clf.fit_predict(X) # label each point as inlier (1) or outlier (-1)
# get the inliers and outliers
inliers = X[y_pred == 1]
# outliers = X[y_pred == -1]
# get convex hull of inliers
h = cv2.convexHull(inliers)
# get the inscribed circle from the convex hull: the maximum of the distance transform inside the filled hull gives the incircle centre and radius (based on https://gist.github.com/DIYer22/f82dc329b27c2766b21bec4a563703cc)
binContour = cv2.fillPoly(np.zeros(gaussian_filter(im,1).shape, dtype=np.uint8),pts=[h[:,0,:]],color=255)
_, radius1, _, center1 = cv2.minMaxLoc(cv2.distanceTransform(binContour,cv2.DIST_L2, cv2.DIST_MASK_PRECISE))
# get enclosing circle from the inliers (more info : https://docs.opencv.org/3.4/dd/d49/tutorial_py_contour_features.html)
center2,radius2 = cv2.minEnclosingCircle(inliers)
return center1,radius1,center2,radius2 # return the centres and radii of the circles; the first one is the inner (inscribed) circle, the second one is the outer (enclosing) circle
else: # if there is only one point return the coordinate of the point with zero radius
return tuple(X[0]),0,tuple(X[0]),0
else: # if empty return only zeros
return (0,0),0,(0,0),0
# get the circles of all images
circles = mapDictList(getCircles,imageDictIn)
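# Each entry of circles is (center1, radius1, center2, radius2): the inscribed circle gives a lower bound
# and the enclosing circle an upper bound on the switched-region size. Radii are in pixels here and are
# converted to metres with pxSizeY at export (the images were resampled to square pixels of size pxSizeY).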
#%% plot norm images and circles
# keyFilterFunc = lambda x:x.Initial_State=="P+" and x.QWP==45.0 and x.Number_pulse==1000
# sortKey = lambda x:x.Power
# plotDictListImageCircles(imageDictIn,circles,keyFilter=keyFilterFunc,sortKey=sortKey)
#%% full process plot
# FilterFunc = lambda x:x.QWP==0.0 # filter the image to display
# sortKey = lambda x:(x.Power,x.Number_pulse,x.QWP,x.Initial_State) # method of sorting of the images to be displayed
# for prop in sorted(filter(FilterFunc,circles.keys()),key=sortKey): # iterate over all filtered and sorted keys of circles
# for im,c in zip(imageDictIn[prop],circles[prop]): # iterate in parallel over the list of images and the corresponding circles for a given key
# fig,axs = plt.subplots(1,3,figsize=(18,7)) # new plot with 3 axis
# plotIm(im,fig,axs[0]) # plot image from imageDictIn in the first axis
# binIm = (gaussian_filter(im,1)>ThresholdLow)*(prop.Initial_State==LOWSTATE) + (gaussian_filter(im,1)<ThresholdHigh)*(prop.Initial_State==HIGHSTATE) # get binary image based on the initial state and corresponding threshold
# axs[1].imshow(binIm,cmap="bwr",vmin=0,vmax=1) # plot binarised image
# pxChangeCoor = (np.array(np.where(binIm)).T)[:, [1, 0]] # get all points inside the binary image
# if len(pxChangeCoor) > 2: # if there are enough points, get the convex hull
# clf = LocalOutlierFactor() # outlier detector initialisation. To be optimised !!!
# y_pred = clf.fit_predict(pxChangeCoor) # label each point as inlier (1) or outlier (-1)
# # get the inliers and outliers
# inliers = pxChangeCoor[y_pred == 1]
# outliers = pxChangeCoor[y_pred == -1]
# # get convex hull of inliers and plot over the binary image
# h = cv2.convexHull(inliers)
# h = np.concatenate((h,[h[0,:,:]]))
# axs[1].plot(h[:,0,0],h[:,0,1],"k-")
# # plot initial image overlayed with the previously calculated circles
# plotIm(im,fig,axs[2])
# plotCircles(fig,axs[2],c[0],c[1],color="k")
# plotCircles(fig,axs[2],c[2],c[3],color="k")
# # hide axis tick and tick-labels
# for ax in axs:
# ax.get_xaxis().set_visible(False)
# ax.get_yaxis().set_visible(False)
# fig.suptitle(prop) # use the image properties as title
# plt.show()
#%% plot effective radius
# radiusSelect = OUTERCERCLE # select which circle radius to plot; can be INNERCERCLE or OUTERCERCLE
# groups = ["QWP","Number_pulse"] # group data based on the properties
# # groups = ["Compressor","QWP","Number_pulse"] # group data based on the properties
# subFilterFunc = lambda x:True # Filter data to display based on group parameter list
# keyfunc = lambda x:[getattr(x, key) for key in groups] # get keys of the grouped data
# sortedKeys = list(sorted(circles.keys(),key=keyfunc)) # sort dict keys !! necessary for itt.groupby
# for k, g in itt.groupby(sortedKeys, keyfunc): # group data with common keyfunc output (more info : https://docs.python.org/3/library/itertools.html#itertools.groupby)
# if subFilterFunc(k): # apply filter
# figax = plt.subplots(figsize=(10,7)) # create new plot
# g = list(g) # convert from grouper type to list (otherwise the generator will exhaust itself after the first pass)
# keyfunc2 = lambda x:x.Initial_State # key for the sub grouping
# sortedKeys2 = list(sorted(g,key=keyfunc2)) # sort sub group key !! necessary for itt.groupby
# for k2, g2 in itt.groupby(sortedKeys2, keyfunc2): # sub-group the grouped data with common keyfunc2 output; the data will be on the same plot but with different labels
# g2 = list(g2) # convert from grouper type to list (otherwise the generator will exhaust itself after the first pass)
# # plot the sub grouped data point
# plotDictListValue(mapDictList(lambda prop,x:x[radiusSelect], circles), # create a sub-dictionary of circles, selecting only one of the two circles
# Xaxis="Power", # name of the property to use as the X axis
# keyFilter=lambda x:x in g2, # only select points of the sub-group
# figax=figax, # figure and axis to use
# logScale=True, # logarithmic Y scaling
# label=k2, # use the sub-grouping key as label
# title='\n'.join([str(a)+" = "+str(b) for a,b in zip(groups,k)]) # format title using the outer group keys
# )
# plt.show()
#%% export radius
# parameters = ["Compressor","Number_pulse","QWP","Initial_State","Power"] # list of propreties to use as column to discribe the output
parameters = ["Number_pulse","QWP","Initial_State","Power"] # list of propreties to use as column to discribe the output
data = np.array(sorted([[getattr(k,param) for param in parameters]+ # retrive propreties to fill the columns
[k.Power/1E3/repRate,r[1]*pxSizeY,r[3]*pxSizeY] for k,v in sorted(circles.items(),key=lambda x:x[0]) for r in v])) # for each data point calculate the pulse energy in J and the inner and outer radius in m
paramName = ",".join(parameters) + ",Ep,R1,R2" # format the columns name
# get the units of the selected column parameters and append the fixed ouput unit for pulse energy and radius
units = ",".join([next(filter(lambda x:x['Decription']==propDef[param][0],communParam))['units'] for param in parameters]) + \
',J,m,m'
np.savetxt(os.path.join(saveDir,'Radius.csv'),data,header=paramName+'\n'+units,fmt='%s',delimiter=",") # export to .csv as str array and with the constructed header
#%% def inv gauss radius (Prefer the other fit method)
# # define the inverted Gauss function for fit
# Rfunc = lambda Ep,s,Fth:np.nan_to_num(s*np.sqrt(np.log(Ep/(np.pi*s**2*Fth))))
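# # Rationale (worked equation): for a Gaussian beam of pulse energy Ep and 1/e fluence radius s, the local
# # fluence is F(r) = Ep/(pi*s**2)*exp(-(r/s)**2). Setting F(R) = Fth (the switching threshold fluence) and
# # solving for R gives R = s*sqrt(ln(Ep/(pi*s**2*Fth))), i.e. Rfunc above (a Liu-type relation);
# # np.nan_to_num sets the radius to 0 below threshold, where the logarithm becomes negative.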
#%% fit inv gauss radius (Prefer the other fit method)
# radiusSelect = OUTERCERCLE # select which circle radius to fit; can be INNERCERCLE or OUTERCERCLE
# fitParam = {}
# groups = ["Number_pulse","QWP","Initial_State"] # group data based on the propreties
# # groups = ["Compressor","Number_pulse","QWP","Initial_State"] # group data based on the propreties
# subParams = namedtuple('subParams', groups) # create a subparameter nametuple class to store selected images' propreties
# keyfunc = lambda x:subParams(**{key:getattr(x, key) for key in groups}) # get keys of the grouped data
# sortedKeys = list(sorted(circles.keys(),key=keyfunc)) # sort dict keys !! necessary for itt.groupby
# for k, g in itt.groupby(sortedKeys, keyfunc): # group data with common keyfunc output (more info : https://docs.python.org/3/library/itertools.html#itertools.groupby)
# try: # exception handling if the fitting fails
# EpRList = np.array([[k.Power*1E-3/repRate,pxSizeY*cs[radiusSelect]] for k in g for cs in circles[k]]) # construct the data to be fitted X: Energy per pulse [J] ; Y: Radius [m]
# popt, pcov = curve_fit(Rfunc, EpRList[:,0], EpRList[:,1],p0=(1e-5,100),maxfev=1000000) # fit with the previously defined function Rfunc. The initial guess needs to be adjusted to the data if the fitting fails
# plt.plot(EpRList[:,0], EpRList[:,1],".") # plot the data to be fitted
# newEp = np.linspace(min(EpRList[:,0]),max(EpRList[:,0]),500) # X for plotting the fitted function, calculated from the min and max of the data
# newR = [Rfunc(ep,*popt) for ep in newEp] # Y for plotting the fitted function, calculated at every X
# plt.plot(newEp,newR) # plot the fitted function
# plt.title(k) # use group key as title
# plt.show()
# fitParam[k] = popt # store the fit parameters in the fitParam dictionary
# except RuntimeError as err: # if fit fails
# print(err) # print error
# plt.plot(EpRList[:,0], EpRList[:,1],".") # plot the data without fit
# plt.title(k) # use group key as title
# plt.show()
#%% export inv gauss fit (Prefer the other fit method)
# # parameters = ["Compressor","Number_pulse","QWP","Initial_State"] # list of propreties to use as column to discribe the output
# parameters = ["Number_pulse","QWP","Initial_State"] # list of propreties to use as column to discribe the output
# # retrive propreties to fill the columns and append the fit parameters
# data = np.array(sorted([[getattr(k,param) for param in parameters] + list(v) for k,v in sorted(fitParam.items(),key=lambda x:x[0])]))
# # format the column names
# paramName = ",".join(parameters) + ",Sigma,Fth"
# # get the units of the selected column parameters and append the fixed output units for sigma and the fluence threshold
# units = ",".join([next(filter(lambda x:x['Decription']==propDef[param][0],communParam))['units'] for param in parameters]) +\
# ',m,J/m2'
# np.savetxt(os.path.join(saveDir,'invGaussFit.csv'),data,header=paramName+'\n'+units,fmt='%s',delimiter=",") # export to .csv as str array and with the constructed header
#%% def gauss fit
# define the Gauss function for fit
OneOverEpFunc = lambda r,s,Eth:(1/Eth)*np.exp(-(r/s)**2)
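# Worked equation: inverting the same Gaussian-fluence relation, Fth = Ep/(pi*s**2)*exp(-(R/s)**2) gives
# 1/Ep = (1/Eth)*exp(-(r/s)**2) with Eth = pi*s**2*Fth the threshold pulse energy at r = 0.
# This is OneOverEpFunc fitted below; the fluence threshold is recovered afterwards as
# Fth = Eth/(pi*sigma**2), which is the popt[1]/np.pi/popt[0]**2 conversion in the fit loop.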
#%% fit gauss radius
radiusSelect = OUTERCERCLE # select which circle radius to fit; can be INNERCERCLE or OUTERCERCLE
fitParam = {}
groups = ["Number_pulse","QWP","Initial_State"] # group data based on the properties
# groups = ["Compressor","Number_pulse","QWP","Initial_State"] # group data based on the properties
subParams = namedtuple('subParams', groups) # create a subParams namedtuple class to store the selected images' properties
keyfunc = lambda x:subParams(**{key:getattr(x, key) for key in groups}) # get keys of the grouped data
sortedKeys = list(sorted(circles.keys(),key=keyfunc)) # sort dict keys !! necessary for itt.groupby
for k, g in itt.groupby(sortedKeys, keyfunc): # group data with common keyfunc output (more info : https://docs.python.org/3/library/itertools.html#itertools.groupby)
try: # exception handling if the fitting fails
oneOverEpRList = np.array([[repRate/k.Power/1E-3,pxSizeY*cs[radiusSelect]] for k in g for cs in circles[k] if cs[radiusSelect]>0]) # construct the data to be fitted, excluding zero-size radii. Y: 1/(Energy per pulse) [J-1] ; X: Radius [m]
symDataList = np.vstack((np.flip(oneOverEpRList*[1,-1],axis=0),oneOverEpRList)) # construct the symmetric radius data (mirror R -> -R) so the fitted Gaussian is centred at R = 0
popt, pcov = curve_fit(OneOverEpFunc, symDataList[:,1], symDataList[:,0],p0=(1e-4,1E-6),maxfev=10000) # fit with the previously defined function OneOverEpFunc. The initial guess needs to be adjusted to the data if the fitting fails
plt.plot(symDataList[:,1], symDataList[:,0],".") # plot the data to be fitted
newR = np.linspace(min(symDataList[:,1]),max(symDataList[:,1]),100) # X for plotting the fitted function, calculated from the min and max of the data
newOneOverEp = [OneOverEpFunc(r,*popt) for r in newR] # Y for plotting the fitted function, calculated at every X
plt.plot(newR,newOneOverEp) # plot the fitted function
plt.title(k) # use group key as title
plt.show()
fitParam[k] = np.array([popt[0],popt[1]/np.pi/popt[0]**2]) # convert to sigma and fluence threshold and store the fit parameters in the fitParam dictionary
except RuntimeError as err: # if fit fails
print(err) # print error
#%% export gauss fit
# parameters = ["Compressor","Number_pulse","QWP","Initial_State"] # list of propreties to use as column to discribe the output
parameters = ["Number_pulse","QWP","Initial_State"] # list of propreties to use as column to discribe the output
# retrive propreties to fill the columns and append the fit parameters
data = np.array(sorted([[getattr(k,param) for param in parameters] + list(v) for k,v in sorted(fitParam.items(),key=lambda x:x[0])]))
# format the column names
paramName = ",".join(parameters) + ",Sigma,Fth"
# get the units of the selected column parameters and append the fixed output units for sigma and the fluence threshold
units = ",".join([next(filter(lambda x:x['Decription']==propDef[param][0],communParam))['units'] for param in parameters]) +\
',m,J/m2'
np.savetxt(os.path.join(saveDir,'GaussFit.csv'),data,header=paramName+'\n'+units,fmt='%s',delimiter=",") # export to .csv as str array and with the constructed header