# ============ chxtools/Circular_Average.py ============
# Helpers for radial (circular) averaging of 2-D detector images.

import numpy as np


def radial_grid(center, shape, pixel_size=None):
    """Convert a cartesian grid (x, y) to the radius relative to ``center``.

    Parameters
    ----------
    center : tuple
        Point in the image where r = 0; may be floats for sub-pixel
        precision.  Order is (row, col).
    shape : tuple
        Image shape, used to determine the maximum extent of the output
        pixel coordinates.  Order is (row, col).
    pixel_size : sequence, optional
        Physical size of the pixels; ``len(pixel_size)`` should equal
        ``len(shape)``.  Defaults to (1, 1).

    Returns
    -------
    r : ndarray
        Distance of each pixel from ``center``; same shape as ``shape``.
    """
    if pixel_size is None:
        pixel_size = (1, 1)

    # meshgrid returns X varying along columns and Y along rows, matching
    # the (row, col) ordering of `center` and `shape`.
    X, Y = np.meshgrid(pixel_size[1] * (np.arange(shape[1]) - center[1]),
                       pixel_size[0] * (np.arange(shape[0]) - center[0]))
    return np.sqrt(X * X + Y * Y)


def bin_1D(x, y, nx=None, min_x=None, max_x=None):
    """Bin the values in ``y`` based on their ``x`` coordinates.

    Parameters
    ----------
    x : array
        Position of each sample.
    y : array
        Intensity of each sample.
    nx : integer, optional
        Number of bins; defaults to ``int(max_x - min_x)``.
    min_x : float, optional
        Left edge of the first bin; defaults to ``min(x)``.
    max_x : float, optional
        Right edge of the last bin; defaults to ``max(x)``.

    Returns
    -------
    edges : array
        Bin edges, length ``nx + 1``.
    val : array
        Sum of the ``y`` values in each bin, length ``nx``.
    count : array
        Number of samples in each bin, length ``nx``.
    """
    # Handle default values.
    if min_x is None:
        min_x = np.min(x)
    if max_x is None:
        max_x = np.max(x)
    if nx is None:
        # NOTE(review): if max_x - min_x < 1 this gives nx == 0; callers are
        # expected to pass nx explicitly for narrow ranges -- confirm.
        nx = int(max_x - min_x)

    bins = np.linspace(start=min_x, stop=max_x, num=nx + 1, endpoint=True)
    # A weighted histogram gives the per-bin sum ...
    val, _ = np.histogram(a=x, bins=bins, weights=y)
    # ... and an un-weighted one gives the per-bin count.
    count, _ = np.histogram(a=x, bins=bins)
    return bins, val, count


def bin_edges_to_centers(input_edges):
    """Turn an array of ``N + 1`` bin edges into the ``N`` bin centers.

    Parameters
    ----------
    input_edges : array-like
        N + 1 values: the left edge of each of N bins plus the right edge
        of the last bin.

    Returns
    -------
    centers : ndarray
        Length-N array with the center of each bin.
    """
    input_edges = np.asarray(input_edges)
    return (input_edges[:-1] + input_edges[1:]) * 0.5
def circular_average(image, calibrated_center, threshold=0, nx=1500,
                     pixel_size=(1, 1), min_x=None, max_x=None, mask=None):
    """Circular average of image data (a.k.a. radial integration).

    Parameters
    ----------
    image : array
        Image to average as a function of radius.
    calibrated_center : tuple
        Center of the image in pixel units; argument order is (row, col).
    threshold : int, optional
        Only bins with more than ``threshold`` contributing pixels are
        kept (default 0, i.e. every non-empty bin).
    nx : int, optional
        Number of radial bins; default is 1500.
    pixel_size : tuple, optional
        Size of a pixel (in a real unit, like mm);
        argument order is (pixel_height, pixel_width).  Default (1, 1).
    min_x : float, optional, in pixel units
        Left edge of the first bin; defaults to the smallest radius present.
    max_x : float, optional, in pixel units
        Right edge of the last bin; defaults to the largest radius present.
    mask : array, optional
        Same shape as ``image``; truthy entries mark the pixels to include.

    Returns
    -------
    bin_centers : array
        Center of each kept radial bin.
    ring_averages : array
        Radial average of the image in each kept bin.
    """
    radial_val = radial_grid(calibrated_center, image.shape, pixel_size)

    if mask is not None:
        mask = np.array(mask, dtype=bool)
        binr = radial_val[mask]
        image_mask = np.array(image)[mask]
    else:
        binr = np.ravel(radial_val)
        image_mask = np.ravel(image)

    bin_edges, sums, counts = bin_1D(binr,
                                     image_mask,
                                     nx,
                                     min_x=min_x,
                                     max_x=max_x)

    # Drop bins with too few pixels so the division below never hits zero.
    th_mask = counts > threshold
    ring_averages = sums[th_mask] / counts[th_mask]
    bin_centers = bin_edges_to_centers(bin_edges)[th_mask]

    return bin_centers, ring_averages


# ============ chxtools/Mask_Maker.py ============
from chx_libs import *
from chx_generic_functions import show_img
def get_sid_filenames(header):
    """Get the scan id, unique id and raw file names of a bluesky scan.

    Parameters
    ----------
    header : a header of a bluesky scan, e.g. ``db[-1]``.

    Returns
    -------
    scan_id : integer
    unique_id : string, the full uid string
    filenames : list of strings

    Usage
    -----
    sid, uid, filenames = get_sid_filenames(db[uid])
    """
    keys = [k for k, v in header.descriptors[0]['data_keys'].items()
            if 'external' in v]
    events = get_events(header, keys,
                        handler_overrides={key: RawHandler for key in keys})
    key, = keys
    filenames = [str(ev['data'][key][0]) + '_' + str(ev['data'][key][2]['seq_id'])
                 for ev in events]
    sid = header['start']['scan_id']
    uid = header['start']['uid']
    return sid, uid, filenames


def load_data(uid, detector='eiger4m_single_image'):
    """Load bluesky scan data by giving a uid and a detector name.

    Parameters
    ----------
    uid : unique ID of a bluesky scan
    detector : the used area detector

    Returns
    -------
    image data : a pims frames series; 0 if the uid could not be read.

    Usage
    -----
    imgs = load_data(uid, detector)
    md = imgs.md
    """
    hdr = db[uid]
    # Retry up to three times; filestore reads occasionally fail transiently.
    flag = 1
    while flag < 4 and flag != 0:
        try:
            ev, = get_events(hdr, [detector])
            flag = 0
        except Exception:
            flag += 1
            print('Trying again ...!')

    if flag:
        print("Can't Load Data!")
        imgs = 0
    else:
        imgs = ev['data'][detector]
    return imgs


def get_frames_from_dscan(hdr, detector='eiger4m_single_image'):
    """Stack the frames of a dscan into one (length, rows, cols) array."""
    ev = get_events(hdr, [detector])
    length = int(hdr['start']['plan_args']['num'])
    shape = hdr['descriptors'][0]['data_keys'][detector]['shape'][:2]
    imgs = np.zeros([length, shape[1], shape[0]])
    for i in range(length):
        # Use the requested detector key (was hardcoded to
        # 'eiger4m_single_image', which broke any other detector).
        imgs[i] = next(ev)['data'][detector][0]

    return np.array(imgs)


def load_metadata(hdr, name):
    """Return the metadata dict of the first image stack for `name`."""
    seq_of_img_stacks = get_images(hdr, name)
    return seq_of_img_stacks[0].md


def load_images(hdr, name):
    """Return all image stacks for `name` as one squeezed ndarray."""
    seq_of_img_stacks = get_images(hdr, name)  # e.g. 1 x 21 x 2167 x 2070
    return np.squeeze(np.asarray(seq_of_img_stacks))


def RemoveHot(img, threshold=1E7, plot_=True):
    """Build a mask that zeroes pixels with intensity >= ``threshold``.

    Parameters
    ----------
    img : array-like image
    threshold : float, hot-pixel cutoff (inclusive)
    plot_ : bool, if True display the resulting mask

    Returns
    -------
    mask : ndarray of the same shape, 1 for good pixels, 0 for hot ones.
    """
    mask = np.ones_like(np.array(img))
    badp = np.where(np.array(img) >= threshold)
    if len(badp[0]) != 0:
        mask[badp] = 0
    if plot_:
        show_img(mask)
    return mask


############
# plot data


def get_avg_img(data_series, sampling=100, plot_=False, *argv, **kwargs):
    """Average every ``sampling``-th frame of a series (to save time).

    Parameters
    ----------
    data_series : indexable sequence of frames
    sampling : int, stride used when sub-sampling the series
    plot_ : bool, if True show the averaged image
    kwargs : may contain 'uid', used only in the plot title

    Returns
    -------
    avg_img : ndarray, mean of the sub-sampled frames.
    """
    avg_img = np.average(data_series[::sampling], axis=0)
    if plot_:
        fig, ax = plt.subplots()
        uid = kwargs.get('uid', 'uid')
        im = ax.imshow(avg_img, cmap='viridis', origin='lower',
                       norm=LogNorm(vmin=0.001, vmax=1e2))
        ax.set_title('Uid= %s--Masked Averaged Image' % uid)
        fig.colorbar(im)
        plt.show()
    return avg_img
# ============ chxtools/Spatial_Correlation_Function.py ============
# FFT-based spatial correlation between (regions of) two images.

import numpy as np
from numpy.fft import fftshift, fft2, ifft2


def mkbox(mask_seg):
    """Crop the non-zero pixels of a mask and zero-pad the result.

    The padded size is the next power of two >= 3x the cropped width, to
    avoid periodic artifacts from the FFT at correlation lengths.

    Parameters
    ----------
    mask_seg : 2-D array, the ROI mask.

    Returns
    -------
    subpxlst : int32 array, flat indices of the non-zero pixels in the
        padded image.
    img_pad : int32 2-D array, the cropped-and-padded mask.
    """
    pxlst = np.where(mask_seg.ravel())[0]
    dims = mask_seg.shape
    imgwidthy = dims[1]  # dimension in y, but plotted as x
    imgwidthx = dims[0]  # dimension in x, but plotted as y
    # Matrix (row, col) notation: x and y appear flipped w.r.t. the plot.
    pixely = pxlst % imgwidthy
    pixelx = pxlst // imgwidthy

    minpixelx = np.min(pixelx)
    minpixely = np.min(pixely)
    maxpixelx = np.max(pixelx)
    maxpixely = np.max(pixely)
    # Equivalent to skimage.util.crop with the original crop widths, but
    # without the (now removed) skimage.util.pad/crop dependency.
    img_crop = mask_seg[minpixelx:maxpixelx + 1, minpixely:maxpixely + 1]
    widthx = maxpixelx - minpixelx + 1
    widthy = maxpixely - minpixely + 1

    # np.int was removed in NumPy 1.24; the builtin int is equivalent here.
    padx = 2 ** int(np.ceil(np.log2(widthx * 3))) - widthx
    pady = 2 ** int(np.ceil(np.log2(widthy * 3))) - widthy

    img_pad = np.pad(img_crop, ((0, padx), (0, pady)),
                     'constant', constant_values=(0, 0))
    subpxlst = np.where(img_pad.ravel() != 0)[0]
    return np.array(subpxlst, dtype=np.int32), np.array(img_pad, dtype=np.int32)


def cross_corr(img1, img2, mask=None):
    """Compute the cross-correlation of two images via FFT.

    Parameters
    ----------
    img1 : first image
    img2 : second image
    mask : optional mask array; if given, the result is normalized by the
        autocorrelation of the mask (clipped below at 1 to avoid division
        by zero).

    Returns
    -------
    imgc : the correlation of the two images (same shape as the inputs).
    """
    imgc = fftshift(ifft2(fft2(img1) * np.conj(fft2(img2))).real)

    if mask is not None:
        maskc = cross_corr(mask, mask)
        imgc /= np.maximum(1, maskc)

    return imgc


def cross_corr_allregion(img1, img2, labeled_array):
    """Spatial correlation between all ROI subregions of two images.

    NOTE(review): np.unique includes the background label (usually 0), so
    the background is correlated as a region too -- confirm that is
    intended by the callers.
    """
    result = []
    for label in np.unique(labeled_array):
        mask = labeled_array == label
        res = cross_corr(img1, img2, mask)
        result.append(res)
    return result


def cross_corr_subregion(img1, img2, mask_seg, center=None, q_pixel=None, sq=None):
    """Spatial correlation between one ROI subregion of two images.

    Irregular masks are handled by zero-padding (see mkbox) to avoid
    periodic artifacts from the FFT at correlation lengths.

    Parameters
    ----------
    img1 : first image
    img2 : second image
    mask_seg : mask array selecting the ROI in both images
    center, q_pixel, sq : optional; for isotropic samples the images are
        normalized by the interpolated 1-D structure factor ``sq`` at the
        radial distance from ``center`` before correlating.

    Returns
    -------
    imgc : the correlation of the two (padded) subregions.
    """
    pxlst = np.where(mask_seg.ravel())[0]

    dims = img1.shape
    imgwidthy = dims[1]  # dimension in y, but plotted as x
    # Matrix (row, col) notation: x and y appear flipped w.r.t. the plot.

    subpxlst, submask = mkbox(mask_seg)
    subimg1 = np.zeros_like(submask, dtype=float)
    subimg2 = np.zeros_like(submask, dtype=float)

    if center is not None:
        pixely = pxlst % imgwidthy - center[1]
        pixelx = pxlst // imgwidthy - center[0]
        # +0.5 rounds to the nearest integer radius.
        r = np.int_(np.hypot(pixelx, pixely) + 0.5)

        subimg1.ravel()[subpxlst] = img1.ravel()[pxlst] / np.interp(r, q_pixel, sq) - 1.0
        subimg2.ravel()[subpxlst] = img2.ravel()[pxlst] / np.interp(r, q_pixel, sq) - 1.0
    else:
        subimg1.ravel()[subpxlst] = img1.ravel()[pxlst]
        subimg2.ravel()[subpxlst] = img2.ravel()[pxlst]

    imgc = cross_corr(subimg1, subimg2, mask=submask)
    return imgc
import scipy.optimize as opt


def get_cross_plot(imgc, imgc_width=None, imgc_widthy=None, plot_=False,
                   cx=None, cy=None):
    """Extract (and optionally plot) the central part of a correlation map.

    Parameters
    ----------
    imgc : 2-D array, the correlation of two images
    imgc_width : optional half-width of the central cut-out (x direction);
        if None, the non-zero bounding box of imgc is used instead.
    imgc_widthy : optional half-width in y; defaults to imgc_width.
    plot_ : bool, plot the horizontal/vertical cuts through the maximum.
    cx, cy : optional known peak position inside the cut-out; found from
        the maximum if not given.

    Returns
    -------
    imgc_center_part, cx, cy : the central cut-out and the peak position.
    """
    cenx, ceny = int(imgc.shape[0] / 2), int(imgc.shape[1] / 2)
    if imgc_width is None:
        # Use the bounding box of the (numerically) non-zero values.
        mx, my = np.where(imgc > 1e-10)
        x1, x2, y1, y2 = mx[0], mx[-1], my[0], my[-1]
        imgc_center_part = imgc[x1:x2, y1:y2]
    else:
        imgc_widthx = imgc_width
        if imgc_widthy is None:
            imgc_widthy = imgc_width
        imgc_center_part = imgc[cenx - imgc_widthx:cenx + imgc_widthx,
                                ceny - imgc_widthy:ceny + imgc_widthy]
    if cx is None:
        cx, cy = np.where(imgc_center_part == imgc_center_part.max())
        cx = cx[0]
        cy = cy[0]

    if plot_:
        # Deferred import keeps this module usable without chx plotting helpers.
        from chx_generic_functions import plot1D
        plot1D(imgc_center_part[cx])
        plot1D(imgc_center_part[:, cy])

    return imgc_center_part, cx, cy


def twoD_Gaussian(xy, amplitude, xo, yo, sigma_x, sigma_y, theta, offset):
    """Two-D (rotated, offset) Gaussian; returns the raveled surface.

    Parameters follow the standard elliptical-Gaussian parameterization:
    amplitude, center (xo, yo), widths (sigma_x, sigma_y), rotation theta,
    and a constant offset.
    """
    x, y = xy[0], xy[1]
    xo = float(xo)
    yo = float(yo)
    a = (np.cos(theta) ** 2) / (2 * sigma_x ** 2) + (np.sin(theta) ** 2) / (2 * sigma_y ** 2)
    b = -(np.sin(2 * theta)) / (4 * sigma_x ** 2) + (np.sin(2 * theta)) / (4 * sigma_y ** 2)
    c = (np.sin(theta) ** 2) / (2 * sigma_x ** 2) + (np.cos(theta) ** 2) / (2 * sigma_y ** 2)
    g = offset + amplitude * np.exp(-(a * ((x - xo) ** 2) + 2 * b * (x - xo) * (y - yo)
                                      + c * ((y - yo) ** 2)))
    return g.ravel()


def fit_two_Gaussian(imgc_center_part, cx, cy,
                     initial_guess=(1., 30, 30, 2, 2, 0, 0), plot_=False):
    """Fit the central correlation cut-out with a 2-D Gaussian.

    initial_guess = (amplitude, xo, yo, sigma_x, sigma_y, theta, offset)
    Prints the fit residual, parameters and Gaussian area; optionally
    plots the horizontal/vertical cuts with the fit overlaid.
    """
    # Create x and y indices of the (assumed square) cut-out.
    imgc_w = int(imgc_center_part.shape[0] / 2)
    x = np.linspace(0, imgc_w * 2 - 1, imgc_w * 2)
    y = np.linspace(0, imgc_w * 2 - 1, imgc_w * 2)
    x, y = np.meshgrid(x, y)
    xy = np.array((x.ravel(), y.ravel()))
    popt, pcov = opt.curve_fit(twoD_Gaussian, xy, imgc_center_part.ravel(),
                               p0=initial_guess)
    data_fitted = twoD_Gaussian(xy, *popt)

    kai = np.sum(np.square(data_fitted - imgc_center_part.ravel()))
    print(kai, popt)
    print("Area = %s" % (popt[0] / (2 * np.pi * popt[3] * popt[4])))

    # Horizontal cut through row cx, oversampled 4x for a smooth curve.
    fx_h, fy_h = np.meshgrid(cx, np.linspace(0, imgc_w * 2 - 1, imgc_w * 2 * 4))
    fxy_h = np.array((fx_h.ravel(), fy_h.ravel()))
    data_fitted_h = twoD_Gaussian(fxy_h, *popt)

    # Vertical cut through column cy.  Bug fix: the original used
    # fy_h.ravel() here, evaluating the vertical fit on the horizontal grid.
    fx_v, fy_v = np.meshgrid(np.linspace(0, imgc_w * 2 - 1, imgc_w * 2 * 4), cy)
    fxy_v = np.array((fx_v.ravel(), fy_v.ravel()))
    data_fitted_v = twoD_Gaussian(fxy_v, *popt)

    if plot_:
        # Deferred import keeps this module importable without matplotlib.
        import matplotlib.pyplot as plt
        fig = plt.figure()
        ax = fig.add_subplot(211)
        ax.set_title('Horzontal Fit')
        plotx = np.linspace(0, imgc_w * 2 - 1, imgc_w * 2 * 4)
        ax.plot(imgc_center_part[cx], 'bo')
        ax.plot(plotx, data_fitted_h, 'r-')

        ax = fig.add_subplot(212)
        ax.set_title('Vertical Fit')
        plotx = np.linspace(0, imgc_w * 2 - 1, imgc_w * 2 * 4)
        ax.plot(imgc_center_part[:, cy], 'bo')
        ax.plot(plotx, data_fitted_v, 'r-')

    return


def load_metadata(hdr, name):
    """Return the metadata dict of the first image stack for `name`."""
    seq_of_img_stacks = get_images(hdr, name)
    return seq_of_img_stacks[0].md


def load_images(hdr, name):
    """Return all image stacks for `name` as one squeezed ndarray."""
    seq_of_img_stacks = get_images(hdr, name)  # 1 x 21 x 2167 x 2070
    return np.squeeze(np.asarray(seq_of_img_stacks))
# ============ chxtools/Time_Correlation_Functions.py (additions) ============


def show_C12(C12, q_ind=0, *argv, **kwargs):
    """Plot one q of a two-time correlation function.

    An earlier duplicate definition of show_C12 (which referenced the
    undefined module globals num_qr/g12b/Nming and was shadowed by this
    one anyway) has been removed.

    Parameters
    ----------
    C12 : two-time correlation function, shape [time, time, qs]
    q_ind : int -> the nth SAXS q to plot;
            list -> [qz_ind, qr_ind] for GiSAXS.
    kwargs :
        timeperframe : the time interval between frames
        N1 : the start frame (time)
        N2 : the end frame (time)
        vmin / vmax : color limits for the plot
        title : if True, show the title

    e.g.
    show_C12(g12b, q_ind=1, N1=0, N2=500, vmin=1.05, vmax=1.07)
    """
    shape = C12.shape
    if isinstance(q_ind, int):
        C12_num = q_ind
    else:
        qz_ind, qr_ind = q_ind
        # num_qr: module-level global set by the GiSAXS workflow -- TODO confirm.
        C12_num = qz_ind * num_qr + qr_ind

    # kwargs.get keeps the same defaults as the original if/else chains.
    timeperframe = kwargs.get('timeperframe', 1)
    vmin = kwargs.get('vmin', 1)
    vmax = kwargs.get('vmax', 1.05)
    N1 = kwargs.get('N1', 0)
    N2 = kwargs.get('N2', shape[0])
    title = kwargs.get('title', True)

    data = C12[N1:N2, N1:N2, C12_num]
    fig, ax = plt.subplots()
    im = ax.imshow(data, origin='lower', cmap='viridis',
                   norm=LogNorm(vmin, vmax),
                   extent=[0, data.shape[0] * timeperframe,
                           0, data.shape[0] * timeperframe])
    if title:
        if isinstance(q_ind, int):
            ax.set_title('%s-%s frames--Qth= %s' % (N1, N2, C12_num))
        else:
            ax.set_title('%s-%s frames--Qzth= %s--Qrth= %s' % (N1, N2, qz_ind, qr_ind))

    ax.set_xlabel(r'$t_1$ $(s)$', fontsize=18)
    ax.set_ylabel(r'$t_2$ $(s)$', fontsize=18)
    fig.colorbar(im)
    plt.show()


def plot_saxs_g2(g2, taus, *argv, **kwargs):
    """Plot one-time correlation results, one panel per q ring.

    Parameters
    ----------
    g2 : 2-D array, shape (n_tau, n_rings), one-time correlation function
    taus : 1-D array of time delays
    kwargs :
        uid : label used in the figure title
        q_ring_center : q value of each ring (defaults to the ring index)
        vlim : [vmin, vmax] -> y-limits [vmin*min(y), vmax*max(y[1:])]
        ylim / xlim : explicit y / x axis limits

    e.g.
    plot_saxs_g2(g2b, taus=np.arange(g2b.shape[0]) * timeperframe,
                 q_ring_center=q_ring_center, vlim=[.99, 1.01])
    """
    # The original looked uid up twice; once is enough.
    uid = kwargs.get('uid', 'uid')
    q_ring_center = kwargs.get('q_ring_center', np.arange(g2.shape[1]))

    num_rings = g2.shape[1]
    sx = int(round(np.sqrt(num_rings)))
    if num_rings % sx == 0:
        sy = int(num_rings / sx)
    else:
        sy = int(num_rings / sx + 1)

    fig = plt.figure(figsize=(14, 10))
    plt.title('uid= %s' % uid, fontsize=20, y=1.06)
    plt.axis('off')
    plt.xticks([])
    plt.yticks([])
    for i in range(num_rings):
        ax = fig.add_subplot(sx, sy, i + 1)
        ax.set_ylabel("g2")
        ax.set_title(" Q= " + '%.5f ' % (q_ring_center[i]) + r'$\AA^{-1}$')
        y = g2[:, i]
        ax.semilogx(taus, y, '-o', markersize=6)
        if 'ylim' in kwargs:
            ax.set_ylim(kwargs['ylim'])
        elif 'vlim' in kwargs:
            vmin, vmax = kwargs['vlim']
            ax.set_ylim([min(y) * vmin, max(y[1:]) * vmax])
        if 'xlim' in kwargs:
            ax.set_xlim(kwargs['xlim'])

    plt.show()
    fig.tight_layout()
######################################################################################
######## Dec 16, 2015, Yugang Zhang, yuzhang@bnl.gov, CHX, NSLS-II, BNL ##############
######## Time correlation functions: one-time, two-time, four-time ###################
######## Multi-tau method and array-operation method #################################
######################################################################################

import itertools
import sys
import time
from datetime import datetime

import numpy as np
import skbeam.core.roi as roi
import matplotlib.pyplot as plt
from matplotlib import gridspec
from matplotlib.colors import LogNorm

# Shared plotting style cycles.
mcolors = itertools.cycle(['b', 'g', 'r', 'c', 'm', 'y', 'k', 'darkgoldenrod',
                           'oldlace', 'brown', 'dodgerblue'])
markers = itertools.cycle(list(plt.Line2D.filled_markers))
lstyles = itertools.cycle(['-', '--', '-.', '.', ':'])


def delays(num_lev=3, num_buf=4, time=1):
    """Return the multi-tau delay times.

    Parameters
    ----------
    num_lev : number of multi-tau levels
    num_buf : buffers per level; must be even (a warning is printed otherwise)
    time : scale factor for the delays (the time between frames)

    Returns
    -------
    dly : 1-D array of delays, scaled by ``time``
    dict_dly : dict mapping level -> its delays.
        NOTE(review): dict_dly is filled before ``dly *= time`` from copies
        (fancy indexing), so its values are NOT scaled by ``time`` -- confirm
        that is intended.
    """
    if num_buf % 2 != 0:
        print("nobuf must be even!!!")
    dly = np.zeros((num_lev + 1) * int(num_buf / 2) + 1)
    dict_dly = {}
    for i in range(1, num_lev + 1):
        if i == 1:
            imin = 1
        else:
            imin = int(num_buf / 2) + 1
        ptr = (i - 1) * int(num_buf / 2) + np.arange(imin, num_buf + 1)
        dly[ptr] = np.arange(imin, num_buf + 1) * 2 ** (i - 1)
        dict_dly[i] = dly[ptr - 1]
    dly *= time
    return dly, dict_dly


class Get_Pixel_Array(object):
    """Load the ROI pixels of an image sequence into memory.

    get_data() returns a 2-D array of shape (len(images), len(pixelist)).

    Example
    -------
    data_pixel = Get_Pixel_Array(imgsr, pixelist).get_data()
    """

    def __init__(self, indexable, pixelist):
        """
        indexable : an image sequence
        pixelist : 1-D array, flat indices of the pixels of interest
        """
        self.indexable = indexable
        self.pixelist = pixelist
        try:
            self.length = len(indexable)
        except TypeError:
            # Wrapper objects (e.g. Reverse_Coordinate) expose .length instead.
            self.length = indexable.length

    def get_data(self):
        """Return the pixels of interest as a (n_images, n_pixels) array."""
        data_array = np.zeros([self.length, len(self.pixelist)])
        for key in range(self.length):
            data_array[key] = np.ravel(self.indexable[key])[self.pixelist]
        return data_array
class Reverse_Coordinate(object):
    """Wrap an image sequence, flipping each frame in the y direction.

    Needed because python's row order differs from the Eiger software's.

    Example
    -------
    imgsr = Reverse_Coordinate(imgs, mask)
    """

    def __init__(self, indexable, mask):
        self.indexable = indexable
        self.mask = mask
        try:
            self.shape = indexable.shape
        except AttributeError:
            # A plain list of frames: infer (n_frames, rows, cols).
            self.shape = [len(indexable),
                          indexable[0].shape[0],
                          indexable[0].shape[1]]
        self.length = len(indexable)

    def __getitem__(self, key):
        if self.mask is not None:
            img = self.indexable[key] * self.mask
        else:
            img = self.indexable[key]

        # Flip along the row axis; a frame is either a 2-D image or a
        # 3-D stack whose middle axis is the row axis.
        if len(img.shape) == 3:
            img_ = img[:, ::-1, :]
        elif len(img.shape) == 2:
            img_ = img[::-1, :]
        return img_


def get_mean_intensity(data_pixel, qind):
    """Mean ROI intensity as a function of time (image number).

    Dec 16, 2015, Y.G.@CHX

    Parameters
    ----------
    data_pixel : 2-D array, shape (len(images), len(qind)); obtained with
        Get_Pixel_Array( ).get_data( )
    qind : 1-D int array, q label of each pixel, values from 1 upward

    Returns
    -------
    mean_inten : dict keyed by the unique values of qind; each entry is a
        1-D array of length data_pixel.shape[0] (i.e. len(images)).

    Example
    -------
    mean_inten = get_mean_intensity(data_pixel, qind)
    """
    noqs = len(np.unique(qind))
    mean_inten = {}

    for qi in range(1, noqs + 1):
        pixelist_qi = np.where(qind == qi)[0]
        data_pixel_qi = data_pixel[:, pixelist_qi]
        mean_inten[qi] = data_pixel_qi.mean(axis=1)
    return mean_inten


def run_time(t0):
    """Print the elapsed run time of a program.

    Parameters
    ----------
    t0 : start time obtained from time.time()

    Usage
    -----
    t0 = time.time()
    ...(the running code)
    run_time(t0)
    """
    elapsed_time = time.time() - t0
    print('Total time: %.2f min' % (elapsed_time / 60.))


def auto_two_Array(data, rois, data_pixel=None):
    """Two-time correlation function via array (matrix) operations.

    Dec 16, 2015, Y.G.@CHX

    Parameters
    ----------
    data : image sequence, shape [img[0], img[1], imgs_length]
    rois : 2-D int array, same shape as one image; the labeled ROI
        (rings for SAXS, boxes for GiSAXS)
    data_pixel : optional 2-D array of shape (len(images), len(qind)),
        obtained with Get_Pixel_Array( ).get_data( ); computed here if None.

    Returns
    -------
    g12b : 3-D array, shape (imgs_length, imgs_length, q)

    Example
    -------
    g12 = auto_two_Array(imgsr, ring_mask, data_pixel=data_pixel)
    """
    start_time = time.time()

    qind, pixelist = roi.extract_label_indices(rois)
    noqs = len(np.unique(qind))
    nopr = np.bincount(qind, minlength=(noqs + 1))[1:]

    if data_pixel is None:
        data_pixel = Get_Pixel_Array(data, pixelist).get_data()

    noframes = data_pixel.shape[0]
    g12b = np.zeros([noframes, noframes, noqs])
    Unitq = noqs / 10  # progress marker roughly every 10% of the q bins
    proi = 0

    for qi in range(1, noqs + 1):
        pixelist_qi = np.where(qind == qi)[0]
        data_pixel_qi = data_pixel[:, pixelist_qi]

        sum1 = (np.average(data_pixel_qi, axis=1)).reshape(1, noframes)
        sum2 = sum1.T

        # <I(t1) I(t2)> / <I(t1)> / <I(t2)>, normalized by the pixel count.
        g12b[:, :, qi - 1] = np.dot(data_pixel_qi, data_pixel_qi.T) / sum1 / sum2 / nopr[qi - 1]
        if int(qi // Unitq) == proi:
            sys.stdout.write("#")
            sys.stdout.flush()
            proi += 1

    elapsed_time = time.time() - start_time
    print('Total time: %.2f min' % (elapsed_time / 60.))

    return g12b
imgs_length, q), a normalization for further get one-time from two time, get by: g12b_norm, g12b_not_norm, norms = auto_two_Array_g1_norm( imgsr, ring_mask, data_pixel = data_pixel ) + nopr: if not None, 1-D array, shape as [q], the number of interested pixel of each q + + + Return: + g2f12: a 2-D array, shape as ( imgs_length, q), + a one-time correlation function + + One example: + g2b_norm = get_one_time_from_two_time(g12b_norm, norms=None, nopr=None ) + g2b_not_norm = get_one_time_from_two_time(g12b_not_norm, norms=norms, nopr=nopr) + ''' + + m,n,noqs = g12.shape + g2f12 = np.zeros( [m,noqs ] ) + for q in range(noqs): + y=g12[:,:,q] + for tau in range(m): + + if norms is None: + g2f12[tau,q] = np.nanmean( np.diag(y,k=int(tau)) ) + else: + yn = norms[:,q] + yn1 = np.average( yn[tau:] ) + yn2 = np.average( yn[: m-tau] ) + g2f12[tau,q] = np.nanmean( np.diag(y,k=int(tau)) ) / (yn1*yn2*nopr[q]) + + return g2f12 + + + +##################################### +#get one-time @different age +##################################### + +def get_qedge( qstart,qend,qwidth,noqs, ): + ''' DOCUMENT make_qlist( ) + give qstart,qend,qwidth,noqs + return a qedge by giving the noqs, qstart,qend,qwidth. 
def rotate_g12q_to_rectangle( g12q ):
    '''
    Dec 16, 2015, Y.G.@CHX
    Rotate a one-q two-time correlation function by 45 degrees (anti-clockwise
    along the main diagonal) into a masked rectangular array.

    The input of shape ( imgs_length, imgs_length ) becomes a masked array of
    shape ( 2*imgs_length - 1, imgs_length ):
        x-axis: taus, from 0 to imgs_length
        y-axis: ages, from 0 (bottom) through imgs_length (middle) to
                2*imgs_length - 1 (top)

    One example:
        g12qr = rotate_g12q_to_rectangle(g12bm[:,:,0] )
    '''
    n_frames = g12q.shape[1]
    height = 2 * n_frames - 1
    # start fully masked; only the rotated diagonals get unmasked below
    rotated = np.ma.masked_all( (height, n_frames) )
    for offset in range(n_frames):
        # the k=offset diagonal lands in column `offset`, every other row
        rotated[offset: height - offset: 2, offset] = np.diag( g12q, k=offset )
    return rotated
def get_aged_g2_from_g12q2( g12q, slice_num = 6, slice_width=5, slice_start=0, slice_end= 1 ):

    '''
    Dec 16, 2015, Y.G.@CHX
    Get one-time correlation functions at different ages from a two-time
    correlation function, by averaging age-slices of the rotated g12.

    Parameters:
        g12q: a 2-D array, one-q two-time correlation function,
              shape as ( imgs_length, imgs_length )

    Options:
        slice_num: int, number of age slices taken along the diagonal of g12
        slice_width: int, width of each slice, in unit of pixel
        slice_start: int, first age (can start from 0)
        slice_end: int, last age (can end at 2*imgs_length - 1)

    Return:
        g2_aged: a dict, one-time correlation function at different ages;
                 keys are age centers in unit of pixel, values are 1-D arrays
                 (nonzero entries only) of the one-q one-time correlation

    One example:
        g2_aged = get_aged_g2_from_g12q2( g12q, slice_num =3, slice_width= 500,
                 slice_start=4000, slice_end= 20000-4000 )
    '''


    # rotate so that ages run along rows and taus along columns
    arr= rotate_g12q_to_rectangle( g12q )
    m,n = arr.shape #m should be 2*n-1
    # reuse the q-edge helper to build (edge, center) pairs for the age slices
    age_edge, age_center = get_qedge( qstart=slice_start,qend= slice_end,
                 qwidth = slice_width, noqs =slice_num )
    age_edge, age_center = np.int_(age_edge), np.int_(age_center)
    #print (age_edge, age_center)
    g2_aged = {}
    for i,age in enumerate(age_center):
        # pair of row indices bounding this age slice
        age_edges_0, age_edges_1 = age_edge[ i*2 : 2*i+2]
        # average over the slice rows; masked entries are skipped by ma.mean
        g2i = arr[ age_edges_0: age_edges_1 ].mean( axis =0 )
        g2i_ = np.array( g2i )
        # drop zero entries (fully-masked columns collapse to 0 here)
        g2_aged[age] = g2i_[np.nonzero( g2i_)[0]]

    return g2_aged
def plot_aged_g2( g2_aged, tau=None, timeperframe=1, ylim=None, xlim=None):
    '''A plot of g2 (one-time correlation) curves calculated from two-time.

    Parameters:
        g2_aged: dict, age -> 1-D g2 array (keys are age centers in frames)
    Options:
        tau: if not None, explicit x-axis values for every curve;
             otherwise the x-axis is frame index * timeperframe
        timeperframe: float, time per frame, used to scale axes and labels
        ylim, xlim: optional axis limits, passed to matplotlib

    One subplot is drawn per age, stacked vertically.
    '''
    fig = plt.figure(figsize=(8, 10))
    age_center = list( sorted( g2_aged.keys() ) )
    gs = gridspec.GridSpec( len(age_center), 1 )
    for n, i in enumerate( age_center ):
        ax = plt.subplot(gs[n])
        if tau is None:
            gx = np.arange( len(g2_aged[i]) ) * timeperframe
        else:
            # bug fix: previously a supplied tau produced an empty plot
            gx = tau
        marker = next(markers)
        ax.plot( gx, g2_aged[i], '-%s'%marker, label=r"$age= %.1f s$"%(i*timeperframe))
        ax.set_xscale('log')
        ax.legend(fontsize='large', loc='best' )
        ax.set_xlabel(r"$\tau $ $(s)$", fontsize=18)
        ax.set_ylabel("g2")
        if ylim is not None:
            ax.set_ylim( ylim )
        if xlim is not None:
            # bug fix: was ax.set_ylim( xlim )
            ax.set_xlim( xlim )
def show_g12q_taus( g12q, taus, slice_width=10, timeperframe=1,vmin= 1, vmax= 1.25 ):

    '''
    Dec 16, 2015, Y.G.@CHX
    Plot tau-lines as a function of age on top of a two-time correlation map.

    Parameters:
        g12q: 2-D array, one-q two-time correlation function,
              shape ( imgs_length, imgs_length )
        taus: dict, tau lines; keys are tau (slice centers) in unit of pixel,
              values are 1-D arrays, obtained by e.g.:
              taus = get_tau_from_g12q( g12b_norm[:,:,0], slice_num = 5,
                        slice_width=1, slice_start=3, slice_end= 5000-1 )

    Options:
        slice_width: int, slice width in pixels, sets the drawn line width
        timeperframe: float, time per frame, for axis units
        vmin, vmax: float, color/axis limits for matplotlib

    Return:
        two plots: the tau cut lines over g12q, and tau-lines vs age

    One example:
        show_g12q_taus( g12b_norm[:,:,0], taus, slice_width=50,
                       timeperframe=1,vmin=1.01,vmax=1.55 )
    '''

    age_center = list( taus.keys() )
    print ('the cut tau centers are: ' +str(age_center) )
    M, N = g12q.shape

    figw, figh = 10, 10
    fig = plt.figure(figsize=(figw, figh))
    gs = gridspec.GridSpec(1, 2, width_ratios=[10, 8], height_ratios=[8, 8])
    ax = plt.subplot(gs[0])
    ax1 = plt.subplot(gs[1])

    im = ax.imshow( g12q, origin='lower', cmap='viridis',
                    norm=LogNorm( vmin=vmin, vmax=vmax ), extent=[0, N, 0, N] )

    # each tau cut is a line from (tau-1, 0) up to (N-1, N-tau) on the map
    start_pts = list( zip( np.int_(age_center) - 1, [0] * len(age_center) ) )
    end_pts = list( zip( [N - 1] * len(age_center), N - np.int_(age_center) ) )
    for ps, pe in zip( start_pts, end_pts ):
        lined = slice_width  # data-space width of the cut
        linewidth = (lined * (figh * 72. / N)) * 0.8
        ax.plot( [ps[0], pe[0]], [ps[1], pe[1]], linewidth=linewidth )

    ax.set_title( '%s_frames'%(N) )
    ax.set_xlabel( r'$t_1$ $(s)$', fontsize = 18)
    ax.set_ylabel( r'$t_2$ $(s)$', fontsize = 18)
    fig.colorbar(im)

    ax1.set_title("Tau_Cuts_in_G12")
    for key in sorted( taus.keys() ):
        gx = np.arange( len(taus[key]) ) * timeperframe
        marker = next(markers)
        ax1.plot( gx, taus[key], '-%s'%marker, label=r"$tau= %.1f s$"%(key*timeperframe))
    ax1.set_ylim( vmin, vmax )
    ax1.set_xlabel(r'$t (s)$', fontsize=5)
    ax1.set_ylabel("g2")
    ax1.set_xscale('log')
    ax1.legend(fontsize='small', loc='best' )
    plt.show()
def get_four_time_from_two_time( g12, g2=None, rois=None ):
    '''
    Dec 16, 2015, Y.G.@CHX
    Get the four-time correlation function from a two-time correlation function,
    i.e. compute the variance of each diagonal (constant-lag) line of g12,
    normalized by the one-time contrast.
    TOBEDONE: deal with bad frames

    Parameters:
        g12: 3-D array, two-time correlation function,
             shape ( imgs_length, imgs_length, q )
        g2:  2-D array, shape ( imgs_length, q ) (i.e. (tau, q)),
             one-time correlation function used for normalization.
             Required (the default None only exists for signature
             compatibility and now raises a clear error).

    Options:
        rois: if not None, a list [x-slice-start, x-slice-end,
              y-slice-start, y-slice-end] restricting the g12 window

    Return:
        g4f12: 2-D array, shape ( imgs_length, q ),
               the four-time correlation function

    One example:
        s1,s2 = 0,2000
        g4 = get_four_time_from_two_time( g12bm, g2b, rois=[s1,s2,s1,s2] )
    '''
    if g2 is None:
        # previously this fell through to an opaque TypeError on g2[:, q]
        raise ValueError("g2 (one-time correlation) is required for normalization")

    m, n, noqs = g12.shape
    g4f12 = []
    for q in range(noqs):
        per_tau = []
        if rois is None:
            y = g12[:, :, q]
        else:
            x1, x2, y1, y2 = rois
            y = g12[x1:x2, y1:y2, q]
        m, n = y.shape
        # contrast-squared normalization from the tau=0 one-time value
        norm = (g2[0, q] - 1) ** 2
        for tau in range(m):
            d_ = np.diag(y, k=int(tau))
            # drop entries equal to exactly 1 (masked / trivial values)
            d = d_[np.where(d_ != 1)]
            per_tau.append( (d.std()) ** 2 / norm )
        col = np.array(per_tau).reshape(len(per_tau), 1)
        if q == 0:
            g4f12 = col
        else:
            g4f12 = np.hstack([g4f12, col])
    return g4f12
def make_gisaxs_grid( qr_w= 10, qz_w = 12, dim_r =100, dim_z=120):
    '''Build an integer-labeled grid for GiSAXS ROIs.

    Parameters:
        qr_w, qz_w: int, box width along r and z, in pixels
        dim_r, dim_z: int, detector dimensions along r and z

    Return:
        a 2-D int array of shape (dim_z, dim_r) where each qr_w x qz_w box
        carries a distinct label starting from 1 (row-major over boxes);
        unassigned pixels keep their row-index value from np.indices.

    NOTE(review): rows are sliced with qr_w and columns with qz_w, which looks
    transposed relative to (dim_z, dim_r) — confirm the intended orientation.
    '''
    y, x = np.indices( [dim_z, dim_r] )
    Nr = int(dim_r / qr_w)   # bug fix: was undefined name `qp_w`
    Nz = int(dim_z / qz_w)

    ind = 1
    for i in range(0, Nr):
        for j in range(0, Nz):
            y[ qr_w*i: qr_w*(i+1), qz_w*j: qz_w*(j+1) ] = ind
            ind += 1
    return y
def get_incident_angles( inc_x0, inc_y0, refl_x0, refl_y0, pixelsize=[75,75], Lsd=5.0):
    ''' giving: incident beam center: bcenx,bceny
                reflected beam on detector: rcenx, rceny
                sample to detector distance: Lsd, in meters
                pixelsize: 75 um for Eiger4M detector
        get incident_angle (alphai), the title angle (phi)
    '''
    px, py = pixelsize
    # pixel offsets of the reflected beam from the direct beam, in meters
    dx_m = (refl_x0 - inc_x0) * px * 10**(-6)
    dy_m = (refl_y0 - inc_y0) * py * 10**(-6)
    # tilt angle of the reflection plane
    phi = np.arctan2( dx_m, dy_m )
    # specular condition: detector offset corresponds to 2*alphai
    alphai = np.arctan2( dy_m, Lsd ) / 2.
    return alphai, phi
def get_qmap_label( qmap, qedge ):
    '''Bin a q-map into an integer label array using paired q edges.

    Pixels whose q falls between qedge[2k] and qedge[2k+1] get label k+1;
    everything else (gaps between pairs, below/above all edges) gets 0.
    '''
    import numpy as np
    flat_edges = np.atleast_2d(np.asarray(qedge)).ravel()
    bins = np.int_( np.digitize(qmap.ravel(), flat_edges, right=False) )
    # odd digitize indices lie inside an (even-edge, odd-edge) pair -> ROI (idx+1)//2
    labels = (np.where(bins % 2 != 0, bins, 0) + 1) // 2
    return labels.reshape( qmap.shape )
+ uqzr = np.unique( label_array_qzr )[1:] + + uqz = np.unique( label_array_qz )[1:] + uqr = np.unique( label_array_qr )[1:] + #print (uqzr) + label_array_qzr_ = np.zeros_like( label_array_qzr ) + newl = np.arange( 1, len(uqzr)+1) + + qzc =list(qz_center) * len( uqr ) + qrc= [ [qr_center[i]]*len( uqz ) for i in range(len( uqr )) ] + + for i, label in enumerate(uqzr): + #print (i, label) + label_array_qzr_.ravel()[ np.where( label_array_qzr.ravel() == label)[0] ] = newl[i] + + + return np.int_(label_array_qzr_), np.array( qzc ), np.concatenate(np.array(qrc )) + + + +def show_label_array_on_image(ax, image, label_array, cmap=None,norm=None, log_img=True,alpha=0.3, + imshow_cmap='gray', **kwargs): #norm=LogNorm(), + """ + This will plot the required ROI's(labeled array) on the image + Additional kwargs are passed through to `ax.imshow`. + If `vmin` is in kwargs, it is clipped to minimum of 0.5. + Parameters + ---------- + ax : Axes + The `Axes` object to add the artist too + image : array + The image array + label_array : array + Expected to be an unsigned integer array. 
def show_qz(qz):
    '''
    plot qz mape

    '''
    fig, ax = plt.subplots()
    image = ax.imshow( qz, origin='lower', cmap='viridis',
                       vmin=qz.min(), vmax=qz.max() )
    fig.colorbar(image)
    ax.set_title( 'Q-z')
    plt.show()
qz, = qz_start, qz_end, qz_width , qz_num + qr: qr-map + qz: qz-map + inc_x0: x-center of incident beam + mask: a mask for qr-1d integration + show_roi: boolean, if ture, show the interest ROI + ticks: ticks for the plot, = zticks, zticks_label, rticks, rticks_label + alpha: transparency of ROI + Return: qr_1d, a dict, with keys as qz number + qr_1d[key]: + Plot 1D cureve as a function of Qr for each Qz + + Examples: + #to make two-qz, from 0.018 to 0.046, width as 0.008, + qz_width = 0.008 + qz_start = 0.018 + qz_width/2 + qz_end = 0.046 - qz_width/2 + qz_num= 2 + + + #to make one-qr, from 0.02 to 0.1, and the width is 0.1-0.012 + qr_width = 0.1-0.02 + qr_start = 0.02 + qr_width /2 + qr_end = 0.01 - qr_width /2 + qr_num = 1 + + Qr = [qr_start , qr_end, qr_width, qr_num] + Qz= [qz_start, qz_end, qz_width , qz_num ] + new_mask[ :, 1020:1045] =0 + ticks = show_qzr_map( qr,qz, inc_x0, data = avg_imgmr, Nzline=10, Nrline=10 ) + qx, qy, qr, qz = convert_gisaxs_pixel_to_q( inc_x0, inc_y0,refl_x0,refl_y0, lamda=lamda, Lsd=Lsd ) + + qr_1d = get_1d_qr( avg_imgr, Qr, Qz, qr, qz, inc_x0, new_mask, True, ticks, .8) + + + ''' + + + qr_start , qr_end, qr_width, qr_num =Qr + qz_start, qz_end, qz_width , qz_num =Qz + qr_edge, qr_center = get_qedge(qr_start , qr_end, qr_width, qr_num ) + qz_edge, qz_center = get_qedge( qz_start, qz_end, qz_width , qz_num ) + + print ('The qr_edge is: %s\nThe qr_center is: %s'%(qr_edge, qr_center)) + print ('The qz_edge is: %s\nThe qz_center is: %s'%(qz_edge, qz_center)) + label_array_qr = get_qmap_label( qr, qr_edge) + + if show_roi: + label_array_qz0 = get_qmap_label( qz , qz_edge) + label_array_qzr0,qzc0,qrc0 = get_qzrmap(label_array_qz0, label_array_qr,qz_center, qr_center ) + + if mask is not None:label_array_qzr0 *= mask + #data_ = data*label_array_qzr0 + show_qzr_roi( data,label_array_qzr0, inc_x0, ticks, alpha) + + fig, ax = plt.subplots() + qr_1d ={} + for i,qzc_ in enumerate(qz_center): + + #print (i,qzc_) + label_array_qz = 
def interp_zeros( data ):
    '''Fill interior zeros of a 2-D array by linear interpolation over the
    flattened array, using the nonzero entries as anchors.

    Leading/trailing zeros (before the first / after the last nonzero entry)
    are left untouched.

    Parameters:
        data: 2-D array with some zero entries

    Return:
        a NEW 2-D array of the same shape with interior zeros interpolated.
        Bug fix: previously the input array was silently modified in place
        through the ravel() view.

    Raises:
        ValueError: if `data` has no nonzero entries (previously IndexError).
    '''
    from scipy.interpolate import interp1d
    flat = data.ravel().copy()   # copy: do not mutate the caller's array
    nz, = flat.nonzero()
    if nz.size == 0:
        raise ValueError('interp_zeros: input has no nonzero values to interpolate from')
    start, stop = nz[0], nz[-1] + 1
    dx, dy = data.shape
    x = np.arange( dx * dy )
    f = interp1d(x[nz], flat[nz])
    flat[start:stop] = f(x[start:stop])
    return flat.reshape([dx, dy])
def get_qz_tick_label( qz, label_array_qz, interp=True):
    '''
    Dec 16, 2015, Y.G.@CHX
    get zticks,zticks_label

    Parameters:
        qz: 2-D array, qz of a gisaxs image (data)
        label_array_qz: a labelled array of the qz map, get by:
            label_array_qz = get_qmap_label( qz, qz_edge)
        interp: if True, round qz labels with np.round(data, 3)

    Return:
        zticks: list, z-tick positions in unit of pixel
        zticks_label: list, z-tick positions in unit of real space

    Examples:
        zticks,zticks_label = get_qz_tick_label( qz, label_array_qz)
    '''
    n_labels = len( np.unique( label_array_qz ) )
    tick_rows = []
    tick_vals = []
    for lab in range(1, n_labels):
        # first row index where this label occurs, and its qz value
        tick_rows.append( int( np.where( label_array_qz == lab )[0][0] ) )
        tick_vals.append( qz[label_array_qz == lab][0] )
    zticks = np.array( tick_rows )
    zticks_label = np.array( tick_vals )
    if interp:
        rounded = np.round( zticks_label, 3 )
        zticks = np.int_( np.interp( rounded, zticks_label, zticks ) )
        zticks_label = rounded
    return zticks, zticks_label
if data is None: + data=qr+qz + im = ax.imshow(data, cmap='viridis',origin='lower') + else: + im = ax.imshow(data, cmap='viridis',origin='lower', norm= LogNorm(vmin=0.001, vmax=1e1)) + + imr=ax.imshow(label_array_qr, origin='lower' ,cmap='viridis', vmin=0.5,vmax= None )#,interpolation='nearest',) + imz=ax.imshow(label_array_qz, origin='lower' ,cmap='viridis', vmin=0.5,vmax= None )#,interpolation='nearest',) + + #caxr = fig.add_axes([0.88, 0.2, 0.03, .7]) #x,y, width, heigth + #cba = fig.colorbar(im, cax=caxr ) + #cba = fig.colorbar(im, fraction=0.046, pad=0.04) + + divider = make_axes_locatable(ax) + cax = divider.append_axes("right", size="5%", pad=0.05) + plt.colorbar(im, cax=cax) + + #fig.colorbar(im, shrink =.82) + #cba = fig.colorbar(im) + + ax.set_xlabel(r'$q_r$', fontsize=18) + ax.set_ylabel(r'$q_z$',fontsize=18) + + zticks,zticks_label = get_qz_tick_label(qz,label_array_qz) + #rticks,rticks_label = get_qr_tick_label(label_array_qr,inc_x0) + + rticks,rticks_label = zip(*sorted( zip( *get_qr_tick_label( qr, label_array_qr, inc_x0) )) ) + + #stride = int(len(zticks)/10) + + stride = 1 + ax.set_yticks( zticks[::stride] ) + yticks = zticks_label[::stride] + ax.set_yticklabels(yticks, fontsize=7) + + #stride = int(len(rticks)/10) + stride = 1 + ax.set_xticks( rticks[::stride] ) + xticks = rticks_label[::stride] + ax.set_xticklabels(xticks, fontsize=7) + + #print (xticks, yticks) + + + ax.set_title( 'Q-zr_Map', y=1.03,fontsize=18) + plt.show() + return zticks,zticks_label,rticks,rticks_label + + + + +def show_qzr_roi( data, rois, inc_x0, ticks, alpha=0.3): + + ''' + Dec 16, 2015, Y.G.@CHX + plot a qzr map of a gisaxs image with rois( a label array) + + Parameters: + data: 2-D array, a gisaxs image + rois: 2-D array, a label array + inc_x0: the incident beam center x + ticks: zticks, zticks_label, rticks, rticks_label = ticks + zticks: list, z-tick positions in unit of pixel + zticks_label: list, z-tick positions in unit of real space + rticks: list, r-tick 
positions in unit of pixel + rticks_label: list, r-tick positions in unit of real space + + Options: + alpha: transparency of the label array on top of data + + Return: + a plot of a qzr map of a gisaxs image with rois( a label array) + + + Examples: + show_qzr_roi( avg_imgr, box_maskr, inc_x0, ticks) + + ''' + + + + #import matplotlib.pyplot as plt + #import copy + #import matplotlib.cm as mcm + + #cmap='viridis' + #_cmap = copy.copy((mcm.get_cmap(cmap))) + #_cmap.set_under('w', 0) + zticks, zticks_label, rticks, rticks_label = ticks + avg_imgr, box_maskr = data, rois + num_qzr = len(np.unique( box_maskr)) -1 + fig, ax = plt.subplots(figsize=(8,12)) + ax.set_title("ROI--Labeled Array on Data") + im,im_label = show_label_array_on_image(ax, avg_imgr, box_maskr, imshow_cmap='viridis', + cmap='Paired', alpha=alpha, + vmin=0.01, vmax=30. , origin="lower") + + + for i in range( 1, num_qzr+1 ): + ind = np.where( box_maskr == i)[1] + indz = np.where( box_maskr == i)[0] + c = '%i'%i + y_val = int( indz.mean() ) + + #print (ind[0], ind[1]) + if ind[0] < inc_x0 and ind[-1]>inc_x0: + x_val1 = int( (ind[np.where(ind < inc_x0)[0]]).mean() ) + x_val2 = int( (ind[np.where(ind > inc_x0)[0]]).mean() ) + ax.text(x_val1, y_val, c, va='center', ha='center') + ax.text(x_val2, y_val, c, va='center', ha='center') + + else: + x_val = int( ind.mean() ) + #print (xval, y) + ax.text(x_val, y_val, c, va='center', ha='center') + + #print (x_val1,x_val2) + + #stride = int(len(zticks)/3) + stride = 1 + ax.set_yticks( zticks[::stride] ) + yticks = zticks_label[::stride] + ax.set_yticklabels(yticks, fontsize=9) + + #stride = int(len(rticks)/3) + stride = 1 + ax.set_xticks( rticks[::stride] ) + xticks = rticks_label[::stride] + ax.set_xticklabels(xticks, fontsize=9) + + #print (xticks, yticks) + + #caxr = fig.add_axes([0.95, 0.1, 0.03, .8]) #x,y, width, heigth + #cba = fig.colorbar(im_label, cax=caxr ) + + #fig.colorbar(im_label, shrink =.85) + #fig.colorbar(im, shrink =.82) + + divider = 
make_axes_locatable(ax) + cax = divider.append_axes("right", size="5%", pad=0.05) + plt.colorbar(im, cax=cax) + + ax.set_xlabel(r'$q_r$', fontsize=22) + ax.set_ylabel(r'$q_z$',fontsize=22) + + plt.show() + + + +#plot g2 results + +def plot_gisaxs_g2( g2, taus, res_pargs=None, *argv,**kwargs): + '''plot g2 results, + g2: one-time correlation function + taus: the time delays + res_pargs, a dict, can contains + uid/path/qr_center/qz_center/ + kwargs: can contains + vlim: [vmin,vmax]: for the plot limit of y, the y-limit will be [vmin * min(y), vmx*max(y)] + ylim/xlim: the limit of y and x + + e.g. + plot_gisaxs_g2( g2b, taus= np.arange( g2b.shape[0]) *timeperframe, q_ring_center = q_ring_center, vlim=[.99, 1.01] ) + + ''' + + + if res_pargs is not None: + uid = res_pargs['uid'] + path = res_pargs['path'] + qz_center = res_pargs[ 'qz_center'] + num_qz = len( qz_center) + qr_center = res_pargs[ 'qr_center'] + num_qr = len( qr_center) + + else: + + if 'uid' in kwargs.keys(): + uid = kwargs['uid'] + else: + uid = 'uid' + if 'path' in kwargs.keys(): + path = kwargs['path'] + else: + path = '' + if 'qz_center' in kwargs.keys(): + qz_center = kwargs[ 'qz_center'] + num_qz = len( qz_center) + else: + print( 'Please give qz_center') + if 'qr_center' in kwargs.keys(): + qr_center = kwargs[ 'qr_center'] + num_qr = len( qr_center) + else: + print( 'Please give qr_center') + + + + + for qz_ind in range(num_qz): + fig = plt.figure(figsize=(10, 12)) + #fig = plt.figure() + title_qz = ' Qz= %.5f '%( qz_center[qz_ind]) + r'$\AA^{-1}$' + plt.title('uid= %s:--->'%uid + title_qz,fontsize=20, y =1.1) + #print (qz_ind,title_qz) + if num_qz!=1:plt.axis('off') + sx = int(round(np.sqrt(num_qr)) ) + if num_qr%sx == 0: + sy = int(num_qr/sx) + else: + sy=int(num_qr/sx+1) + for sn in range(num_qr): + ax = fig.add_subplot(sx,sy,sn+1 ) + ax.set_ylabel("g2") + ax.set_xlabel(r"$\tau $ $(s)$", fontsize=16) + + title_qr = " Qr= " + '%.5f '%( qr_center[sn]) + r'$\AA^{-1}$' + if num_qz==1: + + title = 
'uid= %s:--->'%uid + title_qz + '__' + title_qr + else: + title = title_qr + ax.set_title( title ) + + + y=g2[:, sn + qz_ind * num_qr] + ax.semilogx(taus, y, '-o', markersize=6) + + if 'ylim' in kwargs: + ax.set_ylim( kwargs['ylim']) + elif 'vlim' in kwargs: + vmin, vmax =kwargs['vlim'] + ax.set_ylim([min(y)*vmin, max(y[1:])*vmax ]) + else: + pass + if 'xlim' in kwargs: + ax.set_xlim( kwargs['xlim']) + + dt =datetime.now() + CurTime = '%s%02d%02d-%02d%02d-' % (dt.year, dt.month, dt.day,dt.hour,dt.minute) + + fp = path + 'g2--uid=%s-qz=%s'%(uid,qz_center[qz_ind]) + CurTime + '.png' + fig.savefig( fp, dpi=fig.dpi) + fig.tight_layout() + plt.show() + + + + + + +#plot g2 results + +def plot_gisaxs_two_g2( g2, taus, g2b, tausb,res_pargs=None, *argv,**kwargs): + '''plot g2 results, + g2: one-time correlation function from a multi-tau method + g2b: another g2 from a two-time method + taus: the time delays + kwargs: can contains + vlim: [vmin,vmax]: for the plot limit of y, the y-limit will be [vmin * min(y), vmx*max(y)] + ylim/xlim: the limit of y and x + + e.g. 
+ plot_saxs_g2( g2b, taus= np.arange( g2b.shape[0]) *timeperframe, q_ring_center = q_ring_center, vlim=[.99, 1.01] ) + + ''' + + + if res_pargs is not None: + uid = res_pargs['uid'] + path = res_pargs['path'] + qz_center = res_pargs[ 'qz_center'] + num_qz = len( qz_center) + qr_center = res_pargs[ 'qr_center'] + num_qr = len( qr_center) + + else: + + if 'uid' in kwargs.keys(): + uid = kwargs['uid'] + else: + uid = 'uid' + if 'path' in kwargs.keys(): + path = kwargs['path'] + else: + path = '' + if 'qz_center' in kwargs.keys(): + qz_center = kwargs[ 'qz_center'] + num_qz = len( qz_center) + else: + print( 'Please give qz_center') + if 'qr_center' in kwargs.keys(): + qr_center = kwargs[ 'qr_center'] + num_qr = len( qr_center) + else: + print( 'Please give qr_center') + + + + + for qz_ind in range(num_qz): + fig = plt.figure(figsize=(10, 12)) + #fig = plt.figure() + title_qz = ' Qz= %.5f '%( qz_center[qz_ind]) + r'$\AA^{-1}$' + plt.title('uid= %s:--->'%uid + title_qz,fontsize=20, y =1.1) + #print (qz_ind,title_qz) + if num_qz!=1:plt.axis('off') + sx = int(round(np.sqrt(num_qr)) ) + if num_qr%sx == 0: + sy = int(num_qr/sx) + else: + sy=int(num_qr/sx+1) + for sn in range(num_qr): + ax = fig.add_subplot(sx,sy,sn+1 ) + ax.set_ylabel("g2") + ax.set_xlabel(r"$\tau $ $(s)$", fontsize=16) + + title_qr = " Qr= " + '%.5f '%( qr_center[sn]) + r'$\AA^{-1}$' + if num_qz==1: + + title = 'uid= %s:--->'%uid + title_qz + '__' + title_qr + else: + title = title_qr + ax.set_title( title ) + + y=g2b[:, sn + qz_ind * num_qr] + ax.semilogx( tausb, y, '--r', markersize=6,label= 'by-two-time') + + y2=g2[:, sn] + ax.semilogx(taus, y2, 'o', markersize=6, label= 'by-multi-tau') + + + if sn + qz_ind * num_qr==0: + ax.legend() + + + if 'ylim' in kwargs: + ax.set_ylim( kwargs['ylim']) + elif 'vlim' in kwargs: + vmin, vmax =kwargs['vlim'] + ax.set_ylim([min(y)*vmin, max(y[1:])*vmax ]) + else: + pass + if 'xlim' in kwargs: + ax.set_xlim( kwargs['xlim']) + + #dt =datetime.now() + #CurTime = 
'%s%02d%02d-%02d%02d-' % (dt.year, dt.month, dt.day,dt.hour,dt.minute) + + #fp = path + 'g2--uid=%s-qz=%s'%(uid,qz_center[qz_ind]) + CurTime + '.png' + #fig.savefig( fp, dpi=fig.dpi) + fig.tight_layout() + plt.show() + + + + + + +def save_gisaxs_g2( g2,res_pargs , *argv,**kwargs): + + '''save g2 results, + res_pargs should contain + g2: one-time correlation function + res_pargs: contions taus, q_ring_center values + path: + uid: + + ''' + taus = res_pargs[ 'taus'] + qz_center = res_pargs['qz_center'] + qr_center = res_pargs['qr_center'] + path = res_pargs['path'] + uid = res_pargs['uid'] + + df = DataFrame( np.hstack( [ (taus).reshape( len(g2),1) , g2] ) ) + columns=[] + columns.append('tau') + for qz in qz_center: + for qr in qr_center: + columns.append( [str(qz),str(qr)] ) + df.columns = columns + + dt =datetime.now() + CurTime = '%s%02d%02d-%02d%02d-' % (dt.year, dt.month, dt.day,dt.hour,dt.minute) + + filename = os.path.join(path, 'g2-%s-%s.csv' % (uid,CurTime)) + df.to_csv(filename) + print( 'The g2 of uid= %s is saved in %s with filename as g2-%s-%s.csv'%(uid, path, uid, CurTime)) + + + + + +def stretched_auto_corr_scat_factor(lags, beta, relaxation_rate, alpha=1.0, baseline=1): + return beta * (np.exp(-2 * relaxation_rate * lags))**alpha + baseline + +def simple_exponential(lags, beta, relaxation_rate, baseline=1): + return beta * np.exp(-2 * relaxation_rate * lags) + baseline + + +def fit_gisaxs_g2( g2, res_pargs, function='simple_exponential', *argv,**kwargs): + ''' + Fit one-time correlation function + + The support functions include simple exponential and stretched/compressed exponential + Parameters + ---------- + g2: one-time correlation function for fit, with shape as [taus, qs] + res_pargs: a dict, contains keys + taus: the time delay, with the same length as g2 + q_ring_center: the center of q rings, for the title of each sub-plot + uid: unique id, for the title of plot + + function: + 'simple_exponential': fit by a simple exponential function, 
defined as + beta * np.exp(-2 * relaxation_rate * lags) + baseline + 'streched_exponential': fit by a streched exponential function, defined as + beta * (np.exp(-2 * relaxation_rate * lags))**alpha + baseline + + Returns + ------- + fit resutls: + a dict, with keys as + 'baseline': + 'beta': + 'relaxation_rate': + an example: + result = fit_g2( g2, res_pargs, function = 'simple') + result = fit_g2( g2, res_pargs, function = 'stretched') + ''' + + + taus = res_pargs[ 'taus'] + qz_center = res_pargs[ 'qz_center'] + num_qz = len( qz_center) + qr_center = res_pargs[ 'qr_center'] + num_qr = len( qr_center) + uid=res_pargs['uid'] + #uid=res_pargs['uid'] + + num_rings = g2.shape[1] + beta = np.zeros( num_rings ) # contrast factor + rate = np.zeros( num_rings ) # relaxation rate + alpha = np.zeros( num_rings ) # alpha + baseline = np.zeros( num_rings ) # baseline + + + if function=='simple_exponential' or function=='simple': + mod = Model(stretched_auto_corr_scat_factor, independent_vars=['alpha', 'lags']) + + elif function=='stretched_exponential' or function=='stretched': + mod = Model(stretched_auto_corr_scat_factor, independent_vars=[ 'lags']) + else: + print ("The %s is not supported.The supported functions include simple_exponential and stretched_exponential"%function) + + + + + for qz_ind in range(num_qz): + fig = plt.figure(figsize=(10, 12)) + #fig = plt.figure() + title_qz = ' Qz= %.5f '%( qz_center[qz_ind]) + r'$\AA^{-1}$' + plt.title('uid= %s:--->'%uid + title_qz,fontsize=20, y =1.1) + #print (qz_ind,title_qz) + if num_qz!=1:plt.axis('off') + sx = int(round(np.sqrt(num_qr)) ) + if num_qr%sx == 0: + sy = int(num_qr/sx) + else: + sy=int(num_qr/sx+1) + for sn in range(num_qr): + + ax = fig.add_subplot(sx,sy,sn+1 ) + ax.set_ylabel("g2") + ax.set_xlabel(r"$\tau $ $(s)$", fontsize=16) + + title_qr = " Qr= " + '%.5f '%( qr_center[sn]) + r'$\AA^{-1}$' + if num_qz==1: + + title = 'uid= %s:--->'%uid + title_qz + '__' + title_qr + else: + title = title_qr + ax.set_title( 
title ) + + i = sn + qz_ind * num_qr + y=g2[1:, i] + + result1 = mod.fit(y, lags=taus[1:], beta=.05, alpha=1.0, + relaxation_rate =0.005, baseline=1.0, ) + + #print ( result1.best_values) + rate[i] = result1.best_values['relaxation_rate'] + #rate[i] = 1e-16 + beta[i] = result1.best_values['beta'] + + #baseline[i] = 1.0 + baseline[i] = result1.best_values['baseline'] + + if function=='simple_exponential' or function=='simple': + alpha[i] =1.0 + elif function=='stretched_exponential' or function=='stretched': + alpha[i] = result1.best_values['alpha'] + + ax.semilogx(taus[1:], y, 'ro') + ax.semilogx(taus[1:], result1.best_fit, '-b') + + + if 'ylim' in kwargs: + ax.set_ylim( kwargs['ylim']) + elif 'vlim' in kwargs: + vmin, vmax =kwargs['vlim'] + ax.set_ylim([min(y)*vmin, max(y[1:])*vmax ]) + else: + pass + if 'xlim' in kwargs: + ax.set_xlim( kwargs['xlim']) + + txts = r'$\gamma$' + r'$ = %.3f$'%(1/rate[i]) + r'$ s$' + ax.text(x =0.02, y=.55, s=txts, fontsize=14, transform=ax.transAxes) + txts = r'$\alpha$' + r'$ = %.3f$'%(alpha[i]) + #txts = r'$\beta$' + r'$ = %.3f$'%(beta[i]) + r'$ s^{-1}$' + ax.text(x =0.02, y=.45, s=txts, fontsize=14, transform=ax.transAxes) + + txts = r'$baseline$' + r'$ = %.3f$'%( baseline[i]) + ax.text(x =0.02, y=.35, s=txts, fontsize=14, transform=ax.transAxes) + + + #dt =datetime.now() + #CurTime = '%s%02d%02d-%02d%02d-' % (dt.year, dt.month, dt.day,dt.hour,dt.minute) + #fp = path + 'g2--uid=%s-qz=%s-fit'%(uid,qz_center[qz_ind]) + CurTime + '.png' + #fig.savefig( fp, dpi=fig.dpi) + + fig.tight_layout() + plt.show() + result = dict( beta=beta, rate=rate, alpha=alpha, baseline=baseline ) + + return result + + + + + + +#GiSAXS End +############################### + + + +def get_each_box_mean_intensity( data_series, box_mask, sampling, timeperframe, plot_ = True , *argv,**kwargs): + + + mean_int_sets, index_list = roi.mean_intensity(np.array(data_series[::sampling]), box_mask) + try: + N = len(data_series) + except: + N = data_series.length + times 
= np.arange( N )*timeperframe # get the time for each frame + num_rings = len( np.unique( box_mask)[1:] ) + if plot_: + fig, ax = plt.subplots(figsize=(8, 8)) + uid = 'uid' + if 'uid' in kwargs.keys(): + uid = kwargs['uid'] + + ax.set_title("Uid= %s--Mean intensity of each box"%uid) + for i in range(num_rings): + ax.plot( times[::sampling], mean_int_sets[:,i], label="Box "+str(i+1),marker = 'o', ls='-') + ax.set_xlabel("Time") + ax.set_ylabel("Mean Intensity") + ax.legend() + plt.show() + return times, mean_int_sets + + + + + + + + + + diff --git a/chxtools/XPCS_SAXS.py b/chxtools/XPCS_SAXS.py new file mode 100644 index 0000000..a234f0e --- /dev/null +++ b/chxtools/XPCS_SAXS.py @@ -0,0 +1,1640 @@ +from chx_generic_functions import * + + + + + + +def circular_average(image, calibrated_center, threshold=0, nx=1500, + pixel_size=(1, 1), min_x=None, max_x=None, mask=None): + """Circular average of the the image data + The circular average is also known as the radial integration + Parameters + ---------- + image : array + Image to compute the average as a function of radius + calibrated_center : tuple + The center of the image in pixel units + argument order should be (row, col) + threshold : int, optional + Ignore counts above `threshold` + default is zero + nx : int, optional + number of bins in x + defaults is 100 bins + pixel_size : tuple, optional + The size of a pixel (in a real unit, like mm). + argument order should be (pixel_height, pixel_width) + default is (1, 1) + min_x : float, optional number of pixels + Left edge of first bin defaults to minimum value of x + max_x : float, optional number of pixels + Right edge of last bin defaults to maximum value of x + Returns + ------- + bin_centers : array + The center of each bin in R. shape is (nx, ) + ring_averages : array + Radial average of the image. shape is (nx, ). 
+ """ + radial_val = utils.radial_grid(calibrated_center, image.shape, pixel_size) + + + if mask is not None: + #maks = np.ones_like( image ) + mask = np.array( mask, dtype = bool) + binr = radial_val[mask] + image_mask = np.array( image )[mask] + + else: + binr = np.ravel( radial_val ) + image_mask = np.ravel(image) + + bin_edges, sums, counts = utils.bin_1D( binr, + image_mask, + nx, + min_x=min_x, + max_x=max_x) + + #print (counts) + th_mask = counts > threshold + ring_averages = sums[th_mask] / counts[th_mask] + + bin_centers = utils.bin_edges_to_centers(bin_edges)[th_mask] + + return bin_centers, ring_averages + + + +def get_angular_average( avg_img, mask, pargs, min_r, max_r, + nx=3600, plot_ = False , save=False, *argv,**kwargs): + """get a angular average of an image + Parameters + ---------- + + avg_img: 2D-array, the image + mask: 2D-array + pargs: a dict, should contains + center: the beam center in pixel + Ldet: sample to detector distance + lambda_: the wavelength + dpix, the pixel size in mm. 
For Eiger1m/4m, the size is 75 um (0.075 mm) + + nx : int, optional + number of bins in x + defaults is 1500 bins + + plot_: a boolen type, if True, plot the one-D curve + plot_qinpixel:a boolen type, if True, the x-axis of the one-D curve is q in pixel; else in real Q + + Returns + ------- + ang: ang in degree + iq: intensity of circular average + + + + """ + + center, Ldet, lambda_, dpix= pargs['center'], pargs['Ldet'], pargs['lambda_'], pargs['dpix'] + uid = pargs['uid'] + + angq, ang = angular_average( avg_img, calibrated_center=center, pixel_size=(dpix,dpix), nx =nx, + min_r = min_r , max_r = max_r, mask=mask ) + + + if plot_: + fig = plt.figure(figsize=(8, 6)) + ax = fig.add_subplot(111) + ax.plot(angq, ang , '-o') + ax.set_xlabel("angle (deg)") + ax.set_ylabel("I(ang)") + #ax.legend(loc = 'best') + uid = pargs['uid'] + title = ax.set_title('Uid= %s--t~I(Ang)'%uid) + title.set_y(1.01) + + if save: + dt =datetime.now() + CurTime = '%s%02d%02d-%02d%02d-' % (dt.year, dt.month, dt.day,dt.hour,dt.minute) + path = pargs['path'] + uid = pargs['uid'] + + fp = path + 'Uid= %s--Ang-Iq~t-'%uid + CurTime + '.png' + fig.savefig( fp, dpi=fig.dpi) + + plt.show() + + + return angq, ang + + + + + +def angular_average(image, calibrated_center, threshold=0, nx=1500, + pixel_size=(1, 1), min_r=None, max_r=None, min_x=None, max_x=None, mask=None): + """Angular_average of the the image data + + Parameters + ---------- + image : array + Image to compute the average as a function of radius + calibrated_center : tuple + The center of the image in pixel units + argument order should be (row, col) + threshold : int, optional + Ignore counts above `threshold` + default is zero + nx : int, optional + number of bins in x + defaults is 100 bins + pixel_size : tuple, optional + The size of a pixel (in a real unit, like mm). 
+ argument order should be (pixel_height, pixel_width) + default is (1, 1) + + min_r: float, optional number of pixels + The min r, e.g., the starting radius for angule average + max_r:float, optional number of pixels + The max r, e.g., the ending radius for angule average + max_r - min_r gives the width of the angule average + + min_x : float, optional number of pixels + Left edge of first bin defaults to minimum value of x + max_x : float, optional number of pixels + Right edge of last bin defaults to maximum value of x + Returns + ------- + bin_centers : array + The center of each bin in degree shape is (nx, ) + ring_averages : array + Radial average of the image. shape is (nx, ). + """ + + angle_val = utils.angle_grid(calibrated_center, image.shape, pixel_size) + + if min_r is None: + min_r=0 + if max_r is None: + max_r = np.sqrt( (image.shape[0] - center[0])**2 + (image.shape[1] - center[1])**2 ) + r_mask = make_ring_mask( calibrated_center, image.shape, min_r, max_r ) + + + if mask is not None: + #maks = np.ones_like( image ) + mask = np.array( mask*r_mask, dtype = bool) + + bina = angle_val[mask] + image_mask = np.array( image )[mask] + + else: + bina = np.ravel( angle_val ) + image_mask = np.ravel(image*r_mask) + + bin_edges, sums, counts = utils.bin_1D( bina, + image_mask, + nx, + min_x=min_x, + max_x=max_x) + + #print (counts) + th_mask = counts > threshold + ang_averages = sums[th_mask] / counts[th_mask] + + bin_centers = utils.bin_edges_to_centers(bin_edges)[th_mask] + + return bin_centers*180/np.pi, ang_averages + +def get_circular_average( avg_img, mask, pargs, show_pixel=True, + nx=1500, plot_ = False , save=False, *argv,**kwargs): + """get a circular average of an image + Parameters + ---------- + + avg_img: 2D-array, the image + mask: 2D-array + pargs: a dict, should contains + center: the beam center in pixel + Ldet: sample to detector distance + lambda_: the wavelength + dpix, the pixel size in mm. 
For Eiger1m/4m, the size is 75 um (0.075 mm) + + nx : int, optional + number of bins in x + defaults is 1500 bins + + plot_: a boolen type, if True, plot the one-D curve + plot_qinpixel:a boolen type, if True, the x-axis of the one-D curve is q in pixel; else in real Q + + Returns + ------- + qp: q in pixel + iq: intensity of circular average + q: q in real unit (A-1) + + + """ + + center, Ldet, lambda_, dpix= pargs['center'], pargs['Ldet'], pargs['lambda_'], pargs['dpix'] + uid = pargs['uid'] + + qp_, iq = circular_average(avg_img, + center, threshold=0, nx=nx, pixel_size=(dpix, dpix), mask=mask) + + # convert bin_centers from r [um] to two_theta and then to q [1/px] (reciprocal space) + two_theta = utils.radius_to_twotheta(Ldet, qp_) + q = utils.twotheta_to_q(two_theta, lambda_) + + qp = qp_/dpix + + if plot_: + fig = plt.figure(figsize=(8, 6)) + ax1 = fig.add_subplot(111) + ax2 = ax1.twiny() + + if show_pixel: + ax2.semilogy(qp, iq, '-o') + ax1.semilogy(q, iq , '-o') + + ax2.set_xlabel('q (pixel)') + ax1.set_xlabel('q ('r'$\AA^{-1}$)') + + else: + + ax2.semilogy(q, iq , '-o') + ax1.semilogy(qp, iq, '-o') + ax1.set_xlabel('q (pixel)') + ax2.set_xlabel('q ('r'$\AA^{-1}$)') + ax2.cla() + ax1.set_ylabel('I(q)') + + + if 'xlim' in kwargs.keys(): + ax1.set_xlim( kwargs['xlim'] ) + x1,x2 = kwargs['xlim'] + w = np.where( (q >=x1 )&( q<=x2) )[0] + + ax2.set_xlim( [ qp[w[0]], qp[w[-1]]] ) + + if 'ylim' in kwargs.keys(): + ax1.set_ylim( kwargs['ylim'] ) + + + #ax1.semilogy(qp, np.ones_like( iq) , '-o') + + + #ax2.semilogy(q, iq, '-o') + + #ax2.cla() + + title = ax2.set_title('Uid= %s--Circular Average'%uid) + title.set_y(1.1) + fig.subplots_adjust(top=0.85) + + + #if plot_qinpixel: + #ax2.set_xlabel('q (pixel)') + #else: + #ax1.set_xlabel('q ('r'$\AA^{-1}$)') + #axes.set_xlim(30, 1050) + #axes.set_ylim(-0.0001, 10000) + if save: + dt =datetime.now() + CurTime = '%s%02d%02d-%02d%02d-' % (dt.year, dt.month, dt.day,dt.hour,dt.minute) + path = pargs['path'] + fp = path + 'Uid= 
%s--Circular Average'%uid + CurTime + '.png' + fig.savefig( fp, dpi=fig.dpi) + + plt.show() + + return qp, iq, q + + +def get_distance(p1,p2): + '''Calc the distance between two point''' + return np.sqrt( (p1[0] - p2[0])**2 + (p1[1] - p2[1])**2 ) + + +def calc_q(L,a,wv): + ''' calc_q(L,a,wv) - calculate the q value for length L, transverse + distance a and wavelength wv. + Use this to calculate the speckle size + + L - sample to detector distance (mm) + a - pixel size transverse length from beam direction (mm) + wv - wavelength + Units of L and a should match and resultant q is in inverse units of wv. + ''' + theta = np.arctan2(a,L) + q = 4*np.pi*np.sin(theta/2.)/wv + return q + + + + +def get_t_iq( data_series, frame_edge, mask, pargs, nx=1500, plot_ = False , save=False, *argv,**kwargs): + '''Get t-dependent Iq + + Parameters + ---------- + data_series: a image series + frame_edge: list, the ROI frame regions, e.g., [ [0,100], [200,400] ] + mask: a image mask + + nx : int, optional + number of bins in x + defaults is 1500 bins + plot_: a boolen type, if True, plot the time~one-D curve with qp as x-axis + + Returns + --------- + qp: q in pixel + iq: intensity of circular average + q: q in real unit (A-1) + + ''' + + Nt = len( frame_edge ) + iqs = list( np.zeros( Nt ) ) + for i in range(Nt): + t1,t2 = frame_edge[i] + #print (t1,t2) + avg_img = get_avg_img( data_series[t1:t2], sampling = 1, + plot_ = False ) + qp, iqs[i], q = get_circular_average( avg_img, mask,pargs, nx=nx, + plot_ = False) + + if plot_: + fig,ax = plt.subplots(figsize=(8, 6)) + for i in range( Nt ): + t1,t2 = frame_edge[i] + ax.semilogy(q, iqs[i], label="frame: %s--%s"%( t1,t2) ) + #ax.set_xlabel("q in pixel") + ax.set_xlabel('Q 'r'($\AA^{-1}$)') + ax.set_ylabel("I(q)") + + if 'xlim' in kwargs.keys(): + ax.set_xlim( kwargs['xlim'] ) + if 'ylim' in kwargs.keys(): + ax.set_ylim( kwargs['ylim'] ) + + + ax.legend(loc = 'best') + + uid = pargs['uid'] + title = ax.set_title('Uid= %s--t~I(q)'%uid) + 
title.set_y(1.01) + if save: + dt =datetime.now() + CurTime = '%s%02d%02d-%02d%02d-' % (dt.year, dt.month, dt.day,dt.hour,dt.minute) + path = pargs['path'] + uid = pargs['uid'] + fp = path + 'Uid= %s--Iq~t-'%uid + CurTime + '.png' + fig.savefig( fp, dpi=fig.dpi) + + plt.show() + + + return qp, np.array( iqs ),q + + + +def get_t_ang( data_series, frame_edge, mask, center, pixel_size, min_r, max_r,pargs, + nx=1500, plot_ = False , save=False, *argv,**kwargs): + '''Get t-dependent angule intensity + + Parameters + ---------- + data_series: a image series + frame_edge: list, the ROI frame regions, e.g., [ [0,100], [200,400] ] + mask: a image mask + + pixel_size : tuple, optional + The size of a pixel (in a real unit, like mm). + argument order should be (pixel_height, pixel_width) + default is (1, 1) + center: the beam center in pixel + min_r: float, optional number of pixels + The min r, e.g., the starting radius for angule average + max_r:float, optional number of pixels + The max r, e.g., the ending radius for angule average + + max_r - min_r gives the width of the angule average + + nx : int, optional + number of bins in x + defaults is 1500 bins + plot_: a boolen type, if True, plot the time~one-D curve with qp as x-axis + + Returns + --------- + qp: q in pixel + iq: intensity of circular average + q: q in real unit (A-1) + + ''' + + Nt = len( frame_edge ) + iqs = list( np.zeros( Nt ) ) + for i in range(Nt): + t1,t2 = frame_edge[i] + #print (t1,t2) + avg_img = get_avg_img( data_series[t1:t2], sampling = 1, + plot_ = False ) + qp, iqs[i] = angular_average( avg_img, center, pixel_size=pixel_size, + nx=nx, min_r=min_r, max_r = max_r, mask=mask ) + + if plot_: + fig,ax = plt.subplots(figsize=(8, 8)) + for i in range( Nt ): + t1,t2 = frame_edge[i] + #ax.semilogy(qp* 180/np.pi, iqs[i], label="frame: %s--%s"%( t1,t2) ) + ax.plot(qp, iqs[i], label="frame: %s--%s"%( t1,t2) ) + ax.set_xlabel("angle (deg)") + ax.set_ylabel("I(ang)") + ax.legend(loc = 'best') + uid = 
pargs['uid'] + title = ax.set_title('Uid= %s--t~I(Ang)'%uid) + title.set_y(1.01) + + if save: + dt =datetime.now() + CurTime = '%s%02d%02d-%02d%02d-' % (dt.year, dt.month, dt.day,dt.hour,dt.minute) + path = pargs['path'] + uid = pargs['uid'] + + fp = path + 'Uid= %s--Ang-Iq~t-'%uid + CurTime + '.png' + fig.savefig( fp, dpi=fig.dpi) + + plt.show() + + + return qp, np.array( iqs ) + + +def make_ring_mask(center, shape, min_r, max_r ): + """ + Make a ring mask. + + Parameters + ---------- + center : tuple + point in image where r=0; may be a float giving subpixel precision. + Order is (rr, cc). + shape: tuple + Image shape which is used to determine the maximum extent of output + pixel coordinates. Order is (rr, cc). + + min_r: float, optional number of pixels + The min r, e.g., the starting radius of the ring + max_r:float, optional number of pixels + The max r, e.g., the ending radius of the ring + max_r - min_r gives the width of the ring + Returns + ------- + ring_mask : array + + + """ + r_val = utils.radial_grid(center, shape, [1.,1.] ) + r_mask = np.zeros_like( r_val, dtype=np.int32) + r_mask[np.where( (r_val >min_r) & (r_val < max_r ) )] = 1 + + return r_mask + + +def _make_roi(coords, edges, shape): + """ Helper function to create ring rois and bar rois + Parameters + ---------- + coords : array + shape is image shape + edges : list + List of tuples of inner (left or top) and outer (right or bottom) + edges of each roi. + e.g., edges=[(1, 2), (11, 12), (21, 22)] + shape : tuple + Shape of the image in which to create the ROIs + e.g., shape=(512, 512) + Returns + ------- + label_array : array + Elements not inside any ROI are zero; elements inside each + ROI are 1, 2, 3, corresponding to the order they are + specified in `edges`. + Has shape=`image shape` + """ + label_array = np.digitize(coords, edges, right=False) + # Even elements of label_array are in the space between rings. 
+ label_array = (np.where(label_array % 2 != 0, label_array, 0) + 1) // 2 + return label_array.reshape(shape) + + +def angulars(edges, center, shape): + """ + Draw annual (angluar-shaped) shaped regions of interest. + Each ring will be labeled with an integer. Regions outside any ring will + be filled with zeros. + Parameters + ---------- + edges: list + giving the inner and outer angle in unit of radians + e.g., [(1, 2), (11, 12), (21, 22)] + center: tuple + point in image where r=0; may be a float giving subpixel precision. + Order is (rr, cc). + shape: tuple + Image shape which is used to determine the maximum extent of output + pixel coordinates. Order is (rr, cc). + Returns + ------- + label_array : array + Elements not inside any ROI are zero; elements inside each + ROI are 1, 2, 3, corresponding to the order they are specified + in edges. + """ + edges = np.atleast_2d(np.asarray(edges)).ravel() + if not 0 == len(edges) % 2: + raise ValueError("edges should have an even number of elements, " + "giving inner, outer radii for each angular") + if not np.all( np.diff(edges) > 0): + raise ValueError("edges are expected to be monotonically increasing, " + "giving inner and outer radii of each angular from " + "r=0 outward") + + angle_val = utils.angle_grid( center, shape) .ravel() + + return _make_roi(angle_val, edges, shape) + + + + + + +def get_angular_mask( mask, inner_angle=0, outer_angle = 360, width = None, edges = None, + num_angles = 12, center = None, dpix=[1,1] ): + + ''' + mask: 2D-array + inner_angle # the starting angle in unit of degree + outer_angle # the ending angle in unit of degree + width # width of each angle, in degree, default is None, there is no gap between the neighbour angle ROI + edges: default, None. otherwise, give a customized angle edges + num_angles # number of angles + + center: the beam center in pixel + dpix, the pixel size in mm. 
For Eiger1m/4m, the size is 75 um (0.075 mm) + + Returns + ------- + ang_mask: a ring mask, np.array + ang_center: ang in unit of degree + ang_val: ang edges in degree + + ''' + + #center, Ldet, lambda_, dpix= pargs['center'], pargs['Ldet'], pargs['lambda_'], pargs['dpix'] + + #spacing = (outer_radius - inner_radius)/(num_rings-1) - 2 # spacing between rings + #inner_angle,outer_angle = np.radians(inner_angle), np.radians(outer_angle) + + #if edges is None: + # ang_center = np.linspace( inner_angle,outer_angle, num_angles ) + # edges = np.zeros( [ len(ang_center), 2] ) + # if width is None: + # width = ( -inner_angle + outer_angle)/ float( num_angles -1 + 1e-10 ) + # else: + # width = np.radians( width ) + # edges[:,0],edges[:,1] = ang_center - width/2, ang_center + width/2 + + + if edges is None: + if num_angles!=1: + spacing = (outer_angle - inner_angle - num_angles* width )/(num_angles-1) # spacing between rings + else: + spacing = 0 + edges = roi.ring_edges(inner_angle, width, spacing, num_angles) + + #print (edges) + angs = angulars( np.radians( edges ), center, mask.shape) + ang_center = np.average(edges, axis=1) + ang_mask = angs*mask + ang_mask = np.array(ang_mask, dtype=int) + + labels, indices = roi.extract_label_indices(ang_mask) + nopr = np.bincount( np.array(labels, dtype=int) )[1:] + + if len( np.where( nopr ==0 )[0] !=0): + print (nopr) + print ("Some angs contain zero pixels. Please redefine the edges.") + return ang_mask, ang_center, edges + + +def get_ring_mask( mask, inner_radius=40, outer_radius = 762, width = 6, num_rings = 12, edges=None, pargs=None ): + + ''' + mask: 2D-array + inner_radius #radius of the first ring + outer_radius # radius of the last ring + width # width of each ring + num_rings # number of rings + pargs: a dict, should contains + center: the beam center in pixel + Ldet: sample to detector distance + lambda_: the wavelength + dpix, the pixel size in mm. 
def get_ring_anglar_mask(ring_mask, ang_mask,
                         q_ring_center, ang_center ):
    '''Combine a ring-labeled mask and an angle-labeled mask into one
    mask of ring/angle segments, relabeled consecutively as 1, 2, 3, ...

    Returns
    -------
    (segment_mask, q_center_per_segment, ang_center_per_segment)
    '''
    # Shift the angular labels far above any ring label so each
    # (ring, angle) product is a unique value.
    shifted = np.zeros(ang_mask.shape)
    nonzero = np.where(ang_mask != 0)
    shifted[nonzero] = ang_mask[nonzero] + 1E9
    combined = ring_mask * shifted

    combined_labels = np.unique(combined)[1:]   # drop the background (0)
    ring_labels = np.unique(ring_mask)[1:]
    ang_labels = np.unique(ang_mask)[1:]

    # Map each unique combined value to 1..len(combined_labels)
    relabeled = np.zeros_like(combined)
    flat_out = relabeled.ravel()
    flat_in = combined.ravel()
    for new_label, old_label in enumerate(combined_labels, start=1):
        flat_out[np.where(flat_in == old_label)[0]] = new_label

    # q center repeats once per angle; angle centers cycle once per ring
    rc = [[q_ring_center[i]] * len(ang_labels) for i in range(len(ring_labels))]
    ac = list(ang_center) * len(ring_labels)

    return np.int_(relabeled), np.concatenate(np.array(rc)), np.array(ac)
, origin="lower") + + + for i in range( 1, num_qzr+1 ): + ind = np.where( box_maskr == i)[1] + indz = np.where( box_maskr == i)[0] + c = '%i'%i + y_val = int( indz.mean() ) + + x_val = int( ind.mean() ) + #print (xval, y) + ax.text(x_val, y_val, c, va='center', ha='center') + + #print (x_val1,x_val2) + + + divider = make_axes_locatable(ax) + cax = divider.append_axes("right", size="5%", pad=0.05) + plt.colorbar(im, cax=cax) + + #ax.set_xlabel(r'$q_r$', fontsize=22) + #ax.set_ylabel(r'$q_z$',fontsize=22) + + plt.show() + + + + + + + + +def plot_qIq_with_ROI( q, iq, q_ring_center, logs=True, *argv,**kwargs): + '''plot q~Iq with interested q rings''' + + uid = 'uid' + if 'uid' in kwargs.keys(): + uid = kwargs['uid'] + + #Plot the circular average as a + fig, axes = plt.subplots( figsize=(8, 6)) + if logs: + axes.semilogy(q, iq, '-o') + else: + axes.plot(q, iq, '-o') + + axes.set_title('Uid= %s--Circular Average with the Q ring values'%uid) + axes.set_ylabel('I(q)') + axes.set_xlabel('Q 'r'($\AA^{-1}$)') + #axes.set_xlim(0, 0.02) + #axes.set_xlim(-0.00001, 0.1) + #axes.set_ylim(-0.0001, 10000) + #axes.set_ylim(0, 100) + + if 'xlim' in kwargs.keys(): + axes.set_xlim( kwargs['xlim'] ) + if 'ylim' in kwargs.keys(): + axes.set_ylim( kwargs['ylim'] ) + + + + num_rings = len( np.unique( q_ring_center) ) + for i in range(num_rings): + axes.axvline(q_ring_center[i] )#, linewidth = 5 ) + plt.show() + + + +def get_each_ring_mean_intensity( data_series, ring_mask, sampling, timeperframe, plot_ = True , save=False, *argv,**kwargs): + + """ + get time dependent mean intensity of each ring + """ + mean_int_sets, index_list = roi.mean_intensity(np.array(data_series[::sampling]), ring_mask) + + times = np.arange(len(data_series))*timeperframe # get the time for each frame + num_rings = len( np.unique( ring_mask)[1:] ) + if plot_: + fig, ax = plt.subplots(figsize=(8, 8)) + uid = 'uid' + if 'uid' in kwargs.keys(): + uid = kwargs['uid'] + + ax.set_title("Uid= %s--Mean intensity of each 
def plot_saxs_g2( g2, taus, res_pargs=None, *argv,**kwargs):
    '''Plot the one-time correlation function g2 for each q ring and save
    the figure as a time-stamped PNG.

    Parameters
    ----------
    g2: 2D array, shape [num_taus, num_rings], one-time correlation
    taus: 1D array, the time delays
    res_pargs: a dict or None; if given should contain
        uid / path / q_ring_center
    kwargs: may contain uid / path / q_ring_center (used when res_pargs
        is None) and
        vlim: [vmin, vmax]: y-limit becomes [vmin*min(y), vmax*max(y)]
        ylim/xlim: explicit axis limits

    e.g.
    plot_saxs_g2( g2b, taus= np.arange( g2b.shape[0]) *timeperframe, q_ring_center = q_ring_center, vlim=[.99, 1.01] )
    '''
    if res_pargs is not None:
        uid = res_pargs['uid']
        path = res_pargs['path']
        q_ring_center = res_pargs['q_ring_center']
    else:
        uid = kwargs.get('uid', 'uid')
        path = kwargs.get('path', '')
        if 'q_ring_center' in kwargs:
            q_ring_center = kwargs['q_ring_center']
        else:
            q_ring_center = np.arange(g2.shape[1])

    num_rings = g2.shape[1]
    # arrange sub-plots in a roughly square sx-by-sy grid
    sx = int(round(np.sqrt(num_rings)))
    if num_rings % sx == 0:
        sy = int(num_rings / sx)
    else:
        sy = int(num_rings / sx + 1)

    if num_rings != 1:
        fig = plt.figure(figsize=(12, 10))
        plt.axis('off')   # hide the parent axes; only sub-plots are drawn
        plt.xticks([])
        plt.yticks([])
    else:
        fig = plt.figure(figsize=(8, 8))
    plt.title('uid= %s'%uid, fontsize=20, y=1.06)

    for i in range(num_rings):
        ax = fig.add_subplot(sx, sy, i + 1)
        ax.set_ylabel("g2")
        ax.set_title(" Q= " + '%.5f '%(q_ring_center[i]) + r'$\AA^{-1}$')
        y = g2[:, i]
        ax.semilogx(taus, y, '-o', markersize=6)
        if 'ylim' in kwargs:
            ax.set_ylim(kwargs['ylim'])
        elif 'vlim' in kwargs:
            vmin, vmax = kwargs['vlim']
            # skip y[0]: tau=0 point is often an outlier
            ax.set_ylim([min(y)*vmin, max(y[1:])*vmax])
        if 'xlim' in kwargs:
            ax.set_xlim(kwargs['xlim'])

    dt = datetime.now()
    CurTime = '%s%02d%02d-%02d%02d-' % (dt.year, dt.month, dt.day, dt.hour, dt.minute)
    fp = path + 'g2--uid=%s'%(uid) + CurTime + '.png'
    # Fix: run tight_layout BEFORE savefig so the saved PNG gets the
    # layout pass (matches plot_saxs_two_g2, which tightens first).
    fig.tight_layout()
    fig.savefig(fp, dpi=fig.dpi)
    plt.show()
def plot_saxs_rad_ang_g2( g2, taus, res_pargs=None, *argv,**kwargs):
    '''plot g2 results of segments with radius and angle partation ,

    g2: one-time correlation function, shape [num_taus, num_qr * num_qa]
    taus: the time delays
    res_pargs, a dict, can contains
        uid/path/q_ring_center/ang_center
    kwargs: can contains
        vlim: [vmin,vmax]: for the plot limit of y, the y-limit will be [vmin * min(y), vmx*max(y)]
        ylim/xlim: the limit of y and x

    One figure per q ring is produced and saved as a time-stamped PNG.

    e.g.
    plot_saxs_rad_ang_g2( g2b, taus= np.arange( g2b.shape[0]) *timeperframe, q_ring_center = q_ring_center, ang_center=ang_center, vlim=[.99, 1.01] )
    '''
    if res_pargs is not None:
        uid = res_pargs['uid']
        path = res_pargs['path']
        q_ring_center = res_pargs['q_ring_center']
        num_qr = len(q_ring_center)
        ang_center = res_pargs['ang_center']
        # Fix: this was stored as num_ang while all loops below read
        # num_qa, raising NameError whenever res_pargs was supplied.
        num_qa = len(ang_center)
    else:
        uid = kwargs.get('uid', 'uid')
        path = kwargs.get('path', '')
        if 'q_ring_center' in kwargs:
            q_ring_center = kwargs['q_ring_center']
            num_qr = len(q_ring_center)
        else:
            print('Please give q_ring_center')
        if 'ang_center' in kwargs:
            ang_center = kwargs['ang_center']
            num_qa = len(ang_center)
        else:
            print('Please give ang_center')

    for qr_ind in range(num_qr):
        fig = plt.figure(figsize=(10, 12))
        title_qr = ' Qr= %.5f '%(q_ring_center[qr_ind]) + r'$\AA^{-1}$'
        plt.title('uid= %s:--->'%uid + title_qr, fontsize=20, y=1.1)
        if num_qr != 1:
            plt.axis('off')   # hide parent axes when tiling sub-plots
        # roughly square sub-plot grid over the angular segments
        sx = int(round(np.sqrt(num_qa)))
        if num_qa % sx == 0:
            sy = int(num_qa / sx)
        else:
            sy = int(num_qa / sx + 1)
        for sn in range(num_qa):
            ax = fig.add_subplot(sx, sy, sn + 1)
            ax.set_ylabel("g2")
            ax.set_xlabel(r"$\tau $ $(s)$", fontsize=16)
            i = sn + qr_ind * num_qa   # column index into g2
            title_qa = " Angle= " + '%.2f'%(ang_center[sn]) + r'$^\circ$' + '( %d )'%i
            if num_qr == 1:
                title = 'uid= %s:--->'%uid + title_qr + '__' + title_qa
            else:
                title = title_qa
            ax.set_title(title)

            y = g2[:, i]
            ax.semilogx(taus, y, '-o', markersize=6)

            if 'ylim' in kwargs:
                ax.set_ylim(kwargs['ylim'])
            elif 'vlim' in kwargs:
                vmin, vmax = kwargs['vlim']
                ax.set_ylim([min(y)*vmin, max(y[1:])*vmax])
            if 'xlim' in kwargs:
                ax.set_xlim(kwargs['xlim'])

        dt = datetime.now()
        CurTime = '%s%02d%02d-%02d%02d-' % (dt.year, dt.month, dt.day, dt.hour, dt.minute)
        fp = path + 'g2--uid=%s-qr=%s'%(uid, q_ring_center[qr_ind]) + CurTime + '.png'
        # Fix: tight_layout before savefig so the saved file is laid out
        fig.tight_layout()
        fig.savefig(fp, dpi=fig.dpi)
        plt.show()
num_rings ) # relaxation rate + alpha = np.zeros( num_rings ) # alpha + baseline = np.zeros( num_rings ) # baseline + + + if function=='simple_exponential' or function=='simple': + mod = Model(stretched_auto_corr_scat_factor, independent_vars=['alpha', 'lags']) + + elif function=='stretched_exponential' or function=='stretched': + mod = Model(stretched_auto_corr_scat_factor, independent_vars=[ 'lags']) + else: + print ("The %s is not supported.The supported functions include simple_exponential and stretched_exponential"%function) + + + + for qr_ind in range(num_qr): + fig = plt.figure(figsize=(10, 12)) + #fig = plt.figure() + title_qr = ' Qr= %.5f '%( q_ring_center[qr_ind]) + r'$\AA^{-1}$' + plt.title('uid= %s:--->'%uid + title_qr,fontsize=20, y =1.1) + #print (qz_ind,title_qz) + if num_qr!=1:plt.axis('off') + sx = int(round(np.sqrt(num_qa)) ) + if num_qa%sx == 0: + sy = int(num_qa/sx) + else: + sy=int(num_qa/sx+1) + for sn in range(num_qa): + ax = fig.add_subplot(sx,sy,sn+1 ) + ax.set_ylabel("g2") + ax.set_xlabel(r"$\tau $ $(s)$", fontsize=16) + i = sn + qr_ind * num_qa + title_qa = " Angle= " + '%.2f'%( ang_center[sn]) + r'$^\circ$' + '( %d )'%i + if num_qr==1: + + title = 'uid= %s:--->'%uid + title_qr + '__' + title_qa + else: + title = title_qa + ax.set_title( title ) + + + y=g2[1:, i] + #print (y.shape) + + result1 = mod.fit(y, lags=taus[1:], beta=.05, alpha=1.0, + relaxation_rate =0.005, baseline=1.0, ) + + #print ( result1.best_values) + rate[i] = result1.best_values['relaxation_rate'] + #rate[i] = 1e-16 + beta[i] = result1.best_values['beta'] + + #baseline[i] = 1.0 + baseline[i] = result1.best_values['baseline'] + + if function=='simple_exponential' or function=='simple': + alpha[i] =1.0 + elif function=='stretched_exponential' or function=='stretched': + alpha[i] = result1.best_values['alpha'] + + ax.semilogx(taus[1:], y, 'ro') + ax.semilogx(taus[1:], result1.best_fit, '-b') + + + txts = r'$\gamma$' + r'$ = %.3f$'%(1/rate[i]) + r'$ s$' + ax.text(x =0.02, 
y=.55, s=txts, fontsize=14, transform=ax.transAxes) + txts = r'$\alpha$' + r'$ = %.3f$'%(alpha[i]) + #txts = r'$\beta$' + r'$ = %.3f$'%(beta[i]) + r'$ s^{-1}$' + ax.text(x =0.02, y=.45, s=txts, fontsize=14, transform=ax.transAxes) + + txts = r'$baseline$' + r'$ = %.3f$'%( baseline[i]) + ax.text(x =0.02, y=.35, s=txts, fontsize=14, transform=ax.transAxes) + + + + if 'ylim' in kwargs: + ax.set_ylim( kwargs['ylim']) + elif 'vlim' in kwargs: + vmin, vmax =kwargs['vlim'] + ax.set_ylim([min(y)*vmin, max(y[1:])*vmax ]) + else: + pass + if 'xlim' in kwargs: + ax.set_xlim( kwargs['xlim']) + + dt =datetime.now() + CurTime = '%s%02d%02d-%02d%02d-' % (dt.year, dt.month, dt.day,dt.hour,dt.minute) + + fp = path + 'g2--uid=%s-qr=%s'%(uid,q_ring_center[qr_ind]) + CurTime + '.png' + fig.savefig( fp, dpi=fig.dpi) + fig.tight_layout() + plt.show() + + #fig.tight_layout() + #plt.show() + result = dict( beta=beta, rate=rate, alpha=alpha, baseline=baseline ) + + return result + + + + +def save_saxs_g2( g2, res_pargs, taus=None, filename=None ): + + '''save g2 results, + res_pargs should contain + g2: one-time correlation function + res_pargs: contions taus, q_ring_center values + path: + uid: + + ''' + + + if taus is None: + taus = res_pargs[ 'taus'] + q_ring_center = res_pargs['q_ring_center'] + path = res_pargs['path'] + uid = res_pargs['uid'] + + df = DataFrame( np.hstack( [ (taus).reshape( len(g2),1) , g2] ) ) + df.columns = ( ['tau'] + [str(qs) for qs in q_ring_center ] ) + + dt =datetime.now() + CurTime = '%s%02d%02d-%02d%02d-' % (dt.year, dt.month, dt.day,dt.hour,dt.minute) + + if filename is None: + filename = 'g2' + filename += '-uid=%s-%s.csv' % (uid,CurTime) + filename1 = os.path.join(path, filename) + df.to_csv(filename1) + print( 'The g2 is saved in %s with filename as %s'%( path, filename)) + + + + + + +def stretched_auto_corr_scat_factor(lags, beta, relaxation_rate, alpha=1.0, baseline=1): + beta=abs(beta) + relaxation_rate = abs( relaxation_rate ) + baseline = abs( 
baseline ) + return beta * (np.exp(-2 * relaxation_rate * lags))**alpha + baseline + +def simple_exponential(lags, beta, relaxation_rate, baseline=1): + beta=abs(beta) + relaxation_rate = abs( relaxation_rate ) + baseline = abs( baseline ) + return beta * np.exp(-2 * relaxation_rate * lags) + baseline + + +def fit_saxs_g2( g2, res_pargs=None, function='simple_exponential', *argv,**kwargs): + ''' + Fit one-time correlation function + + The support functions include simple exponential and stretched/compressed exponential + Parameters + ---------- + g2: one-time correlation function for fit, with shape as [taus, qs] + res_pargs: a dict, contains keys + taus: the time delay, with the same length as g2 + q_ring_center: the center of q rings, for the title of each sub-plot + uid: unique id, for the title of plot + + function: + 'simple_exponential': fit by a simple exponential function, defined as + beta * np.exp(-2 * relaxation_rate * lags) + baseline + 'streched_exponential': fit by a streched exponential function, defined as + beta * (np.exp(-2 * relaxation_rate * lags))**alpha + baseline + + Returns + ------- + fit resutls: + a dict, with keys as + 'baseline': + 'beta': + 'relaxation_rate': + an example: + result = fit_g2( g2, res_pargs, function = 'simple') + result = fit_g2( g2, res_pargs, function = 'stretched') + ''' + + + if res_pargs is not None: + uid = res_pargs['uid'] + path = res_pargs['path'] + taus = res_pargs[ 'taus'] + q_ring_center = res_pargs[ 'q_ring_center'] + + else: + taus = kwargs['taus'] + if 'uid' in kwargs.keys(): + uid = kwargs['uid'] + else: + uid = 'uid' + + if 'q_ring_center' in kwargs.keys(): + q_ring_center = kwargs[ 'q_ring_center'] + else: + q_ring_center = np.arange( g2.shape[1] ) + + if 'path' in kwargs.keys(): + path = kwargs['path'] + else: + path = '' + + if 'fit_range' in kwargs.keys(): + fit_range = kwargs['fit_range'] + else: + fit_range=None + + + num_rings = g2.shape[1] + + + beta = np.zeros( num_rings ) # contrast factor + 
rate = np.zeros( num_rings ) # relaxation rate + alpha = np.zeros( num_rings ) # alpha + baseline = np.zeros( num_rings ) # baseline + + sx = int( round (np.sqrt(num_rings)) ) + if num_rings%sx==0: + sy = int(num_rings/sx) + else: + sy = int(num_rings/sx+1) + + if function=='simple_exponential' or function=='simple': + mod = Model(stretched_auto_corr_scat_factor, independent_vars=['alpha', 'lags']) + + elif function=='stretched_exponential' or function=='stretched': + mod = Model(stretched_auto_corr_scat_factor, independent_vars=[ 'lags']) + else: + print ("The %s is not supported.The supported functions include simple_exponential and stretched_exponential"%function) + + if num_rings!=1: + + #fig = plt.figure(figsize=(14, 10)) + fig = plt.figure(figsize=(12, 10)) + plt.axis('off') + #plt.axes(frameon=False) + #print ('here') + plt.xticks([]) + plt.yticks([]) + + + else: + fig = plt.figure(figsize=(8,8)) + + #fig = plt.figure(figsize=(8,8)) + plt.title('uid= %s'%uid,fontsize=20, y =1.06) + + for i in range(num_rings): + ax = fig.add_subplot(sx, sy, i+1 ) + if fit_range is not None: + y=g2[1:, i][fit_range[0]:fit_range[1]] + lags=taus[1:][fit_range[0]:fit_range[1]] + else: + y=g2[1:, i] + lags=taus[1:] + + result1 = mod.fit(y, lags=lags, beta=.05, alpha=1.0, + relaxation_rate =0.005, baseline=1.0, ) + + #print ( result1.best_values) + rate[i] = result1.best_values['relaxation_rate'] + #rate[i] = 1e-16 + beta[i] = result1.best_values['beta'] + + #baseline[i] = 1.0 + baseline[i] = result1.best_values['baseline'] + + if function=='simple_exponential' or function=='simple': + alpha[i] =1.0 + elif function=='stretched_exponential' or function=='stretched': + alpha[i] = result1.best_values['alpha'] + + ax.semilogx(taus[1:], g2[1:, i], 'bo') + + ax.semilogx(lags, result1.best_fit, '-r') + + ax.set_title(" Q= " + '%.5f '%(q_ring_center[i]) + r'$\AA^{-1}$') + #ax.set_ylim([min(y)*.95, max(y[1:]) *1.05]) + #ax.set_ylim([0.9999, max(y[1:]) *1.002]) + txts = r'$\gamma$' + r'$ = 
%.3f$'%(1/rate[i]) + r'$ s$' + ax.text(x =0.02, y=.55, s=txts, fontsize=14, transform=ax.transAxes) + txts = r'$\alpha$' + r'$ = %.3f$'%(alpha[i]) + #txts = r'$\beta$' + r'$ = %.3f$'%(beta[i]) + r'$ s^{-1}$' + ax.text(x =0.02, y=.45, s=txts, fontsize=14, transform=ax.transAxes) + + txts = r'$baseline$' + r'$ = %.3f$'%( baseline[i]) + ax.text(x =0.02, y=.35, s=txts, fontsize=14, transform=ax.transAxes) + + txts = r'$\beta$' + r'$ = %.3f$'%(beta[i]) #+ r'$ s^{-1}$' + ax.text(x =0.02, y=.25, s=txts, fontsize=14, transform=ax.transAxes) + + if 'ylim' in kwargs: + ax.set_ylim( kwargs['ylim']) + elif 'vlim' in kwargs: + vmin, vmax =kwargs['vlim'] + ax.set_ylim([min(y)*vmin, max(y[1:])*vmax ]) + else: + ax.set_ylim([min(y)*.95, max(y[1:]) *1.05]) + if 'xlim' in kwargs: + ax.set_xlim( kwargs['xlim']) + + + dt =datetime.now() + CurTime = '%s%02d%02d-%02d%02d-' % (dt.year, dt.month, dt.day,dt.hour,dt.minute) + + fp = path + 'g2--uid=%s'%(uid) + CurTime + '--Fit.png' + fig.savefig( fp, dpi=fig.dpi) + + fig.tight_layout() + plt.show() + + result = dict( beta=beta, rate=rate, alpha=alpha, baseline=baseline ) + + return result + + +def linear_fit( x,y): + D0 = np.polyfit(x, y, 1) + gmfit = np.poly1d(D0) + return D0, gmfit + + +def fit_q2_rate( q2, rate, plot_=True, *argv,**kwargs): + x= q2 + y=rate + + if 'fit_range' in kwargs.keys(): + fit_range = kwargs['fit_range'] + else: + fit_range= None + + if 'uid' in kwargs.keys(): + uid = kwargs['uid'] + else: + uid = 'uid' + + if 'path' in kwargs.keys(): + path = kwargs['path'] + else: + path = '' + + if fit_range is not None: + y=rate[fit_range[0]:fit_range[1]] + x=q2[fit_range[0]:fit_range[1]] + + D0, gmfit = linear_fit( x,y) + print ('The fitted diffusion coefficient D0 is: %.2E A^2S-1'%D0[0]) + if plot_: + fig,ax = plt.subplots() + plt.title('Q2-Rate--uid= %s_Fit'%uid,fontsize=20, y =1.06) + + ax.plot(q2,rate, 'ro', ls='') + ax.plot(x, gmfit(x), ls='-') + + txts = r'$D0: %.2f$'%D0[0] + r' $A^2$' + r'$s^{-1}$' + ax.text(x =0.15, 
y=.75, s=txts, fontsize=14, transform=ax.transAxes) + + + + ax.set_ylabel('Relaxation rate 'r'$\gamma$'"($s^{-1}$)") + ax.set_xlabel("$q^2$"r'($\AA^{-2}$)') + + dt =datetime.now() + CurTime = '%s%02d%02d-%02d%02d-' % (dt.year, dt.month, dt.day,dt.hour,dt.minute) + fp = path + 'Q2-Rate--uid=%s'%(uid) + CurTime + '--Fit.png' + fig.savefig( fp, dpi=fig.dpi) + + fig.tight_layout() + plt.show() + + return D0 + + + +def plot_gamma(): + '''not work''' + fig, ax = plt.subplots() + ax.set_title('Uid= %s--Beta'%uid) + ax.set_title('Uid= %s--Gamma'%uid) + #ax.plot( q_ring_center**2 , 1/rate, 'ro', ls='--') + + ax.loglog( q_ring_center , 1/result['rate'], 'ro', ls='--') + #ax.set_ylabel('Log( Beta0 'r'$\beta$'"($s^{-1}$)") + ax.set_ylabel('Log( Gamma )') + ax.set_xlabel("$Log(q)$"r'($\AA^{-1}$)') + plt.show() diff --git a/chxtools/chx_generic_functions.py b/chxtools/chx_generic_functions.py new file mode 100644 index 0000000..409842a --- /dev/null +++ b/chxtools/chx_generic_functions.py @@ -0,0 +1,653 @@ +from chx_libs import * + +##### +#load data by databroker + + +def get_sid_filenames(header): + """get a bluesky scan_id, unique_id, filename by giveing uid and detector + + Parameters + ---------- + header: a header of a bluesky scan, e.g. 
db[-1] + + Returns + ------- + scan_id: integer + unique_id: string, a full string of a uid + filename: sring + + Usuage: + sid,uid, filenames = get_sid_filenames(db[uid]) + + """ + + keys = [k for k, v in header.descriptors[0]['data_keys'].items() if 'external' in v] + events = get_events( header, keys, handler_overrides={key: RawHandler for key in keys}) + key, = keys + filenames = [ str( ev['data'][key][0]) + '_'+ str(ev['data'][key][2]['seq_id']) for ev in events] + sid = header['start']['scan_id'] + uid= header['start']['uid'] + + return sid,uid, filenames + + +def load_data( uid , detector = 'eiger4m_single_image' ): + """load bluesky scan data by giveing uid and detector + + Parameters + ---------- + uid: unique ID of a bluesky scan + detector: the used area detector + + Returns + ------- + image data: a pims frames series + if not success read the uid, will return image data as 0 + + Usuage: + imgs = load_data( uid, detector ) + md = imgs.md + """ + hdr = db[uid] + flag =1 + while flag<2 and flag !=0: + try: + ev, = get_events(hdr, [detector]) + flag = 0 + + except: + flag += 1 + print ('Trying again ...!') + + + if flag: + try: + imgs = get_images( hdr, detector) + if len(imgs[0])==1: + md = imgs[0].md + imgs = pims.pipeline(lambda img: img[0])(imgs) + imgs.md = md + except: + print ("Can't Load Data!") + uid = '00000' #in case of failling load data + imgs = 0 + else: + imgs = ev['data'][detector] + #print (imgs) + return imgs + + + +def load_data2( uid , detector = 'eiger4m_single_image' ): + """load bluesky scan data by giveing uid and detector + + Parameters + ---------- + uid: unique ID of a bluesky scan + detector: the used area detector + + Returns + ------- + image data: a pims frames series + if not success read the uid, will return image data as 0 + + Usuage: + imgs = load_data( uid, detector ) + md = imgs.md + """ + hdr = db[uid] + flag =1 + while flag<4 and flag !=0: + try: + ev, = get_events(hdr, [detector]) + flag =0 + except: + flag += 1 + 
print ('Trying again ...!') + + if flag: + print ("Can't Load Data!") + uid = '00000' #in case of failling load data + imgs = 0 + else: + imgs = ev['data'][detector] + + #print (imgs) + return imgs + + +def load_mask( path, mask_name, plot_ = False, *argv,**kwargs): + + """load a mask file + the mask is a numpy binary file (.npy) + + Parameters + ---------- + path: the path of the mask file + mask_name: the name of the mask file + plot_: a boolen type + + Returns + ------- + mask: array + if plot_ =True, will show the mask + + Usuage: + mask = load_mask( path, mask_name, plot_ = True ) + """ + + mask = np.load( path + mask_name ) + if plot_: + show_img( mask, *argv,**kwargs) + return mask + + +def create_hot_pixel_mask(img, threshold): + '''create a hot pixel mask by giving threshold''' + hmask = np.ones_like( img ) + hmask[np.where( img > threshold)]=0 + return hmask + + +def apply_mask( imgs, mask): + '''apply mask to imgs to produce a generator + + Usuages: + imgsa = apply_mask( imgs, mask ) + good_series = apply_mask( imgs[good_start:], mask ) + + ''' + return pims.pipeline(lambda img: np.int_(mask) * img)(imgs) # lazily apply mask + + +def reverse_updown( imgs): + '''reverse imgs upside down to produce a generator + + Usuages: + imgsr = reverse_updown( imgs) + + + ''' + return pims.pipeline(lambda img: img[::-1,:])(imgs) # lazily apply mask + +############ +###plot data + + +def show_img( image, ax=None,xlim=None, ylim=None, save=False,image_name=None,path=None, + aspect=None, logs=False,vmin=None,vmax=None, + *argv,**kwargs ): + """a simple function to show image by using matplotlib.plt imshow + pass *argv,**kwargs to imshow + + Parameters + ---------- + image : array + Image to show + Returns + ------- + None + """ + + if ax is None: + fig, ax = plt.subplots() + else: + fig, ax=ax + + if not logs: + #im=ax.imshow(img, origin='lower' ,cmap='viridis',interpolation="nearest" , vmin=vmin,vmax=vmax) + im=ax.imshow(image, origin='lower' 
def plot1D( y,x=None, ax=None,*argv,**kwargs):
    """a simple function to plot two-column data by using matplotlib.plot
    pass *argv,**kwargs to plot

    Parameters
    ----------
    y: column-y
    x: column-x, by default x=None, the plot will use index of y as x-axis
    ax: an existing Axes to draw on; a new figure is created when None
    kwargs may contain:
        legend: label for the curve
        logx/logy/logxy: use a log scale on x, y, or both axes
        xlim/ylim, xlabel/ylabel: axis limits/labels
        save (with path and plot_name): save the figure as a png
    Returns
    -------
    None
    """
    if ax is None:
        fig, ax = plt.subplots()
    legend = kwargs.get('legend', ' ')

    # Fix: pick the plotting method ONCE. The original always drew a
    # linear curve first and then, when logx/logy/logxy was given, drew
    # the same data a second time with the log variant.
    if 'logx' in kwargs:
        draw = ax.semilogx
    elif 'logy' in kwargs:
        draw = ax.semilogy
    elif 'logxy' in kwargs:
        draw = ax.loglog
    else:
        draw = ax.plot

    if x is None:
        draw(y, marker='o', ls='-', label=legend)
    else:
        draw(x, y, marker='o', ls='-', label=legend)

    if 'xlim' in kwargs:
        ax.set_xlim(kwargs['xlim'])
    if 'ylim' in kwargs:
        ax.set_ylim(kwargs['ylim'])
    if 'xlabel' in kwargs:
        ax.set_xlabel(kwargs['xlabel'])
    if 'ylabel' in kwargs:
        ax.set_ylabel(kwargs['ylabel'])

    ax.legend(loc='best')
    if kwargs.get('save', False):
        dt = datetime.now()
        CurTime = '%s%02d%02d-%02d%02d-' % (dt.year, dt.month, dt.day, dt.hour, dt.minute)
        fp = kwargs['path'] + '%s'%(kwargs['plot_name']) + CurTime + '.png'
        # Fix: use the figure that owns ax, so saving also works when
        # the caller passed an existing Axes (fig was undefined before).
        ax.figure.savefig(fp, dpi=ax.figure.dpi)
kwargs['uid'] + fig, ax = plt.subplots() + ax.plot(imgsum,'bo') + ax.set_title('Uid= %s--imgsum'%uid) + ax.set_xlabel( 'Frame_bin_%s'%sampling ) + ax.set_ylabel( 'Total_Intensity' ) + plt.show() + bad_frame_list = np.where( np.array(imgsum) > bad_pixel_threshold )[0] + if len(bad_frame_list): + print ('Bad frame list are: %s' %bad_frame_list) + else: + print ('No bad frames are involved.') + return imgsum,bad_frame_list + + + + +def create_time_slice( N, slice_num, slice_width, edges=None ): + '''create a ROI time regions ''' + if edges is not None: + time_edge = edges + else: + if slice_num==1: + time_edge = [ [0,N] ] + else: + tstep = N // slice_num + te = np.arange( 0, slice_num +1 ) * tstep + tc = np.int_( (te[:-1] + te[1:])/2 )[1:-1] + if slice_width%2: + sw = slice_width//2 +1 + time_edge = [ [0,slice_width], ] + [ [s-sw+1,s+sw] for s in tc ] + [ [N-slice_width,N]] + else: + sw= slice_width//2 + time_edge = [ [0,slice_width], ] + [ [s-sw,s+sw] for s in tc ] + [ [N-slice_width,N]] + + + + return time_edge + + +def show_label_array_on_image(ax, image, label_array, cmap=None,norm=None, log_img=True,alpha=0.3, + imshow_cmap='gray', **kwargs): #norm=LogNorm(), + """ + This will plot the required ROI's(labeled array) on the image + Additional kwargs are passed through to `ax.imshow`. + If `vmin` is in kwargs, it is clipped to minimum of 0.5. + Parameters + ---------- + ax : Axes + The `Axes` object to add the artist too + image : array + The image array + label_array : array + Expected to be an unsigned integer array. 
0 is background, + positive integers label region of interest + cmap : str or colormap, optional + Color map to use for plotting the label_array, defaults to 'None' + imshow_cmap : str or colormap, optional + Color map to use for plotting the image, defaults to 'gray' + norm : str, optional + Normalize scale data, defaults to 'Lognorm()' + Returns + ------- + im : AxesImage + The artist added to the axes + im_label : AxesImage + The artist added to the axes + """ + ax.set_aspect('equal') + if log_img: + im = ax.imshow(image, cmap=imshow_cmap, interpolation='none',norm=LogNorm(norm),**kwargs) #norm=norm, + else: + im = ax.imshow(image, cmap=imshow_cmap, interpolation='none',norm=norm,**kwargs) #norm=norm, + + im_label = mpl_plot.show_label_array(ax, label_array, cmap=cmap, norm=norm, alpha=alpha, + **kwargs) # norm=norm, + + + return im, im_label + + + +def show_ROI_on_image( image, ROI, center=None, rwidth=400,alpha=0.3, label_on = True, *argv,**kwargs): + '''show ROI on an image + image: the data frame + ROI: the interested region + center: the plot center + rwidth: the plot range around the center + + ''' + + vmin=0.01 + if 'vmin' in kwargs.keys(): + vmin = kwargs['vmin'] + + vmax=5 + if 'vmax' in kwargs.keys(): + vmax = kwargs['vmax'] + + fig, axes = plt.subplots(figsize=(8,8)) + axes.set_title("ROI on Image") + im,im_label = show_label_array_on_image(axes, image, ROI, imshow_cmap='viridis', + cmap='Paired',alpha=alpha, + vmin=vmin, vmax=vmax, origin="lower") + + #fig.colorbar(im) + #rwidth = 400 + if center is not None: + x1,x2 = [center[1] - rwidth, center[1] + rwidth] + y1,y2 = [center[0] - rwidth, center[0] + rwidth] + axes.set_xlim( [x1,x2]) + axes.set_ylim( [y1,y2]) + + if label_on: + num_qzr = len(np.unique( ROI )) -1 + for i in range( 1, num_qzr + 1 ): + ind = np.where( ROI == i)[1] + indz = np.where( ROI == i)[0] + c = '%i'%i + y_val = int( indz.mean() ) + + x_val = int( ind.mean() ) + #print (xval, y) + axes.text(x_val, y_val, c, va='center', 
ha='center') + + + fig.colorbar(im_label) + plt.show() + + + + +def crop_image( image, crop_mask ): + + ''' Crop the non_zeros pixels of an image to a new image + + + ''' + from skimage.util import crop, pad + pxlst = np.where(crop_mask.ravel())[0] + dims = crop_mask.shape + imgwidthy = dims[1] #dimension in y, but in plot being x + imgwidthx = dims[0] #dimension in x, but in plot being y + #x and y are flipped??? + #matrix notation!!! + pixely = pxlst%imgwidthy + pixelx = pxlst//imgwidthy + + minpixelx = np.min(pixelx) + minpixely = np.min(pixely) + maxpixelx = np.max(pixelx) + maxpixely = np.max(pixely) + crops = crop_mask*image + img_crop = crop( crops, ((minpixelx, imgwidthx - maxpixelx -1 ), + (minpixely, imgwidthy - maxpixely -1 )) ) + return img_crop + + +def get_avg_img( data_series, sampling = 100, plot_ = False , *argv,**kwargs): + '''Get average imagef from a data_series by every sampling number to save time''' + avg_img = np.average(data_series[:: sampling], axis=0) + if plot_: + fig, ax = plt.subplots() + uid = 'uid' + if 'uid' in kwargs.keys(): + uid = kwargs['uid'] + + im = ax.imshow(avg_img , cmap='viridis',origin='lower', + norm= LogNorm(vmin=0.001, vmax=1e2)) + #ax.set_title("Masked Averaged Image") + ax.set_title('Uid= %s--Masked Averaged Image'%uid) + fig.colorbar(im) + plt.show() + return avg_img + + + +def check_ROI_intensity( avg_img, ring_mask, ring_number=3 , *argv,**kwargs): + + """plot intensity versus pixel of a ring + Parameters + ---------- + avg_img: 2D-array, the image + ring_mask: 2D-array + ring_number: which ring to plot + + Returns + ------- + + + """ + uid = 'uid' + if 'uid' in kwargs.keys(): + uid = kwargs['uid'] + pixel = roi.roi_pixel_values(avg_img, ring_mask, [ring_number] ) + fig, ax = plt.subplots() + ax.set_title('Uid= %s--check-RIO-%s-intensity'%(uid, ring_number) ) + ax.plot( pixel[0][0] ,'bo', ls='-' ) + ax.set_ylabel('Intensity') + ax.set_xlabel('pixel') + plt.show() + return pixel[0][0] + +from tqdm import tqdm + 
+def cal_g2( image_series, ring_mask, bad_image_process, + bad_frame_list=None,good_start=0, num_buf = 8 ): + '''calculation g2 by using a multi-tau algorithm''' + + noframes = len( image_series) # number of frames, not "no frames" + #num_buf = 8 # number of buffers + + if bad_image_process: + import skbeam.core.mask as mask_image + bad_img_list = np.array( bad_frame_list) - good_start + new_imgs = mask_image.bad_to_nan_gen( image_series, bad_img_list) + + num_lev = int(np.log( noframes/(num_buf-1))/np.log(2) +1) +1 + print ('In this g2 calculation, the buf and lev number are: %s--%s--'%(num_buf,num_lev)) + print ('%s frames will be processed...'%(noframes)) + print( 'Bad Frames involved!') + + g2, lag_steps = corr.multi_tau_auto_corr(num_lev, num_buf, ring_mask, tqdm( new_imgs) ) + print( 'G2 calculation DONE!') + + else: + + num_lev = int(np.log( noframes/(num_buf-1))/np.log(2) +1) +1 + print ('In this g2 calculation, the buf and lev number are: %s--%s--'%(num_buf,num_lev)) + print ('%s frames will be processed...'%(noframes)) + g2, lag_steps = corr.multi_tau_auto_corr(num_lev, num_buf, ring_mask, image_series ) + print( 'G2 calculation DONE!') + + return g2, lag_steps + + + +def run_time(t0): + '''Calculate running time of a program + Parameters + ---------- + t0: time_string, t0=time.time() + The start time + Returns + ------- + Print the running time + + One usage + --------- + t0=time.time() + .....(the running code) + run_time(t0) + ''' + + elapsed_time = time.time() - t0 + print ('Total time: %.2f min' %(elapsed_time/60.)) + + +def trans_data_to_pd(data, label=None,dtype='array'): + ''' + convert data into pandas.DataFrame + Input: + data: list or np.array + label: the coloum label of the data + dtype: list or array + Output: + a pandas.DataFrame + ''' + #lists a [ list1, list2...] 
all the list have the same length + from numpy import arange,array + import pandas as pd,sys + if dtype == 'list': + data=array(data).T + elif dtype == 'array': + data=array(data) + else: + print("Wrong data type! Now only support 'list' and 'array' tpye") + N,M=data.shape + index = arange( N ) + if label is None:label=['data%s'%i for i in range(M)] + #print label + df = pd.DataFrame( data, index=index, columns= label ) + return df + + +def save_lists( data, label=None, filename=None, path=None): + ''' + save lists to a CSV file wit filename in path + + ''' + M,N = len(data[0]),len(data) + d = np.zeros( [N,M] ) + for i in range(N): + d[i] = data[i] + + + df = trans_data_to_pd(d.T, label, 'array') + #dt =datetime.now() + #CurTime = '%s%02d%02d-%02d%02d-' % (dt.year, dt.month, dt.day,dt.hour,dt.minute) + if filename is None: + filename = 'data' + filename = os.path.join(path, filename +'.csv') + df.to_csv(filename) + + +def save_arrays( data, label=None, dtype='array', filename=None, path=None): + ''' + save data to a CSV file wit filename in path + + ''' + df = trans_data_to_pd(data, label,dtype) + #dt =datetime.now() + #CurTime = '%s%02d%02d-%02d%02d-' % (dt.year, dt.month, dt.day,dt.hour,dt.minute) + if filename is None: + filename = 'data' + filename = os.path.join(path, filename +'.csv') + df.to_csv(filename) + #print( 'The g2 of uid= %s is saved in %s with filename as g2-%s-%s.csv'%(uid, path, uid, CurTime)) + + \ No newline at end of file diff --git a/chxtools/chx_libs.py b/chxtools/chx_libs.py new file mode 100644 index 0000000..3bba7b9 --- /dev/null +++ b/chxtools/chx_libs.py @@ -0,0 +1,53 @@ +from databroker import DataBroker as db, get_images, get_table, get_events, get_fields +from filestore.api import register_handler, deregister_handler +#from filestore.retrieve import _h_registry, _HANDLER_CACHE, HandlerBase +from eiger_io.pims_reader import EigerImages +from chxtools import handlers +from filestore.path_only_handlers import RawHandler +## Import all 
the required packages for Data Analysis + +#* scikit-beam - data analysis tools for X-ray science +# - https://github.com/scikit-beam/scikit-beam +#* xray-vision - plotting helper functions for X-ray science +# - https://github.com/Nikea/xray-vision + +import xray_vision +import xray_vision.mpl_plotting as mpl_plot +from xray_vision.mpl_plotting import speckle +from xray_vision.mask.manual_mask import ManualMask + +import skbeam.core.roi as roi +import skbeam.core.correlation as corr +import skbeam.core.utils as utils + +import numpy as np +from datetime import datetime +import h5py + +import pims +from pandas import DataFrame +import os, sys, time + +import matplotlib as mpl +import matplotlib.pyplot as plt +from matplotlib.colors import LogNorm + + +from lmfit import Model + + +from matplotlib import gridspec + +from mpl_toolkits.axes_grid1 import make_axes_locatable + + +import itertools +mcolors = itertools.cycle(['b', 'g', 'r', 'c', 'm', 'y', 'k','darkgoldenrod','oldlace', 'brown','dodgerblue' ]) +markers = itertools.cycle(list(plt.Line2D.filled_markers)) +lstyles = itertools.cycle(['-', '--', '-.','.',':']) + + + + + + diff --git a/chxtools/develop.py b/chxtools/develop.py index 34bab8c..1dca6f5 100644 --- a/chxtools/develop.py +++ b/chxtools/develop.py @@ -10,13 +10,13 @@ from databroker import DataBroker as db, get_images, get_table, get_events from filestore.api import register_handler, deregister_handler -from filestore.retrieve import _h_registry, _HANDLER_CACHE +#from filestore.retrieve import _h_registry, _HANDLER_CACHE import numpy as np import matplotlib as mpl import matplotlib.pyplot as plt from matplotlib.colors import LogNorm -import skxray.core.roi as roi +import skbeam.core.roi as roi from datetime import datetime import logging @@ -29,10 +29,10 @@ from xray_vision.mpl_plotting import speckle from xray_vision.mask.manual_mask import ManualMask -import skxray.core.roi as roi +import skbeam.core.roi as roi -import skxray.core.correlation as corr 
-import skxray.core.utils as utils +import skbeam.core.correlation as corr +import skbeam.core.utils as utils @@ -198,7 +198,7 @@ def __call__(self, seq_id): return FixedEigerImages(master_path, md) deregister_handler('AD_EIGER') -_HANDLER_CACHE.clear() +#_HANDLER_CACHE.clear() register_handler('AD_EIGER', LazyEigerHandler) @@ -1256,4 +1256,4 @@ def _process(buf, G, past_intensity_norm, future_intensity_norm, - \ No newline at end of file + diff --git a/chxtools/speckle.py b/chxtools/speckle.py index 2f0dd0b..7ca0fa9 100644 --- a/chxtools/speckle.py +++ b/chxtools/speckle.py @@ -10,14 +10,19 @@ import numpy as np import time -from skxray.core import roi -from skxray.core.utils import bin_edges_to_centers, geometric_series +from skbeam.core import roi +from skbeam.core.utils import bin_edges_to_centers, geometric_series import logging logger = logging.getLogger(__name__) import sys +import matplotlib as mpl +import matplotlib.pyplot as plt +from matplotlib.colors import LogNorm +from datetime import datetime + def xsvs(image_sets, label_array, number_of_img, timebin_num=2, time_bin=None, max_cts=None, bad_images = None, threshold=None): """ @@ -164,7 +169,10 @@ def xsvs(image_sets, label_array, number_of_img, timebin_num=2, time_bin=None, # check whether the number of levels is one, otherwise # continue processing the next level level = 1 - processing=1 + if number_of_img>1: + processing=1 + else: + processing=0 #print ('track_level: %s'%track_level) #while level < num_times: #if not track_level[level]: @@ -550,4 +558,268 @@ def get_roi(data, threshold=1e-3): +def plot_sxvs( Knorm_bin_edges, spe_cts_all, uid=None,q_ring_center=None,xlim=[0,3.5],time_steps=None): + '''a convinent function to plot sxvs results''' + num_rings = spe_cts_all.shape[1] + num_times = Knorm_bin_edges.shape[0] + sx = int(round(np.sqrt(num_rings)) ) + if num_rings%sx == 0: + sy = int(num_rings/sx) + else: + sy=int(num_rings/sx+1) + fig = plt.figure(figsize=(10,6)) + plt.title('uid= 
%s'%uid,fontsize=20, y =1.02) + plt.axes(frameon=False) + plt.xticks([]) + plt.yticks([]) + if time_steps is None: + time_steps = [ 2**i for i in range(num_times)] + for i in range(num_rings): + for j in range(num_times): + axes = fig.add_subplot(sx, sy, i+1 ) + axes.set_xlabel("K/") + axes.set_ylabel("P(K)") + art, = axes.plot(Knorm_bin_edges[j, i][:-1], spe_cts_all[j, i], '-o', + label=str(time_steps[j])+" ms") + axes.set_xlim(xlim) + axes.set_title("Q "+ '%.4f '%(q_ring_center[i])+ r'$\AA^{-1}$') + axes.legend(loc='best', fontsize = 6) + plt.show() + fig.tight_layout() + + + + +def fit_xsvs( Knorm_bin_edges, bin_edges,spe_cts_all, K_mean=None,func= 'bn',threshold=1e-7, + uid=None,q_ring_center=None,xlim=[0,3.5], ylim=None,time_steps=None): + '''a convinent function to plot sxvs results + supporting fit function include: + 'bn': Negative Binomaial Distribution + 'gm': Gamma Distribution + 'ps': Poission Distribution + + ''' + from lmfit import Model + from scipy.interpolate import UnivariateSpline + + if func=='bn': + mod=Model(nbinom_dist) + elif func=='gm': + mod = Model(gamma_dist, indepdent_vars=['K']) + elif func=='ps': + mod = Model(poisson_dist) + else: + print ("the current supporting function include 'bn', 'gm','ps'") + + #g_mod = Model(gamma_dist, indepdent_vars=['K']) + #g_mod = Model( gamma_dist ) + #n_mod = Model(nbinom_dist) + #p_mod = Model(poisson_dist) + #dc_mod = Model(diff_mot_con_factor) + + num_rings = spe_cts_all.shape[1] + num_times = Knorm_bin_edges.shape[0] + + M_val = {} + K_val = {} + sx = int(round(np.sqrt(num_rings))) + if num_rings%sx == 0: + sy = int(num_rings/sx) + else: + sy = int(num_rings/sx+1) + fig = plt.figure(figsize=(10, 6)) + plt.title('uid= %s'%uid+" Fitting with Negative Binomial Function", fontsize=20, y=1.02) + plt.axes(frameon=False) + plt.xticks([]) + plt.yticks([]) + if time_steps is None: + time_steps = [ 2**i for i in range(num_times)] + + for i in range(num_rings): + M_val[i]=[] + K_val[i]=[] + for j in range( 
num_times ): + # find the best values for K and M from fitting + if threshold is not None: + rois = get_roi(data=spe_cts_all[j, i], threshold=threshold) + else: + rois = range( len( spe_cts_all[j, i] ) ) + + #print ( rois ) + if func=='bn': + result = mod.fit(spe_cts_all[j, i][rois], + bin_values=bin_edges[j, i][:-1][rois], + K=5 * 2**j, M=12) + elif func=='gm': + result = mod.fit(spe_cts_all[j, i][rois] , + bin_values=bin_edges[j, i][:-1][rois] , + K=K_mean[i]*2**j, M= 20 ) + elif func=='ps': + result = mod.fit(spe_cts_all[j, i][rois], + bin_values=bin_edges[j, i][:-1][rois], + K=K_mean[i]*2**j) + else: + pass + + if func=='bn': + K_val[i].append(result.best_values['K']) + M_val[i].append(result.best_values['M']) + elif func=='gm': + M_val[i].append(result.best_values['M']) + elif func=='ps': + K_val[i].append(result.best_values['K']) + else: + pass + + axes = fig.add_subplot(sx, sy, i+1 ) + axes.set_xlabel("K/") + axes.set_ylabel("P(K)") + + # Using the best K and M values interpolate and get more values for fitting curve + fitx_ = np.linspace(0, max(Knorm_bin_edges[j, i][:-1]), 1000 ) + fitx = np.linspace(0, max(bin_edges[j, i][:-1]), 1000 ) + if func=='bn': + fity = nbinom_dist( fitx, K_val[i][j], M_val[i][j] ) # M and K are fitted best values + label="nbinom" + txt= 'K='+'%.3f'%( K_val[i][0]) +','+ 'M='+'%.3f'%(M_val[i][0]) + elif func=='gm': + fity = gamma_dist( fitx, K_mean[i]*2**j, M_val[i][j] ) + label="gamma" + txt = 'M='+'%.3f'%(M_val[i][0]) + elif func=='ps': + fity = poisson_dist(fitx, K_val[i][j] ) + label="poisson" + txt = 'K='+'%.3f'%(K_val[i][0]) + else:pass + + + if j == 0: + art, = axes.plot( fitx_,fity, '-b', label=label) + else: + art, = axes.plot( fitx_,fity, '-b') + + + if i==0: + art, = axes.plot( Knorm_bin_edges[j, i][:-1], spe_cts_all[j, i], 'o', + label=str(time_steps[j])+" ms") + else: + art, = axes.plot( Knorm_bin_edges[j, i][:-1], spe_cts_all[j, i], 'o', + ) + + + axes.set_xlim(0, 3.5) + if ylim is not None: + axes.set_ylim( ylim) + # 
Annotate the best K and M values on the plot + + axes.annotate(r'%s'%txt, + xy=(1, 0.25), + xycoords='axes fraction', fontsize=10, + horizontalalignment='right', verticalalignment='bottom') + axes.set_title("Q "+ '%.4f '%(q_ring_center[i])+ r'$\AA^{-1}$') + axes.legend(loc='best', fontsize = 6) + plt.show() + fig.tight_layout() + + return M_val, K_val + + +def plot_xsvs_g2( g2, taus, res_pargs=None, *argv,**kwargs): + '''plot g2 results, + g2: one-time correlation function + taus: the time delays + res_pargs, a dict, can contains + uid/path/qr_center/qz_center/ + kwargs: can contains + vlim: [vmin,vmax]: for the plot limit of y, the y-limit will be [vmin * min(y), vmx*max(y)] + ylim/xlim: the limit of y and x + + e.g. + plot_gisaxs_g2( g2b, taus= np.arange( g2b.shape[0]) *timeperframe, q_ring_center = q_ring_center, vlim=[.99, 1.01] ) + + ''' + + + if res_pargs is not None: + uid = res_pargs['uid'] + path = res_pargs['path'] + q_ring_center = res_pargs[ 'q_ring_center'] + + else: + + if 'uid' in kwargs.keys(): + uid = kwargs['uid'] + else: + uid = 'uid' + + if 'q_ring_center' in kwargs.keys(): + q_ring_center = kwargs[ 'q_ring_center'] + else: + q_ring_center = np.arange( g2.shape[1] ) + + if 'path' in kwargs.keys(): + path = kwargs['path'] + else: + path = '' + + num_rings = g2.shape[1] + sx = int(round(np.sqrt(num_rings)) ) + if num_rings%sx == 0: + sy = int(num_rings/sx) + else: + sy=int(num_rings/sx+1) + + #print (num_rings) + if num_rings!=1: + + #fig = plt.figure(figsize=(14, 10)) + fig = plt.figure(figsize=(12, 10)) + plt.axis('off') + #plt.axes(frameon=False) + #print ('here') + plt.xticks([]) + plt.yticks([]) + + + else: + fig = plt.figure(figsize=(8,8)) + + + + plt.title('uid= %s'%uid,fontsize=20, y =1.06) + for i in range(num_rings): + ax = fig.add_subplot(sx, sy, i+1 ) + ax.set_ylabel("beta") + ax.set_title(" Q= " + '%.5f '%(q_ring_center[i]) + r'$\AA^{-1}$') + y=g2[:, i] + #print (y) + ax.semilogx(taus, y, '-o', markersize=6) + 
#ax.set_ylim([min(y)*.95, max(y[1:])*1.05 ]) + if 'ylim' in kwargs: + ax.set_ylim( kwargs['ylim']) + elif 'vlim' in kwargs: + vmin, vmax =kwargs['vlim'] + ax.set_ylim([min(y)*vmin, max(y[1:])*vmax ]) + else: + pass + if 'xlim' in kwargs: + ax.set_xlim( kwargs['xlim']) + + + dt =datetime.now() + CurTime = '%s%02d%02d-%02d%02d-' % (dt.year, dt.month, dt.day,dt.hour,dt.minute) + + fp = path + 'g2--uid=%s'%(uid) + CurTime + '.png' + fig.savefig( fp, dpi=fig.dpi) + fig.tight_layout() + plt.show() + + + + + + + + + From 40f06d00703e897a8545dfc6ceeac9bd3a0dd509 Mon Sep 17 00:00:00 2001 From: Zhang Yugang Date: Tue, 14 Jun 2016 12:36:51 -0400 Subject: [PATCH 2/2] fix import problem, delete obsolete py --- chxtools/Circular_Average.py | 158 ---- chxtools/XPCS_GiSAXS.py | 2 +- chxtools/XPCS_SAXS.py | 2 +- chxtools/chx_generic_functions.py | 4 +- chxtools/develop.py | 1259 ----------------------------- 5 files changed, 4 insertions(+), 1421 deletions(-) delete mode 100644 chxtools/Circular_Average.py delete mode 100644 chxtools/develop.py diff --git a/chxtools/Circular_Average.py b/chxtools/Circular_Average.py deleted file mode 100644 index 01296b0..0000000 --- a/chxtools/Circular_Average.py +++ /dev/null @@ -1,158 +0,0 @@ -import numpy as np - - -def radial_grid(center, shape, pixel_size=None): - """Convert a cartesian grid (x,y) to the radius relative to some center - - Parameters - ---------- - center : tuple - point in image where r=0; may be a float giving subpixel precision. - Order is (rr, cc). - shape : tuple - Image shape which is used to determine the maximum extent of output - pixel coordinates. - Order is (rr, cc). - pixel_size : sequence, optional - The physical size of the pixels. 
- len(pixel_size) should be the same as len(shape) - defaults to (1,1) - - Returns - ------- - r : array - The distance of each pixel from `center` - Shape of the return value is equal to the `shape` input parameter - """ - - if pixel_size is None: - pixel_size = (1, 1) - - X, Y = np.meshgrid(pixel_size[1] * (np.arange(shape[1]) - center[1]), - pixel_size[0] * (np.arange(shape[0]) - center[0])) - return np.sqrt(X*X + Y*Y) - - -def bin_1D(x, y, nx=None, min_x=None, max_x=None): - """ - Bin the values in y based on their x-coordinates - - Parameters - ---------- - x : array - position - y : array - intensity - nx : integer, optional - number of bins to use defaults to default bin value - min_x : float, optional - Left edge of first bin defaults to minimum value of x - max_x : float, optional - Right edge of last bin defaults to maximum value of x - - Returns - ------- - edges : array - edges of bins, length nx + 1 - - val : array - sum of values in each bin, length nx - - count : array - The number of counts in each bin, length nx - """ - - # handle default values - if min_x is None: - min_x = np.min(x) - if max_x is None: - max_x = np.max(x) - if nx is None: - nx = int(max_x - min_x) - - # use a weighted histogram to get the bin sum - bins = np.linspace(start=min_x, stop=max_x, num=nx+1, endpoint=True) - val, _ = np.histogram(a=x, bins=bins, weights=y) - # use an un-weighted histogram to get the counts - count, _ = np.histogram(a=x, bins=bins) - # return the three arrays - return bins, val, count - -def bin_edges_to_centers(input_edges): - """ - Helper function for turning a array of bin edges into - an array of bin centers - - Parameters - ---------- - input_edges : array-like - N + 1 values which are the left edges of N bins - and the right edge of the last bin - - Returns - ------- - centers : ndarray - A length N array giving the centers of the bins - """ - input_edges = np.asarray(input_edges) - return (input_edges[:-1] + input_edges[1:]) * 0.5 - - -def 
circular_average(image, calibrated_center, threshold=0, nx=1500, - pixel_size=(1, 1), min_x=None, max_x=None, mask=None): - """Circular average of the the image data - The circular average is also known as the radial integration - Parameters - ---------- - image : array - Image to compute the average as a function of radius - calibrated_center : tuple - The center of the image in pixel units - argument order should be (row, col) - threshold : int, optional - Ignore counts above `threshold` - default is zero - nx : int, optional - number of bins in x - defaults is 100 bins - pixel_size : tuple, optional - The size of a pixel (in a real unit, like mm). - argument order should be (pixel_height, pixel_width) - default is (1, 1) - min_x : float, optional number of pixels - Left edge of first bin defaults to minimum value of x - max_x : float, optional number of pixels - Right edge of last bin defaults to maximum value of x - Returns - ------- - bin_centers : array - The center of each bin in R. shape is (nx, ) - ring_averages : array - Radial average of the image. shape is (nx, ). 
- """ - radial_val = radial_grid(calibrated_center, image.shape, pixel_size) - - - if mask is not None: - #maks = np.ones_like( image ) - mask = np.array( mask, dtype = bool) - binr = radial_val[mask] - image_mask = np.array( image )[mask] - - else: - binr = np.ravel( radial_val ) - image_mask = np.ravel(image) - - bin_edges, sums, counts = bin_1D( binr, - image_mask, - nx, - min_x=min_x, - max_x=max_x) - - #print (counts) - th_mask = counts > threshold - ring_averages = sums[th_mask] / counts[th_mask] - - bin_centers = bin_edges_to_centers(bin_edges)[th_mask] - - return bin_centers, ring_averages diff --git a/chxtools/XPCS_GiSAXS.py b/chxtools/XPCS_GiSAXS.py index d925a02..66f6fd5 100644 --- a/chxtools/XPCS_GiSAXS.py +++ b/chxtools/XPCS_GiSAXS.py @@ -1,4 +1,4 @@ -from chx_generic_functions import * +from .chx_generic_functions import * diff --git a/chxtools/XPCS_SAXS.py b/chxtools/XPCS_SAXS.py index a234f0e..9861a0f 100644 --- a/chxtools/XPCS_SAXS.py +++ b/chxtools/XPCS_SAXS.py @@ -1,4 +1,4 @@ -from chx_generic_functions import * +from .chx_generic_functions import * diff --git a/chxtools/chx_generic_functions.py b/chxtools/chx_generic_functions.py index 409842a..79afd54 100644 --- a/chxtools/chx_generic_functions.py +++ b/chxtools/chx_generic_functions.py @@ -1,4 +1,4 @@ -from chx_libs import * +from .chx_libs import * ##### #load data by databroker @@ -650,4 +650,4 @@ def save_arrays( data, label=None, dtype='array', filename=None, path=None): df.to_csv(filename) #print( 'The g2 of uid= %s is saved in %s with filename as g2-%s-%s.csv'%(uid, path, uid, CurTime)) - \ No newline at end of file + diff --git a/chxtools/develop.py b/chxtools/develop.py deleted file mode 100644 index 1dca6f5..0000000 --- a/chxtools/develop.py +++ /dev/null @@ -1,1259 +0,0 @@ -#################################################################################### -###some latest developed functions for chx beamline -##this is a dynamic collections -##Once functions are incorporated in 
skxray, those function will be deleted in this file. -############################################################################ - - - -from __future__ import absolute_import, division, print_function - -from databroker import DataBroker as db, get_images, get_table, get_events -from filestore.api import register_handler, deregister_handler -#from filestore.retrieve import _h_registry, _HANDLER_CACHE - -import numpy as np -import matplotlib as mpl -import matplotlib.pyplot as plt -from matplotlib.colors import LogNorm -import skbeam.core.roi as roi -from datetime import datetime - -import logging -import time -from math import isnan - - -import xray_vision -import xray_vision.mpl_plotting as mpl_plot -from xray_vision.mpl_plotting import speckle -from xray_vision.mask.manual_mask import ManualMask - -import skbeam.core.roi as roi - -import skbeam.core.correlation as corr -import skbeam.core.utils as utils - - - -import h5py -from filestore.retrieve import HandlerBase -from eiger_io.pims_reader import EigerImages - - -################################################### -##for radial average intensity, correct some bugs -################################################### -def circular_average(image, calibrated_center, threshold=-1, nx=None, - pixel_size=None, mask=None): - """Circular average of the the image data - The circular average is also known as the radial integration - Parameters - ---------- - image : array - Image to compute the average as a function of radius - calibrated_center : tuple - The center of the image in pixel units - argument order should be (row, col) - threshold : int, optional - Ignore counts above `threshold` - nx : int, optional - Number of bins in R. Defaults to 100 - pixel_size : tuple, optional - The size of a pixel (in a real unit, like mm). - argument order should be (pixel_height, pixel_width) - Returns - ------- - bin_centers : array - The center of each bin in R. 
shape is (nx, ) - ring_averages : array - Radial average of the image. shape is (nx, ). - """ - radial_val = utils.radial_grid(calibrated_center, image.shape, pixel_size ) - - if nx is None: - ps =np.min( pixel_size ) - max_x = np.max(radial_val)/ps - min_x = np.min(radial_val)/ps - nx = int(max_x - min_x) - #print (nx) - - if mask is None: mask =1 - - bin_edges, sums, counts = bin_1D(np.ravel(radial_val * mask ), - np.ravel(image * mask), nx) - th_mask = counts > threshold - ring_averages = sums[th_mask] / counts[th_mask] - - bin_centers = utils.bin_edges_to_centers(bin_edges)[th_mask] - - return bin_centers, ring_averages - - -def bin_1D(x, y, nx=None, min_x=None, max_x=None): - """ - Bin the values in y based on their x-coordinates - Parameters - ---------- - x : array - position - y : array - intensity - nx : integer, optional - number of bins to use defaults to default bin value - min_x : float, optional - Left edge of first bin defaults to minimum value of x - max_x : float, optional - Right edge of last bin defaults to maximum value of x - Returns - ------- - edges : array - edges of bins, length nx + 1 - val : array - sum of values in each bin, length nx - count : array - The number of counts in each bin, length nx - """ - - # handle default values - if min_x is None: - min_x = np.min(x) - if max_x is None: - max_x = np.max(x) - if nx is None: - nx = int(max_x - min_x) - - # use a weighted histogram to get the bin sum - bins = np.linspace(start=min_x, stop=max_x, num=nx+1, endpoint=True) - val, _ = np.histogram(a=x, bins=bins, weights=y) - # use an un-weighted histogram to get the counts - count, _ = np.histogram(a=x, bins=bins) - # return the three arrays - return bins, val, count - - -################################################### -##for eiger detector files -################################################### - -EIGER_MD_DICT = { - 'y_pixel_size': 'entry/instrument/detector/y_pixel_size', - 'x_pixel_size': 'entry/instrument/detector/x_pixel_size', - 
'detector_distance': 'entry/instrument/detector/detector_distance', - 'incident_wavelength': 'entry/instrument/beam/incident_wavelength', - 'frame_time': 'entry/instrument/detector/frame_time', - 'beam_center_x': 'entry/instrument/detector/beam_center_x', - 'beam_center_y': 'entry/instrument/detector/beam_center_y', - 'count_time': 'entry/instrument/detector/count_time', - 'pixel_mask': 'entry/instrument/detector/detectorSpecific/pixel_mask', -} - -class FixedEigerImages(EigerImages): - def __init__(self, path, metadata): - super().__init__(path) - self._metadata = metadata - - @property - def md(self): - return self._metadata - - @property - def dtype(self): - return self.pixel_type - - @property - def shape(self): - return self.frame_shape - -class LazyEigerHandler(HandlerBase): - specs = {'AD_EIGER'} | HandlerBase.specs - def __init__(self, fpath, frame_per_point, mapping=None): - # create pims handler - self.vals_dict = EIGER_MD_DICT.copy() - if mapping is not None: - self.vals_dict.update(mapping) - self._base_path = fpath - self.fpp = frame_per_point - - def __call__(self, seq_id): - import h5py - master_path = '{}_{}_master.h5'.format(self._base_path, seq_id) - md = {} - print('hdf5 path = %s' % master_path) - with h5py.File(master_path, 'r') as f: - md = {k: f[v].value for k, v in self.vals_dict.items()} - # the pixel mask from the eiger contains: - # 1 -- gap - # 2 -- dead - # 4 -- under-responsive - # 8 -- over-responsive - # 16 -- noisy - pixel_mask = md['pixel_mask'] - pixel_mask[pixel_mask>0] = 1 - pixel_mask[pixel_mask==0] = 2 - pixel_mask[pixel_mask==1] = 0 - pixel_mask[pixel_mask==2] = 1 - md['framerate'] = 1./md['frame_time'] - # TODO Return a multi-dimensional PIMS seq - return FixedEigerImages(master_path, md) - -deregister_handler('AD_EIGER') -#_HANDLER_CACHE.clear() -register_handler('AD_EIGER', LazyEigerHandler) - - -def print_attrs(name, obj): - print(name) - for key, val in obj.attrs.items(): - print(" %s: %s" % (key, val)) - - - 
###################################################
##for image process
###################################################

class Reverse_Coordinate(object):
    '''Wrap an image series so each frame is returned flipped along its
    vertical (row) axis; an optional mask is multiplied in first.
    '''
    def __init__(self, indexable, mask):
        self.indexable = indexable       # image series (ndarray stack or pims-like)
        self.mask = mask                 # multiplicative 2-D mask, or None
        self.shape = indexable.shape
        self.length = len(indexable)

    def __getitem__(self, key):
        if self.mask is not None:
            img = self.indexable[key] * self.mask
        else:
            img = self.indexable[key]
        # flip the row axis: axis 1 for a 3-D stack, axis 0 for a single frame
        if len(img.shape) == 3:
            return img[:, ::-1, :]
        if len(img.shape) == 2:
            return img[::-1, :]
        # BUG FIX: the original fell through with ``img_`` unbound and raised
        # NameError for any other dimensionality; return the frame unchanged.
        return img


class RemoveHotSpots(object):
    '''Wrap an image series, zeroing every pixel that exceeds ``threshold``
    in any frame of the series.
    '''
    def __init__(self, indexable, threshold=1E7):
        self.indexable = indexable
        self.threshold = threshold
        try:
            self.N = len(indexable)
        except TypeError:
            # pims-like wrappers in this file expose .length instead of __len__
            self.N = indexable.length
        self._mask = None                # cached hot-pixel mask

    def _get_mask(self, Ns=None, Ne=None):
        '''Scan frames [Ns:Ne) and build a 0/1 mask (0 marks hot pixels).'''
        mask = np.ones_like(np.array(self.indexable[0]))
        if Ns is None:
            Ns = 0
        if Ne is None:
            Ne = self.N
        for key in range(Ns, Ne):
            data = np.array(self.indexable[key])
            badp = np.where(data >= self.threshold)
            if len(badp[0]) != 0:
                mask[badp] = 0
        return mask

    def __getitem__(self, key):
        # BUG FIX: the original multiplied by an undefined global ``mask``
        # (NameError); compute the mask once on first access and cache it.
        if self._mask is None:
            self._mask = self._get_mask()
        return self.indexable[key] * self._mask


class Masker(object):
    '''Wrap an image series so each frame is returned multiplied by ``mask``.'''
    def __init__(self, indexable, mask):
        self.indexable = indexable
        self.mask = mask
        self.length = len(indexable)

    def __getitem__(self, key):
        return self.indexable[key] * self.mask


def view_image(imgsr, i):
    '''Show frame ``i`` of the series on a log color scale.'''
    fig, ax = plt.subplots()
    ax.imshow(imgsr[i], interpolation='nearest', cmap='viridis',
              origin='lower', norm=LogNorm(vmin=0.001, vmax=1e1))
    ax.set_title("Browse the Image Stack")
    plt.show()


def view_image_movie(imgsr, sleeps=1, ims=0, ime=1):
    '''Play frames [ims, ime) as a simple movie, pausing ``sleeps`` seconds
    between frames.'''
    fig, ax = plt.subplots()
    for i in range(ims, ime):
        ax.imshow(imgsr[i], interpolation='nearest', cmap='viridis',
                  origin='lower', norm=LogNorm(vmin=0.001, vmax=1e1))
        ax.set_title("images_%s" % i)
        time.sleep(sleeps)
        plt.draw()
        if i != ime - 1:
            ax.cla()
def average_img(imgs, Ns=None, Ne=None):
    '''Average the frames ``imgs[Ns:Ne]``.

    Parameters
    ----------
    imgs : sequence of 2-D arrays
        the image series
    Ns : int, optional
        index of the first frame; defaults to 0
    Ne : int, optional
        index past the last frame; defaults to len(imgs)

    Returns
    -------
    ave : 2-D float array
        pixel-wise mean of the selected frames

    e.g.,
        ave = average_img(imgs)
    '''
    import numpy as np
    ave = np.zeros_like(imgs[0], dtype=float)
    # BUG FIX: this default handling was commented out in the original, so the
    # documented call average_img(imgs) raised TypeError in range(None, None).
    if Ns is None:
        Ns = 0
    if Ne is None:
        Ne = len(imgs)
    if Ne > len(imgs):
        Ne = len(imgs)
    for i in range(Ns, Ne):
        ave += imgs[i]
    ave /= (Ne - Ns)
    return ave


###################################################
##for calculation time
###################################################

def run_time(t0):
    '''Calculate running time of a program

    Parameters
    ----------
    t0: time_string, t0=time.time()
        The start time

    Returns
    -------
    Print the running time

    One usage
    ---------
    t0 = time.time()
    .....(the running code)
    run_time(t0)
    '''
    elapsed_time = time.time() - t0
    print ('Total time: %.2f min' %(elapsed_time/60.))


###################################################
##for file operation
###################################################

def cpopen(filename=None, inDir=None):
    '''Load a cPickle file; return the data, or None if the file is missing.

    NOTE(review): unpickling is unsafe on untrusted input -- only use on
    files this pipeline wrote itself.
    '''
    import _pickle as cPickle
    import os
    if inDir is not None:
        filename = inDir + filename
    if os.path.isfile(filename):
        # BUG FIX: use a context manager so the handle is closed even if
        # cPickle.load raises (the original leaked it on error).
        with open(filename, 'rb') as fp:
            data = cPickle.load(fp)
        return data
    else:
        return None


##########################################
###Functions for GiSAXS
##########################################


def make_gisaxs_grid(qr_w=10, qz_w=12, dim_r=100, dim_z=120):
    '''Build a labelled grid of qr_w x qz_w boxes over a (dim_z, dim_r) image.

    Returns an int array of shape (dim_z, dim_r) where each box carries a
    distinct label 1, 2, 3, ...
    '''
    y, x = np.indices([dim_z, dim_r])
    # BUG FIX: the original divided by the undefined name ``qp_w``
    # (NameError); the parameter is ``qr_w``.
    Nr = int(dim_r / qr_w)
    Nz = int(dim_z / qz_w)
    noqs = Nr * Nz

    ind = 1
    for i in range(0, Nr):
        for j in range(0, Nz):
            y[qr_w * i: qr_w * (i + 1), qz_w * j: qz_w * (j + 1)] = ind
            ind += 1
    return y


###########################################
#for Q-map, convert pixel to Q
###########################################


def get_incident_angles(inc_x0, inc_y0, refl_x0, refl_y0, pixelsize=(75, 75), Lsd=5.0):
    ''' giving:
        incident beam center: bcenx, bceny
        reflected beam on detector: rcenx, rceny
        sample to detector distance: Lsd, in meters
        pixelsize: 75 um for Eiger4M detector
        get incident_angle (alphai), the tilt angle (phi)
    '''
    px, py = pixelsize
    phi = np.arctan2((refl_x0 - inc_x0) * px * 10**(-6),
                     (refl_y0 - inc_y0) * py * 10**(-6))
    alphai = np.arctan2((refl_y0 - inc_y0) * py * 10**(-6), Lsd) / 2.
    #thetai = np.arctan2( (rcenx - bcenx)*px *10**(-6), Lsd ) /2.  #??
    return alphai, phi


def get_reflected_angles(inc_x0, inc_y0, refl_x0, refl_y0, thetai=0.0,
                         pixelsize=(75, 75), Lsd=5.0, dimx=2070., dimy=2167.):
    ''' giving: incident beam center: bcenx, bceny
                reflected beam on detector: rcenx, rceny
                sample to detector distance: Lsd, in meters
                pixelsize: 75 um for Eiger4M detector
                detector image size: dimx = 2070, dimy = 2167 for Eiger4M
        get reflected angle alphaf (out of plane)
            reflected angle thetaf (in plane)
    '''
    alphai, phi = get_incident_angles(inc_x0, inc_y0, refl_x0, refl_y0,
                                      pixelsize, Lsd)
    print ('The incident_angle (alphai) is: %s'%(alphai* 180/np.pi))
    px, py = pixelsize
    # int() guards the float defaults (2070., 2167.): np.indices requires
    # integer dimensions.
    y, x = np.indices([int(dimy), int(dimx)])
    #alphaf = np.arctan2( (y-inc_y0)*py*10**(-6), Lsd )/2 - alphai
    alphaf = np.arctan2((y - inc_y0) * py * 10**(-6), Lsd) - alphai
    thetaf = np.arctan2((x - inc_x0) * px * 10**(-6), Lsd) / 2 - thetai
    return alphaf, thetaf, alphai, phi


def convert_gisaxs_pixel_to_q(inc_x0, inc_y0, refl_x0, refl_y0,
                              pixelsize=(75, 75), Lsd=5.0, dimx=2070., dimy=2167.,
                              thetai=0.0, lamda=1.0):
    ''' giving: incident beam center: bcenx, bceny
                reflected beam on detector: rcenx, rceny
                sample to detector distance: Lsd, in meters
                pixelsize: 75 um for Eiger4M detector
                detector image size: dimx = 2070, dimy = 2167 for Eiger4M
                wavelength: in angstrom
        get: qx, qy, q_parallel (qr), q_direction_z (qz), each scaled by
             2*pi/lamda
    '''
    alphaf, thetaf, alphai, phi = get_reflected_angles(
        inc_x0, inc_y0, refl_x0, refl_y0, thetai, pixelsize, Lsd, dimx, dimy)
    pref = 2 * np.pi / lamda

    qx = np.cos(alphaf) * np.cos(2 * thetaf) - np.cos(alphai) * np.cos(2 * thetai)
    qy_ = np.cos(alphaf) * np.sin(2 * thetaf) - np.cos(alphai) * np.sin(2 * thetai)
    qz_ = np.sin(alphaf) + np.sin(alphai)

    # rotate (qy_, qz_) by the tilt angle phi
    qy = qz_ * np.sin(phi) + qy_ * np.cos(phi)
    qz = qz_ * np.cos(phi) - qy_ * np.sin(phi)

    qr = np.sqrt(qx**2 + qy**2)

    return qx * pref, qy * pref, qr * pref, qz * pref
2*np.pi/lamda - - qx = np.cos( alphaf)*np.cos( 2*thetaf) - np.cos( alphai )*np.cos( 2*thetai) - qy_ = np.cos( alphaf)*np.sin( 2*thetaf) - np.cos( alphai )*np.sin ( 2*thetai) - qz_ = np.sin(alphaf) + np.sin(alphai) - - qy = qz_* np.sin( phi) + qy_*np.cos(phi) - qz = qz_* np.cos( phi) - qy_*np.sin(phi) - - qr = np.sqrt( qx**2 + qy**2 ) - - - return qx*pref , qy*pref , qr*pref , qz*pref - - - - -def get_qedge( qstart,qend,qwidth,noqs, ): - ''' DOCUMENT make_qlist( ) - give qstart,qend,qwidth,noqs - return a qedge by giving the noqs, qstart,qend,qwidth. - a qcenter, which is center of each qedge - KEYWORD: None ''' - import numpy as np - qcenter = np.linspace(qstart,qend,noqs) - #print ('the qcenter is: %s'%qcenter ) - qedge=np.zeros(2*noqs) - qedge[::2]= ( qcenter- (qwidth/2) ) #+1 #render even value - qedge[1::2]= ( qcenter+ qwidth/2) #render odd value - return qedge, qcenter - - -########################################### -#for plot Q-map -########################################### - -def get_qmap_label( qmap, qedge ): - import numpy as np - '''give a qmap and qedge to bin the qmap into a label array''' - edges = np.atleast_2d(np.asarray(qedge)).ravel() - label_array = np.digitize(qmap.ravel(), edges, right=False) - label_array = np.int_(label_array) - label_array = (np.where(label_array % 2 != 0, label_array, 0) + 1) // 2 - label_array = label_array.reshape( qmap.shape ) - return label_array - - - -def get_qzrmap(label_array_qz, label_array_qr, qz_center, qr_center ): - '''get qzrmap ''' - qzmax = label_array_qz.max() - label_array_qr_ = np.zeros( label_array_qr.shape ) - ind = np.where(label_array_qr!=0) - label_array_qr_[ind ] = label_array_qr[ ind ] + 1E4 #add some large number to qr - label_array_qzr = label_array_qz * label_array_qr_ - - #convert label_array_qzr to [1,2,3,...] 
- uqzr = np.unique( label_array_qzr )[1:] - - uqz = np.unique( label_array_qz )[1:] - uqr = np.unique( label_array_qr )[1:] - #print (uqzr) - label_array_qzr_ = np.zeros_like( label_array_qzr ) - newl = np.arange( 1, len(uqzr)+1) - - qzc =list(qz_center) * len( uqr ) - qrc= [ [qr_center[i]]*len( uqz ) for i in range(len( uqr )) ] - - for i, label in enumerate(uqzr): - #print (i, label) - label_array_qzr_.ravel()[ np.where( label_array_qzr.ravel() == label)[0] ] = newl[i] - - - return np.int_(label_array_qzr_), np.array( qzc ), np.concatenate(np.array(qrc )) - - - -def show_label_array_on_image(ax, image, label_array, cmap=None,norm=None, log_img=True,alpha=0.3, - imshow_cmap='gray', **kwargs): #norm=LogNorm(), - """ - This will plot the required ROI's(labeled array) on the image - Additional kwargs are passed through to `ax.imshow`. - If `vmin` is in kwargs, it is clipped to minimum of 0.5. - Parameters - ---------- - ax : Axes - The `Axes` object to add the artist too - image : array - The image array - label_array : array - Expected to be an unsigned integer array. 
0 is background, - positive integers label region of interest - cmap : str or colormap, optional - Color map to use for plotting the label_array, defaults to 'None' - imshow_cmap : str or colormap, optional - Color map to use for plotting the image, defaults to 'gray' - norm : str, optional - Normalize scale data, defaults to 'Lognorm()' - Returns - ------- - im : AxesImage - The artist added to the axes - im_label : AxesImage - The artist added to the axes - """ - ax.set_aspect('equal') - if log_img: - im = ax.imshow(image, cmap=imshow_cmap, interpolation='none',norm=LogNorm(norm),**kwargs) #norm=norm, - else: - im = ax.imshow(image, cmap=imshow_cmap, interpolation='none',norm=norm,**kwargs) #norm=norm, - - im_label = mpl_plot.show_label_array(ax, label_array, cmap=cmap, norm=norm, alpha=alpha, - **kwargs) # norm=norm, - - - return im, im_label - - - - - -def get_qr_tick_label( qr, label_array_qr, inc_x0): - ''' - Dec 16, 2015, Y.G.@CHX - get zticks,zticks_label - - Parameters: - - qr: 2-D array, qr of a gisaxs image (data) - label_array_qr: a labelled array of qr map, get by: - label_array_qr = get_qmap_label( qr, qz_edge) - Options: - - inc_x0: x-center of incident beam - Return: - rticks: list, r-tick positions in unit of pixel - rticks_label: list, r-tick positions in unit of real space - - Examples: - rticks,rticks_label = get_qr_tick_label( qr, label_array_qr) - - ''' - - rticks =[] - rticks_label = [] - num = len( np.unique( label_array_qr ) ) - for i in range( 1, num ): - ind = np.where( label_array_qr==i )[1] - tick = round( qr[label_array_qr==i].mean(),2) - if ind[0] < inc_x0 and ind[-1]>inc_x0: - - mean1 = int( (ind[np.where(ind < inc_x0)[0]]).mean() ) - mean2 = int( (ind[np.where(ind > inc_x0)[0]]).mean() ) - rticks.append( mean1) - rticks.append(mean2) - - rticks_label.append( tick ) - rticks_label.append( tick ) - - else: - mean = int( ind.mean() ) - rticks.append(mean) - rticks_label.append( tick ) - #print (rticks) - - return np.array(rticks), 
np.array(rticks_label) - -def get_qz_tick_label( qz, label_array_qz): - ''' - Dec 16, 2015, Y.G.@CHX - get zticks,zticks_label - - Parameters: - - qz: 2-D array, qz of a gisaxs image (data) - label_array_qz: a labelled array of qz map, get by: - label_array_qz = get_qmap_label( qz, qz_edge) - - Return: - zticks: list, z-tick positions in unit of pixel - zticks_label: list, z-tick positions in unit of real space - - Examples: - zticks,zticks_label = get_qz_tick_label( qz, label_array_qz) - - ''' - - num = len( np.unique( label_array_qz ) ) - zticks = np.array( [ int( np.where( label_array_qz==i )[0].mean() ) for i in range( 1,num ) ]) - zticks_label = np.array( [ round( qz[label_array_qz==i].mean(),2) for i in range( 1, num ) ]) - return zticks,zticks_label - -def show_qz(qz): - ''' - plot qz mape - - ''' - - - fig, ax = plt.subplots() - im=ax.imshow(qz, origin='lower' ,cmap='viridis',vmin=qz.min(),vmax= qz.max() ) - fig.colorbar(im) - ax.set_title( 'Q-z') - plt.show() - -def show_qr(qr): - ''' - plot qr mape - - ''' - fig, ax = plt.subplots() - im=ax.imshow(qr, origin='lower' ,cmap='viridis',vmin=qr.min(),vmax= qr.max() ) - fig.colorbar(im) - ax.set_title( 'Q-r') - plt.show() - -def show_alphaf(alphaf,): - ''' - plot alphaf mape - - ''' - - fig, ax = plt.subplots() - im=ax.imshow(alphaf*180/np.pi, origin='lower' ,cmap='viridis',vmin=-1,vmax= 1.5 ) - #im=ax.imshow(alphaf, origin='lower' ,cmap='viridis',norm= LogNorm(vmin=0.0001,vmax=2.00)) - fig.colorbar(im) - ax.set_title( 'alphaf') - plt.show() - - -def show_qzr_map( qr, qz, inc_x0, data=None, Nzline=10,Nrline=10 ): - - ''' - Dec 16, 2015, Y.G.@CHX - plot a qzr map of a gisaxs image (data) - - Parameters: - qr: 2-D array, qr of a gisaxs image (data) - qz: 2-D array, qz of a gisaxs image (data) - inc_x0: the incident beam center x - - Options: - data: 2-D array, a gisaxs image, if None, =qr+qz - Nzline: int, z-line number - Nrline: int, r-line number - - - Return: - zticks: list, z-tick positions in unit of pixel - 
zticks_label: list, z-tick positions in unit of real space - rticks: list, r-tick positions in unit of pixel - rticks_label: list, r-tick positions in unit of real space - - - Examples: - - ticks = show_qzr_map( qr, qz, inc_x0, data = None, Nzline=10, Nrline= 10 ) - ticks = show_qzr_map( qr,qz, inc_x0, data = avg_imgmr, Nzline=10, Nrline=10 ) - ''' - - - import matplotlib.pyplot as plt - import copy - import matplotlib.cm as mcm - - - - cmap='viridis' - _cmap = copy.copy((mcm.get_cmap(cmap))) - _cmap.set_under('w', 0) - - - - qr_start, qr_end, qr_num = qr.min(),qr.max(), Nzline - qz_start, qz_end, qz_num = qz.min(),qz.max(), Nrline - qr_edge, qr_center = get_qedge(qr_start , qr_end, ( qr_end- qr_start)/(qr_num+100), qr_num ) - qz_edge, qz_center = get_qedge( qz_start, qz_end, (qz_end - qz_start)/(qz_num+100 ) , qz_num ) - - label_array_qz = get_qmap_label( qz, qz_edge) - label_array_qr = get_qmap_label( qr, qr_edge) - - labels_qz, indices_qz = roi.extract_label_indices( label_array_qz ) - labels_qr, indices_qr = roi.extract_label_indices( label_array_qr ) - num_qz = len(np.unique( labels_qz )) - num_qr = len(np.unique( labels_qr )) - - - - fig, ax = plt.subplots() - if data is None: - data=qr+qz - im = ax.imshow(data, cmap='viridis',origin='lower') - else: - im = ax.imshow(data, cmap='viridis',origin='lower', norm= LogNorm(vmin=0.001, vmax=1e1)) - - imr=ax.imshow(label_array_qr, origin='lower' ,cmap='viridis', vmin=0.5,vmax= None )#,interpolation='nearest',) - imz=ax.imshow(label_array_qz, origin='lower' ,cmap='viridis', vmin=0.5,vmax= None )#,interpolation='nearest',) - - caxr = fig.add_axes([0.81, 0.1, 0.03, .8]) #x,y, width, heigth - - cba = fig.colorbar(im, cax=caxr ) - ax.set_xlabel(r'$q_r$', fontsize=18) - ax.set_ylabel(r'$q_z$',fontsize=18) - - zticks,zticks_label = get_qz_tick_label(qz,label_array_qz) - #rticks,rticks_label = get_qr_tick_label(label_array_qr,inc_x0) - - rticks,rticks_label = zip(*sorted( zip( *get_qr_tick_label( qr, label_array_qr, inc_x0) 
)) ) - - stride = int(len(zticks)/7) - ax.set_yticks( zticks[::stride] ) - yticks = zticks_label[::stride] - ax.set_yticklabels(yticks, fontsize=9) - - stride = int(len(rticks)/7) - ax.set_xticks( rticks[::stride] ) - xticks = rticks_label[::stride] - ax.set_xticklabels(xticks, fontsize=9) - - - ax.set_title( 'Q-zr_Map', y=1.03,fontsize=18) - plt.show() - return zticks,zticks_label,rticks,rticks_label - - - - - -def show_qzr_roi( data, rois, inc_x0, ticks, alpha=0.3): - - ''' - Dec 16, 2015, Y.G.@CHX - plot a qzr map of a gisaxs image with rois( a label array) - - Parameters: - data: 2-D array, a gisaxs image - rois: 2-D array, a label array - inc_x0: the incident beam center x - ticks: zticks, zticks_label, rticks, rticks_label = ticks - zticks: list, z-tick positions in unit of pixel - zticks_label: list, z-tick positions in unit of real space - rticks: list, r-tick positions in unit of pixel - rticks_label: list, r-tick positions in unit of real space - - Options: - alpha: transparency of the label array on top of data - - Return: - a plot of a qzr map of a gisaxs image with rois( a label array) - - - Examples: - show_qzr_roi( avg_imgr, box_maskr, inc_x0, ticks) - - ''' - - - - #import matplotlib.pyplot as plt - #import copy - #import matplotlib.cm as mcm - - #cmap='viridis' - #_cmap = copy.copy((mcm.get_cmap(cmap))) - #_cmap.set_under('w', 0) - zticks, zticks_label, rticks, rticks_label = ticks - avg_imgr, box_maskr = data, rois - num_qzr = len(np.unique( box_maskr)) -1 - fig, ax = plt.subplots(figsize=(8,8)) - ax.set_title("ROI--Labeled Array on Data") - im,im_label = show_label_array_on_image(ax, avg_imgr, box_maskr, imshow_cmap='viridis', - cmap='Paired', alpha=alpha, - vmin=0.01, vmax=30. 
, origin="lower") - - - for i in range( 1, num_qzr+1 ): - ind = np.where( box_maskr == i)[1] - indz = np.where( box_maskr == i)[0] - c = '%i'%i - y_val = int( indz.mean() ) - - #print (ind[0], ind[1]) - if ind[0] < inc_x0 and ind[-1]>inc_x0: - x_val1 = int( (ind[np.where(ind < inc_x0)[0]]).mean() ) - x_val2 = int( (ind[np.where(ind > inc_x0)[0]]).mean() ) - ax.text(x_val1, y_val, c, va='center', ha='center') - ax.text(x_val2, y_val, c, va='center', ha='center') - - else: - x_val = int( ind.mean() ) - #print (xval, y) - ax.text(x_val, y_val, c, va='center', ha='center') - - #print (x_val1,x_val2) - - stride = int(len(zticks)/7) - ax.set_yticks( zticks[::stride] ) - yticks = zticks_label[::stride] - ax.set_yticklabels(yticks, fontsize=9) - - stride = int(len(rticks)/7) - ax.set_xticks( rticks[::stride] ) - xticks = rticks_label[::stride] - ax.set_xticklabels(xticks, fontsize=9) - - #caxr = fig.add_axes([0.95, 0.1, 0.03, .8]) #x,y, width, heigth - #cba = fig.colorbar(im_label, cax=caxr ) - - #fig.colorbar(im_label, shrink =.85) - fig.colorbar(im, shrink =.82) - ax.set_xlabel(r'$q_r$', fontsize=22) - ax.set_ylabel(r'$q_z$',fontsize=22) - - plt.show() - - - -######################## -# get one-d of I(q) as a function of qr for different qz -##################### - - - - -def get_1d_qr( data, Qr,Qz, qr, qz, inc_x0, mask=None, show_roi=True, ticks=None, alpha=0.3 ): - ''' - plot one-d of I(q) as a function of qr for different qz - data: a dataframe - Qr: info for qr, = qr_start , qr_end, qr_width, qr_num - Qz: info for qz, = qz_start, qz_end, qz_width , qz_num - qr: qr-map - qz: qz-map - inc_x0: x-center of incident beam - mask: a mask for qr-1d integration - show_roi: boolean, if ture, show the interest ROI - ticks: ticks for the plot, = zticks, zticks_label, rticks, rticks_label - alpha: transparency of ROI - Return: qr_1d, a dict, with keys as qz number - qr_1d[key]: - Plot 1D cureve as a function of Qr for each Qz - - Examples: - #to make two-qz, from 0.018 to 0.046, 
width as 0.008, - qz_width = 0.008 - qz_start = 0.018 + qz_width/2 - qz_end = 0.046 - qz_width/2 - qz_num= 2 - - - #to make one-qr, from 0.02 to 0.1, and the width is 0.1-0.012 - qr_width = 0.1-0.02 - qr_start = 0.02 + qr_width /2 - qr_end = 0.01 - qr_width /2 - qr_num = 1 - - Qr = [qr_start , qr_end, qr_width, qr_num] - Qz= [qz_start, qz_end, qz_width , qz_num ] - new_mask[ :, 1020:1045] =0 - ticks = show_qzr_map( qr,qz, inc_x0, data = avg_imgmr, Nzline=10, Nrline=10 ) - qx, qy, qr, qz = convert_gisaxs_pixel_to_q( inc_x0, inc_y0,refl_x0,refl_y0, lamda=lamda, Lsd=Lsd ) - - qr_1d = get_1d_qr( avg_imgr, Qr, Qz, qr, qz, inc_x0, new_mask, True, ticks, .8) - - - ''' - - - qr_start , qr_end, qr_width, qr_num =Qr - qz_start, qz_end, qz_width , qz_num =Qz - qr_edge, qr_center = get_qedge(qr_start , qr_end, qr_width, qr_num ) - qz_edge, qz_center = get_qedge( qz_start, qz_end, qz_width , qz_num ) - - print ('The qr_edge is: %s\nThe qr_center is: %s'%(qr_edge, qr_center)) - print ('The qz_edge is: %s\nThe qz_center is: %s'%(qz_edge, qz_center)) - label_array_qr = get_qmap_label( qr, qr_edge) - - if show_roi: - label_array_qz0 = get_qmap_label( qz , qz_edge) - label_array_qzr0,qzc0,qrc0 = get_qzrmap(label_array_qz0, label_array_qr,qz_center, qr_center ) - - if mask is not None:label_array_qzr0 *= mask - #data_ = data*label_array_qzr0 - show_qzr_roi( data,label_array_qzr0, inc_x0, ticks, alpha) - - fig, ax = plt.subplots() - qr_1d ={} - for i,qzc_ in enumerate(qz_center): - - #print (i,qzc_) - label_array_qz = get_qmap_label( qz, qz_edge[i*2:2*i+2]) - #print (qzc_, qz_edge[i*2:2*i+2]) - label_array_qzr,qzc,qrc = get_qzrmap(label_array_qz, label_array_qr,qz_center, qr_center ) - #print (np.unique(label_array_qzr )) - if mask is not None:label_array_qzr *= mask - roi_pixel_num = np.sum( label_array_qzr, axis=0) - qr_ = qr *label_array_qzr - data_ = data*label_array_qzr - qr_ave = np.sum( qr_, axis=0)/roi_pixel_num - data_ave = np.sum( data_, axis=0)/roi_pixel_num - qr_1d[i]= 
[qr_ave, data_ave] - ax.plot( qr_ave, data_ave, '--o', label= 'qz= %f'%qzc_) - - - #ax.set_xlabel( r'$q_r$', fontsize=15) - ax.set_xlabel('$q $'r'($\AA^{-1}$)', fontsize=18) - ax.set_ylabel('$Intensity (a.u.)$', fontsize=18) - ax.set_yscale('log') - #ax.set_xscale('log') - ax.set_xlim( qr.max(),qr.min() ) - ax.legend(loc='best') - return qr_1d - -def interp_zeros( data ): - from scipy.interpolate import interp1d - gf = data.ravel() - indice, = gf.nonzero() - start, stop = indice[0], indice[-1]+1 - dx,dy = data.shape - x=np.arange( dx*dy ) - f = interp1d(x[indice], gf[indice]) - gf[start:stop] = f(x[start:stop]) - return gf.reshape([dx,dy]) - -#GiSAXS End -############################### - - - - - - - -#import numpy as np - -#from . import utils as core -#from . import roi - - - -##################### -##old one-time -##### - -from lmfit import minimize, Model, Parameters - -logger = logging.getLogger(__name__) - - -def multi_tau_auto_corr(num_levels, num_bufs, labels, images): - ##comments, please add start_image, end_image, the default as None - - - from skxray.core import roi - from skxray.core import utils as core - - """ - This function computes one-time correlations. - It uses a scheme to achieve long-time correlations inexpensively - by downsampling the data, iteratively combining successive frames. - The longest lag time computed is num_levels * num_bufs. 
- Parameters - ---------- - num_levels : int - how many generations of downsampling to perform, i.e., - the depth of the binomial tree of averaged frames - num_bufs : int, must be even - maximum lag step to compute in each generation of - downsampling - labels : array - labeled array of the same shape as the image stack; - each ROI is represented by a distinct label (i.e., integer) - images : iterable of 2D arrays - dimensions are: (rr, cc) - Returns - ------- - g2 : array - matrix of normalized intensity-intensity autocorrelation - shape (num_levels, number of labels(ROI)) - lag_steps : array - delay or lag steps for the multiple tau analysis - shape num_levels - Notes - ----- - The normalized intensity-intensity time-autocorrelation function - is defined as - :math :: - g_2(q, t') = \frac{ }{^2} - ; t' > 0 - Here, I(q, t) refers to the scattering strength at the momentum - transfer vector q in reciprocal space at time t, and the brackets - <...> refer to averages over time t. The quantity t' denotes the - delay time - This implementation is based on code in the language Yorick - by Mark Sutton, based on published work. [1]_ - References - ---------- - .. [1] D. Lumma, L. B. Lurio, S. G. J. Mochrie and M. Sutton, - "Area detector based photon correlation in the regime of - short data batches: Data reduction for dynamic x-ray - scattering," Rev. Sci. Instrum., vol 70, p 3274-3289, 2000. - """ - # In order to calculate correlations for `num_bufs`, images must be - # kept for up to the maximum lag step. These are stored in the array - # buffer. This algorithm only keeps number of buffers and delays but - # several levels of delays number of levels are kept in buf. Each - # level has twice the delay times of the next lower one. To save - # needless copying, of cyclic storage of images in buf is used. 
- - if num_bufs % 2 != 0: - raise ValueError("number of channels(number of buffers) in " - "multiple-taus (must be even)") - - if hasattr(images, 'frame_shape'): - # Give a user-friendly error if we can detect the shape from pims. - if labels.shape != images.frame_shape: - raise ValueError("Shape of the image stack should be equal to" - " shape of the labels array") - - # get the pixels in each label - label_mask, pixel_list = roi.extract_label_indices(labels) - - num_rois = np.max(label_mask) - - # number of pixels per ROI - num_pixels = np.bincount(label_mask, minlength=(num_rois+1)) - num_pixels = num_pixels[1:] - - if np.any(num_pixels == 0): - raise ValueError("Number of pixels of the required roi's" - " cannot be zero, " - "num_pixels = {0}".format(num_pixels)) - - # G holds the un normalized auto-correlation result. We - # accumulate computations into G as the algorithm proceeds. - G = np.zeros(((num_levels + 1)*num_bufs/2, num_rois), - dtype=np.float64) - - # matrix of past intensity normalizations - past_intensity_norm = np.zeros(((num_levels + 1)*num_bufs/2, num_rois), - dtype=np.float64) - - # matrix of future intensity normalizations - future_intensity_norm = np.zeros(((num_levels + 1)*num_bufs/2, num_rois), - dtype=np.float64) - - # Ring buffer, a buffer with periodic boundary conditions. - # Images must be keep for up to maximum delay in buf. - buf = np.zeros((num_levels, num_bufs, np.sum(num_pixels)), - dtype=np.float64) - - # to track processing each level - track_level = np.zeros(num_levels) - - # to increment buffer - cur = np.ones(num_levels, dtype=np.int64) - - # to track how many images processed in each level - img_per_level = np.zeros(num_levels, dtype=np.int64) - - start_time = time.time() # used to log the computation time (optionally) - - for n, img in enumerate(images): - - cur[0] = (1 + cur[0]) % num_bufs # increment buffer - - # Put the image into the ring buffer. 
- buf[0, cur[0] - 1] = (np.ravel(img))[pixel_list] - - # Compute the correlations between the first level - # (undownsampled) frames. This modifies G, - # past_intensity_norm, future_intensity_norm, - # and img_per_level in place! - _process(buf, G, past_intensity_norm, - future_intensity_norm, label_mask, - num_bufs, num_pixels, img_per_level, - level=0, buf_no=cur[0] - 1) - - # check whether the number of levels is one, otherwise - # continue processing the next level - processing = num_levels > 1 - - # Compute the correlations for all higher levels. - level = 1 - while processing: - if not track_level[level]: - track_level[level] = 1 - processing = False - else: - prev = 1 + (cur[level - 1] - 2) % num_bufs - cur[level] = 1 + cur[level] % num_bufs - - buf[level, cur[level] - 1] = (buf[level - 1, prev - 1] + - buf[level - 1, - cur[level - 1] - 1])/2 - - # make the track_level zero once that level is processed - track_level[level] = 0 - - # call the _process function for each multi-tau level - # for multi-tau levels greater than one - # Again, this is modifying things in place. See comment - # on previous call above. - _process(buf, G, past_intensity_norm, - future_intensity_norm, label_mask, - num_bufs, num_pixels, img_per_level, - level=level, buf_no=cur[level]-1,) - level += 1 - - # Checking whether there is next level for processing - processing = level < num_levels - - # ending time for the process - end_time = time.time() - - logger.info("Processing time for {0} images took {1} seconds." - "".format(n, (end_time - start_time))) - - # the normalization factor - if len(np.where(past_intensity_norm == 0)[0]) != 0: - g_max = np.where(past_intensity_norm == 0)[0][0] - else: - g_max = past_intensity_norm.shape[0] - - # g2 is normalized G - g2 = (G[:g_max] / (past_intensity_norm[:g_max] * - future_intensity_norm[:g_max])) - - # Convert from num_levels, num_bufs to lag frames. 
- tot_channels, lag_steps = core.multi_tau_lags(num_levels, num_bufs) - lag_steps = lag_steps[:g_max] - - return g2, lag_steps - - -def _process(buf, G, past_intensity_norm, future_intensity_norm, - label_mask, num_bufs, num_pixels, img_per_level, level, buf_no): - """ - Internal helper function. This modifies inputs in place. - This helper function calculates G, past_intensity_norm and - future_intensity_norm at each level, symmetric normalization is used. - Parameters - ---------- - buf : array - image data array to use for correlation - G : array - matrix of auto-correlation function without - normalizations - past_intensity_norm : array - matrix of past intensity normalizations - future_intensity_norm : array - matrix of future intensity normalizations - label_mask : array - labels of the required region of interests(roi's) - num_bufs : int, even - number of buffers(channels) - num_pixels : array - number of pixels in certain roi's - roi's, dimensions are : [number of roi's]X1 - img_per_level : array - to track how many images processed in each level - level : int - the current multi-tau level - buf_no : int - the current buffer number - Notes - ----- - :math :: - G = - :math :: - past_intensity_norm = - :math :: - future_intensity_norm = - """ - img_per_level[level] += 1 - - # in multi-tau correlation other than first level all other levels - # have to do the half of the correlation - if level == 0: - i_min = 0 - else: - i_min = num_bufs//2 - - for i in range(i_min, min(img_per_level[level], num_bufs)): - t_index = level*num_bufs/2 + i - - delay_no = (buf_no - i) % num_bufs - - past_img = buf[level, delay_no] - future_img = buf[level, buf_no] - - # get the matrix of auto-correlation function without normalizations - tmp_binned = (np.bincount(label_mask, - weights=past_img*future_img)[1:]) - G[t_index] += ((tmp_binned / num_pixels - G[t_index]) / - (img_per_level[level] - i)) - - # get the matrix of past intensity normalizations - pi_binned = 
(np.bincount(label_mask, - weights=past_img)[1:]) - past_intensity_norm[t_index] += ((pi_binned/num_pixels - - past_intensity_norm[t_index]) / - (img_per_level[level] - i)) - - # get the matrix of future intensity normalizations - fi_binned = (np.bincount(label_mask, - weights=future_img)[1:]) - future_intensity_norm[t_index] += ((fi_binned/num_pixels - - future_intensity_norm[t_index]) / - (img_per_level[level] - i)) - - return None # modifies arguments in place! - - - - -