getting lots of fixing done
Andrew McCluskey committed Aug 4, 2020
1 parent d3da0f2 commit 975f19e
Showing 8 changed files with 410 additions and 331 deletions.
5 changes: 2 additions & 3 deletions docs/source/conf.py
@@ -23,10 +23,9 @@
copyright = '2020, Andrew R. McCluskey'
author = 'Andrew R. McCluskey'

# The short X.Y version
version = '0.1'
version = '0.0.26'
# The full version, including alpha/beta/rc tags
release = '0.0.1'
release = '0.0.26'


# -- General configuration ---------------------------------------------------
74 changes: 62 additions & 12 deletions docs/source/liquid_surface.ipynb
@@ -99,24 +99,74 @@
},
{
"cell_type": "code",
"execution_count": 5,
"execution_count": 51,
"metadata": {},
"outputs": [],
"source": [
"a = np.zeros(10)\n",
"a[3] = 1"
]
},
{
"cell_type": "code",
"execution_count": 56,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"True"
]
},
"execution_count": 56,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"(a > 0).any()"
]
},
{
"cell_type": "code",
"execution_count": 41,
"metadata": {},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAXcAAACpCAYAAADQg30VAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjAsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8GearUAAAKZ0lEQVR4nO3dYazd9V3H8feHW+jcHEIHNg1thGmTpSauzhtgcQ9wZNoRIzNZCMRkjanWByyZiYkpMXH6TB8oukSJNRIwURhOFxpCROiW7JGD4hBbsOMyIbQp1G2Ai2YdLV8fnF/ryb3n2t577rmn/Z33K/nn/P+//++e8z3f9H76v797zj2pKiRJfbls2gVIktae4S5JHTLcJalDhrskdchwl6QOGe6S1KGJhXuSXUmOJllIsm9SjyNJWiqTeJ17kjngm8AngGPAM8BdVfXCmj+YJGmJSV253wgsVNW3quoHwMPA7RN6LEnSIpMK9+uA14aOj7UxSdI62DCtB06yF9gLMMfcz7yXK6dViiRdkr7Hm9+uqmtHnZtUuB8Htg0db21j51TVfmA/wJXZVDfl1gmVIkl9eqq+9Opy5ya1LPMMsD3JDUmuAO4EDkzosSRJi0zkyr2qTif5LPAEMAfcX1VHJvFYkqSlJrbmXlWPA49P6v4lScvzHaqS1CHDXZI6ZLhLUocMd0nqkOEuSR0y3CWpQ4a7JHXIcJekDhnuktQhw12SOmS4S1KHDHdJ6pDhLkkdMtwlqUOGuyR1yHCXpA4Z7pLUIcNdkjpkuEtShwx3SeqQ4S5JHTLcJalDhrskdchwl6QOGe6S1CHDXZI6tGGcL07yCvA94Axwuqrmk2wCvghcD7wC3FFVb45XpiRpJdbiyv3nqmpnVc23433AwaraDhxsx5KkdTSJZZnbgQfb/oPApybwGJKk/8e44V7APyV5NsneNra5qk60/deBzaO+MMneJIeSHHqHU2OWIUkaNtaaO/Cxqjqe5EeBJ5P8+/DJqqokNeoLq2o/sB/gymwaOUeStDpjXblX1fF2exL4MnAj8EaSLQDt9uS4RUqSVmbV4Z7kfUnef3Yf+HngMHAA2N2m7QYeHbdISdLKjLMssxn4cpKz9/O3VfWPSZ4BHkmyB3gVuGP8MiVJK7HqcK+qbwEfHjH+HeDWcYqSJI3Hd6hKUocMd0nqkOEuSR0y3CWpQ4a7JHXIcJekDo375wekbl22cwc1F3LkZd79/venXY60Il65SyNkwwaO/tr7Ofrr7+Oya6+ZdjnSinnlLo1Qp0/zofveohLOvO6fR9Klx3CXlnHmyNFplyCtmssyktQhw11azmVzg026BLksIy0jlw++PeoH70L5eTK6tBju0jLqlB//qEuXyzKS1CHDXZI6ZLhLUocMd0nqkOEuSR0y3CWpQ4a7JHXIcJekDhnuktQhw12SOmS4S1KHzhvuSe5PcjLJ4aGxTUmeTPJSu726jSfJF5IsJHk+yUcmWbwkabQLuXJ/ANi1aGwfcLCqtgMH2zHAJ4HtbdsL3Lc2ZUqSVuK84V5VXwO+u2j4duDBtv8g8Kmh8b+ugX8GrkqyZa2KlSRdmNWuuW+uqhNt/3Vgc9u/DnhtaN6xNiZJWkdj/0K1qgpY8ScZJNmb5FCSQ+/g382WpLW02nB/4+xyS7s9+/Hwx4FtQ/O2trElqmp/Vc1X1fzlbFxlGZKkUVYb7geA3W1/N/Do0Phn2qtmbgbeHlq+kSStk/N+zF6Sh4BbgGuSHAM+D/wB8EiSPcCrwB1t+uPAbcAC8D/Ar06gZknSeZw33KvqrmVO3TpibgF3j1uUJGk8vkNVkjpkuEtShwx3SeqQ4S5JHTLcJalDhrskdchwl6QOGe6S1CHDXZI6ZLhLUocMd0nqkOEuSR0y3CWpQ4a7JHXIcJekDhnuktQhw12SOmS4S1KHDHdJ6pDhLkkdMtwlqUOGuyR1yHCXpA4Z7pLUIcNdkjp03nBPcn+Sk0kOD439XpLjSZ5r221D5+5JspDkaJJfmFThkqTlXciV+wPArhHj91bVzrY9DpBkB3An8JPta/48ydxaFStJujDnDfeq+hrw3Qu8v9uBh6vqVFX9B7AA3DhGfZKkVRhnzf2zSZ5vyzZXt7HrgNeG5hxrY5KkdbTacL8P+HFgJ3AC+KOV3kGSvUkOJTn0DqdWWYYkaZRVhXtVvVFVZ6rqXeAv+b+ll+PAtqGpW9vYqPvYX1XzVTV/ORtXU4YkaRmrCvckW4YOfxk4+0qaA8CdSTYmuQHYDjw9XomSpJXacL4JSR4CbgGuSXIM+DxwS5KdQAGvAL8BUFVHkjwCvACcBu6uqjOTKV2StJxU1bRr4Mpsqpty67TLkKRLylP1pWeran7UOd+hKkkdMtwlqUOGuyR1yHCXpA4Z7pLUIcNdkjpkuEtShwx3SeqQ4S5JHTLcJalDhrskdchwl6QOGe6S1CHDXZI6ZLhLUocMd0nqkOEuSR0y3CWpQ4a7JHXIcJekDhnuktQhw12SOmS4S1KHDHdJ6pDhLkkdMtwlqUOpqmnXQJL/BP4b+Pa0a7mIXIP9WMyeLGVPlpqlnvxYVV076sRFEe4ASQ5V1fy067hY2I+l7MlS9mQpezLgsowkdchwl6QOXUzhvn/aBVxk7MdS9mQpe7KUPeEiWnOXJK2di+nKXZK0RqYe7kl2JTmaZCHJvmnXs16S3J/kZJLDQ2ObkjyZ5KV2e3UbT5IvtB49n+Qj06t8MpJsS/LVJC8kOZLkc218lnvyniRPJ/nX1pPfb+M3JPl6e+5fTHJFG9/Yjhfa+eunWf8kJZlL8o0kj7Xjme/JYlMN9yRzwJ8BnwR2AHcl2THNmtbRA8CuRWP7gINVtR042I5h0J/tbdsL3LdONa6n08BvVdUO4Gbg7vZvYZZ7cgr4eFV9GNgJ7EpyM/CHwL1V9RPAm8CeNn8P8GYbv7fN69XngBeHju3JYlU1tQ34KPDE0PE9wD3TrGmdn//1wOGh46PAlra/BTja9v8CuGvUvF434FHgE/bk3PN7L/AvwE0M3qCzoY2f+x4CngA+2vY3tHmZdu0T6MVWBv/Rfxx4DMis92TUNu1lmeuA14aOj7WxWbW5qk60/deBzW1/pvrUfnT+aeDrzHhP2vLDc8BJ4EngZeCtqjrdpgw/73M9aeffBj6wvhWviz8Bfht4tx1/AHuyxLTDXcuowaXGzL2UKckPA38P/GZV/dfwuVnsSVWdqaqdDK5WbwQ+NOWSpirJLwInq+rZaddysZt2uB8Htg0db21js+qNJFsA2u3JNj4TfUpyOYNg/5uq+oc2PNM9Oauq3gK+ymDJ4aokG9qp4ed9rift/I8A31nnUiftZ4FfSvIK8DCDpZk/ZbZ7MtK0w/0ZYHv7TfcVwJ3AgSnXNE0HgN1tfzeDdeez459prxC5GXh7aKmiC0kC/BXwYlX98dCpWe7JtUmuavs/xOB3EC8yCPlPt2mLe3K2V58GvtJ+2ulGVd1TVVur6noGefGVqvoVZrgny5r2oj9wG
/BNBmuJvzPtetbxeT8EnADeYbBGuIfBWuBB4CXgKWBTmxsGryp6Gfg3YH7a9U+gHx9jsOTyPPBc226b8Z78FPCN1pPDwO+28Q8CTwMLwN8BG9v4e9rxQjv/wWk/hwn35xbgMXsyevMdqpLUoWkvy0iSJsBwl6QOGe6S1CHDXZI6ZLhLUocMd0nqkOEuSR0y3CWpQ/8LC5k1I5oGS1sAAAAASUVORK5CYII=\n",
"text/plain": [
"<Figure size 432x288 with 1 Axes>"
]
},
"metadata": {
"needs_background": "light"
},
"output_type": "display_data"
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"The file: refl_1.dat contains 61 images from q = 0.01 to q = 0.04.\n",
"The file: refl_2.dat contains 15 images from q = 0.03 to q = 0.1.\n",
"The file: refl_3.dat contains 14 images from q = 0.07 to q = 0.2.\n",
"The file: refl_4.dat contains 19 images from q = 0.16 to q = 0.34.\n",
"The file: refl_5.dat contains 16 images from q = 0.3 to q = 0.6.\n"
"[[0. 0. 0. ... 0. 0. 0.]\n",
" [0. 0. 0. ... 0. 0. 0.]\n",
" [0. 0. 0. ... 0. 0. 0.]\n",
" ...\n",
" [0. 0. 0. ... 0. 0. 0.]\n",
" [0. 0. 0. ... 0. 0. 0.]\n",
" [0. 0. 0. ... 0. 0. 0.]]\n",
"94965\n"
]
}
],
"source": [
"for i in refl.scans:\n",
" print(i)"
" im = Image(i.data[\"file\"][0], i.data, i.metadata)\n",
" plt.imshow(im.n)\n",
" plt.show()\n",
" print(im.n)\n",
" print(im.n.size)\n",
" break"
]
},
{
@@ -344,11 +394,11 @@
"name": "stderr",
"output_type": "stream",
"text": [
"100%|██████████| 61/61 [00:44<00:00, 1.36it/s]\n",
"100%|██████████| 15/15 [00:11<00:00, 1.36it/s]\n",
"100%|██████████| 14/14 [00:12<00:00, 1.11it/s]\n",
"100%|██████████| 19/19 [00:15<00:00, 1.21it/s]\n",
"100%|██████████| 16/16 [00:11<00:00, 1.45it/s]\n"
"100%|██████████| 61/61 [00:44<00:00, 1.39it/s]\n",
"100%|██████████| 15/15 [00:10<00:00, 1.49it/s]\n",
"100%|██████████| 14/14 [00:09<00:00, 1.51it/s]\n",
"100%|██████████| 19/19 [00:12<00:00, 1.54it/s]\n",
"100%|██████████| 16/16 [00:10<00:00, 1.51it/s]\n"
]
}
],
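
The changed notebook cell above is easier to read without the JSON escaping. A minimal restatement is given below; it assumes that `refl` has been built earlier in the notebook, that the `Image` class lives at the module path shown (an assumption, since the import is not part of this diff), and that `im.n` is the raw pixel array, as the printed output above suggests.

import matplotlib.pyplot as plt
from islatu.image import Image  # assumed module path; the import is not shown in this diff

# `refl` is constructed earlier in the notebook and holds one scan per .dat file.
for scan in refl.scans:
    # Build an Image from the first file of the scan, as in the cell above.
    im = Image(scan.data["file"][0], scan.data, scan.metadata)
    plt.imshow(im.n)         # display the raw detector frame
    plt.show()
    print((im.n > 0).any())  # same check as the (a > 0).any() cell: any non-zero pixels?
    print(im.n)              # the pixel array, mostly zeros away from the specular peak
    print(im.n.size)         # total pixel count, e.g. 94965 in the output above
    break                    # only the first image of the first scan is inspected here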
106 changes: 48 additions & 58 deletions islatu/background.py
@@ -1,7 +1,8 @@
"""
Background substraction is a necssary component of reflectometry reduction, where the background scattering is removed from the reflected intensity.
Background subtraction is a necessary component of reflectometry reduction,
where the background scattering is removed from the reflected intensity.
Herein are some function to enable that for a two-dimensional detector image.
Herein are some functions to enable that for a two-dimensional detector image.
"""

# Copyright (c) Andrew R. McCluskey
@@ -19,19 +20,23 @@ def bivariate_normal(data, mu_1, mu_2, sigma_1, sigma_2, offset, factor):
"""
Produce a bivariate normal distribution.
*Note*: the covariance of the two dimensions is assumed to be zero to unsure greater stability.
*Note*: the covariance of the two dimensions is assumed to be zero to
ensure greater stability.
Args:
data (:py:attr:`array_like`): Two-dimensional abscissa data.
mu_1 (:py:attr:`float`): Mean in dimension 0 (horizontal).
mu_2 (:py:attr:`float`): Mean in dimension 1 (vertical).
sigma_1 (:py:attr:`float`): Variance in dimension 0 (horizontal).
sigma_2 (:py:attr:`float`): Variance in dimension 1 (vertical).
offset (:py:attr:`float`): Offset from the 0 for the ordinate, this is the background level.
factor (:py:attr:`float`): Multiplicative factor for area of normal distribution.
offset (:py:attr:`float`): Offset from the 0 for the ordinate, this is
the background level.
factor (:py:attr:`float`): Multiplicative factor for area of normal
distribution.
Returns:
:py:attr:`array_like`: Flattened ordinate data for bivariate normal distribution.
:py:attr:`array_like`: Flattened ordinate data for bivariate normal
distribution.
"""
# Setting the data up in the correct format
pos = np.empty(data[0].shape + (2,))
@@ -51,42 +56,33 @@ def fit_gaussian_2d(image, image_e, p0=None, bounds=None):
Args:
image (:py:attr:`array_like`): The data to fit the Gaussian to.
image_e (:py:attr:`array_like`): The data uncertainty.
p0 (:py:attr:`list`, optional): An initial guess at the parameters. Defaults to values based on the image.
bounds (:py:attr:`list` of :py:attr:`tuple`, optional): Bounds for the fitting. Defaults to values based on the image.
p0 (:py:attr:`list`, optional): An initial guess at the parameters.
Defaults to values based on the image.
bounds (:py:attr:`list` of :py:attr:`tuple`, optional): Bounds for the
fitting. Defaults to values based on the image.
Returns:
:py:attr:`tuple`: Containing:
- :py:attr:`array_like`: The results (with uncertainties) for each of the 6 parameters fit.
- :py:attr:`array_like`: The results (with uncertainties) for each
of the 6 parameters fit.
- :py:attr:`int`: The index of the offset.
- :py:attr:`int`: The index of the vertical distribution width.
"""
# Setting default values
if p0 is None:
p0 = [
image.shape[0] / 2,
image.shape[1] / 2,
1,
1,
image.min(),
image.max(),
]
p0 = [image.shape[0] / 2, image.shape[1] / 2,
1, 1, image.min(), image.max()]
if bounds is None:
bounds = (
0,
[image.shape[0], image.shape[1], 100, 100, image.max(), image.max() * 10,],
)
abscissa = np.array(np.mgrid[0 : image.shape[0] : 1, 0 : image.shape[1] : 1])
0, [image.shape[0], image.shape[1], 100, 100,
image.max(), image.max() * 10])
abscissa = np.array(np.mgrid[0:image.shape[0]:1, 0:image.shape[1]:1])
# Perform the fitting
popt, pcov = curve_fit(
bivariate_normal,
abscissa,
image.flatten(),
bounds=bounds,
sigma=image_e.flatten(),
p0=p0,
maxfev=2000 * (len(p0) + 1),
)
# Determine uncertainty from covarience matrix
bivariate_normal, abscissa, image.flatten(),
bounds=bounds, sigma=image_e.flatten(), p0=p0,
maxfev=2000 * (len(p0) + 1))
# Determine uncertainty from covariance matrix
p_sigma = np.sqrt(np.diag(pcov))
return unp.uarray(popt, p_sigma), 4, 2
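
For orientation, here is a minimal usage sketch of the two-dimensional background fit above. It is not part of the commit: the synthetic image, uncertainties and parameter values are invented for illustration, and it simply assumes `bivariate_normal` and `fit_gaussian_2d` behave as their docstrings describe.

import numpy as np
from islatu.background import bivariate_normal, fit_gaussian_2d

# Synthetic detector image: a single peak on a flat background.
shape = (50, 40)
abscissa = np.array(np.mgrid[0:shape[0]:1, 0:shape[1]:1])
image = bivariate_normal(
    abscissa, 25, 20, 3, 2, offset=10, factor=5000).reshape(shape)
image_e = np.sqrt(image)  # Poisson-like uncertainties for the example

# The fit returns the six parameters (with uncertainties) plus the indices
# of the offset (the background level) and the vertical width.
params, offset_index, width_index = fit_gaussian_2d(image, image_e)
background = params[offset_index]
print(background)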

@@ -99,11 +95,14 @@ def univariate_normal(data, mu, sigma, offset, factor):
data (:py:attr:`array_like`): Abscissa data.
mu (:py:attr:`float`): Mean (horizontal).
sigma (:py:attr:`float`): Variance (horizontal).
offset (:py:attr:`float`): Offset from the 0 for the ordinate, this is the background level.
factor (:py:attr:`float`): Multiplicative factor for area of normal distribution.
offset (:py:attr:`float`): Offset from the 0 for the ordinate, this is
the background level.
factor (:py:attr:`float`): Multiplicative factor for area of normal
distribution.
Returns:
:py:attr:`array_like`: Ordinate data for uniivariate normal distribution.
:py:attr:`array_like`: Ordinate data for univariate normal
distribution.
"""
# Creation of the bivariate normal distribution
normal = norm(loc=mu, scale=sigma)
@@ -112,48 +111,39 @@ def fit_gaussian_1d(image, image_e, p0=None, bounds=None, axis=0):

def fit_gaussian_1d(image, image_e, p0=None, bounds=None, axis=0):
"""
Fit a one-dimensional Gaussian function with some ordinate offset to an image with uncertainty.
This is achieved by averaging in a given ``axis`` before performing the fit.
Return the results, and index of the offset.
Fit a one-dimensional Gaussian function with some ordinate offset to an
image with uncertainty. This is achieved by averaging in a given ``axis``
before performing the fit. Return the results, and index of the offset.
Args:
image (:py:attr:`array_like`): The data to fit the Gaussian to.
image_e (:py:attr:`array_like`): The data uncertainty.
p0 (:py:attr:`list`, optional): An initial guess at the parameters. Defaults to values based on the image.
bounds (:py:attr:`list` of :py:attr:`tuple`, optional): Bounds for the fitting. Defaults to values based on the image.
p0 (:py:attr:`list`, optional): An initial guess at the parameters.
Defaults to values based on the image.
bounds (:py:attr:`list` of :py:attr:`tuple`, optional): Bounds for the
fitting. Defaults to values based on the image.
axis (:py:attr:`int`): The dimension in which to perform the averaging.
Returns:
:py:attr:`tuple`: Containing:
- :py:attr:`array_like`: The results (with uncertainties) for each of the 6 parameters fit.
- :py:attr:`array_like`: The results (with uncertainties) for each
of the 6 parameters fit.
- :py:attr:`int`: The index of the offset.
- :py:attr:`None`: As it is not possible to describe the reflected peak width.
- :py:attr:`None`: As it is not possible to describe the reflected
peak width.
"""
ordinate = image.mean(axis=axis)
ordinate_e = image_e.mean(axis=axis)
# Setting default values
if p0 is None:
p0 = [
ordinate.shape[0] / 2,
1,
image.min(),
image.max(),
]
p0 = [ordinate.shape[0] / 2, 1, image.min(), image.max()]
if bounds is None:
bounds = (
0,
[ordinate.shape[0], 100, image.max(), image.max() * 10,],
)
bounds = (0, [ordinate.shape[0], 100, image.max(), image.max() * 10])
# Perform the fitting
popt, pcov = curve_fit(
univariate_normal,
np.arange(0, ordinate.shape[0], 1),
ordinate,
bounds=bounds,
sigma=ordinate_e.flatten(),
p0=p0,
maxfev=2000 * (len(p0) + 1),
)
np.arange(0, ordinate.shape[0], 1), ordinate, bounds=bounds,
sigma=ordinate_e.flatten(), p0=p0, maxfev=2000 * (len(p0) + 1))
# Determine uncertainty from covariance matrix
p_sigma = np.sqrt(np.diag(pcov))
return unp.uarray(popt, p_sigma), 2, None
return unp.uarray(popt, p_sigma), 2, None
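
A corresponding sketch for the one-dimensional fit, again outside the commit itself and assuming the functions behave as documented above; the image is synthetic and the values are arbitrary.

import numpy as np
from islatu.background import univariate_normal, fit_gaussian_1d

# Synthetic image whose rows share the same peaked profile.
rows = np.arange(60)
profile = univariate_normal(rows, mu=30, sigma=4, offset=5, factor=2000)
image = np.tile(profile, (80, 1)).T   # shape (60, 80)
image_e = np.sqrt(image)

# Average over axis=1 and fit. The offset (background level) sits at the
# returned index; the third value is None because a one-dimensional fit
# cannot describe the reflected peak width.
params, offset_index, width_index = fit_gaussian_1d(image, image_e, axis=1)
print(params[offset_index], width_index)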
34 changes: 19 additions & 15 deletions islatu/corrections.py
@@ -1,6 +1,7 @@
"""
Reflectometry data must be corrected as a part of reduction.
These functions facilitate this, including the footprint and DCD q-variance corrections.
Reflectometry data must be corrected as a part of reduction.
These functions facilitate this, including the footprint and
DCD q-variance corrections.
"""

# Copyright (c) Andrew R. McCluskey
@@ -9,9 +10,8 @@

import numpy as np
from scipy.stats import norm
from uncertainties import unumpy as unp
from islatu.io import i07_dat_parser
from scipy.interpolate import splrep
from uncertainties import unumpy as unp


def footprint_correction(beam_width, sample_size, theta):
@@ -21,7 +21,8 @@ def footprint_correction(beam_width, sample_size, theta):
Args:
beam_width (:py:attr:`float`): Width of incident beam, in metres.
sample_size (:py:class:`uncertainties.core.Variable`): Width of sample in the dimension of the beam, in metres.
sample_size (:py:class:`uncertainties.core.Variable`): Width of sample
in the dimension of the beam, in metres.
theta (:py:attr:`float`): Incident angle, in degrees.
Returns:
@@ -33,22 +34,25 @@ def footprint_correction(beam_width, sample_size, theta):
upper = (unp.nominal_values(length) + unp.std_devs(length)) / 2.0 / beam_sd
lower = (unp.nominal_values(length) - unp.std_devs(length)) / 2.0 / beam_sd
probability = 2.0 * (
unp.uarray(norm.cdf(mid), (norm.cdf(upper) - norm.cdf(lower)) / 2) - 0.5
)
unp.uarray(
norm.cdf(mid), (norm.cdf(upper) - norm.cdf(lower)) / 2) - 0.5)
return probability


def get_interpolator(
file_path, parser, q_axis_name="qdcd_", intensity_axis_name="adc2"
):
file_path, parser, q_axis_name="qdcd_", intensity_axis_name="adc2"):
"""
Get an interpolator object from scipy, this is useful for the DCD q-normalisation step.
Get an interpolator object from scipy, this is useful for the DCD
q-normalisation step.
Args:
file_path (:py:attr:`str`): File path to the normalisation file.
parser (:py:attr:`callable`): Parser function for the normalisation file.
q_axis_name (:py:attr:`str`, optional): Label for the q-value in the normalisation file. Defaults to ``'qdcd_'``.
intensity_axis_name (:py:attr:`str`, optional): Label for the intensity in the normalisation file. Defaults to ``'adc2'``.
parser (:py:attr:`callable`): Parser function for the normalisation
file.
q_axis_name (:py:attr:`str`, optional): Label for the q-value in the
normalisation file. Defaults to ``'qdcd_'``.
intensity_axis_name (:py:attr:`str`, optional): Label for the
intensity in the normalisation file. Defaults to ``'adc2'``.
Returns:
:py:attr:`tuple`: Containing:
@@ -58,5 +62,5 @@ def get_interpolator(
"""
normalisation_data = parser(file_path)[1]
return splrep(
normalisation_data[q_axis_name], normalisation_data[intensity_axis_name]
)
normalisation_data[q_axis_name],
normalisation_data[intensity_axis_name])
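
Finally, a short usage sketch for the two corrections in this file. It is not part of the commit and rests on several assumptions: that `i07_dat_parser` is importable from `islatu.io` (the import removed from this module above), that `footprint_correction` accepts an array of angles, and that the normalisation file path, beam width, sample size and angles shown are placeholder values only.

import numpy as np
from scipy.interpolate import splev
from uncertainties import ufloat
from islatu.corrections import footprint_correction, get_interpolator
from islatu.io import i07_dat_parser  # parser for the DCD normalisation file

# Footprint correction: 100 um beam, 50 +/- 1 mm sample, angles in degrees.
theta = np.array([0.3, 0.6, 1.2])
probability = footprint_correction(100e-6, ufloat(0.050, 0.001), theta)
print(probability)

# DCD q-normalisation: build the spline representation from the
# normalisation file and evaluate it at the measured q values.
itp = get_interpolator("qdcd_normalisation.dat", i07_dat_parser)  # hypothetical file
q = np.array([0.02, 0.05, 0.1])
print(splev(q, itp))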
