# nonlinear_fit.py — example of nonlinear fitting of data expected to follow a power law
# import modules needed for data analysis
import numpy as np
import matplotlib.pyplot as plt
from scipy.optimize import curve_fit
# Measured data: x is the independent variable, y the (decaying) response.
# NOTE(review): the original lines had the assignment targets scrambled and the
# x/y labels crossed; the inverse-square model with guess a=13, b=5 reproduces
# y≈0.46 at x=10, confirming this orientation.
x = np.array([10.000, 22.857, 35.714, 48.571, 61.429, 74.286, 87.143, 100.000])
y = np.array([4.6e-01, 6.e-02, 2.1e-02, 1.e-02, 6.9e-03, 4.6e-03, 3.2e-03, 2.6e-03])
# First we plot our data with error bars.
plt.figure()
plt.errorbar(x, y, yerr=.005, xerr=3, fmt='o', label='data')
# The data looks like a power law, so scale both axes logarithmically.
plt.xscale('log')
plt.yscale('log')
# We will attempt to fit the data with an inverse-square dependence.
def model(x, a, b, c):
    """Inverse-square model: a / (x - b)**2 + c.

    Works elementwise on scalars or numpy arrays via np.power.
    """
    return a * np.power(x - b, -2) + c
## When you do nonlinear fitting, having a good initial guess is important —
# otherwise the fit may converge to unexpected parameters.
guessParameters = [13, 5, 0]  # [a, b, c] coefficients
plt.plot(x, model(x, *guessParameters), 'b--',
         label='guess fit: a=%.4g, b=%.4g, c=%.4g' % tuple(guessParameters))
# plt.show()  # uncomment to render now; repeat with adjusted values until the
# guess curve lies reasonably close to the data
## Finally we do the fitting.
# curve_fit returns the optimal parameters and their covariance matrix.
finalParameters, finalParametersErrors = curve_fit(model, x, y, guessParameters)
# Add the best-fit line to the plot.
plt.plot(x, model(x, *finalParameters), 'r-',
         label='best fit: a=%.4g, b=%.4g, c=%.4g' % tuple(finalParameters))
# Render the finished figure with its legend.
# The earlier plt.xscale('log') / plt.yscale('log') calls still apply,
# since everything was drawn on the same figure.
plt.legend()
plt.show()
## Report each fitted parameter with its 1-sigma uncertainty
## (square root of the corresponding covariance diagonal entry).
print("Model parameters:")
for idx, label in enumerate(("a", "b", "c")):
    uncertainty = np.sqrt(finalParametersErrors[idx, idx])
    print(label, "=", finalParameters[idx], " +/- ", uncertainty)