# run in ipython --pylab
# e.g., %run multipleLinearRegression.py

from numpy.linalg import lstsq, inv  # for least-squares fitting

# Define a generalized (weighted) linear least-squares fitting routine
def LinearModelFit(X, y, u):
    '''
    X = design matrix: one row per data point, one column per basis function
    y = dependent variable
    u = uncertainties on y
    '''
    W = diag(1/u)                    # weight each point by 1/uncertainty
    Xw = dot(W, X)                   # weighted design matrix
    Yw = dot(W, y)                   # weighted data
    A = lstsq(Xw, Yw, rcond=None)[0]
    model = dot(X, A)
    n, K = shape(X)
    SSE = sum((y - model)**2/u**2)   # chi-square of the fit
    MSE = SSE / float(n - K)         # K already counts the constant column
    covariance = MSE * inv(dot(Xw.T, Xw))
    print('---------------------------------------')
    print('Results from Multiple Linear Regression')
    print('---------------------------------------')
    for i in range(K):
        print('parameter', i, '=', A[i], '+/-', sqrt(covariance[i, i]))
    return A, model, covariance

''' Load the data '''
x, y, u = loadtxt('mlrData.txt', unpack=True)

'''
The expected model is

    y = a1 * exp(-x/5.) + a2 * exp(-x/2.) + a3

where a1, a2, and a3 are the parameters we want to fit.
'''
x1 = exp(-x/5.)
x2 = exp(-x/2.)
x3 = ones_like(x)    # constant column for the offset a3

''' Now the model is linear in the parameters: y = a1*x1 + a2*x2 + a3*x3 '''
A, model, covariance = LinearModelFit(column_stack((x1, x2, x3)), y, u)

figure(0)
clf()
errorbar(x, y, u, fmt='ko')
plot(x, model, 'r-')
title('Multiple Linear Regression')

''' Now try a non-linear fit of the same model '''
def eModel(x, a, b, c):
    return a * exp(-x/5.) + b * exp(-x/2.) + c

from scipy.optimize import curve_fit

# sigma=u weights the residuals; with the default absolute_sigma=False,
# pcov is scaled by the reduced chi-square, matching the MSE scaling above.
popt, pcov = curve_fit(eModel, x, y, sigma=u)
print('---------------------------------------')
print('Results from Non-Linear Regression')
print('---------------------------------------')
for i in range(len(popt)):
    print('parameter', i, '=', popt[i], '+/-', sqrt(pcov[i, i]))
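
'''
Optional cross-check (a sketch added here, not part of the original script):
the same weighted fit can be solved directly with the normal equations,
A = (Xw^T Xw)^-1 Xw^T Yw.  This reuses x1, x2, x3, y, u, A, and popt defined
above and simply prints the three sets of parameters side by side; all three
should agree to within numerical precision.
'''
Xd = column_stack((x1, x2, x3))      # design matrix
Wd = diag(1/u)                       # weight matrix
Xdw = dot(Wd, Xd)
Ydw = dot(Wd, y)
A_normal = dot(inv(dot(Xdw.T, Xdw)), dot(Xdw.T, Ydw))
print('---------------------------------------')
print('Check: normal equations vs. lstsq vs. curve_fit')
print('---------------------------------------')
for i in range(len(A_normal)):
    print('parameter', i, ':', A_normal[i], '(normal eq.)',
          A[i], '(lstsq)', popt[i], '(curve_fit)')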