Commit ddb128ec authored by Emil's avatar Emil
Browse files

Week 4 homework solution

parent 789c154f
# Instructions to launch the program
In order to launch the program, simply launch:
python3 ./main.py a_11 a_12 a_21 a_22 b_1 b_2
where the coefficients a_ij and b_i are the terms of the matrix A and the vector b in the quadratic form 1/2 x^T A x - x^T b, as advertised by the help:
python3 ./main.py -h
This will compute the minimum of the quadratic function using one optimizer.
In order to change the method you can add an additional argument:
python3 ./main.py a_11 a_12 a_21 a_22 b_1 b_2 -m cg
python3 ./main.py a_11 a_12 a_21 a_22 b_1 b_2 -m BFGS
In order to make a plot, use the following option:
python3 ./main.py a_11 a_12 a_21 a_22 b_1 b_2 -p 1
\ No newline at end of file
#!/usr/bin/env python3
import numpy as np
def solve(func, x0, jac=None, hess=None,
          max_iter=50, tol=1e-8,
          callback=None):
    """Minimize a quadratic function with a home-made conjugate gradient.

    func     -- objective (unused by the iteration itself, kept for the
                common optimizer interface)
    x0       -- starting point (1d numpy array)
    jac      -- gradient callable, mandatory
    hess     -- hessian callable, mandatory; must return a symmetric matrix
    max_iter -- maximum number of conjugate-gradient iterations
    tol      -- stop when the step norm falls below this value
    callback -- optional callable invoked with each new iterate

    Returns the solution vector, or False when max_iter is exhausted
    without reaching the tolerance (kept for backward compatibility).
    Raises RuntimeError when jac/hess are missing or A is not symmetric.
    """
    if jac is None:
        raise RuntimeError('this method needs a jacobian to be provided')
    # check for a missing hessian *before* calling it, otherwise the
    # error would surface as a TypeError instead of the intended message
    if hess is None:
        raise RuntimeError('this method needs a hessian to be provided')
    # evaluate the hessian at the starting point (not at the scalar 0)
    if hess(x0)[1, 0] != hess(x0)[0, 1]:
        raise RuntimeError('this method requires a symmetric matrix A')
    x_old = x0
    r_old = -jac(x0)   # initial residual r0 = b - A x0
    p_old = r_old      # first search direction is the residual
    # conjugate iteration loop
    for _ in range(max_iter):
        # fetch the hessian (constant for a quadratic form)
        A = hess(x_old)
        Ap_old = A @ p_old
        alpha = (r_old @ r_old) / (p_old @ Ap_old)
        x_new = x_old + alpha * p_old
        r_new = r_old - alpha * Ap_old
        # call the callback over iterations
        if callback:
            callback(x_new)
        # evaluate stop criterion on the step length
        if np.linalg.norm(x_new - x_old) < tol:
            return x_new
        beta = (r_new @ r_new) / (r_old @ r_old)
        p_old = r_new + beta * p_old
        r_old = r_new
        x_old = x_new
    return False
#!/usr/bin/env python3
from optimizer import optimize
import numpy as np
import argparse
################################################################
# main function: parsing of program arguments
################################################################
if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description='Optimization exercise in python')
    parser.add_argument(
        'coeffs', type=float,
        nargs='+',
        help=('Specify the coefficients of the quadratic function,'
              ' in the following order: a_11, a_12, a_21, a_22, b_1, b_2.'
              ' The coefficients are the terms of the matrix A'
              ' and the vector b in the quadratic form 1/2 x^T A x - x^T b.'))
    parser.add_argument(
        '-m', '--method', type=str,
        help=('Specify the method to use for the optimization.'
              ' Can be cg/BFGS'), default='cg')
    parser.add_argument(
        '-p', '--plot',
        # type=bool is broken with argparse: bool('0') and bool('False')
        # are both True, so parse the flag value explicitly instead
        type=lambda s: s.lower() in ('1', 'true', 'yes'),
        help=('Choose to plot or not the solution.'
              ' True/False'), default=False)
    args = parser.parse_args()
    # exactly six coefficients are needed: four for A and two for b
    if len(args.coeffs) != 6:
        parser.error('expected exactly 6 coefficients:'
                     ' a_11 a_12 a_21 a_22 b_1 b_2')
    A = np.array(args.coeffs[:4]).reshape(2, 2)
    b = np.array(args.coeffs[4:])
    # transform to dictionary so the remaining options (method, plot)
    # can be forwarded as keyword arguments
    args = vars(args)
    del args['coeffs']

    # definitions of the quadratic form
    def func(X, A, b):
        """Evaluate the quadratic form 1/2 x^T A x - x^T b row-wise.

        X is flattened then reshaped to (n, 2): each row is one point.
        Returns a length-n array of function values.
        """
        X = X.reshape(X.flatten().shape[0]//2, 2)
        # computes the quadratic form with einsum
        res = 0.5*np.einsum('ai,ij,aj->a', X, A, X)
        res -= np.einsum('i,ai->a', b, X)
        return res

    optimize(func=lambda x: func(x, A, b),
             jac=lambda x: A@x-b,
             hess=lambda x: A,
             **args)
#!/usr/bin/env python3
import matplotlib.pyplot as plt
import numpy as np
from mpl_toolkits.mplot3d import Axes3D
from conjugate_gradient import solve as cgSolve
from scipy.optimize import minimize as scipySolve
def plotFunction(func, iterations, title):
    """Plot the objective surface, its contours, and the iteration path.

    func       -- vectorized callable mapping an (n, 2) array to n values
    iterations -- sequence of 2d iterates to overlay on the surface
    title      -- title of the produced 3d figure
    """
    x = np.linspace(-3, 3, 100)
    y = np.linspace(-3, 3, 100)
    x, y = np.meshgrid(x, y)
    # flatten the grid into an (n, 2) array of evaluation points
    X = np.stack([x.flatten(), y.flatten()], axis=1)
    z = func(X).reshape((100, 100))
    xy = np.array(iterations)
    zs = func(xy)
    fig = plt.figure()
    # Axes3D(fig) no longer attaches the axes to the figure on modern
    # matplotlib (>= 3.4); add_subplot(projection='3d') is the supported way
    ax = fig.add_subplot(projection='3d')
    ax.plot_surface(x, y, z, linewidth=0, antialiased=True,
                    cmap=plt.cm.viridis, alpha=0.2)
    ax.contour(x, y, z, 10, colors="k", linestyles="solid")
    ax.plot(xy[:, 0], xy[:, 1], zs, 'ro--')
    ax.grid(False)
    ax.view_init(elev=62., azim=143)
    ax.set_title(title)
    plt.xlabel('$x$')
    plt.ylabel('$y$')
    plt.show()
################################################################
# generic optimization program
################################################################
def optimize(method='cg', plot=False, func=None, **kwargs):
    """Minimize *func* from the fixed starting point (2, 2).

    method -- 'cg' for the home-made conjugate gradient, 'BFGS' for scipy
    plot   -- when truthy, draw the surface and the iteration path
    func   -- callable to minimize (mandatory)
    kwargs -- forwarded to the chosen optimizer (jac, hess, ...)

    Returns the optimizer result: the solution vector for 'cg', a scipy
    OptimizeResult for 'BFGS'.
    Raises RuntimeError for a missing function or an unknown method.
    """
    if func is None:
        raise RuntimeError('Need a function to optimize')
    tol = 1e-6
    x0 = np.array([2, 2])
    if method == 'BFGS':
        optimizer = scipySolve
        # scipy's BFGS does not use a hessian; drop it if provided
        if 'hess' in kwargs:
            del kwargs['hess']
        # make the scipy method explicit instead of relying on its default
        kwargs.setdefault('method', 'BFGS')
    elif method == 'cg':
        optimizer = cgSolve
    else:
        raise RuntimeError('unknown solve method: ' + str(method))
    # record every iterate (starting point included) for plotting
    iterations = [x0]
    res = optimizer(
        func, x0, tol=tol, callback=lambda Xi: iterations.append(Xi),
        **kwargs)
    print('Method: ', method, ' Solution :', res)
    if plot:
        plotFunction(func, iterations, method)
    # return the result so callers can inspect the solution
    return res
#!/usr/bin/python
import numpy as np
import math
import matplotlib.pyplot as plt
################################################################
def evalFunction(f, xmin, xmax, npoints):
    'Sample *f* on npoints evenly spaced abscissae in [xmin, xmax].'
    abscissae = np.linspace(xmin, xmax, npoints)
    return abscissae, f(abscissae)
################################################################
def myPlot(x, y, fig, label=None,
           linestyle='-', marker='', xlabel='', ylabel='', title='', axe=None):
    'Draw one labelled curve on *fig*, creating a default axes when none is given.'
    xs = np.array(x)
    ys = np.array(y)
    target = fig.add_subplot(111) if axe is None else axe
    target.plot(xs, ys, linestyle=linestyle, marker=marker, label=label)
    target.set_title(title)
    target.set_xlabel(xlabel)
    target.set_ylabel(ylabel)
    # only show a legend when there is something to label
    if label is not None:
        target.legend()
################################################################
def exo1():
    'Exercise 1: draw the same step data twice, plain and with x markers.'
    abscissa = [0., 2., 4., 6., 8., 10.]
    ordinate = [0., 0., 0., 1., 1., 1.]
    for options in ({}, {'marker': 'x', 'linestyle': '--'}):
        myPlot(abscissa, ordinate, plt.figure(), **options)
################################################################
def exo2():
    'Exercise 2: two labelled curves on a shared axes, saved to a PDF.'
    xs = np.array([0., 2., 4., 6., 8., 10.])
    ys = np.array([0., 0., 0., 1., 1., 1.])
    figure = plt.figure()
    shared_axe = figure.add_subplot(111)
    # common decoration shared by both curves
    decoration = dict(title='my super cool plot',
                      xlabel='my X', ylabel='my Y', axe=shared_axe)
    myPlot(xs, ys, figure, label='step', **decoration)
    myPlot(xs, -1.*ys, figure, label='$-y$', **decoration)
    figure.savefig('myfigure.pdf')
################################################################
def exo3():
    'Exercise 3: plot one period of the sine function.'
    npoints = 100
    samples = np.linspace(0., 2.*math.pi, npoints)
    myPlot(samples, np.sin(samples), plt.figure(), label='$sin(x)$')
################################################################
def exo4():
    'Exercise 4: plot the data.plot columns, their error, and an error-bar view.'
    table = np.loadtxt('data.plot')
    print(table.shape)
    abscissa = table[:, 0]
    analytic = table[:, 1]
    measure = table[:, 2]
    # first figure: the two curves on a shared axes
    fig_curves = plt.figure()
    shared = fig_curves.add_subplot(111)
    myPlot(abscissa, analytic, fig_curves, label='analytic', axe=shared)
    myPlot(abscissa, measure, fig_curves, label='measure', axe=shared)
    # second figure: pointwise error between the two columns
    error_val = np.sqrt((analytic - measure)**2)
    myPlot(abscissa, error_val, plt.figure(), label='error')
    # third figure: the analytic curve with error bars
    bars_axe = plt.figure().add_subplot(111)
    bars_axe.errorbar(abscissa, analytic, error_val,
                      marker='D', linestyle='--', label='prediction')
################################################################
# script entry point: run every exercise, then display all figures
exo1()
exo2()
exo3()
exo4()
plt.show()
#!/usr/bin/python
"""Generate data.plot: index, exact line i*a, and the line plus unit noise."""
import numpy as np
import random

a = 0.1          # slope of the linear model
npoints = 100    # number of sample points
data = np.zeros([3, npoints])
# NOTE: the original `random.seed = 100` only rebound the module attribute
# and never seeded the generator; call the function for reproducible noise
random.seed(100)
for i in range(npoints):
    data[0, i] = float(i)                            # abscissa
    data[1, i] = float(i)*a                          # exact line
    data[2, i] = float(i)*a + random.gauss(0., 1.)   # noisy measurement
# write one tab-separated line per sample point, closing the file reliably
with open('data.plot', 'w') as f:
    for i in range(npoints):
        line = "{0}\t{1}\t{2}\n".format(data[0, i], data[1, i], data[2, i])
        f.write(line)
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment