UP | HOME

Exercise 27 solutions

Table of Contents


Python

e27.py

# start from ipython --pylab

from scipy.optimize import minimize

# part 1: 1D cost landscape

# define our cost function: J(x) = x*exp(-x^2) + x^2/20
# (exp comes from the pylab namespace -- see "ipython --pylab" above)
def mycostfun(x):
    return (x * exp(-(x ** 2))) + ((x ** 2) / 20.0)

# Map J: sample the landscape over [-10, 10] and plot it

x = linspace(-10, 10, 201)
J = mycostfun(x)
plot(x, J, 'b-')
xlabel('parameter value x')
ylabel('cost function J')

# Optimize x

# an initial guess at the parameter value
x0 = -5.0

# optimize! Nelder-Mead is a derivative-free simplex method
opt_result = minimize(mycostfun, x0, method="Nelder-Mead")
print(opt_result)

# the result object holds the optimal parameter ("x") and
# the cost at that optimum ("fun")
x_opt = opt_result["x"]
min_cost = opt_result["fun"]

print("initial guess was x = " + str(x0))
print("the optimal value of x is " + str(x_opt))
print("the minimum cost found is " + str(min_cost))


# part 2: 2D cost landscape

# define our cost function: J(x,y) = x*exp(-x^2-y^2) + (x^2+y^2)/20
# X is a 2-element sequence (x, y) so minimize() can pass a single
# parameter vector
def mycostfun(X):
    x, y = X[0], X[1]
    return (x * exp(-(x ** 2) - (y ** 2))) + (((x ** 2) + (y ** 2)) / 20.0)

# Map J: evaluate the cost over an (x, y) grid and show the landscape

xg = linspace(-3, 3, 51)
yg = linspace(-3, 3, 51)
X, Y = meshgrid(xg, yg)
J = mycostfun((X, Y))   # vectorized: works elementwise on the grid arrays
figure()
contourf(X, Y, J, 20)
colorbar()
xlabel('parameter value x')
ylabel('parameter value y')

# Optimize (x,y)

# an initial guess at the parameter values
X0 = [-5.0, -5.0]

# optimize!
opt_result = minimize(mycostfun, X0, method="Nelder-Mead")
print(opt_result)

X_opt = opt_result["x"]
min_cost = opt_result["fun"]

print("initial guess was (x,y) = " + str(X0))
print("the optimal value of (x,y) is " + str(X_opt))
print("the minimum cost found is " + str(min_cost))

MATLAB / Octave

e27.m

% part 1: 1D cost landscape

%% Map J

x = linspace(-10,10,201); % sample from -10 to +10 in 200 steps
J = mycostfun1d(x);       % compute cost function over all values
figure                    % visualize the cost landscape
plot(x,J,'b-','linewidth',2);
xlabel('X');
ylabel('J');

%% Optimize x

X0 = 4.0;                                  % initial guess
[Xf,FVAL] = fminsearch(@mycostfun1d, X0);  % optimize! (function handle preferred over string name)
hold on
% mark the optimum with a dashed red vertical line spanning the y-range
% note: 'r--' is a linespec; it is NOT a valid value for the 'color' property
plot([Xf,Xf], get(gca,'ylim'), 'r--', 'linewidth',2);


% part 2: 2D cost landscape

%% Map J

x = linspace(-3,3,51); % sample from -3 to +3 in 50 steps
y = linspace(-3,3,51);
XY = combvec(x,y);        % all (x,y) combinations, one per column
J = mycostfun2d(XY');     % compute cost function over all values
[Y,X] = meshgrid(x,y);    % reshape into matrix form
Z = reshape(J,length(x),length(y));
figure                    % visualize the cost landscape
meshc(X,Y,Z);
shading flat
xlabel('X');
ylabel('Y');
zlabel('J');

%% Optimize (x,y)

X0 = [2.5, 2.5];                           % initial guess
[Xf,FVAL] = fminsearch(@mycostfun2d, X0);  % optimize! (function handle preferred over string name)
hold on
% draw two red lines on the floor of the 3D plot that cross at the optimum
z0 = get(gca,'zlim');
z0 = z0(1);
plot3([Xf(1),Xf(1)],[get(gca,'ylim')],[z0 z0],'color','r','linewidth',2);
plot3([get(gca,'xlim')],[Xf(2),Xf(2)],[z0 z0],'color','r','linewidth',2);

mycostfun1d.m

function J = mycostfun1d(X)
% MYCOSTFUN1D  1-D cost landscape J(x) = x*exp(-x^2) + x^2/20
%
% elementwise operators (.*, ./, .^) let X be a whole vector of
% parameter values rather than a single scalar
Xsq = X .^ 2;
J = (X .* exp(-Xsq)) + (Xsq ./ 20);

mycostfun2d.m

function J = mycostfun2d(X)
% MYCOSTFUN2D  2-D cost landscape J(x,y) = x*exp(-x^2-y^2) + (x^2+y^2)/20
%
% X is an n-by-2 array: column 1 holds x values, column 2 holds y.
% elementwise operators evaluate many (x,y) pairs at once.
x = X(:,1);
y = X(:,2);
r2 = (x .^ 2) + (y .^ 2);   % squared distance from the origin
J = (x .* exp(-r2)) + (r2 ./ 20);

R

e27.R

# part 1: 1D cost landscape

# define the cost function: J(x) = x*exp(-x^2) + x^2/20
mycostfun <- function(x) {
  (x * exp(-(x^2))) + ((x^2) / 20.0)
}

# Map J

x <- seq(-10, 10, 0.1)
J <- mycostfun(x)
plot(x, J, type = "l", col = "blue", lwd = 2,
     xlab = "parameter value x", ylab = "cost function J")

# Optimize x

# an initial guess
x0 <- -5.0

# optimize!
optim_res <- optim(x0, mycostfun, method = "Nelder-Mead")
print(optim_res)

# $par is the optimal parameter, $value the cost at the optimum
x_opt <- optim_res$par
min_cost <- optim_res$value

cat(paste("initial guess was", x0, "\n"))
cat(paste("the optimal value of x is", x_opt, "\n"))
cat(paste("the minimum cost found is", min_cost, "\n"))

# note that R gives us a warning about using Nelder-Mead for
# one-dimensional optimization and suggests we use the "Brent"
# method instead. You could try this as an exercise... You will
# need to give optim() lower and upper bounds on the parameter, e.g.:

optim_res <- optim(x0, mycostfun, method = "Brent", lower = -20.0, upper = 20.0)
print(optim_res)


# part 2: 2D cost landscape

# define the cost function over a 2-element parameter vector (x, y):
# J(x,y) = x*exp(-x^2-y^2) + (x^2+y^2)/20
mycostfun2d <- function(X) {
  x <- X[1]
  y <- X[2]
  (x * exp(-(x^2) - (y^2))) + (((x^2) + (y^2)) / 20.0)
}

# Map J

xg <- seq(-3, 3, length.out = 51)
yg <- seq(-3, 3, length.out = 51)
# evaluate J over the (x, y) grid; outer() applies the formula elementwise
Jgrid <- outer(xg, yg,
               function(x, y) (x * exp(-(x^2) - (y^2))) + (((x^2) + (y^2)) / 20.0))
persp(xg, yg, Jgrid, theta = 30, phi = 30,
      xlab = "x", ylab = "y", zlab = "J")

# Optimize (x,y)

# an initial guess
X0 <- c(-5.0, -5.0)

# optimize!
optim_res <- optim(X0, mycostfun2d, method = "Nelder-Mead")
print(optim_res)

X_opt <- optim_res$par
min_cost <- optim_res$value

cat(paste("initial guess was (x,y) =", paste(X0, collapse = ","), "\n"))
cat(paste("the optimal value of (x,y) is", paste(X_opt, collapse = ","), "\n"))
cat(paste("the minimum cost found is", min_cost, "\n"))
C

e27.c

// e27.c
// compile with: gcc -o e27 e27.c nmsimplex.c

#include <stdio.h>
#include "nmsimplex.h"
#include <math.h>

double mycostfun1d(double x[], void *extras) {
  return (x[0] * exp(-(x[0]*x[0]))) + ((x[0]*x[0])/20);
}

double mycostfun2d(double x[], void *extras) {
  return (x[0] * exp(-(x[0]*x[0])-(x[1]*x[1]))) + (((x[0]*x[0])+(x[1]*x[1]))/20);
}

/*
 * Run the Nelder-Mead simplex optimizer (simplex(), declared in
 * nmsimplex.h and defined in nmsimplex.c) on both cost landscapes.
 * The arguments appear to be (cost function, initial-guess vector,
 * number of dimensions, convergence tolerance, initial simplex scale,
 * constraint callback, extras pointer) -- NOTE(review): confirm
 * against nmsimplex.h.  The printf calls below imply that the guess
 * array is overwritten in place with the optimal parameters, and the
 * return value is the minimum cost found.
 */
int main(int argc, char *argv[]) {

  double x1d[] = {-5.0}; // our initial guess
  // minimize the 1-D cost; x1d ends up holding the optimal x
  double f_min1d = simplex(mycostfun1d, x1d, 1, 1e-8, 1, NULL, NULL);
  printf("f_min=%f\n", f_min1d);
  printf("x_min=%f\n", x1d[0]);

  double x2d[] = {-5.0, -5.0}; // our initial guess
  // minimize the 2-D cost; x2d ends up holding the optimal (x,y)
  double f_min2d = simplex(mycostfun2d, x2d, 2, 1e-8, 1, NULL, NULL);
  printf("f_min=%f\n", f_min2d);
  printf("x_min=(%f,%f)\n", x2d[0], x2d[1]);

  return 0;
}

Paul Gribble | fall 2014
This work is licensed under a Creative Commons Attribution 4.0 International License
Creative Commons License