\documentclass[11pt]{article}
\usepackage{bail,amsfonts,amsmath}
\usepackage{epsfig} % obsolete: prefer graphicx with \includegraphics in new documents
\begin{document}
%
% Title goes here
%
\title{BAIL 2002 Sample Paper:
A Unified Gradient Flow Approach to Constrained Nonlinear
Optimization Problems\thanks{This is a sample only}}
%
% Authors
%
\author{S. Wang}
%
% Addresses
%
\address{Department of Mathematics and Statistics\\
University of Western Australia \\
35 Stirling Hwy, Crawley 6009, Australia\\
{\tt swang@maths.uwa.edu.au}}
%
\maketitle
%
% No Abstract. Start with your introduction
%
\section{Introduction}
Constrained
nonlinear programming problems appear in almost every subject in engineering,
science, economics, finance and management. Many efficient methods have
been developed for solving problems with nonlinear
inequality constraints.
However, most of these methods
fail to be effective when they are
applied to problems with equality constraints
without the use of other techniques such as penalty function approaches.
Recently, gradient flow type methods have been successfully
used for solving problems of this kind by various authors
(cf., for example, \cite{evt94a,evt94b}). In this approach an
optimization problem is formulated as an ordinary differential equation
(ODE)
so that the solution of this ODE converges to a local minimum of the
original problem as $t\rightarrow \infty$.
\section{Problems with equality constraints}
Consider the following constrained nonlinear optimization problem
\begin{align}
\text{minimize} \quad & f(x) \label{p11} \\
\text{subject to} \quad & g(x) = 0, \label{p12}
\end{align}
where $x = (x_1,x_2,\dots,x_n)^\top\in \mathbb{R}^n$,
$f \colon \mathbb{R}^n \to \mathbb{R}$
and $g=(g_1,g_2,\dots,g_m)^\top \colon \mathbb{R}^n \to \mathbb{R}^m$.
Here $m$ and $n$ are positive integers with $m \leq n$.
The functions $f$ and $g$ are assumed to be
continuously differentiable.
We define the Lagrangian of the problem by
\begin{equation}\label{lagrange}
L(x,u) = f(x) + u^\top g(x),
\end{equation}
where $u = (u_1,u_2,\dots,u_m)^\top \in \mathbb{R}^m$ is the
Lagrange multiplier.
\section{Numerical experiments}
To demonstrate the usefulness and efficiency
of the developed method, numerical experiments
were performed.
In all the examples solved below,
the Jacobian and Hessians
were computed symbolically using the MATLAB
Symbolic Math Toolbox. The numerical results are depicted in Figure~\ref{fig1}.
\begin{figure}
\centering
\includegraphics[height=7.0cm,width=10.0cm]{figure}
\caption{Computed errors.}
\label{fig1}
\end{figure}
%
\begin{thebibliography}{19}
\bibitem{evt94a} Y.G. Evtushenko and V.G. Zhadan,
``Stable barrier-projection and barrier Newton methods in nonlinear
programming'',
\emph{Optimization Methods and Software}, \textbf{3}, 237--256 (1994).
\bibitem{evt94b} Y.G. Evtushenko and V.G. Zhadan,
``Stable barrier-projection and barrier Newton methods in linear
programming'',
\emph{Computational Optimization and Applications}, \textbf{3},
289--303 (1994).
\end{thebibliography}
\end{document}