An example from Convex Optimization, page 680
The original equation:
I❤️LA implementation:
[`P₁` 0
 0    `P₃`][ L             0
            `P₃`ᵀC`P₂`ᵀU⁻¹ -L̃][U L⁻¹`P₁`ᵀB
                               0 Ũ        ][`P₂` 0
                                            0    I_n]
where
`P₁` ∈ ℝ^(m×m)
`P₂` ∈ ℝ^(m×m)
`P₃` ∈ ℝ^(n×n)
B ∈ ℝ^(m×n)
C ∈ ℝ^(n×m)
L ∈ ℝ^(m×m)
L̃ ∈ ℝ^(n×n)
U ∈ ℝ^(m×m)
Ũ ∈ ℝ^(n×n)
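A brief check of what this product encodes, assuming `P₁`, `P₂`, and `P₃` are permutation matrices (so each Pᵀ = P⁻¹); this assumption is not part of the where-clause above. Writing A = `P₁`LU`P₂` and multiplying the four block factors out gives

[A  B
 C  CA⁻¹B − `P₃`L̃Ũ]

so if `P₃`L̃Ũ factors CA⁻¹B − D, the expression reassembles the full 2×2 block matrix [A B; C D] from a permuted LU factorization of its (1,1) block.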
I❤️LA compiled to C++/Eigen:
/*
[`P₁` 0
 0    `P₃`][ L             0
            `P₃`ᵀC`P₂`ᵀU⁻¹ -L̃][U L⁻¹`P₁`ᵀB
                               0 Ũ        ][`P₂` 0
                                            0    I_n]
where
`P₁` ∈ ℝ^(m×m)
`P₂` ∈ ℝ^(m×m)
`P₃` ∈ ℝ^(n×n)
B ∈ ℝ^(m×n)
C ∈ ℝ^(n×m)
L ∈ ℝ^(m×m)
L̃ ∈ ℝ^(n×n)
U ∈ ℝ^(m×m)
Ũ ∈ ℝ^(n×n)
*/
#include <Eigen/Core>
#include <Eigen/Dense>
#include <Eigen/Sparse>
#include <iostream>
#include <set>
#include <cassert>
#include <cstdlib>
#include <ctime>
struct convex_optimization_680ResultType {
    Eigen::MatrixXd ret;
    convex_optimization_680ResultType(const Eigen::MatrixXd & ret)
        : ret(ret)
    {}
};
convex_optimization_680ResultType convex_optimization_680(
    const Eigen::MatrixXd & P₁,
    const Eigen::MatrixXd & P₂,
    const Eigen::MatrixXd & P₃,
    const Eigen::MatrixXd & B,
    const Eigen::MatrixXd & C,
    const Eigen::MatrixXd & L,
    const Eigen::MatrixXd & L̃,
    const Eigen::MatrixXd & U,
    const Eigen::MatrixXd & Ũ)
{
    const long m = P₁.cols();
    const long n = P₃.cols();
    assert( P₁.rows() == m );
    assert( P₂.rows() == m );
    assert( P₂.cols() == m );
    assert( P₃.rows() == n );
    assert( B.rows() == m );
    assert( B.cols() == n );
    assert( C.rows() == n );
    assert( C.cols() == m );
    assert( L.rows() == m );
    assert( L.cols() == m );
    assert( L̃.rows() == n );
    assert( L̃.cols() == n );
    assert( U.rows() == m );
    assert( U.cols() == m );
    assert( Ũ.rows() == n );
    assert( Ũ.cols() == n );
    Eigen::MatrixXd ret_0(m+n, m+n);
    ret_0 << P₁, Eigen::MatrixXd::Zero(m, n),
             Eigen::MatrixXd::Zero(n, m), P₃;
    Eigen::MatrixXd ret_1(m+n, m+n);
    ret_1 << L, Eigen::MatrixXd::Zero(m, n),
             P₃.transpose() * C * P₂.transpose() * U.inverse(), -L̃;
    Eigen::MatrixXd ret_2(m+n, m+n);
    ret_2 << U, L.colPivHouseholderQr().solve(P₁.transpose()) * B,
             Eigen::MatrixXd::Zero(n, m), Ũ;
    Eigen::MatrixXd ret_3(m+n, m+n);
    ret_3 << P₂, Eigen::MatrixXd::Zero(m, n),
             Eigen::MatrixXd::Zero(n, m), Eigen::MatrixXd::Identity(n, n);
    Eigen::MatrixXd ret = ret_0 * ret_1 * ret_2 * ret_3;
    return convex_optimization_680ResultType(ret);
}
void generateRandomData(Eigen::MatrixXd & P₁,
                        Eigen::MatrixXd & P₂,
                        Eigen::MatrixXd & P₃,
                        Eigen::MatrixXd & B,
                        Eigen::MatrixXd & C,
                        Eigen::MatrixXd & L,
                        Eigen::MatrixXd & L̃,
                        Eigen::MatrixXd & U,
                        Eigen::MatrixXd & Ũ)
{
    const int m = rand()%10;
    const int n = rand()%10;
    P₁ = Eigen::MatrixXd::Random(m, m);
    P₂ = Eigen::MatrixXd::Random(m, m);
    P₃ = Eigen::MatrixXd::Random(n, n);
    B = Eigen::MatrixXd::Random(m, n);
    C = Eigen::MatrixXd::Random(n, m);
    L = Eigen::MatrixXd::Random(m, m);
    L̃ = Eigen::MatrixXd::Random(n, n);
    U = Eigen::MatrixXd::Random(m, m);
    Ũ = Eigen::MatrixXd::Random(n, n);
}
int main(int argc, char *argv[])
{
    srand((int)time(NULL));
    Eigen::MatrixXd P₁;
    Eigen::MatrixXd P₂;
    Eigen::MatrixXd P₃;
    Eigen::MatrixXd B;
    Eigen::MatrixXd C;
    Eigen::MatrixXd L;
    Eigen::MatrixXd L̃;
    Eigen::MatrixXd U;
    Eigen::MatrixXd Ũ;
    generateRandomData(P₁, P₂, P₃, B, C, L, L̃, U, Ũ);
    convex_optimization_680ResultType func_value = convex_optimization_680(P₁, P₂, P₃, B, C, L, L̃, U, Ũ);
    std::cout<<"return value:\n"<<func_value.ret<<std::endl;
    return 0;
}
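Assuming the Eigen headers are available, the listing above should build with an ordinary C++11 (or later) compiler invocation along the lines of g++ -std=c++11 -I /path/to/eigen convex_optimization_680.cpp -o convex_optimization_680; the exact include path depends on where Eigen is installed, and no extra linking is needed since Eigen is header-only.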
I❤️LA compiled to Python/NumPy/SciPy:
"""
[`P₁` 0
 0    `P₃`][ L             0
            `P₃`ᵀC`P₂`ᵀU⁻¹ -L̃][U L⁻¹`P₁`ᵀB
                               0 Ũ        ][`P₂` 0
                                            0    I_n]
where
`P₁` ∈ ℝ^(m×m)
`P₂` ∈ ℝ^(m×m)
`P₃` ∈ ℝ^(n×n)
B ∈ ℝ^(m×n)
C ∈ ℝ^(n×m)
L ∈ ℝ^(m×m)
L̃ ∈ ℝ^(n×n)
U ∈ ℝ^(m×m)
Ũ ∈ ℝ^(n×n)
"""
import numpy as np
import scipy
import scipy.linalg
from scipy import sparse
from scipy.integrate import quad
from scipy.optimize import minimize
class convex_optimization_680ResultType:
    def __init__( self, ret):
        self.ret = ret
def convex_optimization_680(P1, P2, P3, B, C, L, L̃, U, Ũ):
    P1 = np.asarray(P1, dtype=np.float64)
    P2 = np.asarray(P2, dtype=np.float64)
    P3 = np.asarray(P3, dtype=np.float64)
    B = np.asarray(B, dtype=np.float64)
    C = np.asarray(C, dtype=np.float64)
    L = np.asarray(L, dtype=np.float64)
    L̃ = np.asarray(L̃, dtype=np.float64)
    U = np.asarray(U, dtype=np.float64)
    Ũ = np.asarray(Ũ, dtype=np.float64)
    m = P1.shape[1]
    n = P3.shape[1]
    assert P1.shape == (m, m)
    assert P2.shape == (m, m)
    assert P3.shape == (n, n)
    assert B.shape == (m, n)
    assert C.shape == (n, m)
    assert L.shape == (m, m)
    assert L̃.shape == (n, n)
    assert U.shape == (m, m)
    assert Ũ.shape == (n, n)
    ret_0 = np.block([[P1, np.zeros((m, n))], [np.zeros((n, m)), P3]])
    ret_1 = np.block([[L, np.zeros((m, n))], [P3.T @ C @ P2.T @ np.linalg.inv(U), -L̃]])
    ret_2 = np.block([[U, np.linalg.solve(L, P1.T) @ B], [np.zeros((n, m)), Ũ]])
    ret_3 = np.block([[P2, np.zeros((m, n))], [np.zeros((n, m)), np.identity(n)]])
    ret = ret_0 @ ret_1 @ ret_2 @ ret_3
    return convex_optimization_680ResultType(ret)
def generateRandomData():
    m = np.random.randint(10)
    n = np.random.randint(10)
    P1 = np.random.randn(m, m)
    P2 = np.random.randn(m, m)
    P3 = np.random.randn(n, n)
    B = np.random.randn(m, n)
    C = np.random.randn(n, m)
    L = np.random.randn(m, m)
    L̃ = np.random.randn(n, n)
    U = np.random.randn(m, m)
    Ũ = np.random.randn(n, n)
    return P1, P2, P3, B, C, L, L̃, U, Ũ
if __name__ == '__main__':
    P1, P2, P3, B, C, L, L̃, U, Ũ = generateRandomData()
    print("P1:", P1)
    print("P2:", P2)
    print("P3:", P3)
    print("B:", B)
    print("C:", C)
    print("L:", L)
    print("L̃:", L̃)
    print("U:", U)
    print("Ũ:", Ũ)
    func_value = convex_optimization_680(P1, P2, P3, B, C, L, L̃, U, Ũ)
    print("return value: ", func_value.ret)
I❤️LA compiled to MATLAB:
function output = convex_optimization_680(P1, P2, P3, B, C, L, L_tilde, U, U_tilde)
% output = convex_optimization_680(`P₁`, `P₂`, `P₃`, B, C, L, L̃, U, Ũ)
%
%    [`P₁` 0
%     0    `P₃`][ L             0
%                `P₃`ᵀC`P₂`ᵀU⁻¹ -L̃][U L⁻¹`P₁`ᵀB
%                                   0 Ũ        ][`P₂` 0
%                                                0    I_n]
%
%    where
%
%    `P₁` ∈ ℝ^(m×m)
%    `P₂` ∈ ℝ^(m×m)
%    `P₃` ∈ ℝ^(n×n)
%    B ∈ ℝ^(m×n)
%    C ∈ ℝ^(n×m)
%    L ∈ ℝ^(m×m)
%    L̃ ∈ ℝ^(n×n)
%    U ∈ ℝ^(m×m)
%    Ũ ∈ ℝ^(n×n)
    if nargin==0
        warning('generating random input data');
        [P1, P2, P3, B, C, L, L_tilde, U, U_tilde] = generateRandomData();
    end
    function [P1, P2, P3, B, C, L, L_tilde, U, U_tilde] = generateRandomData()
        m = randi(10);
        n = randi(10);
        P1 = randn(m, m);
        P2 = randn(m, m);
        P3 = randn(n, n);
        B = randn(m, n);
        C = randn(n, m);
        L = randn(m, m);
        L_tilde = randn(n, n);
        U = randn(m, m);
        U_tilde = randn(n, n);
    end
    m = size(P1, 2);
    n = size(P3, 2);
    assert( isequal(size(P1), [m, m]) );
    assert( isequal(size(P2), [m, m]) );
    assert( isequal(size(P3), [n, n]) );
    assert( isequal(size(B), [m, n]) );
    assert( isequal(size(C), [n, m]) );
    assert( isequal(size(L), [m, m]) );
    assert( isequal(size(L_tilde), [n, n]) );
    assert( isequal(size(U), [m, m]) );
    assert( isequal(size(U_tilde), [n, n]) );
    ret_0 = [[P1, zeros(m, n)]; [zeros(n, m), P3]];
    ret_1 = [[L, zeros(m, n)]; [P3' * C * P2' * inv(U), -L_tilde]];
    ret_2 = [[U, (L\P1') * B]; [zeros(n, m), U_tilde]];
    ret_3 = [[P2, zeros(m, n)]; [zeros(n, m), speye(n)]];
    ret = ret_0 * ret_1 * ret_2 * ret_3;
    output.ret = ret;
end
I❤️LA compiled to LaTeX:
\documentclass[12pt]{article}
\usepackage{mathdots}
\usepackage[bb=boondox]{mathalfa}
\usepackage{mathtools}
\usepackage{amssymb}
\usepackage{libertine}
\DeclareMathOperator*{\argmax}{arg\,max}
\DeclareMathOperator*{\argmin}{arg\,min}
\usepackage[paperheight=8in,paperwidth=4in,margin=.3in,heightrounded]{geometry}
\let\originalleft\left
\let\originalright\right
\renewcommand{\left}{\mathopen{}\mathclose\bgroup\originalleft}
\renewcommand{\right}{\aftergroup\egroup\originalright}
\begin{document}
\begin{center}
\resizebox{\textwidth}{!}
{
\begin{minipage}[c]{\textwidth}
\begin{align*}
\omit \span \begin{bmatrix}
\textit{P₁} & 0\\
0 & \textit{P₃}\\
\end{bmatrix}\begin{bmatrix}
\mathit{L} & 0\\
{\textit{P₃}}^T\mathit{C}{\textit{P₂}}^T\mathit{U}^{-1} & -\textit{L̃}\\
\end{bmatrix}\begin{bmatrix}
\mathit{U} & \mathit{L}^{-1}{\textit{P₁}}^T\mathit{B}\\
0 & \textit{Ũ}\\
\end{bmatrix}\begin{bmatrix}
\textit{P₂} & 0\\
0 & I_{ \mathit{n} }\\
\end{bmatrix} \\
\intertext{where}
\textit{P₁} & \in \mathbb{R}^{ \mathit{m} \times \mathit{m} } \\
\textit{P₂} & \in \mathbb{R}^{ \mathit{m} \times \mathit{m} } \\
\textit{P₃} & \in \mathbb{R}^{ \mathit{n} \times \mathit{n} } \\
\mathit{B} & \in \mathbb{R}^{ \mathit{m} \times \mathit{n} } \\
\mathit{C} & \in \mathbb{R}^{ \mathit{n} \times \mathit{m} } \\
\mathit{L} & \in \mathbb{R}^{ \mathit{m} \times \mathit{m} } \\
\textit{L̃} & \in \mathbb{R}^{ \mathit{n} \times \mathit{n} } \\
\mathit{U} & \in \mathbb{R}^{ \mathit{m} \times \mathit{m} } \\
\textit{Ũ} & \in \mathbb{R}^{ \mathit{n} \times \mathit{n} } \\
\\
\end{align*}
\end{minipage}
}
\end{center}
\end{document}
I❤️LA LaTeX output: