Use a closed-form gradient in maximize or minimize
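
This example shows how to pass a closed-form (analytical) gradient to minimize instead of letting estimagic approximate the gradient numerically, which typically reduces the number of criterion evaluations needed for convergence.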

[1]:
from estimagic import minimize
import pandas as pd
import numpy as np
[2]:
def sum_of_squares(params):
    """Criterion: sum of squared parameter values, minimized at zero."""
    return (params["value"] ** 2).sum()


def sum_of_squares_gradient(params):
    """Closed-form gradient of the criterion: d(x ** 2)/dx = 2 * x."""
    return params["value"].to_numpy() * 2


start_params = pd.DataFrame()
start_params["value"] = [1, 2.5, -1]
[9]:
info, params = minimize(
    criterion=sum_of_squares,
    params=start_params,
    algorithm="scipy_L-BFGS-B",
    gradient=sum_of_squares_gradient,
)
[10]:
info
[10]:
{'status': 'success',
 'message': b'CONVERGENCE: NORM_OF_PROJECTED_GRADIENT_<=_PGTOL',
 'x': [0.0, 0.0, 0.0],
 'success': True,
 'fitness': 0.0,
 'n_evaluations': 3,
 'jacobian': array([0., 0., 0.]),
 'hessian': None,
 'n_evaluations_jacobian': None,
 'n_evaluations_hessian': None,
 'n_iterations': 2,
 'max_constraints_violations': None,
 'hessian_inverse': <3x3 LbfgsInvHessProduct with dtype=float64>}
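
With the closed-form gradient, the optimizer converges after only 3 criterion evaluations and 2 iterations. Had no gradient been passed, estimagic would have approximated it numerically, at the cost of extra criterion evaluations per iteration.

The same mechanism works for maximize, as the section title suggests. A minimal sketch, assuming maximize mirrors the minimize call above; the negated criterion and its gradient are illustrative, not from the original notebook:

[ ]:
from estimagic import maximize


def neg_sum_of_squares(params):
    """Negated criterion, maximized at zero."""
    return -(params["value"] ** 2).sum()


def neg_sum_of_squares_gradient(params):
    """Closed-form gradient of the negated criterion: -2 * x."""
    return params["value"].to_numpy() * -2


info_max, params_max = maximize(
    criterion=neg_sum_of_squares,
    params=start_params,
    algorithm="scipy_L-BFGS-B",
    gradient=neg_sum_of_squares_gradient,
)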