Skip to content

Optimization (API)

Optimizer Base

pyadm1ode_calibration.calibration.optimization.optimizer.Optimizer(bounds, max_iterations=100, tolerance=1e-06, verbose=True)

Bases: ABC

Abstract base class for optimization algorithms.

All optimizers must implement the optimize() method and provide a consistent interface for parameter calibration.

Parameters:

Name Type Description Default
bounds Dict[str, Tuple[float, float]]

Parameter bounds as {name: (min, max)}.

required
max_iterations int

Maximum number of iterations. Defaults to 100.

100
tolerance float

Convergence tolerance. Defaults to 1e-6.

1e-06
verbose bool

Whether to enable progress output. Defaults to True.

True
Source code in pyadm1ode_calibration/calibration/optimization/optimizer.py
def __init__(
    self,
    bounds: Dict[str, Tuple[float, float]],
    max_iterations: int = 100,
    tolerance: float = 1e-6,
    verbose: bool = True,
):
    """Store optimizer configuration and initialise bookkeeping state.

    Args:
        bounds (Dict[str, Tuple[float, float]]): Parameter bounds as {name: (min, max)}.
        max_iterations (int): Maximum number of iterations. Defaults to 100.
        tolerance (float): Convergence tolerance. Defaults to 1e-6.
        verbose (bool): Whether to enable progress output. Defaults to True.
    """
    self.bounds = bounds
    self.verbose = verbose
    self.tolerance = tolerance
    self.max_iterations = max_iterations

    # Fix a parameter ordering once; bounds_array rows follow this order.
    self.parameter_names = list(bounds.keys())
    self.bounds_array = np.array([self.bounds[key] for key in self.parameter_names])

    # Evaluation bookkeeping shared by all concrete optimizers.
    self.history: List[Dict[str, Any]] = []
    self._best_value = float("inf")
    self._n_evaluations = 0

Functions

optimize(objective_func, initial_guess=None) abstractmethod

Run the optimization process.

Parameters:

Name Type Description Default
objective_func Callable

Function to minimize f(x) -> float.

required
initial_guess Optional[ndarray]

Optional starting point for the optimizer.

None

Returns:

Name Type Description
OptimizationResult OptimizationResult

The result of the optimization.

Source code in pyadm1ode_calibration/calibration/optimization/optimizer.py
@abstractmethod
def optimize(
    self, objective_func: Callable[[np.ndarray], float], initial_guess: Optional[np.ndarray] = None
) -> OptimizationResult:
    """
    Run the optimization process.

    Concrete subclasses implement the actual search strategy; this abstract
    definition only fixes the calling convention.

    Args:
        objective_func (Callable): Function to minimize f(x) -> float.
        initial_guess (Optional[np.ndarray]): Optional starting point for the optimizer.

    Returns:
        OptimizationResult: The result of the optimization.
    """

Objective Functions

pyadm1ode_calibration.calibration.optimization.objective.MultiObjectiveFunction(simulator, measurements_dict, objectives, weights, parameter_names, error_metric='rmse', normalize=True)

Bases: ObjectiveFunction

Multi-objective function with weighted combination.

Combines errors from multiple plant outputs into a single scalar value.

Parameters:

Name Type Description Default
simulator Callable

Function that takes parameters and returns outputs.

required
measurements_dict Dict[str, ndarray]

Measured values for each objective.

required
objectives List[str]

Names of variables to include in the objective.

required
weights Dict[str, float]

Relative weights for each objective.

required
parameter_names List[str]

Names of optimized parameters.

required
error_metric str

Metric to minimize (e.g., 'rmse', 'mae'). Defaults to 'rmse'.

'rmse'
normalize bool

Whether to normalize errors by measurement mean. Defaults to True.

True
Source code in pyadm1ode_calibration/calibration/optimization/objective.py
def __init__(
    self,
    simulator: Callable[[Dict[str, float]], Dict[str, np.ndarray]],
    measurements_dict: Dict[str, np.ndarray],
    objectives: List[str],
    weights: Dict[str, float],
    parameter_names: List[str],
    error_metric: str = "rmse",
    normalize: bool = True,
):
    """Set up a weighted multi-output calibration objective.

    Args:
        simulator (Callable): Function that takes parameters and returns outputs.
        measurements_dict (Dict[str, np.ndarray]): Measured values for each objective.
        objectives (List[str]): Names of variables to include in the objective.
        weights (Dict[str, float]): Relative weights for each objective.
        parameter_names (List[str]): Names of optimized parameters.
        error_metric (str): Metric to minimize (e.g., 'rmse', 'mae'). Defaults to 'rmse'.
        normalize (bool): Whether to normalize errors by measurement mean. Defaults to True.
    """
    super().__init__(parameter_names)
    self.simulator = simulator
    self.measurements_dict = measurements_dict
    self.objectives = objectives
    self.weights = weights
    # Metric names are matched case-insensitively.
    self.error_metric = error_metric.lower()
    self.normalize = normalize

    # Rescale weights to sum to 1; left untouched when they sum to <= 0.
    weight_sum = sum(weights.values())
    if weight_sum > 0:
        self.weights = {key: value / weight_sum for key, value in weights.items()}

Functions

__call__(x)

Evaluate the multi-objective weighted sum.

Parameters:

Name Type Description Default
x ndarray

Parameter array.

required

Returns:

Name Type Description
float float

Total weighted error.

Source code in pyadm1ode_calibration/calibration/optimization/objective.py
def __call__(self, x: np.ndarray) -> float:
    """
    Evaluate the multi-objective weighted sum.

    Args:
        x (np.ndarray): Parameter array.

    Returns:
        float: Total weighted error.
    """
    candidate = self._params_to_dict(x)
    try:
        sim_outputs = self.simulator(candidate)
        weighted_total = 0.0
        evaluated = 0

        for name in self.objectives:
            # Skip objectives missing from either the simulation or the data.
            if name not in sim_outputs or name not in self.measurements_dict:
                continue

            measured = self.measurements_dict[name]
            metrics = ErrorMetrics.compute(measured, sim_outputs[name])

            # nse and r2 are "higher is better", hence the sign flip;
            # unknown metric names fall back to rmse.
            selector = {
                "mse": metrics.mse,
                "rmse": metrics.rmse,
                "mae": metrics.mae,
                "mape": metrics.mape,
                "nse": -metrics.nse,
                "r2": -metrics.r2,
            }
            err = selector.get(self.error_metric, metrics.rmse)

            if self.normalize:
                # Scale by mean magnitude so differently-scaled outputs are comparable.
                scale = np.mean(np.abs(measured))
                if scale > 1e-10:
                    err = err / scale

            weighted_total += self.weights.get(name, 0.0) * err
            evaluated += 1

        if evaluated == 0:
            # Nothing could be evaluated: return a large sentinel error.
            return 1e10
        return weighted_total
    except Exception:
        # Failed simulations are treated as very poor candidates.
        return 1e10

pyadm1ode_calibration.calibration.optimization.objective.WeightedSumObjective(simulator, measurements_dict, objectives, parameter_names, weights=None, **kwargs)

Bases: MultiObjectiveFunction

Convenience class for MultiObjectiveFunction with equal weights by default.

Parameters:

Name Type Description Default
simulator Callable

Function that takes parameters and returns outputs.

required
measurements_dict Dict[str, ndarray]

Measured values for each objective.

required
objectives List[str]

Names of variables to include in the objective.

required
parameter_names List[str]

Names of optimized parameters.

required
weights Optional[Dict[str, float]]

Optional custom weights.

None
**kwargs Any

Passed to MultiObjectiveFunction.

{}
Source code in pyadm1ode_calibration/calibration/optimization/objective.py
def __init__(
    self,
    simulator: Callable[[Dict[str, float]], Dict[str, np.ndarray]],
    measurements_dict: Dict[str, np.ndarray],
    objectives: List[str],
    parameter_names: List[str],
    weights: Optional[Dict[str, float]] = None,
    **kwargs: Any,
):
    """Convenience constructor that defaults to equal weights per objective.

    Args:
        simulator (Callable): Function that takes parameters and returns outputs.
        measurements_dict (Dict[str, np.ndarray]): Measured values for each objective.
        objectives (List[str]): Names of variables to include in the objective.
        parameter_names (List[str]): Names of optimized parameters.
        weights (Optional[Dict[str, float]]): Optional custom weights.
        **kwargs (Any): Passed to MultiObjectiveFunction.

    Raises:
        ValueError: If *objectives* is empty while *weights* is None.
    """
    if weights is None:
        # Guard against a bare ZeroDivisionError from 1.0 / len(objectives).
        if not objectives:
            raise ValueError("objectives must not be empty when weights is None")
        weights = {obj: 1.0 / len(objectives) for obj in objectives}
    super().__init__(simulator, measurements_dict, objectives, weights, parameter_names, **kwargs)

Constraints

pyadm1ode_calibration.calibration.optimization.constraints.ParameterConstraints(penalty_function=None)

Manager for parameter constraints.

Source code in pyadm1ode_calibration/calibration/optimization/constraints.py
def __init__(self, penalty_function: Optional[PenaltyFunction] = None):
    """Initialise empty constraint registries.

    Args:
        penalty_function (Optional[PenaltyFunction]): Penalty applied to soft
            constraint violations. Falls back to QuadraticPenalty when None.
    """
    # Registries for the three supported constraint families.
    self.box_constraints: Dict[str, BoxConstraint] = {}
    self.linear_constraints: List[LinearConstraint] = []
    self.nonlinear_constraints: List[NonlinearConstraint] = []
    # Per-constraint weights consumed by calculate_penalty().
    self.penalty_weights: Dict[str, float] = {}
    self.penalty_function = penalty_function or QuadraticPenalty()

Functions

add_box_constraint(parameter_name, lower, upper, hard=True, weight=1.0)

Add box constraint.

Source code in pyadm1ode_calibration/calibration/optimization/constraints.py
def add_box_constraint(self, parameter_name: str, lower: float, upper: float, hard: bool = True, weight: float = 1.0):
    """Register a box (bounds) constraint for a single parameter.

    Hard constraints make violating points infeasible; soft ones contribute a
    penalty scaled by *weight*.
    """
    constraint = BoxConstraint(parameter_name, lower, upper, hard)
    self.box_constraints[parameter_name] = constraint
    if hard:
        return
    # Soft constraints get an entry consumed by calculate_penalty().
    self.penalty_weights[f"box_{parameter_name}"] = weight

add_linear_equality(coefficients, target, weight=1.0)

Add linear equality.

Source code in pyadm1ode_calibration/calibration/optimization/constraints.py
def add_linear_equality(self, coefficients, target, weight=1.0):
    """Register a linear equality constraint sum(c_i * x_i) == target.

    Args:
        coefficients: Mapping of parameter name to coefficient.
        target: Required value of the linear combination.
        weight: Penalty weight applied when the constraint is violated.
    """
    c = LinearConstraint(coefficients, None, target, "equality")
    self.linear_constraints.append(c)
    index = len(self.linear_constraints)
    self.penalty_weights[f"linear_eq_{index}"] = weight
    # calculate_penalty() looks weights up under "linear_{i}" for every
    # linear constraint, so also store the weight under that key; the
    # historical "linear_eq_{i}" key is kept for backward compatibility.
    self.penalty_weights[f"linear_{index}"] = weight

add_linear_inequality(coefficients, lower_bound=None, upper_bound=None, weight=1.0)

Add linear inequality.

Source code in pyadm1ode_calibration/calibration/optimization/constraints.py
def add_linear_inequality(self, coefficients, lower_bound=None, upper_bound=None, weight=1.0):
    """Register a linear inequality constraint on a weighted parameter sum.

    Either bound may be None to leave that side unconstrained.
    """
    constraint = LinearConstraint(coefficients, lower_bound, upper_bound, "inequality")
    self.linear_constraints.append(constraint)
    # Weight key is indexed by the constraint's 1-based position in the list.
    key = f"linear_{len(self.linear_constraints)}"
    self.penalty_weights[key] = weight

add_nonlinear_constraint(name, function, constraint_type='inequality', weight=1.0)

Add nonlinear constraint.

Source code in pyadm1ode_calibration/calibration/optimization/constraints.py
def add_nonlinear_constraint(self, name, function, constraint_type="inequality", weight=1.0):
    """Register a nonlinear constraint evaluated via a user-supplied callback."""
    constraint = NonlinearConstraint(name, function, constraint_type)
    self.nonlinear_constraints.append(constraint)
    # Nonlinear weights are keyed by constraint name, not list position.
    key = f"nonlinear_{name}"
    self.penalty_weights[key] = weight

calculate_penalty(parameters)

Calculate total penalty for all violated constraints.

Source code in pyadm1ode_calibration/calibration/optimization/constraints.py
def calculate_penalty(self, parameters: Dict[str, float]) -> float:
    """Calculate total penalty for all violated constraints.

    Args:
        parameters (Dict[str, float]): Parameter values keyed by name.

    Returns:
        float: Sum of soft-constraint penalties; ``inf`` as soon as any hard
        box constraint is violated.
    """
    penalty = 0.0
    for name, constraint in self.box_constraints.items():
        # Missing parameters are evaluated at 0.0, matching is_feasible().
        violation = constraint.violation(parameters.get(name, 0.0))
        if violation > 0:
            if constraint.hard:
                # A violated hard bound makes the point infeasible outright.
                return float("inf")
            penalty += self.penalty_function(violation, self.penalty_weights.get(f"box_{name}", 1.0))
    for index, constraint in enumerate(self.linear_constraints, 1):
        violation = constraint.violation(parameters)
        if violation > 0:
            # Equality constraints store their weight under "linear_eq_{i}"
            # (see add_linear_equality), so fall back to that key before the
            # default of 1.0; previously those weights were silently ignored.
            weight = self.penalty_weights.get(
                f"linear_{index}", self.penalty_weights.get(f"linear_eq_{index}", 1.0)
            )
            penalty += self.penalty_function(violation, weight)
    for constraint in self.nonlinear_constraints:
        violation = constraint.violation(parameters)
        if violation > 0:
            penalty += self.penalty_function(violation, self.penalty_weights.get(f"nonlinear_{constraint.name}", 1.0))
    return penalty

get_scipy_constraints(parameter_names)

Convert constraints to scipy format.

Source code in pyadm1ode_calibration/calibration/optimization/constraints.py
def get_scipy_constraints(self, parameter_names: List[str]) -> List[Dict]:
    """Convert constraints to scipy format.

    Translates the stored linear and nonlinear constraints into the
    dict-based specification accepted by scipy.optimize.minimize
    ({"type": "eq"|"ineq", "fun": callable}); scipy's "ineq" convention is
    fun(x) >= 0. Box constraints are not included here — presumably they are
    passed as bounds elsewhere; TODO confirm against the optimizer callers.

    Args:
        parameter_names (List[str]): Ordering of parameters in the
            optimization vector x; coefficient arrays are aligned to it.

    Returns:
        List[Dict]: scipy-style constraint dictionaries.
    """
    scipy_constraints = []
    for constraint in self.linear_constraints:
        # Parameters without an explicit coefficient contribute 0.
        coef_array = np.array([constraint.coefficients.get(name, 0.0) for name in parameter_names])
        if constraint.constraint_type == "equality":
            # NOTE: the default arguments (c=..., b=...) freeze the current
            # loop values; a plain closure would see only the last iteration.
            scipy_constraints.append(
                {"type": "eq", "fun": lambda x, c=coef_array, b=constraint.upper_bound: np.dot(c, x) - b}
            )
        else:
            if constraint.lower_bound is not None:
                # c.x >= lower  ->  c.x - lower >= 0
                scipy_constraints.append(
                    {"type": "ineq", "fun": lambda x, c=coef_array, b=constraint.lower_bound: np.dot(c, x) - b}
                )
            if constraint.upper_bound is not None:
                # c.x <= upper  ->  upper - c.x >= 0
                scipy_constraints.append(
                    {"type": "ineq", "fun": lambda x, c=coef_array, b=constraint.upper_bound: b - np.dot(c, x)}
                )
    for constraint in self.nonlinear_constraints:

        def constraint_func(x, names=parameter_names, func=constraint.function):
            # Rebuild the {name: value} mapping the user callback expects;
            # defaults again freeze the per-iteration function reference.
            params = {name: val for name, val in zip(names, x)}
            return func(params)

        if constraint.constraint_type == "equality":
            scipy_constraints.append({"type": "eq", "fun": constraint_func})
        else:
            # Sign flip: -f(x) >= 0 means f(x) <= 0, i.e. the user callback
            # apparently follows a g(x) <= 0 convention — TODO confirm
            # against NonlinearConstraint.violation.
            scipy_constraints.append({"type": "ineq", "fun": lambda x, f=constraint_func: -f(x)})
    return scipy_constraints

is_feasible(parameters)

Check if parameters satisfy all hard constraints.

Source code in pyadm1ode_calibration/calibration/optimization/constraints.py
def is_feasible(self, parameters: Dict[str, float]) -> bool:
    """Check if parameters satisfy all hard constraints.

    Soft box constraints are ignored here; linear and nonlinear constraints
    are always treated as hard. Parameters missing from the mapping are
    evaluated at 0.0 for box checks.
    """
    hard_boxes_ok = all(
        constraint.is_feasible(parameters.get(constraint.parameter_name, 0.0))
        for constraint in self.box_constraints.values()
        if constraint.hard
    )
    if not hard_boxes_ok:
        return False
    if not all(constraint.is_feasible(parameters) for constraint in self.linear_constraints):
        return False
    return all(constraint.is_feasible(parameters) for constraint in self.nonlinear_constraints)

project_to_feasible(parameters)

Project parameters to feasible region (box constraints only).

Source code in pyadm1ode_calibration/calibration/optimization/constraints.py
def project_to_feasible(self, parameters: Dict[str, float]) -> Dict[str, float]:
    """Project parameters to feasible region (box constraints only).

    Values without a registered box constraint pass through unchanged; linear
    and nonlinear constraints are not enforced here. A new dict is returned.
    """
    return {
        name: (self.box_constraints[name].project(value) if name in self.box_constraints else value)
        for name, value in parameters.items()
    }

validate_parameters(parameters)

Validate parameters and return detailed error messages.

Source code in pyadm1ode_calibration/calibration/optimization/constraints.py
def validate_parameters(self, parameters: Dict[str, float]) -> Tuple[bool, List[str]]:
    """Validate parameters and return detailed error messages.

    Args:
        parameters (Dict[str, float]): Parameter values keyed by name.

    Returns:
        Tuple[bool, List[str]]: (all constraints satisfied, list of
        human-readable violation messages).
    """
    errors = []
    for name, constraint in self.box_constraints.items():
        if name in parameters:
            value = parameters[name]
            if not constraint.is_feasible(value):
                errors.append(
                    f"Parameter '{name}' = {value:.4f} violates bounds [{constraint.lower:.4f}, {constraint.upper:.4f}]"
                )
    for i, constraint in enumerate(self.linear_constraints, 1):
        if not constraint.is_feasible(parameters):
            value = constraint.evaluate(parameters)
            if constraint.constraint_type == "equality":
                errors.append(f"Linear constraint {i}: {value:.4f} != {constraint.upper_bound:.4f}")
            else:
                # Compare with `is not None`: a bound of 0.0 is falsy, so the
                # previous truthiness check silently skipped zero bounds and
                # reported such constraints as satisfied.
                if constraint.lower_bound is not None and value < constraint.lower_bound:
                    errors.append(f"Linear constraint {i}: {value:.4f} < {constraint.lower_bound:.4f}")
                if constraint.upper_bound is not None and value > constraint.upper_bound:
                    errors.append(f"Linear constraint {i}: {value:.4f} > {constraint.upper_bound:.4f}")
    for constraint in self.nonlinear_constraints:
        if not constraint.is_feasible(parameters):
            value = constraint.evaluate(parameters)
            errors.append(
                f"Nonlinear constraint '{constraint.name}': g(x) = {value:.4f} violates "
                f"{constraint.constraint_type} constraint"
            )
    return len(errors) == 0, errors