Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
26 commits
Select commit Hold shift + click to select a range
612e272
Add files via upload
MrzvskK Jan 20, 2026
82760b5
Merge pull request #1 from MrzvskK/MrzvskK-enting-tutorial
MrzvskK Jan 20, 2026
276e30b
trailing-whitespace
MrzvskK Jan 21, 2026
a57d154
Merge branch 'experimental-design:main' into main
MrzvskK Jan 23, 2026
7824dd1
Update docs/tutorials/advanced_examples/EntingStrategy_Suzuki-Miyaura…
MrzvskK Jan 23, 2026
01dd371
Merge branch 'experimental-design:main' into main
MrzvskK Jan 28, 2026
62e92b1
Add helper functions for nonlinear constraints
MrzvskK Feb 16, 2026
b5b1092
updated nonlinear.py and reverting api to the earlier version
MrzvskK Feb 17, 2026
e28f1fd
delete unnecessary test
MrzvskK Feb 18, 2026
ff9ad1d
update
MrzvskK Feb 20, 2026
92ee319
added smoke test and changes yml file
MrzvskK Feb 20, 2026
a10ff2d
working nonlinear constraints
MrzvskK Feb 20, 2026
302cae2
nonlinear constraints, tests and an example
MrzvskK Mar 4, 2026
343e760
edits to the nonlinear constraints, making sure all tests pass and tu…
MrzvskK Mar 4, 2026
61a27a5
small edits to nonlinear package
MrzvskK Mar 4, 2026
a7cb501
tutorials to qmd format
MrzvskK Mar 4, 2026
669157f
tutorials to qmd
MrzvskK Mar 4, 2026
0c0404d
tutorials to qmd
MrzvskK Mar 4, 2026
8220c8b
Merge branch 'upstream-main' into nonlinearC
MrzvskK Mar 4, 2026
67fbaa2
delete enting tutorial to avoid merge conflicts
MrzvskK Mar 4, 2026
164dc81
addressed some errors
MrzvskK Mar 16, 2026
d390ef1
should fix quarto error with torch generator
MrzvskK Mar 17, 2026
31bdd72
changing nonlinear.py to work for pd.dataframe
MrzvskK Mar 17, 2026
4235d09
added torch generator
MrzvskK Mar 17, 2026
c9ff32b
trying to fix the remaining tests
MrzvskK Mar 17, 2026
a116486
another fix
MrzvskK Mar 17, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions _quarto.yml
Original file line number Diff line number Diff line change
Expand Up @@ -210,6 +210,7 @@ website:
- docs/tutorials/advanced_examples/custom_sobo.qmd
- docs/tutorials/advanced_examples/desirability_objectives.qmd
- docs/tutorials/advanced_examples/genetic_algorithm.qmd
- docs/tutorials/advanced_examples/enting_strategy_suzuki-miyaura_reaction.qmd
- docs/tutorials/advanced_examples/merging_objectives.qmd
- docs/tutorials/advanced_examples/multifidelity_bo.qmd
- docs/tutorials/advanced_examples/objectives_on_inputs.qmd
Expand Down
4 changes: 2 additions & 2 deletions bofire/data_models/constraints/interpoint.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,8 +55,8 @@ def is_fulfilled(
i * multiplicity : min((i + 1) * multiplicity, len(experiments))
]
if not np.allclose(batch, batch[0]):
return pd.Series([False])
return pd.Series([True])
return pd.Series([False] * len(experiments), index=experiments.index)
return pd.Series([True] * len(experiments), index=experiments.index)

def __call__(self, experiments: pd.DataFrame) -> pd.Series:
"""Numerically evaluates the constraint. Returns the distance to the constraint fulfillment
Expand Down
150 changes: 142 additions & 8 deletions bofire/data_models/constraints/nonlinear.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import inspect
import warnings
from typing import Callable, Dict, Literal, Optional, Union
from typing import TYPE_CHECKING, Callable, Dict, Literal, Optional, Union

import numpy as np
import pandas as pd
Expand All @@ -14,7 +14,9 @@

torch_tensor = torch.tensor
torch_diag = torch.diag
_TORCH_AVAILABLE = True
except ImportError:
_TORCH_AVAILABLE = False

def error_func(*args, **kwargs):
raise NotImplementedError("torch must be installed to use this functionality")
Expand All @@ -24,6 +26,9 @@ def error_func(*args, **kwargs):
torch_diag = error_func
torch_hessian = error_func # ty: ignore[invalid-assignment]

if TYPE_CHECKING: # pragma: no cover
import torch as _torch

from bofire.data_models.constraints.constraint import (
EqualityConstraint,
InequalityConstraint,
Expand Down Expand Up @@ -52,6 +57,12 @@ class NonlinearConstraint(IntrapointConstraint):
)

def validate_inputs(self, inputs: Inputs):
"""Validate that all constraint features are continuous inputs.
Args:
inputs (Inputs): Input feature collection from the domain.
Raises:
ValueError: If any feature is not a ContinuousInput.
"""
keys = inputs.get_keys(ContinuousInput)
for f in self.features:
if f not in keys:
Expand All @@ -61,6 +72,7 @@ def validate_inputs(self, inputs: Inputs):

@model_validator(mode="after")
def validate_features(self):
"""Validate that provided features match callable expression arguments."""
if isinstance(self.expression, Callable):
features = list(inspect.getfullargspec(self.expression).args)
if set(features) != set(self.features):
Expand All @@ -72,6 +84,13 @@ def validate_features(self):
@field_validator("jacobian_expression")
@classmethod
def set_jacobian_expression(cls, jacobian_expression, info) -> Union[str, Callable]:
"""Auto-compute Jacobian using SymPy for string expressions if not provided.
Args:
jacobian_expression: User-provided Jacobian or None.
info: Pydantic validation context.
Returns:
Union[str, Callable]: Jacobian expression.
"""
if (
jacobian_expression is None
and "features" in info.data.keys()
Expand Down Expand Up @@ -107,6 +126,12 @@ def set_jacobian_expression(cls, jacobian_expression, info) -> Union[str, Callab
@field_validator("hessian_expression")
@classmethod
def set_hessian_expression(cls, hessian_expression, info) -> Union[str, Callable]:
"""Auto-compute Hessian using SymPy for string expressions if not provided.
Args: hessian_expression: User-provided Hessian or None.
info: Pydantic validation context.
Returns:
Union[str, Callable]: Hessian expression.
"""
if (
hessian_expression is None
and "features" in info.data.keys()
Expand Down Expand Up @@ -146,18 +171,102 @@ def set_hessian_expression(cls, hessian_expression, info) -> Union[str, Callable

return hessian_expression

def __call__(
    self, experiments: Union[pd.DataFrame, "_torch.Tensor"]
) -> Union[pd.Series, "_torch.Tensor"]:
    """Numerically evaluate the constraint expression.

    Args:
        experiments: Either a DataFrame with one column per feature, or a
            torch tensor of shape ``(n_features,)``, ``(batch, n_features)``
            or ``(n_restarts, q, n_features)`` as produced by BoTorch.

    Returns:
        Constraint values: a ``pd.Series`` for DataFrame input (original
        index preserved) or a torch tensor for tensor input.

    Raises:
        ValueError: If ``self.expression`` is neither a string nor a callable.
    """
    # --- Tensor input (BoTorch optimization path) ---
    if _TORCH_AVAILABLE and isinstance(experiments, torch.Tensor):
        # 3D tensor from BoTorch: [n_restarts, q, n_features].
        # Flatten to 2D, evaluate once, and restore the batch shape.
        if experiments.ndim == 3:
            batch_size, q, n_features = experiments.shape
            flat_results = self.__call__(experiments.reshape(-1, n_features))
            return flat_results.reshape(batch_size, q)

        def _point_as_dict(point):
            # Map each feature name to the matching tensor entry so the
            # expression can reference features by name.
            return {feat: point[i] for i, feat in enumerate(self.features)}

        if isinstance(self.expression, str):
            # NOTE(review): eval() on the expression string — acceptable only
            # while expressions come from trusted model configuration, never
            # from untrusted user input.
            def _eval_point(point):
                return eval(
                    self.expression,
                    {"__builtins__": {}, "torch": torch},
                    _point_as_dict(point),
                )

            if experiments.ndim == 1:
                # Single point: shape (n_features,).
                return _eval_point(experiments)
            # Batch: shape (batch_size, n_features).
            return torch.stack([_eval_point(point) for point in experiments])

        elif isinstance(self.expression, Callable):
            if experiments.ndim == 1:
                return self.expression(**_point_as_dict(experiments))
            return torch.stack(
                [
                    self.expression(**_point_as_dict(point))
                    for point in experiments
                ]
            )

    # --- DataFrame input ---
    if isinstance(self.expression, str):
        return experiments.eval(self.expression)
    elif isinstance(self.expression, Callable):
        # Support both:
        # - torch installed: pass torch tensors (enables torch-based callables)
        # - torch not installed: pass numpy arrays (numpy-based callables)
        if _TORCH_AVAILABLE:
            func_input = {
                col: torch.tensor(
                    experiments[col].values,
                    dtype=torch.float64,
                    requires_grad=False,
                )
                for col in experiments.columns
            }
            out = self.expression(**func_input)
            if hasattr(out, "detach"):
                # Tensor result: move to CPU numpy before wrapping in a Series.
                out = out.detach().cpu().numpy()
            return pd.Series(
                np.asarray(out),
                index=experiments.index,  # Preserve original indices
            )

        func_input = {
            col: experiments[col].to_numpy() for col in experiments.columns
        }
        out = self.expression(**func_input)
        return pd.Series(np.asarray(out), index=experiments.index)
    raise ValueError("expression must be a string or callable")

def jacobian(self, experiments: pd.DataFrame) -> pd.DataFrame:
Expand Down Expand Up @@ -298,6 +407,31 @@ class NonlinearEqualityConstraint(NonlinearConstraint, EqualityConstraint):

type: Literal["NonlinearEqualityConstraint"] = "NonlinearEqualityConstraint"

def is_fulfilled(self, experiments: pd.DataFrame, tol: float = 1e-6) -> pd.Series:
    """Check whether the nonlinear equality constraint is fulfilled.

    During optimization this constraint is converted into two inequality
    constraints (f(x) <= tol and f(x) >= -tol), so validation consistently
    checks that the violation lies inside the same tolerance band.

    Args:
        experiments: DataFrame containing the candidate points to validate.
        tol: Tolerance for constraint fulfillment (default: 1e-6).

    Returns:
        Boolean Series, indexed like ``experiments``, indicating whether each
        candidate fulfills the constraint.
    """
    violation = self(experiments)
    # Absolute epsilon guards against floating-point boundary cases, e.g. a
    # violation of 1.0000000000001e-3 with tol=1e-3 should still pass.
    # (The former extra 1e-15 floor was dead: always dominated by 1e-9.)
    eps = max(tol * 1e-9, 1e-9)
    return pd.Series(np.abs(violation) <= (tol + eps), index=experiments.index)


class NonlinearInequalityConstraint(NonlinearConstraint, InequalityConstraint):
"""Nonlinear inequality constraint of the form 'expression <= 0'.
Expand Down
36 changes: 33 additions & 3 deletions bofire/data_models/strategies/predictives/acqf_optimization.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import warnings
import logging
from abc import abstractmethod
from typing import Literal, Optional, Type, Union

Expand All @@ -18,6 +18,9 @@
from bofire.data_models.types import IntPowerOfTwo


logger = logging.getLogger(__name__)


class AcquisitionOptimizer(BaseModel):
prefer_exhaustive_search_for_purely_categorical_domains: bool = True

Expand Down Expand Up @@ -138,14 +141,40 @@ def is_constraint_implemented(self, my_type: Type[constraints.Constraint]) -> bo
constraints.NonlinearInequalityConstraint,
constraints.NonlinearEqualityConstraint,
]:
return False
return True # was False
return True

def validate_domain(self, domain: Domain):
def validate_nonlinear_equality_constraints(domain: Domain):
    """Enforce batch_limit=1 and n_restarts=1 for nonlinear constraints.

    Applies to both nonlinear equality AND inequality constraints (the
    isinstance check below covers both). Overridden settings are logged at
    WARNING level because user-provided configuration is being changed.
    """
    has_nonlinear = any(
        isinstance(
            c,
            (
                constraints.NonlinearEqualityConstraint,
                constraints.NonlinearInequalityConstraint,
            ),
        )
        for c in domain.constraints
    )
    if not has_nonlinear:
        return
    if self.batch_limit != 1:
        # warning, not info: user-supplied configuration is silently altered.
        logger.warning(
            "Nonlinear constraints require batch_limit=1. "
            "Overriding current value.",
        )
        # Use object.__setattr__ to bypass Pydantic's frozen model behavior
        object.__setattr__(self, "batch_limit", 1)
    if self.n_restarts != 1:
        logger.warning(
            "Nonlinear constraints require n_restarts=1 "
            "to avoid parallel batch optimization. Overriding current value.",
        )
        object.__setattr__(self, "n_restarts", 1)

def validate_local_search_config(domain: Domain):
if self.local_search_config is not None:
if has_local_search_region(domain) is False:
warnings.warn(
logger.info(
"`local_search_region` config is specified, but no local search region is defined in `domain`",
)
if (
Expand Down Expand Up @@ -182,6 +211,7 @@ def validate_exclude_constraints(domain: Domain):
"CategoricalExcludeConstraints can only be used with exhaustive search for purely categorical/discrete search spaces.",
)

validate_nonlinear_equality_constraints(domain)
validate_local_search_config(domain)
validate_interpoint_constraints(domain)
validate_exclude_constraints(domain)
Expand Down
2 changes: 2 additions & 0 deletions bofire/data_models/strategies/random.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
LinearEqualityConstraint,
LinearInequalityConstraint,
NChooseKConstraint,
NonlinearEqualityConstraint,
NonlinearInequalityConstraint,
ProductInequalityConstraint,
)
Expand All @@ -34,6 +35,7 @@ def is_constraint_implemented(self, my_type: Type[Constraint]) -> bool:
NChooseKConstraint,
InterpointEqualityConstraint,
NonlinearInequalityConstraint,
NonlinearEqualityConstraint,
ProductInequalityConstraint,
CategoricalExcludeConstraint,
]
Expand Down
Loading
Loading