diff --git a/.github/workflows/choose_branch.yaml b/.github/workflows/choose_branch.yaml index 4d95e94b4..342f36575 100644 --- a/.github/workflows/choose_branch.yaml +++ b/.github/workflows/choose_branch.yaml @@ -2,8 +2,8 @@ name: Release Branch # Note that push and pull-request builds are automatically # now skipped by GitHub if -# [skip ci], [ci skip], [no ci], [skip actions], or [actions skip] -# are in the commit message. We don't need to check for this ourselves. +# [skip ci], [ci skip], [no ci], [skip actions], or [actions skip] +# are in the commit message. We don't need to check for this ourselves. on: workflow_dispatch: @@ -31,11 +31,11 @@ jobs: - platform: { name: "macos", os: "macos-latest", shell: "bash -l {0}" } python-version: "3.10" - - platform: { name: "windows", os: "windows-latest", shell: "pwsh" } - python-version: "3.10" - platform: { name: "macos", os: "macos-latest", shell: "bash -l {0}" } - python-version: "3.12" # MacOS can't run 3.12 yet... + python-version: "3.11" + - platform: { name: "windows", os: "windows-latest", shell: "pwsh" } + python-version: "3.10" - platform: { name: "windows", os: "windows-latest", shell: "pwsh" } python-version: "3.11" environment: @@ -50,31 +50,36 @@ jobs: REPO: "${{ github.repository }}" steps: # - - uses: conda-incubator/setup-miniconda@v3 + - uses: actions/checkout@v4 + with: + ref: ${{ github.event.inputs.branch }} + fetch-depth: 0 + # + - name: Install pixi + uses: prefix-dev/setup-pixi@v0.9.4 with: - auto-update-conda: true - python-version: ${{ matrix.python-version }} - activate-environment: sire_build - miniforge-version: latest + run-install: false # - - name: Clone the desired branch - run: git clone https://github.com/${{ env.REPO }} -b ${{ github.event.inputs.branch }} sire + - name: Install rattler-build + shell: bash + run: pixi global install rattler-build # - - name: Setup Conda - run: conda install -y -c conda-forge conda-build boa anaconda-client packaging pip-requirements-parser + - name: 
Generate recipe + run: python ${{ github.workspace }}/actions/generate_recipe.py --features obs emle # - - name: Update Conda recipe - run: python ${{ github.workspace }}/sire/actions/update_recipe.py + - name: Write Python variant config + shell: bash + run: printf 'python:\n - "${{ matrix.python-version }}"\n' > "${{ github.workspace }}/python_variant.yaml" # - - name: Prepare build location - run: mkdir ${{ github.workspace }}/build + - name: Build package using rattler-build + shell: bash + run: rattler-build build --recipe "${{ github.workspace }}/recipes/sire" -c conda-forge -c openbiosim/label/dev --variant-config "${{ github.workspace }}/python_variant.yaml" # - - name: Build Conda package using conda build - run: conda build -c conda-forge -c openbiosim/label/dev ${{ github.workspace }}/sire/recipes/sire + - name: Install anaconda-client + run: python -m pip install anaconda-client # - - name: Upload Conda package - # Maybe add the logic here that this is a dev package? - run: python ${{ github.workspace }}/sire/actions/upload_package.py + - name: Upload package + run: python ${{ github.workspace }}/actions/upload_package.py env: - SRC_DIR: ${{ github.workspace }}/sire + SRC_DIR: ${{ github.workspace }} ANACONDA_TOKEN: ${{ secrets.ANACONDA_TOKEN }} diff --git a/.github/workflows/devel.yaml b/.github/workflows/devel.yaml index bf520b79a..c741c6e81 100644 --- a/.github/workflows/devel.yaml +++ b/.github/workflows/devel.yaml @@ -2,8 +2,8 @@ name: Release Devel # Note that push and pull-request builds are automatically # now skipped by GitHub if -# [skip ci], [ci skip], [no ci], [skip actions], or [actions skip] -# are in the commit message. We don't need to check for this ourselves. +# [skip ci], [ci skip], [no ci], [skip actions], or [actions skip] +# are in the commit message. We don't need to check for this ourselves. 
on: workflow_dispatch: @@ -28,7 +28,10 @@ jobs: # but Linux - platform: { name: "macos", os: "macos-latest", shell: "bash -l {0}" } - python-version: "3.12" # MacOS can't run 3.12 yet... We want 3.10 and 3.11 + python-version: "3.10" + - platform: + { name: "macos", os: "macos-latest", shell: "bash -l {0}" } + python-version: "3.11" - platform: { name: "windows", os: "windows-latest", shell: "pwsh" } python-version: "3.10" - platform: { name: "windows", os: "windows-latest", shell: "pwsh" } @@ -45,31 +48,35 @@ jobs: REPO: "${{ github.repository }}" steps: # - - uses: conda-incubator/setup-miniconda@v3 + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + # + - name: Install pixi + uses: prefix-dev/setup-pixi@v0.9.4 with: - auto-update-conda: true - python-version: ${{ matrix.python-version }} - activate-environment: sire_build - miniforge-version: latest + run-install: false # - - name: Clone the devel branch (push to devel) - run: git clone https://github.com/${{ env.REPO }} sire + - name: Install rattler-build + shell: bash + run: pixi global install rattler-build # - - name: Setup Conda - run: conda install -y -c conda-forge conda-build boa anaconda-client packaging pip-requirements-parser + - name: Generate recipe + run: python ${{ github.workspace }}/actions/generate_recipe.py --features obs emle # - - name: Update Conda recipe - run: python ${{ github.workspace }}/sire/actions/update_recipe.py + - name: Write Python variant config + shell: bash + run: printf 'python:\n - "${{ matrix.python-version }}"\n' > "${{ github.workspace }}/python_variant.yaml" # - - name: Prepare build location - run: mkdir ${{ github.workspace }}/build + - name: Build package using rattler-build + shell: bash + run: rattler-build build --recipe "${{ github.workspace }}/recipes/sire" -c conda-forge -c openbiosim/label/dev --variant-config "${{ github.workspace }}/python_variant.yaml" # - - name: Build Conda package using conda build - run: conda build -c conda-forge -c 
openbiosim/label/dev ${{ github.workspace }}/sire/recipes/sire + - name: Install anaconda-client + run: python -m pip install anaconda-client # - - name: Upload Conda package - # Maybe add the logic here that this is a dev package? - run: python ${{ github.workspace }}/sire/actions/upload_package.py + - name: Upload package + run: python ${{ github.workspace }}/actions/upload_package.py env: - SRC_DIR: ${{ github.workspace }}/sire + SRC_DIR: ${{ github.workspace }} ANACONDA_TOKEN: ${{ secrets.ANACONDA_TOKEN }} diff --git a/.github/workflows/main.yaml b/.github/workflows/main.yaml index 3a1f0761f..b232b9770 100644 --- a/.github/workflows/main.yaml +++ b/.github/workflows/main.yaml @@ -2,8 +2,8 @@ name: Release Main # Note that push and pull-request builds are automatically # now skipped by GitHub if -# [skip ci], [ci skip], [no ci], [skip actions], or [actions skip] -# are in the commit message. We don't need to check for this ourselves. +# [skip ci], [ci skip], [no ci], [skip actions], or [actions skip] +# are in the commit message. We don't need to check for this ourselves. # Only allow this action to run on a manual run. # We should specify when run whether or not we want @@ -29,10 +29,7 @@ jobs: - { name: "windows", os: "windows-latest", shell: "pwsh" } - { name: "linux", os: "ubuntu-latest", shell: "bash -l {0}" } - { name: "macos", os: "macos-latest", shell: "bash -l {0}" } - exclude: - - platform: - { name: "macos", os: "macos-latest", shell: "bash -l {0}" } - python-version: "3.12" # MacOS can't run 3.12 yet... 
+ # No exclusions - release builds all combinations environment: name: sire-build defaults: @@ -45,32 +42,39 @@ jobs: REPO: "${{ github.event.pull_request.head.repo.full_name || github.repository }}" steps: # - - uses: conda-incubator/setup-miniconda@v3 + - uses: actions/checkout@v4 with: - auto-update-conda: true - python-version: ${{ matrix.python-version }} - activate-environment: sire_build - miniforge-version: latest + ref: main + fetch-depth: 0 # - - name: Clone the main branch (push to main) - run: git clone -b main https://github.com/openbiosim/sire sire + - name: Install pixi + uses: prefix-dev/setup-pixi@v0.9.4 + with: + run-install: false + # + - name: Install rattler-build + shell: bash + run: pixi global install rattler-build # - - name: Setup Conda - run: conda install -y -c conda-forge conda-build boa anaconda-client packaging pip-requirements-parser + - name: Generate recipe + run: python ${{ github.workspace }}/actions/generate_recipe.py --features obs emle # - - name: Update Conda recipe - run: python ${{ github.workspace }}/sire/actions/update_recipe.py + - name: Write Python variant config + shell: bash + run: printf 'python:\n - "${{ matrix.python-version }}"\n' > "${{ github.workspace }}/python_variant.yaml" # - - name: Prepare build location - run: mkdir ${{ github.workspace }}/build + - name: Build package using rattler-build + shell: bash + run: rattler-build build --recipe "${{ github.workspace }}/recipes/sire" -c conda-forge -c openbiosim/label/dev --variant-config "${{ github.workspace }}/python_variant.yaml" # - - name: Build Conda package using conda build - run: conda build -c conda-forge -c openbiosim/label/dev ${{ github.workspace }}/sire/recipes/sire + - name: Install anaconda-client + run: python -m pip install anaconda-client + if: github.event.inputs.upload_packages == 'yes' # - - name: Upload Conda package + - name: Upload package # upload to the 'test' channel - run: python ${{ github.workspace 
}}/sire/actions/upload_package.py test + run: python ${{ github.workspace }}/actions/upload_package.py test env: - SRC_DIR: ${{ github.workspace }}/sire + SRC_DIR: ${{ github.workspace }} ANACONDA_TOKEN: ${{ secrets.ANACONDA_TOKEN }} if: github.event.inputs.upload_packages == 'yes' diff --git a/.github/workflows/pr.yaml b/.github/workflows/pr.yaml index 4b4dd4a45..0f4d8e4e8 100644 --- a/.github/workflows/pr.yaml +++ b/.github/workflows/pr.yaml @@ -2,8 +2,8 @@ name: Pull-Request # Note that push and pull-request builds are automatically # now skipped by GitHub if -# [skip ci], [ci skip], [no ci], [skip actions], or [actions skip] -# are in the commit message. We don't need to check for this ourselves. +# [skip ci], [ci skip], [no ci], [skip actions], or [actions skip] +# are in the commit message. We don't need to check for this ourselves. on: pull_request: @@ -28,11 +28,11 @@ jobs: - platform: { name: "macos", os: "macos-latest", shell: "bash -l {0}" } python-version: "3.10" - - platform: { name: "windows", os: "windows-latest", shell: "pwsh" } - python-version: "3.10" - platform: { name: "macos", os: "macos-latest", shell: "bash -l {0}" } - python-version: "3.12" # MacOS can't run 3.12 yet... 
+ python-version: "3.11" + - platform: { name: "windows", os: "windows-latest", shell: "pwsh" } + python-version: "3.10" - platform: { name: "windows", os: "windows-latest", shell: "pwsh" } python-version: "3.11" environment: @@ -47,24 +47,26 @@ jobs: REPO: "${{ github.event.pull_request.head.repo.full_name || github.repository }}" steps: # - - uses: conda-incubator/setup-miniconda@v3 + - uses: actions/checkout@v4 with: - auto-update-conda: true - python-version: ${{ matrix.python-version }} - activate-environment: sire_build - miniforge-version: latest + fetch-depth: 0 # - - name: Clone the feature branch (pull request to devel) - run: git clone -b ${{ github.head_ref }} --single-branch https://github.com/${{ env.REPO }} sire + - name: Install pixi + uses: prefix-dev/setup-pixi@v0.9.4 + with: + run-install: false # - - name: Setup Conda - run: conda install -y -c conda-forge conda-build boa anaconda-client packaging pip-requirements-parser + - name: Install rattler-build + shell: bash + run: pixi global install rattler-build # - - name: Update Conda recipe - run: python ${{ github.workspace }}/sire/actions/update_recipe.py + - name: Generate recipe + run: python ${{ github.workspace }}/actions/generate_recipe.py --features obs emle # - - name: Prepare build location - run: mkdir ${{ github.workspace }}/build + - name: Write Python variant config + shell: bash + run: printf 'python:\n - "${{ matrix.python-version }}"\n' > "${{ github.workspace }}/python_variant.yaml" # - - name: Build Conda package using conda build - run: conda build -c conda-forge -c openbiosim/label/dev ${{ github.workspace }}/sire/recipes/sire + - name: Build package using rattler-build + shell: bash + run: rattler-build build --recipe "${{ github.workspace }}/recipes/sire" -c conda-forge -c openbiosim/label/dev --variant-config "${{ github.workspace }}/python_variant.yaml" diff --git a/.gitignore b/.gitignore index aa5948dad..e2b790dfd 100644 --- a/.gitignore +++ b/.gitignore @@ -10,7 +10,7 @@ 
build/wrapper/* build/module/* build/downloads/* recipes/sire/meta.yaml -#files for wrapper +# files for wrappers module_info active_headers.data exposed_decl.pypp.txt @@ -57,12 +57,15 @@ tests/cache/* # Python autogenerated backup wrappers *~ -#Atom +# Atom .idea -#VS Code +# VS Code .vscode .DS_Store .coverage + +# Pixi startup file +pixi.sh diff --git a/README.rst b/README.rst index 8d1aa9ce2..7ea785b2c 100644 --- a/README.rst +++ b/README.rst @@ -104,59 +104,54 @@ Installation from source However, as you are here, it is likely you want to download the latest, greatest version of the code, which you will need to compile. To compile -sire, -you need a git client to download the source, and a working internet connection -(needed by the sire compilation scripts to download additional dependencies). +sire, you need a git client to download the source and +`pixi `__ to manage the build environment. -First, you need to create and activate a conda environment, e.g. +First, clone the sire source code and change into the directory: .. code-block:: bash - conda create -n openbiosim-dev "python<3.12" - conda activate openbiosim-dev - -Next, you need to install the Sire build dependencies. - -.. code-block:: bash - - conda install cmake pip-requirements-parser - -You will also need to install compilers, e.g. on Linux use - -.. code-block:: bash - - conda install gcc gxx + git clone https://github.com/OpenBioSim/sire + cd sire -on MacOS use +Next, use pixi to create and activate the development environment. This +will install all required dependencies, including compilers: .. code-block:: bash - conda install clang clangxx + pixi install -e dev + pixi shell -e dev -and on Windows use +Now compile and install sire: .. 
code-block:: bash - conda install conda-build - -Next, you can clone the sire source code and compile and install sire:: - - git clone https://github.com/OpenBioSim/sire - cd sire python setup.py install A small word of warning, the compilation can easily take over an hour! -The above will compile sire in your existing conda environment. +Other pixi environments are available depending on your needs: + +* ``pixi install -e default`` - core sire dependencies only +* ``pixi install -e obs`` - include downstream OpenBioSim package dependencies +* ``pixi install -e emle`` - include `emle-engine `__ dependencies +* ``pixi install -e dev`` - all of the above plus test dependencies + +Any additional startup commands can be specified in the ``pixi.sh`` file +in the root of the sire repository. This file is automatically sourced when +you activate the pixi environment, so you can add any additional environment +variables or startup commands here. (Note that you might see a warning if +you haven't created this file.) -If you plan to build `BioSimSpace `__ -on top of sire, then you will need to resolve BioSimSpace's dependencies at -the time sire is installed to ensure that it is built in a self-consistent way. -This can be achieved as follows: +If you need OpenCL support (e.g. for OpenMM), note that pixi does not run +conda post-link scripts, so the ``ocl-icd-system`` symlink won't be created +automatically. After creating the environment, run the following once to fix +this: .. 
code-block:: bash - python setup.py --install-bss-deps install + pixi shell -e dev + ln -s /etc/OpenCL/vendors "${CONDA_PREFIX}/etc/OpenCL/vendors/ocl-icd-system" Support and Development ======================= diff --git a/actions/collect_failed.py b/actions/collect_failed.py index 2836870aa..8048b4b03 100644 --- a/actions/collect_failed.py +++ b/actions/collect_failed.py @@ -1,4 +1,3 @@ - # Script that collects as much as it can from a failed conda build so that it can # be stored as a GitHub Actions artifact for download and further debugging @@ -29,15 +28,17 @@ print(f"Zipping up {zipdirs} to {output_filename}") + def filter_function(tarinfo): filename = tarinfo.name - #print(filename) - if filename.find('.git') != -1: - #print("excluded!") + # print(filename) + if filename.find(".git") != -1: + # print("excluded!") return None else: return tarinfo + with tarfile.open(output_filename, "w:bz2") as tar: for dir in zipdirs: tar.add(dir, arcname=os.path.basename(dir), filter=filter_function) diff --git a/actions/generate_recipe.py b/actions/generate_recipe.py new file mode 100644 index 000000000..8010a0e0c --- /dev/null +++ b/actions/generate_recipe.py @@ -0,0 +1,450 @@ +"""Generate a rattler-build recipe.yaml from pixi.toml. + +This script reads the pixi.toml file (the single source of truth for +dependencies) and generates a rattler-build recipe.yaml with the +appropriate if/then conditional blocks for platform-specific dependencies. + +Usage: + python actions/generate_recipe.py [--features obs emle] + +The --features flag controls which optional dependency groups are +included in the host section of the recipe. +""" + +import argparse +import os +import subprocess +import sys + +try: + import tomllib +except ModuleNotFoundError: + import tomli as tomllib + + +# Categorisation of core dependencies into conda recipe sections. +# Dependencies listed here go into "build"; everything else from +# [dependencies] goes into "host". 
The "run" list is a subset of +# host that is also needed at runtime. +BUILD_DEPS = { + "cmake", + "git", + "make", + "libtool", + "pybind11", + "sysroot_linux-64", +} + +RUN_DEPS = { + "gsl", + "lazy_import", + "libnetcdf", + "openmm", + "pandas", + "python", + "qt-main", + "rich", + "tbb", +} + +# Mapping from pixi platform strings to rattler-build selector expressions. +PLATFORM_SELECTORS = { + "linux-64": "linux and x86_64", + "linux-aarch64": "linux and aarch64", + "osx-arm64": "osx and arm64", + "win-64": "win", +} + +ALL_PLATFORMS = list(PLATFORM_SELECTORS.keys()) + + +def parse_args(): + parser = argparse.ArgumentParser( + description="Generate a rattler-build recipe.yaml from pixi.toml" + ) + parser.add_argument( + "--features", + nargs="*", + default=[], + help="Optional feature groups to include (e.g. obs emle)", + ) + parser.add_argument( + "--pixi-toml", + default=None, + help="Path to pixi.toml (default: auto-detect from repo root)", + ) + parser.add_argument( + "--output", + default=None, + help="Output path for recipe.yaml (default: recipes/sire/recipe.yaml)", + ) + return parser.parse_args() + + +def run_cmd(cmd): + """Run a shell command and return stripped stdout.""" + p = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE) + return str(p.stdout.read().decode("utf-8")).lstrip().rstrip() + + +def get_git_info(srcdir): + """Get the git remote URL and branch/tag.""" + gitdir = os.path.join(srcdir, ".git") + + remote = run_cmd( + f"git --git-dir={gitdir} --work-tree={srcdir} config --get remote.origin.url" + ) + if not remote.endswith(".git"): + remote += ".git" + + branch = run_cmd( + f"git --git-dir={gitdir} --work-tree={srcdir} rev-parse --abbrev-ref HEAD" + ) + + if branch == "HEAD": + # Handle detached HEAD (e.g. in GitHub Actions PR checkouts). + # GITHUB_HEAD_REF is set for pull_request events. + # GITHUB_REF_NAME is set for push/workflow_dispatch events. 
+ branch = os.environ.get("GITHUB_HEAD_REF") or os.environ.get( + "GITHUB_REF_NAME", "" + ) + + if not branch: + # Fall back to tag detection for release builds. + branch = run_cmd( + f"git --git-dir={gitdir} --work-tree={srcdir} describe --tags" + ) + if "-" in branch: + raise RuntimeError("Cannot perform a tag build from a non-tag commit!") + + # Get the most recent tag for specifying the version in the recipe. If there are + # no tags, use "PR". + version = run_cmd( + f"git --git-dir={gitdir} --work-tree={srcdir} describe --tags --abbrev=0" + ) + if not version: + version = "PR" + + # Work out the build number as the number of commits since the most recent tag, + # or "0" if there are no commits. + build = run_cmd( + f"git --git-dir={gitdir} --work-tree={srcdir} rev-list --count {version}.." + ) + if not build: + build = "0" + + return remote, branch, version, build + + +def load_pixi_toml(path): + """Load and parse the pixi.toml file.""" + with open(path, "rb") as f: + return tomllib.load(f) + + +def format_dep(name, spec): + """Format a dependency as a conda-style string.""" + if spec == "*" or spec == "": + return name + # Handle version specs that already start with an operator + if spec[0] in ">= 1: - from pathlib import Path - import yaml - - d = yaml.safe_load(Path(sys.argv[1]).read_text()) - env_reqs = [x for x in d["dependencies"] if type(x) is str] - print(f"Using environment from {sys.argv[1]}") - - env_channels = d["channels"] -else: - env_reqs = [] - env_channels = [] - -# go up one directories to get the source directory -# (this script is in Sire/actions/) -srcdir = os.path.dirname(os.path.dirname(script)) - -print(f"Sire source is in {srcdir}") - -condadir = os.path.join(srcdir, "recipes", "sire") - -print(f"conda recipe in {condadir}") - -# Store the name of the recipe and template YAML files. 
-recipe = os.path.join(condadir, "meta.yaml") -template = os.path.join(condadir, "template.yaml") - -# Now parse all of the requirements -build_reqs = parse_requirements(os.path.join(srcdir, "requirements_build.txt")) -print(build_reqs) -host_reqs = parse_requirements(os.path.join(srcdir, "requirements_host.txt")) -print(host_reqs) -run_reqs = parse_requirements(os.path.join(srcdir, "requirements_run.txt")) -print(run_reqs) -bss_reqs = parse_requirements(os.path.join(srcdir, "requirements_bss.txt")) -print(bss_reqs) -if is_emle: - emle_reqs = parse_requirements(os.path.join(srcdir, "requirements_emle.txt")) - print(emle_reqs) -else: - emle_reqs = [] -test_reqs = parse_requirements(os.path.join(srcdir, "requirements_test.txt")) - - -def run_cmd(cmd): - p = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE) - return str(p.stdout.read().decode("utf-8")).lstrip().rstrip() - - -gitdir = os.path.join(srcdir, ".git") - -# Get the sire remote -sire_remote = run_cmd( - f"git --git-dir={gitdir} --work-tree={srcdir} config --get remote.origin.url" -) -sire_remote += ".git" -print(sire_remote) - -# Get the Sire branch. -sire_branch = run_cmd( - f"git --git-dir={gitdir} --work-tree={srcdir} rev-parse --abbrev-ref HEAD" -) - -# If the branch is "HEAD", then we might be in detached head mode. If so, check -# the tag. -if sire_branch == "HEAD": - sire_branch = run_cmd( - f"git --git-dir={gitdir} --work-tree={srcdir} describe --tags" - ) - # Make sure this is a pure tag commit. 
- if "-" in sire_branch: - raise RuntimeError("Cannot perform a tag build from a non-tag commit!") - -print(sire_branch) - -lines = open(template, "r").readlines() - - -def dep_lines(deps): - lines = [] - - for dep in deps: - lines.append(f" - {dep}\n") - - return "".join(lines) - - -def check_environment_reqs(reqs): - """ - Run through the environment reqs and try to - fix any conflicts that may be created - """ - if type(reqs) is not list: - reqs = [reqs] - - import re - - r = re.compile( - r"([\w\d\-_]*)(>=|<=|==|<|>|=)(\d\.?\*?)*,?(>=|<=|=|==|<|>?)(\d\.?\*?)*|(\d\.?\*?)*\|(\d\.?\*?)*|(\d\.?\*?)*" - ) - - for req in reqs: - m = r.match(req) - - if m.groups()[0] is not None: - req = m.groups()[0] - - if req == "rdkit": - print(m.groups()) - - elif req == "alchemlyb": - print(m.groups()) - - -def combine(reqs0, reqs1): - """ - Combine requirements together, removing those from reqs0 - that appear in reqs1 (reqs1 has priority) - """ - if type(reqs0) is not list: - reqs0 = [reqs0] - - if type(reqs1) is not list: - reqs1 = [reqs1] - - import re - - r = re.compile( - r"([\w\d\-_]*)(>=|<=|==|<|>|=)(\d\.?\*?)*,?(>=|<=|=|==|<|>?)(\d\.?\*?)*|(\d\.?\*?)*\|(\d\.?\*?)*|(\d\.?\*?)*" - ) - - reqs = [] - - for req0 in reqs0: - found = False - - m = r.match(req0) - - if m.groups()[0] is None: - r0 = req0 - else: - r0 = m.groups()[0] - - for req1 in reqs1: - m = r.match(req1) - - if m.groups()[0] is None: - req = req1 - else: - req = m.groups()[0] - - if r0 == req: - found = True - break - - if not found: - reqs.append(req0) - - return reqs + reqs1 - - -def check_reqs(reqs0, reqs1): - """ - Update reqs0 so that if there are any version requirements - in reqs1 that affect dependencies in reqs0, then - reqs0 is updated to include those versions. 
- """ - if type(reqs0) is not list: - reqs0 = [reqs0] - - if type(reqs1) is not list: - reqs1 = [reqs1] - - import re - - r = re.compile( - r"([\w\d\-_]*)(>=|<=|==|<|>|=)(\d\.?\*?)*,?(>=|<=|=|==|<|>?)(\d\.?\*?)*|(\d\.?\*?)*\|(\d\.?\*?)*|(\d\.?\*?)*" - ) - - reqs = [] - - for req0 in reqs0: - found = False - found_req = None - - m = r.match(req0) - - if m.groups()[0] is None: - r0 = req0 - else: - r0 = m.groups()[0] - - for req1 in reqs1: - m = r.match(req1) - - if m.groups()[0] is None: - req = req1 - else: - req = m.groups()[0] - - if r0 == req: - found = True - found_req = req1 - break - - if found: - reqs.append(found_req) - else: - reqs.append(req0) - - return reqs - - -# check_environment_reqs(env_reqs) - -build_reqs = dep_lines(check_reqs(build_reqs, env_reqs)) -host_reqs = combine(host_reqs, bss_reqs) -host_reqs = combine(host_reqs, emle_reqs) -host_reqs = dep_lines(combine(host_reqs, env_reqs)) -run_reqs = dep_lines(check_reqs(run_reqs, env_reqs)) -test_reqs = dep_lines(check_reqs(test_reqs, env_reqs)) - -print("\nRECIPE") - -with open(recipe, "w") as FILE: - for line in lines: - if line.find("SIRE_BUILD_REQUIREMENTS") != -1: - line = build_reqs - elif line.find("SIRE_HOST_REQUIREMENTS") != -1: - line = host_reqs - elif line.find("SIRE_RUN_REQUIREMENTS") != -1: - line = run_reqs - elif line.find("SIRE_TEST_REQUIREMENTS") != -1: - line = test_reqs - else: - line = line.replace("SIRE_REMOTE", sire_remote) - line = line.replace("SIRE_BRANCH", sire_branch) - - FILE.write(line) - print(line, end="") - -channels = ["conda-forge", "openbiosim/label/dev"] - -for channel in env_channels: - if channel not in channels: - channels.insert(0, channel) - -channels = " ".join([f"-c {x}" for x in channels]) - -print("\nBuild this package using the command") -print(f"conda build {channels} {condadir}") diff --git a/actions/upload_package.py b/actions/upload_package.py index bf80f1415..cdb329603 100644 --- a/actions/upload_package.py +++ b/actions/upload_package.py @@ -22,20 
+22,21 @@ else: conda_token = "TEST" -# get the root conda directory -conda = os.environ["CONDA"] +# rattler-build outputs to an 'output' directory by default. +# Try rattler-build output first, fall back to conda-bld. +output_dir = os.path.join(srcdir, "output") -# Set the path to the conda-bld directory. -conda_bld = os.path.join(conda, "envs", "sire_build", "conda-bld") - -print(f"conda_bld = {conda_bld}") - -# Find the packages to upload -sire_pkg = glob.glob(os.path.join(conda_bld, "*-*", "sire-*.tar.bz2")) +sire_pkg = glob.glob(os.path.join(output_dir, "**", "sire-*.conda"), recursive=True) if len(sire_pkg) == 0: - print("No sire packages to upload?") - sys.exit(-1) + # Fall back to conda-bld location (legacy) + if "CONDA" in os.environ: + conda_bld = os.path.join(os.environ["CONDA"], "envs", "sire_build", "conda-bld") + sire_pkg = glob.glob(os.path.join(conda_bld, "*-*", "sire-*.tar.bz2")) + + if len(sire_pkg) == 0: + print("No sire packages to upload?") + sys.exit(-1) packages = sire_pkg @@ -57,9 +58,9 @@ def run_cmd(cmd): tag = run_cmd(f"git --git-dir={gitdir} --work-tree={srcdir} tag --contains") # The channel has been specified as an extra argument -# This will either be 'test' for main releases, or +# This will either be 'test' for main releases, or # 'dev' for dev releases (the default) -channel = channel.lstrip().rstrip().replace(" ","_").lower() +channel = channel.lstrip().rstrip().replace(" ", "_").lower() if len(channel) == 0: channel = "dev" diff --git a/doc/source/changelog.rst b/doc/source/changelog.rst index a2b39cab0..b1e49a8eb 100644 --- a/doc/source/changelog.rst +++ b/doc/source/changelog.rst @@ -12,11 +12,9 @@ Development was migrated into the `OpenBioSim `__ organisation on `GitHub `__. -`2025.4.0 `__ - December 2025 +`2025.4.0 `__ - February 2026 --------------------------------------------------------------------------------------------- -* Please add an item to this CHANGELOG for any new features or bug fixes when creating a PR. 
- * Use ``"0"`` for null EMLE interpolation force since ``""`` causes issues on some platforms. * Fix ``delta`` parameter in soft-core Coulomb potential. @@ -25,8 +23,18 @@ organisation on `GitHub `__. * Fix recursion bug in :func:`sire.base.wrap()` function. +* Add support for passing cell vectors to ``PyQMForce`` and ``TorchQMForce``. + +* Add ``--install-metadata`` option to ``setup.py`` to register development source installations + with ``conda``. + * Fix :meth:`Dynamics.get_rest2_scale()` method. +* Add automatic parametrisation for Morse potential restraints when bonds are + being created in alchemical simulations. + +* Switch build system to ``pixi`` and ``rattler-build``. + `2025.3.0 `__ - November 2025 --------------------------------------------------------------------------------------------- diff --git a/doc/source/install.rst b/doc/source/install.rst index 72e745f9b..95563d001 100644 --- a/doc/source/install.rst +++ b/doc/source/install.rst @@ -253,14 +253,30 @@ operating system, or because you want to use a newer version (e.g. code from the ``devel`` branch, or from your own feature branch if you are a developer). -You compile :mod:`sire` into an existing anaconda / miniconda environment. -Please create and activate an environment, e.g. by following -`the instructions <_Install_miniforge>` to install a fresh ``miniforge`` and -then creating and activating Python 3.11 environment called -``openbiosim``. +Prerequisites +------------- -Next, download the source code. You could download the latest development -version of :mod:`sire` by typing; +You need `pixi `__ installed to manage the build +environment and dependencies. Follow the +`pixi installation instructions `__ +for your platform. + +.. note:: + + You need to have Visual Studio C++ (2017 or newer) installed to compile on Windows. + The easiest way to do this is to install the free + `Visual Studio 2022 Community Edition `__. 
+ Make sure to install "Desktop development with C++", + including the options "MSVC v143 - VS 2022 C++ x64/x86 build tools (v14.30)", + "C++ CMake tools for Windows", and at least one of "Windows 11 SDK" and/or + "Windows 10 SDK" (any version will do). Currently only the X64 compilers + have been tested - we are interested to try Windows/ARM64 once more of + the dependencies are available. + +Download the source code +------------------------ + +Download the latest development version of :mod:`sire` by typing; .. code-block:: bash @@ -269,12 +285,6 @@ version of :mod:`sire` by typing; This will download into a directory called :mod:`sire`. Navigate into this directory (e.g. ``cd sire``). -.. note:: - - This will fail if ``git`` is not installed on your computer. - You can easily install ``git`` using ``conda``, e.g. - run ``conda install git``. - You can change to a different branch using the ``git checkout BRANCH`` command, e.g. @@ -293,6 +303,49 @@ feature branch using where ``feat_name`` should be replaced by the name of the feature branch you want to compile. +Create the build environment +----------------------------- + +Use ``pixi`` to create and activate the development environment. This will +install all required dependencies, including compilers: + +.. code-block:: bash + + $ pixi install -e dev + $ pixi shell -e dev + +Several environments are available depending on your needs: + +* ``default`` - core sire dependencies only +* ``obs`` - include downstream OpenBioSim package dependencies (e.g. BioSimSpace) +* ``emle`` - include `emle-engine `__ dependencies +* ``full`` - include both OBS and EMLE dependencies +* ``dev`` - all of the above plus test dependencies + +If you plan to install `BioSimSpace `__ on +top of :mod:`sire`, use at least the ``obs`` or ``dev`` environment. +This ensures that incompatible versions of shared dependencies are not +accidentally installed. 
+ +Any additional startup commands can be specified in the ``pixi.sh`` file +in the root of the sire repository. This file is automatically sourced when +you activate the pixi environment, so you can add any additional environment +variables or startup commands here. (Note that you might see a warning if +you haven't created this file.) + +If you need OpenCL support (e.g. for OpenMM), note that pixi does not run +conda post-link scripts, so the ``ocl-icd-system`` symlink won't be created +automatically. After creating the environment, run the following once to fix +this: + +.. code-block:: bash + + pixi shell -e dev + ln -s /etc/OpenCL/vendors "${CONDA_PREFIX}/etc/OpenCL/vendors/ocl-icd-system" + +Compile and install +------------------- + Compilation and installation of :mod:`sire` is managed via the `setup.py `__ script. @@ -303,7 +356,7 @@ Run $ python setup.py --help -to get a help on all of the options. +to get help on all of the options. Typically, you just want to compile and install :mod:`sire`. To do this, type @@ -312,34 +365,8 @@ type $ python setup.py install -This will download and install all of the dependencies via ``conda``. It will then compile -the :mod:`sire` C++ libraries, and then the Python wrappers. Be patient, -as compilation can take quite a while! - -.. note:: - - You need to have Visual Studio C++ (2017 or newer) installed to compile on Windows. - The easiest way to do this is to install the free - `Visual Studio 2022 Community Edition `__. - Make sure to install "Desktop development with C++", - including the options "MSVC v143 - VS 2022 C++ x64/x86 build tools (v14.30)", - "C++ CMake tools for Windows", and at least one of "Windows 11 SDK" and/or - "Windows 10 SDK" (any version will do). You can, optionally, install the - older C++ compilers too, e.g. "MSVC v142 - VS 2019 C++ x64/x86 build tools (v14.29)", - and/or "MSVC v141 - VS 2017 C++ x64/x86 build tools (v14.16)". 
Currently - only the X64 compilers have been tested - we are interested to try - Windows/ARM64 once more of the dependencies are available. - -If you plan to install `BioSimSpace `__ on -top of :mod:`sire`, then you should install using; - -.. code-block:: bash - - $ python --install-bss-deps install - -This will use ``conda`` to download and install all of -BioSimSpace's dependencies as well. This ensures that incompatible versions -of shared dependencies are not accidentally installed. +This will compile the :mod:`sire` C++ libraries and then the Python +wrappers. Be patient, as compilation can take quite a while! Once :mod:`sire` has installed, you can import it in a ``python``, ``ipython`` or ``jupyter lab`` session by typing @@ -355,165 +382,74 @@ Please take a look at our :doc:`developer guide ` for more information on how to develop and contribute new code to :mod:`sire`. -5. Hardest install - build your own custom conda packages -========================================================= +5. Hardest install - build your own conda packages +=================================================== -The :mod:`sire` conda packages that we build have a lot of dependencies that -may conflict with your own environment. This is because we build :mod:`sire` -to be compatible with the latest version of `BioSimSpace `__, -which itself optionally depends on a large number of simulation packages. +You can build your own :mod:`sire` conda package using +`rattler-build `__. This is useful if +you want a package with different dependencies to the one we distribute. -You can build your own :mod:`sire` conda package that has fewer dependencies, -or which is compatible with the packages already installed in your conda -environment. There are a few steps you need to complete. - -A. Define your runtime environment +A. Check out the sire source code ---------------------------------- -The first step is to describe the desired runtime environment for the package. 
-The easiest way to do this is to create that environment, e.g. by installing -the packages you want, and to then create an ``environment.yml`` file -that describes that environment. You can do this by running - -.. code-block:: bash - - $ conda env export -f environment.yml - -This will create an environment file called ``environment.yml`` -that creates pins for the exact version of all of the packages installed -in your environment. - -If you want, you can edit this file to add or remove pins. Simply delete -lines describing the version of packages that you don't need pinned, -add new lines if there are additional packages that you do want pinned, -or even update the version number of the pins if you can allow more -flexibility for the installation. - -B. Check out the sire source code ---------------------------------- - -The next step is to check out the :mod:`sire` source code (if you haven't -already). - .. code-block:: bash $ git clone https://github.com/openbiosim/sire -b main + $ cd sire -This checks the ``main`` branch of the code out into a directory called -``sire``. You can build a package for any branch of the code. Typically, -you will want to choose the ``main`` branch, as this always corresponds to the -last release. You can checkout the ``main`` branch by changing into the -``sire`` directory and running; - -.. code-block:: bash - - $ git checkout main +You can build a package for any branch of the code. The ``main`` branch +always corresponds to the last release. -C. Create the conda build environment -------------------------------------- +B. Install rattler-build +------------------------- -While you could build in your existing environment, it is cleaner to -build in a dedicated build environment. Here, we will create a build -environment called ``build_sire``. You can use any name you want. +Install ``rattler-build`` via ``pixi``: .. 
code-block:: bash - $ conda env create -n build_sire -f environment.yml + $ pixi global install rattler-build -Activate that environment +Or follow the +`rattler-build installation instructions `__. -.. code-block:: bash - - $ conda activate build_sire +C. Generate the recipe +----------------------- -And then install the tools needed to run conda-build +Generate the rattler-build recipe from the ``pixi.toml`` dependency +definitions: .. code-block:: bash - $ conda install -y -c conda-forge boa anaconda-client packaging=21 pip-requirements-parser - -D. Create the conda recipe --------------------------- + $ python actions/generate_recipe.py --features obs emle -Next, we need to create the conda recipe to build the package. We do this by -running the script ``actions/update_recipe.py``. You can add the path to -your ``environment.yml`` file as an argument. This tells the script to -create a recipe that includes all of the pins in the ``environment.yml``. -For example; +This creates ``recipes/sire/recipe.yaml``. The ``--features`` flag controls +which optional dependency groups are included. Omit features to build a +lighter package: .. code-block:: bash - $ python actions/update_recipe.py environment.yml + $ python actions/generate_recipe.py # core only + $ python actions/generate_recipe.py --features obs # core + BioSimSpace -would create the recipe using the pins in ``environment.yml`` (assuming this -file was in the current directory). +You can edit the generated ``recipe.yaml`` to further customise the +dependency pins if needed. -The recipe is written to ``recipes/sire/meta.yaml``. You can (optionally) -edit the pins in this file too, if you want to do some fine-tuning. - -.. note:: - - You may need to edit the recipe to fix version inconsistencies. 
- This is especially the case for ``rdkit`` - you need to to make - sure that if you specify a version for ``rdkit`` in your - ``environment.yml`` that you also use the same version - for the ``rdkit-devel`` package. - -E. Building the package ------------------------ - -You can now run ``conda-build`` to create the package. +D. Build the package +--------------------- .. code-block:: bash - $ conda build -c conda-forge -c openbiosim/label/dev recipes/sire + $ rattler-build build --recipe recipes/sire -c conda-forge -c openbiosim/label/dev -This will take a while. At the end, it will print out the location of the -sire conda package, e.g. - -.. note:: +This will take a while. The built package will be placed in the ``output/`` +directory. - The above command assumes that you don't need any other channels included - to install all of the packages included in your ``environment.yml``. - The ``actions/update_recipe.py`` script will print out the correct - ``conda build`` command at the end, which includes any extra - channels that are needed. - -:: - - # To have conda build upload to anaconda.org automatically, use - # conda config --set anaconda_upload yes - anaconda upload \ - /path/to/miniforge/envs/build_sire/conda-bld/osx-64/sire-2023.3.0-py310hf95ea87_25.tar.bz2 - anaconda_upload is not set. Not uploading wheels: [] - - INFO :: The inputs making up the hashes for the built packages are as follows: - { - "sire-2023.3.0-py310hf95ea87_25": { - "recipe": { - "c_compiler": "clang", - "cxx_compiler": "clangxx", - "numpy": "1.22", - "target_platform": "osx-64" - } - } - } - -In this case, you can see that the package is the file -``/path/to/miniforge/envs/build_sire/conda-bld/osx-64/sire-2023.3.0-py310hf95ea87_25.tar.bz2``. - -Copy this conda package to wherever you need (e.g. into a channel, upload -to conda, etc.). +You can then install it directly or upload it to a conda channel. .. note:: A full set of tests will be run on the package after it has been built. 
-    Some of these tests may fail if you have edited the recipe to remove
-    some of the dependencies. If this happens, you can decide to ignore
-    the tests, e.g. by removing them from the conda recipe (``meta.yml``)
-    or by just copying the file that is produced and has been placed into
-    the ``conda-bld/broken`` directory.
-
-You can then install it, either via the channel you've uploaded to, or by
-directly running ``conda install`` on the package file itself.
+    Some tests may fail if you have removed dependencies. If this happens,
+    you can edit the generated ``recipe.yaml`` to remove or adjust the
+    test section.
diff --git a/doc/source/tutorial/part06/03_restraints.rst b/doc/source/tutorial/part06/03_restraints.rst
index 7a43feeea..da56cd4e9 100644
--- a/doc/source/tutorial/part06/03_restraints.rst
+++ b/doc/source/tutorial/part06/03_restraints.rst
@@ -360,14 +360,14 @@ Morse Potential Restraints
 ---------------------------
 
 The :func:`sire.restraints.morse_potential` function is used to create Morse potential restraints,
-which can be used to carry harmonic bond annihilations alchemical relative binding free energy calculations.
+which can be used to carry out harmonic bond annihilations or creations in alchemical relative binding free energy calculations.
 
 To create a Morse potential restraint, you need to specify the two atoms to be restrained. Like the
 distance restraints, the atoms can be specified using a search string, passing lists of atom indexes,
 or molecule views holding the atoms. You have to specify the bond force constants, equilibrium bond
 distance value and the dissociation energy for the restraints. If not supplied, automatic parametrisation feature can be used, which will detect the bond being alchemically
-annihilated and set the parameters accordingly (dissociation energy value still needs to be provided). For example,
+annihilated or created and set the parameters accordingly (dissociation energy value still needs to be provided). 
For example, >>> mols = sr.load_test_files("cyclopentane_cyclohexane.bss") >>> morse_restraints = sr.restraints.morse_potential( diff --git a/doc/source/tutorial/part08/01_intro.rst b/doc/source/tutorial/part08/01_intro.rst index 632cbd442..f68beef45 100644 --- a/doc/source/tutorial/part08/01_intro.rst +++ b/doc/source/tutorial/part08/01_intro.rst @@ -63,6 +63,7 @@ signature: charges_mm: List[float], xyz_qm: List[List[float]], xyz_mm: List[List[float]], + cell: Optional[List[List[float]]] = None, idx_mm: Optional[List[int]] = None, ) -> Tuple[float, List[List[float]], List[List[float]]]: diff --git a/pixi.toml b/pixi.toml new file mode 100644 index 000000000..29f2d820f --- /dev/null +++ b/pixi.toml @@ -0,0 +1,197 @@ +[workspace] +name = "sire" +channels = ["conda-forge", "openbiosim/label/dev"] +platforms = ["linux-64", "linux-aarch64", "osx-arm64", "win-64"] + +# All core dependencies needed to build and run sire. +# The generate_recipe.py script categorises these into build/host/run +# sections when generating the rattler-build recipe. 
+[dependencies]
+python = "*"
+cmake = ">=3.30.0"
+git = "*"
+pybind11 = "*"
+gsl = "*"
+lazy_import = "*"
+libboost-devel = "*"
+libboost-python-devel = "*"
+libcblas = "*"
+libnetcdf = "*"
+librdkit-dev = ">2024.09.6"
+openmm = ">=8.1"
+pandas = "*"
+qt-main = "*"
+rich = "*"
+tbb = "*"
+tbb-devel = "*"
+
+# Unix build tools and other platform-specific dependencies
+[target.linux-64.dependencies]
+make = "*"
+libtool = "*"
+sysroot_linux-64 = "==2.17"
+kartograf = ">=1.0.0"
+gemmi = ">=0.6.4,<0.7.0"
+
+# Host dependencies for local source builds (not used by generate_recipe.py)
+[target.linux-64.host-dependencies]
+sysroot_linux-64 = ">=2.34" # NOTE(review): conflicts with the "==2.17" pin above unless pixi lets host-dependencies override [dependencies] — confirm "pixi install -e dev" solves on linux-64
+c-compiler = "*"
+cxx-compiler = "*"
+
+[target.linux-aarch64.dependencies]
+make = "*"
+libtool = "*"
+kartograf = ">=1.0.0"
+
+[target.linux-aarch64.host-dependencies]
+c-compiler = "*"
+cxx-compiler = "*"
+
+[target.osx-arm64.dependencies]
+make = "*"
+libtool = "*"
+kartograf = ">=1.0.0"
+gemmi = ">=0.6.4,<0.7.0"
+
+[target.osx-arm64.host-dependencies]
+c-compiler = "*"
+cxx-compiler = "*"
+
+[target.win-64.dependencies]
+gemmi = ">=0.6.4,<0.7.0"
+
+[target.win-64.host-dependencies]
+c-compiler = "*"
+cxx-compiler = "*"
+
+# =============================================================================
+# OBS (OpenBioSim) feature
+# Dependencies for downstream OpenBioSim packages (biosimspace, ghostly, loch, somd2). 
+# ============================================================================= +[feature.obs.dependencies] +# biosimspace +configargparse = "*" +ipywidgets = "*" +kcombu_bss = "*" +lomap2 = "*" +nglview = "*" +openff-interchange-base = "*" +openff-toolkit-base = "*" +parmed = "*" +py3dmol = "*" +pydot = "*" +pygtail = "*" +pyyaml = "*" +# somd2 +loguru = "*" +numba = "*" +nvidia-ml-py = "*" + +[feature.obs.target.linux-64.dependencies] +# biosimspace +ambertools = ">=22" +gromacs = "*" +alchemlyb = "*" +mdtraj = "*" +mdanalysis = "*" +# loch +pyopencl = "*" +pycuda = "*" + +[feature.obs.target.linux-aarch64.dependencies] +gromacs = "*" +# ambertools, alchemlyb, mdtraj, mdanalysis not available on linux-aarch64 + +[feature.obs.target.osx-arm64.dependencies] +ambertools = ">=22" +# gromacs not available on osx-arm64 +alchemlyb = "*" +mdtraj = "*" +mdanalysis = "*" +# loch +pyopencl = "*" + +[feature.obs.target.win-64.dependencies] +# ambertools and gromacs not available on Windows +alchemlyb = "*" +mdtraj = "*" +mdanalysis = "*" +# loch +pycuda = "*" +pyopencl = "*" + +# ============================================================================= +# EMLE (emle-engine) feature +# ============================================================================= +[feature.emle.dependencies] +ase = "*" +loguru = "*" +pygit2 = "*" +pyyaml = "*" + +[feature.emle.target.linux-64.dependencies] +ambertools = ">=22" +deepmd-kit = "*" +nnpops = "*" +pytorch = "*" + +[feature.emle.target.linux-aarch64.dependencies] +pytorch = "*" +# ambertools, deepmd-kit, nnpops not available on linux-aarch64 + +[feature.emle.target.osx-arm64.dependencies] +ambertools = ">=22" +deepmd-kit = "*" +nnpops = "*" +pytorch = "*" +torchani = "*" +xtb-python = "*" + +# EMLE has no Windows-specific deps (ambertools, pytorch etc. 
excluded on Windows) + +# ============================================================================= +# Test feature +# ============================================================================= +[feature.test.dependencies] +black = "*" +pytest = "*" +rdkit = ">=2023.0.0" + +[feature.test.target.linux-64.dependencies] +kartograf = ">=1.0.0" +gemmi = ">=0.6.4,<0.7.0" + +[feature.test.target.linux-aarch64.dependencies] +kartograf = ">=1.0.0" + +[feature.test.target.osx-arm64.dependencies] +kartograf = ">=1.0.0" +gemmi = ">=0.6.4,<0.7.0" + +[feature.test.target.win-64.dependencies] +gemmi = ">=0.6.4,<0.7.0" + +# kartograf excluded from Windows test deps + +# ============================================================================= +# Lint feature (local development only, not included in recipes) +# ============================================================================= +[feature.lint.dependencies] +pre-commit = "*" +rattler-build = "*" +ruff = "*" + +# ============================================================================= +# Environments +# ============================================================================= +[environments] +default = [] +obs = ["obs"] +emle = ["emle"] +full = ["obs", "emle"] +test = ["test"] +dev = ["obs", "emle", "test", "lint"] + +[activation] +scripts = ["pixi.sh"] diff --git a/recipes/sire/bld.bat b/recipes/sire/build.bat similarity index 63% rename from recipes/sire/bld.bat rename to recipes/sire/build.bat index 3f9683041..e20de98de 100644 --- a/recipes/sire/bld.bat +++ b/recipes/sire/build.bat @@ -1,4 +1,4 @@ :: Sire build script for Windows. :: Build and install Sire. -python setup.py install --skip-deps +python setup.py install diff --git a/recipes/sire/build.sh b/recipes/sire/build.sh index 202bb1181..684860f98 100644 --- a/recipes/sire/build.sh +++ b/recipes/sire/build.sh @@ -6,4 +6,4 @@ set -e # Build/install Sire. 
-python setup.py install --skip-deps +python setup.py install diff --git a/recipes/sire/conda_build_config.yaml b/recipes/sire/conda_build_config.yaml deleted file mode 100644 index 4a15698f7..000000000 --- a/recipes/sire/conda_build_config.yaml +++ /dev/null @@ -1,19 +0,0 @@ -c_compiler: - - gcc # [linux] - - clang # [osx] - - vs2019 # [win] - -cxx_compiler: - - gxx # [linux] - - clangxx # [osx] - - vs2019 # [win] - -c_compiler_version: - - 12.3.0 # [linux] - -cxx_compiler_version: - - 12.3.0 # [linux] - -pin_run_as_build: - openmm: - max_pin: x.x diff --git a/recipes/sire/template.yaml b/recipes/sire/template.yaml deleted file mode 100644 index 9a33377bb..000000000 --- a/recipes/sire/template.yaml +++ /dev/null @@ -1,76 +0,0 @@ -{% set name = "sire" %} - -package: - name: {{ name }} - version: {{ environ.get('GIT_DESCRIBE_TAG', 'PR').replace('-','') }} - -source: - git_url: SIRE_REMOTE - git_tag: SIRE_BRANCH - -build: - number: {{ environ.get('GIT_DESCRIBE_NUMBER', 0) }} - -requirements: - build: - - {{ compiler('c') }} - - {{ compiler('cxx') }} - SIRE_BUILD_REQUIREMENTS - host: - SIRE_HOST_REQUIREMENTS - run: - SIRE_RUN_REQUIREMENTS - run_constrained: - - {{ pin_compatible('rdkit', max_pin='x.x.x') }} - -test: - script_env: - - SIRE_DONT_PHONEHOME - requires: - - pytest - SIRE_TEST_REQUIREMENTS - imports: - - sire - - sire.analysis - - sire.base - - sire.cas - - sire.cluster - - sire.error - - sire.ff - - sire.id - - sire.io - - sire.maths - - sire.mm - - sire.mol - - sire.move - - sire.qt - - sire.squire - - sire.stream - - sire.system - - sire.units - - sire.vol - source_files: - - tests - commands: - - pytest -vvv --color=yes --runveryslow tests - -about: - home: https://github.com/openbiosim/sire - license: GPL-3.0-or-later - license_file: '{{ environ["RECIPE_DIR"] }}/LICENSE' - summary: "An advanced molecular modelling framework." 
- dev_url: https://github.com/openbiosim/sire - doc_url: https://sire.openbiosim.org - description: | - Sire is a molecular modelling framework that provides - extensive functionality to manipulate representations - of biomolecular systems. - It is used as a key component of BioSimSpace, and is - distributed and supported as an open source community - project by OpenBioSim. - -extra: - recipe-maintainers: - - chryswoods - - jmichel80 - - lohedges diff --git a/requirements_bss.txt b/requirements_bss.txt deleted file mode 100644 index 744b98944..000000000 --- a/requirements_bss.txt +++ /dev/null @@ -1,40 +0,0 @@ -# BioSimSpace requirements. These aren't needed by Sire, but allow it to be -# built in a self-consistent way, i.e. so BioSimSpace can be installed on top -# of it. These will be added to the "host" section of Sire's requirements, -# hence Sire will be built in an environment in which they are already -# installed. - -# Optional packages: These packages aren't required by BioSimSpace, but are -# commonly installed by users alongside it. Adding them here allows a -# user to create environments with and without these packages installed. - -openmmtools >= 0.21.5 - -# Both ambertools and gromacs aren't available on Windows. -# The arm64 gromacs package is current broken. -ambertools >= 22 ; sys_platform != "win32" -gromacs ; sys_platform != "win32" and platform_machine != "arm64" - -# The following are actual BioSimSpace run-time requirements. Please update -# this list as new requirements are added. 
-configargparse -ipywidgets -kcombu_bss -lomap2 -nglview -openff-interchange-base -openff-toolkit-base -parmed -py3dmol -pydot -pygtail -pyyaml - -# The below are packages that aren't available on all -# platforms/OSs and so need to be conditionally included - -alchemlyb ; platform_machine != "aarch64" # Needs pymbar, not on Linux/aarch64 - -mdtraj ; platform_machine != "aarch64" # not on Linux/aarch64 - -mdanalysis ; platform_machine != "aarch64" # not on Linux/aarch64 diff --git a/requirements_build.txt b/requirements_build.txt deleted file mode 100644 index 679c45938..000000000 --- a/requirements_build.txt +++ /dev/null @@ -1,19 +0,0 @@ -# Build requirements for Sire. - -cmake >= 3.30.0 -git -python - -make ; sys_platform == "darwin" -libtool ; sys_platform == "darwin" - -make ; sys_platform == "linux" -libtool ; sys_platform == "linux" -sysroot_linux-64==2.17 ; sys_platform == "linux" - -# These packages are needed to compile -# the SireGemmi plugin -gemmi >=0.6.4<0.7.0 - -pybind11 ==2.11.1 ; sys_platform == "win32" -pybind11 sys_platform != "win32" diff --git a/requirements_emle.txt b/requirements_emle.txt deleted file mode 100644 index 23f445dcd..000000000 --- a/requirements_emle.txt +++ /dev/null @@ -1,11 +0,0 @@ -ambertools >= 22 ; sys_platform != "win32" -ase -deepmd-kit ; platform_machine != "aarch64" and sys_platform != "win32" -loguru -nnpops ; platform_machine != "aarch64" and sys_platform != "win32" -pygit2 -pytorch ; sys_platform != "win32" -python -pyyaml -torchani ; sys_platform != "win32" and (sys_platform != "linux" and platform_machine != "aarch64") -xtb-python ; sys_platform != "win32" and (sys_platform != "linux" and python_version != "3.12") diff --git a/requirements_host.txt b/requirements_host.txt deleted file mode 100644 index ef32ada64..000000000 --- a/requirements_host.txt +++ /dev/null @@ -1,21 +0,0 @@ -# Host requirements for Sire. 
- -gsl -lazy_import -libboost-devel -libboost-python-devel -libcblas -libnetcdf -librdkit-dev > 2024.09.6 -openmm >= 8.1 -pandas -python -qt-main -rich -tbb -tbb-devel -gemmi >=0.6.4<0.7.0 - -# kartograf on Windows pulls in an openfe that has an old / incompatble -# ambertools -kartograf >= 1.0.0 ; sys_platform != "win32" diff --git a/requirements_run.txt b/requirements_run.txt deleted file mode 100644 index 5a9028975..000000000 --- a/requirements_run.txt +++ /dev/null @@ -1,11 +0,0 @@ -# Runtime requirements for Sire. - -gsl -lazy_import -libnetcdf -openmm -pandas -python -qt-main -rich -tbb diff --git a/requirements_test.txt b/requirements_test.txt deleted file mode 100644 index ba7702c13..000000000 --- a/requirements_test.txt +++ /dev/null @@ -1,9 +0,0 @@ -# Test requirements. These aren't needed by Sire, but if installed, will -# enable test to run to validate advanced functionality - -rdkit >=2023.0.0 -gemmi >=0.6.4<0.7.0 - -# kartograf on Windows pulls in an openfe that has an old / incompatble -# ambertools -kartograf >= 1.0.0 ; sys_platform != "win32" diff --git a/setup.py b/setup.py index c5cfa5722..5e12e62f0 100644 --- a/setup.py +++ b/setup.py @@ -1,31 +1,29 @@ -"""l +""" Installation script for Sire This assumes that the python that is used to execute this script -is the conda / miniconda / miniforge environment into -which you want to install Sire. +is within a conda / pixi environment that already has all of the +required dependencies installed. Use pixi to create the environment +before running this script, e.g.: -USAGE: + pixi install -e dev - python setup.py install_requires : Will install all of the dependencies +USAGE: - python setup.py build : Will install requires and will then - compile sire (takes a long time!) + python setup.py build : Compile sire (takes a long time!) 
- python setup.py install : Will build sire and will then install + python setup.py install : Will build sire and then install python setup.py install_module : Will only install the Python module - -You can use `--skip-deps` to skip the installation of the conda dependencies -You can use `--skip-build` to skip the building of the corelib and wrappers """ -import sys +import glob +import json import os import platform import subprocess import shutil -import glob +import sys try: # We have to check the version, but we can't do this by @@ -56,7 +54,8 @@ raise EnvironmentError( f"This environment already contains an install of Sire version {curver}. " "Please delete the installation or create a new environment before " - f"installing version {ver}. Also remove old build directories from 'build'." + f"installing version {ver}. If you are using pixi, run: pixi clean -e dev. " + "Also remove old build directories from 'build'. " ) try: @@ -68,10 +67,6 @@ except Exception: total_memory_gb = None -# Debug - we need to print out all of the environment variables -# for key, value in os.environ.items(): -# print(f"{key}\n{value}\n") - # We can only run this script from the sire directory curdir = os.path.abspath(".") @@ -79,56 +74,38 @@ print("You can only run this script from the sire directory") sys.exit(-1) -# We need to import the 'parse_requirements' function to get the list -# of requirements - this will be in the 'actions' directory -sys.path.insert(0, os.path.join(curdir, "actions")) - -# We need to verify that this is a Python that is part of a -# conda installation - +# Detect the environment prefix (conda or pixi) if "PREFIX" in os.environ and "BUILD_PREFIX" in os.environ: - print("This a build initiated by conda-build") + print("This a build initiated by conda-build or rattler-build") conda_base = os.path.abspath(os.environ["PREFIX"]) print(f"Setting conda-base to {conda_base}") else: - # Find the path to the conda executable + # Find the prefix from the Python 
executable conda_base = os.path.abspath(os.path.dirname(sys.executable)) if os.path.basename(conda_base) == "bin": conda_base = os.path.dirname(conda_base) -python_exe = None -conda = None - -if "CONDA_EXE" in os.environ: - conda = os.environ["CONDA_EXE"] -else: - conda = None - if "CONDA_DEFAULT_ENV" in os.environ: conda_env = os.environ["CONDA_DEFAULT_ENV"] else: conda_env = None +python_exe = None + if os.path.exists(os.path.join(conda_base, "python.exe")): # Windows conda_bin = os.path.join(conda_base, "Library", "bin") python_exe = os.path.join(conda_base, "python.exe") - - if conda is None: - conda = os.path.join(conda_base, "Scripts", "conda.exe") elif os.path.exists(os.path.join(conda_base, "bin", "python")): # MacOS and Linux conda_bin = os.path.join(conda_base, "bin") python_exe = os.path.join(conda_bin, "python") - - if conda is None: - conda = os.path.join(conda_bin, "conda") else: print( "Cannot find a 'python' binary in directory '%s'. " "Are you running this script using the python executable " - "from a valid miniconda or anaconda installation?" % conda_base + "from a valid conda or pixi environment?" % conda_base ) sys.exit(-1) @@ -249,256 +226,31 @@ def parse_args(): "(defaults to the number of CPU cores used for compiling corelib)", ) parser.add_argument( - "--install-bss-deps", - action="store_true", - default=False, - help="Install BioSimSpace's dependencies too. This helps ensure " - "compatibility between Sire's and BioSimSpace's dependencies.", - ) - parser.add_argument( - "--install-emle-deps", - action="store_true", - default=False, - help="Install emle-engine's dependencies too.", - ) - parser.add_argument( - "--skip-deps", + "--skip-build", action="store_true", default=False, - help="Skip the installation of the dependencies (only use if you know " - "that they are already installed)", - ) - parser.add_argument( - "--skip-dep", - action="append", - help="List of dependencies to skip when installing. 
This is useful when " - "you know that a particular dependency is already installed or " - "it is uninstallable on your system.", + help="Skip the build of the C++ code (only use if you know that " + "the C++ code is already built)", ) parser.add_argument( - "--skip-build", + "--install-metadata", action="store_true", default=False, - help="Skip the build of the C++ code (only use if you know that " - "the C++ code is already built)", + help="Install package metadata. This is useful when you are building " + "from source but still want to be able to query the installation using " + "conda list sire.", ) parser.add_argument( "action", nargs="*", - help="Should be one of 'install_requires', 'build', 'install' or 'install_module'.\n" - "\n [install_requires] : Just install the conda dependencies.\n" - " [build] : 'install_requires' plus compile and install corelib, and just compile the wrappers.\n" + help="Should be one of 'build', 'install' or 'install_module'.\n" + "\n [build] : Compile and install corelib, and just compile the wrappers.\n" " [install] : 'build' plus install the wrappers and install the module.\n" - " [install_module] : Just install the module (no compilation or conda dependencies).", + " [install_module] : Just install the module (no compilation).", ) return parser.parse_args() -_installed_deps = None - - -def _get_installed(conda: str): - """Return the list of installed conda dependencies""" - global _installed_deps - - if _installed_deps is None: - p = subprocess.Popen([conda, "list"], stdout=subprocess.PIPE) - _installed_deps = str(p.stdout.read()) - - return _installed_deps - - -def is_installed(dep: str, conda: str) -> bool: - """Return whether or not the passed dependency is installed""" - installed = _get_installed(conda=conda) - return installed.find(dep) != -1 - - -def _add_to_dependencies(dependencies, lines): - import re - - for line in lines: - line = line.lstrip().rstrip() - - words = re.split("[<>]*=", line) - - if len(words) > 0: - 
package = words[0] - dependencies[package] = line - - -_is_conda_prepped = False - -dependencies_to_skip = [] - - -def conda_install( - dependencies, install_bss_reqs=False, install_emle_reqs=False, yes=True -): - """Install the passed list of dependencies using conda""" - - conda_exe = conda - - global _is_conda_prepped - - if not _is_conda_prepped: - if install_bss_reqs: - cmd = "%s config --prepend channels openbiosim/label/dev" % conda_exe - print("Activating openbiosim channel channel using: '%s'" % cmd) - status = subprocess.run(cmd.split()) - if status.returncode != 0: - print("Failed to add openbiosim channel!") - sys.exit(-1) - - print("\nSetting channel priorities to favour conda-forge") - cmd = "%s config --prepend channels conda-forge" % conda_exe - print("Activating conda-forge channel using: '%s'" % cmd) - status = subprocess.run(cmd.split()) - if status.returncode != 0: - print("Failed to add conda-forge channel!") - sys.exit(-1) - - cmd = "%s config --set channel_priority strict" % conda_exe - print("Setting channel priority to strict using: '%s'" % cmd) - status = subprocess.run(cmd.split()) - if status.returncode != 0: - print("Failed to set channel priority!") - sys.exit(-1) - - _is_conda_prepped = True - - conda_install = [conda, "install"] - - if yes: - conda_install.append("--yes") - - deps = [] - - global dependencies_to_skip - - try: - if len(dependencies_to_skip) > 0: - print(f"Skipping the following dependencies: {dependencies_to_skip}") - except Exception: - dependencies_to_skip = [] - - for dependency in dependencies: - # remove any quotes from the dependency - dependency = dependency.replace("\"", "") - - if dependency == "python" or is_installed(dependency, conda_exe): - # no need to install again - continue - - skip_dep = False - - for skip in dependencies_to_skip: - if dependency.find(skip) != -1: - skip_dep = True - break - - if skip_dep: - print(f"Skipping {dependency}") - continue - - # remove duplicates - if dependency not in 
deps: - deps.append(dependency) - - dependencies = deps - - cmd = [*conda_install, *dependencies] - print("\nInstalling packages using:\n\n%s\n\n" % " ".join(cmd)) - status = subprocess.run(cmd) - - if status.returncode != 0: - print("Something went wrong installing dependencies!") - print("If the python or conda executables were updated") - print("in the last install, then this can prevent them") - print("from running again. Please re-execute this script.") - sys.exit(-1) - - # Install emle-engine. - if install_emle_reqs: - cmd = [ - "pip", - "install", - "git+https://github.com/chemle/emle-engine.git", - ] - status = subprocess.run(cmd) - if status.returncode != 0: - print("Something went wrong installing emle-engine!") - sys.exit(-1) - - -def install_requires(install_bss_reqs=False, install_emle_reqs=False, yes=True): - """Installs all of the dependencies. This can safely be called - multiple times, as it will cache the result to prevent future - installs taking too long - """ - print(f"Installing requirements for {platform_string}") - - if not os.path.exists(conda): - print("\nSire can only be installed into a conda or miniconda environment.") - print( - "Please install conda, miniconda, miniforge or similar, then " - "activate the conda environment, then rerun this installation " - "script." 
- ) - sys.exit(-1) - - try: - import pip_requirements_parser as _pip_requirements_parser - from parse_requirements import parse_requirements - except Exception: - # this didn't import - maybe we are missing pip-requirements-parser - print("Installing pip-requirements-parser") - conda_install( - ["pip-requirements-parser"], - install_bss_reqs, - install_emle_reqs=False, - yes=yes, - ) - try: - from parse_requirements import parse_requirements - except ImportError as e: - print("\n\n[ERROR] ** You need to install pip-requirements-parser") - print("Run `conda install -c conda-forge pip-requirements-parser\n\n") - raise e - - try: - import pkg_resources - except Exception: - # this didn't import - we are missing setuptools - print("Installing setuptools") - conda_install( - ["setuptools"], - install_bss_reqs, - install_emle_reqs=False, - yes=yes) - try: - import pkg_resources - except Exception: - print("\n\n[ERROR] ** You need to install setuptools") - print("Run 'conda install -c conda-forge setuptools\n\n") - raise e - - reqs = parse_requirements("requirements_host.txt") - build_reqs = parse_requirements("requirements_build.txt") - - if install_bss_reqs: - bss_reqs = parse_requirements("requirements_bss.txt") - reqs = reqs + bss_reqs - - if install_emle_reqs: - emle_reqs = parse_requirements("requirements_emle.txt") - reqs = reqs + emle_reqs - - dependencies = build_reqs + reqs - conda_install(dependencies, install_bss_reqs, install_emle_reqs, yes=yes) - conda_install(dependencies, install_bss_reqs, yes=yes) - - def add_default_cmake_defs(cmake_defs, ncores): for a in ( "ANACONDA_BASE=%s" % conda_base.replace("\\", "/"), @@ -536,13 +288,12 @@ def make_cmd(ncores, install=False): def _get_build_ext(): if "CONDA_BUILD" in os.environ and os.environ["CONDA_BUILD"] == "1": return "conda_build" + elif "PIXI_ENVIRONMENT_NAME" in os.environ: + return os.environ["PIXI_ENVIRONMENT_NAME"] + elif conda_env is not None: + return conda_env else: - if conda_env is not None: - ext 
= "_" + conda_env.replace(" ", "_").replace(".", "_") - else: - ext = "" - - return os.path.basename(conda_base.replace(" ", "_").replace(".", "_")) + ext + return os.path.basename(conda_base.replace(" ", "_").replace(".", "_")) def _get_bin_dir(): @@ -579,60 +330,39 @@ def build(ncores: int = 1, npycores: int = 1, coredefs=[], pydefs=[]): # get the compilers if conda_build: - print("This is a conda build") - - CXX = os.environ["CXX"] - CC = os.environ["CC"] - - # make sure that these compilers are in the path - CXX_bin = shutil.which(CXX) - CC_bin = shutil.which(CC) - - print(f"{CXX} => {CXX_bin}") - print(f"{CC} => {CC_bin}") + print("This is a conda/rattler build") - if CXX_bin is None or CC_bin is None: - print("Cannot find the compilers requested by conda-build in the PATH") - print("Please check that the compilers are installed and available.") - sys.exit(-1) - - # use the full paths, in case CMake struggles - CXX = CXX_bin - CC = CC_bin + if is_windows: + # Windows: vcvars is activated, let CMake find the MSVC. + CXX = None + CC = None + else: + # Try to get compilers from environment + CXX = os.environ.get("CXX") + CC = os.environ.get("CC") elif is_macos: try: CXX = glob.glob(os.path.join(bindir, "clang++"))[0] CC = glob.glob(os.path.join(bindir, "clang"))[0] except Exception: - conda_install(["clang", "clangxx"], False, yes=True) - try: - CXX = glob.glob(os.path.join(bindir, "clang++"))[0] - CC = glob.glob(os.path.join(bindir, "clang"))[0] - except Exception: - print("Cannot find the conda clang++ binaries!") - print("Please install these, e.g. 
via") - print("conda install clang clangxx") - sys.exit(-1) + print("Cannot find the conda clang++ binaries!") + print("Please ensure your environment has clang and clangxx installed.") + print("If using pixi, run: pixi install -e dev") + sys.exit(-1) elif is_linux: try: CXX = glob.glob(os.path.join(bindir, "*-g++"))[0] CC = glob.glob(os.path.join(bindir, "*-gcc"))[0] except Exception: - # Need this version of gcc to stay compatible with conda-forge - # (i.e. gemmi needs the exact same compiler version) - conda_install(["gcc==12.3.0", "gxx==12.3.0"], False, yes=True) - try: - CXX = glob.glob(os.path.join(bindir, "*-g++"))[0] - CC = glob.glob(os.path.join(bindir, "*-gcc"))[0] - except Exception: - print("Cannot find the conda g++ binaries!") - print("Please install these, e.g. via") - print("conda install gcc gxx") - sys.exit(-1) - - print(f"Using compilers {CC} | {CXX}") + print("Cannot find the conda g++ binaries!") + print("Please ensure your environment has gcc and gxx installed.") + print("If using pixi, run: pixi install -e dev") + sys.exit(-1) + + if CC is not None and CXX is not None: + print(f"Using compilers {CC} | {CXX}") # Make sure all of the above output is printed to the screen # before we start running any actual compilation @@ -693,9 +423,9 @@ def build(ncores: int = 1, npycores: int = 1, coredefs=[], pydefs=[]): sourcedir, ] - if CC: + if CC is not None: os.environ["CC"] = CC - if CXX: + if CXX is not None: os.environ["CXX"] = CXX print(" ".join(cmake_cmd)) @@ -920,21 +650,14 @@ def install(ncores: int = 1, npycores: int = 1): if __name__ == "__main__": + OLDPWD = os.getcwd() + args = parse_args() if len(args.action) != 1: - print("Please use either 'install_requires', 'build' or 'install'") + print("Please use either 'build', 'install' or 'install_module'") sys.exit(-1) - install_bss = args.install_bss_deps - install_emle = args.install_emle_deps - - if install_emle and is_windows: - raise NotImplementedError("EMLE is current not supported on 
Windows") - - if args.skip_dep is not None: - dependencies_to_skip = args.skip_dep - action = args.action[0] if is_windows and (args.generator is None or len(args.generator) == 0): @@ -946,11 +669,6 @@ def install(ncores: int = 1, npycores: int = 1): os.environ["INSTALL_NAME_TOOL"] = "install_name_tool" if action == "install": - if not (args.skip_deps or args.skip_build): - install_requires( - install_bss_reqs=install_bss, install_emle_reqs=install_emle - ) - if not args.skip_build: build( ncores=args.ncores[0], @@ -962,9 +680,6 @@ def install(ncores: int = 1, npycores: int = 1): install(ncores=args.ncores[0], npycores=args.npycores[0]) elif action == "build": - if not args.skip_deps: - install_requires(install_bss_reqs=install_bss) - build( ncores=args.ncores[0], npycores=args.npycores[0], @@ -972,16 +687,36 @@ def install(ncores: int = 1, npycores: int = 1): pydefs=args.wrapper, ) - elif action == "install_requires": - install_requires( - install_bss_reqs=install_bss, install_emle_reqs=install_emle, yes=False - ) - elif action == "install_module": install_module(ncores=args.ncores[0]) else: print( - f"Unrecognised action '{action}'. Please use 'install_requires', " + f"Unrecognised action '{action}'. Please use " "'build', 'install' or 'install_module'" ) + + # Create minimist package metadata so that 'conda list sire' works. + if args.install_metadata: + os.chdir(OLDPWD) + if "CONDA_PREFIX" in os.environ: + metadata_dir = os.path.join(os.environ["CONDA_PREFIX"], "conda-meta") + if os.path.exists(metadata_dir): + # Get the Python version. 
+ pyver = f"py{sys.version_info.major}{sys.version_info.minor}" + metadata = { + "name": "sire", + "version": open("version.txt").readline().strip(), + "build": pyver, + "build_number": 0, + "channel": "local", + "size": 0, + "license": "GPL-3.0-or-later", + "subdir": platform_string, + } + metadata_file = os.path.join( + metadata_dir, f"sire-{metadata['version']}-{pyver}.json" + ) + with open(metadata_file, "w") as f: + json.dump(metadata, f, indent=2) + print(f"Created conda package metadata file: {metadata_file}") diff --git a/src/sire/__init__.py b/src/sire/__init__.py index 44049f4a2..7ec14b306 100644 --- a/src/sire/__init__.py +++ b/src/sire/__init__.py @@ -213,7 +213,7 @@ def v(x, y=None, z=None, units=None): "You cannot specify y or z values when passing a list or tuple." ) - (x, y, z) = (x[0], x[1], x[2]) + x, y, z = (x[0], x[1], x[2]) else: if y is None: @@ -338,7 +338,7 @@ def molid( name = num num = None elif type(name) is int: - (num, name) = (name, num) + num, name = (name, num) else: raise TypeError("The number cannot be a string.") @@ -395,7 +395,7 @@ def atomid(num: int = None, name: str = None, idx: int = None, case_sensitive=Tr name = num num = None elif type(name) is int: - (num, name) = (name, num) + num, name = (name, num) else: raise TypeError("The number cannot be a string.") @@ -452,7 +452,7 @@ def resid(num: int = None, name: str = None, idx: int = None, case_sensitive=Tru name = num num = None elif type(name) is int: - (num, name) = (name, num) + num, name = (name, num) else: raise TypeError("The number cannot be a string.") @@ -508,7 +508,7 @@ def chainid(idx: int = None, name: str = None, case_sensitive: bool = True): name = idx idx = None elif type(name) is int: - (idx, name) = (name, idx) + idx, name = (name, idx) else: raise TypeError("The index cannot be a string.") @@ -558,7 +558,7 @@ def segid(idx: int = None, name: str = None, case_sensitive: bool = True): name = idx idx = None elif type(name) is int: - (idx, name) = (name, idx) 
+ idx, name = (name, idx) else: raise TypeError("The index cannot be a string.") diff --git a/src/sire/base/_progressbar.py b/src/sire/base/_progressbar.py index 41541b168..219c05781 100644 --- a/src/sire/base/_progressbar.py +++ b/src/sire/base/_progressbar.py @@ -2,7 +2,6 @@ from ..legacy.Base import ProgressBar as _ProgressBar - _cached_in_notebook = None diff --git a/src/sire/mol/__init__.py b/src/sire/mol/__init__.py index 2f4d09770..20a0d5ca8 100644 --- a/src/sire/mol/__init__.py +++ b/src/sire/mol/__init__.py @@ -1667,9 +1667,10 @@ def _dynamics( Whether or not to swap the end states. If this is True, then the perturbation will run from the perturbed back to the reference molecule (the perturbed molecule will be at lambda=0, - while the reference molecule will be at lambda=1). This will - use the coordinates of the perturbed molecule as the - starting point. + while the reference molecule will be at lambda=1). Note that this + will still use the coordinates of the reference state as the + starting point for the simulation, since it is assumed that + this reflects the current equilibrated state of the system. ignore_perturbations: bool Whether or not to ignore perturbations. 
If this is True, then diff --git a/src/sire/mol/_smiles.py b/src/sire/mol/_smiles.py index 72eae3308..5c537871b 100644 --- a/src/sire/mol/_smiles.py +++ b/src/sire/mol/_smiles.py @@ -292,7 +292,7 @@ def _selector_view2d( if filename is not None: import os - (basename, format) = os.path.splitext(os.path.abspath(filename)) + basename, format = os.path.splitext(os.path.abspath(filename)) while format.startswith("."): format = format[1:] @@ -509,7 +509,7 @@ def _view2d( if filename is not None: import os - (basename, format) = os.path.splitext(os.path.abspath(filename)) + basename, format = os.path.splitext(os.path.abspath(filename)) while format.startswith("."): format = format[1:] diff --git a/src/sire/mol/_trajectory.py b/src/sire/mol/_trajectory.py index 37ccac992..13b6a8971 100644 --- a/src/sire/mol/_trajectory.py +++ b/src/sire/mol/_trajectory.py @@ -114,7 +114,7 @@ def __init__( from ..legacy.Mol import TrajectoryAligner if align is not None: - (align, reference) = _get_align_atoms_and_reference( + align, reference = _get_align_atoms_and_reference( view=self._view, align=align, mapping=mapping, @@ -927,7 +927,7 @@ def rmsd( else: align = True - (atoms, reference) = _get_align_atoms_and_reference( + atoms, reference = _get_align_atoms_and_reference( view=self.current(), align=reference, frame=frame, diff --git a/src/sire/restraints/_restraints.py b/src/sire/restraints/_restraints.py index cc443facc..c7b23ea83 100644 --- a/src/sire/restraints/_restraints.py +++ b/src/sire/restraints/_restraints.py @@ -790,8 +790,8 @@ def morse_potential( If True, will attempt to automatically parametrise the Morse potential from a perturbation that annihilates a bond. This requires that 'mols' contains exactly one molecule that is perturbable, and that this - molecule contains exactly one bond that is annihilated at lambda=1. - The atoms involved in the annihilated bond will be used as 'atoms0' + molecule contains exactly one bond that is annihilated or created. 
+ The atoms involved in this bond will be used as 'atoms0' and 'atoms1', the equilibrium distance r0 will be set to the original bond length, and the force constant k will be set to the force constant of the bond in the unperturbed state. Note that 'de' must still be provided. @@ -838,24 +838,43 @@ def morse_potential( if len(ref_mol) != 1: raise ValueError( - "We need exactly one molecule that is perturbable to automatically " + "Exactly one perturbable molecule is required to automatically " "set up the Morse potential restraints" ) perturbable_mol = ref_mol[0] pert = perturbable_mol.perturbation(map=map) pert_omm = pert.to_openmm() changed_bonds = pert_omm.changed_bonds(to_pandas=False) + changed_bonds_df = pert_omm.changed_bonds(to_pandas=True) - # Attempt to find the bond that is annihilated at lambda=1 + # Check that exactly one bond is being created or annihilated, i.e that k0 column + # or k1 column has exactly one zero value + n_bonds_created = (changed_bonds_df["k0"] == 0).sum() + n_bonds_annihilated = (changed_bonds_df["k1"] == 0).sum() + if n_bonds_created + n_bonds_annihilated != 1: + raise ValueError( + "Exactly one bond must be created or annihilated to automatically " + "set up the Morse potential restraints" + ) + + # Attempt to find this bond now for bond in changed_bonds: bond_name, length0, length1, k0, k1 = bond - if k1 == 0: + if k1 == 0 or k0 == 0: - atom0_idx = [bond_name.atom0().index().value()][0] - atom1_idx = [bond_name.atom1().index().value()][0] + # If the bond is being created (k0 == 0), then we should + # use the parameters from the final state (length1, k1). + # If the bond is being annihilated (k1 == 0), then we don't + # need to do anything as length0 and k0 are already selected. 
+ if k0 == 0: + length0 = length1 + k0 = k1 length0 = u(f"{length0} nm") + atom0_idx = [bond_name.atom0().index().value()][0] + atom1_idx = [bond_name.atom1().index().value()][0] + # Divide k0 by 2 to convert from force constant to sire half # force constant k if k is None: diff --git a/src/sire/utils/_console.py b/src/sire/utils/_console.py index 0f55dcf8e..53156ac8f 100644 --- a/src/sire/utils/_console.py +++ b/src/sire/utils/_console.py @@ -4,7 +4,6 @@ from contextlib import contextmanager as _contextmanager - __all__ = ["Console", "Table"] diff --git a/tests/conftest.py b/tests/conftest.py index 8f6d4fad8..4a46587c8 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -155,6 +155,11 @@ def cyclopentane_cyclohexane(): return sr.load_test_files("cyclopentane_cyclohexane.bss") +@pytest.fixture(scope="session") +def propane_cyclopropane(): + return sr.load_test_files("propane_cyclopropane.bss") + + @pytest.fixture(scope="session") def pentane_cyclopentane(): return sr.load_test_files("pentane_cyclopentane.bss") diff --git a/tests/convert/test_openmm_restraints.py b/tests/convert/test_openmm_restraints.py index f8bcd1aeb..fe808b55d 100644 --- a/tests/convert/test_openmm_restraints.py +++ b/tests/convert/test_openmm_restraints.py @@ -60,8 +60,7 @@ def test_openmm_distance_restraints(ala_mols, openmm_platform): # check that we get the same result using bond restraints restraints2 = sr.restraints.bond( - mols, atoms0=mols[0][0], atoms1=mols[-1][0], r0="5A", - use_pbc=True + mols, atoms0=mols[0][0], atoms1=mols[-1][0], r0="5A", use_pbc=True ) assert len(restraints2) == 1 diff --git a/tests/io/test_sdf.py b/tests/io/test_sdf.py index 646de1d76..5ec973d3e 100644 --- a/tests/io/test_sdf.py +++ b/tests/io/test_sdf.py @@ -16,6 +16,7 @@ def test_charge(): from math import isclose + import os import tempfile # SDF format has a weird mapping for formal charge. 
Here the keys are @@ -34,12 +35,15 @@ def test_charge(): for c0, c1 in zip(mol.property("formal_charge").to_list(), mapping.values()): assert isclose(c0.value(), c1) - # Write back to file. - with tempfile.NamedTemporaryFile(suffix=".sdf") as f: - sr.save(mol, f.name) + # Write back to file. Use TemporaryDirectory to avoid Windows file locking + # issues with NamedTemporaryFile. + with tempfile.TemporaryDirectory() as tmpdir: + path = os.path.join(tmpdir, "test.sdf") + sr.save(mol, path) # Read back in and check that the charges are still correct. - for c0, c1 in zip(mol.property("formal_charge").to_list(), mapping.values()): + mol2 = sr.load(path)[0] + for c0, c1 in zip(mol2.property("formal_charge").to_list(), mapping.values()): assert isclose(c0.value(), c1) diff --git a/tests/qm/test_qm.py b/tests/qm/test_qm.py index 555b86857..897d81f75 100644 --- a/tests/qm/test_qm.py +++ b/tests/qm/test_qm.py @@ -25,7 +25,7 @@ def test_callback_method(): """Makes sure that a callback method works correctly""" class Test: - def callback(self, a, b, c, d, e=None): + def callback(self, a, b, c, d, e=None, f=None): return (42, d, c) # Instantiate the class. @@ -39,19 +39,20 @@ def callback(self, a, b, c, d, e=None): b = [3, 4] c = [a, b] d = [b, a] - e = [4, 5] + e = [[1, 2, 3], [4, 5, 6], [7, 8, 9]] + f = [4, 7] # Call the callback. - result = cb.call(a, b, c, d, e) + result = cb.call(a, b, c, d, e, f) # Make sure the result is correct. - assert result == (42, d, c) == test.callback(a, b, c, d) + assert result == (42, d, c) == test.callback(a, b, c, d, e, f) def test_callback_function(): """Makes sure that a callback function works correctly""" - def callback(a, b, c, d, e=None): + def callback(a, b, c, d, e=None, f=None): return (42, d, c) # Create a callback object. @@ -62,13 +63,14 @@ def callback(a, b, c, d, e=None): b = [3, 4] c = [a, b] d = [b, a] - e = [4, 5] + e = [[1, 2, 3], [4, 5, 6], [7, 8, 9]] + f = [4, 5] # Call the callback. 
- result = cb.call(a, b, c, d, e) + result = cb.call(a, b, c, d, e, f) # Make sure the result is correct. - assert result == (42, d, c) == callback(a, b, c, d) + assert result == (42, d, c) == callback(a, b, c, d, e, f) @pytest.mark.parametrize( @@ -419,7 +421,7 @@ def test_create_engine(ala_mols): """ # A test callback function. Returns a known energy and dummy forces. - def callback(numbers_qm, charges_mm, xyz_qm, xyz_mm, idx_mm=None): + def callback(numbers_qm, charges_mm, xyz_qm, xyz_mm, cell=None, idx_mm=None): return (42, xyz_qm, xyz_mm) # Create a local copy of the test system. diff --git a/tests/restraints/test_morse_potential_restraints.py b/tests/restraints/test_morse_potential_restraints.py index 4addbcfd1..f994614a5 100644 --- a/tests/restraints/test_morse_potential_restraints.py +++ b/tests/restraints/test_morse_potential_restraints.py @@ -21,8 +21,9 @@ def test_morse_potential_restraints_setup(cyclopentane_cyclohexane): assert restraints[0].de().value() == 50.0 -def test_morse_potential_restraint_auto_param(cyclopentane_cyclohexane): - """Tests that morse_potential restraints can be set up correctly with automatic parametrisation.""" +def test_morse_potential_restraint_annihiliation_auto_param(cyclopentane_cyclohexane): + """Tests that morse_potential restraints can be set up correctly with automatic parametrisation + when a bond is to be annihilated.""" mols = cyclopentane_cyclohexane.clone() restraints = sr.restraints.morse_potential( mols, @@ -36,6 +37,20 @@ def test_morse_potential_restraint_auto_param(cyclopentane_cyclohexane): assert restraints[0].de().value() == 25.0 +def test_morse_potential_restraint_creation_auto_param(propane_cyclopropane): + """Tests that morse_potential restraints can be set up correctly with automatic parametrisation + when a bond is to be created.""" + mols = propane_cyclopropane.clone() + restraints = sr.restraints.morse_potential( + mols, + de="25 kcal mol-1", + auto_parametrise=True, + ) + assert 
restraints.num_restraints() == 1 + assert restraints[0].atom0() == 0 + assert restraints[0].atom1() == 2 + + def test_morse_potential_restraint_auto_param_override(cyclopentane_cyclohexane): """Tests that morse_potential restraints can be set up correctly with automatic parametrisation and some parameters can be overwritten.""" mols = cyclopentane_cyclohexane.clone() diff --git a/tests/somd/test_standardstatecorrection.py b/tests/somd/test_standardstatecorrection.py index f7b592345..6f3d6176d 100644 --- a/tests/somd/test_standardstatecorrection.py +++ b/tests/somd/test_standardstatecorrection.py @@ -28,7 +28,6 @@ import sire as sr - try: _wget = sr.legacy.Base.findExe("wget") have_wget = True diff --git a/version.txt b/version.txt index 56488616f..f2cedddf7 100644 --- a/version.txt +++ b/version.txt @@ -1 +1 @@ -2025.3.0 +2025.4.0 diff --git a/wrapper/Convert/CMakeLists.txt b/wrapper/Convert/CMakeLists.txt index 9528c860f..7eacb4607 100644 --- a/wrapper/Convert/CMakeLists.txt +++ b/wrapper/Convert/CMakeLists.txt @@ -10,7 +10,8 @@ set( CMAKE_CXX_FLAGS "${OPTIMISED_CXX_FLAGS}" ) add_subdirectory (SireRDKit) add_subdirectory (SireOpenMM) -add_subdirectory (SireGemmi) +# SireGemmi currently disabled due to build issues +# add_subdirectory (SireGemmi) # installation set( INSTALLDIR ${SIRE_PYTHON}/sire/legacy/Convert ) diff --git a/wrapper/Convert/SireOpenMM/PyQMCallback.pypp.cpp b/wrapper/Convert/SireOpenMM/PyQMCallback.pypp.cpp index 1264f0745..885a15bda 100644 --- a/wrapper/Convert/SireOpenMM/PyQMCallback.pypp.cpp +++ b/wrapper/Convert/SireOpenMM/PyQMCallback.pypp.cpp @@ -65,18 +65,18 @@ void register_PyQMCallback_class(){ typedef bp::class_< SireOpenMM::PyQMCallback > PyQMCallback_exposer_t; PyQMCallback_exposer_t PyQMCallback_exposer = PyQMCallback_exposer_t( "PyQMCallback", "A callback wrapper class to interface with external QM engines\nvia the CustomCPPForceImpl.", bp::init< >("Default constructor.") ); bp::scope PyQMCallback_scope( PyQMCallback_exposer ); - 
PyQMCallback_exposer.def( bp::init< bp::api::object, bp::optional< QString > >(( bp::arg("arg0"), bp::arg("name")="" ), "Constructor\nPar:am py_object\nA Python object that contains the callback function.\n\nPar:am name\nThe name of a callback method that take the following arguments:\n- numbers_qm: A list of atomic numbers for the atoms in the ML region.\n- charges_mm: A list of the MM charges in mod electron charge.\n- xyz_qm: A list of positions for the atoms in the ML region in Angstrom.\n- xyz_mm: A list of positions for the atoms in the MM region in Angstrom.\n- idx_mm: A list of indices for the MM atoms in the QM/MM region.\nThe callback should return a tuple containing:\n- The energy in kJmol.\n- A list of forces for the QM atoms in kJmolnm.\n- A list of forces for the MM atoms in kJmolnm.\nIf empty, then the object is assumed to be a callable.\n") ); + PyQMCallback_exposer.def( bp::init< bp::api::object, bp::optional< QString > >(( bp::arg("arg0"), bp::arg("name")="" ), "Constructor\nPar:am py_object\nA Python object that contains the callback function.\n\nPar:am name\nThe name of a callback method that take the following arguments:\n- numbers_qm: A list of atomic numbers for the atoms in the ML region.\n- charges_mm: A list of the MM charges in mod electron charge.\n- xyz_qm: A list of positions for the atoms in the ML region in Angstrom.\n- xyz_mm: A list of positions for the atoms in the MM region in Angstrom.\n- cell: A list of cell vectors in Angstrom.\n- idx_mm: A list of indices for the MM atoms in the QM/MM region.\nThe callback should return a tuple containing:\n- The energy in kJmol.\n- A list of forces for the QM atoms in kJmolnm.\n- A list of forces for the MM atoms in kJmolnm.\nIf empty, then the object is assumed to be a callable.\n") ); { //::SireOpenMM::PyQMCallback::call - typedef ::boost::tuples::tuple< double, QVector< QVector< double > >, QVector< QVector< double > >, boost::tuples::null_type, boost::tuples::null_type, 
boost::tuples::null_type, boost::tuples::null_type, boost::tuples::null_type, boost::tuples::null_type, boost::tuples::null_type > ( ::SireOpenMM::PyQMCallback::*call_function_type)( ::QVector< int >,::QVector< double >,::QVector< QVector< double > >,::QVector< QVector< double > >,::QVector< int > ) const; + typedef ::boost::tuples::tuple< double, QVector< QVector< double > >, QVector< QVector< double > >, boost::tuples::null_type, boost::tuples::null_type, boost::tuples::null_type, boost::tuples::null_type, boost::tuples::null_type, boost::tuples::null_type, boost::tuples::null_type > ( ::SireOpenMM::PyQMCallback::*call_function_type)( ::QVector< int >,::QVector< double >,::QVector< QVector< double > >,::QVector< QVector< double > >,::QVector< QVector< double > >, ::QVector< int > ) const; call_function_type call_function_value( &::SireOpenMM::PyQMCallback::call ); PyQMCallback_exposer.def( "call" , call_function_value - , ( bp::arg("numbers_qm"), bp::arg("charges_mm"), bp::arg("xyz_qm"), bp::arg("xyz_mm"), bp::arg("idx_mm") ) + , ( bp::arg("numbers_qm"), bp::arg("charges_mm"), bp::arg("xyz_qm"), bp::arg("xyz_mm"), bp::arg("cell"), bp::arg("idx_mm") ) , bp::release_gil_policy() - , "Call the callback function.\nPar:am numbers_qm\nA vector of atomic numbers for the atoms in the ML region.\n\nPar:am charges_mm\nA vector of the charges on the MM atoms in mod electron charge.\n\nPar:am xyz_qm\nA vector of positions for the atoms in the ML region in Angstrom.\n\nPar:am xyz_mm\nA vector of positions for the atoms in the MM region in Angstrom.\n\nPar:am idx_mm A vector of indices for the MM atoms in the QM/MM region.
Note that len(idx_mm) <= len(charges_mm) since it only contains the indices of true MM atoms, not link atoms or virtual charges.\n\nReturn:s\nA tuple containing:\n- The energy in kJmol.\n- A vector of forces for the QM atoms in kJmolnm.\n- A vector of forces for the MM atoms in kJmolnm.\n" ); + , "Call the callback function.\nPar:am numbers_qm\nA vector of atomic numbers for the atoms in the ML region.\n\nPar:am charges_mm\nA vector of the charges on the MM atoms in mod electron charge.\n\nPar:am xyz_qm\nA vector of positions for the atoms in the ML region in Angstrom.\n\nPar:am xyz_mm\nA vector of positions for the atoms in the MM region in Angstrom.\n\nPar:am cell A list of cell vectors in Angstrom.\n\nPar:am idx_mm A vector of indices for the MM atoms in the QM/MM region. Note that len(idx_mm) <= len(charges_mm) since it only contains the indices of true MM atoms, not link atoms or virtual charges.\n\nReturn:s\nA tuple containing:\n- The energy in kJmol.\n- A vector of forces for the QM atoms in kJmolnm.\n- A vector of forces for the MM atoms in kJmolnm.\n" ); } { //::SireOpenMM::PyQMCallback::typeName diff --git a/wrapper/Convert/SireOpenMM/PyQMEngine.pypp.cpp b/wrapper/Convert/SireOpenMM/PyQMEngine.pypp.cpp index b4589667e..ee197ff51 100644 --- a/wrapper/Convert/SireOpenMM/PyQMEngine.pypp.cpp +++ b/wrapper/Convert/SireOpenMM/PyQMEngine.pypp.cpp @@ -67,15 +67,15 @@ void register_PyQMEngine_class(){ PyQMEngine_exposer.def( bp::init< SireOpenMM::PyQMEngine const & >(( bp::arg("other") ), "Copy constructor.") ); { //::SireOpenMM::PyQMEngine::call - typedef ::boost::tuples::tuple< double, QVector< QVector< double > >, QVector< QVector< double > >, boost::tuples::null_type, boost::tuples::null_type, boost::tuples::null_type, boost::tuples::null_type, boost::tuples::null_type, boost::tuples::null_type, boost::tuples::null_type > ( ::SireOpenMM::PyQMEngine::*call_function_type)( ::QVector< int >,::QVector< double >,::QVector< QVector< double > >,::QVector< QVector< 
double > >,::QVector < int > ) const; + typedef ::boost::tuples::tuple< double, QVector< QVector< double > >, QVector< QVector< double > >, boost::tuples::null_type, boost::tuples::null_type, boost::tuples::null_type, boost::tuples::null_type, boost::tuples::null_type, boost::tuples::null_type, boost::tuples::null_type > ( ::SireOpenMM::PyQMEngine::*call_function_type)( ::QVector< int >,::QVector< double >,::QVector< QVector< double > >,::QVector< QVector< double > >,::QVector< QVector< double > >,::QVector < int > ) const; call_function_type call_function_value( &::SireOpenMM::PyQMEngine::call ); PyQMEngine_exposer.def( "call" , call_function_value - , ( bp::arg("numbers_qm"), bp::arg("charges_mm"), bp::arg("xyz_qm"), bp::arg("xyz_mm"), bp::arg("idx_mm") ) + , ( bp::arg("numbers_qm"), bp::arg("charges_mm"), bp::arg("xyz_qm"), bp::arg("xyz_mm"), bp::arg("cell"), bp::arg("idx_mm") ) , bp::release_gil_policy() - , "Call the callback function.\nPar:am numbers_qm\nA vector of atomic numbers for the atoms in the ML region.\n\nPar:am charges_mm\nA vector of the charges on the MM atoms in mod electron charge.\n\nPar:am xyz_qm\nA vector of positions for the atoms in the ML region in Angstrom.\n\nPar:am xyz_mm\nA vector of positions for the atoms in the MM region in Angstrom.\n\nPar:am idx_mm A vector of indices for the MM atoms in the QM/MM region. 
Note that len(idx_mm) <= len(charges_mm) since it only contains the indices of the true MM atoms, not link atoms or virtual charges.\n\nReturn:s\nA tuple containing:\n- The energy in kJmol.\n- A vector of forces for the QM atoms in kJmolnm.\n- A vector of forces for the MM atoms in kJmolnm.\n" ); + , "Call the callback function.\nPar:am numbers_qm\nA vector of atomic numbers for the atoms in the ML region.\n\nPar:am charges_mm\nA vector of the charges on the MM atoms in mod electron charge.\n\nPar:am xyz_qm\nA vector of positions for the atoms in the ML region in Angstrom.\n\nPar:am xyz_mm\nA vector of positions for the atoms in the MM region in Angstrom.\n\nPar:am cell A list of cell vectors in Angstrom.\n\nPar:am idx_mm A vector of indices for the MM atoms in the QM/MM region. Note that len(idx_mm) <= len(charges_mm) since it only contains the indices of the true MM atoms, not link atoms or virtual charges.\n\nReturn:s\nA tuple containing:\n- The energy in kJmol.\n- A vector of forces for the QM atoms in kJmolnm.\n- A vector of forces for the MM atoms in kJmolnm.\n" ); } { //::SireOpenMM::PyQMEngine::getAtoms diff --git a/wrapper/Convert/SireOpenMM/PyQMForce.pypp.cpp b/wrapper/Convert/SireOpenMM/PyQMForce.pypp.cpp index 18797552d..b8b8bebd3 100644 --- a/wrapper/Convert/SireOpenMM/PyQMForce.pypp.cpp +++ b/wrapper/Convert/SireOpenMM/PyQMForce.pypp.cpp @@ -69,15 +69,15 @@ void register_PyQMForce_class(){ PyQMForce_exposer.def( bp::init< SireOpenMM::PyQMForce const & >(( bp::arg("other") ), "Copy constructor.") ); { //::SireOpenMM::PyQMForce::call - typedef ::boost::tuples::tuple< double, QVector< QVector< double > >, QVector< QVector< double > >, boost::tuples::null_type, boost::tuples::null_type, boost::tuples::null_type, boost::tuples::null_type, boost::tuples::null_type, boost::tuples::null_type, boost::tuples::null_type > ( ::SireOpenMM::PyQMForce::*call_function_type)( ::QVector< int >,::QVector< double >,::QVector< QVector< double > >,::QVector< QVector< 
double > >, ::QVector < int > ) const; + typedef ::boost::tuples::tuple< double, QVector< QVector< double > >, QVector< QVector< double > >, boost::tuples::null_type, boost::tuples::null_type, boost::tuples::null_type, boost::tuples::null_type, boost::tuples::null_type, boost::tuples::null_type, boost::tuples::null_type > ( ::SireOpenMM::PyQMForce::*call_function_type)( ::QVector< int >,::QVector< double >,::QVector< QVector< double > >,::QVector< QVector< double > >, ::QVector< QVector< double > >, ::QVector < int > ) const; call_function_type call_function_value( &::SireOpenMM::PyQMForce::call ); PyQMForce_exposer.def( "call" , call_function_value - , ( bp::arg("numbers_qm"), bp::arg("charges_mm"), bp::arg("xyz_qm"), bp::arg("xyz_mm"), bp::arg("idx_mm") ) + , ( bp::arg("numbers_qm"), bp::arg("charges_mm"), bp::arg("xyz_qm"), bp::arg("xyz_mm"), bp::arg("cell"), bp::arg("idx_mm") ) , bp::release_gil_policy() - , "Call the callback function.\nPar:am numbers_qm\nA vector of atomic numbers for the atoms in the ML region.\n\nPar:am charges_mm\nA vector of the charges on the MM atoms in mod electron charge.\n\nPar:am xyz_qm\nA vector of positions for the atoms in the ML region in Angstrom.\n\nPar:am xyz_mm\nA vector of positions for the atoms in the MM region in Angstrom.\n\nPar:am idx_mm A vector of indices for the MM atoms in the QM/MM region. 
Note that len(idx_mm) <= len(charges_mm) since it only contains the indices of true MM atoms, not link atoms or virtual charges.\n\nReturn:s\nA tuple containing:\n- The energy in kJmol.\n- A vector of forces for the QM atoms in kJmolnm.\n- A vector of forces for the MM atoms in kJmolnm.\n" ); + , "Call the callback function.\nPar:am numbers_qm\nA vector of atomic numbers for the atoms in the ML region.\n\nPar:am charges_mm\nA vector of the charges on the MM atoms in mod electron charge.\n\nPar:am xyz_qm\nA vector of positions for the atoms in the ML region in Angstrom.\n\nPar:am xyz_mm\nA vector of positions for the atoms in the MM region in Angstrom.\n\nPar:am cell A list of cell vectors in Angstrom.\n\nPar:am idx_mm A vector of indices for the MM atoms in the QM/MM region. Note that len(idx_mm) <= len(charges_mm) since it only contains the indices of true MM atoms, not link atoms or virtual charges.\n\nReturn:s\nA tuple containing:\n- The energy in kJmol.\n- A vector of forces for the QM atoms in kJmolnm.\n- A vector of forces for the MM atoms in kJmolnm.\n" ); } { //::SireOpenMM::PyQMForce::getAtoms diff --git a/wrapper/Convert/SireOpenMM/pyqm.cpp b/wrapper/Convert/SireOpenMM/pyqm.cpp index 8c37cc2ad..c8e0f1334 100644 --- a/wrapper/Convert/SireOpenMM/pyqm.cpp +++ b/wrapper/Convert/SireOpenMM/pyqm.cpp @@ -153,6 +153,7 @@ PyQMCallback::call( QVector charges_mm, QVector> xyz_qm, QVector> xyz_mm, + QVector> cell, QVector idx_mm) const { @@ -170,6 +171,7 @@ PyQMCallback::call( charges_mm, xyz_qm, xyz_mm, + cell, idx_mm ); } @@ -191,6 +193,7 @@ PyQMCallback::call( charges_mm, xyz_qm, xyz_mm, + cell, idx_mm ); } @@ -403,9 +406,10 @@ PyQMForce::call( QVector charges_mm, QVector> xyz_qm, QVector> xyz_mm, + QVector> cell, QVector idx_mm) const { - return this->callback.call(numbers_qm, charges_mm, xyz_qm, xyz_mm, idx_mm); + return this->callback.call(numbers_qm, charges_mm, xyz_qm, xyz_mm, cell, idx_mm); } ///////// @@ -546,6 +550,13 @@ double PyQMForceImpl::computeForce( 
Vector(10*box_z[0], 10*box_z[1], 10*box_z[2]) ); + // Store the cell vectors in Angstrom. + QVector> cell = { + {10*box_x[0], 10*box_x[1], 10*box_x[2]}, + {10*box_y[0], 10*box_y[1], 10*box_y[2]}, + {10*box_z[0], 10*box_z[1], 10*box_z[2]} + }; + // Store the QM atomic indices and numbers. auto qm_atoms = this->owner.getAtoms(); auto numbers = this->owner.getNumbers(); @@ -786,6 +797,7 @@ double PyQMForceImpl::computeForce( charges_mm, xyz_qm, xyz_mm, + cell, idx_mm ); @@ -1073,9 +1085,10 @@ PyQMEngine::call( QVector charges_mm, QVector> xyz_qm, QVector> xyz_mm, + QVector> cell, QVector idx_mm) const { - return this->callback.call(numbers_qm, charges_mm, xyz_qm, xyz_mm, idx_mm); + return this->callback.call(numbers_qm, charges_mm, xyz_qm, xyz_mm, cell, idx_mm); } QMForce* PyQMEngine::createForce() const diff --git a/wrapper/Convert/SireOpenMM/pyqm.h b/wrapper/Convert/SireOpenMM/pyqm.h index f4e94a658..5b149add6 100644 --- a/wrapper/Convert/SireOpenMM/pyqm.h +++ b/wrapper/Convert/SireOpenMM/pyqm.h @@ -88,6 +88,7 @@ namespace SireOpenMM - charges_mm: A list of the MM charges in mod electron charge. - xyz_qm: A list of positions for the atoms in the ML region in Angstrom. - xyz_mm: A list of positions for the atoms in the MM region in Angstrom. + - cell: A list of the 3 cell vectors in Angstrom. - idx_mm: A list of indices for MM atom indices in the QM/MM region. The callback should return a tuple containing: - The energy in kJ/mol. @@ -110,6 +111,9 @@ namespace SireOpenMM \param xyz_mm A vector of positions for the atoms in the MM region in Angstrom. + \param cell + A vector of the 3 cell vectors in Angstrom. + \param idx_mm A vector of MM atom indices. 
Note that len(idx_mm) <= len(charges_mm) since it only contains the indices of true MM atoms, not link atoms @@ -126,6 +130,7 @@ namespace SireOpenMM QVector charges_mm, QVector> xyz_qm, QVector> xyz_mm, + QVector> cell, QVector idx_mm ) const; @@ -323,6 +328,9 @@ namespace SireOpenMM \param xyz_mm A vector of positions for the atoms in the MM region in Angstrom. + \param cell + A vector of the 3 cell vectors in Angstrom. + \param idx_mm A vector of MM atom indices. Note that len(idx_mm) <= len(charges_mm) since it only contains the indices of true MM atoms, not link atoms @@ -339,6 +347,7 @@ namespace SireOpenMM QVector charges_mm, QVector> xyz_qm, QVector> xyz_mm, + QVector> cell, QVector idx_mm ) const; @@ -590,6 +599,9 @@ namespace SireOpenMM \param xyz_mm A vector of positions for the atoms in the MM region in Angstrom. + \param cell + A vector of the 3 cell vectors in Angstrom. + \param idx_mm A vector of MM atom indices. Note that len(idx_mm) <= len(charges_mm) since it only contains the indices of true MM atoms, not link atoms @@ -606,6 +618,7 @@ namespace SireOpenMM QVector charges_mm, QVector> xyz_qm, QVector> xyz_mm, + QVector> cell, QVector idx_mm ) const; diff --git a/wrapper/Convert/SireOpenMM/torchqm.cpp b/wrapper/Convert/SireOpenMM/torchqm.cpp index c75456c75..8b36db7c8 100644 --- a/wrapper/Convert/SireOpenMM/torchqm.cpp +++ b/wrapper/Convert/SireOpenMM/torchqm.cpp @@ -429,6 +429,13 @@ double TorchQMForceImpl::computeForce( Vector(10*box_z[0], 10*box_z[1], 10*box_z[2]) ); + // Store the cell vectors in Angstrom. + QVector> cell = { + {10*box_x[0], 10*box_x[1], 10*box_x[2]}, + {10*box_y[0], 10*box_y[1], 10*box_y[2]}, + {10*box_z[0], 10*box_z[1], 10*box_z[2]} + }; + // Store the QM atomic indices and numbers. auto qm_atoms = this->owner.getAtoms(); auto numbers = this->owner.getNumbers(); @@ -715,12 +722,19 @@ double TorchQMForceImpl::computeForce( .to(device); xyz_mm_torch.requires_grad_(true); + // Cell vectors. 
+ torch::Tensor cell_torch = torch::stack({torch::from_blob(cell[0].data(), {3}, torch::TensorOptions().dtype(torch::kFloat64)), + torch::from_blob(cell[1].data(), {3}, torch::TensorOptions().dtype(torch::kFloat64)), torch::from_blob(cell[2].data(), {3}, torch::TensorOptions().dtype(torch::kFloat64))}) + .to(torch::kFloat32).to(device); // row-wise views: QVector<QVector<double>>::data() is not a flat 3x3 double buffer
+ cell_torch.requires_grad_(false);
+
// Create the input vector. auto input = std::vector{ atomic_numbers_torch, charges_mm_torch, xyz_qm_torch, - xyz_mm_torch + xyz_mm_torch, + cell_torch }; // Compute the energies.