Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
135 changes: 135 additions & 0 deletions .github/workflows/bit-compare-docker.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,135 @@
name: bit-compare-docker

# Controls when the action will run
on:

  # Trigger on pushes to main only. Feature branches and cycle LTS branches
  # are validated via their pull request — running on push as well would
  # double-spend minutes for internal PRs, and LTS branches are protected.
  push:
    branches:
      - main

  # Trigger the workflow on all pull requests
  pull_request: ~

  # Allow workflow to be dispatched on demand
  workflow_dispatch: ~

# One run per PR (keyed on PR number) or per ref for pushes; a newer run
# cancels any older in-flight run for the same group.
concurrency:
  group: bit-compare-docker-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

env:
  CACHE_SUFFIX: v1  # Increase to force new cache to be created

jobs:
  ci:
    name: ci

    strategy:
      fail-fast: false  # false: try to complete all jobs

      matrix:
        name:
          - linux gcc-13

        include:

          - name: linux gcc-13
            os: ubuntu-22.04
            gcc: '13'

    runs-on: ${{ matrix.os }}
    timeout-minutes: 180

    steps:
      - name: Checkout Repository
        uses: actions/checkout@v4
        with:
          # For PRs, test the actual head commit rather than the synthetic
          # merge commit GitHub would otherwise check out.
          ref: ${{ github.event.pull_request.head.sha || github.sha }}

      # Derive OIFS_VERSION (e.g. "49r2") and the matching cycle LTS branch
      # name from the bundle version recorded in bundle.yml.
      - name: Derive cycle from bundle.yml
        run: |
          set -eu
          version=$(awk '/^name[[:space:]]*:[[:space:]]*ifs-bundle/{found=1} found && /^version[[:space:]]*:/{print $NF; exit}' bundle.yml)
          # Fail fast if the version could not be extracted; otherwise the
          # derived names would silently be malformed (e.g. "openifs-lts/CYR.")
          # and the branch check below would silently fall back to main.
          if [ -z "${version}" ]; then
            echo "::error::Unable to derive a version from bundle.yml"
            exit 1
          fi
          IFS=. read -r MAJ MIN PAT <<< "$version"
          OIFS_VERSION="${MAJ}r${MIN}"
          CYCLE_BRANCH="openifs-lts/CY${MAJ}R${MIN}.${PAT}"
          # Fall back to main when the cycle LTS branch has not been cut yet
          # (e.g. a new cycle has landed on main but its openifs-lts/CY* branch
          # does not yet exist on the remote).
          if [ -z "$(git ls-remote --heads origin "$CYCLE_BRANCH")" ]; then
            echo "Cycle branch ${CYCLE_BRANCH} not found on remote — falling back to main"
            CYCLE_BRANCH=main
          fi
          echo "OIFS_VERSION=${OIFS_VERSION}" >> "$GITHUB_ENV"
          echo "CYCLE_BRANCH=${CYCLE_BRANCH}" >> "$GITHUB_ENV"

      - name: Environment
        env:
          PR_BASE_REF: ${{ github.event.pull_request.base.ref }}
        run: |
          set -eu
          # WORK_DIR sits outside github.workspace because the test phase copies
          # the workspace into WORK_DIR/build_dir_test/...; a destination inside
          # the source would recurse into itself in shutil.copytree.
          WORK_DIR="${{ runner.temp }}/_oifs_docker_ci"
          echo "CI_DIR=${{ github.workspace }}/scripts/ci/docker_ci" >> "$GITHUB_ENV"
          echo "WORK_DIR=${WORK_DIR}" >> "$GITHUB_ENV"
          echo "NORMS_DIR=${WORK_DIR}/control_saved_norms" >> "$GITHUB_ENV"
          echo "REPORTS_DIR=${WORK_DIR}/ci_reports" >> "$GITHUB_ENV"
          # Compare against the PR base branch when triggered by a PR,
          # otherwise against the cycle branch derived in the previous step.
          CONTROL_BRANCH="${PR_BASE_REF:-$CYCLE_BRANCH}"
          echo "CONTROL_BRANCH=${CONTROL_BRANCH}" >> "$GITHUB_ENV"

      # Cache the control run's saved norms, keyed on compiler, cycle and the
      # control commit SHA, so an unchanged control build is not re-run.
      - name: Cache control NORMs
        uses: actions/cache@v4
        with:
          path: ${{ env.NORMS_DIR }}
          key: control-norms-gcc${{ matrix.gcc }}-${{ env.OIFS_VERSION }}-${{ github.event.pull_request.base.sha || github.sha }}-${{ env.CACHE_SUFFIX }}

      - name: Set up Python venv
        run: |
          set -eu
          python3 -m venv "${WORK_DIR}/venv"
          source "${WORK_DIR}/venv/bin/activate"
          python3 -m pip install --upgrade pip
          python3 -m pip install gitpython pyyaml

      # Write the YAML config consumed by ci-oifs-docker.py. The heredoc is
      # unquoted so ${...} values are expanded by the shell at render time.
      - name: Render CI config
        run: |
          set -eu
          mkdir -p "${WORK_DIR}"
          cat > "${WORK_DIR}/ci_test_docker.yml" <<EOF
          openifs_version : "${OIFS_VERSION}"
          openifs_repo_url : "https://github.com/${{ github.repository }}.git"
          control_branch : "${CONTROL_BRANCH}"
          test_branch : "${{ github.workspace }}"
          base_docker_image : "${{ matrix.gcc }}"
          docker_template : "./Dockerfile.ci"
          include_openifs_data_downloads : False
          openifs_build_docker_dir : "${WORK_DIR}"
          ci_reports : "${REPORTS_DIR}"
          control_saved_norms_dir : "${NORMS_DIR}"
          force_reclone : True
          reuse_control_if_present : True
          openifs_test_extra_flags : "--without-single-precision --cmake=BUILD_ifsbench=OFF --clean"
          EOF

      - name: Build & Test
        id: build-test
        run: |
          set -eu
          cd "$CI_DIR"
          source "${WORK_DIR}/venv/bin/activate"
          echo "::group::ci-oifs-docker.py"
          python3 ci-oifs-docker.py -c "${WORK_DIR}/ci_test_docker.yml"
          echo "::endgroup::"

      - name: Upload reports
        if: always()  # publish whatever reports exist even if Build & Test failed
        uses: actions/upload-artifact@v4
        with:
          name: bit-compare-reports-gcc${{ matrix.gcc }}
          path: ${{ env.REPORTS_DIR }}/
          if-no-files-found: warn
165 changes: 165 additions & 0 deletions .github/workflows/bit-compare-host.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,165 @@
name: bit-compare-host

# Controls when the action will run
on:

  # Trigger on pushes to main only. Feature branches and cycle LTS branches
  # are validated via their pull request — running on push as well would
  # double-spend minutes for internal PRs, and LTS branches are protected.
  push:
    branches:
      - main

  # Trigger the workflow on all pull requests
  pull_request: ~

  # Allow workflow to be dispatched on demand
  workflow_dispatch: ~

# One run per PR (keyed on PR number) or per ref for pushes; a newer run
# cancels any older in-flight run for the same group.
concurrency:
  group: bit-compare-host-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

env:
  CACHE_SUFFIX: v1  # Increase to force new cache to be created

jobs:
  ci:
    name: ci

    strategy:
      fail-fast: false  # false: try to complete all jobs

      matrix:
        name:
          - linux gcc-12
          - linux gcc-13
          - linux gcc-14

        include:

          - name: linux gcc-12
            os: ubuntu-24.04
            gcc: '12'

          - name: linux gcc-13
            os: ubuntu-24.04
            gcc: '13'

          - name: linux gcc-14
            os: ubuntu-24.04
            gcc: '14'

    runs-on: ${{ matrix.os }}
    timeout-minutes: 240

    steps:
      - name: Checkout Repository
        uses: actions/checkout@v4
        with:
          # For PRs, test the actual head commit rather than the synthetic
          # merge commit GitHub would otherwise check out.
          ref: ${{ github.event.pull_request.head.sha || github.sha }}

      # Derive OIFS_VERSION (e.g. "49r2") and the matching cycle LTS branch
      # name from the bundle version recorded in bundle.yml.
      - name: Derive cycle from bundle.yml
        run: |
          set -eu
          version=$(awk '/^name[[:space:]]*:[[:space:]]*ifs-bundle/{found=1} found && /^version[[:space:]]*:/{print $NF; exit}' bundle.yml)
          # Fail fast if the version could not be extracted; otherwise the
          # derived names would silently be malformed (e.g. "openifs-lts/CYR.")
          # and the branch check below would silently fall back to main.
          if [ -z "${version}" ]; then
            echo "::error::Unable to derive a version from bundle.yml"
            exit 1
          fi
          IFS=. read -r MAJ MIN PAT <<< "$version"
          OIFS_VERSION="${MAJ}r${MIN}"
          CYCLE_BRANCH="openifs-lts/CY${MAJ}R${MIN}.${PAT}"
          # Fall back to main when the cycle LTS branch has not been cut yet.
          if [ -z "$(git ls-remote --heads origin "$CYCLE_BRANCH")" ]; then
            echo "Cycle branch ${CYCLE_BRANCH} not found on remote — falling back to main"
            CYCLE_BRANCH=main
          fi
          echo "OIFS_VERSION=${OIFS_VERSION}" >> "$GITHUB_ENV"
          echo "CYCLE_BRANCH=${CYCLE_BRANCH}" >> "$GITHUB_ENV"

      - name: Environment
        env:
          PR_BASE_REF: ${{ github.event.pull_request.base.ref }}
        run: |
          set -eu
          # WORK_DIR sits outside github.workspace because the test phase copies
          # the workspace into WORK_DIR/build_dir_test/...; a destination inside
          # the source would recurse into itself in shutil.copytree.
          WORK_DIR="${{ runner.temp }}/_oifs_host_ci"
          echo "CI_DIR=${{ github.workspace }}/scripts/ci/host_ci" >> "$GITHUB_ENV"
          echo "WORK_DIR=${WORK_DIR}" >> "$GITHUB_ENV"
          echo "NORMS_DIR=${WORK_DIR}/control_saved_norms" >> "$GITHUB_ENV"
          echo "REPORTS_DIR=${WORK_DIR}/ci_reports" >> "$GITHUB_ENV"
          # Compare against the PR base branch when triggered by a PR,
          # otherwise against the cycle branch derived in the previous step.
          CONTROL_BRANCH="${PR_BASE_REF:-$CYCLE_BRANCH}"
          echo "CONTROL_BRANCH=${CONTROL_BRANCH}" >> "$GITHUB_ENV"

      - name: Install build dependencies (apt)
        run: |
          set -eu
          sudo apt-get update
          sudo apt-get install -y --no-install-recommends \
            gcc-${{ matrix.gcc }} g++-${{ matrix.gcc }} gfortran-${{ matrix.gcc }} \
            cmake ninja-build make \
            libopenmpi-dev openmpi-bin \
            libnetcdf-dev libnetcdff-dev netcdf-bin \
            libboost-dev libboost-date-time-dev libboost-filesystem-dev \
            libboost-serialization-dev libboost-program-options-dev \
            liblapack-dev libeigen3-dev libomp-dev \
            python3-venv python3-yaml python3-ruamel.yaml \
            git wget bc ca-certificates

      # Point the standard compiler environment variables at the versioned
      # toolchain selected by the matrix so CMake/configure pick it up.
      - name: Select compiler
        run: |
          set -eu
          echo "CC=gcc-${{ matrix.gcc }}" >> "$GITHUB_ENV"
          echo "CXX=g++-${{ matrix.gcc }}" >> "$GITHUB_ENV"
          echo "FC=gfortran-${{ matrix.gcc }}" >> "$GITHUB_ENV"
          echo "F77=gfortran-${{ matrix.gcc }}" >> "$GITHUB_ENV"
          echo "F90=gfortran-${{ matrix.gcc }}" >> "$GITHUB_ENV"

      # Cache the control run's saved norms, keyed on compiler, cycle and the
      # control commit SHA, so an unchanged control build is not re-run.
      - name: Cache control NORMs
        uses: actions/cache@v4
        with:
          path: ${{ env.NORMS_DIR }}
          key: control-norms-host-gcc${{ matrix.gcc }}-${{ env.OIFS_VERSION }}-${{ github.event.pull_request.base.sha || github.sha }}-${{ env.CACHE_SUFFIX }}

      - name: Set up Python venv
        run: |
          set -eu
          python3 -m venv "${WORK_DIR}/venv"
          source "${WORK_DIR}/venv/bin/activate"
          python3 -m pip install --upgrade pip
          python3 -m pip install gitpython pyyaml

      # Write the YAML config consumed by ci-oifs-host.py. The heredoc is
      # unquoted so ${...} values are expanded by the shell at render time.
      - name: Render CI config
        run: |
          set -eu
          mkdir -p "${WORK_DIR}"
          cat > "${WORK_DIR}/ci_test_host.yml" <<EOF
          openifs_version : "${OIFS_VERSION}"
          openifs_repo_url : "https://github.com/${{ github.repository }}.git"
          control_branch : "${CONTROL_BRANCH}"
          test_branch : "${{ github.workspace }}"
          compiler_version : "${{ matrix.gcc }}"
          openifs_build_host_dir : "${WORK_DIR}"
          ci_reports : "${REPORTS_DIR}"
          control_saved_norms_dir : "${NORMS_DIR}"
          clone_openifs_control : True
          force_reclone : True
          reuse_control_if_present : True
          openifs_test_extra_flags : "--without-single-precision --cmake=BUILD_ifsbench=OFF --clean"
          EOF

      - name: Build & Test
        id: build-test
        run: |
          set -eu
          cd "$CI_DIR"
          source "${WORK_DIR}/venv/bin/activate"
          echo "::group::ci-oifs-host.py"
          python3 ci-oifs-host.py -c "${WORK_DIR}/ci_test_host.yml"
          echo "::endgroup::"

      - name: Upload reports
        if: always()  # publish whatever reports exist even if Build & Test failed
        uses: actions/upload-artifact@v4
        with:
          name: bit-compare-host-reports-gcc${{ matrix.gcc }}
          path: ${{ env.REPORTS_DIR }}/
          if-no-files-found: warn
2 changes: 1 addition & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -21,4 +21,4 @@ __pycache__/
.DS_Store

# User-specific Docker config
scripts/docker/config/my_config.yml
scripts/bootstrap/docker/config/my_config.yml
6 changes: 3 additions & 3 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -148,11 +148,11 @@ END ifstest on OpenIFS build

The previous section, [Installing and Building OpenIFS](#installing-and-building-openifs), describes the pre-requisites and build process for OpenIFS on a generic Linux based system.

[create-oifs-docker.py](scripts/docker/create-oifs-docker.py) and associated scripts and configuration automates the process described in section [Installing and Building OpenIFS](#installing-and-building-openifs), by creating a Docker container, installing OpenIFS and dependencies and then building OpenIFS and running the test.
[create-oifs-docker.py](scripts/bootstrap/docker/create-oifs-docker.py) and associated scripts and configuration automates the process described in section [Installing and Building OpenIFS](#installing-and-building-openifs), by creating a Docker container, installing OpenIFS and dependencies and then building OpenIFS and running the test.

* Please go to [OpenIFS Docker Builder](scripts/docker/README.md) for details about the Docker install.
* Please go to [OpenIFS Docker Builder](scripts/bootstrap/docker/README.md) for details about the Docker install.

[create-oifs-docker.py](scripts/docker/create-oifs-docker.py) and the resulting Docker development has been tested on macOS but it can be applied to other systems, as long as Docker is installed and the appropriate python dependencies are available.
[create-oifs-docker.py](scripts/bootstrap/docker/create-oifs-docker.py) and the resulting Docker development has been tested on macOS but it can be applied to other systems, as long as Docker is installed and the appropriate python dependencies are available.

## Install the static input data files for OpenIFS

Expand Down
2 changes: 0 additions & 2 deletions oifs-config.edit_me.sh
Original file line number Diff line number Diff line change
Expand Up @@ -67,8 +67,6 @@ export OIFS_TEST="${OIFS_HOME}/scripts/build_test"
export OIFS_LOGFILE="${OIFS_HOME}/openifs-test.log"
#---Path to dir containing scripts to run OpenIFS experiment
export OIFS_RUN_SCRIPT="${OIFS_HOME}/scripts/exp_3d"
#---Path to OpenIFS docker scripts and yaml config for docker
export OIFS_DOCKER="${OIFS_HOME}/scripts/docker"

alias oenv="env -0 | sort -z | tr '\0' '\n' | grep -a OIFS_"

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,23 +30,24 @@ RUN apt update && \
apt install -y git cmake python3 python3-ruamel.yaml python3-yaml python3-venv \
libomp-dev libboost-dev libboost-date-time-dev libboost-filesystem-dev \
libboost-serialization-dev libboost-program-options-dev netcdf-bin \
libnetcdf-dev libnetcdff-dev liblapack-dev libeigen3-dev vim emacs \
libnetcdf-dev libnetcdff-dev liblapack-dev libeigen3-dev vim less \
wget bc ca-certificates && \
update-ca-certificates && \
cd /tmp && \
wget https://download.open-mpi.org/release/open-mpi/v5.0/openmpi-5.0.10.tar.gz && \
tar -xvf openmpi-5.0.10.tar.gz && \
cd openmpi-5.0.10 && \
./configure --disable-libxml2 --prefix=/usr/local && \
make -j4 all && \
make -j"$(nproc)" all && \
make install && \
ldconfig && \
cd / && \
rm -rf /tmp/openmpi-5.0.10* && \
apt-get clean && \
rm -rf /var/lib/apt/lists/*

# Create user and directory structure
# Create user and directory structure.
# Container security: run as unprivileged uid-1000 'openifs' user.
RUN groupadd --gid 1000 openifs && \
useradd --uid 1000 --gid openifs --shell /bin/bash --create-home openifs && \
mkdir -p /home/openifs/${OPENIFS_DIR} && \
Expand Down Expand Up @@ -76,7 +77,10 @@ RUN sed -i 's|export OIFS_HOME="${HOME}/.*"|export OIFS_HOME="${HOME}/'${OPENIFS
cd $OIFS_EXPT/ab7z/2016092500 && \
cp $OIFS_RUN_SCRIPT/oifs-run .

# Download and extract all OpenIFS data files in one layer
# Download and extract all OpenIFS data files in a separate RUN layer so that
# the source-copy layer above can be cached and re-used across rebuilds. The
# host bash installer fetches the same archives in a single shell function
# since there is no layer cache to optimise for.
WORKDIR /home/openifs/${OPENIFS_DIR}/openifs-data
RUN mkdir -p ifsdata rtables ${CLIMATE_VERSION} && \
cd rtables && \
Expand Down
Loading
Loading