diff --git a/.cicd/Jenkinsfile b/.cicd/Jenkinsfile
index 6e453f6f25..6629e60d4f 100644
--- a/.cicd/Jenkinsfile
+++ b/.cicd/Jenkinsfile
@@ -137,16 +137,6 @@ pipeline {
}
}
- // Run the unittest functional tests that require an HPC platform
- stage('Functional UnitTests') {
- steps {
- dir ("${env.SRW_PLATFORM}") {
- echo "Running unittest on retrieve_data.py"
- sh 'bash --login "${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/srw_unittest.sh"'
- }
- }
- }
-
// Run the unified build script; if successful create a tarball of the build and upload to S3
stage('Build') {
steps {
@@ -164,6 +154,16 @@ pipeline {
}
}
+ // Run the unittest functional tests that require an HPC platform
+ stage('Functional UnitTests') {
+ steps {
+ dir ("${env.SRW_PLATFORM}") {
+ echo "Running unittest on retrieve_data.py"
+ sh 'bash --login "${WORKSPACE}/${SRW_PLATFORM}/.cicd/scripts/srw_unittest.sh"'
+ }
+ }
+ }
+
// Try a few Workflow Task scripts to make sure E2E tests can be launched in a follow-on 'Test' stage
stage('Functional WorkflowTaskTests') {
steps {
diff --git a/.cicd/scripts/srw_ftest.sh b/.cicd/scripts/srw_ftest.sh
index 7a787a7137..cdbf1db2a8 100755
--- a/.cicd/scripts/srw_ftest.sh
+++ b/.cicd/scripts/srw_ftest.sh
@@ -85,7 +85,7 @@ module load build_${platform,,}_${SRW_COMPILER}
module load wflow_${platform,,}
[[ ${FORGIVE_CONDA} == true ]] && set +e +u # Some platforms have incomplete python3 or conda support, but wouldn't necessarily block workflow tests
-conda activate workflow_tools
+conda activate srw_app
set -e -u
export PYTHONPATH=${workspace}/ush/python_utils/workflow-tools:${workspace}/ush/python_utils/workflow-tools/src
diff --git a/.github/workflows/python_func_tests.yaml b/.github/workflows/python_func_tests.yaml
deleted file mode 100644
index 980b67a77c..0000000000
--- a/.github/workflows/python_func_tests.yaml
+++ /dev/null
@@ -1,39 +0,0 @@
-name: Python functional tests
-on:
- push:
- branches:
- - develop
- - 'release/*'
- paths:
- - ush/*retrieve_data.py
- - parm/data_locations.yml
- pull_request:
- branches:
- - develop
- - 'release/*'
- workflow_dispatch:
-
-env:
- CI: true
-
-jobs:
- python_functests:
- name: Python Functional Tests
- runs-on: ubuntu-latest
-
- steps:
- - name: Checkout repository
- uses: actions/checkout@v2
-
- # Install dependencies
- - name: Install dependencies
- run: |
- sudo apt-get update
- sudo apt-get install python3 python3-pip netcdf-bin
- sudo pip3 install pyyaml
-
- # Run python functional tests
- - name: Run python functional tests
- run: |
- export PYTHONPATH=${PWD}/ush
- python3 -m unittest -b tests/test_python/test_retrieve_data.py
diff --git a/.github/workflows/python_linter.yaml b/.github/workflows/python_linter.yaml
deleted file mode 100644
index b63a2a513c..0000000000
--- a/.github/workflows/python_linter.yaml
+++ /dev/null
@@ -1,40 +0,0 @@
-name: Python linting
-on:
- push:
- branches:
- - develop
- - 'release/*'
- pull_request:
- branches:
- - develop
- - 'release/*'
- workflow_dispatch:
-
-defaults:
- run:
- shell: bash
-jobs:
-
- python_linter:
- name: Python unittests
- runs-on: ubuntu-latest
-
- steps:
- - name: Checkout repository
- uses: actions/checkout@v2
-
- # Install dependencies
- - name: Install dependencies
- run: |
- sudo apt-get update
- sudo apt-get install python3 python3-pip netcdf-bin
- sudo pip3 install pylint==2.16
-
- # Run python unittests
- - name: Lint the test directory
- run: |
- ./manage_externals/checkout_externals workflow-tools
- export PYTHONPATH=$(pwd)/ush:$(pwd)/ush/python_utils/workflow-tools:$(pwd)/ush/python_utils/workflow-tools/src
- pylint --ignore-imports=yes tests/test_python/
- pylint ush/create_*.py
- pylint ush/generate_FV3LAM_wflow.py
diff --git a/.github/workflows/python_tests.yaml b/.github/workflows/python_tests.yaml
new file mode 100644
index 0000000000..1bab329c0c
--- /dev/null
+++ b/.github/workflows/python_tests.yaml
@@ -0,0 +1,58 @@
+name: Python Tests
+on:
+ push:
+ branches:
+ - develop
+ - 'release/*'
+ pull_request:
+ branches:
+ - develop
+ - 'release/*'
+ workflow_dispatch:
+
+defaults:
+ run:
+ shell: bash -leo pipefail {0}
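+ # a login shell (-l) ensures the micromamba shell hook from init-shell is sourced, so "micromamba activate" works in later steps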
+
+jobs:
+ srw_tests:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v2
+
+ - name: Install Micromamba and srw_app environment
+ uses: mamba-org/setup-micromamba@v1
+ with:
+ environment-file: environment.yml
+ environment-name: srw_app
+ init-shell: bash
+ cache-downloads: true
+ cache-environment: true
+
+ - name: Checkout externals
+ run: |
+ ./manage_externals/checkout_externals ufs-weather-model workflow-tools
+
+ - name: Lint the python code
+ run: |
+ micromamba activate srw_app
+ export PYTHONPATH=$(pwd)/ush:$(pwd)/ush/python_utils/workflow-tools:$(pwd)/ush/python_utils/workflow-tools/src
+ pylint --ignore-imports=yes tests/test_python/
+ pylint ush/create_*.py
+ pylint ush/generate_FV3LAM_wflow.py
+
+ - name: Run python unittests
+ run: |
+ # exclude test_retrieve_data, which is covered by the functional test below
+ micromamba activate srw_app
+ export UNIT_TEST=True
+ export PYTHONPATH=$(pwd)/ush:$(pwd)/ush/python_utils/workflow-tools:$(pwd)/ush/python_utils/workflow-tools/src
+ python -m unittest -b tests/test_python/*.py
+
+ - name: Run python functional tests
+ run: |
+ micromamba activate srw_app
+ export CI=true
+ export PYTHONPATH=${PWD}/ush
+ python3 -m unittest -b tests/test_python/test_retrieve_data.py
diff --git a/.github/workflows/python_unittests.yaml b/.github/workflows/python_unittests.yaml
deleted file mode 100644
index 5e491dea6e..0000000000
--- a/.github/workflows/python_unittests.yaml
+++ /dev/null
@@ -1,44 +0,0 @@
-name: Python unittests
-on:
- push:
- branches:
- - develop
- - 'release/*'
- pull_request:
- branches:
- - develop
- - 'release/*'
- workflow_dispatch:
-
-env:
- UNIT_TEST: True
-
-defaults:
- run:
- shell: bash
-jobs:
-
- python_unittests:
- name: Python unittests
- runs-on: ubuntu-latest
-
- steps:
- - name: Checkout repository
- uses: actions/checkout@v2
-
- # Install dependencies
- - name: Install dependencies
- run: |
- sudo apt-get update
- sudo apt-get install python3 python3-pip netcdf-bin
- sudo pip3 install pyyaml jinja2==2.11 f90nml
- sudo pip3 install numpy matplotlib basemap
-
- # Run python unittests
- - name: Run python unittests
- run: |
- ./manage_externals/checkout_externals ufs-weather-model workflow-tools
- # exclude test_retrieve_data that is tested in functional test
- export PYTHONPATH=$(pwd)/ush:$(pwd)/ush/python_utils/workflow-tools:$(pwd)/ush/python_utils/workflow-tools/src
- python3 -m unittest -b tests/test_python/*.py
-
diff --git a/.gitignore b/.gitignore
index b6da1c53a0..8566703a22 100644
--- a/.gitignore
+++ b/.gitignore
@@ -15,6 +15,7 @@ ush/config.yaml
ush/python_utils/__pycache__/
ush/python_utils/workflow-tools/
ush/*.swp
+conda_loc
*.swp
__pycache__
diff --git a/aqm_environment.yml b/aqm_environment.yml
new file mode 100644
index 0000000000..03d72f6706
--- /dev/null
+++ b/aqm_environment.yml
@@ -0,0 +1,13 @@
+name: srw_aqm
+channels:
+ - conda-forge
+ - ufs-community
+dependencies:
+ - esmpy
+ - netcdf4=1.6.*
+ - numpy=1.23.*
+ - pylint=2.17*
+ - pytest=7.2*
+ - scipy=1.10.*
+ - uwtools=1.0.0
+ - xarray=2022.11.*
diff --git a/devbuild.sh b/devbuild.sh
index b21998fc8e..6ec2615e23 100755
--- a/devbuild.sh
+++ b/devbuild.sh
@@ -41,6 +41,8 @@ OPTIONS
installation prefix
--bin-dir=BIN_DIR
installation binary directory name ("exec" by default; any name is available)
+ --conda-dir=CONDA_DIR
+ installation location for miniconda (SRW clone conda subdirectory by default)
--build-type=BUILD_TYPE
build type; defaults to RELEASE
(e.g. DEBUG | RELEASE | RELWITHDEBINFO)
@@ -52,9 +54,11 @@ OPTIONS
build with verbose output
TARGETS
- default = builds the default list of apps (also not passing any target does the same)
- all = builds all apps
- Or any combinations of (ufs, ufs_utils, upp)
+ default = builds the default list of components for the specified application
+ (also not passing any target does the same)
+ all = builds all standard components for ATM
+ conda_only = installs miniconda, but no other components
+ Or any combination of (ufs, ufs_utils, upp, nexus, aqm_utils)
NOTE: See User's Guide for detailed build instructions
@@ -99,11 +103,8 @@ usage_error () {
# default settings
LCL_PID=$$
-SRW_DIR=$(cd "$(dirname "$(readlink -f -n "${BASH_SOURCE[0]}" )" )" && pwd -P)
-MACHINE_SETUP=${SRW_DIR}/src/UFS_UTILS/sorc/machine-setup.sh
-BUILD_DIR="${SRW_DIR}/build"
-INSTALL_DIR=${SRW_DIR}
BIN_DIR="exec"
+CONDA_BUILD_DIR="conda"
COMPILER=""
APPLICATION=""
CCPP_SUITES=""
@@ -117,6 +118,7 @@ VERBOSE=false
# Turn off all apps to build and choose default later
DEFAULT_BUILD=true
+BUILD_CONDA="on"
BUILD_UFS="off"
BUILD_UFS_UTILS="off"
BUILD_UPP="off"
@@ -164,6 +166,8 @@ while :; do
--install-dir|--install-dir=) usage_error "$1 requires argument." ;;
--bin-dir=?*) BIN_DIR=${1#*=} ;;
--bin-dir|--bin-dir=) usage_error "$1 requires argument." ;;
+ --conda-dir=?*) CONDA_BUILD_DIR=${1#*=} ;;
+ --conda-dir|--conda-dir=) usage_error "$1 requires argument." ;;
--build-type=?*) BUILD_TYPE=${1#*=} ;;
--build-type|--build-type=) usage_error "$1 requires argument." ;;
--build-jobs=?*) BUILD_JOBS=$((${1#*=})) ;;
@@ -175,6 +179,7 @@ while :; do
default) ;;
all) DEFAULT_BUILD=false; BUILD_UFS="on";
BUILD_UFS_UTILS="on"; BUILD_UPP="on";;
+ conda_only) DEFAULT_BUILD=false;;
ufs) DEFAULT_BUILD=false; BUILD_UFS="on" ;;
ufs_utils) DEFAULT_BUILD=false; BUILD_UFS_UTILS="on" ;;
upp) DEFAULT_BUILD=false; BUILD_UPP="on" ;;
@@ -188,10 +193,9 @@ while :; do
done
# Ensure uppercase / lowercase ============================================
-APPLICATION="${APPLICATION^^}"
-PLATFORM="${PLATFORM,,}"
-COMPILER="${COMPILER,,}"
-EXTERNALS="${EXTERNALS^^}"
+APPLICATION=$(echo ${APPLICATION} | tr '[a-z]' '[A-Z]')
+PLATFORM=$(echo ${PLATFORM} | tr '[A-Z]' '[a-z]')
+COMPILER=$(echo ${COMPILER} | tr '[A-Z]' '[a-z]')
# check if PLATFORM is set
if [ -z $PLATFORM ] ; then
@@ -203,6 +207,55 @@ fi
MACHINE="${PLATFORM}"
printf "PLATFORM(MACHINE)=${PLATFORM}\n" >&2
+
+# Conda is not used on WCOSS2
+if [ "${PLATFORM}" = "wcoss2" ]; then
+ BUILD_CONDA="off"
+fi
+
+# build conda and conda environments, if requested.
+if [ "${BUILD_CONDA}" = "on" ] ; then
+ if [ ! -d "${CONDA_BUILD_DIR}" ] ; then
+ os=$(uname)
+ test $os == Darwin && os=MacOSX
+ hardware=$(uname -m)
+ installer=Miniforge3-${os}-${hardware}.sh
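+ # e.g. Miniforge3-Linux-x86_64.sh or Miniforge3-MacOSX-arm64.sh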
+ curl -L -O "https://github.com/conda-forge/miniforge/releases/download/23.3.1-1/${installer}"
+ bash ./${installer} -bfp "${CONDA_BUILD_DIR}"
+ rm ${installer}
+ fi
+
+ source ${CONDA_BUILD_DIR}/etc/profile.d/conda.sh
+ # Put some additional packages in the base environment on MacOS systems
+ if [ "${os}" == "MacOSX" ] ; then
+ mamba install -y bash coreutils sed
+ fi
+ conda activate
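+ # create each SRW conda environment only if it does not already exist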
+ if ! conda env list | grep -q "^srw_app\s" ; then
+ mamba env create -n srw_app --file environment.yml
+ fi
+ if ! conda env list | grep -q "^srw_graphics\s" ; then
+ mamba env create -n srw_graphics --file graphics_environment.yml
+ fi
+ if [ "${APPLICATION}" = "ATMAQ" ]; then
+ if ! conda env list | grep -q "^srw_aqm\s" ; then
+ mamba env create -n srw_aqm --file aqm_environment.yml
+ fi
+ fi
+
+else
+ source ${CONDA_BUILD_DIR}/etc/profile.d/conda.sh
+ conda activate
+fi
+
+# The conda base environment provides the Linux-style utilities needed to perform these tasks on MacOS.
+SRW_DIR=$(cd "$(dirname "$(readlink -f -n "${BASH_SOURCE[0]}" )" )" && pwd -P)
+MACHINE_SETUP=${SRW_DIR}/src/UFS_UTILS/sorc/machine-setup.sh
+BUILD_DIR="${BUILD_DIR:-${SRW_DIR}/build}"
+INSTALL_DIR=${INSTALL_DIR:-$SRW_DIR}
+CONDA_BUILD_DIR="$(readlink -f "${CONDA_BUILD_DIR}")"
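+# record the absolute conda path so the "conda" modulefile can locate this installation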
+echo ${CONDA_BUILD_DIR} > ${SRW_DIR}/conda_loc
+
# choose default apps to build
if [ "${DEFAULT_BUILD}" = true ]; then
BUILD_UFS="on"
diff --git a/devclean.sh b/devclean.sh
index 36fbc36898..8d13b0f061 100755
--- a/devclean.sh
+++ b/devclean.sh
@@ -16,12 +16,16 @@ OPTIONS
removes the "build" directory, keeps the "bin", "lib" and other build artifacts intact
--clean
removes "bin", "build" directories, and other build artifacts (same as "-a", "--all")
- --install-dir=INSTALL_DIR
+ --conda
+ removes "conda" directory and conda_loc file in SRW
+ --install-dir=INSTALL_DIR
installation directory name (\${SRW_DIR} by default)
--build-dir=BUILD_DIR
main build directory, absolute path (\${SRW_DIR}/build/ by default)
--bin-dir=BIN_DIR
binary directory name ("exec" by default); full path is \${INSTALL_DIR}/\${BIN_DIR})
+ --conda-dir=CONDA_DIR
+ directory where conda is installed. Caution: if it is outside the SRW clone, it may be shared by other applications
--sub-modules
remove sub-module directories. They will need to be checked out again by sourcing "\${SRW_DIR}/manage_externals/checkout_externals" before attempting subsequent builds
-v, --verbose
@@ -38,6 +42,7 @@ Settings:
INSTALL_DIR=${INSTALL_DIR}
BUILD_DIR=${BUILD_DIR}
BIN_DIR=${BIN_DIR}
+ CONDA_DIR=${CONDA_DIR}
REMOVE=${REMOVE}
VERBOSE=${VERBOSE}
@@ -61,6 +66,7 @@ SRW_DIR=$(cd "$(dirname "$(readlink -f -n "${BASH_SOURCE[0]}" )" )" && pwd -P)
INSTALL_DIR=${INSTALL_DIR:-${SRW_DIR}}
BUILD_DIR=${BUILD_DIR:-"${SRW_DIR}/build"}
BIN_DIR="exec"
+CONDA_DIR=${CONDA_DIR:-"${SRW_DIR}/conda"}
REMOVE=false
VERBOSE=false
@@ -74,6 +80,7 @@ if [[ ("$1" == "--help") || ("$1" == "-h") ]]; then
usage
exit 0
fi
+
# process optional arguments
while :; do
case $1 in
@@ -82,12 +89,15 @@ while :; do
--remove) REMOVE=true ;;
--remove=?*|--remove=) usage_error "$1 argument ignored." ;;
--clean) CLEAN=true ;;
+ --conda) REMOVE_CONDA=true ;;
--install-dir=?*) INSTALL_DIR=${1#*=} ;;
--install-dir|--install-dir=) usage_error "$1 requires argument." ;;
--build-dir=?*) BUILD_DIR=${1#*=} ;;
--build-dir|--build-dir=) usage_error "$1 requires argument." ;;
--bin-dir=?*) BIN_DIR=${1#*=} ;;
--bin-dir|--bin-dir=) usage_error "$1 requires argument." ;;
+ --conda-dir=?*) CONDA_DIR=${1#*=} ;;
+ --conda-dir|--conda-dir=) usage_error "$1 requires argument." ;;
--sub-modules) INCLUDE_SUB_MODULES=true ;;
--verbose|-v) VERBOSE=true ;;
--verbose=?*|--verbose=) usage_error "$1 argument ignored." ;;
@@ -95,7 +105,7 @@ while :; do
default) ALL_CLEAN=false ;;
# unknown
-?*|?*) usage_error "Unknown option $1" ;;
- *) usage; break ;;
+ *) break ;;
esac
shift
done
@@ -117,32 +127,48 @@ if [ "${REMOVE}" = true ] && [ "${CLEAN}" = false ] ; then
elif [ "${CLEAN}" = true ]; then
printf '%s\n' "Remove build directory, bin directory, and other build artifacts "
printf '%s\n' " from the installation directory = ${INSTALL_DIR} "
- [[ -d "${BUILD_DIR}" ]] && rm -rf ${BUILD_DIR} && printf '%s\n' "rm -rf ${BUILD_DIR}"
- [[ -d "${INSTALL_DIR}/${BIN_DIR}" ]] && ( rm -rf ${INSTALL_DIR}/${BIN_DIR} && printf '%s\n' "rm -rf ${INSTALL_DIR}/${BIN_DIR}" )
- [[ -d "${SRW_DIR}/${BIN_DIR}" ]] && ( rm -rf ${SRW_DIR}/${BIN_DIR} && printf '%s\n' "rm -rf ${SRW_DIR}/${BIN_DIR}" )
- [[ -d "${INSTALL_DIR}/share" ]] && ( rm -rf ${INSTALL_DIR}/share && printf '%s\n' "rm -rf ${INSTALL_DIR}/share" )
- [[ -d "${INSTALL_DIR}/include" ]] && ( rm -rf ${INSTALL_DIR}/include && printf '%s\n' "rm -rf ${INSTALL_DIR}/include" )
- [[ -d "${INSTALL_DIR}/lib" ]] && rm -rf ${INSTALL_DIR}/lib && printf '%s\n' "rm -rf ${INSTALL_DIR}/lib"
- [[ -d "${INSTALL_DIR}/lib64" ]] && rm -rf ${INSTALL_DIR}/lib && printf '%s\n' "rm -rf ${INSTALL_DIR}/lib64"
- [[ -d "${SRW_DIR}/manage_externals/manic" ]] && rm -f ${SRW_DIR}/manage_externals/manic/*.pyc && printf '%s\n' "rm -f ${SRW_DIR}/manage_externals/manic/*.pyc"
+
+ directories=( \
+ "${BUILD_DIR}" \
+ "${INSTALL_DIR}/${BIN_DIR}" \
+ "${INSTALL_DIR}/share" \
+ "${INSTALL_DIR}/include" \
+ "${INSTALL_DIR}/lib" \
+ "${INSTALL_DIR}/lib64" \
+ "${SRW_DIR}/manage_externals/manic" \
+ )
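+ # remove each existing build artifact directory, listing what is deleted (-v)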
+ for directory in "${directories[@]}"; do
+ [[ -d $directory ]] && rm -rfv $directory
+ done
echo " "
fi
-# Clean all the submodules if requested. NB: Need to check out them again before attempting subsequent builds, by sourcing ${SRW_DIR}/manage_externals/checkout_externals
+# Clean all the submodules if requested. Note: Need to check them out again before attempting subsequent builds, by sourcing ${SRW_DIR}/manage_externals/checkout_externals
if [ ${INCLUDE_SUB_MODULES} == true ]; then
printf '%s\n' "Removing submodules ..."
declare -a submodules='()'
submodules=(${SRW_DIR}/sorc/*)
-# echo " submodules are: ${submodules[@]} (total of ${#submodules[@]}) "
+# echo " submodules are: ${submodules[@]} (total of ${#submodules[@]}) "
if [ ${#submodules[@]} -ge 1 ]; then
for sub in ${submodules[@]}; do [[ -d "${sub}" ]] && ( rm -rf ${sub} && printf '%s\n' "rm -rf ${sub}" ); done
fi
- printf '%s\n' "NB: Need to check out submodules again for any subsequent builds, " \
+ printf '%s\n' "Note: Need to check out submodules again for any subsequent builds, " \
" by sourcing ${SRW_DIR}/manage_externals/checkout_externals "
fi
#
-echo " "
+
+# Clean conda if requested
+if [ "${REMOVE_CONDA}" = true ] ; then
+ printf '%s\n' "Removing conda location file"
+ rm -rf ${SRW_DIR}/conda_loc
+ printf '%s\n' "Removing conda installation"
+ rm -rf ${CONDA_DIR}
+fi
+
+
+
+echo " "
echo "All the requested cleaning tasks have been completed"
-echo " "
+echo " "
exit 0
diff --git a/docs/UsersGuide/source/BackgroundInfo/TechnicalOverview.rst b/docs/UsersGuide/source/BackgroundInfo/TechnicalOverview.rst
index a179d535e1..af64357675 100644
--- a/docs/UsersGuide/source/BackgroundInfo/TechnicalOverview.rst
+++ b/docs/UsersGuide/source/BackgroundInfo/TechnicalOverview.rst
@@ -78,7 +78,7 @@ The following software is also required to run the SRW Application, but the :ter
* Only **MPICH** or **OpenMPI** can be built with HPC-Stack. Other implementations must be installed separately by the user (if desired).
-For MacOS systems, some additional software packages are needed. When possible, it is recommended that users install and/or upgrade this software (along with software listed above) using the `Homebrew `__ package manager for MacOS. See :doc:`HPC-Stack Documentation: Chapter 3 ` and :numref:`Chapter %s ` for further guidance on installing these prerequisites on MacOS.
+For MacOS systems, some additional software packages are needed. When possible, it is recommended that users install and/or upgrade this software (along with software listed above) using the `Homebrew `__ package manager for MacOS. See :doc:`HPC-Stack Documentation: Chapter 3 ` for further guidance on installing these prerequisites on MacOS.
* bash v4.x
* GNU compiler suite v11 or higher with gfortran
@@ -94,6 +94,7 @@ Optional but recommended prerequisites for all systems:
* Rocoto Workflow Management System (1.3.1)
* Python packages ``scipy``, ``matplotlib``, ``pygrib``, ``cartopy``, and ``pillow`` for graphics
+
.. _SRWStructure:
Code Repositories and Directory Structure
diff --git a/docs/UsersGuide/source/BuildingRunningTesting/AQM.rst b/docs/UsersGuide/source/BuildingRunningTesting/AQM.rst
index 4c83bcee2b..1c4ff068f7 100644
--- a/docs/UsersGuide/source/BuildingRunningTesting/AQM.rst
+++ b/docs/UsersGuide/source/BuildingRunningTesting/AQM.rst
@@ -68,7 +68,7 @@ If the SRW-AQM builds correctly, users should see the standard executables liste
* - nexus
- Runs the NOAA Emission and eXchange Unified System (:ref:`NEXUS `) emissions processing system
-Load the ``workflow_tools`` Environment
+Load the ``srw_app`` Environment
--------------------------------------------
Load the python environment for the workflow:
@@ -88,9 +88,9 @@ If the console outputs a message, the user should run the commands specified in
.. code-block:: console
Please do the following to activate conda:
- > conda activate workflow_tools
+ > conda activate srw_app
-then the user should run ``conda activate workflow_tools``. Otherwise, the user can continue with configuring the workflow.
+then the user should run ``conda activate srw_app``. Otherwise, the user can continue with configuring the workflow.
.. _AQMConfig:
diff --git a/docs/UsersGuide/source/BuildingRunningTesting/BuildSRW.rst b/docs/UsersGuide/source/BuildingRunningTesting/BuildSRW.rst
index 5b871fe87f..1828341e06 100644
--- a/docs/UsersGuide/source/BuildingRunningTesting/BuildSRW.rst
+++ b/docs/UsersGuide/source/BuildingRunningTesting/BuildSRW.rst
@@ -200,6 +200,19 @@ On Level 1 systems for which a modulefile is provided under the ``modulefiles``
where ```` is replaced with the name of the platform the user is working on. Valid values include: ``cheyenne`` | ``gaea`` | ``hera`` | ``jet`` | ``linux`` | ``macos`` | ``noaacloud`` | ``orion``
+Following the release of SRW v2.2.0, the App installs miniconda and the SRW conda environments as part
+of the build process. The location defaults to the ``ufs-srweather-app/conda`` subdirectory inside the SRW
+clone; however, users can set any path on their system using the ``--conda-dir`` flag. If conda is already
+installed in that location, the conda installation step will be skipped. The following example uses a
+pre-existing conda installation at ``/path/to/conda``:
+
+.. code-block:: console
+
+ ./devbuild.sh --platform= --conda-dir=/path/to/conda
+
+Running ``./devbuild.sh`` without any arguments will display the usage statement, listing all available
+flags and targets for this script.
+
.. note::
Although build modulefiles exist for generic Linux and MacOS machines, users will need to alter these according to the instructions in Sections :numref:`%s ` & :numref:`%s `. Users on these systems may have more success building the SRW App with the :ref:`CMake Approach ` instead.
diff --git a/docs/UsersGuide/source/BuildingRunningTesting/Quickstart.rst b/docs/UsersGuide/source/BuildingRunningTesting/Quickstart.rst
index df5a61b1ef..387e8d6d2e 100644
--- a/docs/UsersGuide/source/BuildingRunningTesting/Quickstart.rst
+++ b/docs/UsersGuide/source/BuildingRunningTesting/Quickstart.rst
@@ -60,9 +60,9 @@ For a detailed explanation of how to build and run the SRW App on any supported
.. code-block:: console
Please do the following to activate conda:
- > conda activate workflow_tools
+ > conda activate srw_app
- then the user should run ``conda activate workflow_tools`` to activate the workflow environment.
+ then the user should run ``conda activate srw_app`` to activate the workflow environment.
#. Configure the experiment:
diff --git a/docs/UsersGuide/source/BuildingRunningTesting/RunSRW.rst b/docs/UsersGuide/source/BuildingRunningTesting/RunSRW.rst
index c7ea16d6b0..4368dbcca1 100644
--- a/docs/UsersGuide/source/BuildingRunningTesting/RunSRW.rst
+++ b/docs/UsersGuide/source/BuildingRunningTesting/RunSRW.rst
@@ -111,23 +111,23 @@ The first two steps depend on the platform being used and are described here for
Load the Conda/Python Environment
------------------------------------
-The SRW App workflow is often referred to as the *regional workflow* because it runs experiments on a regional scale (unlike the *global workflow* used in other applications). The SRW App workflow requires installation of Python3 using conda; it also requires additional packages built in a separate conda evironment named ``workflow_tools``. On Level 1 systems, a ``workflow_tools`` environment already exists, and users merely need to load the environment. On Level 2-4 systems, users must create and then load the environment. The process for each is described in detail below.
+The SRW App workflow requires a variety of Python packages. To manage these packages, the App relies
+on conda as both a package manager and a virtual environment manager. At build time, users have the option
+to install the latest version of Miniforge and automatically create the conda environments needed by the SRW App.
+Centrally managed environments on Level 1 platforms will no longer be updated for newer versions of the SRW App.
-.. _Load-WF-L1:
-
-Loading the Workflow Environment on Level 1 Systems
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-.. attention::
+.. _Load-WF-L1:
- Users on a Level 2-4 system should skip to the :ref:`next section ` for instructions.
+Loading the Workflow Environment
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-The ``workflow_tools`` conda/Python environment has already been set up on Level 1 platforms and can be activated in the following way:
+The ``srw_app`` conda/Python environment can be activated in the following way:
.. code-block:: console
source /path/to/etc/lmod-setup.sh
- module use /path/to/modulefiles
+ module use /path/to/ufs-srweather-app/modulefiles
module load wflow_
where ```` refers to a valid machine name (see :numref:`Section %s ` for ``MACHINE`` options). In a csh shell environment, users should replace ``lmod-setup.sh`` with ``lmod-setup.csh``.
@@ -140,106 +140,21 @@ The ``wflow_`` modulefile will then output instructions to activate th
.. code-block:: console
Please do the following to activate conda:
- > conda activate workflow_tools
-
-then the user should run ``conda activate workflow_tools``. This activates the ``workflow_tools`` conda environment, and the user typically sees ``(workflow_tools)`` in front of the Terminal prompt at this point.
-
-After loading the workflow environment, users may continue to :numref:`Section %s ` for instructions on setting the experiment configuration parameters.
+ > conda activate srw_app
-.. _Load-WF-L234:
-
-Loading the Workflow Environment on Level 2-4 Systems
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-Users on non-Level 1 systems will need to create a conda workflow environment, modify a ``wflow_*`` file to reflect the location of required modules, and load the workflow modules using the modified ``wflow_*`` file.
-
-Create a *conda* Workflow Environment
-```````````````````````````````````````
+then the user should run ``conda activate srw_app``. This activates the ``srw_app`` conda environment, and the user typically sees ``(srw_app)`` in front of the Terminal prompt at this point.
.. note::
- Examples in this subsection presume that the user is running in the Terminal with a bash shell environment. If this is not the case, users will need to adjust the commands to fit their command line application and shell environment.
-
-.. _MacMorePackages:
-
-MacOS ONLY: Install/Upgrade Mac-Specific Packages
-"""""""""""""""""""""""""""""""""""""""""""""""""""
-
-.. attention::
-
- This subsection is for Mac OS users only. Users on Linux systems can skip to :ref:`Creating the workflow_tools Environment on Linux and Mac OS ` for instructions.
-
-
-MacOS requires the installation of a few additional packages and, possibly, an upgrade to bash. Users running on MacOS should execute the following commands:
-
-.. code-block:: console
-
- bash --version
- brew install bash # or: brew upgrade bash
- brew install coreutils
- brew gsed # follow directions to update the PATH env variable
-
-
-.. _LinuxMacVEnv:
-
-Creating the ``workflow_tools`` Environment on Linux and Mac OS
-"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
+ If users do not use the wflow module to load conda, ``conda`` will need to be initialized before running the ``conda activate srw_app`` command. Depending on the user's system and login setup, this may be accomplished in a variety of ways. Conda initialization usually involves the following command: ``source /etc/profile.d/conda.sh``, where ```` is the base conda installation directory, which by default is the full path to ``ufs-srweather-app/conda``.
-On generic Mac and Linux systems, users need to create a conda ``workflow_tools`` environment. The environment can be stored in a local path, which could be a default location or a user-specified location (e.g., ``$HOME/condaenv/venvs/`` directory). (To determine the default location, use the ``conda info`` command, and look for the ``envs directories`` list.) The following is a brief recipe for creating a virtual conda environment on non-Level 1 platforms. It uses the aarch64 (64-bit ARM) Miniforge for Linux and installs into $HOME/conda. Adjust as necessary for your target system.
-
-.. code-block:: console
-
- wget https://github.com/conda-forge/miniforge/releases/latest/download/Miniforge3-Linux-aarch64.sh
- bash Miniforge3-Linux-aarch64.sh -bfp ~/conda
- rm Miniforge3-Linux-aarch64.sh
- source ~/conda/etc/profile.d/conda.sh
- conda activate
- conda install -y conda-build conda-verify
- cd path/to/your/workflow-tools/clone
- conda build recipe
- conda create -y -n workflow_tools -c local workflow_tools
- conda activate workflow_tools
-
-In future shells, you can activate and use this environment with:
-
-.. code-block:: console
-
- source ~/conda/etc/profile.d/conda.sh
- conda activate uwtools
-
-See the `workflow-tools repository `__ for additional documentation.
+After loading the workflow environment, users may continue to :numref:`Section %s ` for instructions on setting the experiment configuration parameters.
Modify a ``wflow_`` File
``````````````````````````````````````
-Users can copy one of the provided ``wflow_`` files from the ``modulefiles`` directory and use it as a template to create a ``wflow_`` file that functions on their system. The ``wflow_macos`` and ``wflow_linux`` template modulefiles are provided as a starting point, but any ``wflow_`` file could be used. Users must modify the files to provide paths for python, miniconda modules, module loads, conda initialization, and the user's ``workflow_tools`` conda environment.
-
-Load the Workflow Environment
-```````````````````````````````
+Users can copy one of the provided ``wflow_`` files from the ``modulefiles`` directory and use it as a template to create a ``wflow_`` file that functions on their system. The ``wflow_macos`` and ``wflow_linux`` template modulefiles are provided as a starting point, but any ``wflow_`` file could be used. Since the conda environments are installed as part of the SRW App build, the existing modulefiles can find those environments automatically; there is no need to edit the Python-related information in these files.
-After creating a ``workflow_tools`` environment and making modifications to a ``wflow_`` file, users can run the commands below to activate the workflow environment:
-.. code-block:: console
-
- source /path/to/etc/lmod-setup.sh
- module use /path/to/modulefiles
- module load wflow_
-
-where ```` refers to a valid machine name (i.e., ``linux`` or ``macos``).
-
-.. note::
- If users source the lmod-setup file on a system that doesn't need it, it will not cause any problems (it will simply do a ``module purge``).
-
-The ``wflow_`` modulefile will then output the following instructions:
-
-.. code-block:: console
-
- Please do the following to activate conda:
- > conda activate workflow_tools
-
-After running ``conda activate workflow_tools``, the user will typically see ``(workflow_tools)`` in front of the Terminal prompt. This indicates that the workflow environment has been loaded successfully.
-
-.. note::
- ``conda`` needs to be initialized before running ``conda activate workflow_tools`` command. Depending on the user's system and login setup, this may be accomplished in a variety of ways. Conda initialization usually involves the following command: ``source /etc/profile.d/conda.sh``, where ```` is the base conda installation directory.
.. _ExptConfig:
@@ -416,7 +331,7 @@ A correct ``config.yaml`` file will output a ``SUCCESS`` message. A ``config.yam
.. hint::
- * The ``workflow_tools`` environment must be loaded for the ``config_utils.py`` script to validate the ``config.yaml`` file.
+ * The ``srw_app`` environment must be loaded for the ``config_utils.py`` script to validate the ``config.yaml`` file.
* Valid values for configuration variables should be consistent with those in the ``ush/valid_param_vals.yaml`` script.
diff --git a/docs/UsersGuide/source/BuildingRunningTesting/Tutorial.rst b/docs/UsersGuide/source/BuildingRunningTesting/Tutorial.rst
index c27544f54e..ff510bafc3 100644
--- a/docs/UsersGuide/source/BuildingRunningTesting/Tutorial.rst
+++ b/docs/UsersGuide/source/BuildingRunningTesting/Tutorial.rst
@@ -65,14 +65,14 @@ To load the workflow environment, source the lmod-setup file. Then load the work
where ```` is a valid, lowercased machine name (see ``MACHINE`` in :numref:`Section %s ` for valid values).
-After loading the workflow, users should follow the instructions printed to the console. Usually, the instructions will tell the user to run ``conda activate workflow_tools``. For example, a user on Hera with permissions on the ``nems`` project may issue the following commands to load the workflow (replacing ``User.Name`` with their actual username):
+After loading the workflow, users should follow the instructions printed to the console. Usually, the instructions will tell the user to run ``conda activate srw_app``. For example, a user on Hera with permissions on the ``nems`` project may issue the following commands to load the workflow (replacing ``User.Name`` with their actual username):
.. code-block:: console
source /scratch1/NCEPDEV/nems/User.Name/ufs-srweather-app/etc/lmod-setup.sh hera
module use /scratch1/NCEPDEV/nems/User.Name/ufs-srweather-app/modulefiles
module load wflow_hera
- conda activate workflow_tools
+ conda activate srw_app
Configuration
-------------------------
diff --git a/docs/UsersGuide/source/BuildingRunningTesting/VXCases.rst b/docs/UsersGuide/source/BuildingRunningTesting/VXCases.rst
index 533f6e7fb2..b5f77363c3 100644
--- a/docs/UsersGuide/source/BuildingRunningTesting/VXCases.rst
+++ b/docs/UsersGuide/source/BuildingRunningTesting/VXCases.rst
@@ -93,7 +93,7 @@ First, navigate to the ``ufs-srweather-app/ush`` directory. Then, load the workf
Users running a csh/tcsh shell would run ``source /path/to/etc/lmod-setup.csh `` in place of the first command above.
-After loading the workflow, users should follow the instructions printed to the console. Usually, the instructions will tell the user to run ``conda activate regional_workflow``.
+After loading the workflow, users should follow the instructions printed to the console. Usually, the instructions will tell the user to run ``conda activate srw_app``.
Configure the Verification Sample Case
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -189,7 +189,7 @@ If a problem occurs and a task goes DEAD, view the task log files in ``$EXPTDIR/
Generate Plots
^^^^^^^^^^^^^^^^^
-The plots are created using the graphics generation script that comes with the SRW App v2.1.0 release. Information on the plots and instructions on how to run the script can be found in :doc:`Chapter 12 ` of the v2.1.0 release documentation. If the python environment is already loaded (i.e., ``(regional_workflow)`` is visible in the command prompt), users can navigate to the directory with the plotting scripts and run ``plot_allvars.py``:
+The plots are created using the graphics generation script that comes with the SRW App v2.1.0 release. Information on the plots and instructions on how to run the script can be found in :doc:`Chapter 12 ` of the v2.1.0 release documentation. If the python environment is already loaded (i.e., ``(srw_graphics)`` is visible in the command prompt), users can navigate to the directory with the plotting scripts and run ``plot_allvars.py``:
.. code-block:: console
diff --git a/docs/UsersGuide/source/Reference/FAQ.rst b/docs/UsersGuide/source/Reference/FAQ.rst
index 237980789c..702b0bfc7e 100644
--- a/docs/UsersGuide/source/Reference/FAQ.rst
+++ b/docs/UsersGuide/source/Reference/FAQ.rst
@@ -226,7 +226,7 @@ In addition to the options above, many standard terminal commands can be run to
How can I run a new experiment?
==================================
-To run a new experiment at a later time, users need to rerun the commands in :numref:`Section %s ` that reactivate the *workflow_tools* environment:
+To run a new experiment at a later time, users need to rerun the commands in :numref:`Section %s ` that reactivate the *srw_app* environment:
.. code-block:: console
@@ -234,7 +234,7 @@ To run a new experiment at a later time, users need to rerun the commands in :nu
module use /path/to/modulefiles
module load wflow_
-Follow any instructions output by the console (e.g., ``conda activate workflow_tools``).
+Follow any instructions output by the console (e.g., ``conda activate srw_app``).
Then, users can configure a new experiment by updating the environment variables in ``config.yaml`` to reflect the desired experiment configuration. Detailed instructions can be viewed in :numref:`Section %s `. Parameters and valid values are listed in :numref:`Section %s `. After adjusting the configuration file, generate the new experiment by running ``./generate_FV3LAM_wflow.py``. Check progress by navigating to the ``$EXPTDIR`` and running ``rocotostat -w FV3LAM_wflow.xml -d FV3LAM_wflow.db -v 10``.
@@ -276,4 +276,4 @@ If you encounter issues while generating ICS and LBCS for a predefined 3-km grid
Additionally, users can try increasing the number of processors or the wallclock time requested for the jobs. Sometimes jobs may fail without errors because the process is cut short. These settings can be adusted in one of the ``ufs-srweather-app/parm/wflow`` files. For ICs/LBCs tasks, these parameters are set in the ``coldstart.yaml`` file.
-Users can also update the hash of UFS_UTILS in the ``Externals.cfg`` file to the HEAD of that repository. There was a known memory issue with how ``chgres_cube`` was handling regridding of the 3-D wind field for large domains at high resolutions (see `UFS_UTILS PR #766 `__ and the associated issue for more information). If changing the hash in ``Externals.cfg``, users will need to rerun ``manage_externals`` and rebuild the code (see :numref:`Section %s `).
\ No newline at end of file
+Users can also update the hash of UFS_UTILS in the ``Externals.cfg`` file to the HEAD of that repository. There was a known memory issue with how ``chgres_cube`` was handling regridding of the 3-D wind field for large domains at high resolutions (see `UFS_UTILS PR #766 `__ and the associated issue for more information). If changing the hash in ``Externals.cfg``, users will need to rerun ``manage_externals`` and rebuild the code (see :numref:`Section %s `).
diff --git a/environment.yml b/environment.yml
index 2e75288d85..c574df5e23 100644
--- a/environment.yml
+++ b/environment.yml
@@ -1,8 +1,8 @@
-name: regional_workflow
+name: srw_app
channels:
- conda-forge
+ - ufs-community
dependencies:
- python: 3.7
- f90nml: 1.1.2
- jinja2: 2.11*
- pyyaml: 5.1.2
+ - pylint=2.17*
+ - pytest=7.2*
+ - uwtools=1.0.0
diff --git a/graphics_environment.yml b/graphics_environment.yml
new file mode 100644
index 0000000000..732561b6b6
--- /dev/null
+++ b/graphics_environment.yml
@@ -0,0 +1,16 @@
+name: srw_graphics
+channels:
+ - conda-forge
+dependencies:
+ - python=3.9*
+ - cartopy=0.18*
+ - f90nml=1.4*
+ - jinja2=3.0*
+ - matplotlib=3.5*
+ - pillow=9.2*
+ - pygrib=2.1*
+ - pylint=2.16*
+ - pyproj=2.6*
+ - pytest=7.1*
+ - pyyaml=6.0*
+ - scipy=1.9*
diff --git a/modulefiles/build_cheyenne_gnu.lua b/modulefiles/build_cheyenne_gnu.lua
index e9ed311712..58398891b2 100644
--- a/modulefiles/build_cheyenne_gnu.lua
+++ b/modulefiles/build_cheyenne_gnu.lua
@@ -9,7 +9,6 @@ load(pathJoin("cmake", os.getenv("cmake_ver") or "3.22.0"))
load(pathJoin("ncarenv", os.getenv("ncarenv_ver") or "1.3"))
load(pathJoin("gnu", os.getenv("gnu_ver") or "11.2.0"))
load(pathJoin("mpt", os.getenv("mpt_ver") or "2.25"))
-load(pathJoin("python", os.getenv("python_ver") or "3.7.9"))
setenv("MKLROOT", "/glade/u/apps/opt/intel/2022.1/mkl/latest")
load(pathJoin("ncarcompilers", os.getenv("ncarcompilers_ver") or "0.5.0"))
unload("netcdf")
diff --git a/modulefiles/build_cheyenne_intel.lua b/modulefiles/build_cheyenne_intel.lua
index 21bc7108ab..298c9879ac 100644
--- a/modulefiles/build_cheyenne_intel.lua
+++ b/modulefiles/build_cheyenne_intel.lua
@@ -10,7 +10,6 @@ load(pathJoin("ncarenv", os.getenv("ncarenv_ver") or "1.3"))
load(pathJoin("intel", os.getenv("intel_ver") or "2022.1"))
load(pathJoin("mpt", os.getenv("mpt_ver") or "2.25"))
load(pathJoin("mkl", os.getenv("mkl_ver") or "2022.1"))
-load(pathJoin("python", os.getenv("python_ver") or "3.7.9"))
load(pathJoin("ncarcompilers", os.getenv("ncarcompilers_ver") or "0.5.0"))
unload("netcdf")
diff --git a/modulefiles/build_gaea_intel.lua b/modulefiles/build_gaea_intel.lua
index df93eb2f17..d9d1a007d5 100644
--- a/modulefiles/build_gaea_intel.lua
+++ b/modulefiles/build_gaea_intel.lua
@@ -15,11 +15,9 @@ prepend_path("MODULEPATH", "/lustre/f2/pdata/esrl/gsd/spack-stack/modulefiles")
load("stack-intel/2022.0.2")
load("stack-cray-mpich/7.7.20")
-load("stack-python/3.9.12")
load("cmake/3.23.1")
load("srw_common")
-load("ufs-pyenv")
-- Need at runtime
load("alps")
diff --git a/modulefiles/build_hera_gnu.lua b/modulefiles/build_hera_gnu.lua
index 578ed64cbb..d4a67e44b2 100644
--- a/modulefiles/build_hera_gnu.lua
+++ b/modulefiles/build_hera_gnu.lua
@@ -10,7 +10,6 @@ prepend_path("MODULEPATH", "/scratch1/NCEPDEV/jcsda/jedipara/spack-stack/modulef
load("stack-gcc/9.2.0")
load("stack-openmpi/4.1.5")
-load("stack-python/3.9.12")
load("cmake/3.23.1")
load("srw_common")
@@ -18,7 +17,6 @@ load("srw_common")
load(pathJoin("nccmp", os.getenv("nccmp_ver") or "1.9.0.1"))
load(pathJoin("nco", os.getenv("nco_ver") or "5.0.6"))
load(pathJoin("openblas", os.getenv("openblas_ver") or "0.3.19"))
-load("ufs-pyenv")
setenv("CMAKE_C_COMPILER","mpicc")
setenv("CMAKE_CXX_COMPILER","mpicxx")
diff --git a/modulefiles/build_hera_intel.lua b/modulefiles/build_hera_intel.lua
index f04f4d4df1..df19b0a583 100644
--- a/modulefiles/build_hera_intel.lua
+++ b/modulefiles/build_hera_intel.lua
@@ -17,9 +17,6 @@ load(pathJoin("stack-intel", stack_intel_ver))
stack_impi_ver=os.getenv("stack_impi_ver") or "2021.5.1"
load(pathJoin("stack-intel-oneapi-mpi", stack_impi_ver))
-stack_python_ver=os.getenv("stack_python_ver") or "3.9.12"
-load(pathJoin("stack-python", stack_python_ver))
-
cmake_ver=os.getenv("cmake_ver") or "3.20.1"
load(pathJoin("cmake", cmake_ver))
@@ -27,7 +24,6 @@ load("srw_common")
load(pathJoin("nccmp", os.getenv("nccmp_ver") or "1.9.0.1"))
load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3"))
-load("ufs-pyenv")
setenv("CMAKE_C_COMPILER","mpiicc")
setenv("CMAKE_CXX_COMPILER","mpiicpc")
diff --git a/modulefiles/build_hercules_intel.lua b/modulefiles/build_hercules_intel.lua
index 1772460542..3ca93978eb 100644
--- a/modulefiles/build_hercules_intel.lua
+++ b/modulefiles/build_hercules_intel.lua
@@ -10,14 +10,12 @@ prepend_path("MODULEPATH", "/work/noaa/da/role-da/spack-stack/modulefiles")
load("stack-intel/2021.7.1")
load("stack-intel-oneapi-mpi/2021.7.1")
-load("stack-python/3.9.14")
load("cmake/3.26.3")
load("srw_common")
load("nccmp/1.9.0.1")
load("nco/5.0.6")
-load("ufs-pyenv")
setenv("CFLAGS","-diag-disable=10441")
setenv("FFLAGS","-diag-disable=10441")
diff --git a/modulefiles/build_jet_intel.lua b/modulefiles/build_jet_intel.lua
index fd19234474..9e26cb0489 100644
--- a/modulefiles/build_jet_intel.lua
+++ b/modulefiles/build_jet_intel.lua
@@ -10,7 +10,6 @@ prepend_path("MODULEPATH", "/lfs4/HFIP/hfv3gfs/spack-stack/modulefiles")
load("stack-intel/2021.5.0")
load("stack-intel-oneapi-mpi/2021.5.1")
-load("stack-python/3.9.12")
load("cmake/3.23.1")
load("srw_common")
@@ -18,7 +17,6 @@ load("srw_common")
load("prod-util/1.2.2")
load("nccmp/1.9.0.1")
load("nco/5.0.6")
-load("ufs-pyenv")
setenv("CMAKE_C_COMPILER","mpiicc")
setenv("CMAKE_CXX_COMPILER","mpiicpc")
diff --git a/modulefiles/build_linux_gnu.lua b/modulefiles/build_linux_gnu.lua
index cc5f6831f1..22dfaf9a70 100644
--- a/modulefiles/build_linux_gnu.lua
+++ b/modulefiles/build_linux_gnu.lua
@@ -11,7 +11,6 @@ local HPCstack="/home/username/hpc-stack/install"
-- Load HPC stack
prepend_path("MODULEPATH", pathJoin(HPCstack, "modulefiles/stack"))
load("hpc")
-load("hpc-python")
load("hpc-gnu")
load("hpc-openmpi")
diff --git a/modulefiles/build_macos_gnu.lua b/modulefiles/build_macos_gnu.lua
index 266bfd11a6..d26da73544 100644
--- a/modulefiles/build_macos_gnu.lua
+++ b/modulefiles/build_macos_gnu.lua
@@ -15,7 +15,6 @@ local HPCstack="/Users/username/hpc-stack/install"
-- Load HPC stack
prepend_path("MODULEPATH", pathJoin(HPCstack, "modulefiles/stack"))
load("hpc")
-load("hpc-python")
load("hpc-gnu")
load("hpc-openmpi")
diff --git a/modulefiles/build_orion_intel.lua b/modulefiles/build_orion_intel.lua
index 1ecd3fdab3..9e6b0cabbc 100644
--- a/modulefiles/build_orion_intel.lua
+++ b/modulefiles/build_orion_intel.lua
@@ -10,7 +10,6 @@ prepend_path("MODULEPATH", "/work/noaa/da/role-da/spack-stack/modulefiles")
load("stack-intel/2022.0.2")
load("stack-intel-oneapi-mpi/2021.5.1")
-load("stack-python/3.9.7")
load("cmake/3.22.1")
load("srw_common")
diff --git a/modulefiles/conda.lua b/modulefiles/conda.lua
new file mode 100644
index 0000000000..1400dadad2
--- /dev/null
+++ b/modulefiles/conda.lua
@@ -0,0 +1,44 @@
+help([[
+]])
+
+local pkgName = myModuleName()
+local pkgVersion = myModuleVersion()
+local shell=myShellType()
+
+conflict(pkgName)
+
+local mod_path, mod_file = splitFileName(myFileName())
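+-- conda_loc, written by devbuild.sh at the top of the SRW clone, records the conda installation prefix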
+local conda_loc_file = pathJoin(mod_path, "..", "conda_loc")
+local base = capture("cat " .. conda_loc_file)
+local conda_file = pathJoin(base, "etc", "profile.d", "conda." .. shell)
+local command = "source " .. conda_file
+
+local level
+
+
+execute{cmd=command, modeA={"load", "unload"}}
+
+if mode() == "unload" then
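+-- on unload, fully deactivate any nested conda environments and scrub conda-related shell state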
+
+ level=tonumber(os.getenv("CONDA_SHLVL"))
+
+ while (level > 0) do
+ execute{cmd="conda deactivate", modeA={"unload"}}
+ level = level - 1
+ end
+
+
+ if shell == "csh" then
+ execute{cmd="unalias conda", modeA={"unload"}}
+ command = "unsetenv CONDA_EXE CONDA_PYTHON_EXE CONDA_SHLVL _CE_CONDA"
+ else
+ execute{cmd="unset conda", modeA={"unload"}}
+ command = "unset CONDA_EXE CONDA_PYTHON_EXE CONDA_SHLVL _CE_CONDA"
+ end
+ execute{cmd=command, modeA={"unload"}}
+ remove_path("PATH", pathJoin(base, "condabin"))
+ remove_path("MANPATH", pathJoin(base, "share", "man"))
+end
+
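+-- make the conda executable and its libraries available whenever this module is loaded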
+prepend_path("PATH", pathJoin(base, "bin"))
+prepend_path("LD_LIBRARY_PATH", pathJoin(base, "lib"))
diff --git a/modulefiles/python_srw.lua b/modulefiles/python_srw.lua
new file mode 100644
index 0000000000..b4621c5480
--- /dev/null
+++ b/modulefiles/python_srw.lua
@@ -0,0 +1,5 @@
+unload("python")
+unload("miniconda3")
+
+load("conda")
+setenv("SRW_ENV", "srw_app")
diff --git a/modulefiles/python_srw_cmaq.lua b/modulefiles/python_srw_cmaq.lua
new file mode 100644
index 0000000000..49bbdfa46e
--- /dev/null
+++ b/modulefiles/python_srw_cmaq.lua
@@ -0,0 +1,2 @@
+load("conda")
+setenv("SRW_ENV", "srw_aqm")
diff --git a/modulefiles/tasks/cheyenne/aqm_ics.local.lua b/modulefiles/tasks/cheyenne/aqm_ics.local.lua
index de700dbbc6..1040aab9a6 100644
--- a/modulefiles/tasks/cheyenne/aqm_ics.local.lua
+++ b/modulefiles/tasks/cheyenne/aqm_ics.local.lua
@@ -1,3 +1,3 @@
load(pathJoin("cmake", os.getenv("cmake_ver") or "3.22.0"))
load("nco/4.9.5")
-load("miniconda_regional_workflow_cmaq")
+load("python_srw_cmaq")
diff --git a/modulefiles/tasks/cheyenne/aqm_lbcs.local.lua b/modulefiles/tasks/cheyenne/aqm_lbcs.local.lua
index de700dbbc6..1040aab9a6 100644
--- a/modulefiles/tasks/cheyenne/aqm_lbcs.local.lua
+++ b/modulefiles/tasks/cheyenne/aqm_lbcs.local.lua
@@ -1,3 +1,3 @@
load(pathJoin("cmake", os.getenv("cmake_ver") or "3.22.0"))
load("nco/4.9.5")
-load("miniconda_regional_workflow_cmaq")
+load("python_srw_cmaq")
diff --git a/modulefiles/tasks/cheyenne/fire_emission.local.lua b/modulefiles/tasks/cheyenne/fire_emission.local.lua
index f8a53ee074..b62670156f 100644
--- a/modulefiles/tasks/cheyenne/fire_emission.local.lua
+++ b/modulefiles/tasks/cheyenne/fire_emission.local.lua
@@ -1,2 +1,2 @@
load("ncarenv")
-load("miniconda_regional_workflow_cmaq")
+load("python_srw_cmaq")
diff --git a/modulefiles/tasks/cheyenne/miniconda_regional_workflow_cmaq.lua b/modulefiles/tasks/cheyenne/miniconda_regional_workflow_cmaq.lua
deleted file mode 100644
index f0852058eb..0000000000
--- a/modulefiles/tasks/cheyenne/miniconda_regional_workflow_cmaq.lua
+++ /dev/null
@@ -1,6 +0,0 @@
-unload("python")
-load("conda")
-prepend_path("MODULEPATH","/glade/work/epicufsrt/contrib/miniconda3/modulefiles")
-load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0"))
-
-setenv("SRW_ENV", "regional_workflow_cmaq")
diff --git a/modulefiles/tasks/cheyenne/nexus_emission.local.lua b/modulefiles/tasks/cheyenne/nexus_emission.local.lua
index b7fc0590e9..c46ead59a9 100644
--- a/modulefiles/tasks/cheyenne/nexus_emission.local.lua
+++ b/modulefiles/tasks/cheyenne/nexus_emission.local.lua
@@ -2,4 +2,4 @@ load("nco/4.9.5")
load("mpt/2.25")
load("ncarenv")
-load("miniconda_regional_workflow_cmaq")
+load("python_srw_cmaq")
diff --git a/modulefiles/tasks/cheyenne/nexus_gfs_sfc.local.lua b/modulefiles/tasks/cheyenne/nexus_gfs_sfc.local.lua
index f8a53ee074..b62670156f 100644
--- a/modulefiles/tasks/cheyenne/nexus_gfs_sfc.local.lua
+++ b/modulefiles/tasks/cheyenne/nexus_gfs_sfc.local.lua
@@ -1,2 +1,2 @@
load("ncarenv")
-load("miniconda_regional_workflow_cmaq")
+load("python_srw_cmaq")
diff --git a/modulefiles/tasks/cheyenne/nexus_post_split.local.lua b/modulefiles/tasks/cheyenne/nexus_post_split.local.lua
index 16805914aa..c957eff552 100644
--- a/modulefiles/tasks/cheyenne/nexus_post_split.local.lua
+++ b/modulefiles/tasks/cheyenne/nexus_post_split.local.lua
@@ -1,3 +1,3 @@
load(pathJoin("nco", os.getenv("nco_ver") or "4.9.5"))
load("ncarenv")
-load("miniconda_regional_workflow_cmaq")
+load("python_srw_cmaq")
diff --git a/modulefiles/tasks/cheyenne/plot_allvars.local.lua b/modulefiles/tasks/cheyenne/plot_allvars.local.lua
index 2263141a0a..b49b8bb863 100644
--- a/modulefiles/tasks/cheyenne/plot_allvars.local.lua
+++ b/modulefiles/tasks/cheyenne/plot_allvars.local.lua
@@ -1,5 +1,3 @@
unload("python")
-prepend_path("MODULEPATH","/glade/work/epicufsrt/contrib/miniconda3/modulefiles")
-load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0"))
-
-setenv("SRW_ENV", "regional_workflow")
+load("conda")
+setenv("SRW_ENV", "srw_graphics")
diff --git a/modulefiles/tasks/cheyenne/point_source.local.lua b/modulefiles/tasks/cheyenne/point_source.local.lua
index f8a53ee074..b62670156f 100644
--- a/modulefiles/tasks/cheyenne/point_source.local.lua
+++ b/modulefiles/tasks/cheyenne/point_source.local.lua
@@ -1,2 +1,2 @@
load("ncarenv")
-load("miniconda_regional_workflow_cmaq")
+load("python_srw_cmaq")
diff --git a/modulefiles/tasks/cheyenne/pre_post_stat.local.lua b/modulefiles/tasks/cheyenne/pre_post_stat.local.lua
index 262919b475..7dcdc5969b 100644
--- a/modulefiles/tasks/cheyenne/pre_post_stat.local.lua
+++ b/modulefiles/tasks/cheyenne/pre_post_stat.local.lua
@@ -1,2 +1,2 @@
load("nco/4.9.5")
-load("miniconda_regional_workflow_cmaq")
+load("python_srw_cmaq")
diff --git a/modulefiles/tasks/cheyenne/python_srw.lua b/modulefiles/tasks/cheyenne/python_srw.lua
index 57e2c2eed0..fe6c73a7d5 100644
--- a/modulefiles/tasks/cheyenne/python_srw.lua
+++ b/modulefiles/tasks/cheyenne/python_srw.lua
@@ -1,5 +1,3 @@
unload("python")
-prepend_path("MODULEPATH","/glade/work/epicufsrt/contrib/miniconda3/modulefiles")
-load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0"))
-
-setenv("SRW_ENV", "workflow_tools")
+load("conda")
+setenv("SRW_ENV", "srw_app")
diff --git a/modulefiles/tasks/derecho/aqm_ics.local.lua b/modulefiles/tasks/derecho/aqm_ics.local.lua
index de700dbbc6..1040aab9a6 100644
--- a/modulefiles/tasks/derecho/aqm_ics.local.lua
+++ b/modulefiles/tasks/derecho/aqm_ics.local.lua
@@ -1,3 +1,3 @@
load(pathJoin("cmake", os.getenv("cmake_ver") or "3.22.0"))
load("nco/4.9.5")
-load("miniconda_regional_workflow_cmaq")
+load("python_srw_cmaq")
diff --git a/modulefiles/tasks/derecho/aqm_lbcs.local.lua b/modulefiles/tasks/derecho/aqm_lbcs.local.lua
index de700dbbc6..1040aab9a6 100644
--- a/modulefiles/tasks/derecho/aqm_lbcs.local.lua
+++ b/modulefiles/tasks/derecho/aqm_lbcs.local.lua
@@ -1,3 +1,3 @@
load(pathJoin("cmake", os.getenv("cmake_ver") or "3.22.0"))
load("nco/4.9.5")
-load("miniconda_regional_workflow_cmaq")
+load("python_srw_cmaq")
diff --git a/modulefiles/tasks/derecho/fire_emission.local.lua b/modulefiles/tasks/derecho/fire_emission.local.lua
index f8a53ee074..b62670156f 100644
--- a/modulefiles/tasks/derecho/fire_emission.local.lua
+++ b/modulefiles/tasks/derecho/fire_emission.local.lua
@@ -1,2 +1,2 @@
load("ncarenv")
-load("miniconda_regional_workflow_cmaq")
+load("python_srw_cmaq")
diff --git a/modulefiles/tasks/derecho/miniconda_regional_workflow_cmaq.lua b/modulefiles/tasks/derecho/miniconda_regional_workflow_cmaq.lua
deleted file mode 100644
index 1ecbec291f..0000000000
--- a/modulefiles/tasks/derecho/miniconda_regional_workflow_cmaq.lua
+++ /dev/null
@@ -1,6 +0,0 @@
-unload("python")
-load("conda")
-prepend_path("MODULEPATH","/glade/work/epicufsrt/contrib/derecho/miniconda3/modulefiles")
-load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0"))
-
-setenv("SRW_ENV", "regional_workflow_cmaq")
diff --git a/modulefiles/tasks/derecho/nexus_emission.local.lua b/modulefiles/tasks/derecho/nexus_emission.local.lua
index b94435c230..09f38a17dd 100644
--- a/modulefiles/tasks/derecho/nexus_emission.local.lua
+++ b/modulefiles/tasks/derecho/nexus_emission.local.lua
@@ -1,4 +1,4 @@
load("nco/5.0.6")
load("ncarenv")
-load("miniconda_regional_workflow_cmaq")
+load("python_srw_cmaq")
diff --git a/modulefiles/tasks/derecho/nexus_gfs_sfc.local.lua b/modulefiles/tasks/derecho/nexus_gfs_sfc.local.lua
index f8a53ee074..b62670156f 100644
--- a/modulefiles/tasks/derecho/nexus_gfs_sfc.local.lua
+++ b/modulefiles/tasks/derecho/nexus_gfs_sfc.local.lua
@@ -1,2 +1,2 @@
load("ncarenv")
-load("miniconda_regional_workflow_cmaq")
+load("python_srw_cmaq")
diff --git a/modulefiles/tasks/derecho/nexus_post_split.local.lua b/modulefiles/tasks/derecho/nexus_post_split.local.lua
index 8874da32a8..a03758c9c6 100644
--- a/modulefiles/tasks/derecho/nexus_post_split.local.lua
+++ b/modulefiles/tasks/derecho/nexus_post_split.local.lua
@@ -1,3 +1,3 @@
load(pathJoin("nco", os.getenv("nco_ver") or "5.0.6"))
load("ncarenv")
-load("miniconda_regional_workflow_cmaq")
+load("python_srw_cmaq")
diff --git a/modulefiles/tasks/derecho/plot_allvars.local.lua b/modulefiles/tasks/derecho/plot_allvars.local.lua
index 2263141a0a..b49b8bb863 100644
--- a/modulefiles/tasks/derecho/plot_allvars.local.lua
+++ b/modulefiles/tasks/derecho/plot_allvars.local.lua
@@ -1,5 +1,3 @@
unload("python")
-prepend_path("MODULEPATH","/glade/work/epicufsrt/contrib/miniconda3/modulefiles")
-load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0"))
-
-setenv("SRW_ENV", "regional_workflow")
+load("conda")
+setenv("SRW_ENV", "srw_graphics")
diff --git a/modulefiles/tasks/derecho/point_source.local.lua b/modulefiles/tasks/derecho/point_source.local.lua
index f8a53ee074..b62670156f 100644
--- a/modulefiles/tasks/derecho/point_source.local.lua
+++ b/modulefiles/tasks/derecho/point_source.local.lua
@@ -1,2 +1,2 @@
load("ncarenv")
-load("miniconda_regional_workflow_cmaq")
+load("python_srw_cmaq")
diff --git a/modulefiles/tasks/derecho/pre_post_stat.local.lua b/modulefiles/tasks/derecho/pre_post_stat.local.lua
index 262919b475..7dcdc5969b 100644
--- a/modulefiles/tasks/derecho/pre_post_stat.local.lua
+++ b/modulefiles/tasks/derecho/pre_post_stat.local.lua
@@ -1,2 +1,2 @@
load("nco/4.9.5")
-load("miniconda_regional_workflow_cmaq")
+load("python_srw_cmaq")
diff --git a/modulefiles/tasks/derecho/python_srw.lua b/modulefiles/tasks/derecho/python_srw.lua
index 29c290813e..fe6c73a7d5 100644
--- a/modulefiles/tasks/derecho/python_srw.lua
+++ b/modulefiles/tasks/derecho/python_srw.lua
@@ -1,5 +1,3 @@
unload("python")
-prepend_path("MODULEPATH","/glade/work/epicufsrt/contrib/derecho/miniconda3/modulefiles")
-load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0"))
-
-setenv("SRW_ENV", "workflow_tools")
+load("conda")
+setenv("SRW_ENV", "srw_app")
diff --git a/modulefiles/tasks/gaea-c5/plot_allvars.local.lua b/modulefiles/tasks/gaea-c5/plot_allvars.local.lua
index 7c2e3a9ba2..b7e9528710 100644
--- a/modulefiles/tasks/gaea-c5/plot_allvars.local.lua
+++ b/modulefiles/tasks/gaea-c5/plot_allvars.local.lua
@@ -1,4 +1,2 @@
-prepend_path("MODULEPATH","/lustre/f2/dev/role.epic/contrib/C5/miniconda3/modulefiles")
-load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0"))
-
-setenv("SRW_ENV", "regional_workflow")
+load("conda")
+setenv("SRW_ENV", "srw_graphics")
diff --git a/modulefiles/tasks/gaea-c5/python_srw.lua b/modulefiles/tasks/gaea-c5/python_srw.lua
index 673aa800b8..a8b57c694e 100644
--- a/modulefiles/tasks/gaea-c5/python_srw.lua
+++ b/modulefiles/tasks/gaea-c5/python_srw.lua
@@ -1,5 +1,3 @@
unload("miniconda3")
-prepend_path("MODULEPATH","/lustre/f2/dev/role.epic/contrib/C5/miniconda3/modulefiles")
-load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0"))
-
-setenv("SRW_ENV", "workflow_tools")
+load("conda")
+setenv("SRW_ENV", "srw_app")
diff --git a/modulefiles/tasks/gaea/plot_allvars.local.lua b/modulefiles/tasks/gaea/plot_allvars.local.lua
index 5cd4d13325..b7e9528710 100644
--- a/modulefiles/tasks/gaea/plot_allvars.local.lua
+++ b/modulefiles/tasks/gaea/plot_allvars.local.lua
@@ -1,4 +1,2 @@
-prepend_path("MODULEPATH","/lustre/f2/dev/role.epic/contrib/miniconda3/modulefiles")
-load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0"))
-
-setenv("SRW_ENV", "regional_workflow")
+load("conda")
+setenv("SRW_ENV", "srw_graphics")
diff --git a/modulefiles/tasks/gaea/python_srw.lua b/modulefiles/tasks/gaea/python_srw.lua
index a18856b8d2..a2dd45084c 100644
--- a/modulefiles/tasks/gaea/python_srw.lua
+++ b/modulefiles/tasks/gaea/python_srw.lua
@@ -1,4 +1,2 @@
-prepend_path("MODULEPATH","/lustre/f2/dev/role.epic/contrib/miniconda3/modulefiles")
-load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0"))
-
-setenv("SRW_ENV", "workflow_tools")
+load("conda")
+setenv("SRW_ENV", "srw_app")
diff --git a/modulefiles/tasks/hera/aqm_ics.local.lua b/modulefiles/tasks/hera/aqm_ics.local.lua
index 4202cbf232..0e7132d749 100644
--- a/modulefiles/tasks/hera/aqm_ics.local.lua
+++ b/modulefiles/tasks/hera/aqm_ics.local.lua
@@ -1,2 +1,2 @@
-load("miniconda_regional_workflow_cmaq")
+load("python_srw_cmaq")
load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3"))
diff --git a/modulefiles/tasks/hera/fire_emission.local.lua b/modulefiles/tasks/hera/fire_emission.local.lua
index ef04528a3f..8aa737aa65 100644
--- a/modulefiles/tasks/hera/fire_emission.local.lua
+++ b/modulefiles/tasks/hera/fire_emission.local.lua
@@ -1,3 +1,3 @@
load("hpss")
-load("miniconda_regional_workflow_cmaq")
+load("python_srw_cmaq")
load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3"))
diff --git a/modulefiles/tasks/hera/miniconda_regional_workflow_cmaq.lua b/modulefiles/tasks/hera/miniconda_regional_workflow_cmaq.lua
deleted file mode 100644
index c0094e0ab2..0000000000
--- a/modulefiles/tasks/hera/miniconda_regional_workflow_cmaq.lua
+++ /dev/null
@@ -1,4 +0,0 @@
-prepend_path("MODULEPATH","/scratch1/NCEPDEV/nems/role.epic/miniconda3/modulefiles")
-load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0"))
-
-setenv("SRW_ENV", "regional_workflow_cmaq")
diff --git a/modulefiles/tasks/hera/nexus_emission.local.lua b/modulefiles/tasks/hera/nexus_emission.local.lua
index 5a7b0cece6..c7ac9dcb90 100644
--- a/modulefiles/tasks/hera/nexus_emission.local.lua
+++ b/modulefiles/tasks/hera/nexus_emission.local.lua
@@ -1,2 +1,2 @@
load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3"))
-load("miniconda_regional_workflow_cmaq")
+load("python_srw_cmaq")
diff --git a/modulefiles/tasks/hera/nexus_post_split.local.lua b/modulefiles/tasks/hera/nexus_post_split.local.lua
index 4202cbf232..0e7132d749 100644
--- a/modulefiles/tasks/hera/nexus_post_split.local.lua
+++ b/modulefiles/tasks/hera/nexus_post_split.local.lua
@@ -1,2 +1,2 @@
-load("miniconda_regional_workflow_cmaq")
+load("python_srw_cmaq")
load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3"))
diff --git a/modulefiles/tasks/hera/plot_allvars.local.lua b/modulefiles/tasks/hera/plot_allvars.local.lua
index 7934169824..b7e9528710 100644
--- a/modulefiles/tasks/hera/plot_allvars.local.lua
+++ b/modulefiles/tasks/hera/plot_allvars.local.lua
@@ -1,4 +1,2 @@
-prepend_path("MODULEPATH","/scratch1/NCEPDEV/nems/role.epic/miniconda3/modulefiles")
-load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0"))
-
-setenv("SRW_ENV", "regional_workflow")
+load("conda")
+setenv("SRW_ENV", "srw_graphics")
diff --git a/modulefiles/tasks/hera/point_source.local.lua b/modulefiles/tasks/hera/point_source.local.lua
index 07cacc8ce2..89feda226c 100644
--- a/modulefiles/tasks/hera/point_source.local.lua
+++ b/modulefiles/tasks/hera/point_source.local.lua
@@ -1 +1 @@
-load("miniconda_regional_workflow_cmaq")
+load("python_srw_cmaq")
diff --git a/modulefiles/tasks/hera/python_srw.lua b/modulefiles/tasks/hera/python_srw.lua
deleted file mode 100644
index 62ddf7d9e8..0000000000
--- a/modulefiles/tasks/hera/python_srw.lua
+++ /dev/null
@@ -1,4 +0,0 @@
-prepend_path("MODULEPATH","/scratch1/NCEPDEV/nems/role.epic/miniconda3/modulefiles")
-load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0"))
-
-setenv("SRW_ENV", "workflow_tools")
diff --git a/modulefiles/tasks/hercules/aqm_ics.local.lua b/modulefiles/tasks/hercules/aqm_ics.local.lua
index 5a7b0cece6..c7ac9dcb90 100644
--- a/modulefiles/tasks/hercules/aqm_ics.local.lua
+++ b/modulefiles/tasks/hercules/aqm_ics.local.lua
@@ -1,2 +1,2 @@
load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3"))
-load("miniconda_regional_workflow_cmaq")
+load("python_srw_cmaq")
diff --git a/modulefiles/tasks/hercules/fire_emission.local.lua b/modulefiles/tasks/hercules/fire_emission.local.lua
index 5a7b0cece6..c7ac9dcb90 100644
--- a/modulefiles/tasks/hercules/fire_emission.local.lua
+++ b/modulefiles/tasks/hercules/fire_emission.local.lua
@@ -1,2 +1,2 @@
load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3"))
-load("miniconda_regional_workflow_cmaq")
+load("python_srw_cmaq")
diff --git a/modulefiles/tasks/hercules/miniconda_regional_workflow_cmaq.lua b/modulefiles/tasks/hercules/miniconda_regional_workflow_cmaq.lua
deleted file mode 100644
index c785373386..0000000000
--- a/modulefiles/tasks/hercules/miniconda_regional_workflow_cmaq.lua
+++ /dev/null
@@ -1,4 +0,0 @@
-prepend_path("MODULEPATH","/work/noaa/epic/role-epic/contrib/hercules/miniconda3/modulefiles")
-load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0"))
-
-setenv("SRW_ENV", "regional_workflow_cmaq")
diff --git a/modulefiles/tasks/hercules/nexus_emission.local.lua b/modulefiles/tasks/hercules/nexus_emission.local.lua
index 5a7b0cece6..c7ac9dcb90 100644
--- a/modulefiles/tasks/hercules/nexus_emission.local.lua
+++ b/modulefiles/tasks/hercules/nexus_emission.local.lua
@@ -1,2 +1,2 @@
load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3"))
-load("miniconda_regional_workflow_cmaq")
+load("python_srw_cmaq")
diff --git a/modulefiles/tasks/hercules/nexus_post_split.local.lua b/modulefiles/tasks/hercules/nexus_post_split.local.lua
index 5a7b0cece6..c7ac9dcb90 100644
--- a/modulefiles/tasks/hercules/nexus_post_split.local.lua
+++ b/modulefiles/tasks/hercules/nexus_post_split.local.lua
@@ -1,2 +1,2 @@
load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3"))
-load("miniconda_regional_workflow_cmaq")
+load("python_srw_cmaq")
diff --git a/modulefiles/tasks/hercules/plot_allvars.local.lua b/modulefiles/tasks/hercules/plot_allvars.local.lua
index c995f83bb8..b49b8bb863 100644
--- a/modulefiles/tasks/hercules/plot_allvars.local.lua
+++ b/modulefiles/tasks/hercules/plot_allvars.local.lua
@@ -1,5 +1,3 @@
unload("python")
-append_path("MODULEPATH","/work/noaa/epic/role-epic/contrib/hercules/miniconda3/modulefiles")
-load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0"))
-
-setenv("SRW_ENV", "regional_workflow")
+load("conda")
+setenv("SRW_ENV", "srw_graphics")
diff --git a/modulefiles/tasks/hercules/point_source.local.lua b/modulefiles/tasks/hercules/point_source.local.lua
index 07cacc8ce2..89feda226c 100644
--- a/modulefiles/tasks/hercules/point_source.local.lua
+++ b/modulefiles/tasks/hercules/point_source.local.lua
@@ -1 +1 @@
-load("miniconda_regional_workflow_cmaq")
+load("python_srw_cmaq")
diff --git a/modulefiles/tasks/hercules/python_srw.lua b/modulefiles/tasks/hercules/python_srw.lua
index 157e6884ea..fe6c73a7d5 100644
--- a/modulefiles/tasks/hercules/python_srw.lua
+++ b/modulefiles/tasks/hercules/python_srw.lua
@@ -1,5 +1,3 @@
unload("python")
-append_path("MODULEPATH","/work/noaa/epic/role-epic/contrib/hercules/miniconda3/modulefiles")
-load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0"))
-
-setenv("SRW_ENV", "workflow_tools")
+load("conda")
+setenv("SRW_ENV", "srw_app")
diff --git a/modulefiles/tasks/jet/plot_allvars.local.lua b/modulefiles/tasks/jet/plot_allvars.local.lua
index ef4f248966..b7e9528710 100644
--- a/modulefiles/tasks/jet/plot_allvars.local.lua
+++ b/modulefiles/tasks/jet/plot_allvars.local.lua
@@ -1,4 +1,2 @@
-prepend_path("MODULEPATH","/mnt/lfs4/HFIP/hfv3gfs/role.epic/miniconda3/modulefiles")
-load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0"))
-
-setenv("SRW_ENV", "regional_workflow")
+load("conda")
+setenv("SRW_ENV", "srw_graphics")
diff --git a/modulefiles/tasks/jet/python_srw.lua b/modulefiles/tasks/jet/python_srw.lua
index 3c7987be18..a2dd45084c 100644
--- a/modulefiles/tasks/jet/python_srw.lua
+++ b/modulefiles/tasks/jet/python_srw.lua
@@ -1,4 +1,2 @@
-prepend_path("MODULEPATH","/mnt/lfs4/HFIP/hfv3gfs/role.epic/miniconda3/modulefiles")
-load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0"))
-
-setenv("SRW_ENV", "workflow_tools")
+load("conda")
+setenv("SRW_ENV", "srw_app")
diff --git a/modulefiles/tasks/noaacloud/python_srw.lua b/modulefiles/tasks/noaacloud/python_srw.lua
index ed1e785a4c..a2dd45084c 100644
--- a/modulefiles/tasks/noaacloud/python_srw.lua
+++ b/modulefiles/tasks/noaacloud/python_srw.lua
@@ -1,4 +1,2 @@
-prepend_path("MODULEPATH","/contrib/EPIC/miniconda3/modulefiles")
-load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0"))
-
-setenv("SRW_ENV", "workflow_tools")
+load("conda")
+setenv("SRW_ENV", "srw_app")
diff --git a/modulefiles/tasks/orion/aqm_ics.local.lua b/modulefiles/tasks/orion/aqm_ics.local.lua
index 5a7b0cece6..c7ac9dcb90 100644
--- a/modulefiles/tasks/orion/aqm_ics.local.lua
+++ b/modulefiles/tasks/orion/aqm_ics.local.lua
@@ -1,2 +1,2 @@
load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3"))
-load("miniconda_regional_workflow_cmaq")
+load("python_srw_cmaq")
diff --git a/modulefiles/tasks/orion/fire_emission.local.lua b/modulefiles/tasks/orion/fire_emission.local.lua
index 5a7b0cece6..c7ac9dcb90 100644
--- a/modulefiles/tasks/orion/fire_emission.local.lua
+++ b/modulefiles/tasks/orion/fire_emission.local.lua
@@ -1,2 +1,2 @@
load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3"))
-load("miniconda_regional_workflow_cmaq")
+load("python_srw_cmaq")
diff --git a/modulefiles/tasks/orion/miniconda_regional_workflow_cmaq.lua b/modulefiles/tasks/orion/miniconda_regional_workflow_cmaq.lua
deleted file mode 100644
index 890f642185..0000000000
--- a/modulefiles/tasks/orion/miniconda_regional_workflow_cmaq.lua
+++ /dev/null
@@ -1,4 +0,0 @@
-prepend_path("MODULEPATH","/work/noaa/epic/role-epic/contrib/orion/miniconda3/modulefiles")
-load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0"))
-
-setenv("SRW_ENV", "regional_workflow_cmaq")
diff --git a/modulefiles/tasks/orion/nexus_emission.local.lua b/modulefiles/tasks/orion/nexus_emission.local.lua
index 5a7b0cece6..c7ac9dcb90 100644
--- a/modulefiles/tasks/orion/nexus_emission.local.lua
+++ b/modulefiles/tasks/orion/nexus_emission.local.lua
@@ -1,2 +1,2 @@
load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3"))
-load("miniconda_regional_workflow_cmaq")
+load("python_srw_cmaq")
diff --git a/modulefiles/tasks/orion/nexus_post_split.local.lua b/modulefiles/tasks/orion/nexus_post_split.local.lua
index 5a7b0cece6..c7ac9dcb90 100644
--- a/modulefiles/tasks/orion/nexus_post_split.local.lua
+++ b/modulefiles/tasks/orion/nexus_post_split.local.lua
@@ -1,2 +1,2 @@
load(pathJoin("nco", os.getenv("nco_ver") or "4.9.3"))
-load("miniconda_regional_workflow_cmaq")
+load("python_srw_cmaq")
diff --git a/modulefiles/tasks/orion/plot_allvars.local.lua b/modulefiles/tasks/orion/plot_allvars.local.lua
index 94fed189be..b49b8bb863 100644
--- a/modulefiles/tasks/orion/plot_allvars.local.lua
+++ b/modulefiles/tasks/orion/plot_allvars.local.lua
@@ -1,5 +1,3 @@
unload("python")
-append_path("MODULEPATH","/work/noaa/epic/role-epic/contrib/orion/miniconda3/modulefiles")
-load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0"))
-
-setenv("SRW_ENV", "regional_workflow")
+load("conda")
+setenv("SRW_ENV", "srw_graphics")
diff --git a/modulefiles/tasks/orion/point_source.local.lua b/modulefiles/tasks/orion/point_source.local.lua
index 07cacc8ce2..89feda226c 100644
--- a/modulefiles/tasks/orion/point_source.local.lua
+++ b/modulefiles/tasks/orion/point_source.local.lua
@@ -1 +1 @@
-load("miniconda_regional_workflow_cmaq")
+load("python_srw_cmaq")
diff --git a/modulefiles/tasks/orion/python_srw.lua b/modulefiles/tasks/orion/python_srw.lua
index 449ce23f31..fe6c73a7d5 100644
--- a/modulefiles/tasks/orion/python_srw.lua
+++ b/modulefiles/tasks/orion/python_srw.lua
@@ -1,5 +1,3 @@
unload("python")
-append_path("MODULEPATH","/work/noaa/epic/role-epic/contrib/orion/miniconda3/modulefiles")
-load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0"))
-
-setenv("SRW_ENV", "workflow_tools")
+load("conda")
+setenv("SRW_ENV", "srw_app")
diff --git a/modulefiles/tasks/orion/run_vx.local.lua b/modulefiles/tasks/orion/run_vx.local.lua
index 62646d0992..850f296af2 100644
--- a/modulefiles/tasks/orion/run_vx.local.lua
+++ b/modulefiles/tasks/orion/run_vx.local.lua
@@ -18,8 +18,10 @@ setenv("METPLUS_VERSION", metplus_ver)
setenv("METPLUS_ROOT", base_metplus)
setenv("METPLUS_PATH", base_metplus)
+
if (mode() == "unload") then
unload(pathJoin("met", met_ver))
unload(pathJoin("metplus",metplus_ver))
end
+load("stack-python/3.9.7")
load("python_srw")
diff --git a/modulefiles/wflow_cheyenne.lua b/modulefiles/wflow_cheyenne.lua
index 9a7a37c0b5..f3fec407c7 100644
--- a/modulefiles/wflow_cheyenne.lua
+++ b/modulefiles/wflow_cheyenne.lua
@@ -12,13 +12,12 @@ load("rocoto")
unload("python")
+load("conda")
load("set_pythonpath")
-prepend_path("MODULEPATH","/glade/work/epicufsrt/contrib/miniconda3/modulefiles")
-load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0"))
if mode() == "load" then
LmodMsgRaw([===[Please do the following to activate conda:
- > conda activate workflow_tools
+ > conda activate srw_app
]===])
end
diff --git a/modulefiles/wflow_derecho.lua b/modulefiles/wflow_derecho.lua
index 8a71ed6cd7..f32cfdc298 100644
--- a/modulefiles/wflow_derecho.lua
+++ b/modulefiles/wflow_derecho.lua
@@ -12,13 +12,12 @@ load("rocoto")
unload("python")
+load("conda")
load("set_pythonpath")
-prepend_path("MODULEPATH","/glade/work/epicufsrt/contrib/derecho/miniconda3/modulefiles")
-load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0"))
if mode() == "load" then
LmodMsgRaw([===[Please do the following to activate conda:
- > conda activate workflow_tools
+ > conda activate srw_app
]===])
end
diff --git a/modulefiles/wflow_gaea-c5.lua b/modulefiles/wflow_gaea-c5.lua
index 12467dfd74..5f9d93e58f 100644
--- a/modulefiles/wflow_gaea-c5.lua
+++ b/modulefiles/wflow_gaea-c5.lua
@@ -7,8 +7,7 @@ whatis([===[Loads libraries needed for running the UFS SRW App on gaea ]===])
unload("python")
load("set_pythonpath")
-prepend_path("MODULEPATH","/lustre/f2/dev/role.epic/contrib/C5/miniconda3/modulefiles")
-load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0"))
+load("conda")
prepend_path("MODULEPATH","/lustre/f2/dev/role.epic/contrib/C5/rocoto/modulefiles")
load("rocoto")
@@ -16,6 +15,6 @@ pushenv("MKLROOT", "/opt/intel/oneapi/mkl/2023.1.0/")
if mode() == "load" then
LmodMsgRaw([===[Please do the following to activate conda:
- > conda activate workflow_tools
+ > conda activate srw_app
]===])
end
diff --git a/modulefiles/wflow_gaea.lua b/modulefiles/wflow_gaea.lua
index e9e59a41ca..7e30a20649 100644
--- a/modulefiles/wflow_gaea.lua
+++ b/modulefiles/wflow_gaea.lua
@@ -7,14 +7,13 @@ whatis([===[Loads libraries needed for running the UFS SRW App on gaea ]===])
unload("python")
load("set_pythonpath")
-prepend_path("MODULEPATH","/lustre/f2/dev/role.epic/contrib/miniconda3/modulefiles")
-load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0"))
+load("conda")
prepend_path("MODULEPATH","/lustre/f2/dev/role.epic/contrib/rocoto/modulefiles")
load("rocoto")
load("alps")
if mode() == "load" then
LmodMsgRaw([===[Please do the following to activate conda:
- > conda activate workflow_tools
+ > conda activate srw_app
]===])
end
diff --git a/modulefiles/wflow_hera.lua b/modulefiles/wflow_hera.lua
index 5d6ebeed1d..832f40bb73 100644
--- a/modulefiles/wflow_hera.lua
+++ b/modulefiles/wflow_hera.lua
@@ -7,12 +7,10 @@ whatis([===[Loads libraries needed for running the UFS SRW App on Hera ]===])
load("rocoto")
load("set_pythonpath")
-
-prepend_path("MODULEPATH","/scratch1/NCEPDEV/nems/role.epic/miniconda3/modulefiles")
-load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0"))
+load("conda")
if mode() == "load" then
LmodMsgRaw([===[Please do the following to activate conda:
- > conda activate workflow_tools
+ > conda activate srw_app
]===])
end
diff --git a/modulefiles/wflow_hercules.lua b/modulefiles/wflow_hercules.lua
index 471b1f8919..978ef14711 100644
--- a/modulefiles/wflow_hercules.lua
+++ b/modulefiles/wflow_hercules.lua
@@ -10,12 +10,11 @@ load("rocoto")
load("set_pythonpath")
unload("python")
-append_path("MODULEPATH","/work/noaa/epic/role-epic/contrib/hercules/miniconda3/modulefiles")
-load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0"))
+load("conda")
if mode() == "load" then
LmodMsgRaw([===[Please do the following to activate conda:
- > conda activate workflow_tools
+ > conda activate srw_app
]===])
end
diff --git a/modulefiles/wflow_jet.lua b/modulefiles/wflow_jet.lua
index 5f109429dc..c3e740537a 100644
--- a/modulefiles/wflow_jet.lua
+++ b/modulefiles/wflow_jet.lua
@@ -8,11 +8,10 @@ whatis([===[Loads libraries needed for running the UFS SRW App on Jet ]===])
load("rocoto")
load("set_pythonpath")
-prepend_path("MODULEPATH","/mnt/lfs4/HFIP/hfv3gfs/role.epic/miniconda3/modulefiles")
-load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0"))
+load("conda")
if mode() == "load" then
LmodMsgRaw([===[Please do the following to activate conda:
- > conda activate workflow_tools
+ > conda activate srw_app
]===])
end
diff --git a/modulefiles/wflow_linux.lua b/modulefiles/wflow_linux.lua
index 3fb5d1123a..4c2305f047 100644
--- a/modulefiles/wflow_linux.lua
+++ b/modulefiles/wflow_linux.lua
@@ -6,22 +6,7 @@ whatis([===[This module sets a path for conda environment needed for running the
setenv("CMAKE_Platform", "linux")
--- Conda initialization function
-function init_conda(conda_path)
- local shell=myShellType()
- local conda_file
- if shell == "csh" then
- conda_file=pathJoin(conda_path,"etc/profile.d/conda.csh")
- else
- conda_file=pathJoin(conda_path,"etc/profile.d/conda.sh")
- end
- local mcmd="source " .. conda_file
- execute{cmd=mcmd, modeA={"load"}}
-end
-
--- initialize conda
-local conda_path="/home/username/miniconda3"
-init_conda(conda_path)
+load("conda")
-- add rocoto to path
local rocoto_path="/home/username/rocoto"
@@ -37,6 +22,6 @@ load("set_pythonpath")
-- display conda activation message
if mode() == "load" then
LmodMsgRaw([===[Please do the following to activate conda:
- > conda activate workflow_tools
+ > conda activate srw_app
]===])
end
diff --git a/modulefiles/wflow_macos.lua b/modulefiles/wflow_macos.lua
index 6ee6022b20..987adcdd52 100644
--- a/modulefiles/wflow_macos.lua
+++ b/modulefiles/wflow_macos.lua
@@ -6,22 +6,7 @@ whatis([===[This module activates conda environment for running the UFS SRW App
setenv("CMAKE_Platform", "macos")
--- Conda initialization function
-function init_conda(conda_path)
- local shell=myShellType()
- local conda_file
- if shell == "csh" then
- conda_file=pathJoin(conda_path,"etc/profile.d/conda.csh")
- else
- conda_file=pathJoin(conda_path,"etc/profile.d/conda.sh")
- end
- local mcmd="source " .. conda_file
- execute{cmd=mcmd, modeA={"load"}}
-end
-
--- initialize conda
-local conda_path="/Users/username/miniconda3"
-init_conda(conda_path)
+load("conda")
-- add rocoto to path
local rocoto_path="/Users/username/rocoto"
@@ -37,7 +22,7 @@ load("set_pythonpath")
-- display conda activation message
if mode() == "load" then
LmodMsgRaw([===[Please do the following to activate conda virtual environment:
- > conda activate workflow_tools"
+ > conda activate srw_app
]===])
end
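
Note on the generic Linux/macOS changes above: with the inline init_conda() helper removed, load("conda") now expects a conda modulefile to be reachable on MODULEPATH. If the repository does not already provide one for your standalone system, a minimal local conda.lua can reproduce the sourcing that the removed helper performed; the sketch below is reconstructed from that deleted code, and the install path is an assumption that should point at your own miniconda3/anaconda3 location.

```
-- Minimal sketch of a local conda.lua modulefile (name and path assumed);
-- it reproduces the sourcing logic of the removed init_conda() helper.
help([[Initializes a local conda installation for the UFS SRW App]])

-- Adjust this to your own conda install
local conda_path = "/home/username/miniconda3"

-- Pick the conda hook script that matches the current shell
local shell = myShellType()
local conda_file
if shell == "csh" then
  conda_file = pathJoin(conda_path, "etc/profile.d/conda.csh")
else
  conda_file = pathJoin(conda_path, "etc/profile.d/conda.sh")
end

-- Source the conda hook only when the module is loaded
execute{cmd="source " .. conda_file, modeA={"load"}}
```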
diff --git a/modulefiles/wflow_noaacloud.lua b/modulefiles/wflow_noaacloud.lua
index fdfb7ef14d..e4a355f0b2 100644
--- a/modulefiles/wflow_noaacloud.lua
+++ b/modulefiles/wflow_noaacloud.lua
@@ -10,8 +10,7 @@ load("rocoto")
load("set_pythonpath")
-prepend_path("MODULEPATH","/contrib/EPIC/miniconda3/modulefiles")
-load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0"))
+load("conda")
setenv("PROJ_LIB","/contrib/EPIC/miniconda3/4.12.0/envs/regional_workflow/share/proj")
setenv("OPT","/contrib/EPIC/hpc-modules")
@@ -20,6 +19,6 @@ prepend_path("PATH","/contrib/EPIC/bin")
if mode() == "load" then
LmodMsgRaw([===[Please do the following to activate conda:
- > conda activate workflow_tools
+ > conda activate srw_app
]===])
end
diff --git a/modulefiles/wflow_orion.lua b/modulefiles/wflow_orion.lua
index ed72ff2245..0a9ef26fce 100644
--- a/modulefiles/wflow_orion.lua
+++ b/modulefiles/wflow_orion.lua
@@ -11,12 +11,11 @@ load("wget")
load("set_pythonpath")
unload("python")
-append_path("MODULEPATH","/work/noaa/epic/role-epic/contrib/orion/miniconda3/modulefiles")
-load(pathJoin("miniconda3", os.getenv("miniconda3_ver") or "4.12.0"))
+load("conda")
if mode() == "load" then
LmodMsgRaw([===[Please do the following to activate conda:
- > conda activate workflow_tools
+ > conda activate srw_app
]===])
end
diff --git a/modulefiles/wflow_singularity.lua b/modulefiles/wflow_singularity.lua
index 309c5eac23..3e1de292e1 100644
--- a/modulefiles/wflow_singularity.lua
+++ b/modulefiles/wflow_singularity.lua
@@ -6,9 +6,8 @@ a singularity container
whatis([===[Loads libraries needed for running the UFS SRW App in a singularity container]===])
load("set_pythonpath")
-append_path("MODULEPATH","/opt/hpc-modules/modulefiles/core")
-load("miniconda3")
+load("conda")
if mode() == "load" then
- execute{cmd="conda activate workflow_tools", modeA={"load"}}
+ execute{cmd="conda activate srw_app", modeA={"load"}}
end
diff --git a/tests/README.md b/tests/README.md
index 3670d50635..2f1613254a 100644
--- a/tests/README.md
+++ b/tests/README.md
@@ -46,10 +46,12 @@ The unit tests in the test_python/ directory test various parts of the workflow
### Set PYTHONPATH
-First, you will need to set the PYTHONPATH environment variable to include the ush/ directory:
+First, you will need to set the PYTHONPATH environment variable to include the ush/ directory and
+a few of the workflow-tools subdirectories. From the top level of the ufs-srweather-app clone,
+run the following command:
```
-export PYTHONPATH=/path/to/ufs-srweather-app/ush:${PYTHONPATH}
+export PYTHONPATH=$(pwd)/ush:$(pwd)/ush/python_utils/workflow-tools:$(pwd)/ush/python_utils/workflow-tools/src
```
### Set up HPSS tests
diff --git a/ush/load_modules_run_task.sh b/ush/load_modules_run_task.sh
index 24c8de80c3..8765c3f698 100755
--- a/ush/load_modules_run_task.sh
+++ b/ush/load_modules_run_task.sh
@@ -172,11 +172,11 @@ specified task (task_name) failed:
task_name = \"${task_name}\"
modulefile_local = \"${modulefile_local}\"
modules_dir = \"${modules_dir}\""
-elif [ -f ${modules_dir}/python_srw.lua ] ; then
+elif [ -f ${default_modules_dir}/python_srw.lua ] ; then
module load python_srw || print_err_msg_exit "\
Loading SRW common python module failed. Expected python_srw.lua
in the modules directory here:
- modules_dir = \"${modules_dir}\""
+ modules_dir = \"${default_modules_dir}\""
fi
module list
diff --git a/ush/load_modules_wflow.sh b/ush/load_modules_wflow.sh
index 7631295d76..67499f4888 100755
--- a/ush/load_modules_wflow.sh
+++ b/ush/load_modules_wflow.sh
@@ -62,7 +62,7 @@ task failed:
$has_mu && set +u
if [ ! -z $(command -v conda) ]; then
- conda activate workflow_tools
+ conda activate srw_app
fi
$has_mu && set -u