diff --git a/.github/workflows/build-and-test.yml b/.github/workflows/build-and-test.yml new file mode 100644 index 0000000..5529204 --- /dev/null +++ b/.github/workflows/build-and-test.yml @@ -0,0 +1,508 @@ +name: Build and Test + +on: + push: + branches: [ main, develop, feature/** ] + tags: + - 'v*' + pull_request: + branches: [ main, develop ] + workflow_dispatch: + +env: + CMAKE_VERSION: '3.27.6' + BOOST_VERSION: '1.83.0' + HDF5_VERSION: '1.14.3' + PYTHON_VERSION: '3.11' + UDA_VERSION: '2.7.6' + FMT_VERSION: '9.1.0' + +jobs: + build-linux: + name: Build on ${{ matrix.name }} + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + include: + - os: ubuntu-24.04 + name: "Ubuntu 24.04" + cc: gcc-14 + cxx: g++-14 + matlab_release: R2023b + backend_hdf5: ON + backend_mdsplus: OFF # Tests will run with HDF5 backend only + backend_uda: OFF + install_compiler: false # gcc-14 is default + use_system_packages: false + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: ${{ env.PYTHON_VERSION }} + + - name: Set up MATLAB + uses: matlab-actions/setup-matlab@v2 + with: + release: ${{ matrix.matlab_release }} + products: MATLAB + + - name: Verify MATLAB installation + run: | + which matlab || echo "MATLAB not in PATH" + matlab -batch "version" || echo "MATLAB batch mode failed" + echo "MATLAB_ROOT=$(dirname $(dirname $(which matlab)))" >> $GITHUB_ENV + + - name: Cache pip packages + uses: actions/cache@v4 + with: + path: ~/.cache/pip + key: ${{ runner.os }}-pip-${{ env.PYTHON_VERSION }}-${{ hashFiles('**/requirements.txt') }}-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-pip-${{ env.PYTHON_VERSION }}-${{ hashFiles('**/requirements.txt') }}- + ${{ runner.os }}-pip-${{ env.PYTHON_VERSION }}- + ${{ runner.os }}-pip- + + - name: Install system dependencies + run: | + # Detect package manager and install dependencies + if command -v 
apt-get &> /dev/null; then + sudo apt-get update + sudo apt-get install -y \ + build-essential \ + cmake \ + pkg-config \ + wget + + # Install specific compiler version if needed + if [ "${{ matrix.install_compiler }}" = "true" ]; then + sudo apt-get install -y ${{ matrix.cxx }} ${{ matrix.cc }} + fi + elif command -v dnf &> /dev/null; then + # AlmaLinux/RHEL 8+ with full backend support + dnf update -y + + # Install EPEL and PowerTools first (needed for some devel packages) + dnf install -y epel-release + dnf config-manager --set-enabled powertools || dnf config-manager --set-enabled PowerTools || true + + # Install base build tools + dnf install -y \ + gcc gcc-c++ \ + cmake \ + pkgconfig \ + wget \ + make \ + openssl-devel \ + git \ + curl \ + tar \ + gzip \ + ca-certificates \ + python3 \ + python3-pip \ + java-latest-openjdk + + # Update CA certificates for SSL issues + update-ca-trust + + # Install backend-specific packages if using system packages + if [ "${{ matrix.use_system_packages }}" = "true" ]; then + # Install MDSPlus repo (with --nogpgcheck for SSL issues) + rpm -ivh --nogpgcheck https://www.mdsplus.org/dist/rhel8/stable/RPMS/noarch/mdsplus-repo-7.142-81.el8.noarch.rpm || true + + # Install system packages for backends + dnf install -y \ + boost-devel \ + hdf5-devel \ + capnproto capnproto-devel \ + fmt-devel \ + spdlog-devel \ + libtirpc-devel \ + libxml2-devel || echo "Some packages failed to install, continuing..." + + # Install MDSPlus if backend is enabled + if [ "${{ matrix.backend_mdsplus }}" = "ON" ]; then + dnf install -y --nogpgcheck mdsplus mdsplus-devel || echo "MDSPlus installation failed, continuing..." 
+ fi + fi + elif command -v yum &> /dev/null; then + # CentOS/RHEL 7 + yum install -y \ + gcc gcc-c++ \ + cmake \ + pkgconfig \ + wget \ + make \ + git \ + curl + else + echo "Unsupported package manager" + exit 1 + fi + + - name: Install Python dependencies + run: | + python -m pip install --upgrade pip + pip install sphinx sphinx-rtd-theme myst-parser sphinx-design + # Try to install saxonche, but continue if it fails (not available for all Python versions) + pip install saxonche 2>/dev/null || echo "saxonche not available, continuing without it" + + - name: Build and install UDA + if: matrix.backend_uda == 'ON' && matrix.use_system_packages == true + run: | + cd $HOME + echo "Downloading UDA ${UDA_VERSION}..." + curl -LO https://github.com/ukaea/UDA/archive/refs/tags/${UDA_VERSION}.tar.gz + tar zxf ${UDA_VERSION}.tar.gz + cd UDA-${UDA_VERSION} + + echo "Configuring UDA..." + cmake -B build . \ + -DCMAKE_BUILD_TYPE=Release \ + -DBUILD_SHARED_LIBS=ON \ + -DSSLAUTHENTICATION=ON \ + -DCLIENT_ONLY=ON \ + -DENABLE_CAPNP=ON + + echo "Building UDA..." + cmake --build build -j$(nproc) + + echo "Installing UDA..." + cmake --install build + + echo "UDA installation complete" + + - name: Cache Boost installation + if: matrix.use_system_packages == false + id: cache-boost + uses: actions/cache@v4 + with: + path: ~/boost + key: boost-${{ env.BOOST_VERSION }}-${{ runner.os }}-${{ matrix.name }}-${{ github.sha }} + restore-keys: | + boost-${{ env.BOOST_VERSION }}-${{ runner.os }}-${{ matrix.name }}- + boost-${{ env.BOOST_VERSION }}-${{ runner.os }}- + + - name: Download and install Boost + if: matrix.use_system_packages == false && steps.cache-boost.outputs.cache-hit != 'true' + run: | + cd $HOME + BOOST_VERSION_UNDERSCORE=$(echo $BOOST_VERSION | tr '.' 
'_') + + # Double-check if boost is already properly installed + if [ -d "$HOME/boost/lib" ] && [ "$(find $HOME/boost/lib -name 'libboost_system*' 2>/dev/null | wc -l)" -gt 0 ]; then + echo "Boost already found and verified in cache, skipping download and build" + exit 0 + fi + + echo "Downloading Boost ${BOOST_VERSION}..." + # Try SourceForge as primary (JFrog Artifactory has been deactivated) + BOOST_URL_SF="https://sourceforge.net/projects/boost/files/boost/${BOOST_VERSION}/boost_${BOOST_VERSION_UNDERSCORE}.tar.gz/download" + if ! wget --retry-connrefused --waitretry=1 --read-timeout=20 --timeout=15 -t 3 \ + -O boost_${BOOST_VERSION_UNDERSCORE}.tar.gz "$BOOST_URL_SF"; then + # Fallback to archives.boost.io + echo "SourceForge download failed, trying archives.boost.io..." + BOOST_URL_ARCHIVE="https://archives.boost.io/release/${BOOST_VERSION}/source/boost_${BOOST_VERSION_UNDERSCORE}.tar.gz" + if ! wget --retry-connrefused --waitretry=1 --read-timeout=20 --timeout=15 -t 3 \ + -O boost_${BOOST_VERSION_UNDERSCORE}.tar.gz "$BOOST_URL_ARCHIVE"; then + # Last resort: try GitHub mirror + echo "archives.boost.io failed, trying GitHub mirror..." + BOOST_URL_GH="https://github.com/boostorg/boost/releases/download/boost-${BOOST_VERSION}/boost-${BOOST_VERSION}-cmake.tar.gz" + wget --retry-connrefused --waitretry=1 --read-timeout=20 --timeout=15 -t 3 \ + -O boost_${BOOST_VERSION_UNDERSCORE}.tar.gz "$BOOST_URL_GH" + fi + fi + + echo "Extracting Boost..." + tar xzf boost_${BOOST_VERSION_UNDERSCORE}.tar.gz + cd boost_${BOOST_VERSION_UNDERSCORE} + + echo "Building Boost libraries..." 
+ ./bootstrap.sh --prefix=$HOME/boost --with-libraries=system,filesystem,log,thread + ./b2 install -j$(nproc) > /dev/null + + echo "Boost installation complete" + + - name: Set Boost environment variables + if: matrix.use_system_packages == false + run: | + echo "BOOST_ROOT=$HOME/boost" >> $GITHUB_ENV + echo "$HOME/boost/lib" >> $GITHUB_PATH + + - name: Cache HDF5 installation + if: matrix.use_system_packages == false + id: cache-hdf5 + uses: actions/cache@v4 + with: + path: ~/hdf5 + key: hdf5-${{ env.HDF5_VERSION }}-${{ runner.os }}-${{ matrix.name }}-${{ github.sha }} + restore-keys: | + hdf5-${{ env.HDF5_VERSION }}-${{ runner.os }}-${{ matrix.name }}- + hdf5-${{ env.HDF5_VERSION }}-${{ runner.os }}- + + - name: Download and install HDF5 + if: matrix.use_system_packages == false && steps.cache-hdf5.outputs.cache-hit != 'true' + run: | + cd $HOME + HDF5_VERSION_SHORT=$(echo $HDF5_VERSION | cut -d. -f1,2) + HDF5_URL="https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-${HDF5_VERSION_SHORT}/hdf5-${HDF5_VERSION}/src/hdf5-${HDF5_VERSION}.tar.gz" + + # Double-check if HDF5 is already properly installed + if [ -d "$HOME/hdf5/lib" ] && [ "$(find $HOME/hdf5/lib -name 'libhdf5*' 2>/dev/null | wc -l)" -gt 0 ]; then + echo "HDF5 already found and verified in cache, skipping download and build" + exit 0 + fi + + echo "Downloading HDF5 ${HDF5_VERSION}..." + # Try with retries + if ! wget --retry-connrefused --waitretry=1 --read-timeout=20 --timeout=15 -t 3 \ + -O hdf5-${HDF5_VERSION}.tar.gz "$HDF5_URL"; then + # Try alternative mirror + echo "Primary download failed, trying alternative URL..." + HDF5_URL_ALT="https://hdf-wordpress-1.s3.amazonaws.com/wp-content/uploads/manual/HDF5/HDF5_${HDF5_VERSION_SHORT}/hdf5-${HDF5_VERSION}.tar.gz" + wget --retry-connrefused --waitretry=1 --read-timeout=20 --timeout=15 -t 3 \ + -O hdf5-${HDF5_VERSION}.tar.gz "$HDF5_URL_ALT" + fi + + echo "Extracting HDF5..." 
+ tar xzf hdf5-${HDF5_VERSION}.tar.gz + cd hdf5-${HDF5_VERSION} + + echo "Configuring HDF5..." + ./configure --prefix=$HOME/hdf5 --enable-cxx > /dev/null + + echo "Building HDF5..." + make -j$(nproc) > /dev/null + make install > /dev/null + + echo "HDF5 installation complete" + + - name: Set HDF5 environment variables + if: matrix.use_system_packages == false + run: | + echo "HDF5_ROOT=$HOME/hdf5" >> $GITHUB_ENV + echo "$HOME/hdf5/bin" >> $GITHUB_PATH + echo "LD_LIBRARY_PATH=$HOME/hdf5/lib:$LD_LIBRARY_PATH" >> $GITHUB_ENV + + - name: Configure CMake + run: | + # Set Boost and HDF5 paths based on installation method + if [ "${{ matrix.use_system_packages }}" = "true" ]; then + BOOST_OPTS="" + HDF5_OPTS="" + PREFIX_PATH="" + else + BOOST_OPTS="-DBoost_NO_BOOST_CMAKE=ON -DBOOST_ROOT=$HOME/boost -DBoost_INCLUDE_DIR=$HOME/boost/include" + HDF5_OPTS="-DHDF5_ROOT=$HOME/hdf5" + PREFIX_PATH="-DCMAKE_PREFIX_PATH=$HOME/boost;$HOME/hdf5" + fi + + MATLAB_OPTS="-DMatlab_ROOT_DIR=${MATLAB_ROOT}" + + # Configure CMake with retry logic for git operations + # Set git retry parameters to handle transient server errors + export GIT_CURL_VERBOSE=1 + for attempt in 1 2 3; do + echo "CMake configuration attempt $attempt..." 
+ cmake -B build \ + -DCMAKE_INSTALL_PREFIX=$PWD/install \ + -DCMAKE_C_COMPILER=${{ matrix.cc }} \ + -DCMAKE_CXX_COMPILER=${{ matrix.cxx }} \ + -DCMAKE_CXX_STANDARD=17 \ + -DCMAKE_BUILD_TYPE=RelWithDebInfo \ + $PREFIX_PATH \ + $MATLAB_OPTS \ + -DAL_BACKEND_HDF5=${{ matrix.backend_hdf5 }} \ + -DAL_BACKEND_MDSPLUS=${{ matrix.backend_mdsplus }} \ + -DAL_BACKEND_UDA=${{ matrix.backend_uda }} \ + -DAL_BUILD_MDSPLUS_MODELS=OFF \ + -DAL_DOWNLOAD_DEPENDENCIES=ON \ + -DAL_CORE_GIT_REPOSITORY=https://github.com/iterorganization/IMAS-Core.git \ + -DDD_GIT_REPOSITORY=https://github.com/iterorganization/IMAS-Data-Dictionary.git \ + -DAL_CORE_VERSION=develop \ + -DDD_VERSION=main \ + -DAL_EXAMPLES=ON \ + -DAL_TESTS=ON \ + -DAL_PLUGINS=OFF \ + -DAL_HLI_DOCS=ON \ + $BOOST_OPTS \ + $HDF5_OPTS && break + + if [ $attempt -lt 3 ]; then + echo "CMake configuration failed, retrying in 10 seconds..." + rm -rf build + sleep 10 + else + echo "CMake configuration failed after 3 attempts" + exit 1 + fi + done + + - name: Build + run: | + make -C build -j$(nproc) all + + - name: Run tests + run: | + # Create test database + mkdir -p testdb + export USER="$PWD/testdb" + # Ensure MATLAB libraries are in the path + export LD_LIBRARY_PATH=${MATLAB_ROOT}/bin/glnxa64:${LD_LIBRARY_PATH} + # Add MATLAB to PATH + export PATH=${MATLAB_ROOT}/bin:$PATH + + # Verify MATLAB is accessible + echo "MATLAB_ROOT: ${MATLAB_ROOT}" + echo "PATH: $PATH" + echo "LD_LIBRARY_PATH: $LD_LIBRARY_PATH" + which matlab || echo "MATLAB not in PATH" + + # Check if tests are available + cd build + echo "Available tests:" + ctest -N + + # Check if at least one backend is enabled + if [ "${{ matrix.backend_hdf5 }}" = "ON" ] || [ "${{ matrix.backend_mdsplus }}" = "ON" ]; then + echo "Running tests with available backend(s)..." 
+ echo "HDF5: ${{ matrix.backend_hdf5 }}, MDSplus: ${{ matrix.backend_mdsplus }}" + ctest --output-on-failure --verbose + else + echo "No backends enabled, skipping tests" + fi + continue-on-error: true + + - name: Install + run: | + make -C build install + + - name: List installed files + run: | + ls -lR install/ + + - name: Upload build artifacts + if: always() + uses: actions/upload-artifact@v4 + with: + name: build-artifacts-${{ matrix.name }} + path: | + build/CMakeCache.txt + build/CMakeFiles/CMakeError.log + build/CMakeFiles/CMakeOutput.log + build/Testing/ + retention-days: 7 + + - name: Upload installation + if: success() + uses: actions/upload-artifact@v4 + with: + name: installation-${{ matrix.name }} + path: install/ + retention-days: 7 + + build-docs-only: + name: Build Documentation Only + runs-on: ubuntu-22.04 + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: ${{ env.PYTHON_VERSION }} + + - name: Install system dependencies + run: | + sudo apt-get update + sudo apt-get install -y \ + cmake \ + pkg-config \ + wget + + - name: Install Python dependencies + run: | + python -m pip install --upgrade pip + pip install sphinx sphinx-rtd-theme myst-parser sphinx-design saxonche + + - name: Configure CMake for docs only + run: | + # Configure CMake with retry logic for git operations + export GIT_CURL_VERBOSE=1 + for attempt in 1 2 3; do + echo "CMake docs configuration attempt $attempt..." + cmake -B build \ + -DCMAKE_INSTALL_PREFIX=$PWD/install \ + -DAL_DOWNLOAD_DEPENDENCIES=ON \ + -DAL_CORE_GIT_REPOSITORY=https://github.com/iterorganization/IMAS-Core.git \ + -DDD_GIT_REPOSITORY=https://github.com/iterorganization/IMAS-Data-Dictionary.git \ + -DAL_CORE_VERSION=develop \ + -DDD_VERSION=main \ + -DAL_HLI_DOCS=ON \ + -DAL_DOCS_ONLY=ON && break + + if [ $attempt -lt 3 ]; then + echo "CMake configuration failed, retrying in 10 seconds..." 
+ rm -rf build + sleep 10 + else + echo "CMake configuration failed after 3 attempts" + exit 1 + fi + done + + - name: Build documentation + run: | + make -C build al-matlab-docs || true + + - name: Check for documentation warnings + run: | + if [ -f build/doc_warnings.log ]; then + echo "Documentation warnings found:" + cat build/doc_warnings.log + fi + + - name: Debug - Check documentation build output + if: always() + run: | + echo "Checking for documentation files..." + # Check the actual doc directory where Sphinx builds + if [ -d doc/_build/html/ ]; then + echo "Documentation directory found at doc/_build/html/:" + echo "Files count: $(find doc/_build/html/ -type f | wc -l)" + ls -lR doc/_build/html/ | head -100 + elif [ -d build/doc/_build/html/ ]; then + echo "Documentation directory found at build/doc/_build/html/:" + echo "Files count: $(find build/doc/_build/html/ -type f | wc -l)" + ls -lR build/doc/_build/html/ | head -100 + else + echo "Documentation directory NOT found at expected locations" + echo "Searching for html directories:" + find . -type d -name "*html*" 2>/dev/null | head -20 + echo "Searching for _build directories:" + find . 
-type d -name "_build" 2>/dev/null | head -10 + fi + + - name: Upload documentation + if: always() + uses: actions/upload-artifact@v4 + with: + name: documentation + path: doc/_build/html/ + retention-days: 30 + if-no-files-found: warn + compression-level: 6 + overwrite: false + include-hidden-files: false + + # build-release job removed - toolbox creation is disabled diff --git a/.gitignore b/.gitignore index cdacd43..06c96e8 100644 --- a/.gitignore +++ b/.gitignore @@ -14,3 +14,7 @@ tests/IDS_list.h /html/ /latex/ CMakeUserPresets.json +test-install +.vscode/ +*.err +*.out diff --git a/.readthedocs.yml b/.readthedocs.yml new file mode 100644 index 0000000..2e5a04c --- /dev/null +++ b/.readthedocs.yml @@ -0,0 +1,32 @@ +# Read the Docs configuration file for IMAS-MATLAB +# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details + +# Required +version: 2 + +# Set the OS, Python version and other tools you might need +build: + os: ubuntu-22.04 + tools: + python: "3.11" + +# Build documentation in the "doc" directory with Sphinx +sphinx: + configuration: doc/conf.py + fail_on_warning: false + +# Optionally build your docs in additional formats such as PDF and ePub +formats: + - pdf + - epub + +# Optional but recommended, declare the Python requirements required +# to build your documentation +python: + install: + - requirements: doc/doc_common/requirements.txt + +# Submodules configuration (optional) +submodules: + include: all + recursive: true diff --git a/CMakeLists.txt b/CMakeLists.txt index 9106579..2eecac0 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,19 +1,71 @@ # CMake build configuration for the Access Layer C++ HLI cmake_minimum_required( VERSION 3.16 ) +if(POLICY CMP0144) + cmake_policy(SET CMP0144 NEW) +endif() +if(WIN32) + if(DEFINED CMAKE_TOOLCHAIN_FILE AND + CMAKE_TOOLCHAIN_FILE MATCHES "vcpkg.cmake") + message(STATUS "vcpkg in use") + set(CMAKE_FIND_PACKAGE_PREFER_CONFIG ON) + set(CMAKE_FIND_PACKAGE_PREFER_CMAKE_PATH ON) + + 
if(DEFINED VCPKG_INSTALLED_DIR AND DEFINED VCPKG_TARGET_TRIPLET) + list(APPEND CMAKE_PREFIX_PATH + "${VCPKG_INSTALLED_DIR}/${VCPKG_TARGET_TRIPLET}") + endif() + + else() + message(STATUS "vcpkg not in use") + endif() +endif() -# Configuration options for common assets +# Local paths for IMAS-MATLAB ################################################################################ +# Include local paths to eliminate AL_COMMON_PATH dependency +# Use full path since CMAKE_MODULE_PATH hasn't been configured yet +include(${CMAKE_CURRENT_SOURCE_DIR}/common/cmake/ALLocalPaths.cmake) option( AL_DOWNLOAD_DEPENDENCIES "Automatically download assets from the AL git repository" ON ) -set( AL_CORE_GIT_REPOSITORY "ssh://git@git.iter.org/imas/al-core.git" CACHE STRING "Git repository of AL-core" ) +set( AL_CORE_GIT_REPOSITORY "https://github.com/iterorganization/IMAS-Core.git" CACHE STRING "Git repository of AL-core" ) set( AL_CORE_VERSION "main" CACHE STRING "Git commit/tag/branch of AL-core" ) include(FetchContent) - -# Load common assets +# Load AL-core library (used for the shared library) ################################################################################ -if( DEFINED ENV{AL_COMMON_PATH} ) - # Take common assets from the path in this environment variable instead of al-core - set( AL_COMMON_PATH $ENV{AL_COMMON_PATH} ) +if(WIN32) + if( ${AL_DOWNLOAD_DEPENDENCIES} ) + # FetchContent_Declare causing recursive call wrapped by vcpkg to _find_package + # So manually clone al-core here. 
Need fix in later release + set( al-core_SOURCE_DIR "${CMAKE_CURRENT_BINARY_DIR}/_deps/al-core-src" ) + if( NOT EXISTS "${al-core_SOURCE_DIR}/.git" ) + message( STATUS "Cloning al-core from ${AL_CORE_GIT_REPOSITORY}" ) + execute_process( + COMMAND git clone "${AL_CORE_GIT_REPOSITORY}" "${al-core_SOURCE_DIR}" + RESULT_VARIABLE _GIT_CLONE_RESULT + ERROR_VARIABLE _GIT_CLONE_ERROR + ) + if( _GIT_CLONE_RESULT ) + message( FATAL_ERROR "Failed to clone al-core: ${_GIT_CLONE_ERROR}" ) + endif() + endif() + # Checkout the specified version + execute_process( + COMMAND git fetch origin + WORKING_DIRECTORY "${al-core_SOURCE_DIR}" + RESULT_VARIABLE _GIT_FETCH_RESULT + ) + execute_process( + COMMAND git checkout "${AL_CORE_VERSION}" + WORKING_DIRECTORY "${al-core_SOURCE_DIR}" + RESULT_VARIABLE _GIT_CHECKOUT_RESULT + ERROR_VARIABLE _GIT_CHECKOUT_ERROR + ) + if( _GIT_CHECKOUT_RESULT ) + message( FATAL_ERROR "Failed to checkout ${AL_CORE_VERSION}: ${_GIT_CHECKOUT_ERROR}" ) + endif() + elseif ( ${AL_DEVELOPMENT_LAYOUT} ) + set( al-core_SOURCE_DIR "${CMAKE_CURRENT_SOURCE_DIR}/../al-core" ) + endif() else() if( ${AL_DOWNLOAD_DEPENDENCIES} ) # Download common assets from the ITER git: @@ -22,17 +74,16 @@ else() GIT_REPOSITORY "${AL_CORE_GIT_REPOSITORY}" GIT_TAG "${AL_CORE_VERSION}" ) - else() + FetchContent_MakeAvailable( al-core ) + elseif ( ${AL_DEVELOPMENT_LAYOUT} ) FetchContent_Declare( al-core SOURCE_DIR "${CMAKE_CURRENT_SOURCE_DIR}/../al-core" ) + FetchContent_MakeAvailable( al-core ) endif() - FetchContent_MakeAvailable( al-core ) - set( AL_COMMON_PATH "${al-core_SOURCE_DIR}/common" ) + endif() -add_subdirectory( ${AL_COMMON_PATH} _common ) - # Define project ################################################################################ @@ -63,16 +114,14 @@ include( ALCore ) if( AL_DOCS_ONLY ) return() endif() - - # Dependencies ################################################################################ - -find_package(Java REQUIRED) -find_package( Matlab REQUIRED ) 
-find_package( SaxonHE ) - - +find_package( Python3 REQUIRED COMPONENTS Interpreter ) +if(WIN32) + find_package(Matlab REQUIRED MODULE) +else() + find_package( Matlab REQUIRED ) +endif() # Utility sources and target ################################################################################ @@ -83,19 +132,47 @@ set( MEX_UTIL_SOURCES src/imas_mex_structs.c src/imas_mex_utils.c ) +if(WIN32) + # This should be built as a shared library (not a MEX file) with name libal-mex.so/.dll + # It's a utility library that MEX files link to, so use add_library instead of matlab_add_mex + add_library( al-mex SHARED ${MEX_UTIL_SOURCES} ) + target_link_libraries( al-mex PUBLIC al Matlab::mex Matlab::mx ) + target_include_directories( al-mex PRIVATE src ) + target_include_directories( al-mex PUBLIC ${Matlab_INCLUDE_DIRS} ) + + # Use separate (non-interleaved) complex API for compatibility + target_compile_definitions( al-mex PUBLIC) + + # Define AL_MEX_BUILDING_DLL when building the al-mex library + target_compile_definitions( al-mex PRIVATE AL_MEX_BUILDING_DLL ) + + # Compile as C (al-core exports C symbols via extern "C") + set_source_files_properties(${MEX_UTIL_SOURCES} PROPERTIES LANGUAGE C) + # Force C compilation on Windows + target_compile_options(al-mex PRIVATE /TC) + # Link as C library (al exports C symbols) + set_target_properties(al-mex PROPERTIES LINKER_LANGUAGE C) +else() + # This should be built as a shared library with name libal-mex.so + matlab_add_mex( NAME al-mex SRC ${MEX_UTIL_SOURCES} SHARED LINK_TO al ) + target_include_directories( al-mex PRIVATE src ) +endif() -# This should be built as a shared library with name libal-mex.so -matlab_add_mex( NAME al-mex SRC ${MEX_UTIL_SOURCES} SHARED LINK_TO al ) -target_include_directories( al-mex PRIVATE src ) set_target_properties( al-mex PROPERTIES - # override library suffix to '.so' - PREFIX "lib" SUFFIX ".so" - # By default, the mex target created by matlab_add_mex hides all defined - # symbols, unset the 
LINK_FLAGS property to expose the symbols required by the - # other mex targets: - LINK_FLAGS " -Wl,--as-needed" + PREFIX "lib" ) - +if(WIN32) + # On Windows, it's a regular DLL, not a MEX file + set_target_properties( al-mex PROPERTIES + SUFFIX ".dll" + WINDOWS_EXPORT_ALL_SYMBOLS ON + ) +else() + set_target_properties( al-mex PROPERTIES + SUFFIX ".so" + LINK_FLAGS " -Wl,--as-needed" + ) +endif() set( MEX_TARGETS ) # Generate sources and define targets @@ -103,7 +180,8 @@ set( MEX_TARGETS ) add_custom_target( al-mex-sources ALL ) -# Macro for source generation with Saxon + +# Macro for source generation with saxonche (Python) macro( GENERATE GENERATOR IS_C_GENERATOR OUTPUT_FILES ) if( ${IS_C_GENERATOR} ) set( WORK_DIR "${CMAKE_CURRENT_BINARY_DIR}" ) @@ -116,21 +194,34 @@ macro( GENERATE GENERATOR IS_C_GENERATOR OUTPUT_FILES ) add_custom_command( OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/${GENERATOR}_dummy.txt COMMAND - ${Java_JAVA_EXECUTABLE} -cp ${SaxonHE_CLASSPATH} - net.sf.saxon.Transform -t -warnings:fatal - "-xsl:${CMAKE_CURRENT_SOURCE_DIR}/${GENERATOR}.xsl" - "-s:${IDSDEF}" + ${_VENV_PYTHON} "${AL_LOCAL_XSLTPROC_SCRIPT}" + -xsl ${CMAKE_CURRENT_SOURCE_DIR}/${GENERATOR}.xsl + -s ${IDSDEF} + -o ${CMAKE_CURRENT_BINARY_DIR}/${GENERATOR}_output.xml DD_GIT_DESCRIBE=${DD_VERSION} AL_GIT_DESCRIBE=${FULL_VERSION} COMMAND ${CMAKE_COMMAND} -E touch ${CMAKE_CURRENT_BINARY_DIR}/${GENERATOR}_dummy.txt DEPENDS ${IDSDEF} ${GENERATOR}.xsl - WORKING_DIRECTORY ${WORK_DIR} + WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} ) add_custom_target( al-mex-source-${GENERATOR} COMMAND ${CMAKE_COMMAND} -E echo "" DEPENDS ${CMAKE_CURRENT_BINARY_DIR}/${GENERATOR}_dummy.txt BYPRODUCTS ${GENERATED_FILENAMES} ) + + # For non-C generators (e.g., matlab/IDS_list.m), copy generated files from build to source dir + if( NOT ${IS_C_GENERATOR} ) + foreach( OUTPUT_FILE ${OUTPUT_FILES} ) + add_custom_command( + TARGET al-mex-source-${GENERATOR} POST_BUILD + COMMAND ${CMAKE_COMMAND} -E copy_if_different + 
${CMAKE_CURRENT_BINARY_DIR}/${OUTPUT_FILE} + ${CMAKE_CURRENT_SOURCE_DIR}/${OUTPUT_FILE} + ) + endforeach() + endif() + add_dependencies( al-mex-sources al-mex-source-${GENERATOR} ) endmacro() @@ -140,29 +231,63 @@ macro( FILES_FROM_METHOD_NAME FILES METHOD_NAME ) endmacro() # Macro to add a mex target -macro( ADD_IMAS_MEX NAME SOURCES ) - matlab_add_mex( NAME "mex-${NAME}" SRC ${SOURCES} LINK_TO al al-mex OUTPUT_NAME "${NAME}" ) - target_include_directories( "mex-${NAME}" PRIVATE src ) - list( APPEND MEX_TARGETS "mex-${NAME}" ) -endmacro() - +if(WIN32) + macro( ADD_IMAS_MEX NAME SOURCES ) + # Let matlab_add_mex handle the suffix automatically + # Or explicitly set .mexw64 if needed: + matlab_add_mex( NAME "mex-${NAME}" SRC ${SOURCES} LINK_TO al al-mex OUTPUT_NAME "${NAME}" ) + if(MATLAB_VERSION VERSION_GREATER_EQUAL "9.4") # R2018a+ + set_target_properties("mex-${NAME}" PROPERTIES SUFFIX ".mexw64") + endif() + target_include_directories( "mex-${NAME}" PRIVATE src ) + # Use separate (non-interleaved) complex API for compatibility + target_compile_definitions( "mex-${NAME}" PUBLIC) + # Add /bigobj flag for large generated code files + target_compile_options( "mex-${NAME}" PRIVATE /bigobj ) + # Ensure all source-generation targets finish before this MEX target compiles. + # Without this, parallel builds (cmake --build -j N) can race: the MEX target + # tries to compile BYPRODUCT files before al-mex-source-* has generated them. + add_dependencies( "mex-${NAME}" al-mex-sources ) + list( APPEND MEX_TARGETS "mex-${NAME}" ) + endmacro() +else() + # Macro to add a mex target + macro( ADD_IMAS_MEX NAME SOURCES ) + matlab_add_mex( NAME "mex-${NAME}" SRC ${SOURCES} LINK_TO al al-mex OUTPUT_NAME "${NAME}" ) + target_include_directories( "mex-${NAME}" PRIVATE src ) + # Ensure all source-generation targets finish before this MEX target compiles. 
+ # Without this, parallel builds (cmake --build -j N) can race: the MEX target + # tries to compile BYPRODUCT files before al-mex-source-* has generated them. + add_dependencies( "mex-${NAME}" al-mex-sources ) + list( APPEND MEX_TARGETS "mex-${NAME}" ) + endmacro() +endif() # These XSLs generate files in triplets, which are then compiled into one mex target -foreach( METHOD_NAME allocate delete gen get get_sample get_slice init put put_slice rand validate ) +# Note: ids_validate is not compiled on Windows (validate is unsupported on Windows) +foreach( METHOD_NAME allocate delete gen get get_sample get_slice init put put_slice rand ) FILES_FROM_METHOD_NAME( OUTPUT_FILES ${METHOD_NAME} ) GENERATE( "ids_${METHOD_NAME}" 1 "${OUTPUT_FILES}" ) ADD_IMAS_MEX( "ids_${METHOD_NAME}" "${OUTPUT_FILES}" ) endforeach() +if(NOT WIN32) + FILES_FROM_METHOD_NAME( OUTPUT_FILES validate ) + GENERATE( "ids_validate" 1 "${OUTPUT_FILES}" ) + ADD_IMAS_MEX( "ids_validate" "${OUTPUT_FILES}" ) +endif() # 'put' and 'put_slice' in addition need these sources: target_sources( "mex-ids_put" PRIVATE src/ids/delete_ids.c - src/ids/validate_ids.c ) target_sources( "mex-ids_put_slice" PRIVATE src/ids/put_ids.c src/ids/delete_ids.c - src/ids/validate_ids.c ) +# validate_ids.c is not linked on Windows (validate is unsupported on Windows) +if(NOT WIN32) + target_sources( "mex-ids_put" PRIVATE src/ids/validate_ids.c ) + target_sources( "mex-ids_put_slice" PRIVATE src/ids/validate_ids.c ) +endif() # The ids_converter.xsl generates many targets: set( IDS_CONVERTER_SOURCES ) @@ -223,12 +348,125 @@ add_dependencies( al-matlab al-mex-sources al-mex ${MEX_TARGETS} ) ################################################################################ # Install libal-mex to lib folder -install( TARGETS al-mex DESTINATION lib ) +if(WIN32) + install( TARGETS al-mex DESTINATION bin RUNTIME DESTINATION bin LIBRARY DESTINATION lib ) +else() + install( TARGETS al-mex DESTINATION lib ) +endif() # Install compiled 
libraries to the mex folder install( TARGETS ${MEX_TARGETS} DESTINATION mex ) # Install static and generated '.m' files to the mex folder install( DIRECTORY matlab/ DESTINATION mex ) +# Create MATLAB toolbox folder structure with all necessary files +if(WIN32) + # Install al-mex DLL to toolbox folder + install( TARGETS al-mex DESTINATION toolbox RUNTIME DESTINATION toolbox ) +else() + # Install al-mex shared library to toolbox folder + install( TARGETS al-mex DESTINATION toolbox ) +endif() +# Install MEX files to toolbox folder +install( TARGETS ${MEX_TARGETS} DESTINATION toolbox ) +# Install M-files to toolbox folder +install( DIRECTORY matlab/ DESTINATION toolbox + FILES_MATCHING PATTERN "*.m" ) + +# Install AL shared library to toolbox folder (if not an alias) +# The al library comes from al-core and may already be installed +# Copy runtime dependencies to toolbox during install +if(WIN32) + # On Windows, install runtime DLLs that MEX files depend on. + # This includes both the AL DLLs (from bin/) and the vcpkg dependency DLLs + # (boost, hdf5, pthread, dl, zlib, szip, aec, etc.) needed by al.dll at runtime. 
+ install(CODE " + file(GLOB AL_RUNTIME_LIBS + \"\${CMAKE_INSTALL_PREFIX}/bin/*.dll\" + ) + file(GLOB VCPKG_RUNTIME_LIBS + \"${CMAKE_BINARY_DIR}/vcpkg_installed/x64-windows/bin/*.dll\" + ) + set(ALL_RUNTIME_LIBS \${AL_RUNTIME_LIBS} \${VCPKG_RUNTIME_LIBS}) + foreach(lib IN LISTS ALL_RUNTIME_LIBS) + file(COPY \"\${lib}\" DESTINATION \"\${CMAKE_INSTALL_PREFIX}/toolbox\") + endforeach() + message(STATUS \"Copied runtime libraries to toolbox folder\") + ") +else() + # On Linux, install shared libraries that MEX files depend on + install(CODE " + file(GLOB AL_RUNTIME_LIBS + \"\${CMAKE_INSTALL_PREFIX}/lib/*.so*\" + ) + foreach(lib IN LISTS AL_RUNTIME_LIBS) + file(COPY \"\${lib}\" DESTINATION \"\${CMAKE_INSTALL_PREFIX}/toolbox\") + endforeach() + message(STATUS \"Copied runtime libraries to toolbox folder\") + ") +endif() + +# Copy toolbox packaging script +install( FILES ${CMAKE_CURRENT_SOURCE_DIR}/create_matlab_toolbox.m + DESTINATION . ) + +# Option to create MATLAB toolbox package during installation +option(AL_CREATE_TOOLBOX "Automatically create MATLAB toolbox package during installation" OFF) + +# Get MATLAB executable path +if(DEFINED Matlab_MAIN_PROGRAM) + set(MATLAB_EXECUTABLE "${Matlab_MAIN_PROGRAM}") +elseif(DEFINED Matlab_ROOT_DIR) + if(WIN32) + set(MATLAB_EXECUTABLE "${Matlab_ROOT_DIR}/bin/matlab.exe") + else() + set(MATLAB_EXECUTABLE "${Matlab_ROOT_DIR}/bin/matlab") + endif() +else() + # Try to find matlab in PATH + find_program(MATLAB_EXECUTABLE matlab) +endif() + +if(MATLAB_EXECUTABLE) + message(STATUS "MATLAB executable: ${MATLAB_EXECUTABLE}") + + # Add custom target to create MATLAB toolbox package + # NOTE: This target requires that 'cmake --install' has been run first to create the toolbox folder + add_custom_target( matlab-toolbox + COMMAND ${CMAKE_COMMAND} -E echo "NOTE: Ensure 'cmake --install' has been run before creating the toolbox package" + COMMAND ${CMAKE_COMMAND} -E echo "Looking for toolbox in: ${CMAKE_INSTALL_PREFIX}/toolbox" + COMMAND 
"${MATLAB_EXECUTABLE}" + -batch "create_matlab_toolbox('${CMAKE_INSTALL_PREFIX}', '${CMAKE_BINARY_DIR}', '${PROJECT_VERSION}', '${DD_VERSION}')" + WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} + COMMENT "Creating MATLAB toolbox package: IMAS-MATLAB/${PROJECT_VERSION}-DD-${DD_VERSION}" + VERBATIM + ) + + # Optionally run toolbox packaging as part of installation + if(AL_CREATE_TOOLBOX) + install(CODE " + message(STATUS \"Creating MATLAB toolbox package: IMAS-MATLAB/${PROJECT_VERSION}-DD-${DD_VERSION}\") + execute_process( + COMMAND \"${MATLAB_EXECUTABLE}\" + -batch \"create_matlab_toolbox('\${CMAKE_INSTALL_PREFIX}', '${CMAKE_BINARY_DIR}', '${PROJECT_VERSION}', '${DD_VERSION}')\" + WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} + RESULT_VARIABLE _TOOLBOX_RESULT + OUTPUT_VARIABLE _TOOLBOX_OUTPUT + ERROR_VARIABLE _TOOLBOX_ERROR + ) + if(_TOOLBOX_RESULT) + message(WARNING \"Failed to create MATLAB toolbox package: \${_TOOLBOX_ERROR}\") + message(WARNING \"You can create it manually by running: cmake --build . --target matlab-toolbox\") + else() + message(STATUS \"MATLAB toolbox package created successfully\") + message(STATUS \"\${_TOOLBOX_OUTPUT}\") + endif() + ") + endif() +else() + message(WARNING "MATLAB executable not found. 
Toolbox packaging target will not be available.") + message(WARNING "You can manually package the toolbox by running create_matlab_toolbox.m from MATLAB.") +endif() + # Tests ################################################################################ diff --git a/CMakePresets.json b/CMakePresets.json index c14f947..6123732 100644 --- a/CMakePresets.json +++ b/CMakePresets.json @@ -4,10 +4,10 @@ { "name": "https", "displayName": "HTTPS git URLs", - "description": "Use https://git.iter.org/ URLs instead of SSH URLs", + "description": "Use https://github.com/ URLs instead of SSH URLs", "cacheVariables": { - "AL_CORE_GIT_REPOSITORY": "https://git.iter.org/scm/imas/al-core.git", - "AL_PLUGINS_GIT_REPOSITORY": "https://git.iter.org/scm/imas/al-plugins.git", + "AL_CORE_GIT_REPOSITORY": "https://github.com/iterorganization/IMAS-Core.git", + "AL_PLUGINS_GIT_REPOSITORY": "https://github.com/iterorganization/IMAS-Core-Plugins.git", "DD_GIT_REPOSITORY": "https://github.com/iterorganization/IMAS-Data-Dictionary.git" } } diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 0000000..df8ba3b --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,72 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to make participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, sex characteristics, gender identity and expression, +level of experience, education, socio-economic status, nationality, personal +appearance, race, religion, or sexual identity and orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or +reject comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct, or to ban temporarily or +permanently any contributor for other behaviors that they deem inappropriate, +threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies within all project spaces, and it also applies when +an individual is representing the project or its community in public spaces. +Examples of representing a project or community include using an official +project e-mail address, posting via an official social media account, or acting +as an appointed representative at an online or offline event. Representation of +a project may be further defined and clarified by project maintainers. 
+ +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported by contacting the project team. All +complaints will be reviewed and investigated and will result in a response that +is deemed necessary and appropriate to the circumstances. The project team is +obligated to maintain confidentiality with regard to the reporter of an incident. +Further details of specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good +faith may face temporary or permanent repercussions as determined by other +members of the project's leadership. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, +available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html + diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..7fdb256 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,40 @@ +# Contributing guidelines + +We welcome any kind of contribution to `IMAS-MATLAB`, +from a simple comment, a question or even a full-fledged pull +request. +Please first make sure you read and follow the +[Code of Conduct](CODE_OF_CONDUCT.md). + +## You think you found a bug in the code, or have a question in its use +1. use the [issue search](https://github.com/iterorganization/IMAS-MATLAB/issues) +to check if someone already created a similar issue; +2. if not, make a **new issue** to describe your problem or question. +In the case of a bug suspicion, please try to give all the relevant +information to allow reproducing the error or identifying +its root cause (version of the IMAS-MATLAB, OS and relevant +dependencies, snippet of code); +3. apply relevant labels to the issue. + +## You want to make or ask some change to the code +1. use the [issue search](https://github.com/iterorganization/IMAS-MATLAB/issues) +to check if someone already proposed a similar idea/change; +2. 
if not, create a **new issue** to describe what change you would like to see +implemented and specify it if you intend to work on it yourself or if some help +will be needed; +3. wait until some kind of consensus is reached about your idea being relevant, +at which time the issue will be assigned (to you or someone else who can work on +this topic); +4. if you do the development yourself, fork the repository to your own GitHub +profile and create your own feature branch off of the latest develop commit. +Make sure to regularly sync your branch with the latest commits from `develop` +(find instructions +[here](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/working-with-forks/syncing-a-fork)); +5. when your development is ready, create a pull request (find instructions +[here](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/creating-a-pull-request-from-a-fork)). + + +While we will try to answer questions quickly and to address issues in a timely +manner, it may sometimes take longer than expected. A friendly ping in the +discussion or the issue thread can help draw attention if you find that it was +stalled. diff --git a/README.md b/README.md index f48bbd0..0612ca1 100644 --- a/README.md +++ b/README.md @@ -1,11 +1,23 @@ -# IMAS Access Layer: MATLAB High Level Interface +# IMAS-MATLAB -This repository contains the MATLAB High Level Interface to the IMAS Access Layer. +This repository contains the MATLAB data access library for IMAS. +It allows manipulating MATLAB data structures that correspond to IDS defined in the +[IMAS-Data-Dictionary](https://github.com/iterorganization/IMAS-Data-Dictionary). +It relies on the [IMAS-Core](https://github.com/iterorganization/IMAS-Core) library +to abstract I/O operations from the underlying chosen data storage format. 
-## Documentation -- [User documentation - ](https://sharepoint.iter.org/departments/POP/CM/IMDesign/Code%20Documentation/ACCESS-LAYER-doc/matlab/latest.html) - (latest stable release). -- [Developer documentation and build instructions - ](https://sharepoint.iter.org/departments/POP/CM/IMDesign/Code%20Documentation/ACCESS-LAYER-doc/matlab/dev/index.html) +## Getting started + +The latest build, install and user documentation is available [here](https://imas-matlab.readthedocs.io/en/latest/). + + +## Legal + +IMAS-MATLAB is licensed under [LGPL 3.0](LICENSE.txt). +Copyright 2019-2026 ITER Organization, Copyright 2019-2021 EPFL. + + +## Acknowledgements + +Bootstrapped from the UAL's matlabinterface. diff --git a/ci/build_and_test.sh b/ci/build_and_test.sh index b978a29..28b29a9 100755 --- a/ci/build_and_test.sh +++ b/ci/build_and_test.sh @@ -24,7 +24,6 @@ echo "... 2020b" MODULES=( CMake/3.24.3-GCCcore-10.2.0 Boost/1.74.0-GCC-10.2.0 # AL-Core - Saxon-HE/10.3-Java-11 # DD Python/3.8.6-GCCcore-10.2.0 # documentation libxml2/2.9.10-GCCcore-10.2.0 # AL-Core MDSplus/7.131.6-GCCcore-10.2.0 # backend @@ -59,10 +58,9 @@ echo "... 2023b" module load "${MODULES[@]}" MODULES=( CMake/3.27.6-GCCcore-13.2.0 - Saxon-HE/12.4-Java-21 # DD Python/3.11.5-GCCcore-13.2.0 libxml2/2.11.5-GCCcore-13.2.0 # AL-Core - MDSplus/7.132.0-GCCcore-13.2.0 # backend + MDSplus/7.153.3-GCCcore-13.2.0 # backend Python/3.11.5-GCCcore-13.2.0 # documentation MATLAB/2023b-r5-GCCcore-13.2.0 ) @@ -72,7 +70,7 @@ echo "... foss-2023b" MODULES=(${MODULES[@]} HDF5/1.14.3-gompi-2023b # backend Boost/1.83.0-GCC-13.2.0 # AL-Core - UDA/2.9.1-GCC-13.2.0 # backend + UDA/2.9.3-GCC-13.2.0 # backend ) CMAKE_ARGS=(${CMAKE_ARGS[@]} -DCMAKE_C_COMPILER=${CC:-gcc} @@ -84,7 +82,7 @@ echo "... 
intel-2023b" MODULES=(${MODULES[@]} HDF5/1.14.3-iimpi-2023b # backend Boost/1.83.0-intel-compilers-2023.2.1 # AL-Core - UDA/2.9.1-intel-compilers-2023.2.1 # backend + UDA/2.9.3-intel-compilers-2023.2.1 # backend ) CMAKE_ARGS=(${CMAKE_ARGS[@]} -DCMAKE_C_COMPILER=${CC:-icx} @@ -100,15 +98,6 @@ module load "${MODULES[@]}" echo "Done loading modules" set -x -# Create a local git configuration with our access token -if [ "x$bamboo_HTTP_AUTH_BEARER_PASSWORD" != "x" ]; then - mkdir -p git - echo "[http \"https://git.iter.org/\"] - extraheader = Authorization: Bearer $bamboo_HTTP_AUTH_BEARER_PASSWORD" > git/config - export XDG_CONFIG_HOME=$PWD - git config -l -fi - # Ensure the build directory is clean: rm -rf build @@ -123,10 +112,10 @@ CMAKE_ARGS=( -D AL_BUILD_MDSPLUS_MODELS=${AL_BUILD_MDSPLUS_MODELS:-ON} # Download dependencies from HTTPS (using an access token): -D AL_DOWNLOAD_DEPENDENCIES=${AL_DOWNLOAD_DEPENDENCIES:-ON} - -D AL_CORE_GIT_REPOSITORY=${AL_CORE_GIT_REPOSITORY:-https://git.iter.org/scm/imas/al-core.git} - -D AL_PLUGINS_GIT_REPOSITORY=${AL_PLUGINS_GIT_REPOSITORY:-https://git.iter.org/scm/imas/al-plugins.git} + -D AL_CORE_GIT_REPOSITORY=${AL_CORE_GIT_REPOSITORY:-https://github.com/iterorganization/IMAS-Core.git} + -D AL_PLUGINS_GIT_REPOSITORY=${AL_PLUGINS_GIT_REPOSITORY:-https://github.com/iterorganization/IMAS-Core-Plugins.git} -D DD_GIT_REPOSITORY=${DD_GIT_REPOSITORY:-https://github.com/iterorganization/IMAS-Data-Dictionary.git} - -D AL_PLUGINS_VERSION=${AL_PLUGINS_VERSION:-main} + -D AL_PLUGINS_VERSION=${AL_PLUGINS_VERSION:-develop} # DD version: can be set with DD_VERSION env variable, otherwise use latest main -D DD_VERSION=${DD_VERSION:-main} # AL Core version: can be set with AL_CORE_VERSION env variable, otherwise use latest main diff --git a/ci/build_docs.sh b/ci/build_docs.sh index a4483f2..769b9ce 100755 --- a/ci/build_docs.sh +++ b/ci/build_docs.sh @@ -28,16 +28,6 @@ module load "${MODULES[@]}" echo "Done loading modules" set -x -# Create a local 
 git configuration with our access token -if [ "x$bamboo_HTTP_AUTH_BEARER_PASSWORD" != "x" ]; then - mkdir -p git - echo "[http \"https://git.iter.org/\"] - extraheader = Authorization: Bearer $bamboo_HTTP_AUTH_BEARER_PASSWORD" > git/config - export XDG_CONFIG_HOME=$PWD - git config -l -fi - - # Ensure the build directory is clean: rm -rf build @@ -45,13 +35,13 @@ rm -rf build CMAKE_ARGS=( # Download dependencies from HTTPS (using an access token): -D AL_DOWNLOAD_DEPENDENCIES=${AL_DOWNLOAD_DEPENDENCIES:-ON} - -D AL_CORE_GIT_REPOSITORY=${AL_CORE_GIT_REPOSITORY:-https://git.iter.org/scm/imas/al-core.git} - -D AL_PLUGINS_GIT_REPOSITORY=${AL_PLUGINS_GIT_REPOSITORY:-https://git.iter.org/scm/imas/al-plugins.git} + -D AL_CORE_GIT_REPOSITORY=${AL_CORE_GIT_REPOSITORY:-https://github.com/iterorganization/IMAS-Core.git} + -D AL_PLUGINS_GIT_REPOSITORY=${AL_PLUGINS_GIT_REPOSITORY:-https://github.com/iterorganization/IMAS-Core-Plugins.git} -D DD_GIT_REPOSITORY=${DD_GIT_REPOSITORY:-https://github.com/iterorganization/IMAS-Data-Dictionary.git} -D AL_PLUGINS_VERSION=${AL_PLUGINS_VERSION:-main} -D AL_CORE_VERSION=${AL_CORE_VERSION:-main} -D DD_VERSION=${DD_VERSION:-main} - # Build only documentation + # Build only documentation -D AL_HLI_DOCS=${AL_HLI_DOCS:-ON} -D AL_DOCS_ONLY=${AL_DOCS_ONLY:-ON} ) diff --git a/ci/build_slurm.sh b/ci/build_slurm.sh new file mode 100644 index 0000000..e141a08 --- /dev/null +++ b/ci/build_slurm.sh @@ -0,0 +1,56 @@ +#!/bin/bash +#SBATCH --job-name=IMAS-Matlab-build +#SBATCH --partition=rigel +#SBATCH --nodes=1 +#SBATCH --ntasks=1 +#SBATCH --cpus-per-task=16 +#SBATCH --mem=32G +#SBATCH --time=01:00:00 +#SBATCH --output=%x-%j.out +#SBATCH --error=%x-%j.err + +module purge +module load CMake/3.27.6-GCCcore-13.2.0 \
 Python/3.11.5-GCCcore-13.2.0 \
 libxml2/2.11.5-GCCcore-13.2.0 \
 MDSplus/7.153.3-GCCcore-13.2.0 \
 MATLAB/2023b-r5-GCCcore-13.2.0 \
 HDF5/1.14.3-gompi-2023b \
 Boost/1.83.0-GCC-13.2.0 \
 UDA/2.9.3-GCC-13.2.0 \
 
SciPy-bundle/2023.11-gfbf-2023b \ + Ninja/1.11.1-GCCcore-13.2.0 \ + Blitz++/1.0.2-GCCcore-13.2.0 \ + scikit-build-core/0.9.3-GCCcore-13.2.0 \ + Cython/3.0.10-GCCcore-13.2.0 \ + cython-cmake/0.2.0-GCCcore-13.2.0 \ + setuptools-scm/8.1.0-GCCcore-13.2.0 \ + typing-extensions/4.10.0-GCCcore-13.2.0 + + +# Clean previous build +rm -rf build test-install + +# CMake configure +cmake -B build \ + -DCMAKE_INSTALL_PREFIX="$(pwd)/test-install/" \ + -DAL_BACKEND_HDF5=ON \ + -DAL_BACKEND_MDSPLUS=ON \ + -DAL_BACKEND_UDA=ON \ + -DAL_BUILD_MDSPLUS_MODELS=ON \ + -DAL_PYTHON_BINDINGS=no-build-isolation \ + -DAL_DOWNLOAD_DEPENDENCIES=ON \ + -DDD_GIT_REPOSITORY=https://github.com/iterorganization/IMAS-Data-Dictionary.git \ + -DDD_VERSION=4.1.1 \ + -DBoost_NO_BOOST_CMAKE=ON \ + -DCMAKE_CXX_STANDARD=17 \ + -DCMAKE_C_COMPILER=gcc \ + -DCMAKE_CXX_COMPILER=g++ \ + -DAL_TESTS=OFF \ + -DAL_EXAMPLES=OFF \ + -DAL_PLUGINS=OFF + +# Build and install (use all allocated CPUs) +cmake --build build --target install --parallel "${SLURM_CPUS_PER_TASK:-16}" + +echo "Build and install completed successfully." diff --git a/common/CMakeLists.txt b/common/CMakeLists.txt new file mode 100644 index 0000000..129ba5e --- /dev/null +++ b/common/CMakeLists.txt @@ -0,0 +1,27 @@ +# CMake configuration for AL common assets +# Intended for inclusion (using FetchContent_MakeAvailable) in other AL components + +# Add ./cmake to the module path in the parent scope: +set( CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} ${CMAKE_CURRENT_SOURCE_DIR}/cmake PARENT_SCOPE ) + +if ( ${CMAKE_SOURCE_DIR} STREQUAL ${CMAKE_BINARY_DIR} ) + message( FATAL_ERROR "In-source builds not supported. 
Please use a separate build directory, for example `cmake -B build [...]`" ) +endif() + +if( AL_PYTHON_BINDINGS ) + find_package( Python3 ) + set( PYVER "${Python3_VERSION_MAJOR}.${Python3_VERSION_MINOR}" ) +endif() +configure_file( al_env.sh.in ${CMAKE_CURRENT_BINARY_DIR}/al_env.sh ) + +# Don't generate the environment file when building modules: +if( AL_DOWNLOAD_DEPENDENCIES OR AL_DEVELOPMENT_LAYOUT ) + install( + FILES ${CMAKE_CURRENT_BINARY_DIR}/al_env.sh + TYPE BIN + ) +endif() + +# al-common_SOURCE_DIR is used in some scripts in the cmake/ folder to refer to the +# current source directory: +set( al-common_SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR} PARENT_SCOPE ) diff --git a/common/al_env.sh.in b/common/al_env.sh.in new file mode 100644 index 0000000..4333fad --- /dev/null +++ b/common/al_env.sh.in @@ -0,0 +1,22 @@ +export PATH="${CMAKE_INSTALL_PREFIX}/bin:$PATH" +export LD_LIBRARY_PATH="${CMAKE_INSTALL_PREFIX}/lib:$LD_LIBRARY_PATH" +export PKG_CONFIG_PATH="${CMAKE_INSTALL_PREFIX}/lib/pkgconfig:$PKG_CONFIG_PATH" +export HDF5_USE_FILE_LOCKING=FALSE + +if [ -f "${CMAKE_INSTALL_PREFIX}/jar/imas.jar" ]; then + export CLASSPATH="${CMAKE_INSTALL_PREFIX}/jar/imas.jar:$CLASSPATH" +fi +if [ -d "${CMAKE_INSTALL_PREFIX}/mex" ]; then + export MATLABPATH="${CMAKE_INSTALL_PREFIX}/mex:$MATLABPATH" +fi +if [ -d "${CMAKE_INSTALL_PREFIX}/lib/python${PYVER}/site-packages" ]; then + export PYTHONPATH="${CMAKE_INSTALL_PREFIX}/lib/python${PYVER}/site-packages:$PYTHONPATH" +fi +if [ -d "${CMAKE_INSTALL_PREFIX}/models/mdsplus" ]; then + export ids_path="${CMAKE_INSTALL_PREFIX}/models/mdsplus" + export MDSPLUS_MODELS_PATH="${CMAKE_INSTALL_PREFIX}/models/mdsplus" +fi +if [ -f "${CMAKE_INSTALL_PREFIX}/include/IDSDef.xml" ]; then + export IDSDEF_PATH="${CMAKE_INSTALL_PREFIX}/include/IDSDef.xml" + export IMAS_VERSION="$( grep -Pom 1 '(?<=)[^<]*' $IDSDEF_PATH )" +fi \ No newline at end of file diff --git a/common/cmake/ALBuildDataDictionary.cmake b/common/cmake/ALBuildDataDictionary.cmake 
new file mode 100644 index 0000000..df22a88 --- /dev/null +++ b/common/cmake/ALBuildDataDictionary.cmake @@ -0,0 +1,341 @@ +# Everything needed for building the Data Dictionary +# +# This script sets the following variables: +# IDSDEF Path to the generated IDSDef.xml +# IDS_NAMES List of IDSs that are available in the data dictionary +# DD_VERSION Version of the data dictionary +# DD_SAFE_VERSION DD version, safe to use as linker symbol + +if( AL_DOCS_ONLY ) + return() +endif() + +# Find Python for the xsltproc.py program +if(WIN32) + if(NOT Python3_FOUND AND NOT PYTHON_EXECUTABLE) + # Check if Python is in PATH + find_program(PYTHON_EXECUTABLE NAMES python3.exe python.exe python3 python DOC "Python interpreter") + if(NOT PYTHON_EXECUTABLE) + message(FATAL_ERROR "Could not find Python. Please ensure Python is installed and in PATH.") + endif() + else() + set(PYTHON_EXECUTABLE ${Python3_EXECUTABLE}) + endif() +else() + find_package(Python REQUIRED COMPONENTS Interpreter Development.Module) + set(PYTHON_EXECUTABLE ${Python_EXECUTABLE}) +endif() + +message(STATUS "Found Python: ${PYTHON_EXECUTABLE}") + +# Set up Python venv paths for saxonche (used for all XSLT transformations) +if(WIN32) + set(_VENV_PYTHON "${CMAKE_CURRENT_BINARY_DIR}/dd_build_env/Scripts/python.exe") + set(_VENV_PIP "${CMAKE_CURRENT_BINARY_DIR}/dd_build_env/Scripts/pip.exe") +else() + set(_VENV_PYTHON "${CMAKE_CURRENT_BINARY_DIR}/dd_build_env/bin/python") + set(_VENV_PIP "${CMAKE_CURRENT_BINARY_DIR}/dd_build_env/bin/pip") +endif() + +if( NOT AL_DOWNLOAD_DEPENDENCIES AND NOT AL_DEVELOPMENT_LAYOUT ) + # The DD easybuild module should be loaded, use that module: + # Create Python venv first and install imas_data_dictionary + if(NOT EXISTS "${_VENV_PYTHON}") + execute_process( + COMMAND ${PYTHON_EXECUTABLE} -m venv dd_build_env + WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} + RESULT_VARIABLE _VENV_EXITCODE + OUTPUT_VARIABLE _VENV_OUTPUT + ERROR_VARIABLE _VENV_ERROR + ) + + if(_VENV_EXITCODE) + 
message(STATUS "venv stdout: ${_VENV_OUTPUT}") + message(STATUS "venv stderr: ${_VENV_ERROR}") + message(FATAL_ERROR "Failed to create venv (exit code: ${_VENV_EXITCODE}). Ensure Python has venv module installed: python -m venv --help") + endif() + + if(DEFINED DD_VERSION) + execute_process( + COMMAND ${_VENV_PIP} install imas_data_dictionary==${DD_VERSION} + RESULT_VARIABLE _PIP_EXITCODE + OUTPUT_VARIABLE _PIP_OUTPUT + ERROR_VARIABLE _PIP_ERROR + ) + else() + execute_process( + COMMAND ${_VENV_PIP} install imas_data_dictionary + RESULT_VARIABLE _PIP_EXITCODE + OUTPUT_VARIABLE _PIP_OUTPUT + ERROR_VARIABLE _PIP_ERROR + ) + endif() + + if(_PIP_EXITCODE) + message(STATUS "imas_data_dictionary pip output: ${_PIP_OUTPUT}") + message(STATUS "imas_data_dictionary pip error: ${_PIP_ERROR}") + message(FATAL_ERROR "Failed to install imas_data_dictionary dependency (exit code: ${_PIP_EXITCODE}). Check network connectivity and Python wheel compatibility.") + endif() + + execute_process( + COMMAND ${_VENV_PIP} install saxonche + RESULT_VARIABLE _PIP_EXITCODE + OUTPUT_VARIABLE _PIP_OUTPUT + ERROR_VARIABLE _PIP_ERROR + ) + + if(_PIP_EXITCODE) + message(STATUS "saxonche pip output: ${_PIP_OUTPUT}") + message(STATUS "saxonche pip error: ${_PIP_ERROR}") + message(FATAL_ERROR "Failed to install saxonche dependency (exit code: ${_PIP_EXITCODE}). 
Check network connectivity and Python wheel compatibility.") + endif() + endif() +# Set up idsinfo command path +if(WIN32) + set(_IDSINFO_COMMAND "${CMAKE_CURRENT_BINARY_DIR}/dd_build_env/Scripts/idsinfo.exe") +else() + set(_IDSINFO_COMMAND "${CMAKE_CURRENT_BINARY_DIR}/dd_build_env/bin/idsinfo") +endif() + + # Use idsinfo idspath command from venv to get the path to IDSDef.xml or data_dictionary.xml + execute_process( + COMMAND ${_IDSINFO_COMMAND} idspath + OUTPUT_VARIABLE IDSDEF + OUTPUT_STRIP_TRAILING_WHITESPACE + RESULT_VARIABLE _IDSINFO_EXITCODE + ) + + if( _IDSINFO_EXITCODE ) + message( FATAL_ERROR + "Failed to run 'idsinfo idspath' command. " + "Please ensure IMAS-Data-Dictionary module is loaded." + ) + endif() + + if( NOT EXISTS "${IDSDEF}" ) + message( FATAL_ERROR + "idsinfo idspath returned '${IDSDEF}' but file does not exist. " + "Please ensure IMAS-Data-Dictionary module is properly loaded." + ) + endif() + + message( STATUS "Found Data Dictionary: ${IDSDEF}" ) + + # Populate identifier source xmls based on the IDSDEF location + get_filename_component( DD_BASE_DIR "${IDSDEF}" DIRECTORY ) + + if( DD_BASE_DIR MATCHES "schemas$" ) + # DD 4.1.0+ layout: resources/schemas//*_identifier.xml + file( GLOB DD_IDENTIFIER_FILES "${DD_BASE_DIR}/*/*_identifier.xml" ) + else() + # DD 3.x/4.0.0 layout: dd_x.y.z/include//*_identifier.xml + file( GLOB DD_IDENTIFIER_FILES "${DD_BASE_DIR}/*/*_identifier.xml" ) + endif() + + if( NOT DD_IDENTIFIER_FILES ) + message( WARNING "No identifier XML files found in Data Dictionary at: ${IDSDEF}" ) + endif() +else() + if(WIN32) + # Build the DD from source using direct git commands: + if( AL_DOWNLOAD_DEPENDENCIES ) + # Download the Data Dictionary from the ITER git: + set( data-dictionary_SOURCE_DIR "${CMAKE_CURRENT_BINARY_DIR}/_deps/data-dictionary-src" ) + if( NOT EXISTS "${data-dictionary_SOURCE_DIR}/.git" ) + message( STATUS "Cloning data-dictionary from ${DD_GIT_REPOSITORY}" ) + execute_process( + COMMAND git clone 
"${DD_GIT_REPOSITORY}" "${data-dictionary_SOURCE_DIR}" + RESULT_VARIABLE _GIT_CLONE_RESULT + ERROR_VARIABLE _GIT_CLONE_ERROR + ) + if( _GIT_CLONE_RESULT ) + message( FATAL_ERROR "Failed to clone data-dictionary: ${_GIT_CLONE_ERROR}" ) + endif() + endif() + # Checkout the specified version + execute_process( + COMMAND git fetch origin + WORKING_DIRECTORY "${data-dictionary_SOURCE_DIR}" + RESULT_VARIABLE _GIT_FETCH_RESULT + ) + execute_process( + COMMAND git checkout "${DD_VERSION}" + WORKING_DIRECTORY "${data-dictionary_SOURCE_DIR}" + RESULT_VARIABLE _GIT_CHECKOUT_RESULT + ERROR_VARIABLE _GIT_CHECKOUT_ERROR + ) + if( _GIT_CHECKOUT_RESULT ) + message( FATAL_ERROR "Failed to checkout ${DD_VERSION}: ${_GIT_CHECKOUT_ERROR}" ) + endif() + else() + # Look in ../data-dictionary for the data dictionary + if( NOT( AL_PARENT_FOLDER ) ) + set( AL_PARENT_FOLDER ${CMAKE_CURRENT_SOURCE_DIR}/.. ) + endif() + set( data-dictionary_SOURCE_DIR ${AL_PARENT_FOLDER}/data-dictionary ) + if( NOT IS_DIRECTORY ${data-dictionary_SOURCE_DIR} ) + message( FATAL_ERROR + "${data-dictionary_SOURCE_DIR} does not exist. Please clone the " + "data-dictionary repository or set AL_DOWNLOAD_DEPENDENCIES=ON." + ) + endif() + endif() + else() + # Build the DD from source: + include(FetchContent) + + if( AL_DOWNLOAD_DEPENDENCIES ) + # Download the Data Dictionary from the ITER git: + FetchContent_Declare( + data-dictionary + GIT_REPOSITORY ${DD_GIT_REPOSITORY} + GIT_TAG ${DD_VERSION} + ) + else() + # Look in ../data-dictionary for the data dictionary + if( NOT( AL_PARENT_FOLDER ) ) + set( AL_PARENT_FOLDER ${CMAKE_CURRENT_SOURCE_DIR}/.. ) + endif() + set( DD_SOURCE_DIRECTORY ${AL_PARENT_FOLDER}/data-dictionary ) + if( NOT IS_DIRECTORY ${DD_SOURCE_DIRECTORY} ) + message( FATAL_ERROR + "${DD_SOURCE_DIRECTORY} does not exist. Please clone the " + "data-dictionary repository or set AL_DOWNLOAD_DEPENDENCIES=ON." 
+ ) + endif() + + FetchContent_Declare( + data-dictionary + SOURCE_DIR ${DD_SOURCE_DIRECTORY} + ) + set( DD_SOURCE_DIRECTORY ) # unset temporary var + endif() + FetchContent_MakeAvailable( data-dictionary ) + endif() + + + # get version of the data dictionary + execute_process( + COMMAND git describe --tags --always --dirty + WORKING_DIRECTORY ${data-dictionary_SOURCE_DIR} + OUTPUT_VARIABLE DD_GIT_DESCRIBE + OUTPUT_STRIP_TRAILING_WHITESPACE + RESULT_VARIABLE _GIT_RESULT + ) + if(_GIT_RESULT) + execute_process( + COMMAND git rev-parse --short HEAD + WORKING_DIRECTORY ${data-dictionary_SOURCE_DIR} + OUTPUT_VARIABLE DD_GIT_DESCRIBE + OUTPUT_STRIP_TRAILING_WHITESPACE + ) + endif() + + # We need the IDSDef.xml at configure time, ensure it is built + # Create Python venv and install saxonche if not already done + if(NOT EXISTS "${_VENV_PYTHON}") + execute_process( + COMMAND ${PYTHON_EXECUTABLE} -m venv dd_build_env + WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR} + RESULT_VARIABLE _VENV_EXITCODE + OUTPUT_VARIABLE _VENV_OUTPUT + ERROR_VARIABLE _VENV_ERROR + ) + + if(_VENV_EXITCODE) + message(STATUS "venv stdout: ${_VENV_OUTPUT}") + message(STATUS "venv stderr: ${_VENV_ERROR}") + message(FATAL_ERROR "Failed to create venv (exit code: ${_VENV_EXITCODE}). Ensure Python has venv module installed: python -m venv --help") + endif() + + execute_process( + COMMAND ${_VENV_PIP} install saxonche + RESULT_VARIABLE _PIP_EXITCODE + OUTPUT_VARIABLE _PIP_OUTPUT + ERROR_VARIABLE _PIP_ERROR + ) + + if(_PIP_EXITCODE) + message(STATUS "saxonche pip output: ${_PIP_OUTPUT}") + message(STATUS "saxonche pip error: ${_PIP_ERROR}") + message(FATAL_ERROR "Failed to install saxonche dependency (exit code: ${_PIP_EXITCODE}). 
Check network connectivity and Python wheel compatibility.") + endif() + endif() + + execute_process( + COMMAND ${_VENV_PYTHON} "${AL_LOCAL_XSLTPROC_SCRIPT}" + -xsl "dd_data_dictionary.xml.xsl" + -o "IDSDef.xml" + -s "dd_data_dictionary.xml.xsd" + DD_GIT_DESCRIBE=${DD_GIT_DESCRIBE} + WORKING_DIRECTORY ${data-dictionary_SOURCE_DIR} + RESULT_VARIABLE _MAKE_DD_EXITCODE + OUTPUT_VARIABLE _MAKE_DD_OUTPUT + ERROR_VARIABLE _MAKE_DD_ERROR + ) + + if( _MAKE_DD_EXITCODE ) + message(STATUS "xsltproc.py output: ${_MAKE_DD_OUTPUT}") + message(STATUS "xsltproc.py error: ${_MAKE_DD_ERROR}") + message(FATAL_ERROR "Error while building the Data Dictionary (exit code: ${_MAKE_DD_EXITCODE}). Check paths and Saxon-HE configuration.") + endif() + + # Populate IDSDEF filename + set( IDSDEF "${data-dictionary_SOURCE_DIR}/IDSDef.xml" ) + + # Install IDSDEF (needed for some applications and for UDA backend) + get_filename_component( REAL_IDSDEF ${IDSDEF} REALPATH ) + install( FILES ${REAL_IDSDEF} DESTINATION include RENAME IDSDef.xml ) + + # Populate identifier source xmls + file( GLOB DD_IDENTIFIER_FILES "${data-dictionary_SOURCE_DIR}/*/*_identifier.xml" "${data-dictionary_SOURCE_DIR}/schemas/*/*_identifier.xml" ) +endif() + +# Find out which IDSs exist and populate IDS_NAMES + +set( list_idss_file ${CMAKE_SOURCE_DIR}/common/list_idss.xsl ) +set( CMAKE_CONFIGURE_DEPENDS ${CMAKE_CONFIGURE_DEPENDS};${list_idss_file};${IDSDEF} ) +set( ids_names_tmpfile "${CMAKE_CURRENT_BINARY_DIR}/ids_names_tmp.txt" ) +execute_process( COMMAND + ${_VENV_PYTHON} "${AL_LOCAL_XSLTPROC_SCRIPT}" + -xsl ${list_idss_file} + -s ${IDSDEF} + -o ${ids_names_tmpfile} + RESULT_VARIABLE _XSLT_RESULT + ERROR_VARIABLE _XSLT_ERROR +) +if(_XSLT_RESULT) + message(FATAL_ERROR "Failed to extract IDS names: ${_XSLT_ERROR}") +endif() +if(EXISTS ${ids_names_tmpfile}) + file(READ ${ids_names_tmpfile} IDS_NAMES) + string(STRIP "${IDS_NAMES}" IDS_NAMES) + file(REMOVE ${ids_names_tmpfile}) +else() + message(FATAL_ERROR "IDS names 
output file not created") +endif() +set( list_idss_file ) # unset temporary var + +# DD version +set( dd_version_file ${CMAKE_SOURCE_DIR}/common/dd_version.xsl ) +set( dd_version_tmpfile "${CMAKE_CURRENT_BINARY_DIR}/dd_version_tmp.txt" ) +execute_process( COMMAND + ${_VENV_PYTHON} "${AL_LOCAL_XSLTPROC_SCRIPT}" + -xsl ${dd_version_file} + -s ${IDSDEF} + -o ${dd_version_tmpfile} + RESULT_VARIABLE _XSLT_RESULT + ERROR_VARIABLE _XSLT_ERROR +) +if(_XSLT_RESULT) + message(FATAL_ERROR "Failed to extract DD version: ${_XSLT_ERROR}") +endif() +if(EXISTS ${dd_version_tmpfile}) + file(READ ${dd_version_tmpfile} DD_VERSION) + string(STRIP "${DD_VERSION}" DD_VERSION) + file(REMOVE ${dd_version_tmpfile}) +else() + message(FATAL_ERROR "DD version output file not created") +endif() +string( REGEX REPLACE "[+-]" "_" DD_SAFE_VERSION "${DD_VERSION}" ) +set( dd_version_file ) # unset temporary var diff --git a/common/cmake/ALBuildDocumentation.cmake b/common/cmake/ALBuildDocumentation.cmake new file mode 100644 index 0000000..c8cbb0e --- /dev/null +++ b/common/cmake/ALBuildDocumentation.cmake @@ -0,0 +1,22 @@ +# CMake config for building Sphinx docs +find_package( Python3 REQUIRED ) + +file( GENERATE OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/doc.sh" + CONTENT "#!/bin/bash +# Set up python venv +${Python3_EXECUTABLE} -m venv '${CMAKE_CURRENT_BINARY_DIR}/doc_venv' +source '${CMAKE_CURRENT_BINARY_DIR}/doc_venv/bin/activate' +pip install -r '${CMAKE_CURRENT_SOURCE_DIR}/doc/doc_common/requirements.txt' + +# Instruct sphinx to treat all warnings as errors +export SPHINXOPTS='-W --keep-going' +# Build HTML documentation +make -C '${CMAKE_CURRENT_SOURCE_DIR}/doc' clean html +# Add a version file: +echo ${FULL_VERSION} > ${CMAKE_CURRENT_SOURCE_DIR}/doc/_build/html/version.txt +" +) + +add_custom_target( "${PROJECT_NAME}-docs" ALL + COMMAND bash "${CMAKE_CURRENT_BINARY_DIR}/doc.sh" +) diff --git a/common/cmake/ALCommonConfig.cmake b/common/cmake/ALCommonConfig.cmake new file mode 100644 index 
0000000..ccf8d54 --- /dev/null +++ b/common/cmake/ALCommonConfig.cmake @@ -0,0 +1,59 @@ +# Shared options + +# Note: default options are also listed in the docs: doc_common/building_installing.rst +# When changing default values, the documentation should be updated to reflect that. + +# Note: AL_DOWNLOAD_DEPENDENCIES is also shared, but needs to be set before this +# repository is available, so not listing it here. + +option( BUILD_SHARED_LIBS "Build shared libraries" ON ) +option( AL_EXAMPLES "Build and test examples" ON ) +option( AL_TESTS "Build tests and enable test framework" ON ) +option( AL_PLUGINS "Enable plugin framework for tests and examples" OFF ) +option( AL_HLI_DOCS "Build the Sphinx-based High Level Interface documentation" OFF ) +option( AL_DOCS_ONLY "Don't build anything, except the Sphinx-based High Level Interface documentation" OFF ) + +# Saxon XSLT processor has been replaced with Python saxonche +# No longer need to find SaxonHE - saxonche is installed automatically via pip in virtual environments + +# if( NOT AL_DOWNLOAD_DEPENDENCIES ) +# if( AL_DEVELOPMENT_LAYOUT ) +# set( _DEV ON ) +# else() +# set( _DEV OFF ) +# endif() +# option( AL_DEVELOPMENT_LAYOUT "Look into parent directories for dependencies" ${_DEV} ) +# endif() + +# Enable CTest? 
+if( AL_EXAMPLES OR AL_TESTS ) + include( CTest ) +endif() + +# Allow configuration of repositories and branches for dependent packages +################################################################################ + +if( AL_DOWNLOAD_DEPENDENCIES ) + if( ${AL_PLUGINS} ) + # AL plugins + ############################################################################## + set( + AL_PLUGINS_GIT_REPOSITORY "https://github.com/iterorganization/IMAS-Core-Plugins.git" + CACHE STRING "Git repository of al-plugins" + ) + set( + AL_PLUGINS_VERSION "main" + CACHE STRING "al-plugins version (tag or branch name) to use for this build" + ) + endif() + # Data dictionary + ############################################################################## + set( + DD_GIT_REPOSITORY "https://github.com/iterorganization/IMAS-Data-Dictionary.git" + CACHE STRING "Git repository of the Data Dictionary" + ) + set( + DD_VERSION "main" + CACHE STRING "Data dictionary version (tag or branch name) to use for this build" + ) +endif() diff --git a/common/cmake/ALCore.cmake b/common/cmake/ALCore.cmake new file mode 100644 index 0000000..21c6bc7 --- /dev/null +++ b/common/cmake/ALCore.cmake @@ -0,0 +1,189 @@ +# AL core and plugins + +if( NOT AL_DOWNLOAD_DEPENDENCIES AND NOT AL_DEVELOPMENT_LAYOUT ) + # The Access Layer core should be available as a module, use PkgConfig to create a + # target: + find_package( PkgConfig ) + pkg_check_modules( al REQUIRED IMPORTED_TARGET al-core ) + add_library( al ALIAS PkgConfig::al ) + set( AL_CORE_VERSION ${al_VERSION} ) + + # Stop processing + return() +endif() +if(WIN32) + if( AL_DOWNLOAD_DEPENDENCIES ) + # Download the AL core from the ITER git using direct git commands: + set( al-core_SOURCE_DIR "${CMAKE_CURRENT_BINARY_DIR}/_deps/al-core-src" ) + if( NOT EXISTS "${al-core_SOURCE_DIR}/.git" ) + message( STATUS "Cloning al-core from ${AL_CORE_GIT_REPOSITORY}" ) + execute_process( + COMMAND git clone "${AL_CORE_GIT_REPOSITORY}" "${al-core_SOURCE_DIR}" + 
RESULT_VARIABLE _GIT_CLONE_RESULT + ERROR_VARIABLE _GIT_CLONE_ERROR + ) + if( _GIT_CLONE_RESULT ) + message( FATAL_ERROR "Failed to clone al-core: ${_GIT_CLONE_ERROR}" ) + endif() + endif() + # Checkout the specified version + execute_process( + COMMAND git fetch origin + WORKING_DIRECTORY "${al-core_SOURCE_DIR}" + RESULT_VARIABLE _GIT_FETCH_RESULT + ) + execute_process( + COMMAND git checkout "${AL_CORE_VERSION}" + WORKING_DIRECTORY "${al-core_SOURCE_DIR}" + RESULT_VARIABLE _GIT_CHECKOUT_RESULT + ERROR_VARIABLE _GIT_CHECKOUT_ERROR + ) + if( _GIT_CHECKOUT_RESULT ) + message( FATAL_ERROR "Failed to checkout ${AL_CORE_VERSION}: ${_GIT_CHECKOUT_ERROR}" ) + endif() + else() + # Look in ../al-core + set( al-core_SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/../al-core ) + if( NOT IS_DIRECTORY ${al-core_SOURCE_DIR} ) + # Repository used to be called "al-lowlevel", check this directory as well for + # backwards compatibility: + set( al-core_SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/../al-lowlevel ) + if( NOT IS_DIRECTORY ${al-core_SOURCE_DIR} ) + message( FATAL_ERROR + "${al-core_SOURCE_DIR} does not exist. Please clone the " + "al-core repository or set AL_DOWNLOAD_DEPENDENCIES=ON." + ) + endif() + endif() + endif() +else() + include(FetchContent) + + if( AL_DOWNLOAD_DEPENDENCIES ) + # Download the AL core from the ITER git: + FetchContent_Declare( + al-core + GIT_REPOSITORY ${AL_CORE_GIT_REPOSITORY} + GIT_TAG ${AL_CORE_VERSION} + ) + else() + # Look in ../al-core + set( AL_SOURCE_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/../al-core ) + if( NOT IS_DIRECTORY ${AL_SOURCE_DIRECTORY} ) + # Repository used to be called "al-lowlevel", check this directory as well for + # backwards compatibility: + set( AL_SOURCE_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/../al-lowlevel ) + if( NOT IS_DIRECTORY ${AL_SOURCE_DIRECTORY} ) + message( FATAL_ERROR + "${AL_SOURCE_DIRECTORY} does not exist. Please clone the " + "al-core repository or set AL_DOWNLOAD_DEPENDENCIES=ON." 
+ ) + endif() + endif() + + FetchContent_Declare( + al-core + SOURCE_DIR ${AL_SOURCE_DIRECTORY} + ) + set( AL_SOURCE_DIRECTORY ) # unset temporary var + endif() +endif() + +# Don't load the AL core when only building documentation +if( NOT AL_DOCS_ONLY ) + # Ensure vcpkg packages are found in the subdirectory + if(WIN32) + # On Windows, ensure vcpkg packages are available to the subdirectory + if(DEFINED VCPKG_INSTALLED_DIR AND DEFINED VCPKG_TARGET_TRIPLET) + # Add vcpkg installed directory to CMAKE_PREFIX_PATH for the subdirectory + set(CMAKE_PREFIX_PATH "${VCPKG_INSTALLED_DIR}/${VCPKG_TARGET_TRIPLET};${CMAKE_PREFIX_PATH}") + # Pass vcpkg variables to subdirectory by setting them in parent scope + set(VCPKG_INSTALLED_DIR "${VCPKG_INSTALLED_DIR}" CACHE STRING "vcpkg installed dir" FORCE) + set(VCPKG_TARGET_TRIPLET "${VCPKG_TARGET_TRIPLET}" CACHE STRING "vcpkg triplet" FORCE) + message(STATUS "ALCore: Passing vcpkg paths to al-core subdirectory") + message(STATUS " VCPKG_INSTALLED_DIR: ${VCPKG_INSTALLED_DIR}") + message(STATUS " VCPKG_TARGET_TRIPLET: ${VCPKG_TARGET_TRIPLET}") + message(STATUS " CMAKE_PREFIX_PATH: ${CMAKE_PREFIX_PATH}") + endif() + add_subdirectory( ${al-core_SOURCE_DIR} ${CMAKE_CURRENT_BINARY_DIR}/_deps/al-core-build ) + else() + FetchContent_MakeAvailable( al-core ) + endif() + get_target_property( AL_CORE_VERSION al VERSION ) +endif() + + +if( ${AL_PLUGINS} ) + if(WIN32) + if( ${AL_DOWNLOAD_DEPENDENCIES} ) + # Download the AL plugins from the ITER git using direct git commands: + set( al-plugins_SOURCE_DIR "${CMAKE_CURRENT_BINARY_DIR}/_deps/al-plugins-src" ) + if( NOT EXISTS "${al-plugins_SOURCE_DIR}/.git" ) + message( STATUS "Cloning al-plugins from ${AL_PLUGINS_GIT_REPOSITORY}" ) + execute_process( + COMMAND git clone "${AL_PLUGINS_GIT_REPOSITORY}" "${al-plugins_SOURCE_DIR}" + RESULT_VARIABLE _GIT_CLONE_RESULT + ERROR_VARIABLE _GIT_CLONE_ERROR + ) + if( _GIT_CLONE_RESULT ) + message( FATAL_ERROR "Failed to clone al-plugins: ${_GIT_CLONE_ERROR}" 
) + endif() + endif() + # Checkout the specified version + execute_process( + COMMAND git fetch origin + WORKING_DIRECTORY "${al-plugins_SOURCE_DIR}" + RESULT_VARIABLE _GIT_FETCH_RESULT + ) + execute_process( + COMMAND git checkout "${AL_PLUGINS_VERSION}" + WORKING_DIRECTORY "${al-plugins_SOURCE_DIR}" + RESULT_VARIABLE _GIT_CHECKOUT_RESULT + ERROR_VARIABLE _GIT_CHECKOUT_ERROR + ) + if( _GIT_CHECKOUT_RESULT ) + message( FATAL_ERROR "Failed to checkout ${AL_PLUGINS_VERSION}: ${_GIT_CHECKOUT_ERROR}" ) + endif() + else() + # Look in ../plugins + set( al-plugins_SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/../al-plugins ) + if( NOT IS_DIRECTORY ${al-plugins_SOURCE_DIR} ) + message( FATAL_ERROR + "${al-plugins_SOURCE_DIR} does not exist. Please clone the " + "al-plugins repository or set AL_DOWNLOAD_DEPENDENCIES=ON." + ) + endif() + endif() + + else() + if( ${AL_DOWNLOAD_DEPENDENCIES} ) + # Download the AL plugins from the ITER git: + FetchContent_Declare( + al-plugins + GIT_REPOSITORY ${AL_PLUGINS_GIT_REPOSITORY} + GIT_TAG ${AL_PLUGINS_VERSION} + ) + else() + # Look in ../plugins + set( PLUGINS_SOURCE_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/../al-plugins ) + if( NOT IS_DIRECTORY ${PLUGINS_SOURCE_DIRECTORY} ) + message( FATAL_ERROR + "${PLUGINS_SOURCE_DIRECTORY} does not exist. Please clone the " + "al-plugins repository or set AL_DOWNLOAD_DEPENDENCIES=ON." 
+ ) + endif() + + FetchContent_Declare( + al-plugins + SOURCE_DIR ${PLUGINS_SOURCE_DIRECTORY} + ) + set( PLUGINS_SOURCE_DIRECTORY ) # unset temporary var + endif() + FetchContent_MakeAvailable( al-plugins ) + endif() +endif() + +if( AL_HLI_DOCS ) + include( ALBuildDocumentation ) +endif() + diff --git a/common/cmake/ALDetermineVersion.cmake b/common/cmake/ALDetermineVersion.cmake new file mode 100644 index 0000000..d7bcc1a --- /dev/null +++ b/common/cmake/ALDetermineVersion.cmake @@ -0,0 +1,64 @@ +# Determine the version of the current AL component, based on git describe + +if( NOT GIT_ARCHIVE_DESCRIBE ) + message( FATAL_ERROR "GIT_ARCHIVE_DESCRIBE should be set before including ALDetermineVersion" ) +endif() + +if( NOT GIT_ARCHIVE_DESCRIBE MATCHES "^.Format:%.describe" ) + # We are part of an exported tarball and git-archive set the describe content: + set( _GIT_DESCRIBE_ERROR_CODE 0 ) + set( _GIT_DESCRIBE_OUTPUT "${GIT_ARCHIVE_DESCRIBE}" ) +else() + # Ask git for a describe: + find_package( Git ) + if( GIT_EXECUTABLE ) + # Fetch tags first so that describe works correctly in that case. 
+ execute_process( + COMMAND ${GIT_EXECUTABLE} fetch --tags --force + WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} + RESULT_VARIABLE _GIT_FETCH_ERROR_CODE + OUTPUT_QUIET + ERROR_QUIET + ) + if( _GIT_FETCH_ERROR_CODE ) + message( VERBOSE "git fetch --tags failed (offline build?), proceeding without fetching tags" ) + endif() + # Generate a git-describe version string from Git repository tags + execute_process( + COMMAND ${GIT_EXECUTABLE} describe --tags --dirty + WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} + OUTPUT_VARIABLE _GIT_DESCRIBE_OUTPUT + RESULT_VARIABLE _GIT_DESCRIBE_ERROR_CODE + OUTPUT_STRIP_TRAILING_WHITESPACE + ) + endif() +endif() + +# Process git describe output: +if( _GIT_DESCRIBE_OUTPUT AND NOT _GIT_DESCRIBE_ERROR_CODE ) + # Git describe should return the version (MAJOR.MINOR.PATCH) and potentially + # a suffix "--g[-dirty]" + # Use a regex to extract all parts: + if( _GIT_DESCRIBE_OUTPUT MATCHES "([0-9]+)[.]([0-9]+)[.]*([0-9]+)(.*)" ) + set( VERSION "${CMAKE_MATCH_1}.${CMAKE_MATCH_2}.${CMAKE_MATCH_3}" ) + if( CMAKE_MATCH_4 MATCHES "-([0-9]+)-(.*)" ) + # Use ncommits as fourth version component for the CMAKE project version + set( VERSION "${VERSION}.${CMAKE_MATCH_1}" ) + endif() + else() + message( FATAL_ERROR "Unexpected output of git describe: '${_GIT_DESCRIBE_OUTPUT}'") + endif() + + # Generate a version string that conforms to the Python standards + # e.g. 5.1.0-3-g7c620eb5-dirty becomes 5.1.0+3-g7c620eb5-dirty + string( REGEX REPLACE "-(.*)$" "+\\1" FULL_VERSION ${_GIT_DESCRIBE_OUTPUT} ) + message( VERBOSE "Determined project version: ${VERSION}" ) +endif() + +# Fallback: git not found, or git describe fails +# Set version to 0.0.0 and report a warning +if( NOT DEFINED VERSION ) + set( VERSION "0.0.0" ) + set( FULL_VERSION "0.0.0+unknown" ) + message( WARNING "Failed to determine VERSION from git tags. 
Falling back to default version '${VERSION}'" ) +endif() diff --git a/common/cmake/ALExampleUtilities.cmake b/common/cmake/ALExampleUtilities.cmake new file mode 100644 index 0000000..1631afd --- /dev/null +++ b/common/cmake/ALExampleUtilities.cmake @@ -0,0 +1,53 @@ +# CMake file providing common logic to test AL examples + +set( EXAMPLE_ENVIRONMENT_WITHOUT_PLUGINS + "IMAS_AL_ENABLE_PLUGINS=FALSE" +) + +if( AL_PLUGINS ) + get_target_property( PLUGIN_DIR al-plugins BINARY_DIR ) + set( EXAMPLE_ENVIRONMENT_WITH_PLUGINS + ${TEST_ENVIRONMENT} + "IMAS_AL_ENABLE_PLUGINS=TRUE" + "IMAS_AL_PLUGINS=${PLUGIN_DIR}" + ) +endif() + +function( set_al_example_properties TEST DISABLED USE_PLUGINS EXTRA_ENVIRONMENT ) + # Set common properties + set_tests_properties( ${TEST} PROPERTIES + # Case insensitive fault|error[^_]|exception|severe|abort|segmentation|fault|dump|logic_error|failed + FAIL_REGULAR_EXPRESSION "[Ff][Aa][Uu][Ll][Tt]|[Ee][Rr][Rr][Oo][Rr][^_]|[Ee][Xx][Cc][Ee][Pp][Tt][Ii][Oo][Nn]|[Ss][Ee][Vv][Ee][Rr][Ee]|[Aa][Bb][Oo][Rr][Tt]|[Ss][Ee][Gg][Mm][Ee][Nn][Tt][Aa][Tt][Ii][Oo][Nn]|[Ff][Aa][Uu][Ll][Tt]|[Dd][Uu][Mm][Pp]|[Ll][Oo][Gg][Ii][Cc]_[Ee][Rr][Rr][Oo][Rr]|[Ff][Aa][Ii][Ll][Ee][Dd]" + DISABLED ${DISABLED} + ) + + # Set environment variables + if( USE_PLUGINS ) + set( P_ENV ${EXAMPLE_ENVIRONMENT_WITH_PLUGINS} ) + else() + set( P_ENV ${EXAMPLE_ENVIRONMENT_WITHOUT_PLUGINS} ) + endif() + set_tests_properties( ${TEST} PROPERTIES ENVIRONMENT "${P_ENV};${EXTRA_ENVIRONMENT}" ) + + # Set fixtures: put/put_slice must run before get/get_slice + string( TOLOWER ${TEST} TEST_LOWER ) + string( REGEX REPLACE "put|get" "" FIXTURE_NAME ${TEST_LOWER} ) + if( TEST_LOWER MATCHES "put" ) + set_tests_properties( ${TEST} PROPERTIES FIXTURES_SETUP ${FIXTURE_NAME} ) + elseif( TEST_LOWER MATCHES "get" ) + set_tests_properties( ${TEST} PROPERTIES FIXTURES_REQUIRED ${FIXTURE_NAME} ) + endif() +endfunction() + + +function( error_on_missing_tests SOURCE_EXTENSION TESTS ) + file( GLOB _SRCS 
"*.${SOURCE_EXTENSION}" ) + foreach( _SRC IN LISTS _SRCS ) + string( REGEX REPLACE "^.*[/\\]|[.]${SOURCE_EXTENSION}$" "" _SRC_STEM ${_SRC} ) + list( FIND TESTS ${_SRC_STEM} FOUND ) + if( FOUND EQUAL -1 ) + message( SEND_ERROR "Source file ${_SRC} found, but no corresponding test exists" ) + endif() + endforeach() +endfunction() + diff --git a/common/cmake/ALLocalPaths.cmake b/common/cmake/ALLocalPaths.cmake new file mode 100644 index 0000000..f367d74 --- /dev/null +++ b/common/cmake/ALLocalPaths.cmake @@ -0,0 +1,14 @@ +# Local paths for IMAS-MATLAB +# This file defines local paths to eliminate AL_COMMON_PATH dependency + +# Directory containing CMake modules +set(AL_LOCAL_CMAKE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/common/cmake) + +# Directory containing documentation common files +set(AL_LOCAL_DOC_COMMON_DIR ${CMAKE_CURRENT_SOURCE_DIR}/doc/doc_common) + +# XSLT processor script +set(AL_LOCAL_XSLTPROC_SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/common/xsltproc.py) + +# Add cmake modules to CMake module path +list(APPEND CMAKE_MODULE_PATH ${AL_LOCAL_CMAKE_DIR}) diff --git a/common/cmake/ALSetCompilerFlags.cmake b/common/cmake/ALSetCompilerFlags.cmake new file mode 100644 index 0000000..b821ffc --- /dev/null +++ b/common/cmake/ALSetCompilerFlags.cmake @@ -0,0 +1,116 @@ +# Compiler flags for building the Access Layer + +include_guard( DIRECTORY ) + +################################################################################ +# Compile definitions for Fortran interface + +if( CMAKE_SYSTEM_NAME STREQUAL Linux ) + add_compile_definitions( $<$:_Linux> ) +elseif( CMAKE_SYSTEM_NAME STREQUAL Darwin ) + add_compile_definitions( $<$:_MacOS> ) +elseif( CMAKE_SYSTEM_NAME STREQUAL Windows ) + add_compile_definitions( $<$:_Windows> ) +else() + message( WARNING "Unknown CMAKE_SYSTEM_NAME '${CMAKE_SYSTEM_NAME}', continuing..." 
) +endif() + + +################################################################################ +# General options + +set( CMAKE_POSITION_INDEPENDENT_CODE ON ) + +# Set default build type to RelWithDebInfo (optimize build, keep debugging symbols) +if( NOT CMAKE_BUILD_TYPE ) + set( CMAKE_BUILD_TYPE RelWithDebInfo CACHE STRING + "Choose the type of build, options are: None Debug Release RelWithDebInfo MinSizeRel." + FORCE + ) +endif() + +################################################################################ +# C++ +if(NOT DEFINED CMAKE_CXX_STANDARD) + set( CMAKE_CXX_STANDARD 17 ) +endif() +if( CMAKE_CXX_COMPILER_ID STREQUAL "Intel" OR CMAKE_CXX_COMPILER_ID STREQUAL "IntelLLVM" ) + # icpc/icpx options + string( APPEND CMAKE_CXX_FLAGS + # " -Wall" + ) +elseif( CMAKE_CXX_COMPILER_ID STREQUAL "GNU" ) + # g++ options + string( APPEND CMAKE_CXX_FLAGS + # " -Wall -Wextra" + ) +elseif( CMAKE_CXX_COMPILER_ID STREQUAL "MSVC" ) + # Visual Studio C++ options + string( APPEND CMAKE_CXX_FLAGS + # " -Wall" + ) +elseif( CMAKE_CXX_COMPILER_ID STREQUAL "AppleClang" ) + # Apple Clang C++ options + string( APPEND CMAKE_CXX_FLAGS + # " -Wall" + ) +else() + message( WARNING "Unsupported C++ compiler: ${CMAKE_CXX_COMPILER_ID}" ) +endif() + + +################################################################################ +# Fortran + +get_property(languages GLOBAL PROPERTY ENABLED_LANGUAGES) +if( "Fortran" IN_LIST languages ) + + set( CMAKE_Fortran_FORMAT FREE ) + set( CMAKE_Fortran_PREPROCESS ON ) + set( CMAKE_Fortran_MODULE_DIRECTORY include ) + + if( CMAKE_Fortran_COMPILER_ID STREQUAL "Intel" ) + # ifort options + string( APPEND CMAKE_Fortran_FLAGS + " -r8 -assume no2underscore" + ) + # string( APPEND CMAKE_Fortran_FLAGS " -warn all" ) + elseif( CMAKE_Fortran_COMPILER_ID STREQUAL "GNU" ) + # gfortran options + string( APPEND CMAKE_Fortran_FLAGS + " -fdefault-real-8 -fdefault-double-8 -fno-second-underscore -ffree-line-length-none" + ) + # string( APPEND CMAKE_Fortran_FLAGS " 
-Wall -Wextra" ) + elseif( CMAKE_Fortran_COMPILER_ID STREQUAL "NAG" ) + # nagfort options + string( APPEND CMAKE_Fortran_FLAGS + " -maxcontin=4000 -w=unused -w=x95 -kind=byte -r8" + ) + # Set CONFIG options for NAG, CMake's definition keep them all empty + string( APPEND CMAKE_Fortran_FLAGS_DEBUG " -g" ) + string( APPEND CMAKE_Fortran_FLAGS_RELWITHDEBINFO " -O2 -g" ) + string( APPEND CMAKE_Fortran_FLAGS_RELEASE " -O3" ) + elseif( CMAKE_Fortran_COMPILER_ID STREQUAL "PGI" ) + # PGI / nvfortran options + string( APPEND CMAKE_Fortran_FLAGS + " -r8 -Mnosecond_underscore" + ) + else() + message( WARNING "Unsupported Fortran compiler: ${CMAKE_Fortran_COMPILER_ID}" ) + endif() + +endif() + +################################################################################ +# Windows support + +# TODO: salvaged from old CMakeLists +# Need to figure out if this is needed and functional before uncommenting + +# add_definitions( -D_CRT_SECURE_NO_WARNINGS ) +# add_compile_options( /bigobj ) + +# set( CMAKE_FIND_LIBRARY_SUFFIXES ".dll.lib" ".lib" ) +# set( CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} /SAFESEH:NO" ) +# set( CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} /SAFESEH:NO /SUBSYSTEM:CONSOLE" ) +# set( CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /std:c++14 /Zc:__cplusplus" ) diff --git a/common/dd_version.xsl b/common/dd_version.xsl new file mode 100644 index 0000000..906f556 --- /dev/null +++ b/common/dd_version.xsl @@ -0,0 +1,15 @@ + + + + + + + + + + + + + + diff --git a/common/identifiers.common.xsl b/common/identifiers.common.xsl new file mode 100644 index 0000000..af36725 --- /dev/null +++ b/common/identifiers.common.xsl @@ -0,0 +1,247 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + [] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + #include + + + + use + + + + import + + + + import + + ; + + + + + + + + + + + + + + + + + + + + + + + + + + + + <?xml version="1.0" ?> + <section class="topic" 
id="imas_enum_types__ "> + <title> + + </title> + + + + <para> + <screen> + + + + + + </screen> + </para> + + + + + + + <para> This identifier is used in the following places in the ITER IDSs: + + <screen> + + </screen> + + </para> + + + + + <para> Fortran interface example: + <screen> use + + , only: get_type_index, get_type_name, get_type_description</screen> + </para> + + + + + + + + <table> + <tgroup cols="3"> + <colspec colwidth="45mm"/> + <colspec colwidth="60mm"/> + <colspec colwidth="68mm"/> + <thead> + <row> + <entry>Name</entry> + <entry>Value</entry> + <entry>Description</entry> + </row> + </thead> + <tbody> + + + + + <table> + <tgroup cols="3">. + + + <colspec colwidth="15mm"/> + <colspec colwidth="50mm"/> + <colspec colwidth="108mm"/> + <thead> + <row> + <entry>Flag</entry> + <entry>Id</entry> + <entry>Description</entry> + </row> + </thead> + <tbody> + + + + + + + + + + <row> + <entry> + + + </entry> + <entry> <mono> + + </mono> </entry> + + <entry> + + </entry> + </row> + + + + + </tbody> + </tgroup> + </table> + + + </section> + + + + diff --git a/common/list_idss.xsl b/common/list_idss.xsl new file mode 100644 index 0000000..d6d814a --- /dev/null +++ b/common/list_idss.xsl @@ -0,0 +1,18 @@ + + + + + + + + + + + + ; + + + + + diff --git a/common/xsltproc.py b/common/xsltproc.py new file mode 100644 index 0000000..009b7ad --- /dev/null +++ b/common/xsltproc.py @@ -0,0 +1,71 @@ +#!/usr/bin/env python +# simple net.sf.saxon.Transform cli replacement via saxonche Python bindings +# example invokation: +# ./xsltproc.py -xsl IDSDef2MDSpreTree.xsl -s IDSDef.xml -o output.xml DD_GIT_DESCRIBE=1 AL_GIT_DESCRIBE=1 + +import argparse +import logging + +import saxonche + + +def parse_arguments() -> tuple: + """Parse arguments, similar to net.sf.saxon.Transform...""" + + parser = argparse.ArgumentParser( + prog="xsltproc.py", + description="Imitates Saxon-HE's net.sf.saxon.Transform.", + epilog="Additional arguments in format key=value will be set as xml 
parameters", + ) + parser.add_argument( + "-xsl", + "--stylesheet_file", + type=str, + required=True, + help="XSL style sheet file", + ) + parser.add_argument( + "-s", + "--source_file", + type=str, + required=True, + help="source XML document", + ) + parser.add_argument( + "-o", + "--output_file", + type=str, + required=True, + help="transformed output XML document", + ) + + args, other_args = parser.parse_known_args() + # Convert list of strings "key=value" into dict(key=value, ...) + other_kwargs = {k: v for k, v in map(lambda x: x.split("="), other_args)} + return (args, other_kwargs) + + +def saxon_xsltproc( + source_file: str, stylesheet_file: str, output_file: str, **kwargs +) -> None: + with saxonche.PySaxonProcessor(license=False) as proc: + xsltproc = proc.new_xslt30_processor() + for key, value in kwargs.items(): + string_value = proc.make_string_value(value) + xsltproc.set_parameter(key, string_value) + xsltproc.transform_to_file( + source_file=source_file, + stylesheet_file=stylesheet_file, + output_file=output_file, + ) + + +if __name__ == "__main__": + logging.basicConfig(level=logging.INFO) + args, other_kwargs = parse_arguments() + saxon_xsltproc( + source_file=args.source_file, + stylesheet_file=args.stylesheet_file, + output_file=args.output_file, + **other_kwargs, + ) diff --git a/create_matlab_toolbox.m b/create_matlab_toolbox.m new file mode 100644 index 0000000..b5e3dc2 --- /dev/null +++ b/create_matlab_toolbox.m @@ -0,0 +1,214 @@ +function create_matlab_toolbox(installDir, outputDir, version, ddVersion) +% CREATE_MATLAB_TOOLBOX Package IMAS-MATLAB as a MATLAB toolbox +% +% Arguments: +% installDir - Directory containing installed IMAS-MATLAB files +% outputDir - Directory where .mltbx file will be created +% version - Version string (e.g., '5.5.0') +% ddVersion - Data Dictionary version (e.g., '4.1.0') + +if nargin < 4 + error('Usage: create_matlab_toolbox(installDir, outputDir, version, ddVersion)'); +end + +% Detect platform +if ispc + 
platform = 'win64'; +elseif ismac + platform = 'macos'; +elseif isunix + platform = 'linux64'; +else + platform = 'unknown'; +end + +% Create full version string +fullVersionName = sprintf('IMAS-MATLAB/%s-DD-%s-%s', version, ddVersion, platform); + +fprintf('Creating %s...\n', fullVersionName); + +% Validate input directory +toolboxSource = fullfile(installDir, 'toolbox'); +if ~exist(toolboxSource, 'dir') + error('Toolbox directory not found: %s', toolboxSource); +end + +% Create temporary packaging directory +packDir = fullfile(outputDir, 'toolbox_temp'); +if exist(packDir, 'dir') + rmdir(packDir, 's'); +end +mkdir(packDir); + +try + % Copy toolbox contents + fprintf(' Copying toolbox files...\n'); + copyfile(fullfile(toolboxSource, '*'), packDir); + + % Create startup script + fprintf(' Creating startup script...\n'); + create_startup_script(packDir, version, ddVersion, platform); + + % Create README + fprintf(' Creating README...\n'); + create_readme(packDir, version, ddVersion, platform); + + % Configure toolbox metadata + fprintf(' Configuring toolbox metadata...\n'); + % Create a valid identifier (alphanumeric and hyphens only) + toolboxId = sprintf('IMAS-MATLAB-%s', strrep(version, '.', '-')); + opts = matlab.addons.toolbox.ToolboxOptions(packDir, toolboxId); + + opts.ToolboxName = 'IMAS-MATLAB'; + opts.ToolboxVersion = version; + opts.AuthorName = 'ITER Organization'; + opts.Summary = sprintf('MATLAB interface for IMAS Access Layer - %s', fullVersionName); + opts.Description = sprintf(['High Level Interface for accessing IMAS fusion simulation data from MATLAB. ', ... + '%s includes all MEX files, MATLAB functions, and required ', ... + 'dependencies for Data Dictionary %s. Built for %s.'], ... 
+ fullVersionName, ddVersion, platform); + + % Add all files from package directory + opts.ToolboxFiles = {packDir}; + opts.ToolboxMatlabPath = {packDir}; + + % Set output file with full version name including platform + outputFileName = sprintf('IMAS-MATLAB_%s-DD-%s-%s.mltbx', version, ddVersion, platform); + outputFile = fullfile(outputDir, outputFileName); + opts.OutputFile = outputFile; + + % Package the toolbox + fprintf(' Building toolbox package...\n'); + matlab.addons.toolbox.packageToolbox(opts); + + fprintf('\n========================================\n'); + fprintf('✓ Toolbox created successfully!\n'); + fprintf('========================================\n\n'); + fprintf('Output file: %s\n', outputFile); + + if exist(outputFile, 'file') + fileInfo = dir(outputFile); + fprintf('File size: %.2f MB\n', fileInfo.bytes / 1024 / 1024); + end + + fprintf('\n--- Installation Instructions ---\n'); + fprintf('1. Double-click: %s\n', outputFile); + fprintf(' OR\n'); + fprintf('2. Run: matlab -batch "matlab.addons.install(''%s'')"\n', outputFileName); + + % Clean up temp directory + fprintf('\n Cleaning up temporary files...\n'); + rmdir(packDir, 's'); + fprintf('Done!\n'); + +catch ME + fprintf('\n✗ Failed to create toolbox:\n'); + fprintf(' %s\n\n', ME.message); + fprintf('Temporary files kept in: %s\n', packDir); + rethrow(ME); +end + +end + +function create_startup_script(packDir, version, ddVersion, platform) +% Create the toolbox startup script + +fullVersionName = sprintf('IMAS-MATLAB/%s-DD-%s-%s', version, ddVersion, platform); +startupFile = fullfile(packDir, 'imas_toolbox_startup.m'); +fid = fopen(startupFile, 'w'); + +fprintf(fid, 'function imas_toolbox_startup()\n'); +fprintf(fid, '%% IMAS_TOOLBOX_STARTUP Initialize IMAS-MATLAB toolbox\n'); +fprintf(fid, '%% Auto-generated startup script for %s\n\n', fullVersionName); +fprintf(fid, ' toolboxRoot = fileparts(mfilename(''fullpath''));\n\n'); +fprintf(fid, ' %% Add to path\n'); +fprintf(fid, ' if 
~contains(path, toolboxRoot)\n'); +fprintf(fid, ' addpath(toolboxRoot);\n'); +fprintf(fid, ' end\n\n'); +fprintf(fid, ' %% On Windows, add DLLs to system PATH\n'); +fprintf(fid, ' if ispc\n'); +fprintf(fid, ' currentPath = getenv(''PATH'');\n'); +fprintf(fid, ' if ~contains(currentPath, toolboxRoot)\n'); +fprintf(fid, ' setenv(''PATH'', [toolboxRoot pathsep currentPath]);\n'); +fprintf(fid, ' end\n'); +fprintf(fid, ' end\n\n'); +fprintf(fid, ' fprintf(''%s loaded successfully\\n'');\n\n', fullVersionName); +fprintf(fid, ' %% Display version information\n'); +fprintf(fid, ' try\n'); +fprintf(fid, ' v = imas_versions();\n'); +fprintf(fid, ' fprintf(''Access Layer: %%s | Data Dictionary: %%s\\n'', ...\n'); +fprintf(fid, ' v.al_version, v.dd_version);\n'); +fprintf(fid, ' catch ME\n'); +fprintf(fid, ' warning(''Could not retrieve version information: %%s'', ME.message);\n'); +fprintf(fid, ' end\n'); +fprintf(fid, 'end\n'); + +fclose(fid); +end + +function create_readme(packDir, version, ddVersion, platform) +% Create README file + +fullVersionName = sprintf('IMAS-MATLAB/%s-DD-%s-%s', version, ddVersion, platform); +readmeFile = fullfile(packDir, 'README.md'); +fid = fopen(readmeFile, 'w'); + +fprintf(fid, '# %s\n\n', fullVersionName); +fprintf(fid, 'MATLAB High Level Interface to the IMAS Access Layer.\n\n'); +fprintf(fid, '**Platform:** %s\n\n', platform); +fprintf(fid, '## Installation\n\n'); +fprintf(fid, 'This toolbox is installed via MATLAB Add-On Manager.\n\n'); +fprintf(fid, '## Getting Started\n\n'); +fprintf(fid, 'Initialize the toolbox (automatically done on installation):\n'); +fprintf(fid, '```matlab\n'); +fprintf(fid, 'imas_toolbox_startup()\n'); +fprintf(fid, '```\n\n'); +fprintf(fid, 'Verify installation:\n'); +fprintf(fid, '```matlab\n'); +fprintf(fid, 'v = imas_versions()\n'); +fprintf(fid, '```\n\n'); +fprintf(fid, '## Basic Usage\n\n'); +fprintf(fid, '### Opening and Reading Data\n\n'); +fprintf(fid, '```matlab\n'); +fprintf(fid, '%% Open IMAS 
database using URI\n'); +fprintf(fid, 'uri = ''imas:hdf5?path=./test_db'';\n'); +fprintf(fid, 'ctx = imas_open(uri, 43);\n'); +fprintf(fid, 'if ctx < 0\n'); +fprintf(fid, ' error(''Unable to open pulse'');\n'); +fprintf(fid, 'end\n\n'); +fprintf(fid, '%% Get IDS data\n'); +fprintf(fid, 'm = ids_get(ctx, ''magnetics'');\n\n'); +fprintf(fid, '%% Display data\n'); +fprintf(fid, 'disp(m.time);\n'); +fprintf(fid, 'disp(m.flux_loop{1}.flux.data);\n\n'); +fprintf(fid, '%% Close connection\n'); +fprintf(fid, 'imas_close(ctx);\n'); +fprintf(fid, '```\n\n'); +fprintf(fid, '### Writing Data\n\n'); +fprintf(fid, '```matlab\n'); +fprintf(fid, '%% Open/create pulse\n'); +fprintf(fid, 'uri = ''imas:hdf5?path=./test_db'';\n'); +fprintf(fid, 'ctx = imas_open(uri, 43);\n'); +fprintf(fid, 'if ctx < 0\n'); +fprintf(fid, ' error(''Unable to open pulse'');\n'); +fprintf(fid, 'end\n\n'); +fprintf(fid, '%% Generate IDS structure\n'); +fprintf(fid, 'm = ids_gen(''magnetics'');\n\n'); +fprintf(fid, '%% Populate data\n'); +fprintf(fid, 'm.ids_properties.homogeneous_time = 1;\n'); +fprintf(fid, 'm.flux_loop{1}.flux.data(1) = 10.0;\n'); +fprintf(fid, 'm.flux_loop{1}.flux.data(2) = 20.0;\n'); +fprintf(fid, 'm.time(1) = 2.0;\n'); +fprintf(fid, 'm.time(2) = 5.0;\n\n'); +fprintf(fid, '%% Write to database\n'); +fprintf(fid, 'ids_put(ctx, ''magnetics'', m);\n\n'); +fprintf(fid, '%% Close\n'); +fprintf(fid, 'imas_close(ctx);\n'); +fprintf(fid, '```\n\n'); +fprintf(fid, '## Documentation\n\n'); +fprintf(fid, 'For complete documentation, visit: https://imas-matlab.readthedocs.io/\n\n'); +fprintf(fid, '## License\n\n'); +fprintf(fid, 'See LICENSE.txt for license information.\n'); + +fclose(fid); +end diff --git a/doc/.gitignore b/doc/.gitignore index a56608d..1c84c65 100644 --- a/doc/.gitignore +++ b/doc/.gitignore @@ -1,4 +1,2 @@ _build -# Symlinks to other AL projects (created by CMake): -doc_common plugins diff --git a/doc/building_installing.rst b/doc/building_installing.rst index d594853..e805994 
100644 --- a/doc/building_installing.rst +++ b/doc/building_installing.rst @@ -1 +1 @@ -.. include:: ./doc_common/building_installing.rst +.. include:: ./doc_common/building_installing.rst \ No newline at end of file diff --git a/doc/conf.py b/doc/conf.py index 001af34..303c43a 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -15,9 +15,21 @@ copyright = f"{datetime.datetime.now().year}, ITER Organization" author = "ITER Organization" -version = subprocess.check_output(["git", "describe"]).decode().strip() -last_tag = subprocess.check_output(["git", "describe", "--abbrev=0"]).decode().strip() -is_develop = version != last_tag +# Get version from git describe, with fallback if no tags exist +try: + version = subprocess.check_output(["git", "describe"], stderr=subprocess.DEVNULL).decode().strip() +except subprocess.CalledProcessError: + # If git describe fails (no tags), use a default version + version = "development" + +# Get last tag for develop check, with fallback +try: + last_tag = subprocess.check_output(["git", "describe", "--abbrev=0"], stderr=subprocess.DEVNULL).decode().strip() +except subprocess.CalledProcessError: + # If no tags exist, we're in development + last_tag = None + +is_develop = last_tag is None or version != last_tag html_context = { "is_develop": is_develop @@ -39,14 +51,14 @@ # todo_include_todos = True -templates_path = ["./doc_common/templates"] # Note: exclude doc_common and plugins folders (which are symlinked by the CMake build) +# Also exclude myenv (Python virtual environment) exclude_patterns = ["_build", "Thumbs.db", ".DS_Store", "doc_common", "plugins"] # -- RST snippets to include in every page ----------------------------------- rst_epilog = """\ .. |DD| replace:: `Data Dictionary`_ -.. _`Data Dictionary`: https://sharepoint.iter.org/departments/POP/CM/IMDesign/Data%20Model/CI/Latest.html +.. 
_`Data Dictionary`: https://imas-data-dictionary.readthedocs.io/en/latest/ """ # -- sphinxcontrib.matlab configuration -------------------------------------- @@ -71,10 +83,10 @@ html_theme = "sphinx_immaterial" html_theme_options = { - "repo_url": "https://git.iter.org/projects/IMAS/repos/access-layer", + "repo_url": "https://github.com/iterorganization/IMAS-MATLAB.git", "repo_name": "Access Layer", "icon": { - "repo": "fontawesome/brands/bitbucket", + "repo": "fontawesome/brands/github", }, "features": [ # "navigation.expand", diff --git a/doc/conf.rst b/doc/conf.rst deleted file mode 100644 index 59227f5..0000000 --- a/doc/conf.rst +++ /dev/null @@ -1 +0,0 @@ -.. include:: ./doc_common/conf.rst diff --git a/doc/doc_common/building_installing.rst b/doc/doc_common/building_installing.rst new file mode 100644 index 0000000..b46099a --- /dev/null +++ b/doc/doc_common/building_installing.rst @@ -0,0 +1,326 @@ +Building and installing the IMAS-MATLAB +======================================== + +This page describes how to build and install the IMAS-MATLAB. + +Documentation for developers wishing to contribute to the IMAS-MATLAB can be found in +the :ref:`IMAS-MATLAB development guide`. Please refer to that guide if you wish to set +up a development environment. + +For more information about related components, see: + +- `IMAS Core Documentation `__ +- `IMAS Data Dictionary Documentation `__ + + +.. note:: + + For Windows-specific installation instructions, please refer to the + :doc:`MATLAB on Windows ` guide. + + +.. 
_`build prerequisites`: + +Prerequisites +------------- + +To build the IMAS-MATLAB you need: + +- Git +- A C++11 compiler (tested with GCC and Intel compilers) +- CMake (3.16 or newer) +- Boost C++ libraries (1.66 or newer) +- PkgConfig + +The following dependencies are only required for some of the components: + +- Backends + + - **HDF5 backend**: HDF5 C/C++ libraries (1.8.12 or newer) + - **MDSplus backend**: MDSplus libraries (7.84.8 or newer) + - **UDA backend**: `UDA `__ libraries + (2.7.5 or newer) [#uda_install]_ + +.. [#uda_install] When installing UDA, make sure you have + `Cap'n'Proto `__ installed in your system + and add its support by adding the CMake switch `-DENABLE_CAPNP=ON` when configuring UDA. + + +- MATLAB High Level Interface + + - **MATLAB High Level Interface**: A working MATLAB installation (tested with + version 2023b) + + + + +Standard environments: + +.. md-tab-set:: + + .. md-tab-item:: SDCC ``intel-2023b`` + + The following modules provide all the requirements when using the + ``intel-2023b`` toolchain: + + .. code-block:: bash + + module load intel-compilers/2023.2.1 CMake/3.27.6-GCCcore-13.2.0 Saxon-HE/12.4-Java-21 \ + Boost/1.83.0-iimpi-2023b HDF5/1.14.3-iimpi-2023b \ + MDSplus/7.132.0-GCCcore-13.2.0 \ + UDA/2.8.1-iimpi-2023b Blitz++/1.0.2-GCCcore-13.2.0 \ + MATLAB/2023b-r5-GCCcore-13.2.0 SciPy-bundle/2023.11-intel-2023b \ + scikit-build-core/0.9.3-GCCcore-13.2.0 + + .. md-tab-item:: SDCC ``foss-2023b`` + + The following modules provide all the requirements when using the + ``foss-2023b`` toolchain: + + .. code-block:: bash + + module load CMake/3.27.6-GCCcore-13.2.0 Saxon-HE/12.4-Java-21 \ + Boost/1.83.0-GCC-13.2.0 HDF5/1.14.3-gompi-2023b \ + MDSplus/7.132.0-GCCcore-13.2.0 \ + UDA/2.8.1-GCC-13.2.0 Blitz++/1.0.2-GCCcore-13.2.0 \ + MATLAB/2023b-r5-GCCcore-13.2.0 SciPy-bundle/2023.11-gfbf-2023b \ + build/1.0.3-foss-2023b scikit-build-core/0.9.3-GCCcore-13.2.0 + + .. 
admonition:: The MATLAB/2023b-r5 installation is lightly tweaked + + The installation at ITER uses `EB PR#20508 + `__ + and its tweak resolves `IMAS-5162 `__ + by removing ``libstdc++.so.6`` from the MATLAB installation. It also adds + ``extern/bin/glnxa64`` to ``LD_LIBRARY_PATH`` to address the + ``MatlabEngine not found`` issue. + + .. caution:: + + When using the HDF5 backend within MATLAB, depending on the HDF5 library being used + you may need to add ``LD_PRELOAD=/lib/libhdf5_hl.so`` when starting + MATLAB. + + .. md-tab-item:: Ubuntu 22.04 + + The following packages provide most requirements when using Ubuntu 22.04: + + .. code-block:: bash + + apt install git build-essential cmake libsaxonhe-java libboost-all-dev \ + pkg-config libhdf5-dev xsltproc libblitz0-dev gfortran \ + default-jdk-headless python3-dev python3-venv python3-pip + + The following dependencies are not available from the package repository, + you will need to install them yourself: + + - MDSplus: see their `GitHub repository + `__ or `home page + `__ for installation instructions. + - UDA: see their `GitHub repository `__ for more + details. + - MATLAB, which is not freely available. + + +Building and installing a single High Level Interface +----------------------------------------------------- + +This section explains how to install a Matlab High Level Interface. Please make sure you +have the :ref:`build prerequisites` installed. + + +Clone the repository +```````````````````` + +First you need to clone the repository of the High Level Interface you want to build: + +.. code-block:: bash + + # For the MATLAB HLI use: + git clone git@github.com:iterorganization/IMAS-MATLAB.git + + +Configuration +````````````` + +Once you have cloned the repository, navigate your shell to the folder and run cmake. +You can pass configuration options with ``-D OPTION=VALUE``. See below list for an +overview of configuration options. + +.. 
code-block:: bash + + cd al-matlab # al-fortran, al-java, al-cpp or al-python + cmake -B build -D CMAKE_INSTALL_PREFIX=$HOME/al-install -D OPTION1=VALUE1 -D OPTION2=VALUE2 [...] + +.. note:: + + CMake will automatically fetch dependencies from other IMAS-MATLAB GIT repositories + for you. You may need to provide credentials to clone the following repositories: + + - `imas-core (git@github.com:iterorganization/IMAS-Core.git) + `__ + - `al-plugins (https://github.com/iterorganization/al-plugins.git) + `__ + - `imas-data-dictionary (git@github.com:iterorganization/IMAS-Data-Dictionary.git) + `__ + + If you need to change the git repositories, for example to point to a mirror of the + repository or to use a HTTPS URL instead of the default SSH URLs, you can update the + :ref:`configuration options`. For example, add the following options to your + ``cmake`` command to download the repositories over HTTPS instead of SSH: + + .. code-block:: text + :caption: Use explicit options to download dependent repositories over HTTPS + + cmake -B build \ + -D AL_CORE_GIT_REPOSITORY=git@github.com:iterorganization/IMAS-Core.git \ + -D AL_PLUGINS_GIT_REPOSITORY=git@github.com:iterorganization/al-plugins.git \ + -D DD_GIT_REPOSITORY=git@github.com:iterorganization/IMAS-Data-Dictionary.git + + If you use CMake 3.21 or newer, you can also use the ``https`` preset: + + .. code-block:: text + :caption: Use CMake preset to set to download dependent repositories over HTTPS + + cmake -B build --preset=https + + +Choosing the compilers +'''''''''''''''''''''' + +You can instruct CMake to use compilers with the following environment variables: + +- ``CC``: C compiler, for example ``gcc`` or ``icc``. +- ``CXX``: C++ compiler, for example ``g++`` or ``icpc``. + +If you don't specify a compiler, CMake will take a default (usually from the Gnu +Compiler Collection). + +.. important:: + + These environment variables must be set before the first time you configure + ``cmake``! 
+ + If you have an existing ``build`` folder and want to use a different compiler, you + should delete the ``build`` folder first, or use a differently named folder for the + build tree. + + +Configuration options +''''''''''''''''''''' + +For a complete list of available configuration options, please see the `IMAS Core Configuration Options `__. + + +MATLAB-specific configuration options +''''''''''''''''''''''''''''''''''''' + +The following options are specific to the MATLAB High Level Interface: + +- ``AL_CREATE_TOOLBOX``: Automatically create MATLAB toolbox package (``.mltbx``) during installation + + - **Default:** ``OFF`` + - **Type:** Boolean + - **Description:** If set to ``ON``, the MATLAB toolbox package will be created automatically when you run + ``cmake --install``. This requires MATLAB to be installed and available in your PATH. If disabled, you can + create the toolbox manually at any time using the ``matlab-toolbox`` build target. + + - **Usage Examples:** + + - Enable during configuration:: + + cmake -B build -DAL_CREATE_TOOLBOX=ON ... + cmake --install build + + - Create toolbox after installation (default):: + + cmake -B build ... + cmake --install build + cmake --build build --target matlab-toolbox + + +Build the High Level Interface +`````````````````````````````` + +Use ``make`` to build everything. You can speed things up by using parallel compiling +as shown with the ``-j`` option. Be careful with the amount of parallel processes +though: it's easy to exhaust your machine's available hardware (CPU or memory) which may +cause the build to fail. This is especially the case with the C++ High Level Interface. + +.. code-block:: bash + + # Instruct make to build "all" in the "build" folder, using at most "8" parallel + # processes: + make -C build -j8 all + +.. note:: + + By default CMake on Linux will create ``Unix Makefiles`` for actually building + everything, as assumed in this section. 
+ + You can select different generators (such as Ninja) if you prefer, but these are not + tested. See the `CMake documentation + `__ for more + details. + + +Optional: Test the High Level Interface +``````````````````````````````````````` + +If you set either of the options ``AL_EXAMPLES`` or ``AL_TESTS`` to ``ON``, you can run +the corresponding test programs as follows: + +.. code-block:: bash + + # Use make: + make -C build test + # Directly invoke ctest + ctest --test-dir build + +This executes ``ctest`` to run all test and example programs. Note that this may take a +long time to complete. + + +Install the High Level Interface +```````````````````````````````` + +Run ``make install`` to install the high level interface in the folder that you chose in +the configuration step above. + + +Use the High Level Interface +```````````````````````````` + +After installing the HLI, you need to ensure that your code can find the installed +IMAS-MATLAB. To help you with this, a file ``al_env.sh`` is installed. You can +``source`` this file to set all required environment variables: + +.. code-block:: bash + :caption: Set environment variables (replace ```` with your install folder) + + source /bin/al_env.sh + +You may want to add this to your ``$HOME/.bashrc`` file to automatically make the Access +Layer installation available for you. + +.. note:: + + To use a ``public`` dataset, you also need to set the ``IMAS_HOME`` environment + variable. For example, on SDCC, this would be ``export IMAS_HOME=/work/imas``. + + Some programs may rely on an environment variable ``IMAS_VERSION`` to detect which + version of the data dictionary is used in the current IMAS environment. You may set + it manually with the DD version you've build the HLI with, for example: ``export + IMAS_VERSION=3.41.0``. + +Once you have set the required environment variables, you may continue :ref:`Using the +IMAS-MATLAB`. 
+ + +Troubleshooting +``````````````` + +**Problem:** ``Target Boost::log already has an imported location`` + This problem is known to occur with the ``2020b`` toolchain on SDCC. Add the CMake + configuration option ``-D Boost_NO_BOOST_CMAKE=ON`` to work around the problem. + diff --git a/doc/doc_common/dev_guide.rst b/doc/doc_common/dev_guide.rst new file mode 100644 index 0000000..2897f98 --- /dev/null +++ b/doc/doc_common/dev_guide.rst @@ -0,0 +1,178 @@ +IMAS-MATLAB development guide +============================= + + +Repositories +------------ + +The IMAS-MATLAB consists of a number of dependencies which are developed in separate +repositories: + +- `imas-core `__: the + IMAS core repository, MDSplus model generator and Python lowlevel + bindings. +- `data-dictionary + `__: the IMAS Data + Dictionary definitions, used for generating MDSplus models and the traditional High + Level Interfaces. +- `IMAS-Core-Plugins `__: Access + Layer plugins. +- Traditional (code-generated) High Level Interfaces + + - `IMAS-MATLAB `__: + MATLAB HLI + + +The documentation on this page covers everything except the Non-generated HLIs, those +are documented in their own projects. + + +Development environment +----------------------- + +See the :ref:`build prerequisites` section for an overview of modules you need to load +when on SDCC or packages to install when using Ubuntu 22.04. + +The recommended development folder layout is to clone all :ref:`Repositories` in a single root folder (``al-dev`` in below example, but the name of that +folder is not important). + +.. code-block:: text + + al-dev/ # Feel free to name this folder however you want + ├── al-core/ + ├── al-plugins/ # Optional + ├── al-matlab/ + └── data-dictionary/ + +Then, when you configure a project for building (see :ref:`Configuration`), set the +option ``-D AL_DOWNLOAD_DEPENDENCIES=OFF``. 
Instead of fetching requirements from the +ITER git, CMake will now use the repositories as they are checked out in your +development folders. + + With this setup, it is your responsibility to update the repositories to their + latest versions (if needed). The ``_VERSION`` configuration options are + ignored when ``AL_DOWNLOAD_DEPENDENCIES=OFF``. + +This setup allows you to develop in multiple repositories in parallel. + + +Dependency management +--------------------- + +With all IMAS-MATLAB dependencies spread over different repositories, managing +dependencies is more complex than before. Below diagram expresses the dependencies +between the different repositories: + +.. md-mermaid:: + :name: repository-dependencies + + flowchart + core[al-core] -->|"MDSplus
models"| dd[data-dictionary]
+      plugins[al-plugins] --> core
+      hli["al-{hli}"] --> core
+      hli --> dd
+      hli --> plugins
+
+To manage the "correct" version of each of the dependencies, the CMake configuration
+specifies which branch to use from each repository:
+
+- Each HLI indicates which commit to use from the ``al-core`` repository. This is
+  defined by the ``AL_CORE_VERSION`` cache string in the main ``CMakeLists.txt`` of
+  the repository.
+
+  The default version used is ``main``, which is the last stable release of
+  ``al-core``.
+- Inside the ``al-core`` repository, the commits to use for the
+  ``al-plugins`` and ``data-dictionary`` are set in `ALCommonConfig.cmake
+  `__.
+
+  The default versions used are ``main`` for ``al-plugins``, and ``main`` for
+  ``data-dictionary``.
+
+
+.. note::
+
+   CMake supports setting branch names, tags and commit hashes for the dependencies.
+
+
+CMake
+-----
+
+We're using CMake for the build configuration. See `the CMake documentation
+`__ for more details about CMake.
+
+The ``FetchContent`` CMake module is used for making :ref:`dependencies from other repositories
+` available. For more information on this module we refer to the
+`FetchContent CMake documentation
+`__.
+
+
+Documentation overview
+----------------------
+
+The documentation is generated with Sphinx. For more information on Sphinx, see the `Sphinx docs
+`__ and the `documentation of the theme
+(sphinx-immaterial) that we're using
+`__.
+
+Documentation of the HLI is inside the ``doc`` folder of the repository. This folder
+contains the configuration (``conf.py``), and documentation pages (``*.rst``).
+Documentation that is common to all High Level Interfaces (such as this developer guide)
+is in the `common/doc_common folder in the al-core repository
+`__.
+
+
+Building the documentation
+''''''''''''''''''''''''''
+
+Use the option ``-D AL_HLI_DOCS`` to enable building documentation. This will create a
+target ``al--docs``, e.g. 
``al-matlab-docs`` that will only build the +documentation. You could also use ``-D AL_DOCS_ONLY`` to only build the documentation, +and nothing else. + +.. code-block:: console + :caption: Example: building the documentation for the Python HLI + + al-dev$ cd al-matlab + al-matlab$ # Configure cmake to only create the documentation: + al-matlab$ cmake -B build -D AL_HLI_DOCS -D AL_DOCS_ONLY + [...] + al-matlab$ make -C build al-matlab-docs + [...] + + +GitHub Actions CI/CD pipeline +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +In addition to the ITER CI systems, the IMAS-MATLAB repository uses `GitHub Actions +`__ for automated building and testing. The +workflow is defined in `.github/workflows/build-and-test.yml +`__. + +This workflow: + +- **Triggers**: Automatically runs on pushes to ``main``, ``develop``, and ``feature/**`` branches, + on all pull requests to ``main`` and ``develop``, on release tags (``v*``), and can be triggered + manually via ``workflow_dispatch``. + +- **Platforms**: Currently tests on Ubuntu 24.04 with GCC 14 compiler and MATLAB R2023b. + +- **Build steps**: + + 1. Sets up Python 3.11 environment + 2. Installs MATLAB using GitHub's official MATLAB action + 3. Installs system dependencies (build-essential, cmake, pkg-config, etc.) + 4. Caches Boost and pip packages for faster builds + 5. Builds and optionally installs external dependencies (UDA, HDF5, etc.) + 6. Configures the project with CMake + 7. Compiles the code + 8. Runs tests if enabled + +- **Backends tested**: Currently enables the HDF5 backend while MDSplus and UDA + backends are disabled to simplify testing. + +- **Build artifacts**: The workflow checks that the code compiles successfully and + that all tests pass. Build logs are available in the GitHub Actions tab of the repository. + +You can monitor the status of builds and tests in the +`Actions `__ tab of the GitHub repository. 
diff --git a/doc/doc_common/identifiers.rst b/doc/doc_common/identifiers.rst
new file mode 100644
index 0000000..8dfcf8e
--- /dev/null
+++ b/doc/doc_common/identifiers.rst
@@ -0,0 +1,34 @@
+Identifiers
+===========
+
+The "identifier" structure is used to provide an enumerated list of options.
+
+For a complete reference of all available identifiers, see the
+`IMAS Data Dictionary Identifiers `__ documentation.
+
+.. csv-table:: Identifier examples (from part of the ``core_sources/source`` identifier)
+   :header-rows: 1
+
+   Index, Name, Description
+   2, NBI, Source from Neutral Beam Injection
+   3, EC, Sources from electron cyclotron heating and current drive
+   4, LH, Sources from lower hybrid heating and current drive
+   5, IC, Sources from heating at the ion cyclotron range of frequencies
+   6, fusion, "Sources from fusion reactions, e.g. alpha particle heating"
+
+Using the identifiers library
+-----------------------------
+
+|identifiers_link_instructions|
+
+The examples below illustrate how to use the identifiers in your Matlab programs.
+
+.. literalinclude:: code_samples/identifier_example1
+   :caption: Matlab example 1: obtain identifier information of coordinate identifier ``phi``
+
+.. literalinclude:: code_samples/identifier_example2
+   :caption: Matlab example 2: Use the identifier library to fill the ``NBI`` label in the ``core_sources`` IDS
+
+.. literalinclude:: code_samples/identifier_example3
+   :caption: Matlab example 3: Use the identifier library to fill the type of coordinate system used in the ``equilibrium`` IDS
+
diff --git a/doc/doc_common/imas.rst b/doc/doc_common/imas.rst
new file mode 100644
index 0000000..dc10c94
--- /dev/null
+++ b/doc/doc_common/imas.rst
@@ -0,0 +1,4 @@
+IMAS overview
+=============
+
+For more information about IMAS, see the `IMAS Data Dictionary Introduction `__. 
diff --git a/doc/doc_common/imas_uri.rst b/doc/doc_common/imas_uri.rst new file mode 100644 index 0000000..1c7bc02 --- /dev/null +++ b/doc/doc_common/imas_uri.rst @@ -0,0 +1,43 @@ +.. _data entry uris: + +IMAS Data Entry URIs +==================== + +This documentation covers how to specify where and how IMAS data is stored using URIs. + +For comprehensive details about IMAS Data Entry URIs and the URI scheme, please refer to the +official IMAS Data Dictionary documentation: + +.. seealso:: + :title: IMAS URI and Data Entry Documentation + + - `Data-Entry and Occurrence `__ + - Explains what a data-entry is and the concept of occurrences in IMAS + + - `IMAS Access-Layer URI Scheme `__ + - Complete reference for the IMAS URI structure including: + + - Scheme, host, backend, query, and fragment components + - Backend types and their options + - Query parameters and keys + - URI examples and legacy identifiers + +When you :ref:`load or store IMAS data `, you need to provide +a data entry URI according to these specifications. + +.. note:: + + For detailed information about all backends, query keys, and backend-specific options, + please refer to the `IMAS URI Scheme documentation + `__ + in the IMAS Data Dictionary. + + + + +.. note:: + + For detailed information about all backends, query keys, and backend-specific options, + please refer to the `IMAS URI Scheme documentation + `__ + in the IMAS Data Dictionary. diff --git a/doc/doc_common/load_store_ids.rst b/doc/doc_common/load_store_ids.rst new file mode 100644 index 0000000..7f4f446 --- /dev/null +++ b/doc/doc_common/load_store_ids.rst @@ -0,0 +1,261 @@ +Loading and storing IMAS data +============================= + +IMAS data is grouped together in Data Entries. A Data Entry is a collection of +:ref:`IDSs ` and their (potentially) multiple +occurrences, which groups and stores data over multiple IDSs as a single +dataset. 
The Data Entry concept is used whether the collection of IDSs is stored +in a database or only exists temporarily (for example for communication in an +integrated workflow). + +Loading and storing IMAS data happens through an IMAS Database Entry. A Database +Entry tracks the information required for locating where the Data Entry is (or +will be) stored on disk. In |lang| this object is modeled as |dbentry|. + +You may :ref:`open an existing IMAS Database Entry`, which you can use for +loading data that was stored previously. Alternatively you can :ref:`create a +new IMAS Database Entry` to store IDS data. + + +Open an existing IMAS Database Entry +------------------------------------ + +To open an IMAS Database Entry, you need to know the URI indicating where the +Access Layer can find the data. IMAS URIs start with ``imas:`` and indicate +the format and the location of the stored data. You can find a detailed +description of the IMAS URI syntax on the :ref:`Data entry URIs` page. + +.. literalinclude:: code_samples/dbentry_open + :caption: |lang| example: open an existing IMAS Database Entry + +.. seealso:: + + API documentation for |dbentry_open|. + + +Loading IMAS data +----------------- + +After you open a database entry, you can request to load data from disk. + +.. contents:: + :local: + + +Load an entire IDS +'''''''''''''''''' + +With |dbentry_get| you can load ("get") an entire IDS from the database entry. + +Multiple `occurrences` of an IDS may be stored in a data entry. By default, if +you don't specify an occurrence number, occurrence 0 is loaded. By providing an +occurrence number you can load a specific occurrence. How different occurrences +are used depends on the experiment. They could, for example, correspond to: + +- different methods for computing the physical quantities of the IDS, or +- different functionalities in a workflow (e.g. initial values, prescribed + values, values at next time step, …), or +- multiple subsystems (e.g. 
diagnostics) of the same type in an experiment, etc. + +.. todo:: extend docs after Task 2c. is implemented (get multiple occurrences) + +.. literalinclude:: code_samples/dbentry_get + :caption: |lang| example: get an IDS from an IMAS Database Entry + +.. seealso:: + + - API documentation for |dbentry_get|. + + +Load a single `time slice` of an IDS +'''''''''''''''''''''''''''''''''''' + +Instead of loading a full IDS from disk, the Access Layer allows you to load a +specific `time slice`. This is often useful when you're not interested in the +full time evolution, but instead want data of a specific time. You can use +|dbentry_getslice| for this. + +Most of the time there are no entries at that specific time, so you also need to +indicate an `interpolation method`. This determines what values the access layer +returns when your requested time is in between available time points in the +data. Three interpolation methods currently exist: + +|PREVIOUS_INTERP| + Returns the `previous` time slice if the requested time does not exactly + exist in the original IDS. + + For example, when data exists at :math:`t=\{1, 3, 4\}`, requesting + :math:`t_r=2.1` will give you the data at :math:`t=1`. + + .. csv-table:: Edge case behaviour. :math:`\{t_i\}, i=1..N` represents the time series stored in the IDS. + :header-rows: 1 + + Case, Behaviour + :math:`t_r \lt t_1`, Return data at :math:`t_1`. + :math:`t_r = t_i` [#equal_note]_, Return data at :math:`t_i`. + + +|CLOSEST_INTERP| + Returns the `closest` time slice in the original IDS. This can also be + `after` the requested time. + + For example, when data exists at :math:`t=\{1, 3, 4\}`, requesting + :math:`t=2.1` will give you the data at :math:`t=3`. + + .. csv-table:: Edge case behaviour. :math:`\{t_i\}, i=1..N` represents the time series stored in the IDS. + :header-rows: 1 + + Case, Behaviour + :math:`t_r \lt t_1`, Return data at :math:`t_1`. + :math:`t_r = t_i` [#equal_note]_, Return data at :math:`t_i`. 
+ :math:`t_r - t_i = t_{i+1} - t_r` [#equal_note]_, Return data at :math:`t_{i+1}`. + +.. [#equal_note] Equality for floating point numbers is tricky. For example, + :code:`3.0/7.0 + 2.0/7.0 + 2.0/7.0` is not exactly equal to :code:`1.0`. It + is therefore advised not to depend on this behaviour. + +|LINEAR_INTERP| + Returns a linear interpolation between the existing slices before and after + the requested time. + + For example, when data exists at :math:`t=\{1, 3, 4\}`, requesting + :math:`t=2.1` will give you a linear interpolation of the data at + :math:`t=1` and the data at :math:`t=3`. + + Note that the linear interpolation will be successful only if, between the + two time slices of an interpolated dynamic array of structure, the same + leaves are populated and they have the same size. Otherwise + |dbentry_getslice| will interpolate all fields with a compatible size and + leave others empty. + + .. csv-table:: Edge case behaviour. :math:`\{t_i\}, i=1..N` represents the time series stored in the IDS. + :header-rows: 1 + + Case, Behaviour + :math:`t_r \lt t_1`, Return data at :math:`t_1`. + :math:`t_r \gt t_N`, Return data at :math:`t_N`. + +.. literalinclude:: code_samples/dbentry_getslice + :caption: |lang| example: get a time slice from an IMAS Database Entry + +.. note:: + + The access layer assumes that all time arrays are stored in increasing + order. |dbentry_getslice| may return unexpected results if your data does + not adhere to this assumption. + +.. seealso:: + + API documentation for |dbentry_getslice|. + + +.. include:: partial_get + + +Create a new IMAS Database Entry +-------------------------------- + +To create a new IMAS Database Entry, you need to provide the URI to indicate the +format and the location where you want to store the data. You can find a +detailed description of the IMAS URI syntax and the options available on the +:ref:`Data entry URIs` page. + +.. 
caution:: + + This function erases any existing database entry on the specified URI! + + +.. literalinclude:: code_samples/dbentry_create + :caption: |lang| example: create a new IMAS Database Entry + +.. seealso:: + + API documentation for |dbentry_create|. + + +Store IMAS data +--------------- + +After you have created an IMAS Database Entry, you can use it for storing IDS +data. There are two ways to do this: + +.. contents:: + :local: + + +Store an entire IDS +''''''''''''''''''' + +With |dbentry_put| you can store ("put") an entire IDS in a database entry. +First you need to have an IDS with data: you can create a new one or :ref:`load +an IDS ` which you modify. See :ref:`Use Interface Data +Structures` for more information on using and manipulating IDSs. + +.. caution:: + + This function erases the existing IDS in the data entry if any was already + stored previously. + +Multiple `occurrences` of an IDS may be stored in a data entry. By default, if +you don't specify an occurrence number, the IDS is stored as occurrence 0. By +providing an occurrence number you can store the IDS as a specific occurrence. + +.. note:: + + The MDS+ backend has a limitation on the number of occurrences of a given + IDS. This number is indicated in the |DD| documentation in the "Max. + occurrence number" column of the list of IDSs. This limitation doesn't apply + to other backends. + +.. literalinclude:: code_samples/dbentry_put + :caption: |lang| example: put an IDS to an IMAS Database Entry + +.. seealso:: + + API documentation for |dbentry_put|. + + +Append a time slice to an already-stored IDS +'''''''''''''''''''''''''''''''''''''''''''' + +With |dbentry_put_slice| you can append a time slice to an existing database +entry. This is useful when you generate data inside a time loop (for example in +simulations, or when taking measurements of an experiment). 
+
+It means you can put a time slice with every iteration of your loop such that
+you don't have to keep track of the complete time evolution in memory. Instead,
+the Access Layer will keep appending the data to the Database Entry in the
+storage backend.
+
+.. note::
+
+   Although being put progressively time slice by time slice, the final IDS
+   must be compliant with the data dictionary. A typical error when
+   constructing IDS variables time slice by time slice is to change the size of
+   the IDS fields during the time loop, which is not allowed except for the
+   children of an array of structure which has time as its coordinate.
+
+.. literalinclude:: code_samples/dbentry_put_slice
+   :caption: |lang| example: iteratively put time slices to an IMAS Database Entry
+
+.. seealso::
+
+   API documentation for |dbentry_put_slice|.
+
+
+Listing all occurrences of an IDS from a backend
+''''''''''''''''''''''''''''''''''''''''''''''''
+
+With |list_all_occurrences| you can list all non-empty occurrences of an IDS
+using its name in the dataset, and optionally return the content of a
+descriptive node path.
+
+.. note::
+
+   The MDS+ backend stores IDS occurrence info (pulse file metadata)
+   for AL version > 5.0.0. Pulse files created with AL version <= 5.0.0
+   do not provide this information (an exception will occur for such
+   pulse files when calling |list_all_occurrences|).
+
+.. 
literalinclude:: code_samples/dbentry_list_all_occurrences + :caption: |lang| example: listing all occurrences of a magnetics IDS from an IMAS Database Entry diff --git a/doc/doc_common/requirements.txt b/doc/doc_common/requirements.txt new file mode 100644 index 0000000..c738935 --- /dev/null +++ b/doc/doc_common/requirements.txt @@ -0,0 +1,11 @@ +# Sphinx and theme +sphinx >= 6.0, < 7.0 +sphinx_immaterial >= 0.11.4, < 0.12 + +six # un-listed dependency of sphinx-fortran + +# Matlab domain +sphinxcontrib-matlabdomain + +# Requirements for the HLI files we import for the Python docs +numpy diff --git a/doc/doc_common/static/.keep b/doc/doc_common/static/.keep new file mode 100644 index 0000000..e69de29 diff --git a/doc/doc_common/use_ids.rst b/doc/doc_common/use_ids.rst new file mode 100644 index 0000000..6f10b4b --- /dev/null +++ b/doc/doc_common/use_ids.rst @@ -0,0 +1,339 @@ +Use Interface Data Structures +============================= + +The Interface Data Structures (IDSs) are the main way to interact with IMAS +data. An IDS is a tree-like structure with one root element (the IDS) and +several branches with data at the leave nodes. + +Many types of IDSs exist: check out the documentation of the |DD| for a complete +overview. + + +Creating IDSs +------------- + +IDSs can be created in multiple ways: + +1. :ref:`Load an IDS from disk ` +2. :ref:`Create an empty IDS` +3. :ref:`Create a copy of an IDS` + + +Create an empty IDS +''''''''''''''''''' + +You can create an empty instance of an IDS by |create_ids_text| creates an empty +``core_profiles`` IDS. This initializes all items in the IDS to their +:ref:`default values`. + +.. literalinclude:: code_samples/ids_create + :caption: |lang| example: create an empty IDS + + +Create a copy of an IDS +''''''''''''''''''''''' + +You can create a copy of another IDS |copy_ids|. + +.. 
literalinclude:: code_samples/ids_copy + :caption: |lang| example: create a copy of an IDS + + +Deallocate an IDS +''''''''''''''''' + +If you no longer need an IDS, you can deallocate it so it releases the (memory) +resources in use by the data. |deallocate_ids_text| + +.. literalinclude:: code_samples/ids_deallocate + :caption: |lang| example: deallocate an IDS + + +Mandatory and recommended IDS attributes +---------------------------------------- + +Some attributes in an IDS are mandatory or recommended to always fill. Below +list provides a short overview: + +.. todo:: + + Link to DD documentation + +1. ``ids_properties/homogeneous_time`` `[mandatory]`: see :ref:`Time coordinates + and time handling`. +2. ``ids_properties/comment`` `[recommended]`: a comment describing the content + of this IDS. +3. ``ids_properties/provider`` `[recommended]`: name of the person in charge of + producing this data. +4. ``ids_properties/creation_date`` `[recommended]`: date at which this data has + been produced, recommended to use the `ISO 8601 + `_ ``YYYY-MM-DD`` format. + +.. note:: + + ``ids_properties/version_put`` is filled by the access layer when you + :ref:`put an IDS `. + + +Understanding the IDS structure +------------------------------- + +An IDS is a `tree structure +`_. You can think of it +similar as a directory structure with files: the IDS is the root "directory", +and inside it you can find "subdirectories" and "files" with data. + +We will use the general Computer Science terminology for tree structures and +call these "files" and "directories" of our IDSs `nodes`. IDSs can have a +limited number of different types of nodes: + +1. :ref:`Structure`: think of these as the directories of your file + system. Structures contain one or more child nodes (files and + subdirectories). Child nodes can be of any node type again. + +2. :ref:`Array of structures`: this is an array of structures (see + previous point). + +3. :ref:`Data`: this is a data element. 
Like files on your file system + these nodes contain the actual data stored in the IDS. + + +Structure +''''''''' + +Structure nodes in an IDS are a container for other nodes. In |lang| they are +implemented as a |structures_type|. You can address child nodes as +|structures_child_attribute|, see the code sample below. + +.. literalinclude:: code_samples/ids_structures_node + :caption: |lang| example: address the child node of an IDS structure node + + +Array of structures +''''''''''''''''''' + +Array of structure nodes in an IDS are one-dimensional arrays, containing structure +nodes. In |lang| they are implemented as a |aos_type|. The default value (for +example, when creating a new IDS) for these nodes is |aos_default|. + +.. literalinclude:: code_samples/ids_array_of_structures_node + :caption: |lang| example: address the child node of an IDS arrays of structure node + + +Resizing an array of structures +``````````````````````````````` + +You can resize an array of structures with |aos_resize_meth|. After calling +this, the array of structures will have ``n`` elements. + +.. caution:: + + Resizing an array of structures with |aos_resize_meth| will clear all data + inside the array of structure! Use |aos_resize_keep_meth| to keep existing + data. + +.. literalinclude:: code_samples/ids_array_of_structures_resize + :caption: |lang| example: resizing an array of structures + + +Data +'''' + +Data nodes in an IDS contain numerical or textual data. The data type and +dimensions of a node are defined in the |DD|. + +.. literalinclude:: code_samples/ids_data_node + :caption: |lang| example: get the data contained in a data node of an IDS + + +Data types +`````````` + +The following data types exist: + +- Textual data (|str_type|) +- Whole numbers (|int_type|) +- Floating point numbers (|double_type|) +- Complex floating point numbers (|complex_type|) + +Data nodes can be 0-dimensional, which means that the node accepts a single +value of the specified type. 
Multi-dimensional data nodes also exist: + +- Textual data: at most 1 dimension (|str_1d_type|) +- Whole numbers: 1-3 dimensions (|int_nd_type|) +- Floating point numbers: 1-6 dimensions (|double_nd_type|) +- Complex floating point numbers: 1-6 dimensions (|complex_nd_type|) + + +.. _Empty fields: + +Default values +`````````````` + +The default values for data fields (for example when creating an empty IDS) are +indicated in the following table. |isFieldValid| + +.. csv-table:: + :header-rows: 1 + :stub-columns: 1 + + , 0D, 1+ dimension + "Textual + + data", |str_default|, |str_1D_default| + "Whole + + numbers", |int_default|, |ND_default| + "Floating + + point + + numbers", |double_default|, |ND_default| + "Complex + + numbers", |complex_default|, |ND_default| + + +Time coordinates and time handling +'''''''''''''''''''''''''''''''''' + +Some quantities (and array of structures) are time dependent. In the |DD| +documentation this is indicated by a coordinate that refers to a time quantity. + +This time-dependent coordinate is treated specially in the access layer, and it +depends on the value of ``ids_properties/homogeneous_time``. There are three +valid values for this property: + +.. todo:: + + Add links to DD. + +1. |tm_heterogeneous| (=0): time-dependent quantities in the IDS may have + different time coordinates. The time coordinates are stored as indicated by + the path in the documentation. This is known as `heterogeneous time`. +2. |tm_homogeneous| (=1): All time-dependent quantities in this IDS use the same + time coordinate. This is known as `homogeneous time`. This time coordinate is + located in the root of the IDS, for example ``core_profiles/time``. The + time paths indicated in the documentation are unused in this case. +3. |tm_independent| (=2): The IDS stores no time-dependent data. + + + +IDS validation +-------------- + +The IDSs you fill should be consistent. 
To help you in validating that, the +Access Layer provides a validation method (|ids_validate|) that executes the +following checks. + +.. contents:: Validation checks + :local: + :depth: 1 + +If you call this method and your IDS fails validation, the Access Layer +|validate_error| explaining the problem. See the following example: + +.. literalinclude:: code_samples/ids_validate + :caption: |lang| example: call IDS validation + +The Access Layer automatically validates an IDS every time you do a +`put` or `put_slice`. To disable this feature, you must set the environment +variable ``IMAS_AL_DISABLE_VALIDATE`` to ``1``. + +.. seealso:: + + API documentation: |ids_validate| + + +Validate the time mode +'''''''''''''''''''''' + +The time mode of an IDS is stored in ``ids_properties.homogeneous_time``. This +property must be filled with a valid time mode (|tm_homogeneous|, +|tm_heterogeneous| or |tm_independent|). When the time +mode is |tm_independent|, all time-dependent quantities must be empty. + + +Validate coordinates +'''''''''''''''''''' + +If a quantity in your IDS has coordinates, then these coordinates must be filled. The +size of your data must match the size of the coordinates: + +.. todo:: link to DD docs + +1. Some dimensions must have a fixed size. This is indicated by the Data Dictionary + as, for example, ``1...3``. + + For example, in the ``magnetics`` IDS, ``b_field_pol_probe(i1)/bandwidth_3db`` has + ``1...2`` as coordinate 1. This means that, if you fill this data field, the first + (and only) dimension of this field must be of size 2. + +2. If the coordinate is another quantity in the IDS, then that coordinate must be + filled and have the same size as your data. + + For example, in the ``pf_active`` IDS, ``coil(i1)/current_limit_max`` is a + two-dimensional quantity with coordinates ``coil(i1)/b_field_max`` and + ``coil(i1)/temperature``. This means that, if you fill this data field, their + coordinate fields must be filled as well. 
The first dimension of + ``current_limit_max`` must have the same size as ``b_field_max`` and the second + dimension the same size as ``temperature``. + + Time coordinates are handled depending on the value of + ``ids_properties/homogeneous_time``: + + - When using |tm_homogeneous|, all time coordinates look at the root + ``time`` node of the IDS. + - When using |tm_heterogeneous|, all time coordinates look at the time + path specified as coordinate by the Data Dictionary. + + For dynamic array of structures, the time coordinates is a ``FLT_0D`` inside the + AoS (see, for example, ``profiles_1d`` in the ``core_profiles`` IDS). In such + cases the time node must be set to something different than ``EMPTY_FLOAT``. + This is the only case in which values of the coordinates are verified, in all + other cases only the sizes of coordinates are validated. + + .. rubric:: Alternative coordinates + + Version 4 of the Data Dictionary introduces alternative coordinates. An + example of this can be found in the ``core_profiles`` IDS in + ``profiles_1d(itime)/grid/rho_tor_norm``. Alternatives for this coordinate + are: + + - ``profiles_1d(itime)/grid/rho_tor`` + - ``profiles_1d(itime)/grid/psi`` + - ``profiles_1d(itime)/grid/volume`` + - ``profiles_1d(itime)/grid/area`` + - ``profiles_1d(itime)/grid/surface`` + - ``profiles_1d(itime)/grid/rho_pol_norm`` + + Multiple alternative coordinates may be filled (for example, an IDS might + fill both the normalized and non-normalized toroidal flux coordinate). In + that case, the size must be the same. + + When a quantity refers to this set of alternatives (for example + ``profiles_1d(itime)/electrons/temperature``), at least one of the + alternative coordinates must be set and its size match the size of the + quantity. + +3. The Data Dictionary can indicate exclusive alternative coordinates. 
See for + example the ``distribution(i1)/profiles_2d(itime)/density(:,:)`` quantity in the + ``distributions`` IDS, which has as first coordinate + ``distribution(i1)/profiles_2d(itime)/grid/r OR + distribution(i1)/profiles_2d(itime)/grid/rho_tor_norm``. This means that + either ``r`` or ``rho_tor_norm`` can be used as coordinate. + + Validation works the same as explained in the previous point, except that + exactly one of the alternative coordinates must be filled. Its size must, of + course, still match the size of the data in the specified dimension. + +4. Some quantities indicate a coordinate must be the same size as another quantity + through the property ``coordinateX_same_as``. In this case, the other quantity is + not a coordinate, but their data is related and must be of the same size. + + An example can be found in the ``edge_profiles`` IDS, quantity + ``ggd(itime)/neutral(i1)/velocity(i2)/diamagnetic``. This is a two-dimensional field + for which the first coordinate must be the same as + ``ggd(itime)/neutral(i1)/velocity(i2)/radial``. When the diamagnetic velocity + component is filled, the radial component must be filled as well, and have a + matching size. diff --git a/doc/doc_common/using_al.rst b/doc/doc_common/using_al.rst new file mode 100644 index 0000000..143f841 --- /dev/null +++ b/doc/doc_common/using_al.rst @@ -0,0 +1,21 @@ +Using the IMAS-MATLAB +====================== + +Making the IMAS-MATLAB available for use +----------------------------------------- + +When you're working with a local installation (see :ref:`Building and +installing the IMAS-MATLAB`), you can source the installed environment file: + +.. code-block:: bash + :caption: Set environment variables (replace ```` with the folder of your local install) + + source /bin/al_env.sh + + +.. + HLI-specific documentation should include the following items: + + - Code samples for loading the library (import, include, use, etc.) 
- Example program printing the IMAS-MATLAB version used + - Instructions for (compiling if relevant) and running the example program diff --git a/doc/getting_started.rst b/doc/getting_started.rst new file mode 100644 index 0000000..924501a --- /dev/null +++ b/doc/getting_started.rst @@ -0,0 +1,173 @@ +Getting Started with IMAS-MATLAB +================================= + +Welcome! This 5-minute guide will get you up and running with the IMAS-MATLAB. + +**What is IMAS-MATLAB?** + +IMAS-MATLAB is the IMAS data access library (formerly known as the Access Layer) for Matlab users/developers. + + +Load the IMAS-MATLAB Module +------------------------------- + +On the ITER SDCC (supercomputing cluster), make the Access Layer available: + +.. code-block:: bash + + module load IMAS-MATLAB + +To see available versions: + +.. code-block:: bash + + module avail IMAS-MATLAB + +If you have a local installation, source the environment file instead: + +.. code-block:: bash + + source /bin/al_env.sh + + +Open MATLAB and Connect to Data +------------------------------------------- + +Start MATLAB and open a database entry using an IMAS URI. A URI tells the Access Layer +where your data is stored and in what format. + +.. code-block:: matlab + + % Open a database entry + uri = 'imas:hdf5?path=/path/to/data'; + ctx = imas_open(uri, 40); + + if ctx < 0 + error('Unable to open database'); + end + +**What's an IMAS URI?** + +URIs follow the format: ``imas:backend?query_options`` + +For example: +- ``imas:hdf5?path=/path/to/data`` – Read from HDF5 files +- ``imas:mdsplus?path=./test_db`` – Read from MDSplus +- ``imas:uda?backend=...`` – Read from UDA backend + +Learn more: :ref:`Data entry URIs` + + +Load and Display Data +------------------------ + +Fetch an IDS from your database entry: + +.. 
code-block:: matlab + + % Load the magnetics IDS (occurrence 0) + magnetics = ids_get(ctx, 'magnetics'); + + % Explore the data + disp(magnetics.ids_properties); % Metadata + disp(magnetics.time); % Time points + disp(magnetics.flux_loop{1}.flux.data); % Access nested data + + +Modify and Store Data +------------------------ + +You can create new data, modify existing data, and store it back: + +.. code-block:: matlab + + % Create a new IDS or modify existing one + equilibrium = ids_init('equilibrium'); + equilibrium.time = [0, 1, 2, 3]; + equilibrium.q_profile.value.data = [1, 2, 3, 4]; + + % Store it to the database + ids_put(ctx, 'equilibrium', equilibrium); + + +Clean Up +----------- + +Always close the database entry when you're done: + +.. code-block:: matlab + + imas_close(ctx); + + +Key Functions Reference +----------------------- + ++====================================+=============================================+ +| Function | Purpose | ++====================================+=============================================+ +| ``imas_open(uri, version)`` | Open a database entry at the given URI | ++------------------------------------+---------------------------------------------+ +| ``imas_close(ctx)`` | Close the database entry | ++------------------------------------+---------------------------------------------+ +| ``ids_get(ctx, ids_name)`` | Load an entire IDS | ++------------------------------------+---------------------------------------------+ +| ``ids_put(ctx, ids_name, ids_obj)``| Store an IDS to disk | ++------------------------------------+---------------------------------------------+ +| ``ids_init(ids_name)`` | Create and initialize a new IDS | ++------------------------------------+---------------------------------------------+ +| ``ids_get_slice(ctx, ids_name, time)`` | Load a specific time slice | ++====================================+=============================================+ + + +Common Use Cases +---------------- + +**Load data 
and extract a single time slice:** + +.. code-block:: matlab + + % Use CLOSEST interpolation + data = ids_get_slice(ctx, 'equilibrium', 2.5, 'CLOSEST'); + + +**Check if data exists:** + +.. code-block:: matlab + + if ids_isdefined(magnetics.flux_loop{1}.flux) + disp('Flux data is defined'); + end + + +**Access MATLAB examples:** + +The repository contains several example scripts in the ``examples/`` directory: +- ``test_get.m`` Load and display data +- ``test_put.m`` Store new data +- ``test_get_sample_magnetics.m`` Practical magnetics data example + + +Next Steps +---------- + +- **Read more about IDSs**: :doc:`Use Interface Data Structures ` +- **Learn advanced loading/storing**: :doc:`Loading and storing IMAS data ` +- **Understand data storage**: :ref:`Data entry URIs` +- **Check the full API documentation**: See your installed IMATLAB help or visit the + `IMAS-MATLAB `__ + + +Common Issues +------------- + +**"Unable to open pulse" error:** +- Check that your URI is correct and the data path exists + +**IDS not found:** +- Verify the data entry contains this IDS +- Use ``ids_isdefined()`` to check existence first + +**Need help?** +- Check the :doc:`Using the Access Layer ` guide +- Consult the `IMAS-MATLAB `__ \ No newline at end of file diff --git a/doc/imas_uri.rst b/doc/imas_uri.rst index 4c48dce..d5a21ee 100644 --- a/doc/imas_uri.rst +++ b/doc/imas_uri.rst @@ -1 +1,7 @@ +.. _ascii backend: +.. _hdf5 backend: +.. _mdsplus backend: +.. _memory backend: +.. _uda backend: + .. include:: ./doc_common/imas_uri.rst diff --git a/doc/index.rst b/doc/index.rst index 3a336fa..bf55cd1 100644 --- a/doc/index.rst +++ b/doc/index.rst @@ -1,46 +1,23 @@ Access Layer - MATLAB High Level Interface ========================================== -These pages document the MATLAB High Level Interface to the :ref:`IMAS ` Access Layer. All code samples and API documentation will show how to use the access layer in the `MATLAB `_ programming language. -.. 
seealso:: - :title: Access Layer documentation for different programming languages - :collapsible: - - Check out the following pages if you want to use the Access Layer with a - different programming language: - - - `Python High Level Interface - `_ - - `Fortran High Level Interface - `_ - - `C++ High Level Interface - `_ - - `Java High Level Interface - `_ - -.. todo:: - - Replace IDM links with sharepoint links to the respective HLIs - -.. todolist:: - .. toctree:: :maxdepth: 2 :caption: Contents: - imas + getting_started + imas using_al load_store_ids use_ids imas_uri identifiers - conf - plugins .. toctree:: :caption: Examples @@ -59,6 +36,7 @@ programming language. :caption: AL Developer documentation building_installing + matlab_on_windows dev_guide Indices and tables diff --git a/doc/matlab_on_windows.rst b/doc/matlab_on_windows.rst new file mode 100644 index 0000000..2c24765 --- /dev/null +++ b/doc/matlab_on_windows.rst @@ -0,0 +1,328 @@ +========================================== +Windows Installation Guide +========================================== + +Known Limitations +================= + +.. warning:: + + **IDS validation is not supported on Windows.** + + The ``ids_validate`` MEX function and the automatic validation step that + normally runs inside ``ids_put`` / ``ids_put_slice`` are both **disabled** on + Windows builds. Calling ``ids_validate()`` directly will result in an error. + + This is a known limitation tracked in + `GitHub issue #3 `_. + + As a consequence: + + - The ``ids_validate`` MEX target is **not compiled** on Windows. + - ``ids_put`` and ``ids_put_slice`` skip the validation check on Windows and + write data directly without schema validation. + - Users are responsible for ensuring that the IDS data structure is correct + before calling ``ids_put`` or ``ids_put_slice`` on Windows. + + +Windows Prerequisites +===================== + +1. **Visual Studio 2022** with: + - Desktop Development with C++ + - C++ Make Tools for Windows + +2. 
**MATLAB R2025b** (or compatible version) + +3. **CMake** (included with Visual Studio) + +4. **Python 3.8+** with venv module (verify with ``python --version``) + + + +Setup vcpkg +=========== + +.. code-block:: batch + + git clone https://github.com/microsoft/vcpkg.git + cd vcpkg + bootstrap-vcpkg.bat + + +Configure PowerShell Environment +================================ + +Run these commands in PowerShell before building: + +.. code-block:: powershell + + $env:PATH += ";C:\Program Files\Microsoft Visual Studio\2022\Community\Common7\IDE\CommonExtensions\Microsoft\CMake\CMake\bin\" + $env:PATH += ";C:\Program Files\Microsoft Visual Studio\2022\Community\VC\Tools\MSVC\14.44.35207\bin\HostX86\x86" + $env:PATH += ";" + +if using already built IMAS-Core: + +.. code-block:: powershell + + $env:AL_COMMON_PATH = "\share\common" + +Build Configuration +=================== + +**Debug Build:** + +.. code-block:: bash + + # with -DAL_DOWNLOAD_DEPENDENCIES=ON + cmake -Bbuild -S . -DVCPKG=ON -DAL_PYTHON_BINDINGS=ON -DCMAKE_INSTALL_PREFIX="" -DCMAKE_TOOLCHAIN_FILE="/scripts/buildsystems/vcpkg.cmake" -DAL_DOWNLOAD_DEPENDENCIES=ON -DAL_EXAMPLES=OFF -DAL_TESTS=OFF -DAL_PLUGINS=OFF -DAL_HLI_DOCS=OFF -DAL_DOCS_ONLY=OFF -DDD_VERSION="3.39.0" -DAL_BACKEND_UDA=OFF -DAL_BACKEND_UDAFAT=OFF -DAL_BACKEND_MDSPLUS=OFF + # with -DAL_DOWNLOAD_DEPENDENCIES=OFF + cmake -Bbuild -S . -DVCPKG=ON -DAL_PYTHON_BINDINGS=ON -DCMAKE_INSTALL_PREFIX="" -DCMAKE_TOOLCHAIN_FILE="/scripts/buildsystems/vcpkg.cmake" -DAL_DOWNLOAD_DEPENDENCIES=OFF -DAL_EXAMPLES=OFF -DAL_TESTS=OFF -DAL_PLUGINS=OFF -DAL_HLI_DOCS=OFF -DAL_DOCS_ONLY=OFF -DDD_VERSION="3.39.0" -DAL_BACKEND_UDA=OFF -DAL_BACKEND_UDAFAT=OFF -DAL_BACKEND_MDSPLUS=OFF -DCMAKE_PREFIX_PATH="" + +**Release Build:** + +.. code-block:: bash + + # with -DAL_DOWNLOAD_DEPENDENCIES=ON + cmake -Bbuild -S . 
-DCMAKE_BUILD_TYPE=Release -DVCPKG=ON -DAL_PYTHON_BINDINGS=ON -DCMAKE_INSTALL_PREFIX="" -DCMAKE_TOOLCHAIN_FILE="/scripts/buildsystems/vcpkg.cmake" -DAL_DOWNLOAD_DEPENDENCIES=ON -DAL_EXAMPLES=OFF -DAL_TESTS=OFF -DAL_PLUGINS=OFF -DAL_HLI_DOCS=OFF -DAL_DOCS_ONLY=OFF -DDD_VERSION="3.39.0" -DAL_BACKEND_UDA=OFF -DAL_BACKEND_UDAFAT=OFF -DAL_BACKEND_MDSPLUS=OFF + # with -DAL_DOWNLOAD_DEPENDENCIES=OFF + cmake -Bbuild -S . -DCMAKE_BUILD_TYPE=Release -DVCPKG=ON -DAL_PYTHON_BINDINGS=ON -DCMAKE_INSTALL_PREFIX="" -DCMAKE_TOOLCHAIN_FILE="/scripts/buildsystems/vcpkg.cmake" -DAL_DOWNLOAD_DEPENDENCIES=OFF -DAL_EXAMPLES=OFF -DAL_TESTS=OFF -DAL_PLUGINS=OFF -DAL_HLI_DOCS=OFF -DAL_DOCS_ONLY=OFF -DDD_VERSION="3.39.0" -DAL_BACKEND_UDA=OFF -DAL_BACKEND_UDAFAT=OFF -DAL_BACKEND_MDSPLUS=OFF -DCMAKE_PREFIX_PATH="" + +Build and Install +================= + +.. code-block:: bash + + cmake --build build --config Release --target install + + +Using in MATLAB +=============== + +Example MATLAB script to access IMAS data: + +.. code-block:: matlab + + % Set PATH to find all required DLLs + setenv('PATH', [getenv('PATH') ... + ';\build\Release' ... + ';\matlab' ... + ';\build\_deps\al-core-build\Release' ... 
+ ';\build\vcpkg_installed\x64-windows\bin']); + + % Add MATLAB helper functions and MEX files + addpath('\matlab'); + addpath('\build\Release'); + + % Open IMAS pulse file + uri = 'imas:hdf5?path='; + ctx = imas_open(uri, 40); + if ctx < 0 + error('Unable to open pulse'); + end + + % Get IDS structure + try + m = ids_get(ctx, 'waves'); + disp('Success!') + catch ME + disp(['Error: ' ME.message]) + end + + % Display results + disp('=== IDS Properties ==='); + disp(['Comment: ' m.ids_properties.comment]); + disp(['Data Dictionary: ' m.ids_properties.version_put.data_dictionary]); + + +Creating the MATLAB Toolbox Package +==================================== + +After a successful build and install, a self-contained ``.mltbx`` toolbox file can be +created using the ``create_matlab_toolbox`` script included in the install directory. + +.. note:: + + The ``/toolbox/`` folder must exist (produced by + ``cmake --build build --config Release --target install``) before packaging. + +Run the following from inside MATLAB: + +.. code-block:: matlab + + create_matlab_toolbox('', '', '', '') + +For example: + +.. code-block:: matlab + + create_matlab_toolbox('C:\imas_matlab_release', ... + 'C:\imas_matlab_release', ... + '5.5.0', '4.1.1') + +This produces a file such as:: + + C:\imas_matlab_release\IMAS-MATLAB_5.5.0-DD-4.1.1-win64.mltbx + + +Installing the MATLAB Toolbox +============================== + +**Option A – Double-click** the ``.mltbx`` file in Windows File Explorer. +MATLAB opens and installs it automatically via the Add-On Manager. + +**Option B – From inside MATLAB:** + +.. 
code-block:: matlab + + matlab.addons.install('C:\imas_matlab_release\IMAS-MATLAB_5.5.0-DD-4.1.1-win64.mltbx') + +The toolbox is installed to:: + + C:\Users\\AppData\Roaming\MathWorks\MATLAB Add-Ons\Toolboxes\IMAS-MATLAB\ + + +Using the Installed Toolbox +============================ + +Initializing +------------ + +On Windows, run this at the start of every MATLAB session to register the DLL +folder on the system ``PATH`` so that MEX files can locate their dependencies: + +.. code-block:: matlab + + imas_toolbox_startup() + +Expected output:: + + IMAS-MATLAB/5.5.0-DD-4.1.1-win64 loaded successfully + Access Layer: 5.5.0+64-... | Data Dictionary: 4.1.1 + +To run this automatically every session, add it to your MATLAB ``startup.m``: + +.. code-block:: matlab + + % Open startup.m + edit(fullfile(userpath, 'startup.m')) + + % Add the following line and save: + imas_toolbox_startup() + +Verifying the installation +--------------------------- + +.. code-block:: matlab + + v = imas_versions() + +Expected output:: + + v = + struct with fields: + al_version: '5.5.0+64-...' + hli_version: '5.5.0+64-...' + dd_version: '4.1.1' + +Writing data +------------ + +.. code-block:: matlab + + % Open / create a database entry (mode 43 = FORCE_CREATE) + ctx = imas_open('imas:hdf5?path=C:/mydata', 43); + if ctx < 0, error('Unable to open database'); end + + m = ids_gen('magnetics'); + m.ids_properties.homogeneous_time = 1; + m.time = [1.0; 2.0; 3.0]; + m.flux_loop{1}.flux.data = [10.0; 20.0; 30.0]; + + ids_put(ctx, 'magnetics', m); % writes entire IDS (overwrites existing) + imas_close(ctx); + +Reading data +------------ + +.. code-block:: matlab + + % Open existing database (mode 40 = OPEN_PULSE) + ctx = imas_open('imas:hdf5?path=C:/mydata', 40); + if ctx < 0, error('Unable to open database'); end + + m = ids_get(ctx, 'magnetics'); + disp(m.time) + disp(m.flux_loop{1}.flux.data) + + imas_close(ctx); + +Appending time slices +---------------------- + +.. 
code-block:: matlab + + ctx = imas_open('imas:hdf5?path=C:/mydata', 43); + + m = ids_gen('magnetics'); + m.ids_properties.homogeneous_time = 1; + + % Time slices must be appended in strictly increasing order + m.time = 1.0; m.flux_loop{1}.flux.data = 10.0; + ids_put_slice(ctx, 'magnetics', m); + + m.time = 2.0; m.flux_loop{1}.flux.data = 20.0; + ids_put_slice(ctx, 'magnetics', m); + + imas_close(ctx); + +``imas_open`` mode values +-------------------------- + ++----+----------------------+----------------------------------------------+ +|Mode| Constant | Description | ++====+======================+==============================================+ +| 40 | ``OPEN_PULSE`` | Open existing entry (error if not found) | ++----+----------------------+----------------------------------------------+ +| 41 | ``FORCE_OPEN_PULSE`` | Open entry, create if it does not exist | ++----+----------------------+----------------------------------------------+ +| 42 | ``CREATE_PULSE`` | Create new entry (error if already exists) | ++----+----------------------+----------------------------------------------+ +| 43 | ``FORCE_CREATE`` | Create entry, overwrite if it already exists | ++----+----------------------+----------------------------------------------+ + +Uninstalling +------------ + +In MATLAB: **Home → Add-Ons → Manage Add-Ons** → find *IMAS-MATLAB* → **Uninstall**. + +Or from the command line: + +.. code-block:: matlab + + matlab.addons.uninstall('IMAS-MATLAB') + + +Run MATLAB Tests +================ + +.. 
code-block:: bash + + matlab -batch "test_code" + + +Windows Troubleshooting +======================= + +- Ensure all PowerShell environment variables are set before running CMake +- Verify Visual Studio C++ build tools are installed +- Check that all dependencies are accessible at the specified network paths +- Confirm Python installation with ``python --version`` +- If MEX files fail with *"The specified module could not be found"*, ensure + ``imas_toolbox_startup()`` has been called and the vcpkg DLLs are present in + the toolbox folder (``hdf5.dll``, ``pthreadVC3.dll``, + ``boost_filesystem-vc143-mt-x64-1_90.dll``, ``dl.dll``) +- **IDS validation is not available on Windows** — ``ids_validate`` is not + compiled and validation is skipped inside ``ids_put`` / ``ids_put_slice``. + See `GitHub issue #3 `_ + for status and updates. diff --git a/doc/plugins.rst b/doc/plugins.rst deleted file mode 100644 index e199c5a..0000000 --- a/doc/plugins.rst +++ /dev/null @@ -1,9 +0,0 @@ -.. include:: ./doc_common/plugins.rst - - -.. toctree:: - :caption: Contents - :maxdepth: 2 - - plugins_architecture - plugins_examples diff --git a/doc/plugins_architecture.rst b/doc/plugins_architecture.rst deleted file mode 100644 index a0b402a..0000000 --- a/doc/plugins_architecture.rst +++ /dev/null @@ -1 +0,0 @@ -.. include:: ./doc_common/plugins_architecture.rst diff --git a/doc/plugins_examples.rst b/doc/plugins_examples.rst deleted file mode 100644 index 22e6081..0000000 --- a/doc/plugins_examples.rst +++ /dev/null @@ -1 +0,0 @@ -.. 
include:: ./doc_common/plugins_examples.rst diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt index 48cd7ba..2fc56d2 100644 --- a/examples/CMakeLists.txt +++ b/examples/CMakeLists.txt @@ -11,18 +11,24 @@ if( AL_BACKEND_MDSPLUS ) get_target_property( MDSPLUS_MODEL_DIR al-mdsplus-model BINARY_DIR ) endif() -set( M_FILES +# Examples that use MDSplus backend +set( M_FILES_MDSPLUS test_amns_data_validate test_distributions_validate test_get test_isdefined test_put - test_get_sample_magnetics test_serialize test_waves_validate ) -foreach( M_FILE ${M_FILES} ) +# Examples that use HDF5 backend +set( M_FILES_HDF5 + test_get_sample_magnetics +) + +# Add tests for MDSplus examples +foreach( M_FILE ${M_FILES_MDSPLUS} ) add_test( NAME example-mex-${M_FILE} COMMAND ${Matlab_MAIN_PROGRAM} -nodisplay @@ -33,9 +39,39 @@ foreach( M_FILE ${M_FILES} ) set( DISABLED OFF ) if( NOT AL_BACKEND_MDSPLUS ) set( DISABLED ON ) + message(STATUS "Disabling example ${M_FILE}: requires MDSplus backend") + endif() + # Note: added ids_path for backward compatibility with IMAS-Core<5.6 + set_al_example_properties( example-mex-${M_FILE} ${DISABLED} OFF "MDSPLUS_MODELS_PATH=${MDSPLUS_MODEL_DIR};ids_path=${MDSPLUS_MODEL_DIR}" ) + + if( AL_PLUGINS ) + add_test( NAME example-mex-${M_FILE}-with-plugins + COMMAND ${Matlab_MAIN_PROGRAM} + -nodisplay + -batch "addpath ${LIB_FOLDER} ${M_FOLDER}; ${M_FILE}; exit()" + WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} + ) + # Note: added ids_path for backward compatibility with IMAS-Core<5.6 + set_al_example_properties( example-mex-${M_FILE}-with-plugins ${DISABLED} ON "MDSPLUS_MODELS_PATH=${MDSPLUS_MODEL_DIR};ids_path=${MDSPLUS_MODEL_DIR}" ) endif() +endforeach() + +# Add tests for HDF5 examples +foreach( M_FILE ${M_FILES_HDF5} ) + add_test( NAME example-mex-${M_FILE} + COMMAND ${Matlab_MAIN_PROGRAM} + -nodisplay + -batch "addpath ${LIB_FOLDER} ${M_FOLDER}; ${M_FILE}; exit()" + WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} + ) - 
set_al_example_properties( example-mex-${M_FILE} ${DISABLED} OFF "ids_path=${MDSPLUS_MODEL_DIR}" ) + set( DISABLED OFF ) + if( NOT AL_BACKEND_HDF5 ) + set( DISABLED ON ) + message(STATUS "Disabling example ${M_FILE}: requires HDF5 backend") + endif() + # Note: added ids_path for backward compatibility with IMAS-Core<5.6 + set_al_example_properties( example-mex-${M_FILE} ${DISABLED} OFF "MDSPLUS_MODELS_PATH=${MDSPLUS_MODEL_DIR};ids_path=${MDSPLUS_MODEL_DIR}" ) if( AL_PLUGINS ) add_test( NAME example-mex-${M_FILE}-with-plugins @@ -44,6 +80,7 @@ foreach( M_FILE ${M_FILES} ) -batch "addpath ${LIB_FOLDER} ${M_FOLDER}; ${M_FILE}; exit()" WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} ) - set_al_example_properties( example-mex-${M_FILE}-with-plugins ${DISABLED} ON "ids_path=${MDSPLUS_MODEL_DIR}" ) + # Note: added ids_path for backward compatibility with IMAS-Core<5.6 + set_al_example_properties( example-mex-${M_FILE}-with-plugins ${DISABLED} ON "MDSPLUS_MODELS_PATH=${MDSPLUS_MODEL_DIR};ids_path=${MDSPLUS_MODEL_DIR}" ) endif() endforeach() diff --git a/ids_put.xsl b/ids_put.xsl index 295c32d..a02e500 100644 --- a/ids_put.xsl +++ b/ids_put.xsl @@ -156,7 +156,9 @@ void mexFunction(int nlhs, mxArray *plhs[], #include "imas_mex_utils.h" + #ifndef _WIN32 #include "ids_validate.h" + #endif al_status_t ids_delete_(int expIdx, char* idsFullName); diff --git a/ids_put_slice.xsl b/ids_put_slice.xsl index 510c9ea..82d6f8f 100644 --- a/ids_put_slice.xsl +++ b/ids_put_slice.xsl @@ -156,7 +156,9 @@ void mexFunction(int nlhs, mxArray *plhs[], #include "imas_mex_utils.h" + #ifndef _WIN32 #include "ids_validate.h" + #endif al_status_t ids_put_(int expIdx, char* idsFullName, const mxArray* ids); diff --git a/ids_rand.xsl b/ids_rand.xsl index 5f6835b..d149105 100644 --- a/ids_rand.xsl +++ b/ids_rand.xsl @@ -149,7 +149,11 @@ void mexFunction(int nlhs, mxArray *plhs[], int i; int n; mxArray* data; - srandom(0); + #ifdef _WIN32 + srand(0); + #else + srandom(0); + #endif status = 
init_dataTree_read(); if (status.code >= 0) status = get_data_from_dataTree(NULL, ids); diff --git a/ids_validate.xsl b/ids_validate.xsl index 31052df..aa74371 100644 --- a/ids_validate.xsl +++ b/ids_validate.xsl @@ -135,8 +135,13 @@ void mexFunction(int nlhs, mxArray *plhs[], // a hardcoded strok_r. same function but by moving the save pointer char *my_strtok_r (char *srcString, char delim, char **save_ptr) { - uint openpar = 0; - uint closepar = 0; + #ifdef _WIN32 + unsigned int openpar = 0; + unsigned int closepar = 0; + #else + uint openpar = 0; + uint closepar = 0; + #endif if(!srcString) { srcString = *save_ptr; @@ -194,7 +199,11 @@ void mexFunction(int nlhs, mxArray *plhs[], const mxArray* pfield; char *relative_path; char *pathcopy = strdup(path); - mwIndex index; + #ifdef _WIN32 + mwIndex index = 0; + #else + mwIndex index; + #endif if (!data) { free(pathcopy); @@ -413,17 +422,17 @@ end_repl_str: /* Allow for 1D row vectors */ if (ndims == 1 && dims[0] == 1) { - size_t needed = snprintf(NULL, 0, "%s,%d,%s","(",dims[1],")"); + size_t needed = snprintf(NULL, 0, "%s,%zu,%s","(",dims[1],")"); char *result = malloc(needed+1); - sprintf(result,"%s,%d,%s","(",dims[1],")"); + sprintf(result,"%s,%zu,%s","(",dims[1],")"); return result; } else { - size_t needed = snprintf(NULL, 0, "%s%d","(",dims[0]); - for (int i=1;i<rank;i++) needed = needed + snprintf(NULL, 0, ",%d",dims[i]); + size_t needed = snprintf(NULL, 0, "%s%zu","(",dims[0]); + for (int i=1;i<rank;i++) needed = needed + snprintf(NULL, 0, ",%zu",dims[i]); needed = needed + snprintf(NULL, 0, ")"); char *result = malloc(needed+1); - sprintf(result,"%s%d","(",dims[0]); - for (int i=1;i<rank;i++) sprintf(result,"%s,%d",result,dims[i]); + sprintf(result,"%s%zu","(",dims[0]); + for (int i=1;i<rank;i++) sprintf(result,"%s,%zu",result,dims[i]); sprintf(result,"%s)",result); return result; } @@ -567,9 +576,9 @@ end_repl_str: sprintf(buffercoord, "%s OR %s",buffercoord,ctargetfield[target]); } if(spec_dim!=0) 
sprintf(buffercoord,"%s OR %d",buffercoord,spec_dim); - size_t needed = snprintf(NULL, 0, "Element '%s%s' has incorrect shape %s: its coordinate in dimension %d ('%s%s') has size %d.", crootpath, initialpath, getShapeStr(pfield,rank), cfield_dim,crootpath,ctargetfield[targetcpathid], targetFieldSize); + size_t needed = snprintf(NULL, 0, "Element '%s%s' has incorrect shape %s: its coordinate in dimension %d ('%s%s') has size %zu.", crootpath, initialpath, getShapeStr(pfield,rank), cfield_dim,crootpath,ctargetfield[targetcpathid], targetFieldSize); char *buffer = malloc(needed+1); - sprintf(buffer, "Element '%s%s' has incorrect shape %s: its coordinate in dimension %d ('%s%s') has size %d.", crootpath, initialpath,getShapeStr(pfield,rank), cfield_dim,crootpath,ctargetfield[targetcpathid], targetFieldSize); + sprintf(buffer, "Element '%s%s' has incorrect shape %s: its coordinate in dimension %d ('%s%s') has size %zu.", crootpath, initialpath,getShapeStr(pfield,rank), cfield_dim,crootpath,ctargetfield[targetcpathid], targetFieldSize); strncpy(status.message, buffer, needed); /// --- replacement of each index by its value for (int k=0;k<nbindices; k++) { @@ -653,11 +662,18 @@ end_repl_str: return status; } + // Default return if no other path was taken + free(pathcopy); + return status; } al_validation_status_t validateCoordinateFromPath(const mxArray *data, int idsTimeMode, int timeSize, bool is_time_coordinate, const char *crootpath, const char *path, int rank, int cfield_dim,const char *ctargetfield[], int nb_ctargets, int *target_ranks, int ctargetfielddim, int spec_dim) { const mxArray *root = data; - int *indices_values; + #ifdef _WIN32 + int *indices_values = NULL; + #else + int *indices_values; + #endif char *indices_names[] = {}; const char *initialpath = path; @@ -676,7 +692,7 @@ end_repl_str: const mxArray* data=NULL; const mxArray* pfield=NULL; int idsTimeMode = IDS_TIME_MODE_UNKNOWN; - int timeSize; + int timeSize = 0; int isEmpty; int i1max, i2max, i3max, 
i4max, itimemax; int aosArraySize; diff --git a/implementations.xsl b/implementations.xsl index 302cf6f..775262c 100644 --- a/implementations.xsl +++ b/implementations.xsl @@ -184,7 +184,8 @@ al_status_t status_end = {0,""}; int putOpCtx = -1; int homogeneousTime = IDS_TIME_MODE_UNKNOWN; - /* Validation check for input schema */ + /* Validation check for input schema (not supported on Windows) */ +#ifndef _WIN32 al_validation_status_t status_val = {0,""}; bool flag = is_validation_required(); if(flag) { @@ -197,6 +198,7 @@ mexWarnMsgIdAndTxt("IMAS:ids_validate:invalid_ids", "IDS is found to be invalid . PUT quits with no action."); return status; } +#endif status = init_dataTree_write((mxArray *) ids); /* TODO: move these checks to external function? */ if (status.code >= 0) status = getHomogeneousTime(&homogeneousTime); diff --git a/rand.xsl b/rand.xsl index 5c01d84..aad2e96 100644 --- a/rand.xsl +++ b/rand.xsl @@ -32,10 +32,18 @@ n = ntime; + #ifdef _WIN32 + n = 1+rand()%4; + #else n = 1+random()%4; + #endif + #ifdef _WIN32 + n = 1+rand()%4; + #else n = 1+random()%4; + #endif n = n < ? n : ; diff --git a/src/imas_mex_casts.c b/src/imas_mex_casts.c index d2e997f..eaaf2db 100644 --- a/src/imas_mex_casts.c +++ b/src/imas_mex_casts.c @@ -274,6 +274,11 @@ al_status_t castCellToChar(mxArray ** data) numel = mxGetNumberOfElements(cellData); + if (numel == 0) { + *data = NULL; + return status; + } + strings = malloc(numel*sizeof(char *)); for (i = 0; i < numel; i++) { cell = mxGetCell(*data, (mwIndex) i); diff --git a/src/imas_mex_params.h b/src/imas_mex_params.h index 444c5c2..aa08f75 100644 --- a/src/imas_mex_params.h +++ b/src/imas_mex_params.h @@ -27,9 +27,12 @@ struct imas_mex_params { int verbosity; /*!< Controls the verbosity level of the interface, higher means more verbose. Minimum value is 0, maximum value is 4 (default: 0). 
*/ }; -/** \cond */ -extern struct imas_mex_params params; - +#ifdef _WIN32 + /** \cond */ + AL_MEX_EXPORT extern struct imas_mex_params params; +#else + extern struct imas_mex_params params; +#endif int setDefaultParams(void); /** \endcond */ #endif diff --git a/src/imas_mex_utils.c b/src/imas_mex_utils.c index 1e65bf4..ceaecc9 100644 --- a/src/imas_mex_utils.c +++ b/src/imas_mex_utils.c @@ -27,53 +27,85 @@ int msglen = 0; /*!< Length of the mex_errms int msg_haspathinfo = 0; -/** - Convert integer to string - */ -char * itoa(int num) -{ - int i, rem, len = 0, n; - - n = num; - while (n != 0) - { - len++; - n /= 10; - } - char * str = (char*)malloc(len); - for (i = 0; i < len; i++) - { - rem = num % 10; - num = num / 10; - str[len - (i + 1)] = rem + '0'; - } - str[len] = '\0'; - return str; -} +#ifdef _WIN32 + #include <process.h> // For _getpid() -/** - Convert integer to string - */ -int atoi(const char *s1) -{ - int sign = 1, number = 0, index = 0; - if(*s1 == '-'){ - sign = -1; - index = 1; - } - - while(*s1 != '\0'){ - if(*s1 >= '0' && *s1 <= '9'){ - number = number*10 + *s1 - '0'; - } else { - break; - } - *s1++; - } - - number = number * sign; - return number; -} + // Windows implementation of gettimeofday + int gettimeofday(struct timeval *tv, void *tz) { + FILETIME ft; + unsigned __int64 tmpres = 0; + + GetSystemTimeAsFileTime(&ft); + + tmpres |= ft.dwHighDateTime; + tmpres <<= 32; + tmpres |= ft.dwLowDateTime; + + // Convert file time to unix epoch + tmpres /= 10; // convert to microseconds + tmpres -= 11644473600000000ULL; // Windows to UNIX epoch offset + + tv->tv_sec = (long)(tmpres / 1000000UL); + tv->tv_usec = (long)(tmpres % 1000000UL); + + return 0; + } + + // Windows implementation of getpid + #define getpid _getpid + + // Windows implementation of access + #define access _access + #define F_OK 0 +#else + /** + Convert integer to string + */ + char * itoa(int num) + { + int i, rem, len = 0, n; + + n = num; + while (n != 0) + { + len++; + n /= 10; + } + char
* str = (char*)malloc(len); + for (i = 0; i < len; i++) + { + rem = num % 10; + num = num / 10; + str[len - (i + 1)] = rem + '0'; + } + str[len] = '\0'; + return str; + } + + /** + Convert integer to string + */ + int atoi(const char *s1) + { + int sign = 1, number = 0, index = 0; + if(*s1 == '-'){ + sign = -1; + index = 1; + } + + while(*s1 != '\0'){ + if(*s1 >= '0' && *s1 <= '9'){ + number = number*10 + *s1 - '0'; + } else { + break; + } + *s1++; + } + + number = number * sign; + return number; + } +#endif /** Concatenate two strings @@ -127,8 +159,15 @@ char* generate_tmp_file() unsigned long rnd = rand() ^ getpid(); // XOR random value with process id - char* rndstr = itoa(rnd); - fname=(char *)malloc( strlen(rndstr) + 1); + #ifdef _WIN32 + /* Convert random number to string using sprintf instead of itoa */ + char rndstr[32]; /* Enough for unsigned long */ + sprintf(rndstr, "%lu", rnd); + #else + char* rndstr = itoa(rnd); + fname=(char *)malloc( strlen(rndstr) + 1); + #endif + fname = concat(prefix, rndstr); int file_available_status = access(fname, F_OK); // returns 0 if the file exists and accessible and returns -1 if not exist @@ -404,13 +443,16 @@ al_status_t data_to_mxArray(int datatype, int dim, void *array, int *size, mxArr memcpy(mxGetData(*data), array, numel * dsize); else { #if MX_HAS_INTERLEAVED_COMPLEX -#error IMAS_MEX builds with interleaved complex API is not supported yet memcpy(mxGetData(*data), array, numel * dsize * 2); #else /* MATLAB complex data has two separate pointers for real and imaginary data (separate API) */ pr = mxGetData(*data); - pi = mxGetImagData(*data); - for (i = 0; i < numel; i++) { + pi = mxGetImagData(*data); + if (!pr || !pi) { + mexErrMsgIdAndTxt("imas:mex", "Failed to allocate complex array data (pr=%p, pi=%p)", pr, pi); + return (al_status_t) {-1, "Failed to allocate complex array data"}; + } + for (i = 0; i < numel; i++) { pr[i] = ((double *) array)[2*i]; pi[i] = ((double *) array)[2*i+1]; } @@ -486,13 +528,18 @@ 
al_status_t data_from_mxArray(int datatype, int dim, const mxArray * data, void *array = mxGetData(data); else { #if MX_HAS_INTERLEAVED_COMPLEX -#error IMAS_MEX builds with interleaved complex API is not supported yet *array = mxGetData(data); #else /* MATLAB complex data has two separate pointers for real and imaginary data (separate API) */ *array = malloc(numel*2*sizeof(double)); pr = mxGetData(data); pi = mxGetImagData(data); + if (!pr || !pi) { + free(*array); + *array = NULL; + mexErrMsgIdAndTxt("imas:mex", "Input array is not properly complex (pr=%p, pi=%p)", pr, pi); + return (al_status_t) {-1, "Input array is not properly complex"}; + } for (i = 0; i < numel; i++) { ((double *) *array)[2*i] = pr[i]; ((double *) *array)[2*i+1] = pi[i]; @@ -625,7 +672,12 @@ void getNodePath(char* path, char* dataDictionaryVersion, int k) { +#ifdef _WIN32 + /* Allocate pathTokens dynamically - MSVC doesn't support VLAs */ + char** pathTokens = (char**)malloc(ancestors_count * sizeof(char*)); +#else char* pathTokens[ancestors_count]; +#endif char* nbc_versions[NBC_VERSIONS_MAX_COUNT]; char* nbc_previous_names[NBC_VERSIONS_MAX_COUNT]; char* pathToken = malloc(ANCESTOR_NAME_MAX_LENGTH); @@ -681,6 +733,10 @@ void getNodePath(char* path, } free(pathTokens[i]); } + // TODO: free pathTokens elements in the loop above to avoid memory leak, but this causes an access violation on Windows, investigate further +#ifdef _WIN32 + free(pathTokens); +#endif } /** @@ -814,7 +870,12 @@ al_status_t my_al_read_data(struct imas_mex_actionInfo * action, struct imas_mex else if (field->datatype == DOUBLE_DATA) array = malloc(sizeof(double)); else if (field->datatype == COMPLEX_DATA) +#ifdef _WIN32 + /* Complex = real + imaginary parts, MSVC doesn't support _Complex keyword */ + array = malloc(2 * sizeof(double)); +#else array = malloc(sizeof(double _Complex)); +#endif } status = al_read_data(action->context, field->fieldPath, field->timebasePath, &array, field->datatype, field->dim, &dims[0]); @@ 
-914,6 +975,7 @@ al_status_t my_al_write_data(struct imas_mex_actionInfo * action, struct imas_me if (field->datatype == CHAR_DATA && field->dim == 2) { if (mxIsCell(data)) { if (status.code >= 0) status = cast_status = castCellToChar((mxArray **) &data); + if (data == NULL) return (al_status_t) {0,""}; } } diff --git a/src/imas_mex_utils.h b/src/imas_mex_utils.h index aa46cf9..9212d12 100644 --- a/src/imas_mex_utils.h +++ b/src/imas_mex_utils.h @@ -13,18 +13,42 @@ #define IMAS_MEX_UTILS_H + +#include "mex.h" + +// DLL export/import macro for Windows +#ifdef _WIN32 + #ifdef AL_MEX_BUILDING_DLL + #define AL_MEX_EXPORT __declspec(dllexport) + #else + #define AL_MEX_EXPORT __declspec(dllimport) + #endif + // Undef the deprecation macros to use the actual function names + // In R2018a+ these are #defined to "IsDeprecated" versions that don't link + #ifdef mxGetImagData + #undef mxGetImagData + #endif + #ifdef mxSetImagData + #undef mxSetImagData + #endif +#else + // On non-Windows platforms, AL_MEX_EXPORT is empty + #define AL_MEX_EXPORT +#endif + /** \cond */ -extern const int EMPTY_INT; -extern const double EMPTY_DOUBLE; -extern const double EMPTY_COMPLEX[2]; +AL_MEX_EXPORT extern const int EMPTY_INT; +AL_MEX_EXPORT extern const double EMPTY_DOUBLE; +AL_MEX_EXPORT extern const double EMPTY_COMPLEX[2]; -extern const int IDS_TIME_MODE_UNKNOWN; -extern const int IDS_TIME_MODE_HETEROGENEOUS; -extern const int IDS_TIME_MODE_HOMOGENEOUS; -extern const int IDS_TIME_MODE_INDEPENDENT; +AL_MEX_EXPORT extern const int IDS_TIME_MODE_UNKNOWN; +AL_MEX_EXPORT extern const int IDS_TIME_MODE_HETEROGENEOUS; +AL_MEX_EXPORT extern const int IDS_TIME_MODE_HOMOGENEOUS; +AL_MEX_EXPORT extern const int IDS_TIME_MODE_INDEPENDENT; + +/** \endcond */ -#include "mex.h" #include "al_lowlevel.h" #include "imas_mex_params.h" #include "imas_mex_casts.h" @@ -34,8 +58,17 @@ extern const int IDS_TIME_MODE_INDEPENDENT; #include #include #include -#include -#include + +#ifdef _WIN32 + #include + 
#include + // Windows doesn't have sys/time.h or unistd.h + // gettimeofday() is implemented in imas_mex_utils.c +#else + #include + #include +#endif + #include #ifdef NO_MXISSCALAR #define mxIsScalar(a) (mxGetNumberOfElements(a)==1) @@ -91,15 +124,25 @@ struct imas_mex_fieldInfo { int dim; /*!< Rank of the current field. */ }; -/** \cond */ -extern const char * mex_errmsgid; -extern char mex_errmsgtxt[MAXERRMSGTXTSIZE]; -extern int msglen; -extern int msg_haspathinfo; +#ifdef _WIN32 + /** \cond */ + AL_MEX_EXPORT extern const char * mex_errmsgid; + AL_MEX_EXPORT extern char mex_errmsgtxt[MAXERRMSGTXTSIZE]; + AL_MEX_EXPORT extern int msglen; + AL_MEX_EXPORT extern int msg_haspathinfo; +#else + /** \cond */ + extern const char * mex_errmsgid; + extern char mex_errmsgtxt[MAXERRMSGTXTSIZE]; + extern int msglen; + extern int msg_haspathinfo; +#endif -char * itoa(int ); +#ifndef _WIN32 + char * itoa(int ); -int atoi(const char *); + int atoi(const char *); +#endif char* concat(const char *, const char *); diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index 3748a5a..70371c0 100644 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -14,8 +14,8 @@ add_test( NAME al-mex-test -batch "addpath ${LIB_FOLDER} ${M_FOLDER}; res = runtests('imas_unit_tests');exit(~all([res.Passed]))" WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} ) - -set( MATLAB_ENV "IMAS_AL_DISABLE_VALIDATE=1;IMAS_AL_DISABLE_OBSOLESCENT_WARNING=1;ids_path=${MDSPLUS_MODEL_DIR}" ) +# Note: added ids_path for backward compatibility with IMAS-Core<5.6 +set( MATLAB_ENV "IMAS_AL_DISABLE_VALIDATE=1;IMAS_AL_DISABLE_OBSOLESCENT_WARNING=1;ids_path=${MDSPLUS_MODEL_DIR};MDSPLUS_MODELS_PATH=${MDSPLUS_MODEL_DIR}" ) if( AL_BACKEND_HDF5 ) # The HDF5 library built-in with matlab may conflict with the one that the AL # was built against. 
Preload the AL-linked library: @@ -51,11 +51,20 @@ if( AL_PLUGINS ) ) endif() -# Tests require MDSPLUS and HDF5 -if( NOT AL_BACKEND_MDSPLUS OR NOT AL_BACKEND_HDF5 ) +# Tests require at least one backend (HDF5 or MDSPLUS) +if( NOT AL_BACKEND_MDSPLUS AND NOT AL_BACKEND_HDF5 ) + message(STATUS "Disabling tests: No backend available (requires HDF5 or MDSplus)") set_tests_properties( al-mex-test PROPERTIES DISABLED ON ) if( AL_PLUGINS ) set_tests_properties( al-partial-get-test PROPERTIES DISABLED ON ) endif() +else() + if( AL_BACKEND_HDF5 AND NOT AL_BACKEND_MDSPLUS ) + message(STATUS "Tests will run with HDF5 backend only") + elseif( AL_BACKEND_MDSPLUS AND NOT AL_BACKEND_HDF5 ) + message(STATUS "Tests will run with MDSplus backend only") + else() + message(STATUS "Tests will run with both HDF5 and MDSplus backends") + endif() endif() diff --git a/validate_single.xsl b/validate_single.xsl index 0bc6c4d..75f5558 100644 --- a/validate_single.xsl +++ b/validate_single.xsl @@ -52,7 +52,7 @@ } } } - if ((scalar_time == EMPTY_DOUBLE)) { + if (scalar_time == EMPTY_DOUBLE) { size_t needed = snprintf(NULL, 0, "Time coordinate of '' ('(%d)/time') has empty values.", itime+1); char *buffer = malloc(needed + 1); sprintf(buffer, "Time coordinate of '' ('(%d)/time') has empty values.", itime+1); @@ -891,7 +891,7 @@ } } } - if ((scalar_time == EMPTY_DOUBLE)) { + if (scalar_time == EMPTY_DOUBLE) { size_t needed = snprintf(NULL, 0, "Time coordinate of '' ('(%d)/time') has empty values.", itime+1); char *buffer = malloc(needed + 1); sprintf(buffer, "Time coordinate of '' ('(%d)/time') has empty values.", itime+1); diff --git a/vcpkg.json b/vcpkg.json new file mode 100644 index 0000000..d52f979 --- /dev/null +++ b/vcpkg.json @@ -0,0 +1,20 @@ +{ + "name": "imas-matlab", + "version": "1.0.0", + "description": "MATLAB High Level Interface of the IMAS Access Layer", + "dependencies": [ + { + "name": "dlfcn-win32", + "platform": "windows" + }, + "hdf5", + { + "name": "pthreads", + "platform": 
"windows" + }, + "boost-algorithm", + "boost-filesystem", + "boost-variant", + "pkgconf" + ] +}