diff --git a/.github/workflows/build-docs.yml b/.github/workflows/build-docs.yml index f6c7d47..192fea0 100644 --- a/.github/workflows/build-docs.yml +++ b/.github/workflows/build-docs.yml @@ -13,6 +13,8 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 + with: + submodules: true # Install dependencies - name: Set up Python 3.9 @@ -20,19 +22,16 @@ jobs: with: python-version: 3.9 - - name: Install Poetry - uses: snok/install-poetry@v1 - - name: Install dependencies run: | - python3 -m pip install coffe - python3 -m pip install -r pip-requirements-dev.txt + python3 -m pip install cibuildwheel + SETUPTOOLS_SCM_PRETEND_VERSION=0.1.0 CIBW_BUILD='cp39*' python3 -m cibuildwheel --platform linux + python3 -m pip install wheelhouse/*.whl # Build the site - name: Build the site run: | - rm -fr coffe/__init__.py - sh generate_docs.sh + bash scripts/generate_docs.sh # If we've pushed to master, push the book's HTML to github-pages - if: ${{ github.ref == 'refs/heads/master' }} diff --git a/.github/workflows/build-wheels.yml b/.github/workflows/build-wheels.yml new file mode 100644 index 0000000..21f672c --- /dev/null +++ b/.github/workflows/build-wheels.yml @@ -0,0 +1,62 @@ +name: Build + +on: + pull_request: + push: + branches: + - master + workflow_dispatch: + inputs: + version: + description: 'The version of COFFE to release' + +jobs: + build_wheels: + name: Build wheels on ${{ matrix.os }} + runs-on: ${{ matrix.os }} + strategy: + matrix: + os: [ubuntu-latest, macos-13, macos-14] + + steps: + - uses: actions/checkout@v4 + with: + submodules: true + + - name: Setup all external dependencies + if: runner.os == 'macOS' + run: | + brew install automake + + # Used to host cibuildwheel + - uses: actions/setup-python@v3 + + - name: Install cibuildwheel + run: python -m pip install cibuildwheel==2.16.5 conan + + # if we're not doing a release version (manual!), only build one python version + - name: Setup versions + if: ${{ ! 
(github.event_name == 'workflow_dispatch' && github.event.inputs.version) }} + run: | + echo "CIBW_BUILD=cp311-*" >> "$GITHUB_ENV" + # pretend version so setuptools doesn't fail + echo "SETUPTOOLS_SCM_PRETEND_VERSION=0.1.0" >> "$GITHUB_ENV" + + - name: SCM + if: github.event_name == 'workflow_dispatch' && github.event.inputs.version + run: echo "SETUPTOOLS_SCM_PRETEND_VERSION=${{ github.event.input.version }}" >> "$GITHUB_ENV" + + - name: Build wheels + run: python -m cibuildwheel --output-dir wheelhouse + # to supply options, put them in 'env', like: + env: + # we skip tests in CI to save time + CIBW_TEST_COMMAND: 'true' + CIBW_ENVIRONMENT_PASS_LINUX: 'SETUPTOOLS_SCM_PRETEND_VERSION' + CUBA_INSTALL_DIR: ${{ github.workspace }}/cuba_install + CLASS_INSTALL_DIR: ${{ github.workspace }}/class_install + + - uses: actions/upload-artifact@v4 + with: + name: cibw-wheels-${{ matrix.os }}-${{ strategy.job-index }} + path: ./wheelhouse/*.whl diff --git a/.gitmodules b/.gitmodules index 8781a45..5b405a9 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,8 +1,8 @@ [submodule "class_public"] - path = class_public + path = external/class_public url = https://github.com/JCGoran/class_public branch = feature/conda [submodule "libcuba"] - path = libcuba + path = external/libcuba url = https://github.com/JCGoran/libcuba branch = master diff --git a/CMakeLists.txt b/CMakeLists.txt new file mode 100644 index 0000000..5a2de1a --- /dev/null +++ b/CMakeLists.txt @@ -0,0 +1,248 @@ +cmake_minimum_required(VERSION 3.10) +project( + coffe + VERSION ${SKBUILD_PROJECT_VERSION} + LANGUAGES C) + +# Default build type +if(NOT CMAKE_BUILD_TYPE) + set(CMAKE_BUILD_TYPE Release) +endif() + +# Add the option to disable silent rules +option(DISABLE_SILENT_RULES "Disable silent rules" OFF) +if(NOT DISABLE_SILENT_RULES) + set(CMAKE_VERBOSE_MAKEFILE OFF) +endif() + +# Get the current arch (because MacOS can have both) +execute_process( + COMMAND uname -m + COMMAND tr -d '\n' + OUTPUT_VARIABLE BUILD_ARCHITECTURE) + +# Check whether the user requested Python +if(COFFE_ENABLE_PYTHON) + if(${CMAKE_SYSTEM_NAME} MATCHES "Darwin" AND (DEFINED CONAN_C_FLAGS + OR DEFINED CONAN_CXX_FLAGS)) + # For some reason, Conan does not find Python from the venv on MacOS, so we + # need to use this annoying hack. 
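+    # (With EXACT, find_package is pinned to the minor version passed in via
+    # -DCOFFE_PYTHON_MINOR_VERSION, e.g. -DCOFFE_PYTHON_MINOR_VERSION=11 when
+    # building against Python 3.11.)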
Note that this does not guarantee that the + # Python executable is the one from the venv, but at least it finds the one + # compatible with all of the packages from the venv + find_package( + Python "3.${COFFE_PYTHON_MINOR_VERSION}" EXACT + COMPONENTS Interpreter Development.Module + REQUIRED) + else() + find_package( + Python + COMPONENTS Interpreter Development.Module + REQUIRED) + endif() + include(cmake/FindCython.cmake) +endif() + +# Define source directory +set(SRC_DIR ${CMAKE_SOURCE_DIR}/src) + +# Compiler flags +set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wall -Wextra -Wpedantic") +set(CMAKE_C_STANDARD 99) + +# Check for libraries +find_package(GSL 2.1 REQUIRED) +find_library(FFTW3_LIB fftw3 NAMES fftw fftw3 libfftw libfftw3 REQUIRED) +find_library(CONFIG_LIB config NAMES config config++ libconfig libconfig++ + REQUIRED) +find_library(M_LIB m NAMES m libm REQUIRED) +set(CONFIG_INCLUDE_DIRS) +set(FFTW3_INCLUDE_DIRS) +set(CUBA_INCLUDE_DIRS) +set(CLASS_INCLUDE_DIRS) + +# Check for optional libraries +if(COFFE_ENABLE_CUBA) + find_library( + CUBA_LIB cuba + NAMES cuba libcuba + PATHS /opt/cuba_${BUILD_ARCHITECTURE}/lib $ENV{CUBA_INSTALL_DIR}/lib + REQUIRED) +endif() +if(COFFE_ENABLE_CLASS) + find_library( + CLASS_LIB class + NAMES class libclass + PATHS /opt/class_${BUILD_ARCHITECTURE}/lib + /opt/class_public_${BUILD_ARCHITECTURE}/lib + $ENV{CLASS_INSTALL_DIR}/lib REQUIRED) +endif() + +get_filename_component(BASE_DIR ${CONFIG_LIB} DIRECTORY) +string(REGEX REPLACE "/lib$" "/include" CONFIG_INCLUDE_DIRS ${BASE_DIR}) + +get_filename_component(BASE_DIR ${FFTW3_LIB} DIRECTORY) +string(REGEX REPLACE "/lib$" "/include" FFTW3_INCLUDE_DIRS ${BASE_DIR}) + +get_filename_component(BASE_DIR ${CLASS_LIB} DIRECTORY) +string(REGEX REPLACE "/lib$" "/include" CLASS_INCLUDE_DIRS ${BASE_DIR}) + +get_filename_component(BASE_DIR ${CUBA_LIB} DIRECTORY) +string(REGEX REPLACE "/lib$" "/include" CUBA_INCLUDE_DIRS ${BASE_DIR}) + +# OpenMP support +find_package(OpenMP) +if(OpenMP_C_FOUND) + set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${OpenMP_C_FLAGS}") +endif() + +# Optimization flags +include(CheckCCompilerFlag) +if(COFFE_ENABLE_MATHOPTS) + if(CMAKE_C_COMPILER_ID STREQUAL "Clang" OR CMAKE_C_COMPILER_ID STREQUAL "GNU") + set(COFFE_MATHOPTS "-ffast-math") + endif() +else() + set(COFFE_MATHOPTS) +endif() +# Add definitions for conditional compilation +if(COFFE_ENABLE_CLASS) + add_compile_definitions(-DHAVE_CLASS) +endif() +if(COFFE_ENABLE_CUBA) + add_compile_definitions(-DHAVE_CUBA) +endif() + +message("COFFE configuration") +message("============================") +message("Build architecture: ${BUILD_ARCHITECTURE}") +message("Math library: ${M_LIB}") +message("GSL library: ${GSL_LIBRARIES}") +message("FFTW library: ${FFTW3_LIB}") +message("libconfig library: ${CONFIG_LIB}") +if(COFFE_ENABLE_PYTHON) + message("Python path: ${Python_EXECUTABLE}") + message("Cython path: ${CYTHON_EXECUTABLE}") +endif() +if(COFFE_ENABLE_CLASS) + message("CLASS library: ${CLASS_LIB}") +endif() +if(COFFE_ENABLE_CUBA) + message("CUBA library: ${CUBA_LIB}") +endif() +message("OpenMP enabled: ${OpenMP_C_FOUND}") +message("Additional optimizations: ${COFFE_MATHOPTS}") + +set(COFFE_HEADERS + ${SRC_DIR}/common.h + ${SRC_DIR}/covariance.h + ${SRC_DIR}/utils.h + ${SRC_DIR}/twobessel.h + ${SRC_DIR}/errors.h + ${SRC_DIR}/parser.h + ${SRC_DIR}/twofast.h + ${SRC_DIR}/integrals.h + ${SRC_DIR}/background.h + ${SRC_DIR}/functions.h + ${SRC_DIR}/tanhsinh.h + ${SRC_DIR}/signal.h + ${SRC_DIR}/corrfunc.h + ${SRC_DIR}/multipoles.h + ${SRC_DIR}/average_multipoles.h) + 
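+# NOTE: a minimal configure sketch for an out-of-source build (the install
+# prefixes below are placeholders, not defaults shipped with COFFE); the
+# CUBA_INSTALL_DIR and CLASS_INSTALL_DIR environment variables are the ones
+# consulted by the find_library() calls above:
+#
+#   CUBA_INSTALL_DIR=$HOME/opt/cuba CLASS_INSTALL_DIR=$HOME/opt/class_public \
+#     cmake -DCOFFE_ENABLE_CUBA=ON -DCOFFE_ENABLE_CLASS=ON -DCOFFE_ENABLE_TESTS=ON ..
+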
+set(COFFE_LIB_SOURCES + ${SRC_DIR}/common.c + ${SRC_DIR}/covariance.c + ${SRC_DIR}/utils.c + ${SRC_DIR}/twobessel.c + ${SRC_DIR}/errors.c + ${SRC_DIR}/parser.c + ${SRC_DIR}/twofast.c + ${SRC_DIR}/integrals.c + ${SRC_DIR}/background.c + ${SRC_DIR}/functions.c + ${SRC_DIR}/tanhsinh.c + ${SRC_DIR}/signal.c + ${SRC_DIR}/corrfunc.c + ${SRC_DIR}/multipoles.c + ${SRC_DIR}/average_multipoles.c) + +# Main program +add_executable(coffe-cli ${SRC_DIR}/main.c ${COFFE_LIB_SOURCES} + ${SRC_DIR}/output.c) + +# Include directories +target_include_directories( + coffe-cli PRIVATE ${CONFIG_INCLUDE_DIRS} ${FFTW3_INCLUDE_DIRS} + ${CUBA_INCLUDE_DIRS} ${SRC_DIR} ${CLASS_INCLUDE_DIRS}) + +# Add definition +target_compile_definitions(coffe-cli PRIVATE PACKAGE_STRING="${VERSION}") + +# Link libraries +target_link_libraries( + coffe-cli + ${M_LIB} + ${GSL_LIBRARIES} + ${FFTW3_LIBRARIES} + ${FFTW3_LIB} + ${libconfig_LIBRARIES} + ${CONFIG_LIB} + ${CUBA_LIB} + ${CLASS_LIB}) + +target_compile_options(coffe-cli PRIVATE ${COFFE_MATHOPTS}) + +# Tests +if(COFFE_ENABLE_TESTS) + enable_testing() + + if(NOT DEFINED COFFE_TEST_DATADIR) + set(COFFE_TEST_DATADIR ${CMAKE_SOURCE_DIR}) + endif() + + set(COFFE_TESTS background integrals corrfunc multipoles covariance) + foreach(name IN LISTS COFFE_TESTS) + # Add the test executable + add_executable(test_${name} ${CMAKE_SOURCE_DIR}/tests/test_${name}.c + ${COFFE_LIB_SOURCES}) + target_include_directories( + test_${name} + PRIVATE ${CONFIG_INCLUDE_DIRS} ${FFTW3_INCLUDE_DIRS} ${CUBA_INCLUDE_DIRS} + ${SRC_DIR} ${CLASS_INCLUDE_DIRS} ${CMAKE_SOURCE_DIR}/tests) + # Add defines + target_compile_definitions( + test_${name} PRIVATE COFFE_TEST_DATADIR="${COFFE_TEST_DATADIR}") + # Link libraries + target_link_libraries( + test_${name} + ${M_LIB} + ${GSL_LIBRARIES} + ${FFTW3_LIB} + ${CONFIG_LIB} + ${CUBA_LIB} + ${CLASS_LIB}) + # Register tests + add_test(NAME test_${name} COMMAND test_${name}) + endforeach() +endif() + +if(COFFE_ENABLE_PYTHON) + add_subdirectory(${CMAKE_SOURCE_DIR}/python/coffe) +endif() + +# Installation +if(NOT SKBUILD) + install( + TARGETS coffe-cli + COMPONENT Runtime + DESTINATION bin) + + # Install headers and other files + install(FILES ${COFFE_HEADERS} DESTINATION include/coffe) + + install( + FILES ${SRC_DIR}/WAVENUMBER_HEADER.dat ${SRC_DIR}/POWER_SPECTRUM_HEADER.dat + ${CMAKE_SOURCE_DIR}/settings.cfg ${CMAKE_SOURCE_DIR}/separations.dat + ${CMAKE_SOURCE_DIR}/PkL_CLASS.dat ${CMAKE_SOURCE_DIR}/README.md + DESTINATION share/coffe) +endif() diff --git a/Dockerfile b/Dockerfile deleted file mode 100644 index 8647001..0000000 --- a/Dockerfile +++ /dev/null @@ -1,57 +0,0 @@ -# NOTE: if you are building this on your system and are not pulling from Dockerhub, -# a lot or warnings may be displayed; to turn them off, use "docker build -q [PATH]", -# where [PATH] is the location of this Dockerfile; -# most are harmless and do not affect the outcome of the build -FROM gcc:8 -MAINTAINER Goran Jelic-Cizmek "goran.jelic-cizmek@unige.ch" -# get all the standard required libraries -RUN apt-get update && \ - apt-get install -y --no-install-recommends \ - autoconf-archive git \ - libconfig-dev \ - libgsl-dev \ - libfftw3-dev \ - python3-pip \ - python3-setuptools \ - python3-wheel \ - wget && \ - apt-get clean && apt-get autoremove -y -# get CUBA and build it -WORKDIR /tmp/ -RUN wget http://www.feynarts.de/cuba/Cuba-4.2.tar.gz -RUN tar xf /tmp/Cuba-4.2.tar.gz -C /tmp/ -WORKDIR /tmp/Cuba-4.2/ -RUN ./configure -RUN make -j lib -RUN make install -RUN rm -rf /tmp/Cuba* -# get CLASS and set it 
up -WORKDIR /tmp/ -# CLASS has a small bug when using the static library, -# see https://github.com/lesgourg/class_public/issues/255 for details, -# which was fixed in the fork -RUN git clone -b master --single-branch https://github.com/lesgourg/class_public class -WORKDIR /tmp/class/ -RUN make -j libclass.a -RUN cp libclass.a /usr/lib/x86_64-linux-gnu/ -RUN cp include/*.h /usr/include/ -# get COFFE and set it up -WORKDIR / -RUN git clone -q https://github.com/JCGoran/coffe -WORKDIR /coffe/ -# this is necessary as long as the future branch isn't merged into master -RUN git checkout future -RUN autoreconf -i -RUN ./configure --enable-cuba --enable-class -RUN make -j coffe -# optional dependencies for making life easier -RUN pip3 install \ - wheel \ - numpy \ - scipy \ - matplotlib \ - jupyter -# installing dependencies for COFFE python "wrapper" -RUN pip3 install -r python/requirements.txt -RUN ln -s /coffe/coffe /usr/bin/coffe -WORKDIR /data/ diff --git a/Makefile.am b/Makefile.am deleted file mode 100644 index dcfc47d..0000000 --- a/Makefile.am +++ /dev/null @@ -1,285 +0,0 @@ -# The main product -bin_PROGRAMS = coffe-cli # make all - -lib_LIBRARIES = libcoffe.a - -libcoffe_adir = $(includedir)/coffe - -# the non-source files that will be included -# when running `make dist` -dist_pkgdata_DATA = \ - WAVENUMBER_HEADER.dat \ - POWER_SPECTRUM_HEADER.dat \ - settings.cfg \ - separations.dat \ - PkL_CLASS.dat - -EXTRA_DIST = \ - tests/benchmarks/benchmark_all_corrfunc0.dat \ - tests/benchmarks/benchmark_all_corrfunc1.dat \ - tests/benchmarks/benchmark_all_corrfunc2.dat \ - tests/benchmarks/benchmark_all_corrfunc3.dat \ - tests/benchmarks/benchmark_all_corrfunc4.dat \ - tests/benchmarks/benchmark_all_multipoles0.dat \ - tests/benchmarks/benchmark_all_multipoles2.dat \ - tests/benchmarks/benchmark_all_multipoles4.dat \ - tests/benchmarks/benchmark_background.dat \ - tests/benchmarks/benchmark_d1_corrfunc0.dat \ - tests/benchmarks/benchmark_d1_corrfunc1.dat \ - tests/benchmarks/benchmark_d1_corrfunc2.dat \ - tests/benchmarks/benchmark_d1_corrfunc3.dat \ - tests/benchmarks/benchmark_d1_corrfunc4.dat \ - tests/benchmarks/benchmark_d1_multipoles0.dat \ - tests/benchmarks/benchmark_d1_multipoles2.dat \ - tests/benchmarks/benchmark_d1_multipoles4.dat \ - tests/benchmarks/benchmark_d2_corrfunc0.dat \ - tests/benchmarks/benchmark_d2_corrfunc1.dat \ - tests/benchmarks/benchmark_d2_corrfunc2.dat \ - tests/benchmarks/benchmark_d2_corrfunc3.dat \ - tests/benchmarks/benchmark_d2_corrfunc4.dat \ - tests/benchmarks/benchmark_d2_multipoles0.dat \ - tests/benchmarks/benchmark_d2_multipoles2.dat \ - tests/benchmarks/benchmark_d2_multipoles4.dat \ - tests/benchmarks/benchmark_den_corrfunc0.dat \ - tests/benchmarks/benchmark_den_corrfunc1.dat \ - tests/benchmarks/benchmark_den_corrfunc2.dat \ - tests/benchmarks/benchmark_den_corrfunc3.dat \ - tests/benchmarks/benchmark_den_corrfunc4.dat \ - tests/benchmarks/benchmark_den_multipoles0.dat \ - tests/benchmarks/benchmark_den_multipoles2.dat \ - tests/benchmarks/benchmark_den_multipoles4.dat \ - tests/benchmarks/benchmark_flatsky_density_lensing_multipoles0.dat \ - tests/benchmarks/benchmark_flatsky_density_lensing_multipoles2.dat \ - tests/benchmarks/benchmark_flatsky_density_lensing_multipoles4.dat \ - tests/benchmarks/benchmark_flatsky_lensing_lensing_corrfunc0.dat \ - tests/benchmarks/benchmark_flatsky_lensing_lensing_corrfunc1.dat \ - tests/benchmarks/benchmark_flatsky_lensing_lensing_corrfunc2.dat \ - tests/benchmarks/benchmark_flatsky_lensing_lensing_corrfunc3.dat 
\ - tests/benchmarks/benchmark_flatsky_lensing_lensing_corrfunc4.dat \ - tests/benchmarks/benchmark_flatsky_lensing_lensing_multipoles0.dat \ - tests/benchmarks/benchmark_flatsky_lensing_lensing_multipoles2.dat \ - tests/benchmarks/benchmark_flatsky_lensing_lensing_multipoles4.dat \ - tests/benchmarks/benchmark_g1_corrfunc0.dat \ - tests/benchmarks/benchmark_g1_corrfunc1.dat \ - tests/benchmarks/benchmark_g1_corrfunc2.dat \ - tests/benchmarks/benchmark_g1_corrfunc3.dat \ - tests/benchmarks/benchmark_g1_corrfunc4.dat \ - tests/benchmarks/benchmark_g1_multipoles0.dat \ - tests/benchmarks/benchmark_g1_multipoles2.dat \ - tests/benchmarks/benchmark_g1_multipoles4.dat \ - tests/benchmarks/benchmark_g2_corrfunc0.dat \ - tests/benchmarks/benchmark_g2_corrfunc1.dat \ - tests/benchmarks/benchmark_g2_corrfunc2.dat \ - tests/benchmarks/benchmark_g2_corrfunc3.dat \ - tests/benchmarks/benchmark_g2_corrfunc4.dat \ - tests/benchmarks/benchmark_g2_multipoles0.dat \ - tests/benchmarks/benchmark_g2_multipoles2.dat \ - tests/benchmarks/benchmark_g2_multipoles4.dat \ - tests/benchmarks/benchmark_g3_corrfunc0.dat \ - tests/benchmarks/benchmark_g3_corrfunc1.dat \ - tests/benchmarks/benchmark_g3_corrfunc2.dat \ - tests/benchmarks/benchmark_g3_corrfunc3.dat \ - tests/benchmarks/benchmark_g3_corrfunc4.dat \ - tests/benchmarks/benchmark_g3_multipoles0.dat \ - tests/benchmarks/benchmark_g3_multipoles2.dat \ - tests/benchmarks/benchmark_g3_multipoles4.dat \ - tests/benchmarks/benchmark_g4_corrfunc0.dat \ - tests/benchmarks/benchmark_g4_corrfunc1.dat \ - tests/benchmarks/benchmark_g4_corrfunc2.dat \ - tests/benchmarks/benchmark_g4_corrfunc3.dat \ - tests/benchmarks/benchmark_g4_corrfunc4.dat \ - tests/benchmarks/benchmark_g4_multipoles0.dat \ - tests/benchmarks/benchmark_g4_multipoles2.dat \ - tests/benchmarks/benchmark_g4_multipoles4.dat \ - tests/benchmarks/benchmark_g5_corrfunc0.dat \ - tests/benchmarks/benchmark_g5_corrfunc1.dat \ - tests/benchmarks/benchmark_g5_corrfunc2.dat \ - tests/benchmarks/benchmark_g5_corrfunc3.dat \ - tests/benchmarks/benchmark_g5_corrfunc4.dat \ - tests/benchmarks/benchmark_g5_multipoles0.dat \ - tests/benchmarks/benchmark_g5_multipoles2.dat \ - tests/benchmarks/benchmark_g5_multipoles4.dat \ - tests/benchmarks/benchmark_integral0.dat \ - tests/benchmarks/benchmark_integral1.dat \ - tests/benchmarks/benchmark_integral2.dat \ - tests/benchmarks/benchmark_integral3.dat \ - tests/benchmarks/benchmark_integral4.dat \ - tests/benchmarks/benchmark_integral5.dat \ - tests/benchmarks/benchmark_integral6.dat \ - tests/benchmarks/benchmark_integral7.dat \ - tests/benchmarks/benchmark_integral8.dat \ - tests/benchmarks/benchmark_integral8_renormalization.dat \ - tests/benchmarks/benchmark_integral9.dat \ - tests/benchmarks/benchmark_len_corrfunc0.dat \ - tests/benchmarks/benchmark_len_corrfunc1.dat \ - tests/benchmarks/benchmark_len_corrfunc2.dat \ - tests/benchmarks/benchmark_len_corrfunc3.dat \ - tests/benchmarks/benchmark_len_corrfunc4.dat \ - tests/benchmarks/benchmark_len_multipoles0.dat \ - tests/benchmarks/benchmark_len_multipoles2.dat \ - tests/benchmarks/benchmark_len_multipoles4.dat \ - tests/benchmarks/benchmark_multipoles_covariance_00.dat \ - tests/benchmarks/benchmark_multipoles_covariance_02.dat \ - tests/benchmarks/benchmark_multipoles_covariance_04.dat \ - tests/benchmarks/benchmark_multipoles_covariance_20.dat \ - tests/benchmarks/benchmark_multipoles_covariance_22.dat \ - tests/benchmarks/benchmark_multipoles_covariance_24.dat \ - 
tests/benchmarks/benchmark_multipoles_covariance_40.dat \ - tests/benchmarks/benchmark_multipoles_covariance_42.dat \ - tests/benchmarks/benchmark_multipoles_covariance_44.dat \ - tests/benchmarks/benchmark_rsd_corrfunc0.dat \ - tests/benchmarks/benchmark_rsd_corrfunc1.dat \ - tests/benchmarks/benchmark_rsd_corrfunc2.dat \ - tests/benchmarks/benchmark_rsd_corrfunc3.dat \ - tests/benchmarks/benchmark_rsd_corrfunc4.dat \ - tests/benchmarks/benchmark_rsd_multipoles0.dat \ - tests/benchmarks/benchmark_rsd_multipoles2.dat \ - tests/benchmarks/benchmark_rsd_multipoles4.dat \ - tests/benchmarks/benchmark_std_corrfunc0.dat \ - tests/benchmarks/benchmark_std_corrfunc1.dat \ - tests/benchmarks/benchmark_std_corrfunc2.dat \ - tests/benchmarks/benchmark_std_corrfunc3.dat \ - tests/benchmarks/benchmark_std_corrfunc4.dat \ - tests/benchmarks/benchmark_std_d1_corrfunc0.dat \ - tests/benchmarks/benchmark_std_d1_corrfunc1.dat \ - tests/benchmarks/benchmark_std_d1_corrfunc2.dat \ - tests/benchmarks/benchmark_std_d1_corrfunc3.dat \ - tests/benchmarks/benchmark_std_d1_corrfunc4.dat \ - tests/benchmarks/benchmark_std_d1_multipoles0.dat \ - tests/benchmarks/benchmark_std_d1_multipoles2.dat \ - tests/benchmarks/benchmark_std_d1_multipoles4.dat \ - tests/benchmarks/benchmark_std_len_corrfunc0.dat \ - tests/benchmarks/benchmark_std_len_corrfunc1.dat \ - tests/benchmarks/benchmark_std_len_corrfunc2.dat \ - tests/benchmarks/benchmark_std_len_corrfunc3.dat \ - tests/benchmarks/benchmark_std_len_corrfunc4.dat \ - tests/benchmarks/benchmark_std_len_multipoles0.dat \ - tests/benchmarks/benchmark_std_len_multipoles2.dat \ - tests/benchmarks/benchmark_std_len_multipoles4.dat \ - tests/benchmarks/benchmark_std_multipoles0.dat \ - tests/benchmarks/benchmark_std_multipoles2.dat \ - tests/benchmarks/benchmark_std_multipoles4.dat \ - README.md \ - manual/coffe_manual.pdf - -AM_CPPFLAGS = \ - -DDATADIR=\"$(srcdir)/\" \ - -Wall -Wextra -Wpedantic \ - -I$(srcdir)/src/ -I$(srcdir)/tests/ - -libcoffe_a_HEADERS = \ - src/common.h \ - src/parser.h \ - src/background.h \ - src/integrals.h \ - src/corrfunc.h \ - src/multipoles.h \ - src/average_multipoles.h \ - src/covariance.h - -libcoffe_a_SOURCES = \ - src/common.c \ - src/covariance.c \ - src/utils.c \ - src/twobessel.c \ - src/errors.c \ - src/parser.c \ - src/twofast.c \ - src/integrals.c \ - src/background.c \ - src/functions.c \ - src/tanhsinh.c \ - src/signal.c \ - src/corrfunc.c \ - src/multipoles.c \ - src/average_multipoles.c - -#include .c and .h in SOURCES so that both appear in dist -coffe_cli_SOURCES = \ - $(libcoffe_a_HEADERS) \ - src/utils.h \ - src/twobessel.h \ - src/twofast.h \ - src/errors.h \ - src/functions.h \ - src/tanhsinh.h \ - src/signal.h \ - src/output.h \ - $(libcoffe_a_SOURCES) \ - src/output.c \ - src/main.c - -TESTS = \ - test_background \ - test_integrals \ - test_corrfunc \ - test_multipoles \ - test_covariance - -check_PROGRAMS = \ - test_background \ - test_integrals \ - test_corrfunc \ - test_multipoles \ - test_covariance - -test_background_SOURCES = \ - tests/tools.h \ - tests/test_background.c \ - src/common.c \ - src/errors.c \ - src/parser.c \ - src/tanhsinh.c \ - src/background.c - -test_integrals_SOURCES = \ - tests/tools.h \ - tests/test_integrals.c \ - src/common.c \ - src/errors.c \ - src/parser.c \ - src/tanhsinh.c \ - src/background.c \ - src/twofast.c \ - src/integrals.c - -test_corrfunc_SOURCES = \ - tests/tools.h \ - tests/test_corrfunc.c \ - src/common.c \ - src/errors.c \ - src/parser.c \ - src/tanhsinh.c \ - src/background.c \ - 
src/twofast.c \ - src/integrals.c \ - src/functions.c \ - src/signal.c - -test_multipoles_SOURCES = \ - tests/tools.h \ - tests/test_multipoles.c \ - src/common.c \ - src/errors.c \ - src/parser.c \ - src/tanhsinh.c \ - src/background.c \ - src/twofast.c \ - src/integrals.c \ - src/functions.c \ - src/signal.c - -test_covariance_SOURCES = \ - tests/tools.h \ - tests/test_covariance.c \ - src/common.c \ - src/errors.c \ - src/parser.c \ - src/tanhsinh.c \ - src/background.c \ - src/twofast.c \ - src/integrals.c \ - src/functions.c \ - src/twobessel.c \ - src/utils.c \ - src/covariance.c diff --git a/README.md b/README.md index 40aa746..4e6a5a8 100644 --- a/README.md +++ b/README.md @@ -18,24 +18,29 @@ The relevant theoretical papers are: ### From pip -If you are on Linux, the latest version of COFFE can be installed using: +If you are on Linux or MacOS, the latest version of COFFE can be installed using: ```sh pip install coffe ``` -If you are on another platform, refer to the section below. +Note that Windows is not officially supported. -### Development version (including non-Linux machines) +If you wish to install the development version of COFFE, please refer to the section below. -**NOTE**: the use of a virtual environment (such as Python's [venv](https://packaging.python.org/en/latest/guides/installing-using-pip-and-virtual-environments/#creating-a-virtual-environment) or [Conda](https://docs.conda.io/en/latest/)) is highly recommended. +### Development version -If you would like to install the development version, you will need to first install the following: +**NOTE**: the use of a virtual environment (such as Python's [venv](https://packaging.python.org/en/latest/guides/installing-using-pip-and-virtual-environments/#creating-a-virtual-environment)) is highly recommended. + +#### Prerequisites + +If you would like to install the development version, you will need to first have the following: * a C compiler, compatible with the C99 standard -* a Python interpreter, version 3.7 or above +* a Python interpreter, version 3.8 or above * [GSL](https://www.gnu.org/software/gsl/) (GNU Scientific Library) and the corresponding headers, version 2.1 or above (available as `libgsl-dev` on Debian-based, and as `gsl-devel` on RHEL/CentOS-based distros) * [FFTW](http://www.fftw.org/download.html) and the corresponding headers, version 3 or above (available as `libfftw3-dev` on Debian-based, and as `fftw-devel` on RHEL/CentOS-based distros) +* [libconfig](http://hyperrealm.github.io/libconfig/) Then clone this repository: @@ -49,36 +54,90 @@ then change directory to it: cd coffe ``` -**NOTE**: if you are using Conda, you can install the above dependencies easily. -First, create a new environment: +#### Linux (CentOS/RHEL based) + +Run the script: + +```sh +bash scripts/install_other.sh gsl fftw libconfig +``` + +#### Linux (Debian/Ubuntu based) + +Run the following command: + +```sh +sudo apt install libgsl-dev libfftw3-dev libconfig-dev +``` + +and follow the instructions from the prompt. + +#### MacOS (Homebrew) + +You can install the necessary prerequisites using Homebrew: + +```sh +brew install gsl fftw libconfig +``` + +**NOTE**: as a technical aside, Homebrew-installed packages (whether installed as pre-build binaries or from source) are built for the current version of your operating system. This means that they **CANNOT** be used to create a redistributable Python wheel, i.e. a wheel that works on any older version of MacOS. 
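+
+If in doubt, one way to check which deployment target a Homebrew-built library was compiled for is (a sketch, assuming the `gsl` formula is installed; the same works for `fftw` and `libconfig`):
+
+```sh
+otool -l "$(brew --prefix gsl)/lib/libgsl.dylib" | grep -A 3 -E 'LC_BUILD_VERSION|LC_VERSION_MIN_MACOSX'
+```
+
+The reported `minos` (or `version`) value is the oldest version of MacOS that a wheel linked against this library could support.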
+ +#### MacOS (Conan) + +As an alternative to Homebrew, one can use Conan to build the dependencies. + +First install Conan using: + +```sh +pip install conan +``` + +Then, generate a profile: ```sh -conda create --name [NAME] +conan profile detect ``` -and activate it: +Finally, install all of the dependencies in the `_build` directory: ```sh -conda activate [NAME] +conan install . --output-folder=_build --build=missing ``` -Finally, run: +Note that this may take a while as the packages are usually built from source. + +**IMPORTANT NOTE**: Due to the fact that newer Apple devices have dual architectures (both `arm64` and `x86_64`), it is recommended to not mix these together, i.e. you should re-run _all_ of the above in clean `arm64` and `x86_64` environments (terminals) in separate COFFE directories to avoid any issues. + +#### Installing CLASS and CUBA + +COFFE also depends on the CLASS and CUBA libraries, which are not available on Homebrew or Conan, or the default Linux package repositories. +To install them, one needs to install `automake`, either via Homebrew (`brew install automake`) or via some other package manager. +They can then be built and installed by running: ```sh -conda install --channel conda-forge --file requirements.txt +bash scripts/install_other.sh class cuba ``` -Once you have installed the above (either natively or using Conda), you can run: +This will install the two packages in the directories `/opt/cuba_[ARCH]` and `/opt/class_public_[ARCH]`, where arch is either `x86_64` or `arm64` depending on your CPU architecture. + +#### Installing COFFE + +Now that the prerequisites are installed, you can install COFFE using: ```sh -./install +pip install . ``` -to install all of the other dependencies which COFFE requires. +If you would additionally like to install all of the various tools for testing, generating docs, and development, you can additionally run: + +```sh +pip install '.[all]' +``` ## Documentation The documentation for the latest version is available [here](https://jcgoran.github.io/coffe/). +To build the documentation, you can run `bash scripts/generate_docs.sh`, after which the documentation will be available under `docs/`. ## Bug reports and feature requests @@ -108,37 +167,88 @@ A `bibTeX` entry is provided below for convenience. ## Development -### Testing +## Testing COFFE + +If you would like to test COFFE, you can do so in two ways: using either `pytest`, or `cmake` (deprecated). + +### Testing with pytest + +To run the tests via `pytest`, first install COFFE using the instructions above, and then run: + +```sh +python -m pytest tests/ +``` + +### Testing with `cmake` (deprecated) -#### C tests (legacy) +If you do not want to build COFFE using `pip install`, you can instead use `cmake`, which is installable via `pip install cmake`. To do so, follow all of the above instructions, but instead of doing `pip install .`, you can instead do: -To run the C test suite, you can use the command `make check`, which will build the binaries `test_[MODULE]`, where `[MODULE]` can currently be one of `background`, `integrals`, `corrfunc`, `multipoles`, `covariance`, and automatically run them. -Alternatively, you can build them one by one using `make test_[MODULE]`, and run them manually via `./test_[MODULE]`. -This is primarily useful when modifying the code itself, to make sure the old results of the code weren't broken by some new change (feature, bugfix, etc.). 
+```sh +mkdir build +cd build +cmake -DCMAKE_INSTALL_PREFIX=./install -DCOFFE_ENABLE_PYTHON=ON -DCOFFE_ENABLE_CLASS=ON -DCOFFE_ENABLE_CUBA=ON -DCOFFE_ENABLE_MATHOPTS=ON -DCOFFE_ENABLE_TESTS=ON .. +``` -#### Python tests +In case of issues with missing detection of GSL, FFTW, etc., which can happen when using Conan on MacOS, you can use: + +```sh +cmake -DCMAKE_INSTALL_PREFIX=./install -DCOFFE_ENABLE_PYTHON=ON -DCOFFE_ENABLE_CLASS=ON -DCOFFE_ENABLE_CUBA=ON -DCOFFE_ENABLE_MATHOPTS=ON -DCOFFE_ENABLE_TESTS=ON -DCMAKE_TOOLCHAIN_FILE=../_build/conan_toolchain.cmake -DCOFFE_PYTHON_MINOR_VERSION=[VERSION] .. +``` -To run the Python test suite, first install the development requirements: +where you must replace `[VERSION]` with whatever minor version of Python you are using (for instance, when using Python 3.9, replace `[VERSION]` with `9`). +Then you can build COFFE tests using: ```sh -pip install -r pip-requirements-dev.txt +make ``` -Then run: +**NOTE**: if you have Ninja installed, you can additionally pass `-G Ninja` to the above `cmake` command, and then run `ninja build` instead of `make`. + +Finally, run the tests using: ```sh -pytest tests/test_coffe.py +ctest ``` ### Building Python wheels The building of wheels is done using the [`cibuildwheel`](https://cibuildwheel.readthedocs.io/en/stable/) utility. -If building wheels for Linux, you can run: +To install it, run: ```sh -sh build_wheels_linux.sh +pip install cibuildwheel ``` -which should build all of the wheels for Python 3.7 and above on Linux. +#### Linux + +Building of wheels on Linux requires a container engine like [Docker](https://docs.docker.com/engine/install/) or [Podman](https://podman.io/docs/installation). +Once one of those is installed, the wheels can be built using: + +```sh +cibuildwheel --platform linux +``` + +The wheels will then be available in the `wheelhouse` subdirectory, and can then be uploaded to PyPI. + +#### MacOS + +The MacOS wheels require an [official Python installer](https://www.python.org/downloads/macos/); the ones from Homebrew, Conda, etc. will most likely not work. +To build the wheels, run: + +```sh +cibuildwheel --platform macos +``` + +The wheels will then be available in the `wheelhouse` subdirectory, and can then be uploaded to PyPI. + +**IMPORTANT NOTE**: if you installed GSL, FFTW, or libconfig via Brew, make sure to unlink them first using: + +```sh +brew unlink gsl fftw libconfig +``` + +because otherwise `cibuildwheel` (or rather, `auditwheel`) may complain about mismatching OS versions. + +### Releasing Python wheels -The building of wheels for other platforms is not implemented, however, contributions are certainly welcome and encouraged. +To automate the tedious task of building the wheels, they are now setup in the CI. 
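+
+For instance, the `workflow_dispatch` trigger of `.github/workflows/build-wheels.yml` can be started manually (a sketch, assuming the GitHub CLI `gh` is installed and you have the required permissions; the version number is purely illustrative):
+
+```sh
+gh workflow run build-wheels.yml -f version=1.2.3
+```
+
+which builds wheels for all supported Python versions and uploads them as CI artifacts.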
diff --git a/build_sdist.sh b/build_sdist.sh deleted file mode 100755 index 3cb4421..0000000 --- a/build_sdist.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/usr/bin/env sh - -set -eux - -python3 -m build --sdist - -set +eux diff --git a/build_wheels_linux.sh b/build_wheels_linux.sh deleted file mode 100755 index 58a35d8..0000000 --- a/build_wheels_linux.sh +++ /dev/null @@ -1,11 +0,0 @@ -#!/usr/bin/env sh - -set -eux -# script for building multiple manylinux images - -# by default we build the manylinux2014 wheel -CIBW_BEFORE_ALL="sh build_wheels_linux_${1:-manylinux2014}.sh" \ - CIBW_MANYLINUX_X86_64_IMAGE="${1:-manylinux2014}" \ - cibuildwheel --platform linux - -set +eux diff --git a/build_wheels_linux_manylinux2014.sh b/build_wheels_linux_manylinux2014.sh deleted file mode 100755 index b2e1d45..0000000 --- a/build_wheels_linux_manylinux2014.sh +++ /dev/null @@ -1,15 +0,0 @@ -#!/usr/bin/env sh -set -eux - -# we need to build GSL from source as the one in the repos is too old (need -# version 2.1 for 2D interpolation) -GSL_VERSION="2.0" - -curl -sL -vvv https://ftp.gnu.org/gnu/gsl/gsl-${GSL_VERSION}.tar.gz --output libgsl.tar.gz -tar xf libgsl.tar.gz -(cd gsl-${GSL_VERSION} && ./autogen.sh && ./configure && make && make install) -yum install -y fftw-devel -git submodule update --init --recursive -git submodule foreach 'sh install.sh' - -set +eux diff --git a/build_wheels_linux_manylinux_2_24.sh b/build_wheels_linux_manylinux_2_24.sh deleted file mode 100755 index 5994c5e..0000000 --- a/build_wheels_linux_manylinux_2_24.sh +++ /dev/null @@ -1,9 +0,0 @@ -#!/usr/bin/env sh -set -eux - -apt-get update -apt-get install -y libgsl-dev libfftw3-dev -git submodule update --init --recursive -git submodule foreach 'sh install.sh' - -set +eux diff --git a/build_wheels_linux_manylinux_2_28.sh b/build_wheels_linux_manylinux_2_28.sh deleted file mode 100755 index b9e88d6..0000000 --- a/build_wheels_linux_manylinux_2_28.sh +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env sh -set -eux - -yum install -y gsl-devel fftw-devel -git submodule update --init --recursive -git submodule foreach 'sh install.sh' - -set +eux diff --git a/class_public b/class_public deleted file mode 160000 index 8efeba0..0000000 --- a/class_public +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 8efeba01ab61aaf56879f8d4b1264f6c977a4874 diff --git a/cmake/FindCython.cmake b/cmake/FindCython.cmake new file mode 100644 index 0000000..b995831 --- /dev/null +++ b/cmake/FindCython.cmake @@ -0,0 +1,107 @@ +# .rst: +# +# Find ``cython`` executable. +# +# This module will set the following variables in your project: +# +# ``CYTHON_EXECUTABLE`` path to the ``cython`` program +# +# ``CYTHON_VERSION`` version of ``cython`` +# +# ``CYTHON_FOUND`` true if the program was found +# +# And the following target: +# +# ``Cython::Cython`` The Cython executable +# +# A range of versions is supported on CMake 3.19+. See also UseCython. +# +# For more information on the Cython project, see https://cython.org/. +# +# *Cython is a language that makes writing C extensions for the Python language +# as easy as Python itself.* +# +# ============================================================================= +# Copyright 2011 Kitware, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. 
You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. +# ============================================================================= + +# Use the Cython executable that lives next to the Python executable if it is a +# local installation. +if(Python_EXECUTABLE) + get_filename_component(_python_path ${Python_EXECUTABLE} PATH) +elseif(Python3_EXECUTABLE) + get_filename_component(_python_path ${Python3_EXECUTABLE} PATH) +elseif(DEFINED PYTHON_EXECUTABLE) + get_filename_component(_python_path ${PYTHON_EXECUTABLE} PATH) +endif() + +if(DEFINED _python_path) + find_program( + CYTHON_EXECUTABLE + NAMES cython cython.bat cython3 + HINTS ${_python_path} + DOC "path to the cython executable") +else() + find_program( + CYTHON_EXECUTABLE + NAMES cython cython.bat cython3 + DOC "path to the cython executable") +endif() + +if(CYTHON_EXECUTABLE) + set(CYTHON_version_command "${CYTHON_EXECUTABLE}" --version) + + execute_process( + COMMAND ${CYTHON_version_command} + OUTPUT_VARIABLE CYTHON_version_output + ERROR_VARIABLE CYTHON_version_error + RESULT_VARIABLE CYTHON_version_result + OUTPUT_STRIP_TRAILING_WHITESPACE ERROR_STRIP_TRAILING_WHITESPACE) + + if(NOT ${CYTHON_version_result} EQUAL 0) + set(_error_msg "Command \"${CYTHON_version_command}\" failed with") + set(_error_msg "${_error_msg} output:\n${CYTHON_version_error}") + message(FATAL_ERROR "${_error_msg}") + elseif("${CYTHON_version_output}" MATCHES "^[Cc]ython version ([^,]+)") + set(CYTHON_VERSION "${CMAKE_MATCH_1}") + elseif("${CYTHON_version_error}" MATCHES "^[Cc]ython version ([^,]+)") + set(CYTHON_VERSION "${CMAKE_MATCH_1}") + else() + message(FATAL_ERROR "Invalid Cython version output") + endif() +endif() + +include(FindPackageHandleStandardArgs) + +if(CMAKE_VERSION VERSION_LESS 3.19) + set(_handle_version_range) +else() + set(_handle_version_range HANDLE_VERSION_RANGE) +endif() + +find_package_handle_standard_args( + Cython + REQUIRED_VARS CYTHON_EXECUTABLE + VERSION_VAR ${CYTHON_VERSION} ${_handle_version_range}) + +if(CYTHON_FOUND) + if(NOT DEFINED Cython::Cython) + add_executable(Cython::Cython IMPORTED) + set_target_properties(Cython::Cython PROPERTIES IMPORTED_LOCATION + "${CYTHON_EXECUTABLE}") + endif() +endif() + +mark_as_advanced(CYTHON_EXECUTABLE) diff --git a/cmake/UseCython.cmake b/cmake/UseCython.cmake new file mode 100644 index 0000000..3690630 --- /dev/null +++ b/cmake/UseCython.cmake @@ -0,0 +1,236 @@ +# .rst: +# +# The following functions are defined: +# +# .. cmake:command:: Cython_transpile +# +# Create custom rules to generate the source code for a Python extension module +# using cython. +# +# Cython_transpile( [LANGUAGE C | CXX] [CYTHON_ARGS ...] +# [OUTPUT ] [OUTPUT_VARIABLE ]) +# +# Options: +# +# ``LANGUAGE [C | CXX]`` Force the generation of either a C or C++ file. +# Recommended; will attempt to be deduced if not specified, defaults to C unless +# only CXX is enabled. +# +# ``CYTHON_ARGS `` Specify additional arguments for the cythonization +# process. Will default to the ``CYTHON_ARGS`` variable if not specified. +# +# ``OUTPUT `` Specify a specific path for the output file as +# ````. By default, this will output into the current binary dir. 
A +# depfile will be created alongside this file as well. +# +# ``OUTPUT_VARIABLE `` Set the variable ```` in +# the parent scope to the path to the generated source file. +# +# Defined variables: +# +# ```` +# The path of the generated source file. +# +# Usage example: +# +# .. code-block:: cmake +# +# find_package(Cython) include(UseCython) +# +# Cython_transpile(_hello.pyx OUTPUT_VARIABLE _hello_source_files ) +# +# Python_add_library(_hello MODULE ${_hello_source_files} WITH_SOABI ) +# +# ============================================================================= +# Copyright 2011 Kitware, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. +# ============================================================================= + +if(CMAKE_VERSION VERSION_LESS "3.8") + # CMake 3.7 required for DEPFILE CMake 3.8 required for COMMAND_EXPAND_LISTS + message(FATAL_ERROR "CMake 3.8 required for COMMAND_EXPAND_LISTS") +endif() + +function(Cython_transpile) + set(_options) + set(_one_value LANGUAGE OUTPUT OUTPUT_VARIABLE) + set(_multi_value CYTHON_ARGS) + + cmake_parse_arguments(_args "${_options}" "${_one_value}" "${_multi_value}" + ${ARGN}) + + if(DEFINED CYTHON_EXECUTABLE) + set(_cython_command "${CYTHON_EXECUTABLE}") + elseif(DEFINED Python_EXECUTABLE) + set(_cython_command "${Python_EXECUTABLE}" -m cython) + elseif(DEFINED Python3_EXECUTABLE) + set(_cython_command "${Python3_EXECUTABLE}" -m cython) + else() + message(FATAL_ERROR "Cython executable not found") + endif() + + # Default to CYTHON_ARGS if argument not specified + if(NOT _args_CYTHON_ARGS AND DEFINED CYTHON_ARGS) + set(_args_CYTHON_ARGS "${CYTHON_ARGS}") + endif() + + # Get input + set(_source_files ${_args_UNPARSED_ARGUMENTS}) + list(LENGTH _source_files input_length) + if(NOT input_length EQUAL 1) + message( + FATAL_ERROR + "One and only one input file must be specified, got '${_source_files}'") + endif() + + function(_transpile _source_file generated_file language) + + if(language STREQUAL "C") + set(_language_arg "") + elseif(language STREQUAL "CXX") + set(_language_arg "--cplus") + else() + message(FATAL_ERROR "_transpile language must be one of C or CXX") + endif() + + set_source_files_properties(${generated_file} PROPERTIES GENERATED TRUE) + + # Generated depfile is expected to have the ".dep" extension and be located + # along side the generated source file. 
+ set(_depfile ${generated_file}.dep) + set(_depfile_arg "-M") + + # Normalize the input path + get_filename_component(_source_file "${_source_file}" ABSOLUTE) + + # Pretty-printed output names + file(RELATIVE_PATH generated_file_relative ${CMAKE_BINARY_DIR} + ${generated_file}) + file(RELATIVE_PATH source_file_relative ${CMAKE_SOURCE_DIR} ${_source_file}) + set(comment + "Generating ${_language} source '${generated_file_relative}' from '${source_file_relative}'" + ) + + # Get output directory to ensure its exists + get_filename_component(output_directory "${generated_file}" DIRECTORY) + + get_source_file_property(pyx_location ${_source_file} LOCATION) + + # Add the command to run the compiler. + add_custom_command( + OUTPUT ${generated_file} + COMMAND ${CMAKE_COMMAND} -E make_directory ${output_directory} + COMMAND ${_cython_command} ${_language_arg} "${_args_CYTHON_ARGS}" + ${_depfile_arg} ${pyx_location} --output-file ${generated_file} + COMMAND_EXPAND_LISTS + MAIN_DEPENDENCY ${_source_file} + DEPFILE ${_depfile} + VERBATIM + COMMENT ${comment}) + endfunction() + + function(_set_output _input_file _language _output_var) + if(_language STREQUAL "C") + set(_language_extension "c") + elseif(_language STREQUAL "CXX") + set(_language_extension "cxx") + else() + message(FATAL_ERROR "_set_output language must be one of C or CXX") + endif() + + # Can use cmake_path for CMake 3.20+ cmake_path(GET _input_file STEM + # basename) + get_filename_component(_basename "${_input_file}" NAME_WE) + + if(IS_ABSOLUTE ${_input_file}) + file(RELATIVE_PATH _input_relative ${CMAKE_CURRENT_SOURCE_DIR} + ${_input_file}) + else() + set(_input_relative ${_input_file}) + endif() + + get_filename_component(_output_relative_dir "${_input_relative}" DIRECTORY) + string(REPLACE "." "_" _output_relative_dir "${_output_relative_dir}") + if(_output_relative_dir) + set(_output_relative_dir "${_output_relative_dir}/") + endif() + + set(${_output_var} + "${CMAKE_CURRENT_BINARY_DIR}/${_output_relative_dir}${_basename}.${_language_extension}" + PARENT_SCOPE) + endfunction() + + set(generated_files) + + list(GET _source_files 0 _source_file) + + # Set target language + set(_language ${_args_LANGUAGE}) + if(NOT _language) + get_property(_languages GLOBAL PROPERTY ENABLED_LANGUAGES) + if("C" IN_LIST _languages AND "CXX" IN_LIST _languages) + # Try to compute language. Returns falsy if not found. 
+ _cython_compute_language(_language ${_source_file}) + elseif("C" IN_LIST _languages) + # If only C is enabled globally, assume C + set(_language "C") + elseif("CXX" IN_LIST _languages) + # Likewise for CXX + set(_language "CXX") + else() + message( + FATAL_ERROR + "LANGUAGE keyword required if neither C nor CXX enabled globally") + endif() + endif() + + if(NOT _language MATCHES "^(C|CXX)$") + message(FATAL_ERROR "Cython_transpile LANGUAGE must be one of C or CXX") + endif() + + # Place the cython files in the current binary dir if no path given + if(NOT _args_OUTPUT) + _set_output(${_source_file} ${_language} _args_OUTPUT) + elseif(NOT IS_ABSOLUTE ${_args_OUTPUT}) + set(_args_OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/${_args_OUTPUT}") + endif() + + set(generated_file ${_args_OUTPUT}) + _transpile(${_source_file} ${generated_file} ${_language}) + list(APPEND generated_files ${generated_file}) + + # Output variable only if set + if(_args_OUTPUT_VARIABLE) + set(_output_variable ${_args_OUTPUT_VARIABLE}) + set(${_output_variable} + ${generated_files} + PARENT_SCOPE) + endif() + +endfunction() + +function(_cython_compute_language OUTPUT_VARIABLE FILENAME) + file(READ "${FILENAME}" FILE_CONTENT) + # Check for compiler directive similar to "# distutils: language = c++" See + # https://cython.readthedocs.io/en/latest/src/userguide/wrapping_CPlusPlus.html#declare-a-var-with-the-wrapped-c-class + set(REGEX_PATTERN + [=[^[[:space:]]*#[[:space:]]*distutils:.*language[[:space:]]*=[[:space:]]*(c\\+\\+|c)]=] + ) + string(REGEX MATCH "${REGEX_PATTERN}" MATCH_RESULT "${FILE_CONTENT}") + string(TOUPPER "${MATCH_RESULT}" LANGUAGE_NAME) + string(REPLACE "+" "X" LANGUAGE_NAME "${LANGUAGE_NAME}") + set(${OUTPUT_VARIABLE} + ${LANGUAGE_NAME} + PARENT_SCOPE) +endfunction() diff --git a/conanfile.txt b/conanfile.txt new file mode 100644 index 0000000..de75bd0 --- /dev/null +++ b/conanfile.txt @@ -0,0 +1,8 @@ +[requires] +gsl/2.7 +fftw/3.3.9 +libconfig/1.7.2 + +[generators] +CMakeDeps +CMakeToolchain diff --git a/configure.ac b/configure.ac deleted file mode 100644 index 8c9fb23..0000000 --- a/configure.ac +++ /dev/null @@ -1,101 +0,0 @@ -# -*- Autoconf -*- -# Process this file with autoconf to produce a configure script. - -AC_PREREQ([2.69]) -AC_INIT([coffe], [2.0.4], [goran.jelic-cizmek@unige.ch]) -AM_INIT_AUTOMAKE([foreign subdir-objects]) # Does not require NEWS, COPYING, AUTHORS, ChangeLog or README - -# silent make https://autotools.io/automake/silent.html -# silent rules enabled by default with 'yes' -# disable silent runles with ./configure --disable-silent-rules -AM_SILENT_RULES([yes]) # less verbose make output - -AC_CONFIG_SRCDIR([src/main.c]) - -AC_LANG([C]) # Use C not C++ - -: ${CFLAGS=""} - -# Checks for programs. -AC_PROG_CC - -AC_PROG_CC_C99 - -AC_PROG_RANLIB - -AC_C_CONST - -AC_C_INLINE - -AC_OPENMP - -AC_SUBST(AM_CFLAGS,"$OPENMP_CFLAGS") - -AM_CONDITIONAL([FASTFLAG], false) -AX_CHECK_COMPILE_FLAG([-Ofast], [FASTFLAG=true], [FASTFLAG=false]) - -if $FASTFLAG; then - AC_SUBST(AM_CFLAGS,"$AM_CFLAGS -Ofast") -else - AC_SUBST(AM_CFLAGS,"$AM_CFLAGS -O3") -fi - -dnl AM_CONDITIONAL([OPENACCFLAG], false) -dnl AX_CHECK_COMPILE_FLAG([-fopenacc], [OPENACCFLAG=true], [OPENACCFLAG=false]) - -dnl if $OPENACCFLAG; then -dnl AC_SUBST(AM_CFLAGS,"$AM_CFLAGS -fopenacc") -dnl fi - -AC_PROG_INSTALL - - -# Checks for libraries. 
- -# Found libraries are automatically addded to LIBS -AC_CHECK_LIB([m],[cos],[], AC_MSG_FAILURE([Math library not found.])) -AC_CHECK_LIB([gslcblas],[cblas_dgemm], [], AC_MSG_FAILURE([GSL CBLAS not found.])) -AC_CHECK_LIB([gsl],[gsl_blas_dgemm], [], AC_MSG_FAILURE([GSL not found.])) - -AC_SEARCH_LIBS([fftw_plan_dft_r2c_1d], [fftw3],[],[ - AC_MSG_ERROR([You need to install the FFTW3 library.]) - ]) - -AC_SEARCH_LIBS([config_lookup_float], [config],[],[ - AC_MSG_ERROR([You need to install the libconfig library.]) - ]) - - -# Checks for optional libraries -AC_SEARCH_LIBS([Cuhre], [cuba],[],[ - AC_MSG_WARN([CUBA library not found.]) - ]) - -AC_SEARCH_LIBS([input_default_params], [class],[],[ - AC_MSG_WARN([CLASS library not found.]) - ], ["$OPENMP_CFLAGS"]) - -dnl AC_SEARCH_LIBS([flint_malloc], [flint], [], [ -dnl AC_MSG_WARN([FLINT library not found.]) -dnl ]) - -dnl AC_SEARCH_LIBS([acb_init], [flint flint-arb arb], [], [ -dnl AC_MSG_WARN([ARB library not found.]) -dnl ]) - -# Checks for header files. -AC_HEADER_ASSERT # ./configure --disable-assert to define NDEBUG -AC_CHECK_HEADERS([stdlib.h stdarg.h complex.h math.h string.h float.h limits.h stddef.h], [], [ - AC_MSG_ERROR([Certain standard headers not found.]) - ]) - -AC_CHECK_FUNCS([mkdir pow sqrt], [],[ - AC_MSG_ERROR([Certain standard functions not found.]) - ]) - -AC_ARG_ENABLE(class, [ --enable-class Automatically includes CLASS library if found], CPPFLAGS="$CPPFLAGS -DHAVE_CLASS", []) -dnl AC_ARG_ENABLE(covariance, [--enable-covariance Automatically includes the covariance calculation if required libraries found], CPPFLAGS="$CPPFLAGS -DHAVE_COVARIANCE", []) -AC_ARG_ENABLE(cuba, [ --enable-cuba Automatically includes the CUBA library if found], CPPFLAGS="$CPPFLAGS -DHAVE_CUBA", []) - -AC_CONFIG_FILES([Makefile]) -AC_OUTPUT diff --git a/external/class_public b/external/class_public new file mode 160000 index 0000000..fddd535 --- /dev/null +++ b/external/class_public @@ -0,0 +1 @@ +Subproject commit fddd535a3c235d986f17867a414766575ffa12e1 diff --git a/libcuba b/external/libcuba similarity index 100% rename from libcuba rename to external/libcuba diff --git a/generate_docs.sh b/generate_docs.sh deleted file mode 100755 index 981eb73..0000000 --- a/generate_docs.sh +++ /dev/null @@ -1,9 +0,0 @@ -#!/usr/bin/env sh - -set -eux - -python3 -m pdoc --docformat numpy --math -o docs/ coffe - -printf "Docs generated at: %s\n" "${PWD}/docs/index.html" - -set +eux diff --git a/install b/install deleted file mode 100755 index 062a944..0000000 --- a/install +++ /dev/null @@ -1,20 +0,0 @@ -#!/usr/bin/env sh - -# THIS IS THE ONLY SCRIPT YOU NEED TO RUN TO INSTALL COFFE - -set -eux - -# in case of no conda env, we just install the Python-specific requirements -if [ -z "${CONDA_PREFIX-}" ] && [ -z "${CONDA_DEFAULT_ENV-}" ] -then - python3 -m pip install Cython wheel -# otherwise, we need to install all of the requirements (compiler, GSL lib, etc.) 
-else - conda install --channel conda-forge --file requirements.txt -fi - -git submodule update --init --recursive -git submodule foreach './install.sh' -sh reinstall.sh - -set +eux diff --git a/install_other.sh b/install_other.sh deleted file mode 100755 index b02ef09..0000000 --- a/install_other.sh +++ /dev/null @@ -1,52 +0,0 @@ -#!/usr/bin/env sh - -set -e -# installer for CLASS when using a conda environment -CLASS_DIR="class_public" -CLASS_REMOTE_URL="https://github.com/JCGoran/class_public" -CLASS_BRANCH="feature/conda" - -CUBA_DIR="cuba" -CUBA_REMOTE_URL="https://github.com/JCGoran/libcuba" -CUBA_BRANCH="master" - -install_cuba(){ - if [ -z "${CONDA_PREFIX}" ] - then - printf 'You need to activate a conda environment using `conda activate [ENVIRONMENT]` before running this script\n' - return 1 - fi - - if [ ! -d "${CUBA_DIR}" ] - then - printf 'Attempting to install CUBA in the current environment (%s)...\n' "${CONDA_DEFAULT_ENV}" - printf 'Cloning to directory %s...\n' "${CUBA_DIR}" - git clone --branch "${CUBA_BRANCH}" "${CUBA_REMOTE_URL}" "${CUBA_DIR}" - fi - - cd "${CUBA_DIR}" && autoreconf --install && ./configure --prefix="${CONDA_PREFIX}" CFLAGS=-fPIC && make install && cd - - printf 'CUBA successfully installed\n' -} - - - -install_class(){ - if [ -z "${CONDA_PREFIX}" ] - then - printf 'You need to activate a conda environment using `conda activate [ENVIRONMENT]` before running this script\n' - return 1 - fi - - if [ ! -d "${CLASS_DIR}" ] - then - printf 'Attempting to install CLASS in the current environment (%s)...\n' "${CONDA_DEFAULT_ENV}" - printf 'Cloning to directory %s...\n' "${CLASS_DIR}" - git clone --branch "${CLASS_BRANCH}" "${CLASS_REMOTE_URL}" "${CLASS_DIR}" - fi - - make -C "${CLASS_DIR}" libclass.a && cp -a "${CLASS_DIR}/libclass.a" "${CONDA_PREFIX}/lib/" && cp -a "${CLASS_DIR}/include/"*.h "${CONDA_PREFIX}/include/" - printf 'CLASS successfully installed\n' -} - -install_cuba && install_class -set +e diff --git a/pip-requirements-dev.txt b/pip-requirements-dev.txt deleted file mode 100644 index 6ace6a5..0000000 --- a/pip-requirements-dev.txt +++ /dev/null @@ -1,12 +0,0 @@ -pdoc -scipy -pandas -pylint -pytest -mypy -isort -reorder-python-imports -autoimport -black -cibuildwheel -twine diff --git a/pip-requirements.txt b/pip-requirements.txt deleted file mode 100644 index 24ce15a..0000000 --- a/pip-requirements.txt +++ /dev/null @@ -1 +0,0 @@ -numpy diff --git a/pyproject.toml b/pyproject.toml index 32494c8..b754dd7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,9 +1,9 @@ [project] name = "coffe" -version = "3.0.0" +dynamic = ["version"] description = "Python wrapper for the COrrelation Function Full-sky Estimator code" readme = "README.md" -requires-python = ">=3.7" +requires-python = ">=3.8" authors = [ {name = "Goran Jelic-Cizmek", email = "goran.jelic-cizmek@unige.ch"}, ] @@ -15,17 +15,40 @@ classifiers = [ "Programming Language :: Cython", "Programming Language :: C", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: Scientific/Engineering :: Physics", ] dependencies = [ - "numpy>=1.19.5", + "numpy>=1.19.5,<2", ] +optional-dependencies.test = [ + "scipy", + "pandas", + "pytest", +] + +optional-dependencies.docs = [ + "pdoc", +] + +optional-dependencies.dev = [ + "pylint", + "mypy", + "isort", + 
"reorder-python-imports", + "autoimport", + "black", + "cibuildwheel", + "cmakelang", +] + +optional-dependencies.all = ["coffe[test,docs,dev]"] + [project.scripts] coffe-cli = "coffe.__main__:main" @@ -33,13 +56,59 @@ coffe-cli = "coffe.__main__:main" repository = "https://github.com/JCGoran/coffe" [tool.cibuildwheel] -skip = ["cp36-*", "pp*", "pypy*", "*musllinux*"] +skip = ["cp36-*", "cp37-*", "pp*", "pypy*", "*musllinux*"] +test-requires = ["pytest", "scipy", "pandas"] +test-command = ["python -c 'import coffe'", "pytest -vvv -k test_power_spectrum {package}/tests"] [tool.cibuildwheel.macos] -archs = ["x86_64", "arm64"] +before-all = [ + # install CUBA, FFTW, GSL, libconfig, and CLASS + "conan profile detect --force", + "conan install . --output-folder=_build --build=missing", + "bash scripts/install_other.sh cuba class", +] +# `SKBUILD_CMAKE_ARGS` overrides everything, so our `cmake.define`s need to be placed here as well +environment = { SKBUILD_CMAKE_ARGS = "-DCOFFE_ENABLE_CLASS=ON;-DCOFFE_ENABLE_CUBA=ON;-DCOFFE_ENABLE_MATHOPTS=ON;-DCOFFE_ENABLE_PYTHON=ON;-DCMAKE_TOOLCHAIN_FILE=_build/conan_toolchain.cmake;-DCOFFE_PYTHON_MINOR_VERSION=$(python -c 'import sys;print(sys.version_info.minor)')", MACOSX_DEPLOYMENT_TARGET = "11.0" } [tool.cibuildwheel.linux] +manylinux-x86_64-image = "quay.io/pypa/manylinux_2_28_x86_64" archs = ["x86_64"] +before-all = [ + # install CUBA, FFTW, GSL, libconfig, and CLASS + "bash scripts/install_other.sh gsl libconfig fftw cuba class", +] +environment-pass = ["SETUPTOOLS_SCM_PRETEND_VERSION"] [build-system] -requires = ["setuptools", "wheel", "Cython"] +requires = [ + "scikit-build-core", "cython<3", "setuptools-scm>=8.0.0", + # why do we need Conan only on Darwin (i.e. MacOS)? + # because GSL, FFTW, etc. installed via `brew` always have the + # deployment target equal to the one of the build machine, + # while Linux does not care about this + "conan; sys_platform=='darwin'" +] +build-backend = "scikit_build_core.build" + +[tool.scikit-build] +metadata.version.provider = "scikit_build_core.metadata.setuptools_scm" +wheel.packages = ["python/coffe"] +logging.level = "DEBUG" +# we want the wheels to basically have 2 kinds of files: +# - *.py sources +# - *.so or *.dylib shared libraries built with Cython +wheel.exclude = ["**.pyx", "CMakeLists.txt", "**.pxd"] + +[tool.scikit-build.cmake] +verbose = true +version = ">=3.15.0" + +[tool.scikit-build.cmake.define] +# on MacOS, this probably doesn't work; +# to make a wheel, just use `cibuildwheel --platform macos` +COFFE_ENABLE_CLASS = "ON" +COFFE_ENABLE_PYTHON = "ON" +COFFE_ENABLE_MATHOPTS = "ON" +COFFE_ENABLE_CUBA="ON" + +[tool.setuptools_scm] diff --git a/python/coffe/CMakeLists.txt b/python/coffe/CMakeLists.txt new file mode 100644 index 0000000..c7d24b5 --- /dev/null +++ b/python/coffe/CMakeLists.txt @@ -0,0 +1,33 @@ +include(${CMAKE_SOURCE_DIR}/cmake/UseCython.cmake) +cython_transpile(${CMAKE_CURRENT_SOURCE_DIR}/coffe.pyx OUTPUT_VARIABLE + _coffe_sources) + +python_add_library(coffe MODULE ${_coffe_sources} ${COFFE_LIB_SOURCES} + WITH_SOABI) + +target_compile_options(coffe PRIVATE ${COFFE_MATHOPTS}) + +# Add all include directories Note that we include CLASS directories last due to +# naming conflicts (i.e. 
both COFFE and CLASS have a `parser.h` file) +target_include_directories( + coffe PRIVATE ${CONFIG_INCLUDE_DIRS} ${FFTW3_INCLUDE_DIRS} + ${CUBA_INCLUDE_DIRS} ${SRC_DIR} ${CLASS_INCLUDE_DIRS}) + +# Link the libraries properly +target_link_libraries( + coffe + PUBLIC ${M_LIB} + ${GSL_LIBRARIES} + ${FFTW3_LIB} + ${FFTW3_LIBRARIES} + ${CONFIG_LIB} + ${libconfig_LIBRARIES} + ${CUBA_LIB} + ${CLASS_LIB}) + +# "install" the coffe Python library TODO: if we are not building a wheel, put +# it somewhere else, like `DIR/lib/pythonX.Y/site-packages/coffe` +install( + TARGETS coffe + COMPONENT Runtime + DESTINATION coffe) diff --git a/coffe/__init__.py b/python/coffe/__init__.py similarity index 86% rename from coffe/__init__.py rename to python/coffe/__init__.py index 26ae1dd..5967907 100644 --- a/coffe/__init__.py +++ b/python/coffe/__init__.py @@ -5,16 +5,12 @@ ### Installation -In brief, if you are on Linux, you can install COFFE using: +In brief, if you are on Linux or MacOS, you can install COFFE using: ```sh pip install coffe ``` -Refer to the installation section of the -[README](https://github.com/JCGoran/coffe/) for installation instructions on -various other platforms. - ### Running #### From a Python environment diff --git a/coffe/__main__.py b/python/coffe/__main__.py similarity index 100% rename from coffe/__main__.py rename to python/coffe/__main__.py diff --git a/coffe/ccoffe.pxd b/python/coffe/coffe.pxd similarity index 100% rename from coffe/ccoffe.pxd rename to python/coffe/coffe.pxd diff --git a/coffe/coffe.pyx b/python/coffe/coffe.pyx similarity index 99% rename from coffe/coffe.pyx rename to python/coffe/coffe.pyx index 43be491..2dc8a57 100644 --- a/coffe/coffe.pyx +++ b/python/coffe/coffe.pyx @@ -8,7 +8,7 @@ The main modules used for the Python version of the COFFE code # TODO figure out how to use OpenMP from libc.stdlib cimport malloc, free -from coffe cimport ccoffe +from coffe cimport coffe as ccoffe from coffe.representation import ( Corrfunc, Multipoles, diff --git a/coffe/representation.py b/python/coffe/representation.py similarity index 100% rename from coffe/representation.py rename to python/coffe/representation.py diff --git a/reinstall.sh b/reinstall.sh deleted file mode 100755 index df8dc47..0000000 --- a/reinstall.sh +++ /dev/null @@ -1,10 +0,0 @@ -#!/usr/bin/env sh - -# This script is used to rebuild COFFE is changes are made to the source files - -set -eux - -python3 -m pip install . -python3 setup.py build_ext -i - -set +eux diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 4f985f8..0000000 --- a/requirements.txt +++ /dev/null @@ -1,9 +0,0 @@ -c-compiler -gsl -fftw -make -automake -autoconf -autoconf-archive -cython -libconfig diff --git a/scripts/Dockerfile b/scripts/Dockerfile new file mode 100644 index 0000000..8230adc --- /dev/null +++ b/scripts/Dockerfile @@ -0,0 +1,6 @@ +ARG MANYLINUX_IMAGE=manylinux_2_28_x86_64 +FROM quay.io/pypa/$MANYLINUX_IMAGE +LABEL author="Goran Jelic-Cizmek" + +COPY install_other.sh . 
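+# Pre-install COFFE's native dependencies (CUBA, GSL, CLASS, FFTW, libconfig) into the
+# manylinux image, reusing the same install_other.sh helper invoked by the cibuildwheel config.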
+RUN bash install_other.sh cuba gsl class fftw libconfig diff --git a/scripts/generate_docs.sh b/scripts/generate_docs.sh new file mode 100755 index 0000000..3d0915a --- /dev/null +++ b/scripts/generate_docs.sh @@ -0,0 +1,13 @@ +#!/usr/bin/env sh + +set -eux + +# dir where the script is +script_dir="$(cd "$(dirname "$0")"; pwd -P)" + +python -m pip install pdoc +python -m pdoc --docformat numpy --math -o "${script_dir}/../docs/" coffe + +printf "Docs generated at: %s\n" "${script_dir}/../docs/index.html" + +set +eux diff --git a/scripts/install_other.sh b/scripts/install_other.sh new file mode 100755 index 0000000..980f408 --- /dev/null +++ b/scripts/install_other.sh @@ -0,0 +1,105 @@ +#!/usr/bin/env sh + +set -ex + +CLASS_INSTALL_DIR="${CLASS_INSTALL_DIR:-/opt/class_public_$(uname -m)}" +CUBA_INSTALL_DIR="${CUBA_INSTALL_DIR:-/opt/cuba_$(uname -m)}" +# the abs dir where this script is located (so we can call it from wherever) +script_dir="$(cd "$(dirname "$0")"; pwd -P)" + +install_cuba(){ + if [ -e "${CUBA_INSTALL_DIR}/lib/libcuba.a" ] + then + printf "CUBA already installed at path %s\n" "${CUBA_INSTALL_DIR}" + return 0 + fi + cd "${script_dir}/../external/libcuba/" + export MACOSX_DEPLOYMENT_TARGET='11.0' + git clean -xdf . + if [ "$(uname -s)" = 'Darwin' ] + then + CFLAGS="-fPIC -mmacosx-version-min=${MACOSX_DEPLOYMENT_TARGET}" + else + + CFLAGS="-fPIC" + fi + autoreconf --install + ./configure --prefix="${CUBA_INSTALL_DIR}" CFLAGS="${CFLAGS}" + + make install + cd - + printf 'CUBA installed in %s\n' "${CUBA_INSTALL_DIR}" +} + + + +install_class(){ + if [ -e "${CLASS_INSTALL_DIR}/lib/libclass.a" ] + then + printf "CLASS already installed at path %s\n" "${CLASS_INSTALL_DIR}" + return 0 + fi + current_dir="${script_dir}/../external/class_public/" + cd "${current_dir}" + export MACOSX_DEPLOYMENT_TARGET='11.0' + git clean -xdf . + make libclass.a + + mkdir -p "${CLASS_INSTALL_DIR}/lib" "${CLASS_INSTALL_DIR}/include" + cp -a "${current_dir}/libclass.a" "${CLASS_INSTALL_DIR}/lib/" + cp -a "${current_dir}/include/"*.h "${CLASS_INSTALL_DIR}/include/" + cd - + printf 'CLASS installed in %s\n' "${CLASS_INSTALL_DIR}" +} + +install_fftw(){ + if [ "$(uname -s)" = 'Darwin' ] + then + printf "Please use Conan to install FFTW\n" + return 1 + fi + yum install -y fftw-devel +} + +install_gsl(){ + if [ "$(uname -s)" = 'Darwin' ] + then + printf "Please use Conan to install GSL\n" + return 1 + fi + yum install -y gsl-devel +} + +install_libconfig(){ + if [ "$(uname -s)" = 'Darwin' ] + then + printf "Please use Conan to install libconfig\n" + return 1 + fi + yum install -y libconfig-devel +} + +for arg in $@ +do + case "${arg}" in + "gsl") + install_gsl + ;; + "cuba") + install_cuba + ;; + "class") + install_class + ;; + "fftw") + install_fftw + ;; + "libconfig") + install_libconfig + ;; + *) + ;; + esac +done + +set +ex diff --git a/setup.py b/setup.py deleted file mode 100755 index 932ffa3..0000000 --- a/setup.py +++ /dev/null @@ -1,90 +0,0 @@ -#!/usr/bin/env python3 - -import os - -from Cython.Build import cythonize -from setuptools import Extension, setup - - -def get_include_dirs(): - """ - Returns a list of all of the alternative include directories (virtual env, - conda env, etc.) - """ - return [ - os.path.join(os.environ.get(_), "include/") - for _ in ["VIRTUAL_ENV", "CONDA_PREFIX"] - if os.environ.get(_) - ] - - -def get_library_dirs(): - """ - Returns a list of all of the alternative library directories (virtual env, - conda env, etc.) 
- """ - return [ - os.path.join(os.environ.get(_), "lib/") - for _ in ["VIRTUAL_ENV", "CONDA_PREFIX"] - if os.environ.get(_) - ] - - -extra_compile_args = [ - "-fopenmp", - "-Ofast", - "-DHAVE_CLASS", - "-DHAVE_CUBA", - "-DCOFFE_CYTHON", -] - - -setup( - name="coffe", - version="3.0.0", - url="https://github.com/JCGoran/coffe", - author="Goran Jelic-Cizmek", - author_email="goran.jelic-cizmek@unige.ch", - packages=["coffe"], - ext_modules=cythonize( - [ - Extension( - "coffe.coffe", - sources=[ - "coffe/*.pyx", - "src/errors.c", - "src/common.c", - "src/parser.c", - "src/background.c", - "src/twofast.c", - "src/integrals.c", - "src/signal.c", - "src/functions.c", - "src/corrfunc.c", - "src/multipoles.c", - "src/utils.c", - "src/twobessel.c", - "src/covariance.c", - "src/average_multipoles.c", - "src/tanhsinh.c", - ], - include_dirs=[ - "src/", - "./", - ] - + get_include_dirs(), - libraries=[ - "m", - "gsl", - "gslcblas", - "fftw3", - "cuba", - "class", - ], - library_dirs=get_library_dirs(), - extra_compile_args=extra_compile_args, - extra_link_args=["-fopenmp"], - ), - ] - ), -) diff --git a/POWER_SPECTRUM_HEADER.dat b/src/POWER_SPECTRUM_HEADER.dat similarity index 100% rename from POWER_SPECTRUM_HEADER.dat rename to src/POWER_SPECTRUM_HEADER.dat diff --git a/WAVENUMBER_HEADER.dat b/src/WAVENUMBER_HEADER.dat similarity index 100% rename from WAVENUMBER_HEADER.dat rename to src/WAVENUMBER_HEADER.dat diff --git a/tests/test_background.c b/tests/test_background.c index 18c4720..2348757 100644 --- a/tests/test_background.c +++ b/tests/test_background.c @@ -21,7 +21,7 @@ static int coffe_test_background( /* load the file */ coffe_read_ncol( - DATADIR "/tests/benchmarks/benchmark_background.dat", + COFFE_TEST_DATADIR "/tests/benchmarks/benchmark_background.dat", 8, &size, &z, &a, &Hz, &conformal_Hz, &conformal_Hz_prime, &D1, &f, &comoving_distance diff --git a/tests/test_coffe.py b/tests/test_coffe.py index ef479f7..1a2b70b 100644 --- a/tests/test_coffe.py +++ b/tests/test_coffe.py @@ -12,7 +12,8 @@ from coffe_utils import average_covariance_matrix, covariance_matrix -DATA_DIR = Path("tests/benchmarks/") +DATA_DIR = Path(__file__).parent / "benchmarks" +TOPLEVEL_DIR = Path(__file__).parent.parent h = 0.67 COFFE_H0 = 1 / 2997.92458 @@ -119,7 +120,7 @@ def test_parameters(self): def test_power_spectrum(self): cosmo = Coffe() - k, pk = np.transpose(np.loadtxt("PkL_CLASS.dat")) + k, pk = np.transpose(np.loadtxt(TOPLEVEL_DIR / "PkL_CLASS.dat")) k, pk = k * h, pk / h**3 cosmo.set_power_spectrum_linear(k, pk) @@ -130,7 +131,7 @@ def test_power_spectrum(self): def test_cross_spectrum(self): cosmo = Coffe() - k, pk = np.transpose(np.loadtxt("PkL_CLASS.dat")) + k, pk = np.transpose(np.loadtxt(TOPLEVEL_DIR / "PkL_CLASS.dat")) k, pk = k * h, pk / h**3 cosmo.set_power_spectrum_linear(k, pk) @@ -195,7 +196,7 @@ def test_background(self): def test_integrals(self): cosmo = Coffe() - k, pk = np.transpose(np.loadtxt("PkL_CLASS.dat")) + k, pk = np.transpose(np.loadtxt(TOPLEVEL_DIR / "PkL_CLASS.dat")) k, pk = k * h, pk / h**3 cosmo.set_power_spectrum_linear(k, pk) @@ -238,7 +239,7 @@ def test_corrfunc(self): mu=[0.0, 0.2, 0.5, 0.8, 0.95], ) - k, pk = np.transpose(np.loadtxt("PkL_CLASS.dat")) + k, pk = np.transpose(np.loadtxt(TOPLEVEL_DIR / "PkL_CLASS.dat")) k, pk = k * h, pk / h**3 contributions = { @@ -273,7 +274,7 @@ def test_multipoles(self): l=[0, 2, 4], ) - k, pk = np.transpose(np.loadtxt("PkL_CLASS.dat")) + k, pk = np.transpose(np.loadtxt(TOPLEVEL_DIR / "PkL_CLASS.dat")) k, pk = k * h, pk / h**3 
cosmo.set_power_spectrum_linear(k, pk) @@ -310,7 +311,7 @@ def test_average_multipoles(self): z_max=[1.1], ) - k, pk = np.transpose(np.loadtxt("PkL_CLASS.dat")) + k, pk = np.transpose(np.loadtxt(TOPLEVEL_DIR / "PkL_CLASS.dat")) k, pk = k * h, pk / h**3 cosmo.set_power_spectrum_linear(k, pk) @@ -331,7 +332,7 @@ def test_multipoles_flat_lensing_lensing(self): l=[0, 2, 4], ) - k, pk = np.transpose(np.loadtxt("PkL_CLASS.dat")) + k, pk = np.transpose(np.loadtxt(TOPLEVEL_DIR / "PkL_CLASS.dat")) k, pk = k * h, pk / h**3 cosmo.set_power_spectrum_linear(k, pk) @@ -357,7 +358,7 @@ def test_multipoles_flat_density_lensing(self): l=[0, 2, 4], ) - k, pk = np.transpose(np.loadtxt("PkL_CLASS.dat")) + k, pk = np.transpose(np.loadtxt(TOPLEVEL_DIR / "PkL_CLASS.dat")) k, pk = k * h, pk / h**3 cosmo.set_power_spectrum_linear(k, pk) @@ -393,7 +394,7 @@ def test_covariance_multipoles(self): l=[0, 2, 4], ) - k, pk = np.transpose(np.loadtxt("PkL_CLASS.dat")) + k, pk = np.transpose(np.loadtxt(TOPLEVEL_DIR / "PkL_CLASS.dat")) k, pk = k * h, pk / h**3 cosmo.set_power_spectrum_linear(k, pk) @@ -445,7 +446,7 @@ def test_average_covariance_multipoles(self): l=[0, 2, 4], ) - k, pk = np.transpose(np.loadtxt("PkL_CLASS.dat")) + k, pk = np.transpose(np.loadtxt(TOPLEVEL_DIR / "PkL_CLASS.dat")) k, pk = k * h, pk / h**3 cosmo.set_power_spectrum_linear(k, pk) @@ -513,7 +514,7 @@ def test_multiple_populations(self): covariance_poisson=False, ) - k, pk = np.transpose(np.loadtxt("PkL_CLASS.dat")) + k, pk = np.transpose(np.loadtxt(TOPLEVEL_DIR / "PkL_CLASS.dat")) k, pk = k * h, pk / h**3 cosmo.set_power_spectrum_linear(k, pk) @@ -524,7 +525,7 @@ def test_multiple_populations(self): # only Poisson contribution cosmo.covariance_poisson = True - k, pk = np.transpose(np.loadtxt("PkL_CLASS.dat")) + k, pk = np.transpose(np.loadtxt(TOPLEVEL_DIR / "PkL_CLASS.dat")) cosmo.set_power_spectrum_linear(k, pk) cov = covariance_matrix(cosmo.compute_covariance_bulk()) diff --git a/tests/test_corrfunc.c b/tests/test_corrfunc.c index 6475584..9c09e5b 100644 --- a/tests/test_corrfunc.c +++ b/tests/test_corrfunc.c @@ -124,7 +124,7 @@ static int coffe_test_corrfunc( snprintf( name, size_name, - DATADIR "/tests/benchmarks/benchmark_%s_corrfunc%zu.dat", + COFFE_TEST_DATADIR "/tests/benchmarks/benchmark_%s_corrfunc%zu.dat", type, i ); @@ -144,19 +144,19 @@ static int coffe_test_corrfunc( const double y_expected = yvalue[k]; const double y_obtained = coffe_integrate( par, bg, integral, - 1.0, x, m, 0, + 1.0, 0, 0, x, m, 0, NONINTEGRATED, CORRFUNC ) + coffe_integrate( par, bg, integral, - 1.0, x, m, 0, + 1.0, 0, 0, x, m, 0, SINGLE_INTEGRATED, CORRFUNC ) + coffe_integrate( par, bg, integral, - 1.0, x, m, 0, + 1.0, 0, 0, x, m, 0, DOUBLE_INTEGRATED, CORRFUNC ); @@ -186,7 +186,7 @@ static int coffe_test_corrfunc( snprintf( name, size_name, - DATADIR "/tests/benchmarks/benchmark_std_corrfunc%zu.dat", + COFFE_TEST_DATADIR "/tests/benchmarks/benchmark_std_corrfunc%zu.dat", i ); @@ -205,19 +205,19 @@ static int coffe_test_corrfunc( const double y_expected = yvalue[k]; const double y_obtained = coffe_integrate( par, bg, integral, - 1.0, x, m, 0, + 1.0, 0, 0, x, m, 0, NONINTEGRATED, CORRFUNC ) + coffe_integrate( par, bg, integral, - 1.0, x, m, 0, + 1.0, 0, 0, x, m, 0, SINGLE_INTEGRATED, CORRFUNC ) + coffe_integrate( par, bg, integral, - 1.0, x, m, 0, + 1.0, 0, 0, x, m, 0, DOUBLE_INTEGRATED, CORRFUNC ); @@ -247,7 +247,7 @@ static int coffe_test_corrfunc( snprintf( name, size_name, - DATADIR "/tests/benchmarks/benchmark_std_len_corrfunc%zu.dat", + COFFE_TEST_DATADIR 
"/tests/benchmarks/benchmark_std_len_corrfunc%zu.dat", i ); @@ -266,19 +266,19 @@ static int coffe_test_corrfunc( const double y_expected = yvalue[k]; const double y_obtained = coffe_integrate( par, bg, integral, - 1.0, x, m, 0, + 1.0, 0, 0, x, m, 0, NONINTEGRATED, CORRFUNC ) + coffe_integrate( par, bg, integral, - 1.0, x, m, 0, + 1.0, 0, 0, x, m, 0, SINGLE_INTEGRATED, CORRFUNC ) + coffe_integrate( par, bg, integral, - 1.0, x, m, 0, + 1.0, 0, 0, x, m, 0, DOUBLE_INTEGRATED, CORRFUNC ); @@ -308,7 +308,7 @@ static int coffe_test_corrfunc( snprintf( name, size_name, - DATADIR "/tests/benchmarks/benchmark_std_d1_corrfunc%zu.dat", + COFFE_TEST_DATADIR "/tests/benchmarks/benchmark_std_d1_corrfunc%zu.dat", i ); @@ -327,19 +327,19 @@ static int coffe_test_corrfunc( const double y_expected = yvalue[k]; const double y_obtained = coffe_integrate( par, bg, integral, - 1.0, x, m, 0, + 1.0, 0, 0, x, m, 0, NONINTEGRATED, CORRFUNC ) + coffe_integrate( par, bg, integral, - 1.0, x, m, 0, + 1.0, 0, 0, x, m, 0, SINGLE_INTEGRATED, CORRFUNC ) + coffe_integrate( par, bg, integral, - 1.0, x, m, 0, + 1.0, 0, 0, x, m, 0, DOUBLE_INTEGRATED, CORRFUNC ); @@ -368,7 +368,7 @@ static int coffe_test_corrfunc( snprintf( name, size_name, - DATADIR "/tests/benchmarks/benchmark_flatsky_lensing_lensing_corrfunc%zu.dat", + COFFE_TEST_DATADIR "/tests/benchmarks/benchmark_flatsky_lensing_lensing_corrfunc%zu.dat", i ); @@ -387,19 +387,19 @@ static int coffe_test_corrfunc( const double y_expected = yvalue[k]; const double y_obtained = coffe_integrate( par, bg, integral, - 1.0, x, m, 0, + 1.0, 0, 0, x, m, 0, NONINTEGRATED, CORRFUNC ) + coffe_integrate( par, bg, integral, - 1.0, x, m, 0, + 1.0, 0, 0, x, m, 0, SINGLE_INTEGRATED, CORRFUNC ) + coffe_integrate( par, bg, integral, - 1.0, x, m, 0, + 1.0, 0, 0, x, m, 0, DOUBLE_INTEGRATED, CORRFUNC ); diff --git a/tests/test_covariance.c b/tests/test_covariance.c index 571c773..36f40b5 100644 --- a/tests/test_covariance.c +++ b/tests/test_covariance.c @@ -231,7 +231,7 @@ static int coffe_test_covariance( snprintf( name, sizeof(name) / sizeof(*name), - DATADIR "/tests/benchmarks/benchmark_multipoles_covariance_%d%d.dat", + COFFE_TEST_DATADIR "/tests/benchmarks/benchmark_multipoles_covariance_%d%d.dat", multipoles[mp1], multipoles[mp2] ); diff --git a/tests/test_integrals.c b/tests/test_integrals.c index 6a19598..8547696 100644 --- a/tests/test_integrals.c +++ b/tests/test_integrals.c @@ -16,7 +16,9 @@ #define MAX_SEPARATION 10000 #endif +#ifndef COFFE_H0 #define COFFE_H0 (1. 
/ 2997.92458) +#endif #define h (0.67) static int coffe_test_integrals( @@ -38,7 +40,7 @@ static int coffe_test_integrals( snprintf( name, size_name, - DATADIR "/tests/benchmarks/benchmark_integral%zu.dat", + COFFE_TEST_DATADIR "/tests/benchmarks/benchmark_integral%zu.dat", i ); coffe_read_ncol( @@ -103,7 +105,7 @@ static int coffe_test_integrals( snprintf( name, size_name, - DATADIR "/tests/benchmarks/benchmark_integral%zu.dat", + COFFE_TEST_DATADIR "/tests/benchmarks/benchmark_integral%zu.dat", (size_t)9 ); double *x_array, *y_array; @@ -156,7 +158,7 @@ static int coffe_test_integrals( size_t divergent_size; coffe_read_ncol( - DATADIR "/tests/benchmarks/benchmark_integral8.dat", + COFFE_TEST_DATADIR "/tests/benchmarks/benchmark_integral8.dat", 2, &divergent_size, &divergent_x, @@ -203,7 +205,7 @@ static int coffe_test_integrals( size_t ren_size; coffe_read_ncol( - DATADIR "/tests/benchmarks/benchmark_integral8_renormalization.dat", + COFFE_TEST_DATADIR "/tests/benchmarks/benchmark_integral8_renormalization.dat", 3, &ren_size, &ren_x, diff --git a/tests/test_multipoles.c b/tests/test_multipoles.c index 163a6ee..42d84c5 100644 --- a/tests/test_multipoles.c +++ b/tests/test_multipoles.c @@ -124,7 +124,7 @@ static int coffe_test_multipoles( snprintf( name, size_name, - DATADIR "/tests/benchmarks/benchmark_%s_multipoles%d.dat", + COFFE_TEST_DATADIR "/tests/benchmarks/benchmark_%s_multipoles%d.dat", type, l ); @@ -144,19 +144,19 @@ static int coffe_test_multipoles( const double y_expected = yvalue[k]; const double y_obtained = coffe_integrate( par, bg, integral, - 1.0, x, 0, l, + 1.0, 0, 0, x, 0, l, NONINTEGRATED, MULTIPOLES ) + coffe_integrate( par, bg, integral, - 1.0, x, 0, l, + 1.0, 0, 0, x, 0, l, SINGLE_INTEGRATED, MULTIPOLES ) + coffe_integrate( par, bg, integral, - 1.0, x, 0, l, + 1.0, 0, 0, x, 0, l, DOUBLE_INTEGRATED, MULTIPOLES ); @@ -218,7 +218,7 @@ static int coffe_test_multipoles( snprintf( name, size_name, - DATADIR "/tests/benchmarks/benchmark_std_multipoles%d.dat", + COFFE_TEST_DATADIR "/tests/benchmarks/benchmark_std_multipoles%d.dat", l ); @@ -237,19 +237,19 @@ static int coffe_test_multipoles( const double y_expected = yvalue[k]; const double y_obtained = coffe_integrate( par, bg, integral, - 1.0, x, 0, l, + 1.0, 0, 0, x, 0, l, NONINTEGRATED, MULTIPOLES ) + coffe_integrate( par, bg, integral, - 1.0, x, 0, l, + 1.0, 0, 0, x, 0, l, SINGLE_INTEGRATED, MULTIPOLES ) + coffe_integrate( par, bg, integral, - 1.0, x, 0, l, + 1.0, 0, 0, x, 0, l, DOUBLE_INTEGRATED, MULTIPOLES ); @@ -279,7 +279,7 @@ static int coffe_test_multipoles( snprintf( name, size_name, - DATADIR "/tests/benchmarks/benchmark_std_len_multipoles%d.dat", + COFFE_TEST_DATADIR "/tests/benchmarks/benchmark_std_len_multipoles%d.dat", l ); @@ -298,19 +298,19 @@ static int coffe_test_multipoles( const double y_expected = yvalue[k]; const double y_obtained = coffe_integrate( par, bg, integral, - 1.0, x, 0, l, + 1.0, 0, 0, x, 0, l, NONINTEGRATED, MULTIPOLES ) + coffe_integrate( par, bg, integral, - 1.0, x, 0, l, + 1.0, 0, 0, x, 0, l, SINGLE_INTEGRATED, MULTIPOLES ) + coffe_integrate( par, bg, integral, - 1.0, x, 0, l, + 1.0, 0, 0, x, 0, l, DOUBLE_INTEGRATED, MULTIPOLES ); @@ -340,7 +340,7 @@ static int coffe_test_multipoles( snprintf( name, size_name, - DATADIR "/tests/benchmarks/benchmark_std_d1_multipoles%d.dat", + COFFE_TEST_DATADIR "/tests/benchmarks/benchmark_std_d1_multipoles%d.dat", l ); @@ -359,19 +359,19 @@ static int coffe_test_multipoles( const double y_expected = yvalue[k]; const double y_obtained = 
coffe_integrate( par, bg, integral, - 1.0, x, 0, l, + 1.0, 0, 0, x, 0, l, NONINTEGRATED, MULTIPOLES ) + coffe_integrate( par, bg, integral, - 1.0, x, 0, l, + 1.0, 0, 0, x, 0, l, SINGLE_INTEGRATED, MULTIPOLES ) + coffe_integrate( par, bg, integral, - 1.0, x, 0, l, + 1.0, 0, 0, x, 0, l, DOUBLE_INTEGRATED, MULTIPOLES ); @@ -400,7 +400,7 @@ static int coffe_test_multipoles( snprintf( name, size_name, - DATADIR "/tests/benchmarks/benchmark_flatsky_lensing_lensing_multipoles%d.dat", + COFFE_TEST_DATADIR "/tests/benchmarks/benchmark_flatsky_lensing_lensing_multipoles%d.dat", l ); @@ -419,19 +419,19 @@ static int coffe_test_multipoles( const double y_expected = yvalue[k]; const double y_obtained = coffe_integrate( par, bg, integral, - 1.0, x, 0, l, + 1.0, 0, 0, x, 0, l, NONINTEGRATED, MULTIPOLES ) + coffe_integrate( par, bg, integral, - 1.0, x, 0, l, + 1.0, 0, 0, x, 0, l, SINGLE_INTEGRATED, MULTIPOLES ) + coffe_integrate( par, bg, integral, - 1.0, x, 0, l, + 1.0, 0, 0, x, 0, l, DOUBLE_INTEGRATED, MULTIPOLES ); @@ -462,7 +462,7 @@ static int coffe_test_multipoles( snprintf( name, size_name, - DATADIR "/tests/benchmarks/benchmark_flatsky_density_lensing_multipoles%d.dat", + COFFE_TEST_DATADIR "/tests/benchmarks/benchmark_flatsky_density_lensing_multipoles%d.dat", l ); @@ -481,7 +481,7 @@ static int coffe_test_multipoles( const double y_expected = yvalue[k]; const double y_obtained = coffe_integrate( par, bg, integral, - 1.0, x, 0, l, + 1.0, 0, 0, x, 0, l, SINGLE_INTEGRATED, MULTIPOLES ); @@ -524,19 +524,19 @@ static int coffe_test_multipoles( const double y_expected = 0; const double y_obtained = coffe_integrate( par, bg, integral, - 1.0, x, 0, l, + 1.0, 0, 0, x, 0, l, NONINTEGRATED, MULTIPOLES ) + coffe_integrate( par, bg, integral, - 1.0, x, 0, l, + 1.0, 0, 0, x, 0, l, SINGLE_INTEGRATED, MULTIPOLES ) + coffe_integrate( par, bg, integral, - 1.0, x, 0, l, + 1.0, 0, 0, x, 0, l, DOUBLE_INTEGRATED, MULTIPOLES );