diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 6d667a6..26f2f38 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -19,19 +19,19 @@ jobs: with: python-version: '3.11' - - name: Install Poetry + - name: Install UV run: | - pip install poetry + make uv-install-venv - - name: Cache Poetry virtualenv and dependencies + - name: Cache UV virtualenv and dependencies uses: actions/cache@v4 with: path: | - ~/.cache/pypoetry + ~/.cache/uv ~/.cache/pip - key: poetry-${{ runner.os }}-${{ hashFiles('**/poetry.lock') }} + key: uv-${{ runner.os }}-${{ hashFiles('**/uv.lock') }} restore-keys: | - poetry-${{ runner.os }}- + uv-${{ runner.os }}- - name: Install dependencies run: | diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml index e14156f..b35e008 100644 --- a/.github/workflows/pre-commit.yml +++ b/.github/workflows/pre-commit.yml @@ -19,19 +19,19 @@ jobs: with: python-version: '3.11' - - name: Install Poetry + - name: Install UV run: | - pip install poetry + make uv-install-venv - - name: Cache Poetry virtualenv and dependencies + - name: Cache UV virtualenv and dependencies uses: actions/cache@v4 with: path: | - ~/.cache/pypoetry + ~/.cache/uv ~/.cache/pip - key: poetry-${{ runner.os }}-${{ hashFiles('**/poetry.lock') }} + key: uv-${{ runner.os }}-${{ hashFiles('**/uv.lock') }} restore-keys: | - poetry-${{ runner.os }}- + uv-${{ runner.os }}- - name: Cache pre-commit uses: actions/cache@v4 diff --git a/.github/workflows/publish-gh-pages.yml b/.github/workflows/publish-gh-pages.yml index 463ace9..d9b2ad7 100644 --- a/.github/workflows/publish-gh-pages.yml +++ b/.github/workflows/publish-gh-pages.yml @@ -27,23 +27,24 @@ jobs: with: python-version: '3.11' - - name: Install Poetry + - name: Install UV run: | - pip install poetry + make uv-install-venv - - name: Cache Poetry virtualenv and dependencies + - name: Cache UV virtualenv and dependencies uses: actions/cache@v4 with: path: | - 
~/.cache/pypoetry + ~/.cache/uv ~/.cache/pip - key: poetry-${{ runner.os }}-${{ hashFiles('**/poetry.lock') }} + key: uv-${{ runner.os }}-${{ hashFiles('**/uv.lock') }} restore-keys: | - poetry-${{ runner.os }}- + uv-${{ runner.os }}- - name: Install dependencies run: | - make install + make install-dev + make install-docs - name: Run mkdocs deploy run: | diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 352cb2c..8d4ee45 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -19,19 +19,19 @@ jobs: with: python-version: '3.11' - - name: Install Poetry + - name: Install UV run: | - pip install poetry + make uv-install-venv - - name: Cache Poetry virtualenv and dependencies + - name: Cache UV virtualenv and dependencies uses: actions/cache@v4 with: path: | - ~/.cache/pypoetry + ~/.cache/uv ~/.cache/pip - key: poetry-${{ runner.os }}-${{ hashFiles('**/poetry.lock') }} + key: uv-${{ runner.os }}-${{ hashFiles('**/uv.lock') }} restore-keys: | - poetry-${{ runner.os }}- + uv-${{ runner.os }}- - name: Install dependencies run: | diff --git a/.make/.bumpversion.toml b/.make/.bumpversion.toml index dbfd1e4..6b20ec8 100644 --- a/.make/.bumpversion.toml +++ b/.make/.bumpversion.toml @@ -1,5 +1,5 @@ [tool.bumpversion] -current_version = "0.7.1" +current_version = "1.0.0" commit = true message = "Makefile version {new_version} released." 
tag = true diff --git a/.make/CHANGES_MAKEFILE.md b/.make/CHANGES_MAKEFILE.md index c20049d..db94cba 100644 --- a/.make/CHANGES_MAKEFILE.md +++ b/.make/CHANGES_MAKEFILE.md @@ -6,11 +6,41 @@ ______________________________________________________________________ -## [0.7.1](https://github.com/RolnickLab/lab-advanced-template/tree/makefile-0.7.1) (2025-09-17) +## [1.2.0](https://github.com/RolnickLab/lab-advanced-template/tree/makefile-1.2.0) (2026-01-21) ______________________________________________________________________ - +- Add auto-initialization script and corresponding makefile targets + +## [1.1.0](https://github.com/RolnickLab/lab-advanced-template/tree/makefile-1.1.0) (2026-01-20) + +______________________________________________________________________ + +- Add MkDocs dependencies and skeleton structure for MkDocs pages +- Add `docs` makefile targets +- Refactor base package to `src/core` instead of `src/` and improve package structure to follow current python best practices +- Improve and fix bugs/typos from the conda, poetry and uv targets + +## [1.0.0](https://github.com/RolnickLab/lab-advanced-template/tree/makefile-1.0.0) (2025-11-21) + +______________________________________________________________________ + +- BREAKING CHANGE - Make default version of project use `uv` + - Consists of a rework of the `pyproject.toml` file that no longer works with `poetry<2.0.0` +- Added `poetry python install` functionality to the makefike +- Refactored `conda` installation to use miniforge and micromamba instead of miniconda +- Improve determination of build tool and environment by makefile to make experience simpler +- Refactor target group enablement via `Makefile.variables` file instead of commenting + out lines in `Makefile` +- Add link checker to `pre-commit` +- Refactor tests to reduce duplication +- Remove target that installed `poetry` inside conda environment +- Update documentation and README.md +- Convert to Google docstring format + +## 
[0.7.1](https://github.com/RolnickLab/lab-advanced-template/tree/makefile-0.7.1) (2025-09-17) + +______________________________________________________________________ ## [0.7.0](https://github.com/RolnickLab/lab-advanced-template/tree/makefile-0.7.0) (2025-09-10) diff --git a/.make/Makefile b/.make/Makefile index 293b7f5..85dbe8c 100644 --- a/.make/Makefile +++ b/.make/Makefile @@ -10,7 +10,7 @@ # files to include. ######################################################################################## PROJECT_PATH := $(dir $(abspath $(firstword $(MAKEFILE_LIST)))) -MAKEFILE_VERSION := 0.7.1 +MAKEFILE_VERSION := 1.2.0 BUMP_TOOL := bump-my-version BUMP_CONFIG_FILE := $(PROJECT_PATH).bumpversion.toml diff --git a/.make/README.md b/.make/README.md index eb26919..d6f8c41 100644 --- a/.make/README.md +++ b/.make/README.md @@ -2,7 +2,41 @@ This folder contains the base makefile targets. -It should not be modified outside the [Template's repository](https://github.com/RolnickLab/lab-advanced-template). +The project uses a Makefile to automate most operations. If `make` is available on your +machine there's a good chance this will work. + +## The Makefiles + +The following Makefile files should not be modified, but can be consulted: + +- [Makefile](../Makefile) : Entrypoint of the makefile utilities. +- [base.make](base.make) : Shared utilities, project agnostic. +- [conda.make](conda.make) : Conda related targets. +- [lint.make](lint.make) : Linting and formatting related targets. +- [poetry.make](poetry.make) : Poetry related targets. +- [manager.make](manager.make) : Manages which group to include based on Makefile variables. +- [test.make](test.make) : Test related targets. +- [uv.make](uv.make) : UV related targets. + +The following Makefile files are project or user specific and can be modified by +project users: + +- [Makefile.variables](../Makefile.variables) : Shared project variables. 
+ - In this file, you can activate or deactivate target groups, and configure + settings according to your project's needs. +- [Makefile.targets](../Makefile.targets) : Shared project targets. +- [Makefile.private](../Makefile.private.example) : User specific variables and targets. + - This file is ignored by git and should never be committed, as it can also contain + secrets. You can override project configurations for local requirements, as + well as personal preferences. + - You can create your own version locally by copying from + [Makefile.private.example](../Makefile.private.example) + +## Basic Information + +The different targets and their description can be examined by executing the command: + +![](img/make_targets.png) If there is a problem with the contents of these targets, please open an issue [here](https://github.com/RolnickLab/lab-advanced-template/issues). @@ -25,5 +59,5 @@ essentially bash scripts to test the different makefile targets. These should only be run when modifying the makefiles inside the [Template's repository](https://github.com/RolnickLab/lab-advanced-template). -They should never be run in a project implemented from the template, as they could cause -side effects to your project. +They should **never** be run in a project implemented from the template, as they could +cause side effects to your project. 
diff --git a/.make/base.make b/.make/base.make index 5f40e6c..02f98fe 100644 --- a/.make/base.make +++ b/.make/base.make @@ -11,12 +11,14 @@ # Please report bugs to francis.pelletier@mila.quebec ######################################################################################## +.DEFAULT_GOAL := help + # Basic variables PROJECT_PATH := $(dir $(abspath $(firstword $(MAKEFILE_LIST)))) MAKEFILE_NAME := $(word $(words $(MAKEFILE_LIST)),$(MAKEFILE_LIST)) SHELL := /usr/bin/env bash BUMP_TOOL := bump-my-version -MAKEFILE_VERSION := 0.7.1 +MAKEFILE_VERSION := 1.2.0 DOCKER_COMPOSE ?= docker compose AUTO_INSTALL ?= @@ -26,24 +28,102 @@ CONDA_TOOL := conda CONDA_ENVIRONMENT ?= CONDA_YES_OPTION ?= -# Default environment to install package -# Can be overridden in Makefile.private file -DEFAULT_INSTALL_ENV ?= -DEFAULT_POETRY_INSTALL_ENV ?= - -# Colors -_SECTION := \033[1m\033[34m -_TARGET := \033[36m -_NORMAL := \033[0m - -.DEFAULT_GOAL := help +# Default variables (if Makefile.variables is missing) +APP_VERSION := 0.0.0 +APPLICATION_NAME := src +PYTHON_VERSION := 3.12 +DEFAULT_INSTALL_ENV := uv +DEFAULT_BUILD_TOOL := uv +TARGET_GROUPS := lint,test +CONDA_ENVIRONMENT := src-env + +# Targets Colors +_ESC := $(shell printf '\033') +_SECTION := $(_ESC)[1m\033[34m +_BLUE := $(_ESC)[1m\033[34m +_TARGET := $(_ESC)[1m\033[36m +_CYAN := $(_ESC)[36m +_NORMAL := $(_ESC)[0m +_WARNING := $(_ESC)[1;39;41m + +WARNING := $(_WARNING) -- WARNING -- $(_NORMAL) # Project and Private variables and targets import to override variables for local # This is to make sure, sometimes the Makefile includes don't work. 
-include Makefile.variables -include Makefile.private + +contains = $(if $(findstring $(1),$(2)),true) +not_in = $(if $(findstring $(1),$(2)),,true) + +INSTALL_ENV_IS_VENV := $(call contains,venv,$(DEFAULT_INSTALL_ENV)) +INSTALL_ENV_IS_UV := $(call contains,uv,$(DEFAULT_INSTALL_ENV)) +INSTALL_ENV_IS_POETRY := $(call contains,poetry,$(DEFAULT_INSTALL_ENV)) +INSTALL_ENV_IS_CONDA := $(call contains,conda,$(DEFAULT_INSTALL_ENV)) + +BUILD_TOOL_IS_UV := $(call contains,uv,$(DEFAULT_BUILD_TOOL)) +BUILD_TOOL_IS_POETRY := $(call contains,poetry,$(DEFAULT_BUILD_TOOL)) + +CONDA_CONFLICT := $(and $(INSTALL_ENV_IS_CONDA),$(BUILD_TOOL_IS_UV)) +UV_CONFLICT := $(and $(INSTALL_ENV_IS_POETRY),$(BUILD_TOOL_IS_UV)) +POETRY_CONFLICT := $(and $(INSTALL_ENV_IS_UV),$(BUILD_TOOL_IS_POETRY)) +PLEASE_FIX_CONFLICT_MSG := Please fix the conflict in your [Makefile.variables] and/or [Makefile.private] file(s) + +IS_MAKEFILE_VARIABLES_MISSING := $(call not_in,Makefile.variables,$(MAKEFILE_LIST)) +PLEASE_FIX_MISSING_FILE := Please consider adding a [Makefile.variables] file to your project - See lab-advanced-template for more info + +TAG_WARN := $(_WARNING) -- WARNING -- $(_NORMAL) + +check_configs = $(if $($(1)), \ + $(info ) \ + $(info $(TAG_WARN) $(2)) \ + $(info $(PLEASE_FIX_CONFLICT_MSG)) \ + $(info ) \ +) + +check_files = $(if $($(1)), \ + $(info ) \ + $(info $(TAG_WARN) $(2)) \ + $(info $(PLEASE_FIX_MISSING_FILE)) \ + $(info ) \ +) + +# Config Checks +# These run immediately when you type 'make' +$(call check_configs,CONDA_CONFLICT,'conda' environment is enabled while using 'uv') +$(call check_configs,UV_CONFLICT,'poetry' environment is enabled while using 'uv') +$(call check_configs,POETRY_CONFLICT,'uv' environment is enabled while using 'poetry') +$(call check_files,IS_MAKEFILE_VARIABLES_MISSING,The configuration file 'Makefile.variables' is missing - Using default values) + + +## -- Initialization targets 
---------------------------------------------------------------------------------------- ## +.PHONY: project-init +project-init: ## Initialize the project from the template - Only run once! + @python3 $(PROJECT_PATH).make/scripts/auto_init_script.py + +.PHONY: project-init-dry +project-init-dry: ## Test run: no changes will be made - Initialize the project from the template + @python3 $(PROJECT_PATH).make/scripts/auto_init_script.py --dry + + ## -- Informative targets ------------------------------------------------------------------------------------------- ## +.PHONY: info +info: ## Get project configuration info + @echo "" + @echo -e "$(_BLUE)--- Configuration Status ---$(_NORMAL)" + @echo "" + @echo -e "$(_CYAN)Application Name$(_NORMAL) : $(APPLICATION_NAME)" + @echo -e "$(_CYAN)Application version$(_NORMAL) : $(APP_VERSION)" + @echo -e "$(_CYAN)Application Root$(_NORMAL) : [$(PROJECT_PATH)]" + @echo -e "$(_CYAN)Application package$(_NORMAL) : [$(PROJECT_PATH)src/$(APPLICATION_NAME)]" + @echo -e "$(_CYAN)Environment manager$(_NORMAL) : $(DEFAULT_INSTALL_ENV)" + @echo -e "$(_CYAN)Build tool$(_NORMAL) : $(DEFAULT_BUILD_TOOL)" + @echo -e "$(_CYAN)Python version$(_NORMAL) : $(PYTHON_VERSION)" + @echo -e "$(_CYAN)Active makefile targets$(_NORMAL) : [$(TARGET_GROUPS)]" + @echo -e "$(_CYAN)Makefile version$(_NORMAL) : $(MAKEFILE_VERSION)" + + .PHONY: all all: help @@ -78,11 +158,13 @@ targets: help .PHONY: version version: ## display current version - @echo "version: $(APP_VERSION)" + @echo -e "$(_CYAN)Application version$(_NORMAL) : $(APP_VERSION)" + @echo -e "$(_CYAN)Makefile version$(_NORMAL) : $(MAKEFILE_VERSION)" ## -- Virtualenv targets -------------------------------------------------------------------------------------------- ## -VENV_PATH := $(PROJECT_PATH).venv +DEFAULT_VENV_PATH := $(PROJECT_PATH).venv +VENV_PATH := $(DEFAULT_VENV_PATH) VENV_ACTIVATE := $(VENV_PATH)/bin/activate .PHONY: venv-create @@ -138,15 +220,15 @@ dry: ## Add the dry target for a
preview of changes; ex. 'make bump-major dry' .PHONY: bump-major bump-major: ## Bump application major version - $(BUMP_TOOL) bump $(BUMP_ARGS) major + @$(ENV_COMMAND_TOOL) $(BUMP_TOOL) bump $(BUMP_ARGS) major .PHONY: bump-minor bump-minor: ## Bump application minor version <0.X.0> - $(BUMP_TOOL) bump $(BUMP_ARGS) minor + @$(ENV_COMMAND_TOOL) $(BUMP_TOOL) bump $(BUMP_ARGS) minor .PHONY: bump-patch bump-patch: ## Bump application patch version <0.0.X> - $(BUMP_TOOL) bump $(BUMP_ARGS) patch + @$(ENV_COMMAND_TOOL) $(BUMP_TOOL) bump $(BUMP_ARGS) patch diff --git a/.make/conda.make b/.make/conda.make index 855f80e..2bc149d 100644 --- a/.make/conda.make +++ b/.make/conda.make @@ -1,176 +1,350 @@ - +-include Makefile.variables +-include Makefile.private ## -- Conda targets ------------------------------------------------------------------------------------------------- ## CONDA_ENVIRONMENT_FILE := environment.yml +SLURM_ENV_VAR_PRESENT := env | grep -q "SLURM" + +CONDA_ENV_TOOL := $(shell command -v $(CONDA_TOOL) 2> /dev/null) +LOCAL_CONDA_TOOL_PATH := $(shell echo $$HOME/.local/bin/$(CONDA_TOOL)) +ifeq ($(CONDA_ENV_TOOL),) + CONDA_ENV_TOOL := $(LOCAL_CONDA_TOOL_PATH) +endif + .PHONY: conda-install -conda-install: ## Install Miniconda on your local machine - @echo "Looking for [$(CONDA_TOOL)]..."; \ - $(CONDA_TOOL) --version; \ +conda-install: ## General target to install conda like tool - Uses 'CONDA_TOOL' makefile variable + @echo "### Checking if [$(CONDA_ENV_TOOL)] is installed ..."; \ + $(CONDA_ENV_TOOL) --version; \ if [ $$? 
!= "0" ]; then \ echo " "; \ echo "Your defined Conda tool [$(CONDA_TOOL)] has not been found."; \ echo " "; \ - echo "If you know you already have [$(CONDA_TOOL)] or some other Conda tool installed,"; \ - echo "Check your [CONDA_TOOL] variable in the Makefile.private for typos."; \ - echo " "; \ - echo "If your conda tool has not been initiated through your .bashrc file,"; \ - echo "consider using the full path to its executable instead when"; \ - echo "defining your [CONDA_TOOL] variable"; \ - echo " "; \ - echo "If in doubt, don't install Conda and manually create and activate"; \ - echo "your own Python environment."; \ - echo " "; \ - echo "It is strongly NOT advisable to execute this command if you are on a"; \ - echo "Compute Cluster (ie. Mila/DRAC), as they either have modules available (Mila),"; \ - echo "or even prohibit the installation and use of Conda (DRAC) based environments."; \ - echo " "; \ - echo -n "Would you like to install and initialize Miniconda ? [y/N]: "; \ + echo "If [$(CONDA_TOOL)] or some other Conda tool installed is supposed to already be installed"; \ + echo "on your system, check your [CONDA_TOOL] variable in the Makefile.private for typos."; \ + echo ""; \ + echo "If you already have some version of conda installed, it might not have"; \ + echo "been properly activated (which can also be on purpose when on a compute cluster)."; \ + echo "Consider reloading your shell before you try again."; \ + echo ""; \ + echo "If in doubt, don't install Conda and manually - create and activate"; \ + echo "your own Python environment some other way."; \ + echo ""; \ + echo "This script provides 2 option:"; \ + echo ""; \ + echo " * You can install 'conda' and 'mamba' through Miniforge3 (https://github.com/conda-forge/miniforge)"; \ + echo ""; \ + echo " * Or you can install 'micromamba' (which will also be available as 'mamba' in your shell)"; \ + echo " from the Micromamba project (https://mamba.readthedocs.io/en/latest/index.html)"; \ + echo 
""; \ + echo -n "Would you like to install one of the tools mentioned above? [y/N]: "; \ read ans; \ case $$ans in \ [Yy]*) \ - echo "Fetching and installing miniconda"; \ - echo " "; \ - wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O ~/miniconda.sh; \ - bash ~/miniconda.sh -b -p $${HOME}/.conda; \ - export PATH=$${HOME}/.conda/bin:$$PATH; \ - conda init; \ - /usr/bin/rm ~/miniconda.sh; \ + echo ""; \ + echo -n "Would you like to install miniforge or micromamba? [miniforge/micromamba/none]: "; \ + read conda_provider; \ + case $$conda_provider in \ + "miniforge" | "mini" | "forge" ) \ + echo ""; \ + make -s _miniforge-install;\ + ;; \ + "micromamba" | "micro" | "mamba" ) \ + echo ""; \ + make -s mamba-install; \ + ;; \ + "None" | "none" | "no" | "n" ) \ + echo ""; \ + echo "Exiting process"; \ + ;; \ + *) \ + echo ""; \ + echo "Input is not conform, process is stopping - please try again"; \ + esac; \ ;; \ *) \ - echo "Skipping installation."; \ - echo " "; \ + echo ""; \ + echo "Terminating installation process."; \ + echo ""; \ ;; \ esac; \ else \ echo "Conda tool [$(CONDA_TOOL)] has been found, skipping installation"; \ fi; -.PHONY: mamba-install -mamba-install: ## Install Micromamba on you local machine - @echo "Looking for [micromamba]..."; \ - micromamba --version; \ - if [ $$? 
!= "0" ]; then \ - echo ""; \ - echo "[micromamba] has not been found."; \ - echo ""; \ - echo "If you know you already have installed [micromamba] installed, it might not have"; \ - echo "been properly activated (which can also be on purpose)."; \ - echo ""; \ - echo "If your Conda/Micromamba tool has not been initiated through your .bashrc file,"; \ - echo "consider using the full path to its executable instead when"; \ - echo "defining your [CONDA_TOOL] variable"; \ - echo " "; \ - echo "If you do decide to install Micromamba, please take care to define the [CONDA_TOOL]"; \ - echo "variable in you personal 'Makefile.private' file as CONDA_TOOL := micromamba."; \ - echo " "; \ - echo "If in doubt, don't install Micromamba and manually create and activate"; \ - echo "your own Python environment."; \ - echo ""; \ - echo "It is strongly NOT advisable to execute this command if you are on a"; \ - echo "Compute Cluster (ie. Mila/DRAC), as they either have modules available (Mila),"; \ - echo "or even prohibit the installation and use of Conda based environments (DRAC)."; \ - echo ""; \ - echo -n "Would you like to install and initialize [micromamba] ? 
[y/N]: "; \ - read ans; \ - case $$ans in \ +.PHONY: _is_local_bin_on_path +_is_local_bin_on_path: + @echo "" + @echo "### Verifying if [$$HOME/.local/bin] is in PATH" + @if echo $$PATH | tr ':' '\n' | grep -Fxq "$$HOME/.local/bin"; then \ + echo "";\ + echo "[$$HOME/.local/bin] found in PATH variable - skipping";\ + echo "";\ + else \ + echo "";\ + echo "[$$HOME/.local/bin] NOT found in PATH variable"; \ + echo "";\ + echo "Adding 'export PATH="\$$HOME/.local/bin:\$$PATH"' to your .bashrc file"; \ + echo 'export PATH="$$HOME/.local/bin:$$PATH"' >> $$HOME/.bashrc; \ + echo "";\ + echo -e "$(WARNING) Consider reloading your shell after this.";\ + echo "";\ + fi; + +.PHONY: _installer-miniforge +_installer-miniforge: + @make -s _is_local_bin_on_path + @wget "https://github.com/conda-forge/miniforge/releases/latest/download/Miniforge3-$$(uname)-$$(uname -m).sh" + @bash Miniforge3-$$(uname)-$$(uname -m).sh -b -p $$HOME/.miniforge3 + @echo "" + @echo "Adding [conda] and [mamba] to your '\$$HOME/.local/bin' directory" + @mkdir -p $$HOME/.local/bin + @ln -s $$HOME/.miniforge3/condabin/conda $$HOME/.local/bin/conda + @ln -s $$HOME/.miniforge3/condabin/mamba $$HOME/.local/bin/mamba + @/usr/bin/rm Miniforge3-$$(uname)-$$(uname -m).sh + @echo "" + @echo "Please configure the [CONDA_TOOL] variable in your 'Makefile.private file to" + @echo "either 'CONDA_TOOL := conda' or 'CONDA_TOOL := mamba', depending on which one " + @echo "you prefer to use" + @echo "" + @echo "Consider reloading your shell after this so you can have access to the tool" + @echo "" + +.PHONY: _installer-mamba +_installer-mamba: + @make -s _is_local_bin_on_path + @wget -qO- https://micro.mamba.pm/api/micromamba/linux-64/latest | tar -xvj bin/micromamba + @echo "Adding [mamba] to your '\$$HOME/.local/bin' directory" + @mkdir -p $$HOME/.local/bin + @mv bin/micromamba $$HOME/.local/bin/mamba + @rm -rf bin/ + @echo "" + @echo "Please configure the [CONDA_TOOL] variable in your 'Makefile.private file to" + @echo 
"'CONDA_TOOL := mamba' for micromamba to be used by the makefile." + @echo "" + @echo "Consider reloading your shell after this so you can have access to the tool" + @echo "" + +.PHONY: _slurm-warming +_slurm-warming: + echo "" + echo "#" + echo "#" + echo "#" + echo "" + echo -e "$(WARNING)SLURM Environment variables have been found!!!" + echo "" + echo "#" + echo "#" + echo "#" + echo "" + echo "This indicates you might be on a compute cluster" + echo "" + echo "It is NOT advisable to execute this command if you are on a Compute Cluster (ie. Mila/DRAC)," + echo "as they either have modules available (Mila), or even prohibit the installation " + echo "and use of Conda (DRAC) based environments." + echo "" + echo "Please do not install Conda or similar tools on one the clusters of the" + echo "Digital Research Alliance of Canada" + echo "" + echo "Only proceed if you know what you are doing!!!" + echo "" + +.PHONY: _miniforge-install +_miniforge-install: + @echo "### Verifying for SLURM environment variable ..." + @if $(SLURM_ENV_VAR_PRESENT) ; then \ + make -s _slurm-warming; \ + echo -n "Are you sure you want to install Miniforge ? 
[y/N]: "; \ + read ans_slurm; \ + case $$ans_slurm in \ [Yy]*) \ - echo 'Installing Micromamba' - wget -qO- https://micro.mamba.pm/api/micromamba/linux-64/latest | tar -xvj bin/micromamba;\ - mv bin/micromamba ~/.local/bin/micromamba;\ - rm -rf bin/;\ - ~/.local/bin/micromamba shell init -s bash ~/.micromamba;\ + echo ""; \ + echo "Installing Miniforge3 - without initializing it."; \ + make -s _installer-miniforge; \ ;; \ *) \ - echo "Skipping installation."; \ - echo " "; \ + echo ""; \ + echo "Terminating installation process."; \ + echo ""; \ + echo "Please activate the required cluster anaconda module or use another way "; \ + echo "to manage your environment before continuing."; \ + echo ""; \ ;; \ esac; \ + else \ + echo ""; \ + echo "Installing and initializing Miniforge "; \ + echo ""; \ + make -s _installer-miniforge; \ + $$HOME/.local/bin/conda init; \ + $$HOME/.local/bin/mamba shell init --shell bash --root-prefix=$$HOME/.mamba; \ + fi; \ +.PHONY: _mamba-install +_mamba-install: + @echo "### Verifying for SLURM environment variable ..." + @if $(SLURM_ENV_VAR_PRESENT) ; then \ + make -s _slurm-warming; \ + echo -n "Are you sure you want to install Micromamba ? 
[y/N]: "; \ + read ans_slurm; \ + case $$ans_slurm in \ + [Yy]*) \ + echo ""; \ + echo "Installing Micromamba - without initializing it."; \ + make -s _installer-mamba; \ + ;; \ + *) \ + echo ""; \ + echo "Terminating installation process."; \ + echo ""; \ + echo "Please activate the required cluster anaconda module or use another way "; \ + echo "to manage your environment before continuing."; \ + echo " "; \ + ;; \ + esac; \ + else \ + echo ""; \ + echo "Installing and initializing Micromamba "; \ + echo ""; \ + make -s _installer-mamba; \ + $$HOME/.local/bin/mamba shell init -s bash $$HOME/.micromamba; \ + fi; \ -.PHONY: conda-create-env -conda-create-env: conda-install ## Create a local Conda environment based on 'environment.yml' file - @$(CONDA_TOOL) env create $(CONDA_YES_OPTION) -f $(CONDA_ENVIRONMENT_FILE) - -.PHONY: conda-env-info -conda-env-info: ## Print information about active Conda environment using - @$(CONDA_TOOL) info - -.PHONY: conda-activate -conda-activate: ## Print the shell command to activate the project's Conda env. - @echo "$(CONDA_TOOL) activate $(CONDA_ENVIRONMENT)" - -.PHONY: _conda-poetry-install -_conda-poetry-install: - @$(CONDA_TOOL) run -n $(CONDA_ENVIRONMENT) python --version; \ +.PHONY: miniforge-install +miniforge-install: ## Install conda and mamba from Miniforge3. (Full functionality for local development) + @echo "#"; \ + echo "# Miniforge Install process"; \ + echo "#"; \ + echo ""; \ + echo "### Verifying that [conda] is not already installed ..."; \ + conda --version; \ if [ $$? != "0" ]; then \ - echo "Target environment doesn't seem to exist..."; \ - if [ "$(AUTO_INSTALL)" = "true" ]; then \ - ans="y";\ - else \ - echo ""; \ - echo -n "Do you want to create it? 
[y/N] "; \ - read ans; \ + echo ""; \ + echo ""; \ + echo "[conda] has not been found."; \ + echo " "; \ + echo "If [conda] is already supposed to be installed on this system, it might not have"; \ + echo "been properly configured, and/or has not been initialized (which can also be on purpose when "; \ + echo "on a compute cluster). Consider reloading your shell before you try again."; \ + echo ""; \ + echo "If in doubt, don't install Miniforge. Instead, manually create and activate"; \ + echo "your Python environment some other way."; \ + echo ""; \ + echo "### Verifying that [mamba] is not already installed ..."; \ + mamba --version; \ + if [ $$? = "0" ]; then \ + echo -e "$(WARNING)[mamba] has been found - Installing miniforge is probably redundant"; \ fi; \ + echo ""; \ + echo "### Verifying that [micromamba] is not already installed ..."; \ + micromamba --version; \ + if [ $$? = "0" ]; then \ + echo -e "$(WARNING)[micromamba] has been found - Installing miniforge is probably redundant"; \ + fi; \ + echo ""; \ + echo -n "Would you like to install and initialize Miniforge ? 
[y/N]: "; \ + read ans; \ case $$ans in \ [Yy]*) \ - echo "Creating conda environment : [$(CONDA_ENVIRONMENT)]"; \ - make -s conda-create-env; \ + echo ""; \ + make -s _miniforge-install; \ ;; \ *) \ - echo "Exiting..."; \ - exit 1;\ + echo ""; \ + echo "Terminating installation process."; \ + echo ""; \ ;; \ - esac;\ - fi; - $(CONDA_TOOL) run -n $(CONDA_ENVIRONMENT) $(CONDA_TOOL) install $(CONDA_YES_OPTION) -c conda-forge poetry; \ - CURRENT_VERSION=$$($(CONDA_TOOL) run -n $(CONDA_ENVIRONMENT) poetry --version | awk '{print $$NF}' | tr -d ')'); \ - REQUIRED_VERSION="1.6.0"; \ - if [ "$$(printf '%s\n' "$$REQUIRED_VERSION" "$$CURRENT_VERSION" | sort -V | head -n1)" != "$$REQUIRED_VERSION" ]; then \ - echo "Poetry installed version $$CURRENT_VERSION is less than minimal version $$REQUIRED_VERSION, fixing urllib3 version to prevent problems"; \ - $(CONDA_TOOL) run -n $(CONDA_ENVIRONMENT) poetry add "urllib3<2.0.0"; \ + esac; \ + else \ + echo "Conda tool [conda] has been found, skipping installation"; \ fi; -.PHONY:conda-poetry-install -conda-poetry-install: ## Install Poetry in the project's Conda environment. Will fail if Conda is not found - @poetry --version; \ - if [ $$? != "0" ]; then \ - echo "Poetry not found, proceeding to install Poetry..."; \ - echo "Looking for [$(CONDA_TOOL)]...";\ - $(CONDA_TOOL) --version; \ - if [ $$? != "0" ]; then \ - echo "$(CONDA_TOOL) not found; Poetry will not be installed"; \ - else \ - echo "Installing Poetry with Conda in [$(CONDA_ENVIRONMENT)] environment"; \ - make -s _conda-poetry-install; \ - fi; \ - else \ +.PHONY: mamba-install +mamba-install: ## Install Micromamba as 'mamba'. (Minimalistic install for env management) + @echo "#"; \ + echo "# Micromamba Install process"; \ + echo "#"; \ + echo ""; \ + echo "### Verifying that [mamba] is not installed ..."; \ + mamba --version; \ + if [ $$? 
!= "0" ]; then \ + echo ""; \ + echo "### Verifying that [micromamba] is not already installed ..."; \ + micromamba --version; \ + if [ $$? != "0" ]; then \ + echo ""; \ + echo "[mamba] and [micromamba] have not been found."; \ + echo ""; \ + echo "If [mamba] and/or [micromamba] are already supposed to be installed on this system,"; \ + echo "they might not have been properly configured, and/or have not been initialized"; \ + echo "(which can also be on purpose when on a compute cluster). Consider reloading your"; \ + echo "shell before you try again."; \ echo ""; \ - echo "Poetry has been found on this system :"; \ - echo " Install location: $$(which poetry)"; \ + echo "If in doubt, don't install Micromamba. Instead, manually create and activate"; \ + echo "your Python environment some other way."; \ echo ""; \ - if [ "$(AUTO_INSTALL)" = "true" ]; then \ - ans="y";\ - else \ - echo -n "Would you like to install poetry in the project's conda environment anyway ? [y/N]: "; \ - read ans; \ + echo "### Verifying that [conda] is not already installed ..."; \ + conda --version; \ + if [ $$? = "0" ]; then \ + echo -e "$(WARNING)[conda] has been found - Only install micromamba if you really need it"; \ fi; \ + echo ""; \ + echo -n "Would you like to install and initialize [micromamba] ? 
[y/N]: "; \ + read ans; \ case $$ans in \ [Yy]*) \ - echo "Installing Poetry with Conda in [$(CONDA_ENVIRONMENT)] environment"; \ - make -s _conda-poetry-install; \ + echo ""; \ + make -s _mamba-install; \ ;; \ *) \ - echo "Skipping installation."; \ - echo " "; \ + echo ""; \ + echo "Terminating installation process."; \ + echo ""; \ ;; \ esac; \ - fi; + else \ + echo "[micromamba] has been found, skipping installation"; \ + fi; \ + else \ + echo "[mamba] has been found, skipping installation"; \ + fi; \ -.PHONY: conda-poetry-uninstall -conda-poetry-uninstall: ## Uninstall Poetry located in currently active Conda environment - $(CONDA_TOOL) run -n $(CONDA_ENVIRONMENT) $(CONDA_TOOL) remove $(CONDA_YES_OPTION) poetry + +.PHONY: conda-create-env +conda-create-env: conda-install ## Create a local Conda environment based on 'environment.yml' file + @if [ ! -f $(CONDA_ENVIRONMENT_FILE) ]; then \ + $(CONDA_ENV_TOOL) create $(CONDA_YES_OPTION) python=$(PYTHON_VERSION) -c conda-forge -n $(CONDA_ENVIRONMENT); \ + echo "Generating '$(ENV_FILE)' file..."; \ + if [ -f $(CONDA_ENVIRONMENT_FILE)l ]; then \ + echo "Warning: $(CONDA_ENVIRONMENT_FILE) already exists. Overwriting..."; \ + fi; \ + ( \ + echo "name: $(CONDA_ENVIRONMENT)"; \ + echo "channels:"; \ + echo " - conda-forge"; \ + echo "dependencies:"; \ + echo -n " - python=$(PYTHON_VERSION)"; \ + ) > $(CONDA_ENVIRONMENT_FILE); \ + echo ""; \ + echo "#"; \ + echo "Done. File content:"; \ + cat $(CONDA_ENVIRONMENT_FILE); \ + echo ""; \ + echo "#"; \ + echo ""; \ + else \ + $(CONDA_ENV_TOOL) env create $(CONDA_YES_OPTION) -f $(CONDA_ENVIRONMENT_FILE); \ + fi; + +.PHONY: conda-env-info +conda-env-info: ## Print information about active Conda environment using + @$(CONDA_ENV_TOOL) info + +.PHONY: conda-activate +conda-activate: ## Print the shell command to activate the project's Conda env. 
+ @echo "$(CONDA_ENV_TOOL) activate $(CONDA_ENVIRONMENT)" .PHONY: conda-clean-env conda-clean-env: ## Completely removes local project's Conda environment - $(CONDA_TOOL) env remove $(CONDA_YES_OPTION) -n $(CONDA_ENVIRONMENT) + $(CONDA_ENV_TOOL) env remove $(CONDA_YES_OPTION) -n $(CONDA_ENVIRONMENT) diff --git a/.make/docs.make b/.make/docs.make new file mode 100644 index 0000000..9da5248 --- /dev/null +++ b/.make/docs.make @@ -0,0 +1,29 @@ +## -- Docs targets -------------------------------------------------------------------------------------------------- ## +.PHONY: preview-docs +preview-docs: install-docs ## Preview the documentation site locally + @$(ENV_COMMAND_TOOL) mkdocs serve -a 0.0.0.0:7000 + + +.PHONY: build-docs +build-docs: install-docs ## Build the documentation files locally + @$(ENV_COMMAND_TOOL) mkdocs build + +.PHONY: deploy-docs +deploy-docs: install-docs ## Publish and deploy the documentation to the live Github page + @echo""; \ + echo -e "\e[1;39;41m-- WARNING --\e[0m This command will deploy all current changes to the live Github page - Making it publicly available"; \ + echo""; \ + echo -n "Would you like to deploys the docs? [Y/n]: "; \ + read ans; \ + case $$ans in \ + [Yy]*) \ + echo""; \ + $(ENV_COMMAND_TOOL) mkdocs gh-deploy; \ + echo""; \ + ;; \ + *) \ + echo""; \ + echo "Skipping publication to Github Pages."; \ + echo " "; \ + ;; \ + esac; \ diff --git a/.make/img/README.md b/.make/img/README.md new file mode 100644 index 0000000..94f7956 --- /dev/null +++ b/.make/img/README.md @@ -0,0 +1,3 @@ +# Images + +This folder is for images used in the different Markdown documents. 
diff --git a/.make/lint.make b/.make/lint.make index 022edf3..4952600 100644 --- a/.make/lint.make +++ b/.make/lint.make @@ -1,37 +1,37 @@ ## -- Linting targets ----------------------------------------------------------------------------------------------- ## +.PHONY: precommit +precommit: ## Run Pre-commit on all files manually (Only lint target that works without dev dependencies) + @$(ENV_COMMAND_TOOL) pre-commit run --all-files + .PHONY: check-lint check-lint: ## Check code linting (black, isort, flake8, docformatter and pylint) - $(ENV_COMMAND_TOOL) nox -s check + @$(ENV_COMMAND_TOOL) nox -s check .PHONY: check-pylint check-pylint: ## Check code with pylint - $(ENV_COMMAND_TOOL) nox -s pylint + @$(ENV_COMMAND_TOOL) nox -s pylint .PHONY: check-complexity check-complexity: ## Check code cyclomatic complexity with Flake8-McCabe - $(ENV_COMMAND_TOOL) nox -s complexity + @$(ENV_COMMAND_TOOL) nox -s complexity .PHONY: fix-lint fix-lint: ## Fix code linting (autoflake, autopep8, black, isort, flynt, docformatter) - $(ENV_COMMAND_TOOL) nox -s fix + @$(ENV_COMMAND_TOOL) nox -s fix .PHONY: markdown-lint markdown-lint: ## Fix markdown linting using mdformat - $(ENV_COMMAND_TOOL) nox -s mdformat - -.PHONY: precommit -precommit: ## Run Pre-commit on all files manually - $(ENV_COMMAND_TOOL) nox -s precommit + @$(ENV_COMMAND_TOOL) nox -s mdformat .PHONY: ruff ruff: ## Run the ruff linter - $(ENV_COMMAND_TOOL) nox -s ruff-lint + @$(ENV_COMMAND_TOOL) nox -s ruff-lint .PHONY: ruff-fix ruff-fix: ## Run the ruff linter and fix automatically fixable errors - $(ENV_COMMAND_TOOL) nox -s ruff-fix + @$(ENV_COMMAND_TOOL) nox -s ruff-fix .PHONY: ruff-format ruff-format: ## Run the ruff code formatter - $(ENV_COMMAND_TOOL) nox -s ruff-format + @$(ENV_COMMAND_TOOL) nox -s ruff-format diff --git a/.make/manager.make b/.make/manager.make new file mode 100644 index 0000000..b212524 --- /dev/null +++ b/.make/manager.make @@ -0,0 +1,64 @@ 
+######################################################################################## +# +# MODIFY WITH CARE!!! +# If necessary, override the corresponding variable and/or target, or create new ones +# in one of the following files, depending on the nature of the override : +# +# `Makefile.variables`, `Makefile.targets` or `Makefile.private`, +# +# The only valid reason to modify this file is to fix a bug or to add/remove +# files to include. +# +# REMEMBER!!! +# This is a project level config, any changes here will affect all other users +# +######################################################################################## +# +# Necessary make files +# +include .make/base.make +-include Makefile.variables + +# +# Optional makefiles targets +# + +# Env related +ifneq (,$(INSTALL_ENV_IS_CONDA)) + include .make/conda.make +endif + +# Build tool related +ifneq (,$(BUILD_TOOL_IS_UV)) + include .make/uv.make +endif + +ifneq (,$(BUILD_TOOL_IS_POETRY)) + include .make/poetry.make +endif + +## Linting targets +ifneq (,$(findstring lint,$(TARGET_GROUPS))) + include .make/lint.make +endif + +## Test related targets +ifneq (,$(findstring test,$(TARGET_GROUPS))) + include .make/test.make +endif + +## Docs related targets +ifneq (,$(findstring docs,$(TARGET_GROUPS))) + include .make/docs.make +endif + +# +# Project related makefiles +# +## Custom targets and variables +-include Makefile.targets +-include Makefile.variables + +## Private variables and targets import to override variables for local +-include Makefile.private + diff --git a/.make/poetry.make b/.make/poetry.make index 92af1ad..4e9b76c 100644 --- a/.make/poetry.make +++ b/.make/poetry.make @@ -3,45 +3,34 @@ -include Makefile.variables -include Makefile.private -ENV_COMMAND_TOOL := poetry run +POETRY_COMMAND_WITH_PROJECT_ENV := $(shell command -v poetry 2> /dev/null) +LOCAL_POETRY_PATH := $(shell echo $$HOME/.local/bin/poetry) + +ifeq ($(POETRY_COMMAND_WITH_PROJECT_ENV),) + POETRY_COMMAND_WITH_PROJECT_ENV 
:= $(LOCAL_POETRY_PATH) +endif + +ifeq ($(DEFAULT_INSTALL_ENV),venv) +POETRY_COMMAND_WITH_PROJECT_ENV := source $(VENV_ACTIVATE) && $(POETRY_COMMAND_WITH_PROJECT_ENV) +else ifeq ($(DEFAULT_INSTALL_ENV),poetry) +POETRY_COMMAND_WITH_PROJECT_ENV := $(POETRY_COMMAND_WITH_PROJECT_ENV) +else ifeq ($(DEFAULT_INSTALL_ENV),conda) +POETRY_COMMAND_WITH_PROJECT_ENV := $(CONDA_ENV_TOOL) run -n $(CONDA_ENVIRONMENT) $(POETRY_COMMAND_WITH_PROJECT_ENV) +endif + +# Do not rename these unless you also rename across all other make files in .make/ +ENV_COMMAND_TOOL := $(POETRY_COMMAND_WITH_PROJECT_ENV) run +ENV_INSTALL_TOOL := $(POETRY_COMMAND_WITH_PROJECT_ENV) install + ## -- Poetry targets ------------------------------------------------------------------------------------------------ ## .PHONY: poetry-install-auto -poetry-install-auto: ## Install Poetry automatically using DEFAULT_POETRY_INSTALL_ENV. Defaults to venv install - @poetry --version; \ +poetry-install-auto: ## Install Poetry automatically via pipx + @$(POETRY_COMMAND_WITH_PROJECT_ENV) --version; \ if [ $$? 
!= "0" ]; then \ echo "Poetry not found, proceeding to install Poetry..."; \ - if [ "$(DEFAULT_POETRY_INSTALL_ENV)" == "conda" ]; then \ - echo ""; \ - echo "[DEFAULT_POETRY_INSTALL_ENV] is defined as 'conda', installing poetry with the 'poetry-install-conda' target"; \ - echo ""; \ - ans_where="conda"; \ - elif [ "$(DEFAULT_POETRY_INSTALL_ENV)" == "venv" ]; then \ - echo ""; \ - echo "[DEFAULT_POETRY_INSTALL_ENV] is defined as 'venv', installing poetry with the 'poetry-install-venv' target"; \ - echo ""; \ - ans_where="venv"; \ - else\ - echo ""; \ - echo "[DEFAULT_POETRY_INSTALL_ENV] is not defined, defaulting to installing poetry with the 'poetry-install-venv' target"; \ - echo ""; \ - ans_where="venv"; \ - fi; \ - case $$ans_where in \ - "venv" | "Venv" |"VENV") \ - make AUTO_INSTALL=true -s poetry-install-venv; \ - ;; \ - "conda" | "Conda" | "CONDA") \ - echo "Installing poetry with Conda"; \ - make AUTO_INSTALL=true -s conda-poetry-install; \ - ;; \ - *) \ - echo ""; \ - echo -e "\e[1;39;41m-- WARNING --\e[0m There was an unexpected error. Option $$ans_how not found, exiting process."; \ - echo ""; \ - exit 1; \ - esac; \ + make AUTO_INSTALL=true -s poetry-install-venv; \ fi; .PHONY: _pipx_install_poetry @@ -49,7 +38,14 @@ _pipx_install_poetry: @output="$$(pip install poetry --dry-run)"; \ if echo "$$output" | grep -q computecanada ; then \ echo ""; \ - echo -e "\e[1;39;41m-- WARNING --\e[0m Compute Canada (DRAC) environment detected: Installing Poetry < 2.0.0"; \ + echo -e "$(WARNING)Compute Canada (DRAC) environment detected: Installing Poetry < 2.0.0"; \ + echo "Some features will not be available - like 'poetry python install' which allows poetry"; \ + echo "to manage python versions automatically. 
Consider loading the appropriate python module"; \ + echo ""; \ + echo "This will also require the 'pyproject.toml' file to use the classic poetry format."; \ + echo ""; \ + echo "Consider loading the appropriate python module before installing this package with 'make install'"; \ + echo "or switching to 'uv'."; \ echo ""; \ pipx install 'poetry<2.0.0' ; \ else \ @@ -60,7 +56,7 @@ _pipx_install_poetry: .PHONY: poetry-install poetry-install: ## Install Poetry interactively. @echo "Looking for Poetry version...";\ - poetry --version; \ + $(POETRY_COMMAND_WITH_PROJECT_ENV) --version; \ if [ $$? != "0" ]; then \ if [ "$(AUTO_INSTALL)" = "true" ]; then \ ans="y";\ @@ -84,7 +80,7 @@ poetry-install: ## Install Poetry interactively. [Yy]*) \ if [ $$pipx_found == "1" ]; then \ echo""; \ - echo -e "\e[1;39;41m-- WARNING --\e[0m The following pip has been found and will be used to install pipx: "; \ + echo -e "$(WARNING)The following pip has been found and will be used to install pipx: "; \ echo " -> "$$(which pip); \ echo""; \ echo "If you do not have write permission to that environment, using it to install pipx will fail."; \ @@ -103,7 +99,7 @@ poetry-install: ## Install Poetry interactively. ;; \ *) \ echo ""; \ - echo -e "\e[1;39;41m-- WARNING --\e[0m Option $$ans_how not found, exiting process."; \ + echo -e "$(WARNING)Option $$ans_how not found, exiting process."; \ echo ""; \ exit 1; \ esac; \ @@ -149,34 +145,44 @@ poetry-install-local: ## Install standalone Poetry. Will install pipx with local .PHONY: poetry-env-info poetry-env-info: ## Information about the currently active environment used by Poetry - @poetry env info + @$(POETRY_COMMAND_WITH_PROJECT_ENV) env info + +.PHONY: poetry-env-set-local +poetry-env-set-local: ## Configure poetry to create env locally for this project. 
+ @$(POETRY_COMMAND_WITH_PROJECT_ENV) config virtualenvs.in-project true --local .PHONY: poetry-create-env poetry-create-env: ## Create a Poetry managed environment for the project (Outside of Conda environment). - @echo "Creating Poetry environment that will use Python $(PYTHON_VERSION)"; \ - poetry env use $(PYTHON_VERSION); \ - poetry env info + @echo "Searching for python version $(PYTHON_VERSION) ..." + @available_python=$$($(POETRY_COMMAND_WITH_PROJECT_ENV) python list); \ + if ! echo "$$available_python" | grep -qF "$(PYTHON_VERSION)"; then \ + echo "Python version $(PYTHON_VERSION) not found ..."; \ + $(POETRY_COMMAND_WITH_PROJECT_ENV) python install "$(PYTHON_VERSION)"; \ + fi; + @echo "Creating Poetry environment using Python $(PYTHON_VERSION)"; \ + $(POETRY_COMMAND_WITH_PROJECT_ENV) env use $(PYTHON_VERSION); \ + $(POETRY_COMMAND_WITH_PROJECT_ENV) env info @echo"" @echo "This environment can be accessed either by using the " - @echo "command, or activated with the command." + @echo "command, or activated with the command." @echo"" @echo "Use and for more information" @echo"" .PHONY: poetry-activate poetry-activate: ## Print the shell command to activate the project's poetry env. - poetry env activate + @$(POETRY_COMMAND_WITH_PROJECT_ENV) env activate .PHONY: poetry-remove-env poetry-remove-env: ## Remove current project's Poetry managed environment. @if [ "$(AUTO_INSTALL)" = "true" ]; then \ ans_env="y";\ - env_path=$$(poetry env info -p); \ + env_path=$$($(POETRY_COMMAND_WITH_PROJECT_ENV) env info -p); \ env_name=$$(basename $$env_path); \ else \ echo""; \ echo "Looking for poetry environments..."; \ - env_path=$$(poetry env info -p); \ + env_path=$$($(POETRY_COMMAND_WITH_PROJECT_ENV) env info -p); \ if [[ "$$env_path" != "" ]]; then \ echo "The following environment has been found for this project: "; \ env_name=$$(basename $$env_path); \ @@ -190,6 +196,7 @@ poetry-remove-env: ## Remove current project's Poetry managed environment. 
echo""; \ echo "If the active environment listed above is a venv environment,"; \ echo "Choosing to delete it will have no effect; use the bash command $ rm -rf "; \ + echo "or 'make venv-remove'"; \ echo""; \ echo -n "Would you like delete the environment listed above? [y/N]: "; \ read ans_env; \ @@ -201,7 +208,7 @@ poetry-remove-env: ## Remove current project's Poetry managed environment. if [[ $$env_name != "None" ]]; then \ case $$ans_env in \ [Yy]*) \ - poetry env remove $$env_name || echo "No environment was removed"; \ + $(POETRY_COMMAND_WITH_PROJECT_ENV) env remove $$env_name || echo "No environment was removed"; \ ;; \ *) \ echo "No environment was found/provided - skipping environment deletion"; \ @@ -305,21 +312,10 @@ poetry-uninstall-venv: poetry-remove-env ## Uninstall pipx-installed Poetry, the ## -- Install targets (All install targets will install Poetry if not found using 'make poetry-install-auto')-------- ## - -POETRY_COMMAND := poetry - -ifeq ($(DEFAULT_INSTALL_ENV),venv) -POETRY_COMMAND := source $(VENV_ACTIVATE) && poetry -else ifeq ($(DEFAULT_INSTALL_ENV),poetry) -POETRY_COMMAND := poetry -else ifeq ($(DEFAULT_INSTALL_ENV),conda) -POETRY_COMMAND := $(CONDA_TOOL) run -n $(CONDA_ENVIRONMENT) poetry -endif - .PHONY: _check-env _check-env: @if ! [ $(DEFAULT_INSTALL_ENV) ]; then \ - echo -e "\e[1;39;41m-- WARNING --\e[0m No installation environment have been defined." ; \ + echo -e "$(WARNING)No installation environment have been defined." ; \ echo "" ; \ echo "[DEFAULT_INSTALL_ENV] is not defined - Poetry will use the currently activated environment." ; \ echo "If there is no currently active environment (ie. conda or venv)," ; \ @@ -329,7 +325,7 @@ _check-env: make -s venv-create ;\ fi; \ elif [ $(DEFAULT_INSTALL_ENV) = "conda" ]; then \ - if ! $(CONDA_TOOL) env list | grep -q $(CONDA_ENVIRONMENT) ; then \ + if ! 
$(CONDA_ENV_TOOL) env list | grep -q $(CONDA_ENVIRONMENT) ; then \ make -s conda-create-env ; \ fi; \ fi; @@ -359,29 +355,38 @@ _remind-env-activate: @echo "" .PHONY: install -install: install-precommit ## Install the application package, developer dependencies and pre-commit hook +install: install-precommit install-dev ## Install the application package, developer dependencies and pre-commit hook .PHONY: install-precommit -install-precommit: install-dev ## Install the pre-commit hooks (also installs developer dependencies) +install-precommit: ## Install the pre-commit hook (need to run one of the install targets first) @if [ -f .git/hooks/pre-commit ]; then \ echo "Pre-commit hook found"; \ else \ echo "Pre-commit hook not found, proceeding to configure it"; \ - $(POETRY_COMMAND) run pre-commit install; \ + $(ENV_COMMAND_TOOL) pre-commit install; \ fi; +.PHONY: uninstall-precommit +uninstall-precommit: ## Uninstall the pre-commit hook + @$(ENV_COMMAND_TOOL) pre-commit uninstall + .PHONY: install-dev install-dev: poetry-install-auto _check-env ## Install the application along with developer dependencies - @$(POETRY_COMMAND) install --with dev + @$(ENV_INSTALL_TOOL) --with dev --without lab --without docs @make -s _remind-env-activate -.PHONY: install-with-lab -install-with-lab: poetry-install-auto _check-env ## Install the application and it's dev dependencies, including Jupyter Lab - @$(POETRY_COMMAND) install --with dev --with lab +.PHONY: install-jupyter +install-jupyter: poetry-install-auto _check-env ## Install the application and it's dev dependencies, including Jupyter Lab + @$(ENV_INSTALL_TOOL) --with dev --with lab --without docs @make -s _remind-env-activate +.PHONY: install-docs +install-docs: poetry-install-auto _check-env ## Install the application and it's dev dependencies, including Jupyter Lab + @$(ENV_INSTALL_TOOL) --with docs --without lab + @make -s _remind-env-activate + .PHONY: install-package install-package: poetry-install-auto _check-env ## 
Install the application package only - @$(POETRY_COMMAND) install + @$(ENV_INSTALL_TOOL) --only-root @make -s _remind-env-activate diff --git a/.make/scripts/auto_init_script.py b/.make/scripts/auto_init_script.py new file mode 100644 index 0000000..24570f6 --- /dev/null +++ b/.make/scripts/auto_init_script.py @@ -0,0 +1,842 @@ +#!/usr/bin/env python3 +""" +Initialization Script for Lab Advanced Template. + +This script customizes the project based on user input and removes template-specific placeholders. +It handles: +- User input via CLI flags or interactive prompts. +- Automatic detection of repository URL. +- Variable replacement in configuration files. +- Directory renaming and import updates. +- Configuration updates in Makefile.variables. +- README customization. +- Link check configuration. +""" + +import argparse +import json +import re +import shutil +import subprocess +from argparse import Namespace +from pathlib import Path +from typing import Any, Dict, List, Optional + +try: + from typing import LiteralString +except ImportError: + # Workaround for python<3.11 + LiteralString = str + +# --- Constants --- +PROJECT_ROOT = Path(__file__).resolve().parent.parent.parent + +# Placeholders (These will be updated by the script itself after the first run) +PLACEHOLDER_PACKAGE_NAME = "core" +PLACEHOLDER_IMPORT_NAME = "my_awesome_project" +PLACEHOLDER_PROJECT_NAME = "My Awesome Project" +PLACEHOLDER_README_PROJECT_NAME = "\\" +PLACEHOLDER_DESCRIPTION_TOML = "" +PLACEHOLDER_DESCRIPTION_README = "[Provide a brief, one-sentence description of your project here.]" +PLACEHOLDER_PYTHON_VERSION = "3.12" +PLACEHOLDER_REPO_URL = "REPOSITORY_URL" +PLACEHOLDER_AUTHOR = "Author" +PLACEHOLDER_EMAIL = "author@example.com" + +DEFAULT_PYTHON_VERSION = "3.12" +VALID_PYTHON_VERSIONS = ["3.11", "3.12", "3.13", "3.14"] +DEFAULT_INSTALL_ENV = "uv" +DEFAULT_BUILD_TOOL = "uv" +VALID_INSTALL_ENVS = ["uv", "poetry", "conda", "venv"] +VALID_BUILD_TOOLS = ["uv", "poetry"] + +# Files to 
modify +PYPROJECT_TOML = PROJECT_ROOT / "pyproject.toml" +MAKEFILE_VARIABLES = PROJECT_ROOT / "Makefile.variables" +README_MD = PROJECT_ROOT / "README.md" +CHANGES_MD = PROJECT_ROOT / "CHANGES.md" +MARKDOWN_LINK_CHECK = PROJECT_ROOT / ".markdown-link-check.json" +INIT_MARKER_FILE = PROJECT_ROOT / ".make" / ".init_completed" + +# Useful string variables +UV = "uv" +POETRY = "poetry" +CONDA = "conda" + + +# --- Helper Functions --- + + +def run_command(command: List[str], cwd: Path = PROJECT_ROOT, capture_output: bool = True) -> Optional[str]: + """ + Runs a shell command and returns the output. + + Args: + command: A list of strings representing the command to run. + cwd: The working directory for the command. Defaults to PROJECT_ROOT. + capture_output: Whether to capture stdout/stderr. Defaults to True. + + Returns: + The stripped stdout string if successful, or None if the command fails. + """ + try: + result = subprocess.run(args=command, cwd=cwd, capture_output=capture_output, text=True, check=True) + return result.stdout.strip() + except subprocess.CalledProcessError: + print(f"Error running command {' '.join(command)}") + return None + + +def get_git_remote_url() -> Optional[str]: + """ + Detects the git remote origin URL and converts it to HTTPS format. + + Returns: + The HTTPS URL of the git remote, or None if not found. + """ + url = run_command(command=["git", "remote", "get-url", "origin"]) + if not url: + return None + + # Convert SSH to HTTPS + # git@github.com:User/Repo.git -> https://github.com/User/Repo + if url.startswith("git@"): + url = url.replace(":", "/").replace("git@", "https://") + + if url.endswith(".git"): + url = url[:-4] + + return url + + +def prompt_user(prompt: str, default: Optional[str] = None, choices: Optional[List[str]] = None) -> Optional[str]: + """ + Prompts the user for input, with optional default and validation. + + Args: + prompt: The text to display to the user. 
+ default: The default value to return if the user enters nothing. + choices: A list of valid choices. If provided, input is validated against this list. + + Returns: + The user's input or the default value. + """ + while True: + prompt_text = f"{prompt}" + if default: + prompt_text += f" - [{default}]" + if choices: + prompt_text += f" - Available choices: ({', '.join(choices)})" + prompt_text += ": " + + value = input(prompt_text).strip() + + if not value and default: + return default + + if not value and not default: + print("Value is required.") + continue + + if choices and value not in choices: + print(f"Invalid choice. Must be one of: {', '.join(choices)}") + continue + + return value + + +def replace_in_file(filepath: Path, replacements: Dict[str, str], dry_run: bool = False) -> None: + """ + Replaces text in a file based on a dictionary of replacements. + + Args: + filepath: Path to the file to modify. + replacements: A dictionary where keys are strings to find and values are replacements. + dry_run: If True, prints what would happen without modifying the file. + """ + if not filepath.exists(): + print(f"Warning: File not found: {filepath}") + return + + content = filepath.read_text(encoding="utf-8") + + new_content = content + for search, replace in replacements.items(): + new_content = new_content.replace(search, replace) + + if content != new_content: + if dry_run: + print(f"[Dry Run] Would update {filepath}") + else: + filepath.write_text(data=new_content) + print(f"Updated {filepath}") + + +def update_makefile_variables( + filepath: Path, + install_env: str, + build_tool: str, + python_version: str, + app_name: str, + conda_env: Optional[str], + conda_tool: Optional[str], + dry_run: bool, +) -> None: + """ + Updates specific variables in Makefile.variables. + + Args: + filepath: Path to the Makefile.variables file. + install_env: The installation environment (e.g., 'uv', 'poetry', 'conda'). + build_tool: The build tool (e.g., 'uv', 'poetry'). 
+ python_version: The Python version string. + app_name: The application name. + conda_env: The conda environment name (optional). + conda_tool: The conda tool name (optional). + dry_run: If True, prints changes without writing to file. + """ + if not filepath.exists(): + print(f"Warning: File not found: {filepath}") + return + + updates = { + "DEFAULT_INSTALL_ENV :=": install_env, + "DEFAULT_BUILD_TOOL :=": build_tool, + "PYTHON_VERSION :=": python_version, + "APPLICATION_NAME :=": app_name, + } + + if not conda_env: + conda_env = app_name + updates["CONDA_ENVIRONMENT :="] = conda_env + if conda_tool: + updates["CONDA_TOOL :="] = conda_tool + + lines = filepath.read_text(encoding="utf-8").splitlines(keepends=True) + + new_lines = [] + for line in lines: + matched_key = next((key for key in updates if line.startswith(key)), None) + if matched_key: + new_lines.append(f"{matched_key} {updates[matched_key]}\n") + else: + new_lines.append(line) + + if dry_run: + print(f"[Dry Run] Would update {filepath} with configuration settings.") + else: + filepath.write_text(data="".join(new_lines), encoding="utf-8") + print(f"Updated {filepath}") + + +def comment_block(match: re.Match[str]) -> LiteralString: + """Utility function to help comment out sections of the pyproject.toml file, depending on the build tool + selected.""" + block = match.group(0) + lines = block.splitlines() + new_lines = [] + for line in lines: + if line.strip() == "": + new_lines.append(line) + elif line.startswith("# "): + new_lines.append(line) + else: + new_lines.append(f"# {line}") + return "\n".join(new_lines) + + +def uncomment_block(match: re.Match[str]) -> LiteralString: + """Utility function to help un-comment sections of the pyproject.toml file, depending on the build tool selected.""" + block = match.group(0) + lines = block.splitlines() + new_lines = [] + for line in lines: + if line.startswith("# "): + new_lines.append(line[2:]) + elif line.startswith("#"): + new_lines.append(line[1:]) + 
else: + new_lines.append(line) + return "\n".join(new_lines) + + +def update_pyproject_toml( + filepath: Path, + package_name: str, + description: str, + author: str, + email: str, + python_version: str, + repo_url: Optional[str], + build_tool: str, + dry_run: bool, +) -> None: + """ + Updates pyproject.toml with project metadata. + + Args: + filepath: Path to pyproject.toml. + package_name: The new package name. + description: Project description. + author: Author name. + email: Author email. + python_version: Python version string. + repo_url: Repository URL. + build_tool: The build tool to use ('uv' or 'poetry'). + dry_run: If True, prints changes without writing to file. + """ + if not filepath.exists(): + print(f"Warning: File not found: {filepath}") + return + + content = filepath.read_text(encoding="utf-8") + + # Basic replacements + # Note: Using regex for more robust replacement where simple string replace might be ambiguous + + # Update name = "core" -> name = "package_name" + # We look for name = "core" specifically in the [project] section usually at the top + content = re.sub( + pattern=f'name = "{PLACEHOLDER_PACKAGE_NAME}"', repl=f'name = "{package_name}"', string=content, count=1 + ) + + # Update description + content = re.sub( + pattern=f'description = "{PLACEHOLDER_DESCRIPTION_TOML}"', + repl=f'description = "{description}"', + string=content, + count=1, + ) + + # Update authors + # authors = [{ name = "Francis Pelletier", email = "fplt.softwaredeveloper@gmail.com" }] + new_authors = f'authors = [{{ name = "{author}", email = "{email}" }}]' + content = re.sub(pattern=r"authors = \[.*]", repl=new_authors, string=content, count=1) + + # Update python version + # requires-python = ">=3.12,<3.13" + # We assume the user provides "3.12", we want ">=3.12,<3.13" logic or just update the base + # For simplicity, let's try to construct the range if it matches X.Y format + match = re.match(pattern=r"(\d+)\.(\d+)", string=python_version) + if match: + major, 
minor = map(int, match.groups()) + next_minor = minor + 1 + new_requires = f'requires-python = ">={major}.{minor},<{major}.{next_minor}"' + content = re.sub(pattern=r'requires-python = ".*"', repl=new_requires, string=content, count=1) + + # Update tool.hatch.build.targets.wheel packages + # packages = ["src/core"] + content = content.replace(f'packages = ["src/{PLACEHOLDER_PACKAGE_NAME}"]', f'packages = ["src/{package_name}"]') + + # Update tool.poetry packages + # packages = [ + # { include = "core", from = "src" } + # ] + content = content.replace(f'include = "{PLACEHOLDER_PACKAGE_NAME}"', f'include = "{package_name}"') + + # Toggle Build Systems + # Hatchling Block Pattern + hatchling_pattern = ( + r"(?:# )?\[build-system\]\n" + r"(?:# )?requires = \[\"hatchling\"\]\n" + r"(?:# )?build-backend = \"hatchling\.build\"\n" + r"\n" + r"(?:# )?\[tool\.hatch\.build\.targets\.wheel\]\n" + r"(?:# )?packages = \[\"src/[^\"]+\"\]" + ) + + # Poetry Block Pattern + poetry_pattern = ( + r"(?:# )?\[build-system\]\n" + r"(?:# )?requires = \[\"poetry-core\"\]\n" + r"(?:# )?build-backend = \"poetry\.core\.masonry\.api\"\n" + r"\n" + r"(?:# )?\[tool\.poetry\]\n" + r"(?:# )?packages = \[\n" + r"(?:# )? 
\{ include = \"[^\"]+\", from = \"src\" \}\n" + r"(?:# )?\]" + ) + + if build_tool == "uv": + # Enable Hatchling, Disable Poetry + content = re.sub(pattern=hatchling_pattern, repl=uncomment_block, string=content, flags=re.MULTILINE) + content = re.sub(pattern=poetry_pattern, repl=comment_block, string=content, flags=re.MULTILINE) + elif build_tool == "poetry": + # Disable Hatchling, Enable Poetry + content = re.sub(pattern=hatchling_pattern, repl=comment_block, string=content, flags=re.MULTILINE) + content = re.sub(pattern=poetry_pattern, repl=uncomment_block, string=content, flags=re.MULTILINE) + + # Update bumpversion files + if repo_url: + content = content.replace(PLACEHOLDER_REPO_URL, repo_url) + + # Update black target-version + new_target_version = f'target-version = ["py{python_version.replace(".", "")}"]' + content = re.sub(pattern=r"target-version = .*", repl=new_target_version, string=content, count=1) + + if dry_run: + print(f"[Dry Run] Would update {filepath} with project metadata.") + else: + filepath.write_text(data=content, encoding="utf-8") + print(f"Updated {filepath}") + + +def rename_package_directory(package_name: str, dry_run: bool) -> None: + """ + Renames src/core to src/ and updates imports. + + Args: + package_name: The new name for the package directory. + dry_run: If True, prints changes without moving files or updating imports. + """ + src_previous = PROJECT_ROOT / "src" / PLACEHOLDER_PACKAGE_NAME + src_new = PROJECT_ROOT / "src" / package_name + + if not src_previous.exists(): + print(f"Warning: {src_previous} does not exist. 
Skipping rename.") + return + + if dry_run: + print(f"[Dry Run] Would rename {src_previous} to {src_new}") + else: + shutil.move(src=src_previous, dst=src_new) + print(f"Renamed {src_previous} to {src_new}") + + # Update imports in all .py files + # We need to walk through the project and replace "from my_awesome_project" with "from package_name" + # and "import my_awesome_project" with "import package_name" + + # Directories to skip + skip_dirs = {".git", ".venv", "__pycache__", ".nox", ".idea"} + + for file_path in PROJECT_ROOT.rglob("*.py"): + # Check if any part of the path is in skip_dirs + if any(part in skip_dirs for part in file_path.parts): + continue + + # Skip this script itself to avoid self-modification during this step + if file_path.resolve() == Path(__file__).resolve(): + continue + + content = file_path.read_text(encoding="utf-8") + + # Simple replacements for imports + # This is a basic heuristic and might need refinement for complex cases + new_content = content.replace(f"from {PLACEHOLDER_IMPORT_NAME}", f"from {package_name}") + new_content = new_content.replace(f"import {PLACEHOLDER_IMPORT_NAME}", f"import {package_name}") + + if content != new_content: + if dry_run: + print(f"[Dry Run] Would update imports in {file_path}") + else: + file_path.write_text(data=new_content, encoding="utf-8") + print(f"Updated imports in {file_path}") + + +def search_string(content: str, pattern: str) -> re.Match[str] | None: + result = re.search(pattern=pattern, string=content) + return result + + +def open_close_detail_sections(content: str, install_env: str) -> str: + """ + Helper utility to handle changing the opened and closed detail sections. + + Args: + content: The content to modify. + install_env: The installation environment used. + + Returns: + The modified content. + """ + # Patterns to capture the details block. + # Group 1: The opening tag (
or
) + # Group 2: The content (summary ... /details) including trailing newlines + + uv_pattern = r"()(\nStack: uv [\s\S]*?
\n*)" + poetry_pattern = r"()( Stack: Poetry[\s\S]*?
\n*)" + conda_pattern = ( + r"()( Stack: Poetry \+ Conda[\s\S]*?\n*)" + ) + + patterns = { + UV: uv_pattern, + POETRY: poetry_pattern, + CONDA: conda_pattern, + } + + working_content = content + + for env, pattern in patterns.items(): + if env == install_env: + # Ensure Open + replacement = r"
\2" + else: + # Ensure Closed + replacement = r"
\2" + + working_content = re.sub(pattern=pattern, repl=replacement, string=working_content) + + return working_content + + +def update_readme( + readme_path: Path, project_name: str, description: str, install_env: str, python_version: str, dry_run: bool +) -> None: + """ + Updates README.md content. + + Args: + readme_path: Path to README.md. + project_name: The project name. + description: The project description. + install_env: The installation environment used. + python_version: The Python version used. + dry_run: If True, prints changes without writing to file. + """ + if not readme_path.exists(): + return + + content = readme_path.read_text(encoding="utf-8") + + # Replace Title and Description + content = re.sub( + pattern=f"# {re.escape(PLACEHOLDER_README_PROJECT_NAME)}", repl=f"# {project_name}", string=content + ) + content = re.sub(pattern=re.escape(PLACEHOLDER_DESCRIPTION_README), repl=description, string=content) + + # Remove Template Initialization Section + # We look for the section start and end + start_marker = "## 🚀 Template Initialization" + end_marker = "## 🐍 Python Version" + + start_idx = content.find(start_marker) + end_idx = content.find(end_marker) + + if start_idx != -1 and end_idx != -1: + # Keep the end marker section + content = content[:start_idx] + content[end_idx:] + + # Update Python Version section text + content = re.sub( + pattern=f"This project uses \\*\\*Python {re.escape(PLACEHOLDER_PYTHON_VERSION)}\\*\\*", + repl=f"This project uses **Python {python_version}**", + string=content, + ) + + # Dynamic Content Removal based on stack + content = open_close_detail_sections(content=content, install_env=install_env) + + if dry_run: + print(f"[Dry Run] Would update {readme_path}") + else: + readme_path.write_text(data=content, encoding="utf-8") + print(f"Updated {readme_path}") + + +def update_link_check(filepath: Path, repo_url: Optional[str], dry_run: bool) -> None: + """ + Adds repository URL to .markdown-link-check.json ignore 
patterns. + + Args: + filepath: Path to .markdown-link-check.json. + repo_url: The repository URL to add to ignore patterns. + dry_run: If True, prints changes without writing to file. + """ + if not repo_url or not filepath.exists(): + return + + try: + data = json.loads(filepath.read_text(encoding="utf-8")) + except json.JSONDecodeError: + print(f"Error reading {filepath}") + return + + # Check if already exists + patterns = data.get("ignorePatterns", []) + if not any(p.get("pattern") == repo_url for p in patterns): + patterns.append({"pattern": repo_url}) + data["ignorePatterns"] = patterns + + # Also replace REPOSITORY_URL placeholder if present + new_patterns = [] + for p in patterns: + if p.get("pattern") == f"{PLACEHOLDER_REPO_URL}/tree/main": + new_patterns.append({"pattern": f"{repo_url}/tree/main"}) + else: + new_patterns.append(p) + data["ignorePatterns"] = new_patterns + + if dry_run: + print(f"[Dry Run] Would update {filepath} with repo URL.") + else: + filepath.write_text(data=json.dumps(obj=data, indent=2), encoding="utf-8") + print(f"Updated {filepath}") + + +def update_self(script_path: Path, replacements: Dict[str, str], dry_run: bool) -> None: + """Updates the script's own constants to match the new project state.""" + if not script_path.exists(): + return + + content = script_path.read_text(encoding="utf-8") + + for key, value in replacements.items(): + # Look for KEY = "..." or KEY = '...' + # We use json.dumps to generate a safe string representation (e.g. "value") + # and replace the existing assignment. + # We assume the constants are defined at the top level. 
+ + # Regex explanation: + # ^: Start of line + # {key}: The constant name + # \s*=\s*: Assignment operator with optional whitespace + # .*$: Match the rest of the line (the value) + + new_line = f"{key} = {json.dumps(value)}" + content = re.sub(pattern=rf"^{key}\s*=\s*.*$", repl=new_line, string=content, flags=re.MULTILINE) + + if dry_run: + print(f"[Dry Run] Would update {script_path} constants.") + else: + script_path.write_text(data=content, encoding="utf-8") + print(f"Updated {script_path} constants for future runs.") + + +def self_update_for_next_run_of_script( + args: Namespace, + author: str | Any, + description: str | Any, + email: str | Any, + package_name: str | Any, + project_name: str | Any, + python_version: str | Any, + repo_url: str | None, +): + self_replacements = { + "PLACEHOLDER_PACKAGE_NAME": package_name, + "PLACEHOLDER_IMPORT_NAME": package_name, + "PLACEHOLDER_PROJECT_NAME": project_name, + "PLACEHOLDER_README_PROJECT_NAME": project_name, + "PLACEHOLDER_DESCRIPTION_TOML": description, + "PLACEHOLDER_PYTHON_VERSION": python_version, + "DEFAULT_PYTHON_VERSION": python_version, + "PLACEHOLDER_AUTHOR": author, + "PLACEHOLDER_EMAIL": email, + } + + if repo_url: + self_replacements["PLACEHOLDER_REPO_URL"] = repo_url + + update_self(script_path=Path(__file__), replacements=self_replacements, dry_run=args.dry_run) + + +def gather_build_and_env_fields(args: Namespace, package_name: str | Any) -> tuple[Any, Any, Any, Any]: + install_env = args.install_env + if not install_env: + install_env = prompt_user(prompt="Install Environment", default=DEFAULT_INSTALL_ENV, choices=VALID_INSTALL_ENVS) + + # Adjust build tool choices based on install env + available_build_tools = VALID_BUILD_TOOLS + if install_env == "conda" and "uv" in available_build_tools: + # Logic from plan: If conda is selected remove uv from the choices of build-tools + # and add explanation that only poetry is available when using conda + print( + "Note: When using Conda for environment 
management, 'poetry' is the only supported build tool in this "
+            "template."
+        )
+        available_build_tools = ["poetry"]
+    if install_env == "uv" and "poetry" in available_build_tools:
+        # If uv is selected, remove 'poetry' from the build-tool choices:
+        # only 'uv' is available as build tool when uv manages the environment.
+        print("Note: When using UV for environment management, 'uv' is the only supported build tool in this template.")
+        available_build_tools = ["uv"]
+    if install_env == "poetry" and "uv" in available_build_tools:
+        # If poetry is selected, remove 'uv' from the build-tool choices:
+        # only 'poetry' is available as build tool when poetry manages the environment.
+        print(
+            "Note: When using Poetry for environment management, 'poetry' is the only supported build tool in this "
+            "template."
+        )
+        available_build_tools = ["poetry"]
+
+    build_tool = args.build_tool
+    if not build_tool:
+        default_bt = "poetry" if install_env in ["conda", "poetry"] else DEFAULT_BUILD_TOOL
+        build_tool = prompt_user(prompt="Build Tool", default=default_bt, choices=available_build_tools)
+
+    # Validate build_tool with install_env
+    if install_env == "conda" and build_tool == "uv":
+        print("Warning: 'uv' build tool is not supported with 'conda' environment in this template.")
+        print("Switching build tool to 'poetry'.")
+        build_tool = "poetry"
+
+    conda_env_name = None
+    conda_tool = None
+    if install_env == "conda":
+        conda_env_name = args.conda_env_name or prompt_user(prompt="Conda Environment Name", default=f"{package_name}")
+        conda_tool = args.conda_tool or prompt_user(prompt="Conda Tool", default="mamba", choices=["mamba", "conda"])
+    return build_tool, conda_env_name, conda_tool, install_env
+
+
+def gather_metadata_fields(args: Namespace) -> tuple[str | Any, str | Any, str | Any, str | Any, str | Any, str | Any]:
+    project_name = args.project_name or prompt_user(prompt="Project Name", default=PLACEHOLDER_PROJECT_NAME)
+    package_name_args = [
+        
"Package Name (snake_case)", + PLACEHOLDER_IMPORT_NAME.lower().replace(" ", "_").replace("-", "_"), + ] + if project_name != PLACEHOLDER_PROJECT_NAME: + package_name_args = ["Package Name (snake_case)", project_name.lower().replace(" ", "_").replace("-", "_")] + package_name = args.package_name or prompt_user(prompt=package_name_args[0], default=package_name_args[1]) + description = args.description or prompt_user(prompt="Project Description", default=PLACEHOLDER_DESCRIPTION_TOML) + author = args.author or prompt_user(prompt="Author Name", default=PLACEHOLDER_AUTHOR) + email = args.email or prompt_user(prompt="Author Email", default=PLACEHOLDER_EMAIL) + python_version = args.python_version or prompt_user( + prompt="Python Version", default=DEFAULT_PYTHON_VERSION, choices=VALID_PYTHON_VERSIONS + ) + return author, description, email, package_name, project_name, python_version + + +def generate_args_and_parser() -> Namespace: + parser = argparse.ArgumentParser(description="Initialize the project from the template.") + + # Project Metadata + parser.add_argument("--project-name", help="Name of the project") + parser.add_argument("--package-name", help="Python package name (snake_case)") + parser.add_argument("--description", help="Brief project description") + parser.add_argument("--author", help="Author's name") + parser.add_argument("--email", help="Author's email") + + # Technical Configuration + parser.add_argument("--python-version", choices=VALID_PYTHON_VERSIONS, help="Target Python version") + parser.add_argument("--install-env", choices=VALID_INSTALL_ENVS, help="Tool for virtual environment management") + parser.add_argument("--build-tool", choices=VALID_BUILD_TOOLS, help="Tool for dependency/build management") + + # Conda Specifics + parser.add_argument("--conda-env-name", help="Name of the conda environment") + parser.add_argument("--conda-tool", choices=["mamba", "conda"], help="Tool to use (mamba or conda)") + + # Flags + parser.add_argument("--dry-run", 
action="store_true", help="Preview changes without writing to disk") + + args = parser.parse_args() + return args + + +# --- Main Execution --- + + +def main() -> None: + """ + Main execution function for the initialization script. + + Parses arguments, gathers user input, and triggers file updates. + """ + args = generate_args_and_parser() + + if INIT_MARKER_FILE.exists() and not args.dry_run: + print("\n⚠️ WARNING: It looks like this project has already been initialized.") + print(f"Marker file exists: {INIT_MARKER_FILE}") + print("Re-running this script might overwrite your changes or cause unexpected behavior.") + + should_continue = prompt_user(prompt="Do you want to continue anyway?", default="no", choices=["yes", "no"]) + if should_continue != "yes": + print("Aborting initialization.") + return + + print("🚀 Starting Project Initialization...") + + # 1. Gather Inputs + author, description, email, package_name, project_name, python_version = gather_metadata_fields(args=args) + + build_tool, conda_env_name, conda_tool, install_env = gather_build_and_env_fields( + args=args, package_name=package_name + ) + + # 2. Automatic Repo Detection + repo_url = get_git_remote_url() + if repo_url: + print(f"Detected Repository URL: {repo_url}") + else: + print("Could not detect repository URL. Placeholders will remain.") + + # 3. 
Execution + print("\nApplying changes...") + + # Update Makefile.variables + update_makefile_variables( + filepath=MAKEFILE_VARIABLES, + install_env=install_env, + build_tool=build_tool, + python_version=python_version, + app_name=package_name, + conda_env=conda_env_name, + conda_tool=conda_tool, + dry_run=args.dry_run, + ) + + # Update pyproject.toml + update_pyproject_toml( + filepath=PYPROJECT_TOML, + package_name=package_name, + description=description, + author=author, + email=email, + python_version=python_version, + repo_url=repo_url, + build_tool=build_tool, + dry_run=args.dry_run, + ) + + # Rename Directory and Update Imports + if package_name != PLACEHOLDER_PACKAGE_NAME: + rename_package_directory(package_name=package_name, dry_run=args.dry_run) + + # Update README.md + update_readme( + readme_path=README_MD, + project_name=project_name, + description=description, + install_env=install_env, + python_version=python_version, + dry_run=args.dry_run, + ) + + # Update CHANGES.md + replace_in_file( + filepath=CHANGES_MD, + replacements={ + PLACEHOLDER_PROJECT_NAME: project_name, + PLACEHOLDER_README_PROJECT_NAME: project_name, + PLACEHOLDER_AUTHOR: author, + PLACEHOLDER_REPO_URL: repo_url if repo_url else PLACEHOLDER_REPO_URL, + }, + dry_run=args.dry_run, + ) + + # Update .markdown-link-check.json + update_link_check(filepath=MARKDOWN_LINK_CHECK, repo_url=repo_url, dry_run=args.dry_run) + + # 4. Update self (the script itself) to prepare for next run + self_update_for_next_run_of_script( + args=args, + author=author, + description=description, + email=email, + package_name=package_name, + project_name=project_name, + python_version=python_version, + repo_url=repo_url, + ) + + print("\n✅ Initialization Complete!") + if args.dry_run: + print("(This was a dry run. 
No files were modified.)")
+    else:
+        INIT_MARKER_FILE.touch()
+    print("\n🔍 Please review the changes and commit them.")
+    print("\n📦 You can now proceed to installing the package using the 'make install' command.")
+
+
+if __name__ == "__main__":
+    main()
diff --git a/.make/tests/test-script.sh b/.make/tests/test-script.sh
index 4536f5b..76c556d 100755
--- a/.make/tests/test-script.sh
+++ b/.make/tests/test-script.sh
@@ -1,174 +1,253 @@
 #!/usr/bin/env bash
+# Set strict mode: exit on error, use unbound variables, fail on pipe errors
+set -euo pipefail
+
+# --- 1. Project Path Determination ---
+
 SCRIPT_PATH="$(readlink -f "$0")"
-# Get the directory of the script
+# Get the directory of the script, its parent, and the project root
 SCRIPT_DIR="$(dirname "$SCRIPT_PATH")"
 MAKE_DIR="$(dirname "$SCRIPT_DIR")"
 PROJECT_DIR="$(dirname "$MAKE_DIR")"
-TEST_ENV="$PROJECT_DIR/.testvenv"
+# --- 2. Shared Configuration Variables ---
+
+TEST_CONDA_ENV="lab-advanced-template-testing"
+TEST_VENV_PATH="$PROJECT_DIR/.testvenv"
 PIPX_TEST_ENV="$PROJECT_DIR/.testvenvpipx"
-# Test base.make
+# Reusable Makefile argument strings
+BASE_MAKEFILE_ARGS="-f $PROJECT_DIR/.make/base.make"
+LINT_MAKEFILE_ARGS="-f $PROJECT_DIR/.make/lint.make"
+CONDA_MAKEFILE_ARGS="-f $PROJECT_DIR/.make/conda.make"
+POETRY_MAKEFILE_ARGS="-f $PROJECT_DIR/.make/poetry.make"
+UV_MAKEFILE_ARGS="-f $PROJECT_DIR/.make/uv.make"
-base() {
+# Overrides for different configurations (now more focused)
+MAKEFILE_CONDA_OVERRIDE="$BASE_MAKEFILE_ARGS $CONDA_MAKEFILE_ARGS $LINT_MAKEFILE_ARGS"
+MAKEFILE_POETRY_OVERRIDE="$BASE_MAKEFILE_ARGS $CONDA_MAKEFILE_ARGS $POETRY_MAKEFILE_ARGS $LINT_MAKEFILE_ARGS"
+MAKEFILE_UV_OVERRIDE="$BASE_MAKEFILE_ARGS $UV_MAKEFILE_ARGS $LINT_MAKEFILE_ARGS"
+
+# Tool-specific arguments
+POETRY_ARGS="DEFAULT_INSTALL_ENV=poetry DEFAULT_BUILD_TOOL=poetry VENV_PATH=$PROJECT_DIR/.venv"
+UV_ARGS="DEFAULT_INSTALL_ENV=uv DEFAULT_BUILD_TOOL=uv"
+POETRY_CONDA_ARGS="DEFAULT_INSTALL_ENV=conda 
DEFAULT_BUILD_TOOL=poetry CONDA_ENVIRONMENT=$TEST_CONDA_ENV"
+
+# --- 3. Core Helper Functions ---
+
+# Helper function to print section headers
+print_header() {
     echo ""
     echo "###"
-    echo "### Test base targets"
+    echo "### $1"
     echo "###"
     echo ""
-    echo "### Running 'make targets': ###"
-    echo ""
-    cd "$PROJECT_DIR" && make targets
-
-    echo "### Running 'make version': ###"
-    echo ""
-    cd "$PROJECT_DIR" && make version
+}
 
-    # Test venv
+# Generic function to execute a make target with arguments
+# Usage: make_test "Target Name" "MAKEFILE_ARGS" "CUSTOM_VARS" "make_target"
+make_test() {
+    local target_name="$1"
+    local makefile_args="$2"
+    local custom_vars="$3"
+    local make_target="$4"
+    echo "### Running 'make $make_target' ($target_name): ###"
     echo ""
-    echo "### Running 'make venv-create' and 'make venv-remove: ###"
-    echo ""
+
+    # Use command grouping (subshell) to ensure 'cd' doesn't affect the main script's environment
     (
         cd "$PROJECT_DIR" && \
-        make VENV_PATH="$PROJECT_PATH.testvenv" venv-create && \
-        make VENV_PATH="$PROJECT_PATH.testvenv" AUTO_INSTALL=true venv-remove
+        make $makefile_args $custom_vars $make_target
     )
 
-    # Versioning
+}
 
-    echo ""
-    echo "### Running 'make bump' targets ###"
-    echo ""
-    (
-        cd "$PROJECT_DIR" && \
-        make bump-major dry && \
-        make bump-minor dry && \
-        make bump-patch dry
+# Generic function to run a sequence of tests
+# Usage: run_test_suite "Suite Description" "MAKEFILE_ARGS" "CUSTOM_VARS" "TARGET_LIST"
+run_test_suite() {
+    local suite_description="$1"
+    local makefile_args="$2"
+    local custom_vars="$3"
+    # Shift arguments to treat all subsequent args as the target list
+    shift 3
+    local targets=("$@")
+
+    print_header "Test Suite: $suite_description"
+
+    for target in "${targets[@]}"; do
+        make_test "$suite_description" "$makefile_args" "$custom_vars" "$target"
+    done
+}
+
+
+# --- 4. 
Setup and Cleanup Functions --- + +test_cleanup() { + print_header "Cleaning potential test environments" + rm -rf "$PROJECT_DIR/.venv" "$TEST_VENV_PATH" "$PIPX_TEST_ENV" "$PROJECT_DIR/poetry.lock" "$PROJECT_DIR/uv.lock" +} + +# --- 5. Specific Test Functions --- + +base() { + print_header "Test base targets" + + # Simple targets without special environment setup + make_test "Base Targets" "" "" "targets" + make_test "Base Targets" "" "" "version" + + # Test venv creation and removal with a temporary path + run_test_suite \ + "Test temporary venv creation/removal" \ + "$MAKEFILE_POETRY_OVERRIDE" \ + "$POETRY_CONDA_ARGS VENV_PATH='$TEST_VENV_PATH'" \ + "venv-create" \ + "venv-remove AUTO_INSTALL=true" + + # Test versioning (bump targets) within a Conda environment managed by Poetry + local version_targets=( + "conda-create-env CONDA_ENVIRONMENT_FILE='$SCRIPT_DIR/test_environment.yml' CONDA_YES_OPTION='-y'" + "conda-env-info" + "conda-activate" + "install AUTO_INSTALL=true" + "bump-major dry" + "bump-minor dry" + "bump-patch dry" + "conda-clean-env AUTO_INSTALL=true" ) + run_test_suite \ + "Test version bump targets (Poetry/Conda)" \ + "$MAKEFILE_POETRY_OVERRIDE" \ + "$POETRY_CONDA_ARGS" \ + "${version_targets[@]}" } -TEST_CONDA_ENV="lab-advanced-template-testing" -MAKEFILE_ARGS_CONDA_OVERRIDE="-f $PROJECT_DIR/Makefile -f $PROJECT_DIR/.make/conda.make" -conda(){ - echo "" - echo "###" - echo "### Test conda targets" - echo "###" - echo "" - ( - cd "$PROJECT_DIR" && - make $MAKEFILE_ARGS_CONDA_OVERRIDE CONDA_ENVIRONMENT_FILE="$SCRIPT_DIR/test_environment.yml" CONDA_YES_OPTION="-y" conda-create-env && \ - make $MAKEFILE_ARGS_CONDA_OVERRIDE conda-env-info && \ - make $MAKEFILE_ARGS_CONDA_OVERRIDE CONDA_ENVIRONMENT="$TEST_CONDA_ENV" conda-activate && \ - make $MAKEFILE_ARGS_CONDA_OVERRIDE CONDA_ENVIRONMENT="$TEST_CONDA_ENV" AUTO_INSTALL=true conda-poetry-install && \ - make $MAKEFILE_ARGS_CONDA_OVERRIDE CONDA_ENVIRONMENT="$TEST_CONDA_ENV" AUTO_INSTALL=true 
conda-poetry-uninstall && \ - make $MAKEFILE_ARGS_CONDA_OVERRIDE CONDA_ENVIRONMENT="$TEST_CONDA_ENV" AUTO_INSTALL=true conda-clean-env +conda() { + local conda_targets=( + "conda-create-env CONDA_ENVIRONMENT_FILE='$SCRIPT_DIR/test_environment.yml' CONDA_YES_OPTION='-y'" + "conda-env-info" + "conda-activate" + "conda-clean-env AUTO_INSTALL=true" ) + run_test_suite \ + "Test core conda environment targets" \ + "$MAKEFILE_CONDA_OVERRIDE" \ + "$POETRY_CONDA_ARGS" \ + "${conda_targets[@]}" } -MAKEFILE_ARGS_POETRY_OVERRIDE="-f $PROJECT_DIR/Makefile -f $PROJECT_DIR/.make/conda.make -f $PROJECT_DIR/.make/poetry.make" +# The 'lint' function is the most complex due to the matrix of combinations. lint() { - echo "" - echo "###" - echo "### Test pipx poetry targets" - echo "###" - echo "" - ( - cd "$PROJECT_DIR" && \ - make $MAKEFILE_ARGS_POETRY_OVERRIDE DEFAULT_INSTALL_ENV=poetry poetry-create-env && \ - make $MAKEFILE_ARGS_POETRY_OVERRIDE DEFAULT_INSTALL_ENV=poetry install && \ - make $MAKEFILE_ARGS_POETRY_OVERRIDE DEFAULT_INSTALL_ENV=poetry check-lint && \ - make $MAKEFILE_ARGS_POETRY_OVERRIDE DEFAULT_INSTALL_ENV=poetry check-pylint && \ - make $MAKEFILE_ARGS_POETRY_OVERRIDE DEFAULT_INSTALL_ENV=poetry check-complexity && \ - make $MAKEFILE_ARGS_POETRY_OVERRIDE DEFAULT_INSTALL_ENV=poetry fix-lint && \ - make $MAKEFILE_ARGS_POETRY_OVERRIDE DEFAULT_INSTALL_ENV=poetry precommit && \ - make $MAKEFILE_ARGS_POETRY_OVERRIDE DEFAULT_INSTALL_ENV=poetry ruff && \ - make $MAKEFILE_ARGS_POETRY_OVERRIDE DEFAULT_INSTALL_ENV=poetry ruff-fix && \ - make $MAKEFILE_ARGS_POETRY_OVERRIDE DEFAULT_INSTALL_ENV=poetry ruff-format - make $MAKEFILE_ARGS_POETRY_OVERRIDE DEFAULT_INSTALL_ENV=poetry AUTO_INSTALL=true poetry-remove-env - ) + # Combination 1: Poetry (Default Venv Path) + local poetry_venv_lint_targets=( + "poetry-create-env" "install" "check-lint" "check-pylint" "check-complexity" + "fix-lint" "precommit" "ruff" "ruff-fix" "ruff-format" + "poetry-remove-env AUTO_INSTALL=true" "venv-remove 
AUTO_INSTALL=true" + ) + run_test_suite \ + "Lint targets for Poetry (default venv)" \ + "$MAKEFILE_POETRY_OVERRIDE" \ + "$POETRY_ARGS" \ + "${poetry_venv_lint_targets[@]}" + + # Combination 2: Poetry (Conda-managed) + local poetry_conda_lint_targets=( + "conda-create-env CONDA_ENVIRONMENT_FILE='$SCRIPT_DIR/test_environment.yml' CONDA_YES_OPTION='-y'" + "install" "check-lint" "check-pylint" "check-complexity" + "fix-lint" "precommit" "ruff" "ruff-fix" "ruff-format" + "conda-clean-env AUTO_INSTALL=true" + ) + run_test_suite \ + "Lint targets for Poetry (Conda-managed)" \ + "$MAKEFILE_POETRY_OVERRIDE" \ + "$POETRY_CONDA_ARGS" \ + "${poetry_conda_lint_targets[@]}" + + # Combination 3: uv (Default Venv Path) + local uv_lint_targets=( + "uv-create-env" "install" "check-lint" "check-pylint" "check-complexity" + "fix-lint" "precommit" "ruff" "ruff-fix" "ruff-format" + "uv-remove-env AUTO_INSTALL=true" + ) + run_test_suite \ + "Lint targets for uv" \ + "$MAKEFILE_UV_OVERRIDE" \ + "$UV_ARGS" \ + "${uv_lint_targets[@]}" } -poetry(){ - echo "" - echo "###" - echo "### Test Poetry managed and venv managed installs" - echo "###" - echo "" - ( - cd "$PROJECT_DIR" && \ - make $MAKEFILE_ARGS_POETRY_OVERRIDE DEFAULT_INSTALL_ENV=poetry poetry-create-env && \ - make $MAKEFILE_ARGS_POETRY_OVERRIDE DEFAULT_INSTALL_ENV=poetry install && \ - make $MAKEFILE_ARGS_POETRY_OVERRIDE DEFAULT_INSTALL_ENV=poetry AUTO_INSTALL=true poetry-remove-env && \ - make $MAKEFILE_ARGS_POETRY_OVERRIDE DEFAULT_INSTALL_ENV=venv VENV_PATH="$TEST_ENV" install && \ - make $MAKEFILE_ARGS_POETRY_OVERRIDE DEFAULT_INSTALL_ENV=venv VENV_PATH="$TEST_ENV" AUTO_INSTALL=true venv-remove - ) - echo "" - echo "### Test conda managed installs" - echo "" - ( - cd "$PROJECT_DIR" && \ - make $MAKEFILE_ARGS_POETRY_OVERRIDE CONDA_ENVIRONMENT_FILE="$SCRIPT_DIR/test_environment.yml" CONDA_YES_OPTION="-y" conda-create-env && \ - make $MAKEFILE_ARGS_POETRY_OVERRIDE DEFAULT_INSTALL_ENV=conda CONDA_ENVIRONMENT="$TEST_CONDA_ENV" install 
&& \ - make $MAKEFILE_ARGS_POETRY_OVERRIDE CONDA_ENVIRONMENT="$TEST_CONDA_ENV" AUTO_INSTALL=true conda-clean-env - ) +poetry() { + # Test 1: Poetry managed environment + local poetry_targets=( + "poetry-create-env" "install" "poetry-remove-env AUTO_INSTALL=true" "venv-remove AUTO_INSTALL=true" + ) + run_test_suite \ + "Poetry managed environment (poetry/venv)" \ + "$MAKEFILE_POETRY_OVERRIDE" \ + "$POETRY_ARGS" \ + "${poetry_targets[@]}" + # Test 2: Conda managed environment + local conda_targets=( + "conda-create-env CONDA_ENVIRONMENT_FILE='$SCRIPT_DIR/test_environment.yml' CONDA_YES_OPTION='-y'" + "install" "conda-clean-env AUTO_INSTALL=true" + ) + run_test_suite \ + "Poetry managed environment (Conda)" \ + "$MAKEFILE_POETRY_OVERRIDE" \ + "$POETRY_CONDA_ARGS" \ + "${conda_targets[@]}" } poetry-pipx(){ - echo "" - echo "###" - echo "### Test pipx poetry targets" - echo "###" - echo "" - ( - cd "$PROJECT_DIR" && \ - make PIPX_VENV_PATH="$PIPX_TEST_ENV" poetry-install-venv && \ - make PIPX_VENV_PATH="$PIPX_TEST_ENV" AUTO_INSTALL=true poetry-uninstall-venv && \ - make poetry-install-venv + local pipx_poetry_targets=( + "poetry-install-venv PIPX_VENV_PATH='$PIPX_TEST_ENV'" + "poetry-uninstall-venv PIPX_VENV_PATH='$PIPX_TEST_ENV' AUTO_INSTALL=true" + "poetry-install-venv" # Test default pipx path ) + run_test_suite \ + "Test pipx Poetry targets" \ + "$MAKEFILE_POETRY_OVERRIDE" \ + "$POETRY_ARGS" \ + "${pipx_poetry_targets[@]}" } -MAKEFILE_ARGS_UV_OVERRIDE="-f $PROJECT_DIR/Makefile -f $PROJECT_DIR/.make/uv.make" - uv(){ - echo "" - echo "###" - echo "### Test uv managed managed installs" - echo "###" - echo "" - ( - cd "$PROJECT_DIR" && \ - make $MAKEFILE_ARGS_UV_OVERRIDE AUTO_INSTALL=true uv-migrate-from-poetry && \ - make $MAKEFILE_ARGS_UV_OVERRIDE uv-create-env && \ - make $MAKEFILE_ARGS_UV_OVERRIDE AUTO_INSTALL=true install && \ - make $MAKEFILE_ARGS_UV_OVERRIDE AUTO_INSTALL=true uv-remove-env && \ - make $MAKEFILE_ARGS_UV_OVERRIDE AUTO_INSTALL=true uv-migrate-undo 
&& \ - rm -rf pyproject.toml.uv.backup + local uv_targets=( + "uv-create-env" + "install AUTO_INSTALL=true" + "uv-remove-env AUTO_INSTALL=true" ) + # Cleanup is done outside the suite for the specific cleanup step (rm -rf pyproject.toml.uv.backup) + run_test_suite \ + "Test uv managed installs and migration" \ + "$MAKEFILE_UV_OVERRIDE" \ + "$UV_ARGS" \ + "${uv_targets[@]}" } uv-pipx(){ - echo "" - echo "###" - echo "### Test pipx uv pipx targets" - echo "###" - echo "" - ( - cd "$PROJECT_DIR" && \ - make $MAKEFILE_ARGS_UV_OVERRIDE PIPX_VENV_PATH="$PIPX_TEST_ENV" uv-install-venv && \ - make $MAKEFILE_ARGS_UV_OVERRIDE PIPX_VENV_PATH="$PIPX_TEST_ENV" AUTO_INSTALL=true uv-uninstall-venv && \ - make $MAKEFILE_ARGS_UV_OVERRIDE uv-install-venv + local uv_pipx_targets=( + "uv-install-venv PIPX_VENV_PATH='$PIPX_TEST_ENV'" + "uv-uninstall-venv PIPX_VENV_PATH='$PIPX_TEST_ENV' AUTO_INSTALL=true" ) + run_test_suite \ + "Test pipx uv targets" \ + "$MAKEFILE_UV_OVERRIDE" \ + "$UV_ARGS" \ + "${uv_pipx_targets[@]}" } +# --- 6. 
Execution Control Functions --- + all() { base conda lint poetry + uv } list () { @@ -179,30 +258,47 @@ list () { echo echo " List of available tests:" echo - echo " - base : Test 'base' targets" - echo " - conda : Test 'conda' targets" - echo " - lint : Test 'linting' targets" - echo " - poetry : Test 'poetry' targets" + echo " - base : Test 'base' targets" + echo " - conda : Test 'conda' targets" + echo " - lint : Test 'linting' targets (Poetry/Conda/uv matrix)" + echo " - poetry : Test 'poetry' targets (venv/Conda matrix)" echo " - poetry-pipx : Test 'poetry' targets related to pipx" - echo " - uv : Test 'uv' targets" - echo " - uv-pipx : Test 'uv' targets related to pipx" - echo " - test : Test 'test' targets" + echo " - uv : Test 'uv' targets" + echo " - uv-pipx : Test 'uv' targets related to pipx" echo echo " Full test suite" echo - echo " - all : Run most tests, except 'poetry-pipx'" + echo " - all : Run most tests, except '*-pipx'" echo } +check_and_run_cleanup() { + echo "" + echo "🚨 WARNING: Executing cleanup will **permanently remove environments** (e.g., .venv, conda) and **lock files** (e.g., poetry.lock, requirements.lock)." + read -r -p "Are you sure you want to continue with the cleanup? (y/n): " confirm_cleanup + + if [[ "$confirm_cleanup" =~ ^[Yy]$ ]]; then + test_cleanup + echo "Cleanup complete!" + else + echo "Cleanup aborted by user. Skipping environment removal." + fi +} + +# --- 7. 
Main Script Execution --- + if [[ "$#" -eq 0 ]]; then list + exit 0 +else + check_and_run_cleanup fi for var in "$@" do - # Order is set according to use, not alphabetical order + # Use a case statement to map arguments to function calls case "$var" in "list") list @@ -236,9 +332,9 @@ do echo "* ""$var"" is not a valid input " echo "* Use the list command to see available inputs" echo "* * * * * * * * * * * * * * * * * * * * * * * * *" - echo + echo "" list - echo exit 1 esac -done \ No newline at end of file +done +test_cleanup diff --git a/.make/tests/test_environment.yml b/.make/tests/test_environment.yml index 3cd99a6..bf1a448 100644 --- a/.make/tests/test_environment.yml +++ b/.make/tests/test_environment.yml @@ -2,5 +2,5 @@ name: lab-advanced-template-testing channels: - conda-forge dependencies: - - python=3.11 + - python=3.12 diff --git a/.make/uv.make b/.make/uv.make index 13c07dd..ef9151f 100644 --- a/.make/uv.make +++ b/.make/uv.make @@ -2,19 +2,26 @@ # This is to make sure, sometimes the Makefile includes don't work. ## -- UV targets ------------------------------------------------------------------------------------------------ ## -ENV_COMMAND_TOOL := uv run +ENV_TOOL := $(shell command -v uv 2> /dev/null) +LOCAL_UV_PATH := $(shell echo $$HOME/.local/bin/uv) +ifeq ($(ENV_TOOL),) + ENV_TOOL := $(LOCAL_UV_PATH) +endif +# Do not rename these unless you also rename across all other make files in .make/ +ENV_COMMAND_TOOL := $(ENV_TOOL) run +ENV_INSTALL_TOOL := $(ENV_TOOL) sync .PHONY: uv-install-auto uv-install-auto: - uv --version; \ + @$(ENV_TOOL) --version; \ if [ $$? != "0" ]; then \ - @make -s uv-install-venv; \ + make -s uv-install-venv; \ fi; .PHONY: uv-install uv-install: ## Install uv interactively. @echo "Looking for uv version...";\ - uv --version; \ + $(ENV_TOOL) --version; \ if [ $$? != "0" ]; then \ if [ "$(AUTO_INSTALL)" = "true" ]; then \ ans="y";\ @@ -38,7 +45,7 @@ uv-install: ## Install uv interactively. 
[Yy]*) \ if [ $$pipx_found == "1" ]; then \ echo""; \ - echo -e "\e[1;39;41m-- WARNING --\e[0m The following pip has been found and will be used to install pipx: "; \ + echo -e "$(WARNING)The following pip has been found and will be used to install pipx: "; \ echo " -> "$$(which pip); \ echo""; \ echo "If you do not have write permission to that environment, using it to install pipx will fail."; \ @@ -57,7 +64,7 @@ uv-install: ## Install uv interactively. ;; \ *) \ echo ""; \ - echo -e "\e[1;39;41m-- WARNING --\e[0m Option $$ans_how not found, exiting process."; \ + echo -e "$(WARNING)Option $$ans_how not found, exiting process."; \ echo ""; \ exit 1; \ esac; \ @@ -84,14 +91,14 @@ uv-install-venv: ## Install standalone uv. Will install pipx in $HOME/.pipx_venv @pipx --version; \ if [ $$? != "0" ]; then \ echo "Creating virtual environment using venv here : [$(PIPX_VENV_PATH)]"; \ - python3 -m venv $(PIPX_VENV_PATH); \ + virtualenv $(PIPX_VENV_PATH); \ echo "Activating virtual environment [$(PIPX_VENV_PATH)]"; \ source $(PIPX_VENV_PATH)/bin/activate; \ pip3 install pipx; \ pipx ensurepath; \ - source $(PIPX_VENV_PATH)/bin/activate && make -s _pipx_install_uv ; \ + source $(PIPX_VENV_PATH)/bin/activate && make -s _pipx_install_uv; \ else \ - make -s _pipx_install_uv ; \ + make -s _pipx_install_uv; \ fi; .PHONY: uv-install-local @@ -107,8 +114,8 @@ uv-install-local: ## Install standalone uv. Will install pipx with locally avail .PHONY: uv-create-env uv-create-env: ## Create a virtual environment for uv, using the project's python version. - @uv python install $(PYTHON_VERSION) - @uv venv --python $(PYTHON_VERSION) + @$(ENV_TOOL) python install $(PYTHON_VERSION) + @$(ENV_TOOL) venv --python $(PYTHON_VERSION) .PHONY: uv-activate uv-activate: ## Print out the shell command to activate the project's uv environment. 
@@ -210,66 +217,6 @@ uv-uninstall-venv: uv-remove-env ## Uninstall pipx-installed uv, the created uv ;; \ esac; \ -PHONY: uv-migrate-from-poetry -uv-migrate-from-poetry: ## Migrate the project's default poetry 'pyproject.toml' to uv. - @if [ "$(AUTO_INSTALL)" = "true" ]; then \ - ans="y";\ - else \ - echo""; \ - echo -n "Would you like to convert your current pyproject.toml file to use uv instead of poetry ? [y/N]: "; \ - read ans; \ - fi; \ - case $$ans in \ - [Yy]*) \ - echo "Creating backup copy of current pyproject.toml file"; \ - cp pyproject.toml pyproject.toml.poetry.backup ; \ - echo "Migrating pyproject.toml file to use uv"; \ - uvx migrate-to-uv --dependency-groups-strategy keep-existing; \ - if [ -e pyproject.toml.uv.backup ]; then \ - echo "pyproject.toml.uv.backup file found. Proceeding to delete it."; \ - rm pyproject.toml.uv.backup ; \ - fi; \ - ;; \ - *) \ - echo "Skipping pyproject.toml migration."; \ - echo ""; \ - ;; \ - esac; \ - -PHONY: uv-migrate-undo -uv-migrate-undo: ## Undo previous migration of the project's default poetry 'pyproject.toml' to uv. - @if [ "$(AUTO_INSTALL)" = "true" ]; then \ - ans="y";\ - else \ - echo""; \ - echo -n "Would you like to revert your current pyproject.toml file to use the previous backup of the file ? 
[y/N]: "; \ - read ans; \ - fi; \ - case $$ans in \ - [Yy]*) \ - echo "Checking if backup pyproject.toml file exists"; \ - if [ -e "$(PROJECT_PATH)/pyproject.toml.poetry.backup" ]; then \ - echo "Backup file found"; \ - echo "Making backup of current uv pyproject.toml"; \ - cp "$(PROJECT_PATH)/pyproject.toml" "$(PROJECT_PATH)/pyproject.toml.uv.backup" ; \ - echo "Reverting pyproject.toml file to previous poetry backup."; \ - cp "$(PROJECT_PATH)pyproject.toml.poetry.backup" "$(PROJECT_PATH)/pyproject.toml" ; \ - echo "Removing previous poetry backup."; \ - rm "$(PROJECT_PATH)/pyproject.toml.poetry.backup" ; \ - echo "Removing uv.lock file."; \ - rm "$(PROJECT_PATH)/uv.lock" ; \ - else \ - echo ""; \ - echo "Backup file not found. Skipping migration undo"; \ - fi; \ - ;; \ - *) \ - echo "Skipping pyproject.toml migration."; \ - echo ""; \ - ;; \ - esac; \ - - ## -- Install targets (All install targets will install uv if not found using 'make uv-install-auto')---------------- ## .PHONY: _remind-env-activate _remind-env-activate: @@ -278,16 +225,18 @@ _remind-env-activate: @echo "" @make -s uv-activate @echo "" - @echo "You can also use the following command line to interact with the environment" + @echo "or use the eval bash command : eval \$$(make uv-activate)" + @echo "" + @echo "You can also use the following command line to interact with the environment directly" @echo "" @echo " $$ uv run " @echo "" .PHONY: install -install: install-precommit ## Install the application package, developer dependencies and pre-commit hook +install: install-precommit install-dev ## Install the application package, developer dependencies and pre-commit hook .PHONY: install-precommit -install-precommit: install-dev ## Install the pre-commit hooks (also installs developer dependencies) +install-precommit: ## Install the pre-commit hooks (need to run one of the install targets first) @if [ -f .git/hooks/pre-commit ]; then \ echo "Pre-commit hook found"; \ else \ @@ -295,18 +244,31 @@ 
install-precommit: install-dev ## Install the pre-commit hooks (also installs de
 	$(ENV_COMMAND_TOOL) pre-commit install; \
 	fi;
 
+.PHONY: uninstall-precommit
+uninstall-precommit: ## Uninstall the pre-commit hook
+	@$(ENV_COMMAND_TOOL) pre-commit uninstall
+
 .PHONY: install-dev
 install-dev: uv-install-auto ## Install the application along with developer dependencies
-	@uv sync --group dev
+	@$(ENV_INSTALL_TOOL) --group dev --all-packages
+	@make -s _remind-env-activate
+
+.PHONY: install-all
+install-all: uv-install-auto ## Install the application and all its dependency groups
+	@$(ENV_INSTALL_TOOL) --all-groups --all-packages
 	@make -s _remind-env-activate
 
-.PHONY: install-with-lab
-install-with-lab: uv-install-auto ## Install the application and it's dev dependencies, including Jupyter Lab
-	@uv --group dev --group lab
+.PHONY: install-jupyterlab
+install-jupyterlab: uv-install-auto ## Install Jupyter Lab dependencies
+	@$(ENV_TOOL) pip install --group lab
 	@make -s _remind-env-activate
 
+.PHONY: install-docs
+install-docs: uv-install-auto ## Install docs related dependencies (mkdocs)
+	@$(ENV_TOOL) pip install --group docs
+	@make -s _remind-env-activate
 
 .PHONY: install-package
 install-package: uv-install-auto ## Install the application package only
-	@uv sync
+	@$(ENV_INSTALL_TOOL) --no-dev --all-packages
 	@make -s _remind-env-activate
diff --git a/.mdformat.toml b/.mdformat.toml
index d9a2adb..7c91ac1 100644
--- a/.mdformat.toml
+++ b/.mdformat.toml
@@ -1,2 +1,2 @@
 number = true
-extensions = ["gfm", "gfm_alerts", "mkdocs"]
\ No newline at end of file
+extensions = ["gfm", "gfm_alerts", "mkdocs"]
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 4908592..9bfc4b9 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -2,28 +2,51 @@
 
 ## Adding dependencies
 
-Please make sure to add dependencies using `Poetry`, so the project environment
+Please make sure to add dependencies using `uv`, so the project environment
 is kept up to date and functional for other users.
 
+Note that this should not be done while on the DRAC cluster and using pre-built wheels, +as those library versions do not exist elsewhere and `uv` will install Pypi versions, +not local versions - unless specifically configured to do. + To add a new dependency: ``` -poetry add +uv add ``` To add a new dependency with a specific version: ``` -poetry add "==" +uv add "==" +``` + +To add a new dependency and specify a version with some ability to update, use capped +versions, like so : `">=1.2.3,<1.3.0"`. +This is useful when you want to limit the version number but still allow bug fixes. + +``` +uv add "pandas>=2.3.0,<2.4.0" ``` To add a new dependency to a specific group of dependencies (for example, the development dependencies): ``` -poetry add --group dev +uv add --group dev ``` +To make a whole group optional, add the following to your `pyproject.toml` file, where +`` is the name of your group: + +``` +[project.optional-dependencies] +``` + +If you do add dependencies directly with pip, make sure to also add them +(preferably with a version number) to the `dependencies = []` section of +the `pyproject.toml` file. + ## Design patterns Two main considerations should be made when contributing to this package. diff --git a/Makefile b/Makefile index 7ca3c48..db6c1c5 100644 --- a/Makefile +++ b/Makefile @@ -16,33 +16,4 @@ # # Necessary make files # -include .make/base.make - -# -# Optional makefiles - Comment/uncomment the targets you want for the project -# -## Conda targets -include .make/conda.make - -## Poetry targets - !!! If using Poetry, you should comment out the UV file below and consider -## if you need the conda file above !!! -include .make/poetry.make - -## UV targets - !!! If using UV, you should comment out the poetry file above, and possibly the conda file too !!! 
-#include .make/uv.make
-
-## Linting targets
-include .make/lint.make
-
-## Test related targets
-include .make/test.make
-
-#
-# Project related makefiles
-#
-## Custom targets and variables
--include Makefile.targets
--include Makefile.variables
-
-## Private variables and targets import to override variables for local
--include Makefile.private
+include .make/manager.make
diff --git a/Makefile.private.example b/Makefile.private.example
index 148bd0d..48f4db1 100644
--- a/Makefile.private.example
+++ b/Makefile.private.example
@@ -21,23 +21,29 @@
 # asking beforehand.
 AUTO_INSTALL := false
 
-# The default environment to use. The choices are as follow: [venv, poetry, conda]
-# If this is not set, the makefile will use the `poetry` command without activating
-# an environment before hand.
-DEFAULT_INSTALL_ENV := venv
-
-# The default environment where Poetry will be installed. The choices are as follow: [venv, conda]
-# If this is not set, the makefile will ask the user where they want to install Poetry
-#DEFAULT_POETRY_INSTALL_ENV := venv
-
+# The default environment to use. The choices are as follows: [venv, uv, poetry, conda]
+# Here, we are talking about the virtual environment management - not dependencies.
+#
+# venv creates a standard virtual environment via the available 'virtualenv'
+# uv creates a virtual environment via 'uv venv -p $(PYTHON_VERSION)'
+# poetry creates a virtual environment via 'poetry env use $(PYTHON_VERSION)'
+# conda creates a virtual environment via '$(CONDA_TOOL) env create -f environment.yml'
+#
+# This overrides the value found in 'Makefile.variables'
+DEFAULT_INSTALL_ENV := uv
+
+# The default build tool to use. The choices are as follows: [uv, poetry]
+# This value should also reflect the choice above - DO NOT choose 'poetry' or 'conda' for
+# DEFAULT_INSTALL_ENV and then use 'uv' as the build tool
+#
+# This is the project's default.
+DEFAULT_BUILD_TOOL := uv # This variable is to enable the '-y' flag when newer version of conda is available. # The default Mila conda module does not allow this flag; leave blanks or commented out -#CONDA_YES_OPTION := -y +# CONDA_YES_OPTION := -y -# If you use a different Conda tool, say `micromamba`, or if you use a different alias +# If you use a different Conda tool, say `mamba`, or if you use a different alias # CONDA_TOOL := -DOCKER_COMPOSE := docker compose - -## -- Private targets ------------------------------------------------------------------------------------------------## +DOCKER_COMPOSE := docker compose diff --git a/Makefile.targets b/Makefile.targets index b98bddc..30d999f 100644 --- a/Makefile.targets +++ b/Makefile.targets @@ -6,33 +6,3 @@ .PHONY: test-notebooks test-notebooks: ## Execute test notebooks using pytest and nbval $(ENV_COMMAND_TOOL) nox -s test_nb - -## -- Docs targets -------------------------------------------------------------------------------------------------- ## -.PHONY: preview-docs -preview-docs: ## Preview the documentation site locally - @poetry run mkdocs serve -a 0.0.0.0:7000 - - -.PHONY: build-docs -build-docs: ## Build the documentation files locally - @poetry run mkdocs build - -.PHONY: deploy-docs -deploy-docs: ## Publish and deploy the documentation to the live Github page - @echo""; \ - echo -e "\e[1;39;41m-- WARNING --\e[0m This command will deploy all current changes to the live Github page - Making it publicly available"; \ - echo""; \ - echo -n "Would you like to deploys the docs? [Y/n]: "; \ - read ans; \ - case $$ans in \ - [Yy]*) \ - echo""; \ - poetry run mkdocs gh-deploy; \ - echo""; \ - ;; \ - *) \ - echo""; \ - echo "Skipping publication to Github Pages."; \ - echo " "; \ - ;; \ - esac; \ diff --git a/Makefile.variables b/Makefile.variables index 0b7b2fa..06b7ace 100644 --- a/Makefile.variables +++ b/Makefile.variables @@ -1,7 +1,40 @@ # This file is for custom makefile variable at the project level. 
# They are meant to be share by the whole project team.
+#
+# Project Core variables
+#
 APP_VERSION := 0.2.1
+
+# APPLICATION_NAME must be aligned with the name of the folder containing your package
 APPLICATION_NAME := geospatial_tools
+
+PYTHON_VERSION := 3.11
+
+# This is the default install environment for the project.
+# Here, we are talking about the virtual environment management - not dependencies.
+#
+# venv creates a standard virtual environment via the available 'virtualenv'
+# uv creates a virtual environment via 'uv venv -p $(PYTHON_VERSION)'
+# poetry creates a virtual environment via 'poetry env use $(PYTHON_VERSION)'
+# conda creates a virtual environment via '$(CONDA_TOOL) env create -f environment.yml'
+#
+# The default environment is uv. The choices are as follows: [venv, uv, poetry, conda]
+DEFAULT_INSTALL_ENV := uv
+
+# This is the project's default build tool.
+# This value should also reflect the choice above - DO NOT choose 'poetry' for
+# DEFAULT_INSTALL_ENV and then use 'uv' as the build tool
+#
+# The default build tool is uv. The choices are as follows: [uv, poetry]
+DEFAULT_BUILD_TOOL := uv
+
+# Makefile target groups
+# Below, you can choose what target groups are available. Full list: [lint,test,docs]
+TARGET_GROUPS := lint,test,docs
+
+#
+# Conda related variables
+#
 CONDA_ENVIRONMENT := geospatial-tools
-PYTHON_VERSION := 3.11
\ No newline at end of file
+CONDA_TOOL := mamba
diff --git a/README.md b/README.md
index cdd6423..816a21d 100644
--- a/README.md
+++ b/README.md
@@ -1,251 +1,102 @@
-# Geospatial-Tools
-
-## Description
+# Geospatial-tools
 
 This repository is a collection of tools and scripts for geospatial use cases.
 
-## Requirements
-
-This project has only been tested in a Linux (Debian based) environment and assumes
-some basic tools for development are already installed.
-
-The project uses a Makefile to automate most operations. If `make` is available on your
-machine there's a good chance this will work.
- -The following Makefile files should not be modified, but can be consulted: - -- [Makefile](Makefile) : Orchestration of the different files - - In this file, you can activate or deactivate target groups according to your - project's needs. -- [base.make](.make/base.make) : Shared utilities, project agnostic. -- [conda.make](.make/conda.make) : Conda related targets. -- [lint.make](.make/lint.make) : Linting and formating related targets. -- [poetry.make](.make/poetry.make) : Poetry related targets. -- [test.make](.make/test.make) : Test related targets. -- [uv.make](.make/uv.make) : UV related targets. - -The following Makefile files are project or user specific and can be modified by -project users: - -- [Makefile.variables](Makefile.variables) : Shared project variables. -- [Makefile.targets](Makefile.targets) : Shared project targets. -- [Makefile.private](Makefile.private.example) : User specific variables and targets. - - This file is ignored by git and should never be committed, as it can also contain - secrets. - - You can create your own version locally by copying from - [Makefile.private.example](Makefile.private.example) - -## Basic Information - -The different targets and their description can be examined by executing the command -`make targets` - -![image](.make/img/make_targets.png) +## 🐍 Python Version -## Python Version +This project uses **Python 3.11** and relies on a `Makefile` for standardized, reproducible commands. -This project uses Python version 3.11 +You can read more about the makefile [here](.make/README.md). -## Build Tool +## 📦 Package & Environment Management -This project uses `poetry` as a build tool. Using a build tool has the advantage of -streamlining script use as well as fix path issues related to imports. +- **Environment & Dependency Management:** **[uv](https://docs.astral.sh/uv/)** is the **recommended default** tool for fast, reliable dependency installation and virtual environment creation. 
It can be configured to use **[Poetry](https://python-poetry.org/docs/)** or `conda` via `Makefile.variables`. +- **Configuration:** Review the project-level configurations in [Makefile.variables](Makefile.variables) or set individual preferences in `Makefile.private`. -## First Time User Quick Setup +## ⚡ Quick Start -### Installing poetry - -If on a compute cluster, first load the appropriate python module. - -Installing `pipx` and `poetry`: +You can review your current active configurations using this command: ```bash -make poetry-install +make info ``` -Next, let's choose how to manage our development environment - -### Environment management choice - -Choose between managing your development with either `virtualenv`, `poetry` or `conda`. - -#### Virtualenv (or Venv) - -This is the simplest way and what most people in the lab are already used to. - -Make sure that `python 3.10 or 3.11` is available either locally, through loaded cluster -modules (ex. `module load python/`) or `pyenv` before executing the -following command: +You can list the available targets using this command: ```bash -make venv-create +make targets ``` -You can see the shell command to activate the environment with the following target: +### 🛠️ Tool-Specific Setup -```bash -make venv-activate -``` +Select your preferred development stack below. Ensure your `Makefile.variables` are configured to match your choice. -#### Poetry +### Install System Tools -Make sure that `python 3.10 or 3.11` is available either locally, through loaded cluster -modules (ex. `module load python/`) or `pyenv` before executing the -following command: +If needed, run the command corresponding to your chosen stack to install the necessary system tools. +
+Stack: uv (Default) ```bash -make poetry-create-env +make uv-install ``` +
-Make sure to read up -on [how to use your poetry virtual environment](https://python-poetry.org/docs/basic-usage/#using-your-virtual-environment) +### 📦 Installing the Project -You can: - -- Use the `poetry run` command to access your executables - - ex. `poetry run python your_script.py`, or `poetry run pylint src/` -- If you don't want to have to use `poetry run` all the time, you can see the shell - command to activate the environment with the following target: - - `make poetry-activate` -- If you have the `poetry-plugin-shell` you can use the `poetry shell` command to - activate and step in your project environment - -You can of course also create a classic `virtualenv`, -like [in the above section](#virtualenv-or-venv), -activate it, and use `poetry` inside the activate environment. - -#### Conda - -Reminder: `conda` is not available on DRAC. - -If working on the Mila cluster, first load the appropriate module : -`module load anaconda/3` - -- Create `conda` environment (will check for `conda` and install it if not found): - -```bash -make conda-create-env -``` - -- Activate `conda` environment (substitute with your `` if something else - than `conda`: - -``` -conda activate -``` - -### Install - -Once the environment is taken care of, you can verify everything with the following -command before continuing: +Once your tools are configured and installed, run the universal install command. This will create the environment and install all dependencies defined in pyproject.toml. ```bash -make poetry-env-info +make install ``` -or +### 🔌 Activating the Environment ```bash -poetry env info +# Works for uv, poetry, and conda configurations +eval $(make uv-activate) ``` -To install the package and its dependencies: +Alternatively, you can use `uv run ` directly: ```bash -make install +uv run python +# or +uv run pre-commit ``` -This also installs and configures the `pre-commit` hook. See ... - -### Development +## 📖 Project Usage -1. 
[Add required dependencies](CONTRIBUTING.md#adding-dependencies) -2. Create some new modules +## 🌐 Environment & Portability Note -## Detailed documentation +This template is designed for reproducibility using the `lock` files (`uv.lock`). -This project assumes environment management will be done with `conda`, a classic -python virtual environment, or directly through `poetry`. +## 🛠️ Development Workflow -- [Poetry](https://python-poetry.org/docs/basic-usage/) -- [Conda](https://conda.io/projects/conda/en/latest/user-guide/getting-started.html) +### Adding Dependencies -While it is possible to manage the environment with, for example, pyenv or virtualenv, -those specific use cases are not supported by the Makefile and require users to set up -their own environments beforehand. +To add new dependencies, see the [Contributing guidelines](CONTRIBUTING.md#adding-dependencies). -If you want to use something else than `conda` or `poetry` to manage environment -isolation, -it is recommended to follow -[Poetry's guidelines on managing environments](https://python-poetry.org/docs/managing-environments/) +### Pre-commit -`poetry` is not included in the [environment.yml](environment.yml), due to some possible -problems -in compute cluster environments, but will be installed automatically if needed -by most `install` targets. +This project uses `pre-commit` for automated code formatting and linting. The hooks are defined in `.pre-commit-config.yaml`. -### Environment Management +- **Installation:** The `pre-commit install` command installs git hook that run automatically before each commit. It is run automatically when you run the `make install` command. It can also be installed manually with the `make install-precommit` command. +- **Automatic Fixes:** When you `git commit`, `pre-commit` will run. It will automatically fix many formatting issues (like `black`). If it makes changes, your commit will be aborted. Simply `git add .` the changes and commit again. 
+- **Manual Run:** You can run all checks on all files manually at any time:
+  ```bash
+  make precommit
+  ```
+- **Uninstalling:** To remove the git hooks:
+  ```bash
+  make uninstall-precommit
+  ```
 
-Your project will need a virtual environment for your dependencies.
+**Note about `markdown-link-check`**:
 
-There are different ways of managing your python version in these environments. On the
-clusters, you have access to different python modules, and through `conda` you have
-access
-to practically all the versions that are available.
+This pre-commit uses a tool called [markdown-link-check](https://github.com/tcort/markdown-link-check). It's a great tool to make sure all your links are up and accessible. If you need to modify the exception list, say, because you are linking to a private repository and the check keeps failing, add it to the ignore patterns [here](.markdown-link-check.json)
 
-However, on your own system, if you do not wish to use `conda`, you will have to either
-manually install different versions of python manually for them to be usable by `poetry`
-or use a tool like [pyenv](https://github.com/pyenv/pyenv).
-
-Do note that `conda` is not available on the DRAC cluster, and there are some extra
-steps
-to use `conda` on the Mila cluster compared to a workstation.
-
-#### How to use conda with poetry
-
-When using `conda`, it is important to understand that it is both an environment
-management
-tool AND a dependency management tool... and so is `poetry`. The difference is that with
-`conda`
-you can install different versions of python, as well as have access to non
-python applications and binary packages.
-
-To use them together, it is recommended to use `conda` as the environment and python
-version manager, for non-python dependencies, and use `poetry` as the python
-dependency manager.
- -Using both `conda` and `poetry` (and `pip` directly, for that matter), to install and -manage python dependencies is a great recipe for breaking your environment. - -If you do need to install python dependencies in both (ex. pytorch through `conda`, and -others using `poetry`), you need to be cautious as one dependency manager can and will -interfere with the dependencies managed by the other one and will make dependency -conflicts challenging to fix. - -If there are no ways around it, you could also manage and use two environments at the -same time -(one via conda and one via poetry). This, however, requires an enormous amount of -discipline -to work and is strongly discouraged. - -#### Environment management choices - -The choice of which environment manager to use between `conda` and `poetry` is -ultimately -determined by either project requirements or personal preference. Both are allowed and -supported by this project. - -### Installation - -There are a few installation targets available. - -![image](.make/img/install_targets.png) - -Generally, `make install` is the most useful, but the others can have their uses. -For example, using `make install-package` is great for an environment where -you will only be executing your code and do no development. - -### Useful Makefile targets for development +## Other useful development targets To run linting checks with `flake8`, `pylint`, `black`, `isort` and `docformatter`: @@ -259,60 +110,21 @@ To fix linting with `autoflake`,`autopep8`,`black`, `isort`, `flynt` and `docfor make fix-lint ``` -To run a `pre-commit` check before actually committing: - -```bash -make precommit -``` - To run tests: ```bash make test ``` -#### The use of Nox +### Nox Behind the scenes, the targets in this section make use of the [Nox automation tool](https://nox.thea.codes/en/stable/). The configurations can be found in the [noxfile.py](noxfile.py) file. -#### Experimental targets - -The `ruff` tool is now also available in the makefile. 
This tool is not yet integrated -into the `pre-commit` configuration and should be used, optionally, as a complement to -the other code quality tools. - -To run linting using `ruff`: - -```bash -make ruff -``` - -To run linting using `ruff` and fix automatically fixable warnings: - -```bash -make ruff-fix -``` - -To run code formatting using `ruff` - -```bash -make ruff-format -``` - -For more information of `ruff`, see -it's [official documentation here](https://docs.astral.sh/ruff/). - -## Configurations - -Configurations are in the [config/](configs) folder. - -## Data - -See [Data Readme](data/README.md) +For more information about how `nox` is used in this project, see -## Contributing to this repository +### Contributing -See [Contributing guidelines](CONTRIBUTING.md) +Please read and follow the [Contributing guidelines](CONTRIBUTING.md) for details on submitting code, running tests, and managing dependencies. diff --git a/docs/mkdocs/index.md b/docs/mkdocs/index.md index 4d6fc50..ea1a8aa 100644 --- a/docs/mkdocs/index.md +++ b/docs/mkdocs/index.md @@ -22,5 +22,5 @@ This project uses Python version 3.11 ## Build Tool -This project uses [`poetry`](https://python-poetry.org/) as a build tool. Using a build tool has the advantage of +This project uses [`uv`](https://docs.astral.sh/uv/) as a build tool. Using a build tool has the advantage of streamlining script use as well as fix path issues related to imports. 
diff --git a/notebooks/planetary_computer_sentinel2_exploration.ipynb b/notebooks/planetary_computer_sentinel2_exploration.ipynb index 4da79e6..da695bd 100644 --- a/notebooks/planetary_computer_sentinel2_exploration.ipynb +++ b/notebooks/planetary_computer_sentinel2_exploration.ipynb @@ -2,10 +2,13 @@ "cells": [ { "cell_type": "code", - "execution_count": 1, "id": "initial_id", - "metadata": {}, - "outputs": [], + "metadata": { + "ExecuteTime": { + "end_time": "2026-01-29T21:49:37.845965444Z", + "start_time": "2026-01-29T21:49:35.493264346Z" + } + }, "source": [ "import leafmap\n", "import geopandas as gpd\n", @@ -15,7 +18,9 @@ "from geospatial_tools.stac import Asset\n", "from geospatial_tools.utils import get_yaml_config, download_url, unzip_file\n", "from geospatial_tools.vector import create_vector_grid_parallel, to_geopackage, select_polygons_by_location" - ] + ], + "outputs": [], + "execution_count": 1 }, { "cell_type": "markdown", @@ -46,20 +51,24 @@ }, { "cell_type": "code", - "execution_count": 2, "id": "687ac922509bf0e4", - "metadata": {}, + "metadata": { + "ExecuteTime": { + "end_time": "2026-01-29T21:49:37.926580867Z", + "start_time": "2026-01-29T21:49:37.848648030Z" + } + }, + "source": [ + "file_configs = get_yaml_config(\"data_file_links\")\n", + "raw_usa_polygon_path = file_configs[\"united_states_polygon\"][\"url\"]\n", + "raw_s2_tiling_grid_path = file_configs[\"sentinel_2_tiling_grid\"][\"url\"]\n", + "download_list = {\"raw_usa_polygon\": raw_usa_polygon_path, \"raw_s2_tiling_grid\": raw_s2_tiling_grid_path}\n", + "file_list = [download_url(url=url, filename=f\"{DATA_DIR}/{key}.zip\") for key, url in download_list.items()]\n", + "\n", + "file_list\n", + "\n" + ], "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[2024-12-20 11:20:26] INFO [MainThread][geospatial_tools.utils] Yaml config file [/home/francispelletier/projects/geospatial_tools/configs/data_file_links.yaml] found.\n", - "[2024-12-20 11:20:26] INFO 
[MainThread][geospatial_tools.utils] Loading YAML config file [/home/francispelletier/projects/geospatial_tools/configs/data_file_links.yaml].\n", - "[2024-12-20 11:20:26] INFO [MainThread][geospatial_tools.utils] File [/home/francispelletier/projects/geospatial_tools/data/raw_usa_polygon.zip] already exists. Skipping download.\n", - "[2024-12-20 11:20:26] INFO [MainThread][geospatial_tools.utils] File [/home/francispelletier/projects/geospatial_tools/data/raw_s2_tiling_grid.zip] already exists. Skipping download.\n" - ] - }, { "data": { "text/plain": [ @@ -72,41 +81,32 @@ "output_type": "execute_result" } ], - "source": [ - "file_configs = get_yaml_config(\"data_file_links\")\n", - "raw_usa_polygon_path = file_configs[\"united_states_polygon\"][\"url\"]\n", - "raw_s2_tiling_grid_path = file_configs[\"sentinel_2_tiling_grid\"][\"url\"]\n", - "download_list = {\"raw_usa_polygon\": raw_usa_polygon_path, \"raw_s2_tiling_grid\": raw_s2_tiling_grid_path}\n", - "file_list = [download_url(url=url, filename=f\"{DATA_DIR}/{key}.zip\") for key, url in download_list.items()]\n", - "\n", - "file_list\n", - "\n" - ] + "execution_count": 2 }, { "cell_type": "code", - "execution_count": 3, "id": "26a5535d1d05fbbe", - "metadata": {}, + "metadata": { + "ExecuteTime": { + "end_time": "2026-01-29T21:49:38.536778254Z", + "start_time": "2026-01-29T21:49:37.927928176Z" + } + }, + "source": [ + "[unzip_file(zip_path=f, extract_to=DATA_DIR) for f in file_list]" + ], "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "[2024-12-20 11:20:26] INFO [MainThread][geospatial_tools.utils] Extracted: [cb_2018_us_nation_20m.shp.ea.iso.xml]\n", - "[2024-12-20 11:20:26] INFO [MainThread][geospatial_tools.utils] Extracted: [cb_2018_us_nation_20m.shp.iso.xml]\n", - "[2024-12-20 11:20:26] INFO [MainThread][geospatial_tools.utils] Extracted: [cb_2018_us_nation_20m.shp]\n", - "[2024-12-20 11:20:26] INFO [MainThread][geospatial_tools.utils] Extracted: 
[cb_2018_us_nation_20m.shx]\n", - "[2024-12-20 11:20:26] INFO [MainThread][geospatial_tools.utils] Extracted: [cb_2018_us_nation_20m.dbf]\n", - "[2024-12-20 11:20:26] INFO [MainThread][geospatial_tools.utils] Extracted: [cb_2018_us_nation_20m.prj]\n", - "[2024-12-20 11:20:26] INFO [MainThread][geospatial_tools.utils] Extracted: [cb_2018_us_nation_20m.cpg]\n", - "[2024-12-20 11:20:26] INFO [MainThread][geospatial_tools.utils] Extracted: [S2A_OPER_GIP_TILPAR_MPC__20151209T095117_V20150622T000000_21000101T000000_B00.kml]\n" - ] - }, { "data": { "text/plain": [ - "[None, None]" + "[['/home/francispelletier/projects/geospatial_tools/data/cb_2018_us_nation_20m.shp.ea.iso.xml',\n", + " '/home/francispelletier/projects/geospatial_tools/data/cb_2018_us_nation_20m.shp.iso.xml',\n", + " '/home/francispelletier/projects/geospatial_tools/data/cb_2018_us_nation_20m.shp',\n", + " '/home/francispelletier/projects/geospatial_tools/data/cb_2018_us_nation_20m.shx',\n", + " '/home/francispelletier/projects/geospatial_tools/data/cb_2018_us_nation_20m.dbf',\n", + " '/home/francispelletier/projects/geospatial_tools/data/cb_2018_us_nation_20m.prj',\n", + " '/home/francispelletier/projects/geospatial_tools/data/cb_2018_us_nation_20m.cpg'],\n", + " ['/home/francispelletier/projects/geospatial_tools/data/S2A_OPER_GIP_TILPAR_MPC__20151209T095117_V20150622T000000_21000101T000000_B00.kml']]" ] }, "execution_count": 3, @@ -114,9 +114,7 @@ "output_type": "execute_result" } ], - "source": [ - "[unzip_file(zip_path=f, extract_to=DATA_DIR) for f in file_list]" - ] + "execution_count": 3 }, { "cell_type": "markdown", @@ -141,25 +139,44 @@ }, { "cell_type": "code", - "execution_count": 4, "id": "ff487d19985d9368", - "metadata": {}, - "outputs": [], + "metadata": { + "ExecuteTime": { + "end_time": "2026-01-29T21:49:38.640837572Z", + "start_time": "2026-01-29T21:49:38.584766210Z" + } + }, "source": [ "USA_POLYGON_FILE = DATA_DIR / \"usa_polygon_5070.gpkg\"\n", "S2_USA_GRID_FILE = DATA_DIR / 
\"s2_grid_usa_polygon_5070.gpkg\"\n", "usa_polygon = gpd.read_file(USA_POLYGON_FILE)\n", "s2_grid = gpd.read_file(S2_USA_GRID_FILE)" - ] + ], + "outputs": [], + "execution_count": 4 }, { "cell_type": "code", - "execution_count": 5, "id": "2767e8432a15bb65", - "metadata": {}, + "metadata": { + "ExecuteTime": { + "end_time": "2026-01-29T21:49:38.697289852Z", + "start_time": "2026-01-29T21:49:38.642323833Z" + } + }, + "source": [ + "usa_polygon" + ], "outputs": [ { "data": { + "text/plain": [ + " AFFGEOID GEOID NAME \\\n", + "0 0100000US US United States \n", + "\n", + " geometry \n", + "0 MULTIPOLYGON (((-2116048.733 3142966.552, -211... " + ], "text/html": [ "
\n", "