diff --git a/.github/RELEASE_LABELS_GUIDE.md b/.github/RELEASE_LABELS_GUIDE.md new file mode 100644 index 00000000..df433800 --- /dev/null +++ b/.github/RELEASE_LABELS_GUIDE.md @@ -0,0 +1,216 @@ +# Release Labels Quick Reference + +This is a quick reference guide for maintainers on using release labels for automated versioning. + +## âš ī¸ IMPORTANT: Default Behavior + +**If you merge a PR without a release label, NO RELEASE will be created.** + +This is intentional and safe by default - you must explicitly label PRs to trigger a release. + +## When Reviewing PRs + +Before merging any PR to `master`, decide if it should trigger a release and add the appropriate label: + +## Label Reference + +| Label | Color | Effect | Current → New | Use Case | +|-------|-------|--------|---------------|----------| +| **release:patch** | đŸŸĸ Green | Patch bump | 2.1.5 → 2.1.6 | Bug fixes, minor improvements | +| **release:minor** | 🟡 Yellow | Minor bump | 2.1.5 → 2.2.0 | New features, enhancements, new functionality | +| **release:major** | 🔴 Red | Major bump | 2.1.5 → 3.0.0 | Breaking changes, major API changes, removed features | +| **skip-release** (or no label) | âšĒ Gray | No release | 2.1.5 → 2.1.5 | Docs only, CI changes, tests, or changes not ready to release | + +**Note:** `skip-release` label is optional - if you don't add any label, the PR will be skipped for release automatically. + +## Decision Tree + +``` +Should this PR trigger a release? +│ +├─ NO → Don't add a label (or use 'skip-release') +│ Examples: +│ - Docs only changes +│ - README updates +│ - CI/workflow changes +│ - Tests only +│ - Work in progress you want to merge but not release yet +│ +└─ YES → Does it break existing code? + │ + ├─ YES → Use 'release:major' + │ Examples: + │ - Breaking API changes + │ - Removed features + │ - Changed method signatures + │ + └─ NO → Does it add new features? 
+ │ + ├─ YES → Use 'release:minor' + │ Examples: + │ - New methods + │ - New functionality + │ - New API endpoints + │ + └─ NO → Use 'release:patch' + Examples: + - Bug fixes + - Performance improvements + - Minor enhancements +``` + +## Examples + +### release:patch +- Fix bug in `CubeService.get_dimension()` +- Improve error message +- Update dependency version +- Performance optimization +- Fix typo in code + +### release:minor +- Add new method to `ChoreService` +- Add support for new TM1 REST API endpoint +- Add optional parameter to existing method (backwards compatible) +- New utility function + +### release:major +- Remove deprecated method +- Change method signature (breaking) +- Rename class or module +- Change default behavior that breaks existing code +- Update minimum Python version + +### No label (or skip-release) +- Update README +- Fix documentation typo +- Update GitHub Actions workflow +- Add or update tests (no code changes) +- Add code comments +- Refactoring that you want to accumulate before releasing +- Any change you're not ready to release yet + +## Default Behavior + +If **NO label** is added → **NO RELEASE** (safe default) + +To trigger a release, you **MUST** add one of: `release:patch`, `release:minor`, or `release:major` + +## Priority + +If multiple PRs are merged in one day with different labels, the **highest priority label wins**: + +1. **release:major** (highest priority) +2. **release:minor** +3. 
**release:patch** (lowest priority) + +Example: +``` +Monday: + - PR #101: Docs update (no label) → skip + - PR #102: New feature (release:minor) → minor + - PR #103: Bug fix (release:patch) → patch + + Result: Next release will be MINOR (2.1.5 → 2.2.0) + (all 3 PRs included in changelog) + +Tuesday: + - PR #104: Test update (no label) → skip + - PR #105: CI fix (no label) → skip + + Result: NO RELEASE (no release labels found) +``` + +## Common Mistakes + +❌ **DON'T**: +- Forget to add a release label when you want a release (will be skipped!) +- Use `release:minor` for bug fixes +- Add a release label to docs-only changes +- Mix breaking changes with features in same PR + +✅ **DO**: +- Add appropriate release label before merging (if you want a release) +- Leave without label for docs, tests, CI, or work-in-progress +- Split breaking changes into separate PRs when possible +- Review the entire set of changes since last release +- Remember: **no label = no release** (safe default) + +## Workflow Timeline + +### Scenario 1: Release Day (PRs with release labels) + +``` +┌─────────────────────────────────────────────────────────┐ +│ Throughout the Day │ +│ ─────────────────────────────────────────────────────── │ +│ 10:00 AM Merge PR #123 (release:patch) ✅ │ +│ 2:00 PM Merge PR #124 (release:minor) ✅ │ +│ 5:00 PM Merge PR #125 (no label - docs only) │ +└─────────────────────────────────────────────────────────┘ + ↓ +┌─────────────────────────────────────────────────────────┐ +│ 4:00 AM CET (Next Day) - Automated Workflow │ +│ ─────────────────────────────────────────────────────── │ +│ 1. Check for new commits → Found 3 PRs │ +│ 2. Determine version → MINOR (highest label found) │ +│ 3. Run full tests → 2-3 hours │ +│ 4. Tests PASS → Continue │ +│ 5. Bump version → 2.1.5 → 2.2.0 │ +│ 6. Create release → GitHub Release + changelog │ +│ 7. 
Publish → PyPI │ +└─────────────────────────────────────────────────────────┘ + ↓ +┌─────────────────────────────────────────────────────────┐ +│ 7:00 AM CET │ +│ ─────────────────────────────────────────────────────── │ +│ ✅ Version 2.2.0 available on PyPI │ +│ Users can: pip install --upgrade TM1py │ +└─────────────────────────────────────────────────────────┘ +``` + +### Scenario 2: No Release (no release labels) + +``` +┌─────────────────────────────────────────────────────────┐ +│ Throughout the Day │ +│ ─────────────────────────────────────────────────────── │ +│ 10:00 AM Merge PR #126 (no label - tests) │ +│ 2:00 PM Merge PR #127 (no label - CI fix) │ +│ 5:00 PM Merge PR #128 (skip-release - docs) │ +└─────────────────────────────────────────────────────────┘ + ↓ +┌─────────────────────────────────────────────────────────┐ +│ 4:00 AM CET (Next Day) - Automated Workflow │ +│ ─────────────────────────────────────────────────────── │ +│ 1. Check for new commits → Found 3 PRs │ +│ 2. Check for release labels → NONE found │ +│ 3. Skip release â­ī¸ │ +│ 4. Workflow completes (no version bump, no publish) │ +└─────────────────────────────────────────────────────────┘ + ↓ +┌─────────────────────────────────────────────────────────┐ +│ 7:00 AM CET │ +│ ─────────────────────────────────────────────────────── │ +│ â„šī¸ No release created (version remains 2.2.0) │ +│ Changes merged to master but not published to PyPI │ +└─────────────────────────────────────────────────────────┘ +``` + +## Checking Labels via GitHub CLI + +```bash +# View labels on a PR +gh pr view 123 --json labels + +# Add label to PR +gh pr edit 123 --add-label "release:minor" + +# Remove label from PR +gh pr edit 123 --remove-label "release:patch" +``` + +## Questions? 
+ +See the full documentation: +- [CONTRIBUTING.md](../CONTRIBUTING.md) diff --git a/.github/workflows/nightly-release.yml b/.github/workflows/nightly-release.yml new file mode 100644 index 00000000..499c0686 --- /dev/null +++ b/.github/workflows/nightly-release.yml @@ -0,0 +1,358 @@ +name: Nightly Release + +on: + schedule: + # Run at 02:00 UTC (approximately 3–4 AM CET/CEST depending on DST) + # Using a fixed 02:00 UTC time to be safe across CET/CEST transitions + - cron: '0 2 * * *' + workflow_dispatch: # Allow manual trigger for testing + +permissions: + issues: write + contents: write + pull-requests: read + +jobs: + check-for-changes: + runs-on: ubuntu-latest + outputs: + has_changes: ${{ steps.check.outputs.has_changes }} + last_tag: ${{ steps.check.outputs.last_tag }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + fetch-depth: 0 # Fetch all history for proper comparison + + - name: Check for commits since last release + id: check + run: | + # Get the last release tag + LAST_TAG=$(git describe --tags --abbrev=0 2>/dev/null || echo "") + echo "last_tag=$LAST_TAG" >> $GITHUB_OUTPUT + + if [ -z "$LAST_TAG" ]; then + echo "No previous tags found, will create first release" + echo "has_changes=true" >> $GITHUB_OUTPUT + exit 0 + fi + + # Check if there are commits since last tag + COMMITS_SINCE=$(git rev-list ${LAST_TAG}..HEAD --count) + echo "Commits since $LAST_TAG: $COMMITS_SINCE" + + if [ "$COMMITS_SINCE" -gt 0 ]; then + echo "has_changes=true" >> $GITHUB_OUTPUT + echo "✅ Found $COMMITS_SINCE new commits since last release" + else + echo "has_changes=false" >> $GITHUB_OUTPUT + echo "⏭️ No new commits since last release, skipping" + fi + + run-tests: + needs: check-for-changes + if: needs.check-for-changes.outputs.has_changes == 'true' + runs-on: ubuntu-latest + strategy: + matrix: + environment: ["tm1-12", "tm1-11-cloud"] + environment: ${{ matrix.environment }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - 
name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.11' + + - name: Install dependencies + run: | + pip install -e .[pandas,dev] + + - name: Retrieve TM1 Connection Details + run: echo "Retrieving TM1 connection details" + env: + TM1_CONNECTION: ${{ vars.TM1_CONNECTION }} + TM1_CONNECTION_SECRET: ${{ secrets.TM1_CONNECTION_SECRET }} + + - name: Generate config.ini + run: | + python Tests/resources/generate_config.py + env: + TM1_CONNECTION: ${{ vars.TM1_CONNECTION }} + TM1_CONNECTION_SECRET: ${{ secrets.TM1_CONNECTION_SECRET }} + + - name: Run integration tests + run: pytest Tests/ + + - name: Upload test results + if: always() + uses: actions/upload-artifact@v4 + with: + name: test-results-${{ matrix.environment }} + path: Tests/test-reports/ + retention-days: 7 + + determine-version: + needs: [check-for-changes, run-tests] + runs-on: ubuntu-latest + outputs: + new_version: ${{ steps.version.outputs.new_version }} + version_type: ${{ steps.version.outputs.version_type }} + changelog: ${{ steps.changelog.outputs.changelog }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Determine version bump type + id: version + env: + GH_TOKEN: ${{ github.token }} + run: | + LAST_TAG="${{ needs.check-for-changes.outputs.last_tag }}" + + # Parse current version (default to 2.2.0 if no tags) + if [ -z "$LAST_TAG" ]; then + CURRENT_VERSION="2.2.0" + else + CURRENT_VERSION="${LAST_TAG#v}" # Remove 'v' prefix if present + fi + + echo "Current version: $CURRENT_VERSION" + + # Split version into parts + IFS='.' 
read -r MAJOR MINOR PATCH <<< "$CURRENT_VERSION" + + # Get all merged PRs since last tag + if [ -z "$LAST_TAG" ]; then + COMMITS=$(git log --format="%H" HEAD) + else + COMMITS=$(git log --format="%H" ${LAST_TAG}..HEAD) + fi + + # Determine version bump type by checking PR labels + VERSION_TYPE="skip" # Default to skip (no release unless explicitly labeled) + FOUND_RELEASE_LABEL=false + + for COMMIT in $COMMITS; do + # Get PR number(s) associated with this commit (works with rebase & merge) + PR_NUM=$(gh api \ + -H "Accept: application/vnd.github+json" \ + repos/${{ github.repository }}/commits/$COMMIT/pulls \ + --jq '.[0].number' 2>/dev/null || echo "") + + if [ -n "$PR_NUM" ]; then + echo "Checking PR #$PR_NUM for release labels..." + + # Get PR labels using GitHub CLI + LABELS=$(gh pr view $PR_NUM --json labels --jq '.labels[].name' 2>/dev/null || echo "") + + if echo "$LABELS" | grep -q "release:major"; then + VERSION_TYPE="major" + FOUND_RELEASE_LABEL=true + echo "Found release:major label in PR #$PR_NUM" + break # Major takes precedence + elif echo "$LABELS" | grep -q "release:minor"; then + VERSION_TYPE="minor" + FOUND_RELEASE_LABEL=true + echo "Found release:minor label in PR #$PR_NUM" + # Don't break, continue checking for major + elif echo "$LABELS" | grep -q "release:patch"; then + # Only set to patch if we haven't found minor yet + if [ "$VERSION_TYPE" != "minor" ]; then + VERSION_TYPE="patch" + FOUND_RELEASE_LABEL=true + fi + echo "Found release:patch label in PR #$PR_NUM" + elif echo "$LABELS" | grep -q "skip-release"; then + echo "Found skip-release label in PR #$PR_NUM" + # Explicit skip, don't change VERSION_TYPE + fi + fi + done + + # If no release labels found, skip the release + if [ "$FOUND_RELEASE_LABEL" = false ]; then + echo "â­ī¸ No release labels found on any merged PRs, skipping release" + VERSION_TYPE="skip" + fi + + # Calculate new version + case $VERSION_TYPE in + major) + NEW_VERSION="$((MAJOR + 1)).0.0" + ;; + minor) + 
NEW_VERSION="${MAJOR}.$((MINOR + 1)).0" + ;; + patch) + NEW_VERSION="${MAJOR}.${MINOR}.$((PATCH + 1))" + ;; + skip) + NEW_VERSION="" # No version bump + echo "⏭️ Skipping release - no release labels found" + ;; + esac + + echo "Version bump type: $VERSION_TYPE" + echo "New version: $NEW_VERSION" + echo "new_version=$NEW_VERSION" >> $GITHUB_OUTPUT + echo "version_type=$VERSION_TYPE" >> $GITHUB_OUTPUT + + - name: Generate changelog + id: changelog + env: + GH_TOKEN: ${{ github.token }} + run: | + LAST_TAG="${{ needs.check-for-changes.outputs.last_tag }}" + + echo "Generating changelog..." + + # Get commits since last tag + if [ -z "$LAST_TAG" ]; then + COMMITS=$(git log --format="- %s (%h)" HEAD) + else + COMMITS=$(git log --format="- %s (%h)" ${LAST_TAG}..HEAD) + fi + + # Create changelog + CHANGELOG="## What's Changed"$'\n\n'"$COMMITS"$'\n\n'"**Full Changelog**: https://github.com/${{ github.repository }}/compare/${LAST_TAG}...${{ steps.version.outputs.new_version }}" + + # Save to output (handle multiline via heredoc delimiter) + { + echo 'changelog<<CHANGELOG_EOF' + echo "$CHANGELOG" + echo 'CHANGELOG_EOF' + } >> $GITHUB_OUTPUT + + create-release: + needs: [determine-version] + if: needs.determine-version.outputs.version_type != 'skip' && needs.determine-version.outputs.new_version != '' + runs-on: ubuntu-latest + environment: release-approval # Requires manual approval + outputs: + upload_url: ${{ steps.create_release.outputs.upload_url }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.11' + + - name: Update version in pyproject.toml + run: | + NEW_VERSION="${{ needs.determine-version.outputs.new_version }}" + sed -i "s/^version = .*/version = \"$NEW_VERSION\"/" pyproject.toml + sed -i "s|tarball/.*\"|tarball/$NEW_VERSION\"|" pyproject.toml + + - name: Commit version bump + run: | + git config user.name "github-actions[bot]" + git config user.email "github-actions[bot]@users.noreply.github.com" + git add pyproject.toml + git commit -m "chore: 
bump version to ${{ needs.determine-version.outputs.new_version }}" + git push + + - name: Create and push tag + run: | + git fetch origin --tags + git tag ${{ needs.determine-version.outputs.new_version }} + git push origin ${{ needs.determine-version.outputs.new_version }} + + - name: Create GitHub Release + id: create_release + uses: softprops/action-gh-release@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + tag_name: ${{ needs.determine-version.outputs.new_version }} + name: Release ${{ needs.determine-version.outputs.new_version }} + body: | + ${{ needs.determine-version.outputs.changelog }} + + --- + + 🤖 This release was automatically created by the nightly release workflow. + + Install via pip: + ```bash + pip install --upgrade TM1py + ``` + draft: false + prerelease: false + + publish-to-pypi: + needs: [determine-version, create-release] + runs-on: ubuntu-latest + environment: pypi-publish # Requires manual approval (configure in GitHub repo settings) + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + ref: ${{ needs.determine-version.outputs.new_version }} + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.11' + + - name: Install build tools + run: | + python -m pip install --upgrade pip + pip install build twine + + - name: Build package + run: python -m build + + - name: Check package + run: twine check dist/* + + - name: Publish to PyPI + env: + TWINE_USERNAME: __token__ + TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }} + run: twine upload dist/* + + - name: Success notification + run: | + echo "✅ Successfully published TM1py ${{ needs.determine-version.outputs.new_version }} to PyPI!" 
+ echo "Users can now install it with: pip install --upgrade TM1py" + + notify-on-skip: + needs: [determine-version] + if: needs.determine-version.outputs.version_type == 'skip' + runs-on: ubuntu-latest + steps: + - name: Notify skip + run: | + echo "â­ī¸ Release skipped - no release labels found on merged PRs" + echo "" + echo "To trigger a release, add one of these labels to PRs before merging:" + echo " - release:patch (for bug fixes)" + echo " - release:minor (for new features)" + echo " - release:major (for breaking changes)" + echo "" + echo "Or use 'skip-release' label to explicitly skip a PR" + + notify-on-failure: + needs: [run-tests, determine-version] + if: ${{ always() && failure() }} + runs-on: ubuntu-latest + steps: + - name: Create issue on failure + uses: actions/github-script@v7 + with: + script: | + const issue = await github.rest.issues.create({ + owner: context.repo.owner, + repo: context.repo.repo, + title: '🚨 Nightly Release Failed', + body: `The nightly release workflow failed. Please check the [workflow run](${context.serverUrl}/${context.repo.owner}/${context.repo.repo}/actions/runs/${context.runId}) for details.`, + labels: ['release', 'bug', 'automation'] + }); + console.log('Created issue:', issue.data.number); diff --git a/.github/workflows/pr-validation.yml b/.github/workflows/pr-validation.yml new file mode 100644 index 00000000..23b97bc5 --- /dev/null +++ b/.github/workflows/pr-validation.yml @@ -0,0 +1,44 @@ +name: PR Validation + +on: + pull_request: + branches: [master] + types: [opened, synchronize, reopened] + +jobs: + validate: + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.11' + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install black ruff + + - name: Run Black (formatting check) + run: black --check --diff . + + - name: Run Ruff (linting) + run: ruff check . 
+ + # Placeholder for future unit tests + # Uncomment when unit tests are available + # - name: Run unit tests + # run: | + # pip install -e .[dev] + # pytest Tests/unit/ -v + + - name: Validation Summary + if: success() + run: | + echo "✅ All validation checks passed!" + echo "- Code formatting (Black): PASSED" + echo "- Linting (Ruff): PASSED" + # echo "- Unit tests: PASSED" diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 00000000..a2357c0c --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,9 @@ +# Contributing to TM1py + +TM1py is an open-source project. It thrives on contributions from the TM1 community. + +To ensure a smooth and consistent contribution process, please review our full contributor guide: + +👉 **See the complete guide here:** [`docs/how-to-contribute.md`](docs/how-to-contribute.md) + +If you have questions or need help at any stage, feel free to open an issue or start a discussion. diff --git a/TM1py/Objects/Annotation.py b/TM1py/Objects/Annotation.py index 6598b1cf..d9f86e3d 100644 --- a/TM1py/Objects/Annotation.py +++ b/TM1py/Objects/Annotation.py @@ -244,4 +244,4 @@ def construct_body_for_post(self, cube_dimensions) -> Dict: body["commentType"] = "ANNOTATION" body["commentLocation"] = ",".join(self.dimensional_context) - return body \ No newline at end of file + return body diff --git a/TM1py/Services/ApplicationService.py b/TM1py/Services/ApplicationService.py index 3d20b7d4..96f51001 100644 --- a/TM1py/Services/ApplicationService.py +++ b/TM1py/Services/ApplicationService.py @@ -92,8 +92,7 @@ def _find_private_boundary(self, segments: List[str], **kwargs) -> int: return len(segments) # All segments are public - def _resolve_path(self, path: str, private: bool = False, use_cache: bool = False, - **kwargs) -> Tuple[str, bool]: + def _resolve_path(self, path: str, private: bool = False, use_cache: bool = False, **kwargs) -> Tuple[str, bool]: """Resolve application path, handling mixed public/private folder hierarchies. 
For public access (private=False), returns direct URL without probing. @@ -210,8 +209,13 @@ def get_names(self, path: str, private: bool = False, use_cache: bool = False, * return [application["Name"] for application in response.json()["value"]] def get( - self, path: str, application_type: Union[str, ApplicationTypes], name: str, private: bool = False, - use_cache: bool = False, **kwargs + self, + path: str, + application_type: Union[str, ApplicationTypes], + name: str, + private: bool = False, + use_cache: bool = False, + **kwargs, ) -> Application: """Retrieve Planning Analytics Application @@ -294,8 +298,9 @@ def get( view_name=response.json()["View"]["Name"], ) - def get_document(self, path: str, name: str, private: bool = False, use_cache: bool = False, - **kwargs) -> DocumentApplication: + def get_document( + self, path: str, name: str, private: bool = False, use_cache: bool = False, **kwargs + ) -> DocumentApplication: """Get Excel Application from TM1 Server in binary format. Can be dumped to file. Automatically handles mixed public/private folder hierarchies. @@ -406,8 +411,13 @@ def rename( data = {"Name": new_application_name} return self._rest.POST(url, data=json.dumps(data), **kwargs) - def create(self, application: Union[Application, DocumentApplication], private: bool = False, - use_cache: bool = False, **kwargs) -> Response: + def create( + self, + application: Union[Application, DocumentApplication], + private: bool = False, + use_cache: bool = False, + **kwargs, + ) -> Response: """Create Planning Analytics application Automatically handles mixed public/private folder hierarchies. 
@@ -435,8 +445,13 @@ def create(self, application: Union[Application, DocumentApplication], private: return response - def update(self, application: Union[Application, DocumentApplication], private: bool = False, - use_cache: bool = False, **kwargs) -> Response: + def update( + self, + application: Union[Application, DocumentApplication], + private: bool = False, + use_cache: bool = False, + **kwargs, + ) -> Response: """Update Planning Analytics application Automatically handles mixed public/private folder hierarchies. @@ -455,7 +470,7 @@ def update(self, application: Union[Application, DocumentApplication], private: url = format_url( base_url + "/" + contents + "('{name}{extension}')/Document/Content", name=application.name, - extension="" if verify_version("12", self.version) else ".blob" + extension="" if verify_version("12", self.version) else ".blob", ) response = self._rest.PATCH(url=url, data=application.content, headers=self.binary_http_header, **kwargs) else: @@ -465,8 +480,11 @@ def update(self, application: Union[Application, DocumentApplication], private: return response def update_or_create( - self, application: Union[Application, DocumentApplication], private: bool = False, - use_cache: bool = False, **kwargs + self, + application: Union[Application, DocumentApplication], + private: bool = False, + use_cache: bool = False, + **kwargs, ) -> Response: """Update or create Planning Analytics application @@ -504,23 +522,42 @@ def update_or_create_document_from_file( :param use_cache: boolean - whether to cache discovered private boundaries :return: Response """ - if self.exists(path=path, application_type=ApplicationTypes.DOCUMENT, name=name, private=private, - use_cache=use_cache, **kwargs): + if self.exists( + path=path, + application_type=ApplicationTypes.DOCUMENT, + name=name, + private=private, + use_cache=use_cache, + **kwargs, + ): response = self.update_document_from_file( - path_to_file=path_to_file, application_path=path, application_name=name, 
private=private, - use_cache=use_cache, **kwargs + path_to_file=path_to_file, + application_path=path, + application_name=name, + private=private, + use_cache=use_cache, + **kwargs, ) else: response = self.create_document_from_file( - path_to_file=path_to_file, application_path=path, application_name=name, private=private, - use_cache=use_cache, **kwargs + path_to_file=path_to_file, + application_path=path, + application_name=name, + private=private, + use_cache=use_cache, + **kwargs, ) return response def exists( - self, path: str, application_type: Union[str, ApplicationTypes], name: str, private: bool = False, - use_cache: bool = False, **kwargs + self, + path: str, + application_type: Union[str, ApplicationTypes], + name: str, + private: bool = False, + use_cache: bool = False, + **kwargs, ) -> bool: """Check if application exists @@ -592,8 +629,13 @@ def exists( return self._exists(url, **kwargs) def create_document_from_file( - self, path_to_file: str, application_path: str, application_name: str, private: bool = False, - use_cache: bool = False, **kwargs + self, + path_to_file: str, + application_path: str, + application_name: str, + private: bool = False, + use_cache: bool = False, + **kwargs, ) -> Response: """Create DocumentApplication in TM1 from local file @@ -611,8 +653,13 @@ def create_document_from_file( return self.create(application=application, private=private, use_cache=use_cache, **kwargs) def update_document_from_file( - self, path_to_file: str, application_path: str, application_name: str, private: bool = False, - use_cache: bool = False, **kwargs + self, + path_to_file: str, + application_path: str, + application_name: str, + private: bool = False, + use_cache: bool = False, + **kwargs, ) -> Response: """Update DocumentApplication in TM1 from local file @@ -686,7 +733,7 @@ def _discover_at_path( flat: bool, in_private_context: bool, results: List[Dict], - **kwargs + **kwargs, ) -> List[Dict]: """Discover items at a specific path, handling 
both public and private contents. @@ -713,7 +760,7 @@ def _discover_at_path( flat=flat, results=results, items=items, - **kwargs + **kwargs, ) else: # Public context - get public contents @@ -728,7 +775,7 @@ def _discover_at_path( flat=flat, results=results, items=items, - **kwargs + **kwargs, ) # Also get private contents if requested (private items in a public folder) @@ -744,7 +791,7 @@ def _discover_at_path( flat=flat, results=results, items=items, - **kwargs + **kwargs, ) return items if not flat else results @@ -760,7 +807,7 @@ def _process_items( flat: bool, results: List[Dict], items: List[Dict], - **kwargs + **kwargs, ): """Process raw items from API and handle recursion. @@ -789,7 +836,7 @@ def _process_items( "id": item_id, "name": item_name, "path": item_path, - "is_private": is_private or in_private_context + "is_private": is_private or in_private_context, } # Handle recursion for folders @@ -803,7 +850,7 @@ def _process_items( flat=flat, in_private_context=new_private_context, results=results, - **kwargs + **kwargs, ) if not flat: item["children"] = children @@ -814,12 +861,7 @@ def _process_items( items.append(item) def discover( - self, - path: str = "", - include_private: bool = False, - recursive: bool = False, - flat: bool = False, - **kwargs + self, path: str = "", include_private: bool = False, recursive: bool = False, flat: bool = False, **kwargs ) -> List[Dict]: """Discover applications in the Applications folder. 
@@ -850,7 +892,7 @@ def discover( flat=flat, in_private_context=in_private_context, results=results, - **kwargs + **kwargs, ) return results if flat else items diff --git a/TM1py/Services/CellService.py b/TM1py/Services/CellService.py index 9a8a3d1f..90515ed9 100644 --- a/TM1py/Services/CellService.py +++ b/TM1py/Services/CellService.py @@ -586,9 +586,7 @@ def relative_proportional_spread( SELECT {{ {rows} }} ON 0 FROM [{cube}] - """.format( - rows="}*{".join(unique_element_names), cube=cube - ) + """.format(rows="}*{".join(unique_element_names), cube=cube) cellset_id = self.create_cellset(mdx=mdx, sandbox_name=sandbox_name, **kwargs) payload = { @@ -622,9 +620,7 @@ def clear_spread( SELECT {{ {rows} }} ON 0 FROM [{cube}] - """.format( - rows="}*{".join(unique_element_names), cube=cube - ) + """.format(rows="}*{".join(unique_element_names), cube=cube) cellset_id = self.create_cellset(mdx=mdx, sandbox_name=sandbox_name, **kwargs) payload = {"BeginOrdinal": 0, "Value": "C", "ReferenceCell@odata.bind": list()} diff --git a/TM1py/Services/ElementService.py b/TM1py/Services/ElementService.py index d1dac6de..3606cd6b 100644 --- a/TM1py/Services/ElementService.py +++ b/TM1py/Services/ElementService.py @@ -1091,7 +1091,7 @@ def get_edges(sub_trees): component_name = sub_tree["Component"]["Name"] else: component_name = sub_tree["ComponentName"] - edges[sub_tree["ParentName"],component_name] = sub_tree["Weight"] + edges[sub_tree["ParentName"], component_name] = sub_tree["Weight"] if "Edges" not in sub_tree["Component"]: continue diff --git a/TM1py/Services/HierarchyService.py b/TM1py/Services/HierarchyService.py index fc4413d6..b1c22d3b 100644 --- a/TM1py/Services/HierarchyService.py +++ b/TM1py/Services/HierarchyService.py @@ -11,7 +11,6 @@ from collections import defaultdict from typing import Dict, Iterable, List, Optional, Tuple - from requests import Response from TM1py.Exceptions import TM1pyRestException @@ -93,9 +92,7 @@ def explore_relationships(node, path): 
explore_relationships(node, [node]) if cycles: - raise ValueError( - f"Circular reference{'s' if len(cycles) > 1 else ''} found in edges: {cycles}" - ) + raise ValueError(f"Circular reference{'s' if len(cycles) > 1 else ''} found in edges: {cycles}") @staticmethod def _validate_alias_uniqueness(df: "pd.DataFrame"): diff --git a/TM1py/Services/RestService.py b/TM1py/Services/RestService.py index 8aefaf92..cbff415d 100644 --- a/TM1py/Services/RestService.py +++ b/TM1py/Services/RestService.py @@ -136,7 +136,7 @@ def __init__(self, **kwargs): - **ssl_context**: User-defined SSL context. - **cert** (str|tuple): (Optional) If string, path to SSL client cert file (.pem). If tuple, ('cert', 'key') pair. - :param kwargs: See description above for all supported arguments + :param kwargs: See description above for all supported arguments """ # store kwargs for future use e.g. re_connect on 401 session timeout self._kwargs = kwargs @@ -456,12 +456,11 @@ def _handle_remote_disconnect( # Calculate delay with exponential backoff: delay * backoff_factor^(attempt-1), capped at max_delay current_delay = min( self._remote_disconnect_retry_delay * (self._remote_disconnect_backoff_factor ** (attempt - 1)), - self._remote_disconnect_max_delay + self._remote_disconnect_max_delay, ) warnings.warn( - f"Retry attempt {attempt}/{self._remote_disconnect_max_retries} " - f"after {current_delay:.1f}s delay..." + f"Retry attempt {attempt}/{self._remote_disconnect_max_retries} " f"after {current_delay:.1f}s delay..." 
) time.sleep(current_delay) @@ -506,15 +505,11 @@ def _handle_remote_disconnect( # Re-raise TM1 exceptions as-is raise except Exception as retry_error: - warnings.warn( - f"Retry attempt {attempt}/{self._remote_disconnect_max_retries} failed: {retry_error}" - ) + warnings.warn(f"Retry attempt {attempt}/{self._remote_disconnect_max_retries} failed: {retry_error}") continue # All retries exhausted - warnings.warn( - f"All {self._remote_disconnect_max_retries} retry attempts failed after remote disconnect" - ) + warnings.warn(f"All {self._remote_disconnect_max_retries} retry attempts failed after remote disconnect") raise original_error def connect(self): diff --git a/TM1py/Services/SubsetService.py b/TM1py/Services/SubsetService.py index ecd86eb0..f1be7f91 100644 --- a/TM1py/Services/SubsetService.py +++ b/TM1py/Services/SubsetService.py @@ -8,7 +8,7 @@ from TM1py.Services.ObjectService import ObjectService from TM1py.Services.ProcessService import ProcessService from TM1py.Services.RestService import RestService -from TM1py.Utils import format_url, case_and_space_insensitive_equals +from TM1py.Utils import case_and_space_insensitive_equals, format_url class SubsetService(ObjectService): diff --git a/TM1py/__init__.py b/TM1py/__init__.py index 4448f539..f5ad044c 100644 --- a/TM1py/__init__.py +++ b/TM1py/__init__.py @@ -74,4 +74,11 @@ from TM1py.Services.ViewService import ViewService from TM1py.Utils import Utils -__version__ = "2.2" +# Version is managed in pyproject.toml +try: + from importlib.metadata import version + + __version__ = version("TM1py") +except Exception: + # Fallback for development installations + __version__ = "2.2.0" diff --git a/Tests/CellService_test.py b/Tests/CellService_test.py index 49094c26..48174d21 100644 --- a/Tests/CellService_test.py +++ b/Tests/CellService_test.py @@ -4587,7 +4587,7 @@ def test_clear_with_mdx_invalid_query(self): """ self.tm1.cells.clear_with_mdx(cube=self.cube_name, mdx=mdx) - self.assertIn("Failed to initialize 
View by Expression", str(e.exception) ) + self.assertIn("Failed to initialize View by Expression", str(e.exception)) def test_clear_with_mdx_unsupported_version(self): diff --git a/Tests/ChoreService_test.py b/Tests/ChoreService_test.py index c2fe3bf2..b0e2dd25 100644 --- a/Tests/ChoreService_test.py +++ b/Tests/ChoreService_test.py @@ -478,7 +478,6 @@ def test_update_chore_remove_first_task(self): task1, task2 = self.tasks[2], c._tasks[1] self.assertEqual(task1, task2) - def test_activate(self): chore = self.tm1.chores.get(self.chore_name1) if chore.active: diff --git a/Tests/ElementService_test.py b/Tests/ElementService_test.py index 3a54482c..87669d97 100644 --- a/Tests/ElementService_test.py +++ b/Tests/ElementService_test.py @@ -81,7 +81,9 @@ def setUp(self): self.tm1.cubes.cells.write_value("1990/91", self.attribute_cube_name, ("1991", "Financial Year")) self.tm1.cubes.cells.write_value("1991/92", self.attribute_cube_name, ("1992", "Financial Year")) self.tm1.cubes.cells.write_value("All Years", self.attribute_cube_name, ("Total Years", "Financial Year")) - self.tm1.cubes.cells.write_value("All Consolidations", self.attribute_cube_name, ("All Consolidations", "Financial Year")) + self.tm1.cubes.cells.write_value( + "All Consolidations", self.attribute_cube_name, ("All Consolidations", "Financial Year") + ) self.create_or_update_dimension_with_hierarchies() @@ -713,8 +715,6 @@ def run_test_get_elements_dataframe_elements_via_mdx(self, use_blob: bool): self.assertTrue(df.equals(reference_df)) - - def test_get_element_names(self): element_names = self.tm1.dimensions.hierarchies.elements.get_element_names( self.dimension_name, self.hierarchy_name diff --git a/Tests/HierarchyService_test.py b/Tests/HierarchyService_test.py index a015b8d7..880b7e11 100644 --- a/Tests/HierarchyService_test.py +++ b/Tests/HierarchyService_test.py @@ -1305,7 +1305,7 @@ def test_update_or_create_hierarchy_from_dataframe_circular_reference(self): 
element_column=self.region_dimension_name, element_type_column="ElementType", unwind_all=True, - verify_edges=True + verify_edges=True, ) def test_update_or_create_hierarchy_from_dataframe_circular_references(self): @@ -1337,7 +1337,7 @@ def test_update_or_create_hierarchy_from_dataframe_circular_references(self): element_column=self.region_dimension_name, element_type_column="ElementType", unwind_all=True, - verify_edges=True + verify_edges=True, ) def test_update_or_create_hierarchy_from_dataframe_no_weight_columns(self): diff --git a/Tests/Hierarchy_test.py b/Tests/Hierarchy_test.py index 27a8fb24..7cf37d4c 100644 --- a/Tests/Hierarchy_test.py +++ b/Tests/Hierarchy_test.py @@ -187,10 +187,15 @@ def test_get_descendants_recursive_leaves_only(self): elements = hierarchy.get_descendants("Europe", recursive=True, leaves_only=True) self.assertEqual( - {Element("Germany", "Numeric"), Element("Austria", "Numeric"), - Element("Switzerland", "Numeric"), Element("France", "Numeric")}, - elements) - + { + Element("Germany", "Numeric"), + Element("Austria", "Numeric"), + Element("Switzerland", "Numeric"), + Element("France", "Numeric"), + }, + elements, + ) + def test_get_descendants_recursive_leaves_only_with_higher_level_consolidation(self): hierarchy = Hierarchy( name="NotRelevant", @@ -205,7 +210,8 @@ def test_get_descendants_recursive_leaves_only_with_higher_level_consolidation(s Element("CCC", "Numeric"), Element("AAA", "Numeric"), Element("AAB", "Numeric"), - Element("AAC", "Numeric")], + Element("AAC", "Numeric"), + ], edges={ ("Total", "A"): 1, ("A", "AA"): 1, @@ -216,12 +222,20 @@ def test_get_descendants_recursive_leaves_only_with_higher_level_consolidation(s ("B", "BBC"): 1, ("Total", "C"): 1, ("C", "CCC"): 1, - }) + }, + ) elements = hierarchy.get_descendants("Total", recursive=True, leaves_only=True) self.assertEqual( - {Element("BBC", "Numeric"), Element("CCC", "Numeric"), Element("AAA", "Numeric"), Element("AAB", "Numeric"), Element("AAC", "Numeric")}, - 
elements) + { + Element("BBC", "Numeric"), + Element("CCC", "Numeric"), + Element("AAA", "Numeric"), + Element("AAB", "Numeric"), + Element("AAC", "Numeric"), + }, + elements, + ) def test_get_descendant_edges_recursive_false(self): hierarchy = Hierarchy( diff --git a/Tests/ProcessService_test.py b/Tests/ProcessService_test.py index 2a48a30d..e790cc50 100644 --- a/Tests/ProcessService_test.py +++ b/Tests/ProcessService_test.py @@ -290,7 +290,7 @@ def test_compile_with_errors(self): self.tm1.processes.update_or_create(p_bad) errors = self.tm1.processes.compile(p_bad.name) self.assertTrue(len(errors) == 1) - self.assertIn('dimsize', errors[0]["Message"]) + self.assertIn("dimsize", errors[0]["Message"]) self.tm1.processes.delete(p_bad.name) @skip_if_version_lower_than(version="11.4") @@ -373,7 +373,7 @@ def test_compile_process_with_errors(self): errors = self.tm1.processes.compile_process(p_bad) self.assertTrue(len(errors) == 1) - self.assertIn('dimsize', errors[0]["Message"]) + self.assertIn("dimsize", errors[0]["Message"]) def test_get_process(self): p_ascii_orig = copy.deepcopy(self.p_ascii) diff --git a/Tests/Utils_test.py b/Tests/Utils_test.py index 71ceba2b..9e90bdee 100644 --- a/Tests/Utils_test.py +++ b/Tests/Utils_test.py @@ -151,12 +151,18 @@ def test_build_dataframe_from_csv(self): pd._testing.assert_frame_equal(expected_df, df, check_column_type=False) def test_build_dataframe_from_csv_shaped_numbers_and_strings(self): - raw_csv = "Region~Product~Measure~Value\r\n" "r1~p1~Revenue~1.0\r\n" "r1~p2~Revenue~3.0\r\n" "r1~p1~Comment~Great Product\r\n" "r1~p2~Comment~" + raw_csv = ( + "Region~Product~Measure~Value\r\n" + "r1~p1~Revenue~1.0\r\n" + "r1~p2~Revenue~3.0\r\n" + "r1~p1~Comment~Great Product\r\n" + "r1~p2~Comment~" + ) df = build_dataframe_from_csv(raw_csv, dtype={"Revenue": float}, shaped=True) expected_df = pd.DataFrame( { - "Region": ["r1", "r1"], + "Region": ["r1", "r1"], "Product": ["p1", "p2"], "Comment": ["Great Product", ""], "Revenue": 
[1.00000, 3.00000], @@ -175,7 +181,7 @@ def test_build_dataframe_from_csv_shaped(self): expected_df = pd.DataFrame( { - "Region": ["r1", "r1", "r2"], + "Region": ["r1", "r1", "r2"], "Product": ["p1", "p2", "p2"], "Revenue": [1.00000, 3.00000, 4.00000], } @@ -189,7 +195,7 @@ def test_build_dataframe_from_csv_shaped_with_duplicates(self): expected_df = pd.DataFrame( { - "Region": ["r1", "r1", "r2"], + "Region": ["r1", "r1", "r2"], "Product": ["p1", "p1", "p2"], "Revenue": [1.00000, 1.00000, 4.00000], } diff --git a/docs/how-to-contribute.md b/docs/how-to-contribute.md index 325525d9..85d8c98d 100644 --- a/docs/how-to-contribute.md +++ b/docs/how-to-contribute.md @@ -1,22 +1,243 @@ -# How to Contribute +# Contributing to TM1Py -## Start Contributing +Thank you for your interest in contributing to TM1Py! This document provides guidelines and information about the +contribution process. -TM1py is an open-source project. It thrives on contributions from the TM1 community. If you find a bug or feel like you can contribute a missing feature, please follow these steps: +## Table of Contents -1. **Fork the Repository**: Start by forking the [TM1py GitHub repository](https://github.com/cubewise-code/tm1py). -2. **Make Your Changes**: Update the code to fix the bug or add your feature. -3. **Create a Pull Request**: Once your changes are ready, create a pull request so we can review and merge them. +- [Getting Started](#getting-started) +- [Development Setup](#development-setup) +- [Making Changes](#making-changes) +- [Pull Request Process](#pull-request-process) +- [Release Process](#release-process) +- [Code Style](#code-style) -## Code Style and Linting +## Getting Started -To ensure consistency across the codebase, we use the following tools: +1. Fork the repository +2. Clone your fork: `git clone https://github.com/YOUR_USERNAME/tm1py.git` +3. 
Add upstream remote: `git remote add upstream https://github.com/cubewise-code/tm1py.git` -- **[Black](https://black.readthedocs.io/en/stable/)**: An auto-formatter for Python code. Please format your code using Black before submitting a pull request. -- **[Ruff](https://beta.ruff.rs/docs/)**: A fast Python linter. Make sure your code passes all linting checks. +## Development Setup -## Editor Integration +1. **Create a virtual environment:** + ```bash + python -m venv venv + source venv/bin/activate # On Windows: venv\Scripts\activate + ``` -Both **Black** and **Ruff** have excellent integrations with popular editors like [VS Code](https://code.visualstudio.com/) and [PyCharm](https://www.jetbrains.com/pycharm/). Setting up these tools in your editor ensures that your code is automatically formatted and linted as you work. +2. **Install TM1Py in development mode:** + ```bash + pip install -e .[pandas,dev] + ``` -Thank you for contributing to TM1py and helping to make it better! +3. **Install development tools:** + ```bash + pip install black ruff pytest + ``` + +## Making Changes + +1. **Create a branch:** + ```bash + git checkout -b your-feature-name + ``` + +2. **Make your changes** following the code style guidelines + +3. **Format your code:** + ```bash + black . + ruff check --fix . + ``` + +4. **Test your changes** (if you have access to a TM1 instance): + ```bash + pytest Tests/ + ``` + +## Pull Request Process + +### 1. Before Opening a PR + +- Ensure your code follows the style guidelines +- Run formatting tools (Black, Ruff) +- Update documentation if needed +- Write clear, descriptive commit messages + +### 2. Opening a PR + +- Push your branch to your fork +- Open a PR against the `master` branch +- Fill out the PR template (if available) +- Describe what your PR does and why + +**Note**: You don't need to follow any special commit message format! Maintainers will handle versioning via labels. + +### 3. 
PR Validation + +Your PR will automatically trigger validation checks: + +- **Code formatting** (Black) +- **Linting** (Ruff) +- **Future**: Unit tests (when available) + +These checks must pass before your PR can be merged. + +### 4. Review Process + +- Maintainers will review your PR +- Address any feedback or requested changes +- Once approved, a maintainer will add the appropriate release label and merge + +## Release Process + +TM1Py uses **automated nightly releases** with semantic versioning. + +### For Contributors + +**You don't need to do anything special!** Just: + +1. Create your PR +2. Wait for review +3. That's it! + +No need for special: + +- Commit message formats +- Branch naming conventions +- Version bumping +- Release notes + +### For Maintainers + +**IMPORTANT**: Before merging a PR, you must add the appropriate label to control the version bump. + +| Label | Version Bump | When to Use | Example | +|------------------------------|-------------------|-------------------------------------------------------------------|-------------------------------------------| +| `release:patch` | `2.1.5` → `2.1.6` | Bug fixes, small improvements | Fix pandas compatibility issue | +| `release:minor` | `2.1.5` → `2.2.0` | New features, enhancements | Add support for new TM1 REST API endpoint | +| `release:major` | `2.1.5` → `3.0.0` | Breaking changes, major updates | Remove deprecated methods, change API | +| `skip-release` (or no label) | No version bump | Docs, tests, CI changes, or changes you don't want to release yet | Update README, fix typo in docs | + +**Default behavior**: If no label is added, **NO release will be created**. This is a safe default to prevent accidental +releases. + +**To create a release**: You must explicitly add `release:patch`, `release:minor`, or `release:major` label. 
+ +### Release Timeline + +**Daily cycle:** + +``` +Day 1: + 10:00 AM - PR #123 (bug fix, labeled 'release:patch') merged + 2:00 PM - PR #124 (feature, labeled 'release:minor') merged + 5:00 PM - PR #125 (docs update, no label) merged + + 4:00 AM (next day) - Nightly workflow starts: + - Runs full integration tests (2-3 hours) + - If tests pass: + → Creates release 2.2.0 (because of the minor label) + → Publishes to PyPI (includes all 3 PRs) + → Updates documentation + 7:00 AM - Users can: pip install --upgrade TM1py + +Day 2: + 11:00 AM - PR #126 (minor fix, no label) merged + 3:00 PM - PR #127 (test update, no label) merged + + 4:00 AM (next day) - Nightly workflow starts: + - No release labels found + - Skips release (no version bump, no PyPI publish) +``` + +### How It Works + +1. **Merge to master** → PR is merged after validation passes +2. **Nightly at 4 AM CET** → Automated workflow runs: + - Checks for new commits since last release + - Runs full integration test suite + - Determines version bump from PR labels + - Creates GitHub Release + - Publishes to PyPI +3. **Next morning** → New version available to users! + +## Code Style + +### Python Code Style + +- **Line length**: 120 characters (configured in Black) +- **Formatting**: Use Black for automatic formatting +- **Linting**: Use Ruff for import sorting and error detection +- **Target version**: Python 3.7+ + +### Running Code Style Tools + +```bash +# Format code +black . + +# Check imports and linting +ruff check . + +# Auto-fix linting issues +ruff check --fix . +``` + +### Import Organization + +Imports should be organized by Ruff/isort: + +1. Standard library imports +2. Third-party imports +3. 
Local application imports + +Example: + +```python +# Standard library imports +from pathlib import Path +from typing import List + +# Third-party imports +import pandas as pd + +# Local application imports +from TM1py import TM1Service +``` + +## Testing + +### Running Tests + +If you have access to TM1 instances: + +```bash +# Run all tests +pytest Tests/ + +# Run specific test file +pytest Tests/CellService_test.py + +# Run with verbose output +pytest Tests/ -v +``` + +### Test Configuration + +Tests require TM1 connection configuration. See `Tests/resources/` for setup instructions. + +## Questions? + +- Check existing [Issues](https://github.com/cubewise-code/tm1py/issues) +- Review [Discussions](https://github.com/cubewise-code/tm1py/discussions) +- Read the [Documentation](https://tm1py.readthedocs.io/) + +## License + +By contributing, you agree that your contributions will be licensed under the MIT License. + +--- + +Thank you for contributing to TM1Py! 🎉 diff --git a/pyproject.toml b/pyproject.toml index 784d2f33..a8c354a8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,7 +1,57 @@ [build-system] -requires = ["setuptools", "wheel"] +requires = ["setuptools>=61.0", "wheel"] build-backend = "setuptools.build_meta" +[project] +name = "TM1py" +version = "2.2.0" +description = "A python module for TM1." 
+readme = "README.md" +license = {text = "MIT"} +authors = [ + {name = "Marius Wirtz", email = "MWirtz@cubewise.com"} +] +keywords = ["TM1", "IBM Cognos TM1", "Planning Analytics", "PA", "Cognos"] +classifiers = [ + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Programming Language :: Python", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Natural Language :: English", +] +requires-python = ">=3.7" +dependencies = [ + "ijson", + "requests", + "pytz", + 'requests_negotiate_sspi; platform_system=="Windows"', + "mdxpy>=1.3.1", +] + +[project.optional-dependencies] +pandas = ["pandas"] +dev = [ + "pytest", + "pytest-xdist", + "python-dateutil", + "black", + "ruff", +] + +[project.urls] +Homepage = "https://github.com/cubewise-code/tm1py" +Download = "https://github.com/cubewise-code/tm1py/releases/latest" + +[tool.setuptools.packages.find] +include = ["TM1py*"] + [tool.black] line-length = 120 target-version = ["py37"] diff --git a/setup.py b/setup.py index 53968b9d..60684932 100644 --- a/setup.py +++ b/setup.py @@ -1,54 +1,3 @@ from setuptools import setup -SCHEDULE_VERSION = "2.2" -SCHEDULE_DOWNLOAD_URL = "https://github.com/Cubewise-code/TM1py/tarball/" + SCHEDULE_VERSION - -with open("README.md", "r") as f: - long_description = f.read() - -setup( - name="TM1py", - packages=["TM1py", "TM1py/Exceptions", "TM1py/Objects", "TM1py/Services", "TM1py/Utils"], - version=SCHEDULE_VERSION, - description="The python module for TM1.", - long_description=long_description, - long_description_content_type="text/markdown", - license="MIT", - author="Marius Wirtz", - author_email="MWirtz@cubewise.com", - 
url="https://github.com/cubewise-code/tm1py", - download_url=SCHEDULE_DOWNLOAD_URL, - keywords=["TM1", "IBM Cognos TM1", "Planning Analytics", "PA", "Cognos"], - classifiers=[ - "Intended Audience :: Developers", - "License :: OSI Approved :: MIT License", - "Programming Language :: Python", - "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", - "Natural Language :: English", - ], - install_requires=[ - "ijson", - "requests", - "pytz", - 'requests_negotiate_sspi;platform_system=="Windows"', - "mdxpy>=1.3.1", - ], - extras_require={ - "pandas": ["pandas"], - "dev": [ - "pytest", - "pytest-xdist", - "python-dateutil", - "black", - "ruff", - ], - }, - python_requires=">=3.6", -) +setup()