diff --git a/.github/workflows/manual-cleanup.yml b/.github/workflows/manual-cleanup.yml index 545c2623..d1a26296 100644 --- a/.github/workflows/manual-cleanup.yml +++ b/.github/workflows/manual-cleanup.yml @@ -22,6 +22,9 @@ on: jobs: cleanup: + permissions: + contents: write + pull-requests: write name: Disk Cleanup runs-on: ubuntu-latest steps: diff --git a/.github/workflows/scc-checker.yml b/.github/workflows/scc-checker.yml index 2fa37de4..a01a37e2 100644 --- a/.github/workflows/scc-checker.yml +++ b/.github/workflows/scc-checker.yml @@ -6,6 +6,9 @@ on: jobs: scc-check: + permissions: + contents: write + pull-requests: write runs-on: ubuntu-latest steps: - uses: actions/checkout@v5 diff --git a/CHANGELOG.md b/CHANGELOG.md index ca9bd182..72f73860 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,59 @@ +## [2.3.0-stage.5](https://github.com/aziontech/lib/compare/v2.3.0-stage.4...v2.3.0-stage.5) (2025-12-03) + + +### Bug Fixes + +* add support for ArrayBuffer and Uint8Array content types in Storage API (#323) ([2623b63](https://github.com/aziontech/lib/commit/2623b63837f2f9bd12672282599ee627645cc3d2)) + +## [2.3.0-stage.4](https://github.com/aziontech/lib/compare/v2.3.0-stage.3...v2.3.0-stage.4) (2025-11-28) + + +### Bug Fixes + +* adjust Node.js Polyfills and add Unicode Character Sanitization (#320) ([8bd151e](https://github.com/aziontech/lib/commit/8bd151ead39ff6c0879865aeb99a19512100bf2d)) + +## [2.3.0-stage.3](https://github.com/aziontech/lib/compare/v2.3.0-stage.2...v2.3.0-stage.3) (2025-11-21) + + +### Features + +* add AsyncLocalStorage snapshot polyfill and zlib inflateSync support (#317) ([e507301](https://github.com/aziontech/lib/commit/e507301be5ee6ba8875d1918bdbdbd710408d32b)) + +## [2.3.0-stage.2](https://github.com/aziontech/lib/compare/v2.3.0-stage.1...v2.3.0-stage.2) (2025-11-18) + + +### Features + +* add SvelteKit preset with custom adapter (#315) 
([b46122b](https://github.com/aziontech/lib/commit/b46122b2a18c5f9f8c83e5c3ed2a4a56074b2d43)) + +## [2.3.0-stage.1](https://github.com/aziontech/lib/compare/v2.2.4-stage.3...v2.3.0-stage.1) (2025-11-13) + + +### Features + +* improve validation error messages with detailed formatting and fix cache settings (#314) ([f6a53cf](https://github.com/aziontech/lib/commit/f6a53cf768aab395039b679a10a544252593b1b3)) + +### [2.2.4-stage.3](https://github.com/aziontech/lib/compare/v2.2.4-stage.2...v2.2.4-stage.3) (2025-11-11) + + +### Bug Fixes + +* add required field validation for config schema (#313) ([b223cfd](https://github.com/aziontech/lib/commit/b223cfdf7f2f04187eb9326e603922fa9c9b96f2)) + +### [2.2.4-stage.2](https://github.com/aziontech/lib/compare/v2.2.4-stage.1...v2.2.4-stage.2) (2025-11-11) + + +### Bug Fixes + +* cmd manifest transform (#312) ([18b516a](https://github.com/aziontech/lib/commit/18b516ac28cb0d818e1b569df58c6e18538b4c4f)) + +### [2.2.4-stage.1](https://github.com/aziontech/lib/compare/v2.2.3...v2.2.4-stage.1) (2025-11-10) + + +### Bug Fixes + +* improve performance of pem cleanup in the jws package (#311) ([4e24b26](https://github.com/aziontech/lib/commit/4e24b266882692c37488295f500068dbffc59a0d)) + ### [2.2.3](https://github.com/aziontech/lib/compare/v2.2.2...v2.2.3) (2025-11-06) diff --git a/docs/release-diagram.md b/docs/release-diagram.md new file mode 100644 index 00000000..2460cad9 --- /dev/null +++ b/docs/release-diagram.md @@ -0,0 +1,93 @@ +# Release Workflow Diagram + +```mermaid +graph TD + A[Open PR to main or stage] + A --> B[Run CI] + B --> C[Lint and build] + C --> D[Unit tests] + D --> E[E2E tests] + E --> F[PR approved and merged] + + F --> G[Push to main or stage] + G --> H[Release job] + H --> I[Checkout] + I --> J[Setup Node 20] + J --> K[Install deps] + K --> L[Build npm run compile] + L --> M[Semantic release] + M --> M2[Publish package to npm] + + M2 --> N{Branch is main} + N -->|No| O[End] + N -->|Yes| P[Sync stage job] + P --> 
Q[Checkout] + Q --> R[Configure Git] + R --> S[Fetch and pull main] + S --> T[Checkout stage] + T --> U[Merge main into stage] + U --> V[Push stage] + + style A fill:#0d47a1,stroke:#0b3c87,stroke-width:1px,color:#ffffff + style B fill:#1565c0,stroke:#0d47a1,stroke-width:1px,color:#ffffff + style C fill:#1565c0,stroke:#0d47a1,stroke-width:1px,color:#ffffff + style D fill:#1565c0,stroke:#0d47a1,stroke-width:1px,color:#ffffff + style E fill:#1565c0,stroke:#0d47a1,stroke-width:1px,color:#ffffff + style F fill:#1976d2,stroke:#0d47a1,stroke-width:1px,color:#ffffff + + style G fill:#1b5e20,stroke:#0d3a12,stroke-width:1px,color:#ffffff + style H fill:#2e7d32,stroke:#1b5e20,stroke-width:1px,color:#ffffff + style I fill:#2e7d32,stroke:#1b5e20,stroke-width:1px,color:#ffffff + style J fill:#2e7d32,stroke:#1b5e20,stroke-width:1px,color:#ffffff + style K fill:#2e7d32,stroke:#1b5e20,stroke-width:1px,color:#ffffff + style L fill:#2e7d32,stroke:#1b5e20,stroke-width:1px,color:#ffffff + style M fill:#388e3c,stroke:#1b5e20,stroke-width:1px,color:#ffffff + style M2 fill:#004d40,stroke:#00251a,stroke-width:1px,color:#ffffff + + style N fill:#e65100,stroke:#bf360c,stroke-width:1px,color:#ffffff + style O fill:#424242,stroke:#212121,stroke-width:1px,color:#ffffff + style P fill:#f9a825,stroke:#f57f17,stroke-width:1px,color:#000000 + style Q fill:#f9a825,stroke:#f57f17,stroke-width:1px,color:#000000 + style R fill:#f9a825,stroke:#f57f17,stroke-width:1px,color:#000000 + style S fill:#f9a825,stroke:#f57f17,stroke-width:1px,color:#000000 + style T fill:#f9a825,stroke:#f57f17,stroke-width:1px,color:#000000 + style U fill:#f9a825,stroke:#f57f17,stroke-width:1px,color:#000000 + style V fill:#f9a825,stroke:#f57f17,stroke-width:1px,color:#000000 +``` + +## Description + +This GitHub Actions workflow automates the CI and release process for the project. + +- **Trigger conditions** + + - CI (`ci.yml`) runs on pull requests targeting `main` or `stage`. 
+ - Release (`release.yml`) runs on every `push` to the `main` or `stage` branches. + +- **Release flow (simplified)** + + - Checkout code and configure Node.js 20. + - Install dependencies and build the project (`npm run compile`). + - Run `semantic-release` to determine the next version and generate release artifacts. + - **Publish the package to the npm registry** using the configured npm token. + - If the push is on `main`, run the `sync-stage` job to merge `main` into `stage` and push the updated `stage` branch. + +- **Release job** (runs on every push to `main` or `stage`) + + - **Checkout** the repository with full history (`fetch-depth: 0`) using the `CUSTOM_GITHUB_TOKEN`. + - **Setup Node.js** version 20 and enable npm cache for faster installs. + - **Install dependencies** using `npm install`. + - **Build the project** by running `npm run compile`. + - **Publish a release** using `npx semantic-release`, authenticated with `CUSTOM_GITHUB_TOKEN` and `NPM_TOKEN`. + +- **Sync-stage job** (runs only when the push is to `main`) + - Declared with `needs: release`, so it runs only if the `release` job completes successfully. + - Additionally guarded by `if: github.ref == 'refs/heads/main'`, so it is skipped for pushes to `stage`. + - **Checkout** the repository with full history using `CUSTOM_GITHUB_TOKEN`. + - **Configure Git** user name and email for automated commits. + - **Synchronize branches** by merging `main` into `stage`: + - Fetch latest changes from `origin`. + - Pull the latest `main` branch. + - Check out the `stage` branch. + - Merge `main` into `stage` with a predefined commit message (including `[skip ci]`) and conflict strategy `-Xtheirs`. + - Push the updated `stage` branch back to `origin`. 
diff --git a/package.json b/package.json index 2ea017ec..1dfe5b3b 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "azion", - "version": "2.2.3", + "version": "2.3.0-stage.5", "description": "Azion Packages for Edge Computing.", "scripts": { "prepare": "husky", @@ -77,7 +77,8 @@ "packages/unenv-preset/src/polyfills/*", "packages/bundler/src/polyfills/*", "packages/presets/src/presets/next/*", - "packages/presets/src/presets/nuxt/nitro/*" + "packages/presets/src/presets/nuxt/nitro/*", + "packages/presets/src/presets/svelte/kit/*" ], "exports": { ".": { @@ -90,6 +91,8 @@ }, "./presets/preset/*": "./packages/presets/dist/presets/*", "./preset/nuxt/*": "./packages/presets/src/presets/nuxt/nitro/*/index.js", + "./preset/sveltekit": "./packages/presets/src/presets/svelte/kit/index.js", + "./preset/sveltekit/cache": "./packages/presets/src/presets/svelte/kit/cache/index.js", "./bundler": { "require": "./packages/bundler/dist/index.cjs", "import": "./packages/bundler/dist/index.js" diff --git a/packages/bundler/src/bundlers/esbuild/esbuild.ts b/packages/bundler/src/bundlers/esbuild/esbuild.ts index 3e64af0a..9395749c 100644 --- a/packages/bundler/src/bundlers/esbuild/esbuild.ts +++ b/packages/bundler/src/bundlers/esbuild/esbuild.ts @@ -8,6 +8,7 @@ import AzionEsbuildConfig from './esbuild.config'; import AzionPolyfillPlugin from './plugins/azion-polyfills'; import OptionalChainingAssignmentPlugin from './plugins/babel-custom'; import NodePolyfillPlugin from './plugins/node-polyfills'; +import SanitizeWorker from './plugins/sanitize-worker'; // eslint-disable-next-line interface ESBuildConfig extends esbuild.BuildOptions {} @@ -23,6 +24,7 @@ interface ESBuildPluginClasses { NodePolyfillsPlugin: (isProduction: boolean) => ESBuildPlugin; AzionPolyfillsPlugin: (isProduction: boolean) => ESBuildPlugin; OptionalChainingAssignmentPlugin: () => ESBuildPlugin; + SanitizeWorker: (sanitize: boolean, options?: { outfile?: string }) => ESBuildPlugin; } // Create 
esbuild-specific plugins @@ -30,6 +32,7 @@ const bundlerPlugins = createBundlerPlugins { + const NAME = 'sanitize-worker'; + + const sanitizeUTF8 = (content: string) => { + let escapedCount = 0; + const chars = Array.from(content); + const result = chars + .map((char) => { + const codePoint = char.codePointAt(0)!; + // Escape characters outside the Basic Multilingual Plane (BMP) + if (codePoint >= 0x10000) { + escapedCount++; + return `\\u{${codePoint.toString(16)}}`; + } + return char; + }) + .join(''); + return { result, escapedCount }; + }; + + const processFile = (filePath: string) => { + const content = fs.readFileSync(filePath, 'utf-8'); + const { result, escapedCount } = sanitizeUTF8(content); + + if (escapedCount > 0) { + console.log(`[sanitize-worker] Escaped ${escapedCount} Unicode character(s) in ${filePath}`); + fs.writeFileSync(filePath, result, 'utf-8'); + } + }; + + return { + name: NAME, + setup(build: PluginBuild) { + build.onEnd(async (result) => { + if (!sanitize || result.errors.length > 0) { + return; + } + if (build.initialOptions.entryPoints) { + for (const filePath of Object.keys(build.initialOptions.entryPoints)) { + processFile(`${filePath}.js`); + } + } else { + const outfile = options?.outfile || build.initialOptions.outfile; + if (outfile) { + processFile(outfile); + } + } + }); + }, + }; +}; + +export default SanitizeWorker; diff --git a/packages/bundler/src/helpers/bundler-utils.ts b/packages/bundler/src/helpers/bundler-utils.ts index b0fe76ce..176036a2 100644 --- a/packages/bundler/src/helpers/bundler-utils.ts +++ b/packages/bundler/src/helpers/bundler-utils.ts @@ -45,6 +45,18 @@ export const createBundlerPlugins = < ? // eslint-disable-next-line @typescript-eslint/no-explicit-any [(pluginsClasses as any).OptionalChainingAssignmentPlugin()] : []), + // Add sanitize worker plugin if available (esbuild only) + ...('SanitizeWorker' in pluginsClasses + ? 
[ + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (pluginsClasses as any).SanitizeWorker(process.env.AZ_ENABLE_SANITIZE_WORKER ?? false, { + options: { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + outfile: (config as any).outfile, + }, + }), + ] + : []), ] as unknown as typeof config.plugins; return config; }; diff --git a/packages/bundler/src/polyfills/azion/storage/context/storage.context.js b/packages/bundler/src/polyfills/azion/storage/context/storage.context.js index d8555b3b..0ecf0648 100644 --- a/packages/bundler/src/polyfills/azion/storage/context/storage.context.js +++ b/packages/bundler/src/polyfills/azion/storage/context/storage.context.js @@ -43,7 +43,7 @@ class StorageContext { * Stores a object in the storage. * @async * @param {string} key - The key to store the value under. - * @param {ReadableStream|string} value - The value to store (can be a ReadableStream or a string). + * @param {ArrayBuffer | Uint8Array | ReadableStream} value - The value to store (can be a ReadableStream, ArrayBuffer, or Uint8Array). * @param {object} options - Additional options for storing the object. * @returns {Promise} A promise that resolves to an object representing the stored data. * @throws {Error} Throws an error if the storing process fails. @@ -57,7 +57,8 @@ class StorageContext { const writeStream = fs.createWriteStream(`${this.#pathBucket}/${key}`); await pipeline(value, writeStream); } else { - await fs.promises.writeFile(`${this.#pathBucket}/${key}`, value); + const fileContent = value instanceof ArrayBuffer ? Buffer.from(value) : value; + await fs.promises.writeFile(`${this.#pathBucket}/${key}`, fileContent); } const responseMetadata = await StorageContext.putMetadata(this.#pathBucket, key, options, this.#metadataPrefix); @@ -103,7 +104,7 @@ class StorageContext { * Generates a response object for the retrieved asset. * @static * @async - * @param {ReadableStream|string} value - The value of the asset. 
+ * @param {ArrayBuffer | Uint8Array | ReadableStream} value - The value of the asset. * @param {object} metadataStore - Metadata associated with the asset. * @returns {Promise} A promise that resolves to an object representing the response asset. */ diff --git a/packages/bundler/src/polyfills/crypto/context/crypto.context.js b/packages/bundler/src/polyfills/crypto/context/crypto.context.js index 3981729c..eab08697 100644 --- a/packages/bundler/src/polyfills/crypto/context/crypto.context.js +++ b/packages/bundler/src/polyfills/crypto/context/crypto.context.js @@ -1,5 +1,6 @@ import * as crypto from 'node:crypto'; +export default crypto; export var { Cipher } = crypto; export var { Decipher } = crypto; export var { DiffieHellman } = crypto; @@ -35,39 +36,8 @@ export var { getRandomValues } = crypto; export var { randomUUID } = crypto; export var { generateKeyPair } = crypto; -export default { - Cipher, - Decipher, - DiffieHellman, - DiffieHellmanGroup, - Hash, - Hmac, - Sign, - Verify, - constants, - createCipheriv, - createDecipheriv, - createDiffieHellman, - createDiffieHellmanGroup, - createECDH, - createHash, - createHmac, - createSign, - createVerify, - getCiphers, - getDiffieHellman, - getHashes, - pbkdf2, - pbkdf2Sync, - privateDecrypt, - privateEncrypt, - pseudoRandomBytes, - publicDecrypt, - publicEncrypt, - randomBytes, - randomFill, - randomFillSync, - getRandomValues, - randomUUID, - generateKeyPair, -}; +// Export Web Crypto API from globalThis.crypto (edge runtime) +export var webcrypto = (typeof globalThis !== 'undefined' && globalThis.crypto) || crypto.webcrypto; +export var subtle = webcrypto?.subtle; +export var CryptoKey = (typeof globalThis !== 'undefined' && globalThis.CryptoKey) || crypto.CryptoKey; +export var KeyObject = crypto.KeyObject; diff --git a/packages/bundler/src/polyfills/crypto/crypto.polyfills.js b/packages/bundler/src/polyfills/crypto/crypto.polyfills.js index ac081ace..2d48db4b 100644 --- 
a/packages/bundler/src/polyfills/crypto/crypto.polyfills.js +++ b/packages/bundler/src/polyfills/crypto/crypto.polyfills.js @@ -39,6 +39,10 @@ export var { randomFillSync } = CRYPTO_CONTEXT.cryptoContext; export var { getRandomValues } = CRYPTO_CONTEXT.cryptoContext; export var { randomUUID } = CRYPTO_CONTEXT.cryptoContext; export var { generateKeyPair } = CRYPTO_CONTEXT.cryptoContext; +export var { CryptoKey } = CRYPTO_CONTEXT.cryptoContext; +export var { KeyObject } = CRYPTO_CONTEXT.cryptoContext; +export var { webcrypto } = CRYPTO_CONTEXT.cryptoContext; +export var { subtle } = CRYPTO_CONTEXT.cryptoContext; export default { Cipher, @@ -75,4 +79,8 @@ export default { getRandomValues, randomUUID, generateKeyPair, + CryptoKey, + KeyObject, + subtle, + webcrypto, }; diff --git a/packages/config/README.md b/packages/config/README.md index d51320d8..d59ab274 100644 --- a/packages/config/README.md +++ b/packages/config/README.md @@ -78,7 +78,6 @@ const config = defineConfig({ functionsEnabled: false, applicationAcceleratorEnabled: false, imageProcessorEnabled: false, - tieredCacheEnabled: false, cache: [ { name: 'mycache', @@ -102,6 +101,10 @@ const config = defineConfig({ option: 'allowlist', list: ['session', 'user'], }, + tieredCache: { + enabled: true, + topology: 'nearest-region', + }, }, ], rules: { @@ -624,7 +627,6 @@ Type definition for application configuration. - `functionsEnabled?: boolean` - Whether functions are enabled (default: false). - `applicationAcceleratorEnabled?: boolean` - Whether application accelerator is enabled (default: false). - `imageProcessorEnabled?: boolean` - Whether image processor is enabled (default: false). -- `tieredCacheEnabled?: boolean` - Whether tiered cache is enabled (default: false). - `cache?: AzionCache[]` - List of cache configurations. - `rules?: AzionRules` - Request and response rules. - `deviceGroups?: DeviceGroup[]` - List of device groups for mobile detection. 
@@ -722,6 +724,9 @@ Type definition for the cache configuration. - `cacheByQueryString?: CacheByQueryStringConfig` - Cache by query string settings. - `option: 'ignore' | 'all' | 'allowlist' | 'denylist'` - Cache by query string option. - `list?: string[]` - List of query string parameters (required when option is 'allowlist' or 'denylist'). +- `tieredCache?: TieredCacheConfig` - Tiered cache settings. + - `enabled: boolean` - Whether tiered cache is enabled. + - `topology: 'nearest-region' | 'br-east-1' | 'us-east-1'` - Tiered cache topology. ### `AzionRules` diff --git a/packages/config/src/configProcessor/helpers/schema.ts b/packages/config/src/configProcessor/helpers/schema.ts index 8453b16e..ca46ffc6 100644 --- a/packages/config/src/configProcessor/helpers/schema.ts +++ b/packages/config/src/configProcessor/helpers/schema.ts @@ -629,13 +629,23 @@ const azionConfigSchema = { errorMessage: "The 'enabled' field must be a boolean.", }, topology: { - type: 'string', - enum: ['nearest-region', 'us-east-1', 'br-east-1'], + type: ['string', 'null'], + enum: ['nearest-region', 'us-east-1', 'br-east-1', null], default: 'nearest-region', errorMessage: - "The 'topology' field must be one of 'nearest-region', 'br-east-1', 'us-east-1'.", + "The 'topology' field must be one of 'nearest-region', 'br-east-1', 'us-east-1' or null.", }, }, + required: ['enabled'], + if: { + properties: { enabled: { const: true } } + }, + then: { + required: ['enabled', 'topology'], + errorMessage: { + required: "When 'enabled' is true, 'topology' is required in the 'tiered_cache' object." 
+ } + }, additionalProperties: false, errorMessage: { additionalProperties: "No additional properties are allowed in the 'tiered_cache' object.", @@ -877,7 +887,8 @@ const azionConfigSchema = { required: ['name'], additionalProperties: false, }, - errorMessage: "The 'applications' field must be an array of application objects", + minItems: 1, + errorMessage: "The 'applications' field must be an array of application objects with at least one item", }, workloads: { type: 'array', @@ -915,7 +926,6 @@ const azionConfigSchema = { maxLength: 250, errorMessage: 'Each domain must be a string between 1 and 250 characters', }, - minItems: 1, errorMessage: "The 'domains' field must be an array of domain strings", }, tls: { @@ -1002,7 +1012,7 @@ const azionConfigSchema = { certificate: { type: ['integer', 'null'], minimum: 1, - }, + }, crl: { type: ['array', 'null'], items: { type: 'integer' }, @@ -1089,20 +1099,22 @@ const azionConfigSchema = { required: "The 'name' and 'strategy' fields are required in each deployment", }, }, - errorMessage: "The 'deployments' field must be an array of deployment objects", + minItems: 1, + errorMessage: "The 'deployments' field must be an array of deployment objects with at least one item.", }, }, - required: ['name'], + required: ['name', 'deployments'], additionalProperties: false, errorMessage: { additionalProperties: 'No additional properties are allowed in workload items', required: { name: "The 'name' field is required in workloads", - domains: "The 'domains' field is required in workloads", + deployments: "The 'deployments' field is required in workloads", }, }, }, - errorMessage: "The 'workloads' field must be an array of workloads items.", + minItems: 1, + errorMessage: "The 'workloads' field must be an array of workloads items with at least one item.", }, purge: { type: 'array', @@ -1806,6 +1818,7 @@ const azionConfigSchema = { functions: { type: 'array', items: schemaFunction, + errorMessage: "The 'functions' field must be an 
array of function objects with at least one item", }, customPages: { type: 'array', @@ -1916,6 +1929,7 @@ const azionConfigSchema = { }, }, additionalProperties: false, + required: ['build', 'applications', 'workloads'], errorMessage: { additionalProperties: 'Config can only contain the following properties: build, functions, applications, workloads, purge, edgefirewall, networkList, waf, connectors, customPages', diff --git a/packages/config/src/configProcessor/helpers/schemaManifest.ts b/packages/config/src/configProcessor/helpers/schemaManifest.ts index 179cf28b..d6db8000 100644 --- a/packages/config/src/configProcessor/helpers/schemaManifest.ts +++ b/packages/config/src/configProcessor/helpers/schemaManifest.ts @@ -244,12 +244,16 @@ const schemaStorageManifest = { enum: ['read_only', 'read_write', 'restricted'], errorMessage: "The 'edge_access' field must be one of: read_only, read_write, restricted.", }, + prefix: { + type: 'string', + errorMessage: "The 'prefix' field must be a string.", + }, }, - required: ['name', 'dir'], + required: ['name', 'dir', 'prefix'], additionalProperties: false, errorMessage: { additionalProperties: 'No additional properties are allowed in storage items.', - required: "The 'name' and 'dir' fields are required.", + required: "The 'name', 'dir' and 'prefix' fields are required.", }, }; @@ -626,13 +630,26 @@ const schemaApplicationCacheSettings = { tiered_cache: { type: ['object', 'null'], properties: { + enabled: { + type: 'boolean', + errorMessage: "The 'enabled' field must be a boolean.", + }, topology: { - type: 'string', - enum: TIERED_CACHE_TOPOLOGY, - errorMessage: "The 'topology' must be one of: nearest-region, us-east-1, br-east-1.", + type: ['string', 'null'], + enum: [...TIERED_CACHE_TOPOLOGY, null], + errorMessage: "The 'topology' must be one of: nearest-region, us-east-1, br-east-1 or null.", }, }, - required: ['topology'], + required: ['enabled'], + if: { + properties: { enabled: { const: true } } + }, + then: { + 
required: ['enabled', 'topology'], + errorMessage: { + required: "When 'enabled' is true, 'topology' is required in the 'tiered_cache' object." + } + }, additionalProperties: false, }, }, @@ -1237,7 +1254,6 @@ const schemaWorkloadManifest = { maxLength: 250, errorMessage: 'Each domain must be a string between 1 and 250 characters', }, - minItems: 1, errorMessage: "The 'domains' field must be an array of domain strings", }, tls: { @@ -1726,7 +1742,8 @@ const schemaManifest = { applications: { type: 'array', items: schemaApplicationManifest, - errorMessage: "The 'applications' field must be an array of application items.", + minItems: 1, + errorMessage: "The 'applications' field must be an array of application items with at least one item.", }, workloads: { type: 'array', @@ -1769,19 +1786,16 @@ const schemaManifest = { type: 'object', properties: { application: { - type: 'integer', - minimum: 1, - errorMessage: "The 'application' field must be an integer >= 1.", + type: 'string', + errorMessage: "The 'application' field must be a string.", }, firewall: { - type: ['integer', 'null'], - minimum: 1, - errorMessage: "The 'firewall' field must be an integer >= 1 or null.", + type: ['string', 'null'], + errorMessage: "The 'firewall' field must be a string or null.", }, custom_page: { - type: ['integer', 'null'], - minimum: 1, - errorMessage: "The 'custom_page' field must be an integer >= 1 or null.", + type: ['string', 'null'], + errorMessage: "The 'custom_page' field must be a string or null.", }, }, required: ['application'], @@ -1807,7 +1821,9 @@ const schemaManifest = { required: "The 'name' and 'strategy' fields are required in each workload deployment.", }, }, - errorMessage: "The 'workload_deployments' field must be an array of workload deployment objects.", + minItems: 1, + errorMessage: + "The 'workload_deployments' field must be an array of workload deployment objects with at least one item.", }, connectors: { type: 'array', @@ -1859,9 +1875,8 @@ const 
schemaManifest = { type: 'object', properties: { connector: { - type: 'integer', - minimum: 1, - errorMessage: "The 'connector' field must be an integer >= 1.", + type: 'string', + errorMessage: "The 'connector' field must be a string.", }, ttl: { type: 'integer', @@ -1931,6 +1946,11 @@ const schemaManifest = { errorMessage: "The 'storage' field must be an array of storage items.", }, }, + required: ['build', 'applications', 'workloads', 'workload_deployments'], + errorMessage: { + required: + "The 'build', 'applications', 'workloads', and 'workload_deployments' fields are required in the manifest.", + }, }; export { schemaManifest }; diff --git a/packages/config/src/configProcessor/processStrategy/implementations/application/cacheProcessConfigStrategy.ts b/packages/config/src/configProcessor/processStrategy/implementations/application/cacheProcessConfigStrategy.ts index 44ede93e..5f89cb0a 100644 --- a/packages/config/src/configProcessor/processStrategy/implementations/application/cacheProcessConfigStrategy.ts +++ b/packages/config/src/configProcessor/processStrategy/implementations/application/cacheProcessConfigStrategy.ts @@ -52,7 +52,8 @@ class CacheProcessConfigStrategy extends ProcessConfigStrategy { cookie_names: cache?.cacheByCookie?.list || [], }; - return { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const cachePayload: any = { name: cache.name, browser_cache: { behavior: cache?.browser ? 
'override' : 'honor', @@ -71,7 +72,6 @@ class CacheProcessConfigStrategy extends ProcessConfigStrategy { }, tiered_cache: { enabled: cache?.tieredCache?.enabled || false, - topology: cache?.tieredCache?.topology || 'nearest-region', }, }, application_accelerator: { @@ -85,6 +85,12 @@ class CacheProcessConfigStrategy extends ProcessConfigStrategy { }, }, }; + + if (cache?.tieredCache?.enabled === true) { + cachePayload.modules.cache.tiered_cache.topology = cache.tieredCache.topology; + } + + return cachePayload; }); } @@ -121,10 +127,13 @@ class CacheProcessConfigStrategy extends ProcessConfigStrategy { queryStringSort: cache.modules?.application_accelerator?.cache_vary_by_querystring?.sort_enabled || false, tieredCache: { enabled: cache.modules?.cache?.tiered_cache?.enabled || false, - topology: cache.modules?.cache?.tiered_cache?.topology || 'nearest-region', }, }; + if (cacheSetting.tieredCache && cacheSetting.tieredCache.enabled === true) { + cacheSetting.tieredCache.topology = cache.modules?.cache?.tiered_cache?.topology; + } + // Handle cache by query string if (cache.modules?.application_accelerator?.cache_vary_by_querystring) { const queryStringConfig = cache.modules.application_accelerator.cache_vary_by_querystring; diff --git a/packages/config/src/configProcessor/processStrategy/implementations/customPagesProcessConfigStrategy.test.ts b/packages/config/src/configProcessor/processStrategy/implementations/customPagesProcessConfigStrategy.test.ts index 6a80c1e2..3d3e9eb9 100644 --- a/packages/config/src/configProcessor/processStrategy/implementations/customPagesProcessConfigStrategy.test.ts +++ b/packages/config/src/configProcessor/processStrategy/implementations/customPagesProcessConfigStrategy.test.ts @@ -138,8 +138,7 @@ describe('CustomPagesProcessConfigStrategy', () => { expect(result[0].pages[0].page.attributes.ttl).toBe(0); expect(result[0].pages[0].page.attributes.uri).toBeNull(); 
expect(result[0].pages[0].page.attributes.custom_status_code).toBeNull(); - // Number('test-connector') results in NaN, so we check if it's NaN - expect(isNaN(result[0].pages[0].page.attributes.connector as number)).toBe(true); + expect(result[0].pages[0].page.attributes.connector as string).toBe('test-connector'); }); it('should transform custom pages to manifest format with explicit values', () => { @@ -189,7 +188,7 @@ describe('CustomPagesProcessConfigStrategy', () => { expect(result[0].pages[0].page.attributes.uri).toBe('/not-found'); expect(result[0].pages[0].page.attributes.custom_status_code).toBe(404); // Number('test-connector') results in NaN, so we check if it's NaN - expect(isNaN(result[0].pages[0].page.attributes.connector as number)).toBe(true); + expect(result[0].pages[0].page.attributes.connector as string).toBe('test-connector'); }); it('should transform custom pages with numeric connector ID', () => { @@ -224,7 +223,7 @@ describe('CustomPagesProcessConfigStrategy', () => { page: { type: 'page_connector', attributes: { - connector: 123, + connector: '123', ttl: 0, uri: null, custom_status_code: null, @@ -301,7 +300,7 @@ describe('CustomPagesProcessConfigStrategy', () => { expect(result[1].name).toBe('second-custom-page'); expect(result[1].pages).toHaveLength(1); expect(result[1].pages[0].code).toBe('default'); - expect(result[1].pages[0].page.attributes.connector).toBe(456); + expect(result[1].pages[0].page.attributes.connector).toBe('456'); }); it('should throw error when connector reference is invalid', () => { diff --git a/packages/config/src/configProcessor/processStrategy/implementations/customPagesProcessConfigStrategy.ts b/packages/config/src/configProcessor/processStrategy/implementations/customPagesProcessConfigStrategy.ts index 5d3304bc..27a02269 100644 --- a/packages/config/src/configProcessor/processStrategy/implementations/customPagesProcessConfigStrategy.ts +++ 
b/packages/config/src/configProcessor/processStrategy/implementations/customPagesProcessConfigStrategy.ts @@ -54,7 +54,7 @@ class CustomPagesProcessConfigStrategy extends ProcessConfigStrategy { page: { type: pageEntry.page.type || 'page_connector', attributes: { - connector: Number(pageEntry.page.attributes.connector), // Convert to number for API + connector: String(pageEntry.page.attributes.connector), ttl: pageEntry.page.attributes.ttl ?? 0, uri: pageEntry.page.attributes.uri || null, custom_status_code: pageEntry.page.attributes.customStatusCode || null, diff --git a/packages/config/src/configProcessor/processStrategy/implementations/workloadDeploymentsProcessConfigStrategy.test.ts b/packages/config/src/configProcessor/processStrategy/implementations/workloadDeploymentsProcessConfigStrategy.test.ts index 62801c8e..5789dae1 100644 --- a/packages/config/src/configProcessor/processStrategy/implementations/workloadDeploymentsProcessConfigStrategy.test.ts +++ b/packages/config/src/configProcessor/processStrategy/implementations/workloadDeploymentsProcessConfigStrategy.test.ts @@ -257,18 +257,25 @@ describe('WorkloadDeploymentsProcessConfigStrategy', () => { describe('transformToConfig', () => { it('should return empty object when no workload_deployments are provided', () => { const payload = {}; - const result = strategy.transformToConfig(payload); + const transformedPayload: AzionConfig = {}; + const result = strategy.transformToConfig(payload, transformedPayload); expect(result).toEqual({}); }); it('should return empty object when workload_deployments array is empty', () => { const payload = { workload_deployments: [] }; - const result = strategy.transformToConfig(payload); + const transformedPayload: AzionConfig = {}; + const result = strategy.transformToConfig(payload, transformedPayload); expect(result).toEqual({}); }); it('should transform a basic workload_deployment manifest to config format', () => { const payload = { + workloads: [ + { + name: 
'test-workload', + }, + ], workload_deployments: [ { name: 'production', @@ -286,30 +293,46 @@ describe('WorkloadDeploymentsProcessConfigStrategy', () => { ], }; + const transformedPayload: AzionConfig = { + workloads: [ + { + name: 'test-workload', + }, + ], + }; + // eslint-disable-next-line @typescript-eslint/no-explicit-any - const result = strategy.transformToConfig(payload) as { workloadDeployments: any[] }; + const result = strategy.transformToConfig(payload, transformedPayload) as any[]; - expect(result).toEqual({ - workloadDeployments: [ - { - name: 'production', - current: true, - active: true, - strategy: { - type: 'application', - attributes: { - application: 'my-edge-app', - firewall: null, - customPage: null, + expect(result).toEqual([ + { + name: 'test-workload', + deployments: [ + { + name: 'production', + current: true, + active: true, + strategy: { + type: 'application', + attributes: { + application: 'my-edge-app', + firewall: null, + customPage: null, + }, }, }, - }, - ], - }); + ], + }, + ]); }); it('should transform numeric IDs to strings in the config format', () => { const payload = { + workloads: [ + { + name: 'test-workload', + }, + ], workload_deployments: [ { name: 'production', @@ -325,30 +348,47 @@ describe('WorkloadDeploymentsProcessConfigStrategy', () => { ], }; + const transformedPayload: AzionConfig = { + workloads: [ + { + name: 'test-workload', + }, + ], + }; + // eslint-disable-next-line @typescript-eslint/no-explicit-any - const result = strategy.transformToConfig(payload) as { workloadDeployments: any[] }; + const result = strategy.transformToConfig(payload, transformedPayload) as any[]; - expect(result).toEqual({ - workloadDeployments: [ - { - name: 'production', - current: undefined, - active: undefined, - strategy: { - type: 'application', - attributes: { - application: '12345', - firewall: '67890', - customPage: 54321, + expect(result).toEqual([ + { + name: 'test-workload', + deployments: [ + { + name: 'production', + 
strategy: { + type: 'application', + attributes: { + application: '12345', + firewall: '67890', + customPage: '54321', + }, }, }, - }, - ], - }); + ], + }, + ]); }); it('should transform multiple workload_deployments to config format', () => { const payload = { + workloads: [ + { + name: 'test-workload', + }, + { + name: 'test-workload-2', + }, + ], workload_deployments: [ { name: 'deployment-1', @@ -379,15 +419,25 @@ describe('WorkloadDeploymentsProcessConfigStrategy', () => { ], }; + const transformedPayload: AzionConfig = { + workloads: [ + { + name: 'test-workload', + }, + { + name: 'test-workload-2', + }, + ], + }; + // eslint-disable-next-line @typescript-eslint/no-explicit-any - const result = strategy.transformToConfig(payload) as { workloadDeployments: any[] }; - - expect(result.workloadDeployments).toHaveLength(2); - expect(result.workloadDeployments[0].name).toBe('deployment-1'); - expect(result.workloadDeployments[1].name).toBe('deployment-2'); - expect(result.workloadDeployments[0].strategy.attributes.application).toBe('app-1'); - expect(result.workloadDeployments[1].strategy.attributes.application).toBe('app-2'); - expect(result.workloadDeployments[1].strategy.attributes.firewall).toBe('firewall-1'); + const result = strategy.transformToConfig(payload, transformedPayload) as any[]; + expect(result).toHaveLength(2); + expect(result[0].name).toBe('test-workload'); + expect(result[1].name).toBe('test-workload-2'); + expect(result[0].deployments[0].strategy.attributes.application).toBe('app-1'); + expect(result[0].deployments[1].strategy.attributes.application).toBe('app-2'); + expect(result[1].deployments[1].strategy.attributes.firewall).toBe('firewall-1'); }); }); }); diff --git a/packages/config/src/configProcessor/processStrategy/implementations/workloadDeploymentsProcessConfigStrategy.ts b/packages/config/src/configProcessor/processStrategy/implementations/workloadDeploymentsProcessConfigStrategy.ts index 0d671863..ad198963 100644 --- 
a/packages/config/src/configProcessor/processStrategy/implementations/workloadDeploymentsProcessConfigStrategy.ts +++ b/packages/config/src/configProcessor/processStrategy/implementations/workloadDeploymentsProcessConfigStrategy.ts @@ -128,40 +128,55 @@ class WorkloadDeploymentsProcessConfigStrategy extends ProcessConfigStrategy { * Note: This recreates deployments but doesn't assign them back to specific workloads * The CLI should handle the workload association logic */ - transformToConfig(payload: { - workload_deployments?: Array<{ - name: string; - current?: boolean; - active?: boolean; - strategy: { - type: string; - attributes: { - application: number | string; - firewall?: number | string | null; - custom_page?: number | null; + transformToConfig( + payload: { + workload_deployments?: Array<{ + name: string; + current?: boolean; + active?: boolean; + strategy: { + type: string; + attributes: { + application: number | string; + firewall?: number | string | null; + custom_page?: number | null; + }; }; - }; - }>; - }) { + }>; + }, + transformedPayload: AzionConfig, + ) { if (!Array.isArray(payload?.workload_deployments) || payload.workload_deployments.length === 0) { return {}; } - const workloadDeployments = payload.workload_deployments.map((deployment) => ({ - name: deployment.name, - current: deployment.current, - active: deployment.active, - strategy: { - type: deployment.strategy.type, - attributes: { - application: String(deployment.strategy.attributes.application), // CLI should resolve ID to name - firewall: deployment.strategy.attributes.firewall ? 
String(deployment.strategy.attributes.firewall) : null, // CLI should resolve ID to name - customPage: deployment.strategy.attributes.custom_page, - }, - }, - })); + transformedPayload.workloads = transformedPayload?.workloads?.map((workload, workloadIndex) => { + return { + ...workload, + deployments: payload?.workload_deployments?.map((deployment, deploymentIndex) => { + if (deploymentIndex === workloadIndex) { + return { + ...deployment, + strategy: { + ...deployment.strategy, + attributes: { + application: String(deployment.strategy.attributes.application), // CLI should resolve ID to name + firewall: deployment.strategy.attributes.firewall + ? String(deployment.strategy.attributes.firewall) + : null, // CLI should resolve ID to name + customPage: deployment.strategy.attributes.custom_page + ? String(deployment.strategy.attributes.custom_page) + : null, // CLI should resolve ID to name + }, + }, + }; + } + return deployment; + }), + }; + }); - return { workloadDeployments }; + return transformedPayload.workloads; } } diff --git a/packages/config/src/configProcessor/processStrategy/implementations/workloadProcessConfigStrategy.test.ts b/packages/config/src/configProcessor/processStrategy/implementations/workloadProcessConfigStrategy.test.ts index c26a7ce5..b8409ef9 100644 --- a/packages/config/src/configProcessor/processStrategy/implementations/workloadProcessConfigStrategy.test.ts +++ b/packages/config/src/configProcessor/processStrategy/implementations/workloadProcessConfigStrategy.test.ts @@ -151,13 +151,15 @@ describe('WorkloadProcessConfigStrategy', () => { describe('transformToConfig', () => { it('should return empty object when no workloads are provided', () => { const payload = {}; - const result = strategy.transformToConfig(payload); + const transformedPayload: AzionConfig = {}; + const result = strategy.transformToConfig(payload, transformedPayload); expect(result).toEqual({}); }); it('should return empty object when workloads array is empty', () => { 
const payload = { workloads: [] }; - const result = strategy.transformToConfig(payload); + const transformedPayload: AzionConfig = {}; + const result = strategy.transformToConfig(payload, transformedPayload); expect(result).toEqual({}); }); @@ -187,32 +189,32 @@ describe('WorkloadProcessConfigStrategy', () => { ], }; - const result = strategy.transformToConfig(payload); + const transformedPayload: AzionConfig = {}; - expect(result).toEqual({ - workloads: [ - expect.objectContaining({ - name: 'test-workload', - active: true, - infrastructure: 1, - workloadDomainAllowAccess: true, - domains: ['example.com'], - tls: { - certificate: null, - ciphers: null, - minimumVersion: 'tls_1_3', - }, - protocols: { - http: { - versions: ['http1', 'http2'], - httpPorts: [80], - httpsPorts: [443], - quicPorts: null, - }, + const result = strategy.transformToConfig(payload, transformedPayload); + + expect(result).toEqual([ + expect.objectContaining({ + name: 'test-workload', + active: true, + infrastructure: 1, + workloadDomainAllowAccess: true, + domains: ['example.com'], + tls: { + certificate: null, + ciphers: null, + minimumVersion: 'tls_1_3', + }, + protocols: { + http: { + versions: ['http1', 'http2'], + httpPorts: [80], + httpsPorts: [443], + quicPorts: null, }, - }), - ], - }); + }, + }), + ]); }); it('should transform a complete workload manifest to config format', () => { @@ -249,40 +251,40 @@ describe('WorkloadProcessConfigStrategy', () => { ], }; - const result = strategy.transformToConfig(payload); + const transformedPayload: AzionConfig = {}; - expect(result).toEqual({ - workloads: [ - expect.objectContaining({ - name: 'complete-workload', - active: false, - infrastructure: 2, - workloadDomainAllowAccess: false, - domains: ['example.com', 'test.com'], - tls: { - certificate: 123, - ciphers: 5, - minimumVersion: 'tls_1_2', - }, - protocols: { - http: { - versions: ['http1'], - httpPorts: [8080], - httpsPorts: [8443], - quicPorts: [443], - }, + const result = 
strategy.transformToConfig(payload, transformedPayload); + + expect(result).toEqual([ + expect.objectContaining({ + name: 'complete-workload', + active: false, + infrastructure: 2, + workloadDomainAllowAccess: false, + domains: ['example.com', 'test.com'], + tls: { + certificate: 123, + ciphers: 5, + minimumVersion: 'tls_1_2', + }, + protocols: { + http: { + versions: ['http1'], + httpPorts: [8080], + httpsPorts: [8443], + quicPorts: [443], }, - mtls: { - enabled: true, - config: { - verification: 'permissive', - certificate: 456, - crl: [789], - }, + }, + mtls: { + enabled: true, + config: { + verification: 'permissive', + certificate: 456, + crl: [789], }, - }), - ], - }); + }, + }), + ]); }); it('should transform multiple workload manifests to config format', () => { @@ -331,11 +333,13 @@ describe('WorkloadProcessConfigStrategy', () => { ], }; - const result = strategy.transformToConfig(payload) as { workloads: Array<{ name: string }> }; + const transformedPayload: AzionConfig = {}; + + const result = strategy.transformToConfig(payload, transformedPayload) as Array<{ name: string }>; - expect(result.workloads).toHaveLength(2); - expect(result.workloads[0].name).toBe('workload-1'); - expect(result.workloads[1].name).toBe('workload-2'); + expect(result).toHaveLength(2); + expect(result[0].name).toBe('workload-1'); + expect(result[1].name).toBe('workload-2'); }); it('should handle empty domains array', () => { @@ -348,9 +352,13 @@ describe('WorkloadProcessConfigStrategy', () => { ], }; - const result = strategy.transformToConfig(payload) as { workloads: Array<{ domains: string[] }> }; + const transformedPayload: AzionConfig = { + workloads: [], + }; + + const result = strategy.transformToConfig(payload, transformedPayload) as Array<{ domains: string[] }>; - expect(result.workloads[0].domains).toEqual([]); + expect(result[0].domains).toEqual([]); }); it('should handle undefined mtls', () => { @@ -363,11 +371,22 @@ describe('WorkloadProcessConfigStrategy', () => { ], 
}; - const result = strategy.transformToConfig(payload) as { - workloads: Array<{ mtls?: { enabled?: false; config?: { verification: string; certificate?: number | null; crl?: number[] | null } } }>; + const transformedPayload: AzionConfig = { + workloads: [ + { + name: 'test-workload', + }, + ], }; - expect(result.workloads[0].mtls).toBeUndefined(); + const result = strategy.transformToConfig(payload, transformedPayload) as Array<{ + mtls?: { + enabled?: false; + config?: { verification: string; certificate?: number | null; crl?: number[] | null }; + }; + }>; + + expect(result[0].mtls).toBeUndefined(); }); }); }); diff --git a/packages/config/src/configProcessor/processStrategy/implementations/workloadProcessConfigStrategy.ts b/packages/config/src/configProcessor/processStrategy/implementations/workloadProcessConfigStrategy.ts index 002b317a..0f1d8437 100644 --- a/packages/config/src/configProcessor/processStrategy/implementations/workloadProcessConfigStrategy.ts +++ b/packages/config/src/configProcessor/processStrategy/implementations/workloadProcessConfigStrategy.ts @@ -45,12 +45,12 @@ class WorkloadProcessConfigStrategy extends ProcessConfigStrategy { } // eslint-disable-next-line @typescript-eslint/no-explicit-any - transformToConfig(payload: { workloads?: any[] }) { + transformToConfig(payload: { workloads?: any[] }, transformedPayload: AzionConfig) { if (!payload.workloads || payload.workloads.length === 0) { return {}; } - const workloads = payload.workloads.map((workload) => ({ + transformedPayload.workloads = payload.workloads.map((workload) => ({ name: workload.name, active: workload.active, infrastructure: workload.infrastructure, @@ -81,7 +81,7 @@ class WorkloadProcessConfigStrategy extends ProcessConfigStrategy { : undefined, })); - return { workloads }; + return transformedPayload.workloads; } } diff --git a/packages/config/src/configProcessor/processStrategy/index.ts b/packages/config/src/configProcessor/processStrategy/index.ts index 
b1fc7450..7dcc100e 100644 --- a/packages/config/src/configProcessor/processStrategy/index.ts +++ b/packages/config/src/configProcessor/processStrategy/index.ts @@ -1,6 +1,7 @@ import ApplicationProcessConfigStrategy from './implementations/application/applicationProcessConfigStrategy'; import BuildProcessConfigStrategy from './implementations/buildProcessConfigStrategy'; import ConnectorProcessConfigStrategy from './implementations/connectorProcessConfigStrategy'; +import CustomPagesProcessConfigStrategy from './implementations/customPagesProcessConfigStrategy'; import FunctionsProcessConfigStrategy from './implementations/functionsProcessConfigStrategy'; import PurgeProcessConfigStrategy from './implementations/purgeProcessConfigStrategy'; import FirewallProcessConfigStrategy from './implementations/secure/firewallProcessConfigStrategy'; @@ -21,9 +22,10 @@ function factoryProcessContext() { processConfigContext.setStrategy('firewall', new FirewallProcessConfigStrategy()); processConfigContext.setStrategy('functions', new FunctionsProcessConfigStrategy()); processConfigContext.setStrategy('applications', new ApplicationProcessConfigStrategy()); - processConfigContext.setStrategy('workloads', new WorkloadProcessConfigStrategy()); processConfigContext.setStrategy('connectors', new ConnectorProcessConfigStrategy()); + processConfigContext.setStrategy('workloads', new WorkloadProcessConfigStrategy()); processConfigContext.setStrategy('workload_deployments', new WorkloadDeploymentsProcessConfigStrategy()); + processConfigContext.setStrategy('custom_pages', new CustomPagesProcessConfigStrategy()); return processConfigContext; } diff --git a/packages/config/src/configProcessor/validateConfig/index.ts b/packages/config/src/configProcessor/validateConfig/index.ts index 633435c9..f9ffa15d 100644 --- a/packages/config/src/configProcessor/validateConfig/index.ts +++ b/packages/config/src/configProcessor/validateConfig/index.ts @@ -24,10 +24,39 @@ function validateConfig( const 
validate = ajv.compile(schema); const valid = validate(config); + const handleMessageError = (errors: unknown[]) => { + const messages = errors + ?.slice(0, 3) + ?.map((error: unknown, index: number) => { + const errorObject = error as { + message: string; + instancePath: string; + params: { missingProperty?: string | undefined }; + }; + + const errorDetails = [ + `📍 Error #${index + 1}:`, + ` Message: ${errorObject.message}`, + errorObject.instancePath ? ` Path: ${errorObject.instancePath}` : null, + errorObject.params?.missingProperty ? ` Missing Property: ${errorObject.params.missingProperty}` : null, + ] + .filter(Boolean) + .join('\n'); + + return errorDetails; + }) + .join('\n\n'); + + const totalErrors = errors?.length || 0; + const moreErrorsNote = totalErrors > 3 ? `\n\n... and ${totalErrors - 3} more error(s)` : ''; + + return `⛔️ Azion Configuration Validation Failed\n${'-'.repeat(50)}\n${messages}${moreErrorsNote}\n${'-'.repeat(50)}`; + }; + if (!valid) { if (validate.errors && validate.errors.length > 0) { - const firstError = validate.errors[0]; - throw new Error('Azion validation: ' + firstError.message); + const messages = handleMessageError(validate.errors); + throw new Error(messages); } else { throw new Error('Azion validation failed.'); } diff --git a/packages/config/src/types.ts b/packages/config/src/types.ts index 6c66ec9d..0b432e50 100644 --- a/packages/config/src/types.ts +++ b/packages/config/src/types.ts @@ -230,7 +230,7 @@ export type AzionCache = { /** Indicates if tiered cache should be enabled */ enabled: boolean; /** Tiered cache topology */ - topology: TieredCacheTopology; + topology?: TieredCacheTopology; }; /** HTTP methods to be cached */ methods?: { diff --git a/packages/jwt/src/jws/index.ts b/packages/jwt/src/jws/index.ts index 2d11b020..6f994bae 100644 --- a/packages/jwt/src/jws/index.ts +++ b/packages/jwt/src/jws/index.ts @@ -41,7 +41,13 @@ export async function verifying( } export function pemToBinary(pem: string): Uint8Array 
{ - return decodeBase64(pem.replace(/-+(BEGIN|END).*/g, '').replace(/\s/g, '')); + return decodeBase64( + pem + .split('\n') + .filter((line) => !line.includes('BEGIN') && !line.includes('END')) + .join('') + .replace(/\s/g, ''), + ); } export async function importPrivateKey(key: SignatureKey, alg: KeyImporterAlgorithm): Promise { diff --git a/packages/presets/README.md b/packages/presets/README.md index 7cef42d1..17bec9f0 100644 --- a/packages/presets/README.md +++ b/packages/presets/README.md @@ -83,6 +83,40 @@ export default defineNuxtConfig({ 📖 **[Read the complete Nuxt.js preset documentation](https://github.com/aziontech/lib/tree/main/packages/presets/docs/preset-nuxt.md)** +### Svelte Custom Adapter + +Supports both Server-Side Rendering (SSR) and Static Site Generation (SSG) for SvelteKit applications. + +**Features:** + +- SSR support with edge runtime optimization +- SSG support with automatic static generation +- Seamless integration with Azion CLI +- Optimized for edge computing performance + +**Quick Start:** + +```bash +npm install azion +``` + +Configure your `svelte.config.js`: + +```javascript +import adapter from 'azion/preset/svelte/ssr'; + +/** @type {import('@sveltejs/kit').Config} */ +const config = { + kit: { + adapter: adapter(), + }, +}; + +export default config; +``` + +📖 **[Read the complete Svelte preset documentation](https://github.com/aziontech/lib/tree/main/packages/presets/docs/preset-svelte.md)** + ## Contributing We welcome contributions to add support for more frameworks and improve existing presets. Please check the main [Contributing Guidelines](https://github.com/aziontech/lib/tree/main/README.md#contributing) for more information. 
diff --git a/packages/presets/docs/preset-svelte.md b/packages/presets/docs/preset-svelte.md new file mode 100644 index 00000000..bb6c99bb --- /dev/null +++ b/packages/presets/docs/preset-svelte.md @@ -0,0 +1,121 @@ +# SvelteKit Preset + +This preset enables Server-Side Rendering (SSR) and Static Site Generation (SSG) for SvelteKit applications on the Azion Platform. + +## Prerequisites + +- Node.js 18+ +- SvelteKit project +- Azion CLI installed globally + +## Installation + +First, install the Azion package in your SvelteKit project: + +```bash +npm install azion +``` + +## Configuration SSR + +Configure your `svelte.config.js` to use the Azion SSR adapter: + +```javascript +import adapter from 'azion/preset/svelte/ssr'; + +/** @type {import('@sveltejs/kit').Config} */ +const config = { + kit: { + adapter: adapter(), + }, +}; + +export default config; +``` + +## Configuration SSG + +Configure your `svelte.config.js` to use the Azion SSG adapter: + +```javascript +import adapter from '@sveltejs/adapter-static'; // SvelteKit static adapter + +/** @type {import('@sveltejs/kit').Config} */ +const config = { + kit: { + adapter: adapter(), + }, +}; + +export default config; +``` + +## Project Setup + +### 1. Link Your Project + +Connect your project to Azion and select the SvelteKit preset: + +```bash +azion link +``` + +When prompted, choose the **SvelteKit preset** from the available options. + +## Development Workflow + +### Preview Your Application + +#### Build and Preview + +Build your application and preview it locally: + +```bash +azion build +azion dev +``` + +#### Skip Framework Build (Optional) + +If you want to skip the SvelteKit framework build process and use existing build artifacts: + +```bash +azion dev --skip-framework-build +``` + +This is useful when you've already built your SvelteKit application and want to quickly test the edge function behavior. 
+ +## Deployment + +### Deploy to Azion Edge + +Deploy your application directly from your local environment: + +```bash +azion -t +azion deploy --local +``` + +This command will: + +1. Build your SvelteKit application with the SSR adapter +2. Package the edge function +3. Deploy to Azion's edge network +4. Provide you with the deployment URL + +### Getting Help + +For additional support: + +- Check the [Azion Documentation](https://www.azion.com/en/documentation/) +- Visit the [SvelteKit Documentation](https://kit.svelte.dev/docs) +- Contact Azion Support for platform-specific issues + +## Next Steps + +After successful deployment: + +1. Test your application on the provided edge URL +2. Configure custom domains if needed + +> **Note**: We are currently working on a Pull Request to the official SvelteKit repository to include an Azion adapter natively. This will simplify the configuration process in future versions. diff --git a/packages/presets/package.json b/packages/presets/package.json index 7cd61717..4a2bd70f 100644 --- a/packages/presets/package.json +++ b/packages/presets/package.json @@ -19,6 +19,8 @@ "import": "./dist/index.js" }, "./preset/nuxt/*": "./src/presets/nuxt/nitro/*/index.js", + "./preset/sveltekit/*": "./src/presets/svelte/kit/index.js", + "./preset/sveltekit/cache/*": "./src/presets/svelte/kit/cache/index.js", "./presets/preset/*": "./dist/presets/*" }, "author": "aziontech", @@ -28,7 +30,8 @@ "package.json", "README.md", "src/presets/next/*", - "src/presets/nuxt/nitro/*" + "src/presets/nuxt/nitro/*", + "src/presets/svelte/kit/*" ], "dependencies": { "cookie": "^1.0.2", diff --git a/packages/presets/src/presets/svelte/config.ts b/packages/presets/src/presets/svelte/config.ts index d5a45539..9fefc5aa 100644 --- a/packages/presets/src/presets/svelte/config.ts +++ b/packages/presets/src/presets/svelte/config.ts @@ -1,8 +1,8 @@ import type { AzionConfig } from 'azion/config'; -import { createMPARules } from 'azion/config/rules'; const config: 
AzionConfig = { build: { + preset: 'svelte', bundler: 'esbuild', }, storage: [ @@ -24,12 +24,168 @@ const config: AzionConfig = { }, }, ], + functions: [ + { + name: '$FUNCTION_NAME', + path: './functions/handler.js', + bindings: { + storage: { + bucket: '$BUCKET_NAME', + prefix: '$BUCKET_PREFIX', + }, + }, + }, + ], applications: [ { name: '$APPLICATION_NAME', - rules: createMPARules({ - connector: '$CONNECTOR_NAME', - }), + rules: { + request: [ + { + name: 'Deliver Immutable Assets', + description: 'Delivers immutable assets.', + active: true, + criteria: [ + [ + { + variable: '${uri}', + conditional: 'if', + operator: 'starts_with', + argument: '/_app/immutable', + }, + ], + ], + behaviors: [ + { + type: 'set_connector', + attributes: { + value: '$CONNECTOR_NAME', + }, + }, + { + type: 'deliver', + }, + ], + }, + { + name: 'Redirect to index.html for Subpaths', + description: 'Handle subpath requests by rewriting to index.html', + active: true, + criteria: [ + [ + { + variable: '${uri}', + conditional: 'if', + operator: 'matches', + argument: '^(?!.*/$)(?![sS]*.[a-zA-Z0-9]+$).*', + }, + ], + ], + behaviors: [ + { + type: 'set_connector', + attributes: { + value: '$CONNECTOR_NAME', + }, + }, + { + type: 'rewrite_request', + attributes: { + value: '${uri}/index.html', + }, + }, + ], + }, + { + name: 'Redirect to index.html', + description: 'Handle directory requests by rewriting to index.html', + active: true, + criteria: [ + [ + { + variable: '${uri}', + conditional: 'if', + operator: 'matches', + argument: '.*/$', + }, + ], + ], + behaviors: [ + { + type: 'set_connector', + attributes: { + value: '$CONNECTOR_NAME', + }, + }, + { + type: 'rewrite_request', + attributes: { + value: '${uri}index.html', + }, + }, + ], + }, + { + name: 'Deliver Static Assets', + description: 'Deliver static assets directly from storage', + active: true, + criteria: [ + [ + { + variable: '${uri}', + conditional: 'if', + operator: 'matches', + argument: + 
'.(jpg|jpeg|png|gif|bmp|webp|svg|ico|ttf|otf|woff|woff2|eot|pdf|doc|docx|xls|xlsx|ppt|pptx|mp4|webm|mp3|wav|ogg|css|js|xml|html|txt|csv|zip|rar|7z|tar|gz|webmanifest|map|md|yaml|yml)$', + }, + ], + ], + behaviors: [ + { + type: 'set_connector', + attributes: { + value: '$CONNECTOR_NAME', + }, + }, + { + type: 'deliver', + }, + ], + }, + { + name: 'Execute Svelte Function', + description: 'Execute Svelte function for all requests', + active: true, + criteria: [ + [ + { + variable: '${uri}', + conditional: 'if', + operator: 'matches', + argument: '^/', + }, + ], + ], + behaviors: [ + { + type: 'run_function', + attributes: { + value: '$FUNCTION_NAME', + }, + }, + { + type: 'forward_cookies', + }, + ], + }, + ], + }, + functionsInstances: [ + { + name: '$FUNCTION_INSTANCE_NAME', + ref: '$FUNCTION_NAME', + }, + ], }, ], workloads: [ diff --git a/packages/presets/src/presets/svelte/kit/cache/index.js b/packages/presets/src/presets/svelte/kit/cache/index.js new file mode 100644 index 00000000..42fa3a99 --- /dev/null +++ b/packages/presets/src/presets/svelte/kit/cache/index.js @@ -0,0 +1,105 @@ +/* eslint-disable */ + +const cacheStorageName = 'sveltekit-cache'; +const hostname = 'cacheapisveltekit'; + +/** + * @param {Request | string} request + */ +export async function lookup(request, buildId) { + let isHEAD = typeof request !== 'string' && request.method === 'HEAD'; + if (isHEAD) request = new Request(request, { method: 'GET' }); + + try { + const pathname = new URL(request.url).pathname; + const cache = await caches.open(`${cacheStorageName}-${buildId}`); + const url = new URL(pathname, `http://${hostname}`); + const newRequest = new Request(url); + const result = await cache.match(newRequest); + if (process.env.AZ_SVELTEKIT_CACHE_DEBUG) { + console.log('[Cache lookup] Checking cache for', request.url, pathname, buildId); + } + if (result?.text) { + const res = await result.text(); + if (isHEAD && res) { + if (process.env.AZ_SVELTEKIT_CACHE_DEBUG) + 
console.log('[Cache lookup] Found cache for', request.url, pathname, buildId); + return res; + } + return new Response(null, res); + } + if (process.env.AZ_SVELTEKIT_CACHE_DEBUG) + console.log('[Cache lookup] No cache found for', request.url, pathname, buildId); + return null; + } catch (error) { + if (process.env.AZ_SVELTEKIT_CACHE_DEBUG) { + const pathname = new URL(request.url).pathname; + const message = error instanceof Error ? error.message : String(error); + console.log( + '[Cache lookup] No cache found for or caches not implemented', + request.url, + pathname, + buildId, + message, + ); + } + + return null; + } +} + +/** + * @param {Request | string} request + * @param {Response} res + * @param {any} ctx + * @param {string} buildId + */ +export function save(request, res, ctx, buildId) { + const isGET = typeof request === 'string' || request.method === 'GET'; + + if (isGET && isCacheable(res)) { + if (res.headers.has('Set-Cookie')) { + res = new Response(res.body, res); + res.headers.append('Cache-Control', 'private=Set-Cookie'); + } + ctx.waitUntil(put(request, res.clone(), buildId)); + } + return res; +} + +/** + * @param {Response} res + */ +export function isCacheable(res) { + if (res.status === 206) return false; + + const vary = res.headers.get('Vary') || ''; + if (!!~vary.indexOf('*')) return false; + + const ccontrol = res.headers.get('Cache-Control') || ''; + if (/(private|no-cache|no-store)/i.test(ccontrol)) return false; + + return true; +} + +/** + * @param {Request | string} request + * @param {Response} res + * @param {any} ctx + */ +export async function put(request, res, buildId) { + try { + const pathname = new URL(request.url).pathname; + const url = new URL(pathname, `http://${hostname}`); + const newRequest = new Request(url); + const cache = await caches.open(`${cacheStorageName}-${buildId}`); + await cache.put(newRequest, res.clone()); + if (process.env.AZ_SVELTEKIT_CACHE_DEBUG) console.log('[Cache put] Cached for', request.url, 
pathname, buildId); + } catch (error) { + if (process.env.AZ_SVELTEKIT_CACHE_DEBUG) { + const pathname = new URL(request.url).pathname; + const message = error instanceof Error ? error.message : String(error); + console.log('[Cache put] No cache found for or caches not implemented', request.url, pathname, buildId, message); + } + } +} diff --git a/packages/presets/src/presets/svelte/kit/files/worker.js b/packages/presets/src/presets/svelte/kit/files/worker.js new file mode 100644 index 00000000..8db68c99 --- /dev/null +++ b/packages/presets/src/presets/svelte/kit/files/worker.js @@ -0,0 +1,144 @@ +import { base_path, manifest, prerendered } from 'MANIFEST'; +import { Server } from 'SERVER'; +import { lookup as lookupCache, save as saveCache } from 'azion/preset/sveltekit/cache'; + +const server = new Server(manifest); + +const app_path = `/${manifest.appPath}`; + +const immutable = `${app_path}/immutable/`; +const version_file = `${app_path}/version.json`; + +const BUILD_ID_DEFINED = 'BUILD_ID'; + +/** + * We don't know the origin until we receive a request, but + * that's guaranteed to happen before we call `read` + * @type {string} + */ +let origin; + +/** + * @param {Request | string} request + * @param {boolean} isPrerendered + * @returns {Promise} + */ +const getStorageAsset = async (request, isPrerendered) => { + try { + const urlString = request instanceof Request ? 
request.url : request; + const requestPath = decodeURIComponent(new URL(urlString).pathname); + if (requestPath === '/') { + return fetch(new URL('index.html', 'file://')); + } + if (isPrerendered && !requestPath.endsWith('.html')) { + return fetch(new URL(`${requestPath}.html`, 'file://')); + } + const assetUrl = new URL(requestPath, 'file://'); + return fetch(assetUrl); + } catch (e) { + return new Response(e.message || e.toString(), { status: 404 }); + } +}; + +/** + * @type {{ ASSETS: { fetch: typeof fetch } }} + */ +const env = { + ASSETS: { + fetch: getStorageAsset, + }, +}; + +const initialized = server.init({ + // @ts-expect-error env contains environment variables and bindings + env, + read: async (file) => { + const url = `${origin}/${file}`; + const response = await /** @type {{ ASSETS: { fetch: typeof fetch } }} */ (env).ASSETS.fetch(url); + + if (!response.ok) { + throw new Error(`read(...) failed: could not fetch ${url} (${response.status} ${response.statusText})`); + } + + return response.body; + }, +}); + +export default { + /** + * @param {Request} req + * @param {{ ASSETS: { fetch: typeof fetch } }} env + * @param {unknown} ctx + * @returns {Promise} + */ + async fetch(req, env, ctx) { + env = { + ...env, + ASSETS: { + fetch: getStorageAsset, + }, + }; + + if (!origin) { + origin = new URL(req.url).origin; + await initialized; + } + + // skip cache if "cache-control: no-cache" in request + let pragma = req.headers.get('cache-control') || ''; + let res = !pragma.includes('no-cache') && (await lookupCache(req, BUILD_ID_DEFINED)); + if (res) return res; + + let { pathname, search } = new URL(req.url); + try { + pathname = decodeURIComponent(pathname); + } catch { + // ignore invalid URI + } + + const stripped_pathname = pathname.replace(/\/$/, ''); + + // files in /static, the service worker, and Vite imported server assets + let is_static_asset = false; + const filename = stripped_pathname.slice(base_path.length + 1); + if (filename) { + 
is_static_asset = + manifest.assets.has(filename) || + manifest.assets.has(filename + '/index.html') || + filename in manifest._.server_assets || + filename + '/index.html' in manifest._.server_assets; + } + + let location = pathname.at(-1) === '/' ? stripped_pathname : pathname + '/'; + + if (is_static_asset || prerendered.has(pathname) || pathname === version_file || pathname.startsWith(immutable)) { + res = await env.ASSETS.fetch(req, prerendered.has(pathname)); + } else if (location && prerendered.has(location)) { + // trailing slash redirect for prerendered pages + if (search) location += search; + res = new Response('', { + status: 308, + headers: { + location, + }, + }); + } else { + // dynamically-generated pages + res = await server.respond(req, { + // @ts-expect-error caches is not defined + platform: { + env, + ctx, + }, + getClientAddress() { + return /** @type {string} */ (req.headers.get('metadata.client_ip')); + }, + }); + } + + // write to `Cache` only if response is not an error, + // let `Cache.save` handle the Cache-Control and Vary headers + pragma = res.headers.get('cache-control') || ''; + return pragma && res.status < 400 ? saveCache(req, res, ctx, BUILD_ID_DEFINED) : res; + }, +}; diff --git a/packages/presets/src/presets/svelte/kit/index.js b/packages/presets/src/presets/svelte/kit/index.js new file mode 100644 index 00000000..095c8a23 --- /dev/null +++ b/packages/presets/src/presets/svelte/kit/index.js @@ -0,0 +1,191 @@ +import { copyFileSync, existsSync, writeFileSync } from 'node:fs'; +import path from 'node:path'; +import { fileURLToPath } from 'node:url'; + +const name = 'sveltejs/adapter-azion'; +// TODO: Implement version check +// const [kit_major, kit_minor] = VERSION.split('.'); + +/** + * @template T + * @template {keyof T} K + * @typedef {Partial> & Required>} PartialExcept + */ + +/** + * We use a custom `Builder` type here to support the minimum version of SvelteKit. 
+ * @typedef {PartialExcept} Builder2_0_0 + */ + +/** @type {import('./index.js').default} */ +// eslint-disable-next-line @typescript-eslint/no-unused-vars +export default function (options = {}) { + return { + name, + /** @param {Builder2_0_0} builder */ + async adapt(builder) { + if (!builder.routes) { + throw new Error( + '@sveltejs/adapter-azion >=2.x (possibly installed through @sveltejs/adapter-auto) requires @sveltejs/kit version 1.5 or higher. ' + + 'Either downgrade the adapter or upgrade @sveltejs/kit', + ); + } + + if (existsSync(`${builder.config.kit.files.assets}/_headers`)) { + throw new Error( + `The _headers file should be placed in the project root rather than the ${builder.config.kit.files.assets} directory`, + ); + } + + if (existsSync(`${builder.config.kit.files.assets}/_redirects`)) { + throw new Error( + `The _redirects file should be placed in the project root rather than the ${builder.config.kit.files.assets} directory`, + ); + } + + let dest = builder.getBuildDirectory('azion'); + let assetsDest = builder.getBuildDirectory('../build'); + let workerDest = `${dest}/_worker.js`; + let assetsBinding = 'ASSETS'; + + const files = fileURLToPath(new URL('./files', import.meta.url).href); + const tmp = builder.getBuildDirectory('azion-tmp'); + + builder.rimraf(dest); + builder.rimraf(workerDest); + builder.rimraf(assetsDest); + + builder.mkdirp(dest); + builder.mkdirp(assetsDest); + builder.mkdirp(tmp); + + // client assets and prerendered pages + builder.writeClient(assetsDest); + builder.writePrerendered(assetsDest); + + // worker + const workerDestDir = path.dirname(workerDest); + writeFileSync( + `${tmp}/manifest.js`, + `export const manifest = ${builder.generateManifest({ relativePath: path.posix.relative(tmp, builder.getServerDirectory()) })};\n\n` + + `export const prerendered = new Set(${JSON.stringify(builder.prerendered.paths)});\n\n` + + `export const base_path = ${JSON.stringify(builder.config.kit.paths.base)};\n`, + ); + + // make 
buildId unique, with builder version and id 10 characters + let buildId = generateId(10); + try { + const { name } = builder.config.kit.version; + buildId = `${name}-${generateId(10)}`; + } catch { + console.log('Error parsing builder version, using random buildId'); + } + + builder.copy(`${files}/worker.js`, workerDest, { + replace: { + // the paths returned by the Wrangler config might be Windows paths, + // so we need to convert them to POSIX paths or else the backslashes + // will be interpreted as escape characters and create an incorrect import path + SERVER: `${posixify(path.relative(workerDestDir, builder.getServerDirectory()))}/index.js`, + MANIFEST: `${posixify(path.relative(workerDestDir, tmp))}/manifest.js`, + ASSETS: assetsBinding, + BUILD_ID: buildId, + }, + }); + if (builder.hasServerInstrumentationFile?.()) { + builder.instrument?.({ + entrypoint: workerDest, + instrumentation: `${builder.getServerDirectory()}/instrumentation.server.js`, + }); + } + + // _headers + if (existsSync('_headers')) { + copyFileSync('_headers', `${assetsDest}/_headers`); + } + writeFileSync(`${assetsDest}/_headers`, generate_headers(builder.getAppPath()), { flag: 'a' }); + + // _redirects + if (existsSync('_redirects')) { + copyFileSync('_redirects', `${assetsDest}/_redirects`); + } + if (builder.prerendered.redirects.size > 0) { + writeFileSync(`${assetsDest}/_redirects`, generate_redirects(builder.prerendered.redirects), { + flag: 'a', + }); + } + }, + emulate() { + // we want to invoke `getPlatformProxy` only once, but await it only when it is accessed. 
+ // If we would await it here, it would hang indefinitely because the platform proxy only resolves once a request happens + const get_emulated = async () => { + const platform = /** @type {App.Platform} */ ({}); + /** @type {Record} */ + const env = {}; + const prerender_platform = /** @type {App.Platform} */ (/** @type {unknown} */ ({ env })); + return { platform, prerender_platform }; + }; + + /** @type {{ platform: App.Platform, prerender_platform: App.Platform }} */ + let emulated; + + return { + platform: async ({ prerender }) => { + emulated ??= await get_emulated(); + return prerender ? emulated.prerender_platform : emulated.platform; + }, + }; + }, + supports: { + // eslint-disable-next-line @typescript-eslint/no-unused-vars + read: ({ route }) => { + // TODO bump peer dep in next adapter major to simplify this + + return true; + }, + instrumentation: () => true, + }, + }; +} + +/** @param {string} app_dir */ +function generate_headers(app_dir) { + return ` +# === START AUTOGENERATED SVELTE IMMUTABLE HEADERS === +/${app_dir}/* + X-Robots-Tag: noindex + Cache-Control: no-cache +/${app_dir}/immutable/* + ! 
Cache-Control + Cache-Control: public, immutable, max-age=31536000 +# === END AUTOGENERATED SVELTE IMMUTABLE HEADERS === +`.trimEnd(); +} + +/** @param {Map} redirects */ +function generate_redirects(redirects) { + const rules = Array.from( + redirects.entries(), + ([path, redirect]) => `${path} ${redirect.location} ${redirect.status}`, + ).join('\n'); + + return ` +# === START AUTOGENERATED SVELTE PRERENDERED REDIRECTS === +${rules} +# === END AUTOGENERATED SVELTE PRERENDERED REDIRECTS === +`.trimEnd(); +} + +/** @param {string} str */ +function posixify(str) { + return str.replace(/\\/g, '/'); +} + +function generateId(length = 10) { + const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789'; + let result = ''; + for (let i = 0; i < length; i++) { + result += chars.charAt(Math.floor(Math.random() * chars.length)); + } + return result; +} diff --git a/packages/presets/src/presets/svelte/prebuild.ts b/packages/presets/src/presets/svelte/prebuild.ts index 5b6ea5c5..ec9d8570 100644 --- a/packages/presets/src/presets/svelte/prebuild.ts +++ b/packages/presets/src/presets/svelte/prebuild.ts @@ -1,20 +1,251 @@ -import { BuildConfiguration, BuildContext } from 'azion/config'; +import { AzionPrebuildResult, BuildConfiguration, BuildContext } from 'azion/config'; import { exec, getPackageManager } from 'azion/utils/node'; -import { mkdir } from 'fs/promises'; +import { existsSync } from 'fs'; +import { mkdir, readFile, rm } from 'fs/promises'; + +const SVELTE_CONFIG_FILES = ['svelte.config.js', 'svelte.config.ts', 'svelte.config.mjs', 'svelte.config.cjs']; +const AZION_CONFIG_FILES = ['azion.config.js', 'azion.config.ts', 'azion.config.mjs', 'azion.config.cjs']; +const PATHS = { + BUILD_DIR_DEFAULT: 'build', + AZION_BUILD_DIR: '.svelte-kit/azion', + WORKER_FILE: '.svelte-kit/azion/_worker.js', +} as const; + +async function readSvelteConfig() { + try { + let configPath = ''; + for (const file of SVELTE_CONFIG_FILES) { + if (existsSync(file)) { + 
configPath = file; + break; + } + } + + if (!configPath) { + throw new Error('Svelte config file not found'); + } + + const content = await readFile(configPath, 'utf8'); + const hasAzionPreset = /azion\/preset\/sveltekit/i.test(content); + const hasAzionAdapter = /@sveltejs\/adapter-azion/i.test(content); + const hasAzionConfig = hasAzionPreset || hasAzionAdapter; + + // check if adapater-static and assets,pages config exists + const hasAdapterStatic = /(adapter-static)/i.test(content); + + // Capture assets and pages directory values + const assetsMatch = /assets:\s*['"`]([^'"`]+)['"`]/i.exec(content); + const pagesMatch = /pages:\s*['"`]([^'"`]+)['"`]/i.exec(content); + + const assetsDir = assetsMatch ? assetsMatch[1] : null; + const pagesDir = pagesMatch ? pagesMatch[1] : null; + const hasChangedAssetsDir = assetsDir !== null; + const hasChangedPagesDir = pagesDir !== null; + + if (hasAdapterStatic) { + return { + adapter: { + azion: hasAzionConfig, + static: true, + changedAssetsDir: hasChangedAssetsDir, + changedPagesDir: hasChangedPagesDir, + assetsDir, + pagesDir, + }, + }; + } + + return hasAzionConfig ? { adapter: { azion: true } } : null; + } catch (err) { + console.log('Svelte config loading error:', err instanceof Error ? err.message : String(err)); + console.log('Falling back to default config'); + return null; + } +} + +async function readAzionConfig() { + try { + let configPath = ''; + for (const file of AZION_CONFIG_FILES) { + if (existsSync(file)) { + configPath = file; + break; + } + } + + if (!configPath) { + throw new Error('Azion config file not found'); + } + + const content = await readFile(configPath, 'utf8'); + + // Capture dir value from storage array + const storageMatch = /storage:\s*\[\s*\{[^}]*dir:\s*['"`]([^'"`]+)['"`]/i.exec(content); + const storageDir = storageMatch ? storageMatch[1] : null; + + return { + storageDir, + }; + } catch (err) { + console.log('Azion config loading error:', err instanceof Error ? 
err.message : String(err)); + console.log('Falling back to default config'); + return null; + } +} + +function createAzionPrebuildResult(): AzionPrebuildResult { + return { + injection: { + globals: { + __AZ_SVELTE_KIT__: JSON.stringify(true), + }, + }, + filesToInject: [], + bundler: { + defineVars: {}, + plugins: [], + }, + }; +} + +function createConfigMismatchError(azionDir: string, svelteDir: string, suggestion?: string): Error { + const message = [ + 'Configuration mismatch detected!', + `Azion storage directory: "${azionDir}"`, + `Svelte assets directory: "${svelteDir}"`, + suggestion || 'Please update your azion.config to match the Svelte adapter configuration.', + ].join('\n'); + + return new Error(message); +} + +function configureSvelteKitPreset(buildConfig: BuildConfiguration) { + delete buildConfig.preset.handler; + buildConfig.preset.config.build!.entry = PATHS.WORKER_FILE; +} /** * Runs custom prebuild actions for Svelte */ -async function prebuild(_: BuildConfiguration, ctx: BuildContext): Promise { +async function prebuild(buildConfig: BuildConfiguration, ctx: BuildContext): Promise { const packageManager = await getPackageManager(); - const outDir = 'build'; + const [svelteConfig, azionConfig] = await Promise.all([readSvelteConfig(), readAzionConfig()]); + + // Extract adapter configuration + const adapter = { + hasAzion: svelteConfig?.adapter?.azion, + hasStatic: svelteConfig?.adapter?.static, + assetsDir: svelteConfig?.adapter?.assetsDir, + pagesDir: svelteConfig?.adapter?.pagesDir, + hasChangedAssetsDir: svelteConfig?.adapter?.changedAssetsDir, + hasChangedPagesDir: svelteConfig?.adapter?.changedPagesDir, + }; + + // Extract storage configuration + const storage = { + dir: azionConfig?.storageDir, + }; + + // Adapter static + if (adapter.hasStatic) { + const buildDir = adapter.assetsDir || PATHS.BUILD_DIR_DEFAULT; + await mkdir(buildDir, { recursive: true }); + const normalizeDir = (dir: string) => dir.replace(/^\.\//, '').replace(/\/$/, ''); - 
// if skipFrameworkBuild is true, we need to create the dist folder + // Validate configuration consistency + const validateDirectoryConsistency = () => { + // Check if assets and pages directories match (required for adapter-static) + if (adapter.hasChangedAssetsDir || adapter.hasChangedPagesDir) { + if (adapter.assetsDir !== adapter.pagesDir) { + throw new Error( + 'Assets and pages directories must be the same when using @sveltejs/adapter-static.\n' + + `Current configuration: assets="${adapter.assetsDir}", pages="${adapter.pagesDir}"`, + ); + } + } + + const svelteDir = adapter.assetsDir || PATHS.BUILD_DIR_DEFAULT; + const normalizedSvelteDir = normalizeDir(svelteDir); + + // Case 1: No Azion config exists, but Svelte uses custom directory + if (!storage.dir && adapter.assetsDir) { + if (normalizedSvelteDir !== PATHS.BUILD_DIR_DEFAULT) { + throw createConfigMismatchError( + PATHS.BUILD_DIR_DEFAULT, + normalizedSvelteDir, + 'Change the storage config dir option on azion.config or assetsDir option on svelte.config.', + ); + } + } + + // Case 2: Azion config exists with custom directory + if (storage.dir) { + const normalizedStorageDir = normalizeDir(storage.dir); + if (normalizedStorageDir !== normalizedSvelteDir) { + // Clean up incorrect storage directory + try { + rm(storage.dir, { recursive: true, force: true }).catch((error) => + console.warn(`Failed to clean up directory ${storage.dir}:`, error), + ); + } catch (error) { + console.warn(`Failed to clean up directory ${storage.dir}:`, error); + } + + const suggestion = + adapter.hasChangedAssetsDir || adapter.hasChangedPagesDir + ? 'Please update your azion.config to match the Svelte adapter configuration.' 
+ : 'Please update your azion.config to match the Svelte adapter configuration or assetsDir option on svelte.config.'; + + throw createConfigMismatchError(normalizedStorageDir, normalizedSvelteDir, suggestion); + } + } + }; + + validateDirectoryConsistency(); + } + + // If skipFrameworkBuild is true, we need to create the dist folder if (ctx.skipFrameworkBuild) { - await mkdir(outDir, { recursive: true }); + await mkdir(adapter.assetsDir || PATHS.BUILD_DIR_DEFAULT, { recursive: true }); + if (adapter.hasAzion) { + try { + // Check if worker file exists from previous build + if (existsSync(PATHS.WORKER_FILE)) { + configureSvelteKitPreset(buildConfig); + return createAzionPrebuildResult(); + } else { + console.log('Worker file not found! Please run command build without --skip-framework-build'); + process.exit(1); + } + } catch { + console.log('SvelteKit build artifacts not found! Please run command build without --skip-framework-build'); + process.exit(1); + } + } return; } + // Azion adapter + if (adapter.hasAzion) { + await exec(`${packageManager} run build`, { + scope: 'SvelteKit', + verbose: true, + }); + try { + // Check if worker file was generated + if (existsSync(PATHS.WORKER_FILE)) { + configureSvelteKitPreset(buildConfig); + return createAzionPrebuildResult(); + } else { + console.log('Worker file not generated! Please check your SvelteKit adapter configuration'); + process.exit(1); + } + } catch { + console.log('SvelteKit build failed! 
Please run command build!'); + process.exit(1); + } + } + // Build static files (fallback for non-adapter builds) await exec(`${packageManager} run build`, { scope: 'Svelte', verbose: true, diff --git a/packages/storage/src/index.ts b/packages/storage/src/index.ts index 09e81fc1..b018c9a5 100644 --- a/packages/storage/src/index.ts +++ b/packages/storage/src/index.ts @@ -23,6 +23,7 @@ import { AzionObjectCollectionParams, AzionStorageClient, AzionStorageResponse, + ContentObjectStorage, CreateAzionStorageClient, EdgeAccessType, } from './types'; @@ -139,7 +140,7 @@ export const createBucketMethod = async ( params, }: { key: string; - content: string; + content: ContentObjectStorage; params?: { content_type?: string }; }): Promise> => createObjectMethod(token, name, key, content, params, resolvedOptions), @@ -149,7 +150,7 @@ export const createBucketMethod = async ( params, }: { key: string; - content: string; + content: ContentObjectStorage; params?: { content_type?: string }; }): Promise> => updateObjectMethod(token, name, key, content, params, resolvedOptions), @@ -223,7 +224,7 @@ export const getBucketsMethod = async ( params, }: { key: string; - content: string; + content: ContentObjectStorage; params?: { content_type?: string }; }): Promise> => createObjectMethod(token, bucket.name, key, content, params, resolvedOptions), @@ -233,7 +234,7 @@ export const getBucketsMethod = async ( params, }: { key: string; - content: string; + content: ContentObjectStorage; params?: { content_type?: string }; }): Promise> => updateObjectMethod(token, bucket.name, key, content, params, resolvedOptions), @@ -297,7 +298,7 @@ const getBucketMethod = async ( params, }: { key: string; - content: string; + content: ContentObjectStorage; params?: { content_type?: string }; }): Promise> => createObjectMethod(token, name, key, content, params, resolvedOptions), @@ -307,7 +308,7 @@ const getBucketMethod = async ( params, }: { key: string; - content: string; + content: 
ContentObjectStorage; params?: { content_type?: string }; }): Promise> => updateObjectMethod(token, name, key, content, params, resolvedOptions), @@ -369,7 +370,7 @@ export const updateBucketMethod = async ( params, }: { key: string; - content: string; + content: ContentObjectStorage; params?: { content_type?: string }; }): Promise> => createObjectMethod(token, name, key, content, params, resolvedOptions), @@ -379,7 +380,7 @@ export const updateBucketMethod = async ( params, }: { key: string; - content: string; + content: ContentObjectStorage; params?: { content_type?: string }; }): Promise> => updateObjectMethod(token, name, key, content, params, resolvedOptions), @@ -547,7 +548,7 @@ const getObjectByKeyMethod = createInternalOrExternalMethod( * @param {string} token - Authentication token for Azion API. * @param {string} bucket - Name of the bucket to create the object in. * @param {string} key - Key (name) of the object to create. - * @param {string} content - Content of the content to upload. + * @param {ContentObjectStorage} content - Content of the content to upload. * @param {AzionClientOptions} [options] - Client options including debug mode. 
* @returns {Promise>} The created object or error message */ @@ -556,7 +557,7 @@ const createObjectMethod = createInternalOrExternalMethod( token: string, bucket: string, key: string, - content: string, + content: ContentObjectStorage, params?: { content_type?: string }, options?: AzionClientOptions, ): Promise> => { @@ -579,7 +580,7 @@ const createObjectMethod = createInternalOrExternalMethod( token: string, bucket: string, key: string, - content: string, + content: ContentObjectStorage, params?: { content_type?: string }, options?: AzionClientOptions, ): Promise> => { @@ -596,7 +597,6 @@ const createObjectMethod = createInternalOrExternalMethod( return { data: { key: apiResponse.data.object_key, - content, content_type: params?.content_type, state: apiResponse.state, }, @@ -623,7 +623,7 @@ const updateObjectMethod = createInternalOrExternalMethod( token: string, bucket: string, key: string, - content: string, + content: ContentObjectStorage, params?: { content_type?: string }, options?: AzionClientOptions, ): Promise> => { @@ -643,7 +643,7 @@ const updateObjectMethod = createInternalOrExternalMethod( token: string, bucket: string, key: string, - content: string, + content: ContentObjectStorage, params?: { content_type?: string }, options?: AzionClientOptions, ): Promise> => { @@ -955,7 +955,7 @@ const createObjectWrapper = ({ }: { bucket: string; key: string; - content: string; + content: ContentObjectStorage; params?: { content_type?: string }; options?: AzionClientOptions; }): Promise> => @@ -1025,7 +1025,7 @@ const updateObjectWrapper = ({ }: { bucket: string; key: string; - content: string; + content: ContentObjectStorage; params?: { content_type?: string }; options?: AzionClientOptions; }): Promise> => diff --git a/packages/storage/src/services/api/index.ts b/packages/storage/src/services/api/index.ts index 2237b795..9a976ef2 100644 --- a/packages/storage/src/services/api/index.ts +++ b/packages/storage/src/services/api/index.ts @@ -14,7 +14,7 @@ import { 
ApiListObjectsResponse, } from './types'; -import { AzionEnvironment, EdgeAccessType } from '../../types'; +import { AzionEnvironment, ContentObjectStorage, EdgeAccessType } from '../../types'; /** * Gets base URL based on environment @@ -368,7 +368,7 @@ const getObjects = async ( * @param {string} token - Authentication token for Azion API. * @param {string} bucketName - Name of the bucket. * @param {string} key - Key of the object to create. - * @param {string} file - Content of the object. + * @param {ContentObjectStorage} file - Content of the object. * @param {string} [contentType='application/octet-stream'] - Content type of the object. * @param {boolean} [debug] - Enable debug mode for detailed logging. * @param {AzionEnvironment} [env='production'] - Environment to use for the API call. @@ -378,13 +378,15 @@ const postObject = async ( token: string, bucketName: string, key: string, - file: string, + file: ContentObjectStorage, contentType: string = 'application/octet-stream', debug?: boolean, env: AzionEnvironment = 'production', ): Promise => { try { const baseUrl = getBaseUrl(env); + // convert file to Uint8Array if file is string, otherwise use as-is (ArrayBuffer or ReadableStream) + const fileContent = typeof file === 'string' ? 
new TextEncoder().encode(file) : file; const data = await fetchWithErrorHandling( `${baseUrl}/buckets/${bucketName}/objects/${key}`, { @@ -394,7 +396,7 @@ const postObject = async ( 'Content-Type': contentType, Authorization: `Token ${token}`, }, - body: file, + body: fileContent as BodyInit, }, debug, ); @@ -430,7 +432,7 @@ const getObjectByKey = async ( key: string, debug?: boolean, env: AzionEnvironment = 'production', -): Promise<{ data?: string; error?: ApiError }> => { +): Promise<{ data?: ContentObjectStorage; error?: ApiError }> => { try { const baseUrl = getBaseUrl(env); const headers = buildHeaders(token); @@ -469,13 +471,15 @@ const putObject = async ( token: string, bucketName: string, key: string, - file: string, + file: ContentObjectStorage, contentType: string = 'application/octet-stream', debug?: boolean, env: AzionEnvironment = 'production', ): Promise => { try { const baseUrl = getBaseUrl(env); + // convert file to Uint8Array if file is string, otherwise use as-is (ArrayBuffer or ReadableStream) + const fileContent = typeof file === 'string' ? 
new TextEncoder().encode(file) : file; const data = await fetchWithErrorHandling( `${baseUrl}/buckets/${bucketName}/objects/${key}`, { @@ -485,7 +489,7 @@ const putObject = async ( 'Content-Type': contentType, Authorization: `Token ${token}`, }, - body: file, + body: fileContent as BodyInit, }, debug, ); diff --git a/packages/storage/src/services/runtime/index.ts b/packages/storage/src/services/runtime/index.ts index 5d73b668..4cb7a267 100644 --- a/packages/storage/src/services/runtime/index.ts +++ b/packages/storage/src/services/runtime/index.ts @@ -6,12 +6,13 @@ import { AzionDeletedBucketObject, AzionObjectCollectionParams, AzionStorageResponse, + ContentObjectStorage, } from '../../types'; import { removeLeadingSlash, retryWithBackoff } from '../../utils/index'; export const isInternalStorageAvailable = (): boolean => { // eslint-disable-next-line @typescript-eslint/no-explicit-any - return (globalThis as any).Azion?.Sql || null; + return (globalThis as any).Azion?.Storage || null; }; /** @@ -38,6 +39,7 @@ export class InternalStorageClient implements AzionBucket { if (!this.storage) { // eslint-disable-next-line @typescript-eslint/no-explicit-any this.storage = new (globalThis as any).Azion.Storage(bucketName); + this.name = bucketName; } } @@ -129,14 +131,12 @@ export class InternalStorageClient implements AzionBucket { try { const storageObject = await retryWithBackoff(() => this.storage!.get(key)); const arrayBuffer = await storageObject.arrayBuffer(); - const decoder = new TextDecoder(); - const content = decoder.decode(arrayBuffer); return { data: { state: 'executed-runtime', key: removeLeadingSlash(key), size: storageObject.contentLength, - content: content, + content: arrayBuffer, content_type: storageObject.metadata?.get('content-type'), }, }; @@ -158,7 +158,7 @@ export class InternalStorageClient implements AzionBucket { * * @param {Object} params - Parameters for creating an object. * @param {string} params.key - The key of the object to create. 
- * @param {string} params.content - The content of the object. + * @param {ContentObjectStorage} params.content - The content of the object. * @param {{ content_type?: string }} [params.options] - Optional metadata for the object. * @returns {Promise>} The created object or error message. */ @@ -168,24 +168,29 @@ export class InternalStorageClient implements AzionBucket { options, }: { key: string; - content: string; + content: ContentObjectStorage; options?: { content_type?: string }; }): Promise> { this.initializeStorage(this.name); try { - const contentBuffer = new TextEncoder().encode(content); - await retryWithBackoff(() => - this.storage!.put(key, contentBuffer.buffer, { - 'content-type': options?.content_type, - }), + const fileContent = typeof content === 'string' ? new TextEncoder().encode(content as string) : content; + await retryWithBackoff( + () => + this.storage!.put(key, fileContent, { + 'content-type': options?.content_type, + }), + 1000, + this.debug, ); + + const size = content instanceof ArrayBuffer ? content.byteLength : content.toString().length; + return { data: { state: 'executed-runtime', key: removeLeadingSlash(key), - size: contentBuffer.byteLength, + size, content_type: options?.content_type, - content: content, }, }; } catch (error) { @@ -206,7 +211,7 @@ export class InternalStorageClient implements AzionBucket { * * @param {Object} params - Parameters for updating an object. * @param {string} params.key - The key of the object to update. - * @param {string} params.content - The new content of the object. + * @param {ContentObjectStorage} params.content - The new content of the object. * @param {{ content_type?: string }} [params.options] - Optional metadata for the object. * @returns {Promise>} The updated object or error message. 
*/ @@ -216,7 +221,7 @@ export class InternalStorageClient implements AzionBucket { options, }: { key: string; - content: string; + content: ContentObjectStorage; options?: { content_type?: string }; }): Promise> { return this.createObject({ key, content, options }); diff --git a/packages/storage/src/types.ts b/packages/storage/src/types.ts index 25ea3972..047a81df 100644 --- a/packages/storage/src/types.ts +++ b/packages/storage/src/types.ts @@ -16,6 +16,8 @@ export type AzionStorageResponse = { }; }; +export type ContentObjectStorage = ArrayBuffer | ReadableStream | Uint8Array | string; + /** * Represents an Azion storage bucket with methods to interact with objects. * @@ -62,7 +64,7 @@ export interface AzionBucket { * * @param {Object} params - Parameters for creating the object. * @param {string} params.key - The key for the new object. - * @param {string} params.content - The content of the new object. + * @param {ContentObjectStorage} params.content - The content of the new object. * @param {Object} [params.params] - Additional parameters for the object. * @param {string} [params.params.content_type] - The content type of the object. * @returns {Promise>} A promise that resolves to the created bucket object or error message. @@ -76,7 +78,7 @@ export interface AzionBucket { */ createObject: (params: { key: string; - content: string; + content: ContentObjectStorage; params?: { content_type?: string }; }) => Promise>; @@ -85,7 +87,7 @@ export interface AzionBucket { * * @param {Object} params - Parameters for updating the object. * @param {string} params.key - The key of the object to update. - * @param {string} params.content - The new content for the object. + * @param {ContentObjectStorage} params.content - The new content for the object. * @param {Object} [params.params] - Additional parameters for the object. * @param {string} [params.params.content_type] - The new content type for the object. 
* @returns {Promise>} A promise that resolves to the updated bucket object or error message. @@ -99,7 +101,7 @@ export interface AzionBucket { */ updateObject: (params: { key: string; - content: string; + content: ContentObjectStorage; params?: { content_type?: string }; }) => Promise>; @@ -122,7 +124,7 @@ export interface AzionBucketObject { size?: number; last_modified?: string; content_type?: string; - content?: string; + content?: ContentObjectStorage; } export interface AzionBucketObjects { diff --git a/packages/storage/src/utils/index.ts b/packages/storage/src/utils/index.ts index cbc1396a..1e731572 100644 --- a/packages/storage/src/utils/index.ts +++ b/packages/storage/src/utils/index.ts @@ -27,10 +27,11 @@ export function removeLeadingSlash(key: string): string { * @template T The type of the function's return value. * @param {() => Promise} fn The function to retry. * @param {number} [delay=1000] The initial delay between attempts in milliseconds. + * @param {boolean} [debug=false] Whether to enable debug logging. * @returns {Promise} The result of the function if it succeeds within the allowed attempts. * @throws {Error} If the function fails after the specified number of attempts. */ -export const retryWithBackoff = async (fn: () => Promise, delay: number = 1000): Promise => { +export const retryWithBackoff = async (fn: () => Promise, delay: number = 1000, debug?: boolean): Promise => { let attempt = 0; const maxTime = 120000; // 2 minutes in milliseconds const startTime = Date.now(); @@ -38,7 +39,8 @@ export const retryWithBackoff = async (fn: () => Promise, delay: number = while (Date.now() - startTime < maxTime) { try { return await fn(); - } catch { + } catch (error) { + if (debug) console.error('Error in retryWithBackoff:', error); if (attempt === 0) { console.warn('Attempting to synchronize. 
The content may not be synchronized on the edge yet.'); } diff --git a/packages/storage/tests/object.test.ts b/packages/storage/tests/object.test.ts index 519c5f4d..58438d34 100644 --- a/packages/storage/tests/object.test.ts +++ b/packages/storage/tests/object.test.ts @@ -30,7 +30,7 @@ describe('Storage Module - Object operations', () => { options: { debug, env }, }); - expect(result.data).toEqual({ key: 'test-object', content: 'file-content', state: 'success' }); + expect(result.data).toEqual({ key: 'test-object', state: 'success' }); expect(services.postObject).toHaveBeenCalledWith( mockToken, 'test-bucket', diff --git a/packages/types/src/index.ts b/packages/types/src/index.ts index 593e8db5..21fac2ac 100644 --- a/packages/types/src/index.ts +++ b/packages/types/src/index.ts @@ -2,11 +2,12 @@ export namespace Azion { // eslint-disable-next-line @typescript-eslint/no-namespace export declare namespace Storage { + export type ContentObjectStorage = ArrayBuffer | ReadableStream | string | Uint8Array; export interface StorageInstance { list(): Promise<{ entries: { key: string; content_length?: number }[] }>; put( key: string, - value: ArrayBuffer, + value: ContentObjectStorage, options?: { 'content-length'?: string; 'content-type'?: string }, ): Promise; delete(key: string): Promise; diff --git a/packages/unenv-preset/src/index.ts b/packages/unenv-preset/src/index.ts index f2ddcd53..ead81322 100644 --- a/packages/unenv-preset/src/index.ts +++ b/packages/unenv-preset/src/index.ts @@ -16,6 +16,8 @@ export default { setInterval: `${polyfillsPath}/node/globals/set-interval.js`, clearInterval: `${polyfillsPath}/node/globals/clear-interval.js`, console: `${polyfillsPath}/node/globals/console.js`, + asyncStorage: `${polyfillsPath}/node/globals/async-storage.js`, + dateToString: `${polyfillsPath}/node/globals/date-to-string.js`, }, alias: { 'azion/utils': 'azion/utils', @@ -24,11 +26,13 @@ export default { '@fastly/http-compute-js': '@fastly/http-compute-js', accepts: 
'accepts', assert: 'assert-browserify', + buffer: `${polyfillsPath}/node/buffer.js`, https: `${polyfillsPath}/node/https.js`, module: `${polyfillsPath}/node/module.js`, string_decoder: 'string_decoder/lib/string_decoder.js', timers: 'timers-browserify/', util: `${polyfillsPath}/node/util.js`, + zlib: `${polyfillsPath}/node/zlib.js`, }, external: ['node:async_hooks', 'node:fs/promises', 'node:stream', 'node:crypto'], polyfill: [ diff --git a/packages/unenv-preset/src/polyfills/node/buffer.js b/packages/unenv-preset/src/polyfills/node/buffer.js new file mode 100644 index 00000000..9734f766 --- /dev/null +++ b/packages/unenv-preset/src/polyfills/node/buffer.js @@ -0,0 +1,109 @@ +import * as base64 from 'base64-js'; +import * as originalBuffer from 'unenv/node/buffer'; + +if (!originalBuffer.Buffer.prototype.latin1Slice) { + originalBuffer.Buffer.prototype.latin1Slice = function (start, end) { + return originalBuffer.Buffer.from(this).toString('latin1', start, end); + }; +} +if (!originalBuffer.Buffer.prototype.utf8Slice) { + originalBuffer.Buffer.prototype.utf8Slice = function (start, end) { + return originalBuffer.Buffer.from(this).toString('utf8', start, end); + }; +} + +// Store original methods +const originalToString = originalBuffer.Buffer.prototype.toString; +const originalWrite = originalBuffer.Buffer.prototype.write; +const originalIsEncoding = originalBuffer.Buffer.prototype.isEncoding; + +// Helper functions for base64url conversion +const INVALID_BASE64_RE = /[^+/0-9A-Za-z-_]/g; +const BASE64_CHAR_62 = '+'; +const BASE64_CHAR_63 = '/'; +const BASE64URL_CHAR_62 = '-'; +const BASE64URL_CHAR_63 = '_'; + +function base64urlFromBase64(str) { + return str.replaceAll(BASE64_CHAR_62, BASE64URL_CHAR_62).replaceAll(BASE64_CHAR_63, BASE64URL_CHAR_63); +} + +function base64urlToBase64(str) { + return str.replaceAll(BASE64URL_CHAR_62, BASE64_CHAR_62).replaceAll(BASE64URL_CHAR_63, BASE64_CHAR_63); +} + +// Override toString to support base64url encoding 
+originalBuffer.Buffer.prototype.toString = function toString(encoding, start, end) { + if (encoding === 'base64url') { + return base64Slice(this, start, end, encoding); + } + if (encoding === 'base64') { + return base64Slice(this, start, end, encoding); + } + return originalToString.call(this, encoding, start, end); +}; + +function blitBuffer(src, dst, offset, length) { + let i; + for (i = 0; i < length; ++i) { + if (i + offset >= dst.length || i >= src.length) break; + dst[i + offset] = src[i]; + } + return i; +} + +function base64clean(str) { + // Node takes equal signs as end of the Base64 encoding + str = str.split('=')[0]; + // Node strips out invalid characters like \n and \t from the string, base64-js does not + str = str.trim().replace(INVALID_BASE64_RE, ''); + // Node converts strings with length < 2 to '' + if (str.length < 2) return ''; + // Node allows for non-padded base64 strings (missing trailing ===), base64-js does not + while (str.length % 4 !== 0) { + str = str + '='; + } + return str; +} + +function base64ToBytes(str) { + return base64.toByteArray(base64clean(str)); +} + +function base64Write(buf, string, offset, length, encoding) { + const b64 = encoding === 'base64url' ? base64urlToBase64(string) : string; + return blitBuffer(base64ToBytes(b64), buf, offset, length); +} + +function base64Slice(buf, start, end, encoding) { + let b64; + if (start === 0 && end === buf.length) { + b64 = base64.fromByteArray(buf); + } else { + b64 = base64.fromByteArray(buf.slice(start, end)); + } + return encoding === 'base64url' ? 
base64urlFromBase64(b64) : b64; +} + +originalBuffer.Buffer.prototype.write = function write(string, offset, length, encoding) { + if (encoding === 'base64') { + return base64Write(this, string, offset, length, encoding); + } + return originalWrite.call(this, string, offset, length, encoding); +}; + +originalBuffer.Buffer.prototype.isEncoding = function isEncoding(encoding) { + if (String(encoding).toLowerCase() === 'base64url') { + return true; + } + return originalIsEncoding.call(this, encoding); +}; + +export * from 'unenv/node/buffer'; + +export const Buffer = originalBuffer.Buffer; + +export default { + ...originalBuffer.default, + Buffer, +}; diff --git a/packages/unenv-preset/src/polyfills/node/globals/async-storage.js b/packages/unenv-preset/src/polyfills/node/globals/async-storage.js new file mode 100644 index 00000000..cbeae840 --- /dev/null +++ b/packages/unenv-preset/src/polyfills/node/globals/async-storage.js @@ -0,0 +1,22 @@ +import async_hooks from 'async_hooks'; +// Implement snapshot for AsyncLocalStorage +if (async_hooks.AsyncLocalStorage && !async_hooks.AsyncLocalStorage.prototype.snapshot) { + async_hooks.AsyncLocalStorage.prototype.snapshot = function () { + const store = this.getStore(); + return () => store; + }; +} +// Also add snapshot as a static method if needed +if (async_hooks.AsyncLocalStorage && !async_hooks.AsyncLocalStorage.snapshot) { + async_hooks.AsyncLocalStorage.snapshot = () => { + return (fn, ...args) => { + if (typeof fn === 'function') { + const result = fn(...args); + return result; + } + return fn; + }; + }; +} + +export default async_hooks.AsyncLocalStorage; diff --git a/packages/unenv-preset/src/polyfills/node/globals/date-to-string.js b/packages/unenv-preset/src/polyfills/node/globals/date-to-string.js new file mode 100644 index 00000000..20924db2 --- /dev/null +++ b/packages/unenv-preset/src/polyfills/node/globals/date-to-string.js @@ -0,0 +1,14 @@ +// TODO: fix temp, move to runtime +const originalToString = 
Object.prototype.toString; + +// Override Object.prototype.toString to properly identify Date objects +Object.prototype.toString = function () { + // Check if this is a Date instance + if (this instanceof Date || (this && typeof this.getTime === 'function' && typeof this.toISOString === 'function')) { + return '[object Date]'; + } + // Fall back to original toString for other objects + return originalToString.call(this); +}; + +export default Object.prototype.toString; diff --git a/packages/unenv-preset/src/polyfills/node/internal/_internal.js b/packages/unenv-preset/src/polyfills/node/internal/_internal.js deleted file mode 100644 index 58ed60ce..00000000 --- a/packages/unenv-preset/src/polyfills/node/internal/_internal.js +++ /dev/null @@ -1,10 +0,0 @@ -export function notImplemented(name) { - const fn = () => { - throw createNotImplementedError(name); - }; - return Object.assign(fn, { __unenv__: true }); -} - -export function createNotImplementedError(name) { - return new Error(`[unenv] ${name} is not implemented yet!`); -} diff --git a/packages/unenv-preset/src/polyfills/node/internal/util/inherits.js b/packages/unenv-preset/src/polyfills/node/internal/util/inherits.js deleted file mode 100644 index d081a094..00000000 --- a/packages/unenv-preset/src/polyfills/node/internal/util/inherits.js +++ /dev/null @@ -1,14 +0,0 @@ -export function inherits(ctor, superCtor) { - if (!superCtor) { - return; - } - ctor.super_ = superCtor; - ctor.prototype = Object.create(superCtor.prototype, { - constructor: { - value: ctor, - enumerable: false, - writable: true, - configurable: true, - }, - }); -} diff --git a/packages/unenv-preset/src/polyfills/node/internal/util/legacy-types.js b/packages/unenv-preset/src/polyfills/node/internal/util/legacy-types.js deleted file mode 100644 index c869a0d7..00000000 --- a/packages/unenv-preset/src/polyfills/node/internal/util/legacy-types.js +++ /dev/null @@ -1,47 +0,0 @@ -/* eslint-disable */ -export const isRegExp = (val) => val 
instanceof RegExp; - -export const isDate = (val) => val instanceof Date; - -export const isArray = (val) => Array.isArray(val); - -export const isBoolean = (val) => typeof val === 'boolean'; - -export const isNull = (val) => val === null; - -export const isNullOrUndefined = (val) => val === null || val === undefined; - -export const isNumber = (val) => typeof val === 'number'; - -export const isString = (val) => typeof val === 'string'; - -export const isSymbol = (val) => typeof val === 'symbol'; - -export const isUndefined = (val) => val === undefined; - -export const isFunction = (val) => typeof val === 'function'; - -export const isBuffer = (val) => { - return ( - val && - typeof val === 'object' && - typeof val.copy === 'function' && - typeof val.fill === 'function' && - typeof val.readUInt8 === 'function' - ); -}; - -export const isDeepStrictEqual = (a, b) => JSON.stringify(a) === JSON.stringify(b); - -export const isObject = (val) => - val !== null && typeof val === 'object' && Object.getPrototypeOf(val).isPrototypeOf(Object); - -export const isError = (val) => val instanceof Error; - -// Source https://github.com/jonschlinkert/is-primitive/blob/b22c524da5cbac075f14145780ec4b3637afd7dc/index.js -export const isPrimitive = (val) => { - if (typeof val === 'object') { - return val === null; - } - return typeof val !== 'function'; -}; diff --git a/packages/unenv-preset/src/polyfills/node/internal/util/log.js b/packages/unenv-preset/src/polyfills/node/internal/util/log.js deleted file mode 100644 index 926cc546..00000000 --- a/packages/unenv-preset/src/polyfills/node/internal/util/log.js +++ /dev/null @@ -1,64 +0,0 @@ -/* eslint-disable */ -export const log = (...args) => { - console.log(...args); -}; - -export const debuglog = (section, _cb) => { - const fn = (msg, ...params) => { - if (fn.enabled) { - console.debug(`[${section}] ${msg}`, ...params); - } - }; - fn.enabled = true; - return fn; -}; - -export const debug = debuglog; - -export const inspect = 
(object) => JSON.stringify(object, null, 2); - -export const format = (...args) => _format(...args); - -export const formatWithOptions = (_options, ...args) => _format(...args); - -// Source: https://github.com/tmpfs/format-util/blob/0c989942c959b179eec294a4e725afd63e743f18/format.js -function _format(fmt, ...args) { - const re = /(%?)(%([djos]))/g; - if (args.length > 0) { - fmt = fmt.replace(re, (match, escaped, ptn, flag) => { - let arg = args.shift(); - switch (flag) { - case 'o': - if (Array.isArray(arg)) { - arg = JSON.stringify(arg); - break; - } - break; - case 's': - arg = '' + arg; - break; - case 'd': - arg = Number(arg); - break; - case 'j': - arg = JSON.stringify(arg); - break; - } - if (!escaped) { - return arg; - } - args.unshift(arg); - return match; - }); - } - - // arguments remain after formatting - if (args.length > 0) { - fmt += ' ' + args.join(' '); - } - - // update escaped %% values - fmt = fmt.replace(/%{2}/g, '%'); - - return '' + fmt; -} diff --git a/packages/unenv-preset/src/polyfills/node/internal/util/mime.js b/packages/unenv-preset/src/polyfills/node/internal/util/mime.js deleted file mode 100644 index fe6c37d6..00000000 --- a/packages/unenv-preset/src/polyfills/node/internal/util/mime.js +++ /dev/null @@ -1,44 +0,0 @@ -// https://nodejs.org/api/util.html#class-utilmimetype - -export class MIMEType { - __unenv__ = true; - - params = new MIMEParams(); - type; - subtype; - - constructor(input) { - const [essence = '', ...params] = String(input).split(';'); - const [type = '', subtype = ''] = essence.split('/'); - this.type = type; - this.subtype = subtype; - this.params = new MIMEParams(); - for (const param of params) { - const [name, value] = param.split('='); - this.params.set(name, value); - } - } - - get essence() { - return this.type + '/' + this.subtype; - } - - toString() { - const paramsStr = this.params.toString(); - return this.essence + (paramsStr ? 
`;${paramsStr}` : ''); - } -} - -// https://nodejs.org/api/util.html#util_class_util_mimeparams - -export class MIMEParams extends Map { - __unenv__ = true; - - get(name) { - return super.get(name) || null; - } - - toString() { - return [...this.entries()].map(([name, value]) => `${name}=${value}`).join('&'); - } -} diff --git a/packages/unenv-preset/src/polyfills/node/internal/util/promisify.js b/packages/unenv-preset/src/polyfills/node/internal/util/promisify.js deleted file mode 100644 index e23566b2..00000000 --- a/packages/unenv-preset/src/polyfills/node/internal/util/promisify.js +++ /dev/null @@ -1,24 +0,0 @@ -const customSymbol = Symbol.for('nodejs.util.promisify.custom'); - -function _promisify(fn) { - if (fn[customSymbol]) { - return fn[customSymbol]; - } - return function (...args) { - return new Promise((resolve, reject) => { - try { - fn.call(this, ...args, (err, val) => { - if (err) { - return reject(err); - } - resolve(val); - }); - } catch (error) { - console.error('Error in promisified function:', error.stack); - reject(error); - } - }); - }; -} - -export const promisify = Object.assign(_promisify, { custom: customSymbol }); diff --git a/packages/unenv-preset/src/polyfills/node/internal/util/types.js b/packages/unenv-preset/src/polyfills/node/internal/util/types.js deleted file mode 100644 index 09090c11..00000000 --- a/packages/unenv-preset/src/polyfills/node/internal/util/types.js +++ /dev/null @@ -1,131 +0,0 @@ -/* eslint-disable */ -import { notImplemented } from '../_internal.js'; - -export const isExternal = (_obj) => false; - -export const isDate = (val) => val instanceof Date; - -export const isArgumentsObject = /*@__PURE__*/ notImplemented('util.types.isArgumentsObject'); - -export const isBigIntObject = (val) => val instanceof BigInt; - -export const isBooleanObject = (val) => val instanceof Boolean; - -export const isNumberObject = (val) => val instanceof Number; - -export const isStringObject = (val) => val instanceof String; - -export 
const isSymbolObject = (val) => val instanceof Symbol; - -export const isNativeError = /*@__PURE__*/ notImplemented('util.types.isNativeError'); - -export const isRegExp = (val) => val instanceof RegExp; - -export const isAsyncFunction = /*@__PURE__*/ notImplemented('util.types.isAsyncFunction'); - -export const isGeneratorFunction = /*@__PURE__*/ notImplemented('util.types.isGeneratorFunction'); - -export const isGeneratorObject = /*@__PURE__*/ notImplemented('util.types.isGeneratorObject'); - -export const isPromise = (val) => val instanceof Promise; - -export const isMap = (val) => val instanceof Map; - -export const isSet = (val) => val instanceof Set; - -export const isMapIterator = /*@__PURE__*/ notImplemented('util.types.isMapIterator'); - -export const isSetIterator = /*@__PURE__*/ notImplemented('util.types.isSetIterator'); - -export const isWeakMap = (val) => val instanceof WeakMap; - -export const isWeakSet = (val) => val instanceof WeakSet; - -export const isArrayBuffer = (val) => val instanceof ArrayBuffer; - -export const isDataView = (val) => val instanceof DataView; - -export const isSharedArrayBuffer = (val) => val instanceof SharedArrayBuffer; - -export const isProxy = /*@__PURE__*/ notImplemented('util.types.isProxy'); - -export const isModuleNamespaceObject = /*@__PURE__*/ notImplemented('util.types.isModuleNamespaceObject'); - -export const isAnyArrayBuffer = /*@__PURE__*/ notImplemented('util.types.isAnyArrayBuffer'); - -export const isBoxedPrimitive = /*@__PURE__*/ notImplemented('util.types.isBoxedPrimitive'); - -export const isArrayBufferView = /*@__PURE__*/ notImplemented('util.types.isArrayBufferView'); - -export const isTypedArray = /*@__PURE__*/ notImplemented('util.types.isTypedArray'); - -export const isUint8Array = /*@__PURE__*/ notImplemented('util.types.isUint8Array'); - -export const isUint8ClampedArray = /*@__PURE__*/ notImplemented('util.types.isUint8ClampedArray'); - -export const isUint16Array = /*@__PURE__*/ 
notImplemented('util.types.isUint16Array'); - -export const isUint32Array = /*@__PURE__*/ notImplemented('util.types.isUint32Array'); - -export const isInt8Array = /*@__PURE__*/ notImplemented('util.types.isInt8Array'); - -export const isInt16Array = /*@__PURE__*/ notImplemented('util.types.isInt16Array'); - -export const isInt32Array = /*@__PURE__*/ notImplemented('util.types.isInt32Array'); - -export const isFloat32Array = /*@__PURE__*/ notImplemented('util.types.isFloat32Array'); - -export const isFloat64Array = /*@__PURE__*/ notImplemented('util.types.isFloat64Array'); - -export const isBigInt64Array = /*@__PURE__*/ notImplemented('util.types.isBigInt64Array'); - -export const isBigUint64Array = /*@__PURE__*/ notImplemented('util.types.isBigUint64Array'); - -export const isKeyObject = /*@__PURE__*/ notImplemented('util.types.isKeyObject'); - -// export const isCryptoKey = /*@__PURE__*/ notImplemented('util.types.isCryptoKey'); -export const isCryptoKey = (val) => typeof CryptoKey !== 'undefined' && val instanceof CryptoKey; - -export default { - isExternal, - isDate, - isArgumentsObject, - isBigIntObject, - isBooleanObject, - isNumberObject, - isStringObject, - isSymbolObject, - isNativeError, - isRegExp, - isAsyncFunction, - isGeneratorFunction, - isGeneratorObject, - isPromise, - isMap, - isSet, - isMapIterator, - isSetIterator, - isWeakMap, - isWeakSet, - isArrayBuffer, - isDataView, - isSharedArrayBuffer, - isProxy, - isModuleNamespaceObject, - isAnyArrayBuffer, - isBoxedPrimitive, - isArrayBufferView, - isTypedArray, - isUint8Array, - isUint8ClampedArray, - isUint16Array, - isUint32Array, - isInt8Array, - isInt16Array, - isInt32Array, - isFloat32Array, - isFloat64Array, - isBigInt64Array, - isBigUint64Array, - isKeyObject, // CryptoKey -}; diff --git a/packages/unenv-preset/src/polyfills/node/util.js b/packages/unenv-preset/src/polyfills/node/util.js index 00df1acb..cea939c9 100644 --- a/packages/unenv-preset/src/polyfills/node/util.js +++ 
b/packages/unenv-preset/src/polyfills/node/util.js @@ -1,130 +1,19 @@ -/* eslint-disable */ -// https://nodejs.org/api/util.html -import types from 'node:util/types'; -import { notImplemented } from './internal/_internal.js'; -import { inherits } from './internal/util/inherits.js'; -import { - isArray, - isBoolean, - isBuffer, - isDate, - isDeepStrictEqual, - isError, - isFunction, - isNull, - isNullOrUndefined, - isNumber, - isObject, - isPrimitive, - isRegExp, - isString, - isSymbol, - isUndefined, -} from './internal/util/legacy-types.js'; -import { debug, debuglog, format, formatWithOptions, inspect, log } from './internal/util/log.js'; -import { MIMEParams, MIMEType } from './internal/util/mime.js'; -import { promisify } from './internal/util/promisify.js'; +import * as originalUtil from 'unenv/node/util'; -export { MIMEParams, MIMEType } from './internal/util/mime.js'; +export * from 'unenv/node/util'; -export * from './internal/util/legacy-types.js'; +const isCryptoKey = (val) => typeof CryptoKey !== 'undefined' && val instanceof CryptoKey; -export * from './internal/util/log.js'; +// eslint-disable-next-line no-undef +const isKeyObject = (val) => typeof KeyObject !== 'undefined' && val instanceof KeyObject; -export { inherits } from './internal/util/inherits.js'; - -export { promisify }; - -export { default as types } from './internal/util/types.js'; - -export const TextDecoder = globalThis.TextDecoder; - -export const TextEncoder = globalThis.TextEncoder; - -export const deprecate = (fn) => fn; - -export const _errnoException = notImplemented('util._errnoException'); - -export const _exceptionWithHostPort = notImplemented('util._exceptionWithHostPort'); - -export const _extend = notImplemented('util._extend'); - -export const aborted = notImplemented('util.aborted'); - -export const callbackify = notImplemented('util.callbackify'); - -export const getSystemErrorMap = notImplemented('util.getSystemErrorMap'); - -export const getSystemErrorName = 
notImplemented('util.getSystemErrorName'); - -export const toUSVString = notImplemented('util.toUSVString'); - -export const stripVTControlCharacters = notImplemented('util.stripVTControlCharacters'); - -export const transferableAbortController = notImplemented('util.transferableAbortController'); - -export const transferableAbortSignal = notImplemented('util.transferableAbortSignal'); - -export const parseArgs = notImplemented('util.parseArgs'); - -export const parseEnv = notImplemented('util.parseEnv'); - -export const styleText = notImplemented('util.styleText'); - -/** @deprecated */ -export const getCallSite = notImplemented('util.getCallSite'); - -export const getCallSites = notImplemented('util.getCallSites'); - -export const getSystemErrorMessage = notImplemented('util.getSystemErrorMessage'); +export const types = { + ...originalUtil.types, + isCryptoKey, + isKeyObject, +}; export default { - // @ts-expect-error - _errnoException, - _exceptionWithHostPort, - _extend, - aborted, - callbackify, - deprecate, - getCallSite, - getCallSites, - getSystemErrorMessage, - getSystemErrorMap, - getSystemErrorName, - inherits, - promisify, - stripVTControlCharacters, - toUSVString, - TextDecoder, - TextEncoder, + ...originalUtil, types, - transferableAbortController, - transferableAbortSignal, - parseArgs, - parseEnv, - styleText, - MIMEParams, - MIMEType, - isArray, - isBoolean, - isBuffer, - isDate, - isDeepStrictEqual, - isError, - isFunction, - isNull, - isNullOrUndefined, - isNumber, - isObject, - isPrimitive, - isRegExp, - isString, - isSymbol, - isUndefined, - debug, - debuglog, - format, - formatWithOptions, - inspect, - log, }; diff --git a/packages/unenv-preset/src/polyfills/node/zlib.js b/packages/unenv-preset/src/polyfills/node/zlib.js new file mode 100644 index 00000000..ee4125d6 --- /dev/null +++ b/packages/unenv-preset/src/polyfills/node/zlib.js @@ -0,0 +1,13 @@ +import { inflateSync as inflateSyncBrowserify } from 'browserify-zlib'; +import * as 
originalZlib from 'unenv/node/zlib'; + +export * from 'unenv/node/zlib'; + +export function inflateSync(buffer, options = {}) { + return inflateSyncBrowserify(buffer, options); +} + +export default { + ...originalZlib.default, + inflateSync, +};