2 changes: 1 addition & 1 deletion .github/jsonnet/GIT_VERSION
@@ -1 +1 @@
5d9c576c6fbbcd81178560862e084035c0e72ceb
ba69e229877c795d0b03d039219b44958f553f97
85 changes: 85 additions & 0 deletions .github/jsonnet/base.jsonnet
@@ -2,6 +2,16 @@ local images = import 'images.jsonnet';
local misc = import 'misc.jsonnet';

{
/**
* Creates a complete GitHub Actions workflow pipeline with multiple jobs.
*
* @param {string} name - The name of the workflow (becomes the .yml filename)
* @param {jobs} jobs - Array of job objects (created with ghJob, ghExternalJob, etc.)
* @param {array} [event=['pull_request']] - GitHub events that trigger this workflow
* @param {object} [permissions=null] - Permissions for the workflow (e.g., {contents: 'read'})
* @param {object} [concurrency=null] - Concurrency settings to limit parallel runs
* @returns {workflows} - Object mapping '<name>.yml' to the rendered GitHub Actions YAML manifest
*/
pipeline(name, jobs, event=['pull_request'], permissions=null, concurrency=null):: {
[name + '.yml']:
std.manifestYamlDoc(
@@ -13,6 +23,25 @@
),
},
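
// Usage sketch: a single-job PR workflow (hypothetical names; assumes this
// library is imported as 'base'):
//
//   base.pipeline(
//     'ci',
//     [base.ghJob('test', steps=[base.step('run tests', 'yarn test')])],
//     event=['pull_request'],
//   )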

/**
* Creates a GitHub Actions job that runs on a containerized runner.
*
* @param {string} name - The name of the job (used as the job key)
* @param {number} [timeoutMinutes=30] - Maximum time in minutes before the job is cancelled. Values above 55 have no effect, as the runner itself is killed after 55 minutes.
* @param {string} [runsOn=null] - Runner type (defaults to 'arc-runner-2')
* @param {string} [image=images.default_job_image] - Docker image to run the job in
* @param {steps} [steps=[]] - Array of step objects (created with step() or action())
* @param {string} [ifClause=null] - Conditional expression to determine if job should run
* @param {array} [needs=null] - Array of job names this job depends on
* @param {object} [outputs=null] - Job outputs available to dependent jobs
* @param {boolean} [useCredentials=true] - Whether to use Docker registry credentials. Must be set to false for public images.
* @param {object} [services=null] - Service containers to run alongside the job
* @param {object} [permissions=null] - Job-level permissions (overrides workflow permissions)
* @param {object} [concurrency=null] - Job-level concurrency settings
* @param {boolean} [continueOnError=null] - Whether to continue workflow if job fails
* @param {object} [env=null] - Environment variables for all steps in the job
* @returns {jobs} - GitHub Actions job definition
*/
ghJob(
name,
timeoutMinutes=30,
@@ -55,6 +84,14 @@ local misc = import 'misc.jsonnet';
(if env == null then {} else { env: env }),
},
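
// Usage sketch: a containerized job with a dependency (hypothetical names;
// 'images' is the module imported at the top of this file):
//
//   base.ghJob(
//     'build',
//     timeoutMinutes=15,
//     image=images.default_job_image,
//     steps=[base.step('build', 'yarn build')],
//     needs=['lint'],
//   )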

/**
* Creates a GitHub Actions job that uses a reusable workflow from another repository.
*
* @param {string} name - The name of the job (used as the job key)
* @param {string} uses - The reusable workflow reference (e.g., 'owner/repo/.github/workflows/workflow.yml@ref')
* @param {object} [with=null] - Input parameters to pass to the reusable workflow
* @returns {jobs} - GitHub Actions external job definition
*/
ghExternalJob(
name,
uses,
@@ -68,6 +105,38 @@
} else {}),
},
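
// Usage sketch: calling a reusable workflow (hypothetical reference and inputs):
//
//   base.ghExternalJob(
//     'deploy',
//     'gynzy/workflows/.github/workflows/deploy.yml@main',
//     with={ environment: 'production' },
//   )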

/**
* Creates a GitHub Actions step that runs shell commands.
*
* @docs https://docs.github.com/en/actions/reference/workflows-and-actions/workflow-syntax#jobsjob_idsteps
*
* @param {string} name - Display name for the step in the GitHub UI
* @param {string} run - Shell command(s) to execute
* @param {object} [env=null] - Environment variables for this step
* @param {string} [workingDirectory=null] - Directory to run the command in
* @param {string} [ifClause=null] - Conditional expression to determine if step should run
* @param {string} [id=null] - Unique identifier for this step (used to reference outputs)
* @param {boolean} [continueOnError=null] - Whether to continue the job if this step fails; defaults to false
* @param {string} [shell=null] - Shell to use for running commands (e.g., 'bash', 'python', 'powershell'); defaults to 'bash'
* @returns {steps} - Array containing a single step object
*
* @example
* base.step(
* name='Run tests',
* run='pytest tests/',
* env={ 'ENV_VAR': 'value' },
* workingDirectory='backend',
* )
*
* base.step(
* name='Set up Python',
* run=|||
* python -m venv venv
* source venv/bin/activate
* pip install -r requirements.txt
* |||,
* )
*/
step(name, run, env=null, workingDirectory=null, ifClause=null, id=null, continueOnError=null, shell=null)::
[
{
@@ -81,6 +150,22 @@
+ (if shell == null then {} else { 'shell': shell }),
],

/**
* Creates a GitHub Actions step that uses a predefined action from the marketplace or repository.
* Security: prefer pinning action references to a full commit SHA (e.g., actions/checkout@<commit_sha>) instead of a mutable tag or version,
* especially for lesser-known or smaller third-party actions, to reduce supply-chain attack risk.
*
* @docs https://docs.github.com/en/actions/reference/workflows-and-actions/workflow-syntax#jobsjob_idsteps
*
* @param {string} name - Display name for the step in the GitHub UI
* @param {string} uses - The action to use (e.g., 'actions/checkout@v4', './path/to/action')
* @param {object} [env=null] - Environment variables for this step
* @param {object} [with=null] - Input parameters to pass to the action
* @param {string} [id=null] - Unique identifier for this step (used to reference outputs)
* @param {string} [ifClause=null] - Conditional expression to determine if step should run
* @param {boolean} [continueOnError=null] - Whether to continue job if this step fails
* @returns {steps} - Array containing a single step object
*/
action(name, uses, env=null, with=null, id=null, ifClause=null, continueOnError=null)::
[
{
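// Usage sketch: a marketplace action pinned to a commit SHA (placeholder shown;
// substitute the full commit SHA you have verified):
//
//   base.action(
//     'checkout',
//     'actions/checkout@<commit_sha>',
//     with={ 'fetch-depth': 0 },
//   )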
56 changes: 26 additions & 30 deletions .github/jsonnet/buckets.jsonnet
@@ -1,33 +1,29 @@
{
// Uploads all files in the source folder to the destination bucket, including compression and TTL headers.
//
// Warnings:
// - remote/destination files not included in the source will be DELETED recursively if pruneRemote is true!
// - the files in the source directory will be modified. Do not attempt to use this directory after running this command.
// - must be run with bash shell.
//
// Parameters:
// sourcePath: The source directory to upload. Can be a local folder of a path in a bucket, depending on sourceBucket. Required.
// sourceBucket: The source bucket. If null, the sourcePath is a local directory. Defaults to null.
// destinationBucket: The destination bucket. Required.
// destinationPath: The destination directory in the bucket. Required.
//
// pruneRemote: If true, all files in the destination bucket that are not in the source will be deleted. Can only be used with destinationPath containing 'pr-'.
//
// compressFileExtentions: A list of file extentions that will be compressed. Set to an empty list to disable compression.
// compressJobs: The number of parallel gzip compression jobs. Use 4 for arc-runner-2 and 16 for arc-runner-16. Defaults to 4.
//
// lowTTLfiles: A list of files, or a single regex, that will be uploaded with a low TTL. Use this for files that are not fingerprinted.
//
// lowTTL: The TTL for lowTTLfiles. Defaults to 60 seconds.
// lowTTLStaleWhileRevalidate: The stale-while-revalidate value for lowTTLfiles. Defaults to 60 seconds.
// lowTTLHeader: The Cache-Control header for lowTTLfiles. This is generated from lowTTL and lowTTLStaleWhileRevalidate.
//
// highTTL: The TTL for all other files. Defaults to 1 week.
// highTTLStaleWhileRevalidate: The stale-while-revalidate value for all other files. Defaults to 1 day.
// highTTLHeader: The Cache-Control header for all other files. This is generated from highTTL and highTTLStaleWhileRevalidate.
//
// additionalHeaders: Additional headers to add to all uploaded files. This should be an array of strings.
/**
* Uploads all files in the source folder to the destination bucket, including compression and TTL headers.
*
* WARNINGS:
* - Remote/destination files not included in the source will be DELETED recursively if pruneRemote is true!
* - The files in the source directory will be modified. Do not attempt to use this directory after running this command.
* - Must be run with bash shell.
*
* @param {string} sourcePath - The source directory to upload. Can be a local folder or a path in a bucket, depending on sourceBucket. Required.
* @param {string} [sourceBucket=null] - The source bucket. If null, the sourcePath is a local directory.
* @param {string} destinationBucket - The destination bucket. Required.
* @param {string} destinationPath - The destination directory in the bucket. Required.
* @param {boolean} [pruneRemote=false] - If true, all files in the destination bucket that are not in the source will be deleted. Can only be used with destinationPath containing 'pr-'.
* @param {array} [compressFileExtentions=['css', 'svg', 'html', 'json', 'js', 'xml', 'txt', 'map']] - A list of file extensions that will be compressed. Set to an empty list to disable compression.
* @param {number} [compressJobs=4] - The number of parallel gzip compression jobs. Use 4 for arc-runner-2 and 16 for arc-runner-16.
* @param {array|string} [lowTTLfiles=[]] - A list of files, or a single regex, that will be uploaded with a low TTL. Use this for files that are not fingerprinted.
* @param {number} [lowTTL=60] - The TTL for lowTTLfiles in seconds.
* @param {number} [lowTTLStaleWhileRevalidate=60] - The stale-while-revalidate value for lowTTLfiles in seconds.
* @param {string} [lowTTLHeader] - The Cache-Control header for lowTTLfiles. This is generated from lowTTL and lowTTLStaleWhileRevalidate.
* @param {number} [highTTL=604800] - The TTL for all other files in seconds (defaults to 1 week).
* @param {number} [highTTLStaleWhileRevalidate=86400] - The stale-while-revalidate value for all other files in seconds (defaults to 1 day).
* @param {string} [highTTLHeader] - The Cache-Control header for all other files. This is generated from highTTL and highTTLStaleWhileRevalidate.
* @param {array} [additionalHeaders=[]] - Additional headers to add to all uploaded files. This should be an array of strings.
* @returns {string} - Complete bash command for uploading files to Google Cloud Storage with compression and caching
*/
uploadFilesToBucketCommand(
sourcePath,
sourceBucket=null,
@@ -53,7 +49,7 @@
local highTTLfilesRegex = '(?!' + lowTTLfilesRegex + ').*';

local hasCompressedFiles = (std.isArray(compressFileExtentions) && std.length(compressFileExtentions) > 0) || (std.isString(compressFileExtentions) && compressFileExtentions != '');
local compressedFilesRegex = '(' + std.join('|', std.map(function(ext) '((.*(\\.|/))?' + ext + ')', compressFileExtentions)) + ')';
local compressedFilesRegex = '(' + std.join('|', std.map(function(ext) '(.*\\.' + ext + ')', compressFileExtentions)) + ')';
local uncompressedFilesRegex = '(?!' + compressedFilesRegex + ').*';

local compressionHeader = 'Content-Encoding: gzip';
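// Usage sketch (hypothetical bucket and paths; assumes this file is imported as
// 'buckets' and base.jsonnet as 'base'). The function returns a bash command
// string, typically passed to base.step:
//
//   base.step(
//     'upload assets',
//     buckets.uploadFilesToBucketCommand(
//       sourcePath='dist',
//       destinationBucket='my-assets-bucket',
//       destinationPath='pr-123',
//       pruneRemote=true,  // allowed because destinationPath contains 'pr-'
//     ),
//   )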
78 changes: 45 additions & 33 deletions .github/jsonnet/cache.jsonnet
@@ -1,21 +1,26 @@
local base = import 'base.jsonnet';

{
// Fetch a cache from the cache server.
// This is a generic function that can be used to fetch any cache. It is advised to wrap this function
// in a more specific function that fetches a specific cache, setting the cacheName and folders parameters.
//
// To be paired with the uploadCache function.
//
// Parameters:
// cacheName: The name of the cache to fetch. The name of the repository is usually a good option. Required.
// backupCacheName: The name of a backup cache to fetch if the main cache fails. Default is null.
// folders: A list of folders that are in the cache. These will be deleted if the download fails. Can be an empty list if additionalCleanupCommands are used.
// additionalCleanupCommands: A list of additional commands to run if the download fails. Default is an empty list.
// ifClause: An optional if clause to conditionally run this step. Default is null.
// workingDirectory: The working directory for this step. Default is null.
// retry: Whether to retry the download if it fails. Default is true.
// continueWithoutCache: Whether to continue if the cache is not found. Default is true.
/**
* Fetch a cache from the cache server.
*
* This is a generic function that can be used to fetch any cache. It is advised to wrap this function
* in a more specific function that fetches a specific cache, setting the cacheName and folders parameters.
*
* To be paired with the uploadCache function.
*
* @param {string} cacheName - The name of the cache to fetch. The name of the repository is usually a good option.
* @param {string} [backupCacheName=null] - The name of a backup cache to fetch if the main cache fails.
* @param {array} [folders=[]] - A list of folders that are in the cache. These will be deleted if the download fails. Can be an empty list if additionalCleanupCommands are used.
* @param {string} [version='v1'] - The version of the cache to fetch.
* @param {string} [backupCacheVersion=version] - The version of the backup cache to fetch.
* @param {array} [additionalCleanupCommands=[]] - A list of additional commands to run if the download fails.
* @param {string} [ifClause=null] - An optional if clause to conditionally run this step.
* @param {string} [workingDirectory=null] - The working directory for this step.
* @param {boolean} [retry=true] - Whether to retry the download if it fails.
* @param {boolean} [continueWithoutCache=true] - Whether to continue if the cache is not found.
* @returns {steps} - GitHub Actions step to download cache from Google Cloud Storage
*/
fetchCache(
cacheName,
backupCacheName=null,
@@ -75,17 +80,21 @@ local base = import 'base.jsonnet';
workingDirectory=workingDirectory,
),
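
// Usage sketch (hypothetical cache name; assumes this file is imported as 'cache'):
//
//   cache.fetchCache(
//     'my-repo',
//     folders=['node_modules'],
//     version='v1',
//   )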

// Uploads a cache to the cache server.
// This is a generic function that can be used to upload any cache. It is advised to wrap this function
// in a more specific function that uploads a specific cache, setting the cacheName and folders parameters.
//
// To be paired with the fetchCache function.
//
// Parameters:
// cacheName: The name of the cache to upload. The name of the repository is usually a good option. Required.
// folders: A list of folders to include in the cache. Required unless tarCommand is given.
// compressionLevel: The compression level to use for zstd. Default is 10.
// tarCommand: The command to run to create the tar file. Default is 'tar -c ' + std.join(' ', folders).
/**
* Uploads a cache to the cache server.
*
* This is a generic function that can be used to upload any cache. It is advised to wrap this function
* in a more specific function that uploads a specific cache, setting the cacheName and folders parameters.
*
* To be paired with the fetchCache function.
*
* @param {string} cacheName - The name of the cache to upload. The name of the repository is usually a good option.
* @param {array} [folders=null] - A list of folders to include in the cache. Required unless tarCommand is given.
* @param {string} [version='v1'] - The version of the cache to upload.
* @param {number} [compressionLevel=10] - The compression level to use for zstd.
* @param {string} [tarCommand='tar -c ' + std.join(' ', folders)] - The command to run to create the tar file.
* @returns {steps} - GitHub Actions step to upload cache to Google Cloud Storage with zstd compression
*/
uploadCache(
cacheName,
folders=null,
@@ -110,13 +119,16 @@ local base = import 'base.jsonnet';
'echo "Upload finished"\n'
),
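
// Usage sketch (pairs with the fetchCache sketch above; same hypothetical names):
//
//   cache.uploadCache(
//     'my-repo',
//     folders=['node_modules'],
//     version='v1',
//   )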

// Removes a cache from the cache server.
// This is a generic function that can be used to remove any cache. It is advised to wrap this function
// in a more specific function that removes a specific cache, setting the cacheName parameter.
//
// Parameters:
// cacheName: The name of the cache to remove. The name of the repository is usually a good option. Required.
// version: The version of the cache to remove. Default is 'v1'.
/**
* Removes a cache from the cache server.
*
* This is a generic function that can be used to remove any cache. It is advised to wrap this function
* in a more specific function that removes a specific cache, setting the cacheName parameter.
*
* @param {string} cacheName - The name of the cache to remove. The name of the repository is usually a good option.
* @param {string} [version='v1'] - The version of the cache to remove.
* @returns {steps} - GitHub Actions step to remove cache from Google Cloud Storage
*/
removeCache(cacheName, version='v1')::
base.step(
'remove ' + cacheName + ' cache',
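// Usage sketch (hypothetical cache name; typically run when a cache is no longer needed):
//
//   cache.removeCache('my-repo', version='v1')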
7 changes: 7 additions & 0 deletions .github/jsonnet/clusters.jsonnet
@@ -1,5 +1,12 @@
local misc = import 'misc.jsonnet';

/**
* Kubernetes Cluster Configuration
*
* This module defines configuration for different Kubernetes clusters used for deployments.
* Each cluster configuration includes project information, authentication secrets, and
* node selector settings for job scheduling.
*/
{
test: {
project: 'gynzy-test-project',
Expand Down
22 changes: 15 additions & 7 deletions .github/jsonnet/complete-workflows.jsonnet
@@ -3,13 +3,21 @@ local misc = import 'misc.jsonnet';
local yarn = import 'yarn.jsonnet';

{
/*
@param {string[]} repositories - The repositories to publish to
@param {boolean} isPublicFork - Whether the repository is a public fork
@param {boolean} checkVersionBump - Whether to assert if the version was bumped (recommended)
@param {ghJob} testJob - a job to be ran during PR to assert tests. can be an array of jobs
@param {string} branch - the branch to run the publish-prod job on
*/
/**
* Creates a complete set of workflows for JavaScript package publishing and testing.
*
* Generates three pipelines:
* 1. 'misc' - Jsonnet validation workflow
* 2. 'publish-prod' - Production package publishing on branch push
* 3. 'pr' - Pull request preview publishing and testing
*
* @param {array} [repositories=['gynzy']] - The repositories to publish to
* @param {boolean} [isPublicFork=true] - Whether the repository is a public fork (affects runner selection)
* @param {boolean} [checkVersionBump=true] - Whether to assert if the version was bumped (recommended)
* @param {jobs} [testJob=null] - A job to be run during PR to assert tests. Can be an array of jobs
* @param {string} [branch='main'] - The branch to run the publish-prod job on
* @returns {workflows} - Complete set of GitHub Actions workflows for JavaScript package lifecycle
*/
workflowJavascriptPackage(repositories=['gynzy'], isPublicFork=true, checkVersionBump=true, testJob=null, branch='main')::
local runsOn = (if isPublicFork then 'ubuntu-latest' else null);

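// Usage sketch (hypothetical test job; assumes this file is imported as
// 'workflows' and base.jsonnet as 'base'):
//
//   workflows.workflowJavascriptPackage(
//     repositories=['gynzy'],
//     checkVersionBump=true,
//     testJob=base.ghJob('test', steps=[base.step('run tests', 'yarn test')]),
//     branch='main',
//   )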