diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index f7014c35..a7130553 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
{
- ".": "0.11.0"
+ ".": "0.12.0"
}
\ No newline at end of file
diff --git a/.stats.yml b/.stats.yml
index e93723a0..c248682d 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1,4 +1,4 @@
configured_endpoints: 193
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/gitpod%2Fgitpod-3dcdbd68ce4b336149d28d17ab08f211538ed6630112ae4883af2f6680643159.yml
-openapi_spec_hash: 7e4333995b65cf32663166801e2444bb
-config_hash: 8d7b241284195a8c51f5d670fbbe0ab4
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/gitpod/gitpod-5c5e706fd0877a91f01455f03ef01c45106b1fad3b4aef5967807bce81bcdd53.yml
+openapi_spec_hash: 9d64ab76ba1843ae85b5c719c2a90a3c
+config_hash: 9052d3b03d620cf6871184b15487e020
diff --git a/CHANGELOG.md b/CHANGELOG.md
index ddc444e4..8cca5011 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,61 @@
# Changelog
+## 0.12.0 (2026-05-12)
+
+Full Changelog: [v0.11.0...v0.12.0](https://github.com/gitpod-io/gitpod-sdk-python/compare/v0.11.0...v0.12.0)
+
+### Features
+
+* **api:** add access_token field to runner create response models ([957eb60](https://github.com/gitpod-io/gitpod-sdk-python/commit/957eb60ea90eb7ed188da2cbfb0cd43838b0affd))
+* **api:** add AGENT_EXECUTION_CNF capability to RunnerCapability ([3a521b2](https://github.com/gitpod-io/gitpod-sdk-python/commit/3a521b2a8292e6351117379a4c0c5d6e82e592cc))
+* **api:** add allow_unverified_email_scim_fallback_match to scim_configurations ([e1bba76](https://github.com/gitpod-io/gitpod-sdk-python/commit/e1bba7667f79b870c55096a00f18032bda03a65d))
+* **api:** add credential_proxy to secrets, remove format from environment spec ([349a1ba](https://github.com/gitpod-io/gitpod-sdk-python/commit/349a1bab3893da2900dad96fe92213c0d79c3c92))
+* **api:** add incident trigger support to workflow_trigger and workflow_execution ([5e90f8a](https://github.com/gitpod-io/gitpod-sdk-python/commit/5e90f8ae3d12d2acf8f1eaab330b5a9d8a39fea0))
+* **api:** add integration_id field, make webhook_id required in pull_request trigger ([077b662](https://github.com/gitpod-io/gitpod-sdk-python/commit/077b6622dc4e21a2033c8275c46716286a6515b8))
+* **api:** add max_port_admission_level to organizations policies ([4942a70](https://github.com/gitpod-io/gitpod-sdk-python/commit/4942a703b999261d0b0935f4db63e76c8db5d103))
+* **api:** add old_path field to ContentGitChangedFile ([d79d4d4](https://github.com/gitpod-io/gitpod-sdk-python/commit/d79d4d49ef41a889a81a0d484616fca442849356))
+* **api:** add pagination and query parameter to runners.list_scm_organizations ([333311a](https://github.com/gitpod-io/gitpod-sdk-python/commit/333311aebaefb657861e2a9f6e8adb0f2abdd4dc))
+* **api:** add port_authentication capability to runner_capability ([c29b095](https://github.com/gitpod-io/gitpod-sdk-python/commit/c29b09596d2d87caaea047e61613a333c2fe4e31))
+* **api:** add prebuild trigger value to environments automations ([af2c44e](https://github.com/gitpod-io/gitpod-sdk-python/commit/af2c44e64fac19bf848c4325e0b39b183c998e74))
+* **api:** add project_creation_defaults to organizations policies ([5d8545f](https://github.com/gitpod-io/gitpod-sdk-python/commit/5d8545fa2691bb59b0acf8ca0121300d48349a1e))
+* **api:** add PULL_REQUEST_EVENT_REVIEW_REQUESTED to workflow_trigger events ([242a3ab](https://github.com/gitpod-io/gitpod-sdk-python/commit/242a3ab60ed3580fd9488858727294ed86568ccf))
+* **api:** add readiness_timeout field to service spec types ([8786477](https://github.com/gitpod-io/gitpod-sdk-python/commit/8786477b21152c9040f3281d5b2cb17f3eada5f2))
+* **api:** add RESOURCE_ROLE_ORG_ENVIRONMENTS_READER to resource_role ([e99dc40](https://github.com/gitpod-io/gitpod-sdk-python/commit/e99dc409933596ef561dbd6784e7041d4d64a084))
+* **api:** add StatusGoal model and goal field to agent_execution ([3d37569](https://github.com/gitpod-io/gitpod-sdk-python/commit/3d37569f37337919926bbf799e1069aa585eef49))
+* **api:** add SUPPORTED_MODEL_OPENAI_AUTO to agent_execution status ([54503f5](https://github.com/gitpod-io/gitpod-sdk-python/commit/54503f5295a703ee855eac4c11694d2bbe465d13))
+* **api:** add SUPPORTED_MODEL_OPUS_4_7 to agent_execution Status ([74af533](https://github.com/gitpod-io/gitpod-sdk-python/commit/74af5338d7f6447df5a6464a2b2ef893c3bf4f6e))
+* **api:** add UserInputMetadata type ([ea300f4](https://github.com/gitpod-io/gitpod-sdk-python/commit/ea300f4314c14529c02ffec1e38f474d8a426844))
+* **api:** remove deprecated access_token from runner responses ([003cd7d](https://github.com/gitpod-io/gitpod-sdk-python/commit/003cd7dcac020428985564e76c2bb6e45acd434c))
+* **api:** remove terminal field from RunsOn type ([faca2b2](https://github.com/gitpod-io/gitpod-sdk-python/commit/faca2b27b88f17a759bd91c305e5ea2a856a1e2c))
+* **internal/types:** support eagerly validating pydantic iterators ([cbf4bac](https://github.com/gitpod-io/gitpod-sdk-python/commit/cbf4bac0498fbc32a85fb3d620ba2d7d551b53d0))
+* support setting headers via env ([4e4f3fe](https://github.com/gitpod-io/gitpod-sdk-python/commit/4e4f3fe4d03901b3b63bccd13012197be6cc50ec))
+
+
+### Bug Fixes
+
+* **client:** add missing f-string prefix in file type error message ([9371ec1](https://github.com/gitpod-io/gitpod-sdk-python/commit/9371ec1502be44e4684f8b1e0c7c6d55e8ead8dd))
+* **client:** preserve hardcoded query params when merging with user params ([b7f0b1d](https://github.com/gitpod-io/gitpod-sdk-python/commit/b7f0b1d27ef51872bf81541dd7f81a8101f856af))
+* ensure file data are only sent as 1 parameter ([5c02854](https://github.com/gitpod-io/gitpod-sdk-python/commit/5c02854efdc2874535d3d7867823048d5e8d4693))
+* use correct field name format for multipart file arrays ([c731392](https://github.com/gitpod-io/gitpod-sdk-python/commit/c731392aa36e5a3bba895d4e52a7d50addab326a))
+
+
+### Performance Improvements
+
+* **client:** optimize file structure copying in multipart requests ([cb792b6](https://github.com/gitpod-io/gitpod-sdk-python/commit/cb792b633104ff26eb799d8c32703920213ec23e))
+
+
+### Chores
+
+* **internal:** more robust bootstrap script ([5f05caa](https://github.com/gitpod-io/gitpod-sdk-python/commit/5f05caacfd1a55617d435845771e28503eff687c))
+* **internal:** reformat pyproject.toml ([b061deb](https://github.com/gitpod-io/gitpod-sdk-python/commit/b061deb38573beb2771beb0dfe92a5c3ba09b9d3))
+* **internal:** regenerate SDK with no functional changes ([2dc3c8d](https://github.com/gitpod-io/gitpod-sdk-python/commit/2dc3c8dbe693a77dc985066501d9bc9afacac347))
+
+
+### Documentation
+
+* **api:** update trigger usage note in AutomationTrigger ([5a292cb](https://github.com/gitpod-io/gitpod-sdk-python/commit/5a292cb1ef87a0dd73d93445707412d25c0e95e0))
+* **types:** mark is_admin deprecated in Organization model ([5e7b9f3](https://github.com/gitpod-io/gitpod-sdk-python/commit/5e7b9f3d7dd7af385ca75b17f01c1cab87da8cb6))
+
## 0.11.0 (2026-04-02)
Full Changelog: [v0.10.0...v0.11.0](https://github.com/gitpod-io/gitpod-sdk-python/compare/v0.10.0...v0.11.0)
diff --git a/api.md b/api.md
index dbd40b76..cf05aa37 100644
--- a/api.md
+++ b/api.md
@@ -75,6 +75,7 @@ from gitpod.types import (
Role,
Type,
UserInputBlock,
+ UserInputMetadata,
WakeEvent,
AgentCreateExecutionConversationTokenResponse,
AgentCreatePromptResponse,
@@ -506,6 +507,7 @@ from gitpod.types.organizations import (
CustomSecurityAgent,
KernelControlsAction,
OrganizationPolicies,
+ ProjectCreationDefaults,
SecurityAgentPolicy,
VetoExecPolicy,
PolicyRetrieveResponse,
@@ -709,7 +711,7 @@ Methods:
- client.runners.check_repository_access(\*\*params) -> RunnerCheckRepositoryAccessResponse
- client.runners.create_logs_token(\*\*params) -> RunnerCreateLogsTokenResponse
- client.runners.create_runner_token(\*\*params) -> RunnerCreateRunnerTokenResponse
-- client.runners.list_scm_organizations(\*\*params) -> RunnerListScmOrganizationsResponse
+- client.runners.list_scm_organizations(\*\*params) -> SyncOrganizationsPage[RunnerListScmOrganizationsResponse]
- client.runners.parse_context_url(\*\*params) -> RunnerParseContextURLResponse
- client.runners.search_repositories(\*\*params) -> RunnerSearchRepositoriesResponse
diff --git a/pyproject.toml b/pyproject.toml
index 5d1671eb..6c612339 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "gitpod-sdk"
-version = "0.11.0"
+version = "0.12.0"
description = "The official Python library for the gitpod API"
dynamic = ["readme"]
license = "Apache-2.0"
@@ -171,7 +171,7 @@ show_error_codes = true
#
# We also exclude our `tests` as mypy doesn't always infer
# types correctly and Pyright will still catch any type errors.
-exclude = ['src/gitpod/_files.py', '_dev/.*.py', 'tests/.*']
+exclude = ["src/gitpod/_files.py", "_dev/.*.py", "tests/.*"]
strict_equality = true
implicit_reexport = true
diff --git a/scripts/bootstrap b/scripts/bootstrap
index b430fee3..fe8451e4 100755
--- a/scripts/bootstrap
+++ b/scripts/bootstrap
@@ -4,7 +4,7 @@ set -e
cd "$(dirname "$0")/.."
-if [ -f "Brewfile" ] && [ "$(uname -s)" = "Darwin" ] && [ "$SKIP_BREW" != "1" ] && [ -t 0 ]; then
+if [ -f "Brewfile" ] && [ "$(uname -s)" = "Darwin" ] && [ "${SKIP_BREW:-}" != "1" ] && [ -t 0 ]; then
brew bundle check >/dev/null 2>&1 || {
echo -n "==> Install Homebrew dependencies? (y/N): "
read -r response
diff --git a/src/gitpod/_base_client.py b/src/gitpod/_base_client.py
index cc7f8af2..a9da58f9 100644
--- a/src/gitpod/_base_client.py
+++ b/src/gitpod/_base_client.py
@@ -540,6 +540,10 @@ def _build_request(
files = cast(HttpxRequestFiles, ForceMultipartDict())
prepared_url = self._prepare_url(options.url)
+ # preserve hard-coded query params from the url
+ if params and prepared_url.query:
+ params = {**dict(prepared_url.params.items()), **params}
+ prepared_url = prepared_url.copy_with(raw_path=prepared_url.raw_path.split(b"?", 1)[0])
if "_" in prepared_url.host:
# work around https://github.com/encode/httpx/discussions/2880
kwargs["extensions"] = {"sni_hostname": prepared_url.host.replace("_", "-")}
diff --git a/src/gitpod/_client.py b/src/gitpod/_client.py
index dddc11d8..454815c2 100644
--- a/src/gitpod/_client.py
+++ b/src/gitpod/_client.py
@@ -19,7 +19,11 @@
RequestOptions,
not_given,
)
-from ._utils import is_given, get_async_library
+from ._utils import (
+ is_given,
+ is_mapping_t,
+ get_async_library,
+)
from ._compat import cached_property
from ._version import __version__
from ._streaming import Stream as Stream, AsyncStream as AsyncStream
@@ -115,6 +119,15 @@ def __init__(
if base_url is None:
base_url = f"https://app.gitpod.io/api"
+ custom_headers_env = os.environ.get("GITPOD_CUSTOM_HEADERS")
+ if custom_headers_env is not None:
+ parsed: dict[str, str] = {}
+ for line in custom_headers_env.split("\n"):
+ colon = line.find(":")
+ if colon >= 0:
+ parsed[line[:colon].strip()] = line[colon + 1 :].strip()
+ default_headers = {**parsed, **(default_headers if is_mapping_t(default_headers) else {})}
+
super().__init__(
version=__version__,
base_url=base_url,
@@ -396,6 +409,15 @@ def __init__(
if base_url is None:
base_url = f"https://app.gitpod.io/api"
+ custom_headers_env = os.environ.get("GITPOD_CUSTOM_HEADERS")
+ if custom_headers_env is not None:
+ parsed: dict[str, str] = {}
+ for line in custom_headers_env.split("\n"):
+ colon = line.find(":")
+ if colon >= 0:
+ parsed[line[:colon].strip()] = line[colon + 1 :].strip()
+ default_headers = {**parsed, **(default_headers if is_mapping_t(default_headers) else {})}
+
super().__init__(
version=__version__,
base_url=base_url,
diff --git a/src/gitpod/_files.py b/src/gitpod/_files.py
index cc14c14f..76da9e08 100644
--- a/src/gitpod/_files.py
+++ b/src/gitpod/_files.py
@@ -3,8 +3,8 @@
import io
import os
import pathlib
-from typing import overload
-from typing_extensions import TypeGuard
+from typing import Sequence, cast, overload
+from typing_extensions import TypeVar, TypeGuard
import anyio
@@ -17,7 +17,9 @@
HttpxFileContent,
HttpxRequestFiles,
)
-from ._utils import is_tuple_t, is_mapping_t, is_sequence_t
+from ._utils import is_list, is_mapping, is_tuple_t, is_mapping_t, is_sequence_t
+
+_T = TypeVar("_T")
def is_base64_file_input(obj: object) -> TypeGuard[Base64FileInput]:
@@ -97,7 +99,7 @@ async def async_to_httpx_files(files: RequestFiles | None) -> HttpxRequestFiles
elif is_sequence_t(files):
files = [(key, await _async_transform_file(file)) for key, file in files]
else:
- raise TypeError("Unexpected file type input {type(files)}, expected mapping or sequence")
+ raise TypeError(f"Unexpected file type input {type(files)}, expected mapping or sequence")
return files
@@ -121,3 +123,51 @@ async def async_read_file_content(file: FileContent) -> HttpxFileContent:
return await anyio.Path(file).read_bytes()
return file
+
+
+def deepcopy_with_paths(item: _T, paths: Sequence[Sequence[str]]) -> _T:
+ """Copy only the containers along the given paths.
+
+ Used to guard against mutation by extract_files without copying the entire structure.
+ Only dicts and lists that lie on a path are copied; everything else
+ is returned by reference.
+
+ For example, given paths=[["foo", "files", "file"]] and the structure:
+ {
+ "foo": {
+ "bar": {"baz": {}},
+ "files": {"file": }
+ }
+ }
+ The root dict, "foo", and "files" are copied (they lie on the path).
+ "bar" and "baz" are returned by reference (off the path).
+ """
+ return _deepcopy_with_paths(item, paths, 0)
+
+
+def _deepcopy_with_paths(item: _T, paths: Sequence[Sequence[str]], index: int) -> _T:
+ if not paths:
+ return item
+ if is_mapping(item):
+ key_to_paths: dict[str, list[Sequence[str]]] = {}
+ for path in paths:
+ if index < len(path):
+ key_to_paths.setdefault(path[index], []).append(path)
+
+ # if no path continues through this mapping, it won't be mutated and copying it is redundant
+ if not key_to_paths:
+ return item
+
+ result = dict(item)
+ for key, subpaths in key_to_paths.items():
+ if key in result:
+ result[key] = _deepcopy_with_paths(result[key], subpaths, index + 1)
+ return cast(_T, result)
+ if is_list(item):
+ array_paths = [path for path in paths if index < len(path) and path[index] == "<array>"]
+
+ # if no path expects a list here, nothing will be mutated inside it - return by reference
+ if not array_paths:
+ return cast(_T, item)
+ return cast(_T, [_deepcopy_with_paths(entry, array_paths, index + 1) for entry in item])
+ return item
diff --git a/src/gitpod/_models.py b/src/gitpod/_models.py
index 29070e05..8c5ab260 100644
--- a/src/gitpod/_models.py
+++ b/src/gitpod/_models.py
@@ -25,7 +25,9 @@
ClassVar,
Protocol,
Required,
+ Annotated,
ParamSpec,
+ TypeAlias,
TypedDict,
TypeGuard,
final,
@@ -79,7 +81,15 @@
from ._constants import RAW_RESPONSE_HEADER
if TYPE_CHECKING:
+ from pydantic import GetCoreSchemaHandler, ValidatorFunctionWrapHandler
+ from pydantic_core import CoreSchema, core_schema
from pydantic_core.core_schema import ModelField, ModelSchema, LiteralSchema, ModelFieldsSchema
+else:
+ try:
+ from pydantic_core import CoreSchema, core_schema
+ except ImportError:
+ CoreSchema = None
+ core_schema = None
__all__ = ["BaseModel", "GenericModel"]
@@ -396,6 +406,76 @@ def model_dump_json(
)
+class _EagerIterable(list[_T], Generic[_T]):
+ """
+ Accepts any Iterable[T] input (including generators), consumes it
+ eagerly, and validates all items upfront.
+
+ Validation preserves the original container type where possible
+ (e.g. a set[T] stays a set[T]). Serialization (model_dump / JSON)
+ always emits a list — round-tripping through model_dump() will not
+ restore the original container type.
+ """
+
+ @classmethod
+ def __get_pydantic_core_schema__(
+ cls,
+ source_type: Any,
+ handler: GetCoreSchemaHandler,
+ ) -> CoreSchema:
+ (item_type,) = get_args(source_type) or (Any,)
+ item_schema: CoreSchema = handler.generate_schema(item_type)
+ list_of_items_schema: CoreSchema = core_schema.list_schema(item_schema)
+
+ return core_schema.no_info_wrap_validator_function(
+ cls._validate,
+ list_of_items_schema,
+ serialization=core_schema.plain_serializer_function_ser_schema(
+ cls._serialize,
+ info_arg=False,
+ ),
+ )
+
+ @staticmethod
+ def _validate(v: Iterable[_T], handler: "ValidatorFunctionWrapHandler") -> Any:
+ original_type: type[Any] = type(v)
+
+ # Normalize to list so list_schema can validate each item
+ if isinstance(v, list):
+ items: list[_T] = v
+ else:
+ try:
+ items = list(v)
+ except TypeError as e:
+ raise TypeError("Value is not iterable") from e
+
+ # Validate items against the inner schema
+ validated: list[_T] = handler(items)
+
+ # Reconstruct original container type
+ if original_type is list:
+ return validated
+ # str(list) produces the list's repr, not a string built from items,
+ # so skip reconstruction for str and its subclasses.
+ if issubclass(original_type, str):
+ return validated
+ try:
+ return original_type(validated)
+ except (TypeError, ValueError):
+ # If the type cannot be reconstructed, just return the validated list
+ return validated
+
+ @staticmethod
+ def _serialize(v: Iterable[_T]) -> list[_T]:
+ """Always serialize as a list so Pydantic's JSON encoder is happy."""
+ if isinstance(v, list):
+ return v
+ return list(v)
+
+
+EagerIterable: TypeAlias = Annotated[Iterable[_T], _EagerIterable]
+
+
def _construct_field(value: object, field: FieldInfo, key: str) -> object:
if value is None:
return field_get_default(field)
diff --git a/src/gitpod/_qs.py b/src/gitpod/_qs.py
index de8c99bc..4127c19c 100644
--- a/src/gitpod/_qs.py
+++ b/src/gitpod/_qs.py
@@ -2,17 +2,13 @@
from typing import Any, List, Tuple, Union, Mapping, TypeVar
from urllib.parse import parse_qs, urlencode
-from typing_extensions import Literal, get_args
+from typing_extensions import get_args
-from ._types import NotGiven, not_given
+from ._types import NotGiven, ArrayFormat, NestedFormat, not_given
from ._utils import flatten
_T = TypeVar("_T")
-
-ArrayFormat = Literal["comma", "repeat", "indices", "brackets"]
-NestedFormat = Literal["dots", "brackets"]
-
PrimitiveData = Union[str, int, float, bool, None]
# this should be Data = Union[PrimitiveData, "List[Data]", "Tuple[Data]", "Mapping[str, Data]"]
# https://github.com/microsoft/pyright/issues/3555
diff --git a/src/gitpod/_types.py b/src/gitpod/_types.py
index dbebac09..8a15cebe 100644
--- a/src/gitpod/_types.py
+++ b/src/gitpod/_types.py
@@ -47,6 +47,9 @@
ModelT = TypeVar("ModelT", bound=pydantic.BaseModel)
_T = TypeVar("_T")
+ArrayFormat = Literal["comma", "repeat", "indices", "brackets"]
+NestedFormat = Literal["dots", "brackets"]
+
# Approximates httpx internal ProxiesTypes and RequestFiles types
# while adding support for `PathLike` instances
diff --git a/src/gitpod/_utils/__init__.py b/src/gitpod/_utils/__init__.py
index 10cb66d2..1c090e51 100644
--- a/src/gitpod/_utils/__init__.py
+++ b/src/gitpod/_utils/__init__.py
@@ -24,7 +24,6 @@
coerce_integer as coerce_integer,
file_from_path as file_from_path,
strip_not_given as strip_not_given,
- deepcopy_minimal as deepcopy_minimal,
get_async_library as get_async_library,
maybe_coerce_float as maybe_coerce_float,
get_required_header as get_required_header,
diff --git a/src/gitpod/_utils/_utils.py b/src/gitpod/_utils/_utils.py
index eec7f4a1..199cd231 100644
--- a/src/gitpod/_utils/_utils.py
+++ b/src/gitpod/_utils/_utils.py
@@ -17,11 +17,11 @@
)
from pathlib import Path
from datetime import date, datetime
-from typing_extensions import TypeGuard
+from typing_extensions import TypeGuard, get_args
import sniffio
-from .._types import Omit, NotGiven, FileTypes, HeadersLike
+from .._types import Omit, NotGiven, FileTypes, ArrayFormat, HeadersLike
_T = TypeVar("_T")
_TupleT = TypeVar("_TupleT", bound=Tuple[object, ...])
@@ -40,25 +40,45 @@ def extract_files(
query: Mapping[str, object],
*,
paths: Sequence[Sequence[str]],
+ array_format: ArrayFormat = "brackets",
) -> list[tuple[str, FileTypes]]:
"""Recursively extract files from the given dictionary based on specified paths.
A path may look like this ['foo', 'files', '<array>', 'data'].
+ ``array_format`` controls how ``"<array>"`` segments contribute to the emitted
+ field name. Supported values: ``"brackets"`` (``foo[]``), ``"repeat"`` and
+ ``"comma"`` (``foo``), ``"indices"`` (``foo[0]``, ``foo[1]``).
+
Note: this mutates the given dictionary.
"""
files: list[tuple[str, FileTypes]] = []
for path in paths:
- files.extend(_extract_items(query, path, index=0, flattened_key=None))
+ files.extend(_extract_items(query, path, index=0, flattened_key=None, array_format=array_format))
return files
+def _array_suffix(array_format: ArrayFormat, array_index: int) -> str:
+ if array_format == "brackets":
+ return "[]"
+ if array_format == "indices":
+ return f"[{array_index}]"
+ if array_format == "repeat" or array_format == "comma":
+ # Both repeat the bare field name for each file part; there is no
+ # meaningful way to comma-join binary parts.
+ return ""
+ raise NotImplementedError(
+ f"Unknown array_format value: {array_format}, choose from {', '.join(get_args(ArrayFormat))}"
+ )
+
+
def _extract_items(
obj: object,
path: Sequence[str],
*,
index: int,
flattened_key: str | None,
+ array_format: ArrayFormat,
) -> list[tuple[str, FileTypes]]:
try:
key = path[index]
@@ -75,9 +95,11 @@ def _extract_items(
if is_list(obj):
files: list[tuple[str, FileTypes]] = []
- for entry in obj:
- assert_is_file_content(entry, key=flattened_key + "[]" if flattened_key else "")
- files.append((flattened_key + "[]", cast(FileTypes, entry)))
+ for array_index, entry in enumerate(obj):
+ suffix = _array_suffix(array_format, array_index)
+ emitted_key = (flattened_key + suffix) if flattened_key else suffix
+ assert_is_file_content(entry, key=emitted_key)
+ files.append((emitted_key, cast(FileTypes, entry)))
return files
assert_is_file_content(obj, key=flattened_key)
@@ -86,8 +108,9 @@ def _extract_items(
index += 1
if is_dict(obj):
try:
- # We are at the last entry in the path so we must remove the field
- if (len(path)) == index:
+ # Remove the field if there are no more dict keys in the path,
+ # only "<array>" traversal markers or end.
+ if all(p == "<array>" for p in path[index:]):
item = obj.pop(key)
else:
item = obj[key]
@@ -105,6 +128,7 @@ def _extract_items(
path,
index=index,
flattened_key=flattened_key,
+ array_format=array_format,
)
elif is_list(obj):
if key != "<array>":
@@ -116,9 +140,12 @@ def _extract_items(
item,
path,
index=index,
- flattened_key=flattened_key + "[]" if flattened_key is not None else "[]",
+ flattened_key=(
+ (flattened_key if flattened_key is not None else "") + _array_suffix(array_format, array_index)
+ ),
+ array_format=array_format,
)
- for item in obj
+ for array_index, item in enumerate(obj)
]
)
@@ -176,21 +203,6 @@ def is_iterable(obj: object) -> TypeGuard[Iterable[object]]:
return isinstance(obj, Iterable)
-def deepcopy_minimal(item: _T) -> _T:
- """Minimal reimplementation of copy.deepcopy() that will only copy certain object types:
-
- - mappings, e.g. `dict`
- - list
-
- This is done for performance reasons.
- """
- if is_mapping(item):
- return cast(_T, {k: deepcopy_minimal(v) for k, v in item.items()})
- if is_list(item):
- return cast(_T, [deepcopy_minimal(entry) for entry in item])
- return item
-
-
# copied from https://github.com/Rapptz/RoboDanny
def human_join(seq: Sequence[str], *, delim: str = ", ", final: str = "or") -> str:
size = len(seq)
diff --git a/src/gitpod/_version.py b/src/gitpod/_version.py
index 7504df3f..6521e93a 100644
--- a/src/gitpod/_version.py
+++ b/src/gitpod/_version.py
@@ -1,4 +1,4 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
__title__ = "gitpod"
-__version__ = "0.11.0" # x-release-please-version
+__version__ = "0.12.0" # x-release-please-version
diff --git a/src/gitpod/pagination.py b/src/gitpod/pagination.py
index b55ecc60..83771ae0 100644
--- a/src/gitpod/pagination.py
+++ b/src/gitpod/pagination.py
@@ -51,6 +51,9 @@
"MembersPagePagination",
"SyncMembersPage",
"AsyncMembersPage",
+ "OrganizationsPagePagination",
+ "SyncOrganizationsPage",
+ "AsyncOrganizationsPage",
"OutputsPagePagination",
"SyncOutputsPage",
"AsyncOutputsPage",
@@ -819,6 +822,56 @@ def next_page_info(self) -> Optional[PageInfo]:
return PageInfo(params={"token": next_token})
+class OrganizationsPagePagination(BaseModel):
+ next_token: Optional[str] = FieldInfo(alias="nextToken", default=None)
+
+
+class SyncOrganizationsPage(BaseSyncPage[_T], BasePage[_T], Generic[_T]):
+ organizations: List[_T]
+ pagination: Optional[OrganizationsPagePagination] = None
+
+ @override
+ def _get_page_items(self) -> List[_T]:
+ organizations = self.organizations
+ if not organizations:
+ return []
+ return organizations
+
+ @override
+ def next_page_info(self) -> Optional[PageInfo]:
+ next_token = None
+ if self.pagination is not None:
+ if self.pagination.next_token is not None:
+ next_token = self.pagination.next_token
+ if not next_token:
+ return None
+
+ return PageInfo(params={"token": next_token})
+
+
+class AsyncOrganizationsPage(BaseAsyncPage[_T], BasePage[_T], Generic[_T]):
+ organizations: List[_T]
+ pagination: Optional[OrganizationsPagePagination] = None
+
+ @override
+ def _get_page_items(self) -> List[_T]:
+ organizations = self.organizations
+ if not organizations:
+ return []
+ return organizations
+
+ @override
+ def next_page_info(self) -> Optional[PageInfo]:
+ next_token = None
+ if self.pagination is not None:
+ if self.pagination.next_token is not None:
+ next_token = self.pagination.next_token
+ if not next_token:
+ return None
+
+ return PageInfo(params={"token": next_token})
+
+
class OutputsPagePagination(BaseModel):
next_token: Optional[str] = FieldInfo(alias="nextToken", default=None)
diff --git a/src/gitpod/resources/organizations/policies.py b/src/gitpod/resources/organizations/policies.py
index a9d30bfb..5e5f9877 100644
--- a/src/gitpod/resources/organizations/policies.py
+++ b/src/gitpod/resources/organizations/policies.py
@@ -6,6 +6,7 @@
import httpx
+from ...types import AdmissionLevel
from ..._types import Body, Omit, Query, Headers, NotGiven, SequenceNotStr, omit, not_given
from ..._utils import maybe_transform, async_maybe_transform
from ..._compat import cached_property
@@ -18,6 +19,7 @@
)
from ..._base_client import make_request_options
from ...types.organizations import policy_update_params, policy_retrieve_params
+from ...types.admission_level import AdmissionLevel
from ...types.organizations.veto_exec_policy_param import VetoExecPolicyParam
from ...types.organizations.policy_retrieve_response import PolicyRetrieveResponse
@@ -109,9 +111,11 @@ def update(
maximum_environments_per_user: Optional[str] | Omit = omit,
maximum_environment_timeout: Optional[str] | Omit = omit,
maximum_running_environments_per_user: Optional[str] | Omit = omit,
+ max_port_admission_level: Optional[AdmissionLevel] | Omit = omit,
members_create_projects: Optional[bool] | Omit = omit,
members_require_projects: Optional[bool] | Omit = omit,
port_sharing_disabled: Optional[bool] | Omit = omit,
+ project_creation_defaults: Optional[policy_update_params.ProjectCreationDefaults] | Omit = omit,
require_custom_domain_access: Optional[bool] | Omit = omit,
restrict_account_creation_to_scim: Optional[bool] | Omit = omit,
security_agent_policy: Optional[policy_update_params.SecurityAgentPolicy] | Omit = omit,
@@ -201,6 +205,11 @@ def update(
maximum_running_environments_per_user: maximum_running_environments_per_user limits simultaneously running environments
per user
+ max_port_admission_level: max_port_admission_level caps the maximum admission level a user-opened port may
+ use. UNSPECIFIED means no cap (any AdmissionLevel value is allowed). System
+ ports (VS Code Browser, agents) are exempt. The legacy port_sharing_disabled
+ field, when true, takes precedence and blocks all user-initiated port sharing.
+
members_create_projects: members_create_projects controls whether members can create projects
members_require_projects: members_require_projects controls whether environments can only be created from
@@ -210,6 +219,9 @@ def update(
in the organization. System ports (VS Code Browser, agents) are always exempt
from this policy.
+ project_creation_defaults: project_creation_defaults contains updates to default settings applied to newly
+ created projects.
+
require_custom_domain_access: require_custom_domain_access controls whether users must access via custom
domain when one is configured. When true, access via app.gitpod.io is blocked.
@@ -245,9 +257,11 @@ def update(
"maximum_environments_per_user": maximum_environments_per_user,
"maximum_environment_timeout": maximum_environment_timeout,
"maximum_running_environments_per_user": maximum_running_environments_per_user,
+ "max_port_admission_level": max_port_admission_level,
"members_create_projects": members_create_projects,
"members_require_projects": members_require_projects,
"port_sharing_disabled": port_sharing_disabled,
+ "project_creation_defaults": project_creation_defaults,
"require_custom_domain_access": require_custom_domain_access,
"restrict_account_creation_to_scim": restrict_account_creation_to_scim,
"security_agent_policy": security_agent_policy,
@@ -349,9 +363,11 @@ async def update(
maximum_environments_per_user: Optional[str] | Omit = omit,
maximum_environment_timeout: Optional[str] | Omit = omit,
maximum_running_environments_per_user: Optional[str] | Omit = omit,
+ max_port_admission_level: Optional[AdmissionLevel] | Omit = omit,
members_create_projects: Optional[bool] | Omit = omit,
members_require_projects: Optional[bool] | Omit = omit,
port_sharing_disabled: Optional[bool] | Omit = omit,
+ project_creation_defaults: Optional[policy_update_params.ProjectCreationDefaults] | Omit = omit,
require_custom_domain_access: Optional[bool] | Omit = omit,
restrict_account_creation_to_scim: Optional[bool] | Omit = omit,
security_agent_policy: Optional[policy_update_params.SecurityAgentPolicy] | Omit = omit,
@@ -441,6 +457,11 @@ async def update(
maximum_running_environments_per_user: maximum_running_environments_per_user limits simultaneously running environments
per user
+ max_port_admission_level: max_port_admission_level caps the maximum admission level a user-opened port may
+ use. UNSPECIFIED means no cap (any AdmissionLevel value is allowed). System
+ ports (VS Code Browser, agents) are exempt. The legacy port_sharing_disabled
+ field, when true, takes precedence and blocks all user-initiated port sharing.
+
members_create_projects: members_create_projects controls whether members can create projects
members_require_projects: members_require_projects controls whether environments can only be created from
@@ -450,6 +471,9 @@ async def update(
in the organization. System ports (VS Code Browser, agents) are always exempt
from this policy.
+ project_creation_defaults: project_creation_defaults contains updates to default settings applied to newly
+ created projects.
+
require_custom_domain_access: require_custom_domain_access controls whether users must access via custom
domain when one is configured. When true, access via app.gitpod.io is blocked.
@@ -485,9 +509,11 @@ async def update(
"maximum_environments_per_user": maximum_environments_per_user,
"maximum_environment_timeout": maximum_environment_timeout,
"maximum_running_environments_per_user": maximum_running_environments_per_user,
+ "max_port_admission_level": max_port_admission_level,
"members_create_projects": members_create_projects,
"members_require_projects": members_require_projects,
"port_sharing_disabled": port_sharing_disabled,
+ "project_creation_defaults": project_creation_defaults,
"require_custom_domain_access": require_custom_domain_access,
"restrict_account_creation_to_scim": restrict_account_creation_to_scim,
"security_agent_policy": security_agent_policy,
diff --git a/src/gitpod/resources/organizations/scim_configurations.py b/src/gitpod/resources/organizations/scim_configurations.py
index 6176c402..4cfd9f5e 100644
--- a/src/gitpod/resources/organizations/scim_configurations.py
+++ b/src/gitpod/resources/organizations/scim_configurations.py
@@ -60,6 +60,7 @@ def create(
*,
organization_id: str,
sso_configuration_id: str,
+ allow_unverified_email_account_linking: Optional[bool] | Omit = omit,
name: Optional[str] | Omit = omit,
token_expires_in: Optional[str] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
@@ -107,6 +108,10 @@ def create(
sso_configuration_id: sso_configuration_id is the SSO configuration to link (required for user
provisioning)
+ allow_unverified_email_account_linking: allow_unverified_email_account_linking allows SCIM to link provisioned users to
+ existing accounts when the identity provider does not mark the email address as
+ verified
+
name: name is a human-readable name for the SCIM configuration
token_expires_in: token_expires_in is the duration until the token expires. Defaults to 1 year.
@@ -126,6 +131,7 @@ def create(
{
"organization_id": organization_id,
"sso_configuration_id": sso_configuration_id,
+ "allow_unverified_email_account_linking": allow_unverified_email_account_linking,
"name": name,
"token_expires_in": token_expires_in,
},
@@ -194,6 +200,7 @@ def update(
self,
*,
scim_configuration_id: str,
+ allow_unverified_email_account_linking: Optional[bool] | Omit = omit,
enabled: Optional[bool] | Omit = omit,
name: Optional[str] | Omit = omit,
sso_configuration_id: Optional[str] | Omit = omit,
@@ -236,6 +243,10 @@ def update(
Args:
scim_configuration_id: scim_configuration_id is the ID of the SCIM configuration to update
+ allow_unverified_email_account_linking: allow_unverified_email_account_linking allows SCIM to link provisioned users to
+ existing accounts when the identity provider does not mark the email address as
+ verified
+
enabled: enabled controls whether SCIM provisioning is active
name: name is a human-readable name for the SCIM configuration
@@ -255,6 +266,7 @@ def update(
body=maybe_transform(
{
"scim_configuration_id": scim_configuration_id,
+ "allow_unverified_email_account_linking": allow_unverified_email_account_linking,
"enabled": enabled,
"name": name,
"sso_configuration_id": sso_configuration_id,
@@ -482,6 +494,7 @@ async def create(
*,
organization_id: str,
sso_configuration_id: str,
+ allow_unverified_email_account_linking: Optional[bool] | Omit = omit,
name: Optional[str] | Omit = omit,
token_expires_in: Optional[str] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
@@ -529,6 +542,10 @@ async def create(
sso_configuration_id: sso_configuration_id is the SSO configuration to link (required for user
provisioning)
+ allow_unverified_email_account_linking: allow_unverified_email_account_linking allows SCIM to link provisioned users to
+ existing accounts when the identity provider does not mark the email address as
+ verified
+
name: name is a human-readable name for the SCIM configuration
token_expires_in: token_expires_in is the duration until the token expires. Defaults to 1 year.
@@ -548,6 +565,7 @@ async def create(
{
"organization_id": organization_id,
"sso_configuration_id": sso_configuration_id,
+ "allow_unverified_email_account_linking": allow_unverified_email_account_linking,
"name": name,
"token_expires_in": token_expires_in,
},
@@ -616,6 +634,7 @@ async def update(
self,
*,
scim_configuration_id: str,
+ allow_unverified_email_account_linking: Optional[bool] | Omit = omit,
enabled: Optional[bool] | Omit = omit,
name: Optional[str] | Omit = omit,
sso_configuration_id: Optional[str] | Omit = omit,
@@ -658,6 +677,10 @@ async def update(
Args:
scim_configuration_id: scim_configuration_id is the ID of the SCIM configuration to update
+ allow_unverified_email_account_linking: allow_unverified_email_account_linking allows SCIM to link provisioned users to
+ existing accounts when the identity provider does not mark the email address as
+ verified
+
enabled: enabled controls whether SCIM provisioning is active
name: name is a human-readable name for the SCIM configuration
@@ -677,6 +700,7 @@ async def update(
body=await async_maybe_transform(
{
"scim_configuration_id": scim_configuration_id,
+ "allow_unverified_email_account_linking": allow_unverified_email_account_linking,
"enabled": enabled,
"name": name,
"sso_configuration_id": sso_configuration_id,
diff --git a/src/gitpod/resources/runners/runners.py b/src/gitpod/resources/runners/runners.py
index 5a0ee2e4..1d92c061 100644
--- a/src/gitpod/resources/runners/runners.py
+++ b/src/gitpod/resources/runners/runners.py
@@ -41,7 +41,7 @@
async_to_raw_response_wrapper,
async_to_streamed_response_wrapper,
)
-from ...pagination import SyncRunnersPage, AsyncRunnersPage
+from ...pagination import SyncRunnersPage, AsyncRunnersPage, SyncOrganizationsPage, AsyncOrganizationsPage
from ..._base_client import AsyncPaginator, make_request_options
from ...types.runner import Runner
from ...types.runner_kind import RunnerKind
@@ -681,6 +681,8 @@ def list_scm_organizations(
*,
token: str | Omit = omit,
page_size: int | Omit = omit,
+ pagination: runner_list_scm_organizations_params.Pagination | Omit = omit,
+ query: str | Omit = omit,
runner_id: str | Omit = omit,
scm_host: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
@@ -689,7 +691,7 @@ def list_scm_organizations(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = not_given,
- ) -> RunnerListScmOrganizationsResponse:
+ ) -> SyncOrganizationsPage[RunnerListScmOrganizationsResponse]:
"""
Lists SCM organizations the user belongs to.
@@ -709,7 +711,29 @@ def list_scm_organizations(
scmHost: "github.com"
```
+ - Search GitLab groups:
+
+ Returns the first page of GitLab groups matching the substring.
+
+ ```yaml
+ runnerId: "d2c94c27-3b76-4a42-b88c-95a85e392c68"
+ scmHost: "gitlab.com"
+ query: "platform"
+ pagination:
+ pageSize: 25
+ ```
+
Args:
+ pagination: Pagination parameters. When unset, defaults to the standard PaginationRequest
+ defaults (page_size 25, max 100). Tokens are opaque and provider-specific.
+
+ query: Optional substring filter applied to the organization name.
+
+ - GitLab: forwarded to the upstream `search` parameter (server-side,
+ case-insensitive substring on name/path).
+ - GitHub and Bitbucket: not implemented as they don't support searching. Empty
+ value means no filter.
+
scm_host: The SCM host to list organizations from (e.g., "github.com", "gitlab.com")
extra_headers: Send extra headers
@@ -720,10 +744,13 @@ def list_scm_organizations(
timeout: Override the client-level default timeout for this request, in seconds
"""
- return self._post(
+ return self._get_api_list(
"/gitpod.v1.RunnerService/ListSCMOrganizations",
+ page=SyncOrganizationsPage[RunnerListScmOrganizationsResponse],
body=maybe_transform(
{
+ "pagination": pagination,
+ "query": query,
"runner_id": runner_id,
"scm_host": scm_host,
},
@@ -742,7 +769,8 @@ def list_scm_organizations(
runner_list_scm_organizations_params.RunnerListScmOrganizationsParams,
),
),
- cast_to=RunnerListScmOrganizationsResponse,
+ model=RunnerListScmOrganizationsResponse,
+ method="post",
)
def parse_context_url(
@@ -1505,11 +1533,13 @@ async def create_runner_token(
cast_to=RunnerCreateRunnerTokenResponse,
)
- async def list_scm_organizations(
+ def list_scm_organizations(
self,
*,
token: str | Omit = omit,
page_size: int | Omit = omit,
+ pagination: runner_list_scm_organizations_params.Pagination | Omit = omit,
+ query: str | Omit = omit,
runner_id: str | Omit = omit,
scm_host: str | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
@@ -1518,7 +1548,7 @@ async def list_scm_organizations(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = not_given,
- ) -> RunnerListScmOrganizationsResponse:
+ ) -> AsyncPaginator[RunnerListScmOrganizationsResponse, AsyncOrganizationsPage[RunnerListScmOrganizationsResponse]]:
"""
Lists SCM organizations the user belongs to.
@@ -1538,7 +1568,29 @@ async def list_scm_organizations(
scmHost: "github.com"
```
+ - Search GitLab groups:
+
+ Returns the first page of GitLab groups matching the substring.
+
+ ```yaml
+ runnerId: "d2c94c27-3b76-4a42-b88c-95a85e392c68"
+ scmHost: "gitlab.com"
+ query: "platform"
+ pagination:
+ pageSize: 25
+ ```
+
Args:
+ pagination: Pagination parameters. When unset, defaults to the standard PaginationRequest
+ defaults (page_size 25, max 100). Tokens are opaque and provider-specific.
+
+ query: Optional substring filter applied to the organization name.
+
+ - GitLab: forwarded to the upstream `search` parameter (server-side,
+ case-insensitive substring on name/path).
+ - GitHub and Bitbucket: not implemented as they don't support searching. Empty
+ value means no filter.
+
scm_host: The SCM host to list organizations from (e.g., "github.com", "gitlab.com")
extra_headers: Send extra headers
@@ -1549,10 +1601,13 @@ async def list_scm_organizations(
timeout: Override the client-level default timeout for this request, in seconds
"""
- return await self._post(
+ return self._get_api_list(
"/gitpod.v1.RunnerService/ListSCMOrganizations",
- body=await async_maybe_transform(
+ page=AsyncOrganizationsPage[RunnerListScmOrganizationsResponse],
+ body=maybe_transform(
{
+ "pagination": pagination,
+ "query": query,
"runner_id": runner_id,
"scm_host": scm_host,
},
@@ -1563,7 +1618,7 @@ async def list_scm_organizations(
extra_query=extra_query,
extra_body=extra_body,
timeout=timeout,
- query=await async_maybe_transform(
+ query=maybe_transform(
{
"token": token,
"page_size": page_size,
@@ -1571,7 +1626,8 @@ async def list_scm_organizations(
runner_list_scm_organizations_params.RunnerListScmOrganizationsParams,
),
),
- cast_to=RunnerListScmOrganizationsResponse,
+ model=RunnerListScmOrganizationsResponse,
+ method="post",
)
async def parse_context_url(
diff --git a/src/gitpod/resources/secrets.py b/src/gitpod/resources/secrets.py
index 915a0c79..4ba66df7 100644
--- a/src/gitpod/resources/secrets.py
+++ b/src/gitpod/resources/secrets.py
@@ -56,6 +56,7 @@ def create(
*,
api_only: bool | Omit = omit,
container_registry_basic_auth_host: str | Omit = omit,
+ credential_proxy: secret_create_params.CredentialProxy | Omit = omit,
environment_variable: bool | Omit = omit,
file_path: str | Omit = omit,
name: str | Omit = omit,
@@ -122,6 +123,13 @@ def create(
container_registry_basic_auth_host: secret will be mounted as a docker config in the environment VM, mount will have
the docker registry host
+ credential_proxy: credential_proxy configures transparent credential injection when environments
+ materialize this secret. When set, the credential proxy intercepts HTTPS traffic
+ to the target hosts and replaces the dummy mounted value with the real value in
+ the specified HTTP header. The real secret value is never exposed in the
+ environment. This field is orthogonal to mount — a secret can be both mounted
+ and proxied at the same time.
+
environment_variable: secret will be created as an Environment Variable with the same name as the
secret
@@ -152,6 +160,7 @@ def create(
{
"api_only": api_only,
"container_registry_basic_auth_host": container_registry_basic_auth_host,
+ "credential_proxy": credential_proxy,
"environment_variable": environment_variable,
"file_path": file_path,
"name": name,
@@ -432,6 +441,7 @@ async def create(
*,
api_only: bool | Omit = omit,
container_registry_basic_auth_host: str | Omit = omit,
+ credential_proxy: secret_create_params.CredentialProxy | Omit = omit,
environment_variable: bool | Omit = omit,
file_path: str | Omit = omit,
name: str | Omit = omit,
@@ -498,6 +508,13 @@ async def create(
container_registry_basic_auth_host: secret will be mounted as a docker config in the environment VM, mount will have
the docker registry host
+ credential_proxy: credential_proxy configures transparent credential injection when environments
+ materialize this secret. When set, the credential proxy intercepts HTTPS traffic
+ to the target hosts and replaces the dummy mounted value with the real value in
+ the specified HTTP header. The real secret value is never exposed in the
+ environment. This field is orthogonal to mount — a secret can be both mounted
+ and proxied at the same time.
+
environment_variable: secret will be created as an Environment Variable with the same name as the
secret
@@ -528,6 +545,7 @@ async def create(
{
"api_only": api_only,
"container_registry_basic_auth_host": container_registry_basic_auth_host,
+ "credential_proxy": credential_proxy,
"environment_variable": environment_variable,
"file_path": file_path,
"name": name,
diff --git a/src/gitpod/types/agent_execution.py b/src/gitpod/types/agent_execution.py
index 07f02eda..206c3faf 100644
--- a/src/gitpod/types/agent_execution.py
+++ b/src/gitpod/types/agent_execution.py
@@ -21,6 +21,7 @@
"StatusCurrentOperation",
"StatusCurrentOperationLlm",
"StatusCurrentOperationToolUse",
+ "StatusGoal",
"StatusMcpIntegrationStatus",
"StatusOutputs",
"StatusUsedEnvironment",
@@ -308,6 +309,30 @@ class StatusCurrentOperation(BaseModel):
tool_use: Optional[StatusCurrentOperationToolUse] = FieldInfo(alias="toolUse", default=None)
+class StatusGoal(BaseModel):
+ """goal projects the current native Codex thread goal, if any."""
+
+ objective: Optional[str] = None
+ """
+ objective is the current goal text tracked by the native Codex thread-goal
+ subsystem.
+ """
+
+ status: Optional[
+ Literal[
+ "GOAL_STATUS_UNSPECIFIED",
+ "GOAL_STATUS_ACTIVE",
+ "GOAL_STATUS_PAUSED",
+ "GOAL_STATUS_COMPLETED",
+ "GOAL_STATUS_BUDGET_EXHAUSTED",
+ ]
+ ] = None
+ """status is the lifecycle state of the current goal."""
+
+ updated_at: Optional[datetime] = FieldInfo(alias="updatedAt", default=None)
+ """updated_at is the most recent native goal update timestamp, when available."""
+
+
class StatusMcpIntegrationStatus(BaseModel):
"""
MCPIntegrationStatus represents the status of a single MCP integration
@@ -393,6 +418,9 @@ class Status(BaseModel):
] = FieldInfo(alias="failureReason", default=None)
"""failure_reason contains a structured reason code for the failure."""
+ goal: Optional[StatusGoal] = None
+ """goal projects the current native Codex thread goal, if any."""
+
input_tokens_used: Optional[str] = FieldInfo(alias="inputTokensUsed", default=None)
iterations: Optional[str] = None
@@ -455,11 +483,13 @@ class Status(BaseModel):
"SUPPORTED_MODEL_OPUS_4_5_EXTENDED",
"SUPPORTED_MODEL_OPUS_4_6",
"SUPPORTED_MODEL_OPUS_4_6_EXTENDED",
+ "SUPPORTED_MODEL_OPUS_4_7",
"SUPPORTED_MODEL_HAIKU_4_5",
"SUPPORTED_MODEL_OPENAI_4O",
"SUPPORTED_MODEL_OPENAI_4O_MINI",
"SUPPORTED_MODEL_OPENAI_O1",
"SUPPORTED_MODEL_OPENAI_O1_MINI",
+ "SUPPORTED_MODEL_OPENAI_AUTO",
]
] = FieldInfo(alias="supportedModel", default=None)
"""supported_model is the LLM model being used by the agent execution."""
diff --git a/src/gitpod/types/environment_spec.py b/src/gitpod/types/environment_spec.py
index 798637b0..3118aa0e 100644
--- a/src/gitpod/types/environment_spec.py
+++ b/src/gitpod/types/environment_spec.py
@@ -144,12 +144,6 @@ class SecretCredentialProxy(BaseModel):
as a git credential) and proxied at the same time.
"""
- format: Optional[Literal["FORMAT_UNSPECIFIED", "FORMAT_PLAIN", "FORMAT_BASE64"]] = None
- """format describes how the secret value is encoded.
-
- The proxy uses this to decode the value before injecting it into the header.
- """
-
header: Optional[str] = None
"""header is the HTTP header name to inject (e.g. "Authorization")."""
diff --git a/src/gitpod/types/environment_spec_param.py b/src/gitpod/types/environment_spec_param.py
index 783a9330..dfad7957 100644
--- a/src/gitpod/types/environment_spec_param.py
+++ b/src/gitpod/types/environment_spec_param.py
@@ -152,12 +152,6 @@ class SecretCredentialProxy(TypedDict, total=False):
as a git credential) and proxied at the same time.
"""
- format: Literal["FORMAT_UNSPECIFIED", "FORMAT_PLAIN", "FORMAT_BASE64"]
- """format describes how the secret value is encoded.
-
- The proxy uses this to decode the value before injecting it into the header.
- """
-
header: str
"""header is the HTTP header name to inject (e.g. "Authorization")."""
diff --git a/src/gitpod/types/environment_status.py b/src/gitpod/types/environment_status.py
index 45ad12ca..d5ed5c65 100644
--- a/src/gitpod/types/environment_status.py
+++ b/src/gitpod/types/environment_status.py
@@ -91,6 +91,12 @@ class ContentGitChangedFile(BaseModel):
] = FieldInfo(alias="changeType", default=None)
"""ChangeType is the type of change that happened to the file"""
+ old_path: Optional[str] = FieldInfo(alias="oldPath", default=None)
+ """
+ old_path is the previous path of the file before a rename or copy. Only set when
+ change_type is RENAMED or COPIED.
+ """
+
path: Optional[str] = None
"""path is the path of the file"""
diff --git a/src/gitpod/types/environments/automations/service_spec.py b/src/gitpod/types/environments/automations/service_spec.py
index 98b9b1f6..4f451cd8 100644
--- a/src/gitpod/types/environments/automations/service_spec.py
+++ b/src/gitpod/types/environments/automations/service_spec.py
@@ -62,6 +62,12 @@ class ServiceSpec(BaseModel):
env: Optional[List[EnvironmentVariableItem]] = None
"""env specifies environment variables for the service."""
+ readiness_timeout: Optional[str] = FieldInfo(alias="readinessTimeout", default=None)
+ """
+ readiness_timeout is the maximum duration a service may remain in the Starting
+ phase while readiness checks run. 0s disables the timeout.
+ """
+
runs_on: Optional[RunsOn] = FieldInfo(alias="runsOn", default=None)
"""runs_on specifies the environment the service should run on."""
diff --git a/src/gitpod/types/environments/automations/service_spec_param.py b/src/gitpod/types/environments/automations/service_spec_param.py
index 93bed6a8..dc93d720 100644
--- a/src/gitpod/types/environments/automations/service_spec_param.py
+++ b/src/gitpod/types/environments/automations/service_spec_param.py
@@ -63,6 +63,12 @@ class ServiceSpecParam(TypedDict, total=False):
env: Iterable[EnvironmentVariableItem]
"""env specifies environment variables for the service."""
+ readiness_timeout: Annotated[str, PropertyInfo(alias="readinessTimeout")]
+ """
+ readiness_timeout is the maximum duration a service may remain in the Starting
+ phase while readiness checks run. 0s disables the timeout.
+ """
+
runs_on: Annotated[RunsOn, PropertyInfo(alias="runsOn")]
"""runs_on specifies the environment the service should run on."""
diff --git a/src/gitpod/types/environments/automations/service_update_params.py b/src/gitpod/types/environments/automations/service_update_params.py
index 7ec0e436..bbcef01e 100644
--- a/src/gitpod/types/environments/automations/service_update_params.py
+++ b/src/gitpod/types/environments/automations/service_update_params.py
@@ -69,6 +69,67 @@ class Spec(TypedDict, total=False):
env: Iterable[EnvironmentVariableItem]
+ readiness_timeout: Annotated[str, PropertyInfo(alias="readinessTimeout")]
+ """
+ A Duration represents a signed, fixed-length span of time represented as a count
+ of seconds and fractions of seconds at nanosecond resolution. It is independent
+ of any calendar and concepts like "day" or "month". It is related to Timestamp
+ in that the difference between two Timestamp values is a Duration and it can be
+ added or subtracted from a Timestamp. Range is approximately +-10,000 years.
+
+ # Examples
+
+ Example 1: Compute Duration from two Timestamps in pseudo code.
+
+ Timestamp start = ...;
+ Timestamp end = ...;
+ Duration duration = ...;
+
+ duration.seconds = end.seconds - start.seconds;
+ duration.nanos = end.nanos - start.nanos;
+
+ if (duration.seconds < 0 && duration.nanos > 0) {
+ duration.seconds += 1;
+ duration.nanos -= 1000000000;
+ } else if (duration.seconds > 0 && duration.nanos < 0) {
+ duration.seconds -= 1;
+ duration.nanos += 1000000000;
+ }
+
+ Example 2: Compute Timestamp from Timestamp + Duration in pseudo code.
+
+ Timestamp start = ...;
+ Duration duration = ...;
+ Timestamp end = ...;
+
+ end.seconds = start.seconds + duration.seconds;
+ end.nanos = start.nanos + duration.nanos;
+
+ if (end.nanos < 0) {
+ end.seconds -= 1;
+ end.nanos += 1000000000;
+ } else if (end.nanos >= 1000000000) {
+ end.seconds += 1;
+ end.nanos -= 1000000000;
+ }
+
+ Example 3: Compute Duration from datetime.timedelta in Python.
+
+ td = datetime.timedelta(days=3, minutes=10)
+ duration = Duration()
+ duration.FromTimedelta(td)
+
+ # JSON Mapping
+
+ In JSON format, the Duration type is encoded as a string rather than an object,
+ where the string ends in the suffix "s" (indicating seconds) and is preceded by
+ the number of seconds, with nanoseconds expressed as fractional seconds. For
+ example, 3 seconds with 0 nanoseconds should be encoded in JSON format as "3s",
+ while 3 seconds and 1 nanosecond should be expressed in JSON format as
+ "3.000000001s", and 3 seconds and 1 microsecond should be expressed in JSON
+ format as "3.000001s".
+ """
+
runs_on: Annotated[Optional[RunsOn], PropertyInfo(alias="runsOn")]
diff --git a/src/gitpod/types/environments/automations_file_param.py b/src/gitpod/types/environments/automations_file_param.py
index 226782ce..71161a2e 100644
--- a/src/gitpod/types/environments/automations_file_param.py
+++ b/src/gitpod/types/environments/automations_file_param.py
@@ -49,12 +49,74 @@ class Services(TypedDict, total=False):
name: str
+ readiness_timeout: Annotated[str, PropertyInfo(alias="readinessTimeout")]
+ """
+ A Duration represents a signed, fixed-length span of time represented as a count
+ of seconds and fractions of seconds at nanosecond resolution. It is independent
+ of any calendar and concepts like "day" or "month". It is related to Timestamp
+ in that the difference between two Timestamp values is a Duration and it can be
+ added or subtracted from a Timestamp. Range is approximately +-10,000 years.
+
+ # Examples
+
+ Example 1: Compute Duration from two Timestamps in pseudo code.
+
+ Timestamp start = ...;
+ Timestamp end = ...;
+ Duration duration = ...;
+
+ duration.seconds = end.seconds - start.seconds;
+ duration.nanos = end.nanos - start.nanos;
+
+ if (duration.seconds < 0 && duration.nanos > 0) {
+ duration.seconds += 1;
+ duration.nanos -= 1000000000;
+ } else if (duration.seconds > 0 && duration.nanos < 0) {
+ duration.seconds -= 1;
+ duration.nanos += 1000000000;
+ }
+
+ Example 2: Compute Timestamp from Timestamp + Duration in pseudo code.
+
+ Timestamp start = ...;
+ Duration duration = ...;
+ Timestamp end = ...;
+
+ end.seconds = start.seconds + duration.seconds;
+ end.nanos = start.nanos + duration.nanos;
+
+ if (end.nanos < 0) {
+ end.seconds -= 1;
+ end.nanos += 1000000000;
+ } else if (end.nanos >= 1000000000) {
+ end.seconds += 1;
+ end.nanos -= 1000000000;
+ }
+
+ Example 3: Compute Duration from datetime.timedelta in Python.
+
+ td = datetime.timedelta(days=3, minutes=10)
+ duration = Duration()
+ duration.FromTimedelta(td)
+
+ # JSON Mapping
+
+ In JSON format, the Duration type is encoded as a string rather than an object,
+ where the string ends in the suffix "s" (indicating seconds) and is preceded by
+ the number of seconds, with nanoseconds expressed as fractional seconds. For
+ example, 3 seconds with 0 nanoseconds should be encoded in JSON format as "3s",
+ while 3 seconds and 1 nanosecond should be expressed in JSON format as
+ "3.000000001s", and 3 seconds and 1 microsecond should be expressed in JSON
+ format as "3.000001s".
+ """
+
role: Literal["", "default", "editor", "ai-agent"]
runs_on: Annotated[RunsOn, PropertyInfo(alias="runsOn")]
triggered_by: Annotated[
- List[Literal["manual", "postEnvironmentStart", "postDevcontainerStart"]], PropertyInfo(alias="triggeredBy")
+ List[Literal["manual", "postEnvironmentStart", "postDevcontainerStart", "prebuild"]],
+ PropertyInfo(alias="triggeredBy"),
]
diff --git a/src/gitpod/types/organizations/__init__.py b/src/gitpod/types/organizations/__init__.py
index 6787a9a4..2dd4f126 100644
--- a/src/gitpod/types/organizations/__init__.py
+++ b/src/gitpod/types/organizations/__init__.py
@@ -27,6 +27,7 @@
from .policy_retrieve_response import PolicyRetrieveResponse as PolicyRetrieveResponse
from .domain_verification_state import DomainVerificationState as DomainVerificationState
from .invite_get_summary_params import InviteGetSummaryParams as InviteGetSummaryParams
+from .project_creation_defaults import ProjectCreationDefaults as ProjectCreationDefaults
from .conversation_sharing_policy import ConversationSharingPolicy as ConversationSharingPolicy
from .custom_domain_create_params import CustomDomainCreateParams as CustomDomainCreateParams
from .custom_domain_delete_params import CustomDomainDeleteParams as CustomDomainDeleteParams
diff --git a/src/gitpod/types/organizations/organization_policies.py b/src/gitpod/types/organizations/organization_policies.py
index 0816b40f..a988d81e 100644
--- a/src/gitpod/types/organizations/organization_policies.py
+++ b/src/gitpod/types/organizations/organization_policies.py
@@ -8,6 +8,7 @@
from .agent_policy import AgentPolicy
from .veto_exec_policy import VetoExecPolicy
from .security_agent_policy import SecurityAgentPolicy
+from .project_creation_defaults import ProjectCreationDefaults
__all__ = ["OrganizationPolicies", "EditorVersionRestrictions"]
@@ -132,6 +133,14 @@ class OrganizationPolicies(BaseModel):
```
"""
+ project_creation_defaults: Optional[ProjectCreationDefaults] = FieldInfo(
+ alias="projectCreationDefaults", default=None
+ )
+ """
+ project_creation_defaults contains default settings applied to newly created
+ projects.
+ """
+
security_agent_policy: Optional[SecurityAgentPolicy] = FieldInfo(alias="securityAgentPolicy", default=None)
"""
security_agent_policy contains security agent configuration for the
diff --git a/src/gitpod/types/organizations/policy_update_params.py b/src/gitpod/types/organizations/policy_update_params.py
index e0776802..3c1046c9 100644
--- a/src/gitpod/types/organizations/policy_update_params.py
+++ b/src/gitpod/types/organizations/policy_update_params.py
@@ -7,6 +7,7 @@
from ..._types import SequenceNotStr
from ..._utils import PropertyInfo
+from ..admission_level import AdmissionLevel
from .veto_exec_policy_param import VetoExecPolicyParam
from .conversation_sharing_policy import ConversationSharingPolicy
@@ -14,6 +15,7 @@
"PolicyUpdateParams",
"AgentPolicy",
"EditorVersionRestrictions",
+ "ProjectCreationDefaults",
"SecurityAgentPolicy",
"SecurityAgentPolicyCrowdstrike",
]
@@ -98,6 +100,14 @@ class PolicyUpdateParams(TypedDict, total=False):
per user
"""
+ max_port_admission_level: Annotated[Optional[AdmissionLevel], PropertyInfo(alias="maxPortAdmissionLevel")]
+ """
+ max_port_admission_level caps the maximum admission level a user-opened port may
+ use. UNSPECIFIED means no cap (any AdmissionLevel value is allowed). System
+ ports (VS Code Browser, agents) are exempt. The legacy port_sharing_disabled
+ field, when true, takes precedence and blocks all user-initiated port sharing.
+ """
+
members_create_projects: Annotated[Optional[bool], PropertyInfo(alias="membersCreateProjects")]
"""members_create_projects controls whether members can create projects"""
@@ -114,6 +124,14 @@ class PolicyUpdateParams(TypedDict, total=False):
from this policy.
"""
+ project_creation_defaults: Annotated[
+ Optional[ProjectCreationDefaults], PropertyInfo(alias="projectCreationDefaults")
+ ]
+ """
+ project_creation_defaults contains updates to default settings applied to newly
+ created projects.
+ """
+
require_custom_domain_access: Annotated[Optional[bool], PropertyInfo(alias="requireCustomDomainAccess")]
"""
require_custom_domain_access controls whether users must access via custom
@@ -186,6 +204,18 @@ class EditorVersionRestrictions(TypedDict, total=False):
"""
+class ProjectCreationDefaults(TypedDict, total=False):
+ """
+ project_creation_defaults contains updates to default settings applied to newly created projects.
+ """
+
+ insights_enabled: Annotated[Optional[bool], PropertyInfo(alias="insightsEnabled")]
+ """
+ insights_enabled controls whether Insights (co-author attribution) is
+ automatically enabled on newly created projects.
+ """
+
+
class SecurityAgentPolicyCrowdstrike(TypedDict, total=False):
"""crowdstrike contains CrowdStrike Falcon configuration updates"""
diff --git a/src/gitpod/types/organizations/project_creation_defaults.py b/src/gitpod/types/organizations/project_creation_defaults.py
new file mode 100644
index 00000000..96b610e0
--- /dev/null
+++ b/src/gitpod/types/organizations/project_creation_defaults.py
@@ -0,0 +1,21 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import Optional
+
+from pydantic import Field as FieldInfo
+
+from ..._models import BaseModel
+
+__all__ = ["ProjectCreationDefaults"]
+
+
+class ProjectCreationDefaults(BaseModel):
+ """
+ ProjectCreationDefaults contains default settings applied to newly created projects.
+ """
+
+ insights_enabled: Optional[bool] = FieldInfo(alias="insightsEnabled", default=None)
+ """
+ insights_enabled controls whether Insights (co-author attribution) is
+ automatically enabled on newly created projects.
+ """
diff --git a/src/gitpod/types/organizations/scim_configuration.py b/src/gitpod/types/organizations/scim_configuration.py
index 8b12ff05..f3879104 100644
--- a/src/gitpod/types/organizations/scim_configuration.py
+++ b/src/gitpod/types/organizations/scim_configuration.py
@@ -30,6 +30,15 @@ class ScimConfiguration(BaseModel):
updated_at: datetime = FieldInfo(alias="updatedAt")
"""updated_at is when the SCIM configuration was last updated"""
+ allow_unverified_email_account_linking: Optional[bool] = FieldInfo(
+ alias="allowUnverifiedEmailAccountLinking", default=None
+ )
+ """
+ allow_unverified_email_account_linking allows SCIM to link provisioned users to
+ existing accounts when the identity provider does not mark the email address as
+ verified
+ """
+
enabled: Optional[bool] = None
"""enabled indicates if SCIM provisioning is active"""
diff --git a/src/gitpod/types/organizations/scim_configuration_create_params.py b/src/gitpod/types/organizations/scim_configuration_create_params.py
index e8f5a2ee..225662ca 100644
--- a/src/gitpod/types/organizations/scim_configuration_create_params.py
+++ b/src/gitpod/types/organizations/scim_configuration_create_params.py
@@ -23,6 +23,15 @@ class ScimConfigurationCreateParams(TypedDict, total=False):
provisioning)
"""
+ allow_unverified_email_account_linking: Annotated[
+ Optional[bool], PropertyInfo(alias="allowUnverifiedEmailAccountLinking")
+ ]
+ """
+ allow_unverified_email_account_linking allows SCIM to link provisioned users to
+ existing accounts when the identity provider does not mark the email address as
+ verified
+ """
+
name: Optional[str]
"""name is a human-readable name for the SCIM configuration"""
diff --git a/src/gitpod/types/organizations/scim_configuration_update_params.py b/src/gitpod/types/organizations/scim_configuration_update_params.py
index da919176..d0c0862f 100644
--- a/src/gitpod/types/organizations/scim_configuration_update_params.py
+++ b/src/gitpod/types/organizations/scim_configuration_update_params.py
@@ -14,6 +14,15 @@ class ScimConfigurationUpdateParams(TypedDict, total=False):
scim_configuration_id: Required[Annotated[str, PropertyInfo(alias="scimConfigurationId")]]
"""scim_configuration_id is the ID of the SCIM configuration to update"""
+ allow_unverified_email_account_linking: Annotated[
+ Optional[bool], PropertyInfo(alias="allowUnverifiedEmailAccountLinking")
+ ]
+ """
+ allow_unverified_email_account_linking allows SCIM to link provisioned users to
+ existing accounts when the identity provider does not mark the email address as
+ verified
+ """
+
enabled: Optional[bool]
"""enabled controls whether SCIM provisioning is active"""
diff --git a/src/gitpod/types/runner_capability.py b/src/gitpod/types/runner_capability.py
index c2e50b46..c2df28c5 100644
--- a/src/gitpod/types/runner_capability.py
+++ b/src/gitpod/types/runner_capability.py
@@ -18,4 +18,7 @@
"RUNNER_CAPABILITY_RUNNER_SIDE_AGENT",
"RUNNER_CAPABILITY_WARM_POOL",
"RUNNER_CAPABILITY_ASG_WARM_POOL",
+ "RUNNER_CAPABILITY_PORT_AUTHENTICATION",
+ "RUNNER_CAPABILITY_HORIZONTAL_SCALING",
+ "RUNNER_CAPABILITY_AGENT_EXECUTION_CNF",
]
diff --git a/src/gitpod/types/runner_list_scm_organizations_params.py b/src/gitpod/types/runner_list_scm_organizations_params.py
index bd1f788b..a852d57b 100644
--- a/src/gitpod/types/runner_list_scm_organizations_params.py
+++ b/src/gitpod/types/runner_list_scm_organizations_params.py
@@ -6,7 +6,7 @@
from .._utils import PropertyInfo
-__all__ = ["RunnerListScmOrganizationsParams"]
+__all__ = ["RunnerListScmOrganizationsParams", "Pagination"]
class RunnerListScmOrganizationsParams(TypedDict, total=False):
@@ -14,7 +14,43 @@ class RunnerListScmOrganizationsParams(TypedDict, total=False):
page_size: Annotated[int, PropertyInfo(alias="pageSize")]
+ pagination: Pagination
+ """Pagination parameters.
+
+ When unset, defaults to the standard PaginationRequest defaults (page_size 25,
+ max 100). Tokens are opaque and provider-specific.
+ """
+
+ query: str
+ """Optional substring filter applied to the organization name.
+
+ - GitLab: forwarded to the upstream `search` parameter (server-side,
+ case-insensitive substring on name/path).
+ - GitHub and Bitbucket: not implemented as they don't support searching. Empty
+ value means no filter.
+ """
+
runner_id: Annotated[str, PropertyInfo(alias="runnerId")]
scm_host: Annotated[str, PropertyInfo(alias="scmHost")]
"""The SCM host to list organizations from (e.g., "github.com", "gitlab.com")"""
+
+
+class Pagination(TypedDict, total=False):
+ """Pagination parameters.
+
+ When unset, defaults to the standard PaginationRequest defaults
+ (page_size 25, max 100). Tokens are opaque and provider-specific.
+ """
+
+ token: str
+ """
+ Token for the next set of results that was returned as next_token of a
+ PaginationResponse
+ """
+
+ page_size: Annotated[int, PropertyInfo(alias="pageSize")]
+ """Page size is the maximum number of results to retrieve per page. Defaults to 25.
+
+ Maximum 100.
+ """
diff --git a/src/gitpod/types/runner_list_scm_organizations_response.py b/src/gitpod/types/runner_list_scm_organizations_response.py
index 8e9db077..8c8c50e8 100644
--- a/src/gitpod/types/runner_list_scm_organizations_response.py
+++ b/src/gitpod/types/runner_list_scm_organizations_response.py
@@ -1,19 +1,24 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-from typing import List, Optional
+from typing import Optional
from pydantic import Field as FieldInfo
from .._models import BaseModel
-__all__ = ["RunnerListScmOrganizationsResponse", "Organization"]
+__all__ = ["RunnerListScmOrganizationsResponse"]
-class Organization(BaseModel):
+class RunnerListScmOrganizationsResponse(BaseModel):
is_admin: Optional[bool] = FieldInfo(alias="isAdmin", default=None)
"""
- Whether the user has admin permissions in this organization. Admin permissions
- typically allow creating organization-level webhooks.
+ Deprecated: this field is unused by all known consumers and is scheduled for
+ removal in a future release. Do not read it.
+
+ Originally intended to gate organization-level webhook creation in the
+ dashboard, but that gating was never implemented. Populating this field on the
+ GitLab path requires a second fully-paginated ListGroups call, which is the main
+ reason we are deprecating it.
"""
name: Optional[str] = None
@@ -21,8 +26,3 @@ class Organization(BaseModel):
url: Optional[str] = None
"""Organization URL (e.g., "https://github.com/gitpod-io")"""
-
-
-class RunnerListScmOrganizationsResponse(BaseModel):
- organizations: Optional[List[Organization]] = None
- """List of organizations the user belongs to"""
diff --git a/src/gitpod/types/secret.py b/src/gitpod/types/secret.py
index 052d62a9..92a977c6 100644
--- a/src/gitpod/types/secret.py
+++ b/src/gitpod/types/secret.py
@@ -1,6 +1,6 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-from typing import Optional
+from typing import List, Optional
from datetime import datetime
from pydantic import Field as FieldInfo
@@ -9,7 +9,28 @@
from .secret_scope import SecretScope
from .shared.subject import Subject
-__all__ = ["Secret"]
+__all__ = ["Secret", "CredentialProxy"]
+
+
+class CredentialProxy(BaseModel):
+ """
+ credential_proxy configures transparent credential injection via the
+ credential proxy. When set, the credential proxy intercepts HTTPS
+ traffic to the target hosts and replaces the dummy mounted value with
+ the real value in the specified HTTP header. The real secret value is
+ never exposed in the environment.
+ This field is orthogonal to mount — a secret can be both mounted and
+ proxied at the same time.
+ """
+
+ header: Optional[str] = None
+ """header is the HTTP header name to inject (e.g. "Authorization")."""
+
+ target_hosts: Optional[List[str]] = FieldInfo(alias="targetHosts", default=None)
+ """
+ target_hosts lists the hostnames to intercept (for example "github.com" or
+ "\\**.github.com"). Wildcards are subdomain-only and do not match the apex domain.
+ """
class Secret(BaseModel):
@@ -116,6 +137,16 @@ class Secret(BaseModel):
creator: Optional[Subject] = None
"""creator is the identity of the creator of the secret"""
+ credential_proxy: Optional[CredentialProxy] = FieldInfo(alias="credentialProxy", default=None)
+ """
+ credential_proxy configures transparent credential injection via the credential
+ proxy. When set, the credential proxy intercepts HTTPS traffic to the target
+ hosts and replaces the dummy mounted value with the real value in the specified
+ HTTP header. The real secret value is never exposed in the environment. This
+ field is orthogonal to mount — a secret can be both mounted and proxied at the
+ same time.
+ """
+
environment_variable: Optional[bool] = FieldInfo(alias="environmentVariable", default=None)
"""
secret will be created as an Environment Variable with the same name as the
diff --git a/src/gitpod/types/secret_create_params.py b/src/gitpod/types/secret_create_params.py
index 99e545f3..a00660b1 100644
--- a/src/gitpod/types/secret_create_params.py
+++ b/src/gitpod/types/secret_create_params.py
@@ -4,10 +4,11 @@
from typing_extensions import Annotated, TypedDict
+from .._types import SequenceNotStr
from .._utils import PropertyInfo
from .secret_scope_param import SecretScopeParam
-__all__ = ["SecretCreateParams"]
+__all__ = ["SecretCreateParams", "CredentialProxy"]
class SecretCreateParams(TypedDict, total=False):
@@ -24,6 +25,16 @@ class SecretCreateParams(TypedDict, total=False):
the docker registry host
"""
+ credential_proxy: Annotated[CredentialProxy, PropertyInfo(alias="credentialProxy")]
+ """
+ credential_proxy configures transparent credential injection when environments
+ materialize this secret. When set, the credential proxy intercepts HTTPS traffic
+ to the target hosts and replaces the dummy mounted value with the real value in
+ the specified HTTP header. The real secret value is never exposed in the
+ environment. This field is orthogonal to mount — a secret can be both mounted
+ and proxied at the same time.
+ """
+
environment_variable: Annotated[bool, PropertyInfo(alias="environmentVariable")]
"""
secret will be created as an Environment Variable with the same name as the
@@ -52,3 +63,24 @@ class SecretCreateParams(TypedDict, total=False):
value: str
"""value is the plaintext value of the secret"""
+
+
+class CredentialProxy(TypedDict, total=False):
+ """
+ credential_proxy configures transparent credential injection when
+ environments materialize this secret. When set, the credential proxy
+ intercepts HTTPS traffic to the target hosts and replaces the dummy
+ mounted value with the real value in the specified HTTP header. The real
+ secret value is never exposed in the environment.
+ This field is orthogonal to mount — a secret can be both mounted and
+ proxied at the same time.
+ """
+
+ header: str
+ """header is the HTTP header name to inject (e.g. "Authorization")."""
+
+ target_hosts: Annotated[SequenceNotStr[str], PropertyInfo(alias="targetHosts")]
+ """
+ target_hosts lists the hostnames to intercept (for example "github.com" or
+ "\\**.github.com"). Wildcards are subdomain-only and do not match the apex domain.
+ """
diff --git a/src/gitpod/types/shared/automation_trigger.py b/src/gitpod/types/shared/automation_trigger.py
index 8ffd732b..3d1fafb8 100644
--- a/src/gitpod/types/shared/automation_trigger.py
+++ b/src/gitpod/types/shared/automation_trigger.py
@@ -20,7 +20,7 @@ class AutomationTrigger(BaseModel):
The `prebuild` field starts the automation during a prebuild of an environment. This phase does not have user secrets available.
The `before_snapshot` field triggers the automation after all prebuild tasks complete but before the snapshot is taken.
This is used for tasks that need to run last during prebuilds, such as IDE warmup.
- Note: The prebuild and before_snapshot triggers can only be used with tasks, not services.
+ Note: The before_snapshot trigger can only be used with tasks, not services.
"""
before_snapshot: Optional[bool] = FieldInfo(alias="beforeSnapshot", default=None)
diff --git a/src/gitpod/types/shared/resource_role.py b/src/gitpod/types/shared/resource_role.py
index 45759abb..1f0241d7 100644
--- a/src/gitpod/types/shared/resource_role.py
+++ b/src/gitpod/types/shared/resource_role.py
@@ -12,6 +12,7 @@
"RESOURCE_ROLE_ORG_PROJECTS_ADMIN",
"RESOURCE_ROLE_ORG_AUTOMATIONS_ADMIN",
"RESOURCE_ROLE_ORG_GROUPS_ADMIN",
+ "RESOURCE_ROLE_ORG_ENVIRONMENTS_READER",
"RESOURCE_ROLE_ORG_AUDIT_LOG_READER",
"RESOURCE_ROLE_GROUP_ADMIN",
"RESOURCE_ROLE_GROUP_VIEWER",
diff --git a/src/gitpod/types/shared/runs_on.py b/src/gitpod/types/shared/runs_on.py
index 5f26ef47..33cf1bd5 100644
--- a/src/gitpod/types/shared/runs_on.py
+++ b/src/gitpod/types/shared/runs_on.py
@@ -18,9 +18,3 @@ class RunsOn(BaseModel):
machine: Optional[object] = None
"""Machine runs the service/task directly on the VM/machine level."""
-
- terminal: Optional[object] = None
- """
- Terminal runs the service inside a managed PTY terminal in the devcontainer.
- Users can attach to the terminal interactively via the terminal API.
- """
diff --git a/src/gitpod/types/shared_params/automation_trigger.py b/src/gitpod/types/shared_params/automation_trigger.py
index 27dc462b..1020f005 100644
--- a/src/gitpod/types/shared_params/automation_trigger.py
+++ b/src/gitpod/types/shared_params/automation_trigger.py
@@ -20,7 +20,7 @@ class AutomationTrigger(TypedDict, total=False):
The `prebuild` field starts the automation during a prebuild of an environment. This phase does not have user secrets available.
The `before_snapshot` field triggers the automation after all prebuild tasks complete but before the snapshot is taken.
This is used for tasks that need to run last during prebuilds, such as IDE warmup.
- Note: The prebuild and before_snapshot triggers can only be used with tasks, not services.
+ Note: The before_snapshot trigger can only be used with tasks, not services.
"""
before_snapshot: Annotated[bool, PropertyInfo(alias="beforeSnapshot")]
diff --git a/src/gitpod/types/shared_params/resource_role.py b/src/gitpod/types/shared_params/resource_role.py
index 94f5abcc..71f2a545 100644
--- a/src/gitpod/types/shared_params/resource_role.py
+++ b/src/gitpod/types/shared_params/resource_role.py
@@ -14,6 +14,7 @@
"RESOURCE_ROLE_ORG_PROJECTS_ADMIN",
"RESOURCE_ROLE_ORG_AUTOMATIONS_ADMIN",
"RESOURCE_ROLE_ORG_GROUPS_ADMIN",
+ "RESOURCE_ROLE_ORG_ENVIRONMENTS_READER",
"RESOURCE_ROLE_ORG_AUDIT_LOG_READER",
"RESOURCE_ROLE_GROUP_ADMIN",
"RESOURCE_ROLE_GROUP_VIEWER",
diff --git a/src/gitpod/types/shared_params/runs_on.py b/src/gitpod/types/shared_params/runs_on.py
index 0182710a..8fc0fd3c 100644
--- a/src/gitpod/types/shared_params/runs_on.py
+++ b/src/gitpod/types/shared_params/runs_on.py
@@ -20,9 +20,3 @@ class RunsOn(TypedDict, total=False):
machine: object
"""Machine runs the service/task directly on the VM/machine level."""
-
- terminal: object
- """
- Terminal runs the service inside a managed PTY terminal in the devcontainer.
- Users can attach to the terminal interactively via the terminal API.
- """
diff --git a/src/gitpod/types/workflow_trigger.py b/src/gitpod/types/workflow_trigger.py
index 4b044761..8d7b81ec 100644
--- a/src/gitpod/types/workflow_trigger.py
+++ b/src/gitpod/types/workflow_trigger.py
@@ -27,10 +27,18 @@ class PullRequest(BaseModel):
"PULL_REQUEST_EVENT_MERGED",
"PULL_REQUEST_EVENT_CLOSED",
"PULL_REQUEST_EVENT_READY_FOR_REVIEW",
+ "PULL_REQUEST_EVENT_REVIEW_REQUESTED",
]
]
] = None
+ integration_id: Optional[str] = FieldInfo(alias="integrationId", default=None)
+ """
+ integration_id is the optional ID of an integration that acts as the source of
+ webhook events. When set, the trigger will be activated when the webhook
+ receives events.
+ """
+
webhook_id: Optional[str] = FieldInfo(alias="webhookId", default=None)
"""
webhook_id is the optional ID of a webhook that this trigger is bound to. When
@@ -84,6 +92,8 @@ class WorkflowTrigger(BaseModel):
- Manual: Can use any context type
- Time: Typically uses Projects or Repositories context
- PullRequest: Can use any context, FromTrigger uses PR repository context
+ - Incident: Typically uses Projects or Repositories context (no inherent repo
+ context)
"""
manual: Optional[object] = None
diff --git a/src/gitpod/types/workflow_trigger_context.py b/src/gitpod/types/workflow_trigger_context.py
index 6d82400e..769acdb0 100644
--- a/src/gitpod/types/workflow_trigger_context.py
+++ b/src/gitpod/types/workflow_trigger_context.py
@@ -103,6 +103,7 @@ class WorkflowTriggerContext(BaseModel):
- Manual: Can use any context type
- Time: Typically uses Projects or Repositories context
- PullRequest: Can use any context, FromTrigger uses PR repository context
+ - Incident: Typically uses Projects or Repositories context (no inherent repo context)
"""
agent: Optional[Agent] = None
diff --git a/src/gitpod/types/workflow_trigger_context_param.py b/src/gitpod/types/workflow_trigger_context_param.py
index 4ebb2e49..1522f619 100644
--- a/src/gitpod/types/workflow_trigger_context_param.py
+++ b/src/gitpod/types/workflow_trigger_context_param.py
@@ -104,6 +104,7 @@ class WorkflowTriggerContextParam(TypedDict, total=False):
- Manual: Can use any context type
- Time: Typically uses Projects or Repositories context
- PullRequest: Can use any context, FromTrigger uses PR repository context
+ - Incident: Typically uses Projects or Repositories context (no inherent repo context)
"""
agent: Agent
diff --git a/src/gitpod/types/workflow_trigger_param.py b/src/gitpod/types/workflow_trigger_param.py
index ed28c9f8..0753a773 100644
--- a/src/gitpod/types/workflow_trigger_param.py
+++ b/src/gitpod/types/workflow_trigger_param.py
@@ -26,9 +26,17 @@ class PullRequest(TypedDict, total=False):
"PULL_REQUEST_EVENT_MERGED",
"PULL_REQUEST_EVENT_CLOSED",
"PULL_REQUEST_EVENT_READY_FOR_REVIEW",
+ "PULL_REQUEST_EVENT_REVIEW_REQUESTED",
]
]
+ integration_id: Annotated[Optional[str], PropertyInfo(alias="integrationId")]
+ """
+ integration_id is the optional ID of an integration that acts as the source of
+ webhook events. When set, the trigger will be activated when the webhook
+ receives events.
+ """
+
webhook_id: Annotated[Optional[str], PropertyInfo(alias="webhookId")]
"""
webhook_id is the optional ID of a webhook that this trigger is bound to. When
@@ -82,6 +90,8 @@ class WorkflowTriggerParam(TypedDict, total=False):
- Manual: Can use any context type
- Time: Typically uses Projects or Repositories context
- PullRequest: Can use any context, FromTrigger uses PR repository context
+ - Incident: Typically uses Projects or Repositories context (no inherent repo
+ context)
"""
manual: object
diff --git a/tests/api_resources/environments/automations/test_services.py b/tests/api_resources/environments/automations/test_services.py
index 3a80cc01..daa24e8d 100644
--- a/tests/api_resources/environments/automations/test_services.py
+++ b/tests/api_resources/environments/automations/test_services.py
@@ -69,13 +69,13 @@ def test_method_create_with_all_params(self, client: Gitpod) -> None:
"value_from": {"secret_ref": {"id": "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e"}},
}
],
+ "readiness_timeout": "+9125115.360s",
"runs_on": {
"docker": {
"environment": ["string"],
"image": "x",
},
"machine": {},
- "terminal": {},
},
"session": "session",
"spec_version": "specVersion",
@@ -182,13 +182,13 @@ def test_method_update_with_all_params(self, client: Gitpod) -> None:
"value_from": {"secret_ref": {"id": "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e"}},
}
],
+ "readiness_timeout": "+9125115.360s",
"runs_on": {
"docker": {
"environment": ["string"],
"image": "x",
},
"machine": {},
- "terminal": {},
},
},
status={
@@ -431,13 +431,13 @@ async def test_method_create_with_all_params(self, async_client: AsyncGitpod) ->
"value_from": {"secret_ref": {"id": "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e"}},
}
],
+ "readiness_timeout": "+9125115.360s",
"runs_on": {
"docker": {
"environment": ["string"],
"image": "x",
},
"machine": {},
- "terminal": {},
},
"session": "session",
"spec_version": "specVersion",
@@ -544,13 +544,13 @@ async def test_method_update_with_all_params(self, async_client: AsyncGitpod) ->
"value_from": {"secret_ref": {"id": "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e"}},
}
],
+ "readiness_timeout": "+9125115.360s",
"runs_on": {
"docker": {
"environment": ["string"],
"image": "x",
},
"machine": {},
- "terminal": {},
},
},
status={
diff --git a/tests/api_resources/environments/automations/test_tasks.py b/tests/api_resources/environments/automations/test_tasks.py
index 29599ec1..242dc25e 100644
--- a/tests/api_resources/environments/automations/test_tasks.py
+++ b/tests/api_resources/environments/automations/test_tasks.py
@@ -71,7 +71,6 @@ def test_method_create_with_all_params(self, client: Gitpod) -> None:
"image": "x",
},
"machine": {},
- "terminal": {},
},
},
)
@@ -178,7 +177,6 @@ def test_method_update_with_all_params(self, client: Gitpod) -> None:
"image": "x",
},
"machine": {},
- "terminal": {},
},
},
)
@@ -377,7 +375,6 @@ async def test_method_create_with_all_params(self, async_client: AsyncGitpod) ->
"image": "x",
},
"machine": {},
- "terminal": {},
},
},
)
@@ -484,7 +481,6 @@ async def test_method_update_with_all_params(self, async_client: AsyncGitpod) ->
"image": "x",
},
"machine": {},
- "terminal": {},
},
},
)
diff --git a/tests/api_resources/environments/test_automations.py b/tests/api_resources/environments/test_automations.py
index bed1aa5a..0f1eb63c 100644
--- a/tests/api_resources/environments/test_automations.py
+++ b/tests/api_resources/environments/test_automations.py
@@ -37,6 +37,7 @@ def test_method_upsert_with_all_params(self, client: Gitpod) -> None:
},
"description": "Development web server",
"name": "Web Server",
+ "readiness_timeout": "+9125115.360s",
"role": "",
"runs_on": {
"docker": {
@@ -44,7 +45,6 @@ def test_method_upsert_with_all_params(self, client: Gitpod) -> None:
"image": "x",
},
"machine": {},
- "terminal": {},
},
"triggered_by": ["postDevcontainerStart"],
}
@@ -61,7 +61,6 @@ def test_method_upsert_with_all_params(self, client: Gitpod) -> None:
"image": "x",
},
"machine": {},
- "terminal": {},
},
"triggered_by": ["postEnvironmentStart"],
}
@@ -119,6 +118,7 @@ async def test_method_upsert_with_all_params(self, async_client: AsyncGitpod) ->
},
"description": "Development web server",
"name": "Web Server",
+ "readiness_timeout": "+9125115.360s",
"role": "",
"runs_on": {
"docker": {
@@ -126,7 +126,6 @@ async def test_method_upsert_with_all_params(self, async_client: AsyncGitpod) ->
"image": "x",
},
"machine": {},
- "terminal": {},
},
"triggered_by": ["postDevcontainerStart"],
}
@@ -143,7 +142,6 @@ async def test_method_upsert_with_all_params(self, async_client: AsyncGitpod) ->
"image": "x",
},
"machine": {},
- "terminal": {},
},
"triggered_by": ["postEnvironmentStart"],
}
diff --git a/tests/api_resources/organizations/test_policies.py b/tests/api_resources/organizations/test_policies.py
index 56814db9..74257ac2 100644
--- a/tests/api_resources/organizations/test_policies.py
+++ b/tests/api_resources/organizations/test_policies.py
@@ -84,9 +84,11 @@ def test_method_update_with_all_params(self, client: Gitpod) -> None:
maximum_environments_per_user="20",
maximum_environment_timeout="3600s",
maximum_running_environments_per_user="5",
+ max_port_admission_level="ADMISSION_LEVEL_UNSPECIFIED",
members_create_projects=True,
members_require_projects=True,
port_sharing_disabled=True,
+ project_creation_defaults={"insights_enabled": True},
require_custom_domain_access=True,
restrict_account_creation_to_scim=True,
security_agent_policy={
@@ -203,9 +205,11 @@ async def test_method_update_with_all_params(self, async_client: AsyncGitpod) ->
maximum_environments_per_user="20",
maximum_environment_timeout="3600s",
maximum_running_environments_per_user="5",
+ max_port_admission_level="ADMISSION_LEVEL_UNSPECIFIED",
members_create_projects=True,
members_require_projects=True,
port_sharing_disabled=True,
+ project_creation_defaults={"insights_enabled": True},
require_custom_domain_access=True,
restrict_account_creation_to_scim=True,
security_agent_policy={
diff --git a/tests/api_resources/organizations/test_scim_configurations.py b/tests/api_resources/organizations/test_scim_configurations.py
index 2bd96136..03411ef3 100644
--- a/tests/api_resources/organizations/test_scim_configurations.py
+++ b/tests/api_resources/organizations/test_scim_configurations.py
@@ -39,6 +39,7 @@ def test_method_create_with_all_params(self, client: Gitpod) -> None:
scim_configuration = client.organizations.scim_configurations.create(
organization_id="b0e12f6c-4c67-429d-a4a6-d9838b5da047",
sso_configuration_id="d2c94c27-3b76-4a42-b88c-95a85e392c68",
+ allow_unverified_email_account_linking=True,
name="name",
token_expires_in="+9125115.360s",
)
@@ -119,6 +120,7 @@ def test_method_update(self, client: Gitpod) -> None:
def test_method_update_with_all_params(self, client: Gitpod) -> None:
scim_configuration = client.organizations.scim_configurations.update(
scim_configuration_id="d2c94c27-3b76-4a42-b88c-95a85e392c68",
+ allow_unverified_email_account_linking=True,
enabled=False,
name="name",
sso_configuration_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
@@ -290,6 +292,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGitpod) ->
scim_configuration = await async_client.organizations.scim_configurations.create(
organization_id="b0e12f6c-4c67-429d-a4a6-d9838b5da047",
sso_configuration_id="d2c94c27-3b76-4a42-b88c-95a85e392c68",
+ allow_unverified_email_account_linking=True,
name="name",
token_expires_in="+9125115.360s",
)
@@ -370,6 +373,7 @@ async def test_method_update(self, async_client: AsyncGitpod) -> None:
async def test_method_update_with_all_params(self, async_client: AsyncGitpod) -> None:
scim_configuration = await async_client.organizations.scim_configurations.update(
scim_configuration_id="d2c94c27-3b76-4a42-b88c-95a85e392c68",
+ allow_unverified_email_account_linking=True,
enabled=False,
name="name",
sso_configuration_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
diff --git a/tests/api_resources/test_automations.py b/tests/api_resources/test_automations.py
index f1412921..943c0d0a 100644
--- a/tests/api_resources/test_automations.py
+++ b/tests/api_resources/test_automations.py
@@ -113,6 +113,7 @@ def test_method_create_with_all_params(self, client: Gitpod) -> None:
"manual": {},
"pull_request": {
"events": ["PULL_REQUEST_EVENT_UNSPECIFIED"],
+ "integration_id": "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
"webhook_id": "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
},
"time": {"cron_expression": "cronExpression"},
@@ -256,6 +257,7 @@ def test_method_update_with_all_params(self, client: Gitpod) -> None:
"manual": {},
"pull_request": {
"events": ["PULL_REQUEST_EVENT_UNSPECIFIED"],
+ "integration_id": "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
"webhook_id": "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
},
"time": {"cron_expression": "cronExpression"},
@@ -793,6 +795,7 @@ async def test_method_create_with_all_params(self, async_client: AsyncGitpod) ->
"manual": {},
"pull_request": {
"events": ["PULL_REQUEST_EVENT_UNSPECIFIED"],
+ "integration_id": "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
"webhook_id": "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
},
"time": {"cron_expression": "cronExpression"},
@@ -936,6 +939,7 @@ async def test_method_update_with_all_params(self, async_client: AsyncGitpod) ->
"manual": {},
"pull_request": {
"events": ["PULL_REQUEST_EVENT_UNSPECIFIED"],
+ "integration_id": "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
"webhook_id": "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
},
"time": {"cron_expression": "cronExpression"},
diff --git a/tests/api_resources/test_environments.py b/tests/api_resources/test_environments.py
index 0b44c1c4..969a5688 100644
--- a/tests/api_resources/test_environments.py
+++ b/tests/api_resources/test_environments.py
@@ -108,7 +108,6 @@ def test_method_create_with_all_params(self, client: Gitpod) -> None:
"api_only": True,
"container_registry_basic_auth_host": "containerRegistryBasicAuthHost",
"credential_proxy": {
- "format": "FORMAT_UNSPECIFIED",
"header": "header",
"target_hosts": ["string"],
},
@@ -487,7 +486,6 @@ def test_method_create_from_project_with_all_params(self, client: Gitpod) -> Non
"api_only": True,
"container_registry_basic_auth_host": "containerRegistryBasicAuthHost",
"credential_proxy": {
- "format": "FORMAT_UNSPECIFIED",
"header": "header",
"target_hosts": ["string"],
},
@@ -808,7 +806,6 @@ async def test_method_create_with_all_params(self, async_client: AsyncGitpod) ->
"api_only": True,
"container_registry_basic_auth_host": "containerRegistryBasicAuthHost",
"credential_proxy": {
- "format": "FORMAT_UNSPECIFIED",
"header": "header",
"target_hosts": ["string"],
},
@@ -1187,7 +1184,6 @@ async def test_method_create_from_project_with_all_params(self, async_client: As
"api_only": True,
"container_registry_basic_auth_host": "containerRegistryBasicAuthHost",
"credential_proxy": {
- "format": "FORMAT_UNSPECIFIED",
"header": "header",
"target_hosts": ["string"],
},
diff --git a/tests/api_resources/test_runners.py b/tests/api_resources/test_runners.py
index 3a7c4506..dad788c8 100644
--- a/tests/api_resources/test_runners.py
+++ b/tests/api_resources/test_runners.py
@@ -21,7 +21,7 @@
RunnerCheckRepositoryAccessResponse,
RunnerCheckAuthenticationForHostResponse,
)
-from gitpod.pagination import SyncRunnersPage, AsyncRunnersPage
+from gitpod.pagination import SyncRunnersPage, AsyncRunnersPage, SyncOrganizationsPage, AsyncOrganizationsPage
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
@@ -416,7 +416,7 @@ def test_streaming_response_create_runner_token(self, client: Gitpod) -> None:
@parametrize
def test_method_list_scm_organizations(self, client: Gitpod) -> None:
runner = client.runners.list_scm_organizations()
- assert_matches_type(RunnerListScmOrganizationsResponse, runner, path=["response"])
+ assert_matches_type(SyncOrganizationsPage[RunnerListScmOrganizationsResponse], runner, path=["response"])
@pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
@@ -424,10 +424,15 @@ def test_method_list_scm_organizations_with_all_params(self, client: Gitpod) ->
runner = client.runners.list_scm_organizations(
token="token",
page_size=0,
+ pagination={
+ "token": "token",
+ "page_size": 100,
+ },
+ query="query",
runner_id="d2c94c27-3b76-4a42-b88c-95a85e392c68",
scm_host="github.com",
)
- assert_matches_type(RunnerListScmOrganizationsResponse, runner, path=["response"])
+ assert_matches_type(SyncOrganizationsPage[RunnerListScmOrganizationsResponse], runner, path=["response"])
@pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
@@ -437,7 +442,7 @@ def test_raw_response_list_scm_organizations(self, client: Gitpod) -> None:
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
runner = response.parse()
- assert_matches_type(RunnerListScmOrganizationsResponse, runner, path=["response"])
+ assert_matches_type(SyncOrganizationsPage[RunnerListScmOrganizationsResponse], runner, path=["response"])
@pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
@@ -447,7 +452,7 @@ def test_streaming_response_list_scm_organizations(self, client: Gitpod) -> None
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
runner = response.parse()
- assert_matches_type(RunnerListScmOrganizationsResponse, runner, path=["response"])
+ assert_matches_type(SyncOrganizationsPage[RunnerListScmOrganizationsResponse], runner, path=["response"])
assert cast(Any, response.is_closed) is True
@@ -925,7 +930,7 @@ async def test_streaming_response_create_runner_token(self, async_client: AsyncG
@parametrize
async def test_method_list_scm_organizations(self, async_client: AsyncGitpod) -> None:
runner = await async_client.runners.list_scm_organizations()
- assert_matches_type(RunnerListScmOrganizationsResponse, runner, path=["response"])
+ assert_matches_type(AsyncOrganizationsPage[RunnerListScmOrganizationsResponse], runner, path=["response"])
@pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
@@ -933,10 +938,15 @@ async def test_method_list_scm_organizations_with_all_params(self, async_client:
runner = await async_client.runners.list_scm_organizations(
token="token",
page_size=0,
+ pagination={
+ "token": "token",
+ "page_size": 100,
+ },
+ query="query",
runner_id="d2c94c27-3b76-4a42-b88c-95a85e392c68",
scm_host="github.com",
)
- assert_matches_type(RunnerListScmOrganizationsResponse, runner, path=["response"])
+ assert_matches_type(AsyncOrganizationsPage[RunnerListScmOrganizationsResponse], runner, path=["response"])
@pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
@@ -946,7 +956,7 @@ async def test_raw_response_list_scm_organizations(self, async_client: AsyncGitp
assert response.is_closed is True
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
runner = await response.parse()
- assert_matches_type(RunnerListScmOrganizationsResponse, runner, path=["response"])
+ assert_matches_type(AsyncOrganizationsPage[RunnerListScmOrganizationsResponse], runner, path=["response"])
@pytest.mark.skip(reason="Mock server tests are disabled")
@parametrize
@@ -956,7 +966,7 @@ async def test_streaming_response_list_scm_organizations(self, async_client: Asy
assert response.http_request.headers.get("X-Stainless-Lang") == "python"
runner = await response.parse()
- assert_matches_type(RunnerListScmOrganizationsResponse, runner, path=["response"])
+ assert_matches_type(AsyncOrganizationsPage[RunnerListScmOrganizationsResponse], runner, path=["response"])
assert cast(Any, response.is_closed) is True
diff --git a/tests/api_resources/test_secrets.py b/tests/api_resources/test_secrets.py
index cbed1cda..694d69ab 100644
--- a/tests/api_resources/test_secrets.py
+++ b/tests/api_resources/test_secrets.py
@@ -34,6 +34,10 @@ def test_method_create_with_all_params(self, client: Gitpod) -> None:
secret = client.secrets.create(
api_only=True,
container_registry_basic_auth_host="containerRegistryBasicAuthHost",
+ credential_proxy={
+ "header": "header",
+ "target_hosts": ["string"],
+ },
environment_variable=True,
file_path="filePath",
name="DATABASE_URL",
@@ -247,6 +251,10 @@ async def test_method_create_with_all_params(self, async_client: AsyncGitpod) ->
secret = await async_client.secrets.create(
api_only=True,
container_registry_basic_auth_host="containerRegistryBasicAuthHost",
+ credential_proxy={
+ "header": "header",
+ "target_hosts": ["string"],
+ },
environment_variable=True,
file_path="filePath",
name="DATABASE_URL",
diff --git a/tests/test_client.py b/tests/test_client.py
index 92b0eea3..c105b4d9 100644
--- a/tests/test_client.py
+++ b/tests/test_client.py
@@ -438,6 +438,30 @@ def test_default_query_option(self) -> None:
client.close()
+ def test_hardcoded_query_params_in_url(self, client: Gitpod) -> None:
+ request = client._build_request(FinalRequestOptions(method="get", url="/foo?beta=true"))
+ url = httpx.URL(request.url)
+ assert dict(url.params) == {"beta": "true"}
+
+ request = client._build_request(
+ FinalRequestOptions(
+ method="get",
+ url="/foo?beta=true",
+ params={"limit": "10", "page": "abc"},
+ )
+ )
+ url = httpx.URL(request.url)
+ assert dict(url.params) == {"beta": "true", "limit": "10", "page": "abc"}
+
+ request = client._build_request(
+ FinalRequestOptions(
+ method="get",
+ url="/files/a%2Fb?beta=true",
+ params={"limit": "10"},
+ )
+ )
+ assert request.url.raw_path == b"/files/a%2Fb?beta=true&limit=10"
+
def test_request_extra_json(self, client: Gitpod) -> None:
request = client._build_request(
FinalRequestOptions(
@@ -1363,6 +1387,30 @@ async def test_default_query_option(self) -> None:
await client.close()
+ async def test_hardcoded_query_params_in_url(self, async_client: AsyncGitpod) -> None:
+ request = async_client._build_request(FinalRequestOptions(method="get", url="/foo?beta=true"))
+ url = httpx.URL(request.url)
+ assert dict(url.params) == {"beta": "true"}
+
+ request = async_client._build_request(
+ FinalRequestOptions(
+ method="get",
+ url="/foo?beta=true",
+ params={"limit": "10", "page": "abc"},
+ )
+ )
+ url = httpx.URL(request.url)
+ assert dict(url.params) == {"beta": "true", "limit": "10", "page": "abc"}
+
+ request = async_client._build_request(
+ FinalRequestOptions(
+ method="get",
+ url="/files/a%2Fb?beta=true",
+ params={"limit": "10"},
+ )
+ )
+ assert request.url.raw_path == b"/files/a%2Fb?beta=true&limit=10"
+
def test_request_extra_json(self, client: Gitpod) -> None:
request = client._build_request(
FinalRequestOptions(
diff --git a/tests/test_deepcopy.py b/tests/test_deepcopy.py
deleted file mode 100644
index c498f531..00000000
--- a/tests/test_deepcopy.py
+++ /dev/null
@@ -1,58 +0,0 @@
-from gitpod._utils import deepcopy_minimal
-
-
-def assert_different_identities(obj1: object, obj2: object) -> None:
- assert obj1 == obj2
- assert id(obj1) != id(obj2)
-
-
-def test_simple_dict() -> None:
- obj1 = {"foo": "bar"}
- obj2 = deepcopy_minimal(obj1)
- assert_different_identities(obj1, obj2)
-
-
-def test_nested_dict() -> None:
- obj1 = {"foo": {"bar": True}}
- obj2 = deepcopy_minimal(obj1)
- assert_different_identities(obj1, obj2)
- assert_different_identities(obj1["foo"], obj2["foo"])
-
-
-def test_complex_nested_dict() -> None:
- obj1 = {"foo": {"bar": [{"hello": "world"}]}}
- obj2 = deepcopy_minimal(obj1)
- assert_different_identities(obj1, obj2)
- assert_different_identities(obj1["foo"], obj2["foo"])
- assert_different_identities(obj1["foo"]["bar"], obj2["foo"]["bar"])
- assert_different_identities(obj1["foo"]["bar"][0], obj2["foo"]["bar"][0])
-
-
-def test_simple_list() -> None:
- obj1 = ["a", "b", "c"]
- obj2 = deepcopy_minimal(obj1)
- assert_different_identities(obj1, obj2)
-
-
-def test_nested_list() -> None:
- obj1 = ["a", [1, 2, 3]]
- obj2 = deepcopy_minimal(obj1)
- assert_different_identities(obj1, obj2)
- assert_different_identities(obj1[1], obj2[1])
-
-
-class MyObject: ...
-
-
-def test_ignores_other_types() -> None:
- # custom classes
- my_obj = MyObject()
- obj1 = {"foo": my_obj}
- obj2 = deepcopy_minimal(obj1)
- assert_different_identities(obj1, obj2)
- assert obj1["foo"] is my_obj
-
- # tuples
- obj3 = ("a", "b")
- obj4 = deepcopy_minimal(obj3)
- assert obj3 is obj4
diff --git a/tests/test_extract_files.py b/tests/test_extract_files.py
index 0ca5a8dc..4a107218 100644
--- a/tests/test_extract_files.py
+++ b/tests/test_extract_files.py
@@ -4,7 +4,7 @@
import pytest
-from gitpod._types import FileTypes
+from gitpod._types import FileTypes, ArrayFormat
from gitpod._utils import extract_files
@@ -35,6 +35,12 @@ def test_multiple_files() -> None:
assert query == {"documents": [{}, {}]}
+def test_top_level_file_array() -> None:
+ query = {"files": [b"file one", b"file two"], "title": "hello"}
+ assert extract_files(query, paths=[["files", ""]]) == [("files[]", b"file one"), ("files[]", b"file two")]
+ assert query == {"title": "hello"}
+
+
@pytest.mark.parametrize(
"query,paths,expected",
[
@@ -62,3 +68,24 @@ def test_ignores_incorrect_paths(
expected: list[tuple[str, FileTypes]],
) -> None:
assert extract_files(query, paths=paths) == expected
+
+
+@pytest.mark.parametrize(
+ "array_format,expected_top_level,expected_nested",
+ [
+ ("brackets", [("files[]", b"a"), ("files[]", b"b")], [("items[][file]", b"a"), ("items[][file]", b"b")]),
+ ("repeat", [("files", b"a"), ("files", b"b")], [("items[file]", b"a"), ("items[file]", b"b")]),
+ ("comma", [("files", b"a"), ("files", b"b")], [("items[file]", b"a"), ("items[file]", b"b")]),
+ ("indices", [("files[0]", b"a"), ("files[1]", b"b")], [("items[0][file]", b"a"), ("items[1][file]", b"b")]),
+ ],
+)
+def test_array_format_controls_file_field_names(
+ array_format: ArrayFormat,
+ expected_top_level: list[tuple[str, FileTypes]],
+ expected_nested: list[tuple[str, FileTypes]],
+) -> None:
+ top_level = {"files": [b"a", b"b"]}
+ assert extract_files(top_level, paths=[["files", ""]], array_format=array_format) == expected_top_level
+
+ nested = {"items": [{"file": b"a"}, {"file": b"b"}]}
+ assert extract_files(nested, paths=[["items", "", "file"]], array_format=array_format) == expected_nested
diff --git a/tests/test_files.py b/tests/test_files.py
index efde0d4e..d8f636ba 100644
--- a/tests/test_files.py
+++ b/tests/test_files.py
@@ -4,7 +4,8 @@
import pytest
from dirty_equals import IsDict, IsList, IsBytes, IsTuple
-from gitpod._files import to_httpx_files, async_to_httpx_files
+from gitpod._files import to_httpx_files, deepcopy_with_paths, async_to_httpx_files
+from gitpod._utils import extract_files
readme_path = Path(__file__).parent.parent.joinpath("README.md")
@@ -49,3 +50,99 @@ def test_string_not_allowed() -> None:
"file": "foo", # type: ignore
}
)
+
+
+def assert_different_identities(obj1: object, obj2: object) -> None:
+ assert obj1 == obj2
+ assert obj1 is not obj2
+
+
+class TestDeepcopyWithPaths:
+ def test_copies_top_level_dict(self) -> None:
+ original = {"file": b"data", "other": "value"}
+ result = deepcopy_with_paths(original, [["file"]])
+ assert_different_identities(result, original)
+
+ def test_file_value_is_same_reference(self) -> None:
+ file_bytes = b"contents"
+ original = {"file": file_bytes}
+ result = deepcopy_with_paths(original, [["file"]])
+ assert_different_identities(result, original)
+ assert result["file"] is file_bytes
+
+ def test_list_popped_wholesale(self) -> None:
+ files = [b"f1", b"f2"]
+ original = {"files": files, "title": "t"}
+ result = deepcopy_with_paths(original, [["files", ""]])
+ assert_different_identities(result, original)
+ result_files = result["files"]
+ assert isinstance(result_files, list)
+ assert_different_identities(result_files, files)
+
+ def test_nested_array_path_copies_list_and_elements(self) -> None:
+ elem1 = {"file": b"f1", "extra": 1}
+ elem2 = {"file": b"f2", "extra": 2}
+ original = {"items": [elem1, elem2]}
+ result = deepcopy_with_paths(original, [["items", "", "file"]])
+ assert_different_identities(result, original)
+ result_items = result["items"]
+ assert isinstance(result_items, list)
+ assert_different_identities(result_items, original["items"])
+ assert_different_identities(result_items[0], elem1)
+ assert_different_identities(result_items[1], elem2)
+
+ def test_empty_paths_returns_same_object(self) -> None:
+ original = {"foo": "bar"}
+ result = deepcopy_with_paths(original, [])
+ assert result is original
+
+ def test_multiple_paths(self) -> None:
+ f1 = b"file1"
+ f2 = b"file2"
+ original = {"a": f1, "b": f2, "c": "unchanged"}
+ result = deepcopy_with_paths(original, [["a"], ["b"]])
+ assert_different_identities(result, original)
+ assert result["a"] is f1
+ assert result["b"] is f2
+ assert result["c"] is original["c"]
+
+ def test_extract_files_does_not_mutate_original_top_level(self) -> None:
+ file_bytes = b"contents"
+ original = {"file": file_bytes, "other": "value"}
+
+ copied = deepcopy_with_paths(original, [["file"]])
+ extracted = extract_files(copied, paths=[["file"]])
+
+ assert extracted == [("file", file_bytes)]
+ assert original == {"file": file_bytes, "other": "value"}
+ assert copied == {"other": "value"}
+
+ def test_extract_files_does_not_mutate_original_nested_array_path(self) -> None:
+ file1 = b"f1"
+ file2 = b"f2"
+ original = {
+ "items": [
+ {"file": file1, "extra": 1},
+ {"file": file2, "extra": 2},
+ ],
+ "title": "example",
+ }
+
+ copied = deepcopy_with_paths(original, [["items", "", "file"]])
+ extracted = extract_files(copied, paths=[["items", "", "file"]])
+
+ assert [entry for _, entry in extracted] == [file1, file2]
+ assert original == {
+ "items": [
+ {"file": file1, "extra": 1},
+ {"file": file2, "extra": 2},
+ ],
+ "title": "example",
+ }
+ assert copied == {
+ "items": [
+ {"extra": 1},
+ {"extra": 2},
+ ],
+ "title": "example",
+ }
diff --git a/tests/test_models.py b/tests/test_models.py
index 47fa9efd..ce2b6d8d 100644
--- a/tests/test_models.py
+++ b/tests/test_models.py
@@ -1,7 +1,8 @@
import json
-from typing import TYPE_CHECKING, Any, Dict, List, Union, Optional, cast
+from typing import TYPE_CHECKING, Any, Dict, List, Union, Iterable, Optional, cast
from datetime import datetime, timezone
-from typing_extensions import Literal, Annotated, TypeAliasType
+from collections import deque
+from typing_extensions import Literal, Annotated, TypedDict, TypeAliasType
import pytest
import pydantic
@@ -9,7 +10,7 @@
from gitpod._utils import PropertyInfo
from gitpod._compat import PYDANTIC_V1, parse_obj, model_dump, model_json
-from gitpod._models import DISCRIMINATOR_CACHE, BaseModel, construct_type
+from gitpod._models import DISCRIMINATOR_CACHE, BaseModel, EagerIterable, construct_type
class BasicModel(BaseModel):
@@ -961,3 +962,56 @@ def __getattr__(self, attr: str) -> Item: ...
assert model.a.prop == 1
assert isinstance(model.a, Item)
assert model.other == "foo"
+
+
+# NOTE: Workaround for Pydantic Iterable behavior.
+# Iterable fields are replaced with a ValidatorIterator and may be consumed
+# during serialization, which can cause subsequent dumps to return empty data.
+# See: https://github.com/pydantic/pydantic/issues/9541
+@pytest.mark.parametrize(
+ "data, expected_validated",
+ [
+ ([1, 2, 3], [1, 2, 3]),
+ ((1, 2, 3), (1, 2, 3)),
+ (set([1, 2, 3]), set([1, 2, 3])),
+ (iter([1, 2, 3]), [1, 2, 3]),
+ ([], []),
+ ((x for x in [1, 2, 3]), [1, 2, 3]),
+ (map(lambda x: x, [1, 2, 3]), [1, 2, 3]),
+ (frozenset([1, 2, 3]), frozenset([1, 2, 3])),
+ (deque([1, 2, 3]), deque([1, 2, 3])),
+ ],
+ ids=["list", "tuple", "set", "iterator", "empty", "generator", "map", "frozenset", "deque"],
+)
+@pytest.mark.skipif(PYDANTIC_V1, reason="this is only supported in pydantic v2")
+def test_iterable_construction(data: Iterable[int], expected_validated: Iterable[int]) -> None:
+ class TypeWithIterable(TypedDict):
+ items: EagerIterable[int]
+
+ class Model(BaseModel):
+ data: TypeWithIterable
+
+ m = Model.model_validate({"data": {"items": data}})
+ assert m.data["items"] == expected_validated
+
+ # Verify repeated dumps don't lose data (the original bug)
+ assert m.model_dump()["data"]["items"] == list(expected_validated)
+ assert m.model_dump()["data"]["items"] == list(expected_validated)
+
+
+@pytest.mark.skipif(PYDANTIC_V1, reason="this is only supported in pydantic v2")
+def test_iterable_construction_str_falls_back_to_list() -> None:
+ # str is iterable (over chars), but str(list_of_chars) produces the list's repr
+ # rather than reconstructing a string from items. We special-case str to fall
+ # back to list instead of attempting reconstruction.
+ class TypeWithIterable(TypedDict):
+ items: EagerIterable[str]
+
+ class Model(BaseModel):
+ data: TypeWithIterable
+
+ m = Model.model_validate({"data": {"items": "hello"}})
+
+ # falls back to list of chars rather than calling str(["h", "e", "l", "l", "o"])
+ assert m.data["items"] == ["h", "e", "l", "l", "o"]
+ assert m.model_dump()["data"]["items"] == ["h", "e", "l", "l", "o"]