From 0a1ade4f56528e3a5a452afb1700fd0199dd5d2c Mon Sep 17 00:00:00 2001 From: Gavin Aguiar Date: Mon, 20 Oct 2025 10:34:24 -0500 Subject: [PATCH 1/7] Updating lc image to flex --- eng/templates/official/jobs/ci-lc-tests.yml | 4 ++-- workers/tests/utils/testutils_lc.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/eng/templates/official/jobs/ci-lc-tests.yml b/eng/templates/official/jobs/ci-lc-tests.yml index eed1db324..9eda89694 100644 --- a/eng/templates/official/jobs/ci-lc-tests.yml +++ b/eng/templates/official/jobs/ci-lc-tests.yml @@ -12,14 +12,14 @@ jobs: strategy: matrix: - Python39: - PYTHON_VERSION: '3.9' Python310: PYTHON_VERSION: '3.10' Python311: PYTHON_VERSION: '3.11' Python312: PYTHON_VERSION: '3.12' + Python313: + PYTHON_VERSION: '3.13' steps: - task: UsePythonVersion@0 inputs: diff --git a/workers/tests/utils/testutils_lc.py b/workers/tests/utils/testutils_lc.py index 94979adb0..f521abc62 100644 --- a/workers/tests/utils/testutils_lc.py +++ b/workers/tests/utils/testutils_lc.py @@ -26,7 +26,7 @@ # Linux Consumption Testing Constants _DOCKER_PATH = "DOCKER_PATH" _DOCKER_DEFAULT_PATH = "docker" -_MESH_IMAGE_URL = "https://mcr.microsoft.com/v2/azure-functions/mesh/tags/list" +_MESH_IMAGE_URL = "https://mcr.microsoft.com/v2/azure-functions/bookworm/flexconsumption/tags/list" _MESH_IMAGE_REPO = "mcr.microsoft.com/azure-functions/mesh" _FUNC_GITHUB_ZIP = "https://github.com/Azure/azure-functions-python-library" \ "/archive/refs/heads/dev.zip" From b8dfeeecf8a33010a20cf89a5baa3c22fff0b3f2 Mon Sep 17 00:00:00 2001 From: Gavin Aguiar Date: Mon, 20 Oct 2025 11:28:02 -0500 Subject: [PATCH 2/7] Fixing test --- .../tests/consumption_tests/test_linux_consumption.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/workers/tests/consumption_tests/test_linux_consumption.py b/workers/tests/consumption_tests/test_linux_consumption.py index 109a6806c..7e12b602a 100644 --- a/workers/tests/consumption_tests/test_linux_consumption.py +++ b/workers/tests/consumption_tests/test_linux_consumption.py @@ -213,16 +213,6 @@ def test_reload_variables_after_oom_error(self): sleep(2) logs = ctrl.get_container_logs() - self.assertRegex( - logs, - r"Applying prioritize_customer_dependencies: " - r"worker_dependencies_path: \/azure-functions-host\/" - r"workers\/python\/.*?\/LINUX\/X64," - r" customer_dependencies_path: \/home\/site\/wwwroot\/" - r"\.python_packages\/lib\/site-packages, working_directory:" - r" \/home\/site\/wwwroot, Linux Consumption: True," - r" Placeholder: False") - self.assertNotIn("Failure Exception: ModuleNotFoundError", logs) From 6f777d1985768f471aa88331617df4d20a738b72 Mon Sep 17 00:00:00 2001 From: Gavin Aguiar Date: Mon, 20 Oct 2025 12:28:55 -0500 Subject: [PATCH 3/7] Mounting proxy worker for 3.13 tests --- workers/tests/utils/testutils_lc.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/workers/tests/utils/testutils_lc.py b/workers/tests/utils/testutils_lc.py index f521abc62..2ee8e3020 100644 --- a/workers/tests/utils/testutils_lc.py +++ b/workers/tests/utils/testutils_lc.py @@ -219,7 +219,13 @@ def spawn_container(self, container according to the image name. Return the port of container. 
""" # Construct environment variables and start the docker container - worker_path = os.path.join(PROJECT_ROOT, 'azure_functions_worker') + worker_name = 'azure_functions_worker' \ + if sys.version_info.minor < 13 else 'proxy_worker' + + worker_path = os.path.join(PROJECT_ROOT, worker_name) + container_worker_path = ( + f"/azure-functions-host/workers/python/{self._py_version}/LINUX/X64/{worker_name}" + ) # TODO: Mount library in docker container # self._download_azure_functions() @@ -227,11 +233,6 @@ def spawn_container(self, # Download python extension base package ext_folder = self._download_extensions() - container_worker_path = ( - f"/azure-functions-host/workers/python/{self._py_version}/" - "LINUX/X64/azure_functions_worker" - ) - base_ext_container_path = ( f"/azure-functions-host/workers/python/{self._py_version}/" "LINUX/X64/azurefunctions/extensions/base" From 84497838e4f3c00773a7423d070ddea79744d2d6 Mon Sep 17 00:00:00 2001 From: Gavin Aguiar Date: Mon, 20 Oct 2025 15:25:02 -0500 Subject: [PATCH 4/7] Debug logging fix --- .../handle_event.py | 2 +- .../azure_functions_runtime/handle_event.py | 2 +- workers/tests/utils/testutils_lc.py | 41 ++++++++++++++++++- 3 files changed, 41 insertions(+), 4 deletions(-) diff --git a/runtimes/v1/azure_functions_runtime_v1/handle_event.py b/runtimes/v1/azure_functions_runtime_v1/handle_event.py index 1b2836348..4e877e3f8 100644 --- a/runtimes/v1/azure_functions_runtime_v1/handle_event.py +++ b/runtimes/v1/azure_functions_runtime_v1/handle_event.py @@ -276,7 +276,7 @@ async def function_environment_reload_request(request): os.environ[var] = env_vars[var] if is_envvar_true(PYTHON_ENABLE_DEBUG_LOGGING): - root_logger = logging.getLogger("azure.functions") + root_logger = logging.getLogger() root_logger.setLevel(logging.DEBUG) # calling load_binding_registry again since the diff --git a/runtimes/v2/azure_functions_runtime/handle_event.py b/runtimes/v2/azure_functions_runtime/handle_event.py index 082763191..3d6b8acfe 100644 --- a/runtimes/v2/azure_functions_runtime/handle_event.py +++ b/runtimes/v2/azure_functions_runtime/handle_event.py @@ -326,7 +326,7 @@ async def function_environment_reload_request(request): # TODO: Apply PYTHON_THREADPOOL_THREAD_COUNT if is_envvar_true(PYTHON_ENABLE_DEBUG_LOGGING): - root_logger = logging.getLogger("azure.functions") + root_logger = logging.getLogger() root_logger.setLevel(logging.DEBUG) # calling load_binding_registry again since the diff --git a/workers/tests/utils/testutils_lc.py b/workers/tests/utils/testutils_lc.py index 2ee8e3020..4f532670a 100644 --- a/workers/tests/utils/testutils_lc.py +++ b/workers/tests/utils/testutils_lc.py @@ -26,7 +26,10 @@ # Linux Consumption Testing Constants _DOCKER_PATH = "DOCKER_PATH" _DOCKER_DEFAULT_PATH = "docker" -_MESH_IMAGE_URL = "https://mcr.microsoft.com/v2/azure-functions/bookworm/flexconsumption/tags/list" +_MESH_IMAGE_URL = ( + "https://mcr.microsoft.com/v2/azure-functions/bookworm/" + "flexconsumption/tags/list" +) _MESH_IMAGE_REPO = "mcr.microsoft.com/azure-functions/mesh" _FUNC_GITHUB_ZIP = "https://github.com/Azure/azure-functions-python-library" \ "/archive/refs/heads/dev.zip" @@ -224,9 +227,33 @@ def spawn_container(self, worker_path = os.path.join(PROJECT_ROOT, worker_name) container_worker_path = ( - f"/azure-functions-host/workers/python/{self._py_version}/LINUX/X64/{worker_name}" + f"/azure-functions-host/workers/python/{self._py_version}/" + f"LINUX/X64/{worker_name}" ) + # For Python 3.13+, also mount the runtime libraries + runtime_v2_path = None + 
runtime_v1_path = None + container_runtime_v2_path = None + container_runtime_v1_path = None + + if sys.version_info.minor >= 13: + repo_root = os.path.dirname(PROJECT_ROOT) + runtime_v2_path = os.path.join( + repo_root, 'runtimes', 'v2', 'azure_functions_runtime' + ) + runtime_v1_path = os.path.join( + repo_root, 'runtimes', 'v1', 'azure_functions_runtime_v1' + ) + container_runtime_v2_path = ( + f"/azure-functions-host/workers/python/{self._py_version}/" + "LINUX/X64/azure_functions_runtime" + ) + container_runtime_v1_path = ( + f"/azure-functions-host/workers/python/{self._py_version}/" + "LINUX/X64/azure_functions_runtime_v1" + ) + # TODO: Mount library in docker container # self._download_azure_functions() @@ -256,6 +283,16 @@ def spawn_container(self, run_cmd.extend(["-e", f"WEBSITE_SITE_NAME={self._uuid}"]) run_cmd.extend(["-e", "WEBSITE_SKU=Dynamic"]) run_cmd.extend(["-v", f'{worker_path}:{container_worker_path}']) + + # Mount runtime libraries for Python 3.13+ + if runtime_v2_path and runtime_v1_path: + run_cmd.extend([ + "-v", f'{runtime_v2_path}:{container_runtime_v2_path}' + ]) + run_cmd.extend([ + "-v", f'{runtime_v1_path}:{container_runtime_v1_path}' + ]) + run_cmd.extend(["-v", f'{base_ext_local_path}:{base_ext_container_path}']) From 5fb31779e54a14547dba3538740868e03e8260ba Mon Sep 17 00:00:00 2001 From: Gavin Aguiar Date: Wed, 22 Oct 2025 10:34:24 -0500 Subject: [PATCH 5/7] Adding py314 --- eng/templates/official/jobs/ci-lc-tests.yml | 2 ++ workers/tests/consumption_tests/test_linux_consumption.py | 1 + workers/tests/utils/testutils_lc.py | 3 ++- 3 files changed, 5 insertions(+), 1 deletion(-) diff --git a/eng/templates/official/jobs/ci-lc-tests.yml b/eng/templates/official/jobs/ci-lc-tests.yml index 9eda89694..578231392 100644 --- a/eng/templates/official/jobs/ci-lc-tests.yml +++ b/eng/templates/official/jobs/ci-lc-tests.yml @@ -20,6 +20,8 @@ jobs: PYTHON_VERSION: '3.12' Python313: PYTHON_VERSION: '3.13' + Python314: + PYTHON_VERSION: '3.14' steps: - task: UsePythonVersion@0 inputs: diff --git a/workers/tests/consumption_tests/test_linux_consumption.py b/workers/tests/consumption_tests/test_linux_consumption.py index 7e12b602a..d8bc9d2a5 100644 --- a/workers/tests/consumption_tests/test_linux_consumption.py +++ b/workers/tests/consumption_tests/test_linux_consumption.py @@ -213,6 +213,7 @@ def test_reload_variables_after_oom_error(self): sleep(2) logs = ctrl.get_container_logs() + assert "Finished prioritize_customer_dependencies" in logs self.assertNotIn("Failure Exception: ModuleNotFoundError", logs) diff --git a/workers/tests/utils/testutils_lc.py b/workers/tests/utils/testutils_lc.py index 4f532670a..c3ef48740 100644 --- a/workers/tests/utils/testutils_lc.py +++ b/workers/tests/utils/testutils_lc.py @@ -26,8 +26,9 @@ # Linux Consumption Testing Constants _DOCKER_PATH = "DOCKER_PATH" _DOCKER_DEFAULT_PATH = "docker" +_OS_TYPE = "bookworm" if sys.version_info.minor < 14 else "noble" _MESH_IMAGE_URL = ( - "https://mcr.microsoft.com/v2/azure-functions/bookworm/" + f"https://mcr.microsoft.com/v2/azure-functions/{_OS_TYPE}/" "flexconsumption/tags/list" ) _MESH_IMAGE_REPO = "mcr.microsoft.com/azure-functions/mesh" From bfd59eebc8bc62501aa19b468b303a1020103bc6 Mon Sep 17 00:00:00 2001 From: Gavin Aguiar Date: Mon, 27 Oct 2025 10:46:28 -0500 Subject: [PATCH 6/7] Updated image repo for flexconsumption --- workers/tests/utils/testutils_lc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/workers/tests/utils/testutils_lc.py b/workers/tests/utils/testutils_lc.py 
index c3ef48740..bae6b9278 100644 --- a/workers/tests/utils/testutils_lc.py +++ b/workers/tests/utils/testutils_lc.py @@ -31,7 +31,7 @@ f"https://mcr.microsoft.com/v2/azure-functions/{_OS_TYPE}/" "flexconsumption/tags/list" ) -_MESH_IMAGE_REPO = "mcr.microsoft.com/azure-functions/mesh" +_MESH_IMAGE_REPO = f"mcr.microsoft.com/azure-functions/{_OS_TYPE}/flexconsumption" _FUNC_GITHUB_ZIP = "https://github.com/Azure/azure-functions-python-library" \ "/archive/refs/heads/dev.zip" _FUNC_FILE_NAME = "azure-functions-python-library-dev" From 28bc1200884745132befc7b6f2c368ba0a440cb1 Mon Sep 17 00:00:00 2001 From: Gavin Aguiar Date: Tue, 2 Dec 2025 11:25:48 -0600 Subject: [PATCH 7/7] Token updates for flex --- workers/pyproject.toml | 1 + workers/tests/utils/testutils_lc.py | 125 +++++++++------------------- 2 files changed, 39 insertions(+), 87 deletions(-) diff --git a/workers/pyproject.toml b/workers/pyproject.toml index 09793ed61..e932839af 100644 --- a/workers/pyproject.toml +++ b/workers/pyproject.toml @@ -86,6 +86,7 @@ dev = [ "pre-commit", "invoke", "cryptography", + "pyjwt", "jsonpickle", "orjson" ] diff --git a/workers/tests/utils/testutils_lc.py b/workers/tests/utils/testutils_lc.py index bae6b9278..320531e16 100644 --- a/workers/tests/utils/testutils_lc.py +++ b/workers/tests/utils/testutils_lc.py @@ -77,24 +77,16 @@ def assign_container(self, env: Dict[str, str] = {}): env["WEBSITE_SITE_NAME"] = self._uuid env["WEBSITE_HOSTNAME"] = f"{self._uuid}.azurewebsites.com" - # Debug: Print SCM_RUN_FROM_PACKAGE value - scm_package = env.get("SCM_RUN_FROM_PACKAGE", "NOT_SET") - print(f"🔍 DEBUG: SCM_RUN_FROM_PACKAGE in env: {scm_package}") - # Wait for the container to be ready - max_retries = 60 + max_retries = 10 for i in range(max_retries): try: ping_req = requests.Request(method="GET", url=f"{url}/admin/host/ping") ping_response = self.send_request(ping_req) if ping_response.ok: - print(f"🔍 DEBUG: Container ready after {i + 1} attempts") break - else: - print("🔍 DEBUG: Ping attempt {i+1}/60 failed with status " - f"{ping_response.status_code}") except Exception as e: - print(f"🔍 DEBUG: Ping attempt {i + 1}/60 failed with exception: {e}") + pass time.sleep(1) else: raise RuntimeError(f'Container {self._uuid} did not become ready in time') @@ -129,16 +121,9 @@ def send_request( prepped = session.prepare_request(req) prepped.headers['Content-Type'] = 'application/json' - # Try to generate a proper JWT token first - try: - jwt_token = self._generate_jwt_token() - # Use JWT token for newer Azure Functions host versions - prepped.headers['Authorization'] = f'Bearer {jwt_token}' - except ImportError: - # Fall back to the old SWT token format if jwt library is not available - swt_token = self._get_site_restricted_token() - prepped.headers['x-ms-site-restricted-token'] = swt_token - prepped.headers['Authorization'] = f'Bearer {swt_token}' + # For flex consumption, use JWT Bearer token + jwt_token = self._generate_jwt_token() + prepped.headers['Authorization'] = f'Bearer {jwt_token}' # Add additional headers required by Azure Functions host prepped.headers['x-site-deployment-id'] = self._uuid @@ -219,10 +204,9 @@ def _download_extensions() -> str: def spawn_container(self, image: str, env: Dict[str, str] = {}) -> int: - """Create a docker container and record its port. Create a docker - container according to the image name. Return the port of container. 
- """ - # Construct environment variables and start the docker container + """Create a docker container and record its port.""" + if not os.getenv('_DUMMY_CONT_KEY'): + os.environ['_DUMMY_CONT_KEY'] = "Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==" worker_name = 'azure_functions_worker' \ if sys.version_info.minor < 13 else 'proxy_worker' @@ -255,10 +239,6 @@ def spawn_container(self, "LINUX/X64/azure_functions_runtime_v1" ) - # TODO: Mount library in docker container - # self._download_azure_functions() - - # Download python extension base package ext_folder = self._download_extensions() base_ext_container_path = ( @@ -277,10 +257,12 @@ def spawn_container(self, run_cmd.extend(["--cap-add", "SYS_ADMIN"]) run_cmd.extend(["--device", "/dev/fuse"]) run_cmd.extend(["-e", f"CONTAINER_NAME={self._uuid}"]) - run_cmd.extend(["-e", - f"CONTAINER_ENCRYPTION_KEY={os.getenv('_DUMMY_CONT_KEY')}"]) + encryption_key = os.getenv('_DUMMY_CONT_KEY') + full_key_bytes = base64.b64decode(encryption_key.encode()) + aes_key_bytes = full_key_bytes[:32] + aes_key_base64 = base64.b64encode(aes_key_bytes).decode() + run_cmd.extend(["-e", f"CONTAINER_ENCRYPTION_KEY={aes_key_base64}"]) run_cmd.extend(["-e", "WEBSITE_PLACEHOLDER_MODE=1"]) - # Add required environment variables for JWT issuer validation run_cmd.extend(["-e", f"WEBSITE_SITE_NAME={self._uuid}"]) run_cmd.extend(["-e", "WEBSITE_SKU=Dynamic"]) run_cmd.extend(["-v", f'{worker_path}:{container_worker_path}']) @@ -355,103 +337,72 @@ def safe_kill_container(self) -> bool: @classmethod def _get_site_restricted_token(cls) -> str: - """Get the header value which can be used by x-ms-site-restricted-token - which expires in one day. - """ - # For compatibility with older Azure Functions host versions, - # try the old SWT format first + """Get SWT token for site-restricted authentication.""" exp_ns = int((time.time() + 24 * 60 * 60) * 1000000000) token = cls._encrypt_context(os.getenv('_DUMMY_CONT_KEY'), f'exp={exp_ns}') return token def _generate_jwt_token(self) -> str: - """Generate a proper JWT token for newer Azure Functions host versions.""" + """Generate JWT token for Flex consumption authentication.""" try: import jwt - except ImportError: - # Fall back to SWT format if JWT library not available - return self._get_site_restricted_token() + except ImportError as e: + raise RuntimeError("PyJWT library required. 
Install with: pip install pyjwt") from e - # JWT payload matching Azure Functions host expectations - exp_time = int(time.time()) + (24 * 60 * 60) # 24 hours from now - - # Use the site name consistently for issuer and audience validation + exp_time = int(time.time()) + (24 * 60 * 60) + iat_time = int(time.time()) site_name = self._uuid - container_name = self._uuid - - # According to Azure Functions host analysis, use site-specific issuer format - # This matches the ValidIssuers array in ScriptJwtBearerExtensions.cs issuer = f"https://{site_name}.azurewebsites.net" payload = { 'exp': exp_time, - 'iat': int(time.time()), - # Use site-specific issuer format that matches ValidIssuers in the host + 'iat': iat_time, + 'nbf': iat_time, 'iss': issuer, - # For Linux Consumption in placeholder mode, audience is the container name - 'aud': container_name + 'aud': site_name, + 'sub': site_name, } - # Use the same encryption key for JWT signing - key = base64.b64decode(os.getenv('_DUMMY_CONT_KEY').encode()) + encryption_key_str = os.getenv('_DUMMY_CONT_KEY') + if not encryption_key_str: + raise RuntimeError("_DUMMY_CONT_KEY environment variable not set") - # Generate JWT token using HMAC SHA256 (matches Azure Functions host) + key_bytes = base64.b64decode(encryption_key_str.encode()) + key = key_bytes[:32] jwt_token = jwt.encode(payload, key, algorithm='HS256') return jwt_token @classmethod - def _get_site_encrypted_context(cls, - site_name: str, - env: Dict[str, str]) -> str: - """Get the encrypted context for placeholder mode specialization""" - # Ensure WEBSITE_SITE_NAME is set to simulate production mode + def _get_site_encrypted_context(cls, site_name: str, env: Dict[str, str]) -> str: + """Get encrypted specialization context.""" env["WEBSITE_SITE_NAME"] = site_name - - ctx = { - "SiteId": 1, - "SiteName": site_name, - "Environment": env - } - + ctx = {"SiteId": 1, "SiteName": site_name, "Environment": env} json_ctx = json.dumps(ctx) - encrypted = cls._encrypt_context(os.getenv('_DUMMY_CONT_KEY'), json_ctx) return encrypted @classmethod def _encrypt_context(cls, encryption_key: str, plain_text: str) -> str: - """Encrypt plain text context into an encrypted message which can - be accepted by the host - """ - # Decode the encryption key + """Encrypt context for specialization.""" encryption_key_bytes = base64.b64decode(encryption_key.encode()) + aes_key = encryption_key_bytes[:32] - # Pad the plaintext to be a multiple of the AES block size padder = padding.PKCS7(algorithms.AES.block_size).padder() plain_text_bytes = padder.update(plain_text.encode()) + padder.finalize() - # Initialization vector (IV) (fixed value for simplicity) iv_bytes = '0123456789abcedf'.encode() - - # Create AES cipher with CBC mode - cipher = Cipher(algorithms.AES(encryption_key_bytes), - modes.CBC(iv_bytes), backend=default_backend()) - - # Perform encryption + cipher = Cipher(algorithms.AES(aes_key), modes.CBC(iv_bytes), backend=default_backend()) encryptor = cipher.encryptor() encrypted_bytes = encryptor.update(plain_text_bytes) + encryptor.finalize() - # Compute SHA256 hash of the encryption key - digest = hashes.Hash(hashes.SHA256(), backend=default_backend()) - digest.update(encryption_key_bytes) - key_sha256 = digest.finalize() - - # Encode IV, encrypted message, and SHA256 hash in base64 iv_base64 = base64.b64encode(iv_bytes).decode() encrypted_base64 = base64.b64encode(encrypted_bytes).decode() + + digest = hashes.Hash(hashes.SHA256(), backend=default_backend()) + digest.update(aes_key) + key_sha256 = 
digest.finalize() key_sha256_base64 = base64.b64encode(key_sha256).decode() - # Return the final result return f'{iv_base64}.{encrypted_base64}.{key_sha256_base64}' def __enter__(self):
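
Note (not part of the patch series): the final commit settles on one key-handling convention for the Flex Consumption image — base64-decode the container key, keep the first 32 bytes, and use that both to HS256-sign the bearer token and to AES-256-CBC-encrypt the specialization context, returning "<iv>.<ciphertext>.<sha256(key)>". The sketch below is an illustrative standalone reproduction of that convention, not code from the patch: the dummy key value, site name, and helper names (_aes_key, make_bearer_token, encrypt_context) are placeholders, and it assumes the cryptography and pyjwt packages already listed in the dev dependencies.

# Standalone sketch of the token/encryption convention used in testutils_lc.py.
# Assumptions: a 64-byte dummy container key, PyJWT and cryptography installed;
# helper names are illustrative and do not appear in the patch.
import base64
import json
import time

import jwt
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes, padding
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

DUMMY_CONT_KEY = base64.b64encode(b"0" * 64).decode()  # placeholder key


def _aes_key(container_key: str) -> bytes:
    # The host expects a 256-bit key: decode and keep the first 32 bytes.
    return base64.b64decode(container_key.encode())[:32]


def make_bearer_token(site_name: str, container_key: str) -> str:
    # HS256 JWT with a site-scoped issuer/audience, valid for 24 hours.
    now = int(time.time())
    payload = {
        "exp": now + 24 * 60 * 60,
        "iat": now,
        "nbf": now,
        "iss": f"https://{site_name}.azurewebsites.net",
        "aud": site_name,
        "sub": site_name,
    }
    return jwt.encode(payload, _aes_key(container_key), algorithm="HS256")


def encrypt_context(container_key: str, ctx: dict) -> str:
    # AES-256-CBC over the PKCS7-padded JSON context with a fixed IV (test-only);
    # result is "<iv>.<ciphertext>.<sha256(key)>", each part base64-encoded.
    key = _aes_key(container_key)
    iv = b"0123456789abcedf"
    padder = padding.PKCS7(algorithms.AES.block_size).padder()
    padded = padder.update(json.dumps(ctx).encode()) + padder.finalize()
    encryptor = Cipher(algorithms.AES(key), modes.CBC(iv),
                       backend=default_backend()).encryptor()
    cipher_text = encryptor.update(padded) + encryptor.finalize()
    digest = hashes.Hash(hashes.SHA256(), backend=default_backend())
    digest.update(key)
    return ".".join(base64.b64encode(part).decode()
                    for part in (iv, cipher_text, digest.finalize()))


if __name__ == "__main__":
    print(make_bearer_token("my-test-site", DUMMY_CONT_KEY))
    print(encrypt_context(DUMMY_CONT_KEY, {"SiteId": 1, "SiteName": "my-test-site"}))

This mirrors why the patch slices the decoded key before exporting CONTAINER_ENCRYPTION_KEY: the container-side host validates the bearer token and decrypts the specialization payload with the same 32-byte key, so both sides must derive it identically.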