diff --git a/.gitignore b/.gitignore index b70a990c..bab10c21 100755 --- a/.gitignore +++ b/.gitignore @@ -16,6 +16,7 @@ docker-stack.yml npm-debug.log* yarn-debug.log* yarn-error.log* +/logs # Editor directories and files .idea diff --git a/README.md b/README.md index 21f8d0a6..4eaac022 100644 --- a/README.md +++ b/README.md @@ -57,7 +57,7 @@ Please be aware of these related repositories that all have to be used in concer ## Minimum Hardware Requirements -- SD card 64 GB or mo +- SD card 64 GB or more - RaspberryPi 4 or newer with at least 4 GB RAM ## License diff --git a/backend b/backend index 2caab2e9..e20beacf 160000 --- a/backend +++ b/backend @@ -1 +1 @@ -Subproject commit 2caab2e90ff20a545c8337320409042c7000adb4 +Subproject commit e20beacfccc3c578cf0e887976f05961c4d880cc diff --git a/cli b/cli index c9506242..9e152680 160000 --- a/cli +++ b/cli @@ -1 +1 @@ -Subproject commit c950624201dc3c4a3bc0cf8af985a146bca0494e +Subproject commit 9e1526808e772898516b1031e282d19f8120dc5a diff --git a/docker-compose.yml b/docker-compose.yml index bb64ab77..da1848c4 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -17,6 +17,8 @@ services: proxy: image: traefik:v2.2 + extra_hosts: + - "host.docker.internal:host-gateway" ports: - "80:80" - "8090:8080" @@ -26,6 +28,7 @@ services: volumes: - /var/run/docker.sock:/var/run/docker.sock - ./backend:/app + - ./traefik_dynamic.yml:/etc/traefik/traefik_dynamic.yml command: # Enable Docker in Traefik, so that it reads labels from Docker services - --providers.docker @@ -34,6 +37,12 @@ services: - --providers.docker.constraints=Label(`traefik.constraint-label-stack`, `${TRAEFIK_TAG?Variable not set}`) # Do not expose all Docker services, only the ones explicitly exposed - --providers.docker.exposedbydefault=false + # Enable file provider for push-av server + - --providers.file.filename=/etc/traefik/traefik_dynamic.yml + # insecureSkipVerify is enabled for push_av_server as it uses a self-signed certificate. 
+ # This ensures Traefik can proxy requests HTTP -> HTTPS without failing certificate validation. + # service specific transport is not supported in v2.2 and was introduced in v2.4 hence enabling it here. + - --serversTransport.insecureSkipVerify=true # Enable the access log, with HTTP requests - --accesslog # Enable the Traefik log, for configurations and errors @@ -68,11 +77,12 @@ services: - node.labels.${STACK_NAME?Variable not set}.app-db-data == true backend: - image: 'ghcr.io/project-chip/csa-certification-tool-backend:8f86ccd' + image: 'ghcr.io/project-chip/csa-certification-tool-backend:e20beac' ports: - "8888:8888" - "50000:50000" + - "5000:5000/udp" #To receive H264 packets from camera-controller volumes: - /var/run/docker.sock:/var/run/docker.sock - /usr/bin/docker:/usr/bin/docker @@ -89,7 +99,7 @@ services: privileged: true build: context: ./backend - command: /start-reload.sh + command: bash -c "./prestart.sh; python3 ./app/main.py" labels: - traefik.enable=true - traefik.constraint-label-stack=${TRAEFIK_TAG?Variable not set} @@ -97,7 +107,7 @@ services: - traefik.http.services.${STACK_NAME?Variable not set}-backend.loadbalancer.server.port=80 frontend: - image: 'ghcr.io/project-chip/csa-certification-tool-frontend:5c0c9dd' + image: 'ghcr.io/project-chip/csa-certification-tool-frontend:1acbec0' build: context: ./frontend labels: diff --git a/docs/Matter_TH_User_Guide/Matter_TH_User_Guide.adoc b/docs/Matter_TH_User_Guide/Matter_TH_User_Guide.adoc index 66c3e119..28971fec 100755 --- a/docs/Matter_TH_User_Guide/Matter_TH_User_Guide.adoc +++ b/docs/Matter_TH_User_Guide/Matter_TH_User_Guide.adoc @@ -15,9 +15,9 @@ * limitations under the License. 
//// -:ubuntu-version: 24.04 +:ubuntu-version: 24.04.x :ubuntu-description: Ubuntu Server {ubuntu-version} LTS (64-bit) -:th-version: v2.11+fall2024 +:th-version: v2.14+fall2025 = Matter Test-Harness User Manual: {th-version} ifdef::env-github[] :tip-caption: :bulb: @@ -25,7 +25,6 @@ ifdef::env-github[] :important-caption: :heavy_exclamation_mark: :caution-caption: :fire: :warning-caption: :warning: -:imagesdir: https://github.com/project-chip/certification-tool/tree/main/docs/Matter_TH_User_Guide endif::[] ifndef::env-github[] :icons: font @@ -48,8 +47,11 @@ endif::[] | Matter 1.0 | v2.6 | https://drive.google.com/file/d/10YkV4mDulhLoA6RJOKZNNKWhHTH1tOfu/view?usp=drive_link[link] | https://groups.csa-iot.org/wg/members-all/document/folder/2729[Causeway Link] | 96c9357 | | Matter 1.1 | v2.8.1 | https://drive.google.com/file/d/15fU3L7QE-MNBslf53A_6sFgn1Wq0Pvqd/view?usp=drive_link[link] | https://groups.csa-iot.org/wg/members-all/document/folder/2730[Causeway Link] | 5a21d17 | | Matter 1.2 | TH-Fall2023 | https://drive.google.com/file/d/1WTjhc7xbYt18RvpABU3_r47uqOLd7NN1/view?usp=drive_link[link] | https://groups.csa-iot.org/wg/members-all/document/folder/3045[Causeway Link] | 19771ed | -| Matter 1.3 | v2.10.2+spring2024 | N/A | https://groups.csa-iot.org/wg/members-all/document/folder/3314[Causeway Link] | 50bcad1 | To install, use the tag "v2.10.2+spring2024" in the instructions of <> +| Matter 1.3 | v2.10.4+spring2024 | N/A | https://groups.csa-iot.org/wg/members-all/document/folder/3314[Causeway Link] | 50bcad1 | To install, use the tag "v2.10.4+spring2024" in the instructions of <> | Matter 1.4 | v2.11+fall2024 | N/A | https://groups.csa-iot.org/wg/members-all/document/folder/4120[Causeway Link] | f2e5de7 | To install, use the tag "v2.11+fall2024" in the instructions of <> +| Matter 1.4.1 | v2.12+spring2025 | N/A | https://groups.csa-iot.org/wg/members-all/document/folder/4497[Causeway Link] | 91eab26 | To install, use the tag "v2.12+spring2025" in the 
instructions of <> +| Matter 1.4.2 | v2.13+summer2025 | N/A | https://groups.csa-iot.org/wg/members-all/document/folder/4651[Causeway Link] | 1b2b3fd | To install, use the tag "v2.13+summer2025" in the instructions of <> +| Matter 1.5 | v2.14+fall2025 | N/A | https://groups.csa-iot.org/wg/members-all/document/folder/4825[Causeway Link] | ca9d111 | To install, use the tag "v2.14+fall2025" in the instructions of <> |=== @@ -113,9 +115,22 @@ endif::[] | 39 | 04-Sep-2024 | [Apple]Hilton Lima | * Moved PIXIT section. + * Added 'Test Parameters for SDK Python Tests' section. + * Changed Table of Contents display level. -| 40 | 14-Oct-2024 | [Apple]Romulo Quidute | * Added Warning for WIFI_SSID with special chars or empty spaces. + +| 40 | 24-Sep-2024 | [Apple] Hilton Lima | * Updated nRF util and desktop application links. +| 41 | 14-Oct-2024 | [Apple]Romulo Quidute | * Added Warning for WIFI_SSID with special chars or empty spaces. + * Updated Test-Harness Links for v2.11+fall2024 release. - +| 42 | 03-Jan-2025 | [Apple]Romulo Quidute | * Added Reuse commissioning information section. +| 43 | 25-Feb-2025 | [Apple]Romulo Quidute | * Updated supported Ubuntu version to 24.04.2. +| 44 | 25-Feb-2025 | [Apple]Antonio Melo Jr. | * Replacing the patch number with a 'x' in the supported Ubuntu version. +| 45 | 13-Mar-2025 | [Apple]Romulo Quidute | * Updated Test-Harness Links for v2.12+spring2025 release. +| 46 | 31-Mar-2025 | [Apple]Romulo Quidute | * Added mapping volume section inside SDK Python Tests. +| 47 | 05-May-2025 | [Apple]Romulo Quidute, [Apple]Antonio Melo| Changes for v2.13+summer2025 version: + + * Added the platform certification configuration section. + + * Updated the docker command in Factory-reset the DUT section + + * Added the Wi-Fi PAF section supported by Matter 1.4.2. +| 48 | 10-Nov-2025 | [Apple]Romulo Quidute, [Apple]Antonio Melo| * Changes for v2.14+fall2025 version. + +| 49 | 14-Nov-2025 | [Apple]Antonio Melo Jr. 
| * Adding the Side Load feature instructions to the custom Test Cases section. + + * Fixing the Causeway links in the references section. + + * Fixing the REPL command in the Matter Python REPL section. |=== <<< @@ -140,12 +155,12 @@ The TH tool can be used by any DUT vendor to run the Matter certification tests, <<< == *References* -. Matter Specification: https://groups.csa-iot.org/wg/members-all/document/folder/4120[Matter Specification (Causeway)] / https://github.com/CHIP-Specifications/connectedhomeip-spec[Matter Specification (Github)] -. Matter SDK Repo github: https://github.com/project-chip/connectedhomeip[https://github.com/project-chip/connectedhomeip] -. Matter Test Plans: https://groups.csa-iot.org/wg/members-all/document/folder/4120[Matter Test Plans (Causeway)] / https://github.com/CHIP-Specifications/chip-test-plans[Matter Test Plans (GitHub)] -. PICS Tool: https://picstool.csa-iot.org/#userguide[PICS Tool - Connectivity Standards Alliance (csa-iot.org)] -. XML Files: https://groups.csa-iot.org/wg/members-all/document/folder/4120 -. TEDS Matter tool: https://groups.csa-iot.org/wg/matter-wg/document/28545 +. Matter Specification: https://groups.csa-iot.org/wg/members-all/document/folder/4825[Matter Specification (Causeway)] / https://github.com/CHIP-Specifications/connectedhomeip-spec[Matter Specification (GitHub)] +. Matter SDK GitHub Repository: https://github.com/project-chip/connectedhomeip[Connectedhomeip GitHub Repository] +. Matter Test Plans: https://groups.csa-iot.org/wg/members-all/document/folder/4825[Matter Test Plans (Causeway)] / https://github.com/CHIP-Specifications/chip-test-plans[Matter Test Plans (GitHub)] +. Matter PICS Tool: https://picstool.csa-iot.org/#userguide[PICS Tool - Connectivity Standards Alliance (csa-iot.org)] +. Matter XML Files: https://groups.csa-iot.org/wg/members-all/document/folder/4825[XML Files - Connectivity Standards Alliance (csa-iot.org)] +. 
TEDS Matter Tool: https://groups.csa-iot.org/wg/matter-wg/document/28545[TEDS Matter Tool - Connectivity Standards Alliance (csa-iot.org)] *Important:* @@ -214,7 +229,7 @@ There are two ways to obtain the latest TH on Raspberry Pi. Follow the instructi NOTE: This instruction applies to the latest version of the Test Harness this document refers to. For earlier versions of the TH please follow the user guide of that specific TH version as, for example, Ubuntu versions might differ per installation. -==== *Prerequisites* +==== Prerequisites The following equipment will be required to have a complete TH setup: @@ -326,14 +341,14 @@ If using multipass, to find the IP address use the command ==== Substitute the SDK's docker image and update sample apps -If the platform of the machine that will run the TH is 'linux/amr64' it will not be necessary to build a new SDK docker image. +If the platform of the machine that will run the TH is 'linux/arm64' it will not be necessary to build a new SDK docker image. To run TH on a machine using the 'linux/amd64' platform, you will need to first build a new SDK docker image. * Get the SDK commit SHA |=== -|Value for variable `SDK_DOCKER_TAG` in TH repository path `certification-tool/backend/app/core/config.py` +|Value for variable `SDK_DOCKER_TAG` in TH repository path `certification-tool/backend/test_collections/matter/config.py` |=== * Download the Dockerfile for chip-cert-bins from the commit you need @@ -442,9 +457,22 @@ Custom Python files folder are located at: .Test-Harness displaying the custom tests. image::images/img_60.png[] -|=== -|Hint: You can copy the original SDK Yaml/Python test to Custom Yaml/Python folder and do any changes on it. -|=== +[NOTE] +.*Experimenting with Test Cases:* +==== +You can copy the original SDK Yaml/Python test to Custom Yaml/Python folder and do any changes on it. 
+==== + +==== Test-Harness Side Load Feature with Custom Test Cases +For the Side Load feature, the user may benefit from the *custom* Test Cases feature mentioned above. +To Side Load any desired script, follow the steps below: + +1. Stop the Test-Harness by executing the script `stop.sh` located at `certification-tool/scripts/` folder +2. Download the latest Test Case script from the SDK's master branch (https://github.com/project-chip/connectedhomeip[connectedhomeip] repository) +3. Place the desired script on the appropriate *custom* folder (depending if the script is Yaml or Python) +4. Start the Test-Harness by executing the script `start.sh` located at `certification-tool/scripts/` folder +5. Change the Project's configuration as required to run the Side Loaded script (e.g. updating `test_parameters`) +6. The Side Loaded script will be available on the Test Cases list in the Custom tab (refer to the image above) === Troubleshooting @@ -616,10 +644,10 @@ To set up the Thread Board, follow the instructions below. NOTE: _The nRF52840-DK setup can be performed in two methods either by flashing the pre-built binary hex of sample apps which is released along with the TH by using the nRF Connect Desktop application tool (refer Section 5.2.2.1) or by building the docker environment to build the sample apps (refer Section 5.2.2.2)._ -===== *Instructions to Set Up nRF52840-DK Using nRF Connect Desktop Application Tool* +===== Instructions to Set Up nRF52840-DK Using nRF Connect Desktop Application Tool .. Requirements: -. 
nRF Connect for Desktop tool: Installer for https://nsscprodmedia.blob.core.windows.net/prod/software-and-other-downloads/desktop-software/nrf-connect-for-desktop/4-0-0/nrfconnect-setup-4.0.0-ia32.exe[Windows], https://nsscprodmedia.blob.core.windows.net/prod/software-and-other-downloads/desktop-software/nrf-connect-for-desktop/4-0-0/nrfconnect-4.0.0.dmg[MAC] or https://nsscprodmedia.blob.core.windows.net/prod/software-and-other-downloads/desktop-software/nrf-connect-for-desktop/4-0-0/nrfconnect-4.0.0-x86_64.appimage[Linux] +. nRF Connect for Desktop tool installer: https://www.nordicsemi.com/Products/Development-tools/nRF-Connect-for-Desktop/Download#infotabs[Link] + NOTE: _The J-Link driver needs to be separately installed on macOS and Linux. Download and install it from https://www.segger.com/downloads/jlink[SEGGER] under the section J-Link Software and Documentation Pack._ @@ -648,7 +676,7 @@ image:images/img_9.png[] . Connect the nRF52840-Dongle to the USB port of the Raspberry Pi having the latest TH. . For the Thread DUT, enable discoverable over Bluetooth LE (e.g., on nRF52840 DK: select Button 4) and start the Thread Setup Test execution by referring to <> . -===== *Instructions to Set Up nRF52840-DK Using Docker Environment* +===== Instructions to Set Up nRF52840-DK Using Docker Environment . 
To build the sample apps for nRF-Connect, check out the Matter repository and bootstrap using following commands: + @@ -717,7 +745,7 @@ NOTE: _Parameters can be omitted if flashing the example app onto the hardware i |=== -===== *Building and Flashing Sample Apps for nRF-Connect* +===== Building and Flashing Sample Apps for nRF-Connect Perform the following procedure, regardless of the method used for setting up the environment: @@ -782,9 +810,9 @@ Remember to set `PATH_TO_PAA_ROOTS` and substitute `` |`source python_env/bin/activate` |=== -* Run chip-repl: +* Run matter-repl: |=== -|`python3 python_env/bin/chip-repl` +|`python3 python_env/bin/matter-repl` |=== @@ -796,14 +824,16 @@ If the DUT supports Thread Transport, DUT vendors need to use the OTBR that is s Currently the OTBR in the TH works with either the Nordic RCP dongle or SiLabs RCP dongle. Refer to <> to flash the NRF52840 firmware or <> to flash the SiLabs firmware and get the RCP’s ready. Once the RCP’s are programmed, the user needs to insert the RCP dongle on to the Raspberry Pi running the TH and reboot the Raspberry Pi. === Instructions to Flash the Firmware NRF52840 RCPDongle -. Download RCP firmware package from the following link on the user’s system — https://groups.csa-iot.org/wg/matter-csg/document/34870[Thread RCP Firmware Package] +. Download RCP firmware package from the following link on the user’s system — https://groups.csa-iot.org/wg/members-all/document/39226[Thread RCP Firmware Package] . nRF Util is a unified command line utility for Nordic products. For more details, refer to the following link— https://www.nordicsemi.com/Products/Development-tools/nrf-util[https://www.nordicsemi.com/Products/Development-tools/nrf-util] -. Install the nRF Util dependency in the user’s system using the following command: +. 
Install the nRF Util dependencies on the user’s system using the following commands: + -|=== -|`python3 -m pip install -U nrfutil` -|=== +[source,shell] +---- +python3 -m pip install -U nrfutil +nrfutil install nrf5sdk-tools +---- . Connect the nRF52840 Dongle to the USB port of the user’s system. . Press the Reset button on the dongle to enter the DFU mode (the red LED on the dongle starts blinking). @@ -824,7 +854,7 @@ Example: + . In case any permission issue occurs during flashing, launch the terminal and retry in sudo mode. === Nrfconnect Sample APPs Firmwares to Flash on the NRF52840DK Kit -The https://groups.csa-iot.org/wg/matter-csg/document/33943[Nrfconnect Sample apps binary Package] is available for download and should be flashed in the development kit NRF52840DK to use it as DUT in the Test-Harness tests. +The https://groups.csa-iot.org/wg/members-all/document/folder/2269[Nrfconnect Sample apps binary Package] is available for download and should be flashed in the development kit NRF52840DK to use it as DUT in the Test-Harness tests. === Instructions to Flash SiLabs RCP @@ -1035,6 +1065,101 @@ lo: flags=73 mtu 65536 + |=== *If any interface matches tayga ip address, change the conflicting IP on host.* +<<< +== *Wi-Fi PAF Commissioning* + +This section provides a guide to enable Wi-Fi PAF commissioning tests. + +=== Components Needed for Tests +To enable the Wi-Fi PAF Commissioning tests, the following hardware and software components are required. + +==== Hardware +* *Raspberry Pi*: version permissible for CSA Matter Test Events, used as a platform for Test Harness(Pi 4 or higher). 
+* *WLAN USB dongle*: compatible with the latest hostapd and +wpa_supplicant, used as the Wi-Fi subsystem for transmitting and +receiving Wi-Fi Un-Synchronized Discovery (USD) Public Action Frames +(PAF), including one of the following kinds: +** NETGEAR A6210: https://www.amazon.com/NETGEAR-Dual-Band-Adapter-A6210-10000S-Refurbished/dp/B00NSB0G66 +** NETGEAR WN111 802.11n Wireless LAN USB 2.0 Adapter: https://a.co/d/4I7YMez +** Linksys AE6000 Dual-Band Wireless Mini USB Adapter: https://a.co/d/iyXXpIs +** SANOXY USB Mini Wifi Wireless LAN Internet Adapter: https://www.amazon.com/SANOXY-150Mbps-Wireless-Network-802-11n/dp/B01HFRCUVM + +==== Software +* *hostapd and wpa_supplicant*: download from the latest master branch. Make sure this exists: CONFIG_NAN_USD=y +** Used to enable transmitting and receiving Wi-Fi Un-Synchronized +Discovery (USD) Public Action Frames (PAF) on platforms for both +Test Harness and DUT. +** *Here is the procedure to configure the above*: +*** git clone https://w1.fi/hostap.git +*** cd hostap/wpa_supplicant +*** git checkout master (Run this if you are in different branch) +*** vi defconfig or nano defconfig +*** Make sure "CONFIG_NAN_USD=y" is set (It should be at the last line) +*** cp defconfig .config +*** sudo apt update +*** sudo apt install libnl-3-dev libnl-genl-3-dev +*** make all + +=== Matter SDK +* Matter 1.5 SDK is capable of Wi-Fi PAF commissioning +* Regular build command with “chip_device_config_enable_wifipaf=true” (*This step is required only when building from the Master branch. If you're using the SDK provided in the apps folder of the TH, it's not needed*) +* *Test commands on SDK as a Commissionee*: +** `$ ./chip-all-clusters-app --wifi --wifipaf freq_list=2437` // for 2.4GHz. 
+** `$ ./chip-all-clusters-app --wifi --wifipaf freq_list=2437,2412,5745,5220` // for default 2.4GHz CH6 + a list of channels in 2.4GHz + a list of 5GHz channels +* *Test commands on SDK as a Commissioner*: + +Use "wifipaf-wifi" to pair: + +** `./chip-tool-paf pairing wifipaf-wifi [node_id] [ssid] [ap_pwd] [passcode] [discriminator]` +** *Example*: `./chip-tool-paf pairing wifipaf-wifi 1 n_m_2g nxp12345 20202021 3840` // on default 2.4GHz +** *Example*: `sudo ./chip-tool pairing wifipaf-wifi 1 n_m_2g nxp12345 20202021 3840 --freq 5220` // on 5GHz + +=== Baseline Test Harness (TH) Configuration for Testing DUT Commissionable Device +1. If DUT commissionable device is 2.4 GHz-only, configure TH commissioner to create an active subscriber on Default Publish Channel (2.4GHz Channel 6) for a test. +2. If DUT commissionable device is 2.4 + 5 GHz, to test 2.4GHz commissioning, configure TH commissioner to create an active subscriber on Default Publish Channel (2.4GHz Channel 6) for a test. +3. If DUT commissionable device is 2.4 + 5 GHz, to test 5GHz commissioning, configure TH commissioner to create an active subscriber on CH 44 non-ETSI regulatory domains or CH 149 in ETSI regulatory for a test. + +=== Baseline Test Harness (TH) Configuration for Testing DUT Commissioner +For non-ETSI regulatory domain, configure the TH Commissionee with the Default Publish Channel to be Channel 6 in 2.4 GHz and a Publish Channel List that includes all 20 MHz channels in 2.4 GHz band, Channel 44 and Channel 149 in 5 GHz band, and the Commissioner's network operating channel if the operating channel is not a DFS channel. 
+ +=== Test Procedures and Test Commands +The test procedures and test commands for Wi-Fi PAF commissioning are documented in the Test Plan Verification spreadsheet under the Matter Specifications and Test Plans folder: +https://groups.csa-iot.org/wg/members-all/document/folder/2269 *or* https://docs.google.com/spreadsheets/d/19ZAbIRObi1HcvbesI4tSVmQn7cxz-ZNyFH80onmae7Y/edit?gid=311763523#gid=311763523 + +*The test steps are specifically in the following places*: + +* 3.2.1. [TC-DD-2.1] Announcement by Device Verification [DUT - Commissionee]: Steps: 8, 9, 10, and 11 with “MCORE.DD.DISCOVERY_PAF” enabled in PICS. +** TH does not need any special setting, just need to connect to AP. It's ok even using ethernet. But Wi-Fi should be on that I always test by using Wi-Fi connecting to AP. Test wpa_supplicant configure file script is below: +*** ctrl_interface=DIR=/run/wpa_supplicant +*** update_config=1 +*** network={ + + + ssid="n_m_2g" + + + key_mgmt=WPA-PSK + + + psk="nxp12345" + + +} +** *Before executing TC-DD-2.1 please read the “readme” file available in the TC-DD-2.1 folder and complete the configuration.* +* 3.2.2. [TC-DD-2.2] Discovery by Commissioner Verification [DUT - Commissioner]: Steps: 3.a and 3.b with “MCORE.DD.DISCOVERY_PAF” enabled in PICS. +* 3.3.11. [TC-DD-3.11] Commissioning Flow = 0 (Standard Flow) - QR Code [DUT - Commissioner]: Steps: 2.a, 2.b and 2.c with “MCORE.DD.DISCOVERY_PAF” enabled in PICS +* 3.3.12. [TC-DD-3.12] Commissioning Flow = 1 (User-Intent Flow) - QR Code [DUT - Commissioner]: Steps: 2.a, 2.b, 2.c and 2.d with “MCORE.DD.DISCOVERY_PAF” enabled in PICS +* 3.3.13. [TC-DD-3.13] Commissioning Flow = 2 (Custom Flow) - QR Code [DUT - Commissioner]: Steps: 2.a, 2.b, 2.c and 2.d with “MCORE.DD.DISCOVERY_PAF” enabled in PICS +* 3.3.14. 
[TC-DD-3.14] Commissioning Flow - QR Code - Negative Scenario [DUT - Commissioner]: Steps: 4.a and 4.b with “MCORE.DD.DISCOVERY_PAF” enabled in PICS + +=== Configure the environment +* *Configure it as commissionee*: +** Configure the wpa_supplicant on the commissionee side using the +provided example file named wpa_supplicant-def_comm.conf. +** Run `./config_paf_env.sh comee` (*Make sure the “config_paf_env.sh” file from the “scripts” folder inside the provided ZIP archive is copied to the working directory before executing this command*). +* *Configure it as commissioner*: +** Configure the wpa_supplicant on the commissionee side using the +provided example file named wpa_supplicant-def_comer.conf. +** Set the ssid/password of AP to wpa_supplicant-def_comer.conf +** Run `./config_paf_env.sh comer` (*Make sure the “config_paf_env.sh” file from the “scripts” folder inside the provided ZIP archive is copied to the working directory before executing this command*). +* Renew the commissionee environment before running the test: +** Run `./renew_paf_comee.sh` (*Make sure the “renew_paf_comee.sh file from the “scripts” folder inside the provided ZIP archive is copied to the working directory before executing this command*) +** The above script is just clearing the tmp files and removing the +existing networks. <<< == *Test Configuration* @@ -1250,6 +1375,15 @@ Only one of the following parameter is allowed, also when one of them is configu + WARNING: This is an invalid configuration. TH will not accept both parameters set at the same time. +.. Overwrite the default timeout. Value in [s]: ++ +[source,xml] +---- +"test_parameters": { + "timeout": 300 +} +---- + On completion of the "network" and the "dut_config" configuration, select the *Update* and then *Create* button to create the Test Project. @@ -1458,6 +1592,14 @@ Follow the instructions below to execute the test cases. . 
After execution of the above commands ensure that the PAA’s are available locally at */var/paa-root-certs* . + +===== Mapped Volumes +The following host directories are mapped into the cert-bins container: + +* `/root/python_testing` -> `/home/ubuntu/certification-tool/backend/test_collections/matter/sdk_tests/sdk_checkout/python_testing` +* `/paa-root-certs` -> `/var/paa-root-certs` +* `/credentials/development` -> `/var/credentials/development` + ===== Placeholders for Steps Device-specific configuration is shown as shell variables. *PLEASE REPLACE THOSE WITH THE CORRECT VALUE* in the steps below. @@ -1605,7 +1747,7 @@ The above example will be used to define the following arguments when running th ====== Test Parameters Examples -Access the spreadsheet via the https://groups.csa-iot.org/wg/matter-csg/document/36336[Verification Steps Document] and review the information provided. Based on this data, create the parameters set as requested. +Access the spreadsheet via the https://groups.csa-iot.org/wg/members-all/document/folder/2269[Verification Steps Document] and review the information provided. Based on this data, create the parameters set as requested. Below are some specific examples assembled from data obtained from the spreadsheet. @@ -1669,3 +1811,133 @@ image:images/img_61.png[SDK Python Tests - Suites] image:images/img_62.png[SDK Python Tests - Commissioning Mode prompt] image:images/img_63.png[SDK Python Tests - DUT should be commissioned prompt] + +===== Reuse commissioning information +This allows users to perform multiple test run executions without the need to perform the commissioning step in every test run execution. +The TH is now storing the last commissioning information, so a prompt will presented asking user to reuse those previous commissioning information or if he wants to perform a new commissioning procedure. 
+ +image:images/img_67.png[SDK Python Tests - Reuse commissioning information prompt] + +<<< +== *Matter Test Harness Cameras* + +Please follow the instructions below to use the camera feature in the Test Harness for certification purposes. The camera feature allows users to run test cases that require a camera stream, such as those related to video streaming or image capture. + +=== Prerequisites +Before using the camera feature for the certification, please make sure there is a functional Raspberry Pi TH environment setup. Refer to the <> for the installation of the TH environment. + +==== Install GStreamer +The camera feature requires GStreamer to be installed on the Raspberry Pi. Use the following linux command to install into the system: + +```shell +sudo apt-get install \ + libgstreamer1.0-dev \ + libgstreamer-plugins-base1.0-dev \ + libgstreamer-plugins-bad1.0-dev \ + gstreamer1.0-plugins-base \ + gstreamer1.0-plugins-good \ + gstreamer1.0-plugins-bad \ + gstreamer1.0-plugins-ugly \ + gstreamer1.0-libav \ + gstreamer1.0-tools \ + gstreamer1.0-x \ + gstreamer1.0-alsa \ + gstreamer1.0-gl \ + gstreamer1.0-gtk3 \ + gstreamer1.0-qt5 \ + gstreamer1.0-pulseaudio +``` + +==== Chrome Browser Configuration +The camera feature requires the Chrome browser to be configured to allow a camera stream be shown in the video popup of the Test Harness. For that, it's necessary to enable the configuration flag “Insecure origins treated as secure” with an existing http address running the Test Harness application. + +Follow the following steps to enable this required flag: + +1. Open the Chrome browser and navigate to `chrome://flags/` in a new tab +2. Search for `Insecure origins treated as secure` flag +3. Enter the IP of the Test Harness into the field and enable the flag as shown in the image below +4. 
Relaunch the Browser + +image:images/img_69.png[Chrome Browser - Insecure origins treated as secure flag] + +=== Setup Environment + +Auto-update and start Test Harness with the camera feature (`v2.14-beta1.1+winter2025` or later) with the following linux commands: + +- `cd ~/certification-tool` +- `./scripts/ubuntu/auto-update.sh v2.14-beta1.1+winter2025` +- `./scripts/start.sh` + +=== Running Camera Test Cases + +The Test Harness UI can be launched from a browser on any system on the same network by using the TH IP address. You can then use the camera application to run a camera-related Test Case. + +==== Running chip-camera-app with USB Camera + +The chip-camera-app located in the ~/apps directory can be executed using the following linux commands: + +1. `rm -rf /tmp/chip_*` +2. `cd ~/apps` +3. `./chip-camera-app` (use `./chip-camera-app --camera-deferred-offer` for the *TC_WEBRTC_1_3* test case) + +<<< +== *Platform Certification Configuration* +Platform certification is the process of validating that a hardware or software platform meets specific technical and compliance standards. Certifying the platform allows device manufacturers to build products using a pre-approved foundation, reducing development time and simplifying the certification process for their final products. + +=== Selecting Test Cases Rules +The Test Harness applies different rules to pre-select test cases depending on the type of certification being performed. The selection process takes into account the presence of specific flags in the PICS file and, in some cases, the content of additional configuration files such as the `platform-test.json` and `dmp-test-skip.xml` files. + +The `dmp-test-skip.xml` file is provided by the PICS Tool, while the `platform-test.json` file is a static file that the Test Harness automatically downloads in the background from the SDK repository at the `connectedhomeip` project. 
+ + +The three selection modes are: + +* *Platform Certification* +* *Derived Product Certification* +* *Full Product Certification* + +Each mode is designed to ensure that only the relevant test cases are executed based on the scope and purpose of the certification. + + +==== Platform Certification +In this mode, the product is being certified as a platform. The behavior is defined as follows: + +* The PICS file must contain the `PICS_PLAT_CERT` flag set to `True`. +* The TH will pre-select test cases that: +** Are explicitly listed in the new `platform-test.json` file. +** Comply with the standard PICS rules. + +==== Derived Product Certification +This mode applies to products built on a previously certified platform. The behavior is: + +* The PICS file must contain the `PICS_PLAT_CERT_DONE` flag set to `True`, along with any product-specific PICS entries. +* The TH will pre-select test cases that: +** Comply with the standard PICS rules. +** Are **not** listed in the `dmp-test-skip.xml` (DMP file). + +==== Full Product Certification +This is the default mode when the product does not fall under the previous two categories: + +* The PICS file may contain any PICS entries, except `PICS_PLAT_CERT` and `PICS_PLAT_CERT_DONE`. +* The TH will pre-select test cases according to standard PICS rules only. + +=== Configuration Input + +On the project configuration screen, users can upload both the PICS file and the optional `dmp-test-skip.xml` file. Based on the content of the PICS file, the TH determines the certification mode by checking the values of the following flags: + +* `MCORE.PLAT_CERT = True` → Platform Certification +* `MCORE.PLAT_CERT_DONE = True` → Derived Product Certification +* Neither flag set → Full Product Certification + +This logic enables the TH to filter and pre-select the appropriate test cases based on the selected certification type. 
+ +The `dmp-test-skip.xml` file can be uploaded either by dragging and dropping the file into the upload area or by manually selecting it. Note that only one DMP file is accepted at a time. If a second DMP file is uploaded, the previous configuration will be discarded. + +[NOTE] +==== +The file must be named exactly `dmp-test-skip.xml`. Renaming the file will result in it being ignored by the Test Harness. +==== + +Once the DMP file is uploaded, its content can be viewed in the panel on the left under the "dmp_test_skip" item, as shown in the figure. + +image:images/img_68.png[Platform Certification - Upload dmp-test-skip.xml file] diff --git a/docs/Matter_TH_User_Guide/Matter_TH_User_Guide.pdf b/docs/Matter_TH_User_Guide/Matter_TH_User_Guide.pdf index 3b273bb6..095213ec 100644 Binary files a/docs/Matter_TH_User_Guide/Matter_TH_User_Guide.pdf and b/docs/Matter_TH_User_Guide/Matter_TH_User_Guide.pdf differ diff --git a/docs/Matter_TH_User_Guide/images/img_67.png b/docs/Matter_TH_User_Guide/images/img_67.png new file mode 100644 index 00000000..6fb1bd24 Binary files /dev/null and b/docs/Matter_TH_User_Guide/images/img_67.png differ diff --git a/docs/Matter_TH_User_Guide/images/img_68.png b/docs/Matter_TH_User_Guide/images/img_68.png new file mode 100644 index 00000000..401fb8c4 Binary files /dev/null and b/docs/Matter_TH_User_Guide/images/img_68.png differ diff --git a/docs/Matter_TH_User_Guide/images/img_69.png b/docs/Matter_TH_User_Guide/images/img_69.png new file mode 100644 index 00000000..518eb1a4 Binary files /dev/null and b/docs/Matter_TH_User_Guide/images/img_69.png differ diff --git a/frontend b/frontend index 5c0c9dd3..1acbec08 160000 --- a/frontend +++ b/frontend @@ -1 +1 @@ -Subproject commit 5c0c9dd3da5719afc543cc13e82f0667925e64ea +Subproject commit 1acbec08f06a268c44a9b7b5c12ff8d240d1877b diff --git a/logs/.keep b/logs/.keep new file mode 100644 index 00000000..c3a09082 --- /dev/null +++ b/logs/.keep @@ -0,0 +1 @@ +Do not remove this file \ No newline 
at end of file diff --git a/scripts/pi-setup/auto-install.sh b/scripts/pi-setup/auto-install.sh index 17f643b3..8094c411 100755 --- a/scripts/pi-setup/auto-install.sh +++ b/scripts/pi-setup/auto-install.sh @@ -15,23 +15,8 @@ # See the License for the specific language governing permissions and # limitations under the License. ROOT_DIR=$(realpath $(dirname "$0")/../..) -SCRIPT_DIR="$ROOT_DIR/scripts" -PI_SCRIPT_DIR="$SCRIPT_DIR/pi-setup" -UBUNTU_SCRIPT_DIR="$SCRIPT_DIR/ubuntu" +PI_SCRIPT_DIR="$ROOT_DIR/scripts/pi-setup" +LOG_FILENAME=$(date +"log-pi_setup-auto-install_%F-%H-%M-%S") +LOG_PATH="$ROOT_DIR/logs/$LOG_FILENAME" -source "$SCRIPT_DIR/utils.sh" - -print_start_of_script - -check_installation_prerequisites -verify_return_code - -print_script_step "Installing Raspberry Pi Dependencies" -$PI_SCRIPT_DIR/install-pi-dependencies.sh -verify_return_code - -print_script_step "Running Ubuntu auto-install" -$UBUNTU_SCRIPT_DIR/auto-install.sh -verify_return_code - -print_end_of_script +$PI_SCRIPT_DIR/internal-auto-install.sh $* | tee $LOG_PATH diff --git a/scripts/pi-setup/internal-auto-install.sh b/scripts/pi-setup/internal-auto-install.sh new file mode 100755 index 00000000..17f643b3 --- /dev/null +++ b/scripts/pi-setup/internal-auto-install.sh @@ -0,0 +1,37 @@ +#! /usr/bin/env bash + + # + # Copyright (c) 2023 Project CHIP Authors + # + # Licensed under the Apache License, Version 2.0 (the "License"); + # you may not use this file except in compliance with the License. + # You may obtain a copy of the License at + # + # http://www.apache.org/licenses/LICENSE-2.0 + # + # Unless required by applicable law or agreed to in writing, software + # distributed under the License is distributed on an "AS IS" BASIS, + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + # See the License for the specific language governing permissions and + # limitations under the License. +ROOT_DIR=$(realpath $(dirname "$0")/../..) 
+SCRIPT_DIR="$ROOT_DIR/scripts" +PI_SCRIPT_DIR="$SCRIPT_DIR/pi-setup" +UBUNTU_SCRIPT_DIR="$SCRIPT_DIR/ubuntu" + +source "$SCRIPT_DIR/utils.sh" + +print_start_of_script + +check_installation_prerequisites +verify_return_code + +print_script_step "Installing Raspberry Pi Dependencies" +$PI_SCRIPT_DIR/install-pi-dependencies.sh +verify_return_code + +print_script_step "Running Ubuntu auto-install" +$UBUNTU_SCRIPT_DIR/auto-install.sh +verify_return_code + +print_end_of_script diff --git a/scripts/sideload.sh b/scripts/sideload.sh new file mode 100755 index 00000000..257ab76e --- /dev/null +++ b/scripts/sideload.sh @@ -0,0 +1,73 @@ +#! /usr/bin/env bash + + # + # Copyright (c) 2024 Project CHIP Authors + # + # Licensed under the Apache License, Version 2.0 (the "License"); + # you may not use this file except in compliance with the License. + # You may obtain a copy of the License at + # + # http://www.apache.org/licenses/LICENSE-2.0 + # + # Unless required by applicable law or agreed to in writing, software + # distributed under the License is distributed on an "AS IS" BASIS, + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + # See the License for the specific language governing permissions and + # limitations under the License. + +ROOT_DIR=$(realpath $(dirname "$0")/..) +SCRIPT_DIR="$ROOT_DIR/scripts" + + +DATE_STR=$(date +"%F-%H-%M-%S") + +SIDELOAD_LOGFILE_PATH="logs/sideload_$DATE_STR.log" + +# Redirect all output (stdout and stderr) to both the terminal and the log file +exec > >(tee -a "$SIDELOAD_LOGFILE_PATH") 2>&1 + +# backend container +CONTAINER_NAME="certification-tool-backend-1" + +source "$SCRIPT_DIR/utils.sh" + +print_start_of_script + +print_script_step "Updating tests information" +docker exec -i $CONTAINER_NAME python3 /app/test_collections/matter/sdk_tests/support/python_testing/list_python_tests_classes.py +if [ $? -ne 0 ]; then + echo "Unable to execute command in backend container. 
Could you please check if it is running?" + exit 1 +fi + +# Retrieve backend container ID +print_script_step "Retrieving backend container ID..." +CONTAINER_ID=$(docker ps -qf "name=${CONTAINER_NAME}") + +# Check if the container was found +if [ -z "$CONTAINER_ID" ]; then + echo "Container '${CONTAINER_NAME}' not found." + exit 1 +fi + +# Restart the container +print_script_step "Restarting container ${CONTAINER_NAME} (ID: ${CONTAINER_ID})..." +docker restart "$CONTAINER_ID" + +# Check if the restart process was successful +if [ $? -eq 0 ]; then + echo -n "Waiting for backend to start" + CHECK_BACKEND_SERVICE="docker exec -i $CONTAINER_NAME curl --fail -s --output /dev/null http://localhost/docs" + until $CHECK_BACKEND_SERVICE + do + echo -n "." + sleep 5 + done + echo "Container restarted successfully!" + +else + echo "Failed to restart the container!" + exit 1 +fi + +print_end_of_script diff --git a/scripts/start.sh b/scripts/start.sh index 2f3c0451..6691d23f 100755 --- a/scripts/start.sh +++ b/scripts/start.sh @@ -33,6 +33,17 @@ set -e BACKEND_COMPOSE_DEV="-f docker-compose.override-backend-dev.yml" FRONTEND_COMPOSE_DEV="-f docker-compose.override-frontend-dev.yml" +DATE_STR=$(date +"%F-%H-%M-%S") + +BACKEND_LOGFILE_PATH="logs/backend_service_start_$DATE_STR.log" +FRONTEND_LOGFILE_PATH="logs/frontend_service_start_$DATE_STR.log" + +print_start_container() { + echo "################################################################################" >> "$1" 2>&1 + echo "start.sh: Starting..." >> "$1" 2>&1 + echo "################################################################################" >> "$1" 2>&1 +} + # Parse args for which docker compose overrides to use BACKEND_COMPOSE="" FRONTEND_COMPOSE="" @@ -91,12 +102,15 @@ if [ $? -ne 0 ]; then exit 1 fi +print_start_container "$FRONTEND_LOGFILE_PATH" + if [ "$FRONTEND_DEV" = true ] ; then echo "!!!! Matter TH frontend started in development mode." echo "!!!! 
Manually start frontend by connecting to the frontend container" else echo -n "Waiting for frontend to start" - until docker compose exec frontend curl --fail -s --output /dev/null http://localhost:4200 + CHECK_FRONTEND_SERVICE="docker compose exec frontend curl --fail -s --output /dev/null http://localhost:4200" + until $CHECK_FRONTEND_SERVICE >> $FRONTEND_LOGFILE_PATH 2>&1 do echo -n "." sleep 5 @@ -104,12 +118,15 @@ else echo " done" fi +print_start_container "$BACKEND_LOGFILE_PATH" + if [ "$BACKEND_DEV" = true ] ; then echo "!!!! Matter TH backend started in development mode." echo "!!!! Manually start backend by connecting to the backend container" else echo -n "Waiting for backend to start" - until docker compose exec backend curl --fail -s --output /dev/null http://localhost/docs + CHECK_BACKEND_SERVICE="docker compose exec backend curl --fail -s --output /dev/null http://localhost/docs" + until $CHECK_BACKEND_SERVICE >> $BACKEND_LOGFILE_PATH 2>&1 do echo -n "." sleep 5 @@ -117,4 +134,10 @@ else echo " done" fi +echo "Backend startup process completed" >> $BACKEND_LOGFILE_PATH 2>&1 +docker compose logs backend >> $BACKEND_LOGFILE_PATH 2>&1 + +echo "Frontend startup process completed" >> $FRONTEND_LOGFILE_PATH 2>&1 +docker compose logs frontend >> $FRONTEND_LOGFILE_PATH 2>&1 + echo "Script 'start.sh' completed successfully" diff --git a/scripts/ubuntu/1-install-dependencies.sh b/scripts/ubuntu/1-install-dependencies.sh index b82a5cc6..80a8afdb 100755 --- a/scripts/ubuntu/1-install-dependencies.sh +++ b/scripts/ubuntu/1-install-dependencies.sh @@ -39,8 +39,23 @@ readarray packagelist < "$UBUNTU_SCRIPT_DIR/package-dependency-list.txt" SAVEIFS=$IFS IFS=$(echo -en "\r") for package in ${packagelist[@]}; do - print_script_step "Instaling package: ${package[@]}" - sudo DEBIAN_FRONTEND=noninteractive apt-get satisfy ${package[@]} -y --allow-downgrades + print_script_step "Installing package: ${package[@]}" + + # Special handling for docker-ce to avoid version 29.x + 
if [[ "${package%%[[:space:]]}" == docker-ce* ]]; then + # Get the latest version that is not 29.x + DOCKER_VERSION=$(apt-cache madison docker-ce | awk '$3 !~ /^5:29\./ {print $3; exit}') + if [ -n "$DOCKER_VERSION" ]; then + print_script_step "Installing docker-ce version $DOCKER_VERSION (excluding 29.x)" + sudo DEBIAN_FRONTEND=noninteractive apt-get install -y --allow-downgrades docker-ce=$DOCKER_VERSION docker-ce-cli=$DOCKER_VERSION containerd.io + sudo apt-mark hold docker-ce docker-ce-cli + else + echo "ERROR: No suitable docker-ce version found (excluding 29.x)" + exit 1 + fi + else + sudo DEBIAN_FRONTEND=noninteractive apt-get satisfy "${package%%[[:space:]]}" -y --allow-downgrades + fi done IFS=$SAVEIFS diff --git a/scripts/ubuntu/1.1-install-docker-repository.sh b/scripts/ubuntu/1.1-install-docker-repository.sh index 97ba7a88..004aeac7 100755 --- a/scripts/ubuntu/1.1-install-docker-repository.sh +++ b/scripts/ubuntu/1.1-install-docker-repository.sh @@ -23,33 +23,30 @@ source "$SCRIPT_DIR/utils.sh" print_start_of_script +set +e print_script_step "Verify docker.download.com is reachable" # Verify docker.download.com is reachable before attempting to install the # Docker Package Repo (network randomly fails after service restarts). -# A ping will be attempted and retried in increments of 1 second before -# a 5 minute timeout. -timeout 300s bash -c ' -start_time=$(date) -echo "Ping started at: $start_time" -while :; do - if ping -c 1 docker.download.com | grep -q "1 received"; then - echo "Ping docker.download.com successful" - end_time=$(date) - echo "Ping ended at: $end_time" - echo "Ping duration: $(($(date +%s) - $(date -d "$start_time" +%s))) seconds" - break - fi - echo "Ping docker.download.com failed, retrying..." - sleep 1 +for i in {1..5} +do + timeout 2 bash -c "(echo >/dev/tcp/docker.download.com/80) &>/dev/null" + retVal=$? 
+ if [ $retVal -eq 0 ]; then + echo "docker.download.com is reachable" + break + else + echo "docker.download.com is unreachable on attempt $i" + sleep $(expr $i \* 2) + fi + + if [ "$i" -eq '5' ]; then + echo "Failed to establish connection with the docker.download.com service." + echo "Please verify your connection or try again later." + exit 1 + fi done -if [ $? -eq 124 ]; then - end_time=$(date) - echo "docker.download.com: Timeout reached" - echo "Ping ended at: $end_time" - echo "Ping duration: $(($(date +%s) - $(date -d "$start_time" +%s))) seconds" -fi -' +set -e # Reference link: https://docs.docker.com/engine/install/ubuntu/ print_script_step "Add Docker's official GPG key" sudo apt-get update -y diff --git a/scripts/ubuntu/1.2-install-additional-dependencies.sh b/scripts/ubuntu/1.2-install-additional-dependencies.sh new file mode 100755 index 00000000..76de38e1 --- /dev/null +++ b/scripts/ubuntu/1.2-install-additional-dependencies.sh @@ -0,0 +1,39 @@ +#! /usr/bin/env bash + +# +# Copyright (c) 2025 Project CHIP Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +set -e + +ROOT_DIR=$(realpath $(dirname "$0")/../..) 
+SCRIPT_DIR="$ROOT_DIR/scripts" +UBUNTU_SCRIPT_DIR="$SCRIPT_DIR/ubuntu" + +source "$SCRIPT_DIR/utils.sh" + +print_start_of_script + +print_script_step "Install additional dependencies" + +readarray -t packagelist < "$UBUNTU_SCRIPT_DIR/additional-dependency-list.txt" + +for package in "${packagelist[@]}"; do + [ -z "$package" ] && continue + + print_script_step "Installing additional package: $package" + + sudo DEBIAN_FRONTEND=noninteractive apt-get satisfy "$package" -y --allow-downgrades +done + +print_end_of_script \ No newline at end of file diff --git a/scripts/ubuntu/additional-dependency-list.txt b/scripts/ubuntu/additional-dependency-list.txt new file mode 100644 index 00000000..be1c4372 --- /dev/null +++ b/scripts/ubuntu/additional-dependency-list.txt @@ -0,0 +1,11 @@ +libgstreamer1.0-dev (>=1.16.0) +libgstreamer-plugins-base1.0-dev (>=1.16.0) +gstreamer1.0-plugins-base (>=1.16.0) +gstreamer1.0-plugins-good (>=1.16.0) +gstreamer1.0-plugins-bad (>=1.16.0) +libatomic1 (>=10.0.0) +libavformat60 (>=7:6.0) +libavcodec60 (>=7:6.0) +libavutil58 (>=7:6.0) +libswresample4 (>=7:6.0) +libswscale7 (>=7:6.0) \ No newline at end of file diff --git a/scripts/ubuntu/auto-install.sh b/scripts/ubuntu/auto-install.sh index 4785367f..723bcef2 100755 --- a/scripts/ubuntu/auto-install.sh +++ b/scripts/ubuntu/auto-install.sh @@ -14,45 +14,10 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - ROOT_DIR=$(realpath $(dirname "$0")/../..) 
-SCRIPT_DIR="$ROOT_DIR/scripts" -UBUNTU_SCRIPT_DIR="$SCRIPT_DIR/ubuntu" - -source "$SCRIPT_DIR/utils.sh" - -print_start_of_script - -check_installation_prerequisites -verify_return_code - -print_script_step "Installing Test Harness Dependencies" -$UBUNTU_SCRIPT_DIR/1-install-dependencies.sh -verify_return_code - -print_script_step "Configure Machine" -$UBUNTU_SCRIPT_DIR/2-machine-cofiguration.sh -verify_return_code - -print_script_step "Update Test Harness code" -# Store the current branch for the update -CURRENT_BRANCH=$(git rev-parse --abbrev-ref HEAD) -$UBUNTU_SCRIPT_DIR/auto-update.sh "$CURRENT_BRANCH" -verify_return_code - -print_script_step "Revert needrestart config to default" -sudo sed -i "s/\$nrconf{kernelhints} = -1;/#\$nrconf{kernelhints} = -1;/g" /etc/needrestart/needrestart.conf -sudo sed -i "s/\$nrconf{restart} = 'a';/#\$nrconf{restart} = 'i';/" /etc/needrestart/needrestart.conf - -print_end_of_script - -print_installation_success +UBUNTU_SCRIPT_DIR="$ROOT_DIR/scripts/ubuntu" +LOG_DIR="$ROOT_DIR/logs" -print_script_step "You need to reboot to finish setup" -printf "Do you want to reboot now? (Press 1 to reboot now)\n" -select yn in "Yes" "No"; do - case $yn in - Yes ) sudo reboot; break;; - No ) exit;; - esac -done +LOG_FILENAME=$(date +"log_ubuntu_auto_install_%F-%H-%M-%S") +LOG_PATH="$LOG_DIR/$LOG_FILENAME" +$UBUNTU_SCRIPT_DIR/internal-auto-install.sh $* | tee $LOG_PATH diff --git a/scripts/ubuntu/auto-update.sh b/scripts/ubuntu/auto-update.sh index 484930a4..2fe3bbbc 100755 --- a/scripts/ubuntu/auto-update.sh +++ b/scripts/ubuntu/auto-update.sh @@ -15,15 +15,11 @@ # See the License for the specific language governing permissions and # limitations under the License. ROOT_DIR=$(realpath $(dirname "$0")/../..) 
+UBUNTU_SCRIPT_DIR="$ROOT_DIR/scripts/ubuntu" +LOG_FILENAME=$(date +"log-ubuntu-auto-update_%F-%H-%M-%S") +LOG_PATH="$ROOT_DIR/logs/$LOG_FILENAME" SCRIPT_DIR="$ROOT_DIR/scripts" -source "$SCRIPT_DIR/utils.sh" - -print_start_of_script - -check_installation_prerequisites -verify_return_code - if [ $# != 1 ] || [ $1 = "--help" ]; then echo "Usage:" echo "./scripts/ubuntu/auto-update.sh " @@ -31,17 +27,10 @@ if [ $# != 1 ] || [ $1 = "--help" ]; then exit 1 fi -print_script_step "Stopping Containers" -$SCRIPT_DIR/stop.sh - BRANCH_NAME=$1 print_script_step "Update Test Harness code" $SCRIPT_DIR/update-th-code.sh "$BRANCH_NAME" verify_return_code -print_script_step "Update Test Harness Setup" -$SCRIPT_DIR/update.sh "$BRANCH_NAME" -verify_return_code - -print_end_of_script +$UBUNTU_SCRIPT_DIR/internal-auto-update.sh $* | tee $LOG_PATH diff --git a/scripts/ubuntu/internal-auto-install.sh b/scripts/ubuntu/internal-auto-install.sh new file mode 100755 index 00000000..ed02e20a --- /dev/null +++ b/scripts/ubuntu/internal-auto-install.sh @@ -0,0 +1,61 @@ +#! /usr/bin/env bash + + # + # Copyright (c) 2023 Project CHIP Authors + # + # Licensed under the Apache License, Version 2.0 (the "License"); + # you may not use this file except in compliance with the License. + # You may obtain a copy of the License at + # + # http://www.apache.org/licenses/LICENSE-2.0 + # + # Unless required by applicable law or agreed to in writing, software + # distributed under the License is distributed on an "AS IS" BASIS, + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + # See the License for the specific language governing permissions and + # limitations under the License. +ROOT_DIR=$(realpath $(dirname "$0")/../..) 
+SCRIPT_DIR="$ROOT_DIR/scripts" +UBUNTU_SCRIPT_DIR="$SCRIPT_DIR/ubuntu" + +source "$SCRIPT_DIR/utils.sh" + +print_start_of_script + +check_installation_prerequisites +verify_return_code + +print_script_step "Installing Test Harness Dependencies" +$UBUNTU_SCRIPT_DIR/1-install-dependencies.sh +verify_return_code + +print_script_step "Installing Additional Dependencies" +$UBUNTU_SCRIPT_DIR/1.2-install-additional-dependencies.sh +verify_return_code + +print_script_step "Configure Machine" +$UBUNTU_SCRIPT_DIR/2-machine-cofiguration.sh +verify_return_code + +print_script_step "Update Test Harness code" +# Store the current branch for the update +CURRENT_BRANCH=$(git rev-parse --abbrev-ref HEAD) +$UBUNTU_SCRIPT_DIR/auto-update.sh "$CURRENT_BRANCH" +verify_return_code + +print_script_step "Revert needrestart config to default" +sudo sed -i "s/\$nrconf{kernelhints} = -1;/#\$nrconf{kernelhints} = -1;/g" /etc/needrestart/needrestart.conf +sudo sed -i "s/\$nrconf{restart} = 'a';/#\$nrconf{restart} = 'i';/" /etc/needrestart/needrestart.conf + +print_end_of_script + +print_installation_success + +print_script_step "You need to reboot to finish setup" +printf "Do you want to reboot now? (Press 1 to reboot now)\n" +select yn in "Yes" "No"; do + case $yn in + Yes ) sudo reboot; break;; + No ) exit;; + esac +done diff --git a/scripts/ubuntu/internal-auto-update.sh b/scripts/ubuntu/internal-auto-update.sh new file mode 100755 index 00000000..05ff780a --- /dev/null +++ b/scripts/ubuntu/internal-auto-update.sh @@ -0,0 +1,41 @@ +#! /usr/bin/env bash + + # + # Copyright (c) 2024 Project CHIP Authors + # + # Licensed under the Apache License, Version 2.0 (the "License"); + # you may not use this file except in compliance with the License. 
+ # You may obtain a copy of the License at + # + # http://www.apache.org/licenses/LICENSE-2.0 + # + # Unless required by applicable law or agreed to in writing, software + # distributed under the License is distributed on an "AS IS" BASIS, + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + # See the License for the specific language governing permissions and + # limitations under the License. +ROOT_DIR=$(realpath $(dirname "$0")/../..) +SCRIPT_DIR="$ROOT_DIR/scripts" +UBUNTU_SCRIPT_DIR="$SCRIPT_DIR/ubuntu" + +source "$SCRIPT_DIR/utils.sh" + +print_start_of_script + +check_installation_prerequisites +verify_return_code + +print_script_step "Stopping Containers" +$SCRIPT_DIR/stop.sh + +BRANCH_NAME=$1 + +print_script_step "Update Test Harness Setup" +$SCRIPT_DIR/update.sh "$BRANCH_NAME" +verify_return_code + +print_script_step "Installing Additional Dependencies" +$UBUNTU_SCRIPT_DIR/1.2-install-additional-dependencies.sh +verify_return_code + +print_end_of_script diff --git a/scripts/ubuntu/package-dependency-list.txt b/scripts/ubuntu/package-dependency-list.txt index 28d9642f..915d6939 100644 --- a/scripts/ubuntu/package-dependency-list.txt +++ b/scripts/ubuntu/package-dependency-list.txt @@ -1,3 +1,3 @@ -docker-ce (>=5:24.0.7-1~ubuntu.22.04~jammy) +docker-ce (<< 5:29.0) python3-pip (>=24.0+dfsg-1ubuntu1) python3-venv (>=3.12.3-0ubuntu1) \ No newline at end of file diff --git a/scripts/update-setup-cli-dependencies.sh b/scripts/update-setup-cli-dependencies.sh index 49294265..087a2753 100755 --- a/scripts/update-setup-cli-dependencies.sh +++ b/scripts/update-setup-cli-dependencies.sh @@ -23,9 +23,8 @@ source "$SCRIPT_DIR/utils.sh" print_start_of_script -print_script_step "Running Poetry install" -source ~/.profile #ensure poetry is in path +print_script_step "Running CLI install script" cd $ROOT_DIR/cli -poetry install --no-root +./scripts/th_cli_install.sh print_end_of_script diff --git a/traefik_dynamic.yml b/traefik_dynamic.yml new 
file mode 100644 index 00000000..6a59666c --- /dev/null +++ b/traefik_dynamic.yml @@ -0,0 +1,17 @@ +http: + routers: + pushav-router: + rule: "PathPrefix(`/pushav`)" + service: pushav-service + middlewares: + - pushav-stripprefix + services: + pushav-service: + loadBalancer: + servers: + - url: "https://host.docker.internal:1234" + middlewares: + pushav-stripprefix: + stripPrefix: + prefixes: + - "/pushav"