Compare commits

..

13 Commits

Author SHA1 Message Date
J. Nick Koston
dd345106f7 lock 2025-06-12 22:37:26 -05:00
J. Nick Koston
9763821d68 lock 2025-06-12 22:34:03 -05:00
J. Nick Koston
3d358cf431 lock 2025-06-12 22:21:15 -05:00
J. Nick Koston
1cebeb53d3 lock 2025-06-12 22:10:40 -05:00
J. Nick Koston
09bfa7f527 lock 2025-06-12 22:09:53 -05:00
J. Nick Koston
a61138c4f7 lock 2025-06-12 22:05:31 -05:00
J. Nick Koston
b23445e1c3 lock 2025-06-12 22:00:28 -05:00
J. Nick Koston
b887c1bf08 lock 2025-06-12 21:58:52 -05:00
J. Nick Koston
9206888966 label 2025-06-12 21:46:38 -05:00
J. Nick Koston
567cba4510 speed up 2025-06-12 21:37:22 -05:00
J. Nick Koston
7da5e02388 tweak 2025-06-12 21:34:03 -05:00
J. Nick Koston
1dd189cf36 tweak 2025-06-12 21:33:53 -05:00
J. Nick Koston
9e5dc01fd4 tweak 2025-06-12 21:33:29 -05:00
27 changed files with 449 additions and 257 deletions

View File

@@ -377,7 +377,15 @@ jobs:
id: list-components
run: |
. venv/bin/activate
components=$(script/list-components.py --changed --branch ${{ steps.target-branch.outputs.branch }})
# Check if we should test all components (via label)
if [[ "${{ contains(github.event.pull_request.labels.*.name, 'test-all-components') }}" == "true" ]]; then
echo "Label 'test-all-components' found - testing ALL components"
components=$(script/list-components.py)
else
components=$(script/list-components.py --changed --branch ${{ steps.target-branch.outputs.branch }})
fi
output_components=$(echo "$components" | jq -R -s -c 'split("\n")[:-1] | map(select(length > 0))')
count=$(echo "$output_components" | jq length)
@@ -415,11 +423,15 @@ jobs:
- name: test_build_components -e config -c ${{ matrix.file }}
run: |
. venv/bin/activate
./script/test_build_components -e config -c ${{ matrix.file }}
# Use 4 parallel jobs for config validation
./script/test_build_components -e config -c ${{ matrix.file }} -j 4 -f
- name: test_build_components -e compile -c ${{ matrix.file }}
run: |
. venv/bin/activate
./script/test_build_components -e compile -c ${{ matrix.file }}
mkdir -p build_cache
export PLATFORMIO_BUILD_CACHE_DIR=$PWD/build_cache
# Use 2 parallel jobs for compilation (resource intensive)
./script/test_build_components -e compile -c ${{ matrix.file }} -j 2 -f -b $PWD/build_cache
test-build-components-splitter:
name: Split components for testing into 20 groups maximum
@@ -471,17 +483,28 @@ jobs:
- name: Validate config
run: |
. venv/bin/activate
for component in ${{ matrix.components }}; do
./script/test_build_components -e config -c $component
# Process all components in parallel for config validation
components="${{ matrix.components }}"
# Convert space-separated list to multiple -c flags
component_args=""
for component in $components; do
component_args="$component_args -c $component"
done
# Use 8 parallel jobs for lightweight config validation
./script/test_build_components -e config $component_args -j 8 -f
- name: Compile config
run: |
. venv/bin/activate
mkdir build_cache
mkdir -p build_cache
export PLATFORMIO_BUILD_CACHE_DIR=$PWD/build_cache
for component in ${{ matrix.components }}; do
./script/test_build_components -e compile -c $component
# Process all components in parallel for compilation
components="${{ matrix.components }}"
component_args=""
for component in $components; do
component_args="$component_args -c $component"
done
# Use 4 parallel jobs for resource-intensive compilation
./script/test_build_components -e compile $component_args -j 4 -f -b $PWD/build_cache
ci-status:
name: CI Status

View File

@@ -22,7 +22,6 @@ from esphome.cpp_generator import ( # noqa: F401
TemplateArguments,
add,
add_build_flag,
add_build_unflag,
add_define,
add_global,
add_library,
@@ -35,7 +34,6 @@ from esphome.cpp_generator import ( # noqa: F401
process_lambda,
progmem_array,
safe_exp,
set_cpp_standard,
statement,
static_const_array,
templatable,

View File

@@ -260,7 +260,7 @@ uint16_t APIConnection::encode_message_to_buffer(ProtoMessage &msg, uint16_t mes
return 0; // Doesn't fit
}
// Allocate buffer space - pass payload size, allocation functions add header/footer space
// Allocate exact buffer space needed (just the payload, not the overhead)
ProtoWriteBuffer buffer =
is_single ? conn->allocate_single_message_buffer(size) : conn->allocate_batch_message_buffer(size);

View File

@@ -94,13 +94,6 @@ COMPILER_OPTIMIZATIONS = {
"SIZE": "CONFIG_COMPILER_OPTIMIZATION_SIZE",
}
ARDUINO_ALLOWED_VARIANTS = [
VARIANT_ESP32,
VARIANT_ESP32C3,
VARIANT_ESP32S2,
VARIANT_ESP32S3,
]
def get_cpu_frequencies(*frequencies):
return [str(x) + "MHZ" for x in frequencies]
@@ -150,17 +143,12 @@ def set_core_data(config):
CORE.data[KEY_ESP32][KEY_COMPONENTS] = {}
elif conf[CONF_TYPE] == FRAMEWORK_ARDUINO:
CORE.data[KEY_CORE][KEY_TARGET_FRAMEWORK] = "arduino"
if variant not in ARDUINO_ALLOWED_VARIANTS:
raise cv.Invalid(
f"ESPHome does not support using the Arduino framework for the {variant}. Please use the ESP-IDF framework instead.",
path=[CONF_FRAMEWORK, CONF_TYPE],
)
CORE.data[KEY_CORE][KEY_FRAMEWORK_VERSION] = cv.Version.parse(
config[CONF_FRAMEWORK][CONF_VERSION]
)
CORE.data[KEY_ESP32][KEY_BOARD] = config[CONF_BOARD]
CORE.data[KEY_ESP32][KEY_VARIANT] = variant
CORE.data[KEY_ESP32][KEY_VARIANT] = config[CONF_VARIANT]
CORE.data[KEY_ESP32][KEY_EXTRA_BUILD_FILES] = {}
return config
@@ -630,21 +618,6 @@ ESP_IDF_FRAMEWORK_SCHEMA = cv.All(
)
def _set_default_framework(config):
if CONF_FRAMEWORK not in config:
config = config.copy()
variant = config[CONF_VARIANT]
if variant in ARDUINO_ALLOWED_VARIANTS:
config[CONF_FRAMEWORK] = ARDUINO_FRAMEWORK_SCHEMA({})
config[CONF_FRAMEWORK][CONF_TYPE] = FRAMEWORK_ARDUINO
else:
config[CONF_FRAMEWORK] = ESP_IDF_FRAMEWORK_SCHEMA({})
config[CONF_FRAMEWORK][CONF_TYPE] = FRAMEWORK_ESP_IDF
return config
FRAMEWORK_ESP_IDF = "esp-idf"
FRAMEWORK_ARDUINO = "arduino"
FRAMEWORK_SCHEMA = cv.typed_schema(
@@ -654,6 +627,7 @@ FRAMEWORK_SCHEMA = cv.typed_schema(
},
lower=True,
space="-",
default_type=FRAMEWORK_ARDUINO,
)
@@ -680,11 +654,10 @@ CONFIG_SCHEMA = cv.All(
),
cv.Optional(CONF_PARTITIONS): cv.file_,
cv.Optional(CONF_VARIANT): cv.one_of(*VARIANTS, upper=True),
cv.Optional(CONF_FRAMEWORK): FRAMEWORK_SCHEMA,
cv.Optional(CONF_FRAMEWORK, default={}): FRAMEWORK_SCHEMA,
}
),
_detect_variant,
_set_default_framework,
set_core_data,
)
@@ -695,7 +668,6 @@ FINAL_VALIDATE_SCHEMA = cv.Schema(final_validate)
async def to_code(config):
cg.add_platformio_option("board", config[CONF_BOARD])
cg.add_platformio_option("board_upload.flash_size", config[CONF_FLASH_SIZE])
cg.set_cpp_standard("gnu++17")
cg.add_build_flag("-DUSE_ESP32")
cg.add_define("ESPHOME_BOARD", config[CONF_BOARD])
cg.add_build_flag(f"-DUSE_ESP32_VARIANT_{config[CONF_VARIANT]}")

View File

@@ -183,7 +183,6 @@ async def to_code(config):
cg.add_platformio_option("board", config[CONF_BOARD])
cg.add_build_flag("-DUSE_ESP8266")
cg.set_cpp_standard("gnu++17")
cg.add_define("ESPHOME_BOARD", config[CONF_BOARD])
cg.add_define("ESPHOME_VARIANT", "ESP8266")

View File

@@ -129,13 +129,6 @@ void INA219Component::setup() {
}
}
void INA219Component::on_powerdown() {
// Mode = 0 -> power down
if (!this->write_byte_16(INA219_REGISTER_CONFIG, 0)) {
ESP_LOGE(TAG, "powerdown error");
}
}
void INA219Component::dump_config() {
ESP_LOGCONFIG(TAG, "INA219:");
LOG_I2C_DEVICE(this);

View File

@@ -15,7 +15,6 @@ class INA219Component : public PollingComponent, public i2c::I2CDevice {
void dump_config() override;
float get_setup_priority() const override;
void update() override;
void on_powerdown() override;
void set_shunt_resistance_ohm(float shunt_resistance_ohm) { shunt_resistance_ohm_ = shunt_resistance_ohm; }
void set_max_current_a(float max_current_a) { max_current_a_ = max_current_a; }

View File

@@ -264,7 +264,6 @@ async def component_to_code(config):
# force using arduino framework
cg.add_platformio_option("framework", "arduino")
cg.add_build_flag("-DUSE_ARDUINO")
cg.set_cpp_standard("gnu++17")
# disable library compatibility checks
cg.add_platformio_option("lib_ldf_mode", "off")

View File

@@ -31,6 +31,7 @@ CONFIG_SCHEMA = cv.Schema(
}
),
},
cv.only_with_arduino,
).extend(cv.COMPONENT_SCHEMA)

View File

@@ -167,7 +167,6 @@ async def to_code(config):
cg.add_platformio_option("lib_ldf_mode", "chain+")
cg.add_platformio_option("board", config[CONF_BOARD])
cg.add_build_flag("-DUSE_RP2040")
cg.set_cpp_standard("gnu++17")
cg.add_define("ESPHOME_BOARD", config[CONF_BOARD])
cg.add_define("ESPHOME_VARIANT", "RP2040")

View File

@@ -110,7 +110,15 @@ void TemplateAlarmControlPanel::loop() {
delay = this->arming_night_time_;
}
if ((millis() - this->last_update_) > delay) {
this->bypass_before_arming();
#ifdef USE_BINARY_SENSOR
for (auto sensor_info : this->sensor_map_) {
// Check for sensors left on and set to bypass automatically and remove them from monitoring
if ((sensor_info.second.flags & BINARY_SENSOR_MODE_BYPASS_AUTO) && (sensor_info.first->state)) {
ESP_LOGW(TAG, "%s is left on and will be automatically bypassed", sensor_info.first->get_name().c_str());
this->bypassed_sensor_indicies_.push_back(sensor_info.second.store_index);
}
}
#endif
this->publish_state(this->desired_state_);
}
return;
@@ -251,23 +259,10 @@ void TemplateAlarmControlPanel::arm_(optional<std::string> code, alarm_control_p
if (delay > 0) {
this->publish_state(ACP_STATE_ARMING);
} else {
this->bypass_before_arming();
this->publish_state(state);
}
}
void TemplateAlarmControlPanel::bypass_before_arming() {
#ifdef USE_BINARY_SENSOR
for (auto sensor_info : this->sensor_map_) {
// Check for sensors left on and set to bypass automatically and remove them from monitoring
if ((sensor_info.second.flags & BINARY_SENSOR_MODE_BYPASS_AUTO) && (sensor_info.first->state)) {
ESP_LOGW(TAG, "'%s' is left on and will be automatically bypassed", sensor_info.first->get_name().c_str());
this->bypassed_sensor_indicies_.push_back(sensor_info.second.store_index);
}
}
#endif
}
void TemplateAlarmControlPanel::control(const AlarmControlPanelCall &call) {
if (call.get_state()) {
if (call.get_state() == ACP_STATE_ARMED_AWAY) {

View File

@@ -60,7 +60,6 @@ class TemplateAlarmControlPanel : public alarm_control_panel::AlarmControlPanel,
bool get_requires_code_to_arm() const override { return this->requires_code_to_arm_; }
bool get_all_sensors_ready() { return this->sensors_ready_; };
void set_restore_mode(TemplateAlarmControlPanelRestoreMode restore_mode) { this->restore_mode_ = restore_mode; }
void bypass_before_arming();
#ifdef USE_BINARY_SENSOR
/** Add a binary_sensor to the alarm_panel.

View File

@@ -8,6 +8,8 @@ CONFIG_SCHEMA = cv.All(
cv.only_with_esp_idf,
)
AUTO_LOAD = ["web_server"]
async def to_code(config):
# Increase the maximum supported size of headers section in HTTP request packet to be processed by the server

View File

@@ -9,12 +9,10 @@
#include "utils.h"
#include "web_server_idf.h"
#ifdef USE_WEBSERVER
#include "esphome/components/web_server/web_server.h"
#include "esphome/components/web_server/list_entities.h"
#endif // USE_WEBSERVER
#include "web_server_idf.h"
namespace esphome {
namespace web_server_idf {
@@ -275,7 +273,6 @@ void AsyncResponseStream::printf(const char *fmt, ...) {
this->print(str);
}
#ifdef USE_WEBSERVER
AsyncEventSource::~AsyncEventSource() {
for (auto *ses : this->sessions_) {
delete ses; // NOLINT(cppcoreguidelines-owning-memory)
@@ -514,7 +511,6 @@ void AsyncEventSourceResponse::deferrable_send_state(void *source, const char *e
}
}
}
#endif
} // namespace web_server_idf
} // namespace esphome

View File

@@ -1,7 +1,6 @@
#pragma once
#ifdef USE_ESP_IDF
#include "esphome/core/defines.h"
#include <esp_http_server.h>
#include <functional>
@@ -13,12 +12,10 @@
#include <vector>
namespace esphome {
#ifdef USE_WEBSERVER
namespace web_server {
class WebServer;
class ListEntitiesIterator;
}; // namespace web_server
#endif
namespace web_server_idf {
#define F(string_literal) (string_literal)
@@ -223,7 +220,6 @@ class AsyncWebHandler {
virtual bool isRequestHandlerTrivial() { return true; }
};
#ifdef USE_WEBSERVER
class AsyncEventSource;
class AsyncEventSourceResponse;
@@ -311,13 +307,10 @@ class AsyncEventSource : public AsyncWebHandler {
connect_handler_t on_connect_{};
esphome::web_server::WebServer *web_server_;
};
#endif // USE_WEBSERVER
class DefaultHeaders {
friend class AsyncWebServerRequest;
#ifdef USE_WEBSERVER
friend class AsyncEventSourceResponse;
#endif
public:
// NOLINTNEXTLINE(readability-identifier-naming)

View File

@@ -507,8 +507,6 @@ class EsphomeCore:
self.libraries: list[Library] = []
# A set of build flags to set in the platformio project
self.build_flags: set[str] = set()
# A set of build unflags to set in the platformio project
self.build_unflags: set[str] = set()
# A set of defines to set for the compile process in esphome/core/defines.h
self.defines: set[Define] = set()
# A map of all platformio options to apply
@@ -547,7 +545,6 @@ class EsphomeCore:
self.global_statements = []
self.libraries = []
self.build_flags = set()
self.build_unflags = set()
self.defines = set()
self.platformio_options = {}
self.loaded_integrations = set()
@@ -769,15 +766,11 @@ class EsphomeCore:
self.libraries.append(library)
return library
def add_build_flag(self, build_flag: str) -> str:
def add_build_flag(self, build_flag):
self.build_flags.add(build_flag)
_LOGGER.debug("Adding build flag: %s", build_flag)
return build_flag
def add_build_unflag(self, build_unflag: str) -> None:
self.build_unflags.add(build_unflag)
_LOGGER.debug("Adding build unflag: %s", build_unflag)
def add_define(self, define):
if isinstance(define, str):
define = Define(define)

View File

@@ -649,27 +649,21 @@ class Application {
std::string area_;
const char *comment_{nullptr};
const char *compilation_time_{nullptr};
Component *current_component_{nullptr};
bool name_add_mac_suffix_;
uint32_t last_loop_{0};
uint32_t loop_interval_{16};
<<<<<<< Updated upstream
size_t dump_config_at_{SIZE_MAX};
uint32_t app_state_{0};
Component *current_component_{nullptr};
=======
>>>>>>> Stashed changes
uint32_t loop_component_start_time_{0};
size_t dump_config_at_{SIZE_MAX};
bool name_add_mac_suffix_;
uint8_t app_state_{0};
#ifdef USE_SOCKET_SELECT_SUPPORT
// Socket select management
std::vector<int> socket_fds_; // Vector of all monitored socket file descriptors
bool socket_fds_changed_{false}; // Flag to rebuild base_read_fds_ when socket_fds_ changes
int max_fd_{-1}; // Highest file descriptor number for select()
fd_set base_read_fds_{}; // Cached fd_set rebuilt only when socket_fds_ changes
fd_set read_fds_{}; // Working fd_set for select(), copied from base_read_fds_
int max_fd_{-1}; // Highest file descriptor number for select()
bool socket_fds_changed_{false}; // Flag to rebuild base_read_fds_ when socket_fds_ changes
#endif
};

View File

@@ -63,7 +63,7 @@ extern const uint32_t STATUS_LED_OK;
extern const uint32_t STATUS_LED_WARNING;
extern const uint32_t STATUS_LED_ERROR;
enum class RetryResult : uint8_t { DONE, RETRY };
enum class RetryResult { DONE, RETRY };
extern const uint32_t WARN_IF_BLOCKING_OVER_MS;

View File

@@ -608,17 +608,6 @@ def add_build_flag(build_flag: str):
CORE.add_build_flag(build_flag)
def add_build_unflag(build_unflag: str) -> None:
"""Add a global build unflag to the compiler flags."""
CORE.add_build_unflag(build_unflag)
def set_cpp_standard(standard: str) -> None:
"""Set C++ standard with compiler flag `-std={standard}`."""
CORE.add_build_unflag("-std=gnu++11")
CORE.add_build_flag(f"-std={standard}")
def add_define(name: str, value: SafeExpType = None):
"""Add a global define to the auto-generated defines.h file.

View File

@@ -10,6 +10,7 @@ import urllib.parse
import esphome.config_validation as cv
from esphome.core import CORE, TimePeriodSeconds
from esphome.git_lock import git_operation_lock
_LOGGER = logging.getLogger(__name__)
@@ -59,66 +60,72 @@ def clone_or_update(
)
repo_dir = _compute_destination_path(key, domain)
if not repo_dir.is_dir():
_LOGGER.info("Cloning %s", key)
_LOGGER.debug("Location: %s", repo_dir)
cmd = ["git", "clone", "--depth=1"]
cmd += ["--", url, str(repo_dir)]
run_git_command(cmd)
if ref is not None:
# We need to fetch the PR branch first, otherwise git will complain
# about missing objects
_LOGGER.info("Fetching %s", ref)
run_git_command(["git", "fetch", "--", "origin", ref], str(repo_dir))
run_git_command(["git", "reset", "--hard", "FETCH_HEAD"], str(repo_dir))
if submodules is not None:
_LOGGER.info(
"Initialising submodules (%s) for %s", ", ".join(submodules), key
)
run_git_command(
["git", "submodule", "update", "--init"] + submodules, str(repo_dir)
)
else:
# Check refresh needed
file_timestamp = Path(repo_dir / ".git" / "FETCH_HEAD")
# On first clone, FETCH_HEAD does not exist
if not file_timestamp.exists():
file_timestamp = Path(repo_dir / ".git" / "HEAD")
age = datetime.now() - datetime.fromtimestamp(file_timestamp.stat().st_mtime)
if refresh is None or age.total_seconds() > refresh.total_seconds:
old_sha = run_git_command(["git", "rev-parse", "HEAD"], str(repo_dir))
_LOGGER.info("Updating %s", key)
# Use lock to prevent concurrent access to the same repository
with git_operation_lock(key):
if not repo_dir.is_dir():
_LOGGER.info("Cloning %s", key)
_LOGGER.debug("Location: %s", repo_dir)
# Stash local changes (if any)
run_git_command(
["git", "stash", "push", "--include-untracked"], str(repo_dir)
)
# Fetch remote ref
cmd = ["git", "fetch", "--", "origin"]
cmd = ["git", "clone", "--depth=1"]
cmd += ["--", url, str(repo_dir)]
run_git_command(cmd)
if ref is not None:
cmd.append(ref)
run_git_command(cmd, str(repo_dir))
# Hard reset to FETCH_HEAD (short-lived git ref corresponding to most recent fetch)
run_git_command(["git", "reset", "--hard", "FETCH_HEAD"], str(repo_dir))
# We need to fetch the PR branch first, otherwise git will complain
# about missing objects
_LOGGER.info("Fetching %s", ref)
run_git_command(["git", "fetch", "--", "origin", ref], str(repo_dir))
run_git_command(["git", "reset", "--hard", "FETCH_HEAD"], str(repo_dir))
if submodules is not None:
_LOGGER.info(
"Updating submodules (%s) for %s", ", ".join(submodules), key
"Initialising submodules (%s) for %s", ", ".join(submodules), key
)
run_git_command(
["git", "submodule", "update", "--init"] + submodules, str(repo_dir)
)
def revert():
_LOGGER.info("Reverting changes to %s -> %s", key, old_sha)
run_git_command(["git", "reset", "--hard", old_sha], str(repo_dir))
else:
# Check refresh needed
file_timestamp = Path(repo_dir / ".git" / "FETCH_HEAD")
# On first clone, FETCH_HEAD does not exist
if not file_timestamp.exists():
file_timestamp = Path(repo_dir / ".git" / "HEAD")
age = datetime.now() - datetime.fromtimestamp(
file_timestamp.stat().st_mtime
)
if refresh is None or age.total_seconds() > refresh.total_seconds:
old_sha = run_git_command(["git", "rev-parse", "HEAD"], str(repo_dir))
_LOGGER.info("Updating %s", key)
_LOGGER.debug("Location: %s", repo_dir)
# Stash local changes (if any)
run_git_command(
["git", "stash", "push", "--include-untracked"], str(repo_dir)
)
# Fetch remote ref
cmd = ["git", "fetch", "--", "origin"]
if ref is not None:
cmd.append(ref)
run_git_command(cmd, str(repo_dir))
# Hard reset to FETCH_HEAD (short-lived git ref corresponding to most recent fetch)
run_git_command(["git", "reset", "--hard", "FETCH_HEAD"], str(repo_dir))
return repo_dir, revert
if submodules is not None:
_LOGGER.info(
"Updating submodules (%s) for %s", ", ".join(submodules), key
)
run_git_command(
["git", "submodule", "update", "--init"] + submodules,
str(repo_dir),
)
return repo_dir, None
def revert():
_LOGGER.info("Reverting changes to %s -> %s", key, old_sha)
run_git_command(["git", "reset", "--hard", old_sha], str(repo_dir))
return repo_dir, revert
return repo_dir, None
GIT_DOMAINS = {

141
esphome/git_lock.py Normal file
View File

@@ -0,0 +1,141 @@
"""File locking for git operations to prevent race conditions."""
from contextlib import contextmanager
import hashlib
import logging
from pathlib import Path
import sys
import tempfile
import time
# Platform-specific imports
if sys.platform == "win32":
import msvcrt
else:
import fcntl
_LOGGER = logging.getLogger(__name__)
# Global lock directory
LOCK_DIR = Path(tempfile.gettempdir()) / "esphome_git_locks"
LOCK_DIR.mkdir(exist_ok=True)
def _acquire_lock_unix(lock_file, timeout, identifier):
"""Acquire lock on Unix systems using fcntl."""
start_time = time.time()
last_log_time = start_time
while True:
try:
fcntl.flock(lock_file.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
return True
except OSError:
elapsed = time.time() - start_time
if elapsed > timeout:
raise TimeoutError(
f"Could not acquire lock for {identifier} within {timeout}s"
)
# Log progress every 10 seconds
if time.time() - last_log_time > 10:
_LOGGER.info(
f"Still waiting for lock {identifier} ({elapsed:.1f}s elapsed)..."
)
last_log_time = time.time()
time.sleep(0.1)
def _release_lock_unix(lock_file):
"""Release lock on Unix systems."""
try:
fcntl.flock(lock_file.fileno(), fcntl.LOCK_UN)
except Exception:
pass
def _acquire_lock_windows(lock_file, timeout, identifier):
    """Acquire a byte-range lock on *lock_file* via msvcrt, polling until acquired.

    :param lock_file: open file object whose first byte is locked
    :param timeout: maximum time in seconds to wait for the lock
    :param identifier: human-readable name used in log/error messages
    :returns: True once the lock is held
    :raises TimeoutError: if the lock could not be acquired within *timeout*
    """
    start_time = time.time()
    last_log_time = start_time
    while True:
        try:
            msvcrt.locking(lock_file.fileno(), msvcrt.LK_NBLCK, 1)
            return True
        except OSError:
            elapsed = time.time() - start_time
            if elapsed > timeout:
                # Suppress the OSError chain; the timeout is what matters.
                raise TimeoutError(
                    f"Could not acquire lock for {identifier} within {timeout}s"
                ) from None
            # Log progress every 10 seconds, mirroring the Unix implementation.
            if time.time() - last_log_time > 10:
                _LOGGER.info(
                    "Still waiting for lock %s (%.1fs elapsed)...",
                    identifier,
                    elapsed,
                )
                last_log_time = time.time()
            time.sleep(0.1)
def _release_lock_windows(lock_file):
    """Best-effort release of a Windows byte-range lock on *lock_file*."""
    try:
        fd = lock_file.fileno()
        msvcrt.locking(fd, msvcrt.LK_UNLCK, 1)
    except Exception:
        # Releasing is best-effort: process exit drops the lock regardless,
        # so never let an unlock failure propagate.
        pass
@contextmanager
def git_operation_lock(identifier: str, timeout: float = 30.0):
    """
    Acquire a cross-process file lock for a git operation.

    :param identifier: Unique identifier for the operation (e.g., repo URL or path)
    :param timeout: Maximum time to wait for the lock in seconds
    :raises TimeoutError: if the lock cannot be acquired within *timeout*
    """
    # Create a safe filename from the identifier
    lock_name = hashlib.sha256(identifier.encode()).hexdigest()[:16]
    lock_path = LOCK_DIR / f"{lock_name}.lock"
    # Re-create the lock directory defensively: temp-dir cleaners may have
    # removed it since module import, which would make touch() below fail.
    LOCK_DIR.mkdir(parents=True, exist_ok=True)
    # Ensure lock file exists
    lock_path.touch(exist_ok=True)
    lock_file = None
    acquired = False
    try:
        # Open in binary mode for Windows compatibility
        lock_file = open(lock_path, "r+b")
        # Platform-specific lock acquisition (raises TimeoutError on expiry)
        if sys.platform == "win32":
            acquired = _acquire_lock_windows(lock_file, timeout, identifier)
        else:
            acquired = _acquire_lock_unix(lock_file, timeout, identifier)
        if acquired:
            # Lazy %-formatting, matching the project's logging style.
            _LOGGER.debug("Acquired lock for %s", identifier)
        yield
    finally:
        if lock_file:
            if acquired:
                # Platform-specific lock release (best-effort)
                if sys.platform == "win32":
                    _release_lock_windows(lock_file)
                else:
                    _release_lock_unix(lock_file)
                _LOGGER.debug("Released lock for %s", identifier)
            lock_file.close()
@contextmanager
def platformio_init_lock(timeout: float = 30.0):
    """Serialize PlatformIO initialization to prevent race conditions."""
    # All processes contend on the same well-known key.
    lock_id = "platformio_init"
    with git_operation_lock(lock_id, timeout=timeout):
        yield
@contextmanager
def platformio_install_lock(package_name: str, timeout: float = 300.0):
    """Lock for PlatformIO package installation to prevent race conditions.

    :param package_name: name of the package being installed (part of the lock key)
    :param timeout: maximum time in seconds to wait for the lock
    :raises TimeoutError: if the lock cannot be acquired within *timeout*
    """
    # Lazy %-formatting, matching the project's logging style.
    _LOGGER.info(
        "Waiting for PlatformIO package installation lock (%s)...", package_name
    )
    with git_operation_lock(f"platformio_install_{package_name}", timeout=timeout):
        _LOGGER.info(
            "Acquired PlatformIO package installation lock (%s)", package_name
        )
        try:
            yield
        finally:
            # Log release on every exit path, not only clean completion.
            _LOGGER.info(
                "Released PlatformIO package installation lock (%s)", package_name
            )

View File

@@ -86,9 +86,28 @@ def run_platformio_cli(*args, **kwargs) -> str | int:
if os.environ.get("ESPHOME_USE_SUBPROCESS") is not None:
return run_external_process(*cmd, **kwargs)
import platformio.__main__
# Import with minimal locking to prevent initialization race conditions
from esphome.git_lock import platformio_init_lock
with platformio_init_lock():
import platformio.__main__
patch_structhash()
# For first-time PlatformIO runs, use a lock to prevent directory creation conflicts
home_pio = Path.home() / ".platformio"
if not home_pio.exists() and len(args) > 0 and args[0] == "run":
from esphome.git_lock import platformio_install_lock
_LOGGER.info("First PlatformIO run detected, using initialization lock...")
with platformio_install_lock("first_run", timeout=120.0):
# Create the directory if it still doesn't exist
home_pio.mkdir(exist_ok=True)
result = run_external_command(platformio.__main__.main, *cmd, **kwargs)
_LOGGER.info("First PlatformIO run completed")
return result
# Normal execution without locking
return run_external_command(platformio.__main__.main, *cmd, **kwargs)

View File

@@ -67,6 +67,20 @@ esp8266:
"""
ESP32_CONFIG = """
esp32:
board: {board}
framework:
type: arduino
"""
ESP32S2_CONFIG = """
esp32:
board: {board}
framework:
type: esp-idf
"""
ESP32C3_CONFIG = """
esp32:
board: {board}
framework:
@@ -91,6 +105,8 @@ rtl87xx:
HARDWARE_BASE_CONFIGS = {
"ESP8266": ESP8266_CONFIG,
"ESP32": ESP32_CONFIG,
"ESP32S2": ESP32S2_CONFIG,
"ESP32C3": ESP32C3_CONFIG,
"RP2040": RP2040_CONFIG,
"BK72XX": BK72XX_CONFIG,
"RTL87XX": RTL87XX_CONFIG,

View File

@@ -153,9 +153,6 @@ def get_ini_content():
# Sort to avoid changing build flags order
CORE.add_platformio_option("build_flags", sorted(CORE.build_flags))
# Sort to avoid changing build unflags order
CORE.add_platformio_option("build_unflags", sorted(CORE.build_unflags))
content = "[platformio]\n"
content += f"description = ESPHome {__version__}\n"

View File

@@ -48,9 +48,6 @@ lib_deps =
lvgl/lvgl@8.4.0 ; lvgl
build_flags =
-DESPHOME_LOG_LEVEL=ESPHOME_LOG_LEVEL_VERY_VERBOSE
-std=gnu++17
build_unflags =
-std=gnu++11
src_filter =
+<./>
+<../tests/dummy_main.cpp>
@@ -76,8 +73,6 @@ lib_deps =
build_flags =
${common.build_flags}
-DUSE_ARDUINO
build_unflags =
${common.build_unflags}
; This are common settings for all IDF-framework based environments.
[common:idf]
@@ -85,8 +80,6 @@ extends = common
build_flags =
${common.build_flags}
-DUSE_ESP_IDF
build_unflags =
${common.build_unflags}
; This are common settings for the ESP8266 using Arduino.
[common:esp8266-arduino]
@@ -111,8 +104,6 @@ build_flags =
-Wno-nonnull-compare
-DUSE_ESP8266
-DUSE_ESP8266_FRAMEWORK_ARDUINO
build_unflags =
${common.build_unflags}
extra_scripts = post:esphome/components/esp8266/post_build.py.script
; This are common settings for the ESP32 (all variants) using Arduino.
@@ -144,8 +135,6 @@ build_flags =
-DUSE_ESP32
-DUSE_ESP32_FRAMEWORK_ARDUINO
-DAUDIO_NO_SD_FS ; i2s_audio
build_unflags =
${common.build_unflags}
extra_scripts = post:esphome/components/esp32/post_build.py.script
; This are common settings for the ESP32 (all variants) using IDF.
@@ -166,8 +155,6 @@ build_flags =
-Wno-nonnull-compare
-DUSE_ESP32
-DUSE_ESP32_FRAMEWORK_ESP_IDF
build_unflags =
${common.build_unflags}
extra_scripts = post:esphome/components/esp32/post_build.py.script
; This are common settings for the ESP32 using the latest ESP-IDF version.
@@ -194,8 +181,6 @@ build_flags =
${common:arduino.build_flags}
-DUSE_RP2040
-DUSE_RP2040_FRAMEWORK_ARDUINO
build_unflags =
${common.build_unflags}
; This are common settings for the LibreTiny (all variants) using Arduino.
[common:libretiny-arduino]
@@ -207,8 +192,6 @@ lib_deps =
build_flags =
${common:arduino.build_flags}
-DUSE_LIBRETINY
build_unflags =
${common.build_unflags}
build_src_flags = -include Arduino.h
; This is the common settings for the nRF52 using Zephyr.
@@ -241,8 +224,6 @@ board = nodemcuv2
build_flags =
${common:esp8266-arduino.build_flags}
${flags:runtime.build_flags}
build_unflags =
${common.build_unflags}
[env:esp8266-arduino-tidy]
extends = common:esp8266-arduino
@@ -250,8 +231,6 @@ board = nodemcuv2
build_flags =
${common:esp8266-arduino.build_flags}
${flags:clangtidy.build_flags}
build_unflags =
${common.build_unflags}
;;;;;;;; ESP32 ;;;;;;;;
@@ -263,8 +242,6 @@ build_flags =
${common:esp32-arduino.build_flags}
${flags:runtime.build_flags}
-DUSE_ESP32_VARIANT_ESP32
build_unflags =
${common.build_unflags}
[env:esp32-arduino-tidy]
extends = common:esp32-arduino
@@ -273,8 +250,6 @@ build_flags =
${common:esp32-arduino.build_flags}
${flags:clangtidy.build_flags}
-DUSE_ESP32_VARIANT_ESP32
build_unflags =
${common.build_unflags}
[env:esp32-idf]
extends = common:esp32-idf
@@ -284,8 +259,6 @@ build_flags =
${common:esp32-idf.build_flags}
${flags:runtime.build_flags}
-DUSE_ESP32_VARIANT_ESP32
build_unflags =
${common.build_unflags}
[env:esp32-idf-5_3]
extends = common:esp32-idf-5_3
@@ -295,8 +268,6 @@ build_flags =
${common:esp32-idf.build_flags}
${flags:runtime.build_flags}
-DUSE_ESP32_VARIANT_ESP32
build_unflags =
${common.build_unflags}
[env:esp32-idf-tidy]
extends = common:esp32-idf
@@ -306,8 +277,6 @@ build_flags =
${common:esp32-idf.build_flags}
${flags:clangtidy.build_flags}
-DUSE_ESP32_VARIANT_ESP32
build_unflags =
${common.build_unflags}
;;;;;;;; ESP32-C3 ;;;;;;;;
@@ -318,8 +287,6 @@ build_flags =
${common:esp32-arduino.build_flags}
${flags:runtime.build_flags}
-DUSE_ESP32_VARIANT_ESP32C3
build_unflags =
${common.build_unflags}
[env:esp32c3-arduino-tidy]
extends = common:esp32-arduino
@@ -328,8 +295,6 @@ build_flags =
${common:esp32-arduino.build_flags}
${flags:clangtidy.build_flags}
-DUSE_ESP32_VARIANT_ESP32C3
build_unflags =
${common.build_unflags}
[env:esp32c3-idf]
extends = common:esp32-idf
@@ -339,8 +304,6 @@ build_flags =
${common:esp32-idf.build_flags}
${flags:runtime.build_flags}
-DUSE_ESP32_VARIANT_ESP32C3
build_unflags =
${common.build_unflags}
[env:esp32c3-idf-5_3]
extends = common:esp32-idf-5_3
@@ -350,8 +313,6 @@ build_flags =
${common:esp32-idf.build_flags}
${flags:runtime.build_flags}
-DUSE_ESP32_VARIANT_ESP32C3
build_unflags =
${common.build_unflags}
[env:esp32c3-idf-tidy]
extends = common:esp32-idf
@@ -361,8 +322,6 @@ build_flags =
${common:esp32-idf.build_flags}
${flags:clangtidy.build_flags}
-DUSE_ESP32_VARIANT_ESP32C3
build_unflags =
${common.build_unflags}
;;;;;;;; ESP32-C6 ;;;;;;;;
@@ -384,8 +343,6 @@ build_flags =
${common:esp32-arduino.build_flags}
${flags:runtime.build_flags}
-DUSE_ESP32_VARIANT_ESP32S2
build_unflags =
${common.build_unflags}
[env:esp32s2-arduino-tidy]
extends = common:esp32-arduino
@@ -394,8 +351,6 @@ build_flags =
${common:esp32-arduino.build_flags}
${flags:clangtidy.build_flags}
-DUSE_ESP32_VARIANT_ESP32S2
build_unflags =
${common.build_unflags}
[env:esp32s2-idf]
extends = common:esp32-idf
@@ -405,8 +360,6 @@ build_flags =
${common:esp32-idf.build_flags}
${flags:runtime.build_flags}
-DUSE_ESP32_VARIANT_ESP32S2
build_unflags =
${common.build_unflags}
[env:esp32s2-idf-5_3]
extends = common:esp32-idf-5_3
@@ -416,8 +369,6 @@ build_flags =
${common:esp32-idf.build_flags}
${flags:runtime.build_flags}
-DUSE_ESP32_VARIANT_ESP32S2
build_unflags =
${common.build_unflags}
[env:esp32s2-idf-tidy]
extends = common:esp32-idf
@@ -427,8 +378,6 @@ build_flags =
${common:esp32-idf.build_flags}
${flags:clangtidy.build_flags}
-DUSE_ESP32_VARIANT_ESP32S2
build_unflags =
${common.build_unflags}
;;;;;;;; ESP32-S3 ;;;;;;;;
@@ -439,8 +388,6 @@ build_flags =
${common:esp32-arduino.build_flags}
${flags:runtime.build_flags}
-DUSE_ESP32_VARIANT_ESP32S3
build_unflags =
${common.build_unflags}
[env:esp32s3-arduino-tidy]
extends = common:esp32-arduino
@@ -449,8 +396,6 @@ build_flags =
${common:esp32-arduino.build_flags}
${flags:clangtidy.build_flags}
-DUSE_ESP32_VARIANT_ESP32S3
build_unflags =
${common.build_unflags}
[env:esp32s3-idf]
extends = common:esp32-idf
@@ -460,8 +405,6 @@ build_flags =
${common:esp32-idf.build_flags}
${flags:runtime.build_flags}
-DUSE_ESP32_VARIANT_ESP32S3
build_unflags =
${common.build_unflags}
[env:esp32s3-idf-5_3]
extends = common:esp32-idf-5_3
@@ -471,8 +414,6 @@ build_flags =
${common:esp32-idf.build_flags}
${flags:runtime.build_flags}
-DUSE_ESP32_VARIANT_ESP32S3
build_unflags =
${common.build_unflags}
[env:esp32s3-idf-tidy]
extends = common:esp32-idf
@@ -482,8 +423,6 @@ build_flags =
${common:esp32-idf.build_flags}
${flags:clangtidy.build_flags}
-DUSE_ESP32_VARIANT_ESP32S3
build_unflags =
${common.build_unflags}
;;;;;;;; ESP32-P4 ;;;;;;;;
@@ -505,8 +444,6 @@ board = rpipico
build_flags =
${common:rp2040-arduino.build_flags}
${flags:runtime.build_flags}
build_unflags =
${common.build_unflags}
;;;;;;;; LibreTiny ;;;;;;;;
@@ -518,8 +455,6 @@ build_flags =
${flags:runtime.build_flags}
-DUSE_BK72XX
-DUSE_LIBRETINY_VARIANT_BK7231N
build_unflags =
${common.build_unflags}
[env:rtl87xxb-arduino]
extends = common:libretiny-arduino
@@ -529,8 +464,6 @@ build_flags =
${flags:runtime.build_flags}
-DUSE_RTL87XX
-DUSE_LIBRETINY_VARIANT_RTL8710B
build_unflags =
${common.build_unflags}
[env:rtl87xxc-arduino]
extends = common:libretiny-arduino
@@ -540,8 +473,6 @@ build_flags =
${flags:runtime.build_flags}
-DUSE_RTL87XX
-DUSE_LIBRETINY_VARIANT_RTL8720C
build_unflags =
${common.build_unflags}
[env:host]
extends = common
@@ -552,8 +483,6 @@ build_flags =
${common.build_flags}
-DUSE_HOST
-std=c++17
build_unflags =
${common.build_unflags}
;;;;;;;; nRF52 ;;;;;;;;
@@ -563,8 +492,6 @@ board = adafruit_feather_nrf52840
build_flags =
${common:nrf52-zephyr.build_flags}
${flags:runtime.build_flags}
build_unflags =
${common.build_unflags}
[env:nrf52-tidy]
extends = common:nrf52-zephyr
@@ -572,5 +499,3 @@ board = adafruit_feather_nrf52840
build_flags =
${common:nrf52-zephyr.build_flags}
${flags:clangtidy.build_flags}
build_unflags =
${common.build_unflags}

View File

@@ -8,7 +8,7 @@ pre-commit
pytest==8.4.0
pytest-cov==6.2.1
pytest-mock==3.14.1
pytest-asyncio==1.0.0
pytest-asyncio==0.26.0
pytest-xdist==3.7.0
asyncmock==0.4.2
hypothesis==6.92.1

View File

@@ -3,25 +3,37 @@
set -e
help() {
echo "Usage: $0 [-e <config|compile|clean>] [-c <string>] [-t <string>]" 1>&2
echo "Usage: $0 [-e <config|compile|clean>] [-c <string>] [-t <string>] [-j <number>] [-p <string>] [-f]" 1>&2
echo 1>&2
echo " - e - Parameter for esphome command. Default compile. Common alternative is config." 1>&2
echo " - c - Component folder name to test. Default *. E.g. '-c logger'." 1>&2
echo " - t - Target name to test. Put '-t list' to display all possibilities. E.g. '-t esp32-s2-idf-51'." 1>&2
echo " - j - Number of parallel jobs. Default is number of CPU cores." 1>&2
echo " - p - Platform filter. E.g. '-p esp32' to test only ESP32 platforms." 1>&2
echo " - f - Fail fast. Exit on first failure." 1>&2
echo " - b - Build cache directory. E.g. '-b /tmp/esphome_cache'." 1>&2
exit 1
}
# Parse parameter:
# - `e` - Parameter for `esphome` command. Default `compile`. Common alternative is `config`.
# - `c` - Component folder name to test. Default `*`.
esphome_command="compile"
target_component="*"
while getopts e:c:t: flag
num_jobs=$(nproc 2>/dev/null || sysctl -n hw.ncpu 2>/dev/null || echo 4)
platform_filter=""
fail_fast=false
build_cache_dir=""
while getopts e:c:t:j:p:b:fh flag
do
case $flag in
e) esphome_command=${OPTARG};;
c) target_component=${OPTARG};;
t) requested_target_platform=${OPTARG};;
j) num_jobs=${OPTARG};;
p) platform_filter=${OPTARG};;
f) fail_fast=true;;
b) build_cache_dir=${OPTARG};;
h) help;;
\?) help;;
esac
done
@@ -29,16 +41,66 @@ done
cd "$(dirname "$0")/.."
if ! [ -d "./tests/test_build_components/build" ]; then
mkdir ./tests/test_build_components/build
mkdir -p ./tests/test_build_components/build
fi
# Export build cache directory if specified
if [ -n "$build_cache_dir" ]; then
export PLATFORMIO_BUILD_CACHE_DIR="$build_cache_dir"
mkdir -p "$build_cache_dir"
echo "Using build cache directory: $build_cache_dir"
fi
# Track PIDs for parallel execution
pids=()
failed_builds=()
build_count=0
total_builds=0
# Throttle helper for the parallel build runner.
# Blocks until the number of live background builds tracked in the global
# `pids` array drops below $1 (max_jobs). Finished jobs are reaped; failures
# are recorded in the global `failed_builds` array using the matching entry
# of the global `build_info` array. When the global `fail_fast` is "true",
# a failure TERMs all remaining jobs and exits the script with status 1.
# Globals read/written: pids, build_info, failed_builds, fail_fast.
wait_for_jobs() {
local max_jobs=$1
# Poll until a slot frees up. Liveness is probed with `kill -0` so we only
# call `wait` on a job that has already exited (plain `wait` would block on
# an arbitrary job rather than the first one to finish).
while [ ${#pids[@]} -ge $max_jobs ]; do
for i in "${!pids[@]}"; do
# kill -0 sends no signal; it only tests whether the PID is still alive.
if ! kill -0 "${pids[$i]}" 2>/dev/null; then
# Job exited: reap it so its exit status becomes available via $?.
wait "${pids[$i]}"
exit_code=$?
if [ $exit_code -ne 0 ]; then
# Remember which component/test/platform combination failed.
failed_builds+=("${build_info[$i]}")
if [ "$fail_fast" = true ]; then
echo "Build failed, exiting due to fail-fast mode"
# Kill remaining jobs (best-effort; ignore already-dead PIDs).
for pid in "${pids[@]}"; do
kill -TERM "$pid" 2>/dev/null || true
done
exit 1
fi
fi
unset pids[$i]
unset build_info[$i]
# Reindex arrays: re-packing removes the hole left by `unset` so that
# ${!pids[@]} stays contiguous on the next scan and ${#pids[@]} shrinks.
pids=("${pids[@]}")
build_info=("${build_info[@]}")
break
fi
done
# Short sleep between polls to avoid a hot busy-wait.
sleep 0.1
done
}
start_esphome() {
if [ -n "$requested_target_platform" ] && [ "$requested_target_platform" != "$target_platform_with_version" ]; then
echo "Skipping $target_platform_with_version"
return
fi
# Apply platform filter if specified
if [ -n "$platform_filter" ] && [[ ! "$target_platform_with_version" =~ ^$platform_filter ]]; then
echo "Skipping $target_platform_with_version (filtered)"
return
fi
# create dynamic yaml file in `build` folder.
# `./tests/test_build_components/build/[target_component].[test_name].[target_platform_with_version].yaml`
component_test_file="./tests/test_build_components/build/$target_component.$test_name.$target_platform_with_version.yaml"
cp $target_platform_file $component_test_file
@@ -49,17 +111,79 @@ start_esphome() {
sed -i "s!\$component_test_file!../../.$f!g" $component_test_file
fi
# Start esphome process
echo "> [$target_component] [$test_name] [$target_platform_with_version]"
set -x
# TODO: Validate escape of Command line substitution value
python3 -m esphome -s component_name $target_component -s component_dir ../../components/$target_component -s test_name $test_name -s target_platform $target_platform $esphome_command $component_test_file
{ set +x; } 2>/dev/null
# Start esphome process in background
build_count=$((build_count + 1))
echo "> [$build_count/$total_builds] [$target_component] [$test_name] [$target_platform_with_version]"
(
# Add compile process limit for ESPHome internal parallelization
export ESPHOME_COMPILE_PROCESS_LIMIT=2
# For compilation, add a small random delay to reduce thundering herd effect
# This helps stagger the package installation requests
if [ "$esphome_command" = "compile" ]; then
sleep $((RANDOM % 5))
fi
python3 -m esphome -s component_name $target_component -s component_dir ../../components/$target_component -s test_name $test_name -s target_platform $target_platform $esphome_command $component_test_file
) &
local pid=$!
pids+=($pid)
build_info+=("$target_component/$test_name/$target_platform_with_version")
# Wait if we've reached the job limit
wait_for_jobs $num_jobs
}
# Find all test yaml files.
# - `./tests/components/[target_component]/[test_name].[target_platform].yaml`
# - `./tests/components/[target_component]/[test_name].all.yaml`
# First pass: count total builds.
# Mirrors the filtering logic of the execution pass below (platform filter,
# requested target platform) purely to compute `total_builds`, so progress
# output can show "[n/total]". Keep the two passes in sync when editing.
echo "Calculating total number of builds..."
for f in ./tests/components/$target_component/*.*.yaml; do
# Guard against an unmatched glob expanding to the literal pattern.
[ -f "$f" ] || continue
# Path layout: ./tests/components/<component>/<test>.<platform>.yaml
# so after splitting on '/': [3] = component, [4] = file name.
IFS='/' read -r -a folder_name <<< "$f"
IFS='.' read -r -a file_name <<< "${folder_name[4]}"
target_platform="${file_name[1]}"
file_name_parts=${#file_name[@]}
# "all" platform (or a two-part name with no platform) means: count one
# build per available base platform file.
if [ "$target_platform" = "all" ] || [ $file_name_parts = 2 ]; then
for target_platform_file in ./tests/test_build_components/build_components_base.*.yaml; do
# Base file layout: ./tests/test_build_components/build_components_base.<platform>.yaml
# After splitting on '/': [3] = file name.
IFS='/' read -r -a folder_name <<< "$target_platform_file"
IFS='.' read -r -a file_name <<< "${folder_name[3]}"
target_platform="${file_name[1]}"
# Offset 52 == length of the fixed prefix
# "./tests/test_build_components/build_components_base." — this strips
# it, leaving "<platform-with-version>.yaml"; %.* then drops ".yaml".
target_platform_with_version=${target_platform_file:52}
target_platform_with_version=${target_platform_with_version%.*}
# Same skip conditions as start_esphome: -p prefix filter first,
if [ -n "$platform_filter" ] && [[ ! "$target_platform_with_version" =~ ^$platform_filter ]]; then
continue
fi
# then exact -t target match.
if [ -n "$requested_target_platform" ] && [ "$requested_target_platform" != "$target_platform_with_version" ]; then
continue
fi
total_builds=$((total_builds + 1))
done
else
# Specific platform: count one build per versioned base file for it.
target_platform_file="./tests/test_build_components/build_components_base.$target_platform.yaml"
if [ -f "$target_platform_file" ]; then
for target_platform_file in ./tests/test_build_components/build_components_base.$target_platform*.yaml; do
target_platform_with_version=${target_platform_file:52}
target_platform_with_version=${target_platform_with_version%.*}
if [ -n "$platform_filter" ] && [[ ! "$target_platform_with_version" =~ ^$platform_filter ]]; then
continue
fi
if [ -n "$requested_target_platform" ] && [ "$requested_target_platform" != "$target_platform_with_version" ]; then
continue
fi
total_builds=$((total_builds + 1))
done
fi
fi
done
echo "Total builds to execute: $total_builds with $num_jobs parallel jobs"
echo
# Second pass: execute builds
for f in ./tests/components/$target_component/*.*.yaml; do
[ -f "$f" ] || continue
IFS='/' read -r -a folder_name <<< "$f"
@@ -72,22 +196,21 @@ for f in ./tests/components/$target_component/*.*.yaml; do
if [ "$target_platform" = "all" ] || [ $file_name_parts = 2 ]; then
# Test has *not* defined a specific target platform. Need to run tests for all possible target platforms.
for target_platform_file in ./tests/test_build_components/build_components_base.*.yaml; do
IFS='/' read -r -a folder_name <<< "$target_platform_file"
IFS='.' read -r -a file_name <<< "${folder_name[3]}"
target_platform="${file_name[1]}"
target_platform_with_version=${target_platform_file:52}
target_platform_with_version=${target_platform_with_version%.*}
start_esphome
done
else
# Test has defined a specific target platform.
# Validate we have a base test yaml for selected platform.
# The target_platform is sourced from the following location.
# 1. `./tests/test_build_components/build_components_base.[target_platform].yaml`
# 2. `./tests/test_build_components/build_components_base.[target_platform]-ard.yaml`
target_platform_file="./tests/test_build_components/build_components_base.$target_platform.yaml"
if ! [ -f "$target_platform_file" ]; then
echo "No base test file [./tests/test_build_components/build_components_base.$target_platform.yaml] for component test [$f] found."
@@ -104,3 +227,23 @@ for f in ./tests/components/$target_component/*.*.yaml; do
done
fi
done
# Wait for all remaining jobs: calling wait_for_jobs with max_jobs=1 spins
# until fewer than 1 job remains alive, i.e. drains the whole pool.
wait_for_jobs 1
echo
echo "============================================"
echo "Build Summary:"
echo "Total builds: $total_builds"
echo "Failed builds: ${#failed_builds[@]}"
# Non-empty failed_builds => list each failed component/test/platform and
# exit non-zero so CI marks the step as failed.
if [ ${#failed_builds[@]} -gt 0 ]; then
echo
echo "Failed builds:"
for build in "${failed_builds[@]}"; do
echo " - $build"
done
exit 1
else
echo "All builds completed successfully!"
fi