Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion homeassistant/components/anthropic/manifest.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"domain": "anthropic",
"name": "Anthropic Conversation",
"name": "Anthropic",
"after_dependencies": ["assist_pipeline", "intent"],
"codeowners": ["@Shulyaka"],
"config_flow": true,
Expand Down
39 changes: 27 additions & 12 deletions homeassistant/components/aws_s3/backup.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@
from homeassistant.core import HomeAssistant, callback

from . import S3ConfigEntry
from .const import CONF_BUCKET, DATA_BACKUP_AGENT_LISTENERS, DOMAIN
from .const import CONF_BUCKET, CONF_PREFIX, DATA_BACKUP_AGENT_LISTENERS, DOMAIN
from .helpers import async_list_backups_from_s3

_LOGGER = logging.getLogger(__name__)
Expand Down Expand Up @@ -100,6 +100,13 @@ def __init__(self, hass: HomeAssistant, entry: S3ConfigEntry) -> None:
self.unique_id = entry.entry_id
self._backup_cache: dict[str, AgentBackup] = {}
self._cache_expiration = time()
self._prefix: str = entry.data.get(CONF_PREFIX, "")

def _with_prefix(self, key: str) -> str:
"""Add prefix to a key if configured."""
if not self._prefix:
return key
return f"{self._prefix}/{key}"

@handle_boto_errors
async def async_download_backup(
Expand All @@ -115,7 +122,9 @@ async def async_download_backup(
backup = await self._find_backup_by_id(backup_id)
tar_filename, _ = suggested_filenames(backup)

response = await self._client.get_object(Bucket=self._bucket, Key=tar_filename)
response = await self._client.get_object(
Bucket=self._bucket, Key=self._with_prefix(tar_filename)
)
return response["Body"].iter_chunks()

async def async_upload_backup(
Expand All @@ -142,7 +151,7 @@ async def async_upload_backup(
metadata_content = json.dumps(backup.as_dict())
await self._client.put_object(
Bucket=self._bucket,
Key=metadata_filename,
Key=self._with_prefix(metadata_filename),
Body=metadata_content,
)
except BotoCoreError as err:
Expand All @@ -169,7 +178,7 @@ async def _upload_simple(

await self._client.put_object(
Bucket=self._bucket,
Key=tar_filename,
Key=self._with_prefix(tar_filename),
Body=bytes(file_data),
)

Expand All @@ -186,7 +195,7 @@ async def _upload_multipart(
_LOGGER.debug("Starting multipart upload for %s", tar_filename)
multipart_upload = await self._client.create_multipart_upload(
Bucket=self._bucket,
Key=tar_filename,
Key=self._with_prefix(tar_filename),
)
upload_id = multipart_upload["UploadId"]
try:
Expand Down Expand Up @@ -216,7 +225,7 @@ async def _upload_multipart(
)
part = await cast(Any, self._client).upload_part(
Bucket=self._bucket,
Key=tar_filename,
Key=self._with_prefix(tar_filename),
PartNumber=part_number,
UploadId=upload_id,
Body=part_data.tobytes(),
Expand Down Expand Up @@ -244,7 +253,7 @@ async def _upload_multipart(
)
part = await cast(Any, self._client).upload_part(
Bucket=self._bucket,
Key=tar_filename,
Key=self._with_prefix(tar_filename),
PartNumber=part_number,
UploadId=upload_id,
Body=remaining_data.tobytes(),
Expand All @@ -253,7 +262,7 @@ async def _upload_multipart(

await cast(Any, self._client).complete_multipart_upload(
Bucket=self._bucket,
Key=tar_filename,
Key=self._with_prefix(tar_filename),
UploadId=upload_id,
MultipartUpload={"Parts": parts},
)
Expand All @@ -262,7 +271,7 @@ async def _upload_multipart(
try:
await self._client.abort_multipart_upload(
Bucket=self._bucket,
Key=tar_filename,
Key=self._with_prefix(tar_filename),
UploadId=upload_id,
)
except BotoCoreError:
Expand All @@ -283,8 +292,12 @@ async def async_delete_backup(
tar_filename, metadata_filename = suggested_filenames(backup)

# Delete both the backup file and its metadata file
await self._client.delete_object(Bucket=self._bucket, Key=tar_filename)
await self._client.delete_object(Bucket=self._bucket, Key=metadata_filename)
await self._client.delete_object(
Bucket=self._bucket, Key=self._with_prefix(tar_filename)
)
await self._client.delete_object(
Bucket=self._bucket, Key=self._with_prefix(metadata_filename)
)

# Reset cache after successful deletion
self._cache_expiration = time()
Expand Down Expand Up @@ -317,7 +330,9 @@ async def _list_backups(self) -> dict[str, AgentBackup]:
if time() <= self._cache_expiration:
return self._backup_cache

backups_list = await async_list_backups_from_s3(self._client, self._bucket)
backups_list = await async_list_backups_from_s3(
self._client, self._bucket, self._prefix
)
self._backup_cache = {b.backup_id: b for b in backups_list}
self._cache_expiration = time() + CACHE_TTL

Expand Down
34 changes: 25 additions & 9 deletions homeassistant/components/aws_s3/config_flow.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
CONF_ACCESS_KEY_ID,
CONF_BUCKET,
CONF_ENDPOINT_URL,
CONF_PREFIX,
CONF_SECRET_ACCESS_KEY,
DEFAULT_ENDPOINT_URL,
DESCRIPTION_AWS_S3_DOCS_URL,
Expand All @@ -39,6 +40,7 @@
vol.Required(CONF_ENDPOINT_URL, default=DEFAULT_ENDPOINT_URL): TextSelector(
config=TextSelectorConfig(type=TextSelectorType.URL)
),
vol.Optional(CONF_PREFIX, default=""): cv.string,
}
)

Expand All @@ -53,12 +55,17 @@ async def async_step_user(
errors: dict[str, str] = {}

if user_input is not None:
self._async_abort_entries_match(
{
CONF_BUCKET: user_input[CONF_BUCKET],
CONF_ENDPOINT_URL: user_input[CONF_ENDPOINT_URL],
}
)
normalized_prefix = user_input.get(CONF_PREFIX, "").strip("/")
# Check for existing entries, treating missing prefix as empty
for entry in self._async_current_entries(include_ignore=False):
entry_prefix = (entry.data.get(CONF_PREFIX) or "").strip("/")
if (
entry.data.get(CONF_BUCKET) == user_input[CONF_BUCKET]
and entry.data.get(CONF_ENDPOINT_URL)
== user_input[CONF_ENDPOINT_URL]
and entry_prefix == normalized_prefix
):
return self.async_abort(reason="already_configured")

hostname = urlparse(user_input[CONF_ENDPOINT_URL]).hostname
if not hostname or not hostname.endswith(AWS_DOMAIN):
Expand All @@ -83,9 +90,18 @@ async def async_step_user(
except ConnectionError:
errors[CONF_ENDPOINT_URL] = "cannot_connect"
else:
return self.async_create_entry(
title=user_input[CONF_BUCKET], data=user_input
)
data = dict(user_input)
if not normalized_prefix:
# Do not persist empty optional values
data.pop(CONF_PREFIX, None)
else:
data[CONF_PREFIX] = normalized_prefix

title = user_input[CONF_BUCKET]
if normalized_prefix:
title = f"{title} - {normalized_prefix}"

return self.async_create_entry(title=title, data=data)

return self.async_show_form(
step_id="user",
Expand Down
1 change: 1 addition & 0 deletions homeassistant/components/aws_s3/const.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
CONF_SECRET_ACCESS_KEY = "secret_access_key"
CONF_ENDPOINT_URL = "endpoint_url"
CONF_BUCKET = "bucket"
CONF_PREFIX = "prefix"

AWS_DOMAIN = "amazonaws.com"
DEFAULT_ENDPOINT_URL = f"https://s3.eu-central-1.{AWS_DOMAIN}/"
Expand Down
7 changes: 5 additions & 2 deletions homeassistant/components/aws_s3/coordinator.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import CONF_BUCKET, DOMAIN
from .const import CONF_BUCKET, CONF_PREFIX, DOMAIN
from .helpers import async_list_backups_from_s3

SCAN_INTERVAL = timedelta(hours=6)
Expand Down Expand Up @@ -53,11 +53,14 @@ def __init__(
)
self.client = client
self._bucket: str = entry.data[CONF_BUCKET]
self._prefix: str = entry.data.get(CONF_PREFIX, "")

async def _async_update_data(self) -> SensorData:
"""Fetch data from AWS S3."""
try:
backups = await async_list_backups_from_s3(self.client, self._bucket)
backups = await async_list_backups_from_s3(
self.client, self._bucket, self._prefix
)
except BotoCoreError as error:
raise UpdateFailed(
translation_domain=DOMAIN,
Expand Down
8 changes: 7 additions & 1 deletion homeassistant/components/aws_s3/helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,11 +17,17 @@
async def async_list_backups_from_s3(
client: S3Client,
bucket: str,
prefix: str,
) -> list[AgentBackup]:
"""List backups from an S3 bucket by reading metadata files."""
paginator = client.get_paginator("list_objects_v2")
metadata_files: list[dict[str, Any]] = []
async for page in paginator.paginate(Bucket=bucket):

list_kwargs: dict[str, Any] = {"Bucket": bucket}
if prefix:
list_kwargs["Prefix"] = prefix + "/"

async for page in paginator.paginate(**list_kwargs):
metadata_files.extend(
obj
for obj in page.get("Contents", [])
Expand Down
4 changes: 3 additions & 1 deletion homeassistant/components/aws_s3/quality_scale.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,9 @@ rules:
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: done
unique-config-entry:
status: exempt
comment: Hassfest does not recognize the duplicate prevention logic. Duplicate entries are prevented by checking bucket, endpoint URL, and prefix in the config flow.

# Silver
action-exceptions:
Expand Down
2 changes: 2 additions & 0 deletions homeassistant/components/aws_s3/strings.json
Original file line number Diff line number Diff line change
Expand Up @@ -15,12 +15,14 @@
"access_key_id": "Access key ID",
"bucket": "Bucket name",
"endpoint_url": "Endpoint URL",
"prefix": "Prefix",
"secret_access_key": "Secret access key"
},
"data_description": {
"access_key_id": "Access key ID to connect to AWS S3 API",
"bucket": "Bucket must already exist and be writable by the provided credentials.",
"endpoint_url": "Endpoint URL provided to [Boto3 Session]({boto3_docs_url}). Region-specific [AWS S3 endpoints]({aws_s3_docs_url}) are available in their docs.",
"prefix": "Folder or prefix to store backups in, for example `backups`",
"secret_access_key": "Secret access key to connect to AWS S3 API"
},
"title": "Add AWS S3 bucket"
Expand Down
54 changes: 54 additions & 0 deletions homeassistant/components/hassio/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
from homeassistant.components.http import StaticPathConfig
from homeassistant.config_entries import SOURCE_SYSTEM, ConfigEntry
from homeassistant.const import (
ATTR_DEVICE_ID,
ATTR_NAME,
EVENT_CORE_CONFIG_UPDATE,
HASSIO_USER_NAME,
Expand All @@ -34,11 +35,13 @@
async_get_hass_or_none,
callback,
)
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers import (
config_validation as cv,
device_registry as dr,
discovery_flow,
issue_registry as ir,
selector,
)
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.event import async_call_later
Expand Down Expand Up @@ -92,6 +95,7 @@
DATA_SUPERVISOR_INFO,
DOMAIN,
HASSIO_UPDATE_INTERVAL,
SupervisorEntityModel,
)
from .coordinator import (
HassioDataUpdateCoordinator,
Expand Down Expand Up @@ -147,6 +151,7 @@
SERVICE_BACKUP_PARTIAL = "backup_partial"
SERVICE_RESTORE_FULL = "restore_full"
SERVICE_RESTORE_PARTIAL = "restore_partial"
SERVICE_MOUNT_RELOAD = "mount_reload"

VALID_ADDON_SLUG = vol.Match(re.compile(r"^[-_.A-Za-z0-9]+$"))

Expand Down Expand Up @@ -229,6 +234,19 @@ def valid_addon(value: Any) -> str:
}
)

# Service schema for `hassio.mount_reload`: requires a device_id that the
# device selector restricts to hassio-integration devices whose model marks
# them as Supervisor mounts. Validation against an actual mount device is
# done again at call time in the service handler.
SCHEMA_MOUNT_RELOAD = vol.Schema(
{
vol.Required(ATTR_DEVICE_ID): selector.DeviceSelector(
selector.DeviceSelectorConfig(
filter=selector.DeviceFilterSelectorConfig(
integration=DOMAIN,
model=SupervisorEntityModel.MOUNT,
)
)
)
}
)


def _is_32_bit() -> bool:
size = struct.calcsize("P")
Expand Down Expand Up @@ -444,6 +462,42 @@ async def async_service_handler(service: ServiceCall) -> None:
DOMAIN, service, async_service_handler, schema=settings.schema
)

# Device registry lookup is resolved once here and captured by the closure.
dev_reg = dr.async_get(hass)

async def async_mount_reload(service: ServiceCall) -> None:
"""Reload a Supervisor-managed mount identified by its device ID.

Raises ServiceValidationError when the device ID is unknown or does
not belong to a current hassio mount device, and HomeAssistantError
when the Supervisor reload call itself fails.
"""
coordinator: HassioDataUpdateCoordinator | None = None

# Unknown device ID: the registry has no entry for it at all.
if (device := dev_reg.async_get(service.data[ATTR_DEVICE_ID])) is None:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="mount_reload_unknown_device_id",
)

# The device must be a mount device (name present, mount model) that is
# owned by the currently loaded hassio config entry; anything else is
# rejected as an invalid target. The walrus binds the coordinator so the
# config-entry membership check below can use its entry_id.
if (
device.name is None
or device.model != SupervisorEntityModel.MOUNT
or (coordinator := hass.data.get(ADDONS_COORDINATOR)) is None
or coordinator.entry_id not in device.config_entries
):
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="mount_reload_invalid_device",
)

# Ask the Supervisor to reload the mount; the device name is the mount
# name. Supervisor errors are surfaced as a translated HomeAssistantError
# with the original error preserved as the cause.
try:
await supervisor_client.mounts.reload_mount(device.name)
except SupervisorError as error:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="mount_reload_error",
translation_placeholders={"name": device.name, "error": str(error)},
) from error

# Register the handler under the hassio domain with its device-selector
# schema so the UI offers only mount devices.
hass.services.async_register(
DOMAIN, SERVICE_MOUNT_RELOAD, async_mount_reload, SCHEMA_MOUNT_RELOAD
)

async def update_info_data(_: datetime | None = None) -> None:
"""Update last available supervisor information."""
supervisor_client = get_supervisor_client(hass)
Expand Down
3 changes: 3 additions & 0 deletions homeassistant/components/hassio/icons.json
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,9 @@
"host_shutdown": {
"service": "mdi:power"
},
"mount_reload": {
"service": "mdi:reload"
},
"restore_full": {
"service": "mdi:backup-restore"
},
Expand Down
10 changes: 10 additions & 0 deletions homeassistant/components/hassio/services.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -165,3 +165,13 @@ restore_partial:
example: "password"
selector:
text:

mount_reload:
fields:
device_id:
required: true
selector:
device:
filter:
integration: hassio
model: Home Assistant Mount
Loading
Loading