Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fix I/O in event loop. #104

Merged
merged 1 commit into from
Jun 16, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Fix I/O in event loop.
  • Loading branch information
twrecked committed Jun 16, 2024
commit fd819cb2128e06097564d7f110fb5b643f7e8b59
2 changes: 2 additions & 0 deletions changelog
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
0.9.0b10:
Fix IO in event loop issues
0.9.0a9:
I forgot to bump the revision
Fix set availability service
Expand Down
4 changes: 2 additions & 2 deletions custom_components/virtual/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -113,7 +113,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
# Get the config.
_LOGGER.debug(f"creating new cfg")
vcfg = BlendedCfg(hass, entry.data)
vcfg.load()
await vcfg.async_load()

# create the devices.
_LOGGER.debug("creating the devices")
Expand Down Expand Up @@ -157,7 +157,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
unload_ok = await hass.config_entries.async_unload_platforms(entry, VIRTUAL_PLATFORMS)
if unload_ok:
bcfg = BlendedCfg(hass, entry.data)
bcfg.delete()
await bcfg.async_delete()
hass.data[COMPONENT_DOMAIN].pop(entry.data[ATTR_GROUP_NAME])
# _LOGGER.debug(f"after hass={hass.data[COMPONENT_DOMAIN]}")

Expand Down
212 changes: 107 additions & 105 deletions custom_components/virtual/cfg.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,10 +14,10 @@
layout.
"""

import aiofiles
import copy
import logging
import json
import threading
import voluptuous as vol
import uuid
from datetime import timedelta
Expand All @@ -30,7 +30,7 @@
)
from homeassistant.helpers import config_validation as cv
from homeassistant.util import slugify
from homeassistant.util.yaml import load_yaml, save_yaml
from homeassistant.util.yaml import parse_yaml, dump

from .const import *
from .entity import virtual_schema
Expand All @@ -49,8 +49,6 @@
vol.Optional(CONF_UNIT_OF_MEASUREMENT, default=""): cv.string,
}))

DB_LOCK = threading.Lock()


def _fix_value(value):
""" If needed, convert value into a type that can be stored in yaml.
Expand All @@ -60,96 +58,110 @@ def _fix_value(value):
return value


def _load_meta_data(hass, group_name: str):
async def _async_load_json(file_name):
    """Best-effort async read of a JSON file.

    Returns the parsed contents, or an empty dict when the file is
    missing or unreadable — callers treat that as "no data yet".
    """
    _LOGGER.debug("_async_load_json: reading %s", file_name)
    try:
        async with aiofiles.open(file_name, 'r') as meta_file:
            contents = await meta_file.read()
        return json.loads(contents)
    except Exception as e:
        # Deliberate best-effort: log why and fall back to empty.
        _LOGGER.debug("_async_load_json: failed to read %s: %s", file_name, str(e))
        return {}


async def _async_save_json(file_name, data):
    """Best-effort async write of `data` (as JSON) to `file_name`.

    Serializes BEFORE opening the file so a serialization error cannot
    truncate an existing file; failures are logged and otherwise ignored.
    """
    _LOGGER.debug("_async_save_json: writing %s", file_name)
    try:
        contents = json.dumps(data, indent=4)
        async with aiofiles.open(file_name, 'w') as meta_file:
            await meta_file.write(contents)
    except Exception as e:
        _LOGGER.debug("_async_save_json: failed to write %s: %s", file_name, str(e))


async def _async_load_yaml(file_name):
    """Best-effort async read of a YAML file.

    Returns the parsed contents, or an empty dict when the file is
    missing or unreadable.
    """
    _LOGGER.debug("_async_load_yaml: reading %s", file_name)
    try:
        async with aiofiles.open(file_name, 'r') as meta_file:
            contents = await meta_file.read()
        return parse_yaml(contents)
    except Exception as e:
        # Deliberate best-effort: log why and fall back to empty.
        _LOGGER.debug("_async_load_yaml: failed to read %s: %s", file_name, str(e))
        return {}


async def _async_save_yaml(file_name, data):
    """Best-effort async write of `data` (as YAML) to `file_name`.

    Serializes BEFORE opening the file so a dump error cannot truncate
    an existing file; failures are logged and otherwise ignored.
    """
    _LOGGER.debug("_async_save_yaml: writing %s", file_name)
    try:
        contents = dump(data)
        async with aiofiles.open(file_name, 'w') as meta_file:
            await meta_file.write(contents)
    except Exception as e:
        _LOGGER.debug("_async_save_yaml: failed to write %s: %s", file_name, str(e))


async def _load_meta_data(hass, group_name: str):
    """Read in meta data for a particular group.

    Loads the component-wide meta file and returns just this group's
    device entries (empty dict if the file or group is absent).
    """
    # NOTE(review): the scraped diff interleaved the removed synchronous
    # body here; this is the added async implementation only.
    data = await _async_load_json(default_meta_file(hass))
    return data.get(ATTR_DEVICES, {}).get(group_name, {})


def _save_meta_data(hass, group_name, meta_data):
async def _save_meta_data(hass, group_name, meta_data):
    """Save meta data for a particular group name.

    Read-modify-write of the shared meta file: only this group's entry
    is replaced, other groups are preserved.
    """
    # Read in current meta data.
    devices = await _async_load_json(default_meta_file(hass))
    devices = devices.get(ATTR_DEVICES, {})

    # Update (or add) the group piece.
    _LOGGER.debug(f"meta before {devices}")
    devices.update({
        group_name: meta_data
    })
    _LOGGER.debug(f"meta after {devices}")

    # Write it back out.
    await _async_save_json(default_meta_file(hass), {
        ATTR_VERSION: 1,
        ATTR_DEVICES: devices
    })


async def _delete_meta_data(hass, group_name):
    """Delete meta data for a particular group name.

    Read-modify-write of the shared meta file: only this group's entry
    is removed, other groups are preserved.
    """
    # Read in current meta data.
    devices = await _async_load_json(default_meta_file(hass))
    devices = devices.get(ATTR_DEVICES, {})

    # Delete the group piece. Tolerate an already-missing group — a
    # bare pop() would raise KeyError if the meta file was lost.
    _LOGGER.debug(f"meta before {devices}")
    devices.pop(group_name, None)
    _LOGGER.debug(f"meta after {devices}")

    # Write it back out.
    await _async_save_json(default_meta_file(hass), {
        ATTR_VERSION: 1,
        ATTR_DEVICES: devices
    })


async def _save_user_data(file_name, devices):
    """Write the user-visible device configuration out as YAML."""
    payload = {
        ATTR_VERSION: 1,
        ATTR_DEVICES: devices,
    }
    await _async_save_yaml(file_name, payload)

def _load_user_data(file_name):
entities = {}
try:
entities = load_yaml(file_name).get(ATTR_DEVICES, [])
except Exception as e:
_LOGGER.error(f"failed to read virtual file {str(e)}")
return entities

async def _load_user_data(file_name):
    """Read the user-visible device configuration from a YAML file."""
    loaded = await _async_load_yaml(file_name)
    return loaded.get(ATTR_DEVICES, {})


def _fix_config(config):
Expand Down Expand Up @@ -258,26 +270,16 @@ def __init__(self, hass, flow_data):
self._hass = hass
self._group_name = flow_data[ATTR_GROUP_NAME]
self._file_name = flow_data[ATTR_FILE_NAME]
self._changed: bool = False

self._meta_data = {}
self._orphaned_entities = {}
self._devices = []
self._entities = {}

def _load_meta_data(self):
return _load_meta_data(self._hass, self._group_name)

def _save_meta_data(self):
_save_meta_data(self._hass, self._group_name, self._meta_data)
self._changed = False

def _load_user_data(self):
return _load_user_data(self._file_name)

def load(self):
meta_data = self._load_meta_data()
devices = self._load_user_data()
async def async_load(self):
meta_data = await _load_meta_data(self._hass, self._group_name)
devices = await _load_user_data(self._file_name)
changed = False

_LOGGER.debug(f"loaded-meta-data={meta_data}")
_LOGGER.debug(f"loaded-devices={devices}")
Expand Down Expand Up @@ -312,7 +314,7 @@ def load(self):
ATTR_UNIQUE_ID: unique_id,
ATTR_ENTITY_ID: _make_entity_id(platform, name)
}})
self._changed = True
changed = True

# Now copy over the entity id of the device. Not having this is a
# bug.
Expand Down Expand Up @@ -347,20 +349,20 @@ def load(self):
self._orphaned_entities.update({
values[ATTR_UNIQUE_ID]: values
})
self._changed = True
changed = True

# Make sure changes are kept.
if self._changed:
self._save_meta_data()
if changed:
await _save_meta_data(self._hass, self._group_name, self._meta_data)

_LOGGER.debug(f"meta-data={self._meta_data}")
_LOGGER.debug(f"devices={self._devices}")
_LOGGER.debug(f"entities={self._entities}")
_LOGGER.debug(f"orphaned-entities={self._orphaned_entities}")

def delete(self):
async def async_delete(self):
    """Delete this group's entry from the shared meta data store.

    Async replacement for the old blocking delete(); safe to call
    from the event loop.
    """
    _LOGGER.debug(f"deleting {self._group_name}")
    await _delete_meta_data(self._hass, self._group_name)

@property
def devices(self):
Expand Down Expand Up @@ -392,7 +394,7 @@ class UpgradeCfg(object):
"""

@staticmethod
def import_yaml(hass, config):
async def async_import_yaml(hass, config):
""" Take the current virtual config and make the new yaml file.

Virtual needs a lot of fine tuning so rather than get rid of the
Expand Down Expand Up @@ -442,8 +444,8 @@ def import_yaml(hass, config):

_LOGGER.debug(f"devices-meta-data={devices_meta_data}")

_save_user_data(default_config_file(hass), devices)
_save_meta_data(hass, IMPORTED_GROUP_NAME, devices_meta_data)
await _save_user_data(default_config_file(hass), devices)
await _save_meta_data(hass, IMPORTED_GROUP_NAME, devices_meta_data)

@staticmethod
def create_flow_data(hass, _config):
Expand Down
2 changes: 1 addition & 1 deletion custom_components/virtual/config_flow.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@ async def async_step_import(self, import_data):
"""Import momentary config from configuration.yaml."""

_LOGGER.debug(f"importing aarlo YAML {import_data}")
UpgradeCfg.import_yaml(self.hass, import_data)
await UpgradeCfg.async_import_yaml(self.hass, import_data)
data = UpgradeCfg.create_flow_data(self.hass, import_data)

return self.async_create_entry(
Expand Down
Loading