diff --git a/custom_components/enedis/__init__.py b/custom_components/enedis/__init__.py index e7021de..38402b2 100644 --- a/custom_components/enedis/__init__.py +++ b/custom_components/enedis/__init__.py @@ -3,35 +3,86 @@ import logging +import homeassistant.helpers.config_validation as cv import voluptuous as vol + from homeassistant.config_entries import ConfigEntry -from homeassistant.core import HomeAssistant +from homeassistant.const import CONF_AFTER, CONF_BEFORE +from homeassistant.core import HomeAssistant, ServiceCall -from .const import DOMAIN, PLATFORMS, RELOAD_HISTORY +from .const import ( + CONF_POWER_MODE, + CONF_STATISTIC_ID, + DOMAIN, + PLATFORMS, + FETCH_SERVICE, + CLEAR_SERVICE, + CONF_RULES, + CONF_RULE_START_TIME, + CONF_RULE_END_TIME, + CONF_ENTRY, +) from .coordinator import EnedisDataUpdateCoordinator +from .helpers import async_service_load_datas_history, async_service_datas_clear _LOGGER = logging.getLogger(__name__) +HISTORY_SERVICE_SCHEMA = vol.Schema( + { + vol.Optional(CONF_ENTRY): str, + vol.Optional(CONF_POWER_MODE): str, + vol.Optional(CONF_AFTER): cv.date, + vol.Optional(CONF_BEFORE): cv.date, + } +) +CLEAR_SERVICE_SCHEMA = vol.Schema( + { + vol.Required(CONF_STATISTIC_ID): str, + } +) + async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Enedis as config entry.""" hass.data.setdefault(DOMAIN, {}) + if ( + entry.options.get("peak_cost") is not None + and entry.options.get("offpeak_cost") is not None + ): + options = dict(entry.options).copy() + for k, rule in entry.options.get(CONF_RULES, {}).items(): + rule[ + CONF_RULE_START_TIME + ] = f'{rule[CONF_RULE_START_TIME].replace("H", ":")}:00' + rule[ + CONF_RULE_END_TIME + ] = f'{rule[CONF_RULE_END_TIME].replace("H", ":")}:00' + options[CONF_RULES] = entry.options.get(CONF_RULES) + options.pop("peak_cost") + options.pop("offpeak_cost") + hass.config_entries.async_update_entry(entry=entry, options=options) + coordinator = 
EnedisDataUpdateCoordinator(hass, entry) await coordinator.async_config_entry_first_refresh() - if coordinator.data is None: - return False - entry.async_on_unload(entry.add_update_listener(_async_update_listener)) hass.data[DOMAIN][entry.entry_id] = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - async def async_reload_history(call) -> None: - await coordinator.async_load_datas_history(call) + async def async_reload_history(call: ServiceCall) -> None: + await async_service_load_datas_history(hass, coordinator.api, call) + + async def async_clear(call: ServiceCall) -> None: + await async_service_datas_clear(hass, call) hass.services.async_register( - DOMAIN, RELOAD_HISTORY, async_reload_history, schema=vol.Schema({}) + DOMAIN, FETCH_SERVICE, async_reload_history, schema=HISTORY_SERVICE_SCHEMA ) + hass.services.async_register( + DOMAIN, CLEAR_SERVICE, async_clear, schema=CLEAR_SERVICE_SCHEMA + ) + + entry.async_on_unload(entry.add_update_listener(_async_update_listener)) return True diff --git a/custom_components/enedis/config_flow.py b/custom_components/enedis/config_flow.py index b56846a..5b766e4 100644 --- a/custom_components/enedis/config_flow.py +++ b/custom_components/enedis/config_flow.py @@ -1,26 +1,63 @@ """Config flow to configure integration.""" import logging +from typing import Any -import voluptuous as vol import homeassistant.helpers.config_validation as cv +import voluptuous as vol +from enedisgatewaypy import EnedisByPDL, EnedisException from homeassistant.config_entries import ConfigEntry, ConfigFlow, OptionsFlow -from homeassistant.const import CONF_TOKEN, CONF_SOURCE +from homeassistant.const import CONF_TOKEN from homeassistant.core import callback +from homeassistant.data_entry_flow import FlowResult from homeassistant.helpers.aiohttp_client import async_create_clientsession +from homeassistant.helpers.selector import ( + SelectOptionDict, + SelectSelector, + SelectSelectorConfig, + SelectSelectorMode, + 
TimeSelector, + TimeSelectorConfig, +) -from .const import CONF_PDL, DOMAIN, CONF_DETAIL -from .enedisgateway import ( - EnedisGateway, - EnedisGatewayException, - HP, - HC, - DEFAULT_HC_PRICE, - DEFAULT_HP_PRICE, - CONSUMPTION, - PRODUCTION, +from .const import ( + CONF_CONSUMTPION, + CONF_PDL, + CONF_PRODUCTION, + CONF_RULE_DELETE, + CONF_RULE_END_TIME, + CONF_RULE_ID, + CONF_RULE_NAME, + CONF_RULE_NEW_ID, + CONF_RULE_PRICE, + CONF_RULE_START_TIME, + CONF_RULES, + CONSUMPTION_DAILY, + CONSUMPTION_DETAIL, + COST_CONSUMTPION, + COST_PRODUCTION, + DEFAULT_CC_PRICE, + DEFAULT_PC_PRICE, + DOMAIN, + PRODUCTION_DAILY, + PRODUCTION_DETAIL, ) -DATA_SCHEMA = vol.Schema({vol.Required(CONF_PDL): str, vol.Required(CONF_TOKEN): str}) +PRODUCTION_CHOICE = [ + SelectOptionDict(value=PRODUCTION_DAILY, label="Journalier"), + SelectOptionDict(value=PRODUCTION_DETAIL, label="Détaillé"), +] + +CONSUMPTION_CHOICE = [ + SelectOptionDict(value=CONSUMPTION_DAILY, label="Journalier"), + SelectOptionDict(value=CONSUMPTION_DETAIL, label="Détaillé"), +] + +DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_PDL): str, + vol.Required(CONF_TOKEN): str, + } +) _LOGGER = logging.getLogger(__name__) @@ -39,22 +76,28 @@ def async_get_options_flow(config_entry): async def async_step_user(self, user_input=None): """Handle a flow initialized by the user.""" errors = {} - options = {CONF_SOURCE: CONSUMPTION, HP: DEFAULT_HP_PRICE, CONF_DETAIL: False} if user_input is not None: + self._async_abort_entries_match({CONF_PDL: user_input[CONF_PDL]}) + api = EnedisByPDL( + token=user_input[CONF_TOKEN], + session=async_create_clientsession(self.hass), + timeout=30, + ) try: - await self.async_set_unique_id(user_input[CONF_PDL]) - self._abort_if_unique_id_configured() - api = EnedisGateway( - pdl=user_input[CONF_PDL], - token=user_input[CONF_TOKEN], - session=async_create_clientsession(self.hass), - ) - await api.async_get_identity() - except EnedisGatewayException: + await 
api.async_get_identity(user_input[CONF_PDL]) + except EnedisException as error: + _LOGGER.error(error) errors["base"] = "cannot_connect" else: return self.async_create_entry( - title=DOMAIN, data=user_input, options=options + title=f"Linky ({user_input[CONF_PDL]})", + data=user_input, + options={ + CONF_CONSUMTPION: user_input.get(CONF_CONSUMTPION), + COST_CONSUMTPION: DEFAULT_CC_PRICE, + CONF_PRODUCTION: user_input.get(CONF_PRODUCTION), + COST_PRODUCTION: DEFAULT_PC_PRICE, + }, ) return self.async_show_form( @@ -67,51 +110,149 @@ class EnedisOptionsFlowHandler(OptionsFlow): def __init__(self, config_entry: ConfigEntry) -> None: """Initialize options flow.""" - self.init_input = None self.config_entry = config_entry + rules = config_entry.options.get(CONF_RULES, {}) + self._rules: dict[str, Any] = rules.copy() + self._conf_rule_id: int | None = None - async def async_step_init(self, user_input=None): - """Handle a flow initialized by the user.""" + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> FlowResult: + """Handle options flow.""" if user_input is not None: - if ( - self.hass.data[DOMAIN][self.config_entry.entry_id] - .data.get("contracts", {}) - .get("offpeak_hours") - is not None - ): - self.init_input = user_input - return await self.async_step_offpeak() - return self.async_create_entry(title="", data=user_input) + if sel_rule := user_input.get(CONF_RULES): + return await self.async_step_rules(None, sel_rule) + return self._save_config(user_input) + + return self._async_init_form() + + @callback + def _save_config(self, data: dict[str, Any]) -> FlowResult: + """Save the updated options.""" + new_data = {k: v for k, v in data.items() if k not in [CONF_RULES]} + if self._rules: + new_data[CONF_RULES] = self._rules + + return self.async_create_entry(title="", data=new_data) + + @callback + def _async_init_form(self) -> FlowResult: + """Handle a flow initialized by the user.""" + rules_list = { + k: f"{v.get(CONF_RULE_NAME)} 
{v.get(CONF_RULE_START_TIME)}-{v.get(CONF_RULE_END_TIME)} {v.get(CONF_RULE_PRICE)}" + if v + else k + for k, v in self._rules.items() + } + rules = {CONF_RULE_NEW_ID: "Add new", **rules_list} + options = self.config_entry.options options_schema = vol.Schema( { - vol.Required( - CONF_SOURCE, - default=self.config_entry.options.get(CONF_SOURCE, CONSUMPTION), - ): vol.In([CONSUMPTION, PRODUCTION]), vol.Optional( - CONF_DETAIL, - default=self.config_entry.options.get(CONF_DETAIL), - ): bool, + CONF_PRODUCTION, + description={"suggested_value": options.get(CONF_PRODUCTION)}, + ): SelectSelector( + SelectSelectorConfig( + options=PRODUCTION_CHOICE, + mode=SelectSelectorMode.DROPDOWN, + custom_value=True, + ) + ), + vol.Optional( + COST_PRODUCTION, + default=options.get(COST_PRODUCTION, DEFAULT_PC_PRICE), + ): cv.positive_float, + vol.Optional( + CONF_CONSUMTPION, + description={"suggested_value": options.get(CONF_CONSUMTPION)}, + ): SelectSelector( + SelectSelectorConfig( + options=CONSUMPTION_CHOICE, + mode=SelectSelectorMode.DROPDOWN, + custom_value=True, + ) + ), + vol.Optional( + COST_CONSUMTPION, + default=options.get(COST_CONSUMTPION, DEFAULT_CC_PRICE), + ): cv.positive_float, + vol.Optional(CONF_RULES): vol.In(rules), } ) return self.async_show_form(step_id="init", data_schema=options_schema) - async def async_step_offpeak(self, user_input=None): - """Handle a flow offpeak.""" + async def async_step_rules( + self, user_input: dict[str, Any] | None = None, rule_id: str | None = None + ) -> FlowResult: + """Handle options flow for apps list.""" + if rule_id is not None: + self._conf_rule_id = rule_id if rule_id != CONF_RULE_NEW_ID else None + return self._async_rules_form(rule_id) + if user_input is not None: - self.init_input.update(user_input) - return self.async_create_entry(title="", data=self.init_input) + rule_id = user_input.get(CONF_RULE_ID, self._conf_rule_id) + if rule_id: + if user_input.get(CONF_RULE_DELETE, False): + self._rules.pop(rule_id) + else: + 
self._rules[rule_id] = { + CONF_RULE_NAME: user_input.get(CONF_RULE_NAME), + CONF_RULE_START_TIME: user_input.get(CONF_RULE_START_TIME), + CONF_RULE_END_TIME: user_input.get(CONF_RULE_END_TIME), + CONF_RULE_PRICE: float( + user_input.get(CONF_RULE_PRICE, DEFAULT_CC_PRICE) + ), + } - offpeak_schema = vol.Schema( - { - vol.Optional( - HC, default=self.config_entry.options.get(HC, DEFAULT_HC_PRICE) - ): cv.positive_float, - vol.Optional( - HP, default=self.config_entry.options.get(HP, DEFAULT_HP_PRICE) - ): cv.positive_float, - } + return await self.async_step_init() + + @callback + def _async_rules_form(self, rule_id: str) -> FlowResult: + """Return configuration form for rules.""" + rule_schema = { + vol.Optional( + CONF_RULE_NAME, + description={ + "suggested_value": self._rules.get(rule_id, {}).get(CONF_RULE_NAME) + }, + ): str, + vol.Optional( + CONF_RULE_START_TIME, + description={ + "suggested_value": self._rules.get(rule_id, {}).get( + CONF_RULE_START_TIME + ) + }, + ): TimeSelector(TimeSelectorConfig()), + vol.Optional( + CONF_RULE_END_TIME, + description={ + "suggested_value": self._rules.get(rule_id, {}).get( + CONF_RULE_END_TIME + ) + }, + ): TimeSelector(TimeSelectorConfig()), + vol.Optional( + CONF_RULE_PRICE, + description={ + "suggested_value": self._rules.get(rule_id, {}).get(CONF_RULE_PRICE) + }, + ): cv.positive_float, + } + if rule_id == CONF_RULE_NEW_ID: + id = str(len(self._rules.keys()) + 1) + data_schema = vol.Schema({vol.Required(CONF_RULE_ID): id, **rule_schema}) + else: + data_schema = vol.Schema( + {**rule_schema, vol.Optional(CONF_RULE_DELETE, default=False): bool} + ) + + return self.async_show_form( + step_id="rules", + data_schema=data_schema, + description_placeholders={ + "rule_id": f"`{rule_id}`" if rule_id != CONF_RULE_NEW_ID else "", + }, ) - return self.async_show_form(step_id="offpeak", data_schema=offpeak_schema) diff --git a/custom_components/enedis/const.py b/custom_components/enedis/const.py index a90d1fc..4860ce4 100644 --- 
a/custom_components/enedis/const.py +++ b/custom_components/enedis/const.py @@ -1,10 +1,35 @@ """Constants for the Enedis integration.""" - -DOMAIN = "enedis" +COST_CONSUMTPION = "consumption_cost" +COST_PRODUCTION = "production_cost" +CONF_CONSUMTPION = "config_consumption" +CONF_ENTRY = "entry" CONF_PDL = "pdl" -PLATFORMS = ["sensor"] -CONF_PRODUCTION = "production_daily" -CONF_CONSUMPTION = "consumption_daily" -CONF_DETAIL = "detail" +CONF_POWER_MODE = "power_mode" +CONF_PRODUCTION = "config_production" +CONF_RULE_DELETE = "rule_delete" +CONF_RULE_END_TIME = "rule_end_time" +CONF_RULE_ID = "rule_id" +CONF_RULE_NAME = "rule_name" +CONF_RULE_NEW_ID = "rules_new_id" +CONF_RULE_PRICE = "rule_price" +CONF_RULE_START_TIME = "rule_start_time" +CONF_RULES = "rules" +CONF_STATISTIC_ID = "statistic_id" +CONF_QUERY = "query" +CONSUMPTION = "consumption" +CONSUMPTION_DAILY = "daily_consumption" +CONSUMPTION_DETAIL = "consumption_load_curve" CONTRACTS = "contracts" -RELOAD_HISTORY = "reload_history" +DEFAULT_CC_PRICE = 0.1740 +DEFAULT_HC_PRICE = 0.1470 +DEFAULT_HP_PRICE = 0.1841 +DEFAULT_PC_PRICE = 0.06 +DOMAIN = "enedis" +MANUFACTURER = "Enedis" +PLATFORMS = ["sensor"] +PRODUCTION = "production" +PRODUCTION_DAILY = "daily_production" +PRODUCTION_DETAIL = "production_load_curve" +FETCH_SERVICE = "fetch_datas" +URL = "http://enedisgateway.tech" +CLEAR_SERVICE = "clear_datas" diff --git a/custom_components/enedis/coordinator.py b/custom_components/enedis/coordinator.py index 9354cf5..30b7c97 100644 --- a/custom_components/enedis/coordinator.py +++ b/custom_components/enedis/coordinator.py @@ -2,27 +2,40 @@ from __future__ import annotations import logging -import re from datetime import datetime, timedelta -from homeassistant.components.recorder import get_instance -from homeassistant.components.recorder.models import StatisticData, StatisticMetaData -from homeassistant.components.recorder.statistics import ( - async_add_external_statistics, - get_last_statistics, - 
statistics_during_period, -) +from enedisgatewaypy import EnedisByPDL, EnedisException from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_SOURCE, CONF_TOKEN, ENERGY_KILO_WATT_HOUR +from homeassistant.const import CONF_AFTER, CONF_BEFORE, CONF_NAME, CONF_TOKEN from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_create_clientsession -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed -from homeassistant.util import dt as dt_util - -from .const import CONF_DETAIL, CONF_PDL, DOMAIN -from .enedisgateway import HC, HP, EnedisException, EnedisGateway +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator + +from .const import ( + CONF_CONSUMTPION, + CONF_PDL, + CONF_PRODUCTION, + CONF_QUERY, + CONF_RULE_END_TIME, + CONF_RULE_NAME, + CONF_RULE_PRICE, + CONF_RULE_START_TIME, + CONF_RULES, + CONF_STATISTIC_ID, + CONSUMPTION, + CONSUMPTION_DAILY, + CONSUMPTION_DETAIL, + CONTRACTS, + COST_CONSUMTPION, + COST_PRODUCTION, + DOMAIN, + PRODUCTION, + PRODUCTION_DAILY, + PRODUCTION_DETAIL, +) +from .helpers import async_fetch_datas -SCAN_INTERVAL = timedelta(hours=2) +SCAN_INTERVAL = timedelta(hours=3) _LOGGER = logging.getLogger(__name__) @@ -33,316 +46,108 @@ class EnedisDataUpdateCoordinator(DataUpdateCoordinator): def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None: """Class to manage fetching data API.""" self.hass = hass + self.entry = entry self.pdl = entry.data[CONF_PDL] - self.power = entry.options[CONF_SOURCE].lower() - self.hp = entry.options.get(HP) - self.hc = entry.options.get(HC) - self.detail = entry.options.get(CONF_DETAIL, False) - - self.enedis = EnedisGateway( - pdl=self.pdl, + self.api = EnedisByPDL( token=entry.data[CONF_TOKEN], session=async_create_clientsession(hass), + timeout=30, ) - self.statistics = {} super().__init__(hass, _LOGGER, name=DOMAIN, update_interval=SCAN_INTERVAL) - async def 
_async_update_data(self): + async def _async_update_data(self) -> list(str, str): """Update data via API.""" - unit = ENERGY_KILO_WATT_HOUR - start = ( - (datetime.now() - timedelta(days=6)).strftime("%Y-%m-%d") - if self.detail - else (datetime.now() - timedelta(days=365)).strftime("%Y-%m-%d") - ) - end = datetime.now().strftime("%Y-%m-%d") + statistics = {} - if (contracts := self.statistics.get("contracts", {})) is None or len( - contracts - ) == 0: + # Fetch contract datas + if not (contracts := statistics.get("contracts", {})): try: - contracts = await self.enedis.async_get_contract_by_pdl() - except EnedisException: - _LOGGER.warning("Contract data is not complete") - - try: - datas = await self.enedis.async_get_datas( - self.power, start, end, self.detail - ) - hourly_data = datas.get("meter_reading", {}).get("interval_reading", []) - except EnedisException: - hourly_data = [] - - try: - offpeak_hours = await self._async_offpeak_statistics(hourly_data, unit) - peak_hours = await self._async_peak_statistics(hourly_data, unit) - self.statistics = { - "contracts": contracts, - "energy": { - CONF_SOURCE: self.power, - "offpeak_hours": offpeak_hours, - "peak_hours": peak_hours, - }, + contracts = await self.api.async_get_contract(self.pdl) + statistics.update({CONTRACTS: contracts}) + except EnedisException as error: + _LOGGER.error(error) + + # Fetch consumption and production datas + rules = self.entry.options.get(CONF_RULES, {}) + if self.entry.options.get(CONF_PRODUCTION) in [ + PRODUCTION_DAILY, + PRODUCTION_DETAIL, + ]: + mode = { + CONF_QUERY: self.entry.options.get(CONF_PRODUCTION), + CONF_AFTER: self.minus_date(365) + if self.entry.options.get(CONF_PRODUCTION) in [PRODUCTION_DAILY] + else self.minus_date(6), + CONF_BEFORE: datetime.now(), + CONF_RULES: [ + { + CONF_NAME: PRODUCTION.lower(), + CONF_STATISTIC_ID: f"{DOMAIN}:{self.pdl}_{PRODUCTION}".lower(), + CONF_RULE_NAME: None, + CONF_RULE_START_TIME: "00:00:00", + CONF_RULE_END_TIME: "00:00:00", + 
CONF_RULE_PRICE: self.entry.options.get(COST_PRODUCTION), + }, + ], + CONF_PDL: self.pdl, } - except EnedisException as error: - raise UpdateFailed(error) - - return self.statistics - - async def _async_insert_costs(self, statistics, statistic_id, name, price) -> dict: - """Insert costs.""" - last_stats = await get_instance(self.hass).async_add_executor_job( - get_last_statistics, self.hass, 1, statistic_id, True - ) - cost_sum = 0 if not last_stats else last_stats[statistic_id][0]["sum"] - - costs = [] - for stat in statistics: - _cost = round(stat[1] * price, 2) - cost_sum += _cost - costs.append(StatisticData(start=stat[0], state=_cost, sum=cost_sum)) - - metadata = StatisticMetaData( - has_mean=False, - has_sum=True, - name=name, - source=DOMAIN, - statistic_id=statistic_id, - unit_of_measurement="EUR", - ) - async_add_external_statistics(self.hass, metadata, costs) - - async def _async_offpeak_statistics(self, hourly_data, unit) -> dict: - if self.detail is False: - _LOGGER.debug("Off-peak hours are not eligible") - return - statistic_id = f"{DOMAIN}:{self.pdl}_{self.power}_offpeak" - last_stats = await get_instance(self.hass).async_add_executor_job( - get_last_statistics, self.hass, 1, statistic_id, True - ) - - if not last_stats: - energy_sum = 0 - last_stats_time = None - else: - energy_sum = last_stats[statistic_id][0]["sum"] - last_stats_time = dt_util.parse_datetime( - last_stats[statistic_id][0]["start"] - ) - _LOGGER.debug(f"Last date in database {last_stats_time}") - - statistics = [] - statistics_cost = [] - ref_date = None - last_value = 0 - for data in hourly_data: - if (value := int(data.get("value"))) is None: - continue - - start = dt_util.parse_datetime(data["date"]).replace(tzinfo=dt_util.UTC) - if last_stats_time is not None and start <= last_stats_time + timedelta( - days=1 - ): - continue - - if start.time() > datetime.min.time(): - if self.enedis.check_offpeak(start): - if ref_date is None: - ref_date = datetime.combine( - start.date(), 
datetime.min.time() - ).replace(tzinfo=dt_util.UTC) - interval = float( - self.weighted_interval(data.get("interval_length")) - ) - _LOGGER.debug( - f"Offpeak Value {value} - Interval {interval} Hours {start}" - ) - last_value += value * interval - continue - else: - if last_value > 0: - - if self.enedis.check_offpeak(start): - interval = float( - self.weighted_interval(data.get("interval_length")) - ) - last_value += value * interval - _LOGGER.debug( - f"Offpeak Value {value} - Interval {interval} Hours {start}" - ) - - value_kwh = round(last_value / 1000, 2) - statistics_cost.append((ref_date, value_kwh)) - energy_sum += value_kwh - - _LOGGER.debug( - f"Offpeak Hours: {value_kwh} at {ref_date}, sum is {round(energy_sum, 2)}" - ) - statistics.append( - StatisticData( - start=ref_date, state=value_kwh, sum=round(energy_sum, 2) - ) - ) - last_value = 0 - ref_date = None - - metadata = StatisticMetaData( - has_mean=False, - has_sum=True, - name=f"Off-peak {self.power} ({self.pdl})", - source=DOMAIN, - statistic_id=statistic_id, - unit_of_measurement=unit, - ) - - async_add_external_statistics(self.hass, metadata, statistics) - - if self.hc: - await self._async_insert_costs( - statistics_cost, - f"{DOMAIN}:{self.pdl}_{self.power}_offpeak_cost", - f"Price of off-peak hours {self.power} ({self.pdl})", - self.hc, - ) - - return energy_sum - - async def _async_peak_statistics( - self, hourly_data, unit, force=False, statistic_id=None - ) -> dict: - if statistic_id is None: - statistic_id = f"{DOMAIN}:{self.pdl}_{self.power}_peak" - last_stats = await get_instance(self.hass).async_add_executor_job( - get_last_statistics, self.hass, 1, statistic_id, True - ) - - if not last_stats or force is True: - energy_sum = 0 - last_stats_time = None - else: - energy_sum = last_stats[statistic_id][0]["sum"] - last_stats_time = dt_util.parse_datetime( - last_stats[statistic_id][0]["start"] - ) - _LOGGER.debug(f"Last date in database {last_stats_time}") - - statistics = [] - 
statistics_cost = [] - last_value = 0 - ref_date = None - for data in hourly_data: - if (value := int(data.get("value"))) is None: - continue - - start = dt_util.parse_datetime(data["date"]).replace(tzinfo=dt_util.UTC) - if last_stats_time is not None and start <= last_stats_time + timedelta( - days=1 - ): - continue - - if start.time() > datetime.min.time(): - if not self.enedis.check_offpeak(start): - if ref_date is None: - ref_date = datetime.combine( - start.date(), datetime.min.time() - ).replace(tzinfo=dt_util.UTC) - interval = float( - self.weighted_interval(data.get("interval_length")) - ) - _LOGGER.debug( - f"Peak Value {value} - Interval {interval} Hours {start}" - ) - last_value += value * interval - continue - else: - date_refer = value_kwh = None - if last_value > 0: - - if not self.enedis.check_offpeak(start): - interval = float( - self.weighted_interval(data.get("interval_length")) - ) - last_value += value * interval - - _LOGGER.debug( - f"Peak Value {value} - Interval {interval} Hours {start}" - ) - - value_kwh = round(last_value / 1000, 2) - date_refer = ref_date - last_value = 0 - ref_date = None - else: - value_kwh = round(value / 1000, 2) - date_refer = start - - statistics_cost.append((date_refer, value_kwh)) - energy_sum += value_kwh - _LOGGER.debug( - f"Peak Hours : {value_kwh} at {date_refer}, sum is {round(energy_sum, 2)}" - ) - statistics.append( - StatisticData( - start=date_refer, state=value_kwh, sum=round(energy_sum, 2) - ) + datas = await async_fetch_datas(self.hass, self.api, **mode) + statistics.update(datas) + + if self.entry.options.get(CONF_CONSUMTPION) in [CONSUMPTION_DAILY] or ( + self.entry.options.get(CONF_CONSUMTPION) in [CONSUMPTION_DETAIL] + and len(rules.keys()) == 0 + ): + mode = { + CONF_QUERY: self.entry.options.get(CONF_CONSUMTPION), + CONF_AFTER: self.minus_date(365) + if self.entry.options.get(CONF_CONSUMTPION) in [CONSUMPTION_DAILY] + else self.minus_date(6), + CONF_BEFORE: datetime.now(), + CONF_RULES: [ + { + 
CONF_NAME: CONSUMPTION.lower(), + CONF_STATISTIC_ID: f"{DOMAIN}:{self.pdl}_{CONSUMPTION}".lower(), + CONF_RULE_NAME: None, + CONF_RULE_START_TIME: "00:00:00", + CONF_RULE_END_TIME: "00:00:00", + CONF_RULE_PRICE: self.entry.options.get(COST_CONSUMTPION), + }, + ], + CONF_PDL: self.pdl, + } + datas = await async_fetch_datas(self.hass, self.api, **mode) + statistics.update(datas) + elif ( + self.entry.options.get(CONF_CONSUMTPION) in [CONSUMPTION_DETAIL] + and len(rules.keys()) > 0 + ): + datas_rules = [] + for rule in rules.values(): + datas_rules.append( + { + CONF_NAME: f"{CONSUMPTION}_{rule[CONF_RULE_NAME]}".lower(), + CONF_STATISTIC_ID: f"{DOMAIN}:{self.pdl}_{CONSUMPTION}_{rule[CONF_RULE_NAME]}".lower(), + CONF_RULE_NAME: rule[CONF_RULE_NAME], + CONF_RULE_START_TIME: rule[CONF_RULE_START_TIME], + CONF_RULE_END_TIME: rule[CONF_RULE_END_TIME], + CONF_RULE_PRICE: rule[CONF_RULE_PRICE], + } ) - metadata = StatisticMetaData( - has_mean=False, - has_sum=True, - name=f"Peak {self.power} ({self.pdl})", - source=DOMAIN, - statistic_id=statistic_id, - unit_of_measurement=unit, - ) - - async_add_external_statistics(self.hass, metadata, statistics) - - if self.hp and force is False: - await self._async_insert_costs( - statistics_cost, - f"{DOMAIN}:{self.pdl}_{self.power}_peak_cost", - f"Price of peak hours {self.power} ({self.pdl})", - self.hp, - ) - - return energy_sum - - def weighted_interval(self, interval): - """Compute weighted.""" - if interval is None: - return 1 - rslt = re.findall("PT([0-9]{2})M", interval) - if len(rslt) == 1: - return int(rslt[0]) / 60 - - async def async_load_datas_history(self, call): - """Load datas in statics table.""" - unit = ENERGY_KILO_WATT_HOUR - statistic_id = f"{DOMAIN}:{self.pdl}_{self.power}_peak" - start_stat = (datetime.now() - timedelta(days=365)).replace(tzinfo=dt_util.UTC) - - stat = await get_instance(self.hass).async_add_executor_job( - statistics_during_period, - self.hass, - start_stat, - None, - [statistic_id], - "hour", - 
True, - ) - - start = (stat[statistic_id][0]["start"].date() - timedelta(days=1)).strftime( - "%Y-%m-%d" - ) - end = start_stat.strftime("%Y-%m-%d") - - statistic_id = f"{DOMAIN}:{self.pdl}_{self.power}" + mode = { + CONF_QUERY: self.entry.options.get(CONF_CONSUMTPION), + CONF_AFTER: self.minus_date(6), + CONF_BEFORE: datetime.now(), + CONF_RULES: datas_rules, + CONF_PDL: self.pdl, + } + datas = await async_fetch_datas(self.hass, self.api, **mode) + statistics.update(datas) - try: - datas = await self.enedis.async_get_datas(self.power, start, end, False) - hourly_data = datas.get("meter_reading", {}).get("interval_reading", []) - except EnedisException: - hourly_data = [] + return statistics - await self._async_peak_statistics(hourly_data, unit, True, statistic_id) + @staticmethod + def minus_date(days: int) -> datetime: + """Substract now.""" + return datetime.now() - timedelta(days=days) diff --git a/custom_components/enedis/enedisgateway.py b/custom_components/enedis/enedisgateway.py deleted file mode 100644 index a22001f..0000000 --- a/custom_components/enedis/enedisgateway.py +++ /dev/null @@ -1,153 +0,0 @@ -"""Class for Enedis Gateway (http://enedisgateway.tech).""" - - -import logging -import re -import datetime -from datetime import datetime as dt - -import requests -from requests.exceptions import RequestException - -URL = "https://enedisgateway.tech" -API_URL = f"{URL}/api" -MANUFACTURER = "Enedis" -PRODUCTION = "Production" -PRODUCTION_DAILY = "production_daily" -PRODUCTION_DETAIL = "production_detail" -CONSUMPTION = "Consumption" -CONSUMPTION_DAILY = "consumption_daily" -CONSUMPTION_DETAIL = "consumption_detail" -HP = "peak_hours" -HC = "offpeak_hours" -DEFAULT_HP_PRICE = 0.1841 -DEFAULT_HC_PRICE = 0.1470 - -_LOGGER = logging.getLogger(__name__) - - -class EnedisGateway: - """Class for Enedis Gateway API.""" - - def __init__(self, pdl, token, session=None): - """Init.""" - self.pdl = str(pdl) - self.token = token - self.session = session if session else 
requests.Session() - self.has_offpeak = False - self.offpeaks = [] - - async def _async_make_request(self, payload): - """Request session.""" - headers = {"Authorization": self.token, "Content-Type": "application/json"} - try: - _LOGGER.debug(f"Make request {payload}") - resp = await self.session.request( - method="POST", url=API_URL, json=payload, headers=headers, timeout=5 - ) - response = await resp.json() - if "error" in response: - raise EnedisGatewayException(response.get("description")) - if "tag" in response and response["tag"] in [ - "limit_reached", - "enedis_return_ko", - ]: - raise EnedisGatewayException(response.get("description")) - return response - except RequestException as error: - raise EnedisException("Request failed") from error - - def hass_offpeak(self): - """Has offpeak hours.""" - return len(self.offpeak) > 0 - - def check_offpeak(self, start: datetime): - """Return offpeak status.""" - if self.hass_offpeak: - start_time = start.time() - for range in self.offpeaks: - starting = dt.strptime(range[0], "%HH%M").time() - ending = dt.strptime(range[1], "%HH%M").time() - if start_time > starting and start_time <= ending: - return True - return False - - def get_offpeak(self): - """Return offpeak detail.""" - return self.offpeaks - - async def async_get_identity(self): - """Get identity.""" - payload = {"type": "identity", "usage_point_id": str(self.pdl)} - return await self._async_make_request(payload) - - async def async_get_addresses(self): - """Get addresses.""" - payload = {"type": "addresses", "usage_point_id": str(self.pdl)} - return await self._async_make_request(payload) - - async def async_get_addresses_by_pdl(self): - """Return all.""" - datas = {} - addresses = await self.async_get_addresses() - for addresses in addresses.get("customer", {}).get("usage_points"): - if addresses.get("usage_point", {}).get("usage_point_id") == self.pdl: - datas.update(addresses.get("usage_point")) - return datas - - async def async_get_contracts(self): - 
"""Get contracts.""" - payload = {"type": "contracts", "usage_point_id": str(self.pdl)} - return await self._async_make_request(payload) - - async def async_get_contract_by_pdl(self): - """Return all.""" - datas = {} - contracts = await self.async_get_contracts() - for contract in contracts.get("customer", {}).get("usage_points", ""): - if contract.get("usage_point", {}).get("usage_point_id") == self.pdl: - datas.update(contract.get("contracts")) - - if offpeak_hours := datas.get("offpeak_hours"): - self.offpeaks = re.findall("(?:(\\w+)-(\\w+))+", offpeak_hours) - return datas - - async def async_get_max_power(self, start, end): - """Get consumption max power.""" - payload = { - "type": "daily_consumption_max_power", - "usage_point_id": self.pdl, - "start": f"{start}", - "end": f"{end}", - } - return await self._async_make_request(payload) - - async def async_get_datas(self, service, start, end, detail=False): - """Get datas.""" - payload = { - "type": f"daily_{service}", - "usage_point_id": f"{self.pdl}", - "start": f"{start}", - "end": f"{end}", - } - if detail: - payload = { - "type": f"{service}_load_curve", - "usage_point_id": f"{self.pdl}", - "start": f"{start}", - "end": f"{end}", - } - return await self._async_make_request(payload) - - -class EnedisException(Exception): - """Enedis exception.""" - - -class EnedisGatewayException(EnedisException): - """Enedis gateway error.""" - - def __init__(self, message): - """Initialize.""" - super().__init__(message) - _LOGGER.error(message) diff --git a/custom_components/enedis/helpers.py b/custom_components/enedis/helpers.py new file mode 100644 index 0000000..b6039cd --- /dev/null +++ b/custom_components/enedis/helpers.py @@ -0,0 +1,301 @@ +"""Helper module.""" + +import logging +import re +from datetime import datetime, timedelta + +from enedisgatewaypy import EnedisByPDL, EnedisException +from homeassistant.components.recorder import get_instance +from homeassistant.components.recorder.models import StatisticData, 
# ---------------------------------------------------------------------------
# Statistics helpers: fetch Enedis interval readings and push them into the
# Home Assistant long-term statistics tables.
# NOTE(review): this section relies on the imports at the top of helpers.py
# (homeassistant.components.recorder, homeassistant.util.dt, enedisgatewaypy,
# and the .const symbols) — confirm they stay in sync with these functions.
# ---------------------------------------------------------------------------

_LOGGER = logging.getLogger(__name__)


async def async_fetch_datas(
    hass: HomeAssistant,
    api: EnedisByPDL,
    query: str,
    rules: list,
    after: datetime,
    before: datetime,
    pdl: str,
) -> dict:
    """Fetch interval readings from the gateway and record them as statistics.

    Best effort: gateway errors are logged and treated as an empty collection.
    Returns the per-rule summary computed by :func:`async_statistics`.
    """
    datas_collected = []
    try:
        # Collect interval readings for [after, before].
        datas = await api.async_fetch_datas(query, after, before, pdl)
        datas_collected = datas.get("meter_reading", {}).get("interval_reading", [])
        _LOGGER.debug(datas_collected)
    except EnedisException as error:
        _LOGGER.error(error)
    return await async_statistics(hass, datas_collected, rules)


async def async_statistics(
    hass: HomeAssistant, datas_collected: list, rules: list | None = None
) -> dict:
    """Aggregate the collected readings per pricing rule and store statistics.

    Each rule defines a statistic id, a time range and a price; readings are
    summed per day, converted to kWh and written together with a cost series.
    Returns ``{rule name: running kWh sum}`` for rules not marked "disabled".
    """
    # BUGFIX: the original iterated a possible None default.
    rules = rules or []
    global_statistics = {}
    collects = {}
    for rule in rules:
        statistic_id = rule[CONF_STATISTIC_ID]
        name = rule[CONF_NAME]

        if collects.get(name) is None:
            collects.update(
                {
                    name: {
                        "metadata": StatisticMetaData(
                            has_mean=False,
                            has_sum=True,
                            name=name,
                            source=DOMAIN,
                            statistic_id=statistic_id,
                            unit_of_measurement=ENERGY_KILO_WATT_HOUR,
                        ),
                        "metacost": StatisticMetaData(
                            has_mean=False,
                            has_sum=True,
                            name=f"{name}_cost",
                            source=DOMAIN,
                            statistic_id=f"{statistic_id}_cost",
                            unit_of_measurement="EUR",
                        ),
                        "statistics": {},
                        CONF_RULE_PRICE: rule[CONF_RULE_PRICE],
                        CONF_STATISTIC_ID: statistic_id,
                    }
                }
            )

        # Last recorded statistic gives the running sum and the resume point.
        last_stats = await get_instance(hass).async_add_executor_job(
            get_last_statistics, hass, 1, statistic_id, True, "sum"
        )
        summary = 0 if not last_stats else last_stats[statistic_id][0]["sum"]
        last_stats_time = (
            None if not last_stats else last_stats[statistic_id][0]["start"]
        )

        ref_date = None
        value = 0
        for data in datas_collected:
            # BUGFIX: the original tested `int(data.get("value")) is None`,
            # which raises TypeError when "value" is missing instead of
            # skipping the reading (the check could never be true).
            raw_value = data.get("value")
            if raw_value is None:
                continue

            interval = float(weighted_interval(data.get("interval_length")))
            value_collected = int(raw_value) / 1000 * interval  # Wh -> kWh

            date_collected = dt_util.parse_datetime(data["date"]).replace(
                tzinfo=dt_util.UTC
            )

            # Skip readings outside this rule's time range.
            if not has_range(
                date_collected, rule[CONF_RULE_START_TIME], rule[CONF_RULE_END_TIME]
            ):
                continue

            # Skip readings already covered by stored statistics.
            if (
                last_stats_time is not None
                and date_collected <= last_stats_time + timedelta(days=1)
            ):
                continue

            if ref_date is None:
                value += value_collected
                _LOGGER.debug("New loop :%s %s", date_collected, value_collected)
                ref_date = date_collected
            elif date_collected.day == ref_date.day:
                value += value_collected
                _LOGGER.debug("Same days : %s %s", date_collected, value_collected)
            elif (
                date_collected.time() == datetime.strptime("00:00", "%H:%M").time()
            ) and ref_date.time() != datetime.strptime("00:00", "%H:%M").time():
                # A midnight reading closes the previous day's consumption.
                value += value_collected
                _LOGGER.debug("Midnight : %s %s", date_collected, value_collected)
            elif ref_date:
                # Day changed: flush the accumulated value for ref_date's day.
                date_ref = dateatmidnight(ref_date)
                if get_sum := collects[name]["statistics"].get(date_ref):
                    value = get_sum[0] + value

                summary += value
                cost = round(value * rule[CONF_RULE_PRICE], 2)
                cost_summary = round(summary * rule[CONF_RULE_PRICE], 2)

                collects[name]["statistics"].update(
                    {date_ref: (value, summary, cost, cost_summary)}
                )
                _LOGGER.debug(
                    "Collected : %s %s %s - %s€ %s€",
                    date_ref,
                    value,
                    summary,
                    cost,
                    cost_summary,
                )

                ref_date = date_collected
                value = value_collected
                _LOGGER.debug("New day : %s %s", date_collected, value_collected)

        if value > 0:
            # Flush the trailing (last) day of the collection.
            date_ref = dateatmidnight(ref_date)
            if get_sum := collects[name]["statistics"].get(date_ref):
                value = get_sum[0] + value

            summary += value
            cost = round(value * rule[CONF_RULE_PRICE], 2)
            cost_summary = round(summary * rule[CONF_RULE_PRICE], 2)

            collects[name]["statistics"].update(
                {date_ref: (value, summary, cost, cost_summary)}
            )
            _LOGGER.debug(
                "Collected : %s %s %s - %s€ %s€",
                date_ref,
                value,
                summary,
                cost,
                cost_summary,
            )

        if rule.get("disabled") is None:
            global_statistics.update({name: summary})

    for name, values in collects.items():
        stats = []
        costs = []
        for date_ref, datas in values["statistics"].items():
            stats.append(StatisticData(start=date_ref, state=datas[0], sum=datas[1]))
            costs.append(StatisticData(start=date_ref, state=datas[2], sum=datas[3]))

        if stats and costs:
            # BUGFIX: async_add_external_statistics is an event-loop callback
            # in the recorder API; the original scheduled it on an executor
            # thread (never awaited and not thread-safe). Call it directly.
            _LOGGER.debug("Add %s stat in table", name)
            async_add_external_statistics(hass, values["metadata"], stats)
            _LOGGER.debug("Add %s cost in table", name)
            async_add_external_statistics(hass, values["metacost"], costs)
    return global_statistics


def weighted_interval(interval: str | None) -> float | int:
    """Return the fraction of an hour covered by an ISO-8601 ``PTxxM`` period.

    Any other / absent format is weighted as a full hour (1).
    """
    if interval and len(rslt := re.findall("PT([0-9]{2})M", interval)) == 1:
        return int(rslt[0]) / 60
    return 1


def has_range(hour: datetime, start: str, end: str) -> bool:
    """Return True if ``hour`` falls inside the half-open range ``]start, end]``.

    An ``end`` of midnight means "until the end of the day", so a rule of
    00:00:00 -> 00:00:00 matches the whole day.
    """
    midnight = datetime.strptime("00:00:00", "%H:%M:%S").time()
    start_time = hour.time()
    starting = datetime.strptime(start, "%H:%M:%S").time()
    ending = datetime.strptime(end, "%H:%M:%S").time()
    if starting < start_time <= ending:
        return True
    if ending == midnight and (start_time > starting or start_time == midnight):
        return True
    return False


def dateatmidnight(date: datetime) -> datetime:
    """Return date at midnight (UTC), ex 01/01/2000 00h00."""
    return datetime.combine(date, datetime.min.time()).replace(tzinfo=dt_util.UTC)


async def async_service_load_datas_history(
    hass: HomeAssistant, api: EnedisByPDL, call: ServiceCall
) -> None:
    """Handle the ``fetch_datas`` service: backfill the statistics table."""
    entry_id = call.data[CONF_ENTRY]
    entry = hass.data[DOMAIN].get(entry_id)
    # BUGFIX: the original dereferenced a possibly missing coordinator.
    if entry is None:
        _LOGGER.error("Config entry %s not found", entry_id)
        return
    pdl = entry.pdl
    query = call.data[CONF_POWER_MODE]
    if query in [CONSUMPTION_DAILY, CONSUMPTION_DETAIL]:
        power = CONSUMPTION
        cost = entry.config_entry.options[COST_CONSUMTPION]
    else:
        power = PRODUCTION
        cost = entry.config_entry.options[COST_PRODUCTION]
    start = call.data[CONF_AFTER]
    statistic_id = f"{DOMAIN}:{entry.pdl}_{power}"

    # Single catch-all rule covering the whole day; "disabled" keeps it out
    # of the summary returned by async_statistics.
    rules = [
        {
            CONF_NAME: power.lower(),
            CONF_STATISTIC_ID: statistic_id.lower(),
            CONF_RULE_NAME: None,
            CONF_RULE_START_TIME: "00:00:00",
            CONF_RULE_END_TIME: "00:00:00",
            CONF_RULE_PRICE: cost,
            "disabled": True,
        },
    ]

    stat = await get_instance(hass).async_add_executor_job(
        statistics_during_period,
        hass,
        dateatmidnight(start),
        None,
        [statistic_id],
        "hour",
    )

    # Backfill only up to where existing statistics begin; otherwise honor
    # the user-supplied end date.
    if stat.get(statistic_id):
        end = (
            dt_util.parse_datetime(stat[statistic_id][0]["start"])
            .replace(tzinfo=dt_util.UTC)
            .date()
        )
    else:
        end = call.data[CONF_BEFORE]

    await async_fetch_datas(hass, api, query, rules, start, end, pdl)


async def async_service_datas_clear(hass: HomeAssistant, call: ServiceCall) -> None:
    """Handle the ``clear_datas`` service: purge a statistic from recorder."""
    statistic_id = call.data[CONF_STATISTIC_ID]
    if not statistic_id.startswith("enedis:"):
        _LOGGER.error("statistic_id is incorrect %s", statistic_id)
        return
    # BUGFIX: the original never awaited the executor job, so errors were
    # silently dropped and completion order was not guaranteed.
    await hass.async_add_executor_job(
        clear_statistics, get_instance(hass), [statistic_id]
    )
b/custom_components/enedis/manifest.json @@ -4,13 +4,9 @@ "config_flow": true, "documentation": "https://github.com/cyr-ius/hass-enedis", "issue_tracker": "https://github.com/cyr-ius/hass-enedis/issues", - "requirements": [], - "ssdp": [], - "zeroconf": [], - "homekit": {}, + "requirements": ["enedisgatewaypy==1.3"], "dependencies": ["recorder"], - "after_dependencies": [], "codeowners": ["@cyr-ius"], "iot_class": "cloud_polling", - "version": "2.2.2" + "version": "2.3.7" } diff --git a/custom_components/enedis/sensor.py b/custom_components/enedis/sensor.py index 9141f42..b970d68 100644 --- a/custom_components/enedis/sensor.py +++ b/custom_components/enedis/sensor.py @@ -7,14 +7,13 @@ SensorEntity, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ENERGY_KILO_WATT_HOUR, CONF_SOURCE +from homeassistant.const import ENERGY_KILO_WATT_HOUR from homeassistant.core import HomeAssistant from homeassistant.helpers.entity import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .enedisgateway import MANUFACTURER, URL -from .const import DOMAIN, CONF_DETAIL +from .const import DOMAIN, MANUFACTURER, URL, CONTRACTS _LOGGER = logging.getLogger(__name__) @@ -26,10 +25,11 @@ async def async_setup_entry( ) -> None: """Set up the sensors.""" coordinator = hass.data[DOMAIN][config_entry.entry_id] - source = config_entry.options[CONF_SOURCE] - entities = [PowerSensor(coordinator, source, "peak_hours")] - if config_entry.options.get(CONF_DETAIL): - entities.append(PowerSensor(coordinator, source, "offpeak_hours")) + entities = [ + PowerSensor(coordinator, name) + for name in coordinator.data.keys() + if name != CONTRACTS + ] async_add_entities(entities) @@ -39,14 +39,15 @@ class PowerSensor(CoordinatorEntity, SensorEntity): _attr_device_class = DEVICE_CLASS_ENERGY _attr_native_unit_of_measurement = ENERGY_KILO_WATT_HOUR _attr_state_class = 
STATE_CLASS_TOTAL_INCREASING + _attr_has_entity_name = True - def __init__(self, coordinator, source, sensor_type): + def __init__(self, coordinator, sensor_name): """Initialize the sensor.""" super().__init__(coordinator) - contracts = coordinator.data.get("contracts", {}) - self.sensor_type = sensor_type - self._attr_unique_id = f"{coordinator.pdl}_{source}_{sensor_type}" - self._attr_name = f"{source} {sensor_type}" + contracts = coordinator.data.get(CONTRACTS, {}) + self.sensor_mode = sensor_name + self._attr_unique_id = f"{coordinator.pdl}_{sensor_name}" + self._attr_name = sensor_name self._attr_device_info = DeviceInfo( identifiers={(DOMAIN, coordinator.pdl)}, name=f"Linky ({coordinator.pdl})", @@ -66,6 +67,5 @@ def __init__(self, coordinator, source, sensor_type): @property def native_value(self): - """Max power.""" - value = int(self.coordinator.data.get("energy", {}).get(self.sensor_type)) - return float(value) + """Value power.""" + return float(self.coordinator.data.get(self.name)) diff --git a/custom_components/enedis/services.yaml b/custom_components/enedis/services.yaml index 1f3f512..04be904 100644 --- a/custom_components/enedis/services.yaml +++ b/custom_components/enedis/services.yaml @@ -1,5 +1,46 @@ # Enedis service. +fetch_datas: + name: Fetch datas + description: This service allows you to fetch datas + fields: + entry: + name: Entry + required: true + selector: + config_entry: + integration: enedis + power_mode: + name: Power type + required: true + default: daily_consumption + selector: + select: + mode: dropdown + options: + - label: Consumption + value: daily_consumption + - label: Production + value: daily_production + after: + name: Start Date + description: Start date + required: true + selector: + date: + before: + name: End Date + description: End date + required: true + selector: + date: -reload_history: - name: Retrieve datas history - description: This service allows you to retrieve all the data for the last year +# Enedis service. 
+clear_datas: + name: Clear datas + description: This service allows you to clear datas + fields: + statistic_id: + name: Statistic Id + required: true + selector: + text: \ No newline at end of file diff --git a/custom_components/enedis/strings.json b/custom_components/enedis/strings.json index 36c1724..87ffc72 100644 --- a/custom_components/enedis/strings.json +++ b/custom_components/enedis/strings.json @@ -4,14 +4,16 @@ "step": { "user": { "title": "Register Enedis gateway account", + "description": "To start the collection, it is necessary to make the settings via the options.", "data": { - "pdl": "[%key:common::config_flow::data::pdl]", - "token": "[%key:common::config_flow::data::token]" + "pdl": "pdl", + "token": "token" } } }, "error": { - "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" @@ -21,20 +23,31 @@ "step": { "init": { "title": "Enedis options", - "description": "Please select options to fetch datas", + "description": "Off-peak and peak hours only if consumption is detail", "data": { - "energy": "Mode", - "detail": "Detail (recommanded to compute offpeak hours)" + "config_consumption": "Enable fetch Consumption datas (recommanded)", + "consumption_cost": "Consumption cost", + "config_production": "Enable fetch Production datas (recommanded)", + "production_cost": "Production cost", + "offpeak_cost": "Off-peak hours price", + "peak_cost": "Peak hours price" } }, - "offpeak": { - "title": "Enedis options", - "description": "Please indicate the prices for off-peak and peak hours (optional)", + "rules": { + "title": "Add new range", + "description": "Add new range for tarification", "data": { - "offpeak_hours": "Off-peak hours price", - "peak_hours": "Peak hours price" + "rule_id": "Id", + "rule_name": 
"Name", + "rule_start_time": "Start time", + "rule_end_time": "End time", + "rule_price": "Price", + "rule_delete": "Delete this range" } } + }, + "error": { + "syntax_error": "syntax error" } } } \ No newline at end of file diff --git a/custom_components/enedis/translations/en.json b/custom_components/enedis/translations/en.json index e7889cb..f643707 100644 --- a/custom_components/enedis/translations/en.json +++ b/custom_components/enedis/translations/en.json @@ -4,6 +4,7 @@ "step": { "user": { "title": "Register Enedis gateway account", + "description": "To start the collection, it is necessary to make the settings via the options.", "data": { "pdl": "PDL", "token": "Enedis Gateway Token" @@ -11,30 +12,40 @@ } }, "error": { - "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" + "cannot_connect": "Failed to connect", + "unknown": "Unexpected error" }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" + "already_configured": "Service is already configured" } }, "options": { "step": { "init": { "title": "Enedis options", - "description": "Please select options to fetch datas", + "description": "Custom Off-peak and peak hours only if consumption is detail", "data": { - "energy": "Mode", - "detail": "Detail (recommanded to compute offpeak hours)" + "config_consumption": "Enable fetch Consumption datas (recommanded)", + "consumption_cost": "Consumption cost", + "config_production": "Enable fetch Production datas", + "production_cost": "Production cost" } }, - "offpeak": { - "title": "Enedis options", - "description": "Please indicate the prices for off-peak and peak hours (optional)", + "rules": { + "title": "Add new range", + "description": "Add new range for tarification", "data": { - "offpeak_hours": "Off-peak hours price", - "peak_hours": "Peak hours price" + "rule_id": "Id", + "rule_name": "Name", + "rule_start_time":"Start time", + "rule_end_time":"End time", + "rule_price":"Price", + 
"rule_delete": "Delete this range" } - } - } + } + }, + "error": { + "syntax_error": "syntax error" + } } } \ No newline at end of file diff --git a/custom_components/enedis/translations/fr.json b/custom_components/enedis/translations/fr.json new file mode 100644 index 0000000..5bf76d6 --- /dev/null +++ b/custom_components/enedis/translations/fr.json @@ -0,0 +1,51 @@ +{ + "config": { + "flow_title": "{name}", + "step": { + "user": { + "title": "Enregistre ton compte Enedis Gateway", + "description": "Pour démarrer la collecte, il est nécessaire d'effectuer les paramétrages via les options.", + "data": { + "pdl": "PDL", + "token": "Jeton" + } + } + }, + "error": { + "cannot_connect": "Impossible de se connecter", + "unknown": "Erreur imprévue" + }, + "abort": { + "already_configured": "Service déjà configuré" + } + }, + "options": { + "step": { + "init": { + "title": "Options", + "description": "La tarification personnalisée est disponible en mode Consommation: détaillé.", + "data": { + "config_consumption": "Collecter les données de consommation.", + "consumption_cost": "Tarif horaire", + "config_production": "Collecter des données de production (solaire, éolien..)", + "production_cost": "Tarif horaire (rachat)" + } + }, + "rules": { + "title": "Ajouter une nouvelle plage horaire", + "description": "Permet de déclarer une plage pour une tarification spécifique.\n Attention la consommation doit être en mode détaillé", + "data": { + "rule_id": "Id", + "rule_name": "Nom", + "rule_start_time":"Heure de début", + "rule_end_time":"Heure de fin", + "rule_price":"Prix", + "rule_delete": "Effacer cette plage horaire" + } + } + }, + "error": { + "syntax_error": "Erreur de syntax" + } + } +} \ No newline at end of file