Fix: Detected blocking call to open file
Fixed by wrapping the blocking file calls in loop.run_in_executor(...)
davidrapan committed Jun 28, 2024
1 parent 7d32ba2 commit 577ff94
Showing 9 changed files with 102 additions and 94 deletions.
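The pattern this commit applies throughout solcastapi.py is sketched below: the blocking open()/json calls are pushed onto the default executor so they never run on the Home Assistant event loop. This is a minimal sketch, not the integration's code; the save_json/load_json wrappers and the demo file path are illustrative, while open_file mirrors the helper added in the diff further down.

```python
import asyncio
import json


def open_file(filepath, mode, handler):
    """Blocking helper: open the file and hand it to the supplied callable."""
    with open(filepath, mode) as file:
        return handler(file)


async def save_json(filename, data):
    # Offload the blocking open()/json.dump() to the default thread pool so
    # the event loop stays responsive while the file is written.
    loop = asyncio.get_running_loop()
    await loop.run_in_executor(
        None,
        lambda: open_file(filename, "w", lambda f: json.dump(data, f, ensure_ascii=False)),
    )


async def load_json(filename):
    # Same idea for reads: json.load() runs in a worker thread.
    loop = asyncio.get_running_loop()
    return await loop.run_in_executor(
        None, lambda: open_file(filename, "r", lambda f: json.load(f))
    )


if __name__ == "__main__":
    async def demo():
        # Hypothetical demo path, not used by the integration.
        await save_json("/tmp/solcast_demo.json", {"version": 4})
        print(await load_json("/tmp/solcast_demo.json"))

    asyncio.run(demo())
```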
@@ -1,4 +1,4 @@
name: Validate
name: Validate with hacs

on:
push:
18 changes: 9 additions & 9 deletions custom_components/solcast/config_flow.py
@@ -1,4 +1,4 @@
"""Config flow for Solcast Solar integration."""
"""Config flow for Solcast PV Forecast integration."""
from __future__ import annotations
from typing import Any

@@ -17,7 +17,7 @@

@config_entries.HANDLERS.register(DOMAIN)
class SolcastSolarFlowHandler(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Solcast Solar."""
"""Handle a config flow for Solcast PV Forecast."""

VERSION = 6 #v5 started in 4.0.8, #6 started 4.0.15

@@ -38,7 +38,7 @@ async def async_step_user(

if user_input is not None:
return self.async_create_entry(
title= "Solcast Solar",
title= "Solcast PV Forecast",
data = {},
options={
CONF_API_KEY: user_input[CONF_API_KEY],
@@ -128,10 +128,10 @@ async def async_step_api(self, user_input: dict[str, Any] | None = None) -> Flow

self.hass.config_entries.async_update_entry(
self.config_entry,
title="Solcast Solar",
title="Solcast PV Forecast",
options=allConfigData,
)
return self.async_create_entry(title="Solcast Solar", data=None)
return self.async_create_entry(title="Solcast PV Forecast", data=None)

return self.async_show_form(
step_id="api",
@@ -230,11 +230,11 @@ async def async_step_dampen(self, user_input: dict[str, Any] | None = None) -> F

self.hass.config_entries.async_update_entry(
self.config_entry,
title="Solcast Solar",
title="Solcast PV Forecast",
options=allConfigData,
)

return self.async_create_entry(title="Solcast Solar", data=None)
return self.async_create_entry(title="Solcast PV Forecast", data=None)
except Exception as e:
errors["base"] = "unknown"

@@ -311,11 +311,11 @@ async def async_step_customsensor(self, user_input: dict[str, Any] | None = None

self.hass.config_entries.async_update_entry(
self.config_entry,
title="Solcast Solar",
title="Solcast PV Forecast",
options=allConfigData,
)

return self.async_create_entry(title="Solcast Solar", data=None)
return self.async_create_entry(title="Solcast PV Forecast", data=None)
except Exception as e:
errors["base"] = "unknown"

6 changes: 3 additions & 3 deletions custom_components/solcast/const.py
@@ -1,14 +1,14 @@
"""Constants for the Solcast Solar integration."""
"""Constants for the Solcast PV Forecast integration."""

from __future__ import annotations

from typing import Final

from homeassistant.helpers import selector

DOMAIN = "solcast_solar"
SOLCAST_URL = "https://api.solcast.com.au"
DOMAIN = "solcast"

SOLCAST_URL = "https://api.solcast.com.au"

ATTR_ENTRY_TYPE: Final = "entry_type"
ENTRY_TYPE_SERVICE: Final = "service"
4 changes: 2 additions & 2 deletions custom_components/solcast/coordinator.py
@@ -1,4 +1,4 @@
"""The Solcast Solar integration."""
"""The Solcast PV Forecast integration."""
from __future__ import annotations

import logging
@@ -15,7 +15,7 @@
_LOGGER = logging.getLogger(__name__)

class SolcastUpdateCoordinator(DataUpdateCoordinator):
"""Class to manage fetching data from Solcast Solar API."""
"""Class to manage fetching data from Solcast PV Forecast API."""

def __init__(self, hass: HomeAssistant, solcast: SolcastApi, version: str) -> None:
"""Initialize."""
8 changes: 4 additions & 4 deletions custom_components/solcast/manifest.json
@@ -1,14 +1,14 @@
{
"domain": "solcast_solar",
"domain": "solcast",
"name": "Solcast PV Forecast",
"after_dependencies": ["http"],
"codeowners": ["@oziee"],
"codeowners": ["@davidrapan"],
"config_flow": true,
"dependencies": ["homeassistant", "recorder", "select"],
"documentation": "https://github.com/oziee/ha-solcast-solar",
"documentation": "https://github.com/davidrapan/ha-solcast/",
"integration_type": "service",
"iot_class": "cloud_polling",
"issue_tracker": "https://github.com/oziee/ha-solcast-solar/issues",
"issue_tracker": "https://github.com/davidrapan/ha-solcast/issues",
"requirements": ["aiohttp>=3.8.5", "datetime>=4.3", "isodate>=0.6.1"],
"version": "4.0.23.1"
}
2 changes: 1 addition & 1 deletion custom_components/solcast/select.py
@@ -115,7 +115,7 @@ def __init__(
self._attr_device_info = {
ATTR_IDENTIFIERS: {(DOMAIN, entry.entry_id)},
ATTR_NAME: "Solcast PV Forecast",
ATTR_MANUFACTURER: "Oziee",
ATTR_MANUFACTURER: "Solcast",
ATTR_MODEL: "Solcast PV Forecast",
ATTR_ENTRY_TYPE: DeviceEntryType.SERVICE,
ATTR_SW_VERSION: coordinator._version,
4 changes: 2 additions & 2 deletions custom_components/solcast/sensor.py
@@ -311,7 +311,7 @@ def __init__(
self._attr_device_info = {
ATTR_IDENTIFIERS: {(DOMAIN, entry.entry_id)},
ATTR_NAME: "Solcast PV Forecast", #entry.title,
ATTR_MANUFACTURER: "Oziee",
ATTR_MANUFACTURER: "Solcast",
ATTR_MODEL: "Solcast PV Forecast",
ATTR_ENTRY_TYPE: DeviceEntryType.SERVICE,
ATTR_SW_VERSION: coordinator._version,
@@ -402,7 +402,7 @@ def __init__(
self._attr_device_info = {
ATTR_IDENTIFIERS: {(DOMAIN, entry.entry_id)},
ATTR_NAME: "Solcast PV Forecast", #entry.title,
ATTR_MANUFACTURER: "Oziee",
ATTR_MANUFACTURER: "Solcast",
ATTR_MODEL: "Solcast PV Forecast",
ATTR_ENTRY_TYPE: DeviceEntryType.SERVICE,
ATTR_SW_VERSION: coordinator._version,
148 changes: 78 additions & 70 deletions custom_components/solcast/solcastapi.py
@@ -41,6 +41,10 @@ def object_hook(self, obj):
ret[key] = value
return ret

def open_file(filepath, mode, x):
with open(filepath, mode) as file:
return x(file)

@dataclass
class ConnectionOptions:
"""Solcast API options for connection."""
@@ -94,8 +98,9 @@ async def serialize_data(self):
return

async with self._serialize_lock:
with open(self._filename, "w") as f:
json.dump(self._data, f, ensure_ascii=False, cls=DateTimeEncoder)
loop = asyncio.get_running_loop()
await loop.run_in_executor(None, lambda: open_file(self._filename, "w",
lambda file: json.dump(self._data, file, ensure_ascii = False, cls = DateTimeEncoder)))

async def sites_data(self):
"""Request data via the Solcast API."""
@@ -107,23 +112,26 @@ async def sites_data(self):
params = {"format": "json", "api_key": spl.strip()}
_LOGGER.debug(f"SOLCAST - trying to connect to - {self.options.host}/rooftop_sites?format=json&api_key=REDACTED")
async with async_timeout.timeout(60):
if file_exists(self._filename.replace('solcast','sites')):
filename = self._filename.replace('solcast','sites')
if file_exists(filename):
status = 404
with open(self._filename.replace('solcast','sites')) as f:
resp_json = json.load(f, cls=JSONDecoder)
status = 200
loop = asyncio.get_running_loop()
resp_json = await loop.run_in_executor(None, lambda: open_file(filename, "r",
lambda file: json.load(file, cls = JSONDecoder)))
status = 200
else:
resp: ClientResponse = await self.aiohttp_session.get(
url=f"{self.options.host}/rooftop_sites", params=params, ssl=False
)

resp_json = await resp.json(content_type=None)
status = resp.status
if status == 200:
async with self._serialize_lock:
with open(self._filename.replace('solcast','sites'), "w") as f:
json.dump(resp_json, f, ensure_ascii=False, cls=DateTimeEncoder)

loop = asyncio.get_running_loop()
await loop.run_in_executor(None, lambda: open_file(filename, "w",
lambda file: json.dump(resp_json, file, ensure_ascii = False, cls = DateTimeEncoder)))

if status == 200:
d = cast(dict, resp_json)
_LOGGER.debug(f"SOLCAST - Status 200 OK - sites_data returned data: {d}")
@@ -226,40 +234,41 @@ async def load_saved_data(self):
try:
if len(self._sites) > 0:
if file_exists(self._filename):
with open(self._filename) as data_file:
jsonData = json.load(data_file, cls=JSONDecoder)
json_version = jsonData.get("version", 1)
#self._weather = jsonData.get("weather", "unknown")
_LOGGER.debug(f"SOLCAST - load_saved_data file exists.. file type is {type(jsonData)}")
if json_version == _JSON_VERSION:
self._loaded_data = True
self._data = jsonData

#any new API keys so no sites data yet for those
ks = {}
for d in self._sites:
if not any(s == d.get('resource_id', '') for s in jsonData['siteinfo']):
ks[d.get('resource_id')] = d.get('apikey')

if len(ks.keys()) > 0:
#some api keys rooftop data does not exist yet so go and get it
_LOGGER.debug("SOLCAST - Must be new API jey added so go and get the data for it")
for a in ks:
await self.http_data_call(r_id=a, api=ks[a], dopast=True)
await self.serialize_data()

#any site changes that need to be removed
l = []
for s in jsonData['siteinfo']:
if not any(d.get('resource_id', '') == s for d in self._sites):
_LOGGER.info(f"Solcast rooftop resource id {s} no longer part of your system.. removing saved data from cached file")
l.append(s)

for ll in l:
del jsonData['siteinfo'][ll]

#create an up to date forecast and make sure the TZ fits just in case its changed
await self.buildforcastdata()
loop = asyncio.get_running_loop()
jsonData = await loop.run_in_executor(None, lambda: open_file(self._filename, "r",
lambda file: json.load(file, cls = JSONDecoder)))
json_version = jsonData.get("version", 1)
#self._weather = jsonData.get("weather", "unknown")
_LOGGER.debug(f"SOLCAST - load_saved_data file exists.. file type is {type(jsonData)}")
if json_version == _JSON_VERSION:
self._loaded_data = True
self._data = jsonData

#any new API keys so no sites data yet for those
ks = {}
for d in self._sites:
if not any(s == d.get('resource_id', '') for s in jsonData['siteinfo']):
ks[d.get('resource_id')] = d.get('apikey')

if len(ks.keys()) > 0:
#some api keys rooftop data does not exist yet so go and get it
_LOGGER.debug("SOLCAST - Must be new API jey added so go and get the data for it")
for a in ks:
await self.http_data_call(r_id=a, api=ks[a], dopast=True)
await self.serialize_data()

#any site changes that need to be removed
l = []
for s in jsonData['siteinfo']:
if not any(d.get('resource_id', '') == s for d in self._sites):
_LOGGER.info(f"Solcast rooftop resource id {s} no longer part of your system.. removing saved data from cached file")
l.append(s)

for ll in l:
del jsonData['siteinfo'][ll]

#create an up to date forecast and make sure the TZ fits just in case its changed
await self.buildforcastdata()

if not self._loaded_data:
#no file to load
@@ -678,10 +687,11 @@ async def fetch_data(self, path= "", hours=168, site="", apikey="", cachedname="
if self.apiCacheEnabled and file_exists(apiCacheFileName):
_LOGGER.debug(f"SOLCAST - Getting cached testing data for site {site}")
status = 404
with open(apiCacheFileName) as f:
resp_json = json.load(f)
status = 200
_LOGGER.debug(f"SOLCAST - Got cached file data for site {site}")
loop = asyncio.get_running_loop()
resp_json = await loop.run_in_executor(None, lambda: open_file(apiCacheFileName, "r",
lambda file: json.load(file, cls = JSONDecoder)))
status = 200
_LOGGER.debug(f"SOLCAST - Got cached file data for site {site}")
else:
#_LOGGER.debug(f"SOLCAST - OK REAL API CALL HAPPENING RIGHT NOW")
resp: ClientResponse = await self.aiohttp_session.get(
@@ -697,9 +707,11 @@
_LOGGER.debug("SOLCAST - Status 200 OK - API returned data.")

if self.apiCacheEnabled:
with open(apiCacheFileName, 'w') as f:
json.dump(resp_json, f, ensure_ascii=False)

loop = asyncio.get_running_loop()
# Should it be with: cls = DateTimeEncoder ???
await loop.run_in_executor(None, lambda: open_file(apiCacheFileName, "w",
lambda file: json.dump(resp_json, file, ensure_ascii = False)))

d = cast(dict, resp_json)
_LOGGER.debug(f"SOLCAST - fetch_data Returned: {d}")
return d
@@ -768,37 +780,36 @@ async def buildforcastdata(self):
yesterday = dt.now(self._tz).date() + timedelta(days=-730)
lastday = dt.now(self._tz).date() + timedelta(days=7)

_forecasts = []
for s in self._data['siteinfo']:
_forecasts = {}

for s, siteinfo in self._data['siteinfo'].items():
tally = 0
for x in self._data['siteinfo'][s]['forecasts']:
for x in siteinfo['forecasts']:
#loop each rooftop site and its forecasts
z = x["period_start"]
zz = z.astimezone(self._tz) #- timedelta(minutes=30)

#v4.0.8 added code to dampen the forecast data.. (* self._damp[h])

if zz.date() < lastday and zz.date() > yesterday:
if yesterday < zz.date() < lastday:
h = f"{zz.hour}"
if zz.date() == today:
tally += min(x[self._use_data_field] * 0.5 * self._damp[h], self._hardlimit)

itm = next((item for item in _forecasts if item["period_start"] == z), None)
itm = _forecasts.get(z)
if itm:
itm["pv_estimate"] = min(round(itm["pv_estimate"] + (x["pv_estimate"] * self._damp[h]),4), self._hardlimit)
itm["pv_estimate10"] = min(round(itm["pv_estimate10"] + (x["pv_estimate10"] * self._damp[h]),4), self._hardlimit)
itm["pv_estimate90"] = min(round(itm["pv_estimate90"] + (x["pv_estimate90"] * self._damp[h]),4), self._hardlimit)
else:
_forecasts.append({"period_start": z,"pv_estimate": min(round((x["pv_estimate"]* self._damp[h]),4), self._hardlimit),
"pv_estimate10": min(round((x["pv_estimate10"]* self._damp[h]),4), self._hardlimit),
"pv_estimate90": min(round((x["pv_estimate90"]* self._damp[h]),4), self._hardlimit)})

self._data['siteinfo'][s]['tally'] = round(tally, 4)

_forecasts = sorted(_forecasts, key=itemgetter("period_start"))

self._data_forecasts = _forecasts
_forecasts[z] = {"period_start": z,"pv_estimate": min(round((x["pv_estimate"]* self._damp[h]),4), self._hardlimit),
"pv_estimate10": min(round((x["pv_estimate10"]* self._damp[h]),4), self._hardlimit),
"pv_estimate90": min(round((x["pv_estimate90"]* self._damp[h]),4), self._hardlimit)}

siteinfo['tally'] = round(tally, 4)

self._data_forecasts = list(_forecasts.values())
self._data_forecasts.sort(key=itemgetter("period_start"))

await self.checkDataRecords()

@@ -820,7 +831,4 @@ async def checkDataRecords(self):
if len(h) == 48:
_LOGGER.debug(f"SOLCAST - Data for {da} contains all 48 records")
else:
_LOGGER.debug(f"SOLCAST - Data for {da} contains only {len(h)} of 48 records and may produce inaccurate forecast data")



_LOGGER.debug(f"SOLCAST - Data for {da} contains only {len(h)} of 48 records and may produce inaccurate forecast data")
4 changes: 2 additions & 2 deletions hacs.json
@@ -1,7 +1,7 @@
{
"name": "Solcast PV Solar",
"name": "Solcast PV Forecast",
"render_readme": false,
"homeassistant": "2023.7",
"zip_release": true,
"filename": "solcast_solar.zip"
"filename": "solcast.zip"
}
