-rw-r--r--  pyproject.toml                  12
-rw-r--r--  scripts/common/__init__.py      20
-rw-r--r--  scripts/common/postgres.py      21
-rw-r--r--  scripts/common/queue.py         23
-rw-r--r--  scripts/elvia.py                48
-rw-r--r--  scripts/elvia_gridtariff.py     60
-rw-r--r--  scripts/entsoe.py               59
-rw-r--r--  scripts/esphomeapi.py           90
-rw-r--r--  scripts/mqtt_listener.py        76
-rw-r--r--  scripts/mqtt_watch.py           30
-rw-r--r--  scripts/nb.py                   33
-rw-r--r--  scripts/neohub.py               22
-rw-r--r--  scripts/queue_runner.py         27
-rw-r--r--  scripts/tibber_consumption.py   41
-rw-r--r--  scripts/tibber_prices.py        58
-rw-r--r--  scripts/yr.py                   42
16 files changed, 376 insertions(+), 286 deletions(-)
diff --git a/pyproject.toml b/pyproject.toml
index 241d61a..de2c150 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -52,3 +52,15 @@ dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
# Assume Python 3.10
target-version = "py310"
+[tool.ruff.format]
+# Like Black, use double quotes for strings.
+quote-style = "double"
+
+# Like Black, indent with spaces, rather than tabs.
+indent-style = "space"
+
+# Like Black, respect magic trailing commas.
+skip-magic-trailing-comma = false
+
+# Like Black, automatically detect the appropriate line ending.
+line-ending = "auto"
diff --git a/scripts/common/__init__.py b/scripts/common/__init__.py
index 83f1b66..e349610 100644
--- a/scripts/common/__init__.py
+++ b/scripts/common/__init__.py
@@ -1,12 +1,12 @@
#!/usr/bin/env python3
-''' common functions and stuff '''
+""" common functions and stuff """
import logging
import os
import sys
from datetime import datetime
-QUEUE = bool(os.environ.get('el_QUEUE', False))
+QUEUE = bool(os.environ.get("el_QUEUE", False))
name = os.path.splitext(os.path.split(sys.argv[0])[-1])[0]
@@ -17,17 +17,17 @@ logging.basicConfig(format="%(name)s: %(levelname)s %(message)s")
# Use queue or postgres
if QUEUE is True:
- log.debug('Importing dbi from queue')
+ log.debug("Importing dbi from queue")
from .queue import dbi
else:
- log.debug('Importing dbi from postgres')
+ log.debug("Importing dbi from postgres")
from .postgres import dbi
# Insert state
def statein(sensor, value, device_class, unit, **kwargs):
- ''' Insert new state in db '''
- verbose = bool(kwargs['verbose']) if 'verbose' in kwargs else False
+ """Insert new state in db"""
+ verbose = bool(kwargs["verbose"]) if "verbose" in kwargs else False
sql = """INSERT INTO states
(sensor_id,
@@ -38,10 +38,6 @@ def statein(sensor, value, device_class, unit, **kwargs):
SELECT sensors.id, %s, %s, %s, %s
FROM sensors
WHERE sensors.name = %s;"""
- values = (value,
- device_class,
- unit,
- datetime.utcnow(),
- sensor)
- log.debug('dbi(' + sql + ',' + str(values) + ')')
+ values = (value, device_class, unit, datetime.utcnow(), sensor)
+ log.debug("dbi(" + sql + "," + str(values) + ")")
dbi(sql, values, verbose=verbose)
diff --git a/scripts/common/postgres.py b/scripts/common/postgres.py
index efa4b5a..5f6ea31 100644
--- a/scripts/common/postgres.py
+++ b/scripts/common/postgres.py
@@ -1,28 +1,31 @@
#!/usr/bin/env python3
-''' common functions and stuff '''
+""" common functions and stuff """
import os
import sys
import psycopg
-pg_db = os.environ['el_pg_db']
-pg_host = os.environ['el_pg_host']
-pg_user = os.environ.get('el_pg_user','')
-pg_pass = os.environ.get('el_pg_pass','')
+pg_db = os.environ["el_pg_db"]
+pg_host = os.environ["el_pg_host"]
+pg_user = os.environ.get("el_pg_user", "")
+pg_pass = os.environ.get("el_pg_pass", "")
+
def dbi(sql, values, **kwargs):
- ''' insert into db '''
- verbose = bool(kwargs['verbose']) if 'verbose' in kwargs else False
+ """insert into db"""
+ verbose = bool(kwargs["verbose"]) if "verbose" in kwargs else False
# pylint: disable=E1129
- with psycopg.connect(dbname=pg_db, host=pg_host, user=pg_user, password=pg_pass) as conn:
+ with psycopg.connect(
+ dbname=pg_db, host=pg_host, user=pg_user, password=pg_pass
+ ) as conn:
cur = conn.cursor()
if isinstance(values, list):
cur.executemany(sql, values)
elif isinstance(values, tuple):
cur.execute(sql, values)
else:
- print('`values` is a', type(values), 'but it needs to be tuple or list')
+ print("`values` is a", type(values), "but it needs to be tuple or list")
sys.exit(1)
if verbose is True:
print("Inserted and/or changed", cur.rowcount, "rows in db")
diff --git a/scripts/common/queue.py b/scripts/common/queue.py
index 4510d55..de49802 100644
--- a/scripts/common/queue.py
+++ b/scripts/common/queue.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-''' common functions and stuff '''
+""" common functions and stuff """
import logging
import os
@@ -7,26 +7,27 @@ import pickle
from litequeue import SQLQueue
-QUEUE_DB = os.environ.get('el_QUEUE_db', 'litequeue.db')
-QUEUE_DIR = os.environ.get('el_QUEUE_dir', 'queue')
+QUEUE_DB = os.environ.get("el_QUEUE_db", "litequeue.db")
+QUEUE_DIR = os.environ.get("el_QUEUE_dir", "queue")
-QUEUE_DB = QUEUE_DIR + "/" + QUEUE_DB
+QUEUE_DB = QUEUE_DIR + "/" + QUEUE_DB
log = logging.getLogger(__name__)
# Initialize queue
q = SQLQueue(QUEUE_DB, maxsize=None)
-def dbi(sql,values,**kwargs):
- ''' insert into queue '''
- log.debug('function dbi()')
- verbose = bool(kwargs['verbose']) if 'verbose' in kwargs else False
- log.debug('Inserting into sqlite-queue')
+def dbi(sql, values, **kwargs):
+ """insert into queue"""
+ log.debug("function dbi()")
+ verbose = bool(kwargs["verbose"]) if "verbose" in kwargs else False
+
+ log.debug("Inserting into sqlite-queue")
q.put(pickle.dumps([sql, values]))
if verbose is True:
- log.debug('verbose = True')
- table = sql.split(' ')[2]
+ log.debug("verbose = True")
+ table = sql.split(" ")[2]
num = 1 if isinstance(values, tuple) else len(values)
print("Inserted " + str(num) + " item(s) into queue for " + str(table))
return True
diff --git a/scripts/elvia.py b/scripts/elvia.py
index b74f784..fea0f61 100644
--- a/scripts/elvia.py
+++ b/scripts/elvia.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-''' Get energy consumption from Elvia '''
+""" Get energy consumption from Elvia """
import os
import sys
@@ -9,14 +9,14 @@ import common
import requests
from tzlocal import get_localzone
-apiKey = os.environ['el_elvia_token']
+apiKey = os.environ["el_elvia_token"]
apiUrl = "https://elvia.azure-api.net/customer/metervalues/api/v1/metervalues"
-startTime = datetime.now(get_localzone()) - timedelta(days = 2)
-startTime = startTime.isoformat('T')
-#startTime = '2023-05-23T08:16:15.408667+02:00' # <- edit for manual starttime. Like when filling in missing info.
+startTime = datetime.now(get_localzone()) - timedelta(days=2)
+startTime = startTime.isoformat("T")
+# startTime = '2023-05-23T08:16:15.408667+02:00' # <- edit for manual starttime. Like when filling in missing info.
-endTime = datetime.now(get_localzone()).isoformat('T')
+endTime = datetime.now(get_localzone()).isoformat("T")
### Get the data
@@ -25,8 +25,8 @@ try:
# Request headers
hdr = {
- 'Cache-Control': 'no-cache',
- 'Authorization': "Bearer " + apiKey,
+ "Cache-Control": "no-cache",
+ "Authorization": "Bearer " + apiKey,
}
response = requests.get(url, headers=hdr, timeout=10)
@@ -40,25 +40,35 @@ except requests.exceptions.RequestException as e:
sys.exit(e)
data = response.json()
-print("Got "+ str(len(data['meteringpoints'][0]['metervalue']['timeSeries'])) +" items from between "+ startTime + " and " + endTime)
+print(
+ "Got "
+ + str(len(data["meteringpoints"][0]["metervalue"]["timeSeries"]))
+ + " items from between "
+ + startTime
+ + " and "
+ + endTime
+)
### insert data into database
values = []
-for item in data['meteringpoints'][0]['metervalue']['timeSeries']:
+for item in data["meteringpoints"][0]["metervalue"]["timeSeries"]:
# Only deal with verified items.
- if item['verified']:
- values.append((
- data['meteringpoints'][0]['meteringPointId'],
- item['startTime'],
- item['endTime'],
- item['value'],
- item['uom'],
- item['production']))
+ if item["verified"]:
+ values.append(
+ (
+ data["meteringpoints"][0]["meteringPointId"],
+ item["startTime"],
+ item["endTime"],
+ item["value"],
+ item["uom"],
+ item["production"],
+ )
+ )
# SQL
-sql = """INSERT INTO elvia
+sql = """INSERT INTO elvia
VALUES(%s, %s, %s, %s, %s, %s)
ON CONFLICT (startTime,endTime) DO NOTHING;"""
diff --git a/scripts/elvia_gridtariff.py b/scripts/elvia_gridtariff.py
index 720336b..98164bb 100644
--- a/scripts/elvia_gridtariff.py
+++ b/scripts/elvia_gridtariff.py
@@ -1,5 +1,5 @@
#!/usr/bin/python3
-''' get grid tariffs'''
+""" get grid tariffs"""
import os
import sys
@@ -11,16 +11,16 @@ from tzlocal import get_localzone
# API documentation: https://elvia.portal.azure-api.net/docs/services/gridtariffapi/operations/post-digin-api-v-tariffquery-meteringpointsgridtariffs?
-apiKey = os.environ['el_elvia_grid_api_key']
+apiKey = os.environ["el_elvia_grid_api_key"]
apiUrl = "https://elvia.azure-api.net/grid-tariff/digin/api/1/tariffquery/meteringpointsgridtariffs"
-meteringPointId = os.environ['el_meteringPointId']
+meteringPointId = os.environ["el_meteringPointId"]
-startTime = datetime.now(get_localzone()) - timedelta(days = 2)
-startTime = startTime.strftime('%Y-%m-%d')
-#startTime = '2023-05-23' # <- edit for manual starttime. Like when filling in missing info.
+startTime = datetime.now(get_localzone()) - timedelta(days=2)
+startTime = startTime.strftime("%Y-%m-%d")
+# startTime = '2023-05-23' # <- edit for manual starttime. Like when filling in missing info.
-endTime = datetime.now(get_localzone()) + timedelta(days = 2)
-endTime = endTime.strftime('%Y-%m-%d')
+endTime = datetime.now(get_localzone()) + timedelta(days=2)
+endTime = endTime.strftime("%Y-%m-%d")
### Get the data
@@ -28,16 +28,16 @@ try:
url = apiUrl
# Request headers
hdr = {
- 'Cache-Control': 'no-cache',
- 'X-API-Key': apiKey,
- 'Content-Type': 'application/json'
+ "Cache-Control": "no-cache",
+ "X-API-Key": apiKey,
+ "Content-Type": "application/json",
}
# Request body
body = {
- 'starttime': startTime,
- 'endtime': endTime,
- 'meteringPointIds': [ meteringPointId ]
+ "starttime": startTime,
+ "endtime": endTime,
+ "meteringPointIds": [meteringPointId],
}
response = requests.post(url, headers=hdr, json=body, timeout=10)
@@ -52,24 +52,34 @@ except requests.exceptions.RequestException as e:
data = response.json()
-print("Got "+ str(len(data['gridTariffCollections'][0]['gridTariff']['tariffPrice']['hours'])) +" items from between "+ startTime + " and " + endTime)
+print(
+ "Got "
+ + str(len(data["gridTariffCollections"][0]["gridTariff"]["tariffPrice"]["hours"]))
+ + " items from between "
+ + startTime
+ + " and "
+ + endTime
+)
### insert data into database
values = []
-for item in data['gridTariffCollections'][0]['gridTariff']['tariffPrice']['hours']:
- values.append((
- meteringPointId,
- item['startTime'],
- item['expiredAt'],
- item['shortName'],
- item['isPublicHoliday'],
- item['energyPrice']['total'],
- item['energyPrice']['totalExVat']))
+for item in data["gridTariffCollections"][0]["gridTariff"]["tariffPrice"]["hours"]:
+ values.append(
+ (
+ meteringPointId,
+ item["startTime"],
+ item["expiredAt"],
+ item["shortName"],
+ item["isPublicHoliday"],
+ item["energyPrice"]["total"],
+ item["energyPrice"]["totalExVat"],
+ )
+ )
# SQL
-sql = """INSERT INTO elvia_gridtariff
+sql = """INSERT INTO elvia_gridtariff
VALUES(%s, %s, %s, %s, %s, %s, %s)
ON CONFLICT (meteringPointId,startTime,endTime) DO NOTHING"""
diff --git a/scripts/entsoe.py b/scripts/entsoe.py
index 70a1f19..fc34274 100644
--- a/scripts/entsoe.py
+++ b/scripts/entsoe.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-''' Get energyprices from Entsoe '''
+""" Get energyprices from Entsoe """
import os
import sys
@@ -22,36 +22,49 @@ from tzlocal import get_localzone
# > respond to your request.
# 1: https://transparency.entsoe.eu/content/static_content/Static%20content/web%20api/Guide.html#_authentication_and_authorisation
# 2: https://transparency.entsoe.eu/content/static_content/download?path=/Static%20content/API-Token-Management.pdf
-apiKey = os.environ['el_entsoe_token']
+apiKey = os.environ["el_entsoe_token"]
# https://transparency.entsoe.eu/content/static_content/Static%20content/web%20api/Guide.html
apiUrl = "https://web-api.tp.entsoe.eu/api?securityToken=" + apiKey
-startTime = datetime.now(get_localzone()) - timedelta(days = 7)
-startTime = startTime.strftime('%Y%m%d')
-#startTime = '20230523' # <- edit for manual starttime. Like when filling in missing info.
+startTime = datetime.now(get_localzone()) - timedelta(days=7)
+startTime = startTime.strftime("%Y%m%d")
+# startTime = '20230523' # <- edit for manual starttime. Like when filling in missing info.
-endTime = datetime.now(get_localzone()) + timedelta(days = 1)
-endTime = endTime.strftime('%Y%m%d')
+endTime = datetime.now(get_localzone()) + timedelta(days=1)
+endTime = endTime.strftime("%Y%m%d")
jobname = os.path.splitext(os.path.basename(__file__))[0]
# https://transparency.entsoe.eu/content/static_content/Static%20content/web%20api/Guide.html#_areas
-areas = [ {"name": "NO-0", "code": "10YNO-0--------C"},
- {"name": "NO-1", "code": "10YNO-1--------2"},
- {"name": "NO-2", "code": "10YNO-2--------T"},
- {"name": "NO-3", "code": "10YNO-3--------J"},
- {"name": "NO-4", "code": "10YNO-4--------9"} ]
+areas = [
+ {"name": "NO-0", "code": "10YNO-0--------C"},
+ {"name": "NO-1", "code": "10YNO-1--------2"},
+ {"name": "NO-2", "code": "10YNO-2--------T"},
+ {"name": "NO-3", "code": "10YNO-3--------J"},
+ {"name": "NO-4", "code": "10YNO-4--------9"},
+]
-UTC = tz.gettz('UTC')
-CET = tz.gettz('Europe/Oslo')
+UTC = tz.gettz("UTC")
+CET = tz.gettz("Europe/Oslo")
# Get the data
-values=[]
+values = []
for area in areas:
try:
- url = apiUrl + "&documentType=A44&in_Domain=" + area["code"] + "&out_Domain=" + area["code"] + "&periodStart=" + startTime + "0000&periodEnd=" + endTime + "0000"
+ url = (
+ apiUrl
+ + "&documentType=A44&in_Domain="
+ + area["code"]
+ + "&out_Domain="
+ + area["code"]
+ + "&periodStart="
+ + startTime
+ + "0000&periodEnd="
+ + endTime
+ + "0000"
+ )
print("Getting data for " + area["code"])
response = requests.get(url, timeout=10)
@@ -69,20 +82,19 @@ for area in areas:
items = 0
if "Publication_MarketDocument" in data_dict:
for lista in data_dict["Publication_MarketDocument"]["TimeSeries"]:
- utctime = datetime.strptime(lista["Period"]["timeInterval"]["start"], "%Y-%m-%dT%H:%MZ")
- utctime = utctime.replace(tzinfo = UTC)
+ utctime = datetime.strptime(
+ lista["Period"]["timeInterval"]["start"], "%Y-%m-%dT%H:%MZ"
+ )
+ utctime = utctime.replace(tzinfo=UTC)
cettime = utctime.astimezone(CET)
items += len(lista["Period"]["Point"])
for item in lista["Period"]["Point"]:
# the response contains timerange, but not timestamp for every price, so we must calculate it
- time = str(cettime + timedelta(hours = int(item["position"]) - 1))
+ time = str(cettime + timedelta(hours=int(item["position"]) - 1))
# append values
- values.append((
- time,
- area["name"],
- item["price.amount"]))
+ values.append((time, area["name"], item["price.amount"]))
print("Got " + str(items) + " records")
@@ -92,5 +104,4 @@ sql = """ INSERT INTO entsoe
ON CONFLICT (starttime, zone) DO NOTHING"""
-
common.dbi(sql, values, verbose=True)
diff --git a/scripts/esphomeapi.py b/scripts/esphomeapi.py
index 5fcbe37..390fbd5 100644
--- a/scripts/esphomeapi.py
+++ b/scripts/esphomeapi.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-''' Check on esphome-devices '''
+""" Check on esphome-devices """
import argparse
import asyncio
@@ -9,40 +9,42 @@ import sys
import common
-#import colorlog
+# import colorlog
import zeroconf
from aioesphomeapi import APIClient, APIConnectionError, ReconnectLogic, SensorState
sleepsec = 60
-noise_psk = os.environ['el_esphome_api_psk']
+noise_psk = os.environ["el_esphome_api_psk"]
+
async def main(args):
"""Connect to an ESPHome device and get details."""
- log.debug('function main()')
+ log.debug("function main()")
# Establish connection
api = APIClient(
address=args.address,
port=6053,
- password='',
+ password="",
client_info="esphomeapi.py",
- noise_psk=noise_psk)
+ noise_psk=noise_psk,
+ )
- log.debug('Connecting')
+ log.debug("Connecting")
try:
await api.connect(login=True)
except APIConnectionError as e:
log.error("esphome api connection error - %s", e)
sys.exit(1)
- log.info('Connected. Api version: ' + str(api.api_version))
+ log.info("Connected. Api version: " + str(api.api_version))
# Show device details
- log.debug('Getting device info')
+ log.debug("Getting device info")
device = vars(await api.device_info())
- log.info('Device name: ' + device['name'])
+ log.info("Device name: " + device["name"])
- log.debug('Getting sensors')
+ log.debug("Getting sensors")
rawsensors, _ = await api.list_entities_services()
sensors = {}
for sensor in rawsensors:
@@ -50,29 +52,41 @@ async def main(args):
if log.isEnabledFor(logging.DEBUG):
from pprint import pformat
- log.debug('Sensors: \n' + pformat(sensors))
- log.info('Disconnecting')
+ log.debug("Sensors: \n" + pformat(sensors))
+
+ log.info("Disconnecting")
await api.disconnect()
def callback(state):
if type(state) == SensorState and state.missing_state is False:
- log.debug('function callback(' + str(state) + ')')
+ log.debug("function callback(" + str(state) + ")")
sensor = sensors[state.key]
value = state.state
- if 'accuracy_decimals' in sensor:
- decimals = sensor['accuracy_decimals']
+ if "accuracy_decimals" in sensor:
+ decimals = sensor["accuracy_decimals"]
value = round(value, decimals) if decimals > 0 else round(value)
- unit = sensor['unit_of_measurement'] if 'unit_of_measurement' in sensor else ''
- device_class = sensor['device_class'] if 'device_class' in sensor else ''
-
- log.info(sensor['name'] + ' ' + sensor['device_class'] + ' - ' + str(value) + str(unit))
- common.statein(device['friendly_name'], value, device_class, unit, verbose=True)
+ unit = (
+ sensor["unit_of_measurement"] if "unit_of_measurement" in sensor else ""
+ )
+ device_class = sensor["device_class"] if "device_class" in sensor else ""
+
+ log.info(
+ sensor["name"]
+ + " "
+ + sensor["device_class"]
+ + " - "
+ + str(value)
+ + str(unit)
+ )
+ common.statein(
+ device["friendly_name"], value, device_class, unit, verbose=True
+ )
async def on_connect() -> None:
- log.debug('function on_connect()')
- log.info('Connected to API')
+ log.debug("function on_connect()")
+ log.info("Connected to API")
try:
await api.subscribe_states(callback)
except APIConnectionError as e:
@@ -83,7 +97,7 @@ async def main(args):
await api.disconnect()
async def on_disconnect() -> None:
- log.debug('function on_disconnect()')
+ log.debug("function on_disconnect()")
log.warning("Disconnected from API")
await asyncio.sleep(sleepsec)
@@ -91,14 +105,14 @@ async def main(args):
client=api,
on_connect=on_connect,
on_disconnect=on_disconnect,
- zeroconf_instance=zeroconf.Zeroconf()
+ zeroconf_instance=zeroconf.Zeroconf(),
)
await reconnect.start()
try:
while True:
try:
- log.debug('Sleep for ' + str(sleepsec) + 's')
+ log.debug("Sleep for " + str(sleepsec) + "s")
await asyncio.sleep(sleepsec)
except Exception as e:
log.error("catched exception - %s", e)
@@ -114,8 +128,8 @@ async def main(args):
if __name__ == "__main__":
# Logging
- #handler = colorlog.StreamHandler()
- #handler.setFormatter(colorlog.ColoredFormatter(
+ # handler = colorlog.StreamHandler()
+ # handler.setFormatter(colorlog.ColoredFormatter(
# "%(log_color)s%(levelname)s - %(message)s {%(filename)s:%(lineno)d}",
# log_colors={
# 'DEBUG': 'light_black',
@@ -124,26 +138,30 @@ if __name__ == "__main__":
# 'ERROR': 'red',
# 'CRITICAL': 'red,bg_white'
# }))
- #log = colorlog.getLogger(__name__)
- #log.setLevel(logging.WARNING)
- #log.addHandler(handler)
+ # log = colorlog.getLogger(__name__)
+ # log.setLevel(logging.WARNING)
+ # log.addHandler(handler)
log = logging.getLogger(__name__)
log.setLevel(logging.WARNING)
logging.basicConfig(format="%(levelname)s - %(message)s {%(filename)s:%(lineno)d}")
- parser = argparse.ArgumentParser(description="Connect to an esphome-device and access the native api")
- parser.add_argument( "-a", "--address", help="Address of esp-device to connect to")
- parser.add_argument( "-v", "--verbose", help="Set logging to debug mode", action="store_true")
+ parser = argparse.ArgumentParser(
+ description="Connect to an esphome-device and access the native api"
+ )
+ parser.add_argument("-a", "--address", help="Address of esp-device to connect to")
+ parser.add_argument(
+ "-v", "--verbose", help="Set logging to debug mode", action="store_true"
+ )
args = parser.parse_args()
# Verbose logging?
if args.verbose:
log.setLevel(logging.DEBUG)
- log.debug('asyncio.run(main(args))')
+ log.debug("asyncio.run(main(args))")
asyncio.run(main(args))
-print('Bottom of script. Exiting.')
+print("Bottom of script. Exiting.")
sys.exit(0)
diff --git a/scripts/mqtt_listener.py b/scripts/mqtt_listener.py
index 981c40a..3ee688d 100644
--- a/scripts/mqtt_listener.py
+++ b/scripts/mqtt_listener.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-''' Listen for mqtt-events, and trigger for some '''
+""" Listen for mqtt-events, and trigger for some """
import json
import os
@@ -8,41 +8,45 @@ from datetime import datetime
import common
import paho.mqtt.client as mqtt
-mqtt_server = os.environ['el_mqtt_server']
-mqtt_port = int(os.environ['el_mqtt_port'])
-keepalive = int(os.environ['el_mqtt_keepalive'])
-mqtt_topic = os.environ['el_mqtt_topic']
-mqtt_user = os.environ['el_mqtt_user']
-mqtt_pass = os.environ['el_mqtt_pass']
-
-tempsensors = [ 'Bad Temp',
- 'Barnerom Temp',
- 'Isobod Temp',
- 'Kjøkken Temp Matskap',
- 'Kontor Temp',
- 'Loft Temp',
- 'Soverom Temp',
- 'Stue Temp Display',
- 'Stue Temp Stuebord',
- 'Stue Temp Teleskap',
- 'Toalett motion',
- 'Utebod Temp',
- 'Vaskerom Temp']
+mqtt_server = os.environ["el_mqtt_server"]
+mqtt_port = int(os.environ["el_mqtt_port"])
+keepalive = int(os.environ["el_mqtt_keepalive"])
+mqtt_topic = os.environ["el_mqtt_topic"]
+mqtt_user = os.environ["el_mqtt_user"]
+mqtt_pass = os.environ["el_mqtt_pass"]
+
+tempsensors = [
+ "Bad Temp",
+ "Barnerom Temp",
+ "Isobod Temp",
+ "Kjøkken Temp Matskap",
+ "Kontor Temp",
+ "Loft Temp",
+ "Soverom Temp",
+ "Stue Temp Display",
+ "Stue Temp Stuebord",
+ "Stue Temp Teleskap",
+ "Toalett motion",
+ "Utebod Temp",
+ "Vaskerom Temp",
+]
+
# The callback for when the client receives a CONNACK response from the server.
def on_connect(client, userdata, flags, rc):
- print("Connected with result code "+str(rc))
+ print("Connected with result code " + str(rc))
# Subscribing in on_connect() means that if we lose the connection and
# reconnect then subscriptions will be renewed.
client.subscribe(mqtt_topic)
+
# The callback for when a PUBLISH message is received from the server.
def on_message(client, userdata, msg):
- name = msg.topic.split('/')[1]
+ name = msg.topic.split("/")[1]
data = json.loads(msg.payload)
- if name == 'HAN' and 'current' in data:
+ if name == "HAN" and "current" in data:
sql = """INSERT INTO mqtt_han
(name,
current,
@@ -51,21 +55,21 @@ def on_message(client, userdata, msg):
linkquality,
time)
VALUES(%s,%s,%s,%s,%s,%s)"""
- values = (name,
- data['current'],
- data['power'],
- data['voltage'],
- data['linkquality'],
- datetime.utcnow())
+ values = (
+ name,
+ data["current"],
+ data["power"],
+ data["voltage"],
+ data["linkquality"],
+ datetime.utcnow(),
+ )
common.dbi(sql, values, verbose=True)
if name in tempsensors:
- if 'temperature' in data:
- common.statein(name, data['temperature'], 'temperature', '°C', verbose=True)
- if 'humidity' in data:
- common.statein(name, data['humidity'], 'humidity', '%', verbose=True)
-
-
+ if "temperature" in data:
+ common.statein(name, data["temperature"], "temperature", "°C", verbose=True)
+ if "humidity" in data:
+ common.statein(name, data["humidity"], "humidity", "%", verbose=True)
# mqtt
diff --git a/scripts/mqtt_watch.py b/scripts/mqtt_watch.py
index a845d30..e2c4e21 100644
--- a/scripts/mqtt_watch.py
+++ b/scripts/mqtt_watch.py
@@ -1,44 +1,44 @@
#!/usr/bin/env python3
-''' Listen for mqtt-events, and trigger for some '''
+""" Listen for mqtt-events, and trigger for some """
import os
from datetime import datetime
import paho.mqtt.client as mqtt
-mqtt_server = os.environ['el_mqtt_server']
-mqtt_port = int(os.environ['el_mqtt_port'])
-keepalive = int(os.environ['el_mqtt_keepalive'])
-mqtt_topic = os.environ['el_mqtt_topic']
-mqtt_user = os.environ['el_mqtt_user']
-mqtt_pass = os.environ['el_mqtt_pass']
+mqtt_server = os.environ["el_mqtt_server"]
+mqtt_port = int(os.environ["el_mqtt_port"])
+keepalive = int(os.environ["el_mqtt_keepalive"])
+mqtt_topic = os.environ["el_mqtt_topic"]
+mqtt_user = os.environ["el_mqtt_user"]
+mqtt_pass = os.environ["el_mqtt_pass"]
+
# The callback for when the client receives a CONNACK response from the server.
def on_connect(client, userdata, flags, rc):
- print("Connected with result code "+str(rc))
+ print("Connected with result code " + str(rc))
# Subscribing in on_connect() means that if we lose the connection and
# reconnect then subscriptions will be renewed.
- client.subscribe('#')
+ client.subscribe("#")
+
# The callback for when a PUBLISH message is received from the server.
def on_message(client, userdata, msg):
-
print(msg.topic, datetime.utcnow(), msg.payload)
- #if name.startswith('tmp') and 'temperature' in data and 'humidity' in data:
+ # if name.startswith('tmp') and 'temperature' in data and 'humidity' in data:
# sql = "INSERT INTO mqtt_temps (name, temperature, humidity, battery, linkquality, voltage, time) VALUES(%s,%s,%s,%s,%s,%s,%s)"
# values = (name, data['temperature'], data['humidity'], data['battery'], data['linkquality'], data['voltage'], datetime.utcnow())
- #elif name == 'HAN' and 'current' in data:
+ # elif name == 'HAN' and 'current' in data:
# sql = "INSERT INTO mqtt_han (name, current, power, voltage, linkquality, time) VALUES(%s,%s,%s,%s,%s,%s)"
# values = (name, data['current'], data['power'], data['voltage'], data['linkquality'], datetime.utcnow())
- #else:
+ # else:
# return
- #common.dbi(sql, values, verbose=True)
-
+ # common.dbi(sql, values, verbose=True)
# mqtt
diff --git a/scripts/nb.py b/scripts/nb.py
index 2e2a7f9..d00b0d2 100644
--- a/scripts/nb.py
+++ b/scripts/nb.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-''' Get exchange rates from nb '''
+""" Get exchange rates from nb """
import csv
import os
@@ -14,15 +14,15 @@ from tzlocal import get_localzone
# I'm not sure I understand Norges Banks json-model. It seems a lot easier to just get the CSV, and convert it to JSON.
apiUrl = "https://data.norges-bank.no/api/data/EXR/B.EUR.NOK.SP?format=csv&locale=en"
-pg_db = os.environ['el_pg_db']
-pg_host = os.environ['el_pg_host']
+pg_db = os.environ["el_pg_db"]
+pg_host = os.environ["el_pg_host"]
pg_table = "nbex"
-startTime = datetime.now(get_localzone()) - timedelta(days = 10)
-startTime = startTime.strftime('%Y-%m-%d')
-#startTime = '2023-05-23' # <- edit for manual starttime. Like when filling in missing info.
+startTime = datetime.now(get_localzone()) - timedelta(days=10)
+startTime = startTime.strftime("%Y-%m-%d")
+# startTime = '2023-05-23' # <- edit for manual starttime. Like when filling in missing info.
-endTime = datetime.now(get_localzone()).strftime('%Y-%m-%d')
+endTime = datetime.now(get_localzone()).strftime("%Y-%m-%d")
temp = tempfile.NamedTemporaryFile()
@@ -36,7 +36,7 @@ try:
print("Oh shit")
response.raise_for_status()
- with open(temp.name,'w', encoding="utf-8") as fd:
+ with open(temp.name, "w", encoding="utf-8") as fd:
fd.write(response.text)
except requests.exceptions.RequestException as e:
@@ -48,19 +48,22 @@ except requests.exceptions.RequestException as e:
values = []
with open(temp.name, encoding="utf-8") as csvfile:
- csvReader = csv.DictReader(csvfile, delimiter=';')
+ csvReader = csv.DictReader(csvfile, delimiter=";")
for item in csvReader:
- values.append((
- item["TIME_PERIOD"],
- item["BASE_CUR"],
- item["QUOTE_CUR"],
- item["OBS_VALUE"]))
+ values.append(
+ (
+ item["TIME_PERIOD"],
+ item["BASE_CUR"],
+ item["QUOTE_CUR"],
+ item["OBS_VALUE"],
+ )
+ )
temp.close()
# SQL
-sql = """INSERT INTO nbex
+sql = """INSERT INTO nbex
VALUES(%s, %s, %s, %s)
ON CONFLICT (startdate,base_cur,quote_cur) DO NOTHING"""
diff --git a/scripts/neohub.py b/scripts/neohub.py
index e15a00e..bf43859 100644
--- a/scripts/neohub.py
+++ b/scripts/neohub.py
@@ -1,5 +1,5 @@
#!/usr/bin/python3
-''' Get stuff from neohub! This is mostly the usage-example from https://gitlab.com/neohubapi/neohubapi/ '''
+""" Get stuff from neohub! This is mostly the usage-example from https://gitlab.com/neohubapi/neohubapi/ """
import asyncio
import os
@@ -9,20 +9,21 @@ from datetime import datetime
import common
import neohubapi.neohub as neohub
-neohub_ip = os.environ['el_neohub_ip']
-neohub_port = os.environ['el_neohub_port']
-SLEEP = 120 # Sleep between runs
+neohub_ip = os.environ["el_neohub_ip"]
+neohub_port = os.environ["el_neohub_port"]
+SLEEP = 120 # Sleep between runs
+
async def call_neohub():
- ''' async runner! w00p '''
+ """async runner! w00p"""
# Legacy connection
hub = neohub.NeoHub(neohub_ip, int(neohub_port))
# Or, for a websocket connection:
# hub = neohub.Neohub(port=4243, token='xxx-xxxxxxx')
# system = await hub.get_system()
hub_data, devices = await hub.get_live_data()
- for device in devices['thermostats']:
- #print(f"Temperature in zone {device.name}: {device}")
+ for device in devices["thermostats"]:
+ # print(f"Temperature in zone {device.name}: {device}")
values = (
datetime.utcnow(),
device.time,
@@ -32,9 +33,10 @@ async def call_neohub():
device.heat_on,
device.current_floor_temperature,
device.target_temperature,
- device.temperature)
+ device.temperature,
+ )
- sql = """INSERT INTO neohub
+ sql = """INSERT INTO neohub
(timestamp,
time,
device_id,
@@ -47,7 +49,7 @@ async def call_neohub():
VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s)"""
common.dbi(sql, values, verbose=True)
- common.statein('neohub', device.temperature, 'temperature', '°C', verbose=True)
+ common.statein("neohub", device.temperature, "temperature", "°C", verbose=True)
# Loop it forever
diff --git a/scripts/queue_runner.py b/scripts/queue_runner.py
index 9ed52f2..fd88600 100644
--- a/scripts/queue_runner.py
+++ b/scripts/queue_runner.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-''' move items from queue to database '''
+""" move items from queue to database """
import os
import pickle
@@ -9,10 +9,12 @@ import time
from common.postgres import dbi
from litequeue import SQLQueue
-QUEUE_DB = os.environ.get('el_QUEUE_db', 'litequeue.db')
-QUEUE_DIR = os.environ.get('el_QUEUE_dir', 'queue')
-QUEUE_DB = QUEUE_DIR + "/" + QUEUE_DB
-QUEUE_SLEEP = int(os.environ.get('el_QUEUE_sleep', 15)) # Default sleep 15 seconds when queue empty
+QUEUE_DB = os.environ.get("el_QUEUE_db", "litequeue.db")
+QUEUE_DIR = os.environ.get("el_QUEUE_dir", "queue")
+QUEUE_DB = QUEUE_DIR + "/" + QUEUE_DB
+QUEUE_SLEEP = int(
+ os.environ.get("el_QUEUE_sleep", 15)
+) # Default sleep 15 seconds when queue empty
# Unlock all
con = sqlite3.connect(QUEUE_DB)
@@ -39,8 +41,8 @@ while True:
# get message
task = q.pop()
- raw = pickle.loads(task['message'])
- msgids.append(task['message_id'])
+ raw = pickle.loads(task["message"])
+ msgids.append(task["message_id"])
sql = raw[0]
# if the queue-item already is a batch-job, don't do any more batch-work
@@ -50,28 +52,27 @@ while True:
else:
values.append(raw[1])
-
# Check if we can batch up with the next message in queue
i += 1
if i < 10 and q.qsize() - len(msgids) >= 1:
- nextraw = pickle.loads(q.peek()['message'])
+ nextraw = pickle.loads(q.peek()["message"])
nextsql = nextraw[0]
nextvalues = nextraw[1]
if sql == nextsql and isinstance(nextvalues, tuple):
continue
- dbi(sql,values)
+ dbi(sql, values)
for msgid in msgids:
q.done(msgid)
- table = sql.split(' ')[2].strip()
+ table = sql.split(" ")[2].strip()
num = 1 if isinstance(values, tuple) else len(values)
- left = str(q.qsize()) + " items left in queue" if q.qsize() > 0 else ''
+ left = str(q.qsize()) + " items left in queue" if q.qsize() > 0 else ""
print("Processed", num, "item(s) for table", table + ".", left)
msgids = []
values = []
- i=0
+ i = 0
print("END")
diff --git a/scripts/tibber_consumption.py b/scripts/tibber_consumption.py
index b88d00b..f54faa6 100644
--- a/scripts/tibber_consumption.py
+++ b/scripts/tibber_consumption.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-''' import energy consumption from tibber '''
+""" import energy consumption from tibber """
import os
import sys
@@ -10,13 +10,13 @@ import requests
from tzlocal import get_localzone
# variables
-apiKey = os.environ['el_tibber_token']
+apiKey = os.environ["el_tibber_token"]
apiUrl = "https://api.tibber.com/v1-beta/gql"
-startTime = datetime.now(get_localzone()) - timedelta(days = 1)
-startTime = startTime.isoformat('T')
+startTime = datetime.now(get_localzone()) - timedelta(days=1)
+startTime = startTime.isoformat("T")
-endTime = datetime.now(get_localzone()).isoformat('T')
+endTime = datetime.now(get_localzone()).isoformat("T")
# Get the data
try:
@@ -24,11 +24,12 @@ try:
# Request headers
hdr = {
- 'Authorization': "Bearer " + apiKey,
- 'Content-Type': 'application/json',
+ "Authorization": "Bearer " + apiKey,
+ "Content-Type": "application/json",
}
- body = {"query":"""{
+ body = {
+ "query": """{
viewer {
homes {
consumption(resolution: HOURLY, last:100) {
@@ -43,7 +44,8 @@ try:
}
}
}
- } }"""}
+ } }"""
+ }
response = requests.post(url, headers=hdr, json=body, timeout=10)
if response.status_code != 200:
@@ -65,17 +67,20 @@ print("Got " + str(numdata) + " rows from Tibber")
values = []
for item in data["data"]["viewer"]["homes"][0]["consumption"]["nodes"]:
if item["consumption"] is not None:
- values.append((
- item["from"],
- item["to"],
- item["consumption"],
- item["consumptionUnit"],
- item["cost"],
- item["unitPrice"],
- item["unitPriceVAT"]))
+ values.append(
+ (
+ item["from"],
+ item["to"],
+ item["consumption"],
+ item["consumptionUnit"],
+ item["cost"],
+ item["unitPrice"],
+ item["unitPriceVAT"],
+ )
+ )
# SQL
-sql = """INSERT INTO tibber_consumption
+sql = """INSERT INTO tibber_consumption
VALUES(%s, %s, %s, %s, %s, %s, %s)
ON CONFLICT (startTime,endTime) DO NOTHING"""
diff --git a/scripts/tibber_prices.py b/scripts/tibber_prices.py
index af839bd..53642bb 100644
--- a/scripts/tibber_prices.py
+++ b/scripts/tibber_prices.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-''' import energy prices from tibber '''
+""" import energy prices from tibber """
import os
import sys
@@ -10,13 +10,13 @@ import requests
from tzlocal import get_localzone
# variables
-apiKey = os.environ['el_tibber_token']
+apiKey = os.environ["el_tibber_token"]
apiUrl = "https://api.tibber.com/v1-beta/gql"
-startTime = datetime.now(get_localzone()) - timedelta(days = 1)
-startTime = startTime.isoformat('T')
+startTime = datetime.now(get_localzone()) - timedelta(days=1)
+startTime = startTime.isoformat("T")
-endTime = datetime.now(get_localzone()).isoformat('T')
+endTime = datetime.now(get_localzone()).isoformat("T")
# Get the data
try:
@@ -24,11 +24,12 @@ try:
# Request headers
hdr = {
- 'Authorization': "Bearer " + apiKey,
- 'Content-Type': 'application/json',
+ "Authorization": "Bearer " + apiKey,
+ "Content-Type": "application/json",
}
- body = {"query":"""{
+ body = {
+ "query": """{
viewer {
homes {
currentSubscription{
@@ -38,7 +39,8 @@ try:
}
}
}
- } }"""}
+ } }"""
+ }
response = requests.post(url, headers=hdr, json=body, timeout=10)
if response.status_code != 200:
@@ -52,7 +54,11 @@ except requests.exceptions.RequestException as e:
data = response.json()
-numdata = len(data["data"]["viewer"]["homes"][0]["currentSubscription"]["priceInfo"]["today"]) + len(data["data"]["viewer"]["homes"][0]["currentSubscription"]["priceInfo"]["tomorrow"])
+numdata = len(
+ data["data"]["viewer"]["homes"][0]["currentSubscription"]["priceInfo"]["today"]
+) + len(
+ data["data"]["viewer"]["homes"][0]["currentSubscription"]["priceInfo"]["tomorrow"]
+)
print("Got " + str(numdata) + " rows from Tibber")
@@ -60,25 +66,25 @@ print("Got " + str(numdata) + " rows from Tibber")
# prices
prices = []
-for item in data["data"]["viewer"]["homes"][0]["currentSubscription"]["priceInfo"]["today"]:
- prices.append((
- item["startsAt"],
- item["total"],
- item["energy"],
- item["tax"],
- item["level"]))
-
-for item in data["data"]["viewer"]["homes"][0]["currentSubscription"]["priceInfo"]["tomorrow"]:
- prices.append((
- item["startsAt"],
- item["total"],
- item["energy"],
- item["tax"],
- item["level"]))
+for item in data["data"]["viewer"]["homes"][0]["currentSubscription"]["priceInfo"][
+ "today"
+]:
+ prices.append(
+ (item["startsAt"], item["total"], item["energy"], item["tax"], item["level"])
+ )
+
+for item in data["data"]["viewer"]["homes"][0]["currentSubscription"]["priceInfo"][
+ "tomorrow"
+]:
+ prices.append(
+ (item["startsAt"], item["total"], item["energy"], item["tax"], item["level"])
+ )
# SQL
-sql = """INSERT INTO tibber_prices
+sql = (
+ """INSERT INTO tibber_prices
VALUES(%s, %s, %s, %s, %s)
ON CONFLICT (startsat) DO NOTHING""",
+)
common.dbi(sql, prices, verbose=True)
diff --git a/scripts/yr.py b/scripts/yr.py
index 6d53d5c..d4dc159 100644
--- a/scripts/yr.py
+++ b/scripts/yr.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-''' Get weatherdata from yr.no '''
+""" Get weatherdata from yr.no """
import os
import sys
@@ -7,11 +7,16 @@ import sys
import common
import requests
-location = int(os.environ['el_location'])
-lat = str(os.environ['el_yr_lat'])
-lon = str(os.environ['el_yr_lon'])
+location = int(os.environ["el_location"])
+lat = str(os.environ["el_yr_lat"])
+lon = str(os.environ["el_yr_lon"])
-apiUrl = "https://api.met.no/weatherapi/locationforecast/2.0/compact?lat=" + lat + "&lon=" + lon
+apiUrl = (
+ "https://api.met.no/weatherapi/locationforecast/2.0/compact?lat="
+ + lat
+ + "&lon="
+ + lon
+)
### Get the data
try:
@@ -19,8 +24,8 @@ try:
# Request headers
hdr = {
- 'User-Agent': 'gratis.morell@litepost.no',
- 'Cache-Control': 'no-cache',
+ "User-Agent": "gratis.morell@litepost.no",
+ "Cache-Control": "no-cache",
}
response = requests.get(url, headers=hdr, timeout=10)
@@ -41,18 +46,21 @@ data = response.json()
values = []
for item in data["properties"]["timeseries"]:
details = item["data"]["instant"]["details"]
- values.append((
- item["time"],
- location,
- details["air_temperature"],
- details["air_pressure_at_sea_level"],
- details["cloud_area_fraction"],
- details["relative_humidity"],
- details["wind_from_direction"],
- details["wind_speed"]))
+ values.append(
+ (
+ item["time"],
+ location,
+ details["air_temperature"],
+ details["air_pressure_at_sea_level"],
+ details["cloud_area_fraction"],
+ details["relative_humidity"],
+ details["wind_from_direction"],
+ details["wind_speed"],
+ )
+ )
# SQL
-sql = """INSERT INTO yr (
+sql = """INSERT INTO yr (
time,
location,
air_temperature,