From 8d186d39483beff64a1c11f80c6ca5e56dd7bbc5 Mon Sep 17 00:00:00 2001
From: Dennis Eriksen
Date: Wed, 1 Feb 2023 20:32:11 +0100
Subject: moving and renaming/breaking everything

---
 common/__init__.py            |  13 ------
 common/postgres.py            |  28 ------------
 common/queue.py               |  22 ---------
 elvia2pgsql.py                |  57 -----------------
 elvia_gridtariff2pgsql.py     |  69 ----------------------
 entsoe2pgsql.py               |  87 -----------------------------
 mqtt2queue.py                 |  54 ------------------
 mqtt2queue.service            |  17 -------
 mqtt_listener.service         |  17 +++++++
 nb2pgsql.py                   |  59 ------------------
 neohub2pgsql.py               |  27 ---------
 queue2pgsql.py                |  74 ------------------------
 queue2pgsql.service           |  17 -------
 queue_runner.service          |  17 +++++++
 requirements.txt              |  21 ++++++++-
 run.sh                        |  11 ++++-
 scripts/common/__init__.py    |  13 ++++++
 scripts/common/postgres.py    |  28 ++++++++++++
 scripts/common/queue.py       |  22 +++++++++
 scripts/elvia.py              |  57 +++++++++++++++++++++++
 scripts/elvia_gridtariff.py   |  69 ++++++++++++++++++++++++++++
 scripts/entsoe.py             |  87 +++++++++++++++++++++++++++++++++++
 scripts/mqtt_listener.py      |  54 ++++++++++++++++++++++
 scripts/nb.py                 |  59 ++++++++++++++++++++++++
 scripts/neohub.py             |  27 +++++++++++
 scripts/queue_runner.py       |  74 ++++++++++++++++++++++++++++++
 scripts/tibber_consumption.py |  76 +++++++++++++++++++++++++++++++
 scripts/tibber_prices.py      |  76 +++++++++++++++++++++++++++++++
 scripts/yr.py                 |  58 ++++++++++++++++++++++++
 tibber_consumption2pgsql.py   | 102 -----------------------------------
 tibber_prices2pgsql.py        | 103 ------------------------------------
 todb.py                       |  22 ---------
 yr2pgsql.py                   |  58 ------------------------
 33 files changed, 764 insertions(+), 811 deletions(-)
 delete mode 100644 common/__init__.py
 delete mode 100644 common/postgres.py
 delete mode 100644 common/queue.py
 delete mode 100755 elvia2pgsql.py
 delete mode 100755 elvia_gridtariff2pgsql.py
 delete mode 100755 entsoe2pgsql.py
 delete mode 100644 mqtt2queue.py
 delete mode 100644 mqtt2queue.service
 create mode 100644 mqtt_listener.service
 delete mode 100755 nb2pgsql.py
 delete mode 100755 neohub2pgsql.py
 delete mode 100644 queue2pgsql.py
 delete mode 100644 queue2pgsql.service
 create mode 100644 queue_runner.service
 create mode 100644 scripts/common/__init__.py
 create mode 100644 scripts/common/postgres.py
 create mode 100644 scripts/common/queue.py
 create mode 100644 scripts/elvia.py
 create mode 100644 scripts/elvia_gridtariff.py
 create mode 100644 scripts/entsoe.py
 create mode 100644 scripts/mqtt_listener.py
 create mode 100644 scripts/nb.py
 create mode 100644 scripts/neohub.py
 create mode 100644 scripts/queue_runner.py
 create mode 100644 scripts/tibber_consumption.py
 create mode 100644 scripts/tibber_prices.py
 create mode 100644 scripts/yr.py
 delete mode 100755 tibber_consumption2pgsql.py
 delete mode 100755 tibber_prices2pgsql.py
 delete mode 100644 todb.py
 delete mode 100755 yr2pgsql.py

diff --git a/common/__init__.py b/common/__init__.py
deleted file mode 100644
index 89977d2..0000000
--- a/common/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/usr/bin/env python3
-''' common functions and stuff '''
-
-import os
-
-
-QUEUE = bool(os.environ.get('el_QUEUE', False))
-
-# Initialize queue
-if QUEUE is True:
-    from .queue import dbi
-else:
-    from .postgres import dbi
diff --git a/common/postgres.py b/common/postgres.py
deleted file mode 100644
index bff0720..0000000
--- a/common/postgres.py
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/usr/bin/env python3
-''' common functions and stuff '''
-
-import os
-import sys
-import psycopg
-
-pg_db = os.environ['el_pg_db']
-pg_host = os.environ['el_pg_host']
-pg_user = os.environ.get('el_pg_user','')
-pg_pass = os.environ.get('el_pg_pass','')
-
-def dbi(sql, values, **kwargs):
-    ''' insert into db '''
-    verbose = bool(kwargs['verbose']) if 'verbose' in kwargs else False
-    # pylint: disable=E1129
-    with psycopg.connect(dbname=pg_db, host=pg_host, user=pg_user, password=pg_pass) as conn:
-        cur = conn.cursor()
-        if isinstance(values, list):
-            cur.executemany(sql, values)
-        elif isinstance(values, tuple):
-            cur.execute(sql, values)
-        else:
-            print('`values` is a', type(values), 'but it needs to be tuple or list')
-            sys.exit(1)
-        if verbose is True:
-            print("Inserted and/or changed", cur.rowcount, "rows into db")
-        return True
diff --git a/common/queue.py b/common/queue.py
deleted file mode 100644
index e567fb4..0000000
--- a/common/queue.py
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/usr/bin/env python3
-''' common functions and stuff '''
-
-import os
-import sys
-import pickle
-from litequeue import SQLQueue
-
-QUEUE_DB = os.environ.get('el_QUEUE_db', 'litequeue.db')
-
-# Initialize queue
-q = SQLQueue(QUEUE_DB, maxsize=None)
-
-def dbi(sql,values,**kwargs):
-    verbose = bool(kwargs['verbose']) if 'verbose' in kwargs else False
-
-    q.put(pickle.dumps([sql, values]))
-    if verbose is True:
-        table = sql.split(' ')[2]
-        num = 1 if isinstance(values, tuple) else len(values)
-        print("Inserted", num, "item(s) into queue for", table)
-    return True
diff --git a/elvia2pgsql.py b/elvia2pgsql.py
deleted file mode 100755
index be5eaae..0000000
--- a/elvia2pgsql.py
+++ /dev/null
@@ -1,57 +0,0 @@
-#!/usr/bin/env python3
-''' elvia2pgsql '''
-
-import os
-import sys
-import requests
-
-from datetime import datetime
-from datetime import timedelta
-from tzlocal import get_localzone
-
-from common import dbi
-
-apiKey = os.environ['el_elvia_token']
-apiUrl = "https://elvia.azure-api.net/customer/metervalues/api/v1/metervalues"
-
-pg_table = "elvia"
-
-startTime = datetime.now(get_localzone()) - timedelta(days = 2)
-startTime = startTime.isoformat('T')
-endTime = datetime.now(get_localzone()).isoformat('T')
-
-
-### Get the data
-try:
-    url = apiUrl + "?startTime=" + startTime + "&endTime=" + endTime
-
-    # Request headers
-    hdr = {
-        'Cache-Control': 'no-cache',
-        'Authorization': "Bearer " + apiKey,
-    }
-
-    response = requests.get(url, headers=hdr)
-    if response.status_code != 200:
-        print(response.status_code)
-        print("Oh shit")
-        response.raise_for_status()
-
-except Exception as e:
-    print("oh lol")
-    sys.exit(e)
-
-data = response.json()
-print("Got "+ str(len(data['meteringpoints'][0]['metervalue']['timeSeries'])) +" items from between "+ startTime + " and " + endTime)
-
-### insert data into database
-
-values = []
-for item in data['meteringpoints'][0]['metervalue']['timeSeries']:
-    # Only deal with verified items.
-    if item['verified']:
-        values.append((data['meteringpoints'][0]['meteringPointId'], item['startTime'], item['endTime'], item['value'], item['uom'], item['production']))
-
-
-# Count new rows and insert
-dbi("INSERT INTO " + pg_table + " VALUES(%s,%s,%s,%s,%s,%s) ON CONFLICT (startTime,endTime) DO NOTHING", values, verbose=True)
diff --git a/elvia_gridtariff2pgsql.py b/elvia_gridtariff2pgsql.py
deleted file mode 100755
index bcf4bd7..0000000
--- a/elvia_gridtariff2pgsql.py
+++ /dev/null
@@ -1,69 +0,0 @@
-#!/usr/bin/python3
-''' get grid tariffs'''
-
-import os
-import sys
-from datetime import datetime
-from datetime import timedelta
-import requests
-from tzlocal import get_localzone
-
-from common import dbi
-
-# API documentation: https://elvia.portal.azure-api.net/docs/services/gridtariffapi/operations/post-digin-api-v-tariffquery-meteringpointsgridtariffs?
-
-apiKey = os.environ['el_elvia_grid_api_key']
-apiUrl = "https://elvia.azure-api.net/grid-tariff/digin/api/1/tariffquery/meteringpointsgridtariffs"
-meteringPointId = os.environ['el_meteringPointId']
-
-pg_db = os.environ['el_pg_db']
-pg_host = os.environ['el_pg_host']
-pg_table = "elvia_gridtariff"
-
-
-startTime = datetime.now(get_localzone()) - timedelta(days = 2)
-startTime = startTime.strftime('%Y-%m-%d')
-
-endTime = datetime.now(get_localzone()) + timedelta(days = 2)
-endTime = endTime.strftime('%Y-%m-%d')
-
-
-### Get the data
-try:
-    url = apiUrl
-    # Request headers
-    hdr = {
-        'Cache-Control': 'no-cache',
-        'X-API-Key': apiKey,
-        'Content-Type': 'application/json'
-    }
-
-    # Request body
-    body = {
-        'starttime': startTime,
-        'endtime': endTime,
-        'meteringPointIds': [ meteringPointId ]
-    }
-
-    response = requests.post(url, headers=hdr, json=body)
-    if response.status_code != 200:
-        print(response.status_code)
-        print("Oh shit")
-        response.raise_for_status()
-
-except Exception as e:
-    print("oh lol")
-    sys.exit(e)
-
-data = response.json()
-
-print("Got "+ str(len(data['gridTariffCollections'][0]['gridTariff']['tariffPrice']['hours'])) +" items from between "+ startTime + " and " + endTime)
-
-
-### insert data into database
-
-values = []
-for item in data['gridTariffCollections'][0]['gridTariff']['tariffPrice']['hours']:
-    values.append((meteringPointId, item['startTime'], item['expiredAt'], item['shortName'], item['isPublicHoliday'], item['energyPrice']['total'], item['energyPrice']['totalExVat']))
-
-dbi("INSERT INTO " + pg_table + " VALUES(%s,%s,%s,%s,%s,%s,%s) ON CONFLICT (meteringPointId,startTime,endTime) DO NOTHING", values, verbose=True)
diff --git a/entsoe2pgsql.py b/entsoe2pgsql.py
deleted file mode 100755
index 2597a98..0000000
--- a/entsoe2pgsql.py
+++ /dev/null
@@ -1,87 +0,0 @@
-#!/bin/python3
-
-import os
-import sys
-from datetime import datetime
-from datetime import timedelta
-import requests
-import xmltodict
-from tzlocal import get_localzone
-from dateutil import tz
-
-from common import dbi
-
-
-# variables
-
-# Getting an api-key isn't very well documented. The documentation [1] points
-# to a pdf [2], which says the following:
-# > In order to request the access to the Restful API, please register on the
-# > Transparency Platform and send an email to transparency@entsoe.eu with
-# > “Restful API access” in the subject line. Indicate the email address you
-# > entered during registration in the email body. We will make our best to
-# > respond to your request.
-# 1: https://transparency.entsoe.eu/content/static_content/Static%20content/web%20api/Guide.html#_authentication_and_authorisation
-# 2: https://transparency.entsoe.eu/content/static_content/download?path=/Static%20content/API-Token-Management.pdf
-apiKey = os.environ['el_entsoe_token']
-
-# https://transparency.entsoe.eu/content/static_content/Static%20content/web%20api/Guide.html
-apiUrl = "https://transparency.entsoe.eu/api?securityToken=" + apiKey
-
-pg_db = os.environ['el_pg_db']
-pg_host = os.environ['el_pg_host']
-pg_table = "entsoe"
-
-startTime = datetime.now(get_localzone()) - timedelta(days = 7)
-startTime = startTime.strftime('%Y%m%d')
-
-endTime = datetime.now(get_localzone()) + timedelta(days = 1)
-endTime = endTime.strftime('%Y%m%d')
-
-# https://transparency.entsoe.eu/content/static_content/Static%20content/web%20api/Guide.html#_areas
-areas = [ {"name": "NO-0", "code": "10YNO-0--------C"},
-          {"name": "NO-1", "code": "10YNO-1--------2"},
-          {"name": "NO-2", "code": "10YNO-2--------T"},
-          {"name": "NO-3", "code": "10YNO-3--------J"},
-          {"name": "NO-4", "code": "10YNO-4--------9"} ]
-
-UTC = tz.gettz('UTC')
-CET = tz.gettz('Europe/Oslo')
-
-
-# Get the data
-values=[]
-for area in areas:
-    try:
-        url = apiUrl + "&documentType=A44&in_Domain=" + area["code"] + "&out_Domain=" + area["code"] + "&periodStart=" + startTime + "0000&periodEnd=" + endTime + "0000"
-
-        print("Getting data for " + area["code"])
-        response = requests.get(url)
-        if response.status_code != 200:
-            print(response.status_code)
-            print("Oh shit")
-            response.raise_for_status()
-
-    except Exception as e:
-        print("oh lol")
-        sys.exit(e)
-
-    data_dict = xmltodict.parse(response.content)
-
-    items = 0
-    if "Publication_MarketDocument" in data_dict:
-        for lista in data_dict["Publication_MarketDocument"]["TimeSeries"]:
-            utctime = datetime.strptime(lista["Period"]["timeInterval"]["start"], "%Y-%m-%dT%H:%MZ")
-            utctime = utctime.replace(tzinfo = UTC)
-            cettime = utctime.astimezone(CET)
-            items += len(lista["Period"]["Point"])
-
-            for item in lista["Period"]["Point"]:
-                # the response contains timerange, but not timestamp for every price, so we must calculate it
-                time = str(cettime + timedelta(hours = int(item["position"]) - 1))
-
-                # append values
-                values.append((time, area["name"], item["price.amount"]))
-    print("Got " + str(items) + " records")
-
-dbi("INSERT INTO " + pg_table + " VALUES(%s,%s,%s) ON CONFLICT (starttime, zone) DO NOTHING", values, verbose=True)
diff --git a/mqtt2queue.py b/mqtt2queue.py
deleted file mode 100644
index 24ee207..0000000
--- a/mqtt2queue.py
+++ /dev/null
@@ -1,54 +0,0 @@
-#!/usr/bin/env python3
-
-import os
-import json
-from datetime import datetime
-import paho.mqtt.client as mqtt
-
-from common import dbi
-
-mqtt_server = os.environ['el_mqtt_server']
-mqtt_port = int(os.environ['el_mqtt_port'])
-keepalive = int(os.environ['el_mqtt_keepalive'])
-mqtt_topic = os.environ['el_mqtt_topic']
-
-# The callback for when the client receives a CONNACK response from the server.
-def on_connect(client, userdata, flags, rc):
-    print("Connected with result code "+str(rc))
-
-    # Subscribing in on_connect() means that if we lose the connection and
-    # reconnect then subscriptions will be renewed.
-    client.subscribe(mqtt_topic)
-
-# The callback for when a PUBLISH message is received from the server.
-def on_message(client, userdata, msg):
-    name = msg.topic.split('/')[1]
-    data = json.loads(msg.payload)
-
-    if name.startswith('tmp') and 'temperature' in data and 'humidity' in data:
-        sql = "INSERT INTO mqtt_temps (name, temperature, humidity, battery, linkquality, voltage, time) VALUES(%s,%s,%s,%s,%s,%s,%s)"
-        values = (name, data['temperature'], data['humidity'], data['battery'], data['linkquality'], data['voltage'], datetime.utcnow())
-
-    elif name == 'HAN' and 'current' in data:
-        sql = "INSERT INTO mqtt_han (name, current, power, voltage, linkquality, time) VALUES(%s,%s,%s,%s,%s,%s)"
-        values = (name, data['current'], data['power'], data['voltage'], data['linkquality'], datetime.utcnow())
-
-    else:
-        return
-
-    dbi(sql, values, verbose=True)
-
-
-
-# mqtt
-client = mqtt.Client()
-client.on_connect = on_connect
-client.on_message = on_message
-
-client.connect(mqtt_server, mqtt_port, keepalive)
-
-# Blocking call that processes network traffic, dispatches callbacks and
-# handles reconnecting.
-# Other loop*() functions are available that give a threaded interface and a
-# manual interface.
-client.loop_forever()
diff --git a/mqtt2queue.service b/mqtt2queue.service
deleted file mode 100644
index a63c994..0000000
--- a/mqtt2queue.service
+++ /dev/null
@@ -1,17 +0,0 @@
-[Unit]
-Description=Simple service to start python-listener
-StartLimitIntervalSec=100
-StartLimitBurst=5
-
-[Service]
-Type=simple
-EnvironmentFile=%h/energyscripts/.env
-WorkingDirectory=%h/energyscripts
-ExecStart=%h/energyscripts/venv/bin/python3 -u mqtt2queue.py
-Restart=on-failure
-TimeoutStopSec=70
-RestartSec=30
-SyslogIdentifier=mqtt2queue
-
-[Install]
-WantedBy=default.target
diff --git a/mqtt_listener.service b/mqtt_listener.service
new file mode 100644
index 0000000..1ae79ca
--- /dev/null
+++ b/mqtt_listener.service
@@ -0,0 +1,17 @@
+[Unit]
+Description=Simple service to start mqtt-listener
+StartLimitIntervalSec=100
+StartLimitBurst=5
+
+[Service]
+Type=simple
+EnvironmentFile=%h/energyscripts/.env
+WorkingDirectory=%h/energyscripts
+ExecStart=%h/energyscripts/venv/bin/python3 -u scripts/mqtt_listener.py
+Restart=on-failure
+TimeoutStopSec=70
+RestartSec=30
+SyslogIdentifier=%N
+
+[Install]
+WantedBy=default.target
diff --git a/nb2pgsql.py b/nb2pgsql.py
deleted file mode 100755
index 1901182..0000000
--- a/nb2pgsql.py
+++ /dev/null
@@ -1,59 +0,0 @@
-#!/bin/python3
-
-import os
-import sys
-import csv
-import json
-import requests
-import tempfile
-
-from datetime import datetime
-from datetime import timedelta
-from tzlocal import get_localzone
-
-from common import dbi
-
-# I'm not sure I understand Norges Banks json-model. It seems a lot easier to just get the CSV, and convert it to JSON.
-apiUrl = "https://data.norges-bank.no/api/data/EXR/B.EUR.NOK.SP?format=csv&locale=en"
-
-pg_db = os.environ['el_pg_db']
-pg_host = os.environ['el_pg_host']
-pg_table = "nbex"
-
-startTime = datetime.now(get_localzone()) - timedelta(days = 10)
-startTime = startTime.strftime('%Y-%m-%d')
-
-endTime = datetime.now(get_localzone()).strftime('%Y-%m-%d')
-
-temp = tempfile.NamedTemporaryFile()
-
-### Get the data
-try:
-    url = apiUrl + "&startPeriod=" + startTime + "&endPeriod=" + endTime
-
-    response = requests.get(url)
-    if response.status_code != 200:
-        print(response.status_code)
-        print("Oh shit")
-        response.raise_for_status()
-
-    with open(temp.name,'w') as fd:
-        fd.write(response.text)
-
-except Exception as e:
-    print("oh lol")
-    sys.exit(e)
-
-
-### insert data into database
-
-values = []
-with open(temp.name) as csvfile:
-    csvReader = csv.DictReader(csvfile, delimiter=';')
-
-    for item in csvReader:
-        values.append((item["TIME_PERIOD"],item["BASE_CUR"],item["QUOTE_CUR"],item["OBS_VALUE"]))
-temp.close()
-
-
-dbi("INSERT INTO " + pg_table + " VALUES(%s,%s,%s,%s) ON CONFLICT (startdate,base_cur,quote_cur) DO NOTHING", values,verbose=True)
diff --git a/neohub2pgsql.py b/neohub2pgsql.py
deleted file mode 100755
index 5c62606..0000000
--- a/neohub2pgsql.py
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/usr/bin/python3
-
-import os
-import asyncio
-from datetime import datetime
-import neohubapi.neohub as neohub
-
-from common import dbi
-
-neohub_ip = os.environ['el_neohub_ip']
-neohub_port = os.environ['el_neohub_port']
-
-async def run():
-    # Legacy connection
-    hub = neohub.NeoHub(neohub_ip, int(neohub_port))
-    # Or, for a websocket connection:
-    # hub = neohub.Neohub(port=4243, token='xxx-xxxxxxx')
-    system = await hub.get_system()
-    hub_data, devices = await hub.get_live_data()
-    for device in devices['thermostats']:
-        print(f"Temperature in zone {device.name}: {device}")
-        sql = "INSERT INTO neohub (timestamp, time, device_id, away, heat_mode, heat_on, current_floor_temperature, target_temperature, temperature) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s)"
-        values = (datetime.utcnow(), device.time, device.device_id, device.away, device.heat_mode, device.heat_on, device.current_floor_temperature, device.target_temperature, device.temperature)
-        dbi(sql, values)
-
-asyncio.run(run())
-
diff --git a/queue2pgsql.py b/queue2pgsql.py
deleted file mode 100644
index 436dcb8..0000000
--- a/queue2pgsql.py
+++ /dev/null
@@ -1,74 +0,0 @@
-#!/usr/bin/env python3
-''' move items from queue to database '''
-
-import os
-import time
-import pickle
-import sqlite3
-from litequeue import SQLQueue
-
-from common.postgres import dbi
-
-QUEUE_DB = os.environ.get('el_QUEUE_db', 'litequeue.db')
-
-# Unlock all
-con = sqlite3.connect(QUEUE_DB)
-cur = con.cursor()
-cur.execute("UPDATE Queue SET status = 0 WHERE status = 1")
-con.commit()
-con.close()
-
-# Open Queue
-q = SQLQueue(QUEUE_DB, maxsize=None)
-q.prune()
-
-msgids = []
-values = []
-
-i = 0
-
-# Event loop
-while True:
-    # Just sleep if queue is empty
-    if q.empty() == True:
-        time.sleep(10)
-        continue
-
-    # get message
-    task = q.pop()
-    raw = pickle.loads(task['message'])
-    msgids.append(task['message_id'])
-    sql = raw[0]
-
-    # if the queue-item already is a batch-job, don't do any more batch-work
-    if isinstance(raw[1], list):
-        values = raw[1]
-        i = 10
-    else:
-        values.append(raw[1])
-
-
-    # Check if we can batch up with the next message in queue
-    i += 1
-    if i < 10 and q.qsize() - len(msgids) >= 1:
-        nextraw = pickle.loads(q.peek()['message'])
-        nextsql = nextraw[0]
-        nextvalues = nextraw[1]
-        if sql == nextsql and isinstance(nextvalues, tuple):
-            continue
-
-    dbi(sql,values)
-
-    for id in msgids:
-        q.done(id)
-
-    table = sql.split(' ')[2]
-    num = 1 if isinstance(values, tuple) else len(values)
-    left = str(q.qsize()) + " items left in queue" if q.qsize() > 0 else ''
-    print("Processed", num, "item(s) for table", table + ".", left)
-
-    msgids = []
-    values = []
-    i=0
-
-print("END")
diff --git a/queue2pgsql.service b/queue2pgsql.service
deleted file mode 100644
index 49e4066..0000000
--- a/queue2pgsql.service
+++ /dev/null
@@ -1,17 +0,0 @@
-[Unit]
-Description=Simple service to start python-listener
-StartLimitIntervalSec=100
-StartLimitBurst=5
-
-[Service]
-Type=simple
-EnvironmentFile=%h/energyscripts/.env
-WorkingDirectory=%h/energyscripts
-ExecStart=%h/energyscripts/venv/bin/python3 -u queue2pgsql.py
-Restart=on-failure
-TimeoutStopSec=70
-RestartSec=30
-SyslogIdentifier=queue2pgsql
-
-[Install]
-WantedBy=default.target
diff --git a/queue_runner.service b/queue_runner.service
new file mode 100644
index 0000000..6b78068
--- /dev/null
+++ b/queue_runner.service
@@ -0,0 +1,17 @@
+[Unit]
+Description=Simple service to start queue-runner
+StartLimitIntervalSec=100
+StartLimitBurst=5
+
+[Service]
+Type=simple
+EnvironmentFile=%h/energyscripts/.env
+WorkingDirectory=%h/energyscripts
+ExecStart=%h/energyscripts/venv/bin/python3 -u scripts/queue_runner.py
+Restart=on-failure
+TimeoutStopSec=70
+RestartSec=30
+SyslogIdentifier=%N
+
+[Install]
+WantedBy=default.target
diff --git a/requirements.txt b/requirements.txt
index 0dd3ee4..e2d36d3 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,13 +1,32 @@
+astroid==2.14.1
 async-property==0.2.1
-async-timeout==4.0.2
 certifi==2022.12.7
 charset-normalizer==3.0.1
+DateTime==5.0
+dill==0.3.6
 idna==3.4
+isort==5.12.0
+lazy-object-proxy==1.9.0
 litequeue==0.5
+mccabe==0.7.0
 neohubapi==1.0
 paho-mqtt==1.6.1
+platformdirs==2.6.2
 psycopg==3.1.8
+psycopg2-binary==2.9.5
+pylint==2.16.0
+python-dateutil==2.8.2
+pytz==2022.7.1
+pytz-deprecation-shim==0.1.0.post0
 requests==2.28.2
+six==1.16.0
+tomli==2.0.1
+tomlkit==0.11.6
+typing_extensions==4.4.0
+tzdata==2022.7
+tzlocal==4.2
 urllib3==1.26.14
 websockets==10.4
+wrapt==1.14.1
 xmltodict==0.13.0
+zope.interface==5.5.2
diff --git a/run.sh b/run.sh
index 8efffd5..57d3128 100755
--- a/run.sh
+++ b/run.sh
@@ -7,4 +7,13 @@ cd ${0:h}
 setopt all_export
 source .env
 unsetopt all_export
-./venv/bin/python3 $1 || exit 1
+
+if [[ -f $1 ]]; then
+  ./venv/bin/python3 $1
+elif [[ -f scripts/$1 ]]; then
+  ./venv/bin/python3 scripts/$1
+else
+  print -u2 "Computer says no" && exit 1
+fi
+
+exit $?
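The refactor keeps one calling convention across all of the relocated scripts: import dbi from common, and let common/__init__.py pick the backend at import time based on the el_QUEUE environment variable. A rough sketch of that contract, assuming the .env variables are populated as run.sh expects (the table name and rows below are hypothetical):

    import os
    os.environ['el_QUEUE'] = '1'   # any non-empty string selects the litequeue backend

    from common import dbi         # common/__init__.py resolves this to queue.dbi or postgres.dbi

    # A tuple is one row (execute); a list of tuples is a pre-built batch (executemany).
    dbi("INSERT INTO example_table VALUES(%s,%s) ON CONFLICT DO NOTHING",   # hypothetical table
        [('2023-02-01T20:00:00', 42.0), ('2023-02-01T21:00:00', 43.5)],
        verbose=True)

Either backend returns True on success, so callers never need to know whether the write went straight to Postgres or was parked in the queue.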
diff --git a/scripts/common/__init__.py b/scripts/common/__init__.py
new file mode 100644
index 0000000..89977d2
--- /dev/null
+++ b/scripts/common/__init__.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env python3
+''' common functions and stuff '''
+
+import os
+
+
+QUEUE = bool(os.environ.get('el_QUEUE', False))
+
+# Initialize queue
+if QUEUE is True:
+    from .queue import dbi
+else:
+    from .postgres import dbi
diff --git a/scripts/common/postgres.py b/scripts/common/postgres.py
new file mode 100644
index 0000000..bff0720
--- /dev/null
+++ b/scripts/common/postgres.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python3
+''' common functions and stuff '''
+
+import os
+import sys
+import psycopg
+
+pg_db = os.environ['el_pg_db']
+pg_host = os.environ['el_pg_host']
+pg_user = os.environ.get('el_pg_user','')
+pg_pass = os.environ.get('el_pg_pass','')
+
+def dbi(sql, values, **kwargs):
+    ''' insert into db '''
+    verbose = bool(kwargs['verbose']) if 'verbose' in kwargs else False
+    # pylint: disable=E1129
+    with psycopg.connect(dbname=pg_db, host=pg_host, user=pg_user, password=pg_pass) as conn:
+        cur = conn.cursor()
+        if isinstance(values, list):
+            cur.executemany(sql, values)
+        elif isinstance(values, tuple):
+            cur.execute(sql, values)
+        else:
+            print('`values` is a', type(values), 'but it needs to be tuple or list')
+            sys.exit(1)
+        if verbose is True:
+            print("Inserted and/or changed", cur.rowcount, "rows into db")
+        return True
diff --git a/scripts/common/queue.py b/scripts/common/queue.py
new file mode 100644
index 0000000..e567fb4
--- /dev/null
+++ b/scripts/common/queue.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python3
+''' common functions and stuff '''
+
+import os
+import sys
+import pickle
+from litequeue import SQLQueue
+
+QUEUE_DB = os.environ.get('el_QUEUE_db', 'litequeue.db')
+
+# Initialize queue
+q = SQLQueue(QUEUE_DB, maxsize=None)
+
+def dbi(sql,values,**kwargs):
+    verbose = bool(kwargs['verbose']) if 'verbose' in kwargs else False
+
+    q.put(pickle.dumps([sql, values]))
+    if verbose is True:
+        table = sql.split(' ')[2]
+        num = 1 if isinstance(values, tuple) else len(values)
+        print("Inserted", num, "item(s) into queue for", table)
+    return True
diff --git a/scripts/elvia.py b/scripts/elvia.py
new file mode 100644
index 0000000..be5eaae
--- /dev/null
+++ b/scripts/elvia.py
@@ -0,0 +1,57 @@
+#!/usr/bin/env python3
+''' elvia2pgsql '''
+
+import os
+import sys
+import requests
+
+from datetime import datetime
+from datetime import timedelta
+from tzlocal import get_localzone
+
+from common import dbi
+
+apiKey = os.environ['el_elvia_token']
+apiUrl = "https://elvia.azure-api.net/customer/metervalues/api/v1/metervalues"
+
+pg_table = "elvia"
+
+startTime = datetime.now(get_localzone()) - timedelta(days = 2)
+startTime = startTime.isoformat('T')
+endTime = datetime.now(get_localzone()).isoformat('T')
+
+
+### Get the data
+try:
+    url = apiUrl + "?startTime=" + startTime + "&endTime=" + endTime
+
+    # Request headers
+    hdr = {
+        'Cache-Control': 'no-cache',
+        'Authorization': "Bearer " + apiKey,
+    }
+
+    response = requests.get(url, headers=hdr)
+    if response.status_code != 200:
+        print(response.status_code)
+        print("Oh shit")
+        response.raise_for_status()
+
+except Exception as e:
+    print("oh lol")
+    sys.exit(e)
+
+data = response.json()
+print("Got "+ str(len(data['meteringpoints'][0]['metervalue']['timeSeries'])) +" items from between "+ startTime + " and " + endTime)
+
+### insert data into database
+
+values = []
+for item in data['meteringpoints'][0]['metervalue']['timeSeries']:
+    # Only deal with verified items.
+    if item['verified']:
+        values.append((data['meteringpoints'][0]['meteringPointId'], item['startTime'], item['endTime'], item['value'], item['uom'], item['production']))
+
+
+# Count new rows and insert
+dbi("INSERT INTO " + pg_table + " VALUES(%s,%s,%s,%s,%s,%s) ON CONFLICT (startTime,endTime) DO NOTHING", values, verbose=True)
diff --git a/scripts/elvia_gridtariff.py b/scripts/elvia_gridtariff.py
new file mode 100644
index 0000000..bcf4bd7
--- /dev/null
+++ b/scripts/elvia_gridtariff.py
@@ -0,0 +1,69 @@
+#!/usr/bin/python3
+''' get grid tariffs'''
+
+import os
+import sys
+from datetime import datetime
+from datetime import timedelta
+import requests
+from tzlocal import get_localzone
+
+from common import dbi
+
+# API documentation: https://elvia.portal.azure-api.net/docs/services/gridtariffapi/operations/post-digin-api-v-tariffquery-meteringpointsgridtariffs?
+
+apiKey = os.environ['el_elvia_grid_api_key']
+apiUrl = "https://elvia.azure-api.net/grid-tariff/digin/api/1/tariffquery/meteringpointsgridtariffs"
+meteringPointId = os.environ['el_meteringPointId']
+
+pg_db = os.environ['el_pg_db']
+pg_host = os.environ['el_pg_host']
+pg_table = "elvia_gridtariff"
+
+
+startTime = datetime.now(get_localzone()) - timedelta(days = 2)
+startTime = startTime.strftime('%Y-%m-%d')
+
+endTime = datetime.now(get_localzone()) + timedelta(days = 2)
+endTime = endTime.strftime('%Y-%m-%d')
+
+
+### Get the data
+try:
+    url = apiUrl
+    # Request headers
+    hdr = {
+        'Cache-Control': 'no-cache',
+        'X-API-Key': apiKey,
+        'Content-Type': 'application/json'
+    }
+
+    # Request body
+    body = {
+        'starttime': startTime,
+        'endtime': endTime,
+        'meteringPointIds': [ meteringPointId ]
+    }
+
+    response = requests.post(url, headers=hdr, json=body)
+    if response.status_code != 200:
+        print(response.status_code)
+        print("Oh shit")
+        response.raise_for_status()
+
+except Exception as e:
+    print("oh lol")
+    sys.exit(e)
+
+data = response.json()
+
+print("Got "+ str(len(data['gridTariffCollections'][0]['gridTariff']['tariffPrice']['hours'])) +" items from between "+ startTime + " and " + endTime)
+
+
+### insert data into database
+
+values = []
+for item in data['gridTariffCollections'][0]['gridTariff']['tariffPrice']['hours']:
+    values.append((meteringPointId, item['startTime'], item['expiredAt'], item['shortName'], item['isPublicHoliday'], item['energyPrice']['total'], item['energyPrice']['totalExVat']))
+
+dbi("INSERT INTO " + pg_table + " VALUES(%s,%s,%s,%s,%s,%s,%s) ON CONFLICT (meteringPointId,startTime,endTime) DO NOTHING", values, verbose=True)
diff --git a/scripts/entsoe.py b/scripts/entsoe.py
new file mode 100644
index 0000000..2597a98
--- /dev/null
+++ b/scripts/entsoe.py
@@ -0,0 +1,87 @@
+#!/bin/python3
+
+import os
+import sys
+from datetime import datetime
+from datetime import timedelta
+import requests
+import xmltodict
+from tzlocal import get_localzone
+from dateutil import tz
+
+from common import dbi
+
+
+# variables
+
+# Getting an api-key isn't very well documented. The documentation [1] points
+# to a pdf [2], which says the following:
+# > In order to request the access to the Restful API, please register on the
+# > Transparency Platform and send an email to transparency@entsoe.eu with
+# > “Restful API access” in the subject line. Indicate the email address you
+# > entered during registration in the email body. We will make our best to
+# > respond to your request.
+# 1: https://transparency.entsoe.eu/content/static_content/Static%20content/web%20api/Guide.html#_authentication_and_authorisation
+# 2: https://transparency.entsoe.eu/content/static_content/download?path=/Static%20content/API-Token-Management.pdf
+apiKey = os.environ['el_entsoe_token']
+
+# https://transparency.entsoe.eu/content/static_content/Static%20content/web%20api/Guide.html
+apiUrl = "https://transparency.entsoe.eu/api?securityToken=" + apiKey
+
+pg_db = os.environ['el_pg_db']
+pg_host = os.environ['el_pg_host']
+pg_table = "entsoe"
+
+startTime = datetime.now(get_localzone()) - timedelta(days = 7)
+startTime = startTime.strftime('%Y%m%d')
+
+endTime = datetime.now(get_localzone()) + timedelta(days = 1)
+endTime = endTime.strftime('%Y%m%d')
+
+# https://transparency.entsoe.eu/content/static_content/Static%20content/web%20api/Guide.html#_areas
+areas = [ {"name": "NO-0", "code": "10YNO-0--------C"},
+          {"name": "NO-1", "code": "10YNO-1--------2"},
+          {"name": "NO-2", "code": "10YNO-2--------T"},
+          {"name": "NO-3", "code": "10YNO-3--------J"},
+          {"name": "NO-4", "code": "10YNO-4--------9"} ]
+
+UTC = tz.gettz('UTC')
+CET = tz.gettz('Europe/Oslo')
+
+
+# Get the data
+values=[]
+for area in areas:
+    try:
+        url = apiUrl + "&documentType=A44&in_Domain=" + area["code"] + "&out_Domain=" + area["code"] + "&periodStart=" + startTime + "0000&periodEnd=" + endTime + "0000"
+
+        print("Getting data for " + area["code"])
+        response = requests.get(url)
+        if response.status_code != 200:
+            print(response.status_code)
+            print("Oh shit")
+            response.raise_for_status()
+
+    except Exception as e:
+        print("oh lol")
+        sys.exit(e)
+
+    data_dict = xmltodict.parse(response.content)
+
+    items = 0
+    if "Publication_MarketDocument" in data_dict:
+        for lista in data_dict["Publication_MarketDocument"]["TimeSeries"]:
+            utctime = datetime.strptime(lista["Period"]["timeInterval"]["start"], "%Y-%m-%dT%H:%MZ")
+            utctime = utctime.replace(tzinfo = UTC)
+            cettime = utctime.astimezone(CET)
+            items += len(lista["Period"]["Point"])
+
+            for item in lista["Period"]["Point"]:
+                # the response contains timerange, but not timestamp for every price, so we must calculate it
+                time = str(cettime + timedelta(hours = int(item["position"]) - 1))
+
+                # append values
+                values.append((time, area["name"], item["price.amount"]))
+    print("Got " + str(items) + " records")
+
+dbi("INSERT INTO " + pg_table + " VALUES(%s,%s,%s) ON CONFLICT (starttime, zone) DO NOTHING", values, verbose=True)
diff --git a/scripts/mqtt_listener.py b/scripts/mqtt_listener.py
new file mode 100644
index 0000000..24ee207
--- /dev/null
+++ b/scripts/mqtt_listener.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python3
+
+import os
+import json
+from datetime import datetime
+import paho.mqtt.client as mqtt
+
+from common import dbi
+
+mqtt_server = os.environ['el_mqtt_server']
+mqtt_port = int(os.environ['el_mqtt_port'])
+keepalive = int(os.environ['el_mqtt_keepalive'])
+mqtt_topic = os.environ['el_mqtt_topic']
+
+# The callback for when the client receives a CONNACK response from the server.
+def on_connect(client, userdata, flags, rc):
+    print("Connected with result code "+str(rc))
+
+    # Subscribing in on_connect() means that if we lose the connection and
+    # reconnect then subscriptions will be renewed.
+    client.subscribe(mqtt_topic)
+
+# The callback for when a PUBLISH message is received from the server.
+def on_message(client, userdata, msg):
+    name = msg.topic.split('/')[1]
+    data = json.loads(msg.payload)
+
+    if name.startswith('tmp') and 'temperature' in data and 'humidity' in data:
+        sql = "INSERT INTO mqtt_temps (name, temperature, humidity, battery, linkquality, voltage, time) VALUES(%s,%s,%s,%s,%s,%s,%s)"
+        values = (name, data['temperature'], data['humidity'], data['battery'], data['linkquality'], data['voltage'], datetime.utcnow())
+
+    elif name == 'HAN' and 'current' in data:
+        sql = "INSERT INTO mqtt_han (name, current, power, voltage, linkquality, time) VALUES(%s,%s,%s,%s,%s,%s)"
+        values = (name, data['current'], data['power'], data['voltage'], data['linkquality'], datetime.utcnow())
+
+    else:
+        return
+
+    dbi(sql, values, verbose=True)
+
+
+
+# mqtt
+client = mqtt.Client()
+client.on_connect = on_connect
+client.on_message = on_message
+
+client.connect(mqtt_server, mqtt_port, keepalive)
+
+# Blocking call that processes network traffic, dispatches callbacks and
+# handles reconnecting.
+# Other loop*() functions are available that give a threaded interface and a
+# manual interface.
+client.loop_forever()
diff --git a/scripts/nb.py b/scripts/nb.py
new file mode 100644
index 0000000..1901182
--- /dev/null
+++ b/scripts/nb.py
@@ -0,0 +1,59 @@
+#!/bin/python3
+
+import os
+import sys
+import csv
+import json
+import requests
+import tempfile
+
+from datetime import datetime
+from datetime import timedelta
+from tzlocal import get_localzone
+
+from common import dbi
+
+# I'm not sure I understand Norges Banks json-model. It seems a lot easier to just get the CSV, and convert it to JSON.
+apiUrl = "https://data.norges-bank.no/api/data/EXR/B.EUR.NOK.SP?format=csv&locale=en"
+
+pg_db = os.environ['el_pg_db']
+pg_host = os.environ['el_pg_host']
+pg_table = "nbex"
+
+startTime = datetime.now(get_localzone()) - timedelta(days = 10)
+startTime = startTime.strftime('%Y-%m-%d')
+
+endTime = datetime.now(get_localzone()).strftime('%Y-%m-%d')
+
+temp = tempfile.NamedTemporaryFile()
+
+### Get the data
+try:
+    url = apiUrl + "&startPeriod=" + startTime + "&endPeriod=" + endTime
+
+    response = requests.get(url)
+    if response.status_code != 200:
+        print(response.status_code)
+        print("Oh shit")
+        response.raise_for_status()
+
+    with open(temp.name,'w') as fd:
+        fd.write(response.text)
+
+except Exception as e:
+    print("oh lol")
+    sys.exit(e)
+
+
+### insert data into database
+
+values = []
+with open(temp.name) as csvfile:
+    csvReader = csv.DictReader(csvfile, delimiter=';')
+
+    for item in csvReader:
+        values.append((item["TIME_PERIOD"],item["BASE_CUR"],item["QUOTE_CUR"],item["OBS_VALUE"]))
+temp.close()
+
+
+dbi("INSERT INTO " + pg_table + " VALUES(%s,%s,%s,%s) ON CONFLICT (startdate,base_cur,quote_cur) DO NOTHING", values,verbose=True)
diff --git a/scripts/neohub.py b/scripts/neohub.py
new file mode 100644
index 0000000..5c62606
--- /dev/null
+++ b/scripts/neohub.py
@@ -0,0 +1,27 @@
+#!/usr/bin/python3
+
+import os
+import asyncio
+from datetime import datetime
+import neohubapi.neohub as neohub
+
+from common import dbi
+
+neohub_ip = os.environ['el_neohub_ip']
+neohub_port = os.environ['el_neohub_port']
+
+async def run():
+    # Legacy connection
+    hub = neohub.NeoHub(neohub_ip, int(neohub_port))
+    # Or, for a websocket connection:
+    # hub = neohub.Neohub(port=4243, token='xxx-xxxxxxx')
+    system = await hub.get_system()
+    hub_data, devices = await hub.get_live_data()
+    for device in devices['thermostats']:
+        print(f"Temperature in zone {device.name}: {device}")
+        sql = "INSERT INTO neohub (timestamp, time, device_id, away, heat_mode, heat_on, current_floor_temperature, target_temperature, temperature) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s)"
+        values = (datetime.utcnow(), device.time, device.device_id, device.away, device.heat_mode, device.heat_on, device.current_floor_temperature, device.target_temperature, device.temperature)
+        dbi(sql, values)
+
+asyncio.run(run())
+
diff --git a/scripts/queue_runner.py b/scripts/queue_runner.py
new file mode 100644
index 0000000..436dcb8
--- /dev/null
+++ b/scripts/queue_runner.py
@@ -0,0 +1,74 @@
+#!/usr/bin/env python3
+''' move items from queue to database '''
+
+import os
+import time
+import pickle
+import sqlite3
+from litequeue import SQLQueue
+
+from common.postgres import dbi
+
+QUEUE_DB = os.environ.get('el_QUEUE_db', 'litequeue.db')
+
+# Unlock all
+con = sqlite3.connect(QUEUE_DB)
+cur = con.cursor()
+cur.execute("UPDATE Queue SET status = 0 WHERE status = 1")
+con.commit()
+con.close()
+
+# Open Queue
+q = SQLQueue(QUEUE_DB, maxsize=None)
+q.prune()
+
+msgids = []
+values = []
+
+i = 0
+
+# Event loop
+while True:
+    # Just sleep if queue is empty
+    if q.empty() == True:
+        time.sleep(10)
+        continue
+
+    # get message
+    task = q.pop()
+    raw = pickle.loads(task['message'])
+    msgids.append(task['message_id'])
+    sql = raw[0]
+
+    # if the queue-item already is a batch-job, don't do any more batch-work
+    if isinstance(raw[1], list):
+        values = raw[1]
+        i = 10
+    else:
+        values.append(raw[1])
+
+
+    # Check if we can batch up with the next message in queue
+    i += 1
+    if i < 10 and q.qsize() - len(msgids) >= 1:
+        nextraw = pickle.loads(q.peek()['message'])
+        nextsql = nextraw[0]
+        nextvalues = nextraw[1]
+        if sql == nextsql and isinstance(nextvalues, tuple):
+            continue
+
+    dbi(sql,values)
+
+    for id in msgids:
+        q.done(id)
+
+    table = sql.split(' ')[2]
+    num = 1 if isinstance(values, tuple) else len(values)
+    left = str(q.qsize()) + " items left in queue" if q.qsize() > 0 else ''
+    print("Processed", num, "item(s) for table", table + ".", left)
+
+    msgids = []
+    values = []
+    i=0
+
+print("END")
diff --git a/scripts/tibber_consumption.py b/scripts/tibber_consumption.py
new file mode 100644
index 0000000..a411562
--- /dev/null
+++ b/scripts/tibber_consumption.py
@@ -0,0 +1,76 @@
+#!/usr/bin/env python3
+''' import energy consumption from tibber '''
+
+import os
+import sys
+from datetime import datetime
+from datetime import timedelta
+from tzlocal import get_localzone
+import requests
+
+from common import dbi
+
+
+# variables
+apiKey = os.environ['el_tibber_token']
+apiUrl = "https://api.tibber.com/v1-beta/gql"
+
+pg_db = os.environ['el_pg_db']
+pg_host = os.environ['el_pg_host']
+pg_table = "tibber_consumption"
+
+startTime = datetime.now(get_localzone()) - timedelta(days = 1)
+startTime = startTime.isoformat('T')
+
+endTime = datetime.now(get_localzone()).isoformat('T')
+
+# Get the data
+try:
+    url = apiUrl
+
+    # Request headers
+    hdr = {
+        'Authorization': "Bearer " + apiKey,
+        'Content-Type': 'application/json',
+    }
+
+    body = {"query":"""{
+      viewer {
+        homes {
+          consumption(resolution: HOURLY, last:100) {
+            nodes {
+              from
+              to
+              cost
+              unitPrice
+              unitPriceVAT
+              consumption
+              consumptionUnit
+            }
+          }
+        }
+    } }"""}
+
+    response = requests.post(url, headers=hdr, json=body)
+    if response.status_code != 200:
+        print(response.status_code)
+        print("Oh shit")
+        response.raise_for_status()
+
+except Exception as e:
+    print("oh lol")
+    sys.exit(e)
+
+data = response.json()
+
+numdata = len(data["data"]["viewer"]["homes"][0]["consumption"]["nodes"])
+print("Got " + str(numdata) + " rows from Tibber")
+
+### insert data into database
+# consumption
+values = []
+for item in data["data"]["viewer"]["homes"][0]["consumption"]["nodes"]:
+    if item["consumption"] is not None:
+        values.append((item["from"],item["to"],item["consumption"],item["consumptionUnit"],item["cost"],item["unitPrice"],item["unitPriceVAT"]))
+
+dbi("INSERT INTO " + pg_table + " VALUES(%s,%s,%s,%s,%s,%s,%s) ON CONFLICT (startTime,endTime) DO NOTHING", values, verbose=True)
diff --git a/scripts/tibber_prices.py b/scripts/tibber_prices.py
new file mode 100644
index 0000000..81b3afb
--- /dev/null
+++ b/scripts/tibber_prices.py
@@ -0,0 +1,76 @@
+#!/usr/bin/env python3
+''' import energy prices from tibber '''
+
+import os
+import sys
+from datetime import datetime
+from datetime import timedelta
+from tzlocal import get_localzone
+import requests
+
+from common import dbi
+
+
+# variables
+apiKey = os.environ['el_tibber_token']
+apiUrl = "https://api.tibber.com/v1-beta/gql"
+
+pg_db = os.environ['el_pg_db']
+pg_host = os.environ['el_pg_host']
+pg_table = "tibber_prices"
+
+startTime = datetime.now(get_localzone()) - timedelta(days = 1)
+startTime = startTime.isoformat('T')
+
+endTime = datetime.now(get_localzone()).isoformat('T')
+
+# Get the data
+try:
+    url = apiUrl
+
+    # Request headers
+    hdr = {
+        'Authorization': "Bearer " + apiKey,
+        'Content-Type': 'application/json',
+    }
+
+    body = {"query":"""{
+      viewer {
+        homes {
+          currentSubscription{
+            priceInfo{
+              today { total energy tax startsAt level currency }
+              tomorrow { total energy tax startsAt level currency }
+            }
+          }
+        }
+    } }"""}
+
+    response = requests.post(url, headers=hdr, json=body)
+    if response.status_code != 200:
+        print(response.status_code)
+        print("Oh shit")
+        response.raise_for_status()
+
+except Exception as e:
+    print("oh lol")
+    sys.exit(e)
+
+data = response.json()
+
+numdata = len(data["data"]["viewer"]["homes"][0]["currentSubscription"]["priceInfo"]["today"]) + len(data["data"]["viewer"]["homes"][0]["currentSubscription"]["priceInfo"]["tomorrow"])
+
+print("Got " + str(numdata) + " rows from Tibber")
+
+### insert data into database
+
+# prices
+prices = []
+for item in data["data"]["viewer"]["homes"][0]["currentSubscription"]["priceInfo"]["today"]:
+    prices.append((item["startsAt"],item["total"],item["energy"],item["tax"],item["level"]))
+
+for item in data["data"]["viewer"]["homes"][0]["currentSubscription"]["priceInfo"]["tomorrow"]:
+    prices.append((item["startsAt"],item["total"],item["energy"],item["tax"],item["level"]))
+
+
+dbi("INSERT INTO " + pg_table + " VALUES(%s,%s,%s,%s,%s) ON CONFLICT (startsat) DO NOTHING", prices, verbose=True)
diff --git a/scripts/yr.py b/scripts/yr.py
new file mode 100644
index 0000000..9c3ae5e
--- /dev/null
+++ b/scripts/yr.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python3
+''' Get weatherdata from yr.no '''
+
+import os
+import sys
+import requests
+
+from common import dbi
+
+lat = str(os.environ['el_yr_lat'])
+lon = str(os.environ['el_yr_lon'])
+
+pg_db = os.environ['el_pg_db']
+pg_host = os.environ['el_pg_host']
+pg_table = "yr"
+
+apiUrl = "https://api.met.no/weatherapi/locationforecast/2.0/compact?lat=" + lat + "&lon=" + lon
+
+### Get the data
+try:
+    url = apiUrl
+
+    # Request headers
+    hdr = {
+        'User-Agent': 'gratis.morell@litepost.no',
+        'Cache-Control': 'no-cache',
+    }
+
+    response = requests.get(url, headers=hdr)
+    if response.status_code != 200:
+        print(response.status_code)
+        print("Oh shit")
+        response.raise_for_status()
+
+except Exception as e:
+    print("oh lol")
+    sys.exit(e)
+
+data = response.json()
+
+
+### insert data into database
+
+values = []
+for item in data["properties"]["timeseries"]:
+    details = item["data"]["instant"]["details"]
+    values.append((item["time"],details["air_temperature"],details["air_pressure_at_sea_level"],details["cloud_area_fraction"],details["relative_humidity"],details["wind_from_direction"],details["wind_speed"]))
+
+sql = "INSERT INTO " + pg_table + """ VALUES(%s,%s,%s,%s,%s,%s,%s) ON CONFLICT (time) DO UPDATE SET
+    air_temperature=EXCLUDED.air_temperature,
+    air_pressure_at_sea_level=EXCLUDED.air_pressure_at_sea_level,
+    cloud_area_fraction=EXCLUDED.cloud_area_fraction,
+    relative_humidity=EXCLUDED.relative_humidity,
+    wind_from_direction=EXCLUDED.wind_from_direction,
+    wind_speed=EXCLUDED.wind_speed,
+    updated=now()"""
+
+dbi(sql, values, verbose=True)
diff --git a/tibber_consumption2pgsql.py b/tibber_consumption2pgsql.py
deleted file mode 100755
index e710b8f..0000000
--- a/tibber_consumption2pgsql.py
+++ /dev/null
@@ -1,102 +0,0 @@
-#!/bin/python3
-
-import os
-import sys
-import json
-import psycopg2
-import requests
-
-from datetime import datetime
-from datetime import timedelta
-from tzlocal import get_localzone
-
-
-# variables
-apiKey = os.environ['el_tibber_token']
-apiUrl = "https://api.tibber.com/v1-beta/gql"
-
-pg_db = os.environ['el_pg_db']
-pg_host = os.environ['el_pg_host']
-pg_table = "tibber_consumption"
-
-startTime = datetime.now(get_localzone()) - timedelta(days = 1)
-startTime = startTime.isoformat('T')
-
-endTime = datetime.now(get_localzone()).isoformat('T')
-
-# Get the data
-try:
-    url = apiUrl
-
-    # Request headers
-    hdr = {
-        'Authorization': "Bearer " + apiKey,
-        'Content-Type': 'application/json',
-    }
-
-    body = {"query":"""{
-      viewer {
-        homes {
-          consumption(resolution: HOURLY, last:100) {
-            nodes {
-              from
-              to
-              cost
-              unitPrice
-              unitPriceVAT
-              consumption
-              consumptionUnit
-            }
-          }
-        }
-    } }"""}
-
-    response = requests.post(url, headers=hdr, json=body)
-    if response.status_code != 200:
-        print(response.status_code)
-        print("Oh shit")
-        response.raise_for_status()
-
-except Exception as e:
-    print("oh lol")
-    sys.exit(e)
-
-data = response.json()
-
-numdata = len(data["data"]["viewer"]["homes"][0]["consumption"]["nodes"])
-print("Got " + str(numdata) + " rows from Tibber")
-
-### insert data into database
-# consumption
-values = []
-for item in data["data"]["viewer"]["homes"][0]["consumption"]["nodes"]:
-    if item["consumption"] is not None:
-        values.append((item["from"],item["to"],item["consumption"],item["consumptionUnit"],item["cost"],item["unitPrice"],item["unitPriceVAT"]))
-
-# connect to db
-conn = psycopg2.connect(database=pg_db, host=pg_host)
-cur = conn.cursor()
-
-# count rows before we start
-cur.execute("SELECT COUNT(*) FROM " + pg_table)
-before = cur.fetchone()
-
-# insert data
-try:
-    cur.executemany("INSERT INTO " + pg_table + " VALUES(%s,%s,%s,%s,%s,%s,%s) ON CONFLICT (startTime,endTime) DO NOTHING", values)
-    conn.commit()
-except Exception as e:
-    conn.rollback()
-    raise e
-
-# count rows after we finnish
-cur.execute("SELECT COUNT(*) FROM " + pg_table)
-after = cur.fetchone()
-
-# count *new* rows
-newRows = after[0] - before[0]
-
-# close connection
-conn.close()
-
-print("Successfully inserted " + str(newRows) + " records into the database")
diff --git a/tibber_prices2pgsql.py b/tibber_prices2pgsql.py
deleted file mode 100755
index 046072f..0000000
--- a/tibber_prices2pgsql.py
+++ /dev/null
@@ -1,103 +0,0 @@
-#!/bin/python3
-
-import os
-import sys
-import json
-import psycopg2
-import requests
-
-from datetime import datetime
-from datetime import timedelta
-from tzlocal import get_localzone
-
-
-# variables
-apiKey = os.environ['el_tibber_token']
-apiUrl = "https://api.tibber.com/v1-beta/gql"
-
-pg_db = os.environ['el_pg_db']
-pg_host = os.environ['el_pg_host']
-pg_table = "tibber_prices"
-
-startTime = datetime.now(get_localzone()) - timedelta(days = 1)
-startTime = startTime.isoformat('T')
-
-endTime = datetime.now(get_localzone()).isoformat('T')
-
-# Get the data
-try:
-    url = apiUrl
-
-    # Request headers
-    hdr = {
-        'Authorization': "Bearer " + apiKey,
-        'Content-Type': 'application/json',
-    }
-
-    body = {"query":"""{
-      viewer {
-        homes {
-          currentSubscription{
-            priceInfo{
-              today { total energy tax startsAt level currency }
-              tomorrow { total energy tax startsAt level currency }
-            }
-          }
-        }
-    } }"""}
-
-    response = requests.post(url, headers=hdr, json=body)
-    if response.status_code != 200:
-        print(response.status_code)
-        print("Oh shit")
-        response.raise_for_status()
-
-except Exception as e:
-    print("oh lol")
-    sys.exit(e)
-
-data = response.json()
-
-numdata = len(data["data"]["viewer"]["homes"][0]["currentSubscription"]["priceInfo"]["today"]) + len(data["data"]["viewer"]["homes"][0]["currentSubscription"]["priceInfo"]["tomorrow"])
-
-print("Got " + str(numdata) + " rows from Tibber")
-
-### insert data into database
-
-# prices
-prices = []
-for item in data["data"]["viewer"]["homes"][0]["currentSubscription"]["priceInfo"]["today"]:
-    prices.append((item["startsAt"],item["total"],item["energy"],item["tax"],item["level"]))
-
-for item in data["data"]["viewer"]["homes"][0]["currentSubscription"]["priceInfo"]["tomorrow"]:
-    prices.append((item["startsAt"],item["total"],item["energy"],item["tax"],item["level"]))
-
-
-# connect to db
-conn = psycopg2.connect(database=pg_db, host=pg_host)
-cur = conn.cursor()
-
-# count rows before we start
-cur.execute("SELECT COUNT(*) FROM " + pg_table)
-before = cur.fetchone()
-
-# insert data
-try:
-    cur.executemany("INSERT INTO " + pg_table + " VALUES(%s,%s,%s,%s,%s) ON CONFLICT (startsat) DO NOTHING", prices)
-    conn.commit()
-except Exception as e:
-    conn.rollback()
-    raise e
-
-# count rows after we finnish
-cur.execute("SELECT COUNT(*) FROM " + pg_table)
-after = cur.fetchone()
-
-# count *new* rows
-newRows = after[0] - before[0]
-
-# close connection
-conn.close()
-
-
-print("Successfully inserted " + str(newRows) + " records into the database")
diff --git a/todb.py b/todb.py
deleted file mode 100644
index 7426ace..0000000
--- a/todb.py
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/usr/bin/env python3
-
-import os
-import psycopg
-
-pg_db = os.environ['el_pg_db']
-pg_host = os.environ['el_pg_host']
-pg_user = os.environ['el_pg_user']
-pg_pass = os.environ['el_pg_pass']
-
-def todb(sql, values):
-    with psycopg.connect(dbname=pg_db, host=pg_host, user=pg_user, password=pg_pass) as conn:
-        if type(values) == list:
-            cur = conn.cursor()
-            cur.executemany(sql, values)
-            return cur.rowcount
-        elif type(values) == tuple:
-            conn.execute(sql, values)
-            return 1
-        else:
-            print("OH SHIT")
-            exit()
diff --git a/yr2pgsql.py b/yr2pgsql.py
deleted file mode 100755
index 9c3ae5e..0000000
--- a/yr2pgsql.py
+++ /dev/null
@@ -1,58 +0,0 @@
-#!/usr/bin/env python3
-''' Get weatherdata from yr.no '''
-
-import os
-import sys
-import requests
-
-from common import dbi
-
-lat = str(os.environ['el_yr_lat'])
-lon = str(os.environ['el_yr_lon'])
-
-pg_db = os.environ['el_pg_db']
-pg_host = os.environ['el_pg_host']
-pg_table = "yr"
-
-apiUrl = "https://api.met.no/weatherapi/locationforecast/2.0/compact?lat=" + lat + "&lon=" + lon
-
-### Get the data
-try:
-    url = apiUrl
-
-    # Request headers
-    hdr = {
-        'User-Agent': 'gratis.morell@litepost.no',
-        'Cache-Control': 'no-cache',
-    }
-
-    response = requests.get(url, headers=hdr)
-    if response.status_code != 200:
-        print(response.status_code)
-        print("Oh shit")
-        response.raise_for_status()
-
-except Exception as e:
-    print("oh lol")
-    sys.exit(e)
-
-data = response.json()
-
-
-### insert data into database
-
-values = []
-for item in data["properties"]["timeseries"]:
-    details = item["data"]["instant"]["details"]
-    values.append((item["time"],details["air_temperature"],details["air_pressure_at_sea_level"],details["cloud_area_fraction"],details["relative_humidity"],details["wind_from_direction"],details["wind_speed"]))
-
-sql = "INSERT INTO " + pg_table + """ VALUES(%s,%s,%s,%s,%s,%s,%s) ON CONFLICT (time) DO UPDATE SET
-    air_temperature=EXCLUDED.air_temperature,
-    air_pressure_at_sea_level=EXCLUDED.air_pressure_at_sea_level,
-    cloud_area_fraction=EXCLUDED.cloud_area_fraction,
-    relative_humidity=EXCLUDED.relative_humidity,
-    wind_from_direction=EXCLUDED.wind_from_direction,
-    wind_speed=EXCLUDED.wind_speed,
-    updated=now()"""
-
-dbi(sql, values, verbose=True)
-- 
cgit v1.2.3
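A closing note on the queue protocol that ties common/queue.py to scripts/queue_runner.py: every message is a pickled [sql, values] pair, so producer and consumer only have to agree on that framing. A rough sketch of one round trip, reusing the litequeue calls from the patch (the SQL text is illustrative only):

    import pickle
    from litequeue import SQLQueue

    q = SQLQueue('litequeue.db', maxsize=None)   # the el_QUEUE_db default path

    # Producer side (common/queue.py): statement and parameters travel together.
    q.put(pickle.dumps(["INSERT INTO mqtt_han VALUES(%s,%s)", ('HAN', 230)]))

    # Consumer side (scripts/queue_runner.py): pop, unpickle, execute, acknowledge.
    task = q.pop()
    sql, values = pickle.loads(task['message'])
    # ... hand sql and values to common.postgres.dbi() here ...
    q.done(task['message_id'])

queue_runner.py batches up to ten consecutive messages that share the same SQL string into a single executemany() call, which is why a tuple means "one row" and a list means "already batched" everywhere else in the code.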