aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorDennis Eriksen <d@ennis.no>2022-09-12 12:08:33 +0200
committerDennis Eriksen <d@ennis.no>2022-09-12 12:08:33 +0200
commit550c48e3c2d824023bc954e508bae3dca3921771 (patch)
tree89cafa70f2b48500d18b59b3f45cf9c4bf298085
downloadenergyscripts-550c48e3c2d824023bc954e508bae3dca3921771.tar.gz
initial commit
-rw-r--r--.gitignore8
-rw-r--r--README.md4
-rwxr-xr-xelvia2pgsql.py84
-rw-r--r--env.sample10
-rw-r--r--schema.sql128
-rwxr-xr-xtibber_consumption2pgsql.py102
-rwxr-xr-xtibber_prices2pgsql.py103
-rwxr-xr-xyr2pgsql.py86
8 files changed, 525 insertions, 0 deletions
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..8535f83
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,8 @@
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..9c2881e
--- /dev/null
+++ b/README.md
@@ -0,0 +1,4 @@
+# Energyscripts
+
+This is a small collection of scripts I use to collect information about my
+energy consumption.
diff --git a/elvia2pgsql.py b/elvia2pgsql.py
new file mode 100755
index 0000000..1b74f02
--- /dev/null
+++ b/elvia2pgsql.py
@@ -0,0 +1,84 @@
+#!/bin/python3
+
+import os
+import sys
+import json
+import psycopg2
+import requests
+
+from datetime import datetime
+from datetime import timedelta
+from tzlocal import get_localzone
+
+
+apiKey = os.environ['elvia_token']
+apiUrl = "https://elvia.azure-api.net/customer/metervalues/api/v1/metervalues"
+
+pg_db = os.environ['pg_db']
+pg_host = os.environ['pg_host']
+pg_table = "elvia"
+
+startTime = datetime.now(get_localzone()) - timedelta(days = 1)
+startTime = startTime.isoformat('T')
+
+endTime = datetime.now(get_localzone()).isoformat('T')
+
+
+### Get the data
+try:
+ url = apiUrl + "?startTime=" + startTime + "&endTime=" + endTime
+
+ # Request headers
+ hdr = {
+ 'Cache-Control': 'no-cache',
+ 'Authorization': "Bearer " + apiKey,
+ }
+
+ response = requests.get(url, headers=hdr)
+ if response.status_code != 200:
+ print(response.status_code)
+ print("Oh shit")
+ response.raise_for_status()
+
+except Exception as e:
+ print("oh lol")
+ sys.exit(e)
+
+data = response.json()
+print("Got "+ str(len(data['meteringpoints'][0]['metervalue']['timeSeries'])) +" items from between "+ startTime + " and " + endTime)
+
+### insert data into database
+
+values = []
+for item in data['meteringpoints'][0]['metervalue']['timeSeries']:
+ # Only deal with verified items.
+ if item['verified']:
+ values.append((data['meteringpoints'][0]['meteringPointId'], item['startTime'], item['endTime'], item['value'], item['uom'], item['production']))
+
+# connect to database
+conn = psycopg2.connect(database=pg_db, host=pg_host)
+cur = conn.cursor()
+
+# count rows before we start
+cur.execute("SELECT COUNT(*) FROM " + pg_table)
+before = cur.fetchone()
+
+# insert data
+try:
+ cur.executemany("INSERT INTO " + pg_table + " VALUES(%s,%s,%s,%s,%s,%s) ON CONFLICT (startTime,endTime) DO NOTHING", values)
+ conn.commit()
+except Exception as e:
+ conn.rollback()
+ raise e
+
+# count rows after we finish
+cur.execute("SELECT COUNT(*) FROM " + pg_table)
+after = cur.fetchone()
+
+# count *new* rows
+newRows = after[0] - before[0]
+
+# close connection
+conn.close()
+
+print("Successfully inserted " + str(newRows) + " records into the database")
diff --git a/env.sample b/env.sample
new file mode 100644
index 0000000..0e1b0a5
--- /dev/null
+++ b/env.sample
@@ -0,0 +1,10 @@
+export elvia_token=XXX
+
+export pg_db=XXX
+export pg_host=XXX
+
+export tibber_token=XXX
+
+export yr_lat=0.0
+export yr_lon=0.0
+
diff --git a/schema.sql b/schema.sql
new file mode 100644
index 0000000..6b20b52
--- /dev/null
+++ b/schema.sql
@@ -0,0 +1,128 @@
+--
+-- PostgreSQL database dump
+--
+
+-- Dumped from database version 14.5 (Ubuntu 14.5-0ubuntu0.22.04.1)
+-- Dumped by pg_dump version 14.5 (Ubuntu 14.5-0ubuntu0.22.04.1)
+
+SET statement_timeout = 0;
+SET lock_timeout = 0;
+SET idle_in_transaction_session_timeout = 0;
+SET client_encoding = 'UTF8';
+SET standard_conforming_strings = on;
+SELECT pg_catalog.set_config('search_path', '', false);
+SET check_function_bodies = false;
+SET xmloption = content;
+SET client_min_messages = warning;
+SET row_security = off;
+
+SET default_tablespace = '';
+
+SET default_table_access_method = heap;
+
+--
+-- Name: elvia; Type: TABLE; Schema: public; Owner: dennis
+--
+
+CREATE TABLE public.elvia (
+ meteringpointid bigint NOT NULL,
+ starttime timestamp with time zone NOT NULL,
+ endtime timestamp with time zone NOT NULL,
+ value numeric,
+ unit character varying(10),
+ production boolean,
+ inserted timestamp with time zone DEFAULT now() NOT NULL
+);
+
+
+ALTER TABLE public.elvia OWNER TO dennis;
+
+--
+-- Name: tibber_consumption; Type: TABLE; Schema: public; Owner: dennis
+--
+
+CREATE TABLE public.tibber_consumption (
+ starttime timestamp with time zone NOT NULL,
+ endtime timestamp with time zone NOT NULL,
+ consumption numeric NOT NULL,
+ consumptionunit character varying(10) DEFAULT 'kWh'::character varying,
+ cost numeric NOT NULL,
+ unitprice numeric NOT NULL,
+ unitpricevat numeric NOT NULL,
+ inserted timestamp with time zone DEFAULT now() NOT NULL
+);
+
+
+ALTER TABLE public.tibber_consumption OWNER TO dennis;
+
+--
+-- Name: tibber_prices; Type: TABLE; Schema: public; Owner: dennis
+--
+
+CREATE TABLE public.tibber_prices (
+ startsat timestamp with time zone NOT NULL,
+ total numeric NOT NULL,
+ energy numeric NOT NULL,
+ tax numeric NOT NULL,
+ level character varying(16),
+ inserted timestamp with time zone DEFAULT now() NOT NULL
+);
+
+
+ALTER TABLE public.tibber_prices OWNER TO dennis;
+
+--
+-- Name: yr; Type: TABLE; Schema: public; Owner: dennis
+--
+
+CREATE TABLE public.yr (
+ "time" timestamp with time zone NOT NULL,
+ air_temperature real,
+ air_pressure_at_sea_level real,
+ cloud_area_fraction real,
+ relative_humidity real,
+ wind_from_direction real,
+ wind_speed real,
+ inserted timestamp with time zone DEFAULT now() NOT NULL,
+ updated timestamp with time zone
+);
+
+
+ALTER TABLE public.yr OWNER TO dennis;
+
+--
+-- Name: elvia el_pkey; Type: CONSTRAINT; Schema: public; Owner: dennis
+--
+
+ALTER TABLE ONLY public.elvia
+ ADD CONSTRAINT el_pkey PRIMARY KEY (starttime, endtime);
+
+
+--
+-- Name: tibber_consumption tibber_consumption_pkey; Type: CONSTRAINT; Schema: public; Owner: dennis
+--
+
+ALTER TABLE ONLY public.tibber_consumption
+ ADD CONSTRAINT tibber_consumption_pkey PRIMARY KEY (starttime, endtime);
+
+
+--
+-- Name: tibber_prices tibber_prices_pkey; Type: CONSTRAINT; Schema: public; Owner: dennis
+--
+
+ALTER TABLE ONLY public.tibber_prices
+ ADD CONSTRAINT tibber_prices_pkey PRIMARY KEY (startsat);
+
+
+--
+-- Name: yr yr_pkey; Type: CONSTRAINT; Schema: public; Owner: dennis
+--
+
+ALTER TABLE ONLY public.yr
+ ADD CONSTRAINT yr_pkey PRIMARY KEY ("time");
+
+
+--
+-- PostgreSQL database dump complete
+--
+
diff --git a/tibber_consumption2pgsql.py b/tibber_consumption2pgsql.py
new file mode 100755
index 0000000..6b61b9c
--- /dev/null
+++ b/tibber_consumption2pgsql.py
@@ -0,0 +1,102 @@
+#!/bin/python3
+
+import os
+import sys
+import json
+import psycopg2
+import requests
+
+from datetime import datetime
+from datetime import timedelta
+from tzlocal import get_localzone
+
+
+# variables
+apiKey = os.environ['tibber_token']
+apiUrl = "https://api.tibber.com/v1-beta/gql"
+
+pg_db = os.environ['pg_db']
+pg_host = os.environ['pg_host']
+pg_table = "tibber_consumption"
+
+startTime = datetime.now(get_localzone()) - timedelta(days = 1)
+startTime = startTime.isoformat('T')
+
+endTime = datetime.now(get_localzone()).isoformat('T')
+
+# Get the data
+try:
+ url = apiUrl
+
+ # Request headers
+ hdr = {
+ 'Authorization': "Bearer " + apiKey,
+ 'Content-Type': 'application/json',
+ }
+
+ body = {"query":"""{
+ viewer {
+ homes {
+ consumption(resolution: HOURLY, last:100) {
+ nodes {
+ from
+ to
+ cost
+ unitPrice
+ unitPriceVAT
+ consumption
+ consumptionUnit
+ }
+ }
+ }
+ } }"""}
+
+ response = requests.post(url, headers=hdr, json=body)
+ if response.status_code != 200:
+ print(response.status_code)
+ print("Oh shit")
+ response.raise_for_status()
+
+except Exception as e:
+ print("oh lol")
+ sys.exit(e)
+
+data = response.json()
+
+numdata = len(data["data"]["viewer"]["homes"][0]["consumption"]["nodes"])
+print("Got " + str(numdata) + " rows from Tibber")
+
+### insert data into database
+# consumption
+values = []
+for item in data["data"]["viewer"]["homes"][0]["consumption"]["nodes"]:
+ if item["consumption"] is not None:
+ values.append((item["from"],item["to"],item["consumption"],item["consumptionUnit"],item["cost"],item["unitPrice"],item["unitPriceVAT"]))
+
+# connect to db
+conn = psycopg2.connect(database=pg_db, host=pg_host)
+cur = conn.cursor()
+
+# count rows before we start
+cur.execute("SELECT COUNT(*) FROM " + pg_table)
+before = cur.fetchone()
+
+# insert data
+try:
+ cur.executemany("INSERT INTO " + pg_table + " VALUES(%s,%s,%s,%s,%s,%s,%s) ON CONFLICT (startTime,endTime) DO NOTHING", values)
+ conn.commit()
+except Exception as e:
+ conn.rollback()
+ raise e
+
+# count rows after we finish
+cur.execute("SELECT COUNT(*) FROM " + pg_table)
+after = cur.fetchone()
+
+# count *new* rows
+newRows = after[0] - before[0]
+
+# close connection
+conn.close()
+
+print("Successfully inserted " + str(newRows) + " records into the database")
diff --git a/tibber_prices2pgsql.py b/tibber_prices2pgsql.py
new file mode 100755
index 0000000..5c0af5c
--- /dev/null
+++ b/tibber_prices2pgsql.py
@@ -0,0 +1,103 @@
+#!/bin/python3
+
+import os
+import sys
+import json
+import psycopg2
+import requests
+
+from datetime import datetime
+from datetime import timedelta
+from tzlocal import get_localzone
+
+
+# variables
+apiKey = os.environ['tibber_token']
+apiUrl = "https://api.tibber.com/v1-beta/gql"
+
+pg_db = os.environ['pg_db']
+pg_host = os.environ['pg_host']
+pg_table = "tibber_prices"
+
+startTime = datetime.now(get_localzone()) - timedelta(days = 1)
+startTime = startTime.isoformat('T')
+
+endTime = datetime.now(get_localzone()).isoformat('T')
+
+# Get the data
+try:
+ url = apiUrl
+
+ # Request headers
+ hdr = {
+ 'Authorization': "Bearer " + apiKey,
+ 'Content-Type': 'application/json',
+ }
+
+ body = {"query":"""{
+ viewer {
+ homes {
+ currentSubscription{
+ priceInfo{
+ today { total energy tax startsAt level currency }
+ tomorrow { total energy tax startsAt level currency }
+ }
+ }
+ }
+ } }"""}
+
+ response = requests.post(url, headers=hdr, json=body)
+ if response.status_code != 200:
+ print(response.status_code)
+ print("Oh shit")
+ response.raise_for_status()
+
+except Exception as e:
+ print("oh lol")
+ sys.exit(e)
+
+data = response.json()
+
+numdata = len(data["data"]["viewer"]["homes"][0]["currentSubscription"]["priceInfo"]["today"]) + len(data["data"]["viewer"]["homes"][0]["currentSubscription"]["priceInfo"]["tomorrow"])
+
+print("Got " + str(numdata) + " rows from Tibber")
+
+### insert data into database
+
+# prices
+prices = []
+for item in data["data"]["viewer"]["homes"][0]["currentSubscription"]["priceInfo"]["today"]:
+ prices.append((item["startsAt"],item["total"],item["energy"],item["tax"],item["level"]))
+
+for item in data["data"]["viewer"]["homes"][0]["currentSubscription"]["priceInfo"]["tomorrow"]:
+ prices.append((item["startsAt"],item["total"],item["energy"],item["tax"],item["level"]))
+
+
+# connect to db
+conn = psycopg2.connect(database=pg_db, host=pg_host)
+cur = conn.cursor()
+
+# count rows before we start
+cur.execute("SELECT COUNT(*) FROM " + pg_table)
+before = cur.fetchone()
+
+# insert data
+try:
+ cur.executemany("INSERT INTO " + pg_table + " VALUES(%s,%s,%s,%s,%s) ON CONFLICT (startsat) DO NOTHING", prices)
+ conn.commit()
+except Exception as e:
+ conn.rollback()
+ raise e
+
+# count rows after we finish
+cur.execute("SELECT COUNT(*) FROM " + pg_table)
+after = cur.fetchone()
+
+# count *new* rows
+newRows = after[0] - before[0]
+
+# close connection
+conn.close()
+
+
+print("Successfully inserted " + str(newRows) + " records into the database")
diff --git a/yr2pgsql.py b/yr2pgsql.py
new file mode 100755
index 0000000..f37f0be
--- /dev/null
+++ b/yr2pgsql.py
@@ -0,0 +1,86 @@
+#!/bin/python3
+
+import os
+import sys
+import json
+import psycopg2
+import requests
+
+lat = str(os.environ['yr_lat'])
+lon = str(os.environ['yr_lon'])
+
+pg_db = os.environ['pg_db']
+pg_host = os.environ['pg_host']
+pg_table = "yr"
+
+apiUrl = "https://api.met.no/weatherapi/locationforecast/2.0/compact?lat=" + lat + "&lon=" + lon
+
+### Get the data
+try:
+ url = apiUrl
+
+ # Request headers
+ hdr = {
+ 'User-Agent': 'gratis.morell@litepost.no',
+ 'Cache-Control': 'no-cache',
+ }
+
+ response = requests.get(url, headers=hdr)
+ if response.status_code != 200:
+ print(response.status_code)
+ print("Oh shit")
+ response.raise_for_status()
+
+except Exception as e:
+ print("oh lol")
+ sys.exit(e)
+
+data = response.json()
+
+
+
+
+
+
+### insert data into database
+
+values = []
+for item in data["properties"]["timeseries"]:
+ details = item["data"]["instant"]["details"]
+ values.append((item["time"],details["air_temperature"],details["air_pressure_at_sea_level"],details["cloud_area_fraction"],details["relative_humidity"],details["wind_from_direction"],details["wind_speed"]))
+
+# connect to database
+conn = psycopg2.connect(database=pg_db, host=pg_host)
+cur = conn.cursor()
+
+# count rows before we start
+cur.execute("SELECT COUNT(*) FROM " + pg_table)
+before = cur.fetchone()
+
+# insert data
+try:
+ cur.executemany("INSERT INTO " + pg_table + """ VALUES(%s,%s,%s,%s,%s,%s,%s) ON CONFLICT (time) DO UPDATE SET
+ air_temperature=EXCLUDED.air_temperature,
+ air_pressure_at_sea_level=EXCLUDED.air_pressure_at_sea_level,
+ cloud_area_fraction=EXCLUDED.cloud_area_fraction,
+ relative_humidity=EXCLUDED.relative_humidity,
+ wind_from_direction=EXCLUDED.wind_from_direction,
+ wind_speed=EXCLUDED.wind_speed,
+ updated=now()
+ """, values)
+ conn.commit()
+except Exception as e:
+ conn.rollback()
+ raise e
+
+# count rows after we finish
+cur.execute("SELECT COUNT(*) FROM " + pg_table)
+after = cur.fetchone()
+
+# count *new* rows
+newRows = after[0] - before[0]
+
+# close connection
+conn.close()
+
+print("Successfully inserted " + str(newRows) + " records into the database. Might have updated a bunch more.")