Diffstat (limited to 'scripts')
-rw-r--r--  scripts/common/__init__.py       13
-rw-r--r--  scripts/common/postgres.py       28
-rw-r--r--  scripts/common/queue.py          22
-rw-r--r--  scripts/elvia.py                 57
-rw-r--r--  scripts/elvia_gridtariff.py      69
-rw-r--r--  scripts/entsoe.py                87
-rw-r--r--  scripts/mqtt_listener.py         54
-rw-r--r--  scripts/nb.py                    59
-rw-r--r--  scripts/neohub.py                27
-rw-r--r--  scripts/queue_runner.py          74
-rw-r--r--  scripts/tibber_consumption.py    76
-rw-r--r--  scripts/tibber_prices.py         76
-rw-r--r--  scripts/yr.py                    58
13 files changed, 700 insertions(+), 0 deletions(-)
diff --git a/scripts/common/__init__.py b/scripts/common/__init__.py
new file mode 100644
index 0000000..89977d2
--- /dev/null
+++ b/scripts/common/__init__.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env python3
+''' common functions and stuff '''
+
+import os
+
+
+# Any non-empty value other than '0'/'false' enables the queue backend.
+# Plain bool() would treat the literal string 'false' as truthy.
+QUEUE = os.environ.get('el_QUEUE', '') not in ('', '0', 'false', 'False')
+
+# Select the dbi implementation: queued writes or direct postgres
+if QUEUE:
+ from .queue import dbi
+else:
+ from .postgres import dbi
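+
+# Both backends expose the same call shape, e.g.:
+#   from common import dbi
+#   dbi("INSERT INTO t VALUES(%s,%s)", (a, b), verbose=True)
+# `values` is one tuple for a single row or a list of tuples for many.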
diff --git a/scripts/common/postgres.py b/scripts/common/postgres.py
new file mode 100644
index 0000000..bff0720
--- /dev/null
+++ b/scripts/common/postgres.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python3
+''' common functions and stuff '''
+
+import os
+import sys
+import psycopg
+
+pg_db = os.environ['el_pg_db']
+pg_host = os.environ['el_pg_host']
+pg_user = os.environ.get('el_pg_user','')
+pg_pass = os.environ.get('el_pg_pass','')
+
+def dbi(sql, values, **kwargs):
+ ''' insert into db '''
+    verbose = bool(kwargs.get('verbose', False))
+ # pylint: disable=E1129
+ with psycopg.connect(dbname=pg_db, host=pg_host, user=pg_user, password=pg_pass) as conn:
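+        # psycopg3 connection context managers commit on clean exit
+        # and roll back if the block raises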
+ cur = conn.cursor()
+ if isinstance(values, list):
+ cur.executemany(sql, values)
+ elif isinstance(values, tuple):
+ cur.execute(sql, values)
+ else:
+            sys.exit(f'`values` is {type(values)}, but it must be a tuple or list')
+    if verbose:
+ print("Inserted and/or changed", cur.rowcount, "rows into db")
+ return True
diff --git a/scripts/common/queue.py b/scripts/common/queue.py
new file mode 100644
index 0000000..e567fb4
--- /dev/null
+++ b/scripts/common/queue.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python3
+''' common functions and stuff '''
+
+import os
+import sys
+import pickle
+from litequeue import SQLQueue
+
+QUEUE_DB = os.environ.get('el_QUEUE_db', 'litequeue.db')
+
+# Initialize queue
+q = SQLQueue(QUEUE_DB, maxsize=None)
+
+def dbi(sql, values, **kwargs):
+    ''' pickle an insert and enqueue it for queue_runner '''
+    verbose = bool(kwargs.get('verbose', False))
+
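+    # Serialize the (sql, values) pair; queue_runner.py unpickles it
+    # later and replays it against postgres.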
+ q.put(pickle.dumps([sql, values]))
+    if verbose:
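+        # assumes statements of the form "INSERT INTO <table> ..."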
+ table = sql.split(' ')[2]
+ num = 1 if isinstance(values, tuple) else len(values)
+ print("Inserted", num, "item(s) into queue for", table)
+ return True
diff --git a/scripts/elvia.py b/scripts/elvia.py
new file mode 100644
index 0000000..be5eaae
--- /dev/null
+++ b/scripts/elvia.py
@@ -0,0 +1,57 @@
+#!/usr/bin/env python3
+''' elvia2pgsql '''
+
+import os
+import sys
+import requests
+
+from datetime import datetime
+from datetime import timedelta
+from tzlocal import get_localzone
+
+from common import dbi
+
+apiKey = os.environ['el_elvia_token']
+apiUrl = "https://elvia.azure-api.net/customer/metervalues/api/v1/metervalues"
+
+pg_table = "elvia"
+
+startTime = datetime.now(get_localzone()) - timedelta(days = 2)
+startTime = startTime.isoformat('T')
+endTime = datetime.now(get_localzone()).isoformat('T')
+
+
+### Get the data
+try:
+ url = apiUrl + "?startTime=" + startTime + "&endTime=" + endTime
+
+ # Request headers
+ hdr = {
+ 'Cache-Control': 'no-cache',
+ 'Authorization': "Bearer " + apiKey,
+ }
+
+ response = requests.get(url, headers=hdr)
+ if response.status_code != 200:
+ print(response.status_code)
+ print("Oh shit")
+ response.raise_for_status()
+
+except Exception as e:
+ print("oh lol")
+ sys.exit(e)
+
+data = response.json()
+print("Got "+ str(len(data['meteringpoints'][0]['metervalue']['timeSeries'])) +" items from between "+ startTime + " and " + endTime)
+
+### insert data into database
+
+values = []
+for item in data['meteringpoints'][0]['metervalue']['timeSeries']:
+ # Only deal with verified items.
+ if item['verified']:
+ values.append((data['meteringpoints'][0]['meteringPointId'], item['startTime'], item['endTime'], item['value'], item['uom'], item['production']))
+
+
+# Insert and report affected rows; duplicate (startTime,endTime) rows
+# are skipped, so re-runs are idempotent
+dbi("INSERT INTO " + pg_table + " VALUES(%s,%s,%s,%s,%s,%s) ON CONFLICT (startTime,endTime) DO NOTHING", values, verbose=True)
diff --git a/scripts/elvia_gridtariff.py b/scripts/elvia_gridtariff.py
new file mode 100644
index 0000000..bcf4bd7
--- /dev/null
+++ b/scripts/elvia_gridtariff.py
@@ -0,0 +1,69 @@
+#!/usr/bin/python3
+''' get grid tariffs '''
+
+import os
+import sys
+from datetime import datetime
+from datetime import timedelta
+import requests
+from tzlocal import get_localzone
+
+from common import dbi
+
+# API documentation: https://elvia.portal.azure-api.net/docs/services/gridtariffapi/operations/post-digin-api-v-tariffquery-meteringpointsgridtariffs?
+
+apiKey = os.environ['el_elvia_grid_api_key']
+apiUrl = "https://elvia.azure-api.net/grid-tariff/digin/api/1/tariffquery/meteringpointsgridtariffs"
+meteringPointId = os.environ['el_meteringPointId']
+
+pg_db = os.environ['el_pg_db']
+pg_host = os.environ['el_pg_host']
+pg_table = "elvia_gridtariff"
+
+
+startTime = datetime.now(get_localzone()) - timedelta(days = 2)
+startTime = startTime.strftime('%Y-%m-%d')
+
+endTime = datetime.now(get_localzone()) + timedelta(days = 2)
+endTime = endTime.strftime('%Y-%m-%d')
+
+
+### Get the data
+try:
+ url = apiUrl
+ # Request headers
+ hdr = {
+ 'Cache-Control': 'no-cache',
+ 'X-API-Key': apiKey,
+ 'Content-Type': 'application/json'
+ }
+
+ # Request body
+ body = {
+ 'starttime': startTime,
+ 'endtime': endTime,
+ 'meteringPointIds': [ meteringPointId ]
+ }
+
+ response = requests.post(url, headers=hdr, json=body)
+ if response.status_code != 200:
+ print(response.status_code)
+ print("Oh shit")
+ response.raise_for_status()
+
+except Exception as e:
+ print("oh lol")
+ sys.exit(e)
+
+data = response.json()
+
+print("Got "+ str(len(data['gridTariffCollections'][0]['gridTariff']['tariffPrice']['hours'])) +" items from between "+ startTime + " and " + endTime)
+
+
+### insert data into database
+
+values = []
+for item in data['gridTariffCollections'][0]['gridTariff']['tariffPrice']['hours']:
+ values.append((meteringPointId, item['startTime'], item['expiredAt'], item['shortName'], item['isPublicHoliday'], item['energyPrice']['total'], item['energyPrice']['totalExVat']))
+
+dbi("INSERT INTO " + pg_table + " VALUES(%s,%s,%s,%s,%s,%s,%s) ON CONFLICT (meteringPointId,startTime,endTime) DO NOTHING", values, verbose=True)
diff --git a/scripts/entsoe.py b/scripts/entsoe.py
new file mode 100644
index 0000000..2597a98
--- /dev/null
+++ b/scripts/entsoe.py
@@ -0,0 +1,87 @@
+#!/usr/bin/env python3
+''' import day-ahead spot prices from the ENTSO-E Transparency Platform '''
+
+import os
+import sys
+from datetime import datetime
+from datetime import timedelta
+import requests
+import xmltodict
+from tzlocal import get_localzone
+from dateutil import tz
+
+from common import dbi
+
+
+# variables
+
+# Getting an api-key isn't very well documented. The documentation [1] points
+# to a pdf [2], which says the following:
+# > In order to request the access to the Restful API, please register on the
+# > Transparency Platform and send an email to transparency@entsoe.eu with
+# > “Restful API access” in the subject line. Indicate the email address you
+# > entered during registration in the email body. We will make our best to
+# > respond to your request.
+# 1: https://transparency.entsoe.eu/content/static_content/Static%20content/web%20api/Guide.html#_authentication_and_authorisation
+# 2: https://transparency.entsoe.eu/content/static_content/download?path=/Static%20content/API-Token-Management.pdf
+apiKey = os.environ['el_entsoe_token']
+
+# https://transparency.entsoe.eu/content/static_content/Static%20content/web%20api/Guide.html
+apiUrl = "https://transparency.entsoe.eu/api?securityToken=" + apiKey
+
+pg_db = os.environ['el_pg_db']
+pg_host = os.environ['el_pg_host']
+pg_table = "entsoe"
+
+startTime = datetime.now(get_localzone()) - timedelta(days = 7)
+startTime = startTime.strftime('%Y%m%d')
+
+endTime = datetime.now(get_localzone()) + timedelta(days = 1)
+endTime = endTime.strftime('%Y%m%d')
+
+# https://transparency.entsoe.eu/content/static_content/Static%20content/web%20api/Guide.html#_areas
+areas = [ {"name": "NO-0", "code": "10YNO-0--------C"},
+ {"name": "NO-1", "code": "10YNO-1--------2"},
+ {"name": "NO-2", "code": "10YNO-2--------T"},
+ {"name": "NO-3", "code": "10YNO-3--------J"},
+ {"name": "NO-4", "code": "10YNO-4--------9"} ]
+
+UTC = tz.gettz('UTC')
+CET = tz.gettz('Europe/Oslo')
+
+
+# Get the data: one A44 (day-ahead prices) request per area
+values = []
+for area in areas:
+ try:
+ url = apiUrl + "&documentType=A44&in_Domain=" + area["code"] + "&out_Domain=" + area["code"] + "&periodStart=" + startTime + "0000&periodEnd=" + endTime + "0000"
+
+ print("Getting data for " + area["code"])
+ response = requests.get(url)
+ if response.status_code != 200:
+ print(response.status_code)
+ print("Oh shit")
+ response.raise_for_status()
+
+ except Exception as e:
+ print("oh lol")
+ sys.exit(e)
+
+ data_dict = xmltodict.parse(response.content)
+
+ items = 0
+ if "Publication_MarketDocument" in data_dict:
+ for lista in data_dict["Publication_MarketDocument"]["TimeSeries"]:
+ utctime = datetime.strptime(lista["Period"]["timeInterval"]["start"], "%Y-%m-%dT%H:%MZ")
+ utctime = utctime.replace(tzinfo = UTC)
+ cettime = utctime.astimezone(CET)
+ items += len(lista["Period"]["Point"])
+
+ for item in lista["Period"]["Point"]:
+                # the response gives a time range, but no timestamp per
+                # price point, so compute each one from the position
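+                # e.g. a period starting 2023-01-01T23:00Z (00:00 CET):
+                # position 1 is 00:00 CET, position 3 is 02:00 CET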
+ time = str(cettime + timedelta(hours = int(item["position"]) - 1))
+
+ # append values
+ values.append((time, area["name"], item["price.amount"]))
+ print("Got " + str(items) + " records")
+
+dbi("INSERT INTO " + pg_table + " VALUES(%s,%s,%s) ON CONFLICT (starttime, zone) DO NOTHING", values, verbose=True)
diff --git a/scripts/mqtt_listener.py b/scripts/mqtt_listener.py
new file mode 100644
index 0000000..24ee207
--- /dev/null
+++ b/scripts/mqtt_listener.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python3
+
+import os
+import json
+from datetime import datetime
+import paho.mqtt.client as mqtt
+
+from common import dbi
+
+mqtt_server = os.environ['el_mqtt_server']
+mqtt_port = int(os.environ['el_mqtt_port'])
+keepalive = int(os.environ['el_mqtt_keepalive'])
+mqtt_topic = os.environ['el_mqtt_topic']
+
+# The callback for when the client receives a CONNACK response from the server.
+def on_connect(client, userdata, flags, rc):
+ print("Connected with result code "+str(rc))
+
+ # Subscribing in on_connect() means that if we lose the connection and
+ # reconnect then subscriptions will be renewed.
+ client.subscribe(mqtt_topic)
+
+# The callback for when a PUBLISH message is received from the server.
+def on_message(client, userdata, msg):
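+    # Topics are assumed to be <prefix>/<device-name>; the name picks
+    # the target table (tmp* sensors vs. the HAN power meter).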
+ name = msg.topic.split('/')[1]
+ data = json.loads(msg.payload)
+
+ if name.startswith('tmp') and 'temperature' in data and 'humidity' in data:
+ sql = "INSERT INTO mqtt_temps (name, temperature, humidity, battery, linkquality, voltage, time) VALUES(%s,%s,%s,%s,%s,%s,%s)"
+ values = (name, data['temperature'], data['humidity'], data['battery'], data['linkquality'], data['voltage'], datetime.utcnow())
+
+ elif name == 'HAN' and 'current' in data:
+ sql = "INSERT INTO mqtt_han (name, current, power, voltage, linkquality, time) VALUES(%s,%s,%s,%s,%s,%s)"
+ values = (name, data['current'], data['power'], data['voltage'], data['linkquality'], datetime.utcnow())
+
+ else:
+ return
+
+ dbi(sql, values, verbose=True)
+
+
+
+# mqtt
+client = mqtt.Client()
+client.on_connect = on_connect
+client.on_message = on_message
+
+client.connect(mqtt_server, mqtt_port, keepalive)
+
+# Blocking call that processes network traffic, dispatches callbacks and
+# handles reconnecting.
+# Other loop*() functions are available that give a threaded interface and a
+# manual interface.
+client.loop_forever()
diff --git a/scripts/nb.py b/scripts/nb.py
new file mode 100644
index 0000000..1901182
--- /dev/null
+++ b/scripts/nb.py
@@ -0,0 +1,59 @@
+#!/usr/bin/env python3
+''' import EUR/NOK exchange rates from Norges Bank '''
+
+import os
+import sys
+import csv
+import requests
+import tempfile
+
+from datetime import datetime
+from datetime import timedelta
+from tzlocal import get_localzone
+
+from common import dbi
+
+# Norges Bank's JSON model is hard to follow; it's a lot easier to
+# fetch the CSV and parse that instead.
+apiUrl = "https://data.norges-bank.no/api/data/EXR/B.EUR.NOK.SP?format=csv&locale=en"
+
+pg_db = os.environ['el_pg_db']
+pg_host = os.environ['el_pg_host']
+pg_table = "nbex"
+
+startTime = datetime.now(get_localzone()) - timedelta(days = 10)
+startTime = startTime.strftime('%Y-%m-%d')
+
+endTime = datetime.now(get_localzone()).strftime('%Y-%m-%d')
+
+temp = tempfile.NamedTemporaryFile()
+
+### Get the data
+try:
+ url = apiUrl + "&startPeriod=" + startTime + "&endPeriod=" + endTime
+
+ response = requests.get(url)
+ if response.status_code != 200:
+ print(response.status_code)
+ print("Oh shit")
+ response.raise_for_status()
+
+ with open(temp.name,'w') as fd:
+ fd.write(response.text)
+
+except Exception as e:
+ print("oh lol")
+ sys.exit(e)
+
+
+### insert data into database
+
+values = []
+with open(temp.name) as csvfile:
+ csvReader = csv.DictReader(csvfile, delimiter=';')
+
+ for item in csvReader:
+ values.append((item["TIME_PERIOD"],item["BASE_CUR"],item["QUOTE_CUR"],item["OBS_VALUE"]))
+temp.close()
+
+
+dbi("INSERT INTO " + pg_table + " VALUES(%s,%s,%s,%s) ON CONFLICT (startdate,base_cur,quote_cur) DO NOTHING", values,verbose=True)
diff --git a/scripts/neohub.py b/scripts/neohub.py
new file mode 100644
index 0000000..5c62606
--- /dev/null
+++ b/scripts/neohub.py
@@ -0,0 +1,27 @@
+#!/usr/bin/python3
+
+import os
+import asyncio
+from datetime import datetime
+import neohubapi.neohub as neohub
+
+from common import dbi
+
+neohub_ip = os.environ['el_neohub_ip']
+neohub_port = os.environ['el_neohub_port']
+
+async def run():
+ # Legacy connection
+ hub = neohub.NeoHub(neohub_ip, int(neohub_port))
+ # Or, for a websocket connection:
+ # hub = neohub.Neohub(port=4243, token='xxx-xxxxxxx')
+ system = await hub.get_system()
+ hub_data, devices = await hub.get_live_data()
+ for device in devices['thermostats']:
+ print(f"Temperature in zone {device.name}: {device}")
+ sql = "INSERT INTO neohub (timestamp, time, device_id, away, heat_mode, heat_on, current_floor_temperature, target_temperature, temperature) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s)"
+ values = (datetime.utcnow(), device.time, device.device_id, device.away, device.heat_mode, device.heat_on, device.current_floor_temperature, device.target_temperature, device.temperature)
+ dbi(sql, values)
+
+asyncio.run(run())
+
diff --git a/scripts/queue_runner.py b/scripts/queue_runner.py
new file mode 100644
index 0000000..436dcb8
--- /dev/null
+++ b/scripts/queue_runner.py
@@ -0,0 +1,74 @@
+#!/usr/bin/env python3
+''' move items from queue to database '''
+
+import os
+import time
+import pickle
+import sqlite3
+from litequeue import SQLQueue
+
+from common.postgres import dbi
+
+QUEUE_DB = os.environ.get('el_QUEUE_db', 'litequeue.db')
+
+# Unlock messages a previous (crashed) run popped but never marked done
+con = sqlite3.connect(QUEUE_DB)
+cur = con.cursor()
+cur.execute("UPDATE Queue SET status = 0 WHERE status = 1")
+con.commit()
+con.close()
+
+# Open Queue
+q = SQLQueue(QUEUE_DB, maxsize=None)
+q.prune()
+
+msgids = []
+values = []
+
+i = 0
+
+# Event loop
+while True:
+ # Just sleep if queue is empty
+    if q.empty():
+ time.sleep(10)
+ continue
+
+ # get message
+ task = q.pop()
+ raw = pickle.loads(task['message'])
+ msgids.append(task['message_id'])
+ sql = raw[0]
+
+ # if the queue-item already is a batch-job, don't do any more batch-work
+ if isinstance(raw[1], list):
+ values = raw[1]
+ i = 10
+ else:
+ values.append(raw[1])
+
+
+ # Check if we can batch up with the next message in queue
+ i += 1
+ if i < 10 and q.qsize() - len(msgids) >= 1:
+ nextraw = pickle.loads(q.peek()['message'])
+ nextsql = nextraw[0]
+ nextvalues = nextraw[1]
+ if sql == nextsql and isinstance(nextvalues, tuple):
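+            # same statement with a single-row payload coming up:
+            # keep accumulating instead of flushing now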
+ continue
+
+ dbi(sql,values)
+
+    for msg_id in msgids:
+        q.done(msg_id)
+
+ table = sql.split(' ')[2]
+ num = 1 if isinstance(values, tuple) else len(values)
+ left = str(q.qsize()) + " items left in queue" if q.qsize() > 0 else ''
+ print("Processed", num, "item(s) for table", table + ".", left)
+
+ msgids = []
+ values = []
+    i = 0
+
+print("END")
diff --git a/scripts/tibber_consumption.py b/scripts/tibber_consumption.py
new file mode 100644
index 0000000..a411562
--- /dev/null
+++ b/scripts/tibber_consumption.py
@@ -0,0 +1,76 @@
+#!/usr/bin/env python3
+''' import energy consumption from tibber '''
+
+import os
+import sys
+from datetime import datetime
+from datetime import timedelta
+from tzlocal import get_localzone
+import requests
+
+from common import dbi
+
+
+# variables
+apiKey = os.environ['el_tibber_token']
+apiUrl = "https://api.tibber.com/v1-beta/gql"
+
+pg_db = os.environ['el_pg_db']
+pg_host = os.environ['el_pg_host']
+pg_table = "tibber_consumption"
+
+startTime = datetime.now(get_localzone()) - timedelta(days = 1)
+startTime = startTime.isoformat('T')
+
+endTime = datetime.now(get_localzone()).isoformat('T')
+
+# Get the data
+try:
+ url = apiUrl
+
+ # Request headers
+ hdr = {
+ 'Authorization': "Bearer " + apiKey,
+ 'Content-Type': 'application/json',
+ }
+
+ body = {"query":"""{
+ viewer {
+ homes {
+ consumption(resolution: HOURLY, last:100) {
+ nodes {
+ from
+ to
+ cost
+ unitPrice
+ unitPriceVAT
+ consumption
+ consumptionUnit
+ }
+ }
+ }
+ } }"""}
+
+ response = requests.post(url, headers=hdr, json=body)
+ if response.status_code != 200:
+ print(response.status_code)
+ print("Oh shit")
+ response.raise_for_status()
+
+except Exception as e:
+ print("oh lol")
+ sys.exit(e)
+
+data = response.json()
+
+numdata = len(data["data"]["viewer"]["homes"][0]["consumption"]["nodes"])
+print("Got " + str(numdata) + " rows from Tibber")
+
+### insert data into database
+# consumption
+values = []
+for item in data["data"]["viewer"]["homes"][0]["consumption"]["nodes"]:
+ if item["consumption"] is not None:
+ values.append((item["from"],item["to"],item["consumption"],item["consumptionUnit"],item["cost"],item["unitPrice"],item["unitPriceVAT"]))
+
+dbi("INSERT INTO " + pg_table + " VALUES(%s,%s,%s,%s,%s,%s,%s) ON CONFLICT (startTime,endTime) DO NOTHING", values, verbose=True)
diff --git a/scripts/tibber_prices.py b/scripts/tibber_prices.py
new file mode 100644
index 0000000..81b3afb
--- /dev/null
+++ b/scripts/tibber_prices.py
@@ -0,0 +1,76 @@
+#!/usr/bin/env python3
+''' import energy prices from tibber '''
+
+import os
+import sys
+from datetime import datetime
+from datetime import timedelta
+from tzlocal import get_localzone
+import requests
+
+from common import dbi
+
+
+# variables
+apiKey = os.environ['el_tibber_token']
+apiUrl = "https://api.tibber.com/v1-beta/gql"
+
+pg_db = os.environ['el_pg_db']
+pg_host = os.environ['el_pg_host']
+pg_table = "tibber_prices"
+
+startTime = datetime.now(get_localzone()) - timedelta(days = 1)
+startTime = startTime.isoformat('T')
+
+endTime = datetime.now(get_localzone()).isoformat('T')
+
+# Get the data
+try:
+ url = apiUrl
+
+ # Request headers
+ hdr = {
+ 'Authorization': "Bearer " + apiKey,
+ 'Content-Type': 'application/json',
+ }
+
+ body = {"query":"""{
+ viewer {
+ homes {
+ currentSubscription{
+ priceInfo{
+ today { total energy tax startsAt level currency }
+ tomorrow { total energy tax startsAt level currency }
+ }
+ }
+ }
+ } }"""}
+
+ response = requests.post(url, headers=hdr, json=body)
+ if response.status_code != 200:
+ print(response.status_code)
+ print("Oh shit")
+ response.raise_for_status()
+
+except Exception as e:
+ print("oh lol")
+ sys.exit(e)
+
+data = response.json()
+
+numdata = len(data["data"]["viewer"]["homes"][0]["currentSubscription"]["priceInfo"]["today"]) + len(data["data"]["viewer"]["homes"][0]["currentSubscription"]["priceInfo"]["tomorrow"])
+
+print("Got " + str(numdata) + " rows from Tibber")
+
+### insert data into database
+
+# prices
+prices = []
+for item in data["data"]["viewer"]["homes"][0]["currentSubscription"]["priceInfo"]["today"]:
+ prices.append((item["startsAt"],item["total"],item["energy"],item["tax"],item["level"]))
+
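+# `tomorrow` is empty until Tibber publishes the day-ahead prices
+# (typically early afternoon), so this loop may contribute nothing.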
+for item in data["data"]["viewer"]["homes"][0]["currentSubscription"]["priceInfo"]["tomorrow"]:
+ prices.append((item["startsAt"],item["total"],item["energy"],item["tax"],item["level"]))
+
+
+dbi("INSERT INTO " + pg_table + " VALUES(%s,%s,%s,%s,%s) ON CONFLICT (startsat) DO NOTHING", prices, verbose=True)
diff --git a/scripts/yr.py b/scripts/yr.py
new file mode 100644
index 0000000..9c3ae5e
--- /dev/null
+++ b/scripts/yr.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python3
+''' Get weatherdata from yr.no '''
+
+import os
+import sys
+import requests
+
+from common import dbi
+
+lat = os.environ['el_yr_lat']
+lon = os.environ['el_yr_lon']
+
+pg_db = os.environ['el_pg_db']
+pg_host = os.environ['el_pg_host']
+pg_table = "yr"
+
+apiUrl = "https://api.met.no/weatherapi/locationforecast/2.0/compact?lat=" + lat + "&lon=" + lon
+
+### Get the data
+try:
+ url = apiUrl
+
+ # Request headers
+ hdr = {
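+        # api.met.no requires an identifying User-Agent (contact info)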
+ 'User-Agent': 'gratis.morell@litepost.no',
+ 'Cache-Control': 'no-cache',
+ }
+
+ response = requests.get(url, headers=hdr)
+ if response.status_code != 200:
+ print(response.status_code)
+ print("Oh shit")
+ response.raise_for_status()
+
+except Exception as e:
+ print("oh lol")
+ sys.exit(e)
+
+data = response.json()
+
+
+### insert data into database
+
+values = []
+for item in data["properties"]["timeseries"]:
+ details = item["data"]["instant"]["details"]
+ values.append((item["time"],details["air_temperature"],details["air_pressure_at_sea_level"],details["cloud_area_fraction"],details["relative_humidity"],details["wind_from_direction"],details["wind_speed"]))
+
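+# Forecasts for a timestamp are revised on later runs, so update the
+# row in place instead of ignoring the conflict.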
+sql = "INSERT INTO " + pg_table + """ VALUES(%s,%s,%s,%s,%s,%s,%s) ON CONFLICT (time) DO UPDATE SET
+ air_temperature=EXCLUDED.air_temperature,
+ air_pressure_at_sea_level=EXCLUDED.air_pressure_at_sea_level,
+ cloud_area_fraction=EXCLUDED.cloud_area_fraction,
+ relative_humidity=EXCLUDED.relative_humidity,
+ wind_from_direction=EXCLUDED.wind_from_direction,
+ wind_speed=EXCLUDED.wind_speed,
+ updated=now()"""
+
+dbi(sql, values, verbose=True)