#!/bin/python3
"""Fetch recent EUR/NOK spot exchange rates from Norges Bank and load them
into a Postgres table, skipping rows that already exist.

Environment variables:
    el_pg_db   -- Postgres database name
    el_pg_host -- Postgres host

Exits with status 1 if the HTTP download fails.
"""

import csv
import io
import os
import sys
from datetime import datetime, timedelta

import psycopg2
from psycopg2 import sql
import requests
from tzlocal import get_localzone

# Norges Bank's JSON model is awkward to consume; the CSV endpoint is much
# simpler, so we fetch CSV and parse it ourselves.
API_URL = "https://data.norges-bank.no/api/data/EXR/B.EUR.NOK.SP?format=csv&locale=en"

PG_DB = os.environ['el_pg_db']
PG_HOST = os.environ['el_pg_host']
PG_TABLE = "nbex"


def fetch_rates_csv(start_date, end_date):
    """Download the CSV payload for [start_date, end_date] (YYYY-MM-DD strings).

    Returns the response body as text. On any HTTP/network failure, prints
    the error to stderr and exits with status 1.
    """
    url = f"{API_URL}&startPeriod={start_date}&endPeriod={end_date}"
    try:
        # timeout so a stalled server cannot hang the job forever
        response = requests.get(url, timeout=30)
        response.raise_for_status()
    except requests.RequestException as exc:
        print(f"Failed to fetch {url}: {exc}", file=sys.stderr)
        sys.exit(1)
    return response.text


def parse_rates(csv_text):
    """Parse the semicolon-delimited CSV into (date, base, quote, value) tuples.

    Parses in memory via StringIO -- no temp-file roundtrip needed.
    """
    reader = csv.DictReader(io.StringIO(csv_text), delimiter=';')
    return [
        (row["TIME_PERIOD"], row["BASE_CUR"], row["QUOTE_CUR"], row["OBS_VALUE"])
        for row in reader
    ]


def insert_rates(values):
    """Insert rows into PG_TABLE, ignoring duplicates; return count of new rows.

    Uses psycopg2.sql composition so the table name is quoted as an
    identifier rather than concatenated into the query string.
    """
    table = sql.Identifier(PG_TABLE)
    count_stmt = sql.SQL("SELECT COUNT(*) FROM {}").format(table)
    insert_stmt = sql.SQL(
        "INSERT INTO {} VALUES(%s,%s,%s,%s) "
        "ON CONFLICT (startdate,base_cur,quote_cur) DO NOTHING"
    ).format(table)

    conn = psycopg2.connect(database=PG_DB, host=PG_HOST)
    try:
        # "with conn" commits on success and rolls back on exception.
        with conn:
            with conn.cursor() as cur:
                # executemany's rowcount is unreliable with ON CONFLICT DO
                # NOTHING, so count rows before and after instead.
                cur.execute(count_stmt)
                before = cur.fetchone()[0]
                cur.executemany(insert_stmt, values)
                cur.execute(count_stmt)
                after = cur.fetchone()[0]
    finally:
        conn.close()
    return after - before


def main():
    now = datetime.now(get_localzone())
    start_date = (now - timedelta(days=10)).strftime('%Y-%m-%d')
    end_date = now.strftime('%Y-%m-%d')

    values = parse_rates(fetch_rates_csv(start_date, end_date))
    new_rows = insert_rates(values)
    print(f"Successfully inserted {new_rows} records into the database")


if __name__ == "__main__":
    main()