aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--.gitignore3
-rw-r--r--.pylintrc570
-rw-r--r--common/__init__.py13
-rw-r--r--common/postgres.py28
-rw-r--r--common/queue.py21
-rwxr-xr-xelvia2pgsql.py37
-rwxr-xr-xelvia_gridtariff2pgsql.py38
-rwxr-xr-xentsoe2pgsql.py52
-rw-r--r--env.sample3
-rw-r--r--mqtt2queue.py13
-rwxr-xr-xnb2pgsql.py31
-rw-r--r--queue2pgsql.py12
-rw-r--r--todb.py2
-rwxr-xr-xyr2pgsql.py54
14 files changed, 692 insertions, 185 deletions
diff --git a/.gitignore b/.gitignore
index af68312..cc50e13 100644
--- a/.gitignore
+++ b/.gitignore
@@ -3,6 +3,9 @@ litequeue.db
litequeue.db-shm
litequeue.db-wal
+# test
+test.py
+
# Environments
.env
.venv
diff --git a/.pylintrc b/.pylintrc
new file mode 100644
index 0000000..caf2a25
--- /dev/null
+++ b/.pylintrc
@@ -0,0 +1,570 @@
+[MASTER]
+
+# A comma-separated list of package or module names from where C extensions may
+# be loaded. Extensions are loading into the active Python interpreter and may
+# run arbitrary code.
+extension-pkg-allow-list=
+
+# A comma-separated list of package or module names from where C extensions may
+# be loaded. Extensions are loading into the active Python interpreter and may
+# run arbitrary code. (This is an alternative name to extension-pkg-allow-list
+# for backward compatibility.)
+extension-pkg-whitelist=
+
+# Return non-zero exit code if any of these messages/categories are detected,
+# even if score is above --fail-under value. Syntax same as enable. Messages
+# specified are enabled, while categories only check already-enabled messages.
+fail-on=
+
+# Specify a score threshold to be exceeded before program exits with error.
+fail-under=10.0
+
+# Files or directories to be skipped. They should be base names, not paths.
+ignore=CVS
+
+# Add files or directories matching the regex patterns to the ignore-list. The
+# regex matches against paths and can be in Posix or Windows format.
+ignore-paths=
+
+# Files or directories matching the regex patterns are skipped. The regex
+# matches against base names, not paths.
+ignore-patterns=
+
+# Python code to execute, usually for sys.path manipulation such as
+# pygtk.require().
+#init-hook=
+
+# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the
+# number of processors available to use.
+jobs=1
+
+# Control the amount of potential inferred values when inferring a single
+# object. This can help the performance when dealing with large functions or
+# complex, nested conditions.
+limit-inference-results=100
+
+# List of plugins (as comma separated values of python module names) to load,
+# usually to register additional checkers.
+load-plugins=
+
+# Pickle collected data for later comparisons.
+persistent=yes
+
+# Minimum Python version to use for version dependent checks. Will default to
+# the version used to run pylint.
+py-version=3.10
+
+# When enabled, pylint would attempt to guess common misconfiguration and emit
+# user-friendly hints instead of false-positive error messages.
+suggestion-mode=yes
+
+# Allow loading of arbitrary C extensions. Extensions are imported into the
+# active Python interpreter and may run arbitrary code.
+unsafe-load-any-extension=no
+
+
+[MESSAGES CONTROL]
+
+# Only show warnings with the listed confidence levels. Leave empty to show
+# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED.
+confidence=
+
+# Disable the message, report, category or checker with the given id(s). You
+# can either give multiple identifiers separated by comma (,) or put this
+# option multiple times (only on the command line, not in the configuration
+# file where it should appear only once). You can also use "--disable=all" to
+# disable everything first and then reenable specific checks. For example, if
+# you want to run only the similarities checker, you can use "--disable=all
+# --enable=similarities". If you want to run only the classes checker, but have
+# no Warning level messages displayed, use "--disable=all --enable=classes
+# --disable=W".
+#disable=raw-checker-failed,
+# bad-inline-option,
+# locally-disabled,
+# file-ignored,
+# suppressed-message,
+# useless-suppression,
+# deprecated-pragma,
+# use-symbolic-message-instead
+disable=line-too-long,
+ invalid-name
+
+# Enable the message, report, category or checker with the given id(s). You can
+# either give multiple identifier separated by comma (,) or put this option
+# multiple time (only on the command line, not in the configuration file where
+# it should appear only once). See also the "--disable" option for examples.
+enable=c-extension-no-member
+
+
+[REPORTS]
+
+# Python expression which should return a score less than or equal to 10. You
+# have access to the variables 'error', 'warning', 'refactor', and 'convention'
+# which contain the number of messages in each category, as well as 'statement'
+# which is the total number of statements analyzed. This score is used by the
+# global evaluation report (RP0004).
+evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
+
+# Template used to display messages. This is a python new-style format string
+# used to format the message information. See doc for all details.
+#msg-template=
+
+# Set the output format. Available formats are text, parseable, colorized, json
+# and msvs (visual studio). You can also give a reporter class, e.g.
+# mypackage.mymodule.MyReporterClass.
+output-format=text
+
+# Tells whether to display a full report or only the messages.
+reports=no
+
+# Activate the evaluation score.
+score=yes
+
+
+[REFACTORING]
+
+# Maximum number of nested blocks for function / method body
+max-nested-blocks=5
+
+# Complete name of functions that never returns. When checking for
+# inconsistent-return-statements if a never returning function is called then
+# it will be considered as an explicit return statement and no message will be
+# printed.
+never-returning-functions=sys.exit,argparse.parse_error
+
+
+[BASIC]
+
+# Naming style matching correct argument names.
+argument-naming-style=snake_case
+
+# Regular expression matching correct argument names. Overrides argument-
+# naming-style.
+#argument-rgx=
+
+# Naming style matching correct attribute names.
+attr-naming-style=snake_case
+
+# Regular expression matching correct attribute names. Overrides attr-naming-
+# style.
+#attr-rgx=
+
+# Bad variable names which should always be refused, separated by a comma.
+bad-names=foo,
+ bar,
+ baz,
+ toto,
+ tutu,
+ tata
+
+# Bad variable names regexes, separated by a comma. If names match any regex,
+# they will always be refused
+bad-names-rgxs=
+
+# Naming style matching correct class attribute names.
+class-attribute-naming-style=any
+
+# Regular expression matching correct class attribute names. Overrides class-
+# attribute-naming-style.
+#class-attribute-rgx=
+
+# Naming style matching correct class constant names.
+class-const-naming-style=UPPER_CASE
+
+# Regular expression matching correct class constant names. Overrides class-
+# const-naming-style.
+#class-const-rgx=
+
+# Naming style matching correct class names.
+class-naming-style=PascalCase
+
+# Regular expression matching correct class names. Overrides class-naming-
+# style.
+#class-rgx=
+
+# Naming style matching correct constant names.
+const-naming-style=UPPER_CASE
+
+# Regular expression matching correct constant names. Overrides const-naming-
+# style.
+#const-rgx=
+
+# Minimum line length for functions/classes that require docstrings, shorter
+# ones are exempt.
+docstring-min-length=-1
+
+# Naming style matching correct function names.
+function-naming-style=snake_case
+
+# Regular expression matching correct function names. Overrides function-
+# naming-style.
+#function-rgx=
+
+# Good variable names which should always be accepted, separated by a comma.
+good-names=i,
+ j,
+ k,
+ ex,
+ Run,
+ _
+
+# Good variable names regexes, separated by a comma. If names match any regex,
+# they will always be accepted
+good-names-rgxs=
+
+# Include a hint for the correct naming format with invalid-name.
+include-naming-hint=no
+
+# Naming style matching correct inline iteration names.
+inlinevar-naming-style=any
+
+# Regular expression matching correct inline iteration names. Overrides
+# inlinevar-naming-style.
+#inlinevar-rgx=
+
+# Naming style matching correct method names.
+method-naming-style=snake_case
+
+# Regular expression matching correct method names. Overrides method-naming-
+# style.
+#method-rgx=
+
+# Naming style matching correct module names.
+module-naming-style=snake_case
+
+# Regular expression matching correct module names. Overrides module-naming-
+# style.
+#module-rgx=
+
+# Colon-delimited sets of names that determine each other's naming style when
+# the name regexes allow several styles.
+name-group=
+
+# Regular expression which should only match function or class names that do
+# not require a docstring.
+no-docstring-rgx=^_
+
+# List of decorators that produce properties, such as abc.abstractproperty. Add
+# to this list to register other decorators that produce valid properties.
+# These decorators are taken in consideration only for invalid-name.
+property-classes=abc.abstractproperty
+
+# Naming style matching correct variable names.
+variable-naming-style=snake_case
+
+# Regular expression matching correct variable names. Overrides variable-
+# naming-style.
+#variable-rgx=
+
+
+[FORMAT]
+
+# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
+expected-line-ending-format=
+
+# Regexp for a line that is allowed to be longer than the limit.
+ignore-long-lines=^\s*(# )?<?https?://\S+>?$
+
+# Number of spaces of indent required inside a hanging or continued line.
+indent-after-paren=4
+
+# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
+# tab).
+indent-string=' '
+
+# Maximum number of characters on a single line.
+max-line-length=100
+
+# Maximum number of lines in a module.
+max-module-lines=1000
+
+# Allow the body of a class to be on the same line as the declaration if body
+# contains single statement.
+single-line-class-stmt=no
+
+# Allow the body of an if to be on the same line as the test if there is no
+# else.
+single-line-if-stmt=no
+
+
+[LOGGING]
+
+# The type of string formatting that logging methods do. `old` means using %
+# formatting, `new` is for `{}` formatting.
+logging-format-style=old
+
+# Logging modules to check that the string format arguments are in logging
+# function parameter format.
+logging-modules=logging
+
+
+[MISCELLANEOUS]
+
+# List of note tags to take in consideration, separated by a comma.
+notes=FIXME,
+ XXX,
+ TODO
+
+# Regular expression of note tags to take in consideration.
+#notes-rgx=
+
+
+[SIMILARITIES]
+
+# Comments are removed from the similarity computation
+ignore-comments=yes
+
+# Docstrings are removed from the similarity computation
+ignore-docstrings=yes
+
+# Imports are removed from the similarity computation
+ignore-imports=no
+
+# Signatures are removed from the similarity computation
+ignore-signatures=no
+
+# Minimum lines number of a similarity.
+min-similarity-lines=4
+
+
+[SPELLING]
+
+# Limits count of emitted suggestions for spelling mistakes.
+max-spelling-suggestions=4
+
+# Spelling dictionary name. Available dictionaries: none. To make it work,
+# install the 'python-enchant' package.
+spelling-dict=
+
+# List of comma separated words that should be considered directives if they
+# appear and the beginning of a comment and should not be checked.
+spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy:
+
+# List of comma separated words that should not be checked.
+spelling-ignore-words=
+
+# A path to a file that contains the private dictionary; one word per line.
+spelling-private-dict-file=
+
+# Tells whether to store unknown words to the private dictionary (see the
+# --spelling-private-dict-file option) instead of raising a message.
+spelling-store-unknown-words=no
+
+
+[STRING]
+
+# This flag controls whether inconsistent-quotes generates a warning when the
+# character used as a quote delimiter is used inconsistently within a module.
+check-quote-consistency=no
+
+# This flag controls whether the implicit-str-concat should generate a warning
+# on implicit string concatenation in sequences defined over several lines.
+check-str-concat-over-line-jumps=no
+
+
+[TYPECHECK]
+
+# List of decorators that produce context managers, such as
+# contextlib.contextmanager. Add to this list to register other decorators that
+# produce valid context managers.
+contextmanager-decorators=contextlib.contextmanager
+
+# List of members which are set dynamically and missed by pylint inference
+# system, and so shouldn't trigger E1101 when accessed. Python regular
+# expressions are accepted.
+generated-members=
+
+# Tells whether missing members accessed in mixin class should be ignored. A
+# class is considered mixin if its name matches the mixin-class-rgx option.
+ignore-mixin-members=yes
+
+# Tells whether to warn about missing members when the owner of the attribute
+# is inferred to be None.
+ignore-none=yes
+
+# This flag controls whether pylint should warn about no-member and similar
+# checks whenever an opaque object is returned when inferring. The inference
+# can return multiple potential results while evaluating a Python object, but
+# some branches might not be evaluated, which results in partial inference. In
+# that case, it might be useful to still emit no-member and other checks for
+# the rest of the inferred objects.
+ignore-on-opaque-inference=yes
+
+# List of class names for which member attributes should not be checked (useful
+# for classes with dynamically set attributes). This supports the use of
+# qualified names.
+ignored-classes=optparse.Values,thread._local,_thread._local
+
+# List of module names for which member attributes should not be checked
+# (useful for modules/projects where namespaces are manipulated during runtime
+# and thus existing member attributes cannot be deduced by static analysis). It
+# supports qualified module names, as well as Unix pattern matching.
+ignored-modules=
+
+# Show a hint with possible names when a member name was not found. The aspect
+# of finding the hint is based on edit distance.
+missing-member-hint=yes
+
+# The minimum edit distance a name should have in order to be considered a
+# similar match for a missing member name.
+missing-member-hint-distance=1
+
+# The total number of similar names that should be taken in consideration when
+# showing a hint for a missing member.
+missing-member-max-choices=1
+
+# Regex pattern to define which classes are considered mixins ignore-mixin-
+# members is set to 'yes'
+mixin-class-rgx=.*[Mm]ixin
+
+# List of decorators that change the signature of a decorated function.
+signature-mutators=
+
+
+[VARIABLES]
+
+# List of additional names supposed to be defined in builtins. Remember that
+# you should avoid defining new builtins when possible.
+additional-builtins=
+
+# Tells whether unused global variables should be treated as a violation.
+allow-global-unused-variables=yes
+
+# List of names allowed to shadow builtins
+allowed-redefined-builtins=
+
+# List of strings which can identify a callback function by name. A callback
+# name must start or end with one of those strings.
+callbacks=cb_,
+ _cb
+
+# A regular expression matching the name of dummy variables (i.e. expected to
+# not be used).
+dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
+
+# Argument names that match this expression will be ignored. Default to name
+# with leading underscore.
+ignored-argument-names=_.*|^ignored_|^unused_
+
+# Tells whether we should check for unused import in __init__ files.
+init-import=no
+
+# List of qualified module names which can have objects that can redefine
+# builtins.
+redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io
+
+
+[CLASSES]
+
+# Warn about protected attribute access inside special methods
+check-protected-access-in-special-methods=no
+
+# List of method names used to declare (i.e. assign) instance attributes.
+defining-attr-methods=__init__,
+ __new__,
+ setUp,
+ __post_init__
+
+# List of member names, which should be excluded from the protected access
+# warning.
+exclude-protected=_asdict,
+ _fields,
+ _replace,
+ _source,
+ _make
+
+# List of valid names for the first argument in a class method.
+valid-classmethod-first-arg=cls
+
+# List of valid names for the first argument in a metaclass class method.
+valid-metaclass-classmethod-first-arg=cls
+
+
+[DESIGN]
+
+# List of regular expressions of class ancestor names to ignore when counting
+# public methods (see R0903)
+exclude-too-few-public-methods=
+
+# List of qualified class names to ignore when counting class parents (see
+# R0901)
+ignored-parents=
+
+# Maximum number of arguments for function / method.
+max-args=5
+
+# Maximum number of attributes for a class (see R0902).
+max-attributes=7
+
+# Maximum number of boolean expressions in an if statement (see R0916).
+max-bool-expr=5
+
+# Maximum number of branch for function / method body.
+max-branches=12
+
+# Maximum number of locals for function / method body.
+max-locals=15
+
+# Maximum number of parents for a class (see R0901).
+max-parents=7
+
+# Maximum number of public methods for a class (see R0904).
+max-public-methods=20
+
+# Maximum number of return / yield for function / method body.
+max-returns=6
+
+# Maximum number of statements in function / method body.
+max-statements=50
+
+# Minimum number of public methods for a class (see R0903).
+min-public-methods=2
+
+
+[IMPORTS]
+
+# List of modules that can be imported at any level, not just the top level
+# one.
+allow-any-import-level=
+
+# Allow wildcard imports from modules that define __all__.
+allow-wildcard-with-all=no
+
+# Analyse import fallback blocks. This can be used to support both Python 2 and
+# 3 compatible code, which means that the block might have code that exists
+# only in one or another interpreter, leading to false positives when analysed.
+analyse-fallback-blocks=no
+
+# Deprecated modules which should not be used, separated by a comma.
+deprecated-modules=
+
+# Output a graph (.gv or any supported image format) of external dependencies
+# to the given file (report RP0402 must not be disabled).
+ext-import-graph=
+
+# Output a graph (.gv or any supported image format) of all (i.e. internal and
+# external) dependencies to the given file (report RP0402 must not be
+# disabled).
+import-graph=
+
+# Output a graph (.gv or any supported image format) of internal dependencies
+# to the given file (report RP0402 must not be disabled).
+int-import-graph=
+
+# Force import order to recognize a module as part of the standard
+# compatibility libraries.
+known-standard-library=
+
+# Force import order to recognize a module as part of a third party library.
+known-third-party=enchant
+
+# Couples of modules and preferred modules, separated by a comma.
+preferred-modules=
+
+
+[EXCEPTIONS]
+
+# Exceptions that will emit a warning when being caught. Defaults to
+# "BaseException, Exception".
+overgeneral-exceptions=BaseException,
+ Exception
diff --git a/common/__init__.py b/common/__init__.py
new file mode 100644
index 0000000..89977d2
--- /dev/null
+++ b/common/__init__.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env python3
+''' common functions and stuff '''
+
+import os
+
+
+QUEUE = os.environ.get('el_QUEUE', 'False').lower() in ('true', '1', 'yes')
+
+# Initialize queue
+if QUEUE is True:
+ from .queue import dbi
+else:
+ from .postgres import dbi
diff --git a/common/postgres.py b/common/postgres.py
new file mode 100644
index 0000000..bff0720
--- /dev/null
+++ b/common/postgres.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python3
+''' common functions and stuff '''
+
+import os
+import sys
+import psycopg
+
+pg_db = os.environ['el_pg_db']
+pg_host = os.environ['el_pg_host']
+pg_user = os.environ.get('el_pg_user','')
+pg_pass = os.environ.get('el_pg_pass','')
+
+def dbi(sql, values, **kwargs):
+ ''' insert into db '''
+ verbose = bool(kwargs['verbose']) if 'verbose' in kwargs else False
+ # pylint: disable=E1129
+ with psycopg.connect(dbname=pg_db, host=pg_host, user=pg_user, password=pg_pass) as conn:
+ cur = conn.cursor()
+ if isinstance(values, list):
+ cur.executemany(sql, values)
+ elif isinstance(values, tuple):
+ cur.execute(sql, values)
+ else:
+ print('`values` is a', type(values), 'but it needs to be tuple or list')
+ sys.exit(1)
+ if verbose is True:
+ print("Inserted and/or changed", cur.rowcount, "rows into db")
+ return True
diff --git a/common/queue.py b/common/queue.py
new file mode 100644
index 0000000..7804c8d
--- /dev/null
+++ b/common/queue.py
@@ -0,0 +1,21 @@
+#!/usr/bin/env python3
+''' common functions and stuff '''
+
+import os
+import sys
+import pickle
+from litequeue import SQLQueue
+
+QUEUE_DB = os.environ.get('el_QUEUE_db', 'litequeue.db')
+
+# Initialize queue
+q = SQLQueue(QUEUE_DB, maxsize=None)
+
+def dbi(sql, values, **kwargs):
+ ''' insert into queue '''
+ verbose = bool(kwargs['verbose']) if 'verbose' in kwargs else False
+
+ q.put(pickle.dumps([sql, values]))
+ if verbose is True:
+ print("Inserted into queue")
+ return True
diff --git a/elvia2pgsql.py b/elvia2pgsql.py
index 4ed2430..be5eaae 100755
--- a/elvia2pgsql.py
+++ b/elvia2pgsql.py
@@ -1,26 +1,23 @@
-#!/bin/python3
+#!/usr/bin/env python3
+''' elvia2pgsql '''
import os
import sys
-import json
-import psycopg2
import requests
from datetime import datetime
from datetime import timedelta
from tzlocal import get_localzone
+from common import dbi
apiKey = os.environ['el_elvia_token']
apiUrl = "https://elvia.azure-api.net/customer/metervalues/api/v1/metervalues"
-pg_db = os.environ['el_pg_db']
-pg_host = os.environ['el_pg_host']
pg_table = "elvia"
startTime = datetime.now(get_localzone()) - timedelta(days = 2)
startTime = startTime.isoformat('T')
-
endTime = datetime.now(get_localzone()).isoformat('T')
@@ -55,30 +52,6 @@ for item in data['meteringpoints'][0]['metervalue']['timeSeries']:
if item['verified']:
values.append((data['meteringpoints'][0]['meteringPointId'], item['startTime'], item['endTime'], item['value'], item['uom'], item['production']))
-# connect to database
-conn = psycopg2.connect(database=pg_db, host=pg_host)
-cur = conn.cursor()
-
-# count rows before we start
-cur.execute("SELECT COUNT(*) FROM " + pg_table)
-before = cur.fetchone()
-
-# insert data
-try:
- cur.executemany("INSERT INTO " + pg_table + " VALUES(%s,%s,%s,%s,%s,%s) ON CONFLICT (startTime,endTime) DO NOTHING", values)
- conn.commit()
-except Exception as e:
- conn.rollback()
- raise e
-
-# count rows after we finnish
-cur.execute("SELECT COUNT(*) FROM " + pg_table)
-after = cur.fetchone()
-
-# count *new* rows
-newRows = after[0] - before[0]
-
-# close connection
-conn.close()
-print("Successfully inserted " + str(newRows) + " records into the database")
+# Count new rows and insert
+dbi("INSERT INTO " + pg_table + " VALUES(%s,%s,%s,%s,%s,%s) ON CONFLICT (startTime,endTime) DO NOTHING", values, verbose=True)
diff --git a/elvia_gridtariff2pgsql.py b/elvia_gridtariff2pgsql.py
index b6b423b..bcf4bd7 100755
--- a/elvia_gridtariff2pgsql.py
+++ b/elvia_gridtariff2pgsql.py
@@ -1,15 +1,15 @@
-#!/bin/python3
+#!/usr/bin/env python3
+''' get grid tariffs'''
import os
import sys
-import json
-import psycopg2
-import requests
-
from datetime import datetime
from datetime import timedelta
+import requests
from tzlocal import get_localzone
+from common import dbi
+
# API documentation: https://elvia.portal.azure-api.net/docs/services/gridtariffapi/operations/post-digin-api-v-tariffquery-meteringpointsgridtariffs?
apiKey = os.environ['el_elvia_grid_api_key']
@@ -66,30 +66,4 @@ values = []
for item in data['gridTariffCollections'][0]['gridTariff']['tariffPrice']['hours']:
values.append((meteringPointId, item['startTime'], item['expiredAt'], item['shortName'], item['isPublicHoliday'], item['energyPrice']['total'], item['energyPrice']['totalExVat']))
-# connect to database
-conn = psycopg2.connect(database=pg_db, host=pg_host)
-cur = conn.cursor()
-
-# count rows before we start
-cur.execute("SELECT COUNT(*) FROM " + pg_table)
-before = cur.fetchone()
-
-# insert data
-try:
- cur.executemany("INSERT INTO " + pg_table + " VALUES(%s,%s,%s,%s,%s,%s,%s) ON CONFLICT (meteringPointId,startTime,endTime) DO NOTHING", values)
- conn.commit()
-except Exception as e:
- conn.rollback()
- raise e
-
-# count rows after we finnish
-cur.execute("SELECT COUNT(*) FROM " + pg_table)
-after = cur.fetchone()
-
-# count *new* rows
-newRows = after[0] - before[0]
-
-# close connection
-conn.close()
-
-print("Successfully inserted " + str(newRows) + " records into the database")
+dbi("INSERT INTO " + pg_table + " VALUES(%s,%s,%s,%s,%s,%s,%s) ON CONFLICT (meteringPointId,startTime,endTime) DO NOTHING", values, verbose=True)
diff --git a/entsoe2pgsql.py b/entsoe2pgsql.py
index 3fdb6b6..2597a98 100755
--- a/entsoe2pgsql.py
+++ b/entsoe2pgsql.py
@@ -2,21 +2,20 @@
import os
import sys
-import json
-import psycopg2
-import requests
-import xmltodict
-
from datetime import datetime
from datetime import timedelta
+import requests
+import xmltodict
from tzlocal import get_localzone
from dateutil import tz
+from common import dbi
+
# variables
# Getting an api-key isn't very well documented. The documentation [1] points
-# to a pdf [2], which says the following:
+# to a pdf [2], which says the following:
# > In order to request the access to the Restful API, please register on the
# > Transparency Platform and send an email to transparency@entsoe.eu with
# > “Restful API access” in the subject line. Indicate the email address you
@@ -45,7 +44,7 @@ areas = [ {"name": "NO-0", "code": "10YNO-0--------C"},
{"name": "NO-2", "code": "10YNO-2--------T"},
{"name": "NO-3", "code": "10YNO-3--------J"},
{"name": "NO-4", "code": "10YNO-4--------9"} ]
-
+
UTC = tz.gettz('UTC')
CET = tz.gettz('Europe/Oslo')
@@ -55,20 +54,20 @@ values=[]
for area in areas:
try:
url = apiUrl + "&documentType=A44&in_Domain=" + area["code"] + "&out_Domain=" + area["code"] + "&periodStart=" + startTime + "0000&periodEnd=" + endTime + "0000"
-
+
print("Getting data for " + area["code"])
response = requests.get(url)
if response.status_code != 200:
print(response.status_code)
print("Oh shit")
response.raise_for_status()
-
+
except Exception as e:
print("oh lol")
sys.exit(e)
-
+
data_dict = xmltodict.parse(response.content)
-
+
items = 0
if "Publication_MarketDocument" in data_dict:
for lista in data_dict["Publication_MarketDocument"]["TimeSeries"]:
@@ -84,34 +83,5 @@ for area in areas:
# append values
values.append((time, area["name"], item["price.amount"]))
print("Got " + str(items) + " records")
-
-
-# connect to db
-conn = psycopg2.connect(database=pg_db, host=pg_host)
-cur = conn.cursor()
-
-# count rows before we start
-cur.execute("SELECT COUNT(*) FROM " + pg_table)
-before = cur.fetchone()
-
-# insert data
-print("Inserting into database")
-try:
- cur.executemany("INSERT INTO " + pg_table + " VALUES(%s,%s,%s) ON CONFLICT (starttime, zone) DO NOTHING", values)
- conn.commit()
-except Exception as e:
- conn.rollback()
- raise e
-
-# count rows after we finnish
-cur.execute("SELECT COUNT(*) FROM " + pg_table)
-after = cur.fetchone()
-
-# count *new* rows
-newRows = after[0] - before[0]
-
-# close connection
-conn.close()
-
-print("Successfully inserted " + str(newRows) + " records into the database")
+dbi("INSERT INTO " + pg_table + " VALUES(%s,%s,%s) ON CONFLICT (starttime, zone) DO NOTHING", values, verbose=True)
diff --git a/env.sample b/env.sample
index e40aa42..6632904 100644
--- a/env.sample
+++ b/env.sample
@@ -16,3 +16,6 @@ export el_entsoe_token=XXX
export el_neohub_ip=XXX
export el_neohub_port=4242
+
+export el_QUEUE=False
+export el_QUEUE_db='litequeue.db'
diff --git a/mqtt2queue.py b/mqtt2queue.py
index 102b1eb..24ee207 100644
--- a/mqtt2queue.py
+++ b/mqtt2queue.py
@@ -2,18 +2,16 @@
import os
import json
-import pickle
from datetime import datetime
-from litequeue import SQLQueue
import paho.mqtt.client as mqtt
+from common import dbi
+
mqtt_server = os.environ['el_mqtt_server']
mqtt_port = int(os.environ['el_mqtt_port'])
keepalive = int(os.environ['el_mqtt_keepalive'])
mqtt_topic = os.environ['el_mqtt_topic']
-q = SQLQueue("litequeue.db", maxsize=None)
-
# The callback for when the client receives a CONNACK response from the server.
def on_connect(client, userdata, flags, rc):
print("Connected with result code "+str(rc))
@@ -30,12 +28,15 @@ def on_message(client, userdata, msg):
if name.startswith('tmp') and 'temperature' in data and 'humidity' in data:
sql = "INSERT INTO mqtt_temps (name, temperature, humidity, battery, linkquality, voltage, time) VALUES(%s,%s,%s,%s,%s,%s,%s)"
values = (name, data['temperature'], data['humidity'], data['battery'], data['linkquality'], data['voltage'], datetime.utcnow())
- q.put(pickle.dumps([sql, values]))
elif name == 'HAN' and 'current' in data:
sql = "INSERT INTO mqtt_han (name, current, power, voltage, linkquality, time) VALUES(%s,%s,%s,%s,%s,%s)"
values = (name, data['current'], data['power'], data['voltage'], data['linkquality'], datetime.utcnow())
- q.put(pickle.dumps([sql, values]))
+
+ else:
+ return
+
+ dbi(sql, values, verbose=True)
diff --git a/nb2pgsql.py b/nb2pgsql.py
index 3993c41..1901182 100755
--- a/nb2pgsql.py
+++ b/nb2pgsql.py
@@ -4,7 +4,6 @@ import os
import sys
import csv
import json
-import psycopg2
import requests
import tempfile
@@ -12,6 +11,8 @@ from datetime import datetime
from datetime import timedelta
from tzlocal import get_localzone
+from common import dbi
+
# I'm not sure I understand Norges Banks json-model. It seems a lot easier to just get the CSV, and convert it to JSON.
apiUrl = "https://data.norges-bank.no/api/data/EXR/B.EUR.NOK.SP?format=csv&locale=en"
@@ -55,30 +56,4 @@ with open(temp.name) as csvfile:
temp.close()
-# connect to database
-conn = psycopg2.connect(database=pg_db, host=pg_host)
-cur = conn.cursor()
-
-# count rows before we start
-cur.execute("SELECT COUNT(*) FROM " + pg_table)
-before = cur.fetchone()
-
-# insert data
-try:
- cur.executemany("INSERT INTO " + pg_table + " VALUES(%s,%s,%s,%s) ON CONFLICT (startdate,base_cur,quote_cur) DO NOTHING", values)
- conn.commit()
-except Exception as e:
- conn.rollback()
- raise e
-
-# count rows after we finnish
-cur.execute("SELECT COUNT(*) FROM " + pg_table)
-after = cur.fetchone()
-
-# count *new* rows
-newRows = after[0] - before[0]
-
-# close connection
-conn.close()
-
-print("Successfully inserted " + str(newRows) + " records into the database")
+dbi("INSERT INTO " + pg_table + " VALUES(%s,%s,%s,%s) ON CONFLICT (startdate,base_cur,quote_cur) DO NOTHING", values, verbose=True)
diff --git a/queue2pgsql.py b/queue2pgsql.py
index 0c95f6d..253f5b4 100644
--- a/queue2pgsql.py
+++ b/queue2pgsql.py
@@ -1,23 +1,25 @@
#!/usr/bin/env python3
+''' move items from queue to database '''
import os
import time
import pickle
import sqlite3
from litequeue import SQLQueue
-from datetime import datetime
-from todb import todb
+from common import dbi
+
+QUEUE_DB = os.environ.get('el_QUEUE_db', 'litequeue.db')
# Unlock all
-con = sqlite3.connect("litequeue.db")
+con = sqlite3.connect(QUEUE_DB)
cur = con.cursor()
cur.execute("UPDATE Queue SET status = 0 WHERE status = 1")
con.commit()
con.close()
# Open Queue
-q = SQLQueue("litequeue.db", maxsize=None)
+q = SQLQueue(QUEUE_DB, maxsize=None)
q.prune()
# Event loop
@@ -28,7 +30,7 @@ while True:
continue
task = q.pop()
raw = pickle.loads(task['message'])
- todb(raw[0],raw[1])
+ dbi(raw[0],raw[1])
print("Processed " + str(task['message_id']) + ". " + str(q.qsize() - 1) + " left")
q.done(task['message_id'])
diff --git a/todb.py b/todb.py
index e5988cd..7426ace 100644
--- a/todb.py
+++ b/todb.py
@@ -13,8 +13,10 @@ def todb(sql, values):
if type(values) == list:
cur = conn.cursor()
cur.executemany(sql, values)
+ return cur.rowcount
elif type(values) == tuple:
conn.execute(sql, values)
+ return 1
else:
print("OH SHIT")
exit()
diff --git a/yr2pgsql.py b/yr2pgsql.py
index 28bb06e..9c3ae5e 100755
--- a/yr2pgsql.py
+++ b/yr2pgsql.py
@@ -1,11 +1,12 @@
-#!/bin/python3
+#!/usr/bin/env python3
+''' Get weatherdata from yr.no '''
import os
import sys
-import json
-import psycopg2
import requests
+from common import dbi
+
lat = str(os.environ['el_yr_lat'])
lon = str(os.environ['el_yr_lon'])
@@ -38,10 +39,6 @@ except Exception as e:
data = response.json()
-
-
-
-
### insert data into database
values = []
@@ -49,38 +46,13 @@ for item in data["properties"]["timeseries"]:
details = item["data"]["instant"]["details"]
values.append((item["time"],details["air_temperature"],details["air_pressure_at_sea_level"],details["cloud_area_fraction"],details["relative_humidity"],details["wind_from_direction"],details["wind_speed"]))
-# connect to database
-conn = psycopg2.connect(database=pg_db, host=pg_host)
-cur = conn.cursor()
-
-# count rows before we start
-cur.execute("SELECT COUNT(*) FROM " + pg_table)
-before = cur.fetchone()
-
-# insert data
-try:
- cur.executemany("INSERT INTO " + pg_table + """ VALUES(%s,%s,%s,%s,%s,%s,%s) ON CONFLICT (time) DO UPDATE SET
- air_temperature=EXCLUDED.air_temperature,
- air_pressure_at_sea_level=EXCLUDED.air_pressure_at_sea_level,
- cloud_area_fraction=EXCLUDED.cloud_area_fraction,
- relative_humidity=EXCLUDED.relative_humidity,
- wind_from_direction=EXCLUDED.wind_from_direction,
- wind_speed=EXCLUDED.wind_speed,
- updated=now()
- """, values)
- conn.commit()
-except Exception as e:
- conn.rollback()
- raise e
-
-# count rows after we finnish
-cur.execute("SELECT COUNT(*) FROM " + pg_table)
-after = cur.fetchone()
-
-# count *new* rows
-newRows = after[0] - before[0]
-
-# close connection
-conn.close()
+sql = "INSERT INTO " + pg_table + """ VALUES(%s,%s,%s,%s,%s,%s,%s) ON CONFLICT (time) DO UPDATE SET
+ air_temperature=EXCLUDED.air_temperature,
+ air_pressure_at_sea_level=EXCLUDED.air_pressure_at_sea_level,
+ cloud_area_fraction=EXCLUDED.cloud_area_fraction,
+ relative_humidity=EXCLUDED.relative_humidity,
+ wind_from_direction=EXCLUDED.wind_from_direction,
+ wind_speed=EXCLUDED.wind_speed,
+ updated=now()"""
-print("Successfully inserted " + str(newRows) + " records into the database. Might have updated a bunch more.")
+dbi(sql, values, verbose=True)