From fc212abd6568feb79e4cd0d5d63448a59378b2b7 Mon Sep 17 00:00:00 2001
From: Erik Reid <erik.reid@geant.org>
Date: Thu, 8 Nov 2018 17:37:59 +0100
Subject: [PATCH] initial modularization
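
Split the monolithic router_interfaces.py into focused modules: the
logger names and the config JSON schema move to constants.py, the SNMP
walk helper and get_router_interfaces move to snmp.py, the ssh and
"| display json" command helpers move to juniper.py, and the mysql
connection/cursor context managers move to alarmsdb.py.
router_interfaces.py keeps only the CLI and the multiprocessing
orchestration, and _validate_config now reads the oid list and router
community files up front into config["oids"] and config["routers"].
Given a router entry and the loaded config, callers use the moved
helpers along the lines of:

    import logging
    from inventory_provider import constants, snmp

    logging.getLogger(constants.SNMP_LOGGER_NAME).setLevel(logging.DEBUG)
    interfaces = list(snmp.get_router_interfaces(router, config))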

---
 inventory_provider/alarmsdb.py                |  41 +++
 inventory_provider/constants.py               |  35 +++
 inventory_provider/juniper.py                 |  60 +++-
 inventory_provider/router_interfaces.py       | 295 +++---------------
 inventory_provider/snmp.py                    | 108 +++++++
 ...test_bgp_peers.py => test_juniper_data.py} |   0
 6 files changed, 282 insertions(+), 257 deletions(-)
 create mode 100644 inventory_provider/alarmsdb.py
 create mode 100644 inventory_provider/constants.py
 create mode 100644 inventory_provider/snmp.py
 rename test/{test_bgp_peers.py => test_juniper_data.py} (100%)

diff --git a/inventory_provider/alarmsdb.py b/inventory_provider/alarmsdb.py
new file mode 100644
index 00000000..ff6c04ff
--- /dev/null
+++ b/inventory_provider/alarmsdb.py
@@ -0,0 +1,41 @@
+import contextlib
+import logging
+
+import mysql.connector
+
+from inventory_provider.constants import DATABASE_LOGGER_NAME
+
+
+@contextlib.contextmanager
+def connection(alarmsdb):
+    cx = None
+    try:
+        cx = mysql.connector.connect(
+            host=alarmsdb["hostname"],
+            user=alarmsdb["username"],
+            passwd=alarmsdb["password"],
+            db=alarmsdb["dbname"])
+        yield cx
+    finally:
+        if cx:
+            cx.close()
+
+
+@contextlib.contextmanager
+def cursor(cnx):
+    csr = None
+    try:
+        csr = cnx.cursor()
+        yield csr
+    finally:
+        if csr:
+            csr.close()
+
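+
+# debug helper: looks up the router's absid by hostname and logs each result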
+def _db_test(db, router):
+    database_logger = logging.getLogger(DATABASE_LOGGER_NAME)
+    with cursor(db) as crs:
+        database_logger.debug("_db_test: %r" % router)
+        query = "SELECT absid FROM routers WHERE hostname = %s"
+        crs.execute(query, (router['hostname'],))
+        for (absid,) in crs:
+            database_logger.debug("absid: %r" % absid)
diff --git a/inventory_provider/constants.py b/inventory_provider/constants.py
new file mode 100644
index 00000000..8319a4de
--- /dev/null
+++ b/inventory_provider/constants.py
@@ -0,0 +1,35 @@
+SNMP_LOGGER_NAME = "snmp-logger"
+THREADING_LOGGER_NAME = "threading-logger"
+JUNIPER_LOGGER_NAME = "juniper-logger"
+DATABASE_LOGGER_NAME = "database-logger"
+
+CONFIG_SCHEMA = {
+    "$schema": "http://json-schema.org/draft-07/schema#",
+    "type": "object",
+    "properties": {
+        "alarms-db": {
+            "type": "object",
+            "properties": {
+                "hostname": {"type": "string"},
+                "dbname": {"type": "string"},
+                "username": {"type": "string"},
+                "password": {"type": "string"}
+            },
+            "required": ["hostname", "dbname", "username", "password"],
+            "additionalProperties": False
+        },
+        "oid_list.conf": {"type": "string"},
+        "routers_community.conf": {"type": "string"},
+        "ssh": {
+            "type": "object",
+            "properties": {
+                "private-key": {"type": "string"},
+                "known-hosts": {"type": "string"}
+            },
+            "required": ["private-key", "known-hosts"],
+            "additionalProperties": False
+        }
+    },
+    "required": ["alarms-db", "oid_list.conf", "routers_community.conf"],
+    "additionalProperties": False
+}
diff --git a/inventory_provider/juniper.py b/inventory_provider/juniper.py
index 409d1469..27ae3342 100644
--- a/inventory_provider/juniper.py
+++ b/inventory_provider/juniper.py
@@ -1,5 +1,13 @@
+import contextlib
+import json
+import logging
 import re
 
+import paramiko
+
+from inventory_provider.constants import JUNIPER_LOGGER_NAME
+
+
 def neighbors(router, routing_instances=["IAS"], group_expression=None):
 
     for config in router["bgp"]["configuration"]:
@@ -28,4 +36,54 @@ def interfaces(router):
                         ifc["oper-status"][0]["data"]),
                     "description": ifc["description"][0]["data"],
                     "type": "logical"
-                }
\ No newline at end of file
+                }
+
+
+@contextlib.contextmanager
+def ssh_connection(router, ssh_params):
+    import os  # TODO: move to the module-level imports
+    key_filename = os.path.join(
+        os.path.dirname(__file__), ssh_params["private-key"])
+    known_hosts = os.path.join(
+        os.path.dirname(__file__), ssh_params["known-hosts"])
+    k = paramiko.DSSKey.from_private_key_file(key_filename)
+
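+    # FIXME: the hostname override below and the "Monit0r" username look
+    # like debugging leftovers; every connection goes to mx1.ams.nl.geant.net
+    # and the caller's router dict is mutated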
+    router["hostname"] = "mx1.ams.nl.geant.net"
+    with paramiko.SSHClient() as ssh:
+        ssh.load_host_keys(known_hosts)
+        ssh.connect(
+            hostname=router["hostname"],
+            username="Monit0r",
+            pkey=k)
+        yield ssh
+
+
+def exec_router_commands_json(router, ssh_params, commands):
+    juniper_logger = logging.getLogger(JUNIPER_LOGGER_NAME)
+    with ssh_connection(router, ssh_params) as ssh:
+
+        _, stdout, _ = ssh.exec_command("set cli screen-length 0")
+        assert stdout.channel.recv_exit_status() == 0
+
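+        # "| display json" output can repeat keys at one nesting level; keep
+        # every value by collecting duplicates into a list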
+        def _dups_to_list(pairs):
+            counter_map = {}
+            for k, v in pairs:
+                counter_map.setdefault(k, []).append(v)
+            result = {}
+            for k, v in counter_map.items():
+                if len(v) == 1:
+                    result[k] = v[0]
+                else:
+                    result[k] = v
+            return result
+
+        for c in commands:
+            juniper_logger.debug("command: '%s'" % (c + " | display json"))
+            _, stdout, _ = ssh.exec_command(c + " | display json")
+            assert stdout.channel.recv_exit_status() == 0
+            # TODO: error handling
+            output = stdout.read()
+            if output:
+                juniper_logger.debug(
+                    "%r output: [%d] %r" % (router, len(output), output[:20]))
+                yield json.loads(output, object_pairs_hook=_dups_to_list)
+            else:
+                juniper_logger.debug("%r output empty" % router)
+                yield {}
diff --git a/inventory_provider/router_interfaces.py b/inventory_provider/router_interfaces.py
index dd7166c4..e7f0d1c1 100644
--- a/inventory_provider/router_interfaces.py
+++ b/inventory_provider/router_interfaces.py
@@ -1,117 +1,19 @@
-import contextlib
 import json
 import logging
 import re
-from multiprocessing import Pool, Process, Queue
+from multiprocessing import Process, Queue
 
 import click
 import jsonschema
-import mysql.connector
-import paramiko
-from pysnmp.hlapi import nextCmd, SnmpEngine, CommunityData, \
-        UdpTransportTarget, ContextData, ObjectType, ObjectIdentity
-
-
-SNMP_LOGGER_NAME = "snmp-logger"
-THREADING_LOGGER_NAME = "threading-logger"
-JUNIPER_LOGGER_NAME = "juniper-logger"
-DATABASE_LOGGER_NAME = "database-logger"
-
-CONFIG_SCHEMA = {
-    "$schema": "http://json-schema.org/draft-07/schema#",
-    "type": "object",
-    "properties": {
-        "alarms-db": {
-            "type": "object",
-            "properties": {
-                "hostname": {"type": "string"},
-                "dbname": {"type": "string"},
-                "username": {"type": "string"},
-                "password": {"type": "string"}
-            },
-            "required": ["hostname", "dbname", "username", "password"],
-            "additionalProperties": False
-        },
-        "oid_list.conf": {"type": "string"},
-        "routers_community.conf": {"type": "string"},
-        "ssh": {
-            "type": "object",
-            "properties": {
-                "private-key": {"type": "string"},
-                "known-hosts": {"type": "string"}
-            },
-            "required": ["private-key", "known-hosts"],
-            "additionalProperties": False
-        }
-    },
-    "required": ["alarms-db", "oid_list.conf", "routers_community.conf"],
-    "additionalProperties": False
-}
 
+from inventory_provider import constants
+from inventory_provider import snmp
+from inventory_provider import juniper
 
-def walk(agent_hostname, community, base_oid):
-    """
-    https://stackoverflow.com/a/45001921
-    http://snmplabs.com/pysnmp/docs/hlapi/asyncore/sync/manager/cmdgen/nextcmd.html
-    http://snmplabs.com/pysnmp/faq/pass-custom-mib-to-manager.html
-    https://github.com/etingof/pysnmp/blob/master/examples/v3arch/asyncore/manager/cmdgen/getnext-multiple-oids-and-resolve-with-mib.py
-    http://snmplabs.com/pysnmp/examples/smi/manager/browsing-mib-tree.html
-
-    :param agent_hostname:
-    :param community:
-    :param base_oid:
-    :return:
-    """
 
-    snmp_logger = logging.getLogger(SNMP_LOGGER_NAME)
-
-    from pysnmp.smi import builder, view, compiler, rfc1902
-    mibBuilder = builder.MibBuilder()
-    mibViewController = view.MibViewController(mibBuilder)
-    compiler.addMibCompiler(mibBuilder, sources=['http://mibs.snmplabs.com/asn1/@mib@'])
-    # Pre-load MIB modules we expect to work with
-    mibBuilder.loadModules('SNMPv2-MIB', 'SNMP-COMMUNITY-MIB', 'RFC1213-MIB')
-
-    snmp_logger.debug("walking %s: %s" % (agent_hostname, base_oid))
-
-    for (engineErrorIndication,
-         pduErrorIndication,
-         errorIndex,
-         varBinds) in nextCmd(
-            SnmpEngine(),
-            CommunityData(community),
-            UdpTransportTarget((agent_hostname, 161)),
-            ContextData(),
-            ObjectType(ObjectIdentity(base_oid)),
-            lexicographicMode=False,
-            lookupNames=True,
-            lookupValues=True):
-        assert not engineErrorIndication
-        assert not pduErrorIndication
-        assert errorIndex == 0
-        # varBinds = [
-        #     rfc1902.ObjectType(rfc1902.ObjectIdentity(x[0]),x[1])
-        #         .resolveWithMib(mibViewController)
-        #     for x in varBinds]
-        for oid, val in varBinds:
-            yield {"oid": "." + str(oid), "value": val.prettyPrint()}
 
 
-def _validate_config(ctx, param, value):
-    """
-    loads, validates and returns configuration parameters
-
-    :param ctx:
-    :param param:
-    :param value:
-    :return:
-    """
-    config = json.loads(value.read())
-    jsonschema.validate(config, CONFIG_SCHEMA)
-    return config
-
-
-def load_oids(config_file):
+def _load_oids(config_file):
     """
     :param config_file: file-like object
     :return:
@@ -124,7 +26,7 @@ def load_oids(config_file):
     return result
 
 
-def load_routers(config_file):
+def _load_routers(config_file):
     """
     :param config_file: file-like object
     :return:
@@ -141,163 +43,47 @@ def load_routers(config_file):
         }
 
 
-@contextlib.contextmanager
-def connection(alarmsdb):
-    cx = None
-    try:
-        cx = mysql.connector.connect(
-            host=alarmsdb["hostname"],
-            user=alarmsdb["username"],
-            passwd=alarmsdb["password"],
-            db=alarmsdb["dbname"])
-        yield cx
-    finally:
-        if cx:
-            cx.close()
-
-
-@contextlib.contextmanager
-def cursor(cnx):
-    csr = None
-    try:
-        csr = cnx.cursor()
-        yield csr
-    finally:
-        if csr:
-            csr.close()
-
-
-def _db_test(db, router):
-    database_logger = logging.getLogger(DATABASE_LOGGER_NAME)
-    with cursor(db) as crs:
-        database_logger.debug("_db_test: %r" % router)
-        query = "SELECT absid FROM routers WHERE hostname = %s"
-        crs.execute(query, (router['hostname'],))
-        for (absid,) in crs:
-            database_logger.debug("absid: %r" % absid)
-
-
-def _v6address_oid2str(dotted_decimal):
-    hex_params = []
-    for dec in re.split(r'\.', dotted_decimal):
-        hex_params.append("%02x" % int(dec))
-    return ":".join(hex_params)
-
-
-def get_router_interfaces(router):
-    with open("oid_list.conf") as f:
-        oid_map = load_oids(f)
-
-    details = {}
-    for name, oid in oid_map.items():
-        details[name] = walk(router["hostname"], router["community"], oid)
-        details[name] = list(details[name])
-
-    v4IfcNames = {}
-    for v4IfcName in details["v4InterfaceName"]:
-        m = re.match(r'.*\.(\d+)$', v4IfcName["oid"])
-        assert m, "sanity failure parsing oid: " + v4IfcName["oid"]
-        v4IfcNames[m.group(1)] = v4IfcName["value"]
-
-    interfaces = []
-    for v4Address, v4Mask, v4InterfaceOID in zip(
-            details["v4Address"],
-            details["v4Mask"],
-            details["v4InterfaceOID"]):
-        yield {
-            "v4Address": v4Address["value"],
-            "v4Mask": v4Mask["value"],
-            "v4InterfaceName": v4IfcNames[v4InterfaceOID["value"]]
-        }
+def _validate_config(ctx, param, value):
+    """
+    loads and validates the configuration, then reads the referenced
+    oid list and router community files into config["oids"] and
+    config["routers"]
+
+    :param ctx: click context (unused)
+    :param param: click parameter (unused)
+    :param value: file-like object containing the json configuration
+    :return: validated and fully-loaded configuration dict
+    """
+    config = json.loads(value.read())
+    jsonschema.validate(config, constants.CONFIG_SCHEMA)
+    with open(config["oid_list.conf"]) as f:
+        config["oids"] = _load_oids(f)
+    with open(config["routers_community.conf"]) as f:
+        config["routers"] = list(_load_routers(f))
+    return config
 
-    v6IfcNames = {}
-    for v6InterfaceName in details["v6InterfaceName"]:
-        m = re.match(r'.*\.(\d+)$', v6InterfaceName["oid"])
-        assert m, "sanity failure parsing oid: " + v6InterfaceName["oid"]
-        v6IfcNames[m.group(1)] = v6InterfaceName["value"]
-
-    for v6AddressAndMask in details["v6AddressAndMask"]:
-        pattern = (
-            r'^'
-            + oid_map["v6AddressAndMask"].replace(r'.', r'\.')
-            + r'\.(\d+)\.(.+)$'
-        )
-        m = re.match(pattern, v6AddressAndMask["oid"])
-        assert m, "sanity failure parsing oid: " + v6InterfaceName["oid"]
-        yield {
-            "v6Address": _v6address_oid2str(m.group(2)),
-            "v6Mask": v6AddressAndMask["value"],
-            "v6InterfaceName": v6IfcNames[m.group(1)]
-        }
 
 
-@contextlib.contextmanager
-def ssh_connection(router, ssh_params):
-    import os
-    key_filename = os.path.join(os.path.dirname(__file__), ssh_params["private-key"])
-    known_hosts = os.path.join(os.path.dirname(__file__), ssh_params["known-hosts"])
-    k = paramiko.DSSKey.from_private_key_file(key_filename)
-
-    router["hostname"] = "mx1.ams.nl.geant.net"
-    with paramiko.SSHClient() as ssh:
-        ssh.load_host_keys(known_hosts)
-        ssh.connect(
-            hostname=router["hostname"],
-            username="Monit0r",
-            pkey=k)
-        yield ssh
-
-
-def exec_router_commands_json(router, ssh_params, commands):
-    juniper_logger = logging.getLogger(JUNIPER_LOGGER_NAME)
-    with ssh_connection(router, ssh_params) as ssh:
-
-        _, stdout, _ = ssh.exec_command("set cli screen-length 0")
-        assert stdout.channel.recv_exit_status() == 0
-
-        def _dups_to_list(pairs):
-            counter_map = {}
-            for k, v in pairs:
-                counter_map.setdefault(k, []).append(v)
-            result = {}
-            for k, v in counter_map.items():
-                if len(v) == 1:
-                    result[k] = v[0]
-                else:
-                    result[k] = v
-            return result
-
-        for c in commands:
-            juniper_logger.debug("command: '%s'" % (c + " | display json"))
-            _, stdout, _ = ssh.exec_command(c + " | display json")
-            assert stdout.channel.recv_exit_status() == 0
-            # TODO: error handling
-            output = stdout.read()
-            if output:
-                juniper_logger.debug("%r output: [%d] %r" % (router, len(output), output[:20]))
-                yield json.loads(output, object_pairs_hook=_dups_to_list)
-            else:
-                juniper_logger.debug("%r output empty" % router)
-                yield {}
-
-
-def get_router_interfaces_q(router, q):
-    threading_logger = logging.getLogger(THREADING_LOGGER_NAME)
+def get_router_interfaces_q(router, config, q):
+    threading_logger = logging.getLogger(constants.THREADING_LOGGER_NAME)
     threading_logger.debug("[ENTER>>] get_router_interfaces_q: %r" % router)
-    q.put(list(get_router_interfaces(router)))
+    q.put(list(snmp.get_router_interfaces(router, config)))
     threading_logger.debug("[<<EXIT]  get_router_interfaces_q: %r" % router)
 
 
 def exec_router_commands_json_q(router, ssh_params, commands, q):
-    threading_logger = logging.getLogger(THREADING_LOGGER_NAME)
+    threading_logger = logging.getLogger(constants.THREADING_LOGGER_NAME)
     threading_logger.debug("[ENTER>>] exec_router_commands_q: %r" % router)
-    q.put(list(exec_router_commands_json(router, ssh_params, commands)))
+    q.put(list(juniper.exec_router_commands_json(router, ssh_params, commands)))
     threading_logger.debug("[<<EXIT] exec_router_commands_q: %r" % router)
 
 
 def get_router_details(router, params, q):
 
-    threading_logger = logging.getLogger(THREADING_LOGGER_NAME)
+    threading_logger = logging.getLogger(constants.THREADING_LOGGER_NAME)
 
     threading_logger.debug("[ENTER>>]get_router_details: %r" % router)
 
@@ -338,13 +124,10 @@ def get_router_details(router, params, q):
 
 def load_network_details(config):
 
-    threading_logger = logging.getLogger(THREADING_LOGGER_NAME)
-
-    with open("routers_community.conf") as f:
-        routers = list(load_routers(f))
+    threading_logger = logging.getLogger(constants.THREADING_LOGGER_NAME)
 
     processes = []
-    for r in routers:
+    for r in config["routers"]:
         q = Queue()
         p = Process(target=get_router_details, args=(r, config, q))
         p.start()
@@ -379,9 +162,9 @@ def cli(config):
 
 if __name__ == "__main__":
     logging.basicConfig(level=logging.WARNING)
-    logging.getLogger(SNMP_LOGGER_NAME).setLevel(logging.DEBUG)
-    logging.getLogger(THREADING_LOGGER_NAME).setLevel(logging.INFO)
-    logging.getLogger(JUNIPER_LOGGER_NAME).setLevel(logging.DEBUG)
-    logging.getLogger(DATABASE_LOGGER_NAME).setLevel(logging.DEBUG)
+    logging.getLogger(constants.SNMP_LOGGER_NAME).setLevel(logging.DEBUG)
+    logging.getLogger(constants.THREADING_LOGGER_NAME).setLevel(logging.INFO)
+    logging.getLogger(constants.JUNIPER_LOGGER_NAME).setLevel(logging.DEBUG)
+    logging.getLogger(constants.DATABASE_LOGGER_NAME).setLevel(logging.DEBUG)
     cli()
 
diff --git a/inventory_provider/snmp.py b/inventory_provider/snmp.py
new file mode 100644
index 00000000..dc647597
--- /dev/null
+++ b/inventory_provider/snmp.py
@@ -0,0 +1,108 @@
+import logging
+import re
+
+from pysnmp.hlapi import nextCmd, SnmpEngine, CommunityData, \
+        UdpTransportTarget, ContextData, ObjectType, ObjectIdentity
+from pysnmp.smi import builder, view, compiler, rfc1902
+
+from inventory_provider.constants import SNMP_LOGGER_NAME
+
+
+def _v6address_oid2str(dotted_decimal):
+    hex_params = []
+    for dec in re.split(r'\.', dotted_decimal):
+        hex_params.append("%02x" % int(dec))
+    return ":".join(hex_params)
+
+
+def walk(agent_hostname, community, base_oid):
+    """
+    https://stackoverflow.com/a/45001921
+    http://snmplabs.com/pysnmp/docs/hlapi/asyncore/sync/manager/cmdgen/nextcmd.html
+    http://snmplabs.com/pysnmp/faq/pass-custom-mib-to-manager.html
+    https://github.com/etingof/pysnmp/blob/master/examples/v3arch/asyncore/manager/cmdgen/getnext-multiple-oids-and-resolve-with-mib.py
+    http://snmplabs.com/pysnmp/examples/smi/manager/browsing-mib-tree.html
+
+    :param agent_hostname:
+    :param community:
+    :param base_oid:
+    :return:
+    """
+
+    snmp_logger = logging.getLogger(SNMP_LOGGER_NAME)
+
+    mibBuilder = builder.MibBuilder()
+    mibViewController = view.MibViewController(mibBuilder)
+    compiler.addMibCompiler(mibBuilder, sources=['http://mibs.snmplabs.com/asn1/@mib@'])
+    # Pre-load MIB modules we expect to work with
+    mibBuilder.loadModules('SNMPv2-MIB', 'SNMP-COMMUNITY-MIB', 'RFC1213-MIB')
+
+    snmp_logger.debug("walking %s: %s" % (agent_hostname, base_oid))
+
+    for (engineErrorIndication,
+         pduErrorIndication,
+         errorIndex,
+         varBinds) in nextCmd(
+            SnmpEngine(),
+            CommunityData(community),
+            UdpTransportTarget((agent_hostname, 161)),
+            ContextData(),
+            ObjectType(ObjectIdentity(base_oid)),
+            lexicographicMode=False,
+            lookupNames=True,
+            lookupValues=True):
+        assert not engineErrorIndication
+        assert not pduErrorIndication
+        assert errorIndex == 0
+        # varBinds = [
+        #     rfc1902.ObjectType(rfc1902.ObjectIdentity(x[0]),x[1])
+        #         .resolveWithMib(mibViewController)
+        #     for x in varBinds]
+        for oid, val in varBinds:
+            yield {"oid": "." + str(oid), "value": val.prettyPrint()}
+
+
+def get_router_interfaces(router, config):
+    oid_map = config["oids"]
+
+    details = {}
+    for name, oid in oid_map.items():
+        details[name] = walk(router["hostname"], router["community"], oid)
+        details[name] = list(details[name])
+
+    v4IfcNames = {}
+    for v4IfcName in details["v4InterfaceName"]:
+        m = re.match(r'.*\.(\d+)$', v4IfcName["oid"])
+        assert m, "sanity failure parsing oid: " + v4IfcName["oid"]
+        v4IfcNames[m.group(1)] = v4IfcName["value"]
+
+    interfaces = []
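+    # the three v4 walks are assumed to return corresponding rows in the
+    # same order, so they can simply be zipped together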
+    for v4Address, v4Mask, v4InterfaceOID in zip(
+            details["v4Address"],
+            details["v4Mask"],
+            details["v4InterfaceOID"]):
+        yield {
+            "v4Address": v4Address["value"],
+            "v4Mask": v4Mask["value"],
+            "v4InterfaceName": v4IfcNames[v4InterfaceOID["value"]]
+        }
+
+    v6IfcNames = {}
+    for v6InterfaceName in details["v6InterfaceName"]:
+        m = re.match(r'.*\.(\d+)$', v6InterfaceName["oid"])
+        assert m, "sanity failure parsing oid: " + v6InterfaceName["oid"]
+        v6IfcNames[m.group(1)] = v6InterfaceName["value"]
+
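+    # the OID suffix is <ifIndex>.<address in dotted decimal>; the address
+    # part is rendered as colon-separated hex bytes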
+    for v6AddressAndMask in details["v6AddressAndMask"]:
+        pattern = (
+            r'^'
+            + oid_map["v6AddressAndMask"].replace(r'.', r'\.')
+            + r'\.(\d+)\.(.+)$'
+        )
+        m = re.match(pattern, v6AddressAndMask["oid"])
+        assert m, "sanity failure parsing oid: " + v6AddressAndMask["oid"]
+        yield {
+            "v6Address": _v6address_oid2str(m.group(2)),
+            "v6Mask": v6AddressAndMask["value"],
+            "v6InterfaceName": v6IfcNames[m.group(1)]
+        }
diff --git a/test/test_bgp_peers.py b/test/test_juniper_data.py
similarity index 100%
rename from test/test_bgp_peers.py
rename to test/test_juniper_data.py
-- 
GitLab