Commit 756e7570 authored by Robert Latta

added fiberlink functionality

parent 21b26e33
import logging
import re
from collections import defaultdict

from inventory_provider.db import db

logger = logging.getLogger(__name__)


def _convert_to_dict(crs):
    return [dict((crs.description[i][0], "" if value is None else value)
@@ -109,6 +115,65 @@ WHERE
    return r
def get_fibre_spans(connection):
    _sql = """
        SELECT c.absid, c.name,
            parent.absid parent_absid, parent.name parent_name,
            parent.status parent_status, LOWER(parent.circuit_type) parent_type,
            pa.name pop_a, pa.abbreviation pop_abbr_a,
            ea.name equipment_a, LOWER(ea.type) eq_type_a,
            pb.name pop_b, pb.abbreviation pop_abbr_b,
            eb.name equipment_b, LOWER(eb.type) eq_type_b
        FROM vcircuitconns c
            INNER JOIN pop pa ON pa.absid = c.PTR_pop_a
            INNER JOIN pop pb ON pb.absid = c.PTR_pop_b
            INNER JOIN equipment ea ON ea.absid = c.PTR_equip_a
            INNER JOIN equipment eb ON eb.absid = c.PTR_equip_b
            INNER JOIN circuit_glue cg ON c.absid = cg.PTR_component
            INNER JOIN circuit parent ON parent.absid = cg.PTR_circuit
        WHERE
            c.is_circuit = 1 AND c.status != 'terminated' AND parent.status != 'terminated'
            AND c.circuit_type = 'fibre span'
        """
    ne_details = {}
    with db.cursor(connection) as crs:
        crs.execute(_sql)
        rows = _convert_to_dict(crs)

    for row in rows:
        if row['parent_type'] != 'fibre route':
            logger.debug(f'Wrong Parent Type c: {row["absid"]} '
                         f'p: {row["parent_absid"]} {row["parent_type"]}')
            continue
        ne_pattern = r'.+-(OLA|DTNX)\d+-\d.*'
        ne_a_match = re.match(ne_pattern, row['equipment_a'])
        ne_b_match = re.match(ne_pattern, row['equipment_b'])
        if ne_a_match:
            ne_details[f'{row["equipment_a"]}_{row["parent_absid"]}'] = {
                'ne': row['equipment_a'],
                'df_route': row['parent_name'],
                'df_route_id': row['parent_absid'],
                'df_status': row['parent_status'],
                'pop': row['pop_a'],
                'pop_abbreviation': row['pop_abbr_a'],
            }
        if ne_b_match:
            ne_details[f'{row["equipment_b"]}_{row["parent_absid"]}'] = {
                'ne': row['equipment_b'],
                'df_route': row['parent_name'],
                'df_route_id': row['parent_absid'],
                'df_status': row['parent_status'],
                'pop': row['pop_b'],
                'pop_abbreviation': row['pop_abbr_b']
            }

    by_ne = defaultdict(lambda: [])
    for d in ne_details.values():
        by_ne[d['ne']].append(d)

    yield from by_ne.items()

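For orientation, a minimal sketch of consuming the (ne, spans) pairs yielded above; the `config` dict is an illustrative assumption and is not part of this commit:

from inventory_provider.db import db, opsdb

# 'config' with an 'ops-db' entry is assumed to be loaded elsewhere
with db.connection(config['ops-db']) as cx:
    for ne, spans in opsdb.get_fibre_spans(cx):
        # 'ne' is an equipment name matching the OLA/DTNX pattern above;
        # each dict in 'spans' carries: ne, df_route, df_route_id,
        # df_status, pop and pop_abbreviation
        print(ne, len(spans))
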
def get_circuits(connection):
    _sql = """
        SELECT *
......
import ipaddress
import itertools
import json
import logging
import re
@@ -412,6 +413,69 @@ def get_trap_metadata(source_equipment, interface, circuit_id):
    return Response(result, mimetype="application/json")
@routes.route("/infinera-fibrelink-info/<ne_name_str>/<object_name_str>",
methods=['GET', 'POST'])
@common.require_accepts_json
def get_fibrelink_trap_metadata(ne_name_str, object_name_str):
objects = object_name_str.split('_')
shelfs = [x.split('-')[0] for x in objects]
p = r'([a-zA-Z\d]+?-(OLA|DTNX)\d+(-\d)?)'
matches = re.findall(p, ne_name_str)
assert len(matches) == 2
r = common.get_current_redis()
# double check that we only need to check the two nodes and not the objects
cache_key = f'classifier-cache:fiberlink:{ne_name_str}:{object_name_str}'
result = r.get(cache_key)
if result:
result = result.decode('utf-8')
else:
nes_a = f'{matches[0][0]}-{shelfs[0]}'
nes_b = f'{matches[1][0]}-{shelfs[1]}'
result = []
df_a = r.get(f'opsdb:ne_fibre_spans:{nes_a}')
df_b = r.get(f'opsdb:ne_fibre_spans:{nes_b}')
if df_a:
a = json.loads(df_a.decode('utf-8'))
if df_b:
b = json.loads(df_b.decode('utf-8'))
if df_a and df_b:
matches = [x for x in itertools.product(a, b) if
x[0]['df_route_id'] == x[1]['df_route_id']]
if matches:
match = matches[0]
result = {
'ends': {
'a': {
'pop': match[0]['pop'],
'pop_abbreviation': match[0]['pop_abbreviation'],
},
'b': {
'pop': match[1]['pop'],
'pop_abbreviation': match[1]['pop_abbreviation'],
},
},
'df_route': {
'id': match[0]['df_route_id'],
'name': match[0]['df_route'],
'status': match[0]['df_status'],
},
'related-services':
get_top_level_services(match[0]['df_route_id'], r)
}
result = json.dumps(result)
r.set(cache_key, result)
if not result:
return Response(
response=f'no available info for {ne_name_str}',
status=404,
mimetype="text/html")
return Response(result, mimetype="application/json")
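To make the handler's output concrete, a sketch of the JSON document returned on a successful match; the pop names, id and status are made-up placeholders, and only the key layout comes from the code above:

example_response = {
    'ends': {
        'a': {'pop': 'Amsterdam', 'pop_abbreviation': 'AMS'},  # placeholder values
        'b': {'pop': 'London', 'pop_abbreviation': 'LON'},
    },
    'df_route': {
        'id': 12345,                  # made-up absid
        'name': 'AMS-LON DF ROUTE',   # made-up route name
        'status': 'operational',
    },
    'related-services': []            # populated by get_top_level_services(...)
}
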

@routes.route('/coriant-info/<equipment_name>/<path:entity_string>',
              methods=['GET', 'POST'])
@common.require_accepts_json
......
@@ -41,6 +41,12 @@ def update_geant_lambdas():
    return Response('OK')


@routes.route("update-fibre-spans", methods=['GET', 'POST'])
def update_fibre_spans():
    worker.update_fibre_spans.delay()
    return Response('OK')
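As a rough usage note (host, port and any blueprint prefix are assumptions; the handler only enqueues the celery task and returns immediately), the job could be triggered with something like:

import requests

# hypothetical deployment URL; adjust host/port/prefix to the real instance
requests.post('http://localhost:8080/jobs/update-fibre-spans')
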
@routes.route("update-service-hierarchy")
def update_service_hierarchy():
worker.update_circuit_hierarchy.delay()
......
@@ -311,6 +311,26 @@ def update_geant_lambdas(self):
        rp.execute()


@app.task(base=InventoryTask, bind=True, name='update_fibre_spans')
@log_task_entry_and_exit
def update_fibre_spans(self):
    r = get_next_redis(InventoryTask.config)
    rp = r.pipeline()
    # scan with bigger batches, to mitigate network latency effects
    for key in r.scan_iter('opsdb:ne_fibre_spans:*', count=1000):
        rp.delete(key)
    rp.execute()

    with db.connection(InventoryTask.config["ops-db"]) as cx:
        rp = r.pipeline()
        for ne, fs in opsdb.get_fibre_spans(cx):
            rp.set(
                f'opsdb:ne_fibre_spans:{ne}',
                json.dumps(fs))
        rp.execute()
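A minimal sketch of reading back one of the keys this task populates; the redis connection details and the NE name are illustrative assumptions:

import json
import redis

r = redis.StrictRedis(host='localhost', port=6379)  # assumed connection details
raw = r.get('opsdb:ne_fibre_spans:AMS01-OLA1-1')     # hypothetical NE name
if raw:
    spans = json.loads(raw.decode('utf-8'))
    # each element mirrors the dicts built in opsdb.get_fibre_spans():
    # ne, df_route, df_route_id, df_status, pop, pop_abbreviation
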

@app.task(base=InventoryTask, bind=True,
          name='update_neteng_managed_device_list')
@log_task_entry_and_exit
......