Skip to content
Snippets Groups Projects
Commit 85b1f0c7 authored by Robert Latta's avatar Robert Latta
Browse files

Merge branch 'feature/DBOARD3-462' into develop

parents 9dbfb567 9a47f5e1
No related branches found
No related tags found
No related merge requests found
...@@ -883,6 +883,103 @@ def get_fiberlink_trap_metadata(ne_name_str: str, object_name_str: str) \ ...@@ -883,6 +883,103 @@ def get_fiberlink_trap_metadata(ne_name_str: str, object_name_str: str) \
return Response(result, mimetype="application/json") return Response(result, mimetype="application/json")
@routes.route("/tnms-fibre-info/<path:enms_pc_name>", methods=['GET', 'POST'])
@common.require_accepts_json
def get_tnms_fibre_trap_metadata(enms_pc_name: str) -> Response:
    """
    Handler for /classifier/tnms-fibre-info that returns metadata
    for a particular optical path segment.

    TODO: no schema is declared, and there are no validation tests

    :param enms_pc_name: both node names separated by a forward slash
    :return: a JSON Response with location/service metadata, or a
        404 text/html Response when no info is available
    :raises ClassifierProcessingError: if enms_pc_name does not split
        into exactly two elements on '/'
    """
    r = common.get_current_redis()
    # double check that we only need to check the two nodes and not the objects
    # NOTE(review): this key prefix is shared with the fiberlink handler's
    # cache keys — confirm the two key spaces cannot collide
    cache_key = f'classifier-cache:fiberlink:{enms_pc_name}'

    result = _ignore_cache_or_retrieve(request, cache_key, r)

    if not result:
        try:
            equipment_a, equipment_b = enms_pc_name.split("/")
            logger.debug('%s - %s', equipment_a, equipment_b)
        except ValueError:
            raise ClassifierProcessingError(
                f'unable to parse {enms_pc_name} '
                'into two elements')

        circuits = r.get(f'ims:node_pair_services:{enms_pc_name}')

        # dicts keyed by id so duplicates across circuits collapse
        all_frs = {}
        all_tls = {}
        contacts = set()
        if circuits:
            circuits = json.loads(circuits.decode('utf-8'))
            for c in circuits:
                contacts.update(c['contacts'])
                for fr in c['fibre-routes']:
                    all_frs[fr['id']] = fr
                for tls in c['related-services']:
                    all_tls[tls['id']] = tls
        fibre_routes = list(all_frs.values())
        top_level_services = list(all_tls.values())

        if fibre_routes:
            # fall back to a bare location built from the equipment name
            # when no location data is cached for an endpoint
            loc_a = _location_from_equipment(equipment_a, r) \
                or _LOCATION(equipment_a, '', '')
            loc_b = _location_from_equipment(equipment_b, r) \
                or _LOCATION(equipment_b, '', '')

            # added locations in preparation for refactoring to be in-line
            # with other location data. Once Dashboard has been altered to
            # use this for fiberlink alarms the 'ends' attribute can be
            # removed
            result = {
                'locations': [
                    build_locations(loc_a, loc_b)
                ],
                'ends': {
                    'a': {
                        'pop': loc_a['name'],
                        'pop_abbreviation': loc_a['abbreviation'],
                        'equipment': loc_a['equipment']
                    },
                    'b': {
                        'pop': loc_b['name'],
                        'pop_abbreviation': loc_b['abbreviation'],
                        'equipment': loc_b['equipment']
                    },
                },
                'df_route': fibre_routes[0],
                'related-services': top_level_services,
                'contacts': sorted(contacts)
            }

            # the internal ids are not part of the public payload
            for rs in result['related-services']:
                rs.pop('id', None)

            result = json.dumps(result)
            r.set(cache_key, result)

    if not result:
        return Response(
            response=f"no available info for {enms_pc_name}",
            status=404,
            mimetype="text/html")

    return Response(result, mimetype="application/json")
@routes.route('/coriant-info/<equipment_name>/<path:entity_string>', @routes.route('/coriant-info/<equipment_name>/<path:entity_string>',
methods=['GET', 'POST']) methods=['GET', 'POST'])
@common.require_accepts_json @common.require_accepts_json
......
...@@ -754,6 +754,9 @@ def update_circuit_hierarchy_and_port_id_services(self, use_current=False): ...@@ -754,6 +754,9 @@ def update_circuit_hierarchy_and_port_id_services(self, use_current=False):
s.pop('port_a_id', None) s.pop('port_a_id', None)
s.pop('port_b_id', None) s.pop('port_b_id', None)
# using a dict to ensure no duplicates
node_pair_services = defaultdict(dict)
for key, value in port_id_details.items(): for key, value in port_id_details.items():
for details in value: for details in value:
k = f"{details['equipment_name']}:" \ k = f"{details['equipment_name']}:" \
...@@ -784,6 +787,10 @@ def update_circuit_hierarchy_and_port_id_services(self, use_current=False): ...@@ -784,6 +787,10 @@ def update_circuit_hierarchy_and_port_id_services(self, use_current=False):
type_services = services_by_type.setdefault( type_services = services_by_type.setdefault(
ims_sorted_service_type_key(circ['service_type']), dict()) ims_sorted_service_type_key(circ['service_type']), dict())
type_services[circ['id']] = circ type_services[circ['id']] = circ
if circ['other_end_equipment']:
node_pair_services[
f"{circ['equipment']}/{circ['other_end_equipment']}"
][circ['id']] = circ
interface_services[k].extend(circuits) interface_services[k].extend(circuits)
...@@ -800,6 +807,10 @@ def update_circuit_hierarchy_and_port_id_services(self, use_current=False): ...@@ -800,6 +807,10 @@ def update_circuit_hierarchy_and_port_id_services(self, use_current=False):
rp.delete(k) rp.delete(k)
rp.execute() rp.execute()
rp = r.pipeline() rp = r.pipeline()
for k in r.scan_iter('ims:node_pair_services:*', count=1000):
rp.delete(k)
rp.execute()
rp = r.pipeline()
for k in r.scan_iter('ims:access_services:*', count=1000): for k in r.scan_iter('ims:access_services:*', count=1000):
rp.delete(k) rp.delete(k)
for k in r.scan_iter('ims:gws_indirect:*', count=1000): for k in r.scan_iter('ims:gws_indirect:*', count=1000):
...@@ -816,6 +827,12 @@ def update_circuit_hierarchy_and_port_id_services(self, use_current=False): ...@@ -816,6 +827,12 @@ def update_circuit_hierarchy_and_port_id_services(self, use_current=False):
json.dumps(v)) json.dumps(v))
rp.execute() rp.execute()
rp = r.pipeline() rp = r.pipeline()
for k, v in node_pair_services.items():
rp.set(
f'ims:node_pair_services:{k}',
json.dumps(list(v.values())))
rp.execute()
rp = r.pipeline()
populate_poller_cache(interface_services, r) populate_poller_cache(interface_services, r)
...@@ -1675,6 +1692,8 @@ def transform_ims_data(data): ...@@ -1675,6 +1692,8 @@ def transform_ims_data(data):
services_by_type = {} services_by_type = {}
interface_services = defaultdict(list) interface_services = defaultdict(list)
# using a dict to ensure no duplicates
node_pair_services = defaultdict(dict)
for key, value in port_id_details.items(): for key, value in port_id_details.items():
for details in value: for details in value:
...@@ -1706,13 +1725,18 @@ def transform_ims_data(data): ...@@ -1706,13 +1725,18 @@ def transform_ims_data(data):
type_services = services_by_type.setdefault( type_services = services_by_type.setdefault(
ims_sorted_service_type_key(circ['service_type']), dict()) ims_sorted_service_type_key(circ['service_type']), dict())
type_services[circ['id']] = circ type_services[circ['id']] = circ
if circ['other_end_equipment']:
node_pair_services[
f"{circ['equipment']}/{circ['other_end_equipment']}"
][circ['id']] = circ
interface_services[k].extend(circuits) interface_services[k].extend(circuits)
return { return {
'hierarchy': hierarchy, 'hierarchy': hierarchy,
'interface_services': interface_services, 'interface_services': interface_services,
'services_by_type': services_by_type 'services_by_type': services_by_type,
'node_pair_services': node_pair_services
} }
...@@ -1723,6 +1747,7 @@ def persist_ims_data(data, use_current=False): ...@@ -1723,6 +1747,7 @@ def persist_ims_data(data, use_current=False):
lg_routers = data['lg_routers'] lg_routers = data['lg_routers']
interface_services = data['interface_services'] interface_services = data['interface_services']
services_by_type = data['services_by_type'] services_by_type = data['services_by_type']
node_pair_services = data['node_pair_services']
if use_current: if use_current:
r = get_current_redis(InventoryTask.config) r = get_current_redis(InventoryTask.config)
...@@ -1735,7 +1760,8 @@ def persist_ims_data(data, use_current=False): ...@@ -1735,7 +1760,8 @@ def persist_ims_data(data, use_current=False):
'ims:circuit_hierarchy:*', 'ims:circuit_hierarchy:*',
'ims:interface_services:*', 'ims:interface_services:*',
'ims:access_services:*', 'ims:access_services:*',
'ims:gws_indirect:*' 'ims:gws_indirect:*',
'ims:node_pair_services:*'
]: ]:
rp = r.pipeline() rp = r.pipeline()
for k in r.scan_iter(key_pattern, count=1000): for k in r.scan_iter(key_pattern, count=1000):
...@@ -1761,6 +1787,13 @@ def persist_ims_data(data, use_current=False): ...@@ -1761,6 +1787,13 @@ def persist_ims_data(data, use_current=False):
f'ims:interface_services:{k}', f'ims:interface_services:{k}',
json.dumps(v)) json.dumps(v))
rp.execute() rp.execute()
rp = r.pipeline()
for k, v in node_pair_services.items():
rp.set(
f'ims:node_pair_services:{k}',
json.dumps(list(v.values())))
rp.execute()
rp = r.pipeline() rp = r.pipeline()
populate_poller_cache(interface_services, r) populate_poller_cache(interface_services, r)
......
...@@ -242,6 +242,18 @@ def test_transform_ims_data(): ...@@ -242,6 +242,18 @@ def test_transform_ims_data():
assert len(v[0]["fibre-routes"]) == 1 assert len(v[0]["fibre-routes"]) == 1
assert v[0]["fibre-routes"][0]["id"] == "carrier_id_3" assert v[0]["fibre-routes"][0]["id"] == "carrier_id_3"
nps = res["node_pair_services"]
assert list(nps.keys()) == ["eq_a/eq_b", "eq_b/eq_a"]
v1 = nps["eq_a/eq_b"]["circ_id_1"]
v2 = nps["eq_a/eq_b"]["circ_id_1"]
assert v1 == v2
assert json.dumps(v1, sort_keys=True) == json.dumps(v2, sort_keys=True)
assert len(v) == 1
assert len(v[0]["related-services"]) == 1
assert v[0]["related-services"][0]["id"] == "sub_circuit_2"
assert len(v[0]["fibre-routes"]) == 1
assert v[0]["fibre-routes"][0]["id"] == "carrier_id_3"
def test_persist_ims_data(mocker, data_config, mocked_redis): def test_persist_ims_data(mocker, data_config, mocked_redis):
...@@ -285,6 +297,10 @@ def test_persist_ims_data(mocker, data_config, mocked_redis): ...@@ -285,6 +297,10 @@ def test_persist_ims_data(mocker, data_config, mocked_redis):
} }
] ]
}, },
"node_pair_services": {
"np1": {"id_1": "data for np1"},
"np2": {"id_2": "data for np2"},
},
"services_by_type": {}, "services_by_type": {},
} }
for k in r.keys("ims:*"): for k in r.keys("ims:*"):
...@@ -303,6 +319,9 @@ def test_persist_ims_data(mocker, data_config, mocked_redis): ...@@ -303,6 +319,9 @@ def test_persist_ims_data(mocker, data_config, mocked_redis):
assert [k.decode("utf-8") for k in r.keys("ims:interface_services:*")] == \ assert [k.decode("utf-8") for k in r.keys("ims:interface_services:*")] == \
["ims:interface_services:if1", "ims:interface_services:if2"] ["ims:interface_services:if1", "ims:interface_services:if2"]
assert [k.decode("utf-8") for k in r.keys("ims:node_pair_services:*")] == \
["ims:node_pair_services:np1", "ims:node_pair_services:np2"]
assert [k.decode("utf-8") for k in r.keys("poller_cache:*")] == \ assert [k.decode("utf-8") for k in r.keys("poller_cache:*")] == \
["poller_cache:eq1", "poller_cache:eq2"] ["poller_cache:eq1", "poller_cache:eq2"]
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment