Commit e4936163 authored by Remco Tukker

publishing the network excel file

parent cac86203
1 merge request: !84 Feature/comp 284 excel publisher networks
@@ -3,7 +3,8 @@ import logging
import openpyxl
from compendium_v2.conversion import mapping
-from compendium_v2.db.presentation_model_enums import CarryMechanism, ConnectivityCoverage, UserCategory, FeeType
+from compendium_v2.db.presentation_model_enums import CarryMechanism, ConnectivityCoverage, MonitoringMethod, \
+    UserCategory, FeeType, YesNoPlanned
from compendium_v2.environment import setup_logging
from compendium_v2.resources import get_resource_file_path
@@ -827,3 +828,239 @@ def fetch_remote_campuses_excel_data():
    yield from create_points_for_year(2020, 16)
    yield from create_points_for_year(2021, 10)
    yield from create_points_for_year(2022, 4)

def fetch_dark_fibre_iru_excel_data():
    wb = openpyxl.load_workbook(EXCEL_FILE_NETWORKS, data_only=True, read_only=True)
    sheet_name = "Dark Fibre"
    ws = wb[sheet_name]
    rows = list(ws.rows)

    def parse_int(excel_value):
        if excel_value is None or excel_value == "":
            return None
        return int(str(excel_value).replace("-", "").replace(" ", "").replace(".", ""))

    def create_points_for_year(year, start_column):
        for i in range(10, 53):
            nren_name = rows[i][start_column].value
            if not nren_name:
                continue
            nren_name = nren_name.upper()

            s = start_column
            iru = ""
            if year > 2019:
                s += 1
                iru = rows[i][s].value
            elif parse_int(rows[i][s + 1].value) is not None:
                iru = "Yes" if parse_int(rows[i][s + 1].value) else "No"

            if iru:
                length_in_country = parse_int(rows[i][s + 1].value)
                length_out_country = parse_int(rows[i][s + 3].value)
                iru = iru == "Yes"
                yield nren_name, year, iru, length_in_country, length_out_country

    yield from create_points_for_year(2016, 42)
    yield from create_points_for_year(2017, 36)
    yield from create_points_for_year(2018, 30)
    yield from create_points_for_year(2019, 24)
    yield from create_points_for_year(2020, 17)
    yield from create_points_for_year(2021, 10)
    yield from create_points_for_year(2022, 3)

def fetch_dark_fibre_installed_excel_data():
    wb = openpyxl.load_workbook(EXCEL_FILE_NETWORKS, data_only=True, read_only=True)
    sheet_name = "Dark Fibre"
    ws = wb[sheet_name]
    rows = list(ws.rows)

    def parse_int(excel_value):
        if excel_value is None or excel_value == "":
            return None
        return int(str(excel_value).replace("-", "").replace(" ", "").replace(".", ""))

    def create_points_for_year(year, start_column):
        for i in range(10, 53):
            nren_name = rows[i][start_column].value
            if not nren_name:
                continue
            nren_name = nren_name.upper()

            s = start_column
            if year > 2019:
                s += 1

            installed_length = parse_int(rows[i][s + 2].value)
            if installed_length is not None:
                installed = bool(installed_length)
                yield nren_name, year, installed, installed_length

    yield from create_points_for_year(2016, 42)
    yield from create_points_for_year(2017, 36)
    yield from create_points_for_year(2018, 30)
    yield from create_points_for_year(2019, 24)
    yield from create_points_for_year(2020, 17)
    yield from create_points_for_year(2021, 10)
    yield from create_points_for_year(2022, 3)

def fetch_iru_duration_excel_data():
    wb = openpyxl.load_workbook(EXCEL_FILE_NETWORKS, data_only=True, read_only=True)
    sheet_name = "IRU duration"
    ws = wb[sheet_name]
    rows = list(ws.rows)
    result = {}

    def create_points_for_year(year, start_column):
        for i in range(3, 46):
            nren_name = rows[i][start_column].value
            if not nren_name:
                continue
            nren_name = nren_name.upper()

            years = rows[i][start_column + 1].value
            if not years:
                continue
            years = str(years).split(" ")[0].split("+")[0].split("-")[0]
            if not years:
                continue
            try:
                years = int(years)
            except ValueError:
                logger.warning(f'Invalid iru duration Value :{nren_name} ({year}) with value ({years})')
                continue
            result[(nren_name, year)] = years

    create_points_for_year(2019, 10)
    create_points_for_year(2020, 7)
    create_points_for_year(2021, 4)
    create_points_for_year(2022, 1)
    return result

def fetch_passive_monitoring_excel_data():
    wb = openpyxl.load_workbook(EXCEL_FILE_NETWORKS, data_only=True, read_only=True)
    sheet_name = "Traffic monitoring"
    ws = wb[sheet_name]
    rows = list(ws.rows)

    def create_points_for_year(year, start_column):
        for i in range(6, 48):
            nren_name = rows[i][1].value
            if not nren_name:
                continue
            nren_name = nren_name.upper()

            monitoring = rows[i][start_column].value
            method = rows[i][start_column + 1].value
            if monitoring:
                monitoring = monitoring == "Yes"
                method = {
                    "SPAN ports": MonitoringMethod.span_ports,
                    "Passive optical TAPS": MonitoringMethod.taps,
                    "Both": MonitoringMethod.both,
                    None: None
                }[method]
                yield nren_name, year, monitoring, method

    yield from create_points_for_year(2021, 4)
    yield from create_points_for_year(2022, 2)

def fetch_largest_link_capacity_excel_data():
    wb = openpyxl.load_workbook(EXCEL_FILE_NETWORKS, data_only=True, read_only=True)
    sheet_name = "Largest IP Trunk capacity"
    ws = wb[sheet_name]
    rows = list(ws.rows)
    result = {}

    def create_points_for_year(year, start_column):
        for i in range(5, 47):
            nren_name = rows[i][5].value
            if not nren_name:
                continue
            nren_name = nren_name.upper()

            largest_capacity = rows[i][start_column].value
            if largest_capacity:
                result[(nren_name, year)] = int(largest_capacity)

    create_points_for_year(2016, 12)
    create_points_for_year(2017, 11)
    create_points_for_year(2018, 10)
    create_points_for_year(2019, 9)
    create_points_for_year(2020, 8)
    create_points_for_year(2021, 7)
    create_points_for_year(2022, 6)
    return result

def fetch_typical_backbone_capacity_excel_data():
    wb = openpyxl.load_workbook(EXCEL_FILE_NETWORKS, data_only=True, read_only=True)
    sheet_name = "Typical IP Trunk capacity"
    ws = wb[sheet_name]
    rows = list(ws.rows)
    result = {}

    def create_points_for_year(year, start_column):
        for i in range(5, 47):
            nren_name = rows[i][4].value
            if not nren_name:
                continue
            nren_name = nren_name.upper()

            typical_capacity = rows[i][start_column].value
            if typical_capacity:
                result[(nren_name, year)] = int(typical_capacity)

    create_points_for_year(2016, 11)
    create_points_for_year(2017, 10)
    create_points_for_year(2018, 9)
    create_points_for_year(2019, 8)
    create_points_for_year(2020, 7)
    create_points_for_year(2021, 6)
    create_points_for_year(2022, 5)
    return result

def fetch_non_r_e_peers_excel_data():
    wb = openpyxl.load_workbook(EXCEL_FILE_NETWORKS, data_only=True, read_only=True)
    sheet_name = "Peering-Non R& Network"
    ws = wb[sheet_name]
    rows = list(ws.rows)

    def create_points_for_year(year, start_column):
        for i in range(5, 48):
            nren_name = rows[i][2].value
            if not nren_name:
                continue
            nren_name = nren_name.upper()

            nr_peers = rows[i][start_column].value
            if nr_peers:
                yield nren_name, year, int(nr_peers)

    yield from create_points_for_year(2016, 10)
    yield from create_points_for_year(2017, 9)
    yield from create_points_for_year(2018, 8)
    yield from create_points_for_year(2019, 7)
    yield from create_points_for_year(2020, 6)
    yield from create_points_for_year(2021, 5)
    yield from create_points_for_year(2022, 3)

def fetch_ops_automation_excel_data():
    wb = openpyxl.load_workbook(EXCEL_FILE_NETWORKS, data_only=True, read_only=True)
    sheet_name = "Automation"
    ws = wb[sheet_name]
    rows = list(ws.rows)

    def create_points_for_year(year, start_column):
        for i in range(5, 48):
            nren_name = rows[i][1].value
            if not nren_name:
                continue
            nren_name = nren_name.upper()

            automation = rows[i][start_column].value
            specifics = rows[i][start_column + 1].value or ""
            if automation:
                automation = YesNoPlanned[automation.lower()]
                yield nren_name, year, automation, specifics

    yield from create_points_for_year(2021, 5)
    yield from create_points_for_year(2022, 3)

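Note: the new fetch_*_excel_data helpers above return plain Python structures (generators of tuples, or dicts keyed on (nren, year)), so they can be exercised without any database setup. A minimal usage sketch, assuming EXCEL_FILE_NETWORKS resolves to a local copy of the networks workbook:

from compendium_v2.publishers import excel_parser

# Each yielded tuple is (nren_name, year, iru, length_in_country, length_out_country).
for nren, year, iru, km_in, km_out in excel_parser.fetch_dark_fibre_iru_excel_data():
    print(f'{nren} {year}: iru={iru}, in country={km_in} km, outside={km_out} km')

# The IRU duration parser returns a dict keyed on (nren_name, year) instead of a generator.
durations = excel_parser.fetch_iru_duration_excel_data()
print(durations.get(('SURF', 2022)))
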
@@ -33,6 +33,7 @@ def get_uppercase_nren_dict():
    nren_dict['AZSCIENCENET'] = nren_dict['ANAS']
    nren_dict['GRNET S.A.'] = nren_dict['GRNET']
    nren_dict['FUNET'] = nren_dict['CSC']
+    nren_dict['PIONEER'] = nren_dict['PIONIER']
    return nren_dict
...
@@ -418,15 +418,134 @@ def db_remote_campuses_migration(nren_dict):
            connections.append({'country': country, 'local_r_and_e_connection': connected_to_r_e})
        nren = nren_dict[abbrev]
-        connection_carrier = presentation_models.RemoteCampuses(
+        new_entry = presentation_models.RemoteCampuses(
            nren=nren,
            nren_id=nren.id,
            year=year,
            remote_campus_connectivity=connectivity,
            connections=connections
        )
-        db.session.merge(connection_carrier)
+        db.session.merge(new_entry)
    db.session.commit()

def db_dark_fibre_lease_migration(nren_dict):
    data_rows = excel_parser.fetch_dark_fibre_iru_excel_data()
    iru_duration = excel_parser.fetch_iru_duration_excel_data()
    for (abbrev, year, iru, length_in_country, length_out_country) in data_rows:
        if abbrev not in nren_dict:
            logger.warning(f'{abbrev} unknown. Skipping.')
            continue

        nren = nren_dict[abbrev]
        new_entry = presentation_models.DarkFibreLease(
            nren=nren,
            nren_id=nren.id,
            year=year,
            iru_or_lease=iru,
            fibre_length_in_country=length_in_country,
            fibre_length_outside_country=length_out_country,
            iru_duration=iru_duration.get((abbrev, year))
        )
        db.session.merge(new_entry)
    db.session.commit()

def db_dark_fibre_installed_migration(nren_dict):
    data_rows = excel_parser.fetch_dark_fibre_installed_excel_data()
    for (abbrev, year, installed, length) in data_rows:
        if abbrev not in nren_dict:
            logger.warning(f'{abbrev} unknown. Skipping.')
            continue

        nren = nren_dict[abbrev]
        new_entry = presentation_models.DarkFibreInstalled(
            nren=nren,
            nren_id=nren.id,
            year=year,
            installed=installed,
            fibre_length_in_country=length
        )
        db.session.merge(new_entry)
    db.session.commit()

def db_passive_monitoring_migration(nren_dict):
    data_rows = excel_parser.fetch_passive_monitoring_excel_data()
    for (abbrev, year, monitoring, method) in data_rows:
        if abbrev not in nren_dict:
            logger.warning(f'{abbrev} unknown. Skipping.')
            continue

        nren = nren_dict[abbrev]
        new_entry = presentation_models.PassiveMonitoring(
            nren=nren,
            nren_id=nren.id,
            year=year,
            monitoring=monitoring,
            method=method
        )
        db.session.merge(new_entry)
    db.session.commit()

def db_capacity_migration(nren_dict):
    largest_data_rows = excel_parser.fetch_largest_link_capacity_excel_data()
    typical_data_rows = excel_parser.fetch_typical_backbone_capacity_excel_data()

    for abbrev, year in largest_data_rows.keys() | typical_data_rows.keys():
        if abbrev not in nren_dict:
            logger.warning(f'{abbrev} unknown. Skipping.')
            continue

        nren = nren_dict[abbrev]
        new_entry = presentation_models.Capacity(
            nren=nren,
            nren_id=nren.id,
            year=year,
            largest_link_capacity=largest_data_rows.get((abbrev, year)),
            typical_backbone_capacity=typical_data_rows.get((abbrev, year))
        )
        db.session.merge(new_entry)
    db.session.commit()

def db_non_r_e_peers_migration(nren_dict):
    data_rows = excel_parser.fetch_non_r_e_peers_excel_data()
    for (abbrev, year, nr_of_non_r_and_e_peers) in data_rows:
        if abbrev not in nren_dict:
            logger.warning(f'{abbrev} unknown. Skipping.')
            continue

        nren = nren_dict[abbrev]
        new_entry = presentation_models.NonREPeers(
            nren=nren,
            nren_id=nren.id,
            year=year,
            nr_of_non_r_and_e_peers=nr_of_non_r_and_e_peers
        )
        db.session.merge(new_entry)
    db.session.commit()

def db_ops_automation_migration(nren_dict):
    data_rows = excel_parser.fetch_ops_automation_excel_data()
    for (abbrev, year, automation, specifics) in data_rows:
        if abbrev not in nren_dict:
            logger.warning(f'{abbrev} unknown. Skipping.')
            continue

        nren = nren_dict[abbrev]
        new_entry = presentation_models.OpsAutomation(
            nren=nren,
            nren_id=nren.id,
            year=year,
            ops_automation=automation,
            ops_automation_specifics=specifics
        )
        db.session.merge(new_entry)
    db.session.commit()

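All six new db_*_migration helpers share one shape: skip NRENs that are not in nren_dict, build a presentation-model row, and db.session.merge() it so that re-running the publisher updates existing rows instead of duplicating them, with a single commit per dataset. A minimal, self-contained sketch of that merge-as-upsert behaviour with a toy SQLAlchemy model (the model, table name, and values here are illustrative, not the real presentation_models classes):

from sqlalchemy import Column, Integer, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class ExampleCapacity(Base):
    # Toy stand-in for a presentation model: the composite primary key
    # (nren_id, year) is what lets merge() act as an upsert.
    __tablename__ = 'example_capacity'
    nren_id = Column(Integer, primary_key=True)
    year = Column(Integer, primary_key=True)
    largest_link_capacity = Column(Integer)


engine = create_engine('sqlite://')
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.merge(ExampleCapacity(nren_id=1, year=2022, largest_link_capacity=100))
    # Same primary key: the existing row is updated rather than duplicated.
    session.merge(ExampleCapacity(nren_id=1, year=2022, largest_link_capacity=400))
    session.commit()
    assert session.query(ExampleCapacity).count() == 1
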
@@ -450,6 +569,13 @@ def _cli(app):
        db_connectivity_load_migration(nren_dict)
        db_remote_campuses_migration(nren_dict)
+        db_dark_fibre_lease_migration(nren_dict)
+        db_dark_fibre_installed_migration(nren_dict)
+        db_passive_monitoring_migration(nren_dict)
+        db_capacity_migration(nren_dict)
+        db_non_r_e_peers_migration(nren_dict)
+        db_ops_automation_migration(nren_dict)


@click.command()
@click.option('--config', type=click.STRING, default='config.json')
...
@@ -13,7 +13,8 @@ def test_excel_publisher(app_with_survey_db, mocker):
    mocker.patch('compendium_v2.publishers.excel_parser.EXCEL_FILE_ORGANISATION', EXCEL_FILE)

    with app_with_survey_db.app_context():
-        nren_names = ['SURF', 'KIFU', 'University of Malta', 'ASNET-AM', 'SIKT', 'LAT', 'RASH', 'ANAS', 'GRNET', 'CSC']
+        nren_names = ['SURF', 'KIFU', 'University of Malta', 'ASNET-AM', 'SIKT', 'LAT', 'RASH', 'ANAS', 'GRNET', 'CSC',
+                      'PIONIER']
        db.session.add_all([presentation_models.NREN(name=nren_name, country='country') for nren_name in nren_names])
        db.session.commit()
...
@@ -210,7 +210,7 @@ def test_publisher(app_with_survey_db, mocker):
    mocker.patch('compendium_v2.publishers.survey_publisher_old_db_2022.recursive_query', institutions_urls_data)

    nren_names = ['Nren1', 'Nren2', 'Nren3', 'Nren4', 'SURF', 'KIFU', 'University of Malta', 'ASNET-AM',
-                  'SIKT', 'LAT', 'RASH', 'ANAS', 'GRNET', 'CSC']
+                  'SIKT', 'LAT', 'RASH', 'ANAS', 'GRNET', 'CSC', 'PIONIER']
    with app_with_survey_db.app_context():
        db.session.add_all([presentation_models.NREN(name=nren_name, country='country') for nren_name in nren_names])
        db.session.commit()
...
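The updated tests only extend the seeded NREN list; the new parsers themselves can be pointed at a fixture workbook the same way the existing test patches EXCEL_FILE_ORGANISATION. A minimal sketch, where the fixture path is a placeholder rather than a file known to exist in the repository:

from compendium_v2.publishers import excel_parser


def test_dark_fibre_iru_parser(mocker):
    # Redirect the parser to a checked-in fixture workbook (placeholder path).
    mocker.patch('compendium_v2.publishers.excel_parser.EXCEL_FILE_NETWORKS',
                 'tests/data/networks_fixture.xlsx')

    entries = list(excel_parser.fetch_dark_fibre_iru_excel_data())

    # Each entry is (nren_name, year, iru, length_in_country, length_out_country).
    assert all(len(entry) == 5 for entry in entries)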