Commit acd46c20
authored 6 years ago by Erik Reid
removed a lot of dead/commented code
parent 02125ef0
Changes (2)

Showing 2 changed files with 1 addition and 179 deletions:

inventory_provider/routes/jobs.py    +1 −64   (1 addition, 64 deletions)
inventory_provider/tasks/worker.py   +0 −115  (0 additions, 115 deletions)
inventory_provider/routes/jobs.py  +1 −64
@@ -3,81 +3,18 @@ import logging

from flask import Blueprint, Response, current_app

from inventory_provider.tasks import worker
from inventory_provider.constants import TASK_LOGGER_NAME

routes = Blueprint("inventory-data-job-routes", __name__)


@routes.route("/update", methods=['GET', 'POST'])
def update():
    task_logger = logging.getLogger(TASK_LOGGER_NAME)
    worker.start_refresh_cache_all(
        current_app.config["INVENTORY_PROVIDER_CONFIG"])
    #
    #
    # app.send_task(
    #     'inventory_provider.tasks.worker.refresh_cache_all')
    # db_subtasks = [
    #     update_junosspace_device_list.s(),
    #     update_inventory_system_cache.s()
    # ]
    #
    # ch = (
    #     group(db_subtasks),
    #     _chain_separator_task.s(),
    #
    # )
    #
    #
    # task_logger.debug(
    #     'launching task: '
    #     'inventory_provider.tasks.worker.refresh_cache_all')
    # app.send_task(
    #     'inventory_provider.tasks.worker.refresh_cache_all')
    return Response("OK")


# @routes.route("/update-startup", methods=['GET', 'POST'])
# def startup_update():
#     task_logger = logging.getLogger(TASK_LOGGER_NAME)
#     task_logger.debug(
#         'launching task: '
#         'inventory_provider.tasks.worker.update_alarmsdb_cache')
#     app.send_task(
#         'inventory_provider.tasks.worker.update_alarmsdb_cache')
    return Response("OK")


# @routes.route("update-interfaces-to-services", methods=['GET'])
# def update_interfaces_to_services():
#     app.send_task(
#         'inventory_provider.tasks.worker.update_interfaces_to_services')
#     return Response("OK")
#
#
# @routes.route("update-service-hierarchy", methods=['GET'])
# def update_service_hierarchy():
#     app.send_task('inventory_provider.tasks.worker.update_circuit_hierarchy')
#     return Response("OK")
#
#
# @routes.route("update-equipment-locations", methods=['GET'])
# def update_equipment_locations():
#     app.send_task('inventory_provider.tasks.worker.update_equipment_locations')
#     return Response("OK")


@routes.route("update-from-inventory-system", methods=['GET'])
def update_from_inventory_system():
    app.send_task(
        'inventory_provider.tasks.worker.update_inventory_system_cache')
    return Response("OK")


@routes.route("update-interface-statuses")
def update_interface_statuses():
    app.send_task(
        'inventory_provider.tasks.worker.update_interface_statuses')
    worker.update_interface_statuses().async_start()
    return Response("OK")
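The remaining routes dispatch Celery tasks purely by dotted name via send_task. A minimal sketch of that pattern, assuming a reachable broker; the app name and Redis URL below are placeholders for illustration, not this project's real configuration:

from celery import Celery

# Hypothetical broker URL; the real inventory-provider broker settings come
# from its own configuration file.
celery_app = Celery('inventory_provider', broker='redis://localhost:6379/0')

# send_task() queues a task by its registered dotted name, so the caller does
# not need to import the task function itself; any worker that has
# inventory_provider.tasks.worker loaded will execute it.
result = celery_app.send_task(
    'inventory_provider.tasks.worker.update_inventory_system_cache')

# The HTTP handler above does not wait for the result; it just returns "OK".
print(result.id)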
inventory_provider/tasks/worker.py  +0 −115
@@ -63,40 +63,11 @@ class InventoryTask(Task):

            key,
            etree.tostring(xml_doc, encoding='unicode'))

    # @staticmethod
    # def save_key_json(hostname, key, data_obj):
    #     InventoryTask.save_key(
    #         hostname,
    #         key,
    #         json.dumps(data_obj))
    #
    # @staticmethod
    # def save_key_etree(hostname, key, xml_doc):
    #     InventoryTask.save_key(
    #         hostname,
    #         key,
    #         etree.tostring(xml_doc, encoding='unicode'))


# def _wait_for_result(async_result):
#     import time
#     logger = logging.getLogger(constants.TASK_LOGGER_NAME)
#     while not async_result.ready():
#         logger.debug("async_result not ready ... wait")
#         time.sleep(5.0)
#     return async_result.get()


class WorkerArgs(bootsteps.Step):
    def __init__(self, worker, config_filename, **options):
        with open(config_filename) as f:
            InventoryTask.config = config.load(f)
        # InventoryTask.logger = logging.getLogger(constants.TASK_LOGGER_NAME)


# interfaces_key = "interface_services"
# equipment_locations_key = "equipment_locations"
# service_child_to_parents_key = "child_to_parent_circuit_relations"
# service_parent_to_children_key = "parent_to_children_circuit_relations"
# interface_status_key = "interface_statuses"


def worker_args(parser):
    ...
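WorkerArgs follows Celery's custom-bootstep pattern: a command-line option supplies a config filename that is loaded once while the worker boots. A self-contained sketch of how such a step is typically wired up, assuming Celery 4.x; the app name, broker URL and defaults here are illustrative, not taken from this repository:

from celery import Celery, bootsteps

app = Celery('example_worker', broker='redis://localhost:6379/0')


def add_worker_arguments(parser):
    # Registers an extra option on the "celery worker" argparse parser.
    parser.add_argument(
        '--config_filename', default='config.json',
        help='JSON configuration file for the worker')


app.user_options['worker'].add(add_worker_arguments)


class ConfigStep(bootsteps.Step):
    # Runs once at worker startup; the custom CLI option arrives as a kwarg.
    def __init__(self, worker, config_filename=None, **options):
        super().__init__(worker, **options)
        print('worker starting with config file: %r' % config_filename)


app.steps['worker'].add(ConfigStep)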
@@ -141,18 +112,6 @@ def netconf_refresh_config(self, hostname):

    task_logger.debug('<<< netconf_refresh_config(%r)' % hostname)


# @app.task(bind=InventoryTask)
# def update_alarmsdb_cache(self):
#     logger = logging.getLogger(constants.TASK_LOGGER_NAME)
#     logger.debug('STARTING: update_alarmsdb_cache')
#
#     with db.connection(InventoryTask.config["alarms-db"]) as cx:
#         for table_name, data in alarmsdb.load_cache(cx):
#             InventoryTask.save_value_json('alarmsdb:%s' % table_name, data)
#
#     logger.debug('FINISHED: update_alarmsdb_cache')


@app.task()
def update_interfaces_to_services():
    task_logger = logging.getLogger(constants.TASK_LOGGER_NAME)
    ...
@@ -241,23 +200,6 @@ def update_interface_statuses():

    task_logger.debug('<<< update_interface_statuses')


# @app.task()
# def update_inventory_system_cache():
#     task_logger = logging.getLogger(constants.TASK_LOGGER_NAME)
#     task_logger.debug('>>> update_inventory_system_cache')
#
#     subtasks = [
#         update_interfaces_to_services.s(),
#         update_circuit_hierarchy.s(),
#         update_equipment_locations.s(),
#         # update_interface_statuses.s()
#     ]
#
#     group(subtasks).apply()
#
#     task_logger.debug('<<< update_inventory_system_cache')


@app.task()
def update_junosspace_device_list():
    task_logger = logging.getLogger(constants.TASK_LOGGER_NAME)
    ...
@@ -291,63 +233,6 @@ def _derive_router_hostnames(config):

    return junosspace_equipment & opsdb_equipment


# @app.task()
# def refresh_cache_for_router(hostname):
#     task_logger = logging.getLogger(constants.TASK_LOGGER_NAME)
#     task_logger.debug('>>> refresh_cache_for_router(%r)' % hostname)
#
#     # TODO: !!!! extract community string from netconf data
#     task_logger.error(
#         'TODO: !!!! extract community string from netconf data')
#     subtasks = [
#         netconf_refresh_config.s(hostname),
#         snmp_refresh_interfaces.s(hostname, '0pBiFbD')
#     ]
#
#     group(subtasks).apply()
#
#     # TODO: clear classifier cache
#
#     task_logger.debug('<<< refresh_cache_for_router(%r)' % hostname)


# @app.task()
# def _chain_separator_task():
#     """
#     boilerplate in order to support groups as chord elements
#     cf. https://stackoverflow.com/questions/15123772/celery-chaining-groups-and-subtasks-out-of-order-execution
#     cf. http://docs.celeryproject.org/en/latest/userguide/canvas.html
#     ('Chaining a group together with another task will automatically upgrade it to be a chord')
#     :return:
#     """  # noqa E501
#     task_logger = logging.getLogger(constants.TASK_LOGGER_NAME)
#     task_logger.debug('>>>_chain_separator_task<<<')
#     pass


# @app.task()
# def refresh_cache_all():
#     task_logger = logging.getLogger(constants.TASK_LOGGER_NAME)
#     task_logger.debug('>>> refresh_cache_all')
#
#     subtasks = [
#         update_junosspace_device_list.s(),
#         update_inventory_system_cache.s()
#     ]
#
#     group(subtasks).apply()
#
#     subtasks = []
#     for hostname in _derive_router_hostnames(InventoryTask.config):
#         task_logger.debug(
#             'queueing refresh_cache_for_router for %r' % hostname)
#         subtasks.append(refresh_cache_for_router.s(hostname))
#
#     group(subtasks).apply()
#
#     task_logger.debug('<<< refresh_cache_all')


def start_refresh_cache_all(config):
    """
    utility function intended to be called outside of the worker process
    ...
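The removed _chain_separator_task comment refers to the Celery canvas rule that chaining a group with a following task automatically upgrades the chain to a chord. A short, generic sketch of that behaviour, unrelated to this project's tasks; the app name, broker URL and task names are placeholders:

from celery import Celery, chain, group

app = Celery('canvas_demo', broker='redis://localhost:6379/0')


@app.task
def add(x, y):
    return x + y


@app.task
def summarize(results):
    # Receives the list of results produced by the preceding group.
    return sum(results)


# Because a group is chained with another task, Celery upgrades the chain to a
# chord: summarize() runs only after every add() in the group has finished.
workflow = chain(group(add.s(i, i) for i in range(3)), summarize.s())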