from __future__ import annotations

import argparse
import asyncio
import logging
import threading
from typing import Dict, List, Tuple

import uvicorn

from .api import create_app
from .config import Config
from .connection import ConnectionManager
from .logging_utils import init_logging
from .metrics import TransceiverCollector
from .models import DeviceHealthState, DeviceMetricsSnapshot
from .registry import DeviceRegistry
from .scraper import scraper_loop
from .sqlite_store import PasswordEncryptor, SQLiteDeviceStore

logger = logging.getLogger(__name__)


def _parse_http_listen(http_listen: str) -> Tuple[str, int]:
    """Parse the global.http_listen string into (host, port)."""
    if ":" not in http_listen:
        raise ValueError(f"Invalid http_listen value: {http_listen!r}")
    host, port_str = http_listen.rsplit(":", 1)
    if not host:
        host = "0.0.0.0"
    try:
        port = int(port_str)
    except ValueError as exc:
        raise ValueError(f"Invalid port in http_listen: {http_listen!r}") from exc
    return host, port


def _build_arg_parser() -> argparse.ArgumentParser:
    parser = argparse.ArgumentParser(description="NETCONF Transceiver Exporter")
    parser.add_argument(
        "--config",
        dest="config_path",
        default="config.yaml",
        help="Path to configuration YAML file",
    )
    parser.add_argument(
        "--log-level",
        dest="log_level",
        default=None,
        help="Override log level from config (e.g. INFO, DEBUG)",
    )
    return parser


def _netconf_get_rpc(manager, filter_xml: str) -> str:
    """
    Default NETCONF RPC wrapper.

    - Uses a subtree filter;
    - Returns the rpc-reply as an XML string.
    """
    reply = manager.get(filter=("subtree", filter_xml))
    return str(reply)


async def async_main(argv: List[str] | None = None) -> None:
    """
    Application entry point (async part):

    - Parse CLI arguments and load the configuration;
    - Initialize logging, SQLite, the registry, the ConnectionManager, the
      collector, and the FastAPI application;
    - Start the scraper thread and the Uvicorn server;
    - After the server exits, trigger a graceful scraper shutdown and close resources.
    """
    parser = _build_arg_parser()
    args = parser.parse_args(argv)

    cfg = Config.from_file(args.config_path)
    if args.log_level:
        cfg.global_.log_level = args.log_level

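    # A minimal config.yaml sketch (assumption: Config.from_file() maps top-level
    # "global" and "devices" keys onto cfg.global_ and cfg.devices; only fields
    # referenced in this module are shown, and all values are illustrative):
    #
    #   global:
    #     http_listen: "0.0.0.0:8000"
    #     log_level: "INFO"
    #     scrape_interval_seconds: 60
    #     shutdown_timeout_seconds: 10
    #     password_secret: "change-me"
    #     runtime_db_path: "devices.sqlite3"
    #   devices: []   # static devices; the per-device schema is defined in config.py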
    # Initialize logging
    init_logging(cfg.global_)
    logger.info("Starting NETCONF Transceiver Exporter")

    # Initialize password encryption and the SQLite store
    encryptor = PasswordEncryptor(cfg.global_.password_secret)
    store = SQLiteDeviceStore(cfg.global_.runtime_db_path, encryptor)
    store.init_db()

    # Initialize the registry: register static devices first, then load and
    # register runtime devices
    registry = DeviceRegistry(global_scrape_interval=cfg.global_.scrape_interval_seconds)
    for dev in cfg.devices:
        registry.register_static_device(dev)
    for dev in store.load_runtime_devices():
        registry.register_runtime_device(dev)

    # Metrics cache & health state
    metrics_cache: Dict[str, DeviceMetricsSnapshot] = {}
    health_state: Dict[str, DeviceHealthState] = {}
    collector = TransceiverCollector(metrics_cache, health_state)
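    # Both dicts are shared, mutable state: the scraper thread writes per-device
    # snapshots into them, while the collector and the HTTP handlers read them.
    # How those writes are synchronized is up to scraper.py / metrics.py; this
    # module only wires the two sides together.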

    # Connection manager
    connection_manager = ConnectionManager(cfg.global_)

    # FastAPI application
    app = create_app(registry, store, collector, cfg.global_)

    host, port = _parse_http_listen(cfg.global_.http_listen)
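    # Uvicorn is run programmatically: Server.serve() is awaited on this event
    # loop instead of calling uvicorn.run(), so the finally block below gets to
    # run the shutdown sequence once the server stops (e.g. after SIGINT/SIGTERM).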
    uv_config = uvicorn.Config(
        app,
        host=host,
        port=port,
        log_level=cfg.global_.log_level.lower(),
    )
    server = uvicorn.Server(uv_config)

    stop_event = threading.Event()

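    # The scraper runs in a daemon thread so a hung NETCONF session cannot keep
    # the process alive after shutdown; the finally block below still attempts a
    # graceful stop via stop_event. _netconf_get_rpc is handed to scraper_loop as
    # the RPC callable it invokes against sessions obtained from
    # connection_manager (presumably ncclient-style managers, judging by the
    # manager.get(filter=("subtree", ...)) call above).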
    scraper_thread = threading.Thread(
        target=scraper_loop,
        args=(
            stop_event,
            registry,
            connection_manager,
            _netconf_get_rpc,
            metrics_cache,
            health_state,
            cfg.global_,
        ),
        daemon=True,
        name="netconf-scraper",
    )
    scraper_thread.start()

    try:
        await server.serve()
    finally:
        # Signal the scraper to stop and wait for it to finish within the timeout
        stop_event.set()
        scraper_thread.join(timeout=cfg.global_.shutdown_timeout_seconds)
        if scraper_thread.is_alive():
            logger.error(
                "Scraper thread did not stop gracefully within timeout=%s seconds",
                cfg.global_.shutdown_timeout_seconds,
            )

        # Close all NETCONF connections and the SQLite store
        try:
            connection_manager.close_all()
        except Exception as exc:  # noqa: BLE001
            logger.error("Error closing NETCONF connections: %s", exc)

        try:
            store.close()
        except Exception as exc:  # noqa: BLE001
            logger.error("Error closing SQLite store: %s", exc)


def main() -> None:
    """Synchronous entry point, for `python -m exporter.main` or the command line."""
    try:
        asyncio.run(async_main())
    except KeyboardInterrupt:
        # Graceful exit: suppress the traceback triggered by Ctrl+C
        pass


if __name__ == "__main__":  # pragma: no cover
    main()