Compare commits
No commits in common. "a41d638268530d91a364a0399d1ef2c9702fb66e" and "d0433f45d27264a75dcbf424a286057bb6f142b3" have entirely different histories.
a41d638268 ... d0433f45d2

@@ -2,10 +2,10 @@
# Enable protocol servers for testing

# Database configuration
DB_HOST=postgres
DB_HOST=calejo-postgres-test
DB_PORT=5432
DB_NAME=calejo_test
DB_USER=calejo_test
DB_USER=calejo
DB_PASSWORD=password

# Enable internal protocol servers for testing

@@ -15,7 +15,7 @@ MODBUS_ENABLED=true
# REST API configuration
REST_API_ENABLED=true
REST_API_HOST=0.0.0.0
REST_API_PORT=8080
REST_API_PORT=8081

# Health monitoring
HEALTH_MONITOR_PORT=9091

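As an aside, here is a minimal sketch of how these variables could be consumed on the application side, assuming the adapter assembles its database URL from the individual DB_* settings (the real config/settings module is not part of this compare); the defaults simply mirror the values shown above:

    import os

    # Illustrative only: defaults mirror the .env values shown in the hunks above.
    DB_HOST = os.getenv("DB_HOST", "calejo-postgres-test")
    DB_PORT = int(os.getenv("DB_PORT", "5432"))
    DB_NAME = os.getenv("DB_NAME", "calejo_test")
    DB_USER = os.getenv("DB_USER", "calejo")
    DB_PASSWORD = os.getenv("DB_PASSWORD", "password")
    REST_API_PORT = int(os.getenv("REST_API_PORT", "8081"))

    # A DSN assembled this way points at the test Postgres container.
    DATABASE_URL = f"postgresql://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{DB_PORT}/{DB_NAME}"
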
@@ -101,16 +101,6 @@ CREATE TABLE IF NOT EXISTS users (
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);

-- Create discovery_results table
CREATE TABLE IF NOT EXISTS discovery_results (
    scan_id VARCHAR(100) PRIMARY KEY,
    status VARCHAR(50) NOT NULL,
    discovered_endpoints JSONB,
    scan_started_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    scan_completed_at TIMESTAMP,
    error_message TEXT
);

-- Create indexes for better performance
CREATE INDEX IF NOT EXISTS idx_pump_plans_station_pump ON pump_plans(station_id, pump_id);
CREATE INDEX IF NOT EXISTS idx_pump_plans_interval ON pump_plans(interval_start, interval_end);

@@ -118,8 +108,6 @@ CREATE INDEX IF NOT EXISTS idx_pump_plans_status ON pump_plans(plan_status);
CREATE INDEX IF NOT EXISTS idx_emergency_stops_cleared ON emergency_stops(cleared_at);
CREATE INDEX IF NOT EXISTS idx_audit_logs_timestamp ON audit_logs(timestamp);
CREATE INDEX IF NOT EXISTS idx_audit_logs_user ON audit_logs(user_id);
CREATE INDEX IF NOT EXISTS idx_discovery_results_status ON discovery_results(status);
CREATE INDEX IF NOT EXISTS idx_discovery_results_timestamp ON discovery_results(scan_started_at);

-- Insert sample data for testing
INSERT INTO pump_stations (station_id, station_name, location) VALUES

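The discovery_results table and indexes removed here back the persistent discovery service that is deleted later in this compare. A minimal sketch of the kind of query that service issues against the table, assuming a SQLAlchemy engine pointed at this database (the connection string is hypothetical):

    from sqlalchemy import create_engine, text

    # Hypothetical DSN for the test database configured in .env.test above.
    engine = create_engine("postgresql://calejo:password@calejo-postgres-test:5432/calejo_test")

    query = text("""
        SELECT scan_id, status, scan_started_at, scan_completed_at
        FROM discovery_results
        ORDER BY scan_started_at DESC
        LIMIT 5
    """)

    with engine.connect() as conn:
        # Each row is one stored discovery scan.
        for row in conn.execute(query):
            print(row.scan_id, row.status)
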
@@ -140,64 +140,13 @@ class SSHDeployer:
            dirs[:] = [d for d in dirs if not d.startswith('.')]

            for file in files:
                # Skip hidden files except .env files
                if file.startswith('.') and not file.startswith('.env'):
                    continue

                file_path = os.path.join(root, file)
                arcname = os.path.relpath(file_path, '.')

                # Handle docker-compose.yml specially for test environment
                if file == 'docker-compose.yml' and 'test' in self.config_file:
                    # Create modified docker-compose for test environment
                    modified_compose = self.create_test_docker_compose(file_path)
                    temp_compose_path = os.path.join(temp_dir, 'docker-compose.yml')
                    with open(temp_compose_path, 'w') as f:
                        f.write(modified_compose)
                    tar.add(temp_compose_path, arcname='docker-compose.yml')
                # Handle .env files for test environment
                elif file.startswith('.env') and 'test' in self.config_file:
                    if file == '.env.test':
                        # Copy .env.test as .env for test environment
                        temp_env_path = os.path.join(temp_dir, '.env')
                        with open(file_path, 'r') as src, open(temp_env_path, 'w') as dst:
                            dst.write(src.read())
                        tar.add(temp_env_path, arcname='.env')
                    # Skip other .env files in test environment
                else:
                    if not file.startswith('.'):
                        file_path = os.path.join(root, file)
                        arcname = os.path.relpath(file_path, '.')
                        tar.add(file_path, arcname=arcname)

        return package_path

    def create_test_docker_compose(self, original_compose_path: str) -> str:
        """Create modified docker-compose.yml for test environment"""
        with open(original_compose_path, 'r') as f:
            content = f.read()

        # Replace container names and ports for test environment
        replacements = {
            'calejo-control-adapter': 'calejo-control-adapter-test',
            'calejo-postgres': 'calejo-postgres-test',
            'calejo-prometheus': 'calejo-prometheus-test',
            'calejo-grafana': 'calejo-grafana-test',
            '"8080:8080"': '"8081:8080"', # Test app port
            '"4840:4840"': '"4841:4840"', # Test OPC UA port
            '"502:502"': '"503:502"', # Test Modbus port
            '"9090:9090"': '"9092:9090"', # Test Prometheus metrics
            '"5432:5432"': '"5433:5432"', # Test PostgreSQL port
            '"9091:9090"': '"9093:9090"', # Test Prometheus UI
            '"3000:3000"': '"3001:3000"', # Test Grafana port
            'calejo': 'calejo_test', # Test database name
            'calejo-network': 'calejo-network-test',
            '@postgres:5432': '@calejo_test-postgres-test:5432', # Fix database hostname
            ' - DATABASE_URL=postgresql://calejo_test:password@calejo_test-postgres-test:5432/calejo_test': ' # DATABASE_URL removed - using .env file instead' # Remove DATABASE_URL to use .env file
        }

        for old, new in replacements.items():
            content = content.replace(old, new)

        return content

    def deploy(self, dry_run: bool = False):
        """Main deployment process"""
        print("🚀 Starting SSH deployment...")

@@ -265,10 +214,8 @@ class SSHDeployer:

        # Wait for services
        print("⏳ Waiting for services to start...")
        # Determine health check port based on environment
        health_port = "8081" if 'test' in self.config_file else "8080"
        for i in range(30):
            if self.execute_remote(f"curl -s http://localhost:{health_port}/health > /dev/null", "", silent=True):
            if self.execute_remote("curl -s http://localhost:8080/health > /dev/null", "", silent=True):
                print(" ✅ Services started successfully")
                break
            print(f" ⏳ Waiting... ({i+1}/30)")

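The loop above polls the health endpoint via curl over SSH; a rough local equivalent as a standalone sketch, assuming the port convention shown in the diff (8081 for the test deployment, 8080 otherwise):

    import time
    import urllib.error
    import urllib.request

    def wait_for_health(port: int = 8081, attempts: int = 30, delay: float = 1.0) -> bool:
        """Poll /health until it responds with 200 or the attempts run out."""
        url = f"http://localhost:{port}/health"
        for i in range(attempts):
            try:
                with urllib.request.urlopen(url, timeout=2) as resp:
                    if resp.status == 200:
                        return True
            except (urllib.error.URLError, OSError):
                pass  # service not up yet
            print(f"Waiting... ({i + 1}/{attempts})")
            time.sleep(delay)
        return False
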
@@ -1,10 +1,3 @@
GET http://95.111.206.155:8081/api/v1/dashboard/discovery/results/scan_20251107_092049 404 (Not Found)
(anonymous) @ discovery.js:114
setInterval
pollScanStatus @ discovery.js:112
startDiscoveryScan @ discovery.js:81
await in startDiscoveryScan
(anonymous) @ discovery.js:34
#!/usr/bin/env python
"""
Mock-Dependent End-to-End Test Runner

@@ -15,7 +15,7 @@ from .configuration_manager import (
    configuration_manager, OPCUAConfig, ModbusTCPConfig, PumpStationConfig,
    PumpConfig, SafetyLimitsConfig, DataPointMapping, ProtocolType, ProtocolMapping
)
from src.discovery.protocol_discovery_persistent import persistent_discovery_service, DiscoveryStatus, DiscoveredEndpoint
from src.discovery.protocol_discovery_fast import discovery_service, DiscoveryStatus, DiscoveredEndpoint
from datetime import datetime

logger = logging.getLogger(__name__)

@@ -975,7 +975,7 @@ async def delete_protocol_mapping(mapping_id: str):
async def get_discovery_status():
    """Get current discovery service status"""
    try:
        status = persistent_discovery_service.get_discovery_status()
        status = discovery_service.get_discovery_status()
        return {
            "success": True,
            "status": status

@@ -990,7 +990,7 @@ async def start_discovery_scan(background_tasks: BackgroundTasks):
    """Start a new discovery scan"""
    try:
        # Check if scan is already running
        status = persistent_discovery_service.get_discovery_status()
        status = discovery_service.get_discovery_status()
        if status["is_scanning"]:
            raise HTTPException(status_code=409, detail="Discovery scan already in progress")

@@ -998,7 +998,7 @@ async def start_discovery_scan(background_tasks: BackgroundTasks):
        scan_id = f"scan_{datetime.now().strftime('%Y%m%d_%H%M%S')}"

        async def run_discovery():
            await persistent_discovery_service.discover_all_protocols(scan_id)
            await discovery_service.discover_all_protocols(scan_id)

        background_tasks.add_task(run_discovery)

@@ -1018,33 +1018,33 @@ async def start_discovery_scan(background_tasks: BackgroundTasks):
async def get_discovery_results(scan_id: str):
    """Get results for a specific discovery scan"""
    try:
        result = persistent_discovery_service.get_scan_result(scan_id)
        result = discovery_service.get_scan_result(scan_id)

        if not result:
            raise HTTPException(status_code=404, detail=f"Discovery scan {scan_id} not found")

        # Convert discovered endpoints to dict format
        endpoints_data = []
        for endpoint in result["discovered_endpoints"]:
        for endpoint in result.discovered_endpoints:
            endpoint_data = {
                "protocol_type": endpoint.get("protocol_type"),
                "address": endpoint.get("address"),
                "port": endpoint.get("port"),
                "device_id": endpoint.get("device_id"),
                "device_name": endpoint.get("device_name"),
                "capabilities": endpoint.get("capabilities", []),
                "response_time": endpoint.get("response_time"),
                "discovered_at": endpoint.get("discovered_at")
                "protocol_type": endpoint.protocol_type.value,
                "address": endpoint.address,
                "port": endpoint.port,
                "device_id": endpoint.device_id,
                "device_name": endpoint.device_name,
                "capabilities": endpoint.capabilities,
                "response_time": endpoint.response_time,
                "discovered_at": endpoint.discovered_at.isoformat() if endpoint.discovered_at else None
            }
            endpoints_data.append(endpoint_data)

        return {
            "success": True,
            "scan_id": scan_id,
            "status": result.get("status"),
            "scan_duration": None, # Not available in current implementation
            "errors": result.get("error_message"),
            "timestamp": result.get("scan_started_at"),
            "status": result.status.value,
            "scan_duration": result.scan_duration,
            "errors": result.errors,
            "timestamp": result.timestamp.isoformat() if result.timestamp else None,
            "discovered_endpoints": endpoints_data
        }
    except HTTPException:

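The hunk above swaps dict-style access (endpoint.get(...), matching the JSONB rows returned by the persistent service) for attribute access on DiscoveredEndpoint dataclasses (returned by the in-memory service). A small helper along these lines could serialize either shape; it is an illustrative sketch, not code from either commit:

    from datetime import datetime
    from typing import Any, Dict

    def endpoint_to_dict(endpoint: Any) -> Dict[str, Any]:
        """Serialize a discovered endpoint, whether it is a plain dict or a dataclass."""
        if isinstance(endpoint, dict):
            return {
                "protocol_type": endpoint.get("protocol_type"),
                "address": endpoint.get("address"),
                "port": endpoint.get("port"),
                "device_id": endpoint.get("device_id"),
                "device_name": endpoint.get("device_name"),
                "capabilities": endpoint.get("capabilities", []),
                "response_time": endpoint.get("response_time"),
                "discovered_at": endpoint.get("discovered_at"),
            }
        discovered_at = endpoint.discovered_at
        return {
            "protocol_type": endpoint.protocol_type.value,
            "address": endpoint.address,
            "port": endpoint.port,
            "device_id": endpoint.device_id,
            "device_name": endpoint.device_name,
            "capabilities": endpoint.capabilities,
            "response_time": endpoint.response_time,
            "discovered_at": discovered_at.isoformat() if isinstance(discovered_at, datetime) else discovered_at,
        }
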
@@ -1059,12 +1059,12 @@ async def get_recent_discoveries():
    """Get most recently discovered endpoints"""
    try:
        # Get recent scan results and extract endpoints
        status = persistent_discovery_service.get_discovery_status()
        status = discovery_service.get_discovery_status()
        recent_scans = status.get("recent_scans", [])[-5:] # Last 5 scans

        recent_endpoints = []
        for scan_id in recent_scans:
            result = persistent_discovery_service.get_scan_result(scan_id)
            result = discovery_service.get_scan_result(scan_id)
            if result and result.discovered_endpoints:
                recent_endpoints.extend(result.discovered_endpoints)

@@ -1076,14 +1076,14 @@ async def get_recent_discoveries():
        endpoints_data = []
        for endpoint in recent_endpoints:
            endpoint_data = {
                "protocol_type": endpoint.get("protocol_type"),
                "address": endpoint.get("address"),
                "port": endpoint.get("port"),
                "device_id": endpoint.get("device_id"),
                "device_name": endpoint.get("device_name"),
                "capabilities": endpoint.get("capabilities", []),
                "response_time": endpoint.get("response_time"),
                "discovered_at": endpoint.get("discovered_at")
                "protocol_type": endpoint.protocol_type.value,
                "address": endpoint.address,
                "port": endpoint.port,
                "device_id": endpoint.device_id,
                "device_name": endpoint.device_name,
                "capabilities": endpoint.capabilities,
                "response_time": endpoint.response_time,
                "discovered_at": endpoint.discovered_at.isoformat() if endpoint.discovered_at else None
            }
            endpoints_data.append(endpoint_data)

@@ -1100,7 +1100,7 @@ async def get_recent_discoveries():
async def apply_discovery_results(scan_id: str, station_id: str, pump_id: str, data_type: str, db_source: str):
    """Apply discovered endpoints as protocol mappings"""
    try:
        result = persistent_discovery_service.get_scan_result(scan_id)
        result = discovery_service.get_scan_result(scan_id)

        if not result:
            raise HTTPException(status_code=404, detail=f"Discovery scan {scan_id} not found")

@@ -1,258 +0,0 @@
"""
Protocol Discovery Service - Persistent version with database storage
"""
import asyncio
import json
import logging
from datetime import datetime
from typing import List, Dict, Any, Optional
from enum import Enum
from dataclasses import dataclass, asdict

from sqlalchemy import text
from config.settings import settings
from src.database.flexible_client import FlexibleDatabaseClient

logger = logging.getLogger(__name__)


class DiscoveryStatus(Enum):
    """Discovery operation status"""
    PENDING = "pending"
    RUNNING = "running"
    COMPLETED = "completed"
    FAILED = "failed"


class ProtocolType(Enum):
    MODBUS_TCP = "modbus_tcp"
    MODBUS_RTU = "modbus_rtu"
    OPC_UA = "opc_ua"
    REST_API = "rest_api"


@dataclass
class DiscoveredEndpoint:
    protocol_type: ProtocolType
    address: str
    port: Optional[int] = None
    device_id: Optional[str] = None
    device_name: Optional[str] = None
    capabilities: Optional[List[str]] = None
    response_time: Optional[float] = None
    discovered_at: Optional[datetime] = None

    def __post_init__(self):
        if self.capabilities is None:
            self.capabilities = []


@dataclass
class DiscoveryResult:
    scan_id: str
    status: DiscoveryStatus
    discovered_endpoints: List[DiscoveredEndpoint]
    scan_started_at: datetime
    scan_completed_at: Optional[datetime] = None
    error_message: Optional[str] = None


class PersistentProtocolDiscoveryService:
    """
    Protocol discovery service with database persistence
    """

    def __init__(self):
        self._current_scan_id: Optional[str] = None
        self._db_client = FlexibleDatabaseClient(settings.database_url)

    async def initialize(self):
        """Initialize database connection"""
        try:
            await self._db_client.connect()
            logger.info("Discovery service database initialized")
        except Exception as e:
            logger.error(f"Failed to initialize discovery service database: {e}")

    def get_discovery_status(self) -> Dict[str, Any]:
        """Get current discovery service status"""
        try:
            # Get recent scans from database
            query = text("""
                SELECT scan_id, status, scan_started_at, scan_completed_at
                FROM discovery_results
                ORDER BY scan_started_at DESC
                LIMIT 5
            """)

            with self._db_client.engine.connect() as conn:
                result = conn.execute(query)
                recent_scans = [
                    {
                        'scan_id': row[0],
                        'status': row[1],
                        'scan_started_at': row[2].isoformat() if row[2] else None,
                        'scan_completed_at': row[3].isoformat() if row[3] else None
                    }
                    for row in result
                ]

            # Get total discovered endpoints
            query = text("""
                SELECT COUNT(*)
                FROM discovery_results dr,
                     jsonb_array_elements(dr.discovered_endpoints) AS endpoint
                WHERE dr.status = 'completed'
            """)

            with self._db_client.engine.connect() as conn:
                result = conn.execute(query)
                total_endpoints = result.scalar() or 0

            return {
                "current_scan_id": self._current_scan_id,
                "is_scanning": self._current_scan_id is not None,
                "recent_scans": recent_scans,
                "total_discovered_endpoints": total_endpoints
            }
        except Exception as e:
            logger.error(f"Error getting discovery status: {e}")
            return {
                "current_scan_id": None,
                "is_scanning": False,
                "recent_scans": [],
                "total_discovered_endpoints": 0
            }

    def get_scan_result(self, scan_id: str) -> Optional[Dict[str, Any]]:
        """Get result for a specific scan from database"""
        try:
            query = text("""
                SELECT scan_id, status, discovered_endpoints,
                       scan_started_at, scan_completed_at, error_message
                FROM discovery_results
                WHERE scan_id = :scan_id
            """)

            with self._db_client.engine.connect() as conn:
                result = conn.execute(query, {"scan_id": scan_id})
                row = result.fetchone()

                if row:
                    return {
                        "scan_id": row[0],
                        "status": row[1],
                        "discovered_endpoints": row[2] if row[2] else [],
                        "scan_started_at": row[3].isoformat() if row[3] else None,
                        "scan_completed_at": row[4].isoformat() if row[4] else None,
                        "error_message": row[5]
                    }
                return None
        except Exception as e:
            logger.error(f"Error getting scan result {scan_id}: {e}")
            return None

    async def discover_all_protocols(self, scan_id: str) -> None:
        """
        Discover all available protocols (simulated for now)
        """
        try:
            # Store scan as started
            await self._store_scan_result(
                scan_id=scan_id,
                status=DiscoveryStatus.RUNNING,
                discovered_endpoints=[],
                scan_started_at=datetime.now(),
                scan_completed_at=None,
                error_message=None
            )

            # Simulate discovery process
            await asyncio.sleep(2)

            # Create mock discovered endpoints
            discovered_endpoints = [
                {
                    "protocol_type": "modbus_tcp",
                    "address": "192.168.1.100",
                    "port": 502,
                    "device_id": "pump_controller_001",
                    "device_name": "Main Pump Controller",
                    "capabilities": ["read_coils", "read_holding_registers"],
                    "response_time": 0.15,
                    "discovered_at": datetime.now().isoformat()
                },
                {
                    "protocol_type": "opc_ua",
                    "address": "192.168.1.101",
                    "port": 4840,
                    "device_id": "scada_server_001",
                    "device_name": "SCADA Server",
                    "capabilities": ["browse", "read", "write"],
                    "response_time": 0.25,
                    "discovered_at": datetime.now().isoformat()
                }
            ]

            # Store completed scan
            await self._store_scan_result(
                scan_id=scan_id,
                status=DiscoveryStatus.COMPLETED,
                discovered_endpoints=discovered_endpoints,
                scan_started_at=datetime.now(),
                scan_completed_at=datetime.now(),
                error_message=None
            )

            logger.info(f"Discovery scan {scan_id} completed with {len(discovered_endpoints)} endpoints")

        except Exception as e:
            logger.error(f"Discovery scan {scan_id} failed: {e}")
            await self._store_scan_result(
                scan_id=scan_id,
                status=DiscoveryStatus.FAILED,
                discovered_endpoints=[],
                scan_started_at=datetime.now(),
                scan_completed_at=datetime.now(),
                error_message=str(e)
            )

    async def _store_scan_result(
        self,
        scan_id: str,
        status: DiscoveryStatus,
        discovered_endpoints: List[Dict[str, Any]],
        scan_started_at: datetime,
        scan_completed_at: Optional[datetime] = None,
        error_message: Optional[str] = None
    ) -> None:
        """Store scan result in database"""
        try:
            query = text("""
                INSERT INTO discovery_results
                (scan_id, status, discovered_endpoints, scan_started_at, scan_completed_at, error_message)
                VALUES (:scan_id, :status, :discovered_endpoints, :scan_started_at, :scan_completed_at, :error_message)
                ON CONFLICT (scan_id) DO UPDATE SET
                    status = EXCLUDED.status,
                    discovered_endpoints = EXCLUDED.discovered_endpoints,
                    scan_completed_at = EXCLUDED.scan_completed_at,
                    error_message = EXCLUDED.error_message
            """)

            with self._db_client.engine.connect() as conn:
                conn.execute(query, {
                    "scan_id": scan_id,
                    "status": status.value,
                    "discovered_endpoints": json.dumps(discovered_endpoints),
                    "scan_started_at": scan_started_at,
                    "scan_completed_at": scan_completed_at,
                    "error_message": error_message
                })
                conn.commit()

        except Exception as e:
            logger.error(f"Failed to store scan result {scan_id}: {e}")


# Global instance
persistent_discovery_service = PersistentProtocolDiscoveryService()

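For reference, a minimal driver showing how the deleted service above is intended to be used (initialize the database connection, run a scan, read back the stored row). This is a sketch that assumes a reachable database configured via settings.database_url; it is not part of either commit:

    import asyncio
    from datetime import datetime

    from src.discovery.protocol_discovery_persistent import persistent_discovery_service

    async def main() -> None:
        # Open the database connection used for storing scan results.
        await persistent_discovery_service.initialize()

        # Run one (simulated) scan and persist its outcome.
        scan_id = f"scan_{datetime.now().strftime('%Y%m%d_%H%M%S')}"
        await persistent_discovery_service.discover_all_protocols(scan_id)

        # Read the stored row back as a plain dict.
        result = persistent_discovery_service.get_scan_result(scan_id)
        if result:
            print(result["status"], len(result["discovered_endpoints"]))

    if __name__ == "__main__":
        asyncio.run(main())
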
@@ -177,11 +177,6 @@ class CalejoControlAdapter:
        await self.db_client.connect()
        logger.info("database_connected")

        # Initialize persistent discovery service
        from src.discovery.protocol_discovery_persistent import persistent_discovery_service
        await persistent_discovery_service.initialize()
        logger.info("persistent_discovery_service_initialized")

        # Load safety limits
        await self.safety_enforcer.load_safety_limits()
        logger.info("safety_limits_loaded")

@@ -4,7 +4,6 @@ Start Dashboard Server for Protocol Mapping Testing
"""

import os
import asyncio
import uvicorn
from fastapi import FastAPI
from fastapi.staticfiles import StaticFiles

@@ -14,7 +13,6 @@ from fastapi import Request

from src.dashboard.api import dashboard_router
from src.dashboard.templates import DASHBOARD_HTML
from src.discovery.protocol_discovery_persistent import persistent_discovery_service

# Create FastAPI app
app = FastAPI(title="Calejo Control Adapter Dashboard", version="1.0.0")

@@ -40,22 +38,6 @@ async def health_check():
    """Health check endpoint"""
    return {"status": "healthy", "service": "dashboard"}

async def initialize_services():
    """Initialize services before starting the server"""
    try:
        print("🔄 Starting persistent discovery service initialization...")
        await persistent_discovery_service.initialize()
        print("✅ Persistent discovery service initialized")

        # Test that it's working
        status = persistent_discovery_service.get_discovery_status()
        print(f"📊 Discovery status: {status}")

    except Exception as e:
        print(f"❌ Failed to initialize persistent discovery service: {e}")
        import traceback
        traceback.print_exc()

if __name__ == "__main__":
    # Get port from environment variable or default to 8080
    port = int(os.getenv("REST_API_PORT", "8080"))

@@ -64,9 +46,6 @@ if __name__ == "__main__":
    print(f"📊 Dashboard available at: http://localhost:{port}")
    print("📊 Protocol Mapping tab should be visible in the navigation")

    # Initialize services
    asyncio.run(initialize_services())

    uvicorn.run(
        app,
        host="0.0.0.0",