Add comprehensive automated tests for mock SCADA and optimizer services
parent 8a8b0de337 · commit d6635806f3
@@ -231,6 +231,78 @@ rm docker-compose.test.yml
rm -rf tests/mock_services/
```
## Automated Testing

The test environment includes comprehensive automated tests:

### Test Categories

1. **Health Checks** - Verify all services are running
2. **API Tests** - Test REST API endpoints
3. **Unit Tests** - Test individual components
4. **Integration Tests** - Test service interactions
5. **End-to-End Tests** - Test complete workflows
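
Each category maps onto a pytest marker defined in `pytest-mock.ini` (added below). As a minimal sketch (the test name and endpoint check are illustrative, not part of the committed suite), a test opts into a category like this:

```python
import pytest
import requests

# Selected by e.g. `pytest -m integration` or `pytest -m scada`.
@pytest.mark.integration
@pytest.mark.scada
def test_mock_scada_is_reachable():
    response = requests.get("http://localhost:8081/health", timeout=5)
    assert response.status_code == 200
```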

### Running Tests

#### Using the Test Runner Script

```bash
# Run all tests
./scripts/run-mock-tests.sh

# Run specific test categories
./scripts/run-mock-tests.sh --health
./scripts/run-mock-tests.sh --api
./scripts/run-mock-tests.sh --unit
./scripts/run-mock-tests.sh --integration
./scripts/run-mock-tests.sh --e2e

# Wait for services only
./scripts/run-mock-tests.sh --wait-only
```

#### Using Pytest Directly

```bash
# Run all tests
python -m pytest tests/

# Run mock service integration tests
python -m pytest tests/integration/test_mock_services.py -v

# Run with specific markers
python -m pytest tests/ -m "mock" -v
python -m pytest tests/ -m "integration" -v
```

### Test Coverage

The automated tests cover:

- **Mock SCADA Service**: Health, data retrieval, equipment control, alarms
- **Mock Optimizer Service**: Health, model listing, optimization, forecasting
- **Calejo Control Adapter**: Health, dashboard, API endpoints
- **End-to-End Workflows**: SCADA to optimization, alarm response, forecast planning

### Test Configuration

- **pytest-mock.ini**: Configuration for mock service tests
- **60-second timeout**: Services must be ready within 60 seconds
- **Comprehensive error handling**: Tests handle service unavailability gracefully
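
The graceful handling of unavailable services can be sketched with a small helper (the `require_service` name is hypothetical, not part of the suite; the committed tests use a session-scoped fixture, shown in `tests/integration/test_mock_services.py` below):

```python
import pytest
import requests

def require_service(base_url: str) -> None:
    # Hypothetical helper: skip instead of fail when a mock service is
    # down, so an offline environment reports skips rather than errors.
    try:
        if requests.get(f"{base_url}/health", timeout=2).status_code != 200:
            pytest.skip(f"service at {base_url} is not healthy")
    except requests.RequestException:
        pytest.skip(f"service at {base_url} is unreachable")
```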

## Continuous Integration

For CI/CD pipelines, the test runner can be integrated:

```yaml
# Example GitHub Actions workflow
- name: Run Mock Service Tests
  run: |
    ./scripts/setup-test-environment.sh
    ./scripts/run-mock-tests.sh --all
```
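
For CI systems that collect test reports, pytest's built-in JUnit XML output can also be enabled per run (`junit_family` is left commented out in `pytest-mock.ini` below). A minimal sketch using the same in-process entry point the test module itself uses; the report path is an assumption, not pipeline-mandated:

```python
import sys

import pytest

# Write a JUnit XML report for the CI system to collect; adjust
# "test-results.xml" to match the pipeline's artifact configuration.
sys.exit(pytest.main(["tests/", "-v", "--junitxml=test-results.xml"]))
```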

## Next Steps

After setting up the test environment:

pytest-mock.ini
@@ -0,0 +1,40 @@
[pytest]
# Configuration for mock service tests

# Test discovery
python_files = test_*.py
python_classes = Test*
python_functions = test_*

# Output formatting
addopts =
    -v
    --tb=short
    --strict-markers
    --strict-config
    --disable-warnings

# Markers
markers =
    mock: Tests that require mock services
    scada: Tests for SCADA functionality
    optimizer: Tests for optimizer functionality
    integration: Integration tests
    e2e: End-to-end tests
    slow: Slow running tests

# Filter warnings
filterwarnings =
    ignore::DeprecationWarning
    ignore::PendingDeprecationWarning

# Test timeout (seconds; provided by the pytest-timeout plugin)
timeout = 30

# Coverage configuration (if coverage is installed)
# --cov=src
# --cov-report=term-missing
# --cov-report=html

# JUnit XML output (for CI/CD)
# junit_family = xunit2

scripts/run-mock-tests.sh
@@ -0,0 +1,338 @@
#!/bin/bash

# Automated test runner for mock SCADA and optimizer services

set -e

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

# Function to print colored output
print_status() {
    echo -e "${BLUE}[INFO]${NC} $1"
}

print_success() {
    echo -e "${GREEN}[SUCCESS]${NC} $1"
}

print_warning() {
    echo -e "${YELLOW}[WARNING]${NC} $1"
}

print_error() {
    echo -e "${RED}[ERROR]${NC} $1"
}

# Function to wait for services
wait_for_services() {
    print_status "Waiting for mock services to be ready..."

    max_wait=60
    start_time=$(date +%s)

    while true; do
        current_time=$(date +%s)
        elapsed=$((current_time - start_time))

        if [ $elapsed -ge $max_wait ]; then
            print_error "Services not ready within $max_wait seconds"
            return 1
        fi

        # Check if all services are responding
        scada_ready=$(curl -s http://localhost:8081/health | grep -q "healthy" && echo "yes" || echo "no")
        optimizer_ready=$(curl -s http://localhost:8082/health | grep -q "healthy" && echo "yes" || echo "no")
        calejo_ready=$(curl -s http://localhost:8080/health > /dev/null && echo "yes" || echo "no")

        if [ "$scada_ready" = "yes" ] && [ "$optimizer_ready" = "yes" ] && [ "$calejo_ready" = "yes" ]; then
            print_success "All services are ready!"
            return 0
        fi

        echo "  Waiting... ($elapsed/$max_wait seconds)"
        sleep 5
    done
}

# Function to run specific test categories
run_unit_tests() {
    print_status "Running unit tests..."
    if python -m pytest tests/unit/ -v --tb=short; then
        print_success "Unit tests passed"
        return 0
    else
        print_error "Unit tests failed"
        return 1
    fi
}

run_integration_tests() {
    print_status "Running integration tests..."
    if python -m pytest tests/integration/test_mock_services.py -v --tb=short; then
        print_success "Integration tests passed"
        return 0
    else
        print_error "Integration tests failed"
        return 1
    fi
}

run_all_tests() {
    print_status "Running all tests..."
    if python -m pytest tests/ -v --tb=short; then
        print_success "All tests passed"
        return 0
    else
        print_error "Some tests failed"
        return 1
    fi
}

run_health_checks() {
    print_status "Running health checks..."

    services=(
        "Calejo Control Adapter:8080"
        "Mock SCADA:8081"
        "Mock Optimizer:8082"
    )

    all_healthy=true

    for service in "${services[@]}"; do
        name="${service%:*}"
        port="${service#*:}"

        if curl -s "http://localhost:$port/health" > /dev/null; then
            print_success "$name is healthy"
        else
            print_error "$name is not responding"
            all_healthy=false
        fi
    done

    if [ "$all_healthy" = "true" ]; then
        print_success "All health checks passed"
        return 0
    else
        print_error "Some health checks failed"
        return 1
    fi
}

run_api_tests() {
    print_status "Running API tests..."

    # Test SCADA API
    print_status "Testing SCADA API..."
    if curl -s http://localhost:8081/api/v1/data | python -m json.tool > /dev/null 2>&1; then
        print_success "SCADA API is accessible"
    else
        print_error "SCADA API test failed"
        return 1
    fi

    # Test Optimizer API
    print_status "Testing Optimizer API..."
    if curl -s http://localhost:8082/api/v1/models | python -m json.tool > /dev/null 2>&1; then
        print_success "Optimizer API is accessible"
    else
        print_error "Optimizer API test failed"
        return 1
    fi

    # Test Calejo API
    print_status "Testing Calejo API..."
    if curl -s http://localhost:8080/health > /dev/null; then
        print_success "Calejo API is accessible"
    else
        print_error "Calejo API test failed"
        return 1
    fi

    print_success "All API tests passed"
    return 0
}

run_end_to_end_test() {
    print_status "Running end-to-end test..."

    # This simulates a complete workflow. Each curl runs inside the `if`
    # condition (instead of checking $? afterwards) so `set -e` cannot
    # short-circuit the step, and -f makes curl fail on HTTP errors so
    # the failure branches are actually reachable.
    print_status "1. Getting SCADA data..."
    if scada_data=$(curl -sf http://localhost:8081/api/v1/data); then
        print_success "SCADA data retrieved"
    else
        print_error "Failed to get SCADA data"
        return 1
    fi

    print_status "2. Running optimization..."
    if optimization_result=$(curl -sf -X POST http://localhost:8082/api/v1/optimize/energy_optimization \
        -H "Content-Type: application/json" \
        -d '{"power_load": 450, "time_of_day": 14, "production_rate": 95}'); then
        print_success "Optimization completed"
    else
        print_error "Optimization failed"
        return 1
    fi

    print_status "3. Testing equipment control..."
    if control_result=$(curl -sf -X POST http://localhost:8081/api/v1/control/pump_1 \
        -H "Content-Type: application/json" \
        -d '{"command": "START"}'); then
        print_success "Equipment control successful"
    else
        print_error "Equipment control failed"
        return 1
    fi

    print_success "End-to-end test completed successfully"
    return 0
}

# Function to display usage
usage() {
    echo "Usage: $0 [options]"
    echo ""
    echo "Options:"
    echo "  --health        Run health checks only"
    echo "  --api           Run API tests only"
    echo "  --unit          Run unit tests only"
    echo "  --integration   Run integration tests only"
    echo "  --e2e           Run end-to-end test only"
    echo "  --all           Run all tests (default)"
    echo "  --wait-only     Only wait for services, don't run tests"
    echo "  -h, --help      Show this help message"
    echo ""
    echo "Examples:"
    echo "  $0                # Run all tests"
    echo "  $0 --health       # Run health checks"
    echo "  $0 --api --e2e    # Run API and end-to-end tests"
}

# Parse command line arguments
HEALTH_ONLY=false
API_ONLY=false
UNIT_ONLY=false
INTEGRATION_ONLY=false
E2E_ONLY=false
ALL_TESTS=true
WAIT_ONLY=false

while [[ $# -gt 0 ]]; do
    case $1 in
        --health)
            HEALTH_ONLY=true
            ALL_TESTS=false
            shift
            ;;
        --api)
            API_ONLY=true
            ALL_TESTS=false
            shift
            ;;
        --unit)
            UNIT_ONLY=true
            ALL_TESTS=false
            shift
            ;;
        --integration)
            INTEGRATION_ONLY=true
            ALL_TESTS=false
            shift
            ;;
        --e2e)
            E2E_ONLY=true
            ALL_TESTS=false
            shift
            ;;
        --all)
            # Explicit form of the default; documented in usage and used by CI
            ALL_TESTS=true
            shift
            ;;
        --wait-only)
            WAIT_ONLY=true
            ALL_TESTS=false
            shift
            ;;
        -h|--help)
            usage
            exit 0
            ;;
        *)
            print_error "Unknown option: $1"
            usage
            exit 1
            ;;
    esac
done

# Main execution
print_status "Starting automated tests for mock deployment..."

# Wait for services
if ! wait_for_services; then
    print_error "Cannot proceed with tests - services not available"
    exit 1
fi

if [ "$WAIT_ONLY" = "true" ]; then
    print_success "Services are ready - exiting as requested"
    exit 0
fi

# Run tests based on options
overall_result=0

if [ "$HEALTH_ONLY" = "true" ] || [ "$ALL_TESTS" = "true" ]; then
    if ! run_health_checks; then
        overall_result=1
    fi
fi

if [ "$API_ONLY" = "true" ] || [ "$ALL_TESTS" = "true" ]; then
    if ! run_api_tests; then
        overall_result=1
    fi
fi

if [ "$UNIT_ONLY" = "true" ] || [ "$ALL_TESTS" = "true" ]; then
    if ! run_unit_tests; then
        overall_result=1
    fi
fi

if [ "$INTEGRATION_ONLY" = "true" ] || [ "$ALL_TESTS" = "true" ]; then
    if ! run_integration_tests; then
        overall_result=1
    fi
fi

if [ "$E2E_ONLY" = "true" ] || [ "$ALL_TESTS" = "true" ]; then
    if ! run_end_to_end_test; then
        overall_result=1
    fi
fi

# Final result
if [ $overall_result -eq 0 ]; then
    echo ""
    print_success "🎉 All tests completed successfully!"
    echo ""
    echo "📊 Test Summary:"
    # Report only the categories that actually ran
    if [ "$HEALTH_ONLY" = "true" ] || [ "$ALL_TESTS" = "true" ]; then echo "  ✅ Health checks passed"; fi
    if [ "$API_ONLY" = "true" ] || [ "$ALL_TESTS" = "true" ]; then echo "  ✅ API tests passed"; fi
    if [ "$UNIT_ONLY" = "true" ] || [ "$ALL_TESTS" = "true" ]; then echo "  ✅ Unit tests passed"; fi
    if [ "$INTEGRATION_ONLY" = "true" ] || [ "$ALL_TESTS" = "true" ]; then echo "  ✅ Integration tests passed"; fi
    if [ "$E2E_ONLY" = "true" ] || [ "$ALL_TESTS" = "true" ]; then echo "  ✅ End-to-end tests passed"; fi
    echo ""
    echo "🚀 Mock deployment is ready for development!"
else
    echo ""
    print_error "❌ Some tests failed. Please check the logs above."
    exit 1
fi

tests/integration/test_mock_services.py
@@ -0,0 +1,402 @@
"""
|
||||
Integration tests for mock SCADA and optimizer services
|
||||
"""
|
||||
|
||||
import pytest
|
||||
import requests
|
||||
import json
|
||||
import time
|
||||
from datetime import datetime
|
||||
|
||||
# Test configuration
|
||||
SCADA_BASE_URL = "http://localhost:8081"
|
||||
OPTIMIZER_BASE_URL = "http://localhost:8082"
|
||||
CALEJO_BASE_URL = "http://localhost:8080"
|
||||
|
||||


class TestMockSCADAService:
    """Test suite for mock SCADA service"""

    def test_scada_health(self):
        """Test SCADA service health endpoint"""
        response = requests.get(f"{SCADA_BASE_URL}/health")
        assert response.status_code == 200
        data = response.json()
        assert data["status"] == "healthy"
        assert data["service"] == "mock-scada"

    def test_scada_get_all_data(self):
        """Test retrieving all SCADA data"""
        response = requests.get(f"{SCADA_BASE_URL}/api/v1/data")
        assert response.status_code == 200
        data = response.json()

        # Verify structure
        assert "timestamp" in data
        assert "data" in data
        assert "equipment" in data

        # Verify data fields
        scada_data = data["data"]
        assert "temperature" in scada_data
        assert "pressure" in scada_data
        assert "flow_rate" in scada_data
        assert "level" in scada_data
        assert "power" in scada_data
        assert "status" in scada_data
        assert "efficiency" in scada_data

        # Verify equipment status
        equipment = data["equipment"]
        assert "pump_1" in equipment
        assert "valve_1" in equipment
        assert "compressor" in equipment

    def test_scada_get_specific_data(self):
        """Test retrieving specific SCADA data tags"""
        tags = ["temperature", "pressure", "flow_rate"]

        for tag in tags:
            response = requests.get(f"{SCADA_BASE_URL}/api/v1/data/{tag}")
            assert response.status_code == 200
            data = response.json()

            assert data["tag"] == tag
            assert "value" in data
            assert "unit" in data
            assert "timestamp" in data

    def test_scada_control_equipment(self):
        """Test controlling SCADA equipment"""
        equipment = "pump_1"
        command = "START"

        response = requests.post(
            f"{SCADA_BASE_URL}/api/v1/control/{equipment}",
            json={"command": command}
        )
        assert response.status_code == 200
        data = response.json()

        assert data["equipment"] == equipment
        assert data["current_status"] == command
        assert "previous_status" in data
        assert "timestamp" in data
        assert "message" in data

    def test_scada_alarms(self):
        """Test SCADA alarm system"""
        response = requests.get(f"{SCADA_BASE_URL}/api/v1/alarms")
        assert response.status_code == 200
        data = response.json()

        assert "alarms" in data
        # Alarms may be empty or contain alarm objects
        assert isinstance(data["alarms"], list)

    def test_scada_invalid_tag(self):
        """Test requesting invalid SCADA tag"""
        response = requests.get(f"{SCADA_BASE_URL}/api/v1/data/invalid_tag")
        assert response.status_code == 404
        data = response.json()
        assert "error" in data

    def test_scada_invalid_control(self):
        """Test invalid control command"""
        response = requests.post(
            f"{SCADA_BASE_URL}/api/v1/control/invalid_equipment",
            json={"command": "INVALID_COMMAND"}
        )
        assert response.status_code == 400


class TestMockOptimizerService:
    """Test suite for mock optimizer service"""

    def test_optimizer_health(self):
        """Test optimizer service health endpoint"""
        response = requests.get(f"{OPTIMIZER_BASE_URL}/health")
        assert response.status_code == 200
        data = response.json()
        assert data["status"] == "healthy"
        assert data["service"] == "mock-optimizer"

    def test_optimizer_get_models(self):
        """Test retrieving available optimization models"""
        response = requests.get(f"{OPTIMIZER_BASE_URL}/api/v1/models")
        assert response.status_code == 200
        data = response.json()

        assert "models" in data
        models = data["models"]

        # Verify model structure
        assert "energy_optimization" in models
        assert "production_optimization" in models
        assert "cost_optimization" in models

        # Verify model details
        energy_model = models["energy_optimization"]
        assert energy_model["name"] == "Energy Consumption Optimizer"
        assert "parameters" in energy_model
        assert isinstance(energy_model["parameters"], list)

    def test_energy_optimization(self):
        """Test energy optimization model"""
        test_data = {
            "power_load": 450.0,
            "time_of_day": 14,
            "production_rate": 95.0
        }

        response = requests.post(
            f"{OPTIMIZER_BASE_URL}/api/v1/optimize/energy_optimization",
            json=test_data
        )
        assert response.status_code == 200
        data = response.json()

        assert "optimization_id" in data
        assert data["model"] == "energy_optimization"
        assert "result" in data
        assert "processing_time" in data
        assert "timestamp" in data

        result = data["result"]
        assert "optimal_power_setpoint" in result
        assert "recommended_actions" in result
        assert "estimated_savings" in result
        assert "confidence" in result

    def test_production_optimization(self):
        """Test production optimization model"""
        test_data = {
            "raw_material_quality": 85.0,
            "machine_utilization": 92.0,
            "operator_skill": 88.0
        }

        response = requests.post(
            f"{OPTIMIZER_BASE_URL}/api/v1/optimize/production_optimization",
            json=test_data
        )
        assert response.status_code == 200
        data = response.json()

        assert data["model"] == "production_optimization"
        result = data["result"]
        assert "optimal_production_rate" in result
        assert "efficiency_gain" in result

    def test_cost_optimization(self):
        """Test cost optimization model"""
        test_data = {
            "energy_cost": 55.0,
            "labor_cost": 30.0,
            "maintenance_cost": 15.0
        }

        response = requests.post(
            f"{OPTIMIZER_BASE_URL}/api/v1/optimize/cost_optimization",
            json=test_data
        )
        assert response.status_code == 200
        data = response.json()

        assert data["model"] == "cost_optimization"
        result = data["result"]
        assert "optimal_cost_structure" in result
        assert "cost_reduction" in result

    def test_optimizer_history(self):
        """Test optimization history"""
        response = requests.get(f"{OPTIMIZER_BASE_URL}/api/v1/history")
        assert response.status_code == 200
        data = response.json()

        assert "history" in data
        assert "total_optimizations" in data
        assert isinstance(data["history"], list)

    def test_optimizer_forecast(self):
        """Test forecast generation"""
        forecast_data = {"hours": 24}

        response = requests.post(
            f"{OPTIMIZER_BASE_URL}/api/v1/forecast",
            json=forecast_data
        )
        assert response.status_code == 200
        data = response.json()

        assert "forecast" in data
        assert "generated_at" in data
        assert "horizon_hours" in data

        forecast = data["forecast"]
        assert len(forecast) == 24

        # Verify forecast structure
        for item in forecast:
            assert "timestamp" in item
            assert "energy_consumption" in item
            assert "production_rate" in item
            assert "efficiency" in item
            assert "cost" in item

    def test_optimizer_invalid_model(self):
        """Test optimization with invalid model"""
        response = requests.post(
            f"{OPTIMIZER_BASE_URL}/api/v1/optimize/invalid_model",
            json={"test": "data"}
        )
        assert response.status_code == 404

    def test_optimizer_missing_data(self):
        """Test optimization with missing data"""
        response = requests.post(
            f"{OPTIMIZER_BASE_URL}/api/v1/optimize/energy_optimization",
            json={}
        )
        assert response.status_code == 400


class TestCalejoIntegration:
    """Test Calejo Control Adapter integration with mock services"""

    def test_calejo_health(self):
        """Test Calejo Control Adapter health"""
        response = requests.get(f"{CALEJO_BASE_URL}/health")
        assert response.status_code == 200
        # Health endpoint should return success status

    def test_calejo_dashboard(self):
        """Test dashboard accessibility"""
        response = requests.get(f"{CALEJO_BASE_URL}/dashboard")
        # Dashboard might return 200 or redirect
        assert response.status_code in [200, 302]

    def test_calejo_api_status(self):
        """Test API status endpoint"""
        response = requests.get(f"{CALEJO_BASE_URL}/api/v1/status")
        # Status endpoint should be accessible
        assert response.status_code in [200, 404]  # Might not be implemented yet

    def test_calejo_metrics(self):
        """Test metrics endpoint"""
        response = requests.get(f"{CALEJO_BASE_URL}/api/v1/metrics")
        # Metrics endpoint should be accessible
        assert response.status_code in [200, 404]  # Might not be implemented yet


class TestEndToEndWorkflow:
    """Test end-to-end workflows with mock services"""

    def test_scada_to_optimizer_workflow(self):
        """Test workflow: SCADA data -> Optimization -> Control"""

        # 1. Get current SCADA data
        scada_response = requests.get(f"{SCADA_BASE_URL}/api/v1/data")
        assert scada_response.status_code == 200
        scada_data = scada_response.json()

        # 2. Run energy optimization based on SCADA data
        optimization_data = {
            "power_load": scada_data["data"]["power"]["value"],
            "time_of_day": datetime.now().hour,
            "production_rate": scada_data["data"]["flow_rate"]["value"]
        }

        opt_response = requests.post(
            f"{OPTIMIZER_BASE_URL}/api/v1/optimize/energy_optimization",
            json=optimization_data
        )
        assert opt_response.status_code == 200
        optimization_result = opt_response.json()

        # 3. Apply optimization recommendations (simulate control)
        # This would typically be done by Calejo Control Adapter
        assert "result" in optimization_result
        assert "recommended_actions" in optimization_result["result"]

    def test_alarm_to_optimization_workflow(self):
        """Test workflow: Alarm detection -> Optimization response"""

        # 1. Check for alarms
        alarm_response = requests.get(f"{SCADA_BASE_URL}/api/v1/alarms")
        assert alarm_response.status_code == 200
        alarms = alarm_response.json()["alarms"]

        # 2. If alarms exist, run appropriate optimization
        if alarms:
            # For temperature alarm, run energy optimization
            opt_response = requests.post(
                f"{OPTIMIZER_BASE_URL}/api/v1/optimize/energy_optimization",
                json={
                    "power_load": 500.0,
                    "time_of_day": datetime.now().hour,
                    "production_rate": 100.0
                }
            )
            assert opt_response.status_code == 200

    def test_forecast_based_planning(self):
        """Test forecast-based planning workflow"""

        # 1. Generate forecast
        forecast_response = requests.post(
            f"{OPTIMIZER_BASE_URL}/api/v1/forecast",
            json={"hours": 12}
        )
        assert forecast_response.status_code == 200
        forecast = forecast_response.json()

        # 2. Use forecast for planning
        assert len(forecast["forecast"]) == 12

        # 3. Run optimization based on forecast
        avg_energy = sum(item["energy_consumption"] for item in forecast["forecast"]) / 12

        opt_response = requests.post(
            f"{OPTIMIZER_BASE_URL}/api/v1/optimize/energy_optimization",
            json={
                "power_load": avg_energy,
                "time_of_day": datetime.now().hour,
                "production_rate": 95.0
            }
        )
        assert opt_response.status_code == 200


@pytest.fixture(scope="session", autouse=True)
def wait_for_services():
    """Wait for services to be ready before running tests"""
    print("⏳ Waiting for mock services to be ready...")

    max_wait = 60  # Maximum wait time in seconds
    start_time = time.time()

    services_ready = False
    while time.time() - start_time < max_wait:
        try:
            # Check if all services are responding; per-request timeouts
            # keep a hung service from stalling the whole wait loop
            scada_ready = requests.get(f"{SCADA_BASE_URL}/health", timeout=2).status_code == 200
            optimizer_ready = requests.get(f"{OPTIMIZER_BASE_URL}/health", timeout=2).status_code == 200
            calejo_ready = requests.get(f"{CALEJO_BASE_URL}/health", timeout=2).status_code == 200

            if scada_ready and optimizer_ready and calejo_ready:
                services_ready = True
                break
        except requests.RequestException:
            pass

        time.sleep(2)

    if not services_ready:
        pytest.skip("Mock services not ready within timeout period")

    print("✅ All mock services are ready!")


if __name__ == "__main__":
    # Run tests directly
    pytest.main([__file__, "-v"])