CalejoControl/deploy/ssh/deploy-remote.py


#!/usr/bin/env python3
"""
Calejo Control Adapter - Python SSH Deployment Script
Alternative deployment script using Python for more complex deployments
"""
import argparse
import os
import sys
import tarfile
import tempfile
import time
from typing import Any, Dict

import paramiko
import yaml


class SSHDeployer:
    """SSH-based deployment manager"""

    def __init__(self, config_file: str):
        self.config_file = config_file
        self.config = self.load_config()
        self.ssh_client = None
        self.sftp_client = None
        self.package_path = None

    def load_config(self) -> Dict[str, Any]:
        """Load deployment configuration from YAML file"""
        try:
            with open(self.config_file, 'r') as f:
                config = yaml.safe_load(f)

            # Validate required configuration
            required = ['ssh.host', 'ssh.username', 'ssh.key_file']
            for req in required:
                keys = req.split('.')
                current = config
                for key in keys:
                    if key not in current:
                        raise ValueError(f"Missing required configuration: {req}")
                    current = current[key]

            return config
        except Exception as e:
            print(f"❌ Error loading configuration: {e}")
            sys.exit(1)
    def connect(self) -> bool:
        """Establish SSH connection"""
        try:
            ssh_config = self.config['ssh']

            # Create SSH client
            self.ssh_client = paramiko.SSHClient()
            self.ssh_client.set_missing_host_key_policy(paramiko.AutoAddPolicy())

            # Load private key (this loader expects an Ed25519 key)
            key_path = ssh_config['key_file']
            if not os.path.exists(key_path):
                print(f"❌ SSH key file not found: {key_path}")
                return False
            private_key = paramiko.Ed25519Key.from_private_key_file(key_path)

            # Connect
            port = ssh_config.get('port', 22)
            self.ssh_client.connect(
                hostname=ssh_config['host'],
                port=port,
                username=ssh_config['username'],
                pkey=private_key,
                timeout=30
            )

            # Create SFTP client
            self.sftp_client = self.ssh_client.open_sftp()

            print(f"✅ Connected to {ssh_config['host']}")
            return True
        except Exception as e:
            print(f"❌ SSH connection failed: {e}")
            return False
    def execute_remote(self, command: str, description: str = "", silent: bool = False) -> bool:
        """Execute command on remote server"""
        try:
            if description and not silent:
                print(f"🔧 {description}")

            stdin, stdout, stderr = self.ssh_client.exec_command(command)
            exit_status = stdout.channel.recv_exit_status()

            if exit_status == 0:
                if description and not silent:
                    print(f"{description} completed")
                return True
            else:
                error_output = stderr.read().decode()
                if not silent:
                    print(f"{description} failed: {error_output}")
                return False
        except Exception as e:
            if not silent:
                print(f"{description} failed: {e}")
            return False

    def transfer_file(self, local_path: str, remote_path: str, description: str = "") -> bool:
        """Transfer file to remote server"""
        try:
            if description:
                print(f"📁 {description}")
            self.sftp_client.put(local_path, remote_path)
            if description:
                print(f"{description} completed")
            return True
        except Exception as e:
            print(f"{description} failed: {e}")
            return False
    def create_deployment_package(self) -> str:
        """Create deployment package excluding sensitive files"""
        temp_dir = tempfile.mkdtemp()
        package_path = os.path.join(temp_dir, "deployment.tar.gz")

        # Create tar.gz package
        with tarfile.open(package_path, "w:gz") as tar:
            # Add all files except deployment config and keys
            for root, dirs, files in os.walk('.'):
                # Skip deployment directories
                if 'deploy/config' in root or 'deploy/keys' in root:
                    continue

                # Skip hidden directories
                dirs[:] = [d for d in dirs if not d.startswith('.')]

                for file in files:
                    # Skip hidden files except .env files
                    if file.startswith('.') and not file.startswith('.env'):
                        continue

                    file_path = os.path.join(root, file)
                    arcname = os.path.relpath(file_path, '.')

                    # Handle docker-compose.yml specially for test environment
                    if file == 'docker-compose.yml' and 'test' in self.config_file:
                        # Create modified docker-compose for test environment
                        modified_compose = self.create_test_docker_compose(file_path)
                        temp_compose_path = os.path.join(temp_dir, 'docker-compose.yml')
                        with open(temp_compose_path, 'w') as f:
                            f.write(modified_compose)
                        tar.add(temp_compose_path, arcname='docker-compose.yml')
                    # Handle .env files for test environment
                    elif file.startswith('.env') and 'test' in self.config_file:
                        if file == '.env.test':
                            # Copy .env.test as .env for test environment
                            temp_env_path = os.path.join(temp_dir, '.env')
                            with open(file_path, 'r') as src, open(temp_env_path, 'w') as dst:
                                dst.write(src.read())
                            tar.add(temp_env_path, arcname='.env')
                        # Skip other .env files in test environment
                    else:
                        tar.add(file_path, arcname=arcname)

        # Remember the package path so deploy() can clean it up in its finally block
        self.package_path = package_path
        return package_path
    def create_test_docker_compose(self, original_compose_path: str) -> str:
        """Create modified docker-compose.yml for test environment"""
        with open(original_compose_path, 'r') as f:
            content = f.read()

        # Replace container names and ports for test environment
        replacements = {
            'calejo-control-adapter': 'calejo-control-adapter-test',
            'calejo-postgres': 'calejo-postgres-test',
            'calejo-prometheus': 'calejo-prometheus-test',
            'calejo-grafana': 'calejo-grafana-test',
            '"8080:8080"': '"8081:8080"',  # Test app port
            '"4840:4840"': '"4841:4840"',  # Test OPC UA port
            '"502:502"': '"503:502"',  # Test Modbus port
            '"9090:9090"': '"9092:9090"',  # Test Prometheus metrics
            '"5432:5432"': '"5433:5432"',  # Test PostgreSQL port
            '"9091:9090"': '"9093:9090"',  # Test Prometheus UI
            '"3000:3000"': '"3001:3000"',  # Test Grafana port
            'calejo': 'calejo_test',  # Test database name
            'calejo-network': 'calejo-network-test',
            '@postgres:5432': '@calejo_test-postgres-test:5432',  # Fix database hostname
            ' - DATABASE_URL=postgresql://calejo_test:password@calejo_test-postgres-test:5432/calejo_test': ' # DATABASE_URL removed - using .env file instead'  # Remove DATABASE_URL to use .env file
        }

        for old, new in replacements.items():
            content = content.replace(old, new)

        return content
    def deploy(self, dry_run: bool = False):
        """Main deployment process"""
        print("🚀 Starting SSH deployment...")
        if dry_run:
            print("🔍 DRY RUN MODE - No changes will be made")

        # Connect to server
        if not self.connect():
            return False

        try:
            deployment_config = self.config['deployment']
            target_dir = deployment_config['target_dir']

            # Check prerequisites
            print("🔍 Checking prerequisites...")
            if not self.execute_remote("command -v docker", "Checking Docker"):
                return False
            if not self.execute_remote("command -v docker-compose", "Checking Docker Compose"):
                return False

            # Create directories
            print("📁 Creating directories...")
            dirs = [
                target_dir,
                deployment_config.get('backup_dir', '/var/backup/calejo'),
                deployment_config.get('log_dir', '/var/log/calejo'),
                deployment_config.get('config_dir', '/etc/calejo')
            ]
            for dir_path in dirs:
                cmd = f"sudo mkdir -p {dir_path} && sudo chown {self.config['ssh']['username']}:{self.config['ssh']['username']} {dir_path}"
                if not self.execute_remote(cmd, f"Creating {dir_path}"):
                    return False

            # Create deployment package
            print("📦 Creating deployment package...")
            package_path = self.create_deployment_package()

            if dry_run:
                print(f"  📦 Would transfer package: {package_path}")
                os.remove(package_path)
                return True

            # Transfer package
            remote_package_path = os.path.join(target_dir, "deployment.tar.gz")
            if not self.transfer_file(package_path, remote_package_path, "Transferring deployment package"):
                return False

            # Extract package
            if not self.execute_remote(f"cd {target_dir} && tar -xzf deployment.tar.gz && rm deployment.tar.gz", "Extracting package"):
                return False

            # Set permissions
            if not self.execute_remote(f"chmod +x {target_dir}/scripts/*.sh", "Setting script permissions"):
                return False

            # Build and start services
            print("🐳 Building and starting services...")
            if not self.execute_remote(f"cd {target_dir} && sudo docker-compose build", "Building Docker images"):
                return False
            if not self.execute_remote(f"cd {target_dir} && sudo docker-compose up -d", "Starting services"):
                return False

            # Wait for services
            print("⏳ Waiting for services to start...")
            # Determine health check port based on environment
            health_port = "8081" if 'test' in self.config_file else "8080"
            for i in range(30):
                if self.execute_remote(f"curl -s http://localhost:{health_port}/health > /dev/null", "", silent=True):
                    print("  ✅ Services started successfully")
                    break
                print(f"  ⏳ Waiting... ({i+1}/30)")
                time.sleep(2)
            else:
                print("  ❌ Services failed to start within 60 seconds")
                return False

            # Validate deployment
            print("🔍 Validating deployment...")
            self.execute_remote(f"cd {target_dir} && ./validate-deployment.sh", "Running validation")

            print("🎉 Deployment completed successfully!")
            return True
        finally:
            # Cleanup local deployment package, if one was created
            if self.package_path and os.path.exists(self.package_path):
                os.remove(self.package_path)

            # Close connections
            if self.sftp_client:
                self.sftp_client.close()
            if self.ssh_client:
                self.ssh_client.close()


def main():
    """Main function"""
    parser = argparse.ArgumentParser(description='Calejo Control Adapter - SSH Deployment')
    parser.add_argument('-c', '--config', required=True, help='Deployment configuration file')
    parser.add_argument('--dry-run', action='store_true', help='Dry run mode')
    args = parser.parse_args()

    # Check if config file exists
    if not os.path.exists(args.config):
        print(f"❌ Configuration file not found: {args.config}")
        sys.exit(1)

    # Run deployment
    deployer = SSHDeployer(args.config)
    success = deployer.deploy(dry_run=args.dry_run)
    sys.exit(0 if success else 1)


if __name__ == "__main__":
    main()
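
# Example invocations (config paths are illustrative; point -c at your actual
# deployment YAML). Note that the script switches to test ports and test
# container names whenever "test" appears in the config filename:
#   python3 deploy/ssh/deploy-remote.py -c deploy/config/test.yml --dry-run
#   python3 deploy/ssh/deploy-remote.py -c deploy/config/production.yml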